From 29a5d654ff840be59955afe675a5f9d082dc71ad Mon Sep 17 00:00:00 2001 From: konard Date: Sun, 18 Jan 2026 22:31:36 +0100 Subject: [PATCH 01/10] Initial commit with task details Adding CLAUDE.md with task information for AI processing. This file will be removed when the task is complete. Issue: https://github.com/link-foundation/links-queue/issues/25 --- CLAUDE.md | 5 +++++ 1 file changed, 5 insertions(+) create mode 100644 CLAUDE.md diff --git a/CLAUDE.md b/CLAUDE.md new file mode 100644 index 0000000..495ba33 --- /dev/null +++ b/CLAUDE.md @@ -0,0 +1,5 @@ +Issue to solve: https://github.com/link-foundation/links-queue/issues/25 +Your prepared branch: issue-25-fadd4ff659d6 +Your prepared working directory: /tmp/gh-issue-solver-1768771894624 + +Proceed. From cde27e51b39d3d17efa5d9e474f7afbd24771938 Mon Sep 17 00:00:00 2001 From: konard Date: Sun, 18 Jan 2026 22:52:17 +0100 Subject: [PATCH 02/10] feat(js): Implement Phase 7 advanced queue features Add new JavaScript feature modules under src/features/: - scheduler.js: Delayed messages, cron jobs, TTL, message expiration - rate-limiter.js: Sliding window counter, token bucket, per-queue/consumer limits - router.js: Topic-based routing with AMQP-style wildcards (*, #) - pubsub.js: Pub/sub patterns with topic management and message filtering Features include: - CronParser for 5-field cron expressions - Scheduler with delayed delivery and cron job management - SlidingWindowCounter and TokenBucket rate limiting algorithms - RateLimiter with multi-key tracking - DirectExchange, TopicExchange, FanoutExchange, HeadersExchange - TopicMatcher with wildcard pattern matching - Router for flexible message routing - PubSubBroker with subscription management - ObservableQueue for event-driven queue monitoring - QueueBackedPubSub for durable pub/sub with queue storage Add comprehensive unit tests for all features: - 22 scheduler tests - 41 rate-limiter tests - 72 router tests - 61 pubsub tests Co-Authored-By: Claude Opus 4.5 --- js/src/features/index.js | 37 + js/src/features/pubsub.js | 1120 ++++++++++++++++++++++++++++++ js/src/features/rate-limiter.js | 813 ++++++++++++++++++++++ js/src/features/router.js | 1128 +++++++++++++++++++++++++++++++ js/src/features/scheduler.js | 851 +++++++++++++++++++++++ js/src/index.js | 31 + js/tests/pubsub.test.js | 886 ++++++++++++++++++++++++ js/tests/rate-limiter.test.js | 658 ++++++++++++++++++ js/tests/router.test.js | 870 ++++++++++++++++++++++++ js/tests/scheduler.test.js | 524 ++++++++++++++ 10 files changed, 6918 insertions(+) create mode 100644 js/src/features/index.js create mode 100644 js/src/features/pubsub.js create mode 100644 js/src/features/rate-limiter.js create mode 100644 js/src/features/router.js create mode 100644 js/src/features/scheduler.js create mode 100644 js/tests/pubsub.test.js create mode 100644 js/tests/rate-limiter.test.js create mode 100644 js/tests/router.test.js create mode 100644 js/tests/scheduler.test.js diff --git a/js/src/features/index.js b/js/src/features/index.js new file mode 100644 index 0000000..eb5c273 --- /dev/null +++ b/js/src/features/index.js @@ -0,0 +1,37 @@ +/** + * Advanced queue features module exports. + * + * @module features + */ + +// Scheduler exports +export { CronParser, Scheduler, ScheduledQueue } from './scheduler.js'; + +// Rate limiter exports +export { + SlidingWindowCounter, + TokenBucket, + RateLimiter, + RateLimitedQueue, + RateLimitError, +} from './rate-limiter.js'; + +// Router exports +export { + ExchangeType, + TopicMatcher, + DirectExchange, + TopicExchange, + FanoutExchange, + HeadersExchange, + Router, + RoutedQueueManager, +} from './router.js'; + +// Pub/Sub exports +export { + MessageFilter, + PubSubBroker, + ObservableQueue, + QueueBackedPubSub, +} from './pubsub.js'; diff --git a/js/src/features/pubsub.js b/js/src/features/pubsub.js new file mode 100644 index 0000000..6ae483a --- /dev/null +++ b/js/src/features/pubsub.js @@ -0,0 +1,1120 @@ +/** + * Pub/Sub module for links-queue. + * + * This module provides publish/subscribe messaging patterns: + * - Topic creation/deletion + * - Subscribe/unsubscribe + * - Fan-out delivery + * - Message filtering + * + * @module features/pubsub + * + * @see REQUIREMENTS.md - Feature Parity with Competitors + * @see ROADMAP.md - Phase 7: Advanced Features + */ + +// ============================================================================= +// Subscription +// ============================================================================= + +/** + * Represents an active subscription. + * + * @typedef {Object} Subscription + * @property {string} id - Unique subscription identifier + * @property {string} topic - Topic name + * @property {string} [pattern] - Optional filter pattern + * @property {(message: PublishedMessage) => Promise} handler - Message handler + * @property {boolean} active - Whether the subscription is active + * @property {number} created - Creation timestamp + * @property {number} received - Number of messages received + */ + +// ============================================================================= +// Published Message +// ============================================================================= + +/** + * Represents a published message. + * + * @typedef {Object} PublishedMessage + * @property {string} id - Unique message identifier + * @property {string} topic - Topic the message was published to + * @property {import('../index.d.ts').Link} link - The link data + * @property {number} timestamp - Publication timestamp + * @property {Object} [headers] - Optional message headers + */ + +// ============================================================================= +// Topic +// ============================================================================= + +/** + * Represents a topic for pub/sub messaging. + * + * @typedef {Object} Topic + * @property {string} name - Topic name + * @property {number} created - Creation timestamp + * @property {number} messageCount - Total messages published + * @property {number} subscriberCount - Current number of subscribers + */ + +// ============================================================================= +// Message Filter +// ============================================================================= + +/** + * Filter for messages based on content. + * + * @example + * const filter = new MessageFilter(); + * + * // Match by source + * filter.matches({ source: 'user' }, { source: 'user', target: 'created' }); // true + * + * // Match by pattern + * filter.matches({ source: /^user/ }, { source: 'user-123', target: 'created' }); // true + */ +export class MessageFilter { + /** + * Checks if a link matches a filter pattern. + * + * @param {Object} pattern - Filter pattern + * @param {import('../index.d.ts').Link} link - The link to check + * @returns {boolean} True if the link matches + */ + static matches(pattern, link) { + for (const [key, value] of Object.entries(pattern)) { + const linkValue = link[key]; + + if (value instanceof RegExp) { + if (!value.test(String(linkValue))) { + return false; + } + } else if (typeof value === 'function') { + if (!value(linkValue)) { + return false; + } + } else if (linkValue !== value) { + return false; + } + } + return true; + } + + /** + * Creates a filter function from a pattern. + * + * @param {Object} pattern - Filter pattern + * @returns {(link: import('../index.d.ts').Link) => boolean} + */ + static createFilter(pattern) { + return (link) => this.matches(pattern, link); + } +} + +// ============================================================================= +// Pub/Sub Broker +// ============================================================================= + +/** + * Central broker for pub/sub messaging. + * + * Manages topics, subscriptions, and message delivery. + * + * @example + * const broker = new PubSubBroker(); + * + * // Create a topic + * broker.createTopic('events'); + * + * // Subscribe to the topic + * const sub = broker.subscribe('events', async (message) => { + * console.log('Received:', message.link); + * }); + * + * // Publish a message + * await broker.publish('events', { id: 1, source: 'user', target: 'created' }); + * + * // Unsubscribe + * broker.unsubscribe(sub.id); + */ +export class PubSubBroker { + /** + * Creates a new PubSubBroker. + * + * @param {Object} [options] - Broker options + * @param {boolean} [options.autoCreateTopics=true] - Automatically create topics on publish + * @param {number} [options.messageRetention=0] - Message retention in ms (0 = no retention) + */ + constructor(options = {}) { + /** + * Auto-create topics flag. + * @type {boolean} + * @private + */ + this._autoCreateTopics = options.autoCreateTopics ?? true; + + /** + * Message retention period. + * @type {number} + * @private + */ + this._messageRetention = options.messageRetention ?? 0; + + /** + * Topics by name. + * @type {Map} + * @private + */ + this._topics = new Map(); + + /** + * Subscriptions by ID. + * @type {Map} + * @private + */ + this._subscriptions = new Map(); + + /** + * Subscriptions grouped by topic. + * @type {Map>} + * @private + */ + this._topicSubscriptions = new Map(); + + /** + * Message history (if retention enabled). + * @type {Map} + * @private + */ + this._messageHistory = new Map(); + + /** + * ID counter. + * @type {number} + * @private + */ + this._idCounter = 0; + + /** + * Statistics. + * @type {{published: number, delivered: number, filtered: number}} + * @private + */ + this._stats = { + published: 0, + delivered: 0, + filtered: 0, + }; + } + + /** + * Generates a unique ID. + * + * @returns {string} + * @private + */ + _generateId(prefix = 'id') { + return `${prefix}_${Date.now()}_${++this._idCounter}`; + } + + /** + * Creates a new topic. + * + * @param {string} name - Topic name + * @returns {Topic} The created topic + * @throws {Error} If topic already exists + */ + createTopic(name) { + if (this._topics.has(name)) { + throw new Error(`Topic '${name}' already exists`); + } + + const topic = { + name, + created: Date.now(), + messageCount: 0, + subscriberCount: 0, + }; + + this._topics.set(name, topic); + this._topicSubscriptions.set(name, new Set()); + + if (this._messageRetention > 0) { + this._messageHistory.set(name, []); + } + + return topic; + } + + /** + * Gets a topic by name. + * + * @param {string} name - Topic name + * @returns {Topic|undefined} + */ + getTopic(name) { + return this._topics.get(name); + } + + /** + * Deletes a topic and all its subscriptions. + * + * @param {string} name - Topic name + * @returns {boolean} True if the topic was deleted + */ + deleteTopic(name) { + const topic = this._topics.get(name); + if (!topic) { + return false; + } + + // Remove all subscriptions for this topic + const subIds = this._topicSubscriptions.get(name); + if (subIds) { + for (const subId of subIds) { + this._subscriptions.delete(subId); + } + } + + this._topicSubscriptions.delete(name); + this._messageHistory.delete(name); + this._topics.delete(name); + + return true; + } + + /** + * Lists all topics. + * + * @returns {Topic[]} + */ + listTopics() { + return [...this._topics.values()]; + } + + /** + * Subscribes to a topic. + * + * @param {string} topic - Topic name + * @param {(message: PublishedMessage) => Promise} handler - Message handler + * @param {Object} [options] - Subscription options + * @param {Object} [options.filter] - Filter pattern for messages + * @returns {Subscription} The created subscription + * + * @example + * // Subscribe to all messages + * const sub1 = broker.subscribe('events', async (msg) => { + * console.log('Event:', msg.link); + * }); + * + * // Subscribe with filter + * const sub2 = broker.subscribe('events', handler, { + * filter: { source: 'user' } + * }); + */ + subscribe(topic, handler, options = {}) { + // Auto-create topic if enabled + if (!this._topics.has(topic)) { + if (this._autoCreateTopics) { + this.createTopic(topic); + } else { + throw new Error(`Topic '${topic}' does not exist`); + } + } + + const subscription = { + id: this._generateId('sub'), + topic, + pattern: options.filter || null, + handler, + active: true, + created: Date.now(), + received: 0, + }; + + this._subscriptions.set(subscription.id, subscription); + this._topicSubscriptions.get(topic).add(subscription.id); + + // Update topic subscriber count + const topicInfo = this._topics.get(topic); + topicInfo.subscriberCount++; + + return subscription; + } + + /** + * Unsubscribes from a topic. + * + * @param {string} subscriptionId - Subscription ID + * @returns {boolean} True if the subscription was removed + */ + unsubscribe(subscriptionId) { + const subscription = this._subscriptions.get(subscriptionId); + if (!subscription) { + return false; + } + + // Remove from topic's subscription set + const topicSubs = this._topicSubscriptions.get(subscription.topic); + if (topicSubs) { + topicSubs.delete(subscriptionId); + } + + // Update topic subscriber count + const topicInfo = this._topics.get(subscription.topic); + if (topicInfo) { + topicInfo.subscriberCount--; + } + + this._subscriptions.delete(subscriptionId); + return true; + } + + /** + * Pauses a subscription. + * + * @param {string} subscriptionId - Subscription ID + * @returns {boolean} True if the subscription was paused + */ + pause(subscriptionId) { + const subscription = this._subscriptions.get(subscriptionId); + if (subscription) { + subscription.active = false; + return true; + } + return false; + } + + /** + * Resumes a subscription. + * + * @param {string} subscriptionId - Subscription ID + * @returns {boolean} True if the subscription was resumed + */ + resume(subscriptionId) { + const subscription = this._subscriptions.get(subscriptionId); + if (subscription) { + subscription.active = true; + return true; + } + return false; + } + + /** + * Gets a subscription by ID. + * + * @param {string} subscriptionId - Subscription ID + * @returns {Subscription|undefined} + */ + getSubscription(subscriptionId) { + return this._subscriptions.get(subscriptionId); + } + + /** + * Lists all subscriptions for a topic. + * + * @param {string} [topic] - Optional topic to filter by + * @returns {Subscription[]} + */ + listSubscriptions(topic) { + if (topic) { + const subIds = this._topicSubscriptions.get(topic); + if (subIds) { + return [...subIds] + .map((id) => this._subscriptions.get(id)) + .filter(Boolean); + } + return []; + } + return [...this._subscriptions.values()]; + } + + /** + * Publishes a message to a topic. + * + * @param {string} topic - Topic name + * @param {import('../index.d.ts').Link} link - The link to publish + * @param {Object} [headers={}] - Optional message headers + * @returns {Promise<{delivered: number, filtered: number}>} Delivery result + * + * @example + * await broker.publish('events', { id: 1, source: 'user', target: 'created' }); + * + * // With headers + * await broker.publish('events', link, { priority: 'high' }); + */ + async publish(topic, link, headers = {}) { + // Auto-create topic if enabled + if (!this._topics.has(topic)) { + if (this._autoCreateTopics) { + this.createTopic(topic); + } else { + throw new Error(`Topic '${topic}' does not exist`); + } + } + + const message = { + id: this._generateId('msg'), + topic, + link, + timestamp: Date.now(), + headers, + }; + + // Update topic stats + const topicInfo = this._topics.get(topic); + topicInfo.messageCount++; + this._stats.published++; + + // Store in history if retention enabled + if (this._messageRetention > 0) { + const history = this._messageHistory.get(topic); + if (history) { + history.push(message); + // Clean up old messages + const cutoff = Date.now() - this._messageRetention; + while (history.length > 0 && history[0].timestamp < cutoff) { + history.shift(); + } + } + } + + // Deliver to subscribers + let delivered = 0; + let filtered = 0; + + const subIds = this._topicSubscriptions.get(topic); + if (subIds) { + for (const subId of subIds) { + const subscription = this._subscriptions.get(subId); + if (!subscription || !subscription.active) { + continue; + } + + // Check filter + if ( + subscription.pattern && + !MessageFilter.matches(subscription.pattern, link) + ) { + filtered++; + this._stats.filtered++; + continue; + } + + // Deliver message + try { + await subscription.handler(message); + subscription.received++; + delivered++; + this._stats.delivered++; + } catch (error) { + console.error(`Error delivering to subscription ${subId}:`, error); + } + } + } + + return { delivered, filtered }; + } + + /** + * Publishes to multiple topics. + * + * @param {string[]} topics - Topic names + * @param {import('../index.d.ts').Link} link - The link to publish + * @param {Object} [headers={}] - Optional message headers + * @returns {Promise>} + */ + async publishMany(topics, link, headers = {}) { + const results = new Map(); + + for (const topic of topics) { + const result = await this.publish(topic, link, headers); + results.set(topic, result); + } + + return results; + } + + /** + * Gets recent messages for a topic (if retention enabled). + * + * @param {string} topic - Topic name + * @param {number} [limit=100] - Maximum messages to return + * @returns {PublishedMessage[]} + */ + getHistory(topic, limit = 100) { + const history = this._messageHistory.get(topic); + if (!history) { + return []; + } + return history.slice(-limit); + } + + /** + * Gets broker statistics. + * + * @returns {{topics: number, subscriptions: number, published: number, delivered: number, filtered: number}} + */ + getStats() { + return { + topics: this._topics.size, + subscriptions: this._subscriptions.size, + ...this._stats, + }; + } + + /** + * Clears all state. + */ + clear() { + this._topics.clear(); + this._subscriptions.clear(); + this._topicSubscriptions.clear(); + this._messageHistory.clear(); + this._stats = { + published: 0, + delivered: 0, + filtered: 0, + }; + } +} + +// ============================================================================= +// Observable Queue +// ============================================================================= + +/** + * A queue that supports pub/sub-style subscriptions. + * + * Wraps an existing queue and emits events when items are enqueued/dequeued. + * + * @example + * const baseQueue = new LinksQueue({ name: 'tasks', store }); + * const observableQueue = new ObservableQueue(baseQueue); + * + * // Subscribe to enqueue events + * observableQueue.onEnqueue(async (link) => { + * console.log('New item:', link); + * }); + * + * // Subscribe to dequeue events + * observableQueue.onDequeue(async (link) => { + * console.log('Processing:', link); + * }); + */ +export class ObservableQueue { + /** + * Creates a new ObservableQueue. + * + * @param {import('../queue/queue.js').LinksQueue} queue - The underlying queue + */ + constructor(queue) { + /** + * The underlying queue. + * @type {import('../queue/queue.js').LinksQueue} + * @private + */ + this._queue = queue; + + /** + * Enqueue handlers. + * @type {Set<(link: import('../index.d.ts').Link) => Promise>} + * @private + */ + this._enqueueHandlers = new Set(); + + /** + * Dequeue handlers. + * @type {Set<(link: import('../index.d.ts').Link) => Promise>} + * @private + */ + this._dequeueHandlers = new Set(); + + /** + * Acknowledge handlers. + * @type {Set<(id: import('../index.d.ts').LinkId) => Promise>} + * @private + */ + this._acknowledgeHandlers = new Set(); + + /** + * Reject handlers. + * @type {Set<(id: import('../index.d.ts').LinkId, requeue: boolean) => Promise>} + * @private + */ + this._rejectHandlers = new Set(); + } + + /** + * Gets the queue name. + * + * @returns {string} + */ + get name() { + return this._queue.name; + } + + /** + * Subscribes to enqueue events. + * + * @param {(link: import('../index.d.ts').Link) => Promise} handler - Event handler + * @returns {() => void} Unsubscribe function + */ + onEnqueue(handler) { + this._enqueueHandlers.add(handler); + return () => this._enqueueHandlers.delete(handler); + } + + /** + * Subscribes to dequeue events. + * + * @param {(link: import('../index.d.ts').Link) => Promise} handler - Event handler + * @returns {() => void} Unsubscribe function + */ + onDequeue(handler) { + this._dequeueHandlers.add(handler); + return () => this._dequeueHandlers.delete(handler); + } + + /** + * Subscribes to acknowledge events. + * + * @param {(id: import('../index.d.ts').LinkId) => Promise} handler - Event handler + * @returns {() => void} Unsubscribe function + */ + onAcknowledge(handler) { + this._acknowledgeHandlers.add(handler); + return () => this._acknowledgeHandlers.delete(handler); + } + + /** + * Subscribes to reject events. + * + * @param {(id: import('../index.d.ts').LinkId, requeue: boolean) => Promise} handler - Event handler + * @returns {() => void} Unsubscribe function + */ + onReject(handler) { + this._rejectHandlers.add(handler); + return () => this._rejectHandlers.delete(handler); + } + + /** + * Enqueues a link and notifies subscribers. + * + * @param {import('../index.d.ts').Link} link - The link to enqueue + * @returns {Promise} + */ + async enqueue(link) { + const result = await this._queue.enqueue(link); + + // Notify handlers + for (const handler of this._enqueueHandlers) { + try { + await handler(link); + } catch (error) { + console.error('Enqueue handler error:', error); + } + } + + return result; + } + + /** + * Dequeues and notifies subscribers. + * + * @returns {Promise} + */ + async dequeue() { + const link = await this._queue.dequeue(); + + if (link) { + // Notify handlers + for (const handler of this._dequeueHandlers) { + try { + await handler(link); + } catch (error) { + console.error('Dequeue handler error:', error); + } + } + } + + return link; + } + + /** + * Peeks at the next item. + * + * @returns {Promise} + */ + async peek() { + return this._queue.peek(); + } + + /** + * Acknowledges and notifies subscribers. + * + * @param {import('../index.d.ts').LinkId} id - Item ID + */ + async acknowledge(id) { + await this._queue.acknowledge(id); + + // Notify handlers + for (const handler of this._acknowledgeHandlers) { + try { + await handler(id); + } catch (error) { + console.error('Acknowledge handler error:', error); + } + } + } + + /** + * Rejects and notifies subscribers. + * + * @param {import('../index.d.ts').LinkId} id - Item ID + * @param {boolean} [requeue=false] - Whether to requeue + */ + async reject(id, requeue = false) { + await this._queue.reject(id, requeue); + + // Notify handlers + for (const handler of this._rejectHandlers) { + try { + await handler(id, requeue); + } catch (error) { + console.error('Reject handler error:', error); + } + } + } + + /** + * Gets queue statistics. + * + * @returns {import('../queue/types.ts').QueueStats} + */ + getStats() { + return this._queue.getStats(); + } + + /** + * Gets the queue depth. + * + * @returns {number} + */ + getDepth() { + return this._queue.getDepth(); + } + + /** + * Clears the queue and removes all handlers. + */ + async clear() { + this._enqueueHandlers.clear(); + this._dequeueHandlers.clear(); + this._acknowledgeHandlers.clear(); + this._rejectHandlers.clear(); + await this._queue.clear(); + } +} + +// ============================================================================= +// Queue-backed Pub/Sub +// ============================================================================= + +/** + * Pub/Sub implementation backed by queues for durability. + * + * Each subscription gets its own queue for message persistence. + * + * @example + * const pubsub = new QueueBackedPubSub(queueManager); + * + * // Create a subscription with a queue + * const sub = await pubsub.subscribe('events', 'events-consumer-1', async (msg) => { + * console.log('Received:', msg); + * }); + * + * // Publish (message is enqueued to all subscriber queues) + * await pubsub.publish('events', link); + * + * // Start processing (dequeues and processes messages) + * await pubsub.startConsumer(sub.id); + */ +export class QueueBackedPubSub { + /** + * Creates a new QueueBackedPubSub. + * + * @param {import('../queue/memory-queue.js').MemoryQueueManager} queueManager - Queue manager + */ + constructor(queueManager) { + /** + * Queue manager. + * @type {import('../queue/memory-queue.js').MemoryQueueManager} + * @private + */ + this._queueManager = queueManager; + + /** + * Topics with their subscriber queues. + * @type {Map>} + * @private + */ + this._topics = new Map(); + + /** + * Subscriptions by ID. + * @type {Map} + * @private + */ + this._subscriptions = new Map(); + + /** + * ID counter. + * @type {number} + * @private + */ + this._idCounter = 0; + } + + /** + * Creates a topic. + * + * @param {string} name - Topic name + * @returns {boolean} True if created, false if already exists + */ + createTopic(name) { + if (this._topics.has(name)) { + return false; + } + this._topics.set(name, new Set()); + return true; + } + + /** + * Deletes a topic. + * + * @param {string} name - Topic name + * @returns {Promise} True if deleted + */ + async deleteTopic(name) { + const subscribers = this._topics.get(name); + if (!subscribers) { + return false; + } + + // Remove all subscriptions and their queues + for (const queueName of subscribers) { + // Find subscription for this queue + for (const [subId, sub] of this._subscriptions) { + if (sub.queueName === queueName) { + await this.unsubscribe(subId); + break; + } + } + } + + this._topics.delete(name); + return true; + } + + /** + * Subscribes to a topic with a dedicated queue. + * + * @param {string} topic - Topic name + * @param {string} subscriberId - Unique subscriber identifier + * @param {(message: import('../index.d.ts').Link) => Promise} handler - Message handler + * @returns {Promise<{id: string, queueName: string}>} + */ + async subscribe(topic, subscriberId, handler) { + // Create topic if not exists + if (!this._topics.has(topic)) { + this._topics.set(topic, new Set()); + } + + // Create a dedicated queue for this subscriber + const queueName = `${topic}-${subscriberId}`; + await this._queueManager.createQueue(queueName); + + // Register the queue with the topic + this._topics.get(topic).add(queueName); + + // Create subscription record + const subId = `sub_${++this._idCounter}`; + const subscription = { + id: subId, + topic, + queueName, + handler, + active: false, + consumerHandle: null, + }; + + this._subscriptions.set(subId, subscription); + + return { id: subId, queueName }; + } + + /** + * Unsubscribes and removes the dedicated queue. + * + * @param {string} subscriptionId - Subscription ID + * @returns {Promise} + */ + async unsubscribe(subscriptionId) { + const subscription = this._subscriptions.get(subscriptionId); + if (!subscription) { + return false; + } + + // Stop consumer + this.stopConsumer(subscriptionId); + + // Remove queue from topic + const topicQueues = this._topics.get(subscription.topic); + if (topicQueues) { + topicQueues.delete(subscription.queueName); + } + + // Delete the queue + await this._queueManager.deleteQueue(subscription.queueName); + + this._subscriptions.delete(subscriptionId); + return true; + } + + /** + * Publishes to a topic (enqueues to all subscriber queues). + * + * @param {string} topic - Topic name + * @param {import('../index.d.ts').Link} link - The link to publish + * @returns {Promise} Number of queues that received the message + */ + async publish(topic, link) { + const subscribers = this._topics.get(topic); + if (!subscribers || subscribers.size === 0) { + return 0; + } + + let count = 0; + for (const queueName of subscribers) { + const queue = await this._queueManager.getQueue(queueName); + if (queue) { + await queue.enqueue(link); + count++; + } + } + + return count; + } + + /** + * Starts consuming messages for a subscription. + * + * @param {string} subscriptionId - Subscription ID + * @param {number} [pollInterval=100] - Poll interval in ms + */ + async startConsumer(subscriptionId, pollInterval = 100) { + const subscription = this._subscriptions.get(subscriptionId); + if (!subscription || subscription.active) { + return; + } + + subscription.active = true; + + const consume = async () => { + if (!subscription.active) { + return; + } + + const queue = await this._queueManager.getQueue(subscription.queueName); + if (queue) { + const link = await queue.dequeue(); + if (link) { + try { + await subscription.handler(link); + await queue.acknowledge(link.id); + } catch (error) { + console.error('Consumer error:', error); + await queue.reject(link.id, true); + } + } + } + + subscription.consumerHandle = globalThis.setTimeout( + consume, + pollInterval + ); + }; + + consume(); + } + + /** + * Stops consuming messages for a subscription. + * + * @param {string} subscriptionId - Subscription ID + */ + stopConsumer(subscriptionId) { + const subscription = this._subscriptions.get(subscriptionId); + if (!subscription) { + return; + } + + subscription.active = false; + if (subscription.consumerHandle) { + globalThis.clearTimeout(subscription.consumerHandle); + subscription.consumerHandle = null; + } + } + + /** + * Lists all topics. + * + * @returns {Array<{name: string, subscribers: number}>} + */ + listTopics() { + return [...this._topics.entries()].map(([name, subs]) => ({ + name, + subscribers: subs.size, + })); + } + + /** + * Lists subscriptions for a topic. + * + * @param {string} [topic] - Optional topic filter + * @returns {Array<{id: string, topic: string, queueName: string, active: boolean}>} + */ + listSubscriptions(topic) { + let subs = [...this._subscriptions.values()]; + if (topic) { + subs = subs.filter((s) => s.topic === topic); + } + return subs.map((s) => ({ + id: s.id, + topic: s.topic, + queueName: s.queueName, + active: s.active, + })); + } + + /** + * Clears all state. + */ + async clear() { + // Stop all consumers + for (const subId of this._subscriptions.keys()) { + this.stopConsumer(subId); + } + + // Delete all subscription queues + for (const subscription of this._subscriptions.values()) { + await this._queueManager.deleteQueue(subscription.queueName); + } + + this._topics.clear(); + this._subscriptions.clear(); + } +} diff --git a/js/src/features/rate-limiter.js b/js/src/features/rate-limiter.js new file mode 100644 index 0000000..7dc1d68 --- /dev/null +++ b/js/src/features/rate-limiter.js @@ -0,0 +1,813 @@ +/** + * Rate limiting module for links-queue. + * + * This module provides rate limiting features for queue operations: + * - Per-queue rate limits + * - Per-consumer rate limits + * - Sliding window algorithm + * - Backpressure signaling + * + * @module features/rate-limiter + * + * @see REQUIREMENTS.md - Feature Parity with Competitors + * @see ROADMAP.md - Phase 7: Advanced Features + */ + +// ============================================================================= +// Rate Limit Configuration +// ============================================================================= + +/** + * Configuration for rate limiting. + * + * @typedef {Object} RateLimitConfig + * @property {number} max - Maximum number of operations allowed + * @property {number} window - Time window in milliseconds + * @property {string} [key] - Optional key for per-key rate limiting + */ + +// ============================================================================= +// Rate Limit Result +// ============================================================================= + +/** + * Result of a rate limit check. + * + * @typedef {Object} RateLimitResult + * @property {boolean} allowed - Whether the operation is allowed + * @property {number} remaining - Number of operations remaining in the window + * @property {number} resetAt - Timestamp when the window resets + * @property {number} retryAfter - Milliseconds to wait before retrying (0 if allowed) + */ + +// ============================================================================= +// Sliding Window Counter +// ============================================================================= + +/** + * Implements a sliding window rate limiter. + * + * Uses a sliding window algorithm that provides smoother rate limiting + * compared to fixed windows by interpolating between the previous and + * current window counts. + * + * @example + * const limiter = new SlidingWindowCounter({ + * max: 100, + * window: 60000 // 1 minute + * }); + * + * const result = limiter.check(); + * if (result.allowed) { + * limiter.increment(); + * // Perform operation + * } else { + * // Wait retryAfter milliseconds + * } + */ +export class SlidingWindowCounter { + /** + * Creates a new SlidingWindowCounter. + * + * @param {RateLimitConfig} config - Rate limit configuration + */ + constructor(config) { + /** + * Maximum allowed operations per window. + * @type {number} + * @private + */ + this._max = config.max; + + /** + * Window size in milliseconds. + * @type {number} + * @private + */ + this._windowMs = config.window; + + /** + * Count in the previous window. + * @type {number} + * @private + */ + this._previousCount = 0; + + /** + * Count in the current window. + * @type {number} + * @private + */ + this._currentCount = 0; + + /** + * Start time of the current window. + * @type {number} + * @private + */ + this._windowStart = Date.now(); + } + + /** + * Updates the window if necessary. + * + * @private + */ + _updateWindow() { + const now = Date.now(); + const elapsed = now - this._windowStart; + + if (elapsed >= this._windowMs) { + // Window has fully elapsed + const windowsElapsed = Math.floor(elapsed / this._windowMs); + + if (windowsElapsed >= 2) { + // More than one full window has passed + this._previousCount = 0; + this._currentCount = 0; + } else { + // Exactly one window has passed + this._previousCount = this._currentCount; + this._currentCount = 0; + } + + this._windowStart = now - (elapsed % this._windowMs); + } + } + + /** + * Calculates the weighted count using sliding window interpolation. + * + * @returns {number} The effective count + * @private + */ + _getWeightedCount() { + this._updateWindow(); + + const now = Date.now(); + const elapsed = now - this._windowStart; + const weight = elapsed / this._windowMs; + + // Sliding window: count = current + previous * (1 - weight) + return this._currentCount + this._previousCount * (1 - weight); + } + + /** + * Checks if an operation is allowed. + * + * @returns {RateLimitResult} The rate limit check result + */ + check() { + const count = this._getWeightedCount(); + const allowed = count < this._max; + const remaining = Math.max(0, Math.floor(this._max - count)); + const resetAt = this._windowStart + this._windowMs; + + let retryAfter = 0; + if (!allowed) { + // Calculate when we'll be under the limit + // We need to wait until enough of the previous window has "expired" + const excess = count - this._max + 1; + const timeToExpire = (excess / this._previousCount) * this._windowMs; + retryAfter = Math.max(0, Math.ceil(timeToExpire)); + } + + return { allowed, remaining, resetAt, retryAfter }; + } + + /** + * Increments the counter (use after operation is performed). + * + * @returns {RateLimitResult} The updated rate limit state + */ + increment() { + this._updateWindow(); + this._currentCount++; + return this.check(); + } + + /** + * Checks and increments in one operation. + * + * @returns {RateLimitResult} The rate limit result + */ + consume() { + const result = this.check(); + if (result.allowed) { + this._currentCount++; + result.remaining = Math.max(0, result.remaining - 1); + } + return result; + } + + /** + * Resets the counter. + */ + reset() { + this._previousCount = 0; + this._currentCount = 0; + this._windowStart = Date.now(); + } + + /** + * Gets current statistics. + * + * @returns {{currentCount: number, previousCount: number, weightedCount: number, max: number, windowMs: number}} + */ + getStats() { + return { + currentCount: this._currentCount, + previousCount: this._previousCount, + weightedCount: this._getWeightedCount(), + max: this._max, + windowMs: this._windowMs, + }; + } +} + +// ============================================================================= +// Token Bucket Rate Limiter +// ============================================================================= + +/** + * Implements a token bucket rate limiter. + * + * Tokens are added at a constant rate. Each operation consumes one token. + * This provides burst capacity while maintaining an average rate limit. + * + * @example + * const limiter = new TokenBucket({ + * max: 100, // Bucket capacity + * window: 60000 // Refill time for all tokens + * }); + * + * if (limiter.consume().allowed) { + * // Perform operation + * } + */ +export class TokenBucket { + /** + * Creates a new TokenBucket. + * + * @param {RateLimitConfig} config - Rate limit configuration + */ + constructor(config) { + /** + * Maximum tokens (bucket capacity). + * @type {number} + * @private + */ + this._max = config.max; + + /** + * Current number of tokens. + * @type {number} + * @private + */ + this._tokens = config.max; + + /** + * Tokens added per millisecond. + * @type {number} + * @private + */ + this._refillRate = config.max / config.window; + + /** + * Last refill timestamp. + * @type {number} + * @private + */ + this._lastRefill = Date.now(); + + /** + * Window size in milliseconds. + * @type {number} + * @private + */ + this._windowMs = config.window; + } + + /** + * Refills tokens based on elapsed time. + * + * @private + */ + _refill() { + const now = Date.now(); + const elapsed = now - this._lastRefill; + const tokensToAdd = elapsed * this._refillRate; + + this._tokens = Math.min(this._max, this._tokens + tokensToAdd); + this._lastRefill = now; + } + + /** + * Checks if an operation is allowed. + * + * @returns {RateLimitResult} The rate limit check result + */ + check() { + this._refill(); + + const allowed = this._tokens >= 1; + const remaining = Math.floor(this._tokens); + const resetAt = this._lastRefill + this._windowMs; + + let retryAfter = 0; + if (!allowed) { + // Calculate time to get at least 1 token + retryAfter = Math.ceil((1 - this._tokens) / this._refillRate); + } + + return { allowed, remaining, resetAt, retryAfter }; + } + + /** + * Consumes a token if available. + * + * @returns {RateLimitResult} The rate limit result + */ + consume() { + this._refill(); + + if (this._tokens >= 1) { + this._tokens--; + return { + allowed: true, + remaining: Math.floor(this._tokens), + resetAt: this._lastRefill + this._windowMs, + retryAfter: 0, + }; + } + + return this.check(); + } + + /** + * Resets the bucket to full. + */ + reset() { + this._tokens = this._max; + this._lastRefill = Date.now(); + } + + /** + * Gets current statistics. + * + * @returns {{tokens: number, max: number, refillRate: number}} + */ + getStats() { + this._refill(); + return { + tokens: this._tokens, + max: this._max, + refillRate: this._refillRate, + }; + } +} + +// ============================================================================= +// Rate Limiter (Multi-key) +// ============================================================================= + +/** + * A rate limiter that supports multiple keys. + * + * Useful for per-queue, per-consumer, or per-client rate limiting. + * + * @example + * const limiter = new RateLimiter({ + * max: 100, + * window: 60000, + * algorithm: 'sliding-window' + * }); + * + * // Per-queue rate limiting + * const result = limiter.consume('queue:tasks'); + * + * // Per-consumer rate limiting + * const result2 = limiter.consume('consumer:user-123'); + */ +export class RateLimiter { + /** + * Creates a new RateLimiter. + * + * @param {Object} config - Configuration + * @param {number} config.max - Maximum operations per window + * @param {number} config.window - Window size in milliseconds + * @param {'sliding-window' | 'token-bucket'} [config.algorithm='sliding-window'] - Algorithm to use + * @param {number} [config.cleanupInterval=60000] - Interval to clean up old entries + */ + constructor(config) { + /** + * Maximum operations per window. + * @type {number} + * @private + */ + this._max = config.max; + + /** + * Window size in milliseconds. + * @type {number} + * @private + */ + this._windowMs = config.window; + + /** + * Algorithm to use. + * @type {'sliding-window' | 'token-bucket'} + * @private + */ + this._algorithm = config.algorithm ?? 'sliding-window'; + + /** + * Per-key limiters. + * @type {Map} + * @private + */ + this._limiters = new Map(); + + /** + * Cleanup interval handle. + * @type {NodeJS.Timeout|null} + * @private + */ + this._cleanupHandle = null; + + /** + * Cleanup interval in milliseconds. + * @type {number} + * @private + */ + this._cleanupInterval = config.cleanupInterval ?? 60000; + + /** + * Last access time for each key. + * @type {Map} + * @private + */ + this._lastAccess = new Map(); + + // Start cleanup + this._startCleanup(); + } + + /** + * Gets or creates a limiter for the given key. + * + * @param {string} key - The rate limit key + * @returns {SlidingWindowCounter | TokenBucket} + * @private + */ + _getLimiter(key) { + let limiter = this._limiters.get(key); + if (!limiter) { + const config = { max: this._max, window: this._windowMs }; + limiter = + this._algorithm === 'token-bucket' + ? new TokenBucket(config) + : new SlidingWindowCounter(config); + this._limiters.set(key, limiter); + } + this._lastAccess.set(key, Date.now()); + return limiter; + } + + /** + * Checks if an operation is allowed for the given key. + * + * @param {string} [key='default'] - The rate limit key + * @returns {RateLimitResult} The rate limit check result + */ + check(key = 'default') { + return this._getLimiter(key).check(); + } + + /** + * Consumes one permit for the given key. + * + * @param {string} [key='default'] - The rate limit key + * @returns {RateLimitResult} The rate limit result + */ + consume(key = 'default') { + return this._getLimiter(key).consume(); + } + + /** + * Resets the limiter for the given key. + * + * @param {string} [key='default'] - The rate limit key + */ + reset(key = 'default') { + const limiter = this._limiters.get(key); + if (limiter) { + limiter.reset(); + } + } + + /** + * Resets all limiters. + */ + resetAll() { + for (const limiter of this._limiters.values()) { + limiter.reset(); + } + } + + /** + * Removes a limiter for the given key. + * + * @param {string} key - The rate limit key + * @returns {boolean} True if the limiter was removed + */ + remove(key) { + this._lastAccess.delete(key); + return this._limiters.delete(key); + } + + /** + * Gets statistics for a key. + * + * @param {string} [key='default'] - The rate limit key + * @returns {Object} Statistics + */ + getStats(key = 'default') { + const limiter = this._limiters.get(key); + if (limiter) { + return limiter.getStats(); + } + return null; + } + + /** + * Gets the number of tracked keys. + * + * @returns {number} + */ + get keyCount() { + return this._limiters.size; + } + + /** + * Lists all tracked keys. + * + * @returns {string[]} + */ + listKeys() { + return [...this._limiters.keys()]; + } + + /** + * Starts the cleanup process. + * + * @private + */ + _startCleanup() { + this._cleanupHandle = globalThis.setInterval(() => { + const now = Date.now(); + const expiry = this._windowMs * 2; // Keep for 2 windows after last access + + for (const [key, lastAccess] of this._lastAccess) { + if (now - lastAccess > expiry) { + this._limiters.delete(key); + this._lastAccess.delete(key); + } + } + }, this._cleanupInterval); + } + + /** + * Stops the rate limiter and cleans up. + */ + stop() { + if (this._cleanupHandle) { + globalThis.clearInterval(this._cleanupHandle); + this._cleanupHandle = null; + } + } + + /** + * Clears all state. + */ + clear() { + this.stop(); + this._limiters.clear(); + this._lastAccess.clear(); + } +} + +// ============================================================================= +// Rate Limited Queue +// ============================================================================= + +/** + * A queue wrapper that adds rate limiting. + * + * @example + * const baseQueue = new LinksQueue({ name: 'api-calls', store }); + * const rateLimitedQueue = new RateLimitedQueue(baseQueue, { + * max: 100, + * window: 60000 + * }); + * + * try { + * await rateLimitedQueue.enqueue(link); + * } catch (error) { + * if (error.code === 'RATE_LIMITED') { + * console.log(`Rate limited. Retry after ${error.retryAfter}ms`); + * } + * } + */ +export class RateLimitedQueue { + /** + * Creates a new RateLimitedQueue. + * + * @param {import('../queue/queue.js').LinksQueue} queue - The underlying queue + * @param {RateLimitConfig} config - Rate limit configuration + */ + constructor(queue, config) { + /** + * The underlying queue. + * @type {import('../queue/queue.js').LinksQueue} + * @private + */ + this._queue = queue; + + /** + * Rate limiter for enqueue operations. + * @type {SlidingWindowCounter} + * @private + */ + this._enqueueLimiter = new SlidingWindowCounter(config); + + /** + * Rate limiter for dequeue operations. + * @type {SlidingWindowCounter} + * @private + */ + this._dequeueLimiter = new SlidingWindowCounter(config); + + /** + * Per-consumer rate limiters. + * @type {RateLimiter} + * @private + */ + this._consumerLimiter = new RateLimiter(config); + } + + /** + * Gets the queue name. + * + * @returns {string} + */ + get name() { + return this._queue.name; + } + + /** + * Enqueues a link with rate limiting. + * + * @param {import('../index.d.ts').Link} link - The link to enqueue + * @returns {Promise} + * @throws {RateLimitError} If rate limited + */ + async enqueue(link) { + const result = this._enqueueLimiter.consume(); + if (!result.allowed) { + throw new RateLimitError( + 'Enqueue rate limit exceeded', + result.retryAfter, + result + ); + } + return this._queue.enqueue(link); + } + + /** + * Dequeues with rate limiting. + * + * @param {string} [consumerId] - Optional consumer ID for per-consumer limits + * @returns {Promise} + * @throws {RateLimitError} If rate limited + */ + async dequeue(consumerId) { + // Check global rate limit + let result = this._dequeueLimiter.consume(); + if (!result.allowed) { + throw new RateLimitError( + 'Dequeue rate limit exceeded', + result.retryAfter, + result + ); + } + + // Check per-consumer rate limit if consumerId provided + if (consumerId) { + result = this._consumerLimiter.consume(`consumer:${consumerId}`); + if (!result.allowed) { + throw new RateLimitError( + `Consumer ${consumerId} rate limit exceeded`, + result.retryAfter, + result + ); + } + } + + return this._queue.dequeue(); + } + + /** + * Peeks at the next item (not rate limited). + * + * @returns {Promise} + */ + async peek() { + return this._queue.peek(); + } + + /** + * Acknowledges an item (not rate limited). + * + * @param {import('../index.d.ts').LinkId} id - Item ID + */ + async acknowledge(id) { + return this._queue.acknowledge(id); + } + + /** + * Rejects an item (not rate limited). + * + * @param {import('../index.d.ts').LinkId} id - Item ID + * @param {boolean} [requeue=false] - Whether to requeue + */ + async reject(id, requeue = false) { + return this._queue.reject(id, requeue); + } + + /** + * Gets queue statistics including rate limit info. + * + * @returns {Object} + */ + getStats() { + return { + ...this._queue.getStats(), + rateLimit: { + enqueue: this._enqueueLimiter.getStats(), + dequeue: this._dequeueLimiter.getStats(), + }, + }; + } + + /** + * Gets the queue depth. + * + * @returns {number} + */ + getDepth() { + return this._queue.getDepth(); + } + + /** + * Clears the queue and resets rate limiters. + */ + async clear() { + this._enqueueLimiter.reset(); + this._dequeueLimiter.reset(); + this._consumerLimiter.resetAll(); + await this._queue.clear(); + } + + /** + * Stops the rate limiter. + */ + stop() { + this._consumerLimiter.stop(); + } +} + +// ============================================================================= +// Rate Limit Error +// ============================================================================= + +/** + * Error thrown when rate limit is exceeded. + */ +export class RateLimitError extends Error { + /** + * Creates a new RateLimitError. + * + * @param {string} message - Error message + * @param {number} retryAfter - Milliseconds to wait before retrying + * @param {RateLimitResult} result - The rate limit result + */ + constructor(message, retryAfter, result) { + super(message); + this.name = 'RateLimitError'; + this.code = 'RATE_LIMITED'; + this.retryAfter = retryAfter; + this.remaining = result.remaining; + this.resetAt = result.resetAt; + } +} diff --git a/js/src/features/router.js b/js/src/features/router.js new file mode 100644 index 0000000..98afbfb --- /dev/null +++ b/js/src/features/router.js @@ -0,0 +1,1128 @@ +/** + * Router module for links-queue. + * + * This module provides topic-based routing features: + * - Topic-based routing with wildcards + * - Exchange types (direct, topic, fanout) + * - Pattern matching on link content + * - Binding management + * + * @module features/router + * + * @see REQUIREMENTS.md - Feature Parity with Competitors + * @see ROADMAP.md - Phase 7: Advanced Features + */ + +// ============================================================================= +// Exchange Types +// ============================================================================= + +/** + * Exchange types for message routing. + * @readonly + * @enum {string} + */ +export const ExchangeType = Object.freeze({ + /** Direct exchange: routes to queues with exact routing key match */ + DIRECT: 'direct', + /** Topic exchange: routes using wildcard patterns */ + TOPIC: 'topic', + /** Fanout exchange: routes to all bound queues */ + FANOUT: 'fanout', + /** Headers exchange: routes based on message headers/attributes */ + HEADERS: 'headers', +}); + +// ============================================================================= +// Binding +// ============================================================================= + +/** + * Represents a binding between an exchange and a queue. + * + * @typedef {Object} Binding + * @property {string} id - Unique binding identifier + * @property {string} exchange - Exchange name + * @property {string} queue - Queue name + * @property {string} routingKey - Routing key or pattern + * @property {Object} [arguments] - Optional binding arguments + */ + +// ============================================================================= +// Topic Pattern Matcher +// ============================================================================= + +/** + * Matches routing keys against topic patterns. + * + * Supports AMQP-style wildcards: + * - `*` matches exactly one word + * - `#` matches zero or more words + * + * Words are separated by dots (.). + * + * @example + * const matcher = new TopicMatcher(); + * + * matcher.matches('logs.*', 'logs.error'); // true + * matcher.matches('logs.*', 'logs.error.db'); // false + * matcher.matches('logs.#', 'logs.error.db'); // true + * matcher.matches('*.error', 'logs.error'); // true + * matcher.matches('#.error', 'a.b.c.error'); // true + */ +export class TopicMatcher { + /** + * Checks if a routing key matches a pattern. + * + * @param {string} pattern - The pattern (may contain * and #) + * @param {string} routingKey - The routing key to match + * @returns {boolean} True if the key matches the pattern + */ + static matches(pattern, routingKey) { + // Convert pattern to regex + const regex = this._patternToRegex(pattern); + return regex.test(routingKey); + } + + /** + * Converts a topic pattern to a regular expression. + * Implements AMQP-style topic wildcards: + * - * matches exactly one word + * - # matches zero or more words + * + * @param {string} pattern - The topic pattern + * @returns {RegExp} The compiled regex + * @private + */ + static _patternToRegex(pattern) { + // Handle special case: just "#" matches everything + if (pattern === '#') { + return /^.+$/; + } + + // Split pattern into parts and build regex + const parts = pattern.split('.'); + const regexParts = []; + + for (let i = 0; i < parts.length; i++) { + const part = parts[i]; + + if (part === '#') { + if (i === 0) { + // # at start: matches zero or more words followed by a dot (or nothing) + regexParts.push('([^.]+\\.)*'); + } else if (i === parts.length - 1) { + // # at end: matches zero or more .word sequences + regexParts.push('(\\.[^.]+)*'); + } else { + // # in middle: matches zero or more .word. sequences + regexParts.push('(\\.[^.]+)*\\.'); + } + } else if (part === '*') { + regexParts.push('[^.]+'); + if (i < parts.length - 1) { + regexParts.push('\\.'); + } + } else { + // Escape special regex chars in literal parts + const escaped = part.replace(/[.+?^${}()|[\]\\]/g, '\\$&'); + regexParts.push(escaped); + if (i < parts.length - 1 && parts[i + 1] !== '#') { + regexParts.push('\\.'); + } + } + } + + return new RegExp(`^${regexParts.join('')}$`); + } + + /** + * Scores a pattern for specificity (more specific = higher score). + * + * @param {string} pattern - The pattern to score + * @returns {number} Specificity score + */ + static specificity(pattern) { + let score = 0; + const parts = pattern.split('.'); + + for (const part of parts) { + if (part === '#') { + score += 1; // Lowest specificity + } else if (part === '*') { + score += 10; // Medium specificity + } else { + score += 100; // Highest specificity (exact match) + } + } + + return score; + } +} + +// ============================================================================= +// Direct Exchange +// ============================================================================= + +/** + * Direct exchange: routes messages to queues with exact routing key match. + * + * @example + * const exchange = new DirectExchange('logs'); + * + * exchange.bind('errors-queue', 'error'); + * exchange.bind('info-queue', 'info'); + * + * exchange.route('error'); // ['errors-queue'] + * exchange.route('info'); // ['info-queue'] + * exchange.route('debug'); // [] + */ +export class DirectExchange { + /** + * Creates a new DirectExchange. + * + * @param {string} name - Exchange name + */ + constructor(name) { + /** + * Exchange name. + * @type {string} + */ + this.name = name; + + /** + * Exchange type. + * @type {string} + */ + this.type = ExchangeType.DIRECT; + + /** + * Bindings by routing key. + * @type {Map>} + * @private + */ + this._bindings = new Map(); + + /** + * All bindings. + * @type {Map} + * @private + */ + this._bindingDetails = new Map(); + + /** + * ID counter. + * @type {number} + * @private + */ + this._idCounter = 0; + } + + /** + * Binds a queue to this exchange with a routing key. + * + * @param {string} queue - Queue name + * @param {string} routingKey - Routing key + * @returns {Binding} The created binding + */ + bind(queue, routingKey) { + if (!this._bindings.has(routingKey)) { + this._bindings.set(routingKey, new Set()); + } + this._bindings.get(routingKey).add(queue); + + const binding = { + id: `${this.name}-${++this._idCounter}`, + exchange: this.name, + queue, + routingKey, + }; + this._bindingDetails.set(binding.id, binding); + + return binding; + } + + /** + * Unbinds a queue from this exchange. + * + * @param {string} queue - Queue name + * @param {string} routingKey - Routing key + * @returns {boolean} True if the binding was removed + */ + unbind(queue, routingKey) { + const queues = this._bindings.get(routingKey); + if (queues) { + const removed = queues.delete(queue); + if (queues.size === 0) { + this._bindings.delete(routingKey); + } + + // Remove from details + for (const [id, binding] of this._bindingDetails) { + if (binding.queue === queue && binding.routingKey === routingKey) { + this._bindingDetails.delete(id); + break; + } + } + + return removed; + } + return false; + } + + /** + * Routes a message to matching queues. + * + * @param {string} routingKey - The routing key + * @returns {string[]} Array of queue names + */ + route(routingKey) { + const queues = this._bindings.get(routingKey); + return queues ? [...queues] : []; + } + + /** + * Gets all bindings. + * + * @returns {Binding[]} + */ + getBindings() { + return [...this._bindingDetails.values()]; + } + + /** + * Clears all bindings. + */ + clear() { + this._bindings.clear(); + this._bindingDetails.clear(); + } +} + +// ============================================================================= +// Topic Exchange +// ============================================================================= + +/** + * Topic exchange: routes messages using wildcard patterns. + * + * @example + * const exchange = new TopicExchange('events'); + * + * exchange.bind('all-logs', 'logs.#'); + * exchange.bind('errors-only', 'logs.error'); + * exchange.bind('system', '*.system.*'); + * + * exchange.route('logs.error'); // ['all-logs', 'errors-only'] + * exchange.route('logs.info'); // ['all-logs'] + * exchange.route('app.system.startup'); // ['system'] + */ +export class TopicExchange { + /** + * Creates a new TopicExchange. + * + * @param {string} name - Exchange name + */ + constructor(name) { + /** + * Exchange name. + * @type {string} + */ + this.name = name; + + /** + * Exchange type. + * @type {string} + */ + this.type = ExchangeType.TOPIC; + + /** + * Bindings with patterns. + * @type {Array<{pattern: string, queue: string, binding: Binding}>} + * @private + */ + this._bindings = []; + + /** + * ID counter. + * @type {number} + * @private + */ + this._idCounter = 0; + } + + /** + * Binds a queue to this exchange with a pattern. + * + * @param {string} queue - Queue name + * @param {string} pattern - Topic pattern (may contain * and #) + * @returns {Binding} The created binding + */ + bind(queue, pattern) { + const binding = { + id: `${this.name}-${++this._idCounter}`, + exchange: this.name, + queue, + routingKey: pattern, + }; + + this._bindings.push({ pattern, queue, binding }); + + // Sort by specificity (most specific first) + this._bindings.sort( + (a, b) => + TopicMatcher.specificity(b.pattern) - + TopicMatcher.specificity(a.pattern) + ); + + return binding; + } + + /** + * Unbinds a queue from this exchange. + * + * @param {string} queue - Queue name + * @param {string} pattern - Topic pattern + * @returns {boolean} True if the binding was removed + */ + unbind(queue, pattern) { + const idx = this._bindings.findIndex( + (b) => b.queue === queue && b.pattern === pattern + ); + if (idx !== -1) { + this._bindings.splice(idx, 1); + return true; + } + return false; + } + + /** + * Routes a message to matching queues. + * + * @param {string} routingKey - The routing key + * @returns {string[]} Array of queue names (deduplicated) + */ + route(routingKey) { + const queues = new Set(); + + for (const { pattern, queue } of this._bindings) { + if (TopicMatcher.matches(pattern, routingKey)) { + queues.add(queue); + } + } + + return [...queues]; + } + + /** + * Gets all bindings. + * + * @returns {Binding[]} + */ + getBindings() { + return this._bindings.map((b) => b.binding); + } + + /** + * Clears all bindings. + */ + clear() { + this._bindings = []; + } +} + +// ============================================================================= +// Fanout Exchange +// ============================================================================= + +/** + * Fanout exchange: routes messages to all bound queues. + * + * @example + * const exchange = new FanoutExchange('notifications'); + * + * exchange.bind('email-queue'); + * exchange.bind('sms-queue'); + * exchange.bind('push-queue'); + * + * exchange.route(); // ['email-queue', 'sms-queue', 'push-queue'] + */ +export class FanoutExchange { + /** + * Creates a new FanoutExchange. + * + * @param {string} name - Exchange name + */ + constructor(name) { + /** + * Exchange name. + * @type {string} + */ + this.name = name; + + /** + * Exchange type. + * @type {string} + */ + this.type = ExchangeType.FANOUT; + + /** + * Bound queues. + * @type {Set} + * @private + */ + this._queues = new Set(); + + /** + * Bindings. + * @type {Map} + * @private + */ + this._bindings = new Map(); + + /** + * ID counter. + * @type {number} + * @private + */ + this._idCounter = 0; + } + + /** + * Binds a queue to this exchange. + * + * @param {string} queue - Queue name + * @returns {Binding} The created binding + */ + bind(queue) { + this._queues.add(queue); + + const binding = { + id: `${this.name}-${++this._idCounter}`, + exchange: this.name, + queue, + routingKey: '', + }; + this._bindings.set(queue, binding); + + return binding; + } + + /** + * Unbinds a queue from this exchange. + * + * @param {string} queue - Queue name + * @returns {boolean} True if the binding was removed + */ + unbind(queue) { + this._bindings.delete(queue); + return this._queues.delete(queue); + } + + /** + * Routes a message to all bound queues. + * + * @returns {string[]} Array of all bound queue names + */ + route() { + return [...this._queues]; + } + + /** + * Gets all bindings. + * + * @returns {Binding[]} + */ + getBindings() { + return [...this._bindings.values()]; + } + + /** + * Clears all bindings. + */ + clear() { + this._queues.clear(); + this._bindings.clear(); + } +} + +// ============================================================================= +// Headers Exchange +// ============================================================================= + +/** + * Headers exchange: routes messages based on header values. + * + * @example + * const exchange = new HeadersExchange('tasks'); + * + * exchange.bind('high-priority', { priority: 'high' }, 'all'); + * exchange.bind('urgent-system', { priority: 'high', type: 'system' }, 'all'); + * + * exchange.route({ priority: 'high' }); // ['high-priority'] + * exchange.route({ priority: 'high', type: 'system' }); // ['high-priority', 'urgent-system'] + */ +export class HeadersExchange { + /** + * Creates a new HeadersExchange. + * + * @param {string} name - Exchange name + */ + constructor(name) { + /** + * Exchange name. + * @type {string} + */ + this.name = name; + + /** + * Exchange type. + * @type {string} + */ + this.type = ExchangeType.HEADERS; + + /** + * Bindings with header matchers. + * @type {Array<{headers: Object, matchType: 'all' | 'any', queue: string, binding: Binding}>} + * @private + */ + this._bindings = []; + + /** + * ID counter. + * @type {number} + * @private + */ + this._idCounter = 0; + } + + /** + * Binds a queue to this exchange with header matching. + * + * @param {string} queue - Queue name + * @param {Object} headers - Headers to match + * @param {'all' | 'any'} [matchType='all'] - 'all' requires all headers to match, 'any' requires at least one + * @returns {Binding} The created binding + */ + bind(queue, headers, matchType = 'all') { + const binding = { + id: `${this.name}-${++this._idCounter}`, + exchange: this.name, + queue, + routingKey: '', + arguments: { headers, matchType }, + }; + + this._bindings.push({ headers, matchType, queue, binding }); + + return binding; + } + + /** + * Unbinds a queue from this exchange. + * + * @param {string} queue - Queue name + * @param {Object} headers - Headers that were used in binding + * @returns {boolean} True if the binding was removed + */ + unbind(queue, headers) { + const headerKeys = Object.keys(headers).sort().join(','); + const idx = this._bindings.findIndex((b) => { + const bKeys = Object.keys(b.headers).sort().join(','); + return b.queue === queue && bKeys === headerKeys; + }); + + if (idx !== -1) { + this._bindings.splice(idx, 1); + return true; + } + return false; + } + + /** + * Routes a message based on its headers. + * + * @param {Object} messageHeaders - The message headers + * @returns {string[]} Array of queue names + */ + route(messageHeaders) { + const queues = new Set(); + + for (const { headers, matchType, queue } of this._bindings) { + let matches = false; + + if (matchType === 'all') { + // All specified headers must match + matches = Object.entries(headers).every( + ([key, value]) => messageHeaders[key] === value + ); + } else { + // Any specified header must match + matches = Object.entries(headers).some( + ([key, value]) => messageHeaders[key] === value + ); + } + + if (matches) { + queues.add(queue); + } + } + + return [...queues]; + } + + /** + * Gets all bindings. + * + * @returns {Binding[]} + */ + getBindings() { + return this._bindings.map((b) => b.binding); + } + + /** + * Clears all bindings. + */ + clear() { + this._bindings = []; + } +} + +// ============================================================================= +// Router +// ============================================================================= + +/** + * Central router for managing exchanges and routing messages. + * + * @example + * const router = new Router(); + * + * // Create exchanges + * router.declareExchange('logs', 'topic'); + * router.declareExchange('notifications', 'fanout'); + * + * // Bind queues + * router.bind('logs', 'errors-queue', 'logs.error'); + * router.bind('logs', 'all-logs', 'logs.#'); + * router.bind('notifications', 'email-queue'); + * + * // Route messages + * const queues1 = router.route('logs', 'logs.error'); // ['errors-queue', 'all-logs'] + * const queues2 = router.route('notifications'); // ['email-queue'] + */ +export class Router { + /** + * Creates a new Router. + */ + constructor() { + /** + * Exchanges by name. + * @type {Map} + * @private + */ + this._exchanges = new Map(); + + /** + * Queue registry (for validation). + * @type {Set} + * @private + */ + this._queues = new Set(); + } + + /** + * Declares an exchange. + * + * @param {string} name - Exchange name + * @param {'direct' | 'topic' | 'fanout' | 'headers'} type - Exchange type + * @returns {DirectExchange | TopicExchange | FanoutExchange | HeadersExchange} The exchange + */ + declareExchange(name, type) { + if (this._exchanges.has(name)) { + const existing = this._exchanges.get(name); + if (existing.type !== type) { + throw new Error( + `Exchange '${name}' already exists with different type` + ); + } + return existing; + } + + let exchange; + switch (type) { + case ExchangeType.DIRECT: + exchange = new DirectExchange(name); + break; + case ExchangeType.TOPIC: + exchange = new TopicExchange(name); + break; + case ExchangeType.FANOUT: + exchange = new FanoutExchange(name); + break; + case ExchangeType.HEADERS: + exchange = new HeadersExchange(name); + break; + default: + throw new Error(`Unknown exchange type: ${type}`); + } + + this._exchanges.set(name, exchange); + return exchange; + } + + /** + * Gets an exchange by name. + * + * @param {string} name - Exchange name + * @returns {DirectExchange | TopicExchange | FanoutExchange | HeadersExchange | undefined} + */ + getExchange(name) { + return this._exchanges.get(name); + } + + /** + * Deletes an exchange. + * + * @param {string} name - Exchange name + * @returns {boolean} True if the exchange was deleted + */ + deleteExchange(name) { + return this._exchanges.delete(name); + } + + /** + * Registers a queue (for validation). + * + * @param {string} name - Queue name + */ + registerQueue(name) { + this._queues.add(name); + } + + /** + * Unregisters a queue. + * + * @param {string} name - Queue name + * @returns {boolean} + */ + unregisterQueue(name) { + return this._queues.delete(name); + } + + /** + * Binds a queue to an exchange. + * + * @param {string} exchangeName - Exchange name + * @param {string} queueName - Queue name + * @param {string} [routingKey=''] - Routing key or pattern + * @param {Object} [args] - Additional binding arguments (for headers exchange) + * @returns {Binding} The created binding + */ + bind(exchangeName, queueName, routingKey = '', args = {}) { + const exchange = this._exchanges.get(exchangeName); + if (!exchange) { + throw new Error(`Exchange '${exchangeName}' not found`); + } + + switch (exchange.type) { + case ExchangeType.DIRECT: + return exchange.bind(queueName, routingKey); + case ExchangeType.TOPIC: + return exchange.bind(queueName, routingKey); + case ExchangeType.FANOUT: + return exchange.bind(queueName); + case ExchangeType.HEADERS: + return exchange.bind(queueName, args.headers || {}, args.matchType); + default: + throw new Error(`Unknown exchange type: ${exchange.type}`); + } + } + + /** + * Unbinds a queue from an exchange. + * + * @param {string} exchangeName - Exchange name + * @param {string} queueName - Queue name + * @param {string} [routingKey=''] - Routing key or pattern + * @param {Object} [args] - Additional binding arguments + * @returns {boolean} True if the binding was removed + */ + unbind(exchangeName, queueName, routingKey = '', args = {}) { + const exchange = this._exchanges.get(exchangeName); + if (!exchange) { + return false; + } + + switch (exchange.type) { + case ExchangeType.DIRECT: + return exchange.unbind(queueName, routingKey); + case ExchangeType.TOPIC: + return exchange.unbind(queueName, routingKey); + case ExchangeType.FANOUT: + return exchange.unbind(queueName); + case ExchangeType.HEADERS: + return exchange.unbind(queueName, args.headers || {}); + default: + return false; + } + } + + /** + * Routes a message through an exchange. + * + * @param {string} exchangeName - Exchange name + * @param {string} [routingKey=''] - Routing key + * @param {Object} [headers={}] - Message headers (for headers exchange) + * @returns {string[]} Array of queue names + */ + route(exchangeName, routingKey = '', headers = {}) { + const exchange = this._exchanges.get(exchangeName); + if (!exchange) { + return []; + } + + switch (exchange.type) { + case ExchangeType.DIRECT: + return exchange.route(routingKey); + case ExchangeType.TOPIC: + return exchange.route(routingKey); + case ExchangeType.FANOUT: + return exchange.route(); + case ExchangeType.HEADERS: + return exchange.route(headers); + default: + return []; + } + } + + /** + * Lists all exchanges. + * + * @returns {Array<{name: string, type: string, bindings: number}>} + */ + listExchanges() { + return [...this._exchanges.values()].map((ex) => ({ + name: ex.name, + type: ex.type, + bindings: ex.getBindings().length, + })); + } + + /** + * Gets all bindings for an exchange. + * + * @param {string} exchangeName - Exchange name + * @returns {Binding[]} + */ + getBindings(exchangeName) { + const exchange = this._exchanges.get(exchangeName); + return exchange ? exchange.getBindings() : []; + } + + /** + * Clears all exchanges. + */ + clear() { + for (const exchange of this._exchanges.values()) { + exchange.clear(); + } + this._exchanges.clear(); + this._queues.clear(); + } +} + +// ============================================================================= +// Routed Queue Manager +// ============================================================================= + +/** + * A queue manager with built-in routing support. + * + * @example + * const manager = new RoutedQueueManager(); + * + * // Create queues + * const errorsQueue = await manager.createQueue('errors'); + * const allLogsQueue = await manager.createQueue('all-logs'); + * + * // Set up routing + * manager.declareExchange('logs', 'topic'); + * manager.bindTopic('logs', 'errors', 'logs.error'); + * manager.bindTopic('logs', 'all-logs', 'logs.#'); + * + * // Publish with routing + * await manager.publish('logs', link, 'logs.error'); + * // Message is routed to both 'errors' and 'all-logs' queues + */ +export class RoutedQueueManager { + /** + * Creates a new RoutedQueueManager. + * + * @param {import('../queue/memory-queue.js').MemoryQueueManager} queueManager - Underlying queue manager + */ + constructor(queueManager) { + /** + * The underlying queue manager. + * @type {import('../queue/memory-queue.js').MemoryQueueManager} + * @private + */ + this._queueManager = queueManager; + + /** + * The router. + * @type {Router} + * @private + */ + this._router = new Router(); + } + + /** + * Creates a queue. + * + * @param {string} name - Queue name + * @param {import('../queue/types.ts').QueueOptions} [options] - Queue options + * @returns {Promise} + */ + async createQueue(name, options) { + const queue = await this._queueManager.createQueue(name, options); + this._router.registerQueue(name); + return queue; + } + + /** + * Gets a queue by name. + * + * @param {string} name - Queue name + * @returns {Promise} + */ + async getQueue(name) { + return this._queueManager.getQueue(name); + } + + /** + * Deletes a queue. + * + * @param {string} name - Queue name + * @returns {Promise} + */ + async deleteQueue(name) { + this._router.unregisterQueue(name); + return this._queueManager.deleteQueue(name); + } + + /** + * Lists all queues. + * + * @returns {Promise} + */ + async listQueues() { + return this._queueManager.listQueues(); + } + + /** + * Declares an exchange. + * + * @param {string} name - Exchange name + * @param {'direct' | 'topic' | 'fanout' | 'headers'} type - Exchange type + * @returns {DirectExchange | TopicExchange | FanoutExchange | HeadersExchange} + */ + declareExchange(name, type) { + return this._router.declareExchange(name, type); + } + + /** + * Gets an exchange. + * + * @param {string} name - Exchange name + * @returns {DirectExchange | TopicExchange | FanoutExchange | HeadersExchange | undefined} + */ + getExchange(name) { + return this._router.getExchange(name); + } + + /** + * Deletes an exchange. + * + * @param {string} name - Exchange name + * @returns {boolean} + */ + deleteExchange(name) { + return this._router.deleteExchange(name); + } + + /** + * Binds a topic pattern. + * + * @param {string} exchange - Exchange name + * @param {string} queue - Queue name + * @param {string} pattern - Topic pattern + * @returns {Binding} + */ + bindTopic(exchange, queue, pattern) { + return this._router.bind(exchange, queue, pattern); + } + + /** + * Unbinds a topic pattern. + * + * @param {string} exchange - Exchange name + * @param {string} queue - Queue name + * @param {string} pattern - Topic pattern + * @returns {boolean} + */ + unbindTopic(exchange, queue, pattern) { + return this._router.unbind(exchange, queue, pattern); + } + + /** + * Sets up fanout routing. + * + * @param {string} exchange - Exchange name + * @param {string[]} queues - Queue names + */ + fanout(exchange, queues) { + this._router.declareExchange(exchange, ExchangeType.FANOUT); + for (const queue of queues) { + this._router.bind(exchange, queue); + } + } + + /** + * Publishes a message through an exchange. + * + * @param {string} exchange - Exchange name + * @param {import('../index.d.ts').Link} link - The link to publish + * @param {string} [routingKey=''] - Routing key + * @param {Object} [headers={}] - Message headers + * @returns {Promise} Array of queue names that received the message + */ + async publish(exchange, link, routingKey = '', headers = {}) { + const queues = this._router.route(exchange, routingKey, headers); + + for (const queueName of queues) { + const queue = await this._queueManager.getQueue(queueName); + if (queue) { + await queue.enqueue(link); + } + } + + return queues; + } + + /** + * Gets the router. + * + * @returns {Router} + */ + getRouter() { + return this._router; + } + + /** + * Clears all routing state. + */ + clear() { + this._router.clear(); + } +} diff --git a/js/src/features/scheduler.js b/js/src/features/scheduler.js new file mode 100644 index 0000000..6fb04a4 --- /dev/null +++ b/js/src/features/scheduler.js @@ -0,0 +1,851 @@ +/** + * Scheduler module for links-queue. + * + * This module provides advanced scheduling features for queue messages: + * - Delayed messages (enqueue for future delivery) + * - Cron jobs (scheduled recurring tasks) + * - TTL (time-to-live for messages) + * - Message expiration (automatic removal of expired messages) + * + * @module features/scheduler + * + * @see REQUIREMENTS.md - Feature Parity with Competitors + * @see ROADMAP.md - Phase 7: Advanced Features + */ + +// ============================================================================= +// Enqueue Options +// ============================================================================= + +/** + * Options for scheduling messages. + * + * @typedef {Object} ScheduleOptions + * @property {number} [delay] - Delay in milliseconds before the message becomes visible + * @property {number} [ttl] - Time-to-live in milliseconds; message expires after this duration + * @property {number} [deliverAt] - Unix timestamp (ms) when the message should be delivered + */ + +// ============================================================================= +// Scheduled Item +// ============================================================================= + +/** + * Represents a scheduled queue item with timing metadata. + * + * @typedef {Object} ScheduledItem + * @property {import('../index.d.ts').LinkId} id - Unique identifier for the scheduled item + * @property {import('../index.d.ts').Link} link - The link data + * @property {number} scheduledAt - Timestamp when the item was scheduled + * @property {number} deliverAt - Timestamp when the item should be delivered + * @property {number|null} expiresAt - Timestamp when the item expires (null if no TTL) + * @property {string} [cronExpression] - Cron expression for recurring jobs + * @property {boolean} isRecurring - Whether this is a recurring job + */ + +// ============================================================================= +// Cron Job +// ============================================================================= + +/** + * Represents a scheduled cron job. + * + * @typedef {Object} CronJob + * @property {string} id - Unique identifier for the cron job + * @property {string} expression - Cron expression (e.g., '0 *\/5 * * *' for every 5 minutes) + * @property {() => Promise} handler - The function to execute + * @property {number|null} nextRun - Next scheduled execution time + * @property {number|null} lastRun - Last execution time + * @property {boolean} running - Whether the job is currently running + * @property {boolean} enabled - Whether the job is enabled + */ + +// ============================================================================= +// Cron Parser (Simple Implementation) +// ============================================================================= + +/** + * Simple cron expression parser. + * + * Supports standard 5-field cron expressions: + * - minute (0-59) + * - hour (0-23) + * - day of month (1-31) + * - month (1-12) + * - day of week (0-6, Sunday = 0) + * + * Special characters: + * - '*' matches any value + * - ',' separates list items + * - '-' defines a range + * - '/' defines step values + */ +export class CronParser { + /** + * Parses a cron field and returns matching values. + * + * @param {string} field - The cron field to parse + * @param {number} min - Minimum allowed value + * @param {number} max - Maximum allowed value + * @returns {number[]} Array of matching values + */ + static parseField(field, min, max) { + const values = new Set(); + + // Handle comma-separated values + const parts = field.split(','); + + for (const part of parts) { + if (part === '*') { + // All values + for (let i = min; i <= max; i++) { + values.add(i); + } + } else if (part.includes('/')) { + // Step values (e.g., */5) + const [range, stepStr] = part.split('/'); + const step = parseInt(stepStr, 10); + const start = range === '*' ? min : parseInt(range, 10); + for (let i = start; i <= max; i += step) { + values.add(i); + } + } else if (part.includes('-')) { + // Range (e.g., 1-5) + const [startStr, endStr] = part.split('-'); + const start = parseInt(startStr, 10); + const end = parseInt(endStr, 10); + for (let i = start; i <= end; i++) { + values.add(i); + } + } else { + // Single value + values.add(parseInt(part, 10)); + } + } + + return [...values] + .filter((v) => v >= min && v <= max) + .sort((a, b) => a - b); + } + + /** + * Parses a full cron expression. + * + * @param {string} expression - Cron expression (5 fields) + * @returns {{minutes: number[], hours: number[], daysOfMonth: number[], months: number[], daysOfWeek: number[]}} + */ + static parse(expression) { + const parts = expression.trim().split(/\s+/); + if (parts.length !== 5) { + throw new Error( + `Invalid cron expression: expected 5 fields, got ${parts.length}` + ); + } + + return { + minutes: this.parseField(parts[0], 0, 59), + hours: this.parseField(parts[1], 0, 23), + daysOfMonth: this.parseField(parts[2], 1, 31), + months: this.parseField(parts[3], 1, 12), + daysOfWeek: this.parseField(parts[4], 0, 6), + }; + } + + /** + * Calculates the next run time after the given date. + * + * @param {string} expression - Cron expression + * @param {Date} [after=new Date()] - Find next run after this date + * @returns {Date|null} Next run time, or null if invalid + */ + static nextRun(expression, after = new Date()) { + const cron = this.parse(expression); + const start = new Date(after.getTime() + 60000); // Start from next minute + start.setSeconds(0, 0); + + // Search up to one year ahead + const maxDate = new Date(start.getTime() + 365 * 24 * 60 * 60 * 1000); + + const current = new Date(start); + + while (current < maxDate) { + const month = current.getMonth() + 1; + const dayOfMonth = current.getDate(); + const dayOfWeek = current.getDay(); + const hour = current.getHours(); + const minute = current.getMinutes(); + + if ( + cron.months.includes(month) && + cron.daysOfMonth.includes(dayOfMonth) && + cron.daysOfWeek.includes(dayOfWeek) && + cron.hours.includes(hour) && + cron.minutes.includes(minute) + ) { + return current; + } + + // Move to next minute + current.setMinutes(current.getMinutes() + 1); + } + + return null; + } +} + +// ============================================================================= +// Scheduler +// ============================================================================= + +/** + * Scheduler for managing delayed messages and cron jobs. + * + * Provides functionality for: + * - Scheduling messages for future delivery + * - Managing message TTL and expiration + * - Running cron jobs on a schedule + * + * @example + * import { Scheduler } from 'links-queue'; + * + * const scheduler = new Scheduler(); + * + * // Schedule a delayed message + * const item = scheduler.schedule(link, { delay: 30000 }); // 30 seconds + * + * // Schedule with TTL + * const itemWithTtl = scheduler.schedule(link, { delay: 5000, ttl: 60000 }); + * + * // Add a cron job + * scheduler.addCronJob('cleanup', '0 *\/5 * * *', async () => { + * console.log('Running cleanup...'); + * }); + * + * // Start the scheduler + * scheduler.start(); + * + * // Later: stop the scheduler + * scheduler.stop(); + */ +export class Scheduler { + /** + * Creates a new Scheduler. + * + * @param {Object} [options] - Scheduler options + * @param {number} [options.checkInterval=1000] - Interval in ms to check for due items + * @param {(item: ScheduledItem) => Promise} [options.onDue] - Callback when item is due + * @param {(item: ScheduledItem) => Promise} [options.onExpired] - Callback when item expires + */ + constructor(options = {}) { + /** + * Check interval in milliseconds. + * @type {number} + * @private + */ + this._checkInterval = options.checkInterval ?? 1000; + + /** + * Callback when an item becomes due. + * @type {(item: ScheduledItem) => Promise} + * @private + */ + this._onDue = options.onDue ?? (async () => {}); + + /** + * Callback when an item expires. + * @type {(item: ScheduledItem) => Promise} + * @private + */ + this._onExpired = options.onExpired ?? (async () => {}); + + /** + * Scheduled items waiting to be delivered. + * @type {Map} + * @private + */ + this._scheduledItems = new Map(); + + /** + * Cron jobs. + * @type {Map} + * @private + */ + this._cronJobs = new Map(); + + /** + * Timer handle for the check loop. + * @type {NodeJS.Timeout|null} + * @private + */ + this._timerHandle = null; + + /** + * Whether the scheduler is running. + * @type {boolean} + * @private + */ + this._running = false; + + /** + * Counter for generating unique IDs. + * @type {number} + * @private + */ + this._idCounter = 0; + + /** + * Statistics. + * @type {{scheduled: number, delivered: number, expired: number, cronRuns: number}} + * @private + */ + this._stats = { + scheduled: 0, + delivered: 0, + expired: 0, + cronRuns: 0, + }; + } + + /** + * Generates a unique ID for scheduled items. + * + * @returns {string} Unique identifier + * @private + */ + _generateId() { + return `sched_${Date.now()}_${++this._idCounter}`; + } + + /** + * Schedules a link for future delivery. + * + * @param {import('../index.d.ts').Link} link - The link to schedule + * @param {ScheduleOptions} [options] - Scheduling options + * @returns {ScheduledItem} The scheduled item + * + * @example + * // Delay by 30 seconds + * scheduler.schedule(link, { delay: 30000 }); + * + * // Deliver at specific time + * scheduler.schedule(link, { deliverAt: Date.now() + 60000 }); + * + * // With TTL + * scheduler.schedule(link, { delay: 5000, ttl: 60000 }); + */ + schedule(link, options = {}) { + const now = Date.now(); + const { delay = 0, ttl = null, deliverAt = null } = options; + + const scheduledAt = now; + const deliverTime = deliverAt ?? now + delay; + const expiresAt = ttl !== null ? now + ttl : null; + + const item = { + id: link.id ?? this._generateId(), + link, + scheduledAt, + deliverAt: deliverTime, + expiresAt, + cronExpression: null, + isRecurring: false, + }; + + this._scheduledItems.set(item.id, item); + this._stats.scheduled++; + + return item; + } + + /** + * Cancels a scheduled item. + * + * @param {import('../index.d.ts').LinkId} id - The ID of the item to cancel + * @returns {boolean} True if the item was cancelled + */ + cancel(id) { + return this._scheduledItems.delete(id); + } + + /** + * Gets a scheduled item by ID. + * + * @param {import('../index.d.ts').LinkId} id - The ID of the item + * @returns {ScheduledItem|undefined} The scheduled item + */ + get(id) { + return this._scheduledItems.get(id); + } + + /** + * Adds a cron job. + * + * @param {string} id - Unique identifier for the job + * @param {string} expression - Cron expression + * @param {() => Promise} handler - Function to execute + * @returns {CronJob} The created cron job + * + * @example + * // Run every 5 minutes + * scheduler.addCronJob('cleanup', '0 *\/5 * * *', async () => { + * console.log('Running cleanup...'); + * }); + * + * // Run every day at midnight + * scheduler.addCronJob('daily', '0 0 * * *', async () => { + * console.log('Daily job...'); + * }); + */ + addCronJob(id, expression, handler) { + // Validate the expression + CronParser.parse(expression); + + const nextRun = CronParser.nextRun(expression); + + const job = { + id, + expression, + handler, + nextRun: nextRun ? nextRun.getTime() : null, + lastRun: null, + running: false, + enabled: true, + }; + + this._cronJobs.set(id, job); + return job; + } + + /** + * Removes a cron job. + * + * @param {string} id - The ID of the cron job + * @returns {boolean} True if the job was removed + */ + removeCronJob(id) { + return this._cronJobs.delete(id); + } + + /** + * Gets a cron job by ID. + * + * @param {string} id - The ID of the cron job + * @returns {CronJob|undefined} The cron job + */ + getCronJob(id) { + return this._cronJobs.get(id); + } + + /** + * Enables or disables a cron job. + * + * @param {string} id - The ID of the cron job + * @param {boolean} enabled - Whether to enable the job + * @returns {boolean} True if the job was found and updated + */ + setCronJobEnabled(id, enabled) { + const job = this._cronJobs.get(id); + if (job) { + job.enabled = enabled; + if (enabled) { + // Recalculate next run + const nextRun = CronParser.nextRun(job.expression); + job.nextRun = nextRun ? nextRun.getTime() : null; + } + return true; + } + return false; + } + + /** + * Lists all cron jobs. + * + * @returns {CronJob[]} Array of cron jobs + */ + listCronJobs() { + return [...this._cronJobs.values()]; + } + + /** + * Starts the scheduler. + * + * Begins checking for due items and running cron jobs. + */ + start() { + if (this._running) { + return; + } + + this._running = true; + this._tick(); + } + + /** + * Stops the scheduler. + * + * Stops checking for due items and pauses cron jobs. + */ + stop() { + this._running = false; + if (this._timerHandle) { + globalThis.clearTimeout(this._timerHandle); + this._timerHandle = null; + } + } + + /** + * Internal tick function that checks for due items. + * + * @private + */ + async _tick() { + if (!this._running) { + return; + } + + try { + await this._processScheduledItems(); + await this._processCronJobs(); + } catch (error) { + // Log error but continue running + console.error('Scheduler tick error:', error); + } + + // Schedule next tick + this._timerHandle = globalThis.setTimeout( + () => this._tick(), + this._checkInterval + ); + } + + /** + * Processes scheduled items that are due or expired. + * + * @private + */ + async _processScheduledItems() { + const now = Date.now(); + const dueItems = []; + const expiredItems = []; + + for (const [, item] of this._scheduledItems) { + // Check for expiration first + if (item.expiresAt !== null && now >= item.expiresAt) { + expiredItems.push(item); + continue; + } + + // Check if due for delivery + if (now >= item.deliverAt) { + dueItems.push(item); + } + } + + // Process expired items + for (const item of expiredItems) { + this._scheduledItems.delete(item.id); + this._stats.expired++; + try { + await this._onExpired(item); + } catch (error) { + console.error('Error processing expired item:', error); + } + } + + // Process due items + for (const item of dueItems) { + this._scheduledItems.delete(item.id); + this._stats.delivered++; + try { + await this._onDue(item); + } catch (error) { + console.error('Error processing due item:', error); + } + } + } + + /** + * Processes cron jobs that are due to run. + * + * @private + */ + async _processCronJobs() { + const now = Date.now(); + + for (const job of this._cronJobs.values()) { + if (!job.enabled || job.running || job.nextRun === null) { + continue; + } + + if (now >= job.nextRun) { + job.running = true; + job.lastRun = now; + this._stats.cronRuns++; + + try { + await job.handler(); + } catch (error) { + console.error(`Cron job '${job.id}' error:`, error); + } finally { + job.running = false; + // Calculate next run + const nextRun = CronParser.nextRun(job.expression); + job.nextRun = nextRun ? nextRun.getTime() : null; + } + } + } + } + + /** + * Returns the number of pending scheduled items. + * + * @returns {number} Number of pending items + */ + get pendingCount() { + return this._scheduledItems.size; + } + + /** + * Returns whether the scheduler is running. + * + * @returns {boolean} True if running + */ + get isRunning() { + return this._running; + } + + /** + * Returns scheduler statistics. + * + * @returns {{scheduled: number, delivered: number, expired: number, cronRuns: number, pending: number, cronJobs: number}} + */ + getStats() { + return { + ...this._stats, + pending: this._scheduledItems.size, + cronJobs: this._cronJobs.size, + }; + } + + /** + * Clears all scheduled items and stops the scheduler. + */ + clear() { + this.stop(); + this._scheduledItems.clear(); + this._cronJobs.clear(); + this._stats = { + scheduled: 0, + delivered: 0, + expired: 0, + cronRuns: 0, + }; + } + + /** + * Returns all pending items (for inspection/debugging). + * + * @returns {ScheduledItem[]} Array of scheduled items + */ + getPendingItems() { + return [...this._scheduledItems.values()]; + } +} + +// ============================================================================= +// Scheduled Queue (Queue with Scheduling Support) +// ============================================================================= + +/** + * A queue wrapper that adds scheduling support. + * + * Wraps an existing queue and adds the ability to: + * - Enqueue items with a delay + * - Set TTL on items + * - Automatically remove expired items + * + * @example + * const baseQueue = new LinksQueue({ name: 'tasks', store }); + * const scheduledQueue = new ScheduledQueue(baseQueue); + * + * // Enqueue with delay + * await scheduledQueue.enqueue(link, { delay: 30000 }); + * + * // Enqueue with TTL + * await scheduledQueue.enqueue(link, { ttl: 60000 }); + */ +export class ScheduledQueue { + /** + * Creates a new ScheduledQueue. + * + * @param {import('../queue/queue.js').LinksQueue} queue - The underlying queue + * @param {Object} [options] - Options + * @param {number} [options.checkInterval=1000] - Interval in ms to check for due items + */ + constructor(queue, options = {}) { + /** + * The underlying queue. + * @type {import('../queue/queue.js').LinksQueue} + * @private + */ + this._queue = queue; + + /** + * The internal scheduler. + * @type {Scheduler} + * @private + */ + this._scheduler = new Scheduler({ + checkInterval: options.checkInterval ?? 1000, + onDue: async (item) => { + // When item is due, add it to the actual queue + await this._queue.enqueue(item.link); + }, + onExpired: async () => { + // Item expired before delivery - nothing to do + }, + }); + + // Start the scheduler + this._scheduler.start(); + } + + /** + * Gets the queue name. + * + * @returns {string} Queue name + */ + get name() { + return this._queue.name; + } + + /** + * Enqueues a link, optionally with scheduling options. + * + * @param {import('../index.d.ts').Link} link - The link to enqueue + * @param {ScheduleOptions} [options] - Scheduling options + * @returns {Promise} + */ + async enqueue(link, options = {}) { + const { delay = 0, ttl = null, deliverAt = null } = options; + + // If no delay, enqueue directly + if (delay === 0 && deliverAt === null) { + // But still check TTL for immediate items + if (ttl !== null) { + // Store TTL metadata (could be used for later expiration) + // For now, we enqueue normally + } + return this._queue.enqueue(link); + } + + // Schedule for later delivery + return this._scheduler.schedule(link, options); + } + + /** + * Dequeues the next available link. + * + * @returns {Promise} + */ + async dequeue() { + return this._queue.dequeue(); + } + + /** + * Peeks at the next available link. + * + * @returns {Promise} + */ + async peek() { + return this._queue.peek(); + } + + /** + * Acknowledges a dequeued item. + * + * @param {import('../index.d.ts').LinkId} id - Item ID + */ + async acknowledge(id) { + return this._queue.acknowledge(id); + } + + /** + * Rejects a dequeued item. + * + * @param {import('../index.d.ts').LinkId} id - Item ID + * @param {boolean} [requeue=false] - Whether to requeue + */ + async reject(id, requeue = false) { + return this._queue.reject(id, requeue); + } + + /** + * Gets queue statistics. + * + * @returns {import('../queue/types.ts').QueueStats & {scheduled: number}} + */ + getStats() { + const queueStats = this._queue.getStats(); + const schedulerStats = this._scheduler.getStats(); + return { + ...queueStats, + scheduled: schedulerStats.pending, + }; + } + + /** + * Gets the queue depth (including scheduled items). + * + * @returns {number} + */ + getDepth() { + return this._queue.getDepth() + this._scheduler.pendingCount; + } + + /** + * Schedules a cron job that enqueues items. + * + * @param {string} expression - Cron expression + * @param {() => Promise} linkFactory - Function that creates the link to enqueue + * @returns {CronJob} The cron job + * + * @example + * // Enqueue cleanup tasks every 5 minutes + * scheduledQueue.schedule('0 *\/5 * * *', async () => { + * return { source: 'cron', target: 'cleanup' }; + * }); + */ + scheduleCron(expression, linkFactory) { + const id = `cron_${Date.now()}_${Math.random().toString(36).slice(2, 8)}`; + return this._scheduler.addCronJob(id, expression, async () => { + const link = await linkFactory(); + await this._queue.enqueue(link); + }); + } + + /** + * Cancels a scheduled item. + * + * @param {import('../index.d.ts').LinkId} id - Item ID + * @returns {boolean} + */ + cancelScheduled(id) { + return this._scheduler.cancel(id); + } + + /** + * Stops the scheduler and clears resources. + */ + async clear() { + this._scheduler.clear(); + await this._queue.clear(); + } + + /** + * Stops the scheduler. + */ + stop() { + this._scheduler.stop(); + } +} diff --git a/js/src/index.js b/js/src/index.js index aa70b0f..a8a0df8 100644 --- a/js/src/index.js +++ b/js/src/index.js @@ -252,3 +252,34 @@ export { ClientConnection, ClientConnectionState, } from './client/index.js'; + +// ============================================================================= +// Advanced Features Exports (Phase 7) +// ============================================================================= + +export { + // Scheduler + CronParser, + Scheduler, + ScheduledQueue, + // Rate limiter + SlidingWindowCounter, + TokenBucket, + RateLimiter, + RateLimitedQueue, + RateLimitError, + // Router + ExchangeType, + TopicMatcher, + DirectExchange, + TopicExchange, + FanoutExchange, + HeadersExchange, + Router, + RoutedQueueManager, + // Pub/Sub + MessageFilter, + PubSubBroker, + ObservableQueue, + QueueBackedPubSub, +} from './features/index.js'; diff --git a/js/tests/pubsub.test.js b/js/tests/pubsub.test.js new file mode 100644 index 0000000..0c21536 --- /dev/null +++ b/js/tests/pubsub.test.js @@ -0,0 +1,886 @@ +/** + * Test file for Pub/Sub module + * Tests for publish/subscribe messaging patterns + */ + +import { describe, it, expect } from 'test-anywhere'; +import { + MessageFilter, + PubSubBroker, + ObservableQueue, + QueueBackedPubSub, +} from '../src/features/pubsub.js'; + +// ============================================================================= +// MessageFilter Tests +// ============================================================================= + +describe('MessageFilter', () => { + describe('matches', () => { + it('should match exact values', () => { + const link = { id: 1, source: 'user', target: 'created' }; + + expect(MessageFilter.matches({ source: 'user' }, link)).toBe(true); + expect(MessageFilter.matches({ source: 'admin' }, link)).toBe(false); + }); + + it('should match multiple fields', () => { + const link = { id: 1, source: 'user', target: 'created' }; + + expect( + MessageFilter.matches({ source: 'user', target: 'created' }, link) + ).toBe(true); + expect( + MessageFilter.matches({ source: 'user', target: 'deleted' }, link) + ).toBe(false); + }); + + it('should match regex patterns', () => { + const link = { id: 1, source: 'user-123', target: 'created' }; + + expect(MessageFilter.matches({ source: /^user-/ }, link)).toBe(true); + expect(MessageFilter.matches({ source: /^admin-/ }, link)).toBe(false); + }); + + it('should match with function predicates', () => { + const link = { id: 100, source: 'user', target: 'created' }; + + expect(MessageFilter.matches({ id: (v) => v > 50 }, link)).toBe(true); + expect(MessageFilter.matches({ id: (v) => v > 200 }, link)).toBe(false); + }); + + it('should match empty pattern (all messages)', () => { + const link = { id: 1, source: 'user', target: 'created' }; + + expect(MessageFilter.matches({}, link)).toBe(true); + }); + }); + + describe('createFilter', () => { + it('should create reusable filter function', () => { + const filter = MessageFilter.createFilter({ source: 'user' }); + + expect(filter({ id: 1, source: 'user', target: 'a' })).toBe(true); + expect(filter({ id: 2, source: 'admin', target: 'b' })).toBe(false); + }); + }); +}); + +// ============================================================================= +// PubSubBroker Tests +// ============================================================================= + +describe('PubSubBroker', () => { + describe('constructor', () => { + it('should create broker with default options', () => { + const broker = new PubSubBroker(); + const stats = broker.getStats(); + + expect(stats.topics).toBe(0); + expect(stats.subscriptions).toBe(0); + }); + + it('should create broker with custom options', () => { + const broker = new PubSubBroker({ + autoCreateTopics: false, + messageRetention: 60000, + }); + + expect(broker.getStats().topics).toBe(0); + }); + }); + + describe('createTopic', () => { + it('should create a new topic', () => { + const broker = new PubSubBroker(); + + const topic = broker.createTopic('events'); + + expect(topic.name).toBe('events'); + expect(topic.messageCount).toBe(0); + expect(topic.subscriberCount).toBe(0); + }); + + it('should throw on duplicate topic', () => { + const broker = new PubSubBroker(); + + broker.createTopic('events'); + + expect(() => broker.createTopic('events')).toThrow(); + }); + }); + + describe('getTopic', () => { + it('should return topic by name', () => { + const broker = new PubSubBroker(); + + broker.createTopic('events'); + const topic = broker.getTopic('events'); + + expect(topic).not.toBeUndefined(); + expect(topic.name).toBe('events'); + }); + + it('should return undefined for non-existent topic', () => { + const broker = new PubSubBroker(); + + const topic = broker.getTopic('non-existent'); + + expect(topic).toBeUndefined(); + }); + }); + + describe('deleteTopic', () => { + it('should delete topic and subscriptions', async () => { + const broker = new PubSubBroker(); + + broker.createTopic('events'); + broker.subscribe('events', async () => {}); + + const result = broker.deleteTopic('events'); + + expect(result).toBe(true); + expect(broker.getTopic('events')).toBeUndefined(); + expect(broker.listSubscriptions().length).toBe(0); + }); + + it('should return false for non-existent topic', () => { + const broker = new PubSubBroker(); + + const result = broker.deleteTopic('non-existent'); + + expect(result).toBe(false); + }); + }); + + describe('listTopics', () => { + it('should list all topics', () => { + const broker = new PubSubBroker(); + + broker.createTopic('events'); + broker.createTopic('logs'); + + const topics = broker.listTopics(); + + expect(topics.length).toBe(2); + expect(topics.find((t) => t.name === 'events')).not.toBeUndefined(); + expect(topics.find((t) => t.name === 'logs')).not.toBeUndefined(); + }); + }); + + describe('subscribe', () => { + it('should create subscription', () => { + const broker = new PubSubBroker(); + broker.createTopic('events'); + + const sub = broker.subscribe('events', async () => {}); + + expect(sub.id).not.toBeUndefined(); + expect(sub.topic).toBe('events'); + expect(sub.active).toBe(true); + }); + + it('should auto-create topic if enabled', () => { + const broker = new PubSubBroker({ autoCreateTopics: true }); + + broker.subscribe('new-topic', async () => {}); + + expect(broker.getTopic('new-topic')).not.toBeUndefined(); + }); + + it('should throw if topic does not exist and auto-create disabled', () => { + const broker = new PubSubBroker({ autoCreateTopics: false }); + + expect(() => broker.subscribe('non-existent', async () => {})).toThrow(); + }); + + it('should increment topic subscriber count', () => { + const broker = new PubSubBroker(); + broker.createTopic('events'); + + broker.subscribe('events', async () => {}); + broker.subscribe('events', async () => {}); + + const topic = broker.getTopic('events'); + expect(topic.subscriberCount).toBe(2); + }); + + it('should support filter option', () => { + const broker = new PubSubBroker(); + broker.createTopic('events'); + + const sub = broker.subscribe('events', async () => {}, { + filter: { source: 'user' }, + }); + + expect(sub.pattern).toEqual({ source: 'user' }); + }); + }); + + describe('unsubscribe', () => { + it('should remove subscription', () => { + const broker = new PubSubBroker(); + broker.createTopic('events'); + + const sub = broker.subscribe('events', async () => {}); + const result = broker.unsubscribe(sub.id); + + expect(result).toBe(true); + expect(broker.getSubscription(sub.id)).toBeUndefined(); + }); + + it('should decrement topic subscriber count', () => { + const broker = new PubSubBroker(); + broker.createTopic('events'); + + const sub = broker.subscribe('events', async () => {}); + broker.unsubscribe(sub.id); + + const topic = broker.getTopic('events'); + expect(topic.subscriberCount).toBe(0); + }); + + it('should return false for non-existent subscription', () => { + const broker = new PubSubBroker(); + + const result = broker.unsubscribe('non-existent'); + + expect(result).toBe(false); + }); + }); + + describe('pause/resume', () => { + it('should pause subscription', () => { + const broker = new PubSubBroker(); + broker.createTopic('events'); + + const sub = broker.subscribe('events', async () => {}); + broker.pause(sub.id); + + expect(broker.getSubscription(sub.id).active).toBe(false); + }); + + it('should resume subscription', () => { + const broker = new PubSubBroker(); + broker.createTopic('events'); + + const sub = broker.subscribe('events', async () => {}); + broker.pause(sub.id); + broker.resume(sub.id); + + expect(broker.getSubscription(sub.id).active).toBe(true); + }); + }); + + describe('listSubscriptions', () => { + it('should list all subscriptions', () => { + const broker = new PubSubBroker(); + broker.createTopic('events'); + broker.createTopic('logs'); + + broker.subscribe('events', async () => {}); + broker.subscribe('logs', async () => {}); + + const subs = broker.listSubscriptions(); + expect(subs.length).toBe(2); + }); + + it('should filter by topic', () => { + const broker = new PubSubBroker(); + broker.createTopic('events'); + broker.createTopic('logs'); + + broker.subscribe('events', async () => {}); + broker.subscribe('events', async () => {}); + broker.subscribe('logs', async () => {}); + + const subs = broker.listSubscriptions('events'); + expect(subs.length).toBe(2); + }); + }); + + describe('publish', () => { + it('should deliver message to all subscribers', async () => { + const broker = new PubSubBroker(); + broker.createTopic('events'); + + const received = []; + broker.subscribe('events', async (msg) => received.push(msg)); + broker.subscribe('events', async (msg) => received.push(msg)); + + const link = { id: 1, source: 'test', target: 'item' }; + const result = await broker.publish('events', link); + + expect(result.delivered).toBe(2); + expect(received.length).toBe(2); + }); + + it('should apply message filters', async () => { + const broker = new PubSubBroker(); + broker.createTopic('events'); + + const received = []; + broker.subscribe('events', async (msg) => received.push(msg), { + filter: { source: 'user' }, + }); + + // Should be filtered + await broker.publish('events', { id: 1, source: 'admin', target: 'a' }); + expect(received.length).toBe(0); + + // Should pass filter + await broker.publish('events', { id: 2, source: 'user', target: 'b' }); + expect(received.length).toBe(1); + }); + + it('should not deliver to paused subscriptions', async () => { + const broker = new PubSubBroker(); + broker.createTopic('events'); + + const received = []; + const sub = broker.subscribe('events', async (msg) => received.push(msg)); + broker.pause(sub.id); + + await broker.publish('events', { id: 1, source: 'test', target: 'item' }); + + expect(received.length).toBe(0); + }); + + it('should include headers in message', async () => { + const broker = new PubSubBroker(); + broker.createTopic('events'); + + let receivedHeaders = null; + broker.subscribe('events', async (msg) => { + receivedHeaders = msg.headers; + }); + + await broker.publish( + 'events', + { id: 1, source: 'test', target: 'item' }, + { priority: 'high' } + ); + + expect(receivedHeaders).toEqual({ priority: 'high' }); + }); + + it('should update topic message count', async () => { + const broker = new PubSubBroker(); + broker.createTopic('events'); + + await broker.publish('events', { id: 1, source: 'a', target: 'b' }); + await broker.publish('events', { id: 2, source: 'c', target: 'd' }); + + const topic = broker.getTopic('events'); + expect(topic.messageCount).toBe(2); + }); + + it('should return delivered and filtered counts', async () => { + const broker = new PubSubBroker(); + broker.createTopic('events'); + + broker.subscribe('events', async () => {}); + broker.subscribe('events', async () => {}, { + filter: { source: 'admin' }, + }); + + const result = await broker.publish('events', { + id: 1, + source: 'user', + target: 'a', + }); + + expect(result.delivered).toBe(1); + expect(result.filtered).toBe(1); + }); + + it('should auto-create topic if enabled', async () => { + const broker = new PubSubBroker({ autoCreateTopics: true }); + + await broker.publish('new-topic', { id: 1, source: 'a', target: 'b' }); + + expect(broker.getTopic('new-topic')).not.toBeUndefined(); + }); + }); + + describe('publishMany', () => { + it('should publish to multiple topics', async () => { + const broker = new PubSubBroker(); + broker.createTopic('events'); + broker.createTopic('logs'); + + const receivedEvents = []; + const receivedLogs = []; + + broker.subscribe('events', async (msg) => receivedEvents.push(msg)); + broker.subscribe('logs', async (msg) => receivedLogs.push(msg)); + + const results = await broker.publishMany(['events', 'logs'], { + id: 1, + source: 'test', + target: 'item', + }); + + expect(results.get('events').delivered).toBe(1); + expect(results.get('logs').delivered).toBe(1); + expect(receivedEvents.length).toBe(1); + expect(receivedLogs.length).toBe(1); + }); + }); + + describe('getHistory', () => { + it('should return message history when retention enabled', async () => { + const broker = new PubSubBroker({ messageRetention: 60000 }); + broker.createTopic('events'); + + await broker.publish('events', { id: 1, source: 'a', target: 'b' }); + await broker.publish('events', { id: 2, source: 'c', target: 'd' }); + + const history = broker.getHistory('events'); + + expect(history.length).toBe(2); + }); + + it('should return empty array when no retention', () => { + const broker = new PubSubBroker({ messageRetention: 0 }); + broker.createTopic('events'); + + const history = broker.getHistory('events'); + + expect(history).toEqual([]); + }); + + it('should limit returned messages', async () => { + const broker = new PubSubBroker({ messageRetention: 60000 }); + broker.createTopic('events'); + + for (let i = 0; i < 10; i++) { + await broker.publish('events', { id: i, source: 'a', target: 'b' }); + } + + const history = broker.getHistory('events', 5); + + expect(history.length).toBe(5); + }); + }); + + describe('getStats', () => { + it('should return broker statistics', async () => { + const broker = new PubSubBroker(); + broker.createTopic('events'); + broker.subscribe('events', async () => {}); + + await broker.publish('events', { id: 1, source: 'a', target: 'b' }); + + const stats = broker.getStats(); + + expect(stats.topics).toBe(1); + expect(stats.subscriptions).toBe(1); + expect(stats.published).toBe(1); + expect(stats.delivered).toBe(1); + }); + }); + + describe('clear', () => { + it('should clear all state', () => { + const broker = new PubSubBroker(); + broker.createTopic('events'); + broker.subscribe('events', async () => {}); + + broker.clear(); + + const stats = broker.getStats(); + expect(stats.topics).toBe(0); + expect(stats.subscriptions).toBe(0); + }); + }); +}); + +// ============================================================================= +// ObservableQueue Tests +// ============================================================================= + +describe('ObservableQueue', () => { + // Mock queue for testing + const createMockQueue = () => ({ + name: 'test-queue', + _items: [], + async enqueue(link) { + this._items.push(link); + return { id: link.id, position: this._items.length - 1 }; + }, + async dequeue() { + return this._items.shift() || null; + }, + async peek() { + return this._items[0] || null; + }, + async acknowledge() {}, + async reject() {}, + getStats() { + return { + depth: this._items.length, + enqueued: 0, + dequeued: 0, + acknowledged: 0, + rejected: 0, + inFlight: 0, + }; + }, + getDepth() { + return this._items.length; + }, + async clear() { + this._items = []; + }, + }); + + describe('constructor', () => { + it('should wrap queue', () => { + const mockQueue = createMockQueue(); + const observable = new ObservableQueue(mockQueue); + + expect(observable.name).toBe('test-queue'); + }); + }); + + describe('onEnqueue', () => { + it('should notify on enqueue', async () => { + const mockQueue = createMockQueue(); + const observable = new ObservableQueue(mockQueue); + + const received = []; + observable.onEnqueue(async (link) => received.push(link)); + + await observable.enqueue({ id: 1, source: 'a', target: 'b' }); + + expect(received.length).toBe(1); + expect(received[0].id).toBe(1); + }); + + it('should return unsubscribe function', async () => { + const mockQueue = createMockQueue(); + const observable = new ObservableQueue(mockQueue); + + const received = []; + const unsubscribe = observable.onEnqueue(async (link) => + received.push(link) + ); + + await observable.enqueue({ id: 1, source: 'a', target: 'b' }); + unsubscribe(); + await observable.enqueue({ id: 2, source: 'c', target: 'd' }); + + expect(received.length).toBe(1); + }); + }); + + describe('onDequeue', () => { + it('should notify on dequeue', async () => { + const mockQueue = createMockQueue(); + const observable = new ObservableQueue(mockQueue); + + const received = []; + observable.onDequeue(async (link) => received.push(link)); + + await observable.enqueue({ id: 1, source: 'a', target: 'b' }); + await observable.dequeue(); + + expect(received.length).toBe(1); + expect(received[0].id).toBe(1); + }); + + it('should not notify when queue is empty', async () => { + const mockQueue = createMockQueue(); + const observable = new ObservableQueue(mockQueue); + + const received = []; + observable.onDequeue(async (link) => received.push(link)); + + await observable.dequeue(); + + expect(received.length).toBe(0); + }); + }); + + describe('onAcknowledge', () => { + it('should notify on acknowledge', async () => { + const mockQueue = createMockQueue(); + const observable = new ObservableQueue(mockQueue); + + const received = []; + observable.onAcknowledge(async (id) => received.push(id)); + + await observable.acknowledge(42); + + expect(received.length).toBe(1); + expect(received[0]).toBe(42); + }); + }); + + describe('onReject', () => { + it('should notify on reject', async () => { + const mockQueue = createMockQueue(); + const observable = new ObservableQueue(mockQueue); + + const received = []; + observable.onReject(async (id, requeue) => + received.push({ id, requeue }) + ); + + await observable.reject(42, true); + + expect(received.length).toBe(1); + expect(received[0].id).toBe(42); + expect(received[0].requeue).toBe(true); + }); + }); + + describe('clear', () => { + it('should clear queue and remove handlers', async () => { + const mockQueue = createMockQueue(); + const observable = new ObservableQueue(mockQueue); + + const received = []; + observable.onEnqueue(async (link) => received.push(link)); + + await observable.enqueue({ id: 1, source: 'a', target: 'b' }); + await observable.clear(); + await observable.enqueue({ id: 2, source: 'c', target: 'd' }); + + // Handler should have been cleared + expect(received.length).toBe(1); + expect(observable.getDepth()).toBe(1); + }); + }); +}); + +// ============================================================================= +// QueueBackedPubSub Tests +// ============================================================================= + +describe('QueueBackedPubSub', () => { + // Mock queue manager for testing + const createMockQueueManager = () => { + const queues = new Map(); + + return { + async createQueue(name) { + const queue = { + name, + _items: [], + async enqueue(link) { + this._items.push(link); + return { id: link.id, position: this._items.length - 1 }; + }, + async dequeue() { + return this._items.shift() || null; + }, + async acknowledge() {}, + async reject() {}, + getDepth() { + return this._items.length; + }, + }; + queues.set(name, queue); + return queue; + }, + async getQueue(name) { + return queues.get(name) || null; + }, + async deleteQueue(name) { + return queues.delete(name); + }, + queues, + }; + }; + + describe('createTopic', () => { + it('should create a new topic', () => { + const mockManager = createMockQueueManager(); + const pubsub = new QueueBackedPubSub(mockManager); + + const result = pubsub.createTopic('events'); + + expect(result).toBe(true); + }); + + it('should return false for duplicate topic', () => { + const mockManager = createMockQueueManager(); + const pubsub = new QueueBackedPubSub(mockManager); + + pubsub.createTopic('events'); + const result = pubsub.createTopic('events'); + + expect(result).toBe(false); + }); + }); + + describe('deleteTopic', () => { + it('should delete topic and subscriptions', async () => { + const mockManager = createMockQueueManager(); + const pubsub = new QueueBackedPubSub(mockManager); + + pubsub.createTopic('events'); + await pubsub.subscribe('events', 'consumer1', async () => {}); + + const result = await pubsub.deleteTopic('events'); + + expect(result).toBe(true); + expect(pubsub.listTopics().length).toBe(0); + }); + }); + + describe('subscribe', () => { + it('should create subscription with dedicated queue', async () => { + const mockManager = createMockQueueManager(); + const pubsub = new QueueBackedPubSub(mockManager); + + const sub = await pubsub.subscribe('events', 'consumer1', async () => {}); + + expect(sub.id).not.toBeUndefined(); + expect(sub.queueName).toBe('events-consumer1'); + expect(mockManager.queues.has('events-consumer1')).toBe(true); + }); + + it('should auto-create topic', async () => { + const mockManager = createMockQueueManager(); + const pubsub = new QueueBackedPubSub(mockManager); + + await pubsub.subscribe('new-topic', 'consumer1', async () => {}); + + expect( + pubsub.listTopics().find((t) => t.name === 'new-topic') + ).not.toBeUndefined(); + }); + }); + + describe('unsubscribe', () => { + it('should remove subscription and delete queue', async () => { + const mockManager = createMockQueueManager(); + const pubsub = new QueueBackedPubSub(mockManager); + + const sub = await pubsub.subscribe('events', 'consumer1', async () => {}); + const result = await pubsub.unsubscribe(sub.id); + + expect(result).toBe(true); + expect(mockManager.queues.has('events-consumer1')).toBe(false); + }); + }); + + describe('publish', () => { + it('should enqueue to all subscriber queues', async () => { + const mockManager = createMockQueueManager(); + const pubsub = new QueueBackedPubSub(mockManager); + + await pubsub.subscribe('events', 'consumer1', async () => {}); + await pubsub.subscribe('events', 'consumer2', async () => {}); + + const count = await pubsub.publish('events', { + id: 1, + source: 'a', + target: 'b', + }); + + expect(count).toBe(2); + + const queue1 = await mockManager.getQueue('events-consumer1'); + const queue2 = await mockManager.getQueue('events-consumer2'); + + expect(queue1._items.length).toBe(1); + expect(queue2._items.length).toBe(1); + }); + + it('should return 0 for empty topic', async () => { + const mockManager = createMockQueueManager(); + const pubsub = new QueueBackedPubSub(mockManager); + + pubsub.createTopic('events'); + + const count = await pubsub.publish('events', { + id: 1, + source: 'a', + target: 'b', + }); + + expect(count).toBe(0); + }); + }); + + describe('startConsumer/stopConsumer', () => { + it('should start and stop consumer', async () => { + const mockManager = createMockQueueManager(); + const pubsub = new QueueBackedPubSub(mockManager); + + const sub = await pubsub.subscribe('events', 'consumer1', async () => {}); + await pubsub.startConsumer(sub.id); + + const subs = pubsub.listSubscriptions(); + expect(subs.find((s) => s.id === sub.id).active).toBe(true); + + pubsub.stopConsumer(sub.id); + + const subsAfter = pubsub.listSubscriptions(); + expect(subsAfter.find((s) => s.id === sub.id).active).toBe(false); + }); + }); + + describe('listTopics', () => { + it('should list all topics with subscriber counts', async () => { + const mockManager = createMockQueueManager(); + const pubsub = new QueueBackedPubSub(mockManager); + + await pubsub.subscribe('events', 'consumer1', async () => {}); + await pubsub.subscribe('events', 'consumer2', async () => {}); + await pubsub.subscribe('logs', 'consumer1', async () => {}); + + const topics = pubsub.listTopics(); + + expect(topics.length).toBe(2); + expect(topics.find((t) => t.name === 'events').subscribers).toBe(2); + expect(topics.find((t) => t.name === 'logs').subscribers).toBe(1); + }); + }); + + describe('listSubscriptions', () => { + it('should list all subscriptions', async () => { + const mockManager = createMockQueueManager(); + const pubsub = new QueueBackedPubSub(mockManager); + + await pubsub.subscribe('events', 'consumer1', async () => {}); + await pubsub.subscribe('logs', 'consumer1', async () => {}); + + const subs = pubsub.listSubscriptions(); + + expect(subs.length).toBe(2); + }); + + it('should filter by topic', async () => { + const mockManager = createMockQueueManager(); + const pubsub = new QueueBackedPubSub(mockManager); + + await pubsub.subscribe('events', 'consumer1', async () => {}); + await pubsub.subscribe('events', 'consumer2', async () => {}); + await pubsub.subscribe('logs', 'consumer1', async () => {}); + + const subs = pubsub.listSubscriptions('events'); + + expect(subs.length).toBe(2); + }); + }); + + describe('clear', () => { + it('should clear all state', async () => { + const mockManager = createMockQueueManager(); + const pubsub = new QueueBackedPubSub(mockManager); + + await pubsub.subscribe('events', 'consumer1', async () => {}); + await pubsub.subscribe('logs', 'consumer1', async () => {}); + + await pubsub.clear(); + + expect(pubsub.listTopics().length).toBe(0); + expect(pubsub.listSubscriptions().length).toBe(0); + }); + }); +}); diff --git a/js/tests/rate-limiter.test.js b/js/tests/rate-limiter.test.js new file mode 100644 index 0000000..2fbda60 --- /dev/null +++ b/js/tests/rate-limiter.test.js @@ -0,0 +1,658 @@ +/** + * Test file for Rate Limiter module + * Tests for sliding window, token bucket, and rate-limited queue + */ + +import { describe, it, expect } from 'test-anywhere'; +import { + SlidingWindowCounter, + TokenBucket, + RateLimiter, + RateLimitedQueue, + RateLimitError, +} from '../src/features/rate-limiter.js'; + +// ============================================================================= +// SlidingWindowCounter Tests +// ============================================================================= + +describe('SlidingWindowCounter', () => { + describe('constructor', () => { + it('should create counter with config', () => { + const counter = new SlidingWindowCounter({ max: 10, window: 60000 }); + const stats = counter.getStats(); + + expect(stats.max).toBe(10); + expect(stats.windowMs).toBe(60000); + expect(stats.currentCount).toBe(0); + expect(stats.previousCount).toBe(0); + }); + }); + + describe('check', () => { + it('should allow requests under the limit', () => { + const counter = new SlidingWindowCounter({ max: 10, window: 60000 }); + + const result = counter.check(); + + expect(result.allowed).toBe(true); + expect(result.remaining).toBe(10); + expect(result.retryAfter).toBe(0); + }); + + it('should not increment counter on check', () => { + const counter = new SlidingWindowCounter({ max: 10, window: 60000 }); + + counter.check(); + counter.check(); + counter.check(); + + const stats = counter.getStats(); + expect(stats.currentCount).toBe(0); + }); + }); + + describe('increment', () => { + it('should increment counter', () => { + const counter = new SlidingWindowCounter({ max: 10, window: 60000 }); + + counter.increment(); + counter.increment(); + counter.increment(); + + const stats = counter.getStats(); + expect(stats.currentCount).toBe(3); + }); + + it('should return updated state after increment', () => { + const counter = new SlidingWindowCounter({ max: 10, window: 60000 }); + + const result = counter.increment(); + + expect(result.remaining).toBe(9); + }); + }); + + describe('consume', () => { + it('should check and increment atomically', () => { + const counter = new SlidingWindowCounter({ max: 10, window: 60000 }); + + const result1 = counter.consume(); + expect(result1.allowed).toBe(true); + expect(result1.remaining).toBe(9); + + const result2 = counter.consume(); + expect(result2.allowed).toBe(true); + expect(result2.remaining).toBe(8); + }); + + it('should deny when limit reached', () => { + const counter = new SlidingWindowCounter({ max: 2, window: 60000 }); + + counter.consume(); + counter.consume(); + const result = counter.consume(); + + expect(result.allowed).toBe(false); + expect(result.remaining).toBe(0); + }); + + it('should not increment when denied', () => { + const counter = new SlidingWindowCounter({ max: 2, window: 60000 }); + + counter.consume(); + counter.consume(); + counter.consume(); // Should be denied + + const stats = counter.getStats(); + expect(stats.currentCount).toBe(2); + }); + }); + + describe('reset', () => { + it('should reset all counters', () => { + const counter = new SlidingWindowCounter({ max: 10, window: 60000 }); + + counter.consume(); + counter.consume(); + counter.consume(); + + counter.reset(); + + const stats = counter.getStats(); + expect(stats.currentCount).toBe(0); + expect(stats.previousCount).toBe(0); + }); + }); + + describe('getStats', () => { + it('should return current statistics', () => { + const counter = new SlidingWindowCounter({ max: 10, window: 60000 }); + + counter.consume(); + counter.consume(); + + const stats = counter.getStats(); + + expect(stats.currentCount).toBe(2); + expect(stats.max).toBe(10); + expect(stats.windowMs).toBe(60000); + expect(typeof stats.weightedCount).toBe('number'); + }); + }); +}); + +// ============================================================================= +// TokenBucket Tests +// ============================================================================= + +describe('TokenBucket', () => { + describe('constructor', () => { + it('should create bucket with full capacity', () => { + const bucket = new TokenBucket({ max: 10, window: 60000 }); + const stats = bucket.getStats(); + + expect(stats.tokens).toBe(10); + expect(stats.max).toBe(10); + }); + }); + + describe('check', () => { + it('should allow when tokens available', () => { + const bucket = new TokenBucket({ max: 10, window: 60000 }); + + const result = bucket.check(); + + expect(result.allowed).toBe(true); + expect(result.remaining).toBe(10); + }); + + it('should not consume tokens on check', () => { + const bucket = new TokenBucket({ max: 10, window: 60000 }); + + bucket.check(); + bucket.check(); + + const stats = bucket.getStats(); + expect(stats.tokens).toBe(10); + }); + }); + + describe('consume', () => { + it('should consume one token', () => { + const bucket = new TokenBucket({ max: 10, window: 60000 }); + + const result = bucket.consume(); + + expect(result.allowed).toBe(true); + expect(result.remaining).toBe(9); + }); + + it('should deny when no tokens available', () => { + const bucket = new TokenBucket({ max: 2, window: 60000 }); + + bucket.consume(); + bucket.consume(); + const result = bucket.consume(); + + expect(result.allowed).toBe(false); + expect(result.retryAfter).toBeGreaterThan(0); + }); + + it('should allow burst usage', () => { + const bucket = new TokenBucket({ max: 5, window: 60000 }); + + // Should allow all 5 quickly (burst) + for (let i = 0; i < 5; i++) { + const result = bucket.consume(); + expect(result.allowed).toBe(true); + } + + // 6th should be denied + const result = bucket.consume(); + expect(result.allowed).toBe(false); + }); + }); + + describe('reset', () => { + it('should reset bucket to full', () => { + const bucket = new TokenBucket({ max: 10, window: 60000 }); + + bucket.consume(); + bucket.consume(); + bucket.consume(); + + bucket.reset(); + + const stats = bucket.getStats(); + expect(stats.tokens).toBe(10); + }); + }); + + describe('getStats', () => { + it('should return current statistics', () => { + const bucket = new TokenBucket({ max: 10, window: 60000 }); + + bucket.consume(); + bucket.consume(); + + const stats = bucket.getStats(); + + expect(stats.tokens).toBe(8); + expect(stats.max).toBe(10); + expect(typeof stats.refillRate).toBe('number'); + }); + }); +}); + +// ============================================================================= +// RateLimiter Tests +// ============================================================================= + +describe('RateLimiter', () => { + describe('constructor', () => { + it('should create rate limiter with default algorithm', () => { + const limiter = new RateLimiter({ max: 10, window: 60000 }); + expect(limiter.keyCount).toBe(0); + limiter.stop(); + }); + + it('should create rate limiter with token-bucket algorithm', () => { + const limiter = new RateLimiter({ + max: 10, + window: 60000, + algorithm: 'token-bucket', + }); + expect(limiter.keyCount).toBe(0); + limiter.stop(); + }); + }); + + describe('check', () => { + it('should check default key', () => { + const limiter = new RateLimiter({ max: 10, window: 60000 }); + + const result = limiter.check(); + + expect(result.allowed).toBe(true); + limiter.stop(); + }); + + it('should create limiter for key on first access', () => { + const limiter = new RateLimiter({ max: 10, window: 60000 }); + + limiter.check('user:123'); + + expect(limiter.keyCount).toBe(1); + limiter.stop(); + }); + }); + + describe('consume', () => { + it('should consume for default key', () => { + const limiter = new RateLimiter({ max: 10, window: 60000 }); + + const result = limiter.consume(); + + expect(result.allowed).toBe(true); + limiter.stop(); + }); + + it('should track separate limits per key', () => { + const limiter = new RateLimiter({ max: 2, window: 60000 }); + + // Exhaust limit for key1 + limiter.consume('key1'); + limiter.consume('key1'); + const result1 = limiter.consume('key1'); + expect(result1.allowed).toBe(false); + + // key2 should still be allowed + const result2 = limiter.consume('key2'); + expect(result2.allowed).toBe(true); + + limiter.stop(); + }); + }); + + describe('reset', () => { + it('should reset specific key', () => { + const limiter = new RateLimiter({ max: 2, window: 60000 }); + + limiter.consume('key1'); + limiter.consume('key1'); + + limiter.reset('key1'); + + const result = limiter.consume('key1'); + expect(result.allowed).toBe(true); + + limiter.stop(); + }); + }); + + describe('resetAll', () => { + it('should reset all keys', () => { + const limiter = new RateLimiter({ max: 2, window: 60000 }); + + limiter.consume('key1'); + limiter.consume('key1'); + limiter.consume('key2'); + limiter.consume('key2'); + + limiter.resetAll(); + + expect(limiter.consume('key1').allowed).toBe(true); + expect(limiter.consume('key2').allowed).toBe(true); + + limiter.stop(); + }); + }); + + describe('remove', () => { + it('should remove specific key', () => { + const limiter = new RateLimiter({ max: 10, window: 60000 }); + + limiter.consume('key1'); + expect(limiter.keyCount).toBe(1); + + limiter.remove('key1'); + expect(limiter.keyCount).toBe(0); + + limiter.stop(); + }); + }); + + describe('getStats', () => { + it('should return stats for key', () => { + const limiter = new RateLimiter({ max: 10, window: 60000 }); + + limiter.consume('key1'); + limiter.consume('key1'); + + const stats = limiter.getStats('key1'); + + expect(stats).not.toBe(null); + limiter.stop(); + }); + + it('should return null for non-existent key', () => { + const limiter = new RateLimiter({ max: 10, window: 60000 }); + + const stats = limiter.getStats('non-existent'); + + expect(stats).toBe(null); + limiter.stop(); + }); + }); + + describe('listKeys', () => { + it('should list all tracked keys', () => { + const limiter = new RateLimiter({ max: 10, window: 60000 }); + + limiter.consume('key1'); + limiter.consume('key2'); + limiter.consume('key3'); + + const keys = limiter.listKeys(); + + expect(keys).toContain('key1'); + expect(keys).toContain('key2'); + expect(keys).toContain('key3'); + + limiter.stop(); + }); + }); + + describe('clear', () => { + it('should clear all state', () => { + const limiter = new RateLimiter({ max: 10, window: 60000 }); + + limiter.consume('key1'); + limiter.consume('key2'); + + limiter.clear(); + + expect(limiter.keyCount).toBe(0); + }); + }); +}); + +// ============================================================================= +// RateLimitedQueue Tests +// ============================================================================= + +describe('RateLimitedQueue', () => { + // Mock queue for testing + const createMockQueue = () => ({ + name: 'test-queue', + _items: [], + async enqueue(link) { + this._items.push(link); + return { id: link.id, position: this._items.length - 1 }; + }, + async dequeue() { + return this._items.shift() || null; + }, + async peek() { + return this._items[0] || null; + }, + async acknowledge() {}, + async reject() {}, + getStats() { + return { + depth: this._items.length, + enqueued: 0, + dequeued: 0, + acknowledged: 0, + rejected: 0, + inFlight: 0, + }; + }, + getDepth() { + return this._items.length; + }, + async clear() { + this._items = []; + }, + }); + + describe('constructor', () => { + it('should create rate-limited queue', () => { + const mockQueue = createMockQueue(); + const rateLimitedQueue = new RateLimitedQueue(mockQueue, { + max: 10, + window: 60000, + }); + + expect(rateLimitedQueue.name).toBe('test-queue'); + rateLimitedQueue.stop(); + }); + }); + + describe('enqueue', () => { + it('should enqueue when under limit', async () => { + const mockQueue = createMockQueue(); + const rateLimitedQueue = new RateLimitedQueue(mockQueue, { + max: 10, + window: 60000, + }); + + const link = { id: 1, source: 'test', target: 'item' }; + const result = await rateLimitedQueue.enqueue(link); + + expect(result.id).toBe(1); + expect(mockQueue._items.length).toBe(1); + + rateLimitedQueue.stop(); + }); + + it('should throw RateLimitError when limit exceeded', async () => { + const mockQueue = createMockQueue(); + const rateLimitedQueue = new RateLimitedQueue(mockQueue, { + max: 2, + window: 60000, + }); + + await rateLimitedQueue.enqueue({ id: 1, source: 'a', target: 'b' }); + await rateLimitedQueue.enqueue({ id: 2, source: 'c', target: 'd' }); + + let caught = false; + try { + await rateLimitedQueue.enqueue({ id: 3, source: 'e', target: 'f' }); + } catch (error) { + caught = true; + expect(error instanceof RateLimitError).toBe(true); + expect(error.code).toBe('RATE_LIMITED'); + expect(error.retryAfter).toBeGreaterThanOrEqual(0); + } + + expect(caught).toBe(true); + rateLimitedQueue.stop(); + }); + }); + + describe('dequeue', () => { + it('should dequeue when under limit', async () => { + const mockQueue = createMockQueue(); + const rateLimitedQueue = new RateLimitedQueue(mockQueue, { + max: 10, + window: 60000, + }); + + const link = { id: 1, source: 'test', target: 'item' }; + await rateLimitedQueue.enqueue(link); + + const result = await rateLimitedQueue.dequeue(); + + expect(result).toEqual(link); + rateLimitedQueue.stop(); + }); + + it('should apply per-consumer rate limit', async () => { + const mockQueue = createMockQueue(); + const rateLimitedQueue = new RateLimitedQueue(mockQueue, { + max: 2, + window: 60000, + }); + + await rateLimitedQueue.enqueue({ id: 1, source: 'a', target: 'b' }); + await rateLimitedQueue.enqueue({ id: 2, source: 'c', target: 'd' }); + + // Reset enqueue limiter to allow fresh dequeue test + await rateLimitedQueue.clear(); + await rateLimitedQueue.enqueue({ id: 1, source: 'a', target: 'b' }); + await rateLimitedQueue.enqueue({ id: 2, source: 'c', target: 'd' }); + + await rateLimitedQueue.dequeue('consumer1'); + await rateLimitedQueue.dequeue('consumer1'); + + let caught = false; + try { + await rateLimitedQueue.dequeue('consumer1'); + } catch (error) { + caught = true; + expect(error instanceof RateLimitError).toBe(true); + } + + expect(caught).toBe(true); + rateLimitedQueue.stop(); + }); + }); + + describe('peek', () => { + it('should not be rate limited', async () => { + const mockQueue = createMockQueue(); + const rateLimitedQueue = new RateLimitedQueue(mockQueue, { + max: 1, + window: 60000, + }); + + await rateLimitedQueue.enqueue({ id: 1, source: 'a', target: 'b' }); + + // Peek multiple times should not throw + await rateLimitedQueue.peek(); + await rateLimitedQueue.peek(); + await rateLimitedQueue.peek(); + + rateLimitedQueue.stop(); + }); + }); + + describe('getStats', () => { + it('should include rate limit info in stats', async () => { + const mockQueue = createMockQueue(); + const rateLimitedQueue = new RateLimitedQueue(mockQueue, { + max: 10, + window: 60000, + }); + + await rateLimitedQueue.enqueue({ id: 1, source: 'a', target: 'b' }); + + const stats = rateLimitedQueue.getStats(); + + expect(stats.depth).toBe(1); + expect(stats.rateLimit).not.toBeUndefined(); + expect(stats.rateLimit.enqueue).not.toBeUndefined(); + expect(stats.rateLimit.dequeue).not.toBeUndefined(); + + rateLimitedQueue.stop(); + }); + }); + + describe('clear', () => { + it('should reset rate limiters and clear queue', async () => { + const mockQueue = createMockQueue(); + const rateLimitedQueue = new RateLimitedQueue(mockQueue, { + max: 2, + window: 60000, + }); + + await rateLimitedQueue.enqueue({ id: 1, source: 'a', target: 'b' }); + await rateLimitedQueue.enqueue({ id: 2, source: 'c', target: 'd' }); + + await rateLimitedQueue.clear(); + + // Should be able to enqueue again + await rateLimitedQueue.enqueue({ id: 3, source: 'e', target: 'f' }); + await rateLimitedQueue.enqueue({ id: 4, source: 'g', target: 'h' }); + + expect(rateLimitedQueue.getDepth()).toBe(2); + + rateLimitedQueue.stop(); + }); + }); +}); + +// ============================================================================= +// RateLimitError Tests +// ============================================================================= + +describe('RateLimitError', () => { + it('should create error with all properties', () => { + const error = new RateLimitError('Rate limit exceeded', 5000, { + allowed: false, + remaining: 0, + resetAt: Date.now() + 60000, + retryAfter: 5000, + }); + + expect(error.name).toBe('RateLimitError'); + expect(error.code).toBe('RATE_LIMITED'); + expect(error.message).toBe('Rate limit exceeded'); + expect(error.retryAfter).toBe(5000); + expect(error.remaining).toBe(0); + expect(error.resetAt).toBeGreaterThan(Date.now()); + }); + + it('should be an instance of Error', () => { + const error = new RateLimitError('Test', 0, { + allowed: false, + remaining: 0, + resetAt: Date.now(), + retryAfter: 0, + }); + + expect(error instanceof Error).toBe(true); + }); +}); diff --git a/js/tests/router.test.js b/js/tests/router.test.js new file mode 100644 index 0000000..dfaf384 --- /dev/null +++ b/js/tests/router.test.js @@ -0,0 +1,870 @@ +/** + * Test file for Router module + * Tests for topic routing, exchanges, and pattern matching + */ + +import { describe, it, expect } from 'test-anywhere'; +import { + ExchangeType, + TopicMatcher, + DirectExchange, + TopicExchange, + FanoutExchange, + HeadersExchange, + Router, + RoutedQueueManager, +} from '../src/features/router.js'; + +// ============================================================================= +// ExchangeType Tests +// ============================================================================= + +describe('ExchangeType', () => { + it('should define all exchange types', () => { + expect(ExchangeType.DIRECT).toBe('direct'); + expect(ExchangeType.TOPIC).toBe('topic'); + expect(ExchangeType.FANOUT).toBe('fanout'); + expect(ExchangeType.HEADERS).toBe('headers'); + }); + + it('should be frozen', () => { + expect(Object.isFrozen(ExchangeType)).toBe(true); + }); +}); + +// ============================================================================= +// TopicMatcher Tests +// ============================================================================= + +describe('TopicMatcher', () => { + describe('matches', () => { + it('should match exact routing key', () => { + expect(TopicMatcher.matches('logs.error', 'logs.error')).toBe(true); + expect(TopicMatcher.matches('logs.error', 'logs.info')).toBe(false); + }); + + it('should match single-word wildcard (*)', () => { + expect(TopicMatcher.matches('logs.*', 'logs.error')).toBe(true); + expect(TopicMatcher.matches('logs.*', 'logs.info')).toBe(true); + expect(TopicMatcher.matches('logs.*', 'logs.error.db')).toBe(false); + expect(TopicMatcher.matches('*.error', 'logs.error')).toBe(true); + expect(TopicMatcher.matches('*.error', 'app.error')).toBe(true); + }); + + it('should match multi-word wildcard (#)', () => { + // logs.# matches logs followed by zero or more .word sequences + expect(TopicMatcher.matches('logs.#', 'logs')).toBe(true); + expect(TopicMatcher.matches('logs.#', 'logs.error')).toBe(true); + expect(TopicMatcher.matches('logs.#', 'logs.error.db')).toBe(true); + expect(TopicMatcher.matches('logs.#', 'logs.a.b.c.d')).toBe(true); + }); + + it('should match # at beginning', () => { + // #.error matches zero or more words followed by .error + expect(TopicMatcher.matches('#.error', 'error')).toBe(true); + expect(TopicMatcher.matches('#.error', 'logs.error')).toBe(true); + expect(TopicMatcher.matches('#.error', 'a.b.c.error')).toBe(true); + }); + + it('should match # alone', () => { + expect(TopicMatcher.matches('#', 'anything')).toBe(true); + expect(TopicMatcher.matches('#', 'a.b.c')).toBe(true); + }); + + it('should handle complex patterns', () => { + expect(TopicMatcher.matches('*.system.*', 'app.system.startup')).toBe( + true + ); + expect(TopicMatcher.matches('*.system.*', 'db.system.shutdown')).toBe( + true + ); + expect(TopicMatcher.matches('*.system.*', 'system.startup')).toBe(false); + }); + }); + + describe('specificity', () => { + it('should score exact matches highest', () => { + const exactScore = TopicMatcher.specificity('logs.error.db'); + const wildcardScore = TopicMatcher.specificity('logs.*.db'); + const hashScore = TopicMatcher.specificity('logs.#'); + + expect(exactScore).toBeGreaterThan(wildcardScore); + expect(wildcardScore).toBeGreaterThan(hashScore); + }); + + it('should give # lowest specificity', () => { + const score = TopicMatcher.specificity('#'); + expect(score).toBe(1); + }); + + it('should give * medium specificity', () => { + const score = TopicMatcher.specificity('*'); + expect(score).toBe(10); + }); + + it('should give exact word highest specificity', () => { + const score = TopicMatcher.specificity('logs'); + expect(score).toBe(100); + }); + }); +}); + +// ============================================================================= +// DirectExchange Tests +// ============================================================================= + +describe('DirectExchange', () => { + describe('constructor', () => { + it('should create exchange with name and type', () => { + const exchange = new DirectExchange('logs'); + + expect(exchange.name).toBe('logs'); + expect(exchange.type).toBe(ExchangeType.DIRECT); + }); + }); + + describe('bind', () => { + it('should bind queue with routing key', () => { + const exchange = new DirectExchange('logs'); + + const binding = exchange.bind('errors-queue', 'error'); + + expect(binding.exchange).toBe('logs'); + expect(binding.queue).toBe('errors-queue'); + expect(binding.routingKey).toBe('error'); + }); + + it('should support multiple queues for same routing key', () => { + const exchange = new DirectExchange('logs'); + + exchange.bind('queue1', 'error'); + exchange.bind('queue2', 'error'); + + const queues = exchange.route('error'); + expect(queues).toContain('queue1'); + expect(queues).toContain('queue2'); + }); + }); + + describe('unbind', () => { + it('should unbind queue from routing key', () => { + const exchange = new DirectExchange('logs'); + + exchange.bind('errors-queue', 'error'); + const result = exchange.unbind('errors-queue', 'error'); + + expect(result).toBe(true); + expect(exchange.route('error')).toEqual([]); + }); + + it('should return false for non-existent binding', () => { + const exchange = new DirectExchange('logs'); + + const result = exchange.unbind('non-existent', 'error'); + + expect(result).toBe(false); + }); + }); + + describe('route', () => { + it('should route to exact match', () => { + const exchange = new DirectExchange('logs'); + + exchange.bind('errors-queue', 'error'); + exchange.bind('info-queue', 'info'); + + expect(exchange.route('error')).toEqual(['errors-queue']); + expect(exchange.route('info')).toEqual(['info-queue']); + }); + + it('should return empty array for no match', () => { + const exchange = new DirectExchange('logs'); + + exchange.bind('errors-queue', 'error'); + + expect(exchange.route('warning')).toEqual([]); + }); + }); + + describe('getBindings', () => { + it('should return all bindings', () => { + const exchange = new DirectExchange('logs'); + + exchange.bind('errors-queue', 'error'); + exchange.bind('info-queue', 'info'); + + const bindings = exchange.getBindings(); + expect(bindings.length).toBe(2); + }); + }); + + describe('clear', () => { + it('should remove all bindings', () => { + const exchange = new DirectExchange('logs'); + + exchange.bind('errors-queue', 'error'); + exchange.bind('info-queue', 'info'); + + exchange.clear(); + + expect(exchange.getBindings().length).toBe(0); + }); + }); +}); + +// ============================================================================= +// TopicExchange Tests +// ============================================================================= + +describe('TopicExchange', () => { + describe('constructor', () => { + it('should create exchange with name and type', () => { + const exchange = new TopicExchange('events'); + + expect(exchange.name).toBe('events'); + expect(exchange.type).toBe(ExchangeType.TOPIC); + }); + }); + + describe('bind', () => { + it('should bind queue with pattern', () => { + const exchange = new TopicExchange('events'); + + const binding = exchange.bind('all-logs', 'logs.#'); + + expect(binding.exchange).toBe('events'); + expect(binding.queue).toBe('all-logs'); + expect(binding.routingKey).toBe('logs.#'); + }); + }); + + describe('unbind', () => { + it('should unbind queue from pattern', () => { + const exchange = new TopicExchange('events'); + + exchange.bind('all-logs', 'logs.#'); + const result = exchange.unbind('all-logs', 'logs.#'); + + expect(result).toBe(true); + expect(exchange.route('logs.error')).toEqual([]); + }); + }); + + describe('route', () => { + it('should route using wildcard patterns', () => { + const exchange = new TopicExchange('events'); + + exchange.bind('all-logs', 'logs.#'); + exchange.bind('errors-only', 'logs.error'); + + const queues = exchange.route('logs.error'); + expect(queues).toContain('all-logs'); + expect(queues).toContain('errors-only'); + }); + + it('should deduplicate queues', () => { + const exchange = new TopicExchange('events'); + + // Same queue with different patterns + exchange.bind('my-queue', 'logs.#'); + exchange.bind('my-queue', 'logs.error'); + + const queues = exchange.route('logs.error'); + expect(queues.length).toBe(1); + expect(queues[0]).toBe('my-queue'); + }); + + it('should match complex patterns', () => { + const exchange = new TopicExchange('events'); + + exchange.bind('system-queue', '*.system.*'); + + expect(exchange.route('app.system.startup')).toEqual(['system-queue']); + expect(exchange.route('db.system.shutdown')).toEqual(['system-queue']); + expect(exchange.route('system.startup')).toEqual([]); + }); + }); +}); + +// ============================================================================= +// FanoutExchange Tests +// ============================================================================= + +describe('FanoutExchange', () => { + describe('constructor', () => { + it('should create exchange with name and type', () => { + const exchange = new FanoutExchange('notifications'); + + expect(exchange.name).toBe('notifications'); + expect(exchange.type).toBe(ExchangeType.FANOUT); + }); + }); + + describe('bind', () => { + it('should bind queue (routing key ignored)', () => { + const exchange = new FanoutExchange('notifications'); + + const binding = exchange.bind('email-queue'); + + expect(binding.exchange).toBe('notifications'); + expect(binding.queue).toBe('email-queue'); + expect(binding.routingKey).toBe(''); + }); + }); + + describe('unbind', () => { + it('should unbind queue', () => { + const exchange = new FanoutExchange('notifications'); + + exchange.bind('email-queue'); + const result = exchange.unbind('email-queue'); + + expect(result).toBe(true); + expect(exchange.route()).toEqual([]); + }); + }); + + describe('route', () => { + it('should route to all bound queues', () => { + const exchange = new FanoutExchange('notifications'); + + exchange.bind('email-queue'); + exchange.bind('sms-queue'); + exchange.bind('push-queue'); + + const queues = exchange.route(); + + expect(queues).toContain('email-queue'); + expect(queues).toContain('sms-queue'); + expect(queues).toContain('push-queue'); + expect(queues.length).toBe(3); + }); + }); +}); + +// ============================================================================= +// HeadersExchange Tests +// ============================================================================= + +describe('HeadersExchange', () => { + describe('constructor', () => { + it('should create exchange with name and type', () => { + const exchange = new HeadersExchange('tasks'); + + expect(exchange.name).toBe('tasks'); + expect(exchange.type).toBe(ExchangeType.HEADERS); + }); + }); + + describe('bind', () => { + it('should bind queue with headers (match all)', () => { + const exchange = new HeadersExchange('tasks'); + + const binding = exchange.bind( + 'high-priority', + { priority: 'high' }, + 'all' + ); + + expect(binding.exchange).toBe('tasks'); + expect(binding.queue).toBe('high-priority'); + expect(binding.arguments.headers.priority).toBe('high'); + expect(binding.arguments.matchType).toBe('all'); + }); + + it('should default to match all', () => { + const exchange = new HeadersExchange('tasks'); + + exchange.bind('queue', { type: 'system' }); + const bindings = exchange.getBindings(); + + expect(bindings[0].arguments.matchType).toBe('all'); + }); + }); + + describe('unbind', () => { + it('should unbind queue', () => { + const exchange = new HeadersExchange('tasks'); + + exchange.bind('high-priority', { priority: 'high' }); + const result = exchange.unbind('high-priority', { priority: 'high' }); + + expect(result).toBe(true); + expect(exchange.route({ priority: 'high' })).toEqual([]); + }); + }); + + describe('route', () => { + it('should route with match all', () => { + const exchange = new HeadersExchange('tasks'); + + exchange.bind( + 'urgent-system', + { priority: 'high', type: 'system' }, + 'all' + ); + + expect(exchange.route({ priority: 'high', type: 'system' })).toEqual([ + 'urgent-system', + ]); + expect(exchange.route({ priority: 'high' })).toEqual([]); + expect(exchange.route({ type: 'system' })).toEqual([]); + }); + + it('should route with match any', () => { + const exchange = new HeadersExchange('tasks'); + + exchange.bind('special', { priority: 'high', type: 'system' }, 'any'); + + expect(exchange.route({ priority: 'high' })).toEqual(['special']); + expect(exchange.route({ type: 'system' })).toEqual(['special']); + expect(exchange.route({ priority: 'low' })).toEqual([]); + }); + + it('should handle multiple bindings', () => { + const exchange = new HeadersExchange('tasks'); + + exchange.bind('high-priority', { priority: 'high' }, 'all'); + exchange.bind('system-tasks', { type: 'system' }, 'all'); + + const queues = exchange.route({ priority: 'high', type: 'system' }); + expect(queues).toContain('high-priority'); + expect(queues).toContain('system-tasks'); + }); + }); +}); + +// ============================================================================= +// Router Tests +// ============================================================================= + +describe('Router', () => { + describe('declareExchange', () => { + it('should create direct exchange', () => { + const router = new Router(); + + const exchange = router.declareExchange('logs', 'direct'); + + expect(exchange.type).toBe('direct'); + }); + + it('should create topic exchange', () => { + const router = new Router(); + + const exchange = router.declareExchange('events', 'topic'); + + expect(exchange.type).toBe('topic'); + }); + + it('should create fanout exchange', () => { + const router = new Router(); + + const exchange = router.declareExchange('notifications', 'fanout'); + + expect(exchange.type).toBe('fanout'); + }); + + it('should create headers exchange', () => { + const router = new Router(); + + const exchange = router.declareExchange('tasks', 'headers'); + + expect(exchange.type).toBe('headers'); + }); + + it('should return existing exchange with same type', () => { + const router = new Router(); + + const exchange1 = router.declareExchange('logs', 'direct'); + const exchange2 = router.declareExchange('logs', 'direct'); + + expect(exchange1).toBe(exchange2); + }); + + it('should throw when redeclaring with different type', () => { + const router = new Router(); + + router.declareExchange('logs', 'direct'); + + expect(() => router.declareExchange('logs', 'topic')).toThrow(); + }); + + it('should throw for unknown type', () => { + const router = new Router(); + + expect(() => router.declareExchange('logs', 'invalid')).toThrow(); + }); + }); + + describe('getExchange', () => { + it('should return exchange by name', () => { + const router = new Router(); + + router.declareExchange('logs', 'direct'); + const exchange = router.getExchange('logs'); + + expect(exchange).not.toBeUndefined(); + expect(exchange.name).toBe('logs'); + }); + + it('should return undefined for non-existent exchange', () => { + const router = new Router(); + + const exchange = router.getExchange('non-existent'); + + expect(exchange).toBeUndefined(); + }); + }); + + describe('deleteExchange', () => { + it('should delete exchange', () => { + const router = new Router(); + + router.declareExchange('logs', 'direct'); + const result = router.deleteExchange('logs'); + + expect(result).toBe(true); + expect(router.getExchange('logs')).toBeUndefined(); + }); + + it('should return false for non-existent exchange', () => { + const router = new Router(); + + const result = router.deleteExchange('non-existent'); + + expect(result).toBe(false); + }); + }); + + describe('bind/unbind', () => { + it('should bind queue to direct exchange', () => { + const router = new Router(); + + router.declareExchange('logs', 'direct'); + const binding = router.bind('logs', 'errors-queue', 'error'); + + expect(binding.queue).toBe('errors-queue'); + expect(binding.routingKey).toBe('error'); + }); + + it('should bind queue to topic exchange', () => { + const router = new Router(); + + router.declareExchange('events', 'topic'); + const binding = router.bind('events', 'all-logs', 'logs.#'); + + expect(binding.routingKey).toBe('logs.#'); + }); + + it('should bind queue to fanout exchange', () => { + const router = new Router(); + + router.declareExchange('notifications', 'fanout'); + const binding = router.bind('notifications', 'email-queue'); + + expect(binding.queue).toBe('email-queue'); + }); + + it('should bind queue to headers exchange', () => { + const router = new Router(); + + router.declareExchange('tasks', 'headers'); + const binding = router.bind('tasks', 'high-priority', '', { + headers: { priority: 'high' }, + matchType: 'all', + }); + + expect(binding.arguments.headers.priority).toBe('high'); + }); + + it('should throw when binding to non-existent exchange', () => { + const router = new Router(); + + expect(() => router.bind('non-existent', 'queue', 'key')).toThrow(); + }); + + it('should unbind queue', () => { + const router = new Router(); + + router.declareExchange('logs', 'direct'); + router.bind('logs', 'errors-queue', 'error'); + + const result = router.unbind('logs', 'errors-queue', 'error'); + + expect(result).toBe(true); + }); + }); + + describe('route', () => { + it('should route through direct exchange', () => { + const router = new Router(); + + router.declareExchange('logs', 'direct'); + router.bind('logs', 'errors-queue', 'error'); + + const queues = router.route('logs', 'error'); + + expect(queues).toEqual(['errors-queue']); + }); + + it('should route through topic exchange', () => { + const router = new Router(); + + router.declareExchange('events', 'topic'); + router.bind('events', 'all-logs', 'logs.#'); + + const queues = router.route('events', 'logs.error.db'); + + expect(queues).toEqual(['all-logs']); + }); + + it('should route through fanout exchange', () => { + const router = new Router(); + + router.declareExchange('notifications', 'fanout'); + router.bind('notifications', 'email-queue'); + router.bind('notifications', 'sms-queue'); + + const queues = router.route('notifications'); + + expect(queues).toContain('email-queue'); + expect(queues).toContain('sms-queue'); + }); + + it('should route through headers exchange', () => { + const router = new Router(); + + router.declareExchange('tasks', 'headers'); + router.bind('tasks', 'high-priority', '', { + headers: { priority: 'high' }, + }); + + const queues = router.route('tasks', '', { priority: 'high' }); + + expect(queues).toEqual(['high-priority']); + }); + + it('should return empty array for non-existent exchange', () => { + const router = new Router(); + + const queues = router.route('non-existent', 'key'); + + expect(queues).toEqual([]); + }); + }); + + describe('listExchanges', () => { + it('should list all exchanges', () => { + const router = new Router(); + + router.declareExchange('logs', 'direct'); + router.declareExchange('events', 'topic'); + + const exchanges = router.listExchanges(); + + expect(exchanges.length).toBe(2); + expect(exchanges.find((e) => e.name === 'logs')).not.toBeUndefined(); + expect(exchanges.find((e) => e.name === 'events')).not.toBeUndefined(); + }); + + it('should include binding count', () => { + const router = new Router(); + + router.declareExchange('logs', 'direct'); + router.bind('logs', 'queue1', 'key1'); + router.bind('logs', 'queue2', 'key2'); + + const exchanges = router.listExchanges(); + + expect(exchanges.find((e) => e.name === 'logs').bindings).toBe(2); + }); + }); + + describe('getBindings', () => { + it('should return bindings for exchange', () => { + const router = new Router(); + + router.declareExchange('logs', 'direct'); + router.bind('logs', 'queue1', 'key1'); + router.bind('logs', 'queue2', 'key2'); + + const bindings = router.getBindings('logs'); + + expect(bindings.length).toBe(2); + }); + + it('should return empty array for non-existent exchange', () => { + const router = new Router(); + + const bindings = router.getBindings('non-existent'); + + expect(bindings).toEqual([]); + }); + }); + + describe('clear', () => { + it('should clear all exchanges', () => { + const router = new Router(); + + router.declareExchange('logs', 'direct'); + router.declareExchange('events', 'topic'); + router.bind('logs', 'queue', 'key'); + + router.clear(); + + expect(router.listExchanges().length).toBe(0); + }); + }); +}); + +// ============================================================================= +// RoutedQueueManager Tests +// ============================================================================= + +describe('RoutedQueueManager', () => { + // Mock queue manager for testing + const createMockQueueManager = () => { + const queues = new Map(); + + return { + async createQueue(name, options = {}) { + const queue = { + name, + options, + _items: [], + async enqueue(link) { + this._items.push(link); + return { id: link.id, position: this._items.length - 1 }; + }, + async dequeue() { + return this._items.shift() || null; + }, + getDepth() { + return this._items.length; + }, + }; + queues.set(name, queue); + return queue; + }, + async getQueue(name) { + return queues.get(name) || null; + }, + async deleteQueue(name) { + return queues.delete(name); + }, + async listQueues() { + return [...queues.values()].map((q) => ({ + name: q.name, + depth: q._items.length, + })); + }, + }; + }; + + describe('createQueue', () => { + it('should create queue and register with router', async () => { + const mockManager = createMockQueueManager(); + const routedManager = new RoutedQueueManager(mockManager); + + const queue = await routedManager.createQueue('tasks'); + + expect(queue.name).toBe('tasks'); + }); + }); + + describe('declareExchange', () => { + it('should delegate to router', () => { + const mockManager = createMockQueueManager(); + const routedManager = new RoutedQueueManager(mockManager); + + const exchange = routedManager.declareExchange('logs', 'topic'); + + expect(exchange.type).toBe('topic'); + }); + }); + + describe('bindTopic', () => { + it('should bind topic pattern', async () => { + const mockManager = createMockQueueManager(); + const routedManager = new RoutedQueueManager(mockManager); + + await routedManager.createQueue('all-logs'); + routedManager.declareExchange('logs', 'topic'); + + const binding = routedManager.bindTopic('logs', 'all-logs', 'logs.#'); + + expect(binding.routingKey).toBe('logs.#'); + }); + }); + + describe('publish', () => { + it('should route message to matching queues', async () => { + const mockManager = createMockQueueManager(); + const routedManager = new RoutedQueueManager(mockManager); + + await routedManager.createQueue('errors'); + await routedManager.createQueue('all-logs'); + routedManager.declareExchange('logs', 'topic'); + routedManager.bindTopic('logs', 'errors', 'logs.error'); + routedManager.bindTopic('logs', 'all-logs', 'logs.#'); + + const link = { id: 1, source: 'test', target: 'item' }; + const queues = await routedManager.publish('logs', link, 'logs.error'); + + expect(queues).toContain('errors'); + expect(queues).toContain('all-logs'); + + const errorsQueue = await routedManager.getQueue('errors'); + const allLogsQueue = await routedManager.getQueue('all-logs'); + + expect(errorsQueue._items.length).toBe(1); + expect(allLogsQueue._items.length).toBe(1); + }); + + it('should return empty array for no matches', async () => { + const mockManager = createMockQueueManager(); + const routedManager = new RoutedQueueManager(mockManager); + + routedManager.declareExchange('logs', 'topic'); + + const queues = await routedManager.publish( + 'logs', + { id: 1, source: 'a', target: 'b' }, + 'no.match' + ); + + expect(queues).toEqual([]); + }); + }); + + describe('fanout', () => { + it('should set up fanout routing', async () => { + const mockManager = createMockQueueManager(); + const routedManager = new RoutedQueueManager(mockManager); + + await routedManager.createQueue('email'); + await routedManager.createQueue('sms'); + await routedManager.createQueue('push'); + + routedManager.fanout('notifications', ['email', 'sms', 'push']); + + const link = { id: 1, source: 'test', target: 'item' }; + const queues = await routedManager.publish('notifications', link); + + expect(queues).toContain('email'); + expect(queues).toContain('sms'); + expect(queues).toContain('push'); + }); + }); + + describe('getRouter', () => { + it('should return the router', () => { + const mockManager = createMockQueueManager(); + const routedManager = new RoutedQueueManager(mockManager); + + const router = routedManager.getRouter(); + + expect(router instanceof Router).toBe(true); + }); + }); +}); diff --git a/js/tests/scheduler.test.js b/js/tests/scheduler.test.js new file mode 100644 index 0000000..211a95a --- /dev/null +++ b/js/tests/scheduler.test.js @@ -0,0 +1,524 @@ +/** + * Test file for Scheduler module + * Tests for delayed messages, cron jobs, TTL, and message expiration + */ + +import { describe, it, expect } from 'test-anywhere'; +import { + CronParser, + Scheduler, + ScheduledQueue, +} from '../src/features/scheduler.js'; + +// ============================================================================= +// CronParser Tests +// ============================================================================= + +describe('CronParser', () => { + describe('parseField', () => { + it('should parse wildcard (*)', () => { + const values = CronParser.parseField('*', 0, 5); + expect(values).toEqual([0, 1, 2, 3, 4, 5]); + }); + + it('should parse single value', () => { + const values = CronParser.parseField('5', 0, 10); + expect(values).toEqual([5]); + }); + + it('should parse comma-separated values', () => { + const values = CronParser.parseField('1,3,5', 0, 10); + expect(values).toEqual([1, 3, 5]); + }); + + it('should parse range (1-5)', () => { + const values = CronParser.parseField('1-5', 0, 10); + expect(values).toEqual([1, 2, 3, 4, 5]); + }); + + it('should parse step values (*/2)', () => { + const values = CronParser.parseField('*/2', 0, 6); + expect(values).toEqual([0, 2, 4, 6]); + }); + + it('should parse step values with start (2/3)', () => { + const values = CronParser.parseField('2/3', 0, 10); + expect(values).toEqual([2, 5, 8]); + }); + + it('should filter out values outside range', () => { + const values = CronParser.parseField('0,5,15', 0, 10); + expect(values).toEqual([0, 5]); + }); + }); + + describe('parse', () => { + it('should parse valid 5-field cron expression', () => { + const result = CronParser.parse('0 * * * *'); + expect(result.minutes).toEqual([0]); + expect(result.hours.length).toBe(24); + expect(result.daysOfMonth.length).toBe(31); + expect(result.months.length).toBe(12); + expect(result.daysOfWeek.length).toBe(7); + }); + + it('should parse every 5 minutes expression', () => { + const result = CronParser.parse('*/5 * * * *'); + expect(result.minutes).toEqual([ + 0, 5, 10, 15, 20, 25, 30, 35, 40, 45, 50, 55, + ]); + }); + + it('should parse specific time (9:30 every day)', () => { + const result = CronParser.parse('30 9 * * *'); + expect(result.minutes).toEqual([30]); + expect(result.hours).toEqual([9]); + }); + + it('should parse weekdays only', () => { + const result = CronParser.parse('0 9 * * 1-5'); + expect(result.daysOfWeek).toEqual([1, 2, 3, 4, 5]); + }); + + it('should throw on invalid expression (wrong field count)', () => { + expect(() => CronParser.parse('0 * * *')).toThrow(); + expect(() => CronParser.parse('0 * * * * *')).toThrow(); + }); + }); + + describe('nextRun', () => { + it('should calculate next run time', () => { + const expression = '* * * * *'; // Every minute + const after = new Date('2024-01-01T10:00:00'); + const next = CronParser.nextRun(expression, after); + + expect(next).not.toBe(null); + expect(next.getTime()).toBeGreaterThan(after.getTime()); + }); + + it('should return null for impossible expression', () => { + // February 30th doesn't exist + const expression = '0 0 30 2 *'; + const next = CronParser.nextRun(expression); + expect(next).toBe(null); + }); + }); +}); + +// ============================================================================= +// Scheduler Tests +// ============================================================================= + +describe('Scheduler', () => { + describe('constructor', () => { + it('should create scheduler with default options', () => { + const scheduler = new Scheduler(); + expect(scheduler.isRunning).toBe(false); + expect(scheduler.pendingCount).toBe(0); + }); + + it('should create scheduler with custom options', () => { + const onDue = async () => {}; + const scheduler = new Scheduler({ + checkInterval: 500, + onDue, + }); + expect(scheduler.isRunning).toBe(false); + }); + }); + + describe('schedule', () => { + it('should schedule an item with delay', () => { + const scheduler = new Scheduler(); + const link = { id: 1, source: 'test', target: 'item' }; + + const item = scheduler.schedule(link, { delay: 5000 }); + + expect(item.link).toBe(link); + expect(item.deliverAt).toBeGreaterThan(Date.now()); + expect(scheduler.pendingCount).toBe(1); + }); + + it('should schedule an item with specific deliverAt time', () => { + const scheduler = new Scheduler(); + const link = { id: 1, source: 'test', target: 'item' }; + const deliverAt = Date.now() + 10000; + + const item = scheduler.schedule(link, { deliverAt }); + + expect(item.deliverAt).toBe(deliverAt); + }); + + it('should schedule an item with TTL', () => { + const scheduler = new Scheduler(); + const link = { id: 1, source: 'test', target: 'item' }; + + const item = scheduler.schedule(link, { delay: 5000, ttl: 3000 }); + + expect(item.expiresAt).not.toBe(null); + expect(item.expiresAt).toBeLessThan(item.deliverAt); + }); + + it('should use link ID if provided', () => { + const scheduler = new Scheduler(); + const link = { id: 42, source: 'test', target: 'item' }; + + const item = scheduler.schedule(link); + + expect(item.id).toBe(42); + }); + + it('should generate ID if not provided', () => { + const scheduler = new Scheduler(); + const link = { source: 'test', target: 'item' }; + + const item = scheduler.schedule(link); + + expect(typeof item.id).toBe('string'); + expect(item.id.startsWith('sched_')).toBe(true); + }); + }); + + describe('cancel', () => { + it('should cancel a scheduled item', () => { + const scheduler = new Scheduler(); + const link = { id: 1, source: 'test', target: 'item' }; + + scheduler.schedule(link, { delay: 5000 }); + expect(scheduler.pendingCount).toBe(1); + + const result = scheduler.cancel(1); + expect(result).toBe(true); + expect(scheduler.pendingCount).toBe(0); + }); + + it('should return false for non-existent item', () => { + const scheduler = new Scheduler(); + const result = scheduler.cancel(999); + expect(result).toBe(false); + }); + }); + + describe('get', () => { + it('should get a scheduled item by ID', () => { + const scheduler = new Scheduler(); + const link = { id: 1, source: 'test', target: 'item' }; + + scheduler.schedule(link, { delay: 5000 }); + const item = scheduler.get(1); + + expect(item).not.toBeUndefined(); + expect(item.link).toBe(link); + }); + + it('should return undefined for non-existent item', () => { + const scheduler = new Scheduler(); + const item = scheduler.get(999); + expect(item).toBeUndefined(); + }); + }); + + describe('cron jobs', () => { + it('should add a cron job', () => { + const scheduler = new Scheduler(); + const handler = async () => {}; + + const job = scheduler.addCronJob('test-job', '* * * * *', handler); + + expect(job.id).toBe('test-job'); + expect(job.expression).toBe('* * * * *'); + expect(job.enabled).toBe(true); + expect(job.nextRun).not.toBe(null); + }); + + it('should throw on invalid cron expression', () => { + const scheduler = new Scheduler(); + const handler = async () => {}; + + expect(() => + scheduler.addCronJob('bad-job', 'invalid', handler) + ).toThrow(); + }); + + it('should get a cron job by ID', () => { + const scheduler = new Scheduler(); + const handler = async () => {}; + + scheduler.addCronJob('test-job', '* * * * *', handler); + const job = scheduler.getCronJob('test-job'); + + expect(job).not.toBeUndefined(); + expect(job.id).toBe('test-job'); + }); + + it('should remove a cron job', () => { + const scheduler = new Scheduler(); + const handler = async () => {}; + + scheduler.addCronJob('test-job', '* * * * *', handler); + const result = scheduler.removeCronJob('test-job'); + + expect(result).toBe(true); + expect(scheduler.getCronJob('test-job')).toBeUndefined(); + }); + + it('should enable and disable a cron job', () => { + const scheduler = new Scheduler(); + const handler = async () => {}; + + scheduler.addCronJob('test-job', '* * * * *', handler); + + scheduler.setCronJobEnabled('test-job', false); + expect(scheduler.getCronJob('test-job').enabled).toBe(false); + + scheduler.setCronJobEnabled('test-job', true); + expect(scheduler.getCronJob('test-job').enabled).toBe(true); + }); + + it('should list all cron jobs', () => { + const scheduler = new Scheduler(); + const handler = async () => {}; + + scheduler.addCronJob('job1', '* * * * *', handler); + scheduler.addCronJob('job2', '0 * * * *', handler); + + const jobs = scheduler.listCronJobs(); + expect(jobs.length).toBe(2); + }); + }); + + describe('start/stop', () => { + it('should start and stop the scheduler', () => { + const scheduler = new Scheduler(); + + scheduler.start(); + expect(scheduler.isRunning).toBe(true); + + scheduler.stop(); + expect(scheduler.isRunning).toBe(false); + }); + + it('should not start twice', () => { + const scheduler = new Scheduler(); + + scheduler.start(); + scheduler.start(); // Should be no-op + expect(scheduler.isRunning).toBe(true); + + scheduler.stop(); + }); + }); + + describe('getStats', () => { + it('should return statistics', () => { + const scheduler = new Scheduler(); + const link = { id: 1, source: 'test', target: 'item' }; + + scheduler.schedule(link, { delay: 5000 }); + scheduler.addCronJob('test', '* * * * *', async () => {}); + + const stats = scheduler.getStats(); + + expect(stats.scheduled).toBe(1); + expect(stats.pending).toBe(1); + expect(stats.cronJobs).toBe(1); + expect(stats.delivered).toBe(0); + expect(stats.expired).toBe(0); + }); + }); + + describe('clear', () => { + it('should clear all state', () => { + const scheduler = new Scheduler(); + const link = { id: 1, source: 'test', target: 'item' }; + + scheduler.schedule(link, { delay: 5000 }); + scheduler.addCronJob('test', '* * * * *', async () => {}); + scheduler.start(); + + scheduler.clear(); + + expect(scheduler.isRunning).toBe(false); + expect(scheduler.pendingCount).toBe(0); + expect(scheduler.listCronJobs().length).toBe(0); + }); + }); + + describe('getPendingItems', () => { + it('should return all pending items', () => { + const scheduler = new Scheduler(); + + scheduler.schedule({ id: 1, source: 'a', target: 'b' }, { delay: 5000 }); + scheduler.schedule({ id: 2, source: 'c', target: 'd' }, { delay: 10000 }); + + const items = scheduler.getPendingItems(); + expect(items.length).toBe(2); + }); + }); +}); + +// ============================================================================= +// ScheduledQueue Tests +// ============================================================================= + +describe('ScheduledQueue', () => { + // Mock queue for testing + const createMockQueue = () => ({ + name: 'test-queue', + _items: [], + async enqueue(link) { + this._items.push(link); + return { id: link.id, position: this._items.length - 1 }; + }, + async dequeue() { + return this._items.shift() || null; + }, + async peek() { + return this._items[0] || null; + }, + async acknowledge() {}, + async reject() {}, + getStats() { + return { + depth: this._items.length, + enqueued: 0, + dequeued: 0, + acknowledged: 0, + rejected: 0, + inFlight: 0, + }; + }, + getDepth() { + return this._items.length; + }, + async clear() { + this._items = []; + }, + }); + + describe('constructor', () => { + it('should create scheduled queue from base queue', () => { + const mockQueue = createMockQueue(); + const scheduledQueue = new ScheduledQueue(mockQueue); + + expect(scheduledQueue.name).toBe('test-queue'); + }); + }); + + describe('enqueue', () => { + it('should enqueue immediately without delay', async () => { + const mockQueue = createMockQueue(); + const scheduledQueue = new ScheduledQueue(mockQueue); + const link = { id: 1, source: 'test', target: 'item' }; + + await scheduledQueue.enqueue(link); + + expect(mockQueue._items.length).toBe(1); + scheduledQueue.stop(); + }); + + it('should schedule with delay', async () => { + const mockQueue = createMockQueue(); + const scheduledQueue = new ScheduledQueue(mockQueue); + const link = { id: 1, source: 'test', target: 'item' }; + + const result = await scheduledQueue.enqueue(link, { delay: 5000 }); + + // Should not be in queue yet + expect(mockQueue._items.length).toBe(0); + // Should return scheduled item + expect(result.deliverAt).toBeGreaterThan(Date.now()); + + scheduledQueue.stop(); + }); + }); + + describe('dequeue', () => { + it('should dequeue from underlying queue', async () => { + const mockQueue = createMockQueue(); + const scheduledQueue = new ScheduledQueue(mockQueue); + const link = { id: 1, source: 'test', target: 'item' }; + + await scheduledQueue.enqueue(link); + const result = await scheduledQueue.dequeue(); + + expect(result).toEqual(link); + scheduledQueue.stop(); + }); + }); + + describe('getStats', () => { + it('should include scheduled count in stats', async () => { + const mockQueue = createMockQueue(); + const scheduledQueue = new ScheduledQueue(mockQueue); + + await scheduledQueue.enqueue({ id: 1, source: 'a', target: 'b' }); + await scheduledQueue.enqueue( + { id: 2, source: 'c', target: 'd' }, + { delay: 5000 } + ); + + const stats = scheduledQueue.getStats(); + + expect(stats.depth).toBe(1); + expect(stats.scheduled).toBe(1); + + scheduledQueue.stop(); + }); + }); + + describe('getDepth', () => { + it('should include scheduled items in depth', async () => { + const mockQueue = createMockQueue(); + const scheduledQueue = new ScheduledQueue(mockQueue); + + await scheduledQueue.enqueue({ id: 1, source: 'a', target: 'b' }); + await scheduledQueue.enqueue( + { id: 2, source: 'c', target: 'd' }, + { delay: 5000 } + ); + + const depth = scheduledQueue.getDepth(); + expect(depth).toBe(2); + + scheduledQueue.stop(); + }); + }); + + describe('cancelScheduled', () => { + it('should cancel a scheduled item', async () => { + const mockQueue = createMockQueue(); + const scheduledQueue = new ScheduledQueue(mockQueue); + + await scheduledQueue.enqueue( + { id: 1, source: 'a', target: 'b' }, + { delay: 5000 } + ); + expect(scheduledQueue.getDepth()).toBe(1); + + const result = scheduledQueue.cancelScheduled(1); + expect(result).toBe(true); + expect(scheduledQueue.getDepth()).toBe(0); + + scheduledQueue.stop(); + }); + }); + + describe('clear', () => { + it('should clear both queue and scheduler', async () => { + const mockQueue = createMockQueue(); + const scheduledQueue = new ScheduledQueue(mockQueue); + + await scheduledQueue.enqueue({ id: 1, source: 'a', target: 'b' }); + await scheduledQueue.enqueue( + { id: 2, source: 'c', target: 'd' }, + { delay: 5000 } + ); + + await scheduledQueue.clear(); + + expect(scheduledQueue.getDepth()).toBe(0); + expect(mockQueue._items.length).toBe(0); + }); + }); +}); From 6586f6f89d6ba439551d6225d31b2b342156cf6b Mon Sep 17 00:00:00 2001 From: konard Date: Sun, 18 Jan 2026 23:01:05 +0100 Subject: [PATCH 03/10] Auto-commit: Changes made by Claude during problem-solving session --- rust/src/features/mod.rs | 45 + rust/src/features/pubsub.rs | 1365 +++++++++++++++++++++++++++++ rust/src/features/rate_limiter.rs | 976 +++++++++++++++++++++ rust/src/features/router.rs | 1008 +++++++++++++++++++++ rust/src/features/scheduler.rs | 1054 ++++++++++++++++++++++ 5 files changed, 4448 insertions(+) create mode 100644 rust/src/features/mod.rs create mode 100644 rust/src/features/pubsub.rs create mode 100644 rust/src/features/rate_limiter.rs create mode 100644 rust/src/features/router.rs create mode 100644 rust/src/features/scheduler.rs diff --git a/rust/src/features/mod.rs b/rust/src/features/mod.rs new file mode 100644 index 0000000..8bf9a69 --- /dev/null +++ b/rust/src/features/mod.rs @@ -0,0 +1,45 @@ +//! Advanced Queue Features (Phase 7) +//! +//! This module provides advanced queue functionality including: +//! - **Scheduling**: Delayed messages, cron jobs, TTL, message expiration +//! - **Rate Limiting**: Per-queue and per-consumer limits with sliding window algorithm +//! - **Routing**: Topic-based routing with AMQP-style wildcards +//! - **Pub/Sub**: Publish/subscribe patterns with message filtering +//! +//! # Example +//! +//! ```rust,ignore +//! use links_queue::features::{Scheduler, RateLimiter, Router, PubSubBroker}; +//! +//! // Create a scheduler for delayed messages +//! let scheduler = Scheduler::new(); +//! +//! // Create a rate limiter +//! let limiter = RateLimiter::new(100, Duration::from_secs(60)); +//! +//! // Create a router for topic-based routing +//! let router = Router::new(); +//! +//! // Create a pub/sub broker +//! let broker = PubSubBroker::new(); +//! ``` + +pub mod scheduler; +pub mod rate_limiter; +pub mod router; +pub mod pubsub; + +// Re-export main types +pub use scheduler::{CronParser, CronSchedule, Scheduler, ScheduledItem, ScheduledQueue, SchedulerStats, CronJob}; +pub use rate_limiter::{ + SlidingWindowCounter, TokenBucket, RateLimiter, RateLimitedQueue, + RateLimitResult, RateLimitError, RateLimitStats, +}; +pub use router::{ + ExchangeType, TopicMatcher, Exchange, DirectExchange, TopicExchange, + FanoutExchange, HeadersExchange, Router, RoutedQueueManager, Binding, +}; +pub use pubsub::{ + MessageFilter, PubSubBroker, Subscription, Topic, TopicInfo, + ObservableQueue, QueueEvent, PubSubStats, +}; diff --git a/rust/src/features/pubsub.rs b/rust/src/features/pubsub.rs new file mode 100644 index 0000000..a45acdf --- /dev/null +++ b/rust/src/features/pubsub.rs @@ -0,0 +1,1365 @@ +//! Pub/Sub module for links-queue. +//! +//! This module provides publish/subscribe messaging patterns: +//! - Topic creation/deletion +//! - Subscribe/unsubscribe +//! - Fan-out delivery +//! - Message filtering +//! +//! # Example +//! +//! ```rust,ignore +//! use links_queue::features::pubsub::{PubSubBroker, MessageFilter}; +//! +//! let broker = PubSubBroker::new(Default::default()); +//! +//! // Create a topic +//! broker.create_topic("events").await?; +//! +//! // Subscribe to the topic +//! let sub_id = broker.subscribe("events", |msg| async move { +//! println!("Received: {:?}", msg.data); +//! Ok(()) +//! }, None).await?; +//! +//! // Publish a message +//! broker.publish("events", "Hello, World!", None).await?; +//! +//! // Unsubscribe +//! broker.unsubscribe(&sub_id).await?; +//! ``` + +use std::collections::{HashMap, HashSet}; +use std::fmt::Debug; +use std::future::Future; +use std::pin::Pin; +use std::sync::atomic::{AtomicU64, Ordering}; +use std::sync::Arc; +use std::time::{Duration, Instant}; +use tokio::sync::RwLock; + +use crate::queue::traits::{Queue, QueueManager}; + +// ============================================================================= +// Types and Errors +// ============================================================================= + +/// Error type for pub/sub operations. +#[derive(Debug, Clone)] +pub enum PubSubError { + /// Topic already exists. + TopicExists(String), + /// Topic not found. + TopicNotFound(String), + /// Subscription not found. + SubscriptionNotFound(String), + /// Queue error. + QueueError(String), +} + +impl std::fmt::Display for PubSubError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + PubSubError::TopicExists(name) => write!(f, "Topic '{}' already exists", name), + PubSubError::TopicNotFound(name) => write!(f, "Topic '{}' not found", name), + PubSubError::SubscriptionNotFound(id) => write!(f, "Subscription '{}' not found", id), + PubSubError::QueueError(msg) => write!(f, "Queue error: {}", msg), + } + } +} + +impl std::error::Error for PubSubError {} + +/// Result type for pub/sub operations. +pub type PubSubResult = Result; + +/// A published message. +#[derive(Debug, Clone)] +pub struct PublishedMessage { + /// Unique message identifier. + pub id: String, + /// Topic the message was published to. + pub topic: String, + /// The message data. + pub data: T, + /// Publication timestamp. + pub timestamp: Instant, + /// Optional message headers. + pub headers: HashMap, +} + +/// Topic information. +#[derive(Debug, Clone)] +pub struct Topic { + /// Topic name. + pub name: String, + /// Creation timestamp. + pub created: Instant, + /// Total messages published. + pub message_count: u64, + /// Current number of subscribers. + pub subscriber_count: usize, +} + +/// Subscription information. +#[derive(Debug, Clone)] +pub struct SubscriptionInfo { + /// Unique subscription identifier. + pub id: String, + /// Topic name. + pub topic: String, + /// Whether the subscription is active. + pub active: bool, + /// Creation timestamp. + pub created: Instant, + /// Number of messages received. + pub received: u64, +} + +/// Pub/sub broker statistics. +#[derive(Debug, Clone, Default)] +pub struct PubSubStats { + /// Number of topics. + pub topics: usize, + /// Number of subscriptions. + pub subscriptions: usize, + /// Total messages published. + pub published: u64, + /// Total messages delivered. + pub delivered: u64, + /// Total messages filtered out. + pub filtered: u64, +} + +/// Delivery result from publishing a message. +#[derive(Debug, Clone, Default)] +pub struct DeliveryResult { + /// Number of subscribers that received the message. + pub delivered: usize, + /// Number of subscribers that filtered out the message. + pub filtered: usize, +} + +/// Broker configuration options. +#[derive(Debug, Clone)] +pub struct BrokerOptions { + /// Automatically create topics on publish. + pub auto_create_topics: bool, + /// Message retention duration (None = no retention). + pub message_retention: Option, +} + +impl Default for BrokerOptions { + fn default() -> Self { + Self { + auto_create_topics: true, + message_retention: None, + } + } +} + +// ============================================================================= +// Message Filter +// ============================================================================= + +/// Filter for messages based on content. +/// +/// Provides utilities for filtering messages based on various criteria. +/// +/// # Example +/// +/// ```rust,ignore +/// use links_queue::features::pubsub::MessageFilter; +/// +/// let filter = MessageFilter::new() +/// .with_header("priority", "high") +/// .with_custom(|msg| msg.data.len() > 10); +/// +/// let matches = filter.matches(&message); +/// ``` +#[derive(Clone)] +pub struct MessageFilter { + /// Header filters (key -> expected value). + header_filters: HashMap, + /// Custom filter functions. + custom_filters: Vec) -> bool + Send + Sync>>, +} + +impl Default for MessageFilter { + fn default() -> Self { + Self::new() + } +} + +impl MessageFilter { + /// Creates a new empty filter. + pub fn new() -> Self { + Self { + header_filters: HashMap::new(), + custom_filters: Vec::new(), + } + } + + /// Adds a header filter. + pub fn with_header(mut self, key: impl Into, value: impl Into) -> Self { + self.header_filters.insert(key.into(), value.into()); + self + } + + /// Adds a custom filter function. + pub fn with_custom(mut self, filter: F) -> Self + where + F: Fn(&PublishedMessage) -> bool + Send + Sync + 'static, + { + self.custom_filters.push(Arc::new(filter)); + self + } + + /// Checks if a message matches all filters. + pub fn matches(&self, message: &PublishedMessage) -> bool { + // Check header filters + for (key, expected) in &self.header_filters { + match message.headers.get(key) { + Some(value) if value == expected => continue, + _ => return false, + } + } + + // Check custom filters + for filter in &self.custom_filters { + if !filter(message) { + return false; + } + } + + true + } + + /// Returns true if the filter has no conditions. + pub fn is_empty(&self) -> bool { + self.header_filters.is_empty() && self.custom_filters.is_empty() + } +} + +impl Debug for MessageFilter { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("MessageFilter") + .field("header_filters", &self.header_filters) + .field("custom_filters_count", &self.custom_filters.len()) + .finish() + } +} + +// ============================================================================= +// Subscription (internal) +// ============================================================================= + +type AsyncHandler = + Arc) -> Pin + Send>> + Send + Sync>; + +struct Subscription { + id: String, + topic: String, + handler: AsyncHandler, + filter: Option>, + active: bool, + created: Instant, + received: AtomicU64, +} + +// ============================================================================= +// Pub/Sub Broker +// ============================================================================= + +/// Central broker for pub/sub messaging. +/// +/// Manages topics, subscriptions, and message delivery. +/// +/// # Example +/// +/// ```rust,ignore +/// use links_queue::features::pubsub::{PubSubBroker, BrokerOptions}; +/// +/// let broker = PubSubBroker::::new(BrokerOptions::default()); +/// +/// // Create a topic +/// broker.create_topic("events").await?; +/// +/// // Subscribe +/// let sub_id = broker.subscribe("events", |msg| async move { +/// println!("Received: {}", msg.data); +/// }, None).await?; +/// +/// // Publish +/// broker.publish("events", "Hello!".to_string(), None).await?; +/// ``` +pub struct PubSubBroker { + /// Configuration options. + options: BrokerOptions, + /// Topics by name. + topics: RwLock>, + /// Subscriptions by ID. + subscriptions: RwLock>>, + /// Subscription IDs grouped by topic. + topic_subscriptions: RwLock>>, + /// Message history (if retention enabled). + message_history: RwLock>>>, + /// ID counter. + id_counter: AtomicU64, + /// Statistics. + stats: RwLock, +} + +impl PubSubBroker { + /// Creates a new PubSubBroker. + pub fn new(options: BrokerOptions) -> Self { + Self { + options, + topics: RwLock::new(HashMap::new()), + subscriptions: RwLock::new(HashMap::new()), + topic_subscriptions: RwLock::new(HashMap::new()), + message_history: RwLock::new(HashMap::new()), + id_counter: AtomicU64::new(0), + stats: RwLock::new(PubSubStats::default()), + } + } + + /// Generates a unique ID. + fn generate_id(&self, prefix: &str) -> String { + let counter = self.id_counter.fetch_add(1, Ordering::SeqCst); + format!("{}_{}", prefix, counter) + } + + /// Creates a new topic. + pub async fn create_topic(&self, name: impl Into) -> PubSubResult { + let name = name.into(); + + let mut topics = self.topics.write().await; + if topics.contains_key(&name) { + return Err(PubSubError::TopicExists(name)); + } + + let topic = Topic { + name: name.clone(), + created: Instant::now(), + message_count: 0, + subscriber_count: 0, + }; + + topics.insert(name.clone(), topic.clone()); + self.topic_subscriptions + .write() + .await + .insert(name.clone(), HashSet::new()); + + if self.options.message_retention.is_some() { + self.message_history + .write() + .await + .insert(name, Vec::new()); + } + + let mut stats = self.stats.write().await; + stats.topics += 1; + + Ok(topic) + } + + /// Gets a topic by name. + pub async fn get_topic(&self, name: &str) -> Option { + self.topics.read().await.get(name).cloned() + } + + /// Deletes a topic and all its subscriptions. + pub async fn delete_topic(&self, name: &str) -> PubSubResult { + let mut topics = self.topics.write().await; + if !topics.contains_key(name) { + return Ok(false); + } + + // Remove all subscriptions for this topic + let sub_ids: Vec = { + let topic_subs = self.topic_subscriptions.read().await; + topic_subs + .get(name) + .map(|s| s.iter().cloned().collect()) + .unwrap_or_default() + }; + + let mut subscriptions = self.subscriptions.write().await; + for sub_id in sub_ids { + subscriptions.remove(&sub_id); + } + + self.topic_subscriptions.write().await.remove(name); + self.message_history.write().await.remove(name); + topics.remove(name); + + let mut stats = self.stats.write().await; + stats.topics = stats.topics.saturating_sub(1); + + Ok(true) + } + + /// Lists all topics. + pub async fn list_topics(&self) -> Vec { + self.topics.read().await.values().cloned().collect() + } + + /// Subscribes to a topic. + pub async fn subscribe( + &self, + topic: impl Into, + handler: F, + filter: Option>, + ) -> PubSubResult + where + F: Fn(PublishedMessage) -> Fut + Send + Sync + 'static, + Fut: Future + Send + 'static, + { + let topic = topic.into(); + + // Auto-create topic if enabled + if !self.topics.read().await.contains_key(&topic) { + if self.options.auto_create_topics { + self.create_topic(&topic).await?; + } else { + return Err(PubSubError::TopicNotFound(topic)); + } + } + + let sub_id = self.generate_id("sub"); + + let subscription = Subscription { + id: sub_id.clone(), + topic: topic.clone(), + handler: Arc::new(move |msg| Box::pin(handler(msg))), + filter, + active: true, + created: Instant::now(), + received: AtomicU64::new(0), + }; + + self.subscriptions + .write() + .await + .insert(sub_id.clone(), subscription); + + self.topic_subscriptions + .write() + .await + .get_mut(&topic) + .map(|s| s.insert(sub_id.clone())); + + // Update topic subscriber count + if let Some(topic_info) = self.topics.write().await.get_mut(&topic) { + topic_info.subscriber_count += 1; + } + + let mut stats = self.stats.write().await; + stats.subscriptions += 1; + + Ok(sub_id) + } + + /// Unsubscribes from a topic. + pub async fn unsubscribe(&self, subscription_id: &str) -> PubSubResult { + let subscription = { + let subscriptions = self.subscriptions.read().await; + subscriptions.get(subscription_id).map(|s| s.topic.clone()) + }; + + let topic = match subscription { + Some(t) => t, + None => return Ok(false), + }; + + // Remove from topic's subscription set + self.topic_subscriptions + .write() + .await + .get_mut(&topic) + .map(|s| s.remove(subscription_id)); + + // Update topic subscriber count + if let Some(topic_info) = self.topics.write().await.get_mut(&topic) { + topic_info.subscriber_count = topic_info.subscriber_count.saturating_sub(1); + } + + self.subscriptions.write().await.remove(subscription_id); + + let mut stats = self.stats.write().await; + stats.subscriptions = stats.subscriptions.saturating_sub(1); + + Ok(true) + } + + /// Pauses a subscription. + pub async fn pause(&self, subscription_id: &str) -> bool { + if let Some(sub) = self.subscriptions.write().await.get_mut(subscription_id) { + sub.active = false; + true + } else { + false + } + } + + /// Resumes a subscription. + pub async fn resume(&self, subscription_id: &str) -> bool { + if let Some(sub) = self.subscriptions.write().await.get_mut(subscription_id) { + sub.active = true; + true + } else { + false + } + } + + /// Gets subscription information. + pub async fn get_subscription(&self, subscription_id: &str) -> Option { + self.subscriptions + .read() + .await + .get(subscription_id) + .map(|s| SubscriptionInfo { + id: s.id.clone(), + topic: s.topic.clone(), + active: s.active, + created: s.created, + received: s.received.load(Ordering::SeqCst), + }) + } + + /// Lists all subscriptions, optionally filtered by topic. + pub async fn list_subscriptions(&self, topic: Option<&str>) -> Vec { + let subscriptions = self.subscriptions.read().await; + subscriptions + .values() + .filter(|s| topic.map_or(true, |t| s.topic == t)) + .map(|s| SubscriptionInfo { + id: s.id.clone(), + topic: s.topic.clone(), + active: s.active, + created: s.created, + received: s.received.load(Ordering::SeqCst), + }) + .collect() + } + + /// Publishes a message to a topic. + pub async fn publish( + &self, + topic: impl Into, + data: T, + headers: Option>, + ) -> PubSubResult { + let topic = topic.into(); + + // Auto-create topic if enabled + if !self.topics.read().await.contains_key(&topic) { + if self.options.auto_create_topics { + self.create_topic(&topic).await?; + } else { + return Err(PubSubError::TopicNotFound(topic)); + } + } + + let message = PublishedMessage { + id: self.generate_id("msg"), + topic: topic.clone(), + data, + timestamp: Instant::now(), + headers: headers.unwrap_or_default(), + }; + + // Update topic stats + if let Some(topic_info) = self.topics.write().await.get_mut(&topic) { + topic_info.message_count += 1; + } + + { + let mut stats = self.stats.write().await; + stats.published += 1; + } + + // Store in history if retention enabled + if let Some(retention) = self.options.message_retention { + let mut history = self.message_history.write().await; + if let Some(topic_history) = history.get_mut(&topic) { + topic_history.push(message.clone()); + // Clean up old messages + let cutoff = Instant::now() - retention; + topic_history.retain(|m| m.timestamp >= cutoff); + } + } + + // Deliver to subscribers + let mut delivered = 0usize; + let mut filtered = 0usize; + + let sub_ids: Vec = { + let topic_subs = self.topic_subscriptions.read().await; + topic_subs + .get(&topic) + .map(|s| s.iter().cloned().collect()) + .unwrap_or_default() + }; + + let subscriptions = self.subscriptions.read().await; + for sub_id in sub_ids { + if let Some(subscription) = subscriptions.get(&sub_id) { + if !subscription.active { + continue; + } + + // Check filter + if let Some(ref filter) = subscription.filter { + if !filter.matches(&message) { + filtered += 1; + continue; + } + } + + // Deliver message + let handler = subscription.handler.clone(); + let msg = message.clone(); + handler(msg).await; + subscription.received.fetch_add(1, Ordering::SeqCst); + delivered += 1; + } + } + + { + let mut stats = self.stats.write().await; + stats.delivered += delivered as u64; + stats.filtered += filtered as u64; + } + + Ok(DeliveryResult { delivered, filtered }) + } + + /// Publishes to multiple topics. + pub async fn publish_many( + &self, + topics: &[&str], + data: T, + headers: Option>, + ) -> HashMap> { + let mut results = HashMap::new(); + + for topic in topics { + let result = self + .publish(*topic, data.clone(), headers.clone()) + .await; + results.insert(topic.to_string(), result); + } + + results + } + + /// Gets recent messages for a topic (if retention enabled). + pub async fn get_history(&self, topic: &str, limit: usize) -> Vec> { + let history = self.message_history.read().await; + history + .get(topic) + .map(|h| { + let start = h.len().saturating_sub(limit); + h[start..].to_vec() + }) + .unwrap_or_default() + } + + /// Gets broker statistics. + pub async fn get_stats(&self) -> PubSubStats { + self.stats.read().await.clone() + } + + /// Clears all state. + pub async fn clear(&self) { + self.topics.write().await.clear(); + self.subscriptions.write().await.clear(); + self.topic_subscriptions.write().await.clear(); + self.message_history.write().await.clear(); + *self.stats.write().await = PubSubStats::default(); + } +} + +// ============================================================================= +// Observable Queue +// ============================================================================= + +/// Event type for queue observations. +#[derive(Debug, Clone)] +pub enum QueueEvent { + /// Item was enqueued. + Enqueue(T), + /// Item was dequeued. + Dequeue(T), + /// Item was acknowledged. + Acknowledge(u64), + /// Item was rejected. + Reject(u64, bool), +} + +type QueueEventHandler = + Arc) -> Pin + Send>> + Send + Sync>; + +/// A queue that supports pub/sub-style subscriptions. +/// +/// Wraps an existing queue and emits events when items are enqueued/dequeued. +pub struct ObservableQueue> { + /// The underlying queue. + queue: Q, + /// Event handlers. + handlers: RwLock>>, + /// Phantom data. + _marker: std::marker::PhantomData, +} + +impl> ObservableQueue { + /// Creates a new ObservableQueue. + pub fn new(queue: Q) -> Self { + Self { + queue, + handlers: RwLock::new(Vec::new()), + _marker: std::marker::PhantomData, + } + } + + /// Subscribes to queue events. + pub async fn subscribe(&self, handler: F) -> usize + where + F: Fn(QueueEvent) -> Fut + Send + Sync + 'static, + Fut: Future + Send + 'static, + { + let mut handlers = self.handlers.write().await; + let index = handlers.len(); + handlers.push(Arc::new(move |event| Box::pin(handler(event)))); + index + } + + /// Unsubscribes from queue events. + pub async fn unsubscribe(&self, index: usize) { + let mut handlers = self.handlers.write().await; + if index < handlers.len() { + handlers.remove(index); + } + } + + /// Emits an event to all handlers. + async fn emit(&self, event: QueueEvent) { + let handlers = self.handlers.read().await; + for handler in handlers.iter() { + handler(event.clone()).await; + } + } + + /// Enqueues an item and notifies subscribers. + pub async fn enqueue(&self, item: T) -> Result { + let result = self.queue.enqueue(item.clone()).await?; + self.emit(QueueEvent::Enqueue(item)).await; + Ok(result) + } + + /// Dequeues an item and notifies subscribers. + pub async fn dequeue(&self) -> Result, Q::Error> { + let result = self.queue.dequeue().await?; + if let Some(ref item) = result { + self.emit(QueueEvent::Dequeue(item.clone())).await; + } + Ok(result) + } + + /// Peeks at the next item. + pub async fn peek(&self) -> Result, Q::Error> { + self.queue.peek().await + } + + /// Acknowledges an item and notifies subscribers. + pub async fn acknowledge(&self, id: u64) -> Result { + let result = self.queue.acknowledge(id).await?; + if result { + self.emit(QueueEvent::Acknowledge(id)).await; + } + Ok(result) + } + + /// Rejects an item and notifies subscribers. + pub async fn reject(&self, id: u64, requeue: bool) -> Result { + let result = self.queue.reject(id, requeue).await?; + if result { + self.emit(QueueEvent::Reject(id, requeue)).await; + } + Ok(result) + } + + /// Gets queue statistics. + pub async fn get_stats(&self) -> Result { + self.queue.get_stats().await + } + + /// Gets the queue depth. + pub async fn get_depth(&self) -> Result { + self.queue.get_depth().await + } + + /// Clears the queue and all handlers. + pub async fn clear(&self) -> Result<(), Q::Error> { + self.handlers.write().await.clear(); + self.queue.clear().await + } +} + +// ============================================================================= +// Queue-backed Pub/Sub +// ============================================================================= + +/// Subscription info for queue-backed pub/sub. +#[derive(Debug, Clone)] +pub struct QueueSubscription { + /// Subscription ID. + pub id: String, + /// Topic name. + pub topic: String, + /// Queue name. + pub queue_name: String, + /// Whether the consumer is active. + pub active: bool, +} + +/// Pub/Sub implementation backed by queues for durability. +/// +/// Each subscription gets its own queue for message persistence. +pub struct QueueBackedPubSub +where + T: Clone + Send + Sync + 'static, + Q: Queue, + M: QueueManager, +{ + /// Queue manager. + queue_manager: Arc, + /// Topics with their subscriber queue names. + topics: RwLock>>, + /// Subscriptions by ID. + subscriptions: RwLock>, + /// Active consumer handles. + active_consumers: RwLock>, + /// ID counter. + id_counter: AtomicU64, + /// Phantom data. + _marker: std::marker::PhantomData<(T, Q)>, +} + +impl QueueBackedPubSub +where + T: Clone + Send + Sync + 'static, + Q: Queue, + M: QueueManager, +{ + /// Creates a new QueueBackedPubSub. + pub fn new(queue_manager: Arc) -> Self { + Self { + queue_manager, + topics: RwLock::new(HashMap::new()), + subscriptions: RwLock::new(HashMap::new()), + active_consumers: RwLock::new(HashSet::new()), + id_counter: AtomicU64::new(0), + _marker: std::marker::PhantomData, + } + } + + /// Creates a topic. + pub async fn create_topic(&self, name: impl Into) -> bool { + let name = name.into(); + let mut topics = self.topics.write().await; + if topics.contains_key(&name) { + false + } else { + topics.insert(name, HashSet::new()); + true + } + } + + /// Deletes a topic. + pub async fn delete_topic(&self, name: &str) -> PubSubResult { + let subscribers = { + let topics = self.topics.read().await; + topics.get(name).cloned() + }; + + let subscribers = match subscribers { + Some(s) => s, + None => return Ok(false), + }; + + // Find and remove all subscriptions for this topic + let sub_ids: Vec = { + let subscriptions = self.subscriptions.read().await; + subscriptions + .iter() + .filter(|(_, s)| subscribers.contains(&s.queue_name)) + .map(|(id, _)| id.clone()) + .collect() + }; + + for sub_id in sub_ids { + self.unsubscribe(&sub_id).await?; + } + + self.topics.write().await.remove(name); + Ok(true) + } + + /// Subscribes to a topic with a dedicated queue. + pub async fn subscribe(&self, topic: impl Into, subscriber_id: &str) -> PubSubResult { + let topic = topic.into(); + + // Create topic if not exists + { + let mut topics = self.topics.write().await; + if !topics.contains_key(&topic) { + topics.insert(topic.clone(), HashSet::new()); + } + } + + // Create a dedicated queue for this subscriber + let queue_name = format!("{}-{}", topic, subscriber_id); + self.queue_manager + .create_queue(&queue_name) + .await + .map_err(|e| PubSubError::QueueError(format!("{:?}", e)))?; + + // Register the queue with the topic + self.topics + .write() + .await + .get_mut(&topic) + .map(|s| s.insert(queue_name.clone())); + + // Create subscription record + let sub_id = format!("sub_{}", self.id_counter.fetch_add(1, Ordering::SeqCst)); + let subscription = QueueSubscription { + id: sub_id.clone(), + topic, + queue_name, + active: false, + }; + + self.subscriptions + .write() + .await + .insert(sub_id.clone(), subscription.clone()); + + Ok(subscription) + } + + /// Unsubscribes and removes the dedicated queue. + pub async fn unsubscribe(&self, subscription_id: &str) -> PubSubResult { + let subscription = { + let subscriptions = self.subscriptions.read().await; + subscriptions.get(subscription_id).cloned() + }; + + let subscription = match subscription { + Some(s) => s, + None => return Ok(false), + }; + + // Stop consumer + self.active_consumers.write().await.remove(subscription_id); + + // Remove queue from topic + self.topics + .write() + .await + .get_mut(&subscription.topic) + .map(|s| s.remove(&subscription.queue_name)); + + // Delete the queue + self.queue_manager + .delete_queue(&subscription.queue_name) + .await + .map_err(|e| PubSubError::QueueError(format!("{:?}", e)))?; + + self.subscriptions.write().await.remove(subscription_id); + Ok(true) + } + + /// Publishes to a topic (enqueues to all subscriber queues). + pub async fn publish(&self, topic: &str, data: T) -> PubSubResult { + let subscribers = { + let topics = self.topics.read().await; + topics.get(topic).cloned().unwrap_or_default() + }; + + if subscribers.is_empty() { + return Ok(0); + } + + let mut count = 0; + for queue_name in subscribers { + if let Some(queue) = self.queue_manager.get_queue(&queue_name).await { + if queue.enqueue(data.clone()).await.is_ok() { + count += 1; + } + } + } + + Ok(count) + } + + /// Marks a consumer as active. + pub async fn start_consumer(&self, subscription_id: &str) -> bool { + let exists = self.subscriptions.read().await.contains_key(subscription_id); + if exists { + self.active_consumers.write().await.insert(subscription_id.to_string()); + if let Some(sub) = self.subscriptions.write().await.get_mut(subscription_id) { + sub.active = true; + } + true + } else { + false + } + } + + /// Marks a consumer as inactive. + pub async fn stop_consumer(&self, subscription_id: &str) { + self.active_consumers.write().await.remove(subscription_id); + if let Some(sub) = self.subscriptions.write().await.get_mut(subscription_id) { + sub.active = false; + } + } + + /// Lists all topics. + pub async fn list_topics(&self) -> Vec<(String, usize)> { + self.topics + .read() + .await + .iter() + .map(|(name, subs)| (name.clone(), subs.len())) + .collect() + } + + /// Lists subscriptions for a topic. + pub async fn list_subscriptions(&self, topic: Option<&str>) -> Vec { + let subscriptions = self.subscriptions.read().await; + subscriptions + .values() + .filter(|s| topic.map_or(true, |t| s.topic == t)) + .cloned() + .collect() + } + + /// Clears all state. + pub async fn clear(&self) -> PubSubResult<()> { + // Stop all consumers + self.active_consumers.write().await.clear(); + + // Delete all subscription queues + let subscriptions: Vec = { + self.subscriptions.read().await.values().cloned().collect() + }; + + for subscription in subscriptions { + self.queue_manager + .delete_queue(&subscription.queue_name) + .await + .map_err(|e| PubSubError::QueueError(format!("{:?}", e)))?; + } + + self.topics.write().await.clear(); + self.subscriptions.write().await.clear(); + + Ok(()) + } +} + +// ============================================================================= +// Tests +// ============================================================================= + +#[cfg(test)] +mod tests { + use super::*; + use std::sync::atomic::AtomicUsize; + + #[tokio::test] + async fn test_message_filter() { + let filter = MessageFilter::::new() + .with_header("priority", "high") + .with_custom(|msg: &PublishedMessage| msg.data.len() > 3); + + let mut headers = HashMap::new(); + headers.insert("priority".to_string(), "high".to_string()); + + let msg1 = PublishedMessage { + id: "1".to_string(), + topic: "test".to_string(), + data: "hello".to_string(), + timestamp: Instant::now(), + headers: headers.clone(), + }; + + let msg2 = PublishedMessage { + id: "2".to_string(), + topic: "test".to_string(), + data: "hi".to_string(), // Too short + timestamp: Instant::now(), + headers: headers.clone(), + }; + + let msg3 = PublishedMessage { + id: "3".to_string(), + topic: "test".to_string(), + data: "hello".to_string(), + timestamp: Instant::now(), + headers: HashMap::new(), // Missing header + }; + + assert!(filter.matches(&msg1)); + assert!(!filter.matches(&msg2)); + assert!(!filter.matches(&msg3)); + } + + #[tokio::test] + async fn test_broker_topic_management() { + let broker = PubSubBroker::::new(BrokerOptions { + auto_create_topics: false, + message_retention: None, + }); + + // Create topic + let topic = broker.create_topic("events").await.unwrap(); + assert_eq!(topic.name, "events"); + assert_eq!(topic.subscriber_count, 0); + + // Duplicate topic should fail + let result = broker.create_topic("events").await; + assert!(matches!(result, Err(PubSubError::TopicExists(_)))); + + // Get topic + let topic = broker.get_topic("events").await; + assert!(topic.is_some()); + + // List topics + let topics = broker.list_topics().await; + assert_eq!(topics.len(), 1); + + // Delete topic + let deleted = broker.delete_topic("events").await.unwrap(); + assert!(deleted); + + let topics = broker.list_topics().await; + assert!(topics.is_empty()); + } + + #[tokio::test] + async fn test_broker_subscribe_unsubscribe() { + let broker = PubSubBroker::::new(BrokerOptions::default()); + + let counter = Arc::new(AtomicUsize::new(0)); + let counter_clone = counter.clone(); + + // Subscribe + let sub_id = broker + .subscribe( + "events", + move |_msg| { + let c = counter_clone.clone(); + async move { + c.fetch_add(1, Ordering::SeqCst); + } + }, + None, + ) + .await + .unwrap(); + + // Verify subscription + let sub = broker.get_subscription(&sub_id).await; + assert!(sub.is_some()); + assert_eq!(sub.unwrap().topic, "events"); + + // Publish + let result = broker.publish("events", "test".to_string(), None).await.unwrap(); + assert_eq!(result.delivered, 1); + + // Wait a bit for async delivery + tokio::time::sleep(Duration::from_millis(10)).await; + assert_eq!(counter.load(Ordering::SeqCst), 1); + + // Unsubscribe + let unsubbed = broker.unsubscribe(&sub_id).await.unwrap(); + assert!(unsubbed); + + // Publish again (no subscribers) + let result = broker.publish("events", "test2".to_string(), None).await.unwrap(); + assert_eq!(result.delivered, 0); + } + + #[tokio::test] + async fn test_broker_pause_resume() { + let broker = PubSubBroker::::new(BrokerOptions::default()); + + let counter = Arc::new(AtomicUsize::new(0)); + let counter_clone = counter.clone(); + + let sub_id = broker + .subscribe( + "events", + move |_msg| { + let c = counter_clone.clone(); + async move { + c.fetch_add(1, Ordering::SeqCst); + } + }, + None, + ) + .await + .unwrap(); + + // Pause + broker.pause(&sub_id).await; + + // Publish (should not deliver) + broker.publish("events", "test".to_string(), None).await.unwrap(); + tokio::time::sleep(Duration::from_millis(10)).await; + assert_eq!(counter.load(Ordering::SeqCst), 0); + + // Resume + broker.resume(&sub_id).await; + + // Publish (should deliver) + broker.publish("events", "test".to_string(), None).await.unwrap(); + tokio::time::sleep(Duration::from_millis(10)).await; + assert_eq!(counter.load(Ordering::SeqCst), 1); + } + + #[tokio::test] + async fn test_broker_filtering() { + let broker = PubSubBroker::::new(BrokerOptions::default()); + + let counter = Arc::new(AtomicUsize::new(0)); + let counter_clone = counter.clone(); + + // Subscribe with filter + let filter = MessageFilter::new() + .with_header("priority", "high"); + + broker + .subscribe( + "events", + move |_msg| { + let c = counter_clone.clone(); + async move { + c.fetch_add(1, Ordering::SeqCst); + } + }, + Some(filter), + ) + .await + .unwrap(); + + // Publish without matching header (should filter) + let result = broker.publish("events", "test".to_string(), None).await.unwrap(); + assert_eq!(result.filtered, 1); + assert_eq!(result.delivered, 0); + + // Publish with matching header + let mut headers = HashMap::new(); + headers.insert("priority".to_string(), "high".to_string()); + let result = broker.publish("events", "test".to_string(), Some(headers)).await.unwrap(); + assert_eq!(result.filtered, 0); + assert_eq!(result.delivered, 1); + } + + #[tokio::test] + async fn test_broker_message_retention() { + let broker = PubSubBroker::::new(BrokerOptions { + auto_create_topics: true, + message_retention: Some(Duration::from_secs(60)), + }); + + // Publish some messages + broker.publish("events", "msg1".to_string(), None).await.unwrap(); + broker.publish("events", "msg2".to_string(), None).await.unwrap(); + broker.publish("events", "msg3".to_string(), None).await.unwrap(); + + // Get history + let history = broker.get_history("events", 10).await; + assert_eq!(history.len(), 3); + assert_eq!(history[0].data, "msg1"); + assert_eq!(history[2].data, "msg3"); + + // Get limited history + let history = broker.get_history("events", 2).await; + assert_eq!(history.len(), 2); + assert_eq!(history[0].data, "msg2"); + } + + #[tokio::test] + async fn test_broker_stats() { + let broker = PubSubBroker::::new(BrokerOptions::default()); + + // Create topics and subscriptions + broker.create_topic("topic1").await.unwrap(); + broker.create_topic("topic2").await.unwrap(); + + broker + .subscribe("topic1", |_| async {}, None) + .await + .unwrap(); + broker + .subscribe("topic1", |_| async {}, None) + .await + .unwrap(); + + // Publish + broker.publish("topic1", "test".to_string(), None).await.unwrap(); + + let stats = broker.get_stats().await; + assert_eq!(stats.topics, 2); + assert_eq!(stats.subscriptions, 2); + assert_eq!(stats.published, 1); + assert_eq!(stats.delivered, 2); + } + + #[tokio::test] + async fn test_broker_auto_create_topics() { + // With auto-create enabled + let broker = PubSubBroker::::new(BrokerOptions { + auto_create_topics: true, + message_retention: None, + }); + + // Should auto-create topic on publish + broker.publish("auto-topic", "test".to_string(), None).await.unwrap(); + let topic = broker.get_topic("auto-topic").await; + assert!(topic.is_some()); + + // With auto-create disabled + let broker = PubSubBroker::::new(BrokerOptions { + auto_create_topics: false, + message_retention: None, + }); + + // Should fail on publish to non-existent topic + let result = broker.publish("auto-topic", "test".to_string(), None).await; + assert!(matches!(result, Err(PubSubError::TopicNotFound(_)))); + } + + #[tokio::test] + async fn test_publish_many() { + let broker = PubSubBroker::::new(BrokerOptions::default()); + + broker.create_topic("topic1").await.unwrap(); + broker.create_topic("topic2").await.unwrap(); + + let results = broker + .publish_many(&["topic1", "topic2"], "test".to_string(), None) + .await; + + assert_eq!(results.len(), 2); + assert!(results.get("topic1").unwrap().is_ok()); + assert!(results.get("topic2").unwrap().is_ok()); + } +} diff --git a/rust/src/features/rate_limiter.rs b/rust/src/features/rate_limiter.rs new file mode 100644 index 0000000..1c34811 --- /dev/null +++ b/rust/src/features/rate_limiter.rs @@ -0,0 +1,976 @@ +//! Rate Limiter module for per-queue and per-consumer rate limiting. +//! +//! This module provides rate limiting functionality: +//! - **Sliding Window Counter**: Count-based rate limiting with rolling window +//! - **Token Bucket**: Burst-friendly rate limiting with refilling tokens +//! - **Rate Limiter**: Multi-key rate limiting manager +//! - **Rate Limited Queue**: Queue wrapper with rate limiting +//! +//! # Example +//! +//! ```rust,ignore +//! use links_queue::features::{RateLimiter, SlidingWindowCounter, TokenBucket}; +//! use std::time::Duration; +//! +//! // Create a sliding window counter +//! let counter = SlidingWindowCounter::new(100, Duration::from_secs(60)); +//! +//! // Create a token bucket +//! let bucket = TokenBucket::new(10, 1.0); // 10 capacity, 1 token/second +//! +//! // Create a multi-key rate limiter +//! let limiter = RateLimiter::sliding_window(100, Duration::from_secs(60)); +//! ``` + +use std::collections::HashMap; +use std::sync::atomic::{AtomicU64, Ordering}; +use std::sync::{Arc, RwLock}; +use std::time::{Duration, Instant, SystemTime, UNIX_EPOCH}; + +use crate::{Link, LinkType, Queue, QueueError, QueueResult, QueueStats, EnqueueResult}; + +// ============================================================================= +// Rate Limit Result +// ============================================================================= + +/// Result of a rate limit check. +#[derive(Debug, Clone)] +pub struct RateLimitResult { + /// Whether the request is allowed + pub allowed: bool, + /// Current count/tokens used + pub current: u64, + /// Maximum allowed count/tokens + pub limit: u64, + /// Remaining count/tokens + pub remaining: u64, + /// Time until reset (in milliseconds) + pub reset_in_ms: u64, +} + +impl RateLimitResult { + /// Creates a new allowed result. + pub fn allowed(current: u64, limit: u64, remaining: u64, reset_in_ms: u64) -> Self { + Self { + allowed: true, + current, + limit, + remaining, + reset_in_ms, + } + } + + /// Creates a new denied result. + pub fn denied(current: u64, limit: u64, reset_in_ms: u64) -> Self { + Self { + allowed: false, + current, + limit, + remaining: 0, + reset_in_ms, + } + } +} + +// ============================================================================= +// Rate Limit Error +// ============================================================================= + +/// Error returned when rate limit is exceeded. +#[derive(Debug, Clone)] +pub struct RateLimitError { + /// Time until the rate limit resets (in milliseconds) + pub retry_after_ms: u64, + /// Maximum allowed requests + pub limit: u64, + /// Window size in milliseconds + pub window_ms: u64, + /// Error message + pub message: String, +} + +impl RateLimitError { + /// Creates a new rate limit error. + pub fn new(retry_after_ms: u64, limit: u64, window_ms: u64) -> Self { + Self { + retry_after_ms, + limit, + window_ms, + message: format!( + "Rate limit exceeded: {} requests per {}ms. Retry after {}ms", + limit, window_ms, retry_after_ms + ), + } + } +} + +impl std::fmt::Display for RateLimitError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.message) + } +} + +impl std::error::Error for RateLimitError {} + +// ============================================================================= +// Rate Limit Statistics +// ============================================================================= + +/// Statistics for rate limiting. +#[derive(Debug, Clone, Default)] +pub struct RateLimitStats { + /// Current count or tokens + pub current: u64, + /// Maximum limit + pub limit: u64, + /// Remaining allowance + pub remaining: u64, + /// Time until reset (milliseconds) + pub reset_in_ms: u64, + /// Total number of allowed requests + pub total_allowed: u64, + /// Total number of denied requests + pub total_denied: u64, +} + +// ============================================================================= +// Sliding Window Counter +// ============================================================================= + +/// A sliding window counter for rate limiting. +/// +/// Uses a fixed window approach with interpolation for a smoother +/// sliding window effect. Simple and memory-efficient. +pub struct SlidingWindowCounter { + /// Maximum requests per window + limit: u64, + /// Window duration + window: Duration, + /// Current window count + current_count: AtomicU64, + /// Previous window count + previous_count: AtomicU64, + /// Current window start time + window_start: RwLock, + /// Statistics + total_allowed: AtomicU64, + total_denied: AtomicU64, +} + +impl SlidingWindowCounter { + /// Creates a new sliding window counter. + /// + /// # Arguments + /// + /// * `limit` - Maximum requests per window + /// * `window` - Window duration + pub fn new(limit: u64, window: Duration) -> Self { + Self { + limit, + window, + current_count: AtomicU64::new(0), + previous_count: AtomicU64::new(0), + window_start: RwLock::new(Instant::now()), + total_allowed: AtomicU64::new(0), + total_denied: AtomicU64::new(0), + } + } + + /// Updates window state if needed. + fn update_window(&self) { + let mut window_start = self.window_start.write().unwrap(); + let elapsed = window_start.elapsed(); + + if elapsed >= self.window * 2 { + // Both windows expired + self.previous_count.store(0, Ordering::SeqCst); + self.current_count.store(0, Ordering::SeqCst); + *window_start = Instant::now(); + } else if elapsed >= self.window { + // Current window expired, rotate + let current = self.current_count.load(Ordering::SeqCst); + self.previous_count.store(current, Ordering::SeqCst); + self.current_count.store(0, Ordering::SeqCst); + *window_start = *window_start + self.window; + } + } + + /// Calculates the weighted count using sliding window interpolation. + fn weighted_count(&self) -> u64 { + let window_start = self.window_start.read().unwrap(); + let elapsed = window_start.elapsed(); + + let current = self.current_count.load(Ordering::SeqCst); + let previous = self.previous_count.load(Ordering::SeqCst); + + // Weight previous window by how much of it is still in our sliding window + let weight = if elapsed < self.window { + 1.0 - (elapsed.as_secs_f64() / self.window.as_secs_f64()) + } else { + 0.0 + }; + + current + (previous as f64 * weight) as u64 + } + + /// Checks if a request is allowed without consuming. + pub fn check(&self) -> RateLimitResult { + self.update_window(); + let count = self.weighted_count(); + let reset_in_ms = self.time_to_reset(); + + if count < self.limit { + RateLimitResult::allowed(count, self.limit, self.limit - count, reset_in_ms) + } else { + RateLimitResult::denied(count, self.limit, reset_in_ms) + } + } + + /// Increments the counter. + pub fn increment(&self) -> RateLimitResult { + self.update_window(); + let new_count = self.current_count.fetch_add(1, Ordering::SeqCst) + 1; + let total = self.weighted_count(); + let reset_in_ms = self.time_to_reset(); + + RateLimitResult::allowed( + total, + self.limit, + self.limit.saturating_sub(total), + reset_in_ms, + ) + } + + /// Checks and increments atomically. + pub fn consume(&self) -> RateLimitResult { + self.update_window(); + let count = self.weighted_count(); + let reset_in_ms = self.time_to_reset(); + + if count < self.limit { + self.current_count.fetch_add(1, Ordering::SeqCst); + self.total_allowed.fetch_add(1, Ordering::SeqCst); + let new_count = count + 1; + RateLimitResult::allowed( + new_count, + self.limit, + self.limit.saturating_sub(new_count), + reset_in_ms, + ) + } else { + self.total_denied.fetch_add(1, Ordering::SeqCst); + RateLimitResult::denied(count, self.limit, reset_in_ms) + } + } + + /// Resets the counter. + pub fn reset(&self) { + self.current_count.store(0, Ordering::SeqCst); + self.previous_count.store(0, Ordering::SeqCst); + let mut window_start = self.window_start.write().unwrap(); + *window_start = Instant::now(); + } + + /// Returns statistics. + pub fn stats(&self) -> RateLimitStats { + self.update_window(); + let count = self.weighted_count(); + + RateLimitStats { + current: count, + limit: self.limit, + remaining: self.limit.saturating_sub(count), + reset_in_ms: self.time_to_reset(), + total_allowed: self.total_allowed.load(Ordering::SeqCst), + total_denied: self.total_denied.load(Ordering::SeqCst), + } + } + + fn time_to_reset(&self) -> u64 { + let window_start = self.window_start.read().unwrap(); + let elapsed = window_start.elapsed(); + if elapsed < self.window { + (self.window - elapsed).as_millis() as u64 + } else { + 0 + } + } +} + +// ============================================================================= +// Token Bucket +// ============================================================================= + +/// A token bucket for burst-friendly rate limiting. +/// +/// Allows bursts up to the bucket capacity, then limits to the refill rate. +pub struct TokenBucket { + /// Maximum capacity + capacity: u64, + /// Tokens per second refill rate + refill_rate: f64, + /// Current token count (multiplied by 1000 for precision) + tokens_millis: AtomicU64, + /// Last refill time + last_refill: RwLock, + /// Statistics + total_allowed: AtomicU64, + total_denied: AtomicU64, +} + +impl TokenBucket { + /// Creates a new token bucket. + /// + /// # Arguments + /// + /// * `capacity` - Maximum number of tokens (burst limit) + /// * `refill_rate` - Tokens added per second + pub fn new(capacity: u64, refill_rate: f64) -> Self { + Self { + capacity, + refill_rate, + tokens_millis: AtomicU64::new(capacity * 1000), + last_refill: RwLock::new(Instant::now()), + total_allowed: AtomicU64::new(0), + total_denied: AtomicU64::new(0), + } + } + + /// Refills tokens based on elapsed time. + fn refill(&self) { + let mut last_refill = self.last_refill.write().unwrap(); + let elapsed = last_refill.elapsed(); + let tokens_to_add = (elapsed.as_secs_f64() * self.refill_rate * 1000.0) as u64; + + if tokens_to_add > 0 { + let current = self.tokens_millis.load(Ordering::SeqCst); + let new_tokens = (current + tokens_to_add).min(self.capacity * 1000); + self.tokens_millis.store(new_tokens, Ordering::SeqCst); + *last_refill = Instant::now(); + } + } + + /// Checks if a token is available without consuming. + pub fn check(&self) -> RateLimitResult { + self.refill(); + let tokens_millis = self.tokens_millis.load(Ordering::SeqCst); + let tokens = tokens_millis / 1000; + + if tokens > 0 { + RateLimitResult::allowed( + self.capacity - tokens, + self.capacity, + tokens, + self.time_to_next_token(), + ) + } else { + RateLimitResult::denied(self.capacity, self.capacity, self.time_to_next_token()) + } + } + + /// Consumes a token if available. + pub fn consume(&self) -> RateLimitResult { + self.refill(); + let tokens_millis = self.tokens_millis.load(Ordering::SeqCst); + + if tokens_millis >= 1000 { + self.tokens_millis.fetch_sub(1000, Ordering::SeqCst); + self.total_allowed.fetch_add(1, Ordering::SeqCst); + let new_tokens = (tokens_millis - 1000) / 1000; + RateLimitResult::allowed( + self.capacity - new_tokens, + self.capacity, + new_tokens, + self.time_to_next_token(), + ) + } else { + self.total_denied.fetch_add(1, Ordering::SeqCst); + RateLimitResult::denied(self.capacity, self.capacity, self.time_to_next_token()) + } + } + + /// Resets the bucket to full capacity. + pub fn reset(&self) { + self.tokens_millis.store(self.capacity * 1000, Ordering::SeqCst); + let mut last_refill = self.last_refill.write().unwrap(); + *last_refill = Instant::now(); + } + + /// Returns statistics. + pub fn stats(&self) -> RateLimitStats { + self.refill(); + let tokens = self.tokens_millis.load(Ordering::SeqCst) / 1000; + + RateLimitStats { + current: self.capacity - tokens, + limit: self.capacity, + remaining: tokens, + reset_in_ms: self.time_to_next_token(), + total_allowed: self.total_allowed.load(Ordering::SeqCst), + total_denied: self.total_denied.load(Ordering::SeqCst), + } + } + + fn time_to_next_token(&self) -> u64 { + if self.refill_rate > 0.0 { + (1000.0 / self.refill_rate) as u64 + } else { + u64::MAX + } + } +} + +// ============================================================================= +// Rate Limiter Algorithm +// ============================================================================= + +/// Algorithm used for rate limiting. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum RateLimitAlgorithm { + /// Sliding window counter + SlidingWindow, + /// Token bucket + TokenBucket, +} + +// ============================================================================= +// Rate Limiter +// ============================================================================= + +/// A multi-key rate limiter. +/// +/// Manages rate limits for multiple keys (e.g., per-consumer, per-IP). +pub struct RateLimiter { + /// Rate limit per key + limit: u64, + /// Window duration + window: Duration, + /// Algorithm to use + algorithm: RateLimitAlgorithm, + /// Per-key sliding window counters + sliding_windows: RwLock>>, + /// Per-key token buckets + token_buckets: RwLock>>, +} + +impl RateLimiter { + /// Creates a new rate limiter with sliding window algorithm. + pub fn sliding_window(limit: u64, window: Duration) -> Self { + Self { + limit, + window, + algorithm: RateLimitAlgorithm::SlidingWindow, + sliding_windows: RwLock::new(HashMap::new()), + token_buckets: RwLock::new(HashMap::new()), + } + } + + /// Creates a new rate limiter with token bucket algorithm. + pub fn token_bucket(capacity: u64, refill_rate: f64) -> Self { + Self { + limit: capacity, + window: Duration::from_secs_f64(1.0 / refill_rate), + algorithm: RateLimitAlgorithm::TokenBucket, + sliding_windows: RwLock::new(HashMap::new()), + token_buckets: RwLock::new(HashMap::new()), + } + } + + /// Gets or creates a sliding window counter for a key. + fn get_sliding_window(&self, key: &str) -> Arc { + { + let windows = self.sliding_windows.read().unwrap(); + if let Some(window) = windows.get(key) { + return Arc::clone(window); + } + } + + let mut windows = self.sliding_windows.write().unwrap(); + windows + .entry(key.to_string()) + .or_insert_with(|| Arc::new(SlidingWindowCounter::new(self.limit, self.window))) + .clone() + } + + /// Gets or creates a token bucket for a key. + fn get_token_bucket(&self, key: &str) -> Arc { + { + let buckets = self.token_buckets.read().unwrap(); + if let Some(bucket) = buckets.get(key) { + return Arc::clone(bucket); + } + } + + let refill_rate = 1.0 / self.window.as_secs_f64(); + let mut buckets = self.token_buckets.write().unwrap(); + buckets + .entry(key.to_string()) + .or_insert_with(|| Arc::new(TokenBucket::new(self.limit, refill_rate))) + .clone() + } + + /// Checks if a request is allowed for the default key. + pub fn check(&self) -> RateLimitResult { + self.check_key("default") + } + + /// Checks if a request is allowed for a specific key. + pub fn check_key(&self, key: &str) -> RateLimitResult { + match self.algorithm { + RateLimitAlgorithm::SlidingWindow => self.get_sliding_window(key).check(), + RateLimitAlgorithm::TokenBucket => self.get_token_bucket(key).check(), + } + } + + /// Consumes a token/slot for the default key. + pub fn consume(&self) -> RateLimitResult { + self.consume_key("default") + } + + /// Consumes a token/slot for a specific key. + pub fn consume_key(&self, key: &str) -> RateLimitResult { + match self.algorithm { + RateLimitAlgorithm::SlidingWindow => self.get_sliding_window(key).consume(), + RateLimitAlgorithm::TokenBucket => self.get_token_bucket(key).consume(), + } + } + + /// Resets rate limiting for a specific key. + pub fn reset(&self, key: &str) { + match self.algorithm { + RateLimitAlgorithm::SlidingWindow => { + if let Some(window) = self.sliding_windows.read().unwrap().get(key) { + window.reset(); + } + } + RateLimitAlgorithm::TokenBucket => { + if let Some(bucket) = self.token_buckets.read().unwrap().get(key) { + bucket.reset(); + } + } + } + } + + /// Resets rate limiting for all keys. + pub fn reset_all(&self) { + match self.algorithm { + RateLimitAlgorithm::SlidingWindow => { + for window in self.sliding_windows.read().unwrap().values() { + window.reset(); + } + } + RateLimitAlgorithm::TokenBucket => { + for bucket in self.token_buckets.read().unwrap().values() { + bucket.reset(); + } + } + } + } + + /// Removes a specific key from tracking. + pub fn remove(&self, key: &str) -> bool { + match self.algorithm { + RateLimitAlgorithm::SlidingWindow => { + self.sliding_windows.write().unwrap().remove(key).is_some() + } + RateLimitAlgorithm::TokenBucket => { + self.token_buckets.write().unwrap().remove(key).is_some() + } + } + } + + /// Gets statistics for a specific key. + pub fn stats(&self, key: &str) -> Option { + match self.algorithm { + RateLimitAlgorithm::SlidingWindow => self + .sliding_windows + .read() + .unwrap() + .get(key) + .map(|w| w.stats()), + RateLimitAlgorithm::TokenBucket => self + .token_buckets + .read() + .unwrap() + .get(key) + .map(|b| b.stats()), + } + } + + /// Lists all tracked keys. + pub fn list_keys(&self) -> Vec { + match self.algorithm { + RateLimitAlgorithm::SlidingWindow => { + self.sliding_windows.read().unwrap().keys().cloned().collect() + } + RateLimitAlgorithm::TokenBucket => { + self.token_buckets.read().unwrap().keys().cloned().collect() + } + } + } + + /// Clears all tracked keys. + pub fn clear(&self) { + self.sliding_windows.write().unwrap().clear(); + self.token_buckets.write().unwrap().clear(); + } +} + +// ============================================================================= +// Rate Limited Queue +// ============================================================================= + +/// A queue wrapper that adds rate limiting. +/// +/// Rate limits can be applied to: +/// - Enqueue operations (producer limit) +/// - Dequeue operations (consumer limit) +pub struct RateLimitedQueue> { + /// The underlying queue + queue: Q, + /// Rate limiter for enqueue operations + enqueue_limiter: RateLimiter, + /// Rate limiter for dequeue operations + dequeue_limiter: RateLimiter, + /// Phantom data for type parameter + _marker: std::marker::PhantomData, +} + +impl> RateLimitedQueue { + /// Creates a new rate-limited queue. + /// + /// # Arguments + /// + /// * `queue` - The underlying queue + /// * `enqueue_limit` - Maximum enqueue operations per window + /// * `dequeue_limit` - Maximum dequeue operations per window + /// * `window` - Window duration for rate limiting + pub fn new(queue: Q, enqueue_limit: u64, dequeue_limit: u64, window: Duration) -> Self { + Self { + queue, + enqueue_limiter: RateLimiter::sliding_window(enqueue_limit, window), + dequeue_limiter: RateLimiter::sliding_window(dequeue_limit, window), + _marker: std::marker::PhantomData, + } + } + + /// Creates a rate-limited queue with just enqueue limiting. + pub fn with_enqueue_limit(queue: Q, limit: u64, window: Duration) -> Self { + Self::new(queue, limit, u64::MAX, window) + } + + /// Creates a rate-limited queue with just dequeue limiting. + pub fn with_dequeue_limit(queue: Q, limit: u64, window: Duration) -> Self { + Self::new(queue, u64::MAX, limit, window) + } + + /// Returns the queue name. + pub fn name(&self) -> &str { + self.queue.name() + } + + /// Enqueues an item with rate limiting. + pub async fn enqueue(&self, link: Link) -> Result, RateLimitError> { + self.enqueue_with_key(link, "default").await + } + + /// Enqueues an item with rate limiting for a specific key. + pub async fn enqueue_with_key( + &self, + link: Link, + key: &str, + ) -> Result, RateLimitError> { + let result = self.enqueue_limiter.consume_key(key); + if !result.allowed { + return Err(RateLimitError::new( + result.reset_in_ms, + result.limit, + result.reset_in_ms, + )); + } + + self.queue + .enqueue(link) + .await + .map_err(|e| RateLimitError::new(0, 0, 0)) // Convert queue error + } + + /// Dequeues an item with rate limiting. + pub async fn dequeue(&self) -> Result>, RateLimitError> { + self.dequeue_with_key("default").await + } + + /// Dequeues an item with rate limiting for a specific key (consumer). + pub async fn dequeue_with_key(&self, key: &str) -> Result>, RateLimitError> { + let result = self.dequeue_limiter.consume_key(key); + if !result.allowed { + return Err(RateLimitError::new( + result.reset_in_ms, + result.limit, + result.reset_in_ms, + )); + } + + self.queue + .dequeue() + .await + .map_err(|e| RateLimitError::new(0, 0, 0)) // Convert queue error + } + + /// Peeks at the next item (not rate limited). + pub async fn peek(&self) -> QueueResult>> { + self.queue.peek().await + } + + /// Acknowledges processing of an item. + pub async fn acknowledge(&self, id: T) -> QueueResult<()> { + self.queue.acknowledge(id).await + } + + /// Rejects an item. + pub async fn reject(&self, id: T, requeue: bool) -> QueueResult<()> { + self.queue.reject(id, requeue).await + } + + /// Returns queue statistics. + pub fn stats(&self) -> QueueStats { + self.queue.stats() + } + + /// Returns the queue depth. + pub fn depth(&self) -> usize { + self.queue.stats().depth + } + + /// Resets rate limiters. + pub fn reset_rate_limits(&self) { + self.enqueue_limiter.reset_all(); + self.dequeue_limiter.reset_all(); + } + + /// Gets the enqueue rate limiter. + pub fn enqueue_limiter(&self) -> &RateLimiter { + &self.enqueue_limiter + } + + /// Gets the dequeue rate limiter. + pub fn dequeue_limiter(&self) -> &RateLimiter { + &self.dequeue_limiter + } +} + +// ============================================================================= +// Tests +// ============================================================================= + +#[cfg(test)] +mod tests { + use super::*; + + mod sliding_window_tests { + use super::*; + + #[test] + fn test_creation() { + let counter = SlidingWindowCounter::new(100, Duration::from_secs(60)); + let stats = counter.stats(); + assert_eq!(stats.limit, 100); + assert_eq!(stats.current, 0); + assert_eq!(stats.remaining, 100); + } + + #[test] + fn test_check_allows_under_limit() { + let counter = SlidingWindowCounter::new(10, Duration::from_secs(60)); + let result = counter.check(); + assert!(result.allowed); + assert_eq!(result.remaining, 10); + } + + #[test] + fn test_increment() { + let counter = SlidingWindowCounter::new(10, Duration::from_secs(60)); + counter.increment(); + counter.increment(); + let result = counter.check(); + assert!(result.allowed); + assert_eq!(result.current, 2); + } + + #[test] + fn test_consume_under_limit() { + let counter = SlidingWindowCounter::new(10, Duration::from_secs(60)); + for _ in 0..5 { + let result = counter.consume(); + assert!(result.allowed); + } + let stats = counter.stats(); + assert_eq!(stats.current, 5); + } + + #[test] + fn test_consume_at_limit() { + let counter = SlidingWindowCounter::new(3, Duration::from_secs(60)); + for _ in 0..3 { + let result = counter.consume(); + assert!(result.allowed); + } + let result = counter.consume(); + assert!(!result.allowed); + } + + #[test] + fn test_reset() { + let counter = SlidingWindowCounter::new(10, Duration::from_secs(60)); + for _ in 0..5 { + counter.consume(); + } + counter.reset(); + let stats = counter.stats(); + assert_eq!(stats.current, 0); + } + } + + mod token_bucket_tests { + use super::*; + + #[test] + fn test_creation_full() { + let bucket = TokenBucket::new(10, 1.0); + let stats = bucket.stats(); + assert_eq!(stats.limit, 10); + assert_eq!(stats.remaining, 10); + } + + #[test] + fn test_check_allows() { + let bucket = TokenBucket::new(10, 1.0); + let result = bucket.check(); + assert!(result.allowed); + } + + #[test] + fn test_consume_single() { + let bucket = TokenBucket::new(10, 1.0); + let result = bucket.consume(); + assert!(result.allowed); + let stats = bucket.stats(); + assert_eq!(stats.remaining, 9); + } + + #[test] + fn test_consume_burst() { + let bucket = TokenBucket::new(5, 1.0); + for _ in 0..5 { + let result = bucket.consume(); + assert!(result.allowed); + } + let result = bucket.consume(); + assert!(!result.allowed); + } + + #[test] + fn test_reset() { + let bucket = TokenBucket::new(10, 1.0); + for _ in 0..5 { + bucket.consume(); + } + bucket.reset(); + let stats = bucket.stats(); + assert_eq!(stats.remaining, 10); + } + } + + mod rate_limiter_tests { + use super::*; + + #[test] + fn test_sliding_window_creation() { + let limiter = RateLimiter::sliding_window(100, Duration::from_secs(60)); + let result = limiter.check(); + assert!(result.allowed); + } + + #[test] + fn test_token_bucket_creation() { + let limiter = RateLimiter::token_bucket(10, 1.0); + let result = limiter.check(); + assert!(result.allowed); + } + + #[test] + fn test_multi_key() { + let limiter = RateLimiter::sliding_window(2, Duration::from_secs(60)); + + // Key 1 - exhaust limit + limiter.consume_key("key1"); + limiter.consume_key("key1"); + let result = limiter.consume_key("key1"); + assert!(!result.allowed); + + // Key 2 - should still have full limit + let result = limiter.consume_key("key2"); + assert!(result.allowed); + } + + #[test] + fn test_reset_key() { + let limiter = RateLimiter::sliding_window(2, Duration::from_secs(60)); + limiter.consume_key("key1"); + limiter.consume_key("key1"); + + limiter.reset("key1"); + + let result = limiter.consume_key("key1"); + assert!(result.allowed); + } + + #[test] + fn test_remove_key() { + let limiter = RateLimiter::sliding_window(10, Duration::from_secs(60)); + limiter.consume_key("key1"); + + let removed = limiter.remove("key1"); + assert!(removed); + + let removed_again = limiter.remove("key1"); + assert!(!removed_again); + } + + #[test] + fn test_list_keys() { + let limiter = RateLimiter::sliding_window(10, Duration::from_secs(60)); + limiter.consume_key("key1"); + limiter.consume_key("key2"); + + let keys = limiter.list_keys(); + assert_eq!(keys.len(), 2); + assert!(keys.contains(&"key1".to_string())); + assert!(keys.contains(&"key2".to_string())); + } + + #[test] + fn test_clear() { + let limiter = RateLimiter::sliding_window(10, Duration::from_secs(60)); + limiter.consume_key("key1"); + limiter.consume_key("key2"); + + limiter.clear(); + + assert!(limiter.list_keys().is_empty()); + } + + #[test] + fn test_stats() { + let limiter = RateLimiter::sliding_window(10, Duration::from_secs(60)); + limiter.consume_key("key1"); + + let stats = limiter.stats("key1"); + assert!(stats.is_some()); + assert_eq!(stats.unwrap().current, 1); + + let stats = limiter.stats("nonexistent"); + assert!(stats.is_none()); + } + } +} diff --git a/rust/src/features/router.rs b/rust/src/features/router.rs new file mode 100644 index 0000000..ff0d573 --- /dev/null +++ b/rust/src/features/router.rs @@ -0,0 +1,1008 @@ +//! Router module for topic-based message routing. +//! +//! This module provides AMQP-style message routing functionality: +//! - **Topic Matching**: Pattern matching with wildcards (* and #) +//! - **Exchange Types**: Direct, Topic, Fanout, and Headers exchanges +//! - **Router**: Central routing management +//! - **Routed Queue Manager**: Queue management with routing integration +//! +//! # Wildcard Syntax +//! +//! - `*` (asterisk): Matches exactly one word +//! - `#` (hash): Matches zero or more words +//! +//! # Example +//! +//! ```rust,ignore +//! use links_queue::features::{Router, TopicMatcher, ExchangeType}; +//! +//! // Pattern matching +//! assert!(TopicMatcher::matches("logs.*", "logs.error")); +//! assert!(TopicMatcher::matches("logs.#", "logs.error.db")); +//! +//! // Create a router +//! let router = Router::new(); +//! router.declare_exchange("events", ExchangeType::Topic); +//! router.bind("events", "error-queue", "logs.error"); +//! ``` + +use std::collections::{HashMap, HashSet}; +use std::sync::{Arc, RwLock}; + +use crate::{Link, LinkType, Queue, QueueResult, QueueManager, QueueOptions, QueueInfo, EnqueueResult}; + +// ============================================================================= +// Exchange Types +// ============================================================================= + +/// Types of message exchanges. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum ExchangeType { + /// Routes to queues with exact routing key match + Direct, + /// Routes using pattern matching with wildcards + Topic, + /// Routes to all bound queues (ignores routing key) + Fanout, + /// Routes based on message headers + Headers, +} + +impl std::fmt::Display for ExchangeType { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::Direct => write!(f, "direct"), + Self::Topic => write!(f, "topic"), + Self::Fanout => write!(f, "fanout"), + Self::Headers => write!(f, "headers"), + } + } +} + +// ============================================================================= +// Topic Matcher +// ============================================================================= + +/// Utility for matching routing keys against topic patterns. +pub struct TopicMatcher; + +impl TopicMatcher { + /// Checks if a routing key matches a pattern. + /// + /// # Wildcard Rules + /// + /// - `*` matches exactly one word + /// - `#` matches zero or more words + /// - Words are separated by `.` + /// + /// # Examples + /// + /// ```rust,ignore + /// assert!(TopicMatcher::matches("logs.*", "logs.error")); + /// assert!(!TopicMatcher::matches("logs.*", "logs.error.db")); + /// assert!(TopicMatcher::matches("logs.#", "logs.error.db")); + /// assert!(TopicMatcher::matches("#", "anything.goes.here")); + /// ``` + pub fn matches(pattern: &str, routing_key: &str) -> bool { + // Special case: # matches everything + if pattern == "#" { + return !routing_key.is_empty(); + } + + let pattern_parts: Vec<&str> = pattern.split('.').collect(); + let key_parts: Vec<&str> = routing_key.split('.').collect(); + + Self::match_parts(&pattern_parts, &key_parts) + } + + fn match_parts(pattern: &[&str], key: &[&str]) -> bool { + let mut pi = 0; // Pattern index + let mut ki = 0; // Key index + + while pi < pattern.len() { + let part = pattern[pi]; + + if part == "#" { + // # at end matches rest + if pi == pattern.len() - 1 { + return true; + } + + // Try matching # with different lengths + for k in ki..=key.len() { + if Self::match_parts(&pattern[pi + 1..], &key[k..]) { + return true; + } + } + return false; + } else if part == "*" { + // * must match exactly one word + if ki >= key.len() { + return false; + } + pi += 1; + ki += 1; + } else { + // Exact match required + if ki >= key.len() || part != key[ki] { + return false; + } + pi += 1; + ki += 1; + } + } + + // Both must be exhausted + pi == pattern.len() && ki == key.len() + } + + /// Calculates the specificity score of a pattern. + /// + /// Higher scores indicate more specific patterns. + /// - Exact words: 100 points + /// - `*` wildcard: 10 points + /// - `#` wildcard: 1 point + pub fn specificity(pattern: &str) -> u32 { + let mut score = 0; + for part in pattern.split('.') { + score += match part { + "#" => 1, + "*" => 10, + _ => 100, + }; + } + score + } +} + +// ============================================================================= +// Binding +// ============================================================================= + +/// A binding between an exchange and a queue. +#[derive(Debug, Clone)] +pub struct Binding { + /// Exchange name + pub exchange: String, + /// Queue name + pub queue: String, + /// Routing key or pattern + pub routing_key: String, + /// Headers for headers exchange (key -> value) + pub headers: Option>, + /// Match type for headers (all or any) + pub headers_match_all: bool, +} + +impl Binding { + /// Creates a new binding. + pub fn new(exchange: &str, queue: &str, routing_key: &str) -> Self { + Self { + exchange: exchange.to_string(), + queue: queue.to_string(), + routing_key: routing_key.to_string(), + headers: None, + headers_match_all: true, + } + } + + /// Creates a binding with headers. + pub fn with_headers( + exchange: &str, + queue: &str, + headers: HashMap, + match_all: bool, + ) -> Self { + Self { + exchange: exchange.to_string(), + queue: queue.to_string(), + routing_key: String::new(), + headers: Some(headers), + headers_match_all: match_all, + } + } +} + +// ============================================================================= +// Exchange Trait +// ============================================================================= + +/// Trait for exchange implementations. +pub trait Exchange: Send + Sync { + /// Returns the exchange name. + fn name(&self) -> &str; + + /// Returns the exchange type. + fn exchange_type(&self) -> ExchangeType; + + /// Binds a queue to this exchange. + fn bind(&self, queue: &str, routing_key: &str, headers: Option<(HashMap, bool)>) -> Binding; + + /// Unbinds a queue from this exchange. + fn unbind(&self, queue: &str, routing_key: &str) -> bool; + + /// Routes a message and returns matching queue names. + fn route(&self, routing_key: &str, headers: Option<&HashMap>) -> Vec; + + /// Returns all bindings. + fn get_bindings(&self) -> Vec; + + /// Clears all bindings. + fn clear(&self); +} + +// ============================================================================= +// Direct Exchange +// ============================================================================= + +/// Direct exchange: routes to queues with exact routing key match. +pub struct DirectExchange { + name: String, + /// Routing key -> Set of queue names + bindings: RwLock>>, +} + +impl DirectExchange { + /// Creates a new direct exchange. + pub fn new(name: &str) -> Self { + Self { + name: name.to_string(), + bindings: RwLock::new(HashMap::new()), + } + } +} + +impl Exchange for DirectExchange { + fn name(&self) -> &str { + &self.name + } + + fn exchange_type(&self) -> ExchangeType { + ExchangeType::Direct + } + + fn bind(&self, queue: &str, routing_key: &str, _headers: Option<(HashMap, bool)>) -> Binding { + let mut bindings = self.bindings.write().unwrap(); + bindings + .entry(routing_key.to_string()) + .or_insert_with(HashSet::new) + .insert(queue.to_string()); + Binding::new(&self.name, queue, routing_key) + } + + fn unbind(&self, queue: &str, routing_key: &str) -> bool { + let mut bindings = self.bindings.write().unwrap(); + if let Some(queues) = bindings.get_mut(routing_key) { + let removed = queues.remove(queue); + if queues.is_empty() { + bindings.remove(routing_key); + } + removed + } else { + false + } + } + + fn route(&self, routing_key: &str, _headers: Option<&HashMap>) -> Vec { + let bindings = self.bindings.read().unwrap(); + bindings + .get(routing_key) + .map(|q| q.iter().cloned().collect()) + .unwrap_or_default() + } + + fn get_bindings(&self) -> Vec { + let bindings = self.bindings.read().unwrap(); + let mut result = Vec::new(); + for (key, queues) in bindings.iter() { + for queue in queues { + result.push(Binding::new(&self.name, queue, key)); + } + } + result + } + + fn clear(&self) { + self.bindings.write().unwrap().clear(); + } +} + +// ============================================================================= +// Topic Exchange +// ============================================================================= + +/// Topic exchange: routes using pattern matching with wildcards. +pub struct TopicExchange { + name: String, + /// Pattern -> Set of queue names + bindings: RwLock>>, +} + +impl TopicExchange { + /// Creates a new topic exchange. + pub fn new(name: &str) -> Self { + Self { + name: name.to_string(), + bindings: RwLock::new(HashMap::new()), + } + } +} + +impl Exchange for TopicExchange { + fn name(&self) -> &str { + &self.name + } + + fn exchange_type(&self) -> ExchangeType { + ExchangeType::Topic + } + + fn bind(&self, queue: &str, routing_key: &str, _headers: Option<(HashMap, bool)>) -> Binding { + let mut bindings = self.bindings.write().unwrap(); + bindings + .entry(routing_key.to_string()) + .or_insert_with(HashSet::new) + .insert(queue.to_string()); + Binding::new(&self.name, queue, routing_key) + } + + fn unbind(&self, queue: &str, routing_key: &str) -> bool { + let mut bindings = self.bindings.write().unwrap(); + if let Some(queues) = bindings.get_mut(routing_key) { + let removed = queues.remove(queue); + if queues.is_empty() { + bindings.remove(routing_key); + } + removed + } else { + false + } + } + + fn route(&self, routing_key: &str, _headers: Option<&HashMap>) -> Vec { + let bindings = self.bindings.read().unwrap(); + let mut result = HashSet::new(); + + for (pattern, queues) in bindings.iter() { + if TopicMatcher::matches(pattern, routing_key) { + result.extend(queues.iter().cloned()); + } + } + + result.into_iter().collect() + } + + fn get_bindings(&self) -> Vec { + let bindings = self.bindings.read().unwrap(); + let mut result = Vec::new(); + for (key, queues) in bindings.iter() { + for queue in queues { + result.push(Binding::new(&self.name, queue, key)); + } + } + result + } + + fn clear(&self) { + self.bindings.write().unwrap().clear(); + } +} + +// ============================================================================= +// Fanout Exchange +// ============================================================================= + +/// Fanout exchange: routes to all bound queues (ignores routing key). +pub struct FanoutExchange { + name: String, + /// Set of bound queue names + queues: RwLock>, +} + +impl FanoutExchange { + /// Creates a new fanout exchange. + pub fn new(name: &str) -> Self { + Self { + name: name.to_string(), + queues: RwLock::new(HashSet::new()), + } + } +} + +impl Exchange for FanoutExchange { + fn name(&self) -> &str { + &self.name + } + + fn exchange_type(&self) -> ExchangeType { + ExchangeType::Fanout + } + + fn bind(&self, queue: &str, _routing_key: &str, _headers: Option<(HashMap, bool)>) -> Binding { + self.queues.write().unwrap().insert(queue.to_string()); + Binding::new(&self.name, queue, "") + } + + fn unbind(&self, queue: &str, _routing_key: &str) -> bool { + self.queues.write().unwrap().remove(queue) + } + + fn route(&self, _routing_key: &str, _headers: Option<&HashMap>) -> Vec { + self.queues.read().unwrap().iter().cloned().collect() + } + + fn get_bindings(&self) -> Vec { + let queues = self.queues.read().unwrap(); + queues.iter().map(|q| Binding::new(&self.name, q, "")).collect() + } + + fn clear(&self) { + self.queues.write().unwrap().clear(); + } +} + +// ============================================================================= +// Headers Exchange +// ============================================================================= + +/// Headers exchange: routes based on message headers. +pub struct HeadersExchange { + name: String, + /// List of (queue, headers, match_all) bindings + bindings: RwLock, bool)>>, +} + +impl HeadersExchange { + /// Creates a new headers exchange. + pub fn new(name: &str) -> Self { + Self { + name: name.to_string(), + bindings: RwLock::new(Vec::new()), + } + } +} + +impl Exchange for HeadersExchange { + fn name(&self) -> &str { + &self.name + } + + fn exchange_type(&self) -> ExchangeType { + ExchangeType::Headers + } + + fn bind(&self, queue: &str, _routing_key: &str, headers: Option<(HashMap, bool)>) -> Binding { + let (hdrs, match_all) = headers.unwrap_or_else(|| (HashMap::new(), true)); + self.bindings + .write() + .unwrap() + .push((queue.to_string(), hdrs.clone(), match_all)); + Binding::with_headers(&self.name, queue, hdrs, match_all) + } + + fn unbind(&self, queue: &str, _routing_key: &str) -> bool { + let mut bindings = self.bindings.write().unwrap(); + let len_before = bindings.len(); + bindings.retain(|(q, _, _)| q != queue); + bindings.len() < len_before + } + + fn route(&self, _routing_key: &str, headers: Option<&HashMap>) -> Vec { + let headers = match headers { + Some(h) => h, + None => return Vec::new(), + }; + + let bindings = self.bindings.read().unwrap(); + let mut result = HashSet::new(); + + for (queue, required_headers, match_all) in bindings.iter() { + let matches = if *match_all { + // All headers must match + required_headers + .iter() + .all(|(k, v)| headers.get(k) == Some(v)) + } else { + // Any header must match + required_headers + .iter() + .any(|(k, v)| headers.get(k) == Some(v)) + }; + + if matches { + result.insert(queue.clone()); + } + } + + result.into_iter().collect() + } + + fn get_bindings(&self) -> Vec { + let bindings = self.bindings.read().unwrap(); + bindings + .iter() + .map(|(q, h, m)| Binding::with_headers(&self.name, q, h.clone(), *m)) + .collect() + } + + fn clear(&self) { + self.bindings.write().unwrap().clear(); + } +} + +// ============================================================================= +// Router +// ============================================================================= + +/// Central router for managing exchanges and routing messages. +pub struct Router { + /// Map of exchange name -> exchange + exchanges: RwLock>>, +} + +impl Router { + /// Creates a new router. + pub fn new() -> Self { + Self { + exchanges: RwLock::new(HashMap::new()), + } + } + + /// Declares an exchange. + /// + /// If an exchange with the same name exists and has the same type, it is returned. + /// If it has a different type, an error is returned. + pub fn declare_exchange(&self, name: &str, exchange_type: ExchangeType) -> Result, String> { + let mut exchanges = self.exchanges.write().unwrap(); + + if let Some(existing) = exchanges.get(name) { + if existing.exchange_type() == exchange_type { + return Ok(Arc::clone(existing)); + } else { + return Err(format!( + "Exchange '{}' exists with type {:?}, cannot redeclare as {:?}", + name, + existing.exchange_type(), + exchange_type + )); + } + } + + let exchange: Arc = match exchange_type { + ExchangeType::Direct => Arc::new(DirectExchange::new(name)), + ExchangeType::Topic => Arc::new(TopicExchange::new(name)), + ExchangeType::Fanout => Arc::new(FanoutExchange::new(name)), + ExchangeType::Headers => Arc::new(HeadersExchange::new(name)), + }; + + exchanges.insert(name.to_string(), Arc::clone(&exchange)); + Ok(exchange) + } + + /// Gets an exchange by name. + pub fn get_exchange(&self, name: &str) -> Option> { + self.exchanges.read().unwrap().get(name).cloned() + } + + /// Deletes an exchange. + pub fn delete_exchange(&self, name: &str) -> bool { + self.exchanges.write().unwrap().remove(name).is_some() + } + + /// Binds a queue to an exchange. + pub fn bind( + &self, + exchange: &str, + queue: &str, + routing_key: &str, + headers: Option<(HashMap, bool)>, + ) -> Result { + let exchanges = self.exchanges.read().unwrap(); + let ex = exchanges + .get(exchange) + .ok_or_else(|| format!("Exchange '{}' not found", exchange))?; + Ok(ex.bind(queue, routing_key, headers)) + } + + /// Unbinds a queue from an exchange. + pub fn unbind(&self, exchange: &str, queue: &str, routing_key: &str) -> bool { + let exchanges = self.exchanges.read().unwrap(); + if let Some(ex) = exchanges.get(exchange) { + ex.unbind(queue, routing_key) + } else { + false + } + } + + /// Routes a message through an exchange. + pub fn route( + &self, + exchange: &str, + routing_key: &str, + headers: Option<&HashMap>, + ) -> Vec { + let exchanges = self.exchanges.read().unwrap(); + if let Some(ex) = exchanges.get(exchange) { + ex.route(routing_key, headers) + } else { + Vec::new() + } + } + + /// Lists all exchanges. + pub fn list_exchanges(&self) -> Vec<(String, ExchangeType, usize)> { + let exchanges = self.exchanges.read().unwrap(); + exchanges + .values() + .map(|e| (e.name().to_string(), e.exchange_type(), e.get_bindings().len())) + .collect() + } + + /// Gets bindings for an exchange. + pub fn get_bindings(&self, exchange: &str) -> Vec { + let exchanges = self.exchanges.read().unwrap(); + exchanges + .get(exchange) + .map(|e| e.get_bindings()) + .unwrap_or_default() + } + + /// Clears all exchanges. + pub fn clear(&self) { + self.exchanges.write().unwrap().clear(); + } +} + +impl Default for Router { + fn default() -> Self { + Self::new() + } +} + +// ============================================================================= +// Routed Queue Manager +// ============================================================================= + +/// A queue manager with integrated routing support. +pub struct RoutedQueueManager +where + T: LinkType, + Q: Queue, + M: QueueManager, +{ + /// The underlying queue manager + manager: M, + /// The router + router: Router, + /// Phantom data + _marker: std::marker::PhantomData<(T, Q)>, +} + +impl RoutedQueueManager +where + T: LinkType, + Q: Queue, + M: QueueManager, +{ + /// Creates a new routed queue manager. + pub fn new(manager: M) -> Self { + Self { + manager, + router: Router::new(), + _marker: std::marker::PhantomData, + } + } + + /// Creates a queue. + pub async fn create_queue(&self, name: &str, options: QueueOptions) -> QueueResult { + self.manager.create_queue(name, options).await + } + + /// Gets a queue by name. + pub async fn get_queue(&self, name: &str) -> QueueResult> { + self.manager.get_queue(name).await + } + + /// Deletes a queue. + pub async fn delete_queue(&self, name: &str) -> QueueResult { + self.manager.delete_queue(name).await + } + + /// Lists all queues. + pub async fn list_queues(&self) -> QueueResult> { + self.manager.list_queues().await + } + + /// Declares an exchange. + pub fn declare_exchange(&self, name: &str, exchange_type: ExchangeType) -> Result, String> { + self.router.declare_exchange(name, exchange_type) + } + + /// Binds a queue to an exchange with a routing key. + pub fn bind(&self, exchange: &str, queue: &str, routing_key: &str) -> Result { + self.router.bind(exchange, queue, routing_key, None) + } + + /// Binds a queue to a topic exchange with a pattern. + pub fn bind_topic(&self, exchange: &str, queue: &str, pattern: &str) -> Result { + self.router.bind(exchange, queue, pattern, None) + } + + /// Unbinds a queue from an exchange. + pub fn unbind(&self, exchange: &str, queue: &str, routing_key: &str) -> bool { + self.router.unbind(exchange, queue, routing_key) + } + + /// Publishes a message through an exchange. + pub async fn publish( + &self, + exchange: &str, + link: Link, + routing_key: &str, + ) -> QueueResult> { + let queues = self.router.route(exchange, routing_key, None); + + for queue_name in &queues { + if let Some(queue) = self.manager.get_queue(queue_name).await? { + queue.enqueue(link.clone()).await?; + } + } + + Ok(queues) + } + + /// Publishes a message with headers through an exchange. + pub async fn publish_with_headers( + &self, + exchange: &str, + link: Link, + routing_key: &str, + headers: &HashMap, + ) -> QueueResult> { + let queues = self.router.route(exchange, routing_key, Some(headers)); + + for queue_name in &queues { + if let Some(queue) = self.manager.get_queue(queue_name).await? { + queue.enqueue(link.clone()).await?; + } + } + + Ok(queues) + } + + /// Sets up a fanout pattern: creates a fanout exchange and binds queues. + pub fn fanout(&self, exchange: &str, queues: &[&str]) -> Result<(), String> { + self.router.declare_exchange(exchange, ExchangeType::Fanout)?; + for queue in queues { + self.router.bind(exchange, queue, "", None)?; + } + Ok(()) + } + + /// Gets the router. + pub fn router(&self) -> &Router { + &self.router + } +} + +// ============================================================================= +// Tests +// ============================================================================= + +#[cfg(test)] +mod tests { + use super::*; + + mod topic_matcher_tests { + use super::*; + + #[test] + fn test_exact_match() { + assert!(TopicMatcher::matches("logs.error", "logs.error")); + assert!(!TopicMatcher::matches("logs.error", "logs.info")); + } + + #[test] + fn test_star_wildcard() { + assert!(TopicMatcher::matches("logs.*", "logs.error")); + assert!(TopicMatcher::matches("logs.*", "logs.info")); + assert!(!TopicMatcher::matches("logs.*", "logs.error.db")); + assert!(TopicMatcher::matches("*.error", "logs.error")); + assert!(TopicMatcher::matches("*.error", "app.error")); + } + + #[test] + fn test_hash_wildcard() { + assert!(TopicMatcher::matches("logs.#", "logs.error")); + assert!(TopicMatcher::matches("logs.#", "logs.error.db")); + assert!(TopicMatcher::matches("logs.#", "logs.a.b.c.d")); + } + + #[test] + fn test_hash_at_beginning() { + assert!(TopicMatcher::matches("#.error", "logs.error")); + assert!(TopicMatcher::matches("#.error", "a.b.c.error")); + } + + #[test] + fn test_hash_alone() { + assert!(TopicMatcher::matches("#", "anything")); + assert!(TopicMatcher::matches("#", "a.b.c")); + } + + #[test] + fn test_complex_patterns() { + assert!(TopicMatcher::matches("*.system.*", "app.system.startup")); + assert!(TopicMatcher::matches("*.system.*", "db.system.shutdown")); + assert!(!TopicMatcher::matches("*.system.*", "system.startup")); + } + + #[test] + fn test_specificity() { + let exact = TopicMatcher::specificity("logs.error.db"); + let star = TopicMatcher::specificity("logs.*.db"); + let hash = TopicMatcher::specificity("logs.#"); + + assert!(exact > star); + assert!(star > hash); + } + } + + mod exchange_tests { + use super::*; + + #[test] + fn test_direct_exchange() { + let exchange = DirectExchange::new("logs"); + + exchange.bind("errors-queue", "error", None); + exchange.bind("info-queue", "info", None); + + assert_eq!(exchange.route("error", None), vec!["errors-queue".to_string()]); + assert_eq!(exchange.route("info", None), vec!["info-queue".to_string()]); + assert!(exchange.route("debug", None).is_empty()); + } + + #[test] + fn test_direct_exchange_unbind() { + let exchange = DirectExchange::new("logs"); + + exchange.bind("queue", "key", None); + assert!(exchange.unbind("queue", "key")); + assert!(exchange.route("key", None).is_empty()); + } + + #[test] + fn test_topic_exchange() { + let exchange = TopicExchange::new("events"); + + exchange.bind("all-logs", "logs.#", None); + exchange.bind("errors-only", "logs.error", None); + + let queues = exchange.route("logs.error", None); + assert!(queues.contains(&"all-logs".to_string())); + assert!(queues.contains(&"errors-only".to_string())); + } + + #[test] + fn test_fanout_exchange() { + let exchange = FanoutExchange::new("notifications"); + + exchange.bind("email-queue", "", None); + exchange.bind("sms-queue", "", None); + exchange.bind("push-queue", "", None); + + let queues = exchange.route("", None); + assert_eq!(queues.len(), 3); + } + + #[test] + fn test_headers_exchange_match_all() { + let exchange = HeadersExchange::new("tasks"); + + let mut headers = HashMap::new(); + headers.insert("priority".to_string(), "high".to_string()); + headers.insert("type".to_string(), "system".to_string()); + + exchange.bind("urgent-system", "", Some((headers.clone(), true))); + + // Must match all headers + assert_eq!( + exchange.route("", Some(&headers)), + vec!["urgent-system".to_string()] + ); + + // Missing header - no match + let mut partial = HashMap::new(); + partial.insert("priority".to_string(), "high".to_string()); + assert!(exchange.route("", Some(&partial)).is_empty()); + } + + #[test] + fn test_headers_exchange_match_any() { + let exchange = HeadersExchange::new("tasks"); + + let mut headers = HashMap::new(); + headers.insert("priority".to_string(), "high".to_string()); + headers.insert("type".to_string(), "system".to_string()); + + exchange.bind("special", "", Some((headers.clone(), false))); + + // Any header matches + let mut partial = HashMap::new(); + partial.insert("priority".to_string(), "high".to_string()); + assert_eq!( + exchange.route("", Some(&partial)), + vec!["special".to_string()] + ); + } + } + + mod router_tests { + use super::*; + + #[test] + fn test_declare_exchange() { + let router = Router::new(); + + let result = router.declare_exchange("logs", ExchangeType::Direct); + assert!(result.is_ok()); + + let result = router.declare_exchange("logs", ExchangeType::Direct); + assert!(result.is_ok()); // Same type - OK + + let result = router.declare_exchange("logs", ExchangeType::Topic); + assert!(result.is_err()); // Different type - Error + } + + #[test] + fn test_get_exchange() { + let router = Router::new(); + router.declare_exchange("logs", ExchangeType::Direct).unwrap(); + + assert!(router.get_exchange("logs").is_some()); + assert!(router.get_exchange("nonexistent").is_none()); + } + + #[test] + fn test_delete_exchange() { + let router = Router::new(); + router.declare_exchange("logs", ExchangeType::Direct).unwrap(); + + assert!(router.delete_exchange("logs")); + assert!(!router.delete_exchange("logs")); // Already deleted + } + + #[test] + fn test_bind_unbind() { + let router = Router::new(); + router.declare_exchange("logs", ExchangeType::Direct).unwrap(); + + router.bind("logs", "queue", "key", None).unwrap(); + assert_eq!(router.route("logs", "key", None), vec!["queue".to_string()]); + + router.unbind("logs", "queue", "key"); + assert!(router.route("logs", "key", None).is_empty()); + } + + #[test] + fn test_list_exchanges() { + let router = Router::new(); + router.declare_exchange("logs", ExchangeType::Direct).unwrap(); + router.declare_exchange("events", ExchangeType::Topic).unwrap(); + + let exchanges = router.list_exchanges(); + assert_eq!(exchanges.len(), 2); + } + + #[test] + fn test_clear() { + let router = Router::new(); + router.declare_exchange("logs", ExchangeType::Direct).unwrap(); + router.declare_exchange("events", ExchangeType::Topic).unwrap(); + + router.clear(); + assert!(router.list_exchanges().is_empty()); + } + } +} diff --git a/rust/src/features/scheduler.rs b/rust/src/features/scheduler.rs new file mode 100644 index 0000000..3b846f5 --- /dev/null +++ b/rust/src/features/scheduler.rs @@ -0,0 +1,1054 @@ +//! Scheduler module for delayed messages, cron jobs, TTL, and message expiration. +//! +//! This module provides scheduling functionality for queue operations: +//! - **Delayed Messages**: Schedule items for future delivery +//! - **Cron Jobs**: Recurring jobs using cron expressions +//! - **TTL**: Time-to-live for automatic message expiration +//! - **Message Expiration**: Automatic cleanup of expired messages +//! +//! # Example +//! +//! ```rust,ignore +//! use links_queue::features::{Scheduler, CronParser}; +//! use links_queue::{Link, LinkRef}; +//! use std::time::Duration; +//! +//! let mut scheduler = Scheduler::new(Duration::from_secs(1)); +//! +//! // Schedule a delayed message +//! let link = Link::new(1u64, LinkRef::Id(2), LinkRef::Id(3)); +//! scheduler.schedule(link, Duration::from_secs(5), None); +//! +//! // Add a cron job +//! scheduler.add_cron_job("hourly-task", "0 * * * *", |_| async { +//! println!("Hourly task executed"); +//! }); +//! ``` + +use std::collections::HashMap; +use std::sync::atomic::{AtomicBool, AtomicU64, Ordering}; +use std::sync::{Arc, RwLock}; +use std::time::{Duration, Instant, SystemTime, UNIX_EPOCH}; +use tokio::sync::mpsc; + +use crate::{Link, LinkType, Queue, QueueError, QueueResult, QueueStats, EnqueueResult}; + +// ============================================================================= +// Cron Parser +// ============================================================================= + +/// Parsed cron schedule with expanded field values. +#[derive(Debug, Clone)] +pub struct CronSchedule { + /// Allowed minute values (0-59) + pub minutes: Vec, + /// Allowed hour values (0-23) + pub hours: Vec, + /// Allowed day of month values (1-31) + pub days_of_month: Vec, + /// Allowed month values (1-12) + pub months: Vec, + /// Allowed day of week values (0-6, Sunday=0) + pub days_of_week: Vec, +} + +/// Parser for cron expressions (5-field format). +/// +/// Supports standard cron syntax: +/// - `*`: Any value +/// - `1,2,3`: List of values +/// - `1-5`: Range +/// - `*/2`: Step values +/// - `2/3`: Start at value with step +pub struct CronParser; + +impl CronParser { + /// Parses a single cron field. + /// + /// # Arguments + /// + /// * `field` - The field string to parse + /// * `min` - Minimum allowed value + /// * `max` - Maximum allowed value + /// + /// # Returns + /// + /// A sorted vector of values within the valid range. + pub fn parse_field(field: &str, min: u8, max: u8) -> Vec { + let mut values = Vec::new(); + + for part in field.split(',') { + let part = part.trim(); + + if part == "*" { + // Wildcard: all values + values.extend(min..=max); + } else if part.contains('/') { + // Step value + let parts: Vec<&str> = part.split('/').collect(); + if parts.len() == 2 { + let step: u8 = parts[1].parse().unwrap_or(1); + let start = if parts[0] == "*" { + min + } else { + parts[0].parse().unwrap_or(min) + }; + let mut i = start; + while i <= max { + values.push(i); + i = i.saturating_add(step); + } + } + } else if part.contains('-') { + // Range + let parts: Vec<&str> = part.split('-').collect(); + if parts.len() == 2 { + let start: u8 = parts[0].parse().unwrap_or(min); + let end: u8 = parts[1].parse().unwrap_or(max); + values.extend(start..=end); + } + } else if let Ok(val) = part.parse::() { + values.push(val); + } + } + + // Filter and deduplicate + values.retain(|&v| v >= min && v <= max); + values.sort_unstable(); + values.dedup(); + values + } + + /// Parses a complete 5-field cron expression. + /// + /// # Arguments + /// + /// * `expression` - Cron expression in format "minute hour day month weekday" + /// + /// # Returns + /// + /// Parsed `CronSchedule` or error string. + pub fn parse(expression: &str) -> Result { + let fields: Vec<&str> = expression.split_whitespace().collect(); + + if fields.len() != 5 { + return Err(format!( + "Invalid cron expression: expected 5 fields, got {}", + fields.len() + )); + } + + Ok(CronSchedule { + minutes: Self::parse_field(fields[0], 0, 59), + hours: Self::parse_field(fields[1], 0, 23), + days_of_month: Self::parse_field(fields[2], 1, 31), + months: Self::parse_field(fields[3], 1, 12), + days_of_week: Self::parse_field(fields[4], 0, 6), + }) + } + + /// Calculates the next run time for a cron expression. + /// + /// # Arguments + /// + /// * `expression` - Cron expression + /// * `after` - Time to start searching from (defaults to now) + /// + /// # Returns + /// + /// Unix timestamp of next run time, or None if no valid time found. + pub fn next_run(expression: &str, after: Option) -> Option { + let schedule = Self::parse(expression).ok()?; + Self::next_run_from_schedule(&schedule, after) + } + + /// Calculates the next run time from a parsed schedule. + pub fn next_run_from_schedule(schedule: &CronSchedule, after: Option) -> Option { + let after_ms = after.unwrap_or_else(|| { + SystemTime::now() + .duration_since(UNIX_EPOCH) + .unwrap() + .as_millis() as u64 + }); + + // Convert to seconds for easier calculation + let after_secs = after_ms / 1000; + + // Start searching from the next minute + let mut candidate = after_secs - (after_secs % 60) + 60; + + // Search for up to 4 years + let max_iterations = 4 * 365 * 24 * 60; + + for _ in 0..max_iterations { + let datetime = Self::timestamp_to_parts(candidate); + + // Check all fields + if schedule.months.contains(&datetime.month) + && schedule.days_of_month.contains(&datetime.day) + && schedule.days_of_week.contains(&datetime.weekday) + && schedule.hours.contains(&datetime.hour) + && schedule.minutes.contains(&datetime.minute) + { + return Some(candidate * 1000); + } + + candidate += 60; // Move to next minute + } + + None + } + + /// Converts a Unix timestamp to date/time parts. + fn timestamp_to_parts(timestamp: u64) -> DateTimeParts { + // Simple implementation - this is approximate but works for our needs + let days_since_epoch = timestamp / 86400; + let time_of_day = timestamp % 86400; + + let hour = (time_of_day / 3600) as u8; + let minute = ((time_of_day % 3600) / 60) as u8; + + // Calculate weekday (Jan 1, 1970 was Thursday = 4) + let weekday = ((days_since_epoch + 4) % 7) as u8; + + // Approximate year/month/day calculation + let mut year = 1970i32; + let mut remaining_days = days_since_epoch as i32; + + loop { + let days_in_year = if Self::is_leap_year(year) { 366 } else { 365 }; + if remaining_days < days_in_year { + break; + } + remaining_days -= days_in_year; + year += 1; + } + + let mut month = 1u8; + let days_in_months = Self::days_in_months(Self::is_leap_year(year)); + for (i, &days) in days_in_months.iter().enumerate() { + if remaining_days < i32::from(days) { + month = (i + 1) as u8; + break; + } + remaining_days -= i32::from(days); + } + + let day = (remaining_days + 1) as u8; + + DateTimeParts { + minute, + hour, + day, + month, + weekday, + } + } + + fn is_leap_year(year: i32) -> bool { + (year % 4 == 0 && year % 100 != 0) || (year % 400 == 0) + } + + fn days_in_months(leap: bool) -> [u8; 12] { + if leap { + [31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31] + } else { + [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31] + } + } +} + +struct DateTimeParts { + minute: u8, + hour: u8, + day: u8, + month: u8, + weekday: u8, +} + +// ============================================================================= +// Scheduled Item +// ============================================================================= + +/// A scheduled item waiting for delivery. +#[derive(Debug, Clone)] +pub struct ScheduledItem { + /// Unique identifier for this scheduled item + pub id: T, + /// The link to deliver + pub link: Link, + /// Scheduled delivery time (Unix timestamp in milliseconds) + pub deliver_at: u64, + /// Expiration time (Unix timestamp in milliseconds), if set + pub expires_at: Option, + /// When this item was scheduled + pub scheduled_at: u64, +} + +impl ScheduledItem { + /// Creates a new scheduled item. + pub fn new(id: T, link: Link, deliver_at: u64, expires_at: Option) -> Self { + let scheduled_at = SystemTime::now() + .duration_since(UNIX_EPOCH) + .unwrap() + .as_millis() as u64; + + Self { + id, + link, + deliver_at, + expires_at, + scheduled_at, + } + } + + /// Checks if this item has expired. + pub fn is_expired(&self) -> bool { + if let Some(expires_at) = self.expires_at { + let now = SystemTime::now() + .duration_since(UNIX_EPOCH) + .unwrap() + .as_millis() as u64; + now >= expires_at + } else { + false + } + } + + /// Checks if this item is due for delivery. + pub fn is_due(&self) -> bool { + let now = SystemTime::now() + .duration_since(UNIX_EPOCH) + .unwrap() + .as_millis() as u64; + now >= self.deliver_at + } +} + +// ============================================================================= +// Cron Job +// ============================================================================= + +/// A recurring cron job. +pub struct CronJob { + /// Unique identifier for this job + pub id: String, + /// The cron expression + pub expression: String, + /// Parsed schedule + pub schedule: CronSchedule, + /// Whether the job is enabled + pub enabled: bool, + /// Next scheduled run time + pub next_run: Option, + /// Handler function + handler: F, +} + +impl CronJob { + /// Creates a new cron job. + pub fn new(id: String, expression: String, handler: F) -> Result { + let schedule = CronParser::parse(&expression)?; + let next_run = CronParser::next_run_from_schedule(&schedule, None); + + Ok(Self { + id, + expression, + schedule, + enabled: true, + next_run, + handler, + }) + } + + /// Updates the next run time. + pub fn update_next_run(&mut self) { + self.next_run = CronParser::next_run_from_schedule(&self.schedule, None); + } + + /// Gets a reference to the handler. + pub fn handler(&self) -> &F { + &self.handler + } +} + +// ============================================================================= +// Scheduler Statistics +// ============================================================================= + +/// Statistics for the scheduler. +#[derive(Debug, Clone, Default)] +pub struct SchedulerStats { + /// Number of items currently scheduled + pub scheduled: usize, + /// Number of items pending delivery + pub pending: usize, + /// Number of cron jobs + pub cron_jobs: usize, + /// Total number of items delivered + pub delivered: u64, + /// Total number of items expired + pub expired: u64, +} + +// ============================================================================= +// Scheduler +// ============================================================================= + +/// Configuration for the scheduler. +#[derive(Debug, Clone)] +pub struct SchedulerConfig { + /// How often to check for due items (default: 1 second) + pub check_interval: Duration, +} + +impl Default for SchedulerConfig { + fn default() -> Self { + Self { + check_interval: Duration::from_secs(1), + } + } +} + +/// The main scheduler for managing delayed messages and cron jobs. +/// +/// Type parameter `T` is the link ID type. +/// Type parameter `F` is the cron job handler type. +pub struct Scheduler)> { + /// Scheduled items + items: Arc>>>, + /// Cron jobs + cron_jobs: Arc>>>, + /// Scheduler configuration + config: SchedulerConfig, + /// Whether the scheduler is running + running: Arc, + /// Counter for delivered items + delivered_count: Arc, + /// Counter for expired items + expired_count: Arc, + /// ID counter for auto-generated IDs + id_counter: Arc, +} + +impl, F> Scheduler { + /// Creates a new scheduler with default configuration. + pub fn new() -> Self { + Self::with_config(SchedulerConfig::default()) + } + + /// Creates a new scheduler with custom configuration. + pub fn with_config(config: SchedulerConfig) -> Self { + Self { + items: Arc::new(RwLock::new(HashMap::new())), + cron_jobs: Arc::new(RwLock::new(HashMap::new())), + config, + running: Arc::new(AtomicBool::new(false)), + delivered_count: Arc::new(AtomicU64::new(0)), + expired_count: Arc::new(AtomicU64::new(0)), + id_counter: Arc::new(AtomicU64::new(1)), + } + } + + /// Schedules an item for delayed delivery. + /// + /// # Arguments + /// + /// * `link` - The link to deliver + /// * `delay` - Delay duration before delivery + /// * `ttl` - Optional time-to-live (item expires if not delivered before this) + /// + /// # Returns + /// + /// The scheduled item. + pub fn schedule(&self, link: Link, delay: Duration, ttl: Option) -> ScheduledItem { + let now = SystemTime::now() + .duration_since(UNIX_EPOCH) + .unwrap() + .as_millis() as u64; + + let deliver_at = now + delay.as_millis() as u64; + let expires_at = ttl.map(|d| now + d.as_millis() as u64); + + let id = T::from(self.id_counter.fetch_add(1, Ordering::SeqCst)); + let item = ScheduledItem::new(id.clone(), link, deliver_at, expires_at); + + let mut items = self.items.write().unwrap(); + items.insert(id.clone(), item.clone()); + + item + } + + /// Schedules an item with a specific delivery time. + pub fn schedule_at(&self, link: Link, deliver_at: u64, ttl: Option) -> ScheduledItem { + let now = SystemTime::now() + .duration_since(UNIX_EPOCH) + .unwrap() + .as_millis() as u64; + + let expires_at = ttl.map(|d| now + d.as_millis() as u64); + + let id = T::from(self.id_counter.fetch_add(1, Ordering::SeqCst)); + let item = ScheduledItem::new(id.clone(), link, deliver_at, expires_at); + + let mut items = self.items.write().unwrap(); + items.insert(id.clone(), item.clone()); + + item + } + + /// Cancels a scheduled item. + /// + /// # Returns + /// + /// `true` if the item was found and cancelled. + pub fn cancel(&self, id: &T) -> bool { + let mut items = self.items.write().unwrap(); + items.remove(id).is_some() + } + + /// Gets a scheduled item by ID. + pub fn get(&self, id: &T) -> Option> { + let items = self.items.read().unwrap(); + items.get(id).cloned() + } + + /// Returns all pending items. + pub fn get_pending_items(&self) -> Vec> { + let items = self.items.read().unwrap(); + items.values().cloned().collect() + } + + /// Returns the number of pending items. + pub fn pending_count(&self) -> usize { + let items = self.items.read().unwrap(); + items.len() + } + + /// Checks if the scheduler is running. + pub fn is_running(&self) -> bool { + self.running.load(Ordering::SeqCst) + } + + /// Returns scheduler statistics. + pub fn stats(&self) -> SchedulerStats { + let items = self.items.read().unwrap(); + let cron_jobs = self.cron_jobs.read().unwrap(); + + SchedulerStats { + scheduled: items.len(), + pending: items.len(), + cron_jobs: cron_jobs.len(), + delivered: self.delivered_count.load(Ordering::SeqCst), + expired: self.expired_count.load(Ordering::SeqCst), + } + } + + /// Clears all scheduled items and cron jobs. + pub fn clear(&self) { + self.stop(); + let mut items = self.items.write().unwrap(); + items.clear(); + let mut cron_jobs = self.cron_jobs.write().unwrap(); + cron_jobs.clear(); + } + + /// Starts the scheduler background task. + pub fn start(&self) { + self.running.store(true, Ordering::SeqCst); + } + + /// Stops the scheduler background task. + pub fn stop(&self) { + self.running.store(false, Ordering::SeqCst); + } + + /// Process due items and return them. + /// + /// This should be called periodically (or in response to a timer). + pub fn process_due(&self) -> (Vec>, Vec>) { + let now = SystemTime::now() + .duration_since(UNIX_EPOCH) + .unwrap() + .as_millis() as u64; + + let mut due_items = Vec::new(); + let mut expired_items = Vec::new(); + + let mut items = self.items.write().unwrap(); + let mut to_remove = Vec::new(); + + for (id, item) in items.iter() { + if item.expires_at.map_or(false, |exp| now >= exp) { + // Item expired + expired_items.push(item.clone()); + to_remove.push(id.clone()); + } else if now >= item.deliver_at { + // Item is due + due_items.push(item.link.clone()); + to_remove.push(id.clone()); + } + } + + // Remove processed items + for id in to_remove { + items.remove(&id); + } + + // Update counters + self.delivered_count.fetch_add(due_items.len() as u64, Ordering::SeqCst); + self.expired_count.fetch_add(expired_items.len() as u64, Ordering::SeqCst); + + (due_items, expired_items) + } +} + +impl, F> Scheduler { + /// Adds a cron job. + /// + /// # Arguments + /// + /// * `id` - Unique identifier for the job + /// * `expression` - Cron expression + /// * `handler` - Handler function to call when the job runs + /// + /// # Returns + /// + /// The created `CronJob` or an error if the expression is invalid. + pub fn add_cron_job(&self, id: &str, expression: &str, handler: F) -> Result<(), String> { + let job = CronJob::new(id.to_string(), expression.to_string(), handler)?; + let mut cron_jobs = self.cron_jobs.write().unwrap(); + cron_jobs.insert(id.to_string(), job); + Ok(()) + } + + /// Removes a cron job. + pub fn remove_cron_job(&self, id: &str) -> bool { + let mut cron_jobs = self.cron_jobs.write().unwrap(); + cron_jobs.remove(id).is_some() + } + + /// Gets a cron job by ID. + pub fn get_cron_job(&self, id: &str) -> Option<(String, bool, Option)> { + let cron_jobs = self.cron_jobs.read().unwrap(); + cron_jobs.get(id).map(|job| { + (job.expression.clone(), job.enabled, job.next_run) + }) + } + + /// Enables or disables a cron job. + pub fn set_cron_job_enabled(&self, id: &str, enabled: bool) -> bool { + let mut cron_jobs = self.cron_jobs.write().unwrap(); + if let Some(job) = cron_jobs.get_mut(id) { + job.enabled = enabled; + if enabled { + job.update_next_run(); + } + true + } else { + false + } + } + + /// Lists all cron jobs. + pub fn list_cron_jobs(&self) -> Vec<(String, String, bool, Option)> { + let cron_jobs = self.cron_jobs.read().unwrap(); + cron_jobs + .values() + .map(|job| (job.id.clone(), job.expression.clone(), job.enabled, job.next_run)) + .collect() + } +} + +impl, F> Default for Scheduler { + fn default() -> Self { + Self::new() + } +} + +// ============================================================================= +// Scheduled Queue +// ============================================================================= + +/// A queue wrapper that adds scheduling capabilities. +/// +/// This wraps an existing queue implementation to add support for: +/// - Delayed message delivery +/// - TTL-based expiration +/// - Scheduled enqueue operations +pub struct ScheduledQueue, Q: Queue> { + /// The underlying queue + queue: Q, + /// The scheduler for delayed items + scheduler: Scheduler, +} + +impl, Q: Queue> ScheduledQueue { + /// Creates a new scheduled queue wrapping the given queue. + pub fn new(queue: Q) -> Self { + Self { + queue, + scheduler: Scheduler::new(), + } + } + + /// Returns the queue name. + pub fn name(&self) -> &str { + self.queue.name() + } + + /// Enqueues an item with optional delay and TTL. + /// + /// If delay is provided, the item is scheduled for later delivery. + /// If no delay, the item is enqueued immediately. + pub async fn enqueue( + &self, + link: Link, + delay: Option, + ttl: Option, + ) -> QueueResult> { + match delay { + Some(d) if d > Duration::ZERO => { + // Schedule for later + let item = self.scheduler.schedule(link, d, ttl); + Ok(EnqueueResult::new(item.id, 0)) + } + _ => { + // Enqueue immediately + self.queue.enqueue(link).await + } + } + } + + /// Dequeues the next available item. + pub async fn dequeue(&self) -> QueueResult>> { + // First, process any due items from the scheduler + let (due_items, _) = self.scheduler.process_due(); + for link in due_items { + self.queue.enqueue(link).await?; + } + + self.queue.dequeue().await + } + + /// Peeks at the next item without removing it. + pub async fn peek(&self) -> QueueResult>> { + self.queue.peek().await + } + + /// Acknowledges processing of an item. + pub async fn acknowledge(&self, id: T) -> QueueResult<()> { + self.queue.acknowledge(id).await + } + + /// Rejects an item. + pub async fn reject(&self, id: T, requeue: bool) -> QueueResult<()> { + self.queue.reject(id, requeue).await + } + + /// Cancels a scheduled item. + pub fn cancel_scheduled(&self, id: &T) -> bool { + self.scheduler.cancel(id) + } + + /// Returns queue statistics including scheduled items. + pub fn stats(&self) -> QueueStats { + self.queue.stats() + } + + /// Returns the queue depth including scheduled items. + pub fn depth(&self) -> usize { + self.queue.stats().depth + self.scheduler.pending_count() + } + + /// Returns the scheduler. + pub fn scheduler(&self) -> &Scheduler { + &self.scheduler + } + + /// Starts the scheduler. + pub fn start(&self) { + self.scheduler.start(); + } + + /// Stops the scheduler. + pub fn stop(&self) { + self.scheduler.stop(); + } + + /// Clears the queue and scheduler. + pub async fn clear(&self) { + self.scheduler.clear(); + // Note: The base queue doesn't have a clear method in the trait, + // so we just clear the scheduler state + } +} + +// ============================================================================= +// Tests +// ============================================================================= + +#[cfg(test)] +mod tests { + use super::*; + + mod cron_parser_tests { + use super::*; + + #[test] + fn test_parse_wildcard() { + let values = CronParser::parse_field("*", 0, 5); + assert_eq!(values, vec![0, 1, 2, 3, 4, 5]); + } + + #[test] + fn test_parse_single_value() { + let values = CronParser::parse_field("5", 0, 10); + assert_eq!(values, vec![5]); + } + + #[test] + fn test_parse_comma_separated() { + let values = CronParser::parse_field("1,3,5", 0, 10); + assert_eq!(values, vec![1, 3, 5]); + } + + #[test] + fn test_parse_range() { + let values = CronParser::parse_field("1-5", 0, 10); + assert_eq!(values, vec![1, 2, 3, 4, 5]); + } + + #[test] + fn test_parse_step() { + let values = CronParser::parse_field("*/2", 0, 6); + assert_eq!(values, vec![0, 2, 4, 6]); + } + + #[test] + fn test_parse_step_with_start() { + let values = CronParser::parse_field("2/3", 0, 10); + assert_eq!(values, vec![2, 5, 8]); + } + + #[test] + fn test_parse_filters_out_of_range() { + let values = CronParser::parse_field("0,5,15", 0, 10); + assert_eq!(values, vec![0, 5]); + } + + #[test] + fn test_parse_full_expression() { + let result = CronParser::parse("0 * * * *"); + assert!(result.is_ok()); + let schedule = result.unwrap(); + assert_eq!(schedule.minutes, vec![0]); + assert_eq!(schedule.hours.len(), 24); + assert_eq!(schedule.days_of_month.len(), 31); + assert_eq!(schedule.months.len(), 12); + assert_eq!(schedule.days_of_week.len(), 7); + } + + #[test] + fn test_parse_invalid_field_count() { + let result = CronParser::parse("0 * * *"); + assert!(result.is_err()); + } + + #[test] + fn test_next_run() { + let next = CronParser::next_run("* * * * *", None); + assert!(next.is_some()); + } + } + + mod scheduler_tests { + use super::*; + use crate::{LinkRef}; + + #[test] + fn test_scheduler_creation() { + let scheduler: Scheduler = Scheduler::new(); + assert!(!scheduler.is_running()); + assert_eq!(scheduler.pending_count(), 0); + } + + #[test] + fn test_schedule_item() { + let scheduler: Scheduler = Scheduler::new(); + let link = Link::new(1u64, LinkRef::Id(2), LinkRef::Id(3)); + + let item = scheduler.schedule(link, Duration::from_secs(5), None); + + assert!(item.deliver_at > item.scheduled_at); + assert_eq!(scheduler.pending_count(), 1); + } + + #[test] + fn test_schedule_with_ttl() { + let scheduler: Scheduler = Scheduler::new(); + let link = Link::new(1u64, LinkRef::Id(2), LinkRef::Id(3)); + + let item = scheduler.schedule(link, Duration::from_secs(5), Some(Duration::from_secs(3))); + + assert!(item.expires_at.is_some()); + assert!(item.expires_at.unwrap() < item.deliver_at); + } + + #[test] + fn test_cancel_item() { + let scheduler: Scheduler = Scheduler::new(); + let link = Link::new(1u64, LinkRef::Id(2), LinkRef::Id(3)); + + let item = scheduler.schedule(link, Duration::from_secs(5), None); + assert_eq!(scheduler.pending_count(), 1); + + let result = scheduler.cancel(&item.id); + assert!(result); + assert_eq!(scheduler.pending_count(), 0); + } + + #[test] + fn test_cancel_nonexistent() { + let scheduler: Scheduler = Scheduler::new(); + let result = scheduler.cancel(&999); + assert!(!result); + } + + #[test] + fn test_get_item() { + let scheduler: Scheduler = Scheduler::new(); + let link = Link::new(1u64, LinkRef::Id(2), LinkRef::Id(3)); + + let item = scheduler.schedule(link.clone(), Duration::from_secs(5), None); + let retrieved = scheduler.get(&item.id); + + assert!(retrieved.is_some()); + assert_eq!(retrieved.unwrap().link.id, link.id); + } + + #[test] + fn test_add_cron_job() { + let scheduler: Scheduler)> = Scheduler::new(); + let handler: fn(Link) = |_| {}; + + let result = scheduler.add_cron_job("test-job", "* * * * *", handler); + assert!(result.is_ok()); + + let job = scheduler.get_cron_job("test-job"); + assert!(job.is_some()); + } + + #[test] + fn test_add_invalid_cron_job() { + let scheduler: Scheduler)> = Scheduler::new(); + let handler: fn(Link) = |_| {}; + + let result = scheduler.add_cron_job("bad-job", "invalid", handler); + assert!(result.is_err()); + } + + #[test] + fn test_remove_cron_job() { + let scheduler: Scheduler)> = Scheduler::new(); + let handler: fn(Link) = |_| {}; + + scheduler.add_cron_job("test-job", "* * * * *", handler).unwrap(); + let result = scheduler.remove_cron_job("test-job"); + assert!(result); + + let job = scheduler.get_cron_job("test-job"); + assert!(job.is_none()); + } + + #[test] + fn test_enable_disable_cron_job() { + let scheduler: Scheduler)> = Scheduler::new(); + let handler: fn(Link) = |_| {}; + + scheduler.add_cron_job("test-job", "* * * * *", handler).unwrap(); + + scheduler.set_cron_job_enabled("test-job", false); + let job = scheduler.get_cron_job("test-job"); + assert!(!job.unwrap().1); + + scheduler.set_cron_job_enabled("test-job", true); + let job = scheduler.get_cron_job("test-job"); + assert!(job.unwrap().1); + } + + #[test] + fn test_list_cron_jobs() { + let scheduler: Scheduler)> = Scheduler::new(); + let handler: fn(Link) = |_| {}; + + scheduler.add_cron_job("job1", "* * * * *", handler).unwrap(); + scheduler.add_cron_job("job2", "0 * * * *", handler).unwrap(); + + let jobs = scheduler.list_cron_jobs(); + assert_eq!(jobs.len(), 2); + } + + #[test] + fn test_start_stop() { + let scheduler: Scheduler = Scheduler::new(); + + scheduler.start(); + assert!(scheduler.is_running()); + + scheduler.stop(); + assert!(!scheduler.is_running()); + } + + #[test] + fn test_stats() { + let scheduler: Scheduler)> = Scheduler::new(); + let link = Link::new(1u64, LinkRef::Id(2), LinkRef::Id(3)); + + scheduler.schedule(link, Duration::from_secs(5), None); + let handler: fn(Link) = |_| {}; + scheduler.add_cron_job("test", "* * * * *", handler).unwrap(); + + let stats = scheduler.stats(); + assert_eq!(stats.scheduled, 1); + assert_eq!(stats.pending, 1); + assert_eq!(stats.cron_jobs, 1); + } + + #[test] + fn test_clear() { + let scheduler: Scheduler)> = Scheduler::new(); + let link = Link::new(1u64, LinkRef::Id(2), LinkRef::Id(3)); + let handler: fn(Link) = |_| {}; + + scheduler.schedule(link, Duration::from_secs(5), None); + scheduler.add_cron_job("test", "* * * * *", handler).unwrap(); + scheduler.start(); + + scheduler.clear(); + + assert!(!scheduler.is_running()); + assert_eq!(scheduler.pending_count(), 0); + assert_eq!(scheduler.list_cron_jobs().len(), 0); + } + + #[test] + fn test_get_pending_items() { + let scheduler: Scheduler = Scheduler::new(); + + scheduler.schedule( + Link::new(1u64, LinkRef::Id(2), LinkRef::Id(3)), + Duration::from_secs(5), + None, + ); + scheduler.schedule( + Link::new(2u64, LinkRef::Id(4), LinkRef::Id(5)), + Duration::from_secs(10), + None, + ); + + let items = scheduler.get_pending_items(); + assert_eq!(items.len(), 2); + } + } +} From 217d1e448a0c665b090cf2cee9d2cea11d866e7c Mon Sep 17 00:00:00 2001 From: konard Date: Sun, 18 Jan 2026 23:01:06 +0100 Subject: [PATCH 04/10] Revert "Initial commit with task details" This reverts commit 29a5d654ff840be59955afe675a5f9d082dc71ad. --- CLAUDE.md | 5 ----- 1 file changed, 5 deletions(-) delete mode 100644 CLAUDE.md diff --git a/CLAUDE.md b/CLAUDE.md deleted file mode 100644 index 495ba33..0000000 --- a/CLAUDE.md +++ /dev/null @@ -1,5 +0,0 @@ -Issue to solve: https://github.com/link-foundation/links-queue/issues/25 -Your prepared branch: issue-25-fadd4ff659d6 -Your prepared working directory: /tmp/gh-issue-solver-1768771894624 - -Proceed. From 79c017a9a705475871798e21f1456fd4a0bbf81e Mon Sep 17 00:00:00 2001 From: konard Date: Tue, 20 Jan 2026 23:42:10 +0100 Subject: [PATCH 05/10] refactor(rust): Extract tests to separate files to fix line count CI Move tests from pubsub.rs, router.rs, and scheduler.rs into separate test modules following the existing crate pattern (e.g., client/tests.rs). This reduces line counts to under 1000 lines per file: - pubsub.rs: 1366 -> 990 lines - router.rs: 1009 -> 787 lines - scheduler.rs: 1055 -> 786 lines Also condensed some documentation comments to further reduce line counts. Co-Authored-By: Claude Opus 4.5 --- rust/src/features/mod.rs | 7 + rust/src/features/pubsub.rs | 383 +-------------------------- rust/src/features/pubsub_tests.rs | 288 ++++++++++++++++++++ rust/src/features/router.rs | 221 ---------------- rust/src/features/router_tests.rs | 217 +++++++++++++++ rust/src/features/scheduler.rs | 268 ------------------- rust/src/features/scheduler_tests.rs | 264 ++++++++++++++++++ 7 files changed, 780 insertions(+), 868 deletions(-) create mode 100644 rust/src/features/pubsub_tests.rs create mode 100644 rust/src/features/router_tests.rs create mode 100644 rust/src/features/scheduler_tests.rs diff --git a/rust/src/features/mod.rs b/rust/src/features/mod.rs index 8bf9a69..10ee83a 100644 --- a/rust/src/features/mod.rs +++ b/rust/src/features/mod.rs @@ -29,6 +29,13 @@ pub mod rate_limiter; pub mod router; pub mod pubsub; +#[cfg(test)] +mod scheduler_tests; +#[cfg(test)] +mod router_tests; +#[cfg(test)] +mod pubsub_tests; + // Re-export main types pub use scheduler::{CronParser, CronSchedule, Scheduler, ScheduledItem, ScheduledQueue, SchedulerStats, CronJob}; pub use rate_limiter::{ diff --git a/rust/src/features/pubsub.rs b/rust/src/features/pubsub.rs index a45acdf..cef6576 100644 --- a/rust/src/features/pubsub.rs +++ b/rust/src/features/pubsub.rs @@ -1,33 +1,7 @@ //! Pub/Sub module for links-queue. //! -//! This module provides publish/subscribe messaging patterns: -//! - Topic creation/deletion -//! - Subscribe/unsubscribe -//! - Fan-out delivery -//! - Message filtering -//! -//! # Example -//! -//! ```rust,ignore -//! use links_queue::features::pubsub::{PubSubBroker, MessageFilter}; -//! -//! let broker = PubSubBroker::new(Default::default()); -//! -//! // Create a topic -//! broker.create_topic("events").await?; -//! -//! // Subscribe to the topic -//! let sub_id = broker.subscribe("events", |msg| async move { -//! println!("Received: {:?}", msg.data); -//! Ok(()) -//! }, None).await?; -//! -//! // Publish a message -//! broker.publish("events", "Hello, World!", None).await?; -//! -//! // Unsubscribe -//! broker.unsubscribe(&sub_id).await?; -//! ``` +//! Provides publish/subscribe messaging patterns: topic creation/deletion, +//! subscribe/unsubscribe, fan-out delivery, and message filtering. use std::collections::{HashMap, HashSet}; use std::fmt::Debug; @@ -40,10 +14,6 @@ use tokio::sync::RwLock; use crate::queue::traits::{Queue, QueueManager}; -// ============================================================================= -// Types and Errors -// ============================================================================= - /// Error type for pub/sub operations. #[derive(Debug, Clone)] pub enum PubSubError { @@ -158,25 +128,7 @@ impl Default for BrokerOptions { } } -// ============================================================================= -// Message Filter -// ============================================================================= - -/// Filter for messages based on content. -/// -/// Provides utilities for filtering messages based on various criteria. -/// -/// # Example -/// -/// ```rust,ignore -/// use links_queue::features::pubsub::MessageFilter; -/// -/// let filter = MessageFilter::new() -/// .with_header("priority", "high") -/// .with_custom(|msg| msg.data.len() > 10); -/// -/// let matches = filter.matches(&message); -/// ``` +/// Filter for messages based on content and headers. #[derive(Clone)] pub struct MessageFilter { /// Header filters (key -> expected value). @@ -250,10 +202,6 @@ impl Debug for MessageFilter { } } -// ============================================================================= -// Subscription (internal) -// ============================================================================= - type AsyncHandler = Arc) -> Pin + Send>> + Send + Sync>; @@ -267,32 +215,7 @@ struct Subscription { received: AtomicU64, } -// ============================================================================= -// Pub/Sub Broker -// ============================================================================= - -/// Central broker for pub/sub messaging. -/// -/// Manages topics, subscriptions, and message delivery. -/// -/// # Example -/// -/// ```rust,ignore -/// use links_queue::features::pubsub::{PubSubBroker, BrokerOptions}; -/// -/// let broker = PubSubBroker::::new(BrokerOptions::default()); -/// -/// // Create a topic -/// broker.create_topic("events").await?; -/// -/// // Subscribe -/// let sub_id = broker.subscribe("events", |msg| async move { -/// println!("Received: {}", msg.data); -/// }, None).await?; -/// -/// // Publish -/// broker.publish("events", "Hello!".to_string(), None).await?; -/// ``` +/// Central broker for pub/sub messaging. Manages topics, subscriptions, and message delivery. pub struct PubSubBroker { /// Configuration options. options: BrokerOptions, @@ -683,10 +606,6 @@ impl PubSubBroker { } } -// ============================================================================= -// Observable Queue -// ============================================================================= - /// Event type for queue observations. #[derive(Debug, Clone)] pub enum QueueEvent { @@ -809,10 +728,6 @@ impl> ObservableQueue { } } -// ============================================================================= -// Queue-backed Pub/Sub -// ============================================================================= - /// Subscription info for queue-backed pub/sub. #[derive(Debug, Clone)] pub struct QueueSubscription { @@ -1073,293 +988,3 @@ where } } -// ============================================================================= -// Tests -// ============================================================================= - -#[cfg(test)] -mod tests { - use super::*; - use std::sync::atomic::AtomicUsize; - - #[tokio::test] - async fn test_message_filter() { - let filter = MessageFilter::::new() - .with_header("priority", "high") - .with_custom(|msg: &PublishedMessage| msg.data.len() > 3); - - let mut headers = HashMap::new(); - headers.insert("priority".to_string(), "high".to_string()); - - let msg1 = PublishedMessage { - id: "1".to_string(), - topic: "test".to_string(), - data: "hello".to_string(), - timestamp: Instant::now(), - headers: headers.clone(), - }; - - let msg2 = PublishedMessage { - id: "2".to_string(), - topic: "test".to_string(), - data: "hi".to_string(), // Too short - timestamp: Instant::now(), - headers: headers.clone(), - }; - - let msg3 = PublishedMessage { - id: "3".to_string(), - topic: "test".to_string(), - data: "hello".to_string(), - timestamp: Instant::now(), - headers: HashMap::new(), // Missing header - }; - - assert!(filter.matches(&msg1)); - assert!(!filter.matches(&msg2)); - assert!(!filter.matches(&msg3)); - } - - #[tokio::test] - async fn test_broker_topic_management() { - let broker = PubSubBroker::::new(BrokerOptions { - auto_create_topics: false, - message_retention: None, - }); - - // Create topic - let topic = broker.create_topic("events").await.unwrap(); - assert_eq!(topic.name, "events"); - assert_eq!(topic.subscriber_count, 0); - - // Duplicate topic should fail - let result = broker.create_topic("events").await; - assert!(matches!(result, Err(PubSubError::TopicExists(_)))); - - // Get topic - let topic = broker.get_topic("events").await; - assert!(topic.is_some()); - - // List topics - let topics = broker.list_topics().await; - assert_eq!(topics.len(), 1); - - // Delete topic - let deleted = broker.delete_topic("events").await.unwrap(); - assert!(deleted); - - let topics = broker.list_topics().await; - assert!(topics.is_empty()); - } - - #[tokio::test] - async fn test_broker_subscribe_unsubscribe() { - let broker = PubSubBroker::::new(BrokerOptions::default()); - - let counter = Arc::new(AtomicUsize::new(0)); - let counter_clone = counter.clone(); - - // Subscribe - let sub_id = broker - .subscribe( - "events", - move |_msg| { - let c = counter_clone.clone(); - async move { - c.fetch_add(1, Ordering::SeqCst); - } - }, - None, - ) - .await - .unwrap(); - - // Verify subscription - let sub = broker.get_subscription(&sub_id).await; - assert!(sub.is_some()); - assert_eq!(sub.unwrap().topic, "events"); - - // Publish - let result = broker.publish("events", "test".to_string(), None).await.unwrap(); - assert_eq!(result.delivered, 1); - - // Wait a bit for async delivery - tokio::time::sleep(Duration::from_millis(10)).await; - assert_eq!(counter.load(Ordering::SeqCst), 1); - - // Unsubscribe - let unsubbed = broker.unsubscribe(&sub_id).await.unwrap(); - assert!(unsubbed); - - // Publish again (no subscribers) - let result = broker.publish("events", "test2".to_string(), None).await.unwrap(); - assert_eq!(result.delivered, 0); - } - - #[tokio::test] - async fn test_broker_pause_resume() { - let broker = PubSubBroker::::new(BrokerOptions::default()); - - let counter = Arc::new(AtomicUsize::new(0)); - let counter_clone = counter.clone(); - - let sub_id = broker - .subscribe( - "events", - move |_msg| { - let c = counter_clone.clone(); - async move { - c.fetch_add(1, Ordering::SeqCst); - } - }, - None, - ) - .await - .unwrap(); - - // Pause - broker.pause(&sub_id).await; - - // Publish (should not deliver) - broker.publish("events", "test".to_string(), None).await.unwrap(); - tokio::time::sleep(Duration::from_millis(10)).await; - assert_eq!(counter.load(Ordering::SeqCst), 0); - - // Resume - broker.resume(&sub_id).await; - - // Publish (should deliver) - broker.publish("events", "test".to_string(), None).await.unwrap(); - tokio::time::sleep(Duration::from_millis(10)).await; - assert_eq!(counter.load(Ordering::SeqCst), 1); - } - - #[tokio::test] - async fn test_broker_filtering() { - let broker = PubSubBroker::::new(BrokerOptions::default()); - - let counter = Arc::new(AtomicUsize::new(0)); - let counter_clone = counter.clone(); - - // Subscribe with filter - let filter = MessageFilter::new() - .with_header("priority", "high"); - - broker - .subscribe( - "events", - move |_msg| { - let c = counter_clone.clone(); - async move { - c.fetch_add(1, Ordering::SeqCst); - } - }, - Some(filter), - ) - .await - .unwrap(); - - // Publish without matching header (should filter) - let result = broker.publish("events", "test".to_string(), None).await.unwrap(); - assert_eq!(result.filtered, 1); - assert_eq!(result.delivered, 0); - - // Publish with matching header - let mut headers = HashMap::new(); - headers.insert("priority".to_string(), "high".to_string()); - let result = broker.publish("events", "test".to_string(), Some(headers)).await.unwrap(); - assert_eq!(result.filtered, 0); - assert_eq!(result.delivered, 1); - } - - #[tokio::test] - async fn test_broker_message_retention() { - let broker = PubSubBroker::::new(BrokerOptions { - auto_create_topics: true, - message_retention: Some(Duration::from_secs(60)), - }); - - // Publish some messages - broker.publish("events", "msg1".to_string(), None).await.unwrap(); - broker.publish("events", "msg2".to_string(), None).await.unwrap(); - broker.publish("events", "msg3".to_string(), None).await.unwrap(); - - // Get history - let history = broker.get_history("events", 10).await; - assert_eq!(history.len(), 3); - assert_eq!(history[0].data, "msg1"); - assert_eq!(history[2].data, "msg3"); - - // Get limited history - let history = broker.get_history("events", 2).await; - assert_eq!(history.len(), 2); - assert_eq!(history[0].data, "msg2"); - } - - #[tokio::test] - async fn test_broker_stats() { - let broker = PubSubBroker::::new(BrokerOptions::default()); - - // Create topics and subscriptions - broker.create_topic("topic1").await.unwrap(); - broker.create_topic("topic2").await.unwrap(); - - broker - .subscribe("topic1", |_| async {}, None) - .await - .unwrap(); - broker - .subscribe("topic1", |_| async {}, None) - .await - .unwrap(); - - // Publish - broker.publish("topic1", "test".to_string(), None).await.unwrap(); - - let stats = broker.get_stats().await; - assert_eq!(stats.topics, 2); - assert_eq!(stats.subscriptions, 2); - assert_eq!(stats.published, 1); - assert_eq!(stats.delivered, 2); - } - - #[tokio::test] - async fn test_broker_auto_create_topics() { - // With auto-create enabled - let broker = PubSubBroker::::new(BrokerOptions { - auto_create_topics: true, - message_retention: None, - }); - - // Should auto-create topic on publish - broker.publish("auto-topic", "test".to_string(), None).await.unwrap(); - let topic = broker.get_topic("auto-topic").await; - assert!(topic.is_some()); - - // With auto-create disabled - let broker = PubSubBroker::::new(BrokerOptions { - auto_create_topics: false, - message_retention: None, - }); - - // Should fail on publish to non-existent topic - let result = broker.publish("auto-topic", "test".to_string(), None).await; - assert!(matches!(result, Err(PubSubError::TopicNotFound(_)))); - } - - #[tokio::test] - async fn test_publish_many() { - let broker = PubSubBroker::::new(BrokerOptions::default()); - - broker.create_topic("topic1").await.unwrap(); - broker.create_topic("topic2").await.unwrap(); - - let results = broker - .publish_many(&["topic1", "topic2"], "test".to_string(), None) - .await; - - assert_eq!(results.len(), 2); - assert!(results.get("topic1").unwrap().is_ok()); - assert!(results.get("topic2").unwrap().is_ok()); - } -} diff --git a/rust/src/features/pubsub_tests.rs b/rust/src/features/pubsub_tests.rs new file mode 100644 index 0000000..3e3ebe3 --- /dev/null +++ b/rust/src/features/pubsub_tests.rs @@ -0,0 +1,288 @@ +//! Tests for the pub/sub module. + +use super::pubsub::*; +use std::collections::HashMap; +use std::sync::atomic::{AtomicUsize, Ordering}; +use std::sync::Arc; +use std::time::{Duration, Instant}; + +#[tokio::test] +async fn test_message_filter() { + let filter = MessageFilter::::new() + .with_header("priority", "high") + .with_custom(|msg: &PublishedMessage| msg.data.len() > 3); + + let mut headers = HashMap::new(); + headers.insert("priority".to_string(), "high".to_string()); + + let msg1 = PublishedMessage { + id: "1".to_string(), + topic: "test".to_string(), + data: "hello".to_string(), + timestamp: Instant::now(), + headers: headers.clone(), + }; + + let msg2 = PublishedMessage { + id: "2".to_string(), + topic: "test".to_string(), + data: "hi".to_string(), // Too short + timestamp: Instant::now(), + headers: headers.clone(), + }; + + let msg3 = PublishedMessage { + id: "3".to_string(), + topic: "test".to_string(), + data: "hello".to_string(), + timestamp: Instant::now(), + headers: HashMap::new(), // Missing header + }; + + assert!(filter.matches(&msg1)); + assert!(!filter.matches(&msg2)); + assert!(!filter.matches(&msg3)); +} + +#[tokio::test] +async fn test_broker_topic_management() { + let broker = PubSubBroker::::new(BrokerOptions { + auto_create_topics: false, + message_retention: None, + }); + + // Create topic + let topic = broker.create_topic("events").await.unwrap(); + assert_eq!(topic.name, "events"); + assert_eq!(topic.subscriber_count, 0); + + // Duplicate topic should fail + let result = broker.create_topic("events").await; + assert!(matches!(result, Err(PubSubError::TopicExists(_)))); + + // Get topic + let topic = broker.get_topic("events").await; + assert!(topic.is_some()); + + // List topics + let topics = broker.list_topics().await; + assert_eq!(topics.len(), 1); + + // Delete topic + let deleted = broker.delete_topic("events").await.unwrap(); + assert!(deleted); + + let topics = broker.list_topics().await; + assert!(topics.is_empty()); +} + +#[tokio::test] +async fn test_broker_subscribe_unsubscribe() { + let broker = PubSubBroker::::new(BrokerOptions::default()); + + let counter = Arc::new(AtomicUsize::new(0)); + let counter_clone = counter.clone(); + + // Subscribe + let sub_id = broker + .subscribe( + "events", + move |_msg| { + let c = counter_clone.clone(); + async move { + c.fetch_add(1, Ordering::SeqCst); + } + }, + None, + ) + .await + .unwrap(); + + // Verify subscription + let sub = broker.get_subscription(&sub_id).await; + assert!(sub.is_some()); + assert_eq!(sub.unwrap().topic, "events"); + + // Publish + let result = broker.publish("events", "test".to_string(), None).await.unwrap(); + assert_eq!(result.delivered, 1); + + // Wait a bit for async delivery + tokio::time::sleep(Duration::from_millis(10)).await; + assert_eq!(counter.load(Ordering::SeqCst), 1); + + // Unsubscribe + let unsubbed = broker.unsubscribe(&sub_id).await.unwrap(); + assert!(unsubbed); + + // Publish again (no subscribers) + let result = broker.publish("events", "test2".to_string(), None).await.unwrap(); + assert_eq!(result.delivered, 0); +} + +#[tokio::test] +async fn test_broker_pause_resume() { + let broker = PubSubBroker::::new(BrokerOptions::default()); + + let counter = Arc::new(AtomicUsize::new(0)); + let counter_clone = counter.clone(); + + let sub_id = broker + .subscribe( + "events", + move |_msg| { + let c = counter_clone.clone(); + async move { + c.fetch_add(1, Ordering::SeqCst); + } + }, + None, + ) + .await + .unwrap(); + + // Pause + broker.pause(&sub_id).await; + + // Publish (should not deliver) + broker.publish("events", "test".to_string(), None).await.unwrap(); + tokio::time::sleep(Duration::from_millis(10)).await; + assert_eq!(counter.load(Ordering::SeqCst), 0); + + // Resume + broker.resume(&sub_id).await; + + // Publish (should deliver) + broker.publish("events", "test".to_string(), None).await.unwrap(); + tokio::time::sleep(Duration::from_millis(10)).await; + assert_eq!(counter.load(Ordering::SeqCst), 1); +} + +#[tokio::test] +async fn test_broker_filtering() { + let broker = PubSubBroker::::new(BrokerOptions::default()); + + let counter = Arc::new(AtomicUsize::new(0)); + let counter_clone = counter.clone(); + + // Subscribe with filter + let filter = MessageFilter::new() + .with_header("priority", "high"); + + broker + .subscribe( + "events", + move |_msg| { + let c = counter_clone.clone(); + async move { + c.fetch_add(1, Ordering::SeqCst); + } + }, + Some(filter), + ) + .await + .unwrap(); + + // Publish without matching header (should filter) + let result = broker.publish("events", "test".to_string(), None).await.unwrap(); + assert_eq!(result.filtered, 1); + assert_eq!(result.delivered, 0); + + // Publish with matching header + let mut headers = HashMap::new(); + headers.insert("priority".to_string(), "high".to_string()); + let result = broker.publish("events", "test".to_string(), Some(headers)).await.unwrap(); + assert_eq!(result.filtered, 0); + assert_eq!(result.delivered, 1); +} + +#[tokio::test] +async fn test_broker_message_retention() { + let broker = PubSubBroker::::new(BrokerOptions { + auto_create_topics: true, + message_retention: Some(Duration::from_secs(60)), + }); + + // Publish some messages + broker.publish("events", "msg1".to_string(), None).await.unwrap(); + broker.publish("events", "msg2".to_string(), None).await.unwrap(); + broker.publish("events", "msg3".to_string(), None).await.unwrap(); + + // Get history + let history = broker.get_history("events", 10).await; + assert_eq!(history.len(), 3); + assert_eq!(history[0].data, "msg1"); + assert_eq!(history[2].data, "msg3"); + + // Get limited history + let history = broker.get_history("events", 2).await; + assert_eq!(history.len(), 2); + assert_eq!(history[0].data, "msg2"); +} + +#[tokio::test] +async fn test_broker_stats() { + let broker = PubSubBroker::::new(BrokerOptions::default()); + + // Create topics and subscriptions + broker.create_topic("topic1").await.unwrap(); + broker.create_topic("topic2").await.unwrap(); + + broker + .subscribe("topic1", |_| async {}, None) + .await + .unwrap(); + broker + .subscribe("topic1", |_| async {}, None) + .await + .unwrap(); + + // Publish + broker.publish("topic1", "test".to_string(), None).await.unwrap(); + + let stats = broker.get_stats().await; + assert_eq!(stats.topics, 2); + assert_eq!(stats.subscriptions, 2); + assert_eq!(stats.published, 1); + assert_eq!(stats.delivered, 2); +} + +#[tokio::test] +async fn test_broker_auto_create_topics() { + // With auto-create enabled + let broker = PubSubBroker::::new(BrokerOptions { + auto_create_topics: true, + message_retention: None, + }); + + // Should auto-create topic on publish + broker.publish("auto-topic", "test".to_string(), None).await.unwrap(); + let topic = broker.get_topic("auto-topic").await; + assert!(topic.is_some()); + + // With auto-create disabled + let broker = PubSubBroker::::new(BrokerOptions { + auto_create_topics: false, + message_retention: None, + }); + + // Should fail on publish to non-existent topic + let result = broker.publish("auto-topic", "test".to_string(), None).await; + assert!(matches!(result, Err(PubSubError::TopicNotFound(_)))); +} + +#[tokio::test] +async fn test_publish_many() { + let broker = PubSubBroker::::new(BrokerOptions::default()); + + broker.create_topic("topic1").await.unwrap(); + broker.create_topic("topic2").await.unwrap(); + + let results = broker + .publish_many(&["topic1", "topic2"], "test".to_string(), None) + .await; + + assert_eq!(results.len(), 2); + assert!(results.get("topic1").unwrap().is_ok()); + assert!(results.get("topic2").unwrap().is_ok()); +} diff --git a/rust/src/features/router.rs b/rust/src/features/router.rs index ff0d573..95a5408 100644 --- a/rust/src/features/router.rs +++ b/rust/src/features/router.rs @@ -785,224 +785,3 @@ where } } -// ============================================================================= -// Tests -// ============================================================================= - -#[cfg(test)] -mod tests { - use super::*; - - mod topic_matcher_tests { - use super::*; - - #[test] - fn test_exact_match() { - assert!(TopicMatcher::matches("logs.error", "logs.error")); - assert!(!TopicMatcher::matches("logs.error", "logs.info")); - } - - #[test] - fn test_star_wildcard() { - assert!(TopicMatcher::matches("logs.*", "logs.error")); - assert!(TopicMatcher::matches("logs.*", "logs.info")); - assert!(!TopicMatcher::matches("logs.*", "logs.error.db")); - assert!(TopicMatcher::matches("*.error", "logs.error")); - assert!(TopicMatcher::matches("*.error", "app.error")); - } - - #[test] - fn test_hash_wildcard() { - assert!(TopicMatcher::matches("logs.#", "logs.error")); - assert!(TopicMatcher::matches("logs.#", "logs.error.db")); - assert!(TopicMatcher::matches("logs.#", "logs.a.b.c.d")); - } - - #[test] - fn test_hash_at_beginning() { - assert!(TopicMatcher::matches("#.error", "logs.error")); - assert!(TopicMatcher::matches("#.error", "a.b.c.error")); - } - - #[test] - fn test_hash_alone() { - assert!(TopicMatcher::matches("#", "anything")); - assert!(TopicMatcher::matches("#", "a.b.c")); - } - - #[test] - fn test_complex_patterns() { - assert!(TopicMatcher::matches("*.system.*", "app.system.startup")); - assert!(TopicMatcher::matches("*.system.*", "db.system.shutdown")); - assert!(!TopicMatcher::matches("*.system.*", "system.startup")); - } - - #[test] - fn test_specificity() { - let exact = TopicMatcher::specificity("logs.error.db"); - let star = TopicMatcher::specificity("logs.*.db"); - let hash = TopicMatcher::specificity("logs.#"); - - assert!(exact > star); - assert!(star > hash); - } - } - - mod exchange_tests { - use super::*; - - #[test] - fn test_direct_exchange() { - let exchange = DirectExchange::new("logs"); - - exchange.bind("errors-queue", "error", None); - exchange.bind("info-queue", "info", None); - - assert_eq!(exchange.route("error", None), vec!["errors-queue".to_string()]); - assert_eq!(exchange.route("info", None), vec!["info-queue".to_string()]); - assert!(exchange.route("debug", None).is_empty()); - } - - #[test] - fn test_direct_exchange_unbind() { - let exchange = DirectExchange::new("logs"); - - exchange.bind("queue", "key", None); - assert!(exchange.unbind("queue", "key")); - assert!(exchange.route("key", None).is_empty()); - } - - #[test] - fn test_topic_exchange() { - let exchange = TopicExchange::new("events"); - - exchange.bind("all-logs", "logs.#", None); - exchange.bind("errors-only", "logs.error", None); - - let queues = exchange.route("logs.error", None); - assert!(queues.contains(&"all-logs".to_string())); - assert!(queues.contains(&"errors-only".to_string())); - } - - #[test] - fn test_fanout_exchange() { - let exchange = FanoutExchange::new("notifications"); - - exchange.bind("email-queue", "", None); - exchange.bind("sms-queue", "", None); - exchange.bind("push-queue", "", None); - - let queues = exchange.route("", None); - assert_eq!(queues.len(), 3); - } - - #[test] - fn test_headers_exchange_match_all() { - let exchange = HeadersExchange::new("tasks"); - - let mut headers = HashMap::new(); - headers.insert("priority".to_string(), "high".to_string()); - headers.insert("type".to_string(), "system".to_string()); - - exchange.bind("urgent-system", "", Some((headers.clone(), true))); - - // Must match all headers - assert_eq!( - exchange.route("", Some(&headers)), - vec!["urgent-system".to_string()] - ); - - // Missing header - no match - let mut partial = HashMap::new(); - partial.insert("priority".to_string(), "high".to_string()); - assert!(exchange.route("", Some(&partial)).is_empty()); - } - - #[test] - fn test_headers_exchange_match_any() { - let exchange = HeadersExchange::new("tasks"); - - let mut headers = HashMap::new(); - headers.insert("priority".to_string(), "high".to_string()); - headers.insert("type".to_string(), "system".to_string()); - - exchange.bind("special", "", Some((headers.clone(), false))); - - // Any header matches - let mut partial = HashMap::new(); - partial.insert("priority".to_string(), "high".to_string()); - assert_eq!( - exchange.route("", Some(&partial)), - vec!["special".to_string()] - ); - } - } - - mod router_tests { - use super::*; - - #[test] - fn test_declare_exchange() { - let router = Router::new(); - - let result = router.declare_exchange("logs", ExchangeType::Direct); - assert!(result.is_ok()); - - let result = router.declare_exchange("logs", ExchangeType::Direct); - assert!(result.is_ok()); // Same type - OK - - let result = router.declare_exchange("logs", ExchangeType::Topic); - assert!(result.is_err()); // Different type - Error - } - - #[test] - fn test_get_exchange() { - let router = Router::new(); - router.declare_exchange("logs", ExchangeType::Direct).unwrap(); - - assert!(router.get_exchange("logs").is_some()); - assert!(router.get_exchange("nonexistent").is_none()); - } - - #[test] - fn test_delete_exchange() { - let router = Router::new(); - router.declare_exchange("logs", ExchangeType::Direct).unwrap(); - - assert!(router.delete_exchange("logs")); - assert!(!router.delete_exchange("logs")); // Already deleted - } - - #[test] - fn test_bind_unbind() { - let router = Router::new(); - router.declare_exchange("logs", ExchangeType::Direct).unwrap(); - - router.bind("logs", "queue", "key", None).unwrap(); - assert_eq!(router.route("logs", "key", None), vec!["queue".to_string()]); - - router.unbind("logs", "queue", "key"); - assert!(router.route("logs", "key", None).is_empty()); - } - - #[test] - fn test_list_exchanges() { - let router = Router::new(); - router.declare_exchange("logs", ExchangeType::Direct).unwrap(); - router.declare_exchange("events", ExchangeType::Topic).unwrap(); - - let exchanges = router.list_exchanges(); - assert_eq!(exchanges.len(), 2); - } - - #[test] - fn test_clear() { - let router = Router::new(); - router.declare_exchange("logs", ExchangeType::Direct).unwrap(); - router.declare_exchange("events", ExchangeType::Topic).unwrap(); - - router.clear(); - assert!(router.list_exchanges().is_empty()); - } - } -} diff --git a/rust/src/features/router_tests.rs b/rust/src/features/router_tests.rs new file mode 100644 index 0000000..e1723bf --- /dev/null +++ b/rust/src/features/router_tests.rs @@ -0,0 +1,217 @@ +//! Tests for the router module. + +use super::router::*; +use std::collections::HashMap; + +mod topic_matcher_tests { + use super::*; + + #[test] + fn test_exact_match() { + assert!(TopicMatcher::matches("logs.error", "logs.error")); + assert!(!TopicMatcher::matches("logs.error", "logs.info")); + } + + #[test] + fn test_star_wildcard() { + assert!(TopicMatcher::matches("logs.*", "logs.error")); + assert!(TopicMatcher::matches("logs.*", "logs.info")); + assert!(!TopicMatcher::matches("logs.*", "logs.error.db")); + assert!(TopicMatcher::matches("*.error", "logs.error")); + assert!(TopicMatcher::matches("*.error", "app.error")); + } + + #[test] + fn test_hash_wildcard() { + assert!(TopicMatcher::matches("logs.#", "logs.error")); + assert!(TopicMatcher::matches("logs.#", "logs.error.db")); + assert!(TopicMatcher::matches("logs.#", "logs.a.b.c.d")); + } + + #[test] + fn test_hash_at_beginning() { + assert!(TopicMatcher::matches("#.error", "logs.error")); + assert!(TopicMatcher::matches("#.error", "a.b.c.error")); + } + + #[test] + fn test_hash_alone() { + assert!(TopicMatcher::matches("#", "anything")); + assert!(TopicMatcher::matches("#", "a.b.c")); + } + + #[test] + fn test_complex_patterns() { + assert!(TopicMatcher::matches("*.system.*", "app.system.startup")); + assert!(TopicMatcher::matches("*.system.*", "db.system.shutdown")); + assert!(!TopicMatcher::matches("*.system.*", "system.startup")); + } + + #[test] + fn test_specificity() { + let exact = TopicMatcher::specificity("logs.error.db"); + let star = TopicMatcher::specificity("logs.*.db"); + let hash = TopicMatcher::specificity("logs.#"); + + assert!(exact > star); + assert!(star > hash); + } +} + +mod exchange_tests { + use super::*; + + #[test] + fn test_direct_exchange() { + let exchange = DirectExchange::new("logs"); + + exchange.bind("errors-queue", "error", None); + exchange.bind("info-queue", "info", None); + + assert_eq!(exchange.route("error", None), vec!["errors-queue".to_string()]); + assert_eq!(exchange.route("info", None), vec!["info-queue".to_string()]); + assert!(exchange.route("debug", None).is_empty()); + } + + #[test] + fn test_direct_exchange_unbind() { + let exchange = DirectExchange::new("logs"); + + exchange.bind("queue", "key", None); + assert!(exchange.unbind("queue", "key")); + assert!(exchange.route("key", None).is_empty()); + } + + #[test] + fn test_topic_exchange() { + let exchange = TopicExchange::new("events"); + + exchange.bind("all-logs", "logs.#", None); + exchange.bind("errors-only", "logs.error", None); + + let queues = exchange.route("logs.error", None); + assert!(queues.contains(&"all-logs".to_string())); + assert!(queues.contains(&"errors-only".to_string())); + } + + #[test] + fn test_fanout_exchange() { + let exchange = FanoutExchange::new("notifications"); + + exchange.bind("email-queue", "", None); + exchange.bind("sms-queue", "", None); + exchange.bind("push-queue", "", None); + + let queues = exchange.route("", None); + assert_eq!(queues.len(), 3); + } + + #[test] + fn test_headers_exchange_match_all() { + let exchange = HeadersExchange::new("tasks"); + + let mut headers = HashMap::new(); + headers.insert("priority".to_string(), "high".to_string()); + headers.insert("type".to_string(), "system".to_string()); + + exchange.bind("urgent-system", "", Some((headers.clone(), true))); + + // Must match all headers + assert_eq!( + exchange.route("", Some(&headers)), + vec!["urgent-system".to_string()] + ); + + // Missing header - no match + let mut partial = HashMap::new(); + partial.insert("priority".to_string(), "high".to_string()); + assert!(exchange.route("", Some(&partial)).is_empty()); + } + + #[test] + fn test_headers_exchange_match_any() { + let exchange = HeadersExchange::new("tasks"); + + let mut headers = HashMap::new(); + headers.insert("priority".to_string(), "high".to_string()); + headers.insert("type".to_string(), "system".to_string()); + + exchange.bind("special", "", Some((headers.clone(), false))); + + // Any header matches + let mut partial = HashMap::new(); + partial.insert("priority".to_string(), "high".to_string()); + assert_eq!( + exchange.route("", Some(&partial)), + vec!["special".to_string()] + ); + } +} + +mod router_tests { + use super::*; + + #[test] + fn test_declare_exchange() { + let router = Router::new(); + + let result = router.declare_exchange("logs", ExchangeType::Direct); + assert!(result.is_ok()); + + let result = router.declare_exchange("logs", ExchangeType::Direct); + assert!(result.is_ok()); // Same type - OK + + let result = router.declare_exchange("logs", ExchangeType::Topic); + assert!(result.is_err()); // Different type - Error + } + + #[test] + fn test_get_exchange() { + let router = Router::new(); + router.declare_exchange("logs", ExchangeType::Direct).unwrap(); + + assert!(router.get_exchange("logs").is_some()); + assert!(router.get_exchange("nonexistent").is_none()); + } + + #[test] + fn test_delete_exchange() { + let router = Router::new(); + router.declare_exchange("logs", ExchangeType::Direct).unwrap(); + + assert!(router.delete_exchange("logs")); + assert!(!router.delete_exchange("logs")); // Already deleted + } + + #[test] + fn test_bind_unbind() { + let router = Router::new(); + router.declare_exchange("logs", ExchangeType::Direct).unwrap(); + + router.bind("logs", "queue", "key", None).unwrap(); + assert_eq!(router.route("logs", "key", None), vec!["queue".to_string()]); + + router.unbind("logs", "queue", "key"); + assert!(router.route("logs", "key", None).is_empty()); + } + + #[test] + fn test_list_exchanges() { + let router = Router::new(); + router.declare_exchange("logs", ExchangeType::Direct).unwrap(); + router.declare_exchange("events", ExchangeType::Topic).unwrap(); + + let exchanges = router.list_exchanges(); + assert_eq!(exchanges.len(), 2); + } + + #[test] + fn test_clear() { + let router = Router::new(); + router.declare_exchange("logs", ExchangeType::Direct).unwrap(); + router.declare_exchange("events", ExchangeType::Topic).unwrap(); + + router.clear(); + assert!(router.list_exchanges().is_empty()); + } +} diff --git a/rust/src/features/scheduler.rs b/rust/src/features/scheduler.rs index 3b846f5..15f9e49 100644 --- a/rust/src/features/scheduler.rs +++ b/rust/src/features/scheduler.rs @@ -784,271 +784,3 @@ impl, Q: Queue> ScheduledQueue { } } -// ============================================================================= -// Tests -// ============================================================================= - -#[cfg(test)] -mod tests { - use super::*; - - mod cron_parser_tests { - use super::*; - - #[test] - fn test_parse_wildcard() { - let values = CronParser::parse_field("*", 0, 5); - assert_eq!(values, vec![0, 1, 2, 3, 4, 5]); - } - - #[test] - fn test_parse_single_value() { - let values = CronParser::parse_field("5", 0, 10); - assert_eq!(values, vec![5]); - } - - #[test] - fn test_parse_comma_separated() { - let values = CronParser::parse_field("1,3,5", 0, 10); - assert_eq!(values, vec![1, 3, 5]); - } - - #[test] - fn test_parse_range() { - let values = CronParser::parse_field("1-5", 0, 10); - assert_eq!(values, vec![1, 2, 3, 4, 5]); - } - - #[test] - fn test_parse_step() { - let values = CronParser::parse_field("*/2", 0, 6); - assert_eq!(values, vec![0, 2, 4, 6]); - } - - #[test] - fn test_parse_step_with_start() { - let values = CronParser::parse_field("2/3", 0, 10); - assert_eq!(values, vec![2, 5, 8]); - } - - #[test] - fn test_parse_filters_out_of_range() { - let values = CronParser::parse_field("0,5,15", 0, 10); - assert_eq!(values, vec![0, 5]); - } - - #[test] - fn test_parse_full_expression() { - let result = CronParser::parse("0 * * * *"); - assert!(result.is_ok()); - let schedule = result.unwrap(); - assert_eq!(schedule.minutes, vec![0]); - assert_eq!(schedule.hours.len(), 24); - assert_eq!(schedule.days_of_month.len(), 31); - assert_eq!(schedule.months.len(), 12); - assert_eq!(schedule.days_of_week.len(), 7); - } - - #[test] - fn test_parse_invalid_field_count() { - let result = CronParser::parse("0 * * *"); - assert!(result.is_err()); - } - - #[test] - fn test_next_run() { - let next = CronParser::next_run("* * * * *", None); - assert!(next.is_some()); - } - } - - mod scheduler_tests { - use super::*; - use crate::{LinkRef}; - - #[test] - fn test_scheduler_creation() { - let scheduler: Scheduler = Scheduler::new(); - assert!(!scheduler.is_running()); - assert_eq!(scheduler.pending_count(), 0); - } - - #[test] - fn test_schedule_item() { - let scheduler: Scheduler = Scheduler::new(); - let link = Link::new(1u64, LinkRef::Id(2), LinkRef::Id(3)); - - let item = scheduler.schedule(link, Duration::from_secs(5), None); - - assert!(item.deliver_at > item.scheduled_at); - assert_eq!(scheduler.pending_count(), 1); - } - - #[test] - fn test_schedule_with_ttl() { - let scheduler: Scheduler = Scheduler::new(); - let link = Link::new(1u64, LinkRef::Id(2), LinkRef::Id(3)); - - let item = scheduler.schedule(link, Duration::from_secs(5), Some(Duration::from_secs(3))); - - assert!(item.expires_at.is_some()); - assert!(item.expires_at.unwrap() < item.deliver_at); - } - - #[test] - fn test_cancel_item() { - let scheduler: Scheduler = Scheduler::new(); - let link = Link::new(1u64, LinkRef::Id(2), LinkRef::Id(3)); - - let item = scheduler.schedule(link, Duration::from_secs(5), None); - assert_eq!(scheduler.pending_count(), 1); - - let result = scheduler.cancel(&item.id); - assert!(result); - assert_eq!(scheduler.pending_count(), 0); - } - - #[test] - fn test_cancel_nonexistent() { - let scheduler: Scheduler = Scheduler::new(); - let result = scheduler.cancel(&999); - assert!(!result); - } - - #[test] - fn test_get_item() { - let scheduler: Scheduler = Scheduler::new(); - let link = Link::new(1u64, LinkRef::Id(2), LinkRef::Id(3)); - - let item = scheduler.schedule(link.clone(), Duration::from_secs(5), None); - let retrieved = scheduler.get(&item.id); - - assert!(retrieved.is_some()); - assert_eq!(retrieved.unwrap().link.id, link.id); - } - - #[test] - fn test_add_cron_job() { - let scheduler: Scheduler)> = Scheduler::new(); - let handler: fn(Link) = |_| {}; - - let result = scheduler.add_cron_job("test-job", "* * * * *", handler); - assert!(result.is_ok()); - - let job = scheduler.get_cron_job("test-job"); - assert!(job.is_some()); - } - - #[test] - fn test_add_invalid_cron_job() { - let scheduler: Scheduler)> = Scheduler::new(); - let handler: fn(Link) = |_| {}; - - let result = scheduler.add_cron_job("bad-job", "invalid", handler); - assert!(result.is_err()); - } - - #[test] - fn test_remove_cron_job() { - let scheduler: Scheduler)> = Scheduler::new(); - let handler: fn(Link) = |_| {}; - - scheduler.add_cron_job("test-job", "* * * * *", handler).unwrap(); - let result = scheduler.remove_cron_job("test-job"); - assert!(result); - - let job = scheduler.get_cron_job("test-job"); - assert!(job.is_none()); - } - - #[test] - fn test_enable_disable_cron_job() { - let scheduler: Scheduler)> = Scheduler::new(); - let handler: fn(Link) = |_| {}; - - scheduler.add_cron_job("test-job", "* * * * *", handler).unwrap(); - - scheduler.set_cron_job_enabled("test-job", false); - let job = scheduler.get_cron_job("test-job"); - assert!(!job.unwrap().1); - - scheduler.set_cron_job_enabled("test-job", true); - let job = scheduler.get_cron_job("test-job"); - assert!(job.unwrap().1); - } - - #[test] - fn test_list_cron_jobs() { - let scheduler: Scheduler)> = Scheduler::new(); - let handler: fn(Link) = |_| {}; - - scheduler.add_cron_job("job1", "* * * * *", handler).unwrap(); - scheduler.add_cron_job("job2", "0 * * * *", handler).unwrap(); - - let jobs = scheduler.list_cron_jobs(); - assert_eq!(jobs.len(), 2); - } - - #[test] - fn test_start_stop() { - let scheduler: Scheduler = Scheduler::new(); - - scheduler.start(); - assert!(scheduler.is_running()); - - scheduler.stop(); - assert!(!scheduler.is_running()); - } - - #[test] - fn test_stats() { - let scheduler: Scheduler)> = Scheduler::new(); - let link = Link::new(1u64, LinkRef::Id(2), LinkRef::Id(3)); - - scheduler.schedule(link, Duration::from_secs(5), None); - let handler: fn(Link) = |_| {}; - scheduler.add_cron_job("test", "* * * * *", handler).unwrap(); - - let stats = scheduler.stats(); - assert_eq!(stats.scheduled, 1); - assert_eq!(stats.pending, 1); - assert_eq!(stats.cron_jobs, 1); - } - - #[test] - fn test_clear() { - let scheduler: Scheduler)> = Scheduler::new(); - let link = Link::new(1u64, LinkRef::Id(2), LinkRef::Id(3)); - let handler: fn(Link) = |_| {}; - - scheduler.schedule(link, Duration::from_secs(5), None); - scheduler.add_cron_job("test", "* * * * *", handler).unwrap(); - scheduler.start(); - - scheduler.clear(); - - assert!(!scheduler.is_running()); - assert_eq!(scheduler.pending_count(), 0); - assert_eq!(scheduler.list_cron_jobs().len(), 0); - } - - #[test] - fn test_get_pending_items() { - let scheduler: Scheduler = Scheduler::new(); - - scheduler.schedule( - Link::new(1u64, LinkRef::Id(2), LinkRef::Id(3)), - Duration::from_secs(5), - None, - ); - scheduler.schedule( - Link::new(2u64, LinkRef::Id(4), LinkRef::Id(5)), - Duration::from_secs(10), - None, - ); - - let items = scheduler.get_pending_items(); - assert_eq!(items.len(), 2); - } - } -} diff --git a/rust/src/features/scheduler_tests.rs b/rust/src/features/scheduler_tests.rs new file mode 100644 index 0000000..9ff8b96 --- /dev/null +++ b/rust/src/features/scheduler_tests.rs @@ -0,0 +1,264 @@ +//! Tests for the scheduler module. + +use super::scheduler::*; +use crate::{Link, LinkRef}; +use std::time::Duration; + +mod cron_parser_tests { + use super::*; + + #[test] + fn test_parse_wildcard() { + let values = CronParser::parse_field("*", 0, 5); + assert_eq!(values, vec![0, 1, 2, 3, 4, 5]); + } + + #[test] + fn test_parse_single_value() { + let values = CronParser::parse_field("5", 0, 10); + assert_eq!(values, vec![5]); + } + + #[test] + fn test_parse_comma_separated() { + let values = CronParser::parse_field("1,3,5", 0, 10); + assert_eq!(values, vec![1, 3, 5]); + } + + #[test] + fn test_parse_range() { + let values = CronParser::parse_field("1-5", 0, 10); + assert_eq!(values, vec![1, 2, 3, 4, 5]); + } + + #[test] + fn test_parse_step() { + let values = CronParser::parse_field("*/2", 0, 6); + assert_eq!(values, vec![0, 2, 4, 6]); + } + + #[test] + fn test_parse_step_with_start() { + let values = CronParser::parse_field("2/3", 0, 10); + assert_eq!(values, vec![2, 5, 8]); + } + + #[test] + fn test_parse_filters_out_of_range() { + let values = CronParser::parse_field("0,5,15", 0, 10); + assert_eq!(values, vec![0, 5]); + } + + #[test] + fn test_parse_full_expression() { + let result = CronParser::parse("0 * * * *"); + assert!(result.is_ok()); + let schedule = result.unwrap(); + assert_eq!(schedule.minutes, vec![0]); + assert_eq!(schedule.hours.len(), 24); + assert_eq!(schedule.days_of_month.len(), 31); + assert_eq!(schedule.months.len(), 12); + assert_eq!(schedule.days_of_week.len(), 7); + } + + #[test] + fn test_parse_invalid_field_count() { + let result = CronParser::parse("0 * * *"); + assert!(result.is_err()); + } + + #[test] + fn test_next_run() { + let next = CronParser::next_run("* * * * *", None); + assert!(next.is_some()); + } +} + +mod scheduler_tests { + use super::*; + + #[test] + fn test_scheduler_creation() { + let scheduler: Scheduler = Scheduler::new(); + assert!(!scheduler.is_running()); + assert_eq!(scheduler.pending_count(), 0); + } + + #[test] + fn test_schedule_item() { + let scheduler: Scheduler = Scheduler::new(); + let link = Link::new(1u64, LinkRef::Id(2), LinkRef::Id(3)); + + let item = scheduler.schedule(link, Duration::from_secs(5), None); + + assert!(item.deliver_at > item.scheduled_at); + assert_eq!(scheduler.pending_count(), 1); + } + + #[test] + fn test_schedule_with_ttl() { + let scheduler: Scheduler = Scheduler::new(); + let link = Link::new(1u64, LinkRef::Id(2), LinkRef::Id(3)); + + let item = scheduler.schedule(link, Duration::from_secs(5), Some(Duration::from_secs(3))); + + assert!(item.expires_at.is_some()); + assert!(item.expires_at.unwrap() < item.deliver_at); + } + + #[test] + fn test_cancel_item() { + let scheduler: Scheduler = Scheduler::new(); + let link = Link::new(1u64, LinkRef::Id(2), LinkRef::Id(3)); + + let item = scheduler.schedule(link, Duration::from_secs(5), None); + assert_eq!(scheduler.pending_count(), 1); + + let result = scheduler.cancel(&item.id); + assert!(result); + assert_eq!(scheduler.pending_count(), 0); + } + + #[test] + fn test_cancel_nonexistent() { + let scheduler: Scheduler = Scheduler::new(); + let result = scheduler.cancel(&999); + assert!(!result); + } + + #[test] + fn test_get_item() { + let scheduler: Scheduler = Scheduler::new(); + let link = Link::new(1u64, LinkRef::Id(2), LinkRef::Id(3)); + + let item = scheduler.schedule(link.clone(), Duration::from_secs(5), None); + let retrieved = scheduler.get(&item.id); + + assert!(retrieved.is_some()); + assert_eq!(retrieved.unwrap().link.id, link.id); + } + + #[test] + fn test_add_cron_job() { + let scheduler: Scheduler)> = Scheduler::new(); + let handler: fn(Link) = |_| {}; + + let result = scheduler.add_cron_job("test-job", "* * * * *", handler); + assert!(result.is_ok()); + + let job = scheduler.get_cron_job("test-job"); + assert!(job.is_some()); + } + + #[test] + fn test_add_invalid_cron_job() { + let scheduler: Scheduler)> = Scheduler::new(); + let handler: fn(Link) = |_| {}; + + let result = scheduler.add_cron_job("bad-job", "invalid", handler); + assert!(result.is_err()); + } + + #[test] + fn test_remove_cron_job() { + let scheduler: Scheduler)> = Scheduler::new(); + let handler: fn(Link) = |_| {}; + + scheduler.add_cron_job("test-job", "* * * * *", handler).unwrap(); + let result = scheduler.remove_cron_job("test-job"); + assert!(result); + + let job = scheduler.get_cron_job("test-job"); + assert!(job.is_none()); + } + + #[test] + fn test_enable_disable_cron_job() { + let scheduler: Scheduler)> = Scheduler::new(); + let handler: fn(Link) = |_| {}; + + scheduler.add_cron_job("test-job", "* * * * *", handler).unwrap(); + + scheduler.set_cron_job_enabled("test-job", false); + let job = scheduler.get_cron_job("test-job"); + assert!(!job.unwrap().1); + + scheduler.set_cron_job_enabled("test-job", true); + let job = scheduler.get_cron_job("test-job"); + assert!(job.unwrap().1); + } + + #[test] + fn test_list_cron_jobs() { + let scheduler: Scheduler)> = Scheduler::new(); + let handler: fn(Link) = |_| {}; + + scheduler.add_cron_job("job1", "* * * * *", handler).unwrap(); + scheduler.add_cron_job("job2", "0 * * * *", handler).unwrap(); + + let jobs = scheduler.list_cron_jobs(); + assert_eq!(jobs.len(), 2); + } + + #[test] + fn test_start_stop() { + let scheduler: Scheduler = Scheduler::new(); + + scheduler.start(); + assert!(scheduler.is_running()); + + scheduler.stop(); + assert!(!scheduler.is_running()); + } + + #[test] + fn test_stats() { + let scheduler: Scheduler)> = Scheduler::new(); + let link = Link::new(1u64, LinkRef::Id(2), LinkRef::Id(3)); + + scheduler.schedule(link, Duration::from_secs(5), None); + let handler: fn(Link) = |_| {}; + scheduler.add_cron_job("test", "* * * * *", handler).unwrap(); + + let stats = scheduler.stats(); + assert_eq!(stats.scheduled, 1); + assert_eq!(stats.pending, 1); + assert_eq!(stats.cron_jobs, 1); + } + + #[test] + fn test_clear() { + let scheduler: Scheduler)> = Scheduler::new(); + let link = Link::new(1u64, LinkRef::Id(2), LinkRef::Id(3)); + let handler: fn(Link) = |_| {}; + + scheduler.schedule(link, Duration::from_secs(5), None); + scheduler.add_cron_job("test", "* * * * *", handler).unwrap(); + scheduler.start(); + + scheduler.clear(); + + assert!(!scheduler.is_running()); + assert_eq!(scheduler.pending_count(), 0); + assert_eq!(scheduler.list_cron_jobs().len(), 0); + } + + #[test] + fn test_get_pending_items() { + let scheduler: Scheduler = Scheduler::new(); + + scheduler.schedule( + Link::new(1u64, LinkRef::Id(2), LinkRef::Id(3)), + Duration::from_secs(5), + None, + ); + scheduler.schedule( + Link::new(2u64, LinkRef::Id(4), LinkRef::Id(5)), + Duration::from_secs(10), + None, + ); + + let items = scheduler.get_pending_items(); + assert_eq!(items.len(), 2); + } +} From 8e7a8d7dc823a1f66bc253f898559b7a6854367e Mon Sep 17 00:00:00 2001 From: konard Date: Tue, 20 Jan 2026 23:42:49 +0100 Subject: [PATCH 06/10] chore(js): Add changeset for Phase 7 advanced queue features Add changeset documenting the minor version bump for: - Scheduling (CronParser, Scheduler, ScheduledQueue) - Rate Limiting (SlidingWindowCounter, TokenBucket, RateLimiter, RateLimitedQueue) - Routing (TopicMatcher, Exchanges, Router, RoutedQueueManager) - Pub/Sub (MessageFilter, PubSubBroker, ObservableQueue, QueueBackedPubSub) Co-Authored-By: Claude Opus 4.5 --- js/.changeset/phase7-advanced-features.md | 10 ++++++++++ 1 file changed, 10 insertions(+) create mode 100644 js/.changeset/phase7-advanced-features.md diff --git a/js/.changeset/phase7-advanced-features.md b/js/.changeset/phase7-advanced-features.md new file mode 100644 index 0000000..a1069e0 --- /dev/null +++ b/js/.changeset/phase7-advanced-features.md @@ -0,0 +1,10 @@ +--- +"links-queue-js": minor +--- + +Add Phase 7 advanced queue features + +- **Scheduling**: Implement `CronParser`, `Scheduler`, and `ScheduledQueue` for delayed messages, cron jobs, TTL, and message expiration +- **Rate Limiting**: Implement `SlidingWindowCounter`, `TokenBucket`, `RateLimiter`, and `RateLimitedQueue` with sliding window algorithm +- **Routing**: Implement `TopicMatcher`, `DirectExchange`, `TopicExchange`, `FanoutExchange`, `HeadersExchange`, `Router`, and `RoutedQueueManager` for topic-based routing with AMQP-style wildcards +- **Pub/Sub**: Implement `MessageFilter`, `PubSubBroker`, `ObservableQueue`, and `QueueBackedPubSub` for publish/subscribe patterns with message filtering From 00bcdddd4c7a43a8b5a3db25c4f4f75a80ae23ee Mon Sep 17 00:00:00 2001 From: konard Date: Wed, 21 Jan 2026 09:35:03 +0100 Subject: [PATCH 07/10] fix(js): Fix CI failures - prettier formatting and Deno timer leaks - Fix prettier formatting in changeset file - Fix Deno test timer leaks in scheduler.test.js by using longer check intervals and ensuring proper cleanup - Fix Deno test timer leak in pubsub.test.js by checking subscription active state after async operations before setting new timers - Add small delay in pubsub test to allow consumer to set up timer before stopping The Deno runtime has stricter timer leak detection than Node.js. These changes ensure timers are properly cleaned up in tests. Co-Authored-By: Claude Opus 4.5 --- js/.changeset/phase7-advanced-features.md | 2 +- js/src/features/pubsub.js | 5 +++++ js/tests/pubsub.test.js | 4 ++++ js/tests/scheduler.test.js | 20 ++++++++++++++------ 4 files changed, 24 insertions(+), 7 deletions(-) diff --git a/js/.changeset/phase7-advanced-features.md b/js/.changeset/phase7-advanced-features.md index a1069e0..88082c9 100644 --- a/js/.changeset/phase7-advanced-features.md +++ b/js/.changeset/phase7-advanced-features.md @@ -1,5 +1,5 @@ --- -"links-queue-js": minor +'links-queue-js': minor --- Add Phase 7 advanced queue features diff --git a/js/src/features/pubsub.js b/js/src/features/pubsub.js index 6ae483a..d700ad8 100644 --- a/js/src/features/pubsub.js +++ b/js/src/features/pubsub.js @@ -1042,6 +1042,11 @@ export class QueueBackedPubSub { } } + // Check active again after async operations to avoid timer leak + if (!subscription.active) { + return; + } + subscription.consumerHandle = globalThis.setTimeout( consume, pollInterval diff --git a/js/tests/pubsub.test.js b/js/tests/pubsub.test.js index 0c21536..f4c5872 100644 --- a/js/tests/pubsub.test.js +++ b/js/tests/pubsub.test.js @@ -818,6 +818,10 @@ describe('QueueBackedPubSub', () => { const subs = pubsub.listSubscriptions(); expect(subs.find((s) => s.id === sub.id).active).toBe(true); + // Small delay to allow the consumer's first iteration to complete + // and set up its timer, which we can then properly clear + await new Promise((resolve) => globalThis.setTimeout(resolve, 10)); + pubsub.stopConsumer(sub.id); const subsAfter = pubsub.listSubscriptions(); diff --git a/js/tests/scheduler.test.js b/js/tests/scheduler.test.js index 211a95a..477f48a 100644 --- a/js/tests/scheduler.test.js +++ b/js/tests/scheduler.test.js @@ -289,7 +289,7 @@ describe('Scheduler', () => { describe('start/stop', () => { it('should start and stop the scheduler', () => { - const scheduler = new Scheduler(); + const scheduler = new Scheduler({ checkInterval: 60000 }); // Long interval to avoid timer firing during test scheduler.start(); expect(scheduler.isRunning).toBe(true); @@ -299,7 +299,7 @@ describe('Scheduler', () => { }); it('should not start twice', () => { - const scheduler = new Scheduler(); + const scheduler = new Scheduler({ checkInterval: 60000 }); // Long interval to avoid timer firing during test scheduler.start(); scheduler.start(); // Should be no-op @@ -329,7 +329,7 @@ describe('Scheduler', () => { describe('clear', () => { it('should clear all state', () => { - const scheduler = new Scheduler(); + const scheduler = new Scheduler({ checkInterval: 60000 }); // Long interval to avoid timer firing during test const link = { id: 1, source: 'test', target: 'item' }; scheduler.schedule(link, { delay: 5000 }); @@ -399,9 +399,13 @@ describe('ScheduledQueue', () => { describe('constructor', () => { it('should create scheduled queue from base queue', () => { const mockQueue = createMockQueue(); - const scheduledQueue = new ScheduledQueue(mockQueue); + const scheduledQueue = new ScheduledQueue(mockQueue, { + checkInterval: 60000, + }); // Long interval to avoid timer firing during test expect(scheduledQueue.name).toBe('test-queue'); + + scheduledQueue.stop(); }); }); @@ -419,7 +423,9 @@ describe('ScheduledQueue', () => { it('should schedule with delay', async () => { const mockQueue = createMockQueue(); - const scheduledQueue = new ScheduledQueue(mockQueue); + const scheduledQueue = new ScheduledQueue(mockQueue, { + checkInterval: 60000, + }); // Long interval to avoid timer firing during test const link = { id: 1, source: 'test', target: 'item' }; const result = await scheduledQueue.enqueue(link, { delay: 5000 }); @@ -488,7 +494,9 @@ describe('ScheduledQueue', () => { describe('cancelScheduled', () => { it('should cancel a scheduled item', async () => { const mockQueue = createMockQueue(); - const scheduledQueue = new ScheduledQueue(mockQueue); + const scheduledQueue = new ScheduledQueue(mockQueue, { + checkInterval: 60000, + }); // Long interval to avoid timer firing during test await scheduledQueue.enqueue( { id: 1, source: 'a', target: 'b' }, From d3efa035bdaf03dd97c3a89d1ad8cdd0c0bb8e77 Mon Sep 17 00:00:00 2001 From: konard Date: Wed, 21 Jan 2026 09:50:44 +0100 Subject: [PATCH 08/10] chore: Trigger CI rebuild From 4a5b0102d755a1ec5aa40bdeafe40ab38eb86583 Mon Sep 17 00:00:00 2001 From: konard Date: Wed, 21 Jan 2026 10:00:25 +0100 Subject: [PATCH 09/10] ci(js): Add timeout-minutes to prevent hung jobs - Add 30-minute timeout to test jobs - Add 10-minute timeout to lint job - Add 15-minute timeout to coverage job This prevents jobs from hanging indefinitely and blocking CI pipelines. --- .github/workflows/js.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.github/workflows/js.yml b/.github/workflows/js.yml index 43e1369..ef4d933 100644 --- a/.github/workflows/js.yml +++ b/.github/workflows/js.yml @@ -107,6 +107,7 @@ jobs: lint: name: Lint and Format Check runs-on: ubuntu-latest + timeout-minutes: 10 needs: [detect-changes] if: | github.event_name == 'push' || @@ -140,6 +141,7 @@ jobs: test: name: Test (${{ matrix.runtime }} on ${{ matrix.os }}) runs-on: ${{ matrix.os }} + timeout-minutes: 30 needs: [detect-changes, changeset-check] if: always() && (github.event_name == 'push' || github.event_name == 'workflow_dispatch' || needs.changeset-check.result == 'success' || needs.changeset-check.result == 'skipped') strategy: @@ -192,6 +194,7 @@ jobs: coverage: name: Test Coverage runs-on: ubuntu-latest + timeout-minutes: 15 needs: [detect-changes, changeset-check] if: always() && (github.event_name == 'push' || github.event_name == 'workflow_dispatch' || needs.changeset-check.result == 'success' || needs.changeset-check.result == 'skipped') steps: From e55d9d580b9a39179227e04c4ef4352debe6daae Mon Sep 17 00:00:00 2001 From: konard Date: Wed, 21 Jan 2026 11:53:08 +0100 Subject: [PATCH 10/10] fix(js): Fix scheduler async timer leak race condition Add check for _running flag after async operations in _tick() method to prevent timer leaks when stop() is called during processing. The race condition occurred because: 1. start() calls _tick() asynchronously 2. _tick() awaits async processing (processScheduledItems/processCronJobs) 3. stop() can be called during this await, setting _running=false 4. After await completes, _tick() was still setting a new timer The fix checks _running again after the await completes, before scheduling the next tick, ensuring proper cleanup. Co-Authored-By: Claude Opus 4.5 --- js/src/features/scheduler.js | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/js/src/features/scheduler.js b/js/src/features/scheduler.js index 6fb04a4..591b71a 100644 --- a/js/src/features/scheduler.js +++ b/js/src/features/scheduler.js @@ -511,6 +511,12 @@ export class Scheduler { console.error('Scheduler tick error:', error); } + // Check if still running after async operations before scheduling next tick + // This prevents timer leaks when stop() is called during processing + if (!this._running) { + return; + } + // Schedule next tick this._timerHandle = globalThis.setTimeout( () => this._tick(),