From 99a13bacfdcb08340490771c57f2c86d910a47f1 Mon Sep 17 00:00:00 2001 From: Danilo Alonso Date: Sat, 27 Dec 2025 20:26:57 -0500 Subject: [PATCH 01/13] feat(hooks): initial beta release of @logosdx/hooks A lightweight, type-safe hook system for extending function behavior with before/after/error extension points. - HookEngine class with make(), wrap(), extend(), and clear() methods - Extension options: once, ignoreOnFail - Context API: setArgs, setResult, returnEarly, fail, removeHook - HookError class and isHookError() type guard - Comprehensive test suite (45 tests) - Full JSDoc documentation --- .changeset/pre.json | 17 + .changeset/rich-pears-jam.md | 13 + .github/workflows/publish-beta.yml | 67 +++ packages/hooks/LICENSE | 12 + packages/hooks/package.json | 33 ++ packages/hooks/src/index.ts | 440 ++++++++++++++ packages/hooks/tsconfig.json | 5 + pnpm-lock.yaml | 16 +- tests/src/hooks.ts | 917 +++++++++++++++++++++++++++++ 9 files changed, 1513 insertions(+), 7 deletions(-) create mode 100644 .changeset/pre.json create mode 100644 .changeset/rich-pears-jam.md create mode 100644 .github/workflows/publish-beta.yml create mode 100644 packages/hooks/LICENSE create mode 100644 packages/hooks/package.json create mode 100644 packages/hooks/src/index.ts create mode 100644 packages/hooks/tsconfig.json create mode 100644 tests/src/hooks.ts diff --git a/.changeset/pre.json b/.changeset/pre.json new file mode 100644 index 0000000..219c1cb --- /dev/null +++ b/.changeset/pre.json @@ -0,0 +1,17 @@ +{ + "mode": "pre", + "tag": "beta", + "initialVersions": { + "@logosdx/dom": "2.0.15", + "@logosdx/fetch": "7.0.3", + "@logosdx/hooks": "0.0.1", + "@logosdx/kit": "4.0.3", + "@logosdx/localize": "1.0.19", + "@logosdx/observer": "2.2.0", + "@logosdx/state-machine": "1.0.19", + "@logosdx/storage": "1.0.19", + "@logosdx/utils": "5.0.0", + "@logosdx/tests": "0.0.1" + }, + "changesets": [] +} diff --git a/.changeset/rich-pears-jam.md b/.changeset/rich-pears-jam.md new file mode 100644 
index 0000000..833321f --- /dev/null +++ b/.changeset/rich-pears-jam.md @@ -0,0 +1,13 @@ +--- +"@logosdx/hooks": major +--- + +Initial beta release of @logosdx/hooks - a lightweight, type-safe hook system for extending function behavior. + +Features: + +- `HookEngine` class for wrapping functions with before/after/error extension points +- `make()` and `wrap()` methods for creating hookable functions +- Extension options: `once`, `ignoreOnFail` +- Context methods: `setArgs`, `setResult`, `returnEarly`, `fail`, `removeHook` +- `HookError` and `isHookError()` for typed error handling diff --git a/.github/workflows/publish-beta.yml b/.github/workflows/publish-beta.yml new file mode 100644 index 0000000..738a68b --- /dev/null +++ b/.github/workflows/publish-beta.yml @@ -0,0 +1,67 @@ +name: Publish Beta +on: + push: + branches: + - "beta" + workflow_dispatch: + +concurrency: ${{ github.workflow }}-${{ github.ref }} + +permissions: + id-token: write # Required for OIDC + contents: write + actions: write + issues: write + pull-requests: write + + +jobs: + check-prerelease: + runs-on: ubuntu-latest + outputs: + is_prerelease: ${{ steps.check.outputs.is_prerelease }} + steps: + + - uses: actions/checkout@v6 + + - name: Check if pre-release mode is enabled + id: check + run: | + if [ -f ".changeset/pre.json" ]; then + echo "Pre-release mode is enabled" + echo "is_prerelease=true" >> $GITHUB_OUTPUT + else + echo "Pre-release mode is not enabled, skipping publish" + echo "is_prerelease=false" >> $GITHUB_OUTPUT + fi + + publish: + needs: check-prerelease + if: needs.check-prerelease.outputs.is_prerelease == 'true' + runs-on: ubuntu-latest + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + NPM_TOKEN: ${{ secrets.NPM_TOKEN }} + steps: + + - uses: actions/checkout@v6 + + - uses: pnpm/action-setup@v4 + with: + version: 10 + + - uses: actions/setup-node@v6 + with: + node-version: 22.x + cache: "pnpm" + + - run: pnpm install --frozen-lockfile + + - run: pnpm recursive run build + + 
- name: Publish + uses: changesets/action@v1 + with: + publish: pnpm run release + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/packages/hooks/LICENSE b/packages/hooks/LICENSE new file mode 100644 index 0000000..b701955 --- /dev/null +++ b/packages/hooks/LICENSE @@ -0,0 +1,12 @@ +Copyright 2023 LogosDX contributors + +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS “AS IS” AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ diff --git a/packages/hooks/package.json b/packages/hooks/package.json new file mode 100644 index 0000000..8400a39 --- /dev/null +++ b/packages/hooks/package.json @@ -0,0 +1,33 @@ +{ + "name": "@logosdx/hooks", + "version": "0.0.1", + "description": "", + "exports": { + ".": { + "types": "./dist/types/index.d.ts", + "require": "./dist/cjs/index.js", + "import": "./dist/esm/index.mjs" + } + }, + "scripts": { + "build": "node ../../scripts/build.mjs", + "lint": "pnpm tsc --noEmit --project tsconfig.json" + }, + "files": [ + "dist/", + "docs/", + "readme.md", + "LICENSE" + ], + "keywords": [], + "homepage": "https://logosdx.dev/", + "bugs": { + "url": "https://github.com/logosdx/monorepo/issues", + "email": "danilo@alonso.network" + }, + "author": "Danilo Alonso ", + "license": "BSD-3-Clause", + "dependencies": { + "@logosdx/utils": "workspace:^" + } +} diff --git a/packages/hooks/src/index.ts b/packages/hooks/src/index.ts new file mode 100644 index 0000000..43534eb --- /dev/null +++ b/packages/hooks/src/index.ts @@ -0,0 +1,440 @@ +import { + assert, + AsyncFunc, + attempt, + isFunction, + isObject, + FunctionProps +} from '@logosdx/utils'; + +/** + * Error thrown when a hook extension calls `fail()` or when hook execution fails. 
+ * + * @example + * engine.extend('save', 'before', async (ctx) => { + * if (!ctx.args[0].isValid) { + * ctx.fail('Validation failed'); + * } + * }); + * + * const [, err] = await attempt(() => app.save(data)); + * if (isHookError(err)) { + * console.log(err.hookName); // 'save' + * console.log(err.extPoint); // 'before' + * } + */ +export class HookError extends Error { + + /** Name of the hook where the error occurred */ + hookName?: string; + + /** Extension point where the error occurred: 'before', 'after', or 'error' */ + extPoint?: string; + + /** Original error if `fail()` was called with an Error instance */ + originalError?: Error; + + /** Whether the hook was explicitly aborted via `fail()` */ + aborted = false; + + constructor(message: string) { + + super(message) + } +} + +/** + * Type guard to check if an error is a HookError. + * + * @example + * const [result, err] = await attempt(() => app.save(data)); + * if (isHookError(err)) { + * console.log(`Hook "${err.hookName}" failed at "${err.extPoint}"`); + * } + */ +export const isHookError = (error: unknown): error is HookError => { + + return (error as HookError)?.constructor?.name === HookError.name +} + +interface HookShape { + args: Parameters, + results?: Awaited> +} + +/** + * Context object passed to hook extension callbacks. + * Provides access to arguments, results, and control methods. 
+ * + * @example + * engine.extend('fetch', 'before', async (ctx) => { + * // Read current arguments + * const [url, options] = ctx.args; + * + * // Modify arguments before the original function runs + * ctx.setArgs([url, { ...options, cache: 'force-cache' }]); + * + * // Or skip the original function entirely + * if (isCached(url)) { + * ctx.setResult(getCached(url)); + * ctx.returnEarly(); + * } + * }); + */ +export interface HookContext extends HookShape { + + /** Current extension point: 'before', 'after', or 'error' */ + point: keyof Hook; + + /** Error from the original function (only set in 'error' extensions) */ + error?: unknown, + + /** Abort hook execution with an error. Throws a HookError. */ + fail: (error?: unknown) => never, + + /** Replace the arguments passed to the original function */ + setArgs: (next: Parameters) => void, + + /** Replace the result returned from the hook chain */ + setResult: (next: Awaited>) => void, + + /** Skip the original function and return early with the current result */ + returnEarly: () => void; + + /** Remove this extension from the hook (useful with `once` behavior) */ + removeHook: () => void; +} + +export type HookFn = (ctx: HookContext) => Promise; + +class Hook { + before: Set> = new Set(); + after: Set> = new Set(); + error: Set> = new Set(); +} + +const allowedExtPoints = new Set([ + 'before', + 'after', + 'error' +]); + +type HookExtOptions = { + callback: HookFn, + once?: true, + ignoreOnFail?: true +} + +type HookExtOrOptions = HookFn | HookExtOptions + +type MakeHookOptions = { + bindTo?: any +} + +type FuncOrNever = T extends AsyncFunc ? T : never; + +/** + * A lightweight, type-safe hook system for extending function behavior. + * + * HookEngine allows you to wrap functions and add extensions that run + * before, after, or on error. Extensions can modify arguments, change + * results, or abort execution entirely. 
+ * + * @example + * interface MyApp { + * save(data: Data): Promise; + * load(id: string): Promise; + * } + * + * const app = new MyAppImpl(); + * const hooks = new HookEngine(); + * + * // Wrap a method to make it hookable + * hooks.wrap(app, 'save'); + * + * // Add a validation extension + * hooks.extend('save', 'before', async (ctx) => { + * if (!ctx.args[0].isValid) { + * ctx.fail('Validation failed'); + * } + * }); + * + * // Add logging extension + * hooks.extend('save', 'after', async (ctx) => { + * console.log('Saved:', ctx.results); + * }); + * + * @typeParam Shape - Interface defining the hookable functions + */ +export class HookEngine { + + #registered = new Set(); + #hooks: Map>> = new Map(); + #hookFnOpts = new WeakMap(); + #wrapped = new WeakMap(); + + /** + * Add an extension to a registered hook. + * + * Extensions run at specific points in the hook lifecycle: + * - `before`: Runs before the original function. Can modify args or return early. + * - `after`: Runs after successful execution. Can modify the result. + * - `error`: Runs when the original function throws. Can handle or transform errors. + * + * @param name - Name of the registered hook to extend + * @param extensionPoint - When to run: 'before', 'after', or 'error' + * @param cbOrOpts - Extension callback or options object + * @returns Cleanup function to remove the extension + * + * @example + * // Simple callback + * const cleanup = hooks.extend('save', 'before', async (ctx) => { + * console.log('About to save:', ctx.args); + * }); + * + * // With options + * hooks.extend('save', 'after', { + * callback: async (ctx) => { console.log('Saved!'); }, + * once: true, // Remove after first run + * ignoreOnFail: true // Don't throw if this extension fails + * }); + * + * // Later: remove the extension + * cleanup(); + */ + extend>( + name: K, + extensionPoint: keyof Hook>, + cbOrOpts: HookExtOrOptions> + ) { + const callback = typeof cbOrOpts === 'function' ? 
cbOrOpts : cbOrOpts?.callback; + const opts = typeof cbOrOpts === 'function' ? {} as HookExtOptions> : cbOrOpts; + + assert(typeof name === 'string', '"name" must be a string'); + assert(this.#registered.has(name), `'${name.toString()}' is not a registered hook`); + assert(typeof extensionPoint === 'string', '"extensionPoint" must be a string'); + assert(allowedExtPoints.has(extensionPoint), `'${extensionPoint}' is not a valid extension point`); + assert(isFunction(callback) || isObject(cbOrOpts), '"cbOrOpts" must be a extension callback or options'); + assert(isFunction(callback), 'callback must be a function'); + + const hook = this.#hooks.get(name) ?? new Hook>(); + + hook[extensionPoint].add(callback); + + this.#hooks.set(name, hook); + this.#hookFnOpts.set(callback, opts); + + /** + * Removes the registered hook extension + */ + return () => { + + hook[extensionPoint].delete(callback); + } + } + + /** + * Register a function as a hookable and return the wrapped version. + * + * The wrapped function behaves identically to the original but allows + * extensions to be added via `extend()`. Use `wrap()` for a simpler API + * when working with object methods. 
+ * + * @param name - Unique name for this hook (must match a key in Shape) + * @param cb - The original function to wrap + * @param opts - Options for the wrapped function + * @returns Wrapped function with hook support + * + * @example + * const hooks = new HookEngine<{ fetch: typeof fetch }>(); + * + * const hookedFetch = hooks.make('fetch', fetch); + * + * hooks.extend('fetch', 'before', async (ctx) => { + * console.log('Fetching:', ctx.args[0]); + * }); + * + * await hookedFetch('/api/data'); + */ + make>( + name: K, + cb: FuncOrNever, + opts: MakeHookOptions> = {} + ) { + + assert(typeof name === 'string', '"name" must be a string'); + assert(!this.#registered.has(name), `'${name.toString()}' hook is already registered`); + assert(isFunction(cb), '"cb" must be a function'); + assert(isObject(opts), '"opts" must be an object'); + + this.#registered.add(name); + + if (this.#wrapped.has(cb)) { + + return this.#wrapped.get(cb) as FuncOrNever; + } + + const callback = async (...origArgs: Parameters>) => { + + let returnEarly = false; + + const hook = this.#hooks.get(name)!; + + const context: HookContext> = { + args: origArgs, + point: 'before', + removeHook() {}, + returnEarly() { + returnEarly = true; + }, + setArgs(next) { + + assert( + Array.isArray(next), + `setArgs: next args for '${context.point}' '${name.toString()}' must be an array of arguments` + ); + + context.args = next; + }, + setResult(next) { + context.results = next; + }, + fail(reason) { + + const error = new HookError(`Hook Aborted: ${reason ?? 'unknown'}`); + + if (reason instanceof Error) { + + error.originalError = reason; + } + + error.extPoint = context.point; + error.hookName = name as string; + + throw error; + }, + } + + const { before, after, error: errorFns } = hook ?? 
new Hook>(); + + const handleSet = async ( + which: typeof before, + point: keyof typeof hook + ) => { + + context.point = point; + + for (const fn of which) { + + context.removeHook = () => which.delete(fn); + + const opts: HookExtOptions> = this.#hookFnOpts.get(fn); + const [, err] = await attempt(() => fn({ ...context })); + + if (opts.once) context.removeHook(); + + if (err && opts.ignoreOnFail !== true) { + throw err; + } + + if (returnEarly) break; + } + } + + await handleSet(before, 'before'); + + if (returnEarly) return context.results! + + const [res, err] = await attempt(() => cb.apply(opts?.bindTo || cb, context.args)); + + context.results = res; + context.error = err; + + if (err) { + context.point = 'error'; + + await handleSet(errorFns, 'error'); + + throw err; + } + + await handleSet(after, 'after'); + + return context.results!; + } + + return callback as FuncOrNever; + } + + /** + * Wrap an object method in-place to make it hookable. + * + * This is a convenience method that combines `make()` with automatic + * binding and reassignment. The method is replaced on the instance + * with the wrapped version. + * + * @param instance - Object containing the method to wrap + * @param name - Name of the method to wrap + * @param opts - Additional options + * + * @example + * class UserService { + * async save(user: User) { ... 
} + * } + * + * const service = new UserService(); + * const hooks = new HookEngine(); + * + * hooks.wrap(service, 'save'); + * + * // Now service.save() is hookable + * hooks.extend('save', 'before', async (ctx) => { + * console.log('Saving user:', ctx.args[0]); + * }); + */ + wrap>( + instance: Shape, + name: K, + opts?: MakeHookOptions> + ) { + + assert(isObject(instance), '"instance" must be an object'); + + const wrapped = this.make( + name, + instance[name] as FuncOrNever, + { + bindTo: instance, + ...opts + } + ); + + this.#wrapped.set(wrapped, instance[name] as AsyncFunc); + + instance[name] = wrapped as Shape[K]; + + } + + /** + * Clear all registered hooks and extensions. + * + * After calling this method, all hooks are unregistered and all + * extensions are removed. Previously wrapped functions will continue + * to work but without any extensions. + * + * @example + * hooks.wrap(app, 'save'); + * hooks.extend('save', 'before', validator); + * + * // Reset for testing + * hooks.clear(); + * + * // app.save() still works, but validator no longer runs + */ + clear() { + + this.#registered.clear(); + this.#hooks.clear(); + this.#hookFnOpts = new WeakMap(); + } +} \ No newline at end of file diff --git a/packages/hooks/tsconfig.json b/packages/hooks/tsconfig.json new file mode 100644 index 0000000..ecdc3a2 --- /dev/null +++ b/packages/hooks/tsconfig.json @@ -0,0 +1,5 @@ +{ + "extends": "../../tsconfig.json", + "compilerOptions": { "noEmit": false, "declarationDir": "./dist/types" }, + "include": ["src"] +} \ No newline at end of file diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 316a5a0..8aa9ba4 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -64,6 +64,12 @@ importers: specifier: workspace:^ version: link:../utils + packages/hooks: + dependencies: + '@logosdx/utils': + specifier: workspace:^ + version: link:../utils + packages/kit: dependencies: '@logosdx/fetch': @@ -4032,10 +4038,6 @@ snapshots: whatwg-mimetype: 4.0.0 whatwg-url: 15.1.0 - 
debug@4.3.4: - dependencies: - ms: 2.1.2 - debug@4.3.4(supports-color@5.5.0): dependencies: ms: 2.1.2 @@ -4302,7 +4304,7 @@ snapshots: http-proxy-agent@7.0.2: dependencies: agent-base: 7.1.3 - debug: 4.3.4 + debug: 4.3.4(supports-color@5.5.0) transitivePeerDependencies: - supports-color @@ -4314,7 +4316,7 @@ snapshots: https-proxy-agent@7.0.6: dependencies: agent-base: 7.1.3 - debug: 4.3.4 + debug: 4.3.4(supports-color@5.5.0) transitivePeerDependencies: - supports-color @@ -4381,7 +4383,7 @@ snapshots: istanbul-lib-source-maps@5.0.6: dependencies: '@jridgewell/trace-mapping': 0.3.31 - debug: 4.3.4 + debug: 4.3.4(supports-color@5.5.0) istanbul-lib-coverage: 3.2.2 transitivePeerDependencies: - supports-color diff --git a/tests/src/hooks.ts b/tests/src/hooks.ts new file mode 100644 index 0000000..34bd7a6 --- /dev/null +++ b/tests/src/hooks.ts @@ -0,0 +1,917 @@ +import { + describe, + it, + beforeEach, + expect, + vi +} from 'vitest' + +import { HookEngine, HookError, isHookError } from '../../packages/hooks/src/index.ts'; +import { attempt, noop } from '../../packages/utils/src/index.ts'; + +describe('@logosdx/hooks', () => { + + const startFn = vi.fn(); + const stopFn = vi.fn(); + const requestFn = vi.fn(); + const beforeFn = vi.fn(); + const afterFn = vi.fn(); + const errorFn = vi.fn(); + + class TestApp { + + notAFunc = 'hello'; + start(...args: any[]) { return startFn(...args) } + stop(...args: any[]) { return stopFn(...args) } + request(...args: any[]) { return requestFn(...args) } + }; + + + beforeEach(() => { + + vi.resetAllMocks(); + }); + + it('instantiates', () => { + + new HookEngine(); + }); + + it('runs the happy path', async () => { + + const app = new TestApp(); + const engine = new HookEngine; + + const wrapped = engine.make('start', app.start, { bindTo: app }); + + app.start = wrapped; + + engine.extend('start', 'before', beforeFn); + engine.extend('start', 'after', afterFn); + + // @ts-expect-error - testing invalid attribute type (only functions 
should be picke up) + expect(() => engine.extend('notAFunc', 'before', noop)).to.throw(); + + await app.start(); + + expect(startFn).toHaveBeenCalledOnce(); + expect(beforeFn).toHaveBeenCalledOnce(); + expect(afterFn).toHaveBeenCalledOnce(); + }); + + it('rejects invalid usage of extend', () => { + + // [name, extensionPoint, cbOrOpts] + const badArgs = [ + [null], + [1], + ['nonexistentHook'], + ['stop'], + ['start'], + ['start', 1], + ['start', 'invalidExtensionPoint'], + ['start', 'before', null], + ['start', 'before', {}], + ['start', 'before', { callback: null }], + ] as unknown as Array['extend']>>; + + const app = new TestApp(); + const engine = new HookEngine; + + engine.make('start', app.start, { bindTo: app }); + + for (const args of badArgs) { + expect(() => engine.extend(...args)).to.throw(); + } + }); + + it('rejects invalid usage of make', () => { + + // [name, cb, opts] + const badArgs = [ + [null], + [1], + ['stop', null], + ['stop', 'notAFunction'], + ['stop', noop, 'notAnObject'], + ['start', noop], // already registered + ] as unknown as Array['make']>>; + + const app = new TestApp(); + const engine = new HookEngine; + + engine.make('start', app.start, { bindTo: app }); + + for (const args of badArgs) { + expect(() => engine.make(...args)).to.throw(); + } + }); + + describe('engine.make()', () => { + + it('registers a hook and returns a wrapped function', async () => { + + const engine = new HookEngine(); + const app = new TestApp(); + + const wrapped = engine.make('start', app.start, { bindTo: app }); + + expect(wrapped).to.be.a('function'); + expect(wrapped).to.not.equal(app.start); + + // Should be able to extend after registration + engine.extend('start', 'before', beforeFn); + + await wrapped(); + + expect(beforeFn).toHaveBeenCalledOnce(); + }); + + it('executes the original function', async () => { + + const engine = new HookEngine(); + const app = new TestApp(); + + const wrapped = engine.make('start', app.start, { bindTo: app }); + + 
await wrapped(); + + expect(startFn).toHaveBeenCalledOnce(); + }); + + it('returns the original function return value', async () => { + + const engine = new HookEngine(); + const app = new TestApp(); + const expectedResult = { success: true }; + + startFn.mockReturnValue(expectedResult); + + const wrapped = engine.make('start', app.start, { bindTo: app }); + + const result = await wrapped(); + + expect(result).to.equal(expectedResult); + }); + + it('keeps the original function arguments', async () => { + + const engine = new HookEngine(); + const app = new TestApp(); + + const wrapped = engine.make('start', app.start, { bindTo: app }); + + await wrapped('arg1', 'arg2', 123); + + expect(startFn).toHaveBeenCalledWith('arg1', 'arg2', 123); + }); + + it('binds the original function to the provided context', async () => { + + const contextCapture = vi.fn(); + + class ContextClass { + + value = 42; + + async doWork() { + + contextCapture(this.value); + } + } + + const instance = new ContextClass(); + const customEngine = new HookEngine(); + + const wrapped = customEngine.make('doWork', instance.doWork, { bindTo: instance }); + + await wrapped(); + + expect(contextCapture).toHaveBeenCalledWith(42); + }); + + }); + + describe('hook extensions', () => { + + let app = new TestApp(); + let engine = new HookEngine; + + + beforeEach(() => { + + vi.resetAllMocks(); + app = new TestApp(); + engine = new HookEngine; + + engine.wrap(app, 'start'); + }); + + it('allows the addition of a before extension', async () => { + + engine.extend('start', 'before', beforeFn); + + await app.start(); + + expect(beforeFn).toHaveBeenCalledOnce(); + expect(startFn).toHaveBeenCalledOnce(); + expect(beforeFn).toHaveBeenCalledBefore(startFn); + + const ctx = beforeFn.mock.calls[0]![0]; + + expect(ctx).to.have.property('point', 'before'); + expect(ctx).to.have.property('args').that.is.an('array'); + + expect(startFn).toHaveBeenCalledWith(...ctx.args); + + }); + + it('allows the addition of an after 
extension', async () => { + + engine.extend('start', 'after', afterFn); + + await app.start(); + + expect(afterFn).toHaveBeenCalledOnce(); + expect(startFn).toHaveBeenCalledOnce(); + + expect(afterFn).toHaveBeenCalledAfter(startFn); + + const ctx = afterFn.mock.calls[0]![0]; + + expect(ctx).to.have.property('point', 'after'); + expect(ctx).to.have.property('args').that.is.an('array'); + + expect(startFn).toHaveBeenCalledWith(...ctx.args); + }); + + it('allows the addition of an error extension', async () => { + + engine.extend('start', 'error', errorFn); + + const error = new Error('Test error'); + + startFn.mockImplementation(() => { throw error; }); + + const [, err] = await attempt(() => app.start()); + + expect(err).to.equal(error); + + expect(errorFn).toHaveBeenCalledOnce(); + expect(startFn).toHaveBeenCalledOnce(); + + expect(errorFn).toHaveBeenCalledAfter(startFn); + + const ctx = errorFn.mock.calls[0]![0]; + + expect(ctx).to.have.property('point', 'error'); + expect(ctx).to.have.property('args').that.is.an('array'); + expect(ctx).to.have.property('error', error); + + expect(startFn).toHaveBeenCalledWith(...ctx.args); + }); + + it('preserves execution order of extensions', async () => { + + engine.extend('start', 'before', beforeFn); + engine.extend('start', 'after', afterFn); + engine.extend('start', 'error', errorFn); + + await app.start(); + + expect(beforeFn).toHaveBeenCalledBefore(startFn); + expect(afterFn).toHaveBeenCalledAfter(startFn); + expect(errorFn).not.toHaveBeenCalled(); + + const error = new Error('Test error'); + + beforeFn.mockReset(); + afterFn.mockReset(); + errorFn.mockReset(); + startFn.mockReset(); + + startFn.mockImplementation(() => { throw error; }); + + const [, err] = await attempt(() => app.start()); + + expect(err).to.equal(error); + + expect(beforeFn).toHaveBeenCalledBefore(startFn); + expect(errorFn).toHaveBeenCalledAfter(startFn); + expect(afterFn).not.toHaveBeenCalled(); + }); + + it('allows the addition of more than one 
extension per extension point', async () => { + + const anotherBeforeFn = vi.fn(); + const anotherAfterFn = vi.fn(); + const anotherErrorFn = vi.fn(); + + engine.extend('start', 'before', beforeFn); + engine.extend('start', 'before', anotherBeforeFn); + + engine.extend('start', 'after', afterFn); + engine.extend('start', 'after', anotherAfterFn); + + engine.extend('start', 'error', errorFn); + engine.extend('start', 'error', anotherErrorFn); + + expect(beforeFn).not.toHaveBeenCalled(); + expect(anotherBeforeFn).not.toHaveBeenCalled(); + expect(afterFn).not.toHaveBeenCalled(); + expect(anotherAfterFn).not.toHaveBeenCalled(); + expect(errorFn).not.toHaveBeenCalled(); + expect(anotherErrorFn).not.toHaveBeenCalled(); + + await app.start(); + + expect(beforeFn).toHaveBeenCalledOnce(); + expect(anotherBeforeFn).toHaveBeenCalledOnce(); + + expect(beforeFn).toHaveBeenCalledBefore(anotherBeforeFn); + expect(anotherBeforeFn).toHaveBeenCalledBefore(startFn); + + expect(afterFn).toHaveBeenCalledOnce(); + expect(anotherAfterFn).toHaveBeenCalledOnce(); + + expect(afterFn).toHaveBeenCalledAfter(startFn); + expect(anotherAfterFn).toHaveBeenCalledAfter(afterFn); + + const error = new Error('Test error'); + + beforeFn.mockReset(); + anotherBeforeFn.mockReset(); + afterFn.mockReset(); + anotherAfterFn.mockReset(); + errorFn.mockReset(); + anotherErrorFn.mockReset(); + startFn.mockReset(); + + startFn.mockImplementation(() => { throw error; }); + + const [, err] = await attempt(() => app.start()); + + expect(err).to.equal(error); + + expect(beforeFn).toHaveBeenCalledOnce(); + expect(anotherBeforeFn).toHaveBeenCalledOnce(); + + expect(beforeFn).toHaveBeenCalledBefore(anotherBeforeFn); + expect(anotherBeforeFn).toHaveBeenCalledBefore(startFn); + + expect(errorFn).toHaveBeenCalledOnce(); + expect(anotherErrorFn).toHaveBeenCalledOnce(); + + expect(errorFn).toHaveBeenCalledAfter(startFn); + expect(anotherErrorFn).toHaveBeenCalledAfter(errorFn); + }); + + it('allows the cleanup of an 
extension point', async () => { + + const cleanup = engine.extend('start', 'before', beforeFn); + + expect(beforeFn).not.toHaveBeenCalled(); + + await app.start(); + + expect(beforeFn).toHaveBeenCalledOnce(); + expect(startFn).toHaveBeenCalledOnce(); + + cleanup(); + + await app.start(); + + expect(beforeFn).toHaveBeenCalledOnce(); + expect(startFn).toHaveBeenCalledTimes(2); + }); + + it('allows extensions to modify the original function arguments', async () => { + + const modifiedArgs = ['modified', 'args']; + + engine.extend('start', 'before', async (ctx) => { + + ctx.setArgs(modifiedArgs as any); + }); + + await app.start(); + + expect(startFn).toHaveBeenCalledOnce(); + expect(startFn).toHaveBeenCalledWith(...modifiedArgs); + }); + + it('allows extensions to return early from the hook chain', async () => { + + const earlyResult = { early: true }; + + engine.extend('start', 'before', async (ctx) => { + + ctx.setResult(earlyResult as any); + ctx.returnEarly(); + }); + + const result = await app.start(); + + expect(startFn).not.toHaveBeenCalled(); + expect(result).to.equal(earlyResult); + }); + + it('doesnt duplicate extensions added more than once', async () => { + + engine.extend('start', 'before', beforeFn); + engine.extend('start', 'before', beforeFn); + engine.extend('start', 'before', beforeFn); + + await app.start(); + + expect(beforeFn).toHaveBeenCalledOnce(); + }); + + it('can run an extension only once when specified', async () => { + + engine.extend('start', 'before', { + callback: beforeFn, + once: true + }); + + await app.start(); + await app.start(); + await app.start(); + + expect(beforeFn).toHaveBeenCalledOnce(); + expect(startFn).toHaveBeenCalledTimes(3); + }); + + it('captures and re-throws errors from the original function in error extensions', async () => { + + const originalError = new Error('Original function error'); + + startFn.mockImplementation(() => { throw originalError; }); + + engine.extend('start', 'error', errorFn); + + const [, err] 
= await attempt(() => app.start()); + + expect(err).to.equal(originalError); + expect(errorFn).toHaveBeenCalledOnce(); + + const ctx = errorFn.mock.calls[0]![0]; + + expect(ctx).to.have.property('error', originalError); + expect(ctx).to.have.property('point', 'error'); + }); + + it('captures and re-throws errors from before extensions', async () => { + + const beforeError = new Error('Before extension error'); + + engine.extend('start', 'before', async () => { + + throw beforeError; + }); + + engine.extend('start', 'error', errorFn); + + const [, err] = await attempt(() => app.start()); + + expect(err).to.equal(beforeError); + expect(startFn).not.toHaveBeenCalled(); + expect(errorFn).not.toHaveBeenCalled(); + }); + + it('captures and re-throws errors from after extensions', async () => { + + const afterError = new Error('After extension error'); + + engine.extend('start', 'after', async () => { + + throw afterError; + }); + + engine.extend('start', 'error', errorFn); + + const [, err] = await attempt(() => app.start()); + + expect(err).to.equal(afterError); + expect(startFn).toHaveBeenCalledOnce(); + expect(errorFn).not.toHaveBeenCalled(); + }); + + it('captures and re-throws errors from error extensions as well', async () => { + + const originalError = new Error('Original error'); + const errorExtensionError = new Error('Error extension error'); + + startFn.mockImplementation(() => { throw originalError; }); + + engine.extend('start', 'error', async () => { + + throw errorExtensionError; + }); + + const [, err] = await attempt(() => app.start()); + + expect(err).to.equal(errorExtensionError); + }); + + it('ignores errors thrown by extension if specified', async () => { + + const extensionError = new Error('Extension error'); + + engine.extend('start', 'before', { + callback: async () => { throw extensionError; }, + ignoreOnFail: true + }); + + engine.extend('start', 'after', afterFn); + + const [, err] = await attempt(() => app.start()); + + 
expect(err).to.be.null; + expect(startFn).toHaveBeenCalledOnce(); + expect(afterFn).toHaveBeenCalledOnce(); + }); + + it('captures results from original function', async () => { + + const originalResult = { data: 'test' }; + + startFn.mockReturnValue(originalResult); + + engine.extend('start', 'after', afterFn); + + const result = await app.start(); + + expect(result).to.equal(originalResult); + + const ctx = afterFn.mock.calls[0]![0]; + + expect(ctx).to.have.property('results', originalResult); + }); + + it('captures results from before extensions when early return is used', async () => { + + const earlyResult = { early: 'result' }; + + engine.extend('start', 'before', async (ctx) => { + + ctx.setResult(earlyResult as any); + ctx.returnEarly(); + }); + + const result = await app.start(); + + expect(result).to.equal(earlyResult); + expect(startFn).not.toHaveBeenCalled(); + }); + + it('captures results from after extensions via setResult', async () => { + + const originalResult = { original: true }; + const modifiedResult = { modified: true }; + + startFn.mockReturnValue(originalResult); + + engine.extend('start', 'after', async (ctx) => { + + expect(ctx.results).to.equal(originalResult); + ctx.setResult(modifiedResult as any); + }); + + const result = await app.start(); + + expect(result).to.equal(modifiedResult); + }); + }); + + describe('isHookError()', () => { + + it('returns true for HookError instances', () => { + + const error = new HookError('Test error'); + + expect(isHookError(error)).to.be.true; + }); + + it('returns false for regular Error instances', () => { + + const error = new Error('Regular error'); + + expect(isHookError(error)).to.be.false; + }); + + it('returns false for non-error values', () => { + + expect(isHookError(null)).to.be.false; + expect(isHookError(undefined)).to.be.false; + expect(isHookError('string')).to.be.false; + expect(isHookError(123)).to.be.false; + expect(isHookError({})).to.be.false; + }); + }); + + describe('HookError 
properties via fail()', () => { + + let app: TestApp; + let engine: HookEngine; + + beforeEach(() => { + + vi.resetAllMocks(); + app = new TestApp(); + engine = new HookEngine(); + engine.wrap(app, 'start'); + }); + + it('sets hookName and extPoint when fail() is called in before', async () => { + + engine.extend('start', 'before', async (ctx) => { + + ctx.fail('Test failure'); + }); + + const [, err] = await attempt(() => app.start()); + + expect(isHookError(err)).to.be.true; + expect(err).to.have.property('hookName', 'start'); + expect(err).to.have.property('extPoint', 'before'); + expect(err).to.have.property('message').that.includes('Test failure'); + }); + + it('sets hookName and extPoint when fail() is called in after', async () => { + + engine.extend('start', 'after', async (ctx) => { + + ctx.fail('After failure'); + }); + + const [, err] = await attempt(() => app.start()); + + expect(isHookError(err)).to.be.true; + expect(err).to.have.property('hookName', 'start'); + expect(err).to.have.property('extPoint', 'after'); + }); + + it('sets hookName and extPoint when fail() is called in error', async () => { + + const originalError = new Error('Original'); + + startFn.mockImplementation(() => { throw originalError; }); + + engine.extend('start', 'error', async (ctx) => { + + ctx.fail('Error handler failure'); + }); + + const [, err] = await attempt(() => app.start()); + + expect(isHookError(err)).to.be.true; + expect(err).to.have.property('hookName', 'start'); + expect(err).to.have.property('extPoint', 'error'); + }); + + it('sets originalError when fail() is called with an Error', async () => { + + const originalError = new Error('Original error'); + + engine.extend('start', 'before', async (ctx) => { + + ctx.fail(originalError); + }); + + const [, err] = await attempt(() => app.start()); + + expect(isHookError(err)).to.be.true; + expect(err).to.have.property('originalError', originalError); + }); + + it('does not set originalError when fail() is called with a 
string', async () => { + + engine.extend('start', 'before', async (ctx) => { + + ctx.fail('String message'); + }); + + const [, err] = await attempt(() => app.start()); + + expect(isHookError(err)).to.be.true; + expect(err).to.have.property('originalError', undefined); + }); + }); + + describe('engine.wrap()', () => { + + it('wraps an object method in-place', async () => { + + const app = new TestApp(); + const engine = new HookEngine(); + const originalStart = app.start; + + engine.wrap(app, 'start'); + + expect(app.start).to.not.equal(originalStart); + expect(app.start).to.be.a('function'); + + await app.start(); + + expect(startFn).toHaveBeenCalledOnce(); + }); + + it('binds to the instance automatically', async () => { + + const contextCapture = vi.fn(); + + class ContextApp { + + value = 'instance-value'; + + async getValue() { + + contextCapture(this.value); + return this.value; + } + } + + const app = new ContextApp(); + const engine = new HookEngine(); + + engine.wrap(app, 'getValue'); + + const result = await app.getValue(); + + expect(contextCapture).toHaveBeenCalledWith('instance-value'); + expect(result).to.equal('instance-value'); + }); + + it('allows extensions after wrapping', async () => { + + const app = new TestApp(); + const engine = new HookEngine(); + + engine.wrap(app, 'start'); + engine.extend('start', 'before', beforeFn); + engine.extend('start', 'after', afterFn); + + await app.start(); + + expect(beforeFn).toHaveBeenCalledOnce(); + expect(startFn).toHaveBeenCalledOnce(); + expect(afterFn).toHaveBeenCalledOnce(); + }); + + it('rejects invalid instance', () => { + + const engine = new HookEngine(); + + expect(() => engine.wrap(null as any, 'start')).to.throw(); + expect(() => engine.wrap(undefined as any, 'start')).to.throw(); + expect(() => engine.wrap('string' as any, 'start')).to.throw(); + }); + + it('preserves arguments and return values', async () => { + + const app = new TestApp(); + const engine = new HookEngine(); + const 
expectedResult = { wrapped: true }; + + startFn.mockReturnValue(expectedResult); + + engine.wrap(app, 'start'); + + const result = await app.start('arg1', 'arg2'); + + expect(startFn).toHaveBeenCalledWith('arg1', 'arg2'); + expect(result).to.equal(expectedResult); + }); + }); + + describe('engine.clear()', () => { + + it('removes all extensions', async () => { + + const app = new TestApp(); + const engine = new HookEngine(); + + engine.wrap(app, 'start'); + engine.extend('start', 'before', beforeFn); + engine.extend('start', 'after', afterFn); + + await app.start(); + + expect(beforeFn).toHaveBeenCalledOnce(); + expect(afterFn).toHaveBeenCalledOnce(); + + beforeFn.mockReset(); + afterFn.mockReset(); + + engine.clear(); + + // Re-wrap after clear + engine.wrap(app, 'start'); + + await app.start(); + + expect(beforeFn).not.toHaveBeenCalled(); + expect(afterFn).not.toHaveBeenCalled(); + expect(startFn).toHaveBeenCalled(); + }); + + it('allows re-registration of hooks after clear', async () => { + + const app1 = new TestApp(); + const app2 = new TestApp(); + const engine = new HookEngine(); + + engine.wrap(app1, 'start'); + + engine.clear(); + + // Should not throw - hook can be registered again with fresh instance + engine.wrap(app2, 'start'); + engine.extend('start', 'before', beforeFn); + + await app2.start(); + + expect(beforeFn).toHaveBeenCalledOnce(); + }); + + it('clears registrations so hooks can be re-made', () => { + + const app = new TestApp(); + const engine = new HookEngine(); + + engine.make('start', app.start, { bindTo: app }); + + // Should throw - already registered + expect(() => engine.make('start', app.start, { bindTo: app })).to.throw(); + + engine.clear(); + + // Should not throw after clear + expect(() => engine.make('start', app.start, { bindTo: app })).to.not.throw(); + }); + }); + + describe('context.removeHook()', () => { + + it('allows an extension to remove itself', async () => { + + const app = new TestApp(); + const engine = new 
HookEngine(); + + engine.wrap(app, 'start'); + + let callCount = 0; + engine.extend('start', 'before', async (ctx) => { + + callCount++; + + if (callCount >= 2) { + ctx.removeHook(); + } + }); + + await app.start(); + await app.start(); + await app.start(); + await app.start(); + + expect(callCount).to.equal(2); + }); + + it('removes the correct extension from multiple', async () => { + + const app = new TestApp(); + const engine = new HookEngine(); + + engine.wrap(app, 'start'); + + const firstFn = vi.fn(); + const selfRemovingFn = vi.fn(async (ctx) => { + + ctx.removeHook(); + }); + const lastFn = vi.fn(); + + engine.extend('start', 'before', firstFn); + engine.extend('start', 'before', selfRemovingFn); + engine.extend('start', 'before', lastFn); + + await app.start(); + + expect(firstFn).toHaveBeenCalledOnce(); + expect(selfRemovingFn).toHaveBeenCalledOnce(); + expect(lastFn).toHaveBeenCalledOnce(); + + firstFn.mockReset(); + selfRemovingFn.mockReset(); + lastFn.mockReset(); + + await app.start(); + + expect(firstFn).toHaveBeenCalledOnce(); + expect(selfRemovingFn).not.toHaveBeenCalled(); + expect(lastFn).toHaveBeenCalledOnce(); + }); + }); +}); From 77cee9c517e7d67ee6738368b19fcc6b734bb623 Mon Sep 17 00:00:00 2001 From: Danilo Alonso Date: Sat, 27 Dec 2025 20:32:07 -0500 Subject: [PATCH 02/13] chore(hooks): add package metadata and fix unused type parameter --- packages/hooks/package.json | 16 +++++++++------- packages/hooks/src/index.ts | 6 +++--- 2 files changed, 12 insertions(+), 10 deletions(-) diff --git a/packages/hooks/package.json b/packages/hooks/package.json index 8400a39..aa101ed 100644 --- a/packages/hooks/package.json +++ b/packages/hooks/package.json @@ -1,7 +1,7 @@ { "name": "@logosdx/hooks", "version": "0.0.1", - "description": "", + "description": "A lightweight, type-safe hook system for extending function behavior", "exports": { ".": { "types": "./dist/types/index.d.ts", @@ -9,17 +9,19 @@ "import": "./dist/esm/index.mjs" } }, + "sideEffects": 
false, + "browserNamespace": "LogosDx.Hooks", "scripts": { "build": "node ../../scripts/build.mjs", "lint": "pnpm tsc --noEmit --project tsconfig.json" }, - "files": [ - "dist/", - "docs/", - "readme.md", - "LICENSE" + "files": [], + "keywords": [ + "hooks", + "middleware", + "extensions", + "before after" ], - "keywords": [], "homepage": "https://logosdx.dev/", "bugs": { "url": "https://github.com/logosdx/monorepo/issues", diff --git a/packages/hooks/src/index.ts b/packages/hooks/src/index.ts index 43534eb..5adacd2 100644 --- a/packages/hooks/src/index.ts +++ b/packages/hooks/src/index.ts @@ -127,7 +127,7 @@ type HookExtOptions = { type HookExtOrOptions = HookFn | HookExtOptions -type MakeHookOptions = { +type MakeHookOptions = { bindTo?: any } @@ -259,7 +259,7 @@ export class HookEngine { make>( name: K, cb: FuncOrNever, - opts: MakeHookOptions> = {} + opts: MakeHookOptions = {} ) { assert(typeof name === 'string', '"name" must be a string'); @@ -395,7 +395,7 @@ export class HookEngine { wrap>( instance: Shape, name: K, - opts?: MakeHookOptions> + opts?: MakeHookOptions ) { assert(isObject(instance), '"instance" must be an object'); From 42e4b2f6843d8151e8159eac05904841080d9d1f Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Sun, 28 Dec 2025 01:33:06 +0000 Subject: [PATCH 03/13] Version Packages (beta) --- .changeset/pre.json | 4 +++- packages/hooks/CHANGELOG.md | 15 +++++++++++++++ packages/hooks/package.json | 2 +- 3 files changed, 19 insertions(+), 2 deletions(-) create mode 100644 packages/hooks/CHANGELOG.md diff --git a/.changeset/pre.json b/.changeset/pre.json index 219c1cb..7dd9455 100644 --- a/.changeset/pre.json +++ b/.changeset/pre.json @@ -13,5 +13,7 @@ "@logosdx/utils": "5.0.0", "@logosdx/tests": "0.0.1" }, - "changesets": [] + "changesets": [ + "rich-pears-jam" + ] } diff --git a/packages/hooks/CHANGELOG.md b/packages/hooks/CHANGELOG.md new file mode 100644 index 0000000..812a2a0 --- 
/dev/null +++ b/packages/hooks/CHANGELOG.md @@ -0,0 +1,15 @@ +# @logosdx/hooks + +## 1.0.0-beta.0 + +### Major Changes + +- 99a13ba: Initial beta release of @logosdx/hooks - a lightweight, type-safe hook system for extending function behavior. + + Features: + + - `HookEngine` class for wrapping functions with before/after/error extension points + - `make()` and `wrap()` methods for creating hookable functions + - Extension options: `once`, `ignoreOnFail` + - Context methods: `setArgs`, `setResult`, `returnEarly`, `fail`, `removeHook` + - `HookError` and `isHookError()` for typed error handling diff --git a/packages/hooks/package.json b/packages/hooks/package.json index aa101ed..46c8650 100644 --- a/packages/hooks/package.json +++ b/packages/hooks/package.json @@ -1,6 +1,6 @@ { "name": "@logosdx/hooks", - "version": "0.0.1", + "version": "1.0.0-beta.0", "description": "A lightweight, type-safe hook system for extending function behavior", "exports": { ".": { From 735b053ba74940ebbbe4f2bfc0a9aa4b38f3ca2c Mon Sep 17 00:00:00 2001 From: Danilo Alonso Date: Sat, 27 Dec 2025 20:26:57 -0500 Subject: [PATCH 04/13] feat(hooks): initial beta release of @logosdx/hooks A lightweight, type-safe hook system for extending function behavior with before/after/error extension points. 
- HookEngine class with make(), wrap(), extend(), and clear() methods - Extension options: once, ignoreOnFail - Context API: setArgs, setResult, returnEarly, fail, removeHook - HookError class and isHookError() type guard - Comprehensive test suite (45 tests) - Full JSDoc documentation --- .changeset/pre.json | 17 + .changeset/rich-pears-jam.md | 13 + .github/workflows/publish-beta.yml | 67 +++ packages/hooks/LICENSE | 12 + packages/hooks/package.json | 33 ++ packages/hooks/src/index.ts | 440 ++++++++++++++ packages/hooks/tsconfig.json | 5 + pnpm-lock.yaml | 6 + tests/src/hooks.ts | 917 +++++++++++++++++++++++++++++ 9 files changed, 1510 insertions(+) create mode 100644 .changeset/pre.json create mode 100644 .changeset/rich-pears-jam.md create mode 100644 .github/workflows/publish-beta.yml create mode 100644 packages/hooks/LICENSE create mode 100644 packages/hooks/package.json create mode 100644 packages/hooks/src/index.ts create mode 100644 packages/hooks/tsconfig.json create mode 100644 tests/src/hooks.ts diff --git a/.changeset/pre.json b/.changeset/pre.json new file mode 100644 index 0000000..219c1cb --- /dev/null +++ b/.changeset/pre.json @@ -0,0 +1,17 @@ +{ + "mode": "pre", + "tag": "beta", + "initialVersions": { + "@logosdx/dom": "2.0.15", + "@logosdx/fetch": "7.0.3", + "@logosdx/hooks": "0.0.1", + "@logosdx/kit": "4.0.3", + "@logosdx/localize": "1.0.19", + "@logosdx/observer": "2.2.0", + "@logosdx/state-machine": "1.0.19", + "@logosdx/storage": "1.0.19", + "@logosdx/utils": "5.0.0", + "@logosdx/tests": "0.0.1" + }, + "changesets": [] +} diff --git a/.changeset/rich-pears-jam.md b/.changeset/rich-pears-jam.md new file mode 100644 index 0000000..833321f --- /dev/null +++ b/.changeset/rich-pears-jam.md @@ -0,0 +1,13 @@ +--- +"@logosdx/hooks": major +--- + +Initial beta release of @logosdx/hooks - a lightweight, type-safe hook system for extending function behavior. 
+ +Features: + +- `HookEngine` class for wrapping functions with before/after/error extension points +- `make()` and `wrap()` methods for creating hookable functions +- Extension options: `once`, `ignoreOnFail` +- Context methods: `setArgs`, `setResult`, `returnEarly`, `fail`, `removeHook` +- `HookError` and `isHookError()` for typed error handling diff --git a/.github/workflows/publish-beta.yml b/.github/workflows/publish-beta.yml new file mode 100644 index 0000000..738a68b --- /dev/null +++ b/.github/workflows/publish-beta.yml @@ -0,0 +1,67 @@ +name: Publish Beta +on: + push: + branches: + - "beta" + workflow_dispatch: + +concurrency: ${{ github.workflow }}-${{ github.ref }} + +permissions: + id-token: write # Required for OIDC + contents: write + actions: write + issues: write + pull-requests: write + + +jobs: + check-prerelease: + runs-on: ubuntu-latest + outputs: + is_prerelease: ${{ steps.check.outputs.is_prerelease }} + steps: + + - uses: actions/checkout@v6 + + - name: Check if pre-release mode is enabled + id: check + run: | + if [ -f ".changeset/pre.json" ]; then + echo "Pre-release mode is enabled" + echo "is_prerelease=true" >> $GITHUB_OUTPUT + else + echo "Pre-release mode is not enabled, skipping publish" + echo "is_prerelease=false" >> $GITHUB_OUTPUT + fi + + publish: + needs: check-prerelease + if: needs.check-prerelease.outputs.is_prerelease == 'true' + runs-on: ubuntu-latest + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + NPM_TOKEN: ${{ secrets.NPM_TOKEN }} + steps: + + - uses: actions/checkout@v6 + + - uses: pnpm/action-setup@v4 + with: + version: 10 + + - uses: actions/setup-node@v6 + with: + node-version: 22.x + cache: "pnpm" + + - run: pnpm install --frozen-lockfile + + - run: pnpm recursive run build + + - name: Publish + uses: changesets/action@v1 + with: + publish: pnpm run release + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/packages/hooks/LICENSE b/packages/hooks/LICENSE new file mode 100644 index 0000000..b701955 
--- /dev/null +++ b/packages/hooks/LICENSE @@ -0,0 +1,12 @@ +Copyright 2023 LogosDX contributors + +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS “AS IS” AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ diff --git a/packages/hooks/package.json b/packages/hooks/package.json new file mode 100644 index 0000000..8400a39 --- /dev/null +++ b/packages/hooks/package.json @@ -0,0 +1,33 @@ +{ + "name": "@logosdx/hooks", + "version": "0.0.1", + "description": "", + "exports": { + ".": { + "types": "./dist/types/index.d.ts", + "require": "./dist/cjs/index.js", + "import": "./dist/esm/index.mjs" + } + }, + "scripts": { + "build": "node ../../scripts/build.mjs", + "lint": "pnpm tsc --noEmit --project tsconfig.json" + }, + "files": [ + "dist/", + "docs/", + "readme.md", + "LICENSE" + ], + "keywords": [], + "homepage": "https://logosdx.dev/", + "bugs": { + "url": "https://github.com/logosdx/monorepo/issues", + "email": "danilo@alonso.network" + }, + "author": "Danilo Alonso ", + "license": "BSD-3-Clause", + "dependencies": { + "@logosdx/utils": "workspace:^" + } +} diff --git a/packages/hooks/src/index.ts b/packages/hooks/src/index.ts new file mode 100644 index 0000000..43534eb --- /dev/null +++ b/packages/hooks/src/index.ts @@ -0,0 +1,440 @@ +import { + assert, + AsyncFunc, + attempt, + isFunction, + isObject, + FunctionProps +} from '@logosdx/utils'; + +/** + * Error thrown when a hook extension calls `fail()` or when hook execution fails. 
+ * + * @example + * engine.extend('save', 'before', async (ctx) => { + * if (!ctx.args[0].isValid) { + * ctx.fail('Validation failed'); + * } + * }); + * + * const [, err] = await attempt(() => app.save(data)); + * if (isHookError(err)) { + * console.log(err.hookName); // 'save' + * console.log(err.extPoint); // 'before' + * } + */ +export class HookError extends Error { + + /** Name of the hook where the error occurred */ + hookName?: string; + + /** Extension point where the error occurred: 'before', 'after', or 'error' */ + extPoint?: string; + + /** Original error if `fail()` was called with an Error instance */ + originalError?: Error; + + /** Whether the hook was explicitly aborted via `fail()` */ + aborted = false; + + constructor(message: string) { + + super(message) + } +} + +/** + * Type guard to check if an error is a HookError. + * + * @example + * const [result, err] = await attempt(() => app.save(data)); + * if (isHookError(err)) { + * console.log(`Hook "${err.hookName}" failed at "${err.extPoint}"`); + * } + */ +export const isHookError = (error: unknown): error is HookError => { + + return (error as HookError)?.constructor?.name === HookError.name +} + +interface HookShape { + args: Parameters, + results?: Awaited> +} + +/** + * Context object passed to hook extension callbacks. + * Provides access to arguments, results, and control methods. 
+ * + * @example + * engine.extend('fetch', 'before', async (ctx) => { + * // Read current arguments + * const [url, options] = ctx.args; + * + * // Modify arguments before the original function runs + * ctx.setArgs([url, { ...options, cache: 'force-cache' }]); + * + * // Or skip the original function entirely + * if (isCached(url)) { + * ctx.setResult(getCached(url)); + * ctx.returnEarly(); + * } + * }); + */ +export interface HookContext extends HookShape { + + /** Current extension point: 'before', 'after', or 'error' */ + point: keyof Hook; + + /** Error from the original function (only set in 'error' extensions) */ + error?: unknown, + + /** Abort hook execution with an error. Throws a HookError. */ + fail: (error?: unknown) => never, + + /** Replace the arguments passed to the original function */ + setArgs: (next: Parameters) => void, + + /** Replace the result returned from the hook chain */ + setResult: (next: Awaited>) => void, + + /** Skip the original function and return early with the current result */ + returnEarly: () => void; + + /** Remove this extension from the hook (useful with `once` behavior) */ + removeHook: () => void; +} + +export type HookFn = (ctx: HookContext) => Promise; + +class Hook { + before: Set> = new Set(); + after: Set> = new Set(); + error: Set> = new Set(); +} + +const allowedExtPoints = new Set([ + 'before', + 'after', + 'error' +]); + +type HookExtOptions = { + callback: HookFn, + once?: true, + ignoreOnFail?: true +} + +type HookExtOrOptions = HookFn | HookExtOptions + +type MakeHookOptions = { + bindTo?: any +} + +type FuncOrNever = T extends AsyncFunc ? T : never; + +/** + * A lightweight, type-safe hook system for extending function behavior. + * + * HookEngine allows you to wrap functions and add extensions that run + * before, after, or on error. Extensions can modify arguments, change + * results, or abort execution entirely. 
+ * + * @example + * interface MyApp { + * save(data: Data): Promise; + * load(id: string): Promise; + * } + * + * const app = new MyAppImpl(); + * const hooks = new HookEngine(); + * + * // Wrap a method to make it hookable + * hooks.wrap(app, 'save'); + * + * // Add a validation extension + * hooks.extend('save', 'before', async (ctx) => { + * if (!ctx.args[0].isValid) { + * ctx.fail('Validation failed'); + * } + * }); + * + * // Add logging extension + * hooks.extend('save', 'after', async (ctx) => { + * console.log('Saved:', ctx.results); + * }); + * + * @typeParam Shape - Interface defining the hookable functions + */ +export class HookEngine { + + #registered = new Set(); + #hooks: Map>> = new Map(); + #hookFnOpts = new WeakMap(); + #wrapped = new WeakMap(); + + /** + * Add an extension to a registered hook. + * + * Extensions run at specific points in the hook lifecycle: + * - `before`: Runs before the original function. Can modify args or return early. + * - `after`: Runs after successful execution. Can modify the result. + * - `error`: Runs when the original function throws. Can handle or transform errors. + * + * @param name - Name of the registered hook to extend + * @param extensionPoint - When to run: 'before', 'after', or 'error' + * @param cbOrOpts - Extension callback or options object + * @returns Cleanup function to remove the extension + * + * @example + * // Simple callback + * const cleanup = hooks.extend('save', 'before', async (ctx) => { + * console.log('About to save:', ctx.args); + * }); + * + * // With options + * hooks.extend('save', 'after', { + * callback: async (ctx) => { console.log('Saved!'); }, + * once: true, // Remove after first run + * ignoreOnFail: true // Don't throw if this extension fails + * }); + * + * // Later: remove the extension + * cleanup(); + */ + extend>( + name: K, + extensionPoint: keyof Hook>, + cbOrOpts: HookExtOrOptions> + ) { + const callback = typeof cbOrOpts === 'function' ? 
cbOrOpts : cbOrOpts?.callback; + const opts = typeof cbOrOpts === 'function' ? {} as HookExtOptions> : cbOrOpts; + + assert(typeof name === 'string', '"name" must be a string'); + assert(this.#registered.has(name), `'${name.toString()}' is not a registered hook`); + assert(typeof extensionPoint === 'string', '"extensionPoint" must be a string'); + assert(allowedExtPoints.has(extensionPoint), `'${extensionPoint}' is not a valid extension point`); + assert(isFunction(callback) || isObject(cbOrOpts), '"cbOrOpts" must be a extension callback or options'); + assert(isFunction(callback), 'callback must be a function'); + + const hook = this.#hooks.get(name) ?? new Hook>(); + + hook[extensionPoint].add(callback); + + this.#hooks.set(name, hook); + this.#hookFnOpts.set(callback, opts); + + /** + * Removes the registered hook extension + */ + return () => { + + hook[extensionPoint].delete(callback); + } + } + + /** + * Register a function as a hookable and return the wrapped version. + * + * The wrapped function behaves identically to the original but allows + * extensions to be added via `extend()`. Use `wrap()` for a simpler API + * when working with object methods. 
+ * + * @param name - Unique name for this hook (must match a key in Shape) + * @param cb - The original function to wrap + * @param opts - Options for the wrapped function + * @returns Wrapped function with hook support + * + * @example + * const hooks = new HookEngine<{ fetch: typeof fetch }>(); + * + * const hookedFetch = hooks.make('fetch', fetch); + * + * hooks.extend('fetch', 'before', async (ctx) => { + * console.log('Fetching:', ctx.args[0]); + * }); + * + * await hookedFetch('/api/data'); + */ + make>( + name: K, + cb: FuncOrNever, + opts: MakeHookOptions> = {} + ) { + + assert(typeof name === 'string', '"name" must be a string'); + assert(!this.#registered.has(name), `'${name.toString()}' hook is already registered`); + assert(isFunction(cb), '"cb" must be a function'); + assert(isObject(opts), '"opts" must be an object'); + + this.#registered.add(name); + + if (this.#wrapped.has(cb)) { + + return this.#wrapped.get(cb) as FuncOrNever; + } + + const callback = async (...origArgs: Parameters>) => { + + let returnEarly = false; + + const hook = this.#hooks.get(name)!; + + const context: HookContext> = { + args: origArgs, + point: 'before', + removeHook() {}, + returnEarly() { + returnEarly = true; + }, + setArgs(next) { + + assert( + Array.isArray(next), + `setArgs: next args for '${context.point}' '${name.toString()}' must be an array of arguments` + ); + + context.args = next; + }, + setResult(next) { + context.results = next; + }, + fail(reason) { + + const error = new HookError(`Hook Aborted: ${reason ?? 'unknown'}`); + + if (reason instanceof Error) { + + error.originalError = reason; + } + + error.extPoint = context.point; + error.hookName = name as string; + + throw error; + }, + } + + const { before, after, error: errorFns } = hook ?? 
new Hook>(); + + const handleSet = async ( + which: typeof before, + point: keyof typeof hook + ) => { + + context.point = point; + + for (const fn of which) { + + context.removeHook = () => which.delete(fn); + + const opts: HookExtOptions> = this.#hookFnOpts.get(fn); + const [, err] = await attempt(() => fn({ ...context })); + + if (opts.once) context.removeHook(); + + if (err && opts.ignoreOnFail !== true) { + throw err; + } + + if (returnEarly) break; + } + } + + await handleSet(before, 'before'); + + if (returnEarly) return context.results! + + const [res, err] = await attempt(() => cb.apply(opts?.bindTo || cb, context.args)); + + context.results = res; + context.error = err; + + if (err) { + context.point = 'error'; + + await handleSet(errorFns, 'error'); + + throw err; + } + + await handleSet(after, 'after'); + + return context.results!; + } + + return callback as FuncOrNever; + } + + /** + * Wrap an object method in-place to make it hookable. + * + * This is a convenience method that combines `make()` with automatic + * binding and reassignment. The method is replaced on the instance + * with the wrapped version. + * + * @param instance - Object containing the method to wrap + * @param name - Name of the method to wrap + * @param opts - Additional options + * + * @example + * class UserService { + * async save(user: User) { ... 
} + * } + * + * const service = new UserService(); + * const hooks = new HookEngine(); + * + * hooks.wrap(service, 'save'); + * + * // Now service.save() is hookable + * hooks.extend('save', 'before', async (ctx) => { + * console.log('Saving user:', ctx.args[0]); + * }); + */ + wrap>( + instance: Shape, + name: K, + opts?: MakeHookOptions> + ) { + + assert(isObject(instance), '"instance" must be an object'); + + const wrapped = this.make( + name, + instance[name] as FuncOrNever, + { + bindTo: instance, + ...opts + } + ); + + this.#wrapped.set(wrapped, instance[name] as AsyncFunc); + + instance[name] = wrapped as Shape[K]; + + } + + /** + * Clear all registered hooks and extensions. + * + * After calling this method, all hooks are unregistered and all + * extensions are removed. Previously wrapped functions will continue + * to work but without any extensions. + * + * @example + * hooks.wrap(app, 'save'); + * hooks.extend('save', 'before', validator); + * + * // Reset for testing + * hooks.clear(); + * + * // app.save() still works, but validator no longer runs + */ + clear() { + + this.#registered.clear(); + this.#hooks.clear(); + this.#hookFnOpts = new WeakMap(); + } +} \ No newline at end of file diff --git a/packages/hooks/tsconfig.json b/packages/hooks/tsconfig.json new file mode 100644 index 0000000..ecdc3a2 --- /dev/null +++ b/packages/hooks/tsconfig.json @@ -0,0 +1,5 @@ +{ + "extends": "../../tsconfig.json", + "compilerOptions": { "noEmit": false, "declarationDir": "./dist/types" }, + "include": ["src"] +} \ No newline at end of file diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 66bcdf0..cc817bf 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -67,6 +67,12 @@ importers: specifier: workspace:^ version: link:../utils + packages/hooks: + dependencies: + '@logosdx/utils': + specifier: workspace:^ + version: link:../utils + packages/kit: dependencies: '@logosdx/fetch': diff --git a/tests/src/hooks.ts b/tests/src/hooks.ts new file mode 100644 index 
0000000..34bd7a6 --- /dev/null +++ b/tests/src/hooks.ts @@ -0,0 +1,917 @@ +import { + describe, + it, + beforeEach, + expect, + vi +} from 'vitest' + +import { HookEngine, HookError, isHookError } from '../../packages/hooks/src/index.ts'; +import { attempt, noop } from '../../packages/utils/src/index.ts'; + +describe('@logosdx/hooks', () => { + + const startFn = vi.fn(); + const stopFn = vi.fn(); + const requestFn = vi.fn(); + const beforeFn = vi.fn(); + const afterFn = vi.fn(); + const errorFn = vi.fn(); + + class TestApp { + + notAFunc = 'hello'; + start(...args: any[]) { return startFn(...args) } + stop(...args: any[]) { return stopFn(...args) } + request(...args: any[]) { return requestFn(...args) } + }; + + + beforeEach(() => { + + vi.resetAllMocks(); + }); + + it('instantiates', () => { + + new HookEngine(); + }); + + it('runs the happy path', async () => { + + const app = new TestApp(); + const engine = new HookEngine; + + const wrapped = engine.make('start', app.start, { bindTo: app }); + + app.start = wrapped; + + engine.extend('start', 'before', beforeFn); + engine.extend('start', 'after', afterFn); + + // @ts-expect-error - testing invalid attribute type (only functions should be picke up) + expect(() => engine.extend('notAFunc', 'before', noop)).to.throw(); + + await app.start(); + + expect(startFn).toHaveBeenCalledOnce(); + expect(beforeFn).toHaveBeenCalledOnce(); + expect(afterFn).toHaveBeenCalledOnce(); + }); + + it('rejects invalid usage of extend', () => { + + // [name, extensionPoint, cbOrOpts] + const badArgs = [ + [null], + [1], + ['nonexistentHook'], + ['stop'], + ['start'], + ['start', 1], + ['start', 'invalidExtensionPoint'], + ['start', 'before', null], + ['start', 'before', {}], + ['start', 'before', { callback: null }], + ] as unknown as Array['extend']>>; + + const app = new TestApp(); + const engine = new HookEngine; + + engine.make('start', app.start, { bindTo: app }); + + for (const args of badArgs) { + expect(() => 
engine.extend(...args)).to.throw(); + } + }); + + it('rejects invalid usage of make', () => { + + // [name, cb, opts] + const badArgs = [ + [null], + [1], + ['stop', null], + ['stop', 'notAFunction'], + ['stop', noop, 'notAnObject'], + ['start', noop], // already registered + ] as unknown as Array['make']>>; + + const app = new TestApp(); + const engine = new HookEngine; + + engine.make('start', app.start, { bindTo: app }); + + for (const args of badArgs) { + expect(() => engine.make(...args)).to.throw(); + } + }); + + describe('engine.make()', () => { + + it('registers a hook and returns a wrapped function', async () => { + + const engine = new HookEngine(); + const app = new TestApp(); + + const wrapped = engine.make('start', app.start, { bindTo: app }); + + expect(wrapped).to.be.a('function'); + expect(wrapped).to.not.equal(app.start); + + // Should be able to extend after registration + engine.extend('start', 'before', beforeFn); + + await wrapped(); + + expect(beforeFn).toHaveBeenCalledOnce(); + }); + + it('executes the original function', async () => { + + const engine = new HookEngine(); + const app = new TestApp(); + + const wrapped = engine.make('start', app.start, { bindTo: app }); + + await wrapped(); + + expect(startFn).toHaveBeenCalledOnce(); + }); + + it('returns the original function return value', async () => { + + const engine = new HookEngine(); + const app = new TestApp(); + const expectedResult = { success: true }; + + startFn.mockReturnValue(expectedResult); + + const wrapped = engine.make('start', app.start, { bindTo: app }); + + const result = await wrapped(); + + expect(result).to.equal(expectedResult); + }); + + it('keeps the original function arguments', async () => { + + const engine = new HookEngine(); + const app = new TestApp(); + + const wrapped = engine.make('start', app.start, { bindTo: app }); + + await wrapped('arg1', 'arg2', 123); + + expect(startFn).toHaveBeenCalledWith('arg1', 'arg2', 123); + }); + + it('binds the original 
function to the provided context', async () => { + + const contextCapture = vi.fn(); + + class ContextClass { + + value = 42; + + async doWork() { + + contextCapture(this.value); + } + } + + const instance = new ContextClass(); + const customEngine = new HookEngine(); + + const wrapped = customEngine.make('doWork', instance.doWork, { bindTo: instance }); + + await wrapped(); + + expect(contextCapture).toHaveBeenCalledWith(42); + }); + + }); + + describe('hook extensions', () => { + + let app = new TestApp(); + let engine = new HookEngine; + + + beforeEach(() => { + + vi.resetAllMocks(); + app = new TestApp(); + engine = new HookEngine; + + engine.wrap(app, 'start'); + }); + + it('allows the addition of a before extension', async () => { + + engine.extend('start', 'before', beforeFn); + + await app.start(); + + expect(beforeFn).toHaveBeenCalledOnce(); + expect(startFn).toHaveBeenCalledOnce(); + expect(beforeFn).toHaveBeenCalledBefore(startFn); + + const ctx = beforeFn.mock.calls[0]![0]; + + expect(ctx).to.have.property('point', 'before'); + expect(ctx).to.have.property('args').that.is.an('array'); + + expect(startFn).toHaveBeenCalledWith(...ctx.args); + + }); + + it('allows the addition of an after extension', async () => { + + engine.extend('start', 'after', afterFn); + + await app.start(); + + expect(afterFn).toHaveBeenCalledOnce(); + expect(startFn).toHaveBeenCalledOnce(); + + expect(afterFn).toHaveBeenCalledAfter(startFn); + + const ctx = afterFn.mock.calls[0]![0]; + + expect(ctx).to.have.property('point', 'after'); + expect(ctx).to.have.property('args').that.is.an('array'); + + expect(startFn).toHaveBeenCalledWith(...ctx.args); + }); + + it('allows the addition of an error extension', async () => { + + engine.extend('start', 'error', errorFn); + + const error = new Error('Test error'); + + startFn.mockImplementation(() => { throw error; }); + + const [, err] = await attempt(() => app.start()); + + expect(err).to.equal(error); + + 
expect(errorFn).toHaveBeenCalledOnce(); + expect(startFn).toHaveBeenCalledOnce(); + + expect(errorFn).toHaveBeenCalledAfter(startFn); + + const ctx = errorFn.mock.calls[0]![0]; + + expect(ctx).to.have.property('point', 'error'); + expect(ctx).to.have.property('args').that.is.an('array'); + expect(ctx).to.have.property('error', error); + + expect(startFn).toHaveBeenCalledWith(...ctx.args); + }); + + it('preserves execution order of extensions', async () => { + + engine.extend('start', 'before', beforeFn); + engine.extend('start', 'after', afterFn); + engine.extend('start', 'error', errorFn); + + await app.start(); + + expect(beforeFn).toHaveBeenCalledBefore(startFn); + expect(afterFn).toHaveBeenCalledAfter(startFn); + expect(errorFn).not.toHaveBeenCalled(); + + const error = new Error('Test error'); + + beforeFn.mockReset(); + afterFn.mockReset(); + errorFn.mockReset(); + startFn.mockReset(); + + startFn.mockImplementation(() => { throw error; }); + + const [, err] = await attempt(() => app.start()); + + expect(err).to.equal(error); + + expect(beforeFn).toHaveBeenCalledBefore(startFn); + expect(errorFn).toHaveBeenCalledAfter(startFn); + expect(afterFn).not.toHaveBeenCalled(); + }); + + it('allows the addition of more than one extension per extension point', async () => { + + const anotherBeforeFn = vi.fn(); + const anotherAfterFn = vi.fn(); + const anotherErrorFn = vi.fn(); + + engine.extend('start', 'before', beforeFn); + engine.extend('start', 'before', anotherBeforeFn); + + engine.extend('start', 'after', afterFn); + engine.extend('start', 'after', anotherAfterFn); + + engine.extend('start', 'error', errorFn); + engine.extend('start', 'error', anotherErrorFn); + + expect(beforeFn).not.toHaveBeenCalled(); + expect(anotherBeforeFn).not.toHaveBeenCalled(); + expect(afterFn).not.toHaveBeenCalled(); + expect(anotherAfterFn).not.toHaveBeenCalled(); + expect(errorFn).not.toHaveBeenCalled(); + expect(anotherErrorFn).not.toHaveBeenCalled(); + + await app.start(); + + 
expect(beforeFn).toHaveBeenCalledOnce(); + expect(anotherBeforeFn).toHaveBeenCalledOnce(); + + expect(beforeFn).toHaveBeenCalledBefore(anotherBeforeFn); + expect(anotherBeforeFn).toHaveBeenCalledBefore(startFn); + + expect(afterFn).toHaveBeenCalledOnce(); + expect(anotherAfterFn).toHaveBeenCalledOnce(); + + expect(afterFn).toHaveBeenCalledAfter(startFn); + expect(anotherAfterFn).toHaveBeenCalledAfter(afterFn); + + const error = new Error('Test error'); + + beforeFn.mockReset(); + anotherBeforeFn.mockReset(); + afterFn.mockReset(); + anotherAfterFn.mockReset(); + errorFn.mockReset(); + anotherErrorFn.mockReset(); + startFn.mockReset(); + + startFn.mockImplementation(() => { throw error; }); + + const [, err] = await attempt(() => app.start()); + + expect(err).to.equal(error); + + expect(beforeFn).toHaveBeenCalledOnce(); + expect(anotherBeforeFn).toHaveBeenCalledOnce(); + + expect(beforeFn).toHaveBeenCalledBefore(anotherBeforeFn); + expect(anotherBeforeFn).toHaveBeenCalledBefore(startFn); + + expect(errorFn).toHaveBeenCalledOnce(); + expect(anotherErrorFn).toHaveBeenCalledOnce(); + + expect(errorFn).toHaveBeenCalledAfter(startFn); + expect(anotherErrorFn).toHaveBeenCalledAfter(errorFn); + }); + + it('allows the cleanup of an extension point', async () => { + + const cleanup = engine.extend('start', 'before', beforeFn); + + expect(beforeFn).not.toHaveBeenCalled(); + + await app.start(); + + expect(beforeFn).toHaveBeenCalledOnce(); + expect(startFn).toHaveBeenCalledOnce(); + + cleanup(); + + await app.start(); + + expect(beforeFn).toHaveBeenCalledOnce(); + expect(startFn).toHaveBeenCalledTimes(2); + }); + + it('allows extensions to modify the original function arguments', async () => { + + const modifiedArgs = ['modified', 'args']; + + engine.extend('start', 'before', async (ctx) => { + + ctx.setArgs(modifiedArgs as any); + }); + + await app.start(); + + expect(startFn).toHaveBeenCalledOnce(); + expect(startFn).toHaveBeenCalledWith(...modifiedArgs); + }); + + 
it('allows extensions to return early from the hook chain', async () => { + + const earlyResult = { early: true }; + + engine.extend('start', 'before', async (ctx) => { + + ctx.setResult(earlyResult as any); + ctx.returnEarly(); + }); + + const result = await app.start(); + + expect(startFn).not.toHaveBeenCalled(); + expect(result).to.equal(earlyResult); + }); + + it('doesnt duplicate extensions added more than once', async () => { + + engine.extend('start', 'before', beforeFn); + engine.extend('start', 'before', beforeFn); + engine.extend('start', 'before', beforeFn); + + await app.start(); + + expect(beforeFn).toHaveBeenCalledOnce(); + }); + + it('can run an extension only once when specified', async () => { + + engine.extend('start', 'before', { + callback: beforeFn, + once: true + }); + + await app.start(); + await app.start(); + await app.start(); + + expect(beforeFn).toHaveBeenCalledOnce(); + expect(startFn).toHaveBeenCalledTimes(3); + }); + + it('captures and re-throws errors from the original function in error extensions', async () => { + + const originalError = new Error('Original function error'); + + startFn.mockImplementation(() => { throw originalError; }); + + engine.extend('start', 'error', errorFn); + + const [, err] = await attempt(() => app.start()); + + expect(err).to.equal(originalError); + expect(errorFn).toHaveBeenCalledOnce(); + + const ctx = errorFn.mock.calls[0]![0]; + + expect(ctx).to.have.property('error', originalError); + expect(ctx).to.have.property('point', 'error'); + }); + + it('captures and re-throws errors from before extensions', async () => { + + const beforeError = new Error('Before extension error'); + + engine.extend('start', 'before', async () => { + + throw beforeError; + }); + + engine.extend('start', 'error', errorFn); + + const [, err] = await attempt(() => app.start()); + + expect(err).to.equal(beforeError); + expect(startFn).not.toHaveBeenCalled(); + expect(errorFn).not.toHaveBeenCalled(); + }); + + it('captures and 
re-throws errors from after extensions', async () => { + + const afterError = new Error('After extension error'); + + engine.extend('start', 'after', async () => { + + throw afterError; + }); + + engine.extend('start', 'error', errorFn); + + const [, err] = await attempt(() => app.start()); + + expect(err).to.equal(afterError); + expect(startFn).toHaveBeenCalledOnce(); + expect(errorFn).not.toHaveBeenCalled(); + }); + + it('captures and re-throws errors from error extensions as well', async () => { + + const originalError = new Error('Original error'); + const errorExtensionError = new Error('Error extension error'); + + startFn.mockImplementation(() => { throw originalError; }); + + engine.extend('start', 'error', async () => { + + throw errorExtensionError; + }); + + const [, err] = await attempt(() => app.start()); + + expect(err).to.equal(errorExtensionError); + }); + + it('ignores errors thrown by extension if specified', async () => { + + const extensionError = new Error('Extension error'); + + engine.extend('start', 'before', { + callback: async () => { throw extensionError; }, + ignoreOnFail: true + }); + + engine.extend('start', 'after', afterFn); + + const [, err] = await attempt(() => app.start()); + + expect(err).to.be.null; + expect(startFn).toHaveBeenCalledOnce(); + expect(afterFn).toHaveBeenCalledOnce(); + }); + + it('captures results from original function', async () => { + + const originalResult = { data: 'test' }; + + startFn.mockReturnValue(originalResult); + + engine.extend('start', 'after', afterFn); + + const result = await app.start(); + + expect(result).to.equal(originalResult); + + const ctx = afterFn.mock.calls[0]![0]; + + expect(ctx).to.have.property('results', originalResult); + }); + + it('captures results from before extensions when early return is used', async () => { + + const earlyResult = { early: 'result' }; + + engine.extend('start', 'before', async (ctx) => { + + ctx.setResult(earlyResult as any); + ctx.returnEarly(); + }); + + 
const result = await app.start(); + + expect(result).to.equal(earlyResult); + expect(startFn).not.toHaveBeenCalled(); + }); + + it('captures results from after extensions via setResult', async () => { + + const originalResult = { original: true }; + const modifiedResult = { modified: true }; + + startFn.mockReturnValue(originalResult); + + engine.extend('start', 'after', async (ctx) => { + + expect(ctx.results).to.equal(originalResult); + ctx.setResult(modifiedResult as any); + }); + + const result = await app.start(); + + expect(result).to.equal(modifiedResult); + }); + }); + + describe('isHookError()', () => { + + it('returns true for HookError instances', () => { + + const error = new HookError('Test error'); + + expect(isHookError(error)).to.be.true; + }); + + it('returns false for regular Error instances', () => { + + const error = new Error('Regular error'); + + expect(isHookError(error)).to.be.false; + }); + + it('returns false for non-error values', () => { + + expect(isHookError(null)).to.be.false; + expect(isHookError(undefined)).to.be.false; + expect(isHookError('string')).to.be.false; + expect(isHookError(123)).to.be.false; + expect(isHookError({})).to.be.false; + }); + }); + + describe('HookError properties via fail()', () => { + + let app: TestApp; + let engine: HookEngine; + + beforeEach(() => { + + vi.resetAllMocks(); + app = new TestApp(); + engine = new HookEngine(); + engine.wrap(app, 'start'); + }); + + it('sets hookName and extPoint when fail() is called in before', async () => { + + engine.extend('start', 'before', async (ctx) => { + + ctx.fail('Test failure'); + }); + + const [, err] = await attempt(() => app.start()); + + expect(isHookError(err)).to.be.true; + expect(err).to.have.property('hookName', 'start'); + expect(err).to.have.property('extPoint', 'before'); + expect(err).to.have.property('message').that.includes('Test failure'); + }); + + it('sets hookName and extPoint when fail() is called in after', async () => { + + 
engine.extend('start', 'after', async (ctx) => { + + ctx.fail('After failure'); + }); + + const [, err] = await attempt(() => app.start()); + + expect(isHookError(err)).to.be.true; + expect(err).to.have.property('hookName', 'start'); + expect(err).to.have.property('extPoint', 'after'); + }); + + it('sets hookName and extPoint when fail() is called in error', async () => { + + const originalError = new Error('Original'); + + startFn.mockImplementation(() => { throw originalError; }); + + engine.extend('start', 'error', async (ctx) => { + + ctx.fail('Error handler failure'); + }); + + const [, err] = await attempt(() => app.start()); + + expect(isHookError(err)).to.be.true; + expect(err).to.have.property('hookName', 'start'); + expect(err).to.have.property('extPoint', 'error'); + }); + + it('sets originalError when fail() is called with an Error', async () => { + + const originalError = new Error('Original error'); + + engine.extend('start', 'before', async (ctx) => { + + ctx.fail(originalError); + }); + + const [, err] = await attempt(() => app.start()); + + expect(isHookError(err)).to.be.true; + expect(err).to.have.property('originalError', originalError); + }); + + it('does not set originalError when fail() is called with a string', async () => { + + engine.extend('start', 'before', async (ctx) => { + + ctx.fail('String message'); + }); + + const [, err] = await attempt(() => app.start()); + + expect(isHookError(err)).to.be.true; + expect(err).to.have.property('originalError', undefined); + }); + }); + + describe('engine.wrap()', () => { + + it('wraps an object method in-place', async () => { + + const app = new TestApp(); + const engine = new HookEngine(); + const originalStart = app.start; + + engine.wrap(app, 'start'); + + expect(app.start).to.not.equal(originalStart); + expect(app.start).to.be.a('function'); + + await app.start(); + + expect(startFn).toHaveBeenCalledOnce(); + }); + + it('binds to the instance automatically', async () => { + + const 
contextCapture = vi.fn(); + + class ContextApp { + + value = 'instance-value'; + + async getValue() { + + contextCapture(this.value); + return this.value; + } + } + + const app = new ContextApp(); + const engine = new HookEngine(); + + engine.wrap(app, 'getValue'); + + const result = await app.getValue(); + + expect(contextCapture).toHaveBeenCalledWith('instance-value'); + expect(result).to.equal('instance-value'); + }); + + it('allows extensions after wrapping', async () => { + + const app = new TestApp(); + const engine = new HookEngine(); + + engine.wrap(app, 'start'); + engine.extend('start', 'before', beforeFn); + engine.extend('start', 'after', afterFn); + + await app.start(); + + expect(beforeFn).toHaveBeenCalledOnce(); + expect(startFn).toHaveBeenCalledOnce(); + expect(afterFn).toHaveBeenCalledOnce(); + }); + + it('rejects invalid instance', () => { + + const engine = new HookEngine(); + + expect(() => engine.wrap(null as any, 'start')).to.throw(); + expect(() => engine.wrap(undefined as any, 'start')).to.throw(); + expect(() => engine.wrap('string' as any, 'start')).to.throw(); + }); + + it('preserves arguments and return values', async () => { + + const app = new TestApp(); + const engine = new HookEngine(); + const expectedResult = { wrapped: true }; + + startFn.mockReturnValue(expectedResult); + + engine.wrap(app, 'start'); + + const result = await app.start('arg1', 'arg2'); + + expect(startFn).toHaveBeenCalledWith('arg1', 'arg2'); + expect(result).to.equal(expectedResult); + }); + }); + + describe('engine.clear()', () => { + + it('removes all extensions', async () => { + + const app = new TestApp(); + const engine = new HookEngine(); + + engine.wrap(app, 'start'); + engine.extend('start', 'before', beforeFn); + engine.extend('start', 'after', afterFn); + + await app.start(); + + expect(beforeFn).toHaveBeenCalledOnce(); + expect(afterFn).toHaveBeenCalledOnce(); + + beforeFn.mockReset(); + afterFn.mockReset(); + + engine.clear(); + + // Re-wrap after 
clear + engine.wrap(app, 'start'); + + await app.start(); + + expect(beforeFn).not.toHaveBeenCalled(); + expect(afterFn).not.toHaveBeenCalled(); + expect(startFn).toHaveBeenCalled(); + }); + + it('allows re-registration of hooks after clear', async () => { + + const app1 = new TestApp(); + const app2 = new TestApp(); + const engine = new HookEngine(); + + engine.wrap(app1, 'start'); + + engine.clear(); + + // Should not throw - hook can be registered again with fresh instance + engine.wrap(app2, 'start'); + engine.extend('start', 'before', beforeFn); + + await app2.start(); + + expect(beforeFn).toHaveBeenCalledOnce(); + }); + + it('clears registrations so hooks can be re-made', () => { + + const app = new TestApp(); + const engine = new HookEngine(); + + engine.make('start', app.start, { bindTo: app }); + + // Should throw - already registered + expect(() => engine.make('start', app.start, { bindTo: app })).to.throw(); + + engine.clear(); + + // Should not throw after clear + expect(() => engine.make('start', app.start, { bindTo: app })).to.not.throw(); + }); + }); + + describe('context.removeHook()', () => { + + it('allows an extension to remove itself', async () => { + + const app = new TestApp(); + const engine = new HookEngine(); + + engine.wrap(app, 'start'); + + let callCount = 0; + engine.extend('start', 'before', async (ctx) => { + + callCount++; + + if (callCount >= 2) { + ctx.removeHook(); + } + }); + + await app.start(); + await app.start(); + await app.start(); + await app.start(); + + expect(callCount).to.equal(2); + }); + + it('removes the correct extension from multiple', async () => { + + const app = new TestApp(); + const engine = new HookEngine(); + + engine.wrap(app, 'start'); + + const firstFn = vi.fn(); + const selfRemovingFn = vi.fn(async (ctx) => { + + ctx.removeHook(); + }); + const lastFn = vi.fn(); + + engine.extend('start', 'before', firstFn); + engine.extend('start', 'before', selfRemovingFn); + engine.extend('start', 'before', lastFn); 
+ + await app.start(); + + expect(firstFn).toHaveBeenCalledOnce(); + expect(selfRemovingFn).toHaveBeenCalledOnce(); + expect(lastFn).toHaveBeenCalledOnce(); + + firstFn.mockReset(); + selfRemovingFn.mockReset(); + lastFn.mockReset(); + + await app.start(); + + expect(firstFn).toHaveBeenCalledOnce(); + expect(selfRemovingFn).not.toHaveBeenCalled(); + expect(lastFn).toHaveBeenCalledOnce(); + }); + }); +}); From 75821bd12e318b1574c388ebfaf89935e97f7c4d Mon Sep 17 00:00:00 2001 From: Danilo Alonso Date: Sat, 27 Dec 2025 20:32:07 -0500 Subject: [PATCH 05/13] chore(hooks): add package metadata and fix unused type parameter --- packages/hooks/package.json | 16 +++++++++------- packages/hooks/src/index.ts | 6 +++--- 2 files changed, 12 insertions(+), 10 deletions(-) diff --git a/packages/hooks/package.json b/packages/hooks/package.json index 8400a39..aa101ed 100644 --- a/packages/hooks/package.json +++ b/packages/hooks/package.json @@ -1,7 +1,7 @@ { "name": "@logosdx/hooks", "version": "0.0.1", - "description": "", + "description": "A lightweight, type-safe hook system for extending function behavior", "exports": { ".": { "types": "./dist/types/index.d.ts", @@ -9,17 +9,19 @@ "import": "./dist/esm/index.mjs" } }, + "sideEffects": false, + "browserNamespace": "LogosDx.Hooks", "scripts": { "build": "node ../../scripts/build.mjs", "lint": "pnpm tsc --noEmit --project tsconfig.json" }, - "files": [ - "dist/", - "docs/", - "readme.md", - "LICENSE" + "files": [], + "keywords": [ + "hooks", + "middleware", + "extensions", + "before after" ], - "keywords": [], "homepage": "https://logosdx.dev/", "bugs": { "url": "https://github.com/logosdx/monorepo/issues", diff --git a/packages/hooks/src/index.ts b/packages/hooks/src/index.ts index 43534eb..5adacd2 100644 --- a/packages/hooks/src/index.ts +++ b/packages/hooks/src/index.ts @@ -127,7 +127,7 @@ type HookExtOptions = { type HookExtOrOptions = HookFn | HookExtOptions -type MakeHookOptions = { +type MakeHookOptions = { bindTo?: any } @@ 
-259,7 +259,7 @@ export class HookEngine { make>( name: K, cb: FuncOrNever, - opts: MakeHookOptions> = {} + opts: MakeHookOptions = {} ) { assert(typeof name === 'string', '"name" must be a string'); @@ -395,7 +395,7 @@ export class HookEngine { wrap>( instance: Shape, name: K, - opts?: MakeHookOptions> + opts?: MakeHookOptions ) { assert(isObject(instance), '"instance" must be an object'); From 362ff622c888c8c0e9a87880a2114546ed5e51ed Mon Sep 17 00:00:00 2001 From: Danilo Alonso Date: Sun, 11 Jan 2026 03:33:24 -0500 Subject: [PATCH 06/13] feat(fetch)!: add PropertyStore, resilience policies, and serializers BREAKING CHANGE: `.headers` and `.params` getters now return lowercase method keys (e.g., `post` instead of `POST`). - Add PropertyStore for unified header/param management with method overrides - Add predicate function support to invalidatePath() for custom key matching - Add endpointSerializer and requestSerializer for customizable cache keys - Export ResiliencePolicy, DedupePolicy, CachePolicy, RateLimitPolicy - Refactor engine.ts to use composition via PropertyStore and policies - Normalize HTTP methods to lowercase internally for consistent storage - Add comprehensive tests for PropertyStore (36) and serializers (22) --- .changeset/pre.json | 5 +- .changeset/thin-crews-vanish.md | 35 + docs/.vitepress/config.mts | 1 + docs/packages/fetch.md | 303 +++++ docs/packages/hooks.md | 667 +++++++++++ llm-helpers/fetch.md | 123 +- packages/fetch/src/engine.ts | 1251 ++++---------------- packages/fetch/src/index.ts | 30 + packages/fetch/src/policies/base.ts | 232 ++++ packages/fetch/src/policies/cache.ts | 423 +++++++ packages/fetch/src/policies/dedupe.ts | 206 ++++ packages/fetch/src/policies/index.ts | 24 + packages/fetch/src/policies/rate-limit.ts | 414 +++++++ packages/fetch/src/policies/types.ts | 95 ++ packages/fetch/src/property-store.ts | 356 ++++++ packages/fetch/src/serializers/endpoint.ts | 26 + packages/fetch/src/serializers/index.ts | 2 + 
packages/fetch/src/serializers/request.ts | 116 ++ tests/src/fetch/caching.ts | 66 ++ tests/src/fetch/property-store.ts | 463 ++++++++ tests/src/fetch/serializers.ts | 429 +++++++ 21 files changed, 4221 insertions(+), 1046 deletions(-) create mode 100644 .changeset/thin-crews-vanish.md create mode 100644 docs/packages/hooks.md create mode 100644 packages/fetch/src/policies/base.ts create mode 100644 packages/fetch/src/policies/cache.ts create mode 100644 packages/fetch/src/policies/dedupe.ts create mode 100644 packages/fetch/src/policies/index.ts create mode 100644 packages/fetch/src/policies/rate-limit.ts create mode 100644 packages/fetch/src/policies/types.ts create mode 100644 packages/fetch/src/property-store.ts create mode 100644 packages/fetch/src/serializers/endpoint.ts create mode 100644 packages/fetch/src/serializers/index.ts create mode 100644 packages/fetch/src/serializers/request.ts create mode 100644 tests/src/fetch/property-store.ts create mode 100644 tests/src/fetch/serializers.ts diff --git a/.changeset/pre.json b/.changeset/pre.json index 219c1cb..93879fc 100644 --- a/.changeset/pre.json +++ b/.changeset/pre.json @@ -13,5 +13,8 @@ "@logosdx/utils": "5.0.0", "@logosdx/tests": "0.0.1" }, - "changesets": [] + "changesets": [ + "rich-pears-jam", + "thin-crews-vanish" + ] } diff --git a/.changeset/thin-crews-vanish.md b/.changeset/thin-crews-vanish.md new file mode 100644 index 0000000..3fd7da3 --- /dev/null +++ b/.changeset/thin-crews-vanish.md @@ -0,0 +1,35 @@ +--- +"@logosdx/fetch": major +--- + +## Breaking Changes + +### `.headers` and `.params` getters now return lowercase method keys + +Method keys in the headers/params getters are now normalized to lowercase. 
+ +**Before:** +```ts +const { POST: postHeaders } = api.headers; +const { GET: getParams } = api.params; +``` + +**After:** +```ts +const { post: postHeaders } = api.headers; +const { get: getParams } = api.params; +``` + +**Migration:** Update any code accessing method-specific headers/params via the getters to use lowercase method names. + +## Added + +* `feat(fetch):` Add `PropertyStore` for unified header/param management with method-specific overrides +* `feat(fetch):` Add predicate function support to `invalidatePath()` for custom cache key matching +* `feat(fetch):` Add `endpointSerializer` and `requestSerializer` for customizable cache/dedupe keys +* `feat(fetch):` Export `ResiliencePolicy`, `DedupePolicy`, `CachePolicy`, `RateLimitPolicy` classes + +## Changed + +* `refactor(fetch):` Internal refactor to use `PropertyStore` for headers/params (API unchanged) +* `refactor(fetch):` Normalize HTTP methods to lowercase internally for consistent storage diff --git a/docs/.vitepress/config.mts b/docs/.vitepress/config.mts index ecb8dc4..dddbd31 100644 --- a/docs/.vitepress/config.mts +++ b/docs/.vitepress/config.mts @@ -2,6 +2,7 @@ import { DefaultTheme, defineConfig } from 'vitepress' const packages: DefaultTheme.SidebarItem[] = [ ['Observer', 'observer'], + ['Hooks', 'hooks'], ['Utils', 'utils'], ['Fetch', 'fetch'], ['Dom', 'dom'], diff --git a/docs/packages/fetch.md b/docs/packages/fetch.md index b6a193e..9c4dbb4 100644 --- a/docs/packages/fetch.md +++ b/docs/packages/fetch.md @@ -1323,6 +1323,12 @@ await api.invalidatePath('/users'); // Invalidate by path pattern (RegExp) await api.invalidatePath(/^\/api\/v\d+\/users/); +// Invalidate with custom predicate (for custom serializers) +await api.invalidatePath((key) => { + // Full control over key matching - useful when using custom serializers + return key.includes('/users') && key.includes('Bearer'); +}); + // Get cache statistics const stats = api.cacheStats(); console.log('Cache size:', stats.cacheSize); @@ 
-1727,6 +1733,175 @@ Route matching runs on **every request**. Poorly written regular expressions can **Best practice:** Prefer string-based matchers (`startsWith`, `endsWith`, `includes`, `is`) over regex. They're faster and immune to ReDoS. Only use `match` when you need pattern complexity that strings can't express. ::: +## Request Serializers + +Serializers generate unique keys for identifying requests. These keys are used by deduplication, caching, and rate limiting to determine which requests should share state. + +### Built-in Serializers + +FetchEngine provides two built-in serializers, each optimized for different use cases: + +#### Request Serializer (Default for Cache & Dedupe) + +Generates keys based on full request identity: method, path, query string, payload, and stable headers. + +```typescript +// Key format: method|path+query|payload|headers +// Example: "GET|/users/123?page=1|undefined|{"accept":"application/json","authorization":"Bearer token"}" +``` + +**Stable Headers Only:** The request serializer only includes semantically meaningful headers that affect response content: + +| Included Headers | Purpose | +|-----------------|---------| +| `authorization` | Different users get different responses | +| `accept` | Different response formats (JSON, XML, etc.) | +| `accept-language` | Localized responses | +| `content-type` | Format of request payload (for POST/PUT) | +| `accept-encoding` | Response compression format | + +**Excluded Headers (Dynamic):** +- `X-Timestamp`, `Date` - Change every request +- `X-HMAC-Signature` - Computed per-request +- `X-Request-Id`, `X-Correlation-Id` - Unique per-request +- `Cache-Control`, `Pragma` - Control directives, not identity + +This prevents cache pollution from dynamic headers that would make every request unique. + +#### Endpoint Serializer (Default for Rate Limit) + +Generates keys based on endpoint identity only: method and pathname (excludes query string and payload). 
+ +```typescript +// Key format: method|pathname +// Example: "GET|/users/123" +``` + +This groups all requests to the same endpoint together, ideal for rate limiting where you want to protect an endpoint from overload regardless of specific parameters. + +### Using Built-in Serializers + +```typescript +import { endpointSerializer, requestSerializer } from '@logosdx/fetch'; + +// Use endpoint serializer for cache (group by endpoint) +const api = new FetchEngine({ + baseUrl: 'https://api.example.com', + cachePolicy: { + serializer: endpointSerializer, // All /users/123?page=1 and /users/123?page=2 share cache + ttl: 60000 + } +}); + +// Use request serializer for rate limiting (per unique request) +const api2 = new FetchEngine({ + baseUrl: 'https://api.example.com', + rateLimitPolicy: { + serializer: requestSerializer, // Each unique request gets its own bucket + maxCalls: 100, + windowMs: 60000 + } +}); +``` + +### Custom Serializers + +Create custom serializers when the built-ins don't match your needs: + +```typescript +// User-scoped rate limiting +const userSerializer = (ctx: RequestKeyOptions) => { + return `user:${ctx.state?.userId ?? 'anonymous'}|${ctx.method}|${ctx.url.pathname}`; +}; + +const api = new FetchEngine({ + baseUrl: 'https://api.example.com', + rateLimitPolicy: { + serializer: userSerializer, // Each user gets their own rate limit bucket + maxCalls: 100, + windowMs: 60000 + } +}); + +// Tenant-scoped caching +const tenantSerializer = (ctx: RequestKeyOptions) => { + const tenant = ctx.headers?.['X-Tenant-ID'] ?? 
'default'; + return `${tenant}|${ctx.method}|${ctx.url.pathname}${ctx.url.search}`; +}; + +const multiTenantApi = new FetchEngine({ + baseUrl: 'https://api.example.com', + cachePolicy: { + serializer: tenantSerializer, // Each tenant has separate cache + ttl: 60000 + } +}); + +// Ignore certain params for caching +const ignoreTimestampSerializer = (ctx: RequestKeyOptions) => { + const url = new URL(ctx.url); + url.searchParams.delete('_t'); // Remove timestamp param + url.searchParams.delete('nocache'); + return `${ctx.method}|${url.pathname}${url.search}`; +}; +``` + +### Serializer Signature + +```typescript +type RequestSerializer = ( + ctx: RequestKeyOptions +) => string; + +interface RequestKeyOptions { + method: string; // HTTP method (uppercase) + path: string; // Original path from request + url: URL; // Full URL object (includes pathname, search, etc.) + payload?: unknown; // Request body (if any) + headers?: H; // Request headers + params?: P; // URL parameters + state?: S; // Instance state +} +``` + +### Per-Rule Serializers + +Override serializers for specific routes: + +```typescript +const api = new FetchEngine({ + baseUrl: 'https://api.example.com', + cachePolicy: { + enabled: true, + ttl: 60000, + rules: [ + // GraphQL: cache by operation name only + { + is: '/graphql', + serializer: (ctx) => `graphql:${ctx.payload?.operationName ?? 'unknown'}` + }, + + // Search: ignore pagination for cache + { + startsWith: '/search', + serializer: (ctx) => { + const url = new URL(ctx.url); + url.searchParams.delete('page'); + url.searchParams.delete('limit'); + return `search:${url.search}`; + } + }, + + // User profile: cache per user + { + match: /^\/users\/\d+$/, + serializer: (ctx) => `user:${ctx.url.pathname}` + } + ] + } +}); +``` + ## Event System FetchEngine extends EventTarget with comprehensive lifecycle events, providing observability in all JavaScript environments. 
@@ -2431,3 +2606,131 @@ if (isDev) { }); } ``` + +## Policy Architecture + +FetchEngine's resilience policies (deduplication, caching, rate limiting) share a common architecture that enables consistent behavior and efficient configuration resolution. + +### Three-Method Pattern + +All policies implement the same three-method pattern: + +``` +┌─────────────────────────────────────────────────────────────┐ +│ ResiliencePolicy │ +├─────────────────────────────────────────────────────────────┤ +│ init(config) Parse config → Initialize state (O(1)) │ +│ resolve(...) Memoized lookup + dynamic checks (O(1)*) │ +│ compute(...) Rule matching (O(n) first time only) │ +└─────────────────────────────────────────────────────────────┘ + * O(1) amortized due to memoization +``` + +1. **`init`**: Called during FetchEngine construction. Parses configuration, validates rules, and sets up internal state. + +2. **`resolve`**: Called for every request. Returns the effective policy configuration by combining memoized rule matching with dynamic skip callbacks. + +3. **`compute`**: Called once per unique method+path combination. Performs O(n) rule matching and caches the result. + +### Configuration Resolution + +When a request is made, each policy resolves its configuration in order: + +``` +Request → Policy.resolve(method, path, context) + │ + ├── Check memoized cache (O(1)) + │ └── Cache miss? → compute() → cache result + │ + ├── Check dynamic skip callback + │ └── Skip? → return null + │ + └── Return merged rule (policy defaults + matched rule) +``` + +### Rule Matching Priority + +Rules are evaluated in declaration order. 
The first matching rule wins: + +```typescript +rules: [ + { is: '/users', ttl: 30000 }, // Checked first (exact match) + { startsWith: '/users', ttl: 60000 }, // Checked second + { match: /^\/users/, ttl: 120000 } // Checked third +] +// Request to '/users' matches first rule (30s TTL) +// Request to '/users/123' matches second rule (60s TTL) +``` + +### Policy Execution Order + +Policies are evaluated in a specific order during request processing: + +``` +Request + │ + ├── 1. Rate Limit (guard) ─────────┐ + │ └── Wait or reject │ + │ │ + ├── 2. Cache Check ────────────────┤ + │ └── Hit? Return cached │ + │ │ + ├── 3. Dedupe Check ───────────────┤ + │ └── In-flight? Join it │ + │ │ + ├── 4. Network Request ────────────┤ + │ │ + ├── 5. Store Cache (on success) ───┤ + │ │ + └── Response ──────────────────────┘ +``` + +**Key implications:** +- Rate limiting runs **before** cache checks - cached responses don't consume rate limit tokens +- Deduplication runs **after** cache checks - cache hits return immediately without dedupe +- Only the request initiator consumes a rate limit token; joiners share the result + +### Memoization Strategy + +Rule matching results are cached by `method:path` key: + +```typescript +// First request to GET /users/123 +resolve('GET', '/users/123', ctx) + → compute() runs, caches result + → rulesCache.set('GET:/users/123', resolvedRule) + +// Subsequent requests to same endpoint +resolve('GET', '/users/123', ctx) + → rulesCache.get('GET:/users/123') // O(1) hit + → Check skip callback + → Return cached rule +``` + +This means: +- First request to each endpoint: O(n) rule matching +- Subsequent requests: O(1) cache lookup +- Skip callbacks always run (they depend on request-specific context) + +### Policy State + +Each policy maintains its own internal state: + +```typescript +interface PolicyInternalState { + enabled: boolean; // Global enable/disable + methods: Set; // Applicable HTTP methods + serializer: RequestSerializer; // Key 
generation function + rulesCache: Map; // Memoized rule lookups +} +``` + +### Extending Policies + +While the built-in policies cover most use cases, the architecture is designed for extensibility. Each policy class extends `ResiliencePolicy` and implements: + +- `getDefaultSerializer()` - Returns the default key generation function +- `getDefaultMethods()` - Returns which HTTP methods are enabled by default +- `mergeRuleWithDefaults(rule)` - Merges matched rules with policy defaults + +This shared base ensures consistent configuration handling across all resilience features. diff --git a/docs/packages/hooks.md b/docs/packages/hooks.md new file mode 100644 index 0000000..92656c0 --- /dev/null +++ b/docs/packages/hooks.md @@ -0,0 +1,667 @@ +--- +title: Hooks +description: A lightweight, type-safe hook system for extending function behavior. +--- + +# Hooks + +Functions do one thing well — until you need to add logging, validation, caching, or metrics. `@logosdx/hooks` lets you extend function behavior without modifying the original code. Wrap any function, add before/after/error extensions, modify arguments, change results, or abort execution entirely. Every extension is type-safe, every hook is trackable, and cleanup is automatic. It's aspect-oriented programming that actually makes sense. 
+ +[[toc]] + +## Installation + + +::: code-group + +```bash [npm] +npm install @logosdx/hooks +``` + +```bash [yarn] +yarn add @logosdx/hooks +``` + +```bash [pnpm] +pnpm add @logosdx/hooks +``` + +::: + + +**CDN:** + +```html + + +``` + +## Quick Start + +```typescript +import { HookEngine } from '@logosdx/hooks' + +// Define your hookable interface +interface UserService { + save(user: User): Promise + delete(id: string): Promise +} + +// Create engine and wrap methods +const hooks = new HookEngine() +const service = new UserServiceImpl() + +hooks.wrap(service, 'save') + +// Add validation before save +hooks.extend('save', 'before', async (ctx) => { + const [user] = ctx.args + if (!user.email) { + ctx.fail('Email is required') + } +}) + +// Add logging after save +hooks.extend('save', 'after', async (ctx) => { + console.log('User saved:', ctx.results) +}) + +// Add error handling +hooks.extend('save', 'error', async (ctx) => { + console.error('Save failed:', ctx.error) + // Could retry, transform error, or notify +}) + +// Use normally - hooks run automatically +await service.save({ name: 'John', email: 'john@example.com' }) +``` + +## Core Concepts + +Hooks is built around three ideas: + +1. **Wrapping** - Transform any function into a hookable function +2. **Extension Points** - Add behavior at `before`, `after`, or `error` stages +3. **Context Control** - Modify arguments, results, or abort execution + +Extensions are registered with `extend()` and run in insertion order. Each extension receives a context object with full control over the hook lifecycle. + +## HookEngine + +The main class for creating and managing hooks. 
+
+### Constructor
+
+```typescript
+new HookEngine<Shape>()
+```
+
+**Type Parameters:**
+
+- `Shape` - Interface defining your hookable functions
+
+**Example:**
+
+```typescript
+interface PaymentService {
+    charge(amount: number, cardId: string): Promise<Receipt>
+    refund(receiptId: string): Promise<void>
+}
+
+const hooks = new HookEngine<PaymentService>()
+```
+
+### Creating Hooks
+
+#### `wrap()`
+
+Wrap an object method in-place to make it hookable.
+
+```typescript
+wrap<K extends keyof Shape>(
+    instance: Shape,
+    name: K,
+    opts?: MakeHookOptions
+): void
+```
+
+**Parameters:**
+
+- `instance` - Object containing the method to wrap
+- `name` - Name of the method to wrap
+- `opts` - Optional configuration
+
+**Example:**
+
+```typescript
+class OrderService {
+    async process(order: Order) {
+        // processing logic
+    }
+}
+
+const service = new OrderService()
+const hooks = new HookEngine<OrderService>()
+
+hooks.wrap(service, 'process')
+
+// Now service.process() is hookable
+hooks.extend('process', 'before', async (ctx) => {
+    console.log('Processing order:', ctx.args[0])
+})
+```
+
+#### `make()`
+
+Create a hookable function without modifying the original.
+
+```typescript
+make<K extends keyof Shape>(
+    name: K,
+    cb: Function,
+    opts?: MakeHookOptions
+): Function
+```
+
+**Parameters:**
+
+- `name` - Unique name for this hook
+- `cb` - The original function to wrap
+- `opts` - Optional configuration (e.g., `bindTo` for `this` context)
+
+**Returns:** Wrapped function with hook support
+
+**Example:**
+
+```typescript
+const hooks = new HookEngine<{ fetch: typeof fetch }>()
+
+const hookedFetch = hooks.make('fetch', fetch)
+
+hooks.extend('fetch', 'before', async (ctx) => {
+    console.log('Fetching:', ctx.args[0])
+})
+
+await hookedFetch('/api/users')
+```
+
+### Adding Extensions
+
+#### `extend()`
+
+Add an extension to a registered hook.
+
+```typescript
+extend<K extends keyof Shape>(
+    name: K,
+    extensionPoint: 'before' | 'after' | 'error',
+    cbOrOpts: HookFn<Shape[K]> | HookExtOptions<Shape[K]>
+): Cleanup
+```
+
+**Parameters:**
+
+- `name` - Name of the registered hook
+- `extensionPoint` - When to run: `before`, `after`, or `error`
+- `cbOrOpts` - Extension callback or options object
+
+**Returns:** Cleanup function to remove the extension
+
+**Extension Points:**
+
+| Point | When it runs | Can modify |
+|-------|--------------|------------|
+| `before` | Before original function | Arguments, can return early |
+| `after` | After successful execution | Results |
+| `error` | When original throws | Can handle/transform errors |
+
+**Examples:**
+
+```typescript
+// Simple callback
+const cleanup = hooks.extend('save', 'before', async (ctx) => {
+    console.log('About to save:', ctx.args)
+})
+
+// With options
+hooks.extend('save', 'after', {
+    callback: async (ctx) => { console.log('Saved!') },
+    once: true, // Remove after first run
+    ignoreOnFail: true // Don't throw if this extension fails
+})
+
+// Remove extension later
+cleanup()
+```
+
+### Utility Methods
+
+#### `clear()`
+
+Remove all registered hooks and extensions.
+
+```typescript
+clear(): void
+```
+
+**Example:**
+
+```typescript
+hooks.wrap(service, 'save')
+hooks.extend('save', 'before', validator)
+
+// Reset for testing
+hooks.clear()
+
+// service.save() still works, but validator no longer runs
+```
+
+## HookContext
+
+Context object passed to every extension callback.
+
+### Properties
+
+```typescript
+interface HookContext<F extends Func> {
+    args: Parameters<F> // Current arguments
+    results?: ReturnType<F> // Results (in after/error)
+    point: 'before' | 'after' | 'error' // Current extension point
+    error?: unknown // Error (only in error extensions)
+}
+```
+
+### Methods
+
+#### `setArgs()`
+
+Replace the arguments passed to the original function.
+
+```typescript
+setArgs(next: Parameters<F>): void
+```
+
+**Example:**
+
+```typescript
+hooks.extend('save', 'before', async (ctx) => {
+    const [user] = ctx.args
+
+    // Add timestamp to user
+    ctx.setArgs([{ ...user, updatedAt: new Date() }])
+})
+```
+
+#### `setResult()`
+
+Replace the result returned from the hook chain.
+
+```typescript
+setResult(next: ReturnType<F>): void
+```
+
+**Example:**
+
+```typescript
+hooks.extend('fetch', 'after', async (ctx) => {
+    // Transform response
+    ctx.setResult({
+        ...ctx.results,
+        cached: true,
+        fetchedAt: new Date()
+    })
+})
+```
+
+#### `returnEarly()`
+
+Skip the original function and return with current results.
+
+```typescript
+returnEarly(): void
+```
+
+**Example:**
+
+```typescript
+hooks.extend('fetch', 'before', async (ctx) => {
+    const [url] = ctx.args
+    const cached = cache.get(url)
+
+    if (cached) {
+        ctx.setResult(cached)
+        ctx.returnEarly() // Skip actual fetch
+    }
+})
+```
+
+#### `fail()`
+
+Abort execution and throw a HookError.
+
+```typescript
+fail(error?: unknown): never
+```
+
+**Example:**
+
+```typescript
+hooks.extend('save', 'before', async (ctx) => {
+    const [user] = ctx.args
+
+    if (!user.email) {
+        ctx.fail('Email is required')
+    }
+
+    if (!isValidEmail(user.email)) {
+        ctx.fail(new ValidationError('Invalid email format'))
+    }
+})
+```
+
+#### `removeHook()`
+
+Remove the current extension from future executions.
+
+```typescript
+removeHook(): void
+```
+
+**Example:**
+
+```typescript
+let attempts = 0
+
+hooks.extend('connect', 'error', async (ctx) => {
+    attempts++
+
+    if (attempts >= 3) {
+        console.log('Max retries reached, removing retry handler')
+        ctx.removeHook()
+    }
+})
+```
+
+## Extension Options
+
+When using the options object form of `extend()`:
+
+```typescript
+interface HookExtOptions<F extends Func> {
+    callback: HookFn<F> // The extension function
+    once?: true // Remove after first execution
+    ignoreOnFail?: true // Don't throw if extension fails
+}
+```
+
+### `once`
+
+Extension runs only once, then removes itself.
+
+```typescript
+hooks.extend('init', 'before', {
+    callback: async (ctx) => {
+        console.log('First-time initialization')
+    },
+    once: true
+})
+```
+
+### `ignoreOnFail`
+
+If the extension throws, continue execution instead of failing.
+
+```typescript
+hooks.extend('save', 'after', {
+    callback: async (ctx) => {
+        await analytics.track('user_saved', ctx.results) // Non-critical
+    },
+    ignoreOnFail: true // Don't fail the save if analytics fails
+})
+```
+
+## Error Handling
+
+### HookError
+
+Error thrown when `fail()` is called or hook execution fails.
+
+```typescript
+class HookError extends Error {
+    hookName?: string // Name of the hook
+    extPoint?: string // Extension point: 'before', 'after', 'error'
+    originalError?: Error // Original error if fail() was called with one
+    aborted: boolean // Whether explicitly aborted via fail()
+}
+```
+
+### isHookError()
+
+Type guard to check if an error is a HookError.
+ +```typescript +isHookError(error: unknown): error is HookError +``` + +**Example:** + +```typescript +import { attempt } from '@logosdx/utils' +import { isHookError } from '@logosdx/hooks' + +const [result, err] = await attempt(() => service.save(user)) + +if (isHookError(err)) { + console.log(`Hook "${err.hookName}" failed at "${err.extPoint}"`) + console.log('Reason:', err.message) + + if (err.originalError) { + console.log('Caused by:', err.originalError) + } +} +``` + +## Patterns & Examples + +### Validation + +```typescript +hooks.extend('createUser', 'before', async (ctx) => { + const [userData] = ctx.args + + const errors: string[] = [] + + if (!userData.email) errors.push('Email required') + if (!userData.password) errors.push('Password required') + if (userData.password?.length < 8) errors.push('Password too short') + + if (errors.length > 0) { + ctx.fail(new ValidationError(errors.join(', '))) + } +}) +``` + +### Caching + +```typescript +const cache = new Map() + +hooks.extend('fetchUser', 'before', async (ctx) => { + const [userId] = ctx.args + const cached = cache.get(userId) + + if (cached && !isExpired(cached)) { + ctx.setResult(cached.data) + ctx.returnEarly() + } +}) + +hooks.extend('fetchUser', 'after', async (ctx) => { + const [userId] = ctx.args + cache.set(userId, { + data: ctx.results, + expiresAt: Date.now() + 60000 + }) +}) +``` + +### Logging & Metrics + +```typescript +hooks.extend('processOrder', 'before', async (ctx) => { + const [order] = ctx.args + console.log(`Processing order ${order.id}`) + ctx.args[0] = { ...order, startedAt: Date.now() } + ctx.setArgs(ctx.args) +}) + +hooks.extend('processOrder', 'after', async (ctx) => { + const duration = Date.now() - ctx.args[0].startedAt + metrics.record('order.processing.duration', duration) +}) + +hooks.extend('processOrder', 'error', async (ctx) => { + metrics.increment('order.processing.failures') + console.error('Order processing failed:', ctx.error) +}) +``` + +### Authentication + 
+```typescript
+hooks.extend('secureEndpoint', 'before', async (ctx) => {
+    const token = getAuthToken()
+
+    if (!token) {
+        ctx.fail(new AuthError('Not authenticated'))
+    }
+
+    const user = await validateToken(token)
+
+    if (!user) {
+        ctx.fail(new AuthError('Invalid token'))
+    }
+
+    // Inject user into args
+    ctx.setArgs([...ctx.args, { user }])
+})
+```
+
+### Retry Logic
+
+```typescript
+hooks.extend('unreliableService', 'error', async (ctx) => {
+    const maxRetries = 3
+    let retries = ctx.args[ctx.args.length - 1]?.retries ?? 0
+
+    if (retries < maxRetries) {
+        console.log(`Retry attempt ${retries + 1}/${maxRetries}`)
+
+        // Modify args to track retries
+        ctx.setArgs([...ctx.args.slice(0, -1), { retries: retries + 1 }])
+
+        // Note: This doesn't actually retry - you'd need external retry logic
+        // This pattern is better suited for logging/metrics in error handlers
+    }
+})
+```
+
+## Type Definitions
+
+### Core Types
+
+```typescript
+// Hook function signature
+type HookFn<F extends Func> = (ctx: HookContext<F>) => Promise<void>
+
+// Extension options
+interface HookExtOptions<F extends Func> {
+    callback: HookFn<F>
+    once?: true
+    ignoreOnFail?: true
+}
+
+// Make options
+interface MakeHookOptions {
+    bindTo?: any // `this` context for the wrapped function
+}
+```
+
+### HookContext
+
+```typescript
+interface HookContext<F extends Func> {
+    args: Parameters<F>
+    results?: Awaited<ReturnType<F>>
+    point: 'before' | 'after' | 'error'
+    error?: unknown
+
+    fail: (error?: unknown) => never
+    setArgs: (next: Parameters<F>) => void
+    setResult: (next: Awaited<ReturnType<F>>) => void
+    returnEarly: () => void
+    removeHook: () => void
+}
+```
+
+## Best Practices
+
+### Keep Extensions Focused
+
+```typescript
+// Good: Single responsibility
+hooks.extend('save', 'before', validateUser)
+hooks.extend('save', 'before', sanitizeInput)
+hooks.extend('save', 'after', logSuccess)
+
+// Avoid: Multiple responsibilities in one extension
+hooks.extend('save', 'before', async (ctx) => {
+    // validation AND sanitization AND logging...
+})
+```
+
+### Use `ignoreOnFail` for Non-Critical Extensions
+
+```typescript
+// Critical: validation must succeed
+hooks.extend('save', 'before', validateUser)
+
+// Non-critical: analytics can fail silently
+hooks.extend('save', 'after', {
+    callback: trackAnalytics,
+    ignoreOnFail: true
+})
+```
+
+### Clean Up When Done
+
+```typescript
+// Store cleanup functions
+const cleanups = [
+    hooks.extend('save', 'before', validator),
+    hooks.extend('save', 'after', logger)
+]
+
+// Clean up all at once
+cleanups.forEach(cleanup => cleanup())
+```
+
+### Type Your Hook Shapes
+
+```typescript
+// Define clear interfaces for hookable services
+interface OrderService {
+    create(order: OrderInput): Promise<Order>
+    update(id: string, updates: Partial<Order>): Promise<Order>
+    cancel(id: string, reason: string): Promise<void>
+}
+
+const hooks = new HookEngine<OrderService>()
+// Now all hook names and argument types are enforced
+```
+
+## Summary
+
+The `@logosdx/hooks` library provides a clean way to extend function behavior without modifying original code. Use it for cross-cutting concerns like validation, caching, logging, and error handling while keeping your core logic clean and focused.
diff --git a/llm-helpers/fetch.md b/llm-helpers/fetch.md index a00b7d6..0387ed4 100644 --- a/llm-helpers/fetch.md +++ b/llm-helpers/fetch.md @@ -429,8 +429,9 @@ await api.clearCache(); // Clear all await api.deleteCache(key); // Delete specific key await api.invalidateCache((key) => key.includes('user')); // By predicate await api.invalidatePath('/users'); // By path prefix -await api.invalidatePath(/^\/api\/v\d+/); // By regex -const stats = api.cacheStats(); // { cacheSize, inflightCount } +await api.invalidatePath(/^\/api\/v\d+/); // By regex +await api.invalidatePath((key) => key.includes('user')); // By predicate (custom serializers) +const stats = api.cacheStats(); // { cacheSize, inflightCount } ``` ### Caching Types @@ -1006,4 +1007,122 @@ const timeoutId = setTimeout(() => { const [response, err] = await attempt(() => request); clearTimeout(timeoutId); +``` + +## Request Serializers + +Serializers generate unique keys for identifying requests. Used by dedupe, cache, and rate limit policies. + +### Built-in Serializers + +```typescript +import { endpointSerializer, requestSerializer } from '@logosdx/fetch'; + +// requestSerializer (Default for Cache & Dedupe) +// Format: method|path+query|payload|stableHeaders +// Only includes stable headers: authorization, accept, accept-language, content-type, accept-encoding +// Excludes dynamic headers: X-Timestamp, X-HMAC-Signature, X-Request-Id, etc. + +// endpointSerializer (Default for Rate Limit) +// Format: method|pathname +// Groups all requests to same endpoint regardless of params +``` + +### Custom Serializers + +```typescript +// User-scoped rate limiting +const api = new FetchEngine({ + baseUrl: 'https://api.example.com', + rateLimitPolicy: { + serializer: (ctx) => `user:${ctx.state?.userId ?? 'anonymous'}`, + maxCalls: 100 + } +}); + +// Tenant-scoped caching +const api = new FetchEngine({ + baseUrl: 'https://api.example.com', + cachePolicy: { + serializer: (ctx) => `${ctx.headers?.['X-Tenant-ID'] ?? 
'default'}|${ctx.method}|${ctx.url.pathname}`, + ttl: 60000 + } +}); + +// Per-rule serializer override +{ + cachePolicy: { + rules: [ + { is: '/graphql', serializer: (ctx) => `graphql:${ctx.payload?.operationName}` } + ] + } +} +``` + +### Serializer Signature + +```typescript +type RequestSerializer = (ctx: RequestKeyOptions) => string; + +interface RequestKeyOptions { + method: string; // HTTP method (uppercase) + path: string; // Original path + url: URL; // Full URL object + payload?: unknown; // Request body + headers?: H; // Request headers + params?: P; // URL parameters + state?: S; // Instance state +} +``` + +## Policy Architecture + +FetchEngine policies share a common architecture for consistent behavior and performance. + +### Three-Method Pattern + +``` +ResiliencePolicy +├── init(config) → Parse config, validate rules, setup state (O(1)) +├── resolve(...) → Memoized lookup + dynamic skip checks (O(1) amortized) +└── compute(...) → Rule matching, cached per method:path (O(n) first call only) +``` + +### Policy Execution Order + +``` +Request +├── 1. Rate Limit Guard → Wait or reject if exceeded +├── 2. Cache Check → Return cached if hit +├── 3. Dedupe Check → Join in-flight if exists +├── 4. Network Request → Actual HTTP call +└── 5. 
Cache Store → Cache successful response +``` + +**Key implications:** +- Cached responses don't consume rate limit tokens +- Dedupe joins happen after cache checks +- Only initiator consumes rate limit token; joiners share result + +### Rule Matching + +Rules evaluated in declaration order, first match wins: + +```typescript +rules: [ + { is: '/users', ttl: 30000 }, // Exact match first + { startsWith: '/users', ttl: 60000 }, // Prefix second + { match: /^\/users/, ttl: 120000 } // Regex third +] +``` + +### Policy State + +```typescript +interface PolicyInternalState { + enabled: boolean; // Global enable/disable + methods: Set; // Applicable HTTP methods + serializer: RequestSerializer; // Key generation function + rulesCache: Map; // Memoized rule lookups +} ``` \ No newline at end of file diff --git a/packages/fetch/src/engine.ts b/packages/fetch/src/engine.ts index 771a883..e38feb0 100644 --- a/packages/fetch/src/engine.ts +++ b/packages/fetch/src/engine.ts @@ -6,8 +6,7 @@ import { wait, SingleFlight, Deferred, - RateLimitTokenBucket, - RateLimitError, + type Func, } from '@logosdx/utils'; import { ObserverEngine } from '@logosdx/observer'; @@ -20,16 +19,7 @@ import { type RetryConfig, type FetchResponse, type FetchConfig, - type DeduplicationConfig, - type CacheConfig, - type RateLimitConfig, - type RequestKeyOptions, - type DeduplicationInternalState, - type DedupeRule, - type CacheInternalState, type CacheRule, - type RateLimitInternalState, - type RateLimitRule, } from './types.ts'; import { @@ -37,13 +27,13 @@ import { fetchTypes, validateOptions, DEFAULT_RETRY_CONFIG, - DEFAULT_INFLIGHT_METHODS, - validateMatchRules, - findMatchingRule, - defaultRequestSerializer, - defaultRateLimitSerializer, } from './helpers.ts'; +import { DedupePolicy } from './policies/dedupe.ts'; +import { CachePolicy } from './policies/cache.ts'; +import { RateLimitPolicy } from './policies/rate-limit.ts'; +import { PropertyStore, type MethodOverrides } from './property-store.ts'; 
+ /** * Internal normalized request options - flat structure used throughout FetchEngine. * @@ -207,10 +197,8 @@ export class FetchEngine< #baseUrl: URL; #options: Partial; - #headers: FetchEngine.Headers; - #methodHeaders: MethodHeaders; - #params: FetchEngine.Params

; - #methodParams: HttpMethodOpts

; + #headerStore: PropertyStore>; + #paramStore: PropertyStore>; #type: FetchEngine.Type; #modifyOptions?: FetchEngine.Options['modifyOptions']; @@ -244,17 +232,31 @@ export class FetchEngine< #retry: Required; - // Deduplication - #flight = new SingleFlight(); + /** + * SingleFlight instance for deduplication and caching. + * @internal Used by policies + */ + _flight = new SingleFlight(); - #dedupe: DeduplicationInternalState | null = null; + // Policies - initialized with engine reference (dedupe and cache need it for _flight access) + #dedupePolicy = new DedupePolicy(this); + #cachePolicy = new CachePolicy(this); + #rateLimitPolicy = new RateLimitPolicy(); - // Caching - #cache: CacheInternalState | null = null; + /** + * Get the internal policies used by the FetchEngine instance. + * For internal use and debugging only. + */ + protected $policies() { - // Rate Limiting - #rateLimit: RateLimitInternalState | null = null; + return { + dedupe: this.#dedupePolicy, + cache: this.#cachePolicy, + rateLimit: this.#rateLimitPolicy, + flight: this._flight + } + } get #destroyed() { @@ -466,14 +468,32 @@ export class FetchEngine< } = opts; this.#options = rest; - this.#headers = opts.headers || {} as FetchEngine.Headers; - this.#methodHeaders = Object.fromEntries( + + // Initialize header store with defaults and method overrides + const normalizedMethodHeaders = Object.fromEntries( Object.keys(opts.methodHeaders || {}).map( - (method) => ([method.toUpperCase(), opts.methodHeaders![method as never]]) + (method) => ([method.toLowerCase(), opts.methodHeaders![method as never]]) ) - ); - this.#params = opts.params || {} as FetchEngine.Params

; - this.#methodParams = opts.methodParams || {} as HttpMethodOpts

; + ) as MethodHeaders; + + this.#headerStore = new PropertyStore>({ + defaults: opts.headers || {} as FetchEngine.Headers, + methodOverrides: normalizedMethodHeaders, + ...(validate?.headers && { validate: validate.headers }) + }); + + // Initialize param store with defaults and method overrides + const normalizedMethodParams = Object.fromEntries( + Object.keys(opts.methodParams || {}).map( + (method) => ([method.toLowerCase(), opts.methodParams![method as never]]) + ) + ) as HttpMethodOpts

; + + this.#paramStore = new PropertyStore>({ + defaults: opts.params || {} as FetchEngine.Params

, + methodOverrides: normalizedMethodParams as MethodOverrides>, + ...(validate?.params && { validate: validate.params }) + }); this.#modifyOptions = modifyOptions; this.#modifyMethodOptions = modifyMethodOptions!; @@ -482,570 +502,20 @@ export class FetchEngine< this.removeHeader = this.rmHeader.bind(this) as FetchEngine['rmHeader']; this.removeParam = this.rmParams.bind(this) as FetchEngine['rmParams']; - this.#validateHeaders(this.#headers); - - // Initialize deduplication - this.#initDeduplication(opts.dedupePolicy); - - // Initialize caching - this.#initCache(opts.cachePolicy); - - // Initialize rate limiting - this.#initRateLimit(opts.rateLimitPolicy); - } - - - /** - * Initialize deduplication configuration. - * - * @param config - Deduplication config from options - */ - #initDeduplication(config?: boolean | DeduplicationConfig): void { - - if (!config) return; - - if (config === true) { - - this.#dedupe = { - enabled: true, - methods: new Set(DEFAULT_INFLIGHT_METHODS), - config: {}, - serializer: defaultRequestSerializer, - rulesCache: new Map() - } - - return; - } - - // Full config object - this.#dedupe = { - enabled: config.enabled !== false, - methods: new Set(config.methods ?? DEFAULT_INFLIGHT_METHODS), - config, - serializer: config.serializer ?? defaultRequestSerializer, - rulesCache: new Map() - } - - // Validate rules if provided - if (config.rules) { - - validateMatchRules(config.rules); - } - } - - - /** - * Initialize cache configuration. 
- * - * @param config - Cache config from options - */ - #initCache(config?: boolean | CacheConfig): void { - - if (!config) return; - - if (config === true) { - - // Boolean true = enable with defaults - this.#cache = { - enabled: true, - methods: new Set(DEFAULT_INFLIGHT_METHODS), - config: {}, - ttl: 60000, - staleIn: undefined, - serializer: defaultRequestSerializer, - rulesCache: new Map(), - activeKeys: new Set(), - revalidatingKeys: new Set() - } - - return; - } - - // Full config object - - this.#cache = { - enabled: config.enabled !== false, - methods: new Set(config.methods ?? DEFAULT_INFLIGHT_METHODS), - config, - ttl: config.ttl ?? 60000, - staleIn: config.staleIn, - serializer: config.serializer ?? defaultRequestSerializer, - rulesCache: new Map(), - activeKeys: new Set(), - revalidatingKeys: new Set() - } - - // Validate rules if provided - if (config.rules) { - - validateMatchRules(config.rules); - } + // Initialize policies + this.#dedupePolicy.init(opts.dedupePolicy); + this.#rateLimitPolicy.init(opts.rateLimitPolicy); + this.#cachePolicy.init(opts.cachePolicy); // Initialize SingleFlight with adapter if provided - this.#flight = new SingleFlight({ - adapter: config.adapter, - defaultTtl: this.#cache.ttl, - defaultStaleIn: this.#cache.staleIn - }); - } - - - /** - * Default methods for rate limiting (all methods by default). - */ - static #DEFAULT_RATELIMIT_METHODS: _InternalHttpMethods[] = ['GET', 'POST', 'PUT', 'DELETE', 'PATCH', 'OPTIONS']; - - - /** - * Initialize rate limit configuration. 
- * - * @param config - Rate limit config from options - */ - #initRateLimit(config?: boolean | RateLimitConfig): void { - - if (!config) return; - - if (config === true) { - - // Boolean true = enable with defaults - this.#rateLimit = { - enabled: true, - methods: new Set(FetchEngine.#DEFAULT_RATELIMIT_METHODS), - config: {}, - maxCalls: 100, - windowMs: 60000, - waitForToken: true, - serializer: defaultRateLimitSerializer, - rulesCache: new Map(), - rateLimiters: new Map() - }; - return; - } - - // Full config object - this.#rateLimit = { - enabled: config.enabled !== false, - methods: new Set(config.methods ?? FetchEngine.#DEFAULT_RATELIMIT_METHODS), - config, - maxCalls: config.maxCalls ?? 100, - windowMs: config.windowMs ?? 60000, - waitForToken: config.waitForToken ?? true, - serializer: config.serializer ?? defaultRateLimitSerializer, - rulesCache: new Map(), - rateLimiters: new Map() - } - - // Validate rules if provided - if (config.rules) { - - validateMatchRules(config.rules); - } - } - - - /** - * Resolve rate limit configuration for a specific request. - * - * Uses memoization for rule matching (O(n) only once per method+path). - * The shouldRateLimit callback is always evaluated since it depends on request context. 
- * - * @returns Object with config or null if disabled - */ - #resolveRateLimitConfig( - method: string, - path: string, - keyContext: RequestKeyOptions - ): RateLimitRule | null { - - // If globally disabled AND no rules defined, skip everything - // But if rules are defined, allow them to enable rate limiting for specific routes - if ( - !this.#rateLimit || - ( - - !this.#rateLimit?.enabled && - !this.#rateLimit?.config?.rules?.length - ) - ) { - - return null; - } - - const upperMethod = method.toUpperCase(); - const cacheKey = `${upperMethod}:${path}`; - - // Check cache for rule resolution - let cached = this.#rateLimit.rulesCache.get(cacheKey); - - if (cached === undefined) { - - // Not in cache - compute and store - cached = this.#computeRateLimitRuleConfig(upperMethod, path); - this.#rateLimit.rulesCache.set(cacheKey, cached); - } - - // If rule resolution returned null, rate limiting is disabled for this route - if (cached === null) return null; - - // Apply dynamic shouldRateLimit check (not cached) - if ( - this.#rateLimit.config?.shouldRateLimit && - this.#rateLimit.config.shouldRateLimit(keyContext) === false - ) { - - return null; - } - - return cached; - } - - - /** - * Compute rate limit rule configuration for a method+path combination. - * This is the expensive O(n) operation that gets memoized. 
- */ - #computeRateLimitRuleConfig( - method: string, - path: string - ): RateLimitRule | null { - - if (!this.#rateLimit) return null; - - // When globally disabled, start with enabled=false - // Rules with explicit enabled:true can still enable rate limiting - let enabled = this.#rateLimit.enabled && this.#rateLimit.methods.has(method); - let serializer = this.#rateLimit.serializer; - let maxCalls = this.#rateLimit.maxCalls; - let windowMs = this.#rateLimit.windowMs; - let waitForToken = this.#rateLimit.waitForToken; - - // Check for matching rule - if (this.#rateLimit.config?.rules?.length) { - - const rule = findMatchingRule( - this.#rateLimit.config.rules, - method, - path, - [...this.#rateLimit.methods] as _InternalHttpMethods[] - ); - - if (rule) { - - // Rule can disable for this route - if (rule.enabled === false) { - - return null; - } - - // Rule can override methods - if (rule.methods) { - - enabled = rule.methods.includes(method as _InternalHttpMethods); - } - else { - - // Rule matched by path, so it's enabled for this method - enabled = true; - } - - // Rule can override serializer - if (rule.serializer) { - - serializer = rule.serializer; - } - - // Rule can override maxCalls - if (rule.maxCalls !== undefined) { - - maxCalls = rule.maxCalls; - } - - // Rule can override windowMs - if (rule.windowMs !== undefined) { - - windowMs = rule.windowMs; - } - - // Rule can override waitForToken - if (rule.waitForToken !== undefined) { - - waitForToken = rule.waitForToken; - } - } - } - - if (!enabled) { - - return null; - } - - return { - enabled: true, - serializer, - maxCalls, - windowMs, - waitForToken - }; - } - - - /** - * Get or create a rate limiter for the given key. - * - * Rate limiters are cached by key to ensure all requests to the same - * endpoint share the same token bucket. 
- */ - #getRateLimiter(key: string, maxCalls: number, windowMs: number): RateLimitTokenBucket { - - if (!this.#rateLimit) throw new Error('Rate limiting not initialized'); - - let bucket = this.#rateLimit.rateLimiters.get(key); - - if (!bucket) { - - // Token bucket needs: capacity and time per token - // If maxCalls=100 and windowMs=60000, we want 100 requests per minute - // So refillIntervalMs = windowMs / maxCalls = 600ms per token - const refillIntervalMs = windowMs / maxCalls; - bucket = new RateLimitTokenBucket({ capacity: maxCalls, refillIntervalMs }); - - this.#rateLimit.rateLimiters.set(key, bucket); - } - - return bucket; - } - - - /** - * Resolve cache configuration for a specific request. - * - * Uses memoization for rule matching (O(n) only once per method+path). - * The skip callback is always evaluated since it depends on request context. - * - * @returns Object with config or null if disabled - */ - #resolveCacheConfig( - method: string, - path: string, - keyContext: RequestKeyOptions - ): CacheRule | null { - - if (!this.#cache) return null; - - // If globally disabled AND no rules defined, skip everything - // But if rules are defined, allow them to enable caching for specific routes - if ( - !this.#cache.enabled && - !this.#cache.config?.rules?.length - ) { - - return null; - } - - const upperMethod = method.toUpperCase(); - const cacheKey = `${upperMethod}:${path}`; - - // Check cache for rule resolution - let cached = this.#cache.rulesCache.get(cacheKey); - - if (cached === undefined) { - - // Not in cache - compute and store - cached = this.#computeCacheRuleConfig(upperMethod, path); - this.#cache.rulesCache.set(cacheKey, cached); - } - - // If rule resolution returned null, caching is disabled for this route - if (cached === null) return null; - - // Apply dynamic skip check (not cached) - if ( - this.#cache.config?.skip && - this.#cache.config.skip(keyContext) === true - ) { - - return null; - } - - return cached; - } - - - /** - * Compute 
cache rule configuration for a method+path combination. - * This is the expensive O(n) operation that gets memoized. - */ - #computeCacheRuleConfig( - method: string, - path: string - ): CacheRule | null { - - if (!this.#cache) return null; - - // When globally disabled, start with enabled=false - // Rules with explicit enabled:true can still enable caching - let enabled = this.#cache.enabled && this.#cache.methods.has(method); - let serializer = this.#cache.serializer; - let ttl = this.#cache.ttl; - let staleIn = this.#cache.staleIn; - - // Check for matching rule - if (this.#cache.config?.rules?.length) { - - const rule = findMatchingRule( - this.#cache.config.rules, - method, - path, - [...this.#cache.methods] as _InternalHttpMethods[] - ); - - if (rule) { - - // Rule can disable for this route - if (rule.enabled === false) { - - return null; - } - - // Rule can override methods - if (rule.methods) { - - enabled = rule.methods.includes(method as _InternalHttpMethods); - } - else { - - // Rule matched by path, so it's enabled for this method - enabled = true; - } - - // Rule can override serializer - if (rule.serializer) { - - serializer = rule.serializer; - } - - // Rule can override TTL - if (rule.ttl !== undefined) { - - ttl = rule.ttl; - } - - // Rule can override staleIn - if (rule.staleIn !== undefined) { - - staleIn = rule.staleIn; - } - } - } - - if (!enabled) { - - return null; - } - - return { enabled: true, serializer, ttl, staleIn }; - } - - - /** - * Resolve deduplication configuration for a specific request. - * - * Uses memoization for rule matching (O(n) only once per method+path). - * The shouldDedupe callback is always evaluated since it depends on request context. 
- * - * @returns Object with `enabled` flag and `serializer` to use, or null if disabled - */ - #resolveDedupeConfig( - method: string, - path: string, - keyContext: RequestKeyOptions - ): DedupeRule | null { - - if (!this.#dedupe) return null; - - // If globally disabled AND no rules defined, skip everything - // But if rules are defined, allow them to enable deduplication for specific routes - if (!this.#dedupe.enabled && !this.#dedupe.config?.rules?.length) { - - return null; - } - - const upperMethod = method.toUpperCase(); - const cacheKey = `${upperMethod}:${path}`; - - // Check cache for rule resolution - let cached = this.#dedupe.rulesCache.get(cacheKey); - - if (cached === undefined) { - - // Not in cache - compute and store - cached = this.#computeDedupeRuleConfig(upperMethod, path); - this.#dedupe.rulesCache.set(cacheKey, cached); - } + if (opts.cachePolicy && opts.cachePolicy !== true) { - // If rule resolution returned null, deduplication is disabled for this route - if (cached === null) return null; - - // Apply dynamic shouldDedupe check (not cached) - if ( - this.#dedupe.config?.shouldDedupe && - this.#dedupe.config.shouldDedupe(keyContext) === false - ) { - - return null; - } - - return cached; - } - - - /** - * Compute deduplication rule configuration for a method+path combination. - * This is the expensive O(n) operation that gets memoized. 
- */ - #computeDedupeRuleConfig( - method: string, - path: string - ): DedupeRule | null { - - if (!this.#dedupe) return null; - - // When globally disabled, start with enabled=false - // Rules with explicit enabled:true can still enable deduplication - let enabled = this.#dedupe.enabled && this.#dedupe.methods.has(method); - let serializer = this.#dedupe.serializer; - - // Check for matching rule - if (this.#dedupe.config?.rules?.length) { - const rule = findMatchingRule( - this.#dedupe.config.rules, - method, - path, - [...this.#dedupe.methods] as _InternalHttpMethods[] - ); - - if (rule) { - - // Rule can disable for this route - if (rule.enabled === false) return null; - - // Rule can be overridden by methods - enabled = rule.methods?.includes(method as _InternalHttpMethods) ?? true; - - // Rule can override serializer - if (rule.serializer) { - - serializer = rule.serializer; - } - } + this._flight = new SingleFlight({ + adapter: opts.cachePolicy.adapter, + defaultTtl: this.#cachePolicy.defaultTtl, + defaultStaleIn: this.#cachePolicy.defaultStaleIn + }); } - - if (!enabled) return null; - - return { - enabled: true, - serializer - }; } @@ -1103,15 +573,7 @@ export class FetchEngine< */ #makeHeaders(override: FetchEngine.Headers = {}, method?: HttpMethods) { - const methodHeaders = this.#methodHeaders; - - const key = method?.toUpperCase() as keyof typeof methodHeaders; - - return { - ...this.#headers, - ...(methodHeaders[key] || {}), - ...override - }; + return this.#headerStore.resolve(method || 'GET', override); } /** @@ -1134,15 +596,7 @@ export class FetchEngine< */ #makeParams(override: FetchEngine.Params

= {}, method?: HttpMethods) { - const methodParams = this.#methodParams; - - const key = method?.toUpperCase() as keyof typeof methodParams; - - return { - ...(this.#params || {}), - ...(methodParams[key] || {}), - ...override - }; + return this.#paramStore.resolve(method || 'GET', override); } /** @@ -1798,37 +1252,14 @@ export class FetchEngine< */ get headers() { - const method = Object.keys(this.#methodHeaders).reduce( - (acc, k) => { - - const key = k as _InternalHttpMethods; - const methodHeaders = this.#methodHeaders; - - const headers = this.#methodHeaders[k as keyof typeof methodHeaders]; - - if (headers) { - - acc[key] = { ...headers }; - } - - return acc; - }, - {} as MethodHeaders - ); - - return { - default: { - ...this.#headers - }, - ...method - } as { + return this.#headerStore.all as { readonly default: Readonly>, - readonly get?: Readonly>, - readonly post?: Readonly>, - readonly put?: Readonly>, - readonly delete?: Readonly>, - readonly options?: Readonly>, - readonly patch?: Readonly>, + readonly GET?: Readonly>, + readonly POST?: Readonly>, + readonly PUT?: Readonly>, + readonly DELETE?: Readonly>, + readonly OPTIONS?: Readonly>, + readonly PATCH?: Readonly>, } } @@ -1838,37 +1269,14 @@ export class FetchEngine< */ get params() { - const method = Object.keys(this.#methodParams).reduce( - (acc, k) => { - - const key = k as _InternalHttpMethods; - const methodParams = this.#methodParams; - - const params = this.#methodParams[k as keyof typeof methodParams]; - - if (params) { - - acc[key] = { ...params }; - } - - return acc; - }, - {} as HttpMethodOpts

- ); - - return { - default: { - ...this.#params - }, - ...method - } as { + return this.#paramStore.all as { readonly default: Readonly>, - readonly get?: Readonly>, - readonly post?: Readonly>, - readonly put?: Readonly>, - readonly delete?: Readonly>, - readonly options?: Readonly>, - readonly patch?: Readonly>, + readonly GET?: Readonly>, + readonly POST?: Readonly>, + readonly PUT?: Readonly>, + readonly DELETE?: Readonly>, + readonly OPTIONS?: Readonly>, + readonly PATCH?: Readonly>, } } @@ -2042,188 +1450,66 @@ export class FetchEngine< // === Rate Limit Check === // Rate limiting MUST come first - before any network activity or cache lookups // that might trigger background revalidation - const rateLimitConfig = this.#resolveRateLimitConfig(method, path, normalizedOpts); - - if (rateLimitConfig !== null) { - - const rateLimitKey = rateLimitConfig.serializer!(normalizedOpts); - const bucket = this.#getRateLimiter( - rateLimitKey, - rateLimitConfig.maxCalls!, - rateLimitConfig.windowMs! - ); - - const snapshot = bucket.snapshot; - const waitTimeMs = bucket.getWaitTimeMs(1); - - // Build event data once for reuse - const rateLimitEventData = { - ...normalizedOpts, - key: rateLimitKey, - currentTokens: snapshot.currentTokens, - capacity: snapshot.capacity, - waitTimeMs, - nextAvailable: bucket.getNextAvailable(1), - }; - - if (waitTimeMs > 0) { - - // Rate limit exceeded - need to wait or reject - if (!rateLimitConfig.waitForToken) { - - // Reject immediately - this.emit('fetch-ratelimit-reject', rateLimitEventData as any); - - totalTimeout?.clear(); - - const err = new RateLimitError( - `Rate limit exceeded for ${rateLimitKey}. Try again in ${waitTimeMs}ms`, - rateLimitConfig.maxCalls! 
- ); - - throw err; - } - - // Wait for token - this.emit('fetch-ratelimit-wait', rateLimitEventData as any); - - // Call the onRateLimit callback if configured - if (this.#rateLimit!.config?.onRateLimit) { - - await this.#rateLimit!.config.onRateLimit(normalizedOpts, waitTimeMs); - } - - // Wait and consume atomically, respecting abort signal - const acquired = await bucket.waitAndConsume(1, { - abortController: controller, - }); - - if (!acquired) { - - // Aborted while waiting - totalTimeout?.clear(); - - const err = new FetchError('Request aborted while waiting for rate limit'); - err.aborted = true; - err.method = method as HttpMethods; - err.path = path; - err.status = 0; - err.step = 'fetch'; - - throw err; - } - - // Token acquired after waiting - const postWaitSnapshot = bucket.snapshot; - - this.emit('fetch-ratelimit-acquire', { - ...normalizedOpts, - key: rateLimitKey, - currentTokens: postWaitSnapshot.currentTokens, - capacity: postWaitSnapshot.capacity, - waitTimeMs: 0, - nextAvailable: bucket.getNextAvailable(1), - } as any); - } - else { - - // Token available immediately - consume it - bucket.consume(1); - - // Get post-consumption snapshot for event data - const postConsumeSnapshot = bucket.snapshot; - - this.emit('fetch-ratelimit-acquire', { - ...normalizedOpts, - key: rateLimitKey, - currentTokens: postConsumeSnapshot.currentTokens, - capacity: postConsumeSnapshot.capacity, - waitTimeMs: 0, - nextAvailable: bucket.getNextAvailable(1), - } as any); + await this.#rateLimitPolicy.executeGuard({ + method, + path, + normalizedOpts, + controller, + emit: (event, data) => this.emit(event as any, data as any), + clearTimeout: () => totalTimeout?.clear(), + createAbortError: (message) => { + + const err = new FetchError(message); + err.aborted = true; + err.method = normalizedOpts.method; + err.path = path; + err.status = 0; + err.step = 'fetch'; + return err; } - } + }); // === Cache Check === - // normalizedOpts satisfies RequestKeyOptions - use it 
directly - const cacheConfig = this.#resolveCacheConfig(method, path, normalizedOpts); - let cacheKey: string | null = null; - - if (cacheConfig) { - - cacheKey = cacheConfig.serializer!(normalizedOpts); - const cached = await this.#flight.getCache(cacheKey); - - if (cached) { - - const expiresIn = cached.expiresAt - Date.now(); - - if (!cached.isStale) { - - this.emit('fetch-cache-hit', { - ...normalizedOpts, - key: cacheKey, - isStale: false, - expiresIn, - } as any); + const cacheResult = await this.#cachePolicy.checkCache>({ + method, + path, + normalizedOpts, + options, + clearTimeout: () => totalTimeout?.clear() + }); - totalTimeout?.clear(); - return cached.value as FetchResponse; - } + let cacheKey: string | null = null; + let cacheConfig: CacheRule | null = null; - // Stale - return immediately + background revalidation - this.emit('fetch-cache-stale', { - ...normalizedOpts, - key: cacheKey, - isStale: true, - expiresIn, - } as any); + if (cacheResult?.hit) { - this.#triggerBackgroundRevalidation(method, path, options, cacheKey, cacheConfig); - totalTimeout?.clear(); + return cacheResult.value; + } - return cached.value as FetchResponse; - } + if (cacheResult && !cacheResult.hit) { - this.emit('fetch-cache-miss', { - ...normalizedOpts, - key: cacheKey, - } as any); + cacheKey = cacheResult.key; + cacheConfig = cacheResult.config; } // === Deduplication Check === - const dedupeConfig = this.#resolveDedupeConfig(method, path, normalizedOpts); + const dedupeResult = this.#dedupePolicy.checkInflight>({ + method, + path, + normalizedOpts + }); + let dedupeKey: string | null = null; let cleanup: (() => void) | null = null; - if (dedupeConfig) { - - dedupeKey = dedupeConfig.serializer!(normalizedOpts); - const inflight = this.#flight.getInflight(dedupeKey); - - if (inflight) { - - const waitingCount = this.#flight.joinInflight(dedupeKey); + if (dedupeResult?.joined) { - this.emit('fetch-dedupe-join', { - ...normalizedOpts, - key: dedupeKey, - waitingCount, - } as 
any); + return this.#awaitWithIndependentTimeout(dedupeResult.promise, controller, totalTimeout, normalizedOpts.method, path); + } - return this.#awaitWithIndependentTimeout( - inflight.promise as Promise>, - controller, - totalTimeout, - normalizedOpts.method, - path - ); - } + if (dedupeResult && !dedupeResult.joined) { - this.emit('fetch-dedupe-start', { - ...normalizedOpts, - key: dedupeKey, - } as any); + dedupeKey = dedupeResult.key; } // === Execute Request === @@ -2240,7 +1526,7 @@ export class FetchEngine< // when the promise is rejected but no one is listening (no joiners) deferred.promise.catch(() => { /* handled by the request flow */ }); - cleanup = this.#flight.trackInflight(dedupeKey, deferred.promise); + cleanup = this._flight.trackInflight(dedupeKey, deferred.promise); } const requestPromise = this.#attemptCall(normalizedOpts); @@ -2261,12 +1547,12 @@ export class FetchEngine< if (cacheKey && cacheConfig) { - await this.#flight.setCache(cacheKey, res, { + await this._flight.setCache(cacheKey, res, { ttl: cacheConfig.ttl, staleIn: cacheConfig.staleIn }); - this.#cache?.activeKeys.add(cacheKey); + this.#cachePolicy.markActive(cacheKey); this.emit('fetch-cache-set', { ...normalizedOpts, @@ -2334,8 +1620,10 @@ export class FetchEngine< /** * Triggers a background revalidation for stale-while-revalidate. * Fire and forget - errors are emitted as events, not propagated. 
+ * + * @internal Used by CachePolicy */ - async #triggerBackgroundRevalidation( + async _triggerBackgroundRevalidation( method: HttpMethods, path: string, options: FetchEngine.CallOptions & { payload?: unknown }, @@ -2344,12 +1632,12 @@ export class FetchEngine< ): Promise { // Prevent multiple concurrent revalidations for the same key - if (this.#cache?.revalidatingKeys.has(cacheKey)) { + if (this.#cachePolicy.isRevalidating(cacheKey)) { return; } - this.#cache?.revalidatingKeys.add(cacheKey); + this.#cachePolicy.markRevalidating(cacheKey); // Build normalized options for the background request const controller = new AbortController(); @@ -2367,7 +1655,7 @@ export class FetchEngine< this.#attemptCall(normalizedOpts) ); - this.#cache?.revalidatingKeys.delete(cacheKey); + this.#cachePolicy.unmarkRevalidating(cacheKey); if (fetchErr) { @@ -2382,7 +1670,7 @@ export class FetchEngine< const [, cacheErr] = await attempt(() => ( - this.#flight.setCache(cacheKey, res, { + this._flight.setCache(cacheKey, res, { ttl: cacheConfig.ttl, staleIn: cacheConfig.staleIn }) @@ -2399,7 +1687,7 @@ export class FetchEngine< return; } - this.#cache?.activeKeys.add(cacheKey); + this.#cachePolicy.markActive(cacheKey); this.emit('fetch-cache-set', { ...normalizedOpts, @@ -2637,66 +1925,25 @@ export class FetchEngine< 'addHeader requires a string method' ); - const isString = typeof headers === 'string'; - - if (isString) { + if (typeof headers === 'string') { assert( typeof value !== 'undefined', 'addHeader requires a value when setting a single property' ); - } - else { - method = method || value as _InternalHttpMethods; - } - - let updated = { - ...this.#headers - } as FetchEngine.Headers; - - if (method) { - - if (this.#methodHeaders[method]) { - updated = { - ...this.#methodHeaders[method] - } as FetchEngine.Headers; - } - else { - this.#methodHeaders[method] = {}; - } - } - - if (typeof headers === 'string') { - - updated[ - headers as keyof FetchEngine.Headers - ] = value as never; 
+ this.#headerStore.set(headers, value, method); } else { - Object - .keys(headers) - .forEach( - (name) => { - - const key = name as keyof FetchEngine.Headers; - - updated[key] = headers[key as never] - } - ); - } - - this.#validateHeaders(updated); - - if (method) { - - this.#methodHeaders[method] = updated; + // When headers is an object, value might be the method + const actualMethod = method || value as _InternalHttpMethods; + this.#headerStore.set(headers as Partial>, actualMethod); } - else { - this.#headers = updated; - } + const updated = method + ? this.#headerStore.forMethod(method) + : this.#headerStore.defaults; this.emit('fetch-header-add', { state: this.#state, @@ -2745,46 +1992,24 @@ export class FetchEngine< return; } - let updated = { ...this.#headers }; - - if (method) { - - if (this.#methodHeaders[method]) { - updated = { - ...this.#methodHeaders[method] - } as FetchEngine.Headers; - } - else { - this.#methodHeaders[method] = {}; - } - } + // Normalize to array of keys + let keys: string[]; if (typeof headers === 'string') { - - delete updated[headers]; + keys = [headers]; } - - let _names = headers as (keyof FetchEngine.Headers)[]; - - if (!Array.isArray(headers)) { - - _names = Object.keys(headers); + else if (Array.isArray(headers)) { + keys = headers as string[]; } - - for (const name of _names) { - delete updated[name]; + else { + keys = Object.keys(headers as object); } - this.#validateHeaders(updated); - - if (method) { + this.#headerStore.remove(keys, method); - this.#methodHeaders[method] = updated; - } - else { - - this.#headers = updated; - } + const updated = method + ? 
this.#headerStore.forMethod(method) + : this.#headerStore.defaults; this.emit('fetch-header-remove', { state: this.#state, @@ -2821,12 +2046,7 @@ export class FetchEngine< hasHeader(name: string, method?: _InternalHttpMethods): boolean hasHeader(name: string, method?: _InternalHttpMethods): boolean { - if (method) { - - return this.#methodHeaders[method]?.hasOwnProperty(name) || false; - } - - return this.#headers.hasOwnProperty(name); + return this.#headerStore.has(name, method); } /** @@ -2882,66 +2102,25 @@ export class FetchEngine< 'addParam requires a string method' ); - const paramsIsString = typeof params === 'string'; - - if (paramsIsString) { + if (typeof params === 'string') { assert( typeof value !== 'undefined', 'addParam requires a value when setting a single property' ); - } - else { - - method = method || value as _InternalHttpMethods; - } - - let updated = { - ...this.#params - } as FetchEngine.Params

; - - if (method) { - - if (this.#methodParams[method]) { - updated = { - ...this.#methodParams[method] - }; - } - else { - this.#methodParams[method] = {} as P; - } - } - - if (paramsIsString) { - - updated[ - params as keyof FetchEngine.Params

- ] = value as never; - } - else { - - Object - .keys(params) - .forEach( - (name) => { - - const key = name as keyof FetchEngine.Params

; - - updated[key] = params[key as never] - } - ); - } - - if (method) { - this.#methodParams[method] = updated as P; + this.#paramStore.set(params, value, method); } else { - this.#params = updated; + // When params is an object, value might be the method + const actualMethod = method || value as _InternalHttpMethods; + this.#paramStore.set(params as Partial>, actualMethod); } - this.#validateParams(updated); + const updated = method + ? this.#paramStore.forMethod(method) + : this.#paramStore.defaults; this.emit('fetch-param-add', { state: this.#state, @@ -2989,44 +2168,24 @@ export class FetchEngine< return; } - let updated = { ...this.#params }; - - if (method) { - - if (this.#methodParams[method]) { - updated = { - ...this.#methodParams[method] - }; - } - else { - this.#methodParams[method] = {} as P; - } - } + // Normalize to array of keys + let keys: string[]; if (typeof params === 'string') { - - delete updated[params]; + keys = [params]; } - - let _names = params as (keyof FetchEngine.Params

)[]; - - if (!Array.isArray(params)) { - - _names = Object.keys(params); + else if (Array.isArray(params)) { + keys = params as string[]; } - - for (const name of _names) { - delete updated[name]; + else { + keys = Object.keys(params as object); } - if (method) { + this.#paramStore.remove(keys, method); - this.#methodParams[method] = updated as P; - } - else { - - this.#params = updated; - } + const updated = method + ? this.#paramStore.forMethod(method) + : this.#paramStore.defaults; this.emit('fetch-param-remove', { state: this.#state, @@ -3063,12 +2222,7 @@ export class FetchEngine< hasParam(name: string, method?: _InternalHttpMethods): boolean hasParam(name: string, method?: _InternalHttpMethods): boolean { - if (method) { - - return this.#methodParams[method]?.hasOwnProperty(name) || false; - } - - return this.#params.hasOwnProperty(name); + return this.#paramStore.has(name, method); } @@ -3300,8 +2454,8 @@ export class FetchEngine< */ async clearCache(): Promise { - await this.#flight.clearCache(); - this.#cache?.activeKeys.clear(); + await this._flight.clearCache(); + this.#cachePolicy.clearActiveKeys(); } /** @@ -3318,11 +2472,11 @@ export class FetchEngine< */ async deleteCache(key: string): Promise { - const deleted = await this.#flight.deleteCache(key); + const deleted = await this._flight.deleteCache(key); if (deleted) { - this.#cache?.activeKeys.delete(key); + this.#cachePolicy.unmarkActive(key); } return deleted; @@ -3347,15 +2501,15 @@ export class FetchEngine< let invalidated = 0; - for (const key of this.#cache?.activeKeys ?? []) { + for (const key of this.#cachePolicy.getActiveKeys()) { if (predicate(key)) { - const deleted = await this.#flight.deleteCache(key); + const deleted = await this._flight.deleteCache(key); if (deleted) { - this.#cache?.activeKeys.delete(key); + this.#cachePolicy.unmarkActive(key); invalidated++; } } @@ -3365,12 +2519,13 @@ export class FetchEngine< } /** - * Invalidates cache entries matching a path pattern. 
+ * Invalidates cache entries matching a path pattern or custom predicate. * * Convenience method for invalidating cache based on URL path patterns. - * Supports both string prefix matching and RegExp patterns. + * Supports string prefix matching, RegExp patterns, or a custom predicate + * function for full control over key matching (useful with custom serializers). * - * @param pattern - String prefix or RegExp to match against paths in cache keys + * @param patternOrPredicate - String prefix, RegExp, or predicate function * @returns Number of entries invalidated * * @example @@ -3380,10 +2535,22 @@ export class FetchEngine< * @example * // Invalidate using regex pattern * await api.invalidatePath(/\/api\/v[12]\//); + * + * @example + * // Invalidate using custom predicate (for custom serializers) + * await api.invalidatePath((key) => { + * const parsed = myCustomKeyParser(key); + * return parsed.path.startsWith('/users'); + * }); */ - async invalidatePath(pattern: string | RegExp): Promise { + async invalidatePath(patternOrPredicate: string | RegExp | Func<[string], boolean>): Promise { + + if (typeof patternOrPredicate === 'function') { + + return this.invalidateCache(patternOrPredicate); + } - const isRegex = pattern instanceof RegExp; + const isRegex = patternOrPredicate instanceof RegExp; return this.invalidateCache((key) => { @@ -3403,10 +2570,10 @@ export class FetchEngine< if (isRegex) { - return pattern.test(path); + return patternOrPredicate.test(path); } - return path.startsWith(pattern); + return path.startsWith(patternOrPredicate); }); } @@ -3424,7 +2591,7 @@ export class FetchEngine< */ cacheStats(): { cacheSize: number; inflightCount: number } { - return this.#flight.stats(); + return this._flight.stats(); } /** @@ -3464,14 +2631,12 @@ export class FetchEngine< // Reset the flight controller to clear cache and inflight tracking // This is synchronous and creates a new SingleFlight instance - this.#flight = new SingleFlight(); + this._flight = new 
SingleFlight(); // Clear all internal references to allow garbage collection this.#state = {} as S; - this.#headers = {} as FetchEngine.Headers; - this.#methodHeaders = {}; - this.#params = {} as FetchEngine.Params

; - this.#methodParams = {}; + this.#headerStore = new PropertyStore>(); + this.#paramStore = new PropertyStore>(); this.#options = {}; this.#baseUrl = new URL('about:blank'); @@ -3483,10 +2648,10 @@ export class FetchEngine< // Clear retry config this.#retry = undefined as never; - // Clear rate limiting state - this.#rateLimit = null; - this.#cache = null; - this.#dedupe = null; + // Clear policy state + this.#rateLimitPolicy.init(); + this.#cachePolicy.init(); + this.#dedupePolicy.init(); } /** diff --git a/packages/fetch/src/index.ts b/packages/fetch/src/index.ts index 2d4e125..2e49e6f 100644 --- a/packages/fetch/src/index.ts +++ b/packages/fetch/src/index.ts @@ -24,6 +24,36 @@ export { defaultRateLimitSerializer } from './helpers.ts'; +export { + endpointSerializer, + requestSerializer +} from './serializers/index.ts'; + +export { + ResiliencePolicy, + DedupePolicy, + CachePolicy, + RateLimitPolicy +} from './policies/index.ts'; + +export type { + BasePolicyRule, + BasePolicyConfig, + PolicyInternalState, + CachePolicyState, + RateLimitPolicyState +} from './policies/index.ts'; + +export { + PropertyStore +} from './property-store.ts'; + +export type { + PropertyStoreOptions, + PropertyValidateFn, + MethodOverrides +} from './property-store.ts'; + export { FetchEngine } from './engine.ts'; diff --git a/packages/fetch/src/policies/base.ts b/packages/fetch/src/policies/base.ts new file mode 100644 index 0000000..41acf08 --- /dev/null +++ b/packages/fetch/src/policies/base.ts @@ -0,0 +1,232 @@ +import type { + _InternalHttpMethods, + RequestKeyOptions, + RequestSerializer +} from '../types.ts'; + +import type { + BasePolicyRule, + BasePolicyConfig, + PolicyInternalState +} from './types.ts'; + +import { findMatchingRule, validateMatchRules } from '../helpers.ts'; + + +/** + * Abstract base class for resilience policies. 
+ * + * Provides the common three-method pattern used by all policies: + * - `init`: Parse config, initialize state (O(1)) + * - `resolve`: Memoized lookup + dynamic checks (O(1) amortized) + * - `compute`: Rule matching, memoized (O(n) first call only) + * + * Subclasses must implement: + * - `getDefaultSerializer()`: Return the default serializer for this policy + * - `getDefaultMethods()`: Return the default HTTP methods for this policy + * - `mergeRuleWithDefaults(rule)`: Merge a rule with policy defaults + * + * @template TConfig - The policy config type extending BasePolicyConfig + * @template TRule - The rule type extending BasePolicyRule + * @template S - Instance state type + * @template H - Headers type + * @template P - Params type + */ +export abstract class ResiliencePolicy< + TConfig extends BasePolicyConfig, + TRule extends BasePolicyRule, + S = any, + H = any, + P = any +> { + + protected state: PolicyInternalState | null = null; + protected config: TConfig | null = null; + + /** + * Get the default serializer for this policy. + * Subclasses must implement this. + */ + protected abstract getDefaultSerializer(): RequestSerializer; + + /** + * Get the default HTTP methods for this policy. + * Subclasses must implement this. + */ + protected abstract getDefaultMethods(): _InternalHttpMethods[]; + + /** + * Merge a matched rule with policy defaults. + * Subclasses must implement this to handle policy-specific fields. + * + * @param rule - The matched rule (or null for global defaults) + * @returns The merged rule with all defaults applied + */ + protected abstract mergeRuleWithDefaults(rule: TRule | null): TRule; + + + /** + * Whether the policy is initialized and enabled. + */ + get isEnabled(): boolean { + + return this.state?.enabled ?? false; + } + + + /** + * Initialize the policy with configuration. + * + * Parses the config (boolean or object), validates rules, + * and sets up internal state for fast lookups. 
+ * + * @param config - Boolean true for defaults, or full config object + */ + init(config?: boolean | TConfig): void { + + if (!config) { + + this.state = null; + this.config = null; + return; + } + + if (config === true) { + + this.state = { + enabled: true, + methods: new Set(this.getDefaultMethods()), + serializer: this.getDefaultSerializer(), + rulesCache: new Map() + }; + this.config = {} as TConfig; + return; + } + + this.config = config; + + this.state = { + enabled: config.enabled !== false, + methods: new Set(config.methods ?? this.getDefaultMethods()), + serializer: config.serializer ?? this.getDefaultSerializer(), + rulesCache: new Map() + }; + + if (config.rules) { + + validateMatchRules(config.rules); + } + } + + + /** + * Resolve policy configuration for a specific request. + * + * Uses memoization for rule matching (O(n) only once per method+path). + * Skip callbacks are always evaluated since they depend on request context. + * + * @param method - HTTP method (uppercase) + * @param path - Request path + * @param ctx - Full request context for skip callback + * @param skipCallback - Optional skip callback from config + * @returns Resolved rule or null if disabled + */ + resolve( + method: string, + path: string, + ctx: RequestKeyOptions, + skipCallback?: (ctx: RequestKeyOptions) => boolean | undefined + ): TRule | null { + + if (!this.state) return null; + + if (!this.state.enabled && !this.config?.rules?.length) { + + return null; + } + + const upperMethod = method.toUpperCase(); + const cacheKey = `${upperMethod}:${path}`; + + let cached = this.state.rulesCache.get(cacheKey); + + if (cached === undefined) { + + cached = this.compute(upperMethod, path); + this.state.rulesCache.set(cacheKey, cached); + } + + if (cached === null) return null; + + if (skipCallback && skipCallback(ctx) === true) { + + return null; + } + + return cached; + } + + + /** + * Compute rule configuration for a method+path combination. 
+ * + * This is the expensive O(n) operation that gets memoized. + * Finds matching rule and merges with policy defaults. + * + * @param method - HTTP method (uppercase) + * @param path - Request path + * @returns Computed rule or null if disabled + */ + protected compute(method: string, path: string): TRule | null { + + if (!this.state) return null; + + let enabled = this.state.enabled && this.state.methods.has(method); + let matchedRule: TRule | null = null; + + if (this.config?.rules?.length) { + + const rule = findMatchingRule( + this.config.rules, + method, + path, + [...this.state.methods] as _InternalHttpMethods[] + ) as TRule | undefined; + + if (rule) { + + if (rule.enabled === false) { + + return null; + } + + if (rule.methods) { + + enabled = rule.methods.includes(method as _InternalHttpMethods); + } + else { + + enabled = true; + } + + matchedRule = rule; + } + } + + if (!enabled) return null; + + return this.mergeRuleWithDefaults(matchedRule); + } + + + /** + * Clear the rules cache. + * + * Call this if you need to force re-computation of rules, + * though typically this is not needed. + */ + clearCache(): void { + + this.state?.rulesCache.clear(); + } +} diff --git a/packages/fetch/src/policies/cache.ts b/packages/fetch/src/policies/cache.ts new file mode 100644 index 0000000..e6e92ce --- /dev/null +++ b/packages/fetch/src/policies/cache.ts @@ -0,0 +1,423 @@ +import type { + _InternalHttpMethods, + CacheRule, + CacheConfig, + RequestSerializer, + CacheAdapter, + RequestKeyOptions +} from '../types.ts'; + +import type { FetchEngine } from '../engine.ts'; + +import { ResiliencePolicy } from './base.ts'; +import { requestSerializer } from '../serializers/index.ts'; +import { validateMatchRules } from '../helpers.ts'; + + +/** + * Result of cache check operation. + */ +export type CacheCheckResult = + | { hit: true; value: T; key: string } + | { hit: false; key: string; config: CacheRule } + | null; + + +/** + * Execution context for cache check. 
+ */ +export interface CacheExecutionContext { + + /** HTTP method */ + method: string; + + /** Request path */ + path: string; + + /** Full normalized request options */ + normalizedOpts: RequestKeyOptions; + + /** Original request options (for background revalidation) */ + options: unknown; + + /** Clear any pending timeout */ + clearTimeout: () => void; +} + + +/** + * Default HTTP methods for caching. + * Only GET requests are cached by default. + */ +const DEFAULT_CACHE_METHODS: _InternalHttpMethods[] = ['GET']; + +/** + * Default TTL for cached responses (60 seconds). + */ +const DEFAULT_CACHE_TTL = 60000; + + +/** + * Extended internal state for cache policy. + * Includes SWR tracking sets for background revalidation. + */ +export interface CachePolicyState { + + /** Whether the policy is globally enabled */ + enabled: boolean; + + /** Set of HTTP methods this policy applies to */ + methods: Set; + + /** The serializer function for key generation */ + serializer: RequestSerializer; + + /** Memoized rule cache: method:path -> resolved rule or null */ + rulesCache: Map | null>; + + /** Default TTL in milliseconds */ + ttl: number; + + /** Default stale time for SWR in milliseconds */ + staleIn: number | undefined; + + /** Keys currently being fetched (for SWR deduplication) */ + activeKeys: Set; + + /** Keys currently being revalidated in background */ + revalidatingKeys: Set; +} + + +/** + * Cache policy for storing and retrieving response data. + * + * Supports stale-while-revalidate (SWR) pattern where stale cached data + * is returned immediately while fresh data is fetched in the background. + * + * Uses request-scoped serialization by default (method + path + params + payload), + * meaning requests are only considered cache hits if they have identical + * method, path, parameters, and payload. 
+ * + * @template S - Instance state type + * @template H - Headers type + * @template P - Params type + * + * @example + * ```typescript + * const cachePolicy = new CachePolicy(); + * + * cachePolicy.init({ + * enabled: true, + * ttl: 300000, // 5 minutes + * staleIn: 60000, // Stale after 1 minute (triggers SWR) + * rules: [ + * { startsWith: '/static', ttl: 3600000 }, // 1 hour for static + * { startsWith: '/admin', enabled: false } // No caching for admin + * ] + * }); + * ``` + */ +export class CachePolicy< + S = unknown, + H = unknown, + P = unknown +> extends ResiliencePolicy, CacheRule, S, H, P> { + + /** Reference to the FetchEngine instance */ + #engine: FetchEngine; + + /** + * Extended state with cache-specific fields. + * Note: We override the base state type to include cache-specific fields. + */ + protected state: CachePolicyState | null = null; + + /** + * Cache adapter for external storage backends. + */ + #adapter: CacheAdapter | undefined; + + constructor(engine: FetchEngine) { + + super(); + this.#engine = engine; + } + + /** + * Get the cache adapter (if configured). + */ + get adapter(): CacheAdapter | undefined { + + return this.#adapter; + } + + /** + * Get active keys set (for SWR tracking). + */ + get activeKeys(): Set { + + return this.state?.activeKeys ?? new Set(); + } + + /** + * Get revalidating keys set (for SWR tracking). + */ + get revalidatingKeys(): Set { + + return this.state?.revalidatingKeys ?? new Set(); + } + + /** + * Get the default TTL. + */ + get defaultTtl(): number { + + return this.state?.ttl ?? DEFAULT_CACHE_TTL; + } + + /** + * Get the default stale time for SWR. + */ + get defaultStaleIn(): number | undefined { + + return this.state?.staleIn; + } + + /** + * Get the default serializer for caching. + * Uses request-scoped serialization (method + path + params + payload). 
+ */ + protected getDefaultSerializer(): RequestSerializer { + + return requestSerializer as RequestSerializer; + } + + /** + * Get the default HTTP methods for caching. + * Only GET requests are cached by default. + */ + protected getDefaultMethods(): _InternalHttpMethods[] { + + return DEFAULT_CACHE_METHODS; + } + + /** + * Initialize the cache policy with configuration. + * + * Extends base init to handle cache-specific fields (ttl, staleIn, adapter). + */ + init(config?: boolean | CacheConfig): void { + + if (!config) { + + this.state = null; + this.config = null; + this.#adapter = undefined; + return; + } + + if (config === true) { + + this.state = { + enabled: true, + methods: new Set(this.getDefaultMethods()), + serializer: this.getDefaultSerializer(), + rulesCache: new Map(), + ttl: DEFAULT_CACHE_TTL, + staleIn: undefined, + activeKeys: new Set(), + revalidatingKeys: new Set() + }; + this.config = {} as CacheConfig; + this.#adapter = undefined; + return; + } + + this.config = config; + this.#adapter = config.adapter; + + this.state = { + enabled: config.enabled !== false, + methods: new Set(config.methods ?? this.getDefaultMethods()), + serializer: config.serializer ?? this.getDefaultSerializer(), + rulesCache: new Map(), + ttl: config.ttl ?? DEFAULT_CACHE_TTL, + staleIn: config.staleIn, + activeKeys: new Set(), + revalidatingKeys: new Set() + }; + + if (config.rules) { + + validateMatchRules(config.rules); + } + } + + /** + * Merge a matched rule with policy defaults. + * Includes cache-specific fields (ttl, staleIn). + */ + protected mergeRuleWithDefaults(rule: CacheRule | null): CacheRule { + + if (!this.state) { + + return { + enabled: true, + serializer: this.getDefaultSerializer(), + ttl: DEFAULT_CACHE_TTL, + staleIn: undefined + }; + } + + return { + enabled: true, + serializer: rule?.serializer ?? this.state.serializer, + ttl: rule?.ttl ?? this.state.ttl, + staleIn: rule?.staleIn ?? 
this.state.staleIn + }; + } + + /** + * Resolve cache configuration for a request. + * + * Convenience method that wraps the base `resolve()` with the + * policy-specific skip callback. + */ + resolveForRequest( + method: string, + path: string, + ctx: RequestKeyOptions + ): CacheRule | null { + + const skipCallback = this.config?.skip + ? (c: RequestKeyOptions) => this.config!.skip!(c) === true + : undefined; + + return this.resolve(method, path, ctx, skipCallback); + } + + /** + * Mark a key as actively being fetched. + */ + markActive(key: string): void { + + this.state?.activeKeys.add(key); + } + + /** + * Unmark a key as actively being fetched. + */ + unmarkActive(key: string): void { + + this.state?.activeKeys.delete(key); + } + + /** + * Check if a key is currently being revalidated. + */ + isRevalidating(key: string): boolean { + + return this.state?.revalidatingKeys.has(key) ?? false; + } + + /** + * Mark a key as being revalidated in background. + */ + markRevalidating(key: string): void { + + this.state?.revalidatingKeys.add(key); + } + + /** + * Unmark a key as being revalidated. + */ + unmarkRevalidating(key: string): void { + + this.state?.revalidatingKeys.delete(key); + } + + /** + * Clear all active keys. + */ + clearActiveKeys(): void { + + this.state?.activeKeys.clear(); + } + + /** + * Get all active cache keys. + */ + getActiveKeys(): string[] { + + return [...(this.state?.activeKeys ?? [])]; + } + + /** + * Check cache for a request. 
+ * + * Handles cache hit/miss logic including stale-while-revalidate: + * - Fresh hit: returns cached value, clears timeout + * - Stale hit: returns cached value, triggers background revalidation, clears timeout + * - Miss: returns null with config for the caller to proceed with fetch + * - Disabled: returns null + * + * @param ctx - Execution context with request info + * @returns Cache check result or null if caching disabled + */ + async checkCache(ctx: CacheExecutionContext): Promise> { + + const { method, path, normalizedOpts, options, clearTimeout } = ctx; + + const config = this.resolveForRequest(method, path, normalizedOpts); + + if (!config) { + + return null; + } + + const key = config.serializer!(normalizedOpts); + const cached = await this.#engine._flight.getCache(key); + + if (cached) { + + const expiresIn = cached.expiresAt - Date.now(); + + if (!cached.isStale) { + + // Fresh cache hit + this.#engine.emit('fetch-cache-hit' as any, { + ...normalizedOpts, + key, + isStale: false, + expiresIn, + }); + + clearTimeout(); + + return { hit: true, value: cached.value as T, key }; + } + + // Stale - return immediately + trigger background revalidation + this.#engine.emit('fetch-cache-stale' as any, { + ...normalizedOpts, + key, + isStale: true, + expiresIn, + }); + + this.#engine._triggerBackgroundRevalidation(method, path, options as any, key, config); + clearTimeout(); + + return { hit: true, value: cached.value as T, key }; + } + + // Cache miss + this.#engine.emit('fetch-cache-miss' as any, { + ...normalizedOpts, + key, + }); + + return { hit: false, key, config }; + } +} diff --git a/packages/fetch/src/policies/dedupe.ts b/packages/fetch/src/policies/dedupe.ts new file mode 100644 index 0000000..0b252e0 --- /dev/null +++ b/packages/fetch/src/policies/dedupe.ts @@ -0,0 +1,206 @@ +import type { + _InternalHttpMethods, + DedupeRule, + DeduplicationConfig, + RequestSerializer, + RequestKeyOptions +} from '../types.ts'; + +import type { FetchEngine } from 
'../engine.ts'; + +import { ResiliencePolicy } from './base.ts'; +import { requestSerializer } from '../serializers/index.ts'; + + +/** + * Result of dedupe inflight check. + */ +export type DedupeCheckResult = + | { joined: true; promise: Promise; key: string } + | { joined: false; key: string; config: DedupeRule } + | null; + + +/** + * Execution context for dedupe check. + */ +export interface DedupeExecutionContext { + + /** HTTP method */ + method: string; + + /** Request path */ + path: string; + + /** Full normalized request options */ + normalizedOpts: RequestKeyOptions; +} + + +/** + * Default HTTP methods for deduplication. + * Only GET requests are deduplicated by default. + */ +const DEFAULT_DEDUPE_METHODS: _InternalHttpMethods[] = ['GET']; + + +/** + * Deduplication policy for preventing duplicate concurrent requests. + * + * When multiple identical requests are made concurrently, deduplication + * ensures only one actual network request is made. All callers share + * the same in-flight promise. + * + * Uses request-scoped serialization by default (method + path + params + payload), + * meaning requests are only considered duplicates if they have identical + * method, path, parameters, and payload. 
+ * + * @template S - Instance state type + * @template H - Headers type + * @template P - Params type + * + * @example + * ```typescript + * const dedupePolicy = new DedupePolicy(); + * + * dedupePolicy.init({ + * enabled: true, + * methods: ['GET', 'POST'], + * rules: [ + * { startsWith: '/admin', enabled: false } + * ] + * }); + * + * const config = dedupePolicy.resolve('GET', '/users', ctx); + * if (config) { + * const key = config.serializer(ctx); + * // Use key for deduplication lookup + * } + * ``` + */ +export class DedupePolicy< + S = unknown, + H = unknown, + P = unknown +> extends ResiliencePolicy, DedupeRule, S, H, P> { + + /** Reference to the FetchEngine instance */ + #engine: FetchEngine; + + constructor(engine: FetchEngine) { + + super(); + this.#engine = engine; + } + + /** + * Get the default serializer for deduplication. + * Uses request-scoped serialization (method + path + params + payload). + */ + protected getDefaultSerializer(): RequestSerializer { + + return requestSerializer as RequestSerializer; + } + + /** + * Get the default HTTP methods for deduplication. + * Only GET requests are deduplicated by default. + */ + protected getDefaultMethods(): _InternalHttpMethods[] { + + return DEFAULT_DEDUPE_METHODS; + } + + /** + * Merge a matched rule with policy defaults. + * For deduplication, this just applies the serializer override. + */ + protected mergeRuleWithDefaults(rule: DedupeRule | null): DedupeRule { + + const serializer = rule?.serializer ?? this.state!.serializer; + + return { + enabled: true, + serializer + }; + } + + /** + * Resolve deduplication configuration for a request. + * + * Convenience method that wraps the base `resolve()` with the + * policy-specific skip callback. + */ + resolveForRequest( + method: string, + path: string, + ctx: RequestKeyOptions + ): DedupeRule | null { + + const skipCallback = this.config?.shouldDedupe + ? 
(c: RequestKeyOptions) => this.config!.shouldDedupe!(c) === false + : undefined; + + return this.resolve(method, path, ctx, skipCallback); + } + + /** + * Check for in-flight request and handle joining. + * + * If an in-flight request is found, this method: + * 1. Joins the in-flight request + * 2. Emits the fetch-dedupe-join event + * 3. Returns the promise for the caller to await with timeout handling + * + * If no in-flight request, this method: + * 1. Emits the fetch-dedupe-start event + * 2. Returns config for the caller to track the new request + * + * @param ctx - Execution context with request info + * @returns Check result or null if deduplication disabled + */ + checkInflight(ctx: DedupeExecutionContext): DedupeCheckResult { + + const { method, path, normalizedOpts } = ctx; + + const config = this.resolveForRequest(method, path, normalizedOpts); + + if (!config) { + + return null; + } + + const key = config.serializer!(normalizedOpts); + const inflight = this.#engine._flight.getInflight(key); + + if (inflight) { + + // Join existing in-flight request + const waitingCount = this.#engine._flight.joinInflight(key); + + this.#engine.emit('fetch-dedupe-join' as any, { + ...normalizedOpts, + key, + waitingCount, + }); + + return { + joined: true, + promise: inflight.promise as Promise, + key + }; + } + + // No in-flight request - emit start event, caller will track + this.#engine.emit('fetch-dedupe-start' as any, { + ...normalizedOpts, + key, + }); + + return { + joined: false, + key, + config + }; + } +} diff --git a/packages/fetch/src/policies/index.ts b/packages/fetch/src/policies/index.ts new file mode 100644 index 0000000..715be93 --- /dev/null +++ b/packages/fetch/src/policies/index.ts @@ -0,0 +1,24 @@ +export { ResiliencePolicy } from './base.ts'; +export { + DedupePolicy, + type DedupeCheckResult, + type DedupeExecutionContext +} from './dedupe.ts'; +export { + CachePolicy, + type CachePolicyState, + type CacheCheckResult, + type CacheExecutionContext 
+} from './cache.ts'; +export { + RateLimitPolicy, + type RateLimitPolicyState, + type RateLimitExecutionContext +} from './rate-limit.ts'; + +export type { + BasePolicyRule, + BasePolicyConfig, + PolicyInternalState, + RequestKeyOptions +} from './types.ts'; diff --git a/packages/fetch/src/policies/rate-limit.ts b/packages/fetch/src/policies/rate-limit.ts new file mode 100644 index 0000000..a52b9dd --- /dev/null +++ b/packages/fetch/src/policies/rate-limit.ts @@ -0,0 +1,414 @@ +import { RateLimitTokenBucket, RateLimitError } from '@logosdx/utils'; + +import type { + _InternalHttpMethods, + RateLimitRule, + RateLimitConfig, + RequestSerializer, + CacheAdapter, + RequestKeyOptions +} from '../types.ts'; + +import { ResiliencePolicy } from './base.ts'; +import { endpointSerializer } from '../serializers/index.ts'; +import { validateMatchRules } from '../helpers.ts'; + + +/** + * Execution context for rate limit guard. + * Provides dependencies needed to execute rate limiting logic. + */ +export interface RateLimitExecutionContext { + + /** HTTP method */ + method: string; + + /** Request path */ + path: string; + + /** Full normalized request options (used for serializer and events) */ + normalizedOpts: RequestKeyOptions; + + /** AbortController for cancellation */ + controller: AbortController; + + /** Emit an event */ + emit: (event: string, data: unknown) => void; + + /** Clear any pending timeout */ + clearTimeout: () => void; + + /** Factory for creating abort errors */ + createAbortError: (message: string) => Error; +} + + +/** + * Default HTTP methods for rate limiting. + * All methods are rate limited by default. + */ +const DEFAULT_RATELIMIT_METHODS: _InternalHttpMethods[] = [ + 'GET', 'POST', 'PUT', 'DELETE', 'PATCH', 'OPTIONS' +]; + +/** + * Default max calls per window. + */ +const DEFAULT_MAX_CALLS = 100; + +/** + * Default window duration in milliseconds (1 minute). 
+ */ +const DEFAULT_WINDOW_MS = 60000; + + +/** + * Extended internal state for rate limit policy. + * Includes rate limit-specific fields and token bucket management. + */ +export interface RateLimitPolicyState { + + /** Whether the policy is globally enabled */ + enabled: boolean; + + /** Set of HTTP methods this policy applies to */ + methods: Set; + + /** The serializer function for bucket key generation */ + serializer: RequestSerializer; + + /** Memoized rule cache: method:path -> resolved rule or null */ + rulesCache: Map | null>; + + /** Max calls per window */ + maxCalls: number; + + /** Window duration in milliseconds */ + windowMs: number; + + /** Whether to wait for token vs reject immediately */ + waitForToken: boolean; + + /** Token buckets by key */ + rateLimiters: Map; +} + + +/** + * Rate limit policy for controlling request rate. + * + * Uses token bucket algorithm to enforce rate limits. Each unique key + * (generated by the serializer) gets its own bucket, allowing per-endpoint + * or per-user rate limiting. + * + * Uses endpoint-scoped serialization by default (method + path), + * meaning all requests to the same endpoint share a rate limit bucket + * regardless of their parameters or payload. + * + * @template S - Instance state type + * @template H - Headers type + * @template P - Params type + * + * @example + * ```typescript + * const rateLimitPolicy = new RateLimitPolicy(); + * + * rateLimitPolicy.init({ + * enabled: true, + * maxCalls: 60, + * windowMs: 60000, // 60 req/min + * rules: [ + * { startsWith: '/api/search', maxCalls: 10, windowMs: 1000 }, // 10/sec + * { startsWith: '/admin', enabled: false } // No limit for admin + * ] + * }); + * ``` + */ +export class RateLimitPolicy< + S = unknown, + H = unknown, + P = unknown +> extends ResiliencePolicy, RateLimitRule, S, H, P> { + + /** + * Extended state with rate limit-specific fields. + * Note: We override the base state type to include rate limit-specific fields. 
+ */ + protected state: RateLimitPolicyState | null = null; + + /** + * Adapter for distributed rate limiting. + */ + #adapter: CacheAdapter | undefined; + + /** + * Get the adapter (if configured). + */ + get adapter(): CacheAdapter | undefined { + + return this.#adapter; + } + + /** + * Get the default serializer for rate limiting. + * Uses endpoint-scoped serialization (method + path). + */ + protected getDefaultSerializer(): RequestSerializer { + + return endpointSerializer as RequestSerializer; + } + + /** + * Get the default HTTP methods for rate limiting. + * All methods are rate limited by default. + */ + protected getDefaultMethods(): _InternalHttpMethods[] { + + return DEFAULT_RATELIMIT_METHODS; + } + + /** + * Initialize the rate limit policy with configuration. + * + * Extends base init to handle rate limit-specific fields. + */ + init(config?: boolean | RateLimitConfig): void { + + if (!config) { + + this.state = null; + this.config = null; + this.#adapter = undefined; + return; + } + + if (config === true) { + + this.state = { + enabled: true, + methods: new Set(this.getDefaultMethods()), + serializer: this.getDefaultSerializer(), + rulesCache: new Map(), + maxCalls: DEFAULT_MAX_CALLS, + windowMs: DEFAULT_WINDOW_MS, + waitForToken: true, + rateLimiters: new Map() + }; + this.config = {} as RateLimitConfig; + this.#adapter = undefined; + return; + } + + this.config = config; + this.#adapter = (config as any).adapter; // adapter support from design doc + + this.state = { + enabled: config.enabled !== false, + methods: new Set(config.methods ?? this.getDefaultMethods()), + serializer: config.serializer ?? this.getDefaultSerializer(), + rulesCache: new Map(), + maxCalls: config.maxCalls ?? DEFAULT_MAX_CALLS, + windowMs: config.windowMs ?? DEFAULT_WINDOW_MS, + waitForToken: config.waitForToken ?? true, + rateLimiters: new Map() + }; + + if (config.rules) { + + validateMatchRules(config.rules); + } + } + + /** + * Merge a matched rule with policy defaults. 
+ * Includes rate limit-specific fields (maxCalls, windowMs, waitForToken). + */ + protected mergeRuleWithDefaults(rule: RateLimitRule | null): RateLimitRule { + + if (!this.state) { + + return { + enabled: true, + serializer: this.getDefaultSerializer(), + maxCalls: DEFAULT_MAX_CALLS, + windowMs: DEFAULT_WINDOW_MS, + waitForToken: true + }; + } + + return { + enabled: true, + serializer: rule?.serializer ?? this.state.serializer, + maxCalls: rule?.maxCalls ?? this.state.maxCalls, + windowMs: rule?.windowMs ?? this.state.windowMs, + waitForToken: rule?.waitForToken ?? this.state.waitForToken + }; + } + + /** + * Resolve rate limit configuration for a request. + * + * Convenience method that wraps the base `resolve()` with the + * policy-specific skip callback. + */ + resolveForRequest( + method: string, + path: string, + ctx: RequestKeyOptions + ): RateLimitRule | null { + + const skipCallback = this.config?.shouldRateLimit + ? (c: RequestKeyOptions) => this.config!.shouldRateLimit!(c) === false + : undefined; + + return this.resolve(method, path, ctx, skipCallback); + } + + /** + * Get or create a rate limiter for the given key. + * + * Rate limiters are cached by key to ensure all requests to the same + * endpoint share the same token bucket. 
+ * + * @param key - The bucket key (from serializer) + * @param maxCalls - Max calls per window + * @param windowMs - Window duration in milliseconds + * @returns The token bucket for this key + */ + getRateLimiter(key: string, maxCalls: number, windowMs: number): RateLimitTokenBucket { + + if (!this.state) { + + throw new Error('Rate limiting not initialized'); + } + + let bucket = this.state.rateLimiters.get(key); + + if (!bucket) { + + // Token bucket: capacity and time per token + // If maxCalls=100 and windowMs=60000, we want 100 requests per minute + // So refillIntervalMs = windowMs / maxCalls = 600ms per token + const refillIntervalMs = windowMs / maxCalls; + bucket = new RateLimitTokenBucket({ capacity: maxCalls, refillIntervalMs }); + + this.state.rateLimiters.set(key, bucket); + } + + return bucket; + } + + /** + * Get the onRateLimit callback from config. + */ + get onRateLimit(): RateLimitConfig['onRateLimit'] { + + return this.config?.onRateLimit; + } + + /** + * Execute rate limit guard for a request. 
+ * + * Resolves config, checks token availability, and either: + * - Returns immediately if token is available + * - Waits for token if waitForToken is true + * - Throws RateLimitError if waitForToken is false + * + * @param ctx - Execution context with dependencies + * @throws RateLimitError if rate limit exceeded and waitForToken is false + * @throws Error if request is aborted while waiting + */ + async executeGuard(ctx: RateLimitExecutionContext): Promise { + + const { method, path, normalizedOpts, controller, emit, clearTimeout, createAbortError } = ctx; + + const config = this.resolveForRequest(method, path, normalizedOpts); + + if (config === null) { + + return; + } + + const key = config.serializer!(normalizedOpts); + const bucket = this.getRateLimiter(key, config.maxCalls!, config.windowMs!); + + const snapshot = bucket.snapshot; + const waitTimeMs = bucket.getWaitTimeMs(1); + + // Build event data once for reuse + const eventData = { + ...normalizedOpts, + key, + currentTokens: snapshot.currentTokens, + capacity: snapshot.capacity, + waitTimeMs, + nextAvailable: bucket.getNextAvailable(1), + }; + + if (waitTimeMs > 0) { + + // Rate limit exceeded - need to wait or reject + if (!config.waitForToken) { + + // Reject immediately + emit('fetch-ratelimit-reject', eventData); + clearTimeout(); + + throw new RateLimitError( + `Rate limit exceeded for ${key}. Try again in ${waitTimeMs}ms`, + config.maxCalls! 
+ ); + } + + // Wait for token + emit('fetch-ratelimit-wait', eventData); + + // Call the onRateLimit callback if configured + if (this.onRateLimit) { + + await this.onRateLimit(normalizedOpts, waitTimeMs); + } + + // Wait and consume atomically, respecting abort signal + const acquired = await bucket.waitAndConsume(1, { + abortController: controller, + }); + + if (!acquired) { + + // Aborted while waiting + clearTimeout(); + throw createAbortError('Request aborted while waiting for rate limit'); + } + + // Token acquired after waiting + const postWaitSnapshot = bucket.snapshot; + + emit('fetch-ratelimit-acquire', { + ...normalizedOpts, + key, + currentTokens: postWaitSnapshot.currentTokens, + capacity: postWaitSnapshot.capacity, + waitTimeMs: 0, + nextAvailable: bucket.getNextAvailable(1), + }); + } + else { + + // Token available immediately - consume it + bucket.consume(1); + + // Get post-consumption snapshot for event data + const postConsumeSnapshot = bucket.snapshot; + + emit('fetch-ratelimit-acquire', { + ...normalizedOpts, + key, + currentTokens: postConsumeSnapshot.currentTokens, + capacity: postConsumeSnapshot.capacity, + waitTimeMs: 0, + nextAvailable: bucket.getNextAvailable(1), + }); + } + } +} diff --git a/packages/fetch/src/policies/types.ts b/packages/fetch/src/policies/types.ts new file mode 100644 index 0000000..38d1741 --- /dev/null +++ b/packages/fetch/src/policies/types.ts @@ -0,0 +1,95 @@ +import type { + MatchTypes, + RequestKeyOptions, + RequestSerializer, + _InternalHttpMethods +} from '../types.ts'; + + +/** + * Base interface for all policy rules. + * + * Extends MatchTypes for route matching and provides common fields + * shared across all resilience policies (dedupe, cache, rate-limit, retry). 
+ * + * @template S - Instance state type + * @template H - Headers type + * @template P - Params type + */ +export interface BasePolicyRule extends MatchTypes { + + /** HTTP methods this rule applies to */ + methods?: _InternalHttpMethods[] | undefined; + + /** Enable/disable for matched routes */ + enabled?: boolean | undefined; + + /** Custom serializer for this rule's key generation */ + serializer?: RequestSerializer | undefined; +} + + +/** + * Base interface for all policy configurations. + * + * Provides common fields shared across all resilience policy configs + * (dedupe, cache, rate-limit, retry). + * + * @template S - Instance state type + * @template H - Headers type + * @template P - Params type + * @template TRule - The specific rule type that extends BasePolicyRule + */ +export interface BasePolicyConfig< + S = unknown, + H = unknown, + P = unknown, + TRule extends BasePolicyRule = BasePolicyRule +> { + + /** Enable policy globally */ + enabled?: boolean | undefined; + + /** HTTP methods to apply by default */ + methods?: _InternalHttpMethods[] | undefined; + + /** Default serializer for key generation */ + serializer?: RequestSerializer | undefined; + + /** Route-specific rules */ + rules?: TRule[] | undefined; +} + + +/** + * Internal state managed by ResiliencePolicy base class. + * + * This is the memoized state computed from config during initialization. 
+ * + * @template TRule - The specific rule type that extends BasePolicyRule + * @template S - Instance state type (defaults to any for flexibility) + * @template H - Headers type (defaults to any for flexibility) + * @template P - Params type (defaults to any for flexibility) + */ +export interface PolicyInternalState { + + /** Whether the policy is globally enabled */ + enabled: boolean; + + /** Set of HTTP methods this policy applies to */ + methods: Set; + + /** The serializer function for key generation */ + serializer: RequestSerializer; + + /** Memoized rule cache: method:path -> resolved rule or null */ + rulesCache: Map; +} + + +/** + * Context passed to skip callbacks and serializers. + * + * Re-exported from types.ts for convenience. + */ +export type { RequestKeyOptions }; diff --git a/packages/fetch/src/property-store.ts b/packages/fetch/src/property-store.ts new file mode 100644 index 0000000..c67f763 --- /dev/null +++ b/packages/fetch/src/property-store.ts @@ -0,0 +1,356 @@ +import type { _InternalHttpMethods, HttpMethods } from './types.ts'; + + +/** + * Validation function type for PropertyStore. + * + * @template T - The property type + */ +export type PropertyValidateFn = ( + value: T, + method?: _InternalHttpMethods +) => void; + + +/** + * Method-specific overrides type for PropertyStore. + * + * @template T - The property type + */ +export type MethodOverrides = Partial>>; + + +/** + * PropertyStore constructor options. + * + * @template T - The property type + */ +export interface PropertyStoreOptions { + + /** Default values applied to all requests */ + defaults?: T; + + /** Method-specific overrides (e.g., POST has different headers than GET) */ + methodOverrides?: MethodOverrides; + + /** Validation function called when values are set */ + validate?: PropertyValidateFn; +} + + +/** + * Generic store for request properties (headers, params). + * + * Handles CRUD operations, method-specific overrides, and merging + * for requests. 
This class is used internally by FetchEngine + * to manage both headers and params with a unified API. + * + * Properties are resolved in order: + * 1. Instance defaults + * 2. Method-specific overrides + * 3. Request-level overrides + * + * @template T - The property type (e.g., Record) + * + * @example + * ```typescript + * const headers = new PropertyStore({ + * defaults: { 'Content-Type': 'application/json' }, + * methodOverrides: { + * POST: { 'X-Custom': 'post-only' } + * } + * }); + * + * // Add header globally + * headers.set('Authorization', 'Bearer token'); + * + * // Add header for specific method + * headers.set('X-Request-ID', 'abc', 'POST'); + * + * // Resolve headers for a request + * const resolved = headers.resolve('POST', { 'X-Override': 'value' }); + * ``` + */ +export class PropertyStore> { + + #defaults: T; + #methodOverrides: Map>; + #validate: PropertyValidateFn | undefined; + + constructor(options: PropertyStoreOptions = {}) { + + this.#defaults = (options.defaults ?? {}) as T; + this.#methodOverrides = new Map(); + this.#validate = options.validate; + + if (options.methodOverrides) { + + for (const [method, overrides] of Object.entries(options.methodOverrides)) { + + if (overrides) { + + this.#methodOverrides.set(method.toLowerCase(), overrides); + } + } + } + } + + + /** + * Set a property value globally or for a specific method. + * + * @param key - Property key + * @param value - Property value + * @param method - Optional HTTP method for method-specific override + * + * @example + * headers.set('Authorization', 'Bearer token'); + * headers.set('X-Custom', 'value', 'POST'); + */ + set(key: string, value: unknown, method?: HttpMethods): void; + + /** + * Set multiple property values globally or for a specific method. 
+ * + * @param values - Object with key-value pairs to set + * @param method - Optional HTTP method for method-specific overrides + * + * @example + * headers.set({ 'Authorization': 'Bearer token', 'X-API-Key': 'abc' }); + * headers.set({ 'X-Custom': 'value' }, 'POST'); + */ + set(values: Partial, method?: HttpMethods): void; + + set( + keyOrValues: string | Partial, + valueOrMethod?: unknown | HttpMethods, + maybeMethod?: HttpMethods + ): void { + + if (typeof keyOrValues === 'string') { + + const key = keyOrValues; + const value = valueOrMethod; + const method = maybeMethod?.toLowerCase(); + + if (method) { + + const existing = this.#methodOverrides.get(method) ?? {}; + + const updated = { ...existing, [key]: value } as Partial; + this.#methodOverrides.set(method, updated); + + if (this.#validate) { + + this.#validate( + { ...this.#defaults, ...updated } as T, + method as _InternalHttpMethods + ); + } + } + else { + + (this.#defaults as Record)[key] = value; + + if (this.#validate) { + + this.#validate(this.#defaults); + } + } + } + else { + + const values = keyOrValues; + const method = (valueOrMethod as HttpMethods | undefined)?.toLowerCase(); + + if (method) { + + const existing = this.#methodOverrides.get(method) ?? {}; + const updated = { ...existing, ...values } as Partial; + this.#methodOverrides.set(method, updated); + + if (this.#validate) { + + this.#validate( + { ...this.#defaults, ...updated } as T, + method as _InternalHttpMethods + ); + } + } + else { + + Object.assign(this.#defaults, values); + + if (this.#validate) { + + this.#validate(this.#defaults); + } + } + } + } + + + /** + * Remove a property globally or for a specific method. 
+     *
+     * @param key - Property key to remove
+     * @param method - Optional HTTP method for method-specific removal
+     *
+     * @example
+     * headers.remove('Authorization');
+     * headers.remove('X-Custom', 'POST');
+     */
+    remove(key: string, method?: HttpMethods): void;
+
+    /**
+     * Remove multiple properties globally or for a specific method.
+     *
+     * @param keys - Array of property keys to remove
+     * @param method - Optional HTTP method for method-specific removal
+     *
+     * @example
+     * headers.remove(['Authorization', 'X-API-Key']);
+     * headers.remove(['X-Custom', 'X-Other'], 'POST');
+     */
+    remove(keys: string[], method?: HttpMethods): void;
+
+    remove(keyOrKeys: string | string[], method?: HttpMethods): void {
+
+        const keys = Array.isArray(keyOrKeys) ? keyOrKeys : [keyOrKeys];
+        const lowerMethod = method?.toLowerCase();
+
+        if (lowerMethod) {
+
+            const existing = this.#methodOverrides.get(lowerMethod);
+
+            if (existing) {
+
+                for (const key of keys) {
+
+                    delete (existing as Record<string, unknown>)[key];
+                }
+            }
+        }
+        else {
+
+            for (const key of keys) {
+
+                delete (this.#defaults as Record<string, unknown>)[key];
+            }
+        }
+    }
+
+
+    /**
+     * Check if a property exists globally or for a specific method.
+     *
+     * @param key - Property key to check
+     * @param method - Optional HTTP method to check method-specific value
+     * @returns True if the property exists
+     *
+     * @example
+     * if (headers.has('Authorization')) {
+     *     console.log('Auth header is set');
+     * }
+     */
+    has(key: string, method?: HttpMethods): boolean {
+
+        const lowerMethod = method?.toLowerCase();
+
+        if (lowerMethod) {
+
+            const methodValues = this.#methodOverrides.get(lowerMethod);
+
+            if (methodValues && key in methodValues) {
+
+                return true;
+            }
+        }
+
+        return key in this.#defaults;
+    }
+
+
+    /**
+     * Get the default values (without method overrides).
+     *
+     * @returns Clone of the default values
+     *
+     * @example
+     * const defaultHeaders = headers.defaults;
+     */
+    get defaults(): T {
+
+        return { ...this.#defaults };
+    }
+
+
+    /**
+     * Get all values including method overrides.
+     *
+     * Returns an object with 'default' key for defaults and
+     * method names as keys for method-specific overrides.
+     *
+     * @returns Object with all property values
+     *
+     * @example
+     * const all = headers.all;
+     * // { default: { Authorization: '...' }, POST: { 'X-Custom': '...' } }
+     */
+    get all(): { default: T } & Record<string, Partial<T>> {
+
+        const result: { default: T } & Record<string, Partial<T>> = {
+            default: { ...this.#defaults }
+        };
+
+        for (const [method, values] of this.#methodOverrides) {
+
+            result[method] = { ...values };
+        }
+
+        return result;
+    }
+
+
+    /**
+     * Get method-specific overrides only (not merged with defaults).
+     *
+     * @param method - HTTP method
+     * @returns Method-specific overrides or empty object
+     *
+     * @example
+     * const postHeaders = headers.forMethod('POST');
+     */
+    forMethod(method: HttpMethods): Partial<T> {
+
+        const lowerMethod = method.toLowerCase();
+        const overrides = this.#methodOverrides.get(lowerMethod);
+
+        return overrides ? { ...overrides } : {} as Partial<T>;
+    }
+
+
+    /**
+     * Resolve the final property values for a specific method.
+     *
+     * Merges in order: defaults → method overrides → request overrides.
+     *
+     * @param method - HTTP method
+     * @param requestOverrides - Request-level overrides (highest priority)
+     * @returns Merged property values
+     *
+     * @example
+     * const headers = this.headers.resolve('POST', { 'X-Request-ID': '123' });
+     */
+    resolve(method: HttpMethods, requestOverrides?: Partial<T>): T {
+
+        const lowerMethod = method.toLowerCase();
+        const methodOverrides = this.#methodOverrides.get(lowerMethod) ?? {};
+
+        return {
+            ...this.#defaults,
+            ...methodOverrides,
+            ...(requestOverrides ?? 
{}) + } as T; + } +} diff --git a/packages/fetch/src/serializers/endpoint.ts b/packages/fetch/src/serializers/endpoint.ts new file mode 100644 index 0000000..b567328 --- /dev/null +++ b/packages/fetch/src/serializers/endpoint.ts @@ -0,0 +1,26 @@ +import type { RequestKeyOptions } from '../types.ts'; + +/** + * Endpoint serializer for generating rate limit and retry keys. + * + * Serializes by endpoint identity only (method + pathname). + * Used for policies that protect endpoints from overload. + * + * @param ctx - Request key context + * @returns A unique string key for the endpoint + * + * @example + * ```typescript + * endpointSerializer({ + * method: 'GET', + * path: '/users/123', + * url: new URL('https://api.example.com/users/123?page=1'), + * headers: { Authorization: 'Bearer token' } + * }); + * // Returns: 'GET|/users/123' + * ``` + */ +export const endpointSerializer = (ctx: RequestKeyOptions): string => { + + return `${ctx.method}|${ctx.url.pathname}`; +}; diff --git a/packages/fetch/src/serializers/index.ts b/packages/fetch/src/serializers/index.ts new file mode 100644 index 0000000..b3e293f --- /dev/null +++ b/packages/fetch/src/serializers/index.ts @@ -0,0 +1,2 @@ +export { endpointSerializer } from './endpoint.ts'; +export { requestSerializer } from './request.ts'; diff --git a/packages/fetch/src/serializers/request.ts b/packages/fetch/src/serializers/request.ts new file mode 100644 index 0000000..add4951 --- /dev/null +++ b/packages/fetch/src/serializers/request.ts @@ -0,0 +1,116 @@ +import { serializer } from '@logosdx/utils'; +import type { RequestKeyOptions } from '../types.ts'; + + +/** + * Headers that are stable across requests and semantically meaningful + * for cache/dedupe key generation. + * + * These headers affect the response content and should differentiate cache entries: + * - authorization: Different users get different responses + * - accept: Different response formats (JSON, XML, etc.) 
+ * - accept-language: Localized responses
+ * - content-type: Format of request payload (for POST/PUT)
+ * - accept-encoding: Response compression format
+ *
+ * Headers NOT included (dynamic per-request):
+ * - X-Timestamp, Date: Change every request
+ * - X-HMAC-Signature: Computed per-request
+ * - X-Request-Id, X-Correlation-Id: Unique per-request
+ * - Cache-Control, Pragma: Control directives, not identity
+ */
+const KEY_HEADERS = new Set([
+    'authorization',
+    'accept',
+    'accept-language',
+    'content-type',
+    'accept-encoding'
+]);
+
+
+/**
+ * Extract stable headers for key generation.
+ *
+ * Only includes headers that are semantically meaningful for cache/dedupe.
+ * Iterates over header keys and matches against lowercase.
+ *
+ * @param headers - Request headers object
+ * @returns Object with only stable headers (lowercase keys), or undefined if none present
+ */
+const extractKeyHeaders = (
+    headers: Record<string, string> | undefined
+): Record<string, string> | undefined => {
+
+    if (!headers) {
+
+        return undefined;
+    }
+
+    const result: Record<string, string> = {};
+    let hasHeaders = false;
+
+    for (const key in headers) {
+
+        const lowerKey = key.toLowerCase();
+
+        if (KEY_HEADERS.has(lowerKey) && headers[key] !== undefined) {
+
+            result[lowerKey] = headers[key];
+            hasHeaders = true;
+        }
+    }
+
+    return hasHeaders ? result : undefined;
+};
+
+
+/**
+ * Request serializer for generating deduplication and cache keys.
+ *
+ * Serializes by request identity: method + URL path+search + payload + stable headers.
+ * Used for policies that identify duplicate requests.
+ *
+ * Only includes stable, semantically-meaningful headers (Authorization, Accept,
+ * Accept-Language, Content-Type, Accept-Encoding). Dynamic headers like timestamps,
+ * HMAC signatures, and request IDs are excluded to prevent cache pollution.
+ *
+ * Header lookup is case-insensitive (keys are lowercased for comparison).
+ *
+ * Uses `url.pathname + url.search` which:
+ * - Includes the full path and query parameters
+ * - Excludes the hash fragment (which shouldn't affect request identity)
+ * - Excludes the origin (handled by FetchEngine instance)
+ *
+ * @param ctx - Request key context
+ * @returns A unique string key for the request
+ *
+ * @example
+ * ```typescript
+ * requestSerializer({
+ *     method: 'GET',
+ *     path: '/users/123',
+ *     url: new URL('https://api.example.com/users/123?page=1'),
+ *     payload: undefined,
+ *     headers: {
+ *         'Authorization': 'Bearer token',
+ *         'X-Timestamp': '1234567890', // Ignored (dynamic)
+ *         'Accept': 'application/json'
+ *     }
+ * });
+ * // Returns: 'GET|/users/123?page=1|undefined|{"accept":"application/json","authorization":"Bearer token"}'
+ * ```
+ */
+export const requestSerializer = (ctx: RequestKeyOptions): string => {
+
+    const urlKey = ctx.url.pathname + ctx.url.search;
+    const keyHeaders = extractKeyHeaders(ctx.headers as Record<string, string>);
+
+    const parts = [
+        serializer([ctx.method]),
+        serializer([urlKey]),
+        serializer([ctx.payload]),
+        serializer([keyHeaders])
+    ];
+
+    return parts.join('|');
+};
diff --git a/tests/src/fetch/caching.ts b/tests/src/fetch/caching.ts
index 1e7dcb8..cbc137c 100644
--- a/tests/src/fetch/caching.ts
+++ b/tests/src/fetch/caching.ts
@@ -810,6 +810,72 @@ describe('@logosdx/fetch: caching', async () => {
         api.destroy();
     });
 
+    it('should support predicate function with invalidatePath', async () => {
+
+        const api = new FetchEngine({
+            baseUrl: testUrl,
+            cachePolicy: true
+        });
+
+        await api.get('/json');
+        await api.get('/json1');
+        await api.get('/json2');
+
+        // Invalidate using custom predicate (useful for custom serializers)
+        const count = await api.invalidatePath((key) => {
+
+            return key.includes('/json1') || key.includes('/json2');
+        });
+
+        expect(count).to.equal(2);
+
+        const hitEvents: string[] = [];
+        api.on('fetch-cache-hit', (data) => hitEvents.push(data.path!));
+
+        // /json should still be cached
await api.get('/json'); + expect(hitEvents).to.include('/json'); + + api.destroy(); + }); + + it('should return 0 when predicate matches nothing', async () => { + + const api = new FetchEngine({ + baseUrl: testUrl, + cachePolicy: true + }); + + await api.get('/json'); + await api.get('/json1'); + + const count = await api.invalidatePath(() => false); + + expect(count).to.equal(0); + expect(api.cacheStats().cacheSize).to.equal(2); + + api.destroy(); + }); + + it('should invalidate all when predicate always returns true', async () => { + + const api = new FetchEngine({ + baseUrl: testUrl, + cachePolicy: true + }); + + await api.get('/json'); + await api.get('/json1'); + await api.get('/json2'); + + const count = await api.invalidatePath(() => true); + + expect(count).to.equal(3); + expect(api.cacheStats().cacheSize).to.equal(0); + + api.destroy(); + }); + it('should return correct counts with cacheStats', async () => { const api = new FetchEngine({ diff --git a/tests/src/fetch/property-store.ts b/tests/src/fetch/property-store.ts new file mode 100644 index 0000000..9c32157 --- /dev/null +++ b/tests/src/fetch/property-store.ts @@ -0,0 +1,463 @@ +import { + describe, + it, + expect, + vi +} from 'vitest' + +import { PropertyStore } from '../../../packages/fetch/src/property-store.ts'; + + +type TestHeaders = Record; + + +describe('@logosdx/fetch: PropertyStore', () => { + + describe('constructor', () => { + + it('should create with empty options', () => { + + const store = new PropertyStore(); + + expect(store.defaults).to.deep.equal({}); + }); + + it('should create with defaults', () => { + + const store = new PropertyStore({ + defaults: { 'Content-Type': 'application/json' } + }); + + expect(store.defaults).to.deep.equal({ 'Content-Type': 'application/json' }); + }); + + it('should create with method overrides', () => { + + const store = new PropertyStore({ + defaults: { 'Content-Type': 'application/json' }, + methodOverrides: { + POST: { 'X-Custom': 'post-value' } + 
} + }); + + expect(store.forMethod('POST')).to.deep.equal({ 'X-Custom': 'post-value' }); + }); + + it('should normalize method names to lowercase', () => { + + const store = new PropertyStore({ + methodOverrides: { + POST: { 'X-Custom': 'value' } + } as any + }); + + expect(store.forMethod('post')).to.deep.equal({ 'X-Custom': 'value' }); + }); + }); + + describe('set()', () => { + + it('should set single key-value pair globally', () => { + + const store = new PropertyStore(); + + store.set('Authorization', 'Bearer token'); + + expect(store.defaults).to.deep.equal({ Authorization: 'Bearer token' }); + }); + + it('should set multiple values globally', () => { + + const store = new PropertyStore(); + + store.set({ + Authorization: 'Bearer token', + 'X-API-Key': 'abc123' + }); + + expect(store.defaults).to.deep.equal({ + Authorization: 'Bearer token', + 'X-API-Key': 'abc123' + }); + }); + + it('should set single key-value for specific method', () => { + + const store = new PropertyStore(); + + store.set('X-Custom', 'post-value', 'POST'); + + expect(store.defaults).to.deep.equal({}); + expect(store.forMethod('POST')).to.deep.equal({ 'X-Custom': 'post-value' }); + }); + + it('should set multiple values for specific method', () => { + + const store = new PropertyStore(); + + store.set({ + 'X-Custom': 'post-value', + 'X-Other': 'other-value' + }, 'POST'); + + expect(store.forMethod('POST')).to.deep.equal({ + 'X-Custom': 'post-value', + 'X-Other': 'other-value' + }); + }); + + it('should normalize method to lowercase', () => { + + const store = new PropertyStore(); + + store.set('X-Custom', 'value', 'POST' as any); + + expect(store.forMethod('post')).to.deep.equal({ 'X-Custom': 'value' }); + }); + + it('should merge with existing values', () => { + + const store = new PropertyStore({ + defaults: { 'Content-Type': 'application/json' } + }); + + store.set('Authorization', 'Bearer token'); + + expect(store.defaults).to.deep.equal({ + 'Content-Type': 'application/json', + 
Authorization: 'Bearer token' + }); + }); + + it('should call validation function for global set', () => { + + const validate = vi.fn(); + const store = new PropertyStore({ validate }); + + store.set('Authorization', 'Bearer token'); + + expect(validate).toHaveBeenCalledTimes(1); + expect(validate).toHaveBeenCalledWith({ Authorization: 'Bearer token' }); + }); + + it('should call validation function for method-specific set', () => { + + const validate = vi.fn(); + const store = new PropertyStore({ + defaults: { 'Content-Type': 'application/json' }, + validate + }); + + store.set('X-Custom', 'value', 'POST'); + + expect(validate).toHaveBeenCalledTimes(1); + expect(validate).toHaveBeenCalledWith( + { 'Content-Type': 'application/json', 'X-Custom': 'value' }, + 'post' + ); + }); + }); + + describe('remove()', () => { + + it('should remove single key globally', () => { + + const store = new PropertyStore({ + defaults: { Authorization: 'token', 'X-Other': 'value' } + }); + + store.remove('Authorization'); + + expect(store.defaults).to.deep.equal({ 'X-Other': 'value' }); + }); + + it('should remove multiple keys globally', () => { + + const store = new PropertyStore({ + defaults: { Authorization: 'token', 'X-Other': 'value', 'X-Third': 'third' } + }); + + store.remove(['Authorization', 'X-Other']); + + expect(store.defaults).to.deep.equal({ 'X-Third': 'third' }); + }); + + it('should remove key for specific method', () => { + + const store = new PropertyStore({ + methodOverrides: { + POST: { 'X-Custom': 'value', 'X-Other': 'other' } + } + }); + + store.remove('X-Custom', 'POST'); + + expect(store.forMethod('POST')).to.deep.equal({ 'X-Other': 'other' }); + }); + + it('should remove multiple keys for specific method', () => { + + const store = new PropertyStore({ + methodOverrides: { + POST: { 'X-Custom': 'value', 'X-Other': 'other', 'X-Third': 'third' } + } + }); + + store.remove(['X-Custom', 'X-Other'], 'POST'); + + expect(store.forMethod('POST')).to.deep.equal({ 
'X-Third': 'third' }); + }); + + it('should handle removing non-existent key', () => { + + const store = new PropertyStore({ + defaults: { Authorization: 'token' } + }); + + store.remove('NonExistent'); + + expect(store.defaults).to.deep.equal({ Authorization: 'token' }); + }); + + it('should handle removing from non-existent method', () => { + + const store = new PropertyStore(); + + store.remove('X-Custom', 'DELETE'); + + expect(store.forMethod('DELETE')).to.deep.equal({}); + }); + }); + + describe('has()', () => { + + it('should return true for existing global key', () => { + + const store = new PropertyStore({ + defaults: { Authorization: 'token' } + }); + + expect(store.has('Authorization')).to.be.true; + }); + + it('should return false for non-existing global key', () => { + + const store = new PropertyStore(); + + expect(store.has('Authorization')).to.be.false; + }); + + it('should return true for existing method-specific key', () => { + + const store = new PropertyStore({ + methodOverrides: { + POST: { 'X-Custom': 'value' } + } + }); + + expect(store.has('X-Custom', 'POST')).to.be.true; + }); + + it('should return false for non-existing method-specific key', () => { + + const store = new PropertyStore({ + methodOverrides: { + POST: { 'X-Custom': 'value' } + } + }); + + expect(store.has('X-Other', 'POST')).to.be.false; + }); + + it('should check global when method key not found', () => { + + const store = new PropertyStore({ + defaults: { Authorization: 'token' }, + methodOverrides: { + POST: { 'X-Custom': 'value' } + } + }); + + // Authorization is global, checking with POST method should still find it + expect(store.has('Authorization', 'POST')).to.be.true; + }); + }); + + describe('defaults getter', () => { + + it('should return clone of defaults', () => { + + const store = new PropertyStore({ + defaults: { Authorization: 'token' } + }); + + const defaults = store.defaults; + defaults['Modified'] = 'value'; + + expect(store.defaults).to.deep.equal({ 
Authorization: 'token' }); + }); + }); + + describe('all getter', () => { + + it('should return defaults and method overrides', () => { + + const store = new PropertyStore({ + defaults: { 'Content-Type': 'application/json' }, + methodOverrides: { + POST: { 'X-Custom': 'post' }, + PUT: { 'X-Custom': 'put' } + } + }); + + const all = store.all; + + expect(all).to.deep.equal({ + default: { 'Content-Type': 'application/json' }, + post: { 'X-Custom': 'post' }, + put: { 'X-Custom': 'put' } + }); + }); + + it('should return only defaults when no method overrides', () => { + + const store = new PropertyStore({ + defaults: { Authorization: 'token' } + }); + + expect(store.all).to.deep.equal({ + default: { Authorization: 'token' } + }); + }); + }); + + describe('forMethod()', () => { + + it('should return method-specific overrides', () => { + + const store = new PropertyStore({ + methodOverrides: { + POST: { 'X-Custom': 'value' } + } + }); + + expect(store.forMethod('POST')).to.deep.equal({ 'X-Custom': 'value' }); + }); + + it('should return empty object for non-existing method', () => { + + const store = new PropertyStore(); + + expect(store.forMethod('DELETE')).to.deep.equal({}); + }); + + it('should normalize method to lowercase', () => { + + const store = new PropertyStore({ + methodOverrides: { + POST: { 'X-Custom': 'value' } + } + }); + + expect(store.forMethod('POST' as any)).to.deep.equal({ 'X-Custom': 'value' }); + }); + + it('should return clone not reference', () => { + + const store = new PropertyStore({ + methodOverrides: { + POST: { 'X-Custom': 'value' } + } + }); + + const overrides = store.forMethod('POST'); + overrides['Modified'] = 'changed'; + + expect(store.forMethod('POST')).to.deep.equal({ 'X-Custom': 'value' }); + }); + }); + + describe('resolve()', () => { + + it('should merge defaults with method overrides', () => { + + const store = new PropertyStore({ + defaults: { 'Content-Type': 'application/json', Authorization: 'token' }, + methodOverrides: { + 
POST: { 'X-Custom': 'post-value' } + } + }); + + const resolved = store.resolve('POST'); + + expect(resolved).to.deep.equal({ + 'Content-Type': 'application/json', + Authorization: 'token', + 'X-Custom': 'post-value' + }); + }); + + it('should include request overrides with highest priority', () => { + + const store = new PropertyStore({ + defaults: { 'Content-Type': 'application/json' }, + methodOverrides: { + POST: { 'Content-Type': 'text/plain' } + } + }); + + const resolved = store.resolve('POST', { 'Content-Type': 'application/xml' }); + + expect(resolved['Content-Type']).to.equal('application/xml'); + }); + + it('should allow method overrides to override defaults', () => { + + const store = new PropertyStore({ + defaults: { 'Content-Type': 'application/json' }, + methodOverrides: { + POST: { 'Content-Type': 'multipart/form-data' } + } + }); + + const resolved = store.resolve('POST'); + + expect(resolved['Content-Type']).to.equal('multipart/form-data'); + }); + + it('should return only defaults when no method overrides', () => { + + const store = new PropertyStore({ + defaults: { Authorization: 'token' } + }); + + const resolved = store.resolve('GET'); + + expect(resolved).to.deep.equal({ Authorization: 'token' }); + }); + + it('should handle undefined request overrides', () => { + + const store = new PropertyStore({ + defaults: { Authorization: 'token' } + }); + + const resolved = store.resolve('GET', undefined); + + expect(resolved).to.deep.equal({ Authorization: 'token' }); + }); + + it('should normalize method to lowercase', () => { + + const store = new PropertyStore({ + methodOverrides: { + POST: { 'X-Custom': 'value' } + } + }); + + const resolved = store.resolve('POST' as any); + + expect(resolved).to.deep.equal({ 'X-Custom': 'value' }); + }); + }); +}); diff --git a/tests/src/fetch/serializers.ts b/tests/src/fetch/serializers.ts new file mode 100644 index 0000000..e9062f2 --- /dev/null +++ b/tests/src/fetch/serializers.ts @@ -0,0 +1,429 @@ +import { + 
describe, + it, + expect +} from 'vitest' + +import { + endpointSerializer, + requestSerializer +} from '../../../packages/fetch/src/serializers/index.ts'; + + +describe('@logosdx/fetch: serializers', () => { + + describe('endpointSerializer', () => { + + it('should serialize method and pathname', () => { + + const key = endpointSerializer({ + method: 'GET', + path: '/users/123', + url: new URL('https://api.example.com/users/123'), + headers: {} + }); + + expect(key).to.equal('GET|/users/123'); + }); + + it('should exclude query parameters', () => { + + const key = endpointSerializer({ + method: 'GET', + path: '/users', + url: new URL('https://api.example.com/users?page=1&limit=10'), + headers: {} + }); + + expect(key).to.equal('GET|/users'); + }); + + it('should produce different keys for different methods', () => { + + const url = new URL('https://api.example.com/users'); + + const getKey = endpointSerializer({ + method: 'GET', + path: '/users', + url, + headers: {} + }); + + const postKey = endpointSerializer({ + method: 'POST', + path: '/users', + url, + headers: {} + }); + + expect(getKey).to.not.equal(postKey); + expect(getKey).to.equal('GET|/users'); + expect(postKey).to.equal('POST|/users'); + }); + + it('should produce different keys for different paths', () => { + + const key1 = endpointSerializer({ + method: 'GET', + path: '/users', + url: new URL('https://api.example.com/users'), + headers: {} + }); + + const key2 = endpointSerializer({ + method: 'GET', + path: '/posts', + url: new URL('https://api.example.com/posts'), + headers: {} + }); + + expect(key1).to.not.equal(key2); + }); + + it('should be deterministic', () => { + + const ctx = { + method: 'GET', + path: '/users/123', + url: new URL('https://api.example.com/users/123?page=1'), + headers: { Authorization: 'Bearer token' } + }; + + const key1 = endpointSerializer(ctx); + const key2 = endpointSerializer(ctx); + + expect(key1).to.equal(key2); + }); + + it('should ignore headers', () => { + + const 
url = new URL('https://api.example.com/users'); + + const key1 = endpointSerializer({ + method: 'GET', + path: '/users', + url, + headers: { Authorization: 'Bearer token1' } + }); + + const key2 = endpointSerializer({ + method: 'GET', + path: '/users', + url, + headers: { Authorization: 'Bearer token2' } + }); + + expect(key1).to.equal(key2); + }); + + it('should ignore payload', () => { + + const url = new URL('https://api.example.com/users'); + + const key1 = endpointSerializer({ + method: 'POST', + path: '/users', + url, + headers: {}, + payload: { name: 'John' } + }); + + const key2 = endpointSerializer({ + method: 'POST', + path: '/users', + url, + headers: {}, + payload: { name: 'Jane' } + }); + + expect(key1).to.equal(key2); + }); + }); + + describe('requestSerializer', () => { + + it('should serialize method, path, and query params', () => { + + const key = requestSerializer({ + method: 'GET', + path: '/users', + url: new URL('https://api.example.com/users?page=1'), + headers: {} + }); + + expect(key).to.include('GET'); + expect(key).to.include('/users?page=1'); + }); + + it('should include payload in serialization', () => { + + const url = new URL('https://api.example.com/users'); + + const key1 = requestSerializer({ + method: 'POST', + path: '/users', + url, + headers: {}, + payload: { name: 'John' } + }); + + const key2 = requestSerializer({ + method: 'POST', + path: '/users', + url, + headers: {}, + payload: { name: 'Jane' } + }); + + expect(key1).to.not.equal(key2); + }); + + it('should include stable headers in serialization', () => { + + const url = new URL('https://api.example.com/users'); + + const key1 = requestSerializer({ + method: 'GET', + path: '/users', + url, + headers: { Authorization: 'Bearer token1' } + }); + + const key2 = requestSerializer({ + method: 'GET', + path: '/users', + url, + headers: { Authorization: 'Bearer token2' } + }); + + expect(key1).to.not.equal(key2); + }); + + it('should exclude dynamic headers (X-Timestamp)', () => 
{ + + const url = new URL('https://api.example.com/users'); + + const key1 = requestSerializer({ + method: 'GET', + path: '/users', + url, + headers: { 'X-Timestamp': '1234567890' } + }); + + const key2 = requestSerializer({ + method: 'GET', + path: '/users', + url, + headers: { 'X-Timestamp': '9876543210' } + }); + + expect(key1).to.equal(key2); + }); + + it('should exclude dynamic headers (X-Request-Id)', () => { + + const url = new URL('https://api.example.com/users'); + + const key1 = requestSerializer({ + method: 'GET', + path: '/users', + url, + headers: { 'X-Request-Id': 'abc-123' } + }); + + const key2 = requestSerializer({ + method: 'GET', + path: '/users', + url, + headers: { 'X-Request-Id': 'xyz-789' } + }); + + expect(key1).to.equal(key2); + }); + + it('should be case-insensitive for header keys', () => { + + const url = new URL('https://api.example.com/users'); + + const key1 = requestSerializer({ + method: 'GET', + path: '/users', + url, + headers: { 'Authorization': 'Bearer token' } + }); + + const key2 = requestSerializer({ + method: 'GET', + path: '/users', + url, + headers: { 'authorization': 'Bearer token' } + }); + + expect(key1).to.equal(key2); + }); + + it('should include Accept header', () => { + + const url = new URL('https://api.example.com/users'); + + const key1 = requestSerializer({ + method: 'GET', + path: '/users', + url, + headers: { Accept: 'application/json' } + }); + + const key2 = requestSerializer({ + method: 'GET', + path: '/users', + url, + headers: { Accept: 'application/xml' } + }); + + expect(key1).to.not.equal(key2); + }); + + it('should include Accept-Language header', () => { + + const url = new URL('https://api.example.com/users'); + + const key1 = requestSerializer({ + method: 'GET', + path: '/users', + url, + headers: { 'Accept-Language': 'en-US' } + }); + + const key2 = requestSerializer({ + method: 'GET', + path: '/users', + url, + headers: { 'Accept-Language': 'es-ES' } + }); + + expect(key1).to.not.equal(key2); + 
}); + + it('should include Content-Type header', () => { + + const url = new URL('https://api.example.com/users'); + + const key1 = requestSerializer({ + method: 'POST', + path: '/users', + url, + headers: { 'Content-Type': 'application/json' } + }); + + const key2 = requestSerializer({ + method: 'POST', + path: '/users', + url, + headers: { 'Content-Type': 'application/x-www-form-urlencoded' } + }); + + expect(key1).to.not.equal(key2); + }); + + it('should be deterministic', () => { + + const ctx = { + method: 'GET', + path: '/users/123', + url: new URL('https://api.example.com/users/123?page=1'), + headers: { Authorization: 'Bearer token' }, + payload: { data: 'test' } + }; + + const key1 = requestSerializer(ctx); + const key2 = requestSerializer(ctx); + + expect(key1).to.equal(key2); + }); + + it('should handle undefined headers', () => { + + const key = requestSerializer({ + method: 'GET', + path: '/users', + url: new URL('https://api.example.com/users'), + headers: undefined as any + }); + + expect(key).to.be.a('string'); + expect(key).to.include('GET'); + }); + + it('should handle empty headers', () => { + + const key = requestSerializer({ + method: 'GET', + path: '/users', + url: new URL('https://api.example.com/users'), + headers: {} + }); + + expect(key).to.be.a('string'); + expect(key).to.include('GET'); + }); + + it('should exclude URL hash fragment', () => { + + const key1 = requestSerializer({ + method: 'GET', + path: '/users', + url: new URL('https://api.example.com/users#section1'), + headers: {} + }); + + const key2 = requestSerializer({ + method: 'GET', + path: '/users', + url: new URL('https://api.example.com/users#section2'), + headers: {} + }); + + expect(key1).to.equal(key2); + }); + + it('should differentiate by query parameters', () => { + + const key1 = requestSerializer({ + method: 'GET', + path: '/users', + url: new URL('https://api.example.com/users?page=1'), + headers: {} + }); + + const key2 = requestSerializer({ + method: 'GET', + 
path: '/users', + url: new URL('https://api.example.com/users?page=2'), + headers: {} + }); + + expect(key1).to.not.equal(key2); + }); + + it('should produce different keys for different methods', () => { + + const url = new URL('https://api.example.com/users'); + + const getKey = requestSerializer({ + method: 'GET', + path: '/users', + url, + headers: {} + }); + + const postKey = requestSerializer({ + method: 'POST', + path: '/users', + url, + headers: {} + }); + + expect(getKey).to.not.equal(postKey); + }); + }); +}); From db9e0c4a63f7abef212c4027f561ba4bf3eb2a77 Mon Sep 17 00:00:00 2001 From: Danilo Alonso Date: Sun, 11 Jan 2026 22:54:26 -0500 Subject: [PATCH 07/13] feat(hooks)!: redesign as lifecycle event system Replace rigid before/after/error extension points with flexible lifecycle events. Users can now define arbitrary hooks like `rateLimit`, `cacheHit`, `retry` without forcing pre/post patterns. - Add `on()`, `once()`, `emit()`, `wrap()`, `register()`, `clear()` - `emit()` returns `EmitResult` for bidirectional communication - `register()` enables strict mode to catch typos at runtime - `HookName` filters to function properties only - Custom `handleFail` option for Firebase HttpsError, Boom, etc. 
- Rewrite docs with library integration examples --- .gitignore | 1 + docs/packages/hooks.md | 693 +++++----------- llm-helpers/hooks.md | 194 +++++ packages/hooks/src/index.ts | 647 +++++++++------ tests/src/hooks.ts | 1531 +++++++++++++++++++++++------------ 5 files changed, 1789 insertions(+), 1277 deletions(-) create mode 100644 llm-helpers/hooks.md diff --git a/.gitignore b/.gitignore index 7699000..e3bacce 100644 --- a/.gitignore +++ b/.gitignore @@ -6,6 +6,7 @@ docs/types typedoc tmp/* !tmp/.gitkeep +packages/hooks/tmp/ packages/test tests/src/experiments/* !tests/src/experiments/memory-tests/ diff --git a/docs/packages/hooks.md b/docs/packages/hooks.md index 92656c0..8ab99a3 100644 --- a/docs/packages/hooks.md +++ b/docs/packages/hooks.md @@ -1,11 +1,11 @@ --- title: Hooks -description: A lightweight, type-safe hook system for extending function behavior. +description: A lightweight, type-safe lifecycle hook system for extending behavior without modifying code. --- # Hooks -Functions do one thing well — until you need to add logging, validation, caching, or metrics. `@logosdx/hooks` lets you extend function behavior without modifying the original code. Wrap any function, add before/after/error extensions, modify arguments, change results, or abort execution entirely. Every extension is type-safe, every hook is trackable, and cleanup is automatic. It's aspect-oriented programming that actually makes sense. +Lifecycle hooks let you respond to events without coupling your code. Unlike traditional events (fire-and-forget), hooks support bidirectional communication - callbacks can modify arguments, set results, return early, or abort execution. 
[[toc]] @@ -29,639 +29,318 @@ pnpm add @logosdx/hooks ::: -**CDN:** - -```html - - -``` - ## Quick Start ```typescript -import { HookEngine } from '@logosdx/hooks' +import { HookEngine } from '@logosdx/hooks'; -// Define your hookable interface -interface UserService { - save(user: User): Promise - delete(id: string): Promise +interface FetchLifecycle { + beforeFetch(url: string, options: RequestInit): Promise; + afterFetch(response: Response, url: string): Promise; + rateLimit(retryAfter: number, attempt: number): Promise; } -// Create engine and wrap methods -const hooks = new HookEngine() -const service = new UserServiceImpl() +const hooks = new HookEngine() + .register('beforeFetch', 'afterFetch', 'rateLimit'); -hooks.wrap(service, 'save') +// Subscribe to events +hooks.on('beforeFetch', async (ctx) => { + const [url, options] = ctx.args; + ctx.setArgs([url, { + ...options, + headers: { ...options.headers, Authorization: `Bearer ${token}` } + }]); +}); -// Add validation before save -hooks.extend('save', 'before', async (ctx) => { - const [user] = ctx.args - if (!user.email) { - ctx.fail('Email is required') - } -}) +// Emit in your library code +async function fetchWithHooks(url: string, options: RequestInit = {}) { -// Add logging after save -hooks.extend('save', 'after', async (ctx) => { - console.log('User saved:', ctx.results) -}) + const before = await hooks.emit('beforeFetch', url, options); + if (before.earlyReturn) return before.result!; -// Add error handling -hooks.extend('save', 'error', async (ctx) => { - console.error('Save failed:', ctx.error) - // Could retry, transform error, or notify -}) + const response = await fetch(...before.args); -// Use normally - hooks run automatically -await service.save({ name: 'John', email: 'john@example.com' }) + const after = await hooks.emit('afterFetch', response, url); + return after.result ?? 
response; +} ``` -## Core Concepts +## Library Integration -Hooks is built around three ideas: +The real power of hooks is giving library users extension points. Use `emit()` at key moments: -1. **Wrapping** - Transform any function into a hookable function -2. **Extension Points** - Add behavior at `before`, `after`, or `error` stages -3. **Context Control** - Modify arguments, results, or abort execution - -Extensions are registered with `extend()` and run in insertion order. Each extension receives a context object with full control over the hook lifecycle. - -## HookEngine +```typescript +export class DataService { -The main class for creating and managing hooks. + #hooks = new HookEngine() + .register('beforeSave', 'afterSave', 'beforeDelete'); -### Constructor + get hooks() { return this.#hooks; } -```typescript -new HookEngine() -``` + async save(record: Record) { -**Type Parameters:** + const before = await this.#hooks.emit('beforeSave', record); + if (before.earlyReturn) return before.result!; -- `Shape` - Interface defining your hookable functions + const saved = await this.#db.insert(before.args[0]); + await this.#hooks.emit('afterSave', saved); -**Example:** - -```typescript -interface PaymentService { - charge(amount: number, cardId: string): Promise - refund(receiptId: string): Promise + return saved; + } } - -const hooks = new HookEngine() ``` -### Creating Hooks - -#### `wrap()` +### Exposing Hooks -Wrap an object method in-place to make it hookable. 
+Two patterns for giving consumers access: ```typescript -wrap>( - instance: Shape, - name: K, - opts?: MakeHookOptions -): void -``` - -**Parameters:** - -- `instance` - Object containing the method to wrap -- `name` - Name of the method to wrap -- `opts` - Optional configuration - -**Example:** +// Option 1: Export hooks directly +export const hooks = new HookEngine(); +export function doWork() { /* uses hooks */ } -```typescript -class OrderService { - async process(order: Order) { - // processing logic - } +// Option 2: Expose via instance property +export class MySdk { + hooks = new HookEngine(); + doWork() { /* uses this.hooks */ } } -const service = new OrderService() -const hooks = new HookEngine() - -hooks.wrap(service, 'process') - -// Now service.process() is hookable -hooks.extend('process', 'before', async (ctx) => { - console.log('Processing order:', ctx.args[0]) -}) -``` - -#### `make()` - -Create a hookable function without modifying the original. +// Consumer usage (either pattern) +import { MySdk } from 'your-library'; -```typescript -make>( - name: K, - cb: Function, - opts?: MakeHookOptions -): Function +const sdk = new MySdk(); +sdk.hooks.on('beforeSave', async (ctx) => { + console.log('Saving:', ctx.args[0]); +}); ``` -**Parameters:** - -- `name` - Unique name for this hook -- `cb` - The original function to wrap -- `opts` - Optional configuration (e.g., `bindTo` for `this` context) - -**Returns:** Wrapped function with hook support +### Standalone Events -**Example:** +Not all hooks need pre/post patterns. 
Emit events for moments users care about: ```typescript -const hooks = new HookEngine<{ fetch: typeof fetch }>() +// In your library +await this.hooks.emit('rateLimit', retryAfter, attempt); +await this.hooks.emit('retry', error, attempt); +await this.hooks.emit('cacheHit', key, value); -const hookedFetch = hooks.make('fetch', fetch) - -hooks.extend('fetch', 'before', async (ctx) => { - console.log('Fetching:', ctx.args[0]) -}) - -await hookedFetch('/api/users') +// Consumer subscribes +sdk.hooks.on('rateLimit', async (ctx) => { + const [retryAfter] = ctx.args; + await sleep(retryAfter); +}); ``` -### Adding Extensions - -#### `extend()` +## API Reference -Add an extension to a registered hook. +### HookEngine ```typescript -extend>( - name: K, - extensionPoint: 'before' | 'after' | 'error', - cbOrOpts: HookFn | HookExtOptions -): Cleanup +new HookEngine(options?) ``` -**Parameters:** - -- `name` - Name of the registered hook -- `extensionPoint` - When to run: `before`, `after`, or `error` -- `cbOrOpts` - Extension callback or options object - -**Returns:** Cleanup function to remove the extension +| Method | Description | +|--------|-------------| +| `register(...names)` | Enable strict mode. Returns `this` for chaining. | +| `on(name, cbOrOpts)` | Subscribe. Returns cleanup function. | +| `once(name, cb)` | Subscribe once. Sugar for `{ callback, once: true }`. | +| `emit(name, ...args)` | Emit hook. Returns `EmitResult`. | +| `wrap(fn, { pre?, post? })` | Wrap function with pre/post hooks. | +| `clear()` | Remove all hooks, reset to permissive mode. 
| -**Extension Points:** - -| Point | When it runs | Can modify | -|-------|--------------|------------| -| `before` | Before original function | Arguments, can return early | -| `after` | After successful execution | Results | -| `error` | When original throws | Can handle/transform errors | - -**Examples:** +**Constructor Options:** ```typescript -// Simple callback -const cleanup = hooks.extend('save', 'before', async (ctx) => { - console.log('About to save:', ctx.args) -}) - -// With options -hooks.extend('save', 'after', { - callback: async (ctx) => { console.log('Saved!') }, - once: true, // Remove after first run - ignoreOnFail: true // Don't throw if this extension fails -}) - -// Remove extension later -cleanup() -``` - -### Utility Methods - -#### `clear()` +// Custom error type for ctx.fail() +import { HttpsError } from 'firebase-functions/v2/https'; -Remove all registered hooks and extensions. +const hooks = new HookEngine({ + handleFail: HttpsError +}); -```typescript -clear(): void +hooks.on('validate', async (ctx) => { + ctx.fail('invalid-argument', 'Email invalid', { field: 'email' }); +}); ``` -**Example:** - -```typescript -hooks.wrap(service, 'save') -hooks.extend('save', 'before', validator) - -// Reset for testing -hooks.clear() - -// service.save() still works, but validator no longer runs -``` +### HookContext -## HookContext +Passed to every callback: -Context object passed to every extension callback. 
+| Property/Method | Description | +|-----------------|-------------| +| `ctx.args` | Current arguments (readonly) | +| `ctx.result` | Current result if set (readonly) | +| `ctx.setArgs(next)` | Replace args for subsequent callbacks | +| `ctx.setResult(next)` | Set result value | +| `ctx.returnEarly()` | Stop processing, signal early return | +| `ctx.fail(...args)` | Abort with error | +| `ctx.removeHook()` | Remove this callback from future emissions | -### Properties +### EmitResult ```typescript -interface HookContext { - args: Parameters // Current arguments - results?: ReturnType // Results (in after/error) - point: 'before' | 'after' | 'error' // Current extension point - error?: unknown // Error (only in error extensions) +interface EmitResult { + args: Parameters; // Final args (possibly modified) + result?: ReturnType; // Result if set + earlyReturn: boolean; // Whether returnEarly() was called } ``` -### Methods - -#### `setArgs()` - -Replace the arguments passed to the original function. +Usage pattern: ```typescript -setArgs(next: Parameters): void -``` - -**Example:** - -```typescript -hooks.extend('save', 'before', async (ctx) => { - const [user] = ctx.args - - // Add timestamp to user - ctx.setArgs([{ ...user, updatedAt: new Date() }]) -}) -``` - -#### `setResult()` +const { args, result, earlyReturn } = await hooks.emit('beforeProcess', data); -Replace the result returned from the hook chain. +if (earlyReturn) return result; -```typescript -setResult(next: ReturnType): void +// Continue with (possibly modified) args +const actualResult = await doWork(...args); ``` -**Example:** +### Hook Options ```typescript -hooks.extend('fetch', 'after', async (ctx) => { - // Transform response - ctx.setResult({ - ...ctx.results, - cached: true, - fetchedAt: new Date() - }) -}) +hooks.on('name', { + callback: async (ctx) => { /* ... 
*/ }, + once: true, // Remove after first run + ignoreOnFail: true // Continue if callback throws +}); ``` -#### `returnEarly()` - -Skip the original function and return with current results. - -```typescript -returnEarly(): void -``` +### Registration -**Example:** +Catches typos at runtime: ```typescript -hooks.extend('fetch', 'before', async (ctx) => { - const [url] = ctx.args - const cached = cache.get(url) +const hooks = new HookEngine() + .register('beforeFetch', 'afterFetch'); - if (cached) { - ctx.setResult(cached) - ctx.returnEarly() // Skip actual fetch - } -}) +hooks.on('beforeFecth', cb); +// Error: Hook "beforeFecth" is not registered. +// Registered hooks: beforeFetch, afterFetch ``` -#### `fail()` +### wrap() -Abort execution and throw a HookError. +Shorthand for the pre/post pattern: ```typescript -fail(error?: unknown): never -``` - -**Example:** +const wrappedFetch = hooks.wrap( + async (url: string) => fetch(url), + { pre: 'beforeFetch', post: 'afterFetch' } +); -```typescript -hooks.extend('save', 'before', async (ctx) => { - const [user] = ctx.args - - if (!user.email) { - ctx.fail('Email is required') - } - - if (!isValidEmail(user.email)) { - ctx.fail(new ValidationError('Invalid email format')) - } -}) +// Pre: receives args, can modify or returnEarly +// Post: receives [result, ...args], can modify result ``` -#### `removeHook()` - -Remove the current extension from future executions. 
- -```typescript -removeHook(): void -``` +## Patterns -**Example:** +### Caching with Early Return ```typescript -let attempts = 0 - -hooks.extend('connect', 'error', async (ctx) => { - attempts++ - - if (attempts >= 3) { - console.log('Max retries reached, removing retry handler') - ctx.removeHook() +hooks.on('beforeGet', async (ctx) => { + const cached = cache.get(ctx.args[0]); + if (cached) { + ctx.setResult(cached); + ctx.returnEarly(); } -}) -``` - -## Extension Options +}); -When using the options object form of `extend()`: - -```typescript -interface HookExtOptions { - callback: HookFn // The extension function - once?: true // Remove after first execution - ignoreOnFail?: true // Don't throw if extension fails -} +hooks.on('afterGet', async (ctx) => { + const [result, key] = ctx.args; + cache.set(key, result); +}); ``` -### `once` - -Extension runs only once, then removes itself. +### Validation ```typescript -hooks.extend('init', 'before', { - callback: async (ctx) => { - console.log('First-time initialization') - }, - once: true -}) +hooks.on('validate', async (ctx) => { + const [user] = ctx.args; + if (!user.email) ctx.fail('Email required'); + if (!user.password) ctx.fail('Password required'); +}); ``` -### `ignoreOnFail` - -If the extension throws, continue execution instead of failing. +### Non-Critical Hooks ```typescript -hooks.extend('save', 'after', { - callback: async (ctx) => { - await analytics.track('user_saved', ctx.results) // Non-critical - }, - ignoreOnFail: true // Don't fail the save if analytics fails -}) +hooks.on('analytics', { + callback: async (ctx) => await track(ctx.args), + ignoreOnFail: true // Don't fail if analytics fails +}); ``` ## Error Handling ### HookError -Error thrown when `fail()` is called or hook execution fails. 
+Default error from `ctx.fail()`: ```typescript class HookError extends Error { - hookName?: string // Name of the hook - extPoint?: string // Extension point: 'before', 'after', 'error' - originalError?: Error // Original error if fail() was called with one - aborted: boolean // Whether explicitly aborted via fail() + hookName?: string; + originalError?: Error; } -``` - -### isHookError() - -Type guard to check if an error is a HookError. - -```typescript -isHookError(error: unknown): error is HookError -``` - -**Example:** - -```typescript -import { attempt } from '@logosdx/utils' -import { isHookError } from '@logosdx/hooks' -const [result, err] = await attempt(() => service.save(user)) +// Type guard +import { isHookError } from '@logosdx/hooks'; if (isHookError(err)) { - console.log(`Hook "${err.hookName}" failed at "${err.extPoint}"`) - console.log('Reason:', err.message) - - if (err.originalError) { - console.log('Caused by:', err.originalError) - } + console.log(`Hook "${err.hookName}" failed: ${err.message}`); } ``` -## Patterns & Examples - -### Validation +### Custom Errors ```typescript -hooks.extend('createUser', 'before', async (ctx) => { - const [userData] = ctx.args - - const errors: string[] = [] - - if (!userData.email) errors.push('Email required') - if (!userData.password) errors.push('Password required') - if (userData.password?.length < 8) errors.push('Password too short') - - if (errors.length > 0) { - ctx.fail(new ValidationError(errors.join(', '))) - } -}) -``` +// Firebase +const hooks = new HookEngine({ + handleFail: HttpsError +}); -### Caching - -```typescript -const cache = new Map() - -hooks.extend('fetchUser', 'before', async (ctx) => { - const [userId] = ctx.args - const cached = cache.get(userId) - - if (cached && !isExpired(cached)) { - ctx.setResult(cached.data) - ctx.returnEarly() - } -}) - -hooks.extend('fetchUser', 'after', async (ctx) => { - const [userId] = ctx.args - cache.set(userId, { - data: ctx.results, - expiresAt: 
Date.now() + 60000 - }) -}) -``` - -### Logging & Metrics - -```typescript -hooks.extend('processOrder', 'before', async (ctx) => { - const [order] = ctx.args - console.log(`Processing order ${order.id}`) - ctx.args[0] = { ...order, startedAt: Date.now() } - ctx.setArgs(ctx.args) -}) - -hooks.extend('processOrder', 'after', async (ctx) => { - const duration = Date.now() - ctx.args[0].startedAt - metrics.record('order.processing.duration', duration) -}) - -hooks.extend('processOrder', 'error', async (ctx) => { - metrics.increment('order.processing.failures') - console.error('Order processing failed:', ctx.error) -}) -``` - -### Authentication - -```typescript -hooks.extend('secureEndpoint', 'before', async (ctx) => { - const token = getAuthToken() - - if (!token) { - ctx.fail(new AuthError('Not authenticated')) - } - - const user = await validateToken(token) - - if (!user) { - ctx.fail(new AuthError('Invalid token')) - } - - // Inject user into args - ctx.setArgs([...ctx.args, { user }]) -}) -``` - -### Retry Logic - -```typescript -hooks.extend('unreliableService', 'error', async (ctx) => { - const maxRetries = 3 - let retries = ctx.args[ctx.args.length - 1]?.retries ?? 
0 - - if (retries < maxRetries) { - console.log(`Retry attempt ${retries + 1}/${maxRetries}`) - - // Modify args to track retries - ctx.setArgs([...ctx.args.slice(0, -1), { retries: retries + 1 }]) - - // Note: This doesn't actually retry - you'd need external retry logic - // This pattern is better suited for logging/metrics in error handlers - } -}) +// Boom +const hooks = new HookEngine({ + handleFail: (msg, data) => { throw Boom.badRequest(msg, data); } +}); ``` ## Type Definitions -### Core Types - ```typescript -// Hook function signature -type HookFn = (ctx: HookContext) => Promise - -// Extension options -interface HookExtOptions { - callback: HookFn - once?: true - ignoreOnFail?: true -} +type AsyncFunc = (...args: any[]) => Promise; -// Make options -interface MakeHookOptions { - bindTo?: any // `this` context for the wrapped function -} -``` +// Only function properties are valid hook names +type HookName = FunctionProps; -### HookContext +type HookFn = (ctx: HookContext) => Promise; -```typescript -interface HookContext { - args: Parameters - results?: Awaited> - point: 'before' | 'after' | 'error' - error?: unknown - - fail: (error?: unknown) => never - setArgs: (next: Parameters) => void - setResult: (next: Awaited>) => void - returnEarly: () => void - removeHook: () => void +interface HookOptions { + callback: HookFn; + once?: true; + ignoreOnFail?: true; } -``` - -## Best Practices - -### Keep Extensions Focused - -```typescript -// Good: Single responsibility -hooks.extend('save', 'before', validateUser) -hooks.extend('save', 'before', sanitizeInput) -hooks.extend('save', 'after', logSuccess) - -// Avoid: Multiple responsibilities in one extension -hooks.extend('save', 'before', async (ctx) => { - // validation AND sanitization AND logging... 
-}) -``` - -### Use `ignoreOnFail` for Non-Critical Extensions -```typescript -// Critical: validation must succeed -hooks.extend('save', 'before', validateUser) - -// Non-critical: analytics can fail silently -hooks.extend('save', 'after', { - callback: trackAnalytics, - ignoreOnFail: true -}) +type HandleFail = + | (new (...args: Args) => Error) + | ((...args: Args) => never); ``` -### Clean Up When Done +### Function Properties Only -```typescript -// Store cleanup functions -const cleanups = [ - hooks.extend('save', 'before', validator), - hooks.extend('save', 'after', logger) -] - -// Clean up all at once -cleanups.forEach(cleanup => cleanup()) -``` - -### Type Your Hook Shapes +Only function properties are available as hook names. Data properties are excluded: ```typescript -// Define clear interfaces for hookable services -interface OrderService { - create(order: OrderInput): Promise - update(id: string, updates: Partial): Promise - cancel(id: string, reason: string): Promise +interface Doc { + id: string; // Data property - excluded + save(): Promise; // Function - available as hook + delete(): Promise; // Function - available as hook } -const hooks = new HookEngine() -// Now all hook names and argument types are enforced +const hooks = new HookEngine(); +hooks.on('save', cb); // ✓ OK +hooks.on('delete', cb); // ✓ OK +hooks.on('id', cb); // ✗ Type error - 'id' is not a function ``` - -## Summary - -The `@logosdx/hooks` library provides a clean way to extend function behavior without modifying original code. Use it for cross-cutting concerns like validation, caching, logging, and error handling while keeping your core logic clean and focused. diff --git a/llm-helpers/hooks.md b/llm-helpers/hooks.md new file mode 100644 index 0000000..1dfdc9d --- /dev/null +++ b/llm-helpers/hooks.md @@ -0,0 +1,194 @@ +# @logosdx/hooks - LLM Helper + +A lightweight, type-safe lifecycle hook system for extending behavior without modifying code. 
+
+## Core Concept
+
+Lifecycle hooks let you respond to named events with bidirectional communication. Unlike traditional events (fire-and-forget), hooks return `EmitResult` so callers know what happened.
+
+## API Overview
+
+```typescript
+import { HookEngine, HookError, isHookError } from '@logosdx/hooks';
+
+interface Lifecycle {
+    beforeFetch(url: string): Promise<void>;
+    afterFetch(result: Response, url: string): Promise<void>;
+    rateLimit(retryAfter: number, attempt: number): Promise<void>;
+}
+
+const hooks = new HookEngine<Lifecycle>()
+    .register('beforeFetch', 'afterFetch', 'rateLimit');
+
+// Subscribe
+hooks.on('beforeFetch', async (ctx) => { /* ... */ });
+hooks.once('beforeFetch', async (ctx) => { /* one-time */ });
+
+// Emit
+const result = await hooks.emit('beforeFetch', url);
+// result.args, result.result, result.earlyReturn
+
+// Wrap function with pre/post hooks
+const wrapped = hooks.wrap(fn, { pre: 'beforeFetch', post: 'afterFetch' });
+
+// Clear all
+hooks.clear();
+```
+
+## HookEngine Methods
+
+| Method | Description |
+|--------|-------------|
+| `register(...names)` | Register hooks for runtime validation. Returns `this`. |
+| `on(name, cbOrOpts)` | Subscribe to hook. Returns cleanup function. |
+| `once(name, cb)` | Subscribe once. Sugar for `on(name, { callback, once: true })`. |
+| `emit(name, ...args)` | Emit hook. Returns `EmitResult`. |
+| `wrap(fn, { pre?, post? })` | Wrap function with pre/post hooks. |
+| `clear()` | Remove all hooks, reset to permissive mode.
| + +## HookContext Methods + +Passed to every callback: + +| Method | Description | +|--------|-------------| +| `ctx.args` | Current arguments | +| `ctx.result` | Current result (if set) | +| `ctx.setArgs(next)` | Replace args for subsequent callbacks | +| `ctx.setResult(next)` | Set result value | +| `ctx.returnEarly()` | Stop processing, signal early return | +| `ctx.fail(...args)` | Abort with error | +| `ctx.removeHook()` | Remove this callback from future emissions | + +## EmitResult + +```typescript +interface EmitResult { + args: Parameters; // Final args (possibly modified) + result?: ReturnType; // Result (if set) + earlyReturn: boolean; // Whether returnEarly() was called +} +``` + +## Library Integration + +Use `emit()` to provide extension points in your library: + +```typescript +async function fetchWithHooks(url: string, options: RequestInit = {}) { + + const before = await hooks.emit('beforeFetch', url, options); + if (before.earlyReturn) return before.result!; + + const response = await fetch(...before.args); + + const after = await hooks.emit('afterFetch', response, url); + return after.result ?? response; +} +``` + +Expose hooks via instance or export: + +```typescript +// Via instance +export class MySdk { + hooks = new HookEngine(); +} + +// Via export +export const hooks = new HookEngine(); +``` + +## Hook Options + +```typescript +hooks.on('name', { + callback: async (ctx) => { /* ... */ }, + once: true, // Remove after first run + ignoreOnFail: true // Continue if callback throws +}); +``` + +## Registration System + +Once `register()` is called, ALL hooks must be registered: + +```typescript +const hooks = new HookEngine() + .register('beforeFetch', 'afterFetch'); + +hooks.on('beforeFecth', cb); +// Error: Hook "beforeFecth" is not registered. 
+// Registered hooks: beforeFetch, afterFetch +``` + +## Custom Error Handler + +```typescript +// Firebase HttpsError +const hooks = new HookEngine({ + handleFail: HttpsError +}); + +hooks.on('validate', async (ctx) => { + ctx.fail('invalid-argument', 'Email invalid', { field: 'email' }); +}); + +// Custom function +const hooks = new HookEngine({ + handleFail: (msg, data) => { throw Boom.badRequest(msg, data); } +}); +``` + +## Common Patterns + +### Caching with Early Return + +```typescript +hooks.on('beforeGet', async (ctx) => { + const cached = cache.get(ctx.args[0]); + if (cached) { + ctx.setResult(cached); + ctx.returnEarly(); + } +}); +``` + +### Validation + +```typescript +hooks.on('validate', async (ctx) => { + const [data] = ctx.args; + if (!data.email) ctx.fail('Email required'); +}); +``` + +### Non-Critical Hooks + +```typescript +hooks.on('analytics', { + callback: async (ctx) => { await track(ctx.args); }, + ignoreOnFail: true +}); +``` + +## Type Parameters + +```typescript +new HookEngine() +``` + +- `Lifecycle` - Interface defining hooks (default: permissive `Record`) +- `FailArgs` - Tuple for `ctx.fail()` args (default: `[string]`) + +**Only function properties are valid hook names:** + +```typescript +interface Doc { + id: string; // Excluded - data property + save(): Promise; // Available as hook +} + +hooks.on('save', cb); // ✓ OK +hooks.on('id', cb); // ✗ Type error +``` diff --git a/packages/hooks/src/index.ts b/packages/hooks/src/index.ts index 5adacd2..dd2d75d 100644 --- a/packages/hooks/src/index.ts +++ b/packages/hooks/src/index.ts @@ -2,25 +2,27 @@ import { assert, AsyncFunc, attempt, + FunctionProps, isFunction, - isObject, - FunctionProps + isObject } from '@logosdx/utils'; /** - * Error thrown when a hook extension calls `fail()` or when hook execution fails. + * Error thrown when a hook calls `ctx.fail()`. + * + * This error is only created when using the default `handleFail` behavior. 
+ * If a custom `handleFail` is provided, that error type is thrown instead.
  *
  * @example
- * engine.extend('save', 'before', async (ctx) => {
+ * hooks.on('validate', async (ctx) => {
  *     if (!ctx.args[0].isValid) {
  *         ctx.fail('Validation failed');
  *     }
  * });
  *
- * const [, err] = await attempt(() => app.save(data));
+ * const [, err] = await attempt(() => engine.emit('validate', data));
  * if (isHookError(err)) {
- *     console.log(err.hookName); // 'save'
- *     console.log(err.extPoint); // 'before'
+ *     console.log(err.hookName); // 'validate'
  * }
  */
 export class HookError extends Error {
@@ -28,15 +30,9 @@ export class HookError extends Error {
     /** Name of the hook where the error occurred */
     hookName?: string;
 
-    /** Extension point where the error occurred: 'before', 'after', or 'error' */
-    extPoint?: string;
-
     /** Original error if `fail()` was called with an Error instance */
     originalError?: Error;
 
-    /** Whether the hook was explicitly aborted via `fail()` */
-    aborted = false;
-
     constructor(message: string) {
 
         super(message)
@@ -47,9 +43,9 @@ export class HookError extends Error {
  * Type guard to check if an error is a HookError.
  *
  * @example
- * const [result, err] = await attempt(() => app.save(data));
- * if (isHookError(err)) {
- *     console.log(`Hook "${err.hookName}" failed at "${err.extPoint}"`);
+ * const [, err] = await attempt(() => engine.emit('validate', data));
+ * if (isHookError(err)) {
+ *     console.log(`Hook "${err.hookName}" failed`);
  */
 export const isHookError = (error: unknown): error is HookError => {
@@ -57,384 +53,519 @@ export const isHookError = (error: unknown): error is HookError => {
 
     return (error as HookError)?.constructor?.name === HookError.name
 }
 
-interface HookShape<F extends AsyncFunc> {
-    args: Parameters<F>,
-    results?: Awaited<ReturnType<F>>
+/**
+ * Result returned from `emit()` after running all hook callbacks.
+ */ +export interface EmitResult { + + /** Current arguments (possibly modified by callbacks) */ + args: Parameters; + + /** Result value (if set by a callback) */ + result?: Awaited> | undefined; + + /** Whether a callback called `returnEarly()` */ + earlyReturn: boolean; } /** - * Context object passed to hook extension callbacks. + * Context object passed to hook callbacks. * Provides access to arguments, results, and control methods. * * @example - * engine.extend('fetch', 'before', async (ctx) => { - * // Read current arguments - * const [url, options] = ctx.args; + * hooks.on('cacheCheck', async (ctx) => { + * const [url] = ctx.args; + * const cached = cache.get(url); * - * // Modify arguments before the original function runs - * ctx.setArgs([url, { ...options, cache: 'force-cache' }]); - * - * // Or skip the original function entirely - * if (isCached(url)) { - * ctx.setResult(getCached(url)); + * if (cached) { + * ctx.setResult(cached); * ctx.returnEarly(); * } * }); */ -export interface HookContext extends HookShape { +export interface HookContext { - /** Current extension point: 'before', 'after', or 'error' */ - point: keyof Hook; + /** Current arguments passed to emit() */ + args: Parameters; - /** Error from the original function (only set in 'error' extensions) */ - error?: unknown, + /** Result value (can be set by callbacks) */ + result?: Awaited>; - /** Abort hook execution with an error. Throws a HookError. */ - fail: (error?: unknown) => never, + /** Abort hook execution with an error. 
*/ + fail: (...args: FailArgs) => never; - /** Replace the arguments passed to the original function */ - setArgs: (next: Parameters) => void, + /** Replace the arguments for subsequent callbacks */ + setArgs: (next: Parameters) => void; - /** Replace the result returned from the hook chain */ - setResult: (next: Awaited>) => void, + /** Set the result value */ + setResult: (next: Awaited>) => void; - /** Skip the original function and return early with the current result */ + /** Stop processing remaining callbacks and return early */ returnEarly: () => void; - /** Remove this extension from the hook (useful with `once` behavior) */ + /** Remove this callback from the hook */ removeHook: () => void; } -export type HookFn = (ctx: HookContext) => Promise; +export type HookFn = + (ctx: HookContext) => Promise; -class Hook { - before: Set> = new Set(); - after: Set> = new Set(); - error: Set> = new Set(); +type HookOptions = { + callback: HookFn; + once?: true; + ignoreOnFail?: true; } -const allowedExtPoints = new Set([ - 'before', - 'after', - 'error' -]); +type HookOrOptions = + HookFn | HookOptions; -type HookExtOptions = { - callback: HookFn, - once?: true, - ignoreOnFail?: true -} +type FuncOrNever = T extends AsyncFunc ? T : never; -type HookExtOrOptions = HookFn | HookExtOptions +/** + * Custom error handler for `ctx.fail()`. + * Can be an Error constructor or a function that throws. + */ +export type HandleFail = + | (new (...args: Args) => Error) + | ((...args: Args) => never); -type MakeHookOptions = { - bindTo?: any -} +/** + * Options for HookEngine constructor. + */ +export interface HookEngineOptions { -type FuncOrNever = T extends AsyncFunc ? T : never; + /** + * Custom handler for `ctx.fail()`. + * Can be an Error constructor or a function that throws. 
+ * + * @example + * // Use Firebase HttpsError + * new HookEngine({ handleFail: HttpsError }); + * + * // Use custom function + * new HookEngine({ + * handleFail: (msg, data) => { throw Boom.badRequest(msg, data); } + * }); + */ + handleFail?: HandleFail; +} /** - * A lightweight, type-safe hook system for extending function behavior. + * A lightweight, type-safe lifecycle hook system. * - * HookEngine allows you to wrap functions and add extensions that run - * before, after, or on error. Extensions can modify arguments, change - * results, or abort execution entirely. + * HookEngine allows you to define lifecycle events and subscribe to them. + * Callbacks can modify arguments, set results, or abort execution. * * @example - * interface MyApp { - * save(data: Data): Promise; - * load(id: string): Promise; + * interface FetchLifecycle { + * preRequest(url: string, options: RequestInit): Promise; + * rateLimit(error: Error, attempt: number): Promise; + * cacheHit(url: string, data: unknown): Promise; * } * - * const app = new MyAppImpl(); - * const hooks = new HookEngine(); + * const hooks = new HookEngine(); * - * // Wrap a method to make it hookable - * hooks.wrap(app, 'save'); - * - * // Add a validation extension - * hooks.extend('save', 'before', async (ctx) => { - * if (!ctx.args[0].isValid) { - * ctx.fail('Validation failed'); - * } + * hooks.on('rateLimit', async (ctx) => { + * const [error, attempt] = ctx.args; + * if (attempt > 3) ctx.fail('Max retries exceeded'); + * await sleep(error.retryAfter * 1000); * }); * - * // Add logging extension - * hooks.extend('save', 'after', async (ctx) => { - * console.log('Saved:', ctx.results); + * hooks.on('cacheHit', async (ctx) => { + * console.log('Cache hit for:', ctx.args[0]); * }); * - * @typeParam Shape - Interface defining the hookable functions + * // In your implementation + * const result = await hooks.emit('cacheHit', url, cachedData); + * + * @typeParam Lifecycle - Interface defining the lifecycle hooks 
+ * @typeParam FailArgs - Arguments type for ctx.fail() (default: [string]) + */ +/** + * Default permissive lifecycle type when no type parameter is provided. + */ +type DefaultLifecycle = Record; + +/** + * Extract only function property keys from a type. + * This ensures only methods are available as hook names, not data properties. + * + * @example + * interface Doc { + * id: string; + * save(): Promise; + * delete(): Promise; + * } + * + * type DocHooks = HookName; // 'save' | 'delete' (excludes 'id') */ -export class HookEngine { +export type HookName = FunctionProps; + +export class HookEngine { + + #hooks: Map, Set]>, FailArgs>>> = new Map(); + #hookOpts = new WeakMap, HookOptions>(); + #handleFail: HandleFail; + #registered: Set> | null = null; + + constructor(options: HookEngineOptions = {}) { + + this.#handleFail = options.handleFail ?? ((message: string): never => { + + throw new HookError(message); + }) as unknown as HandleFail; + } - #registered = new Set(); - #hooks: Map>> = new Map(); - #hookFnOpts = new WeakMap(); - #wrapped = new WeakMap(); + /** + * Validate that a hook is registered (if registration is enabled). + */ + #assertRegistered(name: HookName, method: string) { + + if (this.#registered !== null && !this.#registered.has(name)) { + + const registered = [...this.#registered].map(String).join(', '); + throw new Error( + `Hook "${String(name)}" is not registered. ` + + `Call register("${String(name)}") before using ${method}(). ` + + `Registered hooks: ${registered || '(none)'}` + ); + } + } /** - * Add an extension to a registered hook. + * Register hook names for runtime validation. + * Once any hooks are registered, all hooks must be registered before use. * - * Extensions run at specific points in the hook lifecycle: - * - `before`: Runs before the original function. Can modify args or return early. - * - `after`: Runs after successful execution. Can modify the result. - * - `error`: Runs when the original function throws. 
Can handle or transform errors. + * @param names - Hook names to register + * @returns this (for chaining) * - * @param name - Name of the registered hook to extend - * @param extensionPoint - When to run: 'before', 'after', or 'error' - * @param cbOrOpts - Extension callback or options object - * @returns Cleanup function to remove the extension + * @example + * const hooks = new HookEngine() + * .register('preRequest', 'postRequest', 'rateLimit'); + * + * hooks.on('preRequest', cb); // OK + * hooks.on('preRequset', cb); // Error: not registered (typo caught!) + */ + register(...names: HookName[]) { + + assert(names.length > 0, 'register() requires at least one hook name'); + + if (this.#registered === null) { + + this.#registered = new Set(); + } + + for (const name of names) { + + assert(typeof name === 'string', `Hook name must be a string, got ${typeof name}`); + this.#registered.add(name); + } + + return this; + } + + /** + * Subscribe to a lifecycle hook. + * + * @param name - Name of the lifecycle hook + * @param cbOrOpts - Callback function or options object + * @returns Cleanup function to remove the subscription * * @example * // Simple callback - * const cleanup = hooks.extend('save', 'before', async (ctx) => { - * console.log('About to save:', ctx.args); + * const cleanup = hooks.on('preRequest', async (ctx) => { + * console.log('Request:', ctx.args[0]); * }); * * // With options - * hooks.extend('save', 'after', { - * callback: async (ctx) => { console.log('Saved!'); }, + * hooks.on('analytics', { + * callback: async (ctx) => { track(ctx.args); }, * once: true, // Remove after first run - * ignoreOnFail: true // Don't throw if this extension fails + * ignoreOnFail: true // Don't throw if callback fails * }); * - * // Later: remove the extension + * // Remove subscription * cleanup(); */ - extend>( + on>( name: K, - extensionPoint: keyof Hook>, - cbOrOpts: HookExtOrOptions> + cbOrOpts: HookOrOptions, FailArgs> ) { + const callback = typeof cbOrOpts === 
'function' ? cbOrOpts : cbOrOpts?.callback; - const opts = typeof cbOrOpts === 'function' ? {} as HookExtOptions> : cbOrOpts; + const opts = typeof cbOrOpts === 'function' + ? {} as HookOptions, FailArgs> + : cbOrOpts; assert(typeof name === 'string', '"name" must be a string'); - assert(this.#registered.has(name), `'${name.toString()}' is not a registered hook`); - assert(typeof extensionPoint === 'string', '"extensionPoint" must be a string'); - assert(allowedExtPoints.has(extensionPoint), `'${extensionPoint}' is not a valid extension point`); - assert(isFunction(callback) || isObject(cbOrOpts), '"cbOrOpts" must be a extension callback or options'); + assert(isFunction(callback) || isObject(cbOrOpts), '"cbOrOpts" must be a callback or options'); assert(isFunction(callback), 'callback must be a function'); - const hook = this.#hooks.get(name) ?? new Hook>(); + this.#assertRegistered(name, 'on'); + + const hooks = this.#hooks.get(name) ?? new Set(); - hook[extensionPoint].add(callback); + hooks.add(callback as HookFn, FailArgs>); - this.#hooks.set(name, hook); - this.#hookFnOpts.set(callback, opts); + this.#hooks.set(name, hooks); + this.#hookOpts.set(callback, opts); - /** - * Removes the registered hook extension - */ return () => { - hook[extensionPoint].delete(callback); + hooks.delete(callback as HookFn, FailArgs>); } } /** - * Register a function as a hookable and return the wrapped version. + * Subscribe to a lifecycle hook that fires only once. + * Sugar for `on(name, { callback, once: true })`. * - * The wrapped function behaves identically to the original but allows - * extensions to be added via `extend()`. Use `wrap()` for a simpler API - * when working with object methods. 
- * - * @param name - Unique name for this hook (must match a key in Shape) - * @param cb - The original function to wrap - * @param opts - Options for the wrapped function - * @returns Wrapped function with hook support + * @param name - Name of the lifecycle hook + * @param callback - Callback function + * @returns Cleanup function to remove the subscription * * @example - * const hooks = new HookEngine<{ fetch: typeof fetch }>(); - * - * const hookedFetch = hooks.make('fetch', fetch); - * - * hooks.extend('fetch', 'before', async (ctx) => { - * console.log('Fetching:', ctx.args[0]); + * // Log only the first request + * hooks.once('preRequest', async (ctx) => { + * console.log('First request:', ctx.args[0]); * }); - * - * await hookedFetch('/api/data'); */ - make>( + once>( name: K, - cb: FuncOrNever, - opts: MakeHookOptions = {} + callback: HookFn, FailArgs> ) { - assert(typeof name === 'string', '"name" must be a string'); - assert(!this.#registered.has(name), `'${name.toString()}' hook is already registered`); - assert(isFunction(cb), '"cb" must be a function'); - assert(isObject(opts), '"opts" must be an object'); - - this.#registered.add(name); + return this.on(name, { callback, once: true }); + } - if (this.#wrapped.has(cb)) { + /** + * Emit a lifecycle hook, running all subscribed callbacks. 
+ * + * @param name - Name of the lifecycle hook to emit + * @param args - Arguments to pass to callbacks + * @returns EmitResult with final args, result, and earlyReturn flag + * + * @example + * const result = await hooks.emit('cacheCheck', url); + * + * if (result.earlyReturn && result.result) { + * return result.result; // Use cached value + * } + * + * // Continue with modified args + * const [modifiedUrl] = result.args; + */ + async emit>( + name: K, + ...args: Parameters> + ): Promise>> { - return this.#wrapped.get(cb) as FuncOrNever; - } + this.#assertRegistered(name, 'emit'); - const callback = async (...origArgs: Parameters>) => { + let earlyReturn = false; - let returnEarly = false; + const hooks = this.#hooks.get(name); - const hook = this.#hooks.get(name)!; + const context: HookContext, FailArgs> = { + args, + removeHook() {}, + returnEarly() { - const context: HookContext> = { - args: origArgs, - point: 'before', - removeHook() {}, - returnEarly() { - returnEarly = true; - }, - setArgs(next) { + earlyReturn = true; + }, + setArgs: (next) => { - assert( - Array.isArray(next), - `setArgs: next args for '${context.point}' '${name.toString()}' must be an array of arguments` - ); + assert( + Array.isArray(next), + `setArgs: args for '${String(name)}' must be an array` + ); - context.args = next; - }, - setResult(next) { - context.results = next; - }, - fail(reason) { + context.args = next; + }, + setResult: (next) => { - const error = new HookError(`Hook Aborted: ${reason ?? 
'unknown'}`); + context.result = next; + }, + fail: ((...failArgs: FailArgs) => { - if (reason instanceof Error) { + const handler = this.#handleFail; - error.originalError = reason; - } + // Check if handler is a constructor (class or function with prototype) + const isConstructor = typeof handler === 'function' && + handler.prototype?.constructor === handler; - error.extPoint = context.point; - error.hookName = name as string; + if (isConstructor) { - throw error; - }, - } + const error = new (handler as new (...args: FailArgs) => Error)(...failArgs); - const { before, after, error: errorFns } = hook ?? new Hook>(); + if (error instanceof HookError) { - const handleSet = async ( - which: typeof before, - point: keyof typeof hook - ) => { - - context.point = point; + error.hookName = String(name); + } - for (const fn of which) { + throw error; + } - context.removeHook = () => which.delete(fn); + // For functions, call them and catch any thrown error to set hookName + try { - const opts: HookExtOptions> = this.#hookFnOpts.get(fn); - const [, err] = await attempt(() => fn({ ...context })); + (handler as (...args: FailArgs) => never)(...failArgs); + } + catch (error) { - if (opts.once) context.removeHook(); + if (error instanceof HookError) { - if (err && opts.ignoreOnFail !== true) { - throw err; + error.hookName = String(name); } - if (returnEarly) break; + throw error; } - } - await handleSet(before, 'before'); + // If handler didn't throw, we need to throw something + throw new HookError('ctx.fail() handler did not throw'); + }) as (...args: FailArgs) => never + }; - if (returnEarly) return context.results! 
+ if (!hooks || hooks.size === 0) { + + return { + args: context.args, + result: context.result, + earlyReturn: false + }; + } - const [res, err] = await attempt(() => cb.apply(opts?.bindTo || cb, context.args)); + for (const fn of hooks) { - context.results = res; - context.error = err; + context.removeHook = () => hooks.delete(fn as any); - if (err) { - context.point = 'error'; + const opts: HookOptions = this.#hookOpts.get(fn) ?? { callback: fn }; + const [, err] = await attempt(() => fn({ ...context } as any)); - await handleSet(errorFns, 'error'); + if (opts.once) context.removeHook(); + + if (err && opts.ignoreOnFail !== true) { throw err; } - await handleSet(after, 'after'); - - return context.results!; + if (earlyReturn) break; } - return callback as FuncOrNever; + return { + args: context.args, + result: context.result, + earlyReturn + }; + } + + /** + * Clear all registered hooks. + * + * @example + * hooks.on('preRequest', validator); + * hooks.on('postRequest', logger); + * + * // Reset for testing + * hooks.clear(); + */ + clear() { + + this.#hooks.clear(); + this.#hookOpts = new WeakMap(); + this.#registered = null; } /** - * Wrap an object method in-place to make it hookable. + * Wrap a function with pre/post lifecycle hooks. * - * This is a convenience method that combines `make()` with automatic - * binding and reassignment. The method is replaced on the instance - * with the wrapped version. + * - Pre hook: emitted with function args, can modify args or returnEarly with result + * - Post hook: emitted with [result, ...args], can modify result * - * @param instance - Object containing the method to wrap - * @param name - Name of the method to wrap - * @param opts - Additional options + * @param fn - The async function to wrap + * @param hooks - Object with optional pre and post hook names + * @returns Wrapped function with same signature * * @example - * class UserService { - * async save(user: User) { ... 
} + * interface Lifecycle { + * preRequest(url: string, opts: RequestInit): Promise; + * postRequest(result: Response, url: string, opts: RequestInit): Promise; * } * - * const service = new UserService(); - * const hooks = new HookEngine(); + * const hooks = new HookEngine(); * - * hooks.wrap(service, 'save'); + * // Add cache check in pre hook + * hooks.on('preRequest', async (ctx) => { + * const cached = cache.get(ctx.args[0]); + * if (cached) { + * ctx.setResult(cached); + * ctx.returnEarly(); + * } + * }); * - * // Now service.save() is hookable - * hooks.extend('save', 'before', async (ctx) => { - * console.log('Saving user:', ctx.args[0]); + * // Log result in post hook + * hooks.on('postRequest', async (ctx) => { + * const [result, url] = ctx.args; + * console.log(`Fetched ${url}:`, result.status); * }); + * + * // Wrap the fetch function + * const wrappedFetch = hooks.wrap( + * async (url: string, opts: RequestInit) => fetch(url, opts), + * { pre: 'preRequest', post: 'postRequest' } + * ); */ - wrap>( - instance: Shape, - name: K, - opts?: MakeHookOptions - ) { + wrap( + fn: F, + hooks: + | { pre: HookName; post?: HookName } + | { pre?: HookName; post: HookName } + ): (...args: Parameters) => Promise>> { + + assert( + hooks.pre || hooks.post, + 'wrap() requires at least one of "pre" or "post" hooks' + ); + + if (hooks.pre) this.#assertRegistered(hooks.pre, 'wrap'); + if (hooks.post) this.#assertRegistered(hooks.post, 'wrap'); + + return async (...args: Parameters): Promise>> => { + + let currentArgs = args; + let result: Awaited> | undefined; - assert(isObject(instance), '"instance" must be an object'); + // Pre hook + if (hooks.pre) { - const wrapped = this.make( - name, - instance[name] as FuncOrNever, - { - bindTo: instance, - ...opts + const preResult = await this.emit(hooks.pre, ...currentArgs as any); + + currentArgs = preResult.args as Parameters; + + if (preResult.earlyReturn && preResult.result !== undefined) { + + return preResult.result as 
Awaited>; + } } - ); - this.#wrapped.set(wrapped, instance[name] as AsyncFunc); + // Execute function + result = await fn(...currentArgs); - instance[name] = wrapped as Shape[K]; + // Post hook + if (hooks.post) { - } + const postResult = await this.emit( + hooks.post, + ...[result, ...currentArgs] as any + ); - /** - * Clear all registered hooks and extensions. - * - * After calling this method, all hooks are unregistered and all - * extensions are removed. Previously wrapped functions will continue - * to work but without any extensions. - * - * @example - * hooks.wrap(app, 'save'); - * hooks.extend('save', 'before', validator); - * - * // Reset for testing - * hooks.clear(); - * - * // app.save() still works, but validator no longer runs - */ - clear() { + if (postResult.result !== undefined) { - this.#registered.clear(); - this.#hooks.clear(); - this.#hookFnOpts = new WeakMap(); + return postResult.result as Awaited>; + } + } + + return result as Awaited>; + }; } -} \ No newline at end of file +} diff --git a/tests/src/hooks.ts b/tests/src/hooks.ts index 34bd7a6..ef6a322 100644 --- a/tests/src/hooks.ts +++ b/tests/src/hooks.ts @@ -1,586 +1,974 @@ import { describe, it, - beforeEach, expect, vi } from 'vitest' import { HookEngine, HookError, isHookError } from '../../packages/hooks/src/index.ts'; -import { attempt, noop } from '../../packages/utils/src/index.ts'; +import { attempt } from '../../packages/utils/src/index.ts'; describe('@logosdx/hooks', () => { - const startFn = vi.fn(); - const stopFn = vi.fn(); - const requestFn = vi.fn(); - const beforeFn = vi.fn(); - const afterFn = vi.fn(); - const errorFn = vi.fn(); + describe('HookEngine instantiation', () => { - class TestApp { + it('instantiates with no options', () => { - notAFunc = 'hello'; - start(...args: any[]) { return startFn(...args) } - stop(...args: any[]) { return stopFn(...args) } - request(...args: any[]) { return requestFn(...args) } - }; + const engine = new HookEngine(); + 
expect(engine).to.be.instanceOf(HookEngine); + }); + it('instantiates with custom handleFail', () => { - beforeEach(() => { + class CustomError extends Error {} - vi.resetAllMocks(); - }); + const engine = new HookEngine({ + handleFail: CustomError + }); - it('instantiates', () => { + expect(engine).to.be.instanceOf(HookEngine); + }); - new HookEngine(); - }); + it('is permissive by default (accepts any hook name)', async () => { + + // No type parameter - permissive mode + const engine = new HookEngine(); + const callback = vi.fn(); - it('runs the happy path', async () => { + engine.on('anyHookName', callback); + engine.on('anotherHook', callback); + engine.on('yetAnother', callback); - const app = new TestApp(); - const engine = new HookEngine; + await engine.emit('anyHookName'); + await engine.emit('anotherHook', 'arg1', 'arg2'); - const wrapped = engine.make('start', app.start, { bindTo: app }); + expect(callback).toHaveBeenCalledTimes(2); + }); - app.start = wrapped; + it('is strict when type parameter is provided', async () => { - engine.extend('start', 'before', beforeFn); - engine.extend('start', 'after', afterFn); + interface StrictLifecycle { + preRequest(url: string): Promise; + postRequest(url: string, response: Response): Promise; + } - // @ts-expect-error - testing invalid attribute type (only functions should be picke up) - expect(() => engine.extend('notAFunc', 'before', noop)).to.throw(); + const engine = new HookEngine(); + const callback = vi.fn(); - await app.start(); + // These work because they're defined in the interface + engine.on('preRequest', callback); + engine.on('postRequest', callback); - expect(startFn).toHaveBeenCalledOnce(); - expect(beforeFn).toHaveBeenCalledOnce(); - expect(afterFn).toHaveBeenCalledOnce(); - }); + await engine.emit('preRequest', 'https://example.com'); - it('rejects invalid usage of extend', () => { - - // [name, extensionPoint, cbOrOpts] - const badArgs = [ - [null], - [1], - ['nonexistentHook'], - ['stop'], - 
['start'], - ['start', 1], - ['start', 'invalidExtensionPoint'], - ['start', 'before', null], - ['start', 'before', {}], - ['start', 'before', { callback: null }], - ] as unknown as Array['extend']>>; - - const app = new TestApp(); - const engine = new HookEngine; - - engine.make('start', app.start, { bindTo: app }); - - for (const args of badArgs) { - expect(() => engine.extend(...args)).to.throw(); - } + expect(callback).toHaveBeenCalledOnce(); + + // TypeScript would error on: engine.on('undefinedHook', callback) + // But we can't test compile-time errors at runtime + }); }); - it('rejects invalid usage of make', () => { + describe('engine.register()', () => { + + it('returns this for chaining', () => { + + const engine = new HookEngine(); + const result = engine.register('hook1', 'hook2'); + + expect(result).to.equal(engine); + }); + + it('requires at least one hook name', () => { + + const engine = new HookEngine(); + + expect(() => engine.register()).to.throw(); + }); + + it('enables strict mode after first registration', async () => { + + const engine = new HookEngine(); + + // Before registration - permissive + engine.on('anyHook', async () => {}); + + engine.clear(); + engine.register('allowedHook'); + + // After registration - strict + expect(() => engine.on('unregisteredHook', async () => {})).to.throw(/not registered/); + }); + + it('allows registered hooks', async () => { + + const engine = new HookEngine(); + const callback = vi.fn(); + + engine.register('myHook'); + engine.on('myHook', callback); + + await engine.emit('myHook'); + + expect(callback).toHaveBeenCalledOnce(); + }); + + it('throws on unregistered hook in on()', () => { + + const engine = new HookEngine(); + engine.register('validHook'); + + expect(() => engine.on('invalidHook', async () => {})).to.throw( + /Hook "invalidHook" is not registered/ + ); + }); + + it('throws on unregistered hook in emit()', async () => { + + const engine = new HookEngine(); + engine.register('validHook'); + + 
await expect(engine.emit('invalidHook')).rejects.toThrow( + /Hook "invalidHook" is not registered/ + ); + }); + + it('throws on unregistered hook in wrap()', () => { + + const engine = new HookEngine(); + engine.register('preValid'); + + expect(() => engine.wrap( + async () => 'result', + { pre: 'preInvalid' } + )).to.throw(/Hook "preInvalid" is not registered/); + }); + + it('shows registered hooks in error message', () => { + + const engine = new HookEngine(); + engine.register('preRequest', 'postRequest', 'rateLimit'); - // [name, cb, opts] - const badArgs = [ - [null], - [1], - ['stop', null], - ['stop', 'notAFunction'], - ['stop', noop, 'notAnObject'], - ['start', noop], // already registered - ] as unknown as Array['make']>>; + expect(() => engine.on('preRequset', async () => {})).to.throw( + /Registered hooks: preRequest, postRequest, rateLimit/ + ); + }); + + it('clear() resets to permissive mode', async () => { + + const engine = new HookEngine(); + engine.register('strictHook'); - const app = new TestApp(); - const engine = new HookEngine; + expect(() => engine.on('unregistered', async () => {})).to.throw(); - engine.make('start', app.start, { bindTo: app }); + engine.clear(); - for (const args of badArgs) { - expect(() => engine.make(...args)).to.throw(); - } + // Back to permissive + engine.on('anyHook', async () => {}); + await engine.emit('anyHook'); + }); }); - describe('engine.make()', () => { + describe('wrap() runtime validation', () => { + + it('throws when neither pre nor post provided', () => { - it('registers a hook and returns a wrapped function', async () => { + const engine = new HookEngine(); - const engine = new HookEngine(); - const app = new TestApp(); + // Cast to bypass TypeScript - test runtime validation + expect(() => engine.wrap( + async () => 'result', + {} as any + )).to.throw(/requires at least one of "pre" or "post"/); + }); - const wrapped = engine.make('start', app.start, { bindTo: app }); + it('validates pre hook is 
registered', () => { - expect(wrapped).to.be.a('function'); - expect(wrapped).to.not.equal(app.start); + const engine = new HookEngine(); + engine.register('validPre'); + + expect(() => engine.wrap( + async () => 'result', + { pre: 'invalidPre' } + )).to.throw(/not registered/); + }); - // Should be able to extend after registration - engine.extend('start', 'before', beforeFn); + it('validates post hook is registered', () => { - await wrapped(); + const engine = new HookEngine(); + engine.register('validPost'); - expect(beforeFn).toHaveBeenCalledOnce(); + expect(() => engine.wrap( + async () => 'result', + { post: 'invalidPost' } + )).to.throw(/not registered/); }); - it('executes the original function', async () => { + it('validates both pre and post hooks are registered', () => { - const engine = new HookEngine(); - const app = new TestApp(); + const engine = new HookEngine(); + engine.register('validPre', 'validPost'); - const wrapped = engine.make('start', app.start, { bindTo: app }); + // This should work + const wrapped = engine.wrap( + async (x: number) => x * 2, + { pre: 'validPre', post: 'validPost' } + ); - await wrapped(); + expect(wrapped).to.be.a('function'); + }); + }); + + describe('engine.on()', () => { + + it('registers a hook callback', async () => { + + interface Lifecycle { + test(): Promise; + } + + const engine = new HookEngine(); + const callback = vi.fn(); - expect(startFn).toHaveBeenCalledOnce(); + engine.on('test', callback); + + await engine.emit('test'); + + expect(callback).toHaveBeenCalledOnce(); }); - it('returns the original function return value', async () => { + it('returns a cleanup function', async () => { - const engine = new HookEngine(); - const app = new TestApp(); - const expectedResult = { success: true }; + interface Lifecycle { + test(): Promise; + } - startFn.mockReturnValue(expectedResult); + const engine = new HookEngine(); + const callback = vi.fn(); - const wrapped = engine.make('start', app.start, { bindTo: app }); 
+ const cleanup = engine.on('test', callback); - const result = await wrapped(); + await engine.emit('test'); + expect(callback).toHaveBeenCalledOnce(); + + cleanup(); - expect(result).to.equal(expectedResult); + await engine.emit('test'); + expect(callback).toHaveBeenCalledOnce(); // Still 1, not called again }); - it('keeps the original function arguments', async () => { + it('accepts options object with callback', async () => { - const engine = new HookEngine(); - const app = new TestApp(); + interface Lifecycle { + test(): Promise; + } + + const engine = new HookEngine(); + const callback = vi.fn(); - const wrapped = engine.make('start', app.start, { bindTo: app }); + engine.on('test', { callback }); - await wrapped('arg1', 'arg2', 123); + await engine.emit('test'); - expect(startFn).toHaveBeenCalledWith('arg1', 'arg2', 123); + expect(callback).toHaveBeenCalledOnce(); }); - it('binds the original function to the provided context', async () => { + it('rejects invalid name', () => { + + // Permissive mode - any string hook name works + const engine = new HookEngine(); - const contextCapture = vi.fn(); + expect(() => engine.on(null as any, async () => {})).to.throw(); + expect(() => engine.on(123 as any, async () => {})).to.throw(); + }); - class ContextClass { + it('rejects invalid callback', () => { - value = 42; + // Permissive mode - any string hook name works + const engine = new HookEngine(); - async doWork() { + expect(() => engine.on('test', null as any)).to.throw(); + expect(() => engine.on('test', 'notAFunction' as any)).to.throw(); + expect(() => engine.on('test', {} as any)).to.throw(); + expect(() => engine.on('test', { callback: null } as any)).to.throw(); + }); - contextCapture(this.value); - } + it('does not duplicate callbacks added more than once', async () => { + + interface Lifecycle { + test(): Promise; } - const instance = new ContextClass(); - const customEngine = new HookEngine(); + const engine = new HookEngine(); + const callback = 
vi.fn(); - const wrapped = customEngine.make('doWork', instance.doWork, { bindTo: instance }); + engine.on('test', callback); + engine.on('test', callback); + engine.on('test', callback); - await wrapped(); + await engine.emit('test'); - expect(contextCapture).toHaveBeenCalledWith(42); + expect(callback).toHaveBeenCalledOnce(); }); + it('runs callbacks in insertion order', async () => { + + interface Lifecycle { + test(): Promise; + } + + const engine = new HookEngine(); + const order: number[] = []; + + engine.on('test', async () => { order.push(1); }); + engine.on('test', async () => { order.push(2); }); + engine.on('test', async () => { order.push(3); }); + + await engine.emit('test'); + + expect(order).to.deep.equal([1, 2, 3]); + }); }); - describe('hook extensions', () => { + describe('engine.emit()', () => { - let app = new TestApp(); - let engine = new HookEngine; + it('returns EmitResult with args', async () => { + interface Lifecycle { + test(a: string, b: number): Promise; + } - beforeEach(() => { + const engine = new HookEngine(); - vi.resetAllMocks(); - app = new TestApp(); - engine = new HookEngine; + const result = await engine.emit('test', 'hello', 42); - engine.wrap(app, 'start'); + expect(result.args).to.deep.equal(['hello', 42]); + expect(result.earlyReturn).to.be.false; + expect(result.result).to.be.undefined; }); - it('allows the addition of a before extension', async () => { + it('returns EmitResult when no callbacks registered', async () => { - engine.extend('start', 'before', beforeFn); + interface Lifecycle { + test(a: string): Promise; + } + + const engine = new HookEngine(); - await app.start(); + const result = await engine.emit('test', 'value'); - expect(beforeFn).toHaveBeenCalledOnce(); - expect(startFn).toHaveBeenCalledOnce(); - expect(beforeFn).toHaveBeenCalledBefore(startFn); + expect(result.args).to.deep.equal(['value']); + expect(result.earlyReturn).to.be.false; + }); - const ctx = beforeFn.mock.calls[0]![0]; + it('passes context 
to callbacks', async () => { - expect(ctx).to.have.property('point', 'before'); - expect(ctx).to.have.property('args').that.is.an('array'); + interface Lifecycle { + test(url: string): Promise; + } + + const engine = new HookEngine(); + let receivedContext: any; + + engine.on('test', async (ctx) => { + + receivedContext = ctx; + }); - expect(startFn).toHaveBeenCalledWith(...ctx.args); + await engine.emit('test', 'https://example.com'); + expect(receivedContext).to.have.property('args').that.deep.equals(['https://example.com']); + expect(receivedContext).to.have.property('setArgs').that.is.a('function'); + expect(receivedContext).to.have.property('setResult').that.is.a('function'); + expect(receivedContext).to.have.property('returnEarly').that.is.a('function'); + expect(receivedContext).to.have.property('fail').that.is.a('function'); + expect(receivedContext).to.have.property('removeHook').that.is.a('function'); }); + }); + + describe('context.setArgs()', () => { - it('allows the addition of an after extension', async () => { + it('modifies args for subsequent callbacks', async () => { - engine.extend('start', 'after', afterFn); + interface Lifecycle { + test(value: string): Promise; + } - await app.start(); + const engine = new HookEngine(); + const secondCallback = vi.fn(); - expect(afterFn).toHaveBeenCalledOnce(); - expect(startFn).toHaveBeenCalledOnce(); + engine.on('test', async (ctx) => { - expect(afterFn).toHaveBeenCalledAfter(startFn); + ctx.setArgs(['modified']); + }); - const ctx = afterFn.mock.calls[0]![0]; + engine.on('test', secondCallback); - expect(ctx).to.have.property('point', 'after'); - expect(ctx).to.have.property('args').that.is.an('array'); + await engine.emit('test', 'original'); - expect(startFn).toHaveBeenCalledWith(...ctx.args); + const receivedCtx = secondCallback.mock.calls[0]![0]; + expect(receivedCtx.args).to.deep.equal(['modified']); }); - it('allows the addition of an error extension', async () => { + it('returns modified args in 
EmitResult', async () => { + + interface Lifecycle { + test(value: string): Promise; + } + + const engine = new HookEngine(); + + engine.on('test', async (ctx) => { - engine.extend('start', 'error', errorFn); + ctx.setArgs(['modified']); + }); - const error = new Error('Test error'); + const result = await engine.emit('test', 'original'); - startFn.mockImplementation(() => { throw error; }); + expect(result.args).to.deep.equal(['modified']); + }); - const [, err] = await attempt(() => app.start()); + it('rejects non-array args', async () => { - expect(err).to.equal(error); + interface Lifecycle { + test(value: string): Promise; + } - expect(errorFn).toHaveBeenCalledOnce(); - expect(startFn).toHaveBeenCalledOnce(); + const engine = new HookEngine(); - expect(errorFn).toHaveBeenCalledAfter(startFn); + engine.on('test', async (ctx) => { - const ctx = errorFn.mock.calls[0]![0]; + ctx.setArgs('not an array' as any); + }); - expect(ctx).to.have.property('point', 'error'); - expect(ctx).to.have.property('args').that.is.an('array'); - expect(ctx).to.have.property('error', error); + const [, err] = await attempt(() => engine.emit('test', 'value')); - expect(startFn).toHaveBeenCalledWith(...ctx.args); + expect(err).to.be.instanceOf(Error); }); + }); + + describe('context.setResult()', () => { + + it('sets result in EmitResult', async () => { + + interface Lifecycle { + test(): Promise; + } + + const engine = new HookEngine(); - it('preserves execution order of extensions', async () => { + engine.on('test', async (ctx) => { + + ctx.setResult('my result'); + }); + + const result = await engine.emit('test'); + + expect(result.result).to.equal('my result'); + }); - engine.extend('start', 'before', beforeFn); - engine.extend('start', 'after', afterFn); - engine.extend('start', 'error', errorFn); + it('subsequent callbacks can read and modify result', async () => { - await app.start(); + interface Lifecycle { + test(): Promise; + } - 
expect(beforeFn).toHaveBeenCalledBefore(startFn); - expect(afterFn).toHaveBeenCalledAfter(startFn); - expect(errorFn).not.toHaveBeenCalled(); + const engine = new HookEngine(); - const error = new Error('Test error'); + engine.on('test', async (ctx) => { - beforeFn.mockReset(); - afterFn.mockReset(); - errorFn.mockReset(); - startFn.mockReset(); + ctx.setResult(10); + }); - startFn.mockImplementation(() => { throw error; }); + engine.on('test', async (ctx) => { - const [, err] = await attempt(() => app.start()); + ctx.setResult((ctx.result ?? 0) * 2); + }); - expect(err).to.equal(error); + const result = await engine.emit('test'); - expect(beforeFn).toHaveBeenCalledBefore(startFn); - expect(errorFn).toHaveBeenCalledAfter(startFn); - expect(afterFn).not.toHaveBeenCalled(); + expect(result.result).to.equal(20); }); + }); - it('allows the addition of more than one extension per extension point', async () => { + describe('context.returnEarly()', () => { - const anotherBeforeFn = vi.fn(); - const anotherAfterFn = vi.fn(); - const anotherErrorFn = vi.fn(); + it('stops processing remaining callbacks', async () => { - engine.extend('start', 'before', beforeFn); - engine.extend('start', 'before', anotherBeforeFn); + interface Lifecycle { + test(): Promise; + } - engine.extend('start', 'after', afterFn); - engine.extend('start', 'after', anotherAfterFn); + const engine = new HookEngine(); + const firstCallback = vi.fn(async (ctx) => ctx.returnEarly()); + const secondCallback = vi.fn(); - engine.extend('start', 'error', errorFn); - engine.extend('start', 'error', anotherErrorFn); + engine.on('test', firstCallback); + engine.on('test', secondCallback); - expect(beforeFn).not.toHaveBeenCalled(); - expect(anotherBeforeFn).not.toHaveBeenCalled(); - expect(afterFn).not.toHaveBeenCalled(); - expect(anotherAfterFn).not.toHaveBeenCalled(); - expect(errorFn).not.toHaveBeenCalled(); - expect(anotherErrorFn).not.toHaveBeenCalled(); + await engine.emit('test'); - await app.start(); + 
expect(firstCallback).toHaveBeenCalledOnce(); + expect(secondCallback).not.toHaveBeenCalled(); + }); - expect(beforeFn).toHaveBeenCalledOnce(); - expect(anotherBeforeFn).toHaveBeenCalledOnce(); + it('sets earlyReturn flag in EmitResult', async () => { - expect(beforeFn).toHaveBeenCalledBefore(anotherBeforeFn); - expect(anotherBeforeFn).toHaveBeenCalledBefore(startFn); + interface Lifecycle { + test(): Promise; + } - expect(afterFn).toHaveBeenCalledOnce(); - expect(anotherAfterFn).toHaveBeenCalledOnce(); + const engine = new HookEngine(); - expect(afterFn).toHaveBeenCalledAfter(startFn); - expect(anotherAfterFn).toHaveBeenCalledAfter(afterFn); + engine.on('test', async (ctx) => { - const error = new Error('Test error'); + ctx.returnEarly(); + }); - beforeFn.mockReset(); - anotherBeforeFn.mockReset(); - afterFn.mockReset(); - anotherAfterFn.mockReset(); - errorFn.mockReset(); - anotherErrorFn.mockReset(); - startFn.mockReset(); + const result = await engine.emit('test'); - startFn.mockImplementation(() => { throw error; }); + expect(result.earlyReturn).to.be.true; + }); - const [, err] = await attempt(() => app.start()); + it('preserves result set before returnEarly', async () => { - expect(err).to.equal(error); + interface Lifecycle { + test(): Promise; + } - expect(beforeFn).toHaveBeenCalledOnce(); - expect(anotherBeforeFn).toHaveBeenCalledOnce(); + const engine = new HookEngine(); - expect(beforeFn).toHaveBeenCalledBefore(anotherBeforeFn); - expect(anotherBeforeFn).toHaveBeenCalledBefore(startFn); + engine.on('test', async (ctx) => { - expect(errorFn).toHaveBeenCalledOnce(); - expect(anotherErrorFn).toHaveBeenCalledOnce(); + ctx.setResult('cached value'); + ctx.returnEarly(); + }); + + const result = await engine.emit('test'); - expect(errorFn).toHaveBeenCalledAfter(startFn); - expect(anotherErrorFn).toHaveBeenCalledAfter(errorFn); + expect(result.result).to.equal('cached value'); + expect(result.earlyReturn).to.be.true; }); + }); - it('allows the cleanup of an 
extension point', async () => { + describe('context.fail()', () => { - const cleanup = engine.extend('start', 'before', beforeFn); + it('throws HookError by default', async () => { - expect(beforeFn).not.toHaveBeenCalled(); + interface Lifecycle { + test(): Promise; + } - await app.start(); + const engine = new HookEngine(); - expect(beforeFn).toHaveBeenCalledOnce(); - expect(startFn).toHaveBeenCalledOnce(); + engine.on('test', async (ctx) => { - cleanup(); + ctx.fail('Validation failed'); + }); - await app.start(); + const [, err] = await attempt(() => engine.emit('test')); - expect(beforeFn).toHaveBeenCalledOnce(); - expect(startFn).toHaveBeenCalledTimes(2); + expect(isHookError(err)).to.be.true; + expect(err).to.have.property('message').that.includes('Validation failed'); }); - it('allows extensions to modify the original function arguments', async () => { + it('sets hookName on HookError', async () => { + + interface Lifecycle { + validate(): Promise; + } - const modifiedArgs = ['modified', 'args']; + const engine = new HookEngine(); - engine.extend('start', 'before', async (ctx) => { + engine.on('validate', async (ctx) => { - ctx.setArgs(modifiedArgs as any); + ctx.fail('Invalid'); }); - await app.start(); + const [, err] = await attempt(() => engine.emit('validate')); - expect(startFn).toHaveBeenCalledOnce(); - expect(startFn).toHaveBeenCalledWith(...modifiedArgs); + expect(isHookError(err)).to.be.true; + expect(err).to.have.property('hookName', 'validate'); }); - it('allows extensions to return early from the hook chain', async () => { + it('stops processing remaining callbacks', async () => { - const earlyResult = { early: true }; + interface Lifecycle { + test(): Promise; + } - engine.extend('start', 'before', async (ctx) => { + const engine = new HookEngine(); + const secondCallback = vi.fn(); - ctx.setResult(earlyResult as any); - ctx.returnEarly(); + engine.on('test', async (ctx) => { + + ctx.fail('Stop here'); }); - const result = await app.start(); + 
engine.on('test', secondCallback); + + await attempt(() => engine.emit('test')); - expect(startFn).not.toHaveBeenCalled(); - expect(result).to.equal(earlyResult); + expect(secondCallback).not.toHaveBeenCalled(); }); + }); + + describe('context.removeHook()', () => { - it('doesnt duplicate extensions added more than once', async () => { + it('removes the callback from future emissions', async () => { - engine.extend('start', 'before', beforeFn); - engine.extend('start', 'before', beforeFn); - engine.extend('start', 'before', beforeFn); + interface Lifecycle { + test(): Promise; + } + + const engine = new HookEngine(); + let callCount = 0; - await app.start(); + engine.on('test', async (ctx) => { - expect(beforeFn).toHaveBeenCalledOnce(); + callCount++; + + if (callCount >= 2) { + ctx.removeHook(); + } + }); + + await engine.emit('test'); + await engine.emit('test'); + await engine.emit('test'); + await engine.emit('test'); + + expect(callCount).to.equal(2); }); - it('can run an extension only once when specified', async () => { + it('removes only the current callback', async () => { + + interface Lifecycle { + test(): Promise; + } + + const engine = new HookEngine(); + const firstFn = vi.fn(); + const selfRemovingFn = vi.fn(async (ctx) => ctx.removeHook()); + const lastFn = vi.fn(); + + engine.on('test', firstFn); + engine.on('test', selfRemovingFn); + engine.on('test', lastFn); + + await engine.emit('test'); + + expect(firstFn).toHaveBeenCalledOnce(); + expect(selfRemovingFn).toHaveBeenCalledOnce(); + expect(lastFn).toHaveBeenCalledOnce(); + + firstFn.mockReset(); + selfRemovingFn.mockReset(); + lastFn.mockReset(); + + await engine.emit('test'); + + expect(firstFn).toHaveBeenCalledOnce(); + expect(selfRemovingFn).not.toHaveBeenCalled(); + expect(lastFn).toHaveBeenCalledOnce(); + }); + }); + + describe('once option', () => { + + it('removes callback after first execution', async () => { + + interface Lifecycle { + test(): Promise; + } + + const engine = new 
HookEngine(); + const callback = vi.fn(); - engine.extend('start', 'before', { - callback: beforeFn, + engine.on('test', { + callback, once: true }); - await app.start(); - await app.start(); - await app.start(); + await engine.emit('test'); + await engine.emit('test'); + await engine.emit('test'); - expect(beforeFn).toHaveBeenCalledOnce(); - expect(startFn).toHaveBeenCalledTimes(3); + expect(callback).toHaveBeenCalledOnce(); }); + }); - it('captures and re-throws errors from the original function in error extensions', async () => { + describe('ignoreOnFail option', () => { - const originalError = new Error('Original function error'); + it('swallows errors from callback', async () => { - startFn.mockImplementation(() => { throw originalError; }); + interface Lifecycle { + test(): Promise; + } - engine.extend('start', 'error', errorFn); + const engine = new HookEngine(); + const afterCallback = vi.fn(); - const [, err] = await attempt(() => app.start()); + engine.on('test', { + callback: async () => { throw new Error('Should be ignored'); }, + ignoreOnFail: true + }); - expect(err).to.equal(originalError); - expect(errorFn).toHaveBeenCalledOnce(); + engine.on('test', afterCallback); - const ctx = errorFn.mock.calls[0]![0]; + const [, err] = await attempt(() => engine.emit('test')); - expect(ctx).to.have.property('error', originalError); - expect(ctx).to.have.property('point', 'error'); + expect(err).to.be.null; + expect(afterCallback).toHaveBeenCalledOnce(); }); - it('captures and re-throws errors from before extensions', async () => { + it('swallows ctx.fail() errors', async () => { - const beforeError = new Error('Before extension error'); + interface Lifecycle { + test(): Promise; + } - engine.extend('start', 'before', async () => { + const engine = new HookEngine(); + const afterCallback = vi.fn(); - throw beforeError; + engine.on('test', { + callback: async (ctx) => { ctx.fail('Should be ignored'); }, + ignoreOnFail: true }); - engine.extend('start', 'error', 
errorFn); + engine.on('test', afterCallback); - const [, err] = await attempt(() => app.start()); + const [, err] = await attempt(() => engine.emit('test')); - expect(err).to.equal(beforeError); - expect(startFn).not.toHaveBeenCalled(); - expect(errorFn).not.toHaveBeenCalled(); + expect(err).to.be.null; + expect(afterCallback).toHaveBeenCalledOnce(); }); + }); + + describe('error handling', () => { + + it('propagates user-thrown errors as-is (not wrapped in HookError)', async () => { + + class CustomError extends Error { + code = 'CUSTOM'; + } + + interface Lifecycle { + test(): Promise; + } + + const engine = new HookEngine(); + + engine.on('test', async () => { + + throw new CustomError('Custom error'); + }); + + const [, err] = await attempt(() => engine.emit('test')); + + expect(err).to.be.instanceOf(CustomError); + expect(isHookError(err)).to.be.false; + expect((err as CustomError).code).to.equal('CUSTOM'); + }); + + it('stops at first error without ignoreOnFail', async () => { - it('captures and re-throws errors from after extensions', async () => { + interface Lifecycle { + test(): Promise; + } - const afterError = new Error('After extension error'); + const engine = new HookEngine(); + const secondCallback = vi.fn(); - engine.extend('start', 'after', async () => { + engine.on('test', async () => { - throw afterError; + throw new Error('First error'); }); - engine.extend('start', 'error', errorFn); + engine.on('test', secondCallback); - const [, err] = await attempt(() => app.start()); + await attempt(() => engine.emit('test')); - expect(err).to.equal(afterError); - expect(startFn).toHaveBeenCalledOnce(); - expect(errorFn).not.toHaveBeenCalled(); + expect(secondCallback).not.toHaveBeenCalled(); }); + }); + + describe('custom handleFail', () => { - it('captures and re-throws errors from error extensions as well', async () => { + it('uses Error constructor with single argument', async () => { - const originalError = new Error('Original error'); - const 
errorExtensionError = new Error('Error extension error'); + class CustomError extends Error { + name = 'CustomError'; + } + + interface Lifecycle { + test(): Promise; + } - startFn.mockImplementation(() => { throw originalError; }); + const engine = new HookEngine({ + handleFail: CustomError + }); - engine.extend('start', 'error', async () => { + engine.on('test', async (ctx) => { - throw errorExtensionError; + ctx.fail('Custom message'); }); - const [, err] = await attempt(() => app.start()); + const [, err] = await attempt(() => engine.emit('test')); - expect(err).to.equal(errorExtensionError); + expect(err).to.be.instanceOf(CustomError); + expect(err).to.have.property('message', 'Custom message'); }); - it('ignores errors thrown by extension if specified', async () => { + it('uses Error constructor with multiple arguments (Firebase-style)', async () => { - const extensionError = new Error('Extension error'); + // Simulates Firebase HttpsError: new HttpsError(code, message, details?) + class HttpsError extends Error { - engine.extend('start', 'before', { - callback: async () => { throw extensionError; }, - ignoreOnFail: true + code: string; + details?: object | undefined; + + constructor(code: string, message: string, details?: object | undefined) { + + super(message); + this.code = code; + this.details = details; + } + } + + interface Lifecycle { + validate(): Promise; + } + + const engine = new HookEngine({ + handleFail: HttpsError }); - engine.extend('start', 'after', afterFn); + engine.on('validate', async (ctx) => { - const [, err] = await attempt(() => app.start()); + ctx.fail('failed-precondition', 'Email is required', { field: 'email' }); + }); - expect(err).to.be.null; - expect(startFn).toHaveBeenCalledOnce(); - expect(afterFn).toHaveBeenCalledOnce(); + const [, err] = await attempt(() => engine.emit('validate')); + + expect(err).to.be.instanceOf(HttpsError); + expect(err).to.have.property('code', 'failed-precondition'); + 
expect(err).to.have.property('message', 'Email is required'); + expect(err).to.have.property('details').that.deep.equals({ field: 'email' }); }); - it('captures results from original function', async () => { + it('uses custom function that throws', async () => { - const originalResult = { data: 'test' }; + interface Lifecycle { + test(): Promise; + } - startFn.mockReturnValue(originalResult); + const engine = new HookEngine({ + handleFail: (message: string, code: number): never => { - engine.extend('start', 'after', afterFn); + const error = new Error(message); + (error as any).code = code; + throw error; + } + }); - const result = await app.start(); + engine.on('test', async (ctx) => { - expect(result).to.equal(originalResult); + ctx.fail('Error message', 500); + }); - const ctx = afterFn.mock.calls[0]![0]; + const [, err] = await attempt(() => engine.emit('test')); - expect(ctx).to.have.property('results', originalResult); + expect(err).to.have.property('message', 'Error message'); + expect(err).to.have.property('code', 500); }); - it('captures results from before extensions when early return is used', async () => { + it('passes all arguments to handleFail function', async () => { - const earlyResult = { early: 'result' }; + interface Lifecycle { + test(): Promise; + } - engine.extend('start', 'before', async (ctx) => { + const receivedArgs: unknown[] = []; - ctx.setResult(earlyResult as any); - ctx.returnEarly(); + const engine = new HookEngine({ + handleFail: (message: string, data: object, code: number): never => { + + receivedArgs.push(message, data, code); + throw new Error('fail'); + } + }); + + engine.on('test', async (ctx) => { + + ctx.fail('message', { data: true }, 123); }); - const result = await app.start(); + await attempt(() => engine.emit('test')); - expect(result).to.equal(earlyResult); - expect(startFn).not.toHaveBeenCalled(); + expect(receivedArgs).to.deep.equal(['message', { data: true }, 123]); }); - it('captures results from after extensions 
via setResult', async () => { + it('default handleFail accepts string only', async () => { - const originalResult = { original: true }; - const modifiedResult = { modified: true }; + interface Lifecycle { + test(): Promise; + } - startFn.mockReturnValue(originalResult); + // No FailArgs specified - defaults to [string] + const engine = new HookEngine(); - engine.extend('start', 'after', async (ctx) => { + engine.on('test', async (ctx) => { - expect(ctx.results).to.equal(originalResult); - ctx.setResult(modifiedResult as any); + // This should only accept a string + ctx.fail('Just a message'); }); - const result = await app.start(); + const [, err] = await attempt(() => engine.emit('test')); - expect(result).to.equal(modifiedResult); + expect(err).to.have.property('message', 'Just a message'); + }); + }); + + describe('engine.clear()', () => { + + it('removes all hooks', async () => { + + interface Lifecycle { + test(): Promise; + } + + const engine = new HookEngine(); + const callback = vi.fn(); + + engine.on('test', callback); + + await engine.emit('test'); + expect(callback).toHaveBeenCalledOnce(); + + engine.clear(); + + await engine.emit('test'); + expect(callback).toHaveBeenCalledOnce(); // Still 1 + }); + + it('allows re-registration after clear', async () => { + + interface Lifecycle { + test(): Promise; + } + + const engine = new HookEngine(); + const callback1 = vi.fn(); + const callback2 = vi.fn(); + + engine.on('test', callback1); + engine.clear(); + engine.on('test', callback2); + + await engine.emit('test'); + + expect(callback1).not.toHaveBeenCalled(); + expect(callback2).toHaveBeenCalledOnce(); }); }); @@ -610,308 +998,427 @@ describe('@logosdx/hooks', () => { }); }); - describe('HookError properties via fail()', () => { + describe('engine.once()', () => { - let app: TestApp; - let engine: HookEngine; + it('is sugar for on() with once: true', async () => { - beforeEach(() => { + interface Lifecycle { + test(): Promise; + } + + const engine = new 
HookEngine(); + const callback = vi.fn(); + + engine.once('test', callback); - vi.resetAllMocks(); - app = new TestApp(); - engine = new HookEngine(); - engine.wrap(app, 'start'); + await engine.emit('test'); + await engine.emit('test'); + await engine.emit('test'); + + expect(callback).toHaveBeenCalledOnce(); }); - it('sets hookName and extPoint when fail() is called in before', async () => { + it('returns cleanup function', async () => { + + interface Lifecycle { + test(): Promise; + } - engine.extend('start', 'before', async (ctx) => { + const engine = new HookEngine(); + const callback = vi.fn(); - ctx.fail('Test failure'); - }); + const cleanup = engine.once('test', callback); + cleanup(); - const [, err] = await attempt(() => app.start()); + await engine.emit('test'); - expect(isHookError(err)).to.be.true; - expect(err).to.have.property('hookName', 'start'); - expect(err).to.have.property('extPoint', 'before'); - expect(err).to.have.property('message').that.includes('Test failure'); + expect(callback).not.toHaveBeenCalled(); }); - it('sets hookName and extPoint when fail() is called in after', async () => { + it('receives full context', async () => { + + interface Lifecycle { + test(value: string): Promise; + } - engine.extend('start', 'after', async (ctx) => { + const engine = new HookEngine(); + let receivedContext: any; - ctx.fail('After failure'); + engine.once('test', async (ctx) => { + + receivedContext = ctx; + ctx.setResult('modified'); }); - const [, err] = await attempt(() => app.start()); + const result = await engine.emit('test', 'original'); - expect(isHookError(err)).to.be.true; - expect(err).to.have.property('hookName', 'start'); - expect(err).to.have.property('extPoint', 'after'); + expect(receivedContext.args).to.deep.equal(['original']); + expect(result.result).to.equal('modified'); + }); + }); + + describe('engine.wrap()', () => { + + it('wraps a function with pre hook', async () => { + + interface Lifecycle { + preProcess(value: number): 
Promise; + } + + const engine = new HookEngine(); + const preCallback = vi.fn(); + + engine.on('preProcess', preCallback); + + const wrapped = engine.wrap( + async (value: number) => value * 2, + { pre: 'preProcess' } + ); + + const result = await wrapped(5); + + expect(result).to.equal(10); + expect(preCallback).toHaveBeenCalledOnce(); }); - it('sets hookName and extPoint when fail() is called in error', async () => { + it('wraps a function with post hook', async () => { - const originalError = new Error('Original'); + interface Lifecycle { + postProcess(result: number, value: number): Promise; + } - startFn.mockImplementation(() => { throw originalError; }); + const engine = new HookEngine(); + const postCallback = vi.fn(); - engine.extend('start', 'error', async (ctx) => { + engine.on('postProcess', postCallback); - ctx.fail('Error handler failure'); - }); + const wrapped = engine.wrap( + async (value: number) => value * 2, + { post: 'postProcess' } + ); - const [, err] = await attempt(() => app.start()); + const result = await wrapped(5); - expect(isHookError(err)).to.be.true; - expect(err).to.have.property('hookName', 'start'); - expect(err).to.have.property('extPoint', 'error'); + expect(result).to.equal(10); + expect(postCallback).toHaveBeenCalledOnce(); }); - it('sets originalError when fail() is called with an Error', async () => { + it('pre hook can modify arguments', async () => { - const originalError = new Error('Original error'); + interface Lifecycle { + preAdd(a: number, b: number): Promise; + } + + const engine = new HookEngine(); - engine.extend('start', 'before', async (ctx) => { + engine.on('preAdd', async (ctx) => { - ctx.fail(originalError); + const [a, b] = ctx.args; + ctx.setArgs([a * 10, b * 10]); }); - const [, err] = await attempt(() => app.start()); + const wrapped = engine.wrap( + async (a: number, b: number) => a + b, + { pre: 'preAdd' } + ); - expect(isHookError(err)).to.be.true; - expect(err).to.have.property('originalError', 
originalError); + const result = await wrapped(2, 3); + + expect(result).to.equal(50); // (2*10) + (3*10) }); - it('does not set originalError when fail() is called with a string', async () => { + it('pre hook can return early with cached result', async () => { + + interface Lifecycle { + preGet(key: string): Promise; + } - engine.extend('start', 'before', async (ctx) => { + const engine = new HookEngine(); + const cache = new Map([['foo', 'cached-foo']]); + const actualFn = vi.fn(async (key: string) => `fetched-${key}`); - ctx.fail('String message'); + engine.on('preGet', async (ctx) => { + + const [key] = ctx.args; + const cached = cache.get(key); + + if (cached) { + ctx.setResult(cached); + ctx.returnEarly(); + } }); - const [, err] = await attempt(() => app.start()); + const wrapped = engine.wrap(actualFn, { pre: 'preGet' }); - expect(isHookError(err)).to.be.true; - expect(err).to.have.property('originalError', undefined); + const cachedResult = await wrapped('foo'); + expect(cachedResult).to.equal('cached-foo'); + expect(actualFn).not.toHaveBeenCalled(); + + const freshResult = await wrapped('bar'); + expect(freshResult).to.equal('fetched-bar'); + expect(actualFn).toHaveBeenCalledOnce(); }); - }); - describe('engine.wrap()', () => { + it('post hook can modify result', async () => { + + interface Lifecycle { + postDouble(result: number, input: number): Promise; + } - it('wraps an object method in-place', async () => { + const engine = new HookEngine(); - const app = new TestApp(); - const engine = new HookEngine(); - const originalStart = app.start; + engine.on('postDouble', async (ctx) => { - engine.wrap(app, 'start'); + const [result] = ctx.args; + ctx.setResult(result * 2); + }); - expect(app.start).to.not.equal(originalStart); - expect(app.start).to.be.a('function'); + const wrapped = engine.wrap( + async (input: number) => input + 1, + { post: 'postDouble' } + ); - await app.start(); + const result = await wrapped(5); - 
expect(startFn).toHaveBeenCalledOnce(); + expect(result).to.equal(12); // (5 + 1) * 2 }); - it('binds to the instance automatically', async () => { + it('works with both pre and post hooks', async () => { - const contextCapture = vi.fn(); + interface Lifecycle { + preTransform(value: string): Promise; + postTransform(result: string, value: string): Promise; + } - class ContextApp { + const engine = new HookEngine(); + const callOrder: string[] = []; - value = 'instance-value'; + engine.on('preTransform', async (ctx) => { - async getValue() { + callOrder.push('pre'); + const [value] = ctx.args; + ctx.setArgs([value.toUpperCase()]); + }); - contextCapture(this.value); - return this.value; - } - } + engine.on('postTransform', async (ctx) => { - const app = new ContextApp(); - const engine = new HookEngine(); + callOrder.push('post'); + const [result] = ctx.args; + ctx.setResult(`[${result}]`); + }); - engine.wrap(app, 'getValue'); + const wrapped = engine.wrap( + async (value: string) => `processed:${value}`, + { pre: 'preTransform', post: 'postTransform' } + ); - const result = await app.getValue(); + const result = await wrapped('hello'); - expect(contextCapture).toHaveBeenCalledWith('instance-value'); - expect(result).to.equal('instance-value'); + expect(callOrder).to.deep.equal(['pre', 'post']); + expect(result).to.equal('[processed:HELLO]'); }); - it('allows extensions after wrapping', async () => { + it('pre hook early return skips function and post hook receives early result', async () => { - const app = new TestApp(); - const engine = new HookEngine(); + interface Lifecycle { + preFetch(url: string): Promise; + postFetch(result: string, url: string): Promise; + } - engine.wrap(app, 'start'); - engine.extend('start', 'before', beforeFn); - engine.extend('start', 'after', afterFn); + const engine = new HookEngine(); + const fetchFn = vi.fn(async (url: string) => `fetched:${url}`); + const postCallback = vi.fn(); - await app.start(); + engine.on('preFetch', async 
(ctx) => { - expect(beforeFn).toHaveBeenCalledOnce(); - expect(startFn).toHaveBeenCalledOnce(); - expect(afterFn).toHaveBeenCalledOnce(); - }); + ctx.setResult('cached-result'); + ctx.returnEarly(); + }); + + engine.on('postFetch', postCallback); - it('rejects invalid instance', () => { + const wrapped = engine.wrap(fetchFn, { pre: 'preFetch', post: 'postFetch' }); - const engine = new HookEngine(); + const result = await wrapped('https://example.com'); - expect(() => engine.wrap(null as any, 'start')).to.throw(); - expect(() => engine.wrap(undefined as any, 'start')).to.throw(); - expect(() => engine.wrap('string' as any, 'start')).to.throw(); + expect(result).to.equal('cached-result'); + expect(fetchFn).not.toHaveBeenCalled(); + expect(postCallback).not.toHaveBeenCalled(); }); - it('preserves arguments and return values', async () => { + it('passes result and args to post hook', async () => { - const app = new TestApp(); - const engine = new HookEngine(); - const expectedResult = { wrapped: true }; + interface Lifecycle { + postLog(result: number, a: number, b: number): Promise; + } - startFn.mockReturnValue(expectedResult); + const engine = new HookEngine(); + let receivedArgs: unknown[] = []; - engine.wrap(app, 'start'); + engine.on('postLog', async (ctx) => { - const result = await app.start('arg1', 'arg2'); + receivedArgs = [...ctx.args]; + }); - expect(startFn).toHaveBeenCalledWith('arg1', 'arg2'); - expect(result).to.equal(expectedResult); - }); - }); + const wrapped = engine.wrap( + async (a: number, b: number) => a + b, + { post: 'postLog' } + ); - describe('engine.clear()', () => { + await wrapped(3, 7); - it('removes all extensions', async () => { + expect(receivedArgs).to.deep.equal([10, 3, 7]); // [result, ...originalArgs] + }); - const app = new TestApp(); - const engine = new HookEngine(); + it('works with only pre hook', async () => { - engine.wrap(app, 'start'); - engine.extend('start', 'before', beforeFn); - engine.extend('start', 'after', 
afterFn); + const engine = new HookEngine(); - await app.start(); + const wrapped = engine.wrap( + async (value: number) => value * 2, + { pre: 'preProcess' } + ); - expect(beforeFn).toHaveBeenCalledOnce(); - expect(afterFn).toHaveBeenCalledOnce(); + const result = await wrapped(5); - beforeFn.mockReset(); - afterFn.mockReset(); + expect(result).to.equal(10); + }); - engine.clear(); + it('works with only post hook', async () => { - // Re-wrap after clear - engine.wrap(app, 'start'); + const engine = new HookEngine(); - await app.start(); + const wrapped = engine.wrap( + async (value: number) => value * 2, + { post: 'postProcess' } + ); - expect(beforeFn).not.toHaveBeenCalled(); - expect(afterFn).not.toHaveBeenCalled(); - expect(startFn).toHaveBeenCalled(); + const result = await wrapped(5); + + expect(result).to.equal(10); }); + }); + + describe('real-world patterns', () => { - it('allows re-registration of hooks after clear', async () => { + it('implements caching pattern', async () => { - const app1 = new TestApp(); - const app2 = new TestApp(); - const engine = new HookEngine(); + interface FetchLifecycle { + cacheCheck(url: string): Promise; + } - engine.wrap(app1, 'start'); + const engine = new HookEngine(); + const cache = new Map([['cached-url', 'cached-data']]); - engine.clear(); + engine.on('cacheCheck', async (ctx) => { + + const [url] = ctx.args; + const cached = cache.get(url); - // Should not throw - hook can be registered again with fresh instance - engine.wrap(app2, 'start'); - engine.extend('start', 'before', beforeFn); + if (cached) { + ctx.setResult(cached); + ctx.returnEarly(); + } + }); - await app2.start(); + const cachedResult = await engine.emit('cacheCheck', 'cached-url'); + expect(cachedResult.result).to.equal('cached-data'); + expect(cachedResult.earlyReturn).to.be.true; - expect(beforeFn).toHaveBeenCalledOnce(); + const missResult = await engine.emit('cacheCheck', 'not-cached'); + expect(missResult.result).to.be.undefined; + 
expect(missResult.earlyReturn).to.be.false; }); - it('clears registrations so hooks can be re-made', () => { + it('implements validation pattern', async () => { - const app = new TestApp(); - const engine = new HookEngine(); + interface UserLifecycle { + validate(data: { email?: string }): Promise; + } - engine.make('start', app.start, { bindTo: app }); + const engine = new HookEngine(); - // Should throw - already registered - expect(() => engine.make('start', app.start, { bindTo: app })).to.throw(); + engine.on('validate', async (ctx) => { - engine.clear(); + const [data] = ctx.args; - // Should not throw after clear - expect(() => engine.make('start', app.start, { bindTo: app })).to.not.throw(); - }); - }); + if (!data.email) { + ctx.fail('Email is required'); + } + }); - describe('context.removeHook()', () => { + const [, err] = await attempt(() => engine.emit('validate', {})); - it('allows an extension to remove itself', async () => { + expect(isHookError(err)).to.be.true; + expect(err?.message).to.include('Email is required'); - const app = new TestApp(); - const engine = new HookEngine(); + const [result] = await attempt(() => engine.emit('validate', { email: 'test@example.com' })); - engine.wrap(app, 'start'); + expect(result).to.have.property('earlyReturn', false); + }); - let callCount = 0; - engine.extend('start', 'before', async (ctx) => { + it('implements rate limiting pattern', async () => { - callCount++; + interface ApiLifecycle { + rateLimit(retryAfter: number, attempt: number): Promise; + } - if (callCount >= 2) { - ctx.removeHook(); + const engine = new HookEngine(); + const delays: number[] = []; + + engine.on('rateLimit', async (ctx) => { + + const [retryAfter, attempt] = ctx.args; + + if (attempt > 3) { + ctx.fail('Max retries exceeded'); } + + delays.push(retryAfter); }); - await app.start(); - await app.start(); - await app.start(); - await app.start(); + await engine.emit('rateLimit', 100, 1); + await engine.emit('rateLimit', 200, 2); + 
await engine.emit('rateLimit', 300, 3); - expect(callCount).to.equal(2); - }); + const [, err] = await attempt(() => engine.emit('rateLimit', 400, 4)); - it('removes the correct extension from multiple', async () => { + expect(delays).to.deep.equal([100, 200, 300]); + expect(isHookError(err)).to.be.true; + }); - const app = new TestApp(); - const engine = new HookEngine(); + it('implements analytics pattern with ignoreOnFail', async () => { - engine.wrap(app, 'start'); + interface AppLifecycle { + action(name: string): Promise; + } - const firstFn = vi.fn(); - const selfRemovingFn = vi.fn(async (ctx) => { + const engine = new HookEngine(); + const tracked: string[] = []; + const importantCallback = vi.fn(); - ctx.removeHook(); - }); - const lastFn = vi.fn(); + engine.on('action', { + callback: async (ctx) => { - engine.extend('start', 'before', firstFn); - engine.extend('start', 'before', selfRemovingFn); - engine.extend('start', 'before', lastFn); + const [name] = ctx.args; - await app.start(); + if (name === 'fail') { + throw new Error('Analytics failed'); + } - expect(firstFn).toHaveBeenCalledOnce(); - expect(selfRemovingFn).toHaveBeenCalledOnce(); - expect(lastFn).toHaveBeenCalledOnce(); + tracked.push(name); + }, + ignoreOnFail: true + }); - firstFn.mockReset(); - selfRemovingFn.mockReset(); - lastFn.mockReset(); + engine.on('action', importantCallback); - await app.start(); + await engine.emit('action', 'click'); + await engine.emit('action', 'fail'); + await engine.emit('action', 'scroll'); - expect(firstFn).toHaveBeenCalledOnce(); - expect(selfRemovingFn).not.toHaveBeenCalled(); - expect(lastFn).toHaveBeenCalledOnce(); + expect(tracked).to.deep.equal(['click', 'scroll']); + expect(importantCallback).toHaveBeenCalledTimes(3); }); }); }); From 13123b31e0886aff705fb7c3812889859fef8f4e Mon Sep 17 00:00:00 2001 From: Danilo Alonso Date: Mon, 12 Jan 2026 22:30:34 -0500 Subject: [PATCH 08/13] feat(docs): add llms.txt endpoint following llmstxt.org spec Add zx 
script to generate llms.txt index file and copy package documentation to /llm/*.md for LLM-friendly access. The script runs automatically before docs build. --- .gitignore | 2 + docs/.vitepress/config.mts | 3 +- package.json | 4 +- pnpm-lock.yaml | 54 ++++++++++----------- scripts/build-llm-txt.mjs | 99 ++++++++++++++++++++++++++++++++++++++ 5 files changed, 132 insertions(+), 30 deletions(-) create mode 100644 scripts/build-llm-txt.mjs diff --git a/.gitignore b/.gitignore index e3bacce..1eced18 100644 --- a/.gitignore +++ b/.gitignore @@ -11,4 +11,6 @@ packages/test tests/src/experiments/* !tests/src/experiments/memory-tests/ docs/.vitepress/cache/* +docs/public/llms.txt +docs/public/llm/ .env \ No newline at end of file diff --git a/docs/.vitepress/config.mts b/docs/.vitepress/config.mts index dddbd31..01801dc 100644 --- a/docs/.vitepress/config.mts +++ b/docs/.vitepress/config.mts @@ -35,7 +35,8 @@ export default defineConfig({ nav: [ { text: 'Home', link: '/' }, - { text: 'TypeDocs', link: 'https://typedoc.logosdx.dev' } + { text: 'TypeDocs', link: 'https://typedoc.logosdx.dev' }, + { text: 'llms.txt', link: '/llms.txt' } ], diff --git a/package.json b/package.json index 2a6d96f..bde36d2 100644 --- a/package.json +++ b/package.json @@ -13,14 +13,14 @@ "dts": "node ./scripts/build.mjs", "build:docs": "bash ./scripts/docs.zsh", "docs:dev": "vitepress dev docs", - "docs:build": "vitepress build docs", + "docs:build": "zx scripts/build-llm-txt.mjs && vitepress build docs", "docs:preview": "vitepress preview docs" }, "dependencies": { "@changesets/cli": "^2.29.4", "@swc/cli": "^0.7.7", "@swc/core": "^1.11.29", - "@types/node": "^24.10.1", + "@types/node": "^24.10.7", "tsx": "^4.19.4", "typedoc": "^0.28.4", "typescript": "^5.8.2" diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index cc817bf..d186dc2 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -18,8 +18,8 @@ importers: specifier: ^1.11.29 version: 1.11.29 '@types/node': - specifier: ^24.10.1 - version: 24.10.1 + 
specifier: ^24.10.7 + version: 24.10.7 tsx: specifier: ^4.19.4 version: 4.19.4 @@ -44,10 +44,10 @@ importers: version: 4.0.0(typedoc@0.28.4(typescript@5.8.3)) vite: specifier: ^7 - version: 7.0.6(@types/node@24.10.1)(tsx@4.19.4)(yaml@2.8.0) + version: 7.0.6(@types/node@24.10.7)(tsx@4.19.4)(yaml@2.8.0) vitepress: specifier: 2.0.0-alpha.15 - version: 2.0.0-alpha.15(@types/node@24.10.1)(postcss@8.5.6)(tsx@4.19.4)(typescript@5.8.3)(yaml@2.8.0) + version: 2.0.0-alpha.15(@types/node@24.10.7)(postcss@8.5.6)(tsx@4.19.4)(typescript@5.8.3)(yaml@2.8.0) zx: specifier: ^8.8.5 version: 8.8.5 @@ -142,7 +142,7 @@ importers: version: 21.0.0 '@vitest/coverage-v8': specifier: ^4 - version: 4.0.14(vitest@4.0.14(@types/node@24.10.1)(jsdom@27.2.0)(tsx@4.19.4)(yaml@2.8.0)) + version: 4.0.14(vitest@4.0.14(@types/node@24.10.7)(jsdom@27.2.0)(tsx@4.19.4)(yaml@2.8.0)) better-sse: specifier: ^0.15.1 version: 0.15.1 @@ -166,7 +166,7 @@ importers: version: 5.0.4 vitest: specifier: ^4 - version: 4.0.14(@types/node@24.10.1)(jsdom@27.2.0)(tsx@4.19.4)(yaml@2.8.0) + version: 4.0.14(@types/node@24.10.7)(jsdom@27.2.0)(tsx@4.19.4)(yaml@2.8.0) packages: @@ -1099,8 +1099,8 @@ packages: '@types/node@12.20.55': resolution: {integrity: sha512-J8xLz7q2OFulZ2cyGTLE1TbbZcjpno7FaN6zdJNrgAdrJ+DZzh/uFR6YrTb4C+nXakvud8Q4+rbhoIWlYQbUFQ==} - '@types/node@24.10.1': - resolution: {integrity: sha512-GNWcUTRBgIRJD5zj+Tq0fKOJ5XZajIiBroOF0yvj2bSU1WvNdYS/dn9UxwsujGW4JX06dnHyjV2y9rRaybH0iQ==} + '@types/node@24.10.7': + resolution: {integrity: sha512-+054pVMzVTmRQV8BhpGv3UyfZ2Llgl8rdpDTon+cUH9+na0ncBVXj3wTUKh14+Kiz18ziM3b4ikpP5/Pc0rQEQ==} '@types/sinon@21.0.0': resolution: {integrity: sha512-+oHKZ0lTI+WVLxx1IbJDNmReQaIsQJjN2e7UUrJHEeByG7bFeKJYsv1E75JxTQ9QKJDp21bAa/0W2Xo4srsDnw==} @@ -3604,7 +3604,7 @@ snapshots: '@types/jsdom@27.0.0': dependencies: - '@types/node': 24.10.1 + '@types/node': 24.10.7 '@types/tough-cookie': 4.0.5 parse5: 7.3.0 @@ -3623,7 +3623,7 @@ snapshots: '@types/node@12.20.55': {} - '@types/node@24.10.1': + 
'@types/node@24.10.7': dependencies: undici-types: 7.16.0 @@ -3641,13 +3641,13 @@ snapshots: '@ungap/structured-clone@1.3.0': {} - '@vitejs/plugin-vue@6.0.1(vite@7.2.4(@types/node@24.10.1)(tsx@4.19.4)(yaml@2.8.0))(vue@3.5.25(typescript@5.8.3))': + '@vitejs/plugin-vue@6.0.1(vite@7.2.4(@types/node@24.10.7)(tsx@4.19.4)(yaml@2.8.0))(vue@3.5.25(typescript@5.8.3))': dependencies: '@rolldown/pluginutils': 1.0.0-beta.29 - vite: 7.2.4(@types/node@24.10.1)(tsx@4.19.4)(yaml@2.8.0) + vite: 7.2.4(@types/node@24.10.7)(tsx@4.19.4)(yaml@2.8.0) vue: 3.5.25(typescript@5.8.3) - '@vitest/coverage-v8@4.0.14(vitest@4.0.14(@types/node@24.10.1)(jsdom@27.2.0)(tsx@4.19.4)(yaml@2.8.0))': + '@vitest/coverage-v8@4.0.14(vitest@4.0.14(@types/node@24.10.7)(jsdom@27.2.0)(tsx@4.19.4)(yaml@2.8.0))': dependencies: '@bcoe/v8-coverage': 1.0.2 '@vitest/utils': 4.0.14 @@ -3660,7 +3660,7 @@ snapshots: obug: 2.1.1 std-env: 3.10.0 tinyrainbow: 3.0.3 - vitest: 4.0.14(@types/node@24.10.1)(jsdom@27.2.0)(tsx@4.19.4)(yaml@2.8.0) + vitest: 4.0.14(@types/node@24.10.7)(jsdom@27.2.0)(tsx@4.19.4)(yaml@2.8.0) transitivePeerDependencies: - supports-color @@ -3673,13 +3673,13 @@ snapshots: chai: 6.2.1 tinyrainbow: 3.0.3 - '@vitest/mocker@4.0.14(vite@7.0.6(@types/node@24.10.1)(tsx@4.19.4)(yaml@2.8.0))': + '@vitest/mocker@4.0.14(vite@7.0.6(@types/node@24.10.7)(tsx@4.19.4)(yaml@2.8.0))': dependencies: '@vitest/spy': 4.0.14 estree-walker: 3.0.3 magic-string: 0.30.21 optionalDependencies: - vite: 7.0.6(@types/node@24.10.1)(tsx@4.19.4)(yaml@2.8.0) + vite: 7.0.6(@types/node@24.10.7)(tsx@4.19.4)(yaml@2.8.0) '@vitest/pretty-format@4.0.14': dependencies: @@ -5116,7 +5116,7 @@ snapshots: '@types/unist': 3.0.3 vfile-message: 4.0.3 - vite@7.0.6(@types/node@24.10.1)(tsx@4.19.4)(yaml@2.8.0): + vite@7.0.6(@types/node@24.10.7)(tsx@4.19.4)(yaml@2.8.0): dependencies: esbuild: 0.25.4 fdir: 6.5.0(picomatch@4.0.3) @@ -5125,12 +5125,12 @@ snapshots: rollup: 4.41.1 tinyglobby: 0.2.15 optionalDependencies: - '@types/node': 24.10.1 + 
'@types/node': 24.10.7 fsevents: 2.3.3 tsx: 4.19.4 yaml: 2.8.0 - vite@7.2.4(@types/node@24.10.1)(tsx@4.19.4)(yaml@2.8.0): + vite@7.2.4(@types/node@24.10.7)(tsx@4.19.4)(yaml@2.8.0): dependencies: esbuild: 0.25.4 fdir: 6.5.0(picomatch@4.0.3) @@ -5139,12 +5139,12 @@ snapshots: rollup: 4.53.3 tinyglobby: 0.2.15 optionalDependencies: - '@types/node': 24.10.1 + '@types/node': 24.10.7 fsevents: 2.3.3 tsx: 4.19.4 yaml: 2.8.0 - vitepress@2.0.0-alpha.15(@types/node@24.10.1)(postcss@8.5.6)(tsx@4.19.4)(typescript@5.8.3)(yaml@2.8.0): + vitepress@2.0.0-alpha.15(@types/node@24.10.7)(postcss@8.5.6)(tsx@4.19.4)(typescript@5.8.3)(yaml@2.8.0): dependencies: '@docsearch/css': 4.3.2 '@docsearch/js': 4.3.2 @@ -5153,7 +5153,7 @@ snapshots: '@shikijs/transformers': 3.17.0 '@shikijs/types': 3.17.0 '@types/markdown-it': 14.1.2 - '@vitejs/plugin-vue': 6.0.1(vite@7.2.4(@types/node@24.10.1)(tsx@4.19.4)(yaml@2.8.0))(vue@3.5.25(typescript@5.8.3)) + '@vitejs/plugin-vue': 6.0.1(vite@7.2.4(@types/node@24.10.7)(tsx@4.19.4)(yaml@2.8.0))(vue@3.5.25(typescript@5.8.3)) '@vue/devtools-api': 8.0.5 '@vue/shared': 3.5.25 '@vueuse/core': 14.1.0(vue@3.5.25(typescript@5.8.3)) @@ -5162,7 +5162,7 @@ snapshots: mark.js: 8.11.1 minisearch: 7.2.0 shiki: 3.17.0 - vite: 7.2.4(@types/node@24.10.1)(tsx@4.19.4)(yaml@2.8.0) + vite: 7.2.4(@types/node@24.10.7)(tsx@4.19.4)(yaml@2.8.0) vue: 3.5.25(typescript@5.8.3) optionalDependencies: postcss: 8.5.6 @@ -5191,10 +5191,10 @@ snapshots: - universal-cookie - yaml - vitest@4.0.14(@types/node@24.10.1)(jsdom@27.2.0)(tsx@4.19.4)(yaml@2.8.0): + vitest@4.0.14(@types/node@24.10.7)(jsdom@27.2.0)(tsx@4.19.4)(yaml@2.8.0): dependencies: '@vitest/expect': 4.0.14 - '@vitest/mocker': 4.0.14(vite@7.0.6(@types/node@24.10.1)(tsx@4.19.4)(yaml@2.8.0)) + '@vitest/mocker': 4.0.14(vite@7.0.6(@types/node@24.10.7)(tsx@4.19.4)(yaml@2.8.0)) '@vitest/pretty-format': 4.0.14 '@vitest/runner': 4.0.14 '@vitest/snapshot': 4.0.14 @@ -5211,10 +5211,10 @@ snapshots: tinyexec: 0.3.2 tinyglobby: 0.2.15 
tinyrainbow: 3.0.3 - vite: 7.0.6(@types/node@24.10.1)(tsx@4.19.4)(yaml@2.8.0) + vite: 7.0.6(@types/node@24.10.7)(tsx@4.19.4)(yaml@2.8.0) why-is-node-running: 2.3.0 optionalDependencies: - '@types/node': 24.10.1 + '@types/node': 24.10.7 jsdom: 27.2.0 transitivePeerDependencies: - jiti diff --git a/scripts/build-llm-txt.mjs b/scripts/build-llm-txt.mjs new file mode 100644 index 0000000..f86b517 --- /dev/null +++ b/scripts/build-llm-txt.mjs @@ -0,0 +1,99 @@ +#!/usr/bin/env zx + +/** + * Generates llms.txt following the llmstxt.org specification. + * + * Structure: + * - H1: Project name (required) + * - Blockquote: Brief summary + * - H2 sections: File lists with markdown links + * + * Also copies llm-helpers/*.md to docs/public/llm/ for direct access. + */ + +import 'zx/globals'; + +const ROOT = path.join(import.meta.dirname, '..'); +const LLM_HELPERS_DIR = path.join(ROOT, 'llm-helpers'); +const OUTPUT_DIR = path.join(ROOT, 'docs', 'public', 'llm'); +const OUTPUT_PATH = path.join(ROOT, 'docs', 'public', 'llms.txt'); + +$.verbose = false; + +const log = { + info: (msg) => console.log(chalk.cyan(`→ ${msg}`)), + success: (msg) => console.log(chalk.green(`✓ ${msg}`)), + error: (msg) => console.log(chalk.red(`✗ ${msg}`)), +}; + +log.info('Building llms.txt from llm-helpers...'); + +const files = await fs.readdir(LLM_HELPERS_DIR); +const mdFiles = files + .filter(f => f.endsWith('.md') && f !== 'README.md') + .sort(); + +if (mdFiles.length === 0) { + + log.error('No markdown files found in llm-helpers/'); + process.exit(1); +} + +// Copy markdown files to public/llm/ for direct access +await fs.ensureDir(OUTPUT_DIR); + +for (const file of mdFiles) { + + await fs.copy( + path.join(LLM_HELPERS_DIR, file), + path.join(OUTPUT_DIR, file) + ); +} + +log.info(`Copied ${mdFiles.length} files to docs/public/llm/`); + +// Build package links with descriptions +const packageDescriptions = { + dom: 'DOM manipulation utilities for CSS, attributes, events, and behaviors', + fetch: 'HTTP 
client with retry logic, lifecycle hooks, and state management', + hooks: 'Lifecycle event system for extensible architectures', + localize: 'Internationalization system for multi-language support', + observer: 'Event-driven architecture with queues and regex matching', + storage: 'Type-safe persistence layer for browser storage', + utils: 'Core utilities for flow control, data structures, and validation', +}; + +const packageLinks = mdFiles + .map((file) => { + + const name = file.replace('.md', ''); + const desc = packageDescriptions[name] || ''; + return `- [${name}](/llm/${file}): ${desc}`; + }) + .join('\n'); + +// Generate llms.txt following the spec +const output = `# LogosDX + +> Focused TypeScript utilities for building JavaScript applications in any runtime. Zero dependencies, type-safe, and designed for production resilience. + +LogosDX provides a collection of packages that work together or independently. Each package follows consistent patterns: error tuples with \`attempt()\`, event-driven architecture, and comprehensive TypeScript support. 
+ +## Documentation + +- [Getting Started](https://logosdx.dev/getting-started): Installation and basic usage +- [API Reference](https://typedoc.logosdx.dev): Full TypeScript API documentation +- [Cheat Sheet](https://logosdx.dev/cheat-sheet): Quick reference for common patterns + +## Packages + +${packageLinks} + +## Optional + +- [GitHub Repository](https://github.com/logosdx/monorepo): Source code and issue tracker +`; + +await fs.writeFile(OUTPUT_PATH, output); + +log.success(`Generated llms.txt with ${mdFiles.length} package links`); From 11e823399e8e69941ed4d7688a92c2648200a9d0 Mon Sep 17 00:00:00 2001 From: Danilo Alonso Date: Tue, 3 Feb 2026 23:49:14 -0500 Subject: [PATCH 09/13] feat(fetch)!: modular engine architecture, event timing, and resilience MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Refactored FetchEngine from a monolith into modular architecture with clear separation of concerns: - state/, headers/, params/, options/ managers replace flat methods - Events drop `fetch-` prefix (fetch-before → before) - Added requestStart/requestEnd timestamps to lifecycle event data - FetchError helpers: isTimeout(), isCancelled(), isConnectionLost() - Separate attemptTimeout and totalTimeout for retry control - PropertyStore for unified header/param management - Exported ResiliencePolicy, DedupePolicy, CachePolicy, RateLimitPolicy fix(observer): buffer events in EventGenerator to prevent silent drops Replaced single Deferred pattern with PriorityQueue buffer to fix a race condition where burst emissions were lost between async iterations. 
feat(utils): add SingleFlight.invalidateCache(predicate) chore: remove @logosdx/kit, split changesets per-package, clean pre.json --- .changeset/bright-dogs-swim.md | 143 + .changeset/calm-trees-grow.md | 7 + .changeset/pre.json | 3 +- .changeset/quiet-birds-sing.md | 7 + .changeset/warm-lights-shine.md | 23 + .gitignore | 4 +- docs/.vitepress/config.mts | 21 +- docs/cheat-sheet.md | 107 +- docs/getting-started.md | 24 +- docs/index.md | 2 +- docs/packages/fetch.md | 2736 ----------------- docs/packages/fetch/advanced.md | 716 +++++ docs/packages/fetch/configuration.md | 434 +++ docs/packages/fetch/events.md | 433 +++ docs/packages/fetch/index.md | 245 ++ docs/packages/fetch/policies.md | 982 ++++++ docs/packages/fetch/requests.md | 344 +++ docs/packages/fetch/resilience.md | 498 +++ docs/packages/observer.md | 30 +- docs/what-is-logosdx.md | 21 +- llm-helpers/fetch.md | 216 +- llm-helpers/observer.md | 12 +- package.json | 6 +- packages/fetch/src/engine.ts | 2671 ---------------- packages/fetch/src/engine/events.ts | 251 ++ packages/fetch/src/engine/executor.ts | 1393 +++++++++ packages/fetch/src/engine/index.ts | 726 +++++ packages/fetch/src/engine/types.ts | 188 ++ packages/fetch/src/helpers.ts | 621 ---- packages/fetch/src/helpers/fetch-error.ts | 104 + packages/fetch/src/helpers/index.ts | 6 + packages/fetch/src/helpers/validations.ts | 218 ++ packages/fetch/src/index.ts | 117 +- packages/fetch/src/options/index.ts | 182 ++ packages/fetch/src/options/types.ts | 274 ++ packages/fetch/src/policies/base.ts | 2 +- packages/fetch/src/policies/cache.ts | 30 +- packages/fetch/src/policies/dedupe.ts | 22 +- packages/fetch/src/policies/helpers.ts | 234 ++ packages/fetch/src/policies/index.ts | 3 + packages/fetch/src/policies/rate-limit.ts | 11 +- packages/fetch/src/properties/headers.ts | 215 ++ packages/fetch/src/properties/index.ts | 10 + packages/fetch/src/properties/params.ts | 216 ++ .../store.ts} | 52 +- packages/fetch/src/state/index.ts | 174 ++ 
packages/fetch/src/types.ts | 514 +--- packages/kit/.swcrc | 19 - packages/kit/CHANGELOG.md | 509 --- packages/kit/package.json | 52 - packages/kit/src/index.ts | 281 -- packages/kit/tsconfig.json | 5 - packages/kit/typedoc.json | 5 - packages/observer/src/generator.ts | 53 +- packages/utils/src/async/singleflight.ts | 49 + packages/utils/src/types.ts | 9 +- pnpm-lock.yaml | 933 ++++-- scripts/Dockerfile | 57 + scripts/build-llm-txt.mjs | 20 +- scripts/ralph-wiggum.sh | 327 ++ tests/package.json | 20 +- tests/src/fetch/_helpers.ts | 2 +- .../{adapter-fs.ts => adapters/fs.test.ts} | 22 +- tests/src/fetch/engine/configuration.test.ts | 178 ++ .../fetch/{base.ts => engine/core.test.ts} | 1104 ++----- tests/src/fetch/engine/integration.test.ts | 73 + tests/src/fetch/engine/lifecycle.test.ts | 116 + tests/src/fetch/engine/performance.test.ts | 258 ++ tests/src/fetch/engine/request-init.test.ts | 377 +++ tests/src/fetch/engine/response.test.ts | 79 + .../{retry.ts => executor/retry.test.ts} | 73 +- tests/src/fetch/executor/timeout.test.ts | 131 + tests/src/fetch/options/get.test.ts | 83 + tests/src/fetch/options/set.test.ts | 116 + .../{caching.ts => policies/cache.test.ts} | 443 ++- .../dedupe.test.ts} | 218 +- .../rate-limit.test.ts} | 90 +- tests/src/fetch/properties/headers.test.ts | 238 ++ tests/src/fetch/properties/params.test.ts | 238 ++ .../store.test.ts} | 2 +- .../index.test.ts} | 2 +- tests/src/fetch/state/get.test.ts | 55 + tests/src/fetch/state/reset.test.ts | 60 + tests/src/fetch/state/set.test.ts | 103 + tests/src/observable/engine.ts | 53 + tests/src/smoke/dom.test.ts | 86 + tests/src/smoke/fetch.test.ts | 102 + tests/src/smoke/hooks.test.ts | 116 + tests/src/smoke/kit.test.ts | 107 + tests/src/smoke/localize.test.ts | 84 + tests/src/smoke/observer.test.ts | 90 + tests/src/smoke/setup.ts | 45 + tests/src/smoke/state-machine.test.ts | 94 + tests/src/smoke/storage.test.ts | 104 + tests/src/smoke/utils.test.ts | 118 + tests/src/storage.ts | 1 - 
tests/src/utils/data-structures.ts | 2 - tests/src/utils/flow-control/batch.ts | 1 - tests/src/utils/flow-control/compose-flow.ts | 13 +- tests/src/utils/flow-control/rate-limit.ts | 2 - tests/src/utils/flow-control/retry.ts | 15 +- tests/src/utils/flow-control/singleflight.ts | 157 +- tests/src/utils/flow-control/throttle.ts | 1 - tests/tsconfig.json | 5 + tests/vitest.config.ts | 97 +- 105 files changed, 13534 insertions(+), 9407 deletions(-) create mode 100644 .changeset/bright-dogs-swim.md create mode 100644 .changeset/calm-trees-grow.md create mode 100644 .changeset/quiet-birds-sing.md create mode 100644 .changeset/warm-lights-shine.md delete mode 100644 docs/packages/fetch.md create mode 100644 docs/packages/fetch/advanced.md create mode 100644 docs/packages/fetch/configuration.md create mode 100644 docs/packages/fetch/events.md create mode 100644 docs/packages/fetch/index.md create mode 100644 docs/packages/fetch/policies.md create mode 100644 docs/packages/fetch/requests.md create mode 100644 docs/packages/fetch/resilience.md delete mode 100644 packages/fetch/src/engine.ts create mode 100644 packages/fetch/src/engine/events.ts create mode 100644 packages/fetch/src/engine/executor.ts create mode 100644 packages/fetch/src/engine/index.ts create mode 100644 packages/fetch/src/engine/types.ts delete mode 100644 packages/fetch/src/helpers.ts create mode 100644 packages/fetch/src/helpers/fetch-error.ts create mode 100644 packages/fetch/src/helpers/index.ts create mode 100644 packages/fetch/src/helpers/validations.ts create mode 100644 packages/fetch/src/options/index.ts create mode 100644 packages/fetch/src/options/types.ts create mode 100644 packages/fetch/src/policies/helpers.ts create mode 100644 packages/fetch/src/properties/headers.ts create mode 100644 packages/fetch/src/properties/index.ts create mode 100644 packages/fetch/src/properties/params.ts rename packages/fetch/src/{property-store.ts => properties/store.ts} (88%) create mode 100644 
packages/fetch/src/state/index.ts delete mode 100644 packages/kit/.swcrc delete mode 100644 packages/kit/CHANGELOG.md delete mode 100644 packages/kit/package.json delete mode 100644 packages/kit/src/index.ts delete mode 100644 packages/kit/tsconfig.json delete mode 100644 packages/kit/typedoc.json create mode 100644 scripts/Dockerfile create mode 100755 scripts/ralph-wiggum.sh rename tests/src/fetch/{adapter-fs.ts => adapters/fs.test.ts} (91%) create mode 100644 tests/src/fetch/engine/configuration.test.ts rename tests/src/fetch/{base.ts => engine/core.test.ts} (62%) create mode 100644 tests/src/fetch/engine/integration.test.ts create mode 100644 tests/src/fetch/engine/lifecycle.test.ts create mode 100644 tests/src/fetch/engine/performance.test.ts create mode 100644 tests/src/fetch/engine/request-init.test.ts create mode 100644 tests/src/fetch/engine/response.test.ts rename tests/src/fetch/{retry.ts => executor/retry.test.ts} (91%) create mode 100644 tests/src/fetch/executor/timeout.test.ts create mode 100644 tests/src/fetch/options/get.test.ts create mode 100644 tests/src/fetch/options/set.test.ts rename tests/src/fetch/{caching.ts => policies/cache.test.ts} (90%) rename tests/src/fetch/{deduplication.ts => policies/dedupe.test.ts} (93%) rename tests/src/fetch/{rate-limiting.ts => policies/rate-limit.test.ts} (92%) create mode 100644 tests/src/fetch/properties/headers.test.ts create mode 100644 tests/src/fetch/properties/params.test.ts rename tests/src/fetch/{property-store.ts => properties/store.test.ts} (99%) rename tests/src/fetch/{serializers.ts => serializers/index.test.ts} (99%) create mode 100644 tests/src/fetch/state/get.test.ts create mode 100644 tests/src/fetch/state/reset.test.ts create mode 100644 tests/src/fetch/state/set.test.ts create mode 100644 tests/src/smoke/dom.test.ts create mode 100644 tests/src/smoke/fetch.test.ts create mode 100644 tests/src/smoke/hooks.test.ts create mode 100644 tests/src/smoke/kit.test.ts create mode 100644 
tests/src/smoke/localize.test.ts create mode 100644 tests/src/smoke/observer.test.ts create mode 100644 tests/src/smoke/setup.ts create mode 100644 tests/src/smoke/state-machine.test.ts create mode 100644 tests/src/smoke/storage.test.ts create mode 100644 tests/src/smoke/utils.test.ts diff --git a/.changeset/bright-dogs-swim.md b/.changeset/bright-dogs-swim.md new file mode 100644 index 0000000..5fe1ab9 --- /dev/null +++ b/.changeset/bright-dogs-swim.md @@ -0,0 +1,143 @@ +--- +"@logosdx/fetch": major +--- + +Refactored FetchEngine from a 2,671-line monolith into a modular architecture with clear separation of concerns. The core HTTP API (`get`, `post`, `put`, `patch`, `delete`, `request`) remains unchanged. + +### Breaking Changes + +#### State Management + +State methods moved to a dedicated `state` property: + +```typescript +// Before +engine.getState(); +engine.setState('token', 'abc123'); +engine.resetState(); + +// After +engine.state.get(); +engine.state.set('token', 'abc123'); +engine.state.reset(); +``` + +#### Header Management + +Header methods moved to a dedicated `headers` manager with method-specific support: + +```typescript +// Before +engine.addHeader('Authorization', 'Bearer token'); +engine.hasHeader('Authorization'); +engine.rmHeader('Authorization'); +engine.headers; // getter returned object + +// After +engine.headers.set('Authorization', 'Bearer token'); +engine.headers.set('X-Custom', 'post-only', 'POST'); // NEW: method-specific +engine.headers.has('Authorization'); +engine.headers.remove('Authorization'); +engine.headers.all; // property with default + method overrides +``` + +#### Parameter Management + +Parameter methods moved to a dedicated `params` manager: + +```typescript +// Before +engine.addParam('api_key', 'abc123'); +engine.hasParam('api_key'); +engine.rmParams('api_key'); +engine.params; // getter returned object + +// After +engine.params.set('api_key', 'abc123'); +engine.params.set('format', 'json', 'GET'); // NEW: 
method-specific +engine.params.has('api_key'); +engine.params.remove('api_key'); +engine.params.all; // property with default + method overrides +``` + +#### Configuration Management + +Configuration methods replaced with unified `options` store supporting deep path access: + +```typescript +// Before +engine.changeBaseUrl('https://new-api.com'); +engine.changeModifyOptions(fn); +engine.changeModifyMethodOptions('POST', fn); + +// After +engine.options.set('baseUrl', 'https://new-api.com'); +engine.options.set('modifyOptions', fn); +engine.options.set('modifyMethodOptions.POST', fn); + +// NEW: Deep path access for any nested option +engine.options.get('retry.maxAttempts'); +engine.options.set('retry.maxAttempts', 5); +engine.options.set('dedupePolicy', { enabled: false }); +``` + +#### Event Names + +Events drop the `fetch-` prefix for cleaner names: + +| Before | After | +|--------|-------| +| `fetch-before` | `before` | +| `fetch-after` | `after` | +| `fetch-response` | `response` | +| `fetch-error` | `error` | +| `fetch-cache-hit` | `cache-hit` | +| `fetch-dedupe-join` | `dedupe-join` | +| `fetch-state-set` | `state-set` | +| `fetch-header-add` | `header-add` | + +```typescript +// Before +engine.on('fetch-before', handler); +engine.on('fetch-cache-hit', handler); + +// After +engine.on('before', handler); +engine.on('cache-hit', handler); +``` + +#### Internal API Removed + +- `engine._flight` is no longer exposed (internal via RequestExecutor) + +### Why These Changes + +1. **Modular Architecture**: Split monolithic engine into focused modules (state/, options/, properties/, policies/) for easier testing and maintenance + +2. **Single Source of Truth**: All configuration flows through OptionsStore with type-safe deep path access + +3. **Runtime Configurable**: Any option can now be changed at runtime, enabling dynamic API endpoints and feature flags + +4. **Method-Specific Properties**: Headers and params can now be configured per-HTTP-method + +5. 
**Cleaner Event Names**: Events match their domain without redundant prefixes + +### Backward Compatibility + +Deprecated methods still work during migration: + +```typescript +// These still work (deprecated) +engine.getState(); // → engine.state.get() +engine.addHeader(k, v); // → engine.headers.set(k, v) +engine.changeBaseUrl(); // → engine.options.set('baseUrl', ...) + +// Old event names still emit (deprecated) +engine.on('fetch-before', handler); // still works +``` + +### New Capabilities + +- **FetchError helpers**: `err.isTimeout()`, `err.isCancelled()`, `err.isConnectionLost()` +- **Attempt timeouts**: Separate `attemptTimeout` and `totalTimeout` for retry control +- **Deep config access**: `engine.options.get('retry.maxAttempts')` diff --git a/.changeset/calm-trees-grow.md b/.changeset/calm-trees-grow.md new file mode 100644 index 0000000..9e82e7d --- /dev/null +++ b/.changeset/calm-trees-grow.md @@ -0,0 +1,7 @@ +--- +"@logosdx/utils": minor +--- + +### Added + +- `SingleFlight.invalidateCache(predicate)`: Selectively invalidate cache entries matching a predicate function diff --git a/.changeset/pre.json b/.changeset/pre.json index 93879fc..b06c481 100644 --- a/.changeset/pre.json +++ b/.changeset/pre.json @@ -3,9 +3,8 @@ "tag": "beta", "initialVersions": { "@logosdx/dom": "2.0.15", - "@logosdx/fetch": "7.0.3", + "@logosdx/fetch": "7.1.0", "@logosdx/hooks": "0.0.1", - "@logosdx/kit": "4.0.3", "@logosdx/localize": "1.0.19", "@logosdx/observer": "2.2.0", "@logosdx/state-machine": "1.0.19", diff --git a/.changeset/quiet-birds-sing.md b/.changeset/quiet-birds-sing.md new file mode 100644 index 0000000..87d2d4d --- /dev/null +++ b/.changeset/quiet-birds-sing.md @@ -0,0 +1,7 @@ +--- +"@logosdx/observer": patch +--- + +### Fixed + +- **EventGenerator buffering**: Fixed a race condition where events emitted faster than the async iterator could consume them were silently dropped. 
Replaced single Deferred pattern with a PriorityQueue buffer, ensuring no events are lost under burst conditions. All existing consumer code continues to work unchanged. diff --git a/.changeset/warm-lights-shine.md b/.changeset/warm-lights-shine.md new file mode 100644 index 0000000..b46ecf3 --- /dev/null +++ b/.changeset/warm-lights-shine.md @@ -0,0 +1,23 @@ +--- +"@logosdx/fetch": minor +--- + +### Added + +- **Event timing data**: All request lifecycle events now include a `requestStart` timestamp (`Date.now()` captured at pipeline entry). Terminal events (`response`, `error`, `abort`) also include a `requestEnd` timestamp, enabling duration calculation directly from event data. + +```typescript +engine.on('response', (event) => { + const duration = event.requestEnd - event.requestStart; + console.log(`Request completed in ${duration}ms`); +}); +``` + +| Event | `requestStart` | `requestEnd` | +|-------|:-:|:-:| +| `before-request` | yes | - | +| `after-request` | yes | - | +| `retry` | yes | - | +| `response` | yes | yes | +| `error` | yes | yes | +| `abort` | yes | yes | diff --git a/.gitignore b/.gitignore index 1eced18..a225cf3 100644 --- a/.gitignore +++ b/.gitignore @@ -13,4 +13,6 @@ tests/src/experiments/* docs/.vitepress/cache/* docs/public/llms.txt docs/public/llm/ -.env \ No newline at end of file +.env +tests/src/smoke/__screenshots__/**/* +.swp \ No newline at end of file diff --git a/docs/.vitepress/config.mts b/docs/.vitepress/config.mts index 01801dc..dd056d9 100644 --- a/docs/.vitepress/config.mts +++ b/docs/.vitepress/config.mts @@ -1,10 +1,9 @@ import { DefaultTheme, defineConfig } from 'vitepress' -const packages: DefaultTheme.SidebarItem[] = [ +const simplePackages: DefaultTheme.SidebarItem[] = [ ['Observer', 'observer'], ['Hooks', 'hooks'], ['Utils', 'utils'], - ['Fetch', 'fetch'], ['Dom', 'dom'], ['Storage', 'storage'], ['Localize', 'localize'], @@ -14,6 +13,24 @@ const packages: DefaultTheme.SidebarItem[] = [ link: `/packages/${link}`, 
})); +const packages: DefaultTheme.SidebarItem[] = [ + ...simplePackages.slice(0, 3), // Observer, Hooks, Utils + { + text: 'Fetch', + link: '/packages/fetch/', + collapsed: true, + items: [ + { text: 'Configuration', link: '/packages/fetch/configuration' }, + { text: 'Making Requests', link: '/packages/fetch/requests' }, + { text: 'Resilience', link: '/packages/fetch/resilience' }, + { text: 'Policies', link: '/packages/fetch/policies' }, + { text: 'Events', link: '/packages/fetch/events' }, + { text: 'Advanced', link: '/packages/fetch/advanced' }, + ] + }, + ...simplePackages.slice(3), // Dom, Storage, Localize +]; + const metadata = { title: 'Logos DX', description: 'Focused TypeScript utilities for building JS apps in any runtime', diff --git a/docs/cheat-sheet.md b/docs/cheat-sheet.md index 7676b58..8732da0 100644 --- a/docs/cheat-sheet.md +++ b/docs/cheat-sheet.md @@ -15,7 +15,7 @@ import { FetchEngine } from '@logosdx/fetch'; const api = new FetchEngine({ baseUrl: 'https://api.example.com', defaultType: 'json', - timeout: 5000 + totalTimeout: 5000 }); // With typed headers, params, and state @@ -115,43 +115,43 @@ if (err && request.isAborted) { ```typescript // Set entire state -api.setState({ +api.state.set({ userId: '123', sessionId: 'abc' }); // Set individual property -api.setState('userId', '456'); +api.state.set('userId', '456'); // Get state (returns deep clone) -const state = api.getState(); +const state = api.state.get(); // Reset state -api.resetState(); +api.state.reset(); ``` ### Headers Management ```typescript -// Add global header -api.addHeader('Authorization', 'Bearer token123'); +// Set global header +api.headers.set('Authorization', 'Bearer token123'); -// Add multiple headers -api.addHeader({ +// Set multiple headers +api.headers.set({ 'X-API-Version': 'v2', 'X-Client': 'web-app' }); -// Add method-specific header -api.addHeader('X-CSRF-Token', 'csrf123', 'POST'); +// Set method-specific header +api.headers.set('X-CSRF-Token', 'csrf123', 
'POST'); // Remove headers -api.rmHeader('Authorization'); -api.rmHeader(['X-API-Version', 'X-Client']); +api.headers.remove('Authorization'); +api.headers.remove(['X-API-Version', 'X-Client']); // Check header existence -if (api.hasHeader('Authorization')) { +if (api.headers.has('Authorization')) { // Header exists } ``` @@ -160,34 +160,38 @@ if (api.hasHeader('Authorization')) { ### Parameters Management ```typescript -// Add global parameter -api.addParam('version', 'v1'); +// Set global parameter +api.params.set('version', 'v1'); -// Add multiple parameters -api.addParam({ +// Set multiple parameters +api.params.set({ format: 'json', locale: 'en-US' }); -// Add method-specific parameter -api.addParam('include_deleted', true, 'GET'); +// Set method-specific parameter +api.params.set('include_deleted', true, 'GET'); // Remove parameters -api.rmParam('version'); -api.rmParam(['format', 'locale']); +api.params.remove('version'); +api.params.remove(['format', 'locale']); // Check parameter existence -if (api.hasParam('version')) { +if (api.params.has('version')) { // Parameter exists } ``` -### URL Management +### Configuration Management ```typescript // Change base URL -api.changeBaseUrl('https://staging.example.com'); +api.config.set('baseUrl', 'https://staging.example.com'); + +// Change other config at runtime +api.config.set('totalTimeout', 60000); +api.config.set('retry.maxAttempts', 5); ``` @@ -222,12 +226,12 @@ const api = new FetchEngine({ ```typescript // Listen to events -const cleanup = api.on('fetch-error', (event) => { +const cleanup = api.on('error', (event) => { console.error('Request failed:', event.error?.message); }); // Listen once -api.once('fetch-response', (event) => { +api.once('response', (event) => { console.log('First response:', event.response); }); @@ -239,7 +243,7 @@ api.on('*', (event) => { // Remove listener cleanup(); // or -api.off('fetch-error', callback); +api.off('error', callback); // Emit custom event api.emit('custom-event', 
{ data: 'value' }); @@ -247,19 +251,19 @@ api.emit('custom-event', { data: 'value' }); #### Available Events -- `fetch-before` - Before request -- `fetch-after` - After request -- `fetch-abort` - Request aborted -- `fetch-error` - Request error -- `fetch-response` - Response received -- `fetch-retry` - Retry attempt -- `fetch-header-add` - Header added -- `fetch-header-remove` - Header removed -- `fetch-param-add` - Parameter added -- `fetch-param-remove` - Parameter removed -- `fetch-state-set` - State updated -- `fetch-state-reset` - State reset -- `fetch-url-change` - Base URL changed +- `before-request` - Before request +- `after-request` - After request +- `abort` - Request aborted +- `error` - Request error +- `response` - Response received +- `retry` - Retry attempt +- `header-add` - Header added +- `header-remove` - Header removed +- `param-add` - Parameter added +- `param-remove` - Parameter removed +- `state-set` - State updated +- `state-reset` - State reset +- `url-change` - Base URL changed ### Error Handling @@ -305,16 +309,16 @@ const api = new FetchEngine({ DELETE: { soft: true } }, - // Modify options before request - modifyOptions: (opts, state) => { + // Modify config before request + modifyConfig: (opts, state) => { if (state.authToken) { opts.headers.Authorization = `Bearer ${state.authToken}`; } return opts; }, - // Method-specific option modification - modifyMethodOptions: { + // Method-specific config modification + modifyMethodConfig: { POST: (opts, state) => { opts.headers['X-User-ID'] = state.userId; return opts; @@ -346,10 +350,7 @@ const api = new FetchEngine({ return 'blob'; } return FetchEngine.useDefault; - }, - - // Format headers - formatHeaders: 'lowercase' // or 'uppercase' or custom function + } }); ``` @@ -391,8 +392,8 @@ const [loginResponse, err] = await attempt(() => ); if (!err && loginResponse) { - api.setState('authToken', loginResponse.token); - api.addHeader('Authorization', `Bearer ${loginResponse.token}`); + 
api.state.set('authToken', loginResponse.data.token); + api.headers.set('Authorization', `Bearer ${loginResponse.data.token}`); } ``` @@ -447,8 +448,8 @@ const urls = { }; observer.on('environment-changed', ({ env }) => { - api.changeBaseUrl(urls[env]); - api.resetState(); // Clear auth on env change + api.config.set('baseUrl', urls[env]); + api.state.reset(); // Clear auth on env change }); ``` diff --git a/docs/getting-started.md b/docs/getting-started.md index b08e518..3e5805b 100644 --- a/docs/getting-started.md +++ b/docs/getting-started.md @@ -33,7 +33,7 @@ Or in your browser, use the CDN: @@ -90,7 +90,7 @@ observer.on('stop-being-nosy', () => stopBeingNosy()); **Let's give these users something to actually log in to.** ```ts -import { FetchFactory } from '@logosdx/fetch'; +import { FetchEngine } from '@logosdx/fetch'; import { attempt } from '@logosdx/utils' // Your Fetch instance can have a state from which you can make @@ -110,7 +110,7 @@ type ApiQueryParams = { page: string; } -const api = new FetchFactory({ +const api = new FetchEngine({ baseUrl: 'https://rainbow-loans.com', retry: { maxAttempts: 3, @@ -120,7 +120,7 @@ const api = new FetchFactory({ 'Content-Type': 'application/json', 'Accept': 'application/json' }, - modifyOptions: (opts, state) => { + modifyConfig: (opts, state) => { if (state.authToken) { opts.headers.Authorization = `Bearer ${state.authToken}` @@ -138,20 +138,20 @@ observer.on('user:login', ({ userId, token }) => { // Once you have a token, you can set the state of the // Fetch instance to use it in the next request. - api.setState({ authToken: token, userId }); + api.state.set({ authToken: token, userId }); }); observer.on('user:logout', () => { // When the user logs out, you can clear the state of the // Fetch instance to avoid using the token in the next request. 
- api.setState({ authToken: null, userId: null }); + api.state.set({ authToken: null, userId: null }); }); export const signIn = async (user: string, password: string) => { // Go-style error handling, with type safety. - const [resPayload, err] = await attempt(() => api.post('/signin', { user, password })); + const [response, err] = await attempt(() => api.post('/signin', { user, password })); if (err) { @@ -163,7 +163,7 @@ export const signIn = async (user: string, password: string) => { throw err; } - const { userId, token } = resPayload; + const { userId, token } = response.data; observer.emit('user:login', { userId: user, @@ -180,7 +180,7 @@ export const signIn = async (user: string, password: string) => { ```ts import { composeFlow, attempt } from '@logosdx/utils'; -const painPal = new FetchFactory({ +const painPal = new FetchEngine({ baseUrl: 'https://painpal.com', retry: { maxAttempts: 3, @@ -195,7 +195,7 @@ const painPal = new FetchFactory({ const _makePayment = async (paymentToken: string, amount: number) => { - const [resPayload, err] = await attempt(() => api.post('/payments', { paymentToken, amount })); + const [response, err] = await attempt(() => api.post('/payments', { paymentToken, amount })); if (err) { @@ -214,7 +214,7 @@ const _makePayment = async (paymentToken: string, amount: number) => { timestamp: Date.now() }); - return resPayload; + return response.data; } // This makePayment function is now rate-limited to 10 @@ -254,7 +254,7 @@ Get start with the packages: - [@logosdx/observer](/packages/observer) - [@logosdx/utils](/packages/utils) -- [@logosdx/fetch](/packages/fetch) +- [@logosdx/fetch](/packages/fetch/) - [@logosdx/dom](/packages/dom) - [@logosdx/storage](/packages/storage) - [@logosdx/localize](/packages/localize) \ No newline at end of file diff --git a/docs/index.md b/docs/index.md index 69860f1..618e4be 100644 --- a/docs/index.md +++ b/docs/index.md @@ -34,7 +34,7 @@ features: - title: Fetch details: Native fetch without the 
verbosity. Retries, state management, and more. icon: { src: /images/svg/cloud.svg, alt: Fetch } - link: /packages/fetch + link: /packages/fetch/ - title: Storage details: Storage API that works with any interface that looks like LocalStorage or SessionStorage. icon: { src: /images/svg/box.svg, alt: Storage } diff --git a/docs/packages/fetch.md b/docs/packages/fetch.md deleted file mode 100644 index 9c4dbb4..0000000 --- a/docs/packages/fetch.md +++ /dev/null @@ -1,2736 +0,0 @@ ---- -title: Fetch -description: HTTP that handles failure. Automatically. ---- - -# Fetch - -Your API calls fail and `fetch` just throws. `@logosdx/fetch` transforms the basic Fetch API into a production-ready HTTP client. Automatic retries with exponential backoff, request deduplication, response caching with stale-while-revalidate, configurable timeouts, request cancellation, and comprehensive lifecycle events. Smart retry strategy for transient failures (network errors, 429s, 500s). Configure once with base URLs and headers, then make type-safe requests that handle network failures gracefully. It's `fetch`, but built for the real world. 
- -[[toc]] - -## Installation - -::: code-group - -```bash [npm] -npm install @logosdx/fetch -``` - -```bash [yarn] -yarn add @logosdx/fetch -``` - -```bash [pnpm] -pnpm add @logosdx/fetch -``` - -::: - -**CDN:** - -```html - - -``` - -## Quick Start - -```typescript -import { FetchEngine, FetchResponse } from '@logosdx/fetch' -import { attempt } from '@logosdx/utils' - -// Create HTTP client -const api = new FetchEngine({ - baseUrl: 'https://api.example.com', - defaultType: 'json', - totalTimeout: 5000 -}); - -// Make requests with error handling - returns FetchResponse object -const [response, err] = await attempt(() => api.get('/users')); -if (err) { - console.error('Failed to fetch users:', err.message); - return; -} -console.log('Users:', response.data); -console.log('Status:', response.status); -console.log('Headers:', response.headers['content-type']); - -``` - -### Global Instance (Simplified Usage) - -```typescript -// Use the default global instance -import fetch from '@logosdx/fetch' -import { attempt } from '@logosdx/utils' - -// Automatically uses current domain as base URL - returns FetchResponse -const [response, err] = await attempt(() => fetch.get('/api/users')); -if (!err) { - console.log('Users:', response.data); - console.log('Status:', response.status); -} - -// Backward compatibility - destructure just the data -const { data: users } = await fetch.get('/api/users'); - -// Or destructure methods for convenience -import { get, post, setState, addHeader, changeModifyOptions, changeModifyMethodOptions } from '@logosdx/fetch' - -// Configure globally -addHeader('Authorization', 'Bearer token123'); -setState('userId', '456'); - -// Set global request modifier -changeModifyOptions((opts, state) => { - opts.headers['X-Client-Version'] = '2.1.0'; - return opts; -}); - -// Set method-specific modifier -changeModifyMethodOptions('POST', (opts, state) => { - opts.headers['X-CSRF-Token'] = state.csrfToken || ''; - return opts; -}); - -// Make requests - 
returns FetchResponse objects -const [userResponse, err] = await attempt(() => get('/api/users/456')); -if (!err) { - const { data: user } = userResponse; // Destructure for backward compatibility - console.log('User:', user); -} - -const [newUserResponse, err2] = await attempt(() => - post('/api/users', userData) -); - -// Smart URL handling - absolute URLs bypass base URL -const [external, err] = await attempt(() => - get('https://api.external.com/data') -); -``` - -## Core Concepts - -FetchEngine provides type-safe headers and parameters with intelligent retry logic. All HTTP methods return a `FetchResponse` object containing parsed data, response metadata, and request context. The event system enables comprehensive monitoring and debugging across all JavaScript environments. Built-in error handling patterns work seamlessly with @logosdx/utils attempt/attemptSync functions. - -## FetchEngine Class - -### Constructor - -```typescript -new FetchEngine(options?: FetchEngine.Options) -``` - -Creates a new HTTP client instance with type-safe headers, parameters, and state management. - -**Type Parameters:** - -- `H` - Interface for typed headers (optional) -- `P` - Interface for typed parameters (optional) -- `S` - Interface for typed state (defaults to `InstanceState`) -- `RH` - Interface for typed response headers (defaults to `InstanceResponseHeaders`) - -**Example:** - -```typescript -interface AppHeaders { - Authorization?: string; - 'X-API-Key'?: string; -} - -interface AppParams { - version?: string; - format?: 'json' | 'xml'; -} - -interface AppState { - userId?: string; - sessionId?: string; -} - -const api = new FetchEngine({ - baseUrl: 'https://api.example.com', - defaultType: 'json', - totalTimeout: 5000 -}); -``` - -### Configuration Options - -**Note:** `FetchEngine.Options` extends the Fetch API's `RequestInit` interface. Any standard Fetch API options can be passed and will be merged with `FetchEngine` defaults. 
- -**FetchEngine Options** - -| Option | Type | Description | -| --------------------- | --------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------- | -| `baseUrl` (required) | `string` | The base URL for all requests | -| `defaultType` | `'json' \| 'text' \| 'blob' \| 'arrayBuffer' \| 'formData'` | The default type of response expected from the server | -| `totalTimeout` | `number` | Total timeout in milliseconds for entire request lifecycle (including all retries) | -| `attemptTimeout` | `number` | Per-attempt timeout in milliseconds. Each retry gets a fresh timeout | -| `timeout` *(deprecated)* | `number` | Alias for `totalTimeout`. Use `totalTimeout` instead | -| `headers` | `Headers` | The headers to be set on all requests | -| `methodHeaders` | `{ [key in HttpMethods]?: Headers }` | The headers to be set on requests of a specific method | -| `params` | `Params

` | The parameters to be set on all requests | -| `methodParams` | `{ [key in HttpMethods]?: Params

}` | The parameters to be set on requests of a specific method | -| `retry` | `RetryConfig \| boolean` | The retry configuration for the fetch request. Set to `false` to disable retries, `true` to use defaults | -| `dedupePolicy` | `boolean \| DeduplicationConfig` | Request deduplication configuration. `true` enables with defaults (GET only) | -| `cachePolicy` | `boolean \| CacheConfig` | Response caching configuration. `true` enables with defaults (GET, 60s TTL) | -| `modifyOptions` | `(opts: RequestOpts, state: S) => RequestOpts` | A function that can be used to modify the options for all requests | -| `modifyMethodOptions` | `{ [key in HttpMethods]?: (opts: RequestOpts, state: S) => RequestOpts }` | A function that can be used to modify the options for requests of a specific method | -| `validate` | Validate Config (see below) | Validators for when setting headers and state | -| `determineType` | `(response: Response) => 'json' \| 'text' \| 'blob' \| 'arrayBuffer' \| 'formData' \| Symbol` | The function to determine the type of response expected from the server. May return `FetchEngine.useDefault` to use built-in detection | - -**Validate Config** - -| Option | Type | Description | -| ------------ | ----------------------------------------------------- | --------------------------------------------------------------------------------- | -| `headers` | `(headers: Headers, method?: HttpMethods) => void` | A function that can be used to validate the headers before the request is made | -| `params` | `(params: Params

, method?: HttpMethods) => void` | A function that can be used to validate the parameters before the request is made | -| `state` | `(state: S) => void` | A function that can be used to validate the state before the request is made | -| `perRequest` | `{ headers?: boolean, params?: boolean }` | Whether to validate the headers and parameters before the request is made | - -**Retry Config** - -| Option | Type | Description | -| ----------------------- | ------------------------------------------------------------ | ----------------------------------------------------------------------- | -| `baseDelay` | `number` | The base delay between retry attempts in milliseconds (default: 1000) | -| `maxAttempts` | `number` | The maximum number of retry attempts | -| `maxDelay` | `number` | The maximum delay between retry attempts in milliseconds | -| `useExponentialBackoff` | `boolean` | Whether to use exponential backoff for retry attempts | -| `retryableStatusCodes` | `number[]` | The status codes that should trigger a retry | -| `shouldRetry` | `(error: FetchError, attempt: number) => boolean \| number` | A function to determine if a request should be retried. Return `false` to stop, `true` to retry with default delay, or a number for custom delay in ms | - -**Type Definition** - -```typescript -interface FetchEngine.Options { - baseUrl: string; - defaultType?: 'json' | 'text' | 'blob' | 'arrayBuffer' | 'formData'; - headers?: Headers; - methodHeaders?: { - GET?: Headers; - POST?: Headers; - PUT?: Headers; - PATCH?: Headers; - DELETE?: Headers; - HEAD?: Headers; - OPTIONS?: Headers; - }; - params?: Params

; - methodParams?: { - GET?: Params

; - POST?: Params

; - PUT?: Params

; - PATCH?: Params

; - DELETE?: Params

; - HEAD?: Params

; - OPTIONS?: Params

; - }; - retry?: RetryConfig | false; - modifyOptions?: (opts: RequestOpts, state: S) => RequestOpts; - modifyMethodOptions?: { - GET?: (opts: RequestOpts, state: S) => RequestOpts; - POST?: (opts: RequestOpts, state: S) => RequestOpts; - PUT?: (opts: RequestOpts, state: S) => RequestOpts; - PATCH?: (opts: RequestOpts, state: S) => RequestOpts; - DELETE?: (opts: RequestOpts, state: S) => RequestOpts; - HEAD?: (opts: RequestOpts, state: S) => RequestOpts; - OPTIONS?: (opts: RequestOpts, state: S) => RequestOpts; - }; - validate?: { - headers?: (headers: Headers, method?: HttpMethods) => void; - params?: (params: Params

, method?: HttpMethods) => void; - state?: (state: S) => void; - perRequest?: { - headers?: boolean; - params?: boolean; - }; - }; - determineType?: (response: Response) => 'json' | 'text' | 'blob' | 'arrayBuffer' | 'formData' | Symbol; - - // Request deduplication (prevents duplicate concurrent requests) - dedupePolicy?: boolean | DeduplicationConfig; - - // Response caching with TTL and SWR support - cachePolicy?: boolean | CacheConfig; - - // Rate limiting with token bucket algorithm - rateLimitPolicy?: boolean | RateLimitConfig; -} -``` - -### Request Methods - -All request methods return an `AbortablePromise>` that can be cancelled and provides status information. The response object contains the parsed data along with typed response headers, status, request details, and typed configuration matching your custom headers and params interfaces. - -**Parameters:** - -| Parameter | Description | -| --------- | ----------------------------------------- | -| `path` | API endpoint path (relative to baseUrl) | -| `payload` | Request body data (optional) | -| `options` | Optional request configuration (optional) | - -#### Without a payload - -**GET** - -```typescript -api.get(path: string, options?: RequestOptions): AbortablePromise> -``` - -**DELETE** - -```typescript -api.delete(path: string, options?: RequestOptions): AbortablePromise> -``` - -**OPTIONS** - -```typescript -api.options(path: string, options?: RequestOptions): AbortablePromise> -``` - -**Example:** - -```typescript -const [response, err] = await attempt(() => api.get('/users')); -if (!err) { - console.log('Users:', response.data); - console.log('Total:', response.headers['x-total-count']); -} - -const [userResponse, err2] = await attempt(() => api.get('/users/123', { - headers: { 'X-Include': 'profile' }, - params: { include: 'permissions' } -})); - -// Backward compatibility - destructure just the data -const { data: users } = await api.get('/users'); - -// Smart URL handling - absolute URLs bypass 
base URL -const [externalResponse, err3] = await attempt(() => - api.get('https://api.external.com/data') -); - -const [deleteResponse, err4] = await attempt(() => api.delete('/users/123')); -``` - -#### With a payload - -**POST** - -```typescript -api.post(path: string, payload?: D, options?: RequestOptions): AbortablePromise> -``` - -**PUT** - -```typescript -api.put(path: string, payload?: D, options?: RequestOptions): AbortablePromise> -``` - -**PATCH** - -```typescript -api.patch(path: string, payload?: D, options?: RequestOptions): AbortablePromise> -``` - -**Example:** - -```typescript -const [newUser, err] = await attempt(() => - api.post('/users', { - name: 'John Doe', - email: 'john@example.com' - }) -); - -const [updatedUser, err] = await attempt(() => - api.put( - '/users/123', - { - name: 'Jane Doe', - email: 'jane@example.com' - }, - { - headers: { - 'X-Partial-Update': 'true' - }, - params: { - include: 'permissions' - } - } - ) -); -``` - -### Generic Request Method - - -### FetchResponse Object - -Every HTTP request returns an enhanced response object with typed configuration: - -```typescript -interface FetchResponse { - data: T; // Parsed response body - headers: Partial; // Response headers as typed plain object - status: number; // HTTP status code - request: Request; // Original request object - config: FetchConfig; // Typed configuration used for request -} - -interface FetchConfig { - baseUrl?: string; - /** @deprecated Use totalTimeout instead */ - timeout?: number; - totalTimeout?: number; // Total timeout for entire lifecycle - attemptTimeout?: number; // Per-attempt timeout - headers?: H; // Typed headers from your custom interface - params?: P; // Typed params from your custom interface - retry?: RetryConfig | false; - method?: string; - determineType?: any; -} -``` - -**Usage Examples:** - -```typescript -// Access full response details -const response = await api.get('/users'); -console.log('Data:', response.data); // Parsed users 
array -console.log('Status:', response.status); // HTTP status code -console.log('Headers:', response.headers); // Access to all headers -console.log('Config:', response.config); // Request configuration used - -// Backward compatibility - destructure just the data -const { data: users } = await api.get('/users'); - -// Access specific response metadata -const contentType = response.headers['content-type']; -const rateLimit = response.headers['x-rate-limit-remaining']; -const requestUrl = response.request.url; -const usedTimeout = response.config.timeout; -``` - -#### `request(method, path, options?)` - -```typescript -request( - method: HttpMethods, - path: string, - options?: RequestOptions & { payload?: D } -): AbortablePromise> -``` - -**Example:** - -```typescript -const [result, err] = await attempt(() => - api.request('PATCH', '/settings', { - payload: { theme: 'dark' }, - headers: { 'X-Partial-Update': 'true' } - }) -); -``` - -## Request Options - -**Note:** `RequestOptions` extends the Fetch API's `RequestInit` interface. All standard Fetch API options are supported, with the following exceptions: - -- `baseUrl` -- `defaultType` -- `body` -- `method` -- `controller` (you can pass it as `abortController` instead) - -**Request Options** - -| Option | Type | Description | -| ----------------- | ----------------- | ----------------------------------------------------------------------- | -| `abortController` | `AbortController` | The abort controller to be used to abort the request | -| `headers` | `Headers` | The headers to be set on the request | -| `params` | `Params

` | The parameters to be set on the request | -| `totalTimeout` | `number` | Total timeout for entire request lifecycle (including all retries) | -| `attemptTimeout` | `number` | Per-attempt timeout. Each retry gets a fresh timeout | -| `timeout` *(deprecated)* | `number` | Alias for `totalTimeout`. Use `totalTimeout` instead | -| `determineType` | `DetermineTypeFn` | The function to determine the type of response expected from the server | -| `retry` | `RetryConfig` | Retry configuration overrides for this request | -| `onBeforeReq` | `(opts) => void \| Promise` | Lifecycle hook called before the request is made | -| `onAfterReq` | `(response, opts) => void \| Promise` | Lifecycle hook called after the request completes | -| `onError` | `(err) => void \| Promise` | Lifecycle hook called when the request errors | - -**Type Definition** - -```typescript -type Lifecycle = { - - onBeforeReq?: (opts: FetchEngine.RequestOpts) => void | Promise - onAfterReq?: (response: Response, opts: FetchEngine.RequestOpts) => void | Promise - onError?: (err: FetchError) => void | Promise -}; - -type RequestOpts = { - - controller: AbortController, - headers?: Headers, - params?: Params

, - /** @deprecated Use totalTimeout instead */ - timeout?: number, - totalTimeout?: number, - attemptTimeout?: number, - determineType?: DetermineTypeFn, - retry?: RetryConfig -}; - -type CallOptions = ( - Lifecycle & - RequestOpts & - RequestInit -); -``` - -## AbortablePromise - -All HTTP methods return an `AbortablePromise` with additional capabilities: - -```typescript -interface AbortablePromise extends Promise { - isFinished: boolean; - isAborted: boolean; - abort(reason?: string): void; -} -``` - -**Example:** - -```typescript -const request = api.get('/slow-endpoint'); - -// My boss made me do it -onceMyReallyWeirdConditionHits(() => { - !request.isFinished && request.abort('User timeout') -}); - -const [data, err] = await attempt(() => request); - -if (err && request.isAborted) { - console.log('Request was cancelled'); -} -``` - -## State Management - -### `setState(state)` / `setState(key, value)` - -```typescript -setState(state: Partial): void -setState(key: K, value: S[K]): void -``` - -Update the client's internal state. - -**Example:** - -```typescript -// Set entire state object -api.setState({ - userId: '123', - sessionId: 'abc', - preferences: { theme: 'dark' } -}); - -// Set individual property -api.setState('userId', '456'); -``` - -### `getState()` - -```typescript -getState(): S -``` - -Get a deep clone of the current state. - -**Example:** - -```typescript -const currentState = api.getState(); -console.log('Current user:', currentState.userId); -``` - -### `resetState()` - -```typescript -resetState(): void -``` - -Clear all state properties. - -## Headers Management - -### `addHeader(name, value, method?)` - -```typescript -addHeader(name: string, value: string, method?: HttpMethods): void -addHeader(headers: Record, method?: HttpMethods): void -``` - -Add headers globally or for specific HTTP methods. 
- -**Example:** - -```typescript -// Global header -api.addHeader('Authorization', 'Bearer token123'); - -// Multiple headers -api.addHeader({ - 'X-API-Version': 'v2', - 'X-Client': 'web-app' -}); - -// Method-specific header -api.addHeader('X-CSRF-Token', 'csrf123', 'POST'); -``` - -### `rmHeader(name, method?)` - -```typescript -rmHeader(name: string | string[], method?: HttpMethods): void -``` - -Remove headers. - -**Example:** - -```typescript -// Remove global header -api.rmHeader('Authorization'); - -// Remove multiple headers -api.rmHeader(['X-API-Version', 'X-Client']); - -// Remove method-specific header -api.rmHeader('X-CSRF-Token', 'POST'); -``` - -### `hasHeader(name, method?)` - -```typescript -hasHeader(name: string, method?: HttpMethods): boolean -``` - -Check if a header exists. - -## Parameters Management - -### `addParam(name, value, method?)` - -```typescript -addParam(name: string, value: string | number | boolean, method?: HttpMethods): void -addParam(params: Record, method?: HttpMethods): void -``` - -Add URL parameters globally or for specific methods. - -**Example:** - -```typescript -// Global parameter -api.addParam('version', 'v1'); - -// Multiple parameters -api.addParam({ - format: 'json', - locale: 'en-US' -}); - -// Method-specific parameter -api.addParam('include_deleted', true, 'GET'); -``` - -### `rmParam(name, method?)` - -```typescript -rmParam(name: string | string[], method?: HttpMethods): void -``` - -Remove parameters. - -### `hasParam(name, method?)` - -```typescript -hasParam(name: string, method?: HttpMethods): boolean -``` - -Check if a parameter exists. - -## URL Management - -### `changeBaseUrl(url)` - -```typescript -changeBaseUrl(url: string): void -``` - -Update the base URL for all requests. 
- -**Example:** - -```typescript -observer.on('environment-changed', ({ env }) => { - - // Switch to other environment - api.changeBaseUrl(`https://${env}.fubar.com`); -}); -``` - -### `changeModifyOptions(fn?)` - -```typescript -changeModifyOptions(fn?: (opts: RequestOpts, state: S) => RequestOpts): void -``` - -Updates the global modifyOptions function for this FetchEngine instance. Changes the global options modification function that is applied to all requests before they are sent. Pass undefined to clear the function. Dispatches a 'fetch-modify-options-change' event when updated. - -**Example:** - -```typescript -// Set a global request modifier -api.changeModifyOptions((opts, state) => { - opts.headers = { ...opts.headers, 'X-Request-ID': crypto.randomUUID() }; - return opts; -}); - -// Add authentication based on state -api.changeModifyOptions((opts, state) => { - if (state.authToken) { - opts.headers.Authorization = `Bearer ${state.authToken}`; - } - return opts; -}); - -// Clear the modifier -api.changeModifyOptions(undefined); -``` - -### `changeModifyMethodOptions(method, fn?)` - -```typescript -changeModifyMethodOptions(method: HttpMethods, fn?: (opts: RequestOpts, state: S) => RequestOpts): void -``` - -Updates the modifyOptions function for a specific HTTP method. Changes the method-specific options modification function that is applied to requests of the specified HTTP method before they are sent. Pass undefined to clear the function for that method. Dispatches a 'fetch-modify-method-options-change' event when updated. 
- -**Example:** - -```typescript -// Set a POST-specific request modifier -api.changeModifyMethodOptions('POST', (opts, state) => { - opts.headers = { ...opts.headers, 'Content-Type': 'application/json' }; - return opts; -}); - -// Add CSRF token to state-changing methods -api.changeModifyMethodOptions('POST', (opts, state) => { - if (state.csrfToken) { - opts.headers['X-CSRF-Token'] = state.csrfToken; - } - return opts; -}); - -// Clear the POST modifier -api.changeModifyMethodOptions('POST', undefined); -``` - -## Retry Configuration - -The retry option accepts three types of values: -- `true` - Enable retries with default configuration -- `false` - Disable retries completely -- `RetryConfig` object - Custom retry configuration - -**Default values (when `retry: true` or partial config):** -```typescript -{ - maxAttempts: 3, - baseDelay: 1000, - maxDelay: 10000, - useExponentialBackoff: true, - retryableStatusCodes: [408, 429, 499, 500, 502, 503, 504] -} -``` - -```typescript -interface RetryConfig { - maxAttempts?: number; // default: 3 - baseDelay?: number; // default: 1000 (in milliseconds) - maxDelay?: number; // default: 10000 - useExponentialBackoff?: boolean; // default: true - retryableStatusCodes?: number[]; // default: [408, 429, 499, 500, 502, 503, 504] - - // shouldRetry can return a boolean or a custom delay in milliseconds - // When returning a number, it specifies the exact delay before the next retry - // default: () => true - shouldRetry?: (error: FetchError, attempt: number) => boolean | number; -} -``` - -### Custom Retry Logic - -The `shouldRetry` function will be awaited and can return: - -- `true` - Retry with default exponential backoff (uses `baseDelay`) -- `false` - Don't retry -- `number` - Retry with this exact delay in milliseconds (overrides exponential backoff) - -**Examples:** - -```typescript -// Use default retry configuration -const defaultRetryApi = new FetchEngine({ - baseUrl: 'https://api.example.com', - retry: true // Uses 
defaults: 3 attempts, 1s base delay, exponential backoff -}); - -// Disable retries completely -const noRetryApi = new FetchEngine({ - baseUrl: 'https://api.example.com', - retry: false // No retries at all -}); - -// Custom retry logic with shouldRetry -const api = new FetchEngine({ - baseUrl: 'https://api.example.com', - retry: { - maxAttempts: 5, - baseDelay: 1000, // Used for exponential backoff when shouldRetry returns true - shouldRetry: (error, attempt) => { - // Custom delay for rate limits (overrides exponential backoff) - if (error.status === 429) { - const retryAfter = error.headers?.['retry-after']; - return retryAfter ? parseInt(retryAfter) * 1000 : 5000; - } - - // Don't retry client errors - if (error.status >= 400 && error.status < 500) { - return false; - } - - // Custom delay for server errors (overrides exponential backoff) - if (error.status >= 500) { - return Math.min(1000 * Math.pow(2, attempt - 1), 30000); - } - - return true; // Use default exponential backoff with baseDelay - } - } -}); -``` - -## Timeout Configuration - - -FetchEngine provides two complementary timeout mechanisms for fine-grained control over request timing: - -- **`totalTimeout`**: Caps the entire request lifecycle, including all retry attempts -- **`attemptTimeout`**: Applies per-attempt, with each retry getting a fresh timeout - -### Type Definitions - -```typescript -interface TimeoutOptions { - - /** - * Total timeout for the entire request lifecycle in milliseconds. - * Applies to the complete operation including all retry attempts. - * When this fires, the request stops immediately with no more retries. - */ - totalTimeout?: number; - - /** - * Per-attempt timeout in milliseconds. - * Each retry attempt gets a fresh timeout and AbortController. - * When an attempt times out, it can still be retried (if retry is configured). - */ - attemptTimeout?: number; - - /** - * @deprecated Use `totalTimeout` instead. This is now an alias for `totalTimeout`. 
- */ - timeout?: number; -} -``` - -### Basic Usage - -```typescript -// Instance-level timeouts -const api = new FetchEngine({ - baseUrl: 'https://api.example.com', - totalTimeout: 30000, // 30s max for entire operation - attemptTimeout: 5000 // 5s per attempt -}); - -// Per-request overrides -const [response, err] = await attempt(() => - api.get('/slow-endpoint', { - totalTimeout: 60000, // Override: 60s for this request - attemptTimeout: 10000 // Override: 10s per attempt - }) -); -``` - -### How Timeouts Work Together - -When both timeouts are configured, they work in a parent-child relationship: - -``` -┌─────────────────────────────────────────────────────────────────────┐ -│ totalTimeout (30s) │ -│ ┌─────────────────┐ ┌─────────────────┐ ┌─────────────────┐ │ -│ │ Attempt 1 (5s) │ │ Attempt 2 (5s) │ │ Attempt 3 (5s) │ │ -│ │ attemptTimeout │ │ attemptTimeout │ │ attemptTimeout │ │ -│ └─────────────────┘ └─────────────────┘ └─────────────────┘ │ -│ ↓ ↓ ↓ │ -│ [timeout] [timeout] [success] │ -│ retry → retry → return │ -└─────────────────────────────────────────────────────────────────────┘ -``` - -**Key behaviors:** - -1. **totalTimeout fires**: Everything stops immediately, no more retries -2. **attemptTimeout fires**: That attempt fails, but can retry if configured -3. 
**Both configured**: Each attempt has its own fresh AbortController - -### Controller Architecture - -``` -┌──────────────────────────────────────────────────────────────────┐ -│ Parent Controller │ -│ (totalTimeout attached) │ -│ │ -│ ┌───────────────┐ ┌───────────────┐ ┌───────────────┐ │ -│ │ Child 1 │ │ Child 2 │ │ Child 3 │ │ -│ │ (attempt 1) │ │ (attempt 2) │ │ (attempt 3) │ │ -│ │ attemptTimeout│ │ attemptTimeout│ │ attemptTimeout│ │ -│ └───────────────┘ └───────────────┘ └───────────────┘ │ -│ │ -│ - Parent abort → All children abort (totalTimeout fired) │ -│ - Child abort → Only that attempt fails (attemptTimeout fired) │ -└──────────────────────────────────────────────────────────────────┘ -``` - -### With Retry Configuration - -```typescript -const api = new FetchEngine({ - baseUrl: 'https://api.example.com', - totalTimeout: 30000, // 30s total - attemptTimeout: 5000, // 5s per attempt - retry: { - maxAttempts: 5, - baseDelay: 1000, - useExponentialBackoff: true - } -}); - -// Scenario: Each attempt can take up to 5s, retries if it times out -// Total operation cannot exceed 30s regardless of retry attempts -const [response, err] = await attempt(() => api.get('/flaky-endpoint')); - -if (err && err.timedOut) { - // The request timed out (either totalTimeout or attemptTimeout) - console.log('Request timed out after all retries'); -} -``` - -### The `timedOut` Flag - -The `FetchError` object includes a `timedOut` flag that distinguishes timeout aborts from other abort causes: - -```typescript -interface FetchError> extends Error { - - // ... other properties - - /** - * Whether the request was aborted (any cause: manual, timeout, or server). - */ - aborted?: boolean; - - /** - * Whether the abort was caused by a timeout (attemptTimeout or totalTimeout). - * - `true`: The abort was caused by a timeout firing - * - `undefined`: The abort was manual or server-initiated - * - * When `timedOut` is true, `aborted` will also be true. 
- */ - timedOut?: boolean; -} -``` - -**Usage:** - -```typescript -const [response, err] = await attempt(() => - api.get('/endpoint', { totalTimeout: 5000 }) -); - -if (err) { - if (err.aborted && err.timedOut) { - // Timed out - show user-friendly message - console.log('Request took too long'); - } - else if (err.aborted) { - // Manual abort or server disconnect - console.log('Request was cancelled'); - } - else { - // Other error (network, HTTP error, etc.) - console.log('Request failed:', err.message); - } -} -``` - -### Default Retry Behavior with Timeouts - -The default `shouldRetry` function returns `true` for status code `499`, which is set when a request is aborted (including by `attemptTimeout`). This means: - -- **attemptTimeout fires** → Status 499 → Can retry (if within maxAttempts) -- **totalTimeout fires** → Parent controller aborts → No retry possible - -```typescript -// Default retry configuration -{ - maxAttempts: 3, - baseDelay: 1000, - retryableStatusCodes: [408, 429, 499, 500, 502, 503, 504], - shouldRetry(error) { - if (error.status === 499) return true; // Includes attemptTimeout - return this.retryableStatusCodes?.includes(error.status) ?? false; - } -} -``` - -### Migration from `timeout` - -The `timeout` option is deprecated but continues to work as an alias for `totalTimeout`: - -```typescript -// Old code (still works) -const api = new FetchEngine({ - baseUrl: 'https://api.example.com', - timeout: 5000 -}); - -// New code (recommended) -const api = new FetchEngine({ - baseUrl: 'https://api.example.com', - totalTimeout: 5000 -}); - -// Both are equivalent - totalTimeout applies to entire lifecycle -``` - -::: warning Migration Note -If you were using `timeout` expecting it to be per-attempt, you should now use `attemptTimeout` instead. The behavior of `timeout` (now `totalTimeout`) has always been for the entire operation. 
-::: - -### Real-World Examples - -**API Gateway with Strict Limits:** - -```typescript -// Gateway has 30s hard limit, but individual services might be slow -const api = new FetchEngine({ - baseUrl: 'https://gateway.example.com', - totalTimeout: 28000, // Under gateway limit - attemptTimeout: 8000, // Allow slow services - retry: { - maxAttempts: 3, - baseDelay: 500 - } -}); -``` - -**User-Facing with Fallback:** - -```typescript -const api = new FetchEngine({ - baseUrl: 'https://api.example.com', - totalTimeout: 10000, // Users won't wait more than 10s - attemptTimeout: 3000, // Quick feedback per attempt - retry: { - maxAttempts: 3, - shouldRetry: (error) => { - // Only retry on timeout, not on 4xx errors - return error.timedOut || error.status >= 500; - } - } -}); -``` - -**Background Sync with Long Tolerance:** - -```typescript -const syncApi = new FetchEngine({ - baseUrl: 'https://sync.example.com', - totalTimeout: 300000, // 5 minutes for batch operations - attemptTimeout: 60000, // 1 minute per attempt - retry: { - maxAttempts: 5, - baseDelay: 5000, - useExponentialBackoff: true - } -}); -``` - -## Request Deduplication - -When multiple parts of your application make identical requests simultaneously, FetchEngine can deduplicate them by sharing a single in-flight promise. This reduces network traffic, server load, and prevents race conditions. 
- -### Quick Start - -```typescript -// Enable with defaults (GET requests only) -const api = new FetchEngine({ - baseUrl: 'https://api.example.com', - dedupePolicy: true -}); - -// Three concurrent calls → one network request -const [user1, user2, user3] = await Promise.all([ - api.get('/users/123'), - api.get('/users/123'), - api.get('/users/123') -]); -// All three receive the same result from a single HTTP request -``` - -### Configuration - -| Option | Type | Default | Description | -|--------|------|---------|-------------| -| `enabled` | `boolean` | `true` | Enable/disable deduplication | -| `methods` | `HttpMethod[]` | `['GET']` | HTTP methods to deduplicate | -| `serializer` | `RequestSerializer` | `defaultRequestSerializer` | Function to generate request keys | -| `shouldDedupe` | `(ctx) => boolean` | - | Dynamic skip check (called per-request) | -| `rules` | `DedupeRule[]` | - | Route-specific configuration | - -**Full Configuration Example:** - -```typescript -const api = new FetchEngine({ - baseUrl: 'https://api.example.com', - dedupePolicy: { - enabled: true, - methods: ['GET', 'POST'], - serializer: (ctx) => `${ctx.method}:${ctx.path}:${JSON.stringify(ctx.payload)}`, - shouldDedupe: (ctx) => !ctx.headers?.['X-Force-Fresh'], - rules: [ - // Disable deduplication for admin endpoints - { startsWith: '/admin', enabled: false }, - - // Custom serializer for search (ignore timestamp param) - { - startsWith: '/search', - serializer: (ctx) => `${ctx.method}:${ctx.path}:${ctx.payload?.query}` - }, - - // Enable POST deduplication for specific endpoint - { is: '/graphql', methods: ['POST'] } - ] - } -}); -``` - -### Deduplication Events - -```typescript -// Emitted when a new request starts tracking -api.on('fetch-dedupe-start', (event) => { - console.log('New request:', event.key); -}); - -// Emitted when a caller joins an existing in-flight request -api.on('fetch-dedupe-join', (event) => { - console.log('Joined:', event.key, 'waiters:', event.waitingCount); 
-}); -``` - -### Independent Timeout per Caller - -Each caller can have independent timeout and abort constraints: - -```typescript -// Caller A starts request with 10s timeout -const promiseA = api.get('/slow-endpoint', { timeout: 10000 }); - -// Caller B joins with 2s timeout -const promiseB = api.get('/slow-endpoint', { timeout: 2000 }); - -// After 2s: B times out and rejects → A continues waiting -// At 5s: Request completes → A gets the result - -// Semantics: -// - Initiator's abort/timeout → cancels fetch → everyone fails -// - Joiner's abort/timeout → only that joiner fails → others unaffected -``` - -## Response Caching - -FetchEngine supports response caching with TTL and stale-while-revalidate (SWR) for improved performance and reduced API load. - -### Quick Start - -```typescript -// Enable with defaults (GET requests, 60s TTL) -const api = new FetchEngine({ - baseUrl: 'https://api.example.com', - cachePolicy: true -}); - -// First call: fetches from network, caches response -const users1 = await api.get('/users'); - -// Subsequent calls within TTL: instant cache hit -const users2 = await api.get('/users'); -``` - -### Configuration - -| Option | Type | Default | Description | -|--------|------|---------|-------------| -| `enabled` | `boolean` | `true` | Enable/disable caching | -| `methods` | `HttpMethod[]` | `['GET']` | HTTP methods to cache | -| `ttl` | `number` | `60000` | Time to live in milliseconds | -| `staleIn` | `number` | - | Time until stale for SWR (ms) | -| `serializer` | `RequestSerializer` | `defaultRequestSerializer` | Function to generate cache keys | -| `skip` | `(ctx) => boolean` | - | Dynamic skip check | -| `rules` | `CacheRule[]` | - | Route-specific configuration | -| `adapter` | `CacheAdapter` | `MapCacheAdapter` | Custom cache storage backend | - -**Full Configuration with SWR:** - -```typescript -const api = new FetchEngine({ - baseUrl: 'https://api.example.com', - cachePolicy: { - enabled: true, - methods: ['GET'], - ttl: 
300000, // 5 minutes - staleIn: 60000, // Consider stale after 1 minute - - // Skip caching for certain requests - skip: (ctx) => ctx.headers?.['Cache-Control'] === 'no-cache', - - rules: [ - // Long cache for static content - { startsWith: '/static', ttl: 3600000 }, - - // Short cache for user data - { startsWith: '/user', ttl: 30000, staleIn: 10000 }, - - // No caching for realtime endpoints - { includes: '/realtime', enabled: false }, - - // No caching for admin - { startsWith: '/admin', enabled: false } - ] - } -}); -``` - -### Stale-While-Revalidate (SWR) - -When `staleIn` is configured, FetchEngine implements stale-while-revalidate: - -```typescript -const api = new FetchEngine({ - baseUrl: 'https://api.example.com', - cachePolicy: { - ttl: 60000, // Expire after 60 seconds - staleIn: 30000 // Consider stale after 30 seconds - } -}); - -// Timeline: -// 0-30s: Fresh cache hit - returns cached data immediately -// 30-60s: Stale cache hit - returns cached data + background revalidation -// >60s: Cache miss - fetches fresh data -``` - -### Cache Events - -```typescript -// Fresh cache hit -api.on('fetch-cache-hit', (event) => { - console.log('Cache hit:', event.key, 'expires in:', event.expiresIn); -}); - -// Stale cache hit (SWR) -api.on('fetch-cache-stale', (event) => { - console.log('Stale hit:', event.key, 'revalidating...'); -}); - -// Cache miss -api.on('fetch-cache-miss', (event) => { - console.log('Cache miss:', event.key); -}); - -// New cache entry stored -api.on('fetch-cache-set', (event) => { - console.log('Cached:', event.key, 'TTL:', event.expiresIn); -}); - -// SWR background revalidation started -api.on('fetch-cache-revalidate', (event) => { - console.log('Background revalidation:', event.key); -}); - -// SWR background revalidation failed -api.on('fetch-cache-revalidate-error', (event) => { - console.error('Revalidation failed:', event.key, event.error); -}); -``` - -### Cache Invalidation - -```typescript -// Clear all cached responses -await 
api.clearCache(); - -// Delete specific cache entry by key -await api.deleteCache(cacheKey); - -// Invalidate entries matching a predicate -const count = await api.invalidateCache((key) => key.includes('user')); -console.log(`Invalidated ${count} entries`); - -// Invalidate by path pattern (string prefix) -await api.invalidatePath('/users'); - -// Invalidate by path pattern (RegExp) -await api.invalidatePath(/^\/api\/v\d+\/users/); - -// Invalidate with custom predicate (for custom serializers) -await api.invalidatePath((key) => { - // Full control over key matching - useful when using custom serializers - return key.includes('/users') && key.includes('Bearer'); -}); - -// Get cache statistics -const stats = api.cacheStats(); -console.log('Cache size:', stats.cacheSize); -console.log('In-flight:', stats.inflightCount); -``` - -### Custom Cache Adapters - -FetchEngine supports pluggable cache backends via the `CacheAdapter` interface. This enables caching to Redis, IndexedDB, AsyncStorage, localStorage, or any custom storage. 
- -```typescript -import { FetchEngine, CacheAdapter } from '@logosdx/fetch'; -import { CacheItem } from '@logosdx/utils'; - -// Example: localStorage adapter -class LocalStorageCacheAdapter implements CacheAdapter { - - #prefix: string; - #data = new Map>(); - - constructor(prefix = 'api-cache') { - this.#prefix = prefix; - this.#loadFromStorage(); - } - - get size() { return this.#data.size; } - - async get(key: string) { - return this.#data.get(key); - } - - async set(key: string, item: CacheItem) { - this.#data.set(key, item); - this.#saveToStorage(); - } - - async delete(key: string) { - const existed = this.#data.delete(key); - this.#saveToStorage(); - return existed; - } - - async has(key: string) { - return this.#data.has(key); - } - - async clear() { - this.#data.clear(); - localStorage.removeItem(this.#prefix); - } - - #loadFromStorage() { - const stored = localStorage.getItem(this.#prefix); - if (stored) { - const entries = JSON.parse(stored); - this.#data = new Map(entries); - } - } - - #saveToStorage() { - localStorage.setItem(this.#prefix, JSON.stringify([...this.#data])); - } -} - -// Use the custom adapter -const api = new FetchEngine({ - baseUrl: 'https://api.example.com', - cachePolicy: { - adapter: new LocalStorageCacheAdapter('my-api'), - ttl: 300000 - } -}); -``` - -The `CacheAdapter` interface: - -```typescript -interface CacheAdapter { - get(key: string): Promise | undefined>; - set(key: string, item: CacheItem, expiresAt?: number): Promise; - delete(key: string): Promise; - has(key: string): Promise; - clear(): Promise; - readonly size: number; -} - -interface CacheItem { - value: T; - createdAt: number; - expiresAt: number; - staleAt?: number; // For SWR -} -``` - -## Rate Limiting - -Control outgoing request rates using a token bucket algorithm. Each unique request key (generated by the serializer) gets its own rate limiter, enabling per-endpoint or per-user throttling. 
- -This re-uses the same rate limiting logic found in the [`rateLimit` function utility](https://logosdx.dev/packages/utils.html#ratelimit) in the utils package. - -### Quick Start - -```typescript -// Enable with defaults (100 requests/minute, all HTTP methods) -const api = new FetchEngine({ - baseUrl: 'https://api.example.com', - rateLimitPolicy: true -}); - -// Requests are automatically throttled -// If rate limit is exceeded, requests wait for tokens by default -await api.get('/users'); // Waits if needed -``` - -### Configuration - -```typescript -const api = new FetchEngine({ - baseUrl: 'https://api.example.com', - rateLimitPolicy: { - // Global settings - enabled: true, - methods: ['GET', 'POST', 'PUT', 'DELETE', 'PATCH', 'OPTIONS'], // All by default - maxCalls: 100, // Requests per window (default: 100) - windowMs: 60000, // Time window in ms (default: 60000 = 1 minute) - waitForToken: true, // true = wait, false = reject immediately - - // Custom key generation (default: method + pathname) - serializer: (ctx) => `${ctx.method}|${ctx.url.pathname}`, - - // Dynamic bypass - shouldRateLimit: (ctx) => { - // Return false to bypass rate limiting - return !ctx.headers?.['X-Bypass-RateLimit']; - }, - - // Callback when rate limited - onRateLimit: (ctx, waitTimeMs) => { - console.log(`Rate limited for ${waitTimeMs}ms:`, ctx.path); - }, - - // Route-specific rules - rules: [ - // Stricter limits for search - { startsWith: '/api/search', maxCalls: 10, windowMs: 60000 }, - - // Reject immediately for bulk operations - { startsWith: '/api/bulk', waitForToken: false }, - - // No rate limiting for health checks - { startsWith: '/health', enabled: false }, - - // Custom serializer for user-specific limiting - { - startsWith: '/api/user', - serializer: (ctx) => `user:${ctx.headers?.['X-User-ID'] ?? 
'anonymous'}` - } - ] - } -}); -``` - -### Token Bucket Algorithm - -Rate limiting uses a token bucket that refills continuously: - -- **Capacity**: `maxCalls` tokens -- **Refill Rate**: `maxCalls / windowMs` tokens per millisecond -- Each request consumes 1 token -- If no tokens available: - - `waitForToken: true` → waits until token available - - `waitForToken: false` → throws `RateLimitError` immediately - -```typescript -// Example: 10 requests per minute = 1 token every 6 seconds -{ - maxCalls: 10, - windowMs: 60000 // 60000ms / 10 = 6000ms per token -} -``` - -### Rate Limit Events - -```typescript -// Emitted when request must wait for a token -api.on('fetch-ratelimit-wait', (event) => { - console.log('Waiting for rate limit:', { - key: event.key, - waitTimeMs: event.waitTimeMs, - currentTokens: event.currentTokens, - capacity: event.capacity, - nextAvailable: event.nextAvailable - }); -}); - -// Emitted when request is rejected (waitForToken: false) -api.on('fetch-ratelimit-reject', (event) => { - console.log('Rate limit exceeded:', { - key: event.key, - waitTimeMs: event.waitTimeMs // How long they would have waited - }); -}); - -// Emitted after token is successfully acquired -api.on('fetch-ratelimit-acquire', (event) => { - console.log('Token acquired:', { - key: event.key, - currentTokens: event.currentTokens, // Remaining tokens - capacity: event.capacity - }); -}); -``` - -### Rate Limiting Order - -Rate limiting is evaluated **before** cache and deduplication: - -``` -Request → Rate Limit → Cache Check → Dedupe Check → Network -``` - -This means: - -- Cached responses do NOT consume rate limit tokens -- Deduplicated requests only consume one token (the initiator's) -- Rate limiting protects your API from being overwhelmed - -### Per-User Rate Limiting - -```typescript -const api = new FetchEngine({ - baseUrl: 'https://api.example.com', - rateLimitPolicy: { - maxCalls: 100, - windowMs: 60000, - // Group requests by user ID - serializer: (ctx) => 
`user:${ctx.state?.userId ?? 'anonymous'}` - } -}); - -// Each user gets their own 100 req/min bucket -api.setState('userId', 'user-123'); -await api.get('/data'); // Uses user-123's bucket - -api.setState('userId', 'user-456'); -await api.get('/data'); // Uses user-456's bucket -``` - -### Global Rate Limiting - -```typescript -const api = new FetchEngine({ - baseUrl: 'https://api.example.com', - rateLimitPolicy: { - maxCalls: 1000, - windowMs: 60000, - // All requests share one bucket - serializer: () => 'global' - } -}); -``` - -### Handling Rate Limit Errors - -```typescript -import { attempt, isRateLimitError } from '@logosdx/utils'; - -const [response, err] = await attempt(() => api.get('/users')); - -if (err) { - if (isRateLimitError(err)) { - console.log('Rate limited:', err.message); - console.log('Limit:', err.limit); // maxCalls value - // Retry after some time, or show user feedback - } -} -``` - -### Rate Limiting Types - -```typescript -interface RateLimitConfig { - /** Enable rate limiting globally. Default: true */ - enabled?: boolean; - - /** HTTP methods to rate limit. Default: all methods */ - methods?: HttpMethod[]; - - /** Maximum calls allowed within the time window. Default: 100 */ - maxCalls?: number; - - /** Time window in milliseconds. Default: 60000 (1 minute) */ - windowMs?: number; - - /** Wait for token (true) or reject immediately (false). Default: true */ - waitForToken?: boolean; - - /** Custom serializer for bucket key generation */ - serializer?: RequestSerializer; - - /** Dynamic bypass callback. 
Return false to skip rate limiting */ - shouldRateLimit?: (ctx: RequestKeyOptions) => boolean; - - /** Callback when rate limited (before waiting or rejecting) */ - onRateLimit?: (ctx: RequestKeyOptions, waitTimeMs: number) => void | Promise; - - /** Route-specific rules */ - rules?: RateLimitRule[]; -} - -interface RateLimitRule extends MatchTypes { - methods?: HttpMethod[]; - enabled?: boolean; - maxCalls?: number; - windowMs?: number; - waitForToken?: boolean; - serializer?: RequestSerializer; -} - -interface RateLimitEventData extends EventData { - key: string; // Rate limit bucket key - currentTokens: number; // Current tokens in bucket - capacity: number; // Max capacity (maxCalls) - waitTimeMs: number; // Time until next token (ms) - nextAvailable: Date; // When next token available -} -``` - -## Route Matching - -Deduplication, caching, and rate limiting all support flexible route matching via `MatchTypes`: - -```typescript -interface MatchTypes { - is?: string; // Exact path match - startsWith?: string; // Path prefix match - endsWith?: string; // Path suffix match - includes?: string; // Path contains substring - match?: RegExp; // Regular expression match -} -``` - -**Match Type Behavior:** -- `is` requires an exact match and cannot be combined with other types -- Other types can be combined with AND logic (all must match) - -**Examples:** - -```typescript -const rules = [ - // Exact match - { is: '/users' }, - - // Prefix match - { startsWith: '/api/v2' }, - - // Suffix match - { endsWith: '.json' }, - - // Substring match - { includes: 'admin' }, - - // Regex match - { match: /^\/v\d+\/users/ }, - - // Combined (AND logic) - { startsWith: '/api', endsWith: '.json' }, // Must satisfy both - { includes: 'user', match: /\/\d+$/ } // Must satisfy both -]; -``` - -::: warning Regex Performance (ReDoS) -Route matching runs on **every request**. 
Poorly written regular expressions can cause catastrophic backtracking, severely degrading performance or hanging your application. - -**Dangerous patterns to avoid:** - -```typescript -// ❌ BAD: Nested quantifiers cause exponential backtracking -{ match: /(a+)+b/ } -{ match: /^\/api\/v\d+\/.*$/ } // .* with anchors can backtrack -{ match: /(\w+)*@/ } // Nested quantifiers - -// ❌ BAD: Overlapping alternatives -{ match: /(a|a)+/ } -{ match: /(\d+|\d+\.)+/ } -``` - -**Safe patterns:** - -```typescript -// ✅ GOOD: Simple, non-nested quantifiers -{ match: /^\/v\d+\/users/ } // No trailing .* -{ match: /\/users\/\d+$/ } // Anchored end, simple pattern -{ match: /\.(json|xml)$/ } // Non-overlapping alternatives - -// ✅ BETTER: Use string matchers when possible (faster, no ReDoS risk) -{ startsWith: '/api/v2' } // Instead of /^\/api\/v2/ -{ endsWith: '.json' } // Instead of /\.json$/ -{ includes: '/users/' } // Instead of /\/users\// -``` - -**Best practice:** Prefer string-based matchers (`startsWith`, `endsWith`, `includes`, `is`) over regex. They're faster and immune to ReDoS. Only use `match` when you need pattern complexity that strings can't express. -::: - -## Request Serializers - -Serializers generate unique keys for identifying requests. These keys are used by deduplication, caching, and rate limiting to determine which requests should share state. - -### Built-in Serializers - -FetchEngine provides two built-in serializers, each optimized for different use cases: - -#### Request Serializer (Default for Cache & Dedupe) - -Generates keys based on full request identity: method, path, query string, payload, and stable headers. 
- -```typescript -// Key format: method|path+query|payload|headers -// Example: "GET|/users/123?page=1|undefined|{"accept":"application/json","authorization":"Bearer token"}" -``` - -**Stable Headers Only:** The request serializer only includes semantically meaningful headers that affect response content: - -| Included Headers | Purpose | -|-----------------|---------| -| `authorization` | Different users get different responses | -| `accept` | Different response formats (JSON, XML, etc.) | -| `accept-language` | Localized responses | -| `content-type` | Format of request payload (for POST/PUT) | -| `accept-encoding` | Response compression format | - -**Excluded Headers (Dynamic):** -- `X-Timestamp`, `Date` - Change every request -- `X-HMAC-Signature` - Computed per-request -- `X-Request-Id`, `X-Correlation-Id` - Unique per-request -- `Cache-Control`, `Pragma` - Control directives, not identity - -This prevents cache pollution from dynamic headers that would make every request unique. - -#### Endpoint Serializer (Default for Rate Limit) - -Generates keys based on endpoint identity only: method and pathname (excludes query string and payload). - -```typescript -// Key format: method|pathname -// Example: "GET|/users/123" -``` - -This groups all requests to the same endpoint together, ideal for rate limiting where you want to protect an endpoint from overload regardless of specific parameters. 
- -### Using Built-in Serializers - -```typescript -import { endpointSerializer, requestSerializer } from '@logosdx/fetch'; - -// Use endpoint serializer for cache (group by endpoint) -const api = new FetchEngine({ - baseUrl: 'https://api.example.com', - cachePolicy: { - serializer: endpointSerializer, // All /users/123?page=1 and /users/123?page=2 share cache - ttl: 60000 - } -}); - -// Use request serializer for rate limiting (per unique request) -const api2 = new FetchEngine({ - baseUrl: 'https://api.example.com', - rateLimitPolicy: { - serializer: requestSerializer, // Each unique request gets its own bucket - maxCalls: 100, - windowMs: 60000 - } -}); -``` - -### Custom Serializers - -Create custom serializers when the built-ins don't match your needs: - -```typescript -// User-scoped rate limiting -const userSerializer = (ctx: RequestKeyOptions) => { - return `user:${ctx.state?.userId ?? 'anonymous'}|${ctx.method}|${ctx.url.pathname}`; -}; - -const api = new FetchEngine({ - baseUrl: 'https://api.example.com', - rateLimitPolicy: { - serializer: userSerializer, // Each user gets their own rate limit bucket - maxCalls: 100, - windowMs: 60000 - } -}); - -// Tenant-scoped caching -const tenantSerializer = (ctx: RequestKeyOptions) => { - const tenant = ctx.headers?.['X-Tenant-ID'] ?? 
'default'; - return `${tenant}|${ctx.method}|${ctx.url.pathname}${ctx.url.search}`; -}; - -const multiTenantApi = new FetchEngine({ - baseUrl: 'https://api.example.com', - cachePolicy: { - serializer: tenantSerializer, // Each tenant has separate cache - ttl: 60000 - } -}); - -// Ignore certain params for caching -const ignoreTimestampSerializer = (ctx: RequestKeyOptions) => { - const url = new URL(ctx.url); - url.searchParams.delete('_t'); // Remove timestamp param - url.searchParams.delete('nocache'); - return `${ctx.method}|${url.pathname}${url.search}`; -}; -``` - -### Serializer Signature - -```typescript -type RequestSerializer = ( - ctx: RequestKeyOptions -) => string; - -interface RequestKeyOptions { - method: string; // HTTP method (uppercase) - path: string; // Original path from request - url: URL; // Full URL object (includes pathname, search, etc.) - payload?: unknown; // Request body (if any) - headers?: H; // Request headers - params?: P; // URL parameters - state?: S; // Instance state -} -``` - -### Per-Rule Serializers - -Override serializers for specific routes: - -```typescript -const api = new FetchEngine({ - baseUrl: 'https://api.example.com', - cachePolicy: { - enabled: true, - ttl: 60000, - rules: [ - // GraphQL: cache by operation name only - { - is: '/graphql', - serializer: (ctx) => `graphql:${ctx.payload?.operationName ?? 'unknown'}` - }, - - // Search: ignore pagination for cache - { - startsWith: '/search', - serializer: (ctx) => { - const url = new URL(ctx.url); - url.searchParams.delete('page'); - url.searchParams.delete('limit'); - return `search:${url.search}`; - } - }, - - // User profile: cache per user - { - match: /^\/users\/\d+$/, - serializer: (ctx) => `user:${ctx.url.pathname}` - } - ] - } -}); -``` - -## Event System - -FetchEngine extends EventTarget with comprehensive lifecycle events, providing observability in all JavaScript environments. 
- -### Event Types - -```typescript -enum FetchEventNames { - // Request lifecycle - 'fetch-before' = 'fetch-before', - 'fetch-after' = 'fetch-after', - 'fetch-abort' = 'fetch-abort', - 'fetch-error' = 'fetch-error', - 'fetch-response' = 'fetch-response', - 'fetch-retry' = 'fetch-retry', - - // Configuration changes - 'fetch-header-add' = 'fetch-header-add', - 'fetch-header-remove' = 'fetch-header-remove', - 'fetch-param-add' = 'fetch-param-add', - 'fetch-param-remove' = 'fetch-param-remove', - 'fetch-state-set' = 'fetch-state-set', - 'fetch-state-reset' = 'fetch-state-reset', - 'fetch-url-change' = 'fetch-url-change', - 'fetch-modify-options-change' = 'fetch-modify-options-change', - 'fetch-modify-method-options-change' = 'fetch-modify-method-options-change', - - // Deduplication events - 'fetch-dedupe-start' = 'fetch-dedupe-start', - 'fetch-dedupe-join' = 'fetch-dedupe-join', - - // Caching events - 'fetch-cache-hit' = 'fetch-cache-hit', - 'fetch-cache-stale' = 'fetch-cache-stale', - 'fetch-cache-miss' = 'fetch-cache-miss', - 'fetch-cache-set' = 'fetch-cache-set', - 'fetch-cache-revalidate' = 'fetch-cache-revalidate', - 'fetch-cache-revalidate-error' = 'fetch-cache-revalidate-error', - - // Rate limiting events - 'fetch-ratelimit-wait' = 'fetch-ratelimit-wait', - 'fetch-ratelimit-reject' = 'fetch-ratelimit-reject', - 'fetch-ratelimit-acquire' = 'fetch-ratelimit-acquire' -} -``` - -### Event Methods - -#### `on(event, callback)` - -```typescript -on( - event: E | '*', - callback: (event: FetchEvent) => void -): () => void -``` - -Listen to events. Returns cleanup function. 
- -**Example:** - -```typescript -// Listen to specific event -const cleanup = api.on('fetch-error', (event) => { - console.error('Request failed:', event.error?.message); -}); - -// Listen to all events -api.on('*', (event) => { - console.log(`Event: ${event.type}`, event); -}); - -// Listen to modify options changes -api.on('fetch-modify-options-change', (event) => { - console.log('Global modifier changed:', event.data ? 'set' : 'cleared'); -}); - -// Listen to method-specific modify options changes -api.on('fetch-modify-method-options-change', (event) => { - console.log(`${event.data.method} modifier:`, event.data.fn ? 'set' : 'cleared'); -}); - -// Clean up listener -cleanup(); -``` - -#### `once(event, callback)` - -Listen to event once. - -```typescript -once( - event: E, - callback: (event: FetchEvent) => void -): () => void -``` - -#### `off(event, callback)` - -Remove event listeners. - -```typescript -off( - event: E | '*', - callback?: (event: FetchEvent) => void -): void -``` - -#### `emit(event, data?)` - -Manually emit events. - -```typescript -emit(event: E | Event, data?: unknown): void -``` - -### Event Object Structure - -```typescript -interface FetchEvent { - type: FetchEventNames; - url?: string; - path?: string; - method?: HttpMethods; - headers?: Record; - data?: any; - response?: Response; - error?: FetchError; - attempt?: number; - nextAttempt?: number; - maxAttempts?: number; - delay?: number; - state?: any; - header?: string; - value?: string; - param?: string; -} -``` - -## Lifecycle Management - -### `destroy()` - -```typescript -destroy(): void -``` - -Destroys the FetchEngine instance and cleans up resources. After calling `destroy()`, new requests will throw an error. This method prevents memory leaks by clearing internal state references and automatically removing all event listeners added via `on()` or `once()`. 
- -**Event Listener Cleanup:** -- Listeners added via `on()` or `once()` are **automatically removed** when `destroy()` is called -- Listeners added via `addEventListener()` with your own AbortController must be cleaned up manually -- `on()` returns a cleanup function if you need manual control before destroy - -**Example:** - -```typescript -// Basic cleanup - listeners added via on() automatically cleaned up -const api = new FetchEngine({ baseUrl: 'https://api.example.com' }); - -const cleanup = api.on('fetch-error', (e) => console.error(e)); - -// destroy() automatically removes the listener above -api.destroy(); - -// Attempting requests after destroy throws error -await api.get('/users'); // throws: "Cannot make requests on destroyed FetchEngine instance" - -// Option 1: Use on() with cleanup function (recommended) -const errorCleanup = api.on('fetch-error', errorHandler); -const responseCleanup = api.on('fetch-response', responseHandler); - -// Manual cleanup if needed before destroy -errorCleanup(); -responseCleanup(); - -// Or just destroy - automatically removes all on() listeners -api.destroy(); - -// Option 2: Use off() for manual removal -api.on('fetch-error', errorHandler); -api.on('fetch-response', responseHandler); - -api.off('fetch-error', errorHandler); -api.off('fetch-response', responseHandler); -api.destroy(); - -// Option 3: Use addEventListener with your own AbortController (advanced) -const controller = new AbortController(); - -api.addEventListener('fetch-error', errorHandler, { signal: controller.signal }); -api.addEventListener('fetch-response', responseHandler, { signal: controller.signal }); - -controller.abort(); // Required - not automatic -api.destroy(); - -// Component lifecycle integration (simplest approach) -class MyComponent { - - constructor() { - - this.api = new FetchEngine({ baseUrl: 'https://api.example.com' }); - - // on() automatically cleaned up on destroy() - api.on('fetch-error', this.handleError); - 
api.on('fetch-response', this.handleResponse); - } - - async fetchData() { - - if (this.api.isDestroyed()) { - - throw new Error('API instance destroyed'); - } - return this.api.get('/data'); - } - - destroy() { - - // Automatically removes all listeners added via on() - this.api.destroy(); - this.api = null; - } -} -``` - -### `isDestroyed()` - -```typescript -isDestroyed(): boolean -``` - -Checks if the FetchEngine instance has been destroyed. - -**Example:** - -```typescript -if (!api.isDestroyed()) { - - await api.get('/users'); -} -``` - -## Error Handling - -### FetchError - -```typescript -interface FetchError> extends Error { - data: T | null; // Response body (if parseable) - status: number; // HTTP status code - method: HttpMethods; // HTTP method used - path: string; // Request path - aborted?: boolean; // Whether request was cancelled (any cause) - timedOut?: boolean; // Whether abort was caused by timeout - attempt?: number; // Retry attempt number - step?: 'fetch' | 'parse' | 'response'; // Where error occurred - url?: string; // Full request URL - headers?: H; // Response headers - - // Helper methods for distinguishing 499 error types - isCancelled(): boolean; // Manual abort (user/app initiated) - isTimeout(): boolean; // Timeout fired (attemptTimeout or totalTimeout) - isConnectionLost(): boolean; // Server/network dropped connection -} -``` - -**Important:** - -- Server-aborted responses receive status code `499` (following Nginx convention) -- Parse errors without status codes receive status code `999` - -### FetchError Helper Methods - -All three scenarios below result in status code 499, but have different causes. 
Use these helper methods to distinguish them: - -| Method | Returns `true` when | Use case | -|--------|---------------------|----------| -| `isCancelled()` | Request was manually aborted (not by timeout) | User navigated away, component unmounted | -| `isTimeout()` | Timeout fired (`attemptTimeout` or `totalTimeout`) | Show "request timed out" message | -| `isConnectionLost()` | Server dropped connection or network failed | Show "connection lost" message | - -::: info -All helper methods return `false` for non-499 errors. They only apply to connection-level failures. -::: - -**Example:** - -```typescript -const [response, err] = await attempt(() => api.get('/data')); - -if (err) { - if (err.isCancelled()) { - // User/app intentionally cancelled - don't show error - return; - } - - if (err.isTimeout()) { - toast.warn('Request timed out. Please try again.'); - } - else if (err.isConnectionLost()) { - toast.error('Connection lost. Check your internet.'); - } - else { - // HTTP error (4xx, 5xx) - check err.status directly - toast.error(`Request failed: ${err.message}`); - } -} -``` - -**How it works:** - -The helpers combine multiple error properties to determine the cause: - -```typescript -// isCancelled(): Manual abort (user navigated away, app cancelled) -status === 499 && aborted === true && timedOut !== true - -// isTimeout(): Our timeout fired -status === 499 && timedOut === true - -// isConnectionLost(): Server/network dropped us (we didn't abort) -status === 499 && step === 'fetch' && aborted === false -``` - -### Type Guard - -```typescript -isFetchError(error: unknown): error is FetchError -``` - -**Example:** - -```typescript -const [response, err] = await attempt(() => api.get('/users')); - -if (err) { - if (isFetchError(err)) { - // Types are available - console.log('HTTP Error:', err.status, err.message); - console.log('Failed at step:', err.step); - console.log('Response data:', err.data); - } else { - console.log('Network or other error:', 
err.message); - } -} -``` - -## Type Definitions - -### Common Types - -```typescript -type HttpMethods = 'GET' | 'POST' | 'PUT' | 'DELETE' | 'OPTIONS' | 'PATCH' | string; - -type Headers> = H & Record; - -type Params

> = P & Record; - -interface RequestOpts { - method: HttpMethods; - url: string; - headers: Headers; - params: Params

; - payload?: any; - /** @deprecated Use totalTimeout instead */ - timeout?: number; - totalTimeout?: number; - attemptTimeout?: number; - retry?: RetryConfig | false; -} -``` - -### Route Matching Types - -```typescript -interface MatchTypes { - is?: string; // Exact path match - startsWith?: string; // Path prefix match - endsWith?: string; // Path suffix match - includes?: string; // Path contains substring - match?: RegExp; // Regular expression match -} - -interface RequestKeyOptions { - method: string; - path: string; - payload?: unknown; - headers?: H; - params?: P; - state?: S; -} - -type RequestSerializer = (ctx: RequestKeyOptions) => string; -``` - -### Deduplication Types - -```typescript -interface DeduplicationConfig { - enabled?: boolean; // Default: true - methods?: HttpMethod[]; // Default: ['GET'] - serializer?: RequestSerializer; // Default: defaultRequestSerializer - shouldDedupe?: (ctx: RequestKeyOptions) => boolean; - rules?: DedupeRule[]; -} - -interface DedupeRule extends MatchTypes { - methods?: HttpMethod[]; - enabled?: boolean; - serializer?: RequestSerializer; -} -``` - -### Caching Types - -```typescript -interface CacheConfig { - enabled?: boolean; // Default: true - methods?: HttpMethod[]; // Default: ['GET'] - ttl?: number; // Default: 60000 (1 minute) - staleIn?: number; // Default: undefined (no SWR) - serializer?: RequestSerializer; - skip?: (ctx: RequestKeyOptions) => boolean; - rules?: CacheRule[]; -} - -interface CacheRule extends MatchTypes { - methods?: HttpMethod[]; - enabled?: boolean; - ttl?: number; - staleIn?: number; - serializer?: RequestSerializer; - skip?: (ctx: RequestKeyOptions) => boolean; -} -``` - -### Rate Limiting Types - -```typescript -interface RateLimitConfig { - enabled?: boolean; // Default: true - methods?: HttpMethod[]; // Default: all methods - maxCalls?: number; // Default: 100 - windowMs?: number; // Default: 60000 (1 minute) - waitForToken?: boolean; // Default: true - serializer?: 
RequestSerializer; - shouldRateLimit?: (ctx: RequestKeyOptions) => boolean; - onRateLimit?: (ctx: RequestKeyOptions, waitTimeMs: number) => void | Promise; - rules?: RateLimitRule[]; -} - -interface RateLimitRule extends MatchTypes { - methods?: HttpMethod[]; - enabled?: boolean; - maxCalls?: number; - windowMs?: number; - waitForToken?: boolean; - serializer?: RequestSerializer; -} -``` - -### TypeScript Module Declaration - -Extend interfaces for better type safety in your application: - -```typescript -declare module '@logosdx/fetch' { - namespace FetchEngine { - interface InstanceHeaders { - Authorization?: string; - 'Content-Type'?: string; - 'X-API-Key'?: string; - 'X-User-ID'?: string; - } - - interface InstanceParams { - version?: string; - format?: 'json' | 'xml'; - locale?: string; - } - - interface InstanceResponseHeaders extends Record { - 'x-rate-limit-remaining'?: string; - 'x-rate-limit-reset'?: string; - 'x-request-id'?: string; - 'content-type'?: string; - } - - interface InstanceState { - authToken?: string; - userId?: string; - sessionId?: string; - preferences?: { - theme: 'light' | 'dark'; - language: string; - }; - } - } -} - -// Now both custom instances and the global instance are typed -import fetch, { get, post } from '@logosdx/fetch'; - -// All methods are properly typed with your custom interfaces -fetch.addHeader('X-API-Key', 'key123'); // ✅ Typed -fetch.setState('authToken', 'token'); // ✅ Typed - -// Response is properly typed with FetchResponse including typed config -const [response] = await attempt(() => get('/api/data')); // ✅ Typed -if (response) { - response.data; // ✅ Typed as User - response.status; // ✅ Typed as number - response.headers; // ✅ Typed as Partial - response.headers['x-rate-limit-remaining']; // ✅ Typed access to response headers - response.config.headers; // ✅ Typed as InstanceHeaders - response.config.params; // ✅ Typed as InstanceParams -} -``` - -## Advanced Configuration Examples - -### Production Setup - 
-```typescript -const api = new FetchEngine({ - baseUrl: process.env.API_BASE_URL!, - defaultType: 'json', - totalTimeout: 30000, // 30s max for entire operation - attemptTimeout: 10000, // 10s per attempt - - // Global headers - headers: { - 'Content-Type': 'application/json', - 'Accept': 'application/json' - }, - - // Request deduplication - prevent duplicate concurrent requests - dedupePolicy: { - enabled: true, - methods: ['GET'], - rules: [ - { includes: '/realtime', enabled: false }, - { includes: '/stream', enabled: false } - ] - }, - - // Response caching with SWR for fast responses - cachePolicy: { - enabled: true, - methods: ['GET'], - ttl: 60000, // 1 minute - staleIn: 30000, // Stale after 30 seconds - rules: [ - { startsWith: '/static', ttl: 3600000 }, // 1 hour for static - { startsWith: '/user/me', ttl: 300000 }, // 5 minutes for profile - { includes: '/realtime', enabled: false } // No caching for realtime - ] - }, - - // Rate limiting - protect against overwhelming the API - rateLimitPolicy: { - enabled: true, - maxCalls: 100, // 100 requests per minute - windowMs: 60000, - waitForToken: true, // Wait rather than reject - rules: [ - { startsWith: '/api/search', maxCalls: 10 }, // Stricter for search - { startsWith: '/api/bulk', waitForToken: false }, // Reject bulk if limited - { startsWith: '/health', enabled: false } // No limits for health - ] - }, - - // Authentication and context injection - modifyOptions: (opts, state) => { - if (state.authToken) { - opts.headers.Authorization = `Bearer ${state.authToken}`; - } - if (state.userId) { - opts.headers['X-User-ID'] = state.userId; - } - if (state.sessionId) { - opts.headers['X-Session-ID'] = state.sessionId; - } - return opts; - }, - - // Intelligent retry logic - retry: { - maxAttempts: 3, - baseDelay: 1000, - useExponentialBackoff: true, - shouldRetry: (error, attempt) => { - // Don't retry if user aborted - if (error.aborted) return false; - - // Don't retry client errors except rate limits - 
if (error.status >= 400 && error.status < 500 && error.status !== 429) { - return false; - } - - // Respect rate limit headers - if (error.status === 429) { - const retryAfter = error.headers?.['retry-after']; - return retryAfter ? parseInt(retryAfter) * 1000 : 5000; - } - - // Retry server errors and network failures - return error.status >= 500 || !error.status; - } - }, - - // Request/response validation - validate: { - state: (state) => { - if (process.env.NODE_ENV === 'production' && !state.authToken) { - throw new Error('Authentication required in production'); - } - } - }, - - // Custom response type detection - determineType: (response) => { - const contentType = response.headers.get('content-type'); - - if (contentType?.includes('application/vnd.api+json')) { - return 'json'; // JSON:API responses - } - - if (response.url.includes('/download/')) { - return 'blob'; // Force blob for downloads - } - - return FetchEngine.useDefault; // Use built-in detection - } -}); - -// Production monitoring -api.on('fetch-error', (event) => { - errorReporting.captureException(event.error, { - tags: { - endpoint: event.path, - method: event.method, - status: event.error?.status - }, - extra: { - attempt: event.attempt, - userId: api.getState().userId - } - }); -}); - -api.on('fetch-after', (event) => { - metrics.timing('api.request', event.duration, { - endpoint: event.path, - method: event.method, - status: event.response?.status - }); -}); - -// Cache monitoring -api.on('fetch-cache-hit', (event) => { - metrics.increment('api.cache.hit', { path: event.path }); -}); - -api.on('fetch-cache-miss', (event) => { - metrics.increment('api.cache.miss', { path: event.path }); -}); - -api.on('fetch-cache-stale', (event) => { - metrics.increment('api.cache.stale', { path: event.path }); -}); - -// Deduplication monitoring -api.on('fetch-dedupe-join', (event) => { - metrics.increment('api.dedupe.saved', { path: event.path }); - logger.debug(`Request deduplicated: ${event.key}, 
waiters: ${event.waitingCount}`); -}); -``` - -### Development Setup - -```typescript -const isDev = process.env.NODE_ENV === 'development'; - -const api = new FetchEngine({ - baseUrl: 'http://localhost:3001/api', - totalTimeout: isDev ? 60000 : 30000, // Longer total timeout in dev - attemptTimeout: isDev ? 30000 : 10000, // Longer per-attempt in dev - retry: isDev ? false : { // No retries in dev, 3 retries in prod - maxAttempts: 3, - baseDelay: 1000 - } -}); - -// Development-only logging -if (isDev) { - api.on('*', (event) => { - console.group(`🌐 API ${event.type}`); - console.log('Event:', event); - console.groupEnd(); - }); -} -``` - -## Policy Architecture - -FetchEngine's resilience policies (deduplication, caching, rate limiting) share a common architecture that enables consistent behavior and efficient configuration resolution. - -### Three-Method Pattern - -All policies implement the same three-method pattern: - -``` -┌─────────────────────────────────────────────────────────────┐ -│ ResiliencePolicy │ -├─────────────────────────────────────────────────────────────┤ -│ init(config) Parse config → Initialize state (O(1)) │ -│ resolve(...) Memoized lookup + dynamic checks (O(1)*) │ -│ compute(...) Rule matching (O(n) first time only) │ -└─────────────────────────────────────────────────────────────┘ - * O(1) amortized due to memoization -``` - -1. **`init`**: Called during FetchEngine construction. Parses configuration, validates rules, and sets up internal state. - -2. **`resolve`**: Called for every request. Returns the effective policy configuration by combining memoized rule matching with dynamic skip callbacks. - -3. **`compute`**: Called once per unique method+path combination. Performs O(n) rule matching and caches the result. - -### Configuration Resolution - -When a request is made, each policy resolves its configuration in order: - -``` -Request → Policy.resolve(method, path, context) - │ - ├── Check memoized cache (O(1)) - │ └── Cache miss? 
→ compute() → cache result - │ - ├── Check dynamic skip callback - │ └── Skip? → return null - │ - └── Return merged rule (policy defaults + matched rule) -``` - -### Rule Matching Priority - -Rules are evaluated in declaration order. The first matching rule wins: - -```typescript -rules: [ - { is: '/users', ttl: 30000 }, // Checked first (exact match) - { startsWith: '/users', ttl: 60000 }, // Checked second - { match: /^\/users/, ttl: 120000 } // Checked third -] -// Request to '/users' matches first rule (30s TTL) -// Request to '/users/123' matches second rule (60s TTL) -``` - -### Policy Execution Order - -Policies are evaluated in a specific order during request processing: - -``` -Request - │ - ├── 1. Rate Limit (guard) ─────────┐ - │ └── Wait or reject │ - │ │ - ├── 2. Cache Check ────────────────┤ - │ └── Hit? Return cached │ - │ │ - ├── 3. Dedupe Check ───────────────┤ - │ └── In-flight? Join it │ - │ │ - ├── 4. Network Request ────────────┤ - │ │ - ├── 5. Store Cache (on success) ───┤ - │ │ - └── Response ──────────────────────┘ -``` - -**Key implications:** -- Rate limiting runs **before** cache checks - cached responses don't consume rate limit tokens -- Deduplication runs **after** cache checks - cache hits return immediately without dedupe -- Only the request initiator consumes a rate limit token; joiners share the result - -### Memoization Strategy - -Rule matching results are cached by `method:path` key: - -```typescript -// First request to GET /users/123 -resolve('GET', '/users/123', ctx) - → compute() runs, caches result - → rulesCache.set('GET:/users/123', resolvedRule) - -// Subsequent requests to same endpoint -resolve('GET', '/users/123', ctx) - → rulesCache.get('GET:/users/123') // O(1) hit - → Check skip callback - → Return cached rule -``` - -This means: -- First request to each endpoint: O(n) rule matching -- Subsequent requests: O(1) cache lookup -- Skip callbacks always run (they depend on request-specific context) - -### Policy State 
- -Each policy maintains its own internal state: - -```typescript -interface PolicyInternalState { - enabled: boolean; // Global enable/disable - methods: Set; // Applicable HTTP methods - serializer: RequestSerializer; // Key generation function - rulesCache: Map; // Memoized rule lookups -} -``` - -### Extending Policies - -While the built-in policies cover most use cases, the architecture is designed for extensibility. Each policy class extends `ResiliencePolicy` and implements: - -- `getDefaultSerializer()` - Returns the default key generation function -- `getDefaultMethods()` - Returns which HTTP methods are enabled by default -- `mergeRuleWithDefaults(rule)` - Merges matched rules with policy defaults - -This shared base ensures consistent configuration handling across all resilience features. diff --git a/docs/packages/fetch/advanced.md b/docs/packages/fetch/advanced.md new file mode 100644 index 0000000..2c74907 --- /dev/null +++ b/docs/packages/fetch/advanced.md @@ -0,0 +1,716 @@ +--- +title: Advanced +description: Type definitions, TypeScript patterns, and production examples for FetchEngine. +--- + +# Advanced + + +Advanced TypeScript patterns, complete type definitions, and production configuration examples for FetchEngine. + +[[toc]] + + +## Type Definitions + + +### Common Types + + +```typescript +type HttpMethods = 'GET' | 'POST' | 'PUT' | 'DELETE' | 'OPTIONS' | 'PATCH' | string; + +type Headers> = H & Record; + +type Params

> = P & Record; + +interface RequestOpts { + + method: HttpMethods; + url: string; + headers: Headers; + params: Params

; + payload?: any; + /** @deprecated Use totalTimeout instead */ + timeout?: number; + totalTimeout?: number; + attemptTimeout?: number; + retry?: RetryConfig | false; +} +``` + + +### Route Matching Types + + +```typescript +interface MatchTypes { + + is?: string; // Exact path match + startsWith?: string; // Path prefix match + endsWith?: string; // Path suffix match + includes?: string; // Path contains substring + match?: RegExp; // Regular expression match +} + +interface RequestKeyOptions { + + method: string; + path: string; + url: URL; + payload?: unknown; + headers?: H; + params?: P; + state?: S; +} + +type RequestSerializer = (ctx: RequestKeyOptions) => string; +``` + + +### Deduplication Types + + +```typescript +interface DeduplicationConfig { + + enabled?: boolean; // Default: true + methods?: HttpMethod[]; // Default: ['GET'] + serializer?: RequestSerializer; // Default: defaultRequestSerializer + shouldDedupe?: (ctx: RequestKeyOptions) => boolean; + rules?: DedupeRule[]; +} + +interface DedupeRule extends MatchTypes { + + methods?: HttpMethod[]; + enabled?: boolean; + serializer?: RequestSerializer; +} +``` + + +### Caching Types + + +```typescript +interface CacheConfig { + + enabled?: boolean; // Default: true + methods?: HttpMethod[]; // Default: ['GET'] + ttl?: number; // Default: 60000 (1 minute) + staleIn?: number; // Default: undefined (no SWR) + serializer?: RequestSerializer; + skip?: (ctx: RequestKeyOptions) => boolean; + rules?: CacheRule[]; +} + +interface CacheRule extends MatchTypes { + + methods?: HttpMethod[]; + enabled?: boolean; + ttl?: number; + staleIn?: number; + serializer?: RequestSerializer; + skip?: (ctx: RequestKeyOptions) => boolean; +} +``` + + +### Rate Limiting Types + + +```typescript +interface RateLimitConfig { + + enabled?: boolean; // Default: true + methods?: HttpMethod[]; // Default: all methods + maxCalls?: number; // Default: 100 + windowMs?: number; // Default: 60000 (1 minute) + waitForToken?: boolean; // 
Default: true + serializer?: RequestSerializer; + shouldRateLimit?: (ctx: RequestKeyOptions) => boolean; + onRateLimit?: (ctx: RequestKeyOptions, waitTimeMs: number) => void | Promise; + rules?: RateLimitRule[]; +} + +interface RateLimitRule extends MatchTypes { + + methods?: HttpMethod[]; + enabled?: boolean; + maxCalls?: number; + windowMs?: number; + waitForToken?: boolean; + serializer?: RequestSerializer; +} +``` + + +## TypeScript Customization + + +FetchEngine supports two approaches for custom types: + +1. **Module Augmentation** - Define types once, apply globally to all instances +2. **Generic Parameters** - Pass types explicitly per instance + +### Module Augmentation (Recommended) + +Augment the `FetchEngine` namespace to define types once for your entire application. All FetchEngine instances and the global API will use your custom types: + +```typescript +declare module '@logosdx/fetch' { + + namespace FetchEngine { + + interface InstanceHeaders { + Authorization?: string; + 'Content-Type'?: string; + 'X-API-Key'?: string; + 'X-User-ID'?: string; + } + + interface InstanceParams { + version?: string; + format?: 'json' | 'xml'; + locale?: string; + } + + interface InstanceResponseHeaders extends Record { + 'x-rate-limit-remaining'?: string; + 'x-rate-limit-reset'?: string; + 'x-request-id'?: string; + 'content-type'?: string; + } + + interface InstanceState { + authToken?: string; + userId?: string; + sessionId?: string; + preferences?: { + theme: 'light' | 'dark'; + language: string; + }; + } + } +} + +// Now ALL FetchEngine instances use your custom types automatically +import { FetchEngine } from '@logosdx/fetch'; +import { attempt } from '@logosdx/utils'; + +const api = new FetchEngine({ baseUrl: 'https://api.example.com' }); + +// All methods are properly typed with your augmented interfaces +api.headers.set('X-API-Key', 'key123'); // Typed - knows X-API-Key exists +api.state.set('authToken', 'token'); // Typed - knows authToken exists + +// 
Response includes your typed headers +const [response] = await attempt(() => api.get('/api/data')); +if (response) { + response.data; // User + response.status; // number + response.headers; // Partial + response.headers['x-rate-limit-remaining']; // string | undefined + response.config.headers; // InstanceHeaders + response.config.params; // InstanceParams +} +``` + + +### Generic Parameters (Per-Instance) + +For cases where different instances need different types, pass generics directly: + +```typescript +interface ServiceHeaders { + 'X-Service-Key': string; + 'X-Trace-ID'?: string; +} + +interface ServiceState { + serviceToken: string; +} + +// Types apply only to this instance +const serviceApi = new FetchEngine({ + baseUrl: 'https://internal-service.example.com' +}); + +serviceApi.headers.set('X-Service-Key', 'key'); // Typed +serviceApi.state.set('serviceToken', 'token'); // Typed +``` + +**When to use each approach:** + +| Approach | Use Case | +|----------|----------| +| Module Augmentation | Single API, consistent types across app | +| Generic Parameters | Multiple APIs with different type requirements | +``` + + +## Production Setup + + +```typescript +const api = new FetchEngine({ + baseUrl: process.env.API_BASE_URL!, + defaultType: 'json', + totalTimeout: 30000, // 30s max for entire operation + attemptTimeout: 10000, // 10s per attempt + + // Global headers + headers: { + 'Content-Type': 'application/json', + 'Accept': 'application/json' + }, + + // Request deduplication - prevent duplicate concurrent requests + dedupePolicy: { + enabled: true, + methods: ['GET'], + rules: [ + { includes: '/realtime', enabled: false }, + { includes: '/stream', enabled: false } + ] + }, + + // Response caching with SWR for fast responses + cachePolicy: { + enabled: true, + methods: ['GET'], + ttl: 60000, // 1 minute + staleIn: 30000, // Stale after 30 seconds + rules: [ + { startsWith: '/static', ttl: 3600000 }, // 1 hour for static + { startsWith: '/user/me', ttl: 
300000 }, // 5 minutes for profile + { includes: '/realtime', enabled: false } // No caching for realtime + ] + }, + + // Rate limiting - protect against overwhelming the API + rateLimitPolicy: { + enabled: true, + maxCalls: 100, // 100 requests per minute + windowMs: 60000, + waitForToken: true, // Wait rather than reject + rules: [ + { startsWith: '/api/search', maxCalls: 10 }, // Stricter for search + { startsWith: '/api/bulk', waitForToken: false }, // Reject bulk if limited + { startsWith: '/health', enabled: false } // No limits for health + ] + }, + + // Authentication and context injection + modifyConfig: (opts, state) => { + if (state.authToken) { + opts.headers.Authorization = `Bearer ${state.authToken}`; + } + if (state.userId) { + opts.headers['X-User-ID'] = state.userId; + } + if (state.sessionId) { + opts.headers['X-Session-ID'] = state.sessionId; + } + return opts; + }, + + // Intelligent retry logic + retry: { + maxAttempts: 3, + baseDelay: 1000, + useExponentialBackoff: true, + shouldRetry: (error, attempt) => { + // Don't retry if user aborted + if (error.aborted) return false; + + // Don't retry client errors except rate limits + if (error.status >= 400 && error.status < 500 && error.status !== 429) { + return false; + } + + // Respect rate limit headers + if (error.status === 429) { + const retryAfter = error.headers?.['retry-after']; + return retryAfter ? 
parseInt(retryAfter) * 1000 : 5000; + } + + // Retry server errors and network failures + return error.status >= 500 || !error.status; + } + }, + + // Request/response validation + validate: { + state: (state) => { + if (process.env.NODE_ENV === 'production' && !state.authToken) { + throw new Error('Authentication required in production'); + } + } + }, + + // Custom response type detection + determineType: (response) => { + const contentType = response.headers.get('content-type'); + + if (contentType?.includes('application/vnd.api+json')) { + return 'json'; // JSON:API responses + } + + if (response.url.includes('/download/')) { + return 'blob'; // Force blob for downloads + } + + return FetchEngine.useDefault; // Use built-in detection + } +}); + +// Production monitoring +api.on('error', (event) => { + errorReporting.captureException(event.error, { + tags: { + endpoint: event.path, + method: event.method, + status: event.error?.status + }, + extra: { + attempt: event.attempt, + userId: api.state.get().userId + } + }); +}); + +api.on('after-request', (event) => { + metrics.timing('api.request', event.duration, { + endpoint: event.path, + method: event.method, + status: event.response?.status + }); +}); + +// Cache monitoring +api.on('cache-hit', (event) => { + metrics.increment('api.cache.hit', { path: event.path }); +}); + +api.on('cache-miss', (event) => { + metrics.increment('api.cache.miss', { path: event.path }); +}); + +api.on('cache-stale', (event) => { + metrics.increment('api.cache.stale', { path: event.path }); +}); + +// Deduplication monitoring +api.on('dedupe-join', (event) => { + metrics.increment('api.dedupe.saved', { path: event.path }); + logger.debug(`Request deduplicated: ${event.key}, waiters: ${event.waitingCount}`); +}); +``` + + +## Development Setup + + +```typescript +const isDev = process.env.NODE_ENV === 'development'; + +const api = new FetchEngine({ + baseUrl: 'http://localhost:3001/api', + totalTimeout: isDev ? 
60000 : 30000, // Longer total timeout in dev + attemptTimeout: isDev ? 30000 : 10000, // Longer per-attempt in dev + retry: isDev ? false : { // No retries in dev, 3 retries in prod + maxAttempts: 3, + baseDelay: 1000 + } +}); + +// Development-only logging +if (isDev) { + api.on(/./, ({ event, data }) => { + console.group(`API ${event}`); + console.log('Data:', data); + console.groupEnd(); + }); +} +``` + + +## Multi-Environment Configuration + + +```typescript +interface EnvironmentConfig { + + baseUrl: string; + timeout: number; + retryEnabled: boolean; +} + +const environments: Record = { + development: { + baseUrl: 'http://localhost:3001/api', + timeout: 60000, + retryEnabled: false + }, + staging: { + baseUrl: 'https://staging-api.example.com', + timeout: 30000, + retryEnabled: true + }, + production: { + baseUrl: 'https://api.example.com', + timeout: 15000, + retryEnabled: true + } +}; + +const env = environments[process.env.NODE_ENV || 'development']; + +const api = new FetchEngine({ + baseUrl: env.baseUrl, + totalTimeout: env.timeout, + retry: env.retryEnabled ? { + maxAttempts: 3, + baseDelay: 1000 + } : false +}); +``` + + +## React Integration + + +```typescript +import { useEffect, useRef } from 'react'; +import { FetchEngine } from '@logosdx/fetch'; + +function useApi() { + + const apiRef = useRef(null); + + useEffect(() => { + apiRef.current = new FetchEngine({ + baseUrl: 'https://api.example.com', + totalTimeout: 10000 + }); + + return () => { + apiRef.current?.destroy(); + apiRef.current = null; + }; + }, []); + + return apiRef.current; +} + +function UserProfile({ userId }: { userId: string }) { + + const api = useApi(); + const [user, setUser] = useState(null); + + useEffect(() => { + if (!api) return; + + const fetchUser = async () => { + const [response, err] = await attempt(() => + api.get(`/users/${userId}`) + ); + if (!err) setUser(response.data); + }; + + fetchUser(); + }, [api, userId]); + + return user ?

<div>{user.name}</div>
:
<div>Loading...</div>
; +} +``` + + +## Service Layer Pattern + + +```typescript +import { FetchEngine, FetchResponse } from '@logosdx/fetch'; +import { attempt } from '@logosdx/utils'; + +class ApiService { + + #api: FetchEngine; + + constructor(baseUrl: string) { + this.#api = new FetchEngine({ + baseUrl, + defaultType: 'json', + totalTimeout: 15000, + cachePolicy: true, + dedupePolicy: true + }); + } + + async getUser(id: string): Promise { + const [response, err] = await attempt(() => + this.#api.get(`/users/${id}`) + ); + return err ? null : response.data; + } + + async createUser(data: CreateUserData): Promise { + const [response, err] = await attempt(() => + this.#api.post('/users', data) + ); + return err ? null : response.data; + } + + async updateUser(id: string, data: UpdateUserData): Promise { + const [response, err] = await attempt(() => + this.#api.patch(`/users/${id}`, data) + ); + + // Invalidate cache on successful update + if (!err) { + await this.#api.invalidatePath(`/users/${id}`); + } + + return err ? 
null : response.data; + } + + setAuthToken(token: string) { + this.#api.headers.set('Authorization', `Bearer ${token}`); + } + + destroy() { + this.#api.destroy(); + } +} + +// Usage +const api = new ApiService('https://api.example.com'); +api.setAuthToken('user-token'); + +const user = await api.getUser('123'); +``` + + +## GraphQL Integration + + +```typescript +const graphqlApi = new FetchEngine({ + baseUrl: 'https://api.example.com', + defaultType: 'json', + + // Dedupe GraphQL queries by operation name + dedupePolicy: { + methods: ['POST'], + rules: [{ + is: '/graphql', + serializer: (ctx) => + `graphql:${ctx.payload?.operationName}:${JSON.stringify(ctx.payload?.variables)}` + }] + }, + + // Cache queries but not mutations + cachePolicy: { + methods: ['POST'], + rules: [{ + is: '/graphql', + skip: (ctx) => { + const query = ctx.payload?.query || ''; + return query.trimStart().startsWith('mutation'); + }, + serializer: (ctx) => + `graphql:${ctx.payload?.operationName}:${JSON.stringify(ctx.payload?.variables)}` + }] + } +}); + +// GraphQL query helper +async function query( + operationName: string, + query: string, + variables?: Record +): Promise { + + const [response, err] = await attempt(() => + graphqlApi.post<{ data: T }>('/graphql', { + operationName, + query, + variables + }) + ); + + return err ? null : response.data.data; +} + +// Usage +const user = await query('GetUser', ` + query GetUser($id: ID!) 
{ + user(id: $id) { + id + name + email + } + } +`, { id: '123' }); +``` + + +## File Upload with Progress + + +```typescript +async function uploadFile(file: File, onProgress?: (percent: number) => void) { + + const formData = new FormData(); + formData.append('file', file); + + // For progress tracking, use XMLHttpRequest or fetch with ReadableStream + // FetchEngine works with FormData natively + const [response, err] = await attempt(() => + api.post<{ url: string }>('/upload', formData, { + headers: { + // Don't set Content-Type - browser will set multipart/form-data with boundary + }, + totalTimeout: 300000 // 5 minutes for large files + }) + ); + + return err ? null : response.data.url; +} +``` + + +## Batch Requests + + +```typescript +async function batchFetch( + paths: string[], + options?: { concurrency?: number } +): Promise> { + + const results = new Map(); + const concurrency = options?.concurrency ?? 5; + + // Process in batches + for (let i = 0; i < paths.length; i += concurrency) { + const batch = paths.slice(i, i + concurrency); + const promises = batch.map(async (path) => { + const [response, err] = await attempt(() => api.get(path)); + results.set(path, err ? null : response.data); + }); + await Promise.all(promises); + } + + return results; +} + +// Usage +const userData = await batchFetch([ + '/users/1', + '/users/2', + '/users/3' +]); +``` diff --git a/docs/packages/fetch/configuration.md b/docs/packages/fetch/configuration.md new file mode 100644 index 0000000..3cf6b32 --- /dev/null +++ b/docs/packages/fetch/configuration.md @@ -0,0 +1,434 @@ +--- +title: Configuration +description: FetchEngine configuration options, headers, parameters, and state management. +--- + +# Configuration + + +FetchEngine provides comprehensive configuration options for customizing HTTP behavior at both the instance and request level. 
+ +[[toc]] + + +## Engine Configuration + + +The `FetchEngine.Config` interface defines all options for creating a FetchEngine instance. + +| Option | Type | Description | +| ------ | ---- | ----------- | +| `baseUrl` (required) | `string` | Base URL for all requests | +| `defaultType` | `'json' \| 'text' \| 'blob' \| 'arrayBuffer' \| 'formData'` | Default response type | +| `totalTimeout` | `number` | Total timeout for entire request lifecycle including retries (ms) | +| `attemptTimeout` | `number` | Per-attempt timeout (ms). Each retry gets a fresh timeout | +| `headers` | `DictAndT` | Default headers for all requests | +| `methodHeaders` | `{ GET?: ..., POST?: ... }` | Method-specific default headers | +| `params` | `DictAndT
<P>
` | Default URL parameters for all requests | +| `methodParams` | `{ GET?: ..., POST?: ... }` | Method-specific default parameters | +| `modifyConfig` | `(opts, state) => opts` | Function to modify request config | +| `modifyMethodConfig` | `{ GET?: fn, POST?: fn }` | Method-specific config modifiers | +| `validate` | `ValidateConfig` | Validators for headers, params, and state | +| `retry` | `RetryConfig \| boolean` | Retry configuration. `true` uses defaults, `false` disables | +| `dedupePolicy` | `boolean \| DeduplicationConfig` | Request deduplication configuration | +| `cachePolicy` | `boolean \| CacheConfig` | Response caching configuration | +| `rateLimitPolicy` | `boolean \| RateLimitConfig` | Rate limiting configuration | +| `determineType` | `(response: Response) => DetermineTypeResult` | Custom response type detection | +| `name` | `string` | Instance name for debugging | +| `onBeforeReq` | `(opts) => void` | Lifecycle hook before each request | +| `onAfterReq` | `(response, opts) => void` | Lifecycle hook after each request | +| `onError` | `(err) => void` | Lifecycle hook on request error | +| `credentials` | `'include' \| 'same-origin' \| 'omit'` | Native fetch credentials mode | +| `mode` | `'cors' \| 'same-origin' \| 'no-cors'` | Native fetch request mode | +| `cache` | `RequestCache` | Native fetch cache mode (browser caching) | +| `redirect` | `'follow' \| 'error' \| 'manual'` | Native fetch redirect handling | +| `referrerPolicy` | `ReferrerPolicy` | Native fetch referrer policy | +| `keepalive` | `boolean` | Keep connection alive after page unload | +| `integrity` | `string` | Subresource integrity hash | + +**Example:** + +```typescript +const api = new FetchEngine({ + baseUrl: 'https://api.example.com', + defaultType: 'json', + totalTimeout: 30000, + attemptTimeout: 10000, + + headers: { + 'Content-Type': 'application/json', + 'Accept': 'application/json' + }, + + retry: { + maxAttempts: 3, + baseDelay: 1000, + useExponentialBackoff: true + }, 
+ + modifyConfig: (opts, state) => { + if (state.authToken) { + opts.headers = { + ...opts.headers, + Authorization: `Bearer ${state.authToken}` + }; + } + return opts; + } +}); +``` + + +## ConfigStore + + +Access and modify configuration at runtime via `api.config`. + + +### `config.get(path?)` + + +Get configuration value by dot-notation path, or entire config if no path provided. + +```typescript +// Get entire config +const config = api.config.get(); + +// Get specific values by path +const baseUrl = api.config.get('baseUrl'); +const maxAttempts = api.config.get('retry.maxAttempts'); +const modifyFn = api.config.get('modifyConfig'); +``` + + +### `config.set(path, value)` / `config.set(partial)` + + +Set configuration value by path or merge partial config. + +```typescript +// Set by path +api.config.set('baseUrl', 'https://new-api.example.com'); +api.config.set('retry.maxAttempts', 5); + +// Merge partial config +api.config.set({ + totalTimeout: 60000, + retry: { maxAttempts: 5 } +}); + +// Change the modifyConfig function at runtime +api.config.set('modifyConfig', (opts, state) => { + if (state.authToken) { + opts.headers = { + ...opts.headers, + Authorization: `Bearer ${state.authToken}` + }; + } + return opts; +}); + +// Change method-specific modifier +api.config.set('modifyMethodConfig', { + POST: (opts, state) => { + if (state.csrfToken) { + opts.headers = { + ...opts.headers, + 'X-CSRF-Token': state.csrfToken + }; + } + return opts; + } +}); + +// Clear a modifier by setting to undefined +api.config.set('modifyConfig', undefined); +``` + +::: info +Setting config emits a `config-change` event, `modify-config-change` event, or `modify-method-config-change` event depending on what was changed. +::: + + +## Headers Management + + +Manage headers at runtime via `api.headers`. + + +### `headers.set(key, value, method?)` / `headers.set(headers, method?)` + + +Add headers globally or for a specific HTTP method. 
+ +```typescript +// Global header +api.headers.set('Authorization', 'Bearer token123'); + +// Multiple headers +api.headers.set({ + 'X-API-Version': 'v2', + 'X-Client': 'web-app' +}); + +// Method-specific header (POST only) +api.headers.set('Content-Type', 'application/json', 'POST'); +``` + + +### `headers.remove(key, method?)` / `headers.remove(keys[], method?)` + + +Remove headers. + +```typescript +// Remove single header +api.headers.remove('Authorization'); + +// Remove multiple headers +api.headers.remove(['X-API-Version', 'X-Client']); + +// Remove method-specific header +api.headers.remove('Content-Type', 'POST'); +``` + + +### `headers.has(key, method?)` + + +Check if a header exists. + +```typescript +if (api.headers.has('Authorization')) { + console.log('Auth header is set'); +} +``` + + +### `headers.resolve(method, overrides?)` + + +Get final resolved headers for a request (merges defaults, method overrides, and request overrides). + +```typescript +const headers = api.headers.resolve('POST', { 'X-Request-ID': '123' }); +``` + + +### `headers.defaults` / `headers.all` + + +Access default headers or all headers including method overrides. + +```typescript +// Default headers only +const defaults = api.headers.defaults; + +// All headers including method overrides +const all = api.headers.all; +// { default: { Authorization: '...' }, POST: { 'Content-Type': '...' } } +``` + + +## Parameters Management + + +Manage URL parameters at runtime via `api.params`. Has the same API as `api.headers`. 
+ + +### `params.set(key, value, method?)` / `params.set(params, method?)` + + +```typescript +// Global parameter +api.params.set('version', 'v1'); + +// Multiple parameters +api.params.set({ + format: 'json', + locale: 'en-US' +}); + +// Method-specific parameter +api.params.set('include_deleted', 'true', 'GET'); +``` + + +### `params.remove(key, method?)` / `params.remove(keys[], method?)` + + +```typescript +api.params.remove('version'); +api.params.remove(['format', 'locale']); +``` + + +### `params.has(key, method?)` + + +```typescript +if (api.params.has('version')) { + console.log('Version param is set'); +} +``` + + +## State Management + + +Manage instance state at runtime via `api.state`. State is available in `modifyConfig` callbacks. + + +### `state.get()` + + +Get a deep clone of the current state. + +```typescript +const state = api.state.get(); +console.log('Current user:', state.userId); +``` + + +### `state.set(key, value)` / `state.set(partial)` + + +Set state by key-value or merge partial state. + +```typescript +// Set single property +api.state.set('authToken', 'bearer-123'); + +// Merge multiple properties +api.state.set({ + userId: 'user-123', + sessionId: 'session-456' +}); +``` + + +### `state.reset()` + + +Reset state to empty object. 
+ +```typescript +api.state.reset(); +console.log(api.state.get()); // {} +``` + + +## Using State in Requests + + +State is passed to `modifyConfig` callbacks, allowing dynamic request modification: + +```typescript +const api = new FetchEngine({ + baseUrl: 'https://api.example.com', + + modifyConfig: (opts, state) => { + // Add auth header from state + if (state.authToken) { + opts.headers = { + ...opts.headers, + Authorization: `Bearer ${state.authToken}` + }; + } + + // Add user ID header + if (state.userId) { + opts.headers = { + ...opts.headers, + 'X-User-ID': state.userId + }; + } + + return opts; + } +}); + +// Set state +api.state.set('authToken', 'my-token'); +api.state.set('userId', 'user-123'); + +// Requests now include auth header and user ID +const { data } = await api.get('/protected-resource'); +``` + + +## Method-Specific Configuration + + +Configure headers, params, or modifyConfig per HTTP method: + +```typescript +const api = new FetchEngine({ + baseUrl: 'https://api.example.com', + + // Global headers + headers: { + 'Accept': 'application/json' + }, + + // POST-specific headers + methodHeaders: { + POST: { 'Content-Type': 'application/json' }, + PUT: { 'Content-Type': 'application/json' } + }, + + // GET-specific params + methodParams: { + GET: { include: 'metadata' } + }, + + // Method-specific config modification + modifyMethodConfig: { + POST: (opts, state) => { + if (state.csrfToken) { + opts.headers = { + ...opts.headers, + 'X-CSRF-Token': state.csrfToken + }; + } + return opts; + } + } +}); +``` + + +## Validation + + +Validate headers, params, and state before requests are made: + +```typescript +const api = new FetchEngine({ + baseUrl: 'https://api.example.com', + + validate: { + headers: (headers, method) => { + if (method === 'POST' && !headers['Content-Type']) { + throw new Error('POST requests require Content-Type'); + } + }, + + params: (params, method) => { + // Validate params + }, + + state: (state) => { + if (!state.authToken) 
{ + throw new Error('Auth token required'); + } + }, + + // Run validation before each request (default: false) + perRequest: { + headers: true, + params: false + } + } +}); +``` diff --git a/docs/packages/fetch/events.md b/docs/packages/fetch/events.md new file mode 100644 index 0000000..7e549d5 --- /dev/null +++ b/docs/packages/fetch/events.md @@ -0,0 +1,433 @@ +--- +title: Events +description: Event system and lifecycle management in FetchEngine. +--- + +# Events + + +FetchEngine extends ObserverEngine with comprehensive lifecycle events for monitoring requests, configuration changes, and policy activity. + +[[toc]] + + +## Event Names + + +Events are organized into categories: + +**Request Lifecycle:** +- `before-request` - Before each request attempt +- `after-request` - After response is parsed and ready (includes `data`) +- `response` - When raw response is received (before parsing) +- `error` - On request failure +- `retry` - Before retry attempt +- `abort` - When request is aborted + +**Property Changes:** +- `header-add` - When header is added +- `header-remove` - When header is removed +- `param-add` - When param is added +- `param-remove` - When param is removed + +**State Changes:** +- `state-set` - When state is updated +- `state-reset` - When state is reset + +**Configuration Changes:** +- `config-change` - When config is modified +- `modify-config-change` - When modifyConfig function changes +- `modify-method-config-change` - When method-specific modifier changes +- `url-change` - When base URL changes + +**Deduplication:** +- `dedupe-start` - New request starts tracking +- `dedupe-join` - Caller joins existing request + +**Cache:** +- `cache-hit` - Fresh cache hit +- `cache-stale` - Stale cache hit (SWR) +- `cache-miss` - Cache miss +- `cache-set` - Entry cached +- `cache-revalidate` - SWR revalidation started +- `cache-revalidate-error` - SWR revalidation failed + +**Rate Limiting:** +- `ratelimit-wait` - Waiting for token +- `ratelimit-reject` - 
Request rejected +- `ratelimit-acquire` - Token acquired + + +## Subscribing to Events + + +### `on(event, callback)` + + +Subscribe to events. Returns a cleanup function. + +```typescript +// Subscribe to specific event +const cleanup = api.on('error', (data) => { + console.error('Request failed:', data.error?.message); +}); + +// Later: cleanup(); +``` + + +### `on(/./, callback)` + + +Subscribe to all events using a regex pattern. Regex listeners receive `{ event, data }` as the first argument. + +```typescript +api.on(/./, ({ event, data }) => { + console.log('Event:', event, data); +}); +``` + + +### `once(event, callback)` + + +Subscribe to event once (auto-removes after first emission). + +```typescript +api.once('after-request', (data) => { + console.log('First request completed'); +}); +``` + + +### `off(event, callback?)` + + +Unsubscribe from events. + +```typescript +const handler = (data) => console.log(data); + +api.on('error', handler); +api.off('error', handler); + +// Remove all listeners for an event +api.off('error'); +``` + + +## Event Data Types + + +### Request Lifecycle Events + + +```typescript +interface EventData { + + state: S; + url?: string | URL; + method?: HttpMethods; + headers?: DictAndT; + params?: DictAndT
<P>
; + error?: Error | FetchError; + response?: Response; + data?: unknown; + payload?: unknown; + attempt?: number; + nextAttempt?: number; + delay?: number; + step?: 'fetch' | 'parse' | 'response'; + status?: number; + path?: string; + aborted?: boolean; + requestStart?: number; // Timestamp (ms) when request entered pipeline + requestEnd?: number; // Timestamp (ms) when request resolved +} +``` + +**Timing fields:** + +| Field | Present in | Description | +|-------|-----------|-------------| +| `requestStart` | All request events | `Date.now()` when the request entered the execution pipeline | +| `requestEnd` | `response`, `error`, `abort` | `Date.now()` when the request resolved (success, error, or abort) | + +`requestStart` is set once at the beginning of execution and flows through all events via the normalized options. `requestEnd` is only added to terminal events where the request has completed. + + +### State Events + + +```typescript +interface StateEventData { + + key?: keyof S; // Key that was set (for single key updates) + value?: S[keyof S] | Partial; // Value that was set + previous?: S; // Previous state + current: S; // Current state after change +} +``` + + +### Property Events + + +```typescript +interface PropertyEventData { + + key?: string | string[]; // Key(s) that were added/removed + value?: string | Partial; // Value that was set + method?: HttpMethods; // HTTP method this applies to +} +``` + + +### Config Events + + +```typescript +interface OptionsEventData { + + path?: string; // Path that was changed + value?: unknown; // Value that was set +} +``` + + +### Deduplication Events + + +```typescript +interface DedupeEventData extends EventData { + + key: string; // Deduplication key + waitingCount?: number; // Number of callers waiting (join events) +} +``` + + +### Cache Events + + +```typescript +interface CacheEventData extends EventData { + + key: string; // Cache key + isStale?: boolean; // Whether entry is stale (SWR) + expiresIn?: 
number; // Time until expiration (ms) +} +``` + + +### Rate Limit Events + + +```typescript +interface RateLimitEventData extends EventData { + + key: string; // Rate limit bucket key + currentTokens: number; // Current tokens in bucket + capacity: number; // Maximum capacity + waitTimeMs: number; // Time until next token (ms) + nextAvailable: Date; // When next token available +} +``` + + +## Event Examples + + +### Request Logging + + +```typescript +api.on('before-request', (data) => { + console.log(`→ ${data.method} ${data.path}`); +}); + +api.on('after-request', (data) => { + console.log(`← ${data.status} ${data.path}`); +}); + +api.on('error', (data) => { + console.error(`✗ ${data.status} ${data.path}: ${data.error?.message}`); +}); +``` + + +### Retry Monitoring + + +```typescript +api.on('retry', (data) => { + console.log(`Retrying ${data.path}`); + console.log(`Attempt ${data.attempt} of ${data.nextAttempt}`); + console.log(`Waiting ${data.delay}ms`); +}); +``` + + +### State Change Tracking + + +```typescript +api.on('state-set', (data) => { + console.log('State changed'); + console.log('Key:', data.key); + console.log('Previous:', data.previous); + console.log('Current:', data.current); +}); +``` + + +### Cache Monitoring + + +```typescript +api.on('cache-hit', (data) => { + console.log('Cache hit:', data.key); + console.log('Expires in:', data.expiresIn, 'ms'); +}); + +api.on('cache-miss', (data) => { + console.log('Cache miss:', data.key); +}); + +api.on('cache-stale', (data) => { + console.log('Stale cache, revalidating:', data.key); +}); +``` + + +### Deduplication Monitoring + + +```typescript +api.on('dedupe-start', (data) => { + console.log('New request:', data.key); +}); + +api.on('dedupe-join', (data) => { + console.log('Joined existing request:', data.key); + console.log('Waiters:', data.waitingCount); +}); +``` + + +### Rate Limit Monitoring + + +```typescript +api.on('ratelimit-wait', (data) => { + console.log('Waiting for rate limit'); + 
console.log('Wait time:', data.waitTimeMs, 'ms'); + console.log('Tokens:', data.currentTokens, '/', data.capacity); +}); + +api.on('ratelimit-reject', (data) => { + console.log('Rate limit exceeded'); + console.log('Would have waited:', data.waitTimeMs, 'ms'); +}); +``` + + +## Lifecycle Management + + +### `destroy()` + + +Destroy the FetchEngine instance. Aborts all pending requests and cleans up resources. + +```typescript +api.destroy(); + +// After destroy, requests throw an error +try { + await api.get('/users'); +} +catch (e) { + console.log(e.message); // "Cannot make requests on destroyed FetchEngine instance" +} +``` + + +### `isDestroyed()` + + +Check if the engine has been destroyed. + +```typescript +if (!api.isDestroyed()) { + await api.get('/users'); +} +``` + + +### React Integration + + +```typescript +import { useEffect, useRef } from 'react'; +import { FetchEngine } from '@logosdx/fetch'; + +function useApi() { + + const apiRef = useRef(null); + + useEffect(() => { + apiRef.current = new FetchEngine({ + baseUrl: '/api' + }); + + // Cleanup on unmount + return () => { + apiRef.current?.destroy(); + apiRef.current = null; + }; + }, []); + + return apiRef; +} +``` + + +## Production Monitoring + + +```typescript +const api = new FetchEngine({ + baseUrl: 'https://api.example.com' +}); + +// Error reporting +api.on('error', (data) => { + errorReporting.captureException(data.error, { + tags: { + endpoint: data.path, + method: data.method, + status: data.status + }, + extra: { + attempt: data.attempt + } + }); +}); + +// Metrics — use built-in requestStart/requestEnd timestamps +api.on('response', (data) => { + metrics.timing('api.request.duration', data.requestEnd - data.requestStart, { + endpoint: data.path, + method: data.method, + status: data.status + }); +}); + +// Cache metrics +api.on('cache-hit', () => metrics.increment('api.cache.hit')); +api.on('cache-miss', () => metrics.increment('api.cache.miss')); + +// Dedupe metrics 
+api.on('dedupe-join', () => metrics.increment('api.dedupe.saved')); +``` diff --git a/docs/packages/fetch/index.md b/docs/packages/fetch/index.md new file mode 100644 index 0000000..1bed394 --- /dev/null +++ b/docs/packages/fetch/index.md @@ -0,0 +1,245 @@ +--- +title: Fetch +description: HTTP that handles failure. Automatically. +--- + +# Fetch + + +Your API calls fail and `fetch` just throws. `@logosdx/fetch` transforms the basic Fetch API into a production-ready HTTP client. Automatic retries with exponential backoff, request deduplication, response caching with stale-while-revalidate, configurable timeouts, request cancellation, and comprehensive lifecycle events. Smart retry strategy for transient failures (network errors, 429s, 500s). Configure once with base URLs and headers, then make type-safe requests that handle network failures gracefully. It's `fetch`, but built for the real world. + + +## Installation + + +::: code-group + +```bash [npm] +npm install @logosdx/fetch +``` + +```bash [yarn] +yarn add @logosdx/fetch +``` + +```bash [pnpm] +pnpm add @logosdx/fetch +``` + +::: + +**CDN:** + +```html + + +``` + + +## Quick Start + + +```typescript +import { FetchEngine } from '@logosdx/fetch' +import { attempt } from '@logosdx/utils' + +// Create HTTP client +const api = new FetchEngine({ + baseUrl: 'https://api.example.com', + defaultType: 'json', + totalTimeout: 5000 +}); + +// Make requests with error handling +const [response, err] = await attempt(() => api.get('/users')); +if (err) { + console.error('Failed to fetch users:', err.message); + return; +} + +// Access response data and metadata +console.log('Users:', response.data); +console.log('Status:', response.status); +console.log('Headers:', response.headers['content-type']); +``` + + +## Global Instance + + +For simple use cases, a pre-configured global instance is available: + +```typescript +// Default export - pre-configured instance +// Uses window.location.origin in browsers, or 
'https://logosdx.dev' as fallback +import fetch from '@logosdx/fetch'; +import { attempt } from '@logosdx/utils'; + +const [response, err] = await attempt(() => fetch.get('/api/users')); +if (!err) { + console.log('Users:', response.data); +} + +// Or import individual methods and managers +import { get, post, headers, state, config, on } from '@logosdx/fetch'; + +// Configure the global instance +headers.set('Authorization', 'Bearer token123'); +state.set('userId', '456'); +config.set('modifyConfig', (opts, state) => { + opts.headers = { ...opts.headers, 'X-Client-Version': '2.1.0' }; + return opts; +}); + +// Make requests +const [userResponse, err] = await attempt(() => get('/api/users/456')); + +// Listen to events +on('error', (event) => console.error('Request failed:', event.error)); +``` + +**Available exports from global instance:** +- Methods: `get`, `post`, `put`, `patch`, `del`, `head`, `options`, `request` +- Managers: `headers`, `params`, `state`, `config` +- Events: `on`, `off` + + +## Core Concepts + + +FetchEngine returns a `FetchResponse` object containing parsed data, response metadata, and request context. All HTTP methods return an `AbortablePromise` that can be cancelled. + +```typescript +// Destructure just the data +const { data: users } = await api.get('/users'); + +// Or access full response +const response = await api.get('/users'); +console.log(response.data); // Parsed data +console.log(response.status); // HTTP status +console.log(response.headers); // Response headers +console.log(response.config); // Request configuration +``` + + +## FetchEngine Class + + +### Constructor + + +```typescript +new FetchEngine(config: FetchEngine.Config) +``` + +Creates a new HTTP client instance with type-safe headers, parameters, and state management. 
+ +**Type Parameters:** + +- `H` - Interface for typed headers +- `P` - Interface for typed URL parameters +- `S` - Interface for typed state +- `RH` - Interface for typed response headers + +**Example:** + +```typescript +interface AppHeaders { + Authorization?: string; + 'X-API-Key'?: string; +} + +interface AppParams { + version?: string; + format?: 'json' | 'xml'; +} + +interface AppState { + userId?: string; + sessionId?: string; +} + +const api = new FetchEngine({ + baseUrl: 'https://api.example.com', + defaultType: 'json', + totalTimeout: 5000 +}); +``` + + +### Public Properties + + +FetchEngine exposes four manager objects for runtime configuration: + +| Property | Type | Description | +|----------|------|-------------| +| `state` | `FetchState` | Instance state management | +| `config` | `ConfigStore` | Configuration access and mutation | +| `headers` | `HeadersManager` | Header management | +| `params` | `ParamsManager
<P>
` | URL parameter management | + +**Example:** + +```typescript +// Set headers +api.headers.set('Authorization', 'Bearer token'); + +// Set state +api.state.set('userId', '123'); + +// Change configuration +api.config.set('baseUrl', 'https://new-api.example.com'); + +// Get current config +const baseUrl = api.config.get('baseUrl'); +``` + + +## HTTP Methods + + +All request methods return an `AbortablePromise>`. + +```typescript +// GET - retrieve data +const { data } = await api.get('/users/123'); + +// POST - create resource +const { data } = await api.post('/users', { name: 'John' }); + +// PUT - replace resource +const { data } = await api.put('/users/123', { name: 'Jane' }); + +// PATCH - partial update +const { data } = await api.patch('/users/123', { email: 'new@example.com' }); + +// DELETE - remove resource +await api.delete('/users/123'); + +// OPTIONS - check capabilities +const { headers } = await api.options('/users'); + +// HEAD - retrieve headers only +const { headers } = await api.head('/users/123'); + +// Generic request method +const { data } = await api.request('PATCH', '/users/123', { + payload: { name: 'Updated' } +}); +``` + + +## Documentation Pages + + +Explore each aspect of FetchEngine in detail: + +- **[Configuration](./configuration)** - Engine config, headers, parameters, and state management +- **[Making Requests](./requests)** - HTTP methods, request options, and AbortablePromise +- **[Resilience](./resilience)** - Retry configuration, timeouts, and error handling +- **[Policies](./policies)** - Request deduplication, response caching, and rate limiting +- **[Events](./events)** - Event system and lifecycle management +- **[Advanced](./advanced)** - TypeScript patterns, serializers, and production examples diff --git a/docs/packages/fetch/policies.md b/docs/packages/fetch/policies.md new file mode 100644 index 0000000..4f1d9a6 --- /dev/null +++ b/docs/packages/fetch/policies.md @@ -0,0 +1,982 @@ +--- +title: Policies +description: 
Request deduplication, response caching, rate limiting, and route matching in FetchEngine. +--- + +# Policies + + +FetchEngine provides three resilience policies that share a common architecture: request deduplication, response caching, and rate limiting. + +[[toc]] + + +## Request Deduplication + + +When multiple parts of your application make identical requests simultaneously, FetchEngine can deduplicate them by sharing a single in-flight promise. This reduces network traffic, server load, and prevents race conditions. + + +### Quick Start + + +```typescript +// Enable with defaults (GET requests only) +const api = new FetchEngine({ + baseUrl: 'https://api.example.com', + dedupePolicy: true +}); + +// Three concurrent calls → one network request +const [user1, user2, user3] = await Promise.all([ + api.get('/users/123'), + api.get('/users/123'), + api.get('/users/123') +]); +// All three receive the same result from a single HTTP request +``` + + +### Configuration + + +| Option | Type | Default | Description | +|--------|------|---------|-------------| +| `enabled` | `boolean` | `true` | Enable/disable deduplication | +| `methods` | `HttpMethod[]` | `['GET']` | HTTP methods to deduplicate | +| `serializer` | `RequestSerializer` | `defaultRequestSerializer` | Function to generate request keys | +| `shouldDedupe` | `(ctx) => boolean` | - | Dynamic skip check (called per-request) | +| `rules` | `DedupeRule[]` | - | Route-specific configuration | + +**Full Configuration Example:** + +```typescript +const api = new FetchEngine({ + baseUrl: 'https://api.example.com', + dedupePolicy: { + enabled: true, + methods: ['GET', 'POST'], + serializer: (ctx) => `${ctx.method}:${ctx.path}:${JSON.stringify(ctx.payload)}`, + shouldDedupe: (ctx) => !ctx.headers?.['X-Force-Fresh'], + rules: [ + // Disable deduplication for admin endpoints + { startsWith: '/admin', enabled: false }, + + // Custom serializer for search (ignore timestamp param) + { + startsWith: '/search', + serializer: 
(ctx) => `${ctx.method}:${ctx.path}:${ctx.payload?.query}` + }, + + // Enable POST deduplication for specific endpoint + { is: '/graphql', methods: ['POST'] } + ] + } +}); +``` + + +### Deduplication Events + + +```typescript +// Emitted when a new request starts tracking +api.on('dedupe-start', (event) => { + console.log('New request:', event.key); +}); + +// Emitted when a caller joins an existing in-flight request +api.on('dedupe-join', (event) => { + console.log('Joined:', event.key, 'waiters:', event.waitingCount); +}); +``` + + +### Independent Timeout per Caller + + +Each caller can have independent timeout and abort constraints: + +```typescript +// Caller A starts request with 10s timeout +const promiseA = api.get('/slow-endpoint', { totalTimeout: 10000 }); + +// Caller B joins with 2s timeout +const promiseB = api.get('/slow-endpoint', { totalTimeout: 2000 }); + +// After 2s: B times out and rejects → A continues waiting +// At 5s: Request completes → A gets the result + +// Semantics: +// - Initiator's abort/timeout → cancels fetch → everyone fails +// - Joiner's abort/timeout → only that joiner fails → others unaffected +``` + + +## Response Caching + + +FetchEngine supports response caching with TTL and stale-while-revalidate (SWR) for improved performance and reduced API load. 
+ + +### Quick Start + + +```typescript +// Enable with defaults (GET requests, 60s TTL) +const api = new FetchEngine({ + baseUrl: 'https://api.example.com', + cachePolicy: true +}); + +// First call: fetches from network, caches response +const users1 = await api.get('/users'); + +// Subsequent calls within TTL: instant cache hit +const users2 = await api.get('/users'); +``` + + +### Configuration + + +| Option | Type | Default | Description | +|--------|------|---------|-------------| +| `enabled` | `boolean` | `true` | Enable/disable caching | +| `methods` | `HttpMethod[]` | `['GET']` | HTTP methods to cache | +| `ttl` | `number` | `60000` | Time to live in milliseconds | +| `staleIn` | `number` | - | Time until stale for SWR (ms) | +| `serializer` | `RequestSerializer` | `defaultRequestSerializer` | Function to generate cache keys | +| `skip` | `(ctx) => boolean` | - | Dynamic skip check | +| `rules` | `CacheRule[]` | - | Route-specific configuration | +| `adapter` | `CacheAdapter` | `MapCacheAdapter` | Custom cache storage backend | + +**Full Configuration with SWR:** + +```typescript +const api = new FetchEngine({ + baseUrl: 'https://api.example.com', + cachePolicy: { + enabled: true, + methods: ['GET'], + ttl: 300000, // 5 minutes + staleIn: 60000, // Consider stale after 1 minute + + // Skip caching for certain requests + skip: (ctx) => ctx.headers?.['Cache-Control'] === 'no-cache', + + rules: [ + // Long cache for static content + { startsWith: '/static', ttl: 3600000 }, + + // Short cache for user data + { startsWith: '/user', ttl: 30000, staleIn: 10000 }, + + // No caching for realtime endpoints + { includes: '/realtime', enabled: false }, + + // No caching for admin + { startsWith: '/admin', enabled: false } + ] + } +}); +``` + + +### Stale-While-Revalidate (SWR) + + +When `staleIn` is configured, FetchEngine implements stale-while-revalidate: + +```typescript +const api = new FetchEngine({ + baseUrl: 'https://api.example.com', + cachePolicy: { + ttl: 
60000, // Expire after 60 seconds + staleIn: 30000 // Consider stale after 30 seconds + } +}); + +// Timeline: +// 0-30s: Fresh cache hit - returns cached data immediately +// 30-60s: Stale cache hit - returns cached data + background revalidation +// >60s: Cache miss - fetches fresh data +``` + + +### Cache Events + + +```typescript +// Fresh cache hit +api.on('cache-hit', (event) => { + console.log('Cache hit:', event.key, 'expires in:', event.expiresIn); +}); + +// Stale cache hit (SWR) +api.on('cache-stale', (event) => { + console.log('Stale hit:', event.key, 'revalidating...'); +}); + +// Cache miss +api.on('cache-miss', (event) => { + console.log('Cache miss:', event.key); +}); + +// New cache entry stored +api.on('cache-set', (event) => { + console.log('Cached:', event.key, 'TTL:', event.expiresIn); +}); + +// SWR background revalidation started +api.on('cache-revalidate', (event) => { + console.log('Background revalidation:', event.key); +}); + +// SWR background revalidation failed +api.on('cache-revalidate-error', (event) => { + console.error('Revalidation failed:', event.key, event.error); +}); +``` + + +### Cache Invalidation + + +```typescript +// Clear all cached responses +api.clearCache(); + +// Delete specific cache entry (sync, fire-and-forget) +api.clearCacheKey(cacheKey); + +// Delete specific cache entry (async, returns whether it existed) +const existed = await api.deleteCache(cacheKey); + +// Invalidate entries matching a predicate +const count = await api.invalidateCache((key) => key.includes('user')); +console.log(`Invalidated ${count} entries`); + +// Invalidate by path pattern (string prefix) +await api.invalidatePath('/users'); + +// Invalidate by path pattern (RegExp) +await api.invalidatePath(/^\/api\/v\d+\/users/); + +// Invalidate with custom predicate (for custom serializers) +await api.invalidatePath((key) => { + // Full control over key matching - useful when using custom serializers + return key.includes('/users') && 
key.includes('Bearer');
+});
+
+// Get cache statistics
+const stats = api.cacheStats();
+console.log('Cache size:', stats.cacheSize);
+console.log('In-flight:', stats.inflightCount);
+```
+
+
+### Custom Cache Adapters
+
+
+FetchEngine supports pluggable cache backends via the `CacheAdapter` interface. This enables caching to Redis, IndexedDB, AsyncStorage, localStorage, or any custom storage.
+
+```typescript
+import { FetchEngine } from '@logosdx/fetch';
+import { CacheAdapter, CacheItem } from '@logosdx/utils';
+
+// Example: localStorage adapter
+class LocalStorageCacheAdapter implements CacheAdapter {
+
+    #prefix: string;
+    #data = new Map<string, CacheItem<unknown>>();
+
+    constructor(prefix = 'api-cache') {
+        this.#prefix = prefix;
+        this.#loadFromStorage();
+    }
+
+    get size() { return this.#data.size; }
+
+    async get(key: string) {
+        return this.#data.get(key);
+    }
+
+    async set(key: string, item: CacheItem<unknown>) {
+        this.#data.set(key, item);
+        this.#saveToStorage();
+    }
+
+    async delete(key: string) {
+        const existed = this.#data.delete(key);
+        this.#saveToStorage();
+        return existed;
+    }
+
+    async has(key: string) {
+        return this.#data.has(key);
+    }
+
+    async clear() {
+        this.#data.clear();
+        localStorage.removeItem(this.#prefix);
+    }
+
+    #loadFromStorage() {
+        const stored = localStorage.getItem(this.#prefix);
+        if (stored) {
+            const entries = JSON.parse(stored);
+            this.#data = new Map(entries);
+        }
+    }
+
+    #saveToStorage() {
+        localStorage.setItem(this.#prefix, JSON.stringify([...this.#data]));
+    }
+}
+
+// Use the custom adapter
+const api = new FetchEngine({
+    baseUrl: 'https://api.example.com',
+    cachePolicy: {
+        adapter: new LocalStorageCacheAdapter('my-api'),
+        ttl: 300000
+    }
+});
+```
+
+The `CacheAdapter` interface:
+
+```typescript
+interface CacheAdapter {
+
+    get<T>(key: string): Promise<CacheItem<T> | undefined>;
+    set<T>(key: string, item: CacheItem<T>, expiresAt?: number): Promise<void>;
+    delete(key: string): Promise<boolean>;
+    has(key: string): Promise<boolean>;
+    clear(): Promise<void>;
+    readonly size: number;
+}
+
+interface CacheItem<T> {
+
+    value: T;
+    createdAt: number;
+    expiresAt: number;
+    staleAt?: number; // For SWR
+}
+```
+
+
+## Rate Limiting
+
+
+Control outgoing request rates using a token bucket algorithm. Each unique request key (generated by the serializer) gets its own rate limiter, enabling per-endpoint or per-user throttling.
+
+This re-uses the same rate limiting logic found in the [function utility](https://logosdx.dev/packages/utils.html#ratelimit) in the utils package.
+
+
+### Quick Start
+
+
+```typescript
+// Enable with defaults (100 requests/minute, all HTTP methods)
+const api = new FetchEngine({
+    baseUrl: 'https://api.example.com',
+    rateLimitPolicy: true
+});
+
+// Requests are automatically throttled
+// If rate limit is exceeded, requests wait for tokens by default
+await api.get('/users'); // Waits if needed
+```
+
+
+### Configuration
+
+
+```typescript
+const api = new FetchEngine({
+    baseUrl: 'https://api.example.com',
+    rateLimitPolicy: {
+        // Global settings
+        enabled: true,
+        methods: ['GET', 'POST', 'PUT', 'DELETE', 'PATCH', 'OPTIONS'], // All by default
+        maxCalls: 100, // Requests per window (default: 100)
+        windowMs: 60000, // Time window in ms (default: 60000 = 1 minute)
+        waitForToken: true, // true = wait, false = reject immediately
+
+        // Custom key generation (default: method + pathname)
+        serializer: (ctx) => `${ctx.method}|${ctx.url.pathname}`,
+
+        // Dynamic bypass
+        shouldRateLimit: (ctx) => {
+            // Return false to bypass rate limiting
+            return !ctx.headers?.['X-Bypass-RateLimit'];
+        },
+
+        // Callback when rate limited
+        onRateLimit: (ctx, waitTimeMs) => {
+            console.log(`Rate limited for ${waitTimeMs}ms:`, ctx.path);
+        },
+
+        // Route-specific rules
+        rules: [
+            // Stricter limits for search
+            { startsWith: '/api/search', maxCalls: 10, windowMs: 60000 },
+
+            // Reject immediately for bulk operations
+            { startsWith: '/api/bulk', waitForToken: false },
+
+            // No rate limiting for health checks
+            { startsWith: '/health',
enabled: false }, + + // Custom serializer for user-specific limiting + { + startsWith: '/api/user', + serializer: (ctx) => `user:${ctx.headers?.['X-User-ID'] ?? 'anonymous'}` + } + ] + } +}); +``` + + +### Token Bucket Algorithm + + +Rate limiting uses a token bucket that refills continuously: + +- **Capacity**: `maxCalls` tokens +- **Refill Rate**: `maxCalls / windowMs` tokens per millisecond +- Each request consumes 1 token +- If no tokens available: + - `waitForToken: true` → waits until token available + - `waitForToken: false` → throws `RateLimitError` immediately + +```typescript +// Example: 10 requests per minute = 1 token every 6 seconds +{ + maxCalls: 10, + windowMs: 60000 // 60000ms / 10 = 6000ms per token +} +``` + + +### Rate Limit Events + + +```typescript +// Emitted when request must wait for a token +api.on('ratelimit-wait', (event) => { + console.log('Waiting for rate limit:', { + key: event.key, + waitTimeMs: event.waitTimeMs, + currentTokens: event.currentTokens, + capacity: event.capacity, + nextAvailable: event.nextAvailable + }); +}); + +// Emitted when request is rejected (waitForToken: false) +api.on('ratelimit-reject', (event) => { + console.log('Rate limit exceeded:', { + key: event.key, + waitTimeMs: event.waitTimeMs // How long they would have waited + }); +}); + +// Emitted after token is successfully acquired +api.on('ratelimit-acquire', (event) => { + console.log('Token acquired:', { + key: event.key, + currentTokens: event.currentTokens, // Remaining tokens + capacity: event.capacity + }); +}); +``` + + +### Rate Limiting Order + + +Rate limiting is evaluated **after** the cache check but **before** deduplication: + +``` +Request → Cache Check → Rate Limit → Dedupe Check → Network +``` + +This means: + +- Cached responses return immediately **without** consuming rate limit tokens +- Rate limiting only gates actual outbound requests (after cache miss) +- Deduplicated requests only consume one token (the initiator's) +- Rate limiting 
protects your API from being overwhelmed + + +### Per-User Rate Limiting + + +```typescript +const api = new FetchEngine({ + baseUrl: 'https://api.example.com', + rateLimitPolicy: { + maxCalls: 100, + windowMs: 60000, + // Group requests by user ID + serializer: (ctx) => `user:${ctx.state?.userId ?? 'anonymous'}` + } +}); + +// Each user gets their own 100 req/min bucket +api.state.set('userId', 'user-123'); +await api.get('/data'); // Uses user-123's bucket + +api.state.set('userId', 'user-456'); +await api.get('/data'); // Uses user-456's bucket +``` + + +### Global Rate Limiting + + +```typescript +const api = new FetchEngine({ + baseUrl: 'https://api.example.com', + rateLimitPolicy: { + maxCalls: 1000, + windowMs: 60000, + // All requests share one bucket + serializer: () => 'global' + } +}); +``` + + +### Handling Rate Limit Errors + + +```typescript +import { attempt, isRateLimitError } from '@logosdx/utils'; + +const [response, err] = await attempt(() => api.get('/users')); + +if (err) { + if (isRateLimitError(err)) { + console.log('Rate limited:', err.message); + console.log('Limit:', err.limit); // maxCalls value + // Retry after some time, or show user feedback + } +} +``` + + +## Route Matching + + +Deduplication, caching, and rate limiting all support flexible route matching via `MatchTypes`: + +```typescript +interface MatchTypes { + + is?: string; // Exact path match + startsWith?: string; // Path prefix match + endsWith?: string; // Path suffix match + includes?: string; // Path contains substring + match?: RegExp; // Regular expression match +} +``` + +**Match Type Behavior:** +- `is` requires an exact match and cannot be combined with other types +- Other types can be combined with AND logic (all must match) + +**Examples:** + +```typescript +const rules = [ + // Exact match + { is: '/users' }, + + // Prefix match + { startsWith: '/api/v2' }, + + // Suffix match + { endsWith: '.json' }, + + // Substring match + { includes: 'admin' }, + + // Regex 
match + { match: /^\/v\d+\/users/ }, + + // Combined (AND logic) + { startsWith: '/api', endsWith: '.json' }, // Must satisfy both + { includes: 'user', match: /\/\d+$/ } // Must satisfy both +]; +``` + +::: warning Regex Performance (ReDoS) +Route matching runs on **every request**. Poorly written regular expressions can cause catastrophic backtracking, severely degrading performance or hanging your application. + +**Dangerous patterns to avoid:** + +```typescript +// BAD: Nested quantifiers cause exponential backtracking +{ match: /(a+)+b/ } +{ match: /^\/api\/v\d+\/.*$/ } // .* with anchors can backtrack +{ match: /(\w+)*@/ } // Nested quantifiers + +// BAD: Overlapping alternatives +{ match: /(a|a)+/ } +{ match: /(\d+|\d+\.)+/ } +``` + +**Safe patterns:** + +```typescript +// GOOD: Simple, non-nested quantifiers +{ match: /^\/v\d+\/users/ } // No trailing .* +{ match: /\/users\/\d+$/ } // Anchored end, simple pattern +{ match: /\.(json|xml)$/ } // Non-overlapping alternatives + +// BETTER: Use string matchers when possible (faster, no ReDoS risk) +{ startsWith: '/api/v2' } // Instead of /^\/api\/v2/ +{ endsWith: '.json' } // Instead of /\.json$/ +{ includes: '/users/' } // Instead of /\/users\// +``` + +**Best practice:** Prefer string-based matchers (`startsWith`, `endsWith`, `includes`, `is`) over regex. They're faster and immune to ReDoS. Only use `match` when you need pattern complexity that strings can't express. +::: + + +## Request Serializers + + +Serializers generate unique keys for identifying requests. These keys are used by deduplication, caching, and rate limiting to determine which requests should share state. + + +### Built-in Serializers + + +FetchEngine provides two built-in serializers, each optimized for different use cases: + + +#### Request Serializer (Default for Cache & Dedupe) + + +Generates keys based on full request identity: method, path, query string, payload, and stable headers. 
+ +```typescript +// Key format: method|path+query|payload|headers +// Example: "GET|/users/123?page=1|undefined|{"accept":"application/json","authorization":"Bearer token"}" +``` + +**Stable Headers Only:** The request serializer only includes semantically meaningful headers that affect response content: + +| Included Headers | Purpose | +|-----------------|---------| +| `authorization` | Different users get different responses | +| `accept` | Different response formats (JSON, XML, etc.) | +| `accept-language` | Localized responses | +| `content-type` | Format of request payload (for POST/PUT) | +| `accept-encoding` | Response compression format | + +**Excluded Headers (Dynamic):** +- `X-Timestamp`, `Date` - Change every request +- `X-HMAC-Signature` - Computed per-request +- `X-Request-Id`, `X-Correlation-Id` - Unique per-request +- `Cache-Control`, `Pragma` - Control directives, not identity + +This prevents cache pollution from dynamic headers that would make every request unique. + + +#### Endpoint Serializer (Default for Rate Limit) + + +Generates keys based on endpoint identity only: method and pathname (excludes query string and payload). + +```typescript +// Key format: method|pathname +// Example: "GET|/users/123" +``` + +This groups all requests to the same endpoint together, ideal for rate limiting where you want to protect an endpoint from overload regardless of specific parameters. 
+ + +### Using Built-in Serializers + + +```typescript +import { endpointSerializer, requestSerializer } from '@logosdx/fetch'; + +// Use endpoint serializer for cache (group by endpoint) +const api = new FetchEngine({ + baseUrl: 'https://api.example.com', + cachePolicy: { + serializer: endpointSerializer, // All /users/123?page=1 and /users/123?page=2 share cache + ttl: 60000 + } +}); + +// Use request serializer for rate limiting (per unique request) +const api2 = new FetchEngine({ + baseUrl: 'https://api.example.com', + rateLimitPolicy: { + serializer: requestSerializer, // Each unique request gets its own bucket + maxCalls: 100, + windowMs: 60000 + } +}); +``` + + +### Custom Serializers + + +Create custom serializers when the built-ins don't match your needs: + +```typescript +// User-scoped rate limiting +const userSerializer = (ctx: RequestKeyOptions) => { + return `user:${ctx.state?.userId ?? 'anonymous'}|${ctx.method}|${ctx.url.pathname}`; +}; + +const api = new FetchEngine({ + baseUrl: 'https://api.example.com', + rateLimitPolicy: { + serializer: userSerializer, // Each user gets their own rate limit bucket + maxCalls: 100, + windowMs: 60000 + } +}); + +// Tenant-scoped caching +const tenantSerializer = (ctx: RequestKeyOptions) => { + const tenant = ctx.headers?.['X-Tenant-ID'] ?? 
'default';
+    return `${tenant}|${ctx.method}|${ctx.url.pathname}${ctx.url.search}`;
+};
+
+const multiTenantApi = new FetchEngine({
+    baseUrl: 'https://api.example.com',
+    cachePolicy: {
+        serializer: tenantSerializer, // Each tenant has separate cache
+        ttl: 60000
+    }
+});
+
+// Ignore certain params for caching
+const ignoreTimestampSerializer = (ctx: RequestKeyOptions) => {
+    const url = new URL(ctx.url);
+    url.searchParams.delete('_t'); // Remove timestamp param
+    url.searchParams.delete('nocache');
+    return `${ctx.method}|${url.pathname}${url.search}`;
+};
+```
+
+
+### Serializer Signature
+
+
+```typescript
+type RequestSerializer<H = unknown, P = unknown, S = unknown> = (
+    ctx: RequestKeyOptions<H, P, S>
+) => string;
+
+interface RequestKeyOptions<H = unknown, P = unknown, S = unknown> {
+
+    method: string; // HTTP method (uppercase)
+    path: string; // Original path from request
+    url: URL; // Full URL object (includes pathname, search, etc.)
+    payload?: unknown; // Request body (if any)
+    headers?: H; // Request headers
+    params?: P; // URL parameters
+    state?: S; // Instance state
+}
+```
+
+
+### Per-Rule Serializers
+
+
+Override serializers for specific routes:
+
+```typescript
+const api = new FetchEngine({
+    baseUrl: 'https://api.example.com',
+    cachePolicy: {
+        enabled: true,
+        ttl: 60000,
+        rules: [
+            // GraphQL: cache by operation name only
+            {
+                is: '/graphql',
+                serializer: (ctx) => `graphql:${ctx.payload?.operationName ?? 'unknown'}`
+            },
+
+            // Search: ignore pagination for cache
+            {
+                startsWith: '/search',
+                serializer: (ctx) => {
+                    const url = new URL(ctx.url);
+                    url.searchParams.delete('page');
+                    url.searchParams.delete('limit');
+                    return `search:${url.search}`;
+                }
+            },
+
+            // User profile: cache per user
+            {
+                match: /^\/users\/\d+$/,
+                serializer: (ctx) => `user:${ctx.url.pathname}`
+            }
+        ]
+    }
+});
+```
+
+
+## Policy Architecture
+
+
+FetchEngine's resilience policies (deduplication, caching, rate limiting) share a common architecture that enables consistent behavior and efficient configuration resolution.
+ + +### Three-Method Pattern + + +All policies implement the same three-method pattern: + +``` +┌─────────────────────────────────────────────────────────────┐ +│ ResiliencePolicy │ +├─────────────────────────────────────────────────────────────┤ +│ init(config) Parse config → Initialize state (O(1)) │ +│ resolve(...) Memoized lookup + dynamic checks (O(1)*) │ +│ compute(...) Rule matching (O(n) first time only) │ +└─────────────────────────────────────────────────────────────┘ + * O(1) amortized due to memoization +``` + +1. **`init`**: Called during FetchEngine construction. Parses configuration, validates rules, and sets up internal state. + +2. **`resolve`**: Called for every request. Returns the effective policy configuration by combining memoized rule matching with dynamic skip callbacks. + +3. **`compute`**: Called once per unique method+path combination. Performs O(n) rule matching and caches the result. + + +### Configuration Resolution + + +When a request is made, each policy resolves its configuration in order: + +``` +Request → Policy.resolve(method, path, context) + │ + ├── Check memoized cache (O(1)) + │ └── Cache miss? → compute() → cache result + │ + ├── Check dynamic skip callback + │ └── Skip? → return null + │ + └── Return merged rule (policy defaults + matched rule) +``` + + +### Rule Matching Priority + + +Rules are evaluated in declaration order. The first matching rule wins: + +```typescript +rules: [ + { is: '/users', ttl: 30000 }, // Checked first (exact match) + { startsWith: '/users', ttl: 60000 }, // Checked second + { match: /^\/users/, ttl: 120000 } // Checked third +] +// Request to '/users' matches first rule (30s TTL) +// Request to '/users/123' matches second rule (60s TTL) +``` + + +### Policy Execution Order + + +Policies are evaluated in a specific order during request processing: + +``` +Request + │ + ├── 1. Cache Check ────────────────┐ + │ └── Hit? Return cached │ + │ │ + ├── 2. 
Rate Limit (guard) ─────────┤
+    │     └── Wait or reject          │
+    │                                 │
+    ├── 3. Dedupe Check ──────────────┤
+    │     └── In-flight? Join it      │
+    │                                 │
+    ├── 4. Network Request ───────────┤
+    │                                 │
+    ├── 5. Store Cache (on success) ──┤
+    │                                 │
+    └── Response ─────────────────────┘
+```
+
+**Key implications:**
+- Cache checks run **first** — cached responses return immediately without consuming rate limit tokens
+- Rate limiting only runs on cache misses — it protects the upstream API, not local cache reads
+- Deduplication runs **after** rate limiting — joining an in-flight request avoids a new API call
+- Only the request initiator consumes a rate limit token; joiners share the result
+
+
+### Memoization Strategy
+
+
+Rule matching results are cached by `method:path` key:
+
+```typescript
+// First request to GET /users/123
+resolve('GET', '/users/123', ctx)
+    → compute() runs, caches result
+    → rulesCache.set('GET:/users/123', resolvedRule)
+
+// Subsequent requests to same endpoint
+resolve('GET', '/users/123', ctx)
+    → rulesCache.get('GET:/users/123') // O(1) hit
+    → Check skip callback
+    → Return cached rule
+```
+
+This means:
+- First request to each endpoint: O(n) rule matching
+- Subsequent requests: O(1) cache lookup
+- Skip callbacks always run (they depend on request-specific context)
+
+
+### Policy State
+
+
+Each policy maintains its own internal state:
+
+```typescript
+interface PolicyInternalState {
+
+    enabled: boolean; // Global enable/disable
+    methods: Set<HttpMethod>; // Applicable HTTP methods
+    serializer: RequestSerializer; // Key generation function
+    rulesCache: Map<string, ResolvedRule>; // Memoized rule lookups
+}
+```
+
+
+### Extending Policies
+
+
+While the built-in policies cover most use cases, the architecture is designed for extensibility. 
Each policy class extends `ResiliencePolicy` and implements:
+
+- `getDefaultSerializer()` - Returns the default key generation function
+- `getDefaultMethods()` - Returns which HTTP methods are enabled by default
+- `mergeRuleWithDefaults(rule)` - Merges matched rules with policy defaults
+
+This shared base ensures consistent configuration handling across all resilience features.
diff --git a/docs/packages/fetch/requests.md b/docs/packages/fetch/requests.md
new file mode 100644
index 0000000..0321b63
--- /dev/null
+++ b/docs/packages/fetch/requests.md
@@ -0,0 +1,344 @@
+---
+title: Making Requests
+description: HTTP methods, request options, and AbortablePromise in FetchEngine.
+---
+
+# Making Requests
+
+
+FetchEngine provides type-safe HTTP methods that return rich response objects with full request context.
+
+[[toc]]
+
+
+## HTTP Methods
+
+
+All request methods return an `AbortablePromise<FetchResponse<T>>`.
+
+
+### GET
+
+
+```typescript
+api.get<T>(path: string, options?: CallConfig): AbortablePromise<FetchResponse<T>>
+```
+
+Retrieve data from the server.
+
+```typescript
+const { data: users } = await api.get('/users');
+
+const { data: user } = await api.get('/users/123', {
+    params: { include: 'profile' }
+});
+```
+
+
+### POST
+
+
+```typescript
+api.post<T, Data = unknown>(path: string, payload?: Data, options?: CallConfig): AbortablePromise<FetchResponse<T>>
+```
+
+Create a new resource.
+
+```typescript
+const { data: user } = await api.post('/users', {
+    name: 'John Doe',
+    email: 'john@example.com'
+});
+```
+
+
+### PUT
+
+
+```typescript
+api.put<T, Data = unknown>(path: string, payload?: Data, options?: CallConfig): AbortablePromise<FetchResponse<T>>
+```
+
+Replace a resource.
+
+```typescript
+const { data: user } = await api.put('/users/123', {
+    name: 'Jane Doe',
+    email: 'jane@example.com'
+});
+```
+
+
+### PATCH
+
+
+```typescript
+api.patch<T, Data = unknown>(path: string, payload?: Data, options?: CallConfig): AbortablePromise<FetchResponse<T>>
+```
+
+Partially update a resource.
+
+```typescript
+const { data: user } = await api.patch<User, Partial<User>>('/users/123', {
+    email: 'new@example.com'
+});
+```
+
+
+### DELETE
+
+
+```typescript
+api.delete<T, Data = unknown>(path: string, payload?: Data, options?: CallConfig): AbortablePromise<FetchResponse<T>>
+```
+
+Remove a resource.
+
+```typescript
+await api.delete('/users/123');
+
+// With request body
+await api.delete('/users/batch', { ids: ['1', '2', '3'] });
+```
+
+
+### OPTIONS
+
+
+```typescript
+api.options<T>(path: string, options?: CallConfig): AbortablePromise<FetchResponse<T>>
+```
+
+Check server capabilities.
+
+```typescript
+const { headers } = await api.options('/users');
+```
+
+
+### HEAD
+
+
+```typescript
+api.head<T>(path: string, options?: CallConfig): AbortablePromise<FetchResponse<T>>
+```
+
+Retrieve headers only (no body).
+
+```typescript
+const { headers } = await api.head('/users/123');
+```
+
+
+### Generic Request
+
+
+```typescript
+api.request<T, Data = unknown>(
+    method: HttpMethods,
+    path: string,
+    options?: CallConfig & { payload?: Data }
+): AbortablePromise<FetchResponse<T>>
+```
+
+Make a request with any HTTP method.
+
+```typescript
+const { data } = await api.request('PATCH', '/users/123', {
+    payload: { name: 'Updated' },
+    headers: { 'X-Custom': 'value' }
+});
+```
+
+
+## FetchResponse
+
+
+Every HTTP method returns a `FetchResponse` object:
+
+```typescript
+interface FetchResponse<T, H = Record<string, string>> {
+
+    data: T; // Parsed response body
+    headers: Partial<H>; // Response headers
+    status: number; // HTTP status code
+    request: Request; // Original request object
+    config: FetchConfig; // Configuration used for request
+}
+```
+
+**Example:**
+
+```typescript
+const response = await api.get('/users');
+
+console.log(response.data); // User[]
+console.log(response.status); // 200
+console.log(response.headers); // { 'content-type': 'application/json', ... }
+console.log(response.config); // { baseUrl: '...', headers: {...}, ...
}
+
+// Destructure just the data
+const { data: users } = await api.get('/users');
+```
+
+
+## Per-Request Options
+
+
+Override instance configuration for individual requests using `CallConfig`:
+
+| Option | Type | Description |
+|--------|------|-------------|
+| `headers` | `DictAndT<Headers>` | Request-specific headers |
+| `params` | `DictAndT<Params>` | Request-specific URL parameters |
+| `totalTimeout` | `number` | Total timeout including retries (ms) |
+| `attemptTimeout` | `number` | Per-attempt timeout (ms) |
+| `retry` | `RetryConfig \| boolean` | Override retry configuration |
+| `abortController` | `AbortController` | Custom abort controller |
+| `determineType` | `DetermineTypeFn` | Custom response type detection |
+| `onBeforeReq` | `(opts) => void` | Called before request |
+| `onAfterReq` | `(response, opts) => void` | Called after response |
+| `onError` | `(err) => void` | Called on error |
+
+**Example:**
+
+```typescript
+const { data } = await api.get('/users/123', {
+    headers: { 'X-Include': 'profile' },
+    params: { version: 'v2' },
+    totalTimeout: 60000,
+    retry: { maxAttempts: 5 }
+});
+```
+
+
+## AbortablePromise
+
+
+All HTTP methods return an `AbortablePromise` that can be cancelled:
+
+```typescript
+interface AbortablePromise<T> extends Promise<T> {
+
+    isFinished: boolean; // Whether request completed
+    isAborted: boolean; // Whether request was aborted
+    abort(reason?: string): void; // Cancel the request
+}
+```
+
+**Example:**
+
+```typescript
+const request = api.get('/slow-endpoint');
+
+// Abort after 5 seconds
+setTimeout(() => {
+    if (!request.isFinished) {
+        request.abort('Timeout');
+    }
+}, 5000);
+
+const [data, err] = await attempt(() => request);
+
+if (err && request.isAborted) {
+    console.log('Request was cancelled');
+}
+```
+
+
+## URL Handling
+
+
+### Relative Paths
+
+
+Relative paths are joined with the base URL:
+
+```typescript
+const api = new FetchEngine({ baseUrl: 'https://api.example.com' });
+
+await api.get('/users'); // → https://api.example.com/users
+await api.get('/users/123'); // → https://api.example.com/users/123
+```
+
+
+### Absolute URLs
+
+
+Absolute URLs bypass the base URL:
+
+```typescript
+// Uses external URL directly
+const { data } = await api.get('https://other-api.com/data');
+```
+
+
+### URL Parameters
+
+
+Parameters are appended to the 
query string:
+
+```typescript
+await api.get('/users', {
+    params: { page: '1', limit: '10' }
+});
+// → https://api.example.com/users?page=1&limit=10
+
+// Combined with existing query string
+await api.get('/users?active=true', {
+    params: { page: '1' }
+});
+// → https://api.example.com/users?active=true&page=1
+```
+
+
+## Request Lifecycle Hooks
+
+
+Add per-request callbacks:
+
+```typescript
+const { data } = await api.post('/users', userData, {
+    onBeforeReq: (opts) => {
+        console.log('Starting request:', opts.method, opts.url);
+    },
+
+    onAfterReq: (response, opts) => {
+        console.log('Completed:', response.status);
+    },
+
+    onError: (err) => {
+        console.error('Failed:', err.message);
+    }
+});
+```
+
+
+## Type Safety
+
+
+FetchEngine supports full TypeScript generics:
+
+```typescript
+interface User {
+    id: string;
+    name: string;
+    email: string;
+}
+
+interface CreateUserData {
+    name: string;
+    email: string;
+}
+
+// Response type is inferred
+const { data } = await api.get<User>('/users/123');
+// data: User
+
+// Payload type is validated
+const { data: newUser } = await api.post<User, CreateUserData>('/users', {
+    name: 'John',
+    email: 'john@example.com'
+    // TypeScript error if payload doesn't match CreateUserData
+});
+```
diff --git a/docs/packages/fetch/resilience.md b/docs/packages/fetch/resilience.md
new file mode 100644
index 0000000..c860c4b
--- /dev/null
+++ b/docs/packages/fetch/resilience.md
@@ -0,0 +1,498 @@
+---
+title: Resilience
+description: Retry configuration, timeouts, and error handling in FetchEngine.
+---
+
+# Resilience
+
+
+FetchEngine provides robust resilience features including intelligent retry logic, flexible timeout configuration, and comprehensive error handling.
+ +[[toc]] + + +## Retry Configuration + + +The retry option accepts three types of values: +- `true` - Enable retries with default configuration +- `false` - Disable retries completely +- `RetryConfig` object - Custom retry configuration + +**Default values (when `retry: true` or partial config):** + +```typescript +{ + maxAttempts: 3, + baseDelay: 1000, + maxDelay: 10000, + useExponentialBackoff: true, + retryableStatusCodes: [408, 429, 499, 500, 502, 503, 504] +} +``` + + +### RetryConfig Interface + + +```typescript +interface RetryConfig { + + maxAttempts?: number; // default: 3 + baseDelay?: number; // default: 1000 (in milliseconds) + maxDelay?: number; // default: 10000 + useExponentialBackoff?: boolean; // default: true + retryableStatusCodes?: number[]; // default: [408, 429, 499, 500, 502, 503, 504] + + // shouldRetry can return a boolean or a custom delay in milliseconds + // When returning a number, it specifies the exact delay before the next retry + // default: () => true + shouldRetry?: (error: FetchError, attempt: number) => boolean | number; +} +``` + + +### Custom Retry Logic + + +The `shouldRetry` function will be awaited and can return: + +- `true` - Retry with default exponential backoff (uses `baseDelay`) +- `false` - Don't retry +- `number` - Retry with this exact delay in milliseconds (overrides exponential backoff) + +**Examples:** + +```typescript +// Use default retry configuration +const defaultRetryApi = new FetchEngine({ + baseUrl: 'https://api.example.com', + retry: true // Uses defaults: 3 attempts, 1s base delay, exponential backoff +}); + +// Disable retries completely +const noRetryApi = new FetchEngine({ + baseUrl: 'https://api.example.com', + retry: false // No retries at all +}); + +// Custom retry logic with shouldRetry +const api = new FetchEngine({ + baseUrl: 'https://api.example.com', + retry: { + maxAttempts: 5, + baseDelay: 1000, // Used for exponential backoff when shouldRetry returns true + shouldRetry: (error, 
attempt) => { + // Custom delay for rate limits (overrides exponential backoff) + if (error.status === 429) { + const retryAfter = error.headers?.['retry-after']; + return retryAfter ? parseInt(retryAfter) * 1000 : 5000; + } + + // Don't retry client errors + if (error.status >= 400 && error.status < 500) { + return false; + } + + // Custom delay for server errors (overrides exponential backoff) + if (error.status >= 500) { + return Math.min(1000 * Math.pow(2, attempt - 1), 30000); + } + + return true; // Use default exponential backoff with baseDelay + } + } +}); +``` + + +## Timeout Configuration + + +FetchEngine provides two complementary timeout mechanisms for fine-grained control over request timing: + +- **`totalTimeout`**: Caps the entire request lifecycle, including all retry attempts +- **`attemptTimeout`**: Applies per-attempt, with each retry getting a fresh timeout + + +### Type Definitions + + +```typescript +interface TimeoutOptions { + + /** + * Total timeout for the entire request lifecycle in milliseconds. + * Applies to the complete operation including all retry attempts. + * When this fires, the request stops immediately with no more retries. + */ + totalTimeout?: number; + + /** + * Per-attempt timeout in milliseconds. + * Each retry attempt gets a fresh timeout and AbortController. + * When an attempt times out, it can still be retried (if retry is configured). + */ + attemptTimeout?: number; + + /** + * @deprecated Use `totalTimeout` instead. This is now an alias for `totalTimeout`. 
+ */ + timeout?: number; +} +``` + + +### Basic Usage + + +```typescript +// Instance-level timeouts +const api = new FetchEngine({ + baseUrl: 'https://api.example.com', + totalTimeout: 30000, // 30s max for entire operation + attemptTimeout: 5000 // 5s per attempt +}); + +// Per-request overrides +const [response, err] = await attempt(() => + api.get('/slow-endpoint', { + totalTimeout: 60000, // Override: 60s for this request + attemptTimeout: 10000 // Override: 10s per attempt + }) +); +``` + + +### How Timeouts Work Together + + +When both timeouts are configured, they work in a parent-child relationship: + +``` +┌─────────────────────────────────────────────────────────────────────┐ +│ totalTimeout (30s) │ +│ ┌─────────────────┐ ┌─────────────────┐ ┌─────────────────┐ │ +│ │ Attempt 1 (5s) │ │ Attempt 2 (5s) │ │ Attempt 3 (5s) │ │ +│ │ attemptTimeout │ │ attemptTimeout │ │ attemptTimeout │ │ +│ └─────────────────┘ └─────────────────┘ └─────────────────┘ │ +│ ↓ ↓ ↓ │ +│ [timeout] [timeout] [success] │ +│ retry → retry → return │ +└─────────────────────────────────────────────────────────────────────┘ +``` + +**Key behaviors:** + +1. **totalTimeout fires**: Everything stops immediately, no more retries +2. **attemptTimeout fires**: That attempt fails, but can retry if configured +3. 
**Both configured**: Each attempt has its own fresh AbortController + + +### Controller Architecture + + +``` +┌──────────────────────────────────────────────────────────────────┐ +│ Parent Controller │ +│ (totalTimeout attached) │ +│ │ +│ ┌───────────────┐ ┌───────────────┐ ┌───────────────┐ │ +│ │ Child 1 │ │ Child 2 │ │ Child 3 │ │ +│ │ (attempt 1) │ │ (attempt 2) │ │ (attempt 3) │ │ +│ │ attemptTimeout│ │ attemptTimeout│ │ attemptTimeout│ │ +│ └───────────────┘ └───────────────┘ └───────────────┘ │ +│ │ +│ - Parent abort → All children abort (totalTimeout fired) │ +│ - Child abort → Only that attempt fails (attemptTimeout fired) │ +└──────────────────────────────────────────────────────────────────┘ +``` + + +### With Retry Configuration + + +```typescript +const api = new FetchEngine({ + baseUrl: 'https://api.example.com', + totalTimeout: 30000, // 30s total + attemptTimeout: 5000, // 5s per attempt + retry: { + maxAttempts: 5, + baseDelay: 1000, + useExponentialBackoff: true + } +}); + +// Scenario: Each attempt can take up to 5s, retries if it times out +// Total operation cannot exceed 30s regardless of retry attempts +const [response, err] = await attempt(() => api.get('/flaky-endpoint')); + +if (err && err.timedOut) { + // The request timed out (either totalTimeout or attemptTimeout) + console.log('Request timed out after all retries'); +} +``` + + +### Default Retry Behavior with Timeouts + + +The default `shouldRetry` function returns `true` for status code `499`, which is set when a request is aborted (including by `attemptTimeout`). 
This means: + +- **attemptTimeout fires** → Status 499 → Can retry (if within maxAttempts) +- **totalTimeout fires** → Parent controller aborts → No retry possible + +```typescript +// Default retry configuration +{ + maxAttempts: 3, + baseDelay: 1000, + retryableStatusCodes: [408, 429, 499, 500, 502, 503, 504], + shouldRetry(error) { + if (error.status === 499) return true; // Includes attemptTimeout + return this.retryableStatusCodes?.includes(error.status) ?? false; + } +} +``` + + +### Migration from `timeout` + + +The `timeout` option is deprecated but continues to work as an alias for `totalTimeout`: + +```typescript +// Old code (still works) +const api = new FetchEngine({ + baseUrl: 'https://api.example.com', + timeout: 5000 +}); + +// New code (recommended) +const api = new FetchEngine({ + baseUrl: 'https://api.example.com', + totalTimeout: 5000 +}); + +// Both are equivalent - totalTimeout applies to entire lifecycle +``` + +::: warning Migration Note +If you were using `timeout` expecting it to be per-attempt, you should now use `attemptTimeout` instead. The behavior of `timeout` (now `totalTimeout`) has always been for the entire operation. 
+:::
+
+
+### Real-World Examples
+
+
+**API Gateway with Strict Limits:**
+
+```typescript
+// Gateway has 30s hard limit, but individual services might be slow
+const api = new FetchEngine({
+    baseUrl: 'https://gateway.example.com',
+    totalTimeout: 28000,   // Under gateway limit
+    attemptTimeout: 8000,  // Allow slow services
+    retry: {
+        maxAttempts: 3,
+        baseDelay: 500
+    }
+});
+```
+
+**User-Facing with Fallback:**
+
+```typescript
+const api = new FetchEngine({
+    baseUrl: 'https://api.example.com',
+    totalTimeout: 10000,   // Users won't wait more than 10s
+    attemptTimeout: 3000,  // Quick feedback per attempt
+    retry: {
+        maxAttempts: 3,
+        shouldRetry: (error) => {
+            // Only retry on timeout, not on 4xx errors
+            return error.timedOut || error.status >= 500;
+        }
+    }
+});
+```
+
+**Background Sync with Long Tolerance:**
+
+```typescript
+const syncApi = new FetchEngine({
+    baseUrl: 'https://sync.example.com',
+    totalTimeout: 300000,  // 5 minutes for batch operations
+    attemptTimeout: 60000, // 1 minute per attempt
+    retry: {
+        maxAttempts: 5,
+        baseDelay: 5000,
+        useExponentialBackoff: true
+    }
+});
+```
+
+
+## Error Handling
+
+
+### FetchError
+
+
+```typescript
+interface FetchError<T = any, H = Record<string, string>> extends Error {
+
+    data: T | null;          // Response body (if parseable)
+    status: number;          // HTTP status code
+    method: HttpMethods;     // HTTP method used
+    path: string;            // Request path
+    aborted?: boolean;       // Whether request was cancelled (any cause)
+    timedOut?: boolean;      // Whether abort was caused by timeout
+    attempt?: number;        // Retry attempt number
+    step?: 'fetch' | 'parse' | 'response'; // Where error occurred
+    url?: string;            // Full request URL
+    headers?: H;             // Response headers
+
+    // Helper methods for distinguishing 499 error types
+    isCancelled(): boolean;       // Manual abort (user/app initiated)
+    isTimeout(): boolean;         // Timeout fired (attemptTimeout or totalTimeout)
+    isConnectionLost(): boolean;  // Server/network dropped connection
+}
+```
+
+**Important:**
+
+- 
Aborted requests and dropped connections receive status code `499` (following the Nginx `499 Client Closed Request` convention)
+- Parse errors without status codes receive status code `999`
+
+
+### The `timedOut` Flag
+
+
+The `FetchError` object includes a `timedOut` flag that distinguishes timeout aborts from other abort causes:
+
+```typescript
+interface FetchError<T = any, H = Record<string, string>> extends Error {
+
+    // ... other properties
+
+    /**
+     * Whether the request was aborted (any cause: manual, timeout, or server).
+     */
+    aborted?: boolean;
+
+    /**
+     * Whether the abort was caused by a timeout (attemptTimeout or totalTimeout).
+     * - `true`: The abort was caused by a timeout firing
+     * - `undefined`: The abort was manual or server-initiated
+     *
+     * When `timedOut` is true, `aborted` will also be true.
+     */
+    timedOut?: boolean;
+}
+```
+
+**Usage:**
+
+```typescript
+const [response, err] = await attempt(() =>
+    api.get('/endpoint', { totalTimeout: 5000 })
+);
+
+if (err) {
+    if (err.aborted && err.timedOut) {
+        // Timed out - show user-friendly message
+        console.log('Request took too long');
+    }
+    else if (err.aborted) {
+        // Manual abort or server disconnect
+        console.log('Request was cancelled');
+    }
+    else {
+        // Other error (network, HTTP error, etc.)
+        console.log('Request failed:', err.message);
+    }
+}
+```
+
+
+### FetchError Helper Methods
+
+
+All three scenarios below result in status code 499, but have different causes. Use these helper methods to distinguish them:
+
+| Method | Returns `true` when | Use case |
+|--------|---------------------|----------|
+| `isCancelled()` | Request was manually aborted (not by timeout) | User navigated away, component unmounted |
+| `isTimeout()` | Timeout fired (`attemptTimeout` or `totalTimeout`) | Show "request timed out" message |
+| `isConnectionLost()` | Server dropped connection or network failed | Show "connection lost" message |
+
+::: info
+All helper methods return `false` for non-499 errors. They only apply to connection-level failures.
+::: + +**Example:** + +```typescript +const [response, err] = await attempt(() => api.get('/data')); + +if (err) { + if (err.isCancelled()) { + // User/app intentionally cancelled - don't show error + return; + } + + if (err.isTimeout()) { + toast.warn('Request timed out. Please try again.'); + } + else if (err.isConnectionLost()) { + toast.error('Connection lost. Check your internet.'); + } + else { + // HTTP error (4xx, 5xx) - check err.status directly + toast.error(`Request failed: ${err.message}`); + } +} +``` + +**How it works:** + +The helpers combine multiple error properties to determine the cause: + +```typescript +// isCancelled(): Manual abort (user navigated away, app cancelled) +status === 499 && aborted === true && timedOut !== true + +// isTimeout(): Our timeout fired +status === 499 && timedOut === true + +// isConnectionLost(): Server/network dropped us (we didn't abort) +status === 499 && step === 'fetch' && aborted === false +``` + + +### Type Guard + + +```typescript +isFetchError(error: unknown): error is FetchError +``` + +**Example:** + +```typescript +const [response, err] = await attempt(() => api.get('/users')); + +if (err) { + if (isFetchError(err)) { + // Types are available + console.log('HTTP Error:', err.status, err.message); + console.log('Failed at step:', err.step); + console.log('Response data:', err.data); + } + else { + console.log('Network or other error:', err.message); + } +} +``` diff --git a/docs/packages/observer.md b/docs/packages/observer.md index d5aa41c..0033656 100644 --- a/docs/packages/observer.md +++ b/docs/packages/observer.md @@ -331,7 +331,7 @@ Enable/disable debug tracing. ## EventGenerator -Async iterator for event streams with manual control. +Async iterator for event streams with manual control. Events are internally buffered, so none are lost even if they arrive faster than the consumer can process them. 
### Properties @@ -399,6 +399,34 @@ for await (const data of generator) { } ``` +### Event Buffering + +Events that arrive while the consumer is busy (e.g., doing async work between iterations) are buffered internally and delivered in FIFO order on the next `next()` call. This guarantees no events are dropped during async iteration. + +```typescript +const generator = observer.on('job:complete') + +for await (const result of generator) { + // Even if multiple 'job:complete' events fire during this await, + // they are buffered and delivered on subsequent iterations. + await saveToDatabase(result) +} +``` + +Events emitted before any call to `next()` are also buffered: + +```typescript +const generator = observer.on('status') + +observer.emit('status', 'a') +observer.emit('status', 'b') +observer.emit('status', 'c') + +await generator.next() // 'a' +await generator.next() // 'b' +await generator.next() // 'c' +``` + ## EventQueue Process events with concurrency control, rate limiting, and comprehensive state management. diff --git a/docs/what-is-logosdx.md b/docs/what-is-logosdx.md index ae658d4..2d13326 100644 --- a/docs/what-is-logosdx.md +++ b/docs/what-is-logosdx.md @@ -23,7 +23,7 @@ The rational principles that create order from development complexity. 1. **Explicit error handling control flow**: Utilities like `attempt`/`attemptSync` return `[value, error]` tuples, which eliminates the need for try/catch— no more invisible error paths due to nested logic. This makes tests, retries, and fallbacks straightforward and more legible. -2. **Resilience is a primary concern**: `retry`, `withTimeout`, `circuitBreaker`, and `rateLimit` are available as primitives. The `FetchEngine` adds timeouts, retries, backoff, and gives you abstractions to handle common patterns (e.g., honor `Retry-After`) on top of the standard Fetch API. +2. **Resilience is a primary concern**: `retry`, `withTimeout`, `circuitBreaker`, and `rateLimit` are available as primitives. 
`FetchEngine` adds timeouts, retries, backoff, and gives you abstractions to handle common patterns (e.g., honor `Retry-After`) on top of the standard Fetch API. 3. **It offers observability, but more advanced**: `ObserverEngine` provides typed topics, regex subscriptions, async iteration, and priority queues so you can coordinate workloads. TypeScript and debugging aren't an afterthought. @@ -73,24 +73,23 @@ import { attempt } from '@logosdx/utils' const api = new FetchEngine({ baseUrl: 'https://api.example.com', retry: { maxAttempts: 3, baseDelay: 250 }, - timeoutMs: 5_000, + totalTimeout: 5_000, defaultType: 'json', headers: { 'Content-Type': 'application/json', }, - state: { - userId: '123', - }, }); -const [res, err] = await attempt(() => api.get('/orders')) +api.state.set({ userId: '123' }); + +const [response, err] = await attempt(() => api.get('/orders')) if (isFetchError(err)) { // Handle error appropriately } // Or use the global instance -const [res, err] = await attempt(() => Fetch.get('https://logosdx.dev/cheat-sheet.html')); +const [response, err] = await attempt(() => Fetch.get('https://logosdx.dev/cheat-sheet.html')); ``` The `ObserverEngine` is a mature event system with typed topics, regex subscriptions, async iteration, and priority queues. It's a great way to coordinate work across your application. Use it to build event-driven systems, background work, and more. 
@@ -160,7 +159,7 @@ const api = new FetchEngine< AppState, >({ baseUrl: window.location.origin, - modifyOptions: (opts, state) => { + modifyConfig: async (opts, state) => { if (opts.url.includes('/api/')) { opts.headers['X-Client-Version'] = '1.0.0'; @@ -179,7 +178,7 @@ const api = new FetchEngine< return opts; }, - modifyMethodOptions: { + modifyMethodConfig: { POST: (opts) => { const tag = $('meta[name="csrf-token"]').pop(); @@ -190,7 +189,7 @@ const api = new FetchEngine< } }); -api.on('fetch-state-set', (state) => storage.set('apiState', state)); +api.on('state-set', (state) => storage.set('apiState', state)); const app = async () => { @@ -198,7 +197,7 @@ const app = async () => { if (apiErr) throw apiErr; - api.setState(apiState); // Restore the state after a refresh + api.state.set(apiState); // Restore the state after a refresh bus.emit('app:ready'); } diff --git a/llm-helpers/fetch.md b/llm-helpers/fetch.md index 0387ed4..5e0e024 100644 --- a/llm-helpers/fetch.md +++ b/llm-helpers/fetch.md @@ -34,14 +34,14 @@ console.log('User:', user); console.log('Rate limit:', response.headers['x-rate-limit-remaining']); // Global instance (simplified usage) -import fetch, { get, post, setState, addHeader } from '@logosdx/fetch'; +import fetch, { get, post, headers, params, state, config, on, off } from '@logosdx/fetch'; // Global instance auto-uses current domain as base URL const [{ data: users }, err] = await attempt(() => fetch.get('/api/users')); // Or use destructured methods -addHeader('Authorization', 'Bearer token'); -setState('userId', '123'); +headers.set('Authorization', 'Bearer token'); +state.set('userId', '123'); const [{ data: user }, err] = await attempt(() => get('/api/users/123')); // Smart URL handling - absolute URLs bypass base URL @@ -94,14 +94,14 @@ const request = api.get('/users'); setTimeout(() => request.abort('User cancelled'), 2000); // Dynamic request modification -api.changeModifyOptions(fn?: (opts: RequestOpts, state: S) => RequestOpts) 
-api.changeModifyMethodOptions(method: HttpMethods, fn?: (opts: RequestOpts, state: S) => RequestOpts) +api.config.set('modifyConfig', fn?: (opts: RequestOpts, state: S) => RequestOpts) +api.config.set('modifyMethodConfig', { [method]: fn?: (opts: RequestOpts, state: S) => RequestOpts }) ``` ## Configuration ```typescript -interface FetchEngine.Options { +interface FetchEngine.Config { baseUrl: string; defaultType?: 'json' | 'text' | 'blob' | 'arrayBuffer' | 'formData'; @@ -135,12 +135,12 @@ interface FetchEngine.Options { } | false; // Request modification (initial setup) - modifyOptions?: (opts: RequestOpts, state: S) => RequestOpts; - modifyMethodOptions?: { + modifyConfig?: (opts: RequestOpts, state: S) => RequestOpts; + modifyMethodConfig?: { GET?: (opts: RequestOpts, state: S) => RequestOpts; // ... other methods }; - // Note: Use changeModifyOptions() and changeModifyMethodOptions() for runtime changes + // Note: Use config.set('modifyConfig', fn) and config.set('modifyMethodConfig', {...}) for runtime changes // Validation validate?: { @@ -191,11 +191,11 @@ if (isFetchError(error)) { } // Lifecycle events -api.on('fetch-error', (event: FetchEvent) => { +api.on('error', (event: FetchEvent) => { console.error('Request failed:', event.error); }); -api.on('fetch-retry', (event: FetchEvent) => { +api.on('retry', (event: FetchEvent) => { console.log(`Retrying attempt ${event.nextAttempt} after ${event.delay}ms`); }); ``` @@ -204,38 +204,45 @@ api.on('fetch-retry', (event: FetchEvent) => { ```typescript // Headers -api.addHeader('Authorization', 'Bearer new-token'); -api.addHeader({ 'X-API-Version': 'v2', 'X-Client': 'web' }); -api.addHeader('Content-Type', 'application/json', 'POST'); // method-specific -api.rmHeader('Authorization'); -api.rmHeader(['X-API-Version', 'X-Client']); -api.hasHeader('Authorization'); // boolean +api.headers.set('Authorization', 'Bearer new-token'); +api.headers.set({ 'X-API-Version': 'v2', 'X-Client': 'web' }); 
+api.headers.set('Content-Type', 'application/json', 'POST'); // method-specific +api.headers.remove('Authorization'); +api.headers.remove(['X-API-Version', 'X-Client']); +api.headers.has('Authorization'); // boolean // Parameters -api.addParam('version', 'v1'); -api.addParam({ api_key: 'abc123', format: 'json' }); -api.addParam('page', '1', 'GET'); // method-specific -api.rmParams('version'); -api.hasParam('api_key'); // boolean +api.params.set('version', 'v1'); +api.params.set({ api_key: 'abc123', format: 'json' }); +api.params.set('page', '1', 'GET'); // method-specific +api.params.remove('version'); +api.params.has('api_key'); // boolean // Access current configuration -const { default: globalHeaders, get: getHeaders } = api.headers; -const { default: globalParams, get: getParams } = api.params; +api.headers.defaults; // Default headers (global) +api.headers.all; // All headers including method overrides +api.headers.resolve('GET'); // Resolved headers for a specific method -// With global instance and destructured methods -import { addHeader, addParam, rmHeader, hasHeader, changeModifyOptions, changeModifyMethodOptions } from '@logosdx/fetch'; -addHeader('X-API-Key', 'key123'); -addParam('version', 'v1'); +api.params.defaults; // Default params (global) +api.params.all; // All params including method overrides +api.params.resolve('GET'); // Resolved params for a specific method + +// With global instance and destructured managers +import { headers, params, config } from '@logosdx/fetch'; +headers.set('X-API-Key', 'key123'); +params.set('version', 'v1'); // Dynamic request modification -changeModifyOptions((opts, state) => { +config.set('modifyConfig', (opts, state) => { opts.headers['X-Request-ID'] = crypto.randomUUID(); return opts; }); -changeModifyMethodOptions('POST', (opts, state) => { - opts.headers['Content-Type'] = 'application/json'; - return opts; +config.set('modifyMethodConfig', { + POST: (opts, state) => { + opts.headers['Content-Type'] = 
'application/json'; + return opts; + } }); ``` @@ -243,20 +250,20 @@ changeModifyMethodOptions('POST', (opts, state) => { ```typescript // Internal state for auth tokens, user context, etc. -api.setState('authToken', 'bearer-token-123'); -api.setState({ +api.state.set('authToken', 'bearer-token-123'); +api.state.set({ userId: '456', sessionId: 'abc', preferences: { theme: 'dark' } }); -const state = api.getState(); // deep clone -api.resetState(); // clear all state +const currentState = api.state.get(); // deep clone +api.state.reset(); // clear all state // Use state in request modification const api = new FetchEngine({ baseUrl: 'https://api.example.com', - modifyOptions: (opts, state) => { + modifyConfig: (opts, state) => { if (state.authToken) { opts.headers.Authorization = `Bearer ${state.authToken}`; } @@ -282,47 +289,48 @@ if (response.status === 200) { ```typescript enum FetchEventNames { // Request lifecycle - 'fetch-before' = 'fetch-before', - 'fetch-after' = 'fetch-after', - 'fetch-abort' = 'fetch-abort', - 'fetch-error' = 'fetch-error', - 'fetch-response' = 'fetch-response', - 'fetch-retry' = 'fetch-retry', + 'before-request' = 'before-request', + 'after-request' = 'after-request', + 'abort' = 'abort', + 'error' = 'error', + 'response' = 'response', + 'retry' = 'retry', // Configuration changes - 'fetch-header-add' = 'fetch-header-add', - 'fetch-header-remove' = 'fetch-header-remove', - 'fetch-param-add' = 'fetch-param-add', - 'fetch-param-remove' = 'fetch-param-remove', - 'fetch-state-set' = 'fetch-state-set', - 'fetch-state-reset' = 'fetch-state-reset', - 'fetch-url-change' = 'fetch-url-change', - 'fetch-modify-options-change' = 'fetch-modify-options-change', - 'fetch-modify-method-options-change' = 'fetch-modify-method-options-change', + 'header-add' = 'header-add', + 'header-remove' = 'header-remove', + 'param-add' = 'param-add', + 'param-remove' = 'param-remove', + 'state-set' = 'state-set', + 'state-reset' = 'state-reset', + 'url-change' = 
'url-change', + 'config-change' = 'config-change', + 'modify-config-change' = 'modify-config-change', + 'modify-method-config-change' = 'modify-method-config-change', // Deduplication events - 'fetch-dedupe-start' = 'fetch-dedupe-start', // New request tracked - 'fetch-dedupe-join' = 'fetch-dedupe-join', // Caller joined existing + 'dedupe-start' = 'dedupe-start', // New request tracked + 'dedupe-join' = 'dedupe-join', // Caller joined existing // Caching events - 'fetch-cache-hit' = 'fetch-cache-hit', // Fresh cache hit - 'fetch-cache-stale' = 'fetch-cache-stale', // Stale cache hit (SWR) - 'fetch-cache-miss' = 'fetch-cache-miss', // No cache entry - 'fetch-cache-set' = 'fetch-cache-set', // New cache entry stored - 'fetch-cache-revalidate' = 'fetch-cache-revalidate', // SWR background refresh started - 'fetch-cache-revalidate-error' = 'fetch-cache-revalidate-error', // SWR background refresh failed + 'cache-hit' = 'cache-hit', // Fresh cache hit + 'cache-stale' = 'cache-stale', // Stale cache hit (SWR) + 'cache-miss' = 'cache-miss', // No cache entry + 'cache-set' = 'cache-set', // New cache entry stored + 'cache-revalidate' = 'cache-revalidate', // SWR background refresh started + 'cache-revalidate-error' = 'cache-revalidate-error', // SWR background refresh failed // Rate limiting events - 'fetch-ratelimit-wait' = 'fetch-ratelimit-wait', // Waiting for token - 'fetch-ratelimit-reject' = 'fetch-ratelimit-reject', // Rejected (waitForToken: false) - 'fetch-ratelimit-acquire' = 'fetch-ratelimit-acquire' // Token acquired + 'ratelimit-wait' = 'ratelimit-wait', // Waiting for token + 'ratelimit-reject' = 'ratelimit-reject', // Rejected (waitForToken: false) + 'ratelimit-acquire' = 'ratelimit-acquire' // Token acquired } // Event listeners api.on('*', (event) => console.log('Any event:', event.type)); -api.on('fetch-before', (event) => console.log('Request starting:', event.url)); -api.on('fetch-error', (event) => console.error('Request failed:', event.error)); 
-api.off('fetch-error', errorHandler); // remove listener +api.on('before-request', (event) => console.log('Request starting:', event.url)); +api.on('error', (event) => console.error('Request failed:', event.error)); +api.off('error', errorHandler); // remove listener ``` ## Request Deduplication @@ -359,8 +367,8 @@ const api = new FetchEngine({ }); // Events -api.on('fetch-dedupe-start', (e) => console.log('New request:', e.key)); -api.on('fetch-dedupe-join', (e) => console.log('Joined request:', e.key, 'waiters:', e.waitingCount)); +api.on('dedupe-start', (e) => console.log('New request:', e.key)); +api.on('dedupe-join', (e) => console.log('Joined request:', e.key, 'waiters:', e.waitingCount)); ``` ### Deduplication Types @@ -418,11 +426,11 @@ const api = new FetchEngine({ }); // Cache events -api.on('fetch-cache-hit', (e) => console.log('Cache hit:', e.key, 'stale:', e.isStale)); -api.on('fetch-cache-miss', (e) => console.log('Cache miss:', e.key)); -api.on('fetch-cache-set', (e) => console.log('Cached:', e.key, 'expires in:', e.expiresIn)); -api.on('fetch-cache-stale', (e) => console.log('Stale:', e.key)); -api.on('fetch-cache-revalidate', (e) => console.log('Revalidating:', e.key)); +api.on('cache-hit', (e) => console.log('Cache hit:', e.key, 'stale:', e.isStale)); +api.on('cache-miss', (e) => console.log('Cache miss:', e.key)); +api.on('cache-set', (e) => console.log('Cached:', e.key, 'expires in:', e.expiresIn)); +api.on('cache-stale', (e) => console.log('Stale:', e.key)); +api.on('cache-revalidate', (e) => console.log('Revalidating:', e.key)); // Cache invalidation API await api.clearCache(); // Clear all @@ -493,9 +501,9 @@ const api = new FetchEngine({ // So cached responses don't consume rate limit tokens // Events -api.on('fetch-ratelimit-wait', (e) => console.log('Waiting for token:', e.key, e.waitTimeMs)); -api.on('fetch-ratelimit-reject', (e) => console.log('Rate limited:', e.key)); -api.on('fetch-ratelimit-acquire', (e) => console.log('Token 
acquired:', e.key, 'remaining:', e.currentTokens)); +api.on('ratelimit-wait', (e) => console.log('Waiting for token:', e.key, e.waitTimeMs)); +api.on('ratelimit-reject', (e) => console.log('Rate limited:', e.key)); +api.on('ratelimit-acquire', (e) => console.log('Token acquired:', e.key, 'remaining:', e.currentTokens)); ``` ### Rate Limiting Types @@ -721,10 +729,10 @@ const api = new FetchEngine({ }); // Environment switching -api.changeBaseUrl('https://api.staging.com'); +api.config.set('baseUrl', 'https://api.staging.com'); // Dynamic request modification -api.changeModifyOptions((opts, state) => { +api.config.set('modifyConfig', (opts, state) => { if (state.authToken) { opts.headers.Authorization = `Bearer ${state.authToken}`; } @@ -732,14 +740,16 @@ api.changeModifyOptions((opts, state) => { return opts; }); -api.changeModifyMethodOptions('POST', (opts, state) => { - opts.headers['Content-Type'] = 'application/json'; - return opts; +api.config.set('modifyMethodConfig', { + POST: (opts, state) => { + opts.headers['Content-Type'] = 'application/json'; + return opts; + } }); // Clear modifiers -api.changeModifyOptions(undefined); -api.changeModifyMethodOptions('POST', undefined); +api.config.set('modifyConfig', undefined); +api.config.set('modifyMethodConfig', { POST: undefined }); // Per-request options const [response, err] = await attempt(() => @@ -816,8 +826,8 @@ const api = new FetchEngine< }); // Global instance automatically gets the extended types -import { setState, getState, get, put, post, patch, del, options } from '@logosdx/fetch'; -setState('authToken', 'token123'); // ✅ Typed +import { state, get, put, post, patch, del, options } from '@logosdx/fetch'; +state.set('authToken', 'token123'); // Typed // Response is properly typed with FetchResponse including typed config const response = await get('/api/user'); @@ -856,8 +866,8 @@ await api.get('/users'); // throws: "Cannot make requests on destroyed FetchEngi // Listener cleanup - Option 1: Use on() 
with cleanup function (recommended) // Listeners added via on() are automatically removed when destroy() is called -const cleanup1 = api.on('fetch-error', (e) => console.error(e)); -const cleanup2 = api.on('fetch-response', (e) => console.log(e)); +const cleanup1 = api.on('error', (e) => console.error(e)); +const cleanup2 = api.on('response', (e) => console.log(e)); // Manual cleanup (if you stored the cleanup functions) cleanup1(); @@ -870,20 +880,20 @@ api.destroy(); const errorHandler = (e) => console.error(e); const responseHandler = (e) => console.log(e); -api.on('fetch-error', errorHandler); -api.on('fetch-response', responseHandler); +api.on('error', errorHandler); +api.on('response', responseHandler); // Remove specific listeners manually -api.off('fetch-error', errorHandler); -api.off('fetch-response', responseHandler); +api.off('error', errorHandler); +api.off('response', responseHandler); api.destroy(); // Listener cleanup - Option 3: Use addEventListener with your own AbortController // For advanced use cases where you need fine-grained control const controller = new AbortController(); -api.addEventListener('fetch-error', errorHandler, { signal: controller.signal }); -api.addEventListener('fetch-response', responseHandler, { signal: controller.signal }); +api.addEventListener('error', errorHandler, { signal: controller.signal }); +api.addEventListener('response', responseHandler, { signal: controller.signal }); // Remove all listeners at once controller.abort(); @@ -896,8 +906,8 @@ class MyComponent { // on() returns cleanup function and automatically cleaned on destroy() this.cleanups = [ - this.api.on('fetch-error', this.handleError), - this.api.on('fetch-response', this.handleResponse) + this.api.on('error', this.handleError), + this.api.on('response', this.handleResponse) ]; } @@ -928,7 +938,7 @@ class MyComponent { const api = new FetchEngine({ baseUrl: process.env.API_BASE_URL, defaultType: 'json', - timeout: 5000, + totalTimeout: 5000, // 
Deduplication - prevent duplicate concurrent requests dedupePolicy: { @@ -969,7 +979,7 @@ const api = new FetchEngine({ }); // Global error handling -api.on('fetch-error', (event) => { +api.on('error', (event) => { // Log to monitoring service console.error('API Error:', { url: event.url, @@ -979,15 +989,15 @@ api.on('fetch-error', (event) => { }); }); -api.on('fetch-retry', (event) => { - console.log(`Retry ${event.nextAttempt}/${api.retry.maxAttempts} after ${event.delay}ms`); +api.on('retry', (event) => { + console.log(`Retry ${event.nextAttempt}/${api.config.get('retry.maxAttempts')} after ${event.delay}ms`); }); // Dynamic state management -api.setState('authToken', await getAuthToken()); +api.state.set('authToken', await getAuthToken()); -// Use changeModifyOptions for dynamic auth token injection -api.changeModifyOptions((opts, state) => { +// Use config.set for dynamic auth token injection +api.config.set('modifyConfig', (opts, state) => { if (state.authToken) { opts.headers.Authorization = `Bearer ${state.authToken}`; } @@ -996,7 +1006,7 @@ api.changeModifyOptions((opts, state) => { // Environment switching if (process.env.NODE_ENV === 'development') { - api.changeBaseUrl('https://api.dev.com'); + api.config.set('baseUrl', 'https://api.dev.com'); } // AbortablePromise with timeout diff --git a/llm-helpers/observer.md b/llm-helpers/observer.md index 1a12e1a..624b6e1 100644 --- a/llm-helpers/observer.md +++ b/llm-helpers/observer.md @@ -76,13 +76,17 @@ observer.emit(/^user:/, { type: 'broadcast' }) ## EventGenerator - Async Iteration +Events are buffered internally in FIFO order. No events are dropped even if +they arrive faster than the consumer iterates. 
+ ```ts // Generator from on() without callback const userEvents = observer.on('user:login') -// Async iteration +// Async iteration (events buffered while doing async work between iterations) for await (const loginData of userEvents) { console.log('User logged in:', loginData.userId) + await saveToDatabase(loginData) // buffered events won't be lost here if (shouldStop) { userEvents.cleanup() @@ -94,6 +98,12 @@ for await (const loginData of userEvents) { const loginData = await userEvents.next() console.log(loginData) // { userId: string; timestamp: number } +// Events emitted before next() is called are also buffered +observer.emit('user:login', first) +observer.emit('user:login', second) +await userEvents.next() // first +await userEvents.next() // second + // Generator properties userEvents.lastValue // last received value userEvents.done // boolean diff --git a/package.json b/package.json index bde36d2..9a48a5a 100644 --- a/package.json +++ b/package.json @@ -13,14 +13,14 @@ "dts": "node ./scripts/build.mjs", "build:docs": "bash ./scripts/docs.zsh", "docs:dev": "vitepress dev docs", - "docs:build": "zx scripts/build-llm-txt.mjs && vitepress build docs", + "docs:build": "vitepress build docs && zx scripts/build-llm-txt.mjs", "docs:preview": "vitepress preview docs" }, "dependencies": { "@changesets/cli": "^2.29.4", "@swc/cli": "^0.7.7", "@swc/core": "^1.11.29", - "@types/node": "^24.10.7", + "@types/node": "^24.10.9", "tsx": "^4.19.4", "typedoc": "^0.28.4", "typescript": "^5.8.2" @@ -54,7 +54,7 @@ "typedoc-plugin-mdn-links": "^5.0.3", "typedoc-plugin-missing-exports": "^4.0.0", "vite": "^7", - "vitepress": "2.0.0-alpha.15", + "vitepress": "2.0.0-alpha.16", "zx": "^8.8.5" } } \ No newline at end of file diff --git a/packages/fetch/src/engine.ts b/packages/fetch/src/engine.ts deleted file mode 100644 index e38feb0..0000000 --- a/packages/fetch/src/engine.ts +++ /dev/null @@ -1,2671 +0,0 @@ -import { - assert, - assertOptional, - clone, - attempt, - wait, - 
SingleFlight, - Deferred, - type Func, -} from '@logosdx/utils'; - -import { ObserverEngine } from '@logosdx/observer'; - -import { - type _InternalHttpMethods, - type HttpMethodOpts, - type HttpMethods, - type MethodHeaders, - type RetryConfig, - type FetchResponse, - type FetchConfig, - type CacheRule, -} from './types.ts'; - -import { - FetchError, - fetchTypes, - validateOptions, - DEFAULT_RETRY_CONFIG, -} from './helpers.ts'; - -import { DedupePolicy } from './policies/dedupe.ts'; -import { CachePolicy } from './policies/cache.ts'; -import { RateLimitPolicy } from './policies/rate-limit.ts'; -import { PropertyStore, type MethodOverrides } from './property-store.ts'; - -/** - * Internal normalized request options - flat structure used throughout FetchEngine. - * - * This is the single source of truth for all request data, flowing to: - * - Cache/dedupe serializers (satisfies RequestKeyOptions) - * - Event data (spread directly into events) - * - Request execution (#attemptCall → #makeCall) - */ -type InternalReqOptions = { - // === Request identity (satisfies RequestKeyOptions) === - /** HTTP method (uppercase) */ - method: HttpMethods; - /** Original request path */ - path: string; - /** Request payload/body */ - payload?: unknown; - /** Merged headers (instance + method + request) */ - headers: FetchEngine.Headers; - /** URL parameters as flat object (from url.searchParams) */ - params: FetchEngine.Params

; - /** Instance state */ - state: S; - - // === URL (native URL class) === - /** Fully-constructed URL - source of truth for path + params */ - url: URL; - - // === Execution plumbing === - /** AbortSignal for request cancellation */ - signal: AbortSignal; - /** AbortController for cancelling the request (parent controller for totalTimeout) */ - controller: AbortController; - /** Serialized body for fetch() */ - body?: BodyInit | undefined; - - /** - * @deprecated Use `totalTimeout` instead. - * Request timeout in ms (alias for totalTimeout) - */ - timeout?: number | undefined; - - /** - * Total timeout for the entire request lifecycle in ms. - * Applies to all retry attempts combined. - */ - totalTimeout?: number | undefined; - - /** - * Per-attempt timeout in ms. - * Each retry gets a fresh timeout and controller. - */ - attemptTimeout?: number | undefined; - - /** - * Function to get whether totalTimeout has fired. - * Returns true if the parent timeout has fired. - */ - getTotalTimeoutFired?: (() => boolean) | undefined; - - /** Retry configuration */ - retry?: RetryConfig | false | undefined; - /** Custom response type determination */ - determineType?: FetchEngine.DetermineTypeFn | undefined; - - // === Callbacks === - onBeforeRequest?: FetchEngine.CallOptions['onBeforeReq'] | undefined; - onAfterRequest?: FetchEngine.CallOptions['onAfterReq'] | undefined; - onError?: FetchEngine.CallOptions['onError'] | undefined; - - // === Runtime state === - /** - * Current attempt number (used in retry logic and events). - * Named differently from the `attempt` utility function to avoid shadowing. - */ - attempt?: number | undefined; -} - -/** - * Creates a wrapper around `window.fetch` that allows - * certain overrides of default fetch options. Implements - * an abort controller per request that can be intercepted - * using `opts.signal.abort()`. 
- * - * Provides resilient HTTP client with retry logic, request/response - * interception, and comprehensive error handling for production - * applications that need reliable API communication. - * - * * See abort controller: - * * * https://developer.mozilla.org/en-US/docs/Web/API/AbortSignal - * * * https://github.com/facebook/react-native/blob/0.67-stable/packages/rn-tester/js/examples/XHR/XHRExampleAbortController.js - * - * @template H - Type of request headers - * @template P - Type of request params - * @template S - Type of instance state - * @template RH - Type of response headers - * - * @example - * // Basic setup with error handling - * const api = new FetchEngine({ - * baseUrl: 'https://api.example.com', - * defaultType: 'json', - * headers: { 'Authorization': 'Bearer token' } - * }); - * - * const [user, err] = await attempt(() => api.get('/users/123')); - * if (err) { - * console.error('Failed to fetch user:', err); - * return; - * } - * - * @example - * // Advanced setup with retry and validation - * const api = new FetchEngine({ - * baseUrl: 'https://api.example.com', - * retry: { - * maxAttempts: 3, - * baseDelay: 1000, - * shouldRetry: (error) => error.status >= 500 - * }, - * validate: { - * headers: (headers) => { - * if (!headers.Authorization) { - * throw new Error('Authorization header required'); - * } - * } - * } - * }); - */ -export class FetchEngine< - H = FetchEngine.InstanceHeaders, - P = FetchEngine.InstanceParams, - S = FetchEngine.InstanceState, - RH = FetchEngine.InstanceResponseHeaders, -> extends ObserverEngine> { - - /** - * Symbol to use the default value or configuration - * for a given option. For example, if you want to - * handle the response type yourself, you can set the - * `determineType` option to a function that returns - * the type of the response, or you can return the - * `FetchEngine.useDefault` to use the internal logic. 
- * - * Allows custom type determination logic to fall back to - * the built-in content-type detection when needed. - * - * @example - * const api = new FetchEngine({ - * baseUrl: 'https://api.example.com', - * determineType: (response) => { - * // Custom logic for special endpoints - * if (response.url.includes('/download')) { - * return 'blob'; - * } - * // Fall back to default detection - * return FetchEngine.useDefault; - * } - * }); - */ - static useDefault = Symbol('useDefault'); - - #baseUrl: URL; - #options: Partial; - #headerStore: PropertyStore>; - #paramStore: PropertyStore>; - #type: FetchEngine.Type; - - #modifyOptions?: FetchEngine.Options['modifyOptions']; - #modifyMethodOptions?: HttpMethodOpts['modifyOptions']>; - - #validate?: FetchEngine.Options['validate']; - - #instanceAbortController = new AbortController(); - - /** - * For saving values that may be needed to craft requests as the - * application progresses; for example: as you login, you get a - * token of some sort which is used to generate an hmac. - * - * Maintains request context across multiple API calls, such as - * authentication tokens, session data, or user preferences that - * need to be included in subsequent requests. - * - * @example - * const api = new FetchEngine({ - * baseUrl: 'https://api.example.com' - * }); - * - * // Store auth token after login - * api.setState('authToken', 'bearer-token-123'); - * - * // Use token in subsequent requests - * api.addHeader('Authorization', `Bearer ${api.getState().authToken}`); - */ - #state: S = {} as S; - - #retry: Required; - - /** - * SingleFlight instance for deduplication and caching. 
- * @internal Used by policies - */ - _flight = new SingleFlight(); - - // Policies - initialized with engine reference (dedupe and cache need it for _flight access) - #dedupePolicy = new DedupePolicy(this); - #cachePolicy = new CachePolicy(this); - #rateLimitPolicy = new RateLimitPolicy(); - - - /** - * Get the internal policies used by the FetchEngine instance. - * For internal use and debugging only. - */ - protected $policies() { - - return { - dedupe: this.#dedupePolicy, - cache: this.#cachePolicy, - rateLimit: this.#rateLimitPolicy, - flight: this._flight - } - } - - get #destroyed() { - - return this.#instanceAbortController.signal.aborted; - } - - /** - * Removes a header from the `FetchEngine` instance - */ - removeHeader: FetchEngine['rmHeader']; - - /** - * Removes a param from the `FetchEngine` instance - */ - removeParam: FetchEngine['rmParams']; - - #validateHeaders(headers: FetchEngine.Headers, method?: HttpMethods) { - - if (this.#validate?.headers) { - - this.#validate.headers( - headers, - method?.toUpperCase() as _InternalHttpMethods - ); - } - } - - /** - * Validates parameters using the configured validation function. - * - * Ensures request parameters meet requirements before making requests, - * allowing custom validation logic for data integrity. - * - * @param params - Parameters to validate - * @param method - HTTP method for context-specific validation - * @internal - */ - #validateParams(params: FetchEngine.Params

, method?: HttpMethods) { - - if (this.#validate?.params) { - - this.#validate.params( - params, - method?.toUpperCase() as _InternalHttpMethods - ); - } - } - - /** - * Validates state using the configured validation function. - * - * Ensures internal state meets requirements when updated, - * allowing custom validation logic for state consistency. - * - * @param state - State to validate - * @internal - */ - #validateState(state: S) { - - if (this.#validate?.state) { - - this.#validate.state(state); - } - } - - /** - * Determines the response type based on content-type header or custom logic. - * - * Analyzes the response content-type to determine how to parse the response. - * Supports custom type determination through the determineType option, - * with fallback to built-in content-type detection logic. - * - * @param response - Fetch Response object to analyze - * @returns Object with type and isJson flag - * @throws {FetchError} When content-type is unknown and no fallback is available - * @internal - * - * @example - * // Custom type determination - * const api = new FetchEngine({ - * baseUrl: 'https://api.example.com', - * determineType: (response) => { - * if (response.url.includes('/csv')) return 'text'; - * if (response.url.includes('/download')) return 'blob'; - * return FetchEngine.useDefault; // Use built-in detection - * } - * }); - */ - #determineType(response: Response): { - type: FetchEngine.Type, - isJson: boolean - } { - - if (this.#options.determineType) { - - const type = this.#options.determineType(response); - - if (FetchEngine.useDefault !== type) { - - if (!fetchTypes.includes(type as FetchEngine.Type)) { - - console.warn(`Invalid type: '${type}'. 
Defaulting to '${this.#type}'`); - - return { - type: this.#type, - isJson: this.#type === 'json' - } - } - - return { - type: type as FetchEngine.Type, - isJson: type === 'json' - }; - } - } - - const contentType = ( - response.headers.get('content-type') || - response.headers.get('Content-Type') || - '' - ); - - if (contentType) { - - if (/text|xml|html|form-urlencoded/.test(contentType)) { - - return { type: 'text', isJson: false }; - } - else if (/json/.test(contentType)) { - - return { type: 'json', isJson: true }; - } - else if (/form-data/.test(contentType)) { - - return { type: 'formData', isJson: false }; - } - else if (/image|audio|video|font|binary|application/.test(contentType)) { - - return { type: 'blob', isJson: false }; - } - else { - - throw new FetchError( - 'Unknown content type: ' + - contentType + - ' You may need to set the "determineType" option' + - ' to customize how the response is parsed.' - ); - } - } - - return { type: this.#type, isJson: this.#type === 'json' }; - } - - - /** - * Initializes a new FetchEngine instance with the provided configuration. - * - * Sets up the HTTP client with base URL, default settings, retry configuration, - * and validation rules. Validates all options and establishes the initial state. 
- * - * @param _opts - Configuration options for the FetchEngine instance - * @throws {Error} When validation fails for required options - * - * @example - * const api = new FetchEngine({ - * baseUrl: 'https://api.example.com', - * defaultType: 'json', - * headers: { 'Authorization': 'Bearer token' }, - * retry: { - * maxAttempts: 3, - * baseDelay: 1000 - * } - * }); - */ - constructor(_opts: FetchEngine.Options) { - - // Extract ObserverEngine options and pass to super - super({ - name: _opts.name, - spy: _opts.spy as any - }); - - validateOptions(_opts); - - const { baseUrl, defaultType, name: _name, spy: _spy, ...opts } = _opts; - let { retry } = _opts; - - if (retry === true) { - retry = {} - } - - this.#baseUrl = new URL(baseUrl); - this.#type = defaultType || 'json'; - this.#retry = { - ...DEFAULT_RETRY_CONFIG, - ...( - retry ? retry : { - maxAttempts: 0 - } - ) - }; - - const { - modifyOptions, - modifyMethodOptions, - validate, - ...rest - } = opts; - - this.#options = rest; - - // Initialize header store with defaults and method overrides - const normalizedMethodHeaders = Object.fromEntries( - Object.keys(opts.methodHeaders || {}).map( - (method) => ([method.toLowerCase(), opts.methodHeaders![method as never]]) - ) - ) as MethodHeaders; - - this.#headerStore = new PropertyStore>({ - defaults: opts.headers || {} as FetchEngine.Headers, - methodOverrides: normalizedMethodHeaders, - ...(validate?.headers && { validate: validate.headers }) - }); - - // Initialize param store with defaults and method overrides - const normalizedMethodParams = Object.fromEntries( - Object.keys(opts.methodParams || {}).map( - (method) => ([method.toLowerCase(), opts.methodParams![method as never]]) - ) - ) as HttpMethodOpts

; - - this.#paramStore = new PropertyStore>({ - defaults: opts.params || {} as FetchEngine.Params

, - methodOverrides: normalizedMethodParams as MethodOverrides>, - ...(validate?.params && { validate: validate.params }) - }); - - this.#modifyOptions = modifyOptions; - this.#modifyMethodOptions = modifyMethodOptions!; - this.#validate = validate; - - this.removeHeader = this.rmHeader.bind(this) as FetchEngine['rmHeader']; - this.removeParam = this.rmParams.bind(this) as FetchEngine['rmParams']; - - // Initialize policies - this.#dedupePolicy.init(opts.dedupePolicy); - this.#rateLimitPolicy.init(opts.rateLimitPolicy); - this.#cachePolicy.init(opts.cachePolicy); - - // Initialize SingleFlight with adapter if provided - if (opts.cachePolicy && opts.cachePolicy !== true) { - - this._flight = new SingleFlight({ - adapter: opts.cachePolicy.adapter, - defaultTtl: this.#cachePolicy.defaultTtl, - defaultStaleIn: this.#cachePolicy.defaultStaleIn - }); - } - } - - - /** - * Calculate delay for retry attempt using exponential backoff. - * - * Implements exponential backoff strategy to prevent overwhelming - * servers during retry attempts. Supports both fixed and dynamic - * delay calculations based on error conditions. - * - * @param attemptNo - Current attempt number (1-based) - * @param retry - Retry configuration with delay settings - * @param error - Optional error for dynamic delay calculation - * @returns Delay in milliseconds before next retry attempt - * @internal - * - * @example - * // Exponential backoff: 1000ms, 2000ms, 4000ms, 8000ms... 
- * const delay = this.#calculateRetryDelay(3, { - * baseDelay: 1000, - * maxDelay: 10000, - * useExponentialBackoff: true - * }); - * // Returns 4000ms for 3rd attempt - */ - #calculateRetryDelay(attemptNo: number, retry: Required): number { - - const { baseDelay, maxDelay, useExponentialBackoff } = retry; - - if (!useExponentialBackoff) return Math.min(baseDelay, maxDelay!); - - const delay = baseDelay * Math.pow(2, attemptNo - 1); - - return Math.min(delay, maxDelay!); - } - - /** - * Merges default headers with method-specific and override headers. - * - * Combines instance headers, method-specific headers, and request-specific - * overrides to create the final header set for a request. Applies - * formatting rules to ensure consistent header casing. - * - * @param override - Request-specific header overrides - * @param method - HTTP method for method-specific headers - * @returns Merged and formatted headers - * @internal - * - * @example - * // Merges: default headers + POST headers + request overrides - * const headers = this.#makeHeaders( - * { 'X-Request-ID': '123' }, - * 'POST' - * ); - */ - #makeHeaders(override: FetchEngine.Headers = {}, method?: HttpMethods) { - - return this.#headerStore.resolve(method || 'GET', override); - } - - /** - * Merges default parameters with method-specific and override parameters. - * - * Combines instance parameters, method-specific parameters, and request-specific - * overrides to create the final parameter set for a request URL. - * - * @param override - Request-specific parameter overrides - * @param method - HTTP method for method-specific parameters - * @returns Merged parameters - * @internal - * - * @example - * // Merges: default params + GET params + request overrides - * const params = this.#makeParams( - * { page: 2 }, - * 'GET' - * ); - */ - #makeParams(override: FetchEngine.Params

= {}, method?: HttpMethods) { - - return this.#paramStore.resolve(method || 'GET', override); - } - - /** - * Constructs the full URL by combining base URL, path, and parameters. - * - * Builds the complete request URL by merging the base URL with the path - * and appending merged parameters. Handles existing query parameters - * in the path and merges them with instance and method-specific parameters. - * - * @param path - Request path (may include existing query parameters) - * @param _params - Request-specific parameters to merge - * @param method - HTTP method for method-specific parameters - * @returns Complete URL with merged parameters - * @internal - * - * @example - * // Input: path='/users?page=1', params={limit: 10} - * // Output: 'https://api.example.com/users?page=1&limit=10' - * const url = this.#makeUrl('/users?page=1', { limit: 10 }, 'GET'); - */ - #makeUrl(path: string, _params?: P, method?: HttpMethods) { - - if (path.startsWith('http')) { - - const url = new URL(path); - const params = this.#makeParams(_params!, method); - - Object.entries(params).forEach(([key, value]) => { - - url.searchParams.set(key, value as string); - }); - - return url; - } - - path = path?.replace(/^\/{1,}/, ''); - if (path[0] !== '/') (path = `/${path}`); - - const fullPath = this.#baseUrl.toString().replace(/\/$/, ''); - const params = this.#makeParams(_params!, method); - - const url = new URL(fullPath + path); - - for (const [key, value] of Object.entries(params)) { - - url.searchParams.set(key, value as string); - } - - if (this.#validate?.perRequest?.params) { - - this.#validateParams( - Object.fromEntries(url.searchParams.entries()) as FetchEngine.Params

, - method - ); - } - - return url; - } - - #makeRequestOptions ( - _method: HttpMethods, - path: string, - options: ( - FetchEngine.CallOptions & - { - payload?: unknown, - controller: AbortController, - attempt?: number - } - ) - ): InternalReqOptions { - - const { - payload, - controller, - onAfterReq: onAfterRequest, - onBeforeReq: onBeforeRequest, - onError, - timeout = this.#options.timeout, - attemptTimeout, - getTotalTimeoutFired, - params, - attempt: attemptNum, - signal, - determineType, - retry, - headers: requestHeaders, - ...rest - } = options as typeof options & { - attemptTimeout?: number; - getTotalTimeoutFired?: () => boolean; - }; - - const type = this.#type; - const state = this.#state; - const modifyOptions = this.#modifyOptions; - const modifyMethodOptions = this.#modifyMethodOptions; - const method = _method.toUpperCase() as _InternalHttpMethods; - const url = this.#makeUrl(path, params as P, method); - - // Merge headers (instance + method + request-level) - let headers = this.#makeHeaders(requestHeaders, method); - - // Build body for mutating methods - let body: BodyInit | undefined; - - if (/put|post|patch|delete/i.test(method)) { - - // Check if payload is already a valid BodyInit type that doesn't need serialization - const isValidBodyInit = ( - payload === null || - payload === undefined || - typeof payload === 'string' || - payload instanceof Blob || - payload instanceof ArrayBuffer || - payload instanceof FormData || - payload instanceof URLSearchParams || - payload instanceof ReadableStream || - ArrayBuffer.isView(payload) - ); - - if (type === 'json' && !isValidBodyInit) { - // JSON.stringify any object, array, or primitive that isn't already a valid BodyInit - body = JSON.stringify(payload); - } - else if (payload !== null && payload !== undefined) { - body = payload as BodyInit; - } - } - - // Build opts for modifyOptions compatibility (temporary structure) - // Note: spread rest first, then explicit properties to ensure they 
aren't overwritten - // Note: RequestOpts extends RequestInit which expects body: BodyInit | null - let opts: FetchEngine.RequestOpts = { - ...rest, - method, - signal: signal || controller.signal, - controller, - headers, - body: body ?? null, - timeout, - retry, - determineType, - }; - - // Apply global modifyOptions - opts = modifyOptions - ? modifyOptions(opts as never, state) - : opts; - - // Apply method-specific modifyOptions - const methodSpecificModify = modifyMethodOptions?.[method] as typeof modifyOptions; - - if (methodSpecificModify) { - opts = methodSpecificModify(opts as never, state); - } - - // Extract final values after modification - headers = (opts.headers || {}) as FetchEngine.Headers; - body = opts.body ?? undefined; - - if (this.#validate?.perRequest?.headers) { - - this.#validateHeaders(headers, method); - } - - // Return flat structure - the normalized options IS the context - return { - // Request identity (satisfies RequestKeyOptions) - method, - path, - payload, - headers, - params: Object.fromEntries(url.searchParams.entries()) as FetchEngine.Params

, - state, - - // URL - url, - - // Execution plumbing - signal: opts.signal || controller.signal, - controller, - body, - timeout: opts.timeout, - attemptTimeout, - getTotalTimeoutFired, - retry: opts.retry === true ? {} : opts.retry, - determineType: opts.determineType || this.#options.determineType, - - // Callbacks - onBeforeRequest, - onAfterRequest, - onError, - - // Runtime state - attempt: attemptNum - }; - } - - #extractRetry(opts: InternalReqOptions) { - - // retry is already normalized (true converted to {} in #makeRequestOptions) - return opts.retry; - } - - #handleError( - normalizedOpts: InternalReqOptions, - errorOpts: { - error: FetchError | Error, - step: 'fetch' | 'parse' | 'response', - status?: number, - data?: unknown - } - ) { - - const { - method, - path, - headers, - controller, - onError, - attempt: attemptNum - } = normalizedOpts; - - const { - error, - step, - status, - data - } = errorOpts; - - const aborted = controller.signal.aborted; - - let err = error as FetchError<{}, H>; - - if (step === 'fetch') { - - err = new FetchError(err.message); - - err.status = 499; - err.message = err.message || 'Fetch error'; - } - - if (step === 'parse') { - - err = new FetchError(err.message); - - err.status = status || 999; - err.message = err.message || 'Parse error'; - } - - if (step === 'response') { - - const asAgg = error as AggregateError; - let errors = asAgg.errors as Error[]; - let errCode = ''; - - // Handle undici errors - if ( - !errors || - errors.length === 0 && - error.cause instanceof AggregateError - ) { - - errors = (error.cause as AggregateError)?.errors as Error[]; - } - - if ((error as any)?.code) { - errCode = (error as any).code; - } - - if (errors && errors.length > 0) { - - const msgs = errors.map((e) => e.message).join('; '); - - err = new FetchError(`${errCode}: ${msgs}`); - } - else { - - err = new FetchError(error.message); - } - } - - err.attempt = attemptNum; - err.status = err.status || status!; - err.method = 
err.method || method!; - err.path = err.path || path!; - err.aborted = err.aborted || aborted; - err.data = err.data || data as null; - err.step = err.step || step; - err.headers = (err.headers || headers) as H; - - // Emit error event with normalizedOpts as base - // normalizedOpts already contains attempt, so just add error-specific fields - const eventData = { - ...normalizedOpts, - error: err, - step, - status, - aborted, - data - }; - - if (aborted) { - - this.emit('fetch-abort', eventData as any); - } - else { - - this.emit('fetch-error', eventData as any); - } - - onError && onError(err); - - throw err; - } - - /** - * Makes an API call using fetch with retry logic and returns enhanced response object. - * - * Performs the actual HTTP request and returns a comprehensive FetchResponse - * object containing the parsed data, response headers, status code, request - * details, and configuration used. This provides full context about the - * request and response for better debugging and conditional logic. - * - * @param options - Flat normalized request options (InternalReqOptions) - * @returns FetchResponse object with data, headers, status, request, and config - * @internal - */ - async #makeCall ( - options: InternalReqOptions - ): Promise> { - - const { - // Request identity - method, - headers: reqHeaders, - params, - - // URL - url, - - // Execution plumbing - signal, - controller, - body, - timeout, - retry, - determineType, - - // Callbacks - onBeforeRequest, - onAfterRequest - } = options; - - // Emit fetch-before with flat options (normalizedOpts already contains attempt) - this.emit('fetch-before', { - ...options - } as any); - - // Build RequestOpts for callbacks (legacy compatibility) - // Note: RequestOpts extends RequestInit which expects body: BodyInit | null - const callbackOpts: FetchEngine.RequestOpts = { - method, - signal, - controller, - headers: reqHeaders, - body: body ?? 
null, - timeout, - retry, - determineType - }; - - onBeforeRequest && await onBeforeRequest(callbackOpts); - - // Build fetch options - only include what native fetch understands - // Note: RequestInit expects body: BodyInit | null, we use undefined internally - const fetchOpts: RequestInit = { - method, - signal, - headers: reqHeaders, - body: body ?? null - }; - - const [response, resErr] = await attempt(async () => { - return await fetch(url, fetchOpts) as Response; - }); - - // Fetch will only throw if the request is aborted, - // denied, timed out, reset, etc. - if (resErr) { - - this.#handleError(options, { - error: resErr, - step: 'fetch' - }); - - // #handleError throws, so this should never be reached - throw resErr; - } - - this.emit('fetch-after', { - ...options, - response: response.clone(), - } as any); - - onAfterRequest && await onAfterRequest(response.clone(), callbackOpts); - - const [data, parseErr] = await attempt(async () => { - - const { type, isJson } = this.#determineType(response); - - if (isJson) { - - const text = await response.text(); - - if (text) { - return JSON.parse(text) as Res; - } - - return null; - } - else { - - return await response[type]() as Res; - } - }); - - if (parseErr) { - - this.#handleError(options, { - error: parseErr, - step: 'parse', - status: response.status, - data - }); - - // #handleError throws, so this should never be reached - throw parseErr; - } - - if (response.ok === false) { - - this.#handleError(options, { - error: new FetchError(response.statusText), - step: 'response', - status: response.status, - data - }); - - // #handleError throws, so this should never be reached - throw new FetchError(response.statusText); - } - - // Emit fetch-response (normalizedOpts already contains attempt) - this.emit('fetch-response', { - ...options, - response, - data - } as any); - - const config: FetchConfig = { - baseUrl: this.#baseUrl.toString(), - timeout, - method, - headers: reqHeaders as H, - params: params as P, - 
retry: this.#retry, - determineType: determineType, - }; - - // Create the Request object for the response - const request = new Request(url, fetchOpts); - - // Convert response headers to plain object for typed access - const responseHeaders = {} as Partial; - - response.headers.forEach((value, key) => { - - responseHeaders[key as keyof ResHdr] = value as ResHdr[keyof ResHdr]; - }); - - // Return the enhanced response object - return { - data: data!, - headers: responseHeaders, - status: response.status, - request, - config - } - } - - async #attemptCall( - options: InternalReqOptions - ): Promise> { - - const mergedRetry = { - ...this.#retry, - ...this.#extractRetry(options) - }; - - if (mergedRetry.maxAttempts === 0) { - - const [result, err] = await attempt( - async () => this.#makeCall(options) - ); - - if (err) { - - // Set timedOut flag if the abort was caused by totalTimeout - if ((err as FetchError).aborted && options.getTotalTimeoutFired?.()) { - - (err as FetchError).timedOut = true; - } - - throw err; - } - - return result; - } - - let _attempt = 1; - let lastError: FetchError | undefined; - - while (_attempt <= mergedRetry.maxAttempts!) { - - // Check if parent (totalTimeout) already aborted - stop retrying - if (options.controller.signal.aborted) { - - const err = lastError ?? new FetchError('Request aborted by totalTimeout'); - err.timedOut = options.getTotalTimeoutFired?.() ?? 
false; - throw err; - } - - // Create child controller for this attempt if using attemptTimeout - let attemptController: AbortController; - let attemptTimeoutPromise: ReturnType | undefined; - let attemptTimeoutFired = false; - - if (options.attemptTimeout !== undefined) { - - attemptController = new AbortController(); - - // Link child to parent - if parent aborts, child aborts - // Using { once: true } for auto-cleanup when listener fires - options.controller.signal.addEventListener('abort', () => { - - attemptTimeoutPromise?.clear(); - attemptController.abort(); - }, { once: true }); - - // Set up per-attempt timeout - attemptTimeoutPromise = wait(options.attemptTimeout); - attemptTimeoutPromise.then(() => { - - attemptTimeoutFired = true; - attemptController.abort(); - }); - } - else { - - attemptController = options.controller; - } - - const [result, err] = await attempt( - async () => ( - this.#makeCall({ - ...options, - controller: attemptController, - signal: attemptController.signal, - attempt: _attempt - }) - ) - ); - - // Always cleanup attempt timeout (success or failure) - attemptTimeoutPromise?.clear(); - - if (err === null) { - - return result; - } - - lastError = err as FetchError; - - // Set timedOut flag only when a timeout actually fired - // (not for manual abort or server disconnect) - if (lastError.aborted) { - - const totalTimeoutFired = options.getTotalTimeoutFired?.() ?? false; - - if (attemptTimeoutFired || totalTimeoutFired) { - - lastError.timedOut = true; - } - } - - // If parent controller aborted (totalTimeout), don't retry - if (options.controller.signal.aborted) { - - throw lastError; - } - - // Check if we should retry - const shouldRetry = await mergedRetry.shouldRetry(lastError, _attempt); - - if (shouldRetry && _attempt < mergedRetry.maxAttempts!) { - - // If shouldRetry is a number, use it as the delay - // Otherwise, calculate the delay using the default logic - const delay = ( - typeof shouldRetry === 'number' ? 
- shouldRetry : - this.#calculateRetryDelay(_attempt, mergedRetry) - ); - - this.emit('fetch-retry', { - ...options, - error: lastError, - attempt: _attempt, - nextAttempt: _attempt + 1, - delay - } as any); - - await wait(delay); - - // Check if parent controller aborted during the delay - if (options.controller.signal.aborted) { - - // Update timedOut flag if totalTimeout fired during delay - if (options.getTotalTimeoutFired?.()) { - - lastError.timedOut = true; - } - - throw lastError; - } - - _attempt++; - continue; - } - - throw lastError; - } - - // This should never be reached - all paths should either return or throw - throw new FetchError('Unexpected end of retry logic'); - } - - /** - * Returns all the headers configured for this instance, - * including the method specific headers. - */ - get headers() { - - return this.#headerStore.all as { - readonly default: Readonly>, - readonly GET?: Readonly>, - readonly POST?: Readonly>, - readonly PUT?: Readonly>, - readonly DELETE?: Readonly>, - readonly OPTIONS?: Readonly>, - readonly PATCH?: Readonly>, - } - } - - /** - * Returns all the params configured for this instance, - * including the method specific params. - */ - get params() { - - return this.#paramStore.all as { - readonly default: Readonly>, - readonly GET?: Readonly>, - readonly POST?: Readonly>, - readonly PUT?: Readonly>, - readonly DELETE?: Readonly>, - readonly OPTIONS?: Readonly>, - readonly PATCH?: Readonly>, - } - } - - /** - * Makes an HTTP request with comprehensive error handling and retry logic. - * - * Executes HTTP requests with automatic retry on failure, timeout handling, - * and abort controller support. Returns an enhanced response object containing - * parsed data, HTTP metadata, request details, and configuration used. - * - * @param method - HTTP method (GET, POST, PUT, DELETE, etc.) 
- * @param path - Request path relative to base URL - * @param options - Request options including payload, timeout, and callbacks - * @returns AbortablePromise that resolves to FetchResponse object with data and metadata - * - * @example - * // Access response data and metadata - * const [response, err] = await attempt(() => - * api.request('GET', '/users/123') - * ); - * if (err) { - * console.error('Request failed:', err.status, err.message); - * return; - * } - * - * console.log('User data:', response.data); - * console.log('Status:', response.status); - * console.log('Headers:', response.headers.get('content-type')); - * - * @example - * // Destructure just the data for backward compatibility - * const { data: user } = await api.request('GET', '/users/123'); - * - * @example - * // Request with payload and timeout - * const request = api.request('POST', '/users', { - * payload: { name: 'John', email: 'john@example.com' }, - * timeout: 5000, - * onBeforeReq: (opts) => console.log('Making request:', opts), - * onError: (err) => console.error('Request error:', err) - * }); - * - * // Abort request if needed - * setTimeout(() => request.abort('User cancelled'), 2000); - */ - request ( - method: HttpMethods, - path: string, - options: ( - FetchEngine.CallOptions & - ({ payload: Data | null } | {}) - ) = { payload: null } - ): FetchEngine.AbortablePromise> { - - // Prevent requests on destroyed instances (memory leak prevention) - if (this.#destroyed) { - - throw new Error('Cannot make requests on destroyed FetchEngine instance'); - } - - const controller = options.abortController ?? 
new AbortController(); - - // Resolve timeout options with fallback chain: - // Request-level totalTimeout/timeout → Instance-level totalTimeout/timeout - const opts = options as FetchEngine.CallOptions & { - totalTimeout?: number; - attemptTimeout?: number; - }; - const instanceOpts = this.#options as Partial & { - totalTimeout?: number; - attemptTimeout?: number; - }; - - const totalTimeoutMs = opts.totalTimeout ?? opts.timeout ?? instanceOpts.totalTimeout ?? instanceOpts.timeout; - const attemptTimeoutMs = opts.attemptTimeout ?? instanceOpts.attemptTimeout; - - if (typeof totalTimeoutMs === 'number') { - - assert(totalTimeoutMs >= 0, 'totalTimeout must be non-negative number'); - } - - if (typeof attemptTimeoutMs === 'number') { - - assert(attemptTimeoutMs >= 0, 'attemptTimeout must be non-negative number'); - } - - // Track if totalTimeout fires (for timedOut flag on errors) - let totalTimeoutFired = false; - - // Set up totalTimeout on the parent controller - const totalTimeout = typeof totalTimeoutMs === 'number' ? wait(totalTimeoutMs) : undefined; - - totalTimeout?.then(() => { - - totalTimeoutFired = true; - controller.abort(); - }); - - // Abort this request when the instance is destroyed - const instanceSignal = this.#instanceAbortController.signal; - - if (!instanceSignal.aborted) { - - const onInstanceAbort = () => controller.abort(); - instanceSignal.addEventListener('abort', onInstanceAbort, { once: true }); - } - - // Execute async logic and wrap as AbortablePromise - const promise = this.#executeRequest( - method, - path, - options, - controller, - totalTimeout, - attemptTimeoutMs, - () => totalTimeoutFired - ); - - const call = this.#wrapAsAbortable>( - promise.then((res) => { - - call.isFinished = true; - return res; - }), - controller - ); - - return call; - } - - - /** - * Executes the request with cache checking, deduplication, and actual fetch. 
- */ - async #executeRequest( - method: HttpMethods, - path: string, - options: FetchEngine.CallOptions & { payload?: unknown }, - controller: AbortController, - totalTimeout: ReturnType | undefined, - attemptTimeoutMs: number | undefined, - getTotalTimeoutFired: () => boolean - ): Promise> { - - const onAfterReq = (...args: any[]) => { - - totalTimeout?.clear(); - options.onAfterReq?.apply(this, args as never); - }; - - const onError = (...args: any[]) => { - - totalTimeout?.clear(); - options.onError?.apply(this, args as never); - }; - - // normalizedOpts IS the context - single source of truth - const normalizedOpts = this.#makeRequestOptions( - method, - path, - { - ...options, - onAfterReq, - onError, - controller, - attemptTimeout: attemptTimeoutMs, - getTotalTimeoutFired - } as any - ); - - // === Rate Limit Check === - // Rate limiting MUST come first - before any network activity or cache lookups - // that might trigger background revalidation - await this.#rateLimitPolicy.executeGuard({ - method, - path, - normalizedOpts, - controller, - emit: (event, data) => this.emit(event as any, data as any), - clearTimeout: () => totalTimeout?.clear(), - createAbortError: (message) => { - - const err = new FetchError(message); - err.aborted = true; - err.method = normalizedOpts.method; - err.path = path; - err.status = 0; - err.step = 'fetch'; - return err; - } - }); - - // === Cache Check === - const cacheResult = await this.#cachePolicy.checkCache>({ - method, - path, - normalizedOpts, - options, - clearTimeout: () => totalTimeout?.clear() - }); - - let cacheKey: string | null = null; - let cacheConfig: CacheRule | null = null; - - if (cacheResult?.hit) { - - return cacheResult.value; - } - - if (cacheResult && !cacheResult.hit) { - - cacheKey = cacheResult.key; - cacheConfig = cacheResult.config; - } - - // === Deduplication Check === - const dedupeResult = this.#dedupePolicy.checkInflight>({ - method, - path, - normalizedOpts - }); - - let dedupeKey: string | 
null = null; - let cleanup: (() => void) | null = null; - - if (dedupeResult?.joined) { - - return this.#awaitWithIndependentTimeout(dedupeResult.promise, controller, totalTimeout, normalizedOpts.method, path); - } - - if (dedupeResult && !dedupeResult.joined) { - - dedupeKey = dedupeResult.key; - } - - // === Execute Request === - // Use Deferred to register the promise BEFORE starting the request - // This prevents race conditions where multiple concurrent requests - // check getInflight() before any have called trackInflight() - let deferred: Deferred> | null = null; - - if (dedupeKey) { - - deferred = new Deferred>(); - - // Attach a no-op catch handler to prevent unhandled rejection warnings - // when the promise is rejected but no one is listening (no joiners) - deferred.promise.catch(() => { /* handled by the request flow */ }); - - cleanup = this._flight.trackInflight(dedupeKey, deferred.promise); - } - - const requestPromise = this.#attemptCall(normalizedOpts); - - const [res, err] = await attempt(() => requestPromise); - - totalTimeout?.clear(); - - if (err) { - - deferred?.reject(err); - cleanup?.(); - throw err; - } - - deferred?.resolve(res); - cleanup?.(); - - if (cacheKey && cacheConfig) { - - await this._flight.setCache(cacheKey, res, { - ttl: cacheConfig.ttl, - staleIn: cacheConfig.staleIn - }); - - this.#cachePolicy.markActive(cacheKey); - - this.emit('fetch-cache-set', { - ...normalizedOpts, - key: cacheKey, - expiresIn: cacheConfig.ttl, - } as any); - } - - return res; - } - - - /** - * Awaits a shared promise with independent timeout/abort for the joiner. 
- */ - #awaitWithIndependentTimeout( - sharedPromise: Promise, - controller: AbortController, - timeout: ReturnType | undefined, - method: string, - path: string - ): Promise { - - const deferred = new Deferred(); - let isSettled = false; - - const settle = (fn: () => void) => { - - if (isSettled) return; - isSettled = true; - timeout?.clear(); - fn(); - }; - - const createJoinerError = (message: string): FetchError => { - - const err = new FetchError(message); - err.aborted = true; - err.method = method as HttpMethods; - err.path = path; - err.status = 0; - err.step = 'fetch'; - - return err; - }; - - timeout?.then(() => { - - settle(() => deferred.reject(createJoinerError('Request timed out (joiner)'))); - }); - - controller.signal.addEventListener('abort', () => { - - settle(() => deferred.reject(createJoinerError('Request aborted (joiner)'))); - }, { once: true }); - - sharedPromise - .then((value) => settle(() => deferred.resolve(value))) - .catch((error) => settle(() => deferred.reject(error))); - - return deferred.promise; - } - - - /** - * Triggers a background revalidation for stale-while-revalidate. - * Fire and forget - errors are emitted as events, not propagated. 
- * - * @internal Used by CachePolicy - */ - async _triggerBackgroundRevalidation( - method: HttpMethods, - path: string, - options: FetchEngine.CallOptions & { payload?: unknown }, - cacheKey: string, - cacheConfig: CacheRule - ): Promise { - - // Prevent multiple concurrent revalidations for the same key - if (this.#cachePolicy.isRevalidating(cacheKey)) { - - return; - } - - this.#cachePolicy.markRevalidating(cacheKey); - - // Build normalized options for the background request - const controller = new AbortController(); - const normalizedOpts = this.#makeRequestOptions(method, path, { - ...options, - controller - }); - - this.emit('fetch-cache-revalidate', { - ...normalizedOpts, - key: cacheKey - } as any); - - const [res, fetchErr] = await attempt(() => - this.#attemptCall(normalizedOpts) - ); - - this.#cachePolicy.unmarkRevalidating(cacheKey); - - if (fetchErr) { - - this.emit('fetch-cache-revalidate-error', { - ...normalizedOpts, - key: cacheKey, - error: fetchErr - } as any); - - return; - } - - const [, cacheErr] = await attempt(() => ( - - this._flight.setCache(cacheKey, res, { - ttl: cacheConfig.ttl, - staleIn: cacheConfig.staleIn - }) - )); - - if (cacheErr) { - - this.emit('fetch-cache-revalidate-error', { - ...normalizedOpts, - key: cacheKey, - error: cacheErr - } as any); - - return; - } - - this.#cachePolicy.markActive(cacheKey); - - this.emit('fetch-cache-set', { - ...normalizedOpts, - key: cacheKey, - expiresIn: cacheConfig.ttl - } as any); - } - - - /** - * Wraps a promise with AbortablePromise properties. 
- */ - #wrapAsAbortable( - promise: Promise, - controller: AbortController - ): FetchEngine.AbortablePromise { - - const abortable = promise as FetchEngine.AbortablePromise; - - Object.defineProperty(abortable, 'isAborted', { - get: () => controller.signal.aborted, - }); - - abortable.isFinished = false; - abortable.abort = (reason?: string) => controller.abort(reason); - - return abortable; - } - - - /** - * Makes an OPTIONS request to check server capabilities. - * - * Convenience method for OPTIONS requests, commonly used for CORS - * preflight checks and discovering server capabilities. - * - * @param path - Request path relative to base URL - * @param options - Request options - * @returns AbortablePromise that resolves to response data - * - * @example - * const [capabilities, err] = await attempt(() => - * api.options('/users') - * ); - */ - options (path: string, options: FetchEngine.CallOptions = {}) { - - return this.request ('options', path, options); - } - - /** - * Makes a GET request to retrieve data. - * - * Convenience method for GET requests, the most common HTTP method - * for retrieving data from APIs. Returns an enhanced response object - * containing parsed data, headers, status, and request context. 
- * - * @param path - Request path relative to base URL - * @param options - Request options - * @returns AbortablePromise that resolves to FetchResponse object - * - * @example - * // Access full response details - * const [response, err] = await attempt(() => - * api.get('/users?page=1&limit=10') - * ); - * if (err) return; - * - * console.log('Users:', response.data); - * console.log('Status:', response.status); - * console.log('Content-Type:', response.headers['content-type']); - * - * @example - * // Destructure just the data - * const { data: users } = await api.get('/users'); - */ - get (path: string, options: FetchEngine.CallOptions = {}) { - - return this.request ('get', path, options); - } - - /** - * Makes a DELETE request to remove a resource. - * - * Convenience method for DELETE requests, typically used to remove - * resources from the server. - * - * @param path - Request path relative to base URL - * @param payload - Optional payload for the request body - * @param options - Request options - * @returns AbortablePromise that resolves to response data - * - * @example - * const [result, err] = await attempt(() => - * api.delete('/users/123') - * ); - */ - delete (path: string, payload: Data | null = null, options: FetchEngine.CallOptions = {}) { - - return this.request ('delete', path, { ...options, payload }); - } - - /** - * Makes a POST request to create a new resource. - * - * Convenience method for POST requests, typically used to create - * new resources on the server. Returns an enhanced response object - * containing parsed data, headers, status, and request context. 
- * - * @param path - Request path relative to base URL - * @param payload - Data to send in the request body - * @param options - Request options - * @returns AbortablePromise that resolves to FetchResponse object - * - * @example - * // Access full response details - * const [response, err] = await attempt(() => - * api.post('/users', { - * name: 'John Doe', - * email: 'john@example.com' - * }) - * ); - * if (err) return; - * - * console.log('Created user:', response.data); - * console.log('Location header:', response.headers['location']); - * - * @example - * // Destructure just the data - * const { data: newUser } = await api.post('/users', userData); - */ - post (path: string, payload: Data | null = null, options: FetchEngine.CallOptions = {}) { - - return this.request ('post', path, { ...options, payload }); - } - - /** - * Makes a PUT request to replace a resource. - * - * Convenience method for PUT requests, typically used to completely - * replace an existing resource on the server. - * - * @param path - Request path relative to base URL - * @param payload - Data to send in the request body - * @param options - Request options - * @returns AbortablePromise that resolves to response data - * - * @example - * const [updatedUser, err] = await attempt(() => - * api.put('/users/123', { - * name: 'Jane Doe', - * email: 'jane@example.com' - * }) - * ); - */ - put (path: string, payload: Data | null = null, options: FetchEngine.CallOptions = {}) { - - return this.request ('put', path, { ...options, payload }); - } - - /** - * Makes a PATCH request to partially update a resource. - * - * Convenience method for PATCH requests, typically used to partially - * update an existing resource on the server. 
- * - * @param path - Request path relative to base URL - * @param payload - Partial data to update in the request body - * @param options - Request options - * @returns AbortablePromise that resolves to response data - * - * @example - * const [updatedUser, err] = await attempt(() => - * api.patch('/users/123', { - * email: 'newemail@example.com' - * }) - * ); - */ - patch (path: string, payload: Data | null = null, options: FetchEngine.CallOptions = {}) { - - return this.request ('patch', path, { ...options, payload }); - } - - /** - * Adds headers to the FetchEngine instance for use in requests. - * - * Supports adding individual headers, multiple headers at once, and - * method-specific headers. Headers can be added globally or for - * specific HTTP methods to provide fine-grained control over - * request headers. - * - * @param headers - Header name, object of headers, or header name with value - * @param value - Header value (when adding single header) - * @param method - Optional HTTP method for method-specific headers - * - * @example - * // Add single header globally - * api.addHeader('Authorization', 'Bearer token'); - * - * // Add multiple headers globally - * api.addHeader({ - * 'Content-Type': 'application/json', - * 'X-API-Version': 'v1' - * }); - * - * // Add method-specific headers - * api.addHeader('Content-Type', 'application/json', 'POST'); - * api.addHeader({ - * 'X-Request-ID': '123', - * 'X-User-ID': '456' - * }, 'GET'); - */ - addHeader(name: K, value: H[K], method?: _InternalHttpMethods): void - addHeader(name: string, value: string, method?: _InternalHttpMethods): void - addHeader(headers: FetchEngine.Headers, method?: _InternalHttpMethods): void - addHeader( - headers: ( - FetchEngine.Headers | - keyof H | - string - ), - value?: string | H[keyof H], - method?: _InternalHttpMethods - ) { - - assert( - (typeof headers === 'string' && !!value) || - typeof headers === 'object', - 'addHeader requires a string and value or an object' - ); - - 
assertOptional( - method, - !!method && typeof method === 'string', - 'addHeader requires a string method' - ); - - if (typeof headers === 'string') { - - assert( - typeof value !== 'undefined', - 'addHeader requires a value when setting a single property' - ); - - this.#headerStore.set(headers, value, method); - } - else { - - // When headers is an object, value might be the method - const actualMethod = method || value as _InternalHttpMethods; - this.#headerStore.set(headers as Partial>, actualMethod); - } - - const updated = method - ? this.#headerStore.forMethod(method) - : this.#headerStore.defaults; - - this.emit('fetch-header-add', { - state: this.#state, - data: { - headers, - value, - updated, - method - } - }); - } - - /** - * Removes headers from the FetchEngine instance. - * - * Supports removing individual headers, multiple headers at once, and - * method-specific headers. Headers can be removed globally or for - * specific HTTP methods to provide fine-grained control over - * request headers. 
- * - * @param headers - Header name, array of header names, or object with header names - * @param method - Optional HTTP method for method-specific header removal - * - * @example - * // Remove single header globally - * api.rmHeader('Authorization'); - * - * // Remove multiple headers globally - * api.rmHeader(['Content-Type', 'X-API-Version']); - * - * // Remove method-specific headers - * api.rmHeader('Content-Type', 'POST'); - * api.rmHeader(['X-Request-ID', 'X-User-ID'], 'GET'); - * - * // Remove headers by object reference - * const headersToRemove = { 'Content-Type': true, 'Authorization': true }; - * api.rmHeader(headersToRemove); - */ - rmHeader (headers: keyof H, method?: _InternalHttpMethods): void - rmHeader (headers: (keyof H)[], method?: _InternalHttpMethods): void - rmHeader (headers: string, method?: _InternalHttpMethods): void - rmHeader (headers: string[], method?: _InternalHttpMethods): void - rmHeader (headers: unknown, method?: _InternalHttpMethods): void { - - if (!headers) { - return; - } - - // Normalize to array of keys - let keys: string[]; - - if (typeof headers === 'string') { - keys = [headers]; - } - else if (Array.isArray(headers)) { - keys = headers as string[]; - } - else { - keys = Object.keys(headers as object); - } - - this.#headerStore.remove(keys, method); - - const updated = method - ? this.#headerStore.forMethod(method) - : this.#headerStore.defaults; - - this.emit('fetch-header-remove', { - state: this.#state, - data: { - headers, - updated, - method, - } - }); - } - - /** - * Checks if a header is configured for the FetchEngine instance. - * - * Determines whether a specific header exists in the global headers - * or method-specific headers, allowing validation of header configuration. 
- * - * @param name - Header name to check - * @param method - Optional HTTP method for method-specific header check - * @returns True if the header exists, false otherwise - * - * @example - * // Check global headers - * if (api.hasHeader('Authorization')) { - * console.log('Auth header is configured'); - * } - * - * // Check method-specific headers - * if (api.hasHeader('Content-Type', 'POST')) { - * console.log('POST requests have Content-Type header'); - * } - */ - hasHeader(name: K, method?: _InternalHttpMethods): boolean - hasHeader(name: string, method?: _InternalHttpMethods): boolean - hasHeader(name: string, method?: _InternalHttpMethods): boolean { - - return this.#headerStore.has(name, method); - } - - /** - * Adds parameters to the FetchEngine instance for use in request URLs. - * - * Supports adding individual parameters, multiple parameters at once, and - * method-specific parameters. Parameters can be added globally or for - * specific HTTP methods to provide fine-grained control over URL parameters. - * - * @param params - Parameter name, object of parameters, or parameter name with value - * @param value - Parameter value (when adding single parameter) - * @param method - Optional HTTP method for method-specific parameters - * - * @example - * // Add single parameter globally - * api.addParam('version', 'v1'); - * - * // Add multiple parameters globally - * api.addParam({ - * 'api_key': 'abc123', - * 'format': 'json' - * }); - * - * // Add method-specific parameters - * api.addParam('page', '1', 'GET'); - * api.addParam({ - * 'limit': '10', - * 'sort': 'name' - * }, 'GET'); - */ - addParam(name: K, value: P[K], method?: _InternalHttpMethods): void - addParam(name: string, value: string, method?: _InternalHttpMethods): void - addParam(params: FetchEngine.Params

, method?: _InternalHttpMethods): void - addParam( - params: ( - FetchEngine.Params

| - keyof P | - string - ), - value?: string | P[keyof P], - method?: _InternalHttpMethods - ) { - - assert( - (typeof params === 'string' && !!value) || - typeof params === 'object', - 'addParam requires a string and value or an object' - ); - - assertOptional( - method, - !!method && typeof method === 'string', - 'addParam requires a string method' - ); - - if (typeof params === 'string') { - - assert( - typeof value !== 'undefined', - 'addParam requires a value when setting a single property' - ); - - this.#paramStore.set(params, value, method); - } - else { - - // When params is an object, value might be the method - const actualMethod = method || value as _InternalHttpMethods; - this.#paramStore.set(params as Partial>, actualMethod); - } - - const updated = method - ? this.#paramStore.forMethod(method) - : this.#paramStore.defaults; - - this.emit('fetch-param-add', { - state: this.#state, - data: { - params, - value, - updated, - method - } - }); - } - - /** - * Removes parameters from the FetchEngine instance. - * - * Supports removing individual parameters, multiple parameters at once, and - * method-specific parameters. Parameters can be removed globally or for - * specific HTTP methods to provide fine-grained control over URL parameters. 
- * - * @param params - Parameter name, array of parameter names, or object with parameter names - * @param method - Optional HTTP method for method-specific parameter removal - * - * @example - * // Remove single parameter globally - * api.rmParams('version'); - * - * // Remove multiple parameters globally - * api.rmParams(['api_key', 'format']); - * - * // Remove method-specific parameters - * api.rmParams('page', 'GET'); - * api.rmParams(['limit', 'sort'], 'GET'); - * - * // Remove parameters by object reference - * const paramsToRemove = { 'api_key': true, 'format': true }; - * api.rmParams(paramsToRemove); - */ - rmParams (params: keyof P, method?: _InternalHttpMethods): void - rmParams (params: (keyof P)[], method?: _InternalHttpMethods): void - rmParams (params: string, method?: _InternalHttpMethods): void - rmParams (params: string[], method?: _InternalHttpMethods): void - rmParams (params: unknown, method?: _InternalHttpMethods): void { - - if (!params) { - return; - } - - // Normalize to array of keys - let keys: string[]; - - if (typeof params === 'string') { - keys = [params]; - } - else if (Array.isArray(params)) { - keys = params as string[]; - } - else { - keys = Object.keys(params as object); - } - - this.#paramStore.remove(keys, method); - - const updated = method - ? this.#paramStore.forMethod(method) - : this.#paramStore.defaults; - - this.emit('fetch-param-remove', { - state: this.#state, - data: { - params, - updated, - method - } - }); - } - - /** - * Checks if a parameter is configured for the FetchEngine instance. - * - * Determines whether a specific parameter exists in the global parameters - * or method-specific parameters, allowing validation of parameter configuration. 
- * - * @param name - Parameter name to check - * @param method - Optional HTTP method for method-specific parameter check - * @returns True if the parameter exists, false otherwise - * - * @example - * // Check global parameters - * if (api.hasParam('version')) { - * console.log('Version parameter is configured'); - * } - * - * // Check method-specific parameters - * if (api.hasParam('page', 'GET')) { - * console.log('GET requests have page parameter'); - * } - */ - hasParam(name: K, method?: _InternalHttpMethods): boolean - hasParam(name: string, method?: _InternalHttpMethods): boolean - hasParam(name: string, method?: _InternalHttpMethods): boolean { - - return this.#paramStore.has(name, method); - } - - - /** - * Updates the FetchEngine instance state with new values. - * - * Merges new state values into the existing state, supporting both - * individual property updates and bulk object updates. State is used - * to maintain request context across multiple API calls. - * - * @param conf - State property name or object with state updates - * @param value - Value to set (when updating single property) - * - * @example - * // Set single state property - * api.setState('authToken', 'bearer-token-123'); - * - * // Set multiple state properties - * api.setState({ - * userId: '123', - * sessionId: 'abc', - * preferences: { theme: 'dark' } - * }); - */ - setState(name: N, value: S[N]): void - setState(conf: Partial): void - setState(conf: unknown, value?: unknown) { - - assert( - typeof conf === 'object' || typeof conf === 'string', - 'setState requires an object or string' - ); - - const updated = { - ...this.#state - }; - - if (typeof conf === 'string') { - - assert( - typeof value !== 'undefined', - 'setState requires a value when setting a single property' - ); - - updated[conf as keyof S] = value as S[keyof S]; - } - else { - - Object - .keys(conf as object) - .forEach( - (name) => { - - const key = name as keyof S; - - updated[key] = (conf as S)[key]; - } - ); - 
} - - this.#validateState(updated); - - this.#state = updated as S; - - this.emit('fetch-state-set', { - state: updated, - data: conf - }); - } - - /** - * Resets the FetchEngine instance state to an empty object. - * - * Clears all stored state values and dispatches a state reset event. - * Useful for cleaning up state when switching between different - * user sessions or contexts. - * - * @example - * // Clear all state when user logs out - * api.resetState(); - * console.log(api.getState()); // {} - */ - resetState() { - - this.#state = {} as S; - - this.#validateState(this.#state); - - this.emit('fetch-state-reset', { - state: this.#state, - }); - } - - /** - * Returns a deep clone of the FetchEngine instance state. - * - * Provides a safe copy of the current state that can be inspected - * or modified without affecting the original state. - * - * @returns Deep clone of the current state object - * - * @example - * const state = api.getState(); - * console.log('Current state:', state); - * // { authToken: 'bearer-123', userId: '456' } - */ - getState() { - - return clone(this.#state); - } - - /** - * Changes the base URL for this FetchEngine instance. - * - * Updates the base URL used for all subsequent requests and dispatches - * a URL change event. Useful for switching between different API - * environments (development, staging, production). - * - * @param url - New base URL for the FetchEngine instance - * - * @example - * // Switch to production API - * api.changeBaseUrl('https://api.production.com'); - * - * // Switch to staging API - * api.changeBaseUrl('https://api.staging.com'); - */ - changeBaseUrl(url: string) { - - this.#baseUrl = new URL(url); - - this.emit('fetch-url-change', { - state: this.#state, - data: url - }); - } - - /** - * Updates the modifyOptions function for this FetchEngine instance. - * - * Changes the global options modification function that is applied to all - * requests before they are sent. Pass undefined to clear the function. 
- * Dispatches a modify options change event when updated. - * - * @param fn - New modifyOptions function or undefined to clear - * - * @example - * // Set a global request modifier - * api.changeModifyOptions((opts, state) => { - * opts.headers = { ...opts.headers, 'X-Request-ID': crypto.randomUUID() }; - * return opts; - * }); - * - * // Clear the modifier - * api.changeModifyOptions(undefined); - */ - changeModifyOptions(fn?: FetchEngine.ModifyOptionsFn) { - - this.#modifyOptions = fn; - - this.emit('fetch-modify-options-change', { - state: this.#state, - data: fn - }); - } - - /** - * Updates the modifyOptions function for a specific HTTP method. - * - * Changes the method-specific options modification function that is applied - * to requests of the specified HTTP method before they are sent. Pass undefined - * to clear the function for that method. Dispatches a modify method options - * change event when updated. - * - * @param method - HTTP method to modify options for - * @param fn - New modifyOptions function or undefined to clear - * - * @example - * // Set a POST-specific request modifier - * api.changeModifyMethodOptions('POST', (opts, state) => { - * opts.headers = { ...opts.headers, 'Content-Type': 'application/json' }; - * return opts; - * }); - * - * // Clear the POST modifier - * api.changeModifyMethodOptions('POST', undefined); - */ - changeModifyMethodOptions(method: HttpMethods, fn?: FetchEngine.ModifyOptionsFn) { - - const normalizedMethod = method.toUpperCase() as _InternalHttpMethods; - - if (!this.#modifyMethodOptions) { - - this.#modifyMethodOptions = {}; - } - - if (fn === undefined) { - - delete this.#modifyMethodOptions[normalizedMethod]; - } - else { - - this.#modifyMethodOptions[normalizedMethod] = fn; - } - - this.emit('fetch-modify-method-options-change', { - state: this.#state, - data: { - method: normalizedMethod, - fn - } - }); - } - - // Note: on() and off() are inherited from ObserverEngine - // Use this.on('fetch-error', 
handler) or this.on(/fetch-.*/, handler) for wildcard - - // === Cache Invalidation API === - - /** - * Clears all cached responses. - * - * Removes all entries from the response cache. Does not affect - * in-flight requests tracked for deduplication. - * - * @example - * // Clear cache after user logout - * api.clearCache(); - */ - async clearCache(): Promise { - - await this._flight.clearCache(); - this.#cachePolicy.clearActiveKeys(); - } - - /** - * Deletes a specific cache entry by key. - * - * Use this when you know the exact cache key (e.g., from a cache event). - * - * @param key - The cache key to delete - * @returns true if the entry existed and was deleted - * - * @example - * // Delete specific cached response - * await api.deleteCache('GET|/users/123|undefined|{}'); - */ - async deleteCache(key: string): Promise { - - const deleted = await this._flight.deleteCache(key); - - if (deleted) { - - this.#cachePolicy.unmarkActive(key); - } - - return deleted; - } - - /** - * Invalidates cache entries matching a predicate function. - * - * Iterates through all cache keys and deletes entries where the - * predicate returns true. Useful for targeted invalidation based - * on custom logic. - * - * @param predicate - Function that returns true for keys to invalidate - * @returns Number of entries invalidated - * - * @example - * // Invalidate all user-related cache entries - * const count = await api.invalidateCache(key => key.includes('/users')); - * console.log(`Invalidated ${count} entries`); - */ - async invalidateCache(predicate: (key: string) => boolean): Promise { - - let invalidated = 0; - - for (const key of this.#cachePolicy.getActiveKeys()) { - - if (predicate(key)) { - - const deleted = await this._flight.deleteCache(key); - - if (deleted) { - - this.#cachePolicy.unmarkActive(key); - invalidated++; - } - } - } - - return invalidated; - } - - /** - * Invalidates cache entries matching a path pattern or custom predicate. 
- * - * Convenience method for invalidating cache based on URL path patterns. - * Supports string prefix matching, RegExp patterns, or a custom predicate - * function for full control over key matching (useful with custom serializers). - * - * @param patternOrPredicate - String prefix, RegExp, or predicate function - * @returns Number of entries invalidated - * - * @example - * // Invalidate all entries for a specific endpoint - * await api.invalidatePath('/users'); - * - * @example - * // Invalidate using regex pattern - * await api.invalidatePath(/\/api\/v[12]\//); - * - * @example - * // Invalidate using custom predicate (for custom serializers) - * await api.invalidatePath((key) => { - * const parsed = myCustomKeyParser(key); - * return parsed.path.startsWith('/users'); - * }); - */ - async invalidatePath(patternOrPredicate: string | RegExp | Func<[string], boolean>): Promise { - - if (typeof patternOrPredicate === 'function') { - - return this.invalidateCache(patternOrPredicate); - } - - const isRegex = patternOrPredicate instanceof RegExp; - - return this.invalidateCache((key) => { - - // Cache keys are serialized as: METHOD|/path|payload|headers - // Extract the path portion (second segment after first |) - const pipeIndex = key.indexOf('|'); - - if (pipeIndex === -1) return false; - - // Find the next pipe after the method - const secondPipeIndex = key.indexOf('|', pipeIndex + 1); - const path = secondPipeIndex === -1 - ? key.slice(pipeIndex + 1) - : key.slice(pipeIndex + 1, secondPipeIndex); - - if (!path) return false; - - if (isRegex) { - - return patternOrPredicate.test(path); - } - - return path.startsWith(patternOrPredicate); - }); - } - - /** - * Returns statistics about the cache state. - * - * Provides insight into cache usage and effectiveness. 
- * - * @returns Object with cache size and in-flight count - * - * @example - * const stats = api.cacheStats(); - * console.log(`Cache entries: ${stats.cacheSize}`); - * console.log(`In-flight requests: ${stats.inflightCount}`); - */ - cacheStats(): { cacheSize: number; inflightCount: number } { - - return this._flight.stats(); - } - - /** - * Destroys the FetchEngine instance and cleans up all resources. - * - * Marks the instance as destroyed and clears internal state references. - * After calling destroy(), the instance should not be used for new requests. - * - * **Memory Leak Prevention:** - * - Prevents new requests from being made (throws error if attempted) - * - Clears all event listeners via ObserverEngine's clear() - * - Clears internal state references - * - Marks instance as destroyed - * - * @example - * const api = new FetchEngine({ baseUrl: 'https://api.example.com' }); - * - * api.on('fetch-error', (data) => console.error(data.error)); - * api.on('fetch-response', (data) => console.log(data)); - * - * // destroy() automatically clears all listeners - * api.destroy(); - */ - destroy() { - - if (this.#destroyed) { - - console.warn('FetchEngine instance already destroyed'); - return; - } - - // Abort any ongoing requests first (this sets #destroyed to true via the getter) - this.#instanceAbortController.abort(); - - // Clear all event listeners via ObserverEngine - this.clear(); - - // Reset the flight controller to clear cache and inflight tracking - // This is synchronous and creates a new SingleFlight instance - this._flight = new SingleFlight(); - - // Clear all internal references to allow garbage collection - this.#state = {} as S; - this.#headerStore = new PropertyStore>(); - this.#paramStore = new PropertyStore>(); - this.#options = {}; - this.#baseUrl = new URL('about:blank'); - - // Clear function references (closures may capture large data) - this.#modifyOptions = undefined; - this.#modifyMethodOptions = undefined as never; - 
this.#validate = undefined; - - // Clear retry config - this.#retry = undefined as never; - - // Clear policy state - this.#rateLimitPolicy.init(); - this.#cachePolicy.init(); - this.#dedupePolicy.init(); - } - - /** - * Checks if the FetchEngine instance has been destroyed. - * - * @returns true if destroy() has been called - * - * @example - * if (!api.isDestroyed()) { - * await api.get('/users'); - * } - */ - isDestroyed(): boolean { - - return this.#destroyed; - } -} diff --git a/packages/fetch/src/engine/events.ts b/packages/fetch/src/engine/events.ts new file mode 100644 index 0000000..6f67b32 --- /dev/null +++ b/packages/fetch/src/engine/events.ts @@ -0,0 +1,251 @@ +import type { HttpMethods, DictAndT } from '../types.ts'; +import type { FetchError } from '../helpers/fetch-error.ts'; + + +/** + * Base event data payload for FetchEngine events. + * + * Contains common fields shared across all fetch-related events. + * + * @template S - Instance state type + * @template H - Instance headers type + * @template P - Instance params type + */ +export interface EventData { + + state: S; + url?: string | URL | undefined; + method?: HttpMethods | undefined; + headers?: DictAndT | undefined; + params?: DictAndT

| undefined; + error?: Error | FetchError<{}, DictAndT> | undefined; + response?: Response | undefined; + data?: unknown; + payload?: unknown; + attempt?: number | undefined; + nextAttempt?: number | undefined; + delay?: number | undefined; + step?: 'fetch' | 'parse' | 'response' | undefined; + status?: number | undefined; + path?: string | undefined; + aborted?: boolean | undefined; + + /** Timestamp (ms) when the request entered the execution pipeline */ + requestStart?: number | undefined; + + /** Timestamp (ms) when the request resolved (success, error, or abort) */ + requestEnd?: number | undefined; +} + + +/** + * Event data for deduplication events. + * + * Extends base event data with deduplication-specific fields. + * + * @template S - Instance state type + * @template H - Instance headers type + * @template P - Instance params type + */ +export interface DedupeEventData extends EventData { + + /** The generated deduplication key */ + key: string; + + /** Number of callers waiting on this request (join events only) */ + waitingCount?: number | undefined; +} + + +/** + * Event data for cache events. + * + * Extends base event data with cache-specific fields. + * + * @template S - Instance state type + * @template H - Instance headers type + * @template P - Instance params type + */ +export interface CacheEventData extends EventData { + + /** The generated cache key */ + key: string; + + /** Whether the cache entry is stale (SWR) */ + isStale?: boolean | undefined; + + /** Time until expiration (ms) */ + expiresIn?: number | undefined; +} + + +/** + * Event data for rate limit events. + * + * Extends base event data with rate limiting-specific fields. 
+ * + * @template S - Instance state type + * @template H - Instance headers type + * @template P - Instance params type + */ +export interface RateLimitEventData extends EventData { + + /** The rate limit bucket key */ + key: string; + + /** Current tokens available in the bucket */ + currentTokens: number; + + /** Maximum capacity of the bucket */ + capacity: number; + + /** Time to wait before next token is available (ms) */ + waitTimeMs: number; + + /** When the next token will be available */ + nextAvailable: Date; +} + + +/** + * Event data for state mutation events. + * + * @template S - Instance state type + */ +export interface StateEventData { + + /** Key that was set (for single key updates) */ + key?: keyof S | undefined; + + /** Value that was set */ + value?: S[keyof S] | Partial | undefined; + + /** Previous state before the change */ + previous?: S | undefined; + + /** Current state after the change */ + current: S; +} + + +/** + * Event data for property (header/param) events. + * + * @template T - Property type (headers or params) + */ +export interface PropertyEventData { + + /** Key that was added/removed */ + key?: string | string[] | undefined; + + /** Value that was set (for add events) */ + value?: string | Partial | undefined; + + /** HTTP method this change applies to (undefined = all methods) */ + method?: HttpMethods | undefined; +} + + +/** + * Event data for options change events. + */ +export interface OptionsEventData { + + /** Path that was changed (for path-based sets) */ + path?: string | undefined; + + /** Value that was set */ + value?: unknown; +} + + +/** + * Event data for modifyOptions change events. + */ +export interface ModifyOptionsEventData { + + /** The new modifyOptions function (or undefined if cleared) */ + fn?: ((opts: unknown, state: unknown) => unknown) | undefined; +} + + +/** + * Event data for modifyMethodOptions change events. 
+ */ +export interface ModifyMethodOptionsEventData { + + /** HTTP method this change applies to */ + method: string; + + /** The new modifyOptions function for the method (or undefined if cleared) */ + fn?: ((opts: unknown, state: unknown) => unknown) | undefined; +} + + +/** + * Event map for FetchEngine - maps event names to their data types. + * + * Event names have been simplified by removing the `fetch-` prefix. + * This provides cleaner API while maintaining full type safety. + * + * @template S - Instance state type + * @template H - Instance headers type + * @template P - Instance params type + * + * @example + * ```typescript + * // Subscribe to events + * engine.on('before-request', (data) => console.log('Request starting:', data.url)); + * engine.on('cache-hit', (data) => console.log('Cache hit:', data.key)); + * engine.on('state-set', (data) => console.log('State changed:', data.current)); + * ``` + */ +export interface EventMap { + + // Request lifecycle events + 'before-request': EventData; + 'after-request': EventData; + 'abort': EventData; + 'error': EventData; + 'response': EventData; + 'retry': EventData; + + // Property mutation events + 'header-add': PropertyEventData>; + 'header-remove': PropertyEventData>; + 'param-add': PropertyEventData>; + 'param-remove': PropertyEventData>; + + // State mutation events + 'state-set': StateEventData; + 'state-reset': StateEventData; + + // Configuration change events + 'config-change': OptionsEventData; + 'modify-config-change': ModifyOptionsEventData; + 'modify-method-config-change': ModifyMethodOptionsEventData; + 'url-change': { url: string; state: S }; + + // Deduplication events + 'dedupe-start': DedupeEventData; + 'dedupe-join': DedupeEventData; + + // Cache events + 'cache-hit': CacheEventData; + 'cache-stale': CacheEventData; + 'cache-miss': CacheEventData; + 'cache-set': CacheEventData; + 'cache-revalidate': CacheEventData; + 'cache-revalidate-error': CacheEventData; + + // Rate limiting events + 
'ratelimit-wait': RateLimitEventData; + 'ratelimit-reject': RateLimitEventData; + 'ratelimit-acquire': RateLimitEventData; +} + + +/** + * Helper type to extract event names from EventMap. + */ +export type EventNames = keyof EventMap; diff --git a/packages/fetch/src/engine/executor.ts b/packages/fetch/src/engine/executor.ts new file mode 100644 index 0000000..d29bc0e --- /dev/null +++ b/packages/fetch/src/engine/executor.ts @@ -0,0 +1,1393 @@ +import { + attempt, + wait, + SingleFlight, + Deferred, + assert, +} from '@logosdx/utils'; + +import type { + HttpMethods, + _InternalHttpMethods, + FetchResponse, + RetryConfig, + CacheRule, + DictAndT, + FetchConfig, +} from '../types.ts'; + +import type { EngineRequestConfig, CallConfig } from '../options/types.ts'; + +import { FetchError, DEFAULT_RETRY_CONFIG } from '../helpers/index.ts'; + +import { DedupePolicy } from '../policies/dedupe.ts'; +import { CachePolicy } from '../policies/cache.ts'; +import { RateLimitPolicy } from '../policies/rate-limit.ts'; + +import type { FetchEngineCore, InternalReqOptions } from './types.ts'; + + +// CallOptions removed - using CallConfig from options/types.ts + + +/** + * Promise that can be aborted. + */ +interface AbortablePromise extends Promise { + isFinished: boolean; + isAborted: boolean; + abort(reason?: string): void; +} + +/** + * Handles request execution with retry logic, timeouts, and policy integration. + * + * The RequestExecutor is the core request processing engine, responsible for: + * - Building normalized request options from method/path/options + * - Executing requests with retry logic and timeout handling + * - Coordinating with policies (dedupe, cache, rate-limit) + * - Managing SingleFlight for deduplication and caching + * + * All policies receive this executor and access the engine through `executor.engine`. + * This provides policies access to `engine.emit()` for type-safe event emission. 
+ * + * @template S - Instance state type + * @template H - Headers type + * @template P - Params type + * + * @example + * ```typescript + * const executor = new RequestExecutor(engine); + * + * // Policies access engine through executor + * executor.engine.emit('cache-hit', { key: '...', ... }); + * + * // Execute a request + * const response = await executor.execute('GET', '/users', options); + * ``` + */ +export class RequestExecutor< + H = unknown, + P = unknown, + S = unknown +> { + + /** Reference to the FetchEngine instance (public for policy access) */ + engine: FetchEngineCore; + + /** SingleFlight for deduplication and caching */ + flight: SingleFlight; + + /** Deduplication policy */ + dedupePolicy: DedupePolicy; + + /** Cache policy */ + cachePolicy: CachePolicy; + + /** Rate limit policy */ + rateLimitPolicy: RateLimitPolicy; + + constructor(engine: FetchEngineCore) { + + this.engine = engine; + this.flight = new SingleFlight(); + + // Policies receive this executor - they access engine through executor.engine + this.dedupePolicy = new DedupePolicy(this); + this.cachePolicy = new CachePolicy(this); + this.rateLimitPolicy = new RateLimitPolicy(); + } + + /** + * Get retry configuration from engine options. + */ + get retryConfig(): Required { + + const config = this.engine.config.get('retry'); + + // retry: false explicitly disables retry + if (config === false) { + + return { ...DEFAULT_RETRY_CONFIG, maxAttempts: 0 }; + } + + // retry: undefined or true uses defaults + if (!config || config === true) { + + return DEFAULT_RETRY_CONFIG; + } + + return { ...DEFAULT_RETRY_CONFIG, ...(config as RetryConfig) } as Required; + } + + /** + * Get base URL from engine options. + */ + get baseUrl(): string { + + return this.engine.config.get('baseUrl'); + } + + /** + * Get default type from engine options. 
+ */ + get defaultType(): 'json' | 'text' | 'blob' | 'arrayBuffer' { + + return (this.engine.config.get('defaultType') as string || 'json') as 'json' | 'text' | 'blob' | 'arrayBuffer'; + } + + // ===================================================================== + // PUBLIC API - Entry points for FetchEngine HTTP methods + // ===================================================================== + + /** + * Execute a request with the full lifecycle: timeout, options building, policies, fetch. + * + * This is the main entry point called by FetchEngine HTTP methods. + * + * @param method - HTTP method + * @param path - Request path + * @param payloadOrOptions - Payload (for POST/PUT/PATCH) or options + * @param options - Call options + * @returns AbortablePromise with FetchResponse + */ + execute( + method: HttpMethods, + path: string, + payloadOrOptions?: Data | CallConfig, + options?: CallConfig + ): AbortablePromise, DictAndT

, ResHdr>> { + + // Normalize arguments: POST/PUT/PATCH/DELETE have payload, GET/OPTIONS don't + let payload: Data | undefined; + let opts: CallConfig; + + if (options !== undefined) { + + payload = payloadOrOptions as Data; + opts = options; + } + else if (payloadOrOptions && typeof payloadOrOptions === 'object' && !Array.isArray(payloadOrOptions)) { + + // Check if it's call options or payload + const hasCallOptionKeys = 'headers' in payloadOrOptions || + 'params' in payloadOrOptions || + 'timeout' in payloadOrOptions || + 'retry' in payloadOrOptions || + 'abortController' in payloadOrOptions || + 'onError' in payloadOrOptions || + 'onBeforeReq' in payloadOrOptions || + 'onAfterReq' in payloadOrOptions; + + if (hasCallOptionKeys && !/POST|PUT|PATCH|DELETE/i.test(method)) { + + opts = payloadOrOptions as CallConfig; + } + else { + + payload = payloadOrOptions as Data; + opts = {}; + } + } + else { + + payload = payloadOrOptions as Data; + opts = {}; + } + + const controller = opts.abortController ?? new AbortController(); + + // Resolve timeout options + const totalTimeoutMs = opts.totalTimeout ?? opts.timeout ?? this.engine.config.get('totalTimeout'); + const attemptTimeoutMs = opts.attemptTimeout ?? this.engine.config.get('attemptTimeout'); + + if (typeof totalTimeoutMs === 'number') { + + assert(totalTimeoutMs >= 0, 'totalTimeout must be non-negative number'); + } + + if (typeof attemptTimeoutMs === 'number') { + + assert(attemptTimeoutMs >= 0, 'attemptTimeout must be non-negative number'); + } + + // Track if totalTimeout fires + let totalTimeoutFired = false; + + // Set up totalTimeout + const totalTimeout = typeof totalTimeoutMs === 'number' ? 
wait(totalTimeoutMs) : undefined; + + totalTimeout?.then(() => { + + totalTimeoutFired = true; + controller.abort(); + }); + + // Execute async logic + const promise = this.#executeWithOptions( + method, + path, + payload, + opts, + controller, + totalTimeout, + attemptTimeoutMs, + () => totalTimeoutFired + ); + + // Wrap as AbortablePromise + return this.#wrapAsAbortable, DictAndT

, ResHdr>>( + promise, + controller + ); + } + + /** + * Internal method that builds options and executes the request. + */ + async #executeWithOptions( + method: HttpMethods, + path: string, + payload: unknown, + options: CallConfig, + controller: AbortController, + totalTimeout: ReturnType | undefined, + attemptTimeoutMs: number | undefined, + getTotalTimeoutFired: () => boolean + ): Promise, DictAndT

, ResHdr>> { + + const onAfterReq = (...args: any[]) => { + + totalTimeout?.clear(); + options.onAfterReq?.apply(this, args as never); + }; + + const onError = (...args: any[]) => { + + totalTimeout?.clear(); + options.onError?.apply(this, args as never); + }; + + // Build normalized options + const normalizedOpts = this.makeRequestOptions( + method, + path, + { + ...options, + payload, + controller, + onAfterReq, + onError, + attemptTimeout: attemptTimeoutMs, + getTotalTimeoutFired + } + ); + + return this.executeRequest(normalizedOpts, totalTimeout); + } + + /** + * Build normalized request options from method/path/options. + * + * @param method - HTTP method + * @param path - Request path + * @param options - Call options with payload and controller + * @returns Normalized InternalReqOptions + */ + makeRequestOptions( + _method: HttpMethods, + path: string, + options: CallConfig & { + payload?: unknown; + controller: AbortController; + attemptTimeout?: number | undefined; + getTotalTimeoutFired?: (() => boolean) | undefined; + } + ): InternalReqOptions { + + const { + payload, + controller, + onAfterReq: onAfterRequest, + onBeforeReq: onBeforeRequest, + onError, + timeout = this.engine.config.get('totalTimeout') as number | undefined, + attemptTimeout, + getTotalTimeoutFired, + params: requestParams, + signal, + determineType, + retry, + headers: requestHeaders, + // RequestInit options (per-request overrides config defaults) + ...perRequestInit + } = options; + + const method = _method.toUpperCase() as _InternalHttpMethods; + const state = this.engine.state.get(); + + // Build URL with merged params + const url = this.#makeUrl(path, requestParams, method); + + // Merge headers + let headers = this.engine.headerStore.resolve(method, requestHeaders) as DictAndT; + + // Build body for mutating methods + let body: BodyInit | undefined; + const type = this.defaultType; + + if (/put|post|patch|delete/i.test(method)) { + + const isValidBodyInit = ( + payload === null 
|| + payload === undefined || + typeof payload === 'string' || + payload instanceof Blob || + payload instanceof ArrayBuffer || + payload instanceof FormData || + payload instanceof URLSearchParams || + payload instanceof ReadableStream || + ArrayBuffer.isView(payload) + ); + + if (type === 'json' && !isValidBodyInit) { + + body = JSON.stringify(payload); + } + else if (payload !== null && payload !== undefined) { + + body = payload as BodyInit; + } + } + + // Build opts for modifyConfig - include RequestInit options from config + per-request + const config = this.engine.config.get(); + + let opts: EngineRequestConfig = { + // RequestInit options from config (credentials, mode, cache, etc.) + ...config, + // Per-request RequestInit overrides + ...perRequestInit, + // Explicit values + method, + signal: signal || controller.signal, + controller, + headers, + body: body ?? null, + totalTimeout: timeout, + retry, + }; + + // Apply global modifyConfig + const modifyConfig = this.engine.config.get('modifyConfig'); + + if (modifyConfig) { + + opts = modifyConfig(opts, state) as EngineRequestConfig; + } + + // Apply method-specific modifyConfig + const modifyMethodConfig = this.engine.config.get('modifyMethodConfig'); + const methodSpecificModify = modifyMethodConfig?.[method]; + + if (methodSpecificModify) { + + opts = methodSpecificModify(opts, state) as EngineRequestConfig; + } + + // Extract final values after modification + headers = (opts.headers || {}) as DictAndT; + body = opts.body ?? undefined; + + // Per-request validation + const validate = this.engine.config.get('validate'); + + if (validate?.perRequest?.headers && validate.headers) { + + validate.headers(headers, method); + } + + // Normalize retry (convert true to {}, false to maxAttempts: 0) + const normalizedRetry = opts.retry === true + ? {} + : (opts.retry === false ? 
{ maxAttempts: 0 } : opts.retry); + + // Return normalized options + // opts now contains all RequestInit options (config + per-request + modifyConfig) + return { + // Spread opts to get all RequestInit options after modifyConfig + ...opts, + // Explicit values (override anything from opts) + method, + path, + payload, + headers, + params: Object.fromEntries(url.searchParams.entries()) as DictAndT

, + state, + url, + signal: opts.signal || controller.signal, + controller, + body, + timeout: opts.totalTimeout, + attemptTimeout, + getTotalTimeoutFired, + retry: normalizedRetry, + determineType: determineType as InternalReqOptions['determineType'], + onBeforeRequest: onBeforeRequest as InternalReqOptions['onBeforeRequest'], + onAfterRequest: onAfterRequest as InternalReqOptions['onAfterRequest'], + onError: onError as InternalReqOptions['onError'], + }; + } + + /** + * Build URL from path and params. + */ + #makeUrl(path: string, requestParams?: DictAndT

, method?: HttpMethods): URL { + + const params = this.engine.paramStore.resolve( + method || 'GET', + requestParams + ) as DictAndT

; + + if (path.startsWith('http')) { + + const url = new URL(path); + + Object.entries(params).forEach(([key, value]) => { + + url.searchParams.set(key, value as string); + }); + + return url; + } + + path = path?.replace(/^\/{1,}/, ''); + + if (path[0] !== '/') { + + path = `/${path}`; + } + + const baseUrl = this.baseUrl.replace(/\/$/, ''); + const url = new URL(baseUrl + path); + + for (const [key, value] of Object.entries(params)) { + + url.searchParams.set(key, value as string); + } + + // Per-request param validation + const validate = this.engine.config.get('validate'); + + if (validate?.perRequest?.params && validate.params) { + + validate.params( + Object.fromEntries(url.searchParams.entries()) as DictAndT

, + method as _InternalHttpMethods | undefined + ); + } + + return url; + } + + /** + * Wrap a promise as an AbortablePromise. + */ + #wrapAsAbortable(promise: Promise, controller: AbortController): AbortablePromise { + + const abortable = promise as AbortablePromise; + + abortable.isFinished = false; + abortable.isAborted = false; + + // Listen to abort signal to update isAborted when aborted externally + controller.signal.addEventListener('abort', () => { + + abortable.isAborted = true; + }, { once: true }); + + abortable.abort = (reason?: string) => { + + abortable.isAborted = true; + controller.abort(reason); + }; + + promise.then(() => { + + abortable.isFinished = true; + }).catch(() => { + + // Only set isFinished if not aborted + if (!abortable.isAborted) { + + abortable.isFinished = true; + } + }); + + return abortable; + } + + // ===================================================================== + // INTERNAL METHODS + // ===================================================================== + + /** + * Calculate delay for retry attempt using exponential backoff. + * + * @param attemptNo - Current attempt number (1-based) + * @param retry - Retry configuration with delay settings + * @returns Delay in milliseconds before next retry attempt + */ + calculateRetryDelay(attemptNo: number, retry: Required): number { + + const { baseDelay, maxDelay, useExponentialBackoff } = retry; + + if (!useExponentialBackoff) return Math.min(baseDelay, maxDelay!); + + const delay = baseDelay * Math.pow(2, attemptNo - 1); + + return Math.min(delay, maxDelay!); + } + + /** + * Determine response type based on content-type header. 
+ * + * @param response - Fetch response + * @returns Type, isJson flag, and whether the content-type is recognized + */ + determineType(response: Response): { type: 'json' | 'text' | 'blob' | 'arrayBuffer'; isJson: boolean; isRecognized: boolean } { + + const contentType = response.headers.get('content-type') || ''; + + if (contentType.includes('application/json')) { + + return { type: 'json', isJson: true, isRecognized: true }; + } + + if (contentType.includes('text/')) { + + return { type: 'text', isJson: false, isRecognized: true }; + } + + // Default to configured type for unknown content-types + return { type: this.defaultType, isJson: false, isRecognized: false }; + } + + /** + * Extract retry configuration from request options. + */ + #extractRetry(opts: InternalReqOptions): RetryConfig | undefined { + + // retry is already normalized (true converted to {} in makeRequestOptions) + return opts.retry; + } + + /** + * Handle errors with proper event emission and error formatting. + */ + #handleError( + normalizedOpts: InternalReqOptions, + errorOpts: { + error: FetchError | Error, + step: 'fetch' | 'parse' | 'response', + status?: number, + data?: unknown + } + ) { + + const { + method, + path, + headers, + controller, + onError, + attempt: attemptNum + } = normalizedOpts; + + const { + error, + step, + status, + data + } = errorOpts; + + const aborted = controller.signal.aborted; + + let err = error as FetchError<{}, DictAndT>; + + if (step === 'fetch') { + + err = new FetchError(err.message) as FetchError<{}, DictAndT>; + + err.status = 499; + err.message = err.message || 'Fetch error'; + } + + if (step === 'parse') { + + err = new FetchError(err.message) as FetchError<{}, DictAndT>; + + err.status = status || 999; + err.message = err.message || 'Parse error'; + } + + if (step === 'response') { + + const asAgg = error as AggregateError; + let errors = asAgg.errors as Error[]; + let errCode = ''; + + // Handle undici errors + if ( + !errors || + errors.length 
=== 0 && + error.cause instanceof AggregateError + ) { + + errors = (error.cause as AggregateError)?.errors as Error[]; + } + + if ((error as any)?.code) { + errCode = (error as any).code; + } + + if (errors && errors.length > 0) { + + const msgs = errors.map((e) => e.message).join('; '); + + err = new FetchError(`${errCode}: ${msgs}`) as FetchError<{}, DictAndT>; + } + else { + + err = new FetchError(error.message) as FetchError<{}, DictAndT>; + } + } + + err.attempt = attemptNum; + err.status = err.status || status!; + err.method = err.method || method!; + err.path = err.path || path!; + err.aborted = err.aborted || aborted; + err.data = err.data || data as null; + err.step = err.step || step; + err.headers = err.headers || headers; + + // Emit error event with normalizedOpts as base + const eventData = { + ...normalizedOpts, + error: err, + step, + status, + aborted, + data, + requestEnd: Date.now() + }; + + if (aborted) { + + this.engine.emit('abort', eventData); + } + else { + + this.engine.emit('error', eventData); + } + + onError && onError(err); + + throw err; + } + + /** + * Makes an API call using fetch and returns enhanced response object. + * + * @param options - Flat normalized request options (InternalReqOptions) + * @returns FetchResponse object with data, headers, status, request, and config + */ + async makeCall( + options: InternalReqOptions + ): Promise, DictAndT

, ResHdr>> { + + const { + // Request identity + method, + headers: reqHeaders, + params, + + // URL + url, + + // Execution plumbing + signal, + controller, + body, + timeout, + retry, + determineType, + + // Callbacks + onBeforeRequest, + onAfterRequest, + + // RequestInit options (rest spread) + ...requestInit + } = options; + + // Emit before event + this.engine.emit('before-request', options); + + // Build RequestOpts for callbacks (legacy compatibility) + const callbackOpts = { + method, + signal, + controller, + headers: reqHeaders, + body: body ?? null, + timeout, + retry, + determineType + }; + + onBeforeRequest && await onBeforeRequest(callbackOpts); + + // Build fetch options - spread RequestInit options, then our explicit overrides + const fetchOpts: RequestInit = { + ...requestInit, + method, + signal, + headers: reqHeaders as HeadersInit, + body: body ?? null, + }; + + const [response, resErr] = await attempt(async () => { + + return await fetch(url, fetchOpts) as Response; + }); + + // Fetch will only throw if the request is aborted, denied, timed out, etc. + if (resErr) { + + this.#handleError(options, { + error: resErr, + step: 'fetch' + }); + + throw resErr; + } + + this.engine.emit('after-request', { + ...options, + // Clone response if after-request listeners exist + // to prevent body stream locking issues, allow multiple + // reads, and copying an entire body stream into memory. + response: ( + this.engine.$has('after-request') ? + response.clone() : + response + ), + }); + + onAfterRequest && await onAfterRequest(response.clone(), callbackOpts); + + const [data, parseErr] = await attempt(async () => { + + const typeResult = determineType + ? determineType(response) + : this.determineType(response); + + const { type, isJson } = typeResult; + + // Custom determineType may not return isRecognized, default to true if not provided + const isRecognized = 'isRecognized' in typeResult ? 
(typeResult as any).isRecognized : true; + + // Handle 204 No Content - always return null regardless of content-type + if (response.status === 204) { + + return null; + } + + if (isJson) { + + const text = await response.text(); + + if (text) { + + return JSON.parse(text) as Res; + } + + return null; + } + else if (isRecognized) { + + // Known non-JSON content-type (e.g., text/*) + return await response[type]() as Res; + } + else { + + // Unknown content-type - try text first to check if body exists + const text = await response.text(); + + if (text) { + + // Has content but unknown content-type - try default parsing + if (type === 'json') { + + return JSON.parse(text) as Res; + } + + return text as Res; + } + + // Empty body with unknown content-type - throw parse error + throw new Error(`Unknown content-type: ${response.headers.get('content-type')}`); + } + }); + + if (parseErr) { + + this.#handleError(options, { + error: parseErr, + step: 'parse', + status: response.status, + data + }); + + throw parseErr; + } + + if (response.ok === false) { + + this.#handleError(options, { + error: new FetchError(response.statusText), + step: 'response', + status: response.status, + data + }); + + throw new FetchError(response.statusText); + } + + // Emit response event + this.engine.emit('response', { + ...options, + response, + data, + requestEnd: Date.now() + }); + + const config: FetchConfig, DictAndT

> = { + baseUrl: this.baseUrl.toString(), + attemptTimeout: options.attemptTimeout, + method, + headers: reqHeaders, + params: params, + retry: this.retryConfig, + determineType: determineType, + }; + + // Create the Request object for the response + const request = new Request(url, fetchOpts); + + // Convert response headers to plain object for typed access + const responseHeaders = {} as Partial; + + response.headers.forEach((value, key) => { + + responseHeaders[key as keyof ResHdr] = value as ResHdr[keyof ResHdr]; + }); + + // Return the enhanced response object + return { + data: data!, + headers: responseHeaders, + status: response.status, + request, + config + }; + } + + /** + * Attempts a call with retry logic. + * + * @param options - Normalized request options + * @returns Response from successful attempt + */ + async attemptCall( + options: InternalReqOptions + ): Promise, DictAndT

, ResHdr>> { + + const mergedRetry = { + ...this.retryConfig, + ...this.#extractRetry(options) + }; + + if (mergedRetry.maxAttempts === 0) { + + const [result, err] = await attempt( + async () => this.makeCall(options) + ); + + if (err) { + + // Set timedOut flag if the abort was caused by totalTimeout + if ((err as FetchError).aborted && options.getTotalTimeoutFired?.()) { + + (err as FetchError).timedOut = true; + } + + throw err; + } + + return result; + } + + let _attempt = 1; + let lastError: FetchError<{}, DictAndT> | undefined; + + while (_attempt <= mergedRetry.maxAttempts!) { + + // Check if parent (totalTimeout) already aborted - stop retrying + if (options.controller.signal.aborted) { + + const err = lastError ?? new FetchError('Request aborted by totalTimeout'); + err.timedOut = options.getTotalTimeoutFired?.() ?? false; + throw err; + } + + // Create child controller for this attempt if using attemptTimeout + let attemptController: AbortController; + let attemptTimeoutPromise: ReturnType | undefined; + let attemptTimeoutFired = false; + + if (options.attemptTimeout !== undefined) { + + attemptController = new AbortController(); + + // Link child to parent - if parent aborts, child aborts + options.controller.signal.addEventListener('abort', () => { + + attemptTimeoutPromise?.clear(); + attemptController.abort(); + }, { once: true }); + + // Set up per-attempt timeout + attemptTimeoutPromise = wait(options.attemptTimeout); + attemptTimeoutPromise.then(() => { + + attemptTimeoutFired = true; + attemptController.abort(); + }); + } + else { + + attemptController = options.controller; + } + + const [result, err] = await attempt( + async () => ( + this.makeCall({ + ...options, + controller: attemptController, + signal: attemptController.signal, + attempt: _attempt + }) + ) + ); + + // Always cleanup attempt timeout (success or failure) + attemptTimeoutPromise?.clear(); + + if (err === null) { + + return result; + } + + lastError = err as FetchError<{}, 
DictAndT>; + + // Set timedOut flag only when a timeout actually fired + if (lastError!.aborted) { + + const totalTimeoutFired = options.getTotalTimeoutFired?.() ?? false; + + if (attemptTimeoutFired || totalTimeoutFired) { + + lastError!.timedOut = true; + } + } + + // If parent controller aborted (totalTimeout), don't retry + if (options.controller.signal.aborted) { + + throw lastError!; + } + + // Check if we should retry + const shouldRetry = await mergedRetry.shouldRetry(lastError!, _attempt); + + if (shouldRetry && _attempt < mergedRetry.maxAttempts!) { + + // If shouldRetry is a number, use it as the delay + const delay = ( + typeof shouldRetry === 'number' ? + shouldRetry : + this.calculateRetryDelay(_attempt, mergedRetry) + ); + + this.engine.emit('retry', { + ...options, + error: lastError, + attempt: _attempt, + nextAttempt: _attempt + 1, + delay + }); + + await wait(delay); + + // Check if parent controller aborted during the delay + if (options.controller.signal.aborted) { + + // Update timedOut flag if totalTimeout fired during delay + if (options.getTotalTimeoutFired?.()) { + + lastError!.timedOut = true; + } + + throw lastError!; + } + + _attempt++; + continue; + } + + throw lastError!; + } + + // This should never be reached + throw new FetchError('Unexpected end of retry logic'); + } + + /** + * Awaits a shared promise with independent timeout/abort for the joiner. 
+ */ + #awaitWithIndependentTimeout( + sharedPromise: Promise, + controller: AbortController, + timeout: ReturnType | undefined, + method: string, + path: string + ): Promise { + + const deferred = new Deferred(); + let isSettled = false; + + const settle = (fn: () => void) => { + + if (isSettled) return; + isSettled = true; + timeout?.clear(); + fn(); + }; + + const createJoinerError = (message: string): FetchError => { + + const err = new FetchError(message); + err.aborted = true; + err.method = method as HttpMethods; + err.path = path; + err.status = 0; + err.step = 'fetch'; + + return err; + }; + + timeout?.then(() => { + + settle(() => deferred.reject(createJoinerError('Request timed out (joiner)'))); + }); + + controller.signal.addEventListener('abort', () => { + + settle(() => deferred.reject(createJoinerError('Request aborted (joiner)'))); + }, { once: true }); + + sharedPromise + .then((value) => settle(() => deferred.resolve(value))) + .catch((error) => settle(() => deferred.reject(error))); + + return deferred.promise; + } + + /** + * Triggers a background revalidation for stale-while-revalidate. + * Fire and forget - errors are emitted as events, not propagated. 
+ * + * @param method - HTTP method + * @param path - Request path + * @param options - Original request options + * @param normalizedOpts - Normalized options for makeRequestOptions + * @param cacheKey - Cache key for the entry + * @param cacheConfig - Cache configuration + */ + async triggerBackgroundRevalidation( + _method: HttpMethods, + _path: string, + normalizedOpts: InternalReqOptions, + cacheKey: string, + cacheConfig: CacheRule + ): Promise { + + // Prevent multiple concurrent revalidations for the same key + if (this.cachePolicy.isRevalidating(cacheKey)) { + + return; + } + + this.cachePolicy.markRevalidating(cacheKey); + + // Build normalized options for the background request + // Disable retries for background revalidation to fail fast + const controller = new AbortController(); + const bgOptions: InternalReqOptions = { + ...normalizedOpts, + controller, + signal: controller.signal, + retry: { maxAttempts: 0 } + }; + + this.engine.emit('cache-revalidate', { + ...bgOptions, + key: cacheKey + }); + + const [res, fetchErr] = await attempt(() => + this.attemptCall(bgOptions) + ); + + this.cachePolicy.unmarkRevalidating(cacheKey); + + if (fetchErr) { + + this.engine.emit('cache-revalidate-error', { + ...bgOptions, + key: cacheKey, + error: fetchErr + }); + + return; + } + + const [, cacheErr] = await attempt(() => ( + + this.flight.setCache(cacheKey, res, { + ttl: cacheConfig.ttl, + staleIn: cacheConfig.staleIn + }) + )); + + if (cacheErr) { + + this.engine.emit('cache-revalidate-error', { + ...bgOptions, + key: cacheKey, + error: cacheErr + }); + + return; + } + + this.cachePolicy.markActive(cacheKey); + + this.engine.emit('cache-set', { + ...bgOptions, + key: cacheKey, + expiresIn: cacheConfig.ttl + }); + } + + /** + * Executes a request with cache checking, deduplication, and rate limiting. + * + * This is the main entry point for request execution. It: + * 1. Checks cache (returns cached value if hit) + * 2. Checks rate limit (blocks if needed) + * 3. 
Checks for in-flight request (joins if found) + * 4. Executes the actual request + * 5. Stores result in cache if applicable + * + * @param normalizedOpts - Normalized request options + * @param options - Original options (for background revalidation) + * @param totalTimeout - Total timeout promise + * @returns Response from the request + */ + async executeRequest( + normalizedOpts: InternalReqOptions, + totalTimeout: ReturnType | undefined + ): Promise, DictAndT

, ResHdr>> { + + normalizedOpts.requestStart = Date.now(); + + const { method, path, controller } = normalizedOpts; + + // === Cache Check === + // Cache runs first: cached responses return immediately without consuming rate limit tokens. + const cacheResult = await this.cachePolicy.checkCache, DictAndT

, ResHdr>>({ + method, + path, + normalizedOpts: normalizedOpts as any, + options: normalizedOpts as any, + clearTimeout: () => totalTimeout?.clear() + }); + + let cacheKey: string | null = null; + let cacheConfig: CacheRule | null = null; + + if (cacheResult?.hit) { + + return cacheResult.value; + } + + if (cacheResult && !cacheResult.hit) { + + cacheKey = cacheResult.key; + cacheConfig = cacheResult.config as CacheRule; + } + + // === Rate Limit Check === + // Rate limiting only gates actual outbound requests (after cache miss). + await this.rateLimitPolicy.executeGuard({ + method, + path, + normalizedOpts: normalizedOpts as any, + controller, + emit: (event, data) => this.engine.emit(event as any, data as any), + clearTimeout: () => totalTimeout?.clear(), + createAbortError: (message) => { + + const err = new FetchError(message); + err.aborted = true; + err.method = normalizedOpts.method; + err.path = path; + err.status = 0; + err.step = 'fetch'; + return err; + } + }); + + // === Deduplication Check === + // Cast normalizedOpts for policy compatibility (internal type order mismatch) + const dedupeResult = this.dedupePolicy.checkInflight, DictAndT

, ResHdr>>({ + method, + path, + normalizedOpts: normalizedOpts as any + }); + + let dedupeKey: string | null = null; + let cleanup: (() => void) | null = null; + + if (dedupeResult?.joined) { + + return this.#awaitWithIndependentTimeout( + dedupeResult.promise, + controller, + totalTimeout, + normalizedOpts.method, + path + ); + } + + if (dedupeResult && !dedupeResult.joined) { + + dedupeKey = dedupeResult.key; + } + + // === Execute Request === + let deferred: Deferred, DictAndT

, ResHdr>> | null = null; + + if (dedupeKey) { + + deferred = new Deferred, DictAndT

, ResHdr>>(); + + // Attach a no-op catch handler to prevent unhandled rejection warnings + deferred.promise.catch(() => { /* handled by the request flow */ }); + + cleanup = this.flight.trackInflight(dedupeKey, deferred.promise); + } + + const requestPromise = this.attemptCall(normalizedOpts); + + const [res, err] = await attempt(() => requestPromise); + + totalTimeout?.clear(); + + if (err) { + + deferred?.reject(err); + cleanup?.(); + throw err; + } + + deferred?.resolve(res); + cleanup?.(); + + if (cacheKey && cacheConfig) { + + await this.flight.setCache(cacheKey, res, { + ttl: cacheConfig.ttl, + staleIn: cacheConfig.staleIn + }); + + this.cachePolicy.markActive(cacheKey); + + this.engine.emit('cache-set', { + ...normalizedOpts, + key: cacheKey, + expiresIn: cacheConfig.ttl, + }); + } + + return res; + } + + /** + * Initialize policies with configuration from engine options. + * + * Called during engine construction after options are set. + */ + initPolicies(): void { + + const dedupeConfig = this.engine.config.get('dedupePolicy'); + const cacheConfig = this.engine.config.get('cachePolicy'); + const rateLimitConfig = this.engine.config.get('rateLimitPolicy'); + + this.dedupePolicy.init(dedupeConfig as any); + this.cachePolicy.init(cacheConfig as any); + this.rateLimitPolicy.init(rateLimitConfig as any); + + // Re-initialize SingleFlight with cache adapter if provided + if (cacheConfig && cacheConfig !== true) { + + const config = cacheConfig as { adapter?: unknown; ttl?: number; staleIn?: number }; + + this.flight = new SingleFlight({ + adapter: config.adapter as any, + defaultTtl: this.cachePolicy.defaultTtl, + defaultStaleIn: this.cachePolicy.defaultStaleIn + }); + } + } + + /** + * Clear the cache. + */ + clearCache(): void { + + this.flight.clearCache(); + } + + /** + * Get cache statistics. 
+ */ + cacheStats() { + + return this.flight.stats(); + } +} diff --git a/packages/fetch/src/engine/index.ts b/packages/fetch/src/engine/index.ts new file mode 100644 index 0000000..37502f7 --- /dev/null +++ b/packages/fetch/src/engine/index.ts @@ -0,0 +1,726 @@ +/** + * Engine module for FetchEngine. + * + * Contains the core engine class, event definitions, and internal types. + */ + +import { ObserverEngine } from '@logosdx/observer'; + +import type { + EventMap, + EventData as EventsEventData, + DedupeEventData as EventsDedupeEventData, + CacheEventData as EventsCacheEventData, + RateLimitEventData as EventsRateLimitEventData, + StateEventData as EventsStateEventData, + PropertyEventData as EventsPropertyEventData, + OptionsEventData as EventsOptionsEventData +} from './events.ts'; +import type { FetchEngineCore } from './types.ts'; +import { RequestExecutor } from './executor.ts'; +import { FetchState } from '../state/index.ts'; +import { ConfigStore } from '../options/index.ts'; +import { HeadersManager } from '../properties/headers.ts'; +import { ParamsManager } from '../properties/params.ts'; +import { PropertyStore } from '../properties/store.ts'; +import type { + EngineConfig, + EngineType, + EngineRequestConfig, + EngineLifecycle, + ValidateConfig, + CallConfig, + ModifyConfigFn as OptionsModifyConfigFn, + DetermineTypeFn as OptionsDetermineTypeFn, + InstanceHeaders as OptionsInstanceHeaders, + InstanceParams as OptionsInstanceParams, + InstanceState as OptionsInstanceState +} from '../options/types.ts'; +import type { HttpMethods, FetchResponse, DictAndT } from '../types.ts'; + + +// Re-export types +export * from './events.ts'; +export * from './types.ts'; + + +// CallConfig is now imported from options/types.ts +export type { CallConfig } from '../options/types.ts'; + + +/** + * Promise that can be aborted. 
+ */ +export interface AbortablePromise extends Promise { + + isFinished: boolean; + isAborted: boolean; + abort(reason?: string): void; +} + + +/** + * Response headers type for type inference. + */ +export interface InstanceResponseHeaders extends Record {} + + +/** + * Creates a wrapper around `fetch` with configurable defaults, retry logic, + * request deduplication, caching, and rate limiting. + * + * Provides resilient HTTP client for production applications that need + * reliable API communication with comprehensive error handling. + * + * @template H - Type of request headers + * @template P - Type of request params + * @template S - Type of instance state + * @template RH - Type of response headers + * + * @example + * ```typescript + * // Basic setup with error handling + * const api = new FetchEngine({ + * baseUrl: 'https://api.example.com', + * defaultType: 'json', + * headers: { 'Authorization': 'Bearer token' } + * }); + * + * const [user, err] = await attempt(() => api.get('/users/123')); + * if (err) { + * console.error('Failed to fetch user:', err); + * return; + * } + * ``` + * + * @example + * ```typescript + * // Advanced setup with retry and caching + * const api = new FetchEngine({ + * baseUrl: 'https://api.example.com', + * retry: { maxAttempts: 3, baseDelay: 1000 }, + * cachePolicy: { enabled: true, ttl: 60000 }, + * dedupePolicy: true + * }); + * ``` + */ +export class FetchEngine< + H = FetchEngine.InstanceHeaders, + P = FetchEngine.InstanceParams, + S = FetchEngine.InstanceState, + RH = FetchEngine.InstanceResponseHeaders, +> extends ObserverEngine> implements FetchEngineCore { + + /** + * Symbol to use the default value or configuration. + * + * When returned from `determineType`, uses built-in content-type detection. 
+ * + * @example + * ```typescript + * const api = new FetchEngine({ + * baseUrl: 'https://api.example.com', + * determineType: (response) => { + * if (response.url.includes('/download')) return 'blob'; + * return FetchEngine.useDefault; // Use built-in detection + * } + * }); + * ``` + */ + static useDefault = Symbol('useDefault'); + + /** + * State store for managing instance state. + */ + readonly state: FetchState; + + /** + * Options store for accessing all configuration. + */ + readonly config: ConfigStore; + + /** + * Headers manager for adding/removing/resolving headers. + */ + readonly headers: HeadersManager; + + /** + * Params manager for adding/removing/resolving URL parameters. + */ + readonly params: ParamsManager

; + + #executor: RequestExecutor; + #instanceAbortController = new AbortController(); + + /** + * Create a new FetchEngine instance. + * + * @param opts - Configuration options + */ + constructor(opts: EngineConfig) { + + super({ name: opts.name, spy: opts.spy as any }); + + // OptionsStore is FIRST - single source of truth for all configuration + // We cast to `any` because the stores aren't initialized yet + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const engine = this as any; + + this.config = new ConfigStore(engine, opts); + + // State store - pulls validate from options + this.state = new FetchState(engine); + + // Property managers - pull initial values from options + this.headers = new HeadersManager(engine); + this.params = new ParamsManager(engine); + + // Request executor - owns policies and request lifecycle + this.#executor = new RequestExecutor(engine); + + // Initialize policies with options from store + this.#executor.initPolicies(); + } + + /** + * Property store for headers (FetchEngineCore compliance). + * + * Internal components access this for header resolution. + */ + get headerStore(): PropertyStore> { + + return this.headers.$store; + } + + /** + * Property store for params (FetchEngineCore compliance). + * + * Internal components access this for param resolution. + */ + get paramStore(): PropertyStore> { + + return this.params.$store; + } + + /** + * Check if the engine has been destroyed. + */ + get #destroyed(): boolean { + + return this.#instanceAbortController.signal.aborted; + } + + // ===== HTTP Methods ===== + + /** + * Makes a GET request to retrieve data. + * + * @param path - Request path relative to base URL + * @param options - Request options + * @returns AbortablePromise that resolves to FetchResponse + * + * @example + * ```typescript + * const { data: users } = await api.get('/users'); + * ``` + */ + get( + path: string, + options: CallConfig = {} + ): AbortablePromise, DictAndT

, ResHdr>> { + + return this.request('GET', path, options); + } + + /** + * Makes a POST request to create a new resource. + * + * @param path - Request path relative to base URL + * @param payload - Data to send in the request body + * @param options - Request options + * @returns AbortablePromise that resolves to FetchResponse + * + * @example + * ```typescript + * const { data: user } = await api.post('/users', { name: 'John' }); + * ``` + */ + post( + path: string, + payload?: Data, + options: CallConfig = {} + ): AbortablePromise, DictAndT

, ResHdr>> { + + return this.#executor.execute('POST', path, payload, options); + } + + /** + * Makes a PUT request to replace a resource. + * + * @param path - Request path relative to base URL + * @param payload - Data to send in the request body + * @param options - Request options + * @returns AbortablePromise that resolves to FetchResponse + * + * @example + * ```typescript + * const { data: user } = await api.put('/users/123', { name: 'Jane' }); + * ``` + */ + put( + path: string, + payload?: Data, + options: CallConfig = {} + ): AbortablePromise, DictAndT

, ResHdr>> { + + return this.#executor.execute('PUT', path, payload, options); + } + + /** + * Makes a PATCH request to partially update a resource. + * + * @param path - Request path relative to base URL + * @param payload - Partial data to update + * @param options - Request options + * @returns AbortablePromise that resolves to FetchResponse + * + * @example + * ```typescript + * const { data } = await api.patch('/users/123', { email: 'new@example.com' }); + * ``` + */ + patch( + path: string, + payload?: Data, + options: CallConfig = {} + ): AbortablePromise, DictAndT

, ResHdr>> { + + return this.#executor.execute('PATCH', path, payload, options); + } + + /** + * Makes a DELETE request to remove a resource. + * + * @param path - Request path relative to base URL + * @param payload - Optional payload for the request body + * @param options - Request options + * @returns AbortablePromise that resolves to FetchResponse + * + * @example + * ```typescript + * await api.delete('/users/123'); + * ``` + */ + delete( + path: string, + payload?: Data, + options: CallConfig = {} + ): AbortablePromise, DictAndT

, ResHdr>> { + + return this.#executor.execute('DELETE', path, payload, options); + } + + /** + * Makes an HTTP OPTIONS request to check server capabilities. + * + * You can also use `request('OPTIONS', path, opts)` directly. + * + * @param path - Request path relative to base URL + * @param opts - Request options + * @returns AbortablePromise that resolves to FetchResponse + * + * @example + * ```typescript + * const { headers } = await api.options('/users'); + * // Or use request() directly: + * const { headers } = await api.request('OPTIONS', '/users'); + * ``` + */ + options( + path: string, + opts: CallConfig = {} + ): AbortablePromise, DictAndT

, ResHdr>> { + + return this.request('OPTIONS', path, opts); + } + + /** + * Makes an HTTP HEAD request to retrieve headers only. + * + * You can also use `request('HEAD', path, opts)` directly. + * + * @param path - Request path relative to base URL + * @param opts - Request options + * @returns AbortablePromise that resolves to FetchResponse + * + * @example + * ```typescript + * const { headers } = await api.head('/users/123'); + * // Or use request() directly: + * const { headers } = await api.request('HEAD', '/users/123'); + * ``` + */ + head( + path: string, + opts: CallConfig = {} + ): AbortablePromise, DictAndT

, ResHdr>> { + + return this.request('HEAD', path, opts); + } + + /** + * Makes an HTTP request with the specified method. + * + * @param method - HTTP method (GET, POST, PUT, PATCH, DELETE, OPTIONS) + * @param path - Request path relative to base URL + * @param options - Request options (may include payload) + * @returns AbortablePromise that resolves to FetchResponse + * + * @example + * ```typescript + * const response = await api.request('GET', '/users'); + * ``` + */ + request( + method: HttpMethods, + path: string, + options: CallConfig & { payload?: Data } = {} + ): AbortablePromise, DictAndT

, ResHdr>> { + + if (this.#destroyed) { + + throw new Error('Cannot make requests on destroyed FetchEngine instance'); + } + + const { payload, ...rest } = options; + + // Create a controller that's linked to the instance abort signal + // so destroy() can abort all in-flight requests + const controller = rest.abortController ?? new AbortController(); + const instanceSignal = this.#instanceAbortController.signal; + + if (!instanceSignal.aborted) { + + instanceSignal.addEventListener('abort', () => controller.abort('FetchEngine destroyed'), { once: true }); + } + + return this.#executor.execute( + method, + path, + payload, + { ...rest, abortController: controller } + ); + } + + // ===== Cache Methods ===== + + /** + * Clear all cached responses. + * + * @example + * ```typescript + * api.clearCache(); + * ``` + */ + clearCache(): void { + + this.#executor.flight.clearCache(); + } + + /** + * Clear a specific cache entry. + * + * @param key - Cache key to clear + */ + clearCacheKey(key: string): void { + + this.#executor.flight.deleteCache(key); + } + + /** + * Delete a specific cache entry. + * + * @param key - Cache key to delete + * @returns true if entry existed and was deleted + * + * @example + * ```typescript + * const deleted = await api.deleteCache('cache-key'); + * if (deleted) { + * console.log('Cache entry removed'); + * } + * ``` + */ + deleteCache(key: string): Promise { + + return this.#executor.flight.deleteCache(key); + } + + /** + * Invalidate cache entries matching a predicate. + * + * @param predicate - Function that returns true for keys to delete + * @returns Number of entries deleted + * + * @example + * ```typescript + * // Delete all entries containing 'user' + * const count = await api.invalidateCache(key => key.includes('user')); + * ``` + */ + invalidateCache(predicate: (key: string) => boolean): Promise { + + return this.#executor.flight.invalidateCache(predicate); + } + + /** + * Invalidate cache entries by path pattern. 
+ * + * Accepts a string (prefix match), RegExp, or predicate function. + * + * @param pattern - String prefix, RegExp, or predicate function + * @returns Number of entries deleted + * + * @example + * ```typescript + * // By prefix - invalidates /users, /users/123, etc. + * await api.invalidatePath('/users'); + * + * // By RegExp + * await api.invalidatePath(/\/users\/\d+/); + * + * // By predicate + * await api.invalidatePath(key => key.includes('/api/v1')); + * ``` + */ + invalidatePath(pattern: string | RegExp | ((key: string) => boolean)): Promise { + + if (typeof pattern === 'function') { + + return this.#executor.flight.invalidateCache(pattern); + } + + if (pattern instanceof RegExp) { + + return this.#executor.flight.invalidateCache(key => pattern.test(key)); + } + + // String - match as prefix + return this.#executor.flight.invalidateCache(key => key.includes(pattern)); + } + + /** + * Get cache statistics. + * + * @returns Object with `inflightCount` and `cacheSize` properties + * + * @example + * ```typescript + * const stats = api.cacheStats(); + * console.log('Inflight:', stats.inflightCount); + * console.log('Cache size:', stats.cacheSize); + * ``` + */ + cacheStats() { + + return this.#executor.cacheStats(); + } + + // ===== Lifecycle Methods ===== + + /** + * Destroy the FetchEngine instance. + * + * Aborts all pending requests and cleans up resources. + * After calling destroy(), the instance cannot be used. + * + * @example + * ```typescript + * // In React effect cleanup + * useEffect(() => { + * const api = new FetchEngine({ baseUrl: '/api' }); + * return () => api.destroy(); + * }, []); + * ``` + */ + destroy(): void { + + if (this.#destroyed) { + + console.warn('FetchEngine instance already destroyed'); + return; + } + + // Abort all pending requests + this.#instanceAbortController.abort('FetchEngine destroyed'); + + // Clear cache + this.clearCache(); + } + + /** + * Check if the engine has been destroyed. 
+ * + * @returns true if destroy() has been called + */ + isDestroyed(): boolean { + + return this.#destroyed; + } +} + + +// ===== FetchEngine Namespace Declaration ===== +// Uses declaration merging - namespace must come AFTER the class + +/** + * Namespace for FetchEngine types. + * + * Contains all types associated with FetchEngine using declaration merging. + * This allows users to reference types as `FetchEngine.Options`, `FetchEngine.EventData`, etc. + * + * **Augmentable Interfaces:** + * + * Users can extend these interfaces via module augmentation to add custom properties: + * + * @example + * ```typescript + * // In your app's type declaration file + * declare module '@logosdx/fetch' { + * namespace FetchEngine { + * interface InstanceHeaders { + * 'X-Custom-Header': string; + * Authorization: string; + * } + * + * interface InstanceParams { + * apiKey: string; + * } + * + * interface InstanceState { + * userId: string; + * token: string; + * } + * + * interface InstanceResponseHeaders { + * 'x-rate-limit': string; + * 'x-request-id': string; + * } + * } + * } + * ``` + */ +export namespace FetchEngine { + + // ===== Augmentable Interfaces ===== + // These are empty by default but can be extended via module augmentation + + /** + * Override this interface with the headers you intend to use throughout your app. + * + * @example + * ```typescript + * declare module '@logosdx/fetch' { + * namespace FetchEngine { + * interface InstanceHeaders { + * Authorization: string; + * 'X-API-Key': string; + * } + * } + * } + * ``` + */ + export interface InstanceHeaders extends OptionsInstanceHeaders {} + + /** + * Override this interface with the URL params you intend to use throughout your app. + */ + export interface InstanceParams extends OptionsInstanceParams {} + + /** + * Override this interface with the state you intend to use throughout your app. 
+ */ + export interface InstanceState extends OptionsInstanceState {} + + /** + * Override this interface with the response headers you expect from your API. + */ + export interface InstanceResponseHeaders extends Record {} + + + // ===== Type Aliases ===== + // Forward types from modular implementations + + /** Response body type (json, text, blob, etc.) */ + export type Type = EngineType; + + /** Full configuration options for FetchEngine */ + export type Config = EngineConfig; + + /** Request options passed to modifyOptions callbacks */ + export type RequestOpts = EngineRequestConfig; + + /** Function type for modifying request options */ + export type ModifyConfigFn = OptionsModifyConfigFn; + + /** Function type for determining response body type */ + export type DetermineTypeFn = OptionsDetermineTypeFn; + + /** Per-request configuration options */ + export type CallConfig = import('../options/types.ts').CallConfig; + + /** Lifecycle hooks for requests */ + export type Lifecycle = EngineLifecycle; + + /** Validation configuration */ + export type Validate = ValidateConfig; + + + // ===== Helper Types ===== + + /** Headers type that combines a custom type with string dict */ + export type Headers = DictAndT; + + /** Params type that combines a custom type with string dict */ + export type Params = DictAndT; + + /** Response headers type */ + export type ResponseHeaders = DictAndT; + + /** Header key names */ + export type HeaderKeys = keyof Headers; + + + // ===== Event Types ===== + // Re-export event types for namespace access using type aliases + + /** Event data for FetchEngine events */ + export type EventData = EventsEventData; + + /** Event data for deduplication events */ + export type DedupeEventData = EventsDedupeEventData; + + /** Event data for cache events */ + export type CacheEventData = EventsCacheEventData; + + /** Event data for rate limit events */ + export type RateLimitEventData = EventsRateLimitEventData; + + /** Event data for state 
mutation events */ + export type StateEventData = EventsStateEventData; + + /** Event data for property (header/param) events */ + export type PropertyEventData = EventsPropertyEventData; + + /** Event data for options change events */ + export type OptionsEventData = EventsOptionsEventData; + + /** Event map for ObserverEngine */ + export type EventMap = import('./events.ts').EventMap; + + + // ===== Promise Type ===== + + /** Promise that can be aborted */ + export type AbortPromise = AbortablePromise; +} diff --git a/packages/fetch/src/engine/types.ts b/packages/fetch/src/engine/types.ts new file mode 100644 index 0000000..f79f875 --- /dev/null +++ b/packages/fetch/src/engine/types.ts @@ -0,0 +1,188 @@ +import type { ObserverEngine } from '@logosdx/observer'; +import type { EventMap } from './events.ts'; +import type { FetchState } from '../state/index.ts'; +import type { ConfigStore } from '../options/index.ts'; +import type { PropertyStore } from '../properties/store.ts'; +import type { DictAndT, HttpMethods, FetchResponse, RetryConfig } from '../types.ts'; + + +/** + * Core interface that FetchEngine implements. + * + * This interface defines what internal components (RequestExecutor, policies, + * stores) can access from the engine. All configuration is accessed through + * stores, and events are emitted through the ObserverEngine base. + * + * FetchEngineCore extends ObserverEngine which provides type-safe + * event emission. This is the ONLY way for internal components to emit events, + * guaranteeing type safety at emit time. 
+ * + * @template H - Headers type + * @template P - Params type + * @template S - State type + * + * @example + * ```typescript + * // Components receive FetchEngineCore and access everything through it + * class RequestExecutor { + * constructor(private engine: FetchEngineCore) {} + * + * execute() { + * // Type-safe event emission through engine + * this.engine.emit('before', { state: this.engine.state.get() }); + * + * // Access config through options store + * const timeout = this.engine.options.get('totalTimeout'); + * } + * } + * ``` + */ +export interface FetchEngineCore< + H = unknown, + P = unknown, + S = unknown +> extends ObserverEngine> { + + /** + * State store for managing instance state. + * + * Provides get/set/reset operations with event emission. + */ + readonly state: FetchState; + + /** + * Options store for accessing all configuration. + * + * Single source of truth for ALL configuration. Supports deep + * path access via `get('retry.maxAttempts')` and runtime updates + * via `set('baseUrl', newUrl)`. + * + * Fully typed with FetchEngine.Options. + */ + readonly config: ConfigStore; + + /** + * Property store for headers. + * + * Manages default headers, method-specific headers, and resolution. + */ + readonly headerStore: PropertyStore>; + + /** + * Property store for URL parameters. + * + * Manages default params, method-specific params, and resolution. + */ + readonly paramStore: PropertyStore>; +} + + +/** + * Internal normalized request options - flat structure used throughout FetchEngine. + * + * This is the single source of truth for all request data, flowing to: + * - Cache/dedupe serializers (satisfies RequestKeyOptions) + * - Event data (spread directly into events) + * - Request execution (attemptCall → makeCall) + * + * Extends native RequestInit (minus headers/signal/body/method which we handle) + * to support instance-level defaults for credentials, mode, cache, etc. 
+ * + * @template H - Headers type + * @template P - Params type + * @template S - State type + */ +export interface InternalReqOptions + extends Omit { + + // === Request identity (satisfies RequestKeyOptions) === + + /** HTTP method (uppercase) */ + method: HttpMethods; + + /** Original request path */ + path: string; + + /** Request payload/body */ + payload?: unknown | undefined; + + /** Merged headers (instance + method + request) */ + headers: DictAndT; + + /** URL parameters as flat object (from url.searchParams) */ + params: DictAndT

; + + /** Instance state */ + state: S; + + // === URL === + + /** Fully constructed URL */ + url: URL; + + // === Execution plumbing === + + /** AbortSignal for cancellation */ + signal: AbortSignal; + + /** AbortController (for child signals in retry) */ + controller: AbortController; + + /** Serialized request body */ + body?: BodyInit | undefined; + + /** Request timeout (ms) - deprecated, use totalTimeout */ + timeout?: number | undefined; + + /** Per-attempt timeout (ms) */ + attemptTimeout?: number | undefined; + + /** Function to check if total timeout has fired */ + getTotalTimeoutFired?: (() => boolean) | undefined; + + /** Retry configuration (true normalized to {}) */ + retry?: RetryConfig | undefined; + + /** Response type determination function */ + determineType?: ((response: Response) => { type: 'json' | 'text' | 'blob' | 'arrayBuffer'; isJson: boolean }) | undefined; + + // === Callbacks === + + /** Called before request */ + onBeforeRequest?: ((opts: unknown) => void | Promise) | undefined; + + /** Called after request (before parse) */ + onAfterRequest?: ((response: Response, opts: unknown) => void | Promise) | undefined; + + /** Called on error */ + onError?: ((error: Error) => void) | undefined; + + // === Runtime state === + + /** Current attempt number (1-based) */ + attempt?: number | undefined; + + /** Timestamp (ms) when the request entered the execution pipeline */ + requestStart?: number | undefined; +} + + +/** + * Result of a request execution. 
+ * + * @template T - Response data type + * @template H - Headers type + * @template P - Params type + * @template RH - Response headers type + */ +export interface ExecuteResult< + T = unknown, + H = unknown, + P = unknown, + RH = unknown +> { + + response: FetchResponse; + fromCache: boolean; + cacheKey?: string; +} diff --git a/packages/fetch/src/helpers.ts b/packages/fetch/src/helpers.ts deleted file mode 100644 index f241cf8..0000000 --- a/packages/fetch/src/helpers.ts +++ /dev/null @@ -1,621 +0,0 @@ -import { - assert, - assertOptional, - isObject, - isFunction, - allKeysValid, - serializer -} from '@logosdx/utils'; -import type { FetchEngine } from './engine.ts'; -import { - type HttpMethods, - type RetryConfig, - type MatchTypes, - type RequestKeyOptions, - type _InternalHttpMethods -} from './types.ts'; - -export interface FetchError extends Error { - data: T | null; - status: number; - method: HttpMethods; - path: string; - - /** - * Whether the request was aborted (any cause: manual, timeout, or server). - */ - aborted?: boolean | undefined; - - /** - * Whether the request was aborted due to a timeout (attemptTimeout or totalTimeout). - * When true, `aborted` will also be true. - * When false but `aborted` is true, the abort was manual or server-initiated. - */ - timedOut?: boolean | undefined; - - attempt?: number | undefined; - step?: 'fetch' | 'parse' | 'response' | undefined; - url?: string | undefined; - headers?: H | undefined; -} - -export class FetchError extends Error { - - /** - * Returns true if the request was intentionally cancelled by the client - * (not due to a timeout). This indicates a user/app initiated abort. - * - * Use this to distinguish between "user navigated away" vs "request failed". 
- * - * @returns true if manually aborted, false otherwise - * - * @example - * ```typescript - * const [res, err] = await attempt(() => api.get('/data')); - * if (err?.isCancelled()) { - * // User cancelled - don't show error, don't log - * return; - * } - * ``` - */ - isCancelled(): boolean { - - if (this.status !== 499) return false; - - return this.aborted === true && this.timedOut !== true; - } - - /** - * Returns true if the request timed out (either attemptTimeout or totalTimeout). - * - * Use this to show "request timed out" messages or decide whether to retry. - * - * @returns true if a timeout fired, false otherwise - * - * @example - * ```typescript - * const [res, err] = await attempt(() => api.get('/data')); - * if (err?.isTimeout()) { - * toast.warn('Request timed out. Retrying...'); - * } - * ``` - */ - isTimeout(): boolean { - - if (this.status !== 499) return false; - - return this.timedOut === true; - } - - /** - * Returns true if the connection was lost (server dropped, network failed, etc.). - * This indicates the failure was NOT initiated by the client. - * - * Use this to show "connection lost" messages or trigger offline mode. - * - * @returns true if connection was lost, false otherwise - * - * @example - * ```typescript - * const [res, err] = await attempt(() => api.get('/data')); - * if (err?.isConnectionLost()) { - * toast.error('Connection lost. 
Check your internet.'); - * } - * ``` - */ - isConnectionLost(): boolean { - - if (this.status !== 499) return false; - - return this.step === 'fetch' && this.aborted === false; - } -} - -export const isFetchError = (error: unknown): error is FetchError => { - return error instanceof FetchError; -} - -export const fetchTypes = [ - 'arrayBuffer', - 'blob', - 'formData', - 'json', - 'text', -] satisfies FetchEngine.Type[]; - - -export const validateOptions = ( - opts: FetchEngine.Options -) => { - - const { - baseUrl, - defaultType, - headers, - methodHeaders, - params, - methodParams, - modifyOptions, - modifyMethodOptions, - timeout, - totalTimeout, - attemptTimeout, - validate, - determineType, - retry, - } = opts as FetchEngine.Options & { - totalTimeout?: number; - attemptTimeout?: number; - }; - - assert(baseUrl, 'baseUrl is required'); - - assertOptional( - defaultType, - fetchTypes.includes(defaultType!), - 'invalid type' - ); - - assertOptional( - timeout, - Number.isInteger(timeout!) && timeout! > -1, - 'timeout must be non-negative integer' - ); - - assertOptional( - totalTimeout, - Number.isInteger(totalTimeout!) && totalTimeout! > -1, - 'totalTimeout must be non-negative integer' - ); - - assertOptional( - attemptTimeout, - Number.isInteger(attemptTimeout!) && attemptTimeout! 
> -1, - 'attemptTimeout must be non-negative integer' - ); - - assertOptional( - headers, - isObject(headers), - 'headers must be an object' - ); - - assertOptional( - methodHeaders, - isObject(methodHeaders), - 'methodHeaders must be an object' - ); - - assertOptional( - methodHeaders, - () => allKeysValid(methodHeaders!, isObject), - 'methodHeaders items must be objects' - ); - - assertOptional( - params, - isObject(params), - 'params must be an object' - ); - - assertOptional( - methodParams, - isObject(methodParams), - 'methodParams must be an object' - ); - - assertOptional( - methodParams, - () => allKeysValid(methodParams!, isObject), - 'methodParams items must be objects' - ); - - assertOptional( - modifyOptions, - isFunction(modifyOptions), - 'modifyOptions must be a function' - ); - - assertOptional( - modifyMethodOptions, - isObject(modifyMethodOptions), - 'modifyMethodOptions must be an object' - ); - - assertOptional( - modifyMethodOptions, - () => allKeysValid(modifyMethodOptions!, isFunction), - 'modifyMethodOptions items must be functions' - ); - - assertOptional( - validate, - isObject(validate), - 'validate must be an object' - ); - - if (validate) { - - const { headers, state, perRequest } = validate; - - assertOptional( - headers, - isFunction(headers), - 'validate.headers must be a function' - ); - - assertOptional( - state, - isFunction(state), - 'validate.state must be a function' - ); - - assertOptional( - perRequest, - isObject(perRequest), - 'validate.perRequest must be an object' - ); - - if (perRequest) { - - const { headers } = perRequest; - - assertOptional( - headers, - typeof headers === 'boolean', - 'validate.perRequest.headers must be a boolean' - ); - } - } - - assertOptional( - determineType, - typeof determineType === 'function', - 'determineType must be a function' - ); - - - if (retry) { - - const optionalNumbers = [ - 'maxAttempts', - 'baseDelay', - 'maxDelay', - ] as const; - - for (const key of optionalNumbers) { - - const 
value = (retry as RetryConfig)[key]; - - if (typeof value !== 'number') continue; - - assertOptional( - value, - Number.isInteger(value) && value > 0, - `retry.${key} must be a positive number, got ${value}` - ); - } - - } -} - - -// Add default retry configuration -export const DEFAULT_RETRY_CONFIG: Required = { - maxAttempts: 3, - baseDelay: 1000, - maxDelay: 10000, - useExponentialBackoff: true, - retryableStatusCodes: [408, 429, 499, 500, 502, 503, 504], - shouldRetry(error) { - - // Note: Parent controller abort (totalTimeout) is checked in the retry loop, - // not here. This allows attemptTimeout aborts to still be retried. - - if (!error.status) return false; // No status means it failed in a way that was not handled by the engine - if (error.status === 499) return true; // We set 499 for requests that were reset, dropped, etc. - - // Retry on configured status codes - return this.retryableStatusCodes?.includes(error.status) ?? false; - } -}; - -/** - * Default HTTP methods that are subject to inflight deduplication. - * Only GET requests are deduplicated by default since they are idempotent. - */ -export const DEFAULT_INFLIGHT_METHODS: Set<_InternalHttpMethods> = new Set(['GET']); - -/** - * Checks if a path matches a rule's match type. - * - * Rules should specify exactly one match type. If multiple are specified, - * they are checked in order: is → startsWith → endsWith → includes → match. 
- * - * @param rule - The rule containing match type(s) - * @param path - The request path to match against - * @returns false if the path matches the rule's criteria - * - * @example - * ```typescript - * matchPath({ is: '/users' }, '/users'); // true - * matchPath({ startsWith: '/api' }, '/api/users'); // true - * matchPath({ endsWith: '.json' }, '/data.json'); // true - * matchPath({ includes: 'admin' }, '/admin/dash'); // true - * matchPath({ match: /^\/v\d+/ }, '/v2/users'); // true - * ``` - */ -export const matchPath = (rule: MatchTypes, path: string): boolean => { - - // 'is' is an exact match that can't be combined with others - if (rule.is !== undefined) { - - return path === rule.is; - } - - // For other match types, all specified types must match (AND logic) - // If no types are specified, return false - - let hasMatch = true; - let hasMatchType = false; - - if (rule.startsWith !== undefined) { - - hasMatchType = true; - hasMatch = hasMatch && path.startsWith(rule.startsWith); - } - - if (rule.endsWith !== undefined) { - - hasMatchType = true; - hasMatch = hasMatch && path.endsWith(rule.endsWith); - } - - if (rule.includes !== undefined) { - - hasMatchType = true; - hasMatch = hasMatch && path.includes(rule.includes); - } - - if (rule.match !== undefined) { - - hasMatchType = true; - hasMatch = hasMatch && rule.match.test(path); - } - - // Must have at least one match type and all specified types must match - return hasMatchType && hasMatch; -}; - -const toSet = (arr?: T[] | Set): Set | undefined => { - - if (!arr) return; - - return arr instanceof Set ? arr : new Set(arr); -}; - -/** - * Checks if a method matches the rule's method constraints. 
- * - * @param method - The HTTP method to check - * @param ruleMethods - Methods specified in the rule (undefined means inherit from defaults) - * @param defaultMethods - Default methods when rule doesn't specify any - * @returns true if the method is allowed by the rule - */ -export const matchMethod = ( - method: string, - ruleMethods: _InternalHttpMethods[] | undefined, - defaultMethods: _InternalHttpMethods[] | undefined, -): boolean => { - - const methods = toSet(ruleMethods) ?? toSet(defaultMethods) ?? DEFAULT_INFLIGHT_METHODS; - const normalizedMethod = method.toUpperCase() as _InternalHttpMethods; - - return methods.has(normalizedMethod); -}; - - -/** - * Validates an array of match rules. - * - * Each rule must be an object specifying at least one match type: - * is, startsWith, endsWith, includes, or match. - * - * Rules can specify multiple match types, which are combined with a logical AND. - * Except for 'is', which cannot be combined with other types, because it logically - * contradicts anything except an exact match. 
- * - * @param rules - Array of match rules to validate - * @throws {AssertionError} If any rule is invalid - * - * @example - * ```typescript - * const rules = [ - * { is: '/users' }, - * { startsWith: '/api' }, - * { endsWith: '.json' }, - * { includes: 'admin' }, - * { match: /^\/v\d+/ }, - * { startsWith: '/public', endsWith: '.html' }, - * { includes: 'user', match: /\/dash/ } - * ]; - * - * validateMatchRules(rules); // No error thrown - * ``` - */ -export const validateMatchRules = ( - rules: T[] -) => { - - assert(Array.isArray(rules), 'rules must be an array'); - - for (const r in rules) { - - const rule = rules[r]!; - - assert(rule && typeof rule === 'object', `rule[${r}] must be an object`); - - const matchTypes = ['is', 'startsWith', 'endsWith', 'includes', 'match'] as const; - - assert( - matchTypes.some(type => rule[type] !== undefined), - `rule[${r}] must specify at least one match type (is, startsWith, endsWith, includes, match)` - ); - - for (const type of matchTypes) { - - const value = rule[type]; - - if (value === undefined) { - - continue; - } - - if (type === 'match') { - - assert( - value instanceof RegExp, - `rule[${r}].match must be a RegExp` - ); - - continue; - } - - assert( - typeof value === 'string', - `rule[${r}].${type} must be a string` - ); - - assert( - (value as string).length > 0, - `rule[${r}].${type} cannot be an empty string` - ); - } - - // If 'is' is defined, no other match types should be defined - if (rule.is !== undefined) { - - assert( - !matchTypes - .filter(type => type !== 'is') - .some(type => rule[type] !== undefined), - `rule[${r}] 'is' contradicts with other match types and cannot be used together` - ); - } - } - -} - -/** - * Finds the first matching rule from a list of rules. - * - * Rules are checked in order - first match wins. Both path and method - * must match for a rule to be considered a match. 
- * - * @param rules - Array of rules to check - * @param method - HTTP method of the request - * @param path - Request path - * @param defaultMethods - Default methods to use when rule doesn't specify any - * @returns The first matching rule, or undefined if no rules match - * - * @example - * ```typescript - * const rules = [ - * { startsWith: '/admin', enabled: false }, - * { endsWith: '/stream', enabled: false }, - * { startsWith: '/api', methods: ['GET', 'POST'] } - * ]; - * - * findMatchingRule(rules, 'GET', '/admin/users', ['GET']); - * // Returns: { startsWith: '/admin', enabled: false } - * ``` - */ -export const findMatchingRule = ( - rules: T[], - method: string, - path: string, - defaultMethods: _InternalHttpMethods[] -): T | undefined => { - - for (const rule of rules) { - - if (matchPath(rule, path) && matchMethod(method, rule.methods, defaultMethods)) { - - return rule; - } - } - - return undefined; -}; - -/** - * Default request serializer for generating deduplication and cache keys. - * - * Serializes method, URL path+search, payload, and headers into a unique string key. - * Objects are serialized with sorted keys for consistency. - * - * Uses `url.pathname + url.search` which: - * - Includes the full path and query parameters - * - Excludes the hash fragment (which shouldn't affect request identity) - * - Excludes the origin (handled by FetchEngine instance) - * - * Note: `params` is no longer serialized separately since it's already - * included in `url.search`. 
- * - * @param opts - Request key options containing method, url, payload, headers - * @returns A unique string key for the request - * - * @example - * ```typescript - * defaultRequestSerializer({ - * method: 'GET', - * path: '/users/123', - * url: new URL('https://api.example.com/users/123?page=1'), - * payload: undefined, - * headers: { Authorization: 'Bearer token' } - * }); - * // Returns: 'GET|/users/123?page=1|undefined|{"Authorization":"Bearer token"}' - * ``` - */ -export const defaultRequestSerializer = (opts: RequestKeyOptions): string => { - - // Use url.pathname + url.search to get path with query params, excluding hash - const urlKey = opts.url.pathname + opts.url.search; - - const parts = [ - serializer([opts.method]), - serializer([urlKey]), - serializer([opts.payload]), - serializer([opts.headers]) - ]; - - return parts.join('|'); -}; - - -/** - * Default serializer for rate limit bucket keys. - * - * Groups requests by method + pathname only (no query params, payload, or headers). - * This creates per-endpoint rate limiting where all requests to the same path - * share the same rate limit bucket. 
- * - * @param opts - Request key options - * @returns A unique string key for the rate limit bucket - * - * @example - * ```typescript - * defaultRateLimitSerializer({ - * method: 'GET', - * path: '/users/123', - * url: new URL('https://api.example.com/users/123?page=1'), - * headers: { Authorization: 'Bearer token' } - * }); - * // Returns: 'GET|/users/123' - * ``` - */ -export const defaultRateLimitSerializer = (opts: RequestKeyOptions): string => { - - return `${opts.method}|${opts.url.pathname}`; -}; - diff --git a/packages/fetch/src/helpers/fetch-error.ts b/packages/fetch/src/helpers/fetch-error.ts new file mode 100644 index 0000000..b2ac5bd --- /dev/null +++ b/packages/fetch/src/helpers/fetch-error.ts @@ -0,0 +1,104 @@ +import type { FetchEngine } from '../engine/index.ts'; +import type { HttpMethods } from '../types.ts'; + + +export interface FetchError extends Error { + data: T | null; + status: number; + method: HttpMethods; + path: string; + + /** + * Whether the request was aborted (any cause: manual, timeout, or server). + */ + aborted?: boolean | undefined; + + /** + * Whether the request was aborted due to a timeout (attemptTimeout or totalTimeout). + * When true, `aborted` will also be true. + * When false but `aborted` is true, the abort was manual or server-initiated. + */ + timedOut?: boolean | undefined; + + attempt?: number | undefined; + step?: 'fetch' | 'parse' | 'response' | undefined; + url?: string | undefined; + headers?: H | undefined; +} + +export class FetchError extends Error { + + /** + * Returns true if the request was intentionally cancelled by the client + * (not due to a timeout). This indicates a user/app initiated abort. + * + * Use this to distinguish between "user navigated away" vs "request failed". 
+ * + * @returns true if manually aborted, false otherwise + * + * @example + * ```typescript + * const [res, err] = await attempt(() => api.get('/data')); + * if (err?.isCancelled()) { + * // User cancelled - don't show error, don't log + * return; + * } + * ``` + */ + isCancelled(): boolean { + + if (this.status !== 499) return false; + + return this.aborted === true && this.timedOut !== true; + } + + /** + * Returns true if the request timed out (either attemptTimeout or totalTimeout). + * + * Use this to show "request timed out" messages or decide whether to retry. + * + * @returns true if a timeout fired, false otherwise + * + * @example + * ```typescript + * const [res, err] = await attempt(() => api.get('/data')); + * if (err?.isTimeout()) { + * toast.warn('Request timed out. Retrying...'); + * } + * ``` + */ + isTimeout(): boolean { + + if (this.status !== 499) return false; + + return this.timedOut === true; + } + + /** + * Returns true if the connection was lost (server dropped, network failed, etc.). + * This indicates the failure was NOT initiated by the client. + * + * Use this to show "connection lost" messages or trigger offline mode. + * + * @returns true if connection was lost, false otherwise + * + * @example + * ```typescript + * const [res, err] = await attempt(() => api.get('/data')); + * if (err?.isConnectionLost()) { + * toast.error('Connection lost. 
Check your internet.'); + * } + * ``` + */ + isConnectionLost(): boolean { + + if (this.status !== 499) return false; + + return this.step === 'fetch' && this.aborted === false; + } +} + +export const isFetchError = (error: unknown): error is FetchError => { + + return error instanceof FetchError; +}; diff --git a/packages/fetch/src/helpers/index.ts b/packages/fetch/src/helpers/index.ts new file mode 100644 index 0000000..f12a777 --- /dev/null +++ b/packages/fetch/src/helpers/index.ts @@ -0,0 +1,6 @@ +// FetchError exports +export { FetchError, isFetchError } from './fetch-error.ts'; +export type { FetchError as FetchErrorType } from './fetch-error.ts'; + +// Validation exports +export { validateOptions, fetchTypes, DEFAULT_RETRY_CONFIG } from './validations.ts'; diff --git a/packages/fetch/src/helpers/validations.ts b/packages/fetch/src/helpers/validations.ts new file mode 100644 index 0000000..de15db1 --- /dev/null +++ b/packages/fetch/src/helpers/validations.ts @@ -0,0 +1,218 @@ +import { + assert, + assertOptional, + isObject, + isFunction, + allKeysValid, +} from '@logosdx/utils'; +import type { FetchEngine } from '../engine/index.ts'; +import type { RetryConfig } from '../types.ts'; + + +export const fetchTypes = [ + 'arrayBuffer', + 'blob', + 'formData', + 'json', + 'text', +] satisfies FetchEngine.Type[]; + + +export const validateOptions = ( + opts: FetchEngine.Config +) => { + + const { + baseUrl, + defaultType, + headers, + methodHeaders, + params, + methodParams, + modifyConfig, + modifyMethodConfig, + totalTimeout, + attemptTimeout, + validate, + determineType, + retry, + } = opts as FetchEngine.Config & { + totalTimeout?: number; + attemptTimeout?: number; + }; + + assert(baseUrl, 'baseUrl is required'); + + // Validate URL format + try { + + new URL(baseUrl); + } + catch { + + throw new Error('Invalid URL'); + } + + assertOptional( + defaultType, + fetchTypes.includes(defaultType!), + 'invalid type' + ); + + assertOptional( + totalTimeout, + 
Number.isInteger(totalTimeout!) && totalTimeout! > -1, + 'totalTimeout must be non-negative integer' + ); + + assertOptional( + attemptTimeout, + Number.isInteger(attemptTimeout!) && attemptTimeout! > -1, + 'attemptTimeout must be non-negative integer' + ); + + assertOptional( + headers, + isObject(headers), + 'headers must be an object' + ); + + assertOptional( + methodHeaders, + isObject(methodHeaders), + 'methodHeaders must be an object' + ); + + assertOptional( + methodHeaders, + () => allKeysValid(methodHeaders!, isObject), + 'methodHeaders items must be objects' + ); + + assertOptional( + params, + isObject(params), + 'params must be an object' + ); + + assertOptional( + methodParams, + isObject(methodParams), + 'methodParams must be an object' + ); + + assertOptional( + methodParams, + () => allKeysValid(methodParams!, isObject), + 'methodParams items must be objects' + ); + + assertOptional( + modifyConfig, + isFunction(modifyConfig), + 'modifyConfig must be a function' + ); + + assertOptional( + modifyMethodConfig, + isObject(modifyMethodConfig), + 'modifyMethodConfig must be an object' + ); + + assertOptional( + modifyMethodConfig, + () => allKeysValid(modifyMethodConfig!, isFunction), + 'modifyMethodConfig items must be functions' + ); + + assertOptional( + validate, + isObject(validate), + 'validate must be an object' + ); + + if (validate) { + + const { headers, state, perRequest } = validate; + + assertOptional( + headers, + isFunction(headers), + 'validate.headers must be a function' + ); + + assertOptional( + state, + isFunction(state), + 'validate.state must be a function' + ); + + assertOptional( + perRequest, + isObject(perRequest), + 'validate.perRequest must be an object' + ); + + if (perRequest) { + + const { headers } = perRequest; + + assertOptional( + headers, + typeof headers === 'boolean', + 'validate.perRequest.headers must be a boolean' + ); + } + } + + assertOptional( + determineType, + typeof determineType === 'function', + 
'determineType must be a function' + ); + + + if (retry) { + + const optionalNumbers = [ + 'maxAttempts', + 'baseDelay', + 'maxDelay', + ] as const; + + for (const key of optionalNumbers) { + + const value = (retry as RetryConfig)[key]; + + if (typeof value !== 'number') continue; + + assertOptional( + value, + Number.isInteger(value) && value > 0, + `retry.${key} must be a positive number, got ${value}` + ); + } + + } +} + + +// Add default retry configuration +export const DEFAULT_RETRY_CONFIG: Required = { + maxAttempts: 3, + baseDelay: 1000, + maxDelay: 10000, + useExponentialBackoff: true, + retryableStatusCodes: [408, 429, 499, 500, 502, 503, 504], + shouldRetry(error) { + + // Note: Parent controller abort (totalTimeout) is checked in the retry loop, + // not here. This allows attemptTimeout aborts to still be retried. + + if (!error.status) return false; // No status means it failed in a way that was not handled by the engine + if (error.status === 499) return true; // We set 499 for requests that were reset, dropped, etc. + + // Retry on configured status codes + return this.retryableStatusCodes?.includes(error.status) ?? 
false; + } +}; diff --git a/packages/fetch/src/index.ts b/packages/fetch/src/index.ts index 2e49e6f..10c29e4 100644 --- a/packages/fetch/src/index.ts +++ b/packages/fetch/src/index.ts @@ -1,9 +1,10 @@ -import { FetchEngine } from './engine.ts'; +// New modular imports +import { FetchEngine } from './engine/index.ts'; export { FetchError, isFetchError -} from './helpers.ts'; +} from './helpers/index.ts'; export type { FetchResponse, @@ -19,11 +20,7 @@ export type { RequestSerializer } from './types.ts'; -export { - defaultRequestSerializer, - defaultRateLimitSerializer -} from './helpers.ts'; - +// Serializers (requestSerializer for dedupe/cache, endpointSerializer for rate-limit) export { endpointSerializer, requestSerializer @@ -44,19 +41,51 @@ export type { RateLimitPolicyState } from './policies/index.ts'; -export { - PropertyStore -} from './property-store.ts'; +export { FetchEngine } from './engine/index.ts'; +export { FetchState } from './state/index.ts'; +export { ConfigStore } from './options/index.ts'; +export { HeadersManager } from './properties/headers.ts'; +export { ParamsManager } from './properties/params.ts'; +export { PropertyStore } from './properties/store.ts'; + +export type { + FetchEngineCore, + InternalReqOptions, + ExecuteResult, + CallConfig, + AbortablePromise, + InstanceResponseHeaders +} from './engine/index.ts'; + +export type { + EventMap, + EventData, + DedupeEventData, + CacheEventData, + RateLimitEventData, + StateEventData, + PropertyEventData, + OptionsEventData +} from './engine/index.ts'; + +export type { + EngineConfig, + EngineType, + EngineRequestConfig as EngineRequestOpts, + EngineLifecycle, + ValidateConfig, + ModifyConfigFn, + DetermineTypeFn, + InstanceHeaders, + InstanceParams, + InstanceState +} from './options/types.ts'; export type { PropertyStoreOptions, PropertyValidateFn, MethodOverrides -} from './property-store.ts'; - -export { - FetchEngine -} from './engine.ts'; +} from './properties/store.ts'; const 
baseEngine = new FetchEngine({ baseUrl: globalThis?.location?.origin ?? 'https://logosdx.dev', @@ -68,6 +97,9 @@ export const request = baseEngine.request.bind(baseEngine); /** See {@link FetchEngine.options}. */ export const options = baseEngine.options.bind(baseEngine); +/** See {@link FetchEngine.head}. */ +export const head = baseEngine.head.bind(baseEngine); + /** See {@link FetchEngine.get}. */ export const get = baseEngine.get.bind(baseEngine); @@ -83,55 +115,19 @@ export const put = baseEngine.put.bind(baseEngine); /** See {@link FetchEngine.patch}. */ export const patch = baseEngine.patch.bind(baseEngine); +/** See {@link FetchEngine.headers}. */ +export const headers = baseEngine.headers; +/** See {@link FetchEngine.params}. */ +export const params = baseEngine.params; +/** See {@link FetchEngine.state}. */ +export const state = baseEngine.state; -/** See {@link FetchEngine.removeHeader}. */ -export const removeHeader = baseEngine.removeHeader.bind(baseEngine); - -/** See {@link FetchEngine.removeParam}. */ -export const removeParam = baseEngine.removeParam.bind(baseEngine); - - - -/** See {@link FetchEngine.addHeader}. */ -export const addHeader = baseEngine.addHeader.bind(baseEngine); - -/** See {@link FetchEngine.addParam}. */ -export const addParam = baseEngine.addParam.bind(baseEngine); - - - -/** See {@link FetchEngine.hasHeader}. */ -export const hasHeader = baseEngine.hasHeader.bind(baseEngine); - -/** See {@link FetchEngine.hasParam}. */ -export const hasParam = baseEngine.hasParam.bind(baseEngine); - - - -/** See {@link FetchEngine.setState}. */ -export const setState = baseEngine.setState.bind(baseEngine); - -/** See {@link FetchEngine.resetState}. */ -export const resetState = baseEngine.resetState.bind(baseEngine); - - - -/** See {@link FetchEngine.getState}. */ -export const getState = baseEngine.getState.bind(baseEngine); - -/** See {@link FetchEngine.changeBaseUrl}. 
*/ -export const changeBaseUrl = baseEngine.changeBaseUrl.bind(baseEngine); - -/** See {@link FetchEngine.changeModifyOptions}. */ -export const changeModifyOptions = baseEngine.changeModifyOptions.bind(baseEngine); - -/** See {@link FetchEngine.changeModifyMethodOptions}. */ -export const changeModifyMethodOptions = baseEngine.changeModifyMethodOptions.bind(baseEngine); - - +/** See {@link FetchEngine.config}. */ +export const config = baseEngine.config; +// Event methods /** See {@link FetchEngine.on}. */ export const on = baseEngine.on.bind(baseEngine); @@ -139,6 +135,5 @@ export const on = baseEngine.on.bind(baseEngine); export const off = baseEngine.off.bind(baseEngine); - /** See {@link FetchEngine}. */ export default baseEngine; \ No newline at end of file diff --git a/packages/fetch/src/options/index.ts b/packages/fetch/src/options/index.ts new file mode 100644 index 0000000..26a193e --- /dev/null +++ b/packages/fetch/src/options/index.ts @@ -0,0 +1,182 @@ +import { clone, assert, isObject, reach, setDeep, type PathNames, type PathValue } from '@logosdx/utils'; +import type { FetchEngineCore } from '../engine/types.ts'; +import type { OptionsEventData } from '../engine/events.ts'; +import type { EngineConfig, InstanceHeaders, InstanceParams, InstanceState } from './types.ts'; +import { validateOptions } from '../helpers/validations.ts'; + + +/** + * Manages configuration options for FetchEngine with deep path access. + * + * Provides a clean API for getting and setting nested configuration + * values with type-safe paths and automatic event emission on mutations. + * ConfigStore is the single source of truth for ALL configuration. 
+ * + * The store is fully typed with EngineConfig, ensuring: + * - `get('baseUrl')` returns `string` + * - `get('retry.maxAttempts')` returns `number` + * - `get('dedupePolicy')` returns the correct policy type + * - `set('timeout', value)` validates value is a number + * + * @template H - Headers type + * @template P - Params type + * @template S - State type + * + * @example + * ```typescript + * // Access via engine.config + * engine.config.get('baseUrl') // string + * engine.config.get('retry.maxAttempts') // number + * + * // Set options (runtime configurable) + * engine.config.set('baseUrl', 'https://new-api.com') + * engine.config.set('retry.maxAttempts', 5) + * + * // Merge partial options + * engine.config.set({ retry: { maxAttempts: 5 } }) + * ``` + */ +export class ConfigStore< + H = InstanceHeaders, + P = InstanceParams, + S = InstanceState +> { + + #engine: FetchEngineCore; + #config: EngineConfig; + + constructor(engine: FetchEngineCore, initialConfig: EngineConfig) { + + validateOptions(initialConfig as any); + this.#engine = engine; + this.#config = clone(initialConfig); + } + + /** + * Get a deep clone of all options or a specific nested value. + * + * Returns a cloned copy to prevent external mutations. + * All return types are properly inferred from EngineConfig. + * + * @example + * ```typescript + * // Get all options + * const opts = engine.config.get(); // EngineConfig + * + * // Get nested value + * const maxAttempts = engine.config.get('retry.maxAttempts'); // number + * const baseUrl = engine.config.get('baseUrl'); // string + * ``` + */ + get(): EngineConfig; + get> & string>(path: K): PathValue, K>; + get> & string>(path?: K): EngineConfig | PathValue, K> { + + if (path === undefined) { + + return clone(this.#config) as EngineConfig; + } + + const value = reach(this.#config, path); + return (isObject(value) ? clone(value) : value) as PathValue, K>; + } + + /** + * Set options by path-value or by partial object merge. 
+ * + * Emits 'options-change' event after successful update. + * All values are type-checked against EngineConfig. + * + * @example + * ```typescript + * // Set by path (type-checked) + * engine.config.set('baseUrl', 'https://new-api.com'); // OK + * engine.config.set('retry.maxAttempts', 5); // OK + * engine.config.set('retry.maxAttempts', 'five'); // Type error! + * + * // Merge partial options + * engine.config.set({ retry: { maxAttempts: 5 } }); + * ``` + */ + set> & string>(path: K, value: PathValue, K>): void; + set> & string>(path: K, value: undefined): void; + set(partial: Partial>): void; + set> & string>( + pathOrPartial: K | Partial>, + value?: PathValue, K> + ): void { + + const isPath = typeof pathOrPartial === 'string'; + + assert( + isObject(pathOrPartial) || isPath, + 'set requires a path string or config object' + ); + + if (isPath) { + + setDeep(this.#config, pathOrPartial, value as any); + + const eventData = { path: pathOrPartial, value } as OptionsEventData; + this.#engine.emit('config-change', eventData as any); + } + else { + + this.#mergeDeep(this.#config, pathOrPartial as Partial>); + + const eventData = { value: pathOrPartial } as OptionsEventData; + this.#engine.emit('config-change', eventData as any); + } + } + + /** + * Deep merge source into target, mutating target. + */ + #mergeDeep(target: any, source: any): void { + + for (const key of Object.keys(source)) { + + const sourceVal = source[key]; + const targetVal = target[key]; + + if (isObject(sourceVal) && isObject(targetVal)) { + + this.#mergeDeep(targetVal, sourceVal); + } + else { + + target[key] = sourceVal; + } + } + } + + /** + * Set an option directly without emitting events. + * + * Used internally for backward compatibility methods that + * emit their own specific events. 
+ * + * @internal + */ + _setDirect> & string>(path: K, value: PathValue, K>): void { + + setDeep(this.#config, path, value); + } +} + + +// Re-export types +export type { + EngineConfig, + EngineType, + RequestConfig, + CallConfig, + EngineRequestConfig, + EngineLifecycle, + ValidateConfig, + ModifyConfigFn, + DetermineTypeFn, + InstanceHeaders, + InstanceParams, + InstanceState +} from './types.ts'; diff --git a/packages/fetch/src/options/types.ts b/packages/fetch/src/options/types.ts new file mode 100644 index 0000000..4dd4f07 --- /dev/null +++ b/packages/fetch/src/options/types.ts @@ -0,0 +1,274 @@ +/** + * Engine Config Types + * + * This file defines the Config interface independently of the FetchEngine class + * to avoid circular dependencies. The FetchEngine.Config namespace type in + * types.ts is defined to match this interface. + */ + +import type { + _InternalHttpMethods, + HttpMethodOpts, + RetryConfig, + DeduplicationConfig, + CacheConfig, + RateLimitConfig, + DictAndT, + MethodHeaders, +} from '../types.ts'; + +import type { FetchError } from '../helpers/fetch-error.ts'; + + +/** + * Response type that can be returned from the server. + */ +export type EngineType = 'arrayBuffer' | 'blob' | 'formData' | 'json' | 'text'; + + +/** + * Base headers interface that users can augment. + */ +export interface InstanceHeaders { + + Authorization?: string; + 'Content-Type'?: string; + Accept?: string; + 'Accept-Language'?: string; +} + + +/** + * Base params interface that users can augment. + */ +export interface InstanceParams {} + + +/** + * Base state interface that users can augment. + */ +export interface InstanceState {} + + +/** + * Result from determineType function. + */ +export interface DetermineTypeResult { + + type: Exclude; + isJson: boolean; +} + + +/** + * Function type for determining response body type. 
+ */ +export interface DetermineTypeFn { + + (response: Response): DetermineTypeResult; +} + + +/** + * Base request configuration shared between per-request and instance-level config. + * + * Extends native fetch RequestInit with typed headers/params and timeout settings. + * This is the foundation for both CallConfig and EngineConfig. + * + * @template H - Headers type + * @template P - Params type + */ +export interface RequestConfig + extends Omit { + + /** Request headers (merged with instance defaults) */ + headers?: DictAndT | undefined; + + /** URL parameters (merged with instance defaults) */ + params?: DictAndT
<P>
| undefined; + + /** AbortSignal for request cancellation */ + signal?: AbortSignal | undefined; + + /** Total timeout for entire request lifecycle including retries (ms) */ + totalTimeout?: number | undefined; + + /** Per-attempt timeout (ms) - each retry gets fresh timeout */ + attemptTimeout?: number | undefined; + + /** Function to determine response body type based on response */ + determineType?: DetermineTypeFn | undefined; + + /** Retry configuration */ + retry?: RetryConfig | boolean | undefined; +} + + +/** + * Per-request configuration passed to HTTP methods (get, post, etc). + * + * Extends RequestConfig with per-request lifecycle hooks and abort controller. + * + * @template H - Headers type + * @template P - Params type + */ +export interface CallConfig + extends RequestConfig, EngineLifecycle { + + /** AbortController for manual request cancellation */ + abortController?: AbortController | undefined; + + /** @deprecated Use totalTimeout instead */ + timeout?: number | undefined; +} + + +/** + * Request config passed to modifyConfig and callbacks. + * + * This is what callbacks receive - includes the controller that was created + * for the request. + */ +export interface EngineRequestConfig + extends RequestConfig { + + /** The AbortController created for this request */ + controller: AbortController; +} + + +/** + * Function type for modifying request config before they are sent. + */ +export type ModifyConfigFn = + (opts: EngineRequestConfig, state: S) => EngineRequestConfig; + + +/** + * Lifecycle hooks for requests. + */ +export interface EngineLifecycle { + + onError?: ((err: FetchError) => void | Promise) | undefined; + onBeforeReq?: ((opts: EngineRequestConfig) => void | Promise) | undefined; + onAfterReq?: ((response: Response, opts: EngineRequestConfig) => void | Promise) | undefined; +} + + +/** + * Validation configuration for headers, params, and state. 
+ */ +export interface ValidateConfig { + + headers?: ((headers: DictAndT, method?: _InternalHttpMethods) => void) | undefined; + params?: ((params: DictAndT
<P>
, method?: _InternalHttpMethods) => void) | undefined; + state?: ((state: S) => void) | undefined; + + perRequest?: { + headers?: boolean | undefined; + params?: boolean | undefined; + } | undefined; +} + + +/** + * Full configuration options for FetchEngine. + * + * This is the primary configuration interface. It's defined here + * independently of the FetchEngine class to avoid circular dependencies. + * + * Extends native fetch RequestInit to allow instance-level defaults for + * options like `credentials`, `mode`, `cache`, `redirect`, etc. + * + * @template H - Headers type + * @template P - Params type + * @template S - State type + */ +export interface EngineConfig< + H = InstanceHeaders, + P = InstanceParams, + S = InstanceState +> extends Omit, EngineLifecycle { + + /** + * The base URL for all requests. + */ + baseUrl: string; + + /** + * The default type of response expected from the server. + */ + defaultType?: EngineType | undefined; + + /** + * The headers to be set on all requests. + */ + headers?: DictAndT | undefined; + + /** + * The headers to be set on requests of a specific method. + */ + methodHeaders?: MethodHeaders | undefined; + + /** + * URL parameters to be set on all requests. + */ + params?: DictAndT
<P>
| undefined; + + /** + * URL parameters to be set on requests of a specific method. + */ + methodParams?: HttpMethodOpts>> | undefined; + + /** + * Function that can be used to change the config before a request. + */ + modifyConfig?: ModifyConfigFn | undefined; + + /** + * Object for modifying config for requests of a specific method. + */ + modifyMethodConfig?: HttpMethodOpts> | undefined; + + /** + * Validators for headers, params, and state. + */ + validate?: ValidateConfig; + + /** + * Optional name for this FetchEngine instance. + */ + name?: string | undefined; + + /** + * Spy function that receives all event emissions. + */ + spy?: ((action: { + event: string | RegExp | '*', + fn: 'on' | 'once' | 'off' | 'emit' | 'cleanup', + data?: unknown, + listener?: Function | null, + context: any + }) => void) | undefined; + + /** + * Deduplication policy configuration. + */ + dedupePolicy?: boolean | DeduplicationConfig | undefined; + + /** + * Cache policy configuration. + */ + cachePolicy?: boolean | CacheConfig | undefined; + + /** + * Rate limit policy configuration. 
+ */ + rateLimitPolicy?: boolean | RateLimitConfig | undefined; + + // From RequestOpts + totalTimeout?: number | undefined; + attemptTimeout?: number | undefined; + determineType?: DetermineTypeFn | undefined; + retry?: RetryConfig | boolean | undefined; +} diff --git a/packages/fetch/src/policies/base.ts b/packages/fetch/src/policies/base.ts index 41acf08..e23b1fb 100644 --- a/packages/fetch/src/policies/base.ts +++ b/packages/fetch/src/policies/base.ts @@ -10,7 +10,7 @@ import type { PolicyInternalState } from './types.ts'; -import { findMatchingRule, validateMatchRules } from '../helpers.ts'; +import { findMatchingRule, validateMatchRules } from './helpers.ts'; /** diff --git a/packages/fetch/src/policies/cache.ts b/packages/fetch/src/policies/cache.ts index e6e92ce..2f2d5f1 100644 --- a/packages/fetch/src/policies/cache.ts +++ b/packages/fetch/src/policies/cache.ts @@ -7,11 +7,11 @@ import type { RequestKeyOptions } from '../types.ts'; -import type { FetchEngine } from '../engine.ts'; +import type { RequestExecutor } from '../engine/executor.ts'; +import { requestSerializer } from '../serializers/index.ts'; +import { validateMatchRules } from './helpers.ts'; import { ResiliencePolicy } from './base.ts'; -import { requestSerializer } from '../serializers/index.ts'; -import { validateMatchRules } from '../helpers.ts'; /** @@ -124,8 +124,8 @@ export class CachePolicy< P = unknown > extends ResiliencePolicy, CacheRule, S, H, P> { - /** Reference to the FetchEngine instance */ - #engine: FetchEngine; + /** Reference to the RequestExecutor instance */ + #executor: RequestExecutor; /** * Extended state with cache-specific fields. 
@@ -138,10 +138,10 @@ export class CachePolicy< */ #adapter: CacheAdapter | undefined; - constructor(engine: FetchEngine) { + constructor(executor: RequestExecutor) { super(); - this.#engine = engine; + this.#executor = executor; } /** @@ -377,7 +377,7 @@ export class CachePolicy< } const key = config.serializer!(normalizedOpts); - const cached = await this.#engine._flight.getCache(key); + const cached = await this.#executor.flight.getCache(key); if (cached) { @@ -386,12 +386,12 @@ export class CachePolicy< if (!cached.isStale) { // Fresh cache hit - this.#engine.emit('fetch-cache-hit' as any, { + this.#executor.engine.emit('cache-hit' as any, { ...normalizedOpts, key, isStale: false, expiresIn, - }); + } as any); clearTimeout(); @@ -399,24 +399,24 @@ export class CachePolicy< } // Stale - return immediately + trigger background revalidation - this.#engine.emit('fetch-cache-stale' as any, { + this.#executor.engine.emit('cache-stale' as any, { ...normalizedOpts, key, isStale: true, expiresIn, - }); + } as any); - this.#engine._triggerBackgroundRevalidation(method, path, options as any, key, config); + this.#executor.triggerBackgroundRevalidation(method, path, options as any, key, config as any); clearTimeout(); return { hit: true, value: cached.value as T, key }; } // Cache miss - this.#engine.emit('fetch-cache-miss' as any, { + this.#executor.engine.emit('cache-miss' as any, { ...normalizedOpts, key, - }); + } as any); return { hit: false, key, config }; } diff --git a/packages/fetch/src/policies/dedupe.ts b/packages/fetch/src/policies/dedupe.ts index 0b252e0..0e072e0 100644 --- a/packages/fetch/src/policies/dedupe.ts +++ b/packages/fetch/src/policies/dedupe.ts @@ -6,7 +6,7 @@ import type { RequestKeyOptions } from '../types.ts'; -import type { FetchEngine } from '../engine.ts'; +import type { RequestExecutor } from '../engine/executor.ts'; import { ResiliencePolicy } from './base.ts'; import { requestSerializer } from '../serializers/index.ts'; @@ -84,13 +84,13 @@ 
export class DedupePolicy< P = unknown > extends ResiliencePolicy, DedupeRule, S, H, P> { - /** Reference to the FetchEngine instance */ - #engine: FetchEngine; + /** Reference to the RequestExecutor instance */ + #executor: RequestExecutor; - constructor(engine: FetchEngine) { + constructor(executor: RequestExecutor) { super(); - this.#engine = engine; + this.#executor = executor; } /** @@ -171,18 +171,18 @@ export class DedupePolicy< } const key = config.serializer!(normalizedOpts); - const inflight = this.#engine._flight.getInflight(key); + const inflight = this.#executor.flight.getInflight(key); if (inflight) { // Join existing in-flight request - const waitingCount = this.#engine._flight.joinInflight(key); + const waitingCount = this.#executor.flight.joinInflight(key); - this.#engine.emit('fetch-dedupe-join' as any, { + this.#executor.engine.emit('dedupe-join' as any, { ...normalizedOpts, key, waitingCount, - }); + } as any); return { joined: true, @@ -192,10 +192,10 @@ export class DedupePolicy< } // No in-flight request - emit start event, caller will track - this.#engine.emit('fetch-dedupe-start' as any, { + this.#executor.engine.emit('dedupe-start' as any, { ...normalizedOpts, key, - }); + } as any); return { joined: false, diff --git a/packages/fetch/src/policies/helpers.ts b/packages/fetch/src/policies/helpers.ts new file mode 100644 index 0000000..813b107 --- /dev/null +++ b/packages/fetch/src/policies/helpers.ts @@ -0,0 +1,234 @@ +import { assert } from '@logosdx/utils'; +import type { MatchTypes, _InternalHttpMethods } from '../types.ts'; + + +/** + * Default HTTP methods that are subject to inflight deduplication. + * Only GET requests are deduplicated by default since they are idempotent. + */ +export const DEFAULT_INFLIGHT_METHODS: Set<_InternalHttpMethods> = new Set(['GET']); + + +/** + * Checks if a path matches a rule's match type. + * + * Rules should specify exactly one match type. 
If multiple are specified, + * they are checked in order: is → startsWith → endsWith → includes → match. + * + * @param rule - The rule containing match type(s) + * @param path - The request path to match against + * @returns false if the path matches the rule's criteria + * + * @example + * ```typescript + * matchPath({ is: '/users' }, '/users'); // true + * matchPath({ startsWith: '/api' }, '/api/users'); // true + * matchPath({ endsWith: '.json' }, '/data.json'); // true + * matchPath({ includes: 'admin' }, '/admin/dash'); // true + * matchPath({ match: /^\/v\d+/ }, '/v2/users'); // true + * ``` + */ +export const matchPath = (rule: MatchTypes, path: string): boolean => { + + // 'is' is an exact match that can't be combined with others + if (rule.is !== undefined) { + + return path === rule.is; + } + + // For other match types, all specified types must match (AND logic) + // If no types are specified, return false + + let hasMatch = true; + let hasMatchType = false; + + if (rule.startsWith !== undefined) { + + hasMatchType = true; + hasMatch = hasMatch && path.startsWith(rule.startsWith); + } + + if (rule.endsWith !== undefined) { + + hasMatchType = true; + hasMatch = hasMatch && path.endsWith(rule.endsWith); + } + + if (rule.includes !== undefined) { + + hasMatchType = true; + hasMatch = hasMatch && path.includes(rule.includes); + } + + if (rule.match !== undefined) { + + hasMatchType = true; + hasMatch = hasMatch && rule.match.test(path); + } + + // Must have at least one match type and all specified types must match + return hasMatchType && hasMatch; +}; + + +const toSet = (arr?: T[] | Set): Set | undefined => { + + if (!arr) return; + + return arr instanceof Set ? arr : new Set(arr); +}; + + +/** + * Checks if a method matches the rule's method constraints. 
+ * + * @param method - The HTTP method to check + * @param ruleMethods - Methods specified in the rule (undefined means inherit from defaults) + * @param defaultMethods - Default methods when rule doesn't specify any + * @returns true if the method is allowed by the rule + */ +export const matchMethod = ( + method: string, + ruleMethods: _InternalHttpMethods[] | undefined, + defaultMethods: _InternalHttpMethods[] | undefined, +): boolean => { + + const methods = toSet(ruleMethods) ?? toSet(defaultMethods) ?? DEFAULT_INFLIGHT_METHODS; + const normalizedMethod = method.toUpperCase() as _InternalHttpMethods; + + return methods.has(normalizedMethod); +}; + + +/** + * Validates an array of match rules. + * + * Each rule must be an object specifying at least one match type: + * is, startsWith, endsWith, includes, or match. + * + * Rules can specify multiple match types, which are combined with a logical AND. + * Except for 'is', which cannot be combined with other types, because it logically + * contradicts anything except an exact match. 
+ * + * @param rules - Array of match rules to validate + * @throws {AssertionError} If any rule is invalid + * + * @example + * ```typescript + * const rules = [ + * { is: '/users' }, + * { startsWith: '/api' }, + * { endsWith: '.json' }, + * { includes: 'admin' }, + * { match: /^\/v\d+/ }, + * { startsWith: '/public', endsWith: '.html' }, + * { includes: 'user', match: /\/dash/ } + * ]; + * + * validateMatchRules(rules); // No error thrown + * ``` + */ +export const validateMatchRules = ( + rules: T[] +) => { + + assert(Array.isArray(rules), 'rules must be an array'); + + for (const r in rules) { + + const rule = rules[r]!; + + assert(rule && typeof rule === 'object', `rule[${r}] must be an object`); + + const matchTypes = ['is', 'startsWith', 'endsWith', 'includes', 'match'] as const; + + assert( + matchTypes.some(type => rule[type] !== undefined), + `rule[${r}] must specify at least one match type (is, startsWith, endsWith, includes, match)` + ); + + for (const type of matchTypes) { + + const value = rule[type]; + + if (value === undefined) { + + continue; + } + + if (type === 'match') { + + assert( + value instanceof RegExp, + `rule[${r}].match must be a RegExp` + ); + + continue; + } + + assert( + typeof value === 'string', + `rule[${r}].${type} must be a string` + ); + + assert( + (value as string).length > 0, + `rule[${r}].${type} cannot be an empty string` + ); + } + + // If 'is' is defined, no other match types should be defined + if (rule.is !== undefined) { + + assert( + !matchTypes + .filter(type => type !== 'is') + .some(type => rule[type] !== undefined), + `rule[${r}] 'is' contradicts with other match types and cannot be used together` + ); + } + } +} + + +/** + * Finds the first matching rule from a list of rules. + * + * Rules are checked in order - first match wins. Both path and method + * must match for a rule to be considered a match. 
+ * + * @param rules - Array of rules to check + * @param method - HTTP method of the request + * @param path - Request path + * @param defaultMethods - Default methods to use when rule doesn't specify any + * @returns The first matching rule, or undefined if no rules match + * + * @example + * ```typescript + * const rules = [ + * { startsWith: '/admin', enabled: false }, + * { endsWith: '/stream', enabled: false }, + * { startsWith: '/api', methods: ['GET', 'POST'] } + * ]; + * + * findMatchingRule(rules, 'GET', '/admin/users', ['GET']); + * // Returns: { startsWith: '/admin', enabled: false } + * ``` + */ +export const findMatchingRule = ( + rules: T[], + method: string, + path: string, + defaultMethods: _InternalHttpMethods[] +): T | undefined => { + + for (const rule of rules) { + + if (matchPath(rule, path) && matchMethod(method, rule.methods, defaultMethods)) { + + return rule; + } + } + + return undefined; +}; diff --git a/packages/fetch/src/policies/index.ts b/packages/fetch/src/policies/index.ts index 715be93..c7d801d 100644 --- a/packages/fetch/src/policies/index.ts +++ b/packages/fetch/src/policies/index.ts @@ -1,15 +1,18 @@ export { ResiliencePolicy } from './base.ts'; + export { DedupePolicy, type DedupeCheckResult, type DedupeExecutionContext } from './dedupe.ts'; + export { CachePolicy, type CachePolicyState, type CacheCheckResult, type CacheExecutionContext } from './cache.ts'; + export { RateLimitPolicy, type RateLimitPolicyState, diff --git a/packages/fetch/src/policies/rate-limit.ts b/packages/fetch/src/policies/rate-limit.ts index a52b9dd..fa7e9ab 100644 --- a/packages/fetch/src/policies/rate-limit.ts +++ b/packages/fetch/src/policies/rate-limit.ts @@ -11,8 +11,7 @@ import type { import { ResiliencePolicy } from './base.ts'; import { endpointSerializer } from '../serializers/index.ts'; -import { validateMatchRules } from '../helpers.ts'; - +import { validateMatchRules } from './helpers.ts'; /** * Execution context for rate limit guard. 
@@ -351,7 +350,7 @@ export class RateLimitPolicy< if (!config.waitForToken) { // Reject immediately - emit('fetch-ratelimit-reject', eventData); + emit('ratelimit-reject', eventData); clearTimeout(); throw new RateLimitError( @@ -361,7 +360,7 @@ export class RateLimitPolicy< } // Wait for token - emit('fetch-ratelimit-wait', eventData); + emit('ratelimit-wait', eventData); // Call the onRateLimit callback if configured if (this.onRateLimit) { @@ -384,7 +383,7 @@ export class RateLimitPolicy< // Token acquired after waiting const postWaitSnapshot = bucket.snapshot; - emit('fetch-ratelimit-acquire', { + emit('ratelimit-acquire', { ...normalizedOpts, key, currentTokens: postWaitSnapshot.currentTokens, @@ -401,7 +400,7 @@ export class RateLimitPolicy< // Get post-consumption snapshot for event data const postConsumeSnapshot = bucket.snapshot; - emit('fetch-ratelimit-acquire', { + emit('ratelimit-acquire', { ...normalizedOpts, key, currentTokens: postConsumeSnapshot.currentTokens, diff --git a/packages/fetch/src/properties/headers.ts b/packages/fetch/src/properties/headers.ts new file mode 100644 index 0000000..c90b42c --- /dev/null +++ b/packages/fetch/src/properties/headers.ts @@ -0,0 +1,215 @@ +import type { FetchEngineCore } from '../engine/types.ts'; +import type { HttpMethods, DictAndT } from '../types.ts'; +import { PropertyStore, type PropertyStoreOptions } from './store.ts'; + + +/** + * Manages HTTP headers for FetchEngine with event emission. + * + * Wraps PropertyStore with FetchEngine-specific event emission. + * Pulls initial headers and validation from engine options. 
+ * + * @template H - Headers type + * + * @example + * ```typescript + * // Access via engine.headers + * engine.headers.set('Authorization', 'Bearer token'); + * engine.headers.set({ 'X-API-Key': 'abc', 'X-Request-ID': '123' }); + * + * // Method-specific headers + * engine.headers.set('Content-Type', 'application/json', 'POST'); + * + * // Remove headers + * engine.headers.remove('Authorization'); + * engine.headers.remove(['X-API-Key', 'X-Request-ID']); + * + * // Check if header exists + * if (engine.headers.has('Authorization')) { ... } + * + * // Get resolved headers for a request + * const headers = engine.headers.resolve('POST', { 'X-Override': 'value' }); + * ``` + */ +export class HeadersManager { + + #engine: FetchEngineCore; + #store: PropertyStore>; + + constructor(engine: FetchEngineCore) { + + this.#engine = engine; + + const defaults = engine.config.get('headers') ?? {} as DictAndT; + const methodOverrides = engine.config.get('methodHeaders'); + const validate = engine.config.get('validate.headers'); + + const storeOptions: PropertyStoreOptions> = { defaults }; + + if (methodOverrides !== undefined) { + + storeOptions.methodOverrides = methodOverrides; + } + + if (validate !== undefined) { + + storeOptions.validate = validate; + } + + this.#store = new PropertyStore>(storeOptions); + } + + /** + * Set a header value globally or for a specific method. + * + * @example + * ```typescript + * engine.headers.set('Authorization', 'Bearer token'); + * engine.headers.set('Content-Type', 'application/json', 'POST'); + * ``` + */ + set(key: string, value: string, method?: HttpMethods): void; + + /** + * Set multiple header values globally or for a specific method. 
+ * + * @example + * ```typescript + * engine.headers.set({ Authorization: 'Bearer token', 'X-API-Key': 'abc' }); + * engine.headers.set({ 'Content-Type': 'application/json' }, 'POST'); + * ``` + */ + set(headers: Partial>, method?: HttpMethods): void; + + set( + keyOrHeaders: string | Partial>, + valueOrMethod?: string | HttpMethods, + maybeMethod?: HttpMethods + ): void { + + if (typeof keyOrHeaders === 'string') { + + const key = keyOrHeaders; + const value = valueOrMethod as string; + const method = maybeMethod; + + this.#store.set(key, value, method); + + const eventData = { key, value, method }; + this.#engine.emit('header-add' as any, eventData); + + return; + } + + const headers = keyOrHeaders; + const method = valueOrMethod as HttpMethods | undefined; + + this.#store.set(headers, method); + + const eventData = { value: headers as Partial>, method }; + this.#engine.emit('header-add' as any, eventData); + } + + /** + * Remove a header globally or for a specific method. + * + * @example + * ```typescript + * engine.headers.remove('Authorization'); + * engine.headers.remove('Content-Type', 'POST'); + * ``` + */ + remove(key: string, method?: HttpMethods): void; + + /** + * Remove multiple headers globally or for a specific method. + * + * @example + * ```typescript + * engine.headers.remove(['Authorization', 'X-API-Key']); + * engine.headers.remove(['Content-Type'], 'POST'); + * ``` + */ + remove(keys: string[], method?: HttpMethods): void; + + remove(keyOrKeys: string | string[], method?: HttpMethods): void { + + this.#store.remove(keyOrKeys as string, method); + + const eventData = { key: keyOrKeys, method }; + this.#engine.emit('header-remove' as any, eventData); + } + + /** + * Check if a header exists globally or for a specific method. 
+ * + * @example + * ```typescript + * if (engine.headers.has('Authorization')) { + * console.log('Auth header is set'); + * } + * ``` + */ + has(key: string, method?: HttpMethods): boolean { + + return this.#store.has(key, method); + } + + /** + * Resolve the final headers for a specific method. + * + * Merges in order: defaults → method overrides → request overrides. + * + * @example + * ```typescript + * const headers = engine.headers.resolve('POST', { 'X-Request-ID': '123' }); + * ``` + */ + resolve(method: HttpMethods, requestOverrides?: Partial>): DictAndT { + + return this.#store.resolve(method, requestOverrides); + } + + /** + * Get the default headers (without method overrides). + */ + get defaults(): DictAndT { + + return this.#store.defaults; + } + + /** + * Get all headers including method overrides. + * + * @example + * ```typescript + * const all = engine.headers.all; + * // { default: { Authorization: '...' }, post: { 'Content-Type': '...' } } + * ``` + */ + get all(): { default: DictAndT } & Record>> { + + return this.#store.all; + } + + /** + * Get method-specific headers only (not merged with defaults). + */ + forMethod(method: HttpMethods): Partial> { + + return this.#store.forMethod(method); + } + + /** + * Get the underlying PropertyStore for internal use. + * + * Exposed for FetchEngineCore compliance. Internal components + * (executor, policies) access the store directly for resolution. + * + * @internal + */ + get $store(): PropertyStore> { + + return this.#store; + } +} diff --git a/packages/fetch/src/properties/index.ts b/packages/fetch/src/properties/index.ts new file mode 100644 index 0000000..2d2105e --- /dev/null +++ b/packages/fetch/src/properties/index.ts @@ -0,0 +1,10 @@ +/** + * Properties module for FetchEngine. + * + * Provides property management for headers and URL parameters + * with CRUD operations, method-specific overrides, and event emission. 
+ */ + +export { PropertyStore, type PropertyStoreOptions, type PropertyValidateFn, type MethodOverrides } from './store.ts'; +export { HeadersManager } from './headers.ts'; +export { ParamsManager } from './params.ts'; diff --git a/packages/fetch/src/properties/params.ts b/packages/fetch/src/properties/params.ts new file mode 100644 index 0000000..d888e33 --- /dev/null +++ b/packages/fetch/src/properties/params.ts @@ -0,0 +1,216 @@ +import type { FetchEngineCore } from '../engine/types.ts'; +import type { PropertyEventData } from '../engine/events.ts'; +import type { HttpMethods, DictAndT } from '../types.ts'; +import { PropertyStore, type PropertyStoreOptions } from './store.ts'; + + +/** + * Manages URL parameters for FetchEngine with event emission. + * + * Wraps PropertyStore with FetchEngine-specific event emission. + * Pulls initial params and validation from engine options. + * + * @template P - Params type + * + * @example + * ```typescript + * // Access via engine.params + * engine.params.set('apiKey', 'abc123'); + * engine.params.set({ page: '1', limit: '10' }); + * + * // Method-specific params + * engine.params.set('format', 'json', 'GET'); + * + * // Remove params + * engine.params.remove('apiKey'); + * engine.params.remove(['page', 'limit']); + * + * // Check if param exists + * if (engine.params.has('apiKey')) { ... } + * + * // Get resolved params for a request + * const params = engine.params.resolve('GET', { extra: 'value' }); + * ``` + */ +export class ParamsManager
<P>
{ + + #engine: FetchEngineCore; + #store: PropertyStore>; + + constructor(engine: FetchEngineCore) { + + this.#engine = engine; + + const defaults = engine.config.get('params') ?? {} as DictAndT
<P>
; + const methodOverrides = engine.config.get('methodParams'); + const validate = engine.config.get('validate.params'); + + const storeOptions: PropertyStoreOptions> = { defaults }; + + if (methodOverrides !== undefined) { + + storeOptions.methodOverrides = methodOverrides; + } + + if (validate !== undefined) { + + storeOptions.validate = validate; + } + + this.#store = new PropertyStore>(storeOptions); + } + + /** + * Set a param value globally or for a specific method. + * + * @example + * ```typescript + * engine.params.set('apiKey', 'abc123'); + * engine.params.set('format', 'json', 'GET'); + * ``` + */ + set(key: string, value: string, method?: HttpMethods): void; + + /** + * Set multiple param values globally or for a specific method. + * + * @example + * ```typescript + * engine.params.set({ page: '1', limit: '10' }); + * engine.params.set({ format: 'json' }, 'GET'); + * ``` + */ + set(params: Partial>, method?: HttpMethods): void; + + set( + keyOrParams: string | Partial>, + valueOrMethod?: string | HttpMethods, + maybeMethod?: HttpMethods + ): void { + + if (typeof keyOrParams === 'string') { + + const key = keyOrParams; + const value = valueOrMethod as string; + const method = maybeMethod; + + this.#store.set(key, value, method); + + const eventData = { key, value, method } as PropertyEventData>; + this.#engine.emit('param-add', eventData); + } + else { + + const params = keyOrParams; + const method = valueOrMethod as HttpMethods | undefined; + + this.#store.set(params, method); + + const eventData = { value: params as Partial>, method } as PropertyEventData>; + this.#engine.emit('param-add', eventData); + } + } + + /** + * Remove a param globally or for a specific method. + * + * @example + * ```typescript + * engine.params.remove('apiKey'); + * engine.params.remove('format', 'GET'); + * ``` + */ + remove(key: string, method?: HttpMethods): void; + + /** + * Remove multiple params globally or for a specific method. 
+ * + * @example + * ```typescript + * engine.params.remove(['page', 'limit']); + * engine.params.remove(['format'], 'GET'); + * ``` + */ + remove(keys: string[], method?: HttpMethods): void; + + remove(keyOrKeys: string | string[], method?: HttpMethods): void { + + this.#store.remove(keyOrKeys as string, method); + + const eventData = { key: keyOrKeys, method } as PropertyEventData>; + this.#engine.emit('param-remove', eventData); + } + + /** + * Check if a param exists globally or for a specific method. + * + * @example + * ```typescript + * if (engine.params.has('apiKey')) { + * console.log('API key is set'); + * } + * ``` + */ + has(key: string, method?: HttpMethods): boolean { + + return this.#store.has(key, method); + } + + /** + * Resolve the final params for a specific method. + * + * Merges in order: defaults → method overrides → request overrides. + * + * @example + * ```typescript + * const params = engine.params.resolve('GET', { extra: 'value' }); + * ``` + */ + resolve(method: HttpMethods, requestOverrides?: Partial>): DictAndT
<P>
{ + + return this.#store.resolve(method, requestOverrides); + } + + /** + * Get the default params (without method overrides). + */ + get defaults(): DictAndT
<P>
{ + + return this.#store.defaults; + } + + /** + * Get all params including method overrides. + * + * @example + * ```typescript + * const all = engine.params.all; + * // { default: { apiKey: '...' }, get: { format: '...' } } + * ``` + */ + get all(): { default: DictAndT
<P>
} & Record>> { + + return this.#store.all; + } + + /** + * Get method-specific params only (not merged with defaults). + */ + forMethod(method: HttpMethods): Partial> { + + return this.#store.forMethod(method); + } + + /** + * Get the underlying PropertyStore for internal use. + * + * Exposed for FetchEngineCore compliance. Internal components + * (executor, policies) access the store directly for resolution. + * + * @internal + */ + get $store(): PropertyStore> { + + return this.#store; + } +} diff --git a/packages/fetch/src/property-store.ts b/packages/fetch/src/properties/store.ts similarity index 88% rename from packages/fetch/src/property-store.ts rename to packages/fetch/src/properties/store.ts index c67f763..d0ccd31 100644 --- a/packages/fetch/src/property-store.ts +++ b/packages/fetch/src/properties/store.ts @@ -1,4 +1,4 @@ -import type { _InternalHttpMethods, HttpMethods } from './types.ts'; +import type { _InternalHttpMethods, HttpMethods } from '../types.ts'; /** @@ -28,13 +28,13 @@ export type MethodOverrides = Partial export interface PropertyStoreOptions { /** Default values applied to all requests */ - defaults?: T; + defaults?: T | undefined; /** Method-specific overrides (e.g., POST has different headers than GET) */ - methodOverrides?: MethodOverrides; + methodOverrides?: Partial> | undefined; /** Validation function called when values are set */ - validate?: PropertyValidateFn; + validate?: PropertyValidateFn | undefined; } @@ -157,34 +157,34 @@ export class PropertyStore> { this.#validate(this.#defaults); } } + + return; } - else { - const values = keyOrValues; - const method = (valueOrMethod as HttpMethods | undefined)?.toLowerCase(); + const values = keyOrValues; + const method = (valueOrMethod as HttpMethods | undefined)?.toLowerCase(); - if (method) { + if (method) { - const existing = this.#methodOverrides.get(method) ?? 
{}; - const updated = { ...existing, ...values } as Partial; - this.#methodOverrides.set(method, updated); + const existing = this.#methodOverrides.get(method) ?? {}; + const updated = { ...existing, ...values } as Partial; + this.#methodOverrides.set(method, updated); - if (this.#validate) { + if (this.#validate) { - this.#validate( - { ...this.#defaults, ...updated } as T, - method as _InternalHttpMethods - ); - } + this.#validate( + { ...this.#defaults, ...updated } as T, + method as _InternalHttpMethods + ); } - else { + } + else { - Object.assign(this.#defaults, values); + Object.assign(this.#defaults, values); - if (this.#validate) { + if (this.#validate) { - this.#validate(this.#defaults); - } + this.#validate(this.#defaults); } } } @@ -230,13 +230,13 @@ export class PropertyStore> { delete (existing as Record)[key]; } } + + return; } - else { - for (const key of keys) { + for (const key of keys) { - delete (this.#defaults as Record)[key]; - } + delete (this.#defaults as Record)[key]; } } diff --git a/packages/fetch/src/state/index.ts b/packages/fetch/src/state/index.ts new file mode 100644 index 0000000..c8d23df --- /dev/null +++ b/packages/fetch/src/state/index.ts @@ -0,0 +1,174 @@ +import { clone, assert, isObject } from '@logosdx/utils'; +import type { FetchEngineCore } from '../engine/types.ts'; +import type { StateEventData } from '../engine/events.ts'; + + +/** + * Manages instance state for FetchEngine with event emission. + * + * Provides a clean API for getting, setting, and resetting state + * with automatic event emission on mutations. Validation is pulled + * from engine options, not passed as a constructor parameter. 
+ * + * @template S - State type + * + * @example + * ```typescript + * // Access via engine.state + * engine.state.set('authToken', 'bearer-123'); + * engine.state.set({ user: 'john', role: 'admin' }); + * + * const state = engine.state.get(); + * console.log(state.authToken); + * + * engine.state.reset(); + * ``` + */ +export class FetchState { + + #engine: FetchEngineCore; + #state: S; + + constructor(engine: FetchEngineCore) { + + this.#engine = engine; + this.#state = {} as S; + } + + /** + * Get the validate function from engine options. + * This is pulled dynamically to allow runtime changes. + */ + #getValidate(): ((state: S) => void) | undefined { + + return this.#engine.config.get('validate.state') as ((state: S) => void) | undefined; + } + + /** + * Get a deep clone of the current state. + * + * Returns a cloned copy to prevent external mutations. + * + * @example + * ```typescript + * const state = engine.state.get(); + * console.log(state.authToken); + * ``` + */ + get(): S { + + return clone(this.#state); + } + + /** + * Set state by key-value or by partial object merge. + * + * Emits 'state-set' event after successful update. 
+ * + * @example + * ```typescript + * // Set single property + * engine.state.set('authToken', 'bearer-123'); + * + * // Merge multiple properties + * engine.state.set({ user: 'john', role: 'admin' }); + * ``` + */ + set(key: K, value: S[K]): void; + set(partial: Partial): void; + set(keyOrPartial: unknown, value?: unknown): void { + + const isKey = typeof keyOrPartial === 'string'; + + assert( + isObject(keyOrPartial) || isKey, + 'set requires an object or string key' + ); + + const previous = this.#state; + let key: keyof S | undefined; + let setValue: S[keyof S] | Partial; + + if (isKey) { + + assert( + value !== undefined, + 'set requires a value when setting by key' + ); + + key = keyOrPartial as keyof S; + setValue = value as S[keyof S]; + + this.#state = { + ...this.#state, + [key]: setValue + }; + } + else { + + setValue = keyOrPartial as Partial; + + this.#state = { + ...this.#state, + ...setValue + }; + } + + const validate = this.#getValidate(); + + if (validate) { + + validate(this.#state); + } + + const eventData = { + key, + value: setValue, + previous, + current: this.#state + } as StateEventData; + + this.#engine.emit('state-set', eventData); + } + + /** + * Reset state to empty object. + * + * Emits 'state-reset' event after reset. + * + * @example + * ```typescript + * engine.state.reset(); + * console.log(engine.state.get()); // {} + * ``` + */ + reset(): void { + + const previous = this.#state; + this.#state = {} as S; + + const validate = this.#getValidate(); + + if (validate) { + + validate(this.#state); + } + + const eventData = { + previous, + current: this.#state + } as StateEventData; + + this.#engine.emit('state-reset', eventData); + } + + /** + * Internal method to set state directly without events. + * Used during engine initialization. 
+ * @internal + */ + _setDirect(state: S): void { + + this.#state = state; + } +} diff --git a/packages/fetch/src/types.ts b/packages/fetch/src/types.ts index 161b726..df966ff 100644 --- a/packages/fetch/src/types.ts +++ b/packages/fetch/src/types.ts @@ -1,6 +1,6 @@ import { type MaybePromise, type CacheAdapter, RateLimitTokenBucket } from '@logosdx/utils'; -import { FetchError } from './helpers.ts'; -import { type FetchEngine } from './engine.ts'; +import { FetchError } from './helpers/fetch-error.ts'; +import { type FetchEngine } from './engine/index.ts'; export type { CacheAdapter }; @@ -62,10 +62,10 @@ export interface RequestKeyOptions { payload?: unknown | undefined; /** Request headers (instance headers merged with method headers) */ - headers?: DictOrT | undefined; + headers?: DictAndT | undefined; /** URL parameters (extracted from url.searchParams for flat access) */ - params?: DictOrT

| undefined; + params?: DictAndT

| undefined; /** Instance state */ state?: S | undefined; @@ -317,9 +317,8 @@ export interface CacheConfig { adapter?: CacheAdapter | undefined; } -export type RawRequestOptions = Omit -export type DictOrT = Record & Partial; -export type MethodHeaders = HttpMethodOpts>; +export type DictAndT = Record & Partial; +export type MethodHeaders = HttpMethodOpts>; /** * Configuration object used for a fetch request, combining instance-level @@ -341,11 +340,6 @@ export type MethodHeaders = HttpMethodOpts>; export interface FetchConfig { baseUrl?: string; - /** - * @deprecated Use `totalTimeout` instead. This is an alias for `totalTimeout`. - */ - timeout?: number | undefined; - /** * Total timeout for the entire request lifecycle in milliseconds. * Applies to the complete operation including all retry attempts. @@ -521,484 +515,18 @@ export interface RetryConfig { shouldRetry?: (error: FetchError, attempt: number) => MaybePromise | undefined; } -declare module './engine.ts' { - export namespace FetchEngine { - - export type Type = 'arrayBuffer' | 'blob' | 'formData' | 'json' | 'text'; - - /** - * Event data payload for FetchEngine events - */ - export interface EventData { - state: S; - url?: string | URL | undefined; - method?: HttpMethods | undefined; - headers?: H | undefined; - params?: InstanceParams | undefined; - error?: FetchError | undefined; - response?: Response | undefined; - data?: unknown; - payload?: unknown; - attempt?: number | undefined; - nextAttempt?: number | undefined; - delay?: number | undefined; - step?: 'fetch' | 'parse' | 'response' | undefined; - status?: number | undefined; - path?: string | undefined; - aborted?: boolean | undefined; - } - - /** - * Event data for deduplication events - */ - export interface DedupeEventData extends EventData { - - /** The generated deduplication key */ - key: string; - - /** Number of callers waiting on this request (join events only) */ - waitingCount?: number | undefined; - } - - /** - * Event data for cache 
events - */ - export interface CacheEventData extends EventData { - - /** The generated cache key */ - key: string; - - /** Whether the cache entry is stale (SWR) */ - isStale?: boolean | undefined; - - /** Time until expiration (ms) */ - expiresIn?: number | undefined; - } - - /** - * Event data for rate limit events - */ - export interface RateLimitEventData extends EventData { - - /** The rate limit bucket key */ - key: string; - - /** Current tokens available in the bucket */ - currentTokens: number; - - /** Maximum capacity of the bucket */ - capacity: number; - - /** Time to wait before next token is available (ms) */ - waitTimeMs: number; - - /** When the next token will be available */ - nextAvailable: Date; - } - - /** - * Event map for ObserverEngine - maps event names to their data types - */ - export interface EventMap { - 'fetch-before': EventData; - 'fetch-after': EventData; - 'fetch-abort': EventData; - 'fetch-error': EventData; - 'fetch-response': EventData; - 'fetch-header-add': EventData; - 'fetch-header-remove': EventData; - 'fetch-param-add': EventData; - 'fetch-param-remove': EventData; - 'fetch-state-set': EventData; - 'fetch-state-reset': EventData; - 'fetch-url-change': EventData; - 'fetch-modify-options-change': EventData; - 'fetch-modify-method-options-change': EventData; - 'fetch-retry': EventData; - 'fetch-dedupe-start': DedupeEventData; - 'fetch-dedupe-join': DedupeEventData; - 'fetch-cache-hit': CacheEventData; - 'fetch-cache-stale': CacheEventData; - 'fetch-cache-miss': CacheEventData; - 'fetch-cache-set': CacheEventData; - 'fetch-cache-revalidate': CacheEventData; - 'fetch-cache-revalidate-error': CacheEventData; - 'fetch-ratelimit-wait': RateLimitEventData; - 'fetch-ratelimit-reject': RateLimitEventData; - 'fetch-ratelimit-acquire': RateLimitEventData; - } - - /** - * Override this interface with the headers you intend - * to use and set throughout your app. These are the - * universal headers that will be set on all requests. 
- */ - export interface InstanceHeaders { - Authorization?: string; - 'Content-Type'?: string; - } - - /** - * Override this interface with the params you intend - * to use and set throughout your app. These are the - * universal params that will be set on all requests. - */ - export interface InstanceParams { - } - - /** - * Override this interface with the response headers you expect - * to receive from your API. These are the headers that will be - * returned in the FetchResponse object. - */ - export interface InstanceResponseHeaders extends Record { - } - - /** - * Override this interface with the state you intend - * to use and set throughout your app. These are the - * universal state that will be set on all requests. - */ - export interface InstanceState { - } - - /** - * Headers helper type that can be used to set headers - */ - export type Headers = DictOrT; - - /** - * Params helper type that can be used to set URL parameters - * on requests - */ - export type Params = DictOrT; - - /** - * Response headers helper type that represents headers received - * from the server in API responses - */ - export type ResponseHeaders = DictOrT; - - /** - * Function type for modifying request options before they are sent. - * Used by modifyOptions and modifyMethodOptions configuration. - */ - export type ModifyOptionsFn = (opts: RequestOpts, state: S) => RequestOpts; - - export type HeaderKeys = keyof Headers; - - /** - * If you don't want FetchEngine to guess your content type, - * you can set it explicitly here. You should return the name - * of the function that will be used to parse the response body. 
- * - * @example - * - * const determineType: DetermineTypeFn = (response) => { - * - * if (response.headers.get('content-type') === 'application/json') { - * return 'json'; - * } - * } - * - */ - export interface DetermineTypeFn { - (response: Response): Type | Symbol - } - - - /** - * Lifecycle hooks that can be used to handle various - * events during the fetch request lifecycle. - */ - export type Lifecycle = { - /** - * Called when the fetch request errors - */ - onError?: (err: FetchError) => void | Promise | undefined - - /** - * Called before the fetch request is made - */ - onBeforeReq?: ((opts: FetchEngine.RequestOpts) => void | Promise) | undefined - - /** - * Called after the fetch request is made. The response - * object is cloned before it is passed to this function. - */ - onAfterReq?: ((response: Response, opts: FetchEngine.RequestOpts) => void | Promise) | undefined - }; - - - export type RequestOpts = RawRequestOptions & { - - /** - * The abort controller to be used to abort the request - */ - controller: AbortController, - headers?: Headers | undefined, - params?: Params

| undefined, - - /** - * @deprecated Use `totalTimeout` instead. This is an alias for `totalTimeout`. - */ - timeout?: number | undefined, - - /** - * Total timeout for the entire request lifecycle in milliseconds. - * Applies to the complete operation including all retry attempts. - * If this fires, no more retries will be attempted. - */ - totalTimeout?: number | undefined, - - /** - * Per-attempt timeout in milliseconds. - * Each retry attempt gets a fresh timeout. If an attempt times out, - * it can still be retried (unlike totalTimeout which stops everything). - */ - attemptTimeout?: number | undefined, - - /** - * The type of response expected from the server - */ - determineType?: DetermineTypeFn | undefined, - - - /** - * The retry configuration for the fetch request. If false, or undefined, - * no retries will be made. - */ - retry?: RetryConfig | boolean | undefined - }; - - export type Options< - H = Headers, - P = Params, - S = InstanceState, - > = ( - - Omit< - RequestOpts, - 'method' | 'body' | 'integrity' | 'controller' - > & - - { - /** - * The base URL for all requests - */ - baseUrl: string, - - /** - * The default type of response expected from the server. - * This will be used to determine how to parse the - * response from the server when content-type headers - * are not present or fail to do so. - */ - defaultType?: Type | undefined, - - /** - * The headers to be set on all requests - */ - headers?: DictOrT | undefined, - - /** - * The headers to be set on requests of a specific method - * @example - * { - * GET: { 'content-type': 'application/json' }, - * POST: { 'content-type': 'application/x-www-form-urlencoded' - * } - */ - methodHeaders?: MethodHeaders | undefined, - - /** - * URL parameters to be set on all requests - */ - params?: DictOrT

| undefined, - - /** - * URL parameters to be set on requests of a specific method - */ - methodParams?: HttpMethodOpts

| undefined, - - // Applies to requests of a specific method - /** - * Function that can be used to change the options in a specific - * way before they are used to make a request. The passed options - * are mutable objects. The returned object will be used instead - * of the original. - * - * @example - * - * const modifyOptions: ModifyOptionsFn = (opts, state) => { - * return opts; - * } - */ - modifyOptions?: ModifyOptionsFn | undefined - - /** - * Object that can be used to modify the options for requests of a specific method - * @example - * - * const modifyMethodOptions: ModifyMethodOptions = { - * GET: (opts, state) => { - * return opts; - * }, - * POST: (opts, state) => { - * return opts; - * } - * } - */ - modifyMethodOptions?: HttpMethodOpts> | undefined, - - /** - * Validators for when setting headers and state - */ - validate?: { - headers?: ((headers: Headers, method?: _InternalHttpMethods) => void) | undefined, - params?: ((params: Params

, method?: _InternalHttpMethods) => void) | undefined, - state?: ((state: S) => void) | undefined, - - perRequest?: { - /** - * Whether to validate the headers before the request is made - */ - headers?: boolean | undefined, - - /** - * Whether to validate the params before the request is made - */ - params?: boolean | undefined, - } | undefined - }, - - /** - * Optional name for this FetchEngine instance. - * Useful for debugging when using multiple instances. - */ - name?: string | undefined, - - /** - * Spy function that receives all event emissions. - * Useful for debugging and logging event flow. - * - * @example - * const api = new FetchEngine({ - * baseUrl: 'https://api.example.com', - * spy: ({ event, fn, data }) => { - * console.log(`[${event}] ${fn}:`, data); - * } - * }); - */ - spy?: ((action: { - event: keyof EventMap | RegExp | '*', - fn: 'on' | 'once' | 'off' | 'emit' | 'cleanup', - data?: unknown, - listener?: Function | null, - context: any - }) => void) | undefined, - - /** - * Deduplication policy configuration. - * - * - `true`: Enable with defaults (GET requests only) - * - `false` | omitted: Disabled - * - Object: Full configuration - * - * @example - * // Enable with defaults (GET only) - * dedupePolicy: true - * - * @example - * // Custom configuration - * dedupePolicy: { - * enabled: true, - * methods: ['GET', 'POST'], - * rules: [{ startsWith: '/admin', enabled: false }] - * } - */ - dedupePolicy?: boolean | DeduplicationConfig | undefined, - - /** - * Cache policy configuration. 
- * - * - `true`: Enable with defaults (GET requests, 60s TTL) - * - `false` | omitted: Disabled - * - Object: Full configuration - * - * @example - * // Enable with defaults (GET only, 60s TTL) - * cachePolicy: true - * - * @example - * // Custom configuration with SWR - * cachePolicy: { - * enabled: true, - * methods: ['GET'], - * ttl: 300000, // 5 minutes - * staleIn: 60000, // Stale after 1 minute - * rules: [ - * { startsWith: '/static', ttl: 3600000 }, - * { startsWith: '/admin', enabled: false } - * ] - * } - */ - cachePolicy?: boolean | CacheConfig | undefined, - - /** - * Rate limit policy configuration. - * - * - `true`: Enable with defaults (100 req/min for all methods) - * - `false` | omitted: Disabled - * - Object: Full configuration - * - * @example - * // Enable with defaults (100 req/min) - * rateLimitPolicy: true - * - * @example - * // Custom configuration with per-route limits - * rateLimitPolicy: { - * enabled: true, - * maxCalls: 60, - * windowMs: 60000, // 60 req/min globally - * rules: [ - * { startsWith: '/api/search', maxCalls: 10, windowMs: 1000 }, // 10/sec for search - * { startsWith: '/admin', enabled: false } // No limit for admin - * ] - * } - * - * @example - * // Per-user rate limiting - * rateLimitPolicy: { - * enabled: true, - * maxCalls: 100, - * windowMs: 60000, - * serializer: (ctx) => `user:${ctx.headers?.['X-User-ID'] ?? 'anonymous'}` - * } - */ - rateLimitPolicy?: boolean | RateLimitConfig | undefined, - } - ); - - export interface AbortablePromise extends Promise { - - isFinished: boolean - isAborted: boolean - abort(reason?: string): void | undefined - } - - /** - * Options used when making a fetch request - */ - export type CallOptions = ( - Lifecycle & - Omit & - { - headers?: DictOrT | undefined, - params?: DictOrT

| undefined, - abortController?: AbortController | undefined, - } - ); - } -} +// Note: The FetchEngine namespace is now defined via declaration merging +// in src/engine/index.ts. Types are defined in their modular locations +// and forwarded to the namespace for external access. +// +// Users can augment FetchEngine.InstanceHeaders, FetchEngine.InstanceParams, +// FetchEngine.InstanceState, and FetchEngine.InstanceResponseHeaders +// using module augmentation: +// +// declare module '@logosdx/fetch' { +// namespace FetchEngine { +// interface InstanceHeaders { +// 'X-Custom-Header': string; +// } +// } +// } diff --git a/packages/kit/.swcrc b/packages/kit/.swcrc deleted file mode 100644 index 24a6e47..0000000 --- a/packages/kit/.swcrc +++ /dev/null @@ -1,19 +0,0 @@ -{ - "$schema": "https://json.schemastore.org/swcrc", - "jsc": { - "parser": { - "syntax": "typescript", - "dynamicImport": false, - "decorators": false, - "dts": true - }, - "target": "es2022", - "loose": false, - "externalHelpers": false - }, - "minify": false, - "module": { - "type": "commonjs" - }, - "exclude": ["dist"] -} \ No newline at end of file diff --git a/packages/kit/CHANGELOG.md b/packages/kit/CHANGELOG.md deleted file mode 100644 index c6ede49..0000000 --- a/packages/kit/CHANGELOG.md +++ /dev/null @@ -1,509 +0,0 @@ -# @logosdx/kit - -## 4.0.6 - -### Patch Changes - -- Updated dependencies [164bd3c] -- Updated dependencies [164bd3c] - - @logosdx/fetch@7.1.0 - - @logosdx/observer@2.3.0 - -## 4.0.5 - -### Patch Changes - -- Updated dependencies [5380675] - - @logosdx/utils@6.0.0 - - @logosdx/observer@2.2.2 - - @logosdx/fetch@7.0.5 - - @logosdx/localize@1.0.21 - - @logosdx/state-machine@1.0.21 - - @logosdx/storage@1.0.21 - -## 4.0.4 - -### Patch Changes - -- Updated dependencies [ea81582] - - @logosdx/utils@5.1.0 - - @logosdx/fetch@7.0.4 - - @logosdx/localize@1.0.20 - - @logosdx/observer@2.2.1 - - @logosdx/state-machine@1.0.20 - - @logosdx/storage@1.0.20 - -## 4.0.3 - -### Patch Changes - -- 
Updated dependencies [923f8c7] - - @logosdx/observer@2.2.0 - - @logosdx/fetch@7.0.3 - -## 4.0.2 - -### Patch Changes - -- Updated dependencies [7fd7216] - - @logosdx/fetch@7.0.2 - -## 4.0.1 - -### Patch Changes - -- Updated dependencies [37d3b47] - - @logosdx/fetch@7.0.1 - -## 4.0.0 - -### Major Changes - -- 582644e: ## @logosdx/fetch - - ### Added - - - `feat(fetch):` Request deduplication via `dedupePolicy` - share in-flight promises across concurrent identical requests _(closes #91)_ - - `feat(fetch):` Response caching with TTL and stale-while-revalidate (SWR) via `cachePolicy` _(closes #92)_ - - `feat(fetch):` Rate limiting via `rateLimitPolicy` - token bucket algorithm with per-endpoint buckets _(closes #93)_ - - `feat(fetch):` Route matching rules with `is`, `startsWith`, `endsWith`, `includes`, and `match` (regex) patterns - - `feat(fetch):` New deduplication events: `fetch-dedupe-start`, `fetch-dedupe-join`, `fetch-dedupe-complete`, `fetch-dedupe-error` - - `feat(fetch):` New cache events: `fetch-cache-hit`, `fetch-cache-miss`, `fetch-cache-stale`, `fetch-cache-set`, `fetch-cache-expire`, `fetch-cache-revalidate`, `fetch-cache-revalidate-error` - - `feat(fetch):` New rate limit events: `fetch-ratelimit-wait`, `fetch-ratelimit-reject`, `fetch-ratelimit-acquire` - - `feat(fetch):` Cache invalidation API: `clearCache()`, `deleteCache(key)`, `invalidateCache(predicate)`, `invalidatePath(pattern)`, `cacheStats()` - - `feat(fetch):` Independent timeout/abort per caller when joining deduplicated requests - - `feat(fetch):` Pluggable cache adapter via `cachePolicy.adapter` for Redis, IndexedDB, AsyncStorage, localStorage, etc. 
- - `feat(fetch):` `defaultRequestSerializer` - generates cache/dedupe keys from method + URL path + query + payload - - `feat(fetch):` `defaultRateLimitSerializer` - groups requests by method + pathname for per-endpoint rate limiting - - `feat(fetch):` New type exports: `CacheConfig`, `CacheRule`, `RateLimitConfig`, `RateLimitRule` - - ## @logosdx/utils - - ### Added - - - `feat(utils):` `SingleFlight` - generic coordinator for cache and in-flight request deduplication with SWR support - - `feat(utils):` `Deferred` - promise with external resolve/reject control - - `feat(utils):` `serializer()` - enhanced key generation handling circular refs, functions, symbols, Maps, Sets, Dates, and more - - ### Changed - - - **Breaking:** `refactor(utils)!:` `CacheAdapter` interface is now async-only with string keys: `CacheAdapter` replaces `CacheAdapter` - - **Breaking:** `refactor(utils)!:` `CacheItem` properties `accessCount`, `lastAccessed`, `accessSequence` are now optional; added `staleAt` for SWR - -### Patch Changes - -- Updated dependencies [582644e] -- Updated dependencies [e4e4f43] - - @logosdx/fetch@7.0.0 - - @logosdx/utils@5.0.0 - - @logosdx/localize@1.0.19 - - @logosdx/observer@2.1.1 - - @logosdx/state-machine@1.0.19 - - @logosdx/storage@1.0.19 - -## 3.0.0 - -### Major Changes - -- 204dd76: ## @logosdx/utils - - ### Added - - - `feat(memo): add shouldCache option for conditional caching bypass` - Allows memoized functions to conditionally bypass cache based on request context (e.g., cache-busting flags). Bypassed calls still benefit from inflight deduplication. Added to both `memoize` and `memoizeSync` functions. _(#92)_ - - `feat(inflight): add shouldDedupe option for conditional deduplication bypass` - Enables withInflightDedup to conditionally bypass deduplication and execute producers directly based on request parameters. Early-exit optimization avoids serialization overhead when bypassed. 
_(#91)_ - - ### Changed - - - **Breaking:** `refactor(memo)!: change shouldCache args signature to spread pattern` - `shouldCache` receives spread arguments `(...args)` matching function signature, while `generateKey` continues receiving tuple `([arg1, arg2, ...])` for consistency with existing serialization patterns. - - **Breaking:** `refactor(inflight)!: change keyFn signature to spread pattern for consistency` - `keyFn` now receives spread arguments `(...args)` matching the wrapped function signature. Previously received tuple-style arguments. Updated for consistency with `shouldDedupe`. - - ### Fixed - - - `fix(memo): add error handling for shouldCache predicate failures` - shouldCache errors gracefully fall back to normal caching behavior via attemptSync, preventing function execution failures. - - `fix(inflight): add error handling for shouldDedupe predicate failures` - shouldDedupe errors gracefully fall back to normal deduplication behavior via attemptSync. - - *** - - ## Documentation - - ### Changed - - - `docs(utils): update llm-helpers with conditional caching examples` - Added comprehensive examples for `shouldCache` and `shouldDedupe` usage patterns including cache-busting scenarios. - - `docs(utils): update package docs with conditional caching patterns` - Added examples and usage guidance for new conditional bypass options. - - *** - - ## Testing - - ### Added - - - `test(inflight): add comprehensive shouldDedupe test coverage` - 8 new test scenarios covering bypass behavior, hook suppression, concurrent mixing, error handling, and argument passing. - - `test(memo): add comprehensive shouldCache test coverage` - 7 new test scenarios for both memoize and memoizeSync covering cache bypass, deduplication interaction, error handling, and cache size verification. - - *** - - ## Summary - - This release introduces **conditional caching and deduplication** capabilities to `@logosdx/utils` memoization and inflight utilities. Key features: - - 1. 
**Selective Cache Bypass**: `shouldCache` option allows cache-busting while retaining deduplication benefits - 2. **Selective Deduplication Bypass**: `shouldDedupe` option enables direct execution bypassing inflight tracking - 3. **Performance Optimized**: Early-exit paths avoid serialization overhead when bypassing - 4. **Error Resilient**: Predicate errors gracefully fall back to default behavior - 5. **Consistent API**: Spread argument pattern across both memoize and inflight utilities - - **Breaking Changes**: Function signature updates for `keyFn` and `shouldCache` options require argument pattern adjustments when using custom key generators. - - **Related Issues**: Partial implementation for #91 (Request Deduplication), #92 (Request Memoization/Caching) - -### Patch Changes - -- Updated dependencies [567ed1f] -- Updated dependencies [204dd76] -- Updated dependencies [931a1e5] - - @logosdx/fetch@6.0.0 - - @logosdx/utils@4.0.0 - - @logosdx/observer@2.1.0 - - @logosdx/localize@1.0.18 - - @logosdx/state-machine@1.0.18 - - @logosdx/storage@1.0.18 - -## 2.0.1 - -### Patch Changes - -- Updated dependencies [e6b07d8] - - @logosdx/utils@3.0.1 - - @logosdx/fetch@5.0.4 - - @logosdx/localize@1.0.17 - - @logosdx/observer@2.0.13 - - @logosdx/state-machine@1.0.17 - - @logosdx/storage@1.0.17 - -## 2.0.0 - -### Major Changes - -- 96fe247: # @logosdx/utils Major Reorganization & API Improvements - - ## Utils Package - - ### Changed - - - **Breaking:** `refactor(config): makeNestedConfig now returns object with allConfigs() and getConfig(path, default) methods instead of single overloaded function` - - - **Old API**: `config()` returned full config, `config('path')` accessed nested value - - **New API**: `config.allConfigs()` returns full config, `config.getConfig('path', default?)` accesses nested value - - **Rationale**: Eliminates confusing function overload, provides clearer API surface - - **Migration**: Replace `config()` → `config.allConfigs()`, `config('path')` → 
`config.getConfig('path')` - - - `refactor(structure): reorganized internal module structure for better discoverability` - - Moved async operations (`attempt`, `retry`, `batch`, `inflight`) to dedicated `async/` directory - - Split `validation.ts` into focused modules: `type-guards.ts`, `assert.ts`, `comparisons.ts`, `environment.ts`, `values.ts` - - Split `misc.ts` into domain-specific modules: `misc/index.ts`, `array-utils/`, `object-utils/` - - Split `units.ts` into `units/time.ts` and `units/bytes.ts` - - Updated all internal imports to reflect new structure - - ### Added - - - `feat(config): new castValuesToTypes() function for type coercion from environment variables` - - - Supports `parseUnits` option for parsing time durations ('5m', '1hour') and byte sizes ('10mb', '100gb') - - Supports `skipConversion` callback for selective value preservation (e.g., API keys) - - Recursively processes nested objects - - Mutates input object in-place for performance - - - `feat(config): new makeNestedConfig() with enhanced configuration loading` - - - Converts flat environment variable structures to nested objects - - Supports custom separators, prefix stripping, and casing control - - Optional memoization support via `memoizeOpts` - - Improved error messages for configuration conflicts - - - `feat(array-utils): extracted array utilities to dedicated module` - - - `itemsToArray()`: Normalizes single items to arrays - - `oneOrMany()`: Unwraps single-item arrays - - `chunk()`: Splits arrays into batches - - - `feat(object-utils): extracted object utilities to dedicated module` - - `reach()`: Deep property access with dot notation - - `setDeep()`: Deep property setting with automatic intermediate object creation - - `setDeepMany()`: Batch deep property setting - - ### Fixed - - - `fix(imports): updated all cross-module imports to use new structure` - - Flow control modules now import from `../async/` - - All modules now import validation from `../validation/index.ts` - - 
Memoization imports updated for new async location - - ## Kit Package - - ### Changed - - - `refactor(deps): updated to support new @logosdx/utils major version` - - No API changes to kit itself - - Compatible with reorganized utils structure - - *** - - **Testing**: All 121 tests pass, including 26 comprehensive tests for `makeNestedConfig` covering all options and edge cases. - - **Migration Guide**: - - ```typescript - // Before (v1.x) - const config = makeNestedConfig(process.env, opts); - const fullConfig = config(); - const dbHost = config("db.host"); - - // After (v2.x) - const config = makeNestedConfig(process.env, opts); - const fullConfig = config.allConfigs(); - const dbHost = config.getConfig("db.host"); - const dbHostWithDefault = config.getConfig("db.host", "localhost"); - ``` - -### Patch Changes - -- Updated dependencies [96fe247] - - @logosdx/utils@3.0.0 - - @logosdx/fetch@5.0.3 - - @logosdx/localize@1.0.16 - - @logosdx/observer@2.0.12 - - @logosdx/state-machine@1.0.16 - - @logosdx/storage@1.0.16 - -## 1.0.20 - -### Patch Changes - -- Updated dependencies [6416ac4] - - @logosdx/utils@2.5.0 - - @logosdx/fetch@5.0.2 - - @logosdx/localize@1.0.15 - - @logosdx/observer@2.0.11 - - @logosdx/state-machine@1.0.15 - - @logosdx/storage@1.0.15 - -## 1.0.19 - -### Patch Changes - -- Updated dependencies [8fda604] - - @logosdx/utils@2.4.0 - - @logosdx/fetch@5.0.1 - - @logosdx/localize@1.0.14 - - @logosdx/observer@2.0.10 - - @logosdx/state-machine@1.0.14 - - @logosdx/storage@1.0.14 - -## 1.0.18 - -### Patch Changes - -- Updated dependencies [ba282ad] - - @logosdx/fetch@5.0.0 - -## 1.0.17 - -### Patch Changes - -- Updated dependencies [9edb1c4] -- Updated dependencies [6560f02] - - @logosdx/observer@2.0.9 - - @logosdx/utils@2.3.0 - - @logosdx/fetch@4.0.1 - - @logosdx/localize@1.0.13 - - @logosdx/state-machine@1.0.13 - - @logosdx/storage@1.0.13 - -## 1.0.16 - -### Patch Changes - -- Updated dependencies [0cf6edd] -- Updated dependencies [0cf6edd] -- Updated 
dependencies [0cf6edd] -- Updated dependencies [0cf6edd] - - @logosdx/utils@2.2.0 - - @logosdx/fetch@4.0.0 - - @logosdx/localize@1.0.12 - - @logosdx/observer@2.0.8 - - @logosdx/state-machine@1.0.12 - - @logosdx/storage@1.0.12 - -## 1.0.15 - -### Patch Changes - -- Updated dependencies [9e6afcd] -- Updated dependencies [e1c0ba2] - - @logosdx/utils@2.1.2 - - @logosdx/fetch@3.0.1 - - @logosdx/localize@1.0.11 - - @logosdx/observer@2.0.7 - - @logosdx/state-machine@1.0.11 - - @logosdx/storage@1.0.11 - -## 1.0.14 - -### Patch Changes - -- Updated dependencies [7fdab75] - - @logosdx/fetch@3.0.0 - -## 1.0.13 - -### Patch Changes - -- Updated dependencies [fdec519] - - @logosdx/fetch@2.0.0 - -## 1.0.12 - -### Patch Changes - -- Updated dependencies [cd91503] - - @logosdx/fetch@1.2.0 - -## 1.0.11 - -### Patch Changes - -- Updated dependencies [c6a8fd2] - - @logosdx/observer@2.0.6 - -## 1.0.10 - -### Patch Changes - -- Updated dependencies [2c6c8cc] - - @logosdx/utils@2.1.1 - - @logosdx/fetch@1.1.5 - - @logosdx/localize@1.0.10 - - @logosdx/observer@2.0.5 - - @logosdx/state-machine@1.0.10 - - @logosdx/storage@1.0.10 - -## 1.0.9 - -### Patch Changes - -- Updated dependencies [755e80d] - - @logosdx/utils@2.1.0 - - @logosdx/fetch@1.1.4 - - @logosdx/localize@1.0.9 - - @logosdx/observer@2.0.4 - - @logosdx/state-machine@1.0.9 - - @logosdx/storage@1.0.9 - -## 1.0.8 - -### Patch Changes - -- Updated dependencies [cbd0e23] - - @logosdx/utils@2.0.3 - - @logosdx/fetch@1.1.3 - - @logosdx/localize@1.0.8 - - @logosdx/observer@2.0.3 - - @logosdx/state-machine@1.0.8 - - @logosdx/storage@1.0.8 - -## 1.0.7 - -### Patch Changes - -- eecc5d4: Export type so they aren't compiled into ESM files -- Updated dependencies [eecc5d4] - - @logosdx/fetch@1.1.2 - - @logosdx/localize@1.0.7 - - @logosdx/observer@2.0.2 - - @logosdx/state-machine@1.0.7 - - @logosdx/storage@1.0.7 - - @logosdx/utils@2.0.2 - -## 1.0.6 - -### Patch Changes - -- 43b3457: ### Fixed - - - Export bug from utils. 
- - Better naming for options - -- Updated dependencies [43b3457] - - @logosdx/fetch@1.1.1 - - @logosdx/localize@1.0.6 - - @logosdx/observer@2.0.1 - - @logosdx/state-machine@1.0.6 - - @logosdx/storage@1.0.6 - - @logosdx/utils@2.0.1 - -## 1.0.5 - -### Patch Changes - -- Updated dependencies [68b2d8b] - - @logosdx/observer@2.0.0 - - @logosdx/utils@2.0.0 - - @logosdx/fetch@1.1.0 - - @logosdx/localize@1.0.5 - - @logosdx/state-machine@1.0.5 - - @logosdx/storage@1.0.5 - -## 1.0.4 - -### Patch Changes - -- 062ceab: Missed update -- Updated dependencies [062ceab] - - @logosdx/fetch@1.0.4 - - @logosdx/localize@1.0.4 - - @logosdx/observer@1.0.4 - - @logosdx/state-machine@1.0.4 - - @logosdx/storage@1.0.4 - -## 1.0.3 - -### Patch Changes - -- a84138b: Force release due to bad build -- Updated dependencies [1dcc2d1] -- Updated dependencies [a84138b] - - @logosdx/utils@1.1.0 - - @logosdx/fetch@1.0.3 - - @logosdx/localize@1.0.3 - - @logosdx/observer@1.0.3 - - @logosdx/state-machine@1.0.3 - - @logosdx/storage@1.0.3 - -## 1.0.2 - -### Patch Changes - -- 0704421: publish .d.ts files -- Updated dependencies [0704421] - - @logosdx/state-machine@1.0.2 - - @logosdx/localize@1.0.2 - - @logosdx/observer@1.0.2 - - @logosdx/storage@1.0.2 - - @logosdx/fetch@1.0.2 - - @logosdx/utils@1.0.2 - -## 1.0.0 - -### Major Changes - -- b051504: Re-release as LogosDX - -### Patch Changes - -- Updated dependencies [b051504] - - @logosdx/fetch@1.0.0 - - @logosdx/localize@1.0.0 - - @logosdx/observer@1.0.0 - - @logosdx/state-machine@1.0.0 - - @logosdx/storage@1.0.0 - - @logosdx/utils@1.0.0 diff --git a/packages/kit/package.json b/packages/kit/package.json deleted file mode 100644 index 96de91b..0000000 --- a/packages/kit/package.json +++ /dev/null @@ -1,52 +0,0 @@ -{ - "name": "@logosdx/kit", - "version": "4.0.6", - "description": "A collection of strongly-typed LogosDX libraries to kick-start your app", - "exports": { - ".": { - "types": "./dist/types/index.d.ts", - "require": "./dist/cjs/index.js", - 
"import": "./dist/esm/index.mjs" - } - }, - "sideEffects": false, - "browserNamespace": "LogosDx.Kit", - "scripts": { - "build": "zx ../../scripts/build.mjs", - "lint": "pnpm tsc --noEmit --project tsconfig.json" - }, - "files": [], - "keywords": [ - "web apps", - "boilerplate", - "ui kit", - "state manager", - "observer", - "event emitter", - "localization", - "localStorage", - "sessionStorage", - "app storage", - "utilities", - "fetch", - "axios", - "xhr requests", - "node-fetch", - "ajax" - ], - "homepage": "https://logosdx.dev/", - "bugs": { - "url": "https://github.com/logosdx/monorepo/issues", - "email": "danilo@alonso.network" - }, - "author": "Danilo Alonso ", - "license": "BSD-3-Clause", - "dependencies": { - "@logosdx/fetch": "workspace:^", - "@logosdx/localize": "workspace:^", - "@logosdx/observer": "workspace:^", - "@logosdx/state-machine": "workspace:^", - "@logosdx/storage": "workspace:^", - "@logosdx/utils": "workspace:^" - } -} diff --git a/packages/kit/src/index.ts b/packages/kit/src/index.ts deleted file mode 100644 index e5f0342..0000000 --- a/packages/kit/src/index.ts +++ /dev/null @@ -1,281 +0,0 @@ -import { LocaleManager } from '@logosdx/localize'; -import { ObserverEngine } from '@logosdx/observer'; -import { type ReducerFunction, StateMachine, StateMachineOptions } from '@logosdx/state-machine'; -import { type StorageImplementation } from '@logosdx/storage'; -import { StorageAdapter } from '@logosdx/storage'; -import { FetchEngine } from '@logosdx/fetch'; -import { assert, isObject, type NotUndefined } from '@logosdx/utils'; - -export * from '@logosdx/fetch'; -export * from '@logosdx/localize'; -export * from '@logosdx/observer'; -export * from '@logosdx/state-machine'; -export * from '@logosdx/storage'; -export * from '@logosdx/utils'; - -/** - * Configuration for locale settings in the app kit. 
- */ -export type AppKitLocale = { - /** The locale type for internationalization */ - locale: LocaleManager.LocaleType, - /** The locale codes for language/region identification */ - codes: string -} - -/** - * Configuration for state machine settings in the app kit. - */ -export type AppKitStateMachine = { - /** The state type for the state machine */ - state: unknown, - /** The reducer value type for state transitions */ - reducerValue: unknown -} - -/** - * Configuration for fetch settings in the app kit. - */ -export type AppKitFetch = { - /** Optional state for fetch operations */ - state?: unknown, - /** Optional headers for HTTP requests */ - headers?: Record, - /** Optional parameters for API calls */ - params?: Record -} - -/** - * Main configuration type for the app kit. - * Defines the structure for events, storage, locales, state machine, fetch, and APIs. - */ -export type AppKitType = { - /** Event definitions for the observer engine */ - events?: Record, - /** Storage configuration for data persistence */ - storage?: Record, - /** Locale configuration for internationalization */ - locales?: AppKitLocale, - /** State machine configuration for state management */ - stateMachine?: AppKitStateMachine, - /** Default fetch configuration for HTTP requests */ - fetch?: AppKitFetch, - /** Named API configurations for different endpoints */ - apis?: { - [key: string]: AppKitFetch - } -} - -/** - * Utility type to create a kit type with proper typing. - * @template KitType - The kit type to be created - * - * @example - * ```typescript - * type MyKit = MakeKitType<{ - * events: { - * 'my-event': { - * payload: { - * name: string; - * age: number; - * }; - * }; - * }; - * }>; - * - * const kit = appKit({ - * observer: {} - * }); - * ``` - */ -export type MakeKitType = KitType - -/** - * Options configuration for the app kit. - * Defines all possible configuration options for each component. 
- * @template KitType - The kit type that defines the structure - */ -export type AppKitOpts = { - /** Observer engine options for event handling */ - observer?: ObserverEngine.Options>, - /** State machine configuration with initial state, options, and reducer */ - stateMachine?: { - /** Initial state for the state machine */ - initial: NotUndefined['state'] - /** Optional configuration for the state machine */ - options?: StateMachineOptions, - /** Reducer function for state transitions */ - reducer: ReducerFunction< - NotUndefined['state'], - NotUndefined['reducerValue'] - > - }, - /** Locale manager options for internationalization */ - locales?: LocaleManager.LocaleOpts< - NotUndefined['locale'], - NotUndefined['codes'] - >, - /** Storage configuration with implementation and optional prefix */ - storage?: { - /** Storage implementation to use */ - implementation: StorageImplementation, - /** Optional prefix for storage keys */ - prefix?: string - }, - /** Default fetch engine options */ - fetch?: FetchEngine.Options< - NotUndefined['headers'], - NotUndefined['state'] - >, - /** Named API configurations for different endpoints */ - apis?: { - [key in keyof KitType['apis']]: FetchEngine.Options< - NotUndefined[key]['headers'], - NotUndefined[key]['params'], - NotUndefined[key]['state'] - > - } -} - -/** - * Automatically instantiates UI components when passed opts. - * Creates and configures observer, locale manager, state machine, storage, fetch engine, and APIs - * based on the provided configuration options. - * - * @template Kit - The kit type that defines the structure and typing - * @param opts - Configuration options for different logosdx components - * @returns An object containing instantiated components based on the provided options - * - * @example - * ```typescript - * const kit = appKit({ - * observer: { ... }, - * locales: { ... }, - * stateMachine: { - * initial: { count: 0 }, - * reducer: (state, action) => { ... 
} - * }, - * storage: { - * implementation: new LocalStorageAdapter(), - * prefix: 'myapp' - * }, - * fetch: { ... }, - * apis: { - * users: { ... }, - * posts: { ... } - * } - * }); - * ``` - */ -export const appKit = ( - opts: AppKitOpts -) => { - - // Type aliases for better readability - type LocaleType = NotUndefined['locale']; - type LocaleCodes = NotUndefined['codes']; - type StateType = NotUndefined['state']; - type ReducerValType = NotUndefined['reducerValue']; - type FetchStateType = NotUndefined['state']; - type FetchHeadersType = NotUndefined['headers']; - - // Component type aliases - type KitObserver = ObserverEngine>; - type KitLocales = LocaleManager; - type KitStateMachine = StateMachine; - type KitStorage = StorageAdapter; - type KitFetch = FetchEngine; - type KitApis = { - [key in keyof Kit['apis']]: FetchEngine< - NotUndefined[key]['headers'], - NotUndefined[key]['state'] - > - } - - // Initialize component instances - let observer: null | KitObserver = null; - let locale: null | KitLocales = null; - let stateMachine: null | KitStateMachine = null; - let storage: null | KitStorage = null; - let fetch: null | KitFetch = null; - let apis: null | KitApis = null; - - // Initialize observer if options provided - if (opts.observer) { - - observer = new ObserverEngine(opts.observer) as KitObserver; - } - - // Initialize locale manager if options provided - if (opts.locales) { - - locale = new LocaleManager(opts.locales); - } - - // Initialize state machine if options provided - if (opts.stateMachine) { - stateMachine = new StateMachine( - opts.stateMachine.initial, - opts.stateMachine.options - ); - - stateMachine.addReducer( - opts.stateMachine.reducer - ); - } - - // Initialize storage adapter if options provided - if (opts.storage) { - storage = new StorageAdapter(opts.storage.implementation, opts.storage.prefix); - } - - // Initialize fetch engine if options provided - if (opts.fetch) { - - fetch = new FetchEngine(opts.fetch) as KitFetch; - } - - 
// Initialize named APIs if options provided - if (opts.apis) { - - assert(isObject(opts.apis), 'apis must be an object'); - - apis = {} as KitApis; - - let i = 0; - - for (const key in opts.apis) { - - assert(isObject(opts.apis[key]), `apis key ${key} must be an object`); - - apis[key] = new FetchEngine(opts.apis[key]) as never - - i++; - } - - assert(i > 0, 'apis must contain at least one key'); - } - - // Return typed object with conditional properties based on provided options - return { - observer, - locale, - stateMachine, - storage, - fetch, - apis - } as { - observer: Kit['events'] extends undefined ? never : KitObserver, - locale: Kit['locales'] extends undefined ? never : KitLocales, - stateMachine: Kit['stateMachine'] extends undefined ? never : KitStateMachine, - storage: Kit['storage'] extends undefined ? never : KitStorage, - fetch: Kit['fetch'] extends undefined ? never : KitFetch, - apis: { - [key in keyof Kit['apis']]: FetchEngine< - NotUndefined[key]['headers'], - NotUndefined[key]['params'], - NotUndefined[key]['state'] - > - } - } -}; diff --git a/packages/kit/tsconfig.json b/packages/kit/tsconfig.json deleted file mode 100644 index ecdc3a2..0000000 --- a/packages/kit/tsconfig.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "extends": "../../tsconfig.json", - "compilerOptions": { "noEmit": false, "declarationDir": "./dist/types" }, - "include": ["src"] -} \ No newline at end of file diff --git a/packages/kit/typedoc.json b/packages/kit/typedoc.json deleted file mode 100644 index 138ce32..0000000 --- a/packages/kit/typedoc.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "$schema": "https://typedoc.org/schema.json", - "includeVersion": true, - "entryPoints": ["src/index.ts"], -} \ No newline at end of file diff --git a/packages/observer/src/generator.ts b/packages/observer/src/generator.ts index 335c352..34ab647 100644 --- a/packages/observer/src/generator.ts +++ b/packages/observer/src/generator.ts @@ -1,4 +1,4 @@ -import { Deferred } from '@logosdx/utils'; 
+import { Deferred, PriorityQueue } from '@logosdx/utils'; import { type EventData, EventPromise, EventError } from './helpers.ts'; import { type ObserverEngine } from './engine.ts'; @@ -18,7 +18,8 @@ export class EventGenerator, E extends Events | #observer: ObserverEngine; #event: E | RegExp; - #defer: DeferredEvent>; + #buffer: PriorityQueue>; + #waiting: DeferredEvent> | null = null; #done: boolean = false; #listener: ObserverEngine.EventCallback | null = null; #lastValue: unknown | null = null; @@ -49,18 +50,22 @@ export class EventGenerator, E extends Events | this.#observer = observer; this.#event = event; - - this.#defer = new DeferredEvent(); - this.#defer.promise.cleanup = this.cleanup; - this.#defer.promise.resolve = this.#defer.resolve; + this.#buffer = new PriorityQueue>(); this.#listener = (data: unknown) => { this.#lastValue = data; - this.#defer.resolve(data as EventData); - this.#defer = new DeferredEvent(); - this.#defer.promise.cleanup = this.cleanup; - this.#defer.promise.resolve = this.#defer.resolve; + + if (this.#waiting) { + + const defer = this.#waiting; + this.#waiting = null; + defer.resolve(data as EventData); + } + else { + + this.#buffer.push(data as EventData); + } } const off = observer.on( @@ -75,7 +80,21 @@ export class EventGenerator, E extends Events | this.#assertNotDestroyed(); - return this.#defer.promise + const buffered = this.#buffer.pop(); + + if (buffered !== null) { + + return Promise.resolve(buffered); + } + + if (!this.#waiting) { + + this.#waiting = new DeferredEvent(); + this.#waiting.promise.cleanup = this.cleanup; + this.#waiting.promise.resolve = this.#waiting.resolve; + } + + return this.#waiting.promise; }; this.cleanup = () => { @@ -85,14 +104,22 @@ export class EventGenerator, E extends Events | off(); this.#done = true; - // Resolve all lingering promises + // Resolve the waiting deferred with the last value + if (this.#waiting) { + + this.#waiting.resolve(this.#lastValue as EventData); + this.#waiting = 
null; + } + + // Resolve all lingering iterator promises // with the last value this.#_iterPromise.forEach(promise => { promise.resolve?.(this.#lastValue as EventData); }); - // Cleanup the set + // Cleanup the set and buffer this.#_iterPromise.clear(); + this.#buffer.clear(); this.#lastValue = null; // Abort the generator's abort controller if it exists diff --git a/packages/utils/src/async/singleflight.ts b/packages/utils/src/async/singleflight.ts index c508178..c53ee52 100644 --- a/packages/utils/src/async/singleflight.ts +++ b/packages/utils/src/async/singleflight.ts @@ -391,6 +391,55 @@ export class SingleFlight { await this.#adapter.clear(); } + /** + * Invalidate cache entries matching a predicate. + * + * Uses adapter's `keys()` method if available (MapCacheAdapter has it). + * Returns 0 if adapter doesn't support key iteration. + * + * @param predicate - Function that returns true for keys to delete + * @returns Number of entries deleted + * + * @example + * ```typescript + * // Delete all user-related entries + * const count = await flight.invalidateCache(key => key.startsWith('user:')); + * ``` + */ + async invalidateCache(predicate: (key: string) => boolean): Promise { + + const adapter = this.#adapter as CacheAdapter & { keys?: () => IterableIterator }; + + if (typeof adapter.keys !== 'function') { + + return 0; + } + + const keysToDelete: string[] = []; + + for (const key of adapter.keys()) { + + if (predicate(key)) { + + keysToDelete.push(key); + } + } + + let deleted = 0; + + for (const key of keysToDelete) { + + const wasDeleted = await this.#adapter.delete(key); + + if (wasDeleted) { + + deleted++; + } + } + + return deleted; + } + /** * Get statistics about current state. * diff --git a/packages/utils/src/types.ts b/packages/utils/src/types.ts index fcad0c8..2afd904 100644 --- a/packages/utils/src/types.ts +++ b/packages/utils/src/types.ts @@ -119,6 +119,7 @@ export type PathNames = T extends object ? 
{ [K in keyof T]: }`}` }[keyof T] : never + /** * Generates only the leaf paths (final values) for an object type. * @@ -129,8 +130,12 @@ export type PathNames = T extends object ? { [K in keyof T]: * interface User { profile: { name: string; age: number; }; } * type UserLeaves = PathLeaves; // 'profile.name' | 'profile.age' */ -export type PathLeaves = T extends object ? { [K in keyof T]: - `${Exclude}${PathLeaves extends never ? "" : `.${PathLeaves}`}` +export type PathLeaves = T extends object ? { + [K in keyof T]-?: ( + T[K] extends undefined + ? never + : `${Exclude}${PathLeaves extends never ? "" : `.${PathLeaves}`}` + ) }[keyof T] : never /** diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index d186dc2..796d284 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -18,8 +18,8 @@ importers: specifier: ^1.11.29 version: 1.11.29 '@types/node': - specifier: ^24.10.7 - version: 24.10.7 + specifier: ^24.10.9 + version: 24.10.10 tsx: specifier: ^4.19.4 version: 4.19.4 @@ -44,10 +44,10 @@ importers: version: 4.0.0(typedoc@0.28.4(typescript@5.8.3)) vite: specifier: ^7 - version: 7.0.6(@types/node@24.10.7)(tsx@4.19.4)(yaml@2.8.0) + version: 7.0.6(@types/node@24.10.10)(tsx@4.19.4)(yaml@2.8.0) vitepress: - specifier: 2.0.0-alpha.15 - version: 2.0.0-alpha.15(@types/node@24.10.7)(postcss@8.5.6)(tsx@4.19.4)(typescript@5.8.3)(yaml@2.8.0) + specifier: 2.0.0-alpha.16 + version: 2.0.0-alpha.16(@types/node@24.10.10)(postcss@8.5.6)(tsx@4.19.4)(typescript@5.8.3)(yaml@2.8.0) zx: specifier: ^8.8.5 version: 8.8.5 @@ -140,12 +140,15 @@ importers: '@types/sinon': specifier: ^21 version: 21.0.0 + '@vitest/browser-playwright': + specifier: ^4.0.14 + version: 4.0.14(playwright@1.58.0)(vite@7.0.6(@types/node@24.10.10)(tsx@4.19.4)(yaml@2.8.0))(vitest@4.0.14) '@vitest/coverage-v8': specifier: ^4 - version: 4.0.14(vitest@4.0.14(@types/node@24.10.7)(jsdom@27.2.0)(tsx@4.19.4)(yaml@2.8.0)) + version: 
4.0.14(@vitest/browser@4.0.14(vite@7.0.6(@types/node@24.10.10)(tsx@4.19.4)(yaml@2.8.0))(vitest@4.0.14))(vitest@4.0.14) better-sse: - specifier: ^0.15.1 - version: 0.15.1 + specifier: ^0.16.1 + version: 0.16.1 fast-check: specifier: ^4.1.1 version: 4.1.1 @@ -153,11 +156,14 @@ importers: specifier: ^18 version: 18.0.2 jsdom: - specifier: ^27 - version: 27.2.0 + specifier: ^28 + version: 28.0.0 node-test-github-reporter: specifier: ^1.3.0 version: 1.3.0 + playwright: + specifier: ^1.58.0 + version: 1.58.0 sinon: specifier: ^21 version: 21.0.0 @@ -166,12 +172,12 @@ importers: version: 5.0.4 vitest: specifier: ^4 - version: 4.0.14(@types/node@24.10.7)(jsdom@27.2.0)(tsx@4.19.4)(yaml@2.8.0) + version: 4.0.14(@types/node@24.10.10)(@vitest/browser-playwright@4.0.14)(jsdom@28.0.0)(tsx@4.19.4)(yaml@2.8.0) packages: - '@acemir/cssom@0.9.24': - resolution: {integrity: sha512-5YjgMmAiT2rjJZU7XK1SNI7iqTy92DpaYVgG6x63FxkJ11UpYfLndHJATtinWJClAXiOlW9XWaUyAQf8pMrQPg==} + '@acemir/cssom@0.9.31': + resolution: {integrity: sha512-ZnR3GSaH+/vJ0YlHau21FjfLYjMpYVIzTD8M8vIEQvIGxeOXyXdzCI140rrCY862p/C/BbzWsjc1dgnM9mkoTA==} '@actions/core@1.10.1': resolution: {integrity: sha512-3lBR9EDAY+iYIpTnTIXmWcNbX3T2kCkAEQGIQx4NVQ0575nk2k3GRZDTPQG+vVtS2izSLmINlxXf0uLtnrTP+g==} @@ -179,11 +185,11 @@ packages: '@actions/http-client@2.2.3': resolution: {integrity: sha512-mx8hyJi/hjFvbPokCg4uRd4ZX78t+YyRPtnKWwIl+RzNaVuFpQHfmlGVfsKEJN8LwTCvL+DfVgAM04XaHkm6bA==} - '@asamuzakjp/css-color@4.1.0': - resolution: {integrity: sha512-9xiBAtLn4aNsa4mDnpovJvBn72tNEIACyvlqaNJ+ADemR+yeMJWnBudOi2qGDviJa7SwcDOU/TRh5dnET7qk0w==} + '@asamuzakjp/css-color@4.1.1': + resolution: {integrity: sha512-B0Hv6G3gWGMn0xKJ0txEi/jM5iFpT3MfDxmhZFb4W047GvytCf1DHQ1D69W3zHI4yWe2aTZAA0JnbMZ7Xc8DuQ==} - '@asamuzakjp/dom-selector@6.7.4': - resolution: {integrity: sha512-buQDjkm+wDPXd6c13534URWZqbz0RP5PAhXZ+LIoa5LgwInT9HVJvGIJivg75vi8I13CxDGdTnz+aY5YUJlIAA==} + '@asamuzakjp/dom-selector@6.7.7': + resolution: {integrity: 
sha512-8CO/UQ4tzDd7ula+/CVimJIVWez99UJlbMyIgk8xOnhAVPKLnBZmUFYVgugS441v2ZqUq5EnSh6B0Ua0liSFAA==} '@asamuzakjp/nwsapi@2.3.9': resolution: {integrity: sha512-n8GuYSrI9bF7FFZ/SjhwevlHc8xaVlb/7HmHelnc/PZXBD2ZR49NnN9sMMuDdEGPeeRQ5d0hqlSlEpgCX3Wl0Q==} @@ -292,19 +298,21 @@ packages: peerDependencies: '@csstools/css-tokenizer': ^3.0.4 - '@csstools/css-syntax-patches-for-csstree@1.0.19': - resolution: {integrity: sha512-QW5/SM2ARltEhoKcmRI1LoLf3/C7dHGswwCnfLcoMgqurBT4f8GvwXMgAbK/FwcxthmJRK5MGTtddj0yQn0J9g==} - engines: {node: '>=18'} + '@csstools/css-syntax-patches-for-csstree@1.0.26': + resolution: {integrity: sha512-6boXK0KkzT5u5xOgF6TKB+CLq9SOpEGmkZw0g5n9/7yg85wab3UzSxB8TxhLJ31L4SGJ6BCFRw/iftTha1CJXA==} '@csstools/css-tokenizer@3.0.4': resolution: {integrity: sha512-Vd/9EVDiu6PPJt9yAh6roZP6El1xHrdvIVGjyBsHR0RYwNHgL7FJPyIIW4fANJNG6FtyZfvlRPpFI4ZM/lubvw==} engines: {node: '>=18'} - '@docsearch/css@4.3.2': - resolution: {integrity: sha512-K3Yhay9MgkBjJJ0WEL5MxnACModX9xuNt3UlQQkDEDZJZ0+aeWKtOkxHNndMRkMBnHdYvQjxkm6mdlneOtU1IQ==} + '@docsearch/css@4.5.3': + resolution: {integrity: sha512-kUpHaxn0AgI3LQfyzTYkNUuaFY4uEz/Ym9/N/FvyDE+PzSgZsCyDH9jE49B6N6f1eLCm9Yp64J9wENd6vypdxA==} - '@docsearch/js@4.3.2': - resolution: {integrity: sha512-xdfpPXMgKRY9EW7U1vtY7gLKbLZFa9ed+t0Dacquq8zXBqAlH9HlUf0h4Mhxm0xatsVeMaIR2wr/u6g0GsZyQw==} + '@docsearch/js@4.5.3': + resolution: {integrity: sha512-rcBiUMCXbZLqrLIT6F6FDcrG/tyvM2WM0zum6NPbIiQNDQxbSgmNc+/bToS0rxBsXaxiU64esiWoS02WqrWLsg==} + + '@docsearch/sidepanel-js@4.5.3': + resolution: {integrity: sha512-DmcZYc1ZMMcabtKrCU2RIf1z09LwazKCyoPFU/ijJiBg2LdqMLmkyDKHGy1OIYEyUx4ok5RIbkVGaRfD55BqZQ==} '@esbuild/aix-ppc64@0.25.4': resolution: {integrity: sha512-1VCICWypeQKhVbE9oW/sJaAmjLxhVqacdkvPLEjwlttjfwENRSClS8EjBz0KzRyFSCPDIkuXW34Je/vk7zdB7Q==} @@ -312,150 +320,315 @@ packages: cpu: [ppc64] os: [aix] + '@esbuild/aix-ppc64@0.27.2': + resolution: {integrity: 
sha512-GZMB+a0mOMZs4MpDbj8RJp4cw+w1WV5NYD6xzgvzUJ5Ek2jerwfO2eADyI6ExDSUED+1X8aMbegahsJi+8mgpw==} + engines: {node: '>=18'} + cpu: [ppc64] + os: [aix] + '@esbuild/android-arm64@0.25.4': resolution: {integrity: sha512-bBy69pgfhMGtCnwpC/x5QhfxAz/cBgQ9enbtwjf6V9lnPI/hMyT9iWpR1arm0l3kttTr4L0KSLpKmLp/ilKS9A==} engines: {node: '>=18'} cpu: [arm64] os: [android] + '@esbuild/android-arm64@0.27.2': + resolution: {integrity: sha512-pvz8ZZ7ot/RBphf8fv60ljmaoydPU12VuXHImtAs0XhLLw+EXBi2BLe3OYSBslR4rryHvweW5gmkKFwTiFy6KA==} + engines: {node: '>=18'} + cpu: [arm64] + os: [android] + '@esbuild/android-arm@0.25.4': resolution: {integrity: sha512-QNdQEps7DfFwE3hXiU4BZeOV68HHzYwGd0Nthhd3uCkkEKK7/R6MTgM0P7H7FAs5pU/DIWsviMmEGxEoxIZ+ZQ==} engines: {node: '>=18'} cpu: [arm] os: [android] + '@esbuild/android-arm@0.27.2': + resolution: {integrity: sha512-DVNI8jlPa7Ujbr1yjU2PfUSRtAUZPG9I1RwW4F4xFB1Imiu2on0ADiI/c3td+KmDtVKNbi+nffGDQMfcIMkwIA==} + engines: {node: '>=18'} + cpu: [arm] + os: [android] + '@esbuild/android-x64@0.25.4': resolution: {integrity: sha512-TVhdVtQIFuVpIIR282btcGC2oGQoSfZfmBdTip2anCaVYcqWlZXGcdcKIUklfX2wj0JklNYgz39OBqh2cqXvcQ==} engines: {node: '>=18'} cpu: [x64] os: [android] + '@esbuild/android-x64@0.27.2': + resolution: {integrity: sha512-z8Ank4Byh4TJJOh4wpz8g2vDy75zFL0TlZlkUkEwYXuPSgX8yzep596n6mT7905kA9uHZsf/o2OJZubl2l3M7A==} + engines: {node: '>=18'} + cpu: [x64] + os: [android] + '@esbuild/darwin-arm64@0.25.4': resolution: {integrity: sha512-Y1giCfM4nlHDWEfSckMzeWNdQS31BQGs9/rouw6Ub91tkK79aIMTH3q9xHvzH8d0wDru5Ci0kWB8b3up/nl16g==} engines: {node: '>=18'} cpu: [arm64] os: [darwin] + '@esbuild/darwin-arm64@0.27.2': + resolution: {integrity: sha512-davCD2Zc80nzDVRwXTcQP/28fiJbcOwvdolL0sOiOsbwBa72kegmVU0Wrh1MYrbuCL98Omp5dVhQFWRKR2ZAlg==} + engines: {node: '>=18'} + cpu: [arm64] + os: [darwin] + '@esbuild/darwin-x64@0.25.4': resolution: {integrity: sha512-CJsry8ZGM5VFVeyUYB3cdKpd/H69PYez4eJh1W/t38vzutdjEjtP7hB6eLKBoOdxcAlCtEYHzQ/PJ/oU9I4u0A==} engines: {node: '>=18'} cpu: 
[x64] os: [darwin] + '@esbuild/darwin-x64@0.27.2': + resolution: {integrity: sha512-ZxtijOmlQCBWGwbVmwOF/UCzuGIbUkqB1faQRf5akQmxRJ1ujusWsb3CVfk/9iZKr2L5SMU5wPBi1UWbvL+VQA==} + engines: {node: '>=18'} + cpu: [x64] + os: [darwin] + '@esbuild/freebsd-arm64@0.25.4': resolution: {integrity: sha512-yYq+39NlTRzU2XmoPW4l5Ifpl9fqSk0nAJYM/V/WUGPEFfek1epLHJIkTQM6bBs1swApjO5nWgvr843g6TjxuQ==} engines: {node: '>=18'} cpu: [arm64] os: [freebsd] + '@esbuild/freebsd-arm64@0.27.2': + resolution: {integrity: sha512-lS/9CN+rgqQ9czogxlMcBMGd+l8Q3Nj1MFQwBZJyoEKI50XGxwuzznYdwcav6lpOGv5BqaZXqvBSiB/kJ5op+g==} + engines: {node: '>=18'} + cpu: [arm64] + os: [freebsd] + '@esbuild/freebsd-x64@0.25.4': resolution: {integrity: sha512-0FgvOJ6UUMflsHSPLzdfDnnBBVoCDtBTVyn/MrWloUNvq/5SFmh13l3dvgRPkDihRxb77Y17MbqbCAa2strMQQ==} engines: {node: '>=18'} cpu: [x64] os: [freebsd] + '@esbuild/freebsd-x64@0.27.2': + resolution: {integrity: sha512-tAfqtNYb4YgPnJlEFu4c212HYjQWSO/w/h/lQaBK7RbwGIkBOuNKQI9tqWzx7Wtp7bTPaGC6MJvWI608P3wXYA==} + engines: {node: '>=18'} + cpu: [x64] + os: [freebsd] + '@esbuild/linux-arm64@0.25.4': resolution: {integrity: sha512-+89UsQTfXdmjIvZS6nUnOOLoXnkUTB9hR5QAeLrQdzOSWZvNSAXAtcRDHWtqAUtAmv7ZM1WPOOeSxDzzzMogiQ==} engines: {node: '>=18'} cpu: [arm64] os: [linux] + '@esbuild/linux-arm64@0.27.2': + resolution: {integrity: sha512-hYxN8pr66NsCCiRFkHUAsxylNOcAQaxSSkHMMjcpx0si13t1LHFphxJZUiGwojB1a/Hd5OiPIqDdXONia6bhTw==} + engines: {node: '>=18'} + cpu: [arm64] + os: [linux] + '@esbuild/linux-arm@0.25.4': resolution: {integrity: sha512-kro4c0P85GMfFYqW4TWOpvmF8rFShbWGnrLqlzp4X1TNWjRY3JMYUfDCtOxPKOIY8B0WC8HN51hGP4I4hz4AaQ==} engines: {node: '>=18'} cpu: [arm] os: [linux] + '@esbuild/linux-arm@0.27.2': + resolution: {integrity: sha512-vWfq4GaIMP9AIe4yj1ZUW18RDhx6EPQKjwe7n8BbIecFtCQG4CfHGaHuh7fdfq+y3LIA2vGS/o9ZBGVxIDi9hw==} + engines: {node: '>=18'} + cpu: [arm] + os: [linux] + '@esbuild/linux-ia32@0.25.4': resolution: {integrity: 
sha512-yTEjoapy8UP3rv8dB0ip3AfMpRbyhSN3+hY8mo/i4QXFeDxmiYbEKp3ZRjBKcOP862Ua4b1PDfwlvbuwY7hIGQ==} engines: {node: '>=18'} cpu: [ia32] os: [linux] + '@esbuild/linux-ia32@0.27.2': + resolution: {integrity: sha512-MJt5BRRSScPDwG2hLelYhAAKh9imjHK5+NE/tvnRLbIqUWa+0E9N4WNMjmp/kXXPHZGqPLxggwVhz7QP8CTR8w==} + engines: {node: '>=18'} + cpu: [ia32] + os: [linux] + '@esbuild/linux-loong64@0.25.4': resolution: {integrity: sha512-NeqqYkrcGzFwi6CGRGNMOjWGGSYOpqwCjS9fvaUlX5s3zwOtn1qwg1s2iE2svBe4Q/YOG1q6875lcAoQK/F4VA==} engines: {node: '>=18'} cpu: [loong64] os: [linux] + '@esbuild/linux-loong64@0.27.2': + resolution: {integrity: sha512-lugyF1atnAT463aO6KPshVCJK5NgRnU4yb3FUumyVz+cGvZbontBgzeGFO1nF+dPueHD367a2ZXe1NtUkAjOtg==} + engines: {node: '>=18'} + cpu: [loong64] + os: [linux] + '@esbuild/linux-mips64el@0.25.4': resolution: {integrity: sha512-IcvTlF9dtLrfL/M8WgNI/qJYBENP3ekgsHbYUIzEzq5XJzzVEV/fXY9WFPfEEXmu3ck2qJP8LG/p3Q8f7Zc2Xg==} engines: {node: '>=18'} cpu: [mips64el] os: [linux] + '@esbuild/linux-mips64el@0.27.2': + resolution: {integrity: sha512-nlP2I6ArEBewvJ2gjrrkESEZkB5mIoaTswuqNFRv/WYd+ATtUpe9Y09RnJvgvdag7he0OWgEZWhviS1OTOKixw==} + engines: {node: '>=18'} + cpu: [mips64el] + os: [linux] + '@esbuild/linux-ppc64@0.25.4': resolution: {integrity: sha512-HOy0aLTJTVtoTeGZh4HSXaO6M95qu4k5lJcH4gxv56iaycfz1S8GO/5Jh6X4Y1YiI0h7cRyLi+HixMR+88swag==} engines: {node: '>=18'} cpu: [ppc64] os: [linux] + '@esbuild/linux-ppc64@0.27.2': + resolution: {integrity: sha512-C92gnpey7tUQONqg1n6dKVbx3vphKtTHJaNG2Ok9lGwbZil6DrfyecMsp9CrmXGQJmZ7iiVXvvZH6Ml5hL6XdQ==} + engines: {node: '>=18'} + cpu: [ppc64] + os: [linux] + '@esbuild/linux-riscv64@0.25.4': resolution: {integrity: sha512-i8JUDAufpz9jOzo4yIShCTcXzS07vEgWzyX3NH2G7LEFVgrLEhjwL3ajFE4fZI3I4ZgiM7JH3GQ7ReObROvSUA==} engines: {node: '>=18'} cpu: [riscv64] os: [linux] + '@esbuild/linux-riscv64@0.27.2': + resolution: {integrity: sha512-B5BOmojNtUyN8AXlK0QJyvjEZkWwy/FKvakkTDCziX95AowLZKR6aCDhG7LeF7uMCXEJqwa8Bejz5LTPYm8AvA==} + engines: {node: 
'>=18'} + cpu: [riscv64] + os: [linux] + '@esbuild/linux-s390x@0.25.4': resolution: {integrity: sha512-jFnu+6UbLlzIjPQpWCNh5QtrcNfMLjgIavnwPQAfoGx4q17ocOU9MsQ2QVvFxwQoWpZT8DvTLooTvmOQXkO51g==} engines: {node: '>=18'} cpu: [s390x] os: [linux] + '@esbuild/linux-s390x@0.27.2': + resolution: {integrity: sha512-p4bm9+wsPwup5Z8f4EpfN63qNagQ47Ua2znaqGH6bqLlmJ4bx97Y9JdqxgGZ6Y8xVTixUnEkoKSHcpRlDnNr5w==} + engines: {node: '>=18'} + cpu: [s390x] + os: [linux] + '@esbuild/linux-x64@0.25.4': resolution: {integrity: sha512-6e0cvXwzOnVWJHq+mskP8DNSrKBr1bULBvnFLpc1KY+d+irZSgZ02TGse5FsafKS5jg2e4pbvK6TPXaF/A6+CA==} engines: {node: '>=18'} cpu: [x64] os: [linux] + '@esbuild/linux-x64@0.27.2': + resolution: {integrity: sha512-uwp2Tip5aPmH+NRUwTcfLb+W32WXjpFejTIOWZFw/v7/KnpCDKG66u4DLcurQpiYTiYwQ9B7KOeMJvLCu/OvbA==} + engines: {node: '>=18'} + cpu: [x64] + os: [linux] + '@esbuild/netbsd-arm64@0.25.4': resolution: {integrity: sha512-vUnkBYxZW4hL/ie91hSqaSNjulOnYXE1VSLusnvHg2u3jewJBz3YzB9+oCw8DABeVqZGg94t9tyZFoHma8gWZQ==} engines: {node: '>=18'} cpu: [arm64] os: [netbsd] + '@esbuild/netbsd-arm64@0.27.2': + resolution: {integrity: sha512-Kj6DiBlwXrPsCRDeRvGAUb/LNrBASrfqAIok+xB0LxK8CHqxZ037viF13ugfsIpePH93mX7xfJp97cyDuTZ3cw==} + engines: {node: '>=18'} + cpu: [arm64] + os: [netbsd] + '@esbuild/netbsd-x64@0.25.4': resolution: {integrity: sha512-XAg8pIQn5CzhOB8odIcAm42QsOfa98SBeKUdo4xa8OvX8LbMZqEtgeWE9P/Wxt7MlG2QqvjGths+nq48TrUiKw==} engines: {node: '>=18'} cpu: [x64] os: [netbsd] + '@esbuild/netbsd-x64@0.27.2': + resolution: {integrity: sha512-HwGDZ0VLVBY3Y+Nw0JexZy9o/nUAWq9MlV7cahpaXKW6TOzfVno3y3/M8Ga8u8Yr7GldLOov27xiCnqRZf0tCA==} + engines: {node: '>=18'} + cpu: [x64] + os: [netbsd] + '@esbuild/openbsd-arm64@0.25.4': resolution: {integrity: sha512-Ct2WcFEANlFDtp1nVAXSNBPDxyU+j7+tId//iHXU2f/lN5AmO4zLyhDcpR5Cz1r08mVxzt3Jpyt4PmXQ1O6+7A==} engines: {node: '>=18'} cpu: [arm64] os: [openbsd] + '@esbuild/openbsd-arm64@0.27.2': + resolution: {integrity: 
sha512-DNIHH2BPQ5551A7oSHD0CKbwIA/Ox7+78/AWkbS5QoRzaqlev2uFayfSxq68EkonB+IKjiuxBFoV8ESJy8bOHA==} + engines: {node: '>=18'} + cpu: [arm64] + os: [openbsd] + '@esbuild/openbsd-x64@0.25.4': resolution: {integrity: sha512-xAGGhyOQ9Otm1Xu8NT1ifGLnA6M3sJxZ6ixylb+vIUVzvvd6GOALpwQrYrtlPouMqd/vSbgehz6HaVk4+7Afhw==} engines: {node: '>=18'} cpu: [x64] os: [openbsd] + '@esbuild/openbsd-x64@0.27.2': + resolution: {integrity: sha512-/it7w9Nb7+0KFIzjalNJVR5bOzA9Vay+yIPLVHfIQYG/j+j9VTH84aNB8ExGKPU4AzfaEvN9/V4HV+F+vo8OEg==} + engines: {node: '>=18'} + cpu: [x64] + os: [openbsd] + + '@esbuild/openharmony-arm64@0.27.2': + resolution: {integrity: sha512-LRBbCmiU51IXfeXk59csuX/aSaToeG7w48nMwA6049Y4J4+VbWALAuXcs+qcD04rHDuSCSRKdmY63sruDS5qag==} + engines: {node: '>=18'} + cpu: [arm64] + os: [openharmony] + '@esbuild/sunos-x64@0.25.4': resolution: {integrity: sha512-Mw+tzy4pp6wZEK0+Lwr76pWLjrtjmJyUB23tHKqEDP74R3q95luY/bXqXZeYl4NYlvwOqoRKlInQialgCKy67Q==} engines: {node: '>=18'} cpu: [x64] os: [sunos] + '@esbuild/sunos-x64@0.27.2': + resolution: {integrity: sha512-kMtx1yqJHTmqaqHPAzKCAkDaKsffmXkPHThSfRwZGyuqyIeBvf08KSsYXl+abf5HDAPMJIPnbBfXvP2ZC2TfHg==} + engines: {node: '>=18'} + cpu: [x64] + os: [sunos] + '@esbuild/win32-arm64@0.25.4': resolution: {integrity: sha512-AVUP428VQTSddguz9dO9ngb+E5aScyg7nOeJDrF1HPYu555gmza3bDGMPhmVXL8svDSoqPCsCPjb265yG/kLKQ==} engines: {node: '>=18'} cpu: [arm64] os: [win32] + '@esbuild/win32-arm64@0.27.2': + resolution: {integrity: sha512-Yaf78O/B3Kkh+nKABUF++bvJv5Ijoy9AN1ww904rOXZFLWVc5OLOfL56W+C8F9xn5JQZa3UX6m+IktJnIb1Jjg==} + engines: {node: '>=18'} + cpu: [arm64] + os: [win32] + '@esbuild/win32-ia32@0.25.4': resolution: {integrity: sha512-i1sW+1i+oWvQzSgfRcxxG2k4I9n3O9NRqy8U+uugaT2Dy7kLO9Y7wI72haOahxceMX8hZAzgGou1FhndRldxRg==} engines: {node: '>=18'} cpu: [ia32] os: [win32] + '@esbuild/win32-ia32@0.27.2': + resolution: {integrity: sha512-Iuws0kxo4yusk7sw70Xa2E2imZU5HoixzxfGCdxwBdhiDgt9vX9VUCBhqcwY7/uh//78A1hMkkROMJq9l27oLQ==} + engines: {node: '>=18'} + 
cpu: [ia32] + os: [win32] + '@esbuild/win32-x64@0.25.4': resolution: {integrity: sha512-nOT2vZNw6hJ+z43oP1SPea/G/6AbN6X+bGNhNuq8NtRHy4wsMhw765IKLNmnjek7GvjWBYQ8Q5VBoYTFg9y1UQ==} engines: {node: '>=18'} cpu: [x64] os: [win32] + '@esbuild/win32-x64@0.27.2': + resolution: {integrity: sha512-sRdU18mcKf7F+YgheI/zGf5alZatMUTKj/jNS6l744f9u3WFu4v7twcUI9vu4mknF4Y9aDlblIie0IM+5xxaqQ==} + engines: {node: '>=18'} + cpu: [x64] + os: [win32] + + '@exodus/bytes@1.11.0': + resolution: {integrity: sha512-wO3vd8nsEHdumsXrjGO/v4p6irbg7hy9kvIeR6i2AwylZSk4HJdWgL0FNaVquW1+AweJcdvU1IEpuIWk/WaPnA==} + engines: {node: ^20.19.0 || ^22.12.0 || >=24.0.0} + peerDependencies: + '@noble/hashes': ^1.8.0 || ^2.0.0 + peerDependenciesMeta: + '@noble/hashes': + optional: true + '@fastify/busboy@2.1.1': resolution: {integrity: sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA==} engines: {node: '>=14'} @@ -568,8 +741,8 @@ packages: '@hapi/wreck@18.0.1': resolution: {integrity: sha512-OLHER70+rZxvDl75xq3xXOfd3e8XIvz8fWY0dqg92UvhZ29zo24vQgfqgHSYhB5ZiuFpSLeriOisAlxAo/1jWg==} - '@iconify-json/simple-icons@1.2.60': - resolution: {integrity: sha512-KlwLBKCdMCqfySdkAA+jehdUx6VSjnj6lvzQKus7HjkPSQ6QP58d6xiptkIp0jd/Hw3PW2++nRuGvCvSYaF0Mg==} + '@iconify-json/simple-icons@1.2.69': + resolution: {integrity: sha512-T/rhy5n7pzE0ZOxQVlF68SNPCYYjRBpddjgjrJO5WWVRG8es5BQmvxIE9kKF+t2hhPGvuGQFpXmUyqbOtnxirQ==} '@iconify/types@2.0.0': resolution: {integrity: sha512-+wluvCrRhXrhyOmRDJ3q8mux9JkKy5SJ/v8ol2tu4FVjyYvtEzkc/3pK15ET6RKg4b4w4BmTk1+gsCUhf21Ykg==} @@ -702,8 +875,11 @@ packages: resolution: {integrity: sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==} engines: {node: '>= 8'} - '@rolldown/pluginutils@1.0.0-beta.29': - resolution: {integrity: sha512-NIJgOsMjbxAXvoGq/X0gD7VPMQ8j9g0BiDaNjVNVjvl+iKXxL3Jre0v31RmBYeLEmkbj2s02v8vFTbUXi5XS2Q==} + '@polka/url@1.0.0-next.29': + resolution: {integrity: 
sha512-wwQAWhWSuHaag8c4q/KN/vCoeOJYshAIvMQwD4GpSb3OiZklFfvAgmj0VCBBImRpuF/aFgIRzllXlVX93Jevww==} + + '@rolldown/pluginutils@1.0.0-rc.2': + resolution: {integrity: sha512-izyXV/v+cHiRfozX62W9htOAvwMo4/bXKDrQ+vom1L1qRuexPock/7VZDAhnpHCLNejd3NJ6hiab+tO0D44Rgw==} '@rollup/rollup-android-arm-eabi@4.41.1': resolution: {integrity: sha512-NELNvyEWZ6R9QMkiytB4/L4zSEaBC03KIXEghptLGLZWJ6VPrL63ooZQCOnlx36aQPGhzuOMwDerC1Eb2VmrLw==} @@ -918,35 +1094,35 @@ packages: '@sec-ant/readable-stream@0.4.1': resolution: {integrity: sha512-831qok9r2t8AlxLko40y2ebgSDhenenCatLVeW/uBtnHPyhHOvG0C7TvfgecV+wHzIm5KUICgzmVpWS+IMEAeg==} - '@shikijs/core@3.17.0': - resolution: {integrity: sha512-/HjeOnbc62C+n33QFNFrAhUlIADKwfuoS50Ht0pxujxP4QjZAlFp5Q+OkDo531SCTzivx5T18khwyBdKoPdkuw==} + '@shikijs/core@3.22.0': + resolution: {integrity: sha512-iAlTtSDDbJiRpvgL5ugKEATDtHdUVkqgHDm/gbD2ZS9c88mx7G1zSYjjOxp5Qa0eaW0MAQosFRmJSk354PRoQA==} - '@shikijs/engine-javascript@3.17.0': - resolution: {integrity: sha512-WwF99xdP8KfuDrIbT4wxyypfhoIxMeeOCp1AiuvzzZ6JT5B3vIuoclL8xOuuydA6LBeeNXUF/XV5zlwwex1jlA==} + '@shikijs/engine-javascript@3.22.0': + resolution: {integrity: sha512-jdKhfgW9CRtj3Tor0L7+yPwdG3CgP7W+ZEqSsojrMzCjD1e0IxIbwUMDDpYlVBlC08TACg4puwFGkZfLS+56Tw==} - '@shikijs/engine-oniguruma@3.17.0': - resolution: {integrity: sha512-flSbHZAiOZDNTrEbULY8DLWavu/TyVu/E7RChpLB4WvKX4iHMfj80C6Hi3TjIWaQtHOW0KC6kzMcuB5TO1hZ8Q==} + '@shikijs/engine-oniguruma@3.22.0': + resolution: {integrity: sha512-DyXsOG0vGtNtl7ygvabHd7Mt5EY8gCNqR9Y7Lpbbd/PbJvgWrqaKzH1JW6H6qFkuUa8aCxoiYVv8/YfFljiQxA==} '@shikijs/engine-oniguruma@3.4.2': resolution: {integrity: sha512-zcZKMnNndgRa3ORja6Iemsr3DrLtkX3cAF7lTJkdMB6v9alhlBsX9uNiCpqofNrXOvpA3h6lHcLJxgCIhVOU5Q==} - '@shikijs/langs@3.17.0': - resolution: {integrity: sha512-icmur2n5Ojb+HAiQu6NEcIIJ8oWDFGGEpiqSCe43539Sabpx7Y829WR3QuUW2zjTM4l6V8Sazgb3rrHO2orEAw==} + '@shikijs/langs@3.22.0': + resolution: {integrity: 
sha512-x/42TfhWmp6H00T6uwVrdTJGKgNdFbrEdhaDwSR5fd5zhQ1Q46bHq9EO61SCEWJR0HY7z2HNDMaBZp8JRmKiIA==} '@shikijs/langs@3.4.2': resolution: {integrity: sha512-H6azIAM+OXD98yztIfs/KH5H4PU39t+SREhmM8LaNXyUrqj2mx+zVkr8MWYqjceSjDw9I1jawm1WdFqU806rMA==} - '@shikijs/themes@3.17.0': - resolution: {integrity: sha512-/xEizMHLBmMHwtx4JuOkRf3zwhWD2bmG5BRr0IPjpcWpaq4C3mYEuTk/USAEglN0qPrTwEHwKVpSu/y2jhferA==} + '@shikijs/themes@3.22.0': + resolution: {integrity: sha512-o+tlOKqsr6FE4+mYJG08tfCFDS+3CG20HbldXeVoyP+cYSUxDhrFf3GPjE60U55iOkkjbpY2uC3It/eeja35/g==} '@shikijs/themes@3.4.2': resolution: {integrity: sha512-qAEuAQh+brd8Jyej2UDDf+b4V2g1Rm8aBIdvt32XhDPrHvDkEnpb7Kzc9hSuHUxz0Iuflmq7elaDuQAP9bHIhg==} - '@shikijs/transformers@3.17.0': - resolution: {integrity: sha512-b14s8lPt/3K/PjtGgvdS4oU676Ke/ct9kdi6ksEb2rHzRVBAoWJeRwvDQcHASiiZbrDHlnnC8VnwL2Bw0T/nlw==} + '@shikijs/transformers@3.22.0': + resolution: {integrity: sha512-E7eRV7mwDBjueLF6852n2oYeJYxBq3NSsDk+uyruYAXONv4U8holGmIrT+mPRJQ1J1SNOH6L8G19KRzmBawrFw==} - '@shikijs/types@3.17.0': - resolution: {integrity: sha512-wjLVfutYWVUnxAjsWEob98xgyaGv0dTEnMZDruU5mRjVN7szcGOfgO+997W2yR6odp+1PtSBNeSITRRTfUzK/g==} + '@shikijs/types@3.22.0': + resolution: {integrity: sha512-491iAekgKDBFE67z70Ok5a8KBMsQ2IJwOWw3us/7ffQkIBCyOQfm/aNwVMBUriP02QshIfgHCBSIYAl3u2eWjg==} '@shikijs/types@3.4.2': resolution: {integrity: sha512-zHC1l7L+eQlDXLnxvM9R91Efh2V4+rN3oMVS2swCBssbj2U/FBwybD1eeLaq8yl/iwT+zih8iUbTBCgGZOYlVg==} @@ -1099,8 +1275,8 @@ packages: '@types/node@12.20.55': resolution: {integrity: sha512-J8xLz7q2OFulZ2cyGTLE1TbbZcjpno7FaN6zdJNrgAdrJ+DZzh/uFR6YrTb4C+nXakvud8Q4+rbhoIWlYQbUFQ==} - '@types/node@24.10.7': - resolution: {integrity: sha512-+054pVMzVTmRQV8BhpGv3UyfZ2Llgl8rdpDTon+cUH9+na0ncBVXj3wTUKh14+Kiz18ziM3b4ikpP5/Pc0rQEQ==} + '@types/node@24.10.10': + resolution: {integrity: sha512-+0/4J266CBGPUq/ELg7QUHhN25WYjE0wYTPSQJn1xeu8DOlIOPxXxrNGiLmfAWl7HMMgWFWXpt9IDjMWrF5Iow==} '@types/sinon@21.0.0': resolution: {integrity: 
sha512-+oHKZ0lTI+WVLxx1IbJDNmReQaIsQJjN2e7UUrJHEeByG7bFeKJYsv1E75JxTQ9QKJDp21bAa/0W2Xo4srsDnw==} @@ -1120,13 +1296,24 @@ packages: '@ungap/structured-clone@1.3.0': resolution: {integrity: sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g==} - '@vitejs/plugin-vue@6.0.1': - resolution: {integrity: sha512-+MaE752hU0wfPFJEUAIxqw18+20euHHdxVtMvbFcOEpjEyfqXH/5DCoTHiVJ0J29EhTJdoTkjEv5YBKU9dnoTw==} + '@vitejs/plugin-vue@6.0.4': + resolution: {integrity: sha512-uM5iXipgYIn13UUQCZNdWkYk+sysBeA97d5mHsAoAt1u/wpN3+zxOmsVJWosuzX+IMGRzeYUNytztrYznboIkQ==} engines: {node: ^20.19.0 || >=22.12.0} peerDependencies: - vite: ^5.0.0 || ^6.0.0 || ^7.0.0 + vite: ^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0-0 vue: ^3.2.25 + '@vitest/browser-playwright@4.0.14': + resolution: {integrity: sha512-rUvyz6wX6wDjcYzf/7fgXYfca2bAu0Axoq/v9LYdELzcBSS9UKjnZ7MaMY4UDP78HHHCdmdtceuSao1s51ON8A==} + peerDependencies: + playwright: '*' + vitest: 4.0.14 + + '@vitest/browser@4.0.14': + resolution: {integrity: sha512-vO0uqR8SnPTd8ykp14yaIuUyMZ9HEBYuoZrVdUp7RrEp76VEnkrX9fDkGnK0NyBdfWXB6cqp7BmqVekd8yKHFQ==} + peerDependencies: + vitest: 4.0.14 + '@vitest/coverage-v8@4.0.14': resolution: {integrity: sha512-EYHLqN/BY6b47qHH7gtMxAg++saoGmsjWmAq9MlXxAz4M0NcHh9iOyKhBZyU4yxZqOd8Xnqp80/5saeitz4Cng==} peerDependencies: @@ -1165,17 +1352,17 @@ packages: '@vitest/utils@4.0.14': resolution: {integrity: sha512-hLqXZKAWNg8pI+SQXyXxWCTOpA3MvsqcbVeNgSi8x/CSN2wi26dSzn1wrOhmCmFjEvN9p8/kLFRHa6PI8jHazw==} - '@vue/compiler-core@3.5.25': - resolution: {integrity: sha512-vay5/oQJdsNHmliWoZfHPoVZZRmnSWhug0BYT34njkYTPqClh3DNWLkZNJBVSjsNMrg0CCrBfoKkjZQPM/QVUw==} + '@vue/compiler-core@3.5.27': + resolution: {integrity: sha512-gnSBQjZA+//qDZen+6a2EdHqJ68Z7uybrMf3SPjEGgG4dicklwDVmMC1AeIHxtLVPT7sn6sH1KOO+tS6gwOUeQ==} - '@vue/compiler-dom@3.5.25': - resolution: {integrity: sha512-4We0OAcMZsKgYoGlMjzYvaoErltdFI2/25wqanuTu+S4gismOTRTBPi4IASOjxWdzIwrYSjnqONfKvuqkXzE2Q==} + '@vue/compiler-dom@3.5.27': + 
resolution: {integrity: sha512-oAFea8dZgCtVVVTEC7fv3T5CbZW9BxpFzGGxC79xakTr6ooeEqmRuvQydIiDAkglZEAd09LgVf1RoDnL54fu5w==} - '@vue/compiler-sfc@3.5.25': - resolution: {integrity: sha512-PUgKp2rn8fFsI++lF2sO7gwO2d9Yj57Utr5yEsDf3GNaQcowCLKL7sf+LvVFvtJDXUp/03+dC6f2+LCv5aK1ag==} + '@vue/compiler-sfc@3.5.27': + resolution: {integrity: sha512-sHZu9QyDPeDmN/MRoshhggVOWE5WlGFStKFwu8G52swATgSny27hJRWteKDSUUzUH+wp+bmeNbhJnEAel/auUQ==} - '@vue/compiler-ssr@3.5.25': - resolution: {integrity: sha512-ritPSKLBcParnsKYi+GNtbdbrIE1mtuFEJ4U1sWeuOMlIziK5GtOL85t5RhsNy4uWIXPgk+OUdpnXiTdzn8o3A==} + '@vue/compiler-ssr@3.5.27': + resolution: {integrity: sha512-Sj7h+JHt512fV1cTxKlYhg7qxBvack+BGncSpH+8vnN+KN95iPIcqB5rsbblX40XorP+ilO7VIKlkuu3Xq2vjw==} '@vue/devtools-api@8.0.5': resolution: {integrity: sha512-DgVcW8H/Nral7LgZEecYFFYXnAvGuN9C3L3DtWekAncFBedBczpNW8iHKExfaM559Zm8wQWrwtYZ9lXthEHtDw==} @@ -1186,22 +1373,22 @@ packages: '@vue/devtools-shared@8.0.5': resolution: {integrity: sha512-bRLn6/spxpmgLk+iwOrR29KrYnJjG9DGpHGkDFG82UM21ZpJ39ztUT9OXX3g+usW7/b2z+h46I9ZiYyB07XMXg==} - '@vue/reactivity@3.5.25': - resolution: {integrity: sha512-5xfAypCQepv4Jog1U4zn8cZIcbKKFka3AgWHEFQeK65OW+Ys4XybP6z2kKgws4YB43KGpqp5D/K3go2UPPunLA==} + '@vue/reactivity@3.5.27': + resolution: {integrity: sha512-vvorxn2KXfJ0nBEnj4GYshSgsyMNFnIQah/wczXlsNXt+ijhugmW+PpJ2cNPe4V6jpnBcs0MhCODKllWG+nvoQ==} - '@vue/runtime-core@3.5.25': - resolution: {integrity: sha512-Z751v203YWwYzy460bzsYQISDfPjHTl+6Zzwo/a3CsAf+0ccEjQ8c+0CdX1WsumRTHeywvyUFtW6KvNukT/smA==} + '@vue/runtime-core@3.5.27': + resolution: {integrity: sha512-fxVuX/fzgzeMPn/CLQecWeDIFNt3gQVhxM0rW02Tvp/YmZfXQgcTXlakq7IMutuZ/+Ogbn+K0oct9J3JZfyk3A==} - '@vue/runtime-dom@3.5.25': - resolution: {integrity: sha512-a4WrkYFbb19i9pjkz38zJBg8wa/rboNERq3+hRRb0dHiJh13c+6kAbgqCPfMaJ2gg4weWD3APZswASOfmKwamA==} + '@vue/runtime-dom@3.5.27': + resolution: {integrity: sha512-/QnLslQgYqSJ5aUmb5F0z0caZPGHRB8LEAQ1s81vHFM5CBfnun63rxhvE/scVb/j3TbBuoZwkJyiLCkBluMpeg==} - 
'@vue/server-renderer@3.5.25': - resolution: {integrity: sha512-UJaXR54vMG61i8XNIzTSf2Q7MOqZHpp8+x3XLGtE3+fL+nQd+k7O5+X3D/uWrnQXOdMw5VPih+Uremcw+u1woQ==} + '@vue/server-renderer@3.5.27': + resolution: {integrity: sha512-qOz/5thjeP1vAFc4+BY3Nr6wxyLhpeQgAE/8dDtKo6a6xdk+L4W46HDZgNmLOBUDEkFXV3G7pRiUqxjX0/2zWA==} peerDependencies: - vue: 3.5.25 + vue: 3.5.27 - '@vue/shared@3.5.25': - resolution: {integrity: sha512-AbOPdQQnAnzs58H2FrrDxYj/TJfmeS2jdfEEhgiKINy+bnOANmVizIEgq1r+C5zsbs6l1CCQxtcj71rwNQ4jWg==} + '@vue/shared@3.5.27': + resolution: {integrity: sha512-dXr/3CgqXsJkZ0n9F3I4elY8wM9jMJpP3pvRG52r6m0tu/MsAFIe6JpXVGeNMd/D9F4hQynWT8Rfuj0bdm9kFQ==} '@vueuse/core@14.1.0': resolution: {integrity: sha512-rgBinKs07hAYyPF834mDTigH7BtPqvZ3Pryuzt1SD/lg5wEcWqvwzXXYGEDb2/cP0Sj5zSvHl3WkmMELr5kfWw==} @@ -1350,9 +1537,9 @@ packages: resolution: {integrity: sha512-pbnl5XzGBdrFU/wT4jqmJVPn2B6UHPBOhzMQkY/SPUPB6QtUXtmBHBIwCbXJol93mOpGMnQyP/+BB19q04xj7g==} engines: {node: '>=4'} - better-sse@0.15.1: - resolution: {integrity: sha512-E8YmmIs7P2T5nHM8VG5Pmtp8CgDsIhT+6CYhg+8Gmke5kJb11ohN1jtEXn/2taSr7WkcU3yNTgoqNYvZ/GQ4sw==} - engines: {node: '>=20', pnpm: '>=9'} + better-sse@0.16.1: + resolution: {integrity: sha512-1gc5LUmdMX49Whq/XWOfNKBmMpZCaFLWk0Pt4A5p41T/6/sK1cV7Px/3ep84gwZs6Vz6V/tgozz6EscTT0lLoA==} + engines: {node: '>=20', pnpm: '>=10'} bidi-js@1.0.3: resolution: {integrity: sha512-RKshQI1R3YQ+n9YJz2QQ147P66ELpa1FQEg20Dk8oW9t2KgLbpDLLp9aGZ7y8WHSshDknG0bknqGw5/tyCs5tw==} @@ -1469,16 +1656,16 @@ packages: resolution: {integrity: sha512-0eW44TGN5SQXU1mWSkKwFstI/22X2bG1nYzZTYMAWjylYURhse752YgbE4Cx46AC+bAvI+/dYTPRk1LqSUnu6w==} engines: {node: ^10 || ^12.20.0 || ^14.13.0 || >=15.0.0} - cssstyle@5.3.3: - resolution: {integrity: sha512-OytmFH+13/QXONJcC75QNdMtKpceNk3u8ThBjyyYjkEcy/ekBwR1mMAuNvi3gdBPW3N5TlCzQ0WZw8H0lN/bDw==} + cssstyle@5.3.7: + resolution: {integrity: sha512-7D2EPVltRrsTkhpQmksIu+LxeWAIEk6wRDMJ1qljlv+CKHJM+cJLlfhWIzNA44eAsHXSNe3+vO6DW1yCYx8SuQ==} engines: {node: '>=20'} - 
csstype@3.1.3: - resolution: {integrity: sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==} + csstype@3.2.3: + resolution: {integrity: sha512-z1HGKcYy2xA8AGQfwrn0PAy+PB7X/GSj3UVJW9qKyn43xWa+gl5nXmU4qqLMRzWVLFC8KusUX8T/0kCiOYpAIQ==} - data-urls@6.0.0: - resolution: {integrity: sha512-BnBS08aLUM+DKamupXs3w2tJJoqU+AkaE/+6vQxi/G/DPmIZFJJp9Dkb1kM03AZx8ADehDUZgsNxju3mPXZYIA==} - engines: {node: '>=20'} + data-urls@7.0.0: + resolution: {integrity: sha512-23XHcCF+coGYevirZceTVD7NdJOqVn+49IHyxgszm+JIiHLoB2TkmPtsYkNWT1pvRSGkc35L6NHs0yHkN2SumA==} + engines: {node: ^20.19.0 || ^22.12.0 || >=24.0.0} debug@4.3.4: resolution: {integrity: sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==} @@ -1535,6 +1722,10 @@ packages: resolution: {integrity: sha512-aKstq2TDOndCn4diEyp9Uq/Flu2i1GlLkc6XIDQSDMuaFE3OPW5OphLCyQ5SpSJZTb4reN+kTcYru5yIfXoRPw==} engines: {node: '>=0.12'} + entities@7.0.1: + resolution: {integrity: sha512-TWrgLOFUQTH994YUyl1yT4uyavY5nNB5muff+RtWaqNVCAK408b5ZnnbNAUEWLTCpum9w6arT70i1XdQ4UeOPA==} + engines: {node: '>=0.12'} + error-stack-parser@2.1.4: resolution: {integrity: sha512-Sk5V6wVazPhq5MhpO+AUxJn5x7XSXGl1R93Vn7i+zS15KDVxQijejNCrz8340/2bgLBjR9GtEG8ZVKONDjcqGQ==} @@ -1546,6 +1737,11 @@ packages: engines: {node: '>=18'} hasBin: true + esbuild@0.27.2: + resolution: {integrity: sha512-HyNQImnsOC7X9PMNaCIeAm4ISCQXs5a5YasTXVliKv4uuBo1dKrG0A+uQS8M5eXjVMnLg3WgXaKvprHlFJQffw==} + engines: {node: '>=18'} + hasBin: true + escape-string-regexp@2.0.0: resolution: {integrity: sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==} engines: {node: '>=8'} @@ -1635,8 +1831,8 @@ packages: resolution: {integrity: sha512-+iwzCJ7C5v5KgcBuueqVoNiHVoQpwiUK5XFLjf0affFTep+Wcw93tPvmb8tqujDNmzhBDPddnWV/qgWSXgq+Hg==} engines: {node: '>=12'} - focus-trap@7.6.6: - resolution: {integrity: 
sha512-v/Z8bvMCajtx4mEXmOo7QEsIzlIOqRXTIwgUfsFOF9gEsespdbD0AkPIka1bSXZ8Y8oZ+2IVDQZePkTfEHZl7Q==} + focus-trap@7.8.0: + resolution: {integrity: sha512-/yNdlIkpWbM0ptxno3ONTuf+2g318kh2ez3KSeZN5dZ8YC6AAmgeWz+GasYYiBJPFaYcSAPeu4GfhUaChzIJXA==} form-data-encoder@2.1.4: resolution: {integrity: sha512-yDYSgNMraqvnxiEXO4hi88+YZxaHC6QKzb5N84iRCTDeRO7ZALpir/lVmf/uXUhnwUr2O4HU8s/n6x+yNjQkHw==} @@ -1650,6 +1846,11 @@ packages: resolution: {integrity: sha512-yhlQgA6mnOJUKOsRUFsgJdQCvkKhcz8tlZG5HBQfReYZy46OwLcY+Zia0mtdHsOo9y/hP+CxMN0TU9QxoOtG4g==} engines: {node: '>=6 <7 || >=8'} + fsevents@2.3.2: + resolution: {integrity: sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==} + engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0} + os: [darwin] + fsevents@2.3.3: resolution: {integrity: sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==} engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0} @@ -1698,12 +1899,9 @@ packages: hookable@5.5.3: resolution: {integrity: sha512-Yc+BQe8SvoXH1643Qez1zqLRmbA5rCL+sSmk6TVos0LWVfNIB7PGncdlId77WzLGSIB5KaWgTaNTs2lNVEI6VQ==} - htm@3.1.1: - resolution: {integrity: sha512-983Vyg8NwUE7JkZ6NmOqpCZ+sh1bKv2iYTlUkzlWmA5JD2acKoxd4KVxbMmxX/85mtfdnDmTFoNKcg5DGAvxNQ==} - - html-encoding-sniffer@4.0.0: - resolution: {integrity: sha512-Y22oTqIU4uuPgEemfz7NDJz6OeKf12Lsu+QC+s3BVpda64lTiMYCyGwg5ki4vFxkMwQdeZDl2adZoqUgdFuTgQ==} - engines: {node: '>=18'} + html-encoding-sniffer@6.0.0: + resolution: {integrity: sha512-CV9TW3Y3f8/wT0BRFc1/KAVQ3TUHiXmaAb6VW9vtiMFf7SLoMd1PdAc4W3KFOFETBJUb90KatHqlsZMWV+R9Gg==} + engines: {node: ^20.19.0 || ^22.12.0 || >=24.0.0} html-escaper@2.0.2: resolution: {integrity: sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==} @@ -1738,10 +1936,6 @@ packages: resolution: {integrity: sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==} engines: {node: '>=0.10.0'} - 
iconv-lite@0.6.3: - resolution: {integrity: sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==} - engines: {node: '>=0.10.0'} - ieee754@1.2.1: resolution: {integrity: sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==} @@ -1828,8 +2022,8 @@ packages: resolution: {integrity: sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==} hasBin: true - jsdom@27.2.0: - resolution: {integrity: sha512-454TI39PeRDW1LgpyLPyURtB4Zx1tklSr6+OFOipsxGUH1WMTvk6C65JQdrj455+DP2uJ1+veBEHTGFKWVLFoA==} + jsdom@28.0.0: + resolution: {integrity: sha512-KDYJgZ6T2TKdU8yBfYueq5EPG/EylMsBvCaenWMJb2OXmjgczzwveRCoJ+Hgj1lXPDyasvrgneSn4GBuR1hYyA==} engines: {node: ^20.19.0 || ^22.12.0 || >=24.0.0} peerDependencies: canvas: ^3.0.0 @@ -1868,8 +2062,8 @@ packages: resolution: {integrity: sha512-ozCC6gdQ+glXOQsveKD0YsDy8DSQFjDTz4zyzEHNV5+JP5D62LmfDZ6o1cycFx9ouG940M5dE8C8CTewdj2YWQ==} engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} - lru-cache@11.2.2: - resolution: {integrity: sha512-F9ODfyqML2coTIsQpSkRHnLSZMtkU8Q+mSfcaIyKwy58u+8k5nvAYeiNhsyMARvzNcXJ9QfWVrcPsC9e9rAxtg==} + lru-cache@11.2.5: + resolution: {integrity: sha512-vFrFJkWtJvJnD5hg+hJvVE8Lh/TcMzKnTgCWmtBipwI5yLX/iX+5UB2tfuyODF5E7k9xEzMdYgGqaSb1c0c5Yw==} engines: {node: 20 || >=22} lru-cache@6.0.0: @@ -1975,6 +2169,10 @@ packages: resolution: {integrity: sha512-tzzskb3bG8LvYGFF/mDTpq3jpI6Q9wc3LEmBaghu+DdCssd1FakN7Bc0hVNmEyGq1bq3RgfkCb3cmQLpNPOroA==} engines: {node: '>=4'} + mrmime@2.0.1: + resolution: {integrity: sha512-Y3wQdFg2Va6etvQ5I82yUhGdsKrcYox6p7FfL1LbK2J4V01F9TGlepTIhnK24t7koZibmg82KGglhA1XK5IsLQ==} + engines: {node: '>=10'} + ms@2.1.2: resolution: {integrity: sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==} @@ -2102,6 +2300,24 @@ packages: piscina@4.9.2: resolution: {integrity: 
sha512-Fq0FERJWFEUpB4eSY59wSNwXD4RYqR+nR/WiEVcZW8IWfVBxJJafcgTEZDQo8k3w0sUarJ8RyVbbUF4GQ2LGbQ==} + pixelmatch@7.1.0: + resolution: {integrity: sha512-1wrVzJ2STrpmONHKBy228LM1b84msXDUoAzVEl0R8Mz4Ce6EPr+IVtxm8+yvrqLYMHswREkjYFaMxnyGnaY3Ng==} + hasBin: true + + playwright-core@1.58.0: + resolution: {integrity: sha512-aaoB1RWrdNi3//rOeKuMiS65UCcgOVljU46At6eFcOFPFHWtd2weHRRow6z/n+Lec0Lvu0k9ZPKJSjPugikirw==} + engines: {node: '>=18'} + hasBin: true + + playwright@1.58.0: + resolution: {integrity: sha512-2SVA0sbPktiIY/MCOPX8e86ehA/e+tDNq+e5Y8qjKYti2Z/JG7xnronT/TXTIkKbYGWlCbuucZ6dziEgkoEjQQ==} + engines: {node: '>=18'} + hasBin: true + + pngjs@7.0.0: + resolution: {integrity: sha512-LKWqWJRhstyYo9pGvgor/ivk2w94eSjE3RGVuzLGlr3NmD8bf7RcYGze1mNdEHRP6TRP6rMuDHk5t44hnTRyow==} + engines: {node: '>=14.19.0'} + postcss@8.5.6: resolution: {integrity: sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==} engines: {node: ^10 || ^12 || >=14} @@ -2231,8 +2447,8 @@ packages: resolution: {integrity: sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==} engines: {node: '>=8'} - shiki@3.17.0: - resolution: {integrity: sha512-lUZfWsyW7czITYTdo/Tb6ZM4VfyXlzmKYBQBjTz+pBzPPkP08RgIt00Ls1Z50Cl3SfwJsue6WbJeF3UgqLVI9Q==} + shiki@3.22.0: + resolution: {integrity: sha512-LBnhsoYEe0Eou4e1VgJACes+O6S6QC0w71fCSp5Oya79inkwkm15gQ1UF6VtQ8j/taMDh79hAB49WUk8ALQW3g==} siginfo@2.0.0: resolution: {integrity: sha512-ybx0WO1/8bSBLEWXZvEd7gMW3Sn3JFlW3TvX1nREbDLRNQNaeNN8WK0meBwPdAaOI7TtRRRJn/Es1zhrrCHu7g==} @@ -2251,6 +2467,10 @@ packages: sinon@21.0.0: resolution: {integrity: sha512-TOgRcwFPbfGtpqvZw+hyqJDvqfapr1qUlOizROIk4bBLjlsjlB00Pg6wMFXNtJRpu+eCZuVOaLatG7M8105kAw==} + sirv@3.0.2: + resolution: {integrity: sha512-2wcC/oGxHis/BoHkkPwldgiPSYcpZK3JU28WoMVv55yHJgcZ8rlXvuG9iZggz+sU1d4bRgIGASwyWqjxu3FM0g==} + engines: {node: '>=18'} + slash@3.0.0: resolution: {integrity: 
sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==} engines: {node: '>=8'} @@ -2337,8 +2557,8 @@ packages: symbol-tree@3.2.4: resolution: {integrity: sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw==} - tabbable@6.3.0: - resolution: {integrity: sha512-EIHvdY5bPLuWForiR/AN2Bxngzpuwn1is4asboytXtpTgsArc+WmSJKVLlhdh71u7jFcryDqB2A8lQvj78MkyQ==} + tabbable@6.4.0: + resolution: {integrity: sha512-05PUHKSNE8ou2dwIxTngl4EzcnsCDZGJ/iCLtDflR/SHB/ny14rXc+qU5P4mG9JkusiV7EivzY9Mhm55AzAvCg==} tar-stream@3.1.7: resolution: {integrity: sha512-qJj60CXt7IU1Ffyc3NJMjh6EkuCFej46zUqJ4J7pqYlThyd9bO0XBTmcOIhSzZJVWfsLks0+nle/j538YAW9RQ==} @@ -2386,6 +2606,10 @@ packages: resolution: {integrity: sha512-lbDrTLVsHhOMljPscd0yitpozq7Ga2M5Cvez5AjGg8GASBjtt6iERCAJ93yommPmz62fb45oFIXHEZ3u9bfJEA==} engines: {node: '>=14.16'} + totalist@3.0.1: + resolution: {integrity: sha512-sf4i37nQ2LBx4m3wB74y+ubopq6W/dIzXg0FDGjsYnZHVa1Da8FH853wlL2gtUhg+xJXjfk3kUZS3BRoQeoQBQ==} + engines: {node: '>=6'} + touch@3.1.1: resolution: {integrity: sha512-r0eojU4bI8MnHr8c5bNo7lJDdI2qXlWWJk6a9EAFG7vbhTjElYhBVS3/miuE0uOuoLdb8Mc/rVfsmm6eo5o9GA==} hasBin: true @@ -2486,6 +2710,10 @@ packages: resolution: {integrity: sha512-72RFADWFqKmUb2hmmvNODKL3p9hcB6Gt2DOQMis1SEBaV6a4MH8soBvzg+95CYhCKPFedut2JY9bMfrDl9D23g==} engines: {node: '>=14.0'} + undici@7.20.0: + resolution: {integrity: sha512-MJZrkjyd7DeC+uPZh+5/YaMDxFiiEEaDgbUSVMXayofAkDWF1088CDo+2RPg7B1BuS1qf1vgNE7xqwPxE0DuSQ==} + engines: {node: '>=20.18.1'} + unist-util-is@6.0.0: resolution: {integrity: sha512-2qCTHimwdxLfz+YzdGfkqNlH0tLi9xjTnHddPmJwtIG9MGsdbutfTc4P+haPD7l7Cjxf/WZj+we5qfVPvvxfYw==} @@ -2555,8 +2783,8 @@ packages: yaml: optional: true - vite@7.2.4: - resolution: {integrity: sha512-NL8jTlbo0Tn4dUEXEsUg8KeyG/Lkmc4Fnzb8JXN/Ykm9G4HNImjtABMJgkQoVjOBN/j2WAwDTRytdqJbZsah7w==} + vite@7.3.1: + resolution: {integrity: 
sha512-w+N7Hifpc3gRjZ63vYBXA56dvvRlNWRczTdmCBBa+CotUzAPf5b7YMdMR/8CQoeYE5LX3W4wj6RYTgonm1b9DA==} engines: {node: ^20.19.0 || >=22.12.0} hasBin: true peerDependencies: @@ -2595,8 +2823,8 @@ packages: yaml: optional: true - vitepress@2.0.0-alpha.15: - resolution: {integrity: sha512-jhjSYd10Z6RZiKOa7jy0xMVf5NB5oSc/lS3bD/QoUc6V8PrvQR5JhC9104NEt6+oTGY/ftieVWxY9v7YI+1IjA==} + vitepress@2.0.0-alpha.16: + resolution: {integrity: sha512-w1nwsefDVIsje7BZr2tsKxkZutDGjG0YoQ2yxO7+a9tvYVqfljYbwj5LMYkPy8Tb7YbPwa22HtIhk62jbrvuEQ==} hasBin: true peerDependencies: markdown-it-mathjax3: ^4 @@ -2644,8 +2872,8 @@ packages: jsdom: optional: true - vue@3.5.25: - resolution: {integrity: sha512-YLVdgv2K13WJ6n+kD5owehKtEXwdwXuj2TTyJMsO7pSeKw2bfRNZGjhB7YzrpbMYj5b5QsUebHpOqR3R3ziy/g==} + vue@3.5.27: + resolution: {integrity: sha512-aJ/UtoEyFySPBGarREmN4z6qNKpbEguYHMmXSiOGk69czc+zhs0NF6tEFrY8TZKAl8N/LYAkd4JHVd5E/AsSmw==} peerDependencies: typescript: '*' peerDependenciesMeta: @@ -2656,22 +2884,18 @@ packages: resolution: {integrity: sha512-o8qghlI8NZHU1lLPrpi2+Uq7abh4GGPpYANlalzWxyWteJOCsr/P+oPBA49TOLu5FTZO4d3F9MnWJfiMo4BkmA==} engines: {node: '>=18'} - webidl-conversions@8.0.0: - resolution: {integrity: sha512-n4W4YFyz5JzOfQeA8oN7dUYpR+MBP3PIUsn2jLjWXwK5ASUzt0Jc/A5sAUZoCYFJRGF0FBKJ+1JjN43rNdsQzA==} + webidl-conversions@8.0.1: + resolution: {integrity: sha512-BMhLD/Sw+GbJC21C/UgyaZX41nPt8bUTg+jWyDeg7e7YN4xOM05YPSIXceACnXVtqyEw/LMClUQMtMZ+PGGpqQ==} engines: {node: '>=20'} - whatwg-encoding@3.1.1: - resolution: {integrity: sha512-6qN4hJdMwfYBtE3YBTTHhoeuUrDBPZmbQaxWAqSALV/MeEnR5z1xd8UKud2RAkFoPkmB+hli1TZSnyi84xz1vQ==} - engines: {node: '>=18'} - - whatwg-mimetype@4.0.0: - resolution: {integrity: sha512-QaKxh0eNIi2mE9p2vEdzfagOKHCcj1pJ56EEHGQOVxp8r9/iszLUUV7v89x9O1p/T+NlTM5W7jW6+cz4Fq1YVg==} - engines: {node: '>=18'} - - whatwg-url@15.1.0: - resolution: {integrity: sha512-2ytDk0kiEj/yu90JOAp44PVPUkO9+jVhyf+SybKlRHSDlvOOZhdPIrr7xTH64l4WixO2cP+wQIcgujkGBPPz6g==} + whatwg-mimetype@5.0.0: + 
resolution: {integrity: sha512-sXcNcHOC51uPGF0P/D4NVtrkjSU2fNsm9iog4ZvZJsL3rjoDAzXZhkm2MWt1y+PUdggKAYVoMAIYcs78wJ51Cw==} engines: {node: '>=20'} + whatwg-url@16.0.0: + resolution: {integrity: sha512-9CcxtEKsf53UFwkSUZjG+9vydAsFO4lFHBpJUtjBcoJOCJpKnSJNwCw813zrYJHpCJ7sgfbtOe0V5Ku7Pa1XMQ==} + engines: {node: ^20.19.0 || ^22.12.0 || >=24.0.0} + which@2.0.2: resolution: {integrity: sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==} engines: {node: '>= 8'} @@ -2723,7 +2947,7 @@ packages: snapshots: - '@acemir/cssom@0.9.24': {} + '@acemir/cssom@0.9.31': {} '@actions/core@1.10.1': dependencies: @@ -2735,21 +2959,21 @@ snapshots: tunnel: 0.0.6 undici: 5.28.4 - '@asamuzakjp/css-color@4.1.0': + '@asamuzakjp/css-color@4.1.1': dependencies: '@csstools/css-calc': 2.1.4(@csstools/css-parser-algorithms@3.0.5(@csstools/css-tokenizer@3.0.4))(@csstools/css-tokenizer@3.0.4) '@csstools/css-color-parser': 3.1.0(@csstools/css-parser-algorithms@3.0.5(@csstools/css-tokenizer@3.0.4))(@csstools/css-tokenizer@3.0.4) '@csstools/css-parser-algorithms': 3.0.5(@csstools/css-tokenizer@3.0.4) '@csstools/css-tokenizer': 3.0.4 - lru-cache: 11.2.2 + lru-cache: 11.2.5 - '@asamuzakjp/dom-selector@6.7.4': + '@asamuzakjp/dom-selector@6.7.7': dependencies: '@asamuzakjp/nwsapi': 2.3.9 bidi-js: 1.0.3 css-tree: 3.1.0 is-potential-custom-element-name: 1.0.1 - lru-cache: 11.2.2 + lru-cache: 11.2.5 '@asamuzakjp/nwsapi@2.3.9': {} @@ -2932,91 +3156,171 @@ snapshots: dependencies: '@csstools/css-tokenizer': 3.0.4 - '@csstools/css-syntax-patches-for-csstree@1.0.19': {} + '@csstools/css-syntax-patches-for-csstree@1.0.26': {} '@csstools/css-tokenizer@3.0.4': {} - '@docsearch/css@4.3.2': {} + '@docsearch/css@4.5.3': {} - '@docsearch/js@4.3.2': - dependencies: - htm: 3.1.1 + '@docsearch/js@4.5.3': {} + + '@docsearch/sidepanel-js@4.5.3': {} '@esbuild/aix-ppc64@0.25.4': optional: true + '@esbuild/aix-ppc64@0.27.2': + optional: true + '@esbuild/android-arm64@0.25.4': optional: 
true + '@esbuild/android-arm64@0.27.2': + optional: true + '@esbuild/android-arm@0.25.4': optional: true + '@esbuild/android-arm@0.27.2': + optional: true + '@esbuild/android-x64@0.25.4': optional: true + '@esbuild/android-x64@0.27.2': + optional: true + '@esbuild/darwin-arm64@0.25.4': optional: true + '@esbuild/darwin-arm64@0.27.2': + optional: true + '@esbuild/darwin-x64@0.25.4': optional: true + '@esbuild/darwin-x64@0.27.2': + optional: true + '@esbuild/freebsd-arm64@0.25.4': optional: true + '@esbuild/freebsd-arm64@0.27.2': + optional: true + '@esbuild/freebsd-x64@0.25.4': optional: true + '@esbuild/freebsd-x64@0.27.2': + optional: true + '@esbuild/linux-arm64@0.25.4': optional: true + '@esbuild/linux-arm64@0.27.2': + optional: true + '@esbuild/linux-arm@0.25.4': optional: true + '@esbuild/linux-arm@0.27.2': + optional: true + '@esbuild/linux-ia32@0.25.4': optional: true + '@esbuild/linux-ia32@0.27.2': + optional: true + '@esbuild/linux-loong64@0.25.4': optional: true + '@esbuild/linux-loong64@0.27.2': + optional: true + '@esbuild/linux-mips64el@0.25.4': optional: true + '@esbuild/linux-mips64el@0.27.2': + optional: true + '@esbuild/linux-ppc64@0.25.4': optional: true + '@esbuild/linux-ppc64@0.27.2': + optional: true + '@esbuild/linux-riscv64@0.25.4': optional: true + '@esbuild/linux-riscv64@0.27.2': + optional: true + '@esbuild/linux-s390x@0.25.4': optional: true + '@esbuild/linux-s390x@0.27.2': + optional: true + '@esbuild/linux-x64@0.25.4': optional: true + '@esbuild/linux-x64@0.27.2': + optional: true + '@esbuild/netbsd-arm64@0.25.4': optional: true + '@esbuild/netbsd-arm64@0.27.2': + optional: true + '@esbuild/netbsd-x64@0.25.4': optional: true + '@esbuild/netbsd-x64@0.27.2': + optional: true + '@esbuild/openbsd-arm64@0.25.4': optional: true + '@esbuild/openbsd-arm64@0.27.2': + optional: true + '@esbuild/openbsd-x64@0.25.4': optional: true + '@esbuild/openbsd-x64@0.27.2': + optional: true + + '@esbuild/openharmony-arm64@0.27.2': + optional: true + 
'@esbuild/sunos-x64@0.25.4': optional: true + '@esbuild/sunos-x64@0.27.2': + optional: true + '@esbuild/win32-arm64@0.25.4': optional: true + '@esbuild/win32-arm64@0.27.2': + optional: true + '@esbuild/win32-ia32@0.25.4': optional: true + '@esbuild/win32-ia32@0.27.2': + optional: true + '@esbuild/win32-x64@0.25.4': optional: true + '@esbuild/win32-x64@0.27.2': + optional: true + + '@exodus/bytes@1.11.0': {} + '@fastify/busboy@2.1.1': {} '@gerrit0/mini-shiki@3.4.2': @@ -3200,7 +3504,7 @@ snapshots: '@hapi/bourne': 3.0.0 '@hapi/hoek': 11.0.7 - '@iconify-json/simple-icons@1.2.60': + '@iconify-json/simple-icons@1.2.69': dependencies: '@iconify/types': 2.0.0 @@ -3311,7 +3615,9 @@ snapshots: '@nodelib/fs.scandir': 2.1.5 fastq: 1.15.0 - '@rolldown/pluginutils@1.0.0-beta.29': {} + '@polka/url@1.0.0-next.29': {} + + '@rolldown/pluginutils@1.0.0-rc.2': {} '@rollup/rollup-android-arm-eabi@4.41.1': optional: true @@ -3441,22 +3747,22 @@ snapshots: '@sec-ant/readable-stream@0.4.1': {} - '@shikijs/core@3.17.0': + '@shikijs/core@3.22.0': dependencies: - '@shikijs/types': 3.17.0 + '@shikijs/types': 3.22.0 '@shikijs/vscode-textmate': 10.0.2 '@types/hast': 3.0.4 hast-util-to-html: 9.0.5 - '@shikijs/engine-javascript@3.17.0': + '@shikijs/engine-javascript@3.22.0': dependencies: - '@shikijs/types': 3.17.0 + '@shikijs/types': 3.22.0 '@shikijs/vscode-textmate': 10.0.2 oniguruma-to-es: 4.3.4 - '@shikijs/engine-oniguruma@3.17.0': + '@shikijs/engine-oniguruma@3.22.0': dependencies: - '@shikijs/types': 3.17.0 + '@shikijs/types': 3.22.0 '@shikijs/vscode-textmate': 10.0.2 '@shikijs/engine-oniguruma@3.4.2': @@ -3464,28 +3770,28 @@ snapshots: '@shikijs/types': 3.4.2 '@shikijs/vscode-textmate': 10.0.2 - '@shikijs/langs@3.17.0': + '@shikijs/langs@3.22.0': dependencies: - '@shikijs/types': 3.17.0 + '@shikijs/types': 3.22.0 '@shikijs/langs@3.4.2': dependencies: '@shikijs/types': 3.4.2 - '@shikijs/themes@3.17.0': + '@shikijs/themes@3.22.0': dependencies: - '@shikijs/types': 3.17.0 + 
'@shikijs/types': 3.22.0 '@shikijs/themes@3.4.2': dependencies: '@shikijs/types': 3.4.2 - '@shikijs/transformers@3.17.0': + '@shikijs/transformers@3.22.0': dependencies: - '@shikijs/core': 3.17.0 - '@shikijs/types': 3.17.0 + '@shikijs/core': 3.22.0 + '@shikijs/types': 3.22.0 - '@shikijs/types@3.17.0': + '@shikijs/types@3.22.0': dependencies: '@shikijs/vscode-textmate': 10.0.2 '@types/hast': 3.0.4 @@ -3604,7 +3910,7 @@ snapshots: '@types/jsdom@27.0.0': dependencies: - '@types/node': 24.10.7 + '@types/node': 24.10.10 '@types/tough-cookie': 4.0.5 parse5: 7.3.0 @@ -3623,7 +3929,7 @@ snapshots: '@types/node@12.20.55': {} - '@types/node@24.10.7': + '@types/node@24.10.10': dependencies: undici-types: 7.16.0 @@ -3641,13 +3947,43 @@ snapshots: '@ungap/structured-clone@1.3.0': {} - '@vitejs/plugin-vue@6.0.1(vite@7.2.4(@types/node@24.10.7)(tsx@4.19.4)(yaml@2.8.0))(vue@3.5.25(typescript@5.8.3))': + '@vitejs/plugin-vue@6.0.4(vite@7.3.1(@types/node@24.10.10)(tsx@4.19.4)(yaml@2.8.0))(vue@3.5.27(typescript@5.8.3))': dependencies: - '@rolldown/pluginutils': 1.0.0-beta.29 - vite: 7.2.4(@types/node@24.10.7)(tsx@4.19.4)(yaml@2.8.0) - vue: 3.5.25(typescript@5.8.3) + '@rolldown/pluginutils': 1.0.0-rc.2 + vite: 7.3.1(@types/node@24.10.10)(tsx@4.19.4)(yaml@2.8.0) + vue: 3.5.27(typescript@5.8.3) + + '@vitest/browser-playwright@4.0.14(playwright@1.58.0)(vite@7.0.6(@types/node@24.10.10)(tsx@4.19.4)(yaml@2.8.0))(vitest@4.0.14)': + dependencies: + '@vitest/browser': 4.0.14(vite@7.0.6(@types/node@24.10.10)(tsx@4.19.4)(yaml@2.8.0))(vitest@4.0.14) + '@vitest/mocker': 4.0.14(vite@7.0.6(@types/node@24.10.10)(tsx@4.19.4)(yaml@2.8.0)) + playwright: 1.58.0 + tinyrainbow: 3.0.3 + vitest: 4.0.14(@types/node@24.10.10)(@vitest/browser-playwright@4.0.14)(jsdom@28.0.0)(tsx@4.19.4)(yaml@2.8.0) + transitivePeerDependencies: + - bufferutil + - msw + - utf-8-validate + - vite + + '@vitest/browser@4.0.14(vite@7.0.6(@types/node@24.10.10)(tsx@4.19.4)(yaml@2.8.0))(vitest@4.0.14)': + dependencies: + 
'@vitest/mocker': 4.0.14(vite@7.0.6(@types/node@24.10.10)(tsx@4.19.4)(yaml@2.8.0)) + '@vitest/utils': 4.0.14 + magic-string: 0.30.21 + pixelmatch: 7.1.0 + pngjs: 7.0.0 + sirv: 3.0.2 + tinyrainbow: 3.0.3 + vitest: 4.0.14(@types/node@24.10.10)(@vitest/browser-playwright@4.0.14)(jsdom@28.0.0)(tsx@4.19.4)(yaml@2.8.0) + ws: 8.18.3 + transitivePeerDependencies: + - bufferutil + - msw + - utf-8-validate + - vite - '@vitest/coverage-v8@4.0.14(vitest@4.0.14(@types/node@24.10.7)(jsdom@27.2.0)(tsx@4.19.4)(yaml@2.8.0))': + '@vitest/coverage-v8@4.0.14(@vitest/browser@4.0.14(vite@7.0.6(@types/node@24.10.10)(tsx@4.19.4)(yaml@2.8.0))(vitest@4.0.14))(vitest@4.0.14)': dependencies: '@bcoe/v8-coverage': 1.0.2 '@vitest/utils': 4.0.14 @@ -3660,7 +3996,9 @@ snapshots: obug: 2.1.1 std-env: 3.10.0 tinyrainbow: 3.0.3 - vitest: 4.0.14(@types/node@24.10.7)(jsdom@27.2.0)(tsx@4.19.4)(yaml@2.8.0) + vitest: 4.0.14(@types/node@24.10.10)(@vitest/browser-playwright@4.0.14)(jsdom@28.0.0)(tsx@4.19.4)(yaml@2.8.0) + optionalDependencies: + '@vitest/browser': 4.0.14(vite@7.0.6(@types/node@24.10.10)(tsx@4.19.4)(yaml@2.8.0))(vitest@4.0.14) transitivePeerDependencies: - supports-color @@ -3673,13 +4011,13 @@ snapshots: chai: 6.2.1 tinyrainbow: 3.0.3 - '@vitest/mocker@4.0.14(vite@7.0.6(@types/node@24.10.7)(tsx@4.19.4)(yaml@2.8.0))': + '@vitest/mocker@4.0.14(vite@7.0.6(@types/node@24.10.10)(tsx@4.19.4)(yaml@2.8.0))': dependencies: '@vitest/spy': 4.0.14 estree-walker: 3.0.3 magic-string: 0.30.21 optionalDependencies: - vite: 7.0.6(@types/node@24.10.7)(tsx@4.19.4)(yaml@2.8.0) + vite: 7.0.6(@types/node@24.10.10)(tsx@4.19.4)(yaml@2.8.0) '@vitest/pretty-format@4.0.14': dependencies: @@ -3703,35 +4041,35 @@ snapshots: '@vitest/pretty-format': 4.0.14 tinyrainbow: 3.0.3 - '@vue/compiler-core@3.5.25': + '@vue/compiler-core@3.5.27': dependencies: '@babel/parser': 7.28.5 - '@vue/shared': 3.5.25 - entities: 4.5.0 + '@vue/shared': 3.5.27 + entities: 7.0.1 estree-walker: 2.0.2 source-map-js: 1.2.1 - 
'@vue/compiler-dom@3.5.25': + '@vue/compiler-dom@3.5.27': dependencies: - '@vue/compiler-core': 3.5.25 - '@vue/shared': 3.5.25 + '@vue/compiler-core': 3.5.27 + '@vue/shared': 3.5.27 - '@vue/compiler-sfc@3.5.25': + '@vue/compiler-sfc@3.5.27': dependencies: '@babel/parser': 7.28.5 - '@vue/compiler-core': 3.5.25 - '@vue/compiler-dom': 3.5.25 - '@vue/compiler-ssr': 3.5.25 - '@vue/shared': 3.5.25 + '@vue/compiler-core': 3.5.27 + '@vue/compiler-dom': 3.5.27 + '@vue/compiler-ssr': 3.5.27 + '@vue/shared': 3.5.27 estree-walker: 2.0.2 magic-string: 0.30.21 postcss: 8.5.6 source-map-js: 1.2.1 - '@vue/compiler-ssr@3.5.25': + '@vue/compiler-ssr@3.5.27': dependencies: - '@vue/compiler-dom': 3.5.25 - '@vue/shared': 3.5.25 + '@vue/compiler-dom': 3.5.27 + '@vue/shared': 3.5.27 '@vue/devtools-api@8.0.5': dependencies: @@ -3751,50 +4089,50 @@ snapshots: dependencies: rfdc: 1.4.1 - '@vue/reactivity@3.5.25': + '@vue/reactivity@3.5.27': dependencies: - '@vue/shared': 3.5.25 + '@vue/shared': 3.5.27 - '@vue/runtime-core@3.5.25': + '@vue/runtime-core@3.5.27': dependencies: - '@vue/reactivity': 3.5.25 - '@vue/shared': 3.5.25 + '@vue/reactivity': 3.5.27 + '@vue/shared': 3.5.27 - '@vue/runtime-dom@3.5.25': + '@vue/runtime-dom@3.5.27': dependencies: - '@vue/reactivity': 3.5.25 - '@vue/runtime-core': 3.5.25 - '@vue/shared': 3.5.25 - csstype: 3.1.3 + '@vue/reactivity': 3.5.27 + '@vue/runtime-core': 3.5.27 + '@vue/shared': 3.5.27 + csstype: 3.2.3 - '@vue/server-renderer@3.5.25(vue@3.5.25(typescript@5.8.3))': + '@vue/server-renderer@3.5.27(vue@3.5.27(typescript@5.8.3))': dependencies: - '@vue/compiler-ssr': 3.5.25 - '@vue/shared': 3.5.25 - vue: 3.5.25(typescript@5.8.3) + '@vue/compiler-ssr': 3.5.27 + '@vue/shared': 3.5.27 + vue: 3.5.27(typescript@5.8.3) - '@vue/shared@3.5.25': {} + '@vue/shared@3.5.27': {} - '@vueuse/core@14.1.0(vue@3.5.25(typescript@5.8.3))': + '@vueuse/core@14.1.0(vue@3.5.27(typescript@5.8.3))': dependencies: '@types/web-bluetooth': 0.0.21 '@vueuse/metadata': 14.1.0 - 
'@vueuse/shared': 14.1.0(vue@3.5.25(typescript@5.8.3)) - vue: 3.5.25(typescript@5.8.3) + '@vueuse/shared': 14.1.0(vue@3.5.27(typescript@5.8.3)) + vue: 3.5.27(typescript@5.8.3) - '@vueuse/integrations@14.1.0(focus-trap@7.6.6)(vue@3.5.25(typescript@5.8.3))': + '@vueuse/integrations@14.1.0(focus-trap@7.8.0)(vue@3.5.27(typescript@5.8.3))': dependencies: - '@vueuse/core': 14.1.0(vue@3.5.25(typescript@5.8.3)) - '@vueuse/shared': 14.1.0(vue@3.5.25(typescript@5.8.3)) - vue: 3.5.25(typescript@5.8.3) + '@vueuse/core': 14.1.0(vue@3.5.27(typescript@5.8.3)) + '@vueuse/shared': 14.1.0(vue@3.5.27(typescript@5.8.3)) + vue: 3.5.27(typescript@5.8.3) optionalDependencies: - focus-trap: 7.6.6 + focus-trap: 7.8.0 '@vueuse/metadata@14.1.0': {} - '@vueuse/shared@14.1.0(vue@3.5.25(typescript@5.8.3))': + '@vueuse/shared@14.1.0(vue@3.5.27(typescript@5.8.3))': dependencies: - vue: 3.5.25(typescript@5.8.3) + vue: 3.5.27(typescript@5.8.3) '@xhmikosr/archive-type@7.0.0': dependencies: @@ -3908,7 +4246,7 @@ snapshots: dependencies: is-windows: 1.0.2 - better-sse@0.15.1: {} + better-sse@0.16.1: {} bidi-js@1.0.3: dependencies: @@ -4033,18 +4371,21 @@ snapshots: mdn-data: 2.12.2 source-map-js: 1.2.1 - cssstyle@5.3.3: + cssstyle@5.3.7: dependencies: - '@asamuzakjp/css-color': 4.1.0 - '@csstools/css-syntax-patches-for-csstree': 1.0.19 + '@asamuzakjp/css-color': 4.1.1 + '@csstools/css-syntax-patches-for-csstree': 1.0.26 css-tree: 3.1.0 + lru-cache: 11.2.5 - csstype@3.1.3: {} + csstype@3.2.3: {} - data-urls@6.0.0: + data-urls@7.0.0: dependencies: - whatwg-mimetype: 4.0.0 - whatwg-url: 15.1.0 + whatwg-mimetype: 5.0.0 + whatwg-url: 16.0.0 + transitivePeerDependencies: + - '@noble/hashes' debug@4.3.4(supports-color@5.5.0): dependencies: @@ -4085,6 +4426,8 @@ snapshots: entities@6.0.0: {} + entities@7.0.1: {} + error-stack-parser@2.1.4: dependencies: stackframe: 1.3.4 @@ -4119,6 +4462,35 @@ snapshots: '@esbuild/win32-ia32': 0.25.4 '@esbuild/win32-x64': 0.25.4 + esbuild@0.27.2: + optionalDependencies: + 
'@esbuild/aix-ppc64': 0.27.2 + '@esbuild/android-arm': 0.27.2 + '@esbuild/android-arm64': 0.27.2 + '@esbuild/android-x64': 0.27.2 + '@esbuild/darwin-arm64': 0.27.2 + '@esbuild/darwin-x64': 0.27.2 + '@esbuild/freebsd-arm64': 0.27.2 + '@esbuild/freebsd-x64': 0.27.2 + '@esbuild/linux-arm': 0.27.2 + '@esbuild/linux-arm64': 0.27.2 + '@esbuild/linux-ia32': 0.27.2 + '@esbuild/linux-loong64': 0.27.2 + '@esbuild/linux-mips64el': 0.27.2 + '@esbuild/linux-ppc64': 0.27.2 + '@esbuild/linux-riscv64': 0.27.2 + '@esbuild/linux-s390x': 0.27.2 + '@esbuild/linux-x64': 0.27.2 + '@esbuild/netbsd-arm64': 0.27.2 + '@esbuild/netbsd-x64': 0.27.2 + '@esbuild/openbsd-arm64': 0.27.2 + '@esbuild/openbsd-x64': 0.27.2 + '@esbuild/openharmony-arm64': 0.27.2 + '@esbuild/sunos-x64': 0.27.2 + '@esbuild/win32-arm64': 0.27.2 + '@esbuild/win32-ia32': 0.27.2 + '@esbuild/win32-x64': 0.27.2 + escape-string-regexp@2.0.0: {} esprima@4.0.1: {} @@ -4212,9 +4584,9 @@ snapshots: dependencies: semver-regex: 4.0.5 - focus-trap@7.6.6: + focus-trap@7.8.0: dependencies: - tabbable: 6.3.0 + tabbable: 6.4.0 form-data-encoder@2.1.4: {} @@ -4230,6 +4602,9 @@ snapshots: jsonfile: 4.0.0 universalify: 0.1.2 + fsevents@2.3.2: + optional: true + fsevents@2.3.3: optional: true @@ -4297,11 +4672,11 @@ snapshots: hookable@5.5.3: {} - htm@3.1.1: {} - - html-encoding-sniffer@4.0.0: + html-encoding-sniffer@6.0.0: dependencies: - whatwg-encoding: 3.1.1 + '@exodus/bytes': 1.11.0 + transitivePeerDependencies: + - '@noble/hashes' html-escaper@2.0.2: {} @@ -4336,10 +4711,6 @@ snapshots: dependencies: safer-buffer: 2.1.2 - iconv-lite@0.6.3: - dependencies: - safer-buffer: 2.1.2 - ieee754@1.2.1: {} ignore-by-default@1.0.1: {} @@ -4418,14 +4789,15 @@ snapshots: argparse: 1.0.10 esprima: 4.0.1 - jsdom@27.2.0: + jsdom@28.0.0: dependencies: - '@acemir/cssom': 0.9.24 - '@asamuzakjp/dom-selector': 6.7.4 - cssstyle: 5.3.3 - data-urls: 6.0.0 + '@acemir/cssom': 0.9.31 + '@asamuzakjp/dom-selector': 6.7.7 + '@exodus/bytes': 1.11.0 + cssstyle: 5.3.7 
+ data-urls: 7.0.0 decimal.js: 10.6.0 - html-encoding-sniffer: 4.0.0 + html-encoding-sniffer: 6.0.0 http-proxy-agent: 7.0.2 https-proxy-agent: 7.0.6 is-potential-custom-element-name: 1.0.1 @@ -4433,17 +4805,15 @@ snapshots: saxes: 6.0.0 symbol-tree: 3.2.4 tough-cookie: 6.0.0 + undici: 7.20.0 w3c-xmlserializer: 5.0.0 - webidl-conversions: 8.0.0 - whatwg-encoding: 3.1.1 - whatwg-mimetype: 4.0.0 - whatwg-url: 15.1.0 - ws: 8.18.3 + webidl-conversions: 8.0.1 + whatwg-mimetype: 5.0.0 + whatwg-url: 16.0.0 xml-name-validator: 5.0.0 transitivePeerDependencies: - - bufferutil + - '@noble/hashes' - supports-color - - utf-8-validate json-buffer@3.0.1: {} @@ -4471,7 +4841,7 @@ snapshots: lowercase-keys@3.0.0: {} - lru-cache@11.2.2: {} + lru-cache@11.2.5: {} lru-cache@6.0.0: dependencies: @@ -4579,6 +4949,8 @@ snapshots: mri@1.2.0: {} + mrmime@2.0.1: {} + ms@2.1.2: {} nanoid@3.3.11: {} @@ -4687,6 +5059,20 @@ snapshots: optionalDependencies: '@napi-rs/nice': 1.0.1 + pixelmatch@7.1.0: + dependencies: + pngjs: 7.0.0 + + playwright-core@1.58.0: {} + + playwright@1.58.0: + dependencies: + playwright-core: 1.58.0 + optionalDependencies: + fsevents: 2.3.2 + + pngjs@7.0.0: {} + postcss@8.5.6: dependencies: nanoid: 3.3.11 @@ -4836,14 +5222,14 @@ snapshots: shebang-regex@3.0.0: {} - shiki@3.17.0: + shiki@3.22.0: dependencies: - '@shikijs/core': 3.17.0 - '@shikijs/engine-javascript': 3.17.0 - '@shikijs/engine-oniguruma': 3.17.0 - '@shikijs/langs': 3.17.0 - '@shikijs/themes': 3.17.0 - '@shikijs/types': 3.17.0 + '@shikijs/core': 3.22.0 + '@shikijs/engine-javascript': 3.22.0 + '@shikijs/engine-oniguruma': 3.22.0 + '@shikijs/langs': 3.22.0 + '@shikijs/themes': 3.22.0 + '@shikijs/types': 3.22.0 '@shikijs/vscode-textmate': 10.0.2 '@types/hast': 3.0.4 @@ -4865,6 +5251,12 @@ snapshots: diff: 7.0.0 supports-color: 7.2.0 + sirv@3.0.2: + dependencies: + '@polka/url': 1.0.0-next.29 + mrmime: 2.0.1 + totalist: 3.0.1 + slash@3.0.0: {} sort-keys-length@1.0.1: @@ -4944,7 +5336,7 @@ snapshots: 
symbol-tree@3.2.4: {} - tabbable@6.3.0: {} + tabbable@6.4.0: {} tar-stream@3.1.7: dependencies: @@ -4990,6 +5382,8 @@ snapshots: '@tokenizer/token': 0.3.0 ieee754: 1.2.1 + totalist@3.0.1: {} + touch@3.1.1: {} tough-cookie@6.0.0: @@ -5079,6 +5473,8 @@ snapshots: dependencies: '@fastify/busboy': 2.1.1 + undici@7.20.0: {} + unist-util-is@6.0.0: dependencies: '@types/unist': 3.0.3 @@ -5116,7 +5512,7 @@ snapshots: '@types/unist': 3.0.3 vfile-message: 4.0.3 - vite@7.0.6(@types/node@24.10.7)(tsx@4.19.4)(yaml@2.8.0): + vite@7.0.6(@types/node@24.10.10)(tsx@4.19.4)(yaml@2.8.0): dependencies: esbuild: 0.25.4 fdir: 6.5.0(picomatch@4.0.3) @@ -5125,45 +5521,46 @@ snapshots: rollup: 4.41.1 tinyglobby: 0.2.15 optionalDependencies: - '@types/node': 24.10.7 + '@types/node': 24.10.10 fsevents: 2.3.3 tsx: 4.19.4 yaml: 2.8.0 - vite@7.2.4(@types/node@24.10.7)(tsx@4.19.4)(yaml@2.8.0): + vite@7.3.1(@types/node@24.10.10)(tsx@4.19.4)(yaml@2.8.0): dependencies: - esbuild: 0.25.4 + esbuild: 0.27.2 fdir: 6.5.0(picomatch@4.0.3) picomatch: 4.0.3 postcss: 8.5.6 rollup: 4.53.3 tinyglobby: 0.2.15 optionalDependencies: - '@types/node': 24.10.7 + '@types/node': 24.10.10 fsevents: 2.3.3 tsx: 4.19.4 yaml: 2.8.0 - vitepress@2.0.0-alpha.15(@types/node@24.10.7)(postcss@8.5.6)(tsx@4.19.4)(typescript@5.8.3)(yaml@2.8.0): + vitepress@2.0.0-alpha.16(@types/node@24.10.10)(postcss@8.5.6)(tsx@4.19.4)(typescript@5.8.3)(yaml@2.8.0): dependencies: - '@docsearch/css': 4.3.2 - '@docsearch/js': 4.3.2 - '@iconify-json/simple-icons': 1.2.60 - '@shikijs/core': 3.17.0 - '@shikijs/transformers': 3.17.0 - '@shikijs/types': 3.17.0 + '@docsearch/css': 4.5.3 + '@docsearch/js': 4.5.3 + '@docsearch/sidepanel-js': 4.5.3 + '@iconify-json/simple-icons': 1.2.69 + '@shikijs/core': 3.22.0 + '@shikijs/transformers': 3.22.0 + '@shikijs/types': 3.22.0 '@types/markdown-it': 14.1.2 - '@vitejs/plugin-vue': 6.0.1(vite@7.2.4(@types/node@24.10.7)(tsx@4.19.4)(yaml@2.8.0))(vue@3.5.25(typescript@5.8.3)) + '@vitejs/plugin-vue': 
6.0.4(vite@7.3.1(@types/node@24.10.10)(tsx@4.19.4)(yaml@2.8.0))(vue@3.5.27(typescript@5.8.3)) '@vue/devtools-api': 8.0.5 - '@vue/shared': 3.5.25 - '@vueuse/core': 14.1.0(vue@3.5.25(typescript@5.8.3)) - '@vueuse/integrations': 14.1.0(focus-trap@7.6.6)(vue@3.5.25(typescript@5.8.3)) - focus-trap: 7.6.6 + '@vue/shared': 3.5.27 + '@vueuse/core': 14.1.0(vue@3.5.27(typescript@5.8.3)) + '@vueuse/integrations': 14.1.0(focus-trap@7.8.0)(vue@3.5.27(typescript@5.8.3)) + focus-trap: 7.8.0 mark.js: 8.11.1 minisearch: 7.2.0 - shiki: 3.17.0 - vite: 7.2.4(@types/node@24.10.7)(tsx@4.19.4)(yaml@2.8.0) - vue: 3.5.25(typescript@5.8.3) + shiki: 3.22.0 + vite: 7.3.1(@types/node@24.10.10)(tsx@4.19.4)(yaml@2.8.0) + vue: 3.5.27(typescript@5.8.3) optionalDependencies: postcss: 8.5.6 transitivePeerDependencies: @@ -5191,10 +5588,10 @@ snapshots: - universal-cookie - yaml - vitest@4.0.14(@types/node@24.10.7)(jsdom@27.2.0)(tsx@4.19.4)(yaml@2.8.0): + vitest@4.0.14(@types/node@24.10.10)(@vitest/browser-playwright@4.0.14)(jsdom@28.0.0)(tsx@4.19.4)(yaml@2.8.0): dependencies: '@vitest/expect': 4.0.14 - '@vitest/mocker': 4.0.14(vite@7.0.6(@types/node@24.10.7)(tsx@4.19.4)(yaml@2.8.0)) + '@vitest/mocker': 4.0.14(vite@7.0.6(@types/node@24.10.10)(tsx@4.19.4)(yaml@2.8.0)) '@vitest/pretty-format': 4.0.14 '@vitest/runner': 4.0.14 '@vitest/snapshot': 4.0.14 @@ -5211,11 +5608,12 @@ snapshots: tinyexec: 0.3.2 tinyglobby: 0.2.15 tinyrainbow: 3.0.3 - vite: 7.0.6(@types/node@24.10.7)(tsx@4.19.4)(yaml@2.8.0) + vite: 7.0.6(@types/node@24.10.10)(tsx@4.19.4)(yaml@2.8.0) why-is-node-running: 2.3.0 optionalDependencies: - '@types/node': 24.10.7 - jsdom: 27.2.0 + '@types/node': 24.10.10 + '@vitest/browser-playwright': 4.0.14(playwright@1.58.0)(vite@7.0.6(@types/node@24.10.10)(tsx@4.19.4)(yaml@2.8.0))(vitest@4.0.14) + jsdom: 28.0.0 transitivePeerDependencies: - jiti - less @@ -5229,13 +5627,13 @@ snapshots: - tsx - yaml - vue@3.5.25(typescript@5.8.3): + vue@3.5.27(typescript@5.8.3): dependencies: - '@vue/compiler-dom': 
3.5.25 - '@vue/compiler-sfc': 3.5.25 - '@vue/runtime-dom': 3.5.25 - '@vue/server-renderer': 3.5.25(vue@3.5.25(typescript@5.8.3)) - '@vue/shared': 3.5.25 + '@vue/compiler-dom': 3.5.27 + '@vue/compiler-sfc': 3.5.27 + '@vue/runtime-dom': 3.5.27 + '@vue/server-renderer': 3.5.27(vue@3.5.27(typescript@5.8.3)) + '@vue/shared': 3.5.27 optionalDependencies: typescript: 5.8.3 @@ -5243,18 +5641,17 @@ snapshots: dependencies: xml-name-validator: 5.0.0 - webidl-conversions@8.0.0: {} - - whatwg-encoding@3.1.1: - dependencies: - iconv-lite: 0.6.3 + webidl-conversions@8.0.1: {} - whatwg-mimetype@4.0.0: {} + whatwg-mimetype@5.0.0: {} - whatwg-url@15.1.0: + whatwg-url@16.0.0: dependencies: + '@exodus/bytes': 1.11.0 tr46: 6.0.0 - webidl-conversions: 8.0.0 + webidl-conversions: 8.0.1 + transitivePeerDependencies: + - '@noble/hashes' which@2.0.2: dependencies: diff --git a/scripts/Dockerfile b/scripts/Dockerfile new file mode 100644 index 0000000..7c44916 --- /dev/null +++ b/scripts/Dockerfile @@ -0,0 +1,57 @@ +# ralph-wiggum sandbox environment +# Node 22 + pnpm + Claude Code +# +# Build: +# docker build -t ralph-wiggum:$(basename $(pwd)) -f scripts/Dockerfile . 
+# +# Run: +# ./scripts/ralph-wiggum.sh "your prompt" tmp/output.md + +FROM node:22-bookworm-slim + +# ----------------------------------------------------------------------------- +# System dependencies +# ----------------------------------------------------------------------------- + +RUN apt-get update && apt-get install -y --no-install-recommends \ + git \ + curl \ + jq \ + sudo \ + tree \ + && rm -rf /var/lib/apt/lists/* + +# ----------------------------------------------------------------------------- +# pnpm +# ----------------------------------------------------------------------------- + +RUN corepack enable && corepack prepare pnpm@latest --activate + +# ----------------------------------------------------------------------------- +# Claude Code +# ----------------------------------------------------------------------------- + +RUN npm install -g @anthropic-ai/claude-code + +# ----------------------------------------------------------------------------- +# Non-root user (required for --dangerously-skip-permissions) +# Created at build time to match host user's UID/GID +# ----------------------------------------------------------------------------- + +ARG HOST_UID=1000 +ARG HOST_GID=1000 + +RUN groupadd -g ${HOST_GID} ralph 2>/dev/null || true && \ + useradd -m -u ${HOST_UID} -g ${HOST_GID} -s /bin/bash ralph 2>/dev/null || \ + useradd -m -u ${HOST_UID} -o -g ${HOST_GID} -s /bin/bash ralph && \ + echo "ralph ALL=(ALL) NOPASSWD:ALL" >> /etc/sudoers + +# ----------------------------------------------------------------------------- +# Workspace setup +# ----------------------------------------------------------------------------- + +WORKDIR /workspace + +USER ralph + +CMD ["claude", "--help"] diff --git a/scripts/build-llm-txt.mjs b/scripts/build-llm-txt.mjs index f86b517..a1b75e8 100644 --- a/scripts/build-llm-txt.mjs +++ b/scripts/build-llm-txt.mjs @@ -15,8 +15,10 @@ import 'zx/globals'; const ROOT = path.join(import.meta.dirname, '..'); const 
LLM_HELPERS_DIR = path.join(ROOT, 'llm-helpers'); -const OUTPUT_DIR = path.join(ROOT, 'docs', 'public', 'llm'); -const OUTPUT_PATH = path.join(ROOT, 'docs', 'public', 'llms.txt'); +const DOCS_DIR = path.join(ROOT, 'docs'); +const OUTPUT_DIR = path.join(DOCS_DIR, 'public', 'llm'); +const OUTPUT_PATH = path.join(DOCS_DIR, 'public', 'llms.txt'); +const DIST_DIR = path.join(DOCS_DIR, '.vitepress/dist'); $.verbose = false; @@ -41,16 +43,17 @@ if (mdFiles.length === 0) { // Copy markdown files to public/llm/ for direct access await fs.ensureDir(OUTPUT_DIR); +await fs.ensureDir(DIST_DIR, 'llm'); for (const file of mdFiles) { - await fs.copy( - path.join(LLM_HELPERS_DIR, file), - path.join(OUTPUT_DIR, file) - ); + const source = path.join(LLM_HELPERS_DIR, file); + const destination = path.join(DIST_DIR, 'llm', file); + + await fs.copy(source, destination); } -log.info(`Copied ${mdFiles.length} files to docs/public/llm/`); +log.info(`Copied ${mdFiles.length} files to docs/.vitepress/dist/llm/`); // Build package links with descriptions const packageDescriptions = { @@ -67,8 +70,9 @@ const packageLinks = mdFiles .map((file) => { const name = file.replace('.md', ''); + const path = `https://logosdx.dev/llm/${file}`; const desc = packageDescriptions[name] || ''; - return `- [${name}](/llm/${file}): ${desc}`; + return `- [${name}](${path}): ${desc}`; }) .join('\n'); diff --git a/scripts/ralph-wiggum.sh b/scripts/ralph-wiggum.sh new file mode 100755 index 0000000..03d7211 --- /dev/null +++ b/scripts/ralph-wiggum.sh @@ -0,0 +1,327 @@ +#!/bin/bash +# ralph-wiggum.sh - Autonomous Claude Code runner with sandboxed Docker execution +# +# Usage: +# ./scripts/ralph-wiggum.sh [state-file] [max-iterations] +# ./scripts/ralph-wiggum.sh --build # Rebuild Docker image +# ./scripts/ralph-wiggum.sh --login # Login to Claude inside container +# ./scripts/ralph-wiggum.sh --shell # Open bash in container (debugging) +# +# Examples: +# ./scripts/ralph-wiggum.sh "Fix all TypeScript errors" 
tmp/fix-ts.md +# ./scripts/ralph-wiggum.sh prompts/elaborate-task.md tmp/task-state.md 20 +# +# First-time setup: +# 1. ./scripts/ralph-wiggum.sh --build # Build image with your UID +# 2. ./scripts/ralph-wiggum.sh --login # Authenticate Claude inside container +# +# The script will loop until: +# - Claude outputs DONE in the state file +# - OR max iterations is reached + +set -e + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +PROJECT_DIR="$(cd "$SCRIPT_DIR/.." && pwd)" +REPO_NAME="$(basename "$PROJECT_DIR")" +IMAGE_NAME="${RALPH_IMAGE:-ralph-wiggum:$REPO_NAME}" + +# ----------------------------------------------------------------------------- +# Docker Image Management +# ----------------------------------------------------------------------------- + +build_image() { + + echo "Building Docker image: $IMAGE_NAME (UID=$(id -u), GID=$(id -g))" + docker build \ + --build-arg HOST_UID="$(id -u)" \ + --build-arg HOST_GID="$(id -g)" \ + -t "$IMAGE_NAME" \ + -f "$SCRIPT_DIR/Dockerfile" \ + "$PROJECT_DIR" +} + +ensure_image() { + + if ! docker image inspect "$IMAGE_NAME" &>/dev/null; then + echo "Image '$IMAGE_NAME' not found. Building..." + build_image + fi +} + +# Handle --build flag +if [[ "$1" == "--build" ]]; then + build_image + exit 0 +fi + +# Handle --login flag +if [[ "$1" == "--login" ]]; then + ensure_image + echo "Logging into Claude Code inside container..." + docker run -it --rm \ + -v "$HOME/.claude":/home/ralph/.claude \ + "$IMAGE_NAME" \ + claude /login + exit 0 +fi + +# Handle --shell flag (for debugging) +if [[ "$1" == "--shell" ]]; then + ensure_image + echo "Opening shell in container..." 
+ docker run -it --rm \ + -v "$PROJECT_DIR":/workspace \ + -v "$HOME/.claude":/home/ralph/.claude \ + -w /workspace \ + "$IMAGE_NAME" \ + bash + exit 0 +fi + +# ----------------------------------------------------------------------------- +# Configuration +# ----------------------------------------------------------------------------- + +PROMPT_OR_FILE="$1" +STATE_FILE="${2:-tmp/ralph-state.md}" +MAX_ITERATIONS="${3:-20}" +LOG_FILE="${STATE_FILE%.md}.log" +ITERATION=1 + +# ----------------------------------------------------------------------------- +# Logging +# ----------------------------------------------------------------------------- + +log() { + + local level="$1" + shift + local msg="$*" + local timestamp="$(date '+%Y-%m-%d %H:%M:%S')" + echo "[$timestamp] [$level] $msg" | tee -a "$LOG_FILE" +} + +log_info() { log "INFO" "$@"; } +log_error() { log "ERROR" "$@"; } +log_warn() { log "WARN" "$@"; } + +# ----------------------------------------------------------------------------- +# Validation +# ----------------------------------------------------------------------------- + +if [[ -z "$PROMPT_OR_FILE" ]]; then + echo "Usage: $0 [state-file] [max-iterations]" + echo "" + echo "Options:" + echo " --build Rebuild the Docker image" + echo " --login Authenticate Claude inside the container" + echo " --shell Open a bash shell in the container" + echo "" + echo "Examples:" + echo " $0 \"Fix all TypeScript errors\" tmp/fix-ts.md" + echo " $0 prompts/task.md tmp/state.md 50" + exit 1 +fi + +# Ensure output directories exist +mkdir -p "$(dirname "$STATE_FILE")" +mkdir -p "$(dirname "$LOG_FILE")" + +# Initialize log +echo "" > "$LOG_FILE" +log_info "=== Ralph Wiggum Autonomous Runner ===" +log_info "State file: $STATE_FILE" +log_info "Max iterations: $MAX_ITERATIONS" + +# ----------------------------------------------------------------------------- +# Prompt Loading +# ----------------------------------------------------------------------------- + +if [[ -f 
"$PROMPT_OR_FILE" ]]; then + PROMPT="$(cat "$PROMPT_OR_FILE")" + log_info "Loaded prompt from file: $PROMPT_OR_FILE" + log_info "Prompt length: ${#PROMPT} characters" +else + PROMPT="$PROMPT_OR_FILE" + log_info "Using inline prompt (${#PROMPT} characters)" +fi + +# ----------------------------------------------------------------------------- +# State File Bootstrap +# ----------------------------------------------------------------------------- + +bootstrap_state_file() { + + if [[ ! -f "$STATE_FILE" ]] || [[ ! -s "$STATE_FILE" ]]; then + log_info "Bootstrapping state file: $STATE_FILE" + cat > "$STATE_FILE" << 'EOF' +## Progress + +- [ ] (starting task...) + +## Current Status + +Initializing... + +## Notes + +(none yet) +EOF + else + log_info "Resuming from existing state file" + fi +} + +# ----------------------------------------------------------------------------- +# Prompt File Creation (with state injection + double prompt) +# ----------------------------------------------------------------------------- + +create_prompt_file() { + + local prompt_file="$1" + + cat > "$prompt_file" << EOF +# Current State + +@$STATE_FILE + +--- + +# Task + +$PROMPT + +--- + +# Task (repeated for emphasis) + +$PROMPT + +--- + +# Instructions + +You are in an autonomous loop. After completing significant work: + +1. UPDATE the state file at \`$STATE_FILE\` with your progress: + - Check off completed items in ## Progress + - Update ## Current Status with what you're working on + - Add any ## Notes about decisions or blockers + +2. When the task is FULLY COMPLETE: + - Ensure all progress items are checked + - Add \`DONE\` at the end of the state file + +3. If you encounter blockers you cannot resolve: + - Document them in ## Notes + - Add \`BLOCKED\` at the end of the state file + +IMPORTANT: Update the state file DURING your work, not just at the end. +The state file is your memory between iterations. 
+EOF +} + +# ----------------------------------------------------------------------------- +# Completion Detection +# ----------------------------------------------------------------------------- + +check_completion() { + + if [[ ! -f "$STATE_FILE" ]]; then + return 1 + fi + + # Check for completion promise + if grep -q 'DONE' "$STATE_FILE"; then + log_info "Completion promise found: DONE" + return 0 + fi + + # Check for blocked state + if grep -q 'BLOCKED' "$STATE_FILE"; then + log_warn "Task is BLOCKED - stopping loop" + return 0 + fi + + return 1 +} + +# ----------------------------------------------------------------------------- +# Cleanup handler +# ----------------------------------------------------------------------------- + +PROMPT_FILE="" + +cleanup() { + + local exit_code=$? + if [[ -n "$PROMPT_FILE" ]] && [[ -f "$PROMPT_FILE" ]]; then + rm -f "$PROMPT_FILE" + fi + if [[ $exit_code -eq 0 ]]; then + log_info "=== Completed ===" + else + log_error "=== Exited with code: $exit_code ===" + fi +} +trap cleanup EXIT + +# ----------------------------------------------------------------------------- +# Main Loop +# ----------------------------------------------------------------------------- + +ensure_image +bootstrap_state_file + +log_info "Starting autonomous loop..." 
+log_info "Image: $IMAGE_NAME" +log_info "Mounting workspace: $PROJECT_DIR" +log_info "Mounting Claude config: $HOME/.claude" + +while [[ $ITERATION -le $MAX_ITERATIONS ]]; do + + log_info "--- Iteration $ITERATION of $MAX_ITERATIONS ---" + + # Create prompt file with current state + PROMPT_FILE="$(mktemp)" + create_prompt_file "$PROMPT_FILE" + + # Run Claude Code in Docker + docker run -t --rm \ + -v "$PROJECT_DIR":/workspace \ + -v "$HOME/.claude":/home/ralph/.claude \ + -v "$PROMPT_FILE":/tmp/prompt.txt:ro \ + -w /workspace \ + "$IMAGE_NAME" \ + claude -p "$(cat "$PROMPT_FILE")" \ + --dangerously-skip-permissions \ + --verbose \ + --output-format text \ + --max-turns 50 \ + 2>&1 | tee -a "$LOG_FILE" + + # Cleanup prompt file + rm -f "$PROMPT_FILE" + PROMPT_FILE="" + + # Check for completion + if check_completion; then + log_info "Task completed after $ITERATION iteration(s)" + break + fi + + # Increment iteration + ITERATION=$((ITERATION + 1)) + + if [[ $ITERATION -gt $MAX_ITERATIONS ]]; then + log_warn "Max iterations ($MAX_ITERATIONS) reached without completion" + break + fi + + log_info "Continuing to next iteration..." 
+ sleep 2 # Brief pause between iterations +done + +log_info "Final state saved to: $STATE_FILE" +log_info "Full log available at: $LOG_FILE" diff --git a/tests/package.json b/tests/package.json index c903470..1937ec6 100644 --- a/tests/package.json +++ b/tests/package.json @@ -3,11 +3,13 @@ "version": "0.0.1", "private": true, "scripts": { - "tdd": "NODE_ENV=test vitest --watch", - "test": "NODE_ENV=test vitest run", - "test:coverage": "NODE_ENV=test vitest run --coverage", - "test:only": "NODE_ENV=test vitest run", - "test:ci": "NODE_ENV=test vitest run --reporter=default --reporter=github-actions", + "lint": "pnpm tsc --noEmit --project tsconfig.json", + "tdd": "NODE_ENV=test vitest --watch --project=unit", + "test": "NODE_ENV=test vitest run --project=unit", + "test:coverage": "NODE_ENV=test vitest run --project=unit --coverage", + "test:only": "NODE_ENV=test vitest run --project=unit", + "test:ci": "NODE_ENV=test vitest run --project=unit --reporter=default --reporter=github-actions", + "test:smoke": "NODE_ENV=test vitest run --project=browser", "memory": "tsx --expose-gc src/_memory-tests/index.ts", "memory:ui": "tsx src/_memory-tests/ui/server.ts" }, @@ -26,14 +28,16 @@ "@types/chai": "^5.2.2", "@types/jsdom": "^27", "@types/sinon": "^21", + "@vitest/browser-playwright": "^4.0.14", "@vitest/coverage-v8": "^4", - "better-sse": "^0.15.1", + "better-sse": "^0.16.1", "fast-check": "^4.1.1", "joi": "^18", - "jsdom": "^27", + "jsdom": "^28", "node-test-github-reporter": "^1.3.0", "sinon": "^21", "typescript": "5", - "vitest": "^4" + "vitest": "^4", + "playwright": "^1.58.0" } } diff --git a/tests/src/fetch/_helpers.ts b/tests/src/fetch/_helpers.ts index b698b78..277d616 100644 --- a/tests/src/fetch/_helpers.ts +++ b/tests/src/fetch/_helpers.ts @@ -32,7 +32,7 @@ export interface RegexCallbackArg { } // Augment the FetchEngine module with custom response headers for testing -declare module '../../../packages/fetch/src/engine.ts' { +declare module 
'../../../packages/fetch/src/engine/index.ts' { namespace FetchEngine { diff --git a/tests/src/fetch/adapter-fs.ts b/tests/src/fetch/adapters/fs.test.ts similarity index 91% rename from tests/src/fetch/adapter-fs.ts rename to tests/src/fetch/adapters/fs.test.ts index 4b94f30..6718565 100644 --- a/tests/src/fetch/adapter-fs.ts +++ b/tests/src/fetch/adapters/fs.test.ts @@ -14,14 +14,14 @@ import { import * as fs from 'node:fs'; import * as path from 'node:path'; -import { FetchEngine } from '../../../packages/fetch/src/index.ts'; +import { FetchEngine } from '../../../../packages/fetch/src/index.ts'; import { CacheAdapter, CacheItem -} from '../../../packages/utils/src/index.ts'; +} from '../../../../packages/utils/src/index.ts'; -import { makeTestStubs } from './_helpers.ts'; +import { makeTestStubs } from '../_helpers.ts'; describe('@logosdx/fetch: file system cache adapter', async () => { @@ -163,9 +163,9 @@ describe('@logosdx/fetch: file system cache adapter', async () => { const missEvents: string[] = []; const cacheKeys: string[] = []; - api.on('fetch-cache-hit', (data) => hitEvents.push(data.path!)); - api.on('fetch-cache-miss', (data) => missEvents.push(data.path!)); - api.on('fetch-cache-set', (data) => cacheKeys.push(data.key)); + api.on('cache-hit', (data) => hitEvents.push(data.path!)); + api.on('cache-miss', (data) => missEvents.push(data.path!)); + api.on('cache-set', (data) => cacheKeys.push(data.key)); // First request - cache miss, stored to file await api.get('/json'); @@ -210,7 +210,7 @@ describe('@logosdx/fetch: file system cache adapter', async () => { // All should be cache hits now const hitEvents: string[] = []; - api.on('fetch-cache-hit', (data) => hitEvents.push(data.path!)); + api.on('cache-hit', (data) => hitEvents.push(data.path!)); await api.get('/json'); await api.get('/json1'); @@ -246,7 +246,7 @@ describe('@logosdx/fetch: file system cache adapter', async () => { // Should be cache miss again const missEvents: string[] = []; - 
api.on('fetch-cache-miss', (data) => missEvents.push(data.path!)); + api.on('cache-miss', (data) => missEvents.push(data.path!)); await api.get('/json'); @@ -272,8 +272,8 @@ describe('@logosdx/fetch: file system cache adapter', async () => { const staleEvents: string[] = []; const revalidateEvents: string[] = []; - api.on('fetch-cache-stale', (data) => staleEvents.push(data.path!)); - api.on('fetch-cache-revalidate', (data) => revalidateEvents.push(data.path!)); + api.on('cache-stale', (data) => staleEvents.push(data.path!)); + api.on('cache-revalidate', (data) => revalidateEvents.push(data.path!)); // First request - cache miss await api.get('/json'); @@ -308,7 +308,7 @@ describe('@logosdx/fetch: file system cache adapter', async () => { // Capture actual cache keys from events const cacheKeys: string[] = []; - api.on('fetch-cache-set', (data) => cacheKeys.push(data.key)); + api.on('cache-set', (data) => cacheKeys.push(data.key)); await api.get('/json'); await api.get('/json1'); diff --git a/tests/src/fetch/engine/configuration.test.ts b/tests/src/fetch/engine/configuration.test.ts new file mode 100644 index 0000000..ee8544e --- /dev/null +++ b/tests/src/fetch/engine/configuration.test.ts @@ -0,0 +1,178 @@ +import { + describe, + it, + expect +} from 'vitest' + +import { + FetchEngine, +} from '../../../../packages/fetch/src/index.ts'; + +import { attempt, attemptSync } from '../../../../packages/utils/src/index.ts'; +import { makeTestStubs } from '../_helpers.ts'; + + +describe('FetchEngine: configuration validation', async () => { + + const { testUrl } = await makeTestStubs(4131); + + it('should handle both serializers throwing', async () => { + + // Both dedupe and cache serializers throw + const api = new FetchEngine({ + baseUrl: testUrl, + dedupePolicy: { + enabled: true, + serializer: () => { + + throw new Error('Dedupe serializer failed'); + } + }, + cachePolicy: { + enabled: true, + serializer: () => { + + throw new Error('Cache serializer failed'); + } + 
} + }); + + // First error encountered should be thrown + const [, err] = await attempt(() => api.get('/json')); + + expect(err).to.exist; + expect(err).to.be.instanceOf(Error); + // Either dedupe or cache serializer error + expect(err!.message).to.match(/serializer failed/i); + + api.destroy(); + }); + + it('should handle dedupe enabled with cache disabled and vice versa', async () => { + + // Dedupe enabled, cache explicitly disabled + const api1 = new FetchEngine({ + baseUrl: testUrl, + dedupePolicy: { enabled: true }, + cachePolicy: { enabled: false } + }); + + const path1 = `/test-dedupe-${Date.now()}`; + + const [r1, r2] = await Promise.all([ + api1.get(path1), + api1.get(path1) + ]); + + expect(r1.status).to.equal(200); + expect(r2.status).to.equal(200); + + api1.destroy(); + + // Cache enabled, dedupe explicitly disabled + const api2 = new FetchEngine({ + baseUrl: testUrl, + dedupePolicy: { enabled: false }, + cachePolicy: { enabled: true } + }); + + const path2 = `/test-cache-${Date.now()}`; + + await api2.get(path2); + const r3 = await api2.get(path2); + + expect(r3.status).to.equal(200); + + api2.destroy(); + }); + + it('should handle conflicting method configurations', async () => { + + // GET enabled for dedupe but disabled for cache + const api = new FetchEngine({ + baseUrl: testUrl, + dedupePolicy: { + enabled: true, + methods: ['GET', 'POST'] + }, + cachePolicy: { + enabled: true, + methods: ['POST', 'PUT'] // GET not cached + } + }); + + const dedupeEvents: string[] = []; + const cacheEvents: string[] = []; + + api.on('dedupe-start', () => dedupeEvents.push('dedupe')); + api.on('cache-set', () => cacheEvents.push('cache')); + + const path = `/test-conflict-${Date.now()}`; + + // GET request: should dedupe but not cache + await Promise.all([ + api.get(path), + api.get(path) + ]); + + expect(dedupeEvents.length).to.equal(1); // Deduped + expect(cacheEvents.length).to.equal(0); // Not cached + + api.destroy(); + }); + + it('should handle invalid 
configuration objects (null)', async () => { + + // Null config objects + const [, err] = attemptSync(() => { + + new FetchEngine({ + baseUrl: testUrl, + dedupePolicy: null as any, + cachePolicy: null as any + }); + }); + + // May fail during construction or treat null as disabled + // Either way, shouldn't crash + expect(err || true).to.exist; + }); + + it('should handle invalid configuration objects (undefined)', async () => { + + // Explicitly undefined config (vs omitted) + const api = new FetchEngine({ + baseUrl: testUrl, + dedupePolicy: undefined, + cachePolicy: undefined + }); + + const path = `/test-undefined-${Date.now()}`; + + // Should work with features disabled + const res = await api.get(path); + + expect(res.status).to.equal(200); + + api.destroy(); + }); + + it('should handle empty config objects', async () => { + + // Empty config objects + const api = new FetchEngine({ + baseUrl: testUrl, + dedupePolicy: {}, + cachePolicy: {} + }); + + const path = `/test-empty-${Date.now()}`; + + // Should work with default behavior + const res = await api.get(path); + + expect(res.status).to.equal(200); + + api.destroy(); + }); +}); diff --git a/tests/src/fetch/base.ts b/tests/src/fetch/engine/core.test.ts similarity index 62% rename from tests/src/fetch/base.ts rename to tests/src/fetch/engine/core.test.ts index 9053b1b..aee0f7b 100644 --- a/tests/src/fetch/base.ts +++ b/tests/src/fetch/engine/core.test.ts @@ -10,12 +10,12 @@ import Hapi from '@hapi/hapi'; import { FetchError, FetchEngine, -} from '../../../packages/fetch/src/index.ts'; +} from '../../../../packages/fetch/src/index.ts'; -import logosFetch from '../../../packages/fetch/src/index.ts'; -import { attempt, attemptSync, wait, noop } from '../../../packages/utils/src/index.ts'; -import { sandbox } from '../_helpers.ts'; -import { EventData, RegexCallbackArg, makeTestStubs } from './_helpers.ts'; +import logosFetch from '../../../../packages/fetch/src/index.ts'; +import { attempt, noop } from 
'../../../../packages/utils/src/index.ts'; +import { sandbox } from '../../_helpers.ts'; +import { EventData, RegexCallbackArg, makeTestStubs } from '../_helpers.ts'; describe('@logosdx/fetch: base', async () => { @@ -31,19 +31,22 @@ describe('@logosdx/fetch: base', async () => { expect(logosFetch.patch).to.exist; expect(logosFetch.options).to.exist; expect(logosFetch.request).to.exist; - expect(logosFetch.removeHeader).to.exist; - expect(logosFetch.removeParam).to.exist; - expect(logosFetch.addHeader).to.exist; - expect(logosFetch.addParam).to.exist; - expect(logosFetch.setState).to.exist; - expect(logosFetch.resetState).to.exist; - expect(logosFetch.getState).to.exist; - expect(logosFetch.changeBaseUrl).to.exist; - expect(logosFetch.changeModifyOptions).to.exist; - expect(logosFetch.changeModifyMethodOptions).to.exist; + expect(logosFetch.headers).to.exist; + expect(logosFetch.headers.set).to.exist; + expect(logosFetch.headers.remove).to.exist; + expect(logosFetch.headers.has).to.exist; + expect(logosFetch.params).to.exist; + expect(logosFetch.params.set).to.exist; + expect(logosFetch.params.remove).to.exist; + expect(logosFetch.state).to.exist; + expect(logosFetch.state.set).to.exist; + expect(logosFetch.state.get).to.exist; + expect(logosFetch.state.reset).to.exist; + expect(logosFetch.config).to.exist; + expect(logosFetch.config.set).to.exist; + expect(logosFetch.config.get).to.exist; expect(logosFetch.on).to.exist; expect(logosFetch.off).to.exist; - expect(logosFetch.hasHeader).to.exist; await logosFetch.get(server.info.uri + '/json'); expect(callStub.args.length).to.equal(1); @@ -101,18 +104,18 @@ describe('@logosdx/fetch: base', async () => { test(/methodParams items must be objects/i); opts.methodParams.POST = {}; - opts.modifyOptions = 'not a function'; - test(/modifyOptions must be a function/i); + opts.modifyConfig = 'not a function'; + test(/modifyConfig must be a function/i); - opts.modifyOptions = () => {}; - opts.modifyMethodOptions = 'not an 
object'; - test(/modifyMethodOptions must be an object/i); + opts.modifyConfig = () => {}; + opts.modifyMethodConfig = 'not an object'; + test(/modifyMethodConfig must be an object/i); - opts.modifyMethodOptions = {}; - opts.modifyMethodOptions.POST = 'not a function'; - test(/modifyMethodOptions items must be functions/i); + opts.modifyMethodConfig = {}; + opts.modifyMethodConfig.POST = 'not a function'; + test(/modifyMethodConfig items must be functions/i); - opts.modifyMethodOptions.POST = () => {}; + opts.modifyMethodConfig.POST = () => {}; opts.validate = 'not an object'; test(/validate must be an object/i); @@ -125,13 +128,13 @@ describe('@logosdx/fetch: base', async () => { test(/validate.state must be a function/i); opts.validate.state = () => {}; - opts.timeout = 'not a number'; - test(/timeout must be non-negative integer/i); + opts.totalTimeout = 'not a number'; + test(/totalTimeout must be non-negative integer/i); - opts.timeout = -1; - test(/timeout must be non-negative integer/i); + opts.totalTimeout = -1; + test(/totalTimeout must be non-negative integer/i); - opts.timeout = 1; + opts.totalTimeout = 1; opts.determineType = 'not a function'; test(/determineType must be a function/i); @@ -165,12 +168,12 @@ describe('@logosdx/fetch: base', async () => { page: '3', }, }, - modifyOptions: (opts) => opts, - modifyMethodOptions: { + modifyConfig: (opts) => opts, + modifyMethodConfig: { POST: (opts) => opts, PUT: (opts) => opts, }, - timeout: 1000, + totalTimeout: 1000, validate: { headers: () => true, state: () => true, @@ -180,7 +183,7 @@ describe('@logosdx/fetch: base', async () => { params: true, } }, - determineType: () => 'json', + determineType: () => ({ type: 'json', isJson: true }), }); expect(api.get).to.exist; @@ -188,9 +191,9 @@ describe('@logosdx/fetch: base', async () => { expect(api.patch).to.exist; expect(api.post).to.exist; expect(api.put).to.exist; - expect(api.hasHeader).to.exist; - expect(api.addHeader).to.exist; - 
expect(api.rmHeader).to.exist; + expect(api.headers.has).to.exist; + expect(api.headers.set).to.exist; + expect(api.headers.remove).to.exist; }); it('makes http requests', async () => { @@ -236,7 +239,7 @@ describe('@logosdx/fetch: base', async () => { const api = new FetchEngine({ baseUrl: testUrl, defaultType: 'json', - timeout: 5000, + attemptTimeout: 5000, headers: { 'X-Custom': 'test-header' } @@ -271,7 +274,7 @@ describe('@logosdx/fetch: base', async () => { expect(response.config).to.exist; expect(response.config).to.be.an('object'); expect(response.config.baseUrl).to.contain(testUrl); - expect(response.config.timeout).to.eq(5000); + expect(response.config.attemptTimeout).to.eq(5000); expect(response.config.headers).to.exist; // Headers might be structured differently in config expect(response.config.headers).to.be.an('object'); @@ -659,7 +662,7 @@ describe('@logosdx/fetch: base', async () => { const onReq = sandbox.stub(); - api.on('fetch-before', onReq); + api.on('before-request', onReq); const anyReq = (method: 'get' | 'post' | 'put' | 'delete' | 'options' | 'patch') => { @@ -694,13 +697,13 @@ describe('@logosdx/fetch: base', async () => { baseUrl: testUrl }); - api.addHeader({ test: 'true' }); - expect(api.hasHeader('test')).to.equal(true); + api.headers.set({ test: 'true' }); + expect(api.headers.has('test')).to.equal(true); await api.get('/json1'); - api.rmHeader('test'); - expect(api.hasHeader('test')).to.equal(false); + api.headers.remove('test'); + expect(api.headers.has('test')).to.equal(false); await api.get('/json2'); @@ -738,7 +741,7 @@ describe('@logosdx/fetch: base', async () => { const headers = { key: '123' }; - api.addHeader(headers, 'DELETE'); + api.headers.set(headers, 'DELETE'); await api.post('/json'); await api.put('/json'); @@ -867,7 +870,7 @@ describe('@logosdx/fetch: base', async () => { * Remove a param */ - api.rmParams('page'); + api.params.remove('page'); await api.get('/json'); @@ -882,7 +885,7 @@ describe('@logosdx/fetch: 
base', async () => { * Add a param */ - api.addParam('page', '2'); + api.params.set('page', '2'); await api.get('/json'); const [[req2]] = callStub.args as [[Hapi.Request]]; @@ -905,8 +908,8 @@ describe('@logosdx/fetch: base', async () => { const params = { key: '123' }; - api.addParam('page', '2', 'DELETE'); - api.addParam({ page: '2' }, 'PATCH'); + api.params.set('page', '2', 'DELETE'); + api.params.set({ page: '2' }, 'PATCH'); callStub.resetHistory(); @@ -941,7 +944,7 @@ describe('@logosdx/fetch: base', async () => { * Add a param with a method */ - api.addParam('key', '123', 'POST'); + api.params.set('key', '123', 'POST'); await api.get('/json'); await api.post('/json'); @@ -951,7 +954,7 @@ describe('@logosdx/fetch: base', async () => { expect(req4.query).to.contain({ key: '123' }); callStub.resetHistory(); - api.addParam('key', '456', 'GET'); + api.params.set('key', '456', 'GET'); await api.get('/json'); await api.post('/json'); @@ -966,7 +969,7 @@ describe('@logosdx/fetch: base', async () => { * Remove a param with a method */ - api.rmParams('key', 'POST'); + api.params.remove('key', 'POST'); await api.get('/json'); await api.post('/json'); @@ -977,7 +980,7 @@ describe('@logosdx/fetch: base', async () => { expect(req8.query).not.to.contain({ key: '123' }); callStub.resetHistory(); - api.rmParams('key', 'GET'); + api.params.remove('key', 'GET'); await api.get('/json'); @@ -1067,10 +1070,15 @@ describe('@logosdx/fetch: base', async () => { }, }); - await attempt(() => api.get('/bad-content-type', { onError })); + // Note: The /bad-content-type route returns 204 No Content with custom content-type + // 204 responses correctly return null (no content to parse), so no error occurs + // This test verifies 204 with unknown content-type is handled gracefully + const [result, err] = await attempt(() => api.get('/bad-content-type', { onError })); - const [[dropReq]] = onError.args as [[FetchError]]; - expect(dropReq.status).to.equal(204); + // 204 No Content should 
succeed with null data, not fail + expect(err).to.be.null; + expect(result?.status).to.equal(204); + expect(result?.data).to.be.null; }); it('can abort requests', async () => { @@ -1144,7 +1152,7 @@ describe('@logosdx/fetch: base', async () => { const api = new FetchEngine({ baseUrl: testUrl, - timeout + totalTimeout: timeout }); const now = () => +(new Date()); @@ -1183,7 +1191,7 @@ describe('@logosdx/fetch: base', async () => { const api = new FetchEngine({ baseUrl: testUrl, - modifyOptions(opts) { + modifyConfig(opts) { opts.headers = { @@ -1208,7 +1216,7 @@ describe('@logosdx/fetch: base', async () => { const api = new FetchEngine({ baseUrl: testUrl, - modifyMethodOptions: { + modifyMethodConfig: { POST: modifyOptions, PUT: modifyOptions, } @@ -1216,7 +1224,7 @@ describe('@logosdx/fetch: base', async () => { const onReq = sandbox.stub(); - api.on('fetch-before', onReq); + api.on('before-request', onReq); const anyReq = (method: 'get' | 'post' | 'put' | 'delete' | 'options' | 'patch') => { @@ -1262,7 +1270,7 @@ describe('@logosdx/fetch: base', async () => { const api = new FetchEngine <{}, {}, TestState>({ baseUrl: testUrl, - modifyOptions(opts, state) { + modifyConfig(opts, state) { opts.headers = { @@ -1276,11 +1284,11 @@ describe('@logosdx/fetch: base', async () => { const val = 'someValue'; - api.setState({ theValue: val }); + api.state.set({ theValue: val }); await api.get('/json'); - api.resetState(); + api.state.reset(); await api.get('/json'); @@ -1309,7 +1317,7 @@ describe('@logosdx/fetch: base', async () => { // Use regex to listen to all fetch events (ObserverEngine pattern) // Regex listeners receive ({ event, data }) as first arg - api.on(/fetch-.*/, listener); + api.on(/.*/, listener); // Helper to get event name from ObserverEngine regex callback // Regex callbacks receive { event, data } as first arg @@ -1353,15 +1361,25 @@ describe('@logosdx/fetch: base', async () => { const [args1, args2, args3] = listener.args as [[RegexCallbackArg], 
[RegexCallbackArg], [RegexCallbackArg]]; - expect(getEventName(args1)).to.eq('fetch-before'); - expect(getEventName(args2)).to.eq('fetch-after'); - expect(getEventName(args3)).to.eq('fetch-error'); + expect(getEventName(args1)).to.eq('before-request'); + expect(getEventName(args2)).to.eq('after-request'); + expect(getEventName(args3)).to.eq('error'); for (const args of [args1, args2, args3]) { assertRemoteEv('/fail', 'GET', getData(args), getEventName(args)); } + // requestStart present on all request events + expect(getData(args1).requestStart, 'before-request requestStart').to.be.a('number'); + expect(getData(args2).requestStart, 'after-request requestStart').to.be.a('number'); + expect(getData(args3).requestStart, 'error requestStart').to.be.a('number'); + + // requestEnd only on terminal events + expect(getData(args1).requestEnd, 'before-request requestEnd').to.not.exist; + expect(getData(args2).requestEnd, 'after-request requestEnd').to.not.exist; + expect(getData(args3).requestEnd, 'error requestEnd').to.be.a('number'); + /** * Test Abort events */ @@ -1375,14 +1393,19 @@ describe('@logosdx/fetch: base', async () => { const [abortArgs1, abortArgs2] = listener.args as [[RegexCallbackArg], [RegexCallbackArg]]; - expect(getEventName(abortArgs1)).to.eq('fetch-before'); - expect(getEventName(abortArgs2)).to.eq('fetch-abort'); + expect(getEventName(abortArgs1)).to.eq('before-request'); + expect(getEventName(abortArgs2)).to.eq('abort'); for (const args of [abortArgs1, abortArgs2]) { assertRemoteEv('/wait', 'GET', getData(args), getEventName(args)); } + expect(getData(abortArgs1).requestStart, 'before-request requestStart').to.be.a('number'); + expect(getData(abortArgs2).requestStart, 'abort requestStart').to.be.a('number'); + expect(getData(abortArgs1).requestEnd, 'before-request requestEnd').to.not.exist; + expect(getData(abortArgs2).requestEnd, 'abort requestEnd').to.be.a('number'); + /** * Test Successful events */ @@ -1395,9 +1418,9 @@ describe('@logosdx/fetch: 
base', async () => { const [successArgs1, successArgs2, successArgs3] = listener.args as [[RegexCallbackArg], [RegexCallbackArg], [RegexCallbackArg]]; - expect(getEventName(successArgs1)).to.eq('fetch-before'); - expect(getEventName(successArgs2)).to.eq('fetch-after'); - expect(getEventName(successArgs3)).to.eq('fetch-response'); + expect(getEventName(successArgs1)).to.eq('before-request'); + expect(getEventName(successArgs2)).to.eq('after-request'); + expect(getEventName(successArgs3)).to.eq('response'); for (const args of [successArgs1, successArgs2, successArgs3]) { @@ -1411,47 +1434,58 @@ describe('@logosdx/fetch: base', async () => { expect(getData(successArgs3).data, `fetch-response data`).to.contain({ ok: true }); + // requestStart present on all request events + expect(getData(successArgs1).requestStart, 'before-request requestStart').to.be.a('number'); + expect(getData(successArgs2).requestStart, 'after-request requestStart').to.be.a('number'); + expect(getData(successArgs3).requestStart, 'response requestStart').to.be.a('number'); + + // requestEnd only on terminal events + expect(getData(successArgs1).requestEnd, 'before-request requestEnd').to.not.exist; + expect(getData(successArgs2).requestEnd, 'after-request requestEnd').to.not.exist; + expect(getData(successArgs3).requestEnd, 'response requestEnd').to.be.a('number'); + /** * Test Non-request events */ listener.reset(); - api.resetState(); + api.state.reset(); state.flowers = true; - api.setState(state); - api.addHeader({ wee: 'woo' }); - api.rmHeader(['wee']); - api.changeBaseUrl('http://pope.pepe'); + api.state.set(state); + api.headers.set({ wee: 'woo' }); + api.headers.remove(['wee']); + api.config.set('baseUrl', 'http://pope.pepe'); const [ stateResetArgs, stateSetArgs, headerAddArgs, headerRmArgs, - urlChangeArgs + configChangeArgs ] = listener.args as [RegexCallbackArg][]; - const nonRemoteEvs = [ - stateSetArgs, - headerAddArgs, - headerRmArgs, - urlChangeArgs - ]; + // State events use 
'current' not 'state' + expect((getData(stateResetArgs!) as any).current).to.exist; + expect((getData(stateResetArgs!) as any).current).to.be.empty; - expect(getData(stateResetArgs!).state).to.exist; - expect(getData(stateResetArgs!).state).to.be.empty; + // State-set events use 'current' not 'state' + expect((getData(stateSetArgs!) as any).current).to.exist; + expect((getData(stateSetArgs!) as any).current).to.contain(state); - for (const args of nonRemoteEvs) { + // Header events have 'key' and 'value' + expect((getData(headerAddArgs!) as any).key || (getData(headerAddArgs!) as any).value).to.exist; + expect((getData(headerRmArgs!) as any).key).to.exist; - assertNonRemoteEv(getData(args!), getEventName(args!)); - } + // config-change event has 'path' and 'value' + expect((getData(configChangeArgs!) as any).path).to.eq('baseUrl'); + expect((getData(configChangeArgs!) as any).value).to.eq('http://pope.pepe'); - expect(getEventName(stateResetArgs!)).to.eq('fetch-state-reset'); - expect(getEventName(stateSetArgs!)).to.eq('fetch-state-set'); - expect(getEventName(headerAddArgs!)).to.eq('fetch-header-add'); - expect(getEventName(headerRmArgs!)).to.eq('fetch-header-remove'); - expect(getEventName(urlChangeArgs!)).to.eq('fetch-url-change'); + expect(getEventName(stateResetArgs!)).to.eq('state-reset'); + expect(getEventName(stateSetArgs!)).to.eq('state-set'); + expect(getEventName(headerAddArgs!)).to.eq('header-add'); + expect(getEventName(headerRmArgs!)).to.eq('header-remove'); + expect(getEventName(configChangeArgs!)).to.eq('config-change'); /** * Test cleanup function (replaces off) @@ -1464,10 +1498,10 @@ describe('@logosdx/fetch: base', async () => { const listener2 = sandbox.stub(); // on() returns a cleanup function - const cleanup = api2.on(/fetch-.*/, listener2); + const cleanup = api2.on(/.*/, listener2); // Trigger an event - api2.resetState(); + api2.state.reset(); expect(listener2.called, 'listener called before cleanup').to.be.true; // Call cleanup to remove 
listener @@ -1475,10 +1509,10 @@ describe('@logosdx/fetch: base', async () => { cleanup(); // These should NOT trigger the listener - api2.setState({ flowers: true }); - api2.addHeader({ wee: 'woo' }); - api2.rmHeader(['wee']); - api2.changeBaseUrl(testUrl); + api2.state.set({ flowers: true }); + api2.headers.set({ wee: 'woo' }); + api2.headers.remove(['wee']); + api2.config.set('baseUrl', testUrl); await api2.post('/json', payload); @@ -1561,13 +1595,13 @@ describe('@logosdx/fetch: base', async () => { params: true, } }, - modifyOptions(opts, state) { + modifyConfig(opts: any, state: any) { opts.headers!['x-test'] = state.theValue; return opts; }, - modifyMethodOptions: { + modifyMethodConfig: { POST(opts, state) { opts.headers!['x-test'] = state.theValue; @@ -1577,17 +1611,17 @@ describe('@logosdx/fetch: base', async () => { } }); - api.addHeader({ hmac: 'ghi789', poop: 'asd', }); - api.addHeader({ 'x-test': 'test' }); - api.addHeader('x-toast', 'true'); + api.headers.set({ hmac: 'ghi789', poop: 'asd', }); + api.headers.set({ 'x-test': 'test' }); + api.headers.set('x-toast', 'true'); - api.addParam('page', '4'); - api.addParam({ page: '5' }); + api.params.set('page', '4'); + api.params.set({ page: '5' }); - api.rmHeader('x-test'); - api.rmHeader(['x-toast']) - api.rmParams('page'); - api.rmParams(['page']); + api.headers.remove('x-test'); + api.headers.remove(['x-toast']) + api.params.remove('page'); + api.params.remove(['page']); type TestPayload = { test: string; @@ -1638,16 +1672,16 @@ describe('@logosdx/fetch: base', async () => { }); const succeed = [ - () => api.addHeader({ test: 'true' }), - () => api.addHeader('test', 'true'), + () => api.headers.set({ test: 'true' }), + () => api.headers.set('test', 'true'), ]; const fail = [ - () => api.addHeader({ poop: 'asd' }), - () => api.addHeader('poop', 'asd'), - () => api.addHeader('test', 'false'), - () => api.addHeader('content-type', 'application/xml'), + () => api.headers.set({ poop: 'asd' }), + () => 
api.headers.set('poop', 'asd'), + () => api.headers.set('test', 'false'), + () => api.headers.set('content-type', 'application/xml'), ] succeed.forEach( @@ -1745,16 +1779,16 @@ describe('@logosdx/fetch: base', async () => { }); const succeed = [ - () => api.addParam({ test: 'true' }), - () => api.addParam('test', 'true'), + () => api.params.set({ test: 'true' }), + () => api.params.set('test', 'true'), ]; const fail = [ - () => api.addParam({ poop: 'asd' }), - () => api.addParam('poop', 'asd'), - () => api.addParam('test', 'false'), - () => api.addParam('page', '2'), + () => api.params.set({ poop: 'asd' }), + () => api.params.set('poop', 'asd'), + () => api.params.set('test', 'false'), + () => api.params.set('page', '2'), ]; succeed.forEach( @@ -1854,8 +1888,8 @@ describe('@logosdx/fetch: base', async () => { } }); - api.setState({ theValue: 'someValue' }); - api.resetState(); + api.state.set({ theValue: 'someValue' }); + api.state.reset(); expect(fn.calledTwice).to.be.true; }); @@ -1926,16 +1960,17 @@ describe('@logosdx/fetch: base', async () => { return opts; }; - const onModifyOptionsChange = sandbox.stub(); + const onConfigChange = sandbox.stub(); - api.on('fetch-modify-options-change', onModifyOptionsChange); + api.on('config-change', onConfigChange); // Set modifyOptions function - api.changeModifyOptions(modifyOptions); + api.config.set('modifyConfig', modifyOptions); - expect(onModifyOptionsChange.calledOnce).to.be.true; + expect(onConfigChange.calledOnce).to.be.true; // Non-regex listener: first arg is EventData directly - expect((onModifyOptionsChange.firstCall.args[0] as EventData).data).to.equal(modifyOptions); + expect((onConfigChange.firstCall.args[0] as any).path).to.equal('modifyConfig'); + expect((onConfigChange.firstCall.args[0] as any).value).to.equal(modifyOptions); // Make a request to verify the modifier is applied await api.get('/json'); @@ -1943,10 +1978,10 @@ describe('@logosdx/fetch: base', async () => { 
expect(modifyCallCount).to.equal(1); // Clear modifyOptions function - api.changeModifyOptions(undefined); + api.config.set('modifyConfig', undefined); - expect(onModifyOptionsChange.calledTwice).to.be.true; - expect((onModifyOptionsChange.secondCall.args[0] as EventData).data).to.be.undefined; + expect(onConfigChange.calledTwice).to.be.true; + expect((onConfigChange.secondCall.args[0] as any).value).to.be.undefined; // Make another request to verify the modifier is no longer applied await api.get('/json'); @@ -1975,26 +2010,24 @@ describe('@logosdx/fetch: base', async () => { return opts; }; - const onModifyMethodOptionsChange = sandbox.stub(); + const onConfigChange = sandbox.stub(); - api.on('fetch-modify-method-options-change', onModifyMethodOptionsChange); + api.on('config-change', onConfigChange); // Set POST modifyOptions function - api.changeModifyMethodOptions('POST', postModifyOptions); + api.config.set('modifyMethodConfig.POST' as any, postModifyOptions); - expect(onModifyMethodOptionsChange.calledOnce).to.be.true; - // Non-regex listener: first arg is EventData directly, data property contains { method, fn } - const firstCallData = (onModifyMethodOptionsChange.firstCall.args[0] as EventData).data as { method: string; fn: unknown }; - expect(firstCallData.method).to.equal('POST'); - expect(firstCallData.fn).to.equal(postModifyOptions); + expect(onConfigChange.calledOnce).to.be.true; + // Non-regex listener: first arg is EventData directly, path and value properties + expect((onConfigChange.firstCall.args[0] as any).path).to.equal('modifyMethodConfig.POST'); + expect((onConfigChange.firstCall.args[0] as any).value).to.equal(postModifyOptions); // Set GET modifyOptions function - api.changeModifyMethodOptions('GET', getModifyOptions); + api.config.set('modifyMethodConfig.GET' as any, getModifyOptions); - expect(onModifyMethodOptionsChange.calledTwice).to.be.true; - const secondCallData = (onModifyMethodOptionsChange.secondCall.args[0] as EventData).data as 
{ method: string; fn: unknown }; - expect(secondCallData.method).to.equal('GET'); - expect(secondCallData.fn).to.equal(getModifyOptions); + expect(onConfigChange.calledTwice).to.be.true; + expect((onConfigChange.secondCall.args[0] as any).path).to.equal('modifyMethodConfig.GET'); + expect((onConfigChange.secondCall.args[0] as any).value).to.equal(getModifyOptions); // Make a GET request to verify only GET modifier is applied await api.get('/json'); @@ -2009,12 +2042,11 @@ describe('@logosdx/fetch: base', async () => { expect(postModifyCallCount).to.equal(1); // Clear POST modifyOptions function - api.changeModifyMethodOptions('POST', undefined); + api.config.set('modifyMethodConfig.POST', undefined); - expect(onModifyMethodOptionsChange.callCount).to.equal(3); - const thirdCallData = (onModifyMethodOptionsChange.thirdCall.args[0] as EventData).data as { method: string; fn: unknown }; - expect(thirdCallData.method).to.equal('POST'); - expect(thirdCallData.fn).to.be.undefined; + expect(onConfigChange.callCount).to.equal(3); + expect((onConfigChange.thirdCall.args[0] as any).path).to.equal('modifyMethodConfig.POST'); + expect((onConfigChange.thirdCall.args[0] as any).value).to.be.undefined; // Make another POST request to verify the modifier is no longer applied await api.post('/json', {}); @@ -2079,774 +2111,4 @@ describe('@logosdx/fetch: base', async () => { expect(allHeaders).to.include('x-rate-limit-remaining'); expect(allHeaders).to.include('x-request-id'); }); - - // ======================================================================== - // TIMEOUT BOUNDARIES - // ======================================================================== - - describe('timeout boundaries', () => { - - it('should handle 0ms timeout', async () => { - - // Immediate timeout edge case - use slow endpoint to ensure timeout fires first - const api = new FetchEngine({ - baseUrl: testUrl, - dedupePolicy: true - }); - - // Timeout of 0ms should immediately abort against slow endpoint - 
const [, err] = await attempt(() => api.get('/slow-success/200', { timeout: 0 })); - - expect(err).to.exist; - expect(err).to.be.instanceOf(FetchError); - - const fetchErr = err as FetchError; - expect(fetchErr.aborted).to.be.true; - - api.destroy(); - await wait(10); // Let microtasks settle - }); - - it('should handle 1ms timeout', async () => { - - // Very short timeout - likely to fail on slow endpoint - const api = new FetchEngine({ - baseUrl: testUrl, - dedupePolicy: true - }); - - // Start a slow request (1000ms endpoint) with 1ms timeout - const [, err] = await attempt(() => api.get('/wait', { timeout: 1 })); - - expect(err).to.exist; - expect(err).to.be.instanceOf(FetchError); - - const fetchErr = err as FetchError; - expect(fetchErr.aborted).to.be.true; - expect(fetchErr.step).to.equal('fetch'); - - api.destroy(); - await wait(10); // Let microtasks settle - }); - - it('should handle negative timeout values', async () => { - - // Negative timeout throws assertion error at request time (validation) - const api = new FetchEngine({ - baseUrl: testUrl, - dedupePolicy: true - }); - - const [, err] = await attempt(() => api.get('/json', { timeout: -1 })); - - expect(err).to.exist; - expect(err).to.be.instanceOf(Error); - expect((err as Error).message).to.include('non-negative'); - - api.destroy(); - }); - - it('should handle Infinity timeout', async () => { - - // Infinity is coerced by Node.js to 1ms (see TimeoutOverflowWarning) - // so the request effectively has a very short timeout - const api = new FetchEngine({ - baseUrl: testUrl, - dedupePolicy: true - }); - - // Use a slow endpoint so the 1ms timeout triggers an abort - const [, err] = await attempt(() => api.get('/wait', { timeout: Infinity })); - - // Infinity causes immediate abort due to Node.js 32-bit overflow - expect(err).to.exist; - expect(err).to.be.instanceOf(FetchError); - expect((err as FetchError).aborted).to.be.true; - - api.destroy(); - await wait(10); // Let microtasks settle - }); - - 
it('should handle NaN timeout', async () => { - - // NaN passes typeof check but fails >= 0 assertion - const api = new FetchEngine({ - baseUrl: testUrl, - dedupePolicy: true - }); - - const [, err] = await attempt(() => api.get('/json', { timeout: NaN })); - - expect(err).to.exist; - expect(err).to.be.instanceOf(Error); - expect((err as Error).message).to.include('non-negative'); - - api.destroy(); - }); - - it('should handle empty string baseUrl with absolute path', async () => { - - // Edge case: empty baseUrl is NOT valid - FetchEngine requires baseUrl - // This test verifies the validation error is thrown - const [, err] = attemptSync(() => new FetchEngine({ - baseUrl: '', - dedupePolicy: true - })); - - expect(err).to.exist; - expect(err).to.be.instanceOf(Error); - expect((err as Error).message).to.include('baseUrl'); - }); - }); - - // ======================================================================== - // CONFIGURATION VALIDATION - // ======================================================================== - - describe('configuration validation', () => { - - it('should handle both serializers throwing', async () => { - - // Both dedupe and cache serializers throw - const api = new FetchEngine({ - baseUrl: testUrl, - dedupePolicy: { - enabled: true, - serializer: () => { - - throw new Error('Dedupe serializer failed'); - } - }, - cachePolicy: { - enabled: true, - serializer: () => { - - throw new Error('Cache serializer failed'); - } - } - }); - - // First error encountered should be thrown - const [, err] = await attempt(() => api.get('/json')); - - expect(err).to.exist; - expect(err).to.be.instanceOf(Error); - // Either dedupe or cache serializer error - expect(err!.message).to.match(/serializer failed/i); - - api.destroy(); - }); - - it('should handle dedupe enabled with cache disabled and vice versa', async () => { - - // Dedupe enabled, cache explicitly disabled - const api1 = new FetchEngine({ - baseUrl: testUrl, - dedupePolicy: { enabled: true 
}, - cachePolicy: { enabled: false } - }); - - const path1 = `/test-dedupe-${Date.now()}`; - - const [r1, r2] = await Promise.all([ - api1.get(path1), - api1.get(path1) - ]); - - expect(r1.status).to.equal(200); - expect(r2.status).to.equal(200); - - api1.destroy(); - - // Cache enabled, dedupe explicitly disabled - const api2 = new FetchEngine({ - baseUrl: testUrl, - dedupePolicy: { enabled: false }, - cachePolicy: { enabled: true } - }); - - const path2 = `/test-cache-${Date.now()}`; - - await api2.get(path2); - const r3 = await api2.get(path2); - - expect(r3.status).to.equal(200); - - api2.destroy(); - }); - - it('should handle conflicting method configurations', async () => { - - // GET enabled for dedupe but disabled for cache - const api = new FetchEngine({ - baseUrl: testUrl, - dedupePolicy: { - enabled: true, - methods: ['GET', 'POST'] - }, - cachePolicy: { - enabled: true, - methods: ['POST', 'PUT'] // GET not cached - } - }); - - const dedupeEvents: string[] = []; - const cacheEvents: string[] = []; - - api.on('fetch-dedupe-start', () => dedupeEvents.push('dedupe')); - api.on('fetch-cache-set', () => cacheEvents.push('cache')); - - const path = `/test-conflict-${Date.now()}`; - - // GET request: should dedupe but not cache - await Promise.all([ - api.get(path), - api.get(path) - ]); - - expect(dedupeEvents.length).to.equal(1); // Deduped - expect(cacheEvents.length).to.equal(0); // Not cached - - api.destroy(); - }); - - it('should handle invalid configuration objects (null)', async () => { - - // Null config objects - const [, err] = attemptSync(() => { - - new FetchEngine({ - baseUrl: testUrl, - dedupePolicy: null as any, - cachePolicy: null as any - }); - }); - - // May fail during construction or treat null as disabled - // Either way, shouldn't crash - expect(err || true).to.exist; - }); - - it('should handle invalid configuration objects (undefined)', async () => { - - // Explicitly undefined config (vs omitted) - const api = new FetchEngine({ - 
baseUrl: testUrl, - dedupePolicy: undefined, - cachePolicy: undefined - }); - - const path = `/test-undefined-${Date.now()}`; - - // Should work with features disabled - const res = await api.get(path); - - expect(res.status).to.equal(200); - - api.destroy(); - }); - - it('should handle empty config objects', async () => { - - // Empty config objects - const api = new FetchEngine({ - baseUrl: testUrl, - dedupePolicy: {}, - cachePolicy: {} - }); - - const path = `/test-empty-${Date.now()}`; - - // Should work with default behavior - const res = await api.get(path); - - expect(res.status).to.equal(200); - - api.destroy(); - }); - }); - - // ======================================================================== - // STATE MANAGEMENT - // ======================================================================== - - describe('state management', () => { - - it('should handle addHeader during in-flight request', async () => { - - const api = new FetchEngine({ - baseUrl: testUrl, - dedupePolicy: { enabled: true } - }); - - const dedupeEvents: string[] = []; - api.on('fetch-dedupe-start', () => dedupeEvents.push('start')); - api.on('fetch-dedupe-join', () => dedupeEvents.push('join')); - - // Start first request (slow endpoint) - const path = `/slow-success/100-${Date.now()}`; - const promise1 = api.get(path); - - await wait(10); - - // Add header mid-flight (shouldn't affect in-flight request key) - api.addHeader('X-Mid-Flight', 'true'); - - // Start second request to SAME path - should dedupe since key is based on path+method - const promise2 = api.get(path); - - const [result1] = await attempt(() => promise1); - const [result2] = await attempt(() => promise2); - - expect(result1?.data).to.have.property('ok', true); - expect(result2?.data).to.have.property('ok', true); - - // Should have deduped (1 start, 1 join) - expect(dedupeEvents).to.include('start'); - - api.destroy(); - await wait(10); // Let microtasks settle before test ends - }); - - it('should handle destroy 
called twice without crashing', async () => { - - const api = new FetchEngine({ - baseUrl: testUrl, - dedupePolicy: { enabled: true } - }); - - // First destroy should succeed - const [, err1] = attemptSync(() => api.destroy()); - expect(err1).to.be.null; - - // Second destroy should not crash - const [, err2] = attemptSync(() => api.destroy()); - expect(err2).to.be.null; - }); - - it('should handle flaky endpoint with retry disabled', async () => { - - const api = new FetchEngine({ - baseUrl: testUrl, - retry: false - }); - - // First request succeeds (flaky succeeds first time) - const [result1, err1] = await attempt(() => api.get('/flaky')); - expect(err1).to.be.null; - expect(result1?.data).to.have.property('ok', true); - - // Second request fails (flaky fails after first) - const [, err2] = await attempt(() => api.get('/flaky')); - expect(err2).to.be.instanceOf(FetchError); - - api.destroy(); - await wait(10); // Let microtasks settle before test ends - }); - - it('should handle sequential success after initial failure', async () => { - - const api = new FetchEngine({ - baseUrl: testUrl, - retry: false - }); - - // First request fails - const [, err1] = await attempt(() => api.get('/fail-once')); - expect(err1).to.be.instanceOf(FetchError); - - // Second request succeeds - const [result2, err2] = await attempt(() => api.get('/fail-once')); - expect(err2).to.be.null; - expect(result2?.data).to.have.property('ok', true); - - // Third request also succeeds - const [result3, err3] = await attempt(() => api.get('/fail-once')); - expect(err3).to.be.null; - expect(result3?.data).to.have.property('ok', true); - - api.destroy(); - await wait(10); // Let microtasks settle before test ends - }); - }); - - // ======================================================================== - // FEATURE COMBINATIONS - // ======================================================================== - - describe('feature combinations', () => { - - it('should work with deduplication and 
timeout', async () => { - - const api = new FetchEngine({ - baseUrl: testUrl, - timeout: 500, - dedupePolicy: { enabled: true, methods: ['GET'] } - }); - - const events: string[] = []; - api.on('fetch-dedupe-start', () => events.push('dedupe-start')); - api.on('fetch-dedupe-join', () => events.push('dedupe-join')); - - const path = '/wait'; - - // Make two concurrent requests that will both timeout - const promise1 = attempt(() => api.get(path)); - const promise2 = attempt(() => api.get(path)); - - const [[_r1, e1], [_r2, e2]] = await Promise.all([promise1, promise2]); - - // Both should timeout since /wait takes 1000ms and timeout is 500ms - expect(e1).to.be.instanceOf(Error); - expect(e2).to.be.instanceOf(Error); - expect(events).to.include('dedupe-start'); - expect(events).to.include('dedupe-join'); - - api.destroy(); - }); - - it('should work with caching and timeout', async () => { - - const api = new FetchEngine({ - baseUrl: testUrl, - timeout: 500, - cachePolicy: { enabled: true, methods: ['GET'], ttl: 5000 } - }); - - const events: string[] = []; - api.on('fetch-cache-miss', () => events.push('cache-miss')); - - // First request succeeds and caches - const path = `/test-cache-timeout-${Date.now()}`; - const [r1] = await attempt(() => api.get(path)); - expect(r1).to.exist; - expect(events).to.include('cache-miss'); - - // Second request hits cache, no timeout issue - events.length = 0; - const [r2] = await attempt(() => api.get(path)); - expect(r2).to.exist; - expect(events).to.not.include('cache-miss'); - - api.destroy(); - }); - }); - - // ======================================================================== - // RESPONSE STRUCTURE VALIDATION - // ======================================================================== - - describe('response structure validation', () => { - - it('should validate response structure consistency across features', async () => { - - // Cross-checks that all features return consistent FetchResponse structure - const configs = 
[ - { name: 'plain', config: {} }, - { name: 'cache', config: { cachePolicy: true } }, - { name: 'dedupe', config: { dedupePolicy: true } }, - { name: 'both', config: { cachePolicy: true, dedupePolicy: true } } - ]; - - for (const { name, config } of configs) { - - const api = new FetchEngine({ - baseUrl: testUrl, - ...config - }); - - const path = `/test-structure-${name}-${Date.now()}`; - const response = await api.get(path); - - // Validate FetchResponse structure - expect(response.data, `${name}: data should exist`).to.exist; - expect(response.status, `${name}: status should exist`).to.be.a('number'); - expect(response.headers, `${name}: headers should exist`).to.be.an('object'); - expect(response.request, `${name}: request should exist`).to.be.instanceOf(Request); - expect(response.config, `${name}: config should exist`).to.be.an('object'); - - // Validate all keys present - const keys = Object.keys(response).sort(); - expect(keys, `${name}: should have all FetchResponse keys`) - .to.deep.equal(['config', 'data', 'headers', 'request', 'status']); - - api.destroy(); - } - }); - - it('should produce consistent results with retry on vs off (successful request)', async () => { - - // Cross-checks that retry doesn't alter results for successful requests - const apiWithRetry = new FetchEngine({ - baseUrl: testUrl, - retry: { maxAttempts: 3 } - }); - - const apiWithoutRetry = new FetchEngine({ - baseUrl: testUrl, - retry: false - }); - - const path = `/test-retry-${Date.now()}`; - - const r1 = await apiWithRetry.get(path); - const r2 = await apiWithoutRetry.get(path); - - // Results should be identical - expect(r1.data, 'data should be equal').to.deep.equal(r2.data); - expect(r1.status, 'status should be equal').to.equal(r2.status); - - apiWithRetry.destroy(); - apiWithoutRetry.destroy(); - }); - }); - - // ======================================================================== - // PERFORMANCE AND LOAD - // 
======================================================================== - - describe('performance and load', () => { - - it('should handle 100+ concurrent requests without issues', async () => { - - const api = new FetchEngine({ - baseUrl: testUrl, - dedupePolicy: { enabled: true } - }); - - // Launch 100 concurrent requests to unique paths - const requests = Array.from({ length: 100 }, (_, i) => - api.get(`/json-${i}-${Date.now()}`) - ); - - const results = await Promise.allSettled(requests); - - const successful = results.filter(r => r.status === 'fulfilled').length; - const failed = results.filter(r => r.status === 'rejected').length; - - // All requests should succeed - expect(successful).to.equal(100); - expect(failed).to.equal(0); - - // Verify no memory leaks - all inflight requests should be cleared - const stats = api.cacheStats(); - expect(stats.inflightCount).to.equal(0); - - api.destroy(); - }); - - it('should handle large request payloads (1MB+)', async () => { - - const api = new FetchEngine({ - baseUrl: testUrl - }); - - // Generate a large payload (1MB of data) - const largePayload = { - data: 'x'.repeat(1024 * 1024), - metadata: { size: '1MB' } - }; - - const path = `/large-payload`; - - // POST with large payload - const [_, err] = await attempt(() => - api.post(path, largePayload) - ); - - // The request should be handled (server may reject, but engine should process) - // We're testing that the engine doesn't crash with large payloads - expect(err).to.satisfy((e: any) => { - - return e === null || e instanceof FetchError; - }); - - api.destroy(); - }); - - it('should handle large response bodies (1MB+)', async () => { - - const api = new FetchEngine({ - baseUrl: testUrl, - cachePolicy: { - enabled: true, - ttl: 5000, - methods: ['POST'] - } - }); - - server - - // Generate a large payload (1MB of data) - const largeData = { data: 'x'.repeat(1024 * 1024) }; - - // Mock server would need to return this, but we can test the handling - // For this 
test, we verify the engine can process large responses - const path = `/large-payload`; - - const [response, err] = await attempt(() => api.post(path, largeData)); - - expect(err).to.be.null; - expect(response).to.not.be.undefined; - - // Verify cache can handle it - const stats = api.cacheStats(); - expect(stats.cacheSize).to.be.greaterThan(0); - - api.destroy(); - }); - - it('should measure throughput (requests/sec)', async () => { - - const api = new FetchEngine({ - baseUrl: testUrl, - dedupePolicy: { enabled: false }, - cachePolicy: { enabled: false } - }); - - const requestCount = 500; - const start = Date.now(); - - // Launch sequential requests to measure baseline throughput - const requests = Array.from({ length: requestCount }, (_, i) => - api.get(`/json-throughput-${i}-${Date.now()}`) - ); - - await Promise.allSettled(requests); - - const elapsed = Date.now() - start; - const requestsPerSec = (requestCount / elapsed) * 1000; - - // Should handle at least 500 requests/sec (conservative threshold) - expect(requestsPerSec).to.be.greaterThan(500); - - api.destroy(); - }); - - it('should handle destroy during request initiation', async () => { - - const api = new FetchEngine({ - baseUrl: testUrl - }); - - const path = `/wait-success`; - - // Start a long-running request - const requestPromise = api.get(path); - - // Immediately destroy the instance (race condition) - await wait(10); - api.destroy(); - - // Request should either complete or fail gracefully - const [, err] = await attempt(() => requestPromise); - - // System should handle gracefully - expect(err).to.satisfy((e: any) => { - - return e === null || e instanceof Error; - }); - }); - - it('should handle multiple engines hitting same endpoint', async () => { - - // Create two separate FetchEngine instances - const api1 = new FetchEngine({ - baseUrl: testUrl, - dedupePolicy: { enabled: true }, - cachePolicy: { enabled: true, ttl: 5000 } - }); - - const api2 = new FetchEngine({ - baseUrl: testUrl, - 
dedupePolicy: { enabled: true }, - cachePolicy: { enabled: true, ttl: 5000 } - }); - - const path = `/test-multi-engine-${Date.now()}`; - - // Both engines hit the same endpoint concurrently - const [r1, r2] = await Promise.all([ - api1.get(path), - api2.get(path) - ]); - - // Both should succeed independently - expect(r1.data).to.deep.equal({ ok: true, path }); - expect(r2.data).to.deep.equal({ ok: true, path }); - - // Each engine maintains its own cache - const stats1 = api1.cacheStats(); - const stats2 = api2.cacheStats(); - - expect(stats1.cacheSize).to.be.greaterThan(0); - expect(stats2.cacheSize).to.be.greaterThan(0); - expect(stats1.inflightCount).to.equal(0); - expect(stats2.inflightCount).to.equal(0); - - api1.destroy(); - api2.destroy(); - }); - - it('should handle multiple concurrent request failures', async () => { - - const api = new FetchEngine({ - baseUrl: testUrl, - dedupePolicy: { enabled: false } - }); - - // Launch many concurrent requests to failing endpoint - const requests = Array.from({ length: 500 }, (_, i) => - attempt(() => api.get(`/fail`)) - ); - - const results = await Promise.all(requests); - - // All should fail gracefully - results.forEach(([_, err]) => { - - expect(err).to.not.be.null; - expect(err).to.be.instanceOf(FetchError); - }); - - // No inflight leaks despite mass failures - const stats = api.cacheStats(); - expect(stats.inflightCount).to.equal(0); - - api.destroy(); - }); - - it('should handle mixed success and failure in concurrent batch', async () => { - - const api = new FetchEngine({ - baseUrl: testUrl, - dedupePolicy: { enabled: false } - }); - - // Mix of successful and failing requests - const requests = [ - attempt(() => api.get(`/json`)), - attempt(() => api.get(`/fail`)), - attempt(() => api.get(`/json`)), - attempt(() => api.get(`/fail`)), - attempt(() => api.get(`/json`)) - ]; - - const results = await Promise.all(requests); - - // Count successes and failures - const successes = results.filter(([_, e]) => e 
=== null).length; - const failures = results.filter(([_, e]) => e !== null).length; - - expect(successes).to.equal(3); - expect(failures).to.equal(2); - - // No inflight leaks - const stats = api.cacheStats(); - expect(stats.inflightCount).to.equal(0); - - api.destroy(); - }); - }); - }); diff --git a/tests/src/fetch/engine/integration.test.ts b/tests/src/fetch/engine/integration.test.ts new file mode 100644 index 0000000..a2f0fb6 --- /dev/null +++ b/tests/src/fetch/engine/integration.test.ts @@ -0,0 +1,73 @@ +import { + describe, + it, + expect +} from 'vitest' + +import { + FetchEngine, +} from '../../../../packages/fetch/src/index.ts'; + +import { attempt } from '../../../../packages/utils/src/index.ts'; +import { makeTestStubs } from '../_helpers.ts'; + + +describe('FetchEngine: feature combinations', async () => { + + const { testUrl } = await makeTestStubs(4133); + + it('should work with deduplication and timeout', async () => { + + const api = new FetchEngine({ + baseUrl: testUrl, + totalTimeout: 500, + dedupePolicy: { enabled: true, methods: ['GET'] } + }); + + const events: string[] = []; + api.on('dedupe-start', () => events.push('dedupe-start')); + api.on('dedupe-join', () => events.push('dedupe-join')); + + const path = '/wait'; + + // Make two concurrent requests that will both timeout + const promise1 = attempt(() => api.get(path)); + const promise2 = attempt(() => api.get(path)); + + const [[_r1, e1], [_r2, e2]] = await Promise.all([promise1, promise2]); + + // Both should timeout since /wait takes 1000ms and timeout is 500ms + expect(e1).to.be.instanceOf(Error); + expect(e2).to.be.instanceOf(Error); + expect(events).to.include('dedupe-start'); + expect(events).to.include('dedupe-join'); + + api.destroy(); + }); + + it('should work with caching and timeout', async () => { + + const api = new FetchEngine({ + baseUrl: testUrl, + totalTimeout: 500, + cachePolicy: { enabled: true, methods: ['GET'], ttl: 5000 } + }); + + const events: string[] = []; + 
api.on('cache-miss', () => events.push('cache-miss')); + + // First request succeeds and caches + const path = `/test-cache-timeout-${Date.now()}`; + const [r1] = await attempt(() => api.get(path)); + expect(r1).to.exist; + expect(events).to.include('cache-miss'); + + // Second request hits cache, no timeout issue + events.length = 0; + const [r2] = await attempt(() => api.get(path)); + expect(r2).to.exist; + expect(events).to.not.include('cache-miss'); + + api.destroy(); + }); +}); diff --git a/tests/src/fetch/engine/lifecycle.test.ts b/tests/src/fetch/engine/lifecycle.test.ts new file mode 100644 index 0000000..23d314a --- /dev/null +++ b/tests/src/fetch/engine/lifecycle.test.ts @@ -0,0 +1,116 @@ +import { + describe, + it, + expect +} from 'vitest' + +import { + FetchError, + FetchEngine, +} from '../../../../packages/fetch/src/index.ts'; + +import { attempt, attemptSync, wait } from '../../../../packages/utils/src/index.ts'; +import { makeTestStubs } from '../_helpers.ts'; + + +describe('FetchEngine: lifecycle and state management', async () => { + + const { testUrl } = await makeTestStubs(4132); + + it('should handle addHeader during in-flight request', async () => { + + const api = new FetchEngine({ + baseUrl: testUrl, + dedupePolicy: { enabled: true } + }); + + const dedupeEvents: string[] = []; + api.on('dedupe-start', () => dedupeEvents.push('start')); + api.on('dedupe-join', () => dedupeEvents.push('join')); + + // Start first request (slow endpoint) + const path = `/slow-success/100-${Date.now()}`; + const promise1 = api.get(path); + + await wait(10); + + // Add header mid-flight (shouldn't affect in-flight request key) + api.headers.set('X-Mid-Flight', 'true'); + + // Start second request to SAME path - should dedupe since key is based on path+method + const promise2 = api.get(path); + + const [result1] = await attempt(() => promise1); + const [result2] = await attempt(() => promise2); + + expect(result1?.data).to.have.property('ok', true); + 
expect(result2?.data).to.have.property('ok', true); + + // Should have deduped (1 start, 1 join) + expect(dedupeEvents).to.include('start'); + + api.destroy(); + await wait(10); // Let microtasks settle before test ends + }); + + it('should handle destroy called twice without crashing', async () => { + + const api = new FetchEngine({ + baseUrl: testUrl, + dedupePolicy: { enabled: true } + }); + + // First destroy should succeed + const [, err1] = attemptSync(() => api.destroy()); + expect(err1).to.be.null; + + // Second destroy should not crash + const [, err2] = attemptSync(() => api.destroy()); + expect(err2).to.be.null; + }); + + it('should handle flaky endpoint with retry disabled', async () => { + + const api = new FetchEngine({ + baseUrl: testUrl, + retry: false + }); + + // First request succeeds (flaky succeeds first time) + const [result1, err1] = await attempt(() => api.get('/flaky')); + expect(err1).to.be.null; + expect(result1?.data).to.have.property('ok', true); + + // Second request fails (flaky fails after first) + const [, err2] = await attempt(() => api.get('/flaky')); + expect(err2).to.be.instanceOf(FetchError); + + api.destroy(); + await wait(10); // Let microtasks settle before test ends + }); + + it('should handle sequential success after initial failure', async () => { + + const api = new FetchEngine({ + baseUrl: testUrl, + retry: false + }); + + // First request fails + const [, err1] = await attempt(() => api.get('/fail-once')); + expect(err1).to.be.instanceOf(FetchError); + + // Second request succeeds + const [result2, err2] = await attempt(() => api.get('/fail-once')); + expect(err2).to.be.null; + expect(result2?.data).to.have.property('ok', true); + + // Third request also succeeds + const [result3, err3] = await attempt(() => api.get('/fail-once')); + expect(err3).to.be.null; + expect(result3?.data).to.have.property('ok', true); + + api.destroy(); + await wait(10); // Let microtasks settle before test ends + }); +}); diff --git 
a/tests/src/fetch/engine/performance.test.ts b/tests/src/fetch/engine/performance.test.ts new file mode 100644 index 0000000..5733da9 --- /dev/null +++ b/tests/src/fetch/engine/performance.test.ts @@ -0,0 +1,258 @@ +import { + describe, + it, + expect +} from 'vitest' + +import { + FetchError, + FetchEngine, +} from '../../../../packages/fetch/src/index.ts'; + +import { attempt, wait } from '../../../../packages/utils/src/index.ts'; +import { makeTestStubs } from '../_helpers.ts'; + + +describe('FetchEngine: performance and load', async () => { + + const { testUrl, server: _server } = await makeTestStubs(4135); + + it('should handle 100+ concurrent requests without issues', async () => { + + const api = new FetchEngine({ + baseUrl: testUrl, + dedupePolicy: { enabled: true } + }); + + // Launch 100 concurrent requests to unique paths + const requests = Array.from({ length: 100 }, (_, i) => + api.get(`/json-${i}-${Date.now()}`) + ); + + const results = await Promise.allSettled(requests); + + const successful = results.filter(r => r.status === 'fulfilled').length; + const failed = results.filter(r => r.status === 'rejected').length; + + // All requests should succeed + expect(successful).to.equal(100); + expect(failed).to.equal(0); + + // Verify no memory leaks - all inflight requests should be cleared + const stats = api.cacheStats(); + expect(stats.inflightCount).to.equal(0); + + api.destroy(); + }); + + it('should handle large request payloads (1MB+)', async () => { + + const api = new FetchEngine({ + baseUrl: testUrl + }); + + // Generate a large payload (1MB of data) + const largePayload = { + data: 'x'.repeat(1024 * 1024), + metadata: { size: '1MB' } + }; + + const path = `/large-payload`; + + // POST with large payload + const [_, err] = await attempt(() => + api.post(path, largePayload) + ); + + // The request should be handled (server may reject, but engine should process) + // We're testing that the engine doesn't crash with large payloads + 
expect(err).to.satisfy((e: any) => { + + return e === null || e instanceof FetchError; + }); + + api.destroy(); + }); + + it('should handle large response bodies (1MB+)', async () => { + + const api = new FetchEngine({ + baseUrl: testUrl, + cachePolicy: { + enabled: true, + ttl: 5000, + methods: ['POST'] + } + }); + + // Generate a large payload (1MB of data) + const largeData = { data: 'x'.repeat(1024 * 1024) }; + + // Mock server would need to return this, but we can test the handling + // For this test, we verify the engine can process large responses + const path = `/large-payload`; + + const [response, err] = await attempt(() => api.post(path, largeData)); + + expect(err).to.be.null; + expect(response).to.not.be.undefined; + + // Verify cache can handle it + const stats = api.cacheStats(); + expect(stats.cacheSize).to.be.greaterThan(0); + + api.destroy(); + }); + + it('should measure throughput (requests/sec)', async () => { + + const api = new FetchEngine({ + baseUrl: testUrl, + dedupePolicy: { enabled: false }, + cachePolicy: { enabled: false } + }); + + const requestCount = 500; + const start = Date.now(); + + // Launch sequential requests to measure baseline throughput + const requests = Array.from({ length: requestCount }, (_, i) => + api.get(`/json-throughput-${i}-${Date.now()}`) + ); + + await Promise.allSettled(requests); + + const elapsed = Date.now() - start; + const requestsPerSec = (requestCount / elapsed) * 1000; + + // Should handle at least 500 requests/sec (conservative threshold) + expect(requestsPerSec).to.be.greaterThan(500); + + api.destroy(); + }); + + it('should handle destroy during request initiation', async () => { + + const api = new FetchEngine({ + baseUrl: testUrl + }); + + const path = `/wait-success`; + + // Start a long-running request + const requestPromise = api.get(path); + + // Immediately destroy the instance (race condition) + await wait(10); + api.destroy(); + + // Request should either complete or fail gracefully + const 
[, err] = await attempt(() => requestPromise); + + // System should handle gracefully + expect(err).to.satisfy((e: any) => { + + return e === null || e instanceof Error; + }); + }); + + it('should handle multiple engines hitting same endpoint', async () => { + + // Create two separate FetchEngine instances + const api1 = new FetchEngine({ + baseUrl: testUrl, + dedupePolicy: { enabled: true }, + cachePolicy: { enabled: true, ttl: 5000 } + }); + + const api2 = new FetchEngine({ + baseUrl: testUrl, + dedupePolicy: { enabled: true }, + cachePolicy: { enabled: true, ttl: 5000 } + }); + + const path = `/test-multi-engine-${Date.now()}`; + + // Both engines hit the same endpoint concurrently + const [r1, r2] = await Promise.all([ + api1.get(path), + api2.get(path) + ]); + + // Both should succeed independently + expect(r1.data).to.deep.equal({ ok: true, path }); + expect(r2.data).to.deep.equal({ ok: true, path }); + + // Each engine maintains its own cache + const stats1 = api1.cacheStats(); + const stats2 = api2.cacheStats(); + + expect(stats1.cacheSize).to.be.greaterThan(0); + expect(stats2.cacheSize).to.be.greaterThan(0); + expect(stats1.inflightCount).to.equal(0); + expect(stats2.inflightCount).to.equal(0); + + api1.destroy(); + api2.destroy(); + }); + + it('should handle multiple concurrent request failures', async () => { + + const api = new FetchEngine({ + baseUrl: testUrl, + dedupePolicy: { enabled: false } + }); + + // Launch many concurrent requests to failing endpoint + const requests = Array.from({ length: 500 }, () => + attempt(() => api.get(`/fail`)) + ); + + const results = await Promise.all(requests); + + // All should fail gracefully + results.forEach(([_, err]) => { + + expect(err).to.not.be.null; + expect(err).to.be.instanceOf(FetchError); + }); + + // No inflight leaks despite mass failures + const stats = api.cacheStats(); + expect(stats.inflightCount).to.equal(0); + + api.destroy(); + }); + + it('should handle mixed success and failure in concurrent 
batch', async () => { + + const api = new FetchEngine({ + baseUrl: testUrl, + dedupePolicy: { enabled: false } + }); + + // Mix of successful and failing requests + const requests = [ + attempt(() => api.get(`/json`)), + attempt(() => api.get(`/fail`)), + attempt(() => api.get(`/json`)), + attempt(() => api.get(`/fail`)), + attempt(() => api.get(`/json`)) + ]; + + const results = await Promise.all(requests); + + // Count successes and failures + const successes = results.filter(([_, e]) => e === null).length; + const failures = results.filter(([_, e]) => e !== null).length; + + expect(successes).to.equal(3); + expect(failures).to.equal(2); + + // No inflight leaks + const stats = api.cacheStats(); + expect(stats.inflightCount).to.equal(0); + + api.destroy(); + }); +}); diff --git a/tests/src/fetch/engine/request-init.test.ts b/tests/src/fetch/engine/request-init.test.ts new file mode 100644 index 0000000..e213213 --- /dev/null +++ b/tests/src/fetch/engine/request-init.test.ts @@ -0,0 +1,377 @@ +import { + describe, + it, + expect, + vi, + beforeEach, + afterEach, +} from 'vitest'; + +import { FetchEngine } from '../../../../packages/fetch/src/index.ts'; +import { makeTestStubs } from '../_helpers.ts'; + + +describe('FetchEngine: RequestInit options', async () => { + + const { testUrl } = await makeTestStubs(4140); + + let fetchSpy: ReturnType; + + beforeEach(() => { + + fetchSpy = vi.spyOn(globalThis, 'fetch'); + }); + + afterEach(() => { + + fetchSpy.mockRestore(); + }); + + describe('instance-level RequestInit options', () => { + + it('should pass credentials option to fetch', async () => { + + const api = new FetchEngine({ + baseUrl: testUrl, + credentials: 'include', + }); + + await api.get('/json'); + + expect(fetchSpy).toHaveBeenCalledTimes(1); + + const [, init] = fetchSpy.mock.calls[0]; + + expect(init?.credentials).to.equal('include'); + + api.destroy(); + }); + + it('should pass mode option to fetch', async () => { + + const api = new FetchEngine({ + 
baseUrl: testUrl, + mode: 'cors', + }); + + await api.get('/json'); + + expect(fetchSpy).toHaveBeenCalledTimes(1); + + const [, init] = fetchSpy.mock.calls[0]; + + expect(init?.mode).to.equal('cors'); + + api.destroy(); + }); + + it('should pass cache option to fetch', async () => { + + const api = new FetchEngine({ + baseUrl: testUrl, + cache: 'no-store', + }); + + await api.get('/json'); + + expect(fetchSpy).toHaveBeenCalledTimes(1); + + const [, init] = fetchSpy.mock.calls[0]; + + expect(init?.cache).to.equal('no-store'); + + api.destroy(); + }); + + it('should pass redirect option to fetch', async () => { + + const api = new FetchEngine({ + baseUrl: testUrl, + redirect: 'manual', + }); + + await api.get('/json'); + + expect(fetchSpy).toHaveBeenCalledTimes(1); + + const [, init] = fetchSpy.mock.calls[0]; + + expect(init?.redirect).to.equal('manual'); + + api.destroy(); + }); + + it('should pass referrerPolicy option to fetch', async () => { + + const api = new FetchEngine({ + baseUrl: testUrl, + referrerPolicy: 'no-referrer', + }); + + await api.get('/json'); + + expect(fetchSpy).toHaveBeenCalledTimes(1); + + const [, init] = fetchSpy.mock.calls[0]; + + expect(init?.referrerPolicy).to.equal('no-referrer'); + + api.destroy(); + }); + + it('should pass integrity option to fetch', async () => { + + // Mock fetch to avoid actual integrity validation + const mockFetch = vi.fn().mockResolvedValue( + new Response(JSON.stringify({ ok: true }), { + status: 200, + headers: { 'content-type': 'application/json' } + }) + ); + + fetchSpy.mockImplementation(mockFetch); + + const api = new FetchEngine({ + baseUrl: testUrl, + integrity: 'sha256-abc123', + }); + + await api.get('/json'); + + expect(mockFetch).toHaveBeenCalledTimes(1); + + const [, init] = mockFetch.mock.calls[0]; + + expect(init?.integrity).to.equal('sha256-abc123'); + + api.destroy(); + }); + + it('should pass keepalive option to fetch', async () => { + + const api = new FetchEngine({ + baseUrl: testUrl, + 
keepalive: true, + }); + + await api.get('/json'); + + expect(fetchSpy).toHaveBeenCalledTimes(1); + + const [, init] = fetchSpy.mock.calls[0]; + + expect(init?.keepalive).to.equal(true); + + api.destroy(); + }); + + it('should pass multiple RequestInit options to fetch', async () => { + + const api = new FetchEngine({ + baseUrl: testUrl, + credentials: 'include', + mode: 'cors', + cache: 'no-cache', + redirect: 'follow', + referrerPolicy: 'strict-origin', + }); + + await api.get('/json'); + + expect(fetchSpy).toHaveBeenCalledTimes(1); + + const [, init] = fetchSpy.mock.calls[0]; + + expect(init?.credentials).to.equal('include'); + expect(init?.mode).to.equal('cors'); + expect(init?.cache).to.equal('no-cache'); + expect(init?.redirect).to.equal('follow'); + expect(init?.referrerPolicy).to.equal('strict-origin'); + + api.destroy(); + }); + }); + + describe('per-request RequestInit overrides', () => { + + it('should allow per-request override of credentials', async () => { + + const api = new FetchEngine({ + baseUrl: testUrl, + credentials: 'include', + }); + + await api.get('/json', { credentials: 'same-origin' }); + + expect(fetchSpy).toHaveBeenCalledTimes(1); + + const [, init] = fetchSpy.mock.calls[0]; + + expect(init?.credentials).to.equal('same-origin'); + + api.destroy(); + }); + + it('should allow per-request override of mode', async () => { + + const api = new FetchEngine({ + baseUrl: testUrl, + mode: 'cors', + }); + + await api.get('/json', { mode: 'same-origin' }); + + expect(fetchSpy).toHaveBeenCalledTimes(1); + + const [, init] = fetchSpy.mock.calls[0]; + + expect(init?.mode).to.equal('same-origin'); + + api.destroy(); + }); + + it('should allow per-request override of cache', async () => { + + const api = new FetchEngine({ + baseUrl: testUrl, + cache: 'default', + }); + + await api.get('/json', { cache: 'reload' }); + + expect(fetchSpy).toHaveBeenCalledTimes(1); + + const [, init] = fetchSpy.mock.calls[0]; + + expect(init?.cache).to.equal('reload'); + + 
api.destroy(); + }); + + it('should use instance default when per-request option not provided', async () => { + + const api = new FetchEngine({ + baseUrl: testUrl, + credentials: 'include', + mode: 'cors', + }); + + // Only override credentials, mode should stay as instance default + await api.get('/json', { credentials: 'omit' }); + + expect(fetchSpy).toHaveBeenCalledTimes(1); + + const [, init] = fetchSpy.mock.calls[0]; + + expect(init?.credentials).to.equal('omit'); + expect(init?.mode).to.equal('cors'); + + api.destroy(); + }); + + it('should allow per-request options when no instance defaults', async () => { + + const api = new FetchEngine({ + baseUrl: testUrl, + }); + + await api.get('/json', { + credentials: 'include', + cache: 'no-store', + }); + + expect(fetchSpy).toHaveBeenCalledTimes(1); + + const [, init] = fetchSpy.mock.calls[0]; + + expect(init?.credentials).to.equal('include'); + expect(init?.cache).to.equal('no-store'); + + api.destroy(); + }); + }); + + describe('RequestInit options across HTTP methods', () => { + + it('should pass RequestInit options for POST requests', async () => { + + const api = new FetchEngine({ + baseUrl: testUrl, + credentials: 'include', + }); + + await api.post('/json', { data: 'test' }); + + expect(fetchSpy).toHaveBeenCalledTimes(1); + + const [, init] = fetchSpy.mock.calls[0]; + + expect(init?.credentials).to.equal('include'); + expect(init?.method).to.equal('POST'); + + api.destroy(); + }); + + it('should pass RequestInit options for PUT requests', async () => { + + const api = new FetchEngine({ + baseUrl: testUrl, + mode: 'cors', + }); + + await api.put('/json', { data: 'test' }); + + expect(fetchSpy).toHaveBeenCalledTimes(1); + + const [, init] = fetchSpy.mock.calls[0]; + + expect(init?.mode).to.equal('cors'); + expect(init?.method).to.equal('PUT'); + + api.destroy(); + }); + + it('should pass RequestInit options for DELETE requests', async () => { + + const api = new FetchEngine({ + baseUrl: testUrl, + cache: 
'no-cache', + }); + + await api.delete('/json'); + + expect(fetchSpy).toHaveBeenCalledTimes(1); + + const [, init] = fetchSpy.mock.calls[0]; + + expect(init?.cache).to.equal('no-cache'); + expect(init?.method).to.equal('DELETE'); + + api.destroy(); + }); + }); + + describe('RequestInit options with modifyConfig', () => { + + it('should allow modifyConfig to set RequestInit options', async () => { + + const api = new FetchEngine({ + baseUrl: testUrl, + modifyConfig: (opts) => ({ + ...opts, + credentials: 'include' as RequestCredentials, + }), + }); + + await api.get('/json'); + + expect(fetchSpy).toHaveBeenCalledTimes(1); + + const [, init] = fetchSpy.mock.calls[0]; + + expect(init?.credentials).to.equal('include'); + + api.destroy(); + }); + }); +}); diff --git a/tests/src/fetch/engine/response.test.ts b/tests/src/fetch/engine/response.test.ts new file mode 100644 index 0000000..65dc4eb --- /dev/null +++ b/tests/src/fetch/engine/response.test.ts @@ -0,0 +1,79 @@ +import { + describe, + it, + expect +} from 'vitest' + +import { + FetchEngine, +} from '../../../../packages/fetch/src/index.ts'; + +import { makeTestStubs } from '../_helpers.ts'; + + +describe('FetchEngine: response structure validation', async () => { + + const { testUrl } = await makeTestStubs(4134); + + it('should validate response structure consistency across features', async () => { + + // Cross-checks that all features return consistent FetchResponse structure + const configs = [ + { name: 'plain', config: {} }, + { name: 'cache', config: { cachePolicy: true } }, + { name: 'dedupe', config: { dedupePolicy: true } }, + { name: 'both', config: { cachePolicy: true, dedupePolicy: true } } + ]; + + for (const { name, config } of configs) { + + const api = new FetchEngine({ + baseUrl: testUrl, + ...config + }); + + const path = `/test-structure-${name}-${Date.now()}`; + const response = await api.get(path); + + // Validate FetchResponse structure + expect(response.data, `${name}: data should 
exist`).to.exist; + expect(response.status, `${name}: status should exist`).to.be.a('number'); + expect(response.headers, `${name}: headers should exist`).to.be.an('object'); + expect(response.request, `${name}: request should exist`).to.be.instanceOf(Request); + expect(response.config, `${name}: config should exist`).to.be.an('object'); + + // Validate all keys present + const keys = Object.keys(response).sort(); + expect(keys, `${name}: should have all FetchResponse keys`) + .to.deep.equal(['config', 'data', 'headers', 'request', 'status']); + + api.destroy(); + } + }); + + it('should produce consistent results with retry on vs off (successful request)', async () => { + + // Cross-checks that retry doesn't alter results for successful requests + const apiWithRetry = new FetchEngine({ + baseUrl: testUrl, + retry: { maxAttempts: 3 } + }); + + const apiWithoutRetry = new FetchEngine({ + baseUrl: testUrl, + retry: false + }); + + const path = `/test-retry-${Date.now()}`; + + const r1 = await apiWithRetry.get(path); + const r2 = await apiWithoutRetry.get(path); + + // Results should be identical + expect(r1.data, 'data should be equal').to.deep.equal(r2.data); + expect(r1.status, 'status should be equal').to.equal(r2.status); + + apiWithRetry.destroy(); + apiWithoutRetry.destroy(); + }); +}); diff --git a/tests/src/fetch/retry.ts b/tests/src/fetch/executor/retry.test.ts similarity index 91% rename from tests/src/fetch/retry.ts rename to tests/src/fetch/executor/retry.test.ts index b7a2b4c..dd43284 100644 --- a/tests/src/fetch/retry.ts +++ b/tests/src/fetch/executor/retry.test.ts @@ -9,11 +9,11 @@ import { import { FetchError, FetchEngine, -} from '../../../packages/fetch/src/index.ts'; +} from '../../../../packages/fetch/src/index.ts'; -import { attempt } from '../../../packages/utils/src/index.ts'; -import { sandbox } from '../_helpers.ts'; -import { makeTestStubs } from './_helpers.ts'; +import { attempt } from '../../../../packages/utils/src/index.ts'; +import { 
sandbox } from '../../_helpers.ts'; +import { makeTestStubs } from '../_helpers.ts'; describe('@logosdx/fetch: retry', async () => { @@ -151,7 +151,7 @@ describe('@logosdx/fetch: retry', async () => { const onError = sandbox.stub(); - api.on('fetch-error', onError); + api.on('error', onError); await attempt(() => api.get('/validate?name=&age=17')) @@ -172,7 +172,7 @@ describe('@logosdx/fetch: retry', async () => { }, }); - api.on('fetch-error', onError); + api.on('error', onError); // First request fails with 400 (validation error) - should retry 3 times await attempt(() => api.get('/validate?name=&age=17')) @@ -214,7 +214,7 @@ describe('@logosdx/fetch: retry', async () => { const onError = sandbox.stub(); - api.on('fetch-error', onError); + api.on('error', onError); const start = Date.now(); @@ -264,7 +264,7 @@ describe('@logosdx/fetch: retry', async () => { const onError = sandbox.stub(); - const reqConfig: FetchEngine.CallOptions = { + const reqConfig: FetchEngine.CallConfig = { retry: { maxAttempts: 2, baseDelay: 10, @@ -301,7 +301,7 @@ describe('@logosdx/fetch: retry', async () => { const onError = sandbox.stub(); - api.on('fetch-error', onError); + api.on('error', onError); await attempt(() => api.get('/rate-limit')) @@ -324,8 +324,8 @@ describe('@logosdx/fetch: retry', async () => { }); const events: string[] = []; - api.on('fetch-dedupe-start', () => events.push('dedupe-start')); - api.on('fetch-retry', () => events.push('retry')); + api.on('dedupe-start', () => events.push('dedupe-start')); + api.on('retry', () => events.push('retry')); const path = '/fail-once'; @@ -353,9 +353,9 @@ describe('@logosdx/fetch: retry', async () => { }); const events: string[] = []; - api.on('fetch-dedupe-start', () => events.push('dedupe-start')); - api.on('fetch-dedupe-join', () => events.push('dedupe-join')); - api.on('fetch-retry', () => events.push('retry')); + api.on('dedupe-start', () => events.push('dedupe-start')); + api.on('dedupe-join', () => 
events.push('dedupe-join')); + api.on('retry', () => events.push('retry')); // Request A starts - will timeout on first attempt, retry const promiseA = api.get('/slow-success/100'); // 100ms response, 50ms timeout = timeout @@ -393,14 +393,14 @@ describe('@logosdx/fetch: retry', async () => { const api = new FetchEngine({ baseUrl: testUrl, - timeout: 50, // 50ms timeout + totalTimeout: 50, // 50ms timeout retry: { maxAttempts: 3, baseDelay: 10, }, }); - api.on('fetch-before', () => attemptCount++); + api.on('before-request', () => attemptCount++); // /slow-success/200 waits 200ms, so 50ms timeout will trigger const [, err] = await attempt(() => api.get('/slow-success/200')); @@ -436,7 +436,7 @@ describe('@logosdx/fetch: retry', async () => { }, }); - api.on('fetch-before', () => attemptCount++); + api.on('before-request', () => attemptCount++); // /slow-success/200 waits 200ms, so 50ms attemptTimeout will trigger each time const [, err] = await attempt(() => api.get('/slow-success/200')); @@ -452,7 +452,7 @@ describe('@logosdx/fetch: retry', async () => { api.destroy(); }); - it('emits fetch-retry event for timed out requests when using attemptTimeout', async () => { + it('emits retry event for timed out requests when using attemptTimeout', async () => { const retryEvents: any[] = []; @@ -466,7 +466,7 @@ describe('@logosdx/fetch: retry', async () => { }, }); - api.on('fetch-retry', (data) => retryEvents.push(data)); + api.on('retry', (data) => retryEvents.push(data)); await attempt(() => api.get('/slow-success/200')); @@ -474,6 +474,8 @@ describe('@logosdx/fetch: retry', async () => { expect(retryEvents).to.have.length(1); expect(retryEvents[0].attempt).to.eq(1); expect(retryEvents[0].nextAttempt).to.eq(2); + expect(retryEvents[0].requestStart).to.be.a('number'); + expect(retryEvents[0].requestEnd).to.not.exist; api.destroy(); }); @@ -484,14 +486,14 @@ describe('@logosdx/fetch: retry', async () => { const api = new FetchEngine({ baseUrl: testUrl, - timeout: 50, + 
totalTimeout: 50, retry: { maxAttempts: 1, // No retries baseDelay: 10, }, }); - api.on('fetch-abort', (data) => abortEvents.push(data)); + api.on('abort', (data) => abortEvents.push(data)); await attempt(() => api.get('/slow-success/200')); @@ -510,7 +512,7 @@ describe('@logosdx/fetch: retry', async () => { const api = new FetchEngine({ baseUrl: testUrl, - timeout: 150, // Timeout longer than first attempt but shorter than total + totalTimeout: 150, // Timeout longer than first attempt but shorter than total retry: { maxAttempts: 3, baseDelay: 10, @@ -518,7 +520,7 @@ describe('@logosdx/fetch: retry', async () => { }, }); - api.on('fetch-before', () => attemptCount++); + api.on('before-request', () => attemptCount++); // /slow-fail waits 200ms then returns 503 // With 150ms timeout, first attempt will timeout before server responds @@ -539,14 +541,14 @@ describe('@logosdx/fetch: retry', async () => { const api = new FetchEngine({ baseUrl: testUrl, - timeout: 500, // Instance timeout: 500ms (would succeed) + totalTimeout: 500, // Instance timeout: 500ms (would succeed) retry: { maxAttempts: 3, baseDelay: 10, }, }); - api.on('fetch-before', () => attemptCount++); + api.on('before-request', () => attemptCount++); // Override with shorter timeout that will fail const [, err] = await attempt(() => @@ -574,7 +576,7 @@ describe('@logosdx/fetch: retry', async () => { }, }); - api.on('fetch-before', () => attemptCount++); + api.on('before-request', () => attemptCount++); // Manual abort via returned promise's abort method const promise = api.get('/slow-success/500'); @@ -597,7 +599,7 @@ describe('@logosdx/fetch: retry', async () => { const api = new FetchEngine({ baseUrl: testUrl, - timeout: 50, + totalTimeout: 50, retry: { maxAttempts: 1, }, @@ -610,7 +612,7 @@ describe('@logosdx/fetch: retry', async () => { expect(onErrorStub.called).to.be.true; expect(onErrorStub.callCount).to.eq(1); - const error = onErrorStub.args[0][0] as FetchError; + const error = 
onErrorStub.args[0]![0] as FetchError; expect(error.aborted).to.be.true; expect(error.status).to.eq(499); @@ -636,14 +638,15 @@ describe('@logosdx/fetch: retry', async () => { }); const events: string[] = []; - api.on('fetch-retry', () => events.push('retry')); + api.on('retry', () => events.push('retry')); + // OPTIONS method name const methodName = method.toLowerCase() as Lowercase; const payload = (method === 'POST' || method === 'PUT' || method === 'PATCH') ? { data: 'test' } : undefined; - const methodFn = api[methodName] as any; + const methodFn = api[methodName as keyof typeof api] as any; // fail-once endpoint will fail first call, succeed on retry const [r1] = await attempt(() => methodFn.call(api, '/fail-once', payload as any)); @@ -672,7 +675,7 @@ describe('@logosdx/fetch: retry', async () => { }, }); - api.on('fetch-before', (data) => controllers.push(data.controller)); + api.on('before-request', (data) => controllers.push((data as any).controller)); await attempt(() => api.get('/slow-success/200')); @@ -721,7 +724,7 @@ describe('@logosdx/fetch: retry', async () => { }, }); - api.on('fetch-before', () => attemptCount++); + api.on('before-request', () => attemptCount++); // /slow-success/200 waits 200ms, so 50ms totalTimeout will trigger const [, err] = await attempt(() => api.get('/slow-success/200')); @@ -754,7 +757,7 @@ describe('@logosdx/fetch: retry', async () => { }, }); - api.on('fetch-before', () => attemptCount++); + api.on('before-request', () => attemptCount++); // With 30ms totalTimeout, the first attempt should be cut short const [, err] = await attempt(() => api.get('/slow-success/200')); @@ -785,8 +788,8 @@ describe('@logosdx/fetch: retry', async () => { }, }); - api.on('fetch-before', () => attemptCount++); - api.on('fetch-retry', (data) => retryEvents.push(data)); + api.on('before-request', () => attemptCount++); + api.on('retry', (data) => retryEvents.push(data)); // /slow-success/200 waits 200ms, each attempt times out at 50ms // With 
500ms total budget, we should get all 3 attempts diff --git a/tests/src/fetch/executor/timeout.test.ts b/tests/src/fetch/executor/timeout.test.ts new file mode 100644 index 0000000..a1b32b7 --- /dev/null +++ b/tests/src/fetch/executor/timeout.test.ts @@ -0,0 +1,131 @@ +import { + describe, + it, + expect +} from 'vitest' + +import { + FetchError, + FetchEngine, +} from '../../../../packages/fetch/src/index.ts'; + +import { attempt, attemptSync, wait } from '../../../../packages/utils/src/index.ts'; +import { makeTestStubs } from '../_helpers.ts'; + + +describe('FetchEngine: timeout boundaries', async () => { + + const { testUrl } = await makeTestStubs(4130); + + it('should handle 0ms timeout', async () => { + + // Immediate timeout edge case - use slow endpoint to ensure timeout fires first + const api = new FetchEngine({ + baseUrl: testUrl, + dedupePolicy: true + }); + + // Timeout of 0ms should immediately abort against slow endpoint + const [, err] = await attempt(() => api.get('/slow-success/200', { timeout: 0 })); + + expect(err).to.exist; + expect(err).to.be.instanceOf(FetchError); + + const fetchErr = err as FetchError; + expect(fetchErr.aborted).to.be.true; + + api.destroy(); + await wait(10); // Let microtasks settle + }); + + it('should handle 1ms timeout', async () => { + + // Very short timeout - likely to fail on slow endpoint + const api = new FetchEngine({ + baseUrl: testUrl, + dedupePolicy: true + }); + + // Start a slow request (1000ms endpoint) with 1ms timeout + const [, err] = await attempt(() => api.get('/wait', { timeout: 1 })); + + expect(err).to.exist; + expect(err).to.be.instanceOf(FetchError); + + const fetchErr = err as FetchError; + expect(fetchErr.aborted).to.be.true; + expect(fetchErr.step).to.equal('fetch'); + + api.destroy(); + await wait(10); // Let microtasks settle + }); + + it('should handle negative timeout values', async () => { + + // Negative timeout throws assertion error at request time (validation) + const api = new 
FetchEngine({ + baseUrl: testUrl, + dedupePolicy: true + }); + + const [, err] = await attempt(() => api.get('/json', { timeout: -1 })); + + expect(err).to.exist; + expect(err).to.be.instanceOf(Error); + expect((err as Error).message).to.include('non-negative'); + + api.destroy(); + }); + + it('should handle Infinity timeout', async () => { + + // Infinity is coerced by Node.js to 1ms (see TimeoutOverflowWarning) + // so the request effectively has a very short timeout + const api = new FetchEngine({ + baseUrl: testUrl, + dedupePolicy: true + }); + + // Use a slow endpoint so the 1ms timeout triggers an abort + const [, err] = await attempt(() => api.get('/wait', { timeout: Infinity })); + + // Infinity causes immediate abort due to Node.js 32-bit overflow + expect(err).to.exist; + expect(err).to.be.instanceOf(FetchError); + expect((err as FetchError).aborted).to.be.true; + + api.destroy(); + await wait(10); // Let microtasks settle + }); + + it('should handle NaN timeout', async () => { + + // NaN passes typeof check but fails >= 0 assertion + const api = new FetchEngine({ + baseUrl: testUrl, + dedupePolicy: true + }); + + const [, err] = await attempt(() => api.get('/json', { timeout: NaN })); + + expect(err).to.exist; + expect(err).to.be.instanceOf(Error); + expect((err as Error).message).to.include('non-negative'); + + api.destroy(); + }); + + it('should handle empty string baseUrl with absolute path', async () => { + + // Edge case: empty baseUrl is NOT valid - FetchEngine requires baseUrl + // This test verifies the validation error is thrown + const [, err] = attemptSync(() => new FetchEngine({ + baseUrl: '', + dedupePolicy: true + })); + + expect(err).to.exist; + expect(err).to.be.instanceOf(Error); + expect((err as Error).message).to.include('baseUrl'); + }); +}); diff --git a/tests/src/fetch/options/get.test.ts b/tests/src/fetch/options/get.test.ts new file mode 100644 index 0000000..f9e52be --- /dev/null +++ b/tests/src/fetch/options/get.test.ts @@ -0,0 
+1,83 @@ +import { describe, it, expect, vi } from 'vitest'; +import { ConfigStore } from '../../../../packages/fetch/src/options/index.ts'; + + +describe('ConfigStore.get', () => { + + it('returns cloned options when no path provided', () => { + + const mockEngine = { emit: vi.fn() } as any; + const initialOptions = { + baseUrl: 'https://api.example.com', + retry: { maxAttempts: 3 } + }; + + const store = new ConfigStore(mockEngine, initialOptions); + const result = store.get(); + + expect(result).to.deep.equal(initialOptions); + expect(result).to.not.equal(initialOptions); + }); + + it('returns value at simple path', () => { + + const mockEngine = { emit: vi.fn() } as any; + const store = new ConfigStore(mockEngine, { + baseUrl: 'https://api.example.com', + totalTimeout: 5000 + }); + + expect(store.get('baseUrl')).to.equal('https://api.example.com'); + expect(store.get('totalTimeout')).to.equal(5000); + }); + + it('returns value at nested path', () => { + + const mockEngine = { emit: vi.fn() } as any; + const store = new ConfigStore(mockEngine, { + baseUrl: 'https://api.example.com', + retry: { + maxAttempts: 3, + baseDelay: 1000 + } + }); + + expect(store.get('retry.maxAttempts')).to.equal(3); + expect(store.get('retry.baseDelay')).to.equal(1000); + }); + + it('returns cloned object for nested object paths', () => { + + const mockEngine = { emit: vi.fn() } as any; + const retry = { maxAttempts: 3, baseDelay: 1000 }; + const store = new ConfigStore(mockEngine, { baseUrl: 'https://api.example.com', retry }); + + const result = store.get('retry'); + + expect(result).to.deep.equal(retry); + expect(result).to.not.equal(retry); + }); + + it('returns undefined for non-existent paths', () => { + + const mockEngine = { emit: vi.fn() } as any; + const store = new ConfigStore(mockEngine, { baseUrl: 'https://api.example.com' }); + + expect(store.get('nonExistent' as any)).to.be.undefined; + expect(store.get('nested.path' as any)).to.be.null; + }); + + it('prevents mutation 
through returned values', () => { + + const mockEngine = { emit: vi.fn() } as any; + const store = new ConfigStore(mockEngine, { + baseUrl: 'https://api.example.com', + retry: { maxAttempts: 3 } + }); + + const result = store.get(); + (result.retry as any).maxAttempts = 999; + + expect(store.get('retry.maxAttempts')).to.equal(3); + }); +}); diff --git a/tests/src/fetch/options/set.test.ts b/tests/src/fetch/options/set.test.ts new file mode 100644 index 0000000..49acdf7 --- /dev/null +++ b/tests/src/fetch/options/set.test.ts @@ -0,0 +1,116 @@ +import { describe, it, expect, vi } from 'vitest'; +import { ConfigStore } from '../../../../packages/fetch/src/options/index.ts'; + + +describe('ConfigStore.set', () => { + + it('sets value by simple path', () => { + + const mockEngine = { emit: vi.fn() } as any; + const store = new ConfigStore(mockEngine, { + baseUrl: 'https://old.example.com', + totalTimeout: 5000 + }); + + store.set('baseUrl', 'https://new.example.com'); + + expect(store.get('baseUrl')).to.equal('https://new.example.com'); + }); + + it('sets value by nested path', () => { + + const mockEngine = { emit: vi.fn() } as any; + const store = new ConfigStore(mockEngine, { + baseUrl: 'https://api.example.com', + retry: { maxAttempts: 3, baseDelay: 1000 } + }); + + store.set('retry.maxAttempts' as any, 5); + + expect(store.get('retry.maxAttempts')).to.equal(5); + expect(store.get('retry.baseDelay')).to.equal(1000); + }); + + it('merges partial options object', () => { + + const mockEngine = { emit: vi.fn() } as any; + const store = new ConfigStore(mockEngine, { + baseUrl: 'https://api.example.com', + totalTimeout: 5000, + retry: { maxAttempts: 3 } + }); + + store.set({ totalTimeout: 10000 }); + + expect(store.get('totalTimeout')).to.equal(10000); + expect(store.get('baseUrl')).to.equal('https://api.example.com'); + }); + + it('deep merges nested partial options', () => { + + const mockEngine = { emit: vi.fn() } as any; + const store = new ConfigStore(mockEngine, { 
+ baseUrl: 'https://api.example.com', + retry: { maxAttempts: 3, baseDelay: 1000, maxDelay: 10000 } + }); + + store.set({ retry: { maxAttempts: 5 } } as any); + + expect(store.get('retry.maxAttempts')).to.equal(5); + expect(store.get('retry.baseDelay')).to.equal(1000); + expect(store.get('retry.maxDelay')).to.equal(10000); + }); + + it('emits options-change event with path when setting by path', () => { + + const mockEngine = { emit: vi.fn() } as any; + const store = new ConfigStore(mockEngine, { + baseUrl: 'https://api.example.com' + }); + + store.set('baseUrl', 'https://new.example.com'); + + expect(mockEngine.emit).toHaveBeenCalledWith('config-change', { + path: 'baseUrl', + value: 'https://new.example.com' + }); + }); + + it('emits options-change event with value when setting by object', () => { + + const mockEngine = { emit: vi.fn() } as any; + const store = new ConfigStore(mockEngine, { + baseUrl: 'https://api.example.com' + }); + + const partial = { baseUrl: 'https://new.example.com' }; + store.set(partial); + + expect(mockEngine.emit).toHaveBeenCalledWith('config-change', { + value: partial + }); + }); + + it('throws on invalid arguments', () => { + + const mockEngine = { emit: vi.fn() } as any; + const store = new ConfigStore(mockEngine, { baseUrl: 'https://api.example.com' }); + + expect(() => (store as any).set(123)).to.throw(); + // Setting a path without a value (undefined) is now allowed for clearing + expect(() => (store as any).set('path')).to.not.throw(); + }); + + it('creates intermediate objects for new paths', () => { + + const mockEngine = { emit: vi.fn() } as any; + const store = new ConfigStore(mockEngine, { + baseUrl: 'https://api.example.com', + retry: {} as any + }); + + store.set('retry.maxAttempts' as any, 5); + + expect(store.get('retry.maxAttempts' as any)).to.equal(5); + }); +}); diff --git a/tests/src/fetch/caching.ts b/tests/src/fetch/policies/cache.test.ts similarity index 90% rename from tests/src/fetch/caching.ts rename to 
tests/src/fetch/policies/cache.test.ts index cbc137c..4f42a90 100644 --- a/tests/src/fetch/caching.ts +++ b/tests/src/fetch/policies/cache.test.ts @@ -5,15 +5,15 @@ import { vi } from 'vitest' -import { FetchEngine } from '../../../packages/fetch/src/index.ts'; +import { FetchEngine } from '../../../../packages/fetch/src/index.ts'; import { attempt, attemptSync, wait -} from '../../../packages/utils/src/index.ts'; +} from '../../../../packages/utils/src/index.ts'; -import { makeTestStubs } from './_helpers.ts'; +import { makeTestStubs } from '../_helpers.ts'; describe('@logosdx/fetch: caching', async () => { @@ -30,9 +30,9 @@ describe('@logosdx/fetch: caching', async () => { const missEvents: string[] = []; const setEvents: string[] = []; - api.on('fetch-cache-hit', (data) => hitEvents.push(data.path!)); - api.on('fetch-cache-miss', (data) => missEvents.push(data.path!)); - api.on('fetch-cache-set', (data) => setEvents.push(data.path!)); + api.on('cache-hit', (data) => hitEvents.push(data.path!)); + api.on('cache-miss', (data) => missEvents.push(data.path!)); + api.on('cache-set', (data) => setEvents.push(data.path!)); // First request - cache miss await api.get('/json'); @@ -67,8 +67,8 @@ describe('@logosdx/fetch: caching', async () => { const mock = vi.fn(); - api.on('fetch-cache-hit', mock); - api.on('fetch-cache-miss', mock); + api.on('cache-hit', mock); + api.on('cache-miss', mock); await api.get('/json'); await api.get('/json'); @@ -88,8 +88,8 @@ describe('@logosdx/fetch: caching', async () => { const mock = vi.fn(); - api.on('fetch-cache-hit', mock); - api.on('fetch-cache-miss', mock); + api.on('cache-hit', mock); + api.on('cache-miss', mock); await api.get('/json'); await api.get('/json'); @@ -108,8 +108,8 @@ describe('@logosdx/fetch: caching', async () => { const mock = vi.fn(); - api.on('fetch-cache-hit', mock); - api.on('fetch-cache-miss', mock); + api.on('cache-hit', mock); + api.on('cache-miss', mock); const r1 = await api.get('/json'); const r2 = 
await api.get('/json'); @@ -131,7 +131,7 @@ describe('@logosdx/fetch: caching', async () => { const mock = vi.fn(); - api.on('fetch-cache-miss', mock); + api.on('cache-miss', mock); await api.post('/json', { data: 'test' }); await api.post('/json', { data: 'test' }); @@ -154,8 +154,8 @@ describe('@logosdx/fetch: caching', async () => { const mock = vi.fn(); - api.on('fetch-cache-hit', () => mock('hit')); - api.on('fetch-cache-miss', () => mock('miss')); + api.on('cache-hit', () => mock('hit')); + api.on('cache-miss', () => mock('miss')); // First POST - cache miss await api.post('/json', { data: 'test' }); @@ -180,8 +180,8 @@ describe('@logosdx/fetch: caching', async () => { const mock = vi.fn(); - api.on('fetch-cache-hit', () => mock('hit')); - api.on('fetch-cache-miss', () => mock('miss')); + api.on('cache-hit', () => mock('hit')); + api.on('cache-miss', () => mock('miss')); // First request - cache miss await api.get('/json'); @@ -218,8 +218,8 @@ describe('@logosdx/fetch: caching', async () => { const mock = vi.fn(); - api.on('fetch-cache-miss', () => mock('miss')); - api.on('fetch-cache-hit', () => mock('hit')); + api.on('cache-miss', () => mock('miss')); + api.on('cache-hit', () => mock('hit')); // This path contains skip=true - should not cache await api.get('/json?skip=true'); @@ -265,7 +265,7 @@ describe('@logosdx/fetch: caching', async () => { api.destroy(); }); - it('should emit fetch-cache-hit with correct data', async () => { + it('should emit cache-hit with correct data', async () => { const api = new FetchEngine({ baseUrl: testUrl, @@ -274,7 +274,7 @@ describe('@logosdx/fetch: caching', async () => { let hitData: any = null; - api.on('fetch-cache-hit', (data) => { + api.on('cache-hit', (data) => { hitData = data; }); @@ -293,7 +293,7 @@ describe('@logosdx/fetch: caching', async () => { api.destroy(); }); - it('should emit fetch-cache-miss with correct data', async () => { + it('should emit cache-miss with correct data', async () => { const api = new 
FetchEngine({ baseUrl: testUrl, @@ -302,7 +302,7 @@ describe('@logosdx/fetch: caching', async () => { let missData: any = null; - api.on('fetch-cache-miss', (data) => { + api.on('cache-miss', (data) => { missData = data; }); @@ -317,7 +317,7 @@ describe('@logosdx/fetch: caching', async () => { api.destroy(); }); - it('should emit fetch-cache-set when storing value', async () => { + it('should emit cache-set when storing value', async () => { const api = new FetchEngine({ baseUrl: testUrl, @@ -326,7 +326,7 @@ describe('@logosdx/fetch: caching', async () => { let setData: any = null; - api.on('fetch-cache-set', (data) => { + api.on('cache-set', (data) => { setData = data; }); @@ -356,8 +356,8 @@ describe('@logosdx/fetch: caching', async () => { const events: string[] = []; - api.on('fetch-cache-miss', () => events.push('miss')); - api.on('fetch-cache-hit', () => events.push('hit')); + api.on('cache-miss', () => events.push('miss')); + api.on('cache-hit', () => events.push('hit')); await api.get('/json'); await api.get('/json'); @@ -384,8 +384,8 @@ describe('@logosdx/fetch: caching', async () => { const hitEvents: string[] = []; const missEvents: string[] = []; - api.on('fetch-cache-hit', () => hitEvents.push('hit')); - api.on('fetch-cache-miss', () => missEvents.push('miss')); + api.on('cache-hit', () => hitEvents.push('hit')); + api.on('cache-miss', () => missEvents.push('miss')); await api.get('/json'); expect(missEvents.length).to.equal(1); @@ -418,8 +418,8 @@ describe('@logosdx/fetch: caching', async () => { const hitEvents: string[] = []; const missEvents: string[] = []; - api.on('fetch-cache-hit', () => hitEvents.push('hit')); - api.on('fetch-cache-miss', () => missEvents.push('miss')); + api.on('cache-hit', () => hitEvents.push('hit')); + api.on('cache-miss', () => missEvents.push('miss')); // POST should be cached for /json route await api.post('/json', { data: 'test' }); @@ -442,8 +442,8 @@ describe('@logosdx/fetch: caching', async () => { const cacheHits: 
string[] = []; const dedupeStarts: string[] = []; - api.on('fetch-cache-hit', () => cacheHits.push('hit')); - api.on('fetch-dedupe-start', () => dedupeStarts.push('start')); + api.on('cache-hit', () => cacheHits.push('hit')); + api.on('dedupe-start', () => dedupeStarts.push('start')); // First request - cache miss, dedupe start await api.get('/json'); @@ -473,8 +473,8 @@ describe('@logosdx/fetch: caching', async () => { const staleEvents: string[] = []; const revalidateEvents: string[] = []; - api.on('fetch-cache-stale', () => staleEvents.push('stale')); - api.on('fetch-cache-revalidate', () => revalidateEvents.push('revalidate')); + api.on('cache-stale', () => staleEvents.push('stale')); + api.on('cache-revalidate', () => revalidateEvents.push('revalidate')); // First request - populates cache await api.get('/json'); @@ -492,7 +492,7 @@ describe('@logosdx/fetch: caching', async () => { api.destroy(); }); - it('should emit fetch-cache-stale with isStale: true', async () => { + it('should emit cache-stale with isStale: true', async () => { const api = new FetchEngine({ baseUrl: testUrl, @@ -505,7 +505,7 @@ describe('@logosdx/fetch: caching', async () => { let staleData: any = null; - api.on('fetch-cache-stale', (data) => { + api.on('cache-stale', (data) => { staleData = data; }); @@ -559,7 +559,7 @@ describe('@logosdx/fetch: caching', async () => { const revalidateEvents: string[] = []; - api.on('fetch-cache-revalidate', () => revalidateEvents.push('revalidate')); + api.on('cache-revalidate', () => revalidateEvents.push('revalidate')); await api.get('/json'); await new Promise(res => setTimeout(res, 100)); @@ -577,7 +577,7 @@ describe('@logosdx/fetch: caching', async () => { api.destroy(); }); - it('should emit fetch-cache-revalidate-error on revalidation failure', async () => { + it('should emit cache-revalidate-error on revalidation failure', async () => { // Note: flaky counter is automatically reset by beforeEach in _helpers.ts const api = new FetchEngine({ @@ 
-591,7 +591,7 @@ describe('@logosdx/fetch: caching', async () => { const errorEvents: any[] = []; - api.on('fetch-cache-revalidate-error', (data) => errorEvents.push(data)); + api.on('cache-revalidate-error', (data) => errorEvents.push(data)); // First request to /flaky - succeeds, populates cache const r1 = await api.get('/flaky'); @@ -631,7 +631,7 @@ describe('@logosdx/fetch: caching', async () => { const setEvents: string[] = []; - api.on('fetch-cache-set', () => setEvents.push('set')); + api.on('cache-set', () => setEvents.push('set')); // First request - cache set await api.get('/json'); @@ -660,7 +660,7 @@ describe('@logosdx/fetch: caching', async () => { const missEvents: string[] = []; - api.on('fetch-cache-miss', () => missEvents.push('miss')); + api.on('cache-miss', () => missEvents.push('miss')); // Populate cache await api.get('/json'); @@ -688,7 +688,7 @@ describe('@logosdx/fetch: caching', async () => { let capturedKey: string = ''; - api.on('fetch-cache-set', (data) => { + api.on('cache-set', (data) => { capturedKey = data.key; }); @@ -699,7 +699,7 @@ describe('@logosdx/fetch: caching', async () => { expect(deleted).to.be.true; const missEvents: string[] = []; - api.on('fetch-cache-miss', () => missEvents.push('miss')); + api.on('cache-miss', () => missEvents.push('miss')); await api.get('/json'); expect(missEvents.length).to.equal(1); @@ -737,7 +737,7 @@ describe('@logosdx/fetch: caching', async () => { expect(count).to.equal(1); const missEvents: string[] = []; - api.on('fetch-cache-miss', (data) => missEvents.push(data.path!)); + api.on('cache-miss', (data) => missEvents.push(data.path!)); // json1 should miss, others should hit await api.get('/json'); @@ -802,7 +802,7 @@ describe('@logosdx/fetch: caching', async () => { expect(count).to.equal(2); const hitEvents: string[] = []; - api.on('fetch-cache-hit', (data) => hitEvents.push(data.path!)); + api.on('cache-hit', (data) => hitEvents.push(data.path!)); await api.get('/json'); 
expect(hitEvents).to.include('/json'); @@ -830,7 +830,7 @@ describe('@logosdx/fetch: caching', async () => { expect(count).to.equal(2); const hitEvents: string[] = []; - api.on('fetch-cache-hit', (data) => hitEvents.push(data.path!)); + api.on('cache-hit', (data) => hitEvents.push(data.path!)); // /json should still be cached await api.get('/json'); @@ -933,7 +933,7 @@ describe('@logosdx/fetch: caching', async () => { let setData: any = null; - api.on('fetch-cache-set', (data) => { + api.on('cache-set', (data) => { setData = data; }); @@ -962,8 +962,8 @@ describe('@logosdx/fetch: caching', async () => { const missEvents: string[] = []; const staleEvents: string[] = []; - api.on('fetch-cache-miss', () => missEvents.push('miss')); - api.on('fetch-cache-stale', () => staleEvents.push('stale')); + api.on('cache-miss', () => missEvents.push('miss')); + api.on('cache-stale', () => staleEvents.push('stale')); // First request await api.get('/json'); @@ -990,7 +990,7 @@ describe('@logosdx/fetch: caching', async () => { const setEvents: string[] = []; - api.on('fetch-cache-set', () => setEvents.push('set')); + api.on('cache-set', () => setEvents.push('set')); // Multiple concurrent requests await Promise.all([ @@ -1014,7 +1014,7 @@ describe('@logosdx/fetch: caching', async () => { const hitEvents: string[] = []; - api.on('fetch-cache-hit', () => hitEvents.push('hit')); + api.on('cache-hit', () => hitEvents.push('hit')); await api.get('/json'); await api.get('/json'); @@ -1033,7 +1033,7 @@ describe('@logosdx/fetch: caching', async () => { const hitEvents: string[] = []; - api.on('fetch-cache-hit', () => hitEvents.push('hit')); + api.on('cache-hit', () => hitEvents.push('hit')); const specialPath = '/json?foo=bar&baz=qux'; @@ -1058,8 +1058,8 @@ describe('@logosdx/fetch: caching', async () => { const missEvents: string[] = []; const hitEvents: string[] = []; - api.on('fetch-cache-miss', (data) => missEvents.push(data.path!)); - api.on('fetch-cache-hit', (data) => 
hitEvents.push(data.path!)); + api.on('cache-miss', (data) => missEvents.push(data.path!)); + api.on('cache-hit', (data) => hitEvents.push(data.path!)); // /json1 should be skipped by skip callback (no cache events) await api.get('/json1'); @@ -1086,8 +1086,8 @@ describe('@logosdx/fetch: caching', async () => { const missEvents: string[] = []; const hitEvents: string[] = []; - api.on('fetch-cache-miss', () => missEvents.push('miss')); - api.on('fetch-cache-hit', () => hitEvents.push('hit')); + api.on('cache-miss', () => missEvents.push('miss')); + api.on('cache-hit', () => hitEvents.push('hit')); // Different paths should have different cache entries await api.get('/json'); @@ -1114,7 +1114,7 @@ describe('@logosdx/fetch: caching', async () => { const setEvents: string[] = []; - api.on('fetch-cache-set', () => setEvents.push('set')); + api.on('cache-set', () => setEvents.push('set')); // 10 concurrent requests to same path const responses = await Promise.all( @@ -1144,8 +1144,8 @@ describe('@logosdx/fetch: caching', async () => { const hitEvents: string[] = []; const missEvents: string[] = []; - api.on('fetch-cache-hit', () => hitEvents.push('hit')); - api.on('fetch-cache-miss', () => missEvents.push('miss')); + api.on('cache-hit', () => hitEvents.push('hit')); + api.on('cache-miss', () => missEvents.push('miss')); await api.get('/json'); expect(missEvents.length).to.equal(1); @@ -1167,8 +1167,8 @@ describe('@logosdx/fetch: caching', async () => { const startEvents: string[] = []; const joinEvents: string[] = []; - api.on('fetch-dedupe-start', () => startEvents.push('start')); - api.on('fetch-dedupe-join', () => joinEvents.push('join')); + api.on('dedupe-start', () => startEvents.push('start')); + api.on('dedupe-join', () => joinEvents.push('join')); await Promise.all([ api.get('/json'), @@ -1197,7 +1197,7 @@ describe('@logosdx/fetch: caching', async () => { }); let cacheHits = 0; - api.on('fetch-cache-hit', () => { cacheHits++; }); + api.on('cache-hit', () => { 
cacheHits++; }); // First request - cache miss await api.get(path); @@ -1251,8 +1251,8 @@ describe('@logosdx/fetch: caching', async () => { let dedupeStarts = 0; let cacheHits = 0; - api.on('fetch-dedupe-start', () => { dedupeStarts++; }); - api.on('fetch-cache-hit', () => { cacheHits++; }); + api.on('dedupe-start', () => { dedupeStarts++; }); + api.on('cache-hit', () => { cacheHits++; }); // First request - populates cache await api.get(path); @@ -1289,7 +1289,7 @@ describe('@logosdx/fetch: caching', async () => { }); const events: string[] = []; - api.on('fetch-cache-miss', () => events.push('cache-miss')); + api.on('cache-miss', () => events.push('cache-miss')); // Path NOT matching rule - should cache (miss event) const [r1] = await attempt(() => api.get('/api/users')); @@ -1322,7 +1322,7 @@ describe('@logosdx/fetch: caching', async () => { }); const events: string[] = []; - api.on('fetch-cache-miss', () => events.push('cache-miss')); + api.on('cache-miss', () => events.push('cache-miss')); // Path NOT matching rule - should cache (miss event) const [r1] = await attempt(() => api.get('/api/users/123')); @@ -1358,7 +1358,7 @@ describe('@logosdx/fetch: caching', async () => { }); const events: string[] = []; - api.on('fetch-cache-miss', () => events.push('cache-miss')); + api.on('cache-miss', () => events.push('cache-miss')); // Should cache when neither condition is met const [r1] = await attempt(() => api.get('/api/data.json')); @@ -1399,7 +1399,7 @@ describe('@logosdx/fetch: caching', async () => { }); const events: string[] = []; - api.on('fetch-cache-miss', () => events.push('cache-miss')); + api.on('cache-miss', () => events.push('cache-miss')); // First rule should win (disabled) const [r1] = await attempt(() => api.get('/api/users')); @@ -1424,7 +1424,7 @@ describe('@logosdx/fetch: caching', async () => { }); const events: string[] = []; - api.on('fetch-cache-miss', () => events.push('cache-miss')); + api.on('cache-miss', () => events.push('cache-miss')); 
// Should not cache paths starting with /no-cache/ const [r1] = await attempt(() => api.get('/no-cache/data')); @@ -1462,7 +1462,7 @@ describe('@logosdx/fetch: caching', async () => { }); const events: string[] = []; - api.on('fetch-cache-miss', () => events.push('cache-miss')); + api.on('cache-miss', () => events.push('cache-miss')); // Should NOT cache (has /api/v prefix AND matches v\d+ pattern) const [r1] = await attempt(() => api.get('/api/v2/users')); @@ -1498,8 +1498,8 @@ describe('@logosdx/fetch: caching', async () => { }); const events: string[] = []; - api.on('fetch-cache-miss', () => events.push('cache-miss')); - api.on('fetch-cache-hit', () => events.push('cache-hit')); + api.on('cache-miss', () => events.push('cache-miss')); + api.on('cache-hit', () => events.push('cache-hit')); const path = '/json'; @@ -1539,9 +1539,9 @@ describe('@logosdx/fetch: caching', async () => { }); const events: string[] = []; - api.on('fetch-cache-miss', () => events.push('cache-miss')); - api.on('fetch-cache-hit', () => events.push('cache-hit')); - api.on('fetch-cache-stale', () => events.push('cache-stale')); + api.on('cache-miss', () => events.push('cache-miss')); + api.on('cache-hit', () => events.push('cache-hit')); + api.on('cache-stale', () => events.push('cache-stale')); const path = '/json'; @@ -1575,9 +1575,9 @@ describe('@logosdx/fetch: caching', async () => { }); const events: string[] = []; - api.on('fetch-cache-miss', () => events.push('cache-miss')); - api.on('fetch-cache-hit', () => events.push('cache-hit')); - api.on('fetch-cache-stale', () => events.push('cache-stale')); + api.on('cache-miss', () => events.push('cache-miss')); + api.on('cache-hit', () => events.push('cache-hit')); + api.on('cache-stale', () => events.push('cache-stale')); const path = '/json'; @@ -1614,9 +1614,9 @@ describe('@logosdx/fetch: caching', async () => { }); const events: string[] = []; - api.on('fetch-cache-miss', () => events.push('cache-miss')); - api.on('fetch-cache-hit', () 
=> events.push('cache-hit')); - api.on('fetch-cache-stale', () => events.push('cache-stale')); + api.on('cache-miss', () => events.push('cache-miss')); + api.on('cache-hit', () => events.push('cache-hit')); + api.on('cache-stale', () => events.push('cache-stale')); const path = '/json'; @@ -1663,8 +1663,8 @@ describe('@logosdx/fetch: caching', async () => { }); const events: string[] = []; - api.on('fetch-cache-miss', () => events.push('cache-miss')); - api.on('fetch-cache-stale', () => events.push('cache-stale')); + api.on('cache-miss', () => events.push('cache-miss')); + api.on('cache-stale', () => events.push('cache-stale')); const path = '/json'; @@ -1693,9 +1693,9 @@ describe('@logosdx/fetch: caching', async () => { }); const events: string[] = []; - api.on('fetch-cache-miss', () => events.push('cache-miss')); - api.on('fetch-cache-hit', () => events.push('cache-hit')); - api.on('fetch-cache-stale', () => events.push('cache-stale')); + api.on('cache-miss', () => events.push('cache-miss')); + api.on('cache-hit', () => events.push('cache-hit')); + api.on('cache-stale', () => events.push('cache-stale')); const path = '/json'; @@ -1736,11 +1736,11 @@ describe('@logosdx/fetch: caching', async () => { }); const events: { path: string; event: string }[] = []; - api.on(/fetch-cache/, (eventData) => { + api.on(/cache-/, (eventData) => { - if (eventData.data.path) { + if ((eventData.data as any).path) { - events.push({ path: eventData.data.path, event: eventData.event }); + events.push({ path: (eventData.data as any).path, event: eventData.event }); } }); @@ -1761,8 +1761,8 @@ describe('@logosdx/fetch: caching', async () => { const fastEvent = events.find(e => e.path === fastPath); const slowEvent = events.find(e => e.path === slowPath); - expect(fastEvent?.event).to.equal('fetch-cache-miss'); - expect(slowEvent?.event).to.equal('fetch-cache-hit'); + expect(fastEvent?.event).to.equal('cache-miss'); + expect(slowEvent?.event).to.equal('cache-hit'); api.destroy(); }); @@ 
-1783,10 +1783,10 @@ describe('@logosdx/fetch: caching', async () => { }); const events: string[] = []; - api.on('fetch-dedupe-start', () => events.push('dedupe-start')); - api.on('fetch-cache-miss', () => events.push('cache-miss')); - api.on('fetch-cache-set', () => events.push('cache-set')); - api.on('fetch-cache-hit', () => events.push('cache-hit')); + api.on('dedupe-start', () => events.push('dedupe-start')); + api.on('cache-miss', () => events.push('cache-miss')); + api.on('cache-set', () => events.push('cache-set')); + api.on('cache-hit', () => events.push('cache-hit')); const path = '/json'; @@ -1817,7 +1817,7 @@ describe('@logosdx/fetch: caching', async () => { }); const events: string[] = []; - api.on(/fetch-dedupe|fetch-cache|fetch-retry/, (data) => { + api.on(/dedupe|cache|retry/, (data) => { events.push(data.event); }); @@ -1826,9 +1826,9 @@ describe('@logosdx/fetch: caching', async () => { const [r1] = await attempt(() => api.get('/fail-once')); expect(r1).to.exist; - expect(events).to.include('fetch-dedupe-start'); - expect(events).to.include('fetch-cache-miss'); - expect(events).to.include('fetch-retry'); + expect(events).to.include('dedupe-start'); + expect(events).to.include('cache-miss'); + expect(events).to.include('retry'); api.destroy(); }); @@ -1843,7 +1843,7 @@ describe('@logosdx/fetch: caching', async () => { }); const events: string[] = []; - api.on(/fetch-dedupe|fetch-cache|fetch-retry/, (data) => { + api.on(/dedupe|cache|retry/, (data) => { events.push(data.event); }); @@ -1851,9 +1851,9 @@ describe('@logosdx/fetch: caching', async () => { const [r1] = await attempt(() => api.post('/fail-once', { data: 'test' })); expect(r1).to.exist; - expect(events).to.include('fetch-dedupe-start'); - expect(events).to.include('fetch-cache-miss'); - expect(events).to.include('fetch-retry'); + expect(events).to.include('dedupe-start'); + expect(events).to.include('cache-miss'); + expect(events).to.include('retry'); api.destroy(); }); @@ -1868,7 +1868,7 
@@ describe('@logosdx/fetch: caching', async () => { }); const events: string[] = []; - api.on(/fetch-dedupe|fetch-cache|fetch-retry/, (data) => { + api.on(/dedupe|cache|retry/, (data) => { events.push(data.event); }); @@ -1876,9 +1876,9 @@ describe('@logosdx/fetch: caching', async () => { const [r1] = await attempt(() => api.put('/fail-once', { data: 'test' })); expect(r1).to.exist; - expect(events).to.include('fetch-dedupe-start'); - expect(events).to.include('fetch-cache-miss'); - expect(events).to.include('fetch-retry'); + expect(events).to.include('dedupe-start'); + expect(events).to.include('cache-miss'); + expect(events).to.include('retry'); api.destroy(); }); @@ -1893,7 +1893,7 @@ describe('@logosdx/fetch: caching', async () => { }); const events: string[] = []; - api.on(/fetch-dedupe|fetch-cache|fetch-retry/, (data) => { + api.on(/dedupe|cache|retry/, (data) => { events.push(data.event); }); @@ -1901,9 +1901,9 @@ describe('@logosdx/fetch: caching', async () => { const [r1] = await attempt(() => api.patch('/fail-once', { data: 'test' })); expect(r1).to.exist; - expect(events).to.include('fetch-dedupe-start'); - expect(events).to.include('fetch-cache-miss'); - expect(events).to.include('fetch-retry'); + expect(events).to.include('dedupe-start'); + expect(events).to.include('cache-miss'); + expect(events).to.include('retry'); api.destroy(); }); @@ -1918,7 +1918,7 @@ describe('@logosdx/fetch: caching', async () => { }); const events: string[] = []; - api.on(/fetch-dedupe|fetch-cache|fetch-retry/, (data) => { + api.on(/dedupe|cache|retry/, (data) => { events.push(data.event); }); @@ -1926,9 +1926,9 @@ describe('@logosdx/fetch: caching', async () => { const [r1] = await attempt(() => api.delete('/fail-once')); expect(r1).to.exist; - expect(events).to.include('fetch-dedupe-start'); - expect(events).to.include('fetch-cache-miss'); - expect(events).to.include('fetch-retry'); + expect(events).to.include('dedupe-start'); + expect(events).to.include('cache-miss'); + 
expect(events).to.include('retry'); api.destroy(); }); @@ -1949,27 +1949,27 @@ describe('@logosdx/fetch: caching', async () => { }); const events: string[] = []; - api.on(/fetch-dedupe|fetch-cache/, (data) => events.push(data.event)); + api.on(/dedupe|cache/, (data) => events.push(data.event)); const path1 = '/json'; const path2 = '/json2'; // GET should have both dedupe and cache await api.get(path1); - expect(events).to.include('fetch-dedupe-start'); - expect(events).to.include('fetch-cache-miss'); + expect(events).to.include('dedupe-start'); + expect(events).to.include('cache-miss'); // POST should have dedupe but no cache events.length = 0; await api.post(path2, {}); - expect(events).to.include('fetch-dedupe-start'); - expect(events).to.not.include('fetch-cache-miss'); + expect(events).to.include('dedupe-start'); + expect(events).to.not.include('cache-miss'); // PUT should have neither events.length = 0; await api.put('/json3', {}); - expect(events).to.not.include('fetch-dedupe-start'); - expect(events).to.not.include('fetch-cache-miss'); + expect(events).to.not.include('dedupe-start'); + expect(events).to.not.include('cache-miss'); api.destroy(); }); @@ -1984,9 +1984,9 @@ describe('@logosdx/fetch: caching', async () => { }); const events: string[] = []; - api.on('fetch-dedupe-start', () => events.push('dedupe-start')); - api.on('fetch-cache-miss', () => events.push('cache-miss')); - api.on('fetch-retry', () => events.push('retry')); + api.on('dedupe-start', () => events.push('dedupe-start')); + api.on('cache-miss', () => events.push('cache-miss')); + api.on('retry', () => events.push('retry')); // Use fail-once endpoint that will retry const [r1] = await attempt(() => api.get('/fail-once')); @@ -2021,7 +2021,7 @@ describe('@logosdx/fetch: caching', async () => { }); const events: string[] = []; - api.on(/fetch-dedupe|fetch-cache/, (data) => { + api.on(/dedupe|cache/, (data) => { events.push(data.event); }); @@ -2029,27 +2029,27 @@ describe('@logosdx/fetch: 
caching', async () => { // GET with both features const path1 = '/normal/data'; await api.get(path1); - expect(events).to.include('fetch-dedupe-start'); - expect(events).to.include('fetch-cache-miss'); + expect(events).to.include('dedupe-start'); + expect(events).to.include('cache-miss'); // GET with no dedupe events.length = 0; await api.get('/no-dedupe/data'); - expect(events).to.not.include('fetch-dedupe-start'); - expect(events).to.include('fetch-cache-miss'); + expect(events).to.not.include('dedupe-start'); + expect(events).to.include('cache-miss'); // GET with no cache events.length = 0; await api.get('/no-cache/data'); - expect(events).to.include('fetch-dedupe-start'); - expect(events).to.not.include('fetch-cache-miss'); + expect(events).to.include('dedupe-start'); + expect(events).to.not.include('cache-miss'); // POST with dedupe but no cache events.length = 0; const path2 = '/post/data'; await api.post(path2, {}); - expect(events).to.include('fetch-dedupe-start'); - expect(events).to.not.include('fetch-cache-miss'); + expect(events).to.include('dedupe-start'); + expect(events).to.not.include('cache-miss'); api.destroy(); }); @@ -2067,13 +2067,13 @@ describe('@logosdx/fetch: caching', async () => { }); const events: string[] = []; - api.on(/fetch-cache/, (data) => events.push(data.event)); + api.on(/cache-/, (data) => events.push(data.event)); const path = '/json'; // First request: cache miss await api.get(path); - expect(events).to.include('fetch-cache-miss'); + expect(events).to.include('cache-miss'); // Wait for staleIn await new Promise(res => setTimeout(res, 150)); @@ -2084,7 +2084,7 @@ describe('@logosdx/fetch: caching', async () => { await Promise.all(promises); // All should get stale cache - const staleCount = events.filter(e => e === 'fetch-cache-stale').length; + const staleCount = events.filter(e => e === 'cache-stale').length; expect(staleCount).to.be.greaterThan(0); api.destroy(); @@ -2146,11 +2146,11 @@ describe('@logosdx/fetch: caching', 
async () => { }); const events: { path: string; event: string }[] = []; - api.on(/fetch-cache/, (eventData) => { + api.on(/cache-/, (eventData) => { - if (eventData.data.path) { + if ((eventData.data as any).path) { - events.push({ path: eventData.data.path, event: eventData.event }); + events.push({ path: (eventData.data as any).path, event: eventData.event }); } }); @@ -2171,8 +2171,8 @@ describe('@logosdx/fetch: caching', async () => { const adminEvent = events.find(e => e.path === adminPath); const apiEvent = events.find(e => e.path === apiPath); - expect(adminEvent?.event).to.equal('fetch-cache-miss'); - expect(apiEvent?.event).to.equal('fetch-cache-hit'); + expect(adminEvent?.event).to.equal('cache-miss'); + expect(apiEvent?.event).to.equal('cache-hit'); api.destroy(); }); @@ -2232,7 +2232,7 @@ describe('@logosdx/fetch: caching', async () => { }); const events: string[] = []; - api.on('fetch-cache-miss', () => events.push('cache-miss')); + api.on('cache-miss', () => events.push('cache-miss')); // First rule wins (caching enabled) const [r1] = await attempt(() => api.get('/api/users')); @@ -2261,7 +2261,7 @@ describe('@logosdx/fetch: caching', async () => { }); const events: string[] = []; - api.on('fetch-cache-miss', () => events.push('cache-miss')); + api.on('cache-miss', () => events.push('cache-miss')); const [r1] = await attempt(() => api.get('/json')); expect(r1).to.exist; @@ -2282,8 +2282,8 @@ describe('@logosdx/fetch: caching', async () => { }); const events: string[] = []; - api.on('fetch-cache-miss', () => events.push('cache-miss')); - api.on('fetch-cache-hit', () => events.push('cache-hit')); + api.on('cache-miss', () => events.push('cache-miss')); + api.on('cache-hit', () => events.push('cache-hit')); const path = '/json'; @@ -2314,8 +2314,8 @@ describe('@logosdx/fetch: caching', async () => { }); const events: string[] = []; - api.on('fetch-cache-miss', () => events.push('cache-miss')); - api.on('fetch-cache-hit', () => events.push('cache-hit')); 
+ api.on('cache-miss', () => events.push('cache-miss')); + api.on('cache-hit', () => events.push('cache-hit')); const path = '/json'; @@ -2343,7 +2343,7 @@ describe('@logosdx/fetch: caching', async () => { }); const events: string[] = []; - api.on('fetch-cache-miss', () => events.push('cache-miss')); + api.on('cache-miss', () => events.push('cache-miss')); // Make 100 requests to different paths const promises = Array.from({ length: 100 }, (_, i) => @@ -2409,7 +2409,7 @@ describe('@logosdx/fetch: caching', async () => { // After deletion const key = await new Promise((resolve) => { - api.on('fetch-cache-set', (data) => resolve(data.key)); + api.on('cache-set', (data) => resolve(data.key)); api.get('/json3'); }); @@ -2443,8 +2443,8 @@ describe('@logosdx/fetch: caching', async () => { const hitEvents: string[] = []; const missEvents: string[] = []; - api.on('fetch-cache-hit', () => hitEvents.push('hit')); - api.on('fetch-cache-miss', () => missEvents.push('miss')); + api.on('cache-hit', () => hitEvents.push('hit')); + api.on('cache-miss', () => missEvents.push('miss')); const path = '/json'; @@ -2482,8 +2482,8 @@ describe('@logosdx/fetch: caching', async () => { const staleEvents: string[] = []; const missEvents: string[] = []; - api.on('fetch-cache-stale', () => staleEvents.push('stale')); - api.on('fetch-cache-miss', () => missEvents.push('miss')); + api.on('cache-stale', () => staleEvents.push('stale')); + api.on('cache-miss', () => missEvents.push('miss')); const path = '/json'; @@ -2547,7 +2547,6 @@ describe('@logosdx/fetch: caching', async () => { await api.get(path); let stats = api.cacheStats(); - const initialSize = stats.cacheSize; // Wait for TTL to expire await new Promise(res => setTimeout(res, 150)); @@ -2724,8 +2723,8 @@ describe('@logosdx/fetch: caching', async () => { const dedupeEvents: string[] = []; const cacheHitEvents: string[] = []; - api.on('fetch-dedupe-start', () => dedupeEvents.push('dedupe')); - api.on('fetch-cache-hit', () => 
cacheHitEvents.push('hit')); + api.on('dedupe-start', () => dedupeEvents.push('dedupe')); + api.on('cache-hit', () => cacheHitEvents.push('hit')); // Concurrent requests while cache is fresh await Promise.all([ @@ -2756,9 +2755,9 @@ describe('@logosdx/fetch: caching', async () => { const dedupeJoinEvents: string[] = []; const cacheMissEvents: string[] = []; - api.on('fetch-dedupe-start', () => dedupeStartEvents.push('start')); - api.on('fetch-dedupe-join', () => dedupeJoinEvents.push('join')); - api.on('fetch-cache-miss', () => cacheMissEvents.push('miss')); + api.on('dedupe-start', () => dedupeStartEvents.push('start')); + api.on('dedupe-join', () => dedupeJoinEvents.push('join')); + api.on('cache-miss', () => cacheMissEvents.push('miss')); const path = '/json'; @@ -2849,7 +2848,7 @@ describe('@logosdx/fetch: caching', async () => { describe('cache event interfaces', () => { - it('should emit fetch-cache-miss with correct interface', async () => { + it('should emit cache-miss with correct interface', async () => { // Validates cache miss event data const api = new FetchEngine({ @@ -2859,7 +2858,7 @@ describe('@logosdx/fetch: caching', async () => { let eventData: any = null; - api.on('fetch-cache-miss', (data) => { + api.on('cache-miss', (data) => { eventData = data; }); @@ -2884,7 +2883,7 @@ describe('@logosdx/fetch: caching', async () => { api.destroy(); }); - it('should emit fetch-cache-hit with correct interface', async () => { + it('should emit cache-hit with correct interface', async () => { // Validates cache hit event includes isStale flag const api = new FetchEngine({ @@ -2894,7 +2893,7 @@ describe('@logosdx/fetch: caching', async () => { let eventData: any = null; - api.on('fetch-cache-hit', (data) => { + api.on('cache-hit', (data) => { eventData = data; }); @@ -2924,7 +2923,7 @@ describe('@logosdx/fetch: caching', async () => { api.destroy(); }); - it('should emit fetch-cache-set with correct interface', async () => { + it('should emit cache-set with 
correct interface', async () => { // Validates cache set event data const api = new FetchEngine({ @@ -2934,7 +2933,7 @@ describe('@logosdx/fetch: caching', async () => { let eventData: any = null; - api.on('fetch-cache-set', (data) => { + api.on('cache-set', (data) => { eventData = data; }); @@ -2954,7 +2953,7 @@ describe('@logosdx/fetch: caching', async () => { api.destroy(); }); - it('should emit fetch-cache-stale with correct interface when using SWR', async () => { + it('should emit cache-stale with correct interface when using SWR', async () => { // Validates stale cache hit event (SWR scenario) const api = new FetchEngine({ @@ -2968,7 +2967,7 @@ describe('@logosdx/fetch: caching', async () => { let eventData: any = null; - api.on('fetch-cache-stale', (data) => { + api.on('cache-stale', (data) => { eventData = data; }); @@ -2999,7 +2998,7 @@ describe('@logosdx/fetch: caching', async () => { api.destroy(); }); - it('should emit fetch-cache-revalidate with correct interface', async () => { + it('should emit cache-revalidate with correct interface', async () => { // Validates revalidation event (SWR background refresh) const api = new FetchEngine({ @@ -3013,7 +3012,7 @@ describe('@logosdx/fetch: caching', async () => { let eventData: any = null; - api.on('fetch-cache-revalidate', (data) => { + api.on('cache-revalidate', (data) => { eventData = data; }); @@ -3039,7 +3038,7 @@ describe('@logosdx/fetch: caching', async () => { api.destroy(); }); - it('should emit fetch-cache-expire with correct interface', async () => { + it('should emit cache-expire with correct interface', async () => { // Validates cache expiration event (if it exists) const api = new FetchEngine({ @@ -3052,7 +3051,7 @@ describe('@logosdx/fetch: caching', async () => { let eventData: any = null; - api.on('fetch-cache-expire', (data) => { + api.on('cache-expire', (data) => { eventData = data; }); @@ -3093,7 +3092,7 @@ describe('@logosdx/fetch: caching', async () => { let eventData: any = null; - 
api.on('fetch-cache-revalidate-error', (data) => { + api.on('cache-revalidate-error', (data) => { eventData = data; }); @@ -3131,7 +3130,7 @@ describe('@logosdx/fetch: caching', async () => { let key1 = ''; let key2 = ''; - api.on('fetch-cache-set', (data) => { + api.on('cache-set', (data) => { if (!key1) key1 = data.key; else key2 = data.key; @@ -3168,7 +3167,7 @@ describe('@logosdx/fetch: caching', async () => { let getKey = ''; let postKey = ''; - api.on('fetch-cache-set', (data) => { + api.on('cache-set', (data) => { if (data.method === 'GET') getKey = data.key; else if (data.method === 'POST') postKey = data.key; @@ -3195,7 +3194,7 @@ describe('@logosdx/fetch: caching', async () => { let key1 = ''; let key2 = ''; - api.on('fetch-cache-set', (data) => { + api.on('cache-set', (data) => { if (!key1) key1 = data.key; else key2 = data.key; @@ -3287,7 +3286,7 @@ describe('@logosdx/fetch: caching', async () => { let missCount = 0; - api.on('fetch-cache-miss', () => missCount++); + api.on('cache-miss', () => missCount++); const path = '/json'; @@ -3484,7 +3483,7 @@ describe('@logosdx/fetch: caching', async () => { const api = new FetchEngine({ baseUrl: testUrl, - timeout: 100, + totalTimeout: 100, cachePolicy: { enabled: true, ttl: 5000, @@ -3537,7 +3536,7 @@ describe('@logosdx/fetch: caching', async () => { const path = '/json'; // Request should either succeed without caching or fail gracefully - const [response, err] = await attempt(() => api.get(path)); + const [, err] = await attempt(() => api.get(path)); // System should handle gracefully expect(err).to.satisfy((e: any) => { @@ -3572,7 +3571,7 @@ describe('@logosdx/fetch: caching', async () => { const staleEvents: string[] = []; - api.on('fetch-cache-stale', () => staleEvents.push('stale')); + api.on('cache-stale', () => staleEvents.push('stale')); // First request - populates cache await api.get('/json'); @@ -3603,7 +3602,7 @@ describe('@logosdx/fetch: caching', async () => { const missEvents: string[] = []; - 
api.on('fetch-cache-miss', () => missEvents.push('miss')); + api.on('cache-miss', () => missEvents.push('miss')); // First rule should win - caching disabled for /json await api.get('/json'); @@ -3631,8 +3630,8 @@ describe('@logosdx/fetch: caching', async () => { const missEvents: string[] = []; const hitEvents: string[] = []; - api.on('fetch-cache-miss', (data) => missEvents.push(data.path!)); - api.on('fetch-cache-hit', (data) => hitEvents.push(data.path!)); + api.on('cache-miss', (data) => missEvents.push(data.path!)); + api.on('cache-hit', (data) => hitEvents.push(data.path!)); // /json1 matches both startsWith '/json' AND includes '1' - rule disables cache await api.get('/json1'); @@ -3667,8 +3666,8 @@ describe('@logosdx/fetch: caching', async () => { const missEvents: string[] = []; const hitEvents: string[] = []; - api.on('fetch-cache-miss', (data) => missEvents.push(data.path!)); - api.on('fetch-cache-hit', (data) => hitEvents.push(data.path!)); + api.on('cache-miss', (data) => missEvents.push(data.path!)); + api.on('cache-hit', (data) => hitEvents.push(data.path!)); // /json1 has caching enabled by rule await api.get('/json1'); @@ -3710,13 +3709,13 @@ describe('@logosdx/fetch: caching', async () => { // First request - should evaluate cache rule const [res1] = await attempt(() => api.get(path)); - expect(res1?.data.ok).to.be.true; + expect((res1?.data as any).ok).to.be.true; await new Promise(res => setTimeout(res, 50)); // Second request - should use cached rule resolution AND cached response const [res2] = await attempt(() => api.get(path)); - expect(res2?.data.ok).to.be.true; + expect((res2?.data as any).ok).to.be.true; // Only one server call (second served from cache) // NOTE: This is testing that rule caching works, but we can't verify callStub here @@ -3828,7 +3827,7 @@ describe('@logosdx/fetch: caching', async () => { }); const revalidateEvents: string[] = []; - api.on('fetch-cache-revalidate', () => revalidateEvents.push('revalidate')); + 
api.on('cache-revalidate', () => revalidateEvents.push('revalidate')); // First request await api.get(path); @@ -3870,7 +3869,7 @@ describe('@logosdx/fetch: caching', async () => { }); const revalidateEvents: string[] = []; - api.on('fetch-cache-revalidate', () => revalidateEvents.push('revalidate')); + api.on('cache-revalidate', () => revalidateEvents.push('revalidate')); // First request - populates cache (flaky succeeds first time) await api.get(path); @@ -3919,7 +3918,7 @@ describe('@logosdx/fetch: caching', async () => { }); const cacheEvents: { path: string }[] = []; - api.on('fetch-cache-set', ({ path }) => { + api.on('cache-set', ({ path }) => { cacheEvents.push({ path: path! }); }); @@ -3960,8 +3959,8 @@ describe('@logosdx/fetch: caching', async () => { const hitEvents: string[] = []; const missEvents: string[] = []; - api.on('fetch-cache-hit', () => hitEvents.push('hit')); - api.on('fetch-cache-miss', () => missEvents.push('miss')); + api.on('cache-hit', () => hitEvents.push('hit')); + api.on('cache-miss', () => missEvents.push('miss')); const path = '/test-' + Date.now(); @@ -3995,8 +3994,8 @@ describe('@logosdx/fetch: caching', async () => { const staleEvents: string[] = []; const revalidateEvents: string[] = []; - api.on('fetch-cache-stale', () => staleEvents.push('stale')); - api.on('fetch-cache-revalidate', () => revalidateEvents.push('revalidate')); + api.on('cache-stale', () => staleEvents.push('stale')); + api.on('cache-revalidate', () => revalidateEvents.push('revalidate')); const path = '/test-' + Date.now(); @@ -4029,9 +4028,9 @@ describe('@logosdx/fetch: caching', async () => { const missEvents: string[] = []; const staleEvents: string[] = []; - api.on('fetch-cache-hit', () => hitEvents.push('hit')); - api.on('fetch-cache-miss', () => missEvents.push('miss')); - api.on('fetch-cache-stale', () => staleEvents.push('stale')); + api.on('cache-hit', () => hitEvents.push('hit')); + api.on('cache-miss', () => missEvents.push('miss')); + 
api.on('cache-stale', () => staleEvents.push('stale')); const path = '/test-' + Date.now(); @@ -4068,8 +4067,8 @@ describe('@logosdx/fetch: caching', async () => { const hitEvents: string[] = []; const staleEvents: string[] = []; - api.on('fetch-cache-hit', () => hitEvents.push('hit')); - api.on('fetch-cache-stale', () => staleEvents.push('stale')); + api.on('cache-hit', () => hitEvents.push('hit')); + api.on('cache-stale', () => staleEvents.push('stale')); const path = '/test-' + Date.now(); @@ -4103,8 +4102,8 @@ describe('@logosdx/fetch: caching', async () => { const hitEvents: string[] = []; const missEvents: string[] = []; - api.on('fetch-cache-hit', () => hitEvents.push('hit')); - api.on('fetch-cache-miss', () => missEvents.push('miss')); + api.on('cache-hit', () => hitEvents.push('hit')); + api.on('cache-miss', () => missEvents.push('miss')); // /empty returns null const r1 = await api.get('/empty'); @@ -4144,8 +4143,8 @@ describe('@logosdx/fetch: caching', async () => { const hitEvents: string[] = []; const missEvents: string[] = []; - api.on('fetch-cache-hit', () => hitEvents.push('hit')); - api.on('fetch-cache-miss', () => missEvents.push('miss')); + api.on('cache-hit', () => hitEvents.push('hit')); + api.on('cache-miss', () => missEvents.push('miss')); const path = '/test-' + Date.now(); @@ -4188,7 +4187,7 @@ describe('@logosdx/fetch: caching', async () => { const errorEvents: string[] = []; - api.on('fetch-error', ({ data }: FetchEngine.EventData) => { + api.on('error', ({ data }: FetchEngine.EventData) => { errorEvents.push((data as any).error?.message); }); @@ -4219,7 +4218,7 @@ describe('@logosdx/fetch: caching', async () => { const errorEvents: string[] = []; - api.on('fetch-error', ({ data }: FetchEngine.EventData) => { + api.on('error', ({ data }: FetchEngine.EventData) => { errorEvents.push((data as any).error?.message); }); @@ -4247,7 +4246,7 @@ describe('@logosdx/fetch: caching', async () => { const setEvents: any[] = []; - 
api.on('fetch-cache-set', ({ key }: FetchEngine.CacheEventData) => { + api.on('cache-set', ({ key }: FetchEngine.CacheEventData) => { setEvents.push(key); }); @@ -4279,8 +4278,8 @@ describe('@logosdx/fetch: caching', async () => { const hitEvents: string[] = []; const missEvents: string[] = []; - api.on('fetch-cache-hit', () => hitEvents.push('hit')); - api.on('fetch-cache-miss', () => missEvents.push('miss')); + api.on('cache-hit', () => hitEvents.push('hit')); + api.on('cache-miss', () => missEvents.push('miss')); const path = '/test-' + Date.now(); @@ -4313,8 +4312,8 @@ describe('@logosdx/fetch: caching', async () => { const hitEvents: string[] = []; const missEvents: string[] = []; - api.on('fetch-cache-hit', () => hitEvents.push('hit')); - api.on('fetch-cache-miss', () => missEvents.push('miss')); + api.on('cache-hit', () => hitEvents.push('hit')); + api.on('cache-miss', () => missEvents.push('miss')); const path = '/test-' + Date.now(); @@ -4355,8 +4354,8 @@ describe('@logosdx/fetch: caching', async () => { const hitEvents: string[] = []; const missEvents: string[] = []; - api.on('fetch-cache-hit', () => hitEvents.push('hit')); - api.on('fetch-cache-miss', () => missEvents.push('miss')); + api.on('cache-hit', () => hitEvents.push('hit')); + api.on('cache-miss', () => missEvents.push('miss')); const path = '/test-' + Date.now(); @@ -4394,7 +4393,7 @@ describe('@logosdx/fetch: caching', async () => { const staleEvents: string[] = []; - api.on('fetch-cache-stale', () => staleEvents.push('stale')); + api.on('cache-stale', () => staleEvents.push('stale')); const path = '/test-' + Date.now(); @@ -4448,8 +4447,8 @@ describe('@logosdx/fetch: caching', async () => { const staleEvents: string[] = []; const hitEvents: string[] = []; - api.on('fetch-cache-stale', () => staleEvents.push('stale')); - api.on('fetch-cache-hit', () => hitEvents.push('hit')); + api.on('cache-stale', () => staleEvents.push('stale')); + api.on('cache-hit', () => hitEvents.push('hit')); const 
path = '/test-' + Date.now(); @@ -4517,7 +4516,7 @@ describe('@logosdx/fetch: caching', async () => { const missEvents: string[] = []; - api.on('fetch-cache-miss', () => missEvents.push('miss')); + api.on('cache-miss', () => missEvents.push('miss')); // Request should succeed because rule.skip is not currently called // (only global skip is evaluated in the implementation) @@ -4548,7 +4547,7 @@ describe('@logosdx/fetch: caching', async () => { const setEvents: any[] = []; - api.on('fetch-cache-set', ({ data }) => { + api.on('cache-set', ({ data }) => { setEvents.push(data); }); @@ -4584,8 +4583,8 @@ describe('@logosdx/fetch: caching', async () => { const missEvents: string[] = []; const setEvents: string[] = []; - api.on('fetch-cache-miss', () => missEvents.push('miss')); - api.on('fetch-cache-set', () => setEvents.push('set')); + api.on('cache-miss', () => missEvents.push('miss')); + api.on('cache-set', () => setEvents.push('set')); const path = '/json-' + Date.now(); @@ -4620,8 +4619,8 @@ describe('@logosdx/fetch: caching', async () => { const hitEvents: string[] = []; const missEvents: string[] = []; - api.on('fetch-cache-hit', () => hitEvents.push('hit')); - api.on('fetch-cache-miss', () => missEvents.push('miss')); + api.on('cache-hit', () => hitEvents.push('hit')); + api.on('cache-miss', () => missEvents.push('miss')); const path = '/json-' + Date.now(); diff --git a/tests/src/fetch/deduplication.ts b/tests/src/fetch/policies/dedupe.test.ts similarity index 93% rename from tests/src/fetch/deduplication.ts rename to tests/src/fetch/policies/dedupe.test.ts index def8184..003ec37 100644 --- a/tests/src/fetch/deduplication.ts +++ b/tests/src/fetch/policies/dedupe.test.ts @@ -8,11 +8,11 @@ import { import { FetchError, FetchEngine, -} from '../../../packages/fetch/src/index.ts'; +} from '../../../../packages/fetch/src/index.ts'; -import { attempt, wait } from '../../../packages/utils/src/index.ts'; -import { makeTestStubs } from './_helpers.ts'; -import { 
attemptSync } from '../../../packages/kit/src/index.ts'; +import { attempt, wait } from '../../../../packages/utils/src/index.ts'; +import { makeTestStubs } from '../_helpers.ts'; +import { attemptSync } from '../../../../packages/kit/src/index.ts'; describe('@logosdx/fetch: deduplication', async () => { @@ -28,12 +28,12 @@ describe('@logosdx/fetch: deduplication', async () => { const startEvents: string[] = []; const joinEvents: string[] = []; - api.on('fetch-dedupe-start', (data) => { + api.on('dedupe-start', (data) => { startEvents.push(data.path!); }); - api.on('fetch-dedupe-join', (data) => { + api.on('dedupe-join', (data) => { joinEvents.push(data.path!); }); @@ -69,7 +69,7 @@ describe('@logosdx/fetch: deduplication', async () => { const startEvents: string[] = []; - api.on('fetch-dedupe-start', (data) => { + api.on('dedupe-start', (data) => { startEvents.push(data.path!); }); @@ -96,12 +96,12 @@ describe('@logosdx/fetch: deduplication', async () => { const startEvents: string[] = []; const joinEvents: string[] = []; - api.on('fetch-dedupe-start', (data) => { + api.on('dedupe-start', (data) => { startEvents.push(data.method!); }); - api.on('fetch-dedupe-join', (data) => { + api.on('dedupe-join', (data) => { joinEvents.push(data.method!); }); @@ -132,12 +132,12 @@ describe('@logosdx/fetch: deduplication', async () => { const startEvents: string[] = []; const joinEvents: string[] = []; - api.on('fetch-dedupe-start', (data) => { + api.on('dedupe-start', (data) => { startEvents.push(data.method!); }); - api.on('fetch-dedupe-join', (data) => { + api.on('dedupe-join', (data) => { joinEvents.push(data.method!); }); @@ -171,7 +171,7 @@ describe('@logosdx/fetch: deduplication', async () => { const startEvents: string[] = []; - api.on('fetch-dedupe-start', (data) => { + api.on('dedupe-start', (data) => { startEvents.push(data.path!); }); @@ -207,12 +207,12 @@ describe('@logosdx/fetch: deduplication', async () => { const startEvents: string[] = []; const joinEvents: 
string[] = []; - api.on('fetch-dedupe-start', (data) => { + api.on('dedupe-start', (data) => { startEvents.push(data.method!); }); - api.on('fetch-dedupe-join', (data) => { + api.on('dedupe-join', (data) => { joinEvents.push(data.method!); }); @@ -252,8 +252,8 @@ describe('@logosdx/fetch: deduplication', async () => { const startEvents: string[] = []; const joinEvents: string[] = []; - api.on('fetch-dedupe-start', (data) => startEvents.push(data.path!)); - api.on('fetch-dedupe-join', (data) => joinEvents.push(data.path!)); + api.on('dedupe-start', (data) => startEvents.push(data.path!)); + api.on('dedupe-join', (data) => joinEvents.push(data.path!)); // Make 2 concurrent requests to the same path await Promise.all([ @@ -279,7 +279,7 @@ describe('@logosdx/fetch: deduplication', async () => { const startEvents: string[] = []; - api.on('fetch-dedupe-start', (data) => { + api.on('dedupe-start', (data) => { startEvents.push(data.path!); }); @@ -309,7 +309,7 @@ describe('@logosdx/fetch: deduplication', async () => { const waitingCounts: number[] = []; - api.on('fetch-dedupe-join', (data) => { + api.on('dedupe-join', (data) => { waitingCounts.push(data.waitingCount!); }); @@ -406,7 +406,7 @@ describe('@logosdx/fetch: deduplication', async () => { const startEvents: string[] = []; - api.on('fetch-dedupe-start', (data) => { + api.on('dedupe-start', (data) => { startEvents.push(data.key); }); @@ -452,7 +452,7 @@ describe('@logosdx/fetch: deduplication', async () => { const startEvents: string[] = []; - api.on('fetch-dedupe-start', (data) => { + api.on('dedupe-start', (data) => { startEvents.push(data.path!); }); @@ -630,7 +630,7 @@ describe('@logosdx/fetch: deduplication', async () => { const startEvents: string[] = []; - api.on('fetch-dedupe-start', () => startEvents.push('start')); + api.on('dedupe-start', () => startEvents.push('start')); // First batch await Promise.all([api.get('/json'), api.get('/json')]); @@ -657,8 +657,8 @@ describe('@logosdx/fetch: deduplication', 
async () => { const startEvents: string[] = []; const joinEvents: string[] = []; - api.on('fetch-dedupe-start', (data) => startEvents.push(data.path!)); - api.on('fetch-dedupe-join', (data) => joinEvents.push(data.path!)); + api.on('dedupe-start', (data) => startEvents.push(data.path!)); + api.on('dedupe-join', (data) => joinEvents.push(data.path!)); // 6 requests to 3 different paths (2 each) await Promise.all([ @@ -741,7 +741,7 @@ describe('@logosdx/fetch: deduplication', async () => { }); const cacheSetEvents: string[] = []; - api.on('fetch-cache-set', () => cacheSetEvents.push('set')); + api.on('cache-set', () => cacheSetEvents.push('set')); // Start a slow request const req = api.get('/wait', { timeout: 5000 }); @@ -774,7 +774,7 @@ describe('@logosdx/fetch: deduplication', async () => { const startEvents: string[] = []; - api.on('fetch-dedupe-start', (data) => startEvents.push(data.path!)); + api.on('dedupe-start', (data) => startEvents.push(data.path!)); // Test actual root path `/` await Promise.all([ @@ -792,7 +792,7 @@ describe('@logosdx/fetch: deduplication', async () => { dedupePolicy: true }); - api2.on('fetch-dedupe-start', (data) => startEvents2.push(data.path!)); + api2.on('dedupe-start', (data) => startEvents2.push(data.path!)); await Promise.all([ api2.get(''), @@ -814,7 +814,7 @@ describe('@logosdx/fetch: deduplication', async () => { const startEvents: string[] = []; - api.on('fetch-dedupe-start', (data) => startEvents.push(data.path!)); + api.on('dedupe-start', (data) => startEvents.push(data.path!)); // Path with special characters - same path should dedupe const specialPath = '/json?foo=bar&baz=qux#hash'; @@ -839,7 +839,7 @@ describe('@logosdx/fetch: deduplication', async () => { const startEvents: string[] = []; - api.on('fetch-dedupe-start', (data) => startEvents.push(data.path!)); + api.on('dedupe-start', (data) => startEvents.push(data.path!)); // Different query params should NOT be deduplicated await Promise.all([ @@ -864,7 +864,7 @@ 
describe('@logosdx/fetch: deduplication', async () => { const startEvents: string[] = []; - api.on('fetch-dedupe-start', (data) => startEvents.push(data.key)); + api.on('dedupe-start', (data) => startEvents.push(data.key)); // Different request-specific params should NOT be deduplicated await Promise.all([ @@ -888,7 +888,7 @@ describe('@logosdx/fetch: deduplication', async () => { const startEvents: string[] = []; - api.on('fetch-dedupe-start', (data) => startEvents.push(data.path!)); + api.on('dedupe-start', (data) => startEvents.push(data.path!)); // Very long path const longPath = '/json?' + 'x'.repeat(1000); @@ -975,9 +975,9 @@ describe('@logosdx/fetch: deduplication', async () => { const events: string[] = []; - api.on('fetch-dedupe-start', () => events.push('start')); - api.on('fetch-dedupe-join', () => events.push('join')); - api.on('fetch-retry', () => events.push('retry')); + api.on('dedupe-start', () => events.push('start')); + api.on('dedupe-join', () => events.push('join')); + api.on('retry', () => events.push('retry')); // Use fail-once endpoint: fails first, succeeds on retry // First request will fail and retry, second request should join during retry @@ -1008,7 +1008,7 @@ describe('@logosdx/fetch: deduplication', async () => { const startEvents: string[] = []; - api.on('fetch-dedupe-start', () => startEvents.push('start')); + api.on('dedupe-start', () => startEvents.push('start')); // Make 2 concurrent GET requests await Promise.all([ @@ -1034,7 +1034,7 @@ describe('@logosdx/fetch: deduplication', async () => { const startEvents: string[] = []; - api.on('fetch-dedupe-start', (data) => startEvents.push(data.key)); + api.on('dedupe-start', (data) => startEvents.push(data.key)); // POST requests with different payloads should NOT be deduplicated await Promise.all([ @@ -1061,7 +1061,7 @@ describe('@logosdx/fetch: deduplication', async () => { const startEvents: string[] = []; - api.on('fetch-dedupe-start', () => startEvents.push('start')); + 
api.on('dedupe-start', () => startEvents.push('start')); // POST should NOT be deduped since it's not in methods array await Promise.all([ @@ -1090,7 +1090,7 @@ describe('@logosdx/fetch: deduplication', async () => { const startEvents: string[] = []; - api.on('fetch-dedupe-start', () => startEvents.push('start')); + api.on('dedupe-start', () => startEvents.push('start')); // First rule should win - deduplication disabled for /json await Promise.all([ @@ -1119,7 +1119,7 @@ describe('@logosdx/fetch: deduplication', async () => { const startEvents: string[] = []; - api.on('fetch-dedupe-start', (data) => startEvents.push(data.path!)); + api.on('dedupe-start', (data) => startEvents.push(data.path!)); // /json1 matches both startsWith '/json' AND includes '1' - rule applies await Promise.all([ @@ -1158,7 +1158,7 @@ describe('@logosdx/fetch: deduplication', async () => { const startEvents: string[] = []; - api.on('fetch-dedupe-start', (data) => startEvents.push(data.path!)); + api.on('dedupe-start', (data) => startEvents.push(data.path!)); // Exact match /json - rule applies await Promise.all([ @@ -1266,8 +1266,8 @@ describe('@logosdx/fetch: deduplication', async () => { // Both should succeed, but no deduplication occurs // (both requests hit the server since resolveDedupeConfig returns null) - expect(res1.data.ok).to.be.true; - expect(res2.data.ok).to.be.true; + expect((res1.data as any).ok).to.be.true; + expect((res2.data as any).ok).to.be.true; api.destroy(); }); @@ -1288,7 +1288,7 @@ describe('@logosdx/fetch: deduplication', async () => { }); const startEvents: string[] = []; - api.on('fetch-dedupe-start', () => startEvents.push('start')); + api.on('dedupe-start', () => startEvents.push('start')); // Make two simultaneous requests const [res1, res2] = await Promise.all([ @@ -1298,8 +1298,8 @@ describe('@logosdx/fetch: deduplication', async () => { // Both should succeed, but only one server call (deduplicated) expect(startEvents.length).to.equal(1); - 
expect(res1.data.ok).to.be.true; - expect(res2.data.ok).to.be.true; + expect((res1.data as any).ok).to.be.true; + expect((res2.data as any).ok).to.be.true; api.destroy(); }); @@ -1321,7 +1321,7 @@ describe('@logosdx/fetch: deduplication', async () => { }); const startEvents: string[] = []; - api.on('fetch-dedupe-start', () => startEvents.push('start')); + api.on('dedupe-start', () => startEvents.push('start')); // Path1 should deduplicate const [r1, r2] = await Promise.all([ @@ -1341,10 +1341,10 @@ describe('@logosdx/fetch: deduplication', async () => { // The dedupe key is based on method+path+payload, not on the rule match // Both path2 requests happen concurrently and get deduplicated by the global flight tracker expect(startEvents.length).to.equal(2); - expect(r1.data.ok).to.be.true; - expect(r2.data.ok).to.be.true; - expect(r3.data.ok).to.be.true; - expect(r4.data.ok).to.be.true; + expect((r1.data as any).ok).to.be.true; + expect((r2.data as any).ok).to.be.true; + expect((r3.data as any).ok).to.be.true; + expect((r4.data as any).ok).to.be.true; api.destroy(); }); @@ -1363,7 +1363,7 @@ describe('@logosdx/fetch: deduplication', async () => { }); const startEvents: string[] = []; - api.on('fetch-dedupe-start', () => startEvents.push('start')); + api.on('dedupe-start', () => startEvents.push('start')); // Make two simultaneous requests const [res1, res2] = await Promise.all([ @@ -1373,8 +1373,8 @@ describe('@logosdx/fetch: deduplication', async () => { // Should deduplicate (undefined treated as truthy) expect(startEvents.length).to.equal(1); - expect(res1.data.ok).to.be.true; - expect(res2.data.ok).to.be.true; + expect((res1.data as any).ok).to.be.true; + expect((res2.data as any).ok).to.be.true; api.destroy(); }); @@ -1393,7 +1393,7 @@ describe('@logosdx/fetch: deduplication', async () => { }); const startEvents: string[] = []; - api.on('fetch-dedupe-start', () => startEvents.push('start')); + api.on('dedupe-start', () => startEvents.push('start')); // Make two 
simultaneous requests const [res1, res2] = await Promise.all([ @@ -1403,8 +1403,8 @@ describe('@logosdx/fetch: deduplication', async () => { // Should NOT deduplicate (false is explicit) expect(startEvents.length).to.equal(0); - expect(res1.data.ok).to.be.true; - expect(res2.data.ok).to.be.true; + expect((res1.data as any).ok).to.be.true; + expect((res2.data as any).ok).to.be.true; api.destroy(); }); @@ -1425,7 +1425,7 @@ describe('@logosdx/fetch: deduplication', async () => { }); const startEvents: string[] = []; - api.on('fetch-dedupe-start', () => startEvents.push('start')); + api.on('dedupe-start', () => startEvents.push('start')); // GET should deduplicate (inherited from config) const [r1, r2] = await Promise.all([ @@ -1441,10 +1441,10 @@ describe('@logosdx/fetch: deduplication', async () => { // Both should have deduplicated expect(startEvents.length).to.equal(2); // 1 GET + 1 POST - expect(r1.data.ok).to.be.true; - expect(r2.data.ok).to.be.true; - expect(r3.data.ok).to.be.true; - expect(r4.data.ok).to.be.true; + expect((r1.data as any).ok).to.be.true; + expect((r2.data as any).ok).to.be.true; + expect((r3.data as any).ok).to.be.true; + expect((r4.data as any).ok).to.be.true; api.destroy(); }); @@ -1468,7 +1468,7 @@ describe('@logosdx/fetch: deduplication', async () => { }); const startEvents: string[] = []; - api.on('fetch-dedupe-start', () => startEvents.push('start')); + api.on('dedupe-start', () => startEvents.push('start')); // GET should deduplicate (rule allows) const [r1, r2] = await Promise.all([ @@ -1485,10 +1485,10 @@ describe('@logosdx/fetch: deduplication', async () => { // GET: 1 call (deduplicated), POST: 1 call (also deduplicated by flight tracker) // The POST requests happen concurrently and share the same dedupe key expect(startEvents.length).to.equal(2); - expect(r1.data.ok).to.be.true; - expect(r2.data.ok).to.be.true; - expect(r3.data.ok).to.be.true; - expect(r4.data.ok).to.be.true; + expect((r1.data as any).ok).to.be.true; + 
expect((r2.data as any).ok).to.be.true; + expect((r3.data as any).ok).to.be.true; + expect((r4.data as any).ok).to.be.true; api.destroy(); }); @@ -1517,7 +1517,7 @@ describe('@logosdx/fetch: deduplication', async () => { }); const startEvents: string[] = []; - api.on('fetch-dedupe-start', () => startEvents.push('start')); + api.on('dedupe-start', () => startEvents.push('start')); // Inherit path: both GET and POST should dedupe const [i1, i2] = await Promise.all([ @@ -1540,8 +1540,8 @@ describe('@logosdx/fetch: deduplication', async () => { ]); // Verify results - expect(i1.data.ok && i2.data.ok && i3.data.ok && i4.data.ok).to.be.true; - expect(o1.data.ok && o2.data.ok && o3.data.ok && o4.data.ok).to.be.true; + expect((i1.data as any).ok && (i2.data as any).ok && (i3.data as any).ok && (i4.data as any).ok).to.be.true; + expect((o1.data as any).ok && (o2.data as any).ok && (o3.data as any).ok && (o4.data as any).ok).to.be.true; // inheritPath: 1 GET + 1 POST (both deduped) // overridePath: 1 GET (deduped) + 2 POST (not deduped) @@ -1568,12 +1568,12 @@ describe('@logosdx/fetch: deduplication', async () => { const events: string[] = []; - api.on('fetch-dedupe-start', ({ path }: any) => { + api.on('dedupe-start', ({ path }: any) => { events.push(`start:${path}`); }); - api.on('fetch-dedupe-join', ({ path }: any) => { + api.on('dedupe-join', ({ path }: any) => { events.push(`join:${path}`); }); @@ -1606,7 +1606,7 @@ describe('@logosdx/fetch: deduplication', async () => { const startEvents: any[] = []; - api.on('fetch-dedupe-start', (event) => { + api.on('dedupe-start', (event) => { startEvents.push(event); }); @@ -1640,7 +1640,7 @@ describe('@logosdx/fetch: deduplication', async () => { const startEvents: any[] = []; - api.on('fetch-dedupe-start', (event) => { + api.on('dedupe-start', (event) => { startEvents.push(event); }); @@ -1674,7 +1674,7 @@ describe('@logosdx/fetch: deduplication', async () => { const startEvents: any[] = []; - api.on('fetch-dedupe-start', 
(event) => { + api.on('dedupe-start', (event) => { startEvents.push(event); }); @@ -1708,7 +1708,7 @@ describe('@logosdx/fetch: deduplication', async () => { const startEvents: any[] = []; - api.on('fetch-dedupe-start', (event) => { + api.on('dedupe-start', (event) => { startEvents.push(event); }); @@ -1741,7 +1741,7 @@ describe('@logosdx/fetch: deduplication', async () => { const startEvents: any[] = []; - api.on('fetch-dedupe-start', (event) => { + api.on('dedupe-start', (event) => { startEvents.push(event); }); @@ -1775,7 +1775,7 @@ describe('@logosdx/fetch: deduplication', async () => { const startEvents: any[] = []; - api.on('fetch-dedupe-start', (event) => { + api.on('dedupe-start', (event) => { startEvents.push(event); }); @@ -1807,7 +1807,7 @@ describe('@logosdx/fetch: deduplication', async () => { const startEvents: any[] = []; - api.on('fetch-dedupe-start', (event) => { + api.on('dedupe-start', (event) => { startEvents.push(event); }); @@ -1834,7 +1834,7 @@ describe('@logosdx/fetch: deduplication', async () => { const startEvents: string[] = []; - api.on('fetch-dedupe-start', () => { + api.on('dedupe-start', () => { startEvents.push('start'); }); @@ -1867,7 +1867,7 @@ describe('@logosdx/fetch: deduplication', async () => { const startEvents: object[] = []; - api.on('fetch-dedupe-start', (event) => { + api.on('dedupe-start', (event) => { startEvents.push(event); }); @@ -1930,7 +1930,7 @@ describe('@logosdx/fetch: deduplication', async () => { const keys: any[] = []; - api.on('fetch-dedupe-start', (event) => { + api.on('dedupe-start', (event) => { keys.push(event); }); @@ -1964,7 +1964,7 @@ describe('@logosdx/fetch: deduplication', async () => { const startEvents: string[] = []; - api.on('fetch-dedupe-start', () => { + api.on('dedupe-start', () => { startEvents.push('start'); }); @@ -2026,7 +2026,7 @@ describe('@logosdx/fetch: deduplication', async () => { const events: object[] = []; - api.on('fetch-dedupe-start', (event) => { + api.on('dedupe-start', 
(event) => { events.push(event); }); @@ -2058,7 +2058,7 @@ describe('@logosdx/fetch: deduplication', async () => { const startEvents: string[] = []; - api.on('fetch-dedupe-start', () => startEvents.push('start')); + api.on('dedupe-start', () => startEvents.push('start')); const [r1, r2] = await Promise.all([ api.post('/json', ''), @@ -2099,7 +2099,7 @@ describe('@logosdx/fetch: deduplication', async () => { const errorEvents: string[] = []; - api.on('fetch-error', ({ data }) => { + api.on('error', ({ data }) => { errorEvents.push((data as any).error!.message); }); @@ -2127,7 +2127,7 @@ describe('@logosdx/fetch: deduplication', async () => { const startEvents: any[] = []; - api.on('fetch-dedupe-start', ({ key }) => { + api.on('dedupe-start', ({ key }) => { startEvents.push(key); }); @@ -2168,7 +2168,7 @@ describe('@logosdx/fetch: deduplication', async () => { const startEvents: any[] = []; - api.on('fetch-dedupe-start', ({ key }) => { + api.on('dedupe-start', ({ key }) => { startEvents.push(key); }); @@ -2236,7 +2236,7 @@ describe('@logosdx/fetch: deduplication', async () => { const errorEvents: string[] = []; - api.on('fetch-error', ({ data }) => { + api.on('error', ({ data }) => { errorEvents.push((data as any).error!.message); }); @@ -2298,7 +2298,7 @@ describe('@logosdx/fetch: deduplication', async () => { const startEvents: string[] = []; - api.on('fetch-dedupe-start', () => startEvents.push('start')); + api.on('dedupe-start', () => startEvents.push('start')); const path = '/json'; @@ -2330,7 +2330,7 @@ describe('@logosdx/fetch: deduplication', async () => { const startEvents: string[] = []; - api.on('fetch-dedupe-start', () => startEvents.push('start')); + api.on('dedupe-start', () => startEvents.push('start')); const path = '/json-' + Date.now(); @@ -2432,7 +2432,7 @@ describe('@logosdx/fetch: deduplication', async () => { }); const events: string[] = []; - api.on('fetch-dedupe-start', () => events.push('dedupe-start')); + api.on('dedupe-start', () => 
events.push('dedupe-start')); // Path NOT matching rule - should dedupe const [r1] = await attempt(() => api.get('/other/path')); @@ -2464,7 +2464,7 @@ describe('@logosdx/fetch: deduplication', async () => { }); const events: string[] = []; - api.on('fetch-dedupe-start', () => events.push('dedupe-start')); + api.on('dedupe-start', () => events.push('dedupe-start')); // Path NOT matching rule - should dedupe const [r1] = await attempt(() => api.get('/data.json')); @@ -2496,7 +2496,7 @@ describe('@logosdx/fetch: deduplication', async () => { }); const events: string[] = []; - api.on('fetch-dedupe-start', () => events.push('dedupe-start')); + api.on('dedupe-start', () => events.push('dedupe-start')); // Path NOT matching regex - should dedupe const [r1] = await attempt(() => api.get('/items/123')); @@ -2531,7 +2531,7 @@ describe('@logosdx/fetch: deduplication', async () => { }); const events: string[] = []; - api.on('fetch-dedupe-start', () => events.push('dedupe-start')); + api.on('dedupe-start', () => events.push('dedupe-start')); // Should NOT dedupe (matches all three conditions) await api.get('/api/users/123/profile'); @@ -2569,7 +2569,7 @@ describe('@logosdx/fetch: deduplication', async () => { }); const events: string[] = []; - api.on('fetch-dedupe-start', () => events.push('dedupe-start')); + api.on('dedupe-start', () => events.push('dedupe-start')); // Should NOT dedupe (matches all three conditions) await api.get('/api/users/123/profile'); @@ -2598,7 +2598,7 @@ describe('@logosdx/fetch: deduplication', async () => { }); const events: string[] = []; - api.on('fetch-dedupe-start', () => events.push('dedupe-start')); + api.on('dedupe-start', () => events.push('dedupe-start')); // Should dedupe GET (path doesn't match rule) await api.get('/api/data'); @@ -2778,7 +2778,7 @@ describe('@logosdx/fetch: deduplication', async () => { }); const keys: string[] = []; - api.on('fetch-dedupe-start', (event) => { + api.on('dedupe-start', (event) => { keys.push(event.key); 
}); @@ -2801,7 +2801,7 @@ describe('@logosdx/fetch: deduplication', async () => { }); const keys: string[] = []; - api.on('fetch-dedupe-start', (event) => { + api.on('dedupe-start', (event) => { keys.push(event.key); }); @@ -2827,7 +2827,7 @@ describe('@logosdx/fetch: deduplication', async () => { }); const keys: string[] = []; - api.on('fetch-dedupe-start', (event) => { + api.on('dedupe-start', (event) => { keys.push(event.key); }); @@ -2854,7 +2854,7 @@ describe('@logosdx/fetch: deduplication', async () => { }); const keys: string[] = []; - api.on('fetch-dedupe-start', (event) => { + api.on('dedupe-start', (event) => { keys.push(event.key); }); @@ -2874,7 +2874,7 @@ describe('@logosdx/fetch: deduplication', async () => { api.destroy(); }); - it('should emit fetch-dedupe-start with correct interface', async () => { + it('should emit dedupe-start with correct interface', async () => { // Validates DedupeEventData interface compliance const api = new FetchEngine({ @@ -2883,7 +2883,7 @@ describe('@logosdx/fetch: deduplication', async () => { }); let eventData: any; - api.on('fetch-dedupe-start', (event) => { + api.on('dedupe-start', (event) => { eventData = event; }); @@ -2908,7 +2908,7 @@ describe('@logosdx/fetch: deduplication', async () => { api.destroy(); }); - it('should emit fetch-dedupe-join with correct interface', async () => { + it('should emit dedupe-join with correct interface', async () => { // Validates join event includes waitingCount const api = new FetchEngine({ @@ -2917,7 +2917,7 @@ describe('@logosdx/fetch: deduplication', async () => { }); let eventData: any; - api.on('fetch-dedupe-join', (event) => { + api.on('dedupe-join', (event) => { eventData = event; }); @@ -2943,7 +2943,7 @@ describe('@logosdx/fetch: deduplication', async () => { api.destroy(); }); - it('should emit fetch-dedupe-complete with correct interface', async () => { + it('should emit dedupe-complete with correct interface', async () => { // Validates dedupe complete event (if it 
exists) const api = new FetchEngine({ @@ -2953,7 +2953,7 @@ describe('@logosdx/fetch: deduplication', async () => { let eventData: any; let eventFired = false; - api.on('fetch-dedupe-complete', (event) => { + api.on('dedupe-complete', (event) => { eventData = event; eventFired = true; @@ -2985,7 +2985,7 @@ describe('@logosdx/fetch: deduplication', async () => { api.destroy(); }); - it('should emit fetch-dedupe-error with correct interface on failure', async () => { + it('should emit dedupe-error with correct interface on failure', async () => { // Validates dedupe error event data const api = new FetchEngine({ @@ -2995,7 +2995,7 @@ describe('@logosdx/fetch: deduplication', async () => { let eventData: any; let eventFired = false; - api.on('fetch-dedupe-error', (event) => { + api.on('dedupe-error', (event) => { eventData = event; eventFired = true; @@ -3068,12 +3068,12 @@ describe('@logosdx/fetch: deduplication', async () => { const startKeys: string[] = []; const joinKeys: string[] = []; - api.on('fetch-dedupe-start', (event) => { + api.on('dedupe-start', (event) => { startKeys.push(event.key); }); - api.on('fetch-dedupe-join', (event) => { + api.on('dedupe-join', (event) => { joinKeys.push(event.key); }); diff --git a/tests/src/fetch/rate-limiting.ts b/tests/src/fetch/policies/rate-limit.test.ts similarity index 92% rename from tests/src/fetch/rate-limiting.ts rename to tests/src/fetch/policies/rate-limit.test.ts index 1689c90..33806dd 100644 --- a/tests/src/fetch/rate-limiting.ts +++ b/tests/src/fetch/policies/rate-limit.test.ts @@ -6,21 +6,19 @@ import { import { FetchEngine, -} from '../../../packages/fetch/src/index.ts'; +} from '../../../../packages/fetch/src/index.ts'; import { attempt, - wait, - RateLimitError, isRateLimitError -} from '../../../packages/utils/src/index.ts'; +} from '../../../../packages/utils/src/index.ts'; -import { makeTestStubs } from './_helpers.ts'; +import { makeTestStubs } from '../_helpers.ts'; describe('@logosdx/fetch: rate 
limiting', async () => { - const { testUrl, callStub } = await makeTestStubs(4300); + const { testUrl } = await makeTestStubs(4300); describe('basic configuration', () => { @@ -34,7 +32,7 @@ describe('@logosdx/fetch: rate limiting', async () => { const acquireEvents: string[] = []; - api.on('fetch-ratelimit-acquire', (data) => { + api.on('ratelimit-acquire', (data) => { acquireEvents.push(data.path!); }); @@ -56,7 +54,7 @@ describe('@logosdx/fetch: rate limiting', async () => { const acquireEvents: string[] = []; - api.on('fetch-ratelimit-acquire', (data) => { + api.on('ratelimit-acquire', (data) => { acquireEvents.push(data.path!); }); @@ -76,7 +74,7 @@ describe('@logosdx/fetch: rate limiting', async () => { const acquireEvents: string[] = []; - api.on('fetch-ratelimit-acquire', (data) => { + api.on('ratelimit-acquire', (data) => { acquireEvents.push(data.path!); }); @@ -97,7 +95,7 @@ describe('@logosdx/fetch: rate limiting', async () => { const acquireEvents: FetchEngine.RateLimitEventData[] = []; - api.on('fetch-ratelimit-acquire', (data) => { + api.on('ratelimit-acquire', (data) => { acquireEvents.push(data); }); @@ -123,7 +121,7 @@ describe('@logosdx/fetch: rate limiting', async () => { const acquireEvents: FetchEngine.RateLimitEventData[] = []; - api.on('fetch-ratelimit-acquire', (data) => { + api.on('ratelimit-acquire', (data) => { acquireEvents.push(data); }); @@ -154,7 +152,7 @@ describe('@logosdx/fetch: rate limiting', async () => { const acquireEvents: FetchEngine.RateLimitEventData[] = []; - api.on('fetch-ratelimit-acquire', (data) => { + api.on('ratelimit-acquire', (data) => { acquireEvents.push(data); }); @@ -187,7 +185,7 @@ describe('@logosdx/fetch: rate limiting', async () => { const acquireEvents: FetchEngine.RateLimitEventData[] = []; - api.on('fetch-ratelimit-acquire', (data) => { + api.on('ratelimit-acquire', (data) => { acquireEvents.push(data); }); @@ -221,12 +219,12 @@ describe('@logosdx/fetch: rate limiting', async () => { const waitEvents: 
FetchEngine.RateLimitEventData[] = []; const acquireEvents: FetchEngine.RateLimitEventData[] = []; - api.on('fetch-ratelimit-wait', (data) => { + api.on('ratelimit-wait', (data) => { waitEvents.push(data); }); - api.on('fetch-ratelimit-acquire', (data) => { + api.on('ratelimit-acquire', (data) => { acquireEvents.push(data); }); @@ -268,7 +266,7 @@ describe('@logosdx/fetch: rate limiting', async () => { const rejectEvents: FetchEngine.RateLimitEventData[] = []; - api.on('fetch-ratelimit-reject', (data) => { + api.on('ratelimit-reject', (data) => { rejectEvents.push(data); }); @@ -338,14 +336,14 @@ describe('@logosdx/fetch: rate limiting', async () => { const acquireEvents: FetchEngine.RateLimitEventData[] = []; - api.on('fetch-ratelimit-acquire', (data) => { + api.on('ratelimit-acquire', (data) => { acquireEvents.push(data); }); // Same path, different query params - await api.get('/json', { params: { page: 1 } }); - await api.get('/json', { params: { page: 2 } }); + await api.get('/json', { params: { page: '1' } }); + await api.get('/json', { params: { page: '2' } }); // Both should use the same bucket (key = 'GET|/json') expect(acquireEvents[0]!.key).to.equal('GET|/json'); @@ -371,7 +369,7 @@ describe('@logosdx/fetch: rate limiting', async () => { const acquireEvents: FetchEngine.RateLimitEventData[] = []; - api.on('fetch-ratelimit-acquire', (data) => { + api.on('ratelimit-acquire', (data) => { acquireEvents.push(data); }); @@ -404,7 +402,7 @@ describe('@logosdx/fetch: rate limiting', async () => { const acquireEvents: FetchEngine.RateLimitEventData[] = []; - api.on('fetch-ratelimit-acquire', (data) => { + api.on('ratelimit-acquire', (data) => { acquireEvents.push(data); }); @@ -434,7 +432,7 @@ describe('@logosdx/fetch: rate limiting', async () => { const acquireEvents: FetchEngine.RateLimitEventData[] = []; - api.on('fetch-ratelimit-acquire', (data) => { + api.on('ratelimit-acquire', (data) => { acquireEvents.push(data); }); @@ -470,7 +468,7 @@ 
describe('@logosdx/fetch: rate limiting', async () => { const acquireEvents: FetchEngine.RateLimitEventData[] = []; - api.on('fetch-ratelimit-acquire', (data) => { + api.on('ratelimit-acquire', (data) => { acquireEvents.push(data); }); @@ -506,7 +504,7 @@ describe('@logosdx/fetch: rate limiting', async () => { const acquireEvents: string[] = []; - api.on('fetch-ratelimit-acquire', (data) => { + api.on('ratelimit-acquire', (data) => { acquireEvents.push(data.method!); }); @@ -533,7 +531,7 @@ describe('@logosdx/fetch: rate limiting', async () => { const acquireEvents: string[] = []; - api.on('fetch-ratelimit-acquire', (data) => { + api.on('ratelimit-acquire', (data) => { acquireEvents.push(data.method!); }); @@ -567,7 +565,7 @@ describe('@logosdx/fetch: rate limiting', async () => { const acquireEvents: string[] = []; - api.on('fetch-ratelimit-acquire', (data) => { + api.on('ratelimit-acquire', (data) => { acquireEvents.push(data.path!); }); @@ -617,7 +615,7 @@ describe('@logosdx/fetch: rate limiting', async () => { const acquireEvents: FetchEngine.RateLimitEventData[] = []; - api.on('fetch-ratelimit-acquire', (data) => { + api.on('ratelimit-acquire', (data) => { acquireEvents.push(data); }); @@ -661,7 +659,7 @@ describe('@logosdx/fetch: rate limiting', async () => { const acquireEvents: string[] = []; - api.on('fetch-ratelimit-acquire', (data) => { + api.on('ratelimit-acquire', (data) => { acquireEvents.push(data.path!); }); @@ -696,7 +694,7 @@ describe('@logosdx/fetch: rate limiting', async () => { const acquireEvents: FetchEngine.RateLimitEventData[] = []; - api.on('fetch-ratelimit-acquire', (data) => { + api.on('ratelimit-acquire', (data) => { acquireEvents.push(data); }); @@ -737,7 +735,7 @@ describe('@logosdx/fetch: rate limiting', async () => { const rejectEvents: string[] = []; - api.on('fetch-ratelimit-reject', (data) => { + api.on('ratelimit-reject', (data) => { rejectEvents.push(data.path!); }); @@ -774,7 +772,7 @@ describe('@logosdx/fetch: rate limiting', 
async () => { const acquireEvents: string[] = []; - api.on('fetch-ratelimit-acquire', (data) => { + api.on('ratelimit-acquire', (data) => { acquireEvents.push(`${data.method}:${data.path}`); }); @@ -810,7 +808,7 @@ describe('@logosdx/fetch: rate limiting', async () => { const acquireEvents: FetchEngine.RateLimitEventData[] = []; - api.on('fetch-ratelimit-acquire', (data) => { + api.on('ratelimit-acquire', (data) => { acquireEvents.push(data); }); @@ -847,7 +845,7 @@ describe('@logosdx/fetch: rate limiting', async () => { const acquireEvents: FetchEngine.RateLimitEventData[] = []; - api.on('fetch-ratelimit-acquire', (data) => { + api.on('ratelimit-acquire', (data) => { acquireEvents.push(data); }); @@ -881,7 +879,7 @@ describe('@logosdx/fetch: rate limiting', async () => { const acquireEvents: FetchEngine.RateLimitEventData[] = []; - api.on('fetch-ratelimit-acquire', (data) => { + api.on('ratelimit-acquire', (data) => { acquireEvents.push(data); }); @@ -928,7 +926,7 @@ describe('@logosdx/fetch: rate limiting', async () => { describe('integration with cache and deduplication', () => { - it('should rate limit before cache check', async () => { + it('should check cache before rate limit', async () => { const api = new FetchEngine({ baseUrl: testUrl, @@ -946,31 +944,31 @@ describe('@logosdx/fetch: rate limiting', async () => { const rateLimitEvents: string[] = []; const cacheEvents: string[] = []; - api.on('fetch-ratelimit-acquire', (data) => { + api.on('ratelimit-acquire', (data) => { rateLimitEvents.push(data.path!); }); - api.on('fetch-cache-hit', (data) => { + api.on('cache-hit', (data) => { cacheEvents.push(data.path!); }); - api.on('fetch-cache-miss', (data) => { + api.on('cache-miss', (data) => { cacheEvents.push(`miss:${data.path}`); }); - // First request: rate limit -> cache miss -> fetch + // First request: cache miss -> rate limit -> fetch await api.get('/json'); expect(rateLimitEvents).to.deep.equal(['/json']); 
expect(cacheEvents).to.deep.equal(['miss:/json']); - // Second request: rate limit -> cache hit (no fetch) + // Second request: cache hit -> returns immediately (no rate limit consumed) await api.get('/json'); - expect(rateLimitEvents).to.deep.equal(['/json', '/json']); + expect(rateLimitEvents).to.deep.equal(['/json']); expect(cacheEvents).to.include('/json'); api.destroy(); @@ -991,17 +989,17 @@ describe('@logosdx/fetch: rate limiting', async () => { const rateLimitEvents: string[] = []; const dedupeEvents: string[] = []; - api.on('fetch-ratelimit-acquire', (data) => { + api.on('ratelimit-acquire', (data) => { rateLimitEvents.push(data.path!); }); - api.on('fetch-dedupe-start', (data) => { + api.on('dedupe-start', (data) => { dedupeEvents.push(`start:${data.path}`); }); - api.on('fetch-dedupe-join', (data) => { + api.on('dedupe-join', (data) => { dedupeEvents.push(`join:${data.path}`); }); @@ -1040,13 +1038,13 @@ describe('@logosdx/fetch: rate limiting', async () => { let eventData: FetchEngine.RateLimitEventData | null = null; - api.on('fetch-ratelimit-acquire', (data) => { + api.on('ratelimit-acquire', (data) => { eventData = data; }); await api.get('/json', { - params: { page: 1 }, + params: { page: '1' }, headers: { 'X-Custom': 'value' } }); diff --git a/tests/src/fetch/properties/headers.test.ts b/tests/src/fetch/properties/headers.test.ts new file mode 100644 index 0000000..6630bd0 --- /dev/null +++ b/tests/src/fetch/properties/headers.test.ts @@ -0,0 +1,238 @@ +import { describe, it, expect, vi } from 'vitest'; +import { HeadersManager } from '../../../../packages/fetch/src/properties/headers.ts'; + + +/** + * Helper to create a mock engine for HeadersManager tests. + * Provides a minimal FetchEngineCore-compatible interface. 
+ */ +const createMockEngine = ( + headers?: Record, + methodHeaders?: Record>, + validate?: (h: any, m?: string) => void +) => ({ + + emit: vi.fn(), + config: { + get: (path: string) => { + + if (path === 'headers') return headers; + if (path === 'methodHeaders') return methodHeaders; + if (path === 'validate.headers') return validate; + return undefined; + } + }, + state: null as any, + headerStore: null as any, + paramStore: null as any +}); + + +describe('HeadersManager', () => { + + describe('constructor', () => { + + it('initializes with headers from engine options', () => { + + const mockEngine = createMockEngine({ Authorization: 'Bearer token' }); + const manager = new HeadersManager(mockEngine as any); + + expect(manager.defaults).to.deep.equal({ Authorization: 'Bearer token' }); + }); + + it('initializes with method headers from engine options', () => { + + const mockEngine = createMockEngine( + { 'Content-Type': 'application/json' }, + { POST: { 'X-Custom': 'post-value' } } + ); + const manager = new HeadersManager(mockEngine as any); + + expect(manager.forMethod('POST')).to.deep.equal({ 'X-Custom': 'post-value' }); + }); + + it('initializes with empty defaults when no headers provided', () => { + + const mockEngine = createMockEngine(); + const manager = new HeadersManager(mockEngine as any); + + expect(manager.defaults).to.deep.equal({}); + }); + }); + + describe('set', () => { + + it('sets header by key-value', () => { + + const mockEngine = createMockEngine(); + const manager = new HeadersManager(mockEngine as any); + + manager.set('Authorization', 'Bearer xyz'); + + expect(manager.defaults.Authorization).to.equal('Bearer xyz'); + }); + + it('sets header by object', () => { + + const mockEngine = createMockEngine(); + const manager = new HeadersManager(mockEngine as any); + + manager.set({ 'X-API-Key': 'abc', 'X-Request-ID': '123' }); + + expect(manager.defaults).to.deep.equal({ 'X-API-Key': 'abc', 'X-Request-ID': '123' }); + }); + + it('sets 
method-specific header', () => { + + const mockEngine = createMockEngine(); + const manager = new HeadersManager(mockEngine as any); + + manager.set('Content-Type', 'multipart/form-data', 'POST'); + + expect(manager.forMethod('POST')).to.deep.equal({ 'Content-Type': 'multipart/form-data' }); + }); + + it('emits header-add event for key-value', () => { + + const mockEngine = createMockEngine(); + const manager = new HeadersManager(mockEngine as any); + + manager.set('Authorization', 'Bearer token'); + + expect(mockEngine.emit).toHaveBeenCalledWith('header-add', { + key: 'Authorization', + value: 'Bearer token', + method: undefined + }); + }); + + it('emits header-add event for object', () => { + + const mockEngine = createMockEngine(); + const manager = new HeadersManager(mockEngine as any); + + const headers = { 'X-API-Key': 'abc' }; + manager.set(headers); + + expect(mockEngine.emit).toHaveBeenCalledWith('header-add', { + value: headers, + method: undefined + }); + }); + + it('emits header-add event with method', () => { + + const mockEngine = createMockEngine(); + const manager = new HeadersManager(mockEngine as any); + + manager.set('Content-Type', 'application/json', 'POST'); + + expect(mockEngine.emit).toHaveBeenCalledWith('header-add', { + key: 'Content-Type', + value: 'application/json', + method: 'POST' + }); + }); + }); + + describe('remove', () => { + + it('removes header by key', () => { + + const mockEngine = createMockEngine({ Authorization: 'Bearer token' }); + const manager = new HeadersManager(mockEngine as any); + + manager.remove('Authorization'); + + expect(manager.defaults.Authorization).to.be.undefined; + }); + + it('removes multiple headers', () => { + + const mockEngine = createMockEngine({ 'X-API-Key': 'abc', 'X-Request-ID': '123' }); + const manager = new HeadersManager(mockEngine as any); + + manager.remove(['X-API-Key', 'X-Request-ID']); + + expect(manager.defaults).to.deep.equal({}); + }); + + it('emits header-remove event', () => { + + 
const mockEngine = createMockEngine({ Authorization: 'Bearer token' }); + const manager = new HeadersManager(mockEngine as any); + + manager.remove('Authorization'); + + expect(mockEngine.emit).toHaveBeenCalledWith('header-remove', { + key: 'Authorization', + method: undefined + }); + }); + }); + + describe('has', () => { + + it('returns true for existing header', () => { + + const mockEngine = createMockEngine({ Authorization: 'Bearer token' }); + const manager = new HeadersManager(mockEngine as any); + + expect(manager.has('Authorization')).to.be.true; + }); + + it('returns false for non-existing header', () => { + + const mockEngine = createMockEngine(); + const manager = new HeadersManager(mockEngine as any); + + expect(manager.has('Authorization')).to.be.false; + }); + }); + + describe('resolve', () => { + + it('merges defaults and method overrides', () => { + + const mockEngine = createMockEngine( + { 'Content-Type': 'application/json', Authorization: 'Bearer token' }, + { POST: { 'Content-Type': 'multipart/form-data' } } + ); + const manager = new HeadersManager(mockEngine as any); + + const resolved = manager.resolve('POST'); + + expect(resolved).to.deep.equal({ + 'Content-Type': 'multipart/form-data', + Authorization: 'Bearer token' + }); + }); + + it('applies request overrides', () => { + + const mockEngine = createMockEngine({ 'Content-Type': 'application/json' }); + const manager = new HeadersManager(mockEngine as any); + + const resolved = manager.resolve('POST', { 'X-Request-ID': '123' }); + + expect(resolved).to.deep.equal({ + 'Content-Type': 'application/json', + 'X-Request-ID': '123' + }); + }); + }); + + describe('validation', () => { + + it('calls validate function when setting headers', () => { + + const validate = vi.fn(); + const mockEngine = createMockEngine({}, {}, validate); + const manager = new HeadersManager(mockEngine as any); + + manager.set('Authorization', 'Bearer token'); + + expect(validate).toHaveBeenCalled(); + }); + }); +}); 
diff --git a/tests/src/fetch/properties/params.test.ts b/tests/src/fetch/properties/params.test.ts new file mode 100644 index 0000000..b2e9ed0 --- /dev/null +++ b/tests/src/fetch/properties/params.test.ts @@ -0,0 +1,238 @@ +import { describe, it, expect, vi } from 'vitest'; +import { ParamsManager } from '../../../../packages/fetch/src/properties/params.ts'; + + +/** + * Helper to create a mock engine for ParamsManager tests. + * Provides a minimal FetchEngineCore-compatible interface. + */ +const createMockEngine = ( + params?: Record, + methodParams?: Record>, + validate?: (p: any, m?: string) => void +) => ({ + + emit: vi.fn(), + config: { + get: (path: string) => { + + if (path === 'params') return params; + if (path === 'methodParams') return methodParams; + if (path === 'validate.params') return validate; + return undefined; + } + }, + state: null as any, + headerStore: null as any, + paramStore: null as any +}); + + +describe('ParamsManager', () => { + + describe('constructor', () => { + + it('initializes with params from engine options', () => { + + const mockEngine = createMockEngine({ apiKey: 'abc123' }); + const manager = new ParamsManager(mockEngine as any); + + expect(manager.defaults).to.deep.equal({ apiKey: 'abc123' }); + }); + + it('initializes with method params from engine options', () => { + + const mockEngine = createMockEngine( + { format: 'json' }, + { GET: { page: '1' } } + ); + const manager = new ParamsManager(mockEngine as any); + + expect(manager.forMethod('GET')).to.deep.equal({ page: '1' }); + }); + + it('initializes with empty defaults when no params provided', () => { + + const mockEngine = createMockEngine(); + const manager = new ParamsManager(mockEngine as any); + + expect(manager.defaults).to.deep.equal({}); + }); + }); + + describe('set', () => { + + it('sets param by key-value', () => { + + const mockEngine = createMockEngine(); + const manager = new ParamsManager(mockEngine as any); + + manager.set('apiKey', 'xyz'); + + 
expect(manager.defaults.apiKey).to.equal('xyz'); + }); + + it('sets param by object', () => { + + const mockEngine = createMockEngine(); + const manager = new ParamsManager(mockEngine as any); + + manager.set({ page: '1', limit: '10' }); + + expect(manager.defaults).to.deep.equal({ page: '1', limit: '10' }); + }); + + it('sets method-specific param', () => { + + const mockEngine = createMockEngine(); + const manager = new ParamsManager(mockEngine as any); + + manager.set('format', 'xml', 'GET'); + + expect(manager.forMethod('GET')).to.deep.equal({ format: 'xml' }); + }); + + it('emits param-add event for key-value', () => { + + const mockEngine = createMockEngine(); + const manager = new ParamsManager(mockEngine as any); + + manager.set('apiKey', 'abc'); + + expect(mockEngine.emit).toHaveBeenCalledWith('param-add', { + key: 'apiKey', + value: 'abc', + method: undefined + }); + }); + + it('emits param-add event for object', () => { + + const mockEngine = createMockEngine(); + const manager = new ParamsManager(mockEngine as any); + + const params = { page: '1' }; + manager.set(params); + + expect(mockEngine.emit).toHaveBeenCalledWith('param-add', { + value: params, + method: undefined + }); + }); + + it('emits param-add event with method', () => { + + const mockEngine = createMockEngine(); + const manager = new ParamsManager(mockEngine as any); + + manager.set('format', 'json', 'GET'); + + expect(mockEngine.emit).toHaveBeenCalledWith('param-add', { + key: 'format', + value: 'json', + method: 'GET' + }); + }); + }); + + describe('remove', () => { + + it('removes param by key', () => { + + const mockEngine = createMockEngine({ apiKey: 'abc' }); + const manager = new ParamsManager(mockEngine as any); + + manager.remove('apiKey'); + + expect(manager.defaults.apiKey).to.be.undefined; + }); + + it('removes multiple params', () => { + + const mockEngine = createMockEngine({ page: '1', limit: '10' }); + const manager = new ParamsManager(mockEngine as any); + + 
manager.remove(['page', 'limit']); + + expect(manager.defaults).to.deep.equal({}); + }); + + it('emits param-remove event', () => { + + const mockEngine = createMockEngine({ apiKey: 'abc' }); + const manager = new ParamsManager(mockEngine as any); + + manager.remove('apiKey'); + + expect(mockEngine.emit).toHaveBeenCalledWith('param-remove', { + key: 'apiKey', + method: undefined + }); + }); + }); + + describe('has', () => { + + it('returns true for existing param', () => { + + const mockEngine = createMockEngine({ apiKey: 'abc' }); + const manager = new ParamsManager(mockEngine as any); + + expect(manager.has('apiKey')).to.be.true; + }); + + it('returns false for non-existing param', () => { + + const mockEngine = createMockEngine(); + const manager = new ParamsManager(mockEngine as any); + + expect(manager.has('apiKey')).to.be.false; + }); + }); + + describe('resolve', () => { + + it('merges defaults and method overrides', () => { + + const mockEngine = createMockEngine( + { format: 'json', apiKey: 'abc' }, + { GET: { format: 'xml' } } + ); + const manager = new ParamsManager(mockEngine as any); + + const resolved = manager.resolve('GET'); + + expect(resolved).to.deep.equal({ + format: 'xml', + apiKey: 'abc' + }); + }); + + it('applies request overrides', () => { + + const mockEngine = createMockEngine({ format: 'json' }); + const manager = new ParamsManager(mockEngine as any); + + const resolved = manager.resolve('GET', { page: '1' }); + + expect(resolved).to.deep.equal({ + format: 'json', + page: '1' + }); + }); + }); + + describe('validation', () => { + + it('calls validate function when setting params', () => { + + const validate = vi.fn(); + const mockEngine = createMockEngine({}, {}, validate); + const manager = new ParamsManager(mockEngine as any); + + manager.set('apiKey', 'xyz'); + + expect(validate).toHaveBeenCalled(); + }); + }); +}); diff --git a/tests/src/fetch/property-store.ts b/tests/src/fetch/properties/store.test.ts similarity index 99% rename 
from tests/src/fetch/property-store.ts rename to tests/src/fetch/properties/store.test.ts index 9c32157..b41fb29 100644 --- a/tests/src/fetch/property-store.ts +++ b/tests/src/fetch/properties/store.test.ts @@ -5,7 +5,7 @@ import { vi } from 'vitest' -import { PropertyStore } from '../../../packages/fetch/src/property-store.ts'; +import { PropertyStore } from '../../../../packages/fetch/src/properties/store.ts'; type TestHeaders = Record; diff --git a/tests/src/fetch/serializers.ts b/tests/src/fetch/serializers/index.test.ts similarity index 99% rename from tests/src/fetch/serializers.ts rename to tests/src/fetch/serializers/index.test.ts index e9062f2..62754a8 100644 --- a/tests/src/fetch/serializers.ts +++ b/tests/src/fetch/serializers/index.test.ts @@ -7,7 +7,7 @@ import { import { endpointSerializer, requestSerializer -} from '../../../packages/fetch/src/serializers/index.ts'; +} from '../../../../packages/fetch/src/serializers/index.ts'; describe('@logosdx/fetch: serializers', () => { diff --git a/tests/src/fetch/state/get.test.ts b/tests/src/fetch/state/get.test.ts new file mode 100644 index 0000000..c2e824f --- /dev/null +++ b/tests/src/fetch/state/get.test.ts @@ -0,0 +1,55 @@ +import { describe, it, expect } from 'vitest'; +import { FetchState } from '../../../../packages/fetch/src/state/index.ts'; + + +/** + * Helper to create a mock engine for FetchState tests. + * Provides a minimal FetchEngineCore-compatible interface. + */ +const createMockEngine = (validate?: (state: any) => void) => ({ + + emit: () => {}, + config: { + get: (path: string) => path === 'validate.state' ? 
validate : undefined + }, + state: null as any, + headerStore: null as any, + paramStore: null as any +}); + + +describe('FetchState.get', () => { + + it('returns empty object for initial state', () => { + + const mockEngine = createMockEngine(); + const state = new FetchState(mockEngine as any); + + expect(state.get()).to.deep.equal({}); + }); + + it('returns a deep clone of the state', () => { + + const mockEngine = createMockEngine(); + const state = new FetchState<{ nested: { value: number } }>(mockEngine as any); + + state.set({ nested: { value: 1 } }); + const result = state.get(); + + // Modify the returned object + result.nested.value = 999; + + // Original state should be unchanged + expect(state.get().nested.value).to.equal(1); + }); + + it('returns state after set', () => { + + const mockEngine = createMockEngine(); + const state = new FetchState<{ token: string }>(mockEngine as any); + + state.set('token', 'abc123'); + + expect(state.get()).to.deep.equal({ token: 'abc123' }); + }); +}); diff --git a/tests/src/fetch/state/reset.test.ts b/tests/src/fetch/state/reset.test.ts new file mode 100644 index 0000000..6203448 --- /dev/null +++ b/tests/src/fetch/state/reset.test.ts @@ -0,0 +1,60 @@ +import { describe, it, expect, vi } from 'vitest'; +import { FetchState } from '../../../../packages/fetch/src/state/index.ts'; + + +/** + * Helper to create a mock engine for FetchState tests. + * Provides a minimal FetchEngineCore-compatible interface. + */ +const createMockEngine = (validate?: (state: any) => void) => ({ + + emit: vi.fn(), + config: { + get: (path: string) => path === 'validate.state' ? 
validate : undefined + }, + state: null as any, + headerStore: null as any, + paramStore: null as any +}); + + +describe('FetchState.reset', () => { + + it('resets state to empty object', () => { + + const mockEngine = createMockEngine(); + const state = new FetchState<{ token: string }>(mockEngine as any); + + state.set('token', 'abc'); + state.reset(); + + expect(state.get()).to.deep.equal({}); + }); + + it('emits state-reset event', () => { + + const mockEngine = createMockEngine(); + const state = new FetchState<{ token: string }>(mockEngine as any); + + state.set('token', 'abc'); + mockEngine.emit.mockClear(); + + state.reset(); + + expect(mockEngine.emit).toHaveBeenCalledWith('state-reset', { + previous: { token: 'abc' }, + current: {} + }); + }); + + it('calls validate function from engine options after reset', () => { + + const validate = vi.fn(); + const mockEngine = createMockEngine(validate); + const state = new FetchState<{ token: string }>(mockEngine as any); + + state.reset(); + + expect(validate).toHaveBeenCalledWith({}); + }); +}); diff --git a/tests/src/fetch/state/set.test.ts b/tests/src/fetch/state/set.test.ts new file mode 100644 index 0000000..5ce6c72 --- /dev/null +++ b/tests/src/fetch/state/set.test.ts @@ -0,0 +1,103 @@ +import { describe, it, expect, vi } from 'vitest'; +import { FetchState } from '../../../../packages/fetch/src/state/index.ts'; + + +/** + * Helper to create a mock engine for FetchState tests. + * Provides a minimal FetchEngineCore-compatible interface. + */ +const createMockEngine = (validate?: (state: any) => void) => ({ + + emit: vi.fn(), + config: { + get: (path: string) => path === 'validate.state' ? 
validate : undefined + }, + state: null as any, + headerStore: null as any, + paramStore: null as any +}); + + +describe('FetchState.set', () => { + + it('sets state by key-value', () => { + + const mockEngine = createMockEngine(); + const state = new FetchState<{ token: string }>(mockEngine as any); + + state.set('token', 'bearer-123'); + + expect(state.get()).to.deep.equal({ token: 'bearer-123' }); + }); + + it('sets state by partial object', () => { + + const mockEngine = createMockEngine(); + const state = new FetchState<{ user: string; role: string }>(mockEngine as any); + + state.set({ user: 'john', role: 'admin' }); + + expect(state.get()).to.deep.equal({ user: 'john', role: 'admin' }); + }); + + it('merges with existing state', () => { + + const mockEngine = createMockEngine(); + const state = new FetchState<{ a: number; b: number }>(mockEngine as any); + + state.set('a', 1); + state.set('b', 2); + + expect(state.get()).to.deep.equal({ a: 1, b: 2 }); + }); + + it('emits state-set event with key-value format', () => { + + const mockEngine = createMockEngine(); + const state = new FetchState<{ token: string }>(mockEngine as any); + + state.set('token', 'abc'); + + expect(mockEngine.emit).toHaveBeenCalledWith('state-set', { + key: 'token', + value: 'abc', + previous: {}, + current: { token: 'abc' } + }); + }); + + it('emits state-set event with object format', () => { + + const mockEngine = createMockEngine(); + const state = new FetchState<{ user: string }>(mockEngine as any); + + state.set({ user: 'john' }); + + expect(mockEngine.emit).toHaveBeenCalledWith('state-set', { + key: undefined, + value: { user: 'john' }, + previous: {}, + current: { user: 'john' } + }); + }); + + it('calls validate function from engine options', () => { + + const validate = vi.fn(); + const mockEngine = createMockEngine(validate); + const state = new FetchState<{ token: string }>(mockEngine as any); + + state.set('token', 'xyz'); + + expect(validate).toHaveBeenCalledWith({ token: 
'xyz' }); + }); + + it('throws on invalid arguments', () => { + + const mockEngine = createMockEngine(); + const state = new FetchState(mockEngine as any); + + expect(() => (state as any).set(123)).to.throw(); + expect(() => (state as any).set('key')).to.throw(); + }); +}); diff --git a/tests/src/observable/engine.ts b/tests/src/observable/engine.ts index 243d2e5..b4004f2 100644 --- a/tests/src/observable/engine.ts +++ b/tests/src/observable/engine.ts @@ -440,6 +440,59 @@ describe('@logosdx/observer', function () { expect(events).to.deep.eq(['a', 'b', 'c']); }); + it('should not drop events emitted faster than the consumer iterates', async () => { + + const { observer } = stub; + const generator = observer.on('test'); + + const events: string[] = []; + + const listen = (async () => { + + for await (const event of generator) { + + events.push(event as never); + + // Simulate async work between iterations + await wait(10); + } + })(); + + // Emit multiple events synchronously before the consumer + // has a chance to loop back and call next() + generator.emit('a'); + generator.emit('b'); + generator.emit('c'); + + // Wait long enough for all iterations to complete + await wait(50); + + expect(events).to.deep.eq(['a', 'b', 'c']); + + generator.cleanup(); + await listen; + }); + + it('should buffer events emitted before next() is called', async () => { + + const { observer } = stub; + const generator = observer.on('test'); + + // Emit before anyone calls next() + generator.emit('a'); + generator.emit('b'); + generator.emit('c'); + + const r1 = await generator.next(); + const r2 = await generator.next(); + const r3 = await generator.next(); + + expect(r1).to.eq('a'); + expect(r2).to.eq('b'); + expect(r3).to.eq('c'); + + generator.cleanup(); + }); it('handles regex with EventGenerators', async () => { diff --git a/tests/src/smoke/dom.test.ts b/tests/src/smoke/dom.test.ts new file mode 100644 index 0000000..798824e --- /dev/null +++ b/tests/src/smoke/dom.test.ts @@ -0,0 
+1,86 @@ +const ns = () => (window as any).LogosDx.Dom; + +describe('smoke: @logosdx/dom', () => { + + beforeAll(async () => { + + await (window as any).__loadBundle('dom'); + }); + + it('namespace is loaded', () => { + + expect(ns()).toBeDefined(); + }); + + it('$() queries elements from the DOM', () => { + + const div = document.createElement('div'); + div.className = 'smoke-test-query'; + document.body.appendChild(div); + + const results = ns().$('.smoke-test-query'); + expect(results).toHaveLength(1); + expect(results[0]).toBe(div); + + div.remove(); + }); + + it('html.css.set() and html.css.get() manipulate styles', () => { + + const el = document.createElement('div'); + document.body.appendChild(el); + + ns().html.css.set(el, { color: 'red' }); + const color = ns().html.css.get(el, 'color'); + + // Browser returns computed color in RGB format + expect(color).toBe('rgb(255, 0, 0)'); + + el.remove(); + }); + + it('html.attrs.set() and html.attrs.get() manipulate attributes', () => { + + const el = document.createElement('div'); + document.body.appendChild(el); + + ns().html.attrs.set(el, { 'data-smoke': 'test' }); + const val = ns().html.attrs.get(el, 'data-smoke'); + + expect(val).toBe('test'); + + el.remove(); + }); + + it('html.events.on() attaches and fires event listeners', () => { + + const el = document.createElement('button'); + document.body.appendChild(el); + + let clicked = false; + ns().html.events.on(el, 'click', () => { clicked = true; }); + + el.click(); + expect(clicked).toBe(true); + + el.remove(); + }); + + it('html.events.on() cleanup removes the listener', () => { + + const el = document.createElement('button'); + document.body.appendChild(el); + + let count = 0; + const cleanup = ns().html.events.on(el, 'click', () => { count++; }); + + el.click(); + expect(count).toBe(1); + + cleanup(); + el.click(); + expect(count).toBe(1); + + el.remove(); + }); +}); diff --git a/tests/src/smoke/fetch.test.ts b/tests/src/smoke/fetch.test.ts new file mode 
100644 index 0000000..ed85b11 --- /dev/null +++ b/tests/src/smoke/fetch.test.ts @@ -0,0 +1,102 @@ +const ns = () => (window as any).LogosDx.Fetch; + +describe('smoke: @logosdx/fetch', () => { + + beforeAll(async () => { + + await (window as any).__loadBundle('fetch'); + }); + + it('namespace is loaded', () => { + + expect(ns()).toBeDefined(); + }); + + it('instantiates FetchEngine with base config', () => { + + const engine = new (ns().FetchEngine)({ baseUrl: 'https://api.example.com' }); + + expect(engine).toBeDefined(); + expect(typeof engine.get).toBe('function'); + expect(typeof engine.post).toBe('function'); + expect(typeof engine.destroy).toBe('function'); + + engine.destroy(); + }); + + it('headers manager supports set and has', () => { + + const engine = new (ns().FetchEngine)({ baseUrl: 'https://api.example.com' }); + + engine.headers.set('Authorization', 'Bearer token123'); + expect(engine.headers.has('Authorization')).toBe(true); + expect(engine.headers.defaults.Authorization).toBe('Bearer token123'); + + engine.destroy(); + }); + + it('params manager supports set and has', () => { + + const engine = new (ns().FetchEngine)({ baseUrl: 'https://api.example.com' }); + + engine.params.set('page', '1'); + expect(engine.params.has('page')).toBe(true); + expect(engine.params.defaults.page).toBe('1'); + + engine.destroy(); + }); + + it('subscribes to events via on()', () => { + + const engine = new (ns().FetchEngine)({ baseUrl: 'https://api.example.com' }); + + const cleanup = engine.on('request', () => { /* no-op */ }); + + expect(typeof cleanup).toBe('function'); + cleanup(); + + engine.destroy(); + }); + + it('state supports set and get', () => { + + const engine = new (ns().FetchEngine)({ + baseUrl: 'https://api.example.com', + state: { token: '' }, + }); + + engine.state.set('token', 'abc'); + expect(engine.state.get().token).toBe('abc'); + + engine.destroy(); + }); + + it('FetchError construction and isFetchError guard', () => { + + const { FetchError, 
isFetchError } = ns(); + const err = new FetchError('request failed'); + + expect(err).toBeInstanceOf(Error); + expect(err.message).toBe('request failed'); + expect(isFetchError(err)).toBe(true); + expect(isFetchError(new Error('generic'))).toBe(false); + }); + + it('instantiates CachePolicy', () => { + + const policy = new (ns().CachePolicy)({ ttl: 5000 }); + expect(policy).toBeDefined(); + }); + + it('instantiates DedupePolicy', () => { + + const policy = new (ns().DedupePolicy)(); + expect(policy).toBeDefined(); + }); + + it('instantiates RateLimitPolicy', () => { + + const policy = new (ns().RateLimitPolicy)({ limit: 10, interval: 1000 }); + expect(policy).toBeDefined(); + }); +}); diff --git a/tests/src/smoke/hooks.test.ts b/tests/src/smoke/hooks.test.ts new file mode 100644 index 0000000..48bdd55 --- /dev/null +++ b/tests/src/smoke/hooks.test.ts @@ -0,0 +1,116 @@ +const ns = () => (window as any).LogosDx.Hooks; + +describe('smoke: @logosdx/hooks', () => { + + beforeAll(async () => { + + await (window as any).__loadBundle('hooks'); + }); + + it('namespace is loaded', () => { + + expect(ns()).toBeDefined(); + }); + + it('instantiates HookEngine and registers hooks', () => { + + const engine = new (ns().HookEngine)(); + + engine.register('beforeSave', 'afterSave'); + expect(engine).toBeDefined(); + }); + + it('on() subscribes and emit() fires a hook', async () => { + + const engine = new (ns().HookEngine)(); + engine.register('onLoad'); + + let called = false; + engine.on('onLoad', { + callback: async () => { called = true; }, + }); + + await engine.emit('onLoad'); + expect(called).toBe(true); + }); + + it('once() fires exactly once', async () => { + + const engine = new (ns().HookEngine)(); + engine.register('init'); + + let count = 0; + engine.once('init', async () => { count++; }); + + await engine.emit('init'); + await engine.emit('init'); + expect(count).toBe(1); + }); + + it('emit() provides context with args', async () => { + + const engine = new 
(ns().HookEngine)(); + engine.register('process'); + + let receivedArgs: any = null; + engine.on('process', { + callback: async (ctx: any) => { receivedArgs = ctx.args; }, + }); + + await engine.emit('process', 'hello', 42); + expect(receivedArgs).toEqual(['hello', 42]); + }); + + it('context setResult() modifies the emit result', async () => { + + const engine = new (ns().HookEngine)(); + engine.register('transform'); + + engine.on('transform', { + callback: async (ctx: any) => { ctx.setResult('modified'); }, + }); + + const result = await engine.emit('transform'); + expect(result.result).toBe('modified'); + }); + + it('wrap() wraps a function with pre/post hooks', async () => { + + const engine = new (ns().HookEngine)(); + engine.register('beforeCall', 'afterCall'); + + const order: string[] = []; + + engine.on('beforeCall', { + callback: async () => { order.push('pre'); }, + }); + + engine.on('afterCall', { + callback: async () => { order.push('post'); }, + }); + + const wrapped = engine.wrap( + async (x: number) => { + + order.push('fn'); + return x * 2; + }, + { pre: 'beforeCall', post: 'afterCall' }, + ); + + const result = await wrapped(5); + + expect(result).toBe(10); + expect(order).toEqual(['pre', 'fn', 'post']); + }); + + it('HookError and isHookError guard', () => { + + const { HookError, isHookError } = ns(); + const err = new HookError('hook failed'); + + expect(err).toBeInstanceOf(Error); + expect(isHookError(err)).toBe(true); + expect(isHookError(new Error('generic'))).toBe(false); + }); +}); diff --git a/tests/src/smoke/kit.test.ts b/tests/src/smoke/kit.test.ts new file mode 100644 index 0000000..512ec0d --- /dev/null +++ b/tests/src/smoke/kit.test.ts @@ -0,0 +1,107 @@ +const ns = () => (window as any).LogosDx.Kit; + +// Helper to safely clear storage (handles empty storage case) +const safeClear = (storage: any) => { + + const keys = storage.keys(); + if (keys.length > 0) { + storage.clear(); + } +}; + +describe('smoke: @logosdx/kit', () => { + + 
beforeAll(async () => { + + await (window as any).__loadBundle('kit'); + }); + + it('namespace is loaded', () => { + + expect(ns()).toBeDefined(); + }); + + it('appKit() creates a minimal kit with observer only', () => { + + const kit = ns().appKit({ + observer: {}, + }); + + expect(kit.observer).toBeDefined(); + expect(typeof kit.observer.on).toBe('function'); + expect(typeof kit.observer.emit).toBe('function'); + }); + + it('appKit() creates a full kit with all components', () => { + + const kit = ns().appKit({ + observer: {}, + storage: { + implementation: localStorage, + prefix: 'smoke-kit', + }, + locales: { + current: 'en', + fallback: 'en', + locales: { + en: { label: 'English', messages: { hello: 'Hi' } }, + }, + }, + stateMachine: { + initial: { count: 0 }, + reducer: (val: any) => val, + }, + fetch: { + baseUrl: 'https://api.example.com', + }, + }); + + expect(kit.observer).toBeDefined(); + expect(kit.storage).toBeDefined(); + expect(kit.locale).toBeDefined(); + expect(kit.stateMachine).toBeDefined(); + expect(kit.fetch).toBeDefined(); + + kit.fetch.destroy(); + safeClear(kit.storage); + }); + + it('kit observer can emit and receive events', () => { + + const kit = ns().appKit({ observer: {} }); + + let received: any = null; + kit.observer.on('smoke', (d: any) => { received = d; }); + kit.observer.emit('smoke', 'hello'); + + expect(received).toBe('hello'); + }); + + it('kit stateMachine dispatches and reads state', () => { + + const kit = ns().appKit({ + stateMachine: { + initial: { value: 0 }, + reducer: (val: any) => val, + }, + }); + + kit.stateMachine.dispatch({ value: 42 }); + expect(kit.stateMachine.state()).toEqual({ value: 42 }); + }); + + it('kit storage round-trips values', () => { + + const kit = ns().appKit({ + storage: { + implementation: localStorage, + prefix: 'smoke-kit-rt', + }, + }); + + kit.storage.set('key', 'val'); + expect(kit.storage.get('key')).toBe('val'); + + safeClear(kit.storage); + }); +}); diff --git 
a/tests/src/smoke/localize.test.ts b/tests/src/smoke/localize.test.ts new file mode 100644 index 0000000..0172758 --- /dev/null +++ b/tests/src/smoke/localize.test.ts @@ -0,0 +1,84 @@ +const ns = () => (window as any).LogosDx.Localize; + +describe('smoke: @logosdx/localize', () => { + + beforeAll(async () => { + + await (window as any).__loadBundle('localize'); + }); + + it('namespace is loaded', () => { + + expect(ns()).toBeDefined(); + }); + + it('instantiates LocaleManager with messages', () => { + + const manager = new (ns().LocaleManager)({ + current: 'en', + fallback: 'en', + locales: { + en: { code: 'en', text: 'English', labels: { greeting: 'Hello' } }, + es: { code: 'es', text: 'Español', labels: { greeting: 'Hola' } }, + }, + }); + + expect(manager).toBeDefined(); + expect(manager.current).toBe('en'); + }); + + it('text() retrieves a message by key', () => { + + const manager = new (ns().LocaleManager)({ + current: 'en', + fallback: 'en', + locales: { + en: { code: 'en', text: 'English', labels: { greeting: 'Hello' } }, + }, + }); + + expect(manager.text('greeting')).toBe('Hello'); + }); + + it('format() interpolates template variables', () => { + + const result = ns().format('Hello, {name}!', { name: 'World' }); + expect(result).toBe('Hello, World!'); + }); + + it('changeTo() switches locale and updates messages', () => { + + const manager = new (ns().LocaleManager)({ + current: 'en', + fallback: 'en', + locales: { + en: { code: 'en', text: 'English', labels: { greeting: 'Hello' } }, + es: { code: 'es', text: 'Español', labels: { greeting: 'Hola' } }, + }, + }); + + expect(manager.text('greeting')).toBe('Hello'); + + manager.changeTo('es'); + expect(manager.current).toBe('es'); + expect(manager.text('greeting')).toBe('Hola'); + }); + + it('on() fires locale change events', () => { + + const manager = new (ns().LocaleManager)({ + current: 'en', + fallback: 'en', + locales: { + en: { code: 'en', text: 'English', labels: { greeting: 'Hello' } }, + fr: { 
code: 'fr', text: 'Français', labels: { greeting: 'Bonjour' } }, + }, + }); + + let newLocale: any = null; + manager.on('locale-change', (e: any) => { newLocale = e.code; }); + + manager.changeTo('fr'); + expect(newLocale).toBe('fr'); + }); +}); diff --git a/tests/src/smoke/observer.test.ts b/tests/src/smoke/observer.test.ts new file mode 100644 index 0000000..adc9cd9 --- /dev/null +++ b/tests/src/smoke/observer.test.ts @@ -0,0 +1,90 @@ +const ns = () => (window as any).LogosDx.Observer; + +describe('smoke: @logosdx/observer', () => { + + beforeAll(async () => { + + await (window as any).__loadBundle('observer'); + }); + + it('namespace is loaded', () => { + + expect(ns()).toBeDefined(); + }); + + it('instantiates ObserverEngine and subscribes to events', () => { + + const obs = new (ns().ObserverEngine)(); + let received: any = null; + + obs.on('test-event', (data: any) => { received = data; }); + obs.emit('test-event', { value: 42 }); + + expect(received).toEqual({ value: 42 }); + }); + + it('unsubscribe prevents further callbacks', () => { + + const obs = new (ns().ObserverEngine)(); + let count = 0; + + const cleanup = obs.on('ping', () => { count++; }); + obs.emit('ping'); + expect(count).toBe(1); + + cleanup(); + obs.emit('ping'); + expect(count).toBe(1); + }); + + it('once() fires exactly once', () => { + + const obs = new (ns().ObserverEngine)(); + let count = 0; + + obs.once('single', () => { count++; }); + obs.emit('single'); + obs.emit('single'); + + expect(count).toBe(1); + }); + + it('regex pattern matching triggers on matching events', () => { + + const obs = new (ns().ObserverEngine)(); + const matched: string[] = []; + + obs.on(/^user:/, (rgx: any) => { matched.push(rgx.data); }); + obs.emit('user:login', 'alice'); + obs.emit('user:logout', 'bob'); + obs.emit('system:boot', 'nope'); + + expect(matched).toEqual(['alice', 'bob']); + }); + + it('event data propagation passes data to listeners', () => { + + const obs = new (ns().ObserverEngine)(); + 
const results: any[] = []; + + obs.on('data', (d: any) => results.push(d)); + obs.emit('data', 'first'); + obs.emit('data', 'second'); + + expect(results).toEqual(['first', 'second']); + }); + + it('EventQueue is created via queue()', () => { + + const obs = new (ns().ObserverEngine)(); + const processed: any[] = []; + + const q = obs.queue('job', (data: any) => { + + processed.push(data); + }, { name: 'smoke-queue', concurrency: 1 }); + + // Queue was created without error + expect(q).toBeDefined(); + }); +}); diff --git a/tests/src/smoke/setup.ts b/tests/src/smoke/setup.ts new file mode 100644 index 0000000..8375b07 --- /dev/null +++ b/tests/src/smoke/setup.ts @@ -0,0 +1,45 @@ +/** + * Browser smoke test setup. + * + * Provides a helper to load individual IIFE bundles on demand. + * Each test file loads only the bundle it needs to avoid global + * variable conflicts between minified IIFE bundles. + */ + +declare const __PACKAGES_ROOT__: string; + +(window as any).__loadBundle = async function loadBundle(pkg: string): Promise { + + const root = __PACKAGES_ROOT__; + const src = `/@fs/${root}/${pkg}/dist/browser/bundle.js`; + const res = await fetch(src); + + if (!res.ok) { + + const body = await res.text(); + throw new Error( + `Failed to fetch ${pkg} bundle (${res.status}): ${body.slice(0, 200)}` + ); + } + + const code = await res.text(); + const blob = new Blob([code], { type: 'application/javascript' }); + const blobUrl = URL.createObjectURL(blob); + + return new Promise((resolve, reject) => { + + const script = document.createElement('script'); + script.src = blobUrl; + script.onload = () => { + + URL.revokeObjectURL(blobUrl); + resolve(); + }; + script.onerror = () => { + + URL.revokeObjectURL(blobUrl); + reject(new Error(`Failed to execute bundle: ${pkg}`)); + }; + document.head.appendChild(script); + }); +}; diff --git a/tests/src/smoke/state-machine.test.ts b/tests/src/smoke/state-machine.test.ts new file mode 100644 index 0000000..0809964 --- /dev/null 
+++ b/tests/src/smoke/state-machine.test.ts @@ -0,0 +1,94 @@ +const ns = () => (window as any).LogosDx.StateMachine; + +describe('smoke: @logosdx/state-machine', () => { + + beforeAll(async () => { + + await (window as any).__loadBundle('state-machine'); + }); + + it('namespace is loaded', () => { + + expect(ns()).toBeDefined(); + }); + + it('instantiates with initial state', () => { + + const sm = new (ns().StateMachine)({ count: 0 }); + expect(sm.state()).toEqual({ count: 0 }); + }); + + it('dispatch() updates state', () => { + + const sm = new (ns().StateMachine)({ count: 0 }); + + sm.dispatch({ count: 5 }); + expect(sm.state()).toEqual({ count: 5 }); + }); + + it('addReducer() transforms dispatched values', () => { + + const sm = new (ns().StateMachine)({ count: 0 }); + + sm.addReducer((value: any, state: any) => { + + return { count: state.count + value.amount }; + }); + + sm.dispatch({ amount: 3 }); + expect(sm.state()).toEqual({ count: 3 }); + + sm.dispatch({ amount: 7 }); + expect(sm.state()).toEqual({ count: 10 }); + }); + + it('addListener() notifies on state change', () => { + + const sm = new (ns().StateMachine)({ value: '' }); + const history: any[] = []; + + sm.addListener((newState: any) => { history.push(newState); }); + + sm.dispatch({ value: 'a' }); + sm.dispatch({ value: 'b' }); + + expect(history).toEqual([{ value: 'a' }, { value: 'b' }]); + }); + + it('states() returns accumulated history', () => { + + const sm = new (ns().StateMachine)({ n: 0 }); + + sm.dispatch({ n: 1 }); + sm.dispatch({ n: 2 }); + sm.dispatch({ n: 3 }); + + const all = sm.states(); + expect(all.length).toBeGreaterThanOrEqual(3); + }); + + it('prevState() and nextState() navigate history', () => { + + const sm = new (ns().StateMachine)({ step: 0 }); + + sm.dispatch({ step: 1 }); + sm.dispatch({ step: 2 }); + + sm.prevState(); + expect(sm.state()).toEqual({ step: 1 }); + + sm.nextState(); + expect(sm.state()).toEqual({ step: 2 }); + }); + + it('clone() creates an independent 
child machine', () => { + + const parent = new (ns().StateMachine)({ x: 1 }); + const child = parent.clone(); + + child.dispatch({ x: 99 }); + + expect(child.state()).toEqual({ x: 99 }); + expect(parent.state()).toEqual({ x: 1 }); + }); +}); diff --git a/tests/src/smoke/storage.test.ts b/tests/src/smoke/storage.test.ts new file mode 100644 index 0000000..eb13a2f --- /dev/null +++ b/tests/src/smoke/storage.test.ts @@ -0,0 +1,104 @@ +const ns = () => (window as any).LogosDx.Storage; + +// Helper to safely clear storage (handles empty storage case) +const safeClear = (adapter: any) => { + + const keys = adapter.keys(); + if (keys.length > 0) { + adapter.clear(); + } +}; + +describe('smoke: @logosdx/storage', () => { + + beforeAll(async () => { + + await (window as any).__loadBundle('storage'); + }); + + let adapter: any; + + beforeEach(() => { + + adapter = new (ns().StorageAdapter)(localStorage, 'smoke-test'); + safeClear(adapter); + }); + + afterEach(() => { + + safeClear(adapter); + }); + + it('namespace is loaded', () => { + + expect(ns()).toBeDefined(); + }); + + it('set() and get() round-trip a value', () => { + + adapter.set('name', 'Alice'); + expect(adapter.get('name')).toBe('Alice'); + }); + + it('set() accepts an object of key-value pairs', () => { + + adapter.set({ name: 'Bob', age: 30 }); + expect(adapter.get('name')).toBe('Bob'); + expect(adapter.get('age')).toBe(30); + }); + + it('has() checks key existence', () => { + + adapter.set('exists', true); + expect(adapter.has('exists')).toBe(true); + expect(adapter.has('missing')).toBe(false); + }); + + it('rm() removes a key', () => { + + adapter.set('temp', 'value'); + expect(adapter.has('temp')).toBe(true); + + adapter.rm('temp'); + expect(adapter.has('temp')).toBe(false); + }); + + it('wrap() provides a key-scoped accessor', () => { + + const wrapped = adapter.wrap('count'); + wrapped.set(10); + expect(wrapped.get()).toBe(10); + + wrapped.remove(); + expect(adapter.has('count')).toBe(false); + }); + + 
it('assign() merges partial values into a key', () => { + + adapter.set('config', { theme: 'dark', lang: 'en' }); + adapter.assign('config', { lang: 'es' }); + + const config = adapter.get('config'); + expect(config.theme).toBe('dark'); + expect(config.lang).toBe('es'); + }); + + it('on() fires storage-after-set event', () => { + + let eventKey: any = null; + adapter.on('storage-after-set', (e: any) => { eventKey = e.key; }); + + adapter.set('trigger', 'value'); + expect(eventKey).toBe('trigger'); + }); + + it('clear() removes all values', () => { + + adapter.set({ a: 1, b: 2, c: 3 }); + adapter.clear(); + + expect(adapter.has('a')).toBe(false); + expect(adapter.has('b')).toBe(false); + expect(adapter.has('c')).toBe(false); + }); +}); diff --git a/tests/src/smoke/utils.test.ts b/tests/src/smoke/utils.test.ts new file mode 100644 index 0000000..e74c28e --- /dev/null +++ b/tests/src/smoke/utils.test.ts @@ -0,0 +1,118 @@ +const ns = () => (window as any).LogosDx.Utils; + +describe('smoke: @logosdx/utils', () => { + + beforeAll(async () => { + + await (window as any).__loadBundle('utils'); + }); + + it('namespace is loaded', () => { + + expect(ns()).toBeDefined(); + }); + + it('attempt() resolves with [result, null] on success', async () => { + + const [result, err] = await ns().attempt(() => Promise.resolve(42)); + + expect(result).toBe(42); + expect(err).toBeNull(); + }); + + it('attempt() resolves with [null, error] on failure', async () => { + + const [result, err] = await ns().attempt(() => Promise.reject(new Error('boom'))); + + expect(result).toBeNull(); + expect(err).toBeInstanceOf(Error); + expect(err.message).toBe('boom'); + }); + + it('clone() deep-clones an object with no shared references', () => { + + const original = { a: 1, nested: { b: 2, arr: [3, 4] } }; + const cloned = ns().clone(original); + + expect(cloned).toEqual(original); + expect(cloned).not.toBe(original); + expect(cloned.nested).not.toBe(original.nested); + 
expect(cloned.nested.arr).not.toBe(original.nested.arr); + }); + + it('merge() combines two objects', () => { + + const result = ns().merge({ a: 1 }, { b: 2 }); + expect(result).toEqual({ a: 1, b: 2 }); + }); + + it('assert() passes on truthy, throws on falsy', () => { + + expect(() => ns().assert(true, 'ok')).not.toThrow(); + expect(() => ns().assert(false, 'nope')).toThrow(); + }); + + it('PriorityQueue enqueues and dequeues in priority order', () => { + + const queue = new (ns().PriorityQueue)(); + + queue.push('low', 10); + queue.push('high', 1); + queue.push('mid', 5); + + expect(queue.pop()).toBe('high'); + expect(queue.pop()).toBe('mid'); + expect(queue.pop()).toBe('low'); + }); + + it('equals() compares values deeply', () => { + + expect(ns().equals({ a: [1, 2] }, { a: [1, 2] })).toBe(true); + expect(ns().equals({ a: 1 }, { a: 2 })).toBe(false); + }); + + it('wait() resolves after delay', async () => { + + const start = Date.now(); + await ns().wait(50); + const elapsed = Date.now() - start; + + expect(elapsed).toBeGreaterThanOrEqual(40); + }); + + it('retry() retries on failure and eventually succeeds', async () => { + + let calls = 0; + + const result = await ns().retry( + () => { + + calls++; + if (calls < 3) throw new Error('not yet'); + return 'done'; + }, + { retries: 5, delay: 10 }, + ); + + expect(result).toBe('done'); + expect(calls).toBe(3); + }); + + it('rateLimit() throttles function calls', async () => { + + let count = 0; + const limited = ns().rateLimit(() => count++, { maxCalls: 2, windowMs: 200 }); + + await limited(); + await limited(); + + expect(count).toBe(2); + + // Third call should throw (rate limit exceeded) + await expect(limited()).rejects.toThrow(); + + await ns().wait(250); + await limited(); + expect(count).toBe(3); + }); +}); diff --git a/tests/src/storage.ts b/tests/src/storage.ts index 134d13b..4abed96 100644 --- a/tests/src/storage.ts +++ b/tests/src/storage.ts @@ -289,7 +289,6 @@ describe('@logosdx/storage', () => { }); 
const getKeys = (setTo: Partial) => Object.keys(setTo) as (keyof StorageItems)[]; - const getVals = (setTo: Partial) => Object.keys(setTo) as (StorageItems[keyof StorageItems])[]; const mapArgs = ]>>(spy: S) => spy.args.map(([e]) => e) it('dispatches events on set', () => { diff --git a/tests/src/utils/data-structures.ts b/tests/src/utils/data-structures.ts index d05a819..8c195f3 100644 --- a/tests/src/utils/data-structures.ts +++ b/tests/src/utils/data-structures.ts @@ -2,8 +2,6 @@ import { describe, it, beforeAll, - after, - afterEach, expect } from 'vitest' diff --git a/tests/src/utils/flow-control/batch.ts b/tests/src/utils/flow-control/batch.ts index 291f868..e864d82 100644 --- a/tests/src/utils/flow-control/batch.ts +++ b/tests/src/utils/flow-control/batch.ts @@ -1,5 +1,4 @@ import { - beforeAll, describe, it, vi, diff --git a/tests/src/utils/flow-control/compose-flow.ts b/tests/src/utils/flow-control/compose-flow.ts index 24b7b37..b610147 100644 --- a/tests/src/utils/flow-control/compose-flow.ts +++ b/tests/src/utils/flow-control/compose-flow.ts @@ -250,12 +250,8 @@ describe('@logosdx/utils', () => { it('should handle retry with circuit breaker', async () => { - let callCount = 0; - const originalFn = vi.fn(async (_x: number) => { - callCount++; - // Always throw to test max retries behavior throw new Error('Service temporarily down'); }); @@ -304,14 +300,11 @@ describe('@logosdx/utils', () => { it('should simulate resilientFetch with all production options', async () => { - let callCount = 0; let flakyCount = 0; // Mock fetch-like function that simulates real network behavior const mockFetch = vi.fn(async (url: string) => { - callCount++; - // Simulate different failure modes based on URL if (url.includes('/slow-endpoint')) { await wait(100); // Longer than timeout (50ms) @@ -404,7 +397,7 @@ describe('@logosdx/utils', () => { it('should handle realistic API failure scenarios with resilientFetch', async () => { let orderCallCount = 0; - let paymentCallCount 
= 0; + let _paymentCallCount = 0; const mockApiCall = vi.fn(async (endpoint: string) => { @@ -425,7 +418,7 @@ describe('@logosdx/utils', () => { if (endpoint === '/api/payment/process') { - paymentCallCount++; + _paymentCallCount++; // Consistently fail to test circuit breaker throw new Error('Payment service down'); @@ -460,7 +453,7 @@ describe('@logosdx/utils', () => { expect(orderCallCount).to.equal(3); // Failed twice, succeeded on 3rd attempt // Test circuit breaker with payment service - paymentCallCount = 0; + _paymentCallCount = 0; let circuitTripped = false; // Make enough failed calls to trip circuit breaker diff --git a/tests/src/utils/flow-control/rate-limit.ts b/tests/src/utils/flow-control/rate-limit.ts index 922855f..2288a9b 100644 --- a/tests/src/utils/flow-control/rate-limit.ts +++ b/tests/src/utils/flow-control/rate-limit.ts @@ -2,8 +2,6 @@ import { describe, it, vi, - beforeAll, - afterAll, expect, beforeEach, afterEach diff --git a/tests/src/utils/flow-control/retry.ts b/tests/src/utils/flow-control/retry.ts index 5a90642..32b6b15 100644 --- a/tests/src/utils/flow-control/retry.ts +++ b/tests/src/utils/flow-control/retry.ts @@ -13,7 +13,6 @@ import { retry, makeRetryable, wait, - RetryError, isRetryError, } from '../../../../packages/utils/src/index.ts'; @@ -405,8 +404,8 @@ describe('@logosdx/utils - flow-control: retry', () => { // onRetry is called before attempts 2 and 3 (not before attempt 1) calledExactly(onRetry, 2, 'onRetry called twice'); - expect(onRetry.mock.calls[0][1]).to.equal(1); // attempt number 1 - expect(onRetry.mock.calls[1][1]).to.equal(2); // attempt number 2 + expect(onRetry.mock.calls[0]![1]).to.equal(1); // attempt number 1 + expect(onRetry.mock.calls[1]![1]).to.equal(2); // attempt number 2 }); it('should not call onRetry on the first attempt', async () => { @@ -473,8 +472,8 @@ describe('@logosdx/utils - flow-control: retry', () => { // onRetry is called before attempt 2 with Error #1, and before attempt 3 with Error #2 
expect(capturedErrors).to.have.length(2); - expect(capturedErrors[0].message).to.equal('Error #1'); - expect(capturedErrors[1].message).to.equal('Error #2'); + expect(capturedErrors[0]!.message).to.equal('Error #1'); + expect(capturedErrors[1]!.message).to.equal('Error #2'); }); }); @@ -499,7 +498,7 @@ describe('@logosdx/utils - flow-control: retry', () => { expect(result).to.deep.equal({ fallback: true, originalMessage: 'Network failure' }); calledExactly(onRetryExhausted, 1, 'onRetryExhausted called once'); - expect(onRetryExhausted.mock.calls[0][0]).to.equal(originalError); + expect(onRetryExhausted.mock.calls[0]![0]).to.equal(originalError); }); it('should support async onRetryExhausted callback', async () => { @@ -631,7 +630,7 @@ describe('@logosdx/utils - flow-control: retry', () => { it('should work with onRetry and onRetryExhausted together', async () => { const callOrder: string[] = []; - const onRetry = vi.fn(() => callOrder.push('onRetry')); + const onRetry = vi.fn(() => { callOrder.push('onRetry'); }); const onRetryExhausted = vi.fn(() => { callOrder.push('exhausted'); return 'fallback'; @@ -657,7 +656,7 @@ describe('@logosdx/utils - flow-control: retry', () => { it('should work with onRetry and throwLastError together', async () => { const onRetryCalls: number[] = []; - const onRetry = vi.fn((_, attempt) => onRetryCalls.push(attempt)); + const onRetry = vi.fn((_: Error, attempt: number) => { onRetryCalls.push(attempt); }); const originalError = new Error('original'); const fn = vi.fn(() => { diff --git a/tests/src/utils/flow-control/singleflight.ts b/tests/src/utils/flow-control/singleflight.ts index 9166dc0..1d76b21 100644 --- a/tests/src/utils/flow-control/singleflight.ts +++ b/tests/src/utils/flow-control/singleflight.ts @@ -323,6 +323,155 @@ describe('@logosdx/utils - SingleFlight', () => { expect(stats.inflightCount).to.equal(1); }); + it('should invalidate cache entries matching predicate', async () => { + + const flight = new SingleFlight(); + + 
await flight.setCache('user:1', 'alice'); + await flight.setCache('user:2', 'bob'); + await flight.setCache('post:1', 'hello'); + await flight.setCache('post:2', 'world'); + + const deleted = await flight.invalidateCache(key => key.startsWith('user:')); + + expect(deleted).to.equal(2); + expect(await flight.hasCache('user:1')).to.be.false; + expect(await flight.hasCache('user:2')).to.be.false; + expect(await flight.hasCache('post:1')).to.be.true; + expect(await flight.hasCache('post:2')).to.be.true; + }); + + it('should return 0 when no entries match predicate', async () => { + + const flight = new SingleFlight(); + + await flight.setCache('user:1', 'alice'); + await flight.setCache('user:2', 'bob'); + + const deleted = await flight.invalidateCache(key => key.startsWith('post:')); + + expect(deleted).to.equal(0); + expect(flight.stats().cacheSize).to.equal(2); + }); + + it('should return 0 when cache is empty', async () => { + + const flight = new SingleFlight(); + + const deleted = await flight.invalidateCache(key => key.startsWith('user:')); + + expect(deleted).to.equal(0); + }); + + it('should invalidate all entries when predicate always returns true', async () => { + + const flight = new SingleFlight(); + + await flight.setCache('key1', 'value1'); + await flight.setCache('key2', 'value2'); + await flight.setCache('key3', 'value3'); + + const deleted = await flight.invalidateCache(() => true); + + expect(deleted).to.equal(3); + expect(flight.stats().cacheSize).to.equal(0); + }); + + it('should work with complex predicate patterns', async () => { + + const flight = new SingleFlight(); + + await flight.setCache('api:v1:users:1', 'user1'); + await flight.setCache('api:v1:users:2', 'user2'); + await flight.setCache('api:v2:users:1', 'user1-v2'); + await flight.setCache('api:v1:posts:1', 'post1'); + await flight.setCache('cache:temp:1', 'temp'); + + // Delete only v1 users + const deleted = await flight.invalidateCache( + key => key.includes(':v1:') && 
key.includes(':users:') + ); + + expect(deleted).to.equal(2); + expect(await flight.hasCache('api:v1:users:1')).to.be.false; + expect(await flight.hasCache('api:v1:users:2')).to.be.false; + expect(await flight.hasCache('api:v2:users:1')).to.be.true; + expect(await flight.hasCache('api:v1:posts:1')).to.be.true; + expect(await flight.hasCache('cache:temp:1')).to.be.true; + }); + + it('should work with regex-based predicate', async () => { + + const flight = new SingleFlight(); + + await flight.setCache('user:123', 'alice'); + await flight.setCache('user:456', 'bob'); + await flight.setCache('user:abc', 'charlie'); + await flight.setCache('post:123', 'post'); + + // Delete user entries with numeric IDs + const pattern = /^user:\d+$/; + const deleted = await flight.invalidateCache(key => pattern.test(key)); + + expect(deleted).to.equal(2); + expect(await flight.hasCache('user:123')).to.be.false; + expect(await flight.hasCache('user:456')).to.be.false; + expect(await flight.hasCache('user:abc')).to.be.true; + expect(await flight.hasCache('post:123')).to.be.true; + }); + + it('should return 0 when adapter does not support keys()', async () => { + + const store = new Map>(); + + // Adapter without keys() method + const adapterWithoutKeys: CacheAdapter = { + + async get(key) { + + return store.get(key) ?? 
null; + }, + + async set(key, item) { + + store.set(key, item); + }, + + async delete(key) { + + return store.delete(key); + }, + + async has(key) { + + return store.has(key); + }, + + async clear() { + + store.clear(); + }, + + get size() { + + return store.size; + } + }; + + const flight = new SingleFlight({ + adapter: adapterWithoutKeys, + defaultTtl: 60000 + }); + + await flight.setCache('key1', 'value1'); + await flight.setCache('key2', 'value2'); + + const deleted = await flight.invalidateCache(() => true); + + expect(deleted).to.equal(0); + expect(store.size).to.equal(2); // Entries still exist + }); + it('should return accurate stats', async () => { const flight = new SingleFlight(); @@ -359,7 +508,7 @@ describe('@logosdx/utils - SingleFlight', () => { async get(key) { await wait(10); - return store.get(key); + return store.get(key) ?? null; }, async set(key, item) { @@ -412,7 +561,7 @@ describe('@logosdx/utils - SingleFlight', () => { async get(key) { - return store.get(key); + return store.get(key) ?? null; }, async set(key, item) { @@ -463,7 +612,7 @@ describe('@logosdx/utils - SingleFlight', () => { async get(key) { - return store.get(key); + return store.get(key) ?? null; }, async set(key, item) { @@ -512,7 +661,7 @@ describe('@logosdx/utils - SingleFlight', () => { async get(key) { - return store.get(key); + return store.get(key) ?? 
null; }, async set(key, item) { diff --git a/tests/src/utils/flow-control/throttle.ts b/tests/src/utils/flow-control/throttle.ts index 3545c30..cfc3900 100644 --- a/tests/src/utils/flow-control/throttle.ts +++ b/tests/src/utils/flow-control/throttle.ts @@ -9,7 +9,6 @@ import { import { mockHelpers } from '../../_helpers'; import { - attempt, attemptSync, throttle, wait, diff --git a/tests/tsconfig.json b/tests/tsconfig.json index 1f9183d..4467bbc 100644 --- a/tests/tsconfig.json +++ b/tests/tsconfig.json @@ -1,5 +1,10 @@ { "extends": "../tsconfig.json", + "compilerOptions": { + "emitDeclarationOnly": false, + "noEmit": true, + "types": ["vitest/globals"] + }, "include": [ "src" ] diff --git a/tests/vitest.config.ts b/tests/vitest.config.ts index 87bfa31..6d034cb 100644 --- a/tests/vitest.config.ts +++ b/tests/vitest.config.ts @@ -1,36 +1,73 @@ import { defineConfig } from 'vitest/config'; +import { playwright } from '@vitest/browser-playwright'; +import path from 'path'; + +const packagesRoot = path.resolve(__dirname, '..', 'packages'); export default defineConfig({ test: { - include: ['src/**/*.ts'], - exclude: [ - 'src/_helpers.ts', - 'src/fetch/_helpers.ts', - 'src/_playground.ts', - 'src/_memory-tests/**', - 'src/experiments/**', - 'src/index.ts', - 'src/setup.ts' + projects: [ + { + test: { + name: 'unit', + include: ['src/**/*.ts'], + exclude: [ + 'src/_helpers.ts', + 'src/fetch/_helpers.ts', + 'src/_playground.ts', + 'src/_memory-tests/**', + 'src/experiments/**', + 'src/index.ts', + 'src/setup.ts', + 'src/smoke/**', + ], + setupFiles: ['src/setup.ts'], + environment: 'jsdom', + globals: true, + pool: 'forks', + testTimeout: 10000, + hookTimeout: 10000, + reporters: ['default'], + clearMocks: true, + restoreMocks: true, + sequence: { + shuffle: false, + concurrent: false, + }, + coverage: { + provider: 'v8', + reporter: ['text', 'json', 'html'], + reportsDirectory: './coverage', + include: ['../packages/*/src/**/*.ts'], + exclude: ['**/*.test.ts', 
'**/*.spec.ts'], + }, + }, + }, + { + define: { + __PACKAGES_ROOT__: JSON.stringify(packagesRoot), + }, + server: { + fs: { + allow: [path.resolve(__dirname, '..')], + strict: false, + }, + }, + test: { + name: 'browser', + include: ['src/smoke/**/*.test.ts'], + globals: true, + testTimeout: 15000, + hookTimeout: 15000, + setupFiles: ['src/smoke/setup.ts'], + browser: { + enabled: true, + provider: playwright(), + headless: true, + instances: [{ browser: 'chromium' }], + }, + }, + }, ], - setupFiles: ['src/setup.ts'], - environment: 'jsdom', - globals: true, - pool: 'forks', - testTimeout: 10000, - hookTimeout: 10000, - reporters: ['default'], - clearMocks: true, - restoreMocks: true, - sequence: { - shuffle: false, - concurrent: false - }, - coverage: { - provider: 'v8', - reporter: ['text', 'json', 'html'], - reportsDirectory: './coverage', - include: ['../packages/*/src/**/*.ts'], - exclude: ['**/*.test.ts', '**/*.spec.ts'] - } - } + }, }); From 47c31c65553f9530ef71ee1a54859e2352737f90 Mon Sep 17 00:00:00 2001 From: Danilo Alonso Date: Tue, 3 Feb 2026 23:53:43 -0500 Subject: [PATCH 10/13] chore: update pnpm-lock, remove kit tests, fix test lint --- pnpm-lock.yaml | 39 +-- tests/package.json | 2 +- .../scenarios/fetch/a-instance-churn.ts | 2 +- .../scenarios/fetch/b-event-listeners.ts | 2 +- .../scenarios/fetch/c-state-headers-params.ts | 2 +- .../scenarios/fetch/d-abort-timeout.ts | 4 +- .../scenarios/fetch/e-repeated-calls.ts | 4 +- .../scenarios/fetch/z-diagnostic.ts | 8 +- .../scenarios/observer/_helpers.ts | 2 +- .../scenarios/observer/a-subscriber-churn.ts | 2 +- .../observer/b-long-lived-subjects.ts | 2 +- .../scenarios/observer/c-burst-traffic.ts | 2 +- .../scenarios/observer/d-fan-out-fan-in.ts | 2 +- .../scenarios/observer/e-failure-reconnect.ts | 2 +- .../scenarios/observer/f-hot-paths.ts | 2 +- .../scenarios/utils/a-memoize-churn.ts | 2 +- .../scenarios/utils/b-debounce-throttle.ts | 2 +- .../scenarios/utils/c-circuit-breaker.ts | 2 +- 
.../scenarios/utils/d-rate-limit.ts | 3 +- .../scenarios/utils/e-inflight-dedup.ts | 2 +- .../scenarios/utils/f-batch-retry.ts | 2 +- .../scenarios/utils/g-timeout-wait.ts | 2 +- .../scenarios/utils/h-priority-queue.ts | 2 +- .../_memory-tests/scenarios/utils/i-clone.ts | 2 +- .../scenarios/utils/j-compose-flow.ts | 2 +- tests/src/fetch/engine/request-init.test.ts | 2 +- tests/src/fetch/policies/dedupe.test.ts | 3 +- tests/src/kit.ts | 304 ------------------ tests/src/utils/flow-control/memo.ts | 2 +- tests/tsconfig.json | 4 + 30 files changed, 45 insertions(+), 368 deletions(-) delete mode 100644 tests/src/kit.ts diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 796d284..1e958d7 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -73,27 +73,6 @@ importers: specifier: workspace:^ version: link:../utils - packages/kit: - dependencies: - '@logosdx/fetch': - specifier: workspace:^ - version: link:../fetch - '@logosdx/localize': - specifier: workspace:^ - version: link:../localize - '@logosdx/observer': - specifier: workspace:^ - version: link:../observer - '@logosdx/state-machine': - specifier: workspace:^ - version: link:../state-machine - '@logosdx/storage': - specifier: workspace:^ - version: link:../storage - '@logosdx/utils': - specifier: workspace:^ - version: link:../utils - packages/localize: dependencies: '@logosdx/utils': @@ -168,8 +147,8 @@ importers: specifier: ^21 version: 21.0.0 typescript: - specifier: '5' - version: 5.0.4 + specifier: 5.9.3 + version: 5.9.3 vitest: specifier: ^4 version: 4.0.14(@types/node@24.10.10)(@vitest/browser-playwright@4.0.14)(jsdom@28.0.0)(tsx@4.19.4)(yaml@2.8.0) @@ -2680,16 +2659,16 @@ packages: peerDependencies: typescript: 5.0.x || 5.1.x || 5.2.x || 5.3.x || 5.4.x || 5.5.x || 5.6.x || 5.7.x || 5.8.x - typescript@5.0.4: - resolution: {integrity: sha512-cW9T5W9xY37cc+jfEnaUvX91foxtHkza3Nw3wkoF4sSlKn0MONdkdEndig/qPBWXNkmplh3NzayQzCiHM4/hqw==} - engines: {node: '>=12.20'} - hasBin: true - typescript@5.8.3: resolution: 
{integrity: sha512-p1diW6TqL9L07nNxvRMM7hMMw4c5XOo/1ibL4aAIGmSAt9slTE1Xgw5KWuof2uTOvCg9BY7ZRi+GaF+7sfgPeQ==} engines: {node: '>=14.17'} hasBin: true + typescript@5.9.3: + resolution: {integrity: sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==} + engines: {node: '>=14.17'} + hasBin: true + uc.micro@2.1.0: resolution: {integrity: sha512-ARDJmphmdvUk6Glw7y9DQ2bFkKBHwQHLi2lsaH6PPmz/Ka9sFOBsBluozhDltWmnv9u/cF6Rt87znRTPV+yp/A==} @@ -5452,10 +5431,10 @@ snapshots: typescript: 5.8.3 yaml: 2.8.0 - typescript@5.0.4: {} - typescript@5.8.3: {} + typescript@5.9.3: {} + uc.micro@2.1.0: {} uint8array-extras@1.4.0: {} diff --git a/tests/package.json b/tests/package.json index 1937ec6..c608e09 100644 --- a/tests/package.json +++ b/tests/package.json @@ -36,7 +36,7 @@ "jsdom": "^28", "node-test-github-reporter": "^1.3.0", "sinon": "^21", - "typescript": "5", + "typescript": "5.9.3", "vitest": "^4", "playwright": "^1.58.0" } diff --git a/tests/src/_memory-tests/scenarios/fetch/a-instance-churn.ts b/tests/src/_memory-tests/scenarios/fetch/a-instance-churn.ts index d511f33..67676e6 100644 --- a/tests/src/_memory-tests/scenarios/fetch/a-instance-churn.ts +++ b/tests/src/_memory-tests/scenarios/fetch/a-instance-churn.ts @@ -24,7 +24,7 @@ * The retained memory is from undici, not our code. 
*/ -import { FetchEngine } from '../../../../../../packages/fetch/src/index.ts'; +import { FetchEngine } from '../../../../../packages/fetch/src/index.ts'; import type { Scenario, ScenarioContext } from '../../types.ts'; import { diff --git a/tests/src/_memory-tests/scenarios/fetch/b-event-listeners.ts b/tests/src/_memory-tests/scenarios/fetch/b-event-listeners.ts index bbae9c7..828a8d8 100644 --- a/tests/src/_memory-tests/scenarios/fetch/b-event-listeners.ts +++ b/tests/src/_memory-tests/scenarios/fetch/b-event-listeners.ts @@ -16,7 +16,7 @@ * - Closures captured by listeners are released */ -import { FetchEngine, FetchEventNames } from '../../../../../../packages/fetch/src/index.ts'; +import { FetchEngine, FetchEventNames } from '../../../../../packages/fetch/src/index.ts'; import type { Scenario, ScenarioContext } from '../../types.ts'; import { diff --git a/tests/src/_memory-tests/scenarios/fetch/c-state-headers-params.ts b/tests/src/_memory-tests/scenarios/fetch/c-state-headers-params.ts index 50bbbb0..edc18fb 100644 --- a/tests/src/_memory-tests/scenarios/fetch/c-state-headers-params.ts +++ b/tests/src/_memory-tests/scenarios/fetch/c-state-headers-params.ts @@ -15,7 +15,7 @@ * - State objects are properly replaced, not accumulated */ -import { FetchEngine } from '../../../../../../packages/fetch/src/index.ts'; +import { FetchEngine } from '../../../../../packages/fetch/src/index.ts'; import type { Scenario, ScenarioContext } from '../../types.ts'; import { diff --git a/tests/src/_memory-tests/scenarios/fetch/d-abort-timeout.ts b/tests/src/_memory-tests/scenarios/fetch/d-abort-timeout.ts index e92537e..dd87dc9 100644 --- a/tests/src/_memory-tests/scenarios/fetch/d-abort-timeout.ts +++ b/tests/src/_memory-tests/scenarios/fetch/d-abort-timeout.ts @@ -15,8 +15,8 @@ * - AbortController references are released */ -import { FetchEngine } from '../../../../../../packages/fetch/src/index.ts'; -import { attempt } from '../../../../../../packages/utils/src/index.ts'; 
+import { FetchEngine } from '../../../../../packages/fetch/src/index.ts'; +import { attempt } from '../../../../../packages/utils/src/index.ts'; import type { Scenario, ScenarioContext } from '../../types.ts'; import { diff --git a/tests/src/_memory-tests/scenarios/fetch/e-repeated-calls.ts b/tests/src/_memory-tests/scenarios/fetch/e-repeated-calls.ts index afa5403..4088c4e 100644 --- a/tests/src/_memory-tests/scenarios/fetch/e-repeated-calls.ts +++ b/tests/src/_memory-tests/scenarios/fetch/e-repeated-calls.ts @@ -26,8 +26,8 @@ * and internal state are properly garbage collected. */ -import { FetchEngine } from '../../../../../../packages/fetch/src/index.ts'; -import { attempt } from '../../../../../../packages/utils/src/index.ts'; +import { FetchEngine } from '../../../../../packages/fetch/src/index.ts'; +import { attempt } from '../../../../../packages/utils/src/index.ts'; import type { Scenario, ScenarioContext } from '../../types.ts'; import { diff --git a/tests/src/_memory-tests/scenarios/fetch/z-diagnostic.ts b/tests/src/_memory-tests/scenarios/fetch/z-diagnostic.ts index 295d9b9..5b2a8fb 100644 --- a/tests/src/_memory-tests/scenarios/fetch/z-diagnostic.ts +++ b/tests/src/_memory-tests/scenarios/fetch/z-diagnostic.ts @@ -21,8 +21,8 @@ * This is expected behavior, not a bug in FetchEngine. 
*/ -import { FetchEngine } from '../../../../../../packages/fetch/src/index.ts'; -import { attempt } from '../../../../../../packages/utils/src/index.ts'; +import { FetchEngine } from '../../../../../packages/fetch/src/index.ts'; +import { attempt } from '../../../../../packages/utils/src/index.ts'; import type { Scenario, ScenarioContext } from '../../types.ts'; import { @@ -121,7 +121,7 @@ export const diagnostic: Scenario = { context.gc(); const before = process.memoryUsage().heapUsed; - const { ObserverEngine } = await import('../../../../../../packages/observer/src/index.ts'); + const { ObserverEngine } = await import('../../../../../packages/observer/src/index.ts'); for (let i = 0; i < 100; i++) { @@ -142,7 +142,7 @@ export const diagnostic: Scenario = { context.gc(); const before = process.memoryUsage().heapUsed; - const { ObserverEngine } = await import('../../../../../../packages/observer/src/index.ts'); + const { ObserverEngine } = await import('../../../../../packages/observer/src/index.ts'); const sharedState = { token: 'abc', user: { id: 1 } }; diff --git a/tests/src/_memory-tests/scenarios/observer/_helpers.ts b/tests/src/_memory-tests/scenarios/observer/_helpers.ts index 23149cf..f53445c 100644 --- a/tests/src/_memory-tests/scenarios/observer/_helpers.ts +++ b/tests/src/_memory-tests/scenarios/observer/_helpers.ts @@ -4,7 +4,7 @@ * Shared utilities for observer memory test scenarios. 
*/ -import type { ObserverEngine } from '../../../../../../packages/observer/src/index.ts'; +import type { ObserverEngine } from '../../../../../packages/observer/src/index.ts'; import type { ObserverStats } from '../../types.ts'; diff --git a/tests/src/_memory-tests/scenarios/observer/a-subscriber-churn.ts b/tests/src/_memory-tests/scenarios/observer/a-subscriber-churn.ts index b4faf97..81e579a 100644 --- a/tests/src/_memory-tests/scenarios/observer/a-subscriber-churn.ts +++ b/tests/src/_memory-tests/scenarios/observer/a-subscriber-churn.ts @@ -15,7 +15,7 @@ * - No growth in retained closures */ -import { ObserverEngine } from '../../../../../../packages/observer/src/index.ts'; +import { ObserverEngine } from '../../../../../packages/observer/src/index.ts'; import type { Scenario, ScenarioContext } from '../../types.ts'; import { getObserverStats } from './_helpers.ts'; diff --git a/tests/src/_memory-tests/scenarios/observer/b-long-lived-subjects.ts b/tests/src/_memory-tests/scenarios/observer/b-long-lived-subjects.ts index 75f33ca..6fac0bf 100644 --- a/tests/src/_memory-tests/scenarios/observer/b-long-lived-subjects.ts +++ b/tests/src/_memory-tests/scenarios/observer/b-long-lived-subjects.ts @@ -14,7 +14,7 @@ * - No creeping growth in listener references */ -import { ObserverEngine } from '../../../../../../packages/observer/src/index.ts'; +import { ObserverEngine } from '../../../../../packages/observer/src/index.ts'; import type { Scenario, ScenarioContext } from '../../types.ts'; import { getObserverStats } from './_helpers.ts'; diff --git a/tests/src/_memory-tests/scenarios/observer/c-burst-traffic.ts b/tests/src/_memory-tests/scenarios/observer/c-burst-traffic.ts index 3dbb707..7868ae5 100644 --- a/tests/src/_memory-tests/scenarios/observer/c-burst-traffic.ts +++ b/tests/src/_memory-tests/scenarios/observer/c-burst-traffic.ts @@ -14,7 +14,7 @@ * - All listeners complete processing */ -import { ObserverEngine } from 
'../../../../../../packages/observer/src/index.ts'; +import { ObserverEngine } from '../../../../../packages/observer/src/index.ts'; import type { Scenario, ScenarioContext } from '../../types.ts'; import { getObserverStats } from './_helpers.ts'; diff --git a/tests/src/_memory-tests/scenarios/observer/d-fan-out-fan-in.ts b/tests/src/_memory-tests/scenarios/observer/d-fan-out-fan-in.ts index 0a92063..21e8745 100644 --- a/tests/src/_memory-tests/scenarios/observer/d-fan-out-fan-in.ts +++ b/tests/src/_memory-tests/scenarios/observer/d-fan-out-fan-in.ts @@ -17,7 +17,7 @@ * - Internal counts match expected values */ -import { ObserverEngine } from '../../../../../../packages/observer/src/index.ts'; +import { ObserverEngine } from '../../../../../packages/observer/src/index.ts'; import type { Scenario, ScenarioContext } from '../../types.ts'; import { getObserverStats } from './_helpers.ts'; diff --git a/tests/src/_memory-tests/scenarios/observer/e-failure-reconnect.ts b/tests/src/_memory-tests/scenarios/observer/e-failure-reconnect.ts index dc3e7f0..2bbac03 100644 --- a/tests/src/_memory-tests/scenarios/observer/e-failure-reconnect.ts +++ b/tests/src/_memory-tests/scenarios/observer/e-failure-reconnect.ts @@ -15,7 +15,7 @@ * - Signal listeners properly cleaned via queueMicrotask pattern */ -import { ObserverEngine } from '../../../../../../packages/observer/src/index.ts'; +import { ObserverEngine } from '../../../../../packages/observer/src/index.ts'; import type { Scenario, ScenarioContext } from '../../types.ts'; import { getObserverStats } from './_helpers.ts'; diff --git a/tests/src/_memory-tests/scenarios/observer/f-hot-paths.ts b/tests/src/_memory-tests/scenarios/observer/f-hot-paths.ts index 403e62b..8d72ab7 100644 --- a/tests/src/_memory-tests/scenarios/observer/f-hot-paths.ts +++ b/tests/src/_memory-tests/scenarios/observer/f-hot-paths.ts @@ -14,7 +14,7 @@ * - GC pauses don't exceed thresholds */ -import { ObserverEngine } from 
'../../../../../../packages/observer/src/index.ts'; +import { ObserverEngine } from '../../../../../packages/observer/src/index.ts'; import type { Scenario, ScenarioContext } from '../../types.ts'; import { getObserverStats } from './_helpers.ts'; diff --git a/tests/src/_memory-tests/scenarios/utils/a-memoize-churn.ts b/tests/src/_memory-tests/scenarios/utils/a-memoize-churn.ts index da2588c..95c6655 100644 --- a/tests/src/_memory-tests/scenarios/utils/a-memoize-churn.ts +++ b/tests/src/_memory-tests/scenarios/utils/a-memoize-churn.ts @@ -22,7 +22,7 @@ import { memoize, memoizeSync, type EnhancedMemoizedFunction -} from '../../../../../../packages/utils/src/index.ts'; +} from '../../../../../packages/utils/src/index.ts'; import type { Scenario, ScenarioContext } from '../../types.ts'; import { createLargeObject, generateUniqueKey } from './_helpers.ts'; diff --git a/tests/src/_memory-tests/scenarios/utils/b-debounce-throttle.ts b/tests/src/_memory-tests/scenarios/utils/b-debounce-throttle.ts index e97e51f..1a525e5 100644 --- a/tests/src/_memory-tests/scenarios/utils/b-debounce-throttle.ts +++ b/tests/src/_memory-tests/scenarios/utils/b-debounce-throttle.ts @@ -26,7 +26,7 @@ import { throttle, type DebouncedFunction, type ThrottledFunction -} from '../../../../../../packages/utils/src/index.ts'; +} from '../../../../../packages/utils/src/index.ts'; import type { Scenario, ScenarioContext } from '../../types.ts'; import { createLargeObject, delay } from './_helpers.ts'; diff --git a/tests/src/_memory-tests/scenarios/utils/c-circuit-breaker.ts b/tests/src/_memory-tests/scenarios/utils/c-circuit-breaker.ts index c79f4b6..3ec1785 100644 --- a/tests/src/_memory-tests/scenarios/utils/c-circuit-breaker.ts +++ b/tests/src/_memory-tests/scenarios/utils/c-circuit-breaker.ts @@ -21,7 +21,7 @@ import { circuitBreakerSync, CircuitBreakerError, isCircuitBreakerError -} from '../../../../../../packages/utils/src/index.ts'; +} from '../../../../../packages/utils/src/index.ts'; 
import type { Scenario, ScenarioContext } from '../../types.ts'; import { createLargeObject } from './_helpers.ts'; diff --git a/tests/src/_memory-tests/scenarios/utils/d-rate-limit.ts b/tests/src/_memory-tests/scenarios/utils/d-rate-limit.ts index 66b79ee..fbb2e7d 100644 --- a/tests/src/_memory-tests/scenarios/utils/d-rate-limit.ts +++ b/tests/src/_memory-tests/scenarios/utils/d-rate-limit.ts @@ -19,8 +19,7 @@ import { rateLimit, RateLimitTokenBucket, - RateLimitError -} from '../../../../../../packages/utils/src/index.ts'; +} from '../../../../../packages/utils/src/index.ts'; import type { Scenario, ScenarioContext } from '../../types.ts'; diff --git a/tests/src/_memory-tests/scenarios/utils/e-inflight-dedup.ts b/tests/src/_memory-tests/scenarios/utils/e-inflight-dedup.ts index c8fbb9c..b7897ce 100644 --- a/tests/src/_memory-tests/scenarios/utils/e-inflight-dedup.ts +++ b/tests/src/_memory-tests/scenarios/utils/e-inflight-dedup.ts @@ -15,7 +15,7 @@ * - Zombie promises accumulate (documents known limitation) */ -import { withInflightDedup } from '../../../../../../packages/utils/src/index.ts'; +import { withInflightDedup } from '../../../../../packages/utils/src/index.ts'; import type { Scenario, ScenarioContext } from '../../types.ts'; import { createLargeObject, delay } from './_helpers.ts'; diff --git a/tests/src/_memory-tests/scenarios/utils/f-batch-retry.ts b/tests/src/_memory-tests/scenarios/utils/f-batch-retry.ts index a84f752..b638de4 100644 --- a/tests/src/_memory-tests/scenarios/utils/f-batch-retry.ts +++ b/tests/src/_memory-tests/scenarios/utils/f-batch-retry.ts @@ -22,7 +22,7 @@ import { retry, makeRetryable, RetryError -} from '../../../../../../packages/utils/src/index.ts'; +} from '../../../../../packages/utils/src/index.ts'; import type { Scenario, ScenarioContext } from '../../types.ts'; import { createLargeObject, delay } from './_helpers.ts'; diff --git a/tests/src/_memory-tests/scenarios/utils/g-timeout-wait.ts 
b/tests/src/_memory-tests/scenarios/utils/g-timeout-wait.ts index 8d71856..95db975 100644 --- a/tests/src/_memory-tests/scenarios/utils/g-timeout-wait.ts +++ b/tests/src/_memory-tests/scenarios/utils/g-timeout-wait.ts @@ -24,7 +24,7 @@ import { TimeoutError, nextTick, nextLoop -} from '../../../../../../packages/utils/src/index.ts'; +} from '../../../../../packages/utils/src/index.ts'; import type { Scenario, ScenarioContext } from '../../types.ts'; import { createLargeObject } from './_helpers.ts'; diff --git a/tests/src/_memory-tests/scenarios/utils/h-priority-queue.ts b/tests/src/_memory-tests/scenarios/utils/h-priority-queue.ts index e4a15b7..7dcb30e 100644 --- a/tests/src/_memory-tests/scenarios/utils/h-priority-queue.ts +++ b/tests/src/_memory-tests/scenarios/utils/h-priority-queue.ts @@ -17,7 +17,7 @@ * - Backing array buffer shrinks after mass pop (implementation dependent) */ -import { PriorityQueue } from '../../../../../../packages/utils/src/index.ts'; +import { PriorityQueue } from '../../../../../packages/utils/src/index.ts'; import type { Scenario, ScenarioContext } from '../../types.ts'; import { createLargeObject } from './_helpers.ts'; diff --git a/tests/src/_memory-tests/scenarios/utils/i-clone.ts b/tests/src/_memory-tests/scenarios/utils/i-clone.ts index aa5893f..041d352 100644 --- a/tests/src/_memory-tests/scenarios/utils/i-clone.ts +++ b/tests/src/_memory-tests/scenarios/utils/i-clone.ts @@ -19,7 +19,7 @@ import { clone, merge, equals -} from '../../../../../../packages/utils/src/index.ts'; +} from '../../../../../packages/utils/src/index.ts'; import type { Scenario, ScenarioContext } from '../../types.ts'; import { diff --git a/tests/src/_memory-tests/scenarios/utils/j-compose-flow.ts b/tests/src/_memory-tests/scenarios/utils/j-compose-flow.ts index 72c2278..2c978e2 100644 --- a/tests/src/_memory-tests/scenarios/utils/j-compose-flow.ts +++ b/tests/src/_memory-tests/scenarios/utils/j-compose-flow.ts @@ -24,7 +24,7 @@ import { withTimeout, 
rateLimit, withInflightDedup -} from '../../../../../../packages/utils/src/index.ts'; +} from '../../../../../packages/utils/src/index.ts'; import type { Scenario, ScenarioContext } from '../../types.ts'; import { createLargeObject } from './_helpers.ts'; diff --git a/tests/src/fetch/engine/request-init.test.ts b/tests/src/fetch/engine/request-init.test.ts index e213213..6b56ec4 100644 --- a/tests/src/fetch/engine/request-init.test.ts +++ b/tests/src/fetch/engine/request-init.test.ts @@ -140,7 +140,7 @@ describe('FetchEngine: RequestInit options', async () => { expect(mockFetch).toHaveBeenCalledTimes(1); - const [, init] = mockFetch.mock.calls[0]; + const [, init] = mockFetch.mock.calls[0]!; expect(init?.integrity).to.equal('sha256-abc123'); diff --git a/tests/src/fetch/policies/dedupe.test.ts b/tests/src/fetch/policies/dedupe.test.ts index 003ec37..cc6b6b5 100644 --- a/tests/src/fetch/policies/dedupe.test.ts +++ b/tests/src/fetch/policies/dedupe.test.ts @@ -10,9 +10,8 @@ import { FetchEngine, } from '../../../../packages/fetch/src/index.ts'; -import { attempt, wait } from '../../../../packages/utils/src/index.ts'; +import { attempt, attemptSync, wait } from '../../../../packages/utils/src/index.ts'; import { makeTestStubs } from '../_helpers.ts'; -import { attemptSync } from '../../../../packages/kit/src/index.ts'; describe('@logosdx/fetch: deduplication', async () => { diff --git a/tests/src/kit.ts b/tests/src/kit.ts deleted file mode 100644 index e1f3055..0000000 --- a/tests/src/kit.ts +++ /dev/null @@ -1,304 +0,0 @@ -import { describe, it, expect } from 'vitest' - - -import * as Kit from '../../packages/kit/src/index.ts'; - -describe('@logosdx/kit', () => { - - const { - appKit - } = Kit; - - type AppEventsType = { - 'mint': ( - 'spearmint' | - 'peppermint' - ), - 'toothpaste': { colgate?: boolean, crest?: boolean }, - floss: boolean - } - - const locale = { - some: { - label: 'wee' - }, - poo: 'weenie', - bear: '{type} bear' - }; - - type AppLocaleType = 
typeof locale; - type LocaleCodes = 'en' | 'es'; - - const locales: Kit.LocaleManager.ManyLocales = { - en: { code: 'en', text: 'English', labels: locale }, - es: { code: 'es', text: 'Spanish', labels: { bear: 'oso {type}' } }, - } - - const initialState = { - count: 0, - name: '', - age: 0 - }; - - type AppStateType = typeof initialState; - - const stateReducer: Kit.ReducerFunction = (val, state) => { - - return Kit.merge(state, val) as AppStateType; - } - - type AppStorageType = { - name: string, - age: number - }; - - type AppKitType = Kit.MakeKitType<{ - events: AppEventsType, - storage: AppStorageType, - locales: { - locale: AppLocaleType, - codes: LocaleCodes - }, - stateMachine: { - state: AppStateType, - reducerValue: AppStateType - }, - fetch: { - state: { - authToken: string, - }, - headers: { - authorization?: string, - hmac?: string, - timestamp?: string - }, - }, - apis: { - stripe: { - headers: { Authorization: string }, - state: {}, - params: {} - }, - facebook: { - headers: {}, - params: { access_token: string }, - state: {} - } - } - }>; - - type Opts = Kit.AppKitOpts; - - const localesOpts: Opts['locales'] = { - current: 'en', - fallback: 'en', - locales - }; - - const observerOpts: Opts['observer'] = {}; - - const stateMachineOpts: Opts['stateMachine'] = { - initial: initialState, - reducer: stateReducer - }; - - const storageOpts: Opts['storage'] = { - implementation: window.localStorage, - prefix: 'kit' - }; - - const fetchOpts: Opts['fetch'] = { - baseUrl: 'http://localhost:1234', - defaultType: 'json', - headers: { - hmac: '123', - } - }; - - const apis: Opts['apis'] = { - stripe: { - baseUrl: 'https://api.stripe.com', - headers: { - 'Content-Type': 'application/json', - Authorization: 'Bearer sk_', - } - }, - facebook: { - baseUrl: 'https://graph.facebook.com', - params: { - access_token: '123' - }, - headers: {} - } - } - - it('provides an appKit', function () { - - expect(Kit.appKit).to.be.a('function'); - - const app = Kit.appKit({ - 
fetch: fetchOpts, - locales: localesOpts, - observer: observerOpts, - stateMachine: stateMachineOpts, - storage: storageOpts, - apis - }); - - expect(app.observer).to.exist; - expect(app.locale).to.exist; - expect(app.stateMachine).to.exist; - expect(app.storage).to.exist; - expect(app.fetch).to.exist; - expect(app.apis).to.exist; - - expect(app.apis).to.have.property('stripe'); - expect(app.apis).to.have.property('facebook'); - - expect(app.apis.stripe).to.be.an.instanceOf(Kit.FetchEngine); - expect(app.apis.facebook).to.be.an.instanceOf(Kit.FetchEngine); - expect(app.fetch).to.be.an.instanceOf(Kit.FetchEngine); - - expect(app.observer).to.be.an.instanceOf(Kit.ObserverEngine); - expect(app.locale).to.be.an.instanceOf(Kit.LocaleManager); - expect(app.stateMachine).to.be.an.instanceOf(Kit.StateMachine); - expect(app.storage).to.be.an.instanceOf(Kit.StorageAdapter); - - app.observer!.on('mint', (data) => data === 'peppermint'); - app.observer!.emit('floss', true); - - app.locale!.t('bear'); - app.locale!.changeTo('en'); - - app.fetch!.addHeader({ hmac: '123', nonsensee: '123' }); - app.fetch!.removeHeader('authorization'); - app.fetch!.setState({ authToken: '123' }); - - app.apis.stripe.addHeader({ 'Content-Type': 'application/json' }); - app.apis.stripe.removeHeader('Content-Type'); - app.apis.stripe.setState({}); - - app.apis.facebook.addParam({ access_token: '123' }); - app.apis.facebook.rmParams('access_token'); - - app.storage!.set('age', 123); - app.storage!.get('name') === 'abc'; - - app.stateMachine!.state().age === 123; - app.stateMachine!.state().name === 'abc'; - app.stateMachine!.dispatch({ age: 123 }); - - app.stateMachine!.addListener((state) => state.age == 123); - app.stateMachine!.addListener((state) => state.name == 'abc'); - - }); - - it ('optionally instantiates tools', () => { - - const onlyObserver = appKit({ observer: observerOpts }); - const onlyLocale = appKit({ locales: localesOpts }) - const onlyStateMachine = appKit({ stateMachine: 
stateMachineOpts }) - const onlyStorage = appKit({ storage: storageOpts }) - const onlyFetch = appKit({ fetch: fetchOpts}) - - expect(onlyObserver).to.contain.keys('observer'); - expect(onlyObserver).to.contain({ - locale: null, - stateMachine: null, - storage: null, - fetch: null - }); - - expect(onlyLocale).to.contain.keys('locale'); - expect(onlyLocale).to.contain({ - observer: null, - stateMachine: null, - storage: null, - fetch: null - }); - - expect(onlyStateMachine).to.contain.keys('stateMachine'); - expect(onlyStateMachine).to.contain({ - locale: null, - observer: null, - storage: null, - fetch: null - }); - - expect(onlyStorage).to.contain.keys('storage'); - expect(onlyStorage).to.contain({ - locale: null, - stateMachine: null, - observer: null, - fetch: null - }); - - expect(onlyFetch).to.contain.keys('fetch'); - expect(onlyFetch).to.contain({ - locale: null, - stateMachine: null, - storage: null, - observer: null - }); - }); - - it('can compose a custom type', () => { - - type MyKit = Kit.MakeKitType<{ - apis: { - stripe: { - headers: { Authorization: string }, - state: {}, - params: {} - } - }, - events: { - test: true - }, - locales: { - locale: { - test: 'test', - test2: 'test2', - nested: { - test3: 'test3' - } - }, - codes: 'en' | 'es' - }, - storage: { - name: 'test', - age: 123 - }, - stateMachine: { - state: { - name: 'test' - }, - reducerValue: { - name: 'test' - } - }, - }> - - const kit = appKit({}); - - kit.apis?.stripe?.addHeader({ Authorization: 'Bearer 123' }); - - kit.observer?.on('test', (bool) => { - - bool === true; - }); - - kit.observer?.emit('test', true); - - kit.locale?.t('test'); - kit.locale?.t('nested.test3'); - - kit.locale?.changeTo('en'); - - kit.storage?.set('name', 'test'); - kit.storage?.get('name') === 'test'; - - kit.stateMachine?.dispatch({ name: 'test' }); - kit.stateMachine?.state().name === 'test'; - }); -}); diff --git a/tests/src/utils/flow-control/memo.ts b/tests/src/utils/flow-control/memo.ts index 
8a07c3b..96a2e06 100644 --- a/tests/src/utils/flow-control/memo.ts +++ b/tests/src/utils/flow-control/memo.ts @@ -12,8 +12,8 @@ import { wait, memoizeSync, memoize, + attemptSync } from '../../../../packages/utils/src/index.ts'; -import { attemptSync } from '../../../../packages/kit/src/index.ts'; describe('@logosdx/utils', () => { diff --git a/tests/tsconfig.json b/tests/tsconfig.json index 4467bbc..0d39132 100644 --- a/tests/tsconfig.json +++ b/tests/tsconfig.json @@ -7,5 +7,9 @@ }, "include": [ "src" + ], + "exclude": [ + "src/_memory-tests", + "src/_playground.ts" ] } \ No newline at end of file From 657b82097b95b02d0359bdaa623aa99a08ad0d0d Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Wed, 4 Feb 2026 01:30:20 -0500 Subject: [PATCH 11/13] Version Packages (beta) (#123) Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- .changeset/pre.json | 6 +- packages/dom/CHANGELOG.md | 7 ++ packages/dom/package.json | 2 +- packages/fetch/CHANGELOG.md | 173 ++++++++++++++++++++++++++++ packages/fetch/package.json | 2 +- packages/hooks/CHANGELOG.md | 7 ++ packages/hooks/package.json | 2 +- packages/localize/CHANGELOG.md | 7 ++ packages/localize/package.json | 2 +- packages/observer/CHANGELOG.md | 11 ++ packages/observer/package.json | 2 +- packages/state-machine/CHANGELOG.md | 7 ++ packages/state-machine/package.json | 2 +- packages/storage/CHANGELOG.md | 7 ++ packages/storage/package.json | 2 +- packages/utils/CHANGELOG.md | 8 ++ packages/utils/package.json | 2 +- 17 files changed, 240 insertions(+), 9 deletions(-) diff --git a/.changeset/pre.json b/.changeset/pre.json index b06c481..e0f37cc 100644 --- a/.changeset/pre.json +++ b/.changeset/pre.json @@ -13,7 +13,11 @@ "@logosdx/tests": "0.0.1" }, "changesets": [ + "bright-dogs-swim", + "calm-trees-grow", + "quiet-birds-sing", "rich-pears-jam", - "thin-crews-vanish" + "thin-crews-vanish", + "warm-lights-shine" ] } 
diff --git a/packages/dom/CHANGELOG.md b/packages/dom/CHANGELOG.md index ac4c965..9a1301d 100644 --- a/packages/dom/CHANGELOG.md +++ b/packages/dom/CHANGELOG.md @@ -1,5 +1,12 @@ # @logosdx/dom +## 2.0.18-beta.0 + +### Patch Changes + +- Updated dependencies [11e8233] + - @logosdx/utils@6.1.0-beta.0 + ## 2.0.17 ### Patch Changes diff --git a/packages/dom/package.json b/packages/dom/package.json index 96bd8cf..4ae2a48 100644 --- a/packages/dom/package.json +++ b/packages/dom/package.json @@ -1,6 +1,6 @@ { "name": "@logosdx/dom", - "version": "2.0.17", + "version": "2.0.18-beta.0", "description": "A small set of utilities for DOM manipulation", "exports": { ".": { diff --git a/packages/fetch/CHANGELOG.md b/packages/fetch/CHANGELOG.md index e19c1c2..42627e5 100644 --- a/packages/fetch/CHANGELOG.md +++ b/packages/fetch/CHANGELOG.md @@ -1,5 +1,178 @@ # @logosdx/fetch +## 8.0.0-beta.0 + +### Major Changes + +- 11e8233: Refactored FetchEngine from a 2,671-line monolith into a modular architecture with clear separation of concerns. The core HTTP API (`get`, `post`, `put`, `patch`, `delete`, `request`) remains unchanged. 
+ + ### Breaking Changes + + #### State Management + + State methods moved to a dedicated `state` property: + + ```typescript + // Before + engine.getState(); + engine.setState("token", "abc123"); + engine.resetState(); + + // After + engine.state.get(); + engine.state.set("token", "abc123"); + engine.state.reset(); + ``` + + #### Header Management + + Header methods moved to a dedicated `headers` manager with method-specific support: + + ```typescript + // Before + engine.addHeader("Authorization", "Bearer token"); + engine.hasHeader("Authorization"); + engine.rmHeader("Authorization"); + engine.headers; // getter returned object + + // After + engine.headers.set("Authorization", "Bearer token"); + engine.headers.set("X-Custom", "post-only", "POST"); // NEW: method-specific + engine.headers.has("Authorization"); + engine.headers.remove("Authorization"); + engine.headers.all; // property with default + method overrides + ``` + + #### Parameter Management + + Parameter methods moved to a dedicated `params` manager: + + ```typescript + // Before + engine.addParam("api_key", "abc123"); + engine.hasParam("api_key"); + engine.rmParams("api_key"); + engine.params; // getter returned object + + // After + engine.params.set("api_key", "abc123"); + engine.params.set("format", "json", "GET"); // NEW: method-specific + engine.params.has("api_key"); + engine.params.remove("api_key"); + engine.params.all; // property with default + method overrides + ``` + + #### Configuration Management + + Configuration methods replaced with unified `options` store supporting deep path access: + + ```typescript + // Before + engine.changeBaseUrl("https://new-api.com"); + engine.changeModifyOptions(fn); + engine.changeModifyMethodOptions("POST", fn); + + // After + engine.options.set("baseUrl", "https://new-api.com"); + engine.options.set("modifyOptions", fn); + engine.options.set("modifyMethodOptions.POST", fn); + + // NEW: Deep path access for any nested option + 
engine.options.get("retry.maxAttempts"); + engine.options.set("retry.maxAttempts", 5); + engine.options.set("dedupePolicy", { enabled: false }); + ``` + + #### Event Names + + Events drop the `fetch-` prefix for cleaner names: + + | Before | After | + | ------------------- | ------------- | + | `fetch-before` | `before` | + | `fetch-after` | `after` | + | `fetch-response` | `response` | + | `fetch-error` | `error` | + | `fetch-cache-hit` | `cache-hit` | + | `fetch-dedupe-join` | `dedupe-join` | + | `fetch-state-set` | `state-set` | + | `fetch-header-add` | `header-add` | + + ```typescript + // Before + engine.on("fetch-before", handler); + engine.on("fetch-cache-hit", handler); + + // After + engine.on("before", handler); + engine.on("cache-hit", handler); + ``` + + #### Internal API Removed + + - `engine._flight` is no longer exposed (internal via RequestExecutor) + + ### Why These Changes + + 1. **Modular Architecture**: Split monolithic engine into focused modules (state/, options/, properties/, policies/) for easier testing and maintenance + + 2. **Single Source of Truth**: All configuration flows through OptionsStore with type-safe deep path access + + 3. **Runtime Configurable**: Any option can now be changed at runtime, enabling dynamic API endpoints and feature flags + + 4. **Method-Specific Properties**: Headers and params can now be configured per-HTTP-method + + 5. **Cleaner Event Names**: Events match their domain without redundant prefixes + + ### Backward Compatibility + + Deprecated methods still work during migration: + + ```typescript + // These still work (deprecated) + engine.getState(); // → engine.state.get() + engine.addHeader(k, v); // → engine.headers.set(k, v) + engine.changeBaseUrl(); // → engine.options.set('baseUrl', ...) 
+ + // Old event names still emit (deprecated) + engine.on("fetch-before", handler); // still works + ``` + + ### New Capabilities + + - **FetchError helpers**: `err.isTimeout()`, `err.isCancelled()`, `err.isConnectionLost()` + - **Attempt timeouts**: Separate `attemptTimeout` and `totalTimeout` for retry control + - **Deep config access**: `engine.options.get('retry.maxAttempts')` + +### Minor Changes + +- 11e8233: ### Added + + - **Event timing data**: All request lifecycle events now include a `requestStart` timestamp (`Date.now()` captured at pipeline entry). Terminal events (`response`, `error`, `abort`) also include a `requestEnd` timestamp, enabling duration calculation directly from event data. + + ```typescript + engine.on("response", (event) => { + const duration = event.requestEnd - event.requestStart; + console.log(`Request completed in ${duration}ms`); + }); + ``` + + | Event | `requestStart` | `requestEnd` | + | ---------------- | :------------: | :----------: | + | `before-request` | yes | - | + | `after-request` | yes | - | + | `retry` | yes | - | + | `response` | yes | yes | + | `error` | yes | yes | + | `abort` | yes | yes | + +### Patch Changes + +- Updated dependencies [11e8233] +- Updated dependencies [11e8233] + - @logosdx/utils@6.1.0-beta.0 + - @logosdx/observer@2.3.1-beta.0 + ## 7.1.0 ### Minor Changes diff --git a/packages/fetch/package.json b/packages/fetch/package.json index d0b502c..3a1d8fa 100644 --- a/packages/fetch/package.json +++ b/packages/fetch/package.json @@ -1,6 +1,6 @@ { "name": "@logosdx/fetch", - "version": "7.1.0", + "version": "8.0.0-beta.0", "description": "A full-feature wrapper around the Fetch API", "exports": { ".": { diff --git a/packages/hooks/CHANGELOG.md b/packages/hooks/CHANGELOG.md index 812a2a0..9f01a4c 100644 --- a/packages/hooks/CHANGELOG.md +++ b/packages/hooks/CHANGELOG.md @@ -1,5 +1,12 @@ # @logosdx/hooks +## 1.0.0-beta.1 + +### Patch Changes + +- Updated dependencies [11e8233] + - 
@logosdx/utils@6.1.0-beta.0 + ## 1.0.0-beta.0 ### Major Changes diff --git a/packages/hooks/package.json b/packages/hooks/package.json index 46c8650..863467b 100644 --- a/packages/hooks/package.json +++ b/packages/hooks/package.json @@ -1,6 +1,6 @@ { "name": "@logosdx/hooks", - "version": "1.0.0-beta.0", + "version": "1.0.0-beta.1", "description": "A lightweight, type-safe hook system for extending function behavior", "exports": { ".": { diff --git a/packages/localize/CHANGELOG.md b/packages/localize/CHANGELOG.md index 34abbc7..ed0aeb3 100644 --- a/packages/localize/CHANGELOG.md +++ b/packages/localize/CHANGELOG.md @@ -1,5 +1,12 @@ # @logosdx/localize +## 1.0.22-beta.0 + +### Patch Changes + +- Updated dependencies [11e8233] + - @logosdx/utils@6.1.0-beta.0 + ## 1.0.21 ### Patch Changes diff --git a/packages/localize/package.json b/packages/localize/package.json index 5b31432..058e5b8 100644 --- a/packages/localize/package.json +++ b/packages/localize/package.json @@ -1,6 +1,6 @@ { "name": "@logosdx/localize", - "version": "1.0.21", + "version": "1.0.22-beta.0", "description": "A small, strongly-typed, full feature localization utility", "exports": { ".": { diff --git a/packages/observer/CHANGELOG.md b/packages/observer/CHANGELOG.md index 83c2919..8009725 100644 --- a/packages/observer/CHANGELOG.md +++ b/packages/observer/CHANGELOG.md @@ -1,5 +1,16 @@ # @logosdx/observer +## 2.3.1-beta.0 + +### Patch Changes + +- 11e8233: ### Fixed + + - **EventGenerator buffering**: Fixed a race condition where events emitted faster than the async iterator could consume them were silently dropped. Replaced single Deferred pattern with a PriorityQueue buffer, ensuring no events are lost under burst conditions. All existing consumer code continues to work unchanged. 
+ +- Updated dependencies [11e8233] + - @logosdx/utils@6.1.0-beta.0 + ## 2.3.0 ### Minor Changes diff --git a/packages/observer/package.json b/packages/observer/package.json index 2f69a88..7336d1d 100644 --- a/packages/observer/package.json +++ b/packages/observer/package.json @@ -1,6 +1,6 @@ { "name": "@logosdx/observer", - "version": "2.3.0", + "version": "2.3.1-beta.0", "description": "A small, strongly-typed, powerful observer utility for the DOM or NodeJS", "exports": { ".": { diff --git a/packages/state-machine/CHANGELOG.md b/packages/state-machine/CHANGELOG.md index 7797388..8c4a47f 100644 --- a/packages/state-machine/CHANGELOG.md +++ b/packages/state-machine/CHANGELOG.md @@ -1,5 +1,12 @@ # @logosdx/state-machine +## 1.0.22-beta.0 + +### Patch Changes + +- Updated dependencies [11e8233] + - @logosdx/utils@6.1.0-beta.0 + ## 1.0.21 ### Patch Changes diff --git a/packages/state-machine/package.json b/packages/state-machine/package.json index b9fb5f0..df733b7 100644 --- a/packages/state-machine/package.json +++ b/packages/state-machine/package.json @@ -1,6 +1,6 @@ { "name": "@logosdx/state-machine", - "version": "1.0.21", + "version": "1.0.22-beta.0", "description": "A powerful, stream-based, strongly-typed state management library", "exports": { ".": { diff --git a/packages/storage/CHANGELOG.md b/packages/storage/CHANGELOG.md index 21b8917..874930b 100644 --- a/packages/storage/CHANGELOG.md +++ b/packages/storage/CHANGELOG.md @@ -1,5 +1,12 @@ # @logosdx/storage +## 1.0.22-beta.0 + +### Patch Changes + +- Updated dependencies [11e8233] + - @logosdx/utils@6.1.0-beta.0 + ## 1.0.21 ### Patch Changes diff --git a/packages/storage/package.json b/packages/storage/package.json index b3ec6ed..4b22c41 100644 --- a/packages/storage/package.json +++ b/packages/storage/package.json @@ -1,6 +1,6 @@ { "name": "@logosdx/storage", - "version": "1.0.21", + "version": "1.0.22-beta.0", "description": "A feature-rich-but-simple, strongly-typed local storage wrapper", "exports": { 
".": { diff --git a/packages/utils/CHANGELOG.md b/packages/utils/CHANGELOG.md index 3a1e5be..0b53528 100644 --- a/packages/utils/CHANGELOG.md +++ b/packages/utils/CHANGELOG.md @@ -1,5 +1,13 @@ # @logosdx/utils +## 6.1.0-beta.0 + +### Minor Changes + +- 11e8233: ### Added + + - `SingleFlight.invalidateCache(predicate)`: Selectively invalidate cache entries matching a predicate function + ## 6.0.0 ### Major Changes diff --git a/packages/utils/package.json b/packages/utils/package.json index 480c004..9376079 100644 --- a/packages/utils/package.json +++ b/packages/utils/package.json @@ -1,6 +1,6 @@ { "name": "@logosdx/utils", - "version": "6.0.0", + "version": "6.1.0-beta.0", "description": "A small, common set of JS and TS utilities for LogosDX", "exports": { ".": { From 94a4154b2d20bc3832a149e217933a81998e15fb Mon Sep 17 00:00:00 2001 From: Danilo Alonso Date: Wed, 4 Feb 2026 02:55:20 -0500 Subject: [PATCH 12/13] feat(fetch): request ID header injection, per-request ID override, and streaming mode Add opt-in distributed tracing via `requestIdHeader` config that automatically injects the generated requestId into outgoing request headers. Add per-request `requestId` option on CallConfig to override the auto-generated ID with an external trace ID from upstream services. Add `stream` option to CallConfig for returning raw Response objects with unconsumed body streams, skipping cache and deduplication while preserving rate limiting and lifecycle events. All HTTP method overloads now support typed stream responses. Improve abort error metadata population when totalTimeout fires and update documentation for distributed tracing patterns. 
--- .changeset/swift-foxes-trace.md | 27 ++ .github/workflows/claude-ci-failure.yml | 1 + .github/workflows/publish-beta.yml | 67 ---- .github/workflows/publish.yml | 27 +- docs/packages/fetch/advanced.md | 3 + docs/packages/fetch/configuration.md | 47 +++ docs/packages/fetch/events.md | 35 ++- llm-helpers/fetch.md | 8 + packages/fetch/src/engine/events.ts | 3 + packages/fetch/src/engine/executor.ts | 144 ++++++--- packages/fetch/src/engine/index.ts | 113 +++++-- packages/fetch/src/engine/types.ts | 6 + packages/fetch/src/helpers/fetch-error.ts | 1 + packages/fetch/src/options/types.ts | 50 +++ tests/src/fetch/_helpers.ts | 70 ++++- tests/src/fetch/engine/request-id.test.ts | 366 ++++++++++++++++++++++ tests/src/fetch/engine/streaming.test.ts | 250 +++++++++++++++ 17 files changed, 1086 insertions(+), 132 deletions(-) create mode 100644 .changeset/swift-foxes-trace.md delete mode 100644 .github/workflows/publish-beta.yml create mode 100644 tests/src/fetch/engine/request-id.test.ts create mode 100644 tests/src/fetch/engine/streaming.test.ts diff --git a/.changeset/swift-foxes-trace.md b/.changeset/swift-foxes-trace.md new file mode 100644 index 0000000..97d5352 --- /dev/null +++ b/.changeset/swift-foxes-trace.md @@ -0,0 +1,27 @@ +--- +"@logosdx/fetch": minor +--- + +### Added + +* `feat(fetch):` Add `requestIdHeader` engine config option for automatic request ID header injection, enabling end-to-end distributed tracing without manual `modifyConfig` wiring +* `feat(fetch):` Add per-request `requestId` option to `CallConfig`, allowing callers to override the auto-generated ID with an external trace ID from upstream services +* `feat(fetch):` Add `stream` option to `CallConfig` for returning raw `Response` objects with unconsumed body streams — cache and deduplication are skipped while rate limiting and lifecycle events still fire + +```typescript +// Distributed tracing +const api = new FetchEngine({ + baseUrl: 'https://api.example.com', + requestIdHeader: 
'X-Request-Id' +}); + +// Auto-generated ID sent as header + available in all events +await api.get('/orders'); + +// Override with upstream trace ID for end-to-end correlation +await api.get('/orders', { requestId: incomingTraceId }); + +// Stream mode — raw Response with unconsumed body +const { data: response } = await api.get('/sse', { stream: true }); +const reader = response.body.getReader(); +``` diff --git a/.github/workflows/claude-ci-failure.yml b/.github/workflows/claude-ci-failure.yml index b29687d..2e22d4d 100644 --- a/.github/workflows/claude-ci-failure.yml +++ b/.github/workflows/claude-ci-failure.yml @@ -67,6 +67,7 @@ jobs: uses: anthropics/claude-code-action@v1 with: anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }} + github_token: ${{ secrets.GITHUB_TOKEN }} use_sticky_comment: true additional_permissions: | actions: read diff --git a/.github/workflows/publish-beta.yml b/.github/workflows/publish-beta.yml deleted file mode 100644 index 738a68b..0000000 --- a/.github/workflows/publish-beta.yml +++ /dev/null @@ -1,67 +0,0 @@ -name: Publish Beta -on: - push: - branches: - - "beta" - workflow_dispatch: - -concurrency: ${{ github.workflow }}-${{ github.ref }} - -permissions: - id-token: write # Required for OIDC - contents: write - actions: write - issues: write - pull-requests: write - - -jobs: - check-prerelease: - runs-on: ubuntu-latest - outputs: - is_prerelease: ${{ steps.check.outputs.is_prerelease }} - steps: - - - uses: actions/checkout@v6 - - - name: Check if pre-release mode is enabled - id: check - run: | - if [ -f ".changeset/pre.json" ]; then - echo "Pre-release mode is enabled" - echo "is_prerelease=true" >> $GITHUB_OUTPUT - else - echo "Pre-release mode is not enabled, skipping publish" - echo "is_prerelease=false" >> $GITHUB_OUTPUT - fi - - publish: - needs: check-prerelease - if: needs.check-prerelease.outputs.is_prerelease == 'true' - runs-on: ubuntu-latest - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - NPM_TOKEN: ${{ 
secrets.NPM_TOKEN }} - steps: - - - uses: actions/checkout@v6 - - - uses: pnpm/action-setup@v4 - with: - version: 10 - - - uses: actions/setup-node@v6 - with: - node-version: 22.x - cache: "pnpm" - - - run: pnpm install --frozen-lockfile - - - run: pnpm recursive run build - - - name: Publish - uses: changesets/action@v1 - with: - publish: pnpm run release - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 36b9c88..8b5d303 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -3,6 +3,7 @@ on: push: branches: - "release" + - "beta" workflow_dispatch: concurrency: ${{ github.workflow }}-${{ github.ref }} @@ -16,11 +17,32 @@ permissions: jobs: + check-prerelease: + if: github.ref == 'refs/heads/beta' + runs-on: ubuntu-latest + outputs: + is_prerelease: ${{ steps.check.outputs.is_prerelease }} + steps: + + - uses: actions/checkout@v6 + + - name: Check if pre-release mode is enabled + id: check + run: | + if [ -f ".changeset/pre.json" ]; then + echo "Pre-release mode is enabled" + echo "is_prerelease=true" >> $GITHUB_OUTPUT + else + echo "Pre-release mode is not enabled, skipping publish" + echo "is_prerelease=false" >> $GITHUB_OUTPUT + fi + publish: + needs: check-prerelease + if: always() && (github.ref == 'refs/heads/release' || needs.check-prerelease.outputs.is_prerelease == 'true') runs-on: ubuntu-latest env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - NPM_TOKEN: ${{ secrets.NPM_TOKEN }} steps: - uses: actions/checkout@v6 @@ -33,6 +55,7 @@ jobs: with: node-version: 22.x cache: "pnpm" + registry-url: "https://registry.npmjs.org" - run: pnpm install --frozen-lockfile @@ -42,7 +65,9 @@ jobs: publish: pnpm run release env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + docs: + if: github.ref == 'refs/heads/release' needs: publish uses: ./.github/workflows/docs.yml secrets: diff --git a/docs/packages/fetch/advanced.md b/docs/packages/fetch/advanced.md index 
2c74907..37ffebb 100644 --- a/docs/packages/fetch/advanced.md +++ b/docs/packages/fetch/advanced.md @@ -260,6 +260,9 @@ const api = new FetchEngine({ totalTimeout: 30000, // 30s max for entire operation attemptTimeout: 10000, // 10s per attempt + // Distributed tracing - sends requestId as header to server + requestIdHeader: 'X-Request-Id', + // Global headers headers: { 'Content-Type': 'application/json', diff --git a/docs/packages/fetch/configuration.md b/docs/packages/fetch/configuration.md index 3cf6b32..7ab0191 100644 --- a/docs/packages/fetch/configuration.md +++ b/docs/packages/fetch/configuration.md @@ -33,6 +33,8 @@ The `FetchEngine.Config` interface defines all options for creating a FetchEngin | `dedupePolicy` | `boolean \| DeduplicationConfig` | Request deduplication configuration | | `cachePolicy` | `boolean \| CacheConfig` | Response caching configuration | | `rateLimitPolicy` | `boolean \| RateLimitConfig` | Rate limiting configuration | +| `generateRequestId` | `() => string` | Custom function to generate request IDs for tracing | +| `requestIdHeader` | `string` | Header name for sending the request ID with every outgoing request | | `determineType` | `(response: Response) => DetermineTypeResult` | Custom response type detection | | `name` | `string` | Instance name for debugging | | `onBeforeReq` | `(opts) => void` | Lifecycle hook before each request | @@ -79,6 +81,51 @@ const api = new FetchEngine({ ``` +### Distributed Tracing + + +FetchEngine generates a unique `requestId` for every request, visible in all lifecycle events and `FetchError` instances. 
To propagate this ID to the server, set `requestIdHeader`: + +```typescript +const api = new FetchEngine({ + baseUrl: 'https://api.example.com', + requestIdHeader: 'X-Request-Id' +}); + +// Every outgoing request now includes the X-Request-Id header +// with the same value available in before-request, after-request, and error events +api.on('before-request', (data) => { + console.log('Request ID:', data.requestId); +}); +``` + +When combined with `generateRequestId`, the custom ID is used in both the header and all events: + +```typescript +const api = new FetchEngine({ + baseUrl: 'https://api.example.com', + generateRequestId: () => `trace-${crypto.randomUUID()}`, + requestIdHeader: 'X-Trace-Id' +}); +``` + +The request ID can also be overridden per-request. This is useful for propagating an external trace ID from an upstream service: + +```typescript +const api = new FetchEngine({ + baseUrl: 'https://api.example.com', + requestIdHeader: 'X-Request-Id' +}); + +// Use the upstream trace ID instead of generating a new one +await api.get('/orders', { requestId: incomingTraceId }); +``` + +::: info +When `requestIdHeader` is not set, no header is injected. This is opt-in to avoid unexpected headers in environments with strict CORS policies. 
+::: + + ## ConfigStore diff --git a/docs/packages/fetch/events.md b/docs/packages/fetch/events.md index 7e549d5..d69318e 100644 --- a/docs/packages/fetch/events.md +++ b/docs/packages/fetch/events.md @@ -141,6 +141,7 @@ interface EventData { status?: number; path?: string; aborted?: boolean; + requestId?: string; // Unique ID for this request (consistent across retries) requestStart?: number; // Timestamp (ms) when request entered pipeline requestEnd?: number; // Timestamp (ms) when request resolved } @@ -256,6 +257,37 @@ api.on('error', (data) => { ``` +### Distributed Tracing + + +Use `requestIdHeader` to automatically send the request ID to the server, then use events to correlate client and server logs: + +```typescript +const api = new FetchEngine({ + baseUrl: 'https://api.example.com', + requestIdHeader: 'X-Request-Id' +}); + +api.on('before-request', (data) => { + console.log(`→ [${data.requestId}] ${data.method} ${data.path}`); +}); + +api.on('after-request', (data) => { + console.log(`← [${data.requestId}] ${data.status} ${data.path}`); +}); + +api.on('error', (data) => { + // Same requestId is available on the server via the X-Request-Id header + errorReporting.captureException(data.error, { + tags: { requestId: data.requestId } + }); +}); + +// Override the request ID per-request to propagate an upstream trace +await api.get('/orders', { requestId: incomingTraceId }); +``` + + ### Retry Monitoring @@ -407,7 +439,8 @@ api.on('error', (data) => { tags: { endpoint: data.path, method: data.method, - status: data.status + status: data.status, + requestId: data.requestId }, extra: { attempt: data.attempt diff --git a/llm-helpers/fetch.md b/llm-helpers/fetch.md index 5e0e024..6dfca89 100644 --- a/llm-helpers/fetch.md +++ b/llm-helpers/fetch.md @@ -153,6 +153,10 @@ interface FetchEngine.Config { }; }; + // Request ID tracing + generateRequestId?: () => string; // Custom ID generator (default: generateId from utils) + requestIdHeader?: string; // Header name for 
sending requestId to server + // Response type determination determineType?: (response: Response) => 'json' | 'text' | 'blob' | 'arrayBuffer' | 'formData' | Symbol; @@ -757,6 +761,7 @@ const [response, err] = await attempt(() => timeout: 10000, headers: { 'X-Request-ID': '123' }, params: { include: 'profile' }, + requestId: 'upstream-trace-id', // Override auto-generated request ID onBeforeReq: (opts) => console.log('Making request:', opts), onAfterReq: (response) => console.log('Response:', response.status), onError: (error) => console.error('Error:', error), @@ -940,6 +945,9 @@ const api = new FetchEngine({ defaultType: 'json', totalTimeout: 5000, + // Distributed tracing - sends requestId as header to server + requestIdHeader: 'X-Request-Id', + // Deduplication - prevent duplicate concurrent requests dedupePolicy: { enabled: true, diff --git a/packages/fetch/src/engine/events.ts b/packages/fetch/src/engine/events.ts index 6f67b32..9ad5174 100644 --- a/packages/fetch/src/engine/events.ts +++ b/packages/fetch/src/engine/events.ts @@ -30,6 +30,9 @@ export interface EventData { path?: string | undefined; aborted?: boolean | undefined; + /** Unique ID for this request, flows through all events */ + requestId?: string | undefined; + /** Timestamp (ms) when the request entered the execution pipeline */ requestStart?: number | undefined; diff --git a/packages/fetch/src/engine/executor.ts b/packages/fetch/src/engine/executor.ts index d29bc0e..146cf38 100644 --- a/packages/fetch/src/engine/executor.ts +++ b/packages/fetch/src/engine/executor.ts @@ -4,6 +4,7 @@ import { SingleFlight, Deferred, assert, + generateId, } from '@logosdx/utils'; import type { @@ -175,6 +176,7 @@ export class RequestExecutor< 'timeout' in payloadOrOptions || 'retry' in payloadOrOptions || 'abortController' in payloadOrOptions || + 'stream' in payloadOrOptions || 'onError' in payloadOrOptions || 'onBeforeReq' in payloadOrOptions || 'onAfterReq' in payloadOrOptions; @@ -318,6 +320,8 @@ export class 
RequestExecutor< signal, determineType, retry, + stream, + requestId: perRequestId, headers: requestHeaders, // RequestInit options (per-request overrides config defaults) ...perRequestInit @@ -412,12 +416,25 @@ export class RequestExecutor< ? {} : (opts.retry === false ? { maxAttempts: 0 } : opts.retry); + // Generate request ID for tracing across all events + const generateRequestId = this.engine.config.get('generateRequestId'); + const requestId = perRequestId || (generateRequestId ? generateRequestId() : generateId()); + + const requestIdHeader = this.engine.config.get('requestIdHeader'); + + if (requestIdHeader) { + + headers = { ...headers, [requestIdHeader]: requestId } as DictAndT; + } + // Return normalized options // opts now contains all RequestInit options (config + per-request + modifyConfig) return { // Spread opts to get all RequestInit options after modifyConfig ...opts, // Explicit values (override anything from opts) + stream, + requestId, method, path, payload, @@ -663,6 +680,7 @@ export class RequestExecutor< } } + err.requestId = normalizedOpts.requestId; err.attempt = attemptNum; err.status = err.status || status!; err.method = err.method || method!; @@ -788,6 +806,44 @@ export class RequestExecutor< onAfterRequest && await onAfterRequest(response.clone(), callbackOpts); + // Stream mode: return raw Response without body parsing. + // Non-ok statuses are returned as-is — the consumer checks status. + if (options.stream) { + + const responseHeaders = {} as Partial; + + response.headers.forEach((value, key) => { + + responseHeaders[key as keyof ResHdr] = value as ResHdr[keyof ResHdr]; + }); + + this.engine.emit('response', { + ...options, + response, + data: response, + status: response.status, + requestEnd: Date.now() + }); + + const config: FetchConfig, DictAndT

> = { + baseUrl: this.baseUrl.toString(), + attemptTimeout: options.attemptTimeout, + method, + headers: reqHeaders, + params, + retry: this.retryConfig, + determineType, + }; + + return { + data: response as unknown as Res, + headers: responseHeaders, + status: response.status, + request: new Request(url, fetchOpts), + config + }; + } + const [data, parseErr] = await attempt(async () => { const typeResult = determineType @@ -948,7 +1004,12 @@ export class RequestExecutor< // Check if parent (totalTimeout) already aborted - stop retrying if (options.controller.signal.aborted) { - const err = lastError ?? new FetchError('Request aborted by totalTimeout'); + const err = lastError ?? new FetchError('Request aborted'); + err.aborted = true; + err.method = err.method || options.method; + err.path = err.path || options.path; + err.status = err.status || 499; + err.step = err.step || 'fetch'; err.timedOut = options.getTotalTimeoutFired?.() ?? false; throw err; } @@ -1226,33 +1287,38 @@ export class RequestExecutor< normalizedOpts.requestStart = Date.now(); const { method, path, controller } = normalizedOpts; - - // === Cache Check === - // Cache runs first: cached responses return immediately without consuming rate limit tokens. - const cacheResult = await this.cachePolicy.checkCache, DictAndT

, ResHdr>>({ - method, - path, - normalizedOpts: normalizedOpts as any, - options: normalizedOpts as any, - clearTimeout: () => totalTimeout?.clear() - }); + const isStream = normalizedOpts.stream === true; let cacheKey: string | null = null; let cacheConfig: CacheRule | null = null; - if (cacheResult?.hit) { + // === Cache Check === + // Cache runs first: cached responses return immediately without consuming rate limit tokens. + // Stream requests skip cache — each caller needs their own Response body. + if (!isStream) { + + const cacheResult = await this.cachePolicy.checkCache, DictAndT

, ResHdr>>({ + method, + path, + normalizedOpts: normalizedOpts as any, + options: normalizedOpts as any, + clearTimeout: () => totalTimeout?.clear() + }); - return cacheResult.value; - } + if (cacheResult?.hit) { - if (cacheResult && !cacheResult.hit) { + return cacheResult.value; + } + + if (cacheResult && !cacheResult.hit) { - cacheKey = cacheResult.key; - cacheConfig = cacheResult.config as CacheRule; + cacheKey = cacheResult.key; + cacheConfig = cacheResult.config as CacheRule; + } } // === Rate Limit Check === - // Rate limiting only gates actual outbound requests (after cache miss). + // Rate limiting still gates stream requests (they're real outbound calls). await this.rateLimitPolicy.executeGuard({ method, path, @@ -1272,31 +1338,35 @@ export class RequestExecutor< } }); - // === Deduplication Check === - // Cast normalizedOpts for policy compatibility (internal type order mismatch) - const dedupeResult = this.dedupePolicy.checkInflight, DictAndT

, ResHdr>>({ - method, - path, - normalizedOpts: normalizedOpts as any - }); - let dedupeKey: string | null = null; let cleanup: (() => void) | null = null; - if (dedupeResult?.joined) { + // === Deduplication Check === + // Stream requests skip deduplication — each caller needs their own Response body. + if (!isStream) { + + // Cast normalizedOpts for policy compatibility (internal type order mismatch) + const dedupeResult = this.dedupePolicy.checkInflight, DictAndT

, ResHdr>>({ + method, + path, + normalizedOpts: normalizedOpts as any + }); - return this.#awaitWithIndependentTimeout( - dedupeResult.promise, - controller, - totalTimeout, - normalizedOpts.method, - path - ); - } + if (dedupeResult?.joined) { - if (dedupeResult && !dedupeResult.joined) { + return this.#awaitWithIndependentTimeout( + dedupeResult.promise, + controller, + totalTimeout, + normalizedOpts.method, + path + ); + } - dedupeKey = dedupeResult.key; + if (dedupeResult && !dedupeResult.joined) { + + dedupeKey = dedupeResult.key; + } } // === Execute Request === diff --git a/packages/fetch/src/engine/index.ts b/packages/fetch/src/engine/index.ts index 37502f7..406952e 100644 --- a/packages/fetch/src/engine/index.ts +++ b/packages/fetch/src/engine/index.ts @@ -224,12 +224,19 @@ export class FetchEngine< * const { data: users } = await api.get('/users'); * ``` */ + get( + path: string, + options: CallConfig & { stream: true } + ): AbortablePromise, DictAndT

, RH>>; + get( path: string, - options: CallConfig = {} - ): AbortablePromise, DictAndT

, ResHdr>> { + options?: CallConfig + ): AbortablePromise, DictAndT

, ResHdr>>; - return this.request('GET', path, options); + get(path: string, options: CallConfig = {}): any { + + return this.request('GET', path, options); } /** @@ -245,13 +252,21 @@ export class FetchEngine< * const { data: user } = await api.post('/users', { name: 'John' }); * ``` */ + post( + path: string, + payload: Data | undefined, + options: CallConfig & { stream: true } + ): AbortablePromise, DictAndT

, RH>>; + post( path: string, payload?: Data, - options: CallConfig = {} - ): AbortablePromise, DictAndT

, ResHdr>> { + options?: CallConfig + ): AbortablePromise, DictAndT

, ResHdr>>; + + post(path: string, payload?: unknown, options: CallConfig = {}): any { - return this.#executor.execute('POST', path, payload, options); + return this.#executor.execute('POST', path, payload, options); } /** @@ -267,13 +282,21 @@ export class FetchEngine< * const { data: user } = await api.put('/users/123', { name: 'Jane' }); * ``` */ + put( + path: string, + payload: Data | undefined, + options: CallConfig & { stream: true } + ): AbortablePromise, DictAndT

, RH>>; + put( path: string, payload?: Data, - options: CallConfig = {} - ): AbortablePromise, DictAndT

, ResHdr>> { + options?: CallConfig + ): AbortablePromise, DictAndT

, ResHdr>>; + + put(path: string, payload?: unknown, options: CallConfig = {}): any { - return this.#executor.execute('PUT', path, payload, options); + return this.#executor.execute('PUT', path, payload, options); } /** @@ -289,13 +312,21 @@ export class FetchEngine< * const { data } = await api.patch('/users/123', { email: 'new@example.com' }); * ``` */ + patch( + path: string, + payload: Data | undefined, + options: CallConfig & { stream: true } + ): AbortablePromise, DictAndT

, RH>>; + patch( path: string, payload?: Data, - options: CallConfig = {} - ): AbortablePromise, DictAndT

, ResHdr>> { + options?: CallConfig + ): AbortablePromise, DictAndT

, ResHdr>>; - return this.#executor.execute('PATCH', path, payload, options); + patch(path: string, payload?: unknown, options: CallConfig = {}): any { + + return this.#executor.execute('PATCH', path, payload, options); } /** @@ -311,13 +342,21 @@ export class FetchEngine< * await api.delete('/users/123'); * ``` */ + delete( + path: string, + payload: Data | undefined, + options: CallConfig & { stream: true } + ): AbortablePromise, DictAndT

, RH>>; + delete( path: string, payload?: Data, - options: CallConfig = {} - ): AbortablePromise, DictAndT

, ResHdr>> { + options?: CallConfig + ): AbortablePromise, DictAndT

, ResHdr>>; - return this.#executor.execute('DELETE', path, payload, options); + delete(path: string, payload?: unknown, options: CallConfig = {}): any { + + return this.#executor.execute('DELETE', path, payload, options); } /** @@ -336,12 +375,19 @@ export class FetchEngine< * const { headers } = await api.request('OPTIONS', '/users'); * ``` */ + options( + path: string, + opts: CallConfig & { stream: true } + ): AbortablePromise, DictAndT

, RH>>; + options( path: string, - opts: CallConfig = {} - ): AbortablePromise, DictAndT

, ResHdr>> { + opts?: CallConfig + ): AbortablePromise, DictAndT

, ResHdr>>; + + options(path: string, opts: CallConfig = {}): any { - return this.request('OPTIONS', path, opts); + return this.request('OPTIONS', path, opts); } /** @@ -360,12 +406,19 @@ export class FetchEngine< * const { headers } = await api.request('HEAD', '/users/123'); * ``` */ + head( + path: string, + opts: CallConfig & { stream: true } + ): AbortablePromise, DictAndT

, RH>>; + head( path: string, - opts: CallConfig = {} - ): AbortablePromise, DictAndT

, ResHdr>> { + opts?: CallConfig + ): AbortablePromise, DictAndT

, ResHdr>>; + + head(path: string, opts: CallConfig = {}): any { - return this.request('HEAD', path, opts); + return this.request('HEAD', path, opts); } /** @@ -381,11 +434,23 @@ export class FetchEngine< * const response = await api.request('GET', '/users'); * ``` */ + request( + method: HttpMethods, + path: string, + options: CallConfig & { payload?: Data; stream: true } + ): AbortablePromise, DictAndT

, RH>>; + request( method: HttpMethods, path: string, - options: CallConfig & { payload?: Data } = {} - ): AbortablePromise, DictAndT

, ResHdr>> { + options?: CallConfig & { payload?: Data } + ): AbortablePromise, DictAndT

, ResHdr>>; + + request( + method: HttpMethods, + path: string, + options: CallConfig & { payload?: unknown } = {} + ): any { if (this.#destroyed) { @@ -404,7 +469,7 @@ export class FetchEngine< instanceSignal.addEventListener('abort', () => controller.abort('FetchEngine destroyed'), { once: true }); } - return this.#executor.execute( + return this.#executor.execute( method, path, payload, diff --git a/packages/fetch/src/engine/types.ts b/packages/fetch/src/engine/types.ts index f79f875..bdcc902 100644 --- a/packages/fetch/src/engine/types.ts +++ b/packages/fetch/src/engine/types.ts @@ -159,6 +159,12 @@ export interface InternalReqOptions // === Runtime state === + /** When true, returns raw Response without body parsing */ + stream?: boolean | undefined; + + /** Unique ID for this request, flows through all events */ + requestId?: string | undefined; + /** Current attempt number (1-based) */ attempt?: number | undefined; diff --git a/packages/fetch/src/helpers/fetch-error.ts b/packages/fetch/src/helpers/fetch-error.ts index b2ac5bd..f705a3c 100644 --- a/packages/fetch/src/helpers/fetch-error.ts +++ b/packages/fetch/src/helpers/fetch-error.ts @@ -20,6 +20,7 @@ export interface FetchError extends Error { */ timedOut?: boolean | undefined; + requestId?: string | undefined; attempt?: number | undefined; step?: 'fetch' | 'parse' | 'response' | undefined; url?: string | undefined; diff --git a/packages/fetch/src/options/types.ts b/packages/fetch/src/options/types.ts index 4dd4f07..9b66e2e 100644 --- a/packages/fetch/src/options/types.ts +++ b/packages/fetch/src/options/types.ts @@ -118,6 +118,34 @@ export interface CallConfig /** AbortController for manual request cancellation */ abortController?: AbortController | undefined; + /** + * Return raw Response without body parsing. + * + * When true, the response `data` will be the raw `Response` object + * with an unconsumed body stream. 
Cache and deduplication are skipped + * because each caller needs their own readable stream. + * + * Rate limiting and lifecycle events (before-request, after-request, + * response) still fire normally. + */ + stream?: boolean | undefined; + + /** + * Override the auto-generated request ID for this request. + * + * When provided, this value is used instead of `generateRequestId()` + * or the default `generateId()`. Useful for propagating an external + * trace ID from an upstream service or user-defined correlation ID. + * + * @example + * ```typescript + * await api.get('/orders', { + * requestId: incomingTraceId + * }); + * ``` + */ + requestId?: string | undefined; + /** @deprecated Use totalTimeout instead */ timeout?: number | undefined; } @@ -266,6 +294,28 @@ export interface EngineConfig< */ rateLimitPolicy?: boolean | RateLimitConfig | undefined; + /** + * Custom function to generate request IDs for tracing. + * When omitted, uses `generateId` from `@logosdx/utils`. + */ + generateRequestId?: (() => string) | undefined; + + /** + * Header name for sending the request ID with every request. + * + * When set, each outgoing request includes this header with the + * generated `requestId` value, enabling end-to-end distributed tracing. 
+ * + * @example + * ```typescript + * const api = new FetchEngine({ + * baseUrl: 'https://api.example.com', + * requestIdHeader: 'X-Request-Id' + * }); + * ``` + */ + requestIdHeader?: string | undefined; + // From RequestOpts totalTimeout?: number | undefined; attemptTimeout?: number | undefined; diff --git a/tests/src/fetch/_helpers.ts b/tests/src/fetch/_helpers.ts index 277d616..19faf95 100644 --- a/tests/src/fetch/_helpers.ts +++ b/tests/src/fetch/_helpers.ts @@ -9,6 +9,8 @@ import net from 'net'; import Hapi, { Lifecycle } from '@hapi/hapi'; import Boom from '@hapi/boom'; import Joi from 'joi'; +import { createSession } from 'better-sse'; +import type { ServerResponse } from 'http'; import { FetchEngine, @@ -109,6 +111,22 @@ export const makeTestStubs = async (port?: number) => { let failOnceCallCount = 0; const resetFailOnce = () => { failOnceCallCount = 0; }; + // SSE connection tracking for cleanup + const activeResponses: ServerResponse[] = []; + + const closeConnections = () => { + + for (const res of activeResponses) { + + if (!res.writableEnded) { + + res.end(); + } + } + + activeResponses.length = 0; + }; + server.route( [ mkHapiRoute('/bad-content-type', (_, h) => h.response().header('content-type', 'habibti/allah')), @@ -121,6 +139,52 @@ export const makeTestStubs = async (port?: number) => { mkHapiRoute('/empty', () => { return null; }), mkHapiRoute('/empty2', (_, h) => { return h.response().code(204); }), + // SSE endpoint: pushes two events and keeps connection open + { + method: 'GET' as const, + path: '/sse', + handler: (async (request, h) => { + + activeResponses.push(request.raw.res); + callStub(request); + + const session = await createSession( + request.raw.req, + request.raw.res + ); + + session.push('hello'); + session.push('world'); + + return h.abandon; + }) as Lifecycle.Method + }, + + // SSE endpoint: pushes a configurable number of events + { + method: 'GET' as const, + path: '/sse/events', + handler: (async (request, h) => { + + 
activeResponses.push(request.raw.res); + callStub(request); + + const session = await createSession( + request.raw.req, + request.raw.res + ); + + const count = Number(request.query.count) || 3; + + for (let i = 0; i < count; i++) { + + session.push(`event-${i}`); + } + + return h.abandon; + }) as Lifecycle.Method + }, + // Flaky endpoint: succeeds on first call, fails on subsequent calls // Useful for testing SWR revalidation error handling mkHapiRoute('/flaky', () => { @@ -235,6 +299,7 @@ export const makeTestStubs = async (port?: number) => { afterAll(async () => { + closeConnections(); await server.stop(); }); @@ -243,7 +308,8 @@ export const makeTestStubs = async (port?: number) => { callStub.reset(); resetFlaky(); resetFailOnce(); + closeConnections(); }); - return { callStub, server, testUrl, resetFlaky, resetFailOnce }; -} \ No newline at end of file + return { callStub, server, testUrl, resetFlaky, resetFailOnce, closeConnections }; +} diff --git a/tests/src/fetch/engine/request-id.test.ts b/tests/src/fetch/engine/request-id.test.ts new file mode 100644 index 0000000..2456be6 --- /dev/null +++ b/tests/src/fetch/engine/request-id.test.ts @@ -0,0 +1,366 @@ +import { + describe, + it, + expect +} from 'vitest'; + +import { + FetchEngine, + FetchError, +} from '../../../../packages/fetch/src/index.ts'; + +import { attempt, wait } from '../../../../packages/utils/src/index.ts'; +import { makeTestStubs } from '../_helpers.ts'; + + +describe('@logosdx/fetch: request ID', async () => { + + const { testUrl, callStub } = await makeTestStubs(4141); + + it('generates a default requestId on every request', async () => { + + const events: any[] = []; + const api = new FetchEngine({ baseUrl: testUrl }); + + api.on('before-request', (data) => events.push(data)); + + await api.get('/json'); + + expect(events).to.have.length(1); + expect(events[0].requestId).to.be.a('string'); + expect(events[0].requestId.length).to.be.greaterThan(0); + + api.destroy(); + await wait(10); + 
}); + + it('generates unique requestIds for different requests', async () => { + + const ids: string[] = []; + const api = new FetchEngine({ baseUrl: testUrl }); + + api.on('before-request', (data: any) => ids.push(data.requestId)); + + await api.get('/json'); + await api.get('/json'); + await api.get('/json'); + + expect(ids).to.have.length(3); + expect(new Set(ids).size).to.equal(3); + + api.destroy(); + await wait(10); + }); + + it('uses custom generateRequestId when provided', async () => { + + let callCount = 0; + + const api = new FetchEngine({ + baseUrl: testUrl, + generateRequestId: () => { + + callCount++; + return `custom-${callCount}`; + } + } as any); + + const events: any[] = []; + api.on('before-request', (data) => events.push(data)); + + await api.get('/json'); + + expect(callCount).to.equal(1); + expect(events[0].requestId).to.equal('custom-1'); + + api.destroy(); + await wait(10); + }); + + it('threads the same requestId through before-request and after-request', async () => { + + const beforeIds: string[] = []; + const afterIds: string[] = []; + const api = new FetchEngine({ baseUrl: testUrl }); + + api.on('before-request', (data: any) => beforeIds.push(data.requestId)); + api.on('after-request', (data: any) => afterIds.push(data.requestId)); + + await api.get('/json'); + + expect(beforeIds).to.have.length(1); + expect(afterIds).to.have.length(1); + expect(beforeIds[0]).to.equal(afterIds[0]); + + api.destroy(); + await wait(10); + }); + + it('threads the same requestId through before-request and error events', async () => { + + const beforeIds: string[] = []; + const errorIds: string[] = []; + + const api = new FetchEngine({ + baseUrl: testUrl, + retry: false + }); + + api.on('before-request', (data: any) => beforeIds.push(data.requestId)); + api.on('error', (data: any) => errorIds.push(data.requestId)); + + const [, err] = await attempt(() => api.get('/fail')); + + expect(err).to.be.instanceOf(FetchError); + expect(beforeIds).to.have.length(1); + 
expect(errorIds).to.have.length(1); + expect(beforeIds[0]).to.equal(errorIds[0]); + + api.destroy(); + await wait(10); + }); + + it('preserves the same requestId across retry events', async () => { + + const ids = new Set(); + + const api = new FetchEngine({ + baseUrl: testUrl, + retry: { maxAttempts: 2, baseDelay: 10 } + }); + + api.on('before-request', (data: any) => ids.add(data.requestId)); + api.on('retry', (data: any) => ids.add(data.requestId)); + api.on('error', (data: any) => ids.add(data.requestId)); + + const [, err] = await attempt(() => api.get('/fail')); + + expect(err).to.be.instanceOf(FetchError); + expect(ids.size).to.equal(1); + + api.destroy(); + await wait(10); + }); + + it('includes requestId in cache events', async () => { + + const cacheEvents: any[] = []; + + const api = new FetchEngine({ + baseUrl: testUrl, + cachePolicy: { + rules: [{ match: /.*/, ttl: 5000 }] + } + }); + + api.on('cache-miss', (data: any) => cacheEvents.push(data)); + api.on('cache-set', (data: any) => cacheEvents.push(data)); + api.on('cache-hit', (data: any) => cacheEvents.push(data)); + + await api.get('/json'); + await api.get('/json'); + + // First request: cache-miss + cache-set + // Second request: cache-hit + expect(cacheEvents.length).to.be.greaterThanOrEqual(2); + + for (const event of cacheEvents) { + + expect(event.requestId).to.be.a('string'); + } + + api.destroy(); + await wait(10); + }); + + it('includes requestId in dedupe events', async () => { + + const dedupeEvents: any[] = []; + + const api = new FetchEngine({ + baseUrl: testUrl, + dedupePolicy: true + }); + + api.on('dedupe-start', (data: any) => dedupeEvents.push(data)); + api.on('dedupe-join', (data: any) => dedupeEvents.push(data)); + + await Promise.all([ + api.get('/slow-success/100'), + api.get('/slow-success/100') + ]); + + expect(dedupeEvents.length).to.be.greaterThanOrEqual(1); + + for (const event of dedupeEvents) { + + expect(event.requestId).to.be.a('string'); + } + + api.destroy(); + 
await wait(10); + }); + + it('sets requestId on FetchError instances', async () => { + + const api = new FetchEngine({ + baseUrl: testUrl, + retry: false + }); + + const [, err] = await attempt(() => api.get('/fail')); + + expect(err).to.be.instanceOf(FetchError); + expect((err as any).requestId).to.be.a('string'); + + api.destroy(); + await wait(10); + }); + + it('sends requestId as header when requestIdHeader is configured', async () => { + + const events: any[] = []; + + const api = new FetchEngine({ + baseUrl: testUrl, + requestIdHeader: 'X-Request-Id' + }); + + api.on('before-request', (data) => events.push(data)); + + await api.get('/json'); + + expect(events).to.have.length(1); + + const sentHeader = callStub.firstCall.args[0].headers['x-request-id']; + expect(sentHeader).to.equal(events[0].requestId); + + api.destroy(); + await wait(10); + }); + + it('does not send header when requestIdHeader is not configured', async () => { + + const api = new FetchEngine({ baseUrl: testUrl }); + + await api.get('/json'); + + const headers = callStub.firstCall.args[0].headers; + expect(headers['x-request-id']).to.be.undefined; + + api.destroy(); + await wait(10); + }); + + it('uses custom generateRequestId value in the header', async () => { + + const api = new FetchEngine({ + baseUrl: testUrl, + generateRequestId: () => 'trace-abc-123', + requestIdHeader: 'X-Trace-Id' + } as any); + + await api.get('/json'); + + const sentHeader = callStub.firstCall.args[0].headers['x-trace-id']; + expect(sentHeader).to.equal('trace-abc-123'); + + api.destroy(); + await wait(10); + }); + + it('uses per-request requestId when provided', async () => { + + const events: any[] = []; + const api = new FetchEngine({ baseUrl: testUrl }); + + api.on('before-request', (data) => events.push(data)); + + await api.get('/json', { requestId: 'external-trace-xyz' }); + + expect(events).to.have.length(1); + expect(events[0].requestId).to.equal('external-trace-xyz'); + + api.destroy(); + await wait(10); 
+ }); + + it('per-request requestId overrides generateRequestId', async () => { + + const events: any[] = []; + + const api = new FetchEngine({ + baseUrl: testUrl, + generateRequestId: () => 'engine-generated-id' + } as any); + + api.on('before-request', (data) => events.push(data)); + + await api.get('/json', { requestId: 'per-request-override' }); + + expect(events).to.have.length(1); + expect(events[0].requestId).to.equal('per-request-override'); + + api.destroy(); + await wait(10); + }); + + it('per-request requestId flows into requestIdHeader', async () => { + + const api = new FetchEngine({ + baseUrl: testUrl, + requestIdHeader: 'X-Request-Id' + }); + + await api.get('/json', { requestId: 'upstream-trace-id' }); + + const sentHeader = callStub.firstCall.args[0].headers['x-request-id']; + expect(sentHeader).to.equal('upstream-trace-id'); + + api.destroy(); + await wait(10); + }); + + it('falls back to generated ID when per-request requestId is not provided', async () => { + + const events: any[] = []; + const api = new FetchEngine({ baseUrl: testUrl }); + + api.on('before-request', (data) => events.push(data)); + + await api.get('/json'); + + expect(events).to.have.length(1); + expect(events[0].requestId).to.be.a('string'); + expect(events[0].requestId).to.not.equal(''); + + api.destroy(); + await wait(10); + }); + + it('gives deduped joiners their own requestId', async () => { + + let startId: string | undefined; + let joinId: string | undefined; + + const api = new FetchEngine({ + baseUrl: testUrl, + dedupePolicy: true + }); + + api.on('dedupe-start', (data: any) => { startId = data.requestId; }); + api.on('dedupe-join', (data: any) => { joinId = data.requestId; }); + + await Promise.all([ + api.get('/slow-success/100'), + api.get('/slow-success/100') + ]); + + expect(startId).to.be.a('string'); + expect(joinId).to.be.a('string'); + expect(startId).to.not.equal(joinId); + + api.destroy(); + await wait(10); + }); +}); diff --git 
a/tests/src/fetch/engine/streaming.test.ts b/tests/src/fetch/engine/streaming.test.ts new file mode 100644 index 0000000..9fe601e --- /dev/null +++ b/tests/src/fetch/engine/streaming.test.ts @@ -0,0 +1,250 @@ +import { + describe, + it, + expect +} from 'vitest'; + +import { + FetchEngine, + FetchError, +} from '../../../../packages/fetch/src/index.ts'; + +import { attempt, wait } from '../../../../packages/utils/src/index.ts'; +import { makeTestStubs } from '../_helpers.ts'; + + +describe('@logosdx/fetch: streaming', async () => { + + const { testUrl, closeConnections } = await makeTestStubs(4142); + + it('stream GET returns raw Response as data', async () => { + + const api = new FetchEngine({ baseUrl: testUrl }); + + const result = await api.get('/json', { stream: true }); + + expect(result.data).to.be.instanceOf(Response); + expect(result.status).to.equal(200); + + api.destroy(); + await wait(10); + }); + + it('provides readable stream via response.body.getReader()', async () => { + + const api = new FetchEngine({ baseUrl: testUrl }); + + const result = await api.get('/sse', { stream: true }); + const response = result.data; + + expect(response.body).to.not.be.null; + + const reader = response.body!.getReader(); + const decoder = new TextDecoder(); + + const { done, value } = await reader.read(); + + expect(done).to.be.false; + + const text = decoder.decode(value); + + expect(text.length).to.be.greaterThan(0); + + reader.releaseLock(); + api.destroy(); + closeConnections(); + await wait(10); + }); + + it('skips cache for stream requests', async () => { + + const cacheEvents: string[] = []; + + const api = new FetchEngine({ + baseUrl: testUrl, + cachePolicy: { + rules: [{ match: /.*/, ttl: 5000 }] + } + }); + + api.on('cache-hit', () => cacheEvents.push('hit')); + api.on('cache-miss', () => cacheEvents.push('miss')); + api.on('cache-set', () => cacheEvents.push('set')); + + await api.get('/json', { stream: true }); + await api.get('/json', { stream: true }); + + 
expect(cacheEvents).to.have.length(0); + + api.destroy(); + await wait(10); + }); + + it('skips deduplication for stream requests', async () => { + + const dedupeEvents: string[] = []; + + const api = new FetchEngine({ + baseUrl: testUrl, + dedupePolicy: true + }); + + api.on('dedupe-start', () => dedupeEvents.push('start')); + api.on('dedupe-join', () => dedupeEvents.push('join')); + + const [r1, r2] = await Promise.all([ + api.get('/json', { stream: true }), + api.get('/json', { stream: true }) + ]); + + // Each caller should get their own Response object + expect(r1.data).to.not.equal(r2.data); + expect(dedupeEvents).to.have.length(0); + + api.destroy(); + await wait(10); + }); + + it('fires before-request, after-request, and response events', async () => { + + const events: string[] = []; + const api = new FetchEngine({ baseUrl: testUrl }); + + api.on('before-request', () => events.push('before-request')); + api.on('after-request', () => events.push('after-request')); + api.on('response', () => events.push('response')); + + await api.get('/json', { stream: true }); + + expect(events).to.deep.equal(['before-request', 'after-request', 'response']); + + api.destroy(); + await wait(10); + }); + + it('respects AbortController for stream requests', async () => { + + const api = new FetchEngine({ baseUrl: testUrl }); + const controller = new AbortController(); + + // Abort before the request to guarantee fetch rejects + controller.abort(); + + const [, err] = await attempt(() => ( + api.get('/json', { + stream: true, + abortController: controller + }) + )); + + expect(err).to.be.instanceOf(FetchError); + expect((err as FetchError).aborted).to.be.true; + + api.destroy(); + await wait(10); + }); + + it('returns non-ok responses without throwing', async () => { + + const api = new FetchEngine({ + baseUrl: testUrl, + retry: false + }); + + // Stream returns raw Response even for error status codes. + // The consumer checks status themselves. 
+ const result = await api.get('/fail', { stream: true }); + + expect(result.data).to.be.instanceOf(Response); + expect(result.status).to.equal(400); + + api.destroy(); + await wait(10); + }); + + it('stream POST sends payload correctly', async () => { + + const api = new FetchEngine({ baseUrl: testUrl }); + + const result = await api.post( + '/json', + { hello: 'world' }, + { stream: true } + ); + + expect(result.data).to.be.instanceOf(Response); + expect(result.status).to.equal(200); + + api.destroy(); + await wait(10); + }); + + it('still respects rate limiting for stream requests', async () => { + + const rateLimitEvents: string[] = []; + + const api = new FetchEngine({ + baseUrl: testUrl, + rateLimitPolicy: { + maxCalls: 1, + windowMs: 1000 + } + }); + + api.on('ratelimit-acquire', () => rateLimitEvents.push('acquire')); + api.on('ratelimit-wait', () => rateLimitEvents.push('wait')); + + await api.get('/json', { stream: true }); + + expect(rateLimitEvents).to.include('acquire'); + + api.destroy(); + await wait(10); + }); + + it('consumes SSE events from a stream', async () => { + + const api = new FetchEngine({ baseUrl: testUrl }); + + const result = await api.get('/sse/events?count=3', { stream: true }); + const response = result.data; + + const reader = response.body!.getReader(); + const decoder = new TextDecoder(); + let fullText = ''; + + while (true) { + + const { done, value } = await reader.read(); + if (done) break; + + fullText += decoder.decode(value, { stream: true }); + + // Break once we've received all expected events + if (fullText.includes('event-2')) break; + } + + reader.releaseLock(); + + expect(fullText).to.include('event-0'); + expect(fullText).to.include('event-1'); + expect(fullText).to.include('event-2'); + + api.destroy(); + closeConnections(); + await wait(10); + }); + + it('includes response headers on stream responses', async () => { + + const api = new FetchEngine({ baseUrl: testUrl }); + + const result = await api.get('/json', { 
stream: true }); + + expect(result.headers).to.be.an('object'); + expect(result.status).to.equal(200); + + api.destroy(); + await wait(10); + }); +}); From da1c51073da1d759f56fd357b9de0c5ad9237e11 Mon Sep 17 00:00:00 2001 From: Danilo Alonso Date: Wed, 4 Feb 2026 03:03:36 -0500 Subject: [PATCH 13/13] chore: docs --- docs/cheat-sheet.md | 227 ++++++++++++++++++++++++++++++++++++++++--- llm-helpers/fetch.md | 91 +++++++++++++++-- 2 files changed, 294 insertions(+), 24 deletions(-) diff --git a/docs/cheat-sheet.md b/docs/cheat-sheet.md index 8732da0..7f671d1 100644 --- a/docs/cheat-sheet.md +++ b/docs/cheat-sheet.md @@ -86,10 +86,22 @@ const [data, err] = await attempt(() => api.get('/users/123', { headers: { 'X-Include': 'profile' }, params: { include: 'permissions' }, - timeout: 10000, + totalTimeout: 30000, + attemptTimeout: 10000, abortController: new AbortController() }) ); + +// Stream mode — raw Response with unconsumed body +const [streamRes, err] = await attempt(() => + api.get('/events', { stream: true }) +); +const reader = streamRes.data.body.getReader(); + +// Override request ID for distributed tracing +const [traced, err] = await attempt(() => + api.get('/orders', { requestId: upstreamTraceId }) +); ``` @@ -186,12 +198,23 @@ if (api.params.has('version')) { ### Configuration Management ```typescript +// Get config values +const config = api.config.get(); +const baseUrl = api.config.get('baseUrl'); +const maxAttempts = api.config.get('retry.maxAttempts'); + // Change base URL api.config.set('baseUrl', 'https://staging.example.com'); // Change other config at runtime api.config.set('totalTimeout', 60000); api.config.set('retry.maxAttempts', 5); + +// Merge partial config +api.config.set({ + totalTimeout: 60000, + retry: { maxAttempts: 5 } +}); ``` @@ -235,36 +258,71 @@ api.once('response', (event) => { console.log('First response:', event.response); }); -// Listen to all events -api.on('*', (event) => { - console.log(`Event: ${event.type}`); +// Listen to 
all events (regex pattern) +api.on(/./, ({ event, data }) => { + console.log(`Event: ${event}`, data); }); // Remove listener cleanup(); // or api.off('error', callback); - -// Emit custom event -api.emit('custom-event', { data: 'value' }); ``` #### Available Events -- `before-request` - Before request -- `after-request` - After request -- `abort` - Request aborted -- `error` - Request error -- `response` - Response received -- `retry` - Retry attempt +**Request Lifecycle:** +- `before-request` - Before each request attempt +- `after-request` - After response is parsed and ready +- `response` - When raw response is received (before parsing) +- `error` - On request failure +- `retry` - Before retry attempt +- `abort` - When request is aborted + +**Property Changes:** - `header-add` - Header added - `header-remove` - Header removed - `param-add` - Parameter added - `param-remove` - Parameter removed + +**State Changes:** - `state-set` - State updated - `state-reset` - State reset + +**Configuration Changes:** +- `config-change` - Config modified +- `modify-config-change` - modifyConfig function changed +- `modify-method-config-change` - Method-specific modifier changed - `url-change` - Base URL changed +**Deduplication:** +- `dedupe-start` - New request starts tracking +- `dedupe-join` - Caller joins existing request + +**Cache:** +- `cache-hit` - Fresh cache hit +- `cache-stale` - Stale cache hit (SWR) +- `cache-miss` - Cache miss +- `cache-set` - Entry cached +- `cache-revalidate` - SWR revalidation started +- `cache-revalidate-error` - SWR revalidation failed + +**Rate Limiting:** +- `ratelimit-wait` - Waiting for token +- `ratelimit-reject` - Request rejected +- `ratelimit-acquire` - Token acquired + +#### Event Timing + +Terminal events (`response`, `error`, `abort`) include timing data: + +```typescript +api.on('response', (event) => { + const duration = event.requestEnd - event.requestStart; + console.log(`Request completed in ${duration}ms`); +}); +``` + ### 
Error Handling @@ -278,7 +336,14 @@ if (err) { console.log('HTTP Error:', err.status, err.message); console.log('Failed at step:', err.step); console.log('Response data:', err.data); + console.log('Request ID:', err.requestId); console.log('Was aborted:', err.aborted); + console.log('Timed out:', err.timedOut); + + // Convenience methods for 499 errors + if (err.isCancelled()) console.log('User/app cancelled'); + if (err.isTimeout()) console.log('Timeout fired'); + if (err.isConnectionLost()) console.log('Network failed'); } else { console.log('Network error:', err.message); } @@ -287,7 +352,7 @@ if (err) { #### Error Status Codes -- `499` - Request aborted by server +- `499` - Request aborted (user cancel, timeout, or connection lost) - `999` - Error during response parsing @@ -296,6 +361,11 @@ if (err) { ```typescript const api = new FetchEngine({ baseUrl: 'https://api.example.com', + totalTimeout: 30000, + attemptTimeout: 10000, + + // Distributed tracing — auto-inject request ID header + requestIdHeader: 'X-Request-Id', // Method-specific headers methodHeaders: { @@ -355,6 +425,82 @@ const api = new FetchEngine({ ``` +### Resilience Policies + +```typescript +const api = new FetchEngine({ + baseUrl: 'https://api.example.com', + + // Request deduplication — prevent duplicate concurrent requests + dedupePolicy: { + enabled: true, + methods: ['GET'], + rules: [ + { includes: '/realtime', enabled: false } + ] + }, + + // Response caching with stale-while-revalidate + cachePolicy: { + enabled: true, + methods: ['GET'], + ttl: 60000, + staleIn: 30000, + rules: [ + { startsWith: '/static', ttl: 3600000 }, + { startsWith: '/user/me', ttl: 300000 }, + { includes: '/realtime', enabled: false } + ] + }, + + // Rate limiting — protect against overwhelming the API + rateLimitPolicy: { + enabled: true, + maxCalls: 100, + windowMs: 60000, + waitForToken: true, + rules: [ + { startsWith: '/api/search', maxCalls: 10 }, + { startsWith: '/api/bulk', waitForToken: false } + ] + } 
+}); +``` + + +### Cache Management + +```typescript +// Clear all cache entries +api.clearCache(); + +// Invalidate specific path (marks stale for SWR) +api.invalidatePath('/users/123'); + +// Invalidate with predicate +api.invalidateCache((key) => key.includes('/users')); + +// Delete a specific cache entry +api.deleteCache('/users/123'); + +// Get cache statistics +const stats = api.cacheStats(); +``` + + +### Lifecycle Management + +```typescript +// Destroy engine — aborts all pending requests +api.destroy(); + +// Check if destroyed before making requests +if (!api.isDestroyed()) { + await api.get('/users'); +} +``` + + ### TypeScript Module Declaration ```typescript @@ -397,6 +543,43 @@ if (!err && loginResponse) { } ``` +#### Distributed Tracing + +```typescript +const api = new FetchEngine({ + baseUrl: 'https://api.example.com', + requestIdHeader: 'X-Request-Id' +}); + +// Request ID auto-generated and sent as header +api.on('error', (event) => { + errorReporting.captureException(event.error, { + tags: { requestId: event.requestId } + }); +}); + +// Override with upstream trace ID +await api.get('/orders', { requestId: incomingTraceId }); +``` + +#### Stream Mode + +```typescript +// Raw Response with unconsumed body (skips cache/dedupe) +const [sse, err] = await attempt(() => + api.get('/events', { stream: true }) +); + +if (!err) { + const reader = sse.data.body.getReader(); + while (true) { + const { done, value } = await reader.read(); + if (done) break; + console.log(new TextDecoder().decode(value)); + } +} +``` + #### File Upload ```typescript @@ -476,7 +659,8 @@ const observer = new ObserverEngine({ spy: (action) => console.log(action.fn, action.event), emitValidator: (event, data) => { if (!data) throw new Error('Data required') - } + }, + signal: abortController.signal // Auto-cleanup on abort }); ``` @@ -510,6 +694,14 @@ console.log('First login:', loginData.userId); observer.on(/^user:/, ({ event, data }) => { console.log(`User event ${event}:`, 
data); }); + +// With AbortSignal — auto-removes listener on abort +const controller = new AbortController(); +observer.on('user:login', handler, { signal: controller.signal }); +controller.abort(); // Listener removed + +// Once with AbortSignal — rejects promise on abort +const data = await observer.once('user:login', { signal: controller.signal }); ``` @@ -580,6 +772,11 @@ enhanced.on('open', () => { enhanced.emit('open'); enhanced.cleanup(); // Remove component listeners + +// With AbortSignal — auto-cleanup on abort +const controller = new AbortController(); +const component = observer.observe(widget, { signal: controller.signal }); +controller.abort(); // Component listeners auto-cleaned ``` diff --git a/llm-helpers/fetch.md b/llm-helpers/fetch.md index 6dfca89..5749359 100644 --- a/llm-helpers/fetch.md +++ b/llm-helpers/fetch.md @@ -179,12 +179,18 @@ interface FetchError extends Error { status: number; method: HttpMethods; path: string; - aborted?: boolean; // True if request was aborted (any cause) - timedOut?: boolean; // True if aborted due to timeout (attemptTimeout or totalTimeout) + aborted?: boolean; // True if request was aborted (any cause) + timedOut?: boolean; // True if aborted due to timeout (attemptTimeout or totalTimeout) + requestId?: string; // Unique request ID for tracing (consistent across retries) attempt?: number; step?: 'fetch' | 'parse' | 'response'; url?: string; headers?: H; + + // Convenience methods for distinguishing 499 errors + isCancelled(): boolean; // status === 499 && aborted && !timedOut + isTimeout(): boolean; // status === 499 && timedOut + isConnectionLost(): boolean; // status === 499 && step === 'fetch' && !aborted } // Error checking - FetchError is thrown on failure @@ -330,13 +336,53 @@ enum FetchEventNames { 'ratelimit-acquire' = 'ratelimit-acquire' // Token acquired } -// Event listeners -api.on('*', (event) => console.log('Any event:', event.type)); +// Event listeners (use regex to match all events) 
+api.on(/./, ({ event, data }) => console.log('Event:', event, data)); api.on('before-request', (event) => console.log('Request starting:', event.url)); api.on('error', (event) => console.error('Request failed:', event.error)); api.off('error', errorHandler); // remove listener + +// Event timing — terminal events include requestStart/requestEnd +api.on('response', (event) => { + const duration = event.requestEnd - event.requestStart; + console.log(`[${event.requestId}] ${event.method} ${event.path} completed in ${duration}ms`); +}); +``` + +### Event Data Fields + +Request lifecycle events receive `EventData`: + +```typescript +interface EventData<S, H, P> { + state: S; + url?: string | URL; + method?: HttpMethods; + headers?: DictAndT<H>; + params?: DictAndT<P>
; + error?: Error | FetchError; + response?: Response; + data?: unknown; + payload?: unknown; + attempt?: number; + nextAttempt?: number; + delay?: number; + step?: 'fetch' | 'parse' | 'response'; + status?: number; + path?: string; + aborted?: boolean; + requestId?: string; // Unique ID for this request (consistent across retries) + requestStart?: number; // Date.now() when request entered pipeline (all request events) + requestEnd?: number; // Date.now() when request resolved (response, error, abort only) +} ``` +| Field | Present in | Description | +|-------|-----------|-------------| +| `requestStart` | All request events | Timestamp when the request entered the execution pipeline | +| `requestEnd` | `response`, `error`, `abort` | Timestamp when the request resolved | +| `requestId` | All request events | Unique ID, consistent across retries of the same request | + ## Request Deduplication Prevents duplicate concurrent requests by sharing the same in-flight promise among callers with identical request keys. @@ -587,7 +633,7 @@ When deduplicating, each caller can have independent timeout/abort constraints: ```typescript // Caller A: 10s timeout -const promiseA = api.get('/slow', { timeout: 10000 }); +const promiseA = api.get('/slow', { totalTimeout: 10000 }); // Caller B: 2s timeout (joins A's request) const promiseB = api.get('/slow', { totalTimeout: 2000 }); @@ -678,6 +724,34 @@ if (isFetchError(err)) { | Server closed connection | 499 | `false` | `undefined` | `'fetch'` | | Network error | 499 | `false` | `undefined` | `'fetch'` | +## Stream Mode + +Return raw `Response` objects with unconsumed body streams. Cache and deduplication are skipped (each caller needs its own readable stream). Rate limiting and lifecycle events still fire normally. 
+ +```typescript +// Stream mode — raw Response with unconsumed body +const [sse, err] = await attempt(() => + api.get('/events', { stream: true }) +); + +if (!err) { + const reader = sse.data.body.getReader(); + while (true) { + const { done, value } = await reader.read(); + if (done) break; + console.log(new TextDecoder().decode(value)); + } +} + +// Works with all HTTP methods +const [response, err] = await attempt(() => + api.post('/upload-stream', largePayload, { stream: true }) +); + +// Type signature: when stream: true, data is Response +// api.get(path, { stream: true }): AbortablePromise> +``` + ## Advanced Features ```typescript @@ -758,13 +832,12 @@ api.config.set('modifyMethodConfig', { POST: undefined }); // Per-request options const [response, err] = await attempt(() => api.get('/users', { - timeout: 10000, + totalTimeout: 30000, + attemptTimeout: 10000, headers: { 'X-Request-ID': '123' }, params: { include: 'profile' }, requestId: 'upstream-trace-id', // Override auto-generated request ID - onBeforeReq: (opts) => console.log('Making request:', opts), - onAfterReq: (response) => console.log('Response:', response.status), - onError: (error) => console.error('Error:', error), + stream: false, // Set true for raw Response with unconsumed body retry: { maxAttempts: 5 } }) );