From b4308f48a9911e59ccad8409b8b9256cd42a077e Mon Sep 17 00:00:00 2001 From: Claude Code Date: Mon, 23 Feb 2026 17:48:54 +0700 Subject: [PATCH] fix(core): fix 6 bugs found in codebase audit - remove duplicate AnalyticsEngineDataPoint from types/realtime.ts; realtime adapter now imports from types/index.js (Bug 1) - guard stdDev === 0 in anomaly detection to prevent Infinity zScore (Bug 2) - replace busy-wait retry loop with async setTimeout; make writeDataPoint async to avoid blocking V8 event loop in edge runtime (Bug 3) - add countProjects() and return real DB total in project list response instead of current page length (Bug 4) - add radix 10 and NaN fallback to parseInt in projects route (Bug 5) - add .js extension to all bare local imports in realtime files (Bug 6) Co-Authored-By: duyetbot --- src/adapters/realtime.ts | 8 +- src/durable-objects/realtime-aggregator.ts | 6 +- src/routes/projects.ts | 21 +++-- src/routes/realtime.ts | 6 +- src/services/analytics-engine.ts | 22 +++--- src/services/analytics-query.ts | 24 +++--- src/services/project.ts | 12 +++ src/services/self-tracking.ts | 4 +- src/types/realtime.ts | 10 --- src/utils/fingerprint.ts | 2 +- src/utils/route-handler.ts | 8 +- test/e2e/projects-api.test.ts | 80 +++++++++++++++++++ test/unit/services/analytics-engine.test.ts | 87 +++++++++++++-------- test/unit/services/analytics-query.test.ts | 33 ++++++++ test/unit/services/project.test.ts | 38 +++++++++ 15 files changed, 274 insertions(+), 87 deletions(-) diff --git a/src/adapters/realtime.ts b/src/adapters/realtime.ts index ed46ce9..a1191cc 100644 --- a/src/adapters/realtime.ts +++ b/src/adapters/realtime.ts @@ -1,12 +1,12 @@ -import { BaseAdapter } from './base'; -import type { AnalyticsEngineDataPoint } from '../types/realtime'; +import { BaseAdapter } from './base.js'; +import type { AnalyticsEngineDataPoint } from '../types/index.js'; import { realtimeEventSchema, type RealtimeEvent, type ServerContext, } from '../schemas/index.js'; -import { 
parseUserAgent } from '../utils/user-agent-parser'; -import { detectBot } from '../utils/bot-detection'; +import { parseUserAgent } from '../utils/user-agent-parser.js'; +import { detectBot } from '../utils/bot-detection.js'; /** * Adapter for real-time analytics events diff --git a/src/durable-objects/realtime-aggregator.ts b/src/durable-objects/realtime-aggregator.ts index e2f34fe..3d31cfb 100644 --- a/src/durable-objects/realtime-aggregator.ts +++ b/src/durable-objects/realtime-aggregator.ts @@ -2,9 +2,9 @@ import type { RealtimeEvent, RealtimeStats, AggregatedData, -} from '../types/realtime'; -import { parseUserAgent } from '../utils/user-agent-parser'; -import { detectBot } from '../utils/bot-detection'; +} from '../types/realtime.js'; +import { parseUserAgent } from '../utils/user-agent-parser.js'; +import { detectBot } from '../utils/bot-detection.js'; /** * Durable Object for real-time analytics aggregation diff --git a/src/routes/projects.ts b/src/routes/projects.ts index 0af94e3..0a61444 100644 --- a/src/routes/projects.ts +++ b/src/routes/projects.ts @@ -10,8 +10,11 @@ import { createProject, listProjects, getProject, + countProjects, } from '../services/project.js'; +const MAX_LIMIT = 1000; + /** * Create and configure projects API router */ @@ -68,15 +71,23 @@ export function createProjectsRouter(): Hono<{ Bindings: Env }> { */ app.get('/', async (c) => { try { - const limit = parseInt(c.req.query('limit') || '100'); - const offset = parseInt(c.req.query('offset') || '0'); - - const projects = await listProjects(c.env.DB, limit, offset); + const rawLimit = parseInt(c.req.query('limit') ?? '', 10); + const limit = + Number.isNaN(rawLimit) || rawLimit < 0 + ? 100 + : Math.min(rawLimit, MAX_LIMIT); + const rawOffset = parseInt(c.req.query('offset') ?? '', 10); + const offset = Number.isNaN(rawOffset) || rawOffset < 0 ? 
0 : rawOffset; + + const [projects, total] = await Promise.all([ + listProjects(c.env.DB, limit, offset), + countProjects(c.env.DB), + ]); const response: ProjectListResponse = { success: true, projects, - total: projects.length, + total, }; return c.json(response); diff --git a/src/routes/realtime.ts b/src/routes/realtime.ts index 92ebb07..68fb867 100644 --- a/src/routes/realtime.ts +++ b/src/routes/realtime.ts @@ -1,7 +1,7 @@ import { Hono } from 'hono'; -import type { Env } from '../types/index'; -import { RealtimeAdapter } from '../adapters/realtime'; -import type { ServerContext, RealtimeEvent } from '../types/realtime'; +import type { Env } from '../types/index.js'; +import { RealtimeAdapter } from '../adapters/realtime.js'; +import type { ServerContext, RealtimeEvent } from '../types/realtime.js'; const realtimeRouter = new Hono<{ Bindings: Env }>(); const adapter = new RealtimeAdapter(); diff --git a/src/services/analytics-engine.ts b/src/services/analytics-engine.ts index af02822..7bb9a73 100644 --- a/src/services/analytics-engine.ts +++ b/src/services/analytics-engine.ts @@ -16,12 +16,12 @@ export class AnalyticsEngineService { * Write data point to Analytics Engine dataset with retry logic * @returns Success status */ - writeDataPoint( + async writeDataPoint( env: Env, datasetName: keyof Env, adapter: DataAdapter, rawData: unknown - ): { success: boolean; error?: string } { + ): Promise<{ success: boolean; error?: string }> { try { // Validate input data if (!adapter.validate(rawData)) { @@ -42,7 +42,11 @@ export class AnalyticsEngineService { } // Write data point with retry logic - const writeResult = this.writeWithRetry(dataset, dataPoint, datasetName); + const writeResult = await this.writeWithRetry( + dataset, + dataPoint, + datasetName + ); if (!writeResult.success) { return writeResult; } @@ -75,11 +79,11 @@ export class AnalyticsEngineService { /** * Write data point with exponential backoff retry logic */ - private writeWithRetry( + private 
async writeWithRetry( dataset: AnalyticsEngineDataset, dataPoint: AnalyticsEngineDataPoint, datasetName: keyof Env - ): { success: boolean; error?: string } { + ): Promise<{ success: boolean; error?: string }> { let lastError: Error | undefined; for ( @@ -115,12 +119,8 @@ export class AnalyticsEngineService { } ); - // Exponential backoff delay (synchronous for simplicity in edge runtime) - // Note: In real-world edge runtime, this would be async, but for testing we keep it sync - const start = Date.now(); - while (Date.now() - start < delay) { - // Busy wait for delay - } + // Async exponential backoff — avoids blocking the V8 event loop + await new Promise((r) => setTimeout(r, delay)); } } } diff --git a/src/services/analytics-query.ts b/src/services/analytics-query.ts index b5260dd..39d6578 100644 --- a/src/services/analytics-query.ts +++ b/src/services/analytics-query.ts @@ -377,17 +377,19 @@ export class AnalyticsQueryService { allValues.length ); - timeseries.forEach((point) => { - const zScore = Math.abs((point.value - mean) / stdDev); - if (zScore > 2.5 && allValues.length > 10) { - anomalies.push({ - timestamp: point.timestamp, - description: `Unusual activity detected: ${Math.round(point.value)} events`, - severity: zScore > 3 ? 'high' : 'medium', - value: point.value, - }); - } - }); + if (stdDev > 0) { + timeseries.forEach((point) => { + const zScore = Math.abs((point.value - mean) / stdDev); + if (zScore > 2.5 && allValues.length > 10) { + anomalies.push({ + timestamp: point.timestamp, + description: `Unusual activity detected: ${Math.round(point.value)} events`, + severity: zScore > 3 ? 
'high' : 'medium',
+ value: point.value,
+ });
+ }
+ });
+ }

 // Generate recommendations
 if (trends.length > 0 && trends[0]?.direction === 'up') {
diff --git a/src/services/project.ts b/src/services/project.ts
index e4c6b58..44831f9 100644
--- a/src/services/project.ts
+++ b/src/services/project.ts
@@ -121,6 +121,18 @@ export async function listProjects(
 return results || [];
 }

+/**
+ * Count total number of projects
+ * @param db D1 database binding
+ * @returns Total project count
+ */
+export async function countProjects(db: D1Database): Promise<number> {
+ const result = await db
+ .prepare('SELECT COUNT(*) as count FROM projects')
+ .first<{ count: number }>();
+ return result?.count ?? 0;
+}
+
 /**
 * Update last_used timestamp for a project
 * @param db D1 database binding
diff --git a/src/services/self-tracking.ts b/src/services/self-tracking.ts
index 2ca99f4..dbc1699 100644
--- a/src/services/self-tracking.ts
+++ b/src/services/self-tracking.ts
@@ -74,8 +74,8 @@ export class SelfTrackingService {
 }

 // Create async write operation
- const writePromise = Promise.resolve().then(() => {
- const result = this.analyticsService.writeDataPoint(
+ const writePromise = Promise.resolve().then(async () => {
+ const result = await this.analyticsService.writeDataPoint(
 env,
 'SELF_TRACKING_ANALYTICS',
 this.adapter,
diff --git a/src/types/realtime.ts b/src/types/realtime.ts
index 291ddf1..f749042 100644
--- a/src/types/realtime.ts
+++ b/src/types/realtime.ts
@@ -248,13 +248,3 @@ export interface AggregatedData {
 visitor_id?: string;
 }>;
 }
-
-/**
- * Analytics Engine data point format
- * Reference: https://developers.cloudflare.com/analytics/analytics-engine/
- */
-export interface AnalyticsEngineDataPoint {
- indexes?: string[]; // Max 1 index, max 96 bytes each
- doubles?: number[]; // Numeric values
- blobs?: string[]; // String values, max 5120 bytes each
-}
diff --git a/src/utils/fingerprint.ts b/src/utils/fingerprint.ts
index 68f0143..ef06802 100644
---
a/src/utils/fingerprint.ts
+++ b/src/utils/fingerprint.ts
@@ -1,4 +1,4 @@
-import type { FingerprintComponents, Fingerprint } from '../types/realtime';
+import type { FingerprintComponents, Fingerprint } from '../types/realtime.js';

 /**
 * Generate a privacy-respecting fingerprint from browser components
diff --git a/src/utils/route-handler.ts b/src/utils/route-handler.ts
index 06bd007..0a4d25c 100644
--- a/src/utils/route-handler.ts
+++ b/src/utils/route-handler.ts
@@ -11,14 +11,14 @@ export function createAnalyticsHandler(
 adapter: DataAdapter,
 analyticsService: AnalyticsEngineService
 ): {
- handleGet: (c: Context<{ Bindings: Env }>) => Response;
+ handleGet: (c: Context<{ Bindings: Env }>) => Promise<Response>;
 handlePost: (c: Context<{ Bindings: Env }>) => Promise<Response>;
 } {
 return {
 /**
 * Handle GET requests with query parameters
 */
- handleGet: (c: Context<{ Bindings: Env }>): Response => {
+ handleGet: async (c: Context<{ Bindings: Env }>): Promise<Response> => {
 const rawData = c.req.query() as Record<string, string>;
 const projectId = c.get('project_id');

@@ -33,7 +33,7 @@
 const dataWithProject = projectId
 ? { ...rawData, project_id: projectId }
 : rawData;

- const result = analyticsService.writeDataPoint(
+ const result = await analyticsService.writeDataPoint(
 c.env,
 dataset,
 adapter,
@@ -95,7 +95,7 @@
 projectId && !Array.isArray(rawData)
 ?
{ ...rawData, project_id: projectId } : rawData; - const result = analyticsService.writeDataPoint( + const result = await analyticsService.writeDataPoint( c.env, dataset, adapter, diff --git a/test/e2e/projects-api.test.ts b/test/e2e/projects-api.test.ts index e059791..2836d50 100644 --- a/test/e2e/projects-api.test.ts +++ b/test/e2e/projects-api.test.ts @@ -240,6 +240,7 @@ describe('Projects API E2E', () => { ]; statement.all.mockResolvedValueOnce({ results: mockProjects }); + statement.first.mockResolvedValueOnce({ count: 3 }); const app = createRouter(); const request = new Request('http://localhost/api/project'); @@ -260,6 +261,7 @@ describe('Projects API E2E', () => { it('should handle empty project list', async () => { statement.all.mockResolvedValueOnce({ results: [] }); + statement.first.mockResolvedValueOnce({ count: 0 }); const app = createRouter(); const request = new Request('http://localhost/api/project'); @@ -277,6 +279,37 @@ describe('Projects API E2E', () => { } }); + it('should return DB total, not page size (pagination test)', async () => { + // Page returns 1 project but DB has 10 total + statement.all.mockResolvedValueOnce({ + results: [ + { + id: 'proj2', + description: 'Project 2', + created_at: 2, + last_used: null, + }, + ], + }); + statement.first.mockResolvedValueOnce({ count: 10 }); + + const app = createRouter(); + const request = new Request( + 'http://localhost/api/project?limit=1&offset=1' + ); + + const response = await app.fetch(request, env); + const data = (await response.json()) as + | ProjectListResponse + | ErrorResponse; + + expect(response.status).toBe(200); + if ('success' in data && data.success) { + expect(data.projects).toHaveLength(1); + expect(data.total).toBe(10); // DB total, not page size + } + }); + it('should support pagination with limit and offset', async () => { statement.all.mockResolvedValueOnce({ results: [ @@ -288,6 +321,7 @@ describe('Projects API E2E', () => { }, ], }); + 
statement.first.mockResolvedValueOnce({ count: 1 }); const app = createRouter(); const request = new Request( @@ -306,6 +340,52 @@ describe('Projects API E2E', () => { expect(statement.bind).toHaveBeenCalledWith(1, 1); }); + it('should use default limit=100 and offset=0 for NaN params', async () => { + statement.all.mockResolvedValueOnce({ results: [] }); + statement.first.mockResolvedValueOnce({ count: 0 }); + + const app = createRouter(); + const request = new Request( + 'http://localhost/api/project?limit=abc&offset=xyz' + ); + + const response = await app.fetch(request, env); + + expect(response.status).toBe(200); + // NaN should fall back to defaults: limit=100, offset=0 + expect(statement.bind).toHaveBeenCalledWith(100, 0); + }); + + it('should clamp negative limit to default and negative offset to 0', async () => { + statement.all.mockResolvedValueOnce({ results: [] }); + statement.first.mockResolvedValueOnce({ count: 0 }); + + const app = createRouter(); + const request = new Request( + 'http://localhost/api/project?limit=-10&offset=-5' + ); + + const response = await app.fetch(request, env); + + expect(response.status).toBe(200); + // Negative limit should default to 100, negative offset should be 0 + expect(statement.bind).toHaveBeenCalledWith(100, 0); + }); + + it('should enforce MAX_LIMIT of 1000', async () => { + statement.all.mockResolvedValueOnce({ results: [] }); + statement.first.mockResolvedValueOnce({ count: 0 }); + + const app = createRouter(); + const request = new Request('http://localhost/api/project?limit=9999'); + + const response = await app.fetch(request, env); + + expect(response.status).toBe(200); + // Limit above MAX_LIMIT should be clamped to 1000 + expect(statement.bind).toHaveBeenCalledWith(1000, 0); + }); + it('should return 500 on database error', async () => { statement.all.mockRejectedValueOnce( new Error('Database connection failed') diff --git a/test/unit/services/analytics-engine.test.ts 
b/test/unit/services/analytics-engine.test.ts index 756e769..74c60ad 100644 --- a/test/unit/services/analytics-engine.test.ts +++ b/test/unit/services/analytics-engine.test.ts @@ -1,4 +1,4 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; +import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest'; import { AnalyticsEngineService } from '../../../src/services/analytics-engine.js'; import type { Env, @@ -48,13 +48,17 @@ describe('AnalyticsEngineService', () => { mockDataset.writeDataPoint.mockClear(); }); + afterEach(() => { + vi.useRealTimers(); + }); + describe('writeDataPoint', () => { - it('should write valid data to Analytics Engine', () => { + it('should write valid data to Analytics Engine', async () => { const rawData = { test: 'value' }; const validateSpy = vi.spyOn(mockAdapter, 'validate'); const transformSpy = vi.spyOn(mockAdapter, 'transform'); - const result = service.writeDataPoint( + const result = await service.writeDataPoint( mockEnv, 'CLAUDE_CODE_ANALYTICS', mockAdapter, @@ -76,7 +80,7 @@ describe('AnalyticsEngineService', () => { transformSpy.mockRestore(); }); - it('should return error for invalid data', () => { + it('should return error for invalid data', async () => { const invalidAdapter: DataAdapter = { validate(_data: unknown): _data is unknown { return false; @@ -89,7 +93,7 @@ describe('AnalyticsEngineService', () => { const validateSpy = vi.spyOn(invalidAdapter, 'validate'); const transformSpy = vi.spyOn(invalidAdapter, 'transform'); - const result = service.writeDataPoint( + const result = await service.writeDataPoint( mockEnv, 'CLAUDE_CODE_ANALYTICS', invalidAdapter, @@ -106,10 +110,10 @@ describe('AnalyticsEngineService', () => { transformSpy.mockRestore(); }); - it('should return error for missing dataset binding', () => { + it('should return error for missing dataset binding', async () => { const emptyEnv = {} as Env; - const result = service.writeDataPoint( + const result = await service.writeDataPoint( 
emptyEnv, 'CLAUDE_CODE_ANALYTICS', mockAdapter, @@ -122,10 +126,10 @@ describe('AnalyticsEngineService', () => { ); }); - it('should handle GA_ANALYTICS dataset', () => { + it('should handle GA_ANALYTICS dataset', async () => { const rawData = { test: 'value' }; - const result = service.writeDataPoint( + const result = await service.writeDataPoint( mockEnv, 'GA_ANALYTICS', mockAdapter, @@ -136,7 +140,7 @@ describe('AnalyticsEngineService', () => { expect(mockDataset.writeDataPoint).toHaveBeenCalled(); }); - it('should validate before transforming', () => { + it('should validate before transforming', async () => { const callOrder: string[] = []; const orderedAdapter: DataAdapter = { @@ -150,7 +154,7 @@ describe('AnalyticsEngineService', () => { }, }; - const result = service.writeDataPoint( + const result = await service.writeDataPoint( mockEnv, 'CLAUDE_CODE_ANALYTICS', orderedAdapter, @@ -161,7 +165,9 @@ describe('AnalyticsEngineService', () => { expect(callOrder).toEqual(['validate', 'transform']); }); - it('should retry on write failure with exponential backoff', () => { + it('should retry on write failure with exponential backoff', async () => { + vi.useFakeTimers(); + // Mock writeDataPoint to fail twice then succeed mockDataset.writeDataPoint .mockImplementationOnce(() => { @@ -182,13 +188,19 @@ describe('AnalyticsEngineService', () => { rawData ); - expect(result.success).toBe(true); - expect(result.error).toBeUndefined(); + // Fast-forward through all retry delays + await vi.runAllTimersAsync(); + const awaitedResult = await result; + + expect(awaitedResult.success).toBe(true); + expect(awaitedResult.error).toBeUndefined(); // Should be called 3 times (2 failures + 1 success) expect(mockDataset.writeDataPoint).toHaveBeenCalledTimes(3); }); - it('should return error after max retries exhausted', () => { + it('should return error after max retries exhausted', async () => { + vi.useFakeTimers(); + // Mock writeDataPoint to always fail 
mockDataset.writeDataPoint.mockImplementation(() => { throw new Error('Persistent failure'); @@ -202,13 +214,17 @@ describe('AnalyticsEngineService', () => { rawData ); - expect(result.success).toBe(false); - expect(result.error).toBe('Persistent failure'); + // Fast-forward through all retry delays + await vi.runAllTimersAsync(); + const awaitedResult = await result; + + expect(awaitedResult.success).toBe(false); + expect(awaitedResult.error).toBe('Persistent failure'); // Should be called 3 times (max 2 retries + initial attempt) expect(mockDataset.writeDataPoint).toHaveBeenCalledTimes(3); }); - it('should handle non-Error exceptions', () => { + it('should handle non-Error exceptions', async () => { // Mock writeDataPoint to throw non-Error object mockDataset.writeDataPoint.mockImplementation(() => { // eslint-disable-next-line @typescript-eslint/only-throw-error @@ -216,7 +232,7 @@ describe('AnalyticsEngineService', () => { }); const rawData = { test: 'value' }; - const result = service.writeDataPoint( + const result = await service.writeDataPoint( mockEnv, 'CLAUDE_CODE_ANALYTICS', mockAdapter, @@ -227,7 +243,7 @@ describe('AnalyticsEngineService', () => { expect(result.error).toBe('String error'); }); - it('should catch unexpected errors during validation', () => { + it('should catch unexpected errors during validation', async () => { const faultyAdapter: DataAdapter = { validate(_data: unknown): _data is unknown { throw new Error('Validation crashed'); @@ -237,7 +253,7 @@ describe('AnalyticsEngineService', () => { }, }; - const result = service.writeDataPoint( + const result = await service.writeDataPoint( mockEnv, 'CLAUDE_CODE_ANALYTICS', faultyAdapter, @@ -249,7 +265,7 @@ describe('AnalyticsEngineService', () => { expect(mockDataset.writeDataPoint).not.toHaveBeenCalled(); }); - it('should catch unexpected errors during transformation', () => { + it('should catch unexpected errors during transformation', async () => { const faultyAdapter: DataAdapter = { 
validate(_data: unknown): _data is unknown {
 return true;
@@ -259,7 +275,7 @@
 },
 };

- const result = service.writeDataPoint(
+ const result = await service.writeDataPoint(
 mockEnv,
 'CLAUDE_CODE_ANALYTICS',
 faultyAdapter,
@@ -271,9 +287,9 @@
 expect(mockDataset.writeDataPoint).not.toHaveBeenCalled();
 });

- it('should succeed on first attempt without retries', () => {
+ it('should succeed on first attempt without retries', async () => {
 const rawData = { test: 'value' };
- const result = service.writeDataPoint(
+ const result = await service.writeDataPoint(
 mockEnv,
 'CLAUDE_CODE_ANALYTICS',
 mockAdapter,
@@ -285,7 +301,7 @@
 expect(mockDataset.writeDataPoint).toHaveBeenCalledTimes(1);
 });

- it('should retry exactly once before succeeding', () => {
+ it('should retry exactly once before succeeding', async () => {
 // Mock writeDataPoint to fail once then succeed
 mockDataset.writeDataPoint
 .mockImplementationOnce(() => {
@@ -296,7 +312,7 @@
 });

 const rawData = { test: 'value' };
- const result = service.writeDataPoint(
+ const result = await service.writeDataPoint(
 mockEnv,
 'CLAUDE_CODE_ANALYTICS',
 mockAdapter,
@@ -308,7 +324,7 @@
 expect(mockDataset.writeDataPoint).toHaveBeenCalledTimes(2);
 });

- it('should handle all dataset types', () => {
+ it('should handle all dataset types', async () => {
 const datasets: Array<keyof Env> = [
 'CLAUDE_CODE_ANALYTICS',
 'CLAUDE_CODE_LOGS',
@@ -319,16 +335,21 @@
 'SENTRY_ANALYTICS',
 ];

- datasets.forEach((dataset) => {
+ for (const dataset of datasets) {
 mockDataset.writeDataPoint.mockClear();
- const result = service.writeDataPoint(mockEnv, dataset, mockAdapter, {
- test: 'value',
- });
+ const result = await service.writeDataPoint(
+ mockEnv,
+ dataset,
+ mockAdapter,
+ {
+ test: 'value',
+ }
+
); expect(result.success).toBe(true); expect(mockDataset.writeDataPoint).toHaveBeenCalledTimes(1); - }); + } }); }); }); diff --git a/test/unit/services/analytics-query.test.ts b/test/unit/services/analytics-query.test.ts index ee023af..23c9a50 100644 --- a/test/unit/services/analytics-query.test.ts +++ b/test/unit/services/analytics-query.test.ts @@ -363,6 +363,39 @@ describe('AnalyticsQueryService', () => { expect(result.insights.trends[0]?.metric).toBe('event_volume'); }); + it('should not detect anomalies when all values are identical (stdDev = 0)', async () => { + const mockEnvWithCreds = { + ...mockEnv, + CLOUDFLARE_ACCOUNT_ID: 'test-account', + CLOUDFLARE_API_TOKEN: 'test-token', + }; + + const params: AnalyticsQueryParams = { + dataset: 'CLAUDE_CODE_METRICS', + }; + + // All values identical → stdDev = 0 → division-by-zero risk + const mockData = Array.from({ length: 15 }, (_, i) => ({ + timestamp: new Date(Date.now() + i * 60000).toISOString(), + index1: 'project1', + blob1: '{}', + double1: 100, // constant value + _sample_interval: 1, + })); + + const mockResponse = mockData.map((d) => JSON.stringify(d)).join('\n'); + + global.fetch = vi.fn().mockResolvedValue({ + ok: true, + text: vi.fn().mockResolvedValue(mockResponse), + }); + + const result = await service.getInsights(mockEnvWithCreds as any, params); + + // No anomalies should be reported when stdDev is 0 + expect(result.insights.anomalies).toHaveLength(0); + }); + it('should detect anomalies correctly', async () => { const mockEnvWithCreds = { ...mockEnv, diff --git a/test/unit/services/project.test.ts b/test/unit/services/project.test.ts index 8e34881..2afc3f2 100644 --- a/test/unit/services/project.test.ts +++ b/test/unit/services/project.test.ts @@ -6,6 +6,7 @@ import { createProject, getProject, listProjects, + countProjects, updateLastUsed, } from '../../../src/services/project.js'; import type { ProjectCreateRequest } from '../../../src/types/index.js'; @@ -304,6 +305,43 @@ 
describe('listProjects', () => { }); }); +describe('countProjects', () => { + it('should return total project count', async () => { + const { db, mockFirst } = createMockD1Database(); + mockFirst.mockResolvedValue({ count: 42 }); + + const count = await countProjects(db); + expect(count).toBe(42); + }); + + it('should return 0 when no projects exist', async () => { + const { db, mockFirst } = createMockD1Database(); + mockFirst.mockResolvedValue({ count: 0 }); + + const count = await countProjects(db); + expect(count).toBe(0); + }); + + it('should return 0 when result is null', async () => { + const { db, mockFirst } = createMockD1Database(); + mockFirst.mockResolvedValue(null); + + const count = await countProjects(db); + expect(count).toBe(0); + }); + + it('should use correct SQL COUNT query', async () => { + const { db, mockPrepare, mockFirst } = createMockD1Database(); + mockFirst.mockResolvedValue({ count: 5 }); + + await countProjects(db); + + expect(mockPrepare).toHaveBeenCalledWith( + 'SELECT COUNT(*) as count FROM projects' + ); + }); +}); + describe('updateLastUsed', () => { it('should update last_used timestamp', async () => { const { db, mockBind, mockRun } = createMockD1Database();