Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
36 commits
Select commit Hold shift + click to select a range
54e99fb
Migration feature - Phase 1 - comparison between the target and sourc…
KIvanow Mar 26, 2026
55bd637
Fixed roborev findings
KIvanow Mar 26, 2026
5298ec2
Phase 2 - migration with RedisShake and proprietary key by key batch …
KIvanow Mar 27, 2026
c42cac6
Phase 3 - validation post migration
KIvanow Mar 27, 2026
8c8cb57
roborev fixes
KIvanow Mar 27, 2026
9023ead
Info tooltips for the analysis after migration
KIvanow Mar 27, 2026
9762f51
added testing
KIvanow Mar 27, 2026
2172876
fixed e2e test
KIvanow Mar 27, 2026
f8b3ce5
fixed e2e test
KIvanow Mar 27, 2026
5f0685d
UI/UX updates
KIvanow Mar 30, 2026
852c250
Fixed PR feedback
KIvanow Mar 31, 2026
48ce84f
Fixed PR feedback
KIvanow Mar 31, 2026
ea3acb0
Merge remote-tracking branch 'origin/master' into feature-migration
KIvanow Mar 31, 2026
91fdce1
Fix review feedback: eviction ordering, credential sanitization, stre…
KIvanow Mar 31, 2026
d1a6be4
Fix Claude Code review action to post inline comments
KIvanow Mar 31, 2026
7c6fc17
Fix totalKeys falsy check and credential sanitization regex
KIvanow Mar 31, 2026
4a88b32
Fix claude-review action: set base_branch to master, fix PR number fo…
KIvanow Mar 31, 2026
b27035e
Address Claude code review feedback
KIvanow Mar 31, 2026
ebf3b2d
Fix tests for TTL sampler skip behavior and cancelled validation status
KIvanow Mar 31, 2026
0988deb
Fix critical and high severity issues from Claude code review
KIvanow Mar 31, 2026
a532614
Fix TTL sampler denominator, HFE false-negative, sample validator, an…
KIvanow Mar 31, 2026
9ebe03a
Fix Dockerfile healthcheck, atomic string TTL, binary-safe validation…
KIvanow Mar 31, 2026
b4d43f4
Fix binary data corruption in sorted-set and stream migration
KIvanow Mar 31, 2026
4fe1529
Fix IPv6 TOML bracketing, multi-word password redaction, and binary-s…
KIvanow Mar 31, 2026
e6d8187
Fix 7 review issues: binary-safe validation, atomic migration, job li…
KIvanow Mar 31, 2026
10d0f8e
Fix 5 review findings: community-tier guard, key count edge case, rac…
KIvanow Mar 31, 2026
19b5a23
Fix cluster fail-master filtering, CROSSSLOT temp keys, stuck-job han…
KIvanow Mar 31, 2026
afce537
Support cluster target in command-mode migration and validation
KIvanow Mar 31, 2026
c5f3b73
cleanup
KIvanow Mar 31, 2026
31ad55d
Add analysis compatibility tests to migration topology e2e
KIvanow Apr 1, 2026
22dea6d
Mock Pro license in topology e2e so execution tests actually run
KIvanow Apr 1, 2026
a998853
Add migration feature documentation
KIvanow Apr 1, 2026
c2a53ad
Fix 5 review findings: pttl===0, catch scope, regex, hash compare, HF…
KIvanow Apr 1, 2026
f9dccdb
Fix cluster fail-master filtering, CROSSSLOT temp keys, stuck-job han…
KIvanow Apr 1, 2026
a6c87fd
Resolved conflicts with master
KIvanow Apr 1, 2026
f2bcab9
Fix migration e2e tests: use DB_PORT instead of hardcoded 6380
KIvanow Apr 1, 2026
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 10 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,16 @@ pnpm-debug.log*
.env.test.local
.env.production.local

# Build info
*.tsbuildinfo

# Claude Code
.claude/settings.local.json

# BetterDB context
.betterdb_context.md
**/.betterdb_context.md

# Turbo
.turbo

Expand Down
12 changes: 12 additions & 0 deletions Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -76,6 +76,18 @@ ENV NODE_ENV=production
ENV PORT=3001
ENV STORAGE_TYPE=memory

# Install RedisShake binary for migration execution (with checksum verification)
ARG TARGETARCH
ARG REDISSHAKE_VERSION=4.6.0
# Checksums are pinned per architecture. Fail the build explicitly for any
# architecture we have no checksum for, instead of the previous if/else which
# silently verified every non-amd64 arch against the arm64 checksum.
# NOTE: GNU sha256sum -c requires TWO characters between hash and filename
# ("HASH  FILE"); a single space is rejected as "no properly formatted lines".
RUN REDISSHAKE_SHA256_AMD64="6ccab1ff2ba3c200950f8ada811f0c6fe6e2f5e6bd3b8e92b4d9444dc0aff4df" && \
    REDISSHAKE_SHA256_ARM64="653298efa83ef3d495ae2ec21b40c773f36eb15e507f8b3f2931660509d09690" && \
    case "${TARGETARCH}" in \
        amd64) EXPECTED_SHA256="${REDISSHAKE_SHA256_AMD64}" ;; \
        arm64) EXPECTED_SHA256="${REDISSHAKE_SHA256_ARM64}" ;; \
        *) echo "Unsupported TARGETARCH: ${TARGETARCH}" >&2; exit 1 ;; \
    esac && \
    wget -qO /tmp/redis-shake.tar.gz "https://github.com/tair-opensource/RedisShake/releases/download/v${REDISSHAKE_VERSION}/redis-shake-v${REDISSHAKE_VERSION}-linux-${TARGETARCH}.tar.gz" && \
    echo "${EXPECTED_SHA256}  /tmp/redis-shake.tar.gz" | sha256sum -c - && \
    tar -xzf /tmp/redis-shake.tar.gz -C /usr/local/bin ./redis-shake && \
    chmod +x /usr/local/bin/redis-shake && \
    rm /tmp/redis-shake.tar.gz

# Create non-root user for security (Docker Scout compliance)
RUN addgroup --system --gid 1001 nodejs && \
adduser --system --uid 1001 --ingroup nodejs betterdb
Expand Down
14 changes: 14 additions & 0 deletions Dockerfile.prod
Original file line number Diff line number Diff line change
Expand Up @@ -154,6 +154,20 @@ ENV DB_USERNAME=default
ENV STORAGE_TYPE=memory
ENV AI_ENABLED=false

# Install RedisShake binary for migration execution (with checksum verification)
ARG TARGETARCH
ARG REDISSHAKE_VERSION=4.6.0
# Checksums are pinned per architecture. Fail the build explicitly for any
# architecture we have no checksum for, instead of the previous if/else which
# silently verified every non-amd64 arch against the arm64 checksum.
# The checksum line uses two spaces between hash and filename — required by
# GNU sha256sum's check format and accepted by busybox as well.
RUN apk add --no-cache wget && \
    REDISSHAKE_SHA256_AMD64="6ccab1ff2ba3c200950f8ada811f0c6fe6e2f5e6bd3b8e92b4d9444dc0aff4df" && \
    REDISSHAKE_SHA256_ARM64="653298efa83ef3d495ae2ec21b40c773f36eb15e507f8b3f2931660509d09690" && \
    case "${TARGETARCH}" in \
        amd64) EXPECTED_SHA256="${REDISSHAKE_SHA256_AMD64}" ;; \
        arm64) EXPECTED_SHA256="${REDISSHAKE_SHA256_ARM64}" ;; \
        *) echo "Unsupported TARGETARCH: ${TARGETARCH}" >&2; exit 1 ;; \
    esac && \
    wget -qO /tmp/redis-shake.tar.gz "https://github.com/tair-opensource/RedisShake/releases/download/v${REDISSHAKE_VERSION}/redis-shake-v${REDISSHAKE_VERSION}-linux-${TARGETARCH}.tar.gz" && \
    echo "${EXPECTED_SHA256}  /tmp/redis-shake.tar.gz" | sha256sum -c - && \
    tar -xzf /tmp/redis-shake.tar.gz -C /usr/local/bin ./redis-shake && \
    chmod +x /usr/local/bin/redis-shake && \
    rm /tmp/redis-shake.tar.gz && \
    apk del wget

# Create non-root user for security (Docker Scout compliance)
RUN addgroup --system --gid 1001 nodejs && \
adduser --system --uid 1001 --ingroup nodejs betterdb
Expand Down
1 change: 1 addition & 0 deletions apps/api/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@
"test:integration:redis": "TEST_DB_PORT=6392 jest test/database-compatibility.e2e-spec.ts",
"test:integration:valkey": "TEST_DB_PORT=6390 jest --testRegex='.e2e-spec.ts$'",
"test:cluster": "jest test/api-cluster.e2e-spec.ts",
"test:migration-topology": "RUN_TOPOLOGY_TESTS=true jest test/migration-topology.e2e-spec.ts",
"test:cluster:unit": "jest src/cluster/*.spec.ts",
"test:integration:cluster": "TEST_DB_HOST=localhost TEST_DB_PORT=7001 jest test/api-cluster.e2e-spec.ts",
"test:unit:parsers": "jest src/database/parsers/*.spec.ts",
Expand Down
2 changes: 2 additions & 0 deletions apps/api/src/app.module.ts
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@ import { SettingsModule } from './settings/settings.module';
import { WebhooksModule } from './webhooks/webhooks.module';
import { TelemetryModule } from './telemetry/telemetry.module';
import { VectorSearchModule } from './vector-search/vector-search.module';
import { MigrationModule } from './migration/migration.module';
import { CloudAuthModule } from './auth/cloud-auth.module';
import { McpModule } from './mcp/mcp.module';
import { MetricForecastingModule } from './metric-forecasting/metric-forecasting.module';
Expand Down Expand Up @@ -120,6 +121,7 @@ const baseImports = [
WebhooksModule,
McpModule,
VectorSearchModule,
MigrationModule,
MetricForecastingModule,
];

Expand Down
133 changes: 133 additions & 0 deletions apps/api/src/migration/__tests__/commandlog-analyzer.spec.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,133 @@
import { analyzeCommands } from '../analysis/commandlog-analyzer';
import type { DatabasePort, DatabaseCapabilities } from '../../common/interfaces/database-port.interface';

/**
 * Builds a DatabasePort test double whose command-log behavior is driven by
 * the given options. Defaults: COMMANDLOG unsupported, both logs empty, no
 * simulated errors. All three accessors are jest mocks so call counts can be
 * asserted by the tests below.
 */
function createMockAdapter(options: {
  hasCommandLog?: boolean;
  commandLogEntries?: Array<{ command: string[] }>;
  slowLogEntries?: Array<{ command: string[] }>;
  commandLogError?: boolean;
  slowLogError?: boolean;
} = {}): DatabasePort {
  const hasCommandLog = options.hasCommandLog ?? false;
  const commandLogEntries = options.commandLogEntries ?? [];
  const slowLogEntries = options.slowLogEntries ?? [];
  const commandLogError = options.commandLogError ?? false;
  const slowLogError = options.slowLogError ?? false;

  const capabilities = { hasCommandLog } as Partial<DatabaseCapabilities>;

  const adapter = {
    getCapabilities: jest.fn(() => capabilities),
    getCommandLog: jest.fn(() =>
      commandLogError
        ? Promise.reject(new Error('COMMANDLOG failed'))
        : Promise.resolve(commandLogEntries),
    ),
    getSlowLog: jest.fn(() =>
      slowLogError
        ? Promise.reject(new Error('SLOWLOG failed'))
        : Promise.resolve(slowLogEntries),
    ),
  };

  return adapter as unknown as DatabasePort;
}

// Unit tests for analyzeCommands. Expected contract per these cases:
// prefer COMMANDLOG when the adapter reports hasCommandLog, fall back to
// SLOWLOG when it is unsupported OR when the COMMANDLOG call rejects, and
// report sourceUsed as 'commandlog' | 'slowlog' | 'unavailable'.
describe('analyzeCommands', () => {
  it('should return top commands from COMMANDLOG when available', async () => {
    const adapter = createMockAdapter({
      hasCommandLog: true,
      commandLogEntries: [
        { command: ['SET', 'key1', 'val'] },
        { command: ['SET', 'key2', 'val'] },
        { command: ['GET', 'key1'] },
        { command: ['SET', 'key3', 'val'] },
      ],
    });

    const result = await analyzeCommands(adapter);

    expect(result.sourceUsed).toBe('commandlog');
    // 4 log entries collapse to 2 distinct command names: SET x3, GET x1.
    expect(result.topCommands).toHaveLength(2);
    expect(result.topCommands[0]).toEqual({ command: 'SET', count: 3 });
    expect(result.topCommands[1]).toEqual({ command: 'GET', count: 1 });
  });

  it('should fall back to SLOWLOG when COMMANDLOG is not available', async () => {
    const adapter = createMockAdapter({
      hasCommandLog: false,
      slowLogEntries: [
        { command: ['HGETALL', 'myhash'] },
        { command: ['HGETALL', 'myhash2'] },
        { command: ['ZADD', 'myset', '1', 'a'] },
      ],
    });

    const result = await analyzeCommands(adapter);

    expect(result.sourceUsed).toBe('slowlog');
    expect(result.topCommands[0]).toEqual({ command: 'HGETALL', count: 2 });
    expect(result.topCommands[1]).toEqual({ command: 'ZADD', count: 1 });
  });

  it('should fall back to SLOWLOG when COMMANDLOG errors', async () => {
    // Capability says COMMANDLOG exists, but the call itself rejects —
    // the analyzer must degrade to SLOWLOG rather than propagate the error.
    const adapter = createMockAdapter({
      hasCommandLog: true,
      commandLogError: true,
      slowLogEntries: [
        { command: ['INFO', 'all'] },
      ],
    });

    const result = await analyzeCommands(adapter);

    expect(result.sourceUsed).toBe('slowlog');
    expect(result.topCommands).toHaveLength(1);
    expect(result.topCommands[0].command).toBe('INFO');
  });

  it('should return empty topCommands when both logs are empty', async () => {
    // An empty COMMANDLOG is still a successful read: sourceUsed stays
    // 'commandlog' and no SLOWLOG fallback is expected.
    const adapter = createMockAdapter({
      hasCommandLog: true,
      commandLogEntries: [],
    });

    const result = await analyzeCommands(adapter);

    expect(result.sourceUsed).toBe('commandlog');
    expect(result.topCommands).toEqual([]);
  });

  it('should return unavailable when both sources fail', async () => {
    // COMMANDLOG unsupported and SLOWLOG rejects: no data source remains.
    const adapter = createMockAdapter({
      hasCommandLog: false,
      slowLogError: true,
    });

    const result = await analyzeCommands(adapter);

    expect(result.sourceUsed).toBe('unavailable');
    expect(result.topCommands).toEqual([]);
  });

  it('should sort commands by count descending', async () => {
    // SET x3, GET x2, DEL x1 — interleaved on purpose so ordering must come
    // from the counts, not from first-seen order in the log.
    const adapter = createMockAdapter({
      hasCommandLog: true,
      commandLogEntries: [
        { command: ['GET', 'a'] },
        { command: ['SET', 'a', '1'] },
        { command: ['SET', 'b', '2'] },
        { command: ['SET', 'c', '3'] },
        { command: ['GET', 'b'] },
        { command: ['DEL', 'a'] },
      ],
    });

    const result = await analyzeCommands(adapter);

    expect(result.topCommands[0].command).toBe('SET');
    expect(result.topCommands[0].count).toBe(3);
    expect(result.topCommands[1].command).toBe('GET');
    expect(result.topCommands[1].count).toBe(2);
    expect(result.topCommands[2].command).toBe('DEL');
    expect(result.topCommands[2].count).toBe(1);
  });
});
Loading
Loading