Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
18 commits
Select commit Hold shift + click to select a range
e929c59
fix(sftp): add keepalive interval and debug logging to connection config
hugoheml Jan 21, 2026
c799f47
fix(mysql): default database to ignore
hugoheml Jan 21, 2026
8b942e3
refactor: extract backup service init in backup controller to avoid r…
hugoheml Jan 21, 2026
17e8e1a
Revert "refactor: extract backup service init in backup controller to…
hugoheml Jan 21, 2026
5a736e6
fix(sftp): change debug logging level to verbose in connection config
hugoheml Jan 21, 2026
a60e0b3
feat(sftp): add keepaliveCountMax parameter
hugoheml Jan 21, 2026
5d8736e
feat(sftp): implements always connect / disconnect before all methods
hugoheml Jan 21, 2026
e96bf4b
fix(sftp): try to improve big files upload
hugoheml Jan 21, 2026
d9387b1
fix(sftp): build issue
hugoheml Jan 21, 2026
be67fb4
fix(sftp): build issue - again
hugoheml Jan 21, 2026
05a1222
fix(sftp): try to use fastPut
hugoheml Jan 21, 2026
ee49ff3
fix(encryption): Native Node streams are no longer supported error
hugoheml Jan 23, 2026
4503f7d
fix(rsync): now ignore compression for already compressed files
hugoheml Feb 16, 2026
3651626
feat(rsync): add support for single and multi-backup modes
hugoheml Feb 16, 2026
13d9e69
fix(rsync): update path check to handle wildcard correctly
hugoheml Feb 16, 2026
c399f9c
fix(rsync): fix some errors for listRemoteFiles
hugoheml Feb 16, 2026
5203169
fix(rsync): simplify error handling and streamline data processing in…
hugoheml Feb 16, 2026
6d80f20
fix(rsync): improve error reporting in listRemoteFiles by including s…
hugoheml Feb 16, 2026
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
37 changes: 37 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -193,6 +193,43 @@ The `PERIODIC_BACKUP_RETENTION` setting allows you to define custom retention po
| `RSYNC_SSH_OPTIONS` | Additional SSH options appended to the rsync SSH command. Useful for disabling host key checks during testing. | _(empty)_ |
| `RSYNC_EXTRA_ARGS` | Additional rsync arguments (space separated, double quotes supported for paths or arguments that contain spaces) | _(empty)_ |

#### Rsync Backup Modes

The rsync backup service supports two different backup modes depending on the path configuration:

**Single Backup Mode (without `/*`):**

When `RSYNC_TARGET_PATH` does not end with `/*`, the entire directory content is backed up as a single archive with a timestamp. A new backup is created at each execution.

```bash
RSYNC_TARGET_PATH=/var/www/mysite
```

Result:
- Creates: `mysite-2026-02-16-14-30-00.tar.gz`
- Behavior: New backup at each execution
- Storage: `rsync/mysite/mysite-2026-02-16-14-30-00.tar.gz`

**Multi-Backup Mode (with `/*`):**

When `RSYNC_TARGET_PATH` ends with `/*`, each file and subdirectory is backed up individually **without timestamp**. This prevents duplicate backups: if a backup already exists for an item, it is skipped.

```bash
RSYNC_TARGET_PATH=/var/www/*
```

Result (assuming `/var/www/` contains `site1`, `site2`, `site3`):
- Creates: `site1.tar.gz`, `site2.tar.gz`, `site3.tar.gz`
- Behavior: A backup is only created if the archive does not already exist
- Storage: `rsync/target-name/site1/site1.tar.gz`, `rsync/target-name/site2/site2.tar.gz`, etc.

**Use Cases:**

- **Single mode**: For backing up a complete application with versioning (database backups, complete application snapshots)
- **Multi mode**: For backing up multiple independent sites/projects where you only want to back up each item once and skip duplicates (web hosting directories, user folders)

#### Development Setup

The default development `.env` values point `RSYNC_TARGET_HOST` to this container (`rsync`) and mount the generated SSH private key at `/app/docker_keys/id_ed25519`. The files served over rsync live in `docker/rsync/data`.

To generate the development key pair, run the helper script and recreate the container:
Expand Down
2 changes: 1 addition & 1 deletion src/services/backup/mysql/MysqlBackupService.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ import { statSync } from "node:fs";

const { MYSQL_FOLDER_PATH, MYSQL_IGNORE_DATABASES } = process.env;

const DATABASES_TO_IGNORE = (MYSQL_IGNORE_DATABASES || "").split(',');
const DATABASES_TO_IGNORE = (MYSQL_IGNORE_DATABASES || "information_schema,performance_schema,mysql,sys").split(',');

export class MysqlBackupService extends BackupService {
SERVICE_NAME = "mysql";
Expand Down
120 changes: 106 additions & 14 deletions src/services/backup/rsync/RsyncBackupService.ts
Original file line number Diff line number Diff line change
@@ -1,15 +1,22 @@
import { BackupService } from "../BackupService";
import { BackupFileMetadata } from "../types/BackupFileMetadata";
import { loadRsyncTarget, createArchiveForTarget, RsyncTarget } from "./utils";
import { loadRsyncTarget, createArchiveForTarget, RsyncTarget, listRemoteFiles, sanitizeName, buildTimestamp } from "./utils";
import { logger } from "../../log";

const { RSYNC_FOLDER_PATH, BACKUP_RSYNC } = process.env;

type PendingBackup = {
target: RsyncTarget;
itemName: string;
specificPath: string;
};

export class RsyncBackupService extends BackupService {
SERVICE_NAME = "rsync";
FOLDER_PATH = RSYNC_FOLDER_PATH || "rsync";

private target: RsyncTarget | undefined;
private pendingBackups: Map<string, PendingBackup> = new Map();

async init() {
this.target = loadRsyncTarget();
Expand All @@ -24,22 +31,107 @@ export class RsyncBackupService extends BackupService {
}

try {
const archive = await createArchiveForTarget(this.target);

return [
{
parentElement: this.target.name,
destinationFolder: `${this.FOLDER_PATH}/${archive.sanitizedName}`,
fileName: archive.archiveName,
uuid: `rsync-${archive.sanitizedName}-${archive.timestamp}`,
size: archive.size,
date: archive.date,
localPath: archive.archivePath
}
];
// Check if path ends with *
if (this.target.path.endsWith("*")) {
return await this.getMultipleBackups();
} else {
return await this.getSingleBackup();
}
} catch (error) {
logger.error(`[rsync] Failed to create archive: ${error}`);
throw error;
}
}

private async getSingleBackup(): Promise<BackupFileMetadata[]> {
if (!this.target) {
return [];
}

const archive = await createArchiveForTarget(this.target);

return [
{
parentElement: this.target.name,
destinationFolder: `${this.FOLDER_PATH}/${archive.sanitizedName}`,
fileName: archive.archiveName,
uuid: `rsync-${archive.sanitizedName}-${archive.timestamp}`,
size: archive.size,
date: archive.date,
localPath: archive.archivePath
}
];
}

private async getMultipleBackups(): Promise<BackupFileMetadata[]> {
if (!this.target) {
return [];
}

// Remove /* from the path to get the base directory
const basePath = this.target.path.endsWith("/*") ? this.target.path.slice(0, -2) : this.target.path.endsWith("*") ? this.target.path.slice(0, -1) : this.target.path;

logger.info(`[rsync] Listing files in remote directory: ${basePath}`);
const remoteFiles = await listRemoteFiles(this.target, basePath);

if (remoteFiles.length === 0) {
logger.warn(`[rsync] No files found in remote directory: ${basePath}`);
return [];
}

logger.info(`[rsync] Found ${remoteFiles.length} items to backup individually`);

const result: BackupFileMetadata[] = [];
const date = new Date();
const timestamp = buildTimestamp(date);

for (const itemName of remoteFiles) {
const sanitizedItemName = sanitizeName(itemName);
const specificPath = `${basePath}/${itemName}`;
// Use timestamp only in UUID for internal tracking, not in filename
const uuid = `rsync-${sanitizeName(this.target.name)}-${sanitizedItemName}-${timestamp}`;

// Store the pending backup info for later download
this.pendingBackups.set(uuid, {
target: this.target,
itemName,
specificPath
});

result.push({
parentElement: `${this.target.name} - ${itemName}`,
destinationFolder: `${this.FOLDER_PATH}/${sanitizeName(this.target.name)}/${sanitizedItemName}`,
// No timestamp in filename: this allows BackupController to detect existing backups
fileName: `${sanitizedItemName}.tar.gz`,
uuid,
size: 0, // Size will be known after archive creation
date
});
}

return result;
}

async downloadBackup(backupMetadata: BackupFileMetadata): Promise<string | undefined> {
// Check if this is a pending backup (multi-file mode)
const pendingBackup = this.pendingBackups.get(backupMetadata.uuid);

if (pendingBackup) {
// Create the archive now
logger.info(`[rsync] Creating archive for item: ${pendingBackup.itemName}`);
const archive = await createArchiveForTarget(
pendingBackup.target,
pendingBackup.specificPath,
pendingBackup.itemName
);

// Remove from pending backups
this.pendingBackups.delete(backupMetadata.uuid);

return archive.archivePath;
}

// Default behavior for single backup mode
return backupMetadata.localPath;
}
}
77 changes: 68 additions & 9 deletions src/services/backup/rsync/utils.ts
Original file line number Diff line number Diff line change
Expand Up @@ -151,9 +151,12 @@ async function runCommand(command: string, args: string[], options?: CommandOpti
});
}

function buildRsyncArgs(target: RsyncTarget, destination: string) {
const args = ["-az"];
/**
 * Returns true when the file name already carries an archive extension
 * (".tar.gz", ".tgz" or ".tar") and therefore should not be re-compressed.
 */
export function isArchiveFile(fileName: string) {
  return /(?:\.tar\.gz|\.tgz|\.tar)$/.test(fileName);
}

function buildSshCommand(target: RsyncTarget): string[] {
const sshParts = ["ssh"];

if (target.port) {
Expand All @@ -168,6 +171,14 @@ function buildRsyncArgs(target: RsyncTarget, destination: string) {
sshParts.push(option);
}

return sshParts;
}

function buildRsyncArgs(target: RsyncTarget, destination: string, specificPath?: string) {
const args = ["-az"];

const sshParts = buildSshCommand(target);

for (const exclude of target.excludes) {
args.push("--exclude", exclude);
}
Expand All @@ -176,28 +187,76 @@ function buildRsyncArgs(target: RsyncTarget, destination: string) {

args.push("-e", sshParts.join(" "));

const remoteSpec = `${target.user ? `${target.user}@` : ""}${formatHost(target.host)}:${target.path}`;
const remotePath = specificPath || target.path;
const remoteSpec = `${target.user ? `${target.user}@` : ""}${formatHost(target.host)}:${remotePath}`;
args.push(remoteSpec, destination);

return args;
}

export async function createArchiveForTarget(target: RsyncTarget) {
const sanitizedName = sanitizeName(target.name);
/**
 * Lists the names of the entries directly inside `remotePath` on the remote
 * host by running `find` over SSH.
 *
 * @param target     rsync target describing host, user and ssh options
 * @param remotePath absolute remote directory to enumerate
 * @returns one bare name per direct child (files and directories)
 * @throws when the ssh process cannot be spawned or exits non-zero
 */
export async function listRemoteFiles(target: RsyncTarget, remotePath: string): Promise<string[]> {
  const sshCommand = buildSshCommand(target);
  const host = formatHost(target.host);
  const remoteHost = target.user ? `${target.user}@${host}` : host;

  // Single-quote the remote path so $, backticks and double quotes in the
  // directory name are not interpreted by the remote shell.
  const quotedPath = `'${remotePath.replace(/'/g, `'\\''`)}'`;
  const findCommand = `find ${quotedPath} -mindepth 1 -maxdepth 1 -printf '%f\\n'`;

  const [bin, ...sshArgs] = sshCommand;
  const args = [...sshArgs, remoteHost, findCommand];

  return new Promise<string[]>((resolve, reject) => {
    const child = spawn(bin, args, { stdio: ["ignore", "pipe", "pipe"] });

    let stdout = "";
    let stderr = "";

    child.stdout?.on("data", (data) => stdout += data.toString());
    child.stderr?.on("data", (data) => stderr += data.toString());

    // Without this handler a spawn failure (e.g. ssh binary missing) would
    // leave the promise pending forever.
    child.on("error", (error) => reject(error));

    child.on("close", (code) => {
      if (code === 0) {
        const files = stdout.trim().split("\n").filter((line) => line.length > 0);
        resolve(files);
      } else {
        reject(new Error(`SSH Exit ${code}: ${stdout} - ${stderr.trim()}`));
      }
    });
  });
}

export async function createArchiveForTarget(target: RsyncTarget, specificPath?: string, itemName?: string) {
const sanitizedName = itemName ? sanitizeName(itemName) : sanitizeName(target.name);
const date = new Date();
const timestamp = buildTimestamp(date);

const workingDirectory = join(RSYNC_TMP_ROOT, `${sanitizedName}-${timestamp}`);
const archiveName = `${sanitizedName}-${timestamp}.tar.gz`;
const archivePath = join(RSYNC_TMP_ROOT, archiveName);
let archiveName = `${sanitizedName}-${timestamp}.tar.gz`;
let archivePath = join(RSYNC_TMP_ROOT, archiveName);

mkdirSync(workingDirectory, { recursive: true });

const remotePath = specificPath || target.path;
const displayPath = specificPath ? `${target.path}/${specificPath}` : target.path;

try {
logger.info(`[rsync] Starting sync for "${target.name}" (${target.host}:${target.path}).`);
const rsyncArgs = buildRsyncArgs(target, workingDirectory);
logger.info(`[rsync] Starting sync for "${target.name}" (${target.host}:${displayPath}).`);
const rsyncArgs = buildRsyncArgs(target, workingDirectory, remotePath);
await runCommand("rsync", rsyncArgs, { logPrefix: "rsync" });

if (!isArchiveFile(archiveName)) {
archiveName += ".tar.gz";
archivePath = join(RSYNC_TMP_ROOT, archiveName);

return {
archiveName,
archivePath,
sanitizedName: archiveName,
timestamp,
size: statSync(archivePath).size,
date
};
}

logger.info(`[rsync] Creating archive for "${target.name}".`);
await runCommand("tar", ["-czf", archivePath, "-C", workingDirectory, "."], { logPrefix: "tar" });

Expand Down
12 changes: 9 additions & 3 deletions src/services/files/encryption.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ import "dotenv/config";
import { createMessage, encrypt, readKey } from "openpgp";
import { readFile, writeFile, rename } from "node:fs/promises";
import { createReadStream, createWriteStream } from "node:fs";
import { Readable, Writable } from "node:stream";
import { pipeline } from "node:stream/promises";
import { logger } from "../log";

Expand All @@ -21,20 +22,25 @@ export async function EncryptFile(filePath: string): Promise<void> {
logger.debug(`Encrypting file ${filePath}...`);

// Use streaming to handle large files without loading them entirely into memory
const readStream = createReadStream(filePath);
const nodeReadStream = createReadStream(filePath);
// Convert Node.js stream to WebStream (required by openpgp v6+)
const webReadStream = Readable.toWeb(nodeReadStream) as ReadableStream<Uint8Array>;
const tempFilePath = `${filePath}.tmp`;
const writeStream = createWriteStream(tempFilePath);

const encryptedStream = await encrypt({
message: await createMessage({ binary: readStream }),
message: await createMessage({ binary: webReadStream }),
encryptionKeys: publicKey,
format: 'armored'
});

logger.debug(`File ${filePath} encrypted successfully, writing to temporary file...`);

// Convert WebStream back to Node.js stream for pipeline
const nodeEncryptedStream = Readable.fromWeb(encryptedStream as import("stream/web").ReadableStream);

// Pipe the encrypted stream to the output file
await pipeline(encryptedStream, writeStream);
await pipeline(nodeEncryptedStream, writeStream);

// Replace the original file with the encrypted one
await rename(tempFilePath, filePath);
Expand Down
Loading