feat: protect sensitive files and auto-backup before modifications

Implements GitHub issues #104 and #102:

**#104: Sensitive File Protection**
- Block writes to .env, credentials, SSH keys, cloud configs
- Categories: environment, credentials, ssh_keys, api_tokens, certificates, cloud_config
- Warn on reads of .env files (may contain secrets)
- Block writes to id_rsa, *.pem, *.key, credentials.json, etc.

**#102: Auto-Backup System**
- Automatic backup before every write/edit operation
- Backups stored in .codetyper-backup/ (preserves directory structure)
- Max 10 backups per file
- 7-day retention with auto-cleanup
- listBackups, getLatestBackup, restoreFromBackup functions

Closes #104
Closes #102
This commit is contained in:
2026-02-05 19:12:31 -05:00
parent a30b3bb60f
commit f2641d6ab0
6 changed files with 736 additions and 0 deletions

250
src/services/file-backup.ts Normal file
View File

@@ -0,0 +1,250 @@
/**
* File Backup Service
*
* Automatically creates backups of files before write/edit operations.
* Provides rollback functionality to restore previous versions.
*/
import fs from "fs/promises";
import path from "path";
import { BACKUP_DEFAULTS, BACKUP_MESSAGES } from "@constants/backup";
/**
 * Backup metadata
 */
export interface BackupInfo {
  /** Path of the file that was backed up */
  originalPath: string;
  /** Full path to the backup copy inside the backup directory */
  backupPath: string;
  /** Backup file mtime in epoch milliseconds (used for newest-first sorting and retention) */
  timestamp: number;
  /** Locale-formatted, human-readable rendering of `timestamp` */
  formattedTime: string;
}
/**
 * Backup configuration
 */
export interface BackupConfig {
  /** When false, createBackup is a no-op and returns null */
  enabled: boolean;
  /** Backup directory name, resolved relative to the working directory */
  directory: string;
  /** Maximum backups kept per file; older ones beyond this count are deleted */
  maxBackupsPerFile: number;
  /** Backups older than this many days are removed during cleanup */
  retentionDays: number;
}
/**
 * Build the active backup configuration from the compiled-in defaults.
 * (A future version could merge user config on top of these values.)
 */
export const getBackupConfig = (): BackupConfig => {
  const { ENABLED, DIRECTORY, MAX_BACKUPS_PER_FILE, RETENTION_DAYS } =
    BACKUP_DEFAULTS;
  return {
    enabled: ENABLED,
    directory: DIRECTORY,
    maxBackupsPerFile: MAX_BACKUPS_PER_FILE,
    retentionDays: RETENTION_DAYS,
  };
};
/**
 * Build a timestamped backup filename for the given original filename.
 * The ISO timestamp has ":" and "." replaced with "-" so it is filename-safe.
 */
const generateBackupFilename = (originalFilename: string): string => {
  const safeStamp = new Date()
    .toISOString()
    .replace(/:/g, "-")
    .replace(/\./g, "-");
  return `${originalFilename}.${safeStamp}${BACKUP_DEFAULTS.EXTENSION}`;
};
/**
 * Resolve the backup directory for a file, mirroring the file's
 * directory layout under the backup root inside the working directory.
 */
const getBackupDir = (
  filePath: string,
  workingDir: string,
  config: BackupConfig,
): string => {
  const sourceDir = path.dirname(filePath);
  const mirrored = path.relative(workingDir, sourceDir);
  return path.join(workingDir, config.directory, mirrored);
};
/**
 * Create a backup of a file before modification.
 *
 * Copies the file byte-for-byte with fs.copyFile instead of the previous
 * utf-8 read/write round-trip, so binary files are preserved intact.
 *
 * @param filePath Path of the file about to be modified
 * @param workingDir Project working directory (the backup root lives here)
 * @returns The backup path on success, or null when backups are disabled,
 *          the file does not exist, or the backup failed.
 */
export const createBackup = async (
  filePath: string,
  workingDir: string,
): Promise<string | null> => {
  const config = getBackupConfig();
  if (!config.enabled) {
    return null;
  }
  try {
    // Throws if the file doesn't exist yet — nothing to back up.
    await fs.access(filePath);
    // Ensure the mirrored backup directory exists.
    const backupDir = getBackupDir(filePath, workingDir, config);
    await fs.mkdir(backupDir, { recursive: true });
    // Timestamped backup name alongside the original's relative location.
    const backupFilename = generateBackupFilename(path.basename(filePath));
    const backupPath = path.join(backupDir, backupFilename);
    // Byte-for-byte copy: safe for binary as well as text files.
    await fs.copyFile(filePath, backupPath);
    // Enforce the per-file count cap and the retention window.
    await cleanupOldBackups(filePath, workingDir, config);
    return backupPath;
  } catch {
    // File doesn't exist or backup failed — backups are best-effort.
    return null;
  }
};
/**
 * Get list of backups for a file, sorted newest first.
 *
 * Backups are named `<original>.<timestamp><EXTENSION>`; requiring the
 * trailing "." after the original filename prevents false matches from
 * sibling files sharing a prefix (e.g. listing "a.ts" must not pick up
 * backups of "a.tsx", which the old bare startsWith check allowed).
 *
 * @returns BackupInfo entries, or [] when the backup dir is missing/unreadable
 */
export const listBackups = async (
  filePath: string,
  workingDir: string,
): Promise<BackupInfo[]> => {
  const config = getBackupConfig();
  const backupDir = getBackupDir(filePath, workingDir, config);
  const prefix = `${path.basename(filePath)}.`;
  try {
    const files = await fs.readdir(backupDir);
    const candidates = files.filter(
      (file) =>
        file.startsWith(prefix) && file.endsWith(BACKUP_DEFAULTS.EXTENSION),
    );
    // Stat all candidates in parallel instead of one at a time.
    const backups = await Promise.all(
      candidates.map(async (file): Promise<BackupInfo> => {
        const backupPath = path.join(backupDir, file);
        const stats = await fs.stat(backupPath);
        return {
          originalPath: filePath,
          backupPath,
          timestamp: stats.mtime.getTime(),
          formattedTime: stats.mtime.toLocaleString(),
        };
      }),
    );
    // Sort by timestamp, newest first.
    return backups.sort((a, b) => b.timestamp - a.timestamp);
  } catch {
    return [];
  }
};
/**
 * Get the most recent backup for a file, or null when none exist.
 */
export const getLatestBackup = async (
  filePath: string,
  workingDir: string,
): Promise<BackupInfo | null> => {
  // listBackups returns newest-first, so the head is the latest.
  const [newest = null] = await listBackups(filePath, workingDir);
  return newest;
};
/**
 * Restore a file from a specific backup.
 *
 * @param backupPath Path of the backup copy to read
 * @param targetPath Destination to overwrite with the backup's contents
 * @returns true on success, false when the read or write fails
 */
export const restoreFromBackup = async (
  backupPath: string,
  targetPath: string,
): Promise<boolean> => {
  try {
    const snapshot = await fs.readFile(backupPath, "utf-8");
    await fs.writeFile(targetPath, snapshot, "utf-8");
  } catch {
    return false;
  }
  return true;
};
/**
 * Restore a file from its most recent backup.
 *
 * @returns success flag plus a user-facing message describing the outcome
 */
export const restoreLatestBackup = async (
  filePath: string,
  workingDir: string,
): Promise<{ success: boolean; message: string }> => {
  const latest = await getLatestBackup(filePath, workingDir);
  if (latest === null) {
    // Nothing to restore from.
    return { success: false, message: BACKUP_MESSAGES.NO_BACKUP(filePath) };
  }
  const ok = await restoreFromBackup(latest.backupPath, filePath);
  return ok
    ? { success: true, message: BACKUP_MESSAGES.RESTORED(filePath) }
    : {
        success: false,
        message: BACKUP_MESSAGES.BACKUP_FAILED("Could not restore from backup"),
      };
};
/**
 * Delete backups of a file that exceed the per-file count cap or the
 * retention window. Deletion failures are ignored (best-effort cleanup).
 *
 * @returns the number of backups actually deleted
 */
const cleanupOldBackups = async (
  filePath: string,
  workingDir: string,
  config: BackupConfig,
): Promise<number> => {
  const backups = await listBackups(filePath, workingDir);
  const expiryThreshold =
    Date.now() - config.retentionDays * 24 * 60 * 60 * 1000;
  let removed = 0;
  for (const [index, backup] of backups.entries()) {
    // Entries are newest-first, so index >= cap means "too many kept".
    const overCount = index >= config.maxBackupsPerFile;
    const expired = backup.timestamp < expiryThreshold;
    if (!overCount && !expired) {
      continue;
    }
    try {
      await fs.unlink(backup.backupPath);
      removed++;
    } catch {
      // Ignore deletion errors.
    }
  }
  return removed;
};
/**
 * Render a backup list for display: a header line followed by one
 * "timestamp - filename" line per backup, or a no-backups message.
 */
export const formatBackupList = (backups: BackupInfo[]): string => {
  if (backups.length === 0) {
    return BACKUP_MESSAGES.NO_BACKUPS;
  }
  const entries = backups.map(
    (backup) => ` ${backup.formattedTime} - ${path.basename(backup.backupPath)}`,
  );
  return [BACKUP_MESSAGES.LIST_HEADER, ...entries].join("\n");
};

View File

@@ -0,0 +1,163 @@
/**
* Sensitive File Guard Service
*
* Protects sensitive files (credentials, keys, etc.) from being
* modified or inadvertently exposed.
*/
import {
PROTECTED_FILE_PATTERNS,
SENSITIVE_FILE_MESSAGES,
type ProtectedFilePattern,
type SensitiveFileCategory,
} from "@constants/sensitive-files";
/**
 * Type of file operation the guard can evaluate.
 */
export type FileOperation = "read" | "write" | "edit" | "delete";
/**
 * Result of checking a file for sensitivity
 */
export interface SensitiveFileCheckResult {
  /** Whether the operation should be blocked */
  blocked: boolean;
  /** Whether to show a warning (for allowed reads) */
  warn: boolean;
  /** The matched pattern, if any (absent when no pattern matched) */
  pattern?: ProtectedFilePattern;
  /** User-friendly message explaining the block or warning, if any */
  message?: string;
}
/**
 * Decide whether a file operation should be blocked or merely warned about.
 *
 * The first protected pattern matching the slash-normalized path determines
 * the outcome; paths matching no pattern are fully allowed.
 */
export const checkSensitiveFile = (
  filePath: string,
  operation: FileOperation,
): SensitiveFileCheckResult => {
  // Use forward slashes so Windows-style paths match the same patterns.
  const candidate = filePath.replace(/\\/g, "/");
  const match = PROTECTED_FILE_PATTERNS.find((entry) =>
    entry.pattern.test(candidate),
  );
  if (match === undefined) {
    return { blocked: false, warn: false };
  }
  // Reads of patterns that permit reading are allowed, with a warning.
  if (operation === "read" && match.allowRead) {
    return {
      blocked: false,
      warn: true,
      pattern: match,
      message: formatWarningMessage(match),
    };
  }
  // Writes/edits/deletes — and reads of no-read patterns — are blocked.
  return {
    blocked: true,
    warn: false,
    pattern: match,
    message: formatBlockedMessage(match, operation),
  };
};
/**
 * Build the warning shown when a sensitive file is read (but allowed).
 */
const formatWarningMessage = (pattern: ProtectedFilePattern): string => {
  const lines = [
    `[WARNING] ${pattern.description}`,
    "",
    SENSITIVE_FILE_MESSAGES.CATEGORY_DESCRIPTIONS[pattern.category],
    SENSITIVE_FILE_MESSAGES.WARN_READ,
  ];
  return lines.join("\n");
};
/**
 * Build the user-facing message shown when an operation on a protected
 * file is blocked.
 *
 * (Fix: removed the unused `operationText` local — it was computed but
 * never referenced.)
 *
 * @param pattern   The protected-file pattern that matched
 * @param operation The operation that was attempted
 * @returns Multi-line message with category, explanation, and a suggestion
 */
const formatBlockedMessage = (
  pattern: ProtectedFilePattern,
  operation: FileOperation,
): string => {
  const categoryDescription =
    SENSITIVE_FILE_MESSAGES.CATEGORY_DESCRIPTIONS[pattern.category];
  // Reads reach here only when the pattern disallows them, so the
  // suggestion differs from the write/edit/delete case.
  const suggestion =
    operation === "read"
      ? SENSITIVE_FILE_MESSAGES.READ_SUGGESTION
      : SENSITIVE_FILE_MESSAGES.WRITE_SUGGESTION;
  return [
    `[BLOCKED] Cannot ${operation} ${pattern.description.toLowerCase()}`,
    "",
    `Category: ${formatCategoryName(pattern.category)}`,
    "",
    categoryDescription,
    SENSITIVE_FILE_MESSAGES.BLOCKED_REASON,
    "",
    suggestion,
  ].join("\n");
};
/**
 * Map a sensitive-file category to its human-readable display name.
 */
const formatCategoryName = (category: SensitiveFileCategory): string => {
  const labels = {
    environment: "Environment Files",
    credentials: "Credential Files",
    ssh_keys: "SSH Keys",
    api_tokens: "API Tokens",
    certificates: "Certificates",
    cloud_config: "Cloud Configuration",
  } as const;
  return labels[category];
};
/**
 * Expose the full protected-pattern list (e.g. for a configuration UI).
 */
export const getProtectedPatterns = (): readonly ProtectedFilePattern[] =>
  PROTECTED_FILE_PATTERNS;
/**
 * True when the path matches any protected pattern
 * (whether fully blocked or merely warn-on-read).
 */
export const isSensitiveFile = (filePath: string): boolean => {
  const { blocked, warn } = checkSensitiveFile(filePath, "read");
  return blocked || warn;
};
/**
 * True when a write to the path would not be blocked by the guard.
 */
export const canWriteFile = (filePath: string): boolean => {
  const verdict = checkSensitiveFile(filePath, "write");
  return verdict.blocked === false;
};
/**
 * All sensitive-file categories the guard recognizes, in display order.
 */
export const getSensitiveFileCategories = (): SensitiveFileCategory[] => {
  const categories: SensitiveFileCategory[] = [
    "environment",
    "credentials",
    "ssh_keys",
    "api_tokens",
    "certificates",
    "cloud_config",
  ];
  return categories;
};