feat: protect sensitive files and auto-backup before modifications

Implements GitHub issues #104 and #102:

**#104: Sensitive File Protection**
- Block writes to .env, credentials, SSH keys, cloud configs
- Categories: environment, credentials, ssh_keys, api_tokens, certificates, cloud_config
- Warn on reads of .env files (may contain secrets)
- Block writes to id_rsa, *.pem, *.key, credentials.json, etc.

**#102: Auto-Backup System**
- Automatic backup before every write/edit operation
- Backups stored in .codetyper-backup/ (preserves directory structure)
- Max 10 backups per file
- 7-day retention with auto-cleanup
- listBackups, getLatestBackup, restoreFromBackup functions

Closes #104
Closes #102
This commit is contained in:
2026-02-05 19:12:31 -05:00
parent a30b3bb60f
commit f2641d6ab0
6 changed files with 736 additions and 0 deletions

42
src/constants/backup.ts Normal file
View File

@@ -0,0 +1,42 @@
/**
* Backup System Constants
*
* Configuration for the automatic file backup system.
*/
/**
 * Default backup configuration.
 *
 * Declared `as const` so every field keeps its literal type and the
 * object is deeply readonly — accidental mutation of these shared
 * defaults becomes a compile-time error instead of a runtime surprise.
 */
export const BACKUP_DEFAULTS = {
  /** Whether backup is enabled by default */
  ENABLED: true,
  /** Directory to store backups (relative to working dir) */
  DIRECTORY: ".codetyper-backup",
  /** Maximum number of backups to keep per file */
  MAX_BACKUPS_PER_FILE: 10,
  /** Number of days to retain backups */
  RETENTION_DAYS: 7,
  /** File extension for backups */
  EXTENSION: ".bak",
} as const;
/**
 * User-facing backup messages.
 *
 * Declared `as const` so the object is readonly and string fields keep
 * their literal types; the message builders are pure formatters.
 */
export const BACKUP_MESSAGES = {
  /** Emitted after a backup copy is written. */
  CREATED: (backupPath: string) => `Backup created: ${backupPath}`,
  /** Emitted after a successful restore. */
  RESTORED: (filePath: string) => `Restored from backup: ${filePath}`,
  /** Emitted after old backups are pruned. */
  CLEANUP: (count: number) => `Cleaned up ${count} old backup(s)`,
  /** Emitted when a restore is requested but no backup exists. */
  NO_BACKUP: (filePath: string) => `No backup found for: ${filePath}`,
  /** Generic failure wrapper. */
  BACKUP_FAILED: (error: string) => `Backup failed: ${error}`,
  LIST_HEADER: "Available backups:",
  NO_BACKUPS: "No backups available",
} as const;
/**
 * Entry appended to .gitignore so backup files are never committed.
 * The empty first/last elements produce the surrounding newlines that
 * keep the entry separated from neighbouring .gitignore content.
 */
export const BACKUP_GITIGNORE_ENTRY = [
  "",
  "# CodeTyper backup files",
  ".codetyper-backup/",
  "",
].join("\n");

View File

@@ -0,0 +1,225 @@
/**
* Sensitive File Patterns
*
* Patterns to detect and protect files that may contain credentials,
* secrets, keys, or other sensitive information.
*/
/**
 * Category of sensitive file. Used to group patterns and to look up the
 * per-category explanation in SENSITIVE_FILE_MESSAGES.CATEGORY_DESCRIPTIONS.
 */
export type SensitiveFileCategory =
  | "environment"
  | "credentials"
  | "ssh_keys"
  | "api_tokens"
  | "certificates"
  | "cloud_config";
/**
 * A protected file pattern with metadata.
 */
export interface ProtectedFilePattern {
  /** Unique snake_case identifier for this rule. */
  name: string;
  /** Regex tested against the file path (with backslashes normalized to "/"). */
  pattern: RegExp;
  /** Category grouping, used for display and per-category messaging. */
  category: SensitiveFileCategory;
  /** Short human-readable description of the matched file type. */
  description: string;
  /** If true, block writes but warn on reads. If false, block both. */
  allowRead: boolean;
}
/**
 * Protected file patterns.
 *
 * ORDER MATTERS: consumers scan this array top-to-bottom and act on the
 * FIRST matching entry, so specific patterns (e.g. privkey.pem,
 * server.key) must appear BEFORE generic catch-alls (the *.pem / *.key
 * rule). Previously the certificate entries sat after the catch-all and
 * could never match, so such files were reported under the wrong
 * category.
 */
export const PROTECTED_FILE_PATTERNS: ProtectedFilePattern[] = [
  // ==========================================================================
  // Environment Files
  // ==========================================================================
  {
    name: "env_file",
    // Matches ".env" and every variant (".env.local", ".env.production", ...).
    pattern: /\.env(\..*)?$/,
    category: "environment",
    description: "Environment configuration file",
    allowRead: true,
  },
  {
    name: "env_local",
    // NOTE: strictly subsumed by env_file above (same flags); kept so it
    // remains visible as a distinct entry in configuration UIs.
    pattern: /\.env\.local$/,
    category: "environment",
    description: "Local environment file",
    allowRead: true,
  },
  // ==========================================================================
  // Credential Files
  // ==========================================================================
  {
    name: "credentials_json",
    pattern: /credentials?\.json$/i,
    category: "credentials",
    description: "Credentials JSON file",
    allowRead: false,
  },
  {
    name: "credentials_yaml",
    pattern: /credentials?\.ya?ml$/i,
    category: "credentials",
    description: "Credentials YAML file",
    allowRead: false,
  },
  {
    name: "secrets_json",
    pattern: /secrets?\.json$/i,
    category: "credentials",
    description: "Secrets JSON file",
    allowRead: false,
  },
  {
    name: "secrets_yaml",
    pattern: /secrets?\.ya?ml$/i,
    category: "credentials",
    description: "Secrets YAML file",
    allowRead: false,
  },
  // ==========================================================================
  // SSH Keys (well-known private key filenames)
  // ==========================================================================
  {
    name: "ssh_private_rsa",
    pattern: /id_rsa$/,
    category: "ssh_keys",
    description: "SSH RSA private key",
    allowRead: false,
  },
  {
    name: "ssh_private_ed25519",
    pattern: /id_ed25519$/,
    category: "ssh_keys",
    description: "SSH ED25519 private key",
    allowRead: false,
  },
  {
    name: "ssh_private_ecdsa",
    pattern: /id_ecdsa$/,
    category: "ssh_keys",
    description: "SSH ECDSA private key",
    allowRead: false,
  },
  {
    name: "ssh_private_dsa",
    pattern: /id_dsa$/,
    category: "ssh_keys",
    description: "SSH DSA private key",
    allowRead: false,
  },
  // ==========================================================================
  // Certificates (specific names — MUST precede the generic *.pem/*.key rule)
  // ==========================================================================
  {
    name: "private_key_pem",
    pattern: /privkey\.pem$/,
    category: "certificates",
    description: "Private key PEM file",
    allowRead: false,
  },
  {
    name: "server_key",
    pattern: /server\.key$/,
    category: "certificates",
    description: "Server private key",
    allowRead: false,
  },
  // ==========================================================================
  // Generic key material catch-alls
  // ==========================================================================
  {
    name: "pem_key",
    pattern: /\.(pem|key)$/,
    category: "ssh_keys",
    description: "PEM or KEY file (may contain private key)",
    allowRead: false,
  },
  {
    name: "pkcs12",
    pattern: /\.(p12|pfx)$/,
    category: "ssh_keys",
    description: "PKCS#12 certificate bundle",
    allowRead: false,
  },
  // ==========================================================================
  // API Tokens & Package Manager Configs
  // ==========================================================================
  {
    name: "npmrc",
    pattern: /\.npmrc$/,
    category: "api_tokens",
    description: "NPM configuration (may contain auth token)",
    allowRead: true,
  },
  {
    name: "pypirc",
    pattern: /\.pypirc$/,
    category: "api_tokens",
    description: "PyPI configuration (may contain auth token)",
    allowRead: false,
  },
  {
    name: "docker_config",
    pattern: /\.docker\/config\.json$/,
    category: "api_tokens",
    description: "Docker config (may contain registry credentials)",
    allowRead: false,
  },
  // ==========================================================================
  // Cloud Configuration
  // ==========================================================================
  {
    name: "aws_credentials",
    pattern: /\.aws\/credentials$/,
    category: "cloud_config",
    description: "AWS credentials file",
    allowRead: false,
  },
  {
    name: "kube_config",
    pattern: /\.kube\/config$/,
    category: "cloud_config",
    description: "Kubernetes config (may contain cluster credentials)",
    allowRead: false,
  },
  {
    name: "gcloud_credentials",
    pattern: /application_default_credentials\.json$/,
    category: "cloud_config",
    description: "Google Cloud credentials",
    allowRead: false,
  },
  {
    name: "azure_credentials",
    pattern: /\.azure\/credentials$/,
    category: "cloud_config",
    description: "Azure credentials file",
    allowRead: false,
  },
];
/**
 * Per-category explanations of why files in that category are sensitive.
 *
 * Declared with an explicit type annotation instead of an `as Record<...>`
 * assertion: the assertion silently accepts a missing or misspelled key,
 * whereas the annotation makes the compiler verify that every
 * SensitiveFileCategory has an entry.
 */
const CATEGORY_DESCRIPTIONS: Record<SensitiveFileCategory, string> = {
  environment: "Environment files often contain API keys and secrets",
  credentials: "Credential files contain sensitive authentication data",
  ssh_keys: "SSH keys provide access to remote systems",
  api_tokens: "API token configs may contain authentication credentials",
  certificates: "Certificate files contain cryptographic keys",
  cloud_config: "Cloud configuration files contain service credentials",
};
/**
 * Messages for sensitive file operations.
 */
export const SENSITIVE_FILE_MESSAGES = {
  BLOCKED_WRITE_TITLE: "Cannot modify sensitive file",
  BLOCKED_READ_TITLE: "Sensitive file detected",
  WARN_READ: "This file may contain secrets. Proceed with caution.",
  CATEGORY_DESCRIPTIONS,
  BLOCKED_REASON: "Modifying this file could expose or corrupt sensitive credentials.",
  READ_SUGGESTION: "If you need to debug credentials, review the file manually.",
  WRITE_SUGGESTION: "To modify credentials, edit the file manually outside of CodeTyper.",
};

250
src/services/file-backup.ts Normal file
View File

@@ -0,0 +1,250 @@
/**
* File Backup Service
*
* Automatically creates backups of files before write/edit operations.
* Provides rollback functionality to restore previous versions.
*/
import fs from "fs/promises";
import path from "path";
import { BACKUP_DEFAULTS, BACKUP_MESSAGES } from "@constants/backup";
/**
 * Metadata describing one backup copy of a file.
 */
export interface BackupInfo {
  /** Path of the file the backup was taken from. */
  originalPath: string;
  /** Path of the backup copy inside the backup directory. */
  backupPath: string;
  /** Backup file mtime in epoch milliseconds; used for sorting and retention. */
  timestamp: number;
  /** The same mtime rendered via toLocaleString() for display. */
  formattedTime: string;
}
/**
 * Backup configuration.
 */
export interface BackupConfig {
  /** Master switch — when false, createBackup is a no-op returning null. */
  enabled: boolean;
  /** Backup directory name, relative to the working directory. */
  directory: string;
  /** Newest-first cap on how many backups of one file are retained. */
  maxBackupsPerFile: number;
  /** Backups older than this many days are deleted during cleanup. */
  retentionDays: number;
}
/**
 * Build the active backup configuration.
 * Currently sourced entirely from BACKUP_DEFAULTS; a future version may
 * overlay user configuration here.
 */
export const getBackupConfig = (): BackupConfig => {
  return {
    enabled: BACKUP_DEFAULTS.ENABLED,
    directory: BACKUP_DEFAULTS.DIRECTORY,
    maxBackupsPerFile: BACKUP_DEFAULTS.MAX_BACKUPS_PER_FILE,
    retentionDays: BACKUP_DEFAULTS.RETENTION_DAYS,
  };
};
/**
 * Build a backup filename of the form "<original>.<timestamp>.bak".
 * The ISO timestamp has ":" and "." replaced with "-" so the name is
 * safe on all filesystems.
 */
const generateBackupFilename = (originalFilename: string): string => {
  const safeStamp = new Date().toISOString().replace(/[:.]/g, "-");
  return [originalFilename, safeStamp].join(".") + BACKUP_DEFAULTS.EXTENSION;
};
/**
 * Compute the backup directory for a file, mirroring the file's position
 * relative to the working directory underneath the backup root so the
 * original directory structure is preserved.
 */
const getBackupDir = (
  filePath: string,
  workingDir: string,
  config: BackupConfig,
): string => {
  const sourceDir = path.dirname(filePath);
  const mirroredSubdir = path.relative(workingDir, sourceDir);
  return path.join(workingDir, config.directory, mirroredSubdir);
};
/**
 * Create a backup of a file before modification.
 *
 * The file is copied byte-for-byte with fs.copyFile. The previous
 * implementation round-tripped the content through a utf-8 string
 * (readFile/writeFile), which silently corrupts binary files; copyFile
 * preserves arbitrary content and avoids holding the file in memory.
 *
 * @param filePath   file about to be modified
 * @param workingDir project working directory (backup root lives here)
 * @returns the backup path on success; null when backups are disabled,
 *          the file does not exist yet, or the copy fails — backup is
 *          best-effort and must never block the actual edit.
 */
export const createBackup = async (
  filePath: string,
  workingDir: string,
): Promise<string | null> => {
  const config = getBackupConfig();
  if (!config.enabled) {
    return null;
  }
  try {
    // Throws if the file doesn't exist yet (nothing to back up).
    await fs.access(filePath);
    // Ensure the mirrored backup directory exists.
    const backupDir = getBackupDir(filePath, workingDir, config);
    await fs.mkdir(backupDir, { recursive: true });
    // Copy the original content byte-for-byte.
    const backupFilename = generateBackupFilename(path.basename(filePath));
    const backupPath = path.join(backupDir, backupFilename);
    await fs.copyFile(filePath, backupPath);
    // Enforce per-file count and age limits.
    await cleanupOldBackups(filePath, workingDir, config);
    return backupPath;
  } catch {
    // File doesn't exist or backup failed - not critical
    return null;
  }
};
/**
 * List all backups of a file, newest first.
 *
 * Backup files are named "<original>.<timestamp>.bak", so candidates are
 * matched on the "<original>." prefix. The previous bare
 * startsWith(originalFilename) also picked up backups of files whose
 * names merely extend this one (e.g. "a.tsx" backups when listing
 * "a.ts"); requiring the dot separator fixes that.
 *
 * @returns BackupInfo entries sorted newest-first; empty when the backup
 *          directory does not exist or cannot be read.
 */
export const listBackups = async (
  filePath: string,
  workingDir: string,
): Promise<BackupInfo[]> => {
  const config = getBackupConfig();
  const backupDir = getBackupDir(filePath, workingDir, config);
  const prefix = `${path.basename(filePath)}.`;
  try {
    const files = await fs.readdir(backupDir);
    const candidates = files.filter(
      (file) =>
        file.startsWith(prefix) && file.endsWith(BACKUP_DEFAULTS.EXTENSION),
    );
    // Stat candidates in parallel; mtime doubles as the backup timestamp.
    const backups = await Promise.all(
      candidates.map(async (file): Promise<BackupInfo> => {
        const backupPath = path.join(backupDir, file);
        const stats = await fs.stat(backupPath);
        return {
          originalPath: filePath,
          backupPath,
          timestamp: stats.mtime.getTime(),
          formattedTime: stats.mtime.toLocaleString(),
        };
      }),
    );
    // Sort by timestamp, newest first
    return backups.sort((a, b) => b.timestamp - a.timestamp);
  } catch {
    return [];
  }
};
/**
 * Return the newest backup of a file, or null when none exist.
 */
export const getLatestBackup = async (
  filePath: string,
  workingDir: string,
): Promise<BackupInfo | null> => {
  const [newest] = await listBackups(filePath, workingDir);
  return newest ?? null;
};
/**
 * Restore a file by copying a backup over it.
 *
 * Uses fs.copyFile for a byte-for-byte restore. The previous utf-8
 * readFile/writeFile round-trip would corrupt backups of binary files.
 *
 * @param backupPath path of the backup copy to restore
 * @param targetPath destination path to overwrite
 * @returns true on success; false when the backup is missing/unreadable
 *          or the target cannot be written.
 */
export const restoreFromBackup = async (
  backupPath: string,
  targetPath: string,
): Promise<boolean> => {
  try {
    await fs.copyFile(backupPath, targetPath);
    return true;
  } catch {
    return false;
  }
};
/**
 * Restore a file from its most recent backup.
 *
 * @returns a success flag plus a human-readable status message.
 */
export const restoreLatestBackup = async (
  filePath: string,
  workingDir: string,
): Promise<{ success: boolean; message: string }> => {
  const latest = await getLatestBackup(filePath, workingDir);
  if (latest === null) {
    return { success: false, message: BACKUP_MESSAGES.NO_BACKUP(filePath) };
  }
  const ok = await restoreFromBackup(latest.backupPath, filePath);
  return ok
    ? { success: true, message: BACKUP_MESSAGES.RESTORED(filePath) }
    : {
        success: false,
        message: BACKUP_MESSAGES.BACKUP_FAILED("Could not restore from backup"),
      };
};
/**
 * Delete backups that exceed the per-file count limit or fall outside
 * the retention window. listBackups returns newest-first, so any index
 * at or past maxBackupsPerFile is surplus. Deletion failures are
 * ignored — cleanup is best-effort.
 *
 * @returns the number of backups removed.
 */
const cleanupOldBackups = async (
  filePath: string,
  workingDir: string,
  config: BackupConfig,
): Promise<number> => {
  const backups = await listBackups(filePath, workingDir);
  const oldestAllowed = Date.now() - config.retentionDays * 24 * 60 * 60 * 1000;
  let removed = 0;
  for (const [index, backup] of backups.entries()) {
    const overCount = index >= config.maxBackupsPerFile;
    const expired = backup.timestamp < oldestAllowed;
    if (!overCount && !expired) {
      continue;
    }
    try {
      await fs.unlink(backup.backupPath);
      removed++;
    } catch {
      // Ignore deletion errors
    }
  }
  return removed;
};
/**
 * Render a backup list for display: a header line followed by one
 * "<time> - <backup filename>" row per backup.
 */
export const formatBackupList = (backups: BackupInfo[]): string => {
  if (!backups.length) {
    return BACKUP_MESSAGES.NO_BACKUPS;
  }
  const rows = backups.map(
    (backup) => ` ${backup.formattedTime} - ${path.basename(backup.backupPath)}`,
  );
  return [BACKUP_MESSAGES.LIST_HEADER, ...rows].join("\n");
};

View File

@@ -0,0 +1,163 @@
/**
* Sensitive File Guard Service
*
* Protects sensitive files (credentials, keys, etc.) from being
* modified or inadvertently exposed.
*/
import {
PROTECTED_FILE_PATTERNS,
SENSITIVE_FILE_MESSAGES,
type ProtectedFilePattern,
type SensitiveFileCategory,
} from "@constants/sensitive-files";
/**
 * Type of file operation being checked against the protection rules.
 */
export type FileOperation = "read" | "write" | "edit" | "delete";
/**
 * Result of checking a file for sensitivity.
 */
export interface SensitiveFileCheckResult {
  /** Whether the operation should be blocked */
  blocked: boolean;
  /** Whether to show a warning (for allowed reads) */
  warn: boolean;
  /** The matched pattern, if any */
  pattern?: ProtectedFilePattern;
  /** User-friendly message (warning or blocked explanation) */
  message?: string;
}
/**
 * Check whether a file operation on the given path should be blocked,
 * allowed with a warning, or allowed outright.
 *
 * The path is normalized to forward slashes and tested against each
 * protected pattern in declaration order; the first match wins.
 */
export const checkSensitiveFile = (
  filePath: string,
  operation: FileOperation,
): SensitiveFileCheckResult => {
  // Normalize path separators so patterns only need to handle "/".
  const normalizedPath = filePath.replace(/\\/g, "/");
  const match = PROTECTED_FILE_PATTERNS.find((candidate) =>
    candidate.pattern.test(normalizedPath),
  );
  if (!match) {
    return { blocked: false, warn: false };
  }
  if (operation === "read" && match.allowRead) {
    // Warn-only files: the read proceeds, with a caution message.
    return {
      blocked: false,
      warn: true,
      pattern: match,
      message: formatWarningMessage(match),
    };
  }
  // Writes/edits/deletes — and reads of fully protected files — are blocked.
  return {
    blocked: true,
    warn: false,
    pattern: match,
    message: formatBlockedMessage(match, operation),
  };
};
/**
 * Build the multi-line warning shown when reading a warn-only
 * sensitive file.
 */
const formatWarningMessage = (pattern: ProtectedFilePattern): string => {
  const lines = [
    `[WARNING] ${pattern.description}`,
    "",
    SENSITIVE_FILE_MESSAGES.CATEGORY_DESCRIPTIONS[pattern.category],
    SENSITIVE_FILE_MESSAGES.WARN_READ,
  ];
  return lines.join("\n");
};
/**
 * Build the multi-line message shown when an operation on a sensitive
 * file is blocked.
 *
 * (Removed the unused `operationText` local — it was computed but never
 * referenced anywhere in the message.)
 *
 * @param pattern   the protection rule that matched the file
 * @param operation the attempted operation; selects the suggestion text
 */
const formatBlockedMessage = (
  pattern: ProtectedFilePattern,
  operation: FileOperation,
): string => {
  const categoryDescription =
    SENSITIVE_FILE_MESSAGES.CATEGORY_DESCRIPTIONS[pattern.category];
  // Reads get a "review manually" hint; writes/edits/deletes get a
  // "modify outside the tool" hint.
  const suggestion =
    operation === "read"
      ? SENSITIVE_FILE_MESSAGES.READ_SUGGESTION
      : SENSITIVE_FILE_MESSAGES.WRITE_SUGGESTION;
  return [
    `[BLOCKED] Cannot ${operation} ${pattern.description.toLowerCase()}`,
    "",
    `Category: ${formatCategoryName(pattern.category)}`,
    "",
    categoryDescription,
    SENSITIVE_FILE_MESSAGES.BLOCKED_REASON,
    "",
    suggestion,
  ].join("\n");
};
/**
 * Map a category identifier to its human-readable display name.
 * The switch is exhaustive over SensitiveFileCategory, so adding a new
 * category without a display name is a compile-time error.
 */
const formatCategoryName = (category: SensitiveFileCategory): string => {
  switch (category) {
    case "environment":
      return "Environment Files";
    case "credentials":
      return "Credential Files";
    case "ssh_keys":
      return "SSH Keys";
    case "api_tokens":
      return "API Tokens";
    case "certificates":
      return "Certificates";
    case "cloud_config":
      return "Cloud Configuration";
  }
};
/**
 * Expose the full protection pattern list (read-only) for
 * configuration UIs.
 */
export const getProtectedPatterns = (): readonly ProtectedFilePattern[] =>
  PROTECTED_FILE_PATTERNS;
/**
 * True when a path matches any protection rule, whether fully blocked
 * or warn-only.
 */
export const isSensitiveFile = (filePath: string): boolean => {
  const { blocked, warn } = checkSensitiveFile(filePath, "read");
  return blocked || warn;
};
/**
 * True when writing to the path is not blocked by any protection rule.
 */
export const canWriteFile = (filePath: string): boolean => {
  const { blocked } = checkSensitiveFile(filePath, "write");
  return !blocked;
};
/**
 * All known sensitive-file categories, in display order.
 */
export const getSensitiveFileCategories = (): SensitiveFileCategory[] => [
  "environment",
  "credentials",
  "ssh_keys",
  "api_tokens",
  "certificates",
  "cloud_config",
];

View File

@@ -6,10 +6,13 @@ import fs from "fs/promises";
import path from "path";
import { EDIT_MESSAGES, EDIT_TITLES, EDIT_DESCRIPTION } from "@constants/edit";
import { SENSITIVE_FILE_MESSAGES } from "@constants/sensitive-files";
import {
isFileOpAllowed,
promptFilePermission,
} from "@services/core/permissions";
import { checkSensitiveFile } from "@services/sensitive-file-guard";
import { createBackup } from "@services/file-backup";
import { formatDiff } from "@utils/diff/format";
import { generateDiff } from "@utils/diff/generate";
import { editParams } from "@tools/edit/params";
@@ -32,6 +35,18 @@ const createDeniedResult = (relativePath: string): ToolResult => ({
error: EDIT_MESSAGES.PERMISSION_DENIED,
});
/**
 * Build the failed ToolResult returned when an edit is refused because
 * the target path matched a sensitive-file pattern. metadata.reason
 * ("sensitive_file") lets callers distinguish this from a user
 * permission denial.
 */
const createBlockedResult = (relativePath: string, message: string): ToolResult => ({
  success: false,
  title: SENSITIVE_FILE_MESSAGES.BLOCKED_WRITE_TITLE,
  output: "",
  error: message,
  metadata: {
    filepath: relativePath,
    blocked: true,
    reason: "sensitive_file",
  },
});
const createErrorResult = (relativePath: string, error: Error): ToolResult => ({
success: false,
title: EDIT_TITLES.FAILED(relativePath),
@@ -103,6 +118,15 @@ export const executeEdit = async (
const { filePath, oldString, newString, replaceAll = false } = args;
const { fullPath, relativePath } = resolvePath(filePath, ctx.workingDir);
// SAFETY: Check for sensitive files BEFORE any other operation
const sensitiveCheck = checkSensitiveFile(fullPath, "edit");
if (sensitiveCheck.blocked) {
return createBlockedResult(
relativePath,
sensitiveCheck.message ?? "Cannot edit sensitive file",
);
}
try {
const content = await fs.readFile(fullPath, "utf-8");
@@ -129,6 +153,9 @@ export const executeEdit = async (
status: "running",
});
// Create backup before editing
await createBackup(fullPath, ctx.workingDir);
const newContent = applyEdit(content, oldString, newString, replaceAll);
const diff = generateDiff(content, newContent);
const diffOutput = formatDiff(diff, relativePath);

View File

@@ -10,10 +10,13 @@ import {
WRITE_TITLES,
WRITE_DESCRIPTION,
} from "@constants/write";
import { SENSITIVE_FILE_MESSAGES } from "@constants/sensitive-files";
import {
isFileOpAllowed,
promptFilePermission,
} from "@services/core/permissions";
import { checkSensitiveFile } from "@services/sensitive-file-guard";
import { createBackup } from "@services/file-backup";
import { formatDiff } from "@utils/diff/format";
import { generateDiff } from "@utils/diff/generate";
import { writeParams } from "@tools/write/params";
@@ -31,6 +34,18 @@ const createDeniedResult = (relativePath: string): ToolResult => ({
error: WRITE_MESSAGES.PERMISSION_DENIED,
});
/**
 * Build the failed ToolResult returned when a write is refused because
 * the target path matched a sensitive-file pattern. metadata.reason
 * ("sensitive_file") lets callers distinguish this from a user
 * permission denial.
 */
const createBlockedResult = (relativePath: string, message: string): ToolResult => ({
  success: false,
  title: SENSITIVE_FILE_MESSAGES.BLOCKED_WRITE_TITLE,
  output: "",
  error: message,
  metadata: {
    filepath: relativePath,
    blocked: true,
    reason: "sensitive_file",
  },
});
const createErrorResult = (relativePath: string, error: Error): ToolResult => ({
success: false,
title: WRITE_TITLES.FAILED(relativePath),
@@ -129,6 +144,15 @@ export const executeWrite = async (
const { filePath, content } = args;
const { fullPath, relativePath } = resolvePaths(filePath, ctx.workingDir);
// SAFETY: Check for sensitive files BEFORE any other operation
const sensitiveCheck = checkSensitiveFile(fullPath, "write");
if (sensitiveCheck.blocked) {
return createBlockedResult(
relativePath,
sensitiveCheck.message ?? "Cannot write to sensitive file",
);
}
const { exists, content: oldContent } = await readExistingContent(fullPath);
const allowed = await checkPermission(
@@ -147,6 +171,11 @@ export const executeWrite = async (
try {
await ensureDirectory(fullPath);
// Create backup before overwriting (only if file exists)
if (exists) {
await createBackup(fullPath, ctx.workingDir);
}
const diff = generateDiff(oldContent, content);
const diffOutput = formatDiff(diff, relativePath);