feat: implement hooks, plugins, session forks, and vim motions

Add 4 major features to codetyper-cli:

- Hooks System: Lifecycle hooks (PreToolUse, PostToolUse, SessionStart,
  SessionEnd, UserPromptSubmit, Stop) with exit code control flow
- Plugin System: Custom tools, commands, and hooks via plugin manifest
- Session Forking: Snapshots, rewind, fork, and switch between branches
- Vim Motions: Normal/Insert/Command/Visual modes with keyboard navigation

New files:
- src/types/{hooks,plugin,session-fork,vim}.ts
- src/constants/{hooks,plugin,session-fork,vim}.ts
- src/services/{hooks-service,plugin-loader,plugin-service,session-fork-service}.ts
- src/stores/vim-store.ts (vanilla)
- src/tui/hooks/{useVimMode,useVimStore,useTodoStore,useThemeStore}.ts
- src/tui/components/VimStatusLine.tsx

Modified:
- src/services/agent.ts (hook integration)
- src/tools/index.ts (plugin tool registration)
- src/stores/{todo-store,theme-store}.ts (converted to vanilla)
- TUI components (updated hook imports)
This commit is contained in:
2026-01-31 22:22:04 -05:00
parent 37d4a43154
commit a3c407d89a
56 changed files with 7507 additions and 90 deletions

View File

@@ -23,6 +23,11 @@ import { chat as providerChat } from "@providers/index";
import { getTool, getToolsForApi, refreshMCPTools } from "@tools/index";
import type { ToolContext, ToolCall, ToolResult } from "@/types/tools";
import { initializePermissions } from "@services/permissions";
import {
loadHooks,
executePreToolUseHooks,
executePostToolUseHooks,
} from "@services/hooks-service";
import { MAX_ITERATIONS } from "@constants/agent";
import { usageStore } from "@stores/usage-store";
@@ -130,12 +135,40 @@ const callLLM = async (
};
/**
* Execute a tool call
* Execute a tool call with hook support
*/
const executeTool = async (
state: AgentState,
toolCall: ToolCall,
): Promise<ToolResult> => {
// Execute PreToolUse hooks
const hookResult = await executePreToolUseHooks(
state.sessionId,
toolCall.name,
toolCall.arguments,
state.workingDir,
);
// Handle hook results
if (hookResult.action === "block") {
return {
success: false,
title: "Blocked by hook",
output: "",
error: hookResult.message,
};
}
if (hookResult.action === "warn") {
state.options.onWarning?.(hookResult.message);
}
// Apply modified arguments if hook returned them
const effectiveArgs =
hookResult.action === "modify"
? { ...toolCall.arguments, ...hookResult.updatedInput }
: toolCall.arguments;
const tool = getTool(toolCall.name);
if (!tool) {
@@ -160,19 +193,34 @@ const executeTool = async (
},
};
let result: ToolResult;
try {
// Validate arguments
const validatedArgs = tool.parameters.parse(toolCall.arguments);
return await tool.execute(validatedArgs, ctx);
const validatedArgs = tool.parameters.parse(effectiveArgs);
result = await tool.execute(validatedArgs, ctx);
} catch (error: unknown) {
const errorMessage = error instanceof Error ? error.message : String(error);
return {
result = {
success: false,
title: "Tool error",
output: "",
error: errorMessage,
};
}
// Execute PostToolUse hooks (fire-and-forget, don't block on result)
executePostToolUseHooks(
state.sessionId,
toolCall.name,
effectiveArgs,
result,
state.workingDir,
).catch(() => {
// Silently ignore post-hook errors
});
return result;
};
/**
@@ -190,6 +238,9 @@ export const runAgentLoop = async (
// Initialize permissions
await initializePermissions();
// Load hooks
await loadHooks(state.workingDir);
// Refresh MCP tools if available
await refreshMCPTools();

View File

@@ -0,0 +1,248 @@
/**
* Clipboard Service - Reads images from system clipboard
*
* Platform-specific clipboard reading:
* - macOS: Uses pbpaste and osascript for images
* - Linux: Uses xclip or wl-paste
* - Windows: Uses PowerShell
*/
import { spawn } from "child_process";
import { tmpdir } from "os";
import { join } from "path";
import { readFile, unlink } from "fs/promises";
import { v4 as uuidv4 } from "uuid";
import type { ImageMediaType, PastedImage } from "@/types/image";
/**
 * Image media types this service recognizes and can return from the
 * clipboard; membership mirrors the magic-number checks in detectImageType.
 */
export const SUPPORTED_IMAGE_FORMATS: ImageMediaType[] = [
  "image/png",
  "image/jpeg",
  "image/gif",
  "image/webp",
];
/**
 * Identify which clipboard strategy applies to the current OS.
 * Anything other than macOS, Linux, or Windows is reported as "unsupported".
 */
const detectPlatform = (): "darwin" | "linux" | "win32" | "unsupported" => {
  const platform = process.platform;
  switch (platform) {
    case "darwin":
    case "linux":
    case "win32":
      return platform;
    default:
      return "unsupported";
  }
};
/**
 * Spawn a command and collect its entire stdout as a Buffer.
 *
 * Resolves with the binary stdout (plus decoded stderr) once the process
 * exits with code 0; rejects with stderr in the message on a non-zero exit
 * code, or with the spawn error if the command could not start at all.
 */
const runCommand = (
  command: string,
  args: string[],
): Promise<{ stdout: Buffer; stderr: string }> =>
  new Promise((resolve, reject) => {
    const child = spawn(command, args);
    const outChunks: Buffer[] = [];
    const errChunks: string[] = [];
    child.stdout.on("data", (chunk) => outChunks.push(chunk));
    child.stderr.on("data", (chunk) => errChunks.push(chunk.toString()));
    child.on("error", reject);
    child.on("close", (code) => {
      const stderr = errChunks.join("");
      if (code === 0) {
        resolve({ stdout: Buffer.concat(outChunks), stderr });
      } else {
        reject(new Error(`Command failed with code ${code}: ${stderr}`));
      }
    });
  });
/**
 * Sniff an image buffer's media type from its magic-number signature.
 * Recognizes PNG, JPEG, GIF, and WebP; returns null for anything else
 * (including buffers too short to carry a signature).
 */
const detectImageType = (buffer: Buffer): ImageMediaType | null => {
  // True when every byte of `signature` matches the buffer at `offset`.
  const matchesAt = (offset: number, signature: number[]): boolean =>
    signature.every((byte, i) => buffer[offset + i] === byte);
  if (matchesAt(0, [0x89, 0x50, 0x4e, 0x47])) {
    return "image/png"; // \x89PNG
  }
  if (matchesAt(0, [0xff, 0xd8, 0xff])) {
    return "image/jpeg"; // JFIF/EXIF SOI marker
  }
  if (matchesAt(0, [0x47, 0x49, 0x46, 0x38])) {
    return "image/gif"; // "GIF8"
  }
  // WebP is a RIFF container whose format tag at offset 8 is "WEBP"
  if (matchesAt(0, [0x52, 0x49, 0x46, 0x46]) && matchesAt(8, [0x57, 0x45, 0x42, 0x50])) {
    return "image/webp";
  }
  return null;
};
/**
 * Read a clipboard image on macOS.
 *
 * Asks osascript for the clipboard contents as PNG («class PNGf») and has
 * the AppleScript write them to a unique temp file; the script prints
 * "success" only when that write completed. The temp file is then read
 * back, sniffed for a supported format, and base64-encoded.
 *
 * Returns null when the clipboard holds no image or any step fails. The
 * temp file is always removed in the finally block.
 */
const readClipboardImageMacOS = async (): Promise<PastedImage | null> => {
  const tempFile = join(tmpdir(), `clipboard-${uuidv4()}.png`);
  try {
    // Use osascript to save clipboard image to temp file.
    // NOTE(review): tempFile is interpolated straight into the AppleScript
    // source — safe while tmpdir()/uuid paths contain no quotes; confirm
    // the temp dir can never be user-controlled.
    const script = `
set theFile to POSIX file "${tempFile}"
try
set imageData to the clipboard as «class PNGf»
set fileRef to open for access theFile with write permission
write imageData to fileRef
close access fileRef
return "success"
on error
return "no image"
end try
`;
    const { stdout } = await runCommand("osascript", ["-e", script]);
    const result = stdout.toString().trim();
    if (result !== "success") {
      // AppleScript hit its "on error" branch: no PNG data on the clipboard
      return null;
    }
    // Read the temp file back and verify it is a supported image format
    const imageBuffer = await readFile(tempFile);
    const mediaType = detectImageType(imageBuffer);
    if (!mediaType) {
      return null;
    }
    const base64Data = imageBuffer.toString("base64");
    return {
      id: uuidv4(),
      mediaType,
      data: base64Data,
      timestamp: Date.now(),
    };
  } catch {
    // osascript missing, clipboard access denied, or file read failed
    return null;
  } finally {
    // Cleanup temp file
    try {
      await unlink(tempFile);
    } catch {
      // Ignore cleanup errors (the file may never have been created)
    }
  }
};
const readClipboardImageLinux = async (): Promise<PastedImage | null> => {
// Try xclip first, then wl-paste for Wayland
const commands = [
{ cmd: "xclip", args: ["-selection", "clipboard", "-t", "image/png", "-o"] },
{ cmd: "wl-paste", args: ["--type", "image/png"] },
];
for (const { cmd, args } of commands) {
try {
const { stdout } = await runCommand(cmd, args);
if (stdout.length === 0) {
continue;
}
const mediaType = detectImageType(stdout);
if (!mediaType) {
continue;
}
return {
id: uuidv4(),
mediaType,
data: stdout.toString("base64"),
timestamp: Date.now(),
};
} catch {
// Try next command
continue;
}
}
return null;
};
/**
 * Read a clipboard image on Windows.
 *
 * Runs a PowerShell snippet that grabs the clipboard image via WinForms
 * and saves it to a unique temp PNG; "success" on stdout signals a save
 * happened. The file is then read back, sniffed, and base64-encoded.
 * Returns null on any failure; the temp file is always cleaned up.
 */
const readClipboardImageWindows = async (): Promise<PastedImage | null> => {
  const tempFile = join(tmpdir(), `clipboard-${uuidv4()}.png`);
  try {
    // PowerShell script to save clipboard image.
    // NOTE(review): backslashes are doubled before interpolation into a
    // single-quoted PowerShell string, where backslashes are already
    // literal — the saved path therefore contains doubled separators.
    // Win32 path normalization usually collapses them, but confirm on a
    // real Windows host.
    const script = `
Add-Type -AssemblyName System.Windows.Forms
$image = [System.Windows.Forms.Clipboard]::GetImage()
if ($image -ne $null) {
$image.Save('${tempFile.replace(/\\/g, "\\\\")}', [System.Drawing.Imaging.ImageFormat]::Png)
Write-Output "success"
} else {
Write-Output "no image"
}
`;
    const { stdout } = await runCommand("powershell", ["-Command", script]);
    const result = stdout.toString().trim();
    if (result !== "success") {
      // Clipboard held no image
      return null;
    }
    const imageBuffer = await readFile(tempFile);
    const mediaType = detectImageType(imageBuffer);
    if (!mediaType) {
      return null;
    }
    return {
      id: uuidv4(),
      mediaType,
      data: imageBuffer.toString("base64"),
      timestamp: Date.now(),
    };
  } catch {
    // powershell missing or save/read failed
    return null;
  } finally {
    try {
      await unlink(tempFile);
    } catch {
      // Ignore cleanup errors
    }
  }
};
/**
 * Read an image from the system clipboard, dispatching to the
 * platform-specific reader. Resolves to null on unsupported platforms or
 * when no image is available.
 */
export const readClipboardImage = async (): Promise<PastedImage | null> => {
  switch (detectPlatform()) {
    case "darwin":
      return readClipboardImageMacOS();
    case "linux":
      return readClipboardImageLinux();
    case "win32":
      return readClipboardImageWindows();
    default:
      return null;
  }
};
/**
 * True when the system clipboard currently holds a readable image.
 * Note: performs a full clipboard read to decide.
 */
export const hasClipboardImage = async (): Promise<boolean> => {
  return (await readClipboardImage()) !== null;
};
/**
 * Render a byte count as a short human-readable size: "512B", "1.5KB",
 * "2.0MB" (one decimal place for KB/MB).
 */
export const formatImageSize = (bytes: number): string => {
  const KB = 1024;
  const MB = KB * 1024;
  if (bytes < KB) {
    return `${bytes}B`;
  }
  if (bytes < MB) {
    return `${(bytes / KB).toFixed(1)}KB`;
  }
  return `${(bytes / MB).toFixed(1)}MB`;
};
/**
 * Compute the decoded byte size of a base64 string.
 *
 * Fix: the previous `ceil(len * 3 / 4)` ignored `=` padding, overstating
 * the size by up to 2 bytes (e.g. "YQ==" decodes to 1 byte, not 3).
 * Unpadded (URL-safe style) input is also handled correctly.
 */
export const getImageSizeFromBase64 = (base64: string): number => {
  const padding = base64.endsWith("==") ? 2 : base64.endsWith("=") ? 1 : 0;
  return Math.floor((base64.length * 3) / 4) - padding;
};

View File

@@ -0,0 +1,446 @@
/**
* Hooks Service
*
* Manages lifecycle hooks for tool execution and session events
*/
import { spawn } from "child_process";
import { readFile, access, constants } from "fs/promises";
import { join, isAbsolute, resolve } from "path";
import type {
HookDefinition,
HooksConfig,
HookEventType,
HookResult,
HookInput,
PreToolUseHookInput,
PostToolUseHookInput,
HookExecutionError,
} from "@/types/hooks";
import type { ToolResult } from "@/types/tools";
import {
HOOKS_CONFIG_FILE,
DEFAULT_HOOK_TIMEOUT,
HOOK_EXIT_CODES,
HOOK_SHELL,
MAX_HOOK_OUTPUT_SIZE,
HOOK_ENV_PREFIX,
} from "@constants/hooks";
import { DIRS, LOCAL_CONFIG_DIR } from "@constants/paths";
/**
 * Cached hooks configuration
 */
interface HooksCache {
  global: HookDefinition[]; // hooks loaded from the user-level config dir (DIRS.config)
  local: HookDefinition[]; // hooks loaded from the project's local config dir
  loaded: boolean; // true once loadHooks() has populated both lists
}
// Module-level singleton; populated by loadHooks() and consulted by
// getHooksForEvent(). Starts empty so hook execution is a no-op until loaded.
const hooksCache: HooksCache = {
  global: [],
  local: [],
  loaded: false,
};
/**
 * Parse one hooks configuration file.
 *
 * Returns only the enabled hook definitions that carry both an event and
 * a script. Yields an empty list when the file is missing, unreadable,
 * contains malformed JSON, or lacks a `hooks` array — loading is always
 * best-effort and never throws.
 */
const loadHooksFromFile = async (filePath: string): Promise<HookDefinition[]> => {
  try {
    await access(filePath, constants.R_OK);
    const raw = await readFile(filePath, "utf-8");
    const config: HooksConfig = JSON.parse(raw);
    if (!Array.isArray(config.hooks)) {
      return [];
    }
    const isUsable = (hook: HookDefinition): boolean =>
      hook.enabled !== false && Boolean(hook.event) && Boolean(hook.script);
    return config.hooks.filter(isUsable);
  } catch {
    return [];
  }
};
/**
 * Load hooks from both the global and the project-local configuration
 * files (in parallel) and publish them into the module cache.
 */
export const loadHooks = async (workingDir: string): Promise<void> => {
  const sources = [
    join(DIRS.config, HOOKS_CONFIG_FILE),
    join(workingDir, LOCAL_CONFIG_DIR, HOOKS_CONFIG_FILE),
  ];
  const [globalHooks, localHooks] = await Promise.all(
    sources.map((source) => loadHooksFromFile(source)),
  );
  hooksCache.global = globalHooks;
  hooksCache.local = localHooks;
  hooksCache.loaded = true;
};
/**
 * Invalidate the cache and re-read both hooks configuration files.
 */
export const refreshHooks = async (workingDir: string): Promise<void> => {
  hooksCache.loaded = false;
  return loadHooks(workingDir);
};
/**
 * All cached hooks (global first, then local) registered for one event.
 * Returns an empty list until loadHooks() has run.
 */
export const getHooksForEvent = (event: HookEventType): HookDefinition[] => {
  if (!hooksCache.loaded) {
    return [];
  }
  return hooksCache.global
    .concat(hooksCache.local)
    .filter((hook) => hook.event === event);
};
/**
 * Turn a hook's script path into an absolute path; relative paths are
 * resolved against the working directory.
 */
const resolveScriptPath = (script: string, workingDir: string): string =>
  isAbsolute(script) ? script : resolve(workingDir, script);
/**
 * Execute a single hook script
 *
 * Runs the script through HOOK_SHELL with the working directory as cwd,
 * writes the JSON-serialized hook input to its stdin, and exposes the
 * event name and working directory via HOOK_ENV_PREFIX-prefixed env vars.
 *
 * Exit-code protocol (see HOOK_EXIT_CODES): on ALLOW the script may print
 * a JSON object with `updatedInput` to stdout to modify the tool input;
 * WARN and BLOCK take their message from stderr. Any other exit code is
 * reported as a warning. A missing/non-executable script, a spawn error,
 * or a timeout all degrade to a "warn" result — this function never
 * rejects.
 */
const executeHookScript = async (
  hook: HookDefinition,
  input: HookInput,
  workingDir: string
): Promise<HookResult> => {
  const scriptPath = resolveScriptPath(hook.script, workingDir);
  const timeout = hook.timeout ?? DEFAULT_HOOK_TIMEOUT;
  // Verify script exists (and is executable) before spawning
  try {
    await access(scriptPath, constants.X_OK);
  } catch {
    return {
      action: "warn",
      message: `Hook script not found or not executable: ${scriptPath}`,
    };
  }
  return new Promise((resolvePromise) => {
    const env = {
      ...process.env,
      [`${HOOK_ENV_PREFIX}EVENT`]: hook.event,
      [`${HOOK_ENV_PREFIX}WORKING_DIR`]: workingDir,
    };
    const child = spawn(HOOK_SHELL, [scriptPath], {
      cwd: workingDir,
      env,
      stdio: ["pipe", "pipe", "pipe"],
    });
    let stdout = "";
    let stderr = "";
    // Single budget shared by stdout AND stderr; output beyond it is dropped
    let outputSize = 0;
    const timeoutId = setTimeout(() => {
      // Settle with a warning and SIGTERM the child. The eventual "close"
      // event will call resolvePromise again — a no-op on a settled promise.
      child.kill("SIGTERM");
      resolvePromise({
        action: "warn",
        message: `Hook timed out after ${timeout}ms: ${hook.name || hook.script}`,
      });
    }, timeout);
    child.stdout.on("data", (data: Buffer) => {
      outputSize += data.length;
      if (outputSize <= MAX_HOOK_OUTPUT_SIZE) {
        stdout += data.toString();
      }
    });
    child.stderr.on("data", (data: Buffer) => {
      outputSize += data.length;
      if (outputSize <= MAX_HOOK_OUTPUT_SIZE) {
        stderr += data.toString();
      }
    });
    child.on("close", (code) => {
      clearTimeout(timeoutId);
      // NOTE(review): a null code means the child died from a signal;
      // mapping that to ALLOW looks permissive — confirm this is intended.
      const exitCode = code ?? HOOK_EXIT_CODES.ALLOW;
      if (exitCode === HOOK_EXIT_CODES.ALLOW) {
        // Check if stdout contains modified input
        if (stdout.trim()) {
          try {
            const parsed = JSON.parse(stdout.trim());
            if (parsed.updatedInput) {
              resolvePromise({
                action: "modify",
                updatedInput: parsed.updatedInput,
              });
              return;
            }
          } catch {
            // Not JSON or no updatedInput, just allow
          }
        }
        resolvePromise({ action: "allow" });
      } else if (exitCode === HOOK_EXIT_CODES.WARN) {
        resolvePromise({
          action: "warn",
          message: stderr.trim() || `Hook warning: ${hook.name || hook.script}`,
        });
      } else if (exitCode === HOOK_EXIT_CODES.BLOCK) {
        resolvePromise({
          action: "block",
          message: stderr.trim() || `Blocked by hook: ${hook.name || hook.script}`,
        });
      } else {
        resolvePromise({
          action: "warn",
          message: `Hook exited with unexpected code ${exitCode}: ${hook.name || hook.script}`,
        });
      }
    });
    child.on("error", (error) => {
      clearTimeout(timeoutId);
      resolvePromise({
        action: "warn",
        message: `Hook execution error: ${error.message}`,
      });
    });
    // Send input to stdin
    child.stdin.write(JSON.stringify(input));
    child.stdin.end();
  });
};
/**
 * Run every registered hook for an event, in order, and fold the
 * individual results into one:
 * - the first "block" short-circuits and wins;
 * - "modify" results are merged (later hooks override earlier keys);
 * - otherwise "warn" messages are joined, or "allow" when nothing fired.
 *
 * Note: every hook receives the ORIGINAL input — a "modify" from one hook
 * is not visible to the hooks that run after it.
 */
const executeHooks = async (
  event: HookEventType,
  input: HookInput,
  workingDir: string
): Promise<HookResult> => {
  const hooks = getHooksForEvent(event);
  if (hooks.length === 0) {
    return { action: "allow" };
  }
  const warnings: HookExecutionError[] = [];
  let mergedInput: Record<string, unknown> | null = null;
  for (const hook of hooks) {
    const result = await executeHookScript(hook, input, workingDir);
    switch (result.action) {
      case "block":
        return result;
      case "warn":
        warnings.push({ hook, error: result.message });
        break;
      case "modify":
        mergedInput = { ...(mergedInput ?? {}), ...result.updatedInput };
        break;
      default:
        break;
    }
  }
  if (mergedInput) {
    return { action: "modify", updatedInput: mergedInput };
  }
  if (warnings.length > 0) {
    return { action: "warn", message: warnings.map((w) => w.error).join("\n") };
  }
  return { action: "allow" };
};
/**
 * Run PreToolUse hooks for a pending tool call; lazily loads the hook
 * configuration on first use. The result may allow, warn, block, or
 * modify the tool arguments.
 */
export const executePreToolUseHooks = async (
  sessionId: string,
  toolName: string,
  toolArgs: Record<string, unknown>,
  workingDir: string
): Promise<HookResult> => {
  if (!hooksCache.loaded) {
    await loadHooks(workingDir);
  }
  const input: PreToolUseHookInput = { sessionId, toolName, toolArgs, workingDir };
  return executeHooks("PreToolUse", input, workingDir);
};
/**
 * Run PostToolUse hooks after a tool finished; lazily loads the hook
 * configuration on first use. The outcome is intentionally ignored —
 * post hooks cannot block or alter anything.
 */
export const executePostToolUseHooks = async (
  sessionId: string,
  toolName: string,
  toolArgs: Record<string, unknown>,
  result: ToolResult,
  workingDir: string
): Promise<void> => {
  if (!hooksCache.loaded) {
    await loadHooks(workingDir);
  }
  // Expose only the result fields hooks should see
  const { success, output, error } = result;
  const input: PostToolUseHookInput = {
    sessionId,
    toolName,
    toolArgs,
    result: { success, output, error },
    workingDir,
  };
  await executeHooks("PostToolUse", input, workingDir);
};
/**
 * Run SessionStart hooks with the session's provider/model context;
 * lazily loads the hook configuration on first use.
 */
export const executeSessionStartHooks = async (
  sessionId: string,
  workingDir: string,
  provider: string,
  model: string
): Promise<void> => {
  if (!hooksCache.loaded) {
    await loadHooks(workingDir);
  }
  await executeHooks(
    "SessionStart",
    { sessionId, workingDir, provider, model },
    workingDir,
  );
};
/**
 * Run SessionEnd hooks with the session's duration and message count;
 * lazily loads the hook configuration on first use.
 */
export const executeSessionEndHooks = async (
  sessionId: string,
  workingDir: string,
  duration: number,
  messageCount: number
): Promise<void> => {
  if (!hooksCache.loaded) {
    await loadHooks(workingDir);
  }
  await executeHooks(
    "SessionEnd",
    { sessionId, workingDir, duration, messageCount },
    workingDir,
  );
};
/**
 * Run UserPromptSubmit hooks for a prompt about to be sent; lazily loads
 * the hook configuration on first use. The result can block or modify.
 */
export const executeUserPromptSubmitHooks = async (
  sessionId: string,
  prompt: string,
  workingDir: string
): Promise<HookResult> => {
  if (!hooksCache.loaded) {
    await loadHooks(workingDir);
  }
  return executeHooks("UserPromptSubmit", { sessionId, prompt, workingDir }, workingDir);
};
/**
 * Run Stop hooks with the reason the agent stopped; lazily loads the
 * hook configuration on first use.
 */
export const executeStopHooks = async (
  sessionId: string,
  workingDir: string,
  reason: "interrupt" | "complete" | "error"
): Promise<void> => {
  if (!hooksCache.loaded) {
    await loadHooks(workingDir);
  }
  await executeHooks("Stop", { sessionId, workingDir, reason }, workingDir);
};
/** Whether loadHooks() has populated the cache. */
export const isHooksLoaded = (): boolean => hooksCache.loaded;
/** A fresh array of every cached hook, global first then local. */
export const getAllHooks = (): HookDefinition[] =>
  hooksCache.global.concat(hooksCache.local);
/** Reset the cache to its unloaded state (empty global and local lists). */
export const clearHooksCache = (): void => {
  Object.assign(hooksCache, { global: [], local: [], loaded: false });
};

431
src/services/lsp/client.ts Normal file
View File

@@ -0,0 +1,431 @@
/**
* LSP Client Implementation
*
* Handles LSP protocol communication with language servers
*/
import type { ChildProcess } from "child_process";
import { createInterface } from "readline";
import { EventEmitter } from "events";
import { getLanguageId } from "@services/lsp/language";
/** Zero-based line/character coordinates within a document (LSP convention). */
export interface Position {
  line: number;
  character: number;
}
/** Half-open span between two positions. */
export interface Range {
  start: Position;
  end: Position;
}
/** A document URI plus the range inside it. */
export interface Location {
  uri: string;
  range: Range;
}
/** One problem reported by a server via textDocument/publishDiagnostics. */
export interface Diagnostic {
  range: Range;
  severity?: 1 | 2 | 3 | 4; // Error, Warning, Info, Hint
  code?: string | number;
  source?: string;
  message: string;
}
/** One entry from a textDocument/completion response. */
export interface CompletionItem {
  label: string;
  kind?: number; // LSP CompletionItemKind numeric code
  detail?: string;
  documentation?: string | { kind: string; value: string };
  insertText?: string;
}
/** Hierarchical symbol from textDocument/documentSymbol. */
export interface DocumentSymbol {
  name: string;
  kind: number; // LSP SymbolKind numeric code
  range: Range;
  selectionRange: Range;
  children?: DocumentSymbol[];
}
/** Result of textDocument/hover; contents shape varies by server. */
export interface Hover {
  contents: string | { kind: string; value: string } | Array<string | { kind: string; value: string }>;
  range?: Range;
}
/** Summary of one connected client, as returned by LSPClient.getInfo(). */
export interface LSPClientInfo {
  serverId: string;
  root: string;
  capabilities: Record<string, unknown>;
}
/** Events an LSPClient instance emits. */
export interface LSPClientEvents {
  diagnostics: (uri: string, diagnostics: Diagnostic[]) => void;
  error: (error: Error) => void;
  close: () => void;
}
// JSON-RPC request id; monotonically increasing per client.
type RequestId = number;
// Resolver pair parked until the server answers the matching request id.
interface PendingRequest {
  resolve: (result: unknown) => void;
  reject: (error: Error) => void;
}
/**
 * A single connection to one language-server process.
 *
 * Speaks JSON-RPC 2.0 over the server's stdio using LSP base-protocol
 * framing (Content-Length header, blank line, JSON body). Tracks pending
 * requests by id, open-document versions, and the latest diagnostics
 * published per URI.
 *
 * Emits: "diagnostics" (uri, Diagnostic[]), "error" (Error), "close".
 */
export class LSPClient extends EventEmitter {
  private process: ChildProcess;
  private serverId: string;
  private root: string;
  // Monotonically increasing JSON-RPC request id
  private requestId: RequestId = 0;
  private pendingRequests: Map<RequestId, PendingRequest> = new Map();
  private initialized: boolean = false;
  private capabilities: Record<string, unknown> = {};
  private openFiles: Map<string, number> = new Map(); // uri -> version
  private diagnosticsMap: Map<string, Diagnostic[]> = new Map();
  private buffer: string = "";
  constructor(process: ChildProcess, serverId: string, root: string) {
    super();
    this.process = process;
    this.serverId = serverId;
    this.root = root;
    this.setupHandlers();
  }
  /**
   * Wire up message parsing and process lifecycle handlers.
   *
   * Parses the LSP base protocol line-by-line via readline: header lines
   * are consumed until a blank line, then body lines accumulate until the
   * declared Content-Length is reached.
   *
   * NOTE(review): this framing assumes each JSON body contains no raw
   * newlines (true for JSON.stringify output) and compares the UTF-16
   * string length against Content-Length, which the spec defines in
   * BYTES — multi-byte UTF-8 payloads could mis-frame. Confirm against
   * the servers actually used.
   */
  private setupHandlers(): void {
    const rl = createInterface({
      input: this.process.stdout!,
      crlfDelay: Infinity,
    });
    let contentLength = 0;
    let headers = true;
    rl.on("line", (line) => {
      if (headers) {
        if (line.startsWith("Content-Length:")) {
          contentLength = parseInt(line.slice(15).trim(), 10);
        } else if (line === "") {
          // Blank line ends the header section; body follows
          headers = false;
          this.buffer = "";
        }
      } else {
        this.buffer += line;
        if (this.buffer.length >= contentLength) {
          try {
            const message = JSON.parse(this.buffer);
            this.handleMessage(message);
          } catch {
            // Ignore parse errors
          }
          // Reset for the next message's headers
          headers = true;
          contentLength = 0;
          this.buffer = "";
        }
      }
    });
    this.process.on("close", () => {
      this.emit("close");
    });
    this.process.on("error", (err) => {
      this.emit("error", err);
    });
  }
  /**
   * Route one decoded JSON-RPC message: settle the matching pending
   * request, or treat it as a server notification.
   */
  private handleMessage(message: {
    id?: RequestId;
    method?: string;
    result?: unknown;
    error?: { code: number; message: string };
    params?: unknown;
  }): void {
    // Response to our request
    if (message.id !== undefined && this.pendingRequests.has(message.id)) {
      const pending = this.pendingRequests.get(message.id)!;
      this.pendingRequests.delete(message.id);
      if (message.error) {
        pending.reject(new Error(message.error.message));
      } else {
        pending.resolve(message.result);
      }
      return;
    }
    // Notification from server
    if (message.method) {
      this.handleNotification(message.method, message.params);
    }
  }
  /**
   * Handle server-initiated notifications. Currently only diagnostics are
   * stored (replacing any previous set for that URI) and re-emitted.
   */
  private handleNotification(method: string, params: unknown): void {
    if (method === "textDocument/publishDiagnostics") {
      const { uri, diagnostics } = params as { uri: string; diagnostics: Diagnostic[] };
      this.diagnosticsMap.set(uri, diagnostics);
      this.emit("diagnostics", uri, diagnostics);
    }
    // Handle other notifications as needed
  }
  /** Frame and write one JSON-RPC message to the server's stdin. */
  private send(message: Record<string, unknown>): void {
    const content = JSON.stringify(message);
    // Content-Length is correctly computed in bytes here
    const header = `Content-Length: ${Buffer.byteLength(content)}\r\n\r\n`;
    this.process.stdin!.write(header + content);
  }
  /**
   * Send a request and await its response; rejects after 30s if the
   * server never answers.
   *
   * NOTE(review): the timeout timer is not cleared on success, so it can
   * keep the event loop alive for up to 30s per request — confirm
   * acceptable, or clear it when the request settles.
   */
  private async request<T>(method: string, params?: unknown): Promise<T> {
    const id = ++this.requestId;
    return new Promise<T>((resolve, reject) => {
      this.pendingRequests.set(id, {
        resolve: resolve as (result: unknown) => void,
        reject,
      });
      this.send({
        jsonrpc: "2.0",
        id,
        method,
        params,
      });
      // Timeout after 30 seconds
      setTimeout(() => {
        if (this.pendingRequests.has(id)) {
          this.pendingRequests.delete(id);
          reject(new Error(`Request ${method} timed out`));
        }
      }, 30000);
    });
  }
  /** Send a fire-and-forget notification (no id, no response expected). */
  private notify(method: string, params?: unknown): void {
    this.send({
      jsonrpc: "2.0",
      method,
      params,
    });
  }
  /**
   * Perform the LSP initialize handshake (idempotent) and advertise this
   * client's capabilities; stores the server's capabilities and sends the
   * required "initialized" notification.
   */
  async initialize(): Promise<void> {
    if (this.initialized) return;
    const result = await this.request<{ capabilities: Record<string, unknown> }>("initialize", {
      processId: process.pid,
      rootUri: `file://${this.root}`,
      rootPath: this.root,
      capabilities: {
        textDocument: {
          synchronization: {
            didSave: true,
            didOpen: true,
            didClose: true,
            didChange: 2, // Incremental
          },
          completion: {
            completionItem: {
              snippetSupport: true,
              documentationFormat: ["markdown", "plaintext"],
            },
          },
          hover: {
            contentFormat: ["markdown", "plaintext"],
          },
          definition: {
            linkSupport: true,
          },
          references: {},
          documentSymbol: {
            hierarchicalDocumentSymbolSupport: true,
          },
          publishDiagnostics: {
            relatedInformation: true,
          },
        },
        workspace: {
          workspaceFolders: true,
          didChangeConfiguration: {
            dynamicRegistration: true,
          },
        },
      },
      workspaceFolders: [
        {
          uri: `file://${this.root}`,
          // NOTE(review): splitting on "/" yields the whole path on
          // Windows-style roots and undefined for roots ending in "/" —
          // confirm roots are always POSIX-style absolute dirs.
          name: this.root.split("/").pop(),
        },
      ],
    });
    this.capabilities = result.capabilities;
    this.initialized = true;
    this.notify("initialized", {});
  }
  /** Announce a newly opened document (version 1) to the server. */
  async openFile(filePath: string, content: string): Promise<void> {
    const uri = `file://${filePath}`;
    const languageId = getLanguageId(filePath) ?? "plaintext";
    const version = 1;
    this.openFiles.set(uri, version);
    this.notify("textDocument/didOpen", {
      textDocument: {
        uri,
        languageId,
        version,
      },
      // full text is sent despite advertising incremental sync above
      ...{},
    });
  }
  /** Send the full new content as a didChange, bumping the version. */
  async updateFile(filePath: string, content: string): Promise<void> {
    const uri = `file://${filePath}`;
    const currentVersion = this.openFiles.get(uri) ?? 0;
    const newVersion = currentVersion + 1;
    this.openFiles.set(uri, newVersion);
    this.notify("textDocument/didChange", {
      textDocument: { uri, version: newVersion },
      contentChanges: [{ text: content }],
    });
  }
  /** Close the document and drop its cached diagnostics. */
  async closeFile(filePath: string): Promise<void> {
    const uri = `file://${filePath}`;
    this.openFiles.delete(uri);
    this.diagnosticsMap.delete(uri);
    this.notify("textDocument/didClose", {
      textDocument: { uri },
    });
  }
  /** Hover info at a position, or null on any request failure. */
  async getHover(filePath: string, position: Position): Promise<Hover | null> {
    const uri = `file://${filePath}`;
    try {
      return await this.request<Hover | null>("textDocument/hover", {
        textDocument: { uri },
        position,
      });
    } catch {
      return null;
    }
  }
  /** Definition location(s) at a position, or null on failure. */
  async getDefinition(filePath: string, position: Position): Promise<Location | Location[] | null> {
    const uri = `file://${filePath}`;
    try {
      return await this.request<Location | Location[] | null>("textDocument/definition", {
        textDocument: { uri },
        position,
      });
    } catch {
      return null;
    }
  }
  /** All reference locations at a position; empty list on failure. */
  async getReferences(filePath: string, position: Position, includeDeclaration = true): Promise<Location[]> {
    const uri = `file://${filePath}`;
    try {
      const result = await this.request<Location[] | null>("textDocument/references", {
        textDocument: { uri },
        position,
        context: { includeDeclaration },
      });
      return result ?? [];
    } catch {
      return [];
    }
  }
  /**
   * Completion items at a position; normalizes both response shapes
   * (bare array or CompletionList with `items`). Empty list on failure.
   */
  async getCompletions(filePath: string, position: Position): Promise<CompletionItem[]> {
    const uri = `file://${filePath}`;
    try {
      const result = await this.request<{ items: CompletionItem[] } | CompletionItem[] | null>(
        "textDocument/completion",
        {
          textDocument: { uri },
          position,
        },
      );
      if (!result) return [];
      return Array.isArray(result) ? result : result.items;
    } catch {
      return [];
    }
  }
  /** Document symbols for a file; empty list on failure. */
  async getDocumentSymbols(filePath: string): Promise<DocumentSymbol[]> {
    const uri = `file://${filePath}`;
    try {
      const result = await this.request<DocumentSymbol[] | null>("textDocument/documentSymbol", {
        textDocument: { uri },
      });
      return result ?? [];
    } catch {
      return [];
    }
  }
  /** Cached diagnostics for one file, or a flat list across all files. */
  getDiagnostics(filePath?: string): Diagnostic[] {
    if (filePath) {
      const uri = `file://${filePath}`;
      return this.diagnosticsMap.get(uri) ?? [];
    }
    // Return all diagnostics
    const all: Diagnostic[] = [];
    for (const diagnostics of this.diagnosticsMap.values()) {
      all.push(...diagnostics);
    }
    return all;
  }
  /** Defensive copy of the uri -> diagnostics map. */
  getAllDiagnostics(): Map<string, Diagnostic[]> {
    return new Map(this.diagnosticsMap);
  }
  /** Identity and server-advertised capabilities for this connection. */
  getInfo(): LSPClientInfo {
    return {
      serverId: this.serverId,
      root: this.root,
      capabilities: this.capabilities,
    };
  }
  /** Whether didOpen has been sent (and didClose not) for this path. */
  isFileOpen(filePath: string): boolean {
    const uri = `file://${filePath}`;
    return this.openFiles.has(uri);
  }
  /**
   * Polite shutdown: LSP shutdown request, then exit notification, then
   * kill the process; kills unconditionally if the request fails.
   */
  shutdown(): void {
    this.request("shutdown", null)
      .then(() => {
        this.notify("exit");
        this.process.kill();
      })
      .catch(() => {
        this.process.kill();
      });
  }
}
/** Factory wrapper over the LSPClient constructor. */
export const createLSPClient = (
  process: ChildProcess,
  serverId: string,
  root: string,
): LSPClient => new LSPClient(process, serverId, root);

357
src/services/lsp/index.ts Normal file
View File

@@ -0,0 +1,357 @@
/**
* LSP Service - Main entry point for language server functionality
*
* Provides:
* - Language detection
* - Server startup/shutdown management
* - Real-time diagnostics
* - Code completion
* - Document symbols
* - References finding
* - Definition jumping
* - Hover information
*/
import fs from "fs/promises";
import path from "path";
import { EventEmitter } from "events";
import {
LSPClient,
createLSPClient,
type Diagnostic,
type Position,
type Location,
type CompletionItem,
type DocumentSymbol,
type Hover,
} from "@services/lsp/client";
import {
getServersForFile,
findRootForServer,
spawnServer,
type ServerInfo,
} from "@services/lsp/server";
import { getLanguageId } from "@services/lsp/language";
/** Registry of language-server connections, keyed by `${root}:${serverId}`. */
interface LSPState {
  clients: Map<string, LSPClient>; // key: `${root}:${serverId}`
  spawning: Map<string, Promise<LSPClient | null>>; // in-flight spawns, shared by concurrent callers
  broken: Set<string>; // keys that failed to spawn/init; never retried this process
}
// Module-level singleton state for all connections.
const state: LSPState = {
  clients: new Map(),
  spawning: new Map(),
  broken: new Set(),
};
// Module-level emitter: "diagnostics", "clientConnected", "clientClosed".
const events = new EventEmitter();
/** Cache key for one (project root, server) pair. */
const getClientKey = (root: string, serverId: string): string =>
  [root, serverId].join(":");
/**
 * Find (or lazily spawn) the LSP clients responsible for a file.
 *
 * For each server that claims the file's language: resolve the project
 * root, then reuse an existing client, join an in-flight spawn, or start
 * a new one. Keys in `broken` are skipped permanently — a server that
 * failed once is not retried for the lifetime of the process.
 */
const getClientsForFile = async (filePath: string): Promise<LSPClient[]> => {
  const servers = getServersForFile(filePath);
  const clients: LSPClient[] = [];
  for (const server of servers) {
    const root = await findRootForServer(filePath, server);
    if (!root) continue;
    const key = getClientKey(root, server.id);
    // Skip broken servers
    if (state.broken.has(key)) continue;
    // Check for existing client
    if (state.clients.has(key)) {
      clients.push(state.clients.get(key)!);
      continue;
    }
    // Check for in-flight spawn (another call already started this server)
    if (state.spawning.has(key)) {
      const client = await state.spawning.get(key);
      if (client) clients.push(client);
      continue;
    }
    // Spawn new client; publish the promise FIRST so concurrent callers
    // share this attempt instead of spawning duplicates
    const spawnPromise = spawnClient(server, root);
    state.spawning.set(key, spawnPromise);
    try {
      const client = await spawnPromise;
      if (client) {
        clients.push(client);
      }
    } finally {
      state.spawning.delete(key);
    }
  }
  return clients;
};
/**
 * Spawn and initialize one language server, wiring its events into the
 * module-level emitter.
 *
 * On any failure (spawn returned nothing, or initialize threw) the key is
 * added to state.broken so the server is not retried. A connected client
 * is stored in state.clients and removed again when it closes or errors.
 */
const spawnClient = async (
  server: ServerInfo,
  root: string,
): Promise<LSPClient | null> => {
  const key = getClientKey(root, server.id);
  try {
    const handle = await spawnServer(server, root);
    if (!handle) {
      state.broken.add(key);
      return null;
    }
    const client = createLSPClient(handle.process, server.id, root);
    client.on("close", () => {
      state.clients.delete(key);
      events.emit("clientClosed", { serverId: server.id, root });
    });
    client.on("error", () => {
      // A client error is treated as fatal for this server: drop and blacklist
      state.clients.delete(key);
      state.broken.add(key);
    });
    client.on("diagnostics", (uri: string, diagnostics: Diagnostic[]) => {
      // Re-publish per-client diagnostics on the module-level emitter
      events.emit("diagnostics", { uri, diagnostics, serverId: server.id });
    });
    await client.initialize();
    state.clients.set(key, client);
    events.emit("clientConnected", { serverId: server.id, root });
    return client;
  } catch {
    state.broken.add(key);
    return null;
  }
};
// Public API
/**
 * Open a file on every responsible language server that does not already
 * have it open. No-op when no server claims the file (the file is only
 * read from disk when at least one client exists).
 */
export const openFile = async (filePath: string): Promise<void> => {
  const absolutePath = path.resolve(filePath);
  const clients = await getClientsForFile(absolutePath);
  if (clients.length === 0) {
    return;
  }
  const content = await fs.readFile(absolutePath, "utf-8");
  const pending = clients.filter((client) => !client.isFileOpen(absolutePath));
  for (const client of pending) {
    await client.openFile(absolutePath, content);
  }
};
/**
 * Push new content for a file to every responsible server: a didChange
 * when the file is already open there, a didOpen otherwise.
 */
export const updateFile = async (filePath: string, content: string): Promise<void> => {
  const absolutePath = path.resolve(filePath);
  for (const client of await getClientsForFile(absolutePath)) {
    const alreadyOpen = client.isFileOpen(absolutePath);
    await (alreadyOpen
      ? client.updateFile(absolutePath, content)
      : client.openFile(absolutePath, content));
  }
};
/** Close a file on every responsible server that currently has it open. */
export const closeFile = async (filePath: string): Promise<void> => {
  const absolutePath = path.resolve(filePath);
  const clients = await getClientsForFile(absolutePath);
  for (const client of clients) {
    if (!client.isFileOpen(absolutePath)) {
      continue;
    }
    await client.closeFile(absolutePath);
  }
};
/**
 * Hover information at a position: the first truthy answer from any
 * responsible server wins; null when none has one.
 */
export const getHover = async (filePath: string, position: Position): Promise<Hover | null> => {
  const absolutePath = path.resolve(filePath);
  for (const client of await getClientsForFile(absolutePath)) {
    const hover = await client.getHover(absolutePath, position);
    if (hover) {
      return hover;
    }
  }
  return null;
};
/**
 * Definition location(s) at a position: the first truthy answer from any
 * responsible server wins; null when none has one.
 */
export const getDefinition = async (
  filePath: string,
  position: Position,
): Promise<Location | Location[] | null> => {
  const absolutePath = path.resolve(filePath);
  for (const client of await getClientsForFile(absolutePath)) {
    const definition = await client.getDefinition(absolutePath, position);
    if (definition) {
      return definition;
    }
  }
  return null;
};
/**
 * Reference locations at a position, aggregated across every responsible
 * server and deduplicated by (uri, start line, start character).
 */
export const getReferences = async (
  filePath: string,
  position: Position,
  includeDeclaration = true,
): Promise<Location[]> => {
  const absolutePath = path.resolve(filePath);
  const clients = await getClientsForFile(absolutePath);
  const collected: Location[] = [];
  for (const client of clients) {
    const refs = await client.getReferences(absolutePath, position, includeDeclaration);
    collected.push(...refs);
  }
  const seen = new Set<string>();
  const unique: Location[] = [];
  for (const loc of collected) {
    const key = `${loc.uri}:${loc.range.start.line}:${loc.range.start.character}`;
    if (!seen.has(key)) {
      seen.add(key);
      unique.push(loc);
    }
  }
  return unique;
};
/**
 * Completion items at a position, concatenated from every responsible
 * server (no deduplication).
 */
export const getCompletions = async (
  filePath: string,
  position: Position,
): Promise<CompletionItem[]> => {
  const absolutePath = path.resolve(filePath);
  const clients = await getClientsForFile(absolutePath);
  const merged: CompletionItem[] = [];
  for (const client of clients) {
    merged.push(...(await client.getCompletions(absolutePath, position)));
  }
  return merged;
};
/** Document symbols for the file; the first client reporting any wins. */
export const getDocumentSymbols = async (filePath: string): Promise<DocumentSymbol[]> => {
  const absolutePath = path.resolve(filePath);
  for (const client of await getClientsForFile(absolutePath)) {
    const symbols = await client.getDocumentSymbols(absolutePath);
    if (symbols.length > 0) {
      return symbols;
    }
  }
  return [];
};
/**
 * Merge diagnostics from all clients, keyed by document URI.
 * When `filePath` is given, only that file's diagnostics are included.
 */
export const getDiagnostics = (filePath?: string): Map<string, Diagnostic[]> => {
  // Hoist the filter URI; it is invariant across the loops below.
  const targetUri = filePath ? `file://${path.resolve(filePath)}` : null;
  const merged = new Map<string, Diagnostic[]>();
  for (const client of state.clients.values()) {
    for (const [uri, diagnostics] of client.getAllDiagnostics()) {
      if (targetUri !== null && uri !== targetUri) continue;
      merged.set(uri, [...(merged.get(uri) ?? []), ...diagnostics]);
    }
  }
  return merged;
};
/** Snapshot of currently-connected clients and servers marked broken. */
export const getStatus = (): {
  connected: Array<{ serverId: string; root: string }>;
  broken: string[];
} => {
  return {
    connected: [...state.clients.values()].map((client) => client.getInfo()),
    broken: [...state.broken],
  };
};
/** Whether at least one known language server claims this file. */
export const hasSupport = (filePath: string): boolean => {
  return getServersForFile(filePath).length > 0;
};
/** LSP language id for the file, or null when unrecognized. */
export const getLanguage = (filePath: string): string | null => {
  return getLanguageId(filePath);
};
/** Stop every client, then reset all connection bookkeeping. */
export const shutdown = (): void => {
  [...state.clients.values()].forEach((client) => client.shutdown());
  state.clients.clear();
  state.spawning.clear();
  state.broken.clear();
};
/** Subscribe to diagnostics updates; returns an unsubscribe function. */
export const onDiagnostics = (
  callback: (data: { uri: string; diagnostics: Diagnostic[]; serverId: string }) => void,
): (() => void) => {
  events.on("diagnostics", callback);
  return () => {
    events.off("diagnostics", callback);
  };
};
/** Subscribe to client-connected events; returns an unsubscribe function. */
export const onClientConnected = (
  callback: (data: { serverId: string; root: string }) => void,
): (() => void) => {
  events.on("clientConnected", callback);
  return () => {
    events.off("clientConnected", callback);
  };
};
/** Subscribe to client-closed events; returns an unsubscribe function. */
export const onClientClosed = (
  callback: (data: { serverId: string; root: string }) => void,
): (() => void) => {
  events.on("clientClosed", callback);
  return () => {
    events.off("clientClosed", callback);
  };
};
/**
 * Aggregate facade over the LSP layer: file sync, language queries,
 * diagnostics access, lifecycle control, and event subscriptions.
 */
export const lspService = {
  openFile,
  updateFile,
  closeFile,
  getHover,
  getDefinition,
  getReferences,
  getCompletions,
  getDocumentSymbols,
  getDiagnostics,
  getStatus,
  hasSupport,
  getLanguage,
  shutdown,
  onDiagnostics,
  onClientConnected,
  onClientClosed,
};
// Re-export types
export type {
Diagnostic,
Position,
Range,
Location,
CompletionItem,
DocumentSymbol,
Hover,
} from "@services/lsp/client";
export { getLanguageId, getSupportedExtensions } from "@services/lsp/language";
export { SERVERS, getAvailableServers } from "@services/lsp/server";

View File

@@ -0,0 +1,182 @@
/**
* Language Detection and Extension Mapping
*
* Maps file extensions to LSP language IDs
*/
/**
 * Extension (or bare filename, e.g. "Dockerfile") → LSP language id.
 * Keys are lowercase; lookups normalize case accordingly.
 */
export const LANGUAGE_EXTENSIONS: Record<string, string> = {
  // TypeScript/JavaScript
  ".ts": "typescript",
  ".tsx": "typescriptreact",
  ".js": "javascript",
  ".jsx": "javascriptreact",
  ".mjs": "javascript",
  ".cjs": "javascript",
  ".mts": "typescript",
  ".cts": "typescript",
  // Web
  ".html": "html",
  ".htm": "html",
  ".css": "css",
  ".scss": "scss",
  ".sass": "sass",
  ".less": "less",
  ".vue": "vue",
  ".svelte": "svelte",
  ".astro": "astro",
  // Python
  ".py": "python",
  ".pyi": "python",
  ".pyw": "python",
  // Go
  ".go": "go",
  ".mod": "go.mod",
  ".sum": "go.sum",
  // Rust
  ".rs": "rust",
  // C/C++
  ".c": "c",
  ".h": "c",
  ".cpp": "cpp",
  ".cxx": "cpp",
  ".cc": "cpp",
  ".hpp": "cpp",
  ".hxx": "cpp",
  ".hh": "cpp",
  // Java/Kotlin
  ".java": "java",
  ".kt": "kotlin",
  ".kts": "kotlin",
  // C#/F#
  ".cs": "csharp",
  ".fs": "fsharp",
  ".fsx": "fsharp",
  // Ruby
  ".rb": "ruby",
  ".rake": "ruby",
  ".gemspec": "ruby",
  // PHP
  ".php": "php",
  // Swift
  ".swift": "swift",
  // Lua
  ".lua": "lua",
  // Shell
  ".sh": "shellscript",
  ".bash": "shellscript",
  ".zsh": "shellscript",
  ".fish": "fish",
  // Data formats
  ".json": "json",
  ".jsonc": "jsonc",
  ".yaml": "yaml",
  ".yml": "yaml",
  ".toml": "toml",
  ".xml": "xml",
  // Markdown/Docs
  ".md": "markdown",
  ".mdx": "mdx",
  ".rst": "restructuredtext",
  // SQL
  ".sql": "sql",
  // Docker
  Dockerfile: "dockerfile",
  ".dockerfile": "dockerfile",
  // Config
  ".env": "dotenv",
  ".ini": "ini",
  ".conf": "conf",
  // Elixir
  ".ex": "elixir",
  ".exs": "elixir",
  // Zig
  ".zig": "zig",
  // Dart
  ".dart": "dart",
  // Haskell
  ".hs": "haskell",
  ".lhs": "haskell",
  // OCaml
  ".ml": "ocaml",
  ".mli": "ocaml",
  // Clojure
  ".clj": "clojure",
  ".cljs": "clojurescript",
  ".cljc": "clojure",
  // Scala
  ".scala": "scala",
  ".sc": "scala",
  // Erlang
  ".erl": "erlang",
  ".hrl": "erlang",
  // Nix
  ".nix": "nix",
  // Terraform
  ".tf": "terraform",
  ".tfvars": "terraform",
  // Prisma
  ".prisma": "prisma",
  // GraphQL
  ".graphql": "graphql",
  ".gql": "graphql",
  // Protobuf
  ".proto": "proto",
  // Makefile
  Makefile: "makefile",
  ".mk": "makefile",
  // Gleam
  ".gleam": "gleam",
  // Typst
  ".typ": "typst",
};
/**
 * Resolve the LSP language id for a path.
 *
 * Works from the basename so that dots in directory names (e.g.
 * "v1.2/src/Main.ts") never leak into the computed extension, and lowercases
 * the extension so "Main.TS" still resolves. Falls back to matching the bare
 * filename (Dockerfile, Makefile). Returns null when nothing matches.
 */
export const getLanguageId = (filePath: string): string | null => {
  const fileName = filePath.split(/[\\/]/).pop() ?? "";
  const dotIndex = fileName.lastIndexOf(".");
  // Hidden files like ".env" keep their leading dot as the extension.
  const ext = dotIndex >= 0 ? fileName.slice(dotIndex).toLowerCase() : "";
  return LANGUAGE_EXTENSIONS[ext] ?? LANGUAGE_EXTENSIONS[fileName] ?? null;
};
/** All table keys (extensions or filenames) mapping to `languageId`. */
export const getExtensionsForLanguage = (languageId: string): string[] => {
  return Object.entries(LANGUAGE_EXTENSIONS)
    .filter(([_, lang]) => lang === languageId)
    .map(([ext]) => ext);
};
/** Every extension/filename key the mapping knows about. */
export const getSupportedExtensions = (): string[] => {
  return Object.keys(LANGUAGE_EXTENSIONS);
};

267
src/services/lsp/server.ts Normal file
View File

@@ -0,0 +1,267 @@
/**
* LSP Server Definitions
*
* Defines how to find and spawn language servers
*/
import { spawn, execSync, type ChildProcess } from "child_process";
import path from "path";
import fs from "fs/promises";
/** A spawned language-server process plus any advertised capabilities. */
export interface ServerHandle {
  process: ChildProcess;
  // NOTE(review): presumably filled in after the LSP initialize handshake;
  // spawnServer below leaves it unset — confirm where it is populated.
  capabilities?: Record<string, unknown>;
}
/** Static description of a language server: how to match files and spawn it. */
export interface ServerInfo {
  id: string;
  name: string;
  // File extensions (".ts") this server handles.
  extensions: string[];
  // Marker files/dirs used to locate the project root (e.g. "Cargo.toml").
  rootPatterns: string[];
  // Executable name, resolved on PATH at spawn time.
  command: string;
  args?: string[];
  // Extra environment variables merged over process.env when spawning.
  env?: Record<string, string>;
}
/** True when `filePath` exists and is accessible to this process. */
const fileExists = async (filePath: string): Promise<boolean> => {
  return fs
    .access(filePath)
    .then(() => true)
    .catch(() => false);
};
/**
 * Walk up from `startDir` looking for the first directory containing any of
 * `patterns`; returns null when the filesystem root is reached first.
 */
const findProjectRoot = async (
  startDir: string,
  patterns: string[],
): Promise<string | null> => {
  const fsRoot = path.parse(startDir).root;
  for (let dir = startDir; dir !== fsRoot; dir = path.dirname(dir)) {
    for (const pattern of patterns) {
      if (await fileExists(path.join(dir, pattern))) {
        return dir;
      }
    }
  }
  return null;
};
/**
 * Resolve an executable on PATH via the platform lookup tool
 * (`where` on Windows, `which` elsewhere). Returns the first match or null.
 */
const findBinary = async (name: string): Promise<string | null> => {
  const lookup = process.platform === "win32" ? "where" : "which";
  try {
    const output = execSync(`${lookup} ${name}`, {
      encoding: "utf-8",
      stdio: ["pipe", "pipe", "pipe"],
    });
    const [first] = output.trim().split("\n");
    return first || null;
  } catch {
    // Lookup tool exits non-zero when the binary is absent.
    return null;
  }
};
/** Known language servers, keyed by a short internal id. */
export const SERVERS: Record<string, ServerInfo> = {
  typescript: {
    id: "typescript",
    name: "TypeScript Language Server",
    extensions: [".ts", ".tsx", ".js", ".jsx", ".mjs", ".cjs"],
    rootPatterns: ["package.json", "tsconfig.json", "jsconfig.json"],
    command: "typescript-language-server",
    args: ["--stdio"],
  },
  deno: {
    id: "deno",
    name: "Deno Language Server",
    extensions: [".ts", ".tsx", ".js", ".jsx"],
    rootPatterns: ["deno.json", "deno.jsonc"],
    command: "deno",
    args: ["lsp"],
  },
  python: {
    id: "python",
    name: "Pyright",
    extensions: [".py", ".pyi"],
    rootPatterns: ["pyproject.toml", "setup.py", "requirements.txt", "pyrightconfig.json"],
    command: "pyright-langserver",
    args: ["--stdio"],
  },
  gopls: {
    id: "gopls",
    name: "Go Language Server",
    extensions: [".go"],
    rootPatterns: ["go.mod", "go.work"],
    command: "gopls",
    args: ["serve"],
  },
  rust: {
    id: "rust-analyzer",
    name: "Rust Analyzer",
    extensions: [".rs"],
    rootPatterns: ["Cargo.toml"],
    command: "rust-analyzer",
  },
  clangd: {
    id: "clangd",
    name: "Clangd",
    extensions: [".c", ".cpp", ".h", ".hpp", ".cc", ".cxx"],
    rootPatterns: ["compile_commands.json", "CMakeLists.txt", ".clangd"],
    command: "clangd",
  },
  lua: {
    id: "lua-language-server",
    name: "Lua Language Server",
    extensions: [".lua"],
    rootPatterns: [".luarc.json", ".luarc.jsonc"],
    command: "lua-language-server",
  },
  bash: {
    id: "bash-language-server",
    name: "Bash Language Server",
    extensions: [".sh", ".bash", ".zsh"],
    rootPatterns: [".bashrc", ".zshrc"],
    command: "bash-language-server",
    args: ["start"],
  },
  yaml: {
    id: "yaml-language-server",
    name: "YAML Language Server",
    extensions: [".yaml", ".yml"],
    rootPatterns: [".yamllint", ".yaml-lint.yml"],
    command: "yaml-language-server",
    args: ["--stdio"],
  },
  json: {
    id: "vscode-json-language-server",
    name: "JSON Language Server",
    extensions: [".json", ".jsonc"],
    rootPatterns: ["package.json", "tsconfig.json"],
    command: "vscode-json-language-server",
    args: ["--stdio"],
  },
  html: {
    id: "vscode-html-language-server",
    name: "HTML Language Server",
    extensions: [".html", ".htm"],
    rootPatterns: ["package.json", "index.html"],
    command: "vscode-html-language-server",
    args: ["--stdio"],
  },
  css: {
    id: "vscode-css-language-server",
    name: "CSS Language Server",
    extensions: [".css", ".scss", ".less"],
    rootPatterns: ["package.json"],
    command: "vscode-css-language-server",
    args: ["--stdio"],
  },
  eslint: {
    id: "eslint",
    name: "ESLint Language Server",
    extensions: [".ts", ".tsx", ".js", ".jsx"],
    rootPatterns: [".eslintrc", ".eslintrc.js", ".eslintrc.json", "eslint.config.js"],
    command: "vscode-eslint-language-server",
    args: ["--stdio"],
  },
  svelte: {
    id: "svelte-language-server",
    name: "Svelte Language Server",
    extensions: [".svelte"],
    rootPatterns: ["svelte.config.js", "svelte.config.ts"],
    command: "svelteserver",
    args: ["--stdio"],
  },
  vue: {
    id: "vue-language-server",
    name: "Vue Language Server",
    extensions: [".vue"],
    rootPatterns: ["vue.config.js", "vite.config.ts", "nuxt.config.ts"],
    command: "vue-language-server",
    args: ["--stdio"],
  },
  prisma: {
    id: "prisma-language-server",
    name: "Prisma Language Server",
    extensions: [".prisma"],
    rootPatterns: ["schema.prisma"],
    command: "prisma-language-server",
    args: ["--stdio"],
  },
  terraform: {
    id: "terraform-ls",
    name: "Terraform Language Server",
    extensions: [".tf", ".tfvars"],
    rootPatterns: [".terraform", "main.tf"],
    command: "terraform-ls",
    args: ["serve"],
  },
  docker: {
    id: "docker-langserver",
    name: "Dockerfile Language Server",
    extensions: [".dockerfile"],
    rootPatterns: ["Dockerfile", "docker-compose.yml"],
    command: "docker-langserver",
    args: ["--stdio"],
  },
};
/**
 * All servers whose declared extensions (or bare filename) match `filePath`.
 *
 * The extension is derived from the basename via lastIndexOf and lowercased,
 * so dots in directory names no longer corrupt the lookup and "Main.TS"
 * matches the same servers as "Main.ts".
 */
export const getServersForFile = (filePath: string): ServerInfo[] => {
  const fileName = path.basename(filePath);
  const dotIndex = fileName.lastIndexOf(".");
  const ext = dotIndex >= 0 ? fileName.slice(dotIndex).toLowerCase() : "";
  return Object.values(SERVERS).filter(
    (server) => server.extensions.includes(ext) || server.extensions.includes(fileName),
  );
};
/** Locate the project root for `server`, starting at the file's directory. */
export const findRootForServer = async (
  filePath: string,
  server: ServerInfo,
): Promise<string | null> => {
  return findProjectRoot(path.dirname(filePath), server.rootPatterns);
};
/**
 * Spawn `server` with `root` as its working directory.
 * Returns null when the binary is missing or the process fails to start.
 */
export const spawnServer = async (
  server: ServerInfo,
  root: string,
): Promise<ServerHandle | null> => {
  const binary = await findBinary(server.command);
  if (binary === null) {
    return null;
  }
  const child = spawn(binary, server.args ?? [], {
    cwd: root,
    env: { ...process.env, ...server.env },
    stdio: ["pipe", "pipe", "pipe"],
  });
  // A missing pid means the process never started.
  return child.pid ? { process: child } : null;
};
export const isServerAvailable = async (server: ServerInfo): Promise<boolean> => {
const binary = await findBinary(server.command);
return binary !== null;
};
export const getAvailableServers = async (): Promise<ServerInfo[]> => {
const available: ServerInfo[] = [];
for (const server of Object.values(SERVERS)) {
if (await isServerAvailable(server)) {
available.push(server);
}
}
return available;
};

View File

@@ -0,0 +1,350 @@
/**
* Plugin Loader Service
*
* Discovers and parses plugin manifests and files
*/
import { readdir, readFile, access, constants, stat } from "fs/promises";
import { join, extname, basename } from "path";
import type {
PluginManifest,
PluginDiscoveryResult,
PluginToolDefinition,
PluginCommandDefinition,
} from "@/types/plugin";
import type { HookDefinition } from "@/types/hooks";
import {
PLUGINS_DIR,
PLUGIN_MANIFEST_FILE,
PLUGIN_SUBDIRS,
COMMAND_FILE_EXTENSION,
COMMAND_FRONTMATTER_DELIMITER,
HOOK_SCRIPT_EXTENSIONS,
MAX_PLUGINS,
} from "@constants/plugin";
import { DIRS, LOCAL_CONFIG_DIR } from "@constants/paths";
/**
 * Scan `<baseDir>/<PLUGINS_DIR>` for plugin directories containing a readable
 * manifest file. Missing or unreadable directories yield an empty list; at
 * most MAX_PLUGINS results are returned.
 */
const discoverPluginsInDir = async (
  baseDir: string
): Promise<PluginDiscoveryResult[]> => {
  const pluginsPath = join(baseDir, PLUGINS_DIR);
  const found: PluginDiscoveryResult[] = [];
  try {
    await access(pluginsPath, constants.R_OK);
    for (const entry of await readdir(pluginsPath, { withFileTypes: true })) {
      if (found.length >= MAX_PLUGINS) break;
      if (!entry.isDirectory()) continue;
      const pluginPath = join(pluginsPath, entry.name);
      const manifestPath = join(pluginPath, PLUGIN_MANIFEST_FILE);
      try {
        await access(manifestPath, constants.R_OK);
        found.push({ name: entry.name, path: pluginPath, manifestPath });
      } catch {
        // Directory without a readable manifest — not a plugin.
      }
    }
  } catch {
    // Plugins directory absent or unreadable.
  }
  return found;
};
/**
 * Discover plugins from the global config dir and the project-local dir.
 * When both define a plugin with the same name, the local one wins.
 */
export const discoverPlugins = async (
  workingDir: string
): Promise<PluginDiscoveryResult[]> => {
  const [globalPlugins, localPlugins] = await Promise.all([
    discoverPluginsInDir(DIRS.config),
    discoverPluginsInDir(join(workingDir, LOCAL_CONFIG_DIR)),
  ]);
  // Later entries overwrite earlier ones, so list local plugins last.
  const byName = new Map<string, PluginDiscoveryResult>();
  for (const plugin of [...globalPlugins, ...localPlugins]) {
    byName.set(plugin.name, plugin);
  }
  return [...byName.values()];
};
/**
 * Read and JSON-parse a plugin manifest. Returns null on read failure,
 * malformed JSON, or a manifest missing the required name/version fields.
 */
export const parseManifest = async (
  manifestPath: string
): Promise<PluginManifest | null> => {
  try {
    const raw = await readFile(manifestPath, "utf-8");
    const manifest: PluginManifest = JSON.parse(raw);
    return manifest.name && manifest.version ? manifest : null;
  } catch {
    return null;
  }
};
/**
 * Parse a command markdown file. An optional leading frontmatter section
 * (delimiter lines) supplies name/description; the remainder is the prompt.
 * Returns null on read errors or an unterminated frontmatter section.
 */
export const parseCommandFile = async (
  filePath: string
): Promise<PluginCommandDefinition | null> => {
  try {
    const content = await readFile(filePath, "utf-8");
    const lines = content.split("\n");
    const fallbackName = basename(filePath, COMMAND_FILE_EXTENSION);
    // Without frontmatter, the whole file is the prompt.
    if (lines[0]?.trim() !== COMMAND_FRONTMATTER_DELIMITER) {
      return {
        name: fallbackName,
        description: `Custom command: ${fallbackName}`,
        prompt: content,
      };
    }
    // Locate the closing delimiter; a missing one means a malformed file.
    const endIndex = lines.findIndex(
      (line, i) => i > 0 && line.trim() === COMMAND_FRONTMATTER_DELIMITER
    );
    if (endIndex === -1) {
      return null;
    }
    // Frontmatter is a flat set of "key: value" pairs.
    const frontmatter: Record<string, string> = {};
    for (const line of lines.slice(1, endIndex)) {
      const colonIndex = line.indexOf(":");
      if (colonIndex > 0) {
        frontmatter[line.slice(0, colonIndex).trim()] = line
          .slice(colonIndex + 1)
          .trim();
      }
    }
    const name = frontmatter.name || fallbackName;
    return {
      name,
      description: frontmatter.description || `Custom command: ${name}`,
      prompt: lines.slice(endIndex + 1).join("\n").trim(),
    };
  } catch {
    return null;
  }
};
/**
 * Dynamically import a plugin tool module and validate its shape.
 * Returns null when the import fails or required fields are missing.
 */
export const loadToolModule = async (
  filePath: string
): Promise<PluginToolDefinition | null> => {
  try {
    const mod = await import(filePath);
    // Accept either a default export or the module object itself.
    const candidate = mod.default || mod;
    const isValid =
      Boolean(candidate.name) &&
      Boolean(candidate.description) &&
      Boolean(candidate.parameters) &&
      Boolean(candidate.execute);
    return isValid ? (candidate as PluginToolDefinition) : null;
  } catch {
    return null;
  }
};
/** Hook events recognized in conventional hook-script filenames. */
const HOOK_EVENT_TYPES = [
  "PreToolUse",
  "PostToolUse",
  "SessionStart",
  "SessionEnd",
  "UserPromptSubmit",
  "Stop",
] as const;
/**
 * Load hook definitions for a plugin: first the hooks declared in the
 * manifest (scripts must exist and be executable), then any script in the
 * hooks directory whose filename contains a known event name ("convention"
 * hooks). Scripts already registered via the manifest are not duplicated.
 */
export const loadPluginHooks = async (
  pluginPath: string,
  manifest: PluginManifest
): Promise<HookDefinition[]> => {
  const hooks: HookDefinition[] = [];
  // Manifest-declared hooks take priority.
  if (manifest.hooks) {
    for (const hookRef of manifest.hooks) {
      const scriptPath = join(pluginPath, PLUGIN_SUBDIRS.hooks, hookRef.script);
      try {
        await access(scriptPath, constants.X_OK);
        hooks.push({
          event: hookRef.event as HookDefinition["event"],
          script: scriptPath,
          timeout: hookRef.timeout,
          name: `${manifest.name}:${hookRef.event}`,
        });
      } catch {
        // Script not found or not executable; skip silently.
      }
    }
  }
  // Convention-based discovery: match event names inside script filenames.
  // HOOK_EVENT_TYPES is hoisted above so the list is not rebuilt per entry.
  const hooksDir = join(pluginPath, PLUGIN_SUBDIRS.hooks);
  try {
    await access(hooksDir, constants.R_OK);
    for (const entry of await readdir(hooksDir)) {
      const ext = extname(entry);
      if (!HOOK_SCRIPT_EXTENSIONS.includes(ext)) continue;
      const scriptPath = join(hooksDir, entry);
      const scriptStat = await stat(scriptPath);
      if (!scriptStat.isFile()) continue;
      const baseName = basename(entry, ext);
      for (const eventType of HOOK_EVENT_TYPES) {
        if (!baseName.toLowerCase().includes(eventType.toLowerCase())) continue;
        // Avoid double-registering a script already added via the manifest.
        const alreadyAdded = hooks.some(
          (h) => h.script === scriptPath && h.event === eventType
        );
        if (!alreadyAdded) {
          hooks.push({
            event: eventType as HookDefinition["event"],
            script: scriptPath,
            name: `${manifest.name}:${baseName}`,
          });
        }
        break;
      }
    }
  } catch {
    // No hooks directory; any manifest hooks stand alone.
  }
  return hooks;
};
/**
 * Load a plugin's commands: first those declared in the manifest (manifest
 * name wins over the file's own), then any command files discovered by
 * convention that are not already registered.
 */
export const loadPluginCommands = async (
  pluginPath: string,
  manifest: PluginManifest
): Promise<Map<string, PluginCommandDefinition>> => {
  const commands = new Map<string, PluginCommandDefinition>();
  for (const cmdRef of manifest.commands ?? []) {
    const definition = await parseCommandFile(
      join(pluginPath, PLUGIN_SUBDIRS.commands, cmdRef.file)
    );
    if (definition) {
      // The manifest-declared name overrides the file's own name.
      definition.name = cmdRef.name;
      commands.set(cmdRef.name, definition);
    }
  }
  // Convention-based discovery fills in anything not already registered.
  const commandsDir = join(pluginPath, PLUGIN_SUBDIRS.commands);
  try {
    await access(commandsDir, constants.R_OK);
    for (const entry of await readdir(commandsDir)) {
      if (extname(entry) !== COMMAND_FILE_EXTENSION) continue;
      const cmdPath = join(commandsDir, entry);
      if (!(await stat(cmdPath)).isFile()) continue;
      const definition = await parseCommandFile(cmdPath);
      if (definition && !commands.has(definition.name)) {
        commands.set(definition.name, definition);
      }
    }
  } catch {
    // No commands directory for this plugin.
  }
  return commands;
};
/** Load the tool modules declared in a plugin's manifest, keyed by name. */
export const loadPluginTools = async (
  pluginPath: string,
  manifest: PluginManifest
): Promise<Map<string, PluginToolDefinition>> => {
  const tools = new Map<string, PluginToolDefinition>();
  for (const toolRef of manifest.tools ?? []) {
    const toolDef = await loadToolModule(
      join(pluginPath, PLUGIN_SUBDIRS.tools, toolRef.file)
    );
    if (toolDef) {
      // The manifest-declared name overrides the module's own name.
      toolDef.name = toolRef.name;
      tools.set(toolRef.name, toolDef);
    }
  }
  return tools;
};

View File

@@ -0,0 +1,278 @@
/**
* Plugin Service
*
* Manages plugin lifecycle and provides access to plugin tools and commands
*/
import type {
LoadedPlugin,
PluginRegistry,
PluginCommandDefinition,
PluginLoadResult,
} from "@/types/plugin";
import type { FunctionDefinition, ToolDefinition } from "@tools/types";
import type { HookDefinition } from "@/types/hooks";
import {
discoverPlugins,
parseManifest,
loadPluginTools,
loadPluginCommands,
loadPluginHooks,
} from "@services/plugin-loader";
import {
PLUGIN_TOOL_SEPARATOR,
PLUGIN_ERRORS,
} from "@constants/plugin";
/**
 * Module-level plugin registry singleton. `tools` is keyed by the
 * plugin-prefixed tool name (see initializePlugins); `commands` by the
 * bare command name.
 */
const registry: PluginRegistry = {
  plugins: new Map(),
  tools: new Map(),
  commands: new Map(),
  initialized: false,
};
/**
 * Load one plugin from disk. Never throws: a bad manifest yields a failed
 * result; tools, commands, and hooks are loaded concurrently.
 */
const loadPlugin = async (
  _name: string,
  path: string,
  manifestPath: string
): Promise<PluginLoadResult> => {
  const manifest = await parseManifest(manifestPath);
  if (!manifest) {
    return { success: false, error: PLUGIN_ERRORS.MANIFEST_INVALID };
  }
  // The three asset kinds are independent — load them in parallel.
  const [tools, commands, hooks] = await Promise.all([
    loadPluginTools(path, manifest),
    loadPluginCommands(path, manifest),
    loadPluginHooks(path, manifest),
  ]);
  return {
    success: true,
    plugin: { manifest, path, tools, commands, hooks, enabled: true },
  };
};
/**
* Initialize the plugin system
*/
export const initializePlugins = async (workingDir: string): Promise<void> => {
if (registry.initialized) {
return;
}
const discoveredPlugins = await discoverPlugins(workingDir);
for (const discovered of discoveredPlugins) {
const result = await loadPlugin(
discovered.name,
discovered.path,
discovered.manifestPath
);
if (result.success && result.plugin) {
registry.plugins.set(discovered.name, result.plugin);
// Register tools with prefixed names
for (const [toolName, toolDef] of result.plugin.tools) {
const prefixedName = `${discovered.name}${PLUGIN_TOOL_SEPARATOR}${toolName}`;
registry.tools.set(prefixedName, toolDef);
}
// Register commands
for (const [cmdName, cmdDef] of result.plugin.commands) {
registry.commands.set(cmdName, cmdDef);
}
}
}
registry.initialized = true;
};
/** Drop all loaded plugin state and re-run discovery from scratch. */
export const refreshPlugins = async (workingDir: string): Promise<void> => {
  registry.plugins.clear();
  registry.tools.clear();
  registry.commands.clear();
  // Clearing the flag lets initializePlugins do a full reload.
  registry.initialized = false;
  await initializePlugins(workingDir);
};
/** True when `name` refers to a registered (plugin-prefixed) tool. */
export const isPluginTool = (name: string): boolean => registry.tools.has(name);
/** Resolve a plugin tool to the generic ToolDefinition shape, if present. */
export const getPluginTool = (name: string): ToolDefinition | undefined => {
  const pluginTool = registry.tools.get(name);
  // NOTE(review): the double cast papers over the mismatch between
  // PluginToolDefinition and ToolDefinition — confirm the shapes align.
  return pluginTool ? (pluginTool as unknown as ToolDefinition) : undefined;
};
/**
 * Build API-facing function specs for every registered plugin tool.
 * Each tool's declared JSON-schema parameters are exposed so the model
 * actually sees the tool's arguments (previously an empty schema was
 * always sent, hiding every parameter).
 */
export const getPluginToolsForApi = (): {
  type: "function";
  function: FunctionDefinition;
}[] => {
  const tools: {
    type: "function";
    function: FunctionDefinition;
  }[] = [];
  for (const [name, tool] of registry.tools) {
    tools.push({
      type: "function",
      function: {
        name,
        description: tool.description,
        // loadToolModule rejects tools without parameters, so the empty
        // fallback schema is defensive only.
        parameters: (tool.parameters ?? {
          type: "object",
          properties: {},
        }) as FunctionDefinition["parameters"],
      },
    });
  }
  return tools;
};
/** Look up a plugin command by name. */
export const getPluginCommand = (
  name: string
): PluginCommandDefinition | undefined => registry.commands.get(name);
/** True when `name` is a registered plugin command. */
export const isPluginCommand = (name: string): boolean =>
  registry.commands.has(name);
/** All registered plugin commands, in registration order. */
export const getAllPluginCommands = (): PluginCommandDefinition[] => [
  ...registry.commands.values(),
];
/** Hooks contributed by every currently-enabled plugin. */
export const getAllPluginHooks = (): HookDefinition[] => {
  return [...registry.plugins.values()]
    .filter((plugin) => plugin.enabled)
    .flatMap((plugin) => plugin.hooks);
};
/** Every loaded plugin, enabled or not. */
export const getAllPlugins = (): LoadedPlugin[] => [
  ...registry.plugins.values(),
];
/** Look up one loaded plugin by name. */
export const getPlugin = (name: string): LoadedPlugin | undefined =>
  registry.plugins.get(name);
/** Mark a plugin enabled; returns false for unknown plugins. */
export const enablePlugin = (name: string): boolean => {
  const plugin = registry.plugins.get(name);
  if (plugin === undefined) {
    return false;
  }
  plugin.enabled = true;
  return true;
};
/** Mark a plugin disabled; returns false for unknown plugins. */
export const disablePlugin = (name: string): boolean => {
  const plugin = registry.plugins.get(name);
  if (plugin === undefined) {
    return false;
  }
  plugin.enabled = false;
  return true;
};
/** Whether initializePlugins has completed at least once. */
export const isPluginsInitialized = (): boolean => registry.initialized;
/** Number of loaded plugins. */
export const getPluginCount = (): number => registry.plugins.size;
/** Number of registered plugin tools (prefixed names). */
export const getPluginToolCount = (): number => registry.tools.size;
/** Number of registered plugin commands. */
export const getPluginCommandCount = (): number => registry.commands.size;

View File

@@ -0,0 +1,392 @@
/**
* Project Setup Service
*
* Automatically configures the project on startup:
* - Adds .codetyper to .gitignore if .git exists
* - Creates default agent configurations in .codetyper/agents/
*/
import fs from "fs/promises";
import path from "path";
// Project-local directory that holds codetyper state.
const CODETYPER_DIR = ".codetyper";
// Subdirectory (inside CODETYPER_DIR) for generated agent definitions.
const AGENTS_DIR = "agents";
// Line written to .gitignore so the state directory is not committed.
const GITIGNORE_ENTRY = ".codetyper/";
/** Outcome of setupProject: what was changed plus any errors encountered. */
interface SetupResult {
  gitignoreUpdated: boolean;
  agentsCreated: string[];
  errors: string[];
}
/** True when `filePath` exists and is accessible to this process. */
const fileExists = async (filePath: string): Promise<boolean> => {
  return fs
    .access(filePath)
    .then(() => true)
    .catch(() => false);
};
/** A directory is treated as a git repository when it contains `.git`. */
const isGitRepository = async (workingDir: string): Promise<boolean> => {
  return fileExists(path.join(workingDir, ".git"));
};
/** Create a directory (and any missing parents), best-effort. */
const ensureDirectoryExists = async (dirPath: string): Promise<void> => {
  // NOTE(review): with recursive:true, mkdir does not fail on an existing
  // directory, so this catch also hides real errors (e.g. permissions) —
  // setup is treated as best-effort; confirm before tightening.
  await fs.mkdir(dirPath, { recursive: true }).catch(() => undefined);
};
/**
 * Ensure `.codetyper/` is ignored by git. Returns true only when the file
 * was actually modified; false when the entry already exists or on error.
 */
const addToGitignore = async (workingDir: string): Promise<boolean> => {
  const gitignorePath = path.join(workingDir, ".gitignore");
  try {
    let existing = "";
    if (await fileExists(gitignorePath)) {
      existing = await fs.readFile(gitignorePath, "utf-8");
      // Respect either form of the entry a user may have added by hand.
      const trimmedLines = existing.split("\n").map((line) => line.trim());
      if (
        trimmedLines.includes(GITIGNORE_ENTRY) ||
        trimmedLines.includes(CODETYPER_DIR)
      ) {
        return false;
      }
    }
    // Insert a separating newline only when the file lacks a trailing one.
    const needsNewline = existing !== "" && !existing.endsWith("\n");
    const updated = `${existing}${needsNewline ? "\n" : ""}${GITIGNORE_ENTRY}\n`;
    await fs.writeFile(gitignorePath, updated, "utf-8");
    return true;
  } catch {
    return false;
  }
};
/** Shape of a default agent written to .codetyper/agents/<id>.agent.md. */
interface AgentDefinition {
  // Filename stem of the generated .agent.md file.
  id: string;
  name: string;
  description: string;
  // Serialized into the generated file's frontmatter.
  mode: "primary" | "subagent" | "all";
  color: string;
  // Markdown system prompt placed after the frontmatter.
  prompt: string;
}
// Built-in agents seeded into .codetyper/agents/ on first run. Existing
// files are never overwritten (see createDefaultAgents), so users can edit
// these freely after generation.
const DEFAULT_AGENTS: AgentDefinition[] = [
  // Fast read-only exploration of the codebase.
  {
    id: "explore",
    name: "Explore",
    description: "Fast codebase exploration specialist",
    mode: "subagent",
    color: "cyan",
    prompt: `You are an expert codebase explorer. Your role is to quickly navigate and understand codebases.
## Capabilities
- Find files by patterns and naming conventions
- Search code for keywords, functions, classes, and patterns
- Answer questions about codebase structure and architecture
- Identify key files and entry points
## Guidelines
- Use Glob to find files by pattern
- Use Grep to search file contents
- Use Read to examine specific files
- Be thorough but efficient - explore multiple locations
- Report findings with exact file paths and line numbers
- Summarize patterns and conventions you discover
## Output Format
Always include:
1. Files found (with paths)
2. Relevant code snippets
3. Summary of findings
4. Suggestions for further exploration if needed`,
  },
  // Produces step-by-step implementation plans.
  {
    id: "plan",
    name: "Plan",
    description: "Software architect for designing implementation plans",
    mode: "subagent",
    color: "yellow",
    prompt: `You are a software architect specializing in implementation planning.
## Role
Design comprehensive implementation plans for features, refactors, and bug fixes.
## Approach
1. Analyze requirements thoroughly
2. Explore relevant codebase areas
3. Identify affected components
4. Consider architectural trade-offs
5. Create step-by-step implementation plans
## Output Format
Your plans should include:
### Summary
Brief overview of the change
### Affected Files
List of files that will be created, modified, or deleted
### Implementation Steps
1. Step-by-step instructions
2. Each step should be atomic and testable
3. Include code snippets where helpful
### Considerations
- Potential risks or edge cases
- Testing requirements
- Performance implications
- Backwards compatibility
### Dependencies
Any prerequisites or blocking tasks`,
  },
  // Terminal/command execution with safety rails.
  {
    id: "bash",
    name: "Bash",
    description: "Command execution specialist for terminal operations",
    mode: "subagent",
    color: "green",
    prompt: `You are a command execution specialist for terminal operations.
## Expertise
- Git operations (commit, push, branch, rebase, etc.)
- Package management (npm, yarn, pnpm, pip, etc.)
- Build tools and scripts
- System commands
- Docker and container operations
## Guidelines
- Always explain what commands will do before executing
- Use safe defaults and avoid destructive operations
- Quote paths with spaces properly
- Handle errors gracefully
- Provide clear output and status
## Safety Rules
- NEVER run destructive commands without explicit confirmation
- NEVER modify git config without permission
- NEVER force push to main/master
- NEVER skip safety hooks unless requested
- Always check command exit codes
## Output
Include:
- Command being executed
- Expected outcome
- Actual output or error
- Next steps if needed`,
  },
  // Structured review of diffs/changes.
  {
    id: "code-reviewer",
    name: "Code Reviewer",
    description: "Expert code reviewer for quality and best practices",
    mode: "subagent",
    color: "magenta",
    prompt: `You are an expert code reviewer focused on quality and best practices.
## Review Areas
1. **Correctness** - Does the code do what it's supposed to?
2. **Security** - Are there vulnerabilities or unsafe patterns?
3. **Performance** - Are there inefficiencies or bottlenecks?
4. **Maintainability** - Is the code readable and well-organized?
5. **Testing** - Is the code testable and properly tested?
## Review Process
1. Understand the change's purpose
2. Check for correctness and edge cases
3. Look for security issues (OWASP top 10)
4. Assess code style and conventions
5. Verify error handling
6. Check test coverage
## Output Format
### Summary
Brief assessment of the change
### Issues Found
- **Critical**: Must fix before merge
- **Major**: Should fix, significant impact
- **Minor**: Nice to fix, low impact
- **Nitpick**: Style/preference suggestions
### Positive Aspects
What's done well
### Suggestions
Specific improvements with code examples`,
  },
  // System design and architectural decision making.
  {
    id: "architect",
    name: "Code Architect",
    description: "Design implementation plans and architectural decisions",
    mode: "subagent",
    color: "blue",
    prompt: `You are a code architect specializing in system design and implementation strategy.
## Responsibilities
- Design scalable and maintainable solutions
- Make architectural decisions with clear trade-offs
- Create implementation roadmaps
- Identify patterns and anti-patterns
## Approach
1. **Understand Context**
- Current system architecture
- Constraints and requirements
- Team capabilities and preferences
2. **Explore Options**
- Consider multiple approaches
- Evaluate trade-offs
- Document pros and cons
3. **Design Solution**
- Clear component structure
- Interface definitions
- Data flow diagrams
- Integration points
4. **Plan Implementation**
- Phased approach if needed
- Risk mitigation
- Testing strategy
## Output
- Architecture overview
- Component breakdown
- Interface contracts
- Implementation phases
- Risk assessment`,
  },
  // Catch-all multi-step research agent.
  {
    id: "general",
    name: "General Purpose",
    description: "Multi-step research and complex task execution",
    mode: "subagent",
    color: "white",
    prompt: `You are a general-purpose agent for researching complex questions and executing multi-step tasks.
## Capabilities
- Search codebases for information
- Read and analyze files
- Execute multi-step research tasks
- Synthesize findings from multiple sources
- Answer complex questions about code
## Approach
1. Break down complex tasks into steps
2. Gather information systematically
3. Cross-reference findings
4. Synthesize and summarize
## Guidelines
- Be thorough in research
- Cite sources with file paths and line numbers
- Acknowledge uncertainty when present
- Provide actionable insights
## Output
- Clear, structured answers
- Supporting evidence
- Confidence level
- Further research suggestions if needed`,
  },
];
/** Serialize an agent definition into a Markdown file with YAML frontmatter. */
const generateAgentFile = (agent: AgentDefinition): string => {
  const frontmatter = [
    "---",
    `name: "${agent.name}"`,
    `description: "${agent.description}"`,
    `mode: "${agent.mode}"`,
    `color: "${agent.color}"`,
    "---",
  ].join("\n");
  return `${frontmatter}\n${agent.prompt}\n`;
};
/**
 * Write the built-in agent definition files into the project's agents
 * directory. Existing files are never overwritten, and a failed write for
 * one agent does not prevent the others from being created.
 *
 * @returns ids of the agents that were actually written
 */
const createDefaultAgents = async (workingDir: string): Promise<string[]> => {
  const agentsDir = path.join(workingDir, CODETYPER_DIR, AGENTS_DIR);
  await ensureDirectoryExists(agentsDir);

  const written: string[] = [];
  for (const agent of DEFAULT_AGENTS) {
    const target = path.join(agentsDir, `${agent.id}.agent.md`);
    // Never clobber a file the user may have customized.
    if (await fileExists(target)) continue;
    try {
      await fs.writeFile(target, generateAgentFile(agent), "utf-8");
      written.push(agent.id);
    } catch {
      // Best-effort: skip this agent and continue with the rest.
    }
  }
  return written;
};
/**
 * One-time project setup: when inside a git repository, add the codetyper
 * directory to .gitignore; then create the default agent files. Errors are
 * collected into the result rather than thrown.
 */
export const setupProject = async (workingDir: string): Promise<SetupResult> => {
  const result: SetupResult = {
    gitignoreUpdated: false,
    agentsCreated: [],
    errors: [],
  };
  try {
    if (await isGitRepository(workingDir)) {
      result.gitignoreUpdated = await addToGitignore(workingDir);
    }
    result.agentsCreated = await createDefaultAgents(workingDir);
  } catch (error) {
    result.errors.push(error instanceof Error ? error.message : String(error));
  }
  return result;
};
/**
 * Report whether the project has a git repo, a codetyper directory, and how
 * many `*.agent.md` files exist in the agents directory.
 */
export const getSetupStatus = async (workingDir: string): Promise<{
  hasGit: boolean;
  hasCodetyperDir: boolean;
  agentCount: number;
}> => {
  const [hasGit, hasCodetyperDir] = await Promise.all([
    isGitRepository(workingDir),
    fileExists(path.join(workingDir, CODETYPER_DIR)),
  ]);

  let agentCount = 0;
  if (hasCodetyperDir) {
    const agentsDir = path.join(workingDir, CODETYPER_DIR, AGENTS_DIR);
    if (await fileExists(agentsDir)) {
      const entries = await fs.readdir(agentsDir);
      agentCount = entries.filter((name) => name.endsWith(".agent.md")).length;
    }
  }
  return { hasGit, hasCodetyperDir, agentCount };
};
/** Aggregate export bundling the project-setup operations. */
export const projectSetupService = {
  setupProject,
  getSetupStatus,
  isGitRepository,
};

View File

@@ -0,0 +1,347 @@
/**
* Security Service - Pattern detection and validation
*
* Provides:
* - Command injection detection
* - XSS pattern detection
* - Permission explainer
* - Shell continuation validation
* - OAuth token filtering
* - Security pattern hooks
*/
/** Severity attached to a detected security issue. */
export type SecurityRisk = "critical" | "high" | "medium" | "low" | "info";

/** A single pattern match found while scanning a command or code string. */
export interface SecurityIssue {
  type: string; // category, e.g. "command_injection", "xss"
  risk: SecurityRisk;
  description: string;
  location?: string; // excerpt of the matched text, truncated to 50 chars
  suggestion?: string;
}

/** Result of validating a command or code block. */
export interface SecurityReport {
  issues: SecurityIssue[];
  hasCritical: boolean; // true when any issue has risk "critical"
  hasHigh: boolean; // true when any issue has risk "high"
  summary: string; // one-line human-readable summary
}
// Command injection patterns
// NOTE(review): several of these (pipe, redirection, $VAR) also match benign
// commands — a hit means "needs review", not proven injection.
const COMMAND_INJECTION_PATTERNS = [
  // Shell metacharacters
  { pattern: /[;&|`$]/, description: "Shell metacharacter detected" },
  // Subshell execution
  { pattern: /\$\([^)]+\)/, description: "Subshell execution detected" },
  // Backtick execution
  { pattern: /`[^`]+`/, description: "Backtick command execution detected" },
  // Pipe chains
  { pattern: /\|(?!\|)/, description: "Pipe character detected" },
  // Redirections
  { pattern: /[<>]/, description: "Redirection operator detected" },
  // Newline injection
  { pattern: /[\n\r]/, description: "Newline character in command" },
  // Null byte injection
  { pattern: /\x00/, description: "Null byte detected" },
  // Environment variable expansion
  { pattern: /\$\{[^}]+\}/, description: "Environment variable expansion" },
  { pattern: /\$[A-Za-z_][A-Za-z0-9_]*/, description: "Variable reference detected" },
];
// XSS patterns — markup/attribute constructs that can carry script.
const XSS_PATTERNS = [
  // Script tags
  { pattern: /<script[\s>]/i, description: "Script tag detected" },
  // Event handlers
  { pattern: /on\w+\s*=/i, description: "Event handler attribute detected" },
  // JavaScript protocol
  { pattern: /javascript:/i, description: "JavaScript protocol detected" },
  // Data URLs with script content
  { pattern: /data:[^,]*;base64/i, description: "Data URL with base64 encoding" },
  // Expression/eval
  { pattern: /expression\s*\(/i, description: "CSS expression detected" },
  // SVG with script
  { pattern: /<svg[\s>].*?<script/i, description: "SVG with embedded script" },
  // Template literals in HTML
  { pattern: /\{\{.*?\}\}/i, description: "Template literal detected" },
  // HTML entities that could be script
  { pattern: /&#x?[0-9a-f]+;/i, description: "HTML entity encoding detected" },
];
// SQL injection patterns — classic signatures plus bare DML keywords.
const SQL_INJECTION_PATTERNS = [
  { pattern: /(['"])\s*;\s*--/i, description: "SQL comment injection" },
  { pattern: /union\s+select/i, description: "UNION SELECT statement" },
  { pattern: /'\s*or\s+'?1'?\s*=\s*'?1/i, description: "OR 1=1 pattern" },
  { pattern: /drop\s+table/i, description: "DROP TABLE statement" },
  { pattern: /insert\s+into/i, description: "INSERT INTO statement" },
  { pattern: /delete\s+from/i, description: "DELETE FROM statement" },
];
// Dangerous system calls — dynamic execution / unsafe deserialization APIs
// across JS, Python, and PHP.
const DANGEROUS_CALLS_PATTERNS = [
  { pattern: /eval\s*\(/i, description: "eval() usage detected" },
  { pattern: /exec\s*\(/i, description: "exec() usage detected" },
  { pattern: /system\s*\(/i, description: "system() call detected" },
  { pattern: /os\.system\s*\(/i, description: "os.system() call detected" },
  { pattern: /subprocess\.call\s*\(/i, description: "subprocess.call() detected" },
  { pattern: /child_process/i, description: "child_process module usage" },
  { pattern: /pickle\.loads?\s*\(/i, description: "Pickle deserialization detected" },
  { pattern: /yaml\.unsafe_load\s*\(/i, description: "Unsafe YAML loading" },
  { pattern: /unserialize\s*\(/i, description: "PHP unserialize() detected" },
];
// Shell continuation patterns (dangerous when user-controlled)
// A trailing operator means the shell would also consume the next line.
const SHELL_CONTINUATION_PATTERNS = [
  { pattern: /\\\s*$/, description: "Line continuation at end" },
  { pattern: /;\s*$/, description: "Command separator at end" },
  { pattern: /\|\s*$/, description: "Pipe at end (awaiting next command)" },
  { pattern: /&&\s*$/, description: "AND operator at end" },
  { pattern: /\|\|\s*$/, description: "OR operator at end" },
];
// OAuth/API token patterns (for filtering)
// Used (case-insensitively, global) by findSensitiveTokens and
// filterSensitiveTokens to locate and mask credentials.
const TOKEN_PATTERNS = [
  // Generic API keys
  { pattern: /api[_-]?key[=:]["']?[a-zA-Z0-9_-]{20,}["']?/i, type: "API Key" },
  // OAuth tokens
  { pattern: /bearer\s+[a-zA-Z0-9_-]+\.[a-zA-Z0-9_-]+\.[a-zA-Z0-9_-]+/i, type: "JWT Token" },
  { pattern: /oauth[_-]?token[=:]["']?[a-zA-Z0-9_-]{20,}["']?/i, type: "OAuth Token" },
  // AWS credentials
  { pattern: /AKIA[0-9A-Z]{16}/i, type: "AWS Access Key" },
  { pattern: /aws[_-]?secret[_-]?access[_-]?key[=:]["']?[a-zA-Z0-9/+=]{40}["']?/i, type: "AWS Secret Key" },
  // GitHub tokens
  { pattern: /gh[pousr]_[A-Za-z0-9_]{36,}/i, type: "GitHub Token" },
  // Generic secrets
  { pattern: /password[=:]["']?[^\s"']{8,}["']?/i, type: "Password" },
  { pattern: /secret[=:]["']?[^\s"']{8,}["']?/i, type: "Secret" },
  // Private keys
  { pattern: /-----BEGIN\s+(?:RSA|DSA|EC|OPENSSH)?\s*PRIVATE\s+KEY-----/i, type: "Private Key" },
];
/**
 * Scan `content` against a list of regexes, emitting one SecurityIssue per
 * pattern that matches. Only the first match of each pattern is reported,
 * and the matched excerpt is truncated to 50 characters.
 */
const checkPatterns = (
  content: string,
  patterns: Array<{ pattern: RegExp; description: string }>,
  type: string,
  risk: SecurityRisk,
): SecurityIssue[] => {
  const issues: SecurityIssue[] = [];
  for (const { pattern, description } of patterns) {
    const hit = content.match(pattern);
    if (hit === null) continue;
    const text = hit[0];
    const excerpt = text.length > 50 ? `${text.slice(0, 50)}...` : text;
    issues.push({ type, risk, description, location: excerpt });
  }
  return issues;
};
/** Scan a shell command for injection-style metacharacters and constructs. */
export const detectCommandInjection = (command: string): SecurityIssue[] =>
  checkPatterns(command, COMMAND_INJECTION_PATTERNS, "command_injection", "critical");

/** Scan content for common XSS vectors (script tags, handlers, data URLs). */
export const detectXSS = (content: string): SecurityIssue[] =>
  checkPatterns(content, XSS_PATTERNS, "xss", "high");

/** Scan content for SQL-injection signatures. */
export const detectSQLInjection = (content: string): SecurityIssue[] =>
  checkPatterns(content, SQL_INJECTION_PATTERNS, "sql_injection", "critical");

/** Scan code for dynamic-execution / unsafe-deserialization calls. */
export const detectDangerousCalls = (code: string): SecurityIssue[] =>
  checkPatterns(code, DANGEROUS_CALLS_PATTERNS, "dangerous_call", "high");

/** Scan a command for trailing continuation operators. */
export const detectShellContinuation = (command: string): SecurityIssue[] =>
  checkPatterns(command, SHELL_CONTINUATION_PATTERNS, "shell_continuation", "medium");
/**
 * Find credential-like substrings (API keys, JWTs, private keys, ...) in
 * `content`. Each hit is returned together with a masked form that keeps
 * only the first and last 4 characters of tokens longer than 12 chars.
 */
export const findSensitiveTokens = (
  content: string,
): Array<{ type: string; match: string; masked: string }> => {
  const found: Array<{ type: string; match: string; masked: string }> = [];
  for (const { pattern, type } of TOKEN_PATTERNS) {
    // Re-create each pattern with g+i so matchAll finds every occurrence.
    for (const hit of content.matchAll(new RegExp(pattern, "gi"))) {
      const value = hit[0];
      const masked =
        value.length > 12
          ? `${value.slice(0, 4)}${"*".repeat(value.length - 8)}${value.slice(-4)}`
          : "*".repeat(value.length);
      found.push({ type, match: value, masked });
    }
  }
  return found;
};
/**
 * Return `content` with every credential-like substring replaced by a masked
 * version (first/last 4 chars kept for tokens longer than 12 chars).
 */
export const filterSensitiveTokens = (content: string): string =>
  TOKEN_PATTERNS.reduce(
    (acc, { pattern }) =>
      acc.replace(new RegExp(pattern, "gi"), (hit) =>
        hit.length > 12
          ? hit.slice(0, 4) + "*".repeat(hit.length - 8) + hit.slice(-4)
          : "*".repeat(hit.length),
      ),
    content,
  );
/** Build a SecurityReport for a shell command (injection + continuation checks). */
export const validateCommand = (command: string): SecurityReport => {
  const issues = [
    ...detectCommandInjection(command),
    ...detectShellContinuation(command),
  ];
  const hasCritical = issues.some((issue) => issue.risk === "critical");
  const hasHigh = issues.some((issue) => issue.risk === "high");
  const summary =
    issues.length === 0
      ? "No security issues detected"
      : `Found ${issues.length} potential security issue(s)`;
  return { issues, hasCritical, hasHigh, summary };
};
/** Build a SecurityReport for code content (dangerous calls, XSS, SQLi). */
export const validateCode = (code: string): SecurityReport => {
  const issues = [
    ...detectDangerousCalls(code),
    ...detectXSS(code),
    ...detectSQLInjection(code),
  ];
  const hasCritical = issues.some((issue) => issue.risk === "critical");
  const hasHigh = issues.some((issue) => issue.risk === "high");
  const summary =
    issues.length === 0
      ? "No security issues detected"
      : `Found ${issues.length} potential security issue(s)`;
  return { issues, hasCritical, hasHigh, summary };
};
/**
 * Produce a human-readable explanation, risk list, and ALLOW/REVIEW/DENY
 * recommendation for a tool invocation (intended for permission prompts).
 *
 * Known tools (bash/write/edit/read) get tailored analysis; any other tool
 * falls through to a generic ALLOW with no flagged risks.
 */
export const explainPermission = (
  tool: string,
  args: Record<string, unknown>,
): { explanation: string; risks: string[]; recommendation: string } => {
  // Per-tool explainers; each inner `args` intentionally shadows the outer
  // parameter (the same value is passed through at the call site below).
  const explanations: Record<
    string,
    (args: Record<string, unknown>) => {
      explanation: string;
      risks: string[];
      recommendation: string;
    }
  > = {
    bash: (args) => {
      const command = (args.command as string) ?? "";
      const report = validateCommand(command);
      return {
        // Command preview is truncated to 100 characters.
        explanation: `Execute shell command: ${command.slice(0, 100)}${command.length > 100 ? "..." : ""}`,
        risks: report.issues.map((i) => `${i.risk.toUpperCase()}: ${i.description}`),
        recommendation: report.hasCritical
          ? "DENY - Critical security risk detected"
          : report.hasHigh
            ? "REVIEW CAREFULLY - High risk patterns detected"
            : "ALLOW - No obvious security issues",
      };
    },
    write: (args) => {
      // Tools pass the target as either `path` or `file_path`.
      const filePath = (args.path as string) ?? (args.file_path as string) ?? "";
      const content = (args.content as string) ?? "";
      const tokens = findSensitiveTokens(content);
      return {
        explanation: `Write to file: ${filePath}`,
        risks: [
          ...(filePath.includes("..") ? ["Path traversal attempt"] : []),
          ...(tokens.length > 0
            ? [`Contains ${tokens.length} potential sensitive token(s)`]
            : []),
        ],
        recommendation:
          filePath.includes("..") || tokens.length > 0
            ? "REVIEW CAREFULLY - Potential security concerns"
            : "ALLOW - File write operation",
      };
    },
    edit: (args) => {
      const filePath = (args.path as string) ?? (args.file_path as string) ?? "";
      return {
        explanation: `Edit file: ${filePath}`,
        risks: filePath.includes("..") ? ["Path traversal attempt"] : [],
        recommendation: filePath.includes("..")
          ? "DENY - Path traversal detected"
          : "ALLOW - File edit operation",
      };
    },
    read: (args) => {
      const filePath = (args.path as string) ?? (args.file_path as string) ?? "";
      return {
        explanation: `Read file: ${filePath}`,
        risks: [
          ...(filePath.includes("..") ? ["Path traversal attempt"] : []),
          // Flag credential-ish file extensions.
          ...(filePath.match(/\.(env|pem|key|secret)$/i)
            ? ["Reading potentially sensitive file"]
            : []),
        ],
        recommendation: filePath.includes("..")
          ? "DENY - Path traversal detected"
          : "ALLOW - File read operation",
      };
    },
  };
  const explainer = explanations[tool];
  if (explainer) {
    return explainer(args);
  }
  // Unknown tools default to allow with no flagged risks.
  return {
    explanation: `Execute tool: ${tool}`,
    risks: [],
    recommendation: "ALLOW - Standard tool operation",
  };
};
/** Aggregate export of all security scanning/validation helpers. */
export const securityService = {
  detectCommandInjection,
  detectXSS,
  detectSQLInjection,
  detectDangerousCalls,
  detectShellContinuation,
  findSensitiveTokens,
  filterSensitiveTokens,
  validateCommand,
  validateCode,
  explainPermission,
};

View File

@@ -0,0 +1,462 @@
/**
* Session Fork Service
*
* Manages session snapshots, forks, and rewind functionality
*/
import { readFile, writeFile, mkdir, access, constants } from "fs/promises";
import { join, dirname } from "path";
import { v4 as uuidv4 } from "uuid";
import type {
SessionSnapshot,
SessionSnapshotState,
SessionFork,
SessionForkFile,
SnapshotCreateResult,
RewindResult,
ForkCreateResult,
ForkSwitchResult,
ForkSummary,
SnapshotSummary,
SnapshotOptions,
ForkOptions,
SessionMessage,
} from "@/types/session-fork";
import type { TodoItem } from "@/types/todo";
import {
FORK_FILE_EXTENSION,
MAIN_FORK_NAME,
DEFAULT_SNAPSHOT_PREFIX,
MAX_SNAPSHOTS_PER_FORK,
MAX_FORKS_PER_SESSION,
FORK_FILE_VERSION,
FORKS_SUBDIR,
COMMIT_MESSAGE_TEMPLATES,
COMMIT_TYPE_KEYWORDS,
FORK_ERRORS,
} from "@constants/session-fork";
import { LOCAL_CONFIG_DIR } from "@constants/paths";
/**
 * In-memory state for current session
 *
 * Module-level singleton: only one session's fork file is loaded at a time.
 */
interface SessionForkState {
  sessionId: string | null; // id passed to initializeForkService, null before init
  file: SessionForkFile | null; // parsed fork file for the active session
  filePath: string | null; // path the fork file is persisted to
  dirty: boolean; // set whenever in-memory state is mutated
}
const state: SessionForkState = {
  sessionId: null,
  file: null,
  filePath: null,
  dirty: false,
};
/**
* Generate suggested commit message from messages
*/
const generateCommitMessage = (messages: SessionMessage[]): string => {
const userMessages = messages.filter((m) => m.role === "user");
const count = messages.length;
if (userMessages.length === 0) {
return COMMIT_MESSAGE_TEMPLATES.DEFAULT
.replace("{summary}", "session checkpoint")
.replace("{count}", String(count));
}
// Get first user message as summary base
const firstMessage = userMessages[0]?.content || "";
const summary = firstMessage.slice(0, 50).replace(/\n/g, " ").trim();
// Detect commit type from messages
const allContent = userMessages.map((m) => m.content.toLowerCase()).join(" ");
for (const [type, keywords] of Object.entries(COMMIT_TYPE_KEYWORDS)) {
for (const keyword of keywords) {
if (allContent.includes(keyword)) {
const template = COMMIT_MESSAGE_TEMPLATES[type as keyof typeof COMMIT_MESSAGE_TEMPLATES];
return template
.replace("{summary}", summary || keyword)
.replace("{count}", String(count));
}
}
}
return COMMIT_MESSAGE_TEMPLATES.DEFAULT
.replace("{summary}", summary || "session changes")
.replace("{count}", String(count));
};
/** Path of the persisted fork file for a session, under the project-local config dir. */
const getForkFilePath = (sessionId: string, workingDir: string): string =>
  join(workingDir, LOCAL_CONFIG_DIR, FORKS_SUBDIR, `${sessionId}${FORK_FILE_EXTENSION}`);
/** Build a fresh fork file containing only an empty "main" fork. */
const createEmptyForkFile = (sessionId: string): SessionForkFile => {
  const now = Date.now();
  const mainFork: SessionFork = {
    id: uuidv4(),
    name: MAIN_FORK_NAME,
    snapshots: [],
    currentSnapshotId: "", // empty string = no snapshot taken yet
    createdAt: now,
    updatedAt: now,
  };
  return {
    version: FORK_FILE_VERSION,
    sessionId,
    forks: [mainFork],
    currentForkId: mainFork.id,
  };
};
/**
 * Load the fork file for a session from disk.
 *
 * Falls back to a fresh empty fork file when the file is missing, unreadable,
 * or does not parse into the expected shape. (Previously the JSON.parse
 * result was trusted blindly, so a corrupted-but-valid JSON file could crash
 * later accesses to `forks` / `currentForkId`.)
 */
const loadForkFile = async (
  sessionId: string,
  workingDir: string
): Promise<SessionForkFile> => {
  const filePath = getForkFilePath(sessionId, workingDir);
  try {
    await access(filePath, constants.R_OK);
    const content = await readFile(filePath, "utf-8");
    const parsed = JSON.parse(content) as SessionForkFile;
    // Minimal structural validation: JSON.parse gives no type guarantees.
    if (
      parsed === null ||
      typeof parsed !== "object" ||
      !Array.isArray(parsed.forks) ||
      typeof parsed.currentForkId !== "string"
    ) {
      return createEmptyForkFile(sessionId);
    }
    return parsed;
  } catch {
    return createEmptyForkFile(sessionId);
  }
};
/**
 * Persist a fork file as pretty-printed JSON, creating the parent directory
 * if needed.
 *
 * Fix: uses an unconditional recursive mkdir instead of the previous
 * access()-then-mkdir sequence, which was a TOCTOU race (the directory could
 * appear/disappear between the check and the write). mkdir with
 * `recursive: true` is a no-op when the directory already exists.
 */
const saveForkFile = async (
  file: SessionForkFile,
  filePath: string
): Promise<void> => {
  await mkdir(dirname(filePath), { recursive: true });
  await writeFile(filePath, JSON.stringify(file, null, 2), "utf-8");
};
/** Load (or create) the fork file for `sessionId` and make it the active state. */
export const initializeForkService = async (
  sessionId: string,
  workingDir: string
): Promise<void> => {
  const file = await loadForkFile(sessionId, workingDir);
  state.sessionId = sessionId;
  state.file = file;
  state.filePath = getForkFilePath(sessionId, workingDir);
  state.dirty = false;
};
/** The fork currently selected in the loaded file, or null when uninitialized. */
const getCurrentFork = (): SessionFork | null => {
  const file = state.file;
  if (file === null) return null;
  return file.forks.find((fork) => fork.id === file.currentForkId) ?? null;
};
/**
 * Capture the current conversation into a new snapshot on the active fork.
 *
 * Fails (as data, never throws) when the service is uninitialized, the fork
 * is missing, the per-fork snapshot cap is reached, or the name is taken.
 * Todos and context files can be excluded via options; message/todo/context
 * arrays are shallow-copied so later mutation doesn't affect the snapshot.
 */
export const createSnapshot = async (
  messages: SessionMessage[],
  todoItems: TodoItem[],
  contextFiles: string[],
  metadata: { provider: string; model: string; agent: string; workingDir: string },
  options: SnapshotOptions = {}
): Promise<SnapshotCreateResult> => {
  if (!state.file || !state.filePath) {
    return { success: false, error: FORK_ERRORS.SESSION_NOT_FOUND };
  }
  const fork = getCurrentFork();
  if (!fork) {
    return { success: false, error: FORK_ERRORS.FORK_NOT_FOUND };
  }
  if (fork.snapshots.length >= MAX_SNAPSHOTS_PER_FORK) {
    return { success: false, error: FORK_ERRORS.MAX_SNAPSHOTS_REACHED };
  }

  const name =
    options.name || `${DEFAULT_SNAPSHOT_PREFIX}-${fork.snapshots.length + 1}`;
  if (fork.snapshots.some((existing) => existing.name === name)) {
    return { success: false, error: FORK_ERRORS.DUPLICATE_SNAPSHOT_NAME };
  }

  const snapshot: SessionSnapshot = {
    id: uuidv4(),
    name,
    timestamp: Date.now(),
    // Chain onto the fork's current snapshot; null when this is the first.
    parentId: fork.currentSnapshotId || null,
    state: {
      messages: [...messages],
      todoItems: options.includeTodos !== false ? [...todoItems] : [],
      contextFiles:
        options.includeContextFiles !== false ? [...contextFiles] : [],
      metadata,
    },
    suggestedCommitMessage: generateCommitMessage(messages),
  };

  fork.snapshots.push(snapshot);
  fork.currentSnapshotId = snapshot.id;
  fork.updatedAt = Date.now();
  state.dirty = true;
  await saveForkFile(state.file, state.filePath);
  return { success: true, snapshot };
};
/**
 * Move the current fork's pointer back to an earlier snapshot.
 *
 * A numeric `target` means "this many steps back from the current snapshot"
 * (clamped at the oldest snapshot); a string matches a snapshot name or id.
 */
export const rewindToSnapshot = async (
  target: string | number
): Promise<RewindResult> => {
  const fail = (error: string): RewindResult => ({
    success: false,
    messagesRestored: 0,
    error,
  });

  if (!state.file || !state.filePath) return fail(FORK_ERRORS.SESSION_NOT_FOUND);
  const fork = getCurrentFork();
  if (!fork) return fail(FORK_ERRORS.FORK_NOT_FOUND);
  if (fork.snapshots.length === 0) return fail(FORK_ERRORS.NO_SNAPSHOTS_TO_REWIND);

  let snapshot: SessionSnapshot | undefined;
  if (typeof target === "number") {
    const currentIndex = fork.snapshots.findIndex(
      (s) => s.id === fork.currentSnapshotId
    );
    // Clamp at the oldest snapshot when rewinding past the beginning.
    snapshot = fork.snapshots[Math.max(0, currentIndex - target)];
  } else {
    snapshot = fork.snapshots.find((s) => s.name === target || s.id === target);
  }
  if (!snapshot) return fail(FORK_ERRORS.SNAPSHOT_NOT_FOUND);

  fork.currentSnapshotId = snapshot.id;
  fork.updatedAt = Date.now();
  state.dirty = true;
  await saveForkFile(state.file, state.filePath);
  return {
    success: true,
    snapshot,
    messagesRestored: snapshot.state.messages.length,
  };
};
/**
 * Create a new fork branching from the current fork.
 *
 * Snapshots up to (and including) the branch point are copied with fresh
 * ids, and the new fork becomes current.
 *
 * Fix: each copied snapshot's `parentId` is now remapped onto the new id of
 * its copied parent. Previously the copies kept `parentId` values pointing
 * at the SOURCE fork's snapshot ids, which broke the parent chain inside
 * the new fork.
 */
export const createFork = async (
  options: ForkOptions = {}
): Promise<ForkCreateResult> => {
  if (!state.file || !state.filePath) {
    return { success: false, error: FORK_ERRORS.SESSION_NOT_FOUND };
  }
  if (state.file.forks.length >= MAX_FORKS_PER_SESSION) {
    return { success: false, error: FORK_ERRORS.MAX_FORKS_REACHED };
  }
  const currentFork = getCurrentFork();
  if (!currentFork) {
    return { success: false, error: FORK_ERRORS.FORK_NOT_FOUND };
  }
  // Generate fork name and reject duplicates.
  const name = options.name || `fork-${state.file.forks.length + 1}`;
  if (state.file.forks.some((f) => f.name === name)) {
    return { success: false, error: FORK_ERRORS.DUPLICATE_FORK_NAME };
  }
  // Determine which snapshot to branch from (default: the fork's current one).
  let branchFromId = currentFork.currentSnapshotId;
  if (options.fromSnapshot) {
    const snapshot = currentFork.snapshots.find(
      (s) => s.name === options.fromSnapshot || s.id === options.fromSnapshot
    );
    if (!snapshot) {
      return { success: false, error: FORK_ERRORS.SNAPSHOT_NOT_FOUND };
    }
    branchFromId = snapshot.id;
  }
  // Copy snapshots up to the branch point with new ids, recording old→new.
  const branchIndex = currentFork.snapshots.findIndex((s) => s.id === branchFromId);
  const idMap = new Map<string, string>();
  const copiedSnapshots = currentFork.snapshots
    .slice(0, branchIndex + 1)
    .map((s) => {
      const newId = uuidv4();
      idMap.set(s.id, newId);
      return { ...s, id: newId };
    })
    // Remap parent references onto the copies (parents always precede
    // children, so every reachable parent is already in the map).
    .map((s) => ({
      ...s,
      parentId: s.parentId ? idMap.get(s.parentId) ?? null : null,
    }));
  const newFork: SessionFork = {
    id: uuidv4(),
    name,
    snapshots: copiedSnapshots,
    currentSnapshotId: copiedSnapshots[copiedSnapshots.length - 1]?.id || "",
    parentForkId: currentFork.id,
    createdAt: Date.now(),
    updatedAt: Date.now(),
  };
  state.file.forks.push(newFork);
  state.file.currentForkId = newFork.id;
  state.dirty = true;
  await saveForkFile(state.file, state.filePath);
  return { success: true, fork: newFork };
};
/** Make the fork matching `name` (by name or id) the current fork. */
export const switchFork = async (name: string): Promise<ForkSwitchResult> => {
  const file = state.file;
  if (!file || !state.filePath) {
    return { success: false, error: FORK_ERRORS.SESSION_NOT_FOUND };
  }
  const fork = file.forks.find((f) => f.name === name || f.id === name);
  if (fork === undefined) {
    return { success: false, error: FORK_ERRORS.FORK_NOT_FOUND };
  }
  file.currentForkId = fork.id;
  state.dirty = true;
  await saveForkFile(file, state.filePath);
  return { success: true, fork };
};
/** Summarize every fork in the loaded file (empty when uninitialized). */
export const listForks = (): ForkSummary[] => {
  const file = state.file;
  if (!file) return [];
  return file.forks.map((fork) => ({
    id: fork.id,
    name: fork.name,
    snapshotCount: fork.snapshots.length,
    currentSnapshotName:
      fork.snapshots.find((s) => s.id === fork.currentSnapshotId)?.name ||
      "(no snapshots)",
    createdAt: fork.createdAt,
    updatedAt: fork.updatedAt,
    isCurrent: fork.id === file.currentForkId,
  }));
};
/** Summarize the snapshots of the current fork (empty when uninitialized). */
export const listSnapshots = (): SnapshotSummary[] => {
  const fork = getCurrentFork();
  if (fork === null) return [];
  return fork.snapshots.map(
    ({ id, name, timestamp, state: snapState, suggestedCommitMessage }) => ({
      id,
      name,
      timestamp,
      messageCount: snapState.messages.length,
      isCurrent: id === fork.currentSnapshotId,
      suggestedCommitMessage,
    })
  );
};
/** The snapshot the current fork points at, or null when there is none. */
export const getCurrentSnapshot = (): SessionSnapshot | null => {
  const fork = getCurrentFork();
  if (fork === null) return null;
  return fork.snapshots.find((s) => s.id === fork.currentSnapshotId) ?? null;
};
/** Look up a snapshot in the current fork by name or id. */
export const getSnapshot = (nameOrId: string): SessionSnapshot | null => {
  const fork = getCurrentFork();
  if (fork === null) return null;
  const match = fork.snapshots.find(
    (s) => s.name === nameOrId || s.id === nameOrId
  );
  return match ?? null;
};
/** True once initializeForkService has loaded a session's fork file. */
export const isForkServiceInitialized = (): boolean =>
  state.file !== null && state.sessionId !== null;
/**
 * Get current session ID
 *
 * @returns the id passed to initializeForkService, or null before
 *   initialization / after clearForkService.
 */
export const getCurrentSessionId = (): string | null => {
  return state.sessionId;
};
/** Reset the module back to its uninitialized state (nothing is persisted). */
export const clearForkService = (): void => {
  Object.assign(state, {
    sessionId: null,
    file: null,
    filePath: null,
    dirty: false,
  });
};

View File

@@ -0,0 +1,453 @@
/**
* Snapshot Service - Git-based differential snapshots
*
* Provides:
* - Git-based differential snapshots
* - Automatic 7-day retention pruning
* - Patch generation and validation
* - FileDiff tracking (additions, deletions, files changed)
*/
import { execSync, exec } from "child_process";
import fs from "fs/promises";
import path from "path";
import { v4 as uuidv4 } from "uuid";
// Directory (relative to the working dir) where snapshot metadata JSON lives.
const SNAPSHOTS_DIR = ".codetyper/snapshots";
// Snapshots older than this many days are removed by pruneOldSnapshots().
const RETENTION_DAYS = 7;
// Branch-name prefix used by restoreSnapshot() when checking out a snapshot.
const SNAPSHOT_BRANCH_PREFIX = "codetyper-snapshot-";
/** Per-file change parsed from `git diff --numstat` output. */
export interface FileDiff {
  path: string;
  status: "added" | "modified" | "deleted" | "renamed";
  additions: number;
  deletions: number;
  oldPath?: string; // For renamed files
}
/** Full snapshot record, persisted as `<id>.json` under SNAPSHOTS_DIR. */
export interface Snapshot {
  id: string;
  timestamp: number; // epoch millis at creation time
  message: string;
  commitHash: string; // HEAD commit captured by the snapshot
  parentHash: string | null; // null when no parent commit could be resolved
  files: FileDiff[];
  stats: {
    filesChanged: number;
    additions: number;
    deletions: number;
  };
}
/** Lightweight listing entry — a subset of Snapshot without diff payload. */
export interface SnapshotMetadata {
  id: string;
  timestamp: number;
  message: string;
  commitHash: string;
}
/** True when `filePath` exists and is accessible to the current process. */
const fileExists = async (filePath: string): Promise<boolean> => {
  try {
    await fs.access(filePath);
  } catch {
    return false;
  }
  return true;
};
/** A directory is treated as a git repo when it contains a `.git` entry. */
const isGitRepository = async (workingDir: string): Promise<boolean> =>
  fileExists(path.join(workingDir, ".git"));
/**
 * Run a command synchronously in `cwd`, capturing trimmed stdout.
 * Failures are returned as data (never thrown), with stderr as the error.
 */
const runGitCommand = (
  command: string,
  cwd: string,
): { success: boolean; output: string; error?: string } => {
  try {
    const stdout = execSync(command, {
      cwd,
      encoding: "utf-8",
      stdio: ["pipe", "pipe", "pipe"],
    });
    return { success: true, output: stdout.trim() };
  } catch (err) {
    const { stderr, message } = err as { stderr?: string; message?: string };
    return {
      success: false,
      output: "",
      error: stderr ?? message ?? "Unknown error",
    };
  }
};
/** Async variant of runGitCommand, wrapping child_process.exec. */
const runGitCommandAsync = (
  command: string,
  cwd: string,
): Promise<{ success: boolean; output: string; error?: string }> =>
  new Promise((resolve) => {
    exec(command, { cwd, encoding: "utf-8" }, (err, stdout, stderr) => {
      resolve(
        err
          ? { success: false, output: "", error: stderr || err.message }
          : { success: true, output: stdout.trim() },
      );
    });
  });
const ensureSnapshotsDir = async (workingDir: string): Promise<void> => {
const snapshotsDir = path.join(workingDir, SNAPSHOTS_DIR);
await fs.mkdir(snapshotsDir, { recursive: true });
};
/**
 * Parse `git diff --numstat` output into FileDiff entries.
 *
 * Each line is "<added>\t<deleted>\t<path>"; "-" marks binary files (counted
 * as 0/0). Renames appear either as "old => new" or in git's abbreviated
 * brace form "dir/{old => new}/file".
 *
 * Fix: the brace form was previously unhandled, so such lines were stored
 * with the literal braces in `path` and status "modified"; they are now
 * expanded into proper old/new paths with status "renamed".
 */
const parseGitDiff = (diffOutput: string): FileDiff[] => {
  const files: FileDiff[] = [];
  const lines = diffOutput.split("\n").filter((l) => l.trim());
  for (const line of lines) {
    const match = line.match(/^(\d+|-)\t(\d+|-)\t(.+)$/);
    if (!match) continue;
    const additions = match[1] === "-" ? 0 : parseInt(match[1], 10);
    const deletions = match[2] === "-" ? 0 : parseInt(match[2], 10);
    const filePath = match[3];
    // Abbreviated rename: "dir/{old.ts => new.ts}" or "{src => lib}/a.ts".
    const braceRename = filePath.match(/^(.*)\{(.*) => (.*)\}(.*)$/);
    if (braceRename) {
      const [, prefix, oldMid, newMid, suffix] = braceRename;
      // Collapse doubled slashes left behind when one side is empty,
      // e.g. "dir/{ => sub}/f.ts" -> old "dir//f.ts" -> "dir/f.ts".
      files.push({
        path: `${prefix}${newMid}${suffix}`.replace(/\/{2,}/g, "/"),
        oldPath: `${prefix}${oldMid}${suffix}`.replace(/\/{2,}/g, "/"),
        status: "renamed",
        additions,
        deletions,
      });
      continue;
    }
    // Full rename form: "old => new".
    const renameMatch = filePath.match(/^(.+) => (.+)$/);
    if (renameMatch) {
      files.push({
        path: renameMatch[2],
        oldPath: renameMatch[1],
        status: "renamed",
        additions,
        deletions,
      });
      continue;
    }
    // NOTE: numstat carries no real status; this is a heuristic — a
    // modification that only adds lines is reported as "added".
    let status: FileDiff["status"] = "modified";
    if (additions > 0 && deletions === 0) {
      status = "added";
    } else if (deletions > 0 && additions === 0) {
      status = "deleted";
    }
    files.push({ path: filePath, status, additions, deletions });
  }
  return files;
};
/** Diff `commitHash` against its parent (explicit, or `commit^`) as FileDiffs. */
const getCommitDiff = (
  workingDir: string,
  commitHash: string,
  parentHash: string | null,
): FileDiff[] => {
  const base = parentHash ?? `${commitHash}^`;
  const result = runGitCommand(
    `git diff --numstat ${base} ${commitHash}`,
    workingDir,
  );
  return result.success && result.output ? parseGitDiff(result.output) : [];
};
/** Current HEAD commit hash, or null when it cannot be resolved. */
const getCurrentCommitHash = (workingDir: string): string | null => {
  const { success, output } = runGitCommand("git rev-parse HEAD", workingDir);
  return success ? output : null;
};

/** Get the most recent commit message (subject line only). */
export const getHeadCommitMessage = (workingDir: string): string => {
  const { success, output } = runGitCommand("git log -1 --format=%s", workingDir);
  return success ? output : "No message";
};
/**
 * Record a snapshot of the repository's HEAD commit.
 *
 * Captures the diff of HEAD against its parent plus aggregate stats, and
 * persists the metadata as JSON under SNAPSHOTS_DIR.
 *
 * Fixes over the previous version: when the working tree was dirty, the old
 * code stashed the changes and then used the (unchanged) HEAD hash as the
 * "parent", so it always diffed the snapshot commit against itself and
 * recorded empty stats; the stash push/pop also risked pop conflicts for no
 * benefit. The parent is now always HEAD^ (null for a root commit) and the
 * working tree is left untouched.
 *
 * @returns the snapshot, or null when not a git repo / HEAD unresolvable.
 */
export const createSnapshot = async (
  workingDir: string,
  message?: string,
): Promise<Snapshot | null> => {
  if (!(await isGitRepository(workingDir))) {
    return null;
  }
  await ensureSnapshotsDir(workingDir);

  const id = uuidv4();
  const timestamp = Date.now();
  const snapshotMessage =
    message ?? `Snapshot ${new Date(timestamp).toISOString()}`;

  const commitHash = getCurrentCommitHash(workingDir);
  if (!commitHash) {
    return null;
  }

  // Parent of HEAD; a root commit has none (diff then yields no files).
  const parentResult = runGitCommand("git rev-parse HEAD^", workingDir);
  const parentHash = parentResult.success ? parentResult.output : null;

  const files = getCommitDiff(workingDir, commitHash, parentHash);
  const stats = {
    filesChanged: files.length,
    additions: files.reduce((sum, f) => sum + f.additions, 0),
    deletions: files.reduce((sum, f) => sum + f.deletions, 0),
  };

  const snapshot: Snapshot = {
    id,
    timestamp,
    message: snapshotMessage,
    commitHash,
    parentHash,
    files,
    stats,
  };
  const snapshotPath = path.join(workingDir, SNAPSHOTS_DIR, `${id}.json`);
  await fs.writeFile(snapshotPath, JSON.stringify(snapshot, null, 2));
  return snapshot;
};
/** Load one snapshot's metadata JSON by id; null when missing or unreadable. */
export const getSnapshot = async (
  workingDir: string,
  snapshotId: string,
): Promise<Snapshot | null> => {
  try {
    const raw = await fs.readFile(
      path.join(workingDir, SNAPSHOTS_DIR, `${snapshotId}.json`),
      "utf-8",
    );
    return JSON.parse(raw) as Snapshot;
  } catch {
    return null;
  }
};
/** List snapshot metadata (newest first); malformed files are skipped. */
export const listSnapshots = async (
  workingDir: string,
): Promise<SnapshotMetadata[]> => {
  const snapshotsDir = path.join(workingDir, SNAPSHOTS_DIR);
  if (!(await fileExists(snapshotsDir))) {
    return [];
  }
  try {
    const entries = await fs.readdir(snapshotsDir);
    const snapshots: SnapshotMetadata[] = [];
    for (const entry of entries.filter((f) => f.endsWith(".json"))) {
      try {
        const raw = await fs.readFile(path.join(snapshotsDir, entry), "utf-8");
        const { id, timestamp, message, commitHash } = JSON.parse(raw) as Snapshot;
        snapshots.push({ id, timestamp, message, commitHash });
      } catch {
        // Skip unreadable or malformed snapshot files.
      }
    }
    // Newest first.
    snapshots.sort((a, b) => b.timestamp - a.timestamp);
    return snapshots;
  } catch {
    return [];
  }
};
/** Delete a snapshot's metadata file; false when it did not exist. */
export const deleteSnapshot = async (
  workingDir: string,
  snapshotId: string,
): Promise<boolean> => {
  try {
    await fs.unlink(path.join(workingDir, SNAPSHOTS_DIR, `${snapshotId}.json`));
    return true;
  } catch {
    return false;
  }
};
/** Delete snapshots older than the retention window; returns count removed. */
export const pruneOldSnapshots = async (workingDir: string): Promise<number> => {
  const cutoff = Date.now() - RETENTION_DAYS * 24 * 60 * 60 * 1000;
  const snapshots = await listSnapshots(workingDir);
  const stale = snapshots.filter((s) => s.timestamp < cutoff);
  let deleted = 0;
  for (const snapshot of stale) {
    if (await deleteSnapshot(workingDir, snapshot.id)) {
      deleted += 1;
    }
  }
  return deleted;
};
/** Produce the full `git diff` text for a snapshot, or null on failure. */
export const generatePatch = async (
  workingDir: string,
  snapshotId: string,
): Promise<string | null> => {
  const snapshot = await getSnapshot(workingDir, snapshotId);
  if (snapshot === null) {
    return null;
  }
  const base = snapshot.parentHash ?? `${snapshot.commitHash}^`;
  const result = runGitCommand(
    `git diff ${base} ${snapshot.commitHash}`,
    workingDir,
  );
  return result.success ? result.output : null;
};
export const validatePatch = async (
workingDir: string,
patch: string,
): Promise<{ valid: boolean; errors: string[] }> => {
// Write patch to temp file
const tempPatchPath = path.join(workingDir, SNAPSHOTS_DIR, `temp-${Date.now()}.patch`);
try {
await fs.writeFile(tempPatchPath, patch);
// Try to apply patch with --check (dry run)
const result = await runGitCommandAsync(
`git apply --check "${tempPatchPath}"`,
workingDir,
);
return {
valid: result.success,
errors: result.error ? [result.error] : [],
};
} finally {
// Clean up temp file
try {
await fs.unlink(tempPatchPath);
} catch {
// Ignore cleanup errors
}
}
};
/**
 * Restore a snapshot by checking out its commit on a dedicated branch.
 *
 * @param workingDir - Repository root to run git in.
 * @param snapshotId - Snapshot to restore.
 * @returns `{ success: true }` on success, otherwise `{ success: false, error }`.
 */
export const restoreSnapshot = async (
  workingDir: string,
  snapshotId: string,
): Promise<{ success: boolean; error?: string }> => {
  const snapshot = await getSnapshot(workingDir, snapshotId);
  if (!snapshot) {
    return { success: false, error: "Snapshot not found" };
  }
  // cat-file succeeds only while the object is still reachable; the commit
  // may have been garbage-collected since the snapshot was recorded.
  const exists = runGitCommand(
    `git cat-file -t ${snapshot.commitHash}`,
    workingDir,
  );
  if (!exists.success) {
    return { success: false, error: "Snapshot commit no longer exists" };
  }
  // Check out onto a named snapshot branch rather than detaching HEAD.
  const branchName = `${SNAPSHOT_BRANCH_PREFIX}${snapshotId.slice(0, 8)}`;
  const checkout = runGitCommand(
    `git checkout -b ${branchName} ${snapshot.commitHash}`,
    workingDir,
  );
  return checkout.success
    ? { success: true }
    : { success: false, error: checkout.error };
};
/**
 * List files changed in the working directory relative to HEAD.
 *
 * Combines staged (index vs HEAD) and unstaged (working tree vs index)
 * numstat output; when a file appears in both, the staged entry wins.
 *
 * @param workingDir - Directory to inspect; must be inside a git repository.
 * @returns Per-file diffs, or an empty array when not a git repo or git fails.
 */
export const getWorkingDirectoryDiff = async (
  workingDir: string,
): Promise<FileDiff[]> => {
  if (!(await isGitRepository(workingDir))) {
    return [];
  }
  const stagedResult = runGitCommand("git diff --numstat --cached", workingDir);
  const unstagedResult = runGitCommand("git diff --numstat", workingDir);
  const files: FileDiff[] = [];
  if (stagedResult.success && stagedResult.output) {
    files.push(...parseGitDiff(stagedResult.output));
  }
  if (unstagedResult.success && unstagedResult.output) {
    // Merge with staged, preferring staged status. Track seen paths in a Set
    // so the merge is O(n) instead of a linear scan per unstaged file.
    const seen = new Set(files.map((f) => f.path));
    for (const file of parseGitDiff(unstagedResult.output)) {
      if (!seen.has(file.path)) {
        files.push(file);
        seen.add(file.path);
      }
    }
  }
  return files;
};
/**
 * Aggregated snapshot API: bundles the module's exported functions into one
 * object so callers can depend on a single import instead of individual
 * named exports.
 */
export const snapshotService = {
  createSnapshot,
  getSnapshot,
  listSnapshots,
  deleteSnapshot,
  pruneOldSnapshots,
  generatePatch,
  validatePatch,
  restoreSnapshot,
  getWorkingDirectoryDiff,
  isGitRepository,
};