Add BRAIN_DISABLED flag and fix Ollama tool call formatting

Features:
  - Add BRAIN_DISABLED feature flag to hide all Brain functionality
  - When enabled, hides Brain banner, status indicator, menu, and commands
  - Flag location: src/constants/brain.ts

  Fixes:
  - Fix Ollama 400 error by properly formatting tool_calls in messages
  - Update OllamaMessage type to include tool_calls field
  - Fix Brain menu keyboard not working (add missing modes to isMenuOpen)

  UI Changes:
  - Remove "^Tab toggle mode" hint from status bar
  - Remove "ctrl+t to hide todos" hint from status bar

  Files modified:
  - src/constants/brain.ts (add BRAIN_DISABLED flag)
  - src/types/ollama.ts (add tool_calls to OllamaMessage)
  - src/providers/ollama/chat.ts (format tool_calls in messages)
  - src/tui-solid/components/header.tsx (hide Brain UI when disabled)
  - src/tui-solid/components/status-bar.tsx (remove hints)
  - src/tui-solid/components/command-menu.tsx (filter brain command)
  - src/tui-solid/components/input-area.tsx (fix isMenuOpen modes)
  - src/tui-solid/routes/session.tsx (skip brain menu when disabled)
  - src/services/brain.ts (early return when disabled)
  - src/services/chat-tui/initialize.ts (skip brain init when disabled)
This commit is contained in:
2026-02-02 13:25:38 -05:00
parent 2eadda584a
commit c839fc4d68
114 changed files with 17243 additions and 273 deletions

View File

@@ -0,0 +1,289 @@
/**
* Agent definition loader service
* Loads agent definitions from markdown files with YAML frontmatter
*/
import { readFile, readdir } from "node:fs/promises";
import { join, basename, extname } from "node:path";
import { existsSync } from "node:fs";
import { homedir } from "node:os";
import type {
AgentDefinition,
AgentFrontmatter,
AgentDefinitionFile,
AgentRegistry,
AgentLoadResult,
AgentTier,
AgentColor,
} from "@src/types/agent-definition";
import { DEFAULT_AGENT_DEFINITION, AGENT_DEFINITION_SCHEMA } from "@src/types/agent-definition";
import { AGENT_DEFINITION, AGENT_DEFINITION_PATHS, AGENT_MESSAGES } from "@src/constants/agent-definition";
/**
 * Parse YAML frontmatter delimited by AGENT_DEFINITION.FRONTMATTER_DELIMITER.
 * Supports a minimal YAML subset: scalar strings/numbers/booleans, inline
 * arrays ("key: [a, b]") and block arrays ("key:" followed by "- item" lines).
 * Returns null when the document has no well-formed frontmatter.
 */
const parseFrontmatter = (content: string): { frontmatter: Record<string, unknown>; body: string } | null => {
  const delimiter = AGENT_DEFINITION.FRONTMATTER_DELIMITER;
  const lines = content.split("\n");
  if (lines[0]?.trim() !== delimiter) {
    return null;
  }
  const endIndex = lines.findIndex((line, index) => index > 0 && line.trim() === delimiter);
  if (endIndex === -1) {
    return null;
  }
  const frontmatterLines = lines.slice(1, endIndex);
  const body = lines.slice(endIndex + 1).join("\n").trim();
  // Simple YAML parser for frontmatter (no nesting, no multi-line scalars).
  const frontmatter: Record<string, unknown> = {};
  let currentKey = "";
  let currentArray: string[] | null = null;
  const unquote = (s: string): string => s.replace(/^["']|["']$/g, "");
  frontmatterLines.forEach((line) => {
    const trimmed = line.trim();
    // Item of a block array opened by a previous "key:" line.
    if (trimmed.startsWith("- ") && currentArray !== null) {
      currentArray.push(trimmed.slice(2));
      return;
    }
    // Any non-item line terminates an open block array.
    if (currentArray !== null) {
      frontmatter[currentKey] = currentArray;
      currentArray = null;
    }
    const colonIndex = trimmed.indexOf(":");
    if (colonIndex === -1) return;
    const key = trimmed.slice(0, colonIndex).trim();
    const value = trimmed.slice(colonIndex + 1).trim();
    if (value === "") {
      // "key:" with no value opens a block array.
      currentKey = key;
      currentArray = [];
    } else if (value.startsWith("[") && value.endsWith("]")) {
      const inner = value.slice(1, -1).trim();
      // BUG FIX: "[]" previously produced [""] because "".split(",") is [""].
      frontmatter[key] =
        inner === "" ? [] : inner.split(",").map((s) => unquote(s.trim()));
    } else if (value === "true") {
      frontmatter[key] = true;
    } else if (value === "false") {
      frontmatter[key] = false;
    } else if (!isNaN(Number(value))) {
      frontmatter[key] = Number(value);
    } else {
      frontmatter[key] = unquote(value);
    }
  });
  if (currentArray !== null) {
    frontmatter[currentKey] = currentArray;
  }
  return { frontmatter, body };
};
/**
 * Validate raw parsed frontmatter against AGENT_DEFINITION_SCHEMA.
 * Returns a typed AgentFrontmatter, or null when a required field is
 * missing or one of name/description/tools has the wrong runtime type.
 */
const validateFrontmatter = (frontmatter: Record<string, unknown>): AgentFrontmatter | null => {
  const { required } = AGENT_DEFINITION_SCHEMA;
  for (const field of required) {
    if (!(field in frontmatter)) {
      return null;
    }
  }
  const name = frontmatter.name;
  const description = frontmatter.description;
  const tools = frontmatter.tools;
  if (typeof name !== "string" || typeof description !== "string" || !Array.isArray(tools)) {
    return null;
  }
  // BUG FIX: "(maxTurns as number) || default" let any truthy non-number
  // (e.g. a string) through the cast; only trust actual numbers.
  const maxTurns =
    typeof frontmatter.maxTurns === "number"
      ? frontmatter.maxTurns
      : DEFAULT_AGENT_DEFINITION.maxTurns;
  return {
    name,
    description,
    tools: tools as ReadonlyArray<string>,
    tier: (frontmatter.tier as AgentTier) || DEFAULT_AGENT_DEFINITION.tier,
    color: (frontmatter.color as AgentColor) || DEFAULT_AGENT_DEFINITION.color,
    maxTurns,
    triggerPhrases: (frontmatter.triggerPhrases as ReadonlyArray<string>) || [],
    capabilities: (frontmatter.capabilities as ReadonlyArray<string>) || [],
    allowedPaths: frontmatter.allowedPaths as ReadonlyArray<string> | undefined,
    deniedPaths: frontmatter.deniedPaths as ReadonlyArray<string> | undefined,
  };
};
/**
 * Combine validated frontmatter with the markdown body into a complete
 * AgentDefinition, filling optional fields from DEFAULT_AGENT_DEFINITION.
 */
const frontmatterToDefinition = (frontmatter: AgentFrontmatter, content: string): AgentDefinition => {
  const tier = frontmatter.tier || (DEFAULT_AGENT_DEFINITION.tier as AgentTier);
  const color = frontmatter.color || (DEFAULT_AGENT_DEFINITION.color as AgentColor);
  const maxTurns = frontmatter.maxTurns || DEFAULT_AGENT_DEFINITION.maxTurns;
  return {
    name: frontmatter.name,
    description: frontmatter.description,
    tools: frontmatter.tools,
    tier,
    color,
    maxTurns,
    // An empty markdown body means "no custom system prompt".
    systemPrompt: content || undefined,
    triggerPhrases: frontmatter.triggerPhrases || [],
    capabilities: frontmatter.capabilities || [],
    permissions: {
      allowedPaths: frontmatter.allowedPaths,
      deniedPaths: frontmatter.deniedPaths,
    },
  };
};
export const loadAgentDefinitionFile = async (filePath: string): Promise<AgentLoadResult> => {
try {
const content = await readFile(filePath, "utf-8");
const parsed = parseFrontmatter(content);
if (!parsed) {
return { success: false, error: AGENT_MESSAGES.INVALID_FRONTMATTER, filePath };
}
const frontmatter = validateFrontmatter(parsed.frontmatter);
if (!frontmatter) {
return { success: false, error: AGENT_MESSAGES.MISSING_REQUIRED, filePath };
}
const agent = frontmatterToDefinition(frontmatter, parsed.body);
return { success: true, agent, filePath };
} catch (error) {
const message = error instanceof Error ? error.message : "Unknown error";
return { success: false, error: message, filePath };
}
};
/**
 * Load every agent definition file (matching FILE_EXTENSION) found in a
 * directory. A leading "~" in the path is expanded to the home directory.
 * Returns [] when the directory is missing or unreadable.
 */
export const loadAgentDefinitionsFromDirectory = async (
  directoryPath: string
): Promise<ReadonlyArray<AgentLoadResult>> => {
  // BUG FIX: only expand a *leading* tilde. The previous
  // directoryPath.replace("~", homedir()) rewrote the first "~" found
  // anywhere in the path (e.g. a directory literally named "a~b").
  const resolvedPath = directoryPath.startsWith("~")
    ? join(homedir(), directoryPath.slice(1))
    : directoryPath;
  if (!existsSync(resolvedPath)) {
    return [];
  }
  try {
    const files = await readdir(resolvedPath);
    const mdFiles = files.filter(
      (file) => extname(file) === AGENT_DEFINITION.FILE_EXTENSION
    );
    return await Promise.all(
      mdFiles.map((file) => loadAgentDefinitionFile(join(resolvedPath, file)))
    );
  } catch {
    return [];
  }
};
/**
 * Build the agent registry by loading definitions from every known
 * location in priority order (project first, then global). The first
 * definition loaded for a given name wins, so project agents shadow
 * global ones. Also builds secondary indexes by trigger phrase
 * (lower-cased) and by capability.
 */
export const loadAllAgentDefinitions = async (
  projectPath: string
): Promise<AgentRegistry> => {
  const agents = new Map<string, AgentDefinition>();
  const byTrigger = new Map<string, string>();
  const byCapability = new Map<string, string[]>();
  const searchPaths = [
    join(projectPath, AGENT_DEFINITION_PATHS.PROJECT),
    AGENT_DEFINITION_PATHS.GLOBAL,
  ];
  for (const searchPath of searchPaths) {
    const results = await loadAgentDefinitionsFromDirectory(searchPath);
    for (const result of results) {
      if (!result.success || !result.agent) continue;
      const { agent } = result;
      // Higher-priority paths load first; never overwrite an existing entry.
      if (agents.has(agent.name)) continue;
      agents.set(agent.name, agent);
      for (const phrase of agent.triggerPhrases ?? []) {
        byTrigger.set(phrase.toLowerCase(), agent.name);
      }
      for (const capability of agent.capabilities ?? []) {
        const existing = byCapability.get(capability) ?? [];
        byCapability.set(capability, [...existing, agent.name]);
      }
    }
  }
  return { agents, byTrigger, byCapability };
};
/**
 * Find the first agent whose registered trigger phrase occurs in the
 * text (case-insensitive substring match). Iterates byTrigger in
 * insertion order, so earlier-registered phrases win ties.
 */
export const findAgentByTrigger = (
  registry: AgentRegistry,
  text: string
): AgentDefinition | undefined => {
  const haystack = text.toLowerCase();
  const match = Array.from(registry.byTrigger.entries()).find(([phrase]) =>
    haystack.includes(phrase)
  );
  return match === undefined ? undefined : registry.agents.get(match[1]);
};
/**
 * List every registered agent advertising the given capability.
 * Names indexed but no longer registered are skipped; unknown
 * capabilities yield an empty array.
 */
export const findAgentsByCapability = (
  registry: AgentRegistry,
  capability: string
): ReadonlyArray<AgentDefinition> => {
  const names = registry.byCapability.get(capability) ?? [];
  const found: AgentDefinition[] = [];
  for (const name of names) {
    const agent = registry.agents.get(name);
    if (agent !== undefined) {
      found.push(agent);
    }
  }
  return found;
};
/** Look up a single agent by its exact name. */
export const getAgentByName = (
  registry: AgentRegistry,
  name: string
): AgentDefinition | undefined => {
  return registry.agents.get(name);
};
/** Snapshot of every registered agent, in registration order. */
export const listAllAgents = (registry: AgentRegistry): ReadonlyArray<AgentDefinition> => {
  return [...registry.agents.values()];
};
/**
 * Serialize an AgentDefinition back into markdown with YAML frontmatter
 * (the inverse of the loader above). Optional fields are emitted only
 * when present and non-empty; a missing systemPrompt falls back to a
 * generated "# name" heading plus the description.
 */
export const createAgentDefinitionContent = (agent: AgentDefinition): string => {
  const lines: string[] = ["---"];
  lines.push(`name: ${agent.name}`);
  lines.push(`description: ${agent.description}`);
  lines.push(`tools: [${agent.tools.join(", ")}]`);
  lines.push(`tier: ${agent.tier}`);
  lines.push(`color: ${agent.color}`);
  if (agent.maxTurns) {
    lines.push(`maxTurns: ${agent.maxTurns}`);
  }
  // Emit a block array ("key:" then "- item" lines) when non-empty.
  const pushBlockArray = (key: string, values: ReadonlyArray<string> | undefined): void => {
    if (!values || values.length === 0) return;
    lines.push(`${key}:`);
    values.forEach((item) => lines.push(` - ${item}`));
  };
  pushBlockArray("triggerPhrases", agent.triggerPhrases);
  pushBlockArray("capabilities", agent.capabilities);
  lines.push("---");
  const bodyContent = agent.systemPrompt || `# ${agent.name}\n\n${agent.description}`;
  return `${lines.join("\n")}\n\n${bodyContent}`;
};

View File

@@ -0,0 +1,389 @@
/**
* Background task service
* Manages background task execution, queue, and lifecycle
*/
import { randomUUID } from "node:crypto";
import { writeFile, readFile, mkdir, readdir, unlink } from "node:fs/promises";
import { join } from "node:path";
import { existsSync } from "node:fs";
import { homedir } from "node:os";
import type {
BackgroundTask,
BackgroundTaskStatus,
BackgroundTaskPriority,
BackgroundTaskConfig,
TaskProgress,
TaskResult,
TaskError,
TaskMetadata,
TaskNotification,
TaskStep,
TaskArtifact,
} from "@src/types/background-task";
import { DEFAULT_BACKGROUND_TASK_CONFIG, BACKGROUND_TASK_PRIORITIES } from "@src/types/background-task";
import {
BACKGROUND_TASK,
BACKGROUND_TASK_STORAGE,
BACKGROUND_TASK_MESSAGES,
BACKGROUND_TASK_STATUS_ICONS,
} from "@src/constants/background-task";
/** Executor for a named task; reports progress through the supplied callback. */
type TaskHandler = (task: BackgroundTask, updateProgress: (progress: Partial<TaskProgress>) => void) => Promise<TaskResult>;
/** Callback invoked for every task lifecycle notification. */
type NotificationHandler = (notification: TaskNotification) => void;
/** Mutable module-private state backing the task-service singleton. */
interface BackgroundTaskState {
  tasks: Map<string, BackgroundTask>; // every known task, keyed by id
  queue: string[]; // ids of pending tasks awaiting execution
  running: string[]; // ids of tasks currently executing
  handlers: Map<string, TaskHandler>; // executor registered per task name
  notificationHandlers: NotificationHandler[]; // active subscribers
  config: BackgroundTaskConfig; // merged at initialize()
}
// Singleton state shared by every export in this module.
const state: BackgroundTaskState = {
  tasks: new Map(),
  queue: [],
  running: [],
  handlers: new Map(),
  notificationHandlers: [],
  config: DEFAULT_BACKGROUND_TASK_CONFIG,
};
/** Directory where task snapshots are persisted between runs. */
const getStoragePath = (): string =>
  join(homedir(), ".local", "share", "codetyper", "tasks");
/** Create the storage directory on first use (no-op when it exists). */
const ensureStorageDirectory = async (): Promise<void> => {
  const dir = getStoragePath();
  if (existsSync(dir)) return;
  await mkdir(dir, { recursive: true });
};
/**
 * Write a task's JSON snapshot to disk. Honors config.persistTasks:
 * when persistence is disabled this is a no-op.
 */
const persistTask = async (task: BackgroundTask): Promise<void> => {
  if (!state.config.persistTasks) {
    return;
  }
  await ensureStorageDirectory();
  const fileName = `${task.id}${BACKGROUND_TASK_STORAGE.FILE_EXTENSION}`;
  await writeFile(join(getStoragePath(), fileName), JSON.stringify(task, null, 2));
};
/** Delete a task's persisted snapshot, if one exists. */
const removePersistedTask = async (taskId: string): Promise<void> => {
  const fileName = `${taskId}${BACKGROUND_TASK_STORAGE.FILE_EXTENSION}`;
  const filePath = join(getStoragePath(), fileName);
  if (!existsSync(filePath)) {
    return;
  }
  await unlink(filePath);
};
/**
 * Restore persisted tasks from disk at startup. Tasks that were pending
 * or running when the process died are re-queued as "pending"; terminal
 * tasks are kept in memory for history. Corrupted files are skipped.
 */
const loadPersistedTasks = async (): Promise<void> => {
  const storagePath = getStoragePath();
  if (!existsSync(storagePath)) return;
  const entries = await readdir(storagePath);
  for (const entry of entries) {
    if (!entry.endsWith(BACKGROUND_TASK_STORAGE.FILE_EXTENSION)) continue;
    try {
      const raw = await readFile(join(storagePath, entry), "utf-8");
      const task = JSON.parse(raw) as BackgroundTask;
      const wasInterrupted = task.status === "pending" || task.status === "running";
      if (wasInterrupted) {
        state.tasks.set(task.id, { ...task, status: "pending" });
        state.queue.push(task.id);
      } else {
        state.tasks.set(task.id, task);
      }
    } catch {
      // Skip corrupted task files
    }
  }
};
/** Broadcast a lifecycle notification to every subscribed handler. */
const notify = (taskId: string, type: TaskNotification["type"], message: string): void => {
  const notification: TaskNotification = { taskId, type, message, timestamp: Date.now() };
  for (const handler of state.notificationHandlers) {
    handler(notification);
  }
};
/** Fresh zeroed progress record for a newly created task. */
const createInitialProgress = (): TaskProgress => {
  return {
    current: 0,
    total: 100,
    percentage: 0,
    message: "Starting...",
    steps: [],
  };
};
/**
 * Drain the pending queue, launching tasks until the configured
 * concurrency limit (config.maxConcurrent) is reached. Highest-priority
 * tasks start first.
 *
 * BUG FIX: the previous version awaited each executeTask() call, which
 * serialized every task and made maxConcurrent ineffective. Tasks are
 * now launched without awaiting: executeTask() pushes the id onto
 * state.running synchronously (before its first await), so the loop's
 * capacity check stays accurate, and its finally-block re-invokes
 * processQueue() whenever a slot frees up.
 */
const processQueue = async (): Promise<void> => {
  while (
    state.queue.length > 0 &&
    state.running.length < state.config.maxConcurrent
  ) {
    // Sort by priority (highest first)
    state.queue.sort((a, b) => {
      const taskA = state.tasks.get(a);
      const taskB = state.tasks.get(b);
      if (!taskA || !taskB) return 0;
      return BACKGROUND_TASK_PRIORITIES[taskB.priority] - BACKGROUND_TASK_PRIORITIES[taskA.priority];
    });
    const taskId = state.queue.shift();
    if (!taskId) continue;
    const task = state.tasks.get(taskId);
    if (!task) continue;
    // Fire-and-forget: failures are handled inside executeTask().
    void executeTask(task);
  }
};
/**
 * Run a single task via its registered handler, enforcing the configured
 * timeout. Maintains state/persistence through the lifecycle and kicks
 * the queue again when finished.
 */
const executeTask = async (task: BackgroundTask): Promise<void> => {
  const handler = state.handlers.get(task.name);
  if (!handler) {
    await updateTaskStatus(task.id, "failed", {
      code: "HANDLER_NOT_FOUND",
      message: `No handler registered for task: ${task.name}`,
      recoverable: false,
    });
    return;
  }
  // Pushed synchronously so processQueue's capacity check is accurate.
  state.running.push(task.id);
  const updatedTask: BackgroundTask = {
    ...task,
    status: "running",
    startedAt: Date.now(),
  };
  state.tasks.set(task.id, updatedTask);
  await persistTask(updatedTask);
  notify(task.id, "started", BACKGROUND_TASK_MESSAGES.STARTED);
  const updateProgress = (partial: Partial<TaskProgress>): void => {
    const currentTask = state.tasks.get(task.id);
    if (!currentTask) return;
    const newProgress: TaskProgress = {
      ...currentTask.progress,
      ...partial,
      // Recompute percentage only when both current and total are supplied.
      percentage:
        partial.current !== undefined && partial.total !== undefined
          ? Math.round((partial.current / partial.total) * 100)
          : currentTask.progress.percentage,
    };
    state.tasks.set(task.id, { ...currentTask, progress: newProgress });
    notify(task.id, "progress", newProgress.message);
  };
  // BUG FIX: keep a handle on the timeout timer and clear it in finally.
  // Previously the timer was never cleared, so after every successful
  // task it kept the event loop alive and later fired a rejection on a
  // promise nobody was awaiting (an unhandled rejection in modern Node).
  let timeoutTimer: ReturnType<typeof setTimeout> | undefined;
  try {
    const result = await Promise.race([
      handler(updatedTask, updateProgress),
      new Promise<never>((_, reject) => {
        timeoutTimer = setTimeout(
          () => reject(new Error("Task timeout")),
          state.config.defaultTimeout
        );
      }),
    ]);
    await completeTask(task.id, result);
  } catch (error) {
    const taskError: TaskError = {
      code: "EXECUTION_ERROR",
      message: error instanceof Error ? error.message : "Unknown error",
      stack: error instanceof Error ? error.stack : undefined,
      recoverable: true,
    };
    await updateTaskStatus(task.id, "failed", taskError);
  } finally {
    if (timeoutTimer !== undefined) clearTimeout(timeoutTimer);
    state.running = state.running.filter((id) => id !== task.id);
    processQueue();
  }
};
/**
 * Mark a task completed (progress forced to 100%) and persist the final
 * snapshot.
 *
 * BUG FIX: when a task was cancelled while its handler was still
 * running, the handler's eventual result used to overwrite the
 * "cancelled" status with "completed". Cancelled tasks are now left
 * untouched.
 */
const completeTask = async (taskId: string, result: TaskResult): Promise<void> => {
  const task = state.tasks.get(taskId);
  if (!task) return;
  if (task.status === "cancelled") return;
  const completedTask: BackgroundTask = {
    ...task,
    status: "completed",
    completedAt: Date.now(),
    result,
    progress: {
      ...task.progress,
      current: task.progress.total,
      percentage: 100,
      message: "Completed",
    },
  };
  state.tasks.set(taskId, completedTask);
  await persistTask(completedTask);
  notify(taskId, "completed", BACKGROUND_TASK_MESSAGES.COMPLETED);
};
/**
 * Transition a task to a new status, persist it, and emit a "failed"
 * notification when applicable. completedAt is (re)stamped for terminal
 * states and cleared otherwise.
 */
const updateTaskStatus = async (
  taskId: string,
  status: BackgroundTaskStatus,
  error?: TaskError
): Promise<void> => {
  const task = state.tasks.get(taskId);
  if (!task) return;
  const isTerminal = ["completed", "failed", "cancelled"].includes(status);
  const updatedTask: BackgroundTask = {
    ...task,
    status,
    error,
    completedAt: isTerminal ? Date.now() : undefined,
  };
  state.tasks.set(taskId, updatedTask);
  await persistTask(updatedTask);
  if (status === "failed") {
    notify(taskId, "failed", error?.message || BACKGROUND_TASK_MESSAGES.FAILED);
  }
};
// Public API
/**
 * Initialize the service: merge config overrides over the defaults,
 * restore persisted tasks, and start draining the queue.
 */
export const initialize = async (config?: Partial<BackgroundTaskConfig>): Promise<void> => {
  state.config = { ...DEFAULT_BACKGROUND_TASK_CONFIG, ...config };
  await loadPersistedTasks();
  processQueue();
};
/** Register the executor used for tasks created with the given name. */
export const registerHandler = (name: string, handler: TaskHandler): void => {
  state.handlers.set(name, handler);
};
/** Subscribe to task notifications; returns an unsubscribe function. */
export const onNotification = (handler: NotificationHandler): () => void => {
  state.notificationHandlers.push(handler);
  return (): void => {
    state.notificationHandlers = state.notificationHandlers.filter((h) => h !== handler);
  };
};
/**
 * Create and enqueue a new background task. The task is persisted
 * immediately and the queue is kicked so it may start right away.
 */
export const createTask = async (
  name: string,
  description: string,
  metadata: TaskMetadata,
  priority: BackgroundTaskPriority = "normal"
): Promise<BackgroundTask> => {
  const task: BackgroundTask = {
    id: randomUUID(),
    name,
    description,
    status: "pending",
    priority,
    createdAt: Date.now(),
    progress: createInitialProgress(),
    metadata,
  };
  state.tasks.set(task.id, task);
  state.queue.push(task.id);
  await persistTask(task);
  processQueue();
  return task;
};
/**
 * Cancel a pending or running task. Cancellation is cooperative: a
 * running handler is not aborted here, but the task is marked cancelled
 * and removed from the running set / queue.
 * Returns false when the task is unknown or already terminal.
 */
export const cancelTask = async (taskId: string): Promise<boolean> => {
  const task = state.tasks.get(taskId);
  if (!task) return false;
  switch (task.status) {
    case "running":
      await updateTaskStatus(taskId, "cancelled");
      state.running = state.running.filter((id) => id !== taskId);
      notify(taskId, "failed", BACKGROUND_TASK_MESSAGES.CANCELLED);
      return true;
    case "pending":
      state.queue = state.queue.filter((id) => id !== taskId);
      await updateTaskStatus(taskId, "cancelled");
      return true;
    default:
      return false;
  }
};
/** Pause a running task (cooperative; the handler is not suspended). */
export const pauseTask = async (taskId: string): Promise<boolean> => {
  const task = state.tasks.get(taskId);
  if (task === undefined || task.status !== "running") return false;
  await updateTaskStatus(taskId, "paused");
  state.running = state.running.filter((id) => id !== taskId);
  notify(taskId, "progress", BACKGROUND_TASK_MESSAGES.PAUSED);
  return true;
};
/** Resume a paused task by re-queuing it at the front of the queue. */
export const resumeTask = async (taskId: string): Promise<boolean> => {
  const task = state.tasks.get(taskId);
  if (task === undefined || task.status !== "paused") return false;
  state.queue.unshift(taskId);
  await updateTaskStatus(taskId, "pending");
  notify(taskId, "progress", BACKGROUND_TASK_MESSAGES.RESUMED);
  processQueue();
  return true;
};
/** Look up a task by id. */
export const getTask = (taskId: string): BackgroundTask | undefined => {
  return state.tasks.get(taskId);
};
/** List tasks (optionally filtered by status), newest first. */
export const listTasks = (filter?: { status?: BackgroundTaskStatus }): ReadonlyArray<BackgroundTask> => {
  const wanted = filter?.status;
  const tasks = Array.from(state.tasks.values()).filter(
    (t) => wanted === undefined || t.status === wanted
  );
  return tasks.sort((a, b) => b.createdAt - a.createdAt);
};
/**
 * Drop every terminal task (completed / failed / cancelled) from memory
 * and from disk. Returns the number of tasks removed.
 */
export const clearCompletedTasks = async (): Promise<number> => {
  const terminalStatuses = ["completed", "failed", "cancelled"];
  const removable = Array.from(state.tasks.values()).filter((t) =>
    terminalStatuses.includes(t.status)
  );
  for (const task of removable) {
    state.tasks.delete(task.id);
    await removePersistedTask(task.id);
  }
  return removable.length;
};
/** Icon for a status, taken from BACKGROUND_TASK_STATUS_ICONS. */
export const getTaskStatusIcon = (status: BackgroundTaskStatus): string => {
  return BACKGROUND_TASK_STATUS_ICONS[status];
};
/** One-line human-readable summary; running tasks include a percentage. */
export const formatTaskSummary = (task: BackgroundTask): string => {
  const icon = getTaskStatusIcon(task.status);
  const progressSuffix =
    task.status === "running" ? ` (${task.progress.percentage}%)` : "";
  return `${icon} ${task.name}${progressSuffix} - ${task.description}`;
};
/** Number of tasks waiting in the queue. */
export const getQueueLength = (): number => state.queue.length;
/** Number of tasks currently executing. */
export const getRunningCount = (): number => state.running.length;

688
src/services/brain.ts Normal file
View File

@@ -0,0 +1,688 @@
/**
* Brain Service
*
* Business logic layer for the CodeTyper Brain integration.
* Provides context injection, knowledge recall, and learning capabilities.
*/
import fs from "fs/promises";
import { DIRS, FILES } from "@constants/paths";
import { BRAIN_DEFAULTS, BRAIN_ERRORS, BRAIN_DISABLED } from "@constants/brain";
import * as brainApi from "@api/brain";
import type {
BrainCredentials,
BrainState,
BrainConnectionStatus,
BrainUser,
BrainConcept,
BrainRecallResponse,
BrainExtractResponse,
} from "@/types/brain";
// ============================================================================
// State Management (Singleton via Closure)
// ============================================================================
/** Shape of the persisted vars file entries used for Brain auth. */
interface VarsFile {
  brainApiKey?: string;
  brainJwtToken?: string;
}
// Singleton brain state; mutated only through updateState().
let brainState: BrainState = {
  status: "disconnected",
  user: null,
  projectId: BRAIN_DEFAULTS.PROJECT_ID,
  knowledgeCount: 0,
  memoryCount: 0,
  lastError: null,
};
// In-process caches so repeated calls avoid re-reading files from disk.
let cachedCredentials: BrainCredentials | null = null;
let cachedVars: VarsFile | null = null;
// ============================================================================
// Vars File Management
// ============================================================================
/**
 * Read the vars file (FILES.vars), caching the parsed result. A missing
 * or unparsable file is treated as empty — and deliberately not cached,
 * so a later successful read can still populate the cache.
 */
const loadVarsFile = async (): Promise<VarsFile> => {
  if (cachedVars !== null) {
    return cachedVars;
  }
  try {
    const raw = await fs.readFile(FILES.vars, "utf-8");
    cachedVars = JSON.parse(raw) as VarsFile;
    return cachedVars;
  } catch {
    return {};
  }
};
/**
 * Persist the vars file (creating the config directory when needed) and
 * refresh the in-process cache.
 * @throws Error when the write fails.
 */
const saveVarsFile = async (vars: VarsFile): Promise<void> => {
  try {
    await fs.mkdir(DIRS.config, { recursive: true });
    await fs.writeFile(FILES.vars, JSON.stringify(vars, null, 2), "utf-8");
    cachedVars = vars;
  } catch (error) {
    throw new Error(`Failed to save vars file: ${error}`);
  }
};
// ============================================================================
// Credentials Management
// ============================================================================
/** Absolute path of the persisted Brain credentials file. */
const getCredentialsPath = (): string => `${DIRS.data}/brain-credentials.json`;
/**
 * Load Brain credentials from disk, caching the result. Returns null
 * when no credentials file exists or it cannot be parsed.
 */
export const loadCredentials = async (): Promise<BrainCredentials | null> => {
  if (cachedCredentials !== null) {
    return cachedCredentials;
  }
  try {
    const raw = await fs.readFile(getCredentialsPath(), "utf-8");
    cachedCredentials = JSON.parse(raw) as BrainCredentials;
    return cachedCredentials;
  } catch {
    return null;
  }
};
/**
 * Persist Brain credentials to disk (creating the data directory when
 * needed) and refresh the in-process cache.
 * @throws Error when the write fails.
 */
export const saveCredentials = async (
  credentials: BrainCredentials,
): Promise<void> => {
  try {
    await fs.mkdir(DIRS.data, { recursive: true });
    const serialized = JSON.stringify(credentials, null, 2);
    await fs.writeFile(getCredentialsPath(), serialized, "utf-8");
    cachedCredentials = credentials;
  } catch (error) {
    throw new Error(`Failed to save brain credentials: ${error}`);
  }
};
/**
 * Remove all stored Brain credentials: delete the credentials file,
 * drop the cache, and blank the auth entries in the vars file.
 * Best-effort; never throws.
 */
export const clearCredentials = async (): Promise<void> => {
  try {
    await fs.unlink(getCredentialsPath());
    cachedCredentials = null;
  } catch {
    // File may not exist, ignore
  }
  try {
    const vars = await loadVarsFile();
    // undefined values are dropped by JSON.stringify, erasing the keys.
    await saveVarsFile({
      ...vars,
      brainApiKey: undefined,
      brainJwtToken: undefined,
    });
  } catch {
    // Ignore errors
  }
};
/**
 * Resolve the Brain API key: the CODETYPER_BRAIN_API_KEY environment
 * variable takes precedence over the vars file.
 */
export const getApiKey = async (): Promise<string | undefined> => {
  const envKey = process.env.CODETYPER_BRAIN_API_KEY;
  if (envKey) {
    return envKey;
  }
  return (await loadVarsFile()).brainApiKey;
};
/** JWT token from the vars file, if any. */
export const getJwtToken = async (): Promise<string | undefined> => {
  return (await loadVarsFile()).brainJwtToken;
};
/** Store the Brain API key in the vars file. */
export const setApiKey = async (apiKey: string): Promise<void> => {
  const vars = await loadVarsFile();
  await saveVarsFile({ ...vars, brainApiKey: apiKey });
};
/** Store the Brain JWT token in the vars file. */
export const setJwtToken = async (jwtToken: string): Promise<void> => {
  const vars = await loadVarsFile();
  await saveVarsFile({ ...vars, brainJwtToken: jwtToken });
};
// ============================================================================
// Authentication
// ============================================================================
/**
* Login to Brain service
*/
export const login = async (
email: string,
password: string,
): Promise<{ success: boolean; user?: BrainUser; error?: string }> => {
try {
updateState({ status: "connecting" });
const response = await brainApi.login(email, password);
if (response.success && response.data) {
const credentials: BrainCredentials = {
accessToken: response.data.access_token,
refreshToken: response.data.refresh_token,
expiresAt: response.data.expires_at,
user: response.data.user,
};
await saveCredentials(credentials);
updateState({
status: "connected",
user: response.data.user,
lastError: null,
});
return { success: true, user: response.data.user };
}
updateState({ status: "error", lastError: "Login failed" });
return { success: false, error: "Login failed" };
} catch (error) {
const errorMessage =
error instanceof Error ? error.message : "Unknown error";
updateState({ status: "error", lastError: errorMessage });
return { success: false, error: errorMessage };
}
};
/**
* Register a new account
*/
export const register = async (
email: string,
password: string,
displayName: string,
): Promise<{ success: boolean; user?: BrainUser; error?: string }> => {
try {
updateState({ status: "connecting" });
const response = await brainApi.register(email, password, displayName);
if (response.success && response.data) {
const credentials: BrainCredentials = {
accessToken: response.data.access_token,
refreshToken: response.data.refresh_token,
expiresAt: response.data.expires_at,
user: response.data.user,
};
await saveCredentials(credentials);
updateState({
status: "connected",
user: response.data.user,
lastError: null,
});
return { success: true, user: response.data.user };
}
updateState({ status: "error", lastError: "Registration failed" });
return { success: false, error: "Registration failed" };
} catch (error) {
const errorMessage =
error instanceof Error ? error.message : "Unknown error";
updateState({ status: "error", lastError: errorMessage });
return { success: false, error: errorMessage };
}
};
/**
 * Log out: best-effort server-side token revocation, then always clear
 * local credentials and reset the connection state.
 */
export const logout = async (): Promise<void> => {
  try {
    const credentials = await loadCredentials();
    if (credentials?.refreshToken) {
      await brainApi.logout(credentials.refreshToken);
    }
  } catch {
    // Ignore logout errors — local cleanup still proceeds.
  } finally {
    await clearCredentials();
    updateState({
      status: "disconnected",
      user: null,
      knowledgeCount: 0,
      memoryCount: 0,
    });
  }
};
// ============================================================================
// Connection Management
// ============================================================================
/** Preferred auth token: the API key when configured, else the JWT token. */
export const getAuthToken = async (): Promise<string | undefined> => {
  const apiKey = await getApiKey();
  return apiKey ? apiKey : await getJwtToken();
};
/**
 * Probe the Brain service and mark the client connected when the service
 * is healthy and the stored credentials are valid.
 *
 * Flow: health check -> resolve auth token (API key or JWT) -> fetch
 * knowledge stats to validate the token -> optionally fetch memory stats.
 * Returns true only when fully connected; every failure path updates
 * brainState with a status/lastError instead of throwing.
 */
export const connect = async (): Promise<boolean> => {
  // Skip connection when Brain is disabled
  if (BRAIN_DISABLED) {
    return false;
  }
  try {
    updateState({ status: "connecting" });
    // First check if service is healthy
    await brainApi.checkHealth();
    // Then check if we have valid credentials (API key or JWT token)
    const authToken = await getAuthToken();
    if (!authToken) {
      // No credentials configured: not an error, just disconnected.
      updateState({ status: "disconnected", lastError: null });
      return false;
    }
    // Try to get stats to verify credentials are valid
    const projectId = brainState.projectId ?? BRAIN_DEFAULTS.PROJECT_ID;
    const statsResponse = await brainApi.getKnowledgeStats(projectId, authToken);
    if (statsResponse.success && statsResponse.data) {
      updateState({
        status: "connected",
        knowledgeCount: statsResponse.data.total_concepts,
        lastError: null,
      });
      // Also try to get memory stats
      try {
        const memoryStats = await brainApi.getMemoryStats(authToken);
        updateState({ memoryCount: memoryStats.totalNodes });
      } catch {
        // Memory stats are optional
      }
      return true;
    }
    // Stats call failed while a token was present -> credentials invalid.
    updateState({ status: "error", lastError: BRAIN_ERRORS.INVALID_API_KEY });
    return false;
  } catch (error) {
    const errorMessage =
      error instanceof Error ? error.message : BRAIN_ERRORS.CONNECTION_FAILED;
    updateState({ status: "error", lastError: errorMessage });
    return false;
  }
};
/** Reset connection-related state without touching stored credentials. */
export const disconnect = (): void => {
  updateState({
    status: "disconnected",
    knowledgeCount: 0,
    memoryCount: 0,
    lastError: null,
  });
};
/** True only when Brain is enabled and the last connect() succeeded. */
export const isConnected = (): boolean => {
  return !BRAIN_DISABLED && brainState.status === "connected";
};
// ============================================================================
// Knowledge Operations
// ============================================================================
/**
* Recall relevant knowledge for a query
*/
export const recall = async (
query: string,
limit = 5,
): Promise<BrainRecallResponse | null> => {
if (!isConnected()) {
return null;
}
try {
const apiKey = await getApiKey();
if (!apiKey) {
return null;
}
const response = await brainApi.recallKnowledge(
{
query,
project_id: brainState.projectId ?? BRAIN_DEFAULTS.PROJECT_ID,
limit,
},
apiKey,
);
return response;
} catch (error) {
const errorMessage =
error instanceof Error ? error.message : BRAIN_ERRORS.RECALL_FAILED;
updateState({ lastError: errorMessage });
return null;
}
};
/**
* Get context string for prompt injection
*/
export const getContext = async (
query: string,
maxConcepts = 3,
): Promise<string | null> => {
if (!isConnected()) {
return null;
}
try {
const apiKey = await getApiKey();
if (!apiKey) {
return null;
}
const response = await brainApi.buildContext(
{
query,
project_id: brainState.projectId ?? BRAIN_DEFAULTS.PROJECT_ID,
max_concepts: maxConcepts,
},
apiKey,
);
if (response.success && response.data.has_knowledge) {
return response.data.context;
}
return null;
} catch {
return null;
}
};
/**
* Learn a concept
*/
export const learn = async (
name: string,
whatItDoes: string,
options?: {
howItWorks?: string;
patterns?: string[];
files?: string[];
keyFunctions?: string[];
aliases?: string[];
},
): Promise<BrainConcept | null> => {
if (!isConnected()) {
return null;
}
try {
const apiKey = await getApiKey();
if (!apiKey) {
return null;
}
const response = await brainApi.learnConcept(
{
project_id: brainState.projectId ?? BRAIN_DEFAULTS.PROJECT_ID,
name,
what_it_does: whatItDoes,
how_it_works: options?.howItWorks,
patterns: options?.patterns,
files: options?.files,
key_functions: options?.keyFunctions,
aliases: options?.aliases,
},
apiKey,
);
if (response.success && response.data) {
// Update knowledge count
updateState({ knowledgeCount: brainState.knowledgeCount + 1 });
return response.data;
}
return null;
} catch (error) {
const errorMessage =
error instanceof Error ? error.message : BRAIN_ERRORS.LEARN_FAILED;
updateState({ lastError: errorMessage });
return null;
}
};
/**
* Extract and learn concepts from content
*/
export const extractAndLearn = async (
content: string,
source = "conversation",
): Promise<BrainExtractResponse | null> => {
if (!isConnected()) {
return null;
}
try {
const apiKey = await getApiKey();
if (!apiKey) {
return null;
}
const response = await brainApi.extractConcepts(
{
content,
project_id: brainState.projectId ?? BRAIN_DEFAULTS.PROJECT_ID,
source,
},
apiKey,
);
if (response.success) {
// Update knowledge count
const newCount =
brainState.knowledgeCount + response.data.stored + response.data.updated;
updateState({ knowledgeCount: newCount });
return response;
}
return null;
} catch (error) {
const errorMessage =
error instanceof Error ? error.message : BRAIN_ERRORS.EXTRACT_FAILED;
updateState({ lastError: errorMessage });
return null;
}
};
// ============================================================================
// Memory Operations
// ============================================================================
/**
* Search memories
*/
export const searchMemories = async (
query: string,
limit = 10,
): Promise<{ memories: Array<{ content: string; similarity: number }> } | null> => {
if (!isConnected()) {
return null;
}
try {
const apiKey = await getApiKey();
if (!apiKey) {
return null;
}
const response = await brainApi.searchMemories(
{
query,
limit,
project_id: brainState.projectId ?? BRAIN_DEFAULTS.PROJECT_ID,
},
apiKey,
);
return {
memories: response.memories.map((m) => ({
content: m.content,
similarity: m.similarity ?? 0,
})),
};
} catch {
return null;
}
};
/**
 * Store a memory
 *
 * Persists one memory entry via the Brain API and bumps the local memory
 * counter on success. Returns false when disconnected, unauthenticated,
 * the API reports failure, or any error is thrown — errors are
 * intentionally swallowed (best-effort write).
 */
export const storeMemory = async (
  content: string,
  type: "fact" | "pattern" | "correction" | "preference" | "context" = "context",
): Promise<boolean> => {
  if (!isConnected()) {
    return false;
  }
  try {
    const apiKey = await getApiKey();
    if (!apiKey) {
      return false;
    }
    const response = await brainApi.storeMemory(
      {
        content,
        type,
        // Fall back to the default project when none has been selected.
        project_id: brainState.projectId ?? BRAIN_DEFAULTS.PROJECT_ID,
      },
      apiKey,
    );
    if (response.success) {
      updateState({ memoryCount: brainState.memoryCount + 1 });
      return true;
    }
    return false;
  } catch {
    return false;
  }
};
// ============================================================================
// State Accessors
// ============================================================================
/**
 * Get current brain state
 *
 * Returns a shallow copy so callers cannot mutate module state directly.
 */
export const getState = (): BrainState => {
  return { ...brainState };
};
/**
 * Update brain state
 *
 * Merges partial updates into the module-level state object.
 */
const updateState = (updates: Partial<BrainState>): void => {
  brainState = { ...brainState, ...updates };
};
/**
 * Set project ID used by subsequent API calls.
 */
export const setProjectId = (projectId: number): void => {
  updateState({ projectId });
};
/**
 * Get connection status
 */
export const getStatus = (): BrainConnectionStatus => {
  return brainState.status;
};
/**
 * Check if authenticated (has API key or JWT token).
 * Either credential alone is sufficient.
 */
export const isAuthenticated = async (): Promise<boolean> => {
  const apiKey = await getApiKey();
  const jwtToken = await getJwtToken();
  return apiKey !== undefined || jwtToken !== undefined;
};
// ============================================================================
// Initialization
// ============================================================================
/**
 * Initialize the brain service: auto-connects when stored credentials
 * (API key or JWT) are available; resolves false when none exist.
 */
export const initialize = async (): Promise<boolean> => {
  return (await isAuthenticated()) ? connect() : false;
};

View File

@@ -0,0 +1,523 @@
/**
* Cloud Sync Service
*
* Handles push/pull synchronization with the cloud brain service.
*/
import {
CLOUD_BRAIN_DEFAULTS,
CLOUD_ENDPOINTS,
CLOUD_ERRORS,
CLOUD_MESSAGES,
CLOUD_HTTP_CONFIG,
SYNC_CONFIG,
} from "@constants/brain-cloud";
import {
enqueue,
enqueueBatch,
dequeue,
markProcessed,
markFailed,
hasQueuedItems,
getQueueSize,
clearQueue,
} from "@services/brain/offline-queue";
import {
createConflict,
resolveAllConflicts,
getPendingConflicts,
hasUnresolvedConflicts,
clearResolvedConflicts,
} from "@services/brain/conflict-resolver";
import type {
BrainSyncState,
CloudBrainConfig,
SyncItem,
SyncResult,
SyncOptions,
PushRequest,
PushResponse,
PullRequest,
PullResponse,
} from "@/types/brain-cloud";
// Sync state (mutable module-level snapshot reported to callers)
let syncState: BrainSyncState = {
  status: "synced",
  lastSyncAt: null,
  lastPushAt: null,
  lastPullAt: null,
  pendingChanges: 0,
  conflictCount: 0,
  syncErrors: [],
};
// Cloud configuration
let cloudConfig: CloudBrainConfig = { ...CLOUD_BRAIN_DEFAULTS };
// Sync lock to prevent concurrent syncs
let syncInProgress = false;
// Local version tracking (set to the server version after a successful pull)
let localVersion = 0;
/**
 * Configure cloud sync
 *
 * Merges partial settings over the current configuration.
 */
export const configure = (config: Partial<CloudBrainConfig>): void => {
  cloudConfig = { ...cloudConfig, ...config };
};
/**
 * Get current sync state (shallow copy)
 */
export const getSyncState = (): BrainSyncState => ({ ...syncState });
/**
 * Get cloud configuration (shallow copy)
 */
export const getConfig = (): CloudBrainConfig => ({ ...cloudConfig });
/**
 * Check if cloud sync is enabled
 */
export const isEnabled = (): boolean => cloudConfig.enabled;
/**
 * Check if device is online
 *
 * NOTE(review): stub — always reports online in the Node/Bun runtime.
 */
const isOnline = (): boolean => {
  // In Node.js/Bun, we'll assume online unless proven otherwise
  return true;
};
/**
 * Perform a full sync (push then pull)
 *
 * @param authToken Bearer token forwarded to the cloud API.
 * @param projectId Project whose data is being synchronized.
 * @param options   Direction, conflict strategy, and progress callback.
 * @returns Aggregated SyncResult covering both phases.
 * @throws When sync is disabled, a sync is already running, or offline.
 */
export const sync = async (
  authToken: string,
  projectId: number,
  options: SyncOptions = {},
): Promise<SyncResult> => {
  if (!cloudConfig.enabled) {
    throw new Error(CLOUD_ERRORS.NOT_CONFIGURED);
  }
  if (syncInProgress) {
    throw new Error(CLOUD_ERRORS.SYNC_IN_PROGRESS);
  }
  if (!isOnline()) {
    syncState.status = "offline";
    throw new Error(CLOUD_ERRORS.OFFLINE);
  }
  // Acquire the sync lock for the whole operation (released in finally).
  syncInProgress = true;
  syncState.status = "syncing";
  syncState.syncErrors = [];
  const startTime = Date.now();
  const result: SyncResult = {
    success: true,
    direction: options.direction ?? "both",
    itemsSynced: 0,
    itemsFailed: 0,
    conflicts: [],
    errors: [],
    duration: 0,
    timestamp: startTime,
  };
  try {
    const direction = options.direction ?? "both";
    // Push local changes
    if (direction === "push" || direction === "both") {
      options.onProgress?.({
        phase: "pushing",
        current: 0,
        total: await getQueueSize(),
        message: CLOUD_MESSAGES.STARTING_SYNC,
      });
      const pushResult = await pushChanges(authToken, projectId, options);
      result.itemsSynced += pushResult.itemsSynced;
      result.itemsFailed += pushResult.itemsFailed;
      result.conflicts.push(...pushResult.conflicts);
      result.errors.push(...pushResult.errors);
      if (pushResult.errors.length > 0) {
        result.success = false;
      }
    }
    // Pull remote changes
    if (direction === "pull" || direction === "both") {
      options.onProgress?.({
        phase: "pulling",
        current: 0,
        total: 0,
        message: CLOUD_MESSAGES.PULLING(0),
      });
      const pullResult = await pullChanges(authToken, projectId, options);
      result.itemsSynced += pullResult.itemsSynced;
      result.itemsFailed += pullResult.itemsFailed;
      result.conflicts.push(...pullResult.conflicts);
      result.errors.push(...pullResult.errors);
      if (pullResult.errors.length > 0) {
        result.success = false;
      }
    }
    // Handle conflicts if any
    if (result.conflicts.length > 0) {
      options.onProgress?.({
        phase: "resolving",
        current: 0,
        total: result.conflicts.length,
        message: CLOUD_MESSAGES.RESOLVING_CONFLICTS(result.conflicts.length),
      });
      // Per-call strategy overrides the configured default; "manual" leaves
      // conflicts for the user to resolve.
      const strategy = options.conflictStrategy ?? cloudConfig.conflictStrategy;
      if (strategy !== "manual") {
        resolveAllConflicts(strategy);
        result.conflicts = getPendingConflicts();
      }
      if (hasUnresolvedConflicts()) {
        syncState.status = "conflict";
        syncState.conflictCount = result.conflicts.length;
      }
    }
    // Update state
    result.duration = Date.now() - startTime;
    if (result.success && result.conflicts.length === 0) {
      syncState.status = "synced";
      syncState.lastSyncAt = Date.now();
    } else if (result.conflicts.length > 0) {
      syncState.status = "conflict";
    } else {
      syncState.status = "error";
    }
    syncState.pendingChanges = await getQueueSize();
    syncState.syncErrors = result.errors;
    options.onProgress?.({
      phase: "completing",
      current: result.itemsSynced,
      total: result.itemsSynced,
      message: CLOUD_MESSAGES.SYNC_COMPLETE,
    });
    return result;
  } catch (error) {
    // Unexpected failure: surface it in the result instead of rethrowing.
    const message = error instanceof Error ? error.message : String(error);
    syncState.status = "error";
    syncState.syncErrors.push(message);
    result.success = false;
    result.errors.push(message);
    result.duration = Date.now() - startTime;
    return result;
  } finally {
    // Always release the lock and drop conflicts resolved during this run.
    syncInProgress = false;
    clearResolvedConflicts();
  }
};
/**
 * Push local changes to cloud
 *
 * Dequeues up to one batch of offline-queued items and pushes them.
 * Successful items are removed from the queue; failures are marked for
 * retry with the server-reported error attached.
 *
 * NOTE(review): successful IDs are taken as the first `accepted` entries
 * of the batch — this assumes the server accepts items in request order.
 * Confirm that contract with the push endpoint.
 */
const pushChanges = async (
  authToken: string,
  projectId: number,
  options: SyncOptions,
): Promise<Omit<SyncResult, "direction" | "timestamp">> => {
  const result = {
    success: true,
    itemsSynced: 0,
    itemsFailed: 0,
    conflicts: [] as SyncResult["conflicts"],
    errors: [] as string[],
    duration: 0,
  };
  // Get queued items
  const queuedItems = await dequeue(SYNC_CONFIG.MAX_BATCH_SIZE);
  if (queuedItems.length === 0) {
    return result;
  }
  options.onProgress?.({
    phase: "pushing",
    current: 0,
    total: queuedItems.length,
    message: CLOUD_MESSAGES.PUSHING(queuedItems.length),
  });
  const items = queuedItems.map((q) => q.item);
  try {
    const response = await pushToCloud(authToken, projectId, items);
    if (response.success) {
      result.itemsSynced = response.accepted;
      result.itemsFailed = response.rejected;
      // Mark successful items as processed
      const successIds = queuedItems
        .slice(0, response.accepted)
        .map((q) => q.id);
      await markProcessed(successIds);
      // Handle conflicts
      for (const conflict of response.conflicts) {
        result.conflicts.push(conflict);
      }
      syncState.lastPushAt = Date.now();
    } else {
      result.success = false;
      result.errors.push(...(response.errors ?? []));
      // Mark all as failed
      await markFailed(
        queuedItems.map((q) => q.id),
        response.errors?.[0],
      );
    }
  } catch (error) {
    const message = error instanceof Error ? error.message : String(error);
    result.success = false;
    result.errors.push(CLOUD_ERRORS.PUSH_FAILED(message));
    // Queue for retry
    await markFailed(
      queuedItems.map((q) => q.id),
      message,
    );
  }
  return result;
};
/**
 * Pull remote changes from cloud
 *
 * Fetches items newer than the last pulled version/timestamp. Items that
 * clash with queued local edits become conflicts; the rest are applied
 * locally. Advances localVersion to the server version on success.
 */
const pullChanges = async (
  authToken: string,
  projectId: number,
  options: SyncOptions,
): Promise<Omit<SyncResult, "direction" | "timestamp">> => {
  const result = {
    success: true,
    itemsSynced: 0,
    itemsFailed: 0,
    conflicts: [] as SyncResult["conflicts"],
    errors: [] as string[],
    duration: 0,
  };
  try {
    const response = await pullFromCloud(
      authToken,
      projectId,
      localVersion,
      syncState.lastPullAt ?? 0,
    );
    if (response.success) {
      options.onProgress?.({
        phase: "pulling",
        current: response.items.length,
        total: response.items.length,
        message: CLOUD_MESSAGES.PULLING(response.items.length),
      });
      // Process pulled items
      for (const item of response.items) {
        // Check for conflicts with local changes
        const hasConflict = await checkLocalConflict(item);
        if (hasConflict) {
          // Create conflict entry — only possible when the local copy can
          // actually be retrieved (see getLocalItem).
          const localItem = await getLocalItem(item.id, item.type);
          if (localItem) {
            const conflict = createConflict(localItem, item);
            result.conflicts.push(conflict);
          }
        } else {
          // Apply remote change locally
          await applyRemoteChange(item);
          result.itemsSynced++;
        }
      }
      // Update local version
      localVersion = response.serverVersion;
      syncState.lastPullAt = Date.now();
    } else {
      result.success = false;
      result.errors.push(...(response.errors ?? []));
    }
  } catch (error) {
    const message = error instanceof Error ? error.message : String(error);
    result.success = false;
    result.errors.push(CLOUD_ERRORS.PULL_FAILED(message));
  }
  return result;
};
/**
 * POST a JSON payload to a cloud endpoint with auth header and timeout.
 * Throws on any non-2xx response.
 */
const postJson = async <T>(
  path: string,
  authToken: string,
  payload: unknown,
): Promise<T> => {
  const response = await fetch(`${cloudConfig.endpoint}${path}`, {
    method: "POST",
    headers: {
      ...CLOUD_HTTP_CONFIG.HEADERS,
      Authorization: `Bearer ${authToken}`,
    },
    body: JSON.stringify(payload),
    signal: AbortSignal.timeout(CLOUD_HTTP_CONFIG.TIMEOUT_MS),
  });
  if (!response.ok) {
    throw new Error(`HTTP ${response.status}: ${response.statusText}`);
  }
  return response.json() as Promise<T>;
};
/**
 * Push items to cloud API
 */
const pushToCloud = (
  authToken: string,
  projectId: number,
  items: SyncItem[],
): Promise<PushResponse> => {
  const request: PushRequest = {
    items,
    projectId,
    clientVersion: "1.0.0",
  };
  return postJson<PushResponse>(CLOUD_ENDPOINTS.PUSH, authToken, request);
};
/**
 * Pull items from cloud API
 */
const pullFromCloud = (
  authToken: string,
  projectId: number,
  sinceVersion: number,
  sinceTimestamp: number,
): Promise<PullResponse> => {
  const request: PullRequest = {
    projectId,
    sinceVersion,
    sinceTimestamp,
    limit: SYNC_CONFIG.MAX_BATCH_SIZE,
  };
  return postJson<PullResponse>(CLOUD_ENDPOINTS.PULL, authToken, request);
};
/**
 * Check if pulled item conflicts with local changes
 *
 * NOTE(review): this is a coarse check — it reports a conflict whenever
 * ANY item is queued locally, not just changes to _item itself. Confirm
 * whether per-item matching against the queue is intended here.
 */
const checkLocalConflict = async (
  _item: SyncItem,
): Promise<boolean> => {
  // Check if we have pending changes for this item
  const queued = await hasQueuedItems();
  return queued;
};
/**
 * Get local item by ID and type
 *
 * Placeholder: always returns null until wired to the brain storage
 * layer — which means pulled changes currently never produce a
 * populated conflict entry.
 */
const getLocalItem = async (
  _id: string,
  _type: "concept" | "memory" | "relation",
): Promise<SyncItem | null> => {
  // This would retrieve the local item from the brain service
  // Placeholder implementation
  return null;
};
/**
 * Apply a remote change locally
 *
 * Placeholder: a no-op until wired to the local brain storage.
 */
const applyRemoteChange = async (_item: SyncItem): Promise<void> => {
  // This would apply the change to the local brain storage
  // Placeholder implementation
};
/**
 * Queue a change for sync
 *
 * Enqueues one item and flips the sync state to "pending".
 */
export const queueChange = async (item: SyncItem): Promise<void> => {
  await enqueue(item);
  syncState.pendingChanges = await getQueueSize();
  syncState.status = "pending";
};
/**
 * Queue multiple changes
 *
 * @returns the number of items actually enqueued (the queue may fill up).
 */
export const queueChanges = async (items: SyncItem[]): Promise<number> => {
  const added = await enqueueBatch(items);
  syncState.pendingChanges = await getQueueSize();
  syncState.status = "pending";
  return added;
};
/**
 * Force sync now (sets the force option; otherwise identical to sync()).
 */
export const syncNow = async (
  authToken: string,
  projectId: number,
): Promise<SyncResult> => {
  return sync(authToken, projectId, { force: true });
};
/**
 * Reset sync state
 *
 * Clears the offline queue and restores all counters/timestamps to their
 * initial values, including the tracked server version.
 */
export const resetSyncState = async (): Promise<void> => {
  await clearQueue();
  syncState = {
    status: "synced",
    lastSyncAt: null,
    lastPushAt: null,
    lastPullAt: null,
    pendingChanges: 0,
    conflictCount: 0,
    syncErrors: [],
  };
  localVersion = 0;
};

View File

@@ -0,0 +1,249 @@
/**
* Conflict Resolver
*
* Handles sync conflicts between local and remote brain data.
*/
import {
CONFLICT_LABELS,
} from "@constants/brain-cloud";
import type {
SyncConflict,
ConflictStrategy,
SyncItem,
} from "@/types/brain-cloud";
// In-memory conflict storage, keyed by generated conflict ID.
const pendingConflicts = new Map<string, SyncConflict>();
/**
 * Create a conflict from local and remote items
 *
 * Builds an unresolved SyncConflict (identity taken from the local item)
 * and registers it in the pending map.
 */
export const createConflict = (
  localItem: SyncItem,
  remoteItem: SyncItem,
): SyncConflict => {
  const conflict: SyncConflict = {
    id: generateConflictId(),
    itemId: localItem.id,
    itemType: localItem.type,
    localData: localItem.data,
    remoteData: remoteItem.data,
    localVersion: localItem.localVersion,
    // A missing remote version is treated as 0.
    remoteVersion: remoteItem.remoteVersion ?? 0,
    localTimestamp: localItem.timestamp,
    remoteTimestamp: remoteItem.timestamp,
    resolved: false,
  };
  pendingConflicts.set(conflict.id, conflict);
  return conflict;
};
/**
 * Resolve a conflict using the specified strategy
 *
 * Marks the conflict resolved and records the strategy and resolved data.
 * Returns null when the ID is unknown. Note: the "manual" strategy yields
 * null resolvedData — user input is still required.
 */
export const resolveConflict = (
  conflictId: string,
  strategy: ConflictStrategy,
): SyncConflict | null => {
  const conflict = pendingConflicts.get(conflictId);
  if (!conflict) return null;
  const resolver = resolvers[strategy];
  const resolvedData = resolver(conflict);
  conflict.resolved = true;
  conflict.resolution = strategy;
  conflict.resolvedData = resolvedData;
  return conflict;
};
/**
 * Resolve all pending conflicts with a single strategy
 *
 * Conflicts already marked resolved are skipped.
 */
export const resolveAllConflicts = (
  strategy: ConflictStrategy,
): SyncConflict[] => {
  const resolved: SyncConflict[] = [];
  for (const [id, conflict] of pendingConflicts) {
    if (!conflict.resolved) {
      const result = resolveConflict(id, strategy);
      if (result) {
        resolved.push(result);
      }
    }
  }
  return resolved;
};
/**
 * Conflict resolution strategies
 *
 * Maps each strategy name to a function producing the resolved data.
 */
const resolvers: Record<ConflictStrategy, (conflict: SyncConflict) => unknown> = {
  "local-wins": (conflict) => conflict.localData,
  "remote-wins": (conflict) => conflict.remoteData,
  manual: (_conflict) => {
    // Manual resolution returns null - requires user input
    return null;
  },
  merge: (conflict) => {
    // Attempt to merge the data
    return mergeData(conflict.localData, conflict.remoteData);
  },
};
/**
 * Merge local and remote versions of a synced value.
 *
 * For plain objects: start from the remote copy, overwrite every
 * non-timestamp field with the local value (local wins), then keep the
 * most recent of the two timestamps as `updatedAt`. Anything else
 * (primitives, arrays) resolves to the local value unchanged.
 */
const mergeData = (local: unknown, remote: unknown): unknown => {
  if (!isObject(local) || !isObject(remote)) {
    // Non-object values cannot be field-merged; prefer local.
    return local;
  }
  const localObj = local as Record<string, unknown>;
  const remoteObj = remote as Record<string, unknown>;
  const merged: Record<string, unknown> = { ...remoteObj };
  for (const key of Object.keys(localObj)) {
    // Timestamp fields are reconciled separately below.
    if (key === "updatedAt" || key === "timestamp") continue;
    merged[key] = localObj[key];
  }
  const localTime = (localObj.updatedAt ?? localObj.timestamp ?? 0) as number;
  const remoteTime = (remoteObj.updatedAt ?? remoteObj.timestamp ?? 0) as number;
  merged.updatedAt = Math.max(localTime, remoteTime);
  return merged;
};
/**
 * Type guard: true for plain (non-array, non-null) objects.
 */
const isObject = (value: unknown): value is Record<string, unknown> => {
  return typeof value === "object" && value !== null && !Array.isArray(value);
};
/**
 * Get pending conflicts (only those not yet marked resolved).
 */
export const getPendingConflicts = (): SyncConflict[] => {
  return Array.from(pendingConflicts.values()).filter((c) => !c.resolved);
};
/**
 * Get all conflicts, resolved and unresolved.
 */
export const getAllConflicts = (): SyncConflict[] => {
  return Array.from(pendingConflicts.values());
};
/**
 * Get conflict by ID
 */
export const getConflict = (id: string): SyncConflict | undefined => {
  return pendingConflicts.get(id);
};
/**
 * Clear resolved conflicts
 *
 * @returns the number of entries removed.
 */
export const clearResolvedConflicts = (): number => {
  let cleared = 0;
  for (const [id, conflict] of pendingConflicts) {
    if (conflict.resolved) {
      pendingConflicts.delete(id);
      cleared++;
    }
  }
  return cleared;
};
/**
 * Clear all conflicts, resolved or not.
 */
export const clearAllConflicts = (): void => {
  pendingConflicts.clear();
};
/**
 * Get conflict count (unresolved only).
 */
export const getConflictCount = (): number => {
  return getPendingConflicts().length;
};
/**
 * Check if there are unresolved conflicts
 */
export const hasUnresolvedConflicts = (): boolean => {
  return getPendingConflicts().length > 0;
};
/**
 * Recommend a resolution strategy based on which side changed last:
 * strictly newer remote -> "remote-wins", strictly newer local ->
 * "local-wins", identical timestamps -> attempt a "merge".
 */
export const suggestResolution = (conflict: SyncConflict): ConflictStrategy => {
  const { localTimestamp, remoteTimestamp } = conflict;
  if (remoteTimestamp !== localTimestamp) {
    return remoteTimestamp > localTimestamp ? "remote-wins" : "local-wins";
  }
  return "merge";
};
/**
 * Render a conflict as human-readable markdown: header, versions,
 * pretty-printed local/remote JSON, and (when resolved) the resolution
 * label.
 */
export const formatConflict = (conflict: SyncConflict): string => {
  const lines: string[] = [];
  lines.push(`**Conflict: ${conflict.itemId}**`);
  lines.push(`Type: ${conflict.itemType}`);
  lines.push(`Local version: ${conflict.localVersion}`);
  lines.push(`Remote version: ${conflict.remoteVersion}`);
  lines.push("");
  lines.push("Local data:");
  lines.push("```json");
  lines.push(JSON.stringify(conflict.localData, null, 2));
  lines.push("```");
  lines.push("");
  lines.push("Remote data:");
  lines.push("```json");
  lines.push(JSON.stringify(conflict.remoteData, null, 2));
  lines.push("```");
  // Guard both flags instead of using a non-null assertion on
  // conflict.resolution — resolution is optional on the type, so a
  // resolved-but-unlabelled conflict must not crash the formatter.
  if (conflict.resolved && conflict.resolution) {
    lines.push("");
    lines.push(`Resolution: ${CONFLICT_LABELS[conflict.resolution]}`);
  }
  return lines.join("\n");
};
/**
 * Generate a unique conflict ID of the form
 * "conflict_<epoch-ms>_<up to 9 random base36 chars>".
 *
 * Uses String.prototype.slice — substr() is deprecated (Annex B).
 */
const generateConflictId = (): string => {
  return `conflict_${Date.now()}_${Math.random().toString(36).slice(2, 11)}`;
};

View File

@@ -0,0 +1,354 @@
/**
* Brain MCP Server service
* Exposes Brain as an MCP server for external tools
*/
import { createServer, type Server, type IncomingMessage, type ServerResponse } from "node:http";
import { randomUUID } from "node:crypto";
import type {
BrainMcpServerConfig,
BrainMcpRequest,
BrainMcpResponse,
BrainMcpServerStatus,
BrainMcpToolName,
McpContent,
McpError,
} from "@src/types/brain-mcp";
import {
DEFAULT_BRAIN_MCP_SERVER_CONFIG,
BRAIN_MCP_TOOLS,
MCP_ERROR_CODES,
} from "@src/types/brain-mcp";
import {
BRAIN_MCP_SERVER,
BRAIN_MCP_MESSAGES,
BRAIN_MCP_ERRORS,
BRAIN_MCP_AUTH,
} from "@src/constants/brain-mcp";
// Narrow view of the Brain service consumed by the MCP tool handlers.
type BrainService = {
  recall: (query: string, limit?: number) => Promise<unknown>;
  learn: (name: string, whatItDoes: string, options?: unknown) => Promise<unknown>;
  searchMemories: (query: string, limit?: number, type?: string) => Promise<unknown>;
  relate: (source: string, target: string, type: string, weight?: number) => Promise<unknown>;
  getContext: (query: string, maxConcepts?: number) => Promise<string>;
  getStats: () => Promise<unknown>;
  isConnected: () => boolean;
};
// Mutable module-level server state.
interface McpServerState {
  server: Server | null;
  config: BrainMcpServerConfig;
  brainService: BrainService | null;
  connectedClients: number;
  startTime: number | null;   // epoch ms when listen() succeeded
  requestsServed: number;
  lastRequestAt: number | null;
  rateLimitMap: Map<string, { count: number; resetAt: number }>;   // keyed by client IP
  apiKeys: Set<string>;   // accepted keys; empty set accepts any key (see validateApiKey)
}
const state: McpServerState = {
  server: null,
  config: DEFAULT_BRAIN_MCP_SERVER_CONFIG,
  brainService: null,
  connectedClients: 0,
  startTime: null,
  requestsServed: 0,
  lastRequestAt: null,
  rateLimitMap: new Map(),
  apiKeys: new Set(),
};
/** Build an MCP error payload. */
const createMcpError = (code: number, message: string, data?: unknown): McpError => ({
  code,
  message,
  data,
});
/**
 * Build an MCP response envelope: an error response when `error` is given,
 * otherwise a result wrapping the provided content (empty list by default).
 */
const createMcpResponse = (
  id: string | number,
  content?: ReadonlyArray<McpContent>,
  error?: McpError
): BrainMcpResponse => {
  return error
    ? { id, error }
    : { id, result: { content: content ?? [] } };
};
// Fixed-window rate limiter keyed by client IP. Returns true when the
// request is allowed; the window resets windowMs after its first hit.
const checkRateLimit = (clientIp: string): boolean => {
  if (!state.config.rateLimit.enabled) return true;
  const now = Date.now();
  const clientLimit = state.rateLimitMap.get(clientIp);
  if (!clientLimit || now > clientLimit.resetAt) {
    // First request in a (new) window.
    state.rateLimitMap.set(clientIp, {
      count: 1,
      resetAt: now + state.config.rateLimit.windowMs,
    });
    return true;
  }
  if (clientLimit.count >= state.config.rateLimit.maxRequests) {
    return false;
  }
  state.rateLimitMap.set(clientIp, {
    ...clientLimit,
    count: clientLimit.count + 1,
  });
  return true;
};
// Validate the request's API key header. Always passes when auth is disabled.
// NOTE(review): with auth enabled but zero keys registered, ANY non-empty
// key is accepted — confirm this bootstrap behavior is intended.
const validateApiKey = (req: IncomingMessage): boolean => {
  if (!state.config.enableAuth) return true;
  const apiKey = req.headers[state.config.apiKeyHeader.toLowerCase()] as string | undefined;
  if (!apiKey) return false;
  // If no API keys configured, accept any key for now
  if (state.apiKeys.size === 0) return true;
  return state.apiKeys.has(apiKey);
};
/**
 * Dispatch an MCP tool call to the corresponding Brain service method.
 *
 * Validates that the brain service is attached and connected and that the
 * tool is registered, runs the matching handler, and wraps the result as
 * MCP text content (non-string results are pretty-printed JSON).
 *
 * @throws McpError with BRAIN_UNAVAILABLE or TOOL_NOT_FOUND codes.
 */
const handleToolCall = async (
  toolName: BrainMcpToolName,
  args: Record<string, unknown>
): Promise<McpContent[]> => {
  if (!state.brainService) {
    throw createMcpError(MCP_ERROR_CODES.BRAIN_UNAVAILABLE, BRAIN_MCP_MESSAGES.SERVER_NOT_RUNNING);
  }
  if (!state.brainService.isConnected()) {
    throw createMcpError(MCP_ERROR_CODES.BRAIN_UNAVAILABLE, "Brain service not connected");
  }
  // Reject names that are not registered MCP tools before dispatching.
  const tool = BRAIN_MCP_TOOLS.find((t) => t.name === toolName);
  if (!tool) {
    throw createMcpError(MCP_ERROR_CODES.TOOL_NOT_FOUND, `Tool not found: ${toolName}`);
  }
  const toolHandlers: Record<BrainMcpToolName, () => Promise<unknown>> = {
    brain_recall: () => state.brainService!.recall(args.query as string, args.limit as number | undefined),
    brain_learn: () => state.brainService!.learn(
      args.name as string,
      args.whatItDoes as string,
      { keywords: args.keywords, patterns: args.patterns, files: args.files }
    ),
    brain_search: () => state.brainService!.searchMemories(
      args.query as string,
      args.limit as number | undefined,
      args.type as string | undefined
    ),
    brain_relate: () => state.brainService!.relate(
      args.sourceConcept as string,
      args.targetConcept as string,
      args.relationType as string,
      args.weight as number | undefined
    ),
    brain_context: () => state.brainService!.getContext(
      args.query as string,
      args.maxConcepts as number | undefined
    ),
    brain_stats: () => state.brainService!.getStats(),
    brain_projects: async () => {
      // Import dynamically to avoid circular dependency
      const { listProjects } = await import("@src/services/brain/project-service");
      return listProjects();
    },
  };
  const handler = toolHandlers[toolName];
  if (!handler) {
    throw createMcpError(MCP_ERROR_CODES.TOOL_NOT_FOUND, `No handler for tool: ${toolName}`);
  }
  // Single assignment — use const instead of a pre-declared mutable `let`.
  const result = await handler();
  return [
    {
      type: "text",
      text: typeof result === "string" ? result : JSON.stringify(result, null, 2),
    },
  ];
};
/**
 * HTTP entry point for the MCP server.
 *
 * Applies CORS headers, answers OPTIONS preflights, enforces POST-only,
 * rate limiting, and API-key auth, then dispatches the "tools/call" and
 * "tools/list" MCP methods.
 */
const handleRequest = async (
  req: IncomingMessage,
  res: ServerResponse
): Promise<void> => {
  // Set CORS headers
  res.setHeader("Access-Control-Allow-Origin", state.config.allowedOrigins.join(","));
  res.setHeader("Access-Control-Allow-Methods", "POST, OPTIONS");
  res.setHeader("Access-Control-Allow-Headers", `Content-Type, ${state.config.apiKeyHeader}`);
  // Handle preflight
  if (req.method === "OPTIONS") {
    res.writeHead(204);
    res.end();
    return;
  }
  if (req.method !== "POST") {
    res.writeHead(405);
    res.end(JSON.stringify(createMcpResponse("", undefined, BRAIN_MCP_ERRORS.INVALID_REQUEST)));
    return;
  }
  // Get client IP for rate limiting
  const clientIp = req.socket.remoteAddress || "unknown";
  // Check rate limit
  if (!checkRateLimit(clientIp)) {
    res.writeHead(429);
    res.end(JSON.stringify(createMcpResponse("", undefined, BRAIN_MCP_ERRORS.RATE_LIMITED)));
    return;
  }
  // Validate API key
  if (!validateApiKey(req)) {
    res.writeHead(401);
    res.end(JSON.stringify(createMcpResponse("", undefined, BRAIN_MCP_ERRORS.UNAUTHORIZED)));
    return;
  }
  // Parse request body — accumulated as a string (no size cap applied here).
  let body = "";
  req.on("data", (chunk) => {
    body += chunk;
  });
  req.on("end", async () => {
    state.requestsServed++;
    state.lastRequestAt = Date.now();
    let mcpRequest: BrainMcpRequest;
    try {
      mcpRequest = JSON.parse(body) as BrainMcpRequest;
    } catch {
      // Malformed JSON: no request id is recoverable, so respond with "".
      res.writeHead(400);
      res.end(JSON.stringify(createMcpResponse("", undefined, BRAIN_MCP_ERRORS.PARSE_ERROR)));
      return;
    }
    // Handle MCP request
    try {
      if (mcpRequest.method === "tools/call") {
        const { name, arguments: args } = mcpRequest.params;
        const content = await handleToolCall(name, args);
        res.writeHead(200, { "Content-Type": "application/json" });
        res.end(JSON.stringify(createMcpResponse(mcpRequest.id, content)));
      } else if (mcpRequest.method === "tools/list") {
        const tools = BRAIN_MCP_TOOLS.map((tool) => ({
          name: tool.name,
          description: tool.description,
          inputSchema: tool.inputSchema,
        }));
        res.writeHead(200, { "Content-Type": "application/json" });
        res.end(JSON.stringify({
          id: mcpRequest.id,
          result: { tools },
        }));
      } else {
        res.writeHead(400);
        res.end(JSON.stringify(createMcpResponse(mcpRequest.id, undefined, BRAIN_MCP_ERRORS.METHOD_NOT_FOUND)));
      }
    } catch (error) {
      // Tool handlers throw McpError-shaped objects; anything else is
      // wrapped as an INTERNAL_ERROR.
      const mcpError = error instanceof Object && "code" in error
        ? error as McpError
        : createMcpError(MCP_ERROR_CODES.INTERNAL_ERROR, error instanceof Error ? error.message : "Unknown error");
      res.writeHead(500);
      res.end(JSON.stringify(createMcpResponse(mcpRequest.id, undefined, mcpError)));
    }
  });
};
// Public API
/**
 * Start the MCP HTTP server.
 *
 * @param brainService Brain facade used by the tool handlers.
 * @param config       Optional overrides merged over the defaults.
 * @throws if a server instance is already running.
 * Resolves once the server is listening; rejects on a listen error.
 */
export const start = async (
  brainService: BrainService,
  config?: Partial<BrainMcpServerConfig>
): Promise<void> => {
  if (state.server) {
    throw new Error(BRAIN_MCP_MESSAGES.SERVER_ALREADY_RUNNING);
  }
  state.config = { ...DEFAULT_BRAIN_MCP_SERVER_CONFIG, ...config };
  state.brainService = brainService;
  return new Promise((resolve, reject) => {
    state.server = createServer(handleRequest);
    state.server.on("error", (error) => {
      // NOTE(review): this listener also fires for errors after startup,
      // nulling state.server; the late reject is then a no-op on the
      // already-settled promise.
      state.server = null;
      reject(error);
    });
    state.server.listen(state.config.port, state.config.host, () => {
      state.startTime = Date.now();
      state.requestsServed = 0;
      resolve();
    });
  });
};
/**
 * Stop the server, if running, and clear runtime state.
 * Resolves after close() completes; no-op when not running.
 */
export const stop = async (): Promise<void> => {
  if (!state.server) {
    return;
  }
  return new Promise((resolve) => {
    state.server!.close(() => {
      state.server = null;
      state.startTime = null;
      state.connectedClients = 0;
      state.brainService = null;
      resolve();
    });
  });
};
/**
 * Snapshot of the MCP server's runtime status.
 *
 * uptime is 0 while stopped; lastRequestAt stays undefined until the
 * first request has been served.
 */
export const getStatus = (): BrainMcpServerStatus => ({
  running: state.server !== null,
  port: state.config.port,
  host: state.config.host,
  connectedClients: state.connectedClients,
  uptime: state.startTime ? Date.now() - state.startTime : 0,
  requestsServed: state.requestsServed,
  // ?? (not ||): only null maps to undefined, a falsy-but-valid
  // timestamp would be preserved.
  lastRequestAt: state.lastRequestAt ?? undefined,
});
/** Register an API key accepted by the auth check. */
export const addApiKey = (key: string): void => {
  state.apiKeys.add(key);
};
/** Revoke a previously registered API key. */
export const removeApiKey = (key: string): void => {
  state.apiKeys.delete(key);
};
/** True while the HTTP server instance exists. */
export const isRunning = (): boolean => state.server !== null;
/** Shallow copy of the current server configuration. */
export const getConfig = (): BrainMcpServerConfig => ({ ...state.config });
/** Merge partial configuration into the live config (effective immediately). */
export const updateConfig = (config: Partial<BrainMcpServerConfig>): void => {
  state.config = { ...state.config, ...config };
};
/** List the tool names/descriptions exposed by this server. */
export const getAvailableTools = (): ReadonlyArray<{ name: string; description: string }> =>
  BRAIN_MCP_TOOLS.map((t) => ({ name: t.name, description: t.description }));

View File

@@ -0,0 +1,270 @@
/**
* Offline Queue
*
* Manages queued changes when offline for later synchronization.
*/
import fs from "fs/promises";
import { join } from "path";
import { DIRS } from "@constants/paths";
import { SYNC_CONFIG, CLOUD_ERRORS } from "@constants/brain-cloud";
import type {
SyncItem,
OfflineQueueItem,
OfflineQueueState,
SyncOperationType,
} from "@/types/brain-cloud";
// Queue file path (persisted under the app data directory)
const getQueuePath = (): string => join(DIRS.data, "brain-offline-queue.json");
// In-memory queue state
let queueState: OfflineQueueState = {
  items: [],
  totalSize: 0,
  oldestItem: null,
};
// True once the queue file has been read (or initialized) in this process.
let loaded = false;
/**
 * Load queue from disk
 *
 * Idempotent: subsequent calls are no-ops. A missing or unreadable file
 * silently resets to an empty queue.
 * NOTE(review): the parsed JSON is trusted as-is (no schema validation).
 */
export const loadQueue = async (): Promise<void> => {
  if (loaded) return;
  try {
    const data = await fs.readFile(getQueuePath(), "utf-8");
    const parsed = JSON.parse(data) as OfflineQueueState;
    queueState = parsed;
    loaded = true;
  } catch {
    // File doesn't exist or is invalid, start fresh
    queueState = {
      items: [],
      totalSize: 0,
      oldestItem: null,
    };
    loaded = true;
  }
};
/**
 * Save queue to disk
 *
 * Best-effort: failures are logged, not thrown, so callers never crash
 * on persistence errors.
 */
const saveQueue = async (): Promise<void> => {
  try {
    await fs.mkdir(DIRS.data, { recursive: true });
    await fs.writeFile(getQueuePath(), JSON.stringify(queueState, null, 2));
  } catch (error) {
    console.error("Failed to save offline queue:", error);
  }
};
/**
 * Add item to offline queue
 *
 * @returns true on success.
 * @throws when the queue is at MAX_QUEUE_SIZE (note the asymmetry with
 *         enqueueBatch, which truncates silently instead).
 */
export const enqueue = async (item: SyncItem): Promise<boolean> => {
  await loadQueue();
  // Check queue size limit
  if (queueState.items.length >= SYNC_CONFIG.MAX_QUEUE_SIZE) {
    throw new Error(CLOUD_ERRORS.QUEUE_FULL);
  }
  const queueItem: OfflineQueueItem = {
    id: generateQueueId(),
    item,
    retryCount: 0,
    lastAttempt: 0,
  };
  queueState.items.push(queueItem);
  queueState.totalSize = queueState.items.length;
  // Track the oldest queued timestamp for staleness reporting.
  queueState.oldestItem = Math.min(
    queueState.oldestItem ?? item.timestamp,
    item.timestamp,
  );
  await saveQueue();
  return true;
};
/**
 * Add multiple items to queue
 *
 * Stops silently (no throw) when the queue fills up mid-batch.
 *
 * @returns the number of items actually added.
 */
export const enqueueBatch = async (items: SyncItem[]): Promise<number> => {
  await loadQueue();
  let added = 0;
  for (const item of items) {
    if (queueState.items.length >= SYNC_CONFIG.MAX_QUEUE_SIZE) {
      break;
    }
    const queueItem: OfflineQueueItem = {
      id: generateQueueId(),
      item,
      retryCount: 0,
      lastAttempt: 0,
    };
    queueState.items.push(queueItem);
    added++;
  }
  queueState.totalSize = queueState.items.length;
  if (added > 0) {
    queueState.oldestItem = Math.min(
      queueState.oldestItem ?? Date.now(),
      ...items.map((i) => i.timestamp),
    );
  }
  await saveQueue();
  return added;
};
/**
 * Get items from queue for processing
 *
 * Returns up to `limit` queued items still eligible for retry; items are
 * not removed (see markProcessed / markFailed).
 * NOTE(review): the retry filter compares retryCount against
 * SYNC_CONFIG.MAX_QUEUE_SIZE, which looks like the wrong constant — a
 * dedicated max-retry limit seems intended. Confirm against SYNC_CONFIG.
 */
export const dequeue = async (limit: number = SYNC_CONFIG.MAX_BATCH_SIZE): Promise<OfflineQueueItem[]> => {
  await loadQueue();
  // Get items that haven't exceeded retry limit
  const available = queueState.items.filter(
    (item) => item.retryCount < SYNC_CONFIG.MAX_QUEUE_SIZE,
  );
  return available.slice(0, limit);
};
/**
 * Mark items as processed (remove from queue)
 *
 * Recomputes oldestItem from what remains and persists the queue.
 */
export const markProcessed = async (ids: string[]): Promise<void> => {
  await loadQueue();
  const idSet = new Set(ids);
  queueState.items = queueState.items.filter((item) => !idSet.has(item.id));
  queueState.totalSize = queueState.items.length;
  // Update oldest item
  if (queueState.items.length > 0) {
    queueState.oldestItem = Math.min(
      ...queueState.items.map((i) => i.item.timestamp),
    );
  } else {
    queueState.oldestItem = null;
  }
  await saveQueue();
};
/**
 * Mark items as failed (increment retry count)
 *
 * Records the last attempt time and (optionally) the error on each item;
 * unknown IDs are silently ignored.
 */
export const markFailed = async (
  ids: string[],
  error?: string,
): Promise<void> => {
  await loadQueue();
  const now = Date.now();
  for (const id of ids) {
    const item = queueState.items.find((i) => i.id === id);
    if (item) {
      item.retryCount++;
      item.lastAttempt = now;
      item.error = error;
    }
  }
  await saveQueue();
};
/**
 * Get queue state
 *
 * NOTE(review): shallow copy — the returned `items` array is shared with
 * internal state; treat it as read-only.
 */
export const getQueueState = async (): Promise<OfflineQueueState> => {
  await loadQueue();
  return { ...queueState };
};
/**
 * Get queue size
 */
export const getQueueSize = async (): Promise<number> => {
  await loadQueue();
  return queueState.items.length;
};
/**
 * Check if queue has items
 */
export const hasQueuedItems = async (): Promise<boolean> => {
  await loadQueue();
  return queueState.items.length > 0;
};
/**
 * Clear the entire queue, in memory and on disk.
 */
export const clearQueue = async (): Promise<void> => {
  queueState = {
    items: [],
    totalSize: 0,
    oldestItem: null,
  };
  await saveQueue();
};
/**
 * Remove stale items from the queue.
 *
 * Drops entries whose payload timestamp is older than the configured
 * stale-age window; persists only when something was actually removed.
 * Returns the number of entries dropped.
 */
export const pruneStaleItems = async (): Promise<number> => {
  await loadQueue();
  const oldestAllowed = Date.now() - SYNC_CONFIG.STALE_ITEM_AGE_MS;
  const kept = queueState.items.filter(
    (entry) => entry.item.timestamp > oldestAllowed,
  );
  const removed = queueState.items.length - kept.length;
  queueState.items = kept;
  queueState.totalSize = kept.length;
  if (removed > 0) {
    await saveQueue();
  }
  return removed;
};
/**
 * All queued entries whose payload is of the given type.
 */
export const getItemsByType = async (
  type: "concept" | "memory" | "relation",
): Promise<OfflineQueueItem[]> => {
  await loadQueue();
  const matchesType = (entry: OfflineQueueItem): boolean => entry.item.type === type;
  return queueState.items.filter(matchesType);
};
/**
 * All queued entries created by the given sync operation.
 */
export const getItemsByOperation = async (
  operation: SyncOperationType,
): Promise<OfflineQueueItem[]> => {
  await loadQueue();
  const matchesOperation = (entry: OfflineQueueItem): boolean =>
    entry.item.operation === operation;
  return queueState.items.filter(matchesOperation);
};
/**
 * Generate a unique queue item ID.
 *
 * Format: `q_<epoch-ms>_<random base36 suffix>`. Uses
 * String.prototype.slice instead of the deprecated substr()
 * (slice(2, 11) covers the same 9 characters substr(2, 9) did).
 */
const generateQueueId = (): string => {
  const random = Math.random().toString(36).slice(2, 11);
  return `q_${Date.now()}_${random}`;
};

View File

@@ -0,0 +1,384 @@
/**
* Brain project service
* Manages multiple Brain projects/knowledge bases
*/
import { writeFile, readFile, mkdir } from "node:fs/promises";
import { join } from "node:path";
import { existsSync } from "node:fs";
import { homedir } from "node:os";
import type {
BrainProject,
BrainProjectStats,
BrainProjectSettings,
BrainProjectCreateInput,
BrainProjectUpdateInput,
BrainProjectSwitchResult,
BrainProjectListResult,
BrainProjectExport,
BrainProjectImportResult,
ExportedConcept,
ExportedMemory,
ExportedRelationship,
} from "@src/types/brain-project";
import {
DEFAULT_BRAIN_PROJECT_SETTINGS,
BRAIN_PROJECT_EXPORT_VERSION,
} from "@src/types/brain-project";
import {
BRAIN_PROJECT,
BRAIN_PROJECT_STORAGE,
BRAIN_PROJECT_PATHS,
BRAIN_PROJECT_MESSAGES,
BRAIN_PROJECT_API,
} from "@src/constants/brain-project";
/** Module-level singleton state for the Brain project service. */
interface ProjectServiceState {
  /** All known projects keyed by numeric project id. */
  projects: Map<number, BrainProject>;
  /** Id of the currently active project, or null when none is selected. */
  activeProjectId: number | null;
  /** Absolute path of the JSON config file the state is persisted to. */
  configPath: string;
  /** Set once the config has been loaded from disk (see initialize()). */
  initialized: boolean;
}
// One shared service state per process; functions below read/write it.
const state: ProjectServiceState = {
  projects: new Map(),
  activeProjectId: null,
  configPath: join(homedir(), ".local", "share", "codetyper", BRAIN_PROJECT_STORAGE.CONFIG_FILE),
  initialized: false,
};
/** Create the brain data directories (root, exports, backups) if missing. */
const ensureDirectories = async (): Promise<void> => {
  const base = join(homedir(), ".local", "share", "codetyper", "brain");
  const required = [base, join(base, "exports"), join(base, "backups")];
  for (const dir of required) {
    if (existsSync(dir)) continue;
    await mkdir(dir, { recursive: true });
  }
};
/**
 * Populate the in-memory project map from the persisted config file.
 * A missing file is a no-op; a corrupt file resets the state to empty.
 */
const loadProjectsFromConfig = async (): Promise<void> => {
  if (!existsSync(state.configPath)) {
    return;
  }
  try {
    const raw = await readFile(state.configPath, "utf-8");
    const parsed = JSON.parse(raw) as {
      projects: BrainProject[];
      activeProjectId: number | null;
    };
    state.projects.clear();
    for (const project of parsed.projects) {
      state.projects.set(project.id, project);
    }
    state.activeProjectId = parsed.activeProjectId;
  } catch {
    // Config file corrupted, start fresh
    state.projects.clear();
    state.activeProjectId = null;
  }
};
/** Persist all projects plus the active-project pointer to the config file. */
const saveProjectsToConfig = async (): Promise<void> => {
  await ensureDirectories();
  const snapshot = {
    projects: [...state.projects.values()],
    activeProjectId: state.activeProjectId,
    version: "1.0.0",
    updatedAt: Date.now(),
  };
  await writeFile(state.configPath, JSON.stringify(snapshot, null, 2));
};
/** Next free numeric project id: max existing id + 1, starting at 1. */
const generateProjectId = (): number => {
  const ids = [...state.projects.keys()];
  return ids.length === 0 ? 1 : Math.max(...ids) + 1;
};
/** Zeroed stats for a freshly created project. */
const createDefaultStats = (): BrainProjectStats => {
  return {
    conceptCount: 0,
    memoryCount: 0,
    relationshipCount: 0,
    totalTokensUsed: 0,
  };
};
// Public API
/** Idempotent service bootstrap: ensure dirs and load persisted projects. */
export const initialize = async (): Promise<void> => {
  if (state.initialized) {
    return;
  }
  await ensureDirectories();
  await loadProjectsFromConfig();
  state.initialized = true;
};
/**
 * Create a new project after validating the name's length and
 * (case-insensitive) uniqueness, persist it, and return it.
 *
 * @throws INVALID_NAME when the name is out of bounds,
 *         ALREADY_EXISTS on a duplicate name
 */
export const createProject = async (input: BrainProjectCreateInput): Promise<BrainProject> => {
  await initialize();
  // Name must fall within the configured length bounds
  const nameLength = input.name.length;
  if (nameLength < BRAIN_PROJECT.NAME_MIN_LENGTH || nameLength > BRAIN_PROJECT.NAME_MAX_LENGTH) {
    throw new Error(BRAIN_PROJECT_MESSAGES.INVALID_NAME);
  }
  // Names are unique case-insensitively
  const wanted = input.name.toLowerCase();
  for (const existing of state.projects.values()) {
    if (existing.name.toLowerCase() === wanted) {
      throw new Error(BRAIN_PROJECT_MESSAGES.ALREADY_EXISTS);
    }
  }
  const timestamp = Date.now();
  const project: BrainProject = {
    id: generateProjectId(),
    name: input.name,
    description: input.description || "",
    rootPath: input.rootPath,
    createdAt: timestamp,
    updatedAt: timestamp,
    stats: createDefaultStats(),
    settings: {
      ...DEFAULT_BRAIN_PROJECT_SETTINGS,
      ...input.settings,
    },
    isActive: false,
  };
  state.projects.set(project.id, project);
  await saveProjectsToConfig();
  return project;
};
/**
 * Apply a partial update to an existing project and persist the result.
 * Unset fields keep their current values; settings are shallow-merged.
 *
 * @throws NOT_FOUND when the project id is unknown
 */
export const updateProject = async (
  projectId: number,
  input: BrainProjectUpdateInput
): Promise<BrainProject> => {
  await initialize();
  const current = state.projects.get(projectId);
  if (!current) {
    throw new Error(BRAIN_PROJECT_MESSAGES.NOT_FOUND);
  }
  const next: BrainProject = {
    ...current,
    name: input.name ?? current.name,
    description: input.description ?? current.description,
    settings: input.settings
      ? { ...current.settings, ...input.settings }
      : current.settings,
    updatedAt: Date.now(),
  };
  state.projects.set(projectId, next);
  await saveProjectsToConfig();
  return next;
};
/**
 * Remove a project by id, clearing the active pointer if it was active.
 * Returns false when the id is unknown, true on success.
 */
export const deleteProject = async (projectId: number): Promise<boolean> => {
  await initialize();
  if (!state.projects.has(projectId)) {
    return false;
  }
  if (state.activeProjectId === projectId) {
    state.activeProjectId = null;
  }
  state.projects.delete(projectId);
  await saveProjectsToConfig();
  return true;
};
/**
 * Make the given project active, deactivating the previous one, and
 * persist the change.
 *
 * @throws NOT_FOUND when the target project does not exist
 */
export const switchProject = async (projectId: number): Promise<BrainProjectSwitchResult> => {
  await initialize();
  const target = state.projects.get(projectId);
  if (!target) {
    throw new Error(BRAIN_PROJECT_MESSAGES.NOT_FOUND);
  }
  const previous = state.activeProjectId
    ? state.projects.get(state.activeProjectId)
    : undefined;
  // Flip the isActive flags on immutable copies
  if (previous) {
    state.projects.set(previous.id, { ...previous, isActive: false });
  }
  state.projects.set(projectId, { ...target, isActive: true });
  state.activeProjectId = projectId;
  await saveProjectsToConfig();
  return {
    success: true,
    previousProject: previous,
    currentProject: state.projects.get(projectId)!,
    message: `${BRAIN_PROJECT_MESSAGES.SWITCHED} "${target.name}"`,
  };
};
/** Look up a project by id; undefined when unknown. */
export const getProject = async (projectId: number): Promise<BrainProject | undefined> => {
  await initialize();
  const found = state.projects.get(projectId);
  return found;
};
/** The currently active project, or undefined when none is selected. */
export const getActiveProject = async (): Promise<BrainProject | undefined> => {
  await initialize();
  if (!state.activeProjectId) {
    return undefined;
  }
  return state.projects.get(state.activeProjectId);
};
/** All projects sorted by most recently updated, plus the active id. */
export const listProjects = async (): Promise<BrainProjectListResult> => {
  await initialize();
  const byRecency = [...state.projects.values()].sort(
    (a, b) => b.updatedAt - a.updatedAt,
  );
  return {
    projects: byRecency,
    activeProjectId: state.activeProjectId ?? undefined,
    total: state.projects.size,
  };
};
/** First project whose rootPath exactly matches the given path. */
export const findProjectByPath = async (rootPath: string): Promise<BrainProject | undefined> => {
  await initialize();
  for (const project of state.projects.values()) {
    if (project.rootPath === rootPath) {
      return project;
    }
  }
  return undefined;
};
/** Merge partial stats into a project; silently no-ops on unknown ids. */
export const updateProjectStats = async (
  projectId: number,
  stats: Partial<BrainProjectStats>
): Promise<void> => {
  await initialize();
  const current = state.projects.get(projectId);
  if (!current) return;
  state.projects.set(projectId, {
    ...current,
    stats: { ...current.stats, ...stats },
    updatedAt: Date.now(),
  });
  await saveProjectsToConfig();
};
/**
 * Export a project to a JSON file under the brain exports directory.
 *
 * NOTE: concept/memory/relationship data is not yet fetched from the Brain
 * API; the export currently contains project metadata with empty arrays.
 *
 * @throws NOT_FOUND when the project id is unknown
 */
export const exportProject = async (projectId: number): Promise<BrainProjectExport> => {
  await initialize();
  const project = state.projects.get(projectId);
  if (!project) {
    throw new Error(BRAIN_PROJECT_MESSAGES.NOT_FOUND);
  }
  // In a real implementation, this would fetch data from Brain API
  // For now, return structure with empty data
  const exportData: BrainProjectExport = {
    project,
    concepts: [],
    memories: [],
    relationships: [],
    exportedAt: Date.now(),
    version: BRAIN_PROJECT_EXPORT_VERSION,
  };
  // Project names are user input; strip path separators and other
  // filesystem-unsafe characters so the export file cannot escape the
  // exports directory (e.g. a name like "../../x") or fail to be created.
  const safeName = project.name.replace(/[^a-zA-Z0-9._-]/g, "_");
  const exportPath = join(
    homedir(),
    ".local",
    "share",
    "codetyper",
    "brain",
    "exports",
    `${safeName}-${Date.now()}${BRAIN_PROJECT_STORAGE.EXPORT_EXTENSION}`
  );
  await writeFile(exportPath, JSON.stringify(exportData, null, 2));
  return exportData;
};
/**
 * Import a previously exported project. Creates a new project named
 * "<original> (imported)" and reports how many records were imported.
 * Never throws: failures are reported via the result's errors array.
 */
export const importProject = async (
  exportData: BrainProjectExport
): Promise<BrainProjectImportResult> => {
  await initialize();
  try {
    const source = exportData.project;
    // Create new project with imported data
    const created = await createProject({
      name: `${source.name} (imported)`,
      description: source.description,
      rootPath: source.rootPath,
      settings: source.settings,
    });
    // In a real implementation, this would send data to Brain API
    // For now, just return success with counts
    return {
      success: true,
      project: created,
      imported: {
        concepts: exportData.concepts.length,
        memories: exportData.memories.length,
        relationships: exportData.relationships.length,
      },
      errors: [],
    };
  } catch (error) {
    const message = error instanceof Error ? error.message : "Import failed";
    return {
      success: false,
      project: exportData.project,
      imported: { concepts: 0, memories: 0, relationships: 0 },
      errors: [message],
    };
  }
};
/** Settings for a project, or undefined when the id is unknown. */
export const getProjectSettings = async (projectId: number): Promise<BrainProjectSettings | undefined> => {
  await initialize();
  return state.projects.get(projectId)?.settings;
};
/** Merge partial settings into a project and return the merged settings. */
export const updateProjectSettings = async (
  projectId: number,
  settings: Partial<BrainProjectSettings>
): Promise<BrainProjectSettings> => {
  const updated = await updateProject(projectId, { settings });
  return updated.settings;
};
/**
 * Activate the project registered for the given root path, if any.
 * Returns the project on success, undefined when no project matches.
 */
export const setActiveProjectByPath = async (rootPath: string): Promise<BrainProject | undefined> => {
  const match = await findProjectByPath(rootPath);
  if (!match) {
    return undefined;
  }
  await switchProject(match.id);
  return match;
};

View File

@@ -19,6 +19,8 @@ import {
buildCompletePrompt,
} from "@services/prompt-builder";
import { initSuggestionService } from "@services/command-suggestion-service";
import * as brainService from "@services/brain";
import { BRAIN_DISABLED } from "@constants/brain";
import { addContextFile } from "@services/chat-tui/files";
import type { ProviderName, Message } from "@/types/providers";
import type { ChatSession } from "@/types/index";
@@ -147,6 +149,39 @@ const initializeTheme = async (): Promise<void> => {
}
};
/**
 * Initialize brain service and update store state.
 * Skipped entirely when the BRAIN_DISABLED feature flag is set.
 */
const initializeBrain = async (): Promise<void> => {
  // Feature flag: when Brain is disabled, report "disconnected" and make
  // sure the banner never shows.
  if (BRAIN_DISABLED) {
    appStore.setBrainStatus("disconnected");
    appStore.setBrainShowBanner(false);
    return;
  }
  try {
    appStore.setBrainStatus("connecting");
    const connected = await brainService.initialize();
    if (!connected) {
      appStore.setBrainStatus("disconnected");
      appStore.setBrainShowBanner(true);
      return;
    }
    const state = brainService.getState();
    appStore.setBrainStatus("connected");
    appStore.setBrainUser(state.user);
    appStore.setBrainCounts(state.knowledgeCount, state.memoryCount);
    appStore.setBrainShowBanner(false);
  } catch {
    appStore.setBrainStatus("disconnected");
    appStore.setBrainShowBanner(true);
  }
};
/**
* Rebuild system prompt when interaction mode changes
* Updates both the state and the first message in the conversation
@@ -178,9 +213,13 @@ export const initializeChatService = async (
const initialMode = appStore.getState().interactionMode;
const state = await createInitialState(options, initialMode);
await validateProvider(state);
await buildSystemPrompt(state, options);
await initializeTheme();
// Run provider validation and system prompt building in parallel
// These are independent and both involve async operations
await Promise.all([
validateProvider(state),
buildSystemPrompt(state, options),
initializeTheme(),
]);
const session = await initializeSession(state, options);
@@ -188,9 +227,18 @@ export const initializeChatService = async (
state.messages.push({ role: "system", content: state.systemPrompt });
}
await addInitialContextFiles(state, options.files);
await initializePermissions();
// Run these in parallel - they're independent
await Promise.all([
addInitialContextFiles(state, options.files),
initializePermissions(),
]);
initSuggestionService(process.cwd());
// Initialize brain service (non-blocking, errors silently handled)
initializeBrain().catch(() => {
// Silently fail - brain is optional
});
return { state, session };
};

View File

@@ -366,7 +366,7 @@ export const handleMessage = async (
const modeLabel = interactionMode === "ask" ? "Ask" : "Code Review";
callbacks.onLog(
"system",
`${modeLabel} mode: Read-only tools only (Ctrl+Tab to switch modes)`,
`${modeLabel} mode: Read-only tools only (Ctrl+M to switch modes)`,
);
}

View File

@@ -0,0 +1,209 @@
/**
* Confidence-based filtering service
* Filters PR review issues and agent outputs by confidence score
*/
import type {
ConfidenceScore,
ConfidenceLevel,
ConfidenceFactor,
ConfidenceFilterConfig,
FilteredResult,
ValidationResult,
ConfidenceFilterStats,
} from "@src/types/confidence-filter";
import {
CONFIDENCE_LEVELS,
DEFAULT_CONFIDENCE_FILTER_CONFIG,
} from "@src/types/confidence-filter";
import { CONFIDENCE_FILTER, CONFIDENCE_WEIGHTS } from "@src/constants/confidence-filter";
/** Map a 0-100 score to its confidence-level bucket; defaults to "low". */
export const calculateConfidenceLevel = (score: number): ConfidenceLevel => {
  const entries = Object.entries(CONFIDENCE_LEVELS) as Array<[ConfidenceLevel, { min: number; max: number }]>;
  for (const [level, range] of entries) {
    if (score >= range.min && score <= range.max) {
      return level;
    }
  }
  return "low";
};
/**
 * Weighted average of factor scores, rounded, with the derived level.
 * Zero total weight yields a score of 0.
 */
export const calculateConfidenceScore = (factors: ReadonlyArray<ConfidenceFactor>): ConfidenceScore => {
  let totalWeight = 0;
  let weightedSum = 0;
  for (const factor of factors) {
    totalWeight += factor.weight;
    weightedSum += factor.score * factor.weight;
  }
  const value = totalWeight > 0 ? Math.round(weightedSum / totalWeight) : 0;
  return { value, level: calculateConfidenceLevel(value), factors };
};
/** Build a factor, clamping score to [0, 100] and weight to [0, 1]. */
export const createConfidenceFactor = (
  name: string,
  score: number,
  weight: number,
  reason: string
): ConfidenceFactor => {
  const clamp = (value: number, lo: number, hi: number): number =>
    Math.min(hi, Math.max(lo, value));
  return {
    name,
    score: clamp(score, 0, 100),
    weight: clamp(weight, 0, 1),
    reason,
  };
};
/** Factor scoring how many expected patterns were matched (capped at 100). */
export const createPatternMatchFactor = (matchCount: number, expectedCount: number): ConfidenceFactor => {
  const ratio = matchCount / Math.max(1, expectedCount);
  return createConfidenceFactor(
    "Pattern Match",
    Math.min(100, ratio * 100),
    CONFIDENCE_WEIGHTS.PATTERN_MATCH,
    `Matched ${matchCount}/${expectedCount} expected patterns`
  );
};
/** Factor wrapping an externally computed context-relevance percentage. */
export const createContextRelevanceFactor = (relevanceScore: number): ConfidenceFactor => {
  return createConfidenceFactor(
    "Context Relevance",
    relevanceScore,
    CONFIDENCE_WEIGHTS.CONTEXT_RELEVANCE,
    `Context relevance score: ${relevanceScore}%`
  );
};
/** Factor derived from an issue's severity; unknown severities score 50. */
export const createSeverityFactor = (severity: "low" | "medium" | "high" | "critical"): ConfidenceFactor => {
  const scoreBySeverity: Record<string, number> = {
    low: 40,
    medium: 60,
    high: 80,
    critical: 95,
  };
  return createConfidenceFactor(
    "Severity Level",
    scoreBySeverity[severity] ?? 50,
    CONFIDENCE_WEIGHTS.SEVERITY_LEVEL,
    `Issue severity: ${severity}`
  );
};
/** Factor wrapping a static-analysis confidence percentage. */
export const createCodeAnalysisFactor = (analysisScore: number): ConfidenceFactor => {
  return createConfidenceFactor(
    "Code Analysis",
    analysisScore,
    CONFIDENCE_WEIGHTS.CODE_ANALYSIS,
    `Static analysis confidence: ${analysisScore}%`
  );
};
/** Factor wrapping the historical accuracy rate for similar issues. */
export const createHistoricalAccuracyFactor = (accuracy: number): ConfidenceFactor => {
  return createConfidenceFactor(
    "Historical Accuracy",
    accuracy,
    CONFIDENCE_WEIGHTS.HISTORICAL_ACCURACY,
    `Historical accuracy for similar issues: ${accuracy}%`
  );
};
/** Tag each item with whether its confidence meets the configured threshold. */
export const filterByConfidence = <T>(
  items: ReadonlyArray<{ item: T; confidence: ConfidenceScore }>,
  config: ConfidenceFilterConfig = DEFAULT_CONFIDENCE_FILTER_CONFIG
): ReadonlyArray<FilteredResult<T>> => {
  return items.map((entry) => ({
    item: entry.item,
    confidence: entry.confidence,
    passed: entry.confidence.value >= config.minThreshold,
  }));
};
/** Keep only the items that passed filtering, discarding confidence info. */
export const filterPassedOnly = <T>(results: ReadonlyArray<FilteredResult<T>>): ReadonlyArray<T> => {
  const passed: T[] = [];
  for (const result of results) {
    if (result.passed) passed.push(result.item);
  }
  return passed;
};
/** Partition filtered results into buckets by confidence level. */
export const groupByConfidenceLevel = <T>(
  results: ReadonlyArray<FilteredResult<T>>
): Record<ConfidenceLevel, ReadonlyArray<FilteredResult<T>>> => {
  const bucket = (level: ConfidenceLevel): ReadonlyArray<FilteredResult<T>> =>
    results.filter((r) => r.confidence.level === level);
  return {
    low: bucket("low"),
    medium: bucket("medium"),
    high: bucket("high"),
    critical: bucket("critical"),
  };
};
/** Aggregate counts and the average confidence over filtered results. */
export const calculateFilterStats = <T>(results: ReadonlyArray<FilteredResult<T>>): ConfidenceFilterStats => {
  let passed = 0;
  let totalConfidence = 0;
  for (const result of results) {
    if (result.passed) passed++;
    totalConfidence += result.confidence.value;
  }
  const grouped = groupByConfidenceLevel(results);
  return {
    total: results.length,
    passed,
    filtered: results.length - passed,
    byLevel: {
      low: grouped.low.length,
      medium: grouped.medium.length,
      high: grouped.high.length,
      critical: grouped.critical.length,
    },
    averageConfidence: results.length > 0 ? Math.round(totalConfidence / results.length) : 0,
  };
};
/**
 * Run an external validator over a score's factors, returning its verdict
 * with the adjusted confidence clamped to [0, 100].
 */
export const validateConfidence = async (
  confidence: ConfidenceScore,
  validatorFn: (factors: ReadonlyArray<ConfidenceFactor>) => Promise<{ validated: boolean; adjustment: number; notes: string }>
): Promise<ValidationResult> => {
  const { validated, adjustment, notes } = await validatorFn(confidence.factors);
  const adjusted = confidence.value + adjustment;
  return {
    validated,
    adjustedConfidence: Math.min(100, Math.max(0, adjusted)),
    validatorNotes: notes,
  };
};
/**
 * Render a confidence score as an ANSI-colored "[value% - LEVEL]" tag,
 * optionally followed by one indented line per contributing factor.
 */
export const formatConfidenceScore = (confidence: ConfidenceScore, showFactors: boolean = false): string => {
  const colorByLevel: Record<ConfidenceLevel, string> = {
    low: "\x1b[90m",
    medium: "\x1b[33m",
    high: "\x1b[32m",
    critical: "\x1b[31m",
  };
  const reset = "\x1b[0m";
  const header = `${colorByLevel[confidence.level]}[${confidence.value}% - ${confidence.level.toUpperCase()}]${reset}`;
  if (!showFactors || confidence.factors.length === 0) {
    return header;
  }
  const factorLines = confidence.factors
    .map((f) => `  - ${f.name}: ${f.score}% (weight: ${f.weight})`)
    .join("\n");
  return `${header}\n${factorLines}`;
};
/**
 * Merge two factor lists by name. A name appearing in both lists keeps the
 * newer factor's metadata but averages the old and new scores (rounded).
 */
export const mergeConfidenceFactors = (
  existing: ReadonlyArray<ConfidenceFactor>,
  additional: ReadonlyArray<ConfidenceFactor>
): ReadonlyArray<ConfidenceFactor> => {
  const merged = new Map<string, ConfidenceFactor>();
  for (const factor of existing) {
    merged.set(factor.name, factor);
  }
  for (const factor of additional) {
    const prior = merged.get(factor.name);
    merged.set(
      factor.name,
      prior ? { ...factor, score: Math.round((prior.score + factor.score) / 2) } : factor,
    );
  }
  return [...merged.values()];
};
/**
 * Compute an effective threshold: start from the user preference (or the
 * base), lower by 10 for critical contexts, raise by 10 for automated
 * ones, clamping against the configured min/max bounds.
 */
export const adjustThreshold = (
  baseThreshold: number,
  context: { isCritical: boolean; isAutomated: boolean; userPreference?: number }
): number => {
  const start = context.userPreference ?? baseThreshold;
  // Critical issues should surface even at lower confidence
  const afterCritical = context.isCritical
    ? Math.max(CONFIDENCE_FILTER.MIN_THRESHOLD, start - 10)
    : start;
  // Automated pipelines get a stricter bar to cut noise
  return context.isAutomated
    ? Math.min(CONFIDENCE_FILTER.MAX_THRESHOLD, afterCritical + 10)
    : afterCritical;
};

View File

@@ -44,23 +44,41 @@ const PROVIDER_ENV_VARS: Record<Provider, string> = {
* Config state (singleton pattern using closure)
*/
let configState: Config = getDefaults();
let configLoaded = false;
let configLoadPromise: Promise<void> | null = null;
/**
* Load configuration from file
* Load configuration from file (with caching)
*/
export const loadConfig = async (): Promise<void> => {
try {
const data = await fs.readFile(FILES.config, "utf-8");
const loaded = JSON.parse(data);
// Clean up deprecated keys
delete loaded.models;
configState = { ...getDefaults(), ...loaded };
} catch {
// Config file doesn't exist or is invalid, use defaults
configState = getDefaults();
// Return cached config if already loaded
if (configLoaded) {
return;
}
// If loading is in progress, wait for it
if (configLoadPromise) {
return configLoadPromise;
}
// Start loading
configLoadPromise = (async () => {
try {
const data = await fs.readFile(FILES.config, "utf-8");
const loaded = JSON.parse(data);
// Clean up deprecated keys
delete loaded.models;
configState = { ...getDefaults(), ...loaded };
} catch {
// Config file doesn't exist or is invalid, use defaults
configState = getDefaults();
}
configLoaded = true;
})();
return configLoadPromise;
};
/**

View File

@@ -0,0 +1,209 @@
/**
* Checkpoint Handler
*
* Manages user approval checkpoints during feature development.
*/
import {
PHASE_CHECKPOINTS,
FEATURE_DEV_ERRORS,
} from "@constants/feature-dev";
import type {
FeatureDevPhase,
FeatureDevState,
Checkpoint,
CheckpointDecision,
PhaseExecutionContext,
} from "@/types/feature-dev";
/**
 * Create a checkpoint for user approval, combining the phase's static
 * checkpoint config with a summary derived from the current state.
 */
export const createCheckpoint = (
  phase: FeatureDevPhase,
  state: FeatureDevState,
  details: string[],
): Checkpoint => {
  const { title, required } = PHASE_CHECKPOINTS[phase];
  return {
    phase,
    title,
    summary: buildCheckpointSummary(phase, state),
    details,
    requiresApproval: required,
    suggestedAction: "approve",
  };
};
/**
 * Build a one-line checkpoint summary from the slice of workflow state
 * relevant to the given phase.
 */
const buildCheckpointSummary = (
  phase: FeatureDevPhase,
  state: FeatureDevState,
): string => {
  const summaryBuilders: Record<FeatureDevPhase, () => string> = {
    understand: () =>
      `${state.requirements.length} requirement(s) identified, ${state.clarifications.length} clarification(s) made`,
    explore: () => {
      let findings = 0;
      for (const result of state.explorationResults) {
        findings += result.findings.length;
      }
      return `Found ${state.relevantFiles.length} relevant file(s) with ${findings} finding(s)`;
    },
    plan: () =>
      state.plan
        ? `${state.plan.steps.length} step(s) planned, ${state.plan.estimatedComplexity} complexity`
        : "No plan created",
    implement: () => {
      let additions = 0;
      let deletions = 0;
      for (const change of state.changes) {
        additions += change.additions;
        deletions += change.deletions;
      }
      return `${state.changes.length} file(s) changed (+${additions}/-${deletions})`;
    },
    verify: () => {
      if (!state.testResults) return "Tests not run yet";
      const { passedTests, failedTests, totalTests } = state.testResults;
      return `${passedTests}/${totalTests} tests passed, ${failedTests} failed`;
    },
    review: () => {
      const issues = state.reviewFindings.filter((f) => f.type === "issue").length;
      const suggestions = state.reviewFindings.filter((f) => f.type === "suggestion").length;
      return `${issues} issue(s), ${suggestions} suggestion(s) found`;
    },
    finalize: () => `Ready to commit ${state.changes.length} file change(s)`,
  };
  return summaryBuilders[phase]();
};
/**
 * Whether the given phase is configured to require a user checkpoint.
 */
export const requiresCheckpoint = (phase: FeatureDevPhase): boolean => {
  const { required } = PHASE_CHECKPOINTS[phase];
  return required;
};
/**
 * Request user approval at a checkpoint.
 *
 * Without a checkpoint handler, required checkpoints throw while optional
 * ones auto-approve. With a handler, the decision is recorded in the
 * workflow state before being returned.
 */
export const requestApproval = async (
  checkpoint: Checkpoint,
  ctx: PhaseExecutionContext,
): Promise<{ decision: CheckpointDecision; feedback?: string }> => {
  const handler = ctx.onCheckpoint;
  if (!handler) {
    if (checkpoint.requiresApproval) {
      throw new Error(FEATURE_DEV_ERRORS.CHECKPOINT_REQUIRED(checkpoint.phase));
    }
    // Optional checkpoints pass through silently
    return { decision: "approve" };
  }
  const result = await handler(checkpoint);
  // Keep an audit trail of every decision taken
  ctx.state.checkpoints.push({
    checkpoint,
    decision: result.decision,
    feedback: result.feedback,
    timestamp: Date.now(),
  });
  return result;
};
/**
 * Translate a checkpoint decision into whether the workflow proceeds and
 * which follow-up action (if any) applies.
 */
export const processCheckpointDecision = (
  decision: CheckpointDecision,
  _feedback?: string,
): { proceed: boolean; action?: string } => {
  const outcomes: Record<CheckpointDecision, { proceed: boolean; action?: string }> = {
    approve: { proceed: true },
    reject: { proceed: false, action: "rejected" },
    modify: { proceed: false, action: "modify" },
    skip: { proceed: true, action: "skipped" },
    abort: { proceed: false, action: "aborted" },
  };
  return outcomes[decision];
};
/**
 * Render a checkpoint as a markdown block for display to the user.
 */
export const formatCheckpoint = (checkpoint: Checkpoint): string => {
  const out: string[] = [
    `## ${checkpoint.title}`,
    "",
    `**Phase:** ${checkpoint.phase}`,
    `**Summary:** ${checkpoint.summary}`,
    "",
  ];
  if (checkpoint.details.length > 0) {
    out.push("### Details");
    out.push(...checkpoint.details.map((detail) => `- ${detail}`));
    out.push("");
  }
  if (checkpoint.requiresApproval) {
    out.push("*This checkpoint requires your approval to proceed.*");
  }
  return out.join("\n");
};
/**
 * All recorded checkpoint decisions belonging to the given phase.
 */
export const getPhaseCheckpoints = (
  state: FeatureDevState,
  phase: FeatureDevPhase,
): Array<{
  checkpoint: Checkpoint;
  decision: CheckpointDecision;
  feedback?: string;
  timestamp: number;
}> => {
  const belongsToPhase = (entry: { checkpoint: Checkpoint }): boolean =>
    entry.checkpoint.phase === phase;
  return state.checkpoints.filter(belongsToPhase);
};
/**
 * True when the phase has at least one "approve" or "skip" decision
 * recorded against it.
 */
export const wasPhaseApproved = (
  state: FeatureDevState,
  phase: FeatureDevPhase,
): boolean => {
  for (const entry of getPhaseCheckpoints(state, phase)) {
    if (entry.decision === "approve" || entry.decision === "skip") {
      return true;
    }
  }
  return false;
};

View File

@@ -0,0 +1,292 @@
/**
* Context Builder
*
* Builds context for each phase of feature development.
*/
import {
PHASE_PROMPTS,
PHASE_DESCRIPTIONS,
} from "@constants/feature-dev";
import type {
FeatureDevPhase,
FeatureDevState,
} from "@/types/feature-dev";
/**
 * Build the full prompt context for one phase execution: header, phase
 * instructions, the original feature request, and any accumulated state.
 */
export const buildPhaseContext = (
  phase: FeatureDevPhase,
  state: FeatureDevState,
  userRequest: string,
): string => {
  const sections: string[] = [
    `# Feature Development: ${phase.toUpperCase()} Phase`,
    "",
    `**Goal:** ${PHASE_DESCRIPTIONS[phase]}`,
    "",
    "## Instructions",
    PHASE_PROMPTS[phase],
    "",
    "## Feature Request",
    userRequest,
    "",
  ];
  const stateContext = buildStateContext(phase, state);
  if (stateContext) {
    sections.push("## Current State", stateContext, "");
  }
  return sections.join("\n");
};
/**
 * Build state context based on accumulated results.
 *
 * Returns a markdown fragment summarizing the workflow results relevant to
 * the given phase, or null when there is nothing useful to include.
 */
const buildStateContext = (
  phase: FeatureDevPhase,
  state: FeatureDevState,
): string | null => {
  // One builder per phase; each reads only the slices of state that the
  // phase's prompt needs, and returns null when that state is empty.
  const contextBuilders: Record<FeatureDevPhase, () => string | null> = {
    understand: () => null, // No prior context
    explore: () => {
      if (state.requirements.length === 0) return null;
      const lines: string[] = [];
      lines.push("### Understood Requirements");
      for (const req of state.requirements) {
        lines.push(`- ${req}`);
      }
      if (state.clarifications.length > 0) {
        lines.push("");
        lines.push("### Clarifications");
        for (const c of state.clarifications) {
          lines.push(`Q: ${c.question}`);
          lines.push(`A: ${c.answer}`);
        }
      }
      return lines.join("\n");
    },
    plan: () => {
      const lines: string[] = [];
      // Requirements
      if (state.requirements.length > 0) {
        lines.push("### Requirements");
        for (const req of state.requirements) {
          lines.push(`- ${req}`);
        }
        lines.push("");
      }
      // Exploration results
      // Only the first 10 files are listed verbatim to keep the prompt short.
      if (state.relevantFiles.length > 0) {
        lines.push("### Relevant Files Found");
        for (const file of state.relevantFiles.slice(0, 10)) {
          lines.push(`- ${file}`);
        }
        if (state.relevantFiles.length > 10) {
          lines.push(`- ... and ${state.relevantFiles.length - 10} more`);
        }
        lines.push("");
      }
      // Patterns found
      // Deduplicated via Set and capped at 5 entries.
      const patterns = state.explorationResults.flatMap((r) => r.patterns);
      if (patterns.length > 0) {
        lines.push("### Patterns to Follow");
        for (const pattern of [...new Set(patterns)].slice(0, 5)) {
          lines.push(`- ${pattern}`);
        }
        lines.push("");
      }
      return lines.length > 0 ? lines.join("\n") : null;
    },
    implement: () => {
      // Implementation needs the approved plan; nothing to show without one.
      if (!state.plan) return null;
      const lines: string[] = [];
      lines.push("### Approved Implementation Plan");
      lines.push(`**Summary:** ${state.plan.summary}`);
      lines.push("");
      lines.push("**Steps:**");
      for (const step of state.plan.steps) {
        lines.push(`${step.order}. [${step.changeType}] ${step.file}`);
        lines.push(` ${step.description}`);
      }
      if (state.plan.risks.length > 0) {
        lines.push("");
        lines.push("**Risks to Watch:**");
        for (const risk of state.plan.risks) {
          lines.push(`- ${risk}`);
        }
      }
      return lines.join("\n");
    },
    verify: () => {
      if (state.changes.length === 0) return null;
      const lines: string[] = [];
      lines.push("### Files Changed");
      for (const change of state.changes) {
        lines.push(
          `- ${change.path} (${change.changeType}, +${change.additions}/-${change.deletions})`,
        );
      }
      if (state.plan?.testStrategy) {
        lines.push("");
        lines.push("### Test Strategy");
        lines.push(state.plan.testStrategy);
      }
      return lines.join("\n");
    },
    review: () => {
      const lines: string[] = [];
      // Changes to review
      if (state.changes.length > 0) {
        lines.push("### Changes to Review");
        for (const change of state.changes) {
          lines.push(
            `- ${change.path} (${change.changeType}, +${change.additions}/-${change.deletions})`,
          );
        }
        lines.push("");
      }
      // Test results
      if (state.testResults) {
        lines.push("### Test Results");
        lines.push(
          `${state.testResults.passedTests}/${state.testResults.totalTests} tests passed`,
        );
        if (state.testResults.failedTests > 0) {
          lines.push("**Failures:**");
          for (const failure of state.testResults.failures) {
            lines.push(`- ${failure.testName}: ${failure.error}`);
          }
        }
        lines.push("");
      }
      return lines.length > 0 ? lines.join("\n") : null;
    },
    finalize: () => {
      const lines: string[] = [];
      // Summary of changes
      lines.push("### Summary of Changes");
      for (const change of state.changes) {
        lines.push(
          `- ${change.path} (${change.changeType}, +${change.additions}/-${change.deletions})`,
        );
      }
      lines.push("");
      // Review findings to address
      // Only critical issues block finalization; lower severities are omitted.
      const issues = state.reviewFindings.filter(
        (f) => f.type === "issue" && f.severity === "critical",
      );
      if (issues.length > 0) {
        lines.push("### Outstanding Issues");
        for (const issue of issues) {
          lines.push(`- [${issue.severity}] ${issue.message}`);
        }
        lines.push("");
      }
      // Test status
      if (state.testResults) {
        const status = state.testResults.passed ? "✓ All tests passing" : "✗ Tests failing";
        lines.push(`### Test Status: ${status}`);
      }
      return lines.join("\n");
    },
  };
  return contextBuilders[phase]();
};
/**
 * Render a human-readable progress report: current phase/status plus a
 * per-phase progress ladder (✓ done, → current, ○ pending).
 */
export const buildWorkflowSummary = (state: FeatureDevState): string => {
  const phases: FeatureDevPhase[] = [
    "understand",
    "explore",
    "plan",
    "implement",
    "verify",
    "review",
    "finalize",
  ];
  const currentIndex = phases.indexOf(state.phase);
  const markerFor = (index: number): string => {
    if (index < currentIndex) return "✓";
    if (index > currentIndex) return "○";
    // Current phase: done when completed, otherwise in progress
    return state.phaseStatus === "completed" ? "✓" : "→";
  };
  const lines: string[] = [
    "# Feature Development Progress",
    "",
    `**Current Phase:** ${state.phase}`,
    `**Status:** ${state.phaseStatus}`,
    "",
    "## Progress",
  ];
  phases.forEach((phase, index) => {
    lines.push(`${markerFor(index)} ${phase}`);
  });
  return lines.join("\n");
};
/**
 * Flatten the workflow state into a small scalar summary for quick
 * reference/logging. `duration` is milliseconds since the workflow began.
 */
export const extractKeyInfo = (
  state: FeatureDevState,
): Record<string, string | number> => {
  const info: Record<string, string | number> = {
    phase: state.phase,
    status: state.phaseStatus,
    requirementsCount: state.requirements.length,
    relevantFilesCount: state.relevantFiles.length,
    changesCount: state.changes.length,
    reviewFindingsCount: state.reviewFindings.length,
    checkpointsCount: state.checkpoints.length,
    duration: Date.now() - state.startedAt,
  };
  return info;
};

View File

@@ -0,0 +1,290 @@
/**
* Feature-Dev Workflow Service
*
* Main orchestrator for the 7-phase feature development workflow.
*/
import { PHASE_ORDER, FEATURE_DEV_CONFIG, FEATURE_DEV_ERRORS } from "@constants/feature-dev";
import {
executePhase,
validateTransition,
} from "@services/feature-dev/phase-executor";
import { buildWorkflowSummary, extractKeyInfo } from "@services/feature-dev/context-builder";
import type {
FeatureDevPhase,
FeatureDevState,
PhaseExecutionContext,
Checkpoint,
CheckpointDecision,
} from "@/types/feature-dev";
// Re-export sub-modules
export * from "@services/feature-dev/phase-executor";
export * from "@services/feature-dev/checkpoint-handler";
export * from "@services/feature-dev/context-builder";
// In-memory registry of workflows currently tracked by this process.
const activeWorkflows = new Map<string, FeatureDevState>();
/**
 * Create a new feature development workflow, register it, and return it.
 * The workflow always begins in the "understand" phase.
 */
export const createWorkflow = (
  id: string,
  requirements: string[] = [],
): FeatureDevState => {
  const state: FeatureDevState = {
    id,
    phase: "understand",
    phaseStatus: "pending",
    startedAt: Date.now(),
    updatedAt: Date.now(),
    requirements,
    clarifications: [],
    explorationResults: [],
    relevantFiles: [],
    changes: [],
    reviewFindings: [],
    checkpoints: [],
  };
  activeWorkflows.set(id, state);
  return state;
};
/**
 * Look up an active workflow by ID; undefined when not registered.
 */
export const getWorkflow = (id: string): FeatureDevState | undefined =>
  activeWorkflows.get(id);
/**
 * Shallow-merge updates into a workflow, bumping `updatedAt`.
 * Returns the new state object, or undefined when the ID is unknown.
 */
export const updateWorkflow = (
  id: string,
  updates: Partial<FeatureDevState>,
): FeatureDevState | undefined => {
  const existing = activeWorkflows.get(id);
  if (existing === undefined) return undefined;
  const merged: FeatureDevState = {
    ...existing,
    ...updates,
    updatedAt: Date.now(),
  };
  activeWorkflows.set(id, merged);
  return merged;
};
/**
 * Remove a workflow from the registry; true when something was removed.
 */
export const deleteWorkflow = (id: string): boolean =>
  activeWorkflows.delete(id);
/**
 * Run the complete feature development workflow
 *
 * Drives the phase state machine from the workflow's current phase until
 * "finalize" reports "completed", executing one phase per loop iteration
 * via `executePhase` and persisting state via `updateWorkflow`.
 *
 * @param workflowId - ID of an existing workflow; created fresh if unknown.
 * @param userRequest - Original user request, forwarded to each phase.
 * @param options - Execution context: working dir, session, abort signal,
 *   and callbacks for progress reporting and checkpoint approval.
 * @returns success flag, the final workflow state, and an error on failure.
 */
export const runWorkflow = async (
  workflowId: string,
  userRequest: string,
  options: {
    config?: Partial<typeof FEATURE_DEV_CONFIG>;
    workingDir: string;
    sessionId: string;
    abortSignal?: AbortSignal;
    onProgress?: (message: string) => void;
    onCheckpoint?: (checkpoint: Checkpoint) => Promise<{
      decision: CheckpointDecision;
      feedback?: string;
    }>;
  },
): Promise<{
  success: boolean;
  finalState: FeatureDevState;
  error?: string;
}> => {
  // Merge config with defaults (kept for future extensibility)
  // The merged object is intentionally discarded via `void`.
  void { ...FEATURE_DEV_CONFIG, ...options.config };
  // Get or create workflow
  let state = getWorkflow(workflowId);
  if (!state) {
    state = createWorkflow(workflowId);
  }
  // Build execution context shared by all phases; `state` is re-pointed
  // after every update so ctx always sees the latest snapshot.
  const ctx: PhaseExecutionContext = {
    state,
    workingDir: options.workingDir,
    sessionId: options.sessionId,
    abortSignal: options.abortSignal,
    onProgress: options.onProgress,
    onCheckpoint: options.onCheckpoint,
  };
  // Execute phases in order until finalize completes.
  while (state.phase !== "finalize" || state.phaseStatus !== "completed") {
    // Check for abort between phases (not mid-phase).
    if (options.abortSignal?.aborted) {
      state.abortReason = "Workflow aborted by user";
      state.phaseStatus = "failed";
      return {
        success: false,
        finalState: state,
        error: FEATURE_DEV_ERRORS.WORKFLOW_ABORTED(state.abortReason),
      };
    }
    // Execute current phase
    const result = await executePhase(state.phase, ctx, userRequest);
    // Apply state updates; fall back to the in-memory state if the
    // workflow was deleted from the registry mid-run.
    if (result.stateUpdates) {
      state = updateWorkflow(workflowId, result.stateUpdates) ?? state;
      ctx.state = state;
    }
    // Handle phase result
    if (!result.success) {
      if (state.abortReason) {
        // Workflow was aborted
        return {
          success: false,
          finalState: state,
          error: result.error,
        };
      }
      // Phase needs attention (rejected, needs modification, etc.)
      // Stay in current phase and let caller handle
      // NOTE(review): this re-executes the same phase next iteration; if
      // executePhase keeps failing without setting abortReason, the loop
      // never terminates — confirm executePhase guarantees progress/abort.
      continue;
    }
    // Move to next phase, validating the transition first.
    if (result.nextPhase) {
      const transition = validateTransition({
        fromPhase: state.phase,
        toPhase: result.nextPhase,
      });
      if (!transition.valid) {
        return {
          success: false,
          finalState: state,
          error: transition.error,
        };
      }
      state = updateWorkflow(workflowId, {
        phase: result.nextPhase,
        phaseStatus: "pending",
      }) ?? state;
      ctx.state = state;
    } else {
      // No next phase, workflow complete
      break;
    }
  }
  return {
    success: true,
    finalState: state,
  };
};
/**
 * Get a workflow's progress: a Markdown summary plus key counters.
 * Returns undefined when the workflow ID is not registered.
 */
export const getWorkflowProgress = (
  workflowId: string,
): { summary: string; keyInfo: Record<string, string | number> } | undefined => {
  const workflow = getWorkflow(workflowId);
  return workflow === undefined
    ? undefined
    : {
        summary: buildWorkflowSummary(workflow),
        keyInfo: extractKeyInfo(workflow),
      };
};
/**
 * Abort an active workflow by marking it failed with the given reason.
 * Returns the updated state, or undefined when the ID is unknown.
 */
export const abortWorkflow = (
  workflowId: string,
  reason: string,
): FeatureDevState | undefined =>
  updateWorkflow(workflowId, { phaseStatus: "failed", abortReason: reason });
/**
 * Reset workflow to a specific phase, discarding any data that would be
 * (re)produced at or after that phase. Returns undefined for unknown IDs.
 */
export const resetToPhase = (
  workflowId: string,
  phase: FeatureDevPhase,
): FeatureDevState | undefined => {
  const workflow = getWorkflow(workflowId);
  if (!workflow) return undefined;
  const targetIndex = PHASE_ORDER.indexOf(phase);
  const updates: Partial<FeatureDevState> = {
    phase,
    phaseStatus: "pending",
  };
  // Each entry clears the data a given phase produces; it applies whenever
  // the reset target is at or before that phase in the workflow order.
  const resetters: Array<[FeatureDevPhase, () => void]> = [
    [
      "explore",
      () => {
        updates.explorationResults = [];
        updates.relevantFiles = [];
      },
    ],
    ["plan", () => void (updates.plan = undefined)],
    ["implement", () => void (updates.changes = [])],
    ["verify", () => void (updates.testResults = undefined)],
    ["review", () => void (updates.reviewFindings = [])],
    ["finalize", () => void (updates.commitHash = undefined)],
  ];
  for (const [producerPhase, clear] of resetters) {
    if (targetIndex <= PHASE_ORDER.indexOf(producerPhase)) clear();
  }
  return updateWorkflow(workflowId, updates);
};
/**
 * List all active workflows as lightweight status records
 * (id, current phase, phase status, start timestamp).
 */
export const listWorkflows = (): Array<{
  id: string;
  phase: FeatureDevPhase;
  status: string;
  startedAt: number;
}> => {
  const entries: Array<{
    id: string;
    phase: FeatureDevPhase;
    status: string;
    startedAt: number;
  }> = [];
  for (const { id, phase, phaseStatus, startedAt } of activeWorkflows.values()) {
    entries.push({ id, phase, status: phaseStatus, startedAt });
  }
  return entries;
};
/**
 * Create a unique workflow ID.
 *
 * Format: `fd_<epoch-ms>_<up to 9 base36 chars>`. Uniqueness relies on the
 * timestamp plus a random suffix; collisions are theoretically possible but
 * vanishingly unlikely within one process.
 */
export const createWorkflowId = (): string => {
  // String#substr is deprecated; slice(2, 11) takes the same 9 characters
  // that follow the "0." prefix of Math.random()'s base-36 representation.
  return `fd_${Date.now()}_${Math.random().toString(36).slice(2, 11)}`;
};

View File

@@ -0,0 +1,345 @@
/**
* Phase Executor
*
* Executes individual phases of the feature development workflow.
*/
import {
PHASE_ORDER,
ALLOWED_TRANSITIONS,
PHASE_TIMEOUTS,
FEATURE_DEV_ERRORS,
FEATURE_DEV_MESSAGES,
} from "@constants/feature-dev";
import {
createCheckpoint,
requiresCheckpoint,
requestApproval,
processCheckpointDecision,
} from "@services/feature-dev/checkpoint-handler";
import { buildPhaseContext } from "@services/feature-dev/context-builder";
import type {
FeatureDevPhase,
PhaseExecutionContext,
PhaseExecutionResult,
PhaseTransitionRequest,
} from "@/types/feature-dev";
/**
 * Execute a single phase
 *
 * Runs the phase's logic, then (when the phase requires it or the logic
 * produced one) pauses on a checkpoint for user approval. Mutates
 * `ctx.state` in place: phase/phaseStatus/updatedAt are updated as the
 * phase moves through in_progress → awaiting_approval → approved/completed.
 *
 * @param phase - Phase to run.
 * @param ctx - Shared execution context; `ctx.state` is mutated.
 * @param userRequest - Original user request, forwarded to phase logic.
 * @returns Result with success flag, optional next phase, and state updates
 *   for the caller to persist. Failures never throw; they are returned.
 */
export const executePhase = async (
  phase: FeatureDevPhase,
  ctx: PhaseExecutionContext,
  userRequest: string,
): Promise<PhaseExecutionResult> => {
  // Update state to in_progress
  ctx.state.phase = phase;
  ctx.state.phaseStatus = "in_progress";
  ctx.state.updatedAt = Date.now();
  ctx.onProgress?.(FEATURE_DEV_MESSAGES.STARTING(phase));
  try {
    // Execute phase-specific logic
    const result = await executePhaseLogic(phase, ctx, userRequest);
    // Handle checkpoint if needed: either the phase type mandates one, or
    // the phase logic returned one. A missing checkpoint is synthesized
    // with an empty item list.
    if (requiresCheckpoint(phase) || result.checkpoint) {
      const checkpoint =
        result.checkpoint ?? createCheckpoint(phase, ctx.state, []);
      ctx.state.phaseStatus = "awaiting_approval";
      const { decision, feedback } = await requestApproval(checkpoint, ctx);
      const { proceed, action } = processCheckpointDecision(decision, feedback);
      if (!proceed) {
        if (action === "aborted") {
          ctx.state.abortReason = feedback ?? "User aborted";
          return {
            success: false,
            phase,
            error: FEATURE_DEV_ERRORS.WORKFLOW_ABORTED(ctx.state.abortReason),
            stateUpdates: { phaseStatus: "failed" },
          };
        }
        // Rejected or modify - stay in current phase
        // (phaseStatus reset to "pending" so the phase can be re-run).
        return {
          success: false,
          phase,
          stateUpdates: { phaseStatus: "pending" },
        };
      }
      ctx.state.phaseStatus = "approved";
    }
    // Phase completed successfully
    ctx.state.phaseStatus = "completed";
    ctx.state.updatedAt = Date.now();
    ctx.onProgress?.(FEATURE_DEV_MESSAGES.COMPLETED(phase));
    return {
      success: true,
      phase,
      nextPhase: getNextPhase(phase),
      stateUpdates: { phaseStatus: "completed", ...result.stateUpdates },
    };
  } catch (error) {
    // Any throw from phase logic or the approval flow is converted into a
    // failed result rather than propagated.
    const message = error instanceof Error ? error.message : String(error);
    ctx.state.phaseStatus = "failed";
    return {
      success: false,
      phase,
      error: FEATURE_DEV_ERRORS.PHASE_FAILED(phase, message),
      stateUpdates: { phaseStatus: "failed" },
    };
  }
};
/**
 * Execute phase-specific logic by dispatching to the per-phase handler.
 */
const executePhaseLogic = async (
  phase: FeatureDevPhase,
  ctx: PhaseExecutionContext,
  userRequest: string,
): Promise<Partial<PhaseExecutionResult>> => {
  // Assemble the textual context this phase will operate on.
  const phaseContext = buildPhaseContext(phase, ctx.state, userRequest);
  switch (phase) {
    case "understand":
      return executeUnderstandPhase(ctx, phaseContext);
    case "explore":
      return executeExplorePhase(ctx, phaseContext);
    case "plan":
      return executePlanPhase(ctx, phaseContext);
    case "implement":
      return executeImplementPhase(ctx, phaseContext);
    case "verify":
      return executeVerifyPhase(ctx, phaseContext);
    case "review":
      return executeReviewPhase(ctx, phaseContext);
    case "finalize":
      return executeFinalizePhase(ctx, phaseContext);
  }
};
/**
 * Understand phase execution
 *
 * Placeholder: LLM-driven requirement parsing is not implemented yet, so the
 * phase raises a checkpoint for the user to confirm understanding manually.
 */
const executeUnderstandPhase = async (
  ctx: PhaseExecutionContext,
  _phaseContext: string,
): Promise<Partial<PhaseExecutionResult>> => ({
  checkpoint: createCheckpoint("understand", ctx.state, [
    "Review the identified requirements",
    "Provide any clarifications needed",
    "Confirm understanding is correct",
  ]),
  stateUpdates: {},
});
/**
 * Explore phase execution
 *
 * Placeholder for parallel codebase exploration; currently only reports
 * progress and returns no state changes.
 */
const executeExplorePhase = async (
  ctx: PhaseExecutionContext,
  _phaseContext: string,
): Promise<Partial<PhaseExecutionResult>> => {
  ctx.onProgress?.(FEATURE_DEV_MESSAGES.EXPLORING("relevant code patterns"));
  return { stateUpdates: {} };
};
/**
 * Plan phase execution — the implementation plan must be approved at a
 * checkpoint before the workflow may proceed.
 */
const executePlanPhase = async (
  ctx: PhaseExecutionContext,
  _phaseContext: string,
): Promise<Partial<PhaseExecutionResult>> => ({
  checkpoint: createCheckpoint("plan", ctx.state, [
    "Review the implementation plan",
    "Check the proposed file changes",
    "Verify the approach is correct",
    "Consider the identified risks",
  ]),
  stateUpdates: {},
});
/**
 * Implement phase execution — walks the approved plan step by step,
 * reporting progress per step.
 *
 * @throws Error when the plan phase has not produced a plan.
 */
const executeImplementPhase = async (
  ctx: PhaseExecutionContext,
  _phaseContext: string,
): Promise<Partial<PhaseExecutionResult>> => {
  const plan = ctx.state.plan;
  if (!plan) {
    throw new Error(FEATURE_DEV_ERRORS.NO_PLAN);
  }
  const totalSteps = plan.steps.length;
  plan.steps.forEach((_step, index) => {
    ctx.onProgress?.(FEATURE_DEV_MESSAGES.IMPLEMENTING_STEP(index + 1, totalSteps));
    // Step execution would happen here
  });
  return { stateUpdates: {} };
};
/**
 * Verify phase execution — would run the test suite; currently raises a
 * checkpoint so the user can review test results.
 */
const executeVerifyPhase = async (
  ctx: PhaseExecutionContext,
  _phaseContext: string,
): Promise<Partial<PhaseExecutionResult>> => {
  ctx.onProgress?.(FEATURE_DEV_MESSAGES.RUNNING_TESTS);
  return {
    checkpoint: createCheckpoint("verify", ctx.state, [
      "Review test results",
      "Check for any failures",
      "Verify coverage is adequate",
    ]),
    stateUpdates: {},
  };
};
/**
 * Review phase execution — self-review of the changes, gated on a checkpoint.
 */
const executeReviewPhase = async (
  ctx: PhaseExecutionContext,
  _phaseContext: string,
): Promise<Partial<PhaseExecutionResult>> => {
  ctx.onProgress?.(FEATURE_DEV_MESSAGES.REVIEWING);
  return {
    checkpoint: createCheckpoint("review", ctx.state, [
      "Review code quality findings",
      "Address any critical issues",
      "Confirm changes are ready",
    ]),
    stateUpdates: {},
  };
};
/**
 * Finalize phase execution — would create the commit; gated on a checkpoint.
 */
const executeFinalizePhase = async (
  ctx: PhaseExecutionContext,
  _phaseContext: string,
): Promise<Partial<PhaseExecutionResult>> => {
  ctx.onProgress?.(FEATURE_DEV_MESSAGES.FINALIZING);
  return {
    checkpoint: createCheckpoint("finalize", ctx.state, [
      "Confirm commit message",
      "Verify all changes are included",
      "Approve final commit",
    ]),
    stateUpdates: {},
  };
};
/**
 * Get the next phase in the workflow order.
 * Returns undefined for the last phase and for unknown phases.
 */
export const getNextPhase = (
  currentPhase: FeatureDevPhase,
): FeatureDevPhase | undefined => {
  const index = PHASE_ORDER.indexOf(currentPhase);
  // Unknown phase (-1) has no successor; indexing past the end yields undefined.
  return index === -1 ? undefined : PHASE_ORDER[index + 1];
};
/**
 * Get the previous phase in the workflow order.
 * Returns undefined for the first phase and for unknown phases.
 */
export const getPreviousPhase = (
  currentPhase: FeatureDevPhase,
): FeatureDevPhase | undefined => {
  const index = PHASE_ORDER.indexOf(currentPhase);
  // Index 0 has no predecessor; -1 means the phase is unknown.
  return index > 0 ? PHASE_ORDER[index - 1] : undefined;
};
/**
 * Validate a phase transition against the allowed-transition table.
 * `skipValidation` bypasses the check entirely.
 */
export const validateTransition = (
  request: PhaseTransitionRequest,
): { valid: boolean; error?: string } => {
  const { fromPhase, toPhase, skipValidation } = request;
  if (!skipValidation && !ALLOWED_TRANSITIONS[fromPhase].includes(toPhase)) {
    return {
      valid: false,
      error: FEATURE_DEV_ERRORS.INVALID_TRANSITION(fromPhase, toPhase),
    };
  }
  return { valid: true };
};
/**
 * Look up the configured timeout (ms) for a phase.
 */
export const getPhaseTimeout = (phase: FeatureDevPhase): number =>
  PHASE_TIMEOUTS[phase];

View File

@@ -8,3 +8,4 @@ export * from "@services/github-issue-service";
export * from "@services/command-suggestion-service";
export * from "@services/learning-service";
export * from "@services/rules-service";
export * as brainService from "@services/brain";

View File

@@ -0,0 +1,225 @@
/**
* Model Routing Service
*
* Maps agent tiers to appropriate models based on task complexity.
* Following Claude Code's multi-model strategy:
* - fast: Quick screening, filtering (like Haiku)
* - balanced: Detailed analysis, general tasks (like Sonnet)
* - thorough: Complex reasoning, bug hunting (like Opus)
*/
import { getModelContextSize } from "@constants/copilot";
import type { AgentConfig } from "@/types/agent-config";
/**
 * Model tier for routing decisions
 */
export type ModelTier = "fast" | "balanced" | "thorough";
/**
 * Model tier mapping to Copilot models
 * These are the default mappings - can be overridden by agent config
 *
 * Each list is in preference order: `getModelForTier` picks the first
 * entry that is available to the caller.
 */
export const MODEL_TIER_MAPPING: Record<ModelTier, string[]> = {
  // Fast tier: Low cost, quick responses (0x or 0.33x multiplier)
  fast: [
    "gpt-5-mini",
    "gpt-4o-mini",
    "claude-haiku-4.5",
    "gemini-3-flash-preview",
    "grok-code-fast-1",
  ],
  // Balanced tier: Good quality, moderate cost (1x multiplier)
  balanced: [
    "claude-sonnet-4.5",
    "claude-sonnet-4",
    "gpt-5",
    "gpt-5.1",
    "gemini-2.5-pro",
    "gpt-4.1",
  ],
  // Thorough tier: Best quality, higher cost (3x multiplier)
  thorough: [
    "claude-opus-4.5",
    "gpt-5.2-codex",
    "gpt-5.1-codex-max",
  ],
};
/**
 * Tier aliases for agent frontmatter
 *
 * Keys are compared against the lowercased input string in `resolveTier`,
 * so entries must be lowercase.
 */
const TIER_ALIASES: Record<string, ModelTier> = {
  haiku: "fast",
  fast: "fast",
  quick: "fast",
  sonnet: "balanced",
  balanced: "balanced",
  default: "balanced",
  opus: "thorough",
  thorough: "thorough",
  deep: "thorough",
};
/**
 * Agent type to default tier mapping
 *
 * Keys are matched as substrings of the lowercased agent id/name in
 * `inferTierFromAgent`; first matching entry (in declaration order) wins.
 */
const AGENT_TYPE_TIERS: Record<string, ModelTier> = {
  explorer: "fast",
  explore: "fast",
  filter: "fast",
  screen: "fast",
  architect: "balanced",
  planner: "balanced",
  plan: "balanced",
  coder: "balanced",
  general: "balanced",
  reviewer: "balanced",
  review: "balanced",
  "code-reviewer": "balanced",
  "bug-hunter": "thorough",
  bugs: "thorough",
  security: "thorough",
  compaction: "fast",
  summary: "fast",
  title: "fast",
};
/**
 * Resolve a model tier from a string that is either a tier alias
 * ("haiku", "deep", ...) or a model ID listed in MODEL_TIER_MAPPING.
 * Matching is case-insensitive; returns null when nothing matches.
 */
export const resolveTier = (modelOrTier: string): ModelTier | null => {
  const needle = modelOrTier.toLowerCase();
  // Tier aliases take precedence over model-ID lookups.
  const aliased = TIER_ALIASES[needle];
  if (aliased !== undefined) return aliased;
  // Otherwise see whether the string names a model inside one of the tiers.
  const entry = (
    Object.entries(MODEL_TIER_MAPPING) as Array<[ModelTier, string[]]>
  ).find(([, models]) => models.some((m) => m.toLowerCase() === needle));
  return entry ? entry[0] : null;
};
/**
 * Get the best available model for a tier.
 * The tier's list is preference-ordered; the first model present in
 * `availableModels` wins. With no (or empty) availability info, or when
 * nothing matches, the tier's first model is the fallback.
 */
export const getModelForTier = (
  tier: ModelTier,
  availableModels?: string[],
): string => {
  const preferred = MODEL_TIER_MAPPING[tier];
  if (availableModels && availableModels.length > 0) {
    const hit = preferred.find((model) => availableModels.includes(model));
    if (hit !== undefined) return hit;
  }
  return preferred[0];
};
/**
 * Infer a tier from the agent's id/name by keyword substring match
 * against AGENT_TYPE_TIERS (first entry in declaration order wins).
 * Falls back to "balanced" when no keyword matches.
 */
export const inferTierFromAgent = (agent: AgentConfig): ModelTier => {
  const haystacks = [agent.id.toLowerCase(), agent.name.toLowerCase()];
  const hit = Object.entries(AGENT_TYPE_TIERS).find(([keyword]) =>
    haystacks.some((text) => text.includes(keyword)),
  );
  return hit ? hit[1] : "balanced";
};
/**
 * Resolve the model to use for an agent
 *
 * Priority:
 * 1. Explicit model in agent config (full model ID)
 * 2. Tier specified in agent config (fast/balanced/thorough)
 * 3. Inferred from agent type/name
 * 4. Default model passed in
 *
 * @param agent - Agent whose `model` field may hold a tier alias or model ID.
 * @param defaultModel - Fallback model when nothing else resolves.
 * @param availableModels - Optional allow-list used when picking tier models.
 * @returns The chosen model, its tier, and which rule produced it
 *   ("agent-tier" | "agent-model" | "agent-inferred" | "default").
 */
export const resolveAgentModel = (
  agent: AgentConfig,
  defaultModel: string,
  availableModels?: string[],
): { model: string; tier: ModelTier; source: string } => {
  // 1-2. Explicit model field: either a tier alias or a literal model ID.
  if (agent.model) {
    const tier = resolveTier(agent.model);
    if (tier) {
      const model = getModelForTier(tier, availableModels);
      return { model, tier, source: "agent-tier" };
    }
    // resolveTier returned null above, so re-resolving (as the original
    // code did) always yielded null; the tier is simply unknown — treat
    // the string as a raw model ID with the default tier.
    return {
      model: agent.model,
      tier: "balanced",
      source: "agent-model",
    };
  }
  // 3. Infer from agent type/name; only honor non-default inferences.
  const inferredTier = inferTierFromAgent(agent);
  if (inferredTier !== "balanced") {
    const model = getModelForTier(inferredTier, availableModels);
    return { model, tier: inferredTier, source: "agent-inferred" };
  }
  // 4. Fall back to the caller-provided default model.
  const defaultTier = resolveTier(defaultModel) ?? "balanced";
  return { model: defaultModel, tier: defaultTier, source: "default" };
};
/**
 * Input-token context window for a model, for routing decisions.
 */
export const getRouteContextSize = (modelId: string): number => {
  const { input } = getModelContextSize(modelId);
  return input;
};
/**
 * Model routing decision
 */
export interface ModelRoutingDecision {
  // Chosen model ID.
  model: string;
  // Tier the model was resolved to.
  tier: ModelTier;
  // Which rule produced the choice; `resolveAgentModel` emits
  // "agent-tier" | "agent-model" | "agent-inferred" | "default".
  source: string;
  // Input-token context window of the chosen model (from getRouteContextSize).
  contextSize: number;
}
/**
 * Make a full routing decision for an agent: resolve the model and
 * annotate the result with its context window size.
 */
export const routeAgent = (
  agent: AgentConfig,
  defaultModel: string,
  availableModels?: string[],
): ModelRoutingDecision => {
  const { model, tier, source } = resolveAgentModel(
    agent,
    defaultModel,
    availableModels,
  );
  return { model, tier, source, contextSize: getRouteContextSize(model) };
};

View File

@@ -0,0 +1,241 @@
/**
* Conflict Detector
*
* Detects conflicts between parallel tasks based on file paths
* and task types. Read-only tasks don't conflict with each other.
*/
import { CONFLICT_CONFIG, READ_ONLY_TASK_TYPES, MODIFYING_TASK_TYPES } from "@constants/parallel";
import type {
ParallelTask,
ConflictCheckResult,
ConflictResolution,
} from "@/types/parallel";
/**
 * Registry of tasks currently being tracked for conflicts, keyed by task ID.
 */
const activeTasks = new Map<string, ParallelTask>();
/**
 * Register a task as active.
 */
export const registerActiveTask = (task: ParallelTask): void => {
  activeTasks.set(task.id, task);
};
/**
 * Unregister a task once it has completed.
 */
export const unregisterActiveTask = (taskId: string): void => {
  activeTasks.delete(taskId);
};
/**
 * Drop every tracked task.
 */
export const clearActiveTasks = (): void => {
  activeTasks.clear();
};
/**
 * IDs of all currently active tasks, in insertion order.
 */
export const getActiveTaskIds = (): string[] => [...activeTasks.keys()];
/**
 * Paths of taskA that overlap with paths of taskB.
 * A path of A is recorded once per overlapping B path (duplicates kept),
 * matching the pairwise comparison order.
 */
const checkPathConflict = (
  taskA: ParallelTask,
  taskB: ParallelTask,
): string[] => {
  const pathsA = taskA.conflictPaths ?? [];
  const pathsB = taskB.conflictPaths ?? [];
  return pathsA.flatMap((pathA) =>
    pathsB.filter((pathB) => pathsOverlap(pathA, pathB)).map(() => pathA),
  );
};
/**
 * Whether two paths overlap: equal after normalization, or one is an
 * ancestor directory of the other.
 */
const pathsOverlap = (pathA: string, pathB: string): boolean => {
  const a = normalizePath(pathA);
  const b = normalizePath(pathB);
  return a === b || a.startsWith(`${b}/`) || b.startsWith(`${a}/`);
};
/**
 * Normalize a path for comparison: forward slashes only, duplicate
 * separators collapsed, single trailing slash removed.
 */
const normalizePath = (path: string): string =>
  path
    .replace(/\\/g, "/")
    .replace(/\/+/g, "/")
    .replace(/\/$/, "");
/**
 * Whether two task types can conflict at all:
 * two readers never conflict; a writer conflicts with anything.
 */
const canTypesConflict = (typeA: string, typeB: string): boolean => {
  const bothReadOnly =
    READ_ONLY_TASK_TYPES.has(typeA) && READ_ONLY_TASK_TYPES.has(typeB);
  if (bothReadOnly) return false;
  return MODIFYING_TASK_TYPES.has(typeA) || MODIFYING_TASK_TYPES.has(typeB);
};
/**
* Check if a task conflicts with any active tasks
*/
export const checkConflicts = (task: ParallelTask): ConflictCheckResult => {
if (!CONFLICT_CONFIG.ENABLE_PATH_CONFLICT) {
return {
hasConflict: false,
conflictingTaskIds: [],
conflictingPaths: [],
};
}
const conflictingTaskIds: string[] = [];
const conflictingPaths: string[] = [];
for (const [activeId, activeTask] of activeTasks) {
// Skip self
if (activeId === task.id) continue;
// Check if task types can conflict
if (!canTypesConflict(task.type, activeTask.type)) continue;
// Check path conflicts
const pathConflicts = checkPathConflict(task, activeTask);
if (pathConflicts.length > 0) {
conflictingTaskIds.push(activeId);
conflictingPaths.push(...pathConflicts);
}
}
const hasConflict = conflictingTaskIds.length > 0;
// Suggest resolution
const resolution = hasConflict ? suggestResolution(task, conflictingTaskIds) : undefined;
return {
hasConflict,
conflictingTaskIds,
conflictingPaths: [...new Set(conflictingPaths)],
resolution,
};
};
/**
 * Suggest a conflict resolution strategy for a task.
 * Readers always wait; a critical task that outranks every conflicting
 * task may cancel them; everything else waits.
 */
const suggestResolution = (
  task: ParallelTask,
  conflictingTaskIds: string[],
): ConflictResolution => {
  if (READ_ONLY_TASK_TYPES.has(task.type)) {
    return "wait";
  }
  const myPriority = getPriorityValue(task.priority);
  const outranksAll = conflictingTaskIds
    .map((id) => activeTasks.get(id))
    .filter((t): t is ParallelTask => t !== undefined)
    .every((t) => getPriorityValue(t.priority) < myPriority);
  return outranksAll && task.priority === "critical" ? "cancel" : "wait";
};
/**
 * Numeric weight for a priority label; unknown labels get the
 * "normal" weight of 50.
 */
const getPriorityValue = (priority: string): number => {
  switch (priority) {
    case "critical":
      return 100;
    case "high":
      return 75;
    case "low":
      return 25;
    default:
      return 50; // "normal" and anything unrecognized
  }
};
/**
 * Wait for conflicts to resolve.
 *
 * Polls the active-task registry until none of `taskIds` remain active,
 * or until `timeout` milliseconds elapse.
 *
 * Fix: the original only evaluated the resolution condition inside the
 * timed loop, so with `timeout <= 0` it returned false even when the
 * conflicts were already resolved (or `taskIds` was empty). The condition
 * is now checked at least once before the deadline test.
 *
 * @param taskIds - IDs of the conflicting tasks to wait out.
 * @param timeout - Max wait in ms (defaults to the configured check timeout).
 * @returns true when all tasks finished in time, false on timeout.
 */
export const waitForConflictResolution = async (
  taskIds: string[],
  timeout: number = CONFLICT_CONFIG.CONFLICT_CHECK_TIMEOUT_MS,
): Promise<boolean> => {
  const deadline = Date.now() + timeout;
  for (;;) {
    if (!taskIds.some((id) => activeTasks.has(id))) {
      return true;
    }
    if (Date.now() >= deadline) {
      return false;
    }
    // Poll every 100ms; tasks unregister themselves on completion.
    await new Promise((resolve) => setTimeout(resolve, 100));
  }
};
/**
 * IDs of active tasks that would be affected by cancelling `taskId`:
 * every other active task sharing at least one conflicting path with it.
 * Returns [] when the task is not active.
 */
export const getDependentTasks = (taskId: string): string[] => {
  const target = activeTasks.get(taskId);
  if (target === undefined) return [];
  return [...activeTasks.entries()]
    .filter(([id]) => id !== taskId)
    .filter(([, candidate]) => checkPathConflict(candidate, target).length > 0)
    .map(([id]) => id);
};

View File

@@ -0,0 +1,351 @@
/**
* Parallel Executor
*
* Main orchestrator for parallel task execution.
* Coordinates conflict detection, resource management, and result aggregation.
*/
import { PARALLEL_DEFAULTS, PARALLEL_ERRORS, TASK_TIMEOUTS } from "@constants/parallel";
import {
registerActiveTask,
unregisterActiveTask,
checkConflicts,
waitForConflictResolution,
clearActiveTasks,
} from "@services/parallel/conflict-detector";
import {
initializeResourceManager,
acquireResources,
releaseResources,
canAcceptTask,
cancelWaitingTask,
resetResourceManager,
getResourceState,
} from "@services/parallel/resource-manager";
import { collectResults } from "@services/parallel/result-aggregator";
import type {
ParallelTask,
ParallelExecutionResult,
ParallelExecutorOptions,
AggregatedResults,
BatchExecutionRequest,
ConflictResolution,
} from "@/types/parallel";
// Re-export utilities
export * from "@services/parallel/conflict-detector";
export * from "@services/parallel/resource-manager";
export * from "@services/parallel/result-aggregator";
// ============================================================================
// Task Execution
// ============================================================================
/**
 * Execute a single task with a timeout, converting every outcome —
 * success, thrown error, or timeout — into a ParallelExecutionResult.
 * Never throws.
 */
const executeTask = async <TInput, TOutput>(
  task: ParallelTask<TInput, TOutput>,
  executor: (input: TInput) => Promise<TOutput>,
  options: ParallelExecutorOptions,
): Promise<ParallelExecutionResult<TOutput>> => {
  const startedAt = Date.now();
  // Task-specific timeout wins, then the per-type table, then the default.
  const timeout =
    task.timeout ?? TASK_TIMEOUTS[task.type] ?? PARALLEL_DEFAULTS.defaultTimeout;
  try {
    options.onTaskStart?.(task);
    // First settled promise wins: the executor, or the timeout rejection.
    const result = await Promise.race([
      executor(task.input),
      createTimeout<TOutput>(timeout, task.id),
    ]);
    const completedAt = Date.now();
    const outcome: ParallelExecutionResult<TOutput> = {
      taskId: task.id,
      status: "completed",
      result,
      duration: completedAt - startedAt,
      startedAt,
      completedAt,
    };
    options.onTaskComplete?.(outcome);
    return outcome;
  } catch (error) {
    const completedAt = Date.now();
    const failure: ParallelExecutionResult<TOutput> = {
      taskId: task.id,
      status: error instanceof TimeoutError ? "timeout" : "error",
      error: error instanceof Error ? error.message : String(error),
      duration: completedAt - startedAt,
      startedAt,
      completedAt,
    };
    options.onTaskError?.(
      task,
      error instanceof Error ? error : new Error(String(error)),
    );
    return failure;
  }
};
/**
 * Error raised when a task exceeds its allotted execution time.
 */
class TimeoutError extends Error {
  constructor(taskId: string) {
    super(PARALLEL_ERRORS.TIMEOUT(taskId));
    this.name = "TimeoutError";
  }
}
/**
 * Promise that never resolves and rejects with a TimeoutError after `ms`.
 * Used as the losing branch of Promise.race in executeTask.
 */
const createTimeout = <T>(ms: number, taskId: string): Promise<T> =>
  new Promise<T>((_resolve, reject) => {
    setTimeout(() => {
      reject(new TimeoutError(taskId));
    }, ms);
  });
// ============================================================================
// Parallel Executor
// ============================================================================
/**
 * Execute tasks in parallel with conflict detection and resource management
 *
 * For each task (in order): bail out with a "cancelled" result if aborted,
 * an "error" result if the resource manager refuses new tasks, otherwise
 * start it via executeWithConflictHandling. All started tasks are awaited
 * before results are aggregated; the active-task registry is cleared last.
 *
 * @param tasks - Tasks to run; per-task limits/conflicts decide real concurrency.
 * @param executor - Maps a task's input to its output.
 * @param options - Limits, lifecycle callbacks, conflict hook, abort signal.
 * @returns Aggregated results via collectResults.
 */
export const executeParallel = async <TInput, TOutput>(
  tasks: ParallelTask<TInput, TOutput>[],
  executor: (input: TInput) => Promise<TOutput>,
  options: Partial<ParallelExecutorOptions> = {},
): Promise<AggregatedResults<TOutput>> => {
  // Normalize partial options into a full options object.
  const fullOptions: ParallelExecutorOptions = {
    limits: options.limits ?? PARALLEL_DEFAULTS,
    onTaskStart: options.onTaskStart,
    onTaskComplete: options.onTaskComplete,
    onTaskError: options.onTaskError,
    onConflict: options.onConflict,
    abortSignal: options.abortSignal,
  };
  // Initialize resource manager
  initializeResourceManager(fullOptions.limits);
  // Track results (completion order) and in-flight promises (by task ID).
  const results: ParallelExecutionResult<TOutput>[] = [];
  const pendingTasks = new Map<string, Promise<ParallelExecutionResult<TOutput>>>();
  // Check if executor was aborted
  const checkAbort = (): boolean => {
    return fullOptions.abortSignal?.aborted ?? false;
  };
  // Process each task
  for (const task of tasks) {
    if (checkAbort()) {
      // Abort observed before this task started: record it as cancelled.
      results.push({
        taskId: task.id,
        status: "cancelled",
        error: PARALLEL_ERRORS.EXECUTOR_ABORTED,
        duration: 0,
        startedAt: Date.now(),
        completedAt: Date.now(),
      });
      continue;
    }
    // Check if we can accept more tasks
    if (!canAcceptTask(fullOptions.limits)) {
      results.push({
        taskId: task.id,
        status: "error",
        error: PARALLEL_ERRORS.QUEUE_FULL,
        duration: 0,
        startedAt: Date.now(),
        completedAt: Date.now(),
      });
      continue;
    }
    // Start task execution
    const taskPromise = executeWithConflictHandling(
      task,
      executor,
      fullOptions,
    );
    pendingTasks.set(task.id, taskPromise);
    // Remove from pending when done. This .then is registered before
    // Promise.all subscribes below, so every push runs before the
    // aggregate await resolves.
    // NOTE(review): executeWithConflictHandling can rethrow from its catch;
    // that rejection propagates through Promise.all AND leaves this side
    // chain's rejection unhandled — consider adding a .catch here.
    taskPromise.then((result) => {
      pendingTasks.delete(task.id);
      results.push(result);
    });
  }
  // Wait for all pending tasks
  await Promise.all(pendingTasks.values());
  // Cleanup
  clearActiveTasks();
  return collectResults(results);
};
/**
 * Execute a task with conflict handling
 *
 * Lifecycle: acquire resources → check/resolve conflicts → register as
 * active → run via executeTask → unregister and release resources.
 * On a non-continuing conflict resolution, resources are released and the
 * conflict result is returned without running the task.
 *
 * @throws Rethrows any error raised outside executeTask (which itself
 *   converts task failures into results and does not throw); resources are
 *   released (as a failed run) before rethrowing.
 */
const executeWithConflictHandling = async <TInput, TOutput>(
  task: ParallelTask<TInput, TOutput>,
  executor: (input: TInput) => Promise<TOutput>,
  options: ParallelExecutorOptions,
): Promise<ParallelExecutionResult<TOutput>> => {
  // Acquire resources (may wait for capacity).
  await acquireResources(task);
  try {
    // Check for conflicts with currently active tasks.
    const conflicts = checkConflicts(task);
    if (conflicts.hasConflict) {
      // Caller's onConflict hook wins, then the detector's suggestion,
      // then the conservative default of waiting.
      const resolution = options.onConflict?.(task, conflicts) ?? conflicts.resolution ?? "wait";
      const handled = await handleConflict(task, conflicts, resolution, options);
      if (!handled.continue) {
        releaseResources(task, 0, false);
        return handled.result;
      }
    }
    // Register as active
    registerActiveTask(task);
    // Execute task
    const result = await executeTask(task, executor, options);
    // Unregister and release resources
    unregisterActiveTask(task.id);
    releaseResources(task, result.duration, result.status === "completed");
    return result;
  } catch (error) {
    // NOTE(review): if the error is thrown after a successful release above,
    // this releases again — confirm releaseResources tolerates double release.
    releaseResources(task, 0, false);
    throw error;
  }
};
/**
 * Handle a detected conflict according to the chosen resolution strategy.
 * Returns { continue: true } when the task may proceed; otherwise a
 * terminal "conflict"/"cancelled" result for the caller to return.
 */
const handleConflict = async <TInput, TOutput>(
  task: ParallelTask<TInput, TOutput>,
  conflicts: { conflictingTaskIds: string[]; conflictingPaths: string[] },
  resolution: ConflictResolution,
  _options: ParallelExecutorOptions,
): Promise<{ continue: boolean; result: ParallelExecutionResult<TOutput> }> => {
  const proceed = () => ({
    continue: true,
    result: {} as ParallelExecutionResult<TOutput>,
  });
  const fail = (status: "conflict" | "cancelled", error: string) => ({
    continue: false,
    result: {
      taskId: task.id,
      status,
      error,
      duration: 0,
      startedAt: Date.now(),
      completedAt: Date.now(),
    } as ParallelExecutionResult<TOutput>,
  });
  switch (resolution) {
    case "wait": {
      // Block until every conflicting task finishes (or we time out).
      const resolved = await waitForConflictResolution(conflicts.conflictingTaskIds);
      return resolved
        ? proceed()
        : fail("conflict", PARALLEL_ERRORS.CONFLICT(task.id, conflicts.conflictingPaths));
    }
    case "cancel":
      // Evict the queued tasks we conflict with, then run anyway.
      for (const id of conflicts.conflictingTaskIds) {
        cancelWaitingTask(id);
      }
      return proceed();
    case "merge":
      // Run concurrently; the result aggregator reconciles outputs.
      return proceed();
    case "abort":
      return fail("conflict", PARALLEL_ERRORS.CONFLICT(task.id, conflicts.conflictingPaths));
  }
};
// ============================================================================
// Batch Execution
// ============================================================================
/**
 * Execute a batch of tasks.
 *
 * Convenience wrapper that unpacks a {@link BatchExecutionRequest} and
 * forwards it to {@link executeParallel}.
 */
export const executeBatch = async <TInput, TOutput>(
  request: BatchExecutionRequest<TInput>,
  executor: (input: TInput) => Promise<TOutput>,
): Promise<AggregatedResults<TOutput>> => {
  const tasks = request.tasks as ParallelTask<TInput, TOutput>[];
  return executeParallel(tasks, executor, request.options);
};
// ============================================================================
// Utility Functions
// ============================================================================
/**
 * Create a unique task ID of the form `task_<timestamp>_<random>`.
 *
 * Uses `slice` rather than the deprecated `String.prototype.substr`
 * (same 9-character random suffix as before).
 */
export const createTaskId = (): string => {
  const randomSuffix = Math.random().toString(36).slice(2, 11);
  return `task_${Date.now()}_${randomSuffix}`;
};
/**
 * Create a parallel task.
 *
 * Fills in defaults for anything the caller omits: a generated ID,
 * task type `"explore"`, the `"default"` agent and `"normal"` priority.
 */
export const createTask = <TInput>(
  input: TInput,
  options: Partial<ParallelTask<TInput>> = {},
): ParallelTask<TInput> => {
  const {
    id = createTaskId(),
    type = "explore",
    agent = { name: "default" },
    priority = "normal",
    conflictPaths,
    timeout,
    metadata,
  } = options;
  return { id, type, agent, input, priority, conflictPaths, timeout, metadata };
};
/**
 * Reset the parallel executor.
 *
 * Clears the active-task registry and restores the resource manager
 * (semaphores and counters) to its initial state.
 */
export const resetParallelExecutor = (): void => {
  clearActiveTasks();
  resetResourceManager();
};
/**
 * Get execution statistics.
 *
 * Exposes the resource manager's counters (active/queued/completed/failed
 * tasks and accumulated duration) as a snapshot.
 */
export const getExecutionStats = () => {
  return getResourceState();
};

View File

@@ -0,0 +1,274 @@
/**
* Resource Manager
*
* Manages concurrent task execution limits using a semaphore pattern.
* Handles task queuing, priority ordering, and rate limiting.
*/
import {
PARALLEL_DEFAULTS,
PRIORITY_WEIGHTS,
TASK_TYPE_LIMITS,
PARALLEL_ERRORS,
} from "@constants/parallel";
import type {
ParallelTask,
ResourceLimits,
ResourceState,
SemaphoreState,
TaskPriority,
} from "@/types/parallel";
// ============================================================================
// Semaphore Implementation
// ============================================================================
/** A pending acquire() call queued until a permit becomes available. */
interface WaitingTask {
  // The task requesting a permit; its priority orders the wait queue.
  task: ParallelTask;
  // Resolves the pending acquire() promise when a permit is handed over.
  resolve: () => void;
  // Rejects the pending acquire() promise on cancellation or executor abort.
  reject: (reason: Error) => void;
}
/**
 * Counting semaphore with a priority-ordered wait queue.
 *
 * When `release()` finds waiters, it hands the permit directly to the
 * highest-priority one (the permit count is NOT incremented in that case);
 * otherwise the permit returns to the pool, capped at the initial count.
 */
class Semaphore {
  // Permits currently available for immediate acquisition.
  private permits: number;
  // Initial permit count; release() never grows the pool past this.
  private readonly maxPermits: number;
  // Pending acquire() calls, re-sorted on each push by descending priority.
  private waiting: WaitingTask[] = [];
  constructor(permits: number) {
    this.permits = permits;
    this.maxPermits = permits;
  }
  /**
   * Acquire a permit. Resolves immediately when one is free; otherwise the
   * returned promise settles when release() hands a permit over, or rejects
   * via cancelWaiting()/clearWaiting().
   */
  async acquire(task: ParallelTask): Promise<void> {
    if (this.permits > 0) {
      this.permits--;
      return;
    }
    return new Promise<void>((resolve, reject) => {
      this.waiting.push({ task, resolve, reject });
      // Sort by priority (highest first)
      this.waiting.sort(
        (a, b) =>
          PRIORITY_WEIGHTS[b.task.priority] - PRIORITY_WEIGHTS[a.task.priority],
      );
    });
  }
  /**
   * Release a permit: wake the front-of-queue waiter if any (direct permit
   * hand-off), otherwise return the permit to the pool.
   */
  release(): void {
    if (this.waiting.length > 0) {
      const next = this.waiting.shift();
      if (next) {
        next.resolve();
      }
    } else {
      this.permits = Math.min(this.permits + 1, this.maxPermits);
    }
  }
  /**
   * Remove a waiting task by ID, rejecting its acquire() promise.
   * @returns true when the task was found and cancelled.
   */
  cancelWaiting(taskId: string): boolean {
    const index = this.waiting.findIndex((w) => w.task.id === taskId);
    if (index === -1) return false;
    const [removed] = this.waiting.splice(index, 1);
    removed.reject(new Error(PARALLEL_ERRORS.CANCELLED(taskId)));
    return true;
  }
  /** Snapshot of available permits, capacity, and queue length. */
  getState(): SemaphoreState {
    return {
      permits: this.permits,
      maxPermits: this.maxPermits,
      waiting: this.waiting.length,
    };
  }
  /** Reject every queued acquire() with an executor-aborted error. */
  clearWaiting(): void {
    for (const waiting of this.waiting) {
      waiting.reject(new Error(PARALLEL_ERRORS.EXECUTOR_ABORTED));
    }
    this.waiting = [];
  }
}
// ============================================================================
// Resource Manager
// ============================================================================
// Global concurrency gate; lazily created on first acquireResources() call.
let globalSemaphore: Semaphore | null = null;
// Per-task-type gates keyed by type; rebuilt by initializeResourceManager().
const taskTypeSemaphores = new Map<string, Semaphore>();
// Mutable counters describing current executor load plus lifetime totals.
let resourceState: ResourceState = {
  activeTasks: 0,
  queuedTasks: 0,
  completedTasks: 0,
  failedTasks: 0,
  totalDuration: 0,
};
/**
 * Initialize the resource manager with the given limits.
 *
 * Creates the global semaphore, rebuilds every per-task-type semaphore from
 * TASK_TYPE_LIMITS, and zeroes the counters.
 */
export const initializeResourceManager = (
  limits: ResourceLimits = PARALLEL_DEFAULTS,
): void => {
  // Global concurrency gate.
  globalSemaphore = new Semaphore(limits.maxConcurrentTasks);
  // Rebuild the per-type gates from scratch.
  taskTypeSemaphores.clear();
  Object.entries(TASK_TYPE_LIMITS).forEach(([taskType, limit]) => {
    taskTypeSemaphores.set(taskType, new Semaphore(limit));
  });
  // Fresh counters.
  resourceState = {
    activeTasks: 0,
    queuedTasks: 0,
    completedTasks: 0,
    failedTasks: 0,
    totalDuration: 0,
  };
};
/**
 * Acquire execution permits for a task.
 *
 * Waits on the global semaphore first, then on the task-type semaphore when
 * one exists. Fix: previously, if the type-semaphore acquisition rejected
 * (e.g. the queued task was cancelled while waiting on the type gate), the
 * already-held global permit was never released and leaked permanently.
 * We now track whether the global permit was obtained and release it on
 * failure.
 *
 * @throws Re-throws whatever rejection the semaphores produced.
 */
export const acquireResources = async (task: ParallelTask): Promise<void> => {
  if (!globalSemaphore) {
    initializeResourceManager();
  }
  resourceState.queuedTasks++;
  let globalAcquired = false;
  try {
    // Acquire global permit
    await globalSemaphore!.acquire(task);
    globalAcquired = true;
    // Acquire type-specific permit if exists
    const typeSemaphore = taskTypeSemaphores.get(task.type);
    if (typeSemaphore) {
      await typeSemaphore.acquire(task);
    }
    resourceState.queuedTasks--;
    resourceState.activeTasks++;
  } catch (error) {
    resourceState.queuedTasks--;
    // Don't leak the global permit when only the type acquisition failed.
    if (globalAcquired) {
      globalSemaphore!.release();
    }
    throw error;
  }
};
/**
* Release resources after task completion
*/
export const releaseResources = (task: ParallelTask, duration: number, success: boolean): void => {
if (!globalSemaphore) return;
// Release global permit
globalSemaphore.release();
// Release type-specific permit
const typeSemaphore = taskTypeSemaphores.get(task.type);
if (typeSemaphore) {
typeSemaphore.release();
}
// Update state
resourceState.activeTasks--;
resourceState.totalDuration += duration;
if (success) {
resourceState.completedTasks++;
} else {
resourceState.failedTasks++;
}
};
/**
 * Cancel a task that is still waiting in the global queue.
 *
 * @returns true when the task was found and removed; false otherwise.
 */
export const cancelWaitingTask = (taskId: string): boolean => {
  if (!globalSemaphore) return false;
  if (!globalSemaphore.cancelWaiting(taskId)) return false;
  // The task left the queue without ever becoming active.
  resourceState.queuedTasks--;
  return true;
};
/**
 * Get current resource state.
 *
 * Returns a shallow copy so callers cannot mutate the internal counters.
 */
export const getResourceState = (): ResourceState => ({
  ...resourceState,
});
/**
 * Get the semaphore state for a task type, or null when no per-type
 * semaphore has been registered for it.
 */
export const getTypeSemaphoreState = (type: string): SemaphoreState | null => {
  return taskTypeSemaphores.get(type)?.getState() ?? null;
};
/**
 * Get the global semaphore state, or null when the resource manager has not
 * been initialized yet.
 */
export const getGlobalSemaphoreState = (): SemaphoreState | null => {
  return globalSemaphore?.getState() ?? null;
};
/**
 * Check whether another task can be accepted without exceeding the
 * configured queue size (counts both active and queued tasks).
 */
export const canAcceptTask = (
  limits: ResourceLimits = PARALLEL_DEFAULTS,
): boolean => {
  const { activeTasks, queuedTasks } = resourceState;
  return activeTasks + queuedTasks < limits.maxQueueSize;
};
/**
 * Reset the resource manager.
 *
 * Rejects every queued acquire() on every semaphore, then zeroes all
 * counters. The semaphores themselves are kept (permits untouched).
 */
export const resetResourceManager = (): void => {
  // Reject everything still waiting on any semaphore.
  globalSemaphore?.clearWaiting();
  for (const semaphore of taskTypeSemaphores.values()) {
    semaphore.clearWaiting();
  }
  // Fresh counters.
  resourceState = {
    activeTasks: 0,
    queuedTasks: 0,
    completedTasks: 0,
    failedTasks: 0,
    totalDuration: 0,
  };
};
/**
* Get queue position for a task based on priority
*/
export const getQueuePosition = (priority: TaskPriority): number => {
if (!globalSemaphore) return 0;
const state = globalSemaphore.getState();
// Estimate position based on priority
// Higher priority tasks will be processed first
const priorityWeight = PRIORITY_WEIGHTS[priority];
const avgWeight =
(PRIORITY_WEIGHTS.critical +
PRIORITY_WEIGHTS.high +
PRIORITY_WEIGHTS.normal +
PRIORITY_WEIGHTS.low) /
4;
const positionFactor = avgWeight / priorityWeight;
return Math.ceil(state.waiting * positionFactor);
};

View File

@@ -0,0 +1,280 @@
/**
* Result Aggregator
*
* Merges and deduplicates results from parallel task execution.
* Supports various aggregation strategies based on task type.
*/
import { DEDUP_CONFIG } from "@constants/parallel";
import type {
ParallelExecutionResult,
AggregatedResults,
DeduplicationKey,
DeduplicationResult,
} from "@/types/parallel";
// ============================================================================
// Result Collection
// ============================================================================
/**
 * Collect per-task results into an aggregated summary.
 *
 * Counts completed ("successful"), error/timeout ("failed") and cancelled
 * results in a single pass, and sums all durations.
 */
export const collectResults = <TOutput>(
  results: ParallelExecutionResult<TOutput>[],
): AggregatedResults<TOutput> => {
  let successful = 0;
  let failed = 0;
  let cancelled = 0;
  let totalDuration = 0;
  for (const result of results) {
    totalDuration += result.duration;
    if (result.status === "completed") {
      successful++;
    } else if (result.status === "error" || result.status === "timeout") {
      failed++;
    } else if (result.status === "cancelled") {
      cancelled++;
    }
  }
  return { results, successful, failed, cancelled, totalDuration };
};
// ============================================================================
// Deduplication
// ============================================================================
/**
 * Create a string deduplication key from an item.
 *
 * The extractor produces a structured key which is serialized via
 * JSON.stringify so it can be used in a Map/Set.
 */
export const createDeduplicationKey = <T>(
  item: T,
  keyExtractor: (item: T) => DeduplicationKey,
): string => JSON.stringify(keyExtractor(item));
/**
 * Deduplicate items by an extracted key.
 *
 * The first item seen for each key wins; later items with the same key only
 * increment the duplicate counter. `mergedCount` is always 0 — no merging
 * strategy is implemented here.
 */
export const deduplicateResults = <T>(
  items: T[],
  keyExtractor: (item: T) => DeduplicationKey,
): DeduplicationResult<T> => {
  const firstByKey = new Map<string, T>();
  let duplicateCount = 0;
  for (const item of items) {
    // Serialized structured key (same scheme as createDeduplicationKey).
    const serializedKey = JSON.stringify(keyExtractor(item));
    if (firstByKey.has(serializedKey)) {
      duplicateCount++;
    } else {
      firstByKey.set(serializedKey, item);
    }
  }
  return {
    unique: [...firstByKey.values()],
    duplicateCount,
    mergedCount: 0,
  };
};
/**
 * Deduplicate file results (by path).
 *
 * Two entries with the same `path` count as duplicates regardless of their
 * `content`; the first occurrence wins.
 */
export const deduplicateFileResults = (
  results: Array<{ path: string; content?: string }>,
): DeduplicationResult<{ path: string; content?: string }> => {
  return deduplicateResults(results, (item) => ({
    type: "file",
    path: item.path,
  }));
};
/**
 * Deduplicate search results (by path and matched content).
 *
 * Entries are duplicates only when both `path` and `match` are equal;
 * the first occurrence wins.
 */
export const deduplicateSearchResults = <T extends { path: string; match?: string }>(
  results: T[],
): DeduplicationResult<T> => {
  return deduplicateResults(results, (item) => ({
    type: "search",
    path: item.path,
    content: item.match,
  }));
};
// ============================================================================
// Result Merging
// ============================================================================
/**
 * Merge multiple arrays of results into one (single-level flatten,
 * preserving order).
 */
export const mergeArrayResults = <T>(arrays: T[][]): T[] =>
  ([] as T[]).concat(...arrays);
/**
 * Merge object results with a shallow merge — later objects override
 * earlier ones on key collisions.
 */
export const mergeObjectResults = <T extends Record<string, unknown>>(
  objects: T[],
): T => Object.assign({}, ...objects) as T;
/**
 * Pick the most recently completed successful result.
 *
 * Scans for completed results with a defined value and keeps the one with
 * the greatest `completedAt` (ties resolve to the later entry in the input,
 * matching a stable ascending sort + take-last).
 */
export const mergeByPriority = <T>(
  results: ParallelExecutionResult<T>[],
): T | undefined => {
  let latest: ParallelExecutionResult<T> | undefined;
  for (const result of results) {
    if (result.status !== "completed" || result.result === undefined) continue;
    if (!latest || result.completedAt >= latest.completedAt) {
      latest = result;
    }
  }
  return latest?.result;
};
// ============================================================================
// Content Similarity
// ============================================================================
/**
 * Jaccard similarity over lowercase whitespace-delimited tokens:
 * |A ∩ B| / |A ∪ B|. Identical strings score 1; an empty operand scores 0.
 */
const calculateSimilarity = (a: string, b: string): number => {
  if (a === b) return 1;
  if (!a || !b) return 0;
  const left = new Set(a.toLowerCase().split(/\s+/));
  const right = new Set(b.toLowerCase().split(/\s+/));
  let shared = 0;
  for (const token of left) {
    if (right.has(token)) shared++;
  }
  // |A ∪ B| = |A| + |B| − |A ∩ B|
  const unionSize = left.size + right.size - shared;
  return shared / unionSize;
};
/**
 * Group items whose extracted content is pairwise similar.
 *
 * For each item, collects every LATER item whose similarity to it meets the
 * threshold; items with at least one match become keys in the returned map.
 */
export const findSimilarResults = <T>(
  items: T[],
  contentExtractor: (item: T) => string,
  threshold: number = DEDUP_CONFIG.SIMILARITY_THRESHOLD,
): Map<T, T[]> => {
  const groups = new Map<T, T[]>();
  items.forEach((anchor, index) => {
    const anchorText = contentExtractor(anchor);
    // Only look forward so each pair is considered once.
    const matches = items
      .slice(index + 1)
      .filter(
        (candidate) =>
          calculateSimilarity(anchorText, contentExtractor(candidate)) >= threshold,
      );
    if (matches.length > 0) {
      groups.set(anchor, matches);
    }
  });
  return groups;
};
// ============================================================================
// Aggregation Strategies
// ============================================================================
/**
 * Concatenate every successful array result into a single flat list,
 * preserving input order.
 */
export const aggregateAsList = <T>(
  results: ParallelExecutionResult<T[]>[],
): T[] => {
  const collected: T[] = [];
  for (const result of results) {
    if (result.status === "completed" && result.result) {
      collected.push(...result.result);
    }
  }
  return collected;
};
/**
 * Build a Map from successful results keyed by the extractor; later
 * results override earlier ones on key collision.
 */
export const aggregateAsMap = <T extends Record<string, unknown>>(
  results: ParallelExecutionResult<T>[],
  keyExtractor: (result: T) => string,
): Map<string, T> => {
  const entries = results
    .filter((r) => r.status === "completed" && r.result)
    .map((r) => [keyExtractor(r.result!), r.result!] as const);
  return new Map(entries);
};
/**
 * Return the earliest-completed successful result's value.
 *
 * Ties on `completedAt` resolve to the earlier entry in the input,
 * matching a stable ascending sort + take-first.
 */
export const aggregateFirstNonEmpty = <T>(
  results: ParallelExecutionResult<T>[],
): T | undefined => {
  let earliest: ParallelExecutionResult<T> | undefined;
  for (const result of results) {
    if (result.status !== "completed" || result.result === undefined) continue;
    if (!earliest || result.completedAt < earliest.completedAt) {
      earliest = result;
    }
  }
  return earliest?.result;
};
/**
 * Sum every successful numeric result (non-completed or non-numeric
 * entries are ignored).
 */
export const aggregateSum = (
  results: ParallelExecutionResult<number>[],
): number => {
  let total = 0;
  for (const result of results) {
    if (result.status === "completed" && typeof result.result === "number") {
      total += result.result;
    }
  }
  return total;
};
/**
 * True when at least one completed boolean result exists AND every
 * completed boolean result is true. Empty/no-boolean input yields false.
 */
export const aggregateAll = (
  results: ParallelExecutionResult<boolean>[],
): boolean => {
  let sawBoolean = false;
  for (const result of results) {
    if (result.status !== "completed" || typeof result.result !== "boolean") continue;
    sawBoolean = true;
    if (result.result !== true) return false;
  }
  return sawBoolean;
};
/**
 * True when any completed result has the value true.
 */
export const aggregateAny = (
  results: ParallelExecutionResult<boolean>[],
): boolean => {
  for (const result of results) {
    if (result.status === "completed" && result.result === true) {
      return true;
    }
  }
  return false;
};

View File

@@ -0,0 +1,309 @@
/**
* Diff Parser
*
* Parses unified diff format for PR review analysis.
*/
import type { ParsedDiff, ParsedFileDiff, DiffHunk } from "@/types/pr-review";
/**
 * Diff parsing patterns — one regex per line kind of the unified
 * diff / `git diff` output format.
 */
const PATTERNS = {
  // `diff --git a/<old> b/<new>` — starts a new per-file section.
  FILE_HEADER: /^diff --git a\/(.+) b\/(.+)$/,
  // `--- <path>` (path may be followed by a tab + metadata, which is dropped).
  OLD_FILE: /^--- (.+?)(?:\t.*)?$/,
  // `+++ <path>` (same trailing-metadata handling as OLD_FILE).
  NEW_FILE: /^\+\+\+ (.+?)(?:\t.*)?$/,
  // `@@ -<oldStart>[,<oldLines>] +<newStart>[,<newLines>] @@<section>`.
  HUNK_HEADER: /^@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))? @@(.*)$/,
  // Marker line emitted for binary files (no textual hunks follow).
  BINARY: /^Binary files .+ differ$/,
  // Git extended headers for file creation/deletion.
  NEW_FILE_MODE: /^new file mode \d+$/,
  DELETED_FILE_MODE: /^deleted file mode \d+$/,
  // Git extended headers for renames.
  RENAME_FROM: /^rename from (.+)$/,
  RENAME_TO: /^rename to (.+)$/,
} as const;
/**
 * Parse unified diff content into files and hunks.
 *
 * Single pass over the input lines: each branch below consumes exactly one
 * line and `continue`s, so the order of checks mirrors the structure of the
 * git diff format (file header, `---`/`+++` headers, extended headers, hunk
 * headers, then hunk content). Lines matching no branch (e.g.
 * `\ No newline at end of file`, index lines) are silently skipped.
 *
 * @param diffContent - Raw unified diff text.
 * @returns Parsed files plus addition/deletion/file totals.
 */
export const parseDiff = (diffContent: string): ParsedDiff => {
  const lines = diffContent.split("\n");
  const files: ParsedFileDiff[] = [];
  let currentFile: ParsedFileDiff | null = null;
  let currentHunk: DiffHunk | null = null;
  let lineIndex = 0;
  while (lineIndex < lines.length) {
    const line = lines[lineIndex];
    // Git diff header — flush the previous file (and its open hunk) first.
    const gitDiffMatch = line.match(PATTERNS.FILE_HEADER);
    if (gitDiffMatch) {
      if (currentFile) {
        if (currentHunk) {
          currentFile.hunks.push(currentHunk);
        }
        files.push(currentFile);
      }
      currentFile = createEmptyFileDiff(gitDiffMatch[1], gitDiffMatch[2]);
      currentHunk = null;
      lineIndex++;
      continue;
    }
    // Old file header (`--- path`); `/dev/null` means the file is new.
    const oldFileMatch = line.match(PATTERNS.OLD_FILE);
    if (oldFileMatch) {
      if (!currentFile) {
        currentFile = createEmptyFileDiff("", "");
      }
      currentFile.oldPath = cleanPath(oldFileMatch[1]);
      if (currentFile.oldPath === "/dev/null") {
        currentFile.isNew = true;
      }
      lineIndex++;
      continue;
    }
    // New file header (`+++ path`); `/dev/null` means the file was deleted.
    const newFileMatch = line.match(PATTERNS.NEW_FILE);
    if (newFileMatch) {
      if (!currentFile) {
        currentFile = createEmptyFileDiff("", "");
      }
      currentFile.newPath = cleanPath(newFileMatch[1]);
      if (currentFile.newPath === "/dev/null") {
        currentFile.isDeleted = true;
      }
      lineIndex++;
      continue;
    }
    // Binary file marker — no hunks will follow for this file.
    if (PATTERNS.BINARY.test(line)) {
      if (currentFile) {
        currentFile.isBinary = true;
      }
      lineIndex++;
      continue;
    }
    // New file mode (git extended header)
    if (PATTERNS.NEW_FILE_MODE.test(line)) {
      if (currentFile) {
        currentFile.isNew = true;
      }
      lineIndex++;
      continue;
    }
    // Deleted file mode (git extended header)
    if (PATTERNS.DELETED_FILE_MODE.test(line)) {
      if (currentFile) {
        currentFile.isDeleted = true;
      }
      lineIndex++;
      continue;
    }
    // Rename from — records the pre-rename path.
    const renameFromMatch = line.match(PATTERNS.RENAME_FROM);
    if (renameFromMatch) {
      if (currentFile) {
        currentFile.isRenamed = true;
        currentFile.oldPath = cleanPath(renameFromMatch[1]);
      }
      lineIndex++;
      continue;
    }
    // Rename to — records the post-rename path.
    const renameToMatch = line.match(PATTERNS.RENAME_TO);
    if (renameToMatch) {
      if (currentFile) {
        currentFile.newPath = cleanPath(renameToMatch[1]);
      }
      lineIndex++;
      continue;
    }
    // Hunk header — flush any open hunk, then start a new one.
    // Omitted line counts default to 1 per the unified diff format.
    const hunkMatch = line.match(PATTERNS.HUNK_HEADER);
    if (hunkMatch) {
      if (currentHunk && currentFile) {
        currentFile.hunks.push(currentHunk);
      }
      currentHunk = {
        oldStart: parseInt(hunkMatch[1], 10),
        oldLines: hunkMatch[2] ? parseInt(hunkMatch[2], 10) : 1,
        newStart: parseInt(hunkMatch[3], 10),
        newLines: hunkMatch[4] ? parseInt(hunkMatch[4], 10) : 1,
        content: line,
        additions: [],
        deletions: [],
        context: [],
      };
      lineIndex++;
      continue;
    }
    // Content lines: `+` addition, `-` deletion, leading space = context.
    // `+++`/`---` are excluded so stray headers are not miscounted.
    if (currentHunk) {
      if (line.startsWith("+") && !line.startsWith("+++")) {
        currentHunk.additions.push(line.slice(1));
        if (currentFile) currentFile.additions++;
      } else if (line.startsWith("-") && !line.startsWith("---")) {
        currentHunk.deletions.push(line.slice(1));
        if (currentFile) currentFile.deletions++;
      } else if (line.startsWith(" ") || line === "") {
        currentHunk.context.push(line.slice(1) || "");
      }
    }
    lineIndex++;
  }
  // Push final hunk and file
  if (currentHunk && currentFile) {
    currentFile.hunks.push(currentHunk);
  }
  if (currentFile) {
    files.push(currentFile);
  }
  // Calculate totals
  const totalAdditions = files.reduce((sum, f) => sum + f.additions, 0);
  const totalDeletions = files.reduce((sum, f) => sum + f.deletions, 0);
  return {
    files,
    totalAdditions,
    totalDeletions,
    totalFiles: files.length,
  };
};
/**
 * Create an empty file-diff structure with cleaned paths and all counters
 * and flags zeroed/false.
 */
const createEmptyFileDiff = (oldPath: string, newPath: string): ParsedFileDiff => {
  const emptyState = {
    hunks: [],
    additions: 0,
    deletions: 0,
    isBinary: false,
    isNew: false,
    isDeleted: false,
    isRenamed: false,
  };
  return {
    oldPath: cleanPath(oldPath),
    newPath: cleanPath(newPath),
    ...emptyState,
  };
};
/**
 * Strip a single leading `a/` or `b/` prefix (git's diff path prefixes);
 * any other path is returned unchanged.
 */
const cleanPath = (path: string): string => path.replace(/^[ab]\//, "");
/**
 * Resolve the effective path of a file diff: new path for created files,
 * old path for deleted ones, otherwise new path with old path as fallback.
 */
export const getFilePath = (fileDiff: ParsedFileDiff): string =>
  fileDiff.isNew
    ? fileDiff.newPath
    : fileDiff.isDeleted
      ? fileDiff.oldPath
      : fileDiff.newPath || fileDiff.oldPath;
/**
 * Drop every file whose effective path matches at least one of the
 * exclusion glob patterns.
 */
export const filterFiles = (
  files: ParsedFileDiff[],
  excludePatterns: string[],
): ParsedFileDiff[] =>
  files.filter((file) => {
    const path = getFilePath(file);
    for (const pattern of excludePatterns) {
      if (matchPattern(path, pattern)) return false;
    }
    return true;
  });
/**
 * Simple glob pattern matching (`**` = any path segment(s), `*` = within a
 * segment, `?` = single character).
 *
 * Fixes two bugs in the previous version:
 * 1. Regex metacharacters in the pattern (notably `.`) were not escaped, so
 *    `*.ts` also matched e.g. `axts`.
 * 2. The `.*` produced for `**` was then mangled by the subsequent `*`
 *    replacement into `.[^/]*`, breaking cross-directory matching. A
 *    placeholder keeps the two star forms independent.
 */
const matchPattern = (path: string, pattern: string): boolean => {
  // Placeholder that cannot appear in a real glob pattern.
  const DOUBLE_STAR = "\u0000";
  const regexPattern = pattern
    // Escape regex metacharacters, leaving the glob chars `*` and `?` alone.
    .replace(/[.+^${}()|[\]\\]/g, "\\$&")
    .replace(/\*\*/g, DOUBLE_STAR)
    .replace(/\*/g, "[^/]*")
    .replace(/\?/g, ".")
    .replace(/\u0000/g, ".*");
  const regex = new RegExp(`^${regexPattern}$`);
  return regex.test(path);
};
/**
 * Get added lines with line numbers.
 *
 * NOTE(review): numbers are computed by counting additions upward from each
 * hunk's `newStart`. Because DiffHunk stores additions, deletions and
 * context in separate arrays, the original interleaving within a hunk is
 * lost — the numbers are exact only when a hunk's additions are contiguous
 * at its start; treat them as approximate otherwise.
 */
export const getAddedLines = (
  fileDiff: ParsedFileDiff,
): Array<{ line: number; content: string }> => {
  const result: Array<{ line: number; content: string }> = [];
  for (const hunk of fileDiff.hunks) {
    let lineNumber = hunk.newStart;
    for (const addition of hunk.additions) {
      result.push({ line: lineNumber, content: addition });
      lineNumber++;
    }
  }
  return result;
};
/**
 * Render a hunk as diff-style text: leading context, `-` deletions,
 * `+` additions, then trailing context, joined by newlines.
 *
 * @param contextLines - How many context lines to keep on each side.
 */
export const getHunkContext = (
  hunk: DiffHunk,
  contextLines: number = 3,
): string => {
  const before = hunk.context.slice(0, contextLines).map((ctx) => ` ${ctx}`);
  const removed = hunk.deletions.map((del) => `-${del}`);
  const added = hunk.additions.map((add) => `+${add}`);
  const after = hunk.context.slice(-contextLines).map((ctx) => ` ${ctx}`);
  return [...before, ...removed, ...added, ...after].join("\n");
};
/**
 * Summarize a parsed diff: file/addition/deletion counts plus a
 * human-readable one-line summary.
 */
export const getDiffStats = (
  diff: ParsedDiff,
): { files: number; additions: number; deletions: number; summary: string } => {
  const { totalFiles: files, totalAdditions: additions, totalDeletions: deletions } = diff;
  return {
    files,
    additions,
    deletions,
    summary: `${files} file(s), +${additions}/-${deletions}`,
  };
};

View File

@@ -0,0 +1,215 @@
/**
* PR Review Service
*
* Main orchestrator for multi-agent code review.
*/
import {
DEFAULT_REVIEW_CONFIG,
PR_REVIEW_ERRORS,
PR_REVIEW_MESSAGES,
} from "@constants/pr-review";
import { parseDiff, filterFiles, getFilePath } from "@services/pr-review/diff-parser";
import { generateReport, formatReportMarkdown } from "@services/pr-review/report-generator";
import * as securityReviewer from "@services/pr-review/reviewers/security";
import * as performanceReviewer from "@services/pr-review/reviewers/performance";
import * as logicReviewer from "@services/pr-review/reviewers/logic";
import * as styleReviewer from "@services/pr-review/reviewers/style";
import type {
PRReviewReport,
PRReviewRequest,
PRReviewConfig,
ReviewerResult,
ParsedDiff,
ReviewFileContext,
} from "@/types/pr-review";
// Re-export utilities
export * from "@services/pr-review/diff-parser";
export * from "@services/pr-review/report-generator";
// Reviewer map: reviewer name (as referenced by PRReviewConfig entries) to
// its implementation module; each module exposes reviewFile(context).
const reviewers = {
  security: securityReviewer,
  performance: performanceReviewer,
  logic: logicReviewer,
  style: styleReviewer,
} as const;
/**
* Run a complete PR review
*/
export const reviewPR = async (
diffContent: string,
request: PRReviewRequest = {},
options: {
onProgress?: (message: string) => void;
abortSignal?: AbortSignal;
} = {},
): Promise<PRReviewReport> => {
const config = { ...DEFAULT_REVIEW_CONFIG, ...request.config };
options.onProgress?.(PR_REVIEW_MESSAGES.STARTING);
// Parse diff
options.onProgress?.(PR_REVIEW_MESSAGES.PARSING_DIFF);
const diff = parseDiff(diffContent);
if (diff.files.length === 0) {
throw new Error(PR_REVIEW_ERRORS.NO_FILES);
}
// Filter files
const filteredFiles = filterFiles(diff.files, config.excludePatterns);
if (filteredFiles.length === 0) {
throw new Error(PR_REVIEW_ERRORS.EXCLUDED_ALL);
}
// Create filtered diff
const filteredDiff: ParsedDiff = {
files: filteredFiles,
totalAdditions: filteredFiles.reduce((sum, f) => sum + f.additions, 0),
totalDeletions: filteredFiles.reduce((sum, f) => sum + f.deletions, 0),
totalFiles: filteredFiles.length,
};
// Run reviewers in parallel
const reviewerResults = await runReviewers(
filteredDiff,
config,
options.onProgress,
options.abortSignal,
);
// Generate report
const report = generateReport(reviewerResults, filteredDiff, {
baseBranch: request.baseBranch ?? "main",
headBranch: request.headBranch ?? "HEAD",
commitRange: `${request.baseBranch ?? "main"}...${request.headBranch ?? "HEAD"}`,
});
options.onProgress?.(PR_REVIEW_MESSAGES.COMPLETED(report.findings.length));
return report;
};
/**
 * Run all enabled reviewers over the parsed diff in parallel.
 *
 * Each reviewer is mapped to a promise that never rejects: aborts, unknown
 * reviewer names and thrown errors all yield a ReviewerResult carrying an
 * `error` string instead.
 *
 * Fixes: `const findings = []` previously inferred an implicit `any[]`
 * (an error under strict mode) — now typed via ReviewerResult; the
 * redundant intermediate `results` accumulator is removed.
 */
const runReviewers = async (
  diff: ParsedDiff,
  config: PRReviewConfig,
  onProgress?: (message: string) => void,
  abortSignal?: AbortSignal,
): Promise<ReviewerResult[]> => {
  const enabledReviewers = config.reviewers.filter((r) => r.enabled);
  const promises = enabledReviewers.map(async (reviewerConfig): Promise<ReviewerResult> => {
    if (abortSignal?.aborted) {
      return {
        reviewer: reviewerConfig.name,
        findings: [],
        duration: 0,
        error: "Aborted",
      };
    }
    onProgress?.(PR_REVIEW_MESSAGES.REVIEWING(reviewerConfig.name));
    const startTime = Date.now();
    const reviewerModule = reviewers[reviewerConfig.name as keyof typeof reviewers];
    if (!reviewerModule) {
      return {
        reviewer: reviewerConfig.name,
        findings: [],
        duration: 0,
        error: `Unknown reviewer: ${reviewerConfig.name}`,
      };
    }
    try {
      // Explicitly typed (was an implicit-any empty array literal).
      const findings: ReviewerResult["findings"] = [];
      for (const fileDiff of diff.files) {
        const fileContext: ReviewFileContext = {
          path: getFilePath(fileDiff),
          diff: fileDiff,
        };
        findings.push(...reviewerModule.reviewFile(fileContext));
      }
      return {
        reviewer: reviewerConfig.name,
        findings,
        duration: Date.now() - startTime,
      };
    } catch (error) {
      const message = error instanceof Error ? error.message : String(error);
      return {
        reviewer: reviewerConfig.name,
        findings: [],
        duration: Date.now() - startTime,
        error: message,
      };
    }
  });
  // Return the settled results directly — no mutable accumulator needed.
  return Promise.all(promises);
};
/**
* Run a quick review (only critical checks)
*/
export const quickReview = async (
diffContent: string,
options: {
onProgress?: (message: string) => void;
} = {},
): Promise<PRReviewReport> => {
return reviewPR(
diffContent,
{
config: {
reviewers: [
{ name: "security", type: "security", enabled: true, minConfidence: 90 },
{ name: "logic", type: "logic", enabled: true, minConfidence: 90 },
],
},
},
options,
);
};
/**
 * Get a review report rendered as markdown.
 *
 * Thin wrapper around formatReportMarkdown.
 */
export const getReportMarkdown = (report: PRReviewReport): string => {
  return formatReportMarkdown(report);
};
/**
 * Create a compact one-line review summary for commit messages:
 * star rating plus critical/warning counts (counts omitted when zero).
 */
export const createReviewSummary = (report: PRReviewReport): string => {
  const { critical, warning } = report.findingsBySeverity;
  const parts = [`Review: ${report.rating}/5 stars`];
  if (critical > 0) {
    parts.push(`${critical} critical issue(s)`);
  }
  if (warning > 0) {
    parts.push(`${warning} warning(s)`);
  }
  return parts.join(", ");
};

View File

@@ -0,0 +1,410 @@
/**
* Report Generator
*
* Aggregates findings and generates the review report.
*/
import {
DEFAULT_REVIEW_CONFIG,
SEVERITY_ICONS,
SEVERITY_LABELS,
FINDING_TYPE_LABELS,
RATING_THRESHOLDS,
RECOMMENDATION_THRESHOLDS,
PR_REVIEW_TITLES,
} from "@constants/pr-review";
import type {
PRReviewFinding,
PRReviewReport,
ReviewerResult,
ReviewRating,
ReviewRecommendation,
ReviewSeverity,
ReviewFindingType,
ParsedDiff,
} from "@/types/pr-review";
/**
 * Generate a complete review report.
 *
 * Pipeline: merge + sort all reviewer findings, drop low-confidence ones,
 * cap the total, compute per-severity/per-type counts, derive the star
 * rating and recommendation, and assemble the final report object.
 *
 * @param reviewerResults - Per-reviewer outputs (findings + durations).
 * @param diff - The parsed diff the review ran over (for file/line totals).
 * @param options - Branch names and commit range to stamp on the report.
 */
export const generateReport = (
  reviewerResults: ReviewerResult[],
  diff: ParsedDiff,
  options: {
    baseBranch: string;
    headBranch: string;
    commitRange: string;
  },
): PRReviewReport => {
  // Collect all findings
  const allFindings = aggregateFindings(reviewerResults);
  // Filter by confidence threshold
  const findings = filterByConfidence(
    allFindings,
    DEFAULT_REVIEW_CONFIG.minConfidence,
  );
  // Limit total findings
  const limitedFindings = limitFindings(
    findings,
    DEFAULT_REVIEW_CONFIG.maxFindings,
  );
  // Calculate statistics
  const findingsBySeverity = countBySeverity(limitedFindings);
  const findingsByType = countByType(limitedFindings);
  // Calculate rating and recommendation
  const rating = calculateRating(findingsBySeverity);
  const recommendation = calculateRecommendation(findingsBySeverity);
  // Generate summary
  const summary = generateSummary(limitedFindings, rating, recommendation);
  // Duration is the SUM of reviewer durations (wall time may be lower,
  // since reviewers run in parallel).
  const duration = reviewerResults.reduce((sum, r) => sum + r.duration, 0);
  return {
    id: generateReportId(),
    timestamp: Date.now(),
    duration,
    baseBranch: options.baseBranch,
    headBranch: options.headBranch,
    commitRange: options.commitRange,
    filesChanged: diff.totalFiles,
    additions: diff.totalAdditions,
    deletions: diff.totalDeletions,
    findings: limitedFindings,
    findingsBySeverity,
    findingsByType,
    reviewerResults,
    rating,
    recommendation,
    summary,
  };
};
/**
 * Merge the findings of every reviewer into one list, ordered by severity
 * (critical first) and then by file path.
 */
const aggregateFindings = (results: ReviewerResult[]): PRReviewFinding[] => {
  // Lower rank sorts first.
  const severityRank: Record<ReviewSeverity, number> = {
    critical: 0,
    warning: 1,
    suggestion: 2,
    nitpick: 3,
  };
  return results
    .flatMap((result) => result.findings)
    .sort((a, b) => {
      const rankDelta = severityRank[a.severity] - severityRank[b.severity];
      return rankDelta !== 0 ? rankDelta : a.file.localeCompare(b.file);
    });
};
/**
 * Keep only findings whose confidence meets the threshold (inclusive).
 */
const filterByConfidence = (
  findings: PRReviewFinding[],
  minConfidence: number,
): PRReviewFinding[] => {
  const meetsThreshold = (finding: PRReviewFinding): boolean =>
    finding.confidence >= minConfidence;
  return findings.filter(meetsThreshold);
};
/**
 * Limit the total number of findings, keeping the most severe first.
 *
 * All critical findings are always kept (even past the cap); warnings,
 * suggestions and nitpicks then fill any remaining room in that order.
 *
 * Fix: when criticals alone exceeded `maxFindings`, the remaining-room
 * value went negative and `slice(0, negative)` silently ADDED lower-severity
 * findings (slice-from-end semantics) instead of adding none. The remaining
 * count is now clamped to zero.
 */
const limitFindings = (
  findings: PRReviewFinding[],
  maxFindings: number,
): PRReviewFinding[] => {
  if (findings.length <= maxFindings) return findings;
  const bySeverity = (severity: PRReviewFinding["severity"]) =>
    findings.filter((f) => f.severity === severity);
  // All critical findings are kept unconditionally.
  const result: PRReviewFinding[] = [...bySeverity("critical")];
  for (const severity of ["warning", "suggestion", "nitpick"] as const) {
    // Clamp so a negative value can never reach slice().
    const remaining = Math.max(0, maxFindings - result.length);
    result.push(...bySeverity(severity).slice(0, remaining));
  }
  return result;
};
/**
 * Count findings per severity level (all four keys always present).
 */
const countBySeverity = (
  findings: PRReviewFinding[],
): Record<ReviewSeverity, number> =>
  findings.reduce(
    (counts, finding) => {
      counts[finding.severity] += 1;
      return counts;
    },
    { critical: 0, warning: 0, suggestion: 0, nitpick: 0 } as Record<ReviewSeverity, number>,
  );
/**
 * Count findings per finding type (all six keys always present).
 */
const countByType = (
  findings: PRReviewFinding[],
): Record<ReviewFindingType, number> =>
  findings.reduce(
    (counts, finding) => {
      counts[finding.type] += 1;
      return counts;
    },
    {
      security: 0,
      performance: 0,
      style: 0,
      logic: 0,
      documentation: 0,
      testing: 0,
    } as Record<ReviewFindingType, number>,
  );
/**
 * Calculate the overall rating (1–5 stars): the highest rating whose
 * thresholds accommodate the critical and warning counts; 1 otherwise.
 */
const calculateRating = (
  bySeverity: Record<ReviewSeverity, number>,
): ReviewRating => {
  const ratingsHighToLow: ReviewRating[] = [5, 4, 3, 2, 1];
  const fits = (rating: ReviewRating): boolean => {
    const threshold = RATING_THRESHOLDS[rating];
    return (
      bySeverity.critical <= threshold.maxCritical &&
      bySeverity.warning <= threshold.maxWarning
    );
  };
  return ratingsHighToLow.find(fits) ?? 1;
};
/**
 * Derive the recommendation from per-severity counts: approve when clean,
 * approve-with-suggestions when only bounded warnings remain, request
 * changes for any critical finding, otherwise flag for discussion.
 */
const calculateRecommendation = (
  bySeverity: Record<ReviewSeverity, number>,
): ReviewRecommendation => {
  const { critical, warning, suggestion } = bySeverity;
  const cleanEnoughToApprove =
    critical === 0 &&
    warning === 0 &&
    suggestion <= RECOMMENDATION_THRESHOLDS.approve.maxSuggestion;
  if (cleanEnoughToApprove) {
    return "approve";
  }
  const approvableWithNotes =
    critical === 0 &&
    warning <= RECOMMENDATION_THRESHOLDS.approve_with_suggestions.maxWarning;
  if (approvableWithNotes) {
    return "approve_with_suggestions";
  }
  return critical >= 1 ? "request_changes" : "needs_discussion";
};
/**
 * Build a one-paragraph summary of the review results.
 *
 * Empty findings produce a fixed all-clear message; otherwise counts of
 * critical/warning/suggestion findings are listed, followed by a short
 * recommendation-specific note (none for a plain "approve").
 */
const generateSummary = (
  findings: PRReviewFinding[],
  _rating: ReviewRating,
  recommendation: ReviewRecommendation,
): string => {
  if (findings.length === 0) {
    return "No significant issues found. Code looks good!";
  }
  const countOf = (severity: string): number =>
    findings.filter((f) => f.severity === severity).length;
  const critical = countOf("critical");
  const warnings = countOf("warning");
  const suggestions = countOf("suggestion");
  const parts: string[] = [];
  if (critical > 0) {
    parts.push(`${critical} critical issue(s) must be addressed`);
  }
  if (warnings > 0) {
    parts.push(`${warnings} warning(s) should be reviewed`);
  }
  if (suggestions > 0) {
    parts.push(`${suggestions} suggestion(s) for improvement`);
  }
  // Recommendation-specific closing sentence ("approve" adds nothing).
  const recommendationText: Record<ReviewRecommendation, string> = {
    approve: "",
    approve_with_suggestions:
      "Changes can be merged after addressing suggestions.",
    request_changes: "Critical issues must be fixed before merging.",
    needs_discussion: "Some items need clarification or discussion.",
  };
  const note = recommendationText[recommendation];
  if (note) {
    parts.push(note);
  }
  return parts.join(". ");
};
/**
 * Render a PRReviewReport as a markdown document: title, summary metrics
 * table, per-severity counts, star rating, recommendation, and the
 * formatted findings list.
 *
 * @param report - Completed review report to render.
 * @returns Markdown suitable for posting as a PR comment.
 */
export const formatReportMarkdown = (report: PRReviewReport): string => {
  const lines: string[] = [];
  // Header
  lines.push(`## ${PR_REVIEW_TITLES.REPORT}`);
  lines.push("");
  // Summary heading. Fix: this was previously pushed as a double-quoted
  // string, so the literal text "${PR_REVIEW_TITLES.SUMMARY}" leaked into
  // the rendered markdown instead of the actual title.
  lines.push(`### ${PR_REVIEW_TITLES.SUMMARY}`);
  lines.push("");
  lines.push("| Metric | Value |");
  lines.push("|--------|-------|");
  lines.push(`| Files Changed | ${report.filesChanged} |`);
  lines.push(`| Additions | +${report.additions} |`);
  lines.push(`| Deletions | -${report.deletions} |`);
  lines.push(`| Findings | ${report.findings.length} |`);
  lines.push("");
  // Findings by severity (zero rows are omitted to keep the table short)
  lines.push("| Severity | Count |");
  lines.push("|----------|-------|");
  for (const severity of ["critical", "warning", "suggestion", "nitpick"] as const) {
    const count = report.findingsBySeverity[severity];
    if (count > 0) {
      lines.push(
        `| ${SEVERITY_ICONS[severity]} ${SEVERITY_LABELS[severity]} | ${count} |`,
      );
    }
  }
  lines.push("");
  // Rating rendered as 1-5 stars
  const stars = "⭐".repeat(report.rating);
  lines.push(`**Rating:** ${stars} (${report.rating}/5)`);
  lines.push("");
  // Recommendation with a status emoji
  const recommendationEmoji: Record<ReviewRecommendation, string> = {
    approve: "✅",
    approve_with_suggestions: "✅",
    request_changes: "🔴",
    needs_discussion: "💬",
  };
  lines.push(
    `**${PR_REVIEW_TITLES.RECOMMENDATION}:** ${recommendationEmoji[report.recommendation]} ${formatRecommendation(report.recommendation)}`,
  );
  lines.push("");
  lines.push(report.summary);
  lines.push("");
  // Individual findings
  if (report.findings.length > 0) {
    lines.push(`### ${PR_REVIEW_TITLES.FINDINGS}`);
    lines.push("");
    for (const finding of report.findings) {
      lines.push(formatFinding(finding));
      lines.push("");
    }
  }
  return lines.join("\n");
};
/**
 * Human-readable label for a recommendation value.
 */
const formatRecommendation = (recommendation: ReviewRecommendation): string => {
  if (recommendation === "approve") return "Approve";
  if (recommendation === "approve_with_suggestions") {
    return "Approve with Suggestions";
  }
  if (recommendation === "request_changes") return "Request Changes";
  return "Needs Discussion";
};
/**
 * Render one finding as a markdown fragment: severity/type headline,
 * file location, optional details and suggestion, and a trailing rule.
 */
const formatFinding = (finding: PRReviewFinding): string => {
  const location = finding.line
    ? `${finding.file}:${finding.line}`
    : finding.file;
  const parts: string[] = [
    `${SEVERITY_ICONS[finding.severity]} **[${SEVERITY_LABELS[finding.severity]}]** ${FINDING_TYPE_LABELS[finding.type]}: ${finding.message}`,
    "",
    `📍 \`${location}\``,
  ];
  if (finding.details) {
    parts.push("", `**Issue:** ${finding.details}`);
  }
  if (finding.suggestion) {
    parts.push("", `**Suggestion:** ${finding.suggestion}`);
  }
  parts.push("", "---");
  return parts.join("\n");
};
/**
 * Generate a unique-enough report id: "review_" + timestamp + a short
 * random base-36 suffix (up to 9 chars). Collisions are only avoided
 * probabilistically — do not use as a security token.
 */
const generateReportId = (): string => {
  // slice(2, 11) replaces the deprecated substr(2, 9); drops the "0." prefix.
  const suffix = Math.random().toString(36).slice(2, 11);
  return `review_${Date.now()}_${suffix}`;
};

View File

@@ -0,0 +1,240 @@
/**
* Logic Reviewer
*
* Analyzes code for logical errors and edge cases.
*/
import { MIN_CONFIDENCE_THRESHOLD, REVIEWER_PROMPTS } from "@constants/pr-review";
import type {
PRReviewFinding,
ParsedFileDiff,
ReviewFileContext,
} from "@/types/pr-review";
/**
 * Heuristic regex rules for logic/edge-case problems in added lines.
 *
 * Each rule carries: `patterns` (any match on an added line fires the rule),
 * a user-facing `message`/`suggestion`, and a `confidence` score (0-100)
 * used both to gate reporting against MIN_CONFIDENCE_THRESHOLD and to pick
 * a finding severity. Regex heuristics are approximate by nature — expect
 * some false positives on matched lines.
 */
const LOGIC_PATTERNS = {
  MISSING_NULL_CHECK: {
    patterns: [
      // NOTE(review): matches ANY a.b.c chain, including guaranteed-safe
      // ones — intentionally broad, hence the moderate confidence.
      /\w+\.\w+\.\w+/, // Deep property access without optional chaining
      /(\w+)\[['"][^'"]+['"]\]\.\w+/, // Object property followed by method
    ],
    message: "Potential null/undefined reference",
    suggestion: "Use optional chaining (?.) or add null checks",
    confidence: 70,
  },
  OPTIONAL_CHAIN_MISSING: {
    patterns: [
      /if\s*\([^)]*\)\s*\{[^}]*\w+\./, // Variable used after if check without ?.
    ],
    message: "Consider using optional chaining",
    suggestion: "Replace conditional access with ?. operator",
    confidence: 65,
  },
  EMPTY_CATCH: {
    patterns: [
      /catch\s*\([^)]*\)\s*\{\s*\}/,
      /catch\s*\{\s*\}/,
    ],
    message: "Empty catch block - errors silently ignored",
    suggestion: "Log the error or handle it appropriately",
    confidence: 90,
  },
  UNHANDLED_PROMISE: {
    patterns: [
      // NOTE(review): the (?!try) lookahead sits between two [^}]* runs, so
      // it can be satisfied at almost any position — verify this actually
      // excludes try-wrapped awaits as intended.
      /\basync\s+\w+\s*\([^)]*\)\s*\{[^}]*(?!try)[^}]*await\s+[^}]*\}/,
    ],
    message: "Async function without try-catch",
    suggestion: "Wrap await calls in try-catch or use .catch()",
    confidence: 70,
  },
  FLOATING_PROMISE: {
    patterns: [
      /^\s*\w+\s*\.\s*then\s*\(/m,
      /^\s*\w+\([^)]*\)\.then\s*\(/m,
    ],
    message: "Floating promise - missing await or error handling",
    suggestion: "Use await or add .catch() for error handling",
    confidence: 80,
  },
  ARRAY_INDEX_ACCESS: {
    patterns: [
      // NOTE(review): /\[0\]/ is already covered by /\[\d+\]/; only the
      // /\[-1\]/ alternative adds coverage.
      /\[\d+\]/,
      /\[0\]/,
      /\[-1\]/,
    ],
    message: "Direct array index access without bounds check",
    suggestion: "Consider using .at() or add bounds checking",
    confidence: 60,
  },
  EQUALITY_TYPE_COERCION: {
    patterns: [
      /[^=!]==[^=]/,
      /[^!]!=[^=]/,
    ],
    message: "Using == instead of === (type coercion)",
    suggestion: "Use strict equality (===) to avoid type coercion bugs",
    confidence: 85,
  },
  ASYNC_IN_FOREACH: {
    patterns: [
      /\.forEach\s*\(\s*async/,
    ],
    message: "Async callback in forEach - won't await properly",
    suggestion: "Use for...of loop or Promise.all with .map()",
    confidence: 90,
  },
  MUTATING_PARAMETER: {
    patterns: [
      /function\s+\w+\s*\(\w+\)\s*\{[^}]*\w+\s*\.\s*\w+\s*=/,
      /\(\w+\)\s*=>\s*\{[^}]*\w+\s*\.\s*push/,
    ],
    message: "Mutating function parameter",
    suggestion: "Create a copy before mutating or use immutable patterns",
    confidence: 75,
  },
  RACE_CONDITION: {
    patterns: [
      /let\s+\w+\s*=[^;]+;\s*await\s+[^;]+;\s*\w+\s*=/,
    ],
    message: "Potential race condition with shared state",
    suggestion: "Use atomic operations or proper synchronization",
    confidence: 70,
  },
  INFINITE_LOOP_RISK: {
    patterns: [
      /while\s*\(\s*true\s*\)/,
      /for\s*\(\s*;\s*;\s*\)/,
    ],
    message: "Infinite loop without clear exit condition",
    suggestion: "Ensure there's a clear break condition",
    confidence: 75,
  },
} as const;
/**
 * Run the logic review over a file's diff.
 *
 * Scans every added line against LOGIC_PATTERNS (rules with confidence
 * below MIN_CONFIDENCE_THRESHOLD are skipped entirely), records at most
 * one finding per line per rule, and collapses near-duplicates before
 * returning.
 */
export const reviewFile = (
  fileContext: ReviewFileContext,
): PRReviewFinding[] => {
  const { diff, path } = fileContext;
  const addedLines = getAllAddedLines(diff);
  const findings: PRReviewFinding[] = [];
  for (const [patternName, config] of Object.entries(LOGIC_PATTERNS)) {
    if (config.confidence < MIN_CONFIDENCE_THRESHOLD) {
      continue; // rule is too speculative to report
    }
    for (const { content, lineNumber } of addedLines) {
      // A line fires a rule if ANY of the rule's regexes match it.
      const matched = config.patterns.some((pattern) => pattern.test(content));
      if (!matched) {
        continue;
      }
      findings.push({
        id: generateFindingId(),
        type: "logic",
        severity: determineSeverity(config.confidence),
        file: path,
        line: lineNumber,
        message: config.message,
        details: `Pattern: ${patternName}`,
        suggestion: config.suggestion,
        confidence: config.confidence,
        reviewer: "logic",
      });
    }
  }
  // Collapse near-duplicate findings (same file+message, close lines).
  return deduplicateFindings(findings);
};
/**
 * Map a rule's confidence score to a finding severity:
 * >= 90 critical, >= 80 warning, otherwise suggestion.
 */
const determineSeverity = (
  confidence: number,
): "critical" | "warning" | "suggestion" => {
  if (confidence < 80) {
    return "suggestion";
  }
  return confidence < 90 ? "warning" : "critical";
};
/**
 * Flatten a parsed diff into its added lines, pairing each added line's
 * text with its line number in the new file (hunk.newStart + offset).
 *
 * NOTE(review): numbering assumes a hunk's additions are consecutive from
 * newStart; if additions interleave with context lines, these numbers are
 * approximate — confirm against the diff parser.
 */
const getAllAddedLines = (
  diff: ParsedFileDiff,
): Array<{ content: string; lineNumber: number }> =>
  diff.hunks.flatMap((hunk) =>
    hunk.additions.map((content, offset) => ({
      content,
      lineNumber: hunk.newStart + offset,
    })),
  );
/**
 * Collapse near-duplicate findings.
 *
 * Findings are keyed by file+message. The first occurrence is always kept;
 * a later one with the same key survives only when both carry line numbers
 * more than 5 lines apart (treated as a distinct occurrence) — otherwise
 * it is dropped as a near-duplicate of the first.
 */
const deduplicateFindings = (findings: PRReviewFinding[]): PRReviewFinding[] => {
  const byKey = new Map<string, PRReviewFinding>();
  for (const finding of findings) {
    const baseKey = `${finding.file}:${finding.message}`;
    const first = byKey.get(baseKey);
    if (!first) {
      byKey.set(baseKey, finding);
      continue;
    }
    // Same file+message already recorded: keep this one too only when it
    // points at a clearly different location.
    if (finding.line && first.line && Math.abs(finding.line - first.line) > 5) {
      byKey.set(`${baseKey}:${finding.line}`, finding);
    }
  }
  return [...byKey.values()];
};
/**
 * Generate a unique-enough logic finding id: "logic_" + timestamp + a
 * short random base-36 suffix. Collisions are only avoided
 * probabilistically.
 */
const generateFindingId = (): string => {
  // slice(2, 11) replaces the deprecated substr(2, 9); drops the "0." prefix.
  const suffix = Math.random().toString(36).slice(2, 11);
  return `logic_${Date.now()}_${suffix}`;
};
/**
 * System prompt for the logic reviewer, taken from the shared
 * REVIEWER_PROMPTS table.
 */
export const getPrompt = (): string => {
  return REVIEWER_PROMPTS.logic;
};

View File

@@ -0,0 +1,208 @@
/**
* Performance Reviewer
*
* Analyzes code for performance issues.
*/
import { MIN_CONFIDENCE_THRESHOLD, REVIEWER_PROMPTS } from "@constants/pr-review";
import type {
PRReviewFinding,
ParsedFileDiff,
ReviewFileContext,
} from "@/types/pr-review";
/**
 * Heuristic regex rules for performance problems in added code.
 *
 * Rules are matched against the joined text of a file's added lines (see
 * reviewFile below), so multi-line constructs such as nested loops can
 * match. Each rule's `confidence` (0-100) gates reporting against
 * MIN_CONFIDENCE_THRESHOLD and picks the finding severity; regex matching
 * is approximate, so expect occasional false positives.
 */
const PERFORMANCE_PATTERNS = {
  NESTED_LOOPS: {
    patterns: [
      /for\s*\([^)]+\)\s*\{[^}]*for\s*\([^)]+\)/,
      /\.forEach\([^)]+\)[^}]*\.forEach\(/,
      /\.map\([^)]+\)[^}]*\.map\(/,
      /while\s*\([^)]+\)\s*\{[^}]*while\s*\([^)]+\)/,
    ],
    message: "Nested loops detected - potential O(n²) complexity",
    suggestion: "Consider using a Map/Set for O(1) lookups or restructuring the algorithm",
    confidence: 75,
  },
  ARRAY_IN_LOOP: {
    patterns: [
      /for\s*\([^)]+\)\s*\{[^}]*\.includes\s*\(/,
      /for\s*\([^)]+\)\s*\{[^}]*\.indexOf\s*\(/,
      /\.forEach\([^)]+\)[^}]*\.includes\s*\(/,
      /\.map\([^)]+\)[^}]*\.indexOf\s*\(/,
    ],
    message: "Array search inside loop - O(n²) complexity",
    suggestion: "Convert array to Set for O(1) lookups before the loop",
    confidence: 85,
  },
  UNNECESSARY_RERENDER: {
    patterns: [
      /useEffect\s*\(\s*\([^)]*\)\s*=>\s*\{[^}]*\},\s*\[\s*\]\s*\)/,
      /useState\s*\(\s*\{/,
      /useState\s*\(\s*\[/,
      /style\s*=\s*\{\s*\{/,
    ],
    message: "Potential unnecessary re-render in React component",
    suggestion: "Use useMemo/useCallback for objects/arrays, extract styles outside component",
    confidence: 70,
  },
  MISSING_MEMO: {
    patterns: [
      /export\s+(?:default\s+)?function\s+\w+\s*\([^)]*\)\s*\{[^}]*return\s*\(/,
      /const\s+\w+\s*=\s*\([^)]*\)\s*=>\s*\{[^}]*return\s*\(/,
    ],
    message: "Component may benefit from React.memo",
    suggestion: "Consider wrapping with React.memo if props rarely change",
    confidence: 60, // Below threshold, informational only
  },
  N_PLUS_ONE_QUERY: {
    patterns: [
      /for\s*\([^)]+\)\s*\{[^}]*await\s+.*\.(find|query|get)/,
      /\.forEach\([^)]+\)[^}]*await\s+.*\.(find|query|get)/,
      /\.map\([^)]+\)[^}]*await\s+.*\.(find|query|get)/,
    ],
    message: "Potential N+1 query problem",
    suggestion: "Use batch queries or include/join to fetch related data",
    confidence: 85,
  },
  MEMORY_LEAK: {
    patterns: [
      // NOTE(review): fires on the mere presence of these calls; cannot see
      // whether a matching cleanup exists elsewhere in the file.
      /setInterval\s*\([^)]+\)/,
      /addEventListener\s*\([^)]+\)/,
      /subscribe\s*\([^)]+\)/,
    ],
    message: "Potential memory leak - subscription/interval without cleanup",
    suggestion: "Ensure cleanup in useEffect return or componentWillUnmount",
    confidence: 75,
  },
  LARGE_BUNDLE: {
    patterns: [
      /import\s+\*\s+as\s+\w+\s+from\s+['"]lodash['"]/,
      /import\s+\w+\s+from\s+['"]moment['"]/,
      /require\s*\(\s*['"]lodash['"]\s*\)/,
    ],
    message: "Large library import may increase bundle size",
    suggestion: "Use specific imports (lodash/get) or smaller alternatives (date-fns)",
    confidence: 80,
  },
  SYNC_FILE_OPERATION: {
    patterns: [
      /readFileSync\s*\(/,
      /writeFileSync\s*\(/,
      /readdirSync\s*\(/,
      /existsSync\s*\(/,
    ],
    message: "Synchronous file operation may block event loop",
    suggestion: "Use async versions (readFile, writeFile) for better performance",
    confidence: 80,
  },
} as const;
/**
 * Run the performance review over a file's diff.
 *
 * All added lines are joined so multi-line rules (e.g. nested loops) can
 * match; at most one finding is reported per rule, attributed to the first
 * single line the matching regex also hits (or the first added line as a
 * fallback).
 */
export const reviewFile = (
  fileContext: ReviewFileContext,
): PRReviewFinding[] => {
  const { diff, path } = fileContext;
  const addedLines = getAllAddedLines(diff);
  // Join added lines so rules spanning multiple lines can still match.
  const combinedContent = addedLines.map((line) => line.content).join("\n");
  const findings: PRReviewFinding[] = [];
  for (const [patternName, config] of Object.entries(PERFORMANCE_PATTERNS)) {
    if (config.confidence < MIN_CONFIDENCE_THRESHOLD) {
      continue; // informational-only rules are not reported
    }
    const matched = config.patterns.find((pattern) =>
      pattern.test(combinedContent),
    );
    if (!matched) {
      continue;
    }
    findings.push({
      id: generateFindingId(),
      type: "performance",
      severity: config.confidence >= 85 ? "warning" : "suggestion",
      file: path,
      line: findPatternLine(addedLines, matched),
      message: config.message,
      details: `Pattern: ${patternName}`,
      suggestion: config.suggestion,
      confidence: config.confidence,
      reviewer: "performance",
    });
  }
  return findings;
};
/**
 * Flatten a parsed diff into its added lines, pairing each added line's
 * text with its line number in the new file (hunk.newStart + offset).
 *
 * NOTE(review): numbering assumes a hunk's additions are consecutive from
 * newStart; if additions interleave with context lines, these numbers are
 * approximate — confirm against the diff parser.
 */
const getAllAddedLines = (
  diff: ParsedFileDiff,
): Array<{ content: string; lineNumber: number }> =>
  diff.hunks.flatMap((hunk) =>
    hunk.additions.map((content, offset) => ({
      content,
      lineNumber: hunk.newStart + offset,
    })),
  );
/**
 * Locate the line number where a pattern matches.
 *
 * Returns the first single line the regex hits; when none matches (the
 * pattern only matched across joined lines), falls back to the first added
 * line so the finding still points somewhere useful. Undefined only when
 * there are no added lines at all.
 */
const findPatternLine = (
  lines: Array<{ content: string; lineNumber: number }>,
  pattern: RegExp,
): number | undefined => {
  const hit = lines.find((line) => pattern.test(line.content));
  if (hit) {
    return hit.lineNumber;
  }
  return lines[0]?.lineNumber;
};
/**
 * Generate a unique-enough performance finding id: "perf_" + timestamp +
 * a short random base-36 suffix. Collisions are only avoided
 * probabilistically.
 */
const generateFindingId = (): string => {
  // slice(2, 11) replaces the deprecated substr(2, 9); drops the "0." prefix.
  const suffix = Math.random().toString(36).slice(2, 11);
  return `perf_${Date.now()}_${suffix}`;
};
/**
 * System prompt for the performance reviewer, taken from the shared
 * REVIEWER_PROMPTS table.
 */
export const getPrompt = (): string => {
  return REVIEWER_PROMPTS.performance;
};

View File

@@ -0,0 +1,182 @@
/**
* Security Reviewer
*
* Analyzes code for security vulnerabilities.
*/
import { MIN_CONFIDENCE_THRESHOLD, REVIEWER_PROMPTS } from "@constants/pr-review";
import type {
PRReviewFinding,
ParsedFileDiff,
ReviewFileContext,
} from "@/types/pr-review";
/**
 * Heuristic regex rules for security vulnerabilities in added lines.
 *
 * Each rule fires when any of its regexes match an added line. The
 * `confidence` score (0-100) gates reporting against
 * MIN_CONFIDENCE_THRESHOLD and picks severity (>= 90 is critical).
 * These are pattern heuristics, not taint analysis — they cannot tell
 * trusted from untrusted data, so matched lines may be false positives.
 */
const SECURITY_PATTERNS = {
  SQL_INJECTION: {
    patterns: [
      // Template-literal interpolation inside SQL text or query/execute calls.
      /`SELECT .* FROM .* WHERE .*\$\{/i,
      /`INSERT INTO .* VALUES.*\$\{/i,
      /`UPDATE .* SET .*\$\{/i,
      /`DELETE FROM .* WHERE .*\$\{/i,
      /query\s*\(\s*['"`].*\$\{/i,
      /execute\s*\(\s*['"`].*\$\{/i,
    ],
    message: "Potential SQL injection vulnerability",
    suggestion: "Use parameterized queries or prepared statements",
    confidence: 90,
  },
  XSS: {
    patterns: [
      /innerHTML\s*=\s*[^"'].*\+/,
      /dangerouslySetInnerHTML/,
      /document\.write\s*\(/,
      /\.html\s*\([^)]*\+/,
      /v-html\s*=/,
    ],
    message: "Potential XSS vulnerability",
    suggestion: "Sanitize user input before rendering or use text content",
    confidence: 85,
  },
  COMMAND_INJECTION: {
    patterns: [
      /exec\s*\(\s*['"`].*\$\{/,
      /spawn\s*\(\s*['"`].*\$\{/,
      /execSync\s*\(\s*['"`].*\$\{/,
      /child_process.*\$\{/,
      /\$\(.* \+ /,
    ],
    message: "Potential command injection vulnerability",
    suggestion: "Avoid string concatenation in shell commands, use argument arrays",
    confidence: 90,
  },
  PATH_TRAVERSAL: {
    patterns: [
      /readFile\s*\([^)]*\+/,
      /readFileSync\s*\([^)]*\+/,
      /fs\..*\([^)]*\+.*req\./,
      /path\.join\s*\([^)]*req\./,
    ],
    message: "Potential path traversal vulnerability",
    suggestion: "Validate and sanitize file paths, use path.normalize",
    confidence: 85,
  },
  SECRETS_EXPOSURE: {
    patterns: [
      // NOTE(review): matches any quoted value after these key names, so
      // placeholders/test fixtures will also fire — hence lower confidence.
      /api[_-]?key\s*[:=]\s*['"][^'"]+['"]/i,
      /secret\s*[:=]\s*['"][^'"]+['"]/i,
      /password\s*[:=]\s*['"][^'"]+['"]/i,
      /token\s*[:=]\s*['"][^'"]+['"]/i,
      /private[_-]?key\s*[:=]\s*['"][^'"]+['"]/i,
      /Bearer\s+[A-Za-z0-9_-]+/,
    ],
    message: "Potential hardcoded secret",
    suggestion: "Use environment variables or a secrets manager",
    confidence: 80,
  },
  INSECURE_RANDOM: {
    patterns: [
      /Math\.random\s*\(\)/,
    ],
    message: "Insecure random number generation",
    suggestion: "Use crypto.randomBytes or crypto.getRandomValues for security-sensitive operations",
    confidence: 70,
  },
  EVAL_USAGE: {
    patterns: [
      /\beval\s*\(/,
      /new\s+Function\s*\(/,
      /setTimeout\s*\(\s*['"`]/,
      /setInterval\s*\(\s*['"`]/,
    ],
    message: "Dangerous use of eval or dynamic code execution",
    suggestion: "Avoid eval and dynamic code execution, use safer alternatives",
    confidence: 85,
  },
} as const;
/**
 * Run the security review over a file's diff.
 *
 * Scans every added line against SECURITY_PATTERNS and reports at most one
 * finding per line per rule. Severity is "critical" for rules with
 * confidence >= 90, otherwise "warning".
 */
export const reviewFile = (
  fileContext: ReviewFileContext,
): PRReviewFinding[] => {
  const findings: PRReviewFinding[] = [];
  const { diff, path } = fileContext;
  // Get all added lines
  const addedLines = getAllAddedLines(diff);
  // Check each pattern
  for (const [patternName, config] of Object.entries(SECURITY_PATTERNS)) {
    // Consistency/perf fix: skip low-confidence rules up front (as the
    // logic/performance/style reviewers do) instead of testing their
    // regexes on every line and discarding the matches afterwards.
    // Reported findings are unchanged.
    if (config.confidence < MIN_CONFIDENCE_THRESHOLD) {
      continue;
    }
    for (const { content, lineNumber } of addedLines) {
      for (const pattern of config.patterns) {
        if (pattern.test(content)) {
          findings.push({
            id: generateFindingId(),
            type: "security",
            severity: config.confidence >= 90 ? "critical" : "warning",
            file: path,
            line: lineNumber,
            message: config.message,
            details: `Found pattern: ${patternName}`,
            suggestion: config.suggestion,
            confidence: config.confidence,
            reviewer: "security",
          });
          break; // One finding per line per pattern type
        }
      }
    }
  }
  return findings;
};
/**
 * Flatten a parsed diff into its added lines, pairing each added line's
 * text with its line number in the new file (hunk.newStart + offset).
 *
 * NOTE(review): numbering assumes a hunk's additions are consecutive from
 * newStart; if additions interleave with context lines, these numbers are
 * approximate — confirm against the diff parser.
 */
const getAllAddedLines = (
  diff: ParsedFileDiff,
): Array<{ content: string; lineNumber: number }> =>
  diff.hunks.flatMap((hunk) =>
    hunk.additions.map((content, offset) => ({
      content,
      lineNumber: hunk.newStart + offset,
    })),
  );
/**
 * Generate a unique-enough security finding id: "sec_" + timestamp + a
 * short random base-36 suffix. Collisions are only avoided
 * probabilistically.
 */
const generateFindingId = (): string => {
  // slice(2, 11) replaces the deprecated substr(2, 9); drops the "0." prefix.
  const suffix = Math.random().toString(36).slice(2, 11);
  return `sec_${Date.now()}_${suffix}`;
};
/**
 * System prompt for the security reviewer, taken from the shared
 * REVIEWER_PROMPTS table.
 */
export const getPrompt = (): string => {
  return REVIEWER_PROMPTS.security;
};

View File

@@ -0,0 +1,267 @@
/**
* Style Reviewer
*
* Analyzes code for style and consistency issues.
*/
import { MIN_CONFIDENCE_THRESHOLD, REVIEWER_PROMPTS } from "@constants/pr-review";
import type {
PRReviewFinding,
ParsedFileDiff,
ReviewFileContext,
} from "@/types/pr-review";
/**
 * Heuristic regex rules for style and consistency issues in added lines.
 *
 * Each rule fires when any of its regexes match an added line. The
 * `confidence` score (0-100) gates reporting against
 * MIN_CONFIDENCE_THRESHOLD and picks severity (>= 85 warning, else
 * nitpick). Some rules are additionally reported at most once per file —
 * see shouldReportOncePerFile.
 */
const STYLE_PATTERNS = {
  CONSOLE_LOG: {
    patterns: [
      /console\.(log|debug|info)\s*\(/,
    ],
    message: "Console statement left in code",
    suggestion: "Remove console statements before committing or use a logger",
    confidence: 85,
  },
  TODO_COMMENT: {
    patterns: [
      /\/\/\s*TODO[:\s]/i,
      /\/\/\s*FIXME[:\s]/i,
      /\/\/\s*HACK[:\s]/i,
      /\/\*\s*TODO[:\s]/i,
    ],
    message: "TODO/FIXME comment found",
    suggestion: "Address the TODO or create a tracking issue",
    confidence: 70,
  },
  MAGIC_NUMBER: {
    patterns: [
      // Common time/size constants (seconds per day, ms per minute, powers
      // of two, etc.) appearing as bare literals.
      /(?<![a-zA-Z_])(?:86400|3600|60000|1000|24|60|365|1024|2048|4096)(?![a-zA-Z_\d])/,
    ],
    message: "Magic number - consider using a named constant",
    suggestion: "Extract to a named constant for better readability",
    confidence: 70,
  },
  LONG_LINE: {
    patterns: [
      /.{121,}/,
    ],
    message: "Line exceeds 120 characters",
    suggestion: "Break long lines for better readability",
    confidence: 75,
  },
  INCONSISTENT_QUOTES: {
    patterns: [
      // NOTE(review): this matches ANY quoted literal, not actually mixed
      // quote styles — it will fire on every file containing a string;
      // verify against MIN_CONFIDENCE_THRESHOLD whether it ever reports.
      /["'][^"']*["']/,
    ],
    message: "Inconsistent quote style",
    suggestion: "Use consistent quotes (single or double) throughout the file",
    confidence: 60,
  },
  VAR_DECLARATION: {
    patterns: [
      /\bvar\s+\w+/,
    ],
    message: "Using 'var' instead of 'let' or 'const'",
    suggestion: "Prefer 'const' for immutable values, 'let' for mutable",
    confidence: 85,
  },
  NESTED_TERNARY: {
    patterns: [
      /\?[^:]+\?[^:]+:/,
    ],
    message: "Nested ternary operator - hard to read",
    suggestion: "Use if-else statements or extract to a function",
    confidence: 80,
  },
  CALLBACK_HELL: {
    patterns: [
      /\)\s*=>\s*\{[^}]*\)\s*=>\s*\{[^}]*\)\s*=>\s*\{/,
      /\.then\([^)]+\.then\([^)]+\.then\(/,
    ],
    message: "Deeply nested callbacks - callback hell",
    suggestion: "Refactor using async/await or extract functions",
    confidence: 80,
  },
  ANY_TYPE: {
    patterns: [
      /:\s*any\b/,
      /<any>/,
      /as\s+any\b/,
    ],
    message: "Using 'any' type reduces type safety",
    suggestion: "Use specific types or 'unknown' with type guards",
    confidence: 75,
  },
  SINGLE_LETTER_VAR: {
    patterns: [
      /\b(?:const|let|var)\s+[a-z]\s*=/,
    ],
    message: "Single-letter variable name",
    suggestion: "Use descriptive variable names for clarity",
    confidence: 65,
  },
  COMMENTED_CODE: {
    patterns: [
      /\/\/\s*(?:const|let|var|function|if|for|while|return)\s+\w+/,
      /\/\*\s*(?:const|let|var|function|if|for|while|return)\s+\w+/,
    ],
    message: "Commented out code detected",
    suggestion: "Remove commented code - use version control for history",
    confidence: 80,
  },
  DUPLICATE_IMPORT: {
    patterns: [
      // NOTE(review): matches any named import line; actual duplicate
      // detection across lines is not performed here.
      /import\s+\{[^}]+\}\s+from\s+['"]([^'"]+)['"]/,
    ],
    message: "Check for duplicate or unused imports",
    suggestion: "Consolidate imports from the same module",
    confidence: 60,
  },
} as const;
/**
 * Run the style review over a file's diff.
 *
 * Scans every added line against STYLE_PATTERNS (rules below
 * MIN_CONFIDENCE_THRESHOLD are skipped). Rules flagged by
 * shouldReportOncePerFile produce at most one finding for the whole file;
 * others produce at most one finding per line. Finally, repeats of the
 * same message are capped at 3 to avoid noise.
 */
export const reviewFile = (
  fileContext: ReviewFileContext,
): PRReviewFinding[] => {
  const { diff, path } = fileContext;
  const addedLines = getAllAddedLines(diff);
  const findings: PRReviewFinding[] = [];
  for (const [patternName, config] of Object.entries(STYLE_PATTERNS)) {
    if (config.confidence < MIN_CONFIDENCE_THRESHOLD) {
      continue;
    }
    const reportOnce = shouldReportOncePerFile(patternName);
    let alreadyReported = false;
    for (const { content, lineNumber } of addedLines) {
      const matched = config.patterns.some((pattern) => pattern.test(content));
      if (!matched) {
        continue;
      }
      if (reportOnce && alreadyReported) {
        continue;
      }
      findings.push(createFinding(path, lineNumber, config, patternName));
      alreadyReported = true;
    }
  }
  // Cap repeats of the same message so noisy rules don't dominate.
  return limitFindings(findings, 3);
};
/**
 * Rules in this list describe file-wide conventions, so a single finding
 * per file is enough; per-line repeats would only add noise.
 */
const shouldReportOncePerFile = (patternName: string): boolean =>
  [
    "INCONSISTENT_QUOTES",
    "VAR_DECLARATION",
    "ANY_TYPE",
    "DUPLICATE_IMPORT",
  ].includes(patternName);
/**
 * Build a style finding from a matched rule's config.
 * Severity is "warning" for confidence >= 85, otherwise "nitpick".
 */
const createFinding = (
  path: string,
  lineNumber: number,
  config: { message: string; suggestion: string; confidence: number },
  patternName: string,
): PRReviewFinding => {
  const severity = config.confidence >= 85 ? "warning" : "nitpick";
  return {
    id: generateFindingId(),
    type: "style",
    severity,
    file: path,
    line: lineNumber,
    message: config.message,
    details: `Pattern: ${patternName}`,
    suggestion: config.suggestion,
    confidence: config.confidence,
    reviewer: "style",
  };
};
/**
 * Cap the number of findings carrying the same message at `maxPerPattern`,
 * preserving original order, so one noisy rule cannot flood the report.
 */
const limitFindings = (
  findings: PRReviewFinding[],
  maxPerPattern: number,
): PRReviewFinding[] => {
  const seenPerMessage = new Map<string, number>();
  return findings.filter((finding) => {
    const seen = seenPerMessage.get(finding.message) ?? 0;
    if (seen >= maxPerPattern) {
      return false;
    }
    seenPerMessage.set(finding.message, seen + 1);
    return true;
  });
};
/**
 * Flatten a parsed diff into its added lines, pairing each added line's
 * text with its line number in the new file (hunk.newStart + offset).
 *
 * NOTE(review): numbering assumes a hunk's additions are consecutive from
 * newStart; if additions interleave with context lines, these numbers are
 * approximate — confirm against the diff parser.
 */
const getAllAddedLines = (
  diff: ParsedFileDiff,
): Array<{ content: string; lineNumber: number }> =>
  diff.hunks.flatMap((hunk) =>
    hunk.additions.map((content, offset) => ({
      content,
      lineNumber: hunk.newStart + offset,
    })),
  );
/**
 * Generate a unique-enough style finding id: "style_" + timestamp + a
 * short random base-36 suffix. Collisions are only avoided
 * probabilistically.
 */
const generateFindingId = (): string => {
  // slice(2, 11) replaces the deprecated substr(2, 9); drops the "0." prefix.
  const suffix = Math.random().toString(36).slice(2, 11);
  return `style_${Date.now()}_${suffix}`;
};
/**
 * System prompt for the style reviewer, taken from the shared
 * REVIEWER_PROMPTS table.
 */
export const getPrompt = (): string => {
  return REVIEWER_PROMPTS.style;
};

View File

@@ -74,7 +74,37 @@ const MODE_PROMPT_BUILDERS: Record<
};
/**
 * Run a git command asynchronously in the current working directory.
 *
 * Spawns `git` with the given argument array (no shell, so arguments are
 * not re-interpreted) and buffers stdout/stderr.
 *
 * @param args - git subcommand and flags, e.g. ["status", "--short"].
 * @returns Resolves with trimmed stdout on exit code 0; rejects with an
 *          Error carrying stderr (or the exit code) otherwise, and on
 *          spawn failure (e.g. git not installed).
 */
const execGitCommand = (args: string[]): Promise<string> => {
  return new Promise((resolve, reject) => {
    const { spawn } = require("child_process");
    const proc = spawn("git", args, { cwd: process.cwd() });
    let stdout = "";
    let stderr = "";
    proc.stdout.on("data", (data: Buffer) => {
      stdout += data.toString();
    });
    proc.stderr.on("data", (data: Buffer) => {
      stderr += data.toString();
    });
    proc.on("close", (code: number) => {
      if (code === 0) {
        resolve(stdout.trim());
      } else {
        // Prefer git's own error text; fall back to the numeric exit code.
        reject(new Error(stderr || `git exited with code ${code}`));
      }
    });
    // Covers spawn-level failures (binary missing, permissions, etc.).
    proc.on("error", reject);
  });
};
/**
* Get git context for prompt building (async, non-blocking)
*/
export const getGitContext = async (): Promise<{
isGitRepo: boolean;
@@ -83,16 +113,15 @@ export const getGitContext = async (): Promise<{
recentCommits?: string[];
}> => {
try {
const { execSync } = await import("child_process");
const branch = execSync("git branch --show-current", {
encoding: "utf-8",
}).trim();
const status =
execSync("git status --short", { encoding: "utf-8" }).trim() || "(clean)";
const commits = execSync("git log --oneline -5", { encoding: "utf-8" })
.trim()
.split("\n")
.filter(Boolean);
// Run all git commands in parallel for faster execution
const [branch, status, commits] = await Promise.all([
execGitCommand(["branch", "--show-current"]),
execGitCommand(["status", "--short"]).then((s) => s || "(clean)"),
execGitCommand(["log", "--oneline", "-5"]).then((s) =>
s.split("\n").filter(Boolean),
),
]);
return { isGitRepo: true, branch, status, recentCommits: commits };
} catch {
return { isGitRepo: false };

View File

@@ -0,0 +1,318 @@
/**
* Session Compaction Service
*
* Integrates auto-compaction with the agent loop and hooks system.
* Follows OpenCode's two-tier approach: pruning (remove old tool output)
* and compaction (summarize for fresh context).
*/
import type { Message } from "@/types/providers";
import {
CHARS_PER_TOKEN,
TOKEN_OVERFLOW_THRESHOLD,
PRUNE_MINIMUM_TOKENS,
PRUNE_PROTECT_TOKENS,
PRUNE_RECENT_TURNS,
PRUNE_PROTECTED_TOOLS,
TOKEN_MESSAGES,
} from "@constants/token";
import { getModelContextSize, DEFAULT_CONTEXT_SIZE } from "@constants/copilot";
import {
compactConversation,
checkCompactionNeeded,
getModelCompactionConfig,
createCompactionSummary,
} from "@services/auto-compaction";
import { appStore } from "@tui-solid/context/app";
/**
 * Rough token estimate for a piece of text: character count divided by
 * the average CHARS_PER_TOKEN ratio, rounded to the nearest integer and
 * clamped at 0. Null/undefined/empty content estimates as 0 tokens.
 */
export const estimateTokens = (content: string): number => {
  const text = content || "";
  return Math.max(0, Math.round(text.length / CHARS_PER_TOKEN));
};
/**
 * Estimate the total token count of a message array.
 * Non-string content is measured via its JSON serialization.
 */
export const estimateMessagesTokens = (messages: Message[]): number => {
  let total = 0;
  for (const msg of messages) {
    const text =
      typeof msg.content === "string"
        ? msg.content
        : JSON.stringify(msg.content);
    total += estimateTokens(text);
  }
  return total;
};
/**
 * Check whether the conversation is close enough to the model's input
 * context limit that compaction should kick in (estimated tokens >=
 * TOKEN_OVERFLOW_THRESHOLD fraction of the window).
 */
export const isContextOverflow = (
  messages: Message[],
  modelId?: string,
): boolean => {
  // Absent/empty model id falls back to the default context window.
  const { input } = modelId
    ? getModelContextSize(modelId)
    : DEFAULT_CONTEXT_SIZE;
  const limit = input * TOKEN_OVERFLOW_THRESHOLD;
  return estimateMessagesTokens(messages) >= limit;
};
/**
 * Prune old tool outputs from a message history to reclaim context.
 *
 * Strategy (following OpenCode):
 * 1. Walk backwards through messages (newest first), counting user turns.
 * 2. Skip everything inside the most recent `recentTurns` user turns.
 * 3. Beyond that, accumulate tool-output tokens; once the running total
 *    exceeds `protectThreshold`, start marking tool messages as prune
 *    candidates (the most recent prunable outputs stay intact).
 * 4. Only prune when the candidates together free at least
 *    `minTokensToFree`; otherwise return the input unchanged.
 *
 * Pruned messages keep their role and other fields but have their content
 * replaced with a truncation marker, so the conversation shape stays valid.
 *
 * @returns The (possibly rewritten) message array plus how many messages
 *          were pruned and the estimated tokens saved (zeros and the
 *          original array when below the minimum).
 */
export const pruneToolOutputs = (
  messages: Message[],
  options: {
    minTokensToFree?: number;
    protectThreshold?: number;
    recentTurns?: number;
    protectedTools?: Set<string>;
  } = {},
): { messages: Message[]; prunedCount: number; tokensSaved: number } => {
  const {
    minTokensToFree = PRUNE_MINIMUM_TOKENS,
    protectThreshold = PRUNE_PROTECT_TOKENS,
    recentTurns = PRUNE_RECENT_TURNS,
    protectedTools = PRUNE_PROTECTED_TOOLS,
  } = options;
  // Find tool messages to potentially prune
  interface PruneCandidate {
    index: number;
    tokens: number;
  }
  const candidates: PruneCandidate[] = [];
  let userTurnCount = 0;
  let totalPrunableTokens = 0;
  // Walk backwards through messages (newest first)
  for (let i = messages.length - 1; i >= 0; i--) {
    const msg = messages[i];
    // Count user turns
    if (msg.role === "user") {
      userTurnCount++;
    }
    // Skip if in protected recent turns
    if (userTurnCount < recentTurns) {
      continue;
    }
    // Check for tool messages
    if (msg.role === "tool") {
      // Extract tool name from tool_call_id if possible.
      // NOTE(review): assumes tool_call_id looks like "<toolName>-<suffix>";
      // a tool name containing "-" would yield only its first segment —
      // confirm against how tool_call_id is generated.
      const toolName = (msg as { tool_call_id?: string }).tool_call_id
        ?.split("-")[0] ?? "";
      // Skip protected tools
      if (protectedTools.has(toolName)) {
        continue;
      }
      const content =
        typeof msg.content === "string"
          ? msg.content
          : JSON.stringify(msg.content);
      const tokens = estimateTokens(content);
      totalPrunableTokens += tokens;
      // Only mark for pruning once the accumulated (newest-first) prunable
      // tokens pass protectThreshold — i.e. the most recent prunable
      // outputs up to that budget are kept.
      if (totalPrunableTokens > protectThreshold) {
        candidates.push({ index: i, tokens });
      }
    }
  }
  // Calculate total tokens to save
  const tokensSaved = candidates.reduce((sum, c) => sum + c.tokens, 0);
  // Only prune if we can free minimum threshold
  if (tokensSaved < minTokensToFree) {
    return { messages, prunedCount: 0, tokensSaved: 0 };
  }
  // Create pruned messages
  const prunedIndices = new Set(candidates.map((c) => c.index));
  const prunedMessages = messages.map((msg, index) => {
    if (prunedIndices.has(index)) {
      // Replace content with truncation marker
      return {
        ...msg,
        content: "[Output pruned to save context]",
      };
    }
    return msg;
  });
  return {
    messages: prunedMessages,
    prunedCount: candidates.length,
    tokensSaved,
  };
};
/**
 * Perform full session compaction in two phases:
 *
 * 1. Prune old tool outputs (cheap; keeps conversation structure intact).
 * 2. If the (possibly pruned) history still exceeds the model's
 *    compaction threshold, run full conversation compaction.
 *
 * Callback order: onPruneStart always fires; onPruneComplete only when
 * pruning removed something; onCompactStart whenever phase 2 runs;
 * onCompactComplete only when it actually compacted.
 *
 * @returns The resulting messages plus flags for which phases took effect
 *          and the combined estimated tokens saved.
 */
export const performSessionCompaction = async (
  messages: Message[],
  modelId?: string,
  options?: {
    onPruneStart?: () => void;
    onPruneComplete?: (count: number, saved: number) => void;
    onCompactStart?: () => void;
    onCompactComplete?: (saved: number) => void;
  },
): Promise<{
  messages: Message[];
  compacted: boolean;
  pruned: boolean;
  tokensSaved: number;
}> => {
  const config = getModelCompactionConfig(modelId);
  // Phase 1: Try pruning first
  options?.onPruneStart?.();
  const pruneResult = pruneToolOutputs(messages);
  if (pruneResult.prunedCount > 0) {
    options?.onPruneComplete?.(pruneResult.prunedCount, pruneResult.tokensSaved);
    // Check if pruning was enough; if so, skip full compaction entirely.
    const afterPruneCheck = checkCompactionNeeded(pruneResult.messages, config);
    if (!afterPruneCheck.needsCompaction) {
      return {
        messages: pruneResult.messages,
        compacted: false,
        pruned: true,
        tokensSaved: pruneResult.tokensSaved,
      };
    }
  }
  // Phase 2: Full compaction needed (runs on pruned messages when pruning
  // happened, otherwise on the originals)
  options?.onCompactStart?.();
  const compactResult = compactConversation(
    pruneResult.prunedCount > 0 ? pruneResult.messages : messages,
    config,
  );
  if (compactResult.result.compacted) {
    options?.onCompactComplete?.(compactResult.result.tokensSaved);
  }
  // pruneResult.tokensSaved is 0 when nothing was pruned, so this sum is
  // safe in every path.
  const totalSaved = pruneResult.tokensSaved + compactResult.result.tokensSaved;
  return {
    messages: compactResult.messages,
    compacted: compactResult.result.compacted,
    pruned: pruneResult.prunedCount > 0,
    tokensSaved: totalSaved,
  };
};
/**
 * Create a compaction check middleware for the agent loop.
 *
 * @param modelId - Optional model id used to size the context window.
 * @returns `shouldCompact` to detect overflow, and `compact` to run the
 *   prune/compact pipeline while keeping the UI informed of progress.
 */
export const createCompactionMiddleware = (
  modelId?: string,
): {
  shouldCompact: (messages: Message[]) => boolean;
  compact: (
    messages: Message[],
  ) => Promise<{ messages: Message[]; summary: string }>;
} => {
  const shouldCompact = (messages: Message[]): boolean =>
    isContextOverflow(messages, modelId);

  const compact = async (messages: Message[]) => {
    // Flag compaction in the UI for the duration of the operation.
    appStore.setIsCompacting(true);
    try {
      const outcome = await performSessionCompaction(messages, modelId, {
        onPruneStart: () => {
          appStore.setThinkingMessage("Pruning old tool outputs...");
        },
        onPruneComplete: (count, saved) => {
          appStore.addLog({
            type: "system",
            content: `Pruned ${count} tool outputs (${saved.toLocaleString()} tokens)`,
          });
        },
        onCompactStart: () => {
          appStore.setThinkingMessage(TOKEN_MESSAGES.COMPACTION_STARTING);
        },
        onCompactComplete: (saved) => {
          appStore.addLog({
            type: "system",
            content: TOKEN_MESSAGES.COMPACTION_COMPLETE(saved),
          });
        },
      });

      // Summarize which phases actually ran for the caller's log line.
      const actions: string[] = [];
      if (outcome.pruned) actions.push("pruned old outputs");
      if (outcome.compacted) actions.push("compacted conversation");
      const summary =
        actions.length > 0
          ? `Context management: ${actions.join(", ")} (${outcome.tokensSaved.toLocaleString()} tokens saved)`
          : "";

      return { messages: outcome.messages, summary };
    } finally {
      // Always clear UI state, even when compaction throws.
      appStore.setIsCompacting(false);
      appStore.setThinkingMessage(null);
    }
  };

  return { shouldCompact, compact };
};
/**
 * Get compaction status for display.
 *
 * @param messages - Current conversation history.
 * @param modelId - Optional model id; falls back to the default context size.
 * @returns Token usage numbers plus whether compaction is currently needed.
 */
export const getCompactionStatus = (
  messages: Message[],
  modelId?: string,
): {
  currentTokens: number;
  maxTokens: number;
  usagePercent: number;
  needsCompaction: boolean;
} => {
  const context = modelId ? getModelContextSize(modelId) : DEFAULT_CONTEXT_SIZE;
  const currentTokens = estimateMessagesTokens(messages);
  const maxTokens = context.input;
  // Guard against a zero-sized window to avoid division by zero.
  const usagePercent = maxTokens > 0 ? (currentTokens / maxTokens) * 100 : 0;

  return {
    currentTokens,
    maxTokens,
    usagePercent,
    needsCompaction: isContextOverflow(messages, modelId),
  };
};

View File

@@ -0,0 +1,435 @@
/**
* Skill Loader Service
*
* Parses SKILL.md files with frontmatter and body content.
* Supports progressive disclosure with 3 loading levels.
*/
import fs from "fs/promises";
import { join } from "path";
import {
SKILL_FILE,
SKILL_DIRS,
SKILL_DEFAULTS,
SKILL_ERRORS,
SKILL_REQUIRED_FIELDS,
SKILL_LOADING,
} from "@constants/skills";
import type {
SkillDefinition,
SkillMetadata,
SkillFrontmatter,
ParsedSkillFile,
SkillExample,
SkillLoadLevel,
} from "@/types/skills";
// ============================================================================
// Frontmatter Parsing
// ============================================================================
/**
 * Split SKILL.md content into its frontmatter and body sections.
 *
 * The frontmatter must open on the very first line with the delimiter and be
 * closed by a matching delimiter line; otherwise the whole content is treated
 * as body with no frontmatter.
 */
const parseFrontmatter = (content: string): { frontmatter: string; body: string } => {
  const delimiter = SKILL_FILE.FRONTMATTER_DELIMITER;
  const lines = content.split("\n");

  if (lines[0]?.trim() !== delimiter) {
    return { frontmatter: "", body: content };
  }

  // Locate the closing delimiter line (anywhere after the opening one).
  const closingIndex = lines.findIndex(
    (line, i) => i > 0 && line.trim() === delimiter,
  );
  if (closingIndex === -1) {
    return { frontmatter: "", body: content };
  }

  return {
    frontmatter: lines.slice(1, closingIndex).join("\n"),
    body: lines
      .slice(closingIndex + 1)
      .join("\n")
      .trim(),
  };
};
/**
 * Parse simple YAML-like frontmatter into a plain object.
 *
 * Supported syntax: `key: value` pairs, `- item` array entries under the most
 * recent key, `#` comment lines, quoted strings, booleans, and numbers.
 * This is intentionally not a full YAML parser.
 */
const parseYamlLike = (yaml: string): Record<string, unknown> => {
  const parsed: Record<string, unknown> = {};
  let activeKey: string | null = null;
  let activeList: string[] | null = null;

  for (const rawLine of yaml.split("\n")) {
    const line = rawLine.trim();

    // Skip blanks and comment lines.
    if (!line || line.startsWith("#")) continue;

    // Array entry belonging to the most recently seen key.
    if (line.startsWith("- ") && activeKey) {
      activeList = activeList ?? [];
      // Strip optional surrounding quotes from the item.
      activeList.push(line.slice(2).trim().replace(/^["']|["']$/g, ""));
      parsed[activeKey] = activeList;
      continue;
    }

    // Key-value pair (a leading colon is not a valid key).
    const colonAt = line.indexOf(":");
    if (colonAt > 0) {
      // Flush any list still being collected for the previous key.
      if (activeList && activeKey) {
        parsed[activeKey] = activeList;
      }
      activeList = null;
      activeKey = line.slice(0, colonAt).trim();

      const rawValue = line.slice(colonAt + 1).trim();
      if (!rawValue) {
        // Bare key — its items (if any) follow as array entries.
        continue;
      }
      parsed[activeKey] = parseValue(rawValue);
    }
  }

  return parsed;
};
/**
 * Coerce a scalar YAML value into a boolean, number, or unquoted string.
 */
const parseValue = (value: string): string | boolean | number => {
  if (value === "true") return true;
  if (value === "false") return false;

  const asNumber = Number(value);
  if (value !== "" && !isNaN(asNumber)) return asNumber;

  // Fall back to a string, stripping optional surrounding quotes.
  return value.replace(/^["']|["']$/g, "");
};
/**
 * Validate required fields in parsed frontmatter data and coerce it into a
 * typed SkillFrontmatter.
 *
 * @throws Error when a required field is missing/empty, or when `triggers`
 *   is present but not an array.
 */
const validateFrontmatter = (
  data: Record<string, unknown>,
  filePath: string,
): SkillFrontmatter => {
  // Every required field must be present and non-empty.
  for (const field of SKILL_REQUIRED_FIELDS) {
    if (!(field in data) || data[field] === undefined || data[field] === "") {
      throw new Error(SKILL_ERRORS.MISSING_REQUIRED_FIELD(field, filePath));
    }
  }

  // Triggers must specifically be an array, not merely present.
  if (!Array.isArray(data.triggers)) {
    throw new Error(SKILL_ERRORS.MISSING_REQUIRED_FIELD("triggers (array)", filePath));
  }

  // Optional string-array fields fall back to undefined when absent/invalid.
  const asStringArray = (input: unknown): string[] | undefined =>
    Array.isArray(input) ? (input as string[]) : undefined;

  return {
    id: String(data.id),
    name: String(data.name),
    description: String(data.description),
    version: data.version ? String(data.version) : undefined,
    triggers: data.triggers as string[],
    triggerType: data.triggerType as SkillFrontmatter["triggerType"],
    autoTrigger: typeof data.autoTrigger === "boolean" ? data.autoTrigger : undefined,
    requiredTools: asStringArray(data.requiredTools),
    tags: asStringArray(data.tags),
  };
};
// ============================================================================
// Example Parsing
// ============================================================================
/**
 * Parse examples from a skill body.
 *
 * Expects an `## Examples` section containing `### <description>` blocks,
 * each with `Input:` and `Output:` lines; blocks missing either part are
 * skipped.
 */
const parseExamples = (body: string): SkillExample[] => {
  const section = body.match(/## Examples([\s\S]*?)(?=##[^#]|$)/i);
  if (!section) return [];

  const examples: SkillExample[] = [];
  for (const block of section[1].split(/### /)) {
    if (!block.trim()) continue;

    const input = block.match(/Input:\s*([\s\S]*?)(?=Output:|$)/i);
    const output = block.match(/Output:\s*([\s\S]*?)(?=###|$)/i);
    if (!input || !output) continue;

    // The first line of the block (if any) is the example description.
    const heading = block.match(/^([^\n]+)/);
    examples.push({
      input: input[1].trim(),
      output: output[1].trim(),
      description: heading ? heading[1].trim() : undefined,
    });
  }

  return examples;
};
// ============================================================================
// File Loading
// ============================================================================
/**
* Load and parse a SKILL.md file
*/
export const loadSkillFile = async (filePath: string): Promise<ParsedSkillFile> => {
try {
const stat = await fs.stat(filePath);
if (stat.size > SKILL_LOADING.MAX_FILE_SIZE_BYTES) {
throw new Error(`Skill file too large: ${filePath}`);
}
const content = await fs.readFile(filePath, SKILL_FILE.ENCODING);
const { frontmatter, body } = parseFrontmatter(content);
if (!frontmatter) {
throw new Error(SKILL_ERRORS.INVALID_FRONTMATTER(filePath));
}
const data = parseYamlLike(frontmatter);
const validatedFrontmatter = validateFrontmatter(data, filePath);
const examples = parseExamples(body);
return {
frontmatter: validatedFrontmatter,
body,
examples: examples.length > 0 ? examples : undefined,
filePath,
};
} catch (error) {
const message = error instanceof Error ? error.message : String(error);
throw new Error(SKILL_ERRORS.LOAD_FAILED(filePath, message));
}
};
/**
 * Convert a parsed skill file into Level-1 metadata, filling in defaults for
 * any optional frontmatter fields.
 */
export const toSkillMetadata = (parsed: ParsedSkillFile): SkillMetadata => {
  const fm = parsed.frontmatter;
  return {
    id: fm.id,
    name: fm.name,
    description: fm.description,
    version: fm.version ?? SKILL_DEFAULTS.VERSION,
    triggers: fm.triggers,
    triggerType: fm.triggerType ?? SKILL_DEFAULTS.TRIGGER_TYPE,
    autoTrigger: fm.autoTrigger ?? SKILL_DEFAULTS.AUTO_TRIGGER,
    requiredTools: fm.requiredTools ?? SKILL_DEFAULTS.REQUIRED_TOOLS,
    tags: fm.tags,
  };
};
/**
 * Convert a parsed skill file into a full Level-3 SkillDefinition, including
 * the system prompt and instructions extracted from the body and a load
 * timestamp.
 */
export const toSkillDefinition = (parsed: ParsedSkillFile): SkillDefinition => {
  const { systemPrompt, instructions } = parseSkillBody(parsed.body);
  return {
    ...toSkillMetadata(parsed),
    systemPrompt,
    instructions,
    examples: parsed.examples,
    loadedAt: Date.now(),
  };
};
/**
 * Split a skill body into its system prompt and instructions.
 *
 * Looks for `## System Prompt` and `## Instructions` sections; when no
 * instructions section is present, the entire (trimmed) body becomes the
 * instructions and the system prompt defaults to empty.
 */
const parseSkillBody = (body: string): { systemPrompt: string; instructions: string } => {
  const promptMatch = body.match(
    /## System Prompt([\s\S]*?)(?=## Instructions|## Examples|$)/i,
  );
  const instructionsMatch = body.match(
    /## Instructions([\s\S]*?)(?=## Examples|## System Prompt|$)/i,
  );

  return {
    systemPrompt: promptMatch ? promptMatch[1].trim() : "",
    instructions: instructionsMatch ? instructionsMatch[1].trim() : body.trim(),
  };
};
// ============================================================================
// Directory Scanning
// ============================================================================
/**
* Find all SKILL.md files in a directory
*/
export const findSkillFiles = async (dir: string): Promise<string[]> => {
const skillFiles: string[] = [];
try {
const entries = await fs.readdir(dir, { withFileTypes: true });
for (const entry of entries) {
const fullPath = join(dir, entry.name);
if (entry.isDirectory()) {
// Check for SKILL.md in subdirectory
const skillPath = join(fullPath, SKILL_FILE.NAME);
try {
await fs.access(skillPath);
skillFiles.push(skillPath);
} catch {
// No SKILL.md in this directory
}
} else if (entry.name === SKILL_FILE.NAME) {
skillFiles.push(fullPath);
}
}
} catch {
// Directory doesn't exist or isn't accessible
}
return skillFiles;
};
/**
* Load all skills from a directory
*/
export const loadSkillsFromDirectory = async (
dir: string,
level: SkillLoadLevel = "metadata",
): Promise<SkillDefinition[]> => {
const skillFiles = await findSkillFiles(dir);
const skills: SkillDefinition[] = [];
for (const filePath of skillFiles) {
try {
const parsed = await loadSkillFile(filePath);
if (level === "metadata") {
// Only load metadata, but cast to SkillDefinition for uniform handling
const metadata = toSkillMetadata(parsed);
skills.push({
...metadata,
systemPrompt: "",
instructions: "",
});
} else {
skills.push(toSkillDefinition(parsed));
}
} catch (error) {
// Log error but continue loading other skills
console.error(
`Failed to load skill: ${filePath}`,
error instanceof Error ? error.message : error,
);
}
}
return skills;
};
/**
 * Load skills from every skill directory — builtin, user, and (when present)
 * the current project — deduplicating by id with later directories winning.
 */
export const loadAllSkills = async (
  level: SkillLoadLevel = "metadata",
): Promise<SkillDefinition[]> => {
  const dirs = [SKILL_DIRS.BUILTIN, SKILL_DIRS.USER];

  // Include project-local skills only if the directory exists.
  const projectDir = join(process.cwd(), SKILL_DIRS.PROJECT);
  try {
    await fs.access(projectDir);
    dirs.push(projectDir);
  } catch {
    // No project skills directory.
  }

  // Later entries overwrite earlier ones, so project > user > builtin.
  const byId = new Map<string, SkillDefinition>();
  for (const dir of dirs) {
    for (const skill of await loadSkillsFromDirectory(dir, level)) {
      byId.set(skill.id, skill);
    }
  }

  return Array.from(byId.values());
};
/**
 * Load a single skill by id, searching project, then user, then builtin
 * directories (highest precedence first). The skill must live in a directory
 * named after its id, and its frontmatter id must agree.
 *
 * @returns The skill at the requested load level, or null when not found.
 */
export const loadSkillById = async (
  id: string,
  level: SkillLoadLevel = "full",
): Promise<SkillDefinition | null> => {
  const dirs = [SKILL_DIRS.BUILTIN, SKILL_DIRS.USER];
  const projectDir = join(process.cwd(), SKILL_DIRS.PROJECT);
  try {
    await fs.access(projectDir);
    dirs.push(projectDir);
  } catch {
    // No project skills directory.
  }

  // Highest-precedence directory first: project > user > builtin.
  for (const dir of dirs.reverse()) {
    const skillPath = join(dir, id, SKILL_FILE.NAME);
    try {
      await fs.access(skillPath);
      const parsed = await loadSkillFile(skillPath);
      // The directory name must agree with the declared frontmatter id.
      if (parsed.frontmatter.id !== id) continue;
      return level === "metadata"
        ? { ...toSkillMetadata(parsed), systemPrompt: "", instructions: "" }
        : toSkillDefinition(parsed);
    } catch {
      // Not present in this directory — keep searching.
    }
  }

  return null;
};

View File

@@ -0,0 +1,407 @@
/**
* Skill Registry Service
*
* Manages skill registration, matching, and invocation.
* Uses progressive disclosure to load skills on demand.
*/
import {
SKILL_MATCHING,
SKILL_LOADING,
SKILL_ERRORS,
} from "@constants/skills";
import {
loadAllSkills,
loadSkillById,
} from "@services/skill-loader";
import type {
SkillDefinition,
SkillMatch,
SkillContext,
SkillExecutionResult,
SkillRegistryState,
} from "@/types/skills";
// ============================================================================
// State Management
// ============================================================================
// Module-level singleton holding the registered skills keyed by id, the
// timestamp of the last successful load (null until the first load), and any
// error messages accumulated while loading.
let registryState: SkillRegistryState = {
  skills: new Map(),
  lastLoadedAt: null,
  loadErrors: [],
};
/**
 * Get a defensive snapshot of the current registry state; mutating the
 * returned value does not affect the registry itself.
 */
export const getRegistryState = (): SkillRegistryState => {
  return {
    skills: new Map(registryState.skills),
    lastLoadedAt: registryState.lastLoadedAt,
    loadErrors: registryState.loadErrors.slice(),
  };
};
/**
 * True when the registry has never been loaded or its cache TTL has expired.
 */
const isCacheStale = (): boolean =>
  !registryState.lastLoadedAt ||
  Date.now() - registryState.lastLoadedAt > SKILL_LOADING.CACHE_TTL_MS;
// ============================================================================
// Skill Registration
// ============================================================================
/**
 * (Re)initialize the registry by loading metadata for all available skills.
 *
 * The skill map is cleared only after a successful load; on failure the
 * error message is recorded in `loadErrors` instead of throwing.
 */
export const initializeRegistry = async (): Promise<void> => {
  try {
    const loaded = await loadAllSkills("metadata");
    registryState.skills.clear();
    registryState.loadErrors = [];
    loaded.forEach((skill) => registryState.skills.set(skill.id, skill));
    registryState.lastLoadedAt = Date.now();
  } catch (error) {
    registryState.loadErrors.push(
      error instanceof Error ? error.message : String(error),
    );
  }
};
/** Register (or replace) a skill in the registry, keyed by its id. */
export const registerSkill = (skill: SkillDefinition): void => {
  registryState.skills.set(skill.id, skill);
};
/**
 * Remove a skill from the registry.
 *
 * @returns true when the skill existed and was removed.
 */
export const unregisterSkill = (skillId: string): boolean =>
  registryState.skills.delete(skillId);
/** Look up a registered skill by id; undefined when absent. */
export const getSkill = (skillId: string): SkillDefinition | undefined =>
  registryState.skills.get(skillId);
/** List every skill currently held in the registry. */
export const getAllSkills = (): SkillDefinition[] => [
  ...registryState.skills.values(),
];
/** Reload the registry from disk when the cached data has gone stale. */
export const refreshIfNeeded = async (): Promise<void> => {
  if (!isCacheStale()) return;
  await initializeRegistry();
};
// ============================================================================
// Skill Matching
// ============================================================================
/**
 * Score how similar two strings are, in [0, 1].
 *
 * Case-insensitive: 1 for an exact match, 0.8 when one string contains the
 * other, otherwise the fraction of shared whitespace-separated words over
 * the larger word set (0 when none overlap). Note this is word overlap, not
 * an edit-distance metric.
 */
const calculateSimilarity = (a: string, b: string): number => {
  const left = a.toLowerCase();
  const right = b.toLowerCase();

  if (left === right) return 1;
  if (left.includes(right) || right.includes(left)) return 0.8;

  // Fuzzy fallback: ratio of shared words to the larger word set.
  const leftWords = new Set(left.split(/\s+/));
  const rightWords = new Set(right.split(/\s+/));
  let shared = 0;
  for (const word of leftWords) {
    if (rightWords.has(word)) shared++;
  }
  return shared === 0 ? 0 : shared / Math.max(leftWords.size, rightWords.size);
};
/**
 * Test user input against a single trigger pattern.
 *
 * Returns whether the trigger matches plus a confidence score: 1.0 for an
 * exact match, 0.95 for a slash-command prefix match, 0.9 for a plain prefix
 * match, otherwise the fuzzy similarity when it clears the configured
 * threshold.
 */
const matchTrigger = (
  input: string,
  trigger: string,
): { matches: boolean; confidence: number } => {
  const normalizedInput = input.toLowerCase().trim();
  const normalizedTrigger = trigger.toLowerCase().trim();

  // Exact match (command style).
  if (normalizedInput === normalizedTrigger) {
    return { matches: true, confidence: 1.0 };
  }

  // Slash-command triggers match on prefix with slightly higher confidence.
  if (
    trigger.startsWith(SKILL_MATCHING.COMMAND_PREFIX) &&
    normalizedInput.startsWith(normalizedTrigger)
  ) {
    return { matches: true, confidence: 0.95 };
  }

  // Plain prefix match.
  if (normalizedInput.startsWith(normalizedTrigger)) {
    return { matches: true, confidence: 0.9 };
  }

  // Fuzzy word-overlap match above the threshold.
  const similarity = calculateSimilarity(normalizedInput, normalizedTrigger);
  if (similarity >= SKILL_MATCHING.FUZZY_THRESHOLD) {
    return { matches: true, confidence: similarity };
  }

  return { matches: false, confidence: 0 };
};
/**
 * Find every registered skill whose triggers match the given input, sorted
 * by descending confidence. Refreshes the registry first when stale.
 */
export const findMatchingSkills = async (input: string): Promise<SkillMatch[]> => {
  await refreshIfNeeded();

  const normalized = input.toLowerCase().trim();
  const matches: SkillMatch[] = [];

  for (const skill of registryState.skills.values()) {
    // Keep only the strongest trigger per skill.
    let best: { trigger: string; confidence: number } | null = null;
    for (const trigger of skill.triggers) {
      const outcome = matchTrigger(normalized, trigger);
      if (outcome.matches && (!best || outcome.confidence > best.confidence)) {
        best = { trigger, confidence: outcome.confidence };
      }
    }

    if (best && best.confidence >= SKILL_MATCHING.MIN_CONFIDENCE) {
      matches.push({
        skill,
        confidence: best.confidence,
        matchedTrigger: best.trigger,
        matchType: skill.triggerType,
      });
    }
  }

  // Highest confidence first.
  return matches.sort((a, b) => b.confidence - a.confidence);
};
/** Find the single highest-confidence skill match, or null when none match. */
export const findBestMatch = async (input: string): Promise<SkillMatch | null> => {
  const matches = await findMatchingSkills(input);
  return matches[0] ?? null;
};
/** True when the (trimmed) input starts with the command prefix. */
export const isCommandInput = (input: string): boolean =>
  input.trim().startsWith(SKILL_MATCHING.COMMAND_PREFIX);
/**
 * Extract the command name from slash-command input — everything between the
 * prefix character and the first space. Returns null for non-command input.
 */
export const extractCommandName = (input: string): string | null => {
  if (!isCommandInput(input)) return null;

  const trimmed = input.trim();
  const end = trimmed.indexOf(" ");
  // Drop the leading prefix character; stop at the first space if present.
  return end === -1 ? trimmed.slice(1) : trimmed.slice(1, end);
};
// ============================================================================
// Skill Execution
// ============================================================================
/**
 * Ensure a skill's full definition (system prompt + instructions) is loaded,
 * caching it back into the registry.
 *
 * @returns The full definition, or null when the skill cannot be found.
 */
export const loadSkillForExecution = async (
  skillId: string,
): Promise<SkillDefinition | null> => {
  // Reuse a cached entry that already carries its full body.
  const cached = registryState.skills.get(skillId);
  if (cached?.systemPrompt && cached.instructions) {
    return cached;
  }

  // Otherwise load the complete definition from disk and cache it.
  const full = await loadSkillById(skillId, "full");
  if (!full) return null;
  registryState.skills.set(skillId, full);
  return full;
};
/**
 * Assemble the full prompt for a skill invocation: system prompt,
 * instructions, worked examples, and execution context, joined by blank
 * lines.
 */
export const buildSkillPrompt = (
  skill: SkillDefinition,
  context: SkillContext,
): string => {
  const sections: string[] = [];

  if (skill.systemPrompt) {
    sections.push(skill.systemPrompt);
  }
  if (skill.instructions) {
    sections.push("## Instructions\n" + skill.instructions);
  }

  // Worked examples, each with an optional description heading.
  if (skill.examples && skill.examples.length > 0) {
    sections.push("## Examples");
    for (const example of skill.examples) {
      if (example.description) {
        sections.push(`### ${example.description}`);
      }
      sections.push(`Input: ${example.input}`);
      sections.push(`Output: ${example.output}`);
    }
  }

  // Execution context always comes last.
  sections.push("## Context");
  sections.push(`Working directory: ${context.workingDir}`);
  if (context.gitBranch) {
    sections.push(`Git branch: ${context.gitBranch}`);
  }
  sections.push(`User input: ${context.userInput}`);

  return sections.join("\n\n");
};
/**
 * Execute a skill: load its full definition and build its prompt.
 *
 * Never throws — failures are reported via `success: false` plus an `error`
 * message.
 */
export const executeSkill = async (
  skillId: string,
  context: SkillContext,
): Promise<SkillExecutionResult> => {
  try {
    const skill = await loadSkillForExecution(skillId);
    if (!skill) {
      return {
        success: false,
        skillId,
        prompt: "",
        error: SKILL_ERRORS.NOT_FOUND(skillId),
      };
    }

    return {
      success: true,
      skillId,
      prompt: buildSkillPrompt(skill, context),
    };
  } catch (error) {
    const reason = error instanceof Error ? error.message : String(error);
    return {
      success: false,
      skillId,
      prompt: "",
      error: SKILL_ERRORS.EXECUTION_FAILED(skillId, reason),
    };
  }
};
/**
 * Detect the best-matching skill for raw user input and execute it.
 *
 * @returns The execution result, or null when no skill matches.
 */
export const executeFromInput = async (
  input: string,
  context: Omit<SkillContext, "userInput">,
): Promise<SkillExecutionResult | null> => {
  const match = await findBestMatch(input);
  if (!match) return null;
  return executeSkill(match.skill.id, { ...context, userInput: input });
};
// ============================================================================
// Utility Functions
// ============================================================================
/** List skills whose autoTrigger flag is set. */
export const getAutoTriggerSkills = (): SkillDefinition[] =>
  [...registryState.skills.values()].filter((skill) => skill.autoTrigger);
/** List skills carrying the given tag. */
export const getSkillsByTag = (tag: string): SkillDefinition[] =>
  [...registryState.skills.values()].filter((skill) =>
    skill.tags?.includes(tag),
  );
/**
 * Suggest slash-command completions for a partial input, matched by
 * case-insensitive prefix and returned sorted.
 */
export const getCommandCompletions = (partial: string): string[] => {
  const partialLower = partial.toLowerCase();
  const completions: string[] = [];

  for (const skill of registryState.skills.values()) {
    for (const trigger of skill.triggers) {
      if (
        trigger.startsWith(SKILL_MATCHING.COMMAND_PREFIX) &&
        trigger.toLowerCase().startsWith(partialLower)
      ) {
        completions.push(trigger);
      }
    }
  }

  return completions.sort();
};