diff --git a/README.md b/README.md
index 62df09a..2720428 100644
--- a/README.md
+++ b/README.md
@@ -1,8 +1,10 @@
# CodeTyper CLI
-An AI-powered terminal coding agent with an interactive TUI. CodeTyper autonomously executes coding tasks using tool calls with granular permission controls and intelligent provider routing.
+
+
+
-
+An AI-powered terminal coding agent with an interactive TUI. CodeTyper autonomously executes coding tasks using tool calls with granular permission controls and intelligent provider routing.
## How It Works
@@ -85,7 +87,7 @@ Full-screen terminal interface with real-time streaming responses.
- `Enter` - Send message
- `Shift+Enter` - New line
- `/` - Open command menu
-- `Ctrl+Tab` - Toggle interaction mode
+- `Ctrl+M` - Toggle interaction mode
- `Ctrl+T` - Toggle todo panel
- `Shift+Up/Down` - Scroll log panel
- `Ctrl+C` (twice) - Exit
diff --git a/assets/Codetyper_logo.png b/assets/Codetyper_logo.png
new file mode 100644
index 0000000..d79cec4
Binary files /dev/null and b/assets/Codetyper_logo.png differ
diff --git a/assets/ascii-art.txt b/assets/ascii-art.txt
new file mode 100644
index 0000000..0904025
--- /dev/null
+++ b/assets/ascii-art.txt
@@ -0,0 +1,55 @@
+
+
+
+
+
+
+
+
+
+
+
+
+ ######### #########
+ %%%%%%%%%%%%% %%%%%%%%%%%%%
+ %%%##++++*%%%%%######=######=#%####%%%%%*+++=*#%%%
+ %%%*=+++++*%%%#######=######=#######%%%++++++=*%%%
+ %%%*=++++*#%%%%#++##%=#%###%=%##++#%%%%#*++++=*%%%
+ @%%%#*+*#%%%%%%##+*%%=++##++=%%#=##%%%%%%#*+*#%%%@
+ @%%%%%%%%%%%%%%#*+#=#+##*#=#*+#%%%%%%%%%%%%%%@
+ %%%%%%###%%%###+=#*##*#=+#####%#######%%%
+ %%%%%%%*=*#####**##+##*##**######+*###%%%%
+ %%%%%%%%%#%%%%%%%#+*+##**+##%%%%%%%%%##%%%%%
+ %%%%%%%%##%%%%%%#%%+=##+=#%%%%%%%%%%%%%%%%%%
+ %%%###%%%%#%%@%%%%%%#%%%#%%%%%%%%%@%%%%###%%%%%%
+ %%%##%%%%##%%%%##%%%%*+++++#%%%%*%%%%###%%%%%%%%
+ %%%%%%%%%%%%#%%%%%%*=------=+#%%%%%%%%%%%%%%%%%%
+ %%%%%%%%%%####%%%#+=---------=#%%%%%%%##%%#%%%%%
+ %%%%%%%%########=-----------*##%#%%%%###%%%%
+ %%%%%%%#####%%*=---++++++----=%%%%%%####%%%%
+ %%%%%%%####%%*--+*%%%%%%#*--=%%%%#%%##%%%%
+ @%%%%%####%%*--+%%@@%%%%#--=%%#%%%#%%%%%
+ %#%% %%%%%####%%*-===#%%%%%+====%%#%%%%%%%% %%%%
+ %##%%% %%%%%%%%%#====+*%%#+====+%%%%%%%%%@%%%%%%
+ %%%##%@@%%%%%%%%%#+==*%%%##+==##%%%%%%%%@@%%%%%%
+ %%%%%%%%@@@@%%@@@@%%#******##%%@@@@%%@@@%%%%%%%%
+ %%%%###%%%%@@@@@@@@@%++++++%@@@@@@@@@@%%####%#%%
+ %#%%####%%%@@@@@@@@@@%%@@@@@@@@@@%%%##%%%##%
+ %%%%###%%#%%%%@@@@@@@@@@@@@@@@%%%%%%%%%%%%%%
+ %#%##%%##%%%%@@@@@@@@@@@@%%%%%%%%%%%%%
+ %%%#########%%%@@@@@@@@%%%###%%%###%%%
+ %###########%@#*****%%#####%%###%%
+ %%%%%####%%#+==++==+*%%%%###%%%%
+ %%%####%%*======+=+%%%%###%%
+ %%##%%*=+==-=#=+%%##%#%
+ %%%%%+=++===*==%%#%%@
+ %#%========== %%@
+ ======
+
+
+
+
+
+
+
+
diff --git a/src/api/brain/index.ts b/src/api/brain/index.ts
new file mode 100644
index 0000000..4443fc1
--- /dev/null
+++ b/src/api/brain/index.ts
@@ -0,0 +1,367 @@
+/**
+ * Brain API Layer
+ *
+ * Low-level HTTP API calls to the CodeTyper Brain service
+ */
+
+import got from "got";
+import {
+ BRAIN_DEFAULTS,
+ BRAIN_ENDPOINTS,
+ BRAIN_TIMEOUTS,
+ BRAIN_HEADERS,
+} from "@constants/brain";
+import type {
+ BrainHealthResponse,
+ BrainLoginResponse,
+ BrainRegisterResponse,
+ BrainRecallRequest,
+ BrainRecallResponse,
+ BrainLearnConceptRequest,
+ BrainApiResponse,
+ BrainConcept,
+ BrainContextRequest,
+ BrainContextResponse,
+ BrainExtractRequest,
+ BrainExtractResponse,
+ BrainMemorySearchRequest,
+ BrainMemorySearchResponse,
+ BrainStoreMemoryRequest,
+ BrainKnowledgeStats,
+ BrainMemoryStats,
+ BrainUser,
+} from "@/types/brain";
+
+/**
+ * Build request headers with API key
+ */
+const buildHeaders = (
+ apiKey?: string,
+ accessToken?: string,
+): Record => ({
+ [BRAIN_HEADERS.CONTENT_TYPE]: "application/json",
+ ...(apiKey ? { [BRAIN_HEADERS.API_KEY]: apiKey } : {}),
+ ...(accessToken
+ ? { [BRAIN_HEADERS.AUTHORIZATION]: `Bearer ${accessToken}` }
+ : {}),
+});
+
+/**
+ * Get base URL for Brain API
+ */
+const getBaseUrl = (customUrl?: string): string => {
+ return customUrl ?? BRAIN_DEFAULTS.BASE_URL;
+};
+
+// ============================================================================
+// Health Check
+// ============================================================================
+
+/**
+ * Check if Brain service is healthy
+ */
+export const checkHealth = async (
+ baseUrl?: string,
+): Promise => {
+ const url = `${getBaseUrl(baseUrl)}${BRAIN_ENDPOINTS.HEALTH}`;
+ const response = await got
+ .get(url, {
+ timeout: { request: BRAIN_TIMEOUTS.HEALTH },
+ })
+ .json();
+
+ return response;
+};
+
+// ============================================================================
+// Authentication
+// ============================================================================
+
+/**
+ * Register a new user
+ */
+export const register = async (
+ email: string,
+ password: string,
+ displayName: string,
+ baseUrl?: string,
+): Promise => {
+ const url = `${getBaseUrl(baseUrl)}${BRAIN_ENDPOINTS.AUTH_REGISTER}`;
+ const response = await got
+ .post(url, {
+ json: { email, password, display_name: displayName },
+ timeout: { request: BRAIN_TIMEOUTS.AUTH },
+ })
+ .json();
+
+ return response;
+};
+
+/**
+ * Login with email and password
+ */
+export const login = async (
+ email: string,
+ password: string,
+ baseUrl?: string,
+): Promise => {
+ const url = `${getBaseUrl(baseUrl)}${BRAIN_ENDPOINTS.AUTH_LOGIN}`;
+ const response = await got
+ .post(url, {
+ json: { email, password },
+ timeout: { request: BRAIN_TIMEOUTS.AUTH },
+ })
+ .json();
+
+ return response;
+};
+
+/**
+ * Logout (revoke refresh token)
+ */
+export const logout = async (
+ refreshToken: string,
+ baseUrl?: string,
+): Promise => {
+ const url = `${getBaseUrl(baseUrl)}${BRAIN_ENDPOINTS.AUTH_LOGOUT}`;
+ await got.post(url, {
+ json: { refresh_token: refreshToken },
+ timeout: { request: BRAIN_TIMEOUTS.AUTH },
+ });
+};
+
+/**
+ * Refresh access token
+ */
+export const refreshToken = async (
+ refreshTokenValue: string,
+ baseUrl?: string,
+): Promise => {
+ const url = `${getBaseUrl(baseUrl)}${BRAIN_ENDPOINTS.AUTH_REFRESH}`;
+ const response = await got
+ .post(url, {
+ json: { refresh_token: refreshTokenValue },
+ timeout: { request: BRAIN_TIMEOUTS.AUTH },
+ })
+ .json();
+
+ return response;
+};
+
+/**
+ * Get current authenticated user
+ */
+export const getCurrentUser = async (
+ accessToken: string,
+ baseUrl?: string,
+): Promise> => {
+ const url = `${getBaseUrl(baseUrl)}${BRAIN_ENDPOINTS.AUTH_ME}`;
+ const response = await got
+ .get(url, {
+ ...{ headers: buildHeaders(undefined, accessToken) },
+ timeout: { request: BRAIN_TIMEOUTS.AUTH },
+ })
+ .json>();
+
+ return response;
+};
+
+// ============================================================================
+// Knowledge Graph
+// ============================================================================
+
+/**
+ * Recall relevant concepts from the knowledge graph
+ */
+export const recallKnowledge = async (
+ request: BrainRecallRequest,
+ apiKey: string,
+ baseUrl?: string,
+): Promise => {
+ const url = `${getBaseUrl(baseUrl)}${BRAIN_ENDPOINTS.KNOWLEDGE_RECALL}`;
+ const response = await got
+ .post(url, {
+ ...{ headers: buildHeaders(apiKey) },
+ json: request,
+ timeout: { request: BRAIN_TIMEOUTS.KNOWLEDGE },
+ })
+ .json();
+
+ return response;
+};
+
+/**
+ * Learn/store a concept in the knowledge graph
+ */
+export const learnConcept = async (
+ request: BrainLearnConceptRequest,
+ apiKey: string,
+ baseUrl?: string,
+): Promise> => {
+ const url = `${getBaseUrl(baseUrl)}${BRAIN_ENDPOINTS.KNOWLEDGE_LEARN}`;
+ const response = await got
+ .post(url, {
+ ...{ headers: buildHeaders(apiKey) },
+ json: request,
+ timeout: { request: BRAIN_TIMEOUTS.KNOWLEDGE },
+ })
+ .json>();
+
+ return response;
+};
+
+/**
+ * Build context string for prompt injection
+ */
+export const buildContext = async (
+ request: BrainContextRequest,
+ apiKey: string,
+ baseUrl?: string,
+): Promise => {
+ const url = `${getBaseUrl(baseUrl)}${BRAIN_ENDPOINTS.KNOWLEDGE_CONTEXT}`;
+ const response = await got
+ .post(url, {
+ ...{ headers: buildHeaders(apiKey) },
+ json: request,
+ timeout: { request: BRAIN_TIMEOUTS.KNOWLEDGE },
+ })
+ .json();
+
+ return response;
+};
+
+/**
+ * Extract concepts from text content
+ */
+export const extractConcepts = async (
+ request: BrainExtractRequest,
+ apiKey: string,
+ baseUrl?: string,
+): Promise => {
+ const url = `${getBaseUrl(baseUrl)}${BRAIN_ENDPOINTS.KNOWLEDGE_EXTRACT}`;
+ const response = await got
+ .post(url, {
+ ...{ headers: buildHeaders(apiKey) },
+ json: request,
+ timeout: { request: BRAIN_TIMEOUTS.EXTRACT },
+ })
+ .json();
+
+ return response;
+};
+
+/**
+ * Get knowledge stats for a project
+ */
+export const getKnowledgeStats = async (
+ projectId: number,
+ apiKey: string,
+ baseUrl?: string,
+): Promise> => {
+ const url = `${getBaseUrl(baseUrl)}${BRAIN_ENDPOINTS.KNOWLEDGE_STATS}?project_id=${projectId}`;
+ const response = await got
+ .get(url, {
+ ...{ headers: buildHeaders(apiKey) },
+ timeout: { request: BRAIN_TIMEOUTS.KNOWLEDGE },
+ })
+ .json>();
+
+ return response;
+};
+
+/**
+ * List all concepts for a project
+ */
+export const listConcepts = async (
+ projectId: number,
+ apiKey: string,
+ baseUrl?: string,
+): Promise> => {
+ const url = `${getBaseUrl(baseUrl)}${BRAIN_ENDPOINTS.KNOWLEDGE_CONCEPTS}?project_id=${projectId}`;
+ const response = await got
+ .get(url, {
+ ...{ headers: buildHeaders(apiKey) },
+ timeout: { request: BRAIN_TIMEOUTS.KNOWLEDGE },
+ })
+ .json>();
+
+ return response;
+};
+
+// ============================================================================
+// Memory
+// ============================================================================
+
+/**
+ * Search for relevant memories
+ */
+export const searchMemories = async (
+ request: BrainMemorySearchRequest,
+ apiKey: string,
+ baseUrl?: string,
+): Promise => {
+ const url = `${getBaseUrl(baseUrl)}${BRAIN_ENDPOINTS.MEMORY_SEARCH}`;
+ const response = await got
+ .post(url, {
+ ...{ headers: buildHeaders(apiKey) },
+ json: request,
+ timeout: { request: BRAIN_TIMEOUTS.MEMORY },
+ })
+ .json();
+
+ return response;
+};
+
+/**
+ * Store a memory
+ */
+export const storeMemory = async (
+ request: BrainStoreMemoryRequest,
+ apiKey: string,
+ baseUrl?: string,
+): Promise> => {
+ const url = `${getBaseUrl(baseUrl)}${BRAIN_ENDPOINTS.MEMORY_STORE}`;
+ const response = await got
+ .post(url, {
+ ...{ headers: buildHeaders(apiKey) },
+ json: request,
+ timeout: { request: BRAIN_TIMEOUTS.MEMORY },
+ })
+ .json>();
+
+ return response;
+};
+
+/**
+ * Get memory stats
+ */
+export const getMemoryStats = async (
+ apiKey: string,
+ baseUrl?: string,
+): Promise => {
+ const url = `${getBaseUrl(baseUrl)}${BRAIN_ENDPOINTS.MEMORY_STATS}`;
+ const response = await got
+ .get(url, {
+ ...{ headers: buildHeaders(apiKey) },
+ timeout: { request: BRAIN_TIMEOUTS.MEMORY },
+ })
+ .json();
+
+ return response;
+};
+
+/**
+ * Check memory status
+ */
+export const checkMemoryStatus = async (
+ baseUrl?: string,
+): Promise> => {
+ const url = `${getBaseUrl(baseUrl)}${BRAIN_ENDPOINTS.MEMORY_STATUS}`;
+ const response = await got
+ .get(url, {
+ timeout: { request: BRAIN_TIMEOUTS.MEMORY },
+ })
+ .json>();
+
+ return response;
+};
diff --git a/src/api/index.ts b/src/api/index.ts
index cb93198..e92078a 100644
--- a/src/api/index.ts
+++ b/src/api/index.ts
@@ -7,3 +7,4 @@
export * as copilotApi from "@api/copilot";
export * as ollamaApi from "@api/ollama";
+export * as brainApi from "@api/brain";
diff --git a/src/commands/components/execute/execute.tsx b/src/commands/components/execute/execute.tsx
index 3c30ac4..a81b07d 100644
--- a/src/commands/components/execute/execute.tsx
+++ b/src/commands/components/execute/execute.tsx
@@ -1,6 +1,7 @@
import { tui, appStore } from "@tui/index";
import { getProviderInfo } from "@services/chat-tui-service";
import { addServer, connectServer } from "@services/mcp/index";
+import * as brainService from "@services/brain";
import type { ChatServiceState } from "@services/chat-tui-service";
import type { AgentConfig } from "@/types/agent-config";
import type { PermissionScope, LearningScope } from "@/types/tui";
@@ -32,6 +33,9 @@ export interface RenderAppProps {
scope?: LearningScope,
editedContent?: string,
) => void;
+ handleBrainSetJwtToken?: (jwtToken: string) => Promise;
+ handleBrainSetApiKey?: (apiKey: string) => Promise;
+ handleBrainLogout?: () => Promise;
handleExit: () => void;
showBanner: boolean;
state: ChatServiceState;
@@ -65,6 +69,42 @@ const defaultHandleMCPAdd = async (data: MCPAddFormData): Promise => {
await connectServer(data.name);
};
+const defaultHandleBrainSetJwtToken = async (jwtToken: string): Promise => {
+ await brainService.setJwtToken(jwtToken);
+ const connected = await brainService.connect();
+ if (connected) {
+ const state = brainService.getState();
+ appStore.setBrainStatus("connected");
+ appStore.setBrainUser(state.user);
+ appStore.setBrainCounts(state.knowledgeCount, state.memoryCount);
+ appStore.setBrainShowBanner(false);
+ } else {
+ throw new Error("Failed to connect with the provided JWT token.");
+ }
+};
+
+const defaultHandleBrainSetApiKey = async (apiKey: string): Promise => {
+ await brainService.setApiKey(apiKey);
+ const connected = await brainService.connect();
+ if (connected) {
+ const state = brainService.getState();
+ appStore.setBrainStatus("connected");
+ appStore.setBrainUser(state.user);
+ appStore.setBrainCounts(state.knowledgeCount, state.memoryCount);
+ appStore.setBrainShowBanner(false);
+ } else {
+ throw new Error("Failed to connect with the provided API key.");
+ }
+};
+
+const defaultHandleBrainLogout = async (): Promise => {
+ await brainService.logout();
+ appStore.setBrainStatus("disconnected");
+ appStore.setBrainUser(null);
+ appStore.setBrainCounts(0, 0);
+ appStore.setBrainShowBanner(true);
+};
+
export const renderApp = async (props: RenderAppProps): Promise => {
const { displayName, model: defaultModel } = getProviderInfo(
props.state.provider,
@@ -95,6 +135,9 @@ export const renderApp = async (props: RenderAppProps): Promise => {
onMCPAdd: props.handleMCPAdd ?? defaultHandleMCPAdd,
onPermissionResponse: props.handlePermissionResponse ?? (() => {}),
onLearningResponse: props.handleLearningResponse ?? (() => {}),
+ onBrainSetJwtToken: props.handleBrainSetJwtToken ?? defaultHandleBrainSetJwtToken,
+ onBrainSetApiKey: props.handleBrainSetApiKey ?? defaultHandleBrainSetApiKey,
+ onBrainLogout: props.handleBrainLogout ?? defaultHandleBrainLogout,
plan: props.plan,
});
diff --git a/src/constants/agent-definition.ts b/src/constants/agent-definition.ts
new file mode 100644
index 0000000..85489bb
--- /dev/null
+++ b/src/constants/agent-definition.ts
@@ -0,0 +1,66 @@
+/**
+ * Agent definition constants
+ */
+
+export const AGENT_DEFINITION = {
+ FILE_EXTENSION: ".md",
+ DIRECTORY_NAME: "agents",
+ FRONTMATTER_DELIMITER: "---",
+ MAX_NAME_LENGTH: 50,
+ MAX_DESCRIPTION_LENGTH: 500,
+ MAX_TOOLS: 20,
+ MAX_TRIGGER_PHRASES: 10,
+} as const;
+
+export const AGENT_DEFINITION_PATHS = {
+ PROJECT: ".codetyper/agents",
+ GLOBAL: "~/.config/codetyper/agents",
+ BUILTIN: "src/agents",
+} as const;
+
+export const AGENT_DEFAULT_TOOLS = {
+ EXPLORE: ["read", "glob", "grep"],
+ PLAN: ["read", "glob", "grep", "web_search"],
+ CODE: ["read", "write", "edit", "glob", "grep", "bash"],
+ REVIEW: ["read", "glob", "grep", "lsp"],
+ BASH: ["bash", "read"],
+} as const;
+
+export const AGENT_COLORS = {
+ RED: "\x1b[31m",
+ GREEN: "\x1b[32m",
+ BLUE: "\x1b[34m",
+ YELLOW: "\x1b[33m",
+ CYAN: "\x1b[36m",
+ MAGENTA: "\x1b[35m",
+ WHITE: "\x1b[37m",
+ GRAY: "\x1b[90m",
+ RESET: "\x1b[0m",
+} as const;
+
+export const AGENT_TIER_CONFIG = {
+ fast: {
+ model: "gpt-4o-mini",
+ maxTurns: 5,
+ timeout: 30000,
+ },
+ balanced: {
+ model: "gpt-4o",
+ maxTurns: 10,
+ timeout: 60000,
+ },
+ thorough: {
+ model: "o1",
+ maxTurns: 20,
+ timeout: 120000,
+ },
+} as const;
+
+export const AGENT_MESSAGES = {
+ LOADING: "Loading agent definitions...",
+ LOADED: "Agent definitions loaded",
+ NOT_FOUND: "Agent definition not found",
+ INVALID_FRONTMATTER: "Invalid YAML frontmatter",
+ MISSING_REQUIRED: "Missing required field",
+ INVALID_TOOL: "Invalid tool specified",
+} as const;
diff --git a/src/constants/apply-patch.ts b/src/constants/apply-patch.ts
new file mode 100644
index 0000000..a1f54aa
--- /dev/null
+++ b/src/constants/apply-patch.ts
@@ -0,0 +1,100 @@
+/**
+ * Apply Patch Constants
+ *
+ * Configuration for unified diff parsing and application.
+ */
+
+/**
+ * Default configuration for patch application
+ */
+export const PATCH_DEFAULTS = {
+ FUZZ: 2,
+ MAX_FUZZ: 3,
+ IGNORE_WHITESPACE: false,
+ IGNORE_CASE: false,
+ CONTEXT_LINES: 3,
+} as const;
+
+/**
+ * Patch file patterns
+ */
+export const PATCH_PATTERNS = {
+ HUNK_HEADER: /^@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))? @@(.*)$/,
+ FILE_HEADER_OLD: /^--- (.+?)(?:\t.*)?$/,
+ FILE_HEADER_NEW: /^\+\+\+ (.+?)(?:\t.*)?$/,
+ GIT_DIFF: /^diff --git a\/(.+) b\/(.+)$/,
+ INDEX_LINE: /^index [a-f0-9]+\.\.[a-f0-9]+(?: \d+)?$/,
+ BINARY_FILE: /^Binary files .+ differ$/,
+ NEW_FILE: /^new file mode \d+$/,
+ DELETED_FILE: /^deleted file mode \d+$/,
+ RENAME_FROM: /^rename from (.+)$/,
+ RENAME_TO: /^rename to (.+)$/,
+ NO_NEWLINE: /^\\ No newline at end of file$/,
+} as const;
+
+/**
+ * Line type prefixes
+ */
+export const LINE_PREFIXES = {
+ CONTEXT: " ",
+ ADDITION: "+",
+ DELETION: "-",
+} as const;
+
+/**
+ * Error messages
+ */
+export const PATCH_ERRORS = {
+ INVALID_PATCH: "Invalid patch format",
+ PARSE_FAILED: (detail: string) => `Failed to parse patch: ${detail}`,
+ HUNK_FAILED: (index: number, reason: string) =>
+ `Hunk #${index + 1} failed: ${reason}`,
+ FILE_NOT_FOUND: (path: string) => `Target file not found: ${path}`,
+ CONTEXT_MISMATCH: (line: number) =>
+ `Context mismatch at line ${line}`,
+ FUZZY_MATCH_FAILED: (hunk: number) =>
+ `Could not find match for hunk #${hunk + 1} even with fuzzy matching`,
+ ALREADY_APPLIED: "Patch appears to be already applied",
+ REVERSED_PATCH: "Patch appears to be reversed",
+ BINARY_NOT_SUPPORTED: "Binary patches are not supported",
+ WRITE_FAILED: (path: string, error: string) =>
+ `Failed to write patched file ${path}: ${error}`,
+} as const;
+
+/**
+ * Success messages
+ */
+export const PATCH_MESSAGES = {
+ PARSING: "Parsing patch...",
+ APPLYING: (file: string) => `Applying patch to ${file}`,
+ APPLIED: (files: number, hunks: number) =>
+ `Successfully applied ${hunks} hunk(s) to ${files} file(s)`,
+ DRY_RUN: (files: number, hunks: number) =>
+ `Dry run: ${hunks} hunk(s) would be applied to ${files} file(s)`,
+ FUZZY_APPLIED: (hunk: number, offset: number) =>
+ `Hunk #${hunk + 1} applied with fuzzy offset of ${offset}`,
+ ROLLBACK_AVAILABLE: "Rollback is available if needed",
+ SKIPPED_BINARY: (file: string) => `Skipped binary file: ${file}`,
+} as const;
+
+/**
+ * Tool titles
+ */
+export const PATCH_TITLES = {
+ APPLYING: (file: string) => `Patching: ${file}`,
+ SUCCESS: (files: number) => `Patched ${files} file(s)`,
+ PARTIAL: (success: number, failed: number) =>
+ `Partial success: ${success} patched, ${failed} failed`,
+ FAILED: "Patch failed",
+ DRY_RUN: "Patch dry run",
+ VALIDATING: "Validating patch",
+} as const;
+
+/**
+ * Special path values
+ */
+export const SPECIAL_PATHS = {
+ DEV_NULL: "/dev/null",
+ A_PREFIX: "a/",
+ B_PREFIX: "b/",
+} as const;
diff --git a/src/constants/background-task.ts b/src/constants/background-task.ts
new file mode 100644
index 0000000..b7ef70c
--- /dev/null
+++ b/src/constants/background-task.ts
@@ -0,0 +1,62 @@
+/**
+ * Background task constants
+ */
+
+export const BACKGROUND_TASK = {
+ MAX_CONCURRENT: 3,
+ DEFAULT_TIMEOUT: 300000, // 5 minutes
+ MAX_TIMEOUT: 3600000, // 1 hour
+ POLL_INTERVAL: 1000, // 1 second
+ MAX_RETRIES: 3,
+ RETRY_DELAY: 5000, // 5 seconds
+ HISTORY_LIMIT: 100,
+} as const;
+
+export const BACKGROUND_TASK_STORAGE = {
+ DIRECTORY: ".codetyper/tasks",
+ FILE_EXTENSION: ".json",
+ MAX_FILE_SIZE: 10 * 1024 * 1024, // 10MB
+} as const;
+
+export const BACKGROUND_TASK_SHORTCUTS = {
+ START: "ctrl+b",
+ LIST: "ctrl+shift+b",
+ CANCEL: "ctrl+shift+c",
+ PAUSE: "ctrl+shift+p",
+ RESUME: "ctrl+shift+r",
+} as const;
+
+export const BACKGROUND_TASK_COMMANDS = {
+ START: "/background",
+ LIST: "/tasks",
+ CANCEL: "/task cancel",
+ STATUS: "/task status",
+ CLEAR: "/task clear",
+} as const;
+
+export const BACKGROUND_TASK_STATUS_ICONS = {
+ pending: "\u23F3", // hourglass
+ running: "\u25B6", // play
+ paused: "\u23F8", // pause
+ completed: "\u2705", // check
+ failed: "\u274C", // cross
+ cancelled: "\u23F9", // stop
+} as const;
+
+export const BACKGROUND_TASK_MESSAGES = {
+ STARTED: "Task started in background",
+ COMPLETED: "Background task completed",
+ FAILED: "Background task failed",
+ CANCELLED: "Background task cancelled",
+ PAUSED: "Background task paused",
+ RESUMED: "Background task resumed",
+ QUEUE_FULL: "Task queue is full",
+ NOT_FOUND: "Task not found",
+ ALREADY_RUNNING: "Task is already running",
+} as const;
+
+export const BACKGROUND_TASK_NOTIFICATIONS = {
+ SOUND_ENABLED: true,
+ DESKTOP_ENABLED: true,
+ INLINE_ENABLED: true,
+} as const;
diff --git a/src/constants/brain-cloud.ts b/src/constants/brain-cloud.ts
new file mode 100644
index 0000000..d3984c1
--- /dev/null
+++ b/src/constants/brain-cloud.ts
@@ -0,0 +1,107 @@
+/**
+ * Brain Cloud Sync Constants
+ *
+ * Configuration for cloud synchronization of brain data.
+ */
+
+import type { CloudBrainConfig } from "@/types/brain-cloud";
+
+/**
+ * Default cloud configuration
+ */
+export const CLOUD_BRAIN_DEFAULTS: CloudBrainConfig = {
+ enabled: false,
+ endpoint: "https://brain.codetyper.dev/api/v1",
+ syncOnSessionEnd: true,
+ syncInterval: 300000, // 5 minutes
+ conflictStrategy: "local-wins",
+ retryAttempts: 3,
+ retryDelay: 1000,
+} as const;
+
+/**
+ * Cloud API endpoints
+ */
+export const CLOUD_ENDPOINTS = {
+ PUSH: "/sync/push",
+ PULL: "/sync/pull",
+ STATUS: "/sync/status",
+ CONFLICTS: "/sync/conflicts",
+ RESOLVE: "/sync/resolve",
+ HEALTH: "/health",
+} as const;
+
+/**
+ * Sync configuration
+ */
+export const SYNC_CONFIG = {
+ MAX_BATCH_SIZE: 100,
+ MAX_QUEUE_SIZE: 1000,
+ STALE_ITEM_AGE_MS: 86400000, // 24 hours
+ VERSION_KEY: "brain_sync_version",
+ QUEUE_KEY: "brain_offline_queue",
+} as const;
+
+/**
+ * Error messages
+ */
+export const CLOUD_ERRORS = {
+ NOT_CONFIGURED: "Cloud sync is not configured",
+ OFFLINE: "Device is offline",
+ SYNC_IN_PROGRESS: "Sync already in progress",
+ PUSH_FAILED: (error: string) => `Push failed: ${error}`,
+ PULL_FAILED: (error: string) => `Pull failed: ${error}`,
+ CONFLICT_UNRESOLVED: (count: number) =>
+ `${count} conflict(s) require manual resolution`,
+ QUEUE_FULL: "Offline queue is full",
+ VERSION_MISMATCH: "Version mismatch - full sync required",
+ AUTH_REQUIRED: "Authentication required for cloud sync",
+ INVALID_RESPONSE: "Invalid response from server",
+} as const;
+
+/**
+ * Status messages
+ */
+export const CLOUD_MESSAGES = {
+ STARTING_SYNC: "Starting cloud sync...",
+ PUSHING: (count: number) => `Pushing ${count} change(s)...`,
+ PULLING: (count: number) => `Pulling ${count} change(s)...`,
+ RESOLVING_CONFLICTS: (count: number) => `Resolving ${count} conflict(s)...`,
+ SYNC_COMPLETE: "Cloud sync complete",
+ SYNC_SKIPPED: "No changes to sync",
+ QUEUED_OFFLINE: (count: number) => `Queued ${count} change(s) for later sync`,
+ RETRYING: (attempt: number, max: number) =>
+ `Retrying sync (${attempt}/${max})...`,
+} as const;
+
+/**
+ * Titles for UI
+ */
+export const CLOUD_TITLES = {
+ SYNCING: "Syncing with cloud",
+ SYNCED: "Cloud sync complete",
+ OFFLINE: "Offline - changes queued",
+ CONFLICT: "Sync conflicts",
+ ERROR: "Sync failed",
+} as const;
+
+/**
+ * Conflict resolution labels
+ */
+export const CONFLICT_LABELS = {
+ "local-wins": "Keep local version",
+ "remote-wins": "Use remote version",
+ manual: "Resolve manually",
+ merge: "Attempt to merge",
+} as const;
+
+/**
+ * HTTP request configuration
+ */
+export const CLOUD_HTTP_CONFIG = {
+ TIMEOUT_MS: 30000,
+ HEADERS: {
+ "Content-Type": "application/json",
+ "X-Client": "codetyper-cli",
+ },
+} as const;
diff --git a/src/constants/brain-mcp.ts b/src/constants/brain-mcp.ts
new file mode 100644
index 0000000..7bf5a76
--- /dev/null
+++ b/src/constants/brain-mcp.ts
@@ -0,0 +1,75 @@
+/**
+ * Brain MCP Server constants
+ */
+
+export const BRAIN_MCP_SERVER = {
+ DEFAULT_PORT: 5002,
+ DEFAULT_HOST: "localhost",
+ REQUEST_TIMEOUT: 30000,
+ MAX_CONNECTIONS: 100,
+ HEARTBEAT_INTERVAL: 30000,
+} as const;
+
+export const BRAIN_MCP_RATE_LIMIT = {
+ ENABLED: true,
+ MAX_REQUESTS: 100,
+ WINDOW_MS: 60000, // 1 minute
+ BLOCK_DURATION: 300000, // 5 minutes
+} as const;
+
+export const BRAIN_MCP_AUTH = {
+ HEADER: "X-Brain-API-Key",
+ TOKEN_PREFIX: "Bearer",
+ SESSION_DURATION: 3600000, // 1 hour
+} as const;
+
+export const BRAIN_MCP_COMMANDS = {
+ START: "/brain mcp start",
+ STOP: "/brain mcp stop",
+ STATUS: "/brain mcp status",
+ LOGS: "/brain mcp logs",
+ CONFIG: "/brain mcp config",
+} as const;
+
+export const BRAIN_MCP_TOOL_NAMES = {
+ RECALL: "brain_recall",
+ LEARN: "brain_learn",
+ SEARCH: "brain_search",
+ RELATE: "brain_relate",
+ CONTEXT: "brain_context",
+ STATS: "brain_stats",
+ PROJECTS: "brain_projects",
+} as const;
+
+export const BRAIN_MCP_MESSAGES = {
+ SERVER_STARTED: "Brain MCP server started",
+ SERVER_STOPPED: "Brain MCP server stopped",
+ SERVER_ALREADY_RUNNING: "Brain MCP server is already running",
+ SERVER_NOT_RUNNING: "Brain MCP server is not running",
+ CLIENT_CONNECTED: "MCP client connected",
+ CLIENT_DISCONNECTED: "MCP client disconnected",
+ TOOL_EXECUTED: "Tool executed successfully",
+ TOOL_FAILED: "Tool execution failed",
+ UNAUTHORIZED: "Unauthorized request",
+ RATE_LIMITED: "Rate limit exceeded",
+ INVALID_REQUEST: "Invalid MCP request",
+} as const;
+
+export const BRAIN_MCP_ERRORS = {
+ PARSE_ERROR: { code: -32700, message: "Parse error" },
+ INVALID_REQUEST: { code: -32600, message: "Invalid request" },
+ METHOD_NOT_FOUND: { code: -32601, message: "Method not found" },
+ INVALID_PARAMS: { code: -32602, message: "Invalid params" },
+ INTERNAL_ERROR: { code: -32603, message: "Internal error" },
+ TOOL_NOT_FOUND: { code: -32001, message: "Tool not found" },
+ UNAUTHORIZED: { code: -32002, message: "Unauthorized" },
+ RATE_LIMITED: { code: -32003, message: "Rate limited" },
+ BRAIN_UNAVAILABLE: { code: -32004, message: "Brain service unavailable" },
+} as const;
+
+export const BRAIN_MCP_LOG_LEVELS = {
+ DEBUG: 0,
+ INFO: 1,
+ WARN: 2,
+ ERROR: 3,
+} as const;
diff --git a/src/constants/brain-project.ts b/src/constants/brain-project.ts
new file mode 100644
index 0000000..56cedda
--- /dev/null
+++ b/src/constants/brain-project.ts
@@ -0,0 +1,69 @@
+/**
+ * Multi-project Brain constants
+ */
+
+export const BRAIN_PROJECT = {
+ MAX_PROJECTS: 100,
+ NAME_MIN_LENGTH: 2,
+ NAME_MAX_LENGTH: 100,
+ DESCRIPTION_MAX_LENGTH: 500,
+ DEFAULT_RECALL_LIMIT: 5,
+ DEFAULT_SYNC_INTERVAL: 30, // minutes
+} as const;
+
+export const BRAIN_PROJECT_STORAGE = {
+ CONFIG_FILE: "brain-projects.json",
+ EXPORT_EXTENSION: ".brain-export.json",
+ BACKUP_EXTENSION: ".brain-backup.json",
+} as const;
+
+export const BRAIN_PROJECT_PATHS = {
+ LOCAL: ".codetyper/brain",
+ GLOBAL: "~/.local/share/codetyper/brain",
+ EXPORTS: "~/.local/share/codetyper/brain/exports",
+ BACKUPS: "~/.local/share/codetyper/brain/backups",
+} as const;
+
+export const BRAIN_PROJECT_COMMANDS = {
+ LIST: "/brain projects",
+ CREATE: "/brain project create",
+ SWITCH: "/brain project switch",
+ DELETE: "/brain project delete",
+ EXPORT: "/brain project export",
+ IMPORT: "/brain project import",
+ SYNC: "/brain project sync",
+} as const;
+
+export const BRAIN_PROJECT_API = {
+ LIST: "/api/projects",
+ CREATE: "/api/projects",
+ GET: "/api/projects/:id",
+ UPDATE: "/api/projects/:id",
+ DELETE: "/api/projects/:id",
+ SWITCH: "/api/projects/:id/switch",
+ EXPORT: "/api/projects/:id/export",
+ IMPORT: "/api/projects/import",
+ SYNC: "/api/projects/:id/sync",
+} as const;
+
+export const BRAIN_PROJECT_MESSAGES = {
+ CREATED: "Brain project created successfully",
+ SWITCHED: "Switched to project",
+ DELETED: "Brain project deleted",
+ EXPORTED: "Brain project exported",
+ IMPORTED: "Brain project imported",
+ SYNCED: "Brain project synced",
+ NOT_FOUND: "Brain project not found",
+ ALREADY_EXISTS: "Project with this name already exists",
+ INVALID_NAME: "Invalid project name",
+ SWITCH_FAILED: "Failed to switch project",
+ EXPORT_FAILED: "Failed to export project",
+ IMPORT_FAILED: "Failed to import project",
+} as const;
+
+export const BRAIN_PROJECT_DEFAULTS = {
+ AUTO_LEARN: true,
+ AUTO_RECALL: true,
+ CONTEXT_INJECTION: true,
+ SYNC_ENABLED: false,
+} as const;
diff --git a/src/constants/brain.ts b/src/constants/brain.ts
new file mode 100644
index 0000000..5fdb071
--- /dev/null
+++ b/src/constants/brain.ts
@@ -0,0 +1,94 @@
+/**
+ * Brain API Constants
+ *
+ * Configuration constants for the CodeTyper Brain service
+ */
+
+/**
+ * Feature flag to disable all Brain functionality.
+ * Set to true to hide Brain menu, disable Brain API calls,
+ * and remove Brain-related UI elements.
+ */
+export const BRAIN_DISABLED = true;
+
+export const BRAIN_PROVIDER_NAME = "brain" as const;
+export const BRAIN_DISPLAY_NAME = "CodeTyper Brain";
+
+export const BRAIN_DEFAULTS = {
+ BASE_URL: "http://localhost:5001",
+ PROJECT_ID: 1,
+} as const;
+
+export const BRAIN_ENDPOINTS = {
+ // Health
+ HEALTH: "/",
+
+ // Authentication
+ AUTH_REGISTER: "/auth/register",
+ AUTH_LOGIN: "/auth/login",
+ AUTH_LOGOUT: "/auth/logout",
+ AUTH_REFRESH: "/auth/refresh",
+ AUTH_ME: "/auth/me",
+
+ // Knowledge Graph
+ KNOWLEDGE_LEARN: "/api/knowledge/learn",
+ KNOWLEDGE_RECALL: "/api/knowledge/recall",
+ KNOWLEDGE_RELATE: "/api/knowledge/relate",
+ KNOWLEDGE_EXTRACT: "/api/knowledge/extract",
+ KNOWLEDGE_CONTEXT: "/api/knowledge/context",
+ KNOWLEDGE_CONCEPTS: "/api/knowledge/concepts",
+ KNOWLEDGE_STATS: "/api/knowledge/stats",
+
+ // Memory
+ MEMORY_STATUS: "/api/memory/status",
+ MEMORY_STATS: "/api/memory/stats",
+ MEMORY_SEARCH: "/api/memory/search",
+ MEMORY_STORE: "/api/memory/store",
+ MEMORY_TOP: "/api/memory/top",
+ MEMORY_FEEDBACK: "/api/memory/feedback",
+
+ // GraphQL (unified endpoint)
+ GRAPHQL: "/graphql",
+} as const;
+
+export const BRAIN_TIMEOUTS = {
+ HEALTH: 3000,
+ AUTH: 10000,
+ KNOWLEDGE: 15000,
+ MEMORY: 10000,
+ EXTRACT: 30000,
+} as const;
+
+export const BRAIN_ERRORS = {
+ NOT_RUNNING: "Brain service not available. Start the API server at localhost:5001",
+ NOT_AUTHENTICATED: "Not authenticated. Please login or set an API key.",
+ INVALID_API_KEY: "Invalid API key. Please check your credentials.",
+ CONNECTION_FAILED: "Failed to connect to Brain service.",
+ RECALL_FAILED: "Failed to recall knowledge from Brain.",
+ LEARN_FAILED: "Failed to store knowledge in Brain.",
+ EXTRACT_FAILED: "Failed to extract concepts from content.",
+} as const;
+
+export const BRAIN_MESSAGES = {
+ CONNECTED: "Brain connected",
+ CONNECTING: "Connecting to Brain...",
+ DISCONNECTED: "Brain disconnected",
+ LEARNING: "Learning concept...",
+ RECALLING: "Recalling knowledge...",
+ EXTRACTING: "Extracting concepts...",
+} as const;
+
+export const BRAIN_BANNER = {
+ TITLE: "CodeTyper has a Brain!",
+ CTA: "Login and get an API key to enable long-term memory",
+ URL: "http://localhost:5001",
+ LOGIN_URL: "http://localhost:5173/docs/login",
+ EMOJI_CONNECTED: "š§ ",
+ EMOJI_DISCONNECTED: "š¤",
+} as const;
+
+export const BRAIN_HEADERS = {
+ API_KEY: "api-key",
+ AUTHORIZATION: "Authorization",
+ CONTENT_TYPE: "Content-Type",
+} as const;
diff --git a/src/constants/confidence-filter.ts b/src/constants/confidence-filter.ts
new file mode 100644
index 0000000..18d9000
--- /dev/null
+++ b/src/constants/confidence-filter.ts
@@ -0,0 +1,33 @@
+/**
+ * Confidence filtering constants
+ */
+
+export const CONFIDENCE_FILTER = {
+ DEFAULT_THRESHOLD: 80,
+ MIN_THRESHOLD: 0,
+ MAX_THRESHOLD: 100,
+ VALIDATION_TIMEOUT: 30000,
+ MAX_BATCH_SIZE: 50,
+} as const;
+
+export const CONFIDENCE_WEIGHTS = {
+ PATTERN_MATCH: 0.3,
+ CONTEXT_RELEVANCE: 0.25,
+ SEVERITY_LEVEL: 0.2,
+ CODE_ANALYSIS: 0.15,
+ HISTORICAL_ACCURACY: 0.1,
+} as const;
+
+export const CONFIDENCE_MESSAGES = {
+ BELOW_THRESHOLD: "Filtered out due to low confidence",
+ VALIDATION_FAILED: "Confidence adjusted after validation",
+ VALIDATION_PASSED: "Confidence validated successfully",
+ NO_FACTORS: "No confidence factors available",
+} as const;
+
+export const CONFIDENCE_COLORS = {
+ LOW: "#808080",
+ MEDIUM: "#FFA500",
+ HIGH: "#00FF00",
+ CRITICAL: "#FF0000",
+} as const;
diff --git a/src/constants/feature-dev.ts b/src/constants/feature-dev.ts
new file mode 100644
index 0000000..8a5e7c2
--- /dev/null
+++ b/src/constants/feature-dev.ts
@@ -0,0 +1,275 @@
+/**
+ * Feature-Dev Workflow Constants
+ *
+ * Configuration and prompts for the 7-phase development workflow.
+ */
+
+import type { FeatureDevPhase, FeatureDevConfig } from "@/types/feature-dev";
+
+/**
+ * Default workflow configuration
+ */
+export const FEATURE_DEV_CONFIG: FeatureDevConfig = {
+ requireCheckpoints: true,
+ autoRunTests: true,
+ autoCommit: false,
+ maxExplorationDepth: 3,
+ parallelExplorations: 3,
+} as const;
+
+/**
+ * Phase order for workflow progression
+ */
+export const PHASE_ORDER: FeatureDevPhase[] = [
+ "understand",
+ "explore",
+ "plan",
+ "implement",
+ "verify",
+ "review",
+ "finalize",
+] as const;
+
+/**
+ * Phase descriptions
+ */
+export const PHASE_DESCRIPTIONS: Record = {
+ understand: "Clarify requirements and gather context",
+ explore: "Search codebase for relevant code and patterns",
+ plan: "Design the implementation approach",
+ implement: "Write the code changes",
+ verify: "Run tests and validate changes",
+ review: "Self-review the implementation",
+ finalize: "Commit changes and cleanup",
+} as const;
+
+/**
+ * Phase prompts for guiding the agent
+ */
+export const PHASE_PROMPTS: Record = {
+ understand: `You are in the UNDERSTAND phase of feature development.
+
+Your goal is to fully understand what needs to be built before writing any code.
+
+Tasks:
+1. Analyze the user's feature request
+2. Identify unclear or ambiguous requirements
+3. Ask clarifying questions if needed
+4. Document the understood requirements
+
+Output a summary of:
+- What the feature should do
+- User-facing behavior
+- Technical requirements
+- Edge cases to consider
+- Any assumptions made
+
+If anything is unclear, ask the user for clarification before proceeding.`,
+
+ explore: `You are in the EXPLORE phase of feature development.
+
+Your goal is to understand the existing codebase before making changes.
+
+Tasks:
+1. Search for related code using grep and glob
+2. Identify files that will need to be modified
+3. Understand existing patterns and conventions
+4. Find similar implementations to reference
+5. Identify potential dependencies or impacts
+
+Run multiple parallel searches to gather context efficiently.
+
+Document your findings:
+- Relevant files and their purposes
+- Existing patterns to follow
+- Code that might be affected
+- Useful examples in the codebase`,
+
+ plan: `You are in the PLAN phase of feature development.
+
+Your goal is to create a detailed implementation plan before writing code.
+
+Tasks:
+1. Design the solution architecture
+2. List files to create, modify, or delete
+3. Define the order of changes
+4. Identify risks and dependencies
+5. Plan the testing approach
+
+Create a plan that includes:
+- Summary of the approach
+- Step-by-step implementation order
+- File changes with descriptions
+- Potential risks and mitigations
+- Test cases to verify the feature
+
+Present this plan for user approval before proceeding.`,
+
+ implement: `You are in the IMPLEMENT phase of feature development.
+
+Your goal is to write the code according to the approved plan.
+
+Tasks:
+1. Follow the implementation plan step by step
+2. Write clean, well-documented code
+3. Follow existing code patterns and conventions
+4. Create necessary files and make required changes
+5. Track all changes made
+
+Guidelines:
+- Implement one step at a time
+- Test each change locally if possible
+- Keep changes focused and minimal
+- Add comments for complex logic
+- Update imports and exports as needed`,
+
+ verify: `You are in the VERIFY phase of feature development.
+
+Your goal is to ensure the implementation works correctly.
+
+Tasks:
+1. Run the test suite
+2. Add new tests for the feature
+3. Fix any failing tests
+4. Check for regressions
+5. Verify edge cases
+
+Report:
+- Test results (pass/fail counts)
+- Coverage information if available
+- Any issues discovered
+- Additional tests needed`,
+
+ review: `You are in the REVIEW phase of feature development.
+
+Your goal is to self-review the implementation for quality.
+
+Tasks:
+1. Review all changes made
+2. Check for code quality issues
+3. Verify documentation is complete
+4. Look for potential bugs
+5. Ensure best practices are followed
+
+Review criteria:
+- Code clarity and readability
+- Error handling
+- Edge cases covered
+- Performance considerations
+- Security implications
+- Documentation completeness
+
+Report any findings that need attention.`,
+
+ finalize: `You are in the FINALIZE phase of feature development.
+
+Your goal is to complete the feature implementation.
+
+Tasks:
+1. Create a commit with appropriate message
+2. Update any documentation
+3. Clean up temporary files
+4. Prepare summary of changes
+
+Output:
+- Final list of changes
+- Commit message (if committing)
+- Any follow-up tasks recommended
+- Success confirmation`,
+} as const;
+
+/**
+ * Checkpoint configuration per phase
+ */
+export const PHASE_CHECKPOINTS: Record<
+ FeatureDevPhase,
+ { required: boolean; title: string }
+> = {
+ understand: {
+ required: true,
+ title: "Requirements Confirmation",
+ },
+ explore: {
+ required: false,
+ title: "Exploration Summary",
+ },
+ plan: {
+ required: true,
+ title: "Implementation Plan Approval",
+ },
+ implement: {
+ required: false,
+ title: "Implementation Progress",
+ },
+ verify: {
+ required: true,
+ title: "Test Results Review",
+ },
+ review: {
+ required: true,
+ title: "Code Review Findings",
+ },
+ finalize: {
+ required: true,
+ title: "Final Approval",
+ },
+} as const;
+
+/**
+ * Error messages
+ */
+export const FEATURE_DEV_ERRORS = {
+ INVALID_PHASE: (phase: string) => `Invalid phase: ${phase}`,
+ INVALID_TRANSITION: (from: FeatureDevPhase, to: FeatureDevPhase) =>
+ `Cannot transition from ${from} to ${to}`,
+ CHECKPOINT_REQUIRED: (phase: FeatureDevPhase) =>
+ `User approval required for ${phase} phase`,
+ PHASE_FAILED: (phase: FeatureDevPhase, reason: string) =>
+ `Phase ${phase} failed: ${reason}`,
+ WORKFLOW_ABORTED: (reason: string) => `Workflow aborted: ${reason}`,
+ NO_PLAN: "Cannot implement without an approved plan",
+ TEST_FAILURE: "Tests failed - review required before proceeding",
+} as const;
+
+/**
+ * Status messages
+ */
+export const FEATURE_DEV_MESSAGES = {
+ STARTING: (phase: FeatureDevPhase) => `Starting ${phase} phase...`,
+ COMPLETED: (phase: FeatureDevPhase) => `Completed ${phase} phase`,
+ AWAITING_APPROVAL: (phase: FeatureDevPhase) =>
+ `Awaiting approval for ${phase}`,
+ CHECKPOINT: (title: string) => `Checkpoint: ${title}`,
+ EXPLORING: (query: string) => `Exploring: ${query}`,
+ IMPLEMENTING_STEP: (step: number, total: number) =>
+ `Implementing step ${step}/${total}`,
+ RUNNING_TESTS: "Running tests...",
+ REVIEWING: "Reviewing changes...",
+ FINALIZING: "Finalizing changes...",
+} as const;
+
+/**
+ * Allowed phase transitions
+ */
+export const ALLOWED_TRANSITIONS: Record = {
+ understand: ["explore", "plan"], // Can skip explore if simple
+ explore: ["plan", "understand"], // Can go back to understand
+ plan: ["implement", "explore", "understand"], // Can go back
+ implement: ["verify", "plan"], // Can revise plan
+ verify: ["review", "implement"], // Can fix issues
+ review: ["finalize", "implement"], // Can fix issues
+ finalize: [], // Terminal state
+} as const;
+
+/**
+ * Phase timeout configuration (in ms)
+ */
+export const PHASE_TIMEOUTS: Record = {
+ understand: 120000,
+ explore: 180000,
+ plan: 120000,
+ implement: 600000,
+ verify: 300000,
+ review: 120000,
+ finalize: 60000,
+} as const;
diff --git a/src/constants/help-content.ts b/src/constants/help-content.ts
index 3154893..e0d23de 100644
--- a/src/constants/help-content.ts
+++ b/src/constants/help-content.ts
@@ -89,7 +89,7 @@ export const HELP_TOPICS: HelpTopic[] = [
fullDescription:
"Switch between Agent (full access), Ask (read-only), and Code Review modes.",
usage: "/mode",
- shortcuts: ["Ctrl+Tab"],
+ shortcuts: ["Ctrl+M"],
category: "commands",
},
{
@@ -166,11 +166,11 @@ export const HELP_TOPICS: HelpTopic[] = [
category: "shortcuts",
},
{
- id: "shortcut-ctrltab",
- name: "Ctrl+Tab",
+ id: "shortcut-ctrlm",
+ name: "Ctrl+M",
shortDescription: "Cycle modes",
fullDescription: "Cycle through interaction modes.",
- shortcuts: ["Ctrl+Tab"],
+ shortcuts: ["Ctrl+M"],
category: "shortcuts",
},
];
diff --git a/src/constants/home.ts b/src/constants/home.ts
index 54ef3b8..0f33a37 100644
--- a/src/constants/home.ts
+++ b/src/constants/home.ts
@@ -1,4 +1,23 @@
export const HOME_VARS = {
- title: "Welcome to CodeTyper - Your AI Coding Assistant",
- subTitle: "Type a prompt below to start a new session",
+ subTitle: "Type a prompt below to start",
};
+
+/** CODETYPER text logo */
+export const ASCII_LOGO = [
+ " āāāāāāā āāāāāāā āāāāāāā āāāāāāāā āāāāāāāāā āāā āāā āāāāāāā āāāāāāāā āāāāāāā ",
+ "āāāāāāāā āāāāāāāāā āāāāāāāā āāāāāāāā āāāāāāāāā āāāā āāāā āāāāāāāā āāāāāāāā āāāāāāāā",
+ "āāā āāā āāā āāā āāā āāāāāā āāā āāāāāāā āāāāāāāā āāāāāā āāāāāāāā",
+ "āāā āāā āāā āāā āāā āāāāāā āāā āāāāā āāāāāāā āāāāāā āāāāāāāā",
+ "āāāāāāāā āāāāāāāāā āāāāāāāā āāāāāāāā āāā āāā āāā āāāāāāāā āāā āāā",
+ " āāāāāāā āāāāāāā āāāāāāā āāāāāāāā āāā āāā āāā āāāāāāāā āāā āāā",
+];
+
+/** Gradient colors for CODETYPER text - from top to bottom */
+export const ASCII_LOGO_GRADIENT = [
+ "#00FFFF", // Cyan
+ "#00D4FF", // Light blue
+ "#00AAFF", // Blue
+ "#0080FF", // Medium blue
+ "#0055FF", // Deep blue
+ "#AA00FF", // Purple
+];
diff --git a/src/constants/multi-edit.ts b/src/constants/multi-edit.ts
new file mode 100644
index 0000000..995cd70
--- /dev/null
+++ b/src/constants/multi-edit.ts
@@ -0,0 +1,54 @@
+/**
+ * MultiEdit Tool Constants
+ *
+ * Configuration for batch file editing operations
+ */
+
+export const MULTI_EDIT_DEFAULTS = {
+ MAX_EDITS: 50, // Maximum number of edits in a single batch
+ MAX_FILE_SIZE: 1024 * 1024, // 1MB max file size
+} as const;
+
+export const MULTI_EDIT_TITLES = {
+ VALIDATING: (count: number) => `Validating ${count} edits...`,
+ APPLYING: (current: number, total: number) =>
+ `Applying edit ${current}/${total}`,
+ SUCCESS: (count: number) => `Applied ${count} edits`,
+ PARTIAL: (success: number, failed: number) =>
+ `Applied ${success} edits, ${failed} failed`,
+ FAILED: "Multi-edit failed",
+ ROLLBACK: "Rolling back changes...",
+} as const;
+
+export const MULTI_EDIT_MESSAGES = {
+ NO_EDITS: "No edits provided",
+ TOO_MANY_EDITS: (max: number) => `Too many edits (max: ${max})`,
+ VALIDATION_FAILED: "Validation failed for one or more edits",
+ ATOMIC_FAILURE: "Atomic edit failed - all changes rolled back",
+ DUPLICATE_FILE: (path: string) =>
+ `Multiple edits to same file must be ordered: ${path}`,
+ OLD_STRING_NOT_FOUND: (path: string, preview: string) =>
+ `Old string not found in ${path}: "${preview}..."`,
+ OLD_STRING_NOT_UNIQUE: (path: string, count: number) =>
+ `Old string found ${count} times in ${path} (must be unique)`,
+ FILE_NOT_FOUND: (path: string) => `File not found: ${path}`,
+ FILE_TOO_LARGE: (path: string) => `File too large: ${path}`,
+} as const;
+
+export const MULTI_EDIT_DESCRIPTION = `Edit multiple files in a single atomic operation.
+
+Use this tool when you need to:
+- Make related changes across multiple files
+- Refactor code that spans several files
+- Apply consistent changes to many files
+
+All edits are validated before any changes are applied.
+If any edit fails validation, no changes are made.
+
+Each edit requires:
+- file_path: Absolute path to the file
+- old_string: The exact text to find and replace
+- new_string: The replacement text
+
+The old_string must be unique in the file. If it appears multiple times,
+provide more context to make it unique.`;
diff --git a/src/constants/parallel.ts b/src/constants/parallel.ts
new file mode 100644
index 0000000..b993f73
--- /dev/null
+++ b/src/constants/parallel.ts
@@ -0,0 +1,108 @@
+/**
+ * Parallel Agent Execution Constants
+ *
+ * Configuration for concurrent task execution, resource limits,
+ * and conflict detection.
+ */
+
+import type { ResourceLimits, TaskPriority } from "@/types/parallel";
+
+/**
+ * Default resource limits
+ */
+export const PARALLEL_DEFAULTS: ResourceLimits = {
+ maxConcurrentTasks: 5,
+ maxQueueSize: 50,
+ defaultTimeout: 60000,
+ maxRetries: 2,
+} as const;
+
+/**
+ * Priority weights for task ordering
+ */
+export const PRIORITY_WEIGHTS: Record = {
+ critical: 100,
+ high: 75,
+ normal: 50,
+ low: 25,
+} as const;
+
+/**
+ * Task type concurrency limits
+ * Some task types should have lower concurrency
+ */
+export const TASK_TYPE_LIMITS = {
+ explore: 5,
+ analyze: 4,
+ execute: 2,
+ search: 3,
+} as const;
+
+/**
+ * Conflict detection configuration
+ */
+export const CONFLICT_CONFIG = {
+ ENABLE_PATH_CONFLICT: true,
+ CONFLICT_CHECK_TIMEOUT_MS: 5000,
+ AUTO_RESOLVE_READ_CONFLICTS: true,
+} as const;
+
+/**
+ * Timeout values for different task types
+ */
+export const TASK_TIMEOUTS = {
+ explore: 30000,
+ analyze: 45000,
+ execute: 120000,
+ search: 15000,
+} as const;
+
+/**
+ * Error messages for parallel execution
+ */
+export const PARALLEL_ERRORS = {
+ QUEUE_FULL: "Task queue is full",
+ TIMEOUT: (taskId: string) => `Task ${taskId} timed out`,
+ CONFLICT: (taskId: string, paths: string[]) =>
+ `Task ${taskId} conflicts with paths: ${paths.join(", ")}`,
+ MAX_RETRIES: (taskId: string, retries: number) =>
+ `Task ${taskId} failed after ${retries} retries`,
+ CANCELLED: (taskId: string) => `Task ${taskId} was cancelled`,
+ INVALID_TASK: "Invalid task configuration",
+ EXECUTOR_ABORTED: "Executor was aborted",
+} as const;
+
+/**
+ * Status messages for parallel execution
+ */
+export const PARALLEL_MESSAGES = {
+ STARTING: (count: number) => `Starting ${count} parallel task(s)`,
+ COMPLETED: (success: number, failed: number) =>
+ `Completed: ${success} successful, ${failed} failed`,
+ QUEUED: (taskId: string, position: number) =>
+ `Task ${taskId} queued at position ${position}`,
+ RUNNING: (taskId: string) => `Running task: ${taskId}`,
+ WAITING_CONFLICT: (taskId: string) =>
+ `Task ${taskId} waiting for conflict resolution`,
+ RETRYING: (taskId: string, attempt: number) =>
+ `Retrying task ${taskId} (attempt ${attempt})`,
+} as const;
+
+/**
+ * Deduplication configuration
+ */
+export const DEDUP_CONFIG = {
+ ENABLE_CONTENT_DEDUP: true,
+ SIMILARITY_THRESHOLD: 0.95,
+ MAX_RESULTS_PER_TYPE: 100,
+} as const;
+
+/**
+ * Read-only task types (no conflict with each other)
+ */
+export const READ_ONLY_TASK_TYPES = new Set(["explore", "analyze", "search"]);
+
+/**
+ * Modifying task types (conflict with all tasks on same paths)
+ */
+export const MODIFYING_TASK_TYPES = new Set(["execute"]);
diff --git a/src/constants/paths.ts b/src/constants/paths.ts
index 8dfc1e4..af180a6 100644
--- a/src/constants/paths.ts
+++ b/src/constants/paths.ts
@@ -58,12 +58,18 @@ export const FILES = {
/** Provider credentials (stored in data, not config) */
credentials: join(DIRS.data, "credentials.json"),
+ /** Environment variables and tokens (API keys, JWT tokens, etc.) */
+ vars: join(DIRS.config, "vars.json"),
+
/** Command history */
history: join(DIRS.data, "history.json"),
/** Models cache */
modelsCache: join(DIRS.cache, "models.json"),
+ /** Copilot token cache */
+ copilotTokenCache: join(DIRS.cache, "copilot-token.json"),
+
/** Frecency cache for file/command suggestions */
frecency: join(DIRS.cache, "frecency.json"),
diff --git a/src/constants/pr-review.ts b/src/constants/pr-review.ts
new file mode 100644
index 0000000..af86452
--- /dev/null
+++ b/src/constants/pr-review.ts
@@ -0,0 +1,207 @@
+/**
+ * PR Review Toolkit Constants
+ *
+ * Configuration for multi-agent code review.
+ */
+
+import type {
+ PRReviewConfig,
+ ReviewSeverity,
+ ReviewFindingType,
+} from "@/types/pr-review";
+
+/**
+ * Minimum confidence threshold for reporting findings
+ * Only report findings with confidence >= 80%
+ */
+export const MIN_CONFIDENCE_THRESHOLD = 80;
+
+/**
+ * Default review configuration
+ */
+export const DEFAULT_REVIEW_CONFIG: PRReviewConfig = {
+ minConfidence: MIN_CONFIDENCE_THRESHOLD,
+ reviewers: [
+ { name: "security", type: "security", enabled: true, minConfidence: 80 },
+ { name: "performance", type: "performance", enabled: true, minConfidence: 80 },
+ { name: "style", type: "style", enabled: true, minConfidence: 85 },
+ { name: "logic", type: "logic", enabled: true, minConfidence: 80 },
+ ],
+ security: {
+ checkInjection: true,
+ checkXSS: true,
+ checkAuth: true,
+ checkSecrets: true,
+ checkDependencies: true,
+ },
+ performance: {
+ checkComplexity: true,
+ checkMemory: true,
+ checkQueries: true,
+ checkCaching: true,
+ checkRenders: true,
+ },
+ style: {
+ checkNaming: true,
+ checkFormatting: true,
+ checkConsistency: true,
+ checkComments: true,
+ },
+ logic: {
+ checkEdgeCases: true,
+ checkNullHandling: true,
+ checkErrorHandling: true,
+ checkConcurrency: true,
+ checkTypes: true,
+ },
+ excludePatterns: [
+ "**/node_modules/**",
+ "**/*.min.js",
+ "**/*.bundle.js",
+ "**/dist/**",
+ "**/build/**",
+ "**/*.lock",
+ "**/package-lock.json",
+ "**/yarn.lock",
+ "**/pnpm-lock.yaml",
+ ],
+ maxFindings: 50,
+} as const;
+
+/**
+ * Severity emoji indicators
+ */
+export const SEVERITY_ICONS: Record = {
+ critical: "š“",
+ warning: "š ",
+ suggestion: "š”",
+ nitpick: "š¢",
+} as const;
+
+/**
+ * Severity labels
+ */
+export const SEVERITY_LABELS: Record = {
+ critical: "CRITICAL",
+ warning: "WARNING",
+ suggestion: "SUGGESTION",
+ nitpick: "NITPICK",
+} as const;
+
+/**
+ * Finding type labels
+ */
+export const FINDING_TYPE_LABELS: Record = {
+ security: "Security",
+ performance: "Performance",
+ style: "Style",
+ logic: "Logic",
+ documentation: "Documentation",
+ testing: "Testing",
+} as const;
+
+/**
+ * Reviewer prompts
+ */
+export const REVIEWER_PROMPTS: Record = {
+ security: `You are a security reviewer. Analyze the code changes for:
+- SQL injection, XSS, command injection vulnerabilities
+- Authentication and authorization issues
+- Sensitive data exposure (API keys, passwords, tokens)
+- Input validation and sanitization problems
+- Insecure dependencies
+
+Only report findings with high confidence (ā„80%). For each issue:
+- Describe the vulnerability
+- Explain the potential impact
+- Suggest a specific fix`,
+
+ performance: `You are a performance reviewer. Analyze the code changes for:
+- Algorithmic complexity issues (O(n²) or worse operations)
+- Memory usage problems (leaks, excessive allocations)
+- Database query efficiency (N+1 queries, missing indexes)
+- Unnecessary re-renders (React) or DOM manipulations
+- Missing caching opportunities
+
+Only report findings with high confidence (ā„80%). For each issue:
+- Describe the performance impact
+- Provide complexity analysis if applicable
+- Suggest optimization`,
+
+ style: `You are a code style reviewer. Analyze the code changes for:
+- Naming convention violations
+- Inconsistent formatting
+- Code organization issues
+- Missing or unclear documentation
+- Deviations from project patterns
+
+Only report significant style issues that affect readability or maintainability.
+Skip minor formatting issues that could be auto-fixed.`,
+
+ logic: `You are a logic reviewer. Analyze the code changes for:
+- Edge cases not handled
+- Null/undefined reference risks
+- Error handling gaps
+- Race conditions or concurrency issues
+- Type safety violations
+
+Only report findings with high confidence (ā„80%). For each issue:
+- Describe the bug or potential bug
+- Explain how it could manifest
+- Suggest a fix with example code`,
+} as const;
+
+/**
+ * Rating thresholds
+ */
+export const RATING_THRESHOLDS = {
+ 5: { maxCritical: 0, maxWarning: 0 },
+ 4: { maxCritical: 0, maxWarning: 3 },
+ 3: { maxCritical: 0, maxWarning: 10 },
+ 2: { maxCritical: 1, maxWarning: 20 },
+ 1: { maxCritical: Infinity, maxWarning: Infinity },
+} as const;
+
+/**
+ * Recommendation thresholds
+ */
+export const RECOMMENDATION_THRESHOLDS = {
+ approve: { maxCritical: 0, maxWarning: 0, maxSuggestion: 5 },
+ approve_with_suggestions: { maxCritical: 0, maxWarning: 3, maxSuggestion: Infinity },
+ request_changes: { maxCritical: 1, maxWarning: Infinity, maxSuggestion: Infinity },
+ needs_discussion: { maxCritical: Infinity, maxWarning: Infinity, maxSuggestion: Infinity },
+} as const;
+
+/**
+ * Error messages
+ */
+export const PR_REVIEW_ERRORS = {
+ NO_DIFF: "No diff content to review",
+ PARSE_FAILED: (error: string) => `Failed to parse diff: ${error}`,
+ REVIEWER_FAILED: (reviewer: string, error: string) =>
+ `Reviewer ${reviewer} failed: ${error}`,
+ NO_FILES: "No files in diff to review",
+ EXCLUDED_ALL: "All files excluded by pattern",
+} as const;
+
+/**
+ * Status messages
+ */
+export const PR_REVIEW_MESSAGES = {
+ STARTING: "Starting PR review...",
+ PARSING_DIFF: "Parsing diff...",
+ REVIEWING: (reviewer: string) => `Running ${reviewer} review...`,
+ AGGREGATING: "Aggregating results...",
+ COMPLETED: (findings: number) => `Review complete: ${findings} finding(s)`,
+ NO_FINDINGS: "No issues found",
+} as const;
+
+/**
+ * Report titles
+ */
+export const PR_REVIEW_TITLES = {
+ REPORT: "Pull Request Review",
+ FINDINGS: "Findings",
+ SUMMARY: "Summary",
+ RECOMMENDATION: "Recommendation",
+} as const;
diff --git a/src/constants/skills.ts b/src/constants/skills.ts
new file mode 100644
index 0000000..fb1ef70
--- /dev/null
+++ b/src/constants/skills.ts
@@ -0,0 +1,132 @@
+/**
+ * Skill System Constants
+ *
+ * Constants for skill loading, matching, and execution.
+ */
+
+import { join } from "path";
+import { DIRS } from "@constants/paths";
+
+/**
+ * Skill file configuration
+ */
+export const SKILL_FILE = {
+ NAME: "SKILL.md",
+ FRONTMATTER_DELIMITER: "---",
+ ENCODING: "utf-8",
+} as const;
+
+/**
+ * Skill directories
+ */
+export const SKILL_DIRS = {
+ BUILTIN: join(__dirname, "..", "skills"),
+ USER: join(DIRS.config, "skills"),
+ PROJECT: ".codetyper/skills",
+} as const;
+
+/**
+ * Skill loading configuration
+ */
+export const SKILL_LOADING = {
+ CACHE_TTL_MS: 60000,
+ MAX_SKILLS: 100,
+ MAX_FILE_SIZE_BYTES: 100000,
+} as const;
+
+/**
+ * Skill matching configuration
+ */
+export const SKILL_MATCHING = {
+ MIN_CONFIDENCE: 0.7,
+ EXACT_MATCH_BONUS: 0.3,
+ COMMAND_PREFIX: "/",
+ FUZZY_THRESHOLD: 0.6,
+} as const;
+
+/**
+ * Default skill metadata values
+ */
+export const SKILL_DEFAULTS = {
+ VERSION: "1.0.0",
+ TRIGGER_TYPE: "command" as const,
+ AUTO_TRIGGER: false,
+ REQUIRED_TOOLS: [] as string[],
+} as const;
+
+/**
+ * Skill error messages
+ */
+export const SKILL_ERRORS = {
+ NOT_FOUND: (id: string) => `Skill not found: ${id}`,
+ INVALID_FRONTMATTER: (file: string) => `Invalid frontmatter in: ${file}`,
+ MISSING_REQUIRED_FIELD: (field: string, file: string) =>
+ `Missing required field '${field}' in: ${file}`,
+ LOAD_FAILED: (file: string, error: string) =>
+ `Failed to load skill from ${file}: ${error}`,
+ NO_MATCH: "No matching skill found for input",
+ EXECUTION_FAILED: (id: string, error: string) =>
+ `Skill execution failed for ${id}: ${error}`,
+} as const;
+
+/**
+ * Skill titles for UI
+ */
+export const SKILL_TITLES = {
+ LOADING: (name: string) => `Loading skill: ${name}`,
+ EXECUTING: (name: string) => `Executing skill: ${name}`,
+ MATCHED: (name: string, confidence: number) =>
+ `Matched skill: ${name} (${(confidence * 100).toFixed(0)}%)`,
+ COMPLETED: (name: string) => `Skill completed: ${name}`,
+ FAILED: (name: string) => `Skill failed: ${name}`,
+} as const;
+
+/**
+ * Built-in skill IDs
+ */
+export const BUILTIN_SKILLS = {
+ COMMIT: "commit",
+ REVIEW_PR: "review-pr",
+ EXPLAIN: "explain",
+ FEATURE_DEV: "feature-dev",
+} as const;
+
+/**
+ * Skill trigger patterns for common commands
+ */
+export const SKILL_TRIGGER_PATTERNS = {
+ COMMIT: [
+ "/commit",
+ "commit changes",
+ "commit this",
+ "git commit",
+ "make a commit",
+ ],
+ REVIEW_PR: [
+ "/review-pr",
+ "/review",
+ "review pr",
+ "review this pr",
+ "review pull request",
+ "code review",
+ ],
+ EXPLAIN: [
+ "/explain",
+ "explain this",
+ "explain code",
+ "what does this do",
+ "how does this work",
+ ],
+ FEATURE_DEV: [
+ "/feature",
+ "/feature-dev",
+ "implement feature",
+ "new feature",
+ "build feature",
+ ],
+} as const;
+
+/**
+ * Required fields in skill frontmatter
+ */
+export const SKILL_REQUIRED_FIELDS = ["id", "name", "description", "triggers"] as const;
diff --git a/src/constants/token.ts b/src/constants/token.ts
new file mode 100644
index 0000000..9fff32b
--- /dev/null
+++ b/src/constants/token.ts
@@ -0,0 +1,55 @@
+/**
+ * Token Counting Constants
+ *
+ * Configuration for token estimation and context management
+ */
+
+// Token estimation ratios
+export const CHARS_PER_TOKEN = 4;
+export const TOKENS_PER_CHAR = 0.25;
+
+// Context warning thresholds
+export const TOKEN_WARNING_THRESHOLD = 0.75; // 75% - yellow warning
+export const TOKEN_CRITICAL_THRESHOLD = 0.90; // 90% - red warning
+export const TOKEN_OVERFLOW_THRESHOLD = 0.95; // 95% - trigger compaction
+
+// Pruning thresholds (following OpenCode pattern)
+export const PRUNE_MINIMUM_TOKENS = 20000; // Min tokens to actually prune
+export const PRUNE_PROTECT_TOKENS = 40000; // Threshold before marking for pruning
+export const PRUNE_RECENT_TURNS = 2; // Protect last N user turns
+
+// Protected tools that should never be pruned
+export const PRUNE_PROTECTED_TOOLS = new Set([
+ "skill",
+ "todo_read",
+ "todo_write",
+]);
+
+// Default context sizes
+export const DEFAULT_MAX_CONTEXT_TOKENS = 128000;
+export const DEFAULT_OUTPUT_TOKENS = 16000;
+
+// Token display formatting
+export const TOKEN_DISPLAY = {
+ SEPARATOR: "/",
+ UNIT_K: "K",
+ FORMAT_DECIMALS: 1,
+} as const;
+
+// Token status colors (semantic keys for theme lookup)
+export const TOKEN_STATUS_COLORS = {
+ NORMAL: "textDim",
+ WARNING: "warning",
+ CRITICAL: "error",
+ COMPACTING: "info",
+} as const;
+
+// Messages
+export const TOKEN_MESSAGES = {
+ CONTEXT_LOW: "Context running low",
+ CONTEXT_CRITICAL: "Context nearly full",
+ COMPACTION_STARTING: "Starting context compaction...",
+ COMPACTION_COMPLETE: (saved: number) =>
+ `Compaction complete: ${saved.toLocaleString()} tokens freed`,
+ OVERFLOW_WARNING: "Context overflow detected",
+} as const;
diff --git a/src/constants/tui-components.ts b/src/constants/tui-components.ts
index 836a599..a4e8c65 100644
--- a/src/constants/tui-components.ts
+++ b/src/constants/tui-components.ts
@@ -49,6 +49,8 @@ export const MODE_DISPLAY_CONFIG: Record = {
learning_prompt: { text: "Save Learning?", color: "cyan" },
help_menu: { text: "Help", color: "cyan" },
help_detail: { text: "Help Detail", color: "cyan" },
+ brain_menu: { text: "Brain Settings", color: "magenta" },
+ brain_login: { text: "Brain Login", color: "magenta" },
} as const;
export const DEFAULT_MODE_DISPLAY: ModeDisplayConfig = {
@@ -219,6 +221,11 @@ export const SLASH_COMMANDS: SlashCommand[] = [
description: "Sign out from provider",
category: "account",
},
+ {
+ name: "brain",
+ description: "Configure CodeTyper Brain (memory & knowledge)",
+ category: "account",
+ },
];
export const COMMAND_CATEGORIES = [
diff --git a/src/constants/web-fetch.ts b/src/constants/web-fetch.ts
new file mode 100644
index 0000000..6f79111
--- /dev/null
+++ b/src/constants/web-fetch.ts
@@ -0,0 +1,75 @@
+/**
+ * WebFetch Tool Constants
+ *
+ * Configuration for the web content fetching tool
+ */
+
+export const WEB_FETCH_DEFAULTS = {
+ TIMEOUT_MS: 30000,
+ MAX_CONTENT_LENGTH: 500000, // 500KB max
+ USER_AGENT:
+ "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36",
+} as const;
+
+export const WEB_FETCH_TITLES = {
+ FETCHING: (url: string) => `Fetching: ${url}`,
+ SUCCESS: "Content fetched",
+ FAILED: "Fetch failed",
+ TIMEOUT: "Fetch timed out",
+} as const;
+
+export const WEB_FETCH_MESSAGES = {
+ URL_REQUIRED: "URL is required",
+ INVALID_URL: (url: string) => `Invalid URL: ${url}`,
+ TIMEOUT: "Request timed out",
+ FETCH_ERROR: (error: string) => `Fetch failed: ${error}`,
+ CONTENT_TOO_LARGE: "Content exceeds maximum size limit",
+ REDIRECT_DETECTED: (from: string, to: string) =>
+ `Redirected from ${from} to ${to}`,
+} as const;
+
+export const WEB_FETCH_DESCRIPTION = `Fetch content from a URL and convert HTML to markdown.
+
+Use this tool when you need to:
+- Read documentation from a URL
+- Fetch API responses
+- Get content from web pages
+
+The content will be converted to markdown for readability.
+HTML will be cleaned and converted. JSON responses are formatted.
+
+Note: This tool cannot access authenticated or private URLs.
+For GitHub URLs, prefer using the \`bash\` tool with \`gh\` CLI instead.`;
+
+// Supported content types for conversion
+export const SUPPORTED_CONTENT_TYPES = {
+ HTML: ["text/html", "application/xhtml+xml"],
+ JSON: ["application/json", "text/json"],
+ TEXT: ["text/plain", "text/markdown", "text/csv"],
+ XML: ["text/xml", "application/xml"],
+} as const;
+
+// HTML elements to remove (scripts, styles, etc.)
+export const HTML_REMOVE_ELEMENTS = [
+ "script",
+ "style",
+ "noscript",
+ "iframe",
+ "svg",
+ "canvas",
+ "video",
+ "audio",
+ "nav",
+ "footer",
+ "aside",
+];
+
+// HTML elements to convert to markdown
+export const HTML_BLOCK_ELEMENTS = [
+ "p",
+ "div",
+ "section",
+ "article",
+ "main",
+ "header",
+];
diff --git a/src/prompts/system/ask.ts b/src/prompts/system/ask.ts
index 5b73e33..b5b1bc5 100644
--- a/src/prompts/system/ask.ts
+++ b/src/prompts/system/ask.ts
@@ -207,7 +207,7 @@ Read-only tools only:
- You are in READ-ONLY mode - you cannot modify files
- Always search before answering questions about the codebase
-- If asked to make changes, explain that you're in Ask mode and suggest switching to Agent mode (Ctrl+Tab)
+- If asked to make changes, explain that you're in Ask mode and suggest switching to Agent mode (Ctrl+M)
- For general programming questions, you can answer without searching`;
/**
diff --git a/src/providers/copilot/chat.ts b/src/providers/copilot/chat.ts
index 64be4c3..27feeab 100644
--- a/src/providers/copilot/chat.ts
+++ b/src/providers/copilot/chat.ts
@@ -246,6 +246,7 @@ const executeStream = (
if (delta?.tool_calls) {
for (const tc of delta.tool_calls) {
addDebugLog("api", `Tool call chunk: ${JSON.stringify(tc)}`);
+ console.log("Debug: Tool call chunk received:", JSON.stringify(tc));
onChunk({ type: "tool_call", toolCall: tc });
}
}
diff --git a/src/providers/copilot/token.ts b/src/providers/copilot/token.ts
index 2a6548e..6b71f68 100644
--- a/src/providers/copilot/token.ts
+++ b/src/providers/copilot/token.ts
@@ -2,13 +2,14 @@
* Copilot token management
*/
-import { readFile } from "fs/promises";
+import { readFile, writeFile, mkdir } from "fs/promises";
import { existsSync } from "fs";
import { homedir, platform } from "os";
import { join } from "path";
import got from "got";
import { COPILOT_AUTH_URL } from "@constants/copilot";
+import { FILES, DIRS } from "@constants/paths";
import {
getState,
setOAuthToken,
@@ -16,6 +17,36 @@ import {
} from "@providers/copilot/state";
import type { CopilotToken } from "@/types/copilot";
+/**
+ * Load cached Copilot token from disk
+ */
+const loadCachedToken = async (): Promise => {
+ try {
+ const data = await readFile(FILES.copilotTokenCache, "utf-8");
+ const token = JSON.parse(data) as CopilotToken;
+
+ // Check if token is still valid (with 60 second buffer)
+ if (token.expires_at > Date.now() / 1000 + 60) {
+ return token;
+ }
+ } catch {
+ // Cache doesn't exist or is invalid
+ }
+ return null;
+};
+
+/**
+ * Save Copilot token to disk cache
+ */
+const saveCachedToken = async (token: CopilotToken): Promise => {
+ try {
+ await mkdir(DIRS.cache, { recursive: true });
+ await writeFile(FILES.copilotTokenCache, JSON.stringify(token), "utf-8");
+ } catch {
+ // Silently fail - caching is optional
+ }
+};
+
const getConfigDir = (): string => {
const home = homedir();
const os = platform();
@@ -88,6 +119,7 @@ export const refreshToken = async (): Promise => {
const currentState = getState();
+ // Check in-memory cache first
if (
currentState.githubToken &&
currentState.githubToken.expires_at > Date.now() / 1000
@@ -95,6 +127,14 @@ export const refreshToken = async (): Promise => {
return currentState.githubToken;
}
+ // Check disk cache to avoid network request on startup
+ const cachedToken = await loadCachedToken();
+ if (cachedToken) {
+ setGitHubToken(cachedToken);
+ return cachedToken;
+ }
+
+ // Fetch new token from GitHub
const response = await got
.get(COPILOT_AUTH_URL, {
headers: {
@@ -109,6 +149,10 @@ export const refreshToken = async (): Promise => {
}
setGitHubToken(response);
+
+ // Cache to disk for faster startup next time
+ saveCachedToken(response).catch(() => {});
+
return response;
};
diff --git a/src/providers/ollama/chat.ts b/src/providers/ollama/chat.ts
index de9c3ee..dc6f08d 100644
--- a/src/providers/ollama/chat.ts
+++ b/src/providers/ollama/chat.ts
@@ -22,15 +22,36 @@ import type {
OllamaChatResponse,
OllamaToolCall,
OllamaToolDefinition,
+ OllamaMessage,
} from "@/types/ollama";
-const formatMessages = (
- messages: Message[],
-): Array<{ role: string; content: string }> =>
- messages.map((msg) => ({
- role: msg.role,
- content: msg.content,
- }));
+/**
+ * Format messages for Ollama API
+ * Handles regular messages, assistant messages with tool_calls, and tool response messages
+ */
+const formatMessages = (messages: Message[]): OllamaMessage[] =>
+ messages.map((msg) => {
+ const formatted: OllamaMessage = {
+ role: msg.role,
+ content: msg.content,
+ };
+
+ // Include tool_calls for assistant messages that made tool calls
+ if (msg.tool_calls && msg.tool_calls.length > 0) {
+ formatted.tool_calls = msg.tool_calls.map((tc) => ({
+ id: tc.id,
+ function: {
+ name: tc.function.name,
+ arguments:
+ typeof tc.function.arguments === "string"
+ ? JSON.parse(tc.function.arguments)
+ : tc.function.arguments,
+ },
+ }));
+ }
+
+ return formatted;
+ });
const formatTools = (
tools: ChatCompletionOptions["tools"],
diff --git a/src/services/agent-definition-loader.ts b/src/services/agent-definition-loader.ts
new file mode 100644
index 0000000..840adaa
--- /dev/null
+++ b/src/services/agent-definition-loader.ts
@@ -0,0 +1,289 @@
+/**
+ * Agent definition loader service
+ * Loads agent definitions from markdown files with YAML frontmatter
+ */
+
+import { readFile, readdir } from "node:fs/promises";
+import { join, basename, extname } from "node:path";
+import { existsSync } from "node:fs";
+import { homedir } from "node:os";
+
+import type {
+ AgentDefinition,
+ AgentFrontmatter,
+ AgentDefinitionFile,
+ AgentRegistry,
+ AgentLoadResult,
+ AgentTier,
+ AgentColor,
+} from "@src/types/agent-definition";
+import { DEFAULT_AGENT_DEFINITION, AGENT_DEFINITION_SCHEMA } from "@src/types/agent-definition";
+import { AGENT_DEFINITION, AGENT_DEFINITION_PATHS, AGENT_MESSAGES } from "@src/constants/agent-definition";
+
+const parseFrontmatter = (content: string): { frontmatter: Record; body: string } | null => {
+ const delimiter = AGENT_DEFINITION.FRONTMATTER_DELIMITER;
+ const lines = content.split("\n");
+
+ if (lines[0]?.trim() !== delimiter) {
+ return null;
+ }
+
+ const endIndex = lines.findIndex((line, index) => index > 0 && line.trim() === delimiter);
+ if (endIndex === -1) {
+ return null;
+ }
+
+ const frontmatterLines = lines.slice(1, endIndex);
+ const body = lines.slice(endIndex + 1).join("\n").trim();
+
+ // Simple YAML parser for frontmatter
+ const frontmatter: Record = {};
+ let currentKey = "";
+ let currentArray: string[] | null = null;
+
+ frontmatterLines.forEach((line) => {
+ const trimmed = line.trim();
+
+ if (trimmed.startsWith("- ") && currentArray !== null) {
+ currentArray.push(trimmed.slice(2));
+ return;
+ }
+
+ if (currentArray !== null) {
+ frontmatter[currentKey] = currentArray;
+ currentArray = null;
+ }
+
+ const colonIndex = trimmed.indexOf(":");
+ if (colonIndex === -1) return;
+
+ const key = trimmed.slice(0, colonIndex).trim();
+ const value = trimmed.slice(colonIndex + 1).trim();
+
+ if (value === "") {
+ currentKey = key;
+ currentArray = [];
+ } else if (value.startsWith("[") && value.endsWith("]")) {
+ frontmatter[key] = value
+ .slice(1, -1)
+ .split(",")
+ .map((s) => s.trim().replace(/^["']|["']$/g, ""));
+ } else if (value === "true") {
+ frontmatter[key] = true;
+ } else if (value === "false") {
+ frontmatter[key] = false;
+ } else if (!isNaN(Number(value))) {
+ frontmatter[key] = Number(value);
+ } else {
+ frontmatter[key] = value.replace(/^["']|["']$/g, "");
+ }
+ });
+
+ if (currentArray !== null) {
+ frontmatter[currentKey] = currentArray;
+ }
+
+ return { frontmatter, body };
+};
+
+const validateFrontmatter = (frontmatter: Record): AgentFrontmatter | null => {
+ const { required } = AGENT_DEFINITION_SCHEMA;
+
+ for (const field of required) {
+ if (!(field in frontmatter)) {
+ return null;
+ }
+ }
+
+ const name = frontmatter.name;
+ const description = frontmatter.description;
+ const tools = frontmatter.tools;
+
+ if (typeof name !== "string" || typeof description !== "string" || !Array.isArray(tools)) {
+ return null;
+ }
+
+ return {
+ name,
+ description,
+ tools: tools as ReadonlyArray,
+ tier: (frontmatter.tier as AgentTier) || DEFAULT_AGENT_DEFINITION.tier,
+ color: (frontmatter.color as AgentColor) || DEFAULT_AGENT_DEFINITION.color,
+ maxTurns: (frontmatter.maxTurns as number) || DEFAULT_AGENT_DEFINITION.maxTurns,
+ triggerPhrases: (frontmatter.triggerPhrases as ReadonlyArray) || [],
+ capabilities: (frontmatter.capabilities as ReadonlyArray) || [],
+ allowedPaths: frontmatter.allowedPaths as ReadonlyArray | undefined,
+ deniedPaths: frontmatter.deniedPaths as ReadonlyArray | undefined,
+ };
+};
+
+const frontmatterToDefinition = (frontmatter: AgentFrontmatter, content: string): AgentDefinition => ({
+ name: frontmatter.name,
+ description: frontmatter.description,
+ tools: frontmatter.tools,
+ tier: frontmatter.tier || (DEFAULT_AGENT_DEFINITION.tier as AgentTier),
+ color: frontmatter.color || (DEFAULT_AGENT_DEFINITION.color as AgentColor),
+ maxTurns: frontmatter.maxTurns || DEFAULT_AGENT_DEFINITION.maxTurns,
+ systemPrompt: content || undefined,
+ triggerPhrases: frontmatter.triggerPhrases || [],
+ capabilities: frontmatter.capabilities || [],
+ permissions: {
+ allowedPaths: frontmatter.allowedPaths,
+ deniedPaths: frontmatter.deniedPaths,
+ },
+});
+
+export const loadAgentDefinitionFile = async (filePath: string): Promise => {
+ try {
+ const content = await readFile(filePath, "utf-8");
+ const parsed = parseFrontmatter(content);
+
+ if (!parsed) {
+ return { success: false, error: AGENT_MESSAGES.INVALID_FRONTMATTER, filePath };
+ }
+
+ const frontmatter = validateFrontmatter(parsed.frontmatter);
+
+ if (!frontmatter) {
+ return { success: false, error: AGENT_MESSAGES.MISSING_REQUIRED, filePath };
+ }
+
+ const agent = frontmatterToDefinition(frontmatter, parsed.body);
+
+ return { success: true, agent, filePath };
+ } catch (error) {
+ const message = error instanceof Error ? error.message : "Unknown error";
+ return { success: false, error: message, filePath };
+ }
+};
+
+export const loadAgentDefinitionsFromDirectory = async (
+ directoryPath: string
+): Promise> => {
+ const resolvedPath = directoryPath.replace("~", homedir());
+
+ if (!existsSync(resolvedPath)) {
+ return [];
+ }
+
+ try {
+ const files = await readdir(resolvedPath);
+ const mdFiles = files.filter(
+ (file) => extname(file) === AGENT_DEFINITION.FILE_EXTENSION
+ );
+
+ const results = await Promise.all(
+ mdFiles.map((file) => loadAgentDefinitionFile(join(resolvedPath, file)))
+ );
+
+ return results;
+ } catch {
+ return [];
+ }
+};
+
+export const loadAllAgentDefinitions = async (
+ projectPath: string
+): Promise => {
+ const agents = new Map();
+ const byTrigger = new Map();
+ const byCapability = new Map();
+
+ // Load from all paths in priority order (project > global > builtin)
+ const paths = [
+ join(projectPath, AGENT_DEFINITION_PATHS.PROJECT),
+ AGENT_DEFINITION_PATHS.GLOBAL,
+ ];
+
+ for (const path of paths) {
+ const results = await loadAgentDefinitionsFromDirectory(path);
+
+ results.forEach((result) => {
+ if (result.success && result.agent) {
+ const { agent } = result;
+
+ // Don't override if already loaded (project takes precedence)
+ if (!agents.has(agent.name)) {
+ agents.set(agent.name, agent);
+
+ // Index by trigger phrases
+ agent.triggerPhrases?.forEach((phrase) => {
+ byTrigger.set(phrase.toLowerCase(), agent.name);
+ });
+
+ // Index by capabilities
+ agent.capabilities?.forEach((capability) => {
+ const existing = byCapability.get(capability) || [];
+ byCapability.set(capability, [...existing, agent.name]);
+ });
+ }
+ }
+ });
+ }
+
+ return { agents, byTrigger, byCapability };
+};
+
+export const findAgentByTrigger = (
+ registry: AgentRegistry,
+ text: string
+): AgentDefinition | undefined => {
+ const normalized = text.toLowerCase();
+
+ for (const [phrase, agentName] of registry.byTrigger) {
+ if (normalized.includes(phrase)) {
+ return registry.agents.get(agentName);
+ }
+ }
+
+ return undefined;
+};
+
+export const findAgentsByCapability = (
+ registry: AgentRegistry,
+ capability: string
+): ReadonlyArray => {
+ const agentNames = registry.byCapability.get(capability) || [];
+ return agentNames
+ .map((name) => registry.agents.get(name))
+ .filter((a): a is AgentDefinition => a !== undefined);
+};
+
+export const getAgentByName = (
+ registry: AgentRegistry,
+ name: string
+): AgentDefinition | undefined => registry.agents.get(name);
+
+export const listAllAgents = (registry: AgentRegistry): ReadonlyArray =>
+ Array.from(registry.agents.values());
+
+export const createAgentDefinitionContent = (agent: AgentDefinition): string => {
+ const frontmatter = [
+ "---",
+ `name: ${agent.name}`,
+ `description: ${agent.description}`,
+ `tools: [${agent.tools.join(", ")}]`,
+ `tier: ${agent.tier}`,
+ `color: ${agent.color}`,
+ ];
+
+ if (agent.maxTurns) {
+ frontmatter.push(`maxTurns: ${agent.maxTurns}`);
+ }
+
+ if (agent.triggerPhrases && agent.triggerPhrases.length > 0) {
+ frontmatter.push("triggerPhrases:");
+ agent.triggerPhrases.forEach((phrase) => frontmatter.push(` - ${phrase}`));
+ }
+
+ if (agent.capabilities && agent.capabilities.length > 0) {
+ frontmatter.push("capabilities:");
+ agent.capabilities.forEach((cap) => frontmatter.push(` - ${cap}`));
+ }
+
+ frontmatter.push("---");
+
+ const content = agent.systemPrompt || `# ${agent.name}\n\n${agent.description}`;
+
+ return `${frontmatter.join("\n")}\n\n${content}`;
+};
diff --git a/src/services/background-task-service.ts b/src/services/background-task-service.ts
new file mode 100644
index 0000000..62b0ee9
--- /dev/null
+++ b/src/services/background-task-service.ts
@@ -0,0 +1,389 @@
+/**
+ * Background task service
+ * Manages background task execution, queue, and lifecycle
+ */
+
+import { randomUUID } from "node:crypto";
+import { writeFile, readFile, mkdir, readdir, unlink } from "node:fs/promises";
+import { join } from "node:path";
+import { existsSync } from "node:fs";
+import { homedir } from "node:os";
+
+import type {
+ BackgroundTask,
+ BackgroundTaskStatus,
+ BackgroundTaskPriority,
+ BackgroundTaskConfig,
+ TaskProgress,
+ TaskResult,
+ TaskError,
+ TaskMetadata,
+ TaskNotification,
+ TaskStep,
+ TaskArtifact,
+} from "@src/types/background-task";
+import { DEFAULT_BACKGROUND_TASK_CONFIG, BACKGROUND_TASK_PRIORITIES } from "@src/types/background-task";
+import {
+ BACKGROUND_TASK,
+ BACKGROUND_TASK_STORAGE,
+ BACKGROUND_TASK_MESSAGES,
+ BACKGROUND_TASK_STATUS_ICONS,
+} from "@src/constants/background-task";
+
+type TaskHandler = (task: BackgroundTask, updateProgress: (progress: Partial) => void) => Promise;
+type NotificationHandler = (notification: TaskNotification) => void;
+
+interface BackgroundTaskState {
+ tasks: Map;
+ queue: string[];
+ running: string[];
+ handlers: Map;
+ notificationHandlers: NotificationHandler[];
+ config: BackgroundTaskConfig;
+}
+
+const state: BackgroundTaskState = {
+ tasks: new Map(),
+ queue: [],
+ running: [],
+ handlers: new Map(),
+ notificationHandlers: [],
+ config: DEFAULT_BACKGROUND_TASK_CONFIG,
+};
+
+const getStoragePath = (): string => {
+ const basePath = join(homedir(), ".local", "share", "codetyper", "tasks");
+ return basePath;
+};
+
+const ensureStorageDirectory = async (): Promise => {
+ const storagePath = getStoragePath();
+ if (!existsSync(storagePath)) {
+ await mkdir(storagePath, { recursive: true });
+ }
+};
+
+const persistTask = async (task: BackgroundTask): Promise => {
+ if (!state.config.persistTasks) return;
+
+ await ensureStorageDirectory();
+ const filePath = join(getStoragePath(), `${task.id}${BACKGROUND_TASK_STORAGE.FILE_EXTENSION}`);
+ await writeFile(filePath, JSON.stringify(task, null, 2));
+};
+
+const removePersistedTask = async (taskId: string): Promise => {
+ const filePath = join(getStoragePath(), `${taskId}${BACKGROUND_TASK_STORAGE.FILE_EXTENSION}`);
+ if (existsSync(filePath)) {
+ await unlink(filePath);
+ }
+};
+
+const loadPersistedTasks = async (): Promise => {
+ const storagePath = getStoragePath();
+ if (!existsSync(storagePath)) return;
+
+ const files = await readdir(storagePath);
+ const taskFiles = files.filter((f) => f.endsWith(BACKGROUND_TASK_STORAGE.FILE_EXTENSION));
+
+ for (const file of taskFiles) {
+ try {
+ const content = await readFile(join(storagePath, file), "utf-8");
+ const task = JSON.parse(content) as BackgroundTask;
+
+ // Re-queue pending/running tasks that were interrupted
+ if (task.status === "pending" || task.status === "running") {
+ const updatedTask: BackgroundTask = {
+ ...task,
+ status: "pending",
+ };
+ state.tasks.set(task.id, updatedTask);
+ state.queue.push(task.id);
+ } else {
+ state.tasks.set(task.id, task);
+ }
+ } catch {
+ // Skip corrupted task files
+ }
+ }
+};
+
+const notify = (taskId: string, type: TaskNotification["type"], message: string): void => {
+ const notification: TaskNotification = {
+ taskId,
+ type,
+ message,
+ timestamp: Date.now(),
+ };
+
+ state.notificationHandlers.forEach((handler) => handler(notification));
+};
+
+const createInitialProgress = (): TaskProgress => ({
+ current: 0,
+ total: 100,
+ percentage: 0,
+ message: "Starting...",
+ steps: [],
+});
+
+const processQueue = async (): Promise => {
+ while (
+ state.queue.length > 0 &&
+ state.running.length < state.config.maxConcurrent
+ ) {
+ // Sort by priority
+ state.queue.sort((a, b) => {
+ const taskA = state.tasks.get(a);
+ const taskB = state.tasks.get(b);
+ if (!taskA || !taskB) return 0;
+ return BACKGROUND_TASK_PRIORITIES[taskB.priority] - BACKGROUND_TASK_PRIORITIES[taskA.priority];
+ });
+
+ const taskId = state.queue.shift();
+ if (!taskId) continue;
+
+ const task = state.tasks.get(taskId);
+ if (!task) continue;
+
+ await executeTask(task);
+ }
+};
+
+const executeTask = async (task: BackgroundTask): Promise => {
+ const handler = state.handlers.get(task.name);
+ if (!handler) {
+ await updateTaskStatus(task.id, "failed", {
+ code: "HANDLER_NOT_FOUND",
+ message: `No handler registered for task: ${task.name}`,
+ recoverable: false,
+ });
+ return;
+ }
+
+ state.running.push(task.id);
+
+ const updatedTask: BackgroundTask = {
+ ...task,
+ status: "running",
+ startedAt: Date.now(),
+ };
+ state.tasks.set(task.id, updatedTask);
+ await persistTask(updatedTask);
+
+ notify(task.id, "started", BACKGROUND_TASK_MESSAGES.STARTED);
+
+ const updateProgress = (partial: Partial): void => {
+ const currentTask = state.tasks.get(task.id);
+ if (!currentTask) return;
+
+ const newProgress: TaskProgress = {
+ ...currentTask.progress,
+ ...partial,
+ percentage: partial.current !== undefined && partial.total !== undefined
+ ? Math.round((partial.current / partial.total) * 100)
+ : currentTask.progress.percentage,
+ };
+
+ const progressTask: BackgroundTask = {
+ ...currentTask,
+ progress: newProgress,
+ };
+ state.tasks.set(task.id, progressTask);
+
+ notify(task.id, "progress", newProgress.message);
+ };
+
+ try {
+ const result = await Promise.race([
+ handler(updatedTask, updateProgress),
+ new Promise((_, reject) =>
+ setTimeout(() => reject(new Error("Task timeout")), state.config.defaultTimeout)
+ ),
+ ]);
+
+ await completeTask(task.id, result);
+ } catch (error) {
+ const taskError: TaskError = {
+ code: "EXECUTION_ERROR",
+ message: error instanceof Error ? error.message : "Unknown error",
+ stack: error instanceof Error ? error.stack : undefined,
+ recoverable: true,
+ };
+
+ await updateTaskStatus(task.id, "failed", taskError);
+ } finally {
+ state.running = state.running.filter((id) => id !== task.id);
+ processQueue();
+ }
+};
+
+const completeTask = async (taskId: string, result: TaskResult): Promise => {
+ const task = state.tasks.get(taskId);
+ if (!task) return;
+
+ const completedTask: BackgroundTask = {
+ ...task,
+ status: "completed",
+ completedAt: Date.now(),
+ result,
+ progress: {
+ ...task.progress,
+ current: task.progress.total,
+ percentage: 100,
+ message: "Completed",
+ },
+ };
+
+ state.tasks.set(taskId, completedTask);
+ await persistTask(completedTask);
+
+ notify(taskId, "completed", BACKGROUND_TASK_MESSAGES.COMPLETED);
+};
+
+const updateTaskStatus = async (
+ taskId: string,
+ status: BackgroundTaskStatus,
+ error?: TaskError
+): Promise => {
+ const task = state.tasks.get(taskId);
+ if (!task) return;
+
+ const updatedTask: BackgroundTask = {
+ ...task,
+ status,
+ error,
+ completedAt: ["completed", "failed", "cancelled"].includes(status) ? Date.now() : undefined,
+ };
+
+ state.tasks.set(taskId, updatedTask);
+ await persistTask(updatedTask);
+
+ if (status === "failed") {
+ notify(taskId, "failed", error?.message || BACKGROUND_TASK_MESSAGES.FAILED);
+ }
+};
+
+// Public API
+
+export const initialize = async (config?: Partial): Promise => {
+ state.config = { ...DEFAULT_BACKGROUND_TASK_CONFIG, ...config };
+ await loadPersistedTasks();
+ processQueue();
+};
+
+export const registerHandler = (name: string, handler: TaskHandler): void => {
+ state.handlers.set(name, handler);
+};
+
+export const onNotification = (handler: NotificationHandler): () => void => {
+ state.notificationHandlers.push(handler);
+ return () => {
+ state.notificationHandlers = state.notificationHandlers.filter((h) => h !== handler);
+ };
+};
+
+export const createTask = async (
+ name: string,
+ description: string,
+ metadata: TaskMetadata,
+ priority: BackgroundTaskPriority = "normal"
+): Promise => {
+ const task: BackgroundTask = {
+ id: randomUUID(),
+ name,
+ description,
+ status: "pending",
+ priority,
+ createdAt: Date.now(),
+ progress: createInitialProgress(),
+ metadata,
+ };
+
+ state.tasks.set(task.id, task);
+ state.queue.push(task.id);
+
+ await persistTask(task);
+ processQueue();
+
+ return task;
+};
+
+export const cancelTask = async (taskId: string): Promise => {
+ const task = state.tasks.get(taskId);
+ if (!task) return false;
+
+ if (task.status === "running") {
+ await updateTaskStatus(taskId, "cancelled");
+ state.running = state.running.filter((id) => id !== taskId);
+ notify(taskId, "failed", BACKGROUND_TASK_MESSAGES.CANCELLED);
+ return true;
+ }
+
+ if (task.status === "pending") {
+ state.queue = state.queue.filter((id) => id !== taskId);
+ await updateTaskStatus(taskId, "cancelled");
+ return true;
+ }
+
+ return false;
+};
+
+export const pauseTask = async (taskId: string): Promise => {
+ const task = state.tasks.get(taskId);
+ if (!task || task.status !== "running") return false;
+
+ await updateTaskStatus(taskId, "paused");
+ state.running = state.running.filter((id) => id !== taskId);
+ notify(taskId, "progress", BACKGROUND_TASK_MESSAGES.PAUSED);
+ return true;
+};
+
+export const resumeTask = async (taskId: string): Promise => {
+ const task = state.tasks.get(taskId);
+ if (!task || task.status !== "paused") return false;
+
+ state.queue.unshift(taskId);
+ await updateTaskStatus(taskId, "pending");
+ notify(taskId, "progress", BACKGROUND_TASK_MESSAGES.RESUMED);
+ processQueue();
+ return true;
+};
+
+export const getTask = (taskId: string): BackgroundTask | undefined =>
+ state.tasks.get(taskId);
+
+export const listTasks = (filter?: { status?: BackgroundTaskStatus }): ReadonlyArray => {
+ let tasks = Array.from(state.tasks.values());
+
+ if (filter?.status) {
+ tasks = tasks.filter((t) => t.status === filter.status);
+ }
+
+ return tasks.sort((a, b) => b.createdAt - a.createdAt);
+};
+
+export const clearCompletedTasks = async (): Promise => {
+ const completed = Array.from(state.tasks.values()).filter(
+ (t) => t.status === "completed" || t.status === "failed" || t.status === "cancelled"
+ );
+
+ for (const task of completed) {
+ state.tasks.delete(task.id);
+ await removePersistedTask(task.id);
+ }
+
+ return completed.length;
+};
+
+export const getTaskStatusIcon = (status: BackgroundTaskStatus): string =>
+ BACKGROUND_TASK_STATUS_ICONS[status];
+
+export const formatTaskSummary = (task: BackgroundTask): string => {
+ const icon = getTaskStatusIcon(task.status);
+ const progress = task.status === "running" ? ` (${task.progress.percentage}%)` : "";
+ return `${icon} ${task.name}${progress} - ${task.description}`;
+};
+
+export const getQueueLength = (): number => state.queue.length;
+
+export const getRunningCount = (): number => state.running.length;
diff --git a/src/services/brain.ts b/src/services/brain.ts
new file mode 100644
index 0000000..dc90a05
--- /dev/null
+++ b/src/services/brain.ts
@@ -0,0 +1,688 @@
+/**
+ * Brain Service
+ *
+ * Business logic layer for the CodeTyper Brain integration.
+ * Provides context injection, knowledge recall, and learning capabilities.
+ */
+
+import fs from "fs/promises";
+import { DIRS, FILES } from "@constants/paths";
+import { BRAIN_DEFAULTS, BRAIN_ERRORS, BRAIN_DISABLED } from "@constants/brain";
+import * as brainApi from "@api/brain";
+import type {
+ BrainCredentials,
+ BrainState,
+ BrainConnectionStatus,
+ BrainUser,
+ BrainConcept,
+ BrainRecallResponse,
+ BrainExtractResponse,
+} from "@/types/brain";
+
+// ============================================================================
+// State Management (Singleton via Closure)
+// ============================================================================
+
+interface VarsFile {
+ brainApiKey?: string;
+ brainJwtToken?: string;
+}
+
+let brainState: BrainState = {
+ status: "disconnected",
+ user: null,
+ projectId: BRAIN_DEFAULTS.PROJECT_ID,
+ knowledgeCount: 0,
+ memoryCount: 0,
+ lastError: null,
+};
+
+let cachedCredentials: BrainCredentials | null = null;
+let cachedVars: VarsFile | null = null;
+
+// ============================================================================
+// Vars File Management
+// ============================================================================
+
+/**
+ * Load vars file from disk
+ */
+const loadVarsFile = async (): Promise => {
+ if (cachedVars) {
+ return cachedVars;
+ }
+
+ try {
+ const data = await fs.readFile(FILES.vars, "utf-8");
+ cachedVars = JSON.parse(data) as VarsFile;
+ return cachedVars;
+ } catch {
+ return {};
+ }
+};
+
+/**
+ * Save vars file to disk
+ */
+const saveVarsFile = async (vars: VarsFile): Promise => {
+ try {
+ await fs.mkdir(DIRS.config, { recursive: true });
+ await fs.writeFile(FILES.vars, JSON.stringify(vars, null, 2), "utf-8");
+ cachedVars = vars;
+ } catch (error) {
+ throw new Error(`Failed to save vars file: ${error}`);
+ }
+};
+
+// ============================================================================
+// Credentials Management
+// ============================================================================
+
+/**
+ * Get path to brain credentials file
+ */
+const getCredentialsPath = (): string => {
+ return `${DIRS.data}/brain-credentials.json`;
+};
+
+/**
+ * Load brain credentials from disk
+ */
+export const loadCredentials = async (): Promise => {
+ if (cachedCredentials) {
+ return cachedCredentials;
+ }
+
+ try {
+ const data = await fs.readFile(getCredentialsPath(), "utf-8");
+ cachedCredentials = JSON.parse(data) as BrainCredentials;
+ return cachedCredentials;
+ } catch {
+ return null;
+ }
+};
+
+/**
+ * Save brain credentials to disk
+ */
+export const saveCredentials = async (
+ credentials: BrainCredentials,
+): Promise => {
+ try {
+ await fs.mkdir(DIRS.data, { recursive: true });
+ await fs.writeFile(
+ getCredentialsPath(),
+ JSON.stringify(credentials, null, 2),
+ "utf-8",
+ );
+ cachedCredentials = credentials;
+ } catch (error) {
+ throw new Error(`Failed to save brain credentials: ${error}`);
+ }
+};
+
+/**
+ * Clear brain credentials
+ */
+export const clearCredentials = async (): Promise => {
+ try {
+ await fs.unlink(getCredentialsPath());
+ cachedCredentials = null;
+ } catch {
+ // File may not exist, ignore
+ }
+
+ // Also clear vars file entries
+ try {
+ const vars = await loadVarsFile();
+ await saveVarsFile({
+ ...vars,
+ brainApiKey: undefined,
+ brainJwtToken: undefined,
+ });
+ } catch {
+ // Ignore errors
+ }
+};
+
+/**
+ * Get API key from vars file or environment
+ */
+export const getApiKey = async (): Promise => {
+ // First check environment variable
+ const envKey = process.env.CODETYPER_BRAIN_API_KEY;
+ if (envKey) {
+ return envKey;
+ }
+
+ // Then check vars file
+ const vars = await loadVarsFile();
+ return vars.brainApiKey;
+};
+
+/**
+ * Get JWT token from vars file
+ */
+export const getJwtToken = async (): Promise => {
+ const vars = await loadVarsFile();
+ return vars.brainJwtToken;
+};
+
+/**
+ * Set API key in vars file
+ */
+export const setApiKey = async (apiKey: string): Promise => {
+ const vars = await loadVarsFile();
+ await saveVarsFile({ ...vars, brainApiKey: apiKey });
+};
+
+/**
+ * Set JWT token in vars file
+ */
+export const setJwtToken = async (jwtToken: string): Promise => {
+ const vars = await loadVarsFile();
+ await saveVarsFile({ ...vars, brainJwtToken: jwtToken });
+};
+
+// ============================================================================
+// Authentication
+// ============================================================================
+
+/**
+ * Login to Brain service
+ */
+export const login = async (
+ email: string,
+ password: string,
+): Promise<{ success: boolean; user?: BrainUser; error?: string }> => {
+ try {
+ updateState({ status: "connecting" });
+
+ const response = await brainApi.login(email, password);
+
+ if (response.success && response.data) {
+ const credentials: BrainCredentials = {
+ accessToken: response.data.access_token,
+ refreshToken: response.data.refresh_token,
+ expiresAt: response.data.expires_at,
+ user: response.data.user,
+ };
+
+ await saveCredentials(credentials);
+
+ updateState({
+ status: "connected",
+ user: response.data.user,
+ lastError: null,
+ });
+
+ return { success: true, user: response.data.user };
+ }
+
+ updateState({ status: "error", lastError: "Login failed" });
+ return { success: false, error: "Login failed" };
+ } catch (error) {
+ const errorMessage =
+ error instanceof Error ? error.message : "Unknown error";
+ updateState({ status: "error", lastError: errorMessage });
+ return { success: false, error: errorMessage };
+ }
+};
+
+/**
+ * Register a new account
+ */
+export const register = async (
+ email: string,
+ password: string,
+ displayName: string,
+): Promise<{ success: boolean; user?: BrainUser; error?: string }> => {
+ try {
+ updateState({ status: "connecting" });
+
+ const response = await brainApi.register(email, password, displayName);
+
+ if (response.success && response.data) {
+ const credentials: BrainCredentials = {
+ accessToken: response.data.access_token,
+ refreshToken: response.data.refresh_token,
+ expiresAt: response.data.expires_at,
+ user: response.data.user,
+ };
+
+ await saveCredentials(credentials);
+
+ updateState({
+ status: "connected",
+ user: response.data.user,
+ lastError: null,
+ });
+
+ return { success: true, user: response.data.user };
+ }
+
+ updateState({ status: "error", lastError: "Registration failed" });
+ return { success: false, error: "Registration failed" };
+ } catch (error) {
+ const errorMessage =
+ error instanceof Error ? error.message : "Unknown error";
+ updateState({ status: "error", lastError: errorMessage });
+ return { success: false, error: errorMessage };
+ }
+};
+
+/**
+ * Logout from Brain service
+ */
+export const logout = async (): Promise => {
+ try {
+ const credentials = await loadCredentials();
+ if (credentials?.refreshToken) {
+ await brainApi.logout(credentials.refreshToken);
+ }
+ } catch {
+ // Ignore logout errors
+ } finally {
+ await clearCredentials();
+ updateState({
+ status: "disconnected",
+ user: null,
+ knowledgeCount: 0,
+ memoryCount: 0,
+ });
+ }
+};
+
+// ============================================================================
+// Connection Management
+// ============================================================================
+
+/**
+ * Get authentication token (API key or JWT token)
+ */
+export const getAuthToken = async (): Promise => {
+ const apiKey = await getApiKey();
+ if (apiKey) {
+ return apiKey;
+ }
+ return getJwtToken();
+};
+
+/**
+ * Check if Brain service is available and connect
+ */
+export const connect = async (): Promise => {
+ // Skip connection when Brain is disabled
+ if (BRAIN_DISABLED) {
+ return false;
+ }
+
+ try {
+ updateState({ status: "connecting" });
+
+ // First check if service is healthy
+ await brainApi.checkHealth();
+
+ // Then check if we have valid credentials (API key or JWT token)
+ const authToken = await getAuthToken();
+ if (!authToken) {
+ updateState({ status: "disconnected", lastError: null });
+ return false;
+ }
+
+ // Try to get stats to verify credentials are valid
+ const projectId = brainState.projectId ?? BRAIN_DEFAULTS.PROJECT_ID;
+ const statsResponse = await brainApi.getKnowledgeStats(projectId, authToken);
+
+ if (statsResponse.success && statsResponse.data) {
+ updateState({
+ status: "connected",
+ knowledgeCount: statsResponse.data.total_concepts,
+ lastError: null,
+ });
+
+ // Also try to get memory stats
+ try {
+ const memoryStats = await brainApi.getMemoryStats(authToken);
+ updateState({ memoryCount: memoryStats.totalNodes });
+ } catch {
+ // Memory stats are optional
+ }
+
+ return true;
+ }
+
+ updateState({ status: "error", lastError: BRAIN_ERRORS.INVALID_API_KEY });
+ return false;
+ } catch (error) {
+ const errorMessage =
+ error instanceof Error ? error.message : BRAIN_ERRORS.CONNECTION_FAILED;
+ updateState({ status: "error", lastError: errorMessage });
+ return false;
+ }
+};
+
+/**
+ * Disconnect from Brain service
+ */
+export const disconnect = (): void => {
+ updateState({
+ status: "disconnected",
+ knowledgeCount: 0,
+ memoryCount: 0,
+ lastError: null,
+ });
+};
+
+/**
+ * Check if connected to Brain
+ */
+export const isConnected = (): boolean => {
+ if (BRAIN_DISABLED) return false;
+ return brainState.status === "connected";
+};
+
+// ============================================================================
+// Knowledge Operations
+// ============================================================================
+
+/**
+ * Recall relevant knowledge for a query
+ */
+export const recall = async (
+ query: string,
+ limit = 5,
+): Promise => {
+ if (!isConnected()) {
+ return null;
+ }
+
+ try {
+ const apiKey = await getApiKey();
+ if (!apiKey) {
+ return null;
+ }
+
+ const response = await brainApi.recallKnowledge(
+ {
+ query,
+ project_id: brainState.projectId ?? BRAIN_DEFAULTS.PROJECT_ID,
+ limit,
+ },
+ apiKey,
+ );
+
+ return response;
+ } catch (error) {
+ const errorMessage =
+ error instanceof Error ? error.message : BRAIN_ERRORS.RECALL_FAILED;
+ updateState({ lastError: errorMessage });
+ return null;
+ }
+};
+
+/**
+ * Get context string for prompt injection
+ */
+export const getContext = async (
+ query: string,
+ maxConcepts = 3,
+): Promise => {
+ if (!isConnected()) {
+ return null;
+ }
+
+ try {
+ const apiKey = await getApiKey();
+ if (!apiKey) {
+ return null;
+ }
+
+ const response = await brainApi.buildContext(
+ {
+ query,
+ project_id: brainState.projectId ?? BRAIN_DEFAULTS.PROJECT_ID,
+ max_concepts: maxConcepts,
+ },
+ apiKey,
+ );
+
+ if (response.success && response.data.has_knowledge) {
+ return response.data.context;
+ }
+
+ return null;
+ } catch {
+ return null;
+ }
+};
+
+/**
+ * Learn a concept
+ */
+export const learn = async (
+ name: string,
+ whatItDoes: string,
+ options?: {
+ howItWorks?: string;
+ patterns?: string[];
+ files?: string[];
+ keyFunctions?: string[];
+ aliases?: string[];
+ },
+): Promise => {
+ if (!isConnected()) {
+ return null;
+ }
+
+ try {
+ const apiKey = await getApiKey();
+ if (!apiKey) {
+ return null;
+ }
+
+ const response = await brainApi.learnConcept(
+ {
+ project_id: brainState.projectId ?? BRAIN_DEFAULTS.PROJECT_ID,
+ name,
+ what_it_does: whatItDoes,
+ how_it_works: options?.howItWorks,
+ patterns: options?.patterns,
+ files: options?.files,
+ key_functions: options?.keyFunctions,
+ aliases: options?.aliases,
+ },
+ apiKey,
+ );
+
+ if (response.success && response.data) {
+ // Update knowledge count
+ updateState({ knowledgeCount: brainState.knowledgeCount + 1 });
+ return response.data;
+ }
+
+ return null;
+ } catch (error) {
+ const errorMessage =
+ error instanceof Error ? error.message : BRAIN_ERRORS.LEARN_FAILED;
+ updateState({ lastError: errorMessage });
+ return null;
+ }
+};
+
+/**
+ * Extract and learn concepts from content
+ */
+export const extractAndLearn = async (
+ content: string,
+ source = "conversation",
+): Promise => {
+ if (!isConnected()) {
+ return null;
+ }
+
+ try {
+ const apiKey = await getApiKey();
+ if (!apiKey) {
+ return null;
+ }
+
+ const response = await brainApi.extractConcepts(
+ {
+ content,
+ project_id: brainState.projectId ?? BRAIN_DEFAULTS.PROJECT_ID,
+ source,
+ },
+ apiKey,
+ );
+
+ if (response.success) {
+ // Update knowledge count
+ const newCount =
+ brainState.knowledgeCount + response.data.stored + response.data.updated;
+ updateState({ knowledgeCount: newCount });
+ return response;
+ }
+
+ return null;
+ } catch (error) {
+ const errorMessage =
+ error instanceof Error ? error.message : BRAIN_ERRORS.EXTRACT_FAILED;
+ updateState({ lastError: errorMessage });
+ return null;
+ }
+};
+
+// ============================================================================
+// Memory Operations
+// ============================================================================
+
+/**
+ * Search memories
+ */
+export const searchMemories = async (
+ query: string,
+ limit = 10,
+): Promise<{ memories: Array<{ content: string; similarity: number }> } | null> => {
+ if (!isConnected()) {
+ return null;
+ }
+
+ try {
+ const apiKey = await getApiKey();
+ if (!apiKey) {
+ return null;
+ }
+
+ const response = await brainApi.searchMemories(
+ {
+ query,
+ limit,
+ project_id: brainState.projectId ?? BRAIN_DEFAULTS.PROJECT_ID,
+ },
+ apiKey,
+ );
+
+ return {
+ memories: response.memories.map((m) => ({
+ content: m.content,
+ similarity: m.similarity ?? 0,
+ })),
+ };
+ } catch {
+ return null;
+ }
+};
+
+/**
+ * Store a memory
+ */
+export const storeMemory = async (
+ content: string,
+ type: "fact" | "pattern" | "correction" | "preference" | "context" = "context",
+): Promise => {
+ if (!isConnected()) {
+ return false;
+ }
+
+ try {
+ const apiKey = await getApiKey();
+ if (!apiKey) {
+ return false;
+ }
+
+ const response = await brainApi.storeMemory(
+ {
+ content,
+ type,
+ project_id: brainState.projectId ?? BRAIN_DEFAULTS.PROJECT_ID,
+ },
+ apiKey,
+ );
+
+ if (response.success) {
+ updateState({ memoryCount: brainState.memoryCount + 1 });
+ return true;
+ }
+
+ return false;
+ } catch {
+ return false;
+ }
+};
+
+// ============================================================================
+// State Accessors
+// ============================================================================
+
+/**
+ * Get current brain state
+ */
+export const getState = (): BrainState => {
+ return { ...brainState };
+};
+
+/**
+ * Update brain state
+ */
+const updateState = (updates: Partial): void => {
+ brainState = { ...brainState, ...updates };
+};
+
+/**
+ * Set project ID
+ */
+export const setProjectId = (projectId: number): void => {
+ updateState({ projectId });
+};
+
+/**
+ * Get connection status
+ */
+export const getStatus = (): BrainConnectionStatus => {
+ return brainState.status;
+};
+
+/**
+ * Check if authenticated (has API key or JWT token)
+ */
+export const isAuthenticated = async (): Promise => {
+ const apiKey = await getApiKey();
+ const jwtToken = await getJwtToken();
+ return apiKey !== undefined || jwtToken !== undefined;
+};
+
+// ============================================================================
+// Initialization
+// ============================================================================
+
+/**
+ * Initialize brain service (auto-connect if credentials available)
+ */
+export const initialize = async (): Promise => {
+ const hasAuth = await isAuthenticated();
+ if (hasAuth) {
+ return connect();
+ }
+ return false;
+};
diff --git a/src/services/brain/cloud-sync.ts b/src/services/brain/cloud-sync.ts
new file mode 100644
index 0000000..52aa931
--- /dev/null
+++ b/src/services/brain/cloud-sync.ts
@@ -0,0 +1,523 @@
+/**
+ * Cloud Sync Service
+ *
+ * Handles push/pull synchronization with the cloud brain service.
+ */
+
+import {
+ CLOUD_BRAIN_DEFAULTS,
+ CLOUD_ENDPOINTS,
+ CLOUD_ERRORS,
+ CLOUD_MESSAGES,
+ CLOUD_HTTP_CONFIG,
+ SYNC_CONFIG,
+} from "@constants/brain-cloud";
+import {
+ enqueue,
+ enqueueBatch,
+ dequeue,
+ markProcessed,
+ markFailed,
+ hasQueuedItems,
+ getQueueSize,
+ clearQueue,
+} from "@services/brain/offline-queue";
+import {
+ createConflict,
+ resolveAllConflicts,
+ getPendingConflicts,
+ hasUnresolvedConflicts,
+ clearResolvedConflicts,
+} from "@services/brain/conflict-resolver";
+import type {
+ BrainSyncState,
+ CloudBrainConfig,
+ SyncItem,
+ SyncResult,
+ SyncOptions,
+ PushRequest,
+ PushResponse,
+ PullRequest,
+ PullResponse,
+} from "@/types/brain-cloud";
+
+// Sync state
+let syncState: BrainSyncState = {
+ status: "synced",
+ lastSyncAt: null,
+ lastPushAt: null,
+ lastPullAt: null,
+ pendingChanges: 0,
+ conflictCount: 0,
+ syncErrors: [],
+};
+
+// Cloud configuration
+let cloudConfig: CloudBrainConfig = { ...CLOUD_BRAIN_DEFAULTS };
+
+// Sync lock to prevent concurrent syncs
+let syncInProgress = false;
+
+// Local version tracking
+let localVersion = 0;
+
+/**
+ * Configure cloud sync
+ */
+export const configure = (config: Partial): void => {
+ cloudConfig = { ...cloudConfig, ...config };
+};
+
+/**
+ * Get current sync state
+ */
+export const getSyncState = (): BrainSyncState => ({ ...syncState });
+
+/**
+ * Get cloud configuration
+ */
+export const getConfig = (): CloudBrainConfig => ({ ...cloudConfig });
+
+/**
+ * Check if cloud sync is enabled
+ */
+export const isEnabled = (): boolean => cloudConfig.enabled;
+
+/**
+ * Check if device is online
+ */
+const isOnline = (): boolean => {
+ // In Node.js/Bun, we'll assume online unless proven otherwise
+ return true;
+};
+
+/**
+ * Perform a full sync (push then pull)
+ */
+export const sync = async (
+ authToken: string,
+ projectId: number,
+ options: SyncOptions = {},
+): Promise => {
+ if (!cloudConfig.enabled) {
+ throw new Error(CLOUD_ERRORS.NOT_CONFIGURED);
+ }
+
+ if (syncInProgress) {
+ throw new Error(CLOUD_ERRORS.SYNC_IN_PROGRESS);
+ }
+
+ if (!isOnline()) {
+ syncState.status = "offline";
+ throw new Error(CLOUD_ERRORS.OFFLINE);
+ }
+
+ syncInProgress = true;
+ syncState.status = "syncing";
+ syncState.syncErrors = [];
+
+ const startTime = Date.now();
+ const result: SyncResult = {
+ success: true,
+ direction: options.direction ?? "both",
+ itemsSynced: 0,
+ itemsFailed: 0,
+ conflicts: [],
+ errors: [],
+ duration: 0,
+ timestamp: startTime,
+ };
+
+ try {
+ const direction = options.direction ?? "both";
+
+ // Push local changes
+ if (direction === "push" || direction === "both") {
+ options.onProgress?.({
+ phase: "pushing",
+ current: 0,
+ total: await getQueueSize(),
+ message: CLOUD_MESSAGES.STARTING_SYNC,
+ });
+
+ const pushResult = await pushChanges(authToken, projectId, options);
+ result.itemsSynced += pushResult.itemsSynced;
+ result.itemsFailed += pushResult.itemsFailed;
+ result.conflicts.push(...pushResult.conflicts);
+ result.errors.push(...pushResult.errors);
+
+ if (pushResult.errors.length > 0) {
+ result.success = false;
+ }
+ }
+
+ // Pull remote changes
+ if (direction === "pull" || direction === "both") {
+ options.onProgress?.({
+ phase: "pulling",
+ current: 0,
+ total: 0,
+ message: CLOUD_MESSAGES.PULLING(0),
+ });
+
+ const pullResult = await pullChanges(authToken, projectId, options);
+ result.itemsSynced += pullResult.itemsSynced;
+ result.itemsFailed += pullResult.itemsFailed;
+ result.conflicts.push(...pullResult.conflicts);
+ result.errors.push(...pullResult.errors);
+
+ if (pullResult.errors.length > 0) {
+ result.success = false;
+ }
+ }
+
+ // Handle conflicts if any
+ if (result.conflicts.length > 0) {
+ options.onProgress?.({
+ phase: "resolving",
+ current: 0,
+ total: result.conflicts.length,
+ message: CLOUD_MESSAGES.RESOLVING_CONFLICTS(result.conflicts.length),
+ });
+
+ const strategy = options.conflictStrategy ?? cloudConfig.conflictStrategy;
+
+ if (strategy !== "manual") {
+ resolveAllConflicts(strategy);
+ result.conflicts = getPendingConflicts();
+ }
+
+ if (hasUnresolvedConflicts()) {
+ syncState.status = "conflict";
+ syncState.conflictCount = result.conflicts.length;
+ }
+ }
+
+ // Update state
+ result.duration = Date.now() - startTime;
+
+ if (result.success && result.conflicts.length === 0) {
+ syncState.status = "synced";
+ syncState.lastSyncAt = Date.now();
+ } else if (result.conflicts.length > 0) {
+ syncState.status = "conflict";
+ } else {
+ syncState.status = "error";
+ }
+
+ syncState.pendingChanges = await getQueueSize();
+ syncState.syncErrors = result.errors;
+
+ options.onProgress?.({
+ phase: "completing",
+ current: result.itemsSynced,
+ total: result.itemsSynced,
+ message: CLOUD_MESSAGES.SYNC_COMPLETE,
+ });
+
+ return result;
+ } catch (error) {
+ const message = error instanceof Error ? error.message : String(error);
+ syncState.status = "error";
+ syncState.syncErrors.push(message);
+
+ result.success = false;
+ result.errors.push(message);
+ result.duration = Date.now() - startTime;
+
+ return result;
+ } finally {
+ syncInProgress = false;
+ clearResolvedConflicts();
+ }
+};
+
+/**
+ * Push local changes to cloud
+ */
+const pushChanges = async (
+ authToken: string,
+ projectId: number,
+ options: SyncOptions,
+): Promise> => {
+ const result = {
+ success: true,
+ itemsSynced: 0,
+ itemsFailed: 0,
+ conflicts: [] as SyncResult["conflicts"],
+ errors: [] as string[],
+ duration: 0,
+ };
+
+ // Get queued items
+ const queuedItems = await dequeue(SYNC_CONFIG.MAX_BATCH_SIZE);
+
+ if (queuedItems.length === 0) {
+ return result;
+ }
+
+ options.onProgress?.({
+ phase: "pushing",
+ current: 0,
+ total: queuedItems.length,
+ message: CLOUD_MESSAGES.PUSHING(queuedItems.length),
+ });
+
+ const items = queuedItems.map((q) => q.item);
+
+ try {
+ const response = await pushToCloud(authToken, projectId, items);
+
+ if (response.success) {
+ result.itemsSynced = response.accepted;
+ result.itemsFailed = response.rejected;
+
+ // Mark successful items as processed
+ const successIds = queuedItems
+ .slice(0, response.accepted)
+ .map((q) => q.id);
+ await markProcessed(successIds);
+
+ // Handle conflicts
+ for (const conflict of response.conflicts) {
+ result.conflicts.push(conflict);
+ }
+
+ syncState.lastPushAt = Date.now();
+ } else {
+ result.success = false;
+ result.errors.push(...(response.errors ?? []));
+
+ // Mark all as failed
+ await markFailed(
+ queuedItems.map((q) => q.id),
+ response.errors?.[0],
+ );
+ }
+ } catch (error) {
+ const message = error instanceof Error ? error.message : String(error);
+ result.success = false;
+ result.errors.push(CLOUD_ERRORS.PUSH_FAILED(message));
+
+ // Queue for retry
+ await markFailed(
+ queuedItems.map((q) => q.id),
+ message,
+ );
+ }
+
+ return result;
+};
+
+/**
+ * Pull remote changes from cloud
+ */
+const pullChanges = async (
+ authToken: string,
+ projectId: number,
+ options: SyncOptions,
+): Promise> => {
+ const result = {
+ success: true,
+ itemsSynced: 0,
+ itemsFailed: 0,
+ conflicts: [] as SyncResult["conflicts"],
+ errors: [] as string[],
+ duration: 0,
+ };
+
+ try {
+ const response = await pullFromCloud(
+ authToken,
+ projectId,
+ localVersion,
+ syncState.lastPullAt ?? 0,
+ );
+
+ if (response.success) {
+ options.onProgress?.({
+ phase: "pulling",
+ current: response.items.length,
+ total: response.items.length,
+ message: CLOUD_MESSAGES.PULLING(response.items.length),
+ });
+
+ // Process pulled items
+ for (const item of response.items) {
+ // Check for conflicts with local changes
+ const hasConflict = await checkLocalConflict(item);
+
+ if (hasConflict) {
+ // Create conflict entry
+ const localItem = await getLocalItem(item.id, item.type);
+ if (localItem) {
+ const conflict = createConflict(localItem, item);
+ result.conflicts.push(conflict);
+ }
+ } else {
+ // Apply remote change locally
+ await applyRemoteChange(item);
+ result.itemsSynced++;
+ }
+ }
+
+ // Update local version
+ localVersion = response.serverVersion;
+ syncState.lastPullAt = Date.now();
+ } else {
+ result.success = false;
+ result.errors.push(...(response.errors ?? []));
+ }
+ } catch (error) {
+ const message = error instanceof Error ? error.message : String(error);
+ result.success = false;
+ result.errors.push(CLOUD_ERRORS.PULL_FAILED(message));
+ }
+
+ return result;
+};
+
+/**
+ * Push items to cloud API
+ */
+const pushToCloud = async (
+ authToken: string,
+ projectId: number,
+ items: SyncItem[],
+): Promise => {
+ const url = `${cloudConfig.endpoint}${CLOUD_ENDPOINTS.PUSH}`;
+
+ const request: PushRequest = {
+ items,
+ projectId,
+ clientVersion: "1.0.0",
+ };
+
+ const response = await fetch(url, {
+ method: "POST",
+ headers: {
+ ...CLOUD_HTTP_CONFIG.HEADERS,
+ Authorization: `Bearer ${authToken}`,
+ },
+ body: JSON.stringify(request),
+ signal: AbortSignal.timeout(CLOUD_HTTP_CONFIG.TIMEOUT_MS),
+ });
+
+ if (!response.ok) {
+ throw new Error(`HTTP ${response.status}: ${response.statusText}`);
+ }
+
+ return response.json() as Promise;
+};
+
+/**
+ * Pull items from cloud API
+ */
+const pullFromCloud = async (
+ authToken: string,
+ projectId: number,
+ sinceVersion: number,
+ sinceTimestamp: number,
+): Promise => {
+ const url = `${cloudConfig.endpoint}${CLOUD_ENDPOINTS.PULL}`;
+
+ const request: PullRequest = {
+ projectId,
+ sinceVersion,
+ sinceTimestamp,
+ limit: SYNC_CONFIG.MAX_BATCH_SIZE,
+ };
+
+ const response = await fetch(url, {
+ method: "POST",
+ headers: {
+ ...CLOUD_HTTP_CONFIG.HEADERS,
+ Authorization: `Bearer ${authToken}`,
+ },
+ body: JSON.stringify(request),
+ signal: AbortSignal.timeout(CLOUD_HTTP_CONFIG.TIMEOUT_MS),
+ });
+
+ if (!response.ok) {
+ throw new Error(`HTTP ${response.status}: ${response.statusText}`);
+ }
+
+ return response.json() as Promise;
+};
+
+/**
+ * Check if pulled item conflicts with local changes
+ */
+const checkLocalConflict = async (
+ _item: SyncItem,
+): Promise => {
+ // Check if we have pending changes for this item
+ const queued = await hasQueuedItems();
+ return queued;
+};
+
+/**
+ * Get local item by ID and type
+ */
+const getLocalItem = async (
+ _id: string,
+ _type: "concept" | "memory" | "relation",
+): Promise => {
+ // This would retrieve the local item from the brain service
+ // Placeholder implementation
+ return null;
+};
+
+/**
+ * Apply a remote change locally
+ */
+const applyRemoteChange = async (_item: SyncItem): Promise => {
+ // This would apply the change to the local brain storage
+ // Placeholder implementation
+};
+
+/**
+ * Queue a change for sync
+ */
+export const queueChange = async (item: SyncItem): Promise => {
+ await enqueue(item);
+ syncState.pendingChanges = await getQueueSize();
+ syncState.status = "pending";
+};
+
+/**
+ * Queue multiple changes
+ */
+export const queueChanges = async (items: SyncItem[]): Promise => {
+ const added = await enqueueBatch(items);
+ syncState.pendingChanges = await getQueueSize();
+ syncState.status = "pending";
+ return added;
+};
+
+/**
+ * Force sync now
+ */
+export const syncNow = async (
+ authToken: string,
+ projectId: number,
+): Promise => {
+ return sync(authToken, projectId, { force: true });
+};
+
+/**
+ * Reset sync state
+ */
+export const resetSyncState = async (): Promise => {
+ await clearQueue();
+ syncState = {
+ status: "synced",
+ lastSyncAt: null,
+ lastPushAt: null,
+ lastPullAt: null,
+ pendingChanges: 0,
+ conflictCount: 0,
+ syncErrors: [],
+ };
+ localVersion = 0;
+};
diff --git a/src/services/brain/conflict-resolver.ts b/src/services/brain/conflict-resolver.ts
new file mode 100644
index 0000000..9028672
--- /dev/null
+++ b/src/services/brain/conflict-resolver.ts
@@ -0,0 +1,249 @@
+/**
+ * Conflict Resolver
+ *
+ * Handles sync conflicts between local and remote brain data.
+ */
+
+import {
+ CONFLICT_LABELS,
+} from "@constants/brain-cloud";
+import type {
+ SyncConflict,
+ ConflictStrategy,
+ SyncItem,
+} from "@/types/brain-cloud";
+
+// In-memory conflict storage
+const pendingConflicts = new Map();
+
+/**
+ * Create a conflict from local and remote items
+ */
+export const createConflict = (
+ localItem: SyncItem,
+ remoteItem: SyncItem,
+): SyncConflict => {
+ const conflict: SyncConflict = {
+ id: generateConflictId(),
+ itemId: localItem.id,
+ itemType: localItem.type,
+ localData: localItem.data,
+ remoteData: remoteItem.data,
+ localVersion: localItem.localVersion,
+ remoteVersion: remoteItem.remoteVersion ?? 0,
+ localTimestamp: localItem.timestamp,
+ remoteTimestamp: remoteItem.timestamp,
+ resolved: false,
+ };
+
+ pendingConflicts.set(conflict.id, conflict);
+ return conflict;
+};
+
+/**
+ * Resolve a conflict using the specified strategy
+ */
+export const resolveConflict = (
+ conflictId: string,
+ strategy: ConflictStrategy,
+): SyncConflict | null => {
+ const conflict = pendingConflicts.get(conflictId);
+ if (!conflict) return null;
+
+ const resolver = resolvers[strategy];
+ const resolvedData = resolver(conflict);
+
+ conflict.resolved = true;
+ conflict.resolution = strategy;
+ conflict.resolvedData = resolvedData;
+
+ return conflict;
+};
+
+/**
+ * Resolve all pending conflicts with a single strategy
+ */
+export const resolveAllConflicts = (
+ strategy: ConflictStrategy,
+): SyncConflict[] => {
+ const resolved: SyncConflict[] = [];
+
+ for (const [id, conflict] of pendingConflicts) {
+ if (!conflict.resolved) {
+ const result = resolveConflict(id, strategy);
+ if (result) {
+ resolved.push(result);
+ }
+ }
+ }
+
+ return resolved;
+};
+
+/**
+ * Conflict resolution strategies
+ */
+const resolvers: Record unknown> = {
+ "local-wins": (conflict) => conflict.localData,
+
+ "remote-wins": (conflict) => conflict.remoteData,
+
+ manual: (_conflict) => {
+ // Manual resolution returns null - requires user input
+ return null;
+ },
+
+ merge: (conflict) => {
+ // Attempt to merge the data
+ return mergeData(conflict.localData, conflict.remoteData);
+ },
+};
+
+/**
+ * Attempt to merge two data objects
+ */
+const mergeData = (local: unknown, remote: unknown): unknown => {
+ // If both are objects, merge their properties
+ if (isObject(local) && isObject(remote)) {
+ const localObj = local as Record;
+ const remoteObj = remote as Record;
+
+ const merged: Record = { ...remoteObj };
+
+ for (const key of Object.keys(localObj)) {
+ // Local wins for non-timestamp fields that differ
+ if (key !== "updatedAt" && key !== "timestamp") {
+ merged[key] = localObj[key];
+ }
+ }
+
+ // Use most recent timestamp
+ const localTime = (localObj.updatedAt ?? localObj.timestamp ?? 0) as number;
+ const remoteTime = (remoteObj.updatedAt ?? remoteObj.timestamp ?? 0) as number;
+ merged.updatedAt = Math.max(localTime, remoteTime);
+
+ return merged;
+ }
+
+ // For non-objects, prefer local (or most recent)
+ return local;
+};
+
+/**
+ * Check if value is an object
+ */
+const isObject = (value: unknown): value is Record => {
+ return typeof value === "object" && value !== null && !Array.isArray(value);
+};
+
+/**
+ * Get pending conflicts
+ */
+export const getPendingConflicts = (): SyncConflict[] => {
+ return Array.from(pendingConflicts.values()).filter((c) => !c.resolved);
+};
+
+/**
+ * Get all conflicts
+ */
+export const getAllConflicts = (): SyncConflict[] => {
+ return Array.from(pendingConflicts.values());
+};
+
+/**
+ * Get conflict by ID
+ */
+export const getConflict = (id: string): SyncConflict | undefined => {
+ return pendingConflicts.get(id);
+};
+
+/**
+ * Clear resolved conflicts
+ */
+export const clearResolvedConflicts = (): number => {
+ let cleared = 0;
+
+ for (const [id, conflict] of pendingConflicts) {
+ if (conflict.resolved) {
+ pendingConflicts.delete(id);
+ cleared++;
+ }
+ }
+
+ return cleared;
+};
+
+/**
+ * Clear all conflicts
+ */
+export const clearAllConflicts = (): void => {
+ pendingConflicts.clear();
+};
+
+/**
+ * Get conflict count
+ */
+export const getConflictCount = (): number => {
+ return getPendingConflicts().length;
+};
+
+/**
+ * Check if there are unresolved conflicts
+ */
+export const hasUnresolvedConflicts = (): boolean => {
+ return getPendingConflicts().length > 0;
+};
+
+/**
+ * Get suggested resolution for a conflict
+ */
+export const suggestResolution = (conflict: SyncConflict): ConflictStrategy => {
+ // If remote is newer, suggest remote-wins
+ if (conflict.remoteTimestamp > conflict.localTimestamp) {
+ return "remote-wins";
+ }
+
+ // If local is newer, suggest local-wins
+ if (conflict.localTimestamp > conflict.remoteTimestamp) {
+ return "local-wins";
+ }
+
+ // If timestamps are equal, try merge
+ return "merge";
+};
+
+/**
+ * Format conflict for display
+ */
+export const formatConflict = (conflict: SyncConflict): string => {
+ const lines: string[] = [];
+
+ lines.push(`**Conflict: ${conflict.itemId}**`);
+ lines.push(`Type: ${conflict.itemType}`);
+ lines.push(`Local version: ${conflict.localVersion}`);
+ lines.push(`Remote version: ${conflict.remoteVersion}`);
+ lines.push("");
+ lines.push("Local data:");
+ lines.push("```json");
+ lines.push(JSON.stringify(conflict.localData, null, 2));
+ lines.push("```");
+ lines.push("");
+ lines.push("Remote data:");
+ lines.push("```json");
+ lines.push(JSON.stringify(conflict.remoteData, null, 2));
+ lines.push("```");
+
+ if (conflict.resolved) {
+ lines.push("");
+ lines.push(`Resolution: ${CONFLICT_LABELS[conflict.resolution!]}`);
+ }
+
+ return lines.join("\n");
+};
+
+/**
+ * Generate unique conflict ID
+ */
+const generateConflictId = (): string => {
+ return `conflict_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`;
+};
diff --git a/src/services/brain/mcp-server.ts b/src/services/brain/mcp-server.ts
new file mode 100644
index 0000000..0e3a99a
--- /dev/null
+++ b/src/services/brain/mcp-server.ts
@@ -0,0 +1,354 @@
+/**
+ * Brain MCP Server service
+ * Exposes Brain as an MCP server for external tools
+ */
+
+import { createServer, type Server, type IncomingMessage, type ServerResponse } from "node:http";
+import { randomUUID } from "node:crypto";
+
+import type {
+ BrainMcpServerConfig,
+ BrainMcpRequest,
+ BrainMcpResponse,
+ BrainMcpServerStatus,
+ BrainMcpToolName,
+ McpContent,
+ McpError,
+} from "@src/types/brain-mcp";
+import {
+ DEFAULT_BRAIN_MCP_SERVER_CONFIG,
+ BRAIN_MCP_TOOLS,
+ MCP_ERROR_CODES,
+} from "@src/types/brain-mcp";
+import {
+ BRAIN_MCP_SERVER,
+ BRAIN_MCP_MESSAGES,
+ BRAIN_MCP_ERRORS,
+ BRAIN_MCP_AUTH,
+} from "@src/constants/brain-mcp";
+
+type BrainService = {
+ recall: (query: string, limit?: number) => Promise;
+ learn: (name: string, whatItDoes: string, options?: unknown) => Promise;
+ searchMemories: (query: string, limit?: number, type?: string) => Promise;
+ relate: (source: string, target: string, type: string, weight?: number) => Promise;
+ getContext: (query: string, maxConcepts?: number) => Promise;
+ getStats: () => Promise;
+ isConnected: () => boolean;
+};
+
+interface McpServerState {
+ server: Server | null;
+ config: BrainMcpServerConfig;
+ brainService: BrainService | null;
+ connectedClients: number;
+ startTime: number | null;
+ requestsServed: number;
+ lastRequestAt: number | null;
+ rateLimitMap: Map;
+ apiKeys: Set;
+}
+
+const state: McpServerState = {
+ server: null,
+ config: DEFAULT_BRAIN_MCP_SERVER_CONFIG,
+ brainService: null,
+ connectedClients: 0,
+ startTime: null,
+ requestsServed: 0,
+ lastRequestAt: null,
+ rateLimitMap: new Map(),
+ apiKeys: new Set(),
+};
+
+const createMcpError = (code: number, message: string, data?: unknown): McpError => ({
+ code,
+ message,
+ data,
+});
+
+const createMcpResponse = (
+ id: string | number,
+ content?: ReadonlyArray,
+ error?: McpError
+): BrainMcpResponse => {
+ if (error) {
+ return { id, error };
+ }
+
+ return {
+ id,
+ result: {
+ content: content || [],
+ },
+ };
+};
+
+const checkRateLimit = (clientIp: string): boolean => {
+ if (!state.config.rateLimit.enabled) return true;
+
+ const now = Date.now();
+ const clientLimit = state.rateLimitMap.get(clientIp);
+
+ if (!clientLimit || now > clientLimit.resetAt) {
+ state.rateLimitMap.set(clientIp, {
+ count: 1,
+ resetAt: now + state.config.rateLimit.windowMs,
+ });
+ return true;
+ }
+
+ if (clientLimit.count >= state.config.rateLimit.maxRequests) {
+ return false;
+ }
+
+ state.rateLimitMap.set(clientIp, {
+ ...clientLimit,
+ count: clientLimit.count + 1,
+ });
+
+ return true;
+};
+
+const validateApiKey = (req: IncomingMessage): boolean => {
+ if (!state.config.enableAuth) return true;
+
+ const apiKey = req.headers[state.config.apiKeyHeader.toLowerCase()] as string | undefined;
+
+ if (!apiKey) return false;
+
+ // If no API keys configured, accept any key for now
+ if (state.apiKeys.size === 0) return true;
+
+ return state.apiKeys.has(apiKey);
+};
+
+const handleToolCall = async (
+ toolName: BrainMcpToolName,
+ args: Record
+): Promise => {
+ if (!state.brainService) {
+ throw createMcpError(MCP_ERROR_CODES.BRAIN_UNAVAILABLE, BRAIN_MCP_MESSAGES.SERVER_NOT_RUNNING);
+ }
+
+ if (!state.brainService.isConnected()) {
+ throw createMcpError(MCP_ERROR_CODES.BRAIN_UNAVAILABLE, "Brain service not connected");
+ }
+
+ const tool = BRAIN_MCP_TOOLS.find((t) => t.name === toolName);
+ if (!tool) {
+ throw createMcpError(MCP_ERROR_CODES.TOOL_NOT_FOUND, `Tool not found: ${toolName}`);
+ }
+
+ let result: unknown;
+
+ const toolHandlers: Record Promise> = {
+ brain_recall: () => state.brainService!.recall(args.query as string, args.limit as number | undefined),
+ brain_learn: () => state.brainService!.learn(
+ args.name as string,
+ args.whatItDoes as string,
+ { keywords: args.keywords, patterns: args.patterns, files: args.files }
+ ),
+ brain_search: () => state.brainService!.searchMemories(
+ args.query as string,
+ args.limit as number | undefined,
+ args.type as string | undefined
+ ),
+ brain_relate: () => state.brainService!.relate(
+ args.sourceConcept as string,
+ args.targetConcept as string,
+ args.relationType as string,
+ args.weight as number | undefined
+ ),
+ brain_context: () => state.brainService!.getContext(
+ args.query as string,
+ args.maxConcepts as number | undefined
+ ),
+ brain_stats: () => state.brainService!.getStats(),
+ brain_projects: async () => {
+ // Import dynamically to avoid circular dependency
+ const { listProjects } = await import("@src/services/brain/project-service");
+ return listProjects();
+ },
+ };
+
+ const handler = toolHandlers[toolName];
+ if (!handler) {
+ throw createMcpError(MCP_ERROR_CODES.TOOL_NOT_FOUND, `No handler for tool: ${toolName}`);
+ }
+
+ result = await handler();
+
+ return [
+ {
+ type: "text",
+ text: typeof result === "string" ? result : JSON.stringify(result, null, 2),
+ },
+ ];
+};
+
+const handleRequest = async (
+ req: IncomingMessage,
+ res: ServerResponse
+): Promise => {
+ // Set CORS headers
+ res.setHeader("Access-Control-Allow-Origin", state.config.allowedOrigins.join(","));
+ res.setHeader("Access-Control-Allow-Methods", "POST, OPTIONS");
+ res.setHeader("Access-Control-Allow-Headers", `Content-Type, ${state.config.apiKeyHeader}`);
+
+ // Handle preflight
+ if (req.method === "OPTIONS") {
+ res.writeHead(204);
+ res.end();
+ return;
+ }
+
+ if (req.method !== "POST") {
+ res.writeHead(405);
+ res.end(JSON.stringify(createMcpResponse("", undefined, BRAIN_MCP_ERRORS.INVALID_REQUEST)));
+ return;
+ }
+
+ // Get client IP for rate limiting
+ const clientIp = req.socket.remoteAddress || "unknown";
+
+ // Check rate limit
+ if (!checkRateLimit(clientIp)) {
+ res.writeHead(429);
+ res.end(JSON.stringify(createMcpResponse("", undefined, BRAIN_MCP_ERRORS.RATE_LIMITED)));
+ return;
+ }
+
+ // Validate API key
+ if (!validateApiKey(req)) {
+ res.writeHead(401);
+ res.end(JSON.stringify(createMcpResponse("", undefined, BRAIN_MCP_ERRORS.UNAUTHORIZED)));
+ return;
+ }
+
+ // Parse request body
+ let body = "";
+ req.on("data", (chunk) => {
+ body += chunk;
+ });
+
+ req.on("end", async () => {
+ state.requestsServed++;
+ state.lastRequestAt = Date.now();
+
+ let mcpRequest: BrainMcpRequest;
+
+ try {
+ mcpRequest = JSON.parse(body) as BrainMcpRequest;
+ } catch {
+ res.writeHead(400);
+ res.end(JSON.stringify(createMcpResponse("", undefined, BRAIN_MCP_ERRORS.PARSE_ERROR)));
+ return;
+ }
+
+ // Handle MCP request
+ try {
+ if (mcpRequest.method === "tools/call") {
+ const { name, arguments: args } = mcpRequest.params;
+ const content = await handleToolCall(name, args);
+ res.writeHead(200, { "Content-Type": "application/json" });
+ res.end(JSON.stringify(createMcpResponse(mcpRequest.id, content)));
+ } else if (mcpRequest.method === "tools/list") {
+ const tools = BRAIN_MCP_TOOLS.map((tool) => ({
+ name: tool.name,
+ description: tool.description,
+ inputSchema: tool.inputSchema,
+ }));
+ res.writeHead(200, { "Content-Type": "application/json" });
+ res.end(JSON.stringify({
+ id: mcpRequest.id,
+ result: { tools },
+ }));
+ } else {
+ res.writeHead(400);
+ res.end(JSON.stringify(createMcpResponse(mcpRequest.id, undefined, BRAIN_MCP_ERRORS.METHOD_NOT_FOUND)));
+ }
+ } catch (error) {
+ const mcpError = error instanceof Object && "code" in error
+ ? error as McpError
+ : createMcpError(MCP_ERROR_CODES.INTERNAL_ERROR, error instanceof Error ? error.message : "Unknown error");
+
+ res.writeHead(500);
+ res.end(JSON.stringify(createMcpResponse(mcpRequest.id, undefined, mcpError)));
+ }
+ });
+};
+
+// Public API
+
+export const start = async (
+ brainService: BrainService,
+ config?: Partial
+): Promise => {
+ if (state.server) {
+ throw new Error(BRAIN_MCP_MESSAGES.SERVER_ALREADY_RUNNING);
+ }
+
+ state.config = { ...DEFAULT_BRAIN_MCP_SERVER_CONFIG, ...config };
+ state.brainService = brainService;
+
+ return new Promise((resolve, reject) => {
+ state.server = createServer(handleRequest);
+
+ state.server.on("error", (error) => {
+ state.server = null;
+ reject(error);
+ });
+
+ state.server.listen(state.config.port, state.config.host, () => {
+ state.startTime = Date.now();
+ state.requestsServed = 0;
+ resolve();
+ });
+ });
+};
+
+export const stop = async (): Promise => {
+ if (!state.server) {
+ return;
+ }
+
+ return new Promise((resolve) => {
+ state.server!.close(() => {
+ state.server = null;
+ state.startTime = null;
+ state.connectedClients = 0;
+ state.brainService = null;
+ resolve();
+ });
+ });
+};
+
+export const getStatus = (): BrainMcpServerStatus => ({
+ running: state.server !== null,
+ port: state.config.port,
+ host: state.config.host,
+ connectedClients: state.connectedClients,
+ uptime: state.startTime ? Date.now() - state.startTime : 0,
+ requestsServed: state.requestsServed,
+ lastRequestAt: state.lastRequestAt || undefined,
+});
+
+export const addApiKey = (key: string): void => {
+ state.apiKeys.add(key);
+};
+
+export const removeApiKey = (key: string): void => {
+ state.apiKeys.delete(key);
+};
+
+export const isRunning = (): boolean => state.server !== null;
+
+export const getConfig = (): BrainMcpServerConfig => ({ ...state.config });
+
+export const updateConfig = (config: Partial): void => {
+ state.config = { ...state.config, ...config };
+};
+
+export const getAvailableTools = (): ReadonlyArray<{ name: string; description: string }> =>
+ BRAIN_MCP_TOOLS.map((t) => ({ name: t.name, description: t.description }));
diff --git a/src/services/brain/offline-queue.ts b/src/services/brain/offline-queue.ts
new file mode 100644
index 0000000..6dbca88
--- /dev/null
+++ b/src/services/brain/offline-queue.ts
@@ -0,0 +1,270 @@
+/**
+ * Offline Queue
+ *
+ * Manages queued changes when offline for later synchronization.
+ */
+
+import fs from "fs/promises";
+import { join } from "path";
+import { DIRS } from "@constants/paths";
+import { SYNC_CONFIG, CLOUD_ERRORS } from "@constants/brain-cloud";
+import type {
+ SyncItem,
+ OfflineQueueItem,
+ OfflineQueueState,
+ SyncOperationType,
+} from "@/types/brain-cloud";
+
+// Queue file path
+const getQueuePath = (): string => join(DIRS.data, "brain-offline-queue.json");
+
+// In-memory queue state
+let queueState: OfflineQueueState = {
+ items: [],
+ totalSize: 0,
+ oldestItem: null,
+};
+
+let loaded = false;
+
+/**
+ * Load queue from disk
+ */
+export const loadQueue = async (): Promise => {
+ if (loaded) return;
+
+ try {
+ const data = await fs.readFile(getQueuePath(), "utf-8");
+ const parsed = JSON.parse(data) as OfflineQueueState;
+ queueState = parsed;
+ loaded = true;
+ } catch {
+ // File doesn't exist or is invalid, start fresh
+ queueState = {
+ items: [],
+ totalSize: 0,
+ oldestItem: null,
+ };
+ loaded = true;
+ }
+};
+
+/**
+ * Save queue to disk
+ */
+const saveQueue = async (): Promise => {
+ try {
+ await fs.mkdir(DIRS.data, { recursive: true });
+ await fs.writeFile(getQueuePath(), JSON.stringify(queueState, null, 2));
+ } catch (error) {
+ console.error("Failed to save offline queue:", error);
+ }
+};
+
+/**
+ * Add item to offline queue
+ */
+export const enqueue = async (item: SyncItem): Promise => {
+ await loadQueue();
+
+ // Check queue size limit
+ if (queueState.items.length >= SYNC_CONFIG.MAX_QUEUE_SIZE) {
+ throw new Error(CLOUD_ERRORS.QUEUE_FULL);
+ }
+
+ const queueItem: OfflineQueueItem = {
+ id: generateQueueId(),
+ item,
+ retryCount: 0,
+ lastAttempt: 0,
+ };
+
+ queueState.items.push(queueItem);
+ queueState.totalSize = queueState.items.length;
+ queueState.oldestItem = Math.min(
+ queueState.oldestItem ?? item.timestamp,
+ item.timestamp,
+ );
+
+ await saveQueue();
+ return true;
+};
+
+/**
+ * Add multiple items to queue
+ */
+export const enqueueBatch = async (items: SyncItem[]): Promise => {
+ await loadQueue();
+
+ let added = 0;
+ for (const item of items) {
+ if (queueState.items.length >= SYNC_CONFIG.MAX_QUEUE_SIZE) {
+ break;
+ }
+
+ const queueItem: OfflineQueueItem = {
+ id: generateQueueId(),
+ item,
+ retryCount: 0,
+ lastAttempt: 0,
+ };
+
+ queueState.items.push(queueItem);
+ added++;
+ }
+
+ queueState.totalSize = queueState.items.length;
+ if (added > 0) {
+ queueState.oldestItem = Math.min(
+ queueState.oldestItem ?? Date.now(),
+ ...items.map((i) => i.timestamp),
+ );
+ }
+
+ await saveQueue();
+ return added;
+};
+
+/**
+ * Get items from queue for processing
+ */
+export const dequeue = async (limit: number = SYNC_CONFIG.MAX_BATCH_SIZE): Promise => {
+ await loadQueue();
+
+ // Get items that haven't exceeded retry limit
+ const available = queueState.items.filter(
+ (item) => item.retryCount < SYNC_CONFIG.MAX_QUEUE_SIZE,
+ );
+
+ return available.slice(0, limit);
+};
+
+/**
+ * Mark items as processed (remove from queue)
+ */
+export const markProcessed = async (ids: string[]): Promise => {
+ await loadQueue();
+
+ const idSet = new Set(ids);
+ queueState.items = queueState.items.filter((item) => !idSet.has(item.id));
+ queueState.totalSize = queueState.items.length;
+
+ // Update oldest item
+ if (queueState.items.length > 0) {
+ queueState.oldestItem = Math.min(
+ ...queueState.items.map((i) => i.item.timestamp),
+ );
+ } else {
+ queueState.oldestItem = null;
+ }
+
+ await saveQueue();
+};
+
+/**
+ * Mark items as failed (increment retry count)
+ */
+export const markFailed = async (
+ ids: string[],
+ error?: string,
+): Promise => {
+ await loadQueue();
+
+ const now = Date.now();
+ for (const id of ids) {
+ const item = queueState.items.find((i) => i.id === id);
+ if (item) {
+ item.retryCount++;
+ item.lastAttempt = now;
+ item.error = error;
+ }
+ }
+
+ await saveQueue();
+};
+
+/**
+ * Get queue state
+ */
+export const getQueueState = async (): Promise => {
+ await loadQueue();
+ return { ...queueState };
+};
+
+/**
+ * Get queue size
+ */
+export const getQueueSize = async (): Promise => {
+ await loadQueue();
+ return queueState.items.length;
+};
+
+/**
+ * Check if queue has items
+ */
+export const hasQueuedItems = async (): Promise => {
+ await loadQueue();
+ return queueState.items.length > 0;
+};
+
+/**
+ * Clear the entire queue
+ */
+export const clearQueue = async (): Promise => {
+ queueState = {
+ items: [],
+ totalSize: 0,
+ oldestItem: null,
+ };
+ await saveQueue();
+};
+
+/**
+ * Remove stale items from queue
+ */
+export const pruneStaleItems = async (): Promise => {
+ await loadQueue();
+
+ const cutoff = Date.now() - SYNC_CONFIG.STALE_ITEM_AGE_MS;
+ const before = queueState.items.length;
+
+ queueState.items = queueState.items.filter(
+ (item) => item.item.timestamp > cutoff,
+ );
+
+ queueState.totalSize = queueState.items.length;
+ const removed = before - queueState.items.length;
+
+ if (removed > 0) {
+ await saveQueue();
+ }
+
+ return removed;
+};
+
+/**
+ * Get items by type
+ */
+export const getItemsByType = async (
+ type: "concept" | "memory" | "relation",
+): Promise => {
+ await loadQueue();
+ return queueState.items.filter((item) => item.item.type === type);
+};
+
+/**
+ * Get items by operation
+ */
+export const getItemsByOperation = async (
+ operation: SyncOperationType,
+): Promise => {
+ await loadQueue();
+ return queueState.items.filter((item) => item.item.operation === operation);
+};
+
+/**
+ * Generate unique queue item ID
+ */
+const generateQueueId = (): string => {
+ return `q_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`;
+};
diff --git a/src/services/brain/project-service.ts b/src/services/brain/project-service.ts
new file mode 100644
index 0000000..f923703
--- /dev/null
+++ b/src/services/brain/project-service.ts
@@ -0,0 +1,384 @@
+/**
+ * Brain project service
+ * Manages multiple Brain projects/knowledge bases
+ */
+
+import { writeFile, readFile, mkdir } from "node:fs/promises";
+import { join } from "node:path";
+import { existsSync } from "node:fs";
+import { homedir } from "node:os";
+
+import type {
+ BrainProject,
+ BrainProjectStats,
+ BrainProjectSettings,
+ BrainProjectCreateInput,
+ BrainProjectUpdateInput,
+ BrainProjectSwitchResult,
+ BrainProjectListResult,
+ BrainProjectExport,
+ BrainProjectImportResult,
+ ExportedConcept,
+ ExportedMemory,
+ ExportedRelationship,
+} from "@src/types/brain-project";
+import {
+ DEFAULT_BRAIN_PROJECT_SETTINGS,
+ BRAIN_PROJECT_EXPORT_VERSION,
+} from "@src/types/brain-project";
+import {
+ BRAIN_PROJECT,
+ BRAIN_PROJECT_STORAGE,
+ BRAIN_PROJECT_PATHS,
+ BRAIN_PROJECT_MESSAGES,
+ BRAIN_PROJECT_API,
+} from "@src/constants/brain-project";
+
+interface ProjectServiceState {
+ projects: Map;
+ activeProjectId: number | null;
+ configPath: string;
+ initialized: boolean;
+}
+
+const state: ProjectServiceState = {
+ projects: new Map(),
+ activeProjectId: null,
+ configPath: join(homedir(), ".local", "share", "codetyper", BRAIN_PROJECT_STORAGE.CONFIG_FILE),
+ initialized: false,
+};
+
+const ensureDirectories = async (): Promise => {
+ const paths = [
+ join(homedir(), ".local", "share", "codetyper", "brain"),
+ join(homedir(), ".local", "share", "codetyper", "brain", "exports"),
+ join(homedir(), ".local", "share", "codetyper", "brain", "backups"),
+ ];
+
+ for (const path of paths) {
+ if (!existsSync(path)) {
+ await mkdir(path, { recursive: true });
+ }
+ }
+};
+
+const loadProjectsFromConfig = async (): Promise => {
+ if (!existsSync(state.configPath)) {
+ return;
+ }
+
+ try {
+ const content = await readFile(state.configPath, "utf-8");
+ const data = JSON.parse(content) as {
+ projects: BrainProject[];
+ activeProjectId: number | null;
+ };
+
+ state.projects.clear();
+ data.projects.forEach((project) => {
+ state.projects.set(project.id, project);
+ });
+ state.activeProjectId = data.activeProjectId;
+ } catch {
+ // Config file corrupted, start fresh
+ state.projects.clear();
+ state.activeProjectId = null;
+ }
+};
+
+const saveProjectsToConfig = async (): Promise => {
+ await ensureDirectories();
+
+ const data = {
+ projects: Array.from(state.projects.values()),
+ activeProjectId: state.activeProjectId,
+ version: "1.0.0",
+ updatedAt: Date.now(),
+ };
+
+ await writeFile(state.configPath, JSON.stringify(data, null, 2));
+};
+
+const generateProjectId = (): number => {
+ const existingIds = Array.from(state.projects.keys());
+ return existingIds.length > 0 ? Math.max(...existingIds) + 1 : 1;
+};
+
+const createDefaultStats = (): BrainProjectStats => ({
+ conceptCount: 0,
+ memoryCount: 0,
+ relationshipCount: 0,
+ totalTokensUsed: 0,
+});
+
+// Public API
+
+export const initialize = async (): Promise => {
+ if (state.initialized) return;
+
+ await ensureDirectories();
+ await loadProjectsFromConfig();
+ state.initialized = true;
+};
+
+export const createProject = async (input: BrainProjectCreateInput): Promise => {
+ await initialize();
+
+ // Validate name
+ if (input.name.length < BRAIN_PROJECT.NAME_MIN_LENGTH) {
+ throw new Error(BRAIN_PROJECT_MESSAGES.INVALID_NAME);
+ }
+
+ if (input.name.length > BRAIN_PROJECT.NAME_MAX_LENGTH) {
+ throw new Error(BRAIN_PROJECT_MESSAGES.INVALID_NAME);
+ }
+
+ // Check for duplicate names
+ const existingProject = Array.from(state.projects.values()).find(
+ (p) => p.name.toLowerCase() === input.name.toLowerCase()
+ );
+
+ if (existingProject) {
+ throw new Error(BRAIN_PROJECT_MESSAGES.ALREADY_EXISTS);
+ }
+
+ const now = Date.now();
+ const project: BrainProject = {
+ id: generateProjectId(),
+ name: input.name,
+ description: input.description || "",
+ rootPath: input.rootPath,
+ createdAt: now,
+ updatedAt: now,
+ stats: createDefaultStats(),
+ settings: {
+ ...DEFAULT_BRAIN_PROJECT_SETTINGS,
+ ...input.settings,
+ },
+ isActive: false,
+ };
+
+ state.projects.set(project.id, project);
+ await saveProjectsToConfig();
+
+ return project;
+};
+
+export const updateProject = async (
+ projectId: number,
+ input: BrainProjectUpdateInput
+): Promise => {
+ await initialize();
+
+ const project = state.projects.get(projectId);
+ if (!project) {
+ throw new Error(BRAIN_PROJECT_MESSAGES.NOT_FOUND);
+ }
+
+ const updatedProject: BrainProject = {
+ ...project,
+ name: input.name ?? project.name,
+ description: input.description ?? project.description,
+ settings: input.settings
+ ? { ...project.settings, ...input.settings }
+ : project.settings,
+ updatedAt: Date.now(),
+ };
+
+ state.projects.set(projectId, updatedProject);
+ await saveProjectsToConfig();
+
+ return updatedProject;
+};
+
+export const deleteProject = async (projectId: number): Promise => {
+ await initialize();
+
+ const project = state.projects.get(projectId);
+ if (!project) {
+ return false;
+ }
+
+ // Can't delete active project
+ if (state.activeProjectId === projectId) {
+ state.activeProjectId = null;
+ }
+
+ state.projects.delete(projectId);
+ await saveProjectsToConfig();
+
+ return true;
+};
+
+export const switchProject = async (projectId: number): Promise => {
+ await initialize();
+
+ const newProject = state.projects.get(projectId);
+ if (!newProject) {
+ throw new Error(BRAIN_PROJECT_MESSAGES.NOT_FOUND);
+ }
+
+ const previousProject = state.activeProjectId
+ ? state.projects.get(state.activeProjectId)
+ : undefined;
+
+ // Update active status
+ if (previousProject) {
+ state.projects.set(previousProject.id, { ...previousProject, isActive: false });
+ }
+
+ state.projects.set(projectId, { ...newProject, isActive: true });
+ state.activeProjectId = projectId;
+
+ await saveProjectsToConfig();
+
+ return {
+ success: true,
+ previousProject,
+ currentProject: state.projects.get(projectId)!,
+ message: `${BRAIN_PROJECT_MESSAGES.SWITCHED} "${newProject.name}"`,
+ };
+};
+
+export const getProject = async (projectId: number): Promise => {
+ await initialize();
+ return state.projects.get(projectId);
+};
+
+export const getActiveProject = async (): Promise => {
+ await initialize();
+ return state.activeProjectId ? state.projects.get(state.activeProjectId) : undefined;
+};
+
+export const listProjects = async (): Promise => {
+ await initialize();
+
+ return {
+ projects: Array.from(state.projects.values()).sort((a, b) => b.updatedAt - a.updatedAt),
+ activeProjectId: state.activeProjectId ?? undefined,
+ total: state.projects.size,
+ };
+};
+
+export const findProjectByPath = async (rootPath: string): Promise => {
+ await initialize();
+
+ return Array.from(state.projects.values()).find((p) => p.rootPath === rootPath);
+};
+
+export const updateProjectStats = async (
+ projectId: number,
+ stats: Partial
+): Promise => {
+ await initialize();
+
+ const project = state.projects.get(projectId);
+ if (!project) return;
+
+ const updatedProject: BrainProject = {
+ ...project,
+ stats: { ...project.stats, ...stats },
+ updatedAt: Date.now(),
+ };
+
+ state.projects.set(projectId, updatedProject);
+ await saveProjectsToConfig();
+};
+
+export const exportProject = async (projectId: number): Promise => {
+ await initialize();
+
+ const project = state.projects.get(projectId);
+ if (!project) {
+ throw new Error(BRAIN_PROJECT_MESSAGES.NOT_FOUND);
+ }
+
+ // In a real implementation, this would fetch data from Brain API
+ // For now, return structure with empty data
+ const exportData: BrainProjectExport = {
+ project,
+ concepts: [],
+ memories: [],
+ relationships: [],
+ exportedAt: Date.now(),
+ version: BRAIN_PROJECT_EXPORT_VERSION,
+ };
+
+ // Save export file
+ const exportPath = join(
+ homedir(),
+ ".local",
+ "share",
+ "codetyper",
+ "brain",
+ "exports",
+ `${project.name}-${Date.now()}${BRAIN_PROJECT_STORAGE.EXPORT_EXTENSION}`
+ );
+
+ await writeFile(exportPath, JSON.stringify(exportData, null, 2));
+
+ return exportData;
+};
+
+export const importProject = async (
+ exportData: BrainProjectExport
+): Promise => {
+ await initialize();
+
+ try {
+ // Create new project with imported data
+ const newProject = await createProject({
+ name: `${exportData.project.name} (imported)`,
+ description: exportData.project.description,
+ rootPath: exportData.project.rootPath,
+ settings: exportData.project.settings,
+ });
+
+ // In a real implementation, this would send data to Brain API
+ // For now, just return success with counts
+
+ return {
+ success: true,
+ project: newProject,
+ imported: {
+ concepts: exportData.concepts.length,
+ memories: exportData.memories.length,
+ relationships: exportData.relationships.length,
+ },
+ errors: [],
+ };
+ } catch (error) {
+ return {
+ success: false,
+ project: exportData.project,
+ imported: { concepts: 0, memories: 0, relationships: 0 },
+ errors: [error instanceof Error ? error.message : "Import failed"],
+ };
+ }
+};
+
+export const getProjectSettings = async (projectId: number): Promise => {
+ await initialize();
+
+ const project = state.projects.get(projectId);
+ return project?.settings;
+};
+
+export const updateProjectSettings = async (
+ projectId: number,
+ settings: Partial
+): Promise => {
+ const project = await updateProject(projectId, { settings });
+ return project.settings;
+};
+
+export const setActiveProjectByPath = async (rootPath: string): Promise => {
+ const project = await findProjectByPath(rootPath);
+
+ if (project) {
+ await switchProject(project.id);
+ return project;
+ }
+
+ return undefined;
+};
diff --git a/src/services/chat-tui/initialize.ts b/src/services/chat-tui/initialize.ts
index 5bf2ba6..531b0ce 100644
--- a/src/services/chat-tui/initialize.ts
+++ b/src/services/chat-tui/initialize.ts
@@ -19,6 +19,8 @@ import {
buildCompletePrompt,
} from "@services/prompt-builder";
import { initSuggestionService } from "@services/command-suggestion-service";
+import * as brainService from "@services/brain";
+import { BRAIN_DISABLED } from "@constants/brain";
import { addContextFile } from "@services/chat-tui/files";
import type { ProviderName, Message } from "@/types/providers";
import type { ChatSession } from "@/types/index";
@@ -147,6 +149,39 @@ const initializeTheme = async (): Promise => {
}
};
+/**
+ * Initialize brain service and update store state
+ * Skipped when BRAIN_DISABLED flag is true
+ */
+const initializeBrain = async (): Promise => {
+ // Skip brain initialization when disabled
+ if (BRAIN_DISABLED) {
+ appStore.setBrainStatus("disconnected");
+ appStore.setBrainShowBanner(false);
+ return;
+ }
+
+ try {
+ appStore.setBrainStatus("connecting");
+
+ const connected = await brainService.initialize();
+
+ if (connected) {
+ const state = brainService.getState();
+ appStore.setBrainStatus("connected");
+ appStore.setBrainUser(state.user);
+ appStore.setBrainCounts(state.knowledgeCount, state.memoryCount);
+ appStore.setBrainShowBanner(false);
+ } else {
+ appStore.setBrainStatus("disconnected");
+ appStore.setBrainShowBanner(true);
+ }
+ } catch {
+ appStore.setBrainStatus("disconnected");
+ appStore.setBrainShowBanner(true);
+ }
+};
+
/**
* Rebuild system prompt when interaction mode changes
* Updates both the state and the first message in the conversation
@@ -178,9 +213,13 @@ export const initializeChatService = async (
const initialMode = appStore.getState().interactionMode;
const state = await createInitialState(options, initialMode);
- await validateProvider(state);
- await buildSystemPrompt(state, options);
- await initializeTheme();
+ // Run provider validation and system prompt building in parallel
+ // These are independent and both involve async operations
+ await Promise.all([
+ validateProvider(state),
+ buildSystemPrompt(state, options),
+ initializeTheme(),
+ ]);
const session = await initializeSession(state, options);
@@ -188,9 +227,18 @@ export const initializeChatService = async (
state.messages.push({ role: "system", content: state.systemPrompt });
}
- await addInitialContextFiles(state, options.files);
- await initializePermissions();
+ // Run these in parallel - they're independent
+ await Promise.all([
+ addInitialContextFiles(state, options.files),
+ initializePermissions(),
+ ]);
+
initSuggestionService(process.cwd());
+ // Initialize brain service (non-blocking, errors silently handled)
+ initializeBrain().catch(() => {
+ // Silently fail - brain is optional
+ });
+
return { state, session };
};
diff --git a/src/services/chat-tui/message-handler.ts b/src/services/chat-tui/message-handler.ts
index f9fac1c..fae92e4 100644
--- a/src/services/chat-tui/message-handler.ts
+++ b/src/services/chat-tui/message-handler.ts
@@ -366,7 +366,7 @@ export const handleMessage = async (
const modeLabel = interactionMode === "ask" ? "Ask" : "Code Review";
callbacks.onLog(
"system",
- `${modeLabel} mode: Read-only tools only (Ctrl+Tab to switch modes)`,
+ `${modeLabel} mode: Read-only tools only (Ctrl+M to switch modes)`,
);
}
diff --git a/src/services/confidence-filter.ts b/src/services/confidence-filter.ts
new file mode 100644
index 0000000..6d90ad2
--- /dev/null
+++ b/src/services/confidence-filter.ts
@@ -0,0 +1,209 @@
+/**
+ * Confidence-based filtering service
+ * Filters PR review issues and agent outputs by confidence score
+ */
+
+import type {
+ ConfidenceScore,
+ ConfidenceLevel,
+ ConfidenceFactor,
+ ConfidenceFilterConfig,
+ FilteredResult,
+ ValidationResult,
+ ConfidenceFilterStats,
+} from "@src/types/confidence-filter";
+import {
+ CONFIDENCE_LEVELS,
+ DEFAULT_CONFIDENCE_FILTER_CONFIG,
+} from "@src/types/confidence-filter";
+import { CONFIDENCE_FILTER, CONFIDENCE_WEIGHTS } from "@src/constants/confidence-filter";
+
+export const calculateConfidenceLevel = (score: number): ConfidenceLevel => {
+ const levels = Object.entries(CONFIDENCE_LEVELS) as Array<[ConfidenceLevel, { min: number; max: number }]>;
+ const found = levels.find(([, range]) => score >= range.min && score <= range.max);
+ return found ? found[0] : "low";
+};
+
+export const calculateConfidenceScore = (factors: ReadonlyArray): ConfidenceScore => {
+ const totalWeight = factors.reduce((sum, f) => sum + f.weight, 0);
+ const weightedSum = factors.reduce((sum, f) => sum + f.score * f.weight, 0);
+ const value = totalWeight > 0 ? Math.round(weightedSum / totalWeight) : 0;
+
+ return {
+ value,
+ level: calculateConfidenceLevel(value),
+ factors,
+ };
+};
+
+export const createConfidenceFactor = (
+ name: string,
+ score: number,
+ weight: number,
+ reason: string
+): ConfidenceFactor => ({
+ name,
+ score: Math.max(0, Math.min(100, score)),
+ weight: Math.max(0, Math.min(1, weight)),
+ reason,
+});
+
+export const createPatternMatchFactor = (matchCount: number, expectedCount: number): ConfidenceFactor =>
+ createConfidenceFactor(
+ "Pattern Match",
+ Math.min(100, (matchCount / Math.max(1, expectedCount)) * 100),
+ CONFIDENCE_WEIGHTS.PATTERN_MATCH,
+ `Matched ${matchCount}/${expectedCount} expected patterns`
+ );
+
+export const createContextRelevanceFactor = (relevanceScore: number): ConfidenceFactor =>
+ createConfidenceFactor(
+ "Context Relevance",
+ relevanceScore,
+ CONFIDENCE_WEIGHTS.CONTEXT_RELEVANCE,
+ `Context relevance score: ${relevanceScore}%`
+ );
+
+export const createSeverityFactor = (severity: "low" | "medium" | "high" | "critical"): ConfidenceFactor => {
+ const severityScores: Record = { low: 40, medium: 60, high: 80, critical: 95 };
+ return createConfidenceFactor(
+ "Severity Level",
+ severityScores[severity] ?? 50,
+ CONFIDENCE_WEIGHTS.SEVERITY_LEVEL,
+ `Issue severity: ${severity}`
+ );
+};
+
+export const createCodeAnalysisFactor = (analysisScore: number): ConfidenceFactor =>
+ createConfidenceFactor(
+ "Code Analysis",
+ analysisScore,
+ CONFIDENCE_WEIGHTS.CODE_ANALYSIS,
+ `Static analysis confidence: ${analysisScore}%`
+ );
+
+export const createHistoricalAccuracyFactor = (accuracy: number): ConfidenceFactor =>
+ createConfidenceFactor(
+ "Historical Accuracy",
+ accuracy,
+ CONFIDENCE_WEIGHTS.HISTORICAL_ACCURACY,
+ `Historical accuracy for similar issues: ${accuracy}%`
+ );
+
+export const filterByConfidence = (
+ items: ReadonlyArray<{ item: T; confidence: ConfidenceScore }>,
+ config: ConfidenceFilterConfig = DEFAULT_CONFIDENCE_FILTER_CONFIG
+): ReadonlyArray> =>
+ items.map(({ item, confidence }) => ({
+ item,
+ confidence,
+ passed: confidence.value >= config.minThreshold,
+ }));
+
+export const filterPassedOnly = (results: ReadonlyArray>): ReadonlyArray =>
+ results.filter((r) => r.passed).map((r) => r.item);
+
+export const groupByConfidenceLevel = (
+ results: ReadonlyArray>
+): Record>> => ({
+ low: results.filter((r) => r.confidence.level === "low"),
+ medium: results.filter((r) => r.confidence.level === "medium"),
+ high: results.filter((r) => r.confidence.level === "high"),
+ critical: results.filter((r) => r.confidence.level === "critical"),
+});
+
+export const calculateFilterStats = (results: ReadonlyArray>): ConfidenceFilterStats => {
+ const passed = results.filter((r) => r.passed).length;
+ const grouped = groupByConfidenceLevel(results);
+ const totalConfidence = results.reduce((sum, r) => sum + r.confidence.value, 0);
+
+ return {
+ total: results.length,
+ passed,
+ filtered: results.length - passed,
+ byLevel: {
+ low: grouped.low.length,
+ medium: grouped.medium.length,
+ high: grouped.high.length,
+ critical: grouped.critical.length,
+ },
+ averageConfidence: results.length > 0 ? Math.round(totalConfidence / results.length) : 0,
+ };
+};
+
+export const validateConfidence = async (
+ confidence: ConfidenceScore,
+ validatorFn: (factors: ReadonlyArray) => Promise<{ validated: boolean; adjustment: number; notes: string }>
+): Promise => {
+ const result = await validatorFn(confidence.factors);
+
+ return {
+ validated: result.validated,
+ adjustedConfidence: Math.max(0, Math.min(100, confidence.value + result.adjustment)),
+ validatorNotes: result.notes,
+ };
+};
+
+export const formatConfidenceScore = (confidence: ConfidenceScore, showFactors: boolean = false): string => {
+ const levelColors: Record = {
+ low: "\x1b[90m",
+ medium: "\x1b[33m",
+ high: "\x1b[32m",
+ critical: "\x1b[31m",
+ };
+ const reset = "\x1b[0m";
+ const color = levelColors[confidence.level];
+
+ let result = `${color}[${confidence.value}% - ${confidence.level.toUpperCase()}]${reset}`;
+
+ if (showFactors && confidence.factors.length > 0) {
+ const factorLines = confidence.factors
+ .map((f) => ` - ${f.name}: ${f.score}% (weight: ${f.weight})`)
+ .join("\n");
+ result += `\n${factorLines}`;
+ }
+
+ return result;
+};
+
+export const mergeConfidenceFactors = (
+ existing: ReadonlyArray,
+ additional: ReadonlyArray
+): ReadonlyArray => {
+ const factorMap = new Map();
+
+ existing.forEach((f) => factorMap.set(f.name, f));
+ additional.forEach((f) => {
+ const existingFactor = factorMap.get(f.name);
+ if (existingFactor) {
+ // Average the scores if factor already exists
+ factorMap.set(f.name, {
+ ...f,
+ score: Math.round((existingFactor.score + f.score) / 2),
+ });
+ } else {
+ factorMap.set(f.name, f);
+ }
+ });
+
+ return Array.from(factorMap.values());
+};
+
+export const adjustThreshold = (
+ baseThreshold: number,
+ context: { isCritical: boolean; isAutomated: boolean; userPreference?: number }
+): number => {
+ let threshold = context.userPreference ?? baseThreshold;
+
+ // Lower threshold for critical contexts
+ if (context.isCritical) {
+ threshold = Math.max(CONFIDENCE_FILTER.MIN_THRESHOLD, threshold - 10);
+ }
+
+ // Higher threshold for automated contexts
+ if (context.isAutomated) {
+ threshold = Math.min(CONFIDENCE_FILTER.MAX_THRESHOLD, threshold + 10);
+ }
+
+ return threshold;
+};
diff --git a/src/services/config.ts b/src/services/config.ts
index 7b65925..3914f4b 100644
--- a/src/services/config.ts
+++ b/src/services/config.ts
@@ -44,23 +44,41 @@ const PROVIDER_ENV_VARS: Record = {
* Config state (singleton pattern using closure)
*/
let configState: Config = getDefaults();
+let configLoaded = false;
+let configLoadPromise: Promise | null = null;
/**
- * Load configuration from file
+ * Load configuration from file (with caching)
*/
export const loadConfig = async (): Promise => {
- try {
- const data = await fs.readFile(FILES.config, "utf-8");
- const loaded = JSON.parse(data);
-
- // Clean up deprecated keys
- delete loaded.models;
-
- configState = { ...getDefaults(), ...loaded };
- } catch {
- // Config file doesn't exist or is invalid, use defaults
- configState = getDefaults();
+ // Return cached config if already loaded
+ if (configLoaded) {
+ return;
}
+
+ // If loading is in progress, wait for it
+ if (configLoadPromise) {
+ return configLoadPromise;
+ }
+
+ // Start loading
+ configLoadPromise = (async () => {
+ try {
+ const data = await fs.readFile(FILES.config, "utf-8");
+ const loaded = JSON.parse(data);
+
+ // Clean up deprecated keys
+ delete loaded.models;
+
+ configState = { ...getDefaults(), ...loaded };
+ } catch {
+ // Config file doesn't exist or is invalid, use defaults
+ configState = getDefaults();
+ }
+ configLoaded = true;
+ })();
+
+ return configLoadPromise;
};
/**
diff --git a/src/services/feature-dev/checkpoint-handler.ts b/src/services/feature-dev/checkpoint-handler.ts
new file mode 100644
index 0000000..d4be102
--- /dev/null
+++ b/src/services/feature-dev/checkpoint-handler.ts
@@ -0,0 +1,209 @@
+/**
+ * Checkpoint Handler
+ *
+ * Manages user approval checkpoints during feature development.
+ */
+
+import {
+ PHASE_CHECKPOINTS,
+ FEATURE_DEV_ERRORS,
+} from "@constants/feature-dev";
+import type {
+ FeatureDevPhase,
+ FeatureDevState,
+ Checkpoint,
+ CheckpointDecision,
+ PhaseExecutionContext,
+} from "@/types/feature-dev";
+
+/**
+ * Create a checkpoint for user approval
+ */
+export const createCheckpoint = (
+ phase: FeatureDevPhase,
+ state: FeatureDevState,
+ details: string[],
+): Checkpoint => {
+ const config = PHASE_CHECKPOINTS[phase];
+
+ return {
+ phase,
+ title: config.title,
+ summary: buildCheckpointSummary(phase, state),
+ details,
+ requiresApproval: config.required,
+ suggestedAction: "approve",
+ };
+};
+
+/**
+ * Build summary for checkpoint based on phase
+ */
+const buildCheckpointSummary = (
+ phase: FeatureDevPhase,
+ state: FeatureDevState,
+): string => {
+ const summaryBuilders: Record string> = {
+ understand: () => {
+ const reqCount = state.requirements.length;
+ const clarCount = state.clarifications.length;
+ return `${reqCount} requirement(s) identified, ${clarCount} clarification(s) made`;
+ },
+
+ explore: () => {
+ const fileCount = state.relevantFiles.length;
+ const findingCount = state.explorationResults.reduce(
+ (sum, r) => sum + r.findings.length,
+ 0,
+ );
+ return `Found ${fileCount} relevant file(s) with ${findingCount} finding(s)`;
+ },
+
+ plan: () => {
+ if (!state.plan) return "No plan created";
+ const stepCount = state.plan.steps.length;
+ const complexity = state.plan.estimatedComplexity;
+ return `${stepCount} step(s) planned, ${complexity} complexity`;
+ },
+
+ implement: () => {
+ const changeCount = state.changes.length;
+ const additions = state.changes.reduce((sum, c) => sum + c.additions, 0);
+ const deletions = state.changes.reduce((sum, c) => sum + c.deletions, 0);
+ return `${changeCount} file(s) changed (+${additions}/-${deletions})`;
+ },
+
+ verify: () => {
+ if (!state.testResults) return "Tests not run yet";
+ const { passedTests, failedTests, totalTests } = state.testResults;
+ return `${passedTests}/${totalTests} tests passed, ${failedTests} failed`;
+ },
+
+ review: () => {
+ const issues = state.reviewFindings.filter((f) => f.type === "issue").length;
+ const suggestions = state.reviewFindings.filter(
+ (f) => f.type === "suggestion",
+ ).length;
+ return `${issues} issue(s), ${suggestions} suggestion(s) found`;
+ },
+
+ finalize: () => {
+ const changeCount = state.changes.length;
+ return `Ready to commit ${changeCount} file change(s)`;
+ },
+ };
+
+ return summaryBuilders[phase]();
+};
+
+/**
+ * Check if phase requires a checkpoint
+ */
+export const requiresCheckpoint = (phase: FeatureDevPhase): boolean => {
+ return PHASE_CHECKPOINTS[phase].required;
+};
+
+/**
+ * Request user approval at a checkpoint
+ */
+export const requestApproval = async (
+ checkpoint: Checkpoint,
+ ctx: PhaseExecutionContext,
+): Promise<{ decision: CheckpointDecision; feedback?: string }> => {
+ // If no checkpoint handler provided, auto-approve non-required checkpoints
+ if (!ctx.onCheckpoint) {
+ if (checkpoint.requiresApproval) {
+ throw new Error(FEATURE_DEV_ERRORS.CHECKPOINT_REQUIRED(checkpoint.phase));
+ }
+ return { decision: "approve" };
+ }
+
+ // Request approval from handler
+ const result = await ctx.onCheckpoint(checkpoint);
+
+ // Record checkpoint in state
+ ctx.state.checkpoints.push({
+ checkpoint,
+ decision: result.decision,
+ feedback: result.feedback,
+ timestamp: Date.now(),
+ });
+
+ return result;
+};
+
+/**
+ * Process checkpoint decision
+ */
+export const processCheckpointDecision = (
+ decision: CheckpointDecision,
+ _feedback?: string,
+): { proceed: boolean; action?: string } => {
+ const decisionHandlers: Record<
+ CheckpointDecision,
+ () => { proceed: boolean; action?: string }
+ > = {
+ approve: () => ({ proceed: true }),
+ reject: () => ({ proceed: false, action: "rejected" }),
+ modify: () => ({ proceed: false, action: "modify", }),
+ skip: () => ({ proceed: true, action: "skipped" }),
+ abort: () => ({ proceed: false, action: "aborted" }),
+ };
+
+ return decisionHandlers[decision]();
+};
+
+/**
+ * Format checkpoint for display
+ */
+export const formatCheckpoint = (checkpoint: Checkpoint): string => {
+ const lines: string[] = [];
+
+ lines.push(`## ${checkpoint.title}`);
+ lines.push("");
+ lines.push(`**Phase:** ${checkpoint.phase}`);
+ lines.push(`**Summary:** ${checkpoint.summary}`);
+ lines.push("");
+
+ if (checkpoint.details.length > 0) {
+ lines.push("### Details");
+ for (const detail of checkpoint.details) {
+ lines.push(`- ${detail}`);
+ }
+ lines.push("");
+ }
+
+ if (checkpoint.requiresApproval) {
+ lines.push("*This checkpoint requires your approval to proceed.*");
+ }
+
+ return lines.join("\n");
+};
+
+/**
+ * Get checkpoint history for a phase
+ */
+export const getPhaseCheckpoints = (
+ state: FeatureDevState,
+ phase: FeatureDevPhase,
+): Array<{
+ checkpoint: Checkpoint;
+ decision: CheckpointDecision;
+ feedback?: string;
+ timestamp: number;
+}> => {
+ return state.checkpoints.filter((c) => c.checkpoint.phase === phase);
+};
+
+/**
+ * Check if phase was approved
+ */
+export const wasPhaseApproved = (
+ state: FeatureDevState,
+ phase: FeatureDevPhase,
+): boolean => {
+ const checkpoints = getPhaseCheckpoints(state, phase);
+ return checkpoints.some(
+ (c) => c.decision === "approve" || c.decision === "skip",
+ );
+};
diff --git a/src/services/feature-dev/context-builder.ts b/src/services/feature-dev/context-builder.ts
new file mode 100644
index 0000000..5eaf4a0
--- /dev/null
+++ b/src/services/feature-dev/context-builder.ts
@@ -0,0 +1,292 @@
+/**
+ * Context Builder
+ *
+ * Builds context for each phase of feature development.
+ */
+
+import {
+ PHASE_PROMPTS,
+ PHASE_DESCRIPTIONS,
+} from "@constants/feature-dev";
+import type {
+ FeatureDevPhase,
+ FeatureDevState,
+} from "@/types/feature-dev";
+
+/**
+ * Build the full context for a phase execution
+ */
+export const buildPhaseContext = (
+ phase: FeatureDevPhase,
+ state: FeatureDevState,
+ userRequest: string,
+): string => {
+ const parts: string[] = [];
+
+ // Phase header
+ parts.push(`# Feature Development: ${phase.toUpperCase()} Phase`);
+ parts.push("");
+ parts.push(`**Goal:** ${PHASE_DESCRIPTIONS[phase]}`);
+ parts.push("");
+
+ // Phase-specific prompt
+ parts.push("## Instructions");
+ parts.push(PHASE_PROMPTS[phase]);
+ parts.push("");
+
+ // User's original request
+ parts.push("## Feature Request");
+ parts.push(userRequest);
+ parts.push("");
+
+ // Add state context based on phase
+ const stateContext = buildStateContext(phase, state);
+ if (stateContext) {
+ parts.push("## Current State");
+ parts.push(stateContext);
+ parts.push("");
+ }
+
+ return parts.join("\n");
+};
+
+/**
+ * Build state context based on accumulated results
+ */
+const buildStateContext = (
+ phase: FeatureDevPhase,
+ state: FeatureDevState,
+): string | null => {
+ const contextBuilders: Record string | null> = {
+ understand: () => null, // No prior context
+
+ explore: () => {
+ if (state.requirements.length === 0) return null;
+
+ const lines: string[] = [];
+ lines.push("### Understood Requirements");
+ for (const req of state.requirements) {
+ lines.push(`- ${req}`);
+ }
+
+ if (state.clarifications.length > 0) {
+ lines.push("");
+ lines.push("### Clarifications");
+ for (const c of state.clarifications) {
+ lines.push(`Q: ${c.question}`);
+ lines.push(`A: ${c.answer}`);
+ }
+ }
+
+ return lines.join("\n");
+ },
+
+ plan: () => {
+ const lines: string[] = [];
+
+ // Requirements
+ if (state.requirements.length > 0) {
+ lines.push("### Requirements");
+ for (const req of state.requirements) {
+ lines.push(`- ${req}`);
+ }
+ lines.push("");
+ }
+
+ // Exploration results
+ if (state.relevantFiles.length > 0) {
+ lines.push("### Relevant Files Found");
+ for (const file of state.relevantFiles.slice(0, 10)) {
+ lines.push(`- ${file}`);
+ }
+ if (state.relevantFiles.length > 10) {
+ lines.push(`- ... and ${state.relevantFiles.length - 10} more`);
+ }
+ lines.push("");
+ }
+
+ // Patterns found
+ const patterns = state.explorationResults.flatMap((r) => r.patterns);
+ if (patterns.length > 0) {
+ lines.push("### Patterns to Follow");
+ for (const pattern of [...new Set(patterns)].slice(0, 5)) {
+ lines.push(`- ${pattern}`);
+ }
+ lines.push("");
+ }
+
+ return lines.length > 0 ? lines.join("\n") : null;
+ },
+
+ implement: () => {
+ if (!state.plan) return null;
+
+ const lines: string[] = [];
+ lines.push("### Approved Implementation Plan");
+ lines.push(`**Summary:** ${state.plan.summary}`);
+ lines.push("");
+ lines.push("**Steps:**");
+ for (const step of state.plan.steps) {
+ lines.push(`${step.order}. [${step.changeType}] ${step.file}`);
+ lines.push(` ${step.description}`);
+ }
+
+ if (state.plan.risks.length > 0) {
+ lines.push("");
+ lines.push("**Risks to Watch:**");
+ for (const risk of state.plan.risks) {
+ lines.push(`- ${risk}`);
+ }
+ }
+
+ return lines.join("\n");
+ },
+
+ verify: () => {
+ if (state.changes.length === 0) return null;
+
+ const lines: string[] = [];
+ lines.push("### Files Changed");
+ for (const change of state.changes) {
+ lines.push(
+ `- ${change.path} (${change.changeType}, +${change.additions}/-${change.deletions})`,
+ );
+ }
+
+ if (state.plan?.testStrategy) {
+ lines.push("");
+ lines.push("### Test Strategy");
+ lines.push(state.plan.testStrategy);
+ }
+
+ return lines.join("\n");
+ },
+
+ review: () => {
+ const lines: string[] = [];
+
+ // Changes to review
+ if (state.changes.length > 0) {
+ lines.push("### Changes to Review");
+ for (const change of state.changes) {
+ lines.push(
+ `- ${change.path} (${change.changeType}, +${change.additions}/-${change.deletions})`,
+ );
+ }
+ lines.push("");
+ }
+
+ // Test results
+ if (state.testResults) {
+ lines.push("### Test Results");
+ lines.push(
+ `${state.testResults.passedTests}/${state.testResults.totalTests} tests passed`,
+ );
+ if (state.testResults.failedTests > 0) {
+ lines.push("**Failures:**");
+ for (const failure of state.testResults.failures) {
+ lines.push(`- ${failure.testName}: ${failure.error}`);
+ }
+ }
+ lines.push("");
+ }
+
+ return lines.length > 0 ? lines.join("\n") : null;
+ },
+
+ finalize: () => {
+ const lines: string[] = [];
+
+ // Summary of changes
+ lines.push("### Summary of Changes");
+ for (const change of state.changes) {
+ lines.push(
+ `- ${change.path} (${change.changeType}, +${change.additions}/-${change.deletions})`,
+ );
+ }
+ lines.push("");
+
+ // Review findings to address
+ const issues = state.reviewFindings.filter(
+ (f) => f.type === "issue" && f.severity === "critical",
+ );
+ if (issues.length > 0) {
+ lines.push("### Outstanding Issues");
+ for (const issue of issues) {
+ lines.push(`- [${issue.severity}] ${issue.message}`);
+ }
+ lines.push("");
+ }
+
+ // Test status
+ if (state.testResults) {
+ const status = state.testResults.passed ? "ā All tests passing" : "ā Tests failing";
+ lines.push(`### Test Status: ${status}`);
+ }
+
+ return lines.join("\n");
+ },
+ };
+
+ return contextBuilders[phase]();
+};
+
+/**
+ * Build summary of current workflow state
+ */
+export const buildWorkflowSummary = (state: FeatureDevState): string => {
+ const lines: string[] = [];
+
+ lines.push("# Feature Development Progress");
+ lines.push("");
+ lines.push(`**Current Phase:** ${state.phase}`);
+ lines.push(`**Status:** ${state.phaseStatus}`);
+ lines.push("");
+
+ // Phase completion status
+ const phases: FeatureDevPhase[] = [
+ "understand",
+ "explore",
+ "plan",
+ "implement",
+ "verify",
+ "review",
+ "finalize",
+ ];
+
+ const currentIndex = phases.indexOf(state.phase);
+
+ lines.push("## Progress");
+ for (let i = 0; i < phases.length; i++) {
+ const phase = phases[i];
+ const status =
+ i < currentIndex
+ ? "ā"
+ : i === currentIndex
+ ? state.phaseStatus === "completed"
+ ? "ā"
+ : "ā"
+ : "ā";
+ lines.push(`${status} ${phase}`);
+ }
+
+ return lines.join("\n");
+};
+
+/**
+ * Extract key information from state for quick reference
+ */
+export const extractKeyInfo = (
+ state: FeatureDevState,
+): Record => {
+ return {
+ phase: state.phase,
+ status: state.phaseStatus,
+ requirementsCount: state.requirements.length,
+ relevantFilesCount: state.relevantFiles.length,
+ changesCount: state.changes.length,
+ reviewFindingsCount: state.reviewFindings.length,
+ checkpointsCount: state.checkpoints.length,
+ duration: Date.now() - state.startedAt,
+ };
+};
diff --git a/src/services/feature-dev/index.ts b/src/services/feature-dev/index.ts
new file mode 100644
index 0000000..b36717e
--- /dev/null
+++ b/src/services/feature-dev/index.ts
@@ -0,0 +1,290 @@
+/**
+ * Feature-Dev Workflow Service
+ *
+ * Main orchestrator for the 7-phase feature development workflow.
+ */
+
+import { PHASE_ORDER, FEATURE_DEV_CONFIG, FEATURE_DEV_ERRORS } from "@constants/feature-dev";
+import {
+ executePhase,
+ validateTransition,
+} from "@services/feature-dev/phase-executor";
+import { buildWorkflowSummary, extractKeyInfo } from "@services/feature-dev/context-builder";
+import type {
+ FeatureDevPhase,
+ FeatureDevState,
+ PhaseExecutionContext,
+ Checkpoint,
+ CheckpointDecision,
+} from "@/types/feature-dev";
+
+// Re-export sub-modules
+export * from "@services/feature-dev/phase-executor";
+export * from "@services/feature-dev/checkpoint-handler";
+export * from "@services/feature-dev/context-builder";
+
+// Active workflows storage
+const activeWorkflows = new Map();
+
+/**
+ * Create a new feature development workflow
+ */
+export const createWorkflow = (
+ id: string,
+ requirements: string[] = [],
+): FeatureDevState => {
+ const state: FeatureDevState = {
+ id,
+ phase: "understand",
+ phaseStatus: "pending",
+ startedAt: Date.now(),
+ updatedAt: Date.now(),
+ requirements,
+ clarifications: [],
+ explorationResults: [],
+ relevantFiles: [],
+ changes: [],
+ reviewFindings: [],
+ checkpoints: [],
+ };
+
+ activeWorkflows.set(id, state);
+ return state;
+};
+
+/**
+ * Get an active workflow by ID
+ */
+export const getWorkflow = (id: string): FeatureDevState | undefined => {
+ return activeWorkflows.get(id);
+};
+
+/**
+ * Update workflow state
+ */
+export const updateWorkflow = (
+ id: string,
+ updates: Partial,
+): FeatureDevState | undefined => {
+ const workflow = activeWorkflows.get(id);
+ if (!workflow) return undefined;
+
+ const updated = {
+ ...workflow,
+ ...updates,
+ updatedAt: Date.now(),
+ };
+
+ activeWorkflows.set(id, updated);
+ return updated;
+};
+
+/**
+ * Delete a workflow
+ */
+export const deleteWorkflow = (id: string): boolean => {
+ return activeWorkflows.delete(id);
+};
+
+/**
+ * Run the complete feature development workflow
+ */
+export const runWorkflow = async (
+ workflowId: string,
+ userRequest: string,
+ options: {
+ config?: Partial;
+ workingDir: string;
+ sessionId: string;
+ abortSignal?: AbortSignal;
+ onProgress?: (message: string) => void;
+ onCheckpoint?: (checkpoint: Checkpoint) => Promise<{
+ decision: CheckpointDecision;
+ feedback?: string;
+ }>;
+ },
+): Promise<{
+ success: boolean;
+ finalState: FeatureDevState;
+ error?: string;
+}> => {
+ // Merge config with defaults (kept for future extensibility)
+ void { ...FEATURE_DEV_CONFIG, ...options.config };
+
+ // Get or create workflow
+ let state = getWorkflow(workflowId);
+ if (!state) {
+ state = createWorkflow(workflowId);
+ }
+
+ // Build execution context
+ const ctx: PhaseExecutionContext = {
+ state,
+ workingDir: options.workingDir,
+ sessionId: options.sessionId,
+ abortSignal: options.abortSignal,
+ onProgress: options.onProgress,
+ onCheckpoint: options.onCheckpoint,
+ };
+
+ // Execute phases in order
+ while (state.phase !== "finalize" || state.phaseStatus !== "completed") {
+ // Check for abort
+ if (options.abortSignal?.aborted) {
+ state.abortReason = "Workflow aborted by user";
+ state.phaseStatus = "failed";
+ return {
+ success: false,
+ finalState: state,
+ error: FEATURE_DEV_ERRORS.WORKFLOW_ABORTED(state.abortReason),
+ };
+ }
+
+ // Execute current phase
+ const result = await executePhase(state.phase, ctx, userRequest);
+
+ // Apply state updates
+ if (result.stateUpdates) {
+ state = updateWorkflow(workflowId, result.stateUpdates) ?? state;
+ ctx.state = state;
+ }
+
+ // Handle phase result
+ if (!result.success) {
+ if (state.abortReason) {
+ // Workflow was aborted
+ return {
+ success: false,
+ finalState: state,
+ error: result.error,
+ };
+ }
+ // Phase needs attention (rejected, needs modification, etc.)
+ // Stay in current phase and let caller handle
+ continue;
+ }
+
+ // Move to next phase
+ if (result.nextPhase) {
+ const transition = validateTransition({
+ fromPhase: state.phase,
+ toPhase: result.nextPhase,
+ });
+
+ if (!transition.valid) {
+ return {
+ success: false,
+ finalState: state,
+ error: transition.error,
+ };
+ }
+
+ state = updateWorkflow(workflowId, {
+ phase: result.nextPhase,
+ phaseStatus: "pending",
+ }) ?? state;
+ ctx.state = state;
+ } else {
+ // No next phase, workflow complete
+ break;
+ }
+ }
+
+ return {
+ success: true,
+ finalState: state,
+ };
+};
+
+/**
+ * Get workflow progress summary
+ */
+export const getWorkflowProgress = (
+ workflowId: string,
+): { summary: string; keyInfo: Record } | undefined => {
+ const workflow = getWorkflow(workflowId);
+ if (!workflow) return undefined;
+
+ return {
+ summary: buildWorkflowSummary(workflow),
+ keyInfo: extractKeyInfo(workflow),
+ };
+};
+
+/**
+ * Abort an active workflow
+ */
+export const abortWorkflow = (
+ workflowId: string,
+ reason: string,
+): FeatureDevState | undefined => {
+ return updateWorkflow(workflowId, {
+ phaseStatus: "failed",
+ abortReason: reason,
+ });
+};
+
+/**
+ * Reset workflow to a specific phase
+ */
+export const resetToPhase = (
+ workflowId: string,
+ phase: FeatureDevPhase,
+): FeatureDevState | undefined => {
+ const workflow = getWorkflow(workflowId);
+ if (!workflow) return undefined;
+
+ // Clear state accumulated after this phase
+ const phaseIndex = PHASE_ORDER.indexOf(phase);
+ const updates: Partial = {
+ phase,
+ phaseStatus: "pending",
+ };
+
+ // Clear phase-specific data based on which phase we're resetting to
+ if (phaseIndex <= PHASE_ORDER.indexOf("explore")) {
+ updates.explorationResults = [];
+ updates.relevantFiles = [];
+ }
+ if (phaseIndex <= PHASE_ORDER.indexOf("plan")) {
+ updates.plan = undefined;
+ }
+ if (phaseIndex <= PHASE_ORDER.indexOf("implement")) {
+ updates.changes = [];
+ }
+ if (phaseIndex <= PHASE_ORDER.indexOf("verify")) {
+ updates.testResults = undefined;
+ }
+ if (phaseIndex <= PHASE_ORDER.indexOf("review")) {
+ updates.reviewFindings = [];
+ }
+ if (phaseIndex <= PHASE_ORDER.indexOf("finalize")) {
+ updates.commitHash = undefined;
+ }
+
+ return updateWorkflow(workflowId, updates);
+};
+
+/**
+ * List all active workflows
+ */
+export const listWorkflows = (): Array<{
+ id: string;
+ phase: FeatureDevPhase;
+ status: string;
+ startedAt: number;
+}> => {
+ return Array.from(activeWorkflows.values()).map((w) => ({
+ id: w.id,
+ phase: w.phase,
+ status: w.phaseStatus,
+ startedAt: w.startedAt,
+ }));
+};
+
+/**
+ * Create workflow ID
+ */
+export const createWorkflowId = (): string => {
+ return `fd_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`;
+};
diff --git a/src/services/feature-dev/phase-executor.ts b/src/services/feature-dev/phase-executor.ts
new file mode 100644
index 0000000..d086abf
--- /dev/null
+++ b/src/services/feature-dev/phase-executor.ts
@@ -0,0 +1,345 @@
+/**
+ * Phase Executor
+ *
+ * Executes individual phases of the feature development workflow.
+ */
+
+import {
+ PHASE_ORDER,
+ ALLOWED_TRANSITIONS,
+ PHASE_TIMEOUTS,
+ FEATURE_DEV_ERRORS,
+ FEATURE_DEV_MESSAGES,
+} from "@constants/feature-dev";
+import {
+ createCheckpoint,
+ requiresCheckpoint,
+ requestApproval,
+ processCheckpointDecision,
+} from "@services/feature-dev/checkpoint-handler";
+import { buildPhaseContext } from "@services/feature-dev/context-builder";
+import type {
+ FeatureDevPhase,
+ PhaseExecutionContext,
+ PhaseExecutionResult,
+ PhaseTransitionRequest,
+} from "@/types/feature-dev";
+
+/**
+ * Execute a single phase
+ */
+export const executePhase = async (
+ phase: FeatureDevPhase,
+ ctx: PhaseExecutionContext,
+ userRequest: string,
+): Promise => {
+ // Update state to in_progress
+ ctx.state.phase = phase;
+ ctx.state.phaseStatus = "in_progress";
+ ctx.state.updatedAt = Date.now();
+
+ ctx.onProgress?.(FEATURE_DEV_MESSAGES.STARTING(phase));
+
+ try {
+ // Execute phase-specific logic
+ const result = await executePhaseLogic(phase, ctx, userRequest);
+
+ // Handle checkpoint if needed
+ if (requiresCheckpoint(phase) || result.checkpoint) {
+ const checkpoint =
+ result.checkpoint ?? createCheckpoint(phase, ctx.state, []);
+
+ ctx.state.phaseStatus = "awaiting_approval";
+
+ const { decision, feedback } = await requestApproval(checkpoint, ctx);
+ const { proceed, action } = processCheckpointDecision(decision, feedback);
+
+ if (!proceed) {
+ if (action === "aborted") {
+ ctx.state.abortReason = feedback ?? "User aborted";
+ return {
+ success: false,
+ phase,
+ error: FEATURE_DEV_ERRORS.WORKFLOW_ABORTED(ctx.state.abortReason),
+ stateUpdates: { phaseStatus: "failed" },
+ };
+ }
+
+ // Rejected or modify - stay in current phase
+ return {
+ success: false,
+ phase,
+ stateUpdates: { phaseStatus: "pending" },
+ };
+ }
+
+ ctx.state.phaseStatus = "approved";
+ }
+
+ // Phase completed successfully
+ ctx.state.phaseStatus = "completed";
+ ctx.state.updatedAt = Date.now();
+
+ ctx.onProgress?.(FEATURE_DEV_MESSAGES.COMPLETED(phase));
+
+ return {
+ success: true,
+ phase,
+ nextPhase: getNextPhase(phase),
+ stateUpdates: { phaseStatus: "completed", ...result.stateUpdates },
+ };
+ } catch (error) {
+ const message = error instanceof Error ? error.message : String(error);
+ ctx.state.phaseStatus = "failed";
+
+ return {
+ success: false,
+ phase,
+ error: FEATURE_DEV_ERRORS.PHASE_FAILED(phase, message),
+ stateUpdates: { phaseStatus: "failed" },
+ };
+ }
+};
+
+/**
+ * Execute phase-specific logic
+ */
+const executePhaseLogic = async (
+ phase: FeatureDevPhase,
+ ctx: PhaseExecutionContext,
+ userRequest: string,
+): Promise> => {
+ // Build context for this phase
+ const phaseContext = buildPhaseContext(phase, ctx.state, userRequest);
+
+ // Phase-specific execution
+ const phaseExecutors: Record<
+ FeatureDevPhase,
+ () => Promise>
+ > = {
+ understand: async () => executeUnderstandPhase(ctx, phaseContext),
+ explore: async () => executeExplorePhase(ctx, phaseContext),
+ plan: async () => executePlanPhase(ctx, phaseContext),
+ implement: async () => executeImplementPhase(ctx, phaseContext),
+ verify: async () => executeVerifyPhase(ctx, phaseContext),
+ review: async () => executeReviewPhase(ctx, phaseContext),
+ finalize: async () => executeFinalizePhase(ctx, phaseContext),
+ };
+
+ return phaseExecutors[phase]();
+};
+
+/**
+ * Understand phase execution
+ */
+const executeUnderstandPhase = async (
+ ctx: PhaseExecutionContext,
+ _phaseContext: string,
+): Promise> => {
+ // This phase would typically involve LLM interaction to:
+ // 1. Parse the user's request
+ // 2. Identify requirements
+ // 3. Ask clarifying questions
+ // For now, return a checkpoint for user confirmation
+
+ const checkpoint = createCheckpoint("understand", ctx.state, [
+ "Review the identified requirements",
+ "Provide any clarifications needed",
+ "Confirm understanding is correct",
+ ]);
+
+ return {
+ checkpoint,
+ stateUpdates: {},
+ };
+};
+
+/**
+ * Explore phase execution
+ */
+const executeExplorePhase = async (
+ ctx: PhaseExecutionContext,
+ _phaseContext: string,
+): Promise> => {
+ ctx.onProgress?.(FEATURE_DEV_MESSAGES.EXPLORING("relevant code patterns"));
+
+ // This phase would use parallel agents to search the codebase
+ // For now, return a basic result
+
+ return {
+ stateUpdates: {},
+ };
+};
+
+/**
+ * Plan phase execution
+ */
+const executePlanPhase = async (
+ ctx: PhaseExecutionContext,
+ _phaseContext: string,
+): Promise> => {
+ // This phase would involve LLM to create implementation plan
+ // The plan must be approved before proceeding
+
+ const checkpoint = createCheckpoint("plan", ctx.state, [
+ "Review the implementation plan",
+ "Check the proposed file changes",
+ "Verify the approach is correct",
+ "Consider the identified risks",
+ ]);
+
+ return {
+ checkpoint,
+ stateUpdates: {},
+ };
+};
+
+/**
+ * Implement phase execution
+ */
+const executeImplementPhase = async (
+ ctx: PhaseExecutionContext,
+ _phaseContext: string,
+): Promise> => {
+ // Verify we have a plan
+ if (!ctx.state.plan) {
+ throw new Error(FEATURE_DEV_ERRORS.NO_PLAN);
+ }
+
+ // This phase would execute each step in the plan
+ const totalSteps = ctx.state.plan.steps.length;
+
+ for (let i = 0; i < totalSteps; i++) {
+ ctx.onProgress?.(FEATURE_DEV_MESSAGES.IMPLEMENTING_STEP(i + 1, totalSteps));
+ // Step execution would happen here
+ }
+
+ return {
+ stateUpdates: {},
+ };
+};
+
+/**
+ * Verify phase execution
+ */
+const executeVerifyPhase = async (
+ ctx: PhaseExecutionContext,
+ _phaseContext: string,
+): Promise> => {
+ ctx.onProgress?.(FEATURE_DEV_MESSAGES.RUNNING_TESTS);
+
+ // This phase would run the test suite
+ // For now, create a checkpoint for test review
+
+ const checkpoint = createCheckpoint("verify", ctx.state, [
+ "Review test results",
+ "Check for any failures",
+ "Verify coverage is adequate",
+ ]);
+
+ return {
+ checkpoint,
+ stateUpdates: {},
+ };
+};
+
+/**
+ * Review phase execution
+ */
+const executeReviewPhase = async (
+ ctx: PhaseExecutionContext,
+ _phaseContext: string,
+): Promise> => {
+ ctx.onProgress?.(FEATURE_DEV_MESSAGES.REVIEWING);
+
+ // This phase would perform self-review of changes
+ const checkpoint = createCheckpoint("review", ctx.state, [
+ "Review code quality findings",
+ "Address any critical issues",
+ "Confirm changes are ready",
+ ]);
+
+ return {
+ checkpoint,
+ stateUpdates: {},
+ };
+};
+
+/**
+ * Finalize phase execution
+ */
+const executeFinalizePhase = async (
+ ctx: PhaseExecutionContext,
+ _phaseContext: string,
+): Promise> => {
+ ctx.onProgress?.(FEATURE_DEV_MESSAGES.FINALIZING);
+
+ // This phase would create the commit
+ const checkpoint = createCheckpoint("finalize", ctx.state, [
+ "Confirm commit message",
+ "Verify all changes are included",
+ "Approve final commit",
+ ]);
+
+ return {
+ checkpoint,
+ stateUpdates: {},
+ };
+};
+
+/**
+ * Get the next phase in the workflow
+ */
+export const getNextPhase = (
+ currentPhase: FeatureDevPhase,
+): FeatureDevPhase | undefined => {
+ const currentIndex = PHASE_ORDER.indexOf(currentPhase);
+ if (currentIndex === -1 || currentIndex >= PHASE_ORDER.length - 1) {
+ return undefined;
+ }
+ return PHASE_ORDER[currentIndex + 1];
+};
+
+/**
+ * Get the previous phase in the workflow
+ */
+export const getPreviousPhase = (
+ currentPhase: FeatureDevPhase,
+): FeatureDevPhase | undefined => {
+ const currentIndex = PHASE_ORDER.indexOf(currentPhase);
+ if (currentIndex <= 0) {
+ return undefined;
+ }
+ return PHASE_ORDER[currentIndex - 1];
+};
+
+/**
+ * Validate a phase transition
+ */
+export const validateTransition = (
+ request: PhaseTransitionRequest,
+): { valid: boolean; error?: string } => {
+ if (request.skipValidation) {
+ return { valid: true };
+ }
+
+ const allowed = ALLOWED_TRANSITIONS[request.fromPhase];
+ if (!allowed.includes(request.toPhase)) {
+ return {
+ valid: false,
+ error: FEATURE_DEV_ERRORS.INVALID_TRANSITION(
+ request.fromPhase,
+ request.toPhase,
+ ),
+ };
+ }
+
+ return { valid: true };
+};
+
+/**
+ * Get timeout for a phase
+ */
+export const getPhaseTimeout = (phase: FeatureDevPhase): number => {
+ return PHASE_TIMEOUTS[phase];
+};
diff --git a/src/services/index.ts b/src/services/index.ts
index a77540c..a6098cc 100644
--- a/src/services/index.ts
+++ b/src/services/index.ts
@@ -8,3 +8,4 @@ export * from "@services/github-issue-service";
export * from "@services/command-suggestion-service";
export * from "@services/learning-service";
export * from "@services/rules-service";
+export * as brainService from "@services/brain";
diff --git a/src/services/model-routing.ts b/src/services/model-routing.ts
new file mode 100644
index 0000000..085e054
--- /dev/null
+++ b/src/services/model-routing.ts
@@ -0,0 +1,225 @@
+/**
+ * Model Routing Service
+ *
+ * Maps agent tiers to appropriate models based on task complexity.
+ * Following Claude Code's multi-model strategy:
+ * - fast: Quick screening, filtering (like Haiku)
+ * - balanced: Detailed analysis, general tasks (like Sonnet)
+ * - thorough: Complex reasoning, bug hunting (like Opus)
+ */
+
+import { getModelContextSize } from "@constants/copilot";
+import type { AgentConfig } from "@/types/agent-config";
+
+/**
+ * Model tier for routing decisions
+ */
+export type ModelTier = "fast" | "balanced" | "thorough";
+
+/**
+ * Model tier mapping to Copilot models
+ * These are the default mappings - can be overridden by agent config
+ */
+export const MODEL_TIER_MAPPING: Record = {
+ // Fast tier: Low cost, quick responses (0x or 0.33x multiplier)
+ fast: [
+ "gpt-5-mini",
+ "gpt-4o-mini",
+ "claude-haiku-4.5",
+ "gemini-3-flash-preview",
+ "grok-code-fast-1",
+ ],
+ // Balanced tier: Good quality, moderate cost (1x multiplier)
+ balanced: [
+ "claude-sonnet-4.5",
+ "claude-sonnet-4",
+ "gpt-5",
+ "gpt-5.1",
+ "gemini-2.5-pro",
+ "gpt-4.1",
+ ],
+ // Thorough tier: Best quality, higher cost (3x multiplier)
+ thorough: [
+ "claude-opus-4.5",
+ "gpt-5.2-codex",
+ "gpt-5.1-codex-max",
+ ],
+};
+
+/**
+ * Tier aliases for agent frontmatter
+ */
+const TIER_ALIASES: Record = {
+ haiku: "fast",
+ fast: "fast",
+ quick: "fast",
+ sonnet: "balanced",
+ balanced: "balanced",
+ default: "balanced",
+ opus: "thorough",
+ thorough: "thorough",
+ deep: "thorough",
+};
+
+/**
+ * Agent type to default tier mapping
+ */
+const AGENT_TYPE_TIERS: Record = {
+ explorer: "fast",
+ explore: "fast",
+ filter: "fast",
+ screen: "fast",
+ architect: "balanced",
+ planner: "balanced",
+ plan: "balanced",
+ coder: "balanced",
+ general: "balanced",
+ reviewer: "balanced",
+ review: "balanced",
+ "code-reviewer": "balanced",
+ "bug-hunter": "thorough",
+ bugs: "thorough",
+ security: "thorough",
+ compaction: "fast",
+ summary: "fast",
+ title: "fast",
+};
+
+/**
+ * Resolve model tier from string (tier name or model ID)
+ */
+export const resolveTier = (modelOrTier: string): ModelTier | null => {
+ const lower = modelOrTier.toLowerCase();
+
+ // Check if it's a tier alias
+ if (lower in TIER_ALIASES) {
+ return TIER_ALIASES[lower];
+ }
+
+ // Check if it's already a model ID in one of the tiers
+ for (const [tier, models] of Object.entries(MODEL_TIER_MAPPING)) {
+ if (models.some((m) => m.toLowerCase() === lower)) {
+ return tier as ModelTier;
+ }
+ }
+
+ return null;
+};
+
+/**
+ * Get the best available model for a tier
+ * Returns the first model in the tier's list (assumed to be preference order)
+ */
+export const getModelForTier = (
+ tier: ModelTier,
+ availableModels?: string[],
+): string => {
+ const tierModels = MODEL_TIER_MAPPING[tier];
+
+ if (availableModels && availableModels.length > 0) {
+ // Find first available model from tier
+ for (const model of tierModels) {
+ if (availableModels.includes(model)) {
+ return model;
+ }
+ }
+ // Fallback to first tier model if none available
+ return tierModels[0];
+ }
+
+ return tierModels[0];
+};
+
+/**
+ * Infer tier from agent type/name
+ */
+export const inferTierFromAgent = (agent: AgentConfig): ModelTier => {
+ const idLower = agent.id.toLowerCase();
+ const nameLower = agent.name.toLowerCase();
+
+ // Check agent type mapping
+ for (const [type, tier] of Object.entries(AGENT_TYPE_TIERS)) {
+ if (idLower.includes(type) || nameLower.includes(type)) {
+ return tier;
+ }
+ }
+
+ // Default to balanced
+ return "balanced";
+};
+
+/**
+ * Resolve the model to use for an agent
+ *
+ * Priority:
+ * 1. Explicit model in agent config (full model ID)
+ * 2. Tier specified in agent config (fast/balanced/thorough)
+ * 3. Inferred from agent type/name
+ * 4. Default model passed in
+ */
+export const resolveAgentModel = (
+ agent: AgentConfig,
+ defaultModel: string,
+ availableModels?: string[],
+): { model: string; tier: ModelTier; source: string } => {
+ // 1. Check explicit model in agent config
+ if (agent.model) {
+ // Check if it's a tier name
+ const tier = resolveTier(agent.model);
+ if (tier) {
+ const model = getModelForTier(tier, availableModels);
+ return { model, tier, source: "agent-tier" };
+ }
+
+ // Otherwise use as model ID
+ return {
+ model: agent.model,
+ tier: resolveTier(agent.model) ?? "balanced",
+ source: "agent-model",
+ };
+ }
+
+ // 2. Infer from agent type
+ const inferredTier = inferTierFromAgent(agent);
+ if (inferredTier !== "balanced") {
+ const model = getModelForTier(inferredTier, availableModels);
+ return { model, tier: inferredTier, source: "agent-inferred" };
+ }
+
+ // 3. Use default
+ const defaultTier = resolveTier(defaultModel) ?? "balanced";
+ return { model: defaultModel, tier: defaultTier, source: "default" };
+};
+
+/**
+ * Get model context size for routing decisions
+ */
+export const getRouteContextSize = (modelId: string): number => {
+ return getModelContextSize(modelId).input;
+};
+
+/**
+ * Model routing decision
+ */
+export interface ModelRoutingDecision {
+ model: string;
+ tier: ModelTier;
+ source: string;
+ contextSize: number;
+}
+
+/**
+ * Make routing decision for an agent
+ */
+export const routeAgent = (
+ agent: AgentConfig,
+ defaultModel: string,
+ availableModels?: string[],
+): ModelRoutingDecision => {
+ const resolution = resolveAgentModel(agent, defaultModel, availableModels);
+
+ return {
+ ...resolution,
+ contextSize: getRouteContextSize(resolution.model),
+ };
+};
diff --git a/src/services/parallel/conflict-detector.ts b/src/services/parallel/conflict-detector.ts
new file mode 100644
index 0000000..7ede153
--- /dev/null
+++ b/src/services/parallel/conflict-detector.ts
@@ -0,0 +1,241 @@
+/**
+ * Conflict Detector
+ *
+ * Detects conflicts between parallel tasks based on file paths
+ * and task types. Read-only tasks don't conflict with each other.
+ */
+
+import { CONFLICT_CONFIG, READ_ONLY_TASK_TYPES, MODIFYING_TASK_TYPES } from "@constants/parallel";
+import type {
+ ParallelTask,
+ ConflictCheckResult,
+ ConflictResolution,
+} from "@/types/parallel";
+
+/**
+ * Active tasks being tracked for conflicts
+ */
+const activeTasks = new Map();
+
+/**
+ * Register a task as active
+ */
+export const registerActiveTask = (task: ParallelTask): void => {
+ activeTasks.set(task.id, task);
+};
+
+/**
+ * Unregister a task when completed
+ */
+export const unregisterActiveTask = (taskId: string): void => {
+ activeTasks.delete(taskId);
+};
+
+/**
+ * Clear all active tasks
+ */
+export const clearActiveTasks = (): void => {
+ activeTasks.clear();
+};
+
+/**
+ * Get all active task IDs
+ */
+export const getActiveTaskIds = (): string[] => {
+ return Array.from(activeTasks.keys());
+};
+
+/**
+ * Check if two tasks conflict based on their paths
+ */
+const checkPathConflict = (
+ taskA: ParallelTask,
+ taskB: ParallelTask,
+): string[] => {
+ const pathsA = taskA.conflictPaths ?? [];
+ const pathsB = taskB.conflictPaths ?? [];
+
+ const conflictingPaths: string[] = [];
+
+ for (const pathA of pathsA) {
+ for (const pathB of pathsB) {
+ if (pathsOverlap(pathA, pathB)) {
+ conflictingPaths.push(pathA);
+ }
+ }
+ }
+
+ return conflictingPaths;
+};
+
+/**
+ * Check if two paths overlap (one contains or equals the other)
+ */
+const pathsOverlap = (pathA: string, pathB: string): boolean => {
+ const normalizedA = normalizePath(pathA);
+ const normalizedB = normalizePath(pathB);
+
+ // Exact match
+ if (normalizedA === normalizedB) return true;
+
+ // One is parent of the other
+ if (normalizedA.startsWith(normalizedB + "/")) return true;
+ if (normalizedB.startsWith(normalizedA + "/")) return true;
+
+ return false;
+};
+
+/**
+ * Normalize path for comparison
+ */
+const normalizePath = (path: string): string => {
+ return path.replace(/\\/g, "/").replace(/\/+/g, "/").replace(/\/$/, "");
+};
+
+/**
+ * Check if task types can conflict
+ */
+const canTypesConflict = (typeA: string, typeB: string): boolean => {
+ // Read-only tasks don't conflict with each other
+ if (READ_ONLY_TASK_TYPES.has(typeA) && READ_ONLY_TASK_TYPES.has(typeB)) {
+ return false;
+ }
+
+ // Modifying tasks conflict with everything on same paths
+ if (MODIFYING_TASK_TYPES.has(typeA) || MODIFYING_TASK_TYPES.has(typeB)) {
+ return true;
+ }
+
+ return false;
+};
+
+/**
+ * Check if a task conflicts with any active tasks
+ */
+export const checkConflicts = (task: ParallelTask): ConflictCheckResult => {
+ if (!CONFLICT_CONFIG.ENABLE_PATH_CONFLICT) {
+ return {
+ hasConflict: false,
+ conflictingTaskIds: [],
+ conflictingPaths: [],
+ };
+ }
+
+ const conflictingTaskIds: string[] = [];
+ const conflictingPaths: string[] = [];
+
+ for (const [activeId, activeTask] of activeTasks) {
+ // Skip self
+ if (activeId === task.id) continue;
+
+ // Check if task types can conflict
+ if (!canTypesConflict(task.type, activeTask.type)) continue;
+
+ // Check path conflicts
+ const pathConflicts = checkPathConflict(task, activeTask);
+
+ if (pathConflicts.length > 0) {
+ conflictingTaskIds.push(activeId);
+ conflictingPaths.push(...pathConflicts);
+ }
+ }
+
+ const hasConflict = conflictingTaskIds.length > 0;
+
+ // Suggest resolution
+ const resolution = hasConflict ? suggestResolution(task, conflictingTaskIds) : undefined;
+
+ return {
+ hasConflict,
+ conflictingTaskIds,
+ conflictingPaths: [...new Set(conflictingPaths)],
+ resolution,
+ };
+};
+
+/**
+ * Suggest a conflict resolution strategy
+ */
+const suggestResolution = (
+ task: ParallelTask,
+ conflictingTaskIds: string[],
+): ConflictResolution => {
+ // Read-only tasks should wait
+ if (READ_ONLY_TASK_TYPES.has(task.type)) {
+ return "wait";
+ }
+
+ // High priority tasks may cancel lower priority conflicts
+ const conflictingTasks = conflictingTaskIds
+ .map((id) => activeTasks.get(id))
+ .filter((t): t is ParallelTask => t !== undefined);
+
+ const allLowerPriority = conflictingTasks.every(
+ (t) => getPriorityValue(t.priority) < getPriorityValue(task.priority),
+ );
+
+ if (allLowerPriority && task.priority === "critical") {
+ return "cancel";
+ }
+
+ // Default to waiting
+ return "wait";
+};
+
+/**
+ * Get numeric priority value
+ */
+const getPriorityValue = (priority: string): number => {
+ const values: Record = {
+ critical: 100,
+ high: 75,
+ normal: 50,
+ low: 25,
+ };
+ return values[priority] ?? 50;
+};
+
+/**
+ * Wait for conflicts to resolve
+ */
+export const waitForConflictResolution = async (
+ taskIds: string[],
+ timeout: number = CONFLICT_CONFIG.CONFLICT_CHECK_TIMEOUT_MS,
+): Promise => {
+ const startTime = Date.now();
+
+ while (Date.now() - startTime < timeout) {
+ const stillActive = taskIds.filter((id) => activeTasks.has(id));
+
+ if (stillActive.length === 0) {
+ return true;
+ }
+
+ // Wait a bit before checking again
+ await new Promise((resolve) => setTimeout(resolve, 100));
+ }
+
+ return false;
+};
+
+/**
+ * Get tasks that would be affected by cancelling a task
+ */
+export const getDependentTasks = (taskId: string): string[] => {
+ const task = activeTasks.get(taskId);
+ if (!task) return [];
+
+ const dependents: string[] = [];
+
+ for (const [id, activeTask] of activeTasks) {
+ if (id === taskId) continue;
+
+ // Check if this task was waiting on the cancelled task
+ const conflicts = checkPathConflict(activeTask, task);
+ if (conflicts.length > 0) {
+ dependents.push(id);
+ }
+ }
+
+ return dependents;
+};
diff --git a/src/services/parallel/index.ts b/src/services/parallel/index.ts
new file mode 100644
index 0000000..43a4731
--- /dev/null
+++ b/src/services/parallel/index.ts
@@ -0,0 +1,351 @@
+/**
+ * Parallel Executor
+ *
+ * Main orchestrator for parallel task execution.
+ * Coordinates conflict detection, resource management, and result aggregation.
+ */
+
+import { PARALLEL_DEFAULTS, PARALLEL_ERRORS, TASK_TIMEOUTS } from "@constants/parallel";
+import {
+ registerActiveTask,
+ unregisterActiveTask,
+ checkConflicts,
+ waitForConflictResolution,
+ clearActiveTasks,
+} from "@services/parallel/conflict-detector";
+import {
+ initializeResourceManager,
+ acquireResources,
+ releaseResources,
+ canAcceptTask,
+ cancelWaitingTask,
+ resetResourceManager,
+ getResourceState,
+} from "@services/parallel/resource-manager";
+import { collectResults } from "@services/parallel/result-aggregator";
+import type {
+ ParallelTask,
+ ParallelExecutionResult,
+ ParallelExecutorOptions,
+ AggregatedResults,
+ BatchExecutionRequest,
+ ConflictResolution,
+} from "@/types/parallel";
+
+// Re-export utilities
+export * from "@services/parallel/conflict-detector";
+export * from "@services/parallel/resource-manager";
+export * from "@services/parallel/result-aggregator";
+
+// ============================================================================
+// Task Execution
+// ============================================================================
+
+/**
+ * Execute a single task with timeout and error handling
+ */
+const executeTask = async (
+ task: ParallelTask,
+ executor: (input: TInput) => Promise,
+ options: ParallelExecutorOptions,
+): Promise> => {
+ const startedAt = Date.now();
+ const timeout = task.timeout ?? TASK_TIMEOUTS[task.type] ?? PARALLEL_DEFAULTS.defaultTimeout;
+
+ try {
+ // Notify task start
+ options.onTaskStart?.(task);
+
+ // Execute with timeout
+ const result = await Promise.race([
+ executor(task.input),
+ createTimeout(timeout, task.id),
+ ]);
+
+ const completedAt = Date.now();
+ const executionResult: ParallelExecutionResult = {
+ taskId: task.id,
+ status: "completed",
+ result,
+ duration: completedAt - startedAt,
+ startedAt,
+ completedAt,
+ };
+
+ options.onTaskComplete?.(executionResult);
+ return executionResult;
+ } catch (error) {
+ const completedAt = Date.now();
+ const isTimeout = error instanceof TimeoutError;
+
+ const executionResult: ParallelExecutionResult = {
+ taskId: task.id,
+ status: isTimeout ? "timeout" : "error",
+ error: error instanceof Error ? error.message : String(error),
+ duration: completedAt - startedAt,
+ startedAt,
+ completedAt,
+ };
+
+ options.onTaskError?.(task, error instanceof Error ? error : new Error(String(error)));
+ return executionResult;
+ }
+};
+
+/**
+ * Create a timeout promise
+ */
+class TimeoutError extends Error {
+ constructor(taskId: string) {
+ super(PARALLEL_ERRORS.TIMEOUT(taskId));
+ this.name = "TimeoutError";
+ }
+}
+
+const createTimeout = (ms: number, taskId: string): Promise => {
+ return new Promise((_, reject) => {
+ setTimeout(() => reject(new TimeoutError(taskId)), ms);
+ });
+};
+
+// ============================================================================
+// Parallel Executor
+// ============================================================================
+
+/**
+ * Execute tasks in parallel with conflict detection and resource management
+ */
+export const executeParallel = async (
+ tasks: ParallelTask[],
+ executor: (input: TInput) => Promise,
+ options: Partial = {},
+): Promise> => {
+ const fullOptions: ParallelExecutorOptions = {
+ limits: options.limits ?? PARALLEL_DEFAULTS,
+ onTaskStart: options.onTaskStart,
+ onTaskComplete: options.onTaskComplete,
+ onTaskError: options.onTaskError,
+ onConflict: options.onConflict,
+ abortSignal: options.abortSignal,
+ };
+
+ // Initialize resource manager
+ initializeResourceManager(fullOptions.limits);
+
+ // Track results
+ const results: ParallelExecutionResult[] = [];
+ const pendingTasks = new Map>>();
+
+ // Check if executor was aborted
+ const checkAbort = (): boolean => {
+ return fullOptions.abortSignal?.aborted ?? false;
+ };
+
+ // Process each task
+ for (const task of tasks) {
+ if (checkAbort()) {
+ results.push({
+ taskId: task.id,
+ status: "cancelled",
+ error: PARALLEL_ERRORS.EXECUTOR_ABORTED,
+ duration: 0,
+ startedAt: Date.now(),
+ completedAt: Date.now(),
+ });
+ continue;
+ }
+
+ // Check if we can accept more tasks
+ if (!canAcceptTask(fullOptions.limits)) {
+ results.push({
+ taskId: task.id,
+ status: "error",
+ error: PARALLEL_ERRORS.QUEUE_FULL,
+ duration: 0,
+ startedAt: Date.now(),
+ completedAt: Date.now(),
+ });
+ continue;
+ }
+
+ // Start task execution
+ const taskPromise = executeWithConflictHandling(
+ task,
+ executor,
+ fullOptions,
+ );
+
+ pendingTasks.set(task.id, taskPromise);
+
+ // Remove from pending when done
+ taskPromise.then((result) => {
+ pendingTasks.delete(task.id);
+ results.push(result);
+ });
+ }
+
+ // Wait for all pending tasks
+ await Promise.all(pendingTasks.values());
+
+ // Cleanup
+ clearActiveTasks();
+
+ return collectResults(results);
+};
+
+/**
+ * Execute a task with conflict handling
+ */
+const executeWithConflictHandling = async (
+ task: ParallelTask