Adding autocomplete and copilot suggestions
lua/codetyper/brain/delta/commit.lua (new file, +291 lines)
@@ -0,0 +1,291 @@
|
||||
--- Brain Delta Commit Operations
|
||||
--- Git-like commit creation and management
|
||||
|
||||
local storage = require("codetyper.brain.storage")
|
||||
local hash_mod = require("codetyper.brain.hash")
|
||||
local diff_mod = require("codetyper.brain.delta.diff")
|
||||
local types = require("codetyper.brain.types")
|
||||
|
||||
local M = {}
|
||||
|
||||
--- Create a new delta commit
|
||||
---@param changes table[] Changes to commit
|
||||
---@param message string Commit message
|
||||
---@param trigger? string Trigger source
|
||||
---@return Delta|nil Created delta
|
||||
function M.create(changes, message, trigger)
|
||||
if not changes or #changes == 0 then
|
||||
return nil
|
||||
end
|
||||
|
||||
local now = os.time()
|
||||
local head = storage.get_head()
|
||||
|
||||
-- Create delta object
|
||||
local delta = {
|
||||
h = hash_mod.delta_hash(changes, head, now),
|
||||
p = head,
|
||||
ts = now,
|
||||
ch = {},
|
||||
m = {
|
||||
msg = message or "Unnamed commit",
|
||||
trig = trigger or "manual",
|
||||
},
|
||||
}
|
||||
|
||||
-- Process changes
|
||||
for _, change in ipairs(changes) do
|
||||
table.insert(delta.ch, {
|
||||
op = change.op,
|
||||
path = change.path,
|
||||
bh = change.bh,
|
||||
ah = change.ah,
|
||||
diff = change.diff,
|
||||
})
|
||||
end
|
||||
|
||||
-- Save delta
|
||||
storage.save_delta(delta)
|
||||
|
||||
-- Update HEAD
|
||||
storage.set_head(delta.h)
|
||||
|
||||
-- Update meta
|
||||
local meta = storage.get_meta()
|
||||
storage.update_meta({ dc = meta.dc + 1 })
|
||||
|
||||
return delta
|
||||
end
|
||||
|
||||
--- Get a delta by hash
|
||||
---@param delta_hash string Delta hash
|
||||
---@return Delta|nil
|
||||
function M.get(delta_hash)
|
||||
return storage.get_delta(delta_hash)
|
||||
end
|
||||
|
||||
--- Get the current HEAD delta
|
||||
---@return Delta|nil
|
||||
function M.get_head()
|
||||
local head_hash = storage.get_head()
|
||||
if not head_hash then
|
||||
return nil
|
||||
end
|
||||
return M.get(head_hash)
|
||||
end
|
||||
|
||||
--- Get delta history (ancestry chain)
|
||||
---@param limit? number Max entries
|
||||
---@param from_hash? string Starting hash (default: HEAD)
|
||||
---@return Delta[]
|
||||
function M.get_history(limit, from_hash)
|
||||
limit = limit or 50
|
||||
local history = {}
|
||||
local current_hash = from_hash or storage.get_head()
|
||||
|
||||
while current_hash and #history < limit do
|
||||
local delta = M.get(current_hash)
|
||||
if not delta then
|
||||
break
|
||||
end
|
||||
|
||||
table.insert(history, delta)
|
||||
current_hash = delta.p
|
||||
end
|
||||
|
||||
return history
|
||||
end
|
||||
|
||||
--- Check if a delta exists
|
||||
---@param delta_hash string Delta hash
|
||||
---@return boolean
|
||||
function M.exists(delta_hash)
|
||||
return M.get(delta_hash) ~= nil
|
||||
end
|
||||
|
||||
--- Get the path from one delta to another
|
||||
---@param from_hash string Start delta hash
|
||||
---@param to_hash string End delta hash
|
||||
---@return Delta[]|nil Path of deltas, or nil if no path
|
||||
function M.get_path(from_hash, to_hash)
|
||||
-- Build ancestry from both sides
|
||||
local from_ancestry = {}
|
||||
local current = from_hash
|
||||
while current do
|
||||
from_ancestry[current] = true
|
||||
local delta = M.get(current)
|
||||
if not delta then
|
||||
break
|
||||
end
|
||||
current = delta.p
|
||||
end
|
||||
|
||||
-- Walk from to_hash back to find common ancestor
|
||||
local path = {}
|
||||
current = to_hash
|
||||
while current do
|
||||
local delta = M.get(current)
|
||||
if not delta then
|
||||
break
|
||||
end
|
||||
|
||||
table.insert(path, 1, delta)
|
||||
|
||||
if from_ancestry[current] then
|
||||
-- Found common ancestor
|
||||
return path
|
||||
end
|
||||
|
||||
current = delta.p
|
||||
end
|
||||
|
||||
return nil
|
||||
end
|
||||
|
||||
--- Get all changes between two deltas
|
||||
---@param from_hash string|nil Start delta hash (nil = beginning)
|
||||
---@param to_hash string End delta hash
|
||||
---@return table[] Combined changes
|
||||
function M.get_changes_between(from_hash, to_hash)
|
||||
local path = {}
|
||||
local current = to_hash
|
||||
|
||||
while current and current ~= from_hash do
|
||||
local delta = M.get(current)
|
||||
if not delta then
|
||||
break
|
||||
end
|
||||
table.insert(path, 1, delta)
|
||||
current = delta.p
|
||||
end
|
||||
|
||||
-- Collect all changes
|
||||
local changes = {}
|
||||
for _, delta in ipairs(path) do
|
||||
for _, change in ipairs(delta.ch) do
|
||||
table.insert(changes, change)
|
||||
end
|
||||
end
|
||||
|
||||
return changes
|
||||
end
|
||||
|
||||
--- Compute reverse changes for rollback
|
||||
---@param delta Delta Delta to reverse
|
||||
---@return table[] Reverse changes
|
||||
function M.compute_reverse(delta)
|
||||
local reversed = {}
|
||||
|
||||
for i = #delta.ch, 1, -1 do
|
||||
local change = delta.ch[i]
|
||||
local rev = {
|
||||
path = change.path,
|
||||
}
|
||||
|
||||
if change.op == types.DELTA_OPS.ADD then
|
||||
rev.op = types.DELTA_OPS.DELETE
|
||||
rev.bh = change.ah
|
||||
elseif change.op == types.DELTA_OPS.DELETE then
|
||||
rev.op = types.DELTA_OPS.ADD
|
||||
rev.ah = change.bh
|
||||
elseif change.op == types.DELTA_OPS.MODIFY then
|
||||
rev.op = types.DELTA_OPS.MODIFY
|
||||
rev.bh = change.ah
|
||||
rev.ah = change.bh
|
||||
if change.diff then
|
||||
rev.diff = diff_mod.reverse(change.diff)
|
||||
end
|
||||
end
|
||||
|
||||
table.insert(reversed, rev)
|
||||
end
|
||||
|
||||
return reversed
|
||||
end
|
||||
|
||||
--- Squash multiple deltas into one
|
||||
---@param delta_hashes string[] Delta hashes to squash
|
||||
---@param message string Squash commit message
|
||||
---@return Delta|nil Squashed delta
|
||||
function M.squash(delta_hashes, message)
|
||||
if #delta_hashes == 0 then
|
||||
return nil
|
||||
end
|
||||
|
||||
-- Collect all changes in order
|
||||
local all_changes = {}
|
||||
for _, delta_hash in ipairs(delta_hashes) do
|
||||
local delta = M.get(delta_hash)
|
||||
if delta then
|
||||
for _, change in ipairs(delta.ch) do
|
||||
table.insert(all_changes, change)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
-- Compact the changes
|
||||
local compacted = diff_mod.compact(all_changes)
|
||||
|
||||
return M.create(compacted, message, "squash")
|
||||
end
|
||||
|
||||
--- Get summary of a delta
|
||||
---@param delta Delta Delta to summarize
|
||||
---@return table Summary
|
||||
function M.summarize(delta)
|
||||
local adds = 0
|
||||
local mods = 0
|
||||
local dels = 0
|
||||
local paths = {}
|
||||
|
||||
for _, change in ipairs(delta.ch) do
|
||||
if change.op == types.DELTA_OPS.ADD then
|
||||
adds = adds + 1
|
||||
elseif change.op == types.DELTA_OPS.MODIFY then
|
||||
mods = mods + 1
|
||||
elseif change.op == types.DELTA_OPS.DELETE then
|
||||
dels = dels + 1
|
||||
end
|
||||
|
||||
-- Extract category from path
|
||||
local parts = vim.split(change.path, ".", { plain = true })
|
||||
if parts[1] then
|
||||
paths[parts[1]] = true
|
||||
end
|
||||
end
|
||||
|
||||
return {
|
||||
hash = delta.h,
|
||||
parent = delta.p,
|
||||
timestamp = delta.ts,
|
||||
message = delta.m.msg,
|
||||
trigger = delta.m.trig,
|
||||
stats = {
|
||||
adds = adds,
|
||||
modifies = mods,
|
||||
deletes = dels,
|
||||
total = adds + mods + dels,
|
||||
},
|
||||
categories = vim.tbl_keys(paths),
|
||||
}
|
||||
end
|
||||
|
||||
--- Format delta for display
|
||||
---@param delta Delta Delta to format
|
||||
---@return string[] Lines
|
||||
function M.format(delta)
|
||||
local summary = M.summarize(delta)
|
||||
local lines = {
|
||||
string.format("commit %s", delta.h),
|
||||
string.format("Date: %s", os.date("%Y-%m-%d %H:%M:%S", delta.ts)),
|
||||
string.format("Parent: %s", delta.p or "(none)"),
|
||||
"",
|
||||
" " .. (delta.m.msg or "No message"),
|
||||
"",
|
||||
string.format(" %d additions, %d modifications, %d deletions", summary.stats.adds, summary.stats.modifies, summary.stats.deletes),
|
||||
}
|
||||
|
||||
return lines
|
||||
end
|
||||
|
||||
return M
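
Usage sketch (illustrative, not part of this diff): one way the commit module above could be driven. The change table, node path and the "abc"/"def" hashes are made-up placeholders; the field names follow what M.create reads.

local commit = require("codetyper.brain.delta.commit")
local types = require("codetyper.brain.types")

-- Commit a single MODIFY change, then print a short log for it
local delta = commit.create({
  { op = types.DELTA_OPS.MODIFY, path = "nodes.patterns.n_pattern_x", bh = "abc", ah = "def" },
}, "Tune pattern weight", "manual")

if delta then
  for _, line in ipairs(commit.format(delta)) do
    print(line)
  end
end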
|
||||
lua/codetyper/brain/delta/diff.lua (new file, +261 lines)
@@ -0,0 +1,261 @@
|
||||
--- Brain Delta Diff Computation
|
||||
--- Field-level diff algorithms for delta versioning
|
||||
|
||||
local hash = require("codetyper.brain.hash")
|
||||
|
||||
local M = {}
|
||||
|
||||
--- Compute diff between two values
|
||||
---@param before any Before value
|
||||
---@param after any After value
|
||||
---@param path? string Current path
|
||||
---@return table[] Diff entries
|
||||
function M.compute(before, after, path)
|
||||
path = path or ""
|
||||
local diffs = {}
|
||||
|
||||
local before_type = type(before)
|
||||
local after_type = type(after)
|
||||
|
||||
-- Handle nil cases
|
||||
if before == nil and after == nil then
|
||||
return diffs
|
||||
end
|
||||
|
||||
if before == nil then
|
||||
table.insert(diffs, {
|
||||
path = path,
|
||||
op = "add",
|
||||
value = after,
|
||||
})
|
||||
return diffs
|
||||
end
|
||||
|
||||
if after == nil then
|
||||
table.insert(diffs, {
|
||||
path = path,
|
||||
op = "delete",
|
||||
value = before,
|
||||
})
|
||||
return diffs
|
||||
end
|
||||
|
||||
-- Type change
|
||||
if before_type ~= after_type then
|
||||
table.insert(diffs, {
|
||||
path = path,
|
||||
op = "replace",
|
||||
from = before,
|
||||
to = after,
|
||||
})
|
||||
return diffs
|
||||
end
|
||||
|
||||
-- Tables (recursive)
|
||||
if before_type == "table" then
|
||||
-- Get all keys
|
||||
local keys = {}
|
||||
for k in pairs(before) do
|
||||
keys[k] = true
|
||||
end
|
||||
for k in pairs(after) do
|
||||
keys[k] = true
|
||||
end
|
||||
|
||||
for k in pairs(keys) do
|
||||
local sub_path = path == "" and tostring(k) or (path .. "." .. tostring(k))
|
||||
local sub_diffs = M.compute(before[k], after[k], sub_path)
|
||||
for _, d in ipairs(sub_diffs) do
|
||||
table.insert(diffs, d)
|
||||
end
|
||||
end
|
||||
|
||||
return diffs
|
||||
end
|
||||
|
||||
-- Primitive comparison
|
||||
if before ~= after then
|
||||
table.insert(diffs, {
|
||||
path = path,
|
||||
op = "replace",
|
||||
from = before,
|
||||
to = after,
|
||||
})
|
||||
end
|
||||
|
||||
return diffs
|
||||
end
|
||||
|
||||
--- Apply a diff to a value
|
||||
---@param base any Base value
|
||||
---@param diffs table[] Diff entries
|
||||
---@return any Result value
|
||||
function M.apply(base, diffs)
|
||||
local result = vim.deepcopy(base) or {}
|
||||
|
||||
for _, diff in ipairs(diffs) do
|
||||
M.apply_single(result, diff)
|
||||
end
|
||||
|
||||
return result
|
||||
end
|
||||
|
||||
--- Apply a single diff entry
|
||||
---@param target table Target table
|
||||
---@param diff table Diff entry
|
||||
function M.apply_single(target, diff)
|
||||
local path = diff.path
|
||||
local parts = vim.split(path, ".", { plain = true })
|
||||
|
||||
if #parts == 0 or parts[1] == "" then
|
||||
-- Root-level change
|
||||
if diff.op == "add" or diff.op == "replace" then
|
||||
for k, v in pairs(diff.value or diff.to or {}) do
|
||||
target[k] = v
|
||||
end
|
||||
end
|
||||
return
|
||||
end
|
||||
|
||||
-- Navigate to parent
|
||||
local current = target
|
||||
for i = 1, #parts - 1 do
|
||||
local key = parts[i]
|
||||
-- Try numeric key
|
||||
local num_key = tonumber(key)
|
||||
key = num_key or key
|
||||
|
||||
if current[key] == nil then
|
||||
current[key] = {}
|
||||
end
|
||||
current = current[key]
|
||||
end
|
||||
|
||||
-- Apply to final key
|
||||
local final_key = parts[#parts]
|
||||
local num_key = tonumber(final_key)
|
||||
final_key = num_key or final_key
|
||||
|
||||
if diff.op == "add" then
|
||||
current[final_key] = diff.value
|
||||
elseif diff.op == "delete" then
|
||||
current[final_key] = nil
|
||||
elseif diff.op == "replace" then
|
||||
current[final_key] = diff.to
|
||||
end
|
||||
end
|
||||
|
||||
--- Reverse a diff (for rollback)
|
||||
---@param diffs table[] Diff entries
|
||||
---@return table[] Reversed diffs
|
||||
function M.reverse(diffs)
|
||||
local reversed = {}
|
||||
|
||||
for i = #diffs, 1, -1 do
|
||||
local diff = diffs[i]
|
||||
local rev = {
|
||||
path = diff.path,
|
||||
}
|
||||
|
||||
if diff.op == "add" then
|
||||
rev.op = "delete"
|
||||
rev.value = diff.value
|
||||
elseif diff.op == "delete" then
|
||||
rev.op = "add"
|
||||
rev.value = diff.value
|
||||
elseif diff.op == "replace" then
|
||||
rev.op = "replace"
|
||||
rev.from = diff.to
|
||||
rev.to = diff.from
|
||||
end
|
||||
|
||||
table.insert(reversed, rev)
|
||||
end
|
||||
|
||||
return reversed
|
||||
end
|
||||
|
||||
--- Compact diffs (combine related changes)
|
||||
---@param diffs table[] Diff entries
|
||||
---@return table[] Compacted diffs
|
||||
function M.compact(diffs)
|
||||
local by_path = {}
|
||||
|
||||
for _, diff in ipairs(diffs) do
|
||||
local existing = by_path[diff.path]
|
||||
if existing then
|
||||
-- Combine: keep first "from", use last "to"
|
||||
if diff.op == "replace" then
|
||||
existing.to = diff.to
|
||||
elseif diff.op == "delete" then
|
||||
existing.op = "delete"
|
||||
existing.to = nil
|
||||
end
|
||||
else
|
||||
by_path[diff.path] = vim.deepcopy(diff)
|
||||
end
|
||||
end
|
||||
|
||||
-- Convert back to array, filter out no-ops
|
||||
local result = {}
|
||||
for _, diff in pairs(by_path) do
|
||||
-- Skip if add then delete (net no change)
|
||||
if not (diff.op == "delete" and diff.from == nil) then
|
||||
table.insert(result, diff)
|
||||
end
|
||||
end
|
||||
|
||||
return result
|
||||
end
|
||||
|
||||
--- Create a minimal diff summary for storage
|
||||
---@param diffs table[] Diff entries
|
||||
---@return table Summary
|
||||
function M.summarize(diffs)
|
||||
local adds = 0
|
||||
local deletes = 0
|
||||
local replaces = 0
|
||||
local paths = {}
|
||||
|
||||
for _, diff in ipairs(diffs) do
|
||||
if diff.op == "add" then
|
||||
adds = adds + 1
|
||||
elseif diff.op == "delete" then
|
||||
deletes = deletes + 1
|
||||
elseif diff.op == "replace" then
|
||||
replaces = replaces + 1
|
||||
end
|
||||
|
||||
-- Extract top-level path
|
||||
local parts = vim.split(diff.path, ".", { plain = true })
|
||||
if parts[1] then
|
||||
paths[parts[1]] = true
|
||||
end
|
||||
end
|
||||
|
||||
return {
|
||||
adds = adds,
|
||||
deletes = deletes,
|
||||
replaces = replaces,
|
||||
paths = vim.tbl_keys(paths),
|
||||
total = adds + deletes + replaces,
|
||||
}
|
||||
end
|
||||
|
||||
--- Check if two states are equal (no diff)
|
||||
---@param state1 any First state
|
||||
---@param state2 any Second state
|
||||
---@return boolean
|
||||
function M.equals(state1, state2)
|
||||
local diffs = M.compute(state1, state2)
|
||||
return #diffs == 0
|
||||
end
|
||||
|
||||
--- Get hash of diff for deduplication
|
||||
---@param diffs table[] Diff entries
|
||||
---@return string Hash
|
||||
function M.hash(diffs)
|
||||
return hash.compute_table(diffs)
|
||||
end
|
||||
|
||||
return M
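
Usage sketch (illustrative, not part of this diff): a compute/apply/reverse round trip with the diff module above; the sample tables are arbitrary.

local diff = require("codetyper.brain.delta.diff")

local before = { name = "foo", opts = { retries = 2 } }
local after = { name = "foo", opts = { retries = 3 }, tag = "new" }

local d = diff.compute(before, after)               -- replace opts.retries, add tag
local redone = diff.apply(before, d)                -- should equal `after`
local undone = diff.apply(redone, diff.reverse(d))  -- should equal `before`

assert(diff.equals(redone, after))
assert(diff.equals(undone, before))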
|
||||
lua/codetyper/brain/delta/init.lua (new file, +278 lines)
@@ -0,0 +1,278 @@
|
||||
--- Brain Delta Coordinator
|
||||
--- Git-like versioning system for brain state
|
||||
|
||||
local storage = require("codetyper.brain.storage")
|
||||
local commit_mod = require("codetyper.brain.delta.commit")
|
||||
local diff_mod = require("codetyper.brain.delta.diff")
|
||||
local types = require("codetyper.brain.types")
|
||||
|
||||
local M = {}
|
||||
|
||||
-- Re-export submodules
|
||||
M.commit = commit_mod
|
||||
M.diff = diff_mod
|
||||
|
||||
--- Create a commit from pending graph changes
|
||||
---@param message string Commit message
|
||||
---@param trigger? string Trigger source
|
||||
---@return string|nil Delta hash
|
||||
function M.commit(message, trigger)
|
||||
local graph = require("codetyper.brain.graph")
|
||||
local changes = graph.get_pending_changes()
|
||||
|
||||
if #changes == 0 then
|
||||
return nil
|
||||
end
|
||||
|
||||
local delta = commit_mod.create(changes, message, trigger or "auto")
|
||||
if delta then
|
||||
return delta.h
|
||||
end
|
||||
|
||||
return nil
|
||||
end
|
||||
|
||||
--- Rollback to a specific delta
|
||||
---@param target_hash string Target delta hash
|
||||
---@return boolean Success
|
||||
function M.rollback(target_hash)
|
||||
local current_hash = storage.get_head()
|
||||
if not current_hash then
|
||||
return false
|
||||
end
|
||||
|
||||
if current_hash == target_hash then
|
||||
return true -- Already at target
|
||||
end
|
||||
|
||||
-- Get path from target to current
|
||||
local deltas_to_reverse = {}
|
||||
local current = current_hash
|
||||
|
||||
while current and current ~= target_hash do
|
||||
local delta = commit_mod.get(current)
|
||||
if not delta then
|
||||
return false -- Broken chain
|
||||
end
|
||||
table.insert(deltas_to_reverse, delta)
|
||||
current = delta.p
|
||||
end
|
||||
|
||||
if current ~= target_hash then
|
||||
return false -- Target not in ancestry
|
||||
end
|
||||
|
||||
-- Apply reverse changes
|
||||
for _, delta in ipairs(deltas_to_reverse) do
|
||||
local reverse_changes = commit_mod.compute_reverse(delta)
|
||||
M.apply_changes(reverse_changes)
|
||||
end
|
||||
|
||||
-- Update HEAD
|
||||
storage.set_head(target_hash)
|
||||
|
||||
-- Create a rollback commit
|
||||
commit_mod.create({
|
||||
{
|
||||
op = types.DELTA_OPS.MODIFY,
|
||||
path = "meta.head",
|
||||
bh = current_hash,
|
||||
ah = target_hash,
|
||||
},
|
||||
}, "Rollback to " .. target_hash:sub(1, 8), "rollback")
|
||||
|
||||
return true
|
||||
end
|
||||
|
||||
--- Apply changes to current state
|
||||
---@param changes table[] Changes to apply
|
||||
function M.apply_changes(changes)
|
||||
local node_mod = require("codetyper.brain.graph.node")
|
||||
|
||||
for _, change in ipairs(changes) do
|
||||
local parts = vim.split(change.path, ".", { plain = true })
|
||||
|
||||
if parts[1] == "nodes" and #parts >= 3 then
|
||||
local node_type = parts[2]
|
||||
local node_id = parts[3]
|
||||
|
||||
if change.op == types.DELTA_OPS.DELETE then
-- The change asks for a deletion, so remove the node
node_mod.delete(node_id)
elseif change.op == types.DELTA_OPS.ADD then
-- An ADD change cannot be replayed from a hash alone
-- This is a limitation - restoring the node would need content storage
|
||||
elseif change.op == types.DELTA_OPS.MODIFY then
|
||||
-- Apply diff if available
|
||||
if change.diff then
|
||||
local node = node_mod.get(node_id)
|
||||
if node then
|
||||
local updated = diff_mod.apply(node, change.diff)
|
||||
-- Direct update without tracking
|
||||
local nodes = storage.get_nodes(node_type)
|
||||
nodes[node_id] = updated
|
||||
storage.save_nodes(node_type, nodes)
|
||||
end
|
||||
end
|
||||
end
|
||||
elseif parts[1] == "graph" then
|
||||
-- Handle graph/edge changes
|
||||
local edge_mod = require("codetyper.brain.graph.edge")
|
||||
if parts[2] == "edges" and #parts >= 3 then
|
||||
local edge_id = parts[3]
|
||||
if change.op == types.DELTA_OPS.DELETE then
-- The change asks for a deletion, so remove the edge if it still exists
local graph = storage.get_graph()
if graph.edges and graph.edges[edge_id] then
local edge = graph.edges[edge_id]
edge_mod.delete(edge.s, edge.t, edge.ty)
end
elseif change.op == types.DELTA_OPS.ADD then
-- An ADD change cannot be replayed from a hash alone
-- Recreating the edge would need stored edge data
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
--- Get delta history
|
||||
---@param limit? number Max entries
|
||||
---@return Delta[]
|
||||
function M.get_history(limit)
|
||||
return commit_mod.get_history(limit)
|
||||
end
|
||||
|
||||
--- Get formatted log
|
||||
---@param limit? number Max entries
|
||||
---@return string[] Log lines
|
||||
function M.log(limit)
|
||||
local history = M.get_history(limit or 20)
|
||||
local lines = {}
|
||||
|
||||
for _, delta in ipairs(history) do
|
||||
local formatted = commit_mod.format(delta)
|
||||
for _, line in ipairs(formatted) do
|
||||
table.insert(lines, line)
|
||||
end
|
||||
table.insert(lines, "")
|
||||
end
|
||||
|
||||
return lines
|
||||
end
|
||||
|
||||
--- Get current HEAD hash
|
||||
---@return string|nil
|
||||
function M.head()
|
||||
return storage.get_head()
|
||||
end
|
||||
|
||||
--- Check if there are uncommitted changes
|
||||
---@return boolean
|
||||
function M.has_pending()
|
||||
|
||||
local node_pending = require("codetyper.brain.graph.node").pending
|
||||
local edge_pending = require("codetyper.brain.graph.edge").pending
|
||||
return #node_pending > 0 or #edge_pending > 0
|
||||
end
|
||||
|
||||
--- Get status (like git status)
|
||||
---@return table Status info
|
||||
function M.status()
|
||||
local node_pending = require("codetyper.brain.graph.node").pending
|
||||
local edge_pending = require("codetyper.brain.graph.edge").pending
|
||||
|
||||
local adds = 0
|
||||
local mods = 0
|
||||
local dels = 0
|
||||
|
||||
for _, change in ipairs(node_pending) do
|
||||
if change.op == types.DELTA_OPS.ADD then
|
||||
adds = adds + 1
|
||||
elseif change.op == types.DELTA_OPS.MODIFY then
|
||||
mods = mods + 1
|
||||
elseif change.op == types.DELTA_OPS.DELETE then
|
||||
dels = dels + 1
|
||||
end
|
||||
end
|
||||
|
||||
for _, change in ipairs(edge_pending) do
|
||||
if change.op == types.DELTA_OPS.ADD then
|
||||
adds = adds + 1
|
||||
elseif change.op == types.DELTA_OPS.DELETE then
|
||||
dels = dels + 1
|
||||
end
|
||||
end
|
||||
|
||||
return {
|
||||
head = storage.get_head(),
|
||||
pending = {
|
||||
adds = adds,
|
||||
modifies = mods,
|
||||
deletes = dels,
|
||||
total = adds + mods + dels,
|
||||
},
|
||||
clean = (adds + mods + dels) == 0,
|
||||
}
|
||||
end
|
||||
|
||||
--- Prune old deltas
|
||||
---@param keep number Number of recent deltas to keep
|
||||
---@return number Number of pruned deltas
|
||||
function M.prune_history(keep)
|
||||
keep = keep or 100
|
||||
local history = M.get_history(1000) -- effectively all (capped at 1000)
|
||||
|
||||
if #history <= keep then
|
||||
return 0
|
||||
end
|
||||
|
||||
local pruned = 0
|
||||
local brain_dir = storage.get_brain_dir()
|
||||
|
||||
for i = keep + 1, #history do
|
||||
local delta = history[i]
|
||||
local filepath = brain_dir .. "/deltas/objects/" .. delta.h .. ".json"
|
||||
if os.remove(filepath) then
|
||||
pruned = pruned + 1
|
||||
end
|
||||
end
|
||||
|
||||
-- Update meta
|
||||
local meta = storage.get_meta()
|
||||
storage.update_meta({ dc = math.max(0, meta.dc - pruned) })
|
||||
|
||||
return pruned
|
||||
end
|
||||
|
||||
--- Reset to initial state (dangerous!)
|
||||
---@return boolean Success
|
||||
function M.reset()
|
||||
-- Clear all nodes
|
||||
for _, node_type in pairs(types.NODE_TYPES) do
|
||||
storage.save_nodes(node_type .. "s", {})
|
||||
end
|
||||
|
||||
-- Clear graph
|
||||
storage.save_graph({ adj = {}, radj = {}, edges = {} })
|
||||
|
||||
-- Clear indices
|
||||
storage.save_index("by_file", {})
|
||||
storage.save_index("by_time", {})
|
||||
storage.save_index("by_symbol", {})
|
||||
|
||||
-- Reset meta
|
||||
storage.update_meta({
|
||||
head = nil,
|
||||
nc = 0,
|
||||
ec = 0,
|
||||
dc = 0,
|
||||
})
|
||||
|
||||
-- Clear pending
|
||||
require("codetyper.brain.graph.node").pending = {}
|
||||
require("codetyper.brain.graph.edge").pending = {}
|
||||
|
||||
storage.flush_all()
|
||||
return true
|
||||
end
|
||||
|
||||
return M
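
Usage sketch (illustrative, not part of this diff): committing pending graph changes and inspecting state through the coordinator above.

local delta = require("codetyper.brain.delta")

local head = delta.commit("Learned new formatting convention", "manual")
if head then
  print("HEAD is now " .. head:sub(1, 8))
end

local status = delta.status()
print(string.format("pending: %d change(s), clean=%s", status.pending.total, tostring(status.clean)))

for _, line in ipairs(delta.log(5)) do
  print(line)
end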
|
||||
lua/codetyper/brain/graph/edge.lua (new file, +367 lines)
@@ -0,0 +1,367 @@
|
||||
--- Brain Graph Edge Operations
|
||||
--- CRUD operations for node connections
|
||||
|
||||
local storage = require("codetyper.brain.storage")
|
||||
local hash = require("codetyper.brain.hash")
|
||||
local types = require("codetyper.brain.types")
|
||||
|
||||
local M = {}
|
||||
|
||||
--- Pending changes for delta tracking
|
||||
---@type table[]
|
||||
M.pending = {}
|
||||
|
||||
--- Create a new edge between nodes
|
||||
---@param source_id string Source node ID
|
||||
---@param target_id string Target node ID
|
||||
---@param edge_type EdgeType Edge type
|
||||
---@param props? EdgeProps Edge properties
|
||||
---@return Edge|nil Created edge
|
||||
function M.create(source_id, target_id, edge_type, props)
|
||||
props = props or {}
|
||||
|
||||
local edge = {
|
||||
id = hash.edge_id(source_id, target_id),
|
||||
s = source_id,
|
||||
t = target_id,
|
||||
ty = edge_type,
|
||||
p = {
|
||||
w = props.w or 0.5,
|
||||
dir = props.dir or "bi",
|
||||
r = props.r,
|
||||
},
|
||||
ts = os.time(),
|
||||
}
|
||||
|
||||
-- Update adjacency lists
|
||||
local graph = storage.get_graph()
|
||||
|
||||
-- Forward adjacency
|
||||
graph.adj[source_id] = graph.adj[source_id] or {}
|
||||
graph.adj[source_id][edge_type] = graph.adj[source_id][edge_type] or {}
|
||||
|
||||
-- Check for duplicate
|
||||
if vim.tbl_contains(graph.adj[source_id][edge_type], target_id) then
|
||||
-- Edge exists, strengthen it instead
|
||||
return M.strengthen(source_id, target_id, edge_type)
|
||||
end
|
||||
|
||||
table.insert(graph.adj[source_id][edge_type], target_id)
|
||||
|
||||
-- Reverse adjacency
|
||||
graph.radj[target_id] = graph.radj[target_id] or {}
|
||||
graph.radj[target_id][edge_type] = graph.radj[target_id][edge_type] or {}
|
||||
table.insert(graph.radj[target_id][edge_type], source_id)
|
||||
|
||||
-- Store edge properties separately (for weight/metadata)
|
||||
graph.edges = graph.edges or {}
|
||||
graph.edges[edge.id] = edge
|
||||
|
||||
storage.save_graph(graph)
|
||||
|
||||
-- Update meta
|
||||
local meta = storage.get_meta()
|
||||
storage.update_meta({ ec = meta.ec + 1 })
|
||||
|
||||
-- Track pending change
|
||||
table.insert(M.pending, {
|
||||
op = types.DELTA_OPS.ADD,
|
||||
path = "graph.edges." .. edge.id,
|
||||
ah = hash.compute_table(edge),
|
||||
})
|
||||
|
||||
return edge
|
||||
end
|
||||
|
||||
--- Get edge by source and target
|
||||
---@param source_id string Source node ID
|
||||
---@param target_id string Target node ID
|
||||
---@param edge_type? EdgeType Optional edge type filter
|
||||
---@return Edge|nil
|
||||
function M.get(source_id, target_id, edge_type)
|
||||
local graph = storage.get_graph()
|
||||
local edge_id = hash.edge_id(source_id, target_id)
|
||||
|
||||
if not graph.edges or not graph.edges[edge_id] then
|
||||
return nil
|
||||
end
|
||||
|
||||
local edge = graph.edges[edge_id]
|
||||
|
||||
if edge_type and edge.ty ~= edge_type then
|
||||
return nil
|
||||
end
|
||||
|
||||
return edge
|
||||
end
|
||||
|
||||
--- Get all edges for a node
|
||||
---@param node_id string Node ID
|
||||
---@param edge_types? EdgeType[] Edge types to include
|
||||
---@param direction? "out"|"in"|"both" Direction (default: "out")
|
||||
---@return Edge[]
|
||||
function M.get_edges(node_id, edge_types, direction)
|
||||
direction = direction or "out"
|
||||
local graph = storage.get_graph()
|
||||
local results = {}
|
||||
|
||||
edge_types = edge_types or vim.tbl_values(types.EDGE_TYPES)
|
||||
|
||||
-- Outgoing edges
|
||||
if direction == "out" or direction == "both" then
|
||||
local adj = graph.adj[node_id]
|
||||
if adj then
|
||||
for _, edge_type in ipairs(edge_types) do
|
||||
local targets = adj[edge_type] or {}
|
||||
for _, target_id in ipairs(targets) do
|
||||
local edge_id = hash.edge_id(node_id, target_id)
|
||||
if graph.edges and graph.edges[edge_id] then
|
||||
table.insert(results, graph.edges[edge_id])
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
-- Incoming edges
|
||||
if direction == "in" or direction == "both" then
|
||||
local radj = graph.radj[node_id]
|
||||
if radj then
|
||||
for _, edge_type in ipairs(edge_types) do
|
||||
local sources = radj[edge_type] or {}
|
||||
for _, source_id in ipairs(sources) do
|
||||
local edge_id = hash.edge_id(source_id, node_id)
|
||||
if graph.edges and graph.edges[edge_id] then
|
||||
table.insert(results, graph.edges[edge_id])
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
return results
|
||||
end
|
||||
|
||||
--- Get neighbor node IDs
|
||||
---@param node_id string Node ID
|
||||
---@param edge_types? EdgeType[] Edge types to follow
|
||||
---@param direction? "out"|"in"|"both" Direction
|
||||
---@return string[] Neighbor node IDs
|
||||
function M.get_neighbors(node_id, edge_types, direction)
|
||||
direction = direction or "out"
|
||||
local graph = storage.get_graph()
|
||||
local neighbors = {}
|
||||
|
||||
edge_types = edge_types or vim.tbl_values(types.EDGE_TYPES)
|
||||
|
||||
-- Outgoing
|
||||
if direction == "out" or direction == "both" then
|
||||
local adj = graph.adj[node_id]
|
||||
if adj then
|
||||
for _, edge_type in ipairs(edge_types) do
|
||||
for _, target in ipairs(adj[edge_type] or {}) do
|
||||
if not vim.tbl_contains(neighbors, target) then
|
||||
table.insert(neighbors, target)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
-- Incoming
|
||||
if direction == "in" or direction == "both" then
|
||||
local radj = graph.radj[node_id]
|
||||
if radj then
|
||||
for _, edge_type in ipairs(edge_types) do
|
||||
for _, source in ipairs(radj[edge_type] or {}) do
|
||||
if not vim.tbl_contains(neighbors, source) then
|
||||
table.insert(neighbors, source)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
return neighbors
|
||||
end
|
||||
|
||||
--- Delete an edge
|
||||
---@param source_id string Source node ID
|
||||
---@param target_id string Target node ID
|
||||
---@param edge_type? EdgeType Edge type (deletes all if nil)
|
||||
---@return boolean Success
|
||||
function M.delete(source_id, target_id, edge_type)
|
||||
local graph = storage.get_graph()
|
||||
local edge_id = hash.edge_id(source_id, target_id)
|
||||
|
||||
if not graph.edges or not graph.edges[edge_id] then
|
||||
return false
|
||||
end
|
||||
|
||||
local edge = graph.edges[edge_id]
|
||||
|
||||
if edge_type and edge.ty ~= edge_type then
|
||||
return false
|
||||
end
|
||||
|
||||
local before_hash = hash.compute_table(edge)
|
||||
|
||||
-- Remove from adjacency
|
||||
if graph.adj[source_id] and graph.adj[source_id][edge.ty] then
|
||||
graph.adj[source_id][edge.ty] = vim.tbl_filter(function(id)
|
||||
return id ~= target_id
|
||||
end, graph.adj[source_id][edge.ty])
|
||||
end
|
||||
|
||||
-- Remove from reverse adjacency
|
||||
if graph.radj[target_id] and graph.radj[target_id][edge.ty] then
|
||||
graph.radj[target_id][edge.ty] = vim.tbl_filter(function(id)
|
||||
return id ~= source_id
|
||||
end, graph.radj[target_id][edge.ty])
|
||||
end
|
||||
|
||||
-- Remove edge data
|
||||
graph.edges[edge_id] = nil
|
||||
|
||||
storage.save_graph(graph)
|
||||
|
||||
-- Update meta
|
||||
local meta = storage.get_meta()
|
||||
storage.update_meta({ ec = math.max(0, meta.ec - 1) })
|
||||
|
||||
-- Track pending change
|
||||
table.insert(M.pending, {
|
||||
op = types.DELTA_OPS.DELETE,
|
||||
path = "graph.edges." .. edge_id,
|
||||
bh = before_hash,
|
||||
})
|
||||
|
||||
return true
|
||||
end
|
||||
|
||||
--- Delete all edges for a node
|
||||
---@param node_id string Node ID
|
||||
---@return number Number of deleted edges
|
||||
function M.delete_all(node_id)
|
||||
local edges = M.get_edges(node_id, nil, "both")
|
||||
local count = 0
|
||||
|
||||
for _, edge in ipairs(edges) do
|
||||
if M.delete(edge.s, edge.t, edge.ty) then
|
||||
count = count + 1
|
||||
end
|
||||
end
|
||||
|
||||
return count
|
||||
end
|
||||
|
||||
--- Strengthen an existing edge
|
||||
---@param source_id string Source node ID
|
||||
---@param target_id string Target node ID
|
||||
---@param edge_type EdgeType Edge type
|
||||
---@return Edge|nil Updated edge
|
||||
function M.strengthen(source_id, target_id, edge_type)
|
||||
local graph = storage.get_graph()
|
||||
local edge_id = hash.edge_id(source_id, target_id)
|
||||
|
||||
if not graph.edges or not graph.edges[edge_id] then
|
||||
return nil
|
||||
end
|
||||
|
||||
local edge = graph.edges[edge_id]
|
||||
|
||||
if edge.ty ~= edge_type then
|
||||
return nil
|
||||
end
|
||||
|
||||
-- Increase weight (diminishing returns)
|
||||
edge.p.w = math.min(1.0, edge.p.w + (1 - edge.p.w) * 0.1)
|
||||
edge.ts = os.time()
|
||||
|
||||
graph.edges[edge_id] = edge
|
||||
storage.save_graph(graph)
|
||||
|
||||
return edge
|
||||
end
|
||||
|
||||
--- Find path between two nodes
|
||||
---@param from_id string Start node ID
|
||||
---@param to_id string End node ID
|
||||
---@param max_depth? number Maximum depth (default: 5)
|
||||
---@return table|nil Path info {nodes: string[], edges: Edge[], found: boolean}
|
||||
function M.find_path(from_id, to_id, max_depth)
|
||||
max_depth = max_depth or 5
|
||||
|
||||
-- BFS
|
||||
local queue = { { id = from_id, path = {}, edges = {} } }
|
||||
local visited = { [from_id] = true }
|
||||
|
||||
while #queue > 0 do
|
||||
local current = table.remove(queue, 1)
|
||||
|
||||
if current.id == to_id then
|
||||
table.insert(current.path, to_id)
|
||||
return {
|
||||
nodes = current.path,
|
||||
edges = current.edges,
|
||||
found = true,
|
||||
}
|
||||
end
|
||||
|
||||
if #current.path >= max_depth then
|
||||
goto continue
|
||||
end
|
||||
|
||||
-- Get all neighbors
|
||||
local edges = M.get_edges(current.id, nil, "both")
|
||||
|
||||
for _, edge in ipairs(edges) do
|
||||
local neighbor = edge.s == current.id and edge.t or edge.s
|
||||
|
||||
if not visited[neighbor] then
|
||||
visited[neighbor] = true
|
||||
|
||||
local new_path = vim.list_extend({}, current.path)
|
||||
table.insert(new_path, current.id)
|
||||
|
||||
local new_edges = vim.list_extend({}, current.edges)
|
||||
table.insert(new_edges, edge)
|
||||
|
||||
table.insert(queue, {
|
||||
id = neighbor,
|
||||
path = new_path,
|
||||
edges = new_edges,
|
||||
})
|
||||
end
|
||||
end
|
||||
|
||||
::continue::
|
||||
end
|
||||
|
||||
return { nodes = {}, edges = {}, found = false }
|
||||
end
|
||||
|
||||
--- Get pending changes and clear
|
||||
---@return table[] Pending changes
|
||||
function M.get_and_clear_pending()
|
||||
local changes = M.pending
|
||||
M.pending = {}
|
||||
return changes
|
||||
end
|
||||
|
||||
--- Check if two nodes are connected
|
||||
---@param node_id_1 string First node ID
|
||||
---@param node_id_2 string Second node ID
|
||||
---@param edge_type? EdgeType Edge type filter
|
||||
---@return boolean
|
||||
function M.are_connected(node_id_1, node_id_2, edge_type)
|
||||
local edge = M.get(node_id_1, node_id_2, edge_type)
|
||||
if edge then
|
||||
return true
|
||||
end
|
||||
-- Check reverse
|
||||
edge = M.get(node_id_2, node_id_1, edge_type)
|
||||
return edge ~= nil
|
||||
end
|
||||
|
||||
return M
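
Usage sketch (illustrative, not part of this diff): wiring two nodes together and querying the neighborhood with the edge module above; the node ids are made-up placeholders.

local edge = require("codetyper.brain.graph.edge")
local types = require("codetyper.brain.types")

edge.create("n_pattern_a", "n_decision_b", types.EDGE_TYPES.CAUSAL, {
  w = 0.8,
  dir = "fwd",
  r = "Pattern motivated the decision",
})

print(edge.are_connected("n_pattern_a", "n_decision_b"))
print(vim.inspect(edge.get_neighbors("n_pattern_a", nil, "out")))

local path = edge.find_path("n_pattern_a", "n_decision_b", 3)
print(path.found, #path.nodes)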
|
||||
lua/codetyper/brain/graph/init.lua (new file, +213 lines)
@@ -0,0 +1,213 @@
|
||||
--- Brain Graph Coordinator
|
||||
--- High-level graph operations
|
||||
|
||||
local node = require("codetyper.brain.graph.node")
|
||||
local edge = require("codetyper.brain.graph.edge")
|
||||
local query = require("codetyper.brain.graph.query")
|
||||
local storage = require("codetyper.brain.storage")
|
||||
local types = require("codetyper.brain.types")
|
||||
|
||||
local M = {}
|
||||
|
||||
-- Re-export submodules
|
||||
M.node = node
|
||||
M.edge = edge
|
||||
M.query = query
|
||||
|
||||
--- Add a learning with automatic edge creation
|
||||
---@param node_type NodeType Node type
|
||||
---@param content NodeContent Content
|
||||
---@param context? NodeContext Context
|
||||
---@param related_ids? string[] Related node IDs
|
||||
---@return Node Created node
|
||||
function M.add_learning(node_type, content, context, related_ids)
|
||||
-- Create the node
|
||||
local new_node = node.create(node_type, content, context)
|
||||
|
||||
-- Create edges to related nodes
|
||||
if related_ids then
|
||||
for _, related_id in ipairs(related_ids) do
|
||||
local related_node = node.get(related_id)
|
||||
if related_node then
|
||||
-- Determine edge type based on relationship
|
||||
local edge_type = types.EDGE_TYPES.SEMANTIC
|
||||
|
||||
-- If same file, use file edge
|
||||
if context and context.f and related_node.ctx and related_node.ctx.f == context.f then
|
||||
edge_type = types.EDGE_TYPES.FILE
|
||||
end
|
||||
|
||||
edge.create(new_node.id, related_id, edge_type, {
|
||||
w = 0.5,
|
||||
r = "Related learning",
|
||||
})
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
-- Find and link to similar existing nodes
|
||||
local similar = query.semantic_search(content.s, 5)
|
||||
for _, sim_node in ipairs(similar) do
|
||||
if sim_node.id ~= new_node.id then
|
||||
-- Create semantic edge if similarity is high enough
|
||||
local sim_score = query.compute_relevance(sim_node, { query = content.s })
|
||||
if sim_score > 0.5 then
|
||||
edge.create(new_node.id, sim_node.id, types.EDGE_TYPES.SEMANTIC, {
|
||||
w = sim_score,
|
||||
r = "Semantic similarity",
|
||||
})
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
return new_node
|
||||
end
|
||||
|
||||
--- Remove a learning and its edges
|
||||
---@param node_id string Node ID to remove
|
||||
---@return boolean Success
|
||||
function M.remove_learning(node_id)
|
||||
-- Delete all edges first
|
||||
edge.delete_all(node_id)
|
||||
|
||||
-- Delete the node
|
||||
return node.delete(node_id)
|
||||
end
|
||||
|
||||
--- Prune low-value nodes
|
||||
---@param opts? table Prune options
|
||||
---@return number Number of pruned nodes
|
||||
function M.prune(opts)
|
||||
opts = opts or {}
|
||||
local threshold = opts.threshold or 0.1
|
||||
local unused_days = opts.unused_days or 90
|
||||
local now = os.time()
|
||||
local cutoff = now - (unused_days * 86400)
|
||||
|
||||
local pruned = 0
|
||||
|
||||
-- Find nodes to prune
|
||||
for _, node_type in pairs(types.NODE_TYPES) do
|
||||
local nodes_to_prune = node.find({
|
||||
types = { node_type },
|
||||
min_weight = 0, -- Get all
|
||||
})
|
||||
|
||||
for _, n in ipairs(nodes_to_prune) do
|
||||
local should_prune = false
|
||||
|
||||
-- Prune if weight below threshold and not used recently
|
||||
if n.sc.w < threshold and (n.ts.lu or n.ts.up) < cutoff then
|
||||
should_prune = true
|
||||
end
|
||||
|
||||
-- Prune if never used and old
|
||||
if n.sc.u == 0 and n.ts.cr < cutoff then
|
||||
should_prune = true
|
||||
end
|
||||
|
||||
if should_prune then
|
||||
if M.remove_learning(n.id) then
|
||||
pruned = pruned + 1
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
return pruned
|
||||
end
|
||||
|
||||
--- Get all pending changes from nodes and edges
|
||||
---@return table[] Combined pending changes
|
||||
function M.get_pending_changes()
|
||||
local changes = {}
|
||||
|
||||
-- Get node changes
|
||||
local node_changes = node.get_and_clear_pending()
|
||||
for _, change in ipairs(node_changes) do
|
||||
table.insert(changes, change)
|
||||
end
|
||||
|
||||
-- Get edge changes
|
||||
local edge_changes = edge.get_and_clear_pending()
|
||||
for _, change in ipairs(edge_changes) do
|
||||
table.insert(changes, change)
|
||||
end
|
||||
|
||||
return changes
|
||||
end
|
||||
|
||||
--- Get graph statistics
|
||||
---@return table Stats
|
||||
function M.stats()
|
||||
local meta = storage.get_meta()
|
||||
|
||||
-- Count nodes by type
|
||||
local by_type = {}
|
||||
for _, node_type in pairs(types.NODE_TYPES) do
|
||||
local nodes = storage.get_nodes(node_type .. "s")
|
||||
by_type[node_type] = vim.tbl_count(nodes)
|
||||
end
|
||||
|
||||
-- Count edges by type
|
||||
local graph = storage.get_graph()
|
||||
local edges_by_type = {}
|
||||
if graph.edges then
|
||||
for _, e in pairs(graph.edges) do
|
||||
edges_by_type[e.ty] = (edges_by_type[e.ty] or 0) + 1
|
||||
end
|
||||
end
|
||||
|
||||
return {
|
||||
node_count = meta.nc,
|
||||
edge_count = meta.ec,
|
||||
delta_count = meta.dc,
|
||||
nodes_by_type = by_type,
|
||||
edges_by_type = edges_by_type,
|
||||
}
|
||||
end
|
||||
|
||||
--- Create temporal edge between nodes created in sequence
|
||||
---@param node_ids string[] Node IDs in temporal order
|
||||
function M.link_temporal(node_ids)
|
||||
for i = 1, #node_ids - 1 do
|
||||
edge.create(node_ids[i], node_ids[i + 1], types.EDGE_TYPES.TEMPORAL, {
|
||||
w = 0.7,
|
||||
dir = "fwd",
|
||||
r = "Temporal sequence",
|
||||
})
|
||||
end
|
||||
end
|
||||
|
||||
--- Create causal edge (this caused that)
|
||||
---@param cause_id string Cause node ID
|
||||
---@param effect_id string Effect node ID
|
||||
---@param reason? string Reason description
|
||||
function M.link_causal(cause_id, effect_id, reason)
|
||||
edge.create(cause_id, effect_id, types.EDGE_TYPES.CAUSAL, {
|
||||
w = 0.8,
|
||||
dir = "fwd",
|
||||
r = reason or "Caused by",
|
||||
})
|
||||
end
|
||||
|
||||
--- Mark a node as superseded by another
|
||||
---@param old_id string Old node ID
|
||||
---@param new_id string New node ID
|
||||
function M.supersede(old_id, new_id)
|
||||
edge.create(old_id, new_id, types.EDGE_TYPES.SUPERSEDES, {
|
||||
w = 1.0,
|
||||
dir = "fwd",
|
||||
r = "Superseded by newer learning",
|
||||
})
|
||||
|
||||
-- Reduce weight of old node
|
||||
local old_node = node.get(old_id)
|
||||
if old_node then
|
||||
node.update(old_id, {
|
||||
sc = { w = old_node.sc.w * 0.5 },
|
||||
})
|
||||
end
|
||||
end
|
||||
|
||||
return M
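
Usage sketch (illustrative, not part of this diff): recording a learning through the coordinator above. It assumes query.semantic_search exists as referenced in add_learning (defined in the query module, which is not shown in full here); the sample content is arbitrary.

local graph = require("codetyper.brain.graph")
local types = require("codetyper.brain.types")

local learned = graph.add_learning(types.NODE_TYPES.CONVENTION, {
  s = "Use snake_case for Lua module locals",
  d = "Preference observed across recent edits",
}, { f = "lua/codetyper/init.lua" })

print("created node " .. learned.id)
print(vim.inspect(graph.stats()))

-- Later, a newer convention can replace it (new_id is a placeholder):
-- graph.supersede(learned.id, new_id)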
|
||||
lua/codetyper/brain/graph/node.lua (new file, +403 lines)
@@ -0,0 +1,403 @@
|
||||
--- Brain Graph Node Operations
|
||||
--- CRUD operations for learning nodes
|
||||
|
||||
local storage = require("codetyper.brain.storage")
|
||||
local hash = require("codetyper.brain.hash")
|
||||
local types = require("codetyper.brain.types")
|
||||
|
||||
local M = {}
|
||||
|
||||
--- Pending changes for delta tracking
|
||||
---@type table[]
|
||||
M.pending = {}
|
||||
|
||||
--- Node type to file mapping
|
||||
local TYPE_MAP = {
|
||||
[types.NODE_TYPES.PATTERN] = "patterns",
|
||||
[types.NODE_TYPES.CORRECTION] = "corrections",
|
||||
[types.NODE_TYPES.DECISION] = "decisions",
|
||||
[types.NODE_TYPES.CONVENTION] = "conventions",
|
||||
[types.NODE_TYPES.FEEDBACK] = "feedback",
|
||||
[types.NODE_TYPES.SESSION] = "sessions",
|
||||
-- Full names for convenience
|
||||
patterns = "patterns",
|
||||
corrections = "corrections",
|
||||
decisions = "decisions",
|
||||
conventions = "conventions",
|
||||
feedback = "feedback",
|
||||
sessions = "sessions",
|
||||
}
|
||||
|
||||
--- Get storage key for node type
|
||||
---@param node_type string Node type
|
||||
---@return string Storage key
|
||||
local function get_storage_key(node_type)
|
||||
return TYPE_MAP[node_type] or "patterns"
|
||||
end
|
||||
|
||||
--- Create a new node
|
||||
---@param node_type NodeType Node type
|
||||
---@param content NodeContent Content
|
||||
---@param context? NodeContext Context
|
||||
---@param opts? table Additional options
|
||||
---@return Node Created node
|
||||
function M.create(node_type, content, context, opts)
|
||||
opts = opts or {}
|
||||
local now = os.time()
|
||||
|
||||
local node = {
|
||||
id = hash.node_id(node_type, content.s),
|
||||
t = node_type,
|
||||
h = hash.compute((content.s or "") .. (content.d or "")),
|
||||
c = {
|
||||
s = content.s or "",
|
||||
d = content.d or content.s or "",
|
||||
code = content.code,
|
||||
lang = content.lang,
|
||||
},
|
||||
ctx = context or {},
|
||||
sc = {
|
||||
w = opts.weight or 0.5,
|
||||
u = 0,
|
||||
sr = 1.0,
|
||||
},
|
||||
ts = {
|
||||
cr = now,
|
||||
up = now,
|
||||
lu = now,
|
||||
},
|
||||
m = {
|
||||
src = opts.source or types.SOURCES.AUTO,
|
||||
v = 1,
|
||||
},
|
||||
}
|
||||
|
||||
-- Store node
|
||||
local storage_key = get_storage_key(node_type)
|
||||
local nodes = storage.get_nodes(storage_key)
|
||||
nodes[node.id] = node
|
||||
storage.save_nodes(storage_key, nodes)
|
||||
|
||||
-- Update meta
|
||||
local meta = storage.get_meta()
|
||||
storage.update_meta({ nc = meta.nc + 1 })
|
||||
|
||||
-- Update indices
|
||||
M.update_indices(node, "add")
|
||||
|
||||
-- Track pending change
|
||||
table.insert(M.pending, {
|
||||
op = types.DELTA_OPS.ADD,
|
||||
path = "nodes." .. storage_key .. "." .. node.id,
|
||||
ah = node.h,
|
||||
})
|
||||
|
||||
return node
|
||||
end
|
||||
|
||||
--- Get a node by ID
|
||||
---@param node_id string Node ID
|
||||
---@return Node|nil
|
||||
function M.get(node_id)
|
||||
-- Parse node type from ID (n_<type>_<timestamp>_<hash>)
|
||||
local parts = vim.split(node_id, "_")
|
||||
if #parts < 3 then
|
||||
return nil
|
||||
end
|
||||
|
||||
local node_type = parts[2]
|
||||
local storage_key = get_storage_key(node_type)
|
||||
local nodes = storage.get_nodes(storage_key)
|
||||
|
||||
return nodes[node_id]
|
||||
end
|
||||
|
||||
--- Update a node
|
||||
---@param node_id string Node ID
|
||||
---@param updates table Partial updates
|
||||
---@return Node|nil Updated node
|
||||
function M.update(node_id, updates)
|
||||
local node = M.get(node_id)
|
||||
if not node then
|
||||
return nil
|
||||
end
|
||||
|
||||
local before_hash = node.h
|
||||
|
||||
-- Apply updates
|
||||
if updates.c then
|
||||
node.c = vim.tbl_deep_extend("force", node.c, updates.c)
|
||||
end
|
||||
if updates.ctx then
|
||||
node.ctx = vim.tbl_deep_extend("force", node.ctx, updates.ctx)
|
||||
end
|
||||
if updates.sc then
|
||||
node.sc = vim.tbl_deep_extend("force", node.sc, updates.sc)
|
||||
end
|
||||
|
||||
-- Update timestamps and hash
|
||||
node.ts.up = os.time()
|
||||
node.h = hash.compute((node.c.s or "") .. (node.c.d or ""))
|
||||
node.m.v = (node.m.v or 0) + 1
|
||||
|
||||
-- Save
|
||||
local storage_key = get_storage_key(node.t)
|
||||
local nodes = storage.get_nodes(storage_key)
|
||||
nodes[node_id] = node
|
||||
storage.save_nodes(storage_key, nodes)
|
||||
|
||||
-- Update indices if context changed
|
||||
if updates.ctx then
|
||||
M.update_indices(node, "update")
|
||||
end
|
||||
|
||||
-- Track pending change
|
||||
table.insert(M.pending, {
|
||||
op = types.DELTA_OPS.MODIFY,
|
||||
path = "nodes." .. storage_key .. "." .. node_id,
|
||||
bh = before_hash,
|
||||
ah = node.h,
|
||||
})
|
||||
|
||||
return node
|
||||
end
|
||||
|
||||
--- Delete a node
|
||||
---@param node_id string Node ID
|
||||
---@return boolean Success
|
||||
function M.delete(node_id)
|
||||
local node = M.get(node_id)
|
||||
if not node then
|
||||
return false
|
||||
end
|
||||
|
||||
local storage_key = get_storage_key(node.t)
|
||||
local nodes = storage.get_nodes(storage_key)
|
||||
|
||||
if not nodes[node_id] then
|
||||
return false
|
||||
end
|
||||
|
||||
local before_hash = node.h
|
||||
nodes[node_id] = nil
|
||||
storage.save_nodes(storage_key, nodes)
|
||||
|
||||
-- Update meta
|
||||
local meta = storage.get_meta()
|
||||
storage.update_meta({ nc = math.max(0, meta.nc - 1) })
|
||||
|
||||
-- Update indices
|
||||
M.update_indices(node, "delete")
|
||||
|
||||
-- Track pending change
|
||||
table.insert(M.pending, {
|
||||
op = types.DELTA_OPS.DELETE,
|
||||
path = "nodes." .. storage_key .. "." .. node_id,
|
||||
bh = before_hash,
|
||||
})
|
||||
|
||||
return true
|
||||
end
|
||||
|
||||
--- Find nodes by criteria
|
||||
---@param criteria table Search criteria
|
||||
---@return Node[]
|
||||
function M.find(criteria)
|
||||
local results = {}
|
||||
|
||||
local node_types = criteria.types or vim.tbl_values(types.NODE_TYPES)
|
||||
|
||||
for _, node_type in ipairs(node_types) do
|
||||
local storage_key = get_storage_key(node_type)
|
||||
local nodes = storage.get_nodes(storage_key)
|
||||
|
||||
for _, node in pairs(nodes) do
|
||||
local matches = true
|
||||
|
||||
-- Filter by file
|
||||
if criteria.file and node.ctx.f ~= criteria.file then
|
||||
matches = false
|
||||
end
|
||||
|
||||
-- Filter by min weight
|
||||
if criteria.min_weight and node.sc.w < criteria.min_weight then
|
||||
matches = false
|
||||
end
|
||||
|
||||
-- Filter by since timestamp
|
||||
if criteria.since and node.ts.cr < criteria.since then
|
||||
matches = false
|
||||
end
|
||||
|
||||
-- Filter by content match
|
||||
if criteria.query then
|
||||
local query_lower = criteria.query:lower()
|
||||
local summary_lower = (node.c.s or ""):lower()
|
||||
local detail_lower = (node.c.d or ""):lower()
|
||||
if not summary_lower:find(query_lower, 1, true) and not detail_lower:find(query_lower, 1, true) then
|
||||
matches = false
|
||||
end
|
||||
end
|
||||
|
||||
if matches then
|
||||
table.insert(results, node)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
-- Sort by relevance (weight * recency)
|
||||
table.sort(results, function(a, b)
|
||||
local score_a = a.sc.w * (1 / (1 + (os.time() - a.ts.lu) / 86400))
|
||||
local score_b = b.sc.w * (1 / (1 + (os.time() - b.ts.lu) / 86400))
|
||||
return score_a > score_b
|
||||
end)
|
||||
|
||||
-- Apply limit
|
||||
if criteria.limit and #results > criteria.limit then
|
||||
local limited = {}
|
||||
for i = 1, criteria.limit do
|
||||
limited[i] = results[i]
|
||||
end
|
||||
return limited
|
||||
end
|
||||
|
||||
return results
|
||||
end
|
||||
|
||||
--- Record usage of a node
|
||||
---@param node_id string Node ID
|
||||
---@param success? boolean Was the usage successful
|
||||
function M.record_usage(node_id, success)
|
||||
local node = M.get(node_id)
|
||||
if not node then
|
||||
return
|
||||
end
|
||||
|
||||
-- Update usage stats
|
||||
node.sc.u = node.sc.u + 1
|
||||
node.ts.lu = os.time()
|
||||
|
||||
-- Update success rate
|
||||
if success ~= nil then
|
||||
local total = node.sc.u
|
||||
local successes = node.sc.sr * (total - 1) + (success and 1 or 0)
|
||||
node.sc.sr = successes / total
|
||||
end
|
||||
|
||||
-- Increase weight slightly for frequently used nodes
|
||||
if node.sc.u > 5 then
|
||||
node.sc.w = math.min(1.0, node.sc.w + 0.01)
|
||||
end
|
||||
|
||||
-- Save (direct save, no pending change tracking for usage)
|
||||
local storage_key = get_storage_key(node.t)
|
||||
local nodes = storage.get_nodes(storage_key)
|
||||
nodes[node_id] = node
|
||||
storage.save_nodes(storage_key, nodes)
|
||||
end
|
||||
|
||||
--- Update indices for a node
|
||||
---@param node Node The node
|
||||
---@param op "add"|"update"|"delete" Operation type
|
||||
function M.update_indices(node, op)
|
||||
-- File index
|
||||
if node.ctx.f then
|
||||
local by_file = storage.get_index("by_file")
|
||||
|
||||
if op == "delete" then
|
||||
if by_file[node.ctx.f] then
|
||||
by_file[node.ctx.f] = vim.tbl_filter(function(id)
|
||||
return id ~= node.id
|
||||
end, by_file[node.ctx.f])
|
||||
end
|
||||
else
|
||||
by_file[node.ctx.f] = by_file[node.ctx.f] or {}
|
||||
if not vim.tbl_contains(by_file[node.ctx.f], node.id) then
|
||||
table.insert(by_file[node.ctx.f], node.id)
|
||||
end
|
||||
end
|
||||
|
||||
storage.save_index("by_file", by_file)
|
||||
end
|
||||
|
||||
-- Symbol index
|
||||
if node.ctx.sym then
|
||||
local by_symbol = storage.get_index("by_symbol")
|
||||
|
||||
for _, sym in ipairs(node.ctx.sym) do
|
||||
if op == "delete" then
|
||||
if by_symbol[sym] then
|
||||
by_symbol[sym] = vim.tbl_filter(function(id)
|
||||
return id ~= node.id
|
||||
end, by_symbol[sym])
|
||||
end
|
||||
else
|
||||
by_symbol[sym] = by_symbol[sym] or {}
|
||||
if not vim.tbl_contains(by_symbol[sym], node.id) then
|
||||
table.insert(by_symbol[sym], node.id)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
storage.save_index("by_symbol", by_symbol)
|
||||
end
|
||||
|
||||
-- Time index (daily buckets)
|
||||
local day = os.date("%Y-%m-%d", node.ts.cr)
|
||||
local by_time = storage.get_index("by_time")
|
||||
|
||||
if op == "delete" then
|
||||
if by_time[day] then
|
||||
by_time[day] = vim.tbl_filter(function(id)
|
||||
return id ~= node.id
|
||||
end, by_time[day])
|
||||
end
|
||||
elseif op == "add" then
|
||||
by_time[day] = by_time[day] or {}
|
||||
if not vim.tbl_contains(by_time[day], node.id) then
|
||||
table.insert(by_time[day], node.id)
|
||||
end
|
||||
end
|
||||
|
||||
storage.save_index("by_time", by_time)
|
||||
end
|
||||
|
||||
--- Get pending changes and clear
|
||||
---@return table[] Pending changes
|
||||
function M.get_and_clear_pending()
|
||||
local changes = M.pending
|
||||
M.pending = {}
|
||||
return changes
|
||||
end
|
||||
|
||||
--- Merge two similar nodes
|
||||
---@param node_id_1 string First node ID
|
||||
---@param node_id_2 string Second node ID (will be deleted)
|
||||
---@return Node|nil Merged node
|
||||
function M.merge(node_id_1, node_id_2)
|
||||
local node1 = M.get(node_id_1)
|
||||
local node2 = M.get(node_id_2)
|
||||
|
||||
if not node1 or not node2 then
|
||||
return nil
|
||||
end
|
||||
|
||||
-- Merge content (keep longer detail)
|
||||
local merged_detail = #node1.c.d > #node2.c.d and node1.c.d or node2.c.d
|
||||
|
||||
-- Merge scores (combine weights and usage)
|
||||
local merged_weight = (node1.sc.w + node2.sc.w) / 2
|
||||
local merged_usage = node1.sc.u + node2.sc.u
|
||||
|
||||
M.update(node_id_1, {
|
||||
c = { d = merged_detail },
|
||||
sc = { w = merged_weight, u = merged_usage },
|
||||
})
|
||||
|
||||
-- Delete the second node
|
||||
M.delete(node_id_2)
|
||||
|
||||
return M.get(node_id_1)
|
||||
end
|
||||
|
||||
return M
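
Usage sketch (illustrative, not part of this diff): creating, finding, and recording usage of a node with the module above; the sample content is arbitrary.

local node = require("codetyper.brain.graph.node")
local types = require("codetyper.brain.types")

local n = node.create(types.NODE_TYPES.CORRECTION, {
  s = "Prefer vim.keymap.set over nvim_set_keymap",
  d = "User corrected generated mapping code twice",
  lang = "lua",
}, { f = "lua/codetyper/keymaps.lua" })

node.record_usage(n.id, true)  -- bumps usage count and success rate

local hits = node.find({ query = "keymap", types = { types.NODE_TYPES.CORRECTION }, limit = 5 })
print(#hits, hits[1] and hits[1].c.s)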
|
||||
lua/codetyper/brain/graph/query.lua (new file, +394 lines)
@@ -0,0 +1,394 @@
|
||||
--- Brain Graph Query Engine
|
||||
--- Multi-dimensional traversal and relevance scoring
|
||||
|
||||
local storage = require("codetyper.brain.storage")
|
||||
local types = require("codetyper.brain.types")
|
||||
|
||||
local M = {}
|
||||
|
||||
--- Lazy load dependencies to avoid circular requires
|
||||
local function get_node_module()
|
||||
return require("codetyper.brain.graph.node")
|
||||
end
|
||||
|
||||
local function get_edge_module()
|
||||
return require("codetyper.brain.graph.edge")
|
||||
end
|
||||
|
||||
--- Compute text similarity (simple keyword matching)
|
||||
---@param text1 string First text
|
||||
---@param text2 string Second text
|
||||
---@return number Similarity score (0-1)
|
||||
local function text_similarity(text1, text2)
|
||||
if not text1 or not text2 then
|
||||
return 0
|
||||
end
|
||||
|
||||
text1 = text1:lower()
|
||||
text2 = text2:lower()
|
||||
|
||||
-- Extract words
|
||||
local words1 = {}
|
||||
for word in text1:gmatch("%w+") do
|
||||
words1[word] = true
|
||||
end
|
||||
|
||||
local words2 = {}
|
||||
for word in text2:gmatch("%w+") do
|
||||
words2[word] = true
|
||||
end
|
||||
|
||||
-- Count matches
|
||||
local matches = 0
|
||||
local total = 0
|
||||
|
||||
for word in pairs(words1) do
|
||||
total = total + 1
|
||||
if words2[word] then
|
||||
matches = matches + 1
|
||||
end
|
||||
end
|
||||
|
||||
for word in pairs(words2) do
|
||||
if not words1[word] then
|
||||
total = total + 1
|
||||
end
|
||||
end
|
||||
|
||||
if total == 0 then
|
||||
return 0
|
||||
end
|
||||
|
||||
return matches / total
|
||||
end
|
||||
|
||||
--- Compute relevance score for a node
|
||||
---@param node Node Node to score
|
||||
---@param opts QueryOpts Query options
|
||||
---@return number Relevance score (0-1)
|
||||
function M.compute_relevance(node, opts)
|
||||
local score = 0
|
||||
local weights = {
|
||||
content_match = 0.30,
|
||||
recency = 0.20,
|
||||
usage = 0.15,
|
||||
weight = 0.15,
|
||||
connection_density = 0.10,
|
||||
success_rate = 0.10,
|
||||
}
|
||||
|
||||
-- Content similarity
|
||||
if opts.query then
|
||||
local summary = node.c.s or ""
|
||||
local detail = node.c.d or ""
|
||||
local similarity = math.max(text_similarity(opts.query, summary), text_similarity(opts.query, detail) * 0.8)
|
||||
score = score + (similarity * weights.content_match)
|
||||
else
|
||||
score = score + weights.content_match * 0.5 -- Base score if no query
|
||||
end
|
||||
|
||||
-- Recency decay (exponential with 30-day half-life)
|
||||
local age_days = (os.time() - (node.ts.lu or node.ts.up)) / 86400
|
||||
local recency = math.exp(-age_days / 30)
|
||||
score = score + (recency * weights.recency)
|
||||
|
||||
-- Usage frequency (normalized)
|
||||
local usage = math.min(node.sc.u / 10, 1.0)
|
||||
score = score + (usage * weights.usage)
|
||||
|
||||
-- Node weight
|
||||
score = score + (node.sc.w * weights.weight)
|
||||
|
||||
-- Connection density
|
||||
local edge_mod = get_edge_module()
|
||||
local connections = #edge_mod.get_edges(node.id, nil, "both")
|
||||
local density = math.min(connections / 5, 1.0)
|
||||
score = score + (density * weights.connection_density)
|
||||
|
||||
-- Success rate
|
||||
score = score + (node.sc.sr * weights.success_rate)
|
||||
|
||||
return score
|
||||
end
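-- Usage sketch, assuming a node shaped as the rest of this module expects
-- (c.s/c.d content, ts.lu/ts.up timestamps, sc.u/sc.w/sc.sr scores):
--   local query = require("codetyper.brain.graph.query")
--   local score = query.compute_relevance(some_node, { query = "retry http request" })
-- The six weights sum to 1.0, so the result stays in [0, 1]; content match (0.30)
-- and recency (0.20) dominate the ranking.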
|
||||
|
||||
--- Traverse graph from seed nodes
|
||||
---@param seed_ids string[] Starting node IDs
|
||||
---@param depth number Traversal depth
|
||||
---@param edge_types? EdgeType[] Edge types to follow
|
||||
---@return table<string, Node> Discovered nodes indexed by ID
|
||||
local function traverse(seed_ids, depth, edge_types)
|
||||
local node_mod = get_node_module()
|
||||
local edge_mod = get_edge_module()
|
||||
local discovered = {}
|
||||
local frontier = seed_ids
|
||||
|
||||
for _ = 1, depth do
|
||||
local next_frontier = {}
|
||||
|
||||
for _, node_id in ipairs(frontier) do
|
||||
-- Skip if already discovered
|
||||
if discovered[node_id] then
|
||||
goto continue
|
||||
end
|
||||
|
||||
-- Get and store node
|
||||
local node = node_mod.get(node_id)
|
||||
if node then
|
||||
discovered[node_id] = node
|
||||
|
||||
-- Get neighbors
|
||||
local neighbors = edge_mod.get_neighbors(node_id, edge_types, "both")
|
||||
for _, neighbor_id in ipairs(neighbors) do
|
||||
if not discovered[neighbor_id] then
|
||||
table.insert(next_frontier, neighbor_id)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
::continue::
|
||||
end
|
||||
|
||||
frontier = next_frontier
|
||||
if #frontier == 0 then
|
||||
break
|
||||
end
|
||||
end
|
||||
|
||||
return discovered
|
||||
end
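-- Sketch of the traversal above (node ID is illustrative): each pass discovers one
-- frontier, so depth = 2 collects the seed nodes plus their direct neighbours along
-- the requested edge types.
--   local found = traverse({ "n_pattern_1700000000_ab12cd" }, 2, { types.EDGE_TYPES.SEMANTIC })
--   -- found is a map of node_id -> Node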
|
||||
|
||||
--- Execute a query across all dimensions
|
||||
---@param opts QueryOpts Query options
|
||||
---@return QueryResult
|
||||
function M.execute(opts)
|
||||
opts = opts or {}
|
||||
local node_mod = get_node_module()
|
||||
local results = {
|
||||
semantic = {},
|
||||
file = {},
|
||||
temporal = {},
|
||||
}
|
||||
|
||||
-- 1. Semantic traversal (content similarity)
|
||||
if opts.query then
|
||||
local seed_nodes = node_mod.find({
|
||||
query = opts.query,
|
||||
types = opts.types,
|
||||
limit = 10,
|
||||
})
|
||||
|
||||
local seed_ids = vim.tbl_map(function(n)
|
||||
return n.id
|
||||
end, seed_nodes)
|
||||
local depth = opts.depth or 2
|
||||
|
||||
local discovered = traverse(seed_ids, depth, { types.EDGE_TYPES.SEMANTIC })
|
||||
for id, node in pairs(discovered) do
|
||||
results.semantic[id] = node
|
||||
end
|
||||
end
|
||||
|
||||
-- 2. File-based traversal
|
||||
if opts.file then
|
||||
local by_file = storage.get_index("by_file")
|
||||
local file_node_ids = by_file[opts.file] or {}
|
||||
|
||||
for _, node_id in ipairs(file_node_ids) do
|
||||
local node = node_mod.get(node_id)
|
||||
if node then
|
||||
results.file[node.id] = node
|
||||
end
|
||||
end
|
||||
|
||||
-- Also get nodes from related files via edges
|
||||
local discovered = traverse(file_node_ids, 1, { types.EDGE_TYPES.FILE })
|
||||
for id, node in pairs(discovered) do
|
||||
results.file[id] = node
|
||||
end
|
||||
end
|
||||
|
||||
-- 3. Temporal traversal (recent context)
|
||||
if opts.since then
|
||||
local by_time = storage.get_index("by_time")
|
||||
local now = os.time()
|
||||
|
||||
for day, node_ids in pairs(by_time) do
|
||||
-- Parse day to timestamp
|
||||
local year, month, day_num = day:match("(%d+)-(%d+)-(%d+)")
|
||||
if year then
|
||||
local day_ts = os.time({ year = tonumber(year), month = tonumber(month), day = tonumber(day_num) })
|
||||
if day_ts >= opts.since then
|
||||
for _, node_id in ipairs(node_ids) do
|
||||
local node = node_mod.get(node_id)
|
||||
if node then
|
||||
results.temporal[node.id] = node
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
-- Follow temporal edges
|
||||
local temporal_ids = vim.tbl_keys(results.temporal)
|
||||
local discovered = traverse(temporal_ids, 1, { types.EDGE_TYPES.TEMPORAL })
|
||||
for id, node in pairs(discovered) do
|
||||
results.temporal[id] = node
|
||||
end
|
||||
end
|
||||
|
||||
-- 4. Combine and deduplicate
|
||||
local all_nodes = {}
|
||||
for _, category in pairs(results) do
|
||||
for id, node in pairs(category) do
|
||||
if not all_nodes[id] then
|
||||
all_nodes[id] = node
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
-- 5. Score and rank
|
||||
local scored = {}
|
||||
for id, node in pairs(all_nodes) do
|
||||
local relevance = M.compute_relevance(node, opts)
|
||||
table.insert(scored, { node = node, relevance = relevance })
|
||||
end
|
||||
|
||||
table.sort(scored, function(a, b)
|
||||
return a.relevance > b.relevance
|
||||
end)
|
||||
|
||||
-- 6. Apply limit
|
||||
local limit = opts.limit or 50
|
||||
local result_nodes = {}
|
||||
local truncated = #scored > limit
|
||||
|
||||
for i = 1, math.min(limit, #scored) do
|
||||
table.insert(result_nodes, scored[i].node)
|
||||
end
|
||||
|
||||
-- 7. Get edges between result nodes
|
||||
local edge_mod = get_edge_module()
|
||||
local result_edges = {}
|
||||
local node_ids = {}
|
||||
for _, node in ipairs(result_nodes) do
|
||||
node_ids[node.id] = true
|
||||
end
|
||||
|
||||
for _, node in ipairs(result_nodes) do
|
||||
local edges = edge_mod.get_edges(node.id, nil, "out")
|
||||
for _, edge in ipairs(edges) do
|
||||
if node_ids[edge.t] then
|
||||
table.insert(result_edges, edge)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
return {
|
||||
nodes = result_nodes,
|
||||
edges = result_edges,
|
||||
stats = {
|
||||
semantic_count = vim.tbl_count(results.semantic),
|
||||
file_count = vim.tbl_count(results.file),
|
||||
temporal_count = vim.tbl_count(results.temporal),
|
||||
total_scored = #scored,
|
||||
},
|
||||
truncated = truncated,
|
||||
}
|
||||
end
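-- Usage sketch (values illustrative): all three dimensions can be combined in one call.
--   local result = require("codetyper.brain.graph.query").execute({
--     query = "error handling",          -- semantic dimension
--     file = "lua/codetyper/init.lua",   -- file dimension (path is an example)
--     since = os.time() - 7 * 86400,     -- temporal dimension, last week
--     limit = 20,
--   })
--   -- result.nodes is ranked by compute_relevance; result.truncated flags a cut-off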
|
||||
|
||||
--- Find nodes by file
|
||||
---@param filepath string File path
|
||||
---@param limit? number Max results
|
||||
---@return Node[]
|
||||
function M.by_file(filepath, limit)
|
||||
local result = M.execute({
|
||||
file = filepath,
|
||||
limit = limit or 20,
|
||||
})
|
||||
return result.nodes
|
||||
end
|
||||
|
||||
--- Find nodes by time range
|
||||
---@param since number Start timestamp
|
||||
---@param until_ts? number End timestamp
|
||||
---@param limit? number Max results
|
||||
---@return Node[]
|
||||
function M.by_time_range(since, until_ts, limit)
|
||||
local node_mod = get_node_module()
|
||||
local by_time = storage.get_index("by_time")
|
||||
local results = {}
|
||||
|
||||
until_ts = until_ts or os.time()
|
||||
|
||||
for day, node_ids in pairs(by_time) do
|
||||
local year, month, day_num = day:match("(%d+)-(%d+)-(%d+)")
|
||||
if year then
|
||||
local day_ts = os.time({ year = tonumber(year), month = tonumber(month), day = tonumber(day_num) })
|
||||
if day_ts >= since and day_ts <= until_ts then
|
||||
for _, node_id in ipairs(node_ids) do
|
||||
local node = node_mod.get(node_id)
|
||||
if node then
|
||||
table.insert(results, node)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
-- Sort by creation time
|
||||
table.sort(results, function(a, b)
|
||||
return a.ts.cr > b.ts.cr
|
||||
end)
|
||||
|
||||
if limit and #results > limit then
|
||||
local limited = {}
|
||||
for i = 1, limit do
|
||||
limited[i] = results[i]
|
||||
end
|
||||
return limited
|
||||
end
|
||||
|
||||
return results
|
||||
end
|
||||
|
||||
--- Find semantically similar nodes
|
||||
---@param query string Query text
|
||||
---@param limit? number Max results
|
||||
---@return Node[]
|
||||
function M.semantic_search(query, limit)
|
||||
local result = M.execute({
|
||||
query = query,
|
||||
limit = limit or 10,
|
||||
depth = 2,
|
||||
})
|
||||
return result.nodes
|
||||
end
|
||||
|
||||
--- Get context chain (path) for explanation
|
||||
---@param node_ids string[] Node IDs to chain
|
||||
---@return string[] Chain descriptions
|
||||
function M.get_context_chain(node_ids)
|
||||
local node_mod = get_node_module()
|
||||
local edge_mod = get_edge_module()
|
||||
local chain = {}
|
||||
|
||||
for i, node_id in ipairs(node_ids) do
|
||||
local node = node_mod.get(node_id)
|
||||
if node then
|
||||
local entry = string.format("[%s] %s (w:%.2f)", node.t:upper(), node.c.s, node.sc.w)
|
||||
table.insert(chain, entry)
|
||||
|
||||
-- Add edge to next node if exists
|
||||
if node_ids[i + 1] then
|
||||
local edge = edge_mod.get(node_id, node_ids[i + 1])
|
||||
if edge then
|
||||
table.insert(chain, string.format(" -> %s (w:%.2f)", edge.ty, edge.p.w))
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
return chain
|
||||
end
|
||||
|
||||
return M
|
||||
112
lua/codetyper/brain/hash.lua
Normal file
@@ -0,0 +1,112 @@
|
||||
--- Brain Hashing Utilities
|
||||
--- Content-addressable storage with 8-character hashes
|
||||
|
||||
local M = {}
|
||||
|
||||
--- Simple DJB2 hash algorithm (fast, good distribution)
|
||||
---@param str string String to hash
|
||||
---@return number Hash value
|
||||
local function djb2(str)
|
||||
local hash = 5381
|
||||
for i = 1, #str do
|
||||
hash = ((hash * 33) + string.byte(str, i)) % 0x100000000
|
||||
end
|
||||
return hash
|
||||
end
|
||||
|
||||
--- Convert number to hex string
|
||||
---@param num number Number to convert
|
||||
---@param len number Desired length
|
||||
---@return string Hex string
|
||||
local function to_hex(num, len)
|
||||
local hex = string.format("%x", num)
|
||||
if #hex < len then
|
||||
hex = string.rep("0", len - #hex) .. hex
|
||||
end
|
||||
return hex:sub(-len)
|
||||
end
|
||||
|
||||
--- Compute 8-character hash from string
|
||||
---@param content string Content to hash
|
||||
---@return string 8-character hex hash
|
||||
function M.compute(content)
|
||||
if not content or content == "" then
|
||||
return "00000000"
|
||||
end
|
||||
local hash = djb2(content)
|
||||
return to_hex(hash, 8)
|
||||
end
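-- Sketch of the hashing behaviour (output value not precomputed here):
--   local hash = require("codetyper.brain.hash")
--   local h = hash.compute("lua/codetyper/init.lua")
--   -- h is always 8 lowercase hex characters; equal inputs give equal hashes,
--   -- which is what the content-addressable delta store relies on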
|
||||
|
||||
--- Compute hash from table (JSON-serialized)
|
||||
---@param tbl table Table to hash
|
||||
---@return string 8-character hex hash
|
||||
function M.compute_table(tbl)
|
||||
local ok, json = pcall(vim.json.encode, tbl)
|
||||
if not ok then
|
||||
return "00000000"
|
||||
end
|
||||
return M.compute(json)
|
||||
end
|
||||
|
||||
--- Generate unique node ID
|
||||
---@param node_type string Node type prefix
|
||||
---@param content? string Optional content for hash
|
||||
---@return string Node ID (n_<type>_<timestamp>_<hash>)
|
||||
function M.node_id(node_type, content)
|
||||
local ts = os.time()
|
||||
local hash_input = (content or "") .. tostring(ts) .. tostring(math.random(100000))
|
||||
local hash = M.compute(hash_input):sub(1, 6)
|
||||
return string.format("n_%s_%d_%s", node_type, ts, hash)
|
||||
end
|
||||
|
||||
--- Generate unique edge ID
|
||||
---@param source_id string Source node ID
|
||||
---@param target_id string Target node ID
|
||||
---@return string Edge ID (e_<source_hash>_<target_hash>)
|
||||
function M.edge_id(source_id, target_id)
|
||||
local src_hash = M.compute(source_id):sub(1, 4)
|
||||
local tgt_hash = M.compute(target_id):sub(1, 4)
|
||||
return string.format("e_%s_%s", src_hash, tgt_hash)
|
||||
end
|
||||
|
||||
--- Generate delta hash
|
||||
---@param changes table[] Delta changes
|
||||
---@param parent string|nil Parent delta hash
|
||||
---@param timestamp number Delta timestamp
|
||||
---@return string 8-character delta hash
|
||||
function M.delta_hash(changes, parent, timestamp)
|
||||
local content = (parent or "root") .. tostring(timestamp)
|
||||
for _, change in ipairs(changes or {}) do
|
||||
content = content .. (change.op or "") .. (change.path or "")
|
||||
end
|
||||
return M.compute(content)
|
||||
end
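-- Illustrative sketch: the delta hash covers only parent, timestamp and each change's
-- op/path, not the diff bodies, so it identifies "which paths changed when" rather than
-- the exact content of the change.
--   local h = M.delta_hash({ { op = "add", path = "nodes/patterns" } }, nil, os.time())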
|
||||
|
||||
--- Hash file path for storage
|
||||
---@param filepath string File path
|
||||
---@return string 8-character hash
|
||||
function M.path_hash(filepath)
|
||||
return M.compute(filepath)
|
||||
end
|
||||
|
||||
--- Check if two hashes match
|
||||
---@param hash1 string First hash
|
||||
---@param hash2 string Second hash
|
||||
---@return boolean True if matching
|
||||
function M.matches(hash1, hash2)
|
||||
return hash1 == hash2
|
||||
end
|
||||
|
||||
--- Generate random hash (for testing/temporary IDs)
|
||||
---@return string 8-character random hash
|
||||
function M.random()
|
||||
local chars = "0123456789abcdef"
|
||||
local result = ""
|
||||
for _ = 1, 8 do
|
||||
local idx = math.random(1, #chars)
|
||||
result = result .. chars:sub(idx, idx)
|
||||
end
|
||||
return result
|
||||
end
|
||||
|
||||
return M
|
||||
276
lua/codetyper/brain/init.lua
Normal file
@@ -0,0 +1,276 @@
|
||||
--- Brain Learning System
|
||||
--- Graph-based knowledge storage with delta versioning
|
||||
|
||||
local storage = require("codetyper.brain.storage")
|
||||
local types = require("codetyper.brain.types")
|
||||
|
||||
local M = {}
|
||||
|
||||
---@type BrainConfig|nil
|
||||
local config = nil
|
||||
|
||||
---@type boolean
|
||||
local initialized = false
|
||||
|
||||
--- Pending changes counter for auto-commit
|
||||
local pending_changes = 0
|
||||
|
||||
--- Default configuration
|
||||
local DEFAULT_CONFIG = {
|
||||
enabled = true,
|
||||
auto_learn = true,
|
||||
auto_commit = true,
|
||||
commit_threshold = 10,
|
||||
max_nodes = 5000,
|
||||
max_deltas = 500,
|
||||
prune = {
|
||||
enabled = true,
|
||||
threshold = 0.1,
|
||||
unused_days = 90,
|
||||
},
|
||||
output = {
|
||||
max_tokens = 4000,
|
||||
format = "compact",
|
||||
},
|
||||
}
|
||||
|
||||
--- Initialize brain system
|
||||
---@param opts? BrainConfig Configuration options
|
||||
function M.setup(opts)
|
||||
config = vim.tbl_deep_extend("force", DEFAULT_CONFIG, opts or {})
|
||||
|
||||
if not config.enabled then
|
||||
return
|
||||
end
|
||||
|
||||
-- Ensure storage directories
|
||||
storage.ensure_dirs()
|
||||
|
||||
-- Initialize meta if not exists
|
||||
storage.get_meta()
|
||||
|
||||
initialized = true
|
||||
end
|
||||
|
||||
--- Check if brain is initialized
|
||||
---@return boolean
|
||||
function M.is_initialized()
|
||||
return initialized and config ~= nil and config.enabled == true
|
||||
end
|
||||
|
||||
--- Get current configuration
|
||||
---@return BrainConfig|nil
|
||||
function M.get_config()
|
||||
return config
|
||||
end
|
||||
|
||||
--- Learn from an event
|
||||
---@param event LearnEvent Learning event
|
||||
---@return string|nil Node ID if created
|
||||
function M.learn(event)
|
||||
if not M.is_initialized() or not config.auto_learn then
|
||||
return nil
|
||||
end
|
||||
|
||||
local learners = require("codetyper.brain.learners")
|
||||
local node_id = learners.process(event)
|
||||
|
||||
if node_id then
|
||||
pending_changes = pending_changes + 1
|
||||
|
||||
-- Auto-commit if threshold reached
|
||||
if config.auto_commit and pending_changes >= config.commit_threshold then
|
||||
M.commit("Auto-commit: " .. pending_changes .. " changes")
|
||||
pending_changes = 0
|
||||
end
|
||||
end
|
||||
|
||||
return node_id
|
||||
end
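-- Usage sketch (event shape follows the learners modules; values are examples):
--   local brain = require("codetyper.brain")
--   brain.setup({ auto_learn = true, commit_threshold = 10 })
--   brain.learn({
--     type = "user_correction",
--     file = "lua/codetyper/http.lua",
--     data = { before = "pcall(fn)", after = "local ok, err = pcall(fn)", error_type = "missing error handling" },
--   })
--   -- once commit_threshold learned changes accumulate, an auto-commit delta is created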
|
||||
|
||||
--- Query relevant knowledge for context
|
||||
---@param opts QueryOpts Query options
|
||||
---@return QueryResult
|
||||
function M.query(opts)
|
||||
if not M.is_initialized() then
|
||||
return { nodes = {}, edges = {}, stats = {}, truncated = false }
|
||||
end
|
||||
|
||||
local query_engine = require("codetyper.brain.graph.query")
|
||||
return query_engine.execute(opts)
|
||||
end
|
||||
|
||||
--- Get LLM-optimized context string
|
||||
---@param opts? QueryOpts Query options
|
||||
---@return string Formatted context
|
||||
function M.get_context_for_llm(opts)
|
||||
if not M.is_initialized() then
|
||||
return ""
|
||||
end
|
||||
|
||||
opts = opts or {}
|
||||
opts.max_tokens = opts.max_tokens or config.output.max_tokens
|
||||
|
||||
local result = M.query(opts)
|
||||
local formatter = require("codetyper.brain.output.formatter")
|
||||
|
||||
if config.output.format == "json" then
|
||||
return formatter.to_json(result, opts)
|
||||
else
|
||||
return formatter.to_compact(result, opts)
|
||||
end
|
||||
end
|
||||
|
||||
--- Create a delta commit
|
||||
---@param message string Commit message
|
||||
---@return string|nil Delta hash
|
||||
function M.commit(message)
|
||||
if not M.is_initialized() then
|
||||
return nil
|
||||
end
|
||||
|
||||
local delta_mgr = require("codetyper.brain.delta")
|
||||
return delta_mgr.commit(message)
|
||||
end
|
||||
|
||||
--- Rollback to a previous delta
|
||||
---@param delta_hash string Target delta hash
|
||||
---@return boolean Success
|
||||
function M.rollback(delta_hash)
|
||||
if not M.is_initialized() then
|
||||
return false
|
||||
end
|
||||
|
||||
local delta_mgr = require("codetyper.brain.delta")
|
||||
return delta_mgr.rollback(delta_hash)
|
||||
end
|
||||
|
||||
--- Get delta history
|
||||
---@param limit? number Max entries
|
||||
---@return Delta[]
|
||||
function M.get_history(limit)
|
||||
if not M.is_initialized() then
|
||||
return {}
|
||||
end
|
||||
|
||||
local delta_mgr = require("codetyper.brain.delta")
|
||||
return delta_mgr.get_history(limit or 50)
|
||||
end
|
||||
|
||||
--- Prune low-value nodes
|
||||
---@param opts? table Prune options
|
||||
---@return number Number of pruned nodes
|
||||
function M.prune(opts)
|
||||
if not M.is_initialized() or not config.prune.enabled then
|
||||
return 0
|
||||
end
|
||||
|
||||
opts = vim.tbl_extend("force", {
|
||||
threshold = config.prune.threshold,
|
||||
unused_days = config.prune.unused_days,
|
||||
}, opts or {})
|
||||
|
||||
local graph = require("codetyper.brain.graph")
|
||||
return graph.prune(opts)
|
||||
end
|
||||
|
||||
--- Export brain state
|
||||
---@return table|nil Exported data
|
||||
function M.export()
|
||||
if not M.is_initialized() then
|
||||
return nil
|
||||
end
|
||||
|
||||
return {
|
||||
schema = types.SCHEMA_VERSION,
|
||||
meta = storage.get_meta(),
|
||||
graph = storage.get_graph(),
|
||||
nodes = {
|
||||
patterns = storage.get_nodes("patterns"),
|
||||
corrections = storage.get_nodes("corrections"),
|
||||
decisions = storage.get_nodes("decisions"),
|
||||
conventions = storage.get_nodes("conventions"),
|
||||
feedback = storage.get_nodes("feedback"),
|
||||
sessions = storage.get_nodes("sessions"),
|
||||
},
|
||||
indices = {
|
||||
by_file = storage.get_index("by_file"),
|
||||
by_time = storage.get_index("by_time"),
|
||||
by_symbol = storage.get_index("by_symbol"),
|
||||
},
|
||||
}
|
||||
end
|
||||
|
||||
--- Import brain state
|
||||
---@param data table Exported data
|
||||
---@return boolean Success
|
||||
function M.import(data)
|
||||
if not data or data.schema ~= types.SCHEMA_VERSION then
|
||||
return false
|
||||
end
|
||||
|
||||
storage.ensure_dirs()
|
||||
|
||||
-- Import nodes
|
||||
if data.nodes then
|
||||
for node_type, nodes in pairs(data.nodes) do
|
||||
storage.save_nodes(node_type, nodes)
|
||||
end
|
||||
end
|
||||
|
||||
-- Import graph
|
||||
if data.graph then
|
||||
storage.save_graph(data.graph)
|
||||
end
|
||||
|
||||
-- Import indices
|
||||
if data.indices then
|
||||
for index_type, index_data in pairs(data.indices) do
|
||||
storage.save_index(index_type, index_data)
|
||||
end
|
||||
end
|
||||
|
||||
-- Import meta last
|
||||
if data.meta then
|
||||
for k, v in pairs(data.meta) do
|
||||
storage.update_meta({ [k] = v })
|
||||
end
|
||||
end
|
||||
|
||||
storage.flush_all()
|
||||
return true
|
||||
end
|
||||
|
||||
--- Get stats about the brain
|
||||
---@return table Stats
|
||||
function M.stats()
|
||||
if not M.is_initialized() then
|
||||
return {}
|
||||
end
|
||||
|
||||
local meta = storage.get_meta()
|
||||
return {
|
||||
initialized = true,
|
||||
node_count = meta.nc,
|
||||
edge_count = meta.ec,
|
||||
delta_count = meta.dc,
|
||||
head = meta.head,
|
||||
pending_changes = pending_changes,
|
||||
}
|
||||
end
|
||||
|
||||
--- Flush all pending writes to disk
|
||||
function M.flush()
|
||||
storage.flush_all()
|
||||
end
|
||||
|
||||
--- Shutdown brain (call before exit)
|
||||
function M.shutdown()
|
||||
if pending_changes > 0 then
|
||||
M.commit("Session end: " .. pending_changes .. " changes")
|
||||
end
|
||||
storage.flush_all()
|
||||
initialized = false
|
||||
end
|
||||
|
||||
return M
|
||||
233
lua/codetyper/brain/learners/convention.lua
Normal file
@@ -0,0 +1,233 @@
|
||||
--- Brain Convention Learner
|
||||
--- Learns project conventions and coding standards
|
||||
|
||||
local types = require("codetyper.brain.types")
|
||||
|
||||
local M = {}
|
||||
|
||||
--- Detect if event contains convention info
|
||||
---@param event LearnEvent Learning event
|
||||
---@return boolean
|
||||
function M.detect(event)
|
||||
local valid_types = {
|
||||
"convention_detected",
|
||||
"naming_pattern",
|
||||
"style_pattern",
|
||||
"project_structure",
|
||||
"config_change",
|
||||
}
|
||||
|
||||
for _, t in ipairs(valid_types) do
|
||||
if event.type == t then
|
||||
return true
|
||||
end
|
||||
end
|
||||
|
||||
return false
|
||||
end
|
||||
|
||||
--- Extract convention data from event
|
||||
---@param event LearnEvent Learning event
|
||||
---@return table|nil Extracted data
|
||||
function M.extract(event)
|
||||
local data = event.data or {}
|
||||
|
||||
if event.type == "convention_detected" then
|
||||
return {
|
||||
summary = "Convention: " .. (data.name or "unnamed"),
|
||||
detail = data.description or data.name,
|
||||
rule = data.rule,
|
||||
examples = data.examples,
|
||||
category = data.category or "general",
|
||||
file = event.file,
|
||||
}
|
||||
end
|
||||
|
||||
if event.type == "naming_pattern" then
|
||||
return {
|
||||
summary = "Naming: " .. (data.pattern_name or data.pattern),
|
||||
detail = "Naming convention: " .. (data.description or data.pattern),
|
||||
rule = data.pattern,
|
||||
examples = data.examples,
|
||||
category = "naming",
|
||||
scope = data.scope, -- function, variable, class, file
|
||||
}
|
||||
end
|
||||
|
||||
if event.type == "style_pattern" then
|
||||
return {
|
||||
summary = "Style: " .. (data.name or "unnamed"),
|
||||
detail = data.description or "Code style pattern",
|
||||
rule = data.rule,
|
||||
examples = data.examples,
|
||||
category = "style",
|
||||
lang = data.language,
|
||||
}
|
||||
end
|
||||
|
||||
if event.type == "project_structure" then
|
||||
return {
|
||||
summary = "Structure: " .. (data.pattern or "project layout"),
|
||||
detail = data.description or "Project structure convention",
|
||||
rule = data.rule,
|
||||
category = "structure",
|
||||
paths = data.paths,
|
||||
}
|
||||
end
|
||||
|
||||
if event.type == "config_change" then
|
||||
return {
|
||||
summary = "Config: " .. (data.setting or "setting change"),
|
||||
detail = "Configuration: " .. (data.description or data.setting),
|
||||
before = data.before,
|
||||
after = data.after,
|
||||
category = "config",
|
||||
file = event.file,
|
||||
}
|
||||
end
|
||||
|
||||
return nil
|
||||
end
|
||||
|
||||
--- Check if convention should be learned
|
||||
---@param data table Extracted data
|
||||
---@return boolean
|
||||
function M.should_learn(data)
|
||||
if not data.summary then
|
||||
return false
|
||||
end
|
||||
|
||||
-- Skip very vague conventions
|
||||
if not data.detail or #data.detail < 5 then
|
||||
return false
|
||||
end
|
||||
|
||||
return true
|
||||
end
|
||||
|
||||
--- Create node from convention data
|
||||
---@param data table Extracted data
|
||||
---@return table Node creation params
|
||||
function M.create_node_params(data)
|
||||
local detail = data.detail or ""
|
||||
|
||||
-- Add examples if available
|
||||
if data.examples and #data.examples > 0 then
|
||||
detail = detail .. "\n\nExamples:"
|
||||
for _, ex in ipairs(data.examples) do
|
||||
detail = detail .. "\n- " .. tostring(ex)
|
||||
end
|
||||
end
|
||||
|
||||
-- Add rule if available
|
||||
if data.rule then
|
||||
detail = detail .. "\n\nRule: " .. tostring(data.rule)
|
||||
end
|
||||
|
||||
return {
|
||||
node_type = types.NODE_TYPES.CONVENTION,
|
||||
content = {
|
||||
s = data.summary:sub(1, 200),
|
||||
d = detail,
|
||||
lang = data.lang,
|
||||
},
|
||||
context = {
|
||||
f = data.file,
|
||||
sym = data.scope and { data.scope } or nil,
|
||||
},
|
||||
opts = {
|
||||
weight = 0.6,
|
||||
source = types.SOURCES.AUTO,
|
||||
},
|
||||
}
|
||||
end
|
||||
|
||||
--- Find related conventions
|
||||
---@param data table Extracted data
|
||||
---@param query_fn function Query function
|
||||
---@return string[] Related node IDs
|
||||
function M.find_related(data, query_fn)
|
||||
local related = {}
|
||||
|
||||
-- Find conventions in same category
|
||||
if data.category then
|
||||
local similar = query_fn({
|
||||
query = data.category,
|
||||
types = { types.NODE_TYPES.CONVENTION },
|
||||
limit = 5,
|
||||
})
|
||||
for _, node in ipairs(similar) do
|
||||
table.insert(related, node.id)
|
||||
end
|
||||
end
|
||||
|
||||
-- Find patterns that follow this convention
|
||||
if data.rule then
|
||||
local patterns = query_fn({
|
||||
query = data.rule,
|
||||
types = { types.NODE_TYPES.PATTERN },
|
||||
limit = 3,
|
||||
})
|
||||
for _, node in ipairs(patterns) do
|
||||
if not vim.tbl_contains(related, node.id) then
|
||||
table.insert(related, node.id)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
return related
|
||||
end
|
||||
|
||||
--- Detect naming convention from symbol names
|
||||
---@param symbols string[] Symbol names to analyze
|
||||
---@return table|nil Detected convention
|
||||
function M.detect_naming(symbols)
|
||||
if not symbols or #symbols < 3 then
|
||||
return nil
|
||||
end
|
||||
|
||||
local patterns = {
|
||||
snake_case = 0,
|
||||
camelCase = 0,
|
||||
PascalCase = 0,
|
||||
SCREAMING_SNAKE = 0,
|
||||
kebab_case = 0,
|
||||
}
|
||||
|
||||
for _, sym in ipairs(symbols) do
|
||||
if sym:match("^[a-z][a-z0-9_]*$") then
|
||||
patterns.snake_case = patterns.snake_case + 1
|
||||
elseif sym:match("^[a-z][a-zA-Z0-9]*$") then
|
||||
patterns.camelCase = patterns.camelCase + 1
|
||||
elseif sym:match("^[A-Z][a-zA-Z0-9]*$") then
|
||||
patterns.PascalCase = patterns.PascalCase + 1
|
||||
elseif sym:match("^[A-Z][A-Z0-9_]*$") then
|
||||
patterns.SCREAMING_SNAKE = patterns.SCREAMING_SNAKE + 1
|
||||
elseif sym:match("^[a-z][a-z0-9%-]*$") then
|
||||
patterns.kebab_case = patterns.kebab_case + 1
|
||||
end
|
||||
end
|
||||
|
||||
-- Find dominant pattern
|
||||
local max_count = 0
|
||||
local dominant = nil
|
||||
|
||||
for pattern, count in pairs(patterns) do
|
||||
if count > max_count then
|
||||
max_count = count
|
||||
dominant = pattern
|
||||
end
|
||||
end
|
||||
|
||||
if dominant and max_count >= #symbols * 0.6 then
|
||||
return {
|
||||
pattern = dominant,
|
||||
confidence = max_count / #symbols,
|
||||
sample_size = #symbols,
|
||||
}
|
||||
end
|
||||
|
||||
return nil
|
||||
end
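-- Worked sketch: for symbols { "get_user", "save_file", "parse_json", "HTTPClient" },
-- snake_case matches 3 of 4 names (75%, above the 60% cut-off), so the result is
-- { pattern = "snake_case", confidence = 0.75, sample_size = 4 }.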
|
||||
|
||||
return M
|
||||
213
lua/codetyper/brain/learners/correction.lua
Normal file
@@ -0,0 +1,213 @@
|
||||
--- Brain Correction Learner
|
||||
--- Learns from user corrections and edits
|
||||
|
||||
local types = require("codetyper.brain.types")
|
||||
|
||||
local M = {}
|
||||
|
||||
--- Detect if event is a correction
|
||||
---@param event LearnEvent Learning event
|
||||
---@return boolean
|
||||
function M.detect(event)
|
||||
local valid_types = {
|
||||
"user_correction",
|
||||
"code_rejected",
|
||||
"code_modified",
|
||||
"suggestion_rejected",
|
||||
}
|
||||
|
||||
for _, t in ipairs(valid_types) do
|
||||
if event.type == t then
|
||||
return true
|
||||
end
|
||||
end
|
||||
|
||||
return false
|
||||
end
|
||||
|
||||
--- Extract correction data from event
|
||||
---@param event LearnEvent Learning event
|
||||
---@return table|nil Extracted data
|
||||
function M.extract(event)
|
||||
local data = event.data or {}
|
||||
|
||||
if event.type == "user_correction" then
|
||||
return {
|
||||
summary = "Correction: " .. (data.error_type or "user edit"),
|
||||
detail = data.description or "User corrected the generated code",
|
||||
before = data.before,
|
||||
after = data.after,
|
||||
error_type = data.error_type,
|
||||
file = event.file,
|
||||
function_name = data.function_name,
|
||||
lines = data.lines,
|
||||
}
|
||||
end
|
||||
|
||||
if event.type == "code_rejected" then
|
||||
return {
|
||||
summary = "Rejected: " .. (data.reason or "not accepted"),
|
||||
detail = data.description or "User rejected generated code",
|
||||
rejected_code = data.code,
|
||||
reason = data.reason,
|
||||
file = event.file,
|
||||
intent = data.intent,
|
||||
}
|
||||
end
|
||||
|
||||
if event.type == "code_modified" then
|
||||
local changes = M.analyze_changes(data.before, data.after)
|
||||
return {
|
||||
summary = "Modified: " .. changes.summary,
|
||||
detail = changes.detail,
|
||||
before = data.before,
|
||||
after = data.after,
|
||||
change_type = changes.type,
|
||||
file = event.file,
|
||||
lines = data.lines,
|
||||
}
|
||||
end
|
||||
|
||||
return nil
|
||||
end
|
||||
|
||||
--- Analyze changes between before/after code
|
||||
---@param before string Before code
|
||||
---@param after string After code
|
||||
---@return table Change analysis
|
||||
function M.analyze_changes(before, after)
|
||||
before = before or ""
|
||||
after = after or ""
|
||||
|
||||
local before_lines = vim.split(before, "\n")
|
||||
local after_lines = vim.split(after, "\n")
|
||||
|
||||
local added = 0
|
||||
local removed = 0
|
||||
local modified = 0
|
||||
|
||||
-- Simple line-based diff
|
||||
local max_lines = math.max(#before_lines, #after_lines)
|
||||
for i = 1, max_lines do
|
||||
local b = before_lines[i]
|
||||
local a = after_lines[i]
|
||||
|
||||
if b == nil and a ~= nil then
|
||||
added = added + 1
|
||||
elseif b ~= nil and a == nil then
|
||||
removed = removed + 1
|
||||
elseif b ~= a then
|
||||
modified = modified + 1
|
||||
end
|
||||
end
|
||||
|
||||
local change_type = "mixed"
|
||||
if added > 0 and removed == 0 and modified == 0 then
|
||||
change_type = "addition"
|
||||
elseif removed > 0 and added == 0 and modified == 0 then
|
||||
change_type = "deletion"
|
||||
elseif modified > 0 and added == 0 and removed == 0 then
|
||||
change_type = "modification"
|
||||
end
|
||||
|
||||
return {
|
||||
type = change_type,
|
||||
summary = string.format("+%d -%d ~%d lines", added, removed, modified),
|
||||
detail = string.format("Added %d, removed %d, modified %d lines", added, removed, modified),
|
||||
stats = {
|
||||
added = added,
|
||||
removed = removed,
|
||||
modified = modified,
|
||||
},
|
||||
}
|
||||
end
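-- Worked sketch of the line diff above:
--   before = "local a = 1\nlocal b = 2"                -- 2 lines
--   after  = "local a = 1\nlocal b = 3\nlocal c = 4"   -- 3 lines
-- gives added = 1, removed = 0, modified = 1, type = "mixed",
-- summary = "+1 -0 ~1 lines".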
|
||||
|
||||
--- Check if correction should be learned
|
||||
---@param data table Extracted data
|
||||
---@return boolean
|
||||
function M.should_learn(data)
|
||||
-- Always learn corrections - they're valuable
|
||||
if not data.summary then
|
||||
return false
|
||||
end
|
||||
|
||||
-- Skip trivial changes
|
||||
if data.before and data.after then
|
||||
-- Skip if only whitespace changed
|
||||
local before_trimmed = data.before:gsub("%s+", "")
|
||||
local after_trimmed = data.after:gsub("%s+", "")
|
||||
if before_trimmed == after_trimmed then
|
||||
return false
|
||||
end
|
||||
end
|
||||
|
||||
return true
|
||||
end
|
||||
|
||||
--- Create node from correction data
|
||||
---@param data table Extracted data
|
||||
---@return table Node creation params
|
||||
function M.create_node_params(data)
|
||||
local detail = data.detail or ""
|
||||
|
||||
-- Include before/after in detail for learning
|
||||
if data.before and data.after then
|
||||
detail = detail .. "\n\nBefore:\n" .. data.before:sub(1, 500)
|
||||
detail = detail .. "\n\nAfter:\n" .. data.after:sub(1, 500)
|
||||
end
|
||||
|
||||
return {
|
||||
node_type = types.NODE_TYPES.CORRECTION,
|
||||
content = {
|
||||
s = data.summary:sub(1, 200),
|
||||
d = detail,
|
||||
code = data.after or data.rejected_code,
|
||||
lang = data.lang,
|
||||
},
|
||||
context = {
|
||||
f = data.file,
|
||||
fn = data.function_name,
|
||||
ln = data.lines,
|
||||
},
|
||||
opts = {
|
||||
weight = 0.7, -- Corrections are valuable
|
||||
source = types.SOURCES.USER,
|
||||
},
|
||||
}
|
||||
end
|
||||
|
||||
--- Find related nodes for corrections
|
||||
---@param data table Extracted data
|
||||
---@param query_fn function Query function
|
||||
---@return string[] Related node IDs
|
||||
function M.find_related(data, query_fn)
|
||||
local related = {}
|
||||
|
||||
-- Find patterns that might be corrected
|
||||
if data.before then
|
||||
local similar = query_fn({
|
||||
query = data.before:sub(1, 100),
|
||||
types = { types.NODE_TYPES.PATTERN },
|
||||
limit = 3,
|
||||
})
|
||||
for _, node in ipairs(similar) do
|
||||
table.insert(related, node.id)
|
||||
end
|
||||
end
|
||||
|
||||
-- Find other corrections in same file
|
||||
if data.file then
|
||||
local file_corrections = query_fn({
|
||||
file = data.file,
|
||||
types = { types.NODE_TYPES.CORRECTION },
|
||||
limit = 3,
|
||||
})
|
||||
for _, node in ipairs(file_corrections) do
|
||||
table.insert(related, node.id)
|
||||
end
|
||||
end
|
||||
|
||||
return related
|
||||
end
|
||||
|
||||
return M
|
||||
232
lua/codetyper/brain/learners/init.lua
Normal file
@@ -0,0 +1,232 @@
|
||||
--- Brain Learners Coordinator
|
||||
--- Routes learning events to appropriate learners
|
||||
|
||||
local types = require("codetyper.brain.types")
|
||||
|
||||
local M = {}
|
||||
|
||||
-- Lazy load learners
|
||||
local function get_pattern_learner()
|
||||
return require("codetyper.brain.learners.pattern")
|
||||
end
|
||||
|
||||
local function get_correction_learner()
|
||||
return require("codetyper.brain.learners.correction")
|
||||
end
|
||||
|
||||
local function get_convention_learner()
|
||||
return require("codetyper.brain.learners.convention")
|
||||
end
|
||||
|
||||
--- All available learners
|
||||
local LEARNERS = {
|
||||
{ name = "pattern", loader = get_pattern_learner },
|
||||
{ name = "correction", loader = get_correction_learner },
|
||||
{ name = "convention", loader = get_convention_learner },
|
||||
}
|
||||
|
||||
--- Process a learning event
|
||||
---@param event LearnEvent Learning event
|
||||
---@return string|nil Created node ID
|
||||
function M.process(event)
|
||||
if not event or not event.type then
|
||||
return nil
|
||||
end
|
||||
|
||||
-- Add timestamp if missing
|
||||
event.timestamp = event.timestamp or os.time()
|
||||
|
||||
-- Find matching learner
|
||||
for _, learner_info in ipairs(LEARNERS) do
|
||||
local learner = learner_info.loader()
|
||||
|
||||
if learner.detect(event) then
|
||||
return M.learn_with(learner, event)
|
||||
end
|
||||
end
|
||||
|
||||
-- Handle generic feedback events
|
||||
if event.type == "user_feedback" then
|
||||
return M.process_feedback(event)
|
||||
end
|
||||
|
||||
-- Handle session events
|
||||
if event.type == "session_start" or event.type == "session_end" then
|
||||
return M.process_session(event)
|
||||
end
|
||||
|
||||
return nil
|
||||
end
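-- Usage sketch: an event is routed to the first learner whose detect() accepts it
-- (values here are examples).
--   local learners = require("codetyper.brain.learners")
--   local node_id = learners.process({
--     type = "pattern_detected",
--     file = "lua/codetyper/http.lua",
--     data = { name = "retry wrapper", description = "wrap outbound calls in retry_with_backoff" },
--   })
--   -- the pattern learner matches, so a PATTERN node is created (nil if skipped)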
|
||||
|
||||
--- Learn using a specific learner
|
||||
---@param learner table Learner module
|
||||
---@param event LearnEvent Learning event
|
||||
---@return string|nil Created node ID
|
||||
function M.learn_with(learner, event)
|
||||
-- Extract data
|
||||
local extracted = learner.extract(event)
|
||||
if not extracted then
|
||||
return nil
|
||||
end
|
||||
|
||||
-- Handle multiple extractions (e.g., from file indexing)
|
||||
if vim.islist(extracted) then
|
||||
local node_ids = {}
|
||||
for _, data in ipairs(extracted) do
|
||||
local node_id = M.create_learning(learner, data, event)
|
||||
if node_id then
|
||||
table.insert(node_ids, node_id)
|
||||
end
|
||||
end
|
||||
return node_ids[1] -- Return first for now
|
||||
end
|
||||
|
||||
return M.create_learning(learner, extracted, event)
|
||||
end
|
||||
|
||||
--- Create a learning from extracted data
|
||||
---@param learner table Learner module
|
||||
---@param data table Extracted data
|
||||
---@param event LearnEvent Original event
|
||||
---@return string|nil Created node ID
|
||||
function M.create_learning(learner, data, event)
|
||||
-- Check if should learn
|
||||
if not learner.should_learn(data) then
|
||||
return nil
|
||||
end
|
||||
|
||||
-- Get node params
|
||||
local params = learner.create_node_params(data)
|
||||
|
||||
-- Get graph module
|
||||
local graph = require("codetyper.brain.graph")
|
||||
|
||||
-- Find related nodes
|
||||
local related_ids = {}
|
||||
if learner.find_related then
|
||||
related_ids = learner.find_related(data, function(opts)
|
||||
return graph.query.execute(opts).nodes
|
||||
end)
|
||||
end
|
||||
|
||||
-- Create the learning
|
||||
local node = graph.add_learning(params.node_type, params.content, params.context, related_ids)
|
||||
|
||||
-- Update weight if specified
|
||||
if params.opts and params.opts.weight then
|
||||
graph.node.update(node.id, { sc = { w = params.opts.weight } })
|
||||
end
|
||||
|
||||
return node.id
|
||||
end
|
||||
|
||||
--- Process feedback event
|
||||
---@param event LearnEvent Feedback event
|
||||
---@return string|nil Created node ID
|
||||
function M.process_feedback(event)
|
||||
local data = event.data or {}
|
||||
local graph = require("codetyper.brain.graph")
|
||||
|
||||
local content = {
|
||||
s = "Feedback: " .. (data.feedback or "unknown"),
|
||||
d = data.description or ("User " .. (data.feedback or "gave feedback")),
|
||||
}
|
||||
|
||||
local context = {
|
||||
f = event.file,
|
||||
}
|
||||
|
||||
-- If feedback references a node, update it
|
||||
if data.node_id then
|
||||
local node = graph.node.get(data.node_id)
|
||||
if node then
|
||||
local weight_delta = data.feedback == "accepted" and 0.1 or -0.1
|
||||
local new_weight = math.max(0, math.min(1, node.sc.w + weight_delta))
|
||||
|
||||
graph.node.update(data.node_id, {
|
||||
sc = { w = new_weight },
|
||||
})
|
||||
|
||||
-- Record usage
|
||||
graph.node.record_usage(data.node_id, data.feedback == "accepted")
|
||||
|
||||
-- Create feedback node linked to original
|
||||
local fb_node = graph.add_learning(types.NODE_TYPES.FEEDBACK, content, context, { data.node_id })
|
||||
|
||||
return fb_node.id
|
||||
end
|
||||
end
|
||||
|
||||
-- Create standalone feedback node
|
||||
local node = graph.add_learning(types.NODE_TYPES.FEEDBACK, content, context)
|
||||
return node.id
|
||||
end
|
||||
|
||||
--- Process session event
|
||||
---@param event LearnEvent Session event
|
||||
---@return string|nil Created node ID
|
||||
function M.process_session(event)
|
||||
local data = event.data or {}
|
||||
local graph = require("codetyper.brain.graph")
|
||||
|
||||
local content = {
|
||||
s = event.type == "session_start" and "Session started" or "Session ended",
|
||||
d = data.description or event.type,
|
||||
}
|
||||
|
||||
if event.type == "session_end" and data.stats then
|
||||
content.d = content.d .. "\n\nStats:"
|
||||
content.d = content.d .. "\n- Completions: " .. (data.stats.completions or 0)
|
||||
content.d = content.d .. "\n- Corrections: " .. (data.stats.corrections or 0)
|
||||
content.d = content.d .. "\n- Files: " .. (data.stats.files or 0)
|
||||
end
|
||||
|
||||
local node = graph.add_learning(types.NODE_TYPES.SESSION, content, {})
|
||||
|
||||
-- Link to recent session nodes
|
||||
if event.type == "session_end" then
|
||||
local recent = graph.query.by_time_range(os.time() - 3600, os.time(), 20) -- Last hour
|
||||
local session_nodes = {}
|
||||
|
||||
for _, n in ipairs(recent) do
|
||||
if n.id ~= node.id then
|
||||
table.insert(session_nodes, n.id)
|
||||
end
|
||||
end
|
||||
|
||||
-- Create temporal links
|
||||
if #session_nodes > 0 then
|
||||
graph.link_temporal(session_nodes)
|
||||
end
|
||||
end
|
||||
|
||||
return node.id
|
||||
end
|
||||
|
||||
--- Batch process multiple events
|
||||
---@param events LearnEvent[] Events to process
|
||||
---@return string[] Created node IDs
|
||||
function M.batch_process(events)
|
||||
local node_ids = {}
|
||||
|
||||
for _, event in ipairs(events) do
|
||||
local node_id = M.process(event)
|
||||
if node_id then
|
||||
table.insert(node_ids, node_id)
|
||||
end
|
||||
end
|
||||
|
||||
return node_ids
|
||||
end
|
||||
|
||||
--- Get learner names
|
||||
---@return string[]
|
||||
function M.get_learner_names()
|
||||
local names = {}
|
||||
for _, learner in ipairs(LEARNERS) do
|
||||
table.insert(names, learner.name)
|
||||
end
|
||||
return names
|
||||
end
|
||||
|
||||
return M
|
||||
172
lua/codetyper/brain/learners/pattern.lua
Normal file
@@ -0,0 +1,172 @@
|
||||
--- Brain Pattern Learner
|
||||
--- Detects and learns code patterns
|
||||
|
||||
local types = require("codetyper.brain.types")
|
||||
|
||||
local M = {}
|
||||
|
||||
--- Detect if event contains a learnable pattern
|
||||
---@param event LearnEvent Learning event
|
||||
---@return boolean
|
||||
function M.detect(event)
|
||||
local valid_types = {
|
||||
"code_completion",
|
||||
"file_indexed",
|
||||
"code_analyzed",
|
||||
"pattern_detected",
|
||||
}
|
||||
|
||||
for _, t in ipairs(valid_types) do
|
||||
if event.type == t then
|
||||
return true
|
||||
end
|
||||
end
|
||||
|
||||
return false
|
||||
end
|
||||
|
||||
--- Extract pattern data from event
|
||||
---@param event LearnEvent Learning event
|
||||
---@return table|nil Extracted data
|
||||
function M.extract(event)
|
||||
local data = event.data or {}
|
||||
|
||||
-- Extract from code completion
|
||||
if event.type == "code_completion" then
|
||||
return {
|
||||
summary = "Code pattern: " .. (data.intent or "unknown"),
|
||||
detail = data.code or data.content or "",
|
||||
code = data.code,
|
||||
lang = data.language,
|
||||
file = event.file,
|
||||
function_name = data.function_name,
|
||||
symbols = data.symbols,
|
||||
}
|
||||
end
|
||||
|
||||
-- Extract from file indexing
|
||||
if event.type == "file_indexed" then
|
||||
local patterns = {}
|
||||
|
||||
-- Extract function patterns
|
||||
if data.functions then
|
||||
for _, func in ipairs(data.functions) do
|
||||
table.insert(patterns, {
|
||||
summary = "Function: " .. func.name,
|
||||
detail = func.signature or func.name,
|
||||
code = func.body,
|
||||
lang = data.language,
|
||||
file = event.file,
|
||||
function_name = func.name,
|
||||
lines = func.lines,
|
||||
})
|
||||
end
|
||||
end
|
||||
|
||||
-- Extract class patterns
|
||||
if data.classes then
|
||||
for _, class in ipairs(data.classes) do
|
||||
table.insert(patterns, {
|
||||
summary = "Class: " .. class.name,
|
||||
detail = class.description or class.name,
|
||||
lang = data.language,
|
||||
file = event.file,
|
||||
symbols = { class.name },
|
||||
})
|
||||
end
|
||||
end
|
||||
|
||||
return #patterns > 0 and patterns or nil
|
||||
end
|
||||
|
||||
-- Extract from explicit pattern detection
|
||||
if event.type == "pattern_detected" then
|
||||
return {
|
||||
summary = data.name or "Unnamed pattern",
|
||||
detail = data.description or data.name or "",
|
||||
code = data.example,
|
||||
lang = data.language,
|
||||
file = event.file,
|
||||
symbols = data.symbols,
|
||||
}
|
||||
end
|
||||
|
||||
return nil
|
||||
end
|
||||
|
||||
--- Check if pattern should be learned
|
||||
---@param data table Extracted data
|
||||
---@return boolean
|
||||
function M.should_learn(data)
|
||||
-- Skip if no meaningful content
|
||||
if not data.summary or data.summary == "" then
|
||||
return false
|
||||
end
|
||||
|
||||
-- Skip very short patterns
|
||||
if data.detail and #data.detail < 10 then
|
||||
return false
|
||||
end
|
||||
|
||||
-- Skip whitespace-only summaries
|
||||
if data.summary:match("^%s*$") then
|
||||
return false
|
||||
end
|
||||
|
||||
return true
|
||||
end
|
||||
|
||||
--- Create node from pattern data
|
||||
---@param data table Extracted data
|
||||
---@return table Node creation params
|
||||
function M.create_node_params(data)
|
||||
return {
|
||||
node_type = types.NODE_TYPES.PATTERN,
|
||||
content = {
|
||||
s = data.summary:sub(1, 200), -- Limit summary
|
||||
d = data.detail,
|
||||
code = data.code,
|
||||
lang = data.lang,
|
||||
},
|
||||
context = {
|
||||
f = data.file,
|
||||
fn = data.function_name,
|
||||
ln = data.lines,
|
||||
sym = data.symbols,
|
||||
},
|
||||
opts = {
|
||||
weight = 0.5,
|
||||
source = types.SOURCES.AUTO,
|
||||
},
|
||||
}
|
||||
end
|
||||
|
||||
--- Find potentially related nodes
|
||||
---@param data table Extracted data
|
||||
---@param query_fn function Query function
|
||||
---@return string[] Related node IDs
|
||||
function M.find_related(data, query_fn)
|
||||
local related = {}
|
||||
|
||||
-- Find nodes in same file
|
||||
if data.file then
|
||||
local file_nodes = query_fn({ file = data.file, limit = 5 })
|
||||
for _, node in ipairs(file_nodes) do
|
||||
table.insert(related, node.id)
|
||||
end
|
||||
end
|
||||
|
||||
-- Find semantically similar
|
||||
if data.summary then
|
||||
local similar = query_fn({ query = data.summary, limit = 3 })
|
||||
for _, node in ipairs(similar) do
|
||||
if not vim.tbl_contains(related, node.id) then
|
||||
table.insert(related, node.id)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
return related
|
||||
end
|
||||
|
||||
return M
|
||||
279
lua/codetyper/brain/output/formatter.lua
Normal file
@@ -0,0 +1,279 @@
|
||||
--- Brain Output Formatter
|
||||
--- LLM-optimized output formatting
|
||||
|
||||
local types = require("codetyper.brain.types")
|
||||
|
||||
local M = {}
|
||||
|
||||
--- Estimate token count (rough approximation)
|
||||
---@param text string Text to estimate
|
||||
---@return number Estimated tokens
|
||||
function M.estimate_tokens(text)
|
||||
if not text then
|
||||
return 0
|
||||
end
|
||||
-- Rough estimate: 1 token ~= 4 characters
|
||||
return math.ceil(#text / 4)
|
||||
end
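-- Sketch: the 4-characters-per-token heuristic is deliberately rough, e.g.
--   M.estimate_tokens("local ok, err = pcall(fn)")  -- 25 chars -> ceil(25 / 4) = 7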
|
||||
|
||||
--- Format nodes to compact text format
|
||||
---@param result QueryResult Query result
|
||||
---@param opts? table Options
|
||||
---@return string Formatted output
|
||||
function M.to_compact(result, opts)
|
||||
opts = opts or {}
|
||||
local max_tokens = opts.max_tokens or 4000
|
||||
local lines = {}
|
||||
local current_tokens = 0
|
||||
|
||||
-- Header
|
||||
table.insert(lines, "---BRAIN_CONTEXT---")
|
||||
if opts.query then
|
||||
table.insert(lines, "Q: " .. opts.query)
|
||||
end
|
||||
table.insert(lines, "")
|
||||
|
||||
-- Add nodes by relevance (already sorted)
|
||||
table.insert(lines, "Learnings:")
|
||||
|
||||
for i, node in ipairs(result.nodes) do
|
||||
-- Format: [idx] TYPE | w:0.85 u:5 | Summary
|
||||
local line = string.format(
|
||||
"[%d] %s | w:%.2f u:%d | %s",
|
||||
i,
|
||||
(node.t or "?"):upper(),
|
||||
node.sc.w or 0,
|
||||
node.sc.u or 0,
|
||||
(node.c.s or ""):sub(1, 100)
|
||||
)
|
||||
|
||||
local line_tokens = M.estimate_tokens(line)
|
||||
if current_tokens + line_tokens > max_tokens - 100 then
|
||||
table.insert(lines, "... (truncated)")
|
||||
break
|
||||
end
|
||||
|
||||
table.insert(lines, line)
|
||||
current_tokens = current_tokens + line_tokens
|
||||
|
||||
-- Add context if file-related
|
||||
if node.ctx and node.ctx.f then
|
||||
local ctx_line = " @ " .. node.ctx.f
|
||||
if node.ctx.fn then
|
||||
ctx_line = ctx_line .. ":" .. node.ctx.fn
|
||||
end
|
||||
if node.ctx.ln then
|
||||
ctx_line = ctx_line .. " L" .. node.ctx.ln[1]
|
||||
end
|
||||
table.insert(lines, ctx_line)
|
||||
current_tokens = current_tokens + M.estimate_tokens(ctx_line)
|
||||
end
|
||||
end
|
||||
|
||||
-- Add connections if space allows
|
||||
if #result.edges > 0 and current_tokens < max_tokens - 200 then
|
||||
table.insert(lines, "")
|
||||
table.insert(lines, "Connections:")
|
||||
|
||||
for _, edge in ipairs(result.edges) do
|
||||
if current_tokens >= max_tokens - 50 then
|
||||
break
|
||||
end
|
||||
|
||||
local conn_line = string.format(
|
||||
" %s --%s(%.2f)--> %s",
|
||||
edge.s:sub(-8),
|
||||
edge.ty,
|
||||
edge.p.w or 0.5,
|
||||
edge.t:sub(-8)
|
||||
)
|
||||
table.insert(lines, conn_line)
|
||||
current_tokens = current_tokens + M.estimate_tokens(conn_line)
|
||||
end
|
||||
end
|
||||
|
||||
table.insert(lines, "---END_CONTEXT---")
|
||||
|
||||
return table.concat(lines, "\n")
|
||||
end
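-- Illustrative shape of the compact output (values made up):
--   ---BRAIN_CONTEXT---
--   Q: error handling
--
--   Learnings:
--   [1] CORRECTION | w:0.70 u:3 | Rejected: missing nil check
--   ---END_CONTEXT---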
|
||||
|
||||
--- Format nodes to JSON format
|
||||
---@param result QueryResult Query result
|
||||
---@param opts? table Options
|
||||
---@return string JSON output
|
||||
function M.to_json(result, opts)
|
||||
opts = opts or {}
|
||||
local max_tokens = opts.max_tokens or 4000
|
||||
|
||||
local output = {
|
||||
_s = "brain-v1", -- Schema
|
||||
q = opts.query,
|
||||
l = {}, -- Learnings
|
||||
c = {}, -- Connections
|
||||
}
|
||||
|
||||
local current_tokens = 50 -- Base overhead
|
||||
|
||||
-- Add nodes
|
||||
for _, node in ipairs(result.nodes) do
|
||||
local entry = {
|
||||
t = node.t,
|
||||
s = (node.c.s or ""):sub(1, 150),
|
||||
w = node.sc.w,
|
||||
u = node.sc.u,
|
||||
}
|
||||
|
||||
if node.ctx and node.ctx.f then
|
||||
entry.f = node.ctx.f
|
||||
end
|
||||
|
||||
local entry_tokens = M.estimate_tokens(vim.json.encode(entry))
|
||||
if current_tokens + entry_tokens > max_tokens - 100 then
|
||||
break
|
||||
end
|
||||
|
||||
table.insert(output.l, entry)
|
||||
current_tokens = current_tokens + entry_tokens
|
||||
end
|
||||
|
||||
-- Add edges if space
|
||||
if current_tokens < max_tokens - 200 then
|
||||
for _, edge in ipairs(result.edges) do
|
||||
if current_tokens >= max_tokens - 50 then
|
||||
break
|
||||
end
|
||||
|
||||
local e = {
|
||||
s = edge.s:sub(-8),
|
||||
t = edge.t:sub(-8),
|
||||
r = edge.ty,
|
||||
w = edge.p.w,
|
||||
}
|
||||
|
||||
table.insert(output.c, e)
|
||||
current_tokens = current_tokens + 30
|
||||
end
|
||||
end
|
||||
|
||||
return vim.json.encode(output)
|
||||
end
|
||||
|
||||
--- Format as natural language
|
||||
---@param result QueryResult Query result
|
||||
---@param opts? table Options
|
||||
---@return string Natural language output
|
||||
function M.to_natural(result, opts)
|
||||
opts = opts or {}
|
||||
local max_tokens = opts.max_tokens or 4000
|
||||
local lines = {}
|
||||
local current_tokens = 0
|
||||
|
||||
if #result.nodes == 0 then
|
||||
return "No relevant learnings found."
|
||||
end
|
||||
|
||||
table.insert(lines, "Based on previous learnings:")
|
||||
table.insert(lines, "")
|
||||
|
||||
-- Group by type
|
||||
local by_type = {}
|
||||
for _, node in ipairs(result.nodes) do
|
||||
by_type[node.t] = by_type[node.t] or {}
|
||||
table.insert(by_type[node.t], node)
|
||||
end
|
||||
|
||||
local type_names = {
|
||||
[types.NODE_TYPES.PATTERN] = "Code Patterns",
|
||||
[types.NODE_TYPES.CORRECTION] = "Previous Corrections",
|
||||
[types.NODE_TYPES.CONVENTION] = "Project Conventions",
|
||||
[types.NODE_TYPES.DECISION] = "Architectural Decisions",
|
||||
[types.NODE_TYPES.FEEDBACK] = "User Preferences",
|
||||
[types.NODE_TYPES.SESSION] = "Session Context",
|
||||
}
|
||||
|
||||
for node_type, nodes in pairs(by_type) do
|
||||
local type_name = type_names[node_type] or node_type
|
||||
|
||||
table.insert(lines, "**" .. type_name .. "**")
|
||||
|
||||
for _, node in ipairs(nodes) do
|
||||
if current_tokens >= max_tokens - 100 then
|
||||
table.insert(lines, "...")
|
||||
goto done
|
||||
end
|
||||
|
||||
local bullet = string.format("- %s (confidence: %.0f%%)", node.c.s or "?", (node.sc.w or 0) * 100)
|
||||
|
||||
table.insert(lines, bullet)
|
||||
current_tokens = current_tokens + M.estimate_tokens(bullet)
|
||||
|
||||
-- Add detail if high weight
|
||||
if node.sc.w > 0.7 and node.c.d and #node.c.d > #(node.c.s or "") then
|
||||
local detail = " " .. node.c.d:sub(1, 150)
|
||||
if #node.c.d > 150 then
|
||||
detail = detail .. "..."
|
||||
end
|
||||
table.insert(lines, detail)
|
||||
current_tokens = current_tokens + M.estimate_tokens(detail)
|
||||
end
|
||||
end
|
||||
|
||||
table.insert(lines, "")
|
||||
end
|
||||
|
||||
::done::
|
||||
|
||||
return table.concat(lines, "\n")
|
||||
end
|
||||
|
||||
--- Format context chain for explanation
|
||||
---@param chain table[] Chain of nodes and edges
|
||||
---@return string Chain explanation
|
||||
function M.format_chain(chain)
|
||||
local lines = {}
|
||||
|
||||
for i, item in ipairs(chain) do
|
||||
if item.node then
|
||||
local prefix = i == 1 and "" or " -> "
|
||||
table.insert(lines, string.format("%s[%s] %s (w:%.2f)", prefix, item.node.t:upper(), item.node.c.s:sub(1, 50), item.node.sc.w))
|
||||
end
|
||||
if item.edge then
|
||||
table.insert(lines, string.format(" via %s (w:%.2f)", item.edge.ty, item.edge.p.w))
|
||||
end
|
||||
end
|
||||
|
||||
return table.concat(lines, "\n")
|
||||
end
|
||||
|
||||
--- Compress output to fit token budget
|
||||
---@param text string Text to compress
|
||||
---@param max_tokens number Token budget
|
||||
---@return string Compressed text
|
||||
function M.compress(text, max_tokens)
|
||||
local current = M.estimate_tokens(text)
|
||||
|
||||
if current <= max_tokens then
|
||||
return text
|
||||
end
|
||||
|
||||
-- Simple truncation with ellipsis
|
||||
local ratio = max_tokens / current
|
||||
local target_chars = math.floor(#text * ratio * 0.9) -- 10% buffer
|
||||
|
||||
return text:sub(1, target_chars) .. "\n...(truncated)"
|
||||
end
|
||||
|
||||
--- Get minimal context for quick lookups
|
||||
---@param nodes Node[] Nodes to format
|
||||
---@return string Minimal context
|
||||
function M.minimal(nodes)
|
||||
local items = {}
|
||||
|
||||
for _, node in ipairs(nodes) do
|
||||
table.insert(items, string.format("%s:%s", node.t, (node.c.s or ""):sub(1, 40)))
|
||||
end
|
||||
|
||||
return table.concat(items, " | ")
|
||||
end
|
||||
|
||||
return M
|
||||
166
lua/codetyper/brain/output/init.lua
Normal file
@@ -0,0 +1,166 @@
|
||||
--- Brain Output Coordinator
|
||||
--- Manages LLM context generation
|
||||
|
||||
local formatter = require("codetyper.brain.output.formatter")
|
||||
|
||||
local M = {}
|
||||
|
||||
-- Re-export formatter
|
||||
M.formatter = formatter
|
||||
|
||||
--- Default token budget
|
||||
local DEFAULT_MAX_TOKENS = 4000
|
||||
|
||||
--- Generate context for LLM prompt
|
||||
---@param opts? table Options
|
||||
---@return string Context string
|
||||
function M.generate(opts)
|
||||
opts = opts or {}
|
||||
|
||||
local brain = require("codetyper.brain")
|
||||
if not brain.is_initialized() then
|
||||
return ""
|
||||
end
|
||||
|
||||
-- Build query opts
|
||||
local query_opts = {
|
||||
query = opts.query,
|
||||
file = opts.file,
|
||||
types = opts.types,
|
||||
since = opts.since,
|
||||
limit = opts.limit or 30,
|
||||
depth = opts.depth or 2,
|
||||
max_tokens = opts.max_tokens or DEFAULT_MAX_TOKENS,
|
||||
}
|
||||
|
||||
-- Execute query
|
||||
local result = brain.query(query_opts)
|
||||
|
||||
if #result.nodes == 0 then
|
||||
return ""
|
||||
end
|
||||
|
||||
-- Format based on style
|
||||
local format = opts.format or "compact"
|
||||
|
||||
if format == "json" then
|
||||
return formatter.to_json(result, query_opts)
|
||||
elseif format == "natural" then
|
||||
return formatter.to_natural(result, query_opts)
|
||||
else
|
||||
return formatter.to_compact(result, query_opts)
|
||||
end
|
||||
end
|
||||

--- Generate context for a specific file
---@param filepath string File path
---@param opts? table Options
---@return string Context string
function M.for_file(filepath, opts)
  opts = opts or {}
  opts.file = filepath
  return M.generate(opts)
end

--- Generate context for current buffer
---@param opts? table Options
---@return string Context string
function M.for_current_buffer(opts)
  local filepath = vim.fn.expand("%:p")
  if filepath == "" then
    return ""
  end
  return M.for_file(filepath, opts)
end

--- Generate context for a query/prompt
---@param query string Query text
---@param opts? table Options
---@return string Context string
function M.for_query(query, opts)
  opts = opts or {}
  opts.query = query
  return M.generate(opts)
end

--- Get context for LLM system prompt
---@param opts? table Options
---@return string System context
function M.system_context(opts)
  opts = opts or {}
  opts.limit = opts.limit or 20
  opts.format = opts.format or "compact"

  local context = M.generate(opts)

  if context == "" then
    return ""
  end

  return [[
The following context contains learned patterns and conventions from this project:

]] .. context .. [[


Use this context to inform your responses, following established patterns and conventions.
]]
end

--- Get relevant context for code completion
---@param prefix string Code before cursor
---@param suffix string Code after cursor
---@param filepath string Current file
---@return string Context
function M.for_completion(prefix, suffix, filepath)
  -- Extract relevant terms from code
  local terms = {}

  -- Get function/class names
  for word in prefix:gmatch("[A-Z][a-zA-Z0-9]+") do
    table.insert(terms, word)
  end
  for word in prefix:gmatch("function%s+([a-zA-Z_][a-zA-Z0-9_]*)") do
    table.insert(terms, word)
  end

  local query = table.concat(terms, " ")

  return M.generate({
    query = query,
    file = filepath,
    limit = 15,
    max_tokens = 2000,
    format = "compact",
  })
end
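
-- Note: `suffix` is accepted for symmetry with completion sources but is not
-- used by the current term extraction. For a hypothetical prefix such as
-- "local Parser = require('x')\nfunction parse_file(" the two patterns above
-- collect "Parser" and "parse_file", so the query becomes "Parser parse_file".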

--- Check if context is available
---@return boolean
function M.has_context()
  local brain = require("codetyper.brain")
  if not brain.is_initialized() then
    return false
  end

  local stats = brain.stats()
  return stats.node_count > 0
end

--- Get context stats
---@return table Stats
function M.stats()
  local brain = require("codetyper.brain")
  if not brain.is_initialized() then
    return { available = false }
  end

  local stats = brain.stats()
  return {
    available = true,
    node_count = stats.node_count,
    edge_count = stats.edge_count,
  }
end

return M
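
A sketch of how this coordinator could feed a copilot-style completion request. Only output.system_context and output.for_completion come from the module above; build_prompt and the prompt layout are hypothetical:

local output = require("codetyper.brain.output")

---@param prefix string Text before the cursor
---@param suffix string Text after the cursor
---@return string Prompt body enriched with learned project context
local function build_prompt(prefix, suffix)
  local filepath = vim.fn.expand("%:p")
  -- Both calls fall back to "" when no brain context is available,
  -- so the prompt stays valid without it.
  local project_context = output.system_context({ limit = 10 })
  local local_context = output.for_completion(prefix, suffix, filepath)
  return project_context .. "\n" .. local_context .. "\n" .. prefix
end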
338
lua/codetyper/brain/storage.lua
Normal file
338
lua/codetyper/brain/storage.lua
Normal file
@@ -0,0 +1,338 @@
--- Brain Storage Layer
--- Cache + disk persistence with lazy loading

local utils = require("codetyper.utils")
local types = require("codetyper.brain.types")

local M = {}

--- In-memory cache keyed by project root
---@type table<string, table>
local cache = {}

--- Dirty flags for pending writes
---@type table<string, table<string, boolean>>
local dirty = {}

--- Debounce timers
---@type table<string, userdata>
local timers = {}

local DEBOUNCE_MS = 500

--- Get brain directory path for current project
---@param root? string Project root (defaults to current)
---@return string Brain directory path
function M.get_brain_dir(root)
  root = root or utils.get_project_root()
  return root .. "/.coder/brain"
end

--- Ensure brain directory structure exists
---@param root? string Project root
---@return boolean Success
function M.ensure_dirs(root)
  local brain_dir = M.get_brain_dir(root)
  local dirs = {
    brain_dir,
    brain_dir .. "/nodes",
    brain_dir .. "/indices",
    brain_dir .. "/deltas",
    brain_dir .. "/deltas/objects",
  }
  for _, dir in ipairs(dirs) do
    if not utils.ensure_dir(dir) then
      return false
    end
  end
  return true
end

--- Get file path for a storage key
---@param key string Storage key (e.g., "meta", "nodes.patterns", "deltas.objects.abc123")
---@param root? string Project root
---@return string File path
function M.get_path(key, root)
  local brain_dir = M.get_brain_dir(root)
  local parts = vim.split(key, ".", { plain = true })

  if #parts == 1 then
    return brain_dir .. "/" .. key .. ".json"
  elseif #parts == 2 then
    return brain_dir .. "/" .. parts[1] .. "/" .. parts[2] .. ".json"
  else
    return brain_dir .. "/" .. table.concat(parts, "/") .. ".json"
  end
end
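
-- Key-to-path mapping, for example (with brain_dir = <root>/.coder/brain):
--   "meta"                  -> <root>/.coder/brain/meta.json
--   "nodes.patterns"        -> <root>/.coder/brain/nodes/patterns.json
--   "deltas.objects.abc123" -> <root>/.coder/brain/deltas/objects/abc123.json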

--- Get cache for project
---@param root? string Project root
---@return table Project cache
local function get_cache(root)
  root = root or utils.get_project_root()
  if not cache[root] then
    cache[root] = {}
    dirty[root] = {}
  end
  return cache[root]
end

--- Read JSON from disk
---@param filepath string File path
---@return table|nil Data or nil on error
local function read_json(filepath)
  local content = utils.read_file(filepath)
  if not content or content == "" then
    return nil
  end
  local ok, data = pcall(vim.json.decode, content)
  if not ok then
    return nil
  end
  return data
end

--- Write JSON to disk
---@param filepath string File path
---@param data table Data to write
---@return boolean Success
local function write_json(filepath, data)
  local ok, json = pcall(vim.json.encode, data)
  if not ok then
    return false
  end
  return utils.write_file(filepath, json)
end

--- Load data from disk into cache
---@param key string Storage key
---@param root? string Project root
---@return table Data (an empty table when nothing is stored)
function M.load(key, root)
  root = root or utils.get_project_root()
  local project_cache = get_cache(root)

  -- Return cached if available
  if project_cache[key] ~= nil then
    return project_cache[key]
  end

  -- Load from disk
  local filepath = M.get_path(key, root)
  local data = read_json(filepath)

  -- Cache the result (an empty table when missing) to avoid repeated disk reads
  project_cache[key] = data or {}

  return project_cache[key]
end

--- Save data to cache and schedule disk write
---@param key string Storage key
---@param data table Data to save
---@param root? string Project root
---@param immediate? boolean Skip debounce
function M.save(key, data, root, immediate)
  root = root or utils.get_project_root()
  local project_cache = get_cache(root)

  -- Update cache
  project_cache[key] = data
  dirty[root][key] = true

  if immediate then
    M.flush(key, root)
    return
  end

  -- Debounced write
  local timer_key = root .. ":" .. key
  if timers[timer_key] then
    timers[timer_key]:stop()
  end

  timers[timer_key] = vim.defer_fn(function()
    M.flush(key, root)
    timers[timer_key] = nil
  end, DEBOUNCE_MS)
end
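
-- Writes are coalesced per (root, key): each save within DEBOUNCE_MS cancels
-- and restarts the pending timer, so a burst of saves to the same key ends in
-- a single disk write roughly 500ms after the last one. Passing
-- `immediate = true` (as save_delta does below) bypasses the debounce.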

--- Flush a key to disk immediately
---@param key string Storage key
---@param root? string Project root
---@return boolean Success
function M.flush(key, root)
  root = root or utils.get_project_root()
  local project_cache = get_cache(root)

  if not dirty[root][key] then
    return true
  end

  M.ensure_dirs(root)
  local filepath = M.get_path(key, root)
  local data = project_cache[key]

  if data == nil then
    -- Delete file if data is nil
    os.remove(filepath)
    dirty[root][key] = nil
    return true
  end

  local success = write_json(filepath, data)
  if success then
    dirty[root][key] = nil
  end
  return success
end

--- Flush all dirty keys to disk
---@param root? string Project root
function M.flush_all(root)
  root = root or utils.get_project_root()
  if not dirty[root] then
    return
  end

  for key, is_dirty in pairs(dirty[root]) do
    if is_dirty then
      M.flush(key, root)
    end
  end
end

--- Get meta.json data
---@param root? string Project root
---@return GraphMeta
function M.get_meta(root)
  local meta = M.load("meta", root)
  if not meta or not meta.v then
    meta = {
      v = types.SCHEMA_VERSION,
      head = nil,
      nc = 0,
      ec = 0,
      dc = 0,
    }
    M.save("meta", meta, root)
  end
  return meta
end

--- Update meta.json
---@param updates table Partial updates
---@param root? string Project root
function M.update_meta(updates, root)
  local meta = M.get_meta(root)
  for k, v in pairs(updates) do
    meta[k] = v
  end
  M.save("meta", meta, root)
end

--- Get nodes by type
---@param node_type string Node type (e.g., "patterns", "corrections")
---@param root? string Project root
---@return table<string, Node> Nodes indexed by ID
function M.get_nodes(node_type, root)
  return M.load("nodes." .. node_type, root) or {}
end

--- Save nodes by type
---@param node_type string Node type
---@param nodes table<string, Node> Nodes indexed by ID
---@param root? string Project root
function M.save_nodes(node_type, nodes, root)
  M.save("nodes." .. node_type, nodes, root)
end

--- Get graph adjacency
---@param root? string Project root
---@return Graph Graph data
function M.get_graph(root)
  local graph = M.load("graph", root)
  if not graph or not graph.adj then
    graph = {
      adj = {},
      radj = {},
    }
    M.save("graph", graph, root)
  end
  return graph
end

--- Save graph
---@param graph Graph Graph data
---@param root? string Project root
function M.save_graph(graph, root)
  M.save("graph", graph, root)
end

--- Get index by type
---@param index_type string Index type (e.g., "by_file", "by_time")
---@param root? string Project root
---@return table Index data
function M.get_index(index_type, root)
  return M.load("indices." .. index_type, root) or {}
end

--- Save index
---@param index_type string Index type
---@param data table Index data
---@param root? string Project root
function M.save_index(index_type, data, root)
  M.save("indices." .. index_type, data, root)
end

--- Get delta by hash
---@param hash string Delta hash
---@param root? string Project root
---@return Delta|nil Delta data
function M.get_delta(hash, root)
  return M.load("deltas.objects." .. hash, root)
end

--- Save delta
---@param delta Delta Delta data
---@param root? string Project root
function M.save_delta(delta, root)
  M.save("deltas.objects." .. delta.h, delta, root, true) -- Immediate write for deltas
end

--- Get HEAD delta hash
---@param root? string Project root
---@return string|nil HEAD hash
function M.get_head(root)
  local meta = M.get_meta(root)
  return meta.head
end

--- Set HEAD delta hash
---@param hash string|nil Delta hash
---@param root? string Project root
function M.set_head(hash, root)
  M.update_meta({ head = hash }, root)
end

--- Clear all caches (for testing)
function M.clear_cache()
  cache = {}
  dirty = {}
  for _, timer in pairs(timers) do
    if timer then
      timer:stop()
    end
  end
  timers = {}
end

--- Check if brain exists for project
---@param root? string Project root
---@return boolean
function M.exists(root)
  local brain_dir = M.get_brain_dir(root)
  return vim.fn.isdirectory(brain_dir) == 1
end

return M
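
A short usage sketch of the storage API above. The node and delta literals are illustrative (and omit some documented fields); the keys, functions, and resulting file layout follow directly from this module:

local storage = require("codetyper.brain.storage")

-- Read-modify-write a node bucket; the disk write is debounced (~500ms).
local patterns = storage.get_nodes("patterns")
patterns["n_1700000000_ab12cd34"] = {
  id = "n_1700000000_ab12cd34",
  t = "pat",
  c = { s = "Wrap vim.json.decode in pcall", d = "Decoding untrusted JSON can raise an error" },
}
storage.save_nodes("patterns", patterns)

-- Deltas skip the debounce and land immediately in
-- .coder/brain/deltas/objects/<hash>.json
storage.save_delta({ h = "ab12cd34", ts = os.time(), ch = {}, m = { msg = "example", trig = "manual" } })

-- Force any pending debounced writes to disk (e.g., on VimLeavePre).
storage.flush_all()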
175
lua/codetyper/brain/types.lua
Normal file
175
lua/codetyper/brain/types.lua
Normal file
@@ -0,0 +1,175 @@
---@meta
--- Brain Learning System Type Definitions
--- Optimized for LLM consumption with compact field names

local M = {}

---@alias NodeType "pat"|"cor"|"dec"|"con"|"fbk"|"ses"
-- pat = pattern, cor = correction, dec = decision
-- con = convention, fbk = feedback, ses = session

---@alias EdgeType "sem"|"file"|"temp"|"caus"|"sup"
-- sem = semantic, file = file-based, temp = temporal
-- caus = causal, sup = supersedes

---@alias DeltaOp "add"|"mod"|"del"

---@class NodeContent
---@field s string Summary (max 200 chars)
---@field d string Detail (full description)
---@field code? string Optional code snippet
---@field lang? string Language identifier

---@class NodeContext
---@field f? string File path (relative)
---@field fn? string Function name
---@field ln? number[] Line range [start, end]
---@field sym? string[] Symbol references

---@class NodeScores
---@field w number Weight (0-1)
---@field u number Usage count
---@field sr number Success rate (0-1)

---@class NodeTimestamps
---@field cr number Created (unix timestamp)
---@field up number Updated (unix timestamp)
---@field lu? number Last used (unix timestamp)

---@class NodeMeta
---@field src "auto"|"user"|"llm" Source of learning
---@field v number Version number
---@field dr? string[] Delta references

---@class Node
---@field id string Unique identifier (n_<timestamp>_<hash>)
---@field t NodeType Node type
---@field h string Content hash (8 chars)
---@field c NodeContent Content
---@field ctx NodeContext Context
---@field sc NodeScores Scores
---@field ts NodeTimestamps Timestamps
---@field m? NodeMeta Metadata
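
-- An example node shaped by the annotations above (all values illustrative):
--   {
--     id = "n_1700000000_ab12cd34",
--     t = "con",
--     h = "ab12cd34",
--     c = { s = "Use stylua defaults", d = "Format Lua files with stylua's default configuration" },
--     ctx = { f = "lua/codetyper/init.lua" },
--     sc = { w = 0.8, u = 3, sr = 1.0 },
--     ts = { cr = 1700000000, up = 1700000000 },
--     m = { src = "user", v = 1 },
--   }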

---@class EdgeProps
---@field w number Weight (0-1)
---@field dir "bi"|"fwd"|"bwd" Direction
---@field r? string Reason/description

---@class Edge
---@field id string Unique identifier (e_<source>_<target>)
---@field s string Source node ID
---@field t string Target node ID
---@field ty EdgeType Edge type
---@field p EdgeProps Properties
---@field ts number Created timestamp

---@class DeltaChange
---@field op DeltaOp Operation type
---@field path string JSON path (e.g., "nodes.pat.n_123")
---@field bh? string Before hash
---@field ah? string After hash
---@field diff? table Field-level diff

---@class DeltaMeta
---@field msg string Commit message
---@field trig string Trigger source
---@field sid? string Session ID

---@class Delta
---@field h string Hash (8 chars)
---@field p? string Parent hash
---@field ts number Timestamp
---@field ch DeltaChange[] Changes
---@field m DeltaMeta Metadata

---@class GraphMeta
---@field v number Schema version
---@field head? string Current HEAD delta hash
---@field nc number Node count
---@field ec number Edge count
---@field dc number Delta count

---@class AdjacencyEntry
---@field sem? string[] Semantic edges
---@field file? string[] File edges
---@field temp? string[] Temporal edges
---@field caus? string[] Causal edges
---@field sup? string[] Supersedes edges

---@class Graph
---@field meta GraphMeta Metadata
---@field adj table<string, AdjacencyEntry> Adjacency list
---@field radj table<string, AdjacencyEntry> Reverse adjacency

---@class QueryOpts
---@field query? string Text query
---@field file? string File path filter
---@field types? NodeType[] Node types to include
---@field since? number Timestamp filter
---@field limit? number Max results
---@field depth? number Traversal depth
---@field max_tokens? number Token budget

---@class QueryResult
---@field nodes Node[] Matched nodes
---@field edges Edge[] Related edges
---@field stats table Query statistics
---@field truncated boolean Whether results were truncated

---@class LLMContext
---@field schema string Schema version
---@field query string Original query
---@field learnings table[] Compact learning entries
---@field connections table[] Connection summaries
---@field tokens number Estimated token count

---@class LearnEvent
---@field type string Event type
---@field data table Event data
---@field file? string Related file
---@field timestamp number Event timestamp

---@class BrainConfig
---@field enabled boolean Enable brain system
---@field auto_learn boolean Auto-learn from events
---@field auto_commit boolean Auto-commit after threshold
---@field commit_threshold number Changes before auto-commit
---@field max_nodes number Max nodes before pruning
---@field max_deltas number Max delta history
---@field prune table Pruning config
---@field output table Output config

-- Type constants for runtime use
M.NODE_TYPES = {
  PATTERN = "pat",
  CORRECTION = "cor",
  DECISION = "dec",
  CONVENTION = "con",
  FEEDBACK = "fbk",
  SESSION = "ses",
}

M.EDGE_TYPES = {
  SEMANTIC = "sem",
  FILE = "file",
  TEMPORAL = "temp",
  CAUSAL = "caus",
  SUPERSEDES = "sup",
}

M.DELTA_OPS = {
  ADD = "add",
  MODIFY = "mod",
  DELETE = "del",
}

M.SOURCES = {
  AUTO = "auto",
  USER = "user",
  LLM = "llm",
}

M.SCHEMA_VERSION = 1

return M
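
The runtime constants mirror the type aliases above, so call sites can avoid raw strings; a small sketch (the is_correction helper is hypothetical):

local types = require("codetyper.brain.types")

---@param node Node
---@return boolean
local function is_correction(node)
  return node.t == types.NODE_TYPES.CORRECTION -- "cor"
end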