Add AI prompt tool for natural language graph operations
New MCP tool and portal UI for executing natural language instructions against the memory graph via Ollama (qwen3-coder:30b). Single LLM call generates a JSON action plan which is executed sequentially. Supports 8 action types: add_node, update_node, remove_node, add_edge, remove_edge, bulk_tag, reorganize, query. Actions can reference previous results via $result[N].field interpolation. Uses /api/chat with few-shot assistant example, format:json, and temperature:0 for reliable output.
This commit is contained in:
@@ -8,6 +8,7 @@ import QueryBar from './components/QueryBar';
|
||||
import LinkModal from './components/LinkModal';
|
||||
import Toast from './components/Toast';
|
||||
import MaintenancePanel from './components/MaintenancePanel';
|
||||
import PromptPanel from './components/PromptPanel';
|
||||
|
||||
export default function App() {
|
||||
const [selectedId, setSelectedId] = useState<string | null>(null);
|
||||
@@ -17,6 +18,7 @@ export default function App() {
|
||||
const [drawerOpen, setDrawerOpen] = useState(false);
|
||||
const [showQuery, setShowQuery] = useState(false);
|
||||
const [showMaintenance, setShowMaintenance] = useState(false);
|
||||
const [showPrompt, setShowPrompt] = useState(false);
|
||||
const qc = useQueryClient();
|
||||
|
||||
const refresh = useCallback(() => {
|
||||
@@ -38,13 +40,14 @@ export default function App() {
|
||||
const handler = (e: KeyboardEvent) => {
|
||||
if (e.key === 'Escape') {
|
||||
if (selectedId) setSelectedId(null);
|
||||
else if (showPrompt) setShowPrompt(false);
|
||||
else if (showQuery) setShowQuery(false);
|
||||
else if (drawerOpen) setDrawerOpen(false);
|
||||
}
|
||||
};
|
||||
window.addEventListener('keydown', handler);
|
||||
return () => window.removeEventListener('keydown', handler);
|
||||
}, [selectedId, drawerOpen, showQuery]);
|
||||
}, [selectedId, drawerOpen, showQuery, showPrompt]);
|
||||
|
||||
return (
|
||||
<div className="h-screen w-screen overflow-hidden relative">
|
||||
@@ -76,6 +79,13 @@ export default function App() {
|
||||
>
|
||||
?
|
||||
</button>
|
||||
<button
|
||||
onClick={() => setShowPrompt(!showPrompt)}
|
||||
className="w-12 h-12 rounded-full bg-amber-600/90 backdrop-blur border border-amber-500 text-white hover:bg-amber-500 shadow-lg flex items-center justify-center text-lg"
|
||||
title="AI Prompt"
|
||||
>
|
||||
✦
|
||||
</button>
|
||||
<button
|
||||
onClick={() => setShowMaintenance(!showMaintenance)}
|
||||
className="w-12 h-12 rounded-full bg-emerald-600/90 backdrop-blur border border-emerald-500 text-white hover:bg-emerald-500 shadow-lg flex items-center justify-center text-lg"
|
||||
@@ -141,6 +151,22 @@ export default function App() {
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Prompt panel — slides up from bottom */}
|
||||
{showPrompt && (
|
||||
<div className="fixed inset-0 z-40" onClick={() => setShowPrompt(false)}>
|
||||
<div className="absolute inset-0 bg-black/40" />
|
||||
<div
|
||||
className="absolute bottom-0 left-0 right-0 h-[70vh] animate-slide-in-up"
|
||||
onClick={(e) => e.stopPropagation()}
|
||||
>
|
||||
<PromptPanel
|
||||
onClose={() => setShowPrompt(false)}
|
||||
onDone={() => { refresh(); notify('Prompt executed'); }}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{showAddNode && (
|
||||
<AddNodeModal
|
||||
onClose={() => setShowAddNode(false)}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import type { CortexNode, CortexEdge, GraphData, NodeWithConnections, SearchResult, NodeKind, EdgeType, GroupedQueryResult } from './types';
|
||||
import type { CortexNode, CortexEdge, GraphData, NodeWithConnections, SearchResult, NodeKind, EdgeType, GroupedQueryResult, PromptResult } from './types';
|
||||
|
||||
const BASE = '/api';
|
||||
|
||||
@@ -50,4 +50,7 @@ export const api = {
|
||||
|
||||
runMaintenance: () =>
|
||||
request<Record<string, any>>('/maintenance/run', { method: 'POST' }),
|
||||
|
||||
prompt: (prompt: string) =>
|
||||
request<PromptResult>('/prompt', { method: 'POST', body: JSON.stringify({ prompt }) }),
|
||||
};
|
||||
|
||||
137
portal/src/components/PromptPanel.tsx
Normal file
137
portal/src/components/PromptPanel.tsx
Normal file
@@ -0,0 +1,137 @@
|
||||
import { useState } from 'react';
|
||||
import { api } from '../api';
|
||||
import type { PromptResult } from '../types';
|
||||
|
||||
interface Props {
  onClose: () => void; // dismiss the panel without running anything
  onDone: () => void;  // invoked after a successful run so the parent can refresh the graph
}

/**
 * Bottom-sheet panel for the AI prompt feature.
 *
 * Takes a natural-language instruction, POSTs it to /api/prompt via
 * `api.prompt`, and renders the returned PromptResult: a success/partial
 * summary, the model's reasoning, and a per-action execution log.
 * `onDone` fires only when the whole plan succeeded (`result.success`).
 */
export default function PromptPanel({ onClose, onDone }: Props) {
  const [text, setText] = useState('');
  const [result, setResult] = useState<PromptResult | null>(null);
  const [loading, setLoading] = useState(false);
  const [error, setError] = useState<string | null>(null);

  // Submit the instruction. Clears previous result/error first so the UI
  // never shows stale output next to a new run.
  const submit = async () => {
    if (!text.trim()) return;
    setLoading(true);
    setError(null);
    setResult(null);
    try {
      const data = await api.prompt(text);
      setResult(data);
      // Only notify the parent when every action completed; a partial
      // failure still renders its log below but does not trigger onDone.
      if (data.success) onDone();
    } catch (err: any) {
      setError(err.message || 'Request failed');
    } finally {
      setLoading(false);
    }
  };

  return (
    <div className="h-full flex flex-col bg-gray-900/95 backdrop-blur border-t border-amber-500/30 rounded-t-xl">
      {/* Header */}
      <div className="flex items-center justify-between px-4 py-3 border-b border-gray-700/50">
        <span className="text-amber-300 font-medium text-sm">AI Prompt</span>
        <button onClick={onClose} className="text-gray-500 hover:text-gray-300 text-lg">×</button>
      </div>

      {/* Input — Enter submits; disabled while a run is in flight */}
      <div className="px-4 py-3 flex gap-2">
        <input
          value={text}
          onChange={e => setText(e.target.value)}
          onKeyDown={e => e.key === 'Enter' && !loading && submit()}
          placeholder='e.g. "create a decision node about using Redis for caching"'
          className="flex-1 bg-gray-800 border border-gray-600 rounded-lg px-3 py-2 text-sm text-gray-200 placeholder-gray-500 focus:outline-none focus:border-amber-500"
          autoFocus
          disabled={loading}
        />
        <button
          onClick={submit}
          disabled={loading || !text.trim()}
          className="px-4 py-2 bg-amber-600 hover:bg-amber-500 disabled:opacity-50 rounded-lg text-sm text-white font-medium min-w-[80px]"
        >
          {loading ? (
            <span className="inline-block animate-pulse">Running...</span>
          ) : 'Run'}
        </button>
      </div>

      {/* Results */}
      <div className="flex-1 overflow-y-auto px-4 pb-4">
        {/* Empty state: example instructions */}
        {!result && !loading && !error && (
          <div className="text-gray-500 text-sm mt-4 space-y-1">
            <p>Give a natural language instruction to modify the graph:</p>
            <p className="text-gray-600 ml-2">"create a decision node about using Redis for caching"</p>
            <p className="text-gray-600 ml-2">"tag all task nodes with 'backlog'"</p>
            <p className="text-gray-600 ml-2">"create a component for auth and link it to the API gateway"</p>
          </div>
        )}

        {loading && (
          <div className="text-amber-400/70 text-sm mt-4 animate-pulse">
            Generating and executing action plan...
          </div>
        )}

        {/* Transport/HTTP errors (thrown by api.prompt), as opposed to
            per-action failures which arrive inside `result` */}
        {error && (
          <div className="mt-4 p-3 rounded-lg bg-red-900/30 border border-red-700/50 text-red-300 text-sm">
            {error}
          </div>
        )}

        {result && (
          <div className="space-y-3 mt-2">
            {/* Summary — green when fully successful, yellow when some actions failed */}
            <div className={`p-3 rounded-lg border text-sm ${
              result.success
                ? 'bg-green-900/20 border-green-700/50 text-green-300'
                : 'bg-yellow-900/20 border-yellow-700/50 text-yellow-300'
            }`}>
              <div className="font-medium mb-1">{result.success ? 'Completed' : 'Completed with errors'}</div>
              <div>{result.summary}</div>
            </div>

            {/* Reasoning */}
            {result.reasoning && (
              <div className="text-xs text-gray-400 bg-gray-800/50 rounded-lg p-3">
                <span className="text-gray-500 font-medium">Reasoning: </span>
                {result.reasoning}
              </div>
            )}

            {/* Execution log — one row per action, ✓/✗ per status */}
            {result.executionLog.length > 0 && (
              <div className="space-y-1">
                <div className="text-xs text-gray-500 font-medium uppercase tracking-wide">Execution Log</div>
                {result.executionLog.map((entry, i) => (
                  <div
                    key={i}
                    className={`flex items-start gap-2 p-2 rounded text-sm ${
                      entry.status === 'completed' ? 'bg-gray-800/30' : 'bg-red-900/20'
                    }`}
                  >
                    <span className={`mt-0.5 text-xs ${
                      entry.status === 'completed' ? 'text-green-500' : 'text-red-500'
                    }`}>
                      {entry.status === 'completed' ? '✓' : '✗'}
                    </span>
                    <div className="flex-1 min-w-0">
                      <div className="text-gray-300 truncate">{entry.description}</div>
                      <div className="text-xs text-gray-500">{entry.action}</div>
                      {entry.error && (
                        <div className="text-xs text-red-400 mt-1">{entry.error}</div>
                      )}
                    </div>
                  </div>
                ))}
              </div>
            )}
          </div>
        )}
      </div>
    </div>
  );
}
|
||||
@@ -54,3 +54,18 @@ export interface GroupedQueryResult {
|
||||
groups: ResultGroup[];
|
||||
totalResults: number;
|
||||
}
|
||||
|
||||
// One entry in the prompt execution log. Mirrors the server-side
// `ActionResult` interface in src/core/prompt/types.ts — keep in sync.
export interface ActionResultEntry {
  action: string;                  // action type, e.g. 'add_node'
  description: string;             // model-provided human-readable step description
  status: 'completed' | 'failed';
  result?: any;                    // action output when completed
  error?: string;                  // error message when failed
}

// Response shape of POST /api/prompt. Mirrors the server-side
// `PromptResult` in src/core/prompt/types.ts — keep in sync.
export interface PromptResult {
  success: boolean;                // true only if every action completed
  reasoning: string;               // model's stated plan rationale (may be '')
  executionLog: ActionResultEntry[];
  summary: string;                 // aggregate summary, e.g. "2 nodes created, 1 failed"
}
|
||||
|
||||
166
src/core/prompt/executor.ts
Normal file
166
src/core/prompt/executor.ts
Normal file
@@ -0,0 +1,166 @@
|
||||
import { ActionPlan, ActionResult } from './types';
|
||||
import { query, addNode, updateNode, removeNode, addEdge, removeEdge } from '../store';
|
||||
import { getNode, findNodeByPrefix, listNodes } from '../store';
|
||||
import { getDb } from '../db';
|
||||
import { NodeKind, EdgeType } from '../../types';
|
||||
|
||||
function resolveRefs(value: any, results: any[]): any {
|
||||
if (typeof value === 'string') {
|
||||
return value.replace(/\$result\[(\d+)\]((?:\.\w+|\[\d+\])*)/g, (_match, idx, path) => {
|
||||
let obj = results[parseInt(idx)];
|
||||
if (obj === undefined) return _match;
|
||||
// Parse path like .id, [0].node.id
|
||||
const parts = path.match(/\.(\w+)|\[(\d+)\]/g) || [];
|
||||
for (const part of parts) {
|
||||
if (obj == null) return _match;
|
||||
if (part.startsWith('.')) {
|
||||
obj = obj[part.slice(1)];
|
||||
} else {
|
||||
const i = parseInt(part.slice(1, -1));
|
||||
obj = obj[i];
|
||||
}
|
||||
}
|
||||
return typeof obj === 'string' ? obj : JSON.stringify(obj);
|
||||
});
|
||||
}
|
||||
if (Array.isArray(value)) return value.map(v => resolveRefs(v, results));
|
||||
if (value && typeof value === 'object') {
|
||||
const out: Record<string, any> = {};
|
||||
for (const [k, v] of Object.entries(value)) {
|
||||
out[k] = resolveRefs(v, results);
|
||||
}
|
||||
return out;
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
// Allowed node kinds / edge types for plan validation. These must stay in
// sync with the NodeKind/EdgeType unions in ../../types and with the ACTIONS
// list in templates.ts — a mismatch silently rejects valid model output.
const VALID_KINDS = new Set(['memory', 'component', 'task', 'decision']);
const VALID_EDGE_TYPES = new Set(['depends_on', 'contains', 'implements', 'blocked_by', 'subtask_of', 'relates_to', 'supersedes', 'about']);
|
||||
|
||||
/**
 * Execute a single resolved plan action against the store.
 *
 * Returns a plain-JSON summary of the outcome (used both for the execution
 * log and as `$result[N]` input to later actions). Throws Error on any
 * validation failure or missing entity; the caller (executePlan) converts
 * throws into 'failed' log entries.
 *
 * NOTE(review): bulk_tag and reorganize resolve ids via
 * getNode ?? findNodeByPrefix, but update_node / remove_node / add_edge use
 * the raw id — presumably those store functions resolve prefixes themselves;
 * confirm against ../store.
 */
async function executeAction(type: string, params: Record<string, any>): Promise<any> {
  switch (type) {
    case 'query': {
      // Semantic search; results are trimmed to log-friendly plain objects.
      const results = await query(params.text, {
        kind: params.kind as NodeKind,
        limit: params.limit,
      });
      return results.map(r => ({ node: { id: r.node.id, kind: r.node.kind, title: r.node.title, content: r.node.content, tags: r.node.tags }, score: r.score }));
    }
    case 'add_node': {
      // Validate kind before touching the store so bad plans fail fast.
      if (!VALID_KINDS.has(params.kind)) throw new Error(`Invalid kind: ${params.kind}`);
      const node = await addNode({
        kind: params.kind as NodeKind,
        title: params.title,
        content: params.content,
        tags: params.tags,
        status: params.status,
      });
      return { id: node.id, kind: node.kind, title: node.title, content: node.content, tags: node.tags, status: node.status };
    }
    case 'update_node': {
      // Partial update — undefined fields are passed through; presumably the
      // store ignores them rather than clearing values (confirm in ../store).
      const node = await updateNode(params.id, {
        title: params.title,
        content: params.content,
        status: params.status,
        tags: params.tags,
      });
      if (!node) throw new Error(`Node not found: ${params.id}`);
      return { id: node.id, kind: node.kind, title: node.title, content: node.content, tags: node.tags, status: node.status };
    }
    case 'remove_node': {
      // Soft delete only (second arg false — presumably the hard-delete flag;
      // confirm against removeNode's signature in ../store)
      const ok = removeNode(params.id, false);
      if (!ok) throw new Error(`Node not found: ${params.id}`);
      return { ok: true };
    }
    case 'add_edge': {
      if (!VALID_EDGE_TYPES.has(params.type)) throw new Error(`Invalid edge type: ${params.type}`);
      const edge = addEdge(params.fromId, params.toId, params.type as EdgeType);
      return { id: edge.id, fromId: edge.fromId, toId: edge.toId, type: edge.type };
    }
    case 'remove_edge': {
      const ok = removeEdge(params.id);
      if (!ok) throw new Error(`Edge not found: ${params.id}`);
      return { ok: true };
    }
    case 'bulk_tag': {
      // Targets come either from an explicit id list (prefix lookup allowed,
      // unresolvable ids silently dropped) or from a kind/status/tags filter.
      let targets: { id: string; tags: string[] }[];
      if (params.nodeIds?.length) {
        targets = params.nodeIds.map((id: string) => {
          const n = getNode(id) ?? findNodeByPrefix(id);
          return n ? { id: n.id, tags: n.tags } : null;
        }).filter(Boolean);
      } else if (params.filter) {
        targets = listNodes({
          kind: params.filter.kind as NodeKind,
          status: params.filter.status,
          tags: params.filter.tags,
        }).map(n => ({ id: n.id, tags: n.tags }));
      } else {
        throw new Error('bulk_tag requires nodeIds or filter');
      }
      let modified = 0;
      for (const t of targets) {
        let newTags: string[];
        if (params.action === 'add') {
          // Set-union keeps existing tags and dedupes additions.
          newTags = [...new Set([...t.tags, ...params.tags])];
        } else {
          // Any non-'add' action is treated as remove.
          newTags = t.tags.filter((tag: string) => !params.tags.includes(tag));
        }
        // Skip no-op writes; JSON comparison is order-sensitive, which is
        // fine here since newTags is derived from t.tags in order.
        if (JSON.stringify(newTags) !== JSON.stringify(t.tags)) {
          await updateNode(t.id, { tags: newTags });
          modified++;
        }
      }
      return { action: params.action, tags: params.tags, modified, total: targets.length };
    }
    case 'reorganize': {
      // Re-parent a node: drop every incoming 'contains' edge, then attach it
      // under the new parent. Both ids accept prefixes.
      const node = getNode(params.nodeId) ?? findNodeByPrefix(params.nodeId);
      const parent = getNode(params.newParentId) ?? findNodeByPrefix(params.newParentId);
      if (!node) throw new Error(`Node not found: ${params.nodeId}`);
      if (!parent) throw new Error(`Parent not found: ${params.newParentId}`);
      const db = getDb();
      const incomingContains = db.prepare('SELECT id FROM edges WHERE to_id = ? AND type = ?').all(node.id, 'contains') as any[];
      for (const e of incomingContains) {
        removeEdge(e.id);
      }
      const edge = addEdge(parent.id, node.id, 'contains');
      return { moved: node.id, newParent: parent.id, edge: edge.id };
    }
    default:
      throw new Error(`Unknown action type: ${type}`);
  }
}
|
||||
|
||||
export async function executePlan(plan: ActionPlan): Promise<{ log: ActionResult[]; summary: string }> {
|
||||
const log: ActionResult[] = [];
|
||||
const results: any[] = [];
|
||||
const counts: Record<string, number> = {};
|
||||
|
||||
for (const action of plan.actions) {
|
||||
try {
|
||||
const resolvedParams = resolveRefs(action.params, results);
|
||||
const result = await executeAction(action.type, resolvedParams);
|
||||
results.push(result);
|
||||
log.push({ action: action.type, description: action.description, status: 'completed', result });
|
||||
counts[action.type] = (counts[action.type] || 0) + 1;
|
||||
} catch (err: any) {
|
||||
results.push(null);
|
||||
log.push({ action: action.type, description: action.description, status: 'failed', error: err.message });
|
||||
}
|
||||
}
|
||||
|
||||
const parts: string[] = [];
|
||||
const labels: Record<string, string> = {
|
||||
query: 'queries', add_node: 'nodes created', update_node: 'nodes updated',
|
||||
remove_node: 'nodes removed', add_edge: 'edges created', remove_edge: 'edges removed',
|
||||
bulk_tag: 'bulk tag ops', reorganize: 'reorganizations',
|
||||
};
|
||||
for (const [type, count] of Object.entries(counts)) {
|
||||
parts.push(`${count} ${labels[type] || type}`);
|
||||
}
|
||||
const failed = log.filter(l => l.status === 'failed').length;
|
||||
if (failed > 0) parts.push(`${failed} failed`);
|
||||
|
||||
return { log, summary: parts.join(', ') || 'No actions executed' };
|
||||
}
|
||||
105
src/core/prompt/interpreter.ts
Normal file
105
src/core/prompt/interpreter.ts
Normal file
@@ -0,0 +1,105 @@
|
||||
import { isGenAvailable } from '../search/ollamaGen';
|
||||
import { query } from '../store';
|
||||
import { buildMessages } from './templates';
|
||||
import { ActionPlanSchema, PromptResult } from './types';
|
||||
import { executePlan } from './executor';
|
||||
|
||||
// Ollama endpoint and generation model — overridable via environment.
const OLLAMA_URL = process.env.OLLAMA_URL || 'http://localhost:11434';
const GEN_MODEL = process.env.OLLAMA_GEN_MODEL || 'qwen3-coder:30b';
const PROMPT_TIMEOUT = 120000; // 2 minutes for large prompts

// Build a failed PromptResult with an explanatory summary. Used for every
// pre-execution failure mode (Ollama down, HTTP error, timeout, bad JSON).
function fail(summary: string): PromptResult {
  return { success: false, reasoning: '', executionLog: [], summary };
}
|
||||
|
||||
export async function interpretAndExecute(prompt: string): Promise<PromptResult> {
|
||||
// Check Ollama availability
|
||||
if (!(await isGenAvailable())) {
|
||||
return fail('Ollama is not available. Make sure Ollama is running and the model is pulled (e.g. `ollama pull llama3`).');
|
||||
}
|
||||
|
||||
// Gather context
|
||||
const context = await query(prompt, { limit: 10 });
|
||||
|
||||
// Build and send chat messages
|
||||
const messages = buildMessages(prompt, context);
|
||||
|
||||
let raw: string;
|
||||
try {
|
||||
const res = await fetch(`${OLLAMA_URL}/api/chat`, {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ model: GEN_MODEL, messages, stream: false, format: 'json', options: { num_predict: 4096, temperature: 0 } }),
|
||||
signal: AbortSignal.timeout(PROMPT_TIMEOUT),
|
||||
});
|
||||
if (!res.ok) {
|
||||
const body = await res.text().catch(() => '');
|
||||
return fail(`Ollama returned HTTP ${res.status}: ${body.slice(0, 200)}`);
|
||||
}
|
||||
const data = await res.json() as { message?: { content?: string } };
|
||||
if (!data.message?.content?.trim()) {
|
||||
return fail('Ollama returned an empty response.');
|
||||
}
|
||||
raw = data.message.content.trim();
|
||||
} catch (err: any) {
|
||||
if (err.name === 'TimeoutError' || err.name === 'AbortError') {
|
||||
return fail(`Ollama timed out after ${PROMPT_TIMEOUT / 1000}s. Try a simpler prompt or check if the model is loaded.`);
|
||||
}
|
||||
return fail(`Ollama request failed: ${err.message}`);
|
||||
}
|
||||
|
||||
// Strip thinking tags (qwen3 models output <think>...</think> before the answer)
|
||||
let jsonStr = raw.replace(/<think>[\s\S]*?<\/think>/g, '').trim();
|
||||
|
||||
// Strip markdown fences if present
|
||||
const fenceMatch = jsonStr.match(/```(?:json)?\s*([\s\S]*?)```/);
|
||||
if (fenceMatch) jsonStr = fenceMatch[1].trim();
|
||||
|
||||
// Try to extract JSON object if there's surrounding text
|
||||
if (!jsonStr.startsWith('{')) {
|
||||
const objMatch = jsonStr.match(/(\{[\s\S]*\})/);
|
||||
if (objMatch) jsonStr = objMatch[1];
|
||||
}
|
||||
|
||||
let parsed: any;
|
||||
try {
|
||||
parsed = JSON.parse(jsonStr);
|
||||
} catch {
|
||||
return {
|
||||
success: false,
|
||||
reasoning: '',
|
||||
executionLog: [],
|
||||
summary: `Failed to parse LLM response as JSON. Raw response: ${raw.slice(0, 500)}`,
|
||||
};
|
||||
}
|
||||
|
||||
// Validate — be lenient: filter out unknown action types instead of rejecting the whole plan
|
||||
const VALID_TYPES = new Set(['query', 'add_node', 'update_node', 'remove_node', 'add_edge', 'remove_edge', 'bulk_tag', 'reorganize']);
|
||||
|
||||
if (!parsed.actions || !Array.isArray(parsed.actions)) {
|
||||
return fail(`Invalid action plan: missing actions array. Raw: ${raw.slice(0, 300)}`);
|
||||
}
|
||||
|
||||
const validActions = parsed.actions.filter((a: any) => a && VALID_TYPES.has(a.type));
|
||||
const skipped = parsed.actions.length - validActions.length;
|
||||
|
||||
if (validActions.length === 0) {
|
||||
return fail(`No valid actions in plan. The model used unsupported action types: ${parsed.actions.map((a: any) => a?.type).join(', ')}`);
|
||||
}
|
||||
|
||||
const plan = {
|
||||
reasoning: parsed.reasoning || '',
|
||||
actions: validActions.map((a: any) => ({ type: a.type, params: a.params || {}, description: a.description || '' })),
|
||||
};
|
||||
|
||||
// Execute
|
||||
const { log, summary } = await executePlan(plan);
|
||||
const anyFailed = log.some(l => l.status === 'failed');
|
||||
|
||||
return {
|
||||
success: !anyFailed,
|
||||
reasoning: plan.reasoning,
|
||||
executionLog: log,
|
||||
summary: summary + (skipped > 0 ? ` (${skipped} unsupported actions skipped)` : ''),
|
||||
};
|
||||
}
|
||||
49
src/core/prompt/templates.ts
Normal file
49
src/core/prompt/templates.ts
Normal file
@@ -0,0 +1,49 @@
|
||||
import { SearchResult } from '../../types';
|
||||
|
||||
export function buildMessages(userPrompt: string, context: SearchResult[]): { role: string; content: string }[] {
|
||||
const system = `You produce JSON action plans for a knowledge graph.
|
||||
|
||||
ACTIONS:
|
||||
- add_node: params { kind, title, content?, tags?, status? } → returns { id }. kind must be: memory, component, task, or decision.
|
||||
- update_node: params { id, title?, content?, status?, tags? }
|
||||
- remove_node: params { id }
|
||||
- add_edge: params { fromId, toId, type }. type must be: depends_on, contains, implements, blocked_by, subtask_of, relates_to, supersedes, about
|
||||
- remove_edge: params { id }
|
||||
- bulk_tag: params { action: "add"|"remove", tags, nodeIds? }
|
||||
- reorganize: params { nodeId, newParentId }
|
||||
|
||||
Use "$result[N].id" to reference the id from action N's result.
|
||||
|
||||
IMPORTANT: To group nodes, first add_node to create a parent, then reorganize each node under it. Use node IDs from context — do NOT query.
|
||||
|
||||
Output JSON: {"reasoning":"...","actions":[{"type":"...","params":{...},"description":"..."}]}`;
|
||||
|
||||
// Few-shot example showing the exact grouping pattern
|
||||
const exampleUser = `AVAILABLE NODES:
|
||||
1. id="aaa-111" kind=memory title="DB Config" tags=[config]
|
||||
2. id="bbb-222" kind=memory title="API Config" tags=[config]
|
||||
3. id="ccc-333" kind=memory title="Unrelated Note" tags=[misc]
|
||||
|
||||
INSTRUCTION: group the config nodes /no_think`;
|
||||
|
||||
const exampleAssistant = `{"reasoning":"Create a parent Configuration group and move the two config nodes under it.","actions":[{"type":"add_node","params":{"kind":"memory","title":"Configuration","content":"Parent group for configuration nodes.","tags":["configuration","group"]},"description":"Create Configuration parent node"},{"type":"reorganize","params":{"nodeId":"aaa-111","newParentId":"$result[0].id"},"description":"Move DB Config under Configuration"},{"type":"reorganize","params":{"nodeId":"bbb-222","newParentId":"$result[0].id"},"description":"Move API Config under Configuration"}]}`;
|
||||
|
||||
let contextList = '';
|
||||
if (context.length > 0) {
|
||||
contextList = context.map((r, i) =>
|
||||
`${i + 1}. id="${r.node.id}" kind=${r.node.kind} title="${r.node.title}"${r.node.tags.length ? ` tags=[${r.node.tags.join(',')}]` : ''}`
|
||||
).join('\n');
|
||||
}
|
||||
|
||||
const user = `AVAILABLE NODES:
|
||||
${contextList || '(none)'}
|
||||
|
||||
INSTRUCTION: ${userPrompt} /no_think`;
|
||||
|
||||
return [
|
||||
{ role: 'system', content: system },
|
||||
{ role: 'user', content: exampleUser },
|
||||
{ role: 'assistant', content: exampleAssistant },
|
||||
{ role: 'user', content: user },
|
||||
];
|
||||
}
|
||||
35
src/core/prompt/types.ts
Normal file
35
src/core/prompt/types.ts
Normal file
@@ -0,0 +1,35 @@
|
||||
import { z } from 'zod/v3';

// The 8 action types the interpreter may accept. Must stay in sync with the
// executor's switch statement and the ACTIONS list in templates.ts.
export const ActionTypeSchema = z.enum([
  'query', 'add_node', 'update_node', 'remove_node',
  'add_edge', 'remove_edge', 'bulk_tag', 'reorganize',
]);
export type ActionType = z.infer<typeof ActionTypeSchema>;

// One step of a plan: type + free-form params (validated per-type at
// execution time, not here) + model-provided description for the log.
export const ActionSchema = z.object({
  type: ActionTypeSchema,
  params: z.record(z.any()),
  description: z.string(),
});
export type Action = z.infer<typeof ActionSchema>;

// A full plan: the model's rationale plus at most 20 sequential actions.
export const ActionPlanSchema = z.object({
  reasoning: z.string(),
  actions: z.array(ActionSchema).max(20),
});
export type ActionPlan = z.infer<typeof ActionPlanSchema>;

// Execution-log entry for one action. Mirrored by the portal's
// `ActionResultEntry` in portal/src/types — keep in sync.
export interface ActionResult {
  action: string;                  // action type, e.g. 'add_node'
  description: string;             // step description from the plan
  status: 'completed' | 'failed';
  result?: any;                    // action output when completed
  error?: string;                  // error message when failed
}

// Top-level result returned by interpretAndExecute and by POST /api/prompt.
// Mirrored by the portal's `PromptResult` in portal/src/types — keep in sync.
export interface PromptResult {
  success: boolean;                // true only if every action completed
  reasoning: string;               // plan rationale (may be '')
  executionLog: ActionResult[];
  summary: string;                 // aggregate summary line
}
|
||||
@@ -1,5 +1,5 @@
|
||||
const OLLAMA_URL = process.env.OLLAMA_URL || 'http://localhost:11434';
|
||||
const GEN_MODEL = process.env.OLLAMA_GEN_MODEL || 'llama3';
|
||||
const GEN_MODEL = process.env.OLLAMA_GEN_MODEL || 'llama3.2';
|
||||
|
||||
let _available: boolean | null = null;
|
||||
|
||||
|
||||
@@ -367,6 +367,21 @@ server.tool(
|
||||
}
|
||||
);
|
||||
|
||||
// --- memory_prompt ---
// NOTE: mid-file import — ES module imports are hoisted, so this is valid,
// but it deviates from the file's top-of-file import convention.
import { interpretAndExecute } from '../core/prompt/interpreter';

// MCP tool: run a natural-language instruction through the LLM planner and
// execute the resulting action plan. Always returns a serialized
// PromptResult (interpretAndExecute never throws; failures are encoded in
// the result's success/summary fields).
server.tool(
  'memory_prompt',
  'Execute a natural language instruction against the memory graph. Uses AI to generate and run an action plan. Returns log of actions performed.',
  {
    prompt: z.string().describe('Natural language instruction'),
  },
  async ({ prompt }) => {
    const result = await interpretAndExecute(prompt);
    return { content: [{ type: 'text' as const, text: serialize(result) }] };
  }
);
|
||||
|
||||
async function main() {
|
||||
const transport = new StdioServerTransport();
|
||||
await server.connect(transport);
|
||||
|
||||
@@ -166,4 +166,19 @@ router.post('/maintenance/run', async (_req: Request, res: Response) => {
|
||||
}
|
||||
});
|
||||
|
||||
// Prompt — AI-driven natural language instruction.
// Validates the body, delegates to interpretAndExecute, and returns the
// PromptResult as JSON. Note interpretAndExecute never throws — it encodes
// failures in the result — so the 500 path only covers unexpected errors
// (e.g. the dynamic import failing).
router.post('/prompt', async (req: Request, res: Response) => {
  try {
    const { prompt } = req.body;
    if (!prompt || typeof prompt !== 'string') {
      return res.status(400).json({ error: 'prompt is required' });
    }
    // Dynamic import — presumably to defer loading the prompt/Ollama module
    // until first use; confirm intent. Node caches it after the first call.
    const { interpretAndExecute } = await import('../core/prompt/interpreter');
    const result = await interpretAndExecute(prompt);
    res.json(result);
  } catch (err: any) {
    res.status(500).json({ error: err.message });
  }
});
|
||||
|
||||
export default router;
|
||||
|
||||
Reference in New Issue
Block a user