New MCP tool and portal UI for executing natural language instructions against the memory graph via Ollama (qwen3-coder:30b). Single LLM call generates a JSON action plan which is executed sequentially. Supports 8 action types: add_node, update_node, remove_node, add_edge, remove_edge, bulk_tag, reorganize, query. Actions can reference previous results via $result[N].field interpolation. Uses /api/chat with few-shot assistant example, format:json, and temperature:0 for reliable output.
185 lines
5.8 KiB
TypeScript
import { Router, Request, Response } from 'express';
|
|
import { addNode, getNode, listNodes, updateNode, removeNode, addEdge, removeEdge, query } from '../core/store';
|
|
import { getConnections, buildTree } from '../core/graph';
|
|
import { getDb } from '../core/db';
|
|
import { determineGroupingStrategy, groupResults } from './queryOrganizer';
|
|
import { getLastReport, markDirty, runMaintenance } from './heartbeat';
|
|
|
|
const router = Router();
|
|
|
|
function param(req: Request, name: string): string {
|
|
const v = req.params[name];
|
|
return Array.isArray(v) ? v[0] : v;
|
|
}
|
|
|
|
// List nodes
|
|
router.get('/nodes', (req: Request, res: Response) => {
|
|
try {
|
|
const options: any = {};
|
|
if (req.query.kind) options.kind = req.query.kind as string;
|
|
if (req.query.status) options.status = req.query.status as string;
|
|
if (req.query.tags) options.tags = (req.query.tags as string).split(',');
|
|
if (req.query.limit) options.limit = parseInt(req.query.limit as string);
|
|
if (req.query.includeStale === 'true') options.includeStale = true;
|
|
const nodes = listNodes(options);
|
|
res.json(nodes.map(n => ({ ...n, embedding: undefined })));
|
|
} catch (err: any) {
|
|
res.status(500).json({ error: err.message });
|
|
}
|
|
});
|
|
|
|
// Get single node + connections
|
|
router.get('/nodes/:id', (req: Request, res: Response) => {
|
|
try {
|
|
const node = getNode(param(req, 'id'));
|
|
if (!node) return res.status(404).json({ error: 'Node not found' });
|
|
const connections = getConnections(param(req, 'id'));
|
|
res.json({ ...node, embedding: undefined, connections });
|
|
} catch (err: any) {
|
|
res.status(500).json({ error: err.message });
|
|
}
|
|
});
|
|
|
|
// Add node
|
|
router.post('/nodes', async (req: Request, res: Response) => {
|
|
try {
|
|
const node = await addNode(req.body);
|
|
res.status(201).json({ ...node, embedding: undefined });
|
|
} catch (err: any) {
|
|
res.status(400).json({ error: err.message });
|
|
}
|
|
});
|
|
|
|
// Update node
|
|
router.patch('/nodes/:id', async (req: Request, res: Response) => {
|
|
try {
|
|
const node = await updateNode(param(req, 'id'), req.body);
|
|
if (!node) return res.status(404).json({ error: 'Node not found' });
|
|
res.json({ ...node, embedding: undefined });
|
|
} catch (err: any) {
|
|
res.status(400).json({ error: err.message });
|
|
}
|
|
});
|
|
|
|
// Delete node
|
|
router.delete('/nodes/:id', (req: Request, res: Response) => {
|
|
try {
|
|
const hard = req.query.hard === 'true';
|
|
const ok = removeNode(param(req, 'id'), hard);
|
|
if (!ok) return res.status(404).json({ error: 'Node not found' });
|
|
res.json({ ok: true });
|
|
} catch (err: any) {
|
|
res.status(500).json({ error: err.message });
|
|
}
|
|
});
|
|
|
|
// Add edge
|
|
router.post('/edges', (req: Request, res: Response) => {
|
|
try {
|
|
const { fromId, toId, type, metadata } = req.body;
|
|
const edge = addEdge(fromId, toId, type, metadata);
|
|
res.status(201).json(edge);
|
|
} catch (err: any) {
|
|
res.status(400).json({ error: err.message });
|
|
}
|
|
});
|
|
|
|
// Delete edge
|
|
router.delete('/edges/:id', (req: Request, res: Response) => {
|
|
try {
|
|
const ok = removeEdge(param(req, 'id'));
|
|
if (!ok) return res.status(404).json({ error: 'Edge not found' });
|
|
res.json({ ok: true });
|
|
} catch (err: any) {
|
|
res.status(500).json({ error: err.message });
|
|
}
|
|
});
|
|
|
|
// Graph — returns nodes + edges for React Flow
|
|
router.get('/graph', (_req: Request, res: Response) => {
|
|
try {
|
|
const db = getDb();
|
|
const nodes = (db.prepare('SELECT * FROM nodes WHERE is_stale = 0').all() as any[]).map(row => ({
|
|
id: row.id,
|
|
kind: row.kind,
|
|
title: row.title,
|
|
content: row.content,
|
|
status: row.status,
|
|
tags: JSON.parse(row.tags || '[]'),
|
|
metadata: JSON.parse(row.metadata || '{}'),
|
|
createdAt: row.created_at,
|
|
updatedAt: row.updated_at,
|
|
}));
|
|
const edges = (db.prepare('SELECT * FROM edges').all() as any[]).map(row => ({
|
|
id: row.id,
|
|
fromId: row.from_id,
|
|
toId: row.to_id,
|
|
type: row.type,
|
|
metadata: JSON.parse(row.metadata || '{}'),
|
|
createdAt: row.created_at,
|
|
}));
|
|
res.json({ nodes, edges });
|
|
} catch (err: any) {
|
|
res.status(500).json({ error: err.message });
|
|
}
|
|
});
|
|
|
|
// Search
|
|
router.post('/search', async (req: Request, res: Response) => {
|
|
try {
|
|
const { text, options } = req.body;
|
|
const results = await query(text, options || {});
|
|
res.json(results.map(r => ({ ...r, node: { ...r.node, embedding: undefined } })));
|
|
} catch (err: any) {
|
|
res.status(500).json({ error: err.message });
|
|
}
|
|
});
|
|
|
|
// Organized query
|
|
router.post('/query/organize', async (req: Request, res: Response) => {
|
|
try {
|
|
const { text } = req.body;
|
|
if (!text) return res.status(400).json({ error: 'text is required' });
|
|
const results = await query(text, { limit: 30 });
|
|
const strategy = determineGroupingStrategy(text);
|
|
const grouped = groupResults(results, strategy);
|
|
res.json(grouped);
|
|
} catch (err: any) {
|
|
res.status(500).json({ error: err.message });
|
|
}
|
|
});
|
|
|
|
// Maintenance status
|
|
router.get('/maintenance/status', (_req: Request, res: Response) => {
|
|
const report = getLastReport();
|
|
res.json(report || { message: 'No heartbeat has run yet' });
|
|
});
|
|
|
|
// Trigger maintenance manually
|
|
router.post('/maintenance/run', async (_req: Request, res: Response) => {
|
|
try {
|
|
markDirty();
|
|
const report = await runMaintenance();
|
|
res.json(report);
|
|
} catch (err: any) {
|
|
res.status(500).json({ error: err.message });
|
|
}
|
|
});
|
|
|
|
// Prompt — AI-driven natural language instruction
|
|
router.post('/prompt', async (req: Request, res: Response) => {
|
|
try {
|
|
const { prompt } = req.body;
|
|
if (!prompt || typeof prompt !== 'string') {
|
|
return res.status(400).json({ error: 'prompt is required' });
|
|
}
|
|
const { interpretAndExecute } = await import('../core/prompt/interpreter');
|
|
const result = await interpretAndExecute(prompt);
|
|
res.json(result);
|
|
} catch (err: any) {
|
|
res.status(500).json({ error: err.message });
|
|
}
|
|
});
|
|
|
|
export default router;
|