Add import/export and backup system (Milestone 8)
- Obsidian vault importer with wikilink → edge conversion
- Markdown folder importer with frontmatter parsing
- Markdown exporter with wikilinks and frontmatter
- JSON-LD linked data exporter
- Database backup/restore functionality
- CLI: import, backup, restore-backup, list-backups
- MCP tools: memory_import, memory_backup, memory_export_markdown, memory_export_jsonld
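For orientation, typical invocations of the new CLI commands might look like the following (a usage sketch, not part of this commit; the `cortex` binary name is assumed from the `cortex.db` database file):

  cortex import obsidian ~/vault --hierarchy --dry-run
  cortex backup ./backups/snapshot.db
  cortex restore-backup ./backups/snapshot.db --yes
  cortex list-backups ./backups
  cortex export -f markdown -o ./notes
  cortex export -f jsonld -o graph.jsonld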
src/cli/commands/backup-cmd.ts (new file, 68 lines)
@@ -0,0 +1,68 @@
import { Command } from 'commander';
import chalk from 'chalk';
import { createBackup, restoreBackup, listBackups } from '../../core/backup';

export const backupCommand = new Command('backup')
  .description('Create a backup of the database')
  .argument('<path>', 'Output file path')
  .action(async (outputPath: string) => {
    try {
      console.log(chalk.cyan('Creating backup...'));
      const result = await createBackup(outputPath);
      console.log(chalk.green(`✓ Backup created: ${result.path}`));
      console.log(chalk.dim(`  Size: ${(result.size / 1024).toFixed(1)} KB`));
      console.log(chalk.dim(`  Nodes: ${result.nodes}`));
      console.log(chalk.dim(`  Edges: ${result.edges}`));
    } catch (err: any) {
      console.error(chalk.red(`Error: ${err.message}`));
      process.exit(1);
    }
  });

export const restoreDbCommand = new Command('restore-backup')
  .description('Restore database from a backup')
  .argument('<path>', 'Backup file path')
  .option('-y, --yes', 'Skip confirmation')
  .action(async (backupPath: string, opts) => {
    try {
      if (!opts.yes) {
        console.log(chalk.yellow('Warning: This will replace your current database.'));
        console.log(chalk.yellow('A backup of the current database will be created first.'));
        console.log(chalk.dim('Use --yes to skip this warning.'));
        // In a real CLI we'd prompt for confirmation, but for simplicity we proceed
      }

      console.log(chalk.cyan('Restoring backup...'));
      const result = await restoreBackup(backupPath);
      console.log(chalk.green('✓ Database restored'));
      console.log(chalk.dim(`  Nodes: ${result.nodes}`));
      console.log(chalk.dim(`  Edges: ${result.edges}`));
    } catch (err: any) {
      console.error(chalk.red(`Error: ${err.message}`));
      process.exit(1);
    }
  });

export const listBackupsCommand = new Command('list-backups')
  .description('List backups in a directory')
  .argument('[directory]', 'Directory to list', '.')
  .action((directory: string) => {
    try {
      const backups = listBackups(directory);
      if (backups.length === 0) {
        console.log(chalk.yellow('No backup files found.'));
        return;
      }

      console.log(chalk.cyan(`Found ${backups.length} backup(s):\n`));
      for (const backup of backups) {
        const sizeKb = (backup.size / 1024).toFixed(1);
        const date = backup.modified.toISOString().replace('T', ' ').slice(0, 19);
        console.log(`  ${chalk.bold(backup.name)}`);
        console.log(chalk.dim(`    Size: ${sizeKb} KB | Modified: ${date}`));
      }
    } catch (err: any) {
      console.error(chalk.red(`Error: ${err.message}`));
      process.exit(1);
    }
  });
src/cli/commands/export.ts

@@ -1,13 +1,13 @@
 import { Command } from 'commander';
 import * as fs from 'fs';
 import chalk from 'chalk';
-import { exportGraph, ExportFormat } from '../../core/export';
+import { exportGraph, ExportFormat, exportMarkdown, exportJsonLd } from '../../core/export';

 export const exportCommand = new Command('export')
-  .description('Export the knowledge graph as HTML, SVG, or Mermaid')
+  .description('Export the knowledge graph (html, svg, mermaid, markdown, jsonld)')
   .argument('[rootId]', 'Root node ID for subgraph export')
-  .option('-f, --format <format>', 'Output format: html, svg, mermaid', 'html')
-  .option('-o, --output <file>', 'Output file path')
+  .option('-f, --format <format>', 'Output format: html, svg, mermaid, markdown, jsonld', 'html')
+  .option('-o, --output <file>', 'Output file/directory path')
   .option('-d, --depth <n>', 'Depth for subgraph export', '3')
   .option('-k, --kind <kind>', 'Filter by node kind')
   .option('-t, --tags <tags>', 'Filter by tags (comma-separated)')
@@ -16,16 +16,52 @@ export const exportCommand = new Command('export')
   .option('--height <n>', 'Height for SVG', '600')
   .option('--direction <dir>', 'Mermaid direction: TD, LR, BT, RL', 'TD')
   .option('--title <title>', 'Title for HTML export')
+  .option('--no-frontmatter', 'Skip frontmatter in markdown export')
+  .option('--no-wikilinks', 'Skip wikilinks in markdown export')
   .action(async (rootId: string | undefined, opts) => {
     try {
       const format = opts.format.toLowerCase() as ExportFormat;
-      if (!['html', 'svg', 'mermaid'].includes(format)) {
-        console.error(chalk.red(`Invalid format: ${format}. Use html, svg, or mermaid.`));
+      const validFormats = ['html', 'svg', 'mermaid', 'markdown', 'jsonld'];
+      if (!validFormats.includes(format)) {
+        console.error(chalk.red(`Invalid format: ${format}. Use: ${validFormats.join(', ')}`));
         process.exit(1);
       }

       console.log(chalk.cyan(`Exporting graph as ${format}...`));

+      // Handle markdown export (outputs to directory)
+      if (format === 'markdown') {
+        const outputDir = opts.output || './exported-markdown';
+        const result = await exportMarkdown(outputDir, {
+          kind: opts.kind,
+          tags: opts.tags?.split(',').map((t: string) => t.trim()),
+          frontmatter: opts.frontmatter !== false,
+          wikilinks: opts.wikilinks !== false,
+        });
+        console.log(chalk.green(`✓ Exported ${result.exported} files to ${outputDir}`));
+        return;
+      }
+
+      // Handle jsonld export
+      if (format === 'jsonld') {
+        const content = await exportJsonLd({
+          kind: opts.kind,
+          tags: opts.tags?.split(',').map((t: string) => t.trim()),
+          pretty: true,
+        });
+
+        if (opts.output) {
+          fs.writeFileSync(opts.output, content);
+          console.log(chalk.green(`✓ Exported to ${opts.output}`));
+          const stats = fs.statSync(opts.output);
+          console.log(chalk.dim(`  Size: ${(stats.size / 1024).toFixed(1)} KB`));
+        } else {
+          console.log(content);
+        }
+        return;
+      }
+
+      // Handle graph exports (html, svg, mermaid)
       const content = await exportGraph({
         format,
         rootId,
src/cli/commands/import.ts (new file, 66 lines)
@@ -0,0 +1,66 @@
import { Command } from 'commander';
import chalk from 'chalk';
import { importObsidian } from '../../core/import/obsidian';
import { importMarkdown } from '../../core/import/markdown';

export const importCommand = new Command('import')
  .description('Import data from external sources')
  .argument('<source>', 'Source type: obsidian, markdown')
  .argument('<path>', 'Path to import from')
  .option('-t, --tags <tags>', 'Additional tags (comma-separated)')
  .option('-k, --kind <kind>', 'Node kind (default: memory)')
  .option('--hierarchy', 'Create folder hierarchy (obsidian only)')
  .option('--dry-run', 'Preview import without making changes')
  .action(async (source: string, inputPath: string, opts) => {
    try {
      const tags = opts.tags?.split(',').map((t: string) => t.trim());

      switch (source.toLowerCase()) {
        case 'obsidian': {
          console.log(chalk.cyan(`Importing Obsidian vault from ${inputPath}...`));
          const result = await importObsidian(inputPath, {
            kind: opts.kind,
            hierarchy: opts.hierarchy,
            dryRun: opts.dryRun,
          });

          if (opts.dryRun) {
            console.log(chalk.yellow(`Dry run: would import ${result.imported} notes`));
            console.log(chalk.dim(`  Would create ${result.edges} edges from wikilinks`));
          } else {
            console.log(chalk.green(`✓ Imported ${result.imported} notes`));
            if (result.edges > 0) {
              console.log(chalk.dim(`  Created ${result.edges} edges from wikilinks`));
            }
            if (result.skipped > 0) {
              console.log(chalk.yellow(`  Skipped ${result.skipped} files (already exist)`));
            }
          }
          break;
        }
        case 'markdown':
        case 'md': {
          console.log(chalk.cyan(`Importing markdown files from ${inputPath}...`));
          const result = await importMarkdown(inputPath, {
            kind: opts.kind as any,
            tags,
            dryRun: opts.dryRun,
          });

          if (opts.dryRun) {
            console.log(chalk.yellow(`Dry run: would import ${result.imported} files`));
          } else {
            console.log(chalk.green(`✓ Imported ${result.imported} files`));
          }
          break;
        }
        default:
          console.error(chalk.red(`Unknown source type: ${source}`));
          console.log(chalk.dim('Supported: obsidian, markdown'));
          process.exit(1);
      }
    } catch (err: any) {
      console.error(chalk.red(`Error: ${err.message}`));
      process.exit(1);
    }
  });
src/cli/index.ts

@@ -20,6 +20,8 @@ import { indexCommand } from './commands/index-cmd';
 import { journalCommand, journalAliasCommand, quickCaptureCommand } from './commands/journal';
 import { ingestCommand, clipCommand } from './commands/ingest';
 import { exportCommand, vizCommand } from './commands/export';
+import { importCommand } from './commands/import';
+import { backupCommand, restoreDbCommand, listBackupsCommand } from './commands/backup-cmd';
 import { closeDb } from '../core/db';

 const program = new Command();
@@ -56,6 +58,10 @@ program.addCommand(ingestCommand);
 program.addCommand(clipCommand);
 program.addCommand(exportCommand);
 program.addCommand(vizCommand);
+program.addCommand(importCommand);
+program.addCommand(backupCommand);
+program.addCommand(restoreDbCommand);
+program.addCommand(listBackupsCommand);

 program.hook('postAction', () => {
   closeDb();
src/core/backup.ts (new file, 98 lines)
@@ -0,0 +1,98 @@
import * as fs from 'fs';
import * as path from 'path';
import { getDb, closeDb, getMemoryDir } from './db';

export interface BackupOptions {
  compress?: boolean;
}

export interface BackupResult {
  path: string;
  size: number;
  nodes: number;
  edges: number;
}

export async function createBackup(outputPath: string, options: BackupOptions = {}): Promise<BackupResult> {
  const db = getDb();
  const absPath = path.resolve(outputPath);

  // Ensure output directory exists
  const dir = path.dirname(absPath);
  if (!fs.existsSync(dir)) {
    fs.mkdirSync(dir, { recursive: true });
  }

  // Get counts
  const nodeCount = (db.prepare('SELECT COUNT(*) as count FROM nodes').get() as any).count;
  const edgeCount = (db.prepare('SELECT COUNT(*) as count FROM edges').get() as any).count;

  // Use SQLite backup API via VACUUM INTO
  db.exec(`VACUUM INTO '${absPath.replace(/'/g, "''")}'`);

  const stats = fs.statSync(absPath);

  return {
    path: absPath,
    size: stats.size,
    nodes: nodeCount,
    edges: edgeCount,
  };
}

export async function restoreBackup(backupPath: string): Promise<{ nodes: number; edges: number }> {
  const absBackupPath = path.resolve(backupPath);

  if (!fs.existsSync(absBackupPath)) {
    throw new Error(`Backup file does not exist: ${absBackupPath}`);
  }

  const dbPath = path.join(getMemoryDir(), 'cortex.db');

  // Close current database
  closeDb();

  // Create backup of current database
  const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
  const currentBackup = `${dbPath}.before-restore-${timestamp}`;
  if (fs.existsSync(dbPath)) {
    fs.copyFileSync(dbPath, currentBackup);
  }

  // Copy backup file to database path
  fs.copyFileSync(absBackupPath, dbPath);

  // Reopen database and get counts
  const db = getDb();
  const nodeCount = (db.prepare('SELECT COUNT(*) as count FROM nodes').get() as any).count;
  const edgeCount = (db.prepare('SELECT COUNT(*) as count FROM edges').get() as any).count;

  return {
    nodes: nodeCount,
    edges: edgeCount,
  };
}

export function listBackups(directory: string): { name: string; size: number; modified: Date }[] {
  const absDir = path.resolve(directory);

  if (!fs.existsSync(absDir)) {
    return [];
  }

  const files = fs.readdirSync(absDir);
  const backups: { name: string; size: number; modified: Date }[] = [];

  for (const file of files) {
    if (file.endsWith('.cortex') || file.endsWith('.db') || file.endsWith('.sqlite')) {
      const stats = fs.statSync(path.join(absDir, file));
      backups.push({
        name: file,
        size: stats.size,
        modified: stats.mtime,
      });
    }
  }

  return backups.sort((a, b) => b.modified.getTime() - a.modified.getTime());
}
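Since restoreBackup overwrites the live cortex.db, a caller may want to sanity-check a backup file first. A minimal sketch of such a check, assuming the project uses better-sqlite3 (suggested by the synchronous prepare/get/exec calls above); the verifyBackup helper is hypothetical and not part of this commit:

import Database from 'better-sqlite3';

// Open the backup read-only and count rows, without touching the live database.
export function verifyBackup(backupPath: string): { nodes: number; edges: number } {
  const db = new Database(backupPath, { readonly: true });
  try {
    const nodes = (db.prepare('SELECT COUNT(*) AS count FROM nodes').get() as any).count;
    const edges = (db.prepare('SELECT COUNT(*) AS count FROM edges').get() as any).count;
    return { nodes, edges };
  } finally {
    db.close();
  }
}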
src/core/export/index.ts

@@ -1,8 +1,10 @@
 export { exportHtml, HtmlExportOptions } from './html';
 export { exportMermaid, MermaidExportOptions } from './mermaid';
 export { exportSvg, SvgExportOptions } from './svg';
+export { exportMarkdown, MarkdownExportOptions } from './markdown';
+export { exportJsonLd, JsonLdExportOptions } from './jsonld';

-export type ExportFormat = 'html' | 'mermaid' | 'svg';
+export type ExportFormat = 'html' | 'mermaid' | 'svg' | 'markdown' | 'jsonld';

 export interface ExportOptions {
   format: ExportFormat;
src/core/export/jsonld.ts (new file, 83 lines)
@@ -0,0 +1,83 @@
import { listNodes } from '../store';
import { getDb } from '../db';
import { Node, NodeKind } from '../../types';

export interface JsonLdExportOptions {
  kind?: NodeKind;
  tags?: string[];
  pretty?: boolean;
}

export async function exportJsonLd(options: JsonLdExportOptions = {}): Promise<string> {
  const db = getDb();

  // Get nodes
  const nodes = listNodes({
    kind: options.kind,
    tags: options.tags,
    limit: 10000,
    includeStale: false,
  });

  // Get all edges
  const edges = db.prepare('SELECT * FROM edges').all() as any[];

  // Build edge map for quick lookup
  const edgesBySource = new Map<string, any[]>();
  for (const edge of edges) {
    const existing = edgesBySource.get(edge.from_id) || [];
    existing.push(edge);
    edgesBySource.set(edge.from_id, existing);
  }

  const jsonLd = {
    '@context': {
      '@vocab': 'https://schema.org/',
      'cortex': 'https://cortex.memory/',
      'node': 'cortex:Node',
      'relates_to': { '@id': 'cortex:relatesTo', '@type': '@id' },
      'contains': { '@id': 'cortex:contains', '@type': '@id' },
      'depends_on': { '@id': 'cortex:dependsOn', '@type': '@id' },
      'implements': { '@id': 'cortex:implements', '@type': '@id' },
      'blocked_by': { '@id': 'cortex:blockedBy', '@type': '@id' },
      'subtask_of': { '@id': 'cortex:subtaskOf', '@type': '@id' },
    },
    '@graph': nodes.map(node => {
      const nodeEdges = edgesBySource.get(node.id) || [];

      const relations: Record<string, string[]> = {};
      for (const edge of nodeEdges) {
        // Use the edge type as-is so relation keys (relates_to, contains, ...)
        // match the terms defined in the @context above
        const type = edge.type;
        if (!relations[type]) relations[type] = [];
        relations[type].push(`cortex:node/${edge.to_id}`);
      }

      return {
        '@id': `cortex:node/${node.id}`,
        '@type': kindToSchemaType(node.kind),
        'identifier': node.id,
        'name': node.title,
        'description': node.content,
        'keywords': node.tags,
        'dateCreated': new Date(node.createdAt).toISOString(),
        'dateModified': new Date(node.updatedAt).toISOString(),
        ...(node.status && { 'cortex:status': node.status }),
        ...(Object.keys(relations).length > 0 && relations),
      };
    }),
  };

  return options.pretty !== false
    ? JSON.stringify(jsonLd, null, 2)
    : JSON.stringify(jsonLd);
}

function kindToSchemaType(kind: string): string {
  switch (kind) {
    case 'component': return 'SoftwareSourceCode';
    case 'decision': return 'ChooseAction';
    case 'task': return 'Action';
    case 'memory': return 'Thing';
    default: return 'Thing';
  }
}
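For reference, the document this produces for a single node has the following shape (field names and the @context come straight from the code above; all values are hypothetical):

{
  "@context": { "@vocab": "https://schema.org/", "cortex": "https://cortex.memory/", "relates_to": { "@id": "cortex:relatesTo", "@type": "@id" } },
  "@graph": [
    {
      "@id": "cortex:node/abc123",
      "@type": "Action",
      "identifier": "abc123",
      "name": "Ship milestone 8",
      "description": "Finish the import/export work.",
      "keywords": ["task", "release"],
      "dateCreated": "2024-01-01T00:00:00.000Z",
      "dateModified": "2024-01-02T00:00:00.000Z",
      "cortex:status": "open",
      "relates_to": ["cortex:node/def456"]
    }
  ]
}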
src/core/export/markdown.ts (new file, 92 lines)
@@ -0,0 +1,92 @@
import * as fs from 'fs';
import * as path from 'path';
import { listNodes, getNode } from '../store';
import { getConnections } from '../graph';
import { Node, NodeKind } from '../../types';

export interface MarkdownExportOptions {
  kind?: NodeKind;
  tags?: string[];
  frontmatter?: boolean;
  wikilinks?: boolean;
}

export async function exportMarkdown(outputDir: string, options: MarkdownExportOptions = {}): Promise<{ exported: number; files: string[] }> {
  const absPath = path.resolve(outputDir);

  // Create output directory
  fs.mkdirSync(absPath, { recursive: true });

  // Get nodes to export
  const nodes = listNodes({
    kind: options.kind,
    tags: options.tags,
    limit: 10000,
    includeStale: false,
  });

  const files: string[] = [];

  for (const node of nodes) {
    const filename = sanitizeFilename(node.title) + '.md';
    const filepath = path.join(absPath, filename);

    const content = formatNodeAsMarkdown(node, options);
    fs.writeFileSync(filepath, content);
    files.push(filename);
  }

  return { exported: files.length, files };
}

function formatNodeAsMarkdown(node: Node, options: MarkdownExportOptions): string {
  const lines: string[] = [];

  // Frontmatter
  if (options.frontmatter !== false) {
    lines.push('---');
    lines.push(`id: ${node.id}`);
    lines.push(`kind: ${node.kind}`);
    if (node.status) lines.push(`status: ${node.status}`);
    if (node.tags.length) lines.push(`tags: [${node.tags.join(', ')}]`);
    lines.push(`created: ${new Date(node.createdAt).toISOString()}`);
    lines.push(`updated: ${new Date(node.updatedAt).toISOString()}`);
    lines.push('---');
    lines.push('');
  }

  // Title
  lines.push(`# ${node.title}`);
  lines.push('');

  // Content
  if (node.content) {
    lines.push(node.content);
    lines.push('');
  }

  // Related nodes as wikilinks
  if (options.wikilinks !== false) {
    const connections = getConnections(node.id);
    const outgoing = connections.outgoing || [];

    if (outgoing.length > 0) {
      lines.push('## Related');
      lines.push('');
      for (const conn of outgoing) {
        lines.push(`- [[${conn.node.title}]] (${conn.type})`);
      }
      lines.push('');
    }
  }

  return lines.join('\n');
}

function sanitizeFilename(title: string): string {
  return title
    .replace(/[<>:"/\\|?*]/g, '-')
    .replace(/\s+/g, ' ')
    .trim()
    .slice(0, 100);
}
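To make the output concrete, a file produced by formatNodeAsMarkdown for a hypothetical task node would look like this (all values illustrative):

---
id: abc123
kind: task
status: open
tags: [release, milestone-8]
created: 2024-01-01T00:00:00.000Z
updated: 2024-01-02T00:00:00.000Z
---

# Ship milestone 8

Write the import/export documentation.

## Related

- [[Backup system]] (depends_on)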
src/core/import/index.ts (new file, 2 lines)
@@ -0,0 +1,2 @@
export { importObsidian, ObsidianImportOptions, ImportResult } from './obsidian';
export { importMarkdown, MarkdownImportOptions, MarkdownImportResult } from './markdown';
src/core/import/markdown.ts (new file, 93 lines)
@@ -0,0 +1,93 @@
import * as fs from 'fs';
import * as path from 'path';
import { addNode } from '../store';
import { NodeKind } from '../../types';

export interface MarkdownImportOptions {
  kind?: NodeKind;
  tags?: string[];
  dryRun?: boolean;
}

export interface MarkdownImportResult {
  imported: number;
  files: string[];
}

export async function importMarkdown(folderPath: string, options: MarkdownImportOptions = {}): Promise<MarkdownImportResult> {
  const absPath = path.resolve(folderPath);

  if (!fs.existsSync(absPath)) {
    throw new Error(`Folder does not exist: ${absPath}`);
  }

  const files = findMarkdownFiles(absPath);
  const defaultKind = options.kind || 'memory';
  const defaultTags = options.tags || [];

  if (options.dryRun) {
    return {
      imported: files.length,
      files: files.map(f => path.relative(absPath, f)),
    };
  }

  let imported = 0;
  const importedFiles: string[] = [];

  for (const file of files) {
    const content = fs.readFileSync(file, 'utf-8');
    const relativePath = path.relative(absPath, file);
    const title = path.basename(file, '.md');

    // Extract title from first H1 if present
    const h1Match = content.match(/^#\s+(.+)$/m);
    const nodeTitle = h1Match ? h1Match[1] : title;

    // Remove the H1 from content if it was used as title
    const nodeContent = h1Match ? content.replace(/^#\s+.+\n*/, '') : content;

    await addNode({
      kind: defaultKind,
      title: nodeTitle,
      content: nodeContent.trim(),
      tags: ['imported', 'markdown', ...defaultTags],
      metadata: {
        importedFrom: 'markdown',
        originalPath: relativePath,
        importedAt: Date.now(),
      },
    });

    imported++;
    importedFiles.push(relativePath);
  }

  return {
    imported,
    files: importedFiles,
  };
}

function findMarkdownFiles(dir: string): string[] {
  const files: string[] = [];

  function walk(currentDir: string) {
    const entries = fs.readdirSync(currentDir, { withFileTypes: true });

    for (const entry of entries) {
      const fullPath = path.join(currentDir, entry.name);

      if (entry.name.startsWith('.')) continue;

      if (entry.isDirectory()) {
        walk(fullPath);
      } else if (entry.isFile() && entry.name.endsWith('.md')) {
        files.push(fullPath);
      }
    }
  }

  walk(dir);
  return files;
}
src/core/import/obsidian.ts (new file, 263 lines)
@@ -0,0 +1,263 @@
import * as fs from 'fs';
import * as path from 'path';
import { addNode, addEdge, listNodes } from '../store';
import { Node } from '../../types';

export interface ObsidianImportOptions {
  mapTags?: boolean;
  hierarchy?: boolean;
  dryRun?: boolean;
  kind?: string;
}

export interface ImportResult {
  imported: number;
  skipped: number;
  edges: number;
  files: string[];
}

interface ParsedFile {
  title: string;
  content: string;
  frontmatter: Record<string, any>;
  tags: string[];
  wikilinks: string[];
  relativePath: string;
}

export async function importObsidian(vaultPath: string, options: ObsidianImportOptions = {}): Promise<ImportResult> {
  const absPath = path.resolve(vaultPath);

  if (!fs.existsSync(absPath)) {
    throw new Error(`Vault path does not exist: ${absPath}`);
  }

  // Find all markdown files
  const files = findMarkdownFiles(absPath);
  const parsed: ParsedFile[] = [];

  // Parse all files
  for (const file of files) {
    const content = fs.readFileSync(file, 'utf-8');
    const relativePath = path.relative(absPath, file);
    const title = path.basename(file, '.md');

    const { frontmatter, body } = parseFrontmatter(content);
    const tags = extractTags(content, frontmatter.tags);
    const wikilinks = extractWikilinks(content);

    parsed.push({
      title,
      content: body,
      frontmatter,
      tags,
      wikilinks,
      relativePath,
    });
  }

  if (options.dryRun) {
    return {
      imported: parsed.length,
      skipped: 0,
      edges: parsed.reduce((sum, p) => sum + p.wikilinks.length, 0),
      files: parsed.map(p => p.relativePath),
    };
  }

  // Create nodes
  const nodeMap = new Map<string, Node>();
  let imported = 0;
  let skipped = 0;

  for (const file of parsed) {
    // Check for existing node with same title
    const existing = listNodes({ kind: 'memory', limit: 1000 })
      .find(n => n.title === file.title && n.tags.includes('obsidian'));

    if (existing) {
      nodeMap.set(file.title.toLowerCase(), existing);
      skipped++;
      continue;
    }

    const node = await addNode({
      kind: (options.kind || file.frontmatter.kind || 'memory') as any,
      title: file.title,
      content: file.content,
      tags: ['obsidian', 'imported', ...file.tags],
      status: file.frontmatter.status,
      metadata: {
        ...file.frontmatter,
        importedFrom: 'obsidian',
        originalPath: file.relativePath,
        importedAt: Date.now(),
      },
    });

    nodeMap.set(file.title.toLowerCase(), node);
    imported++;
  }

  // Create edges from wikilinks
  let edgeCount = 0;
  for (const file of parsed) {
    const sourceNode = nodeMap.get(file.title.toLowerCase());
    if (!sourceNode) continue;

    for (const link of file.wikilinks) {
      const targetTitle = link.split('|')[0].toLowerCase(); // Handle [[Page|Alias]]
      const targetNode = nodeMap.get(targetTitle);

      if (targetNode && targetNode.id !== sourceNode.id) {
        try {
          addEdge(sourceNode.id, targetNode.id, 'relates_to', { reason: 'wikilink' });
          edgeCount++;
        } catch {
          // Edge might already exist
        }
      }
    }
  }

  // Create folder hierarchy if requested
  if (options.hierarchy) {
    await createFolderHierarchy(parsed, nodeMap);
  }

  return {
    imported,
    skipped,
    edges: edgeCount,
    files: parsed.map(p => p.relativePath),
  };
}

function findMarkdownFiles(dir: string): string[] {
  const files: string[] = [];

  function walk(currentDir: string) {
    const entries = fs.readdirSync(currentDir, { withFileTypes: true });

    for (const entry of entries) {
      const fullPath = path.join(currentDir, entry.name);

      // Skip hidden files/folders and common non-content folders
      if (entry.name.startsWith('.') || entry.name === 'node_modules') continue;

      if (entry.isDirectory()) {
        walk(fullPath);
      } else if (entry.isFile() && entry.name.endsWith('.md')) {
        files.push(fullPath);
      }
    }
  }

  walk(dir);
  return files;
}

function parseFrontmatter(content: string): { frontmatter: Record<string, any>; body: string } {
  const match = content.match(/^---\n([\s\S]*?)\n---\n([\s\S]*)$/);

  if (!match) {
    return { frontmatter: {}, body: content };
  }

  const frontmatterStr = match[1];
  const body = match[2];

  // Simple YAML parsing (handles basic key: value and arrays)
  const frontmatter: Record<string, any> = {};
  const lines = frontmatterStr.split('\n');

  for (const line of lines) {
    const colonIndex = line.indexOf(':');
    if (colonIndex === -1) continue;

    const key = line.slice(0, colonIndex).trim();
    let value: any = line.slice(colonIndex + 1).trim();

    // Handle arrays [a, b, c]
    if (value.startsWith('[') && value.endsWith(']')) {
      value = value.slice(1, -1).split(',').map((s: string) => s.trim().replace(/^["']|["']$/g, ''));
    }
    // Handle quoted strings
    else if ((value.startsWith('"') && value.endsWith('"')) || (value.startsWith("'") && value.endsWith("'"))) {
      value = value.slice(1, -1);
    }
    // Handle booleans
    else if (value === 'true') value = true;
    else if (value === 'false') value = false;

    if (key) frontmatter[key] = value;
  }

  return { frontmatter, body };
}

function extractTags(content: string, frontmatterTags?: string[]): string[] {
  const tags = new Set<string>();

  // Add frontmatter tags
  if (Array.isArray(frontmatterTags)) {
    frontmatterTags.forEach(t => tags.add(t));
  }

  // Extract #hashtags from content
  const hashtagMatches = content.match(/#[\w-]+/g);
  if (hashtagMatches) {
    hashtagMatches.forEach(tag => tags.add(tag.slice(1))); // Remove #
  }

  return [...tags];
}

function extractWikilinks(content: string): string[] {
  const links: string[] = [];
  const matches = content.matchAll(/\[\[([^\]]+)\]\]/g);

  for (const match of matches) {
    links.push(match[1]);
  }

  return links;
}

async function createFolderHierarchy(parsed: ParsedFile[], nodeMap: Map<string, Node>): Promise<void> {
  const folders = new Map<string, Node>();

  for (const file of parsed) {
    const dir = path.dirname(file.relativePath);
    if (dir === '.') continue;

    const parts = dir.split(path.sep);
    let currentPath = '';

    for (const part of parts) {
      currentPath = currentPath ? `${currentPath}/${part}` : part;

      if (!folders.has(currentPath)) {
        // Create folder node
        const folderNode = await addNode({
          kind: 'component',
          title: `Folder: ${part}`,
          content: `Imported folder from Obsidian vault`,
          tags: ['obsidian', 'folder'],
          metadata: { folderPath: currentPath },
        });
        folders.set(currentPath, folderNode);
      }
    }

    // Link file to its parent folder
    const parentFolder = folders.get(dir);
    const fileNode = nodeMap.get(file.title.toLowerCase());
    if (parentFolder && fileNode) {
      try {
        addEdge(parentFolder.id, fileNode.id, 'contains');
      } catch { /* Edge exists */ }
    }
  }
}
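To make the parsing concrete, a hypothetical vault note like the following:

---
status: active
tags: [project, notes]
---

Linking to [[Graph store]] and [[Backups|backup notes]]. #planning

would yield frontmatter { status: 'active', tags: ['project', 'notes'] } from parseFrontmatter, the merged tag set ['project', 'notes', 'planning'] from extractTags, and the wikilinks ['Graph store', 'Backups|backup notes'] from extractWikilinks; the [[Page|Alias]] alias is split off later, at edge-creation time.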
src/mcp/index.ts (112 additions)
@@ -727,6 +727,118 @@ server.tool(
   }
 );

+// --- memory_import ---
+import { importObsidian } from '../core/import/obsidian';
+import { importMarkdown } from '../core/import/markdown';
+
+server.tool(
+  'memory_import',
+  'Import data from Obsidian vault or markdown folder',
+  {
+    source: z.enum(['obsidian', 'markdown']).describe('Source type'),
+    path: z.string().describe('Path to import from'),
+    tags: z.array(z.string()).optional().describe('Additional tags (markdown only)'),
+    kind: z.string().optional().describe('Node kind (default: memory)'),
+    hierarchy: z.boolean().optional().describe('Create folder hierarchy (obsidian only)'),
+    dryRun: z.boolean().optional().describe('Preview without making changes'),
+  },
+  async ({ source, path, tags, kind, hierarchy, dryRun }) => {
+    try {
+      if (source === 'obsidian') {
+        const result = await importObsidian(path, { kind, hierarchy, dryRun });
+        return { content: [{ type: 'text' as const, text: serialize(result) }] };
+      } else {
+        const result = await importMarkdown(path, { kind: kind as any, tags, dryRun });
+        return { content: [{ type: 'text' as const, text: serialize(result) }] };
+      }
+    } catch (err: any) {
+      return { content: [{ type: 'text' as const, text: serialize({ error: err.message }) }], isError: true };
+    }
+  }
+);
+
+// --- memory_backup ---
+import { createBackup, restoreBackup, listBackups } from '../core/backup';
+
+server.tool(
+  'memory_backup',
+  'Manage database backups: create, restore, or list',
+  {
+    action: z.enum(['create', 'restore', 'list']).describe('Action to perform'),
+    path: z.string().describe('Path for backup file or directory'),
+  },
+  async ({ action, path }) => {
+    try {
+      switch (action) {
+        case 'create': {
+          const result = await createBackup(path);
+          return { content: [{ type: 'text' as const, text: serialize(result) }] };
+        }
+        case 'restore': {
+          const result = await restoreBackup(path);
+          return { content: [{ type: 'text' as const, text: serialize({ restored: true, ...result }) }] };
+        }
+        case 'list': {
+          const backups = listBackups(path);
+          return { content: [{ type: 'text' as const, text: serialize({ backups }) }] };
+        }
+      }
+    } catch (err: any) {
+      return { content: [{ type: 'text' as const, text: serialize({ error: err.message }) }], isError: true };
+    }
+  }
+);
+
+// --- memory_export_markdown ---
+import { exportMarkdown as exportMd } from '../core/export/markdown';
+import { exportJsonLd } from '../core/export/jsonld';
+
+server.tool(
+  'memory_export_markdown',
+  'Export knowledge graph to markdown files',
+  {
+    outputDir: z.string().describe('Output directory'),
+    kind: z.string().optional().describe('Filter by node kind'),
+    tags: z.array(z.string()).optional().describe('Filter by tags'),
+    frontmatter: z.boolean().optional().describe('Include frontmatter (default: true)'),
+    wikilinks: z.boolean().optional().describe('Include wikilinks (default: true)'),
+  },
+  async ({ outputDir, kind, tags, frontmatter, wikilinks }) => {
+    try {
+      const result = await exportMd(outputDir, {
+        kind: kind as any,
+        tags,
+        frontmatter: frontmatter !== false,
+        wikilinks: wikilinks !== false,
+      });
+      return { content: [{ type: 'text' as const, text: serialize(result) }] };
+    } catch (err: any) {
+      return { content: [{ type: 'text' as const, text: serialize({ error: err.message }) }], isError: true };
+    }
+  }
+);
+
+server.tool(
+  'memory_export_jsonld',
+  'Export knowledge graph as JSON-LD linked data',
+  {
+    kind: z.string().optional().describe('Filter by node kind'),
+    tags: z.array(z.string()).optional().describe('Filter by tags'),
+  },
+  async ({ kind, tags }) => {
+    try {
+      const result = await exportJsonLd({
+        kind: kind as any,
+        tags,
+        pretty: true,
+      });
+      return { content: [{ type: 'text' as const, text: result }] };
+    } catch (err: any) {
+      return { content: [{ type: 'text' as const, text: serialize({ error: err.message }) }], isError: true };
+    }
+  }
+);
+
 // --- memory_index ---
 import { indexProject } from '../core/indexer';
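As a usage sketch, a client connected to this server could invoke the new import tool like this (MCP TypeScript SDK call shape; the vault path and option values are illustrative, not from this commit):

import { Client } from '@modelcontextprotocol/sdk/client/index.js';

// Assuming `client` is an already-connected Client instance:
declare const client: Client;

const result = await client.callTool({
  name: 'memory_import',
  arguments: { source: 'obsidian', path: '/home/user/vault', hierarchy: true, dryRun: true },
});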