1351 lines
		
	
	
		
			43 KiB
		
	
	
	
		
			JavaScript
		
	
	
	
	
	
			
		
		
	
	
			1351 lines
		
	
	
		
			43 KiB
		
	
	
	
		
			JavaScript
		
	
	
	
	
	
#!/usr/bin/env node
 | 
						||
 | 
						||
import express from 'express';
 | 
						||
import path from 'path';
 | 
						||
import { fileURLToPath } from 'url';
 | 
						||
import fs from 'fs';
 | 
						||
import zlib from 'zlib';
 | 
						||
import chokidar from 'chokidar';
 | 
						||
import { meiliClient, vaultIndexName, ensureIndexSettings } from './meilisearch.client.mjs';
 | 
						||
import { fullReindex, upsertFile, deleteFile } from './meilisearch-indexer.mjs';
 | 
						||
import { mapObsidianQueryToMeili, buildSearchParams } from './search.mapping.mjs';
 | 
						||
import { PORT as CFG_PORT, VAULT_PATH as CFG_VAULT_PATH, debugPrintConfig } from './config.mjs';
 | 
						||
import { z } from 'zod';
 | 
						||
import { 
 | 
						||
  parseExcalidrawAny, 
 | 
						||
  toObsidianExcalidrawMd, 
 | 
						||
  extractFrontMatter, 
 | 
						||
  isValidExcalidrawScene 
 | 
						||
} from './excalidraw-obsidian.mjs';
 | 
						||
import { rewriteTagsFrontmatter, extractTagsFromFrontmatter } from './markdown-frontmatter.mjs';
 | 
						||
import { enrichFrontmatterOnOpen } from './ensureFrontmatter.mjs';
 | 
						||
import { loadVaultMetadataOnly } from './vault-metadata-loader.mjs';
 | 
						||
import { MetadataCache as MetadataCacheOld, PerformanceLogger } from './performance-config.mjs';
 | 
						||
import { MetadataCache } from './perf/metadata-cache.js';
 | 
						||
import { PerformanceMonitor } from './perf/performance-monitor.js';
 | 
						||
import { retryWithBackoff, CircuitBreaker } from './utils/retry.js';
 | 
						||
import {
 | 
						||
  setupMetadataEndpoint,
 | 
						||
  setupPaginatedMetadataEndpoint,
 | 
						||
  setupPerformanceEndpoint,
 | 
						||
  setupDeferredIndexing
 | 
						||
} from './index-phase3-patch.mjs';
 | 
						||
 | 
						||
// Resolve this module's own location (ESM has no __filename/__dirname builtins).
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);

const app = express();
const PORT = CFG_PORT;

// Project root is one level above this server file; `dist` holds the built SPA.
const rootDir = path.resolve(__dirname, '..');
const distDir = path.join(rootDir, 'dist');
// Centralized vault directory (relative config paths are resolved against rootDir).
const vaultDir = path.isAbsolute(CFG_VAULT_PATH) ? CFG_VAULT_PATH : path.join(rootDir, CFG_VAULT_PATH);

// Currently connected SSE clients for /api/vault/events.
const vaultEventClients = new Set();

// Phase 3: Advanced caching and monitoring
// 5-minute TTL, 10k-entry metadata cache; circuit breaker shields Meilisearch
// after 5 consecutive failures, retrying after 30s.
const metadataCache = new MetadataCache({ ttlMs: 5 * 60 * 1000, maxItems: 10_000 });
const performanceMonitor = new PerformanceMonitor();
const meilisearchCircuitBreaker = new CircuitBreaker({ failureThreshold: 5, resetTimeoutMs: 30_000 });
						||
 | 
						||
/**
 * Register an SSE subscriber and start a 20s keepalive heartbeat for it.
 * @param res - open Express response acting as the event stream
 * @returns the client record ({ res, heartbeat }) stored in vaultEventClients
 */
const registerVaultEventClient = (res) => {
  const keepaliveTimer = setInterval(() => {
    try {
      res.write(':keepalive\n\n');
    } catch {
      // A failed write is cleaned up by the request 'close' handler.
    }
  }, 20000);

  const client = { res, heartbeat: keepaliveTimer };
  vaultEventClients.add(client);
  return client;
};
 | 
						||
 | 
						||
// Stop a subscriber's heartbeat timer and remove it from the broadcast set.
const unregisterVaultEventClient = (client) => {
  const { heartbeat } = client;
  clearInterval(heartbeat);
  vaultEventClients.delete(client);
};
 | 
						||
 | 
						||
// Send a JSON payload to every connected SSE client; clients whose stream
// can no longer be written to are logged and unregistered.
const broadcastVaultEvent = (payload) => {
  if (vaultEventClients.size === 0) {
    return;
  }

  const frame = `data: ${JSON.stringify(payload)}\n\n`;
  // Iterate over a snapshot: unregistering mutates the set mid-loop.
  for (const client of Array.from(vaultEventClients)) {
    try {
      client.res.write(frame);
    } catch (error) {
      console.error('Failed to notify vault event client:', error);
      unregisterVaultEventClient(client);
    }
  }
};
 | 
						||
 | 
						||
// True when a directory entry is a regular file ending in .md (case-insensitive).
const isMarkdownFile = (entry) => {
  if (!entry.isFile()) {
    return false;
  }
  return entry.name.toLowerCase().endsWith('.md');
};
						||
 | 
						||
// Remove combining diacritics (via NFKD decomposition) and trim whitespace.
const normalizeString = (value) => {
  const decomposed = value.normalize('NFKD');
  const withoutAccents = decomposed.replace(/[\u0300-\u036f]/g, '');
  return withoutAccents.trim();
};
 | 
						||
 | 
						||
// Turn one path segment into a lowercase, dash-separated slug. If slugging
// strips everything, fall back to the lowercased normalized text, then to
// the lowercased raw segment.
const slugifySegment = (segment) => {
  const normalized = normalizeString(segment);
  const slug = normalized
    .toLowerCase()
    .replace(/[^a-z0-9]+/g, '-')
    .replace(/^-+|-+$/g, '');
  if (slug) {
    return slug;
  }
  const lowered = normalized.toLowerCase();
  return lowered || segment.toLowerCase();
};
 | 
						||
 | 
						||
// Slugify each segment of a vault-relative path, dropping empty results.
const slugifyPath = (relativePath) => {
  const slugs = [];
  for (const segment of relativePath.split('/')) {
    const slug = slugifySegment(segment);
    if (slug) {
      slugs.push(slug);
    }
  }
  return slugs.join('/');
};
 | 
						||
 | 
						||
// Use the first level-1 markdown heading as the note title; otherwise the fallback.
const extractTitle = (content, fallback) => {
  const heading = /^\s*#\s+(.+)$/m.exec(content);
  return heading ? heading[1].trim() : fallback;
};
 | 
						||
 | 
						||
// Collect the unique inline #tags (letters, digits, _, /, -) found in a note,
// requiring the tag to start the string or follow whitespace.
const extractTags = (content) => {
  const tagPattern = /(^|\s)#([A-Za-z0-9_\/-]+)/g;
  const found = new Set();
  for (const hit of content.matchAll(tagPattern)) {
    found.add(hit[2]);
  }
  return [...found];
};
 | 
						||
 | 
						||
/**
 * Recursively read every markdown note under `vaultPath` into memory.
 * Returns an array of note records: { id, title, content, tags, mtime,
 * fileName, filePath, originalPath, createdAt, updatedAt }.
 * Uses synchronous fs calls despite being async; per-file failures are
 * logged and skipped rather than aborting the whole load.
 */
const loadVaultNotes = async (vaultPath) => {
  const notes = [];

  const walk = async (currentDir) => {
    if (!fs.existsSync(currentDir)) {
      return;
    }

    const entries = fs.readdirSync(currentDir, { withFileTypes: true });
    for (const entry of entries) {
      const entryPath = path.join(currentDir, entry.name);

      if (entry.isDirectory()) {
        await walk(entryPath);
        continue;
      }

      if (!isMarkdownFile(entry)) {
        continue;
      }

      try {
        // Skip enrichment during initial load for performance (Phase 1)
        // Enrichment will happen on-demand when file is opened via /api/files
        const content = fs.readFileSync(entryPath, 'utf-8');
        
        const stats = fs.statSync(entryPath);
        // Normalize to forward slashes so ids/paths are stable across OSes.
        const relativePathWithExt = path.relative(vaultPath, entryPath).replace(/\\/g, '/');
        const relativePath = relativePathWithExt.replace(/\.md$/i, '');
        const id = slugifyPath(relativePath);
        const fileNameWithExt = entry.name;

        const fallbackTitle = path.basename(relativePath);
        const title = extractTitle(content, fallbackTitle);
        // Guarantee a non-empty id even when slugification strips everything.
        const finalId = id || slugifySegment(fallbackTitle) || fallbackTitle;
        // birthtime can be 0 on filesystems that don't track it; use ctime then.
        const createdDate = stats.birthtimeMs ? new Date(stats.birthtimeMs) : new Date(stats.ctimeMs);
        const updatedDate = new Date(stats.mtimeMs);

        notes.push({
          id: finalId,
          title,
          content,
          tags: extractTags(content),
          mtime: stats.mtimeMs,
          fileName: fileNameWithExt,
          filePath: relativePathWithExt,
          originalPath: relativePath,
          createdAt: createdDate.toISOString(),
          updatedAt: updatedDate.toISOString()
        });
      } catch (err) {
        // Best-effort: a single unreadable note should not fail the whole vault.
        console.error(`Failed to read/enrich note at ${entryPath}:`, err);
      }
    }
  };

  await walk(vaultPath);
  return notes;
};
 | 
						||
 | 
						||
// Walk the vault for Obsidian Excalidraw drawings (*.excalidraw.md) and
// return lightweight FileMetadata-style records for each one found.
const scanVaultDrawings = (vaultPath) => {
  const drawings = [];

  const visit = (dir) => {
    let dirents;
    try {
      dirents = fs.readdirSync(dir, { withFileTypes: true });
    } catch {
      // Unreadable directory: skip silently (best-effort scan).
      return;
    }

    for (const dirent of dirents) {
      const fullPath = path.join(dir, dirent.name);

      if (dirent.isDirectory()) {
        visit(fullPath);
      } else if (dirent.isFile() && dirent.name.toLowerCase().endsWith('.excalidraw.md')) {
        try {
          const stats = fs.statSync(fullPath);
          const relPath = path.relative(vaultPath, fullPath).replace(/\\/g, '/');
          const withoutExt = relPath.replace(/\.excalidraw(?:\.md)?$/i, '');
          drawings.push({
            id: slugifyPath(withoutExt),
            title: path.basename(relPath).replace(/\.excalidraw(?:\.md)?$/i, ''),
            path: relPath,
            createdAt: new Date(stats.birthtimeMs ? stats.birthtimeMs : stats.ctimeMs).toISOString(),
            updatedAt: new Date(stats.mtimeMs).toISOString(),
          });
        } catch {
          // Stat failures are ignored; the entry is simply omitted.
        }
      }
    }
  };

  visit(vaultPath);
  return drawings;
};
 | 
						||
 | 
						||
// Project full note records down to the lightweight metadata shape the UI lists
// (note: `filePath` is exposed as `path`).
const buildFileMetadata = (notes) =>
  notes.map(({ id, title, filePath, createdAt, updatedAt }) => ({
    id,
    title,
    path: filePath,
    createdAt,
    updatedAt,
  }));
 | 
						||
 | 
						||
// Parse a date-like value into a Date; null for absent or unparseable input.
const normalizeDateInput = (value) => {
  if (!value) {
    return null;
  }
  const parsed = new Date(value);
  if (Number.isNaN(parsed.getTime())) {
    return null;
  }
  return parsed;
};
 | 
						||
 | 
						||
// Inclusive membership test: start <= target <= end (all Date instances).
const isDateWithinRange = (target, start, end) => {
  const instant = target.getTime();
  return instant >= start.getTime() && instant <= end.getTime();
};
 | 
						||
 | 
						||
// Watch the vault for file-system changes. `ignoreInitial` suppresses the
// flood of events for pre-existing files; `awaitWriteFinish` holds events
// until a file's size has been stable for 250ms so half-written files are
// not broadcast or indexed mid-write.
const vaultWatcher = chokidar.watch(vaultDir, {
  persistent: true,
  ignoreInitial: true,
  awaitWriteFinish: {
    stabilityThreshold: 250,
    pollInterval: 100,
  },
});
 | 
						||
 | 
						||
// Relay raw watcher events to SSE subscribers with vault-relative paths.
const watchedVaultEvents = ['add', 'change', 'unlink', 'addDir', 'unlinkDir'];

for (const eventName of watchedVaultEvents) {
  vaultWatcher.on(eventName, (changedPath) => {
    const relativePath = path.relative(vaultDir, changedPath).replace(/\\/g, '/');
    broadcastVaultEvent({
      event: eventName,
      path: relativePath,
      timestamp: Date.now(),
    });
  });
}
 | 
						||
 | 
						||
// Integrate Meilisearch with Chokidar for incremental updates
// 'add': a markdown file appeared — invalidate caches, enrich its
// frontmatter, then index it (fire-and-forget).
vaultWatcher.on('add', async (filePath) => {
  if (filePath.toLowerCase().endsWith('.md')) {
    // Invalidate metadata cache (Phase 1)
    metadataCache.invalidate();
    
    // Enrich the frontmatter of newly added files before indexing.
    try {
      const enrichResult = await enrichFrontmatterOnOpen(filePath);
      if (enrichResult.modified) {
        console.log('[Watcher] Enriched frontmatter for new file:', path.basename(filePath));
      }
    } catch (enrichError) {
      // Enrichment is best-effort; indexing proceeds regardless.
      console.warn('[Watcher] Failed to enrich frontmatter for new file:', enrichError);
    }
    
    // Then index the file in Meilisearch (not awaited; errors are logged).
    upsertFile(filePath).catch(err => console.error('[Meili] Upsert on add failed:', err));
  }
});
 | 
						||
 | 
						||
// 'change': re-index modified markdown files in Meilisearch.
vaultWatcher.on('change', (filePath) => {
  if (!filePath.toLowerCase().endsWith('.md')) {
    return;
  }
  // The fast-metadata cache is stale as soon as any note changes (Phase 1).
  metadataCache.invalidate();
  // Fire-and-forget: indexing failures are logged, not surfaced.
  upsertFile(filePath).catch(err => console.error('[Meili] Upsert on change failed:', err));
});
 | 
						||
 | 
						||
// 'unlink': remove deleted markdown files from the Meilisearch index.
vaultWatcher.on('unlink', (filePath) => {
  if (!filePath.toLowerCase().endsWith('.md')) {
    return;
  }
  // Drop the cached metadata list (Phase 1) before removing the document.
  metadataCache.invalidate();
  const relativePath = path.relative(vaultDir, filePath).replace(/\\/g, '/');
  deleteFile(relativePath).catch(err => console.error('[Meili] Delete failed:', err));
});
 | 
						||
 | 
						||
// Tell SSE subscribers the watcher's initial scan has completed.
vaultWatcher.on('ready', () => {
  broadcastVaultEvent({ event: 'ready', timestamp: Date.now() });
});
 | 
						||
 | 
						||
// Log watcher failures and surface them to SSE subscribers.
vaultWatcher.on('error', (error) => {
  console.error('Vault watcher error:', error);
  const message = typeof error?.message === 'string' ? error.message : 'Unknown watcher error';
  broadcastVaultEvent({ event: 'error', message, timestamp: Date.now() });
});
 | 
						||
 | 
						||
// Warn early if the Angular build output is missing (server still starts).
if (!fs.existsSync(distDir)) {
  console.warn(`Warning: build directory not found at ${distDir}. Did you run \`npm run build\`?`);
}

// Serve the built Angular application's static assets.
app.use(express.static(distDir));

// Expose raw vault files for direct access when needed.
app.use('/vault', express.static(vaultDir));
 | 
						||
 | 
						||
// Attachment resolution: look for the named file by walking up the folders
// from the referencing note, then (optionally) under `base`, and finally by
// an exhaustive vault scan. Query params: name (required), note, base.
app.get('/api/attachments/resolve', (req, res) => {
  try {
    const rawName = typeof req.query.name === 'string' ? req.query.name.trim() : '';
    if (!rawName) {
      return res.status(400).type('text/plain').send('Missing required query parameter: name');
    }

    const sanitize = (value) => value.replace(/\\/g, '/').replace(/^[/]+|[/]+$/g, '');
    const name = sanitize(rawName);
    const noteRelPath = typeof req.query.note === 'string' ? sanitize(req.query.note) : '';
    const baseRaw = typeof req.query.base === 'string' ? req.query.base : '';
    const baseRel = sanitize(baseRaw);

    // SECURITY: `sanitize` strips leading slashes but not `..` segments, so a
    // request like `name=../../etc/passwd` would otherwise escape the vault.
    // Every candidate is resolved and checked against the vault root before
    // being served.
    const vaultRoot = path.resolve(vaultDir);
    const isInsideVault = (absolutePath) =>
      absolutePath === vaultRoot || absolutePath.startsWith(vaultRoot + path.sep);

    const candidateDirs = new Set();

    // Add `dir[/extra]` as a candidate directory ('' means the vault root).
    const addCandidate = (dir, extra) => {
      const dirSegments = sanitize(dir);
      const extraSegments = sanitize(extra);
      if (dirSegments && extraSegments) {
        candidateDirs.add(`${dirSegments}/${extraSegments}`);
      } else if (dirSegments) {
        candidateDirs.add(dirSegments);
      } else if (extraSegments) {
        candidateDirs.add(extraSegments);
      } else {
        candidateDirs.add('');
      }
    };

    // Parent folders of the note, from innermost outward.
    if (noteRelPath) {
      const segments = noteRelPath.split('/');
      segments.pop(); // drop the note's file name
      while (true) {
        const currentDir = segments.join('/');
        addCandidate(currentDir, baseRel);
        addCandidate(currentDir, '');
        if (!segments.length) break;
        segments.pop();
      }
    }

    // If base is provided, also try it directly from the vault root.
    if (baseRel) {
      addCandidate('', baseRel);
    }

    // Always try the vault root itself as a last candidate.
    addCandidate('', '');

    for (const dir of candidateDirs) {
      const absoluteDir = dir ? path.join(vaultDir, dir) : vaultDir;
      const candidatePath = path.resolve(absoluteDir, name);
      if (!isInsideVault(candidatePath)) {
        continue; // traversal attempt — never serve files outside the vault
      }
      try {
        if (fs.existsSync(candidatePath) && fs.statSync(candidatePath).isFile()) {
          return res.sendFile(candidatePath);
        }
      } catch {
        // Ignore and keep trying other candidates.
      }
    }

    // Exhaustive search as a last resort (more expensive); matches by
    // case-insensitive file name anywhere under the vault.
    const stack = [vaultDir];
    const nameLower = name.toLowerCase();
    while (stack.length) {
      const currentDir = stack.pop();
      let entries = [];
      try {
        entries = fs.readdirSync(currentDir, { withFileTypes: true });
      } catch {
        continue;
      }

      for (const entry of entries) {
        const fullPath = path.join(currentDir, entry.name);
        if (entry.isDirectory()) {
          stack.push(fullPath);
        } else if (entry.isFile() && entry.name.toLowerCase() === nameLower) {
          return res.sendFile(fullPath);
        }
      }
    }

    return res.status(404).type('text/plain').send(`‼️Attachement ${rawName} introuvable`);
  } catch (error) {
    console.error('Attachment resolve error:', error);
    return res.status(500).type('text/plain').send('Attachment resolver internal error');
  }
});
 | 
						||
 | 
						||
// Liveness probe.
app.get('/api/health', (req, res) => {
  res.json({
    status: 'ok',
    timestamp: new Date().toISOString(),
  });
});
 | 
						||
 | 
						||
// SSE stream of vault file-system events. Sends a 'connected' frame
// immediately, then frames produced by broadcastVaultEvent; the client is
// unregistered (and its heartbeat stopped) when the request closes.
app.get('/api/vault/events', (req, res) => {
  res.set({
    'Content-Type': 'text/event-stream',
    'Cache-Control': 'no-cache',
    Connection: 'keep-alive',
    // Disable nginx response buffering so events are delivered immediately.
    'X-Accel-Buffering': 'no',
  });

  // flushHeaders may be absent depending on the underlying response object.
  res.flushHeaders?.();
  res.write(
    `data: ${JSON.stringify({
      event: 'connected',
      timestamp: Date.now(),
    })}\n\n`,
  );

  const client = registerVaultEventClient(res);

  req.on('close', () => {
    unregisterVaultEventClient(client);
  });
});
 | 
						||
 | 
						||
// Full vault content: every note with body, tags and dates (heavy endpoint).
app.get('/api/vault', async (req, res) => {
  try {
    const notes = await loadVaultNotes(vaultDir);
    res.json({ notes });
  } catch (error) {
    console.error('Failed to load vault notes:', error);
    res.status(500).json({ error: 'Unable to load vault notes.' });
  }
});
 | 
						||
 | 
						||
// Fast file list from Meilisearch (id, title, path, createdAt, updatedAt);
// falls back to a filesystem walk when the search backend is unavailable.
app.get('/api/files/list', async (req, res) => {
  // Meilisearch may store dates as epoch numbers; normalize to ISO strings.
  const toIso = (value) => (typeof value === 'number' ? new Date(value).toISOString() : value);

  try {
    const client = meiliClient();
    const indexUid = vaultIndexName(vaultDir);
    const index = await ensureIndexSettings(client, indexUid);
    const result = await index.search('', {
      limit: 10000,
      attributesToRetrieve: ['id', 'title', 'path', 'createdAt', 'updatedAt']
    });

    const hits = Array.isArray(result.hits) ? result.hits : [];
    const items = hits.map((hit) => ({
      id: hit.id,
      title: hit.title,
      path: hit.path,
      createdAt: toIso(hit.createdAt),
      updatedAt: toIso(hit.updatedAt),
    }));

    res.json(items);
  } catch (error) {
    console.error('Failed to list files via Meilisearch, falling back to FS:', error);
    try {
      const notes = await loadVaultNotes(vaultDir);
      res.json(buildFileMetadata(notes));
    } catch (err2) {
      console.error('FS fallback failed:', err2);
      res.status(500).json({ error: 'Unable to list files.' });
    }
  }
});
 | 
						||
 | 
						||
// Phase 3: Fast metadata endpoint with cache read-through and monitoring
setupMetadataEndpoint(app, metadataCache, performanceMonitor, vaultDir, meilisearchCircuitBreaker, retryWithBackoff);

// Phase 3: Paginated metadata endpoint with cache read-through and monitoring
// (same collaborators; registers its own route(s) on `app`).
setupPaginatedMetadataEndpoint(app, metadataCache, performanceMonitor, vaultDir, meilisearchCircuitBreaker, retryWithBackoff);
 | 
						||
 | 
						||
// File metadata listing: Meilisearch hits merged with .excalidraw.md drawings
// found on disk (drawings are only added when no indexed entry shares the
// same case-insensitive path). Falls back to a filesystem walk on error.
app.get('/api/files/metadata', async (req, res) => {
  try {
    // Prefer Meilisearch for fast metadata
    const client = meiliClient();
    const indexUid = vaultIndexName(vaultDir);
    const index = await ensureIndexSettings(client, indexUid);
    const result = await index.search('', {
      limit: 10000,
      attributesToRetrieve: ['id', 'title', 'path', 'createdAt', 'updatedAt']
    });

    // Dates may be stored as epoch numbers; normalize to ISO strings.
    const items = Array.isArray(result.hits) ? result.hits.map(hit => ({
      id: hit.id,
      title: hit.title,
      path: hit.path,
      createdAt: typeof hit.createdAt === 'number' ? new Date(hit.createdAt).toISOString() : hit.createdAt,
      updatedAt: typeof hit.updatedAt === 'number' ? new Date(hit.updatedAt).toISOString() : hit.updatedAt,
    })) : [];

    // Merge .excalidraw files discovered via FS
    const drawings = scanVaultDrawings(vaultDir);
    // Index by lowercased path so FS entries never duplicate indexed ones.
    const byPath = new Map(items.map(it => [String(it.path).toLowerCase(), it]));
    for (const d of drawings) {
      const key = String(d.path).toLowerCase();
      if (!byPath.has(key)) {
        byPath.set(key, d);
      }
    }

    res.json(Array.from(byPath.values()));
  } catch (error) {
    console.error('Failed to load file metadata via Meilisearch, falling back to FS:', error);
    try {
      // Fallback: same merge, but built from a full filesystem walk.
      const notes = await loadVaultNotes(vaultDir);
      const base = buildFileMetadata(notes);
      const drawings = scanVaultDrawings(vaultDir);
      const byPath = new Map(base.map(it => [String(it.path).toLowerCase(), it]));
      for (const d of drawings) {
        const key = String(d.path).toLowerCase();
        if (!byPath.has(key)) byPath.set(key, d);
      }
      res.json(Array.from(byPath.values()));
    } catch (err2) {
      console.error('FS fallback failed:', err2);
      res.status(500).json({ error: 'Unable to load file metadata.' });
    }
  }
});
 | 
						||
 | 
						||
// List files whose creation or update falls on the given calendar day
// (server-local time). Query param: date.
app.get('/api/files/by-date', async (req, res) => {
  const { date } = req.query;
  const targetDate = normalizeDateInput(date);

  if (!targetDate) {
    return res.status(400).json({ error: 'Invalid or missing date query parameter.' });
  }

  try {
    const notes = await loadVaultNotes(vaultDir);

    // Expand the target instant to the whole local day, inclusive.
    const startOfDay = new Date(targetDate);
    startOfDay.setHours(0, 0, 0, 0);
    const endOfDay = new Date(targetDate);
    endOfDay.setHours(23, 59, 59, 999);

    const matching = notes.filter((note) => {
      const createdAt = normalizeDateInput(note.createdAt);
      if (createdAt && isDateWithinRange(createdAt, startOfDay, endOfDay)) {
        return true;
      }
      const updatedAt = normalizeDateInput(note.updatedAt);
      return Boolean(updatedAt && isDateWithinRange(updatedAt, startOfDay, endOfDay));
    });

    res.json(buildFileMetadata(matching));
  } catch (error) {
    console.error('Failed to search files by date:', error);
    res.status(500).json({ error: 'Unable to search files by date.' });
  }
});
 | 
						||
 | 
						||
// List files created or updated within [start, end] (server-local days,
// inclusive). `end` defaults to `start` when omitted.
app.get('/api/files/by-date-range', async (req, res) => {
  const { start, end } = req.query;
  const startDate = normalizeDateInput(start);
  const endDate = normalizeDateInput(end ?? start);

  if (!startDate || !endDate || startDate > endDate) {
    return res.status(400).json({ error: 'Invalid start or end date parameters.' });
  }

  // Widen the bounds to whole local days before filtering.
  const normalizedStart = new Date(startDate);
  normalizedStart.setHours(0, 0, 0, 0);
  const normalizedEnd = new Date(endDate);
  normalizedEnd.setHours(23, 59, 59, 999);

  try {
    const notes = await loadVaultNotes(vaultDir);
    const filtered = notes.filter((note) => {
      const createdAt = normalizeDateInput(note.createdAt);
      const updatedAt = normalizeDateInput(note.updatedAt);
      const matchesCreated = createdAt && isDateWithinRange(createdAt, normalizedStart, normalizedEnd);
      const matchesUpdated = updatedAt && isDateWithinRange(updatedAt, normalizedStart, normalizedEnd);
      return matchesCreated || matchesUpdated;
    });

    res.json(buildFileMetadata(filtered));
  } catch (error) {
    console.error('Failed to search files by date range:', error);
    res.status(500).json({ error: 'Unable to search files by date range.' });
  }
});
 | 
						||
 | 
						||
// Bookmarks API - reads/writes <vault>/.obsidian/bookmarks.json
// (JSON body parsing is enabled here and required by the POST endpoints below.)
app.use(express.json());
 | 
						||
 | 
						||
// Structured frontend log ingestion: accepts a single record or an array.
// Each valid record is echoed to the server console; invalid entries are
// warned about and skipped. Responds 202 once all records are processed.
app.post('/api/log', (req, res) => {
  try {
    const payload = req.body;

    if (!payload) {
      return res.status(400).json({ error: 'Missing log payload' });
    }

    const records = Array.isArray(payload) ? payload : [payload];

    for (const record of records) {
      if (!record || typeof record !== 'object') {
        console.warn('[FrontendLog] Ignored invalid record', record);
        continue;
      }

      const {
        event = 'UNKNOWN_EVENT',
        level = 'info',
        sessionId,
        userAgent,
        context = {},
        data,
      } = record;

      // Compact one-line summary of the client context.
      const summary = {
        sessionId,
        route: context?.route ?? 'n/a',
        theme: context?.theme ?? 'n/a',
        version: context?.version ?? 'n/a',
      };
      if (data !== undefined) {
        summary.data = data;
      }

      console.log(`[FrontendLog:${level}]`, event, summary, userAgent ?? '');
    }

    return res.status(202).json({ ok: true });
  } catch (error) {
    console.error('Failed to process frontend logs:', error);
    return res.status(500).json({ error: 'Failed to process logs' });
  }
});
 | 
						||
 | 
						||
// Minimal client log relay: mirrors the record to the server console at the
// requested level (unknown levels fall back to console.log). Always 202.
app.post('/api/logs', (req, res) => {
  const { source = 'frontend', level = 'info', message = '', data = null, timestamp = Date.now() } = req.body || {};

  const prefix = `[ClientLog:${source}]`;
  const payload = data !== undefined ? { data } : undefined;
  const stamp = new Date(timestamp).toISOString();

  // Map log level to the matching console writer.
  const writers = {
    error: console.error,
    warn: console.warn,
    debug: console.debug,
  };
  const write = writers[level] ?? console.log;
  write(prefix, message, payload ?? '', stamp);

  res.status(202).json({ status: 'queued' });
});
 | 
						||
 | 
						||
// --- Files API (supports .excalidraw.md (Markdown-wrapped JSON), .excalidraw, .json and binary sidecars) ---
// Helpers
// Normalize a client-supplied relative path: forward slashes, no leading '/'.
const sanitizeRelPath = (rel) => {
  const text = String(rel || '');
  return text.replace(/\\/g, '/').replace(/^\/+/, '');
};
 | 
						||
// Resolve a vault-relative path to an absolute one, refusing any path that
// escapes the vault. Throws an Error with `status: 400` on violation.
const resolveVaultPath = (rel) => {
  const clean = sanitizeRelPath(rel);
  const root = path.resolve(vaultDir);
  const abs = path.resolve(root, clean);
  // A bare startsWith(root) prefix test would wrongly accept sibling
  // directories such as `/vault-backup` for root `/vault`; compare on a
  // path-separator boundary instead.
  if (abs !== root && !abs.startsWith(root + path.sep)) {
    throw Object.assign(new Error('Invalid path'), { status: 400 });
  }
  return abs;
};
 | 
						||
 | 
						||
// Zod schema for a raw Excalidraw scene payload (the JSON body accepted by
// PUT /api/files). `elements` is required; `appState` and `files` are
// optional string-keyed maps. `.passthrough()` keeps any extra top-level
// keys (e.g. `type`, `version`, `source`) instead of stripping them, so the
// scene round-trips losslessly.
const excalidrawSceneSchema = z.object({
  elements: z.array(z.any()),
  appState: z.record(z.any()).optional(),
  files: z.record(z.any()).optional(),
}).passthrough();
 | 
						||
 | 
						||
// Infer the HTTP Content-Type for a file from its (case-insensitive) name
// suffix; anything unrecognized is served as a generic binary stream.
function guessContentType(filePath) {
  const lowered = filePath.toLowerCase();
  const suffixToMime = [
    ['.md', 'text/markdown; charset=utf-8'],
    ['.json', 'application/json; charset=utf-8'],
  ];
  for (const [suffix, mime] of suffixToMime) {
    if (lowered.endsWith(suffix)) return mime;
  }
  return 'application/octet-stream';
}
 | 
						||
 | 
						||
// GET file content (supports .excalidraw.md, .excalidraw, .json, .md)
//
// Query: ?path=<vault-relative path>. Responses:
//   - Excalidraw-family files: normalized scene JSON ({elements, appState, files});
//   - plain .md: the (possibly front-matter-enriched) markdown text;
//   - ETag header carries a content hash used by PUT's If-Match check.
app.get('/api/files', async (req, res) => {
  try {
    const pathParam = req.query.path;
    if (!pathParam || typeof pathParam !== 'string') {
      return res.status(400).json({ error: 'Missing path query parameter' });
    }
    
    // NOTE(review): req.query is already URL-decoded by Express, so this is a
    // second decode — presumably existing clients double-encode; verify before changing.
    const rel = decodeURIComponent(pathParam);
    const abs = resolveVaultPath(rel);
    
    if (!fs.existsSync(abs) || !fs.statSync(abs).isFile()) {
      return res.status(404).json({ error: 'File not found' });
    }
    
    // ".excalidraw.md" is a double extension, so it is detected on the
    // basename, not via extname.
    const base = path.basename(abs).toLowerCase();
    const ext = path.extname(abs).toLowerCase();
    const isExcalidrawMd = base.endsWith('.excalidraw.md');
    const isExcalidraw = ext === '.excalidraw' || ext === '.json' || isExcalidrawMd;
    
    if (!isExcalidraw && ext !== '.md') {
      return res.status(415).json({ error: 'Unsupported file type' });
    }
    
    // For regular markdown files, enrich front-matter before reading
    if (!isExcalidraw && ext === '.md') {
      try {
        const enrichResult = await enrichFrontmatterOnOpen(abs);
        
        // If modified, trigger Meilisearch reindex (fire-and-forget; a
        // failure only logs a warning and does not block the response)
        if (enrichResult.modified) {
          upsertFile(abs).catch(err => 
            console.warn('[GET /api/files] Failed to reindex after enrichment:', err)
          );
        }
        
        const rev = calculateSimpleHash(enrichResult.content);
        res.setHeader('ETag', rev);
        res.setHeader('Content-Type', guessContentType(abs));
        return res.send(enrichResult.content);
      } catch (enrichError) {
        console.error('[GET /api/files] Front-matter enrichment failed:', enrichError);
        // Fallback to reading without enrichment
      }
    }
    
    const content = fs.readFileSync(abs, 'utf-8');
    
    // For Excalidraw files, parse and return JSON
    if (isExcalidraw) {
      const data = parseExcalidrawAny(content);
      
      if (!data || !isValidExcalidrawScene(data)) {
        return res.status(400).json({ error: 'Invalid Excalidraw format' });
      }
      
      // Normalize scene structure so the client always receives all three
      // top-level keys, even when absent in the stored file
      const normalized = {
        elements: Array.isArray(data.elements) ? data.elements : [],
        appState: (data && typeof data.appState === 'object') ? data.appState : {},
        files: (data && typeof data.files === 'object') ? data.files : {}
      };
      
      // ETag is computed over the raw on-disk content, not the normalized form
      const rev = calculateSimpleHash(content);
      res.setHeader('ETag', rev);
      res.setHeader('Content-Type', 'application/json; charset=utf-8');
      return res.send(JSON.stringify(normalized));
    }
    
    // For regular markdown, return as-is (fallback when enrichment failed)
    const rev = calculateSimpleHash(content);
    res.setHeader('ETag', rev);
    res.setHeader('Content-Type', guessContentType(abs));
    return res.send(content);
    
  } catch (error) {
    // resolveVaultPath attaches .status (400) on path-traversal attempts
    const code = typeof error?.status === 'number' ? error.status : 500;
    console.error('GET /api/files error:', error);
    res.status(code).json({ error: 'Internal server error' });
  }
});
 | 
						||
 | 
						||
// PUT file content with If-Match check and size limit (10MB)
//
// Body is either:
//   - application/json: a raw Excalidraw scene validated against
//     excalidrawSceneSchema, converted to Obsidian .excalidraw.md when the
//     target path ends with that suffix (plain JSON otherwise);
//   - text/markdown: already-formatted content written through verbatim.
// On success responds { rev } with the same value in the ETag header.
app.put('/api/files', express.json({ limit: '10mb' }), express.text({ limit: '10mb', type: 'text/markdown' }), (req, res) => {
  try {
    const pathParam = req.query.path;
    if (!pathParam || typeof pathParam !== 'string') {
      return res.status(400).json({ error: 'Missing path query parameter' });
    }
    
    const rel = decodeURIComponent(pathParam);
    const abs = resolveVaultPath(rel);
    const dir = path.dirname(abs);
    if (!fs.existsSync(dir)) fs.mkdirSync(dir, { recursive: true });
    
    // Media type without parameters (strips e.g. "; charset=utf-8")
    const contentType = (req.headers['content-type'] || '').split(';')[0];
    const base = path.basename(abs).toLowerCase();
    const isExcalidrawMd = base.endsWith('.excalidraw.md');
    
    let finalContent;
    let existingFrontMatter = null;
    console.log('[PUT /api/files] path=%s contentType=%s isExcalidrawMd=%s', rel, contentType, isExcalidrawMd);
    
    // Extract existing front matter if file exists, so a JSON scene upload
    // does not discard user-edited front matter. Best-effort: a read error
    // just means no front matter is carried over.
    if (fs.existsSync(abs) && isExcalidrawMd) {
      try {
        const existing = fs.readFileSync(abs, 'utf-8');
        existingFrontMatter = extractFrontMatter(existing);
      } catch {}
    }
    
    // Handle JSON payload (Excalidraw scene)
    if (contentType === 'application/json') {
      const body = req.body;
      const parsed = excalidrawSceneSchema.safeParse(body);
      
      if (!parsed.success) {
        // Only the first few issues are logged/returned to keep output small
        console.warn('[PUT /api/files] invalid scene schema', parsed.error?.issues?.slice(0,3));
        return res.status(400).json({ 
          error: 'Invalid Excalidraw scene', 
          issues: parsed.error.issues?.slice(0, 5) 
        });
      }
      
      // Convert to Obsidian format if target is .excalidraw.md
      if (isExcalidrawMd) {
        finalContent = toObsidianExcalidrawMd(parsed.data, existingFrontMatter);
      } else {
        // Plain JSON for .excalidraw or .json files
        finalContent = JSON.stringify(parsed.data, null, 2);
      }
    } 
    // Handle text/markdown payload (already formatted)
    else if (contentType === 'text/markdown') {
      finalContent = typeof req.body === 'string' ? req.body : String(req.body);
    } 
    else {
      console.warn('[PUT /api/files] unsupported content-type', contentType);
      return res.status(400).json({ error: 'Unsupported content type' });
    }
    
    // Check size limit on the post-conversion content (which may differ
    // from the wire size already capped by the body-parser middleware)
    if (Buffer.byteLength(finalContent, 'utf-8') > 10 * 1024 * 1024) {
      console.warn('[PUT /api/files] payload too large path=%s size=%d', rel, Buffer.byteLength(finalContent, 'utf-8'));
      return res.status(413).json({ error: 'Payload too large' });
    }
    
    // Check for conflicts with If-Match: the client echoes the ETag it last
    // saw; a mismatch means the file was changed underneath it
    const hasExisting = fs.existsSync(abs);
    const ifMatch = req.headers['if-match'];
    if (hasExisting && ifMatch) {
      const current = fs.readFileSync(abs, 'utf-8');
      const currentRev = calculateSimpleHash(current);
      if (ifMatch !== currentRev) {
        console.warn('[PUT /api/files] conflict path=%s ifMatch=%s current=%s', rel, ifMatch, currentRev);
        return res.status(409).json({ error: 'Conflict detected' });
      }
    }
    
    // Atomic write with backup: write to .tmp then rename over the target;
    // on failure delete the temp file and restore the .bak copy
    const temp = abs + '.tmp';
    const backup = abs + '.bak';
    
    try {
      if (hasExisting) fs.copyFileSync(abs, backup);
      fs.writeFileSync(temp, finalContent, 'utf-8');
      fs.renameSync(temp, abs);
      console.log('[PUT /api/files] wrote file path=%s bytes=%d', rel, Buffer.byteLength(finalContent, 'utf-8'));
    } catch (e) {
      if (fs.existsSync(temp)) try { fs.unlinkSync(temp); } catch {}
      if (hasExisting && fs.existsSync(backup)) try { fs.copyFileSync(backup, abs); } catch {}
      console.error('[PUT /api/files] write error path=%s', rel, e);
      throw e;
    }
    
    const rev = calculateSimpleHash(finalContent);
    res.setHeader('ETag', rev);
    res.json({ rev });
    
  } catch (error) {
    // resolveVaultPath attaches .status (400) on path-traversal attempts
    const code = typeof error?.status === 'number' ? error.status : 500;
    console.error('PUT /api/files error:', error);
    res.status(code).json({ error: 'Internal server error' });
  }
});
 | 
						||
 | 
						||
// PUT binary sidecar (e.g., PNG/SVG)
//
// Streams the raw request body to <vault>/<path>. Only .png/.svg targets
// are accepted and the body is capped at 10MB.
//
// Fixes over the previous version:
//   - the extension allowlist is checked BEFORE buffering the body, so a
//     rejected upload no longer consumes up to 10MB first;
//   - the size cap uses an O(1) running total (previously chunks.reduce()
//     rescanned all chunks on every 'data' event — O(n²));
//   - an oversize upload now receives an HTTP 413 before the socket is
//     destroyed (previously the connection was dropped with no response);
//   - the 'end'/'error' handlers are guarded so nothing writes to the file
//     or to an already-answered response after an abort.
app.put('/api/files/blob', (req, res) => {
  try {
    const pathParam = req.query.path;
    if (!pathParam || typeof pathParam !== 'string') {
      return res.status(400).json({ error: 'Missing path query parameter' });
    }

    const rel = decodeURIComponent(pathParam);
    const abs = resolveVaultPath(rel);

    // Basic allowlist, enforced up front
    const ext = path.extname(abs).toLowerCase();
    if (!['.png', '.svg'].includes(ext)) {
      return res.status(415).json({ error: 'unsupported_media_type' });
    }

    const dir = path.dirname(abs);
    if (!fs.existsSync(dir)) fs.mkdirSync(dir, { recursive: true });

    const MAX_BYTES = 10 * 1024 * 1024; // 10MB limit
    const chunks = [];
    let total = 0;
    let aborted = false;

    req.on('data', (chunk) => {
      if (aborted) return;
      total += chunk.length;
      if (total > MAX_BYTES) {
        aborted = true;
        res.status(413).json({ error: 'payload_too_large' });
        req.destroy();
        return;
      }
      chunks.push(chunk);
    });

    req.on('end', () => {
      if (aborted) return;
      fs.writeFileSync(abs, Buffer.concat(chunks));
      res.json({ ok: true });
    });

    req.on('error', (err) => {
      console.error('Blob upload error:', err);
      // Only respond if nothing has been sent yet (a 413 may already be out)
      if (!res.headersSent) res.status(500).json({ error: 'internal_error' });
    });
  } catch (error) {
    // resolveVaultPath attaches .status (400) on path-traversal attempts
    const code = typeof error?.status === 'number' ? error.status : 500;
    console.error('PUT /api/files/blob error:', error);
    res.status(code).json({ error: 'internal_error' });
  }
});
 | 
						||
 | 
						||
/**
 * Make sure <vault>/.obsidian/bookmarks.json exists, seeding it with an
 * empty bookmarks document ({ items: [] }) on first use.
 * @returns {{ obsidianDir: string, bookmarksPath: string }}
 */
function ensureBookmarksStorage() {
  const obsidianDir = path.join(vaultDir, '.obsidian');
  // recursive:true makes this a no-op when the directory already exists
  fs.mkdirSync(obsidianDir, { recursive: true });

  const bookmarksPath = path.join(obsidianDir, 'bookmarks.json');
  if (!fs.existsSync(bookmarksPath)) {
    fs.writeFileSync(bookmarksPath, JSON.stringify({ items: [] }, null, 2), 'utf-8');
  }

  return { obsidianDir, bookmarksPath };
}

// Ensure bookmarks storage is ready on startup
ensureBookmarksStorage();
 | 
						||
 | 
						||
// Graph config API - reads/writes <vault>/.obsidian/graph.json
/**
 * Guarantee that <vault>/.obsidian/graph.json exists, writing Obsidian's
 * default graph settings on first run.
 * @returns {{ obsidianDir: string, graphPath: string }}
 */
function ensureGraphStorage() {
  const obsidianDir = path.join(vaultDir, '.obsidian');
  // recursive:true makes this a no-op when the directory already exists
  fs.mkdirSync(obsidianDir, { recursive: true });

  const graphPath = path.join(obsidianDir, 'graph.json');
  if (!fs.existsSync(graphPath)) {
    // Default graph config matching Obsidian defaults
    const defaultConfig = {
      'collapse-filter': false,
      search: '',
      showTags: false,
      showAttachments: false,
      hideUnresolved: false,
      showOrphans: true,
      'collapse-color-groups': false,
      colorGroups: [],
      'collapse-display': false,
      showArrow: false,
      textFadeMultiplier: 0,
      nodeSizeMultiplier: 1,
      lineSizeMultiplier: 1,
      'collapse-forces': false,
      centerStrength: 0.3,
      repelStrength: 17,
      linkStrength: 0.5,
      linkDistance: 200,
      scale: 1,
      close: false,
    };
    fs.writeFileSync(graphPath, JSON.stringify(defaultConfig, null, 2), 'utf-8');
  }

  return { obsidianDir, graphPath };
}

// Ensure graph storage is ready on startup
ensureGraphStorage();
 | 
						||
 | 
						||
// Return the graph config plus a content-hash revision for optimistic locking.
app.get('/api/vault/graph', (req, res) => {
  try {
    const { graphPath } = ensureGraphStorage();
    const raw = fs.readFileSync(graphPath, 'utf-8');
    res.json({ config: JSON.parse(raw), rev: calculateSimpleHash(raw) });
  } catch (error) {
    console.error('Failed to load graph config:', error);
    res.status(500).json({ error: 'Unable to load graph config.' });
  }
});
 | 
						||
 | 
						||
// Save the graph config. Honors optimistic locking via If-Match (409 on
// mismatch) and writes atomically (.tmp + rename) with a .bak fallback.
//
// Fixes: the .tmp file was leaked when renameSync failed (now unlinked),
// and a missing/non-object body made JSON.stringify return undefined so
// fs.writeFileSync threw a TypeError and the client got an opaque 500
// (now an explicit 400).
app.put('/api/vault/graph', (req, res) => {
  try {
    const { graphPath } = ensureGraphStorage();

    // Reject bodies that cannot be serialized as a JSON document
    if (req.body === null || typeof req.body !== 'object') {
      return res.status(400).json({ error: 'Request body must be a JSON object' });
    }

    const ifMatch = req.headers['if-match'];

    // Check for conflicts if If-Match header is present
    if (ifMatch) {
      const currentContent = fs.readFileSync(graphPath, 'utf-8');
      const currentRev = calculateSimpleHash(currentContent);
      if (ifMatch !== currentRev) {
        return res.status(409).json({ error: 'Conflict: File modified externally' });
      }
    }

    // Create backup before writing
    const backupPath = graphPath + '.bak';
    if (fs.existsSync(graphPath)) {
      fs.copyFileSync(graphPath, backupPath);
    }

    // Atomic write: write to temp file, then rename
    const tempPath = graphPath + '.tmp';
    const content = JSON.stringify(req.body, null, 2);

    try {
      fs.writeFileSync(tempPath, content, 'utf-8');
      fs.renameSync(tempPath, graphPath);
    } catch (writeError) {
      // Remove the stray temp file and restore the backup if it exists
      if (fs.existsSync(tempPath)) {
        try { fs.unlinkSync(tempPath); } catch {}
      }
      if (fs.existsSync(backupPath)) {
        fs.copyFileSync(backupPath, graphPath);
      }
      throw writeError;
    }

    const newRev = calculateSimpleHash(content);
    res.json({ rev: newRev });
  } catch (error) {
    console.error('Failed to save graph config:', error);
    res.status(500).json({ error: 'Unable to save graph config.' });
  }
});
 | 
						||
 | 
						||
// Return the bookmarks document (spread at top level) plus a content-hash
// revision for optimistic locking.
app.get('/api/vault/bookmarks', (req, res) => {
  try {
    const { bookmarksPath } = ensureBookmarksStorage();
    const raw = fs.readFileSync(bookmarksPath, 'utf-8');
    const doc = JSON.parse(raw);
    res.json({ ...doc, rev: calculateSimpleHash(raw) });
  } catch (error) {
    console.error('Failed to load bookmarks:', error);
    res.status(500).json({ error: 'Unable to load bookmarks.' });
  }
});
 | 
						||
 | 
						||
// Save the bookmarks document. Honors optimistic locking via If-Match
// (409 on mismatch) and writes atomically (.tmp + rename) with a .bak
// fallback.
//
// Fixes (mirroring PUT /api/vault/graph): the .tmp file was leaked when
// renameSync failed (now unlinked), and a missing/non-object body made
// JSON.stringify return undefined so fs.writeFileSync threw and the client
// got an opaque 500 (now an explicit 400).
app.put('/api/vault/bookmarks', (req, res) => {
  try {
    const { bookmarksPath } = ensureBookmarksStorage();

    // Reject bodies that cannot be serialized as a JSON document
    if (req.body === null || typeof req.body !== 'object') {
      return res.status(400).json({ error: 'Request body must be a JSON object' });
    }

    const ifMatch = req.headers['if-match'];

    // Check for conflicts if If-Match header is present
    if (ifMatch) {
      const currentContent = fs.readFileSync(bookmarksPath, 'utf-8');
      const currentRev = calculateSimpleHash(currentContent);
      if (ifMatch !== currentRev) {
        return res.status(409).json({ error: 'Conflict: File modified externally' });
      }
    }

    // Create backup before writing
    const backupPath = bookmarksPath + '.bak';
    if (fs.existsSync(bookmarksPath)) {
      fs.copyFileSync(bookmarksPath, backupPath);
    }

    // Atomic write: write to temp file, then rename
    const tempPath = bookmarksPath + '.tmp';
    const content = JSON.stringify(req.body, null, 2);

    try {
      fs.writeFileSync(tempPath, content, 'utf-8');
      fs.renameSync(tempPath, bookmarksPath);
    } catch (writeError) {
      // Remove the stray temp file and restore the backup if it exists
      if (fs.existsSync(tempPath)) {
        try { fs.unlinkSync(tempPath); } catch {}
      }
      if (fs.existsSync(backupPath)) {
        fs.copyFileSync(backupPath, bookmarksPath);
      }
      throw writeError;
    }

    const newRev = calculateSimpleHash(content);
    res.json({ rev: newRev });
  } catch (error) {
    console.error('Failed to save bookmarks:', error);
    res.status(500).json({ error: 'Unable to save bookmarks.' });
  }
});
 | 
						||
 | 
						||
// Simple non-cryptographic content hash used as a revision token ("rev"):
// a 32-bit djb2-style rolling hash (hash*31 + charCode, kept in int32 via
// the |0 coercion) rendered in base-36, suffixed with the content length.
function calculateSimpleHash(content) {
  let hash = 0;
  for (let i = 0; i < content.length; i += 1) {
    hash = ((hash << 5) - hash + content.charCodeAt(i)) | 0;
  }
  return `${Math.abs(hash).toString(36)}-${content.length}`;
}
 | 
						||
 | 
						||
// Meilisearch API endpoints
// Full-text search: translates an Obsidian-style query string into
// Meilisearch parameters and proxies the results.
app.get('/api/search', async (req, res) => {
  try {
    const { q = '', limit = '20', offset = '0', sort, highlight = 'true' } = req.query;

    // Parse Obsidian-style query, then build the Meilisearch request
    const parsedQuery = mapObsidianQueryToMeili(String(q));
    const searchParams = buildSearchParams(parsedQuery, {
      limit: Number(limit),
      offset: Number(offset),
      sort,
      highlight: highlight === 'true',
    });

    // Execute against the per-vault index (settings applied on demand)
    const index = await ensureIndexSettings(meiliClient(), vaultIndexName(vaultDir));
    const result = await index.search(searchParams.q, searchParams);

    const { hits, estimatedTotalHits, facetDistribution, processingTimeMs } = result;
    res.json({ hits, estimatedTotalHits, facetDistribution, processingTimeMs, query: q });
  } catch (error) {
    console.error('[Meili] Search failed:', error);
    res.status(500).json({ error: 'search_failed', message: error.message });
  }
});
 | 
						||
 | 
						||
// PUT /api/notes/:idOrPath/tags - Update tags for a specific note
// Accepts either a slug id or a vault-relative path (with or without .md), including slashes
//
// Body: { tags: string[] }. Rewrites the note's front-matter tags on disk
// (atomic write with .bak fallback), reindexes the file, and responds with
// { ok, tags, noteId }.
app.put(/^\/api\/notes\/(.+?)\/tags$/, express.json(), async (req, res) => {
  try {
    // Regex route: the capture group is exposed as req.params[0]
    const rawParam = req.params[0];
    const noteParam = decodeURIComponent(rawParam || '');
    const { tags } = req.body;

    if (!Array.isArray(tags)) {
      return res.status(400).json({ error: 'tags must be an array' });
    }

    // Resolve the note: by id first, then by path without the .md
    // extension (originalPath), then by path with it (filePath).
    const notes = await loadVaultNotes(vaultDir);
    let note = notes.find(n => n.id === noteParam);

    if (!note) {
      const withoutExt = noteParam.replace(/\.md$/i, '');
      note = notes.find(n => n.originalPath === withoutExt);
    }

    if (!note) {
      const withExt = /\.md$/i.test(noteParam) ? noteParam : `${noteParam}.md`;
      // Normalize slashes and tolerate a leading '/'
      const normalized = withExt.replace(/\\/g, '/');
      note = notes.find(n => n.filePath === normalized || n.filePath === normalized.replace(/^\//, ''));
    }

    if (!note || !note.filePath) {
      return res.status(404).json({ error: 'Note not found' });
    }

    const absolutePath = path.join(vaultDir, note.filePath);

    if (!fs.existsSync(absolutePath)) {
      return res.status(404).json({ error: 'File not found on disk' });
    }

    // Read current content and rewrite the front-matter tag block
    const currentContent = fs.readFileSync(absolutePath, 'utf-8');
    const updatedContent = rewriteTagsFrontmatter(currentContent, tags);

    // Atomic write with backup: temp file + rename, restore .bak on failure
    const tempPath = absolutePath + '.tmp';
    const backupPath = absolutePath + '.bak';

    try {
      fs.copyFileSync(absolutePath, backupPath);
      fs.writeFileSync(tempPath, updatedContent, 'utf-8');
      fs.renameSync(tempPath, absolutePath);

      console.log(`[Tags] Updated tags for note ${note.id} (${note.filePath})`);

      // Extract final tags from updated content (the rewrite may normalize)
      const finalTags = extractTagsFromFrontmatter(updatedContent);

      // Trigger Meilisearch reindex for this file. Fix: pass the absolute
      // path, consistent with the GET /api/files handler which calls
      // upsertFile(abs); the vault-relative note.filePath previously passed
      // here is not resolvable the same way.
      try {
        await upsertFile(absolutePath);
      } catch (indexError) {
        console.warn('[Tags] Failed to reindex after tag update:', indexError);
      }

      res.json({
        ok: true,
        tags: finalTags,
        noteId: note.id
      });

    } catch (writeError) {
      // Restore from backup on error
      if (fs.existsSync(tempPath)) {
        try { fs.unlinkSync(tempPath); } catch {}
      }
      if (fs.existsSync(backupPath)) {
        try { fs.copyFileSync(backupPath, absolutePath); } catch {}
      }
      throw writeError;
    }

  } catch (error) {
    console.error('[Tags] Update failed:', error);
    res.status(500).json({
      error: 'Failed to update tags',
      message: error.message
    });
  }
});
 | 
						||
 | 
						||
// Manually rebuild the whole Meilisearch index for the vault.
app.post('/api/reindex', async (_req, res) => {
  try {
    console.log('[Meili] Manual reindex triggered');
    const result = await fullReindex();
    res.json({ ok: true, ...result });
  } catch (error) {
    console.error('[Meili] Reindex failed:', error);
    res.status(500).json({ error: 'reindex_failed', message: error.message });
  }
});
 | 
						||
 | 
						||
// Get counts for Quick Links (favorites, templates, tasks, drafts, archive)
app.get('/api/quick-links/counts', async (req, res) => {
  try {
    const index = await ensureIndexSettings(meiliClient(), vaultIndexName(vaultDir));

    // Response key -> Meilisearch boolean filter ("favoris" is the indexed
    // field name behind the "favorites" key).
    const filters = [
      ['favorites', 'favoris = true'],
      ['templates', 'template = true'],
      ['tasks', 'task = true'],
      ['drafts', 'draft = true'],
      ['archive', 'archive = true'],
    ];

    // limit:0 fetches no documents — only the estimated hit count
    const results = await Promise.all(
      filters.map(([, filter]) => index.search('', { filter, limit: 0 }))
    );

    const counts = {};
    filters.forEach(([key], i) => {
      counts[key] = results[i].estimatedTotalHits || 0;
    });
    res.json(counts);
  } catch (error) {
    console.error('[Quick Links] Failed to get counts:', error);
    res.status(500).json({ error: 'Failed to get counts', message: error.message });
  }
});
 | 
						||
 | 
						||
// Serve index.html for every SPA route; fail loudly if the build is missing.
const sendIndex = (req, res) => {
  const indexPath = path.join(distDir, 'index.html');
  if (fs.existsSync(indexPath)) {
    res.sendFile(indexPath);
    return;
  }
  res
    .status(500)
    .send('Application build missing. Please run `npm run build` before starting the server.');
};
 | 
						||
 | 
						||
app.get('/', sendIndex);

// Catch-all: unknown /api/* paths get a JSON 404; everything else falls
// through to the SPA's index.html.
app.use((req, res) => (
  req.path.startsWith('/api/')
    ? res.status(404).json({ error: 'Not found' })
    : sendIndex(req, res)
));
 | 
						||
 | 
						||
// Create the vault directory if it does not exist
if (!fs.existsSync(vaultDir)) {
  fs.mkdirSync(vaultDir, { recursive: true });
  console.log('Created vault directory:', vaultDir);
}

// Bind on all interfaces so the server is reachable from other hosts/containers
app.listen(PORT, '0.0.0.0', () => {
  console.log(`ObsiViewer server running on http://0.0.0.0:${PORT}`);
  console.log(`Vault directory: ${vaultDir}`);
});
 |