#!/usr/bin/env node
/**
 * Export all Outline wiki content as markdown files to content/wiki/.
 *
 * Usage:
 *   OUTLINE_URL=https://wiki.ghostguild.org OUTLINE_API_TOKEN=your-token node scripts/export-content.js
 *
 * Cron (daily at 4 AM UTC):
 *   0 4 * * * /path/to/wiki-ghostguild/scripts/export-content-cron.sh >> /var/log/wiki-export.log 2>&1
 */
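/*
 * Note: the cron line above invokes a small wrapper script
 * (scripts/export-content-cron.sh) that is not part of this file. A minimal
 * sketch of such a wrapper, assuming it lives in scripts/ and supplies the two
 * required env vars itself (the values below are the placeholders from the
 * usage example, not real credentials):
 *
 *   #!/usr/bin/env bash
 *   set -euo pipefail
 *   cd "$(dirname "$0")/.."        # run from the repo root
 *   export OUTLINE_URL="https://wiki.ghostguild.org"
 *   export OUTLINE_API_TOKEN="your-token"
 *   exec node scripts/export-content.js
 */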
import fs from "fs/promises";
import path from "path";
import { fileURLToPath } from "url";
import matter from "gray-matter";

const __dirname = path.dirname(fileURLToPath(import.meta.url));
const OUTLINE_URL = process.env.OUTLINE_URL || process.env.URL;
const OUTLINE_API_TOKEN = process.env.OUTLINE_API_TOKEN;
const OUTPUT_DIR = path.resolve(__dirname, "../content/wiki");
const RATE_LIMIT_MS = 200;

if (!OUTLINE_URL || !OUTLINE_API_TOKEN) {
  console.error(
    "Error: OUTLINE_URL and OUTLINE_API_TOKEN env vars are required."
  );
  process.exit(1);
}

// Call an Outline API endpoint. Every Outline API endpoint is a POST request.
async function outlineApi(endpoint, body) {
  const res = await fetch(`${OUTLINE_URL}/api/${endpoint}`, {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      Authorization: `Bearer ${OUTLINE_API_TOKEN}`,
    },
    body: JSON.stringify(body),
  });
  if (!res.ok) {
    const text = await res.text();
    throw new Error(`API ${endpoint} failed (${res.status}): ${text}`);
  }
  return res.json();
}

function delay(ms) {
  return new Promise((resolve) => setTimeout(resolve, ms));
}

// Turn a title into a filesystem-safe slug.
function slugify(text) {
  return text
    .toLowerCase()
    .replace(/[^a-z0-9]+/g, "-")
    .replace(/^-+|-+$/g, "");
}

async function fetchAllCollections() {
  const { data } = await outlineApi("collections.list", { limit: 100 });
  return data;
}

// Page through documents.list for one collection, throttled by RATE_LIMIT_MS.
async function fetchAllDocuments(collectionId) {
  const docs = [];
  let offset = 0;
  const limit = 100;
  while (true) {
    await delay(RATE_LIMIT_MS);
    const result = await outlineApi("documents.list", {
      collectionId,
      limit,
      offset,
    });
    docs.push(...result.data);
    if (!result.pagination || result.data.length < limit) break;
    offset += limit;
  }
  return docs;
}

// Human-readable path ("Collection/Parent/Title") stored in frontmatter.
function buildPath(doc, docMap, collectionName) {
  const parts = [doc.title];
  let current = doc;
  while (current.parentDocumentId) {
    const parent = docMap.get(current.parentDocumentId);
    if (!parent) break;
    parts.unshift(parent.title);
    current = parent;
  }
  return `${collectionName}/${parts.join("/")}`;
}

// Slugified relative file path ("collection/parent/title.md") used on disk.
function buildRelFilePath(doc, docMap, collectionName) {
  const parts = [slugify(doc.title)];
  let current = doc;
  while (current.parentDocumentId) {
    const parent = docMap.get(current.parentDocumentId);
    if (!parent) break;
    parts.unshift(slugify(parent.title));
    current = parent;
  }
  parts.unshift(slugify(collectionName));
  return parts.join("/") + ".md";
}

// Recursively list all .md files under dir, as forward-slash paths relative to baseDir.
async function walkMarkdownFiles(dir, baseDir) {
  const entries = await fs.readdir(dir, { withFileTypes: true });
  const results = [];
  for (const entry of entries) {
    const full = path.join(dir, entry.name);
    if (entry.isDirectory()) {
      results.push(...(await walkMarkdownFiles(full, baseDir)));
    } else if (entry.name.endsWith(".md")) {
      results.push(path.relative(baseDir, full).split(path.sep).join("/"));
    }
  }
  return results;
}

function getParentTitle(doc, docMap) {
  if (!doc.parentDocumentId) return null;
  const parent = docMap.get(doc.parentDocumentId);
  return parent ? parent.title : null;
}

async function main() {
  // Ensure output directory exists
  await fs.mkdir(OUTPUT_DIR, { recursive: true });

  console.log("Fetching collections...");
  const collections = await fetchAllCollections();
  console.log(`Found ${collections.length} collections.\n`);

  // Build a map of collection ID → name
  const collectionMap = new Map();
  for (const col of collections) {
    collectionMap.set(col.id, col.name);
  }

  // Fetch all documents across all collections
  const allDocs = [];
  for (const col of collections) {
    console.log(`Fetching docs from "${col.name}"...`);
    const docs = await fetchAllDocuments(col.id);
    console.log(`  ${docs.length} documents`);
    allDocs.push(...docs);
  }

  // Build lookup map for parent chain resolution
  const docMap = new Map();
  for (const doc of allDocs) {
    docMap.set(doc.id, doc);
  }

  // Write all documents
  const writtenFiles = new Set();
  const collectionSlugs = new Set(
    Array.from(collectionMap.values()).map(slugify)
  );
  let count = 0;
  for (const doc of allDocs) {
    const collectionName = collectionMap.get(doc.collectionId) || "unknown";
    const relPath = buildRelFilePath(doc, docMap, collectionName);
    const docPath = buildPath(doc, docMap, collectionName);
    const parentTitle = getParentTitle(doc, docMap);

    const frontmatter = {
      title: doc.title,
      collection: collectionName,
      path: docPath,
      parentDocument: parentTitle,
      outlineId: doc.id,
      createdBy: doc.createdBy?.email || doc.createdBy?.name || null,
    };

    // Pass an object (not a bare string) so gray-matter doesn't run the body
    // through its frontmatter parser. Otherwise an Outline doc whose body
    // starts with `---` (markdown horizontal rule) gets misread as having
    // YAML frontmatter and crashes the export.
    const content = matter.stringify({ content: doc.text || "" }, frontmatter);

    const filePath = path.join(OUTPUT_DIR, relPath);
    await fs.mkdir(path.dirname(filePath), { recursive: true });
    await fs.writeFile(filePath, content, "utf-8");
    writtenFiles.add(relPath);
    count++;
  }

  console.log(`\nWrote ${count} documents to content/wiki/`);

  // Orphan cleanup is scoped to files we own:
  //   - anything under a known collection/ directory
  //   - legacy flat files matching collection--*.md at the top level
  // This leaves sibling dirs (hub/, _local/, .claude/) and loose source files alone.
  const existing = await walkMarkdownFiles(OUTPUT_DIR, OUTPUT_DIR);
  let removed = 0;
  for (const rel of existing) {
    if (writtenFiles.has(rel)) continue;
    const segments = rel.split("/");
    const topDir = segments.length > 1 ? segments[0] : null;
    const flatSlug = segments.length === 1 ? rel.split("--")[0] : null;
    const owned =
      (topDir && collectionSlugs.has(topDir)) ||
      (flatSlug && collectionSlugs.has(flatSlug));
    if (owned) {
      await fs.unlink(path.join(OUTPUT_DIR, rel));
      console.log(`  Removed orphan: ${rel}`);
      removed++;
    }
  }
  if (removed > 0) {
    console.log(`Removed ${removed} orphaned files.`);
  }

  console.log("Export complete.");
}

main().catch((err) => {
  console.error("Export failed:", err);
  process.exit(1);
});