Export script pulls all Outline documents via API and writes them as flat markdown files to content/wiki/ with frontmatter metadata. Cron wrapper auto-commits changes daily.
194 lines · 4.9 KiB · JavaScript · Executable file
#!/usr/bin/env node
/**
 * Export all Outline wiki content as markdown files to content/wiki/.
 *
 * Usage:
 *   OUTLINE_URL=https://wiki.ghostguild.org OUTLINE_API_TOKEN=your-token node scripts/export-content.js
 *
 * Cron (daily at 4 AM UTC):
 *   0 4 * * * /path/to/wiki-ghostguild/scripts/export-content-cron.sh >> /var/log/wiki-export.log 2>&1
 */
import fs from "fs/promises";
|
|
import path from "path";
|
|
import { fileURLToPath } from "url";
|
|
import matter from "gray-matter";
|
|
|
|
// Resolve this script's own directory so output paths work regardless of the
// process's current working directory.
const __dirname = path.dirname(fileURLToPath(import.meta.url));

// Base URL of the Outline instance; OUTLINE_URL preferred, URL as fallback.
const OUTLINE_URL = process.env.OUTLINE_URL || process.env.URL;
// API token sent as a Bearer credential on every request.
const OUTLINE_API_TOKEN = process.env.OUTLINE_API_TOKEN;
// Destination directory for the exported markdown files.
const OUTPUT_DIR = path.resolve(__dirname, "../content/wiki");
// Pause between paginated API calls — presumably to stay under Outline's
// rate limits; TODO confirm the instance's actual limit.
const RATE_LIMIT_MS = 200;

// Fail fast with a clear message when required configuration is missing.
if (!OUTLINE_URL || !OUTLINE_API_TOKEN) {
  console.error(
    "Error: OUTLINE_URL and OUTLINE_API_TOKEN env vars are required."
  );
  process.exit(1);
}
|
|
|
|
/**
 * POST to an Outline API endpoint and return the parsed JSON response.
 *
 * @param {string} endpoint - Endpoint path relative to /api/, e.g. "documents.list".
 * @param {object} body - JSON-serializable request payload.
 * @returns {Promise<object>} Parsed JSON response body.
 * @throws {Error} When the response status is not 2xx; includes status and body text.
 */
async function outlineApi(endpoint, body) {
  const url = `${OUTLINE_URL}/api/${endpoint}`;
  const headers = {
    "Content-Type": "application/json",
    Authorization: `Bearer ${OUTLINE_API_TOKEN}`,
  };

  const response = await fetch(url, {
    method: "POST",
    headers,
    body: JSON.stringify(body),
  });

  if (!response.ok) {
    const detail = await response.text();
    throw new Error(`API ${endpoint} failed (${response.status}): ${detail}`);
  }

  return response.json();
}
|
|
|
|
/**
 * Pause for the given number of milliseconds (used for API rate limiting).
 *
 * @param {number} ms - Delay duration in milliseconds.
 * @returns {Promise<void>} Resolves after the delay elapses.
 */
function delay(ms) {
  return new Promise((resolve) => {
    setTimeout(() => resolve(), ms);
  });
}
|
|
|
|
/**
 * Convert a title into a filesystem-safe slug: lowercase, runs of
 * non-alphanumeric characters collapsed to single hyphens, and any
 * leading/trailing hyphens stripped.
 *
 * @param {string} text - Arbitrary title text.
 * @returns {string} Slugified text (may be empty if nothing alphanumeric remains).
 */
function slugify(text) {
  const lowered = text.toLowerCase();
  const hyphenated = lowered.replace(/[^a-z0-9]+/g, "-");
  return hyphenated.replace(/^-+/, "").replace(/-+$/, "");
}
|
|
|
|
/**
 * Fetch every collection in the Outline instance.
 *
 * Pages through collections.list with offset/limit (mirroring
 * fetchAllDocuments) so instances with more than 100 collections are
 * fully covered — the previous single call silently truncated at 100.
 *
 * @returns {Promise<object[]>} All collection objects.
 */
async function fetchAllCollections() {
  const collections = [];
  const limit = 100;
  let offset = 0;

  while (true) {
    await delay(RATE_LIMIT_MS);
    const result = await outlineApi("collections.list", { limit, offset });
    collections.push(...result.data);
    // A missing pagination object or a short page signals the last page.
    if (!result.pagination || result.data.length < limit) break;
    offset += limit;
  }

  return collections;
}
|
|
|
|
/**
 * Fetch all documents belonging to a collection, paging through
 * documents.list until a short page signals the end.
 *
 * @param {string} collectionId - Outline collection ID.
 * @returns {Promise<object[]>} All document objects in the collection.
 */
async function fetchAllDocuments(collectionId) {
  const pageSize = 100;
  const collected = [];

  for (let offset = 0; ; offset += pageSize) {
    await delay(RATE_LIMIT_MS);
    const page = await outlineApi("documents.list", {
      collectionId,
      limit: pageSize,
      offset,
    });

    collected.push(...page.data);

    // A missing pagination object or a short page means the last page.
    if (!page.pagination || page.data.length < pageSize) {
      return collected;
    }
  }
}
|
|
|
|
/**
 * Build a human-readable document path like "Collection/Parent/Child"
 * by walking the parent chain through docMap.
 *
 * Stops when a parent is missing from the map (e.g. not exported) or
 * when a cycle is detected in the parent chain — the original loop
 * would hang forever on a cyclic chain.
 *
 * @param {object} doc - Document whose path to build.
 * @param {Map<string, object>} docMap - All documents keyed by ID.
 * @param {string} collectionName - Name of the containing collection.
 * @returns {string} Slash-joined path prefixed with the collection name.
 */
function buildPath(doc, docMap, collectionName) {
  const parts = [doc.title];
  const seen = new Set([doc.id]);
  let current = doc;

  while (current.parentDocumentId) {
    const parent = docMap.get(current.parentDocumentId);
    // Stop on a missing parent or a cycle in the parent chain.
    if (!parent || seen.has(parent.id)) break;
    seen.add(parent.id);
    parts.unshift(parent.title);
    current = parent;
  }

  return `${collectionName}/${parts.join("/")}`;
}
|
|
|
|
/**
 * Look up the title of a document's parent, if it has one.
 *
 * @param {object} doc - Document whose parent to resolve.
 * @param {Map<string, object>} docMap - All documents keyed by ID.
 * @returns {string|null} Parent title, or null when the document has no
 *   parent or the parent is not present in docMap.
 */
function getParentTitle(doc, docMap) {
  if (!doc.parentDocumentId) return null;
  const parent = docMap.get(doc.parentDocumentId);
  if (!parent) return null;
  return parent.title;
}
|
|
|
|
/**
 * Export every Outline document as a markdown file under content/wiki/.
 *
 * Filenames are "<collection-slug>--<doc-slug>.md". When two documents
 * would slugify to the same filename (same title in one collection), a
 * short suffix derived from the document ID is appended so neither
 * silently overwrites the other — previously the later document clobbered
 * the earlier one. After writing, any .md file in the output directory
 * that no longer corresponds to a live document is deleted.
 */
async function main() {
  // Ensure output directory exists.
  await fs.mkdir(OUTPUT_DIR, { recursive: true });

  console.log("Fetching collections...");
  const collections = await fetchAllCollections();
  console.log(`Found ${collections.length} collections.\n`);

  // Collection ID → name, for frontmatter and filename prefixes.
  const collectionMap = new Map(collections.map((col) => [col.id, col.name]));

  // Fetch all documents across all collections.
  const allDocs = [];
  for (const col of collections) {
    console.log(`Fetching docs from "${col.name}"...`);
    const docs = await fetchAllDocuments(col.id);
    console.log(`  ${docs.length} documents`);
    allDocs.push(...docs);
  }

  // Document ID → document, for resolving parent chains.
  const docMap = new Map(allDocs.map((doc) => [doc.id, doc]));

  // Write all documents.
  const writtenFiles = new Set();
  let count = 0;

  for (const doc of allDocs) {
    const collectionName = collectionMap.get(doc.collectionId) || "unknown";
    let filename = `${slugify(collectionName)}--${slugify(doc.title)}.md`;

    // Duplicate titles within a collection collide on slug; append a
    // short, stable suffix from the document ID to keep both files.
    if (writtenFiles.has(filename)) {
      const suffix = String(doc.id).replace(/-/g, "").slice(0, 8);
      filename = filename.replace(/\.md$/, `--${suffix}.md`);
    }

    const frontmatter = {
      title: doc.title,
      collection: collectionName,
      path: buildPath(doc, docMap, collectionName),
      parentDocument: getParentTitle(doc, docMap),
      outlineId: doc.id,
      updatedAt: doc.updatedAt,
      // createdBy may be absent on list responses — fall back to null.
      createdBy: doc.createdBy?.email || doc.createdBy?.name || null,
    };

    const content = matter.stringify(doc.text || "", frontmatter);
    await fs.writeFile(path.join(OUTPUT_DIR, filename), content, "utf-8");
    writtenFiles.add(filename);
    count++;
  }

  console.log(`\nWrote ${count} documents to content/wiki/`);

  // Clean up markdown files left over from documents that no longer exist.
  const existing = (await fs.readdir(OUTPUT_DIR)).filter((f) =>
    f.endsWith(".md")
  );
  let removed = 0;

  for (const file of existing) {
    if (writtenFiles.has(file)) continue;
    await fs.unlink(path.join(OUTPUT_DIR, file));
    console.log(`  Removed orphan: ${file}`);
    removed++;
  }

  if (removed > 0) {
    console.log(`Removed ${removed} orphaned files.`);
  }

  console.log("Export complete.");
}
|
|
|
|
// Run the export. Any failure is reported and the process exits non-zero
// so the cron wrapper's log captures the error.
try {
  await main();
} catch (err) {
  console.error("Export failed:", err);
  process.exit(1);
}
|