Add daily wiki content export
Export script pulls all Outline documents via API and writes them as flat markdown files to content/wiki/ with frontmatter metadata. Cron wrapper auto-commits changes daily.
This commit is contained in:
parent
b5de1e575a
commit
2bb36af20e
37 changed files with 5272 additions and 0 deletions
33
scripts/export-content-cron.sh
Executable file
33
scripts/export-content-cron.sh
Executable file
|
|
@ -0,0 +1,33 @@
|
|||
#!/usr/bin/env bash
set -euo pipefail

# Daily wiki content export — run via cron:
# 0 4 * * * /path/to/wiki-ghostguild/scripts/export-content-cron.sh >> /var/log/wiki-export.log 2>&1

REPO_DIR="$(cd "$(dirname "$0")/.." && pwd)"
cd "$REPO_DIR"

# Source env vars (line-by-line to handle unquoted values with spaces).
# Blank lines and comments are skipped; `value` captures everything after
# the FIRST '=', so values that themselves contain '=' survive intact.
# The file is optional: variables may come from the ambient environment
# instead (see the token check below).
if [[ -f outline.env ]]; then
  while IFS='=' read -r key value; do
    [[ -z "$key" || "$key" =~ ^# ]] && continue
    export "$key=$value"
  done < outline.env
fi

# Map Outline's URL to OUTLINE_URL if not already set.
# ${URL:-} guards against `set -u` aborting when neither variable exists;
# the unguarded $URL here previously killed the script before the friendly
# error message below could run.
export OUTLINE_URL="${OUTLINE_URL:-${URL:-}}"

# OUTLINE_API_TOKEN must be set in the environment or in outline.env
if [[ -z "${OUTLINE_API_TOKEN:-}" ]]; then
  echo "Error: OUTLINE_API_TOKEN is not set" >&2
  exit 1
fi

# Run export
node scripts/export-content.js

# Commit and push if there are changes
git add content/wiki/
if ! git diff --cached --quiet; then
  git commit -m "wiki content export $(date +%Y-%m-%d)"
  git push
fi
|
||||
194
scripts/export-content.js
Executable file
194
scripts/export-content.js
Executable file
|
|
@ -0,0 +1,194 @@
|
|||
#!/usr/bin/env node
|
||||
|
||||
/**
|
||||
* Export all Outline wiki content as markdown files to content/wiki/.
|
||||
*
|
||||
* Usage:
|
||||
* OUTLINE_URL=https://wiki.ghostguild.org OUTLINE_API_TOKEN=your-token node scripts/export-content.js
|
||||
*
|
||||
* Cron (daily at 4 AM UTC):
|
||||
* 0 4 * * * /path/to/wiki-ghostguild/scripts/export-content-cron.sh >> /var/log/wiki-export.log 2>&1
|
||||
*/
|
||||
|
||||
import fs from "fs/promises";
|
||||
import path from "path";
|
||||
import { fileURLToPath } from "url";
|
||||
import matter from "gray-matter";
|
||||
|
||||
// ESM has no __dirname; derive it from this module's URL.
const __dirname = path.dirname(fileURLToPath(import.meta.url));

// Configuration from the environment. `URL` is Outline's own variable name
// (see the cron wrapper); an explicit OUTLINE_URL wins when both are set.
const OUTLINE_URL = process.env.OUTLINE_URL || process.env.URL;
const OUTLINE_API_TOKEN = process.env.OUTLINE_API_TOKEN;

// All markdown output lands here, relative to the repo root.
const OUTPUT_DIR = path.resolve(__dirname, "../content/wiki");

// Pause between successive API calls to stay under Outline's rate limit.
const RATE_LIMIT_MS = 200;

// Fail fast before any API call if credentials are incomplete.
if (!OUTLINE_URL || !OUTLINE_API_TOKEN) {
  console.error(
    "Error: OUTLINE_URL and OUTLINE_API_TOKEN env vars are required."
  );
  process.exit(1);
}
|
||||
|
||||
/**
 * POST a JSON body to an Outline API endpoint and return the parsed
 * JSON response. Outline's RPC-style API uses POST for everything,
 * including reads.
 *
 * @param {string} endpoint - path segment after /api/, e.g. "documents.list"
 * @param {object} body - request payload, serialized as JSON
 * @returns {Promise<object>} parsed response body
 * @throws {Error} when the response status is not 2xx
 */
async function outlineApi(endpoint, body) {
  const response = await fetch(`${OUTLINE_URL}/api/${endpoint}`, {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      Authorization: `Bearer ${OUTLINE_API_TOKEN}`,
    },
    body: JSON.stringify(body),
  });

  if (response.ok) {
    return response.json();
  }

  // Include the response text so API errors are diagnosable from cron logs.
  const text = await response.text();
  throw new Error(`API ${endpoint} failed (${response.status}): ${text}`);
}
|
||||
|
||||
// Resolve after `ms` milliseconds — awaited between API calls for rate limiting.
function delay(ms) {
  return new Promise((done) => {
    setTimeout(done, ms);
  });
}
|
||||
|
||||
/**
 * Turn arbitrary text into a filesystem-safe slug: lowercased, every run
 * of non-alphanumeric characters collapsed to a single hyphen, and
 * leading/trailing hyphens trimmed.
 */
function slugify(text) {
  const lowered = text.toLowerCase();
  const hyphenated = lowered.replace(/[^a-z0-9]+/g, "-");
  return hyphenated.replace(/^-+|-+$/g, "");
}
|
||||
|
||||
/**
 * Fetch every collection, paginating past the API's per-request cap.
 * The previous single-call version silently truncated the export at
 * 100 collections; this mirrors the pagination loop used for documents.
 *
 * @returns {Promise<Array<object>>} all collection objects
 */
async function fetchAllCollections() {
  const collections = [];
  const limit = 100;
  let offset = 0;

  while (true) {
    const result = await outlineApi("collections.list", { limit, offset });
    collections.push(...result.data);
    // A short page (or a response with no pagination info) means we're done.
    if (!result.pagination || result.data.length < limit) break;
    offset += limit;
    await delay(RATE_LIMIT_MS);
  }

  return collections;
}
|
||||
|
||||
/**
 * Fetch every document in a collection, walking the paginated
 * documents.list endpoint until a short page signals the end.
 *
 * @param {string} collectionId - Outline collection ID
 * @returns {Promise<Array<object>>} all document objects in the collection
 */
async function fetchAllDocuments(collectionId) {
  const limit = 100;
  const collected = [];
  let offset = 0;
  let page;

  do {
    // Throttle before every request, including the first.
    await delay(RATE_LIMIT_MS);
    page = await outlineApi("documents.list", { collectionId, limit, offset });
    collected.push(...page.data);
    offset += limit;
  } while (page.pagination && page.data.length >= limit);

  return collected;
}
|
||||
|
||||
/**
 * Build a human-readable path for a document by walking its parent chain
 * through docMap, e.g. "Collection/Parent/Child". A missing parent entry
 * truncates the chain rather than failing.
 *
 * @param {object} doc - document whose path is wanted
 * @param {Map<string, object>} docMap - all documents keyed by ID
 * @param {string} collectionName - name of the owning collection
 * @returns {string} slash-joined path rooted at the collection name
 */
function buildPath(doc, docMap, collectionName) {
  const titles = [];
  for (let node = doc; node; node = docMap.get(node.parentDocumentId)) {
    titles.unshift(node.title);
    if (!node.parentDocumentId) break;
  }
  return `${collectionName}/${titles.join("/")}`;
}
|
||||
|
||||
/**
 * Title of a document's direct parent, or null when the document is
 * top-level or its parent is absent from the map.
 */
function getParentTitle(doc, docMap) {
  if (!doc.parentDocumentId) return null;
  const parent = docMap.get(doc.parentDocumentId) ?? null;
  return parent && parent.title;
}
|
||||
|
||||
/**
 * Export every Outline document as a flat markdown file with YAML
 * frontmatter into OUTPUT_DIR, then delete stale files from previous runs.
 *
 * Fixes over the previous version:
 * - Two documents sharing a title within a collection produced the same
 *   `collection--title.md` filename, so one silently overwrote the other;
 *   collisions are now disambiguated with the document's Outline ID.
 * - A title that slugifies to "" (symbols-only/empty) produced the
 *   malformed filename `collection--.md`; it now falls back to "untitled".
 */
async function main() {
  // Ensure output directory exists
  await fs.mkdir(OUTPUT_DIR, { recursive: true });

  console.log("Fetching collections...");
  const collections = await fetchAllCollections();
  console.log(`Found ${collections.length} collections.\n`);

  // Build a map of collection ID → name
  const collectionMap = new Map();
  for (const col of collections) {
    collectionMap.set(col.id, col.name);
  }

  // Fetch all documents across all collections
  const allDocs = [];
  for (const col of collections) {
    console.log(`Fetching docs from "${col.name}"...`);
    const docs = await fetchAllDocuments(col.id);
    console.log(` ${docs.length} documents`);
    allDocs.push(...docs);
  }

  // Build lookup map for parent chain resolution
  const docMap = new Map();
  for (const doc of allDocs) {
    docMap.set(doc.id, doc);
  }

  // Write all documents
  const writtenFiles = new Set();
  let count = 0;

  for (const doc of allDocs) {
    const collectionName = collectionMap.get(doc.collectionId) || "unknown";
    const collectionSlug = slugify(collectionName);
    // Fall back so an empty/symbols-only title can't yield "col--.md".
    const docSlug = slugify(doc.title) || "untitled";
    let filename = `${collectionSlug}--${docSlug}.md`;

    // Same-title documents in one collection previously clobbered each
    // other here; append the Outline ID to keep both files.
    if (writtenFiles.has(filename)) {
      filename = `${collectionSlug}--${docSlug}--${slugify(doc.id)}.md`;
    }

    const docPath = buildPath(doc, docMap, collectionName);
    const parentTitle = getParentTitle(doc, docMap);

    // Metadata preserved in frontmatter so hierarchy survives the
    // flat-file layout.
    const frontmatter = {
      title: doc.title,
      collection: collectionName,
      path: docPath,
      parentDocument: parentTitle,
      outlineId: doc.id,
      updatedAt: doc.updatedAt,
      createdBy: doc.createdBy?.email || doc.createdBy?.name || null,
    };

    const content = matter.stringify(doc.text || "", frontmatter);
    const filePath = path.join(OUTPUT_DIR, filename);

    await fs.writeFile(filePath, content, "utf-8");
    writtenFiles.add(filename);
    count++;
  }

  console.log(`\nWrote ${count} documents to content/wiki/`);

  // Clean up orphaned files: any .md not produced by this run was
  // deleted (or renamed) in Outline and should disappear from the repo.
  const existing = (await fs.readdir(OUTPUT_DIR)).filter((f) =>
    f.endsWith(".md")
  );
  let removed = 0;

  for (const file of existing) {
    if (!writtenFiles.has(file)) {
      await fs.unlink(path.join(OUTPUT_DIR, file));
      console.log(` Removed orphan: ${file}`);
      removed++;
    }
  }

  if (removed > 0) {
    console.log(`Removed ${removed} orphaned files.`);
  }

  console.log("Export complete.");
}
|
||||
|
||||
// Entry point: run the export; log any failure and exit non-zero so the
// cron wrapper (and its log) can detect a broken run.
try {
  await main();
} catch (err) {
  console.error("Export failed:", err);
  process.exit(1);
}
|
||||
120
scripts/package-lock.json
generated
Normal file
120
scripts/package-lock.json
generated
Normal file
|
|
@ -0,0 +1,120 @@
|
|||
{
|
||||
"name": "scripts",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"dependencies": {
|
||||
"gray-matter": "^4.0.3"
|
||||
}
|
||||
},
|
||||
"node_modules/argparse": {
|
||||
"version": "1.0.10",
|
||||
"resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz",
|
||||
"integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"sprintf-js": "~1.0.2"
|
||||
}
|
||||
},
|
||||
"node_modules/esprima": {
|
||||
"version": "4.0.1",
|
||||
"resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz",
|
||||
"integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==",
|
||||
"license": "BSD-2-Clause",
|
||||
"bin": {
|
||||
"esparse": "bin/esparse.js",
|
||||
"esvalidate": "bin/esvalidate.js"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=4"
|
||||
}
|
||||
},
|
||||
"node_modules/extend-shallow": {
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz",
|
||||
"integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"is-extendable": "^0.1.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/gray-matter": {
|
||||
"version": "4.0.3",
|
||||
"resolved": "https://registry.npmjs.org/gray-matter/-/gray-matter-4.0.3.tgz",
|
||||
"integrity": "sha512-5v6yZd4JK3eMI3FqqCouswVqwugaA9r4dNZB1wwcmrD02QkV5H0y7XBQW8QwQqEaZY1pM9aqORSORhJRdNK44Q==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"js-yaml": "^3.13.1",
|
||||
"kind-of": "^6.0.2",
|
||||
"section-matter": "^1.0.0",
|
||||
"strip-bom-string": "^1.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=6.0"
|
||||
}
|
||||
},
|
||||
"node_modules/is-extendable": {
|
||||
"version": "0.1.1",
|
||||
"resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz",
|
||||
"integrity": "sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/js-yaml": {
|
||||
"version": "3.14.2",
|
||||
"resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.2.tgz",
|
||||
"integrity": "sha512-PMSmkqxr106Xa156c2M265Z+FTrPl+oxd/rgOQy2tijQeK5TxQ43psO1ZCwhVOSdnn+RzkzlRz/eY4BgJBYVpg==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"argparse": "^1.0.7",
|
||||
"esprima": "^4.0.0"
|
||||
},
|
||||
"bin": {
|
||||
"js-yaml": "bin/js-yaml.js"
|
||||
}
|
||||
},
|
||||
"node_modules/kind-of": {
|
||||
"version": "6.0.3",
|
||||
"resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz",
|
||||
"integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/section-matter": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/section-matter/-/section-matter-1.0.0.tgz",
|
||||
"integrity": "sha512-vfD3pmTzGpufjScBh50YHKzEu2lxBWhVEHsNGoEXmCmn2hKGfeNLYMzCJpe8cD7gqX7TJluOVpBkAequ6dgMmA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"extend-shallow": "^2.0.1",
|
||||
"kind-of": "^6.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=4"
|
||||
}
|
||||
},
|
||||
"node_modules/sprintf-js": {
|
||||
"version": "1.0.3",
|
||||
"resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz",
|
||||
"integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==",
|
||||
"license": "BSD-3-Clause"
|
||||
},
|
||||
"node_modules/strip-bom-string": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/strip-bom-string/-/strip-bom-string-1.0.0.tgz",
|
||||
"integrity": "sha512-uCC2VHvQRYu+lMh4My/sFNmF2klFymLX1wHJeXnbEJERpV/ZsVuonzerjfrGpIGF7LBVa1O7i9kjiWvJiFck8g==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Loading…
Add table
Add a link
Reference in a new issue