From 16aef35682f46fffbf17c7ee90de76968e7b59b9 Mon Sep 17 00:00:00 2001 From: Jennie Robinson Faber Date: Tue, 21 Apr 2026 13:12:19 +0100 Subject: [PATCH] Nest exported wiki docs under collection/parent folders Outline docs with the same title under different parents (e.g. Peer Support Playbook > Session Guides > Session 0 vs Manual > Session Content > Session 0) were silently overwriting each other under the flat {collection}--{title}.md naming, dropping 9 of 99 docs on every export. Use the full slugified doc path instead, and have orphan cleanup scope to known collection-slug roots (sweeping legacy flat files too) so sibling dirs like hub/, _local/, .claude/ stay untouched. Also switch the cron's git add to -A so doc deletions and renames in Outline actually propagate. --- scripts/export-content-cron.sh | 5 ++- scripts/export-content.js | 68 +++++++++++++++++++++++++++------- 2 files changed, 58 insertions(+), 15 deletions(-) diff --git a/scripts/export-content-cron.sh b/scripts/export-content-cron.sh index 2390910..74491d7 100755 --- a/scripts/export-content-cron.sh +++ b/scripts/export-content-cron.sh @@ -30,8 +30,9 @@ fi # Run export node scripts/export-content.js -# Commit and push if there are changes -git add content/wiki/ +# Commit and push if there are changes. -A stages deletions and renames too, +# so docs removed or renamed in Outline actually propagate to git. +git add -A content/wiki/ if ! 
git diff --cached --quiet; then git commit -m "wiki content export $(date +%Y-%m-%d)" git push diff --git a/scripts/export-content.js b/scripts/export-content.js index 40442ce..1fcf7e4 100755 --- a/scripts/export-content.js +++ b/scripts/export-content.js @@ -99,6 +99,35 @@ function buildPath(doc, docMap, collectionName) { return `${collectionName}/${parts.join("/")}`; } +function buildRelFilePath(doc, docMap, collectionName) { + const parts = [slugify(doc.title)]; + let current = doc; + + while (current.parentDocumentId) { + const parent = docMap.get(current.parentDocumentId); + if (!parent) break; + parts.unshift(slugify(parent.title)); + current = parent; + } + + parts.unshift(slugify(collectionName)); + return parts.join("/") + ".md"; +} + +async function walkMarkdownFiles(dir, baseDir) { + const entries = await fs.readdir(dir, { withFileTypes: true }); + const results = []; + for (const entry of entries) { + const full = path.join(dir, entry.name); + if (entry.isDirectory()) { + results.push(...(await walkMarkdownFiles(full, baseDir))); + } else if (entry.name.endsWith(".md")) { + results.push(path.relative(baseDir, full).split(path.sep).join("/")); + } + } + return results; +} + function getParentTitle(doc, docMap) { if (!doc.parentDocumentId) return null; const parent = docMap.get(doc.parentDocumentId); @@ -136,14 +165,15 @@ async function main() { // Write all documents const writtenFiles = new Set(); + const collectionSlugs = new Set( + Array.from(collectionMap.values()).map(slugify) + ); let count = 0; for (const doc of allDocs) { const collectionName = collectionMap.get(doc.collectionId) || "unknown"; - const collectionSlug = slugify(collectionName); - const docSlug = slugify(doc.title); - const filename = `${collectionSlug}--${docSlug}.md`; + const relPath = buildRelFilePath(doc, docMap, collectionName); const docPath = buildPath(doc, docMap, collectionName); const parentTitle = getParentTitle(doc, docMap); @@ -161,25 +191,37 @@ async function main() { 
// starts with `---` (markdown horizontal rule) gets misread as having // YAML frontmatter and crashes the export. const content = matter.stringify({ content: doc.text || "" }, frontmatter); - const filePath = path.join(OUTPUT_DIR, filename); + const filePath = path.join(OUTPUT_DIR, relPath); + await fs.mkdir(path.dirname(filePath), { recursive: true }); await fs.writeFile(filePath, content, "utf-8"); - writtenFiles.add(filename); + writtenFiles.add(relPath); count++; } console.log(`\nWrote ${count} documents to content/wiki/`); - // Clean up orphaned files - const existing = (await fs.readdir(OUTPUT_DIR)).filter((f) => - f.endsWith(".md") - ); + // Orphan cleanup is scoped to files we own: + // - anything under a known {collection-slug}/ directory + // - legacy flat files matching {collection-slug}--*.md at the top level + // This leaves sibling dirs (hub/, _local/, .claude/) and loose source files alone. + const existing = await walkMarkdownFiles(OUTPUT_DIR, OUTPUT_DIR); let removed = 0; - for (const file of existing) { - if (!writtenFiles.has(file)) { - await fs.unlink(path.join(OUTPUT_DIR, file)); - console.log(` Removed orphan: ${file}`); + for (const rel of existing) { + if (writtenFiles.has(rel)) continue; + + const segments = rel.split("/"); + const topDir = segments.length > 1 ? segments[0] : null; + const flatSlug = segments.length === 1 ? rel.split("--")[0] : null; + + const owned = + (topDir && collectionSlugs.has(topDir)) || + (flatSlug && collectionSlugs.has(flatSlug)); + + if (owned) { + await fs.unlink(path.join(OUTPUT_DIR, rel)); + console.log(` Removed orphan: ${rel}`); removed++; } }