Nest exported wiki docs under collection/parent folders
Outline docs with the same title under different parents (e.g. Peer Support
Playbook > Session Guides > Session 0 vs Manual > Session Content > Session 0)
were silently overwriting each other under the flat {collection}--{title}.md
naming, dropping 9 of 99 docs on every export. Use the full slugified doc
path instead, and have orphan cleanup scope to known collection-slug roots
(sweeping legacy flat files too) so sibling dirs like hub/, _local/, .claude/
stay untouched.
Also switch the cron's git add to -A so doc deletions and renames in Outline
actually propagate.
This commit is contained in:
parent
bac3fc6dd2
commit
16aef35682
2 changed files with 58 additions and 15 deletions
|
|
@@ -99,6 +99,35 @@ function buildPath(doc, docMap, collectionName) {
|
|||
return `${collectionName}/${parts.join("/")}`;
|
||||
}
|
||||
|
||||
function buildRelFilePath(doc, docMap, collectionName) {
|
||||
const parts = [slugify(doc.title)];
|
||||
let current = doc;
|
||||
|
||||
while (current.parentDocumentId) {
|
||||
const parent = docMap.get(current.parentDocumentId);
|
||||
if (!parent) break;
|
||||
parts.unshift(slugify(parent.title));
|
||||
current = parent;
|
||||
}
|
||||
|
||||
parts.unshift(slugify(collectionName));
|
||||
return parts.join("/") + ".md";
|
||||
}
|
||||
|
||||
async function walkMarkdownFiles(dir, baseDir) {
|
||||
const entries = await fs.readdir(dir, { withFileTypes: true });
|
||||
const results = [];
|
||||
for (const entry of entries) {
|
||||
const full = path.join(dir, entry.name);
|
||||
if (entry.isDirectory()) {
|
||||
results.push(...(await walkMarkdownFiles(full, baseDir)));
|
||||
} else if (entry.name.endsWith(".md")) {
|
||||
results.push(path.relative(baseDir, full).split(path.sep).join("/"));
|
||||
}
|
||||
}
|
||||
return results;
|
||||
}
|
||||
|
||||
function getParentTitle(doc, docMap) {
|
||||
if (!doc.parentDocumentId) return null;
|
||||
const parent = docMap.get(doc.parentDocumentId);
|
||||
|
|
@@ -136,14 +165,15 @@ async function main() {
|
|||
|
||||
// Write all documents
|
||||
const writtenFiles = new Set();
|
||||
const collectionSlugs = new Set(
|
||||
Array.from(collectionMap.values()).map(slugify)
|
||||
);
|
||||
let count = 0;
|
||||
|
||||
for (const doc of allDocs) {
|
||||
const collectionName = collectionMap.get(doc.collectionId) || "unknown";
|
||||
const collectionSlug = slugify(collectionName);
|
||||
const docSlug = slugify(doc.title);
|
||||
const filename = `${collectionSlug}--${docSlug}.md`;
|
||||
|
||||
const relPath = buildRelFilePath(doc, docMap, collectionName);
|
||||
const docPath = buildPath(doc, docMap, collectionName);
|
||||
const parentTitle = getParentTitle(doc, docMap);
|
||||
|
||||
|
|
@@ -161,25 +191,37 @@ async function main() {
|
|||
// starts with `---` (markdown horizontal rule) gets misread as having
|
||||
// YAML frontmatter and crashes the export.
|
||||
const content = matter.stringify({ content: doc.text || "" }, frontmatter);
|
||||
const filePath = path.join(OUTPUT_DIR, filename);
|
||||
const filePath = path.join(OUTPUT_DIR, relPath);
|
||||
|
||||
await fs.mkdir(path.dirname(filePath), { recursive: true });
|
||||
await fs.writeFile(filePath, content, "utf-8");
|
||||
writtenFiles.add(filename);
|
||||
writtenFiles.add(relPath);
|
||||
count++;
|
||||
}
|
||||
|
||||
console.log(`\nWrote ${count} documents to content/wiki/`);
|
||||
|
||||
// Clean up orphaned files
|
||||
const existing = (await fs.readdir(OUTPUT_DIR)).filter((f) =>
|
||||
f.endsWith(".md")
|
||||
);
|
||||
// Orphan cleanup is scoped to files we own:
|
||||
// - anything under a known <collection-slug>/ directory
|
||||
// - legacy flat files matching <collection-slug>--*.md at the top level
|
||||
// This leaves sibling dirs (hub/, _local/, .claude/) and loose source files alone.
|
||||
const existing = await walkMarkdownFiles(OUTPUT_DIR, OUTPUT_DIR);
|
||||
let removed = 0;
|
||||
|
||||
for (const file of existing) {
|
||||
if (!writtenFiles.has(file)) {
|
||||
await fs.unlink(path.join(OUTPUT_DIR, file));
|
||||
console.log(` Removed orphan: ${file}`);
|
||||
for (const rel of existing) {
|
||||
if (writtenFiles.has(rel)) continue;
|
||||
|
||||
const segments = rel.split("/");
|
||||
const topDir = segments.length > 1 ? segments[0] : null;
|
||||
const flatSlug = segments.length === 1 ? rel.split("--")[0] : null;
|
||||
|
||||
const owned =
|
||||
(topDir && collectionSlugs.has(topDir)) ||
|
||||
(flatSlug && collectionSlugs.has(flatSlug));
|
||||
|
||||
if (owned) {
|
||||
await fs.unlink(path.join(OUTPUT_DIR, rel));
|
||||
console.log(` Removed orphan: ${rel}`);
|
||||
removed++;
|
||||
}
|
||||
}
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue