Replace Nuxt wiki with Outline deployment config
Strip the Nuxt 4 static site and replace with Docker Compose config for self-hosted Outline wiki (Outline + PostgreSQL 16 + Redis 7). Adds nginx reverse proxy with WebSocket support and CSS injection, migration script for existing markdown articles, backup script, and starter theme CSS.
This commit is contained in:
parent
bcca73c17a
commit
c836df8825
91 changed files with 414 additions and 17714 deletions
|
|
@ -1,39 +0,0 @@
|
|||
#!/bin/bash
# Audit script to identify missing image references
#
# Scans every markdown article for inline image references of the form
# ![...](/img/...) and reports which referenced files exist on disk and
# which are missing.
#
# FIX: the project root used to be hard-coded to a per-machine absolute
# path (/Users/jennie/...), so the audit only worked on one laptop.
# It now defaults to the current working directory (the same directory
# the relative `grep content/articles/` already assumes) and can be
# overridden via the ROOT_DIR environment variable.
ROOT_DIR="${ROOT_DIR:-$PWD}"

echo "=== Image Reference Audit ==="
echo ""
echo "Finding all image references in articles..."
echo ""

# Collect unique image paths, e.g. "/img/foo.png", from markdown image syntax.
REFS=$(grep -rh '!\[' content/articles/ 2>/dev/null | grep -oE '\(/img/[^)]+\)' | sed 's/(//' | sed 's/)//' | sort -u)

# FIX: `echo "" | wc -l` prints 1, so an empty result used to be
# reported as one reference. Count 0 explicitly when nothing matched.
if [ -z "$REFS" ]; then
    REF_COUNT=0
else
    REF_COUNT=$(echo "$REFS" | wc -l)
fi
echo "Total image references found: $REF_COUNT"
echo ""
echo "Checking which exist in /public/img/:"
echo ""

MISSING_COUNT=0
EXISTING_COUNT=0

while read -r ref; do
    if [ -z "$ref" ]; then continue; fi

    filename=$(basename "$ref")
    # NOTE(review): the original resolved refs against the repo root
    # (ROOT + /img/...), although the messages mention /public/img/ —
    # behavior preserved; confirm which directory actually holds images.
    fullpath="${ROOT_DIR}${ref}"

    if [ -f "$fullpath" ]; then
        echo "✓ $filename"
        ((EXISTING_COUNT++))
    else
        echo "✗ MISSING: $filename"
        ((MISSING_COUNT++))
    fi
done <<< "$REFS"

echo ""
echo "=== Summary ==="
echo "Existing images: $EXISTING_COUNT"
echo "Missing images: $MISSING_COUNT"
echo ""
echo "Missing images need to be added to /public/img/ or updated in article markdown."
|
||||
130
scripts/migrate-content.js
Normal file
130
scripts/migrate-content.js
Normal file
|
|
@ -0,0 +1,130 @@
|
|||
#!/usr/bin/env node

/**
 * Migrate markdown articles from content/articles/ into Outline wiki.
 *
 * Usage:
 *   OUTLINE_URL=http://localhost:3100 OUTLINE_API_TOKEN=your-token node migrate-content.js
 *
 * Requires: npm install (in this directory) to get gray-matter.
 */

import fs from "fs/promises";
import path from "path";
import { fileURLToPath } from "url";
import matter from "gray-matter";

// ES modules have no __dirname; reconstruct it from the module URL so
// CONTENT_DIR resolves relative to this script, not the caller's cwd.
const __dirname = path.dirname(fileURLToPath(import.meta.url));

// Connection settings come exclusively from the environment (validated below).
const OUTLINE_URL = process.env.OUTLINE_URL;
const OUTLINE_API_TOKEN = process.env.OUTLINE_API_TOKEN;
// Source directory of the markdown articles, one level above this script.
const CONTENT_DIR = path.resolve(__dirname, "../content/articles");
// Pause between API calls to stay under Outline's rate limiting.
const RATE_LIMIT_MS = 200;

// Fail fast with a clear message rather than erroring mid-migration.
if (!OUTLINE_URL || !OUTLINE_API_TOKEN) {
  console.error("Error: OUTLINE_URL and OUTLINE_API_TOKEN env vars are required.");
  process.exit(1);
}
|
||||
|
||||
/**
 * POST a JSON payload to an Outline API endpoint and return the parsed
 * JSON response.
 *
 * @param {string} endpoint - Endpoint path, e.g. "documents.create".
 * @param {object} body - Request payload, serialized as JSON.
 * @returns {Promise<object>} Parsed response body.
 * @throws {Error} On any non-2xx response, including status and body text.
 */
async function outlineApi(endpoint, body) {
  const url = `${OUTLINE_URL}/api/${endpoint}`;
  const response = await fetch(url, {
    method: "POST",
    headers: {
      Authorization: `Bearer ${OUTLINE_API_TOKEN}`,
      "Content-Type": "application/json",
    },
    body: JSON.stringify(body),
  });

  if (response.ok) {
    return response.json();
  }

  const detail = await response.text();
  throw new Error(`API ${endpoint} failed (${response.status}): ${detail}`);
}
|
||||
|
||||
/**
 * Resolve after the given number of milliseconds; used to throttle
 * successive API calls.
 *
 * @param {number} ms - Milliseconds to wait.
 * @returns {Promise<void>}
 */
function delay(ms) {
  return new Promise((done) => {
    setTimeout(done, ms);
  });
}
|
||||
|
||||
/**
 * Return the ID of the Outline collection with the given name
 * (matched case-insensitively), creating the collection if no match
 * exists yet.
 *
 * @param {string} name - Collection display name.
 * @returns {Promise<string>} Collection ID.
 */
async function getOrCreateCollection(name) {
  const wanted = name.toLowerCase();

  // Look for an existing collection first.
  const listing = await outlineApi("collections.list", { limit: 100 });
  for (const collection of listing.data) {
    if (collection.name.toLowerCase() === wanted) {
      return collection.id;
    }
  }

  // Not found — create it (throttled like every other write).
  await delay(RATE_LIMIT_MS);
  const created = await outlineApi("collections.create", { name });
  console.log(` Created collection: ${name}`);
  return created.data.id;
}
|
||||
|
||||
/**
 * Migrate every .md file in CONTENT_DIR into Outline.
 *
 * For each file: parse frontmatter with gray-matter, derive a title
 * (frontmatter > first H1 > filename), map its category to an Outline
 * collection (created on demand), and publish the body as a document.
 * Failures are logged and counted but do not abort the run; calls are
 * throttled by RATE_LIMIT_MS.
 */
async function main() {
  const files = (await fs.readdir(CONTENT_DIR)).filter((f) => f.endsWith(".md"));
  console.log(`Found ${files.length} markdown files to migrate.\n`);

  // Category name → collection ID, so each collection is resolved at most once.
  const collectionIds = new Map();

  let success = 0;
  let failed = 0;

  for (const file of files) {
    const filePath = path.join(CONTENT_DIR, file);
    const raw = await fs.readFile(filePath, "utf-8");
    const { data: frontmatter, content } = matter(raw);

    // Determine title: frontmatter title, or first H1, or filename.
    // String() guards against non-string YAML titles (e.g. a bare number),
    // and trim() drops stray whitespace captured from the H1 match.
    const title = String(
      frontmatter.title ||
        content.match(/^#\s+(.+)$/m)?.[1] ||
        path.basename(file, ".md").replace(/-/g, " ")
    ).trim();

    // Determine collection from category ("my-cat" → "My Cat").
    // String() guards against non-string YAML category values.
    const category = frontmatter.category || "Uncategorized";
    const categoryName = String(category)
      .replace(/-/g, " ")
      .replace(/\b\w/g, (c) => c.toUpperCase());

    if (!collectionIds.has(categoryName)) {
      try {
        const id = await getOrCreateCollection(categoryName);
        collectionIds.set(categoryName, id);
        await delay(RATE_LIMIT_MS);
      } catch (err) {
        console.error(` Failed to get/create collection "${categoryName}": ${err.message}`);
        failed++;
        continue; // can't place this document without a collection
      }
    }

    const collectionId = collectionIds.get(categoryName);

    try {
      const { data: doc } = await outlineApi("documents.create", {
        title,
        text: content.trim(),
        collectionId,
        publish: true,
      });
      console.log(` ✓ ${title} → ${doc.url}`);
      success++;
    } catch (err) {
      console.error(` ✗ ${title}: ${err.message}`);
      failed++;
    }

    await delay(RATE_LIMIT_MS);
  }

  console.log(`\nDone. ${success} migrated, ${failed} failed.`);
}
|
||||
|
||||
// Entry point: surface any unhandled error and exit non-zero so shell
// callers and CI can detect a failed migration.
main().catch((err) => {
  console.error("Migration failed:", err);
  process.exit(1);
});
|
||||
39
scripts/outline-backup.sh
Executable file
39
scripts/outline-backup.sh
Executable file
|
|
@ -0,0 +1,39 @@
|
|||
#!/usr/bin/env bash
set -euo pipefail

# =============================================================================
# Outline Wiki Backup Script
# Backs up PostgreSQL database and file storage from Docker containers.
#
# Usage:
#   ./outline-backup.sh [backup_dir]
#
# Crontab (daily at 3 AM):
#   0 3 * * * /path/to/outline-backup.sh /backups/outline >> /var/log/outline-backup.log 2>&1
# =============================================================================

BACKUP_DIR="${1:-/backups/outline}"
DATE=$(date +%Y-%m-%d_%H%M)
RETENTION_DAYS=14

mkdir -p "$BACKUP_DIR"

echo "=== Outline Backup — $DATE ==="

# PostgreSQL dump (custom format, restorable with pg_restore)
echo "Backing up PostgreSQL..."
docker exec outline-postgres pg_dump -U outline -Fc outline \
  > "$BACKUP_DIR/outline-db-$DATE.dump"
echo "  Database: outline-db-$DATE.dump"

# File storage backup — `docker cp CONTAINER:PATH -` streams a tar
# archive to stdout, which we compress on the host.
echo "Backing up file storage..."
docker cp outline:/var/lib/outline/data - \
  | gzip > "$BACKUP_DIR/outline-files-$DATE.tar.gz"
echo "  Files: outline-files-$DATE.tar.gz"

# Prune old backups.
# FIX: restrict deletion to this script's own artifacts ("outline-*")
# so unrelated files in a shared backup directory are never removed.
echo "Pruning backups older than $RETENTION_DAYS days..."
find "$BACKUP_DIR" -type f -name 'outline-*' -mtime +"$RETENTION_DAYS" -delete

echo "=== Backup complete ==="
|
||||
7
scripts/package.json
Normal file
7
scripts/package.json
Normal file
|
|
@ -0,0 +1,7 @@
|
|||
{
|
||||
"private": true,
|
||||
"type": "module",
|
||||
"dependencies": {
|
||||
"gray-matter": "^4.0.3"
|
||||
}
|
||||
}
|
||||
Loading…
Add table
Add a link
Reference in a new issue