No more TypeScript parser. Only Rust!
The SSE route changed a bit to fit the Rust parser.
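In short: the post API route now shells out to the compiled Rust backend (markdown_backend) and returns its JSON directly, mapping the snake_case created_at field to createdAt for the frontend, and the SSE route spawns the same binary in watch mode instead of using chokidar. Roughly the call pattern, as a sketch of what the diff below does (slugPath here is just an example value, not the literal route code):

    import { spawnSync } from 'child_process';
    import path from 'path';

    const slugPath = 'my-first-post'; // example slug for illustration

    // Ask the Rust backend to render one post; it prints JSON on stdout.
    const result = spawnSync(
      path.resolve(process.cwd(), 'markdown_backend/target/release/markdown_backend'),
      ['show', slugPath],
      { encoding: 'utf-8' }
    );
    if (result.status === 0 && result.stdout) {
      const post = JSON.parse(result.stdout);
      post.createdAt = post.created_at; // the Rust side emits snake_case
      delete post.created_at;
    }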
@@ -1,172 +1,34 @@
-export const dynamic = "force-dynamic";
-
 import { NextResponse } from 'next/server';
-import fs from 'fs';
 import path from 'path';
-import matter from 'gray-matter';
-import { marked } from 'marked';
-import DOMPurify from 'dompurify';
-import { JSDOM } from 'jsdom';
-import hljs from 'highlight.js';
 import { getPostsDirectory } from '@/lib/postsDirectory';
 import { spawnSync } from 'child_process';
 
 const postsDirectory = getPostsDirectory();
-
-// Function to get file creation date
-function getFileCreationDate(filePath: string): Date {
-  const stats = fs.statSync(filePath);
-  return stats.birthtime ?? stats.mtime;
-}
-
-// Function to generate ID from text (matches frontend logic)
-function generateId(text: string): string {
-  return text
-    .toLowerCase()
-    .replace(/[^a-z0-9]+/g, '-')
-    .replace(/^-+|-+$/g, '');
-}
-
-const renderer = new marked.Renderer();
-
-// Custom heading renderer to add IDs
-renderer.heading = (text, level) => {
-  const id = generateId(text);
-  return `<h${level} id="${id}">${text}</h${level}>`;
-};
-
-renderer.code = (code, infostring, escaped) => {
-  const lang = (infostring || '').match(/\S*/)?.[0];
-  const highlighted = lang && hljs.getLanguage(lang)
-    ? hljs.highlight(code, { language: lang }).value
-    : hljs.highlightAuto(code).value;
-  const langClass = lang ? `language-${lang}` : '';
-  return `<pre><code class="hljs ${langClass}">${highlighted}</code></pre>`;
-};
-
-marked.setOptions({
-  gfm: true,
-  breaks: true,
-  renderer,
-});
-
-async function getPostBySlug(slug: string) {
-  const realSlug = slug.replace(/\.md$/, '');
-  const fullPath = path.join(postsDirectory, `${realSlug}.md`);
-  let rustResult;
-  try {
-    // Try Rust backend first
-    rustResult = spawnSync(
-      path.resolve(process.cwd(), 'markdown_backend/target/release/markdown_backend'),
-      ['show', realSlug],
-      { encoding: 'utf-8' }
-    );
-    if (rustResult.status === 0 && rustResult.stdout) {
-      // Expect Rust to output a JSON object matching the post shape
-      const post = JSON.parse(rustResult.stdout);
-      // Map snake_case to camelCase for frontend compatibility
-      post.createdAt = post.created_at;
-      delete post.created_at;
-      return post;
-    } else {
-      console.error('[Rust parser error]', rustResult.stderr || rustResult.error);
-    }
-  } catch (e) {
-    console.error('[Rust parser exception]', e);
-  }
-
-  // Fallback to TypeScript parser
-  const fileContents = fs.readFileSync(fullPath, 'utf8');
-  const { data, content } = matter(fileContents);
-  const createdAt = getFileCreationDate(fullPath);
-
-  let processedContent = '';
-  try {
-    // Convert markdown to HTML
-    const rawHtml = marked.parse(content);
-    const window = new JSDOM('').window;
-    const purify = DOMPurify(window);
-    processedContent = purify.sanitize(rawHtml as string, {
-      ALLOWED_TAGS: [
-        'h1', 'h2', 'h3', 'h4', 'h5', 'h6',
-        'p', 'a', 'ul', 'ol', 'li', 'blockquote',
-        'pre', 'code', 'em', 'strong', 'del',
-        'hr', 'br', 'img', 'table', 'thead', 'tbody',
-        'tr', 'th', 'td', 'div', 'span', 'iframe'
-      ],
-      ALLOWED_ATTR: [
-        'class', 'id', 'style',
-        'href', 'target', 'rel',
-        'src', 'alt', 'title', 'width', 'height',
-        'frameborder', 'allowfullscreen'
-      ],
-      ALLOWED_URI_REGEXP: /^(?:(?:(?:f|ht)tps?|mailto|tel|callto|cid|xmpp):|[^a-z]|[a-z+.-]+(?:[^a-z+.-:]|$))/i
-    });
-  } catch (err) {
-    console.error(`Error processing markdown for slug "${realSlug}":`, err);
-    processedContent = `<div class="error-message">
-      <p>Error processing markdown content. Please check the console for details.</p>
-      <pre>${err instanceof Error ? err.message : 'Unknown error'}</pre>
-    </div>`;
-  }
-
-  return {
-    slug: realSlug,
-    title: data.title,
-    date: data.date,
-    tags: data.tags || [],
-    summary: data.summary,
-    content: processedContent,
-    createdAt: createdAt.toISOString(),
-    author: (process.env.NEXT_PUBLIC_BLOG_OWNER || 'Anonymous') + "'s",
-  };
-}
 
 export async function GET(
   request: Request,
   { params }: { params: { slug: string[] | string } }
 ) {
-  let parser = 'typescript';
-  let rustError = '';
   try {
     const slugArr = Array.isArray(params.slug) ? params.slug : [params.slug];
     const slugPath = slugArr.join('/');
-    let post;
-    try {
-      const rustResult = spawnSync(
-        path.resolve(process.cwd(), 'markdown_backend/target/release/markdown_backend'),
-        ['show', slugPath],
-        { encoding: 'utf-8' }
-      );
-      if (rustResult.status === 0 && rustResult.stdout) {
-        post = JSON.parse(rustResult.stdout);
-        post.createdAt = post.created_at;
-        delete post.created_at;
-        parser = 'rust';
-      } else {
-        rustError = rustResult.stderr || rustResult.error?.toString() || 'Unknown error';
-        console.error('[Rust parser error]', rustError);
-      }
-    } catch (e) {
-      rustError = e instanceof Error ? e.message : String(e);
-      console.error('[Rust parser exception]', rustError);
+    const rustResult = spawnSync(
+      path.resolve(process.cwd(), 'markdown_backend/target/release/markdown_backend'),
+      ['show', slugPath],
+      { encoding: 'utf-8' }
+    );
+    if (rustResult.status === 0 && rustResult.stdout) {
+      const post = JSON.parse(rustResult.stdout);
+      post.createdAt = post.created_at;
+      delete post.created_at;
+      return NextResponse.json(post);
+    } else {
+      const rustError = rustResult.stderr || rustResult.error?.toString() || 'Unknown error';
+      return NextResponse.json({ error: 'Rust parser error', details: rustError }, { status: 500 });
     }
-    if (!post) {
-      post = await getPostBySlug(slugPath);
-    }
-    const response = NextResponse.json(post);
-    response.headers.set('X-Parser', parser);
-    if (parser !== 'rust' && rustError) {
-      response.headers.set('X-Rust-Parser-Error', rustError);
-    }
-    return response;
   } catch (error) {
-    console.error('Error loading post:', error);
     return NextResponse.json(
-      {
-        error: 'Error loading post',
-        details: error instanceof Error ? error.message : 'Unknown error'
-      },
+      { error: 'Error loading post', details: error instanceof Error ? error.message : 'Unknown error' },
       { status: 500 }
     );
   }
@@ -1,5 +1,5 @@
 import { NextRequest, NextResponse } from 'next/server';
-import { watchPosts, stopWatching } from '@/lib/markdown';
+import { spawn } from 'child_process';
 
 // Prevent static generation of this route
 export const dynamic = 'force-dynamic';
@@ -37,35 +37,87 @@ export async function GET(request: NextRequest) {
         return;
       }
 
-      // Set up file watcher if not already set up
+      // Set up Rust file watcher if not already set up
       if (clients.size === 1) {
         try {
-          watchPosts(() => {
-            // Notify all connected clients about the update
-            const message = JSON.stringify({ type: 'update', timestamp: new Date().toISOString() });
-            const clientsToRemove: ReadableStreamDefaultController[] = [];
+          const rustWatcher = spawn(
+            process.cwd() + '/markdown_backend/target/release/markdown_backend',
+            ['watch'],
+            { stdio: ['pipe', 'pipe', 'pipe'] }
+          );
 
-            clients.forEach(client => {
-              try {
-                client.enqueue(`data: ${message}\n\n`);
-              } catch (error) {
-                // Mark client for removal
-                clientsToRemove.push(client);
+          rustWatcher.stdout.on('data', (data) => {
+            const message = data.toString().trim();
+            console.log('Rust watcher output:', message);
+
+            if (message.includes('Posts directory changed!')) {
+              // Notify all connected clients about the update
+              const updateMessage = JSON.stringify({ type: 'update', timestamp: new Date().toISOString() });
+              const clientsToRemove: ReadableStreamDefaultController[] = [];
+
+              clients.forEach(client => {
+                try {
+                  client.enqueue(`data: ${updateMessage}\n\n`);
+                } catch (error) {
+                  // Mark client for removal
+                  clientsToRemove.push(client);
+                }
+              });
+
+              // Remove disconnected clients
+              clientsToRemove.forEach(client => {
+                clients.delete(client);
+              });
+
+              // Stop watching if no clients are connected
+              if (clients.size === 0) {
+                console.log('No clients connected, stopping watcher');
+                rustWatcher.kill();
               }
-            });
-
-            // Remove disconnected clients
-            clientsToRemove.forEach(client => {
-              clients.delete(client);
-            });
 
-            // Stop watching if no clients are connected
-            if (clients.size === 0) {
-              stopWatching();
             }
           });
+
+          rustWatcher.stderr.on('data', (data) => {
+            const errorMessage = data.toString().trim();
+            console.error('Rust watcher error:', errorMessage);
+
+            // Don't treat RecvError as a real error - it's expected when the process is terminated
+            if (!errorMessage.includes('RecvError')) {
+              // Send error to clients
+              const errorData = JSON.stringify({ type: 'error', message: errorMessage });
+              const clientsToRemove: ReadableStreamDefaultController[] = [];
+
+              clients.forEach(client => {
+                try {
+                  client.enqueue(`data: ${errorData}\n\n`);
+                } catch (error) {
+                  clientsToRemove.push(client);
+                }
+              });
+
+              clientsToRemove.forEach(client => {
+                clients.delete(client);
+              });
+            }
+          });
+
+          rustWatcher.on('error', (error) => {
+            console.error('Rust watcher spawn error:', error);
+          });
+
+          rustWatcher.on('close', (code) => {
+            console.log('Rust watcher closed with code:', code);
+            // Only restart if we still have clients
+            if (clients.size > 0) {
+              console.log('Restarting watcher due to unexpected close');
+              // The watcher will be restarted when the next client connects
+            }
+          });
+
+          // Store the watcher process for cleanup
+          (controller as any).rustWatcher = rustWatcher;
         } catch (error) {
-          console.error('Error setting up file watcher:', error);
+          console.error('Error setting up Rust file watcher:', error);
         }
       }
 
@@ -75,16 +127,17 @@ export async function GET(request: NextRequest) {
 
         // Stop watching if no clients are connected
         if (clients.size === 0) {
-          stopWatching();
+          const rustWatcher = (controller as any).rustWatcher;
+          if (rustWatcher) {
+            console.log('Last client disconnected, stopping watcher');
+            rustWatcher.kill();
+          }
         }
       });
     },
     cancel() {
-      // Handle stream cancellation - we can't access the specific controller here
-      // The abort event handler will handle cleanup for the specific controller
-      if (clients.size === 0) {
-        stopWatching();
-      }
+      // Handle stream cancellation - this is called when the stream is cancelled
+      // We can't access the specific controller here, so we'll handle cleanup in the abort event
     }
   });
 
File diff suppressed because it is too large
@@ -1,272 +0,0 @@
-// This is the frontend Markdown parser.
-// It is written in TypeScript
-// While I was writing this, only I and God knew how it works.
-// Now, only God knows.
-//
-// If you are trying to understand how it works , and optimize it. Please increse the counter
-//
-// Hours wasted here: 12
-
-import fs from 'fs';
-import path from 'path';
-import matter from 'gray-matter';
-import { marked } from 'marked';
-import DOMPurify from 'dompurify';
-import { JSDOM } from 'jsdom';
-import chokidar from 'chokidar';
-import type { FSWatcher } from 'chokidar';
-import hljs from 'highlight.js';
-import { getPostsDirectory } from './postsDirectory';
-
-export interface Post {
-  slug: string;
-  title: string;
-  date: string;
-  tags: string[];
-  summary: string;
-  content: string;
-  createdAt: Date;
-  author: string;
-}
-
-const postsDirectory = getPostsDirectory();
-
-// Function to get file creation date
-function getFileCreationDate(filePath: string): Date {
-  const stats = fs.statSync(filePath);
-  return stats.birthtime;
-}
-
-// Function to generate ID from text (matches frontend logic)
-function generateId(text: string): string {
-  return text
-    .toLowerCase()
-    .replace(/[^a-z0-9]+/g, '-')
-    .replace(/^-+|-+$/g, '');
-}
-
-// Enhanced slugification function that matches GitHub-style anchor links
-function slugify(text: string): string {
-  return text
-    .toLowerCase()
-    .trim()
-    .replace(/[^\w\s-]/g, '') // Remove special characters except spaces and hyphens
-    .replace(/[\s_-]+/g, '-') // Replace spaces, underscores, and multiple hyphens with single hyphen
-    .replace(/^-+|-+$/g, ''); // Remove leading/trailing hyphens
-}
-
-// Function to process anchor links in markdown content
-function processAnchorLinks(content: string): string {
-  // Find all markdown links that point to anchors (e.g., [text](#anchor))
-  return content.replace(/\[([^\]]+)\]\(#([^)]+)\)/g, (match, linkText, anchor) => {
-    // Only slugify if the anchor doesn't already look like a slug
-    // This prevents double-processing of already-correct anchor links
-    const isAlreadySlugified = /^[a-z0-9-]+$/.test(anchor);
-    const slugifiedAnchor = isAlreadySlugified ? anchor : slugify(anchor);
-    return `[${linkText}](#${slugifiedAnchor})`;
-  });
-}
-
-// Utility function to debug anchor links (for development)
-export function debugAnchorLinks(content: string): void {
-  if (process.env.NODE_ENV !== 'development') return;
-
-  console.log('=== Anchor Link Debug Info ===');
-
-  // Extract all headings and their IDs
-  const headingRegex = /^(#{1,6})\s+(.+)$/gm;
-  const headings: Array<{ level: number; text: string; id: string }> = [];
-
-  let match;
-  while ((match = headingRegex.exec(content)) !== null) {
-    const level = match[1].length;
-    const text = match[2].trim();
-    const id = slugify(text);
-    headings.push({ level, text, id });
-  }
-
-  console.log('Generated heading IDs:');
-  headings.forEach(({ level, text, id }) => {
-    console.log(`  H${level}: "${text}" -> id="${id}"`);
-  });
-
-  // Extract all anchor links
-  const anchorLinkRegex = /\[([^\]]+)\]\(#([^)]+)\)/g;
-  const anchorLinks: Array<{ linkText: string; originalAnchor: string; slugifiedAnchor: string }> = [];
-
-  while ((match = anchorLinkRegex.exec(content)) !== null) {
-    const linkText = match[1];
-    const originalAnchor = match[2];
-    const slugifiedAnchor = slugify(originalAnchor);
-    anchorLinks.push({ linkText, originalAnchor, slugifiedAnchor });
-  }
-
-  console.log('Anchor links found:');
-  anchorLinks.forEach(({ linkText, originalAnchor, slugifiedAnchor }) => {
-    const headingExists = headings.some(h => h.id === slugifiedAnchor);
-    const status = headingExists ? '✅' : '❌';
-    console.log(`  ${status} [${linkText}](#${originalAnchor}) -> [${linkText}](#${slugifiedAnchor})`);
-  });
-
-  // Show missing headings
-  const missingAnchors = anchorLinks.filter(({ slugifiedAnchor }) =>
-    !headings.some(h => h.id === slugifiedAnchor)
-  );
-
-  if (missingAnchors.length > 0) {
-    console.warn('Missing headings for these anchor links:');
-    missingAnchors.forEach(({ linkText, originalAnchor, slugifiedAnchor }) => {
-      console.warn(`  - [${linkText}](#${originalAnchor}) -> id="${slugifiedAnchor}"`);
-    });
-  }
-
-  console.log('=== End Debug Info ===');
-}
-
-const renderer = new marked.Renderer();
-
-// Custom heading renderer to add IDs
-renderer.heading = (text, level) => {
-  const id = slugify(text);
-  return `<h${level} id="${id}">${text}</h${level}>`;
-};
-
-renderer.code = (code, infostring, escaped) => {
-  const lang = (infostring || '').match(/\S*/)?.[0];
-  const highlighted = lang && hljs.getLanguage(lang)
-    ? hljs.highlight(code, { language: lang }).value
-    : hljs.highlightAuto(code).value;
-  const langClass = lang ? `language-${lang}` : '';
-  return `<pre><code class="hljs ${langClass}">${highlighted}</code></pre>`;
-};
-
-marked.setOptions({
-  gfm: true,
-  breaks: true,
-  renderer,
-});
-
-export async function getPostBySlug(slug: string): Promise<Post> {
-  const realSlug = slug.replace(/\.md$/, '');
-  const fullPath = path.join(postsDirectory, `${realSlug}.md`);
-  const fileContents = fs.readFileSync(fullPath, 'utf8');
-  const { data, content } = matter(fileContents);
-  const createdAt = getFileCreationDate(fullPath);
-
-  let processedContent = '';
-  try {
-    // Debug anchor links in development
-    debugAnchorLinks(content);
-
-    // Process anchor links before parsing markdown
-    const processedMarkdown = processAnchorLinks(content);
-    const rawHtml = marked.parse(processedMarkdown);
-    const window = new JSDOM('').window;
-    const purify = DOMPurify(window);
-    processedContent = purify.sanitize(rawHtml as string, {
-      ALLOWED_TAGS: [
-        'h1', 'h2', 'h3', 'h4', 'h5', 'h6',
-        'p', 'a', 'ul', 'ol', 'li', 'blockquote',
-        'pre', 'code', 'em', 'strong', 'del',
-        'hr', 'br', 'img', 'table', 'thead', 'tbody',
-        'tr', 'th', 'td', 'div', 'span', 'iframe'
-      ],
-      ALLOWED_ATTR: [
-        'class', 'id', 'style',
-        'href', 'target', 'rel',
-        'src', 'alt', 'title', 'width', 'height',
-        'frameborder', 'allowfullscreen'
-      ],
-      ALLOWED_URI_REGEXP: /^(?:(?:(?:f|ht)tps?|mailto|tel|callto|cid|xmpp):|[^a-z]|[a-z+.\-]+(?:[^a-z+.\-:]|$))/i
-    });
-  } catch (err) {
-    console.error(`Error processing markdown for ${realSlug}:`, err);
-    processedContent = `<div class="error-message">
-      <p>Error processing markdown content. Please check the console for details.</p>
-      <pre>${err instanceof Error ? err.message : 'Unknown error'}</pre>
-    </div>`;
-  }
-
-  return {
-    slug: realSlug,
-    title: data.title,
-    date: data.date,
-    tags: data.tags || [],
-    summary: data.summary,
-    content: processedContent,
-    createdAt,
-    author: process.env.NEXT_PUBLIC_BLOG_OWNER || 'Anonymous',
-  };
-}
-
-export async function getAllPosts(): Promise<Post[]> {
-  const fileNames = fs.readdirSync(postsDirectory);
-  const allPostsData = await Promise.all(
-    fileNames
-      .filter((fileName) => fileName.endsWith('.md'))
-      .map(async (fileName) => {
-        const slug = fileName.replace(/\.md$/, '');
-        return getPostBySlug(slug);
-      })
-  );
-
-  // Sort by creation date (newest first)
-  return allPostsData.sort((a, b) => b.createdAt.getTime() - a.createdAt.getTime());
-}
-
-export async function getPostsByTag(tag: string): Promise<Post[]> {
-  const allPosts = await getAllPosts();
-  return allPosts.filter((post) => post.tags.includes(tag));
-}
-
-// File watcher setup
-let watcher: FSWatcher | null = null;
-let onChangeCallback: (() => void) | null = null;
-
-export function watchPosts(callback: () => void) {
-  if (watcher) {
-    watcher.close();
-  }
-
-  onChangeCallback = callback;
-  watcher = chokidar.watch(postsDirectory, {
-    ignored: [
-      /(^|[\/\\])\../, // ignore dotfiles
-      /node_modules/,
-      /\.git/,
-      /\.next/,
-      /\.cache/,
-      /\.DS_Store/,
-      /Thumbs\.db/,
-      /\.tmp$/,
-      /\.temp$/
-    ],
-    persistent: true,
-    ignoreInitial: true, // Don't trigger on initial scan
-    awaitWriteFinish: {
-      stabilityThreshold: 1000, // Wait 1 second after file changes
-      pollInterval: 100 // Check every 100ms
-    },
-    usePolling: false, // Use native file system events when possible
-    interval: 1000 // Fallback polling interval (only used if native events fail)
-  });
-
-  watcher
-    .on('add', handleFileChange)
-    .on('change', handleFileChange)
-    .on('unlink', handleFileChange);
-}
-
-function handleFileChange() {
-  if (onChangeCallback) {
-    onChangeCallback();
-  }
-}
-
-export function stopWatching() {
-  if (watcher) {
-    watcher.close();
-    watcher = null;
-  }
-  onChangeCallback = null;
-}
-