Merge pull request 'cleanup' (#11) from cleanup into main
Some checks failed
Deploy / build-and-deploy (push) Failing after 2s
Reviewed-on: http://10.0.0.13:3002/rattatwinko/markdownblog/pulls/11
@@ -1,7 +1,7 @@
 #[warn(unused_imports)]
 use clap::{Parser, Subcommand};
 mod markdown;
-use markdown::{get_all_posts, get_post_by_slug, get_posts_by_tag, watch_posts};
+use markdown::{get_all_posts, get_post_by_slug, get_posts_by_tag, watch_posts, get_parser_logs, clear_parser_logs};
 use serde_json;
 use std::fs;
 use std::io;
@@ -34,6 +34,10 @@ enum Commands {
     Rsparseinfo,
     /// Check backend health
     Checkhealth,
+    /// Get parser logs
+    Logs,
+    /// Clear parser logs
+    ClearLogs,
     /// Parse markdown from file or stdin
     Parse {
         #[arg(long)]
@@ -92,35 +96,36 @@ fn main() {
             let health = markdown::checkhealth();
             println!("{}", serde_json::to_string_pretty(&health).unwrap());
         }
+        Commands::Logs => {
+            let logs = get_parser_logs();
+            println!("{}", serde_json::to_string_pretty(&logs).unwrap());
+        }
+        Commands::ClearLogs => {
+            clear_parser_logs();
+            println!("{}", serde_json::to_string(&serde_json::json!({"success": true, "message": "Logs cleared"})).unwrap());
+        }
         Commands::Parse { file, stdin, ast } => {
-            let input = if let Some(file_path) = file {
-                match std::fs::read_to_string(file_path) {
-                    Ok(content) => content,
-                    Err(e) => {
-                        eprintln!("Failed to read file: {}", e);
-                        std::process::exit(1);
-                    }
-                }
-            } else if *stdin {
+            let content = if *stdin {
                 let mut buffer = String::new();
-                if let Err(e) = io::stdin().read_to_string(&mut buffer) {
-                    eprintln!("Failed to read from stdin: {}", e);
-                    std::process::exit(1);
-                }
+                io::stdin().read_to_string(&mut buffer).unwrap();
                 buffer
+            } else if let Some(file_path) = file {
+                fs::read_to_string(file_path).unwrap()
             } else {
-                eprintln!("Please provide --file <path> or --stdin");
+                eprintln!("Either --file or --stdin must be specified");
                 std::process::exit(1);
             };

             if *ast {
-                // Print pulldown_cmark events as debug output
-                let parser = pulldown_cmark::Parser::new_ext(&input, pulldown_cmark::Options::all());
-                for event in parser {
+                // Parse and output AST as debug format
+                let parser = pulldown_cmark::Parser::new_ext(&content, pulldown_cmark::Options::all());
+                let events: Vec<_> = parser.collect();
+                for event in events {
                     println!("{:?}", event);
                 }
             } else {
-                // Print HTML output
-                let parser = pulldown_cmark::Parser::new_ext(&input, pulldown_cmark::Options::all());
+                // Parse and output HTML
+                let parser = pulldown_cmark::Parser::new_ext(&content, pulldown_cmark::Options::all());
                 let mut html_output = String::new();
                 pulldown_cmark::html::push_html(&mut html_output, parser);
                 println!("{}", html_output);
@@ -1,41 +1,40 @@
+//
 // src/markdown.rs
-/*
-
-This is the Rust Markdown Parser.
-It supports caching of posts and is
-
-BLAZINGLY FAST!
-
-*/
-
-#[warn(unused_imports)]
+// Written by: @rattatwinko
+//
 use std::fs;
 use std::path::{Path, PathBuf};
+use std::collections::HashMap;
+use std::sync::RwLock;
+use std::time::Instant;
+use std::sync::mpsc::channel;
+use std::collections::VecDeque;

 use chrono::{DateTime, Utc};
-use serde::Deserialize;
+use serde::{Deserialize, Serialize};
 use pulldown_cmark::{Parser, Options, html, Event, Tag, CowStr};
 use gray_matter::engine::YAML;
 use gray_matter::Matter;
-use ammonia::clean;
 use slug::slugify;
 use notify::{RecursiveMode, RecommendedWatcher, Watcher, Config};
-use std::sync::mpsc::channel;
-use std::time::{Duration, Instant};
-use syntect::highlighting::{ThemeSet, Style};
+use syntect::highlighting::ThemeSet;
 use syntect::parsing::SyntaxSet;
-use syntect::html::{highlighted_html_for_string, IncludeBackground};
+use syntect::html::highlighted_html_for_string;
 use once_cell::sync::Lazy;
-use std::collections::HashMap;
-use std::sync::RwLock;
 use serde_json;
-use sysinfo::{System, Pid, RefreshKind, CpuRefreshKind, ProcessRefreshKind};
-use serde::Serialize;
+use sysinfo::{System, RefreshKind, CpuRefreshKind, ProcessRefreshKind};
 use regex::Regex;

+// Constants
 const POSTS_CACHE_PATH: &str = "./cache/posts_cache.json";
 const POST_STATS_PATH: &str = "./cache/post_stats.json";
+const MAX_FILE_SIZE: usize = 10 * 1024 * 1024; // 10MB
+const PARSING_TIMEOUT_SECS: u64 = 30;
+const MAX_LOG_ENTRIES: usize = 1000;

-#[derive(Debug, Deserialize, Clone, serde::Serialize)]
+// Data structures
+#[derive(Debug, Deserialize, Clone, Serialize)]
 pub struct PostFrontmatter {
     pub title: String,
     pub date: String,
@@ -43,7 +42,7 @@ pub struct PostFrontmatter {
     pub summary: Option<String>,
 }

-#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
+#[derive(Debug, Clone, Serialize, Deserialize)]
 pub struct Post {
     pub slug: String,
     pub title: String,
@@ -55,21 +54,17 @@ pub struct Post {
     pub author: String,
 }

-#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, Default)]
+#[derive(Debug, Clone, Serialize, Deserialize, Default)]
 pub struct PostStats {
     pub slug: String,
     pub cache_hits: u64,
     pub cache_misses: u64,
     pub last_interpret_time_ms: u128,
     pub last_compile_time_ms: u128,
-    pub last_cpu_usage_percent: f32, // Not f64
+    pub last_cpu_usage_percent: f32,
     pub last_cache_status: String, // "hit" or "miss"
 }

-static POST_CACHE: Lazy<RwLock<HashMap<String, Post>>> = Lazy::new(|| RwLock::new(HashMap::new()));
-static ALL_POSTS_CACHE: Lazy<RwLock<Option<Vec<Post>>>> = Lazy::new(|| RwLock::new(None));
-static POST_STATS: Lazy<RwLock<HashMap<String, PostStats>>> = Lazy::new(|| RwLock::new(HashMap::new()));
-
 #[derive(Debug, Serialize)]
 pub struct HealthReport {
     pub posts_dir_exists: bool,
@@ -83,197 +78,32 @@ pub struct HealthReport {
     pub errors: Vec<String>,
 }

-fn get_posts_directory() -> PathBuf {
-    // Check if we're running in Docker by looking for common Docker environment indicators
-    let is_docker = std::env::var("DOCKER_CONTAINER").is_ok()
-        || std::env::var("KUBERNETES_SERVICE_HOST").is_ok()
-        || std::path::Path::new("/.dockerenv").exists();
-
-    let candidates = if is_docker {
-        vec![
-            "/app/docker", // Docker volume mount point (highest priority in Docker)
-            "/app/posts", // Fallback in Docker
-            "./posts",
-            "../posts",
-            "/posts",
-            "/docker"
-        ]
-    } else {
-        vec![
-            "./posts",
-            "../posts",
-            "/posts",
-            "/docker",
-            "/app/docker" // Lower priority for non-Docker environments
-        ]
-    };
-
-    for candidate in candidates.iter() {
-        let path = PathBuf::from(candidate);
-        if path.exists() && path.is_dir() {
-            return path;
-        }
-    }
-    // Fallback: default to ./posts
-    PathBuf::from("./posts")
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct LogEntry {
+    pub timestamp: String,
+    pub level: String, // "info", "warning", "error"
+    pub message: String,
+    pub slug: Option<String>,
+    pub details: Option<String>,
 }

-// Helper function to recursively find all markdown files
-fn find_markdown_files(dir: &Path) -> std::io::Result<Vec<PathBuf>> {
-    let mut files = Vec::new();
-    if dir.is_dir() {
-        for entry in fs::read_dir(dir)? {
-            let entry = entry?;
-            let path = entry.path();
-
-            if path.is_dir() {
-                // Recursively scan subdirectories
-                files.extend(find_markdown_files(&path)?);
-            } else if path.extension().map(|e| e == "md").unwrap_or(false) {
-                files.push(path);
-            }
-        }
-    }
-    Ok(files)
-}
-
-// Helper function to convert a file path to a slug
-fn path_to_slug(file_path: &Path, posts_dir: &Path) -> String {
-    // Get the relative path from posts directory
-    let relative_path = file_path.strip_prefix(posts_dir).unwrap_or(file_path);
-    // Remove the .md extension
-    let without_ext = relative_path.with_extension("");
-    // Convert to string and replace path separators with a special separator
-    // Use "::" as a directory separator to avoid conflicts with hyphens in filenames
-    without_ext.to_string_lossy()
-        .replace(std::path::MAIN_SEPARATOR, "::")
-        .replace("/", "::")
-        .replace("\\", "::")
-}
-
-// Helper function to convert a slug back to a file path
-fn slug_to_path(slug: &str, posts_dir: &Path) -> PathBuf {
-    // Split by the special directory separator "::"
-    let parts: Vec<&str> = slug.split("::").collect();
-    if parts.len() == 1 {
-        // Single part, no subdirectory
-        posts_dir.join(format!("{}.md", parts[0]))
-    } else {
-        // Multiple parts, all but the last are directories, last is filename
-        let mut path = posts_dir.to_path_buf();
-        for (i, part) in parts.iter().enumerate() {
-            if i == parts.len() - 1 {
-                // Last part is the filename
-                path = path.join(format!("{}.md", part));
-            } else {
-                // Other parts are directories
-                path = path.join(part);
-            }
-        }
-        path
-    }
-}
-
-fn get_file_creation_date(path: &Path) -> std::io::Result<DateTime<Utc>> {
-    let metadata = fs::metadata(path)?;
-    // Try to get creation time, fall back to modification time if not available
-    match metadata.created() {
-        Ok(created) => Ok(DateTime::<Utc>::from(created)),
-        Err(_) => {
-            // Fall back to modification time if creation time is not available
-            let modified = metadata.modified()?;
-            Ok(DateTime::<Utc>::from(modified))
-        }
-    }
-}
-
-fn process_anchor_links(content: &str) -> String {
-    // Replace [text](#anchor) with slugified anchor
-    let re = regex::Regex::new(r"\[([^\]]+)\]\(#([^)]+)\)").unwrap();
-    re.replace_all(content, |caps: &regex::Captures| {
-        let link_text = &caps[1];
-        let anchor = &caps[2];
-        let slugified = slugify(anchor);
-        format!("[{}](#{})", link_text, slugified)
-    }).to_string()
-}
-
-// Helper function to strip emojis from a string
-// Neccesary for the slugify function to work correctly. And the ID's to work with the frontend.
-fn strip_emojis(s: &str) -> String {
-    // Remove all characters in the Emoji Unicode ranges
-    // This is a simple approach and may not cover all emojis, but works for most cases
-    s.chars()
-        .filter(|c| {
-            let c = *c as u32;
-            // Basic Emoji ranges
-            !( (c >= 0x1F600 && c <= 0x1F64F) // Emoticons
-                || (c >= 0x1F300 && c <= 0x1F5FF) // Misc Symbols and Pictographs
-                || (c >= 0x1F680 && c <= 0x1F6FF) // Transport and Map
-                || (c >= 0x2600 && c <= 0x26FF) // Misc symbols
-                || (c >= 0x2700 && c <= 0x27BF) // Dingbats
-                || (c >= 0x1F900 && c <= 0x1F9FF) // Supplemental Symbols and Pictographs
-                || (c >= 0x1FA70 && c <= 0x1FAFF) // Symbols and Pictographs Extended-A
-                || (c >= 0x1F1E6 && c <= 0x1F1FF) // Regional Indicator Symbols
-            )
-        })
-        .collect()
-}
-
-// Function to process custom tags in markdown content
-fn process_custom_tags(content: &str) -> String {
-    let mut processed = content.to_string();
-
-    // Handle simple tags without parameters FIRST
-    let simple_tags = [
-        ("<mytag />", "<div class=\"custom-tag mytag\">This is my custom tag content!</div>"),
-        ("<warning />", "<div class=\"custom-tag warning\" style=\"background: #fff3cd; border: 1px solid #ffeaa7; padding: 1rem; border-radius: 4px; margin: 1rem 0;\">⚠️ Warning: This is a custom warning tag!</div>"),
-        ("<info />", "<div class=\"custom-tag info\" style=\"background: #d1ecf1; border: 1px solid #bee5eb; padding: 1rem; border-radius: 4px; margin: 1rem 0;\">ℹ️ Info: This is a custom info tag!</div>"),
-        ("<success />", "<div class=\"custom-tag success\" style=\"background: #d4edda; border: 1px solid #c3e6cb; padding: 1rem; border-radius: 4px; margin: 1rem 0;\">✅ Success: This is a custom success tag!</div>"),
-        ("<error />", "<div class=\"custom-tag error\" style=\"background: #f8d7da; border: 1px solid #f5c6cb; padding: 1rem; border-radius: 4px; margin: 1rem 0;\">❌ Error: This is a custom error tag!</div>"),
-    ];
-
-    for (tag, replacement) in simple_tags.iter() {
-        processed = processed.replace(tag, replacement);
-    }
-
-    // Handle tags with parameters like <mytag param="value" />
-    let tag_with_params = Regex::new(r"<(\w+)\s+([^>]*?[a-zA-Z0-9=])[^>]*/>").unwrap();
-    processed = tag_with_params.replace_all(&processed, |caps: &regex::Captures| {
-        let tag_name = &caps[1];
-        let params = &caps[2];
-
-        match tag_name {
-            "mytag" => {
-                // Parse parameters and generate custom HTML
-                format!("<div class=\"custom-tag mytag\" data-params=\"{}\">Custom content with params: {}</div>", params, params)
-            },
-            "alert" => {
-                // Parse alert type from params
-                if params.contains("type=\"warning\"") {
-                    "<div class=\"custom-tag alert warning\" style=\"background: #fff3cd; border: 1px solid #ffeaa7; padding: 1rem; border-radius: 4px; margin: 1rem 0;\">⚠️ Warning Alert!</div>".to_string()
-                } else if params.contains("type=\"error\"") {
-                    "<div class=\"custom-tag alert error\" style=\"background: #f8d7da; border: 1px solid #f5c6cb; padding: 1rem; border-radius: 4px; margin: 1rem 0;\">❌ Error Alert!</div>".to_string()
-                } else {
-                    "<div class=\"custom-tag alert info\" style=\"background: #d1ecf1; border: 1px solid #bee5eb; padding: 1rem; border-radius: 4px; margin: 1rem 0;\">ℹ️ Info Alert!</div>".to_string()
-                }
-            },
-            _ => format!("<div class=\"custom-tag {}\">Unknown custom tag: {}</div>", tag_name, tag_name)
-        }
-    }).to_string();
-
-    processed
-}
+// Static caches
+static POST_CACHE: Lazy<RwLock<HashMap<String, Post>>> = Lazy::new(|| RwLock::new(HashMap::new()));
+static ALL_POSTS_CACHE: Lazy<RwLock<Option<Vec<Post>>>> = Lazy::new(|| RwLock::new(None));
+static POST_STATS: Lazy<RwLock<HashMap<String, PostStats>>> = Lazy::new(|| RwLock::new(HashMap::new()));
+static PARSER_LOGS: Lazy<RwLock<VecDeque<LogEntry>>> = Lazy::new(|| RwLock::new(VecDeque::new()));

+// Ammonia HTML sanitizer configuration
 static AMMONIA: Lazy<ammonia::Builder<'static>> = Lazy::new(|| {
     let mut builder = ammonia::Builder::default();
-    // All possible HTML Tags so that you can stylize via HTML
-    builder.add_tag_attributes("h1", &["id", "style"]);
-    builder.add_tag_attributes("h2", &["id", "style"]);
-    builder.add_tag_attributes("h3", &["id", "style"]);
-    builder.add_tag_attributes("h4", &["id", "style"]);
-    builder.add_tag_attributes("h5", &["id", "style"]);
-    builder.add_tag_attributes("h6", &["id", "style"]);
+    // Add allowed attributes for various HTML tags
+    builder.add_tag_attributes("h1", &["style", "id"]);
+    builder.add_tag_attributes("h2", &["style", "id"]);
+    builder.add_tag_attributes("h3", &["style", "id"]);
+    builder.add_tag_attributes("h4", &["style", "id"]);
+    builder.add_tag_attributes("h5", &["style", "id"]);
+    builder.add_tag_attributes("h6", &["style", "id"]);
     builder.add_tag_attributes("p", &["style"]);
     builder.add_tag_attributes("span", &["style"]);
     builder.add_tag_attributes("strong", &["style"]);
@@ -290,7 +120,6 @@ static AMMONIA: Lazy<ammonia::Builder<'static>> = Lazy::new(|| {
     builder.add_tag_attributes("pre", &["style"]);
     builder.add_tag_attributes("kbd", &["style"]);
     builder.add_tag_attributes("samp", &["style"]);
-    builder.add_tag_attributes("div", &["style", "class"]);
     builder.add_tag_attributes("section", &["style"]);
     builder.add_tag_attributes("article", &["style"]);
     builder.add_tag_attributes("header", &["style"]);
@@ -335,15 +164,197 @@ static AMMONIA: Lazy<ammonia::Builder<'static>> = Lazy::new(|| {
     builder.add_tag_attributes("fieldset", &["style"]);
     builder.add_tag_attributes("legend", &["style"]);
     builder.add_tag_attributes("blockquote", &["style"]);
-    builder.add_tag_attributes("font", &["style"]); // deprecated
-    builder.add_tag_attributes("center", &["style"]); // deprecated
-    builder.add_tag_attributes("big", &["style"]); // deprecated
-    builder.add_tag_attributes("tt", &["style"]); // deprecated
+    builder.add_tag_attributes("font", &["style"]);
+    builder.add_tag_attributes("center", &["style"]);
+    builder.add_tag_attributes("big", &["style"]);
+    builder.add_tag_attributes("tt", &["style"]);

+    // Add class attribute for div
+    builder.add_tag_attributes("div", &["style", "class"]);
+
     builder
 });

+// Helper functions
+fn get_posts_directory() -> PathBuf {
+    let is_docker = std::env::var("DOCKER_CONTAINER").is_ok()
+        || std::env::var("KUBERNETES_SERVICE_HOST").is_ok()
+        || std::path::Path::new("/.dockerenv").exists();
+
+    let candidates = if is_docker {
+        vec![
+            "/app/docker", // Docker volume mount point (highest priority in Docker)
+            "/app/posts", // Fallback in Docker
+            "./posts",
+            "../posts",
+            "/posts",
+            "/docker"
+        ]
+    } else {
+        vec![
+            "./posts",
+            "../posts",
+            "/posts",
+            "/docker",
+            "/app/docker" // Lower priority for non-Docker environments
+        ]
+    };
+
+    for candidate in candidates.iter() {
+        let path = PathBuf::from(candidate);
+        if path.exists() && path.is_dir() {
+            return path;
+        }
+    }
+    // Fallback: default to ./posts
+    PathBuf::from("./posts")
+}
+
+fn find_markdown_files(dir: &Path) -> std::io::Result<Vec<PathBuf>> {
+    let mut files = Vec::new();
+    if dir.is_dir() {
+        for entry in fs::read_dir(dir)? {
+            let entry = entry?;
+            let path = entry.path();
+
+            if path.is_dir() {
+                files.extend(find_markdown_files(&path)?);
+            } else if path.extension().map(|e| e == "md").unwrap_or(false) {
+                files.push(path);
+            }
+        }
+    }
+    Ok(files)
+}
+
+fn path_to_slug(file_path: &Path, posts_dir: &Path) -> String {
+    let relative_path = file_path.strip_prefix(posts_dir).unwrap_or(file_path);
+    let without_ext = relative_path.with_extension("");
+    without_ext.to_string_lossy()
+        .replace(std::path::MAIN_SEPARATOR, "::")
+        .replace("/", "::")
+        .replace("\\", "::")
+}
+
+fn slug_to_path(slug: &str, posts_dir: &Path) -> PathBuf {
+    let parts: Vec<&str> = slug.split("::").collect();
+    if parts.len() == 1 {
+        posts_dir.join(format!("{}.md", parts[0]))
+    } else {
+        let mut path = posts_dir.to_path_buf();
+        for (i, part) in parts.iter().enumerate() {
+            if i == parts.len() - 1 {
+                path = path.join(format!("{}.md", part));
+            } else {
+                path = path.join(part);
+            }
+        }
+        path
+    }
+}
+
+fn get_file_creation_date(path: &Path) -> std::io::Result<DateTime<Utc>> {
+    let metadata = fs::metadata(path)?;
+    match metadata.created() {
+        Ok(created) => Ok(DateTime::<Utc>::from(created)),
+        Err(_) => {
+            let modified = metadata.modified()?;
+            Ok(DateTime::<Utc>::from(modified))
+        }
+    }
+}
+
+fn process_anchor_links(content: &str) -> String {
+    let re = regex::Regex::new(r"\[([^\]]+)\]\(#([^)]+)\)").unwrap();
+    re.replace_all(content, |caps: &regex::Captures| {
+        let link_text = &caps[1];
+        let anchor = &caps[2];
+        let slugified = slugify(anchor);
+        format!("[{}](#{})", link_text, slugified)
+    }).to_string()
+}
+
+fn strip_emojis(s: &str) -> String {
+    s.chars()
+        .filter(|c| {
+            let c = *c as u32;
+            !( (c >= 0x1F600 && c <= 0x1F64F) // Emoticons
+                || (c >= 0x1F300 && c <= 0x1F5FF) // Misc Symbols and Pictographs
+                || (c >= 0x1F680 && c <= 0x1F6FF) // Transport and Map
+                || (c >= 0x2600 && c <= 0x26FF) // Misc symbols
+                || (c >= 0x2700 && c <= 0x27BF) // Dingbats
+                || (c >= 0x1F900 && c <= 0x1F9FF) // Supplemental Symbols and Pictographs
+                || (c >= 0x1FA70 && c <= 0x1FAFF) // Symbols and Pictographs Extended-A
+                || (c >= 0x1F1E6 && c <= 0x1F1FF) // Regional Indicator Symbols
+            )
+        })
+        .collect()
+}
+
+fn process_custom_tags(content: &str) -> String {
+    let mut processed = content.to_string();
+
+    // Handle simple tags without parameters
+    let simple_tags = [
+        ("<mytag />", "<div class=\"custom-tag mytag\">This is my custom tag content!</div>"),
+        ("<warning />", "<div class=\"custom-tag warning\" style=\"background: #fff3cd; border: 1px solid #ffeaa7; padding: 1rem; border-radius: 4px; margin: 1rem 0;\">⚠️ Warning: This is a custom warning tag!</div>"),
+        ("<info />", "<div class=\"custom-tag info\" style=\"background: #d1ecf1; border: 1px solid #bee5eb; padding: 1rem; border-radius: 4px; margin: 1rem 0;\">ℹ️ Info: This is a custom info tag!</div>"),
+        ("<success />", "<div class=\"custom-tag success\" style=\"background: #d4edda; border: 1px solid #c3e6cb; padding: 1rem; border-radius: 4px; margin: 1rem 0;\">✅ Success: This is a custom success tag!</div>"),
+        ("<error />", "<div class=\"custom-tag error\" style=\"background: #f8d7da; border: 1px solid #f5c6cb; padding: 1rem; border-radius: 4px; margin: 1rem 0;\">❌ Error: This is a custom error tag!</div>"),
+    ];
+
+    for (tag, replacement) in simple_tags.iter() {
+        processed = processed.replace(tag, replacement);
+    }
+
+    // Handle tags with parameters
+    let tag_with_params = Regex::new(r"<(\w+)\s+([^>]*?[a-zA-Z0-9=])[^>]*/>").unwrap();
+    processed = tag_with_params.replace_all(&processed, |caps: &regex::Captures| {
+        let tag_name = &caps[1];
+        let params = &caps[2];
+
+        match tag_name {
+            "mytag" => {
+                format!("<div class=\"custom-tag mytag\" data-params=\"{}\">Custom content with params: {}</div>", params, params)
+            },
+            "alert" => {
+                if params.contains("type=\"warning\"") {
+                    "<div class=\"custom-tag alert warning\" style=\"background: #fff3cd; border: 1px solid #ffeaa7; padding: 1rem; border-radius: 4px; margin: 1rem 0;\">⚠️ Warning Alert!</div>".to_string()
+                } else if params.contains("type=\"error\"") {
+                    "<div class=\"custom-tag alert error\" style=\"background: #f8d7da; border: 1px solid #f5c6cb; padding: 1rem; border-radius: 4px; margin: 1rem 0;\">❌ Error Alert!</div>".to_string()
+                } else {
+                    "<div class=\"custom-tag alert info\" style=\"background: #d1ecf1; border: 1px solid #bee5eb; padding: 1rem; border-radius: 4px; margin: 1rem 0;\">ℹ️ Info Alert!</div>".to_string()
+                }
+            },
+            _ => format!("<div class=\"custom-tag {}\">Unknown custom tag: {}</div>", tag_name, tag_name)
+        }
+    }).to_string();
+
+    processed
+}
+
+// Logging functions
+fn add_log(level: &str, message: &str, slug: Option<&str>, details: Option<&str>) {
+    let timestamp = chrono::Utc::now().to_rfc3339();
+    let log_entry = LogEntry {
+        timestamp,
+        level: level.to_string(),
+        message: message.to_string(),
+        slug: slug.map(|s| s.to_string()),
+        details: details.map(|s| s.to_string()),
+    };
+
+    let mut logs = PARSER_LOGS.write().unwrap();
+    logs.push_back(log_entry);
+
+    // Keep only the last MAX_LOG_ENTRIES
+    if logs.len() > MAX_LOG_ENTRIES {
+        logs.pop_front();
+    }
+}
+
+// Main public functions
 pub fn rsparseinfo() -> String {
-    // Eagerly load all posts to populate stats
     let _ = get_all_posts();
     let stats = POST_STATS.read().unwrap();
     let values: Vec<&PostStats> = stats.values().collect();
@@ -355,16 +366,20 @@ pub fn rsparseinfo() -> String {
 }

 pub fn get_post_by_slug(slug: &str) -> Result<Post, Box<dyn std::error::Error>> {
+    add_log("info", "Starting post parsing", Some(slug), None);
+
     let mut sys = System::new_with_specifics(RefreshKind::new().with_processes(ProcessRefreshKind::everything()).with_cpu(CpuRefreshKind::everything()));
     sys.refresh_processes();
     let pid = sysinfo::get_current_pid()?;
     let before_cpu = sys.process(pid).map(|p| p.cpu_usage()).unwrap_or(0.0);
     let start = Instant::now();

     let mut stats = POST_STATS.write().unwrap();
     let entry = stats.entry(slug.to_string()).or_insert_with(|| PostStats {
         slug: slug.to_string(),
         ..Default::default()
     });

     // Try cache first
     if let Some(post) = POST_CACHE.read().unwrap().get(slug).cloned() {
         entry.cache_hits += 1;
@@ -373,32 +388,30 @@ pub fn get_post_by_slug(slug: &str) -> Result<Post, Box<dyn std::error::Error>>
         entry.last_cache_status = "hit".to_string();
         sys.refresh_process(pid);
         entry.last_cpu_usage_percent = sys.process(pid).map(|p| p.cpu_usage()).unwrap_or(0.0) - before_cpu;
+        add_log("info", "Cache hit", Some(slug), None);
         return Ok(post);
     }

     entry.cache_misses += 1;
     entry.last_cache_status = "miss".to_string();
-    drop(stats); // Release lock before heavy work
+    drop(stats);

     let posts_dir = get_posts_directory();
     let file_path = slug_to_path(slug, &posts_dir);

-    // Add debugging for file path resolution
-    eprintln!("[Rust Parser] Looking for file: {:?}", file_path);
-    eprintln!("[Rust Parser] Posts directory: {:?}", posts_dir);
-    eprintln!("[Rust Parser] Slug: {}", slug);
-
     if !file_path.exists() {
-        eprintln!("[Rust Parser] File does not exist: {:?}", file_path);
-        return Err(format!("File not found: {:?}", file_path).into());
+        let error_msg = format!("File not found: {:?}", file_path);
+        add_log("error", &error_msg, Some(slug), None);
+        return Err(error_msg.into());
     }

     let file_content = fs::read_to_string(&file_path)?;
-    eprintln!("[Rust Parser] File size: {} bytes", file_content.len());
+    add_log("info", &format!("File loaded: {} bytes", file_content.len()), Some(slug), None);

-    // Check file size limit (10MB)
-    const MAX_FILE_SIZE: usize = 10 * 1024 * 1024; // 10MB
     if file_content.len() > MAX_FILE_SIZE {
-        eprintln!("[Rust Parser] File too large: {} bytes (max: {} bytes)", file_content.len(), MAX_FILE_SIZE);
-        return Err(format!("File too large: {} bytes (max: {} bytes)", file_content.len(), MAX_FILE_SIZE).into());
+        let error_msg = format!("File too large: {} bytes (max: {} bytes)", file_content.len(), MAX_FILE_SIZE);
+        add_log("error", &error_msg, Some(slug), None);
+        return Err(error_msg.into());
     }

     let matter = Matter::<YAML>::new();
@@ -408,20 +421,21 @@ pub fn get_post_by_slug(slug: &str) -> Result<Post, Box<dyn std::error::Error>>
         match data.deserialize() {
             Ok(front) => front,
             Err(e) => {
-                eprintln!("[Rust Parser] Failed to deserialize frontmatter for post {}: {}", slug, e);
-                return Err(format!("Failed to deserialize frontmatter: {}", e).into());
+                let error_msg = format!("Failed to deserialize frontmatter: {}", e);
+                add_log("error", &error_msg, Some(slug), None);
+                return Err(error_msg.into());
             }
         }
     } else {
-        eprintln!("[Rust Parser] No frontmatter found for post: {}", slug);
+        add_log("error", "No frontmatter found", Some(slug), None);
         return Err("No frontmatter found".into());
     };

     let created_at = get_file_creation_date(&file_path)?;

     let processed_markdown = process_anchor_links(&result.content);
     let processed_markdown = process_custom_tags(&processed_markdown);
-    eprintln!("[Rust Parser] Processed markdown length: {} characters", processed_markdown.len());
+    add_log("info", "Starting markdown parsing", Some(slug), Some(&format!("Content length: {} chars", processed_markdown.len())));

     let parser = Parser::new_ext(&processed_markdown, Options::all());
     let mut html_output = String::new();
@@ -432,22 +446,19 @@ pub fn get_post_by_slug(slug: &str) -> Result<Post, Box<dyn std::error::Error>>
     let mut code_block_lang = String::new();
     let mut code_block_content = String::new();
     let mut events = Vec::new();
-    let ss = SyntaxSet::load_defaults_newlines(); // SS 卐
+    let ss = SyntaxSet::load_defaults_newlines();
     let ts = ThemeSet::load_defaults();
     let theme = &ts.themes["base16-ocean.dark"];

-    // Add error handling around the parsing loop
-    let mut event_count = 0;
     let start_parsing = Instant::now();
+    let mut event_count = 0;

     for event in parser {
         event_count += 1;
-        if event_count % 1000 == 0 {
-            eprintln!("[Rust Parser] Processed {} events for slug: {}", event_count, slug);
-            // Check for timeout (30 seconds)
-            if start_parsing.elapsed().as_secs() > 30 {
-                eprintln!("[Rust Parser] Timeout reached for slug: {}", slug);
-                return Err("Parsing timeout - file too large".into());
-            }
+        if start_parsing.elapsed().as_secs() > PARSING_TIMEOUT_SECS {
+            let error_msg = "Parsing timeout - file too large";
+            add_log("error", error_msg, Some(slug), Some(&format!("Processed {} events", event_count)));
+            return Err(error_msg.into());
         }

         match &event {
@@ -458,10 +469,8 @@ pub fn get_post_by_slug(slug: &str) -> Result<Post, Box<dyn std::error::Error>>
             },
             Event::End(Tag::Heading(_, _, _)) => {
                 in_heading = false;
-                // Strip emojis before slugifying for the id
                 let heading_no_emoji = strip_emojis(&heading_text);
                 let id = slugify(&heading_no_emoji);
-                // Add basic CSS style for headings
                 let style = "color: #2d3748; margin-top: 1.5em; margin-bottom: 0.5em;";
                 events.push(Event::Html(CowStr::Boxed(format!("<h{lvl} id=\"{id}\" style=\"{style}\">", lvl=heading_level, id=id, style=style).into_boxed_str())));
                 events.push(Event::Text(CowStr::Boxed(heading_text.clone().into_boxed_str())));
@@ -480,7 +489,6 @@ pub fn get_post_by_slug(slug: &str) -> Result<Post, Box<dyn std::error::Error>>
             },
             Event::End(Tag::CodeBlock(_)) => {
                 in_code_block = false;
-                // Highlight code block
                 let highlighted = if !code_block_lang.is_empty() {
                     if let Some(syntax) = ss.find_syntax_by_token(&code_block_lang) {
                         highlighted_html_for_string(&code_block_content, &ss, syntax, theme).unwrap_or_else(|_| format!("<pre style=\"background: #2d2d2d; color: #f8f8f2; padding: 1em; border-radius: 6px; overflow-x: auto;\"><code style=\"background: none;\">{}</code></pre>", html_escape::encode_text(&code_block_content)))
@@ -488,7 +496,6 @@ pub fn get_post_by_slug(slug: &str) -> Result<Post, Box<dyn std::error::Error>>
                         format!("<pre style=\"background: #2d2d2d; color: #f8f8f2; padding: 1em; border-radius: 6px; overflow-x: auto;\"><code style=\"background: none;\">{}</code></pre>", html_escape::encode_text(&code_block_content))
                     }
                 } else {
-                    // No language specified
                     format!("<pre style=\"background: #2d2d2d; color: #f8f8f2; padding: 1em; border-radius: 6px; overflow-x: auto;\"><code style=\"background: none;\">{}</code></pre>", html_escape::encode_text(&code_block_content))
                 };
                 events.push(Event::Html(CowStr::Boxed(highlighted.into_boxed_str())));
@@ -502,12 +509,11 @@ pub fn get_post_by_slug(slug: &str) -> Result<Post, Box<dyn std::error::Error>>
             _ => {},
         }
     }
-    eprintln!("[Rust Parser] Total events processed: {} for slug: {}", event_count, slug);
+    add_log("info", "Markdown parsing completed", Some(slug), Some(&format!("Processed {} events", event_count)));

     html::push_html(&mut html_output, events.into_iter());
-    eprintln!("[Rust Parser] HTML output length: {} characters", html_output.len());

     let sanitized_html = AMMONIA.clean(&html_output).to_string();
-    eprintln!("[Rust Parser] Sanitized HTML length: {} characters", sanitized_html.len());

     let interpret_time = start.elapsed();
     let compile_start = Instant::now();
@@ -522,8 +528,10 @@ pub fn get_post_by_slug(slug: &str) -> Result<Post, Box<dyn std::error::Error>>
         author: std::env::var("BLOG_OWNER").unwrap_or_else(|_| "Anonymous".to_string()),
     };
     let compile_time = compile_start.elapsed();

     // Insert into cache
     POST_CACHE.write().unwrap().insert(slug.to_string(), post.clone());

     // Update stats
     let mut stats = POST_STATS.write().unwrap();
     let entry = stats.entry(slug.to_string()).or_insert_with(|| PostStats {
@@ -534,6 +542,9 @@ pub fn get_post_by_slug(slug: &str) -> Result<Post, Box<dyn std::error::Error>>
     entry.last_compile_time_ms = compile_time.as_millis();
     sys.refresh_process(pid);
     entry.last_cpu_usage_percent = sys.process(pid).map(|p| p.cpu_usage()).unwrap_or(0.0) - before_cpu;

+    add_log("info", "Post parsing completed successfully", Some(slug), Some(&format!("Interpret: {}ms, Compile: {}ms", interpret_time.as_millis(), compile_time.as_millis())));
+
     Ok(post)
 }

@@ -542,6 +553,7 @@ pub fn get_all_posts() -> Result<Vec<Post>, Box<dyn std::error::Error>> {
     if let Some(posts) = ALL_POSTS_CACHE.read().unwrap().clone() {
         return Ok(posts);
     }

     let posts_dir = get_posts_directory();
     let markdown_files = find_markdown_files(&posts_dir)?;
     let mut posts = Vec::new();
@@ -549,14 +561,12 @@ pub fn get_all_posts() -> Result<Vec<Post>, Box<dyn std::error::Error>> {
     for file_path in markdown_files {
         let slug = path_to_slug(&file_path, &posts_dir);
         if let Ok(post) = get_post_by_slug(&slug) {
-            // Insert each post into the individual post cache as well
             POST_CACHE.write().unwrap().insert(slug.clone(), post.clone());
             posts.push(post);
         }
     }

     posts.sort_by(|a, b| b.created_at.cmp(&a.created_at));
-    // Cache the result
     *ALL_POSTS_CACHE.write().unwrap() = Some(posts.clone());
     Ok(posts)
 }
@@ -570,11 +580,11 @@ pub fn watch_posts<F: Fn() + Send + 'static>(on_change: F) -> notify::Result<Rec
     let (tx, rx) = channel();
     let mut watcher = RecommendedWatcher::new(tx, Config::default())?;
     watcher.watch(get_posts_directory().as_path(), RecursiveMode::Recursive)?;

     std::thread::spawn(move || {
         loop {
             match rx.recv() {
                 Ok(_event) => {
-                    // Invalidate caches on any change
                     POST_CACHE.write().unwrap().clear();
                     *ALL_POSTS_CACHE.write().unwrap() = None;
                     on_change();
@@ -618,6 +628,7 @@ pub fn checkhealth() -> HealthReport {
     let posts_dir = get_posts_directory();
     let posts_dir_exists = posts_dir.exists() && posts_dir.is_dir();
     let mut posts_count = 0;

     if posts_dir_exists {
         match std::fs::read_dir(&posts_dir) {
             Ok(entries) => {
@@ -630,9 +641,11 @@ pub fn checkhealth() -> HealthReport {
     } else {
         errors.push("Posts directory does not exist".to_string());
     }

     let cache_file_exists = Path::new(POSTS_CACHE_PATH).exists();
     let cache_stats_file_exists = Path::new(POST_STATS_PATH).exists();
     let (mut cache_readable, mut cache_post_count) = (false, None);

     if cache_file_exists {
         match std::fs::read_to_string(POSTS_CACHE_PATH) {
             Ok(data) => {
@@ -647,6 +660,7 @@ pub fn checkhealth() -> HealthReport {
             Err(e) => errors.push(format!("Failed to read cache file: {}", e)),
         }
     }

     let (mut cache_stats_readable, mut cache_stats_count) = (false, None);
     if cache_stats_file_exists {
         match std::fs::read_to_string(POST_STATS_PATH) {
@@ -662,6 +676,7 @@ pub fn checkhealth() -> HealthReport {
             Err(e) => errors.push(format!("Failed to read cache stats file: {}", e)),
         }
     }

     HealthReport {
         posts_dir_exists,
         posts_count,
@@ -674,3 +689,13 @@ pub fn checkhealth() -> HealthReport {
         errors,
     }
 }
+
+pub fn get_parser_logs() -> Vec<LogEntry> {
+    let logs = PARSER_LOGS.read().unwrap();
+    logs.iter().cloned().collect()
+}
+
+pub fn clear_parser_logs() {
+    let mut logs = PARSER_LOGS.write().unwrap();
+    logs.clear();
+}
@@ -21,6 +21,14 @@ interface HealthReport {
   errors: string[];
 }

+interface LogEntry {
+  timestamp: string;
+  level: string;
+  message: string;
+  slug?: string;
+  details?: string;
+}
+
 export default function RustStatusPage() {
   const [stats, setStats] = useState<PostStats[]>([]);
   const [loading, setLoading] = useState(true);
@@ -28,6 +36,11 @@ export default function RustStatusPage() {
   const [health, setHealth] = useState<HealthReport | null>(null);
   const [healthLoading, setHealthLoading] = useState(true);
   const [healthError, setHealthError] = useState<string | null>(null);
+  const [logs, setLogs] = useState<LogEntry[]>([]);
+  const [logsLoading, setLogsLoading] = useState(true);
+  const [logsError, setLogsError] = useState<string | null>(null);
+  const [logFilter, setLogFilter] = useState<string>('all'); // 'all', 'info', 'warning', 'error'
+  const [logSearch, setLogSearch] = useState<string>('');

   // Summary calculations
   const totalHits = stats.reduce((sum, s) => sum + s.cache_hits, 0);
@@ -65,11 +78,65 @@ export default function RustStatusPage() {
     }
   };

+  const fetchLogs = async () => {
+    setLogsLoading(true);
+    setLogsError(null);
+    try {
+      const res = await fetch('/api/admin/posts?logs=1');
+      if (!res.ok) throw new Error('Fehler beim Laden der Logs');
+      const data = await res.json();
+      setLogs(data);
+    } catch (e: any) {
+      setLogsError(e.message || 'Unbekannter Fehler');
+    } finally {
+      setLogsLoading(false);
+    }
+  };
+
+  const clearLogs = async () => {
+    try {
+      const res = await fetch('/api/admin/posts?clearLogs=1', { method: 'DELETE' });
+      if (!res.ok) throw new Error('Fehler beim Löschen der Logs');
+      await fetchLogs(); // Refresh logs after clearing
+    } catch (e: any) {
+      console.error('Error clearing logs:', e);
+    }
+  };
+
   useEffect(() => {
     fetchStats();
     fetchHealth();
+    fetchLogs();
   }, []);

+  // Filter logs based on level and search term
+  const filteredLogs = logs.filter(log => {
+    const matchesLevel = logFilter === 'all' || log.level === logFilter;
+    const matchesSearch = !logSearch ||
+      log.message.toLowerCase().includes(logSearch.toLowerCase()) ||
+      (log.slug && log.slug.toLowerCase().includes(logSearch.toLowerCase())) ||
+      (log.details && log.details.toLowerCase().includes(logSearch.toLowerCase()));
+    return matchesLevel && matchesSearch;
+  });
+
+  const getLevelColor = (level: string) => {
+    switch (level) {
+      case 'error': return 'text-red-600 bg-red-50';
+      case 'warning': return 'text-yellow-600 bg-yellow-50';
+      case 'info': return 'text-blue-600 bg-blue-50';
+      default: return 'text-gray-600 bg-gray-50';
+    }
+  };
+
+  const getLevelIcon = (level: string) => {
+    switch (level) {
+      case 'error': return '❌';
+      case 'warning': return '⚠️';
+      case 'info': return 'ℹ️';
+      default: return '📝';
+    }
+  };
+
   return (
     <div className="min-h-screen bg-gray-100 p-4 sm:p-6">
       <div className="max-w-6xl mx-auto">
@@ -101,13 +168,17 @@ export default function RustStatusPage() {

         {/* Refresh button */}
         <button
-          onClick={fetchStats}
+          onClick={() => {
+            fetchStats();
+            fetchHealth();
+            fetchLogs();
+          }}
           className="p-2 sm:px-4 sm:py-2 bg-blue-500 hover:bg-blue-600 text-white rounded-lg shadow flex items-center gap-1 transition-colors"
           title="Aktualisieren"
-          disabled={loading}
+          disabled={loading || healthLoading || logsLoading}
         >
           <svg
-            className={`w-5 h-5 ${loading ? 'animate-spin' : ''}`}
+            className={`w-5 h-5 ${(loading || healthLoading || logsLoading) ? 'animate-spin' : ''}`}
             fill="none"
             stroke="currentColor"
             viewBox="0 0 24 24"
@@ -196,6 +267,93 @@ export default function RustStatusPage() {
           </div>
         </div>

+        {/* Parser Logs Section */}
+        <div className="bg-white rounded-lg shadow p-4 mb-6">
+          <div className="flex flex-col sm:flex-row sm:items-center sm:justify-between gap-4 mb-4">
+            <h2 className="text-lg font-semibold">Parser Logs</h2>
+            <div className="flex flex-col sm:flex-row gap-2">
+              <button
+                onClick={clearLogs}
+                className="px-3 py-2 bg-red-500 hover:bg-red-600 text-white rounded text-sm transition-colors"
+                title="Clear all logs"
+              >
+                Clear Logs
+              </button>
+            </div>
+          </div>
+
+          {/* Log Filters */}
+          <div className="flex flex-col sm:flex-row gap-4 mb-4">
+            <div className="flex-1">
+              <input
+                type="text"
+                placeholder="Search logs..."
+                value={logSearch}
+                onChange={(e) => setLogSearch(e.target.value)}
+                className="w-full px-3 py-2 border border-gray-300 rounded-md focus:outline-none focus:ring-2 focus:ring-blue-500"
+              />
+            </div>
+            <div className="flex gap-2">
+              <select
+                value={logFilter}
+                onChange={(e) => setLogFilter(e.target.value)}
+                className="px-3 py-2 border border-gray-300 rounded-md focus:outline-none focus:ring-2 focus:ring-blue-500"
+              >
+                <option value="all">All Levels</option>
+                <option value="info">Info</option>
+                <option value="warning">Warning</option>
+                <option value="error">Error</option>
+              </select>
+            </div>
+          </div>
+
+          {/* Logs Display */}
+          <div className="max-h-96 overflow-y-auto">
+            {logsLoading && <div className="text-center py-4">Loading logs...</div>}
+            {logsError && <div className="text-red-500 text-center py-4">{logsError}</div>}
+            {!logsLoading && !logsError && (
+              <div className="space-y-2">
+                {filteredLogs.length === 0 ? (
+                  <div className="text-center py-4 text-gray-500">No logs found</div>
+                ) : (
+                  filteredLogs.map((log, index) => (
+                    <div key={index} className={`p-3 rounded-lg border ${getLevelColor(log.level)}`}>
+                      <div className="flex items-start gap-2">
+                        <span className="text-lg">{getLevelIcon(log.level)}</span>
+                        <div className="flex-1 min-w-0">
+                          <div className="flex items-center gap-2 mb-1">
+                            <span className="text-xs font-mono text-gray-500">
+                              {new Date(log.timestamp).toLocaleString()}
+                            </span>
+                            <span className={`px-2 py-1 rounded text-xs font-medium ${
+                              log.level === 'error' ? 'bg-red-200 text-red-800' :
+                              log.level === 'warning' ? 'bg-yellow-200 text-yellow-800' :
+                              'bg-blue-200 text-blue-800'
+                            }`}>
+                              {log.level.toUpperCase()}
+                            </span>
+                            {log.slug && (
+                              <span className="px-2 py-1 bg-gray-200 text-gray-700 rounded text-xs font-mono">
+                                {log.slug}
+                              </span>
+                            )}
+                          </div>
+                          <div className="text-sm font-medium">{log.message}</div>
+                          {log.details && (
+                            <div className="text-xs text-gray-600 mt-1 font-mono bg-gray-100 p-2 rounded">
+                              {log.details}
+                            </div>
+                          )}
+                        </div>
+                      </div>
+                    </div>
+                  ))
+                )}
+              </div>
+            )}
+          </div>
+        </div>
+
         {/* Table */}
         <div className="bg-white rounded-lg shadow p-3 sm:p-4 overflow-x-auto">
           <h2 className="text-base sm:text-lg font-semibold mb-3">Rohdaten</h2>
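The hunk above assumes that `filteredLogs`, `logSearch`, `logFilter`, `getLevelColor`, and the loading/error state exist elsewhere in the component; the derivation itself is not part of this diff. A minimal sketch of the filtering the JSX implies, with names mirroring the JSX only:

    // Sketch only - the real derivation lives outside this hunk.
    // `logs` is assumed to be the state populated by fetchLogs().
    const filteredLogs = logs.filter((log) =>
      (logFilter === 'all' || log.level === logFilter) &&
      (logSearch === '' ||
        log.message.toLowerCase().includes(logSearch.toLowerCase()) ||
        (log.slug ?? '').toLowerCase().includes(logSearch.toLowerCase()))
    );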
@@ -90,6 +90,26 @@ export async function GET(request: Request) {
       });
     }
   }
+  const logs = searchParams.get('logs');
+  if (logs === '1') {
+    // Call the Rust backend for parser logs
+    const rustResult = spawnSync(
+      process.cwd() + '/markdown_backend/target/release/markdown_backend',
+      ['logs'],
+      { encoding: 'utf-8' }
+    );
+    if (rustResult.status === 0 && rustResult.stdout) {
+      return new Response(rustResult.stdout, {
+        status: 200,
+        headers: { 'Content-Type': 'application/json' },
+      });
+    } else {
+      return new Response(JSON.stringify({ error: rustResult.stderr || rustResult.error }), {
+        status: 500,
+        headers: { 'Content-Type': 'application/json' },
+      });
+    }
+  }
   // Return the current pinned.json object
   try {
     const pinnedPath = path.join(process.cwd(), 'posts', 'pinned.json');
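For reference, the new `?logs=1` branch can be exercised from the client with a plain fetch. A sketch, assuming this route is served at `/api/pinned` (the route path is not visible in the diff):

    // Sketch only: read parser logs through the GET branch above.
    async function loadParserLogs(): Promise<unknown> {
      const res = await fetch('/api/pinned?logs=1');
      if (!res.ok) {
        throw new Error(`Logs request failed with status ${res.status}`);
      }
      return res.json(); // JSON emitted by the Rust `logs` subcommand
    }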
@@ -150,4 +170,36 @@ export async function PUT(request: Request) {
     console.error('Error editing post:', error);
     return NextResponse.json({ error: 'Error editing post' }, { status: 500 });
   }
+}
+
+export async function DELETE(request: Request) {
+  try {
+    const { searchParams } = new URL(request.url);
+    const clearLogs = searchParams.get('clearLogs');
+
+    if (clearLogs === '1') {
+      // Call the Rust backend to clear parser logs
+      const rustResult = spawnSync(
+        process.cwd() + '/markdown_backend/target/release/markdown_backend',
+        ['clearLogs'],
+        { encoding: 'utf-8' }
+      );
+      if (rustResult.status === 0 && rustResult.stdout) {
+        return new Response(rustResult.stdout, {
+          status: 200,
+          headers: { 'Content-Type': 'application/json' },
+        });
+      } else {
+        return new Response(JSON.stringify({ error: rustResult.stderr || rustResult.error }), {
+          status: 500,
+          headers: { 'Content-Type': 'application/json' },
+        });
+      }
+    }
+
+    return NextResponse.json({ error: 'Invalid delete operation' }, { status: 400 });
+  } catch (error) {
+    console.error('Error clearing logs:', error);
+    return NextResponse.json({ error: 'Error clearing logs' }, { status: 500 });
+  }
 }
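The matching client call for the new DELETE handler, again assuming the same (unconfirmed) route path:

    // Sketch only: clear parser logs through the DELETE branch above.
    async function clearParserLogs(): Promise<void> {
      const res = await fetch('/api/pinned?clearLogs=1', { method: 'DELETE' });
      if (!res.ok) {
        throw new Error(`Clear failed with status ${res.status}`);
      }
    }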
@@ -1,172 +1,34 @@
-export const dynamic = "force-dynamic";
-
 import { NextResponse } from 'next/server';
-import fs from 'fs';
 import path from 'path';
-import matter from 'gray-matter';
-import { marked } from 'marked';
-import DOMPurify from 'dompurify';
-import { JSDOM } from 'jsdom';
-import hljs from 'highlight.js';
 import { getPostsDirectory } from '@/lib/postsDirectory';
 import { spawnSync } from 'child_process';

 const postsDirectory = getPostsDirectory();

-// Function to get file creation date
-function getFileCreationDate(filePath: string): Date {
-  const stats = fs.statSync(filePath);
-  return stats.birthtime ?? stats.mtime;
-}
-
-// Function to generate ID from text (matches frontend logic)
-function generateId(text: string): string {
-  return text
-    .toLowerCase()
-    .replace(/[^a-z0-9]+/g, '-')
-    .replace(/^-+|-+$/g, '');
-}
-
-const renderer = new marked.Renderer();
-
-// Custom heading renderer to add IDs
-renderer.heading = (text, level) => {
-  const id = generateId(text);
-  return `<h${level} id="${id}">${text}</h${level}>`;
-};
-
-renderer.code = (code, infostring, escaped) => {
-  const lang = (infostring || '').match(/\S*/)?.[0];
-  const highlighted = lang && hljs.getLanguage(lang)
-    ? hljs.highlight(code, { language: lang }).value
-    : hljs.highlightAuto(code).value;
-  const langClass = lang ? `language-${lang}` : '';
-  return `<pre><code class="hljs ${langClass}">${highlighted}</code></pre>`;
-};
-
-marked.setOptions({
-  gfm: true,
-  breaks: true,
-  renderer,
-});
-
-async function getPostBySlug(slug: string) {
-  const realSlug = slug.replace(/\.md$/, '');
-  const fullPath = path.join(postsDirectory, `${realSlug}.md`);
-  let rustResult;
-  try {
-    // Try Rust backend first
-    rustResult = spawnSync(
-      path.resolve(process.cwd(), 'markdown_backend/target/release/markdown_backend'),
-      ['show', realSlug],
-      { encoding: 'utf-8' }
-    );
-    if (rustResult.status === 0 && rustResult.stdout) {
-      // Expect Rust to output a JSON object matching the post shape
-      const post = JSON.parse(rustResult.stdout);
-      // Map snake_case to camelCase for frontend compatibility
-      post.createdAt = post.created_at;
-      delete post.created_at;
-      return post;
-    } else {
-      console.error('[Rust parser error]', rustResult.stderr || rustResult.error);
-    }
-  } catch (e) {
-    console.error('[Rust parser exception]', e);
-  }
-
-  // Fallback to TypeScript parser
-  const fileContents = fs.readFileSync(fullPath, 'utf8');
-  const { data, content } = matter(fileContents);
-  const createdAt = getFileCreationDate(fullPath);
-
-  let processedContent = '';
-  try {
-    // Convert markdown to HTML
-    const rawHtml = marked.parse(content);
-    const window = new JSDOM('').window;
-    const purify = DOMPurify(window);
-    processedContent = purify.sanitize(rawHtml as string, {
-      ALLOWED_TAGS: [
-        'h1', 'h2', 'h3', 'h4', 'h5', 'h6',
-        'p', 'a', 'ul', 'ol', 'li', 'blockquote',
-        'pre', 'code', 'em', 'strong', 'del',
-        'hr', 'br', 'img', 'table', 'thead', 'tbody',
-        'tr', 'th', 'td', 'div', 'span', 'iframe'
-      ],
-      ALLOWED_ATTR: [
-        'class', 'id', 'style',
-        'href', 'target', 'rel',
-        'src', 'alt', 'title', 'width', 'height',
-        'frameborder', 'allowfullscreen'
-      ],
-      ALLOWED_URI_REGEXP: /^(?:(?:(?:f|ht)tps?|mailto|tel|callto|cid|xmpp):|[^a-z]|[a-z+.-]+(?:[^a-z+.-:]|$))/i
-    });
-  } catch (err) {
-    console.error(`Error processing markdown for slug "${realSlug}":`, err);
-    processedContent = `<div class="error-message">
-      <p>Error processing markdown content. Please check the console for details.</p>
-      <pre>${err instanceof Error ? err.message : 'Unknown error'}</pre>
-    </div>`;
-  }
-
-  return {
-    slug: realSlug,
-    title: data.title,
-    date: data.date,
-    tags: data.tags || [],
-    summary: data.summary,
-    content: processedContent,
-    createdAt: createdAt.toISOString(),
-    author: (process.env.NEXT_PUBLIC_BLOG_OWNER || 'Anonymous') + "'s",
-  };
-}
-
 export async function GET(
   request: Request,
   { params }: { params: { slug: string[] | string } }
 ) {
-  let parser = 'typescript';
-  let rustError = '';
   try {
     const slugArr = Array.isArray(params.slug) ? params.slug : [params.slug];
     const slugPath = slugArr.join('/');
-    let post;
-    try {
-      const rustResult = spawnSync(
-        path.resolve(process.cwd(), 'markdown_backend/target/release/markdown_backend'),
-        ['show', slugPath],
-        { encoding: 'utf-8' }
-      );
-      if (rustResult.status === 0 && rustResult.stdout) {
-        post = JSON.parse(rustResult.stdout);
-        post.createdAt = post.created_at;
-        delete post.created_at;
-        parser = 'rust';
-      } else {
-        rustError = rustResult.stderr || rustResult.error?.toString() || 'Unknown error';
-        console.error('[Rust parser error]', rustError);
-      }
-    } catch (e) {
-      rustError = e instanceof Error ? e.message : String(e);
-      console.error('[Rust parser exception]', rustError);
+    const rustResult = spawnSync(
+      path.resolve(process.cwd(), 'markdown_backend/target/release/markdown_backend'),
+      ['show', slugPath],
+      { encoding: 'utf-8' }
+    );
+    if (rustResult.status === 0 && rustResult.stdout) {
+      const post = JSON.parse(rustResult.stdout);
+      post.createdAt = post.created_at;
+      delete post.created_at;
+      return NextResponse.json(post);
+    } else {
+      const rustError = rustResult.stderr || rustResult.error?.toString() || 'Unknown error';
+      return NextResponse.json({ error: 'Rust parser error', details: rustError }, { status: 500 });
     }
-    if (!post) {
-      post = await getPostBySlug(slugPath);
-    }
-    const response = NextResponse.json(post);
-    response.headers.set('X-Parser', parser);
-    if (parser !== 'rust' && rustError) {
-      response.headers.set('X-Rust-Parser-Error', rustError);
-    }
-    return response;
   } catch (error) {
-    console.error('Error loading post:', error);
     return NextResponse.json(
-      {
-        error: 'Error loading post',
-        details: error instanceof Error ? error.message : 'Unknown error'
-      },
+      { error: 'Error loading post', details: error instanceof Error ? error.message : 'Unknown error' },
       { status: 500 }
     );
   }
 }
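Net effect of this rewrite: the TypeScript fallback parser, the local marked/DOMPurify pipeline, and the X-Parser header are gone; the Rust backend is now the only parser, and its failures surface directly as HTTP 500. A sketch of a client that accounts for that, assuming the route is mounted at `/api/posts/[...slug]` (a hypothetical path, not shown in the diff):

    // Sketch only: consume the rewritten post route.
    async function loadPost(slug: string) {
      const res = await fetch(`/api/posts/${slug}`);
      const body = await res.json();
      if (!res.ok) {
        // Without the TypeScript fallback, Rust failures arrive as
        // { error: 'Rust parser error', details: ... } with status 500.
        throw new Error(body.details ?? body.error);
      }
      return body; // post object; created_at already remapped to createdAt
    }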
@@ -1,5 +1,5 @@
 import { NextRequest, NextResponse } from 'next/server';
-import { watchPosts, stopWatching } from '@/lib/markdown';
+import { spawn } from 'child_process';

 // Prevent static generation of this route
 export const dynamic = 'force-dynamic';
@@ -37,35 +37,87 @@ export async function GET(request: NextRequest) {
       return;
     }

-    // Set up file watcher if not already set up
+    // Set up Rust file watcher if not already set up
     if (clients.size === 1) {
       try {
-        watchPosts(() => {
-          // Notify all connected clients about the update
-          const message = JSON.stringify({ type: 'update', timestamp: new Date().toISOString() });
-          const clientsToRemove: ReadableStreamDefaultController[] = [];
-
-          clients.forEach(client => {
-            try {
-              client.enqueue(`data: ${message}\n\n`);
-            } catch (error) {
-              // Mark client for removal
-              clientsToRemove.push(client);
-            }
-          });
-
-          // Remove disconnected clients
-          clientsToRemove.forEach(client => {
-            clients.delete(client);
-          });
-
-          // Stop watching if no clients are connected
-          if (clients.size === 0) {
-            stopWatching();
-          }
-        });
+        const rustWatcher = spawn(
+          process.cwd() + '/markdown_backend/target/release/markdown_backend',
+          ['watch'],
+          { stdio: ['pipe', 'pipe', 'pipe'] }
+        );
+
+        rustWatcher.stdout.on('data', (data) => {
+          const message = data.toString().trim();
+          console.log('Rust watcher output:', message);
+
+          if (message.includes('Posts directory changed!')) {
+            // Notify all connected clients about the update
+            const updateMessage = JSON.stringify({ type: 'update', timestamp: new Date().toISOString() });
+            const clientsToRemove: ReadableStreamDefaultController[] = [];
+
+            clients.forEach(client => {
+              try {
+                client.enqueue(`data: ${updateMessage}\n\n`);
+              } catch (error) {
+                // Mark client for removal
+                clientsToRemove.push(client);
+              }
+            });
+
+            // Remove disconnected clients
+            clientsToRemove.forEach(client => {
+              clients.delete(client);
+            });
+
+            // Stop watching if no clients are connected
+            if (clients.size === 0) {
+              console.log('No clients connected, stopping watcher');
+              rustWatcher.kill();
+            }
+          }
+        });
+
+        rustWatcher.stderr.on('data', (data) => {
+          const errorMessage = data.toString().trim();
+          console.error('Rust watcher error:', errorMessage);
+
+          // Don't treat RecvError as a real error - it's expected when the process is terminated
+          if (!errorMessage.includes('RecvError')) {
+            // Send error to clients
+            const errorData = JSON.stringify({ type: 'error', message: errorMessage });
+            const clientsToRemove: ReadableStreamDefaultController[] = [];
+
+            clients.forEach(client => {
+              try {
+                client.enqueue(`data: ${errorData}\n\n`);
+              } catch (error) {
+                clientsToRemove.push(client);
+              }
+            });
+
+            clientsToRemove.forEach(client => {
+              clients.delete(client);
+            });
+          }
+        });
+
+        rustWatcher.on('error', (error) => {
+          console.error('Rust watcher spawn error:', error);
+        });
+
+        rustWatcher.on('close', (code) => {
+          console.log('Rust watcher closed with code:', code);
+          // Only restart if we still have clients
+          if (clients.size > 0) {
+            console.log('Restarting watcher due to unexpected close');
+            // The watcher will be restarted when the next client connects
+          }
+        });
+
+        // Store the watcher process for cleanup
+        (controller as any).rustWatcher = rustWatcher;
       } catch (error) {
-        console.error('Error setting up file watcher:', error);
+        console.error('Error setting up Rust file watcher:', error);
       }
     }
@@ -75,16 +127,17 @@ export async function GET(request: NextRequest) {

           // Stop watching if no clients are connected
           if (clients.size === 0) {
-            stopWatching();
+            const rustWatcher = (controller as any).rustWatcher;
+            if (rustWatcher) {
+              console.log('Last client disconnected, stopping watcher');
+              rustWatcher.kill();
+            }
           }
         });
       },
       cancel() {
-        // Handle stream cancellation - we can't access the specific controller here
-        // The abort event handler will handle cleanup for the specific controller
-        if (clients.size === 0) {
-          stopWatching();
-        }
+        // Handle stream cancellation - this is called when the stream is cancelled
+        // We can't access the specific controller here, so we'll handle cleanup in the abort event
       }
     });
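On the wire, the handler above emits standard SSE `data:` frames carrying the `update` and `error` payloads. A sketch of a browser consumer, with the route path (`/api/events` here) assumed rather than taken from the diff:

    // Sketch only: subscribe to the stream fed by the Rust watcher.
    const source = new EventSource('/api/events');
    source.onmessage = (event) => {
      const payload = JSON.parse(event.data);
      if (payload.type === 'update') {
        console.log('posts changed at', payload.timestamp); // e.g. refetch the post list here
      } else if (payload.type === 'error') {
        console.error('watcher error:', payload.message);
      }
    };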
@@ -1,272 +0,0 @@
-// This is the frontend Markdown parser.
-// It is written in TypeScript.
-// While I was writing this, only I and God knew how it works.
-// Now, only God knows.
-//
-// If you are trying to understand how it works and optimize it, please increase the counter.
-//
-// Hours wasted here: 12
-
-import fs from 'fs';
-import path from 'path';
-import matter from 'gray-matter';
-import { marked } from 'marked';
-import DOMPurify from 'dompurify';
-import { JSDOM } from 'jsdom';
-import chokidar from 'chokidar';
-import type { FSWatcher } from 'chokidar';
-import hljs from 'highlight.js';
-import { getPostsDirectory } from './postsDirectory';
-
-export interface Post {
-  slug: string;
-  title: string;
-  date: string;
-  tags: string[];
-  summary: string;
-  content: string;
-  createdAt: Date;
-  author: string;
-}
-
-const postsDirectory = getPostsDirectory();
-
-// Function to get file creation date
-function getFileCreationDate(filePath: string): Date {
-  const stats = fs.statSync(filePath);
-  return stats.birthtime;
-}
-
-// Function to generate ID from text (matches frontend logic)
-function generateId(text: string): string {
-  return text
-    .toLowerCase()
-    .replace(/[^a-z0-9]+/g, '-')
-    .replace(/^-+|-+$/g, '');
-}
-
-// Enhanced slugification function that matches GitHub-style anchor links
-function slugify(text: string): string {
-  return text
-    .toLowerCase()
-    .trim()
-    .replace(/[^\w\s-]/g, '') // Remove special characters except spaces and hyphens
-    .replace(/[\s_-]+/g, '-') // Replace spaces, underscores, and multiple hyphens with a single hyphen
-    .replace(/^-+|-+$/g, ''); // Remove leading/trailing hyphens
-}
-
-// Function to process anchor links in markdown content
-function processAnchorLinks(content: string): string {
-  // Find all markdown links that point to anchors (e.g., [text](#anchor))
-  return content.replace(/\[([^\]]+)\]\(#([^)]+)\)/g, (match, linkText, anchor) => {
-    // Only slugify if the anchor doesn't already look like a slug
-    // This prevents double-processing of already-correct anchor links
-    const isAlreadySlugified = /^[a-z0-9-]+$/.test(anchor);
-    const slugifiedAnchor = isAlreadySlugified ? anchor : slugify(anchor);
-    return `[${linkText}](#${slugifiedAnchor})`;
-  });
-}
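A worked example of what the deleted processAnchorLinks did (inputs are illustrative, not from the repository):

    processAnchorLinks('[See Setup](#My Setup!)');  // -> '[See Setup](#my-setup)'
    processAnchorLinks('[Back](#already-a-slug)');  // -> unchanged: anchor already matches /^[a-z0-9-]+$/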
-
-// Utility function to debug anchor links (for development)
-export function debugAnchorLinks(content: string): void {
-  if (process.env.NODE_ENV !== 'development') return;
-
-  console.log('=== Anchor Link Debug Info ===');
-
-  // Extract all headings and their IDs
-  const headingRegex = /^(#{1,6})\s+(.+)$/gm;
-  const headings: Array<{ level: number; text: string; id: string }> = [];
-
-  let match;
-  while ((match = headingRegex.exec(content)) !== null) {
-    const level = match[1].length;
-    const text = match[2].trim();
-    const id = slugify(text);
-    headings.push({ level, text, id });
-  }
-
-  console.log('Generated heading IDs:');
-  headings.forEach(({ level, text, id }) => {
-    console.log(`  H${level}: "${text}" -> id="${id}"`);
-  });
-
-  // Extract all anchor links
-  const anchorLinkRegex = /\[([^\]]+)\]\(#([^)]+)\)/g;
-  const anchorLinks: Array<{ linkText: string; originalAnchor: string; slugifiedAnchor: string }> = [];
-
-  while ((match = anchorLinkRegex.exec(content)) !== null) {
-    const linkText = match[1];
-    const originalAnchor = match[2];
-    const slugifiedAnchor = slugify(originalAnchor);
-    anchorLinks.push({ linkText, originalAnchor, slugifiedAnchor });
-  }
-
-  console.log('Anchor links found:');
-  anchorLinks.forEach(({ linkText, originalAnchor, slugifiedAnchor }) => {
-    const headingExists = headings.some(h => h.id === slugifiedAnchor);
-    const status = headingExists ? '✅' : '❌';
-    console.log(`  ${status} [${linkText}](#${originalAnchor}) -> [${linkText}](#${slugifiedAnchor})`);
-  });
-
-  // Show missing headings
-  const missingAnchors = anchorLinks.filter(({ slugifiedAnchor }) =>
-    !headings.some(h => h.id === slugifiedAnchor)
-  );
-
-  if (missingAnchors.length > 0) {
-    console.warn('Missing headings for these anchor links:');
-    missingAnchors.forEach(({ linkText, originalAnchor, slugifiedAnchor }) => {
-      console.warn(`  - [${linkText}](#${originalAnchor}) -> id="${slugifiedAnchor}"`);
-    });
-  }
-
-  console.log('=== End Debug Info ===');
-}
-
-const renderer = new marked.Renderer();
-
-// Custom heading renderer to add IDs
-renderer.heading = (text, level) => {
-  const id = slugify(text);
-  return `<h${level} id="${id}">${text}</h${level}>`;
-};
-
-renderer.code = (code, infostring, escaped) => {
-  const lang = (infostring || '').match(/\S*/)?.[0];
-  const highlighted = lang && hljs.getLanguage(lang)
-    ? hljs.highlight(code, { language: lang }).value
-    : hljs.highlightAuto(code).value;
-  const langClass = lang ? `language-${lang}` : '';
-  return `<pre><code class="hljs ${langClass}">${highlighted}</code></pre>`;
-};
-
-marked.setOptions({
-  gfm: true,
-  breaks: true,
-  renderer,
-});
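With those renderer overrides in place, headings picked up stable ids derived from `slugify`; roughly (illustrative input, output abbreviated, and subject to the installed marked version):

    const html = marked.parse('## My Setup');
    // -> '<h2 id="my-setup">My Setup</h2>'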
-
-export async function getPostBySlug(slug: string): Promise<Post> {
-  const realSlug = slug.replace(/\.md$/, '');
-  const fullPath = path.join(postsDirectory, `${realSlug}.md`);
-  const fileContents = fs.readFileSync(fullPath, 'utf8');
-  const { data, content } = matter(fileContents);
-  const createdAt = getFileCreationDate(fullPath);
-
-  let processedContent = '';
-  try {
-    // Debug anchor links in development
-    debugAnchorLinks(content);
-
-    // Process anchor links before parsing markdown
-    const processedMarkdown = processAnchorLinks(content);
-    const rawHtml = marked.parse(processedMarkdown);
-    const window = new JSDOM('').window;
-    const purify = DOMPurify(window);
-    processedContent = purify.sanitize(rawHtml as string, {
-      ALLOWED_TAGS: [
-        'h1', 'h2', 'h3', 'h4', 'h5', 'h6',
-        'p', 'a', 'ul', 'ol', 'li', 'blockquote',
-        'pre', 'code', 'em', 'strong', 'del',
-        'hr', 'br', 'img', 'table', 'thead', 'tbody',
-        'tr', 'th', 'td', 'div', 'span', 'iframe'
-      ],
-      ALLOWED_ATTR: [
-        'class', 'id', 'style',
-        'href', 'target', 'rel',
-        'src', 'alt', 'title', 'width', 'height',
-        'frameborder', 'allowfullscreen'
-      ],
-      ALLOWED_URI_REGEXP: /^(?:(?:(?:f|ht)tps?|mailto|tel|callto|cid|xmpp):|[^a-z]|[a-z+.\-]+(?:[^a-z+.\-:]|$))/i
-    });
-  } catch (err) {
-    console.error(`Error processing markdown for ${realSlug}:`, err);
-    processedContent = `<div class="error-message">
-      <p>Error processing markdown content. Please check the console for details.</p>
-      <pre>${err instanceof Error ? err.message : 'Unknown error'}</pre>
-    </div>`;
-  }
-
-  return {
-    slug: realSlug,
-    title: data.title,
-    date: data.date,
-    tags: data.tags || [],
-    summary: data.summary,
-    content: processedContent,
-    createdAt,
-    author: process.env.NEXT_PUBLIC_BLOG_OWNER || 'Anonymous',
-  };
-}
-
-export async function getAllPosts(): Promise<Post[]> {
-  const fileNames = fs.readdirSync(postsDirectory);
-  const allPostsData = await Promise.all(
-    fileNames
-      .filter((fileName) => fileName.endsWith('.md'))
-      .map(async (fileName) => {
-        const slug = fileName.replace(/\.md$/, '');
-        return getPostBySlug(slug);
-      })
-  );
-
-  // Sort by creation date (newest first)
-  return allPostsData.sort((a, b) => b.createdAt.getTime() - a.createdAt.getTime());
-}
-
-export async function getPostsByTag(tag: string): Promise<Post[]> {
-  const allPosts = await getAllPosts();
-  return allPosts.filter((post) => post.tags.includes(tag));
-}
-
-// File watcher setup
-let watcher: FSWatcher | null = null;
-let onChangeCallback: (() => void) | null = null;
-
-export function watchPosts(callback: () => void) {
-  if (watcher) {
-    watcher.close();
-  }
-
-  onChangeCallback = callback;
-  watcher = chokidar.watch(postsDirectory, {
-    ignored: [
-      /(^|[\/\\])\../, // ignore dotfiles
-      /node_modules/,
-      /\.git/,
-      /\.next/,
-      /\.cache/,
-      /\.DS_Store/,
-      /Thumbs\.db/,
-      /\.tmp$/,
-      /\.temp$/
-    ],
-    persistent: true,
-    ignoreInitial: true, // Don't trigger on initial scan
-    awaitWriteFinish: {
-      stabilityThreshold: 1000, // Wait 1 second after file changes
-      pollInterval: 100 // Check every 100ms
-    },
-    usePolling: false, // Use native file system events when possible
-    interval: 1000 // Fallback polling interval (only used if native events fail)
-  });
-
-  watcher
-    .on('add', handleFileChange)
-    .on('change', handleFileChange)
-    .on('unlink', handleFileChange);
-}
-
-function handleFileChange() {
-  if (onChangeCallback) {
-    onChangeCallback();
-  }
-}
-
-export function stopWatching() {
-  if (watcher) {
-    watcher.close();
-    watcher = null;
-  }
-  onChangeCallback = null;
-}