Enhance blog features and improve backend functionality

- Added a VS Code-style editor with YAML frontmatter support and live preview.
- Implemented force-reparse functionality so post updates take effect immediately (caches are cleared and the post re-parsed).
- Improved directory scanning with error handling and automatic directory creation.
- Introduced new CLI commands for cache management: `reinterpret-all` and `reparse-post` (usage sketch below).
- Enhanced logging for better debugging and monitoring of the Rust backend.
- Updated README to reflect new features and improvements.
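
The new cache commands emit a JSON result on stdout. A typical session might look like this (the binary name `blog-backend`, the slug, and the post count are illustrative assumptions, not part of this commit):

    $ blog-backend reinterpret-all
    {"success":true,"message":"All posts reinterpreted successfully. Processed 42 posts."}
    $ blog-backend reparse-post my-first-post
    {"success":true,"message":"Post 'my-first-post' reparsed successfully","post":{...}}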
2025-07-05 22:23:58 +02:00
parent f94ddaa3b1
commit 21f13ef8ae
8 changed files with 705 additions and 110 deletions

src/main.rs

@@ -1,7 +1,17 @@
#[warn(unused_imports)]
use clap::{Parser, Subcommand};
mod markdown;
use markdown::{get_all_posts, get_post_by_slug, get_posts_by_tag, watch_posts, get_parser_logs, clear_parser_logs, load_parser_logs_from_disk};
use markdown::{
get_all_posts,
get_post_by_slug,
get_posts_by_tag,
watch_posts,
get_parser_logs,
clear_parser_logs,
load_parser_logs_from_disk,
force_reinterpret_all_posts,
force_reparse_single_post
};
use serde_json;
use std::fs;
use std::io;
@@ -44,6 +54,12 @@ enum Commands {
Logs,
/// Clear parser logs
ClearLogs,
/// Force reinterpret all posts (clear cache and re-parse)
ReinterpretAll,
/// Force reparse a single post (clear cache and re-parse)
ReparsePost {
slug: String,
},
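    // clap's derive maps these variants to kebab-case subcommands on the CLI:
    // `reinterpret-all` and `reparse-post <SLUG>`.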
/// Parse markdown from file or stdin
Parse {
#[arg(long)]
@@ -111,6 +127,35 @@ fn main() {
clear_parser_logs();
println!("{}", serde_json::to_string(&serde_json::json!({"success": true, "message": "Logs cleared"})).unwrap());
}
Commands::ReinterpretAll => {
match force_reinterpret_all_posts() {
Ok(posts) => {
println!("{}", serde_json::to_string(&serde_json::json!({
"success": true,
"message": format!("All posts reinterpreted successfully. Processed {} posts.", posts.len())
})).unwrap());
}
Err(e) => {
eprintln!("{}", e);
std::process::exit(1);
}
}
}
Commands::ReparsePost { slug } => {
match force_reparse_single_post(slug) {
Ok(post) => {
println!("{}", serde_json::to_string(&serde_json::json!({
"success": true,
"message": format!("Post '{}' reparsed successfully", slug),
"post": post
})).unwrap());
}
Err(e) => {
eprintln!("{}", e);
std::process::exit(1);
}
}
}
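            // Error handling for both commands: JSON on stdout for success,
            // the error on stderr plus a non-zero exit code on failure.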
Commands::Parse { file, stdin, ast } => {
let content = if *stdin {
let mut buffer = String::new();
@@ -139,4 +184,4 @@ fn main() {
}
}
}
}

src/markdown.rs

@@ -180,6 +180,18 @@ static AMMONIA: Lazy<ammonia::Builder<'static>> = Lazy::new(|| {
});
// Helper functions
fn ensure_cache_directory() {
let cache_dir = PathBuf::from("./cache");
if !cache_dir.exists() {
if let Err(e) = fs::create_dir_all(&cache_dir) {
eprintln!("Failed to create cache directory: {}", e);
add_log("error", &format!("Failed to create cache directory: {}", e), None, None);
} else {
add_log("info", "Created cache directory: ./cache", None, None);
}
}
}
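// ensure_cache_directory() is invoked before every cache write (see
// save_parser_logs_to_disk_inner and save_post_cache_to_disk below), so
// ./cache is recreated even if it is deleted while the server is running.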
fn get_posts_directory() -> PathBuf {
let is_docker = std::env::var("DOCKER_CONTAINER").is_ok()
|| std::env::var("KUBERNETES_SERVICE_HOST").is_ok()
@@ -207,29 +219,101 @@ fn get_posts_directory() -> PathBuf {
for candidate in candidates.iter() {
let path = PathBuf::from(candidate);
if path.exists() && path.is_dir() {
add_log("info", &format!("Using posts directory: {:?}", path), None, None);
return path;
}
}
// Fallback: default to ./posts
PathBuf::from("./posts")
// Fallback: create ./posts if it doesn't exist
let fallback_path = PathBuf::from("./posts");
if !fallback_path.exists() {
if let Err(e) = fs::create_dir_all(&fallback_path) {
add_log("error", &format!("Failed to create posts directory: {}", e), None, None);
} else {
add_log("info", "Created posts directory: ./posts", None, None);
}
}
fallback_path
}
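// Note: if creating ./posts fails, the error is only logged and the path is
// returned anyway; the failure then surfaces as a NotFound error from
// find_markdown_files below.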
// Function to find Markdown files.
// This will scan Directories recursively
// Function to find Markdown files with improved reliability
fn find_markdown_files(dir: &Path) -> std::io::Result<Vec<PathBuf>> {
let mut files = Vec::new();
if dir.is_dir() {
for entry in fs::read_dir(dir)? {
let entry = entry?;
let path = entry.path();
if path.is_dir() {
files.extend(find_markdown_files(&path)?);
} else if path.extension().map(|e| e == "md").unwrap_or(false) {
                files.push(path);
            }
        }
    }
let mut errors = Vec::new();
if !dir.exists() {
let error_msg = format!("Directory does not exist: {:?}", dir);
add_log("error", &error_msg, None, None);
return Err(std::io::Error::new(std::io::ErrorKind::NotFound, error_msg));
}
if !dir.is_dir() {
let error_msg = format!("Path is not a directory: {:?}", dir);
add_log("error", &error_msg, None, None);
return Err(std::io::Error::new(std::io::ErrorKind::InvalidInput, error_msg));
}
// Try to read directory with retry logic
let entries = match fs::read_dir(dir) {
Ok(entries) => entries,
Err(e) => {
add_log("error", &format!("Failed to read directory {:?}: {}", dir, e), None, None);
return Err(e);
}
};
for entry_result in entries {
match entry_result {
Ok(entry) => {
let path = entry.path();
// Skip hidden files and directories
if let Some(name) = path.file_name() {
if name.to_string_lossy().starts_with('.') {
continue;
}
}
if path.is_dir() {
// Recursively scan subdirectories
match find_markdown_files(&path) {
Ok(subfiles) => files.extend(subfiles),
Err(e) => {
let error_msg = format!("Error scanning subdirectory {:?}: {}", path, e);
add_log("warning", &error_msg, None, None);
errors.push(error_msg);
}
}
} else if path.extension().map(|e| e == "md").unwrap_or(false) {
// Verify the file is readable
match fs::metadata(&path) {
Ok(metadata) => {
if metadata.is_file() {
files.push(path);
}
}
Err(e) => {
let error_msg = format!("Cannot access file {:?}: {}", path, e);
add_log("warning", &error_msg, None, None);
errors.push(error_msg);
}
}
}
}
Err(e) => {
let error_msg = format!("Error reading directory entry: {}", e);
add_log("warning", &error_msg, None, None);
errors.push(error_msg);
}
}
}
// Log summary
add_log("info", &format!("Found {} markdown files in {:?}", files.len(), dir), None, None);
if !errors.is_empty() {
add_log("warning", &format!("Encountered {} errors during directory scan", errors.len()), None, None);
}
Ok(files)
}
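// Error contract: a missing or unreadable root directory is a hard Err;
// individual unreadable entries or failed subdirectory scans are logged as
// warnings, collected in `errors`, and skipped, so a single bad file cannot
// abort the whole scan.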
@@ -372,7 +456,7 @@ fn add_log(level: &str, message: &str, slug: Option<&str>, details: Option<&str>
}
fn save_parser_logs_to_disk_inner(logs: &VecDeque<LogEntry>) -> std::io::Result<()> {
let _ = std::fs::create_dir_all("./cache");
ensure_cache_directory();
let logs_vec: Vec<_> = logs.iter().cloned().collect();
let json = serde_json::to_string(&logs_vec)?;
std::fs::write(PARSER_LOGS_PATH, json)?;
@@ -654,12 +738,11 @@ pub fn load_post_cache_from_disk() {
}
pub fn save_post_cache_to_disk() {
ensure_cache_directory();
if let Ok(map) = serde_json::to_string(&*POST_CACHE.read().unwrap()) {
let _ = fs::create_dir_all("./cache");
let _ = fs::write(POSTS_CACHE_PATH, map);
}
if let Ok(map) = serde_json::to_string(&*POST_STATS.read().unwrap()) {
let _ = fs::create_dir_all("./cache");
let _ = fs::write(POST_STATS_PATH, map);
}
}
@@ -739,7 +822,90 @@ pub fn get_parser_logs() -> Vec<LogEntry> {
}
pub fn clear_parser_logs() {
let mut logs = PARSER_LOGS.write().unwrap();
logs.clear();
let _ = std::fs::remove_file(PARSER_LOGS_PATH);
PARSER_LOGS.write().unwrap().clear();
if let Err(e) = save_parser_logs_to_disk_inner(&VecDeque::new()) {
eprintln!("Failed to save empty logs to disk: {}", e);
}
}
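// Clearing writes an empty JSON array to PARSER_LOGS_PATH via
// save_parser_logs_to_disk_inner, so the on-disk log file stays in sync
// with the cleared in-memory state.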
// Force reinterpret all posts by clearing cache and re-parsing
pub fn force_reinterpret_all_posts() -> Result<Vec<Post>, Box<dyn std::error::Error>> {
add_log("info", "Starting force reinterpret of all posts", None, None);
// Clear all caches
POST_CACHE.write().unwrap().clear();
ALL_POSTS_CACHE.write().unwrap().take();
POST_STATS.write().unwrap().clear();
add_log("info", "Cleared all caches", None, None);
// Get posts directory and find all markdown files
let posts_dir = get_posts_directory();
let markdown_files = find_markdown_files(&posts_dir)?;
add_log("info", &format!("Found {} markdown files to reinterpret", markdown_files.len()), None, None);
let mut posts = Vec::new();
let mut success_count = 0;
let mut error_count = 0;
for file_path in markdown_files {
let slug = path_to_slug(&file_path, &posts_dir);
match get_post_by_slug(&slug) {
Ok(post) => {
posts.push(post);
success_count += 1;
add_log("info", &format!("Successfully reinterpreted: {}", slug), Some(&slug), None);
}
Err(e) => {
error_count += 1;
add_log("error", &format!("Failed to reinterpret {}: {}", slug, e), Some(&slug), None);
}
}
}
// Update the all posts cache
ALL_POSTS_CACHE.write().unwrap().replace(posts.clone());
// Save cache to disk
save_post_cache_to_disk();
add_log("info", &format!("Force reinterpret completed. Success: {}, Errors: {}", success_count, error_count), None, None);
Ok(posts)
}
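// This relies on get_post_by_slug() re-parsing each file from disk, since
// every cache was cleared above; save_post_cache_to_disk() then persists
// the freshly rebuilt state.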
// Force reparse a single post by clearing its cache and re-parsing
pub fn force_reparse_single_post(slug: &str) -> Result<Post, Box<dyn std::error::Error>> {
add_log("info", &format!("Starting force reparse of post: {}", slug), Some(slug), None);
// Clear this specific post from all caches
POST_CACHE.write().unwrap().remove(slug);
POST_STATS.write().unwrap().remove(slug);
// Clear the all posts cache since it might contain this post
ALL_POSTS_CACHE.write().unwrap().take();
add_log("info", &format!("Cleared cache for post: {}", slug), Some(slug), None);
// Re-parse the post
let post = get_post_by_slug(slug)?;
// Update the all posts cache with the new post
let mut all_posts_cache = ALL_POSTS_CACHE.write().unwrap();
if let Some(ref mut posts) = *all_posts_cache {
// Remove old version if it exists
posts.retain(|p| p.slug != slug);
// Add new version
posts.push(post.clone());
// Sort by creation date
posts.sort_by(|a, b| b.created_at.cmp(&a.created_at));
}
// Save cache to disk
save_post_cache_to_disk();
add_log("info", &format!("Successfully reparsed post: {}", slug), Some(slug), None);
Ok(post)
}
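// Note: the ALL_POSTS_CACHE write guard acquired above remains held while
// save_post_cache_to_disk() runs; that is deadlock-free here because the
// save function only locks POST_CACHE and POST_STATS.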