updated graphic layout for rust
Some checks failed
Deploy / build-and-deploy (push) Failing after 2s

2025-06-29 21:02:57 +02:00
parent baad7309df
commit a401732d7d
4 changed files with 253 additions and 98 deletions


@@ -1,13 +1,19 @@
#[warn(unused_imports)]
use clap::{Parser, Subcommand};
mod markdown;
use markdown::{get_all_posts, get_post_by_slug, get_posts_by_tag, watch_posts, get_parser_logs, clear_parser_logs};
use markdown::{get_all_posts, get_post_by_slug, get_posts_by_tag, watch_posts, get_parser_logs, clear_parser_logs, load_parser_logs_from_disk};
use serde_json;
use std::fs;
use std::io;
use std::io::Read; // STD AYOOOOOOOOOOOOOO - Tsodin
//
// This is the Parser's "Command Central"
// Commands for the CLI are defined here.
// The parser will provide appropriate errors; if you care, then modify.
// Hours wasted: 2.42h (due to shitty error logging)
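// Example invocation (illustrative; depends on how you run the binary): `cargo run -- list` runs the List subcommand defined below.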
#[derive(Parser)]
#[command(name = "Markdown Backend")]
#[command(about = "A CLI for managing markdown blog posts", long_about = None)]
@@ -51,6 +57,7 @@ enum Commands {
fn main() {
    markdown::load_post_cache_from_disk();
    load_parser_logs_from_disk();
    let cli = Cli::parse();
    match &cli.command {
        Commands::List => {


@@ -29,9 +29,10 @@ use regex::Regex;
// Constants
const POSTS_CACHE_PATH: &str = "./cache/posts_cache.json";
const POST_STATS_PATH: &str = "./cache/post_stats.json";
const MAX_FILE_SIZE: usize = 10 * 1024 * 1024; // 10MB
const PARSING_TIMEOUT_SECS: u64 = 30;
const MAX_FILE_SIZE: usize = 2 * 1024 * 1024; // 2MB
const PARSING_TIMEOUT_SECS: u64 = 6000; // 100 minutes
const MAX_LOG_ENTRIES: usize = 1000;
const PARSER_LOGS_PATH: &str = "./cache/parser_logs.json";
// Data structures
#[derive(Debug, Deserialize, Clone, Serialize)]
@@ -41,7 +42,7 @@ pub struct PostFrontmatter {
    pub tags: Option<Vec<String>>,
    pub summary: Option<String>,
}
// Post Data Structures
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Post {
    pub slug: String,
@@ -54,6 +55,7 @@ pub struct Post {
    pub author: String,
}
// Data Structure for Post Statistics
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct PostStats {
    pub slug: String,
@@ -65,6 +67,7 @@ pub struct PostStats {
    pub last_cache_status: String, // "hit" or "miss"
}
// Data Structures for Health Reporting
#[derive(Debug, Serialize)]
pub struct HealthReport {
    pub posts_dir_exists: bool,
@@ -78,6 +81,7 @@ pub struct HealthReport {
    pub errors: Vec<String>,
}
// Log Data Structure (frontend related)
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LogEntry {
    pub timestamp: String,
@@ -210,6 +214,8 @@ fn get_posts_directory() -> PathBuf {
PathBuf::from("./posts")
}
// Function to find Markdown files.
// This scans directories recursively.
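// e.g. find_markdown_files(Path::new("./posts")) would collect every nested markdown file (illustrative call).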
fn find_markdown_files(dir: &Path) -> std::io::Result<Vec<PathBuf>> {
    let mut files = Vec::new();
    if dir.is_dir() {
@@ -227,6 +233,7 @@ fn find_markdown_files(dir: &Path) -> std::io::Result<Vec<PathBuf>> {
    Ok(files)
}
// Generate a slug from a file path.
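// e.g. a file at "rust/intro.md" inside the posts dir maps to the slug "rust::intro" (illustrative).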
fn path_to_slug(file_path: &Path, posts_dir: &Path) -> String {
    let relative_path = file_path.strip_prefix(posts_dir).unwrap_or(file_path);
    let without_ext = relative_path.with_extension("");
@@ -236,6 +243,7 @@ fn path_to_slug(file_path: &Path, posts_dir: &Path) -> String {
        .replace("\\", "::")
}
// Convert a slug back into a file path.
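// e.g. the slug "rust::intro" resolves back to "rust/intro.md" inside the posts dir (illustrative).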
fn slug_to_path(slug: &str, posts_dir: &Path) -> PathBuf {
    let parts: Vec<&str> = slug.split("::").collect();
    if parts.len() == 1 {
@@ -253,6 +261,7 @@ fn slug_to_path(slug: &str, posts_dir: &Path) -> PathBuf {
    }
}
// Inspect the Markdown file and derive a creation date from its filesystem metadata.
fn get_file_creation_date(path: &Path) -> std::io::Result<DateTime<Utc>> {
    let metadata = fs::metadata(path)?;
    match metadata.created() {
@@ -264,6 +273,9 @@ fn get_file_creation_date(path: &Path) -> std::io::Result<DateTime<Utc>> {
    }
}
// The frontend expects a plain old string that will be used for the anchor,
// something like this -> #i-am-a-heading
// This creates a cross-reference for links that scroll to said heading.
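// e.g. "[Jump here](#My Heading)" is rewritten so the link targets "#my-heading" (illustrative).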
fn process_anchor_links(content: &str) -> String {
    let re = regex::Regex::new(r"\[([^\]]+)\]\(#([^)]+)\)").unwrap();
    re.replace_all(content, |caps: &regex::Captures| {
@@ -274,6 +286,8 @@ fn process_anchor_links(content: &str) -> String {
    }).to_string()
}
// Here we just remove the Emoji if it is in the heading.
// Example "🏳️‍🌈 Hi!" will turn into "#hi"
fn strip_emojis(s: &str) -> String {
    s.chars()
        .filter(|c| {
@@ -291,6 +305,8 @@ fn strip_emojis(s: &str) -> String {
        .collect()
}
// This is an obsolete function for custom HTML tags.
// Example usage in text: <warning />
fn process_custom_tags(content: &str) -> String {
    let mut processed = content.to_string();
@@ -343,13 +359,35 @@ fn add_log(level: &str, message: &str, slug: Option<&str>, details: Option<&str>
        slug: slug.map(|s| s.to_string()),
        details: details.map(|s| s.to_string()),
    };
    let mut logs = PARSER_LOGS.write().unwrap();
    logs.push_back(log_entry);
    // Keep only the last MAX_LOG_ENTRIES
    if logs.len() > MAX_LOG_ENTRIES {
        logs.pop_front();
    {
        let mut logs = PARSER_LOGS.write().unwrap();
        logs.push_back(log_entry.clone());
        // Keep only the last MAX_LOG_ENTRIES
        while logs.len() > MAX_LOG_ENTRIES {
            logs.pop_front();
        }
        // Write logs to disk
        let _ = save_parser_logs_to_disk_inner(&logs);
    }
}
fn save_parser_logs_to_disk_inner(logs: &VecDeque<LogEntry>) -> std::io::Result<()> {
    let _ = std::fs::create_dir_all("./cache");
    let logs_vec: Vec<_> = logs.iter().cloned().collect();
    let json = serde_json::to_string(&logs_vec)?;
    std::fs::write(PARSER_LOGS_PATH, json)?;
    Ok(())
}
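// Note: add_log invokes this while still holding the logs write lock, and ignores any write error (the `let _ =`).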
pub fn load_parser_logs_from_disk() {
    if let Ok(data) = std::fs::read_to_string(PARSER_LOGS_PATH) {
        if let Ok(logs_vec) = serde_json::from_str::<Vec<LogEntry>>(&data) {
            let mut logs = PARSER_LOGS.write().unwrap();
            logs.clear();
            for entry in logs_vec {
                logs.push_back(entry);
            }
        }
    }
}
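// load_parser_logs_from_disk runs once at startup (see main) and again on every get_parser_logs call, so reads stay fresh.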
@@ -365,6 +403,8 @@ pub fn rsparseinfo() -> String {
    }
}
// This Function gets the Post by its Slugified Version.
// This is basically only used for Caching (loading from it).
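// e.g. get_post_by_slug("rust::intro") returns the parsed (or cached) Post, or an error; the slug here is illustrative.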
pub fn get_post_by_slug(slug: &str) -> Result<Post, Box<dyn std::error::Error>> {
add_log("info", "Starting post parsing", Some(slug), None);
@@ -530,6 +570,7 @@ pub fn get_post_by_slug(slug: &str) -> Result<Post, Box<dyn std::error::Error>>
    let compile_time = compile_start.elapsed();
    // Insert into cache
    // If this no worky, program fucky wucky? - Check logs
    POST_CACHE.write().unwrap().insert(slug.to_string(), post.clone());
    // Update stats
@@ -691,6 +732,8 @@ pub fn checkhealth() -> HealthReport {
}
pub fn get_parser_logs() -> Vec<LogEntry> {
    // Always reload from disk to ensure up-to-date logs
    load_parser_logs_from_disk();
    let logs = PARSER_LOGS.read().unwrap();
    logs.iter().cloned().collect()
}
@@ -698,4 +741,5 @@ pub fn get_parser_logs() -> Vec<LogEntry> {
pub fn clear_parser_logs() {
    let mut logs = PARSER_LOGS.write().unwrap();
    logs.clear();
    let _ = std::fs::remove_file(PARSER_LOGS_PATH);
}