Update environment configuration, enhance deployment script, and localize backend messages
All checks were successful
Deploy / build-and-deploy (push) Successful in 31m44s
- Added instructions in .env.local for Docker deployment.
- Improved docker.sh to display deployment status with colored output and added ASCII art.
- Updated main.js to indicate future deprecation of the Electron app.
- Translated various log messages and CLI command outputs in the Rust backend to German for better localization.
- Removed unused asset (peta.png) from the project.
- Updated RustStatusPage component to reflect German translations in UI elements and error messages.
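For context, a minimal sketch of the deployment flow these changes target, assuming docker.sh is run from the repository root and that PORT and CONTAINER_NAME are defined earlier in the script (neither value appears in this diff):

    # fill in the variables documented in .env.local, then deploy
    ./docker.sh
    # on success the script now prints a colored status line such as
    #   App is running at: http://localhost:<PORT>
    # followed by the rainbow ASCII banner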
@@ -1,6 +1,9 @@
#--------------------------------------------------------------------
# -----------------------------------------------------------------------#
# In here you have to set your socials / links
# Explenations of Variables #
#--------------------------------------------------------------------
# -----------------------------------------------------------------------#
# Modify This before deploying with docker / locally #
#---------------------------------------------------------------------#
#
NEXT_PUBLIC_BLOG_OWNER=Rattatwinko # Your Name goes here #
NEXT_ABOUT_ME_LINK="http://localhost:80" # Your WebPage goes here #
NEXT_SOCIAL_INSTAGRAM="http://instagram.com/rattatwinko" # Your Instagram Link goes here #
docker.sh
@@ -44,5 +44,37 @@ if ! docker ps | grep -q $CONTAINER_NAME; then
exit 1
fi

# Output with colors
GREEN='\033[1;32m' # Green
CYAN='\033[1;36m'
RESET='\033[0m'

echo ""
echo "Deployment complete!"
echo "App should be available at http://localhost:$PORT"
echo ""
echo -e " App is running at: ${GREEN}http://localhost:${PORT}${RESET}"
echo ""

# Rainbow ASCII Art
RAINBOW=(
'\033[1;31m' # Red
'\033[1;33m' # Yellow
'\033[1;32m' # Green
'\033[1;36m' # Cyan
'\033[1;34m' # Blue
'\033[1;35m' # Magenta
)

ASCII=(
" __ ___ __ __ ____ __ "
" / |/ /___ ______/ /______/ /___ _ ______ / __ )/ /___ ____ _"
" / /|_/ / __ \`/ ___/ //_/ __ / __ \\ | /| / / __ \\/ __ / / __ \\/ __ \`/"
" / / / / /_/ / / / ,< / /_/ / /_/ / |/ |/ / / / / /_/ / / /_/ / /_/ / "
"/_/ /_/\\__,_/_/ /_/|_|\\__,_/\\____/|__/|__/_/ /_/_____/_/\\____/\\__, / "
" /____/ "
)

for i in "${!ASCII[@]}"; do
color="${RAINBOW[$((i % ${#RAINBOW[@]}))]}"
echo -e "${color}${ASCII[$i]}${RESET}"
done
@@ -2,6 +2,13 @@ const { app, BrowserWindow } = require('electron');
const path = require('path');
const isDev = process.env.NODE_ENV === 'development';

/*

This will be discontinued in a bit.
Either move to Docker or get fucked.

*/

function createWindow() {
const mainWindow = new BrowserWindow({
width: 1200,
@@ -26,7 +26,7 @@ use std::io::Read; // STD AYOOOOOOOOOOOOOO - Tsodin

#[derive(Parser)]
#[command(name = "Markdown Backend")]
#[command(about = "A CLI for managing markdown blog posts", long_about = None)]
#[command(about = "Ein CLI für die Verwaltung von Markdown-Blogbeiträgen", long_about = None)]
struct Cli {
#[command(subcommand)]
command: Commands,

@@ -103,9 +103,9 @@ fn main() {
println!("{}", serde_json::to_string(&posts).unwrap());
}
Commands::Watch => {
println!("Watching for changes in posts directory. Press Ctrl+C to exit.");
println!("Überwache Änderungen im Posts-Verzeichnis. Drücken Sie Strg+C zum Beenden.");
let _ = watch_posts(|| {
println!("Posts directory changed!");
println!("Posts-Verzeichnis hat sich geändert!");
});
// Keep the main thread alive
loop {

@@ -125,14 +125,14 @@ fn main() {
}
Commands::ClearLogs => {
clear_parser_logs();
println!("{}", serde_json::to_string(&serde_json::json!({"success": true, "message": "Logs cleared"})).unwrap());
println!("{}", serde_json::to_string(&serde_json::json!({"success": true, "message": "Protokolle gelöscht"})).unwrap());
}
Commands::ReinterpretAll => {
match force_reinterpret_all_posts() {
Ok(posts) => {
println!("{}", serde_json::to_string(&serde_json::json!({
"success": true,
"message": format!("All posts reinterpreted successfully. Processed {} posts.", posts.len())
"message": format!("Alle Beiträge erfolgreich neu interpretiert. {} Beiträge verarbeitet.", posts.len())
})).unwrap());
}
Err(e) => {

@@ -146,7 +146,7 @@ fn main() {
Ok(post) => {
println!("{}", serde_json::to_string(&serde_json::json!({
"success": true,
"message": format!("Post '{}' reparsed successfully", slug),
"message": format!("Beitrag '{}' erfolgreich neu geparst", slug),
"post": post
})).unwrap());
}

@@ -164,7 +164,7 @@ fn main() {
} else if let Some(file_path) = file {
fs::read_to_string(file_path).unwrap()
} else {
eprintln!("Either --file or --stdin must be specified");
eprintln!("Entweder --file oder --stdin muss angegeben werden");
std::process::exit(1);
};
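The match arms above are clap subcommands. A hypothetical set of invocations showing the relocalized output, assuming the backend binary is built as markdown-backend (the actual crate/binary name is not shown in this diff) and clap's default kebab-case names derived from the Commands variants:

    ./markdown-backend watch            # prints the new German watch messages
    ./markdown-backend clear-logs       # emits {"success": true, "message": "Protokolle gelöscht"}
    ./markdown-backend reinterpret-all  # re-parses every post and reports counts in German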
@@ -184,10 +184,10 @@ fn ensure_cache_directory() {
let cache_dir = PathBuf::from("./cache");
if !cache_dir.exists() {
if let Err(e) = fs::create_dir_all(&cache_dir) {
eprintln!("Failed to create cache directory: {}", e);
add_log("error", &format!("Failed to create cache directory: {}", e), None, None);
eprintln!("Fehler beim Erstellen des Cache-Verzeichnisses: {}", e);
add_log("error", &format!("Fehler beim Erstellen des Cache-Verzeichnisses: {}", e), None, None);
} else {
add_log("info", "Created cache directory: ./cache", None, None);
add_log("info", "Cache-Verzeichnis erstellt: ./cache", None, None);
}
}
}

@@ -219,7 +219,7 @@ fn get_posts_directory() -> PathBuf {
for candidate in candidates.iter() {
let path = PathBuf::from(candidate);
if path.exists() && path.is_dir() {
add_log("info", &format!("Using posts directory: {:?}", path), None, None);
add_log("info", &format!("Verwende Posts-Verzeichnis: {:?}", path), None, None);
return path;
}
}

@@ -228,9 +228,9 @@ fn get_posts_directory() -> PathBuf {
let fallback_path = PathBuf::from("./posts");
if !fallback_path.exists() {
if let Err(e) = fs::create_dir_all(&fallback_path) {
add_log("error", &format!("Failed to create posts directory: {}", e), None, None);
add_log("error", &format!("Fehler beim Erstellen des Posts-Verzeichnisses: {}", e), None, None);
} else {
add_log("info", "Created posts directory: ./posts", None, None);
add_log("info", "Posts-Verzeichnis erstellt: ./posts", None, None);
}
}
fallback_path
@@ -242,13 +242,13 @@ fn find_markdown_files(dir: &Path) -> std::io::Result<Vec<PathBuf>> {
let mut errors = Vec::new();

if !dir.exists() {
let error_msg = format!("Directory does not exist: {:?}", dir);
let error_msg = format!("Verzeichnis existiert nicht: {:?}", dir);
add_log("error", &error_msg, None, None);
return Err(std::io::Error::new(std::io::ErrorKind::NotFound, error_msg));
}

if !dir.is_dir() {
let error_msg = format!("Path is not a directory: {:?}", dir);
let error_msg = format!("Pfad ist kein Verzeichnis: {:?}", dir);
add_log("error", &error_msg, None, None);
return Err(std::io::Error::new(std::io::ErrorKind::InvalidInput, error_msg));
}

@@ -257,7 +257,7 @@ fn find_markdown_files(dir: &Path) -> std::io::Result<Vec<PathBuf>> {
let entries = match fs::read_dir(dir) {
Ok(entries) => entries,
Err(e) => {
add_log("error", &format!("Failed to read directory {:?}: {}", dir, e), None, None);
add_log("error", &format!("Fehler beim Lesen des Verzeichnisses {:?}: {}", dir, e), None, None);
return Err(e);
}
};

@@ -279,7 +279,7 @@ fn find_markdown_files(dir: &Path) -> std::io::Result<Vec<PathBuf>> {
match find_markdown_files(&path) {
Ok(subfiles) => files.extend(subfiles),
Err(e) => {
let error_msg = format!("Error scanning subdirectory {:?}: {}", path, e);
let error_msg = format!("Fehler beim Scannen des Unterverzeichnisses {:?}: {}", path, e);
add_log("warning", &error_msg, None, None);
errors.push(error_msg);
}

@@ -293,7 +293,7 @@ fn find_markdown_files(dir: &Path) -> std::io::Result<Vec<PathBuf>> {
}
}
Err(e) => {
let error_msg = format!("Cannot access file {:?}: {}", path, e);
let error_msg = format!("Datei nicht zugänglich {:?}: {}", path, e);
add_log("warning", &error_msg, None, None);
errors.push(error_msg);
}

@@ -301,7 +301,7 @@ fn find_markdown_files(dir: &Path) -> std::io::Result<Vec<PathBuf>> {
}
}
Err(e) => {
let error_msg = format!("Error reading directory entry: {}", e);
let error_msg = format!("Fehler beim Lesen des Verzeichniseintrags: {}", e);
add_log("warning", &error_msg, None, None);
errors.push(error_msg);
}

@@ -309,9 +309,9 @@ fn find_markdown_files(dir: &Path) -> std::io::Result<Vec<PathBuf>> {
}

// Log summary
add_log("info", &format!("Found {} markdown files in {:?}", files.len(), dir), None, None);
add_log("info", &format!("{} Markdown-Dateien in {:?} gefunden", files.len(), dir), None, None);
if !errors.is_empty() {
add_log("warning", &format!("Encountered {} errors during directory scan", errors.len()), None, None);
add_log("warning", &format!("{} Fehler während der Verzeichnissuche aufgetreten", errors.len()), None, None);
}

Ok(files)
@@ -396,11 +396,11 @@ fn process_custom_tags(content: &str) -> String {

// Handle simple tags without parameters
let simple_tags = [
("<mytag />", "<div class=\"custom-tag mytag\">This is my custom tag content!</div>"),
("<warning />", "<div class=\"custom-tag warning\" style=\"background: #fff3cd; border: 1px solid #ffeaa7; padding: 1rem; border-radius: 4px; margin: 1rem 0;\">⚠️ Warning: This is a custom warning tag!</div>"),
("<info />", "<div class=\"custom-tag info\" style=\"background: #d1ecf1; border: 1px solid #bee5eb; padding: 1rem; border-radius: 4px; margin: 1rem 0;\">ℹ️ Info: This is a custom info tag!</div>"),
("<success />", "<div class=\"custom-tag success\" style=\"background: #d4edda; border: 1px solid #c3e6cb; padding: 1rem; border-radius: 4px; margin: 1rem 0;\">✅ Success: This is a custom success tag!</div>"),
("<error />", "<div class=\"custom-tag error\" style=\"background: #f8d7da; border: 1px solid #f5c6cb; padding: 1rem; border-radius: 4px; margin: 1rem 0;\">❌ Error: This is a custom error tag!</div>"),
("<mytag />", "<div class=\"custom-tag mytag\">Dies ist mein benutzerdefinierter Tag-Inhalt!</div>"),
("<warning />", "<div class=\"custom-tag warning\" style=\"background: #fff3cd; border: 1px solid #ffeaa7; padding: 1rem; border-radius: 4px; margin: 1rem 0;\">⚠️ Warnung: Dies ist ein benutzerdefiniertes Warnungs-Tag!</div>"),
("<info />", "<div class=\"custom-tag info\" style=\"background: #d1ecf1; border: 1px solid #bee5eb; padding: 1rem; border-radius: 4px; margin: 1rem 0;\">ℹ️ Info: Dies ist ein benutzerdefiniertes Info-Tag!</div>"),
("<success />", "<div class=\"custom-tag success\" style=\"background: #d4edda; border: 1px solid #c3e6cb; padding: 1rem; border-radius: 4px; margin: 1rem 0;\">✅ Erfolg: Dies ist ein benutzerdefiniertes Erfolgs-Tag!</div>"),
("<error />", "<div class=\"custom-tag error\" style=\"background: #f8d7da; border: 1px solid #f5c6cb; padding: 1rem; border-radius: 4px; margin: 1rem 0;\">❌ Fehler: Dies ist ein benutzerdefiniertes Fehler-Tag!</div>"),
];

for (tag, replacement) in simple_tags.iter() {

@@ -415,18 +415,18 @@ fn process_custom_tags(content: &str) -> String {

match tag_name {
"mytag" => {
format!("<div class=\"custom-tag mytag\" data-params=\"{}\">Custom content with params: {}</div>", params, params)
format!("<div class=\"custom-tag mytag\" data-params=\"{}\">Benutzerdefinierter Inhalt mit Parametern: {}</div>", params, params)
},
"alert" => {
if params.contains("type=\"warning\"") {
"<div class=\"custom-tag alert warning\" style=\"background: #fff3cd; border: 1px solid #ffeaa7; padding: 1rem; border-radius: 4px; margin: 1rem 0;\">⚠️ Warning Alert!</div>".to_string()
"<div class=\"custom-tag alert warning\" style=\"background: #fff3cd; border: 1px solid #ffeaa7; padding: 1rem; border-radius: 4px; margin: 1rem 0;\">⚠️ Warnungs-Alert!</div>".to_string()
} else if params.contains("type=\"error\"") {
"<div class=\"custom-tag alert error\" style=\"background: #f8d7da; border: 1px solid #f5c6cb; padding: 1rem; border-radius: 4px; margin: 1rem 0;\">❌ Error Alert!</div>".to_string()
"<div class=\"custom-tag alert error\" style=\"background: #f8d7da; border: 1px solid #f5c6cb; padding: 1rem; border-radius: 4px; margin: 1rem 0;\">❌ Fehler-Alert!</div>".to_string()
} else {
"<div class=\"custom-tag alert info\" style=\"background: #d1ecf1; border: 1px solid #bee5eb; padding: 1rem; border-radius: 4px; margin: 1rem 0;\">ℹ️ Info Alert!</div>".to_string()
"<div class=\"custom-tag alert info\" style=\"background: #d1ecf1; border: 1px solid #bee5eb; padding: 1rem; border-radius: 4px; margin: 1rem 0;\">ℹ️ Info-Alert!</div>".to_string()
}
},
_ => format!("<div class=\"custom-tag {}\">Unknown custom tag: {}</div>", tag_name, tag_name)
_ => format!("<div class=\"custom-tag {}\">Unbekanntes benutzerdefiniertes Tag: {}</div>", tag_name, tag_name)
}
}).to_string();
@@ -490,7 +490,7 @@ pub fn rsparseinfo() -> String {
// This Function gets the Post by its Slugified Version.
// This is basically only used for Caching (loading from it).
pub fn get_post_by_slug(slug: &str) -> Result<Post, Box<dyn std::error::Error>> {
add_log("info", "Starting post parsing", Some(slug), None);
add_log("info", "Starte Post-Parsing", Some(slug), None);

let mut sys = System::new_with_specifics(RefreshKind::new().with_processes(ProcessRefreshKind::everything()).with_cpu(CpuRefreshKind::everything()));
sys.refresh_processes();

@@ -512,7 +512,7 @@ pub fn get_post_by_slug(slug: &str) -> Result<Post, Box<dyn std::error::Error>>
entry.last_cache_status = "hit".to_string();
sys.refresh_process(pid);
entry.last_cpu_usage_percent = sys.process(pid).map(|p| p.cpu_usage()).unwrap_or(0.0) - before_cpu;
add_log("info", "Cache hit", Some(slug), None);
add_log("info", "Cache-Treffer", Some(slug), None);
return Ok(post);
}

@@ -524,16 +524,16 @@ pub fn get_post_by_slug(slug: &str) -> Result<Post, Box<dyn std::error::Error>>
let file_path = slug_to_path(slug, &posts_dir);

if !file_path.exists() {
let error_msg = format!("File not found: {:?}", file_path);
let error_msg = format!("Datei nicht gefunden: {:?}", file_path);
add_log("error", &error_msg, Some(slug), None);
return Err(error_msg.into());
}

let file_content = fs::read_to_string(&file_path)?;
add_log("info", &format!("File loaded: {} bytes", file_content.len()), Some(slug), None);
add_log("info", &format!("Datei geladen: {} Bytes", file_content.len()), Some(slug), None);

if file_content.len() > MAX_FILE_SIZE {
let error_msg = format!("File too large: {} bytes (max: {} bytes)", file_content.len(), MAX_FILE_SIZE);
let error_msg = format!("Datei zu groß: {} Bytes (max: {} Bytes)", file_content.len(), MAX_FILE_SIZE);
add_log("error", &error_msg, Some(slug), None);
return Err(error_msg.into());
}
@@ -545,21 +545,21 @@ pub fn get_post_by_slug(slug: &str) -> Result<Post, Box<dyn std::error::Error>>
match data.deserialize() {
Ok(front) => front,
Err(e) => {
let error_msg = format!("Failed to deserialize frontmatter: {}", e);
let error_msg = format!("Fehler beim Deserialisieren des Frontmatters: {}", e);
add_log("error", &error_msg, Some(slug), None);
return Err(error_msg.into());
}
}
} else {
add_log("error", "No frontmatter found", Some(slug), None);
return Err("No frontmatter found".into());
add_log("error", "Kein Frontmatter gefunden", Some(slug), None);
return Err("Kein Frontmatter gefunden".into());
};

let created_at = get_file_creation_date(&file_path)?;
let processed_markdown = process_anchor_links(&result.content);
let processed_markdown = process_custom_tags(&processed_markdown);

add_log("info", "Starting markdown parsing", Some(slug), Some(&format!("Content length: {} chars", processed_markdown.len())));
add_log("info", "Starte Markdown-Parsing", Some(slug), Some(&format!("Inhaltslänge: {} Zeichen", processed_markdown.len())));

let parser = Parser::new_ext(&processed_markdown, Options::all());
let mut html_output = String::new();

@@ -580,8 +580,8 @@ pub fn get_post_by_slug(slug: &str) -> Result<Post, Box<dyn std::error::Error>>
for event in parser {
event_count += 1;
if start_parsing.elapsed().as_secs() > PARSING_TIMEOUT_SECS {
let error_msg = "Parsing timeout - file too large";
add_log("error", error_msg, Some(slug), Some(&format!("Processed {} events", event_count)));
let error_msg = "Parsing-Timeout - Datei zu groß";
add_log("error", error_msg, Some(slug), Some(&format!("{} Events verarbeitet", event_count)));
return Err(error_msg.into());
}

@@ -634,7 +634,7 @@ pub fn get_post_by_slug(slug: &str) -> Result<Post, Box<dyn std::error::Error>>
}
}

add_log("info", "Markdown parsing completed", Some(slug), Some(&format!("Processed {} events", event_count)));
add_log("info", "Markdown-Parsing abgeschlossen", Some(slug), Some(&format!("{} Events verarbeitet", event_count)));

html::push_html(&mut html_output, events.into_iter());
let sanitized_html = AMMONIA.clean(&html_output).to_string();
@@ -649,7 +649,7 @@ pub fn get_post_by_slug(slug: &str) -> Result<Post, Box<dyn std::error::Error>>
summary: front.summary,
content: sanitized_html,
created_at: created_at.to_rfc3339(),
author: std::env::var("BLOG_OWNER").unwrap_or_else(|_| "Anonymous".to_string()),
author: std::env::var("BLOG_OWNER").unwrap_or_else(|_| "Anonym".to_string()),
};
let compile_time = compile_start.elapsed();

@@ -668,7 +668,7 @@ pub fn get_post_by_slug(slug: &str) -> Result<Post, Box<dyn std::error::Error>>
sys.refresh_process(pid);
entry.last_cpu_usage_percent = sys.process(pid).map(|p| p.cpu_usage()).unwrap_or(0.0) - before_cpu;

add_log("info", "Post parsing completed successfully", Some(slug), Some(&format!("Interpret: {}ms, Compile: {}ms", interpret_time.as_millis(), compile_time.as_millis())));
add_log("info", "Post-Parsing erfolgreich abgeschlossen", Some(slug), Some(&format!("Interpretation: {}ms, Kompilierung: {}ms", interpret_time.as_millis(), compile_time.as_millis())));

Ok(post)
}

@@ -715,7 +715,7 @@ pub fn watch_posts<F: Fn() + Send + 'static>(on_change: F) -> notify::Result<Rec
on_change();
},
Err(e) => {
eprintln!("watch error: {:?}", e);
eprintln!("Überwachungsfehler: {:?}", e);
break;
}
}

@@ -760,10 +760,10 @@ pub fn checkhealth() -> HealthReport {
.filter(|e| e.path().extension().map(|ext| ext == "md").unwrap_or(false))
.count();
},
Err(e) => errors.push(format!("Failed to read posts dir: {}", e)),
Err(e) => errors.push(format!("Fehler beim Lesen des Posts-Verzeichnisses: {}", e)),
}
} else {
errors.push("Posts directory does not exist".to_string());
errors.push("Posts-Verzeichnis existiert nicht".to_string());
}

let cache_file_exists = Path::new(POSTS_CACHE_PATH).exists();
@@ -778,10 +778,10 @@ pub fn checkhealth() -> HealthReport {
cache_readable = true;
cache_post_count = Some(map.len());
},
Err(e) => errors.push(format!("Cache file not valid JSON: {}", e)),
Err(e) => errors.push(format!("Cache-Datei ist kein gültiges JSON: {}", e)),
}
},
Err(e) => errors.push(format!("Failed to read cache file: {}", e)),
Err(e) => errors.push(format!("Fehler beim Lesen der Cache-Datei: {}", e)),
}
}

@@ -794,10 +794,10 @@ pub fn checkhealth() -> HealthReport {
cache_stats_readable = true;
cache_stats_count = Some(map.len());
},
Err(e) => errors.push(format!("Cache stats file not valid JSON: {}", e)),
Err(e) => errors.push(format!("Cache-Statistik-Datei ist kein gültiges JSON: {}", e)),
}
},
Err(e) => errors.push(format!("Failed to read cache stats file: {}", e)),
Err(e) => errors.push(format!("Fehler beim Lesen der Cache-Statistik-Datei: {}", e)),
}
}

@@ -824,26 +824,26 @@ pub fn get_parser_logs() -> Vec<LogEntry> {
pub fn clear_parser_logs() {
PARSER_LOGS.write().unwrap().clear();
if let Err(e) = save_parser_logs_to_disk_inner(&VecDeque::new()) {
eprintln!("Failed to save empty logs to disk: {}", e);
eprintln!("Fehler beim Speichern leerer Protokolle auf Festplatte: {}", e);
}
}

// Force reinterpret all posts by clearing cache and re-parsing
pub fn force_reinterpret_all_posts() -> Result<Vec<Post>, Box<dyn std::error::Error>> {
add_log("info", "Starting force reinterpret of all posts", None, None);
add_log("info", "Starte erzwungene Neuinterpretation aller Posts", None, None);

// Clear all caches
POST_CACHE.write().unwrap().clear();
ALL_POSTS_CACHE.write().unwrap().take();
POST_STATS.write().unwrap().clear();

add_log("info", "Cleared all caches", None, None);
add_log("info", "Alle Caches geleert", None, None);

// Get posts directory and find all markdown files
let posts_dir = get_posts_directory();
let markdown_files = find_markdown_files(&posts_dir)?;

add_log("info", &format!("Found {} markdown files to reinterpret", markdown_files.len()), None, None);
add_log("info", &format!("{} Markdown-Dateien zur Neuinterpretation gefunden", markdown_files.len()), None, None);

let mut posts = Vec::new();
let mut success_count = 0;
@@ -855,11 +855,11 @@ pub fn force_reinterpret_all_posts() -> Result<Vec<Post>, Box<dyn std::error::Er
Ok(post) => {
posts.push(post);
success_count += 1;
add_log("info", &format!("Successfully reinterpreted: {}", slug), Some(&slug), None);
add_log("info", &format!("Erfolgreich neuinterpretiert: {}", slug), Some(&slug), None);
}
Err(e) => {
error_count += 1;
add_log("error", &format!("Failed to reinterpret {}: {}", slug, e), Some(&slug), None);
add_log("error", &format!("Fehler bei der Neuinterpretation von {}: {}", slug, e), Some(&slug), None);
}
}
}

@@ -870,14 +870,14 @@ pub fn force_reinterpret_all_posts() -> Result<Vec<Post>, Box<dyn std::error::Er
// Save cache to disk
save_post_cache_to_disk();

add_log("info", &format!("Force reinterpret completed. Success: {}, Errors: {}", success_count, error_count), None, None);
add_log("info", &format!("Erzwungene Neuinterpretation abgeschlossen. Erfolgreich: {}, Fehler: {}", success_count, error_count), None, None);

Ok(posts)
}

// Force reparse a single post by clearing its cache and re-parsing
pub fn force_reparse_single_post(slug: &str) -> Result<Post, Box<dyn std::error::Error>> {
add_log("info", &format!("Starting force reparse of post: {}", slug), Some(slug), None);
add_log("info", &format!("Starte erzwungenes Neuparsing des Posts: {}", slug), Some(slug), None);

// Clear this specific post from all caches
POST_CACHE.write().unwrap().remove(slug);

@@ -886,7 +886,7 @@ pub fn force_reparse_single_post(slug: &str) -> Result<Post, Box<dyn std::error:
// Clear the all posts cache since it might contain this post
ALL_POSTS_CACHE.write().unwrap().take();

add_log("info", &format!("Cleared cache for post: {}", slug), Some(slug), None);
add_log("info", &format!("Cache für Post geleert: {}", slug), Some(slug), None);

// Re-parse the post
let post = get_post_by_slug(slug)?;

@@ -905,7 +905,7 @@ pub fn force_reparse_single_post(slug: &str) -> Result<Post, Box<dyn std::error:
// Save cache to disk
save_post_cache_to_disk();

add_log("info", &format!("Successfully reparsed post: {}", slug), Some(slug), None);
add_log("info", &format!("Post erfolgreich neugeparst: {}", slug), Some(slug), None);

Ok(post)
}
peta.png: binary file not shown (removed; size before: 921 KiB)
@@ -108,11 +108,11 @@ export default function RustStatusPage() {
const res = await fetch('/api/admin/posts?reinterpretAll=1');
if (!res.ok) throw new Error('Fehler beim Neuinterpretieren der Posts');
const data = await res.json();
console.log('Reinterpret result:', data);
console.log('Neu-Interpretier Ergebins:', data);
// Refresh all data after reinterpret
await Promise.all([fetchStats(), fetchHealth(), fetchLogs()]);
} catch (e: any) {
console.error('Error reinterpreting posts:', e);
console.error('Fehler beim Neu-Interpretieren => ', e);
}
};

@@ -374,16 +374,16 @@ export default function RustStatusPage() {
<button
onClick={reinterpretAllPosts}
className="px-2.5 py-1.5 bg-orange-500 hover:bg-orange-600 text-white rounded text-xs transition-colors"
title="Force reinterpret all posts"
title="Neuinterpretation aller Beiträge erzwingen"
>
Reinterpret All
Alle Posts neu Interpretieren?
</button>
<button
onClick={clearLogs}
className="px-2.5 py-1.5 bg-red-500 hover:bg-red-600 text-white rounded text-xs transition-colors"
title="Clear all logs"
title="Logs Leeren"
>
Clear Logs
Logs Leeren
</button>
</div>
</div>

@@ -405,10 +405,10 @@ export default function RustStatusPage() {
onChange={(e) => setLogFilter(e.target.value)}
className="px-3 py-1.5 border border-gray-300 rounded-md focus:outline-none focus:ring-2 focus:ring-blue-500 text-sm"
>
<option value="all">All Levels</option>
<option value="all">Alle Stufen</option>
<option value="info">Info</option>
<option value="warning">Warning</option>
<option value="error">Error</option>
<option value="warning">Warnungen</option>
<option value="error">Fehler</option>
</select>
</div>
</div>