@@ -2,6 +2,7 @@ use flair::flair;
 use ssw::websrv::WebsrvConfig;
 use ssw::websrv::run_server;
 use std::fs;
+use std::env;
 use std::collections::HashMap;
 
 //1 - Ingest all the pages into a variable
@@ -11,7 +12,11 @@ use std::collections::HashMap;
 //4 - Start the server and serve
 
 //#1 : Sends (pages_summary, pages_content)
-fn ingest_pages(pages_path: &str) -> Option<(String, String)> {
+fn ingest_pages(pages_path: &str,
+                header_path: &str,
+                footer_path: &str,
+                html_path: &str,
+) -> Option<(String, String)> {
 
     //Getting the name of existing pages
     let mut pages: Vec<String> = Vec::new();
@@ -26,7 +31,7 @@ fn ingest_pages(pages_path: &str) -> Option<(String, String)> {
                 .unwrap()
             );
 
-            &mut pages.push(mypage);
+            let _ = &mut pages.push(mypage);
         }
         pages.sort();
         pages.reverse();
@@ -37,17 +42,13 @@ fn ingest_pages(pages_path: &str) -> Option<(String, String)> {
         let mut page_sum = String::new();
         for page in pages {
             let vars: HashMap<String, String> = HashMap::from([
-                (String::from("header"), fs::read_to_string("./html/header.html").unwrap()),
-                (String::from("footer"), fs::read_to_string("./html/footer.html").unwrap()),
+                (String::from("header"), fs::read_to_string(header_path).unwrap()),
+                (String::from("footer"), fs::read_to_string(footer_path).unwrap()),
             ]);
             let fullpath = String::from(format!("{pages_path}/{page}"));
             let page_res = flair::analyze_file(&fullpath, vars).unwrap();
-            //TODO Write the pages to their own file
-            //and put the correct address in %%sum%%
-            //So the index is simply "latest articles
-
-            page_sum.push_str(&page_link(&page, &page_res));
-            &mut pages_content.push_str(&page_res);
+            page_sum.push_str(&page_link(&page, &page_res, html_path));
+            let _ = &mut pages_content.push_str(&page_res);
         }
         return Some((page_sum, pages_content));
     } else {
@@ -55,29 +56,40 @@ fn ingest_pages(pages_path: &str) -> Option<(String, String)> {
     }
 }
 
-fn page_link(page_name: &str, content: &String) -> String {
+fn page_link(page_name: &str, content: &String, html_path: &str) -> String {
     let page_html = str::replace(page_name, ".ft", "");
-    let page_loc = format!("./html/pages/{page_html}");
+    let page_loc = format!("{html_path}/pages/{page_html}");
     let page_name_short = str::replace(page_name, ".html.ft", "");
     let page_link = format!("<a href=\"pages/{page_html}\">{page_name_short}</a><br>\n");
 
-    fs::write(page_loc, content);
+    match fs::write(page_loc, content) {
+        Ok(_) => (),
+        Err(e) => panic!("{}", e),
+    };
     return String::from(page_link);
 }
 
 
 //#2 & #3
-fn generate_index(pages: (String, String), index_path: &str, html_path: &str) {
+fn generate_index(pages: (String, String),
+                  index_template_path: &str,
+                  html_path: &str,
+                  header_path: &str,
+                  footer_path: &str,
+) {
+
+    let index_path = format!("{html_path}/index.html");
+
     let vars: HashMap<String, String> = HashMap::from([
-        (String::from("header"), fs::read_to_string("./html/header.html").unwrap()),
-        (String::from("footer"), fs::read_to_string("./html/footer.html").unwrap()),
+        (String::from("header"), fs::read_to_string(&header_path).unwrap()),
+        (String::from("footer"), fs::read_to_string(&footer_path).unwrap()),
         (String::from("pages"), pages.1),
         (String::from("sum"), pages.0),
     ]);
 
-    let index = flair::analyze_file("./templates/index.html.ft", vars).unwrap();
+    let index = flair::analyze_file(index_template_path, vars).unwrap();
 
-    fs::write("./html/index.html", index.as_str()).expect("Could not write index");
+    fs::write(&index_path, index.as_str()).expect("Could not write index");
 }
 
 //#4
@@ -89,18 +101,31 @@ fn start_websrv(conf: WebsrvConfig) {
 }
 
 fn main() {
-    let pages = match ingest_pages("./templates/pages") {
+    //Initialize from env variables
+    let root_path_env = "FLOG_ROOT_FOLDER";
+    let root_folder = match env::var(root_path_env) {
+        Ok(v) => v,
+        Err(_) => String::from("/opt/flog"),
+    };
+
+    let pages_path = format!("{root_folder}/templates/pages");
+    let html_folder_path = format!("{root_folder}/html");
+    let index_template_path = format!("{root_folder}/templates/index.html.ft");
+    let header_path = format!("{html_folder_path}/header.html");
+    let footer_path = format!("{html_folder_path}/footer.html");
+
+    let pages = match ingest_pages(&pages_path, &header_path, &footer_path, &html_folder_path) {
         Some(p) => p,
         None => (String::new(), String::new()),
     };
 
-    generate_index(pages, "./templates/index.html.ft", "./html");
+    generate_index(pages, &index_template_path, &html_folder_path, &header_path, &footer_path);
 
     //Web conf
     let webconf = WebsrvConfig {
         nbr_of_threads: 10,
         bind_addr: String::from("0.0.0.0:8000"),
-        root_folder: String::from("./html"),
+        root_folder: String::from(&html_folder_path),
     };
     start_websrv(webconf);
 }