You can not select more than 25 topics
Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.
1291 lines
40 KiB
1291 lines
40 KiB
|
|
|
|
|
|
#include <ctime> |
|
#include <ios> |
|
#include <fstream> |
|
#include <iostream> |
|
#include <filesystem> |
|
#include <sstream> |
|
#include <stdexcept> |
|
#include <string> |
|
#include <cstdlib> |
|
#include <optional> |
|
#include <chrono> |
|
#include <set> |
|
#include <string_view> |
|
#include <type_traits> |
|
#include <ranges> |
|
#include <stack> |
|
#include <unordered_map> |
|
#include <thread> |
|
#include <queue> |
|
|
|
#define katex_support |
|
|
|
|
|
#include <json.hpp> |
|
#include <date.h> |
|
#include <spdlog/spdlog.h> |
|
#include <cxxopts.hpp> |
|
#include <tinyxml2.h> |
|
|
|
#include <unistd.h> |
|
#include <csignal> |
|
|
|
|
|
#include "default-templates.h" |
|
#include "util.h" |
|
#include "markdown.h" |
|
#include "templater.hpp" |
|
|
|
#include "srchighlight.hpp" |
|
|
|
|
|
namespace lex { |
|
extern "C" { |
|
//#include <lexbor/dom/dom.h> |
|
#include "lexbor_base.h" |
|
|
|
} |
|
|
|
struct Dom { |
|
|
|
lxb_html_document_t *document; |
|
lxb_dom_collection_t *collection; |
|
|
|
Dom(const std::string & html_text) { |
|
|
|
lxb_html_document_t *document = |
|
parse((const unsigned char *)html_text.c_str(), html_text.length()); |
|
|
|
collection = |
|
lxb_dom_collection_make(&document->dom_document, 128); |
|
|
|
if (collection == nullptr) { |
|
spdlog::warn("failed to parse html?"); |
|
throw "Unable to parse html for dom"; |
|
} |
|
} |
|
|
|
std::string str() { |
|
lexbor_str_t res {}; |
|
auto err = lxb_html_serialize_tree_str(lxb_dom_interface_node(document), |
|
&res); |
|
|
|
if (err != LXB_STATUS_OK) { |
|
spdlog::warn ("Failed to serialization HTML tree"); |
|
} |
|
|
|
return std::string (res.data, res.data + res.length); |
|
} |
|
|
|
~Dom() { |
|
lxb_dom_collection_destroy(collection, true); |
|
lxb_html_document_destroy(document); |
|
} |
|
}; |
|
}; |
|
|
|
// File / directory conventions used throughout the generator.
const std::string SITE_CONFIG_FNAME = "stgen.json";
const std::string DEFAULT_PUBLISH_ROOT = "public";
const std::string DEFAULT_SOURCE_DIR = ".";
const std::string TEMPLATE_FILEEXT = "template";
// Extensions that trigger a rebuild when their files change.
const std::string DEFAULT_WATCH_FILETYPES[] = {"md", "markdown", TEMPLATE_FILEEXT};
const std::string DEFAULT_IGNORE_FILES[] = {SITE_CONFIG_FNAME};

// Defaults for the built-in test web server (-t / --test).
const std::string TEST_URL = "http://localhost:";
const std::string TEST_PORT = "8000";

// Flag cleared from a signal handler to stop the watch/serve loop.
// FIX: volatile std::sig_atomic_t is the only type the standard guarantees
// can be written from a signal handler; `volatile bool` is not portable.
static volatile std::sig_atomic_t continue_running = true;

const std::string short_info("stgen3 prerelease from git.topost.net (c) 2020-2021 Alistair Michael.");
|
|
|
using json = nlohmann::json; |
|
namespace fs = std::filesystem; |
|
|
|
/* Process-wide runtime switches, populated by parse_options(). */
struct runtime_config {
    bool no_interactive = false;  // -y: auto-confirm every prompt
    bool save_config = false;     // -s: write stgen.json back to the source root
    bool test = false;            // -t: build in test mode + internal server
    bool comp_katex = true;       // compile KaTeX math at build time (off with -k)
    bool comp_code = true;        // syntax-highlight code blocks (off with -c)
    // Keys that per-directory stgen.json files may NOT override.
    std::set<std::string> readonly_properties {"source_root"};
    std::string port = TEST_PORT; // internal test-server port (-p)
};

// Single global settings instance shared by the whole program.
runtime_config settings;
|
|
|
#ifdef katex_support |
|
namespace duk { |
|
extern "C" { |
|
#include <duk_config.h> |
|
#include <duktape.h> |
|
} |
|
|
|
|
|
|
|
/* Thin RAII wrapper around a Duktape JS heap, used to run the bundled
 * katex.min.js and render math server-side.
 *
 * Stack discipline: compile_* leave the compiled function on top of the
 * Duktape value stack; run()/eval_file() then duk_call it and pop the
 * result.  The exact push/pop order matters throughout. */
class duktape {
    // Owned Duktape heap.  Created lazily by prepare_katex(); stays null
    // (and the class unusable) until then.
    duk_context *ctx = nullptr;

public:
    duktape() {}

    ~duktape() {
        if (ctx)
            duk_destroy_heap(ctx);
    }

    /* Compile the contents of file `fname`; on success the compiled
     * function is left on the stack top for a later duk_call. */
    void compile_file(std::string fname)
    {
        std::string file = read_file(fname);
        if (duk_pcompile_lstring(ctx, 0, file.c_str(), file.length()) != 0) {
            // NOTE(review): on failure duk_pcompile leaves the error object
            // on the stack — it is printed but never popped, so a following
            // duk_call operates on the error value; verify intended.
            std::cout << "Error :" << duk_safe_to_string(ctx, -1);
        }
    }

    /* Same as compile_file() but takes the program text directly. */
    void compile_text(std::string file)
    {
        if (duk_pcompile_lstring(ctx, 0, file.c_str(), file.length()) != 0) {
            // NOTE(review): same unpopped error object as compile_file().
            std::cout << "Error :" << duk_safe_to_string(ctx, -1);
        }
    }

    /* Invoke the function currently on the stack top, print its result,
     * and pop it. */
    void run()
    {
        duk_call(ctx, 0);
        std::cout << "program result: " << duk_safe_to_string(ctx, -1);
        duk_pop(ctx);
    }

    /* compile_file() + call + pop result (result is discarded). */
    void eval_file(std::string file) {
        compile_file(file);
        duk_call(ctx, 0);
        duk_pop(ctx);
    }

    /* Manual smoke test: load KaTeX from disk and poke at DOM globals.
     * (DOMParser does not exist in a bare Duktape heap, so the eval is
     * expected to report an error.) */
    void testkatex()
    {
        eval_file("katex.min.js");
        eval_file("auto-render.min.js");

        std::string line = "const parser = new DOMParser();";
        duk_push_lstring(ctx, line.c_str(), line.length());
        if (duk_peval(ctx) != 0) {
            std::cout << "Error (test):" << duk_safe_to_string(ctx, -1);
        } else {
            std::cout << "Result :" << duk_safe_to_string(ctx, -1);
        }

        // renderMathInElement(document.body);
    }

    /* Create the heap and run the embedded katex.min.js blob so that the
     * global `katex` object is available to eval_katex(). */
    void prepare_katex() {

        ctx = duk_create_heap_default();

        if (duk_pcompile_lstring(ctx, 0, (const char *)default_templates_katex_min_js,
                                 default_templates_katex_min_js_len) != 0) {
            std::cout << "Error :" << duk_safe_to_string(ctx, -1);
        }

        duk_call(ctx, 0);
        duk_pop(ctx);
    }

    /* Render one math fragment to HTML with KaTeX.
     *
     * `text` is expected to still carry its 2-char delimiters:
     * \( ... \) for inline math or \[ ... \] for display math; the
     * delimiters select displayMode and are then stripped.
     * Returns the KaTeX HTML, or "math syntax error" on any failure. */
    std::string eval_katex(std::string text) {
        //math_text = math_text.substr(2, math_text.length() - 4);

        std::string katexargs = "throwOnError: false";

        if (text.length() > 4) {

            if (text[1] == '[') {
                katexargs += ", displayMode: true";
            } else if (text[1] != '(') {
                spdlog::debug("mathmode detection broken eval_katex()");
            }

            // strip the two-character delimiters at both ends
            text = text.substr(2, text.length() - 4);
        } else {
            spdlog::error("Generate katex: short \n\t{}", text);
            return "math syntax error";
        }
        std::string math_text {};

        // Escape the fragment for embedding in a single-quoted JS string
        // literal: quote, backslash and newline.
        // NOTE(review): '\r' and Unicode line separators are not escaped —
        // such input would break the generated script; verify acceptable.
        for (char i:text) {
            if (i == '\'') {
                math_text += "\\'";
            } else if (i == '\\') {
                math_text += "\\\\";
            } else if (i == '\n') {
                math_text += "\\n";
            } else {
                math_text += i;
            }
        }

        const char *matht = math_text.c_str();

        // Build: katex.renderToString('<escaped>',{throwOnError: false...});
        std::string line = "katex.renderToString('";
        line += matht;
        line += "',{" + katexargs + "});";

        duk_push_lstring(ctx, line.c_str(), line.length());
        if (duk_peval(ctx) != 0) {
            spdlog::error("Generate katex: {} \n\t{}", duk_safe_to_string(ctx, -1), line);
        } else {
            return std::string{duk_safe_to_string(ctx, -1)};
        }

        return "math syntax error";
    }

    /* Manual smoke test against an on-disk katex.min.js. */
    void test() {
        compile_file("katex.min.js");
        duk_call(ctx, 0);
        duk_pop(ctx);
        // NOTE(review): in the JS the \\pm / \\sqrt arrive as \pm / \sqrt,
        // and JS string literals drop unknown backslash escapes, so KaTeX
        // actually receives "pm"/"sqrt" — the test input looks broken.
        std::string line = "katex.renderToString(\"c = \\pm\\sqrt{a^2 + b^2}\", {throwOnError: false });";
        duk_push_lstring(ctx, line.c_str(), line.length());
        if (duk_peval(ctx) != 0) {
            std::cout << "Error (test):" << duk_safe_to_string(ctx, -1);
        } else {
            std::cout << "Result :" << duk_safe_to_string(ctx, -1);
        }

        duk_pop(ctx);
    }
};
|
|
|
}; |
|
|
|
#endif // katex_support |
|
|
|
|
|
/* Fork and exec the command in `args` (NULL-terminated argv), wiring the
 * child's stdin to in_FD and stdout to out_FD.  The child's pid is stored
 * through child_ID.
 *
 * Returns 0 on success, 1 if fork() failed.  (The original comment claimed
 * "true on success", which contradicted the code.)
 */
int execute(int in_FD, int out_FD, char *args[], pid_t* child_ID) {

    pid_t pid = fork();

    if (pid == -1) {
        return 1;
    }

    if (pid) { // parent
        *child_ID = pid;
        return 0;
    }

    // child
    dup2(in_FD, 0);
    dup2(out_FD, 1);

    execvp(args[0], args);

    // Only reached when execvp failed.  BUG FIX: the original fell through
    // and returned 0, so a failed exec left a second copy of this program
    // running the caller's code.  _exit (not exit) avoids flushing the
    // parent's duplicated stdio buffers.
    perror("Bad");
    _exit(127);
}
|
|
|
namespace stgen { |
|
|
|
|
|
/* Drives a whole site build: walks the source tree, applies templates and
 * plugins, post-processes math/code, and writes the publish directory. */
class builder {
    mmd::markdown_parser parser;   // markdown + front-matter parser
    // Built-in fallback templates keyed by name ("html", "css").
    std::unordered_map<std::string, std::string> default_templates;
    // Site-wide key/value configuration (merged from stgen.json files).
    std::unordered_map<std::string, std::string> properties;
    templater *templr;           // full plugin chain (owned; freed in dtor)
    templater *textsub_templr;   // variable substitution only (owned)
    templater *s2_templr;        // stage-2 plugins: rss/microblog (owned)

    // mtime snapshot of every source file at the end of the last build;
    // rebuild_if_newer() diffs against this.
    std::unordered_map<fs::path, time_t, pathHash> last_build;
    gnu_highlighter shl {};      // source-code syntax highlighter

    // Extensions whose file contents go through the templating pipeline
    // (everything else is copied verbatim).
    const std::set<std::string> apply_templates_exts {"md","html", "txt", "markdown", "xml", "atom", "rss"};

#ifdef katex_support
    duk::duktape dukengine {};   // embedded JS engine for server-side KaTeX
#endif
|
|
|
std::string |
|
postprocess_math(const std::string &html_text) |
|
{ |
|
using namespace lex; |
|
|
|
lxb_html_document_t *document = |
|
parse((const unsigned char *)html_text.c_str(), html_text.length()); |
|
|
|
|
|
lxb_dom_collection_t *collection = |
|
lxb_dom_collection_make(&document->dom_document, 128); |
|
|
|
if (collection == nullptr) { |
|
spdlog::warn("failed to parse html?"); |
|
// messy |
|
return html_text; |
|
} |
|
|
|
|
|
auto err = lxb_dom_elements_by_class_name( |
|
lxb_dom_interface_element(document->body), |
|
collection, (const lxb_char_t *) "math", 4); |
|
|
|
|
|
if (err != LXB_STATUS_OK) { |
|
spdlog::warn("failed to parse html?"); |
|
|
|
lxb_dom_collection_destroy(collection, true); |
|
lxb_html_document_destroy(document); |
|
return html_text; |
|
} |
|
|
|
for (size_t i = 0; i < lxb_dom_collection_length(collection); i++) { |
|
lxb_dom_element_t *element = lxb_dom_collection_element(collection, i); |
|
|
|
size_t len; |
|
lxb_char_t *text = lxb_dom_node_text_content(&element->node, &len); |
|
const std::string tt {(const char *)text, (const char *)text + len}; |
|
|
|
#ifdef katex_support |
|
std::string mathtext = dukengine.eval_katex(tt); |
|
#endif |
|
|
|
// std::string mtext = imtx.get_svg_from_mtex(tt); |
|
|
|
const unsigned char local_name[] = "math"; |
|
|
|
// auto mathtext = "<img src='data:image/svg+xml;base64, " + base64_encode_mime(mtext) + "'/>"; |
|
|
|
lxb_html_element_t * ele = lxb_html_document_create_element(document, local_name, |
|
(size_t)4, nullptr); |
|
|
|
auto elem = lxb_dom_interface_element(ele); |
|
|
|
lxb_dom_node_t *n = lxb_html_document_parse_fragment(document, |
|
elem, (const unsigned char *)mathtext.c_str(), mathtext.length()); |
|
|
|
lxb_dom_node_insert_after(&element->node, n); |
|
lxb_dom_node_remove(&element->node); |
|
} |
|
|
|
lexbor_str_t res {}; |
|
|
|
err = lxb_html_serialize_tree_str(lxb_dom_interface_node(document), |
|
&res); |
|
|
|
if (err != LXB_STATUS_OK) { |
|
FAILED("Failed to serialization HTML tree"); |
|
} |
|
|
|
|
|
std::string result = {res.data, res.data + res.length}; |
|
|
|
lxb_dom_collection_destroy(collection, true); |
|
lxb_html_document_destroy(document); |
|
|
|
return result; |
|
} |
|
|
|
|
|
std::string |
|
postprocess_code(const std::string &html_text) |
|
{ |
|
using namespace lex; |
|
|
|
lxb_html_document_t *document = |
|
parse((const unsigned char *)html_text.c_str(), html_text.length()); |
|
|
|
|
|
lxb_dom_collection_t *collection = |
|
lxb_dom_collection_make(&document->dom_document, 128); |
|
|
|
if (collection == nullptr) { |
|
spdlog::warn("failed to parse html?"); |
|
// messy |
|
return html_text; |
|
} |
|
|
|
|
|
auto err = lxb_dom_elements_by_tag_name( |
|
lxb_dom_interface_element(document->body), |
|
collection, (const lxb_char_t *) "code", 4); |
|
|
|
if (err != LXB_STATUS_OK) { |
|
spdlog::warn("failed to get code tags?"); |
|
return html_text; |
|
} |
|
|
|
for (size_t i = 0; i < lxb_dom_collection_length(collection); i++) { |
|
lxb_dom_element_t *element = lxb_dom_collection_element(collection, i); |
|
|
|
size_t len; |
|
lxb_char_t *text = lxb_dom_node_text_content(&element->node, &len); |
|
const std::string tt {(const char *)text, (const char *)text + len}; |
|
|
|
auto clname = lxb_dom_element_class(element, &len); |
|
|
|
if (!clname) |
|
continue; // skip tags with no class |
|
|
|
const std::string dec_lang {clname, clname + len}; |
|
auto language = shl.get_lang(dec_lang); |
|
|
|
if (!language) { |
|
spdlog::warn("Unknown language: {}", dec_lang); |
|
continue; // skip tags with unsupported language |
|
} |
|
|
|
|
|
std::string highlighted = shl.highlight(tt, *language); |
|
|
|
const unsigned char local_name[] = "code"; |
|
|
|
lxb_html_element_t * ele = lxb_html_document_create_element(document, local_name, |
|
(size_t)4, nullptr); |
|
|
|
lxb_dom_element_t * elem = lxb_dom_interface_element(ele); |
|
|
|
|
|
highlighted = "<code class=\"" + dec_lang + "\">" + highlighted + "</code>"; |
|
|
|
lxb_dom_node_t *n = lxb_html_document_parse_fragment(document, |
|
elem, (const unsigned char *)highlighted.c_str(), highlighted.length()); |
|
|
|
lxb_dom_node_insert_after(lxb_dom_node_parent(&element->node), n); |
|
lxb_dom_node_remove(lxb_dom_node_parent(&element->node)); |
|
} |
|
|
|
lexbor_str_t res {}; |
|
|
|
err = lxb_html_serialize_tree_str(lxb_dom_interface_node(document), |
|
&res); |
|
|
|
if (err != LXB_STATUS_OK) { |
|
FAILED("Failed to serialization HTML tree"); |
|
} |
|
|
|
std::string result = {res.data, res.data + res.length}; |
|
|
|
lxb_dom_collection_destroy(collection, true); |
|
lxb_html_document_destroy(document); |
|
|
|
return result; |
|
|
|
} |
|
|
|
|
|
|
|
|
|
/*** Build Function **************** |
|
* |
|
* Plugin types: |
|
* - ${include} hooks |
|
* - per file |
|
* - per directory |
|
* - end of build |
|
* - what kind of state do they want? |
|
* |
|
* 1. Recurse through directory |
|
* 2. for each file: |
|
* - compute_target, compare the modification dates, if source newer build |
|
* the file |
|
* - compute_target: resolve templates (also newer), plugins in dry run mode |
|
* - build: check if its in the watched filetypes list, build |
|
* (1) builtin templating, (2) copy to build directory |
|
* (2) plugin templating/etc mutates build directory |
|
* - else just copy it to the build dir |
|
* - sync build directory to publish directory |
|
* 3. recurse for each folder |
|
* |
|
*/ |
|
/* Walk one directory level of the source tree, collecting per-file build
 * jobs into compile_jobs, then recurse into subdirectories.
 *
 * page_templates and properties are taken BY VALUE on purpose: each
 * directory inherits its parent's state but its own additions must not
 * leak back up or sideways. */
void build_recursive(std::unordered_map<std::string, std::string> page_templates,
    std::unordered_map<std::string, std::string> properties,
    std::unordered_map<fs::path, blog_item, pathHash> &compile_jobs,
    const fs::path &directory) {

    std::vector<fs::path> next_directories;
    std::vector<fs::path> next_files;

    properties["current_directory"] = directory;

    // Merge this directory's stgen.json (if any) into the inherited
    // properties, skipping keys marked read-only by the CLI.
    fs::path confname = fs::path(directory) / SITE_CONFIG_FNAME;

    if (fs::exists(confname)) {
        json parse_json {};
        std::ifstream conffile (confname);
        conffile >> parse_json;

        // dump json into properties unordered_map
        for (json::iterator it = parse_json.begin(); it != parse_json.end(); ++it) {
            if (!settings.readonly_properties.count(it.key()))
                properties[it.key()] = it.value();
        }
    }

    time_t dir_last_update_time = to_time_t(fs::last_write_time(directory));

    // First pass: queue directories and ordinary files; harvest
    // *.template files into page_templates.
    //
    // default template: html.template, md.template  -> key "md"/"html"
    // other templates:  templatename.html.template  -> key "templatename.html"
    for (fs::path entry : fs::directory_iterator(directory)) {

        std::string filename = fs::relative(entry, directory);
        if (filename.at(0) == '.') {
            // skip hidden file
            continue;
        }

        // get directories for last step
        if (fs::is_directory(entry)) {
            next_directories.push_back(entry);
            continue;
        }

        if (filename == SITE_CONFIG_FNAME) {
            continue;
        }

        std::string fname = entry.filename();
        auto stext = fname.find_last_of(".") + 1;

        // NOTE(review): find_last_of() returns npos when there is no dot,
        // and npos + 1 wraps to 0 — so this npos comparison can never be
        // true and the warning below is dead code; verify intent.
        if (stext == std::string::npos) {
            spdlog::warn("Warn: templates must contain two '.'s'");
        }

        std::string ext1 = fname.substr(stext, fname.length());

        if (ext1 == TEMPLATE_FILEEXT) {
            stext = ext1.rfind(".") + 1;

            // NOTE(review): same npos+1 wrap as above — ext1 contains no
            // dot, so this branch appears unreachable and template keys
            // always come from the else branch; verify intent.
            if (stext == std::string::npos) {
                // setting new default template
                std::string key = fname.substr(fname.find("."), fname.length());
                page_templates[key] = read_file(entry);
                continue;
            } else {
                // strip the trailing ".template" to form the key
                std::string key = fname.substr(0, fname.find_last_of("."));
                page_templates[key] = read_file(entry);
            }

            continue;
        }

        // else
        next_files.push_back(entry);
    }

    // Second pass: turn each queued file into a TEMPLATE or COPY_FILE job.
    for (fs::path entry: next_files) {

        time_t article_last_update = to_time_t(fs::last_write_time(entry));

        if (file_ext(entry) == "draft") {
            continue;
        }

        // create a copy to prevent inheritance
        std::unordered_map<std::string, std::string> article_properties (properties);
        article_properties["current_file"] = entry.string();
        article_properties["page_url"] = compute_url(entry.string(), article_properties);

        auto stext = entry.string().find_last_of(".") + 1;
        std::string ext = entry.string().substr(stext, entry.string().length());

        if (apply_templates_exts.count(ext)) {
            std::string text;
            text = read_file(entry);
            // Front-matter metadata overrides inherited properties.
            auto metavalues = parser.get_all_metavalues(text);
            for (auto e : metavalues) {
                // lots of unneccessary copies
                article_properties[e.first] = e.second;
            }

            auto target = compute_target(entry, article_properties);
            article_properties["target"] = target.string();

            // Parse the article's "date" (format given by "date-in-format")
            // and use it as the article/directory timestamp when valid.
            if (article_properties.count("date")) {

                std::istringstream in {article_properties.at("date")};
                date::sys_seconds timepoint;
                in >> date::parse(article_properties.at("date-in-format"), timepoint);
                time_t t = std::chrono::system_clock::to_time_t(timepoint);

                if (t != (time_t){}) {
                    article_last_update = t;

                    if (t > dir_last_update_time) {
                        dir_last_update_time = t;
                    }
                } else {
                    spdlog::info("bad date parse");
                }

                std::string ndate = reformat_date(article_properties.at("date"), article_properties);
                article_properties["date"] = ndate;
            }

            ////////////////// GET THE TEMPLATE READY //////////////////////

            std::string default_template_name = "none";

            /* Process body content */
            if (ext == "html") {
                default_template_name = "html";
            }

            if (ext == "txt") {
                default_template_name = "none";
            }

            if (ext == "md" || ext == "markdown") {
                default_template_name = "html";
            }

            std::string new_page {};

            /*
            if (page_templates.count(default_template_name)) {
                new_page = page_templates.at(default_template_name);
            } else {
                spdlog::warn("Default template does not exist: '{}'", default_template_name);
            }
            */
            auto template_name = default_template_name;

            if (article_properties.count("template")) {
                // custom template if defined
                template_name = article_properties.at("template");
            }

            if (template_name == "none") {
                // pass-through template: just the body
                new_page = "{{:body}}";
            } else if (page_templates.count(template_name)) {
                new_page = page_templates.at(template_name);
            } else {
                spdlog::warn("Template not found for: {}", entry.string());
            }

            // TEMPLATING ON THE TEMPLATE //
            templr->run_substitution_plugins(new_page, article_properties);

            /////////////////////////////////////////////////////////////////

            //////////////// RUN TEMPLATING ON CONTENT //////////////////////
            if (!article_properties.count("notemplating")) {
                templr->run_substitution_plugins(text, article_properties);
            }

            // NOTE(review): ntext is never used afterwards — the
            // frontmatter cut appears to have been abandoned; verify.
            std::string ntext = parser.cut_frontmatter(text);

            // article_properties["body"] = parser.cut_frontmatter(text);
            auto job_t = job_type::TEMPLATE;

            if (ext == "md" || ext == "markdown") {
                text = parser.parse_to_html(text);
                if (settings.comp_katex)
                    text = postprocess_math(text);

                if (settings.comp_code)
                    text = postprocess_code(text);
            }

            /////////////////////////////////////////////////////////////////

            article_properties["original"] = text;

            /* write content into template */
            std::unordered_map<std::string, std::string> sub_content_keys {{"body", text}};
            textsub_templr->run_substitution_plugins_once(new_page, sub_content_keys);

            article_properties["body"] = new_page;

            compile_jobs[target] = {job_t, entry, article_last_update, std::unordered_map<std::string, std::string>(article_properties)};
            continue;
        }

        // else just copy
        auto target = compute_target(entry, article_properties);
        compile_jobs[target] = {job_type::COPY_FILE, entry, dir_last_update_time, std::unordered_map<std::string, std::string>(article_properties)};
    }

    // One MAKE_DIR job per directory so write_build() can create the
    // output tree before writing files into it.
    compile_jobs[compute_target(directory, properties)] = {job_type::MAKE_DIR, directory, dir_last_update_time, std::unordered_map<std::string, std::string>(properties)};

    // recurse
    for (auto d : next_directories) {
        build_recursive(std::unordered_map<std::string, std::string>(page_templates),
            std::unordered_map<std::string, std::string>(properties), compile_jobs, d);
    }

    // Bubble this directory's newest timestamp up to the parent's
    // MAKE_DIR job (skipped at the source root, which has no parent job).
    if (directory.string() != properties.at("source_root")) {
        time_t *parent_date = &compile_jobs.at(compute_target(directory.parent_path(), properties)).post_date;
        if (*parent_date < dir_last_update_time) {
            *parent_date = dir_last_update_time;
        }
    }
}
|
|
|
/* Seed `properties` and `default_templates` from the blobs compiled into
 * the binary (default-templates.h): stgen.json defaults, style.css and the
 * default html template.  Also normalizes publish/source roots to absolute
 * paths and derives "name"/"url" fallbacks. */
void
add_default_templates ()
{

    std::string css ((char *)default_templates_style_css,
        default_templates_style_css_len);

    std::string properties_json ((char *)default_templates_stgen_json,
        default_templates_stgen_json_len);

    std::string html_template((char *)default_templates_html_template,
        default_templates_html_template_len);

    /* PROPERTIES.JSON DEFAULT VALUES */
    std::istringstream in (properties_json);

    json default_properties {};
    in >> default_properties;

    std::unordered_map<std::string,std::string> m2 = default_properties;

    // merge() only moves keys absent from `properties`, so values already
    // set (e.g. from the CLI) win over the embedded defaults.
    properties.merge(m2);

    // Make both roots absolute so later path comparisons are stable.
    for (auto s : {"publish_root", "source_root"}) {
        fs::path p(properties.at(s));

        if (!p.is_absolute()) {
            properties[s] = fs::absolute(p);
        }
    }

    // Fallbacks: site name from the source directory, url from the
    // publish directory.
    if (!properties.count("name")) {
        properties["name"] = fs::path(properties.at("source_root")).filename();
    }

    if (!properties.count("url")) {
        properties["url"] = properties.at("publish_root");
    }

    // Temporarily expose css/current_file so the stored html template can
    // reference them; both keys are removed again below.
    properties["css"] = css;
    properties["current_file"] = "default_html_template";

    default_templates["html"]= html_template;
    default_templates["css"] = css;
    properties.erase("current_file");
    properties.erase("css");
}
|
|
|
public: |
|
/* Construct a builder from the parsed CLI/config properties map.
 * Wires up the three templater pipelines, loads the embedded default
 * templates, and (when enabled) boots the KaTeX JS engine.
 * NOTE(review): the three templaters and their plugins are raw-newed and
 * only the templaters are deleted below — plugin ownership is presumably
 * taken by templater; verify. */
builder(std::unordered_map<std::string, std::string> m)
{
    properties = m;

    // Stage-1 pipeline: transclusion/conditionals plus rss/microblog.
    templr = new templater({new variable_transclude_plugin{}, new file_index_plugin{{SITE_CONFIG_FNAME, "index.md", "html.template"}}, new ifdef_plugin{}, new ifndef_plugin{}, new file_transclude_plugin{}, new mmd_snippet_transclude_plugin{}},
        {new rss_feed_plugin{}, new microblog_plugin{}});

    // Body-into-template substitution only.
    textsub_templr = new templater({new variable_transclude_plugin{}}, {});

    // Stage-2 pipeline: runs after all jobs are collected (rss/microblog).
    s2_templr = new templater({}, {new rss_feed_plugin{}, new microblog_plugin{}});

    add_default_templates();
#ifdef katex_support
    if (settings.comp_katex)
        dukengine.prepare_katex();
#endif
}

// Frees the three owned templater pipelines.
~builder() {delete templr; delete textsub_templr; delete s2_templr;}
|
|
|
bool get_confirmation(std::string message) { |
|
if (settings.no_interactive) { |
|
spdlog::info("{}: YES", message); |
|
return true; |
|
} |
|
|
|
std::cout << message << std::endl; |
|
std::cout << "Y/n: "; |
|
std::string confirmation; |
|
std::cin >> confirmation; |
|
|
|
|
|
if (!(confirmation == "y" || confirmation == "Y")) { |
|
return false; |
|
} |
|
return true; |
|
} |
|
|
|
/* Execute the collected jobs in dependency order:
 * 1. MAKE_DIR   — create the output tree
 * 2. COPY_FILE / DELETE_FILE — sync verbatim assets, prune stale output
 * 3. TEMPLATE   — stage-2 plugins (rss/microblog) over each page body
 * 4. (POSTPROCESS_HTML — reserved, not implemented)
 * 5. write the final bodies to disk
 * Also records build times into last_build for rebuild_if_newer(). */
void
write_build(std::unordered_map<fs::path, blog_item, pathHash> &compile_jobs)
{
    time_t now = to_time_t(std::chrono::system_clock::now());

    // make directory structure
    for (auto &e: compile_jobs) {
        if (job_type::MAKE_DIR & e.second.type) {
            fs::create_directories(e.first);
            last_build[e.second.src] = now;
        }
    }

    // do copy and deletions
    for (auto &e : compile_jobs) {
        auto src = e.second.src;
        auto dest = e.first;
        if (job_type::COPY_FILE & e.second.type) {
            fs::copy(src, dest, fs::copy_options::update_existing);
            last_build[e.second.src] = now;
        }

        if (job_type::DELETE_FILE & e.second.type) {
            fs::remove_all(e.first);
        }
    }

    for (auto &e: compile_jobs) {
        if (job_type::TEMPLATE & e.second.type) {
            // reapply stg1 and stg2 templates
            std::string page = e.second.properties.at("body");

            // "body" is removed while stage-2 plugins run so they see the
            // page's metadata without its own content, then restored.
            e.second.properties.erase("body");

            // Stage-2 plugins get a read-only view of every job so they
            // can build cross-page output (feeds, indexes).
            std::optional<const std::unordered_map<fs::path, blog_item, pathHash>> non {compile_jobs};
            s2_templr->run_substitution_plugins(page,
                e.second.properties, non);

            e.second.properties["body"] = page;
        }
        if (job_type::MARKDOWN & e.second.type) {
            // run markdown compiler and templating
        }
    }

    for (auto &e: compile_jobs) {
        if ((job_type::POSTPROCESS_HTML) & e.second.type) {
            // run js postprocessing plugins
        }
    }

    // Finally flush every rendered body to its target path.
    for (auto &e: compile_jobs) {
        if ((job_type::MARKDOWN | job_type::TEMPLATE | job_type::WRITE_ARTICLE) & e.second.type) {
            write_file(e.first.string(), e.second.properties.at("body"));
            last_build[e.second.src] = now;
        }
    }
}
|
|
|
/* Top-level build: scaffold a new site if the source is missing, optionally
 * persist the config, collect jobs via build_recursive(), schedule deletion
 * of stale files in the publish root, then execute everything. */
void
build()
{
    std::string start_dir {properties.at("source_root")};

    // First run on an empty source: offer to create the starter site.
    if (!fs::exists(start_dir)) {
        dump_default_site_template();
    }

    if (settings.save_config) {
        const std::string stgen_dest = fs::path(properties.at("source_root")).append(SITE_CONFIG_FNAME);
        if (fs::exists(stgen_dest)) {
            if (get_confirmation("Config file exists, do you want to overwrite it?")) {
                dump_stgen_json();
            }
        } else {
            dump_stgen_json();
        }
    }

    std::unordered_map<fs::path, blog_item, pathHash> compile_jobs {};

    build_recursive(
        default_templates,
        std::unordered_map<std::string, std::string>(properties),
        compile_jobs,
        start_dir
    );

    std::vector<fs::path> to_delete {};

    // Anything in the publish root with no corresponding job is stale
    // output from a previous build — queue it for deletion.
    // this needs to be moved to the main recursing function and do it one
    // directory level at a time files+directories
    if (fs::exists(properties.at("publish_root"))) {
        for (auto e:fs::recursive_directory_iterator(properties.at("publish_root"))) {
            if (!compile_jobs.contains(e.path().string())) {
                compile_jobs[e.path()] = {job_type::DELETE_FILE};
                spdlog::warn("Deleting: {}", e.path().string());
            }
        }

        /*
        int removed = 0;
        for (auto e :to_delete) {
            removed += fs::remove_all(e);
        }
        */
    }

    // stage 2 build

    write_build(compile_jobs);
    update_file_dates();
}
|
|
|
void update_file_dates() { |
|
for (auto e = fs::recursive_directory_iterator(properties.at("source_root")); |
|
e != fs::recursive_directory_iterator(); |
|
++e) |
|
{ |
|
// skip hidden files |
|
std::string filename = e->path().filename().string(); |
|
if (filename.length() > 0 && filename.at(0) == '.') { |
|
e.disable_recursion_pending(); |
|
continue; |
|
} |
|
|
|
time_t last_modification = to_time_t(fs::last_write_time(e->path())); |
|
last_build[e->path()] = last_modification; |
|
} |
|
} |
|
|
|
void rebuild_if_newer() { |
|
bool rebuild = false; |
|
// for (auto &e: fs::directory_iterator(properties.at("source_root"))) { |
|
for (auto e = fs::recursive_directory_iterator(properties.at("source_root")); |
|
e != fs::recursive_directory_iterator(); |
|
++e) |
|
{ |
|
// skip hidden files |
|
std::string filename = e->path().filename().string(); |
|
if (filename.length() > 0 && filename.at(0) == '.') { |
|
e.disable_recursion_pending(); |
|
continue; |
|
} |
|
|
|
if (this->last_build.count(e->path())) { |
|
|
|
time_t last_modification = to_time_t(fs::last_write_time(e->path())); |
|
|
|
if (std::difftime(last_modification, last_build.at(e->path())) > 0) { |
|
rebuild = true; |
|
spdlog::info("Rebuilding with updated file: {}", e->path().string()); |
|
break; |
|
} |
|
|
|
} else { |
|
rebuild = true; |
|
spdlog::info("Rebuilding with new file: {}", e->path().string()); |
|
break; |
|
} |
|
} |
|
|
|
if (rebuild) |
|
build(); |
|
} |
|
|
|
void dump_stgen_json() { |
|
const std::string stgen_dest = fs::path(properties.at("source_root")).append(SITE_CONFIG_FNAME); |
|
|
|
std::string jtext = "{\n"; |
|
for (auto t = properties.begin(); t != properties.end();) { |
|
jtext += " \"" + t->first + "\": \"" + t->second + "\""; |
|
if (++t == properties.end()) { |
|
jtext += "\n}"; |
|
} else { |
|
jtext += ",\n"; |
|
} |
|
} |
|
|
|
write_file(stgen_dest, jtext); |
|
} |
|
|
|
/*
 * Create a brand-new site skeleton in source_root from the templates
 * embedded in the binary (html.template, style.css, index.md,
 * katex.min.css) plus a generated stgen.json.
 *
 * Defaults have to be imported into the properties unordered_map first.
 * Exits the process if the user declines the confirmation prompt.
 */
void dump_default_site_template() {

    if (!get_confirmation("Site source does not exist, do you want to create a new site?")) {
        exit (0);
    }

    fs::create_directories(properties.at("source_root"));

    // Target paths inside the new source root.
    const std::string template_dest = fs::path(properties.at("source_root")).append("html.template");
    const std::string style_dest = fs::path(properties.at("source_root")).append("style.css");
    const std::string doc_dest = fs::path(properties.at("source_root")).append("index.md");
    const std::string katex_dest = fs::path(properties.at("source_root")).append("katex.min.css");
    const std::string katexjs_dest = fs::path(properties.at("source_root")).append("katex.min.js");

    // Embedded starter content (documentation doubles as the first page).
    const std::string documentation = std::string{(char *)default_templates_documentation_md, default_templates_documentation_md_len};

    const std::string style = std::string{(char *)default_templates_style_css, default_templates_style_css_len};
    const std::string katex = std::string{(char *)default_templates_katex_min_css, default_templates_katex_min_css_len};
    //const std::string katexjs = std::string{(char *)default_templates_katex_min_js, default_templates_katex_min_js_len};
    // json propj = json{properties};
    // std::string jtext = propj.dump(4);

    write_file(template_dest, default_templates.at("html"));
    write_file(doc_dest, documentation);
    write_file(style_dest, style);
    write_file(katex_dest, katex);
    //write_file(katexjs_dest, katexjs);
    dump_stgen_json();
}
|
}; |
|
|
|
}; |
|
|
|
std::unordered_map<std::string, std::string> parse_options (int argc, char **argv) { |
|
|
|
cxxopts::Options options("stgen3", "A tiny C++ static site generator.\n\r"); |
|
|
|
options |
|
.positional_help("[source destination]"); |
|
|
|
options.add_options() |
|
("h,help", "Show usage") |
|
("d,documentation", "Show in-depth documentation") |
|
("s,save-config", "Save the configuration used to build the site to src/stgen.json") |
|
("y,noninteractive", "Non-interactive mode (confirm everything)") |
|
("t,test", "Start internal server and build in test mode") |
|
("k,nokatex", "Do not compile KaTeX math at run time") |
|
("c,nohlcode", "Do not highlight source code at run time") |
|
("p,port", "Port to use for the internal server", cxxopts::value<std::string>()) |
|
("n,name", "Site name, defaults to $source", cxxopts::value<std::string>()) |
|
("u,url", "Site url, defaults to file://$destination", cxxopts::value<std::string>()) |
|
("a,author", "Site author", cxxopts::value<std::string>()) |
|
//("input", "Input directory", cxxopts::value<std::string>()) |
|
//("output", "Output directory", cxxopts::value<std::string>()) |
|
("positional", "Destination directory", cxxopts::value<std::vector<std::string>>()) |
|
; |
|
|
|
options.parse_positional({"positional"}); |
|
|
|
auto result = options.parse(argc, argv); |
|
|
|
std::unordered_map<std::string, std::string> parsed_opts {}; |
|
|
|
if (result.count("documentation")) { |
|
const std::string documentation = std::string{(char *)default_templates_documentation_md, default_templates_documentation_md_len}; |
|
|
|
std::cout << documentation << std::endl; |
|
|
|
exit(0); |
|
} |
|
|
|
if (result.count("save-config")) { |
|
settings.save_config = true; |
|
} |
|
if (result.count("test")) { |
|
settings.readonly_properties.insert("url"); |
|
settings.readonly_properties.insert("publish_root"); |
|
settings.test = true; |
|
} |
|
|
|
if (result.count("nokatex")) |
|
settings.comp_katex = false; |
|
|
|
if (result.count("nohlcode")) |
|
settings.comp_code = false; |
|
|
|
if (result.count("port")) { |
|
settings.port = result["port"].as<std::string>(); |
|
} |
|
|
|
if (result.count("help")) |
|
{ |
|
std::cout << options.help({""}) << std::endl; |
|
exit(0); |
|
} |
|
if (result.count("positional") && result["positional"].as<std::vector<std::string>>().size() == 2) { |
|
auto io = result["positional"].as<std::vector<std::string>>(); |
|
parsed_opts["source_root"] = io.at(0); |
|
parsed_opts["publish_root"] = io.at(1); |
|
} else { |
|
fs::path confname = ""; |
|
if (!result.count("positional")) { |
|
confname = fs::path(SITE_CONFIG_FNAME); |
|
} else if (result["positional"].as<std::vector<std::string>>().size() == 1){ |
|
auto io = result["positional"].as<std::vector<std::string>>(); |
|
confname = fs::absolute(io.at(0)).append(SITE_CONFIG_FNAME); |
|
} else { |
|
std::cout << options.help({""}) << std::endl; |
|
exit (1); |
|
} |
|
|
|
if (fs::exists(confname)) { |
|
json parse_json {}; |
|
std::ifstream conffile (confname); |
|
conffile >> parse_json; |
|
|
|
// dump json into properties unordered_map |
|
for (json::iterator it = parse_json.begin(); it != parse_json.end(); ++it) { |
|
parsed_opts[it.key()] = it.value(); |
|
} |
|
} |
|
|
|
if (!parsed_opts.count("publish_root")) { |
|
std::cout << "Destination not specified by argument or config file." << std::endl << std::endl; |
|
std::cout << options.help({""}) << std::endl; |
|
exit (1); |
|
} |
|
} |
|
|
|
|
|
const std::vector v {"name", "url", "author"}; |
|
for (auto i: v) { |
|
if (result.count(i)) { |
|
parsed_opts[i] = result[i].as<std::string>(); |
|
} |
|
} |
|
|
|
if (settings.test) { |
|
parsed_opts["url"] = TEST_URL + settings.port + "/"; |
|
} |
|
|
|
|
|
if (result.count("noninteractive")) |
|
{ |
|
settings.no_interactive = true; |
|
} |
|
|
|
|
|
return parsed_opts; |
|
} |
|
|
|
void kill_childs(int sig) |
|
{ |
|
continue_running = false; |
|
} |
|
|
|
|
|
int main(int argc, char **argv) { |
|
std::cout << short_info << std::endl; |
|
// TODO: |
|
// |
|
// - force dirty everything option |
|
// - a command that dumps the default config (.json, html, css, markdown) |
|
// into a directory as a starting site template. |
|
// |
|
// - bundle KaTeX (w cmake) |
|
// - budnle highlight.js (w cmake) |
|
// |
|
// TODO: Shell command subst plugin |
|
// TODO: define more file types to be processed using templating |
|
// TODO: documentation |
|
// - there are too many string literals |
|
// TODO: fix cli arguments with config file semantics |
|
// TODO: make some setting immutable? / store them separately |
|
// TODO: generate toc from html/markdown? |
|
// TODO: rss feed |
|
// TODO: cli arg to save config to a file |
|
// TODO: - make it only overwrite what is explicitly specified via the |
|
// cli , if the config file already exists. |
|
// TODO: Add a header flag to exclude an article from inclusion in the |
|
// directory list |
|
// TODO: Recursive directory lists |
|
// TODO: Fix specifying folder names and dates in directory lists |
|
// TODO: exclude posts dated for in the future? seems dumb idk |
|
// TODO: transclude from web |
|
// TODO: make templating better wrt recursion and queueing |
|
// TODO: compute target based off of default template sourcefilext -> |
|
// default template -> destination filext |
|
// TODO: Custom template substitution: make recursive through the template |
|
// header declaration: keep running substitution until you run out of |
|
// templates to apply. |
|
// TODO: post tags |
|
// TODO: post list from an RSS feed |
|
// TODO: filesystem watch mode using libevent |
|
// TODO: folder listing semantics: need to look at the index.html for the |
|
// folder. |
|
// TODO: using -s should only set the things specified on the command line |
|
|
|
auto cmd_options = parse_options(argc, argv); |
|
|
|
stgen::builder b (cmd_options); |
|
b.build(); |
|
|
|
if (settings.test) { |
|
|
|
signal(SIGINT, kill_childs); |
|
|
|
server s {}; |
|
s.serve_now(cmd_options.at("publish_root"), settings.port, "127.0.0.1"); |
|
/* darkhttpd registers a signal handler so this just uses that as it is |
|
* compiled in rather than fork and exec'd |
|
*/ |
|
while (continue_running) { |
|
b.rebuild_if_newer(); |
|
std::this_thread::sleep_for(std::chrono::milliseconds(200)); |
|
|
|
} |
|
s.stop_serving(); |
|
} |
|
|
|
putchar('\n'); |
|
return 0; |
|
}
|
|
|