
2 stage build

master
alistair, 3 years ago
commit 372dfd6ed2
1 changed file: main.cpp, 132 changes

main.cpp

@@ -290,7 +290,6 @@ compute_url(fs::path path, std::map<std::string, std::string> properties)
fs::path target = compute_target(path, properties);
fs::path rel = fs::relative(target, properties.at("publish_root"));
fs::path url = fs::path(properties.at("url")) / rel;
return url.string();
}
@@ -753,7 +752,9 @@ class feed_builder {
auto e_link = e_feed->InsertNewChildElement("link");
e_link->SetAttribute("rel", "self");
e_link->SetAttribute("href", url.c_str());
std::string uurl = url;
e_link->SetAttribute("href", uurl.c_str());
auto e_updated = e_feed->InsertNewChildElement("updated");
/* shouldn't really be generation time: should be last modification time
@@ -772,7 +773,7 @@ class feed_builder {
}
auto e_feed_id = e_feed->InsertNewChildElement("id");
e_feed_id->SetText(url.c_str());
e_feed_id->SetText(uurl.c_str());
}
@@ -897,7 +898,7 @@ class rss_feed_plugin : public substitution_plugin {
}
}
feed_builder f {properties.at("url"), properties.at("name"), {properties.at("author")}};
feed_builder f {compute_url(properties.at("current_file"), properties), properties.at("name"), {properties.at("author")}};
for (auto entry = entries.rbegin(); entry != entries.rend(); entry++) {
// write into rss feed
@@ -924,10 +925,24 @@ class builder {
std::map<std::string, substitution_plugin *> substitution_commands;
std::map<std::string, std::string> default_templates;
std::map<std::string, std::string> properties;
std::set<std::string> touched_files;
const std::set<std::string> apply_templates_exts {"md","html", "txt", "markdown", "xml"};
enum job_type {
COPY_FILE = 1,
MARKDOWN = 1 << 1,
TEMPLATE = 1 << 2,
DELETE_FILE = 1 << 3,
MAKE_DIR = 1 << 4
};
struct blog_item {
job_type type;
fs::path src;
std::map<std::string, std::string> properties;
};
void
add_substitution_plugins()
{
@@ -1039,14 +1054,14 @@ class builder {
}
bool notify_file_write(fs::path target) {
if (touched_files.count(target)) {
spdlog::error("File conflict: {}", target.string());
spdlog::info("Multiple sources compile to {}", target.string());
//if (touched_files.count(target)) {
// spdlog::error("File conflict: {}", target.string());
// spdlog::info("Multiple sources compile to {}", target.string());
return true;
}
//}
touched_files.insert(target);
return false;
//touched_files.insert(target);
//return false;
}
/*** Build Function ****************
@@ -1073,8 +1088,10 @@ class builder {
*/
void build_recursive(std::map<std::string, std::string> page_templates,
std::map<std::string, std::string> properties,
std::map<fs::path, blog_item> &compile_jobs,
const fs::path &directory) {
std::vector<fs::path> next_directories;
std::vector<fs::path> next_files;
@@ -1096,8 +1113,7 @@ class builder {
/* DO NOTHING BEFORE HERE */
fs::create_directories(compute_target(directory, properties));
notify_file_write(compute_target(directory, properties));
compile_jobs[compute_target(directory, properties)] = {job_type::MAKE_DIR, directory};
// first pass of directory
//
@@ -1185,6 +1201,7 @@ class builder {
}
auto target = compute_target(entry, article_properties);
article_properties["target"] = target.string();
if (article_properties.count("date")) {
std::string ndate = reformat_date(article_properties.at("date"), article_properties);
@@ -1239,17 +1256,18 @@ class builder {
/* write body into template */
run_substitution_plugins(new_page, article_properties);
article_properties["body"] = new_page;
notify_file_write(target);
write_file(target, new_page);
compile_jobs[target] = {job_type::TEMPLATE, entry, std::map<std::string, std::string>(article_properties)};
continue;
}
// else just copy
auto target = compute_target(entry, properties);
auto target = compute_target(entry, article_properties);
notify_file_write(target);
fs::copy(entry, target, fs::copy_options::update_existing);
compile_jobs[target] = {job_type::COPY_FILE, entry, std::map<std::string, std::string>(article_properties)};
// build
@@ -1275,7 +1293,7 @@ class builder {
// recurse
for (auto d : next_directories) {
build_recursive(std::map<std::string, std::string>(page_templates),
std::map<std::string, std::string>(properties), d);
std::map<std::string, std::string>(properties), compile_jobs, d);
}
}
@@ -1364,6 +1382,50 @@ class builder {
return true;
}
void
write_build(std::map<fs::path, blog_item> &compile_jobs)
{
for (auto e: compile_jobs) {
auto src = e.second.src;
auto dest = e.first;
if (job_type::COPY_FILE & e.second.type) {
fs::copy(src, dest, fs::copy_options::update_existing);
}
if (job_type::MAKE_DIR & e.second.type) {
fs::create_directories(e.first);
}
if (job_type::DELETE_FILE & e.second.type) {
fs::remove_all(e.first);
}
}
for (auto e: compile_jobs) {
if (job_type::TEMPLATE & e.second.type) {
// reapply stg1 and stg2 templates
}
if (job_type::MARKDOWN & e.second.type) {
// run markdown
}
}
for (auto e: compile_jobs) {
}
for (auto e: compile_jobs) {
if ((job_type::MARKDOWN | job_type::TEMPLATE) & e.second.type) {
write_file(e.first, e.second.properties.at("body"));
}
}
}
void
build()
{
@@ -1382,28 +1444,42 @@ class builder {
}
}
touched_files = std::set<std::string> ();
std::map<fs::path, blog_item> compile_jobs {};
build_recursive(
default_templates,
std::map<std::string, std::string>(properties),
start_dir);
compile_jobs,
start_dir
);
std::vector<fs::path> to_delete {};
// this needs to be moved to the main recursing function and do it one
// directory level at a time files+directories
for (auto e:fs::recursive_directory_iterator(properties.at("publish_root"))) {
if (!touched_files.contains(e.path().string())) {
to_delete.push_back(e.path());
spdlog::warn("Deleting: {}", e.path().string());
if (fs::exists(properties.at("publish_root"))) {
for (auto e:fs::recursive_directory_iterator(properties.at("publish_root"))) {
if (!compile_jobs.contains(e.path().string())) {
// to_delete.push_back(e.path());
compile_jobs[e.path()] = {job_type::DELETE_FILE};
spdlog::warn("Deleting: {}", e.path().string());
}
}
}
int removed = 0;
for (auto e :to_delete) {
removed += fs::remove_all(e);
/*
int removed = 0;
for (auto e :to_delete) {
removed += fs::remove_all(e);
}
*/
}
// stage 2 build
write_build(compile_jobs);
}
void dump_stgen_json() {
@@ -1453,7 +1529,7 @@ std::map<std::string, std::string> parse_options (int argc, char **argv) {
cxxopts::Options options("stgen3", "A tiny C++ static site generator.\n\r");
options
.positional_help("source destination");
.positional_help("[source destination]");
options.add_options()
("h,help", "Show usage")
