Diffstat (limited to 'src')
-rw-r--r--  src/CMakeLists.txt     13
-rw-r--r--  src/FileAssembler.cpp 232
-rw-r--r--  src/main.cpp          108
3 files changed, 353 insertions, 0 deletions
diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt
new file mode 100644
index 0000000..81f1e7d
--- /dev/null
+++ b/src/CMakeLists.txt
@@ -0,0 +1,13 @@
+cmake_minimum_required(VERSION 3.8)
+set(CMAKE_CXX_STANDARD 17) # <filesystem> requires C++17
+include_directories(${PROJECT_SOURCE_DIR}/inc)
+add_executable(
+ ${EXE}
+ main.cpp
+ FileAssembler.cpp
+ )
+add_custom_target(run
+ COMMAND ${EXE}
+ DEPENDS ${EXE}
+ WORKING_DIRECTORY ${PROJECT_SOURCE_DIR}
+ )
+target_link_libraries(${EXE})
diff --git a/src/FileAssembler.cpp b/src/FileAssembler.cpp
new file mode 100644
index 0000000..b224a0c
--- /dev/null
+++ b/src/FileAssembler.cpp
@@ -0,0 +1,232 @@
+#include "../inc/FileAssembler.h"
+#include "../inc/maddy/parser.h"
+#include <filesystem>
+#include <iostream>
+#include <string>
+#include <algorithm>
+#include <memory>
+#include <fstream>
+#include <sstream>
+#include <list>
+#include <map>
+
+using namespace std;
+namespace fs = filesystem;
+
+FileAssembler::FileAssembler(string path): path(path){
+ for (const auto & l1 : fs::directory_iterator(path)){
+ if(l1.path() == path+".config")
+ parse_variables();
+ else if(l1.path() == path+"templates" ||
+ l1.path() == path+"pages" ||
+ l1.path() == path+"posts"){
+ for (const auto & l2 : fs::directory_iterator(l1.path())){
+ string content = get_file_content(l2.path());
+ stringstream ss;
+ shared_ptr<maddy::ParserConfig> config = make_shared<maddy::ParserConfig>();
+ config->isEmphasizedParserEnabled = true; // default
+ config->isHTMLWrappedInParagraph = false;
+ shared_ptr<maddy::Parser> parser = make_shared<maddy::Parser>(config);
+ if(l1.path() == path+"templates"){
+ templates[l2.path().filename()] = content;
+ }else if(l1.path() == path+"pages"){
+ ss.str(content);
+ if(l2.path().filename().u8string().substr(0, 5) == "link_") // Link
+ pages[l2.path().filename()] = content.substr(0,content.length()-1); //keeping plaintext link
+ else
+ pages[l2.path().filename()] = parser->Parse(ss); //parsing md to html
+ }else{
+ ss.str(content);
+ posts[l2.path().filename()] = parser->Parse(ss); //parsing md to html
+ }
+ }
+ }
+ }
+}
+
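+// Reads "name=value" pairs (one per line) from the .config file in the source folder into 'variables'; requires 'website' and makes sure it ends with '/'.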
+void FileAssembler::parse_variables(){
+ string to_parse = get_file_content(path+".config");
+ size_t pos_eq = 0;
+ size_t pos_endl = 0;
+ while ((pos_eq = to_parse.find("=")) != string::npos){
+ if((pos_endl = to_parse.find("\n")) == string::npos)
+ pos_endl = to_parse.length()-1;
+ string name = to_parse.substr(0, pos_eq);
+ variables[name] = to_parse.substr(pos_eq+1, pos_endl-(pos_eq+1));
+ to_parse.erase(0,pos_endl+1);
+ }
+ if(variables.find("website") == variables.end()){
+ cerr << "Error: swg: Variable 'website' is not defined." << endl;
+ exit(3);
+ }
+ string url = variables["website"];
+ if(url.empty() || url[url.length()-1] != '/')
+ url += '/';
+ variables["website"] = url;
+}
+
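+// Returns the whole content of the file at 'path' (line endings normalised to '\n'), or an empty string if it cannot be opened.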
+string FileAssembler::get_file_content(string path){
+ fstream file;
+ file.open(path, ios::in);
+ string content;
+ if(file.is_open()){
+ string l;
+ while(getline(file, l))
+ content += l+"\n";
+ file.close();
+ }
+ return content;
+}
+
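+// The two getters below assemble the final HTML for every page/post from the loaded templates.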
+map <string, string> FileAssembler::get_pages(){
+ return assemble_from_iterator(pages.begin(), pages.end(), false);
+}
+
+map <string, string> FileAssembler::get_posts(){
+ return assemble_from_iterator(posts.begin(), posts.end(), true);
+}
+
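+// Wraps each entry in header.html/footer.html and runs parse() on the result; "link_" entries store a plain link and are skipped here.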
+map <string, string> FileAssembler::assemble_from_iterator(map<string, string>::iterator it, map<string, string>::iterator end, bool is_post){
+ if(templates.find("header.html") == templates.end()){
+ cerr << "Error: swg: header.html is not present in the sourced folder." << endl;
+ exit(2);
+ } else if(templates.find("footer.html") == templates.end()){
+ cerr << "Error: swg: footer.html is not present in the sourced folder." << endl;
+ exit(2);
+ }/* else if(templates.find("menu_listing.html") == templates.end()){
+ cerr << "Error: swg: menu_listing.html is not present in the sourced folder." << endl;
+ exit(2);
+ } else if(templates.find("post_listing.html") == templates.end()){
+ cerr << "Error: swg: post_listing.html is not present in the sourced folder." << endl;
+ exit(2);
+ }*/
+ map<string, string> p_it;
+ while (it != end){
+ if(it->first.substr(0, 5) != "link_"){ // Ignoring link pages
+ p_it[it->first] = parse(it->first, templates["header.html"] + it->second + templates["footer.html"], is_post);
+ }
+ it ++;
+ }
+ return p_it;
+}
+
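+// Replaces every "$...$" section in 'to_parse': $title$, $date(...)$, variables from .config, $res(path)$ resources and $list_menu$/$list_post$ listings.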
+string FileAssembler::parse(string title, string to_parse, bool is_post){
+ string parsed = to_parse;
+ string url = variables["website"];
+
+ if(is_post)
+ variables["link"] = url + "posts/" + lowercase(title) + ".html";
+ else{
+ if(title == variables["index"]){
+ variables["link"] = url + "index.html";
+ }else if(title.substr(0, 5) == "link_"){ // Link
+ variables["link"] = pages[title];
+ }else{
+ variables["link"] = url + lowercase(title) + ".html";
+ }
+ variables.erase("date"); // only for pages: posts keep 'date' because they may be rendered from a listing that has already set it
+ }
+ // Parsing variables and functions
+ size_t pos_first = 0;
+ size_t pos_second = 0;
+ while ((pos_first = parsed.find("$")) != string::npos){
+ if((pos_second = parsed.find("$", pos_first+1)) == string::npos){
+ cerr << "Error: swg: unclosed $ section in " << title << "." << endl;
+ exit(4);
+ }
+ string input = parsed.substr(pos_first+1, pos_second-pos_first-1);
+ //cout << pos_first << " + " << pos_second << "=" << input << endl;
+
+ string output = "";
+
+ if(input == "title") //TITLE
+ if(title.substr(0, 5) == "link_") // Link
+ output = title.substr(5,title.length()-5);
+ else
+ output = title;
+
+ else if(input.substr(0, 5) == "date("){
+ variables["date"] = parse_arg("date",to_parse);
+ output = " "; // I have to fill the variable to erase the $ section
+ }else if(variables.find(input) != variables.end()) //VARIABLES
+ output = variables[input];
+
+ else if(input.substr(0,4) == "res("){ //RESOURCES
+ string full_path = input.substr(4,input.length()-1-4);
+ output = url + full_path;
+
+ if(full_path.find("/") != string::npos){
+ size_t s;
+
+ string tok;
+ string last_tok;
+
+ while ((s = full_path.find("/")) != string::npos){ // Adding the path progressively
+ tok = full_path.substr(0,s); // take the whole segment, not s-1
+ if(find(cached_res.begin(), cached_res.end(), last_tok+tok) == cached_res.end())
+ cached_res.push_back(last_tok+tok); // Not found, adding it
+ last_tok += tok+"/";
+ full_path.erase(0,s+1); // erase the segment and its '/' so the loop terminates
+ }
+ }
+ if(find(cached_res.begin(), cached_res.end(), input.substr(4,input.length()-1-4)) == cached_res.end())
+ cached_res.push_back(input.substr(4,input.length()-1-4)); // Not found, adding it
+
+
+ } else if(input.substr(0,5) == "list_"){ //LISTINGS
+
+ string name = input.substr(5); // everything after "list_"
+ string list_template = name + "_listing.html";
+
+ if(templates.find(list_template) == templates.end()){
+ cerr << "Error: swg: Listing template '" << list_template << "' does not exist." << endl;
+ exit(2);
+ }
+
+ if(name == "menu"){
+ string current_link = variables["link"];
+ map<string, string>::iterator it = pages.begin();
+ while (it != pages.end()){
+ output += parse(it->first,templates[list_template], false);
+ it ++;
+ }
+ variables["link"] = current_link;
+ }else if(name == "post"){
+ string current_date = "";
+ string current_link = variables["link"];
+ if(variables.find("date") != variables.end())
+ current_date = variables["date"];
+ map<string, string>::iterator it = posts.begin();
+ while (it != posts.end()){
+ string date = parse_arg("date", it->second);
+ if(date != "")
+ variables["date"] = date;
+ else{
+ cerr << "Error: swg: Variable 'date' of post '" << it->first << "' is not defined." << endl;
+ exit(5);
+ }
+ output += parse(it->first,templates[list_template], true);
+ it ++;
+ }
+ if(current_date != "")
+ variables["date"] = current_date;
+ else
+ variables.erase("date");
+ variables["link"] = current_link;
+ }
+ }
+ if(output.length() == 0){
+ cerr << "Error: swg: Invalid swg text section: \"" << input << "\"." << endl;
+ exit(4);
+ }
+ parsed.replace(pos_first,pos_second-pos_first+1, output);
+ }
+ return parsed;
+}
+
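+// Extracts the value of "$<arg_name>(value)" when the marker sits at a fixed offset (3) in 'to_parse'; the offsets are currently hard-coded for "date". Returns "" if not found.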
+string FileAssembler::parse_arg(string arg_name, string to_parse){
+ if(to_parse.substr(3,6) != "$"+arg_name+"(")
+ return "";
+ size_t end = to_parse.find(")");
+ if (end == string::npos)
+ return "";
+ string parsed = to_parse.substr(9, end-9);
+ return parsed;
+}
diff --git a/src/main.cpp b/src/main.cpp
new file mode 100644
index 0000000..333a841
--- /dev/null
+++ b/src/main.cpp
@@ -0,0 +1,108 @@
+#include "../inc/FileAssembler.h"
+#include <filesystem>
+#include <iostream>
+#include <fstream>
+#include <cstring>
+#include <string>
+
+
+using namespace std;
+namespace fs = filesystem;
+
+void write_file(string path, string content){
+ fstream f;
+ f.open(path, ios::out);
+ if (!f) {
+ cerr << "Error: swg: File '" << path << "' could not be created!" << endl;
+ exit(6);
+ }else{
+ f << content;
+ f.close();
+ }
+}
+
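+// Stub for a recursive copy helper; not used yet - generateWebsite() calls fs::copy directly below.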
+void recursive_directory_copy(const fs::path& src, const fs::path& dst) noexcept
+{
+}
+
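+// Renders all pages and posts into the target folder and copies the resources folder alongside them.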
+int generateWebsite(string arg, string config){
+ // cout << arg << " " << config << endl;
+ if(config[config.length()-1] != '/')
+ config += "/";
+ FileAssembler *fa = new FileAssembler(config);
+ /* TODO
+ * clean or don't clean*/
+ map<string, string> pages = fa->get_pages();
+ map<string, string> posts = fa->get_posts();
+ string target = fa->get_target();
+ string index = fa->get_index();
+
+ if(!fs::create_directory(target)){
+ cerr << "Error: swg: Directory '" << target << "' could not be created!" << endl;
+ exit(6);
+ }
+ if(!fs::create_directory(target+"/posts")){
+ cerr << "Error: swg: Directory '" << target << "/posts' could not be created!" << endl;
+ exit(6);
+ }
+ map<string, string>::iterator it = pages.begin();
+ while(it != pages.end()){
+ if(it->first == index){
+ write_file(target+"/index.html", it->second);
+ }else{
+ write_file(target+"/"+FileAssembler::lowercase(it->first)+".html", it->second);
+ }
+ it++;
+ }
+
+ it = posts.begin();
+ while(it != posts.end()){
+ write_file(target+"/posts/"+FileAssembler::lowercase(it->first)+".html", it->second);
+ it++;
+ }
+
+ if (false){ // Only copying the cached resources vs all of them
+ /** list<string> resources = fa->get_cached_resources();
+
+ list<string>::iterator res = resources.begin();
+ while(res != resources.end()){
+ error_code ec;
+ fs::copy(config+"resources/"+*res, target+"/"+*res,ec);
+ if(ec.value() != 0){
+ cerr << "Error: swg: Resource file '" << *res << "' could not be copied!" << endl;
+ exit(6);
+ }
+ res ++;
+ }
+ **/
+ }else{
+ // Recursively copies all files and folders from src to target and overwrites existing files in target.
+ try{
+ fs::copy(config+"resources/", target, fs::copy_options::overwrite_existing | fs::copy_options::recursive);
+ }catch (std::exception& e){
+ std::cerr << "Error: swg: copying resources failed: " << e.what() << std::endl;
+ }
+ }
+
+ delete fa;
+ return 0;
+}
+
+int main(int argc, char * argv[]){
+ for(int i = 1; i < argc; i++){
+ if(!strcmp(argv[i], "-h") || !strcmp(argv[i], "--help") ){
+ cout << "Usage: swg <options>\nOptions are:" << endl;
+ cout << "Option list goes here" << endl;
+ exit(0);
+ }else if(!strcmp(argv[i], "-t") || !strcmp(argv[i], "-g") || !strcmp(argv[i], "-cg")){
+ // TODO: these options are recognized but not handled yet
+ }else{ //TODO: handle this properly - accept two paths when -g is specified
+ if(i == argc-1){ // the last non-option argument is treated as the source directory
+ return generateWebsite(argv[i-1], argv[i]);
+ }
+ cerr << "ERROR: Invalid Command Line Option Found: \"" << argv[i] << "\"." << endl;
+ return 1;
+ }
+ }
+
+ cerr << "ERROR: No Command Line Option Found. Type in --help or -h" << endl;
+ return 1;
+}