-rw-r--r-- | asm/preprocesser.cpp | 52 |
1 file changed, 52 insertions, 0 deletions
diff --git a/asm/preprocesser.cpp b/asm/preprocesser.cpp
index 615cf93..d429bf1 100644
--- a/asm/preprocesser.cpp
+++ b/asm/preprocesser.cpp
@@ -11,3 +11,55 @@
  */
 
 #include "./preprocesser.hpp"
+#include "./base.hpp"
+
+#include <algorithm>
+#include <unordered_map>
+
+using std::pair, std::vector, std::make_pair, std::string, std::string_view;
+
+#define ERR(E) std::make_pair(tokens, (E))
+#define VAL(E) std::make_pair(E, pp_err_t{pp_err_type_t::OK})
+
+pair<vector<token_t *>, pp_err_t>
+preprocess_use_blocks(vector<token_t *> tokens)
+{
+  vector<token_t *> new_tokens;
+  for (size_t i = 0; i < tokens.size(); ++i)
+  {
+    token_t *t = tokens[i];
+    if (t->type == token_type_t::PP_USE)
+    {
+      if (i + 1 >= tokens.size() ||
+          tokens[i + 1]->type != token_type_t::LITERAL_STRING)
+      {
+        new_tokens.clear();
+        return ERR(pp_err_t(pp_err_type_t::EXPECTED_STRING, t));
+      }
+
+      token_t *name = tokens[i + 1];
+      auto source = read_file(name->content.c_str());
+      if (!source)
+      {
+        new_tokens.clear();
+        return ERR(pp_err_t(pp_err_type_t::FILE_NONEXISTENT, name));
+      }
+
+      std::vector<token_t *> ftokens;
+      lerr_t lerr = tokenise_buffer(source.value(), ftokens);
+      if (lerr != lerr_t::OK)
+      {
+        new_tokens.clear();
+        return ERR(pp_err_t(pp_err_type_t::FILE_PARSE_ERROR, name, lerr));
+      }
+
+      new_tokens.insert(new_tokens.end(), ftokens.begin(), ftokens.end());
+
+      i += 2;
+    }
+    else
+      new_tokens.push_back(new token_t{*t});
+  }
+  return VAL(new_tokens);
+}
+
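For context, here is a minimal, self-contained sketch of the splice-on-use technique the new preprocess_use_blocks implements: walk the token stream, and when a use directive is followed by a string-literal file name, read and tokenise that file and splice its tokens into the output in place of the pair; on any failure, hand back the untouched input together with an error. Token, TokenType, PpError, loadAndTokenise and preprocessUseBlocks below are simplified names invented for this illustration; they are not the repository's token_t / pp_err_t / read_file / tokenise_buffer API, and the sketch copies tokens by value instead of managing heap-allocated token_t pointers.

#include <cstddef>
#include <fstream>
#include <optional>
#include <string>
#include <utility>
#include <vector>

enum class TokenType { PpUse, LiteralString, Other };

struct Token
{
  TokenType type;
  std::string content; // file name for LiteralString, raw text otherwise
};

// Hypothetical stand-in for the repo's read_file + tokenise_buffer step:
// read `path` and produce one Other token per whitespace-separated word,
// or nullopt if the file cannot be opened.
std::optional<std::vector<Token>> loadAndTokenise(const std::string &path)
{
  std::ifstream in(path);
  if (!in)
    return std::nullopt;
  std::vector<Token> result;
  std::string word;
  while (in >> word)
    result.push_back({TokenType::Other, word});
  return result;
}

struct PpError
{
  std::string message;
  const Token *where = nullptr; // token the error refers to
};

// Expand every (PpUse, LiteralString) pair into the tokens of the named
// file; copy every other token through unchanged. On error, return the
// untouched input paired with a description, as the diff does via ERR.
std::pair<std::vector<Token>, std::optional<PpError>>
preprocessUseBlocks(const std::vector<Token> &tokens)
{
  std::vector<Token> out;
  for (std::size_t i = 0; i < tokens.size(); ++i)
  {
    const Token &t = tokens[i];
    if (t.type != TokenType::PpUse)
    {
      out.push_back(t);
      continue;
    }
    if (i + 1 >= tokens.size() ||
        tokens[i + 1].type != TokenType::LiteralString)
      return {tokens, PpError{"expected string literal after use", &t}};

    auto included = loadAndTokenise(tokens[i + 1].content);
    if (!included)
      return {tokens, PpError{"cannot read file", &tokens[i + 1]}};

    // Splice the included file's tokens in place of the directive pair.
    out.insert(out.end(), included->begin(), included->end());
    ++i; // skip the file-name token; the loop's ++i steps past the directive
  }
  return {out, std::nullopt};
}

Note that on failure both the diff and this sketch return the original token vector to the caller alongside the error, so the caller keeps the input it passed in and can still point at the offending token when reporting the problem.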