Implement preprocess_use_blocks

While very similar in style to the C version, the C++ implementation takes
27 fewer lines of code (41 lines vs 68), thanks to the niceties of C++.
2024-04-15 04:44:15 +06:30
parent f661438c93
commit 0a93ad5a8a


@@ -11,3 +11,55 @@
*/
#include "./preprocesser.hpp"
#include "./base.hpp"
#include <algorithm>
#include <unordered_map>
using std::pair, std::vector, std::make_pair, std::string, std::string_view;
// ERR hands the untouched input tokens back alongside an error; VAL wraps a
// successful result with an OK pp_err_t.
#define ERR(E) std::make_pair(tokens, (E))
#define VAL(E) std::make_pair(E, pp_err_t{pp_err_type_t::OK})
pair<vector<token_t *>, pp_err_t>
preprocess_use_blocks(vector<token_t *> tokens)
{
  vector<token_t *> new_tokens;
  for (size_t i = 0; i < tokens.size(); ++i)
  {
    token_t *t = tokens[i];
    if (t->type == token_type_t::PP_USE)
    {
      // A use block must be followed by a string literal naming the file to
      // inline.
      if (i + 1 >= tokens.size() ||
          tokens[i + 1]->type != token_type_t::LITERAL_STRING)
      {
        new_tokens.clear();
        return ERR(pp_err_t(pp_err_type_t::EXPECTED_STRING, t));
      }
      token_t *name = tokens[i + 1];
      // Read and tokenise the named file.
      auto source = read_file(name->content.c_str());
      if (!source)
      {
        new_tokens.clear();
        return ERR(pp_err_t(pp_err_type_t::FILE_NONEXISTENT, name));
      }
      vector<token_t *> ftokens;
      lerr_t lerr = tokenise_buffer(source.value(), ftokens);
      if (lerr != lerr_t::OK)
      {
        new_tokens.clear();
        return ERR(pp_err_t(pp_err_type_t::FILE_PARSE_ERROR, name, lerr));
      }
      // Splice the file's tokens in place of the use block.
      new_tokens.insert(new_tokens.end(), ftokens.begin(), ftokens.end());
      // Step over the string literal; the loop's ++i then moves past the
      // whole use block.
      ++i;
    }
    else
      new_tokens.push_back(new token_t{*t});
  }
  return VAL(new_tokens);
}
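
For context, here is a rough sketch of how a caller might drive the new
function end to end. This is an illustration only: the main harness, the
"main.asm" file name and the simplified memory handling are assumptions,
read_file, tokenise_buffer and lerr_t are taken to behave as the hunk above
implies, and pp_err_t's members beyond its OK constructor are not shown
here, so error reporting is elided.

#include "./base.hpp"
#include "./preprocesser.hpp"
#include <vector>

int main(void)
{
  // Hypothetical entry file, purely for illustration.
  auto source = read_file("main.asm");
  if (!source)
    return 1;

  std::vector<token_t *> tokens;
  if (tokenise_buffer(source.value(), tokens) != lerr_t::OK)
    return 1;

  // Expand every use block into the tokens of the file it names.
  auto [expanded, err] = preprocess_use_blocks(tokens);
  (void)err; // a real caller would inspect the error here

  // ... hand `expanded` to the next preprocessing/assembly stage ...

  // Free the token copies; handling of the original `tokens` vector is left
  // out of this sketch.
  for (token_t *tok : expanded)
    delete tok;
  return 0;
}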