token_t -> Token
Use C++'s implicit typedef: declaring `struct Token` already introduces `Token` as a type name, so the C-style `typedef`/`_t` convention is unnecessary.
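
A minimal illustration of the "implicit typedef" this commit relies on (a sketch for context, not code from this repository): in C, a struct tag is not usable as a bare type name, hence the `typedef ... token_t` idiom; in C++, the class name is injected into the enclosing scope as a type name, so `Token` works directly.

    // C needs an explicit alias before the tag is usable as a plain type:
    //   typedef struct token { /* ... */ } token_t;
    //
    // C++ injects the class name as a type name automatically, so the
    // C-style `_t` alias buys nothing:
    struct Token
    {
      int line;
    };

    int main()
    {
      Token t{42}; // no `typedef` and no `struct` keyword required
      return t.line == 42 ? 0 : 1;
    }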
src/lexer.cpp
@@ -38,8 +38,8 @@ bool initial_match(string_view src, string_view match)
   return (src.size() > match.size() && src.substr(0, match.size()) == match);
 }
 
-pair<token_t, lerr_t> tokenise_symbol(string_view &source, size_t &column,
+pair<Token, lerr_t> tokenise_symbol(string_view &source, size_t &column,
                                       size_t line)
 {
   auto end = source.find_first_not_of(VALID_SYMBOL);
   if (end == string::npos)
@@ -48,7 +48,7 @@ pair<token_t, lerr_t> tokenise_symbol(string_view &source, size_t &column,
   source.remove_prefix(end);
   std::transform(sym.begin(), sym.end(), sym.begin(), ::toupper);
 
-  token_t t{};
+  Token t{};
 
   if (sym == "%CONST")
   {
@@ -69,7 +69,7 @@ pair<token_t, lerr_t> tokenise_symbol(string_view &source, size_t &column,
   }
   else if (sym.size() > 1 && sym[0] == '$')
   {
-    t = token_t(token_type_t::PP_REFERENCE, sym.substr(1));
+    t = Token(token_type_t::PP_REFERENCE, sym.substr(1));
   }
   else if (sym == "NOOP")
   {
@@ -81,47 +81,47 @@ pair<token_t, lerr_t> tokenise_symbol(string_view &source, size_t &column,
   }
   else if (initial_match(sym, "PUSH.REG."))
   {
-    t = token_t(token_type_t::PUSH_REG, sym.substr(9));
+    t = Token(token_type_t::PUSH_REG, sym.substr(9));
   }
   else if (initial_match(sym, "PUSH."))
   {
-    t = token_t(token_type_t::PUSH, sym.substr(5));
+    t = Token(token_type_t::PUSH, sym.substr(5));
   }
   else if (initial_match(sym, "POP."))
   {
-    t = token_t(token_type_t::POP, sym.substr(4));
+    t = Token(token_type_t::POP, sym.substr(4));
   }
   else if (initial_match(sym, "MOV."))
   {
-    t = token_t(token_type_t::MOV, sym.substr(4));
+    t = Token(token_type_t::MOV, sym.substr(4));
   }
   else if (initial_match(sym, "DUP."))
   {
-    t = token_t(token_type_t::DUP, sym.substr(4));
+    t = Token(token_type_t::DUP, sym.substr(4));
   }
   else if (initial_match(sym, "MALLOC.STACK."))
   {
-    t = token_t(token_type_t::MALLOC_STACK, sym.substr(13));
+    t = Token(token_type_t::MALLOC_STACK, sym.substr(13));
   }
   else if (initial_match(sym, "MALLOC."))
   {
-    t = token_t(token_type_t::MALLOC, sym.substr(7));
+    t = Token(token_type_t::MALLOC, sym.substr(7));
   }
   else if (initial_match(sym, "MSET.STACK."))
   {
-    t = token_t(token_type_t::MSET_STACK, sym.substr(11));
+    t = Token(token_type_t::MSET_STACK, sym.substr(11));
   }
   else if (initial_match(sym, "MSET."))
   {
-    t = token_t(token_type_t::MSET, sym.substr(5));
+    t = Token(token_type_t::MSET, sym.substr(5));
   }
   else if (initial_match(sym, "MGET.STACK."))
   {
-    t = token_t(token_type_t::MGET_STACK, sym.substr(11));
+    t = Token(token_type_t::MGET_STACK, sym.substr(11));
   }
   else if (initial_match(sym, "MGET."))
   {
-    t = token_t(token_type_t::MGET, sym.substr(5));
+    t = Token(token_type_t::MGET, sym.substr(5));
   }
   else if (sym == "MDELETE")
   {
@@ -133,55 +133,55 @@ pair<token_t, lerr_t> tokenise_symbol(string_view &source, size_t &column,
   }
   else if (initial_match(sym, "NOT."))
   {
-    t = token_t(token_type_t::NOT, sym.substr(4));
+    t = Token(token_type_t::NOT, sym.substr(4));
   }
   else if (initial_match(sym, "OR."))
   {
-    t = token_t(token_type_t::OR, sym.substr(3));
+    t = Token(token_type_t::OR, sym.substr(3));
   }
   else if (initial_match(sym, "AND."))
   {
-    t = token_t(token_type_t::AND, sym.substr(4));
+    t = Token(token_type_t::AND, sym.substr(4));
   }
   else if (initial_match(sym, "XOR."))
   {
-    t = token_t(token_type_t::XOR, sym.substr(4));
+    t = Token(token_type_t::XOR, sym.substr(4));
   }
   else if (initial_match(sym, "EQ."))
   {
-    t = token_t(token_type_t::EQ, sym.substr(3));
+    t = Token(token_type_t::EQ, sym.substr(3));
   }
   else if (initial_match(sym, "LTE."))
   {
-    t = token_t(token_type_t::LTE, sym.substr(4));
+    t = Token(token_type_t::LTE, sym.substr(4));
   }
   else if (initial_match(sym, "LT."))
   {
-    t = token_t(token_type_t::LT, sym.substr(3));
+    t = Token(token_type_t::LT, sym.substr(3));
   }
   else if (initial_match(sym, "GTE."))
   {
-    t = token_t(token_type_t::GTE, sym.substr(4));
+    t = Token(token_type_t::GTE, sym.substr(4));
   }
   else if (initial_match(sym, "GT."))
   {
-    t = token_t(token_type_t::GT, sym.substr(3));
+    t = Token(token_type_t::GT, sym.substr(3));
   }
   else if (initial_match(sym, "SUB."))
   {
-    t = token_t(token_type_t::SUB, sym.substr(4));
+    t = Token(token_type_t::SUB, sym.substr(4));
   }
   else if (initial_match(sym, "PLUS."))
   {
-    t = token_t(token_type_t::PLUS, sym.substr(5));
+    t = Token(token_type_t::PLUS, sym.substr(5));
   }
   else if (initial_match(sym, "MULT."))
   {
-    t = token_t(token_type_t::MULT, sym.substr(5));
+    t = Token(token_type_t::MULT, sym.substr(5));
   }
   else if (initial_match(sym, "PRINT."))
   {
-    t = token_t(token_type_t::PRINT, sym.substr(6));
+    t = Token(token_type_t::PRINT, sym.substr(6));
   }
   else if (sym == "JUMP.ABS")
   {
@@ -193,7 +193,7 @@ pair<token_t, lerr_t> tokenise_symbol(string_view &source, size_t &column,
   }
   else if (initial_match(sym, "JUMP.IF."))
   {
-    t = token_t(token_type_t::JUMP_IF, sym.substr(8));
+    t = Token(token_type_t::JUMP_IF, sym.substr(8));
   }
   else if (sym == "CALL.STACK")
   {
@@ -223,7 +223,7 @@ pair<token_t, lerr_t> tokenise_symbol(string_view &source, size_t &column,
   return make_pair(t, lerr_t());
 }
 
-token_t tokenise_literal_number(string_view &source, size_t &column)
+Token tokenise_literal_number(string_view &source, size_t &column)
 {
   bool is_negative = false;
   if (source[0] == '-')
@@ -238,15 +238,15 @@ token_t tokenise_literal_number(string_view &source, size_t &column)
   string digits{source.substr(0, end)};
   source.remove_prefix(end);
 
-  token_t t{token_type_t::LITERAL_NUMBER, (is_negative ? "-" : "") + digits,
+  Token t{token_type_t::LITERAL_NUMBER, (is_negative ? "-" : "") + digits,
           column};
 
   column += digits.size() + (is_negative ? 1 : 0);
 
   return t;
 }
 
-token_t tokenise_literal_hex(string_view &source, size_t &column)
+Token tokenise_literal_hex(string_view &source, size_t &column)
 {
   // Remove x char from source
   source.remove_prefix(1);
@@ -256,16 +256,16 @@ token_t tokenise_literal_hex(string_view &source, size_t &column)
   string digits{source.substr(0, end)};
   source.remove_prefix(end);
 
-  token_t t = {token_type_t::LITERAL_NUMBER, "0x" + digits, column};
+  Token t = {token_type_t::LITERAL_NUMBER, "0x" + digits, column};
 
   column += digits.size() + 1;
   return t;
 }
 
-pair<token_t, lerr_t> tokenise_literal_char(string_view &source, size_t &column,
+pair<Token, lerr_t> tokenise_literal_char(string_view &source, size_t &column,
                                             size_t &line)
 {
-  token_t t{};
+  Token t{};
   auto end = source.find('\'', 1);
   if (source.size() < 3 || end == 1 || end > 3)
     return make_pair(t,
@@ -299,37 +299,37 @@ pair<token_t, lerr_t> tokenise_literal_char(string_view &source, size_t &column,
                                        column, line));
       break;
     }
-    t = token_t{token_type_t::LITERAL_CHAR, std::to_string(escape), column};
+    t = Token{token_type_t::LITERAL_CHAR, std::to_string(escape), column};
     column += 4;
     source.remove_prefix(4);
   }
   else
   {
-    t = token_t(token_type_t::LITERAL_CHAR, std::to_string(source[1]));
+    t = Token(token_type_t::LITERAL_CHAR, std::to_string(source[1]));
     column += 3;
     source.remove_prefix(3);
   }
   return make_pair(t, lerr_t());
 }
 
-token_t tokenise_literal_string(string_view &source, size_t &column, size_t end)
+Token tokenise_literal_string(string_view &source, size_t &column, size_t end)
 {
   source.remove_prefix(1);
-  token_t token{token_type_t::LITERAL_STRING, string(source.substr(0, end - 1)),
+  Token token{token_type_t::LITERAL_STRING, string(source.substr(0, end - 1)),
               column};
   source.remove_prefix(end);
   column += end + 1;
   return token;
 }
 
-lerr_t tokenise_buffer(string_view source, std::vector<token_t *> &tokens)
+lerr_t tokenise_buffer(string_view source, std::vector<Token *> &tokens)
 {
   size_t column = 0, line = 1;
   while (source.size() > 0)
   {
     bool is_token = true;
     char first = source[0];
-    token_t t{};
+    Token t{};
     if (isspace(first) || first == '\0')
     {
       size_t i;
@@ -359,7 +359,7 @@ lerr_t tokenise_buffer(string_view source, std::vector<token_t *> &tokens)
     }
     else if (first == '*')
    {
-      t = token_t(token_type_t::STAR, "", column);
+      t = Token(token_type_t::STAR, "", column);
       source.remove_prefix(1);
     }
     else if (first == '\"')
@@ -411,24 +411,24 @@ lerr_t tokenise_buffer(string_view source, std::vector<token_t *> &tokens)
 
     if (is_token)
     {
       t.line = line;
-      token_t *acc = new token_t(t);
+      Token *acc = new Token(t);
       tokens.push_back(acc);
     }
   }
   return lerr_t{};
 }
 
-std::ostream &operator<<(std::ostream &os, token_t &t)
+std::ostream &operator<<(std::ostream &os, Token &t)
 {
   return os << token_type_as_cstr(t.type) << "(`" << t.content << "`)@"
             << t.line << ", " << t.column;
 }
 
-token_t::token_t()
+Token::Token()
 {}
 
-token_t::token_t(token_type_t type, string content, size_t col, size_t line)
+Token::Token(token_type_t type, string content, size_t col, size_t line)
     : type{type}, column{col}, line{line}, content{content}
 {}
 
@@ -68,17 +68,17 @@ enum class token_type_t
 
 const char *token_type_as_cstr(token_type_t type);
 
-struct token_t
+struct Token
 {
   token_type_t type;
   size_t column, line;
   std::string content;
 
-  token_t();
-  token_t(token_type_t, std::string, size_t col = 0, size_t line = 0);
+  Token();
+  Token(token_type_t, std::string, size_t col = 0, size_t line = 0);
 };
 
-std::ostream &operator<<(std::ostream &, token_t &);
+std::ostream &operator<<(std::ostream &, Token &);
 
 enum class lerr_type_t
 {
@@ -101,6 +101,6 @@ struct lerr_t
 
 std::ostream &operator<<(std::ostream &, lerr_t &);
 
-lerr_t tokenise_buffer(std::string_view, std::vector<token_t *> &);
+lerr_t tokenise_buffer(std::string_view, std::vector<Token *> &);
 
 #endif
@@ -66,7 +66,7 @@ int main(int argc, const char *argv[])
   string source_str;
   string_view original;
   string_view src;
-  vector<token_t *> tokens, preprocessed_tokens;
+  vector<Token *> tokens, preprocessed_tokens;
   lerr_t lerr;
   pp_err_t pp_err;
 
@@ -20,17 +20,17 @@ using std::pair, std::vector, std::make_pair, std::string, std::string_view;
 
 #define VCLEAR(V) \
   std::for_each((V).begin(), (V).end(), \
-                [](token_t *t) \
+                [](Token *t) \
                 { \
                   delete t; \
                 });
 
-pp_err_t preprocess_use_blocks(const vector<token_t *> &tokens,
-                               vector<token_t *> &vec_out)
+pp_err_t preprocess_use_blocks(const vector<Token *> &tokens,
+                               vector<Token *> &vec_out)
 {
   for (size_t i = 0; i < tokens.size(); ++i)
   {
-    token_t *t = tokens[i];
+    Token *t = tokens[i];
     if (t->type == token_type_t::PP_USE)
     {
       if (i + 1 >= tokens.size() ||
@@ -41,8 +41,8 @@ pp_err_t preprocess_use_blocks(const vector<token_t *> &tokens,
         return pp_err_t(pp_err_type_t::EXPECTED_STRING, t);
       }
 
-      token_t *name = tokens[i + 1];
+      Token *name = tokens[i + 1];
       auto source = read_file(name->content.c_str());
       if (!source)
       {
         VCLEAR(vec_out);
@@ -50,7 +50,7 @@ pp_err_t preprocess_use_blocks(const vector<token_t *> &tokens,
         return pp_err_t(pp_err_type_t::FILE_NONEXISTENT, name);
       }
 
-      std::vector<token_t *> ftokens;
+      std::vector<Token *> ftokens;
       lerr_t lerr = tokenise_buffer(source.value(), ftokens);
       if (lerr.type != lerr_type_t::OK)
       {
@@ -64,7 +64,7 @@ pp_err_t preprocess_use_blocks(const vector<token_t *> &tokens,
         ++i;
     }
     else
-      vec_out.push_back(new token_t{*t});
+      vec_out.push_back(new Token{*t});
   }
   return pp_err_t();
 }
@@ -74,13 +74,13 @@ struct const_t
   size_t start, end;
 };
 
-pp_err_t preprocess_const_blocks(const vector<token_t *> &tokens,
-                                 vector<token_t *> &vec_out)
+pp_err_t preprocess_const_blocks(const vector<Token *> &tokens,
+                                 vector<Token *> &vec_out)
 {
   std::unordered_map<string_view, const_t> blocks;
   for (size_t i = 0; i < tokens.size(); ++i)
   {
-    token_t *t = tokens[i];
+    Token *t = tokens[i];
     if (t->type == token_type_t::PP_CONST)
     {
       string_view capture;
@@ -106,14 +106,14 @@ pp_err_t preprocess_const_blocks(const vector<token_t *> &tokens,
   if (blocks.size() == 0)
   {
     // Just construct a new vector and carry on
-    for (token_t *token : tokens)
-      vec_out.push_back(new token_t{*token});
+    for (Token *token : tokens)
+      vec_out.push_back(new Token{*token});
   }
   else
   {
     for (size_t i = 0; i < tokens.size(); ++i)
     {
-      token_t *token = tokens[i];
+      Token *token = tokens[i];
       // Skip the tokens that construct the const
       if (token->type == token_type_t::PP_CONST)
         for (; i < tokens.size() && tokens[i]->type != token_type_t::PP_END;
@@ -131,20 +131,19 @@ pp_err_t preprocess_const_blocks(const vector<token_t *> &tokens,
 
           const_t block = it->second;
           for (size_t i = block.start; i < block.end; ++i)
-            vec_out.push_back(new token_t{*tokens[i]});
+            vec_out.push_back(new Token{*tokens[i]});
         }
         else
-          vec_out.push_back(new token_t{*token});
+          vec_out.push_back(new Token{*token});
     }
   }
 
   return pp_err_t();
 }
 
-pp_err_t preprocesser(const vector<token_t *> &tokens,
-                      vector<token_t *> &vec_out)
+pp_err_t preprocesser(const vector<Token *> &tokens, vector<Token *> &vec_out)
 {
-  vector<token_t *> use_block_tokens;
+  vector<Token *> use_block_tokens;
   pp_err_t pperr = preprocess_use_blocks(tokens, use_block_tokens);
   if (pperr.type != pp_err_type_t::OK)
   {
@@ -152,7 +151,7 @@ pp_err_t preprocesser(const vector<token_t *> &tokens,
     return pperr;
   }
 
-  vector<token_t *> const_block_tokens;
+  vector<Token *> const_block_tokens;
   pperr = preprocess_const_blocks(use_block_tokens, const_block_tokens);
   if (pperr.type != pp_err_type_t::OK)
   {
@@ -168,8 +167,7 @@ pp_err_t preprocesser(const vector<token_t *> &tokens,
 }
 
 // TODO: Implement this
-pp_err_t preprocess_macro_blocks(const vector<token_t *> &,
-                                 vector<token_t *> &);
+pp_err_t preprocess_macro_blocks(const vector<Token *> &, vector<Token *> &);
 
 std::ostream &operator<<(std::ostream &os, pp_err_t &err)
 {
@@ -201,15 +199,15 @@ pp_err_t::pp_err_t() : reference{nullptr}, type{pp_err_type_t::OK}, lerr{}
 pp_err_t::pp_err_t(pp_err_type_t e) : reference{nullptr}, type{e}, lerr{}
 {}
 
-pp_err_t::pp_err_t(pp_err_type_t err, const token_t *ref)
+pp_err_t::pp_err_t(pp_err_type_t err, const Token *ref)
     : reference{ref}, type{err}
 {}
 
-pp_err_t::pp_err_t(pp_err_type_t err, const token_t *ref, lerr_t lerr)
+pp_err_t::pp_err_t(pp_err_type_t err, const Token *ref, lerr_t lerr)
     : reference{ref}, type{err}, lerr{lerr}
 {}
 
-// pp_unit_t::pp_unit_t(const token_t *const token) : resolved{false},
+// pp_unit_t::pp_unit_t(const Token *const token) : resolved{false},
 // token{token}
 // {}
 
@@ -30,33 +30,33 @@ enum pp_err_type_t
 
 struct pp_err_t
 {
-  const token_t *reference;
+  const Token *reference;
   pp_err_type_t type;
   lerr_t lerr;
 
   pp_err_t();
   pp_err_t(pp_err_type_t);
-  pp_err_t(pp_err_type_t, const token_t *);
-  pp_err_t(pp_err_type_t, const token_t *, lerr_t);
+  pp_err_t(pp_err_type_t, const Token *);
+  pp_err_t(pp_err_type_t, const Token *, lerr_t);
 };
 
 std::ostream &operator<<(std::ostream &, pp_err_t &);
 
 struct pp_unit_t
 {
-  const token_t *const token;
+  const Token *const token;
   struct
   {
     std::string_view name;
    std::vector<pp_unit_t> elements;
   } container;
 
-  pp_unit_t(const token_t *const);
+  pp_unit_t(const Token *const);
   pp_unit_t(std::string_view, std::vector<pp_unit_t>);
 };
 
-std::vector<pp_unit_t> tokens_to_units(const std::vector<token_t *> &);
+std::vector<pp_unit_t> tokens_to_units(const std::vector<Token *> &);
 pp_err_t preprocess_use(std::vector<pp_unit_t> &);
-pp_err_t preprocesser(const std::vector<token_t *> &, std::vector<token_t *> &);
+pp_err_t preprocesser(const std::vector<Token *> &, std::vector<Token *> &);
 
 #endif