lerr_t and lerr_type_t -> Lexer::Err and Lexer::Err::Type
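
In short: the free-standing lerr_t struct and lerr_type_t enum are folded into a single Lexer::Err struct whose error kind lives in a nested Err::Type enum, so call sites now read Err::Type::OK instead of lerr_type_t::OK. A condensed sketch of the new shape, assembled from the header hunk below (the real constructor stays defined out of line in the lexer's .cpp; it is inlined here only to keep the sketch self-contained):

    #include <cstddef>

    namespace Lexer
    {
    struct Err
    {
      size_t col, line;
      enum class Type
      {
        OK = 0,
        INVALID_CHAR_LITERAL,
        INVALID_CHAR_LITERAL_ESCAPE_SEQUENCE,
        INVALID_STRING_LITERAL,
        INVALID_NUMBER_LITERAL,
        INVALID_PREPROCESSOR_DIRECTIVE,
        UNKNOWN_LEXEME,
      } type;

      // Out of line in the real code; inlined here for the sketch.
      Err(Type type = Type::OK, size_t col = 0, size_t line = 0)
          : col{col}, line{line}, type{type}
      {}
    };
    } // namespace Lexer

main.cpp additionally aliases the new type as "using Lex_Err = Lexer::Err;" so its error variable keeps a short name.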
@@ -42,8 +42,8 @@ namespace Lexer
   return (src.size() > match.size() && src.substr(0, match.size()) == match);
 }
 
-pair<Token, lerr_t> tokenise_symbol(string_view &source, size_t &column,
-                                    size_t line)
+pair<Token, Err> tokenise_symbol(string_view &source, size_t &column,
+                                 size_t line)
 {
   auto end = source.find_first_not_of(VALID_SYMBOL);
   if (end == string::npos)
@@ -69,7 +69,7 @@ namespace Lexer
   else if (sym[0] == '%')
   {
     return make_pair(
-        t, lerr_t(lerr_type_t::INVALID_PREPROCESSOR_DIRECTIVE, column, line));
+        t, Err(Err::Type::INVALID_PREPROCESSOR_DIRECTIVE, column, line));
   }
   else if (sym.size() > 1 && sym[0] == '$')
   {
@@ -224,7 +224,7 @@ namespace Lexer
   t.content = sym;
   t.column = column;
   column += sym.size() - 1;
-  return make_pair(t, lerr_t());
+  return make_pair(t, Err());
 }
 
 Token tokenise_literal_number(string_view &source, size_t &column)
@@ -266,22 +266,20 @@ namespace Lexer
   return t;
 }
 
-pair<Token, lerr_t> tokenise_literal_char(string_view &source, size_t &column,
-                                          size_t &line)
+pair<Token, Err> tokenise_literal_char(string_view &source, size_t &column,
+                                       size_t &line)
 {
   Token t{};
   auto end = source.find('\'', 1);
   if (source.size() < 3 || end == 1 || end > 3)
-    return make_pair(t,
-                     lerr_t(lerr_type_t::INVALID_CHAR_LITERAL, column, line));
+    return make_pair(t, Err(Err::Type::INVALID_CHAR_LITERAL, column, line));
   else if (source[1] == '\\')
   {
     // Escape sequence
     char escape = '\0';
     if (source.size() < 4 || source[3] != '\'')
-      return make_pair(
-          t, lerr_t(lerr_type_t::INVALID_CHAR_LITERAL_ESCAPE_SEQUENCE, column,
-                    line));
+      return make_pair(t, Err(Err::Type::INVALID_CHAR_LITERAL_ESCAPE_SEQUENCE,
+                              column, line));
     switch (source[2])
     {
     case 'n':
@@ -298,9 +296,8 @@ namespace Lexer
       break;
     default:
      column += 2;
-      return make_pair(
-          t, lerr_t(lerr_type_t::INVALID_CHAR_LITERAL_ESCAPE_SEQUENCE, column,
-                    line));
+      return make_pair(t, Err(Err::Type::INVALID_CHAR_LITERAL_ESCAPE_SEQUENCE,
+                              column, line));
       break;
     }
     t = Token{Token::Type::LITERAL_CHAR, std::to_string(escape), column};
@@ -313,7 +310,7 @@ namespace Lexer
     column += 3;
     source.remove_prefix(3);
   }
-  return make_pair(t, lerr_t());
+  return make_pair(t, Err());
 }
 
 Token tokenise_literal_string(string_view &source, size_t &column, size_t end)
@@ -326,7 +323,7 @@ namespace Lexer
   return token;
 }
 
-lerr_t tokenise_buffer(string_view source, std::vector<Token *> &tokens)
+Err tokenise_buffer(string_view source, std::vector<Token *> &tokens)
 {
   size_t column = 0, line = 1;
   while (source.size() > 0)
@@ -371,14 +368,14 @@ namespace Lexer
     {
       auto end = source.find('\"', 1);
       if (end == string::npos)
-        return lerr_t(lerr_type_t::INVALID_STRING_LITERAL, column, line);
+        return Err(Err::Type::INVALID_STRING_LITERAL, column, line);
       t = tokenise_literal_string(source, column, end);
     }
     else if (first == '\'')
     {
-      lerr_t lerr;
+      Err lerr;
       std::tie(t, lerr) = tokenise_literal_char(source, column, line);
-      if (lerr.type != lerr_type_t::OK)
+      if (lerr.type != Err::Type::OK)
         return lerr;
     }
     else if (isdigit(first) ||
@@ -388,7 +385,7 @@ namespace Lexer
       if (end == string::npos)
         end = source.size() - 1;
       else if (end != string::npos && !(isspace(source[end])))
-        return lerr_t(lerr_type_t::INVALID_NUMBER_LITERAL, column, line);
+        return Err(Err::Type::INVALID_NUMBER_LITERAL, column, line);
       t = tokenise_literal_number(source, column);
     }
     else if (first == '0' && source.size() > 2 && source[1] == 'x' &&
@@ -398,20 +395,20 @@ namespace Lexer
       if (end == string::npos)
         end = source.size() - 1;
       else if (end != string::npos && !(isspace(source[end])))
-        return lerr_t(lerr_type_t::INVALID_NUMBER_LITERAL, column, line);
+        return Err(Err::Type::INVALID_NUMBER_LITERAL, column, line);
       t = tokenise_literal_hex(source, column);
     }
     else if (is_char_in_s(first, VALID_SYMBOL))
     {
-      lerr_t lerr;
+      Err lerr;
       std::tie(t, lerr) = tokenise_symbol(source, column, line);
-      if (lerr.type != lerr_type_t::OK)
+      if (lerr.type != Err::Type::OK)
         return lerr;
     }
     else
     {
       ++column;
-      return lerr_t{lerr_type_t::UNKNOWN_LEXEME, column, line};
+      return Err{Err::Type::UNKNOWN_LEXEME, column, line};
     }
 
     if (is_token)
@@ -421,7 +418,7 @@ namespace Lexer
       tokens.push_back(acc);
     }
   }
-  return lerr_t{};
+  return Err{};
 }
 
 std::ostream &operator<<(std::ostream &os, Token &t)
@@ -533,30 +530,30 @@ namespace Lexer
   return "";
 }
 
-std::ostream &operator<<(std::ostream &os, lerr_t &lerr)
+std::ostream &operator<<(std::ostream &os, Err &lerr)
 {
   os << lerr.line << ":" << lerr.col << ": ";
   switch (lerr.type)
   {
-  case lerr_type_t::OK:
+  case Err::Type::OK:
     os << "OK";
     break;
-  case lerr_type_t::INVALID_CHAR_LITERAL:
+  case Err::Type::INVALID_CHAR_LITERAL:
     os << "INVALID_CHAR_LITERAL";
     break;
-  case lerr_type_t::INVALID_CHAR_LITERAL_ESCAPE_SEQUENCE:
+  case Err::Type::INVALID_CHAR_LITERAL_ESCAPE_SEQUENCE:
     os << "INVALID_CHAR_LITERAL_ESCAPE_SEQUENCE";
     break;
-  case lerr_type_t::INVALID_STRING_LITERAL:
+  case Err::Type::INVALID_STRING_LITERAL:
     os << "INVALID_STRING_LITERAL";
     break;
-  case lerr_type_t::INVALID_NUMBER_LITERAL:
+  case Err::Type::INVALID_NUMBER_LITERAL:
     os << "INVALID_NUMBER_LITERAL";
     break;
-  case lerr_type_t::INVALID_PREPROCESSOR_DIRECTIVE:
+  case Err::Type::INVALID_PREPROCESSOR_DIRECTIVE:
     os << "INVALID_PREPROCESSOR_DIRECTIVE";
     break;
-  case lerr_type_t::UNKNOWN_LEXEME:
+  case Err::Type::UNKNOWN_LEXEME:
     os << "UNKNOWN_LEXEME";
     break;
   default:
@@ -565,7 +562,7 @@ namespace Lexer
   return os;
 }
 
-lerr_t::lerr_t(lerr_type_t type, size_t col, size_t line)
+Err::Err(Err::Type type, size_t col, size_t line)
     : col{col}, line{line}, type{type}
 {}
 } // namespace Lexer

@@ -80,28 +80,26 @@ namespace Lexer
 
 std::ostream &operator<<(std::ostream &, Token &);
 
-enum class lerr_type_t
-{
-  OK = 0,
-  INVALID_CHAR_LITERAL,
-  INVALID_CHAR_LITERAL_ESCAPE_SEQUENCE,
-  INVALID_STRING_LITERAL,
-  INVALID_NUMBER_LITERAL,
-  INVALID_PREPROCESSOR_DIRECTIVE,
-  UNKNOWN_LEXEME,
-};
-
-struct lerr_t
+struct Err
 {
   size_t col, line;
-  lerr_type_t type;
+  enum class Type
+  {
+    OK = 0,
+    INVALID_CHAR_LITERAL,
+    INVALID_CHAR_LITERAL_ESCAPE_SEQUENCE,
+    INVALID_STRING_LITERAL,
+    INVALID_NUMBER_LITERAL,
+    INVALID_PREPROCESSOR_DIRECTIVE,
+    UNKNOWN_LEXEME,
+  } type;
 
-  lerr_t(lerr_type_t type = lerr_type_t::OK, size_t col = 0, size_t line = 0);
+  Err(Type type = Type::OK, size_t col = 0, size_t line = 0);
 };
 
-std::ostream &operator<<(std::ostream &, lerr_t &);
+std::ostream &operator<<(std::ostream &, Err &);
 
-lerr_t tokenise_buffer(std::string_view, std::vector<Token *> &);
+Err tokenise_buffer(std::string_view, std::vector<Token *> &);
 } // namespace Lexer
 
 #endif

@@ -30,7 +30,8 @@ extern "C"
 using std::cout, std::cerr, std::endl;
 using std::pair, std::string, std::string_view, std::vector;
 
-using Lexer::Token, Lexer::lerr_t, Lexer::lerr_type_t;
+using Lexer::Token;
+using Lex_Err = Lexer::Err;
 
 void usage(const char *program_name, FILE *fp)
 {
@@ -69,7 +70,7 @@ int main(int argc, const char *argv[])
   string_view original;
   string_view src;
   vector<Token *> tokens, preprocessed_tokens;
-  lerr_t lerr;
+  Lex_Err lerr;
   pp_err_t pp_err;
 
   // Highest scoped variable cut off point
@@ -86,7 +87,7 @@ int main(int argc, const char *argv[])
   src = string_view{source_str};
   lerr = tokenise_buffer(src, tokens);
 
-  if (lerr.type != lerr_type_t::OK)
+  if (lerr.type != Lexer::Err::Type::OK)
   {
     cerr << source_name << ":" << lerr << endl;
     ret = 255 - static_cast<int>(lerr.type);

@@ -18,7 +18,7 @@
 
 using std::pair, std::vector, std::make_pair, std::string, std::string_view;
 
-using Lexer::Token, Lexer::lerr_t, Lexer::lerr_type_t;
+using Lexer::Token, Lexer::Err;
 
 #define VCLEAR(V) \
   std::for_each((V).begin(), (V).end(), \
@@ -53,8 +53,8 @@ pp_err_t preprocess_use_blocks(const vector<Token *> &tokens,
     }
 
     std::vector<Token *> ftokens;
-    lerr_t lerr = tokenise_buffer(source.value(), ftokens);
-    if (lerr.type != lerr_type_t::OK)
+    Err lerr = tokenise_buffer(source.value(), ftokens);
+    if (lerr.type != Err::Type::OK)
     {
       VCLEAR(vec_out);
       vec_out.clear();
@@ -204,7 +204,7 @@ pp_err_t::pp_err_t(pp_err_type_t err, const Token *ref)
     : reference{ref}, type{err}
 {}
 
-pp_err_t::pp_err_t(pp_err_type_t err, const Token *ref, lerr_t lerr)
+pp_err_t::pp_err_t(pp_err_type_t err, const Token *ref, Err lerr)
     : reference{ref}, type{err}, lerr{lerr}
 {}
 

@@ -32,12 +32,12 @@ struct pp_err_t
 {
   const Lexer::Token *reference;
   pp_err_type_t type;
-  Lexer::lerr_t lerr;
+  Lexer::Err lerr;
 
   pp_err_t();
   pp_err_t(pp_err_type_t);
   pp_err_t(pp_err_type_t, const Lexer::Token *);
-  pp_err_t(pp_err_type_t, const Lexer::Token *, Lexer::lerr_t);
+  pp_err_t(pp_err_type_t, const Lexer::Token *, Lexer::Err);
 };
 
 std::ostream &operator<<(std::ostream &, pp_err_t &);
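
For reference, a caller of the renamed API follows the same pattern main.cpp uses above. The helper below is illustrative only and assumes the lexer header from this repository is included (its path is not shown in this diff); lex_or_report is a hypothetical name:

    #include <iostream>
    #include <string_view>
    #include <vector>
    // Assumes the Lexer header from this repository is included here; it
    // declares Token, Err, tokenise_buffer and operator<< for Err.

    // Lex a buffer and report any error the way main.cpp does.
    int lex_or_report(std::string_view src)
    {
      std::vector<Lexer::Token *> tokens;
      Lexer::Err lerr = Lexer::tokenise_buffer(src, tokens);
      if (lerr.type != Lexer::Err::Type::OK)
      {
        // operator<<(std::ostream &, Err &) prints "line:col: REASON"
        std::cerr << lerr << std::endl;
        return 255 - static_cast<int>(lerr.type);
      }
      return 0;
    }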