lexer now produces a vector of heap-allocated tokens

This removes the problem of possibly expensive copies happening implicitly
whenever tokens produced by the lexer are passed around (which C++ just...
does): now we hold pointers, and copying a pointer is cheap no matter how
heavy the token itself is.

I want expensive stuff to be done by me and for a reason: I want to
be holding the shotgun.
2024-04-15 04:42:24 +06:30
parent 062ed12278
commit f01d64b5f4
2 changed files with 5 additions and 4 deletions
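For context, a minimal sketch of the copy cost the message is talking about. The token_t shape below is a hypothetical stand-in for the real one in the lexer header, and both helper functions are illustration only, not code from this repository:

#include <string>
#include <vector>

struct token_t
{
  std::string content; // copying this may heap-allocate behind your back
  size_t line, column;
};

// Before: push_back copy-constructs the whole token (string and all).
void push_by_value(std::vector<token_t> &tokens, const token_t &t)
{
  tokens.push_back(t);
}

// After: one explicit, deliberate copy onto the heap; from then on the
// vector only ever shuffles pointers around.
void push_by_pointer(std::vector<token_t *> &tokens, const token_t &t)
{
  tokens.push_back(new token_t(t));
}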


@@ -310,7 +310,7 @@ token_t tokenise_literal_string(string_view &source, size_t &column, size_t end)
   return token;
 }
-lerr_t tokenise_buffer(string_view source, std::vector<token_t> &tokens)
+lerr_t tokenise_buffer(string_view source, std::vector<token_t *> &tokens)
 {
   size_t column = 0, line = 1;
   while (source.size() > 0)
@@ -393,8 +393,9 @@ lerr_t tokenise_buffer(string_view source, std::vector<token_t> &tokens)
     }
     if (is_token)
     {
-      t.line = line;
-      tokens.push_back(t);
+      t.line = line;
+      token_t *acc = new token_t(t);
+      tokens.push_back(acc);
     }
   }
   return lerr_t::OK;
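
With raw pointers in the vector, ownership becomes manual: whoever consumes the tokens has to delete them. A sketch of what a caller might look like; the cleanup loop is an assumed convention, not something shown in this commit:

#include <string_view>
#include <vector>

void run_lexer(std::string_view source)
{
  std::vector<token_t *> tokens;
  if (tokenise_buffer(source, tokens) != lerr_t::OK)
  {
    // handle the lexer error ...
  }
  // ... use the tokens ...

  // The vector holds raw owning pointers now, so the caller has to
  // free them by hand (assumed convention, not part of this commit).
  for (token_t *token : tokens)
    delete token;
}

A std::vector<std::unique_ptr<token_t>> would make the cleanup automatic; keeping raw pointers means the delete stays explicit, which fits the "I want to be holding the shotgun" stance.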


@@ -91,6 +91,6 @@ enum class lerr_t
 };
 const char *lerr_as_cstr(lerr_t);
-lerr_t tokenise_buffer(std::string_view, std::vector<token_t> &);
+lerr_t tokenise_buffer(std::string_view, std::vector<token_t *> &);
 #endif