diff --git a/src/preprocesser.cpp b/src/preprocesser.cpp
index 2712aa5..d1dd892 100644
--- a/src/preprocesser.cpp
+++ b/src/preprocesser.cpp
@@ -129,14 +129,14 @@ namespace Preprocesser
     Lexer::Err lexer_err = Lexer::tokenise_buffer(tokens[i + 1]->content,
                                                   content.value(), body);
-    if (lexer_err.type != LET::OK)
-      return new Err{ET::IN_FILE_LEXING, token, nullptr, lexer_err};
-
-    // Here we add the tokens, freshly allocated, to the bag so we can
-    // free it later
+    // Add tokens to the bag for deallocation later
+    // NOTE: We do this before errors so no memory leaks happen
     new_token_bag.insert(std::end(new_token_bag), std::begin(body),
                          std::end(body));
+    if (lexer_err.type != LET::OK)
+      return new Err{ET::IN_FILE_LEXING, token, nullptr, lexer_err};
+
     file_map[name].body = body;
     std::vector body_units;
     Err *err = preprocess(body, body_units, new_token_bag, const_map,
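
The pattern this change applies is easy to show in isolation: hand every fresh allocation to the owning container before any early return, so the error path cannot leak it. The sketch below is a minimal standalone illustration, not the project's real API — `Token`, `LexStatus`, `lex_words`, and `token_bag` are hypothetical stand-ins for the `Lexer`/`Err` machinery in `preprocesser.cpp`.

```cpp
#include <iostream>
#include <iterator>
#include <string>
#include <vector>

// Hypothetical stand-ins for the project's token and error types.
struct Token
{
  std::string content;
};

enum class LexStatus
{
  OK,
  ERR
};

// Toy lexer: allocates one Token per word and reports an error if the
// input contains '!'. Note that it allocates even on the error path.
LexStatus lex_words(const std::string &src, std::vector<Token *> &out)
{
  std::string word;
  for (char c : src)
  {
    if (c == ' ')
    {
      if (!word.empty())
        out.push_back(new Token{word});
      word.clear();
    }
    else
      word += c;
  }
  if (!word.empty())
    out.push_back(new Token{word});
  return src.find('!') == std::string::npos ? LexStatus::OK : LexStatus::ERR;
}

int main()
{
  std::vector<Token *> token_bag; // owns every Token ever allocated

  std::vector<Token *> body;
  LexStatus status = lex_words("broken ! input", body);

  // As in the diff: transfer the fresh allocations to the bag *before*
  // the error check, so the early bail-out below cannot leak them.
  token_bag.insert(std::end(token_bag), std::begin(body), std::end(body));

  if (status != LexStatus::OK)
    std::cerr << "lexing failed\n"; // bail out; the bag still owns the tokens

  for (Token *t : token_bag)
    delete t; // single cleanup point frees tokens from both paths
  return 0;
}
```

The reordered `new_token_bag.insert` above achieves the same thing: ownership moves into the bag as soon as the tokens exist, so the `IN_FILE_LEXING` early return no longer leaks them.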