/* Copyright (C) 2023 Aryadev Chavali

 * You may distribute and modify this code under the terms of the
 * GPLv2 license.  You should have received a copy of the GPLv2
 * license with this file.  If not, please write to:
 * aryadev@aryadevchavali.com.

 * Created: 2023-10-23
 * Author: Aryadev Chavali
 * Description: Assembly source code compiler, targeting OVM
 */

#include <stdio.h>
#include <stdlib.h>

#include <lib/darr.h>

#include "./lexer.h"
#include "./parser.h"

void usage(const char *program_name, FILE *fp)
{
  fprintf(fp,
          "Usage: %s FILE OUT-FILE\n"
          "\tFILE: Source code to compile\n"
          "\tOUT-FILE: Name of file to store bytecode\n",
          program_name);
}

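/* Drive the whole compilation: read the source file into a buffer,
 * tokenise it, parse the token stream into instructions, then write
 * the instructions out as bytecode.
 */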
int main(int argc, char *argv[])
{
  int ret           = 0;
  char *source_file = "";
  char *out_file    = "";
  if (argc < 3)
  {
    usage(argv[0], stderr);
    return 1;
  }

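  // Pull the file names out of the arguments, then read the whole
  // source file into memory in one go.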
  source_file = argv[1];
  out_file    = argv[2];
  FILE *fp    = fopen(source_file, "rb");
  if (!fp)
  {
    fprintf(stderr, "%s: Could not open file for reading\n", source_file);
    return 1;
  }
  darr_t buffer = darr_read_file(fp);
  fclose(fp);

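  // Tokenise the raw bytes into a stream of tokens for the parser.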
  token_stream_t tokens = {0};
  lerr_t lex_error      = tokenise_buffer(&buffer, &tokens);
  if (lex_error)
  {
    // Compute the error's line and column by counting newlines by hand
    size_t column = 0, line = 1;
    for (size_t i = 0; i < buffer.used; ++i)
    {
      if (buffer.data[i] == '\n')
      {
        column = 0;
        ++line;
      }
      else
        ++column;
    }
    fprintf(stderr, "%s:%zu:%zu: %s\n", source_file, line, column,
            lerr_as_cstr(lex_error));
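    // Map lexer errors onto the upper end of the exit-code range; the
    // usage error above already exits with 1.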
    ret = 255 - lex_error;
    free(buffer.data);
    goto end;
  }
#if VERBOSE >= 1
  printf("[%sTOKENISER%s]: %lu bytes -> %lu tokens\n", TERM_GREEN, TERM_RESET,
         buffer.used, tokens.available);
#endif
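  // The token stream owns its own copies of the lexemes, so the raw
  // source buffer can be freed now.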
  free(buffer.data);

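  // Parse the token stream into a flat array of instructions.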
  size_t number        = 0;
  inst_t *instructions = NULL;
  perr_t parse_error   = parse_stream(&tokens, &instructions, &number);
  if (parse_error)
  {
    size_t column = 0;
    size_t line   = 0;
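    // If the parser stopped on a token, report that token's recorded
    // position; otherwise fall back to 0:0.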
    if (tokens.used < tokens.available)
    {
      token_t t = TOKEN_STREAM_AT(tokens.data, tokens.used);
      column    = t.column;
      line      = t.line;
    }
    fprintf(stderr, "%s:%zu:%zu: %s\n", source_file, line, column,
            perr_as_cstr(parse_error));
    ret = 255 - parse_error;
    goto end;
  }
#if VERBOSE >= 1
  printf("[%sPARSER%s]: %lu tokens -> %lu instructions\n", TERM_GREEN,
         TERM_RESET, tokens.available, number);
#endif

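  // Serialise the parsed instructions to the output file as bytecode.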
  fp = fopen(out_file, "wb");
  if (!fp)
  {
    fprintf(stderr, "%s: Could not open file for writing\n", out_file);
    ret = 1;
    goto end;
  }
  insts_write_bytecode_file(instructions, number, fp);
  fclose(fp);
#if VERBOSE >= 1
  printf("[%sCOMPILER%s]: Wrote bytecode to `%s`\n", TERM_GREEN, TERM_RESET,
         out_file);
#endif
end:
  // Free the tokens and parsed data
  if (tokens.data)
  {
    for (size_t i = 0; i < tokens.available; ++i)
      free(TOKEN_STREAM_AT(tokens.data, i).str);
    free(tokens.data);
  }
  if (instructions)
    free(instructions);
  return ret;
}