Diffstat (limited to 'src/main.c')
-rw-r--r--  src/main.c  96
1 file changed, 44 insertions(+), 52 deletions(-)
diff --git a/src/main.c b/src/main.c
index 618d325..65c05ff 100644
--- a/src/main.c
+++ b/src/main.c
@@ -12,7 +12,26 @@
 // TODO: unions
 // TODO: embed (binary file) and include (source file)
 // TODO: revisit ast parsing for pointers and arrays (I think I'm missing corner
 // cases).
+// TODO: fix semantics for the following expression:
+//     let b = int
+//     a type shouldn't be a valid symbol name
+// TODO: consider making all the types PascalCase: Int F64 Str...
+// TODO: add a `const` keyword that can only take literals or constexpr values.
+// TODO: "first class functions" via function pointers
+// TODO: convenient function calls per data type instead of methods:
+//     fun add(a: int, b: int): int a + b
+//     add(12, 34) == 12.add(34)
+//     concat(str, str): str
+//     "hello ".concat("world") ; "hello world"
+// TODO: more numeric types
+// TODO: structs and user defined types
+// TODO: tail-call optimization.
+// TODO: constant folding
+// TODO: constexpr evaluation
+// TODO: shortcircuit evaluation for && and || operators.
+// TODO: casting on demand (1:u16, 0x123:ptr, "hi":int ??? how to deal with
+//     unsafe casts?)
 
 typedef enum ExecMode {
     RUN_NORMAL,
@@ -44,6 +63,9 @@ process_file(Str path) {
     sz errors = 0;
 
     // Lexer.
+#if DEBUG == 1
+    println("lexing...");
+#endif
     Scanner scanner = {.str = file.data};
     Token *tokens = NULL;
     Token tok = {0};
@@ -67,6 +89,9 @@ process_file(Str path) {
     }
 
     // Parser.
+#if DEBUG == 1
+    println("parsing...");
+#endif
     Parser parser = {
         .tokens = tokens,
         .storage = &lexer_arena,
@@ -94,6 +119,9 @@ process_file(Str path) {
     }
 
     // Semantic analysis.
+#if DEBUG == 1
+    println("semantic analysis...");
+#endif
     Analyzer analyzer = (Analyzer){
         .storage = &lexer_arena,
         .file_name = path,
@@ -166,63 +194,27 @@ process_file(Str path) {
     // TODO: Type checking.
 
     // Compile roots.
+#if DEBUG == 1
+    println("compilation...");
+#endif
     Arena bytecode_arena = arena_create(LEXER_MEM, os_allocator);
-    Chunk chunk = {.file_name = path, .storage = &bytecode_arena};
-    array_zero(chunk.constants, 256, &bytecode_arena);
-    array_zero(chunk.code, 0xffff, &bytecode_arena);
-    sz n_roots = array_size(parser.nodes);
-    CompResult res;
-    for (sz i = 0; i < n_roots; i++) {
-        // The parser stores the root nodes as a stack.
-        Node *root = parser.nodes[i];
-        res = compile_expr(&chunk, root);
-        if (res.type == COMP_ERR) {
-            eprintln("compilation error...");
-            exit(EXIT_FAILURE);
-        }
-    }
-    sz res_reg = 0;
-    switch (res.type) {
-        case COMP_CONST: {
-            res_reg = chunk.reg_idx++;
-            Instruction inst =
-                (Instruction){.op = OP_LD64K, .dst = res_reg, .a = res.idx};
-            array_push(chunk.code, inst, chunk.storage);
-        } break;
-        case COMP_REG: {
-            res_reg = res.idx;
-        } break;
-        default: break;
-    }
-    // After we are done move the last result to r0 for printing.
-    Instruction halt = (Instruction){.op = OP_HALT, .dst = res_reg};
-    array_push(chunk.code, halt, &bytecode_arena);
-
-    if (chunk.const_idx >= 256) {
-        eprintln("too many constants on chunk %s", chunk.id);
-        exit(EXIT_FAILURE);
-    }
-    if (chunk.str_idx >= 256) {
-        eprintln("too many strings on chunk %s", chunk.id);
-        exit(EXIT_FAILURE);
-    }
-    if (chunk.reg_idx >= 256) {
-        eprintln("too many registers used on chunk %s", chunk.id);
-        exit(EXIT_FAILURE);
-    }
-
-    disassemble_chunk(chunk);
+    Compiler compiler = {
+        .file_name = path,
+        .storage = &bytecode_arena,
+        .integer_types = analyzer.integer_types,
+        .numeric_types = analyzer.numeric_types,
+    };
+    bytecode_compiler(&compiler, parser);
+    disassemble_chunk(compiler.main_chunk);
 
     // Run bytecode on VM.
     VM vm = {0};
-    vm_init(&vm, &chunk);
-    // println("VM REGISTERS BEFORE:\n%{Mem}",
-    //         &(Array){.mem = (u8 *)&vm.regs, sizeof(vm.regs)});
+    vm_init(&vm, &compiler.main_chunk);
     vm_run(&vm);
-    println("VM REGISTERS AFTER:\n%{Mem}",
-            &(Array){.mem = (u8 *)&vm.regs, sizeof(vm.regs)});
-    println("VM MEMORY AFTER:\n%{Mem}",
-            &(Array){.mem = (u8 *)&vm.stack, sizeof(vm.stack)});
+#if DEBUG == 1
+    println("MEMORY:\n%{Mem}",
+            &(Array){.mem = (u8 *)&vm.stack, sizeof(vm.stack)});
+#endif
 
 #if DEBUG == 1
     println("Space used: %{Arena}", &lexer_arena);