Diffstat (limited to 'src/main.c')
-rw-r--r--  src/main.c  129
1 file changed, 71 insertions(+), 58 deletions(-)
diff --git a/src/main.c b/src/main.c
index 0e453c6..1ede868 100644
--- a/src/main.c
+++ b/src/main.c
@@ -12,7 +12,49 @@
 // TODO: unions
 // TODO: embed (binary file) and include (source file)
 // TODO: revisit ast parsing for pointers and arrays (I think I'm missing corner
-//       cases).
+//       cases):
+//       []Int vs Int[], @sym vs sym@. On the right dereference, on the left
+//       declaration or taking the address/reference of a variable. Similar
+//       to what Odin does.
+//       let a: @Int ; pointer to int
+//       let b: @@Int ; pointer to pointer to int (etc.)
+//       let c: [123]Int ; static array -> @Int
+//       let m: [32][32]Int ; multi-dimensional-arrays
+//       let d: []Int ; slice / view -> struct Slice { u8* mem ; sz size ; }
+//       let e: [...]Int ; dynamic array -> struct DArry { u8* mem ; sz size ; sz cap ; }
+//       let f: [123]@Int ; static array of int pointers [](@Int)
+//       let g: @[123]Int ; pointer to a static array of integers @([123]Int)
+//       let h: #[Str:Int] ; Hash map of string keys and integer values
+//       let i: #[Str:@Int] ; Hash map of string keys and pointers to integer
+//       let j: #[@Str:Int] ; Hash map of string pointers to integers
+//       let k: @#[Str:Int] ; Pointer to a hash map of string to ints
+//       let l: (Int Int : Int) ; Function pointer == @(fun(Int,Int):Int)
+// TODO: constexpr or const expressions could be evaluated with the bytecode
+//       interpreter if we are performing compilation.
+// TODO: "first class functions" via function pointers
+// TODO: convenient function calls per data type instead of methods:
+//       fun add(a: int, b: int): int a + b
+//       add(12, 34) == 12.add(34)
+//       concat(str, str): str
+//       "hello ".concat("world") ; "hello world"
+// TODO: structs and user defined types
+// TODO: constant folding
+// TODO: casting on demand (1:u16, 0x123:ptr, "hi":int ??? how to deal with
+//       unsafe casts?)
+// TODO: extract table generation from compiler into the semantic analyzer,
+//       compiler should only focus on translating things to bytecode/linear ir.
+// TODO: sizeof function doesn't work well for arrays and variables.
+// TODO: store more parameters for variables, for example
+// TODO: don't allow overwriting default types with other symbols within a scope
+//       {
+//           let Int = 0.0
+//       }
+// TODO: This shouldn't be possible
+//       let b = 4
+//       let a: [32]U8 = @b
+// TODO: Properly reference and dereference multiple chains of arrays:
+//       let a: [32][32]Int ; The size is not correct at the moment for this.
+//       a[1][2] ?
 
 typedef enum ExecMode {
     RUN_NORMAL,
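The slice and dynamic-array layouts named in the TODO block above already hint at their runtime shape. A minimal C sketch of those two structs, assuming the project's u8/sz typedefs stand for an unsigned byte and a signed size type (an assumption, not confirmed by this diff):

#include <stdint.h>
#include <stddef.h>

typedef uint8_t   u8;  /* assumption: project-wide byte type */
typedef ptrdiff_t sz;  /* assumption: project-wide size type */

/* []Int -> fat pointer: base address plus element count. */
typedef struct Slice {
    u8 *mem;
    sz  size;
} Slice;

/* [...]Int -> growable array: a Slice plus a capacity field. */
typedef struct DArry {
    u8 *mem;
    sz  size;
    sz  cap;
} DArry;

int main(void) {
    int   buf[123] = {0};                         /* [123]Int: static array      */
    Slice view = {.mem = (u8 *)buf, .size = 123}; /* []Int: view over the buffer */
    (void)view;
    return 0;
}

The static-array case then needs no header at all, which matches the "static array -> @Int" note in the TODO list.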
@@ -44,6 +86,9 @@ process_file(Str path) {
     sz errors = 0;
 
     // Lexer.
+#if DEBUG == 1
+    println("lexing...");
+#endif
     Scanner scanner = {.str = file.data};
     Token *tokens = NULL;
     Token tok = {0};
@@ -67,6 +112,9 @@ process_file(Str path) {
     }
 
     // Parser.
+#if DEBUG == 1
+    println("parsing...");
+#endif
     Parser parser = {
         .tokens = tokens,
         .storage = &lexer_arena,
@@ -80,9 +128,6 @@ process_file(Str path) {
         println("ROOT: %d", ctr++);
 #endif
         parse_expr(&parser, PREC_LOW);
-        if (parser.panic) {
-            break;
-        }
     }
     parse_consume(&parser, TOK_EOF, cstr("expected end of file"));
     if (parser.err) {
@@ -94,6 +139,9 @@ process_file(Str path) {
     }
 
     // Semantic analysis.
+#if DEBUG == 1
+    println("semantic analysis...");
+#endif
     Analyzer analyzer = (Analyzer){
         .storage = &lexer_arena,
         .file_name = path,
@@ -163,66 +211,31 @@ process_file(Str path) {
     }
 #endif
 
-    // TODO: Type checking.
-
     // Compile roots.
+#if DEBUG == 1
+    println("compilation...");
+#endif
     Arena bytecode_arena = arena_create(LEXER_MEM, os_allocator);
-    Chunk chunk = {.file_name = path, .storage = &bytecode_arena};
-    array_zero(chunk.constants, 256, &bytecode_arena);
-    array_zero(chunk.code, 0xffff, &bytecode_arena);
-    sz n_roots = array_size(parser.nodes);
-    CompResult res = {0};
-    for (sz i = 0; i < n_roots; i++) {
-        // The parser stores the root nodes as a stack.
-        Node *root = parser.nodes[i];
-        res = compile_expr(&chunk, root);
-        if (res.type == COMP_ERR) {
-            eprintln("compilation error...");
-            exit(EXIT_FAILURE);
-        }
-    }
-    sz res_reg = 0;
-    switch (res.type) {
-        case COMP_CONST: {
-            res_reg = chunk.reg_idx++;
-            Instruction inst =
-                (Instruction){.op = OP_LD64K, .dst = res_reg, .a = res.idx};
-            array_push(chunk.code, inst, chunk.storage);
-        } break;
-        case COMP_REG: {
-            res_reg = res.idx;
-        } break;
-        default: break;
-    }
-    // After we are done move the last result to r0 for printing.
-    Instruction halt = (Instruction){.op = OP_HALT, .dst = res_reg};
-    array_push(chunk.code, halt, &bytecode_arena);
-
-    if (chunk.const_idx >= 256) {
-        eprintln("too many constants on chunk %s", chunk.id);
-        exit(EXIT_FAILURE);
-    }
-    if (chunk.str_idx >= 256) {
-        eprintln("too many strings on chunk %s", chunk.id);
-        exit(EXIT_FAILURE);
-    }
-    if (chunk.reg_idx >= 256) {
-        eprintln("too many registers used on chunk %s", chunk.id);
-        exit(EXIT_FAILURE);
-    }
-
-    disassemble_chunk(chunk);
+    Compiler compiler = {
+        .file_name = path,
+        .storage = &bytecode_arena,
+        .integer_types = analyzer.integer_types,
+        .float_types = analyzer.float_types,
+        .numeric_types = analyzer.numeric_types,
+        .lab_pre = -1,
+        .lab_post = -1,
+    };
+    bytecode_compiler(&compiler, parser);
+    disassemble_chunk(compiler.main_chunk);
 
     // Run bytecode on VM.
     VM vm = {0};
-    vm_init(&vm, &chunk);
-    // println("VM REGISTERS BEFORE:\n%{Mem}",
-    //         &(Array){.mem = (u8 *)&vm.regs, sizeof(vm.regs)});
+    vm_init(&vm, &compiler.main_chunk);
     vm_run(&vm);
-    // println("VM REGISTERS AFTER:\n%{Mem}",
-    //         &(Array){.mem = (u8 *)&vm.regs, sizeof(vm.regs)});
-    // println("VM MEMORY AFTER:\n%{Mem}",
-    //         &(Array){.mem = (u8 *)&vm.stack, sizeof(vm.stack)});
+#if DEBUG == 1
+    println("MEMORY:\n%{Mem}",
+            &(Array){.mem = (u8 *)&vm.stack, sizeof(vm.stack)});
+#endif
 
 #if DEBUG == 1
     println("Space used: %{Arena}", &lexer_arena);
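The removed block, together with the vm_init/vm_run hand-off that survives it, shows the back end's overall shape: compile each root into a chunk of register instructions, terminate with OP_HALT naming the result register, then let the VM execute the chunk. Below is a self-contained toy version of that pattern, using only the two opcodes visible in this diff and made-up stand-ins for the project's Chunk/VM types; it is illustrative only, not the project's implementation.

#include <stdint.h>
#include <stddef.h>
#include <stdio.h>

/* Toy opcodes mirroring the ones visible in the diff. */
typedef enum { OP_LD64K, OP_HALT } Op;
typedef struct { Op op; uint8_t dst; uint8_t a; } Instruction;

/* Toy chunk: instruction stream plus a small constant pool. */
typedef struct {
    Instruction code[16];
    int64_t     constants[16];
    size_t      n_code;
} Chunk;

/* OP_LD64K loads constants[a] into register dst; OP_HALT stops and reports
 * the value left in register dst, as the removed code arranged before the
 * VM run. The 256-register file matches the chunk.reg_idx limit above. */
static int64_t
vm_run_toy(const Chunk *chunk) {
    int64_t regs[256] = {0};
    for (size_t pc = 0; pc < chunk->n_code; pc++) {
        Instruction inst = chunk->code[pc];
        switch (inst.op) {
        case OP_LD64K: regs[inst.dst] = chunk->constants[inst.a]; break;
        case OP_HALT:  return regs[inst.dst];
        }
    }
    return 0;
}

int main(void) {
    Chunk chunk = {
        .code      = {{.op = OP_LD64K, .dst = 0, .a = 0}, {.op = OP_HALT, .dst = 0}},
        .constants = {42},
        .n_code    = 2,
    };
    printf("result: %lld\n", (long long)vm_run_toy(&chunk));
    return 0;
}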