author     Bad Diode <bd@badd10de.dev>    2021-10-29 19:11:40 +0200
committer  Bad Diode <bd@badd10de.dev>    2021-10-29 19:11:40 +0200
commit     5ed73b695e6b463149ab0c9ae3eccb26a4ec5807 (patch)
tree       01aa089934d1b49fe515fe86fffca01c471c69e9 /src/lexer.c
parent     e73a4c16a2269cdb2f5e7d66fb9839e4c44e14de (diff)
download   bdl-5ed73b695e6b463149ab0c9ae3eccb26a4ec5807.tar.gz
           bdl-5ed73b695e6b463149ab0c9ae3eccb26a4ec5807.zip
Add parser for tokens->ast conversion
Diffstat (limited to 'src/lexer.c')
-rwxr-xr-x  src/lexer.c  37 ++++++++++++++++++-------------------
1 file changed, 18 insertions(+), 19 deletions(-)
diff --git a/src/lexer.c b/src/lexer.c
index 6a417e4..ac93a5c 100755
--- a/src/lexer.c
+++ b/src/lexer.c
@@ -16,7 +16,7 @@ static const char* token_str[] = {
 
 void
 print_token(Token tok) {
-    printf("[%4ld:%-4ld] ", tok.line, tok.column);
+    printf("[%4ld:%-4ld] ", tok.line, tok.col);
     printf("%s", token_str[tok.type]);
     switch (tok.type) {
         case TOKEN_FIXNUM: {
@@ -128,11 +128,10 @@ find_primitive_type(const StringView value) {
     return TOKEN_SYMBOL;
 }
 
-Tokens
-tokenize(const StringView *sv) {
-    Tokens tokens = {0};
-    tokens.tokens = NULL;
-    array_init(tokens.tokens, 1);
+Token *
+tokenize(const StringView *sv, Errors *errors) {
+    Token *tokens = NULL;
+    array_init(tokens, 1);
     Scanner scanner = (Scanner){
         .current = *sv,
         .line_number = 1,
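
Note: tokenize() now returns a bare Token * grown through the array_init/array_push macros instead of carrying the array inside a Tokens wrapper struct. Those macros are defined elsewhere in the tree and are not part of this diff; the sketch below is only an assumption of their shape, following the common stretchy-buffer pattern where a size/capacity header is stored right before the pointer handed back to the caller:

    #include <stdlib.h>

    /* Sketch of a stretchy-buffer array, for illustration only; the real
     * macros used by tokenize() may differ in names and layout. */
    typedef struct ArrayHeader {
        size_t size;     /* elements in use */
        size_t capacity; /* elements allocated */
    } ArrayHeader;

    /* The header sits immediately before the user-visible pointer. */
    #define array_head(arr) ((ArrayHeader *)(arr) - 1)
    #define array_size(arr) ((arr) ? array_head(arr)->size : 0)

    /* cap must be >= 1 so that doubling on push can grow the buffer. */
    #define array_init(arr, cap)                                        \
        do {                                                            \
            ArrayHeader *h = malloc(sizeof(ArrayHeader) +               \
                                    (cap) * sizeof(*(arr)));            \
            h->size = 0;                                                \
            h->capacity = (cap);                                        \
            (arr) = (void *)(h + 1);                                    \
        } while (0)

    #define array_push(arr, item)                                       \
        do {                                                            \
            ArrayHeader *h = array_head(arr);                           \
            if (h->size == h->capacity) {                               \
                h->capacity *= 2;                                       \
                h = realloc(h, sizeof(ArrayHeader) +                    \
                               h->capacity * sizeof(*(arr)));           \
                (arr) = (void *)(h + 1);                                \
            }                                                           \
            (arr)[h->size++] = (item);                                  \
        } while (0)

Under that layout the call sites in this diff stay simple: array_push(tokens, token) grows the buffer behind the pointer, and callers can still index tokens[i] like a plain array.
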
@@ -163,7 +162,7 @@ tokenize(const StringView *sv) {
                     n++;
                 }
                 if (!found) {
-                    error_push(&tokens.errors, (Error){
+                    error_push(errors, (Error){
                         .type = ERR_TYPE_LEXER,
                         .value = ERR_UNMATCHED_STRING,
                         .line = line,
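
Note: errors are now pushed into a caller-owned Errors collector instead of a field on the returned value, so diagnostics no longer travel inside the token array's wrapper. The Error/Errors definitions are not shown in this diff; a minimal assumed shape, purely for illustration:

    #include <stddef.h>

    /* Assumed definitions; the real types live elsewhere in the repo. */
    typedef enum ErrorType { ERR_TYPE_LEXER /* , ... */ } ErrorType;
    typedef enum ErrorValue { ERR_UNMATCHED_STRING /* , ... */ } ErrorValue;

    typedef struct Error {
        ErrorType type;
        ErrorValue value;
        size_t line;
        size_t col;
    } Error;

    enum { ERRORS_CAP = 64 };

    typedef struct Errors {
        Error errors[ERRORS_CAP]; /* fixed buffer; the real one may grow */
        size_t n;                 /* errors recorded so far */
    } Errors;

    /* Append an error, silently dropping it if the buffer is full. */
    static void
    error_push(Errors *errors, Error error) {
        if (errors->n < ERRORS_CAP) {
            errors->errors[errors->n++] = error;
        }
    }
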
@@ -178,9 +177,9 @@ tokenize(const StringView *sv) {
                     },
                     .type = TOKEN_STRING,
                     .line = line,
-                    .column = col,
+                    .col = col,
                 };
-                array_push(tokens.tokens, token);
+                array_push(tokens, token);
             } break;
             case '(': {
                 if (scan_peek(&scanner) == ')') {
@@ -188,25 +187,25 @@ tokenize(const StringView *sv) {
                     Token token = (Token){
                         .type = TOKEN_NIL,
                         .line = line,
-                        .column = col,
+                        .col = col,
                     };
-                    array_push(tokens.tokens, token);
+                    array_push(tokens, token);
                 } else {
                     Token token = (Token){
                         .type = TOKEN_LPAREN,
                         .line = line,
-                        .column = col,
+                        .col = col,
                     };
-                    array_push(tokens.tokens, token);
+                    array_push(tokens, token);
                 }
             } break;
             case ')': {
                 Token token = (Token){
                     .type = TOKEN_RPAREN,
                     .line = line,
-                    .column = col,
+                    .col = col,
                 };
-                array_push(tokens.tokens, token);
+                array_push(tokens, token);
             } break;
             default: {
                 size_t n = 1;
@@ -224,10 +223,10 @@ tokenize(const StringView *sv) {
                     },
                     .type = TOKEN_SYMBOL,
                     .line = line,
-                    .column = col,
+                    .col = col,
                 };
                 token.type = find_primitive_type(token.value);
-                array_push(tokens.tokens, token);
+                array_push(tokens, token);
             } break;
         }
     }
@@ -236,9 +235,9 @@ tokenize(const StringView *sv) {
     Token token = (Token){
         .type = TOKEN_EOF,
         .line = scanner.line_number,
-        .column = 1,
+        .col = 1,
     };
-    array_push(tokens.tokens, token);
+    array_push(tokens, token);
 
     return tokens;
 }
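
Note: taken together, the new signature hands the caller two independent results, the token array and the error collector. A hypothetical call site under the sketches above (run_lexer and the array_size helper are assumptions, not code from this commit):

    #include <stdio.h>

    /* Hypothetical caller, not part of this commit. */
    static void
    run_lexer(const StringView *sv) {
        Errors errors = {0};
        Token *tokens = tokenize(sv, &errors);

        if (errors.n > 0) {
            /* Each Error carries the line/col recorded by the lexer. */
            for (size_t i = 0; i < errors.n; i++) {
                fprintf(stderr, "lex error at %zu:%zu\n",
                        errors.errors[i].line, errors.errors[i].col);
            }
            return;
        }
        for (size_t i = 0; i < array_size(tokens); i++) {
            print_token(tokens[i]);
        }
    }

One upside of the split is that the lexer can keep scanning after recording an error, and the caller decides how to treat any tokens produced alongside the diagnostics.
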