diff options
-rwxr-xr-x  src/lexer.c   2
-rwxr-xr-x  src/lexer.h   1
-rw-r--r--  src/parser.c  43
3 files changed, 46 insertions, 0 deletions
diff --git a/src/lexer.c b/src/lexer.c
index 704ee66..09c8f6c 100755
--- a/src/lexer.c
+++ b/src/lexer.c
@@ -15,6 +15,7 @@ static const char* token_str[] = {
15 | [TOKEN_IF] = "TOKEN_IF", | 15 | [TOKEN_IF] = "TOKEN_IF", |
16 | [TOKEN_DEF] = "TOKEN_DEF", | 16 | [TOKEN_DEF] = "TOKEN_DEF", |
17 | [TOKEN_SET] = "TOKEN_SET", | 17 | [TOKEN_SET] = "TOKEN_SET", |
18 | [TOKEN_FUN] = "TOKEN_FUN", | ||
18 | [TOKEN_EOF] = "TOKEN_EOF", | 19 | [TOKEN_EOF] = "TOKEN_EOF", |
19 | }; | 20 | }; |
20 | 21 | ||
@@ -132,6 +133,7 @@ find_primitive_type(const StringView value) { | |||
132 | if (TOKEN_IS_KEYWORD(value, "if")) { return TOKEN_IF; } | 133 | if (TOKEN_IS_KEYWORD(value, "if")) { return TOKEN_IF; } |
133 | if (TOKEN_IS_KEYWORD(value, "def")) { return TOKEN_DEF; } | 134 | if (TOKEN_IS_KEYWORD(value, "def")) { return TOKEN_DEF; } |
134 | if (TOKEN_IS_KEYWORD(value, "set!")) { return TOKEN_SET; } | 135 | if (TOKEN_IS_KEYWORD(value, "set!")) { return TOKEN_SET; } |
136 | if (TOKEN_IS_KEYWORD(value, "fun")) { return TOKEN_FUN; } | ||
135 | 137 | ||
136 | return TOKEN_SYMBOL; | 138 | return TOKEN_SYMBOL; |
137 | } | 139 | } |
diff --git a/src/lexer.h b/src/lexer.h
index f187526..c477fbd 100755
--- a/src/lexer.h
+++ b/src/lexer.h
@@ -23,6 +23,7 @@ typedef enum TokenType {
23 | TOKEN_IF, | 23 | TOKEN_IF, |
24 | TOKEN_DEF, | 24 | TOKEN_DEF, |
25 | TOKEN_SET, | 25 | TOKEN_SET, |
26 | TOKEN_FUN, | ||
26 | 27 | ||
27 | // End of file. | 28 | // End of file. |
28 | TOKEN_EOF, | 29 | TOKEN_EOF, |
diff --git a/src/parser.c b/src/parser.c
index f831e50..e32a571 100644
--- a/src/parser.c
+++ b/src/parser.c
@@ -116,6 +116,15 @@ parse_lambda(Parser *parser, Errors *errors) {
116 | Object *expr = parse_tree(parser, errors); | 116 | Object *expr = parse_tree(parser, errors); |
117 | array_push(lambda->body, expr); | 117 | array_push(lambda->body, expr); |
118 | } | 118 | } |
119 | if (array_size(lambda->body) == 0) { | ||
120 | error_push(errors, (Error){ | ||
121 | .type = ERR_TYPE_PARSER, | ||
122 | .value = ERR_NOT_ENOUGH_ARGS, | ||
123 | .line = start.line, | ||
124 | .col = start.col, | ||
125 | }); | ||
126 | return NULL; | ||
127 | } | ||
119 | return lambda; | 128 | return lambda; |
120 | } | 129 | } |
121 | 130 | ||
@@ -227,6 +236,39 @@ parse_var(Parser *parser, Errors *errors) {
227 | } | 236 | } |
228 | 237 | ||
229 | Object * | 238 | Object * |
239 | parse_fun(Parser *parser, Errors *errors) { | ||
240 | Token start = next_token(parser); | ||
241 | Object *ret = object_alloc(start, OBJ_TYPE_DEF); | ||
242 | |||
243 | // Variable name. | ||
244 | Token tok = peek_token(parser); | ||
245 | if (tok.type == TOKEN_RPAREN) { | ||
246 | error_push(errors, (Error){ | ||
247 | .type = ERR_TYPE_PARSER, | ||
248 | .value = ERR_NOT_ENOUGH_ARGS, | ||
249 | .line = tok.line, | ||
250 | .col = tok.col, | ||
251 | }); | ||
252 | return NULL; | ||
253 | } | ||
254 | if (tok.type != TOKEN_SYMBOL) { | ||
255 | error_push(errors, (Error){ | ||
256 | .type = ERR_TYPE_PARSER, | ||
257 | .value = ERR_WRONG_ARG_TYPE, | ||
258 | .line = tok.line, | ||
259 | .col = tok.col, | ||
260 | }); | ||
261 | return NULL; | ||
262 | } | ||
263 | ret->var_name = parse_tree(parser, errors); | ||
264 | |||
265 | // Variable value (expression). | ||
266 | rewind_token(parser); | ||
267 | ret->var_expr = parse_lambda(parser, errors); | ||
268 | return ret; | ||
269 | } | ||
270 | |||
271 | Object * | ||
230 | parse_list(Parser *parser, Errors *errors) { | 272 | parse_list(Parser *parser, Errors *errors) { |
231 | if (errors->n != 0) { | 273 | if (errors->n != 0) { |
232 | return NULL; | 274 | return NULL; |
@@ -247,6 +289,7 @@ parse_list(Parser *parser, Errors *errors) {
247 | case TOKEN_IF: { return parse_if(parser, errors); } break; | 289 | case TOKEN_IF: { return parse_if(parser, errors); } break; |
248 | case TOKEN_DEF: { return parse_var(parser, errors); } break; | 290 | case TOKEN_DEF: { return parse_var(parser, errors); } break; |
249 | case TOKEN_SET: { return parse_var(parser, errors); } break; | 291 | case TOKEN_SET: { return parse_var(parser, errors); } break; |
292 | case TOKEN_FUN: { return parse_fun(parser, errors); } break; | ||
250 | default: break; | 293 | default: break; |
251 | } | 294 | } |
252 | 295 | ||