#include <unistd.h>
#include <stdlib.h>
#include <stdint.h>
#include <stdio.h>
#include <ctype.h>
#include <string.h>
#include <assert.h>

#include "misc.h"
#include "mpyconfig.h"
#include "lexer.h"
#include "parse.h"

#define RULE_ACT_KIND_MASK (0xf0)
#define RULE_ACT_ARG_MASK (0x0f)
#define RULE_ACT_OR (0x10)
#define RULE_ACT_AND (0x20)
#define RULE_ACT_LIST (0x30)

#define RULE_ARG_BLANK (0x0000)
#define RULE_ARG_KIND_MASK (0xf000)
#define RULE_ARG_ARG_MASK (0x0fff)
#define RULE_ARG_TOK (0x1000)
#define RULE_ARG_RULE (0x2000)
#define RULE_ARG_OPT_TOK (0x3000)
#define RULE_ARG_OPT_RULE (0x4000)

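// Rule encoding (derived from the masks above): the act byte stores the rule
// kind (or/and/list) in its top nibble and a small count in its bottom nibble
// (the number of alternatives/sequence elements, or the list flavour); each
// 16-bit arg stores its own kind (token, rule, optional token, optional rule)
// in the top nibble and the token or rule id in the low 12 bits.
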
// (un)comment to use rule names; for debugging
//#define USE_RULE_NAME (1)

typedef struct _rule_t {
    byte rule_id;
    byte act;
#ifdef USE_RULE_NAME
    const char *rule_name;
#endif
    uint16_t arg[];
} rule_t;

enum {
    RULE_none = 0,
#define DEF_RULE(rule, comp, kind, arg...) RULE_##rule,
#include "grammar.h"
#undef DEF_RULE
    RULE_maximum_number_of,
};

#define or(n) (RULE_ACT_OR | n)
#define and(n) (RULE_ACT_AND | n)
#define one_or_more (RULE_ACT_LIST | 2)
#define list (RULE_ACT_LIST | 1)
#define list_with_end (RULE_ACT_LIST | 3)
#define tok(t) (RULE_ARG_TOK | PY_TOKEN_##t)
#define rule(r) (RULE_ARG_RULE | RULE_##r)
#define opt_tok(t) (RULE_ARG_OPT_TOK | PY_TOKEN_##t)
#define opt_rule(r) (RULE_ARG_OPT_RULE | RULE_##r)
#ifdef USE_RULE_NAME
#define DEF_RULE(rule, comp, kind, arg...) static const rule_t rule_##rule = { RULE_##rule, kind, #rule, { arg } };
#else
#define DEF_RULE(rule, comp, kind, arg...) static const rule_t rule_##rule = { RULE_##rule, kind, { arg } };
#endif
#include "grammar.h"
#undef or
#undef and
#undef list
#undef list_with_end
#undef tok
#undef rule
#undef opt_tok
#undef opt_rule
#undef one_or_more
#undef DEF_RULE
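// Illustrative expansion (hypothetical entry; the real definitions live in
// grammar.h): a line such as
//     DEF_RULE(pass_stmt, c, and(1), tok(KW_PASS))
// would expand, with USE_RULE_NAME undefined, to
//     static const rule_t rule_pass_stmt = { RULE_pass_stmt, RULE_ACT_AND | 1, { RULE_ARG_TOK | PY_TOKEN_KW_PASS } };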

static const rule_t *rules[] = {
    NULL,
#define DEF_RULE(rule, comp, kind, arg...) &rule_##rule,
#include "grammar.h"
#undef DEF_RULE
};

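// The parser is non-recursive: rather than recursing into sub-rules, it keeps
// an explicit stack of frames recording which rule is being matched and how
// far through its arguments it has got. push_rule/pop_rule below manage this
// stack, doubling its allocation when it fills up.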
typedef struct _rule_stack_t {
    byte rule_id;
    int32_t arg_i; // what should be the size and signedness?
} rule_stack_t;

typedef struct _parser_t {
    uint rule_stack_alloc;
    uint rule_stack_top;
    rule_stack_t *rule_stack;

    uint result_stack_top;
    py_parse_node_t *result_stack;
} parser_t;

static void push_rule(parser_t *parser, const rule_t *rule, int arg_i) {
    if (parser->rule_stack_top >= parser->rule_stack_alloc) {
        parser->rule_stack_alloc *= 2;
        parser->rule_stack = m_renew(rule_stack_t, parser->rule_stack, parser->rule_stack_alloc);
    }
    parser->rule_stack[parser->rule_stack_top].rule_id = rule->rule_id;
    parser->rule_stack[parser->rule_stack_top].arg_i = arg_i;
    parser->rule_stack_top += 1;
}

static void push_rule_from_arg(parser_t *parser, uint arg) {
    assert((arg & RULE_ARG_KIND_MASK) == RULE_ARG_RULE || (arg & RULE_ARG_KIND_MASK) == RULE_ARG_OPT_RULE);
    uint rule_id = arg & RULE_ARG_ARG_MASK;
    assert(rule_id < RULE_maximum_number_of);
    push_rule(parser, rules[rule_id], 0);
}

static void pop_rule(parser_t *parser, const rule_t **rule, uint *arg_i) {
    parser->rule_stack_top -= 1;
    *rule = rules[parser->rule_stack[parser->rule_stack_top].rule_id];
    *arg_i = parser->rule_stack[parser->rule_stack_top].arg_i;
}

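// Leaf nodes are packed into a single machine word: the node kind goes in the
// low 4 bits and the argument (qstr, small-int value or token id) in the bits
// above; non-leaf nodes are pointers to py_parse_node_struct_t, and the
// PY_PARSE_NODE_* macros (see parse.h) tell the two apart.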
py_parse_node_t py_parse_node_new_leaf(machine_int_t kind, machine_int_t arg) {
    return (py_parse_node_t)(kind | (arg << 4));
}

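// Interior nodes record their rule id in the low 8 bits of kind_num_nodes and
// the number of child nodes in the bits above, with the children stored
// immediately after the header.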
int num_parse_nodes_allocated = 0;
py_parse_node_struct_t *parse_node_new_struct(int rule_id, int num_args) {
    py_parse_node_struct_t *pn = m_malloc(sizeof(py_parse_node_struct_t) + num_args * sizeof(py_parse_node_t));
    pn->source = 0; // TODO
    pn->kind_num_nodes = (rule_id & 0xff) | (num_args << 8);
    num_parse_nodes_allocated += 1;
    return pn;
}

void py_parse_node_show(py_parse_node_t pn, int indent) {
    for (int i = 0; i < indent; i++) {
        printf(" ");
    }
    if (PY_PARSE_NODE_IS_NULL(pn)) {
        printf("NULL\n");
    } else if (PY_PARSE_NODE_IS_LEAF(pn)) {
        int arg = PY_PARSE_NODE_LEAF_ARG(pn);
        switch (PY_PARSE_NODE_LEAF_KIND(pn)) {
            case PY_PARSE_NODE_ID: printf("id(%s)\n", qstr_str(arg)); break;
            case PY_PARSE_NODE_SMALL_INT: printf("int(%d)\n", arg); break;
            case PY_PARSE_NODE_INTEGER: printf("int(%s)\n", qstr_str(arg)); break;
            case PY_PARSE_NODE_DECIMAL: printf("dec(%s)\n", qstr_str(arg)); break;
            case PY_PARSE_NODE_STRING: printf("str(%s)\n", qstr_str(arg)); break;
            case PY_PARSE_NODE_BYTES: printf("bytes(%s)\n", qstr_str(arg)); break;
            case PY_PARSE_NODE_TOKEN: printf("tok(%d)\n", arg); break;
            default: assert(0);
        }
    } else {
        py_parse_node_struct_t *pns2 = (py_parse_node_struct_t*)pn;
        int n = pns2->kind_num_nodes >> 8;
#ifdef USE_RULE_NAME
        printf("%s(%d) (n=%d)\n", rules[PY_PARSE_NODE_STRUCT_KIND(pns2)]->rule_name, PY_PARSE_NODE_STRUCT_KIND(pns2), n);
#else
        printf("rule(%u) (n=%d)\n", (uint)PY_PARSE_NODE_STRUCT_KIND(pns2), n);
#endif
        for (int i = 0; i < n; i++) {
            py_parse_node_show(pns2->nodes[i], indent + 2);
        }
    }
}

/*
static void result_stack_show(parser_t *parser) {
    printf("result stack, most recent first\n");
    for (int i = parser->result_stack_top - 1; i >= 0; i--) {
        py_parse_node_show(parser->result_stack[i], 0);
    }
}
*/

static py_parse_node_t pop_result(parser_t *parser) {
    assert(parser->result_stack_top > 0);
    return parser->result_stack[--parser->result_stack_top];
}

static py_parse_node_t peek_result(parser_t *parser, int pos) {
    assert(parser->result_stack_top > pos);
    return parser->result_stack[parser->result_stack_top - 1 - pos];
}

static void push_result_node(parser_t *parser, py_parse_node_t pn) {
    parser->result_stack[parser->result_stack_top++] = pn;
}

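// Convert the current token into a leaf node. Number literals are classified
// here: digits that fit a small int give a SMALL_INT node, a '.', exponent or
// imaginary suffix gives a DECIMAL node, and anything else (out of range, or
// characters not handled below) falls back to an INTEGER node that keeps the
// original string.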
static void push_result_token(parser_t *parser, const py_lexer_t *lex) {
    const py_token_t *tok = py_lexer_cur(lex);
    py_parse_node_t pn;
    if (tok->kind == PY_TOKEN_NAME) {
        pn = py_parse_node_new_leaf(PY_PARSE_NODE_ID, qstr_from_strn_copy(tok->str, tok->len));
    } else if (tok->kind == PY_TOKEN_NUMBER) {
        bool dec = false;
        bool small_int = true;
        int int_val = 0;
        int len = tok->len;
        const char *str = tok->str;
        int base = 10;
        int i = 0;
        if (len >= 3 && str[0] == '0') {
            if (str[1] == 'o' || str[1] == 'O') {
                // octal
                base = 8;
                i = 2;
            } else if (str[1] == 'x' || str[1] == 'X') {
                // hexadecimal
                base = 16;
                i = 2;
            } else if (str[1] == 'b' || str[1] == 'B') {
                // binary
                base = 2;
                i = 2;
            }
        }
        for (; i < len; i++) {
            if (g_unichar_isdigit(str[i]) && str[i] - '0' < base) {
                int_val = base * int_val + str[i] - '0';
            } else if (base == 16 && 'a' <= str[i] && str[i] <= 'f') {
                int_val = base * int_val + str[i] - 'a' + 10;
            } else if (base == 16 && 'A' <= str[i] && str[i] <= 'F') {
                int_val = base * int_val + str[i] - 'A' + 10;
            } else if (str[i] == '.' || str[i] == 'e' || str[i] == 'E' || str[i] == 'j' || str[i] == 'J') {
                dec = true;
                break;
            } else {
                small_int = false;
                break;
            }
        }
        if (dec) {
            pn = py_parse_node_new_leaf(PY_PARSE_NODE_DECIMAL, qstr_from_strn_copy(str, len));
        } else if (small_int && PY_FIT_SMALL_INT(int_val)) {
            pn = py_parse_node_new_leaf(PY_PARSE_NODE_SMALL_INT, int_val);
        } else {
            pn = py_parse_node_new_leaf(PY_PARSE_NODE_INTEGER, qstr_from_strn_copy(str, len));
        }
    } else if (tok->kind == PY_TOKEN_STRING) {
        pn = py_parse_node_new_leaf(PY_PARSE_NODE_STRING, qstr_from_strn_copy(tok->str, tok->len));
    } else if (tok->kind == PY_TOKEN_BYTES) {
        pn = py_parse_node_new_leaf(PY_PARSE_NODE_BYTES, qstr_from_strn_copy(tok->str, tok->len));
    } else {
        pn = py_parse_node_new_leaf(PY_PARSE_NODE_TOKEN, tok->kind);
    }
    push_result_node(parser, pn);
}

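// Reduce the top num_args results to a single struct node for the given rule;
// the results are popped in reverse so that nodes[] ends up in source order.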
static void push_result_rule(parser_t *parser, const rule_t *rule, int num_args) {
    py_parse_node_struct_t *pn = parse_node_new_struct(rule->rule_id, num_args);
    for (int i = num_args; i > 0; i--) {
        pn->nodes[i - 1] = pop_result(parser);
    }
    push_result_node(parser, (py_parse_node_t)pn);
}

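// Parse the whole token stream into a single tree of parse nodes; returns
// PY_PARSE_NODE_NULL on a syntax error. Minimal usage sketch (the lexer
// constructor name and the PY_PARSE_FILE_INPUT kind are assumptions, not
// taken from this file):
//     py_lexer_t *lex = py_lexer_new_from_file("example.py");
//     py_parse_node_t pn = py_parse(lex, PY_PARSE_FILE_INPUT);
//     if (!PY_PARSE_NODE_IS_NULL(pn)) {
//         py_parse_node_show(pn, 0);
//     }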
py_parse_node_t py_parse(py_lexer_t *lex, py_parse_input_kind_t input_kind) {
    parser_t *parser = m_new(parser_t, 1);
    parser->rule_stack_alloc = 64;
    parser->rule_stack_top = 0;
    parser->rule_stack = m_new(rule_stack_t, parser->rule_stack_alloc);

    parser->result_stack = m_new(py_parse_node_t, 1000);
    parser->result_stack_top = 0;

    int top_level_rule;
    switch (input_kind) {
        case PY_PARSE_SINGLE_INPUT: top_level_rule = RULE_single_input; break;
        //case PY_PARSE_EVAL_INPUT: top_level_rule = RULE_eval_input; break;
        default: top_level_rule = RULE_file_input;
    }
    push_rule(parser, rules[top_level_rule], 0);

    uint n, i;
    bool backtrack = false;
    const rule_t *rule;
    py_token_kind_t tok_kind;
    bool emit_rule;
    bool had_trailing_sep;

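    // Main parsing loop: pop the topmost (rule, argument index) frame and act
    // on the rule kind. OR rules try each alternative in turn, AND rules match
    // their arguments in sequence, LIST rules match a repeated item with an
    // optional separator. A failed match sets 'backtrack', which the parent
    // frame examines when it is popped; successfully matched material
    // accumulates on the result stack until a whole rule can be reduced to a
    // single parse node.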
    for (;;) {
        next_rule:
        if (parser->rule_stack_top == 0) {
            break;
        }

        pop_rule(parser, &rule, &i);
        n = rule->act & RULE_ACT_ARG_MASK;

        /*
        // debugging
        printf("depth=%d ", parser->rule_stack_top);
        for (int j = 0; j < parser->rule_stack_top; ++j) {
            printf(" ");
        }
        printf("%s n=%d i=%d bt=%d\n", rule->rule_name, n, i, backtrack);
        */

        switch (rule->act & RULE_ACT_KIND_MASK) {
            case RULE_ACT_OR:
                if (i > 0 && !backtrack) {
                    goto next_rule;
                } else {
                    backtrack = false;
                }
                for (; i < n - 1; ++i) {
                    switch (rule->arg[i] & RULE_ARG_KIND_MASK) {
                        case RULE_ARG_TOK:
                            if (py_lexer_is_kind(lex, rule->arg[i] & RULE_ARG_ARG_MASK)) {
                                push_result_token(parser, lex);
                                py_lexer_to_next(lex);
                                goto next_rule;
                            }
                            break;
                        case RULE_ARG_RULE:
                            push_rule(parser, rule, i + 1);
                            push_rule_from_arg(parser, rule->arg[i]);
                            goto next_rule;
                        default:
                            assert(0);
                    }
                }
                if ((rule->arg[i] & RULE_ARG_KIND_MASK) == RULE_ARG_TOK) {
                    if (py_lexer_is_kind(lex, rule->arg[i] & RULE_ARG_ARG_MASK)) {
                        push_result_token(parser, lex);
                        py_lexer_to_next(lex);
                    } else {
                        backtrack = true;
                        goto next_rule;
                    }
                } else {
                    push_rule_from_arg(parser, rule->arg[i]);
                }
                break;

            case RULE_ACT_AND:

                // failed, backtrack if we can, else syntax error
                if (backtrack) {
                    assert(i > 0);
                    if ((rule->arg[i - 1] & RULE_ARG_KIND_MASK) == RULE_ARG_OPT_RULE) {
                        // an optional rule that failed, so continue with next arg
                        push_result_node(parser, PY_PARSE_NODE_NULL);
                        backtrack = false;
                    } else {
                        // a mandatory rule that failed, so propagate backtrack
                        if (i > 1) {
                            // already eaten tokens so can't backtrack
                            goto syntax_error;
                        } else {
                            goto next_rule;
                        }
                    }
                }

                // progress through the rule
                for (; i < n; ++i) {
                    switch (rule->arg[i] & RULE_ARG_KIND_MASK) {
                        case RULE_ARG_TOK:
                            // need to match a token
                            tok_kind = rule->arg[i] & RULE_ARG_ARG_MASK;
                            if (py_lexer_is_kind(lex, tok_kind)) {
                                // matched token
                                if (tok_kind == PY_TOKEN_NAME) {
                                    push_result_token(parser, lex);
                                }
                                py_lexer_to_next(lex);
                            } else {
                                // failed to match token
                                if (i > 0) {
                                    // already eaten tokens so can't backtrack
                                    goto syntax_error;
                                } else {
                                    // this rule failed, so backtrack
                                    backtrack = true;
                                    goto next_rule;
                                }
                            }
                            break;
                        case RULE_ARG_RULE:
                            //if (i + 1 < n) {
                            push_rule(parser, rule, i + 1);
                            //}
                            push_rule_from_arg(parser, rule->arg[i]);
                            goto next_rule;
                        case RULE_ARG_OPT_RULE:
                            push_rule(parser, rule, i + 1);
                            push_rule_from_arg(parser, rule->arg[i]);
                            goto next_rule;
                        default:
                            assert(0);
                    }
                }

                assert(i == n);

                // matched the rule, so now build the corresponding parse_node

                // count number of arguments for the parse_node
                i = 0;
                emit_rule = false;
                for (int x = 0; x < n; ++x) {
                    if ((rule->arg[x] & RULE_ARG_KIND_MASK) == RULE_ARG_TOK) {
                        tok_kind = rule->arg[x] & RULE_ARG_ARG_MASK;
                        if (tok_kind >= PY_TOKEN_NAME) {
                            emit_rule = true;
                        }
                        if (tok_kind == PY_TOKEN_NAME) {
                            // only tokens which were names are pushed to stack
                            i += 1;
                        }
                    } else {
                        // rules are always pushed
                        i += 1;
                    }
                }

                // always emit these rules, even if they have only 1 argument
                if (rule->rule_id == RULE_expr_stmt || rule->rule_id == RULE_yield_stmt) {
                    emit_rule = true;
                }

                // never emit these rules if they have only 1 argument
                // NOTE: can't put atom_paren here because we need it to distinguish, for example, [a,b] from [(a,b)]
                // TODO possibly put varargslist_name, varargslist_equal here as well
                if (rule->rule_id == RULE_else_stmt || rule->rule_id == RULE_testlist_comp_3b || rule->rule_id == RULE_import_as_names_paren || rule->rule_id == RULE_typedargslist_name || rule->rule_id == RULE_typedargslist_colon || rule->rule_id == RULE_typedargslist_equal || rule->rule_id == RULE_dictorsetmaker_colon || rule->rule_id == RULE_classdef_2 || rule->rule_id == RULE_with_item_as || rule->rule_id == RULE_assert_stmt_extra || rule->rule_id == RULE_as_name || rule->rule_id == RULE_raise_stmt_from || rule->rule_id == RULE_vfpdef) {
                    emit_rule = false;
                }

                // always emit these rules, and add an extra blank node at the end (to be used by the compiler to store data)
                if (rule->rule_id == RULE_funcdef || rule->rule_id == RULE_classdef || rule->rule_id == RULE_comp_for || rule->rule_id == RULE_lambdef || rule->rule_id == RULE_lambdef_nocond) {
                    emit_rule = true;
                    push_result_node(parser, PY_PARSE_NODE_NULL);
                    i += 1;
                }

                int num_not_nil = 0;
                for (int x = 0; x < i; ++x) {
                    if (peek_result(parser, x) != PY_PARSE_NODE_NULL) {
                        num_not_nil += 1;
                    }
                }
                //printf("done and %s n=%d i=%d notnil=%d\n", rule->rule_name, n, i, num_not_nil);
                if (emit_rule) {
                    push_result_rule(parser, rule, i);
                } else if (num_not_nil == 0) {
                    push_result_rule(parser, rule, i); // needed for, eg, atom_paren, testlist_comp_3b
                    //result_stack_show(parser);
                    //assert(0);
                } else if (num_not_nil == 1) {
                    // single result, leave it on stack
                    py_parse_node_t pn = PY_PARSE_NODE_NULL;
                    for (int x = 0; x < i; ++x) {
                        py_parse_node_t pn2 = pop_result(parser);
                        if (pn2 != PY_PARSE_NODE_NULL) {
                            pn = pn2;
                        }
                    }
                    push_result_node(parser, pn);
                } else {
                    push_result_rule(parser, rule, i);
                }
                break;

            case RULE_ACT_LIST:
                // n=2 is: item item*
                // n=1 is: item (sep item)*
                // n=3 is: item (sep item)* [sep]
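                // For example, a hypothetical grammar entry
                //     DEF_RULE(testlist, c, list, rule(test), tok(DEL_COMMA))
                // matches "a, b, c": the comma separator is a plain token, so
                // it is not pushed to the result stack (see below) and only
                // the items become children of the resulting list node.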
                if (backtrack) {
                    list_backtrack:
                    had_trailing_sep = false;
                    if (n == 2) {
                        if (i == 1) {
                            // fail on item, first time round; propagate backtrack
                            goto next_rule;
                        } else {
                            // fail on item, in later rounds; finish with this rule
                            backtrack = false;
                        }
                    } else {
                        if (i == 1) {
                            // fail on item, first time round; propagate backtrack
                            goto next_rule;
                        } else if ((i & 1) == 1) {
                            // fail on item, in later rounds; have eaten tokens so can't backtrack
                            if (n == 3) {
                                // list allows trailing separator; finish parsing list
                                had_trailing_sep = true;
                                backtrack = false;
                            } else {
                                // list doesn't allow a trailing separator; fail
                                goto syntax_error;
                            }
                        } else {
                            // fail on separator; finish parsing list
                            backtrack = false;
                        }
                    }
                } else {
                    for (;;) {
                        uint arg = rule->arg[i & 1 & n];
                        switch (arg & RULE_ARG_KIND_MASK) {
                            case RULE_ARG_TOK:
                                if (py_lexer_is_kind(lex, arg & RULE_ARG_ARG_MASK)) {
                                    if (i & 1 & n) {
                                        // separators which are tokens are not pushed to result stack
                                    } else {
                                        push_result_token(parser, lex);
                                    }
                                    py_lexer_to_next(lex);
                                    // got element of list, so continue parsing list
                                    i += 1;
                                } else {
                                    // couldn't get element of list
                                    i += 1;
                                    backtrack = true;
                                    goto list_backtrack;
                                }
                                break;
                            case RULE_ARG_RULE:
                                push_rule(parser, rule, i + 1);
                                push_rule_from_arg(parser, arg);
                                goto next_rule;
                            default:
                                assert(0);
                        }
                    }
                }
                assert(i >= 1);

                // compute number of elements in list, result in i
                i -= 1;
                if ((n & 1) && (rule->arg[1] & RULE_ARG_KIND_MASK) == RULE_ARG_TOK) {
                    // don't count separators when they are tokens
                    i = (i + 1) / 2;
                }

                if (i == 1) {
                    // list matched single item
                    if (had_trailing_sep) {
                        // if there was a trailing separator, make a list of a single item
                        push_result_rule(parser, rule, i);
                    } else {
                        // just leave single item on stack (ie don't wrap in a list)
                    }
                } else {
                    //printf("done list %s %d %d\n", rule->rule_name, n, i);
                    push_result_rule(parser, rule, i);
                }
                break;

            default:
                assert(0);
        }
    }

    // check we are at the end of the token stream
    if (!py_lexer_is_kind(lex, PY_TOKEN_END)) {
        goto syntax_error;
    }

    //printf("--------------\n");
    //result_stack_show(parser);
    assert(parser->result_stack_top == 1);
    //printf("maximum depth: %d\n", parser->rule_stack_alloc);
    //printf("number of parse nodes allocated: %d\n", num_parse_nodes_allocated);
    return parser->result_stack[0];

syntax_error:
    if (py_lexer_is_kind(lex, PY_TOKEN_INDENT)) {
        py_lexer_show_error_pythonic(lex, "IndentationError: unexpected indent");
    } else if (py_lexer_is_kind(lex, PY_TOKEN_DEDENT_MISMATCH)) {
        py_lexer_show_error_pythonic(lex, "IndentationError: unindent does not match any outer indentation level");
    } else {
        py_lexer_show_error_pythonic(lex, "syntax error:");
#ifdef USE_RULE_NAME
        py_lexer_show_error(lex, rule->rule_name);
#endif
        py_token_show(py_lexer_cur(lex));
    }
    return PY_PARSE_NODE_NULL;
}