module zua.parser.parser;
import zua.parser.lexer;
import zua.compiler.ir : BinaryOperation, UnaryOperation;
import zua.diagnostic;
import zua.parser.ast;
import std.typecons;
import std.conv;

private alias PrefixParselet = Expr delegate(Token op);
private alias InfixParselet = Expr delegate(Expr left, Token op);

/** A parser for Lua */
final class Parser {

	private Lexer lexer;
	private Diagnostic[]* diagnostics;

	private PrefixParselet[string] prefixOps;
	private InfixParselet[Tuple!(uint, string)] infixOps;

	/** Any infix operator with a precedence >= this one requires the lhs to be a prefixexp */
	private uint prefixexpThreshold = 85;
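	// Example: in `x = 2 (y)`, the number literal is not a prefixexp, so expr()
	// stops before the `(` rather than building a CallExpr with `2` as its base.
	// Only the call and index parselets (precedence 90 and 100) sit above this threshold.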

	private void registerGenericInfix(bool leftAssociative = true)(uint precedence, string op, BinaryOperation bOp) {
		infixOps[tuple(precedence, op)] = delegate(Expr left, Token opToken) {
			BinaryExpr result = new BinaryExpr();
			result.start = left.start;
			result.opToken = opToken;
			result.lhs = left;
			result.op = bOp;
			static if (leftAssociative) {
				result.rhs = expr(precedence);
			}
			else {
				result.rhs = expr(precedence - 1);
			}
			result.end = lexer.last.get;
			return result;
		};
	}

	private void registerGenericPrefix(uint precedence, string op, UnaryOperation uOp) {
		prefixOps[op] = delegate(Token opToken) {
			UnaryExpr result = new UnaryExpr();
			result.start = opToken;
			result.op = uOp;
			result.expr = expr(precedence);
			result.end = lexer.last.get;
			return result;
		};
	}

	/** Create a new parser */
	this(Lexer lexer, ref Diagnostic[] diagnostics) {
		this.lexer = lexer;
		this.diagnostics = &diagnostics;

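		// Operator precedences mirror the Lua 5.1 reference manual, lowest-binding
		// first: or, and, comparisons, .., + -, * / %, unary operators, ^.
		// `..` and `^` are right-associative; everything else is left-associative.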
		registerGenericInfix(10, "or", BinaryOperation.Or);

		registerGenericInfix(20, "and", BinaryOperation.And);

		registerGenericInfix(30, "<", BinaryOperation.CmpLt);
		registerGenericInfix(30, ">", BinaryOperation.CmpGt);
		registerGenericInfix(30, "<=", BinaryOperation.CmpLe);
		registerGenericInfix(30, ">=", BinaryOperation.CmpGe);
		registerGenericInfix(30, "~=", BinaryOperation.CmpNe);
		registerGenericInfix(30, "==", BinaryOperation.CmpEq);

		registerGenericInfix!false(40, "..", BinaryOperation.Concat);

		registerGenericInfix(50, "+", BinaryOperation.Add);
		registerGenericInfix(50, "-", BinaryOperation.Sub);

		registerGenericInfix(60, "*", BinaryOperation.Mul);
		registerGenericInfix(60, "/", BinaryOperation.Div);
		registerGenericInfix(60, "%", BinaryOperation.Mod);

		registerGenericPrefix(70, "not", UnaryOperation.Not);
		registerGenericPrefix(70, "#", UnaryOperation.Length);
		registerGenericPrefix(70, "-", UnaryOperation.Negate);

		registerGenericInfix!false(80, "^", BinaryOperation.Exp);

		prefixOps["("] = delegate(Token opToken) {
			BracketExpr result = new BracketExpr();
			result.start = opToken;
			result.expr = expr();

			lexer.consume!(No.toConsume, "to close bracket expression")(TokenType.Symbol, ")");

			result.end = lexer.last.get;
			return result;
		};

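		// Lua 5.1 treats a `(` on a new line after a parenthesised expression as
		// ambiguous syntax; here we emit a diagnostic with two quickfixes instead
		// of rejecting the input outright.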
		infixOps[tuple(90u, "(")] = delegate(Expr left, Token opToken) {
			if (cast(BracketExpr)left) {
				const prev = decodeIndex(left.end.index, lexer.source);
				const here = decodeIndex(opToken.index, lexer.source);
				if (prev.line != here.line) {
					Diagnostic err;
					err.message = "ambiguous syntax; is this a function call or two separate expressions?";
					err.type = DiagnosticType.Error;
					err.add(left.end, opToken);

					Quickfix meantToCall = {
						message: "convert to a function call",
						ranges: [QuickfixRange(left.end, opToken, ")(")]
					};
					err.quickfix ~= meantToCall;

					Quickfix separate = {
						message: "convert to separate expressions",
						ranges: [QuickfixRange(left.end, ");")]
					};
					err.quickfix ~= separate;

					*this.diagnostics ~= err;
				}
			}

			CallExpr result = new CallExpr();
			result.start = left.start;
			result.base = left;
			result.args = tupleExpr();

			lexer.consume!(No.toConsume, "to close argument list")(TokenType.Symbol, ")");

			result.end = lexer.last.get;
			return result;
		};

		infixOps[tuple(100u, "[")] = delegate(Expr left, Token _) {
			IndexExpr result = new IndexExpr();
			result.start = left.start;
			result.base = left;
			result.key = expr();

			lexer.consume!(No.toConsume, "to close index parameter")(TokenType.Symbol, "]");

			result.end = lexer.last.get;
			return result;
		};

		infixOps[tuple(100u, ".")] = delegate(Expr left, Token _) {
			IndexExpr result = new IndexExpr();
			result.start = left.start;
			result.base = left;

			const ident = lexer.consume(TokenType.Identifier);
			if (ident[0]) {
				StringExpr key = new StringExpr();
				key.start = ident[1];
				key.value = ident[1].value;
				key.end = ident[1];

				result.key = key;
			}
			else {
				ErrorExpr key = new ErrorExpr();
				key.start = ident[1];
				key.end = ident[1];

				result.key = key;
			}

			result.end = lexer.last.get;
			return result;
		};

		infixOps[tuple(100u, ":")] = delegate(Expr left, Token _) {
			CallExpr result = new CallExpr();
			result.start = left.start;
			result.base = left;

			const ident = lexer.consume(TokenType.Identifier);
			if (ident[0]) {
				result.method = ident[1].value;
			}

			lexer.consume!(No.toConsume, "to begin argument list in method call")(TokenType.Symbol, "(");
			result.args = tupleExpr();
			lexer.consume!(No.toConsume, "to close argument list")(TokenType.Symbol, ")");

			result.end = lexer.last.get;
			return result;
		};
	}

	/**
	 * Read a block terminated by `endKeyword`.
	 * If includeStart is Yes, the immediately following token (the block's opening keyword) is skipped first.
	 */
	private Block block(Flag!"includeStart" includeStart)(string endKeyword = "end") {
		Block result = new Block();
		result.start = lexer.peek();
		static if (includeStart == Yes.includeStart) lexer.next();
		while (!lexer.eof && !lexer.isNext(TokenType.Keyword, endKeyword)) {
			result.body ~= stat();
			lexer.tryConsume(TokenType.Symbol, ";");
		}

		if (lexer.eof) {
			Diagnostic err;
			err.message = "missing '" ~ endKeyword ~ "'";
			err.add(lexer.next());
			err.type = DiagnosticType.Error;
			*diagnostics ~= err;
		}
		else {
			lexer.next();
		}

		result.end = lexer.last.get(result.start);
		return result;
	}

	private Block block(string startKeyword = "do", string endKeyword = "end") {
		if (this.lexer.isNext(TokenType.Keyword, startKeyword)) {
			return block!(Yes.includeStart)(endKeyword);
		}
		else {
			lexer.consume!(No.toConsume)(TokenType.Keyword, startKeyword);
			return block!(No.includeStart)(endKeyword);
		}
	}

	private string[] nameList() {
		string[] result;
		auto name = lexer.consume(TokenType.Identifier);

		if (name[0]) result ~= name[1].value;

		while (lexer.isNext(TokenType.Symbol, ",")) {
			lexer.save();
			lexer.next(); // skip over comma
			if (lexer.isNext(TokenType.Identifier)) {
				lexer.discard();
				result ~= lexer.next().value;
			}
			else { // if not an identifier, put the comma back and return
				lexer.restore();
				break;
			}
		}

		return result;
	}

	private Expr[] tupleExpr() {
		if (lexer.isNext(TokenType.Symbol, ")")
			|| lexer.isNext(TokenType.Keyword, "end")
			|| lexer.isNext(TokenType.Keyword, "until")
			|| lexer.isNext(TokenType.Keyword, "else")
			|| lexer.isNext(TokenType.Keyword, "elseif")) return [];

		Expr[] result;
		result ~= expr();

		while (lexer.tryConsume(TokenType.Symbol, ",")) {
			result ~= expr();
		}

		return result;
	}

	private FunctionExpr functionBody() {
		FunctionExpr result = new FunctionExpr();
		result.start = lexer.peek();

		lexer.consume!(No.toConsume)(TokenType.Symbol, "(");

		if (lexer.tryConsume(TokenType.Symbol, "...")) {
			result.variadic = true;
		}
		else if (!lexer.isNext(TokenType.Symbol, ")")) {
			result.args = nameList();
			if (lexer.tryConsume(TokenType.Symbol, ",")) {
				result.variadic = true;
				lexer.consume!(Yes.toConsume)(TokenType.Symbol, "...");
			}
		}

		lexer.consume!(No.toConsume)(TokenType.Symbol, ")");

		result.body = block!(No.includeStart);

		result.end = lexer.last.get(result.start);
		return result;
	}

	private Stat stat() {
		if (lexer.isNext(TokenType.Keyword, "do")) {
			return block();
		}
		else if (lexer.tryConsume(TokenType.Keyword, "while")) {
			WhileStat result = new WhileStat();
			result.start = lexer.last.get;
			result.cond = expr();
			result.body = block();
			result.end = lexer.last.get;
			return result;
		}
		else if (lexer.isNext(TokenType.Keyword, "repeat")) {
			RepeatStat result = new RepeatStat();
			result.start = lexer.peek();
			result.body = block("repeat", "until");
			result.endCond = expr();
			result.end = lexer.last.get;
			return result;
		}
		else if (lexer.tryConsume(TokenType.Keyword, "if")) {
			IfStat result = new IfStat();
			result.start = lexer.last.get;

			enum IfBlockType {
				If,
				Else,
				End
			}

			IfBlockType nextType = IfBlockType.If;
			while (nextType == IfBlockType.If) {
				Expr ifCond = expr();
				Block ifBody = new Block();
				ifBody.start = lexer.consume!(No.toConsume)(TokenType.Keyword, "then")[1];
				while (!lexer.eof && !lexer.isNext(TokenType.Keyword, "end") && !lexer.isNext(TokenType.Keyword, "elseif")
					&& !lexer.isNext(TokenType.Keyword, "else")) {
					ifBody.body ~= stat();
					lexer.tryConsume(TokenType.Symbol, ";");
				}

				if (lexer.eof) {
					Diagnostic err;
					err.message = "missing 'end'";
					err.add(lexer.next());
					err.type = DiagnosticType.Error;
					*diagnostics ~= err;
				}
				else {
					if (lexer.isNext(TokenType.Keyword, "end")) nextType = IfBlockType.End;
					else if (lexer.isNext(TokenType.Keyword, "elseif")) nextType = IfBlockType.If;
					else if (lexer.isNext(TokenType.Keyword, "else")) nextType = IfBlockType.Else;
					lexer.next();
				}

				ifBody.end = lexer.last.get;
				result.entries ~= cast(Tuple!(Expr, "cond", Block, "body"))tuple(ifCond, ifBody);
			}

			if (nextType == IfBlockType.Else) {
				result.elseBody = block!(No.includeStart).nullable;
			}

			result.end = lexer.last.get;
			return result;
		}
		else if (lexer.tryConsume(TokenType.Keyword, "for")) {
			Token start = lexer.last.get;
			Tuple!(bool, Token) firstVar = lexer.consume(TokenType.Identifier);
			if (!firstVar[0]) {
				ErrorStat stat = new ErrorStat();
				stat.start = start;
				stat.end = start;
				return stat;
			}

			if (lexer.tryConsume(TokenType.Symbol, "=")) {
				NumericForStat result = new NumericForStat();
				result.start = start;
				result.var = firstVar[1].value;
				result.low = expr();
				lexer.consume(TokenType.Symbol, ",");
				result.high = expr();
				if (lexer.tryConsume(TokenType.Symbol, ",")) {
					result.step = expr();
				}
				result.body = block();
				result.end = lexer.last.get;
				return result;
			}
			else {
				ForeachStat result = new ForeachStat();
				result.start = start;
				result.vars ~= firstVar[1].value;
				if (lexer.tryConsume(TokenType.Symbol, ",")) {
					result.vars ~= nameList();
				}
				lexer.consume(TokenType.Keyword, "in");
				result.iter = tupleExpr();
				result.body = block();
				result.end = lexer.last.get;
				return result;
			}
		}
		else if (lexer.tryConsume(TokenType.Keyword, "local")) {
			if (lexer.tryConsume(TokenType.Keyword, "function")) {
				FunctionDeclarationStat result = new FunctionDeclarationStat();
				result.start = lexer.last.get;
				auto name = lexer.consume(TokenType.Identifier);
				if (name[0]) {
					result.key = name[1].value;
				}
				else {
					result.key = "";
				}

				FunctionExpr fn = functionBody();
				fn.start = result.start;
				result.value = fn;
				result.end = lexer.last.get;
				return result;
			}
			else {
				DeclarationStat result = new DeclarationStat();
				result.start = lexer.last.get;
				result.keys = nameList();
				if (lexer.tryConsume(TokenType.Symbol, "=")) {
					result.values = tupleExpr();
				}
				result.end = lexer.last.get;
				return result;
			}
		}
		else if (lexer.tryConsume(TokenType.Keyword, "function")) {
			AssignStat result = new AssignStat();
			result.start = lexer.last.get;
			auto name = lexer.consume(TokenType.Identifier);
			LvalueExpr key = null;
			if (name[0]) {
				VariableExpr var = new VariableExpr();
				var.start = name[1];
				var.name = name[1].value;
				var.end = name[1];
				key = var;
			}
			else {
				key = new ErrorExpr();
			}

			while (lexer.tryConsume(TokenType.Symbol, ".")) {
				auto indexName = lexer.consume(TokenType.Identifier);
				if (indexName[0]) {
					IndexExpr var = new IndexExpr();
					var.start = key.start;
					StringExpr str = new StringExpr;
					str.start = indexName[1];
					str.value = indexName[1].value;
					str.end = indexName[1];
					var.base = key;
					var.key = str;
					var.end = lexer.last.get;
					key = var;
				}
				else {
					key = new ErrorExpr();
					break;
				}
			}

			bool namecall = false;

			if (lexer.tryConsume(TokenType.Symbol, ":")) {
				auto indexName = lexer.consume(TokenType.Identifier);
				if (indexName[0]) {
					IndexExpr var = new IndexExpr();
					var.start = key.start;
					StringExpr str = new StringExpr;
					str.start = indexName[1];
					str.value = indexName[1].value;
					str.end = indexName[1];
					var.base = key;
					var.key = str;
					var.end = lexer.last.get;
					key = var;
					namecall = true;
				}
				else {
					key = new ErrorExpr();
				}
			}

			result.keys ~= key;

			FunctionExpr fn = functionBody();
			fn.start = result.start;
			result.values ~= fn;
			result.end = lexer.last.get;
			if (namecall) {
				fn.args = "self" ~ fn.args;
			}
			return result;
		}
		else if (lexer.tryConsume(TokenType.Keyword, "return")) {
			ReturnStat result = new ReturnStat();
			result.start = lexer.last.get;
			result.values = tupleExpr();
			result.end = lexer.last.get;
			return result;
		}
		else if (lexer.tryConsume(TokenType.Keyword, "break")) {
			AtomicStat result = new AtomicStat();
			result.start = lexer.last.get;
			result.type = AtomicStatType.Break;
			result.end = lexer.last.get;
			return result;
		}
		else {
			lexer.save();

			Expr base = expr();
			if (cast(LvalueExpr)base && (lexer.isNext(TokenType.Symbol, ",") || lexer.isNext(TokenType.Symbol, "="))) {
				lexer.discard();
				LvalueExpr[] keys;
				keys ~= cast(LvalueExpr)base;
				while (lexer.tryConsume(TokenType.Symbol, ",")) {
					Expr k = expr();
					if (auto lvalue = cast(LvalueExpr)k) {
						keys ~= lvalue;
					}
					else {
						Diagnostic err;
						err.message = "expected lvalue expression";
						err.type = DiagnosticType.Error;
						err.add(k.start, k.end);
						*diagnostics ~= err;
					}
				}
				lexer.consume!(No.toConsume, "to separate keys from values in assignment statement")(TokenType.Symbol, "=");
				AssignStat res = new AssignStat;
				res.start = base.start;
				res.keys = keys;
				res.values = tupleExpr();
				res.end = lexer.last.get;
				return res;
			}
			else if (auto call = cast(CallExpr)base) {
				lexer.discard();
				ExprStat res = new ExprStat;
				res.start = base.start;
				res.end = base.end;
				res.expr = base;
				return res;
			}

			lexer.restore();

			Diagnostic err;
			err.message = "expected statement";
			err.type = DiagnosticType.Error;
			err.add(lexer.next());
			*diagnostics ~= err;

			ErrorStat result = new ErrorStat();
			result.start = lexer.last.get;
			result.end = lexer.last.get;
			return result;
		}
	}

	private Expr atom() {
		if (lexer.isNext(TokenType.Number)) {
			const Token token = lexer.next();
			NumberExpr result = new NumberExpr();
			result.start = token;
			result.value = token.numValue;
			result.end = token;
			return result;
		}
		else if (lexer.isNext(TokenType.String)) {
			const Token token = lexer.next();
			StringExpr result = new StringExpr();
			result.start = token;
			result.value = token.value;
			result.end = token;
			return result;
		}
		else if (lexer.isNext(TokenType.Identifier)) {
			const Token token = lexer.next();
			VariableExpr result = new VariableExpr();
			result.start = token;
			result.name = token.value;
			result.end = token;
			return result;
		}
		else if (lexer.tryConsume(TokenType.Keyword, "nil")) {
			AtomicExpr result = new AtomicExpr();
			result.start = lexer.last.get;
			result.type = AtomicExprType.Nil;
			result.end = lexer.last.get;
			return result;
		}
		else if (lexer.tryConsume(TokenType.Keyword, "false")) {
			AtomicExpr result = new AtomicExpr();
			result.start = lexer.last.get;
			result.type = AtomicExprType.False;
			result.end = lexer.last.get;
			return result;
		}
		else if (lexer.tryConsume(TokenType.Keyword, "true")) {
			AtomicExpr result = new AtomicExpr();
			result.start = lexer.last.get;
			result.type = AtomicExprType.True;
			result.end = lexer.last.get;
			return result;
		}
		else if (lexer.tryConsume(TokenType.Symbol, "...")) {
			AtomicExpr result = new AtomicExpr();
			result.start = lexer.last.get;
			result.type = AtomicExprType.VariadicTuple;
			result.end = lexer.last.get;
			return result;
		}
		else if (lexer.tryConsume(TokenType.Keyword, "function")) {
			const start = lexer.last.get;
			FunctionExpr result = functionBody();
			result.start = start;
			return result;
		}
		else if (lexer.tryConsume(TokenType.Symbol, "{")) {
			TableExpr result = new TableExpr();
			result.start = lexer.last.get;

			while (!lexer.eof && !lexer.isNext(TokenType.Symbol, "}")) {
				if (lexer.tryConsume(TokenType.Symbol, "[")) {
					TableField field;
					field.key = expr();
					lexer.consume!(No.toConsume, "to close key component of table field")(TokenType.Symbol, "]");
					lexer.consume(TokenType.Symbol, "=");
					field.value = expr();
					result.fields ~= cast(FieldEntry)field;
				}
				else if (lexer.isNext(TokenType.Identifier)) {
					lexer.save();
					const keyToken = lexer.next();
					if (lexer.isNext(TokenType.Symbol, "=")) {
						lexer.discard();
						TableField field;
						StringExpr key = new StringExpr();
						key.start = keyToken;
						key.end = keyToken;
						key.value = keyToken.value;
						field.key = key;
						lexer.consume(TokenType.Symbol, "=");
						field.value = expr();
						result.fields ~= cast(FieldEntry)field;
					}
					else {
						lexer.restore();
						result.fields ~= cast(FieldEntry)expr();
					}
				}
				else {
					result.fields ~= cast(FieldEntry)expr();
				}

				if (!lexer.tryConsume(TokenType.Symbol, ";") && !lexer.tryConsume(TokenType.Symbol, ",")) {
					break;
				}
			}

			lexer.consume!(No.toConsume, "to close table constructor")(TokenType.Symbol, "}");

			result.end = lexer.last.get;
			return result;
		}
		else {
			Diagnostic err;
			err.message = "expected expression";
			err.type = DiagnosticType.Error;
			err.add(lexer.next());
			*diagnostics ~= err;

			ErrorExpr result = new ErrorExpr();
			result.start = lexer.last.get;
			result.end = lexer.last.get;
			return result;
		}
	}

	private Nullable!PrefixParselet nextPrefixOp() {
		if (lexer.isNext(TokenType.Keyword) || lexer.isNext(TokenType.Symbol)) {
			const op = lexer.peek().value;
			foreach (opStr; prefixOps.byKey) {
				if (opStr == op) return prefixOps[opStr].nullable;
			}
		}
		return Nullable!PrefixParselet();
	}

	private Nullable!(Tuple!(uint, InfixParselet)) nextInfixOp() {
		if (lexer.isNext(TokenType.Keyword) || lexer.isNext(TokenType.Symbol)) {
			const op = lexer.peek().value;
			foreach (pair; infixOps.byKey) {
				if (pair[1] == op) return tuple(pair[0], infixOps[pair]).nullable;
			}
		}
		return Nullable!(Tuple!(uint, InfixParselet))();
	}

	private Expr expr(uint precedence = 0) {
		Expr left;

		const prefixOp = nextPrefixOp();
		if (prefixOp.isNull) {
			left = atom();
		}
		else {
			left = prefixOp.get()(lexer.next());
		}

		bool changes = true;
		outer: while (changes) {
			changes = false;

			auto infixOp = nextInfixOp();
			while (!infixOp.isNull && infixOp.get[0] > precedence) {
				if (infixOp.get[0] >= prefixexpThreshold && !cast(PrefixExpr)left) {
					break outer;
				}

				changes = true;
				left = infixOp.get[1](left, lexer.next());

				infixOp = nextInfixOp();
			}

			// call precedence = 90
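			// This handles Lua's sugared call forms `f "string"` and `f { table }`,
			// which take a single string or table constructor as their argument.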
			if ((lexer.isNext(TokenType.Symbol, "{") || lexer.isNext(TokenType.String)) && 90 > precedence) {
				if (!cast(PrefixExpr)left) break outer;

				changes = true;
				CallExpr callExpr = new CallExpr();
				callExpr.start = left.start;
				callExpr.base = left;
				callExpr.args ~= atom();
				callExpr.end = lexer.last.get;
				left = callExpr;
			}
		}

		return left;
	}

	/** Parse the top-level block */
	Block toplevel() {
		Block result = new Block();
		result.start = lexer.peek();
		while (!lexer.eof) {
			result.body ~= stat();
			lexer.tryConsume(TokenType.Symbol, ";");
		}

		result.end = lexer.last.get(result.start);
		return result;
	}

}

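// A small additional check (sketch) of the public entry point: toplevel() should
// collect one Stat per statement in the chunk.
unittest {
	Diagnostic[] dg;
	const source = q"(
		x = 1
		return x
	)";
	Lexer lexer = new Lexer(source, dg);
	Parser parser = new Parser(lexer, dg);

	Block b = parser.toplevel();
	assert(b.body.length == 2);
	assert(cast(AssignStat)b.body[0]);
	assert(cast(ReturnStat)b.body[1]);

	assert(dg.length == 0);
}
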
unittest {
	Diagnostic[] dg;
	const source = q"(
		x, y().b = 3, 4, 6
	)";
	Lexer lexer = new Lexer(source, dg);
	Parser parser = new Parser(lexer, dg);

	AssignStat a = cast(AssignStat)parser.stat();
	assert(a);
	assert(a.keys.length == 2);
	assert(a.values.length == 3);

	IndexExpr b = cast(IndexExpr)a.keys[1];
	assert(b);

	StringExpr c = cast(StringExpr)b.key;
	assert(c);
	assert(c.value == "b");

	CallExpr d = cast(CallExpr)b.base;
	assert(d);
	assert(d.args == []);

	VariableExpr e = cast(VariableExpr)a.keys[0];
	assert(e);
	assert(e.name == "x");

	VariableExpr f = cast(VariableExpr)d.base;
	assert(f);
	assert(f.name == "y");

	assert(dg.length == 0);
}

unittest {
	Diagnostic[] dg;
	const source = q"(
		for x, y in x, y do
		end
	)";
	Lexer lexer = new Lexer(source, dg);
	Parser parser = new Parser(lexer, dg);

	ForeachStat a = cast(ForeachStat)parser.stat();
	assert(a);
	assert(a.vars.length == 2);
	assert(a.iter.length == 2);

	assert(dg.length == 0);
}

unittest {
	Diagnostic[] dg;
	const source = q"(
		for x, y in x do
		end
	)";
	Lexer lexer = new Lexer(source, dg);
	Parser parser = new Parser(lexer, dg);

	ForeachStat a = cast(ForeachStat)parser.stat();
	assert(a);
	assert(a.vars.length == 2);
	assert(a.iter.length == 1);

	assert(dg.length == 0);
}

unittest {
	Diagnostic[] dg;
	const source = q"(
		for x in x do
		end
	)";
	Lexer lexer = new Lexer(source, dg);
	Parser parser = new Parser(lexer, dg);

	ForeachStat a = cast(ForeachStat)parser.stat();
	assert(a);
	assert(a.vars.length == 1);
	assert(a.iter.length == 1);

	assert(dg.length == 0);
}

unittest {
	Diagnostic[] dg;
	const source = q"(
		for x = 1, 2, 3 do
		end
	)";
	Lexer lexer = new Lexer(source, dg);
	Parser parser = new Parser(lexer, dg);

	NumericForStat a = cast(NumericForStat)parser.stat();
	assert(a);
	assert(a.var == "x");
	assert(!a.step.isNull);

	assert(dg.length == 0);
}

unittest {
	Diagnostic[] dg;
	const source = q"(
		for x = 1, 2 do
		end
	)";
	Lexer lexer = new Lexer(source, dg);
	Parser parser = new Parser(lexer, dg);

	NumericForStat a = cast(NumericForStat)parser.stat();
	assert(a);
	assert(a.step.isNull);

	assert(dg.length == 0);
}
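
// Sketch: `local` declarations should record their names and initialisers in the
// `keys` and `values` fields assigned by stat() above.
unittest {
	Diagnostic[] dg;
	const source = "local x, y = 1, 2";
	Lexer lexer = new Lexer(source, dg);
	Parser parser = new Parser(lexer, dg);

	DeclarationStat a = cast(DeclarationStat)parser.stat();
	assert(a);
	assert(a.keys == ["x", "y"]);
	assert(a.values.length == 2);

	assert(dg.length == 0);
}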

unittest {
	Diagnostic[] dg;
	const source = q"(
		if true then
		else
		end
	)";
	Lexer lexer = new Lexer(source, dg);
	Parser parser = new Parser(lexer, dg);

	IfStat a = cast(IfStat)parser.stat();
	assert(a);
	assert(a.entries.length == 1);
	assert(!a.elseBody.isNull);

	assert(dg.length == 0);
}

unittest {
	Diagnostic[] dg;
	const source = q"(
		if true then
		end
	)";
	Lexer lexer = new Lexer(source, dg);
	Parser parser = new Parser(lexer, dg);

	IfStat a = cast(IfStat)parser.stat();
	assert(a);
	assert(a.entries.length == 1);
	assert(a.elseBody.isNull);

	assert(dg.length == 0);
}

unittest {
	Diagnostic[] dg;
	const source = q"(
		if true then
		elseif false then
		end
	)";
	Lexer lexer = new Lexer(source, dg);
	Parser parser = new Parser(lexer, dg);

	IfStat a = cast(IfStat)parser.stat();
	assert(a);
	assert(a.entries.length == 2);
	assert(a.elseBody.isNull);

	assert(dg.length == 0);
}

unittest {
	Diagnostic[] dg;
	const source = q"(
		if true then
		elseif false then
		else
		end
	)";
	Lexer lexer = new Lexer(source, dg);
	Parser parser = new Parser(lexer, dg);

	IfStat a = cast(IfStat)parser.stat();
	assert(a);
	assert(a.entries.length == 2);
	assert(!a.elseBody.isNull);

	assert(dg.length == 0);
}

unittest {
	Diagnostic[] dg;
	const source = "repeat until true";
	Lexer lexer = new Lexer(source, dg);
	Parser parser = new Parser(lexer, dg);

	RepeatStat a = cast(RepeatStat)parser.stat();
	assert(a);
	assert(cast(AtomicExpr)a.endCond);

	assert(dg.length == 0);
}

unittest {
	Diagnostic[] dg;
	const source = "while true do end";
	Lexer lexer = new Lexer(source, dg);
	Parser parser = new Parser(lexer, dg);

	WhileStat a = cast(WhileStat)parser.stat();
	assert(a);
	assert(cast(AtomicExpr)a.cond);

	assert(dg.length == 0);
}

unittest {
	Diagnostic[] dg;
	const source = "(a)(b)";
	Lexer lexer = new Lexer(source, dg);
	Parser parser = new Parser(lexer, dg);
	parser.expr();
	assert(dg.length == 0);
}

unittest {
	Diagnostic[] dg;
	const source = "(a)\n(b)";
	Lexer lexer = new Lexer(source, dg);
	Parser parser = new Parser(lexer, dg);
	parser.expr();
	assert(dg.length == 1);
	assert(dg[0].quickfix.length == 2);
	assert(dg[0].quickfix[0].apply(source) == "(a)(b)");
	assert(dg[0].quickfix[1].apply(source) == "(a);\n(b)");
}

unittest {
	Diagnostic[] dg;
	Lexer lexer = new Lexer(q"(
		{
			a = 2;
			["x"] = 3,
			a;
		}

		{ a }
	)", dg);
	Parser parser = new Parser(lexer, dg);

	TableExpr a = cast(TableExpr)parser.expr();

	assert(a);
	assert(a.fields.length == 3);
	assert(a.fields[0].peek!TableField !is null);
	assert(a.fields[1].peek!TableField !is null);
	assert(a.fields[2].peek!Expr !is null);
	assert(cast(StringExpr)a.fields[0].get!TableField.key);
	assert(cast(NumberExpr)a.fields[0].get!TableField.value);
	assert(cast(StringExpr)a.fields[1].get!TableField.key);
	assert(cast(NumberExpr)a.fields[1].get!TableField.value);
	assert(cast(VariableExpr)a.fields[2].get!Expr);

	TableExpr b = cast(TableExpr)parser.expr();
	assert(b);
	assert(b.fields.length == 1);
	assert(b.fields[0].peek!Expr !is null);
	assert(cast(VariableExpr)b.fields[0].get!Expr);

	assert(dg.length == 0);
}

unittest {
	Diagnostic[] dg;
	Lexer lexer = new Lexer(q"(
		a:b(c)

		a(b)

		a.b.c(d)

		a["b"]["c"](d)

		a "b"

		a { b }
	)", dg);
	Parser parser = new Parser(lexer, dg);

	CallExpr a = cast(CallExpr)parser.expr();
	assert(a);
	assert(!a.method.isNull);
	assert(a.method == "b");
	assert(a.args.length == 1);

	CallExpr b = cast(CallExpr)parser.expr();
	assert(b);
	assert(b.method.isNull);
	assert(b.args.length == 1);

	CallExpr c = cast(CallExpr)parser.expr();
	assert(c);
	assert(c.method.isNull);
	assert(c.args.length == 1);
	assert(cast(IndexExpr)c.base);

	CallExpr d = cast(CallExpr)parser.expr();
	assert(d);
	assert(d.method.isNull);
	assert(d.args.length == 1);
	assert(cast(IndexExpr)d.base);

	CallExpr e = cast(CallExpr)parser.expr();
	assert(e);
	assert(e.method.isNull);
	assert(e.args.length == 1);
	assert(cast(VariableExpr)e.base);
	assert(cast(StringExpr)e.args[0]);

	CallExpr f = cast(CallExpr)parser.expr();
	assert(f);
	assert(f.method.isNull);
	assert(f.args.length == 1);
	assert(cast(VariableExpr)f.base);
	assert(cast(TableExpr)f.args[0]);

	assert(dg.length == 0);
}
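
// Sketch: parameter lists with a trailing `...` should set the variadic flag on
// the resulting FunctionExpr, alongside the named parameters.
unittest {
	Diagnostic[] dg;
	const source = "function(a, b, ...) end";
	Lexer lexer = new Lexer(source, dg);
	Parser parser = new Parser(lexer, dg);

	FunctionExpr a = cast(FunctionExpr)parser.expr();
	assert(a);
	assert(a.args == ["a", "b"]);
	assert(a.variadic);

	assert(dg.length == 0);
}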

unittest {
	Diagnostic[] dg;
	Lexer lexer = new Lexer(q"(
		2 + 3 * 4

		2 + 3 + 4

		2 * 3 + 4

		2 ^ 3 ^ 4

		(2 + 3) * 4
	)", dg);
	Parser parser = new Parser(lexer, dg);

	BinaryExpr a = cast(BinaryExpr)parser.expr();
	assert(a);
	assert(a.op == BinaryOperation.Add);
	assert(cast(BinaryExpr)a.rhs);

	BinaryExpr b = cast(BinaryExpr)parser.expr();
	assert(b);
	assert(cast(BinaryExpr)b.lhs); // test left-associativity

	BinaryExpr c = cast(BinaryExpr)parser.expr();
	assert(c);
	assert(c.op == BinaryOperation.Add);
	assert(cast(BinaryExpr)c.lhs);

	BinaryExpr d = cast(BinaryExpr)parser.expr();
	assert(d);
	assert(d.op == BinaryOperation.Exp);
	assert(cast(BinaryExpr)d.rhs); // test right-associativity

	BinaryExpr e = cast(BinaryExpr)parser.expr();
	assert(e);
	assert(e.op == BinaryOperation.Mul);
	assert(cast(BracketExpr)e.lhs);
	assert(cast(BinaryExpr)(cast(BracketExpr)e.lhs).expr);

	assert(dg.length == 0);
}
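
// Sketch: unary operators (precedence 70) should bind tighter than `and`
// (precedence 20), so `not true and false` parses as `(not true) and false`.
unittest {
	Diagnostic[] dg;
	const source = "not true and false";
	Lexer lexer = new Lexer(source, dg);
	Parser parser = new Parser(lexer, dg);

	BinaryExpr a = cast(BinaryExpr)parser.expr();
	assert(a);
	assert(a.op == BinaryOperation.And);

	UnaryExpr b = cast(UnaryExpr)a.lhs;
	assert(b);
	assert(b.op == UnaryOperation.Not);
	assert(cast(AtomicExpr)b.expr);

	assert(dg.length == 0);
}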