-
Notifications
You must be signed in to change notification settings - Fork 3.1k
/
Parsers.scala
3584 lines (3262 loc) · 133 KB
/
Parsers.scala
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
/*
* Scala (https://www.scala-lang.org)
*
* Copyright EPFL and Lightbend, Inc.
*
* Licensed under Apache License 2.0
* (http://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
//todo: allow infix type patterns
//todo verify when stableId's should be just plain qualified type ids
package scala.tools.nsc
package ast.parser
import scala.annotation.tailrec
import scala.collection.mutable, mutable.ListBuffer
import scala.reflect.internal.{ModifierFlags => Flags, Precedence}
import scala.reflect.internal.util.{FreshNameCreator, ListOfNil, Position, SourceFile}
import Tokens._
import scala.tools.nsc.Reporting.WarningCategory
/** Historical note: JavaParsers started life as a direct copy of Parsers
* but at a time when that Parsers had been replaced by a different one.
* Later it was dropped and the original Parsers reinstated, leaving us with
* massive duplication between Parsers and JavaParsers.
*
* This trait and the similar one for Scanners/JavaScanners represents
* the beginnings of a campaign against this latest incursion by Cutty
* McPastington and his army of very similar soldiers.
*/
/** Shared parsing infrastructure for Parsers and JavaParsers: literal helpers
* and the abstract grouping/separator combinators used by both.
*/
trait ParsersCommon extends ScannersCommon { self =>
val global : Global
// the use of currentUnit in the parser should be avoided as it might
// cause unexpected behaviour when you work with two units at the
// same time; use Parser.unit instead
import global.{currentUnit => _, _}
/** Wrap a constant value in a `Literal` tree. */
def newLiteral(const: Any) = Literal(Constant(const))
/** A synthetic unit literal, used as the fallback for empty/missing groupings. */
def literalUnit = gen.mkSyntheticUnit()
/** This is now an abstract class, only to work around the optimizer:
* methods in traits are never inlined.
*/
abstract class ParserCommon {
val in: ScannerCommon
def deprecationWarning(off: Offset, msg: String, since: String): Unit
def accept(token: Token): Int
/** Methods inParensOrError and similar take a second argument which, should
* the next token not be the expected opener (e.g. LPAREN) will be returned
* instead of the contents of the groupers. However in all cases accept(LPAREN)
* will be called, so a parse error will still result. If the grouping is
* optional, in.token should be tested before calling these methods.
*
* Skip trailing comma is pushed down to scanner because this abstract parser
* doesn't have token info.
*/
// NOTE(review): `left + 1` assumes each closing token id is its opener's id
// plus one (LPAREN/RPAREN, LBRACE/RBRACE, LBRACKET/RBRACKET) -- confirm in Tokens.
@inline final def inGroupers[T](left: Token)(body: => T): T = {
accept(left)
try body
finally {
in.skipTrailingComma(left + 1)
accept(left + 1)
}
}
@inline final def inParens[T](body: => T): T = inGroupers(LPAREN)(body)
@inline final def inParensOrError[T](body: => T, alt: T): T = if (in.token == LPAREN) inParens(body) else { accept(LPAREN) ; alt }
@inline final def inParensOrUnit[T](body: => Tree): Tree = inParensOrError(body, literalUnit)
@inline final def inParensOrNil[T](body: => List[T]): List[T] = inParensOrError(body, Nil)
@inline final def inBraces[T](body: => T): T = inGroupers(LBRACE)(body)
@inline final def inBracesOrError[T](body: => T, alt: T): T = if (in.token == LBRACE) inBraces(body) else { accept(LBRACE) ; alt }
@inline final def inBracesOrNil[T](body: => List[T]): List[T] = inBracesOrError(body, Nil)
@inline final def inBracesOrUnit[T](body: => Tree): Tree = inBracesOrError(body, literalUnit)
@inline final def dropAnyBraces[T](body: => T): T = if (in.token == LBRACE) inBraces(body) else body
@inline final def inBrackets[T](body: => T): T = inGroupers(LBRACKET)(body)
/** Creates an actual Parens node (only used during parsing.)
*/
@inline final def makeParens(body: => List[Tree]): Parens =
Parens(inParens(if (in.token == RPAREN) Nil else body))
/** {{{ { `sep` part } }}}. */
// Parses `part { separator part }`: at least one `part` is always consumed.
def tokenSeparated[T](separator: Token, part: => T): List[T] = {
val ts = new ListBuffer[T]
ts += part
while (in.token == separator) {
in.nextToken()
ts += part
}
ts.toList
}
/** {{{ { `sep` part } }}}. */
// Unlike tokenSeparated, parses zero or more `part`s, each preceded by `separator`.
def separatedToken[T](separator: Token, part: => T): List[T] = {
val ts = new ListBuffer[T]
while (in.token == separator) {
in.nextToken()
ts += part
}
ts.toList
}
/** {{{ tokenSeparated }}}, with the separator fixed to commas. */
@inline final def commaSeparated[T](part: => T): List[T] = tokenSeparated(COMMA, part)
}
}
/** Performs the following context-free rewritings:
*
* <ol>
* <li>
* Places all pattern variables in Bind nodes. In a pattern, for
* identifiers `x`:<pre>
* x => x @ _
* x:T => x @ (_ : T)</pre>
* </li>
* <li>Removes pattern definitions (PatDef's) as follows:
* If pattern is a simple (typed) identifier:<pre>
* <b>val</b> x = e ==> <b>val</b> x = e
* <b>val</b> x: T = e ==> <b>val</b> x: T = e</pre>
*
* if there are no variables in pattern<pre>
* <b>val</b> p = e ==> e match (case p => ())</pre>
*
* if there is exactly one variable in pattern<pre>
* <b>val</b> x_1 = e <b>match</b> (case p => (x_1))</pre>
*
* if there is more than one variable in pattern<pre>
* <b>val</b> p = e ==> <b>private synthetic val</b> t$ = e <b>match</b> (case p => (x_1, ..., x_N))
* <b>val</b> x_1 = t$._1
* ...
* <b>val</b> x_N = t$._N</pre>
* </li>
* <li>
* Removes function types as follows:<pre>
* (argtpes) => restpe ==> scala.Function_n[argtpes, restpe]</pre>
* </li>
* <li>
* Wraps naked case definitions in a match as follows:<pre>
* { cases } ==> (x => x.match {cases})<span style="font-family:normal;">, except when already argument to match</span></pre>
* </li>
* </ol>
*/
trait Parsers extends Scanners with MarkupParsers with ParsersCommon {
self =>
val global: Global
import global._
/** One pending entry on the operator stack used when parsing infix
* expressions: the left operand, the operator (with any type args),
* and the operator's source offset.
*/
case class OpInfo(lhs: Tree, operator: TermName, targs: List[Tree], offset: Offset) {
// Precedence is computed from the operator's name.
def precedence = Precedence(operator.toString)
}
/** A parser over a plain source file. Warnings are suppressed and errors
* throw MalformedInput instead of reporting through a reporter.
*/
class SourceFileParser(val source: SourceFile) extends Parser {
/** The parse starting point depends on whether the source file is self-contained:
* if not, the AST will be supplemented.
*/
def parseStartRule =
if (source.isSelfContained) () => compilationUnit()
else () => scriptBody()
def newScanner(): Scanner = new SourceFileScanner(source)
val in = newScanner()
in.init()
def unit = global.currentUnit
// suppress warnings; silent abort on errors
def warning(offset: Offset, msg: String, category: WarningCategory): Unit = ()
def deprecationWarning(offset: Offset, msg: String, since: String): Unit = ()
def syntaxError(offset: Offset, msg: String): Unit = throw new MalformedInput(offset, msg)
def incompleteInputError(msg: String): Unit = throw new MalformedInput(source.content.length - 1, msg)
object symbXMLBuilder extends SymbolicXMLBuilder(this, preserveWS = true) { // DEBUG choices
val global: self.global.type = self.global
}
/** the markup parser
* The first time this lazy val is accessed, we assume we were trying to parse an xml literal.
* The current position is recorded for later error reporting if it turns out
* that we don't have the xml library on the compilation classpath.
*/
private[this] lazy val xmlp = {
unit.encounteredXml(o2p(in.offset))
new MarkupParser(this, preserveWS = true)
}
def xmlLiteral() : Tree = xmlp.xLiteral
def xmlLiteralPattern() : Tree = xmlp.xLiteralPattern
}
/** A parser producing outlines only: the bodies of block expressions and
 *  templates are skipped over token-by-token instead of being parsed.
 */
class OutlineParser(source: SourceFile) extends SourceFileParser(source) {
  /** Consume a balanced `{ ... }` region (tracking nested braces, and letting
   *  the XML parser swallow XML literals, which may contain braces), then
   *  return `body` unchanged.
   */
  def skipBraces[T](body: T): T = {
    accept(LBRACE)
    var depth = 1
    while (depth > 0 && in.token != EOF) {
      in.token match {
        case XMLSTART => xmlLiteral()
        case LBRACE   => depth += 1; in.nextToken()
        case RBRACE   => depth -= 1; in.nextToken()
        case _        => in.nextToken()
      }
    }
    body
  }
  override def blockExpr(): Tree = skipBraces(EmptyTree)
  override def templateBody(isPre: Boolean) = skipBraces((noSelfType, EmptyTree.asList))
}
/** A parser over a compilation unit: reports diagnostics through the run's
* reporting machinery and supports "smart" reparsing with healed braces.
*/
class UnitParser(override val unit: global.CompilationUnit, patches: List[BracePatch]) extends SourceFileParser(unit.source) { uself =>
def this(unit: global.CompilationUnit) = this(unit, Nil)
override def newScanner() = new UnitScanner(unit, patches)
override def warning(offset: Offset, msg: String, category: WarningCategory): Unit =
runReporting.warning(o2p(offset), msg, category, site = "")
override def deprecationWarning(offset: Offset, msg: String, since: String): Unit =
// we cannot provide a `site` in the parser, there's no context telling us where we are
runReporting.deprecationWarning(o2p(offset), msg, since, site = "", origin = "")
// While smart parsing, syntax errors are buffered instead of reported so a
// brace-healed reparse can be attempted first (see smartParse).
private var smartParsing = false
@inline private def withSmartParsing[T](body: => T): T = {
val saved = smartParsing
smartParsing = true
try body
finally smartParsing = saved
}
def withPatches(patches: List[BracePatch]): UnitParser = new UnitParser(unit, patches)
// (offset, message) pairs buffered during smart parsing.
val syntaxErrors = new ListBuffer[(Int, String)]
def showSyntaxErrors() =
for ((offset, msg) <- syntaxErrors)
reporter.error(o2p(offset), msg)
override def syntaxError(offset: Offset, msg: String): Unit = {
if (smartParsing) syntaxErrors += ((offset, msg))
else reporter.error(o2p(offset), msg)
}
override def incompleteInputError(msg: String): Unit = {
val offset = source.content.length - 1
if (smartParsing) syntaxErrors += ((offset, msg))
else currentRun.parsing.incompleteInputError(o2p(offset), msg)
}
/** parse unit. If there are unbalanced braces,
* try to correct them and reparse.
*/
def smartParse(): Tree = withSmartParsing {
val firstTry = parse()
if (syntaxErrors.isEmpty) firstTry
else in.healBraces() match {
case Nil => showSyntaxErrors() ; firstTry
case patches => withPatches(patches).parse()
}
}
}
// Where an expression occurs; some constructs are only legal in some locations.
type Location = Int
final val Local: Location = 0
final val InBlock: Location = 1
final val InTemplate: Location = 2
// What kind of definition owns a type parameter clause being parsed.
type ParamOwner = Int
object ParamOwner {
final val Class = 0
final val Type = 1
final val TypeParam = 2 // unused
final val Def = 3
}
// These symbols may not yet be loaded (e.g. in the ide) so don't go
// through definitions to obtain the names.
lazy val ScalaValueClassNames = Seq(tpnme.AnyVal,
tpnme.Unit,
tpnme.Boolean,
tpnme.Byte,
tpnme.Short,
tpnme.Char,
tpnme.Int,
tpnme.Long,
tpnme.Float,
tpnme.Double)
import nme.raw
abstract class Parser extends ParserCommon { parser =>
val in: Scanner
def unit: CompilationUnit
def source: SourceFile
/** Scoping operator used to temporarily look into the future.
* Backs up scanner data before evaluating a block and restores it after.
*/
@inline final def lookingAhead[T](body: => T): T = {
val saved = new ScannerData {} copyFrom in
val seps = in.sepRegions
in.nextToken()
try body finally {
in.sepRegions = seps
in.copyFrom(saved)
}
}
/** Tree builder bound to this parser's unit and source. */
class ParserTreeBuilder extends TreeBuilder {
val global: self.global.type = self.global
def unit = parser.unit
def source = parser.source
}
val treeBuilder = new ParserTreeBuilder
import treeBuilder.{global => _, unit => _, source => _, fresh => _, _}
implicit def fresh: FreshNameCreator = unit.fresh
// Position factories: o2p builds an offset position, r2p a range position
// (the end defaulting to the last offset consumed, never before `start`).
def o2p(offset: Offset): Position = Position.offset(source, offset)
def r2p(start: Offset, mid: Offset, end: Offset): Position = rangePos(source, start, mid, end)
def r2p(start: Offset, mid: Offset): Position = r2p(start, mid, in.lastOffset max start)
def r2p(offset: Offset): Position = r2p(offset, offset)
/** whether a non-continuable syntax error has been seen */
private var lastErrorOffset : Int = -1
/** The types of the context bounds of type parameters of the surrounding class
*/
private var classContextBounds: List[Tree] = Nil
// Evaluate `op`, restoring the enclosing class's context bounds afterwards.
@inline private def savingClassContextBounds[T](op: => T): T = {
val saved = classContextBounds
try op
finally classContextBounds = saved
}
/** Are we inside the Scala package? Set for files that start with package scala
*/
private var inScalaPackage = false
private var currentPackage = ""
def resetPackage(): Unit = {
inScalaPackage = false
currentPackage = ""
}
private def inScalaRootPackage = inScalaPackage && currentPackage == "scala"
def parseStartRule: () => Tree
// Run `rule` against this parser and require that all input was consumed.
def parseRule[T](rule: this.type => T): T = {
val t = rule(this)
accept(EOF)
t
}
/** This is the general parse entry point.
*/
def parse(): Tree = parseRule(_.parseStartRule())
/** These are alternative entry points for repl, script runner, toolbox and parsing in macros.
*/
// Both parse a statement sequence rather than a full compilation unit.
def parseStats(): List[Tree] = parseRule(_.templateStats())
def parseStatsOrPackages(): List[Tree] = parseRule(_.templateOrTopStatSeq())
/** This is the parse entry point for code which is not self-contained, e.g.
* a script which is a series of template statements. They will be
* swaddled in Trees until the AST is equivalent to the one returned
* by compilationUnit().
*/
def scriptBody(): Tree = {
// remain backwards-compatible if -Xscript was set but not reasonably
settings.script.value match {
case null | "" => settings.script.value = "Main"
case _ =>
}
val stmts = parseStats()
/* If there is only a single object template in the file and it has a
* suitable main method, we will use it rather than building another object
* around it. Since objects are loaded lazily the whole script would have
* been a no-op, so we're not taking much liberty.
*/
def searchForMain(mainModuleName: Name): Tree = {
import PartialFunction.cond
/* Have to be fairly liberal about what constitutes a main method since
* nothing has been typed yet - for instance we can't assume the parameter
* type will look exactly like "Array[String]" as it could have been renamed
* via import, etc.
*/
def isMainMethod(t: Tree) = t match {
case DefDef(_, nme.main, Nil, List(_), _, _) => true
case _ => false
}
// A template extending (something syntactically named) App also counts.
def isApp(t: Tree) = t match {
case Template(parents, _, _) => parents.exists(cond(_) { case Ident(tpnme.App) => true })
case _ => false
}
// We allow only one main module.
var seenModule = false
var disallowed = EmptyTree: Tree
val newStmts = stmts.map {
case md @ ModuleDef(mods, name, template) if !seenModule && (isApp(template) || md.exists(isMainMethod)) =>
seenModule = true
// If we detect a main module with an arbitrary name, rename it to the expected name.
if (name == mainModuleName) md
else treeCopy.ModuleDef(md, mods, mainModuleName, template)
case md @ ModuleDef(_, _, _) => md
case cd @ ClassDef(_, _, _, _) => cd
case t @ Import(_, _) => t
case t =>
// If we see anything but the above, fail.
if (disallowed.isEmpty) disallowed = t
EmptyTree
}
if (seenModule && disallowed.isEmpty) makeEmptyPackage(0, newStmts)
else {
if (seenModule)
warning(disallowed.pos.point, "Script has a main object but statement is disallowed", WarningCategory.Other)
EmptyTree
}
}
// pick up object specified by `-Xscript Main`
def mainModule: Tree = settings.script.valueSetByUser.map(name => searchForMain(TermName(name))).getOrElse(EmptyTree)
/* Here we are building an AST representing the following source fiction,
* where `moduleName` is from -Xscript (defaults to "Main") and <stmts> are
* the result of parsing the script file.
*
* {{{
* object moduleName {
* def main(args: Array[String]): Unit =
* new AnyRef {
* stmts
* }
* }
* }}}
*/
def repackaged: Tree = {
// synthetic `def this() = super.<init>()`
val emptyInit = DefDef(
NoMods,
nme.CONSTRUCTOR,
Nil,
ListOfNil,
TypeTree(),
Block(List(Apply(gen.mkSuperInitCall, Nil)), literalUnit)
)
// def main
val mainParamType = AppliedTypeTree(Ident(tpnme.Array), List(Ident(tpnme.String)))
val mainParameter = List(ValDef(Modifiers(Flags.PARAM), nme.args, mainParamType, EmptyTree))
val mainDef = DefDef(NoMods, nme.main, Nil, List(mainParameter), scalaDot(tpnme.Unit), gen.mkAnonymousNew(stmts))
// object Main
val moduleName = TermName(settings.script.value)
val moduleBody = Template(atInPos(scalaAnyRefConstr) :: Nil, noSelfType, List(emptyInit, mainDef))
val moduleDef = ModuleDef(NoMods, moduleName, moduleBody)
// package <empty> { ... }
makeEmptyPackage(0, moduleDef :: Nil)
}
// either there is an entry point (a main method either detected or specified) or wrap it up
mainModule orElse repackaged
}
/* --------------- PLACEHOLDERS ------------------------------------------- */
/** The implicit parameters introduced by `_` in the current expression.
* Parameters appear in reverse order.
*/
var placeholderParams: List[ValDef] = Nil
/** The placeholderTypes introduced by `_` in the current type.
* Parameters appear in reverse order.
*/
var placeholderTypes: List[TypeDef] = Nil
// Run `op` with fresh placeholder lists; report any placeholders `op` left
// unbound, then restore the enclosing lists.
def checkNoEscapingPlaceholders[T](op: => T): T = {
val savedPlaceholderParams = placeholderParams
val savedPlaceholderTypes = placeholderTypes
placeholderParams = List()
placeholderTypes = List()
val res = op
placeholderParams match {
case vd :: _ =>
syntaxError(vd.pos, "unbound placeholder parameter", skipIt = false)
placeholderParams = List()
case _ =>
}
placeholderTypes match {
case td :: _ =>
syntaxError(td.pos, "unbound wildcard type", skipIt = false)
placeholderTypes = List()
case _ =>
}
placeholderParams = savedPlaceholderParams
placeholderTypes = savedPlaceholderTypes
res
}
// If `op` produced an applied type containing `_` wildcards, wrap it in an
// ExistentialTypeTree binding them; otherwise let them accumulate outward.
def placeholderTypeBoundary(op: => Tree): Tree = {
val savedPlaceholderTypes = placeholderTypes
placeholderTypes = List()
var t = op
if (!placeholderTypes.isEmpty && t.isInstanceOf[AppliedTypeTree]) {
val expos = t.pos
ensureNonOverlapping(t, placeholderTypes)
t = atPos(expos) { ExistentialTypeTree(t, placeholderTypes.reverse) }
placeholderTypes = List()
}
placeholderTypes = placeholderTypes ::: savedPlaceholderTypes
t
}
// True when `t` is (possibly through Typed/Annotated wrappers) the most
// recently introduced `_` placeholder parameter.
@tailrec
final def isWildcard(t: Tree): Boolean = t match {
case Ident(name1) => !placeholderParams.isEmpty && name1 == placeholderParams.head.name
case Typed(t1, _) => isWildcard(t1)
case Annotated(t1, _) => isWildcard(t1)
case _ => false
}
/* ------------- ERROR HANDLING ------------------------------------------- */
// Count of closing tokens assumed present during error recovery (see accept).
val assumedClosingParens = mutable.Map(RPAREN -> 0, RBRACKET -> 0, RBRACE -> 0)
private var inFunReturnType = false
@inline private def fromWithinReturnType[T](body: => T): T = {
val saved = inFunReturnType
inFunReturnType = true
try body
finally inFunReturnType = saved
}
/** Skip ahead to `targetToken`, a statement separator at grouping depth zero,
* or EOF, tracking paren/brace nesting to stop at a plausible resync point.
*/
protected def skip(targetToken: Token): Unit = {
var nparens = 0
var nbraces = 0
while (true) {
in.token match {
case EOF =>
return
case SEMI =>
if (nparens == 0 && nbraces == 0) return
case NEWLINE =>
if (nparens == 0 && nbraces == 0) return
case NEWLINES =>
if (nparens == 0 && nbraces == 0) return
case RPAREN =>
// Note: unlike RBRACE below, an unmatched RPAREN does not stop the
// skip; the count may go negative.
nparens -= 1
case RBRACE =>
if (nbraces == 0) return
nbraces -= 1
case LPAREN =>
nparens += 1
case LBRACE =>
nbraces += 1
case _ =>
}
if (targetToken == in.token && nparens == 0 && nbraces == 0) return
in.nextToken()
}
}
// Abstract reporting hooks; concrete parsers decide whether to buffer,
// report, or throw (see SourceFileParser / UnitParser).
def warning(offset: Offset, msg: String, category: WarningCategory): Unit
def incompleteInputError(msg: String): Unit
def syntaxError(offset: Offset, msg: String): Unit
private def syntaxError(pos: Position, msg: String, skipIt: Boolean): Unit =
syntaxError(pos pointOrElse in.offset, msg, skipIt)
def syntaxError(msg: String, skipIt: Boolean): Unit =
syntaxError(in.offset, msg, skipIt)
// Report at most one error per advancing offset; optionally skip to a resync point.
def syntaxError(offset: Offset, msg: String, skipIt: Boolean): Unit = {
if (offset > lastErrorOffset) {
syntaxError(offset, msg)
lastErrorOffset = in.offset // no more errors on this token.
}
if (skipIt)
skip(UNDEF)
}
def warning(msg: String, category: WarningCategory): Unit = warning(in.offset, msg, category)
// At EOF the error is "incomplete input" (the repl uses this to keep reading).
def syntaxErrorOrIncomplete(msg: String, skipIt: Boolean): Unit = {
if (in.token == EOF)
incompleteInputError(msg)
else
syntaxError(in.offset, msg, skipIt)
}
// Report and then yield `and`, for use in expression position.
def syntaxErrorOrIncompleteAnd[T](msg: String, skipIt: Boolean)(and: T): T = {
syntaxErrorOrIncomplete(msg, skipIt)
and
}
/** Render the canonical "X expected but Y found." diagnostic text. */
def expectedMsgTemplate(exp: String, fnd: String) = exp + " expected but " + fnd + " found."
/** Message for a missing `token`; when sitting on a newline the "found"
* part is omitted.
*/
def expectedMsg(token: Token): String =
in.token match {
case NEWLINE | NEWLINES => s"${token2string(token)} expected."
case actual => expectedMsgTemplate(token2string(token), token2string(actual))
}
/** Consume one token of the specified type, or signal an error if it is not there. */
def accept(token: Token): Offset = {
val offset = in.offset
if (in.token != token) {
syntaxErrorOrIncomplete(expectedMsg(token), skipIt = false)
// For a missing closer, either assume it was intended or skip ahead to it.
// NOTE(review): presumably parenBalance < -assumedClosingParens means more
// closers are pending than were opened -- confirm against Scanner.parenBalance.
if ((token == RPAREN || token == RBRACE || token == RBRACKET))
if (in.parenBalance(token) + assumedClosingParens(token) < 0)
assumedClosingParens(token) += 1
else
skip(token)
else
skip(UNDEF)
}
if (in.token == token) in.nextToken()
offset
}
/** {{{
* semi = nl {nl} | `;`
* nl = `\n` // where allowed
* }}}
*/
def acceptStatSep(): Unit = in.token match {
case NEWLINE | NEWLINES => in.nextToken()
case _ => accept(SEMI)
}
// Accept a statement separator unless the statement sequence has ended.
def acceptStatSepOpt() =
if (!isStatSeqEnd)
acceptStatSep()
// Placeholder trees positioned at the current offset, used for error recovery.
def errorTypeTree = setInPos(TypeTree() setType ErrorType)
def errorTermTree = setInPos(newLiteral(null))
def errorPatternTree = setInPos(Ident(nme.WILDCARD))
/** Check that type parameter is not by name or repeated. */
def checkNotByNameOrVarargs(tpt: Tree) = {
if (treeInfo isByNameParamType tpt)
syntaxError(tpt.pos, "no by-name parameter type allowed here", skipIt = false)
else if (treeInfo isRepeatedParamType tpt)
syntaxError(tpt.pos, "no * parameter type allowed here", skipIt = false)
}
/* -------------- TOKEN CLASSES ------------------------------------------- */
// Predicates classifying the token currently under the scanner cursor.
def isModifier: Boolean = in.token match {
case ABSTRACT | FINAL | SEALED | PRIVATE |
PROTECTED | OVERRIDE | IMPLICIT | LAZY => true
case _ => false
}
// Soft modifiers (e.g. `infix`) are plain identifiers recognized only under -Xsource:3.
def isSoftModifier: Boolean =
currentRun.isScala3.value && in.token == IDENTIFIER && softModifierNames.contains(in.name)
/** Is the current token a soft modifier in a position where such a modifier is allowed? */
def isValidSoftModifier: Boolean =
isSoftModifier && {
val mod = in.name
lookingAhead {
while (in.token == NEWLINE || isModifier || isSoftModifier) in.nextToken()
in.token match {
case CLASS | CASECLASS => true
case DEF | TRAIT | TYPE => mod == nme.infix
case _ => false
}
}
}
def isAnnotation: Boolean = in.token == AT
def isLocalModifier: Boolean = in.token match {
case ABSTRACT | FINAL | SEALED | IMPLICIT | LAZY => true
case _ => false
}
def isTemplateIntro: Boolean = in.token match {
case OBJECT | CASEOBJECT | CLASS | CASECLASS | TRAIT => true
case _ => false
}
def isDclIntro: Boolean = in.token match {
case VAL | VAR | DEF | TYPE => true
case _ => false
}
def isDefIntro = isTemplateIntro || isDclIntro
def isNumericLit: Boolean = in.token match {
case INTLIT | LONGLIT | FLOATLIT | DOUBLELIT => true
case _ => false
}
def isIdentExcept(except: Name) = isIdent && in.name != except
def isIdentOf(name: Name) = isIdent && in.name == name
// "Raw" identifiers are unquoted (not backquoted) IDENTIFIER tokens.
def isUnaryOp = isRawIdent && raw.isUnary(in.name)
def isRawStar = isRawIdent && in.name == raw.STAR
def isRawBar = isRawIdent && in.name == raw.BAR
def isRawIdent = in.token == IDENTIFIER
def isWildcardType = in.token == USCORE || isScala3WildcardType
def isScala3WildcardType = isRawIdent && in.name == raw.QMARK
def checkQMarkDefinition() =
if (isScala3WildcardType)
syntaxError(in.offset, "using `?` as a type name requires backticks.")
def checkKeywordDefinition() =
if (isRawIdent && scala3Keywords.contains(in.name))
deprecationWarning(in.offset,
s"Wrap `${in.name}` in backticks to use it as an identifier, it will become a keyword in Scala 3.", "2.13.7")
def isIdent = in.token == IDENTIFIER || in.token == BACKQUOTED_IDENT
def isMacro = in.token == IDENTIFIER && in.name == nme.MACROkw
def isLiteralToken(token: Token) = token match {
case CHARLIT | INTLIT | LONGLIT | FLOATLIT | DOUBLELIT |
STRINGLIT | INTERPOLATIONID | SYMBOLLIT | TRUE | FALSE | NULL => true
case _ => false
}
def isLiteral = isLiteralToken(in.token)
// Can `token` begin an expression? A valid soft modifier never can.
def isExprIntroToken(token: Token): Boolean =
!isValidSoftModifier && (isLiteralToken(token) || (token match {
case IDENTIFIER | BACKQUOTED_IDENT |
THIS | SUPER | IF | FOR | NEW | USCORE | TRY | WHILE |
DO | RETURN | THROW | LPAREN | LBRACE | XMLSTART => true
case _ => false
}))
def isExprIntro: Boolean = isExprIntroToken(in.token)
def isTypeIntroToken(token: Token): Boolean = (isLiteralToken(token) && token != NULL) || (token match {
case IDENTIFIER | BACKQUOTED_IDENT | THIS |
SUPER | USCORE | LPAREN | AT => true
case _ => false
})
def isStatSeqEnd = in.token == RBRACE || in.token == EOF
def isCaseDefEnd = in.token == RBRACE || in.token == CASE || in.token == EOF
def isStatSep(token: Token): Boolean =
token == NEWLINE || token == NEWLINES || token == SEMI
def isStatSep: Boolean = isStatSep(in.token)
/* --------- COMMENT AND ATTRIBUTE COLLECTION ----------------------------- */
/** A hook for joining the comment associated with a definition.
* Overridden by scaladoc.
*/
def joinComment(trees: => List[Tree]): List[Tree] = trees
/* ---------- TREE CONSTRUCTION ------------------------------------------- */
// Assign a position (offset, range, or explicit) to a freshly built tree.
def atPos[T <: Tree](offset: Offset)(t: T): T = atPos(r2p(offset))(t)
def atPos[T <: Tree](start: Offset, point: Offset)(t: T): T = atPos(r2p(start, point))(t)
def atPos[T <: Tree](start: Offset, point: Offset, end: Offset)(t: T): T = atPos(r2p(start, point, end))(t)
def atPos[T <: Tree](pos: Position)(t: T): T = global.atPos(pos)(t)
def atInPos[T <: Tree](t: T): T = atPos(o2p(in.offset))(t)
def setInPos[T <: Tree](t: T): T = t setPos o2p(in.offset)
/** Convert tree to formal parameter list. */
def convertToParams(tree: Tree): List[ValDef] = tree match {
case Parens(ts) => ts map convertToParam
case _ => List(convertToParam(tree))
}
/** Convert tree to formal parameter. */
// Used when an expression parsed before `=>` turns out to be a lambda's
// parameter section; only (typed) identifiers are legal.
def convertToParam(tree: Tree): ValDef = atPos(tree.pos) {
// An identifier becoming a formal parameter is no longer a `_` placeholder.
def removeAsPlaceholder(name: Name): Unit = {
placeholderParams = placeholderParams filter (_.name != name)
}
def errorParam = makeParam(nme.ERROR, errorTypeTree setPos o2p(tree.pos.end))
def propagateNoWarnAttachment(from: Tree, to: ValDef): to.type =
if (from.hasAttachment[NoWarnAttachment.type]) to.updateAttachment(NoWarnAttachment)
else to
tree match {
case id @ Ident(name) =>
removeAsPlaceholder(name)
propagateNoWarnAttachment(id, makeParam(name.toTermName, TypeTree() setPos o2p(tree.pos.end)))
case Typed(id @ Ident(name), tpe) if tpe.isType => // get the ident!
removeAsPlaceholder(name)
propagateNoWarnAttachment(id, makeParam(name.toTermName, tpe))
case build.SyntacticTuple(as) =>
// Tuples are a common mistake here; give a tailored diagnostic.
val arity = as.length
val example = analyzer.exampleTuplePattern(as map { case Ident(name) => name; case _ => nme.EMPTY })
val msg =
sm"""|not a legal formal parameter.
|Note: Tuples cannot be directly destructured in method or function parameters.
| Either create a single parameter accepting the Tuple${arity},
| or consider a pattern matching anonymous function: `{ case $example => ... }"""
syntaxError(tree.pos, msg, skipIt = false)
errorParam
case _ =>
syntaxError(tree.pos, "not a legal formal parameter", skipIt = false)
errorParam
}
}
/** Convert a (qualified) identifier tree to a type identifier, reporting
 *  "identifier expected" and yielding an error type tree when impossible.
 */
def convertToTypeId(tree: Tree): Tree = atPos(tree.pos) {
  convertToTypeName(tree) match {
    case Some(tpt) => tpt
    case None =>
      syntaxError(tree.pos, "identifier expected", skipIt = false)
      errorTypeTree
  }
}
/** {{{ part { `sep` part } }}}. */
// Overrides the simple loop in ParserCommon to tolerate a trailing comma
// immediately before the closing token of the enclosing region.
override final def tokenSeparated[T](separator: Token, part: => T): List[T] = {
val ts = ListBuffer.empty[T].addOne(part)
var done = in.token != separator
while (!done) {
// The comma is "skippable" when the scanner says it is a trailing comma
// for the innermost separator region; then no further part is parsed.
val skippable = separator == COMMA && in.sepRegions.nonEmpty && in.isTrailingComma(in.sepRegions.head)
if (!skippable) {
in.nextToken()
ts += part
}
done = (in.token != separator) || skippable
}
ts.toList
}
/** {{{ { `sep` part } }}}. */
override final def separatedToken[T](separator: Token, part: => T): List[T] = {
// Commas must go through tokenSeparated for trailing-comma handling.
require(separator != COMMA, "separator cannot be a comma")
val ts = ListBuffer.empty[T]
while (in.token == separator) {
in.nextToken()
ts += part
}
ts.toList
}
@inline final def caseSeparated[T](part: => T): List[T] = separatedToken(CASE, part)
def readAnnots(part: => Tree): List[Tree] = separatedToken(AT, part)
/** Create a tuple type Tree. If the arity is not supported, a syntax error is emitted. */
def makeSafeTupleType(elems: List[Tree]) = {
if (checkTupleSize(elems)) makeTupleType(elems)
else makeTupleType(Nil) // create a dummy node; makeTupleType(elems) would fail
}
/** Create a tuple term Tree. If the arity is not supported, a syntax error is emitted. */
def makeSafeTupleTerm(elems: List[Tree]) = {
// Unlike the type case, the term is built regardless; the check only reports.
checkTupleSize(elems)
makeTupleTerm(elems)
}
/** Create a function Tree. If the arity is not supported, a syntax error is emitted. */
def makeSafeFunctionType(argtpes: List[Tree], restpe: Tree) = {
if (checkFunctionArity(argtpes)) makeFunctionTypeTree(argtpes, restpe)
else makeFunctionTypeTree(Nil, restpe) // create a dummy node
}
/** Check that a tuple has at most `definitions.MaxTupleArity` elements.
 *  Reports a syntax error at the first excess element and returns false;
 *  returns true when the arity is acceptable.
 */
private[this] def checkTupleSize(elems: List[Tree]): Boolean =
  elems.lengthCompare(definitions.MaxTupleArity) <= 0 || {
    val firstInvalidElem = elems(definitions.MaxTupleArity)
    // Fixed: the message previously interpolated MaxFunctionArity; this check is about
    // tuples, so quote MaxTupleArity (the limits coincide today but are distinct constants,
    // and the sibling checkFunctionArity already uses its own constant consistently).
    val msg = s"tuples may not have more than ${definitions.MaxTupleArity} elements, but ${elems.length} given"
    syntaxError(firstInvalidElem.pos, msg, skipIt = false)
    false
  }
/** Check that a function type/literal has at most `definitions.MaxFunctionArity` parameters.
 *  Reports a syntax error at the first excess argument and returns false; true otherwise.
 */
private[this] def checkFunctionArity(argtpes: List[Tree]): Boolean = {
  val withinLimit = argtpes.lengthCompare(definitions.MaxFunctionArity) <= 0
  if (!withinLimit) {
    val excessArg = argtpes(definitions.MaxFunctionArity)
    val msg = s"function values may not have more than ${definitions.MaxFunctionArity} parameters, but ${argtpes.length} given"
    syntaxError(excessArg.pos, msg, skipIt = false)
  }
  withinLimit
}
/** Strip the artificial `Parens` node to create a tuple term Tree. */
def stripParens(t: Tree) = t match {
  case Parens(ts) => atPos(t.pos)(makeSafeTupleTerm(ts))
  case other      => other
}
/** Create tree representing (unencoded) binary operation expression or pattern.
 *
 *  For a left-associative expression `l op r` this builds `l.op(r)`; for a
 *  right-associative one (`op` ending in `:`) the left operand is first lifted
 *  into a synthetic val so evaluation order is preserved:
 *  `{ val x$ = l; r.op(x$) }`. In pattern position the result is `op(l, args*)`.
 *
 *  @param isExpr true for expression context, false for pattern context
 *  @param opPos  position of the operator itself
 *  @param targs  explicit type arguments of the operator (expressions only)
 */
def makeBinop(isExpr: Boolean, left: Tree, op: TermName, right: Tree, opPos: Position, targs: List[Tree] = Nil): Tree = {
  // Type args are illegal in patterns unless they are already error trees.
  require(isExpr || targs.isEmpty || targs.exists(_.isErroneous),
    s"Incompatible args to makeBinop: !isExpr but targs=$targs")
  val rightAssoc = !nme.isLeftAssoc(op)
  // Builds `t.op` (plus TypeApply if targs are given), with transparent
  // positions when right-associative since the spans are out of source order.
  def mkSelection(t: Tree) = {
    val pos = (opPos union t.pos) makeTransparentIf rightAssoc
    val sel = atPos(pos)(Select(stripParens(t), op.encode))
    if (targs.isEmpty) sel
    else {
      /* if it's right-associative, `targs` are between `op` and `t` so make the pos transparent */
      atPos((pos union targs.last.pos) makeTransparentIf rightAssoc) {
        TypeApply(sel, targs)
      }
    }
  }
  // In expressions, `x = e` arguments may really be named arguments.
  def mkNamed(args: List[Tree]) = if (isExpr) args.map(treeInfo.assignmentToMaybeNamedArg) else args
  var isMultiarg = false
  // Unpack a parenthesized right operand into an argument list:
  // `()` is the unit literal, one arg stays as-is, several args mark a multiarg infix.
  val arguments = right match {
    case Parens(Nil) => literalUnit :: Nil
    case Parens(args @ (_ :: Nil)) => mkNamed(args)
    case Parens(args) => isMultiarg = true ; mkNamed(args)
    case _ => right :: Nil
  }
  // Tag applications so later phases know they came from infix syntax.
  def mkApply(fun: Tree, args: List[Tree]) = {
    val apply = Apply(fun, args).updateAttachment(InfixAttachment)
    if (isMultiarg) apply.updateAttachment(MultiargInfixAttachment)
    apply
  }
  if (isExpr) {
    if (rightAssoc) {
      import symtab.Flags._
      // Lift the left operand into a synthetic val so it is evaluated
      // before the right operand, despite the reversed receiver.
      val x = freshTermName(nme.RIGHT_ASSOC_OP_PREFIX)
      val liftedArg = atPos(left.pos) {
        ValDef(Modifiers(FINAL | SYNTHETIC | ARTIFACT), x, TypeTree(), stripParens(left))
      }
      val apply = mkApply(mkSelection(right), List(Ident(x) setPos left.pos.focus))
      Block(liftedArg :: Nil, apply)
    } else
      mkApply(mkSelection(left), arguments)
  } else
    // Pattern: extractor application `op(left, args*)`.
    mkApply(Ident(op.encode), stripParens(left) :: arguments)
}
/** Is current ident a `*`, and is it followed by a `)` or `, )`? */
def followingIsScala3Vararg(): Boolean =
  currentRun.isScala3.value && isRawStar && lookingAhead {
    if (in.token == RPAREN) true
    else if (in.token == COMMA) {
      in.nextToken()
      in.token == RPAREN
    }
    else false
  }
/* --------- OPERAND/OPERATOR STACK --------------------------------------- */
/** Modes for infix types: first operator seen, or continuing after a
 *  left- or right-associative operator.
 */
object InfixMode extends Enumeration {
  val FirstOp, LeftOp, RightOp = Value
}
// Stack of pending infix operators awaiting reduction; head is the most recent.
var opstack: List[OpInfo] = Nil
@deprecated("Use `scala.reflect.internal.Precedence`", "2.11.0")
def precedence(operator: Name): Int = Precedence(operator.toString).level
private def opHead = opstack.head
private def headPrecedence = opHead.precedence
// Pop the top operator: `try/finally` evaluates the head before the tail is reassigned.
private def popOpInfo(): OpInfo = try opHead finally opstack = opstack.tail
/** Push the operator at the current token onto `opstack`, consuming the
 *  identifier and any type arguments that follow it.
 */
private def pushOpInfo(top: Tree): Unit = {
  // Capture name and offset before ident() advances the scanner.
  val opName   = in.name
  val opOffset = in.offset
  ident()
  val typeArgs = if (in.token == LBRACKET) exprTypeArgs() else Nil
  opstack ::= OpInfo(top, opName, typeArgs, opOffset)
}
/** Check the operator on top of the stack against the expected associativity. */
def checkHeadAssoc(leftAssoc: Boolean) = checkAssoc(opHead.offset, opHead.operator, leftAssoc)
/** Report an error when `op`'s associativity conflicts with `leftAssoc`. */
def checkAssoc(offset: Offset, op: Name, leftAssoc: Boolean) =
  if (nme.isLeftAssoc(op) != leftAssoc)
    syntaxError(offset, "left- and right-associative operators with same precedence may not be mixed", skipIt = false)
/** Reduce the operator stack down to `base`, then wrap the result in a
 *  postfix selection for `opinfo.operator`. Type args are illegal here.
 */
def finishPostfixOp(start: Int, base: List[OpInfo], opinfo: OpInfo): Tree = {
  if (opinfo.targs.nonEmpty)
    syntaxError(opinfo.offset, "type application is not allowed for postfix operators")
  val reduced  = reduceExprStack(base, opinfo.lhs)
  val selStart = if (reduced.pos.isDefined) reduced.pos.start else start
  makePostfixSelect(selStart, opinfo.offset, stripParens(reduced), opinfo.operator)
}
/** Build the binary-operation tree for `opinfo.lhs opinfo.operator rhs`,
 *  positioned over the full span with its point at the operator.
 */
def finishBinaryOp(isExpr: Boolean, opinfo: OpInfo, rhs: Tree): Tree = {
  val opStart = opinfo.offset
  val operatorPos: Position = Position.range(rhs.pos.source, opStart, opStart, opStart + opinfo.operator.length)
  val fullPos = opinfo.lhs.pos.union(rhs.pos).union(operatorPos).withEnd(in.lastOffset).withPoint(opStart)
  atPos(fullPos)(makeBinop(isExpr, opinfo.lhs, opinfo.operator, rhs, operatorPos, opinfo.targs))
}
/** Reduce the operator stack down to `base` in expression context. */
def reduceExprStack(base: List[OpInfo], top: Tree): Tree = reduceStack(isExpr = true, base, top)
/** Reduce the operator stack down to `base` in pattern context. */
def reducePatternStack(base: List[OpInfo], top: Tree): Tree = reduceStack(isExpr = false, base, top)
def reduceStack(isExpr: Boolean, base: List[OpInfo], top: Tree): Tree = {
val opPrecedence = if (isIdent) Precedence(in.name.toString) else Precedence(0)
val leftAssoc = !isIdent || (nme isLeftAssoc in.name)