Source file
src/go/parser/parser.go
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Package parser implements a parser for Go source files. Input may be
// provided in a variety of forms (see the various Parse* functions); the
// output is an abstract syntax tree (AST) representing the Go source. The
// parser is invoked through one of the Parse* functions.
package parser

import (
	"fmt"
	"go/ast"
	"go/build/constraint"
	"go/scanner"
	"go/token"
	"strings"
)

// The parser structure holds the parser's internal state.
type parser struct {
	file    *token.File
	errors  scanner.ErrorList
	scanner scanner.Scanner

	// Tracing/debugging
	mode   Mode // parsing mode
	trace  bool // == (mode&Trace != 0)
	indent int  // indentation used for tracing output

	// Comments
	comments    []*ast.CommentGroup
	leadComment *ast.CommentGroup // last lead comment
	lineComment *ast.CommentGroup // last line comment
	top         bool              // in top of file (before package clause)
	goVersion   string            // minimum Go version found in //go:build comment

	// Next token
	pos token.Pos   // token position
	tok token.Token // one token look-ahead
	lit string      // token literal

	// Error recovery
	// (used to limit the number of calls to parser.advance
	// w/o making scanning progress - avoids potential endless
	// loops across multiple parser functions during error recovery)
	syncPos token.Pos // last synchronization position
	syncCnt int       // number of parser.advance calls without progress

	// Non-syntactic parser control
	exprLev int  // < 0: in control clause, >= 0: in expression
	inRhs   bool // if set, the parser is parsing a rhs expression

	imports []*ast.ImportSpec // list of imports

	// nestLev is used to track and limit the recursion depth
	// during parsing.
	nestLev int
}

func (p *parser) init(file *token.File, src []byte, mode Mode) {
	p.file = file
	eh := func(pos token.Position, msg string) { p.errors.Add(pos, msg) }
	p.scanner.Init(p.file, src, eh, scanner.ScanComments)

	p.top = true
	p.mode = mode
	p.trace = mode&Trace != 0
	p.next()
}

// ----------------------------------------------------------------------------
// Parsing support

func (p *parser) printTrace(a ...any) {
	const dots = ". . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . "
	const n = len(dots)
	pos := p.file.Position(p.pos)
	fmt.Printf("%5d:%3d: ", pos.Line, pos.Column)
	i := 2 * p.indent
	for i > n {
		fmt.Print(dots)
		i -= n
	}
	// i <= n
	fmt.Print(dots[0:i])
	fmt.Println(a...)
}

func trace(p *parser, msg string) *parser {
	p.printTrace(msg, "(")
	p.indent++
	return p
}

// Usage pattern: defer un(trace(p, "..."))
func un(p *parser) {
	p.indent--
	p.printTrace(")")
}

// maxNestLev is the deepest we're willing to recurse.
const maxNestLev int = 1e5

func incNestLev(p *parser) *parser {
	p.nestLev++
	if p.nestLev > maxNestLev {
		p.error(p.pos, "exceeded max nesting depth")
		panic(bailout{})
	}
	return p
}

// decNestLev is used to track nesting depth during parsing to prevent stack exhaustion.
// It is used along with incNestLev in a similar fashion to how un and trace are used.
func decNestLev(p *parser) {
	p.nestLev--
}

// Advance to the next token.
func (p *parser) next0() {
	// Because of one-token look-ahead, print the previous token
	// when tracing as it provides a more readable output. The
	// very first token (!p.pos.IsValid()) is not initialized
	// (it is token.ILLEGAL), so don't print it.
	if p.trace && p.pos.IsValid() {
		s := p.tok.String()
		switch {
		case p.tok.IsLiteral():
			p.printTrace(s, p.lit)
		case p.tok.IsOperator(), p.tok.IsKeyword():
			p.printTrace("\"" + s + "\"")
		default:
			p.printTrace(s)
		}
	}

	for {
		p.pos, p.tok, p.lit = p.scanner.Scan()
		if p.tok == token.COMMENT {
			if p.top && strings.HasPrefix(p.lit, "//go:build") {
				if x, err := constraint.Parse(p.lit); err == nil {
					p.goVersion = constraint.GoVersion(x)
				}
			}
			if p.mode&ParseComments == 0 {
				continue
			}
		} else {
			// found a non-comment; top of file is over
			p.top = false
		}
		break
	}
}

// lineFor returns the line of pos, ignoring line directive adjustments.
func (p *parser) lineFor(pos token.Pos) int {
	return p.file.PositionFor(pos, false).Line
}

// Consume a comment and return it and the line on which it ends.
func (p *parser) consumeComment() (comment *ast.Comment, endline int) {
	// /*-style comments may end on a different line than where they start.
	// Scan the comment for '\n' chars and adjust endline accordingly.
	endline = p.lineFor(p.pos)
	if p.lit[1] == '*' {
		// don't use range here - no need to decode Unicode code points
		for i := 0; i < len(p.lit); i++ {
			if p.lit[i] == '\n' {
				endline++
			}
		}
	}

	comment = &ast.Comment{Slash: p.pos, Text: p.lit}
	p.next0()

	return
}

// Consume a group of adjacent comments, add it to the parser's
// comments list, and return it together with the line at which
// the last comment in the group ends. A non-comment token or n
// empty lines terminate a comment group.
func (p *parser) consumeCommentGroup(n int) (comments *ast.CommentGroup, endline int) {
	var list []*ast.Comment
	endline = p.lineFor(p.pos)
	for p.tok == token.COMMENT && p.lineFor(p.pos) <= endline+n {
		var comment *ast.Comment
		comment, endline = p.consumeComment()
		list = append(list, comment)
	}

	// add comment group to the comments list
	comments = &ast.CommentGroup{List: list}
	p.comments = append(p.comments, comments)

	return
}

// Advance to the next non-comment token. In the process, collect
// any comment groups encountered, and remember the last lead and
// line comments.
//
// A lead comment is a comment group that starts and ends in a
// line without any other tokens and that is followed by a non-comment
// token on the line immediately after the comment group.
//
// A line comment is a comment group that follows a non-comment
// token on the same line, and that has no tokens after it on the
// line where it ends.
//
// Lead and line comments may be considered documentation that is
// stored in the AST.
func (p *parser) next() {
	p.leadComment = nil
	p.lineComment = nil
	prev := p.pos
	p.next0()

	if p.tok == token.COMMENT {
		var comment *ast.CommentGroup
		var endline int

		if p.lineFor(p.pos) == p.lineFor(prev) {
			// The comment is on same line as the previous token; it
			// cannot be a lead comment but may be a line comment.
			comment, endline = p.consumeCommentGroup(0)
			if p.lineFor(p.pos) != endline || p.tok == token.SEMICOLON || p.tok == token.EOF {
				// The next token is on a different line, thus
				// the last comment group is a line comment.
				p.lineComment = comment
			}
		}

		// consume successor comments, if any
		endline = -1
		for p.tok == token.COMMENT {
			comment, endline = p.consumeCommentGroup(1)
		}

		if endline+1 == p.lineFor(p.pos) {
			// The next token is following on the line immediately after the
			// comment group, thus the last comment group is a lead comment.
			p.leadComment = comment
		}
	}
}
266
267
268
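// A bailout panic is raised to indicate early termination, for example when
// the error count limit or the maximum nesting depth is exceeded.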
269 type bailout struct {
270 pos token.Pos
271 msg string
272 }
273
274 func (p *parser) error(pos token.Pos, msg string) {
275 if p.trace {
276 defer un(trace(p, "error: "+msg))
277 }
278
279 epos := p.file.Position(pos)
280
281
282
283
284 if p.mode&AllErrors == 0 {
285 n := len(p.errors)
286 if n > 0 && p.errors[n-1].Pos.Line == epos.Line {
287 return
288 }
289 if n > 10 {
290 panic(bailout{})
291 }
292 }
293
294 p.errors.Add(epos, msg)
295 }
296
297 func (p *parser) errorExpected(pos token.Pos, msg string) {
298 msg = "expected " + msg
299 if pos == p.pos {
300
301
302 switch {
303 case p.tok == token.SEMICOLON && p.lit == "\n":
304 msg += ", found newline"
305 case p.tok.IsLiteral():
306
307 msg += ", found " + p.lit
308 default:
309 msg += ", found '" + p.tok.String() + "'"
310 }
311 }
312 p.error(pos, msg)
313 }
314
315 func (p *parser) expect(tok token.Token) token.Pos {
316 pos := p.pos
317 if p.tok != tok {
318 p.errorExpected(pos, "'"+tok.String()+"'")
319 }
320 p.next()
321 return pos
322 }
323
324
325
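// expect2 is like expect, but it returns an invalid position
// if the expected token is not found.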
326 func (p *parser) expect2(tok token.Token) (pos token.Pos) {
327 if p.tok == tok {
328 pos = p.pos
329 } else {
330 p.errorExpected(p.pos, "'"+tok.String()+"'")
331 }
332 p.next()
333 return
334 }
335
336
337
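// expectClosing is like expect but provides a better error message
// for the common case of a missing comma before a newline.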
338 func (p *parser) expectClosing(tok token.Token, context string) token.Pos {
339 if p.tok != tok && p.tok == token.SEMICOLON && p.lit == "\n" {
340 p.error(p.pos, "missing ',' before newline in "+context)
341 p.next()
342 }
343 return p.expect(tok)
344 }
345
346
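// expectSemi consumes a semicolon and returns the applicable line comment.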
347 func (p *parser) expectSemi() (comment *ast.CommentGroup) {
348
349 if p.tok != token.RPAREN && p.tok != token.RBRACE {
350 switch p.tok {
351 case token.COMMA:
352
353 p.errorExpected(p.pos, "';'")
354 fallthrough
355 case token.SEMICOLON:
356 if p.lit == ";" {
357
358 p.next()
359 comment = p.lineComment
360 } else {
361
362 comment = p.lineComment
363 p.next()
364 }
365 return comment
366 default:
367 p.errorExpected(p.pos, "';'")
368 p.advance(stmtStart)
369 }
370 }
371 return nil
372 }
373
374 func (p *parser) atComma(context string, follow token.Token) bool {
375 if p.tok == token.COMMA {
376 return true
377 }
378 if p.tok != follow {
379 msg := "missing ','"
380 if p.tok == token.SEMICOLON && p.lit == "\n" {
381 msg += " before newline"
382 }
383 p.error(p.pos, msg+" in "+context)
384 return true
385 }
386 return false
387 }
388
389 func assert(cond bool, msg string) {
390 if !cond {
391 panic("go/parser internal error: " + msg)
392 }
393 }
394
395
396
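// advance consumes tokens until the current token p.tok
// is in the 'to' set, or token.EOF. For error recovery.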
397 func (p *parser) advance(to map[token.Token]bool) {
398 for ; p.tok != token.EOF; p.next() {
399 if to[p.tok] {
400
401
402
403
404
405
406
407 if p.pos == p.syncPos && p.syncCnt < 10 {
408 p.syncCnt++
409 return
410 }
411 if p.pos > p.syncPos {
412 p.syncPos = p.pos
413 p.syncCnt = 0
414 return
415 }
416
417
418
419
420
421 }
422 }
423 }
424
425 var stmtStart = map[token.Token]bool{
426 token.BREAK: true,
427 token.CONST: true,
428 token.CONTINUE: true,
429 token.DEFER: true,
430 token.FALLTHROUGH: true,
431 token.FOR: true,
432 token.GO: true,
433 token.GOTO: true,
434 token.IF: true,
435 token.RETURN: true,
436 token.SELECT: true,
437 token.SWITCH: true,
438 token.TYPE: true,
439 token.VAR: true,
440 }
441
442 var declStart = map[token.Token]bool{
443 token.IMPORT: true,
444 token.CONST: true,
445 token.TYPE: true,
446 token.VAR: true,
447 }
448
449 var exprEnd = map[token.Token]bool{
450 token.COMMA: true,
451 token.COLON: true,
452 token.SEMICOLON: true,
453 token.RPAREN: true,
454 token.RBRACK: true,
455 token.RBRACE: true,
456 }
457
458
459
460
461
462
463
464
465
466
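// safePos returns pos if it is within the bounds of the current file, and the
// position just past the end of the file otherwise. It allows error positions
// to be derived from possibly-invalid AST positions without panicking.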
467 func (p *parser) safePos(pos token.Pos) (res token.Pos) {
468 defer func() {
469 if recover() != nil {
470 res = token.Pos(p.file.Base() + p.file.Size())
471 }
472 }()
473 _ = p.file.Offset(pos)
474 return pos
475 }
476
477
478
479
480 func (p *parser) parseIdent() *ast.Ident {
481 pos := p.pos
482 name := "_"
483 if p.tok == token.IDENT {
484 name = p.lit
485 p.next()
486 } else {
487 p.expect(token.IDENT)
488 }
489 return &ast.Ident{NamePos: pos, Name: name}
490 }
491
492 func (p *parser) parseIdentList() (list []*ast.Ident) {
493 if p.trace {
494 defer un(trace(p, "IdentList"))
495 }
496
497 list = append(list, p.parseIdent())
498 for p.tok == token.COMMA {
499 p.next()
500 list = append(list, p.parseIdent())
501 }
502
503 return
504 }
505
506
507
508
509
510 func (p *parser) parseExprList() (list []ast.Expr) {
511 if p.trace {
512 defer un(trace(p, "ExpressionList"))
513 }
514
515 list = append(list, p.parseExpr())
516 for p.tok == token.COMMA {
517 p.next()
518 list = append(list, p.parseExpr())
519 }
520
521 return
522 }
523
524 func (p *parser) parseList(inRhs bool) []ast.Expr {
525 old := p.inRhs
526 p.inRhs = inRhs
527 list := p.parseExprList()
528 p.inRhs = old
529 return list
530 }
531
532
533
534
535 func (p *parser) parseType() ast.Expr {
536 if p.trace {
537 defer un(trace(p, "Type"))
538 }
539
540 typ := p.tryIdentOrType()
541
542 if typ == nil {
543 pos := p.pos
544 p.errorExpected(pos, "type")
545 p.advance(exprEnd)
546 return &ast.BadExpr{From: pos, To: p.pos}
547 }
548
549 return typ
550 }
551
552 func (p *parser) parseQualifiedIdent(ident *ast.Ident) ast.Expr {
553 if p.trace {
554 defer un(trace(p, "QualifiedIdent"))
555 }
556
557 typ := p.parseTypeName(ident)
558 if p.tok == token.LBRACK {
559 typ = p.parseTypeInstance(typ)
560 }
561
562 return typ
563 }
564
565
566 func (p *parser) parseTypeName(ident *ast.Ident) ast.Expr {
567 if p.trace {
568 defer un(trace(p, "TypeName"))
569 }
570
571 if ident == nil {
572 ident = p.parseIdent()
573 }
574
575 if p.tok == token.PERIOD {
576
577 p.next()
578 sel := p.parseIdent()
579 return &ast.SelectorExpr{X: ident, Sel: sel}
580 }
581
582 return ident
583 }
584
585
586
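// "[" has already been consumed, and lbrack is its position.
// If len != nil it is the already consumed array length.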
587 func (p *parser) parseArrayType(lbrack token.Pos, len ast.Expr) *ast.ArrayType {
588 if p.trace {
589 defer un(trace(p, "ArrayType"))
590 }
591
592 if len == nil {
593 p.exprLev++
594
595 if p.tok == token.ELLIPSIS {
596 len = &ast.Ellipsis{Ellipsis: p.pos}
597 p.next()
598 } else if p.tok != token.RBRACK {
599 len = p.parseRhs()
600 }
601 p.exprLev--
602 }
603 if p.tok == token.COMMA {
604
605
606
607 p.error(p.pos, "unexpected comma; expecting ]")
608 p.next()
609 }
610 p.expect(token.RBRACK)
611 elt := p.parseType()
612 return &ast.ArrayType{Lbrack: lbrack, Len: len, Elt: elt}
613 }
614
615 func (p *parser) parseArrayFieldOrTypeInstance(x *ast.Ident) (*ast.Ident, ast.Expr) {
616 if p.trace {
617 defer un(trace(p, "ArrayFieldOrTypeInstance"))
618 }
619
620 lbrack := p.expect(token.LBRACK)
621 trailingComma := token.NoPos
622 var args []ast.Expr
623 if p.tok != token.RBRACK {
624 p.exprLev++
625 args = append(args, p.parseRhs())
626 for p.tok == token.COMMA {
627 comma := p.pos
628 p.next()
629 if p.tok == token.RBRACK {
630 trailingComma = comma
631 break
632 }
633 args = append(args, p.parseRhs())
634 }
635 p.exprLev--
636 }
637 rbrack := p.expect(token.RBRACK)
638
639 if len(args) == 0 {
640
641 elt := p.parseType()
642 return x, &ast.ArrayType{Lbrack: lbrack, Elt: elt}
643 }
644
645
646 if len(args) == 1 {
647 elt := p.tryIdentOrType()
648 if elt != nil {
649
650 if trailingComma.IsValid() {
651
652 p.error(trailingComma, "unexpected comma; expecting ]")
653 }
654 return x, &ast.ArrayType{Lbrack: lbrack, Len: args[0], Elt: elt}
655 }
656 }
657
658
659 return nil, packIndexExpr(x, lbrack, args, rbrack)
660 }
661
662 func (p *parser) parseFieldDecl() *ast.Field {
663 if p.trace {
664 defer un(trace(p, "FieldDecl"))
665 }
666
667 doc := p.leadComment
668
669 var names []*ast.Ident
670 var typ ast.Expr
671 switch p.tok {
672 case token.IDENT:
673 name := p.parseIdent()
674 if p.tok == token.PERIOD || p.tok == token.STRING || p.tok == token.SEMICOLON || p.tok == token.RBRACE {
675
676 typ = name
677 if p.tok == token.PERIOD {
678 typ = p.parseQualifiedIdent(name)
679 }
680 } else {
681
682 names = []*ast.Ident{name}
683 for p.tok == token.COMMA {
684 p.next()
685 names = append(names, p.parseIdent())
686 }
687
688
689 if len(names) == 1 && p.tok == token.LBRACK {
690 name, typ = p.parseArrayFieldOrTypeInstance(name)
691 if name == nil {
692 names = nil
693 }
694 } else {
695
696 typ = p.parseType()
697 }
698 }
699 case token.MUL:
700 star := p.pos
701 p.next()
702 if p.tok == token.LPAREN {
703
704 p.error(p.pos, "cannot parenthesize embedded type")
705 p.next()
706 typ = p.parseQualifiedIdent(nil)
707
708 if p.tok == token.RPAREN {
709 p.next()
710 }
711 } else {
712
713 typ = p.parseQualifiedIdent(nil)
714 }
715 typ = &ast.StarExpr{Star: star, X: typ}
716
717 case token.LPAREN:
718 p.error(p.pos, "cannot parenthesize embedded type")
719 p.next()
720 if p.tok == token.MUL {
721
722 star := p.pos
723 p.next()
724 typ = &ast.StarExpr{Star: star, X: p.parseQualifiedIdent(nil)}
725 } else {
726
727 typ = p.parseQualifiedIdent(nil)
728 }
729
730 if p.tok == token.RPAREN {
731 p.next()
732 }
733
734 default:
735 pos := p.pos
736 p.errorExpected(pos, "field name or embedded type")
737 p.advance(exprEnd)
738 typ = &ast.BadExpr{From: pos, To: p.pos}
739 }
740
741 var tag *ast.BasicLit
742 if p.tok == token.STRING {
743 tag = &ast.BasicLit{ValuePos: p.pos, Kind: p.tok, Value: p.lit}
744 p.next()
745 }
746
747 comment := p.expectSemi()
748
749 field := &ast.Field{Doc: doc, Names: names, Type: typ, Tag: tag, Comment: comment}
750 return field
751 }
752
753 func (p *parser) parseStructType() *ast.StructType {
754 if p.trace {
755 defer un(trace(p, "StructType"))
756 }
757
758 pos := p.expect(token.STRUCT)
759 lbrace := p.expect(token.LBRACE)
760 var list []*ast.Field
761 for p.tok == token.IDENT || p.tok == token.MUL || p.tok == token.LPAREN {
762
763
764
765 list = append(list, p.parseFieldDecl())
766 }
767 rbrace := p.expect(token.RBRACE)
768
769 return &ast.StructType{
770 Struct: pos,
771 Fields: &ast.FieldList{
772 Opening: lbrace,
773 List: list,
774 Closing: rbrace,
775 },
776 }
777 }
778
779 func (p *parser) parsePointerType() *ast.StarExpr {
780 if p.trace {
781 defer un(trace(p, "PointerType"))
782 }
783
784 star := p.expect(token.MUL)
785 base := p.parseType()
786
787 return &ast.StarExpr{Star: star, X: base}
788 }
789
790 func (p *parser) parseDotsType() *ast.Ellipsis {
791 if p.trace {
792 defer un(trace(p, "DotsType"))
793 }
794
795 pos := p.expect(token.ELLIPSIS)
796 elt := p.parseType()
797
798 return &ast.Ellipsis{Ellipsis: pos, Elt: elt}
799 }
800
801 type field struct {
802 name *ast.Ident
803 typ ast.Expr
804 }
805
806 func (p *parser) parseParamDecl(name *ast.Ident, typeSetsOK bool) (f field) {
807
808
809 if p.trace {
810 defer un(trace(p, "ParamDecl"))
811 }
812
813 ptok := p.tok
814 if name != nil {
815 p.tok = token.IDENT
816 } else if typeSetsOK && p.tok == token.TILDE {
817
818 return field{nil, p.embeddedElem(nil)}
819 }
820
821 switch p.tok {
822 case token.IDENT:
823
824 if name != nil {
825 f.name = name
826 p.tok = ptok
827 } else {
828 f.name = p.parseIdent()
829 }
830 switch p.tok {
831 case token.IDENT, token.MUL, token.ARROW, token.FUNC, token.CHAN, token.MAP, token.STRUCT, token.INTERFACE, token.LPAREN:
832
833 f.typ = p.parseType()
834
835 case token.LBRACK:
836
837 f.name, f.typ = p.parseArrayFieldOrTypeInstance(f.name)
838
839 case token.ELLIPSIS:
840
841 f.typ = p.parseDotsType()
842 return
843
844 case token.PERIOD:
845
846 f.typ = p.parseQualifiedIdent(f.name)
847 f.name = nil
848
849 case token.TILDE:
850 if typeSetsOK {
851 f.typ = p.embeddedElem(nil)
852 return
853 }
854
855 case token.OR:
856 if typeSetsOK {
857
858 f.typ = p.embeddedElem(f.name)
859 f.name = nil
860 return
861 }
862 }
863
864 case token.MUL, token.ARROW, token.FUNC, token.LBRACK, token.CHAN, token.MAP, token.STRUCT, token.INTERFACE, token.LPAREN:
865
866 f.typ = p.parseType()
867
868 case token.ELLIPSIS:
869
870
871 f.typ = p.parseDotsType()
872 return
873
874 default:
875
876
877 p.errorExpected(p.pos, "')'")
878 p.advance(exprEnd)
879 }
880
881
882 if typeSetsOK && p.tok == token.OR && f.typ != nil {
883 f.typ = p.embeddedElem(f.typ)
884 }
885
886 return
887 }
888
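// parseParameterList parses a (possibly empty) list of parameters or, if
// closing is token.RBRACK, type parameters. If non-nil, name0 and typ0 are
// the first already-parsed name and type. dddok reports whether a final
// parameter of the form ...T is permitted.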
889 func (p *parser) parseParameterList(name0 *ast.Ident, typ0 ast.Expr, closing token.Token, dddok bool) (params []*ast.Field) {
890 if p.trace {
891 defer un(trace(p, "ParameterList"))
892 }
893
894
895 tparams := closing == token.RBRACK
896
897 pos0 := p.pos
898 if name0 != nil {
899 pos0 = name0.Pos()
900 } else if typ0 != nil {
901 pos0 = typ0.Pos()
902 }
903
904
905
906
907
908
909
910 var list []field
911 var named int
912 var typed int
913
914 for name0 != nil || p.tok != closing && p.tok != token.EOF {
915 var par field
916 if typ0 != nil {
917 if tparams {
918 typ0 = p.embeddedElem(typ0)
919 }
920 par = field{name0, typ0}
921 } else {
922 par = p.parseParamDecl(name0, tparams)
923 }
924 name0 = nil
925 typ0 = nil
926 if par.name != nil || par.typ != nil {
927 list = append(list, par)
928 if par.name != nil && par.typ != nil {
929 named++
930 }
931 if par.typ != nil {
932 typed++
933 }
934 }
935 if !p.atComma("parameter list", closing) {
936 break
937 }
938 p.next()
939 }
940
941 if len(list) == 0 {
942 return
943 }
944
945
946 if named == 0 {
947
948 for i := range list {
949 par := &list[i]
950 if typ := par.name; typ != nil {
951 par.typ = typ
952 par.name = nil
953 }
954 }
955 if tparams {
956
957
958 var errPos token.Pos
959 var msg string
960 if named == typed {
961 errPos = p.pos
962 msg = "missing type constraint"
963 } else {
964 errPos = pos0
965 msg = "missing type parameter name"
966 if len(list) == 1 {
967 msg += " or invalid array length"
968 }
969 }
970 p.error(errPos, msg)
971 }
972 } else if named != len(list) {
973
974 var errPos token.Pos
975 var typ ast.Expr
976 for i := range list {
977 if par := &list[len(list)-i-1]; par.typ != nil {
978 typ = par.typ
979 if par.name == nil {
980 errPos = typ.Pos()
981 n := ast.NewIdent("_")
982 n.NamePos = errPos
983 par.name = n
984 }
985 } else if typ != nil {
986 par.typ = typ
987 } else {
988
989 errPos = par.name.Pos()
990 par.typ = &ast.BadExpr{From: errPos, To: p.pos}
991 }
992 }
993 if errPos.IsValid() {
994
995
996
997
998
999
1000 var msg string
1001 if named == typed {
1002 errPos = p.pos
1003 if tparams {
1004 msg = "missing type constraint"
1005 } else {
1006 msg = "missing parameter type"
1007 }
1008 } else {
1009 if tparams {
1010 msg = "missing type parameter name"
1011
1012 if len(list) == 1 {
1013 msg += " or invalid array length"
1014 }
1015 } else {
1016 msg = "missing parameter name"
1017 }
1018 }
1019 p.error(errPos, msg)
1020 }
1021 }
1022
1023
1024 first := true
1025 for i, _ := range list {
1026 f := &list[i]
1027 if t, _ := f.typ.(*ast.Ellipsis); t != nil && (!dddok || i+1 < len(list)) {
1028 if first {
1029 first = false
1030 if dddok {
1031 p.error(t.Ellipsis, "can only use ... with final parameter")
1032 } else {
1033 p.error(t.Ellipsis, "invalid use of ...")
1034 }
1035 }
1036
1037
1038
1039 f.typ = &ast.BadExpr{From: t.Pos(), To: t.End()}
1040 }
1041 }
1042
1043
1044
1045 if named == 0 {
1046
1047 for _, par := range list {
1048 assert(par.typ != nil, "nil type in unnamed parameter list")
1049 params = append(params, &ast.Field{Type: par.typ})
1050 }
1051 return
1052 }
1053
1054
1055
1056 var names []*ast.Ident
1057 var typ ast.Expr
1058 addParams := func() {
1059 assert(typ != nil, "nil type in named parameter list")
1060 field := &ast.Field{Names: names, Type: typ}
1061 params = append(params, field)
1062 names = nil
1063 }
1064 for _, par := range list {
1065 if par.typ != typ {
1066 if len(names) > 0 {
1067 addParams()
1068 }
1069 typ = par.typ
1070 }
1071 names = append(names, par.name)
1072 }
1073 if len(names) > 0 {
1074 addParams()
1075 }
1076 return
1077 }
1078
1079 func (p *parser) parseTypeParameters() *ast.FieldList {
1080 if p.trace {
1081 defer un(trace(p, "TypeParameters"))
1082 }
1083
1084 lbrack := p.expect(token.LBRACK)
1085 var list []*ast.Field
1086 if p.tok != token.RBRACK {
1087 list = p.parseParameterList(nil, nil, token.RBRACK, false)
1088 }
1089 rbrack := p.expect(token.RBRACK)
1090
1091 if len(list) == 0 {
1092 p.error(rbrack, "empty type parameter list")
1093 return nil
1094 }
1095
1096 return &ast.FieldList{Opening: lbrack, List: list, Closing: rbrack}
1097 }
1098
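// parseParameters parses a parenthesized parameter or result list. If result
// is set, the list may also be empty, or a single unparenthesized type.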
1099 func (p *parser) parseParameters(result bool) *ast.FieldList {
1100 if p.trace {
1101 defer un(trace(p, "Parameters"))
1102 }
1103
1104 if !result || p.tok == token.LPAREN {
1105 lparen := p.expect(token.LPAREN)
1106 var list []*ast.Field
1107 if p.tok != token.RPAREN {
1108 list = p.parseParameterList(nil, nil, token.RPAREN, !result)
1109 }
1110 rparen := p.expect(token.RPAREN)
1111 return &ast.FieldList{Opening: lparen, List: list, Closing: rparen}
1112 }
1113
1114 if typ := p.tryIdentOrType(); typ != nil {
1115 list := make([]*ast.Field, 1)
1116 list[0] = &ast.Field{Type: typ}
1117 return &ast.FieldList{List: list}
1118 }
1119
1120 return nil
1121 }
1122
1123 func (p *parser) parseFuncType() *ast.FuncType {
1124 if p.trace {
1125 defer un(trace(p, "FuncType"))
1126 }
1127
1128 pos := p.expect(token.FUNC)
1129
1130 if p.tok == token.LBRACK {
1131 tparams := p.parseTypeParameters()
1132 if tparams != nil {
1133 p.error(tparams.Opening, "function type must have no type parameters")
1134 }
1135 }
1136 params := p.parseParameters(false)
1137 results := p.parseParameters(true)
1138
1139 return &ast.FuncType{Func: pos, Params: params, Results: results}
1140 }
1141
1142 func (p *parser) parseMethodSpec() *ast.Field {
1143 if p.trace {
1144 defer un(trace(p, "MethodSpec"))
1145 }
1146
1147 doc := p.leadComment
1148 var idents []*ast.Ident
1149 var typ ast.Expr
1150 x := p.parseTypeName(nil)
1151 if ident, _ := x.(*ast.Ident); ident != nil {
1152 switch {
1153 case p.tok == token.LBRACK:
1154
1155 lbrack := p.pos
1156 p.next()
1157 p.exprLev++
1158 x := p.parseExpr()
1159 p.exprLev--
1160 if name0, _ := x.(*ast.Ident); name0 != nil && p.tok != token.COMMA && p.tok != token.RBRACK {
1161
1162
1163
1164
1165 _ = p.parseParameterList(name0, nil, token.RBRACK, false)
1166 _ = p.expect(token.RBRACK)
1167 p.error(lbrack, "interface method must have no type parameters")
1168
1169
1170 params := p.parseParameters(false)
1171 results := p.parseParameters(true)
1172 idents = []*ast.Ident{ident}
1173 typ = &ast.FuncType{
1174 Func: token.NoPos,
1175 Params: params,
1176 Results: results,
1177 }
1178 } else {
1179
1180
1181 list := []ast.Expr{x}
1182 if p.atComma("type argument list", token.RBRACK) {
1183 p.exprLev++
1184 p.next()
1185 for p.tok != token.RBRACK && p.tok != token.EOF {
1186 list = append(list, p.parseType())
1187 if !p.atComma("type argument list", token.RBRACK) {
1188 break
1189 }
1190 p.next()
1191 }
1192 p.exprLev--
1193 }
1194 rbrack := p.expectClosing(token.RBRACK, "type argument list")
1195 typ = packIndexExpr(ident, lbrack, list, rbrack)
1196 }
1197 case p.tok == token.LPAREN:
1198
1199
1200 params := p.parseParameters(false)
1201 results := p.parseParameters(true)
1202 idents = []*ast.Ident{ident}
1203 typ = &ast.FuncType{Func: token.NoPos, Params: params, Results: results}
1204 default:
1205
1206 typ = x
1207 }
1208 } else {
1209
1210 typ = x
1211 if p.tok == token.LBRACK {
1212
1213 typ = p.parseTypeInstance(typ)
1214 }
1215 }
1216
1217
1218
1219
1220
1221 return &ast.Field{Doc: doc, Names: idents, Type: typ}
1222 }
1223
1224 func (p *parser) embeddedElem(x ast.Expr) ast.Expr {
1225 if p.trace {
1226 defer un(trace(p, "EmbeddedElem"))
1227 }
1228 if x == nil {
1229 x = p.embeddedTerm()
1230 }
1231 for p.tok == token.OR {
1232 t := new(ast.BinaryExpr)
1233 t.OpPos = p.pos
1234 t.Op = token.OR
1235 p.next()
1236 t.X = x
1237 t.Y = p.embeddedTerm()
1238 x = t
1239 }
1240 return x
1241 }
1242
1243 func (p *parser) embeddedTerm() ast.Expr {
1244 if p.trace {
1245 defer un(trace(p, "EmbeddedTerm"))
1246 }
1247 if p.tok == token.TILDE {
1248 t := new(ast.UnaryExpr)
1249 t.OpPos = p.pos
1250 t.Op = token.TILDE
1251 p.next()
1252 t.X = p.parseType()
1253 return t
1254 }
1255
1256 t := p.tryIdentOrType()
1257 if t == nil {
1258 pos := p.pos
1259 p.errorExpected(pos, "~ term or type")
1260 p.advance(exprEnd)
1261 return &ast.BadExpr{From: pos, To: p.pos}
1262 }
1263
1264 return t
1265 }
1266
1267 func (p *parser) parseInterfaceType() *ast.InterfaceType {
1268 if p.trace {
1269 defer un(trace(p, "InterfaceType"))
1270 }
1271
1272 pos := p.expect(token.INTERFACE)
1273 lbrace := p.expect(token.LBRACE)
1274
1275 var list []*ast.Field
1276
1277 parseElements:
1278 for {
1279 switch {
1280 case p.tok == token.IDENT:
1281 f := p.parseMethodSpec()
1282 if f.Names == nil {
1283 f.Type = p.embeddedElem(f.Type)
1284 }
1285 f.Comment = p.expectSemi()
1286 list = append(list, f)
1287 case p.tok == token.TILDE:
1288 typ := p.embeddedElem(nil)
1289 comment := p.expectSemi()
1290 list = append(list, &ast.Field{Type: typ, Comment: comment})
1291 default:
1292 if t := p.tryIdentOrType(); t != nil {
1293 typ := p.embeddedElem(t)
1294 comment := p.expectSemi()
1295 list = append(list, &ast.Field{Type: typ, Comment: comment})
1296 } else {
1297 break parseElements
1298 }
1299 }
1300 }
1301
1302
1303
1304 rbrace := p.expect(token.RBRACE)
1305
1306 return &ast.InterfaceType{
1307 Interface: pos,
1308 Methods: &ast.FieldList{
1309 Opening: lbrace,
1310 List: list,
1311 Closing: rbrace,
1312 },
1313 }
1314 }
1315
1316 func (p *parser) parseMapType() *ast.MapType {
1317 if p.trace {
1318 defer un(trace(p, "MapType"))
1319 }
1320
1321 pos := p.expect(token.MAP)
1322 p.expect(token.LBRACK)
1323 key := p.parseType()
1324 p.expect(token.RBRACK)
1325 value := p.parseType()
1326
1327 return &ast.MapType{Map: pos, Key: key, Value: value}
1328 }
1329
1330 func (p *parser) parseChanType() *ast.ChanType {
1331 if p.trace {
1332 defer un(trace(p, "ChanType"))
1333 }
1334
1335 pos := p.pos
1336 dir := ast.SEND | ast.RECV
1337 var arrow token.Pos
1338 if p.tok == token.CHAN {
1339 p.next()
1340 if p.tok == token.ARROW {
1341 arrow = p.pos
1342 p.next()
1343 dir = ast.SEND
1344 }
1345 } else {
1346 arrow = p.expect(token.ARROW)
1347 p.expect(token.CHAN)
1348 dir = ast.RECV
1349 }
1350 value := p.parseType()
1351
1352 return &ast.ChanType{Begin: pos, Arrow: arrow, Dir: dir, Value: value}
1353 }
1354
1355 func (p *parser) parseTypeInstance(typ ast.Expr) ast.Expr {
1356 if p.trace {
1357 defer un(trace(p, "TypeInstance"))
1358 }
1359
1360 opening := p.expect(token.LBRACK)
1361 p.exprLev++
1362 var list []ast.Expr
1363 for p.tok != token.RBRACK && p.tok != token.EOF {
1364 list = append(list, p.parseType())
1365 if !p.atComma("type argument list", token.RBRACK) {
1366 break
1367 }
1368 p.next()
1369 }
1370 p.exprLev--
1371
1372 closing := p.expectClosing(token.RBRACK, "type argument list")
1373
1374 if len(list) == 0 {
1375 p.errorExpected(closing, "type argument list")
1376 return &ast.IndexExpr{
1377 X: typ,
1378 Lbrack: opening,
1379 Index: &ast.BadExpr{From: opening + 1, To: closing},
1380 Rbrack: closing,
1381 }
1382 }
1383
1384 return packIndexExpr(typ, opening, list, closing)
1385 }
1386
1387 func (p *parser) tryIdentOrType() ast.Expr {
1388 defer decNestLev(incNestLev(p))
1389
1390 switch p.tok {
1391 case token.IDENT:
1392 typ := p.parseTypeName(nil)
1393 if p.tok == token.LBRACK {
1394 typ = p.parseTypeInstance(typ)
1395 }
1396 return typ
1397 case token.LBRACK:
1398 lbrack := p.expect(token.LBRACK)
1399 return p.parseArrayType(lbrack, nil)
1400 case token.STRUCT:
1401 return p.parseStructType()
1402 case token.MUL:
1403 return p.parsePointerType()
1404 case token.FUNC:
1405 return p.parseFuncType()
1406 case token.INTERFACE:
1407 return p.parseInterfaceType()
1408 case token.MAP:
1409 return p.parseMapType()
1410 case token.CHAN, token.ARROW:
1411 return p.parseChanType()
1412 case token.LPAREN:
1413 lparen := p.pos
1414 p.next()
1415 typ := p.parseType()
1416 rparen := p.expect(token.RPAREN)
1417 return &ast.ParenExpr{Lparen: lparen, X: typ, Rparen: rparen}
1418 }
1419
1420
1421 return nil
1422 }
1423
1424
1425
1426
1427 func (p *parser) parseStmtList() (list []ast.Stmt) {
1428 if p.trace {
1429 defer un(trace(p, "StatementList"))
1430 }
1431
1432 for p.tok != token.CASE && p.tok != token.DEFAULT && p.tok != token.RBRACE && p.tok != token.EOF {
1433 list = append(list, p.parseStmt())
1434 }
1435
1436 return
1437 }
1438
1439 func (p *parser) parseBody() *ast.BlockStmt {
1440 if p.trace {
1441 defer un(trace(p, "Body"))
1442 }
1443
1444 lbrace := p.expect(token.LBRACE)
1445 list := p.parseStmtList()
1446 rbrace := p.expect2(token.RBRACE)
1447
1448 return &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace}
1449 }
1450
1451 func (p *parser) parseBlockStmt() *ast.BlockStmt {
1452 if p.trace {
1453 defer un(trace(p, "BlockStmt"))
1454 }
1455
1456 lbrace := p.expect(token.LBRACE)
1457 list := p.parseStmtList()
1458 rbrace := p.expect2(token.RBRACE)
1459
1460 return &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace}
1461 }
1462
1463
1464
1465
1466 func (p *parser) parseFuncTypeOrLit() ast.Expr {
1467 if p.trace {
1468 defer un(trace(p, "FuncTypeOrLit"))
1469 }
1470
1471 typ := p.parseFuncType()
1472 if p.tok != token.LBRACE {
1473
1474 return typ
1475 }
1476
1477 p.exprLev++
1478 body := p.parseBody()
1479 p.exprLev--
1480
1481 return &ast.FuncLit{Type: typ, Body: body}
1482 }
1483
1484
1485
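// parseOperand may return an expression or a raw type (including array types
// of the form [...]T). Callers must verify the result.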
1486 func (p *parser) parseOperand() ast.Expr {
1487 if p.trace {
1488 defer un(trace(p, "Operand"))
1489 }
1490
1491 switch p.tok {
1492 case token.IDENT:
1493 x := p.parseIdent()
1494 return x
1495
1496 case token.INT, token.FLOAT, token.IMAG, token.CHAR, token.STRING:
1497 x := &ast.BasicLit{ValuePos: p.pos, Kind: p.tok, Value: p.lit}
1498 p.next()
1499 return x
1500
1501 case token.LPAREN:
1502 lparen := p.pos
1503 p.next()
1504 p.exprLev++
1505 x := p.parseRhs()
1506 p.exprLev--
1507 rparen := p.expect(token.RPAREN)
1508 return &ast.ParenExpr{Lparen: lparen, X: x, Rparen: rparen}
1509
1510 case token.FUNC:
1511 return p.parseFuncTypeOrLit()
1512 }
1513
1514 if typ := p.tryIdentOrType(); typ != nil {
1515
1516 _, isIdent := typ.(*ast.Ident)
1517 assert(!isIdent, "type cannot be identifier")
1518 return typ
1519 }
1520
1521
1522 pos := p.pos
1523 p.errorExpected(pos, "operand")
1524 p.advance(stmtStart)
1525 return &ast.BadExpr{From: pos, To: p.pos}
1526 }
1527
1528 func (p *parser) parseSelector(x ast.Expr) ast.Expr {
1529 if p.trace {
1530 defer un(trace(p, "Selector"))
1531 }
1532
1533 sel := p.parseIdent()
1534
1535 return &ast.SelectorExpr{X: x, Sel: sel}
1536 }
1537
1538 func (p *parser) parseTypeAssertion(x ast.Expr) ast.Expr {
1539 if p.trace {
1540 defer un(trace(p, "TypeAssertion"))
1541 }
1542
1543 lparen := p.expect(token.LPAREN)
1544 var typ ast.Expr
1545 if p.tok == token.TYPE {
1546
1547 p.next()
1548 } else {
1549 typ = p.parseType()
1550 }
1551 rparen := p.expect(token.RPAREN)
1552
1553 return &ast.TypeAssertExpr{X: x, Type: typ, Lparen: lparen, Rparen: rparen}
1554 }
1555
1556 func (p *parser) parseIndexOrSliceOrInstance(x ast.Expr) ast.Expr {
1557 if p.trace {
1558 defer un(trace(p, "parseIndexOrSliceOrInstance"))
1559 }
1560
1561 lbrack := p.expect(token.LBRACK)
1562 if p.tok == token.RBRACK {
1563
1564
1565 p.errorExpected(p.pos, "operand")
1566 rbrack := p.pos
1567 p.next()
1568 return &ast.IndexExpr{
1569 X: x,
1570 Lbrack: lbrack,
1571 Index: &ast.BadExpr{From: rbrack, To: rbrack},
1572 Rbrack: rbrack,
1573 }
1574 }
1575 p.exprLev++
1576
1577 const N = 3
1578 var args []ast.Expr
1579 var index [N]ast.Expr
1580 var colons [N - 1]token.Pos
1581 if p.tok != token.COLON {
1582
1583
1584 index[0] = p.parseRhs()
1585 }
1586 ncolons := 0
1587 switch p.tok {
1588 case token.COLON:
1589
1590 for p.tok == token.COLON && ncolons < len(colons) {
1591 colons[ncolons] = p.pos
1592 ncolons++
1593 p.next()
1594 if p.tok != token.COLON && p.tok != token.RBRACK && p.tok != token.EOF {
1595 index[ncolons] = p.parseRhs()
1596 }
1597 }
1598 case token.COMMA:
1599
1600 args = append(args, index[0])
1601 for p.tok == token.COMMA {
1602 p.next()
1603 if p.tok != token.RBRACK && p.tok != token.EOF {
1604 args = append(args, p.parseType())
1605 }
1606 }
1607 }
1608
1609 p.exprLev--
1610 rbrack := p.expect(token.RBRACK)
1611
1612 if ncolons > 0 {
1613
1614 slice3 := false
1615 if ncolons == 2 {
1616 slice3 = true
1617
1618
1619 if index[1] == nil {
1620 p.error(colons[0], "middle index required in 3-index slice")
1621 index[1] = &ast.BadExpr{From: colons[0] + 1, To: colons[1]}
1622 }
1623 if index[2] == nil {
1624 p.error(colons[1], "final index required in 3-index slice")
1625 index[2] = &ast.BadExpr{From: colons[1] + 1, To: rbrack}
1626 }
1627 }
1628 return &ast.SliceExpr{X: x, Lbrack: lbrack, Low: index[0], High: index[1], Max: index[2], Slice3: slice3, Rbrack: rbrack}
1629 }
1630
1631 if len(args) == 0 {
1632
1633 return &ast.IndexExpr{X: x, Lbrack: lbrack, Index: index[0], Rbrack: rbrack}
1634 }
1635
1636
1637 return packIndexExpr(x, lbrack, args, rbrack)
1638 }
1639
1640 func (p *parser) parseCallOrConversion(fun ast.Expr) *ast.CallExpr {
1641 if p.trace {
1642 defer un(trace(p, "CallOrConversion"))
1643 }
1644
1645 lparen := p.expect(token.LPAREN)
1646 p.exprLev++
1647 var list []ast.Expr
1648 var ellipsis token.Pos
1649 for p.tok != token.RPAREN && p.tok != token.EOF && !ellipsis.IsValid() {
1650 list = append(list, p.parseRhs())
1651 if p.tok == token.ELLIPSIS {
1652 ellipsis = p.pos
1653 p.next()
1654 }
1655 if !p.atComma("argument list", token.RPAREN) {
1656 break
1657 }
1658 p.next()
1659 }
1660 p.exprLev--
1661 rparen := p.expectClosing(token.RPAREN, "argument list")
1662
1663 return &ast.CallExpr{Fun: fun, Lparen: lparen, Args: list, Ellipsis: ellipsis, Rparen: rparen}
1664 }
1665
1666 func (p *parser) parseValue() ast.Expr {
1667 if p.trace {
1668 defer un(trace(p, "Element"))
1669 }
1670
1671 if p.tok == token.LBRACE {
1672 return p.parseLiteralValue(nil)
1673 }
1674
1675 x := p.parseExpr()
1676
1677 return x
1678 }
1679
1680 func (p *parser) parseElement() ast.Expr {
1681 if p.trace {
1682 defer un(trace(p, "Element"))
1683 }
1684
1685 x := p.parseValue()
1686 if p.tok == token.COLON {
1687 colon := p.pos
1688 p.next()
1689 x = &ast.KeyValueExpr{Key: x, Colon: colon, Value: p.parseValue()}
1690 }
1691
1692 return x
1693 }
1694
1695 func (p *parser) parseElementList() (list []ast.Expr) {
1696 if p.trace {
1697 defer un(trace(p, "ElementList"))
1698 }
1699
1700 for p.tok != token.RBRACE && p.tok != token.EOF {
1701 list = append(list, p.parseElement())
1702 if !p.atComma("composite literal", token.RBRACE) {
1703 break
1704 }
1705 p.next()
1706 }
1707
1708 return
1709 }
1710
1711 func (p *parser) parseLiteralValue(typ ast.Expr) ast.Expr {
1712 defer decNestLev(incNestLev(p))
1713
1714 if p.trace {
1715 defer un(trace(p, "LiteralValue"))
1716 }
1717
1718 lbrace := p.expect(token.LBRACE)
1719 var elts []ast.Expr
1720 p.exprLev++
1721 if p.tok != token.RBRACE {
1722 elts = p.parseElementList()
1723 }
1724 p.exprLev--
1725 rbrace := p.expectClosing(token.RBRACE, "composite literal")
1726 return &ast.CompositeLit{Type: typ, Lbrace: lbrace, Elts: elts, Rbrace: rbrace}
1727 }
1728
1729 func (p *parser) parsePrimaryExpr(x ast.Expr) ast.Expr {
1730 if p.trace {
1731 defer un(trace(p, "PrimaryExpr"))
1732 }
1733
1734 if x == nil {
1735 x = p.parseOperand()
1736 }
1737
1738
1739
1740 var n int
1741 defer func() { p.nestLev -= n }()
1742 for n = 1; ; n++ {
1743 incNestLev(p)
1744 switch p.tok {
1745 case token.PERIOD:
1746 p.next()
1747 switch p.tok {
1748 case token.IDENT:
1749 x = p.parseSelector(x)
1750 case token.LPAREN:
1751 x = p.parseTypeAssertion(x)
1752 default:
1753 pos := p.pos
1754 p.errorExpected(pos, "selector or type assertion")
1755
1756
1757
1758
1759
1760 if p.tok != token.RBRACE {
1761 p.next()
1762 }
1763 sel := &ast.Ident{NamePos: pos, Name: "_"}
1764 x = &ast.SelectorExpr{X: x, Sel: sel}
1765 }
1766 case token.LBRACK:
1767 x = p.parseIndexOrSliceOrInstance(x)
1768 case token.LPAREN:
1769 x = p.parseCallOrConversion(x)
1770 case token.LBRACE:
1771
1772
1773 t := ast.Unparen(x)
1774
1775 switch t.(type) {
1776 case *ast.BadExpr, *ast.Ident, *ast.SelectorExpr:
1777 if p.exprLev < 0 {
1778 return x
1779 }
1780
1781 case *ast.IndexExpr, *ast.IndexListExpr:
1782 if p.exprLev < 0 {
1783 return x
1784 }
1785
1786 case *ast.ArrayType, *ast.StructType, *ast.MapType:
1787
1788 default:
1789 return x
1790 }
1791 if t != x {
1792 p.error(t.Pos(), "cannot parenthesize type in composite literal")
1793
1794 }
1795 x = p.parseLiteralValue(x)
1796 default:
1797 return x
1798 }
1799 }
1800 }
1801
1802 func (p *parser) parseUnaryExpr() ast.Expr {
1803 defer decNestLev(incNestLev(p))
1804
1805 if p.trace {
1806 defer un(trace(p, "UnaryExpr"))
1807 }
1808
1809 switch p.tok {
1810 case token.ADD, token.SUB, token.NOT, token.XOR, token.AND, token.TILDE:
1811 pos, op := p.pos, p.tok
1812 p.next()
1813 x := p.parseUnaryExpr()
1814 return &ast.UnaryExpr{OpPos: pos, Op: op, X: x}
1815
1816 case token.ARROW:
1817
1818 arrow := p.pos
1819 p.next()
1820
1821
1822
1823
1824
1825
1826
1827
1828
1829
1830
1831
1832
1833
1834
1835 x := p.parseUnaryExpr()
1836
1837
1838 if typ, ok := x.(*ast.ChanType); ok {
1839
1840
1841
1842 dir := ast.SEND
1843 for ok && dir == ast.SEND {
1844 if typ.Dir == ast.RECV {
1845
1846 p.errorExpected(typ.Arrow, "'chan'")
1847 }
1848 arrow, typ.Begin, typ.Arrow = typ.Arrow, arrow, arrow
1849 dir, typ.Dir = typ.Dir, ast.RECV
1850 typ, ok = typ.Value.(*ast.ChanType)
1851 }
1852 if dir == ast.SEND {
1853 p.errorExpected(arrow, "channel type")
1854 }
1855
1856 return x
1857 }
1858
1859
1860 return &ast.UnaryExpr{OpPos: arrow, Op: token.ARROW, X: x}
1861
1862 case token.MUL:
1863
1864 pos := p.pos
1865 p.next()
1866 x := p.parseUnaryExpr()
1867 return &ast.StarExpr{Star: pos, X: x}
1868 }
1869
1870 return p.parsePrimaryExpr(nil)
1871 }
1872
1873 func (p *parser) tokPrec() (token.Token, int) {
1874 tok := p.tok
1875 if p.inRhs && tok == token.ASSIGN {
1876 tok = token.EQL
1877 }
1878 return tok, tok.Precedence()
1879 }
1880
1881
1882
1883
1884
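// parseBinaryExpr parses a (possibly) binary expression.
// If x is non-nil, it is used as the left operand.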
1885 func (p *parser) parseBinaryExpr(x ast.Expr, prec1 int) ast.Expr {
1886 if p.trace {
1887 defer un(trace(p, "BinaryExpr"))
1888 }
1889
1890 if x == nil {
1891 x = p.parseUnaryExpr()
1892 }
1893
1894
1895
1896 var n int
1897 defer func() { p.nestLev -= n }()
1898 for n = 1; ; n++ {
1899 incNestLev(p)
1900 op, oprec := p.tokPrec()
1901 if oprec < prec1 {
1902 return x
1903 }
1904 pos := p.expect(op)
1905 y := p.parseBinaryExpr(nil, oprec+1)
1906 x = &ast.BinaryExpr{X: x, OpPos: pos, Op: op, Y: y}
1907 }
1908 }
1909
1910
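// The result may be a type or even a raw type ([...]int).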
1911 func (p *parser) parseExpr() ast.Expr {
1912 if p.trace {
1913 defer un(trace(p, "Expression"))
1914 }
1915
1916 return p.parseBinaryExpr(nil, token.LowestPrec+1)
1917 }
1918
1919 func (p *parser) parseRhs() ast.Expr {
1920 old := p.inRhs
1921 p.inRhs = true
1922 x := p.parseExpr()
1923 p.inRhs = old
1924 return x
1925 }
1926
1927
1928
1929
1930
1931 const (
1932 basic = iota
1933 labelOk
1934 rangeOk
1935 )
1936
1937
1938
1939
1940
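// parseSimpleStmt returns true as 2nd result if it parsed the assignment
// of a range clause (with mode == rangeOk). The returned statement is an
// assignment with a right-hand side that is a single unary expression of
// the form "range x".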
1941 func (p *parser) parseSimpleStmt(mode int) (ast.Stmt, bool) {
1942 if p.trace {
1943 defer un(trace(p, "SimpleStmt"))
1944 }
1945
1946 x := p.parseList(false)
1947
1948 switch p.tok {
1949 case
1950 token.DEFINE, token.ASSIGN, token.ADD_ASSIGN,
1951 token.SUB_ASSIGN, token.MUL_ASSIGN, token.QUO_ASSIGN,
1952 token.REM_ASSIGN, token.AND_ASSIGN, token.OR_ASSIGN,
1953 token.XOR_ASSIGN, token.SHL_ASSIGN, token.SHR_ASSIGN, token.AND_NOT_ASSIGN:
1954
1955 pos, tok := p.pos, p.tok
1956 p.next()
1957 var y []ast.Expr
1958 isRange := false
1959 if mode == rangeOk && p.tok == token.RANGE && (tok == token.DEFINE || tok == token.ASSIGN) {
1960 pos := p.pos
1961 p.next()
1962 y = []ast.Expr{&ast.UnaryExpr{OpPos: pos, Op: token.RANGE, X: p.parseRhs()}}
1963 isRange = true
1964 } else {
1965 y = p.parseList(true)
1966 }
1967 return &ast.AssignStmt{Lhs: x, TokPos: pos, Tok: tok, Rhs: y}, isRange
1968 }
1969
1970 if len(x) > 1 {
1971 p.errorExpected(x[0].Pos(), "1 expression")
1972
1973 }
1974
1975 switch p.tok {
1976 case token.COLON:
1977
1978 colon := p.pos
1979 p.next()
1980 if label, isIdent := x[0].(*ast.Ident); mode == labelOk && isIdent {
1981
1982
1983
1984 stmt := &ast.LabeledStmt{Label: label, Colon: colon, Stmt: p.parseStmt()}
1985 return stmt, false
1986 }
1987
1988
1989
1990
1991
1992
1993 p.error(colon, "illegal label declaration")
1994 return &ast.BadStmt{From: x[0].Pos(), To: colon + 1}, false
1995
1996 case token.ARROW:
1997
1998 arrow := p.pos
1999 p.next()
2000 y := p.parseRhs()
2001 return &ast.SendStmt{Chan: x[0], Arrow: arrow, Value: y}, false
2002
2003 case token.INC, token.DEC:
2004
2005 s := &ast.IncDecStmt{X: x[0], TokPos: p.pos, Tok: p.tok}
2006 p.next()
2007 return s, false
2008 }
2009
2010
2011 return &ast.ExprStmt{X: x[0]}, false
2012 }
2013
2014 func (p *parser) parseCallExpr(callType string) *ast.CallExpr {
2015 x := p.parseRhs()
2016 if t := ast.Unparen(x); t != x {
2017 p.error(x.Pos(), fmt.Sprintf("expression in %s must not be parenthesized", callType))
2018 x = t
2019 }
2020 if call, isCall := x.(*ast.CallExpr); isCall {
2021 return call
2022 }
2023 if _, isBad := x.(*ast.BadExpr); !isBad {
2024
2025 p.error(p.safePos(x.End()), fmt.Sprintf("expression in %s must be function call", callType))
2026 }
2027 return nil
2028 }
2029
2030 func (p *parser) parseGoStmt() ast.Stmt {
2031 if p.trace {
2032 defer un(trace(p, "GoStmt"))
2033 }
2034
2035 pos := p.expect(token.GO)
2036 call := p.parseCallExpr("go")
2037 p.expectSemi()
2038 if call == nil {
2039 return &ast.BadStmt{From: pos, To: pos + 2}
2040 }
2041
2042 return &ast.GoStmt{Go: pos, Call: call}
2043 }
2044
2045 func (p *parser) parseDeferStmt() ast.Stmt {
2046 if p.trace {
2047 defer un(trace(p, "DeferStmt"))
2048 }
2049
2050 pos := p.expect(token.DEFER)
2051 call := p.parseCallExpr("defer")
2052 p.expectSemi()
2053 if call == nil {
2054 return &ast.BadStmt{From: pos, To: pos + 5}
2055 }
2056
2057 return &ast.DeferStmt{Defer: pos, Call: call}
2058 }
2059
2060 func (p *parser) parseReturnStmt() *ast.ReturnStmt {
2061 if p.trace {
2062 defer un(trace(p, "ReturnStmt"))
2063 }
2064
2065 pos := p.pos
2066 p.expect(token.RETURN)
2067 var x []ast.Expr
2068 if p.tok != token.SEMICOLON && p.tok != token.RBRACE {
2069 x = p.parseList(true)
2070 }
2071 p.expectSemi()
2072
2073 return &ast.ReturnStmt{Return: pos, Results: x}
2074 }
2075
2076 func (p *parser) parseBranchStmt(tok token.Token) *ast.BranchStmt {
2077 if p.trace {
2078 defer un(trace(p, "BranchStmt"))
2079 }
2080
2081 pos := p.expect(tok)
2082 var label *ast.Ident
2083 if tok == token.GOTO || ((tok == token.CONTINUE || tok == token.BREAK) && p.tok == token.IDENT) {
2084 label = p.parseIdent()
2085 }
2086 p.expectSemi()
2087
2088 return &ast.BranchStmt{TokPos: pos, Tok: tok, Label: label}
2089 }
2090
2091 func (p *parser) makeExpr(s ast.Stmt, want string) ast.Expr {
2092 if s == nil {
2093 return nil
2094 }
2095 if es, isExpr := s.(*ast.ExprStmt); isExpr {
2096 return es.X
2097 }
2098 found := "simple statement"
2099 if _, isAss := s.(*ast.AssignStmt); isAss {
2100 found = "assignment"
2101 }
2102 p.error(s.Pos(), fmt.Sprintf("expected %s, found %s (missing parentheses around composite literal?)", want, found))
2103 return &ast.BadExpr{From: s.Pos(), To: p.safePos(s.End())}
2104 }
2105
2106
2107
2108
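// parseIfHeader parses the init statement and condition of an if statement,
// up to (but not including) the opening brace of the if body.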
2109 func (p *parser) parseIfHeader() (init ast.Stmt, cond ast.Expr) {
2110 if p.tok == token.LBRACE {
2111 p.error(p.pos, "missing condition in if statement")
2112 cond = &ast.BadExpr{From: p.pos, To: p.pos}
2113 return
2114 }
2115
2116
2117 prevLev := p.exprLev
2118 p.exprLev = -1
2119
2120 if p.tok != token.SEMICOLON {
2121
2122 if p.tok == token.VAR {
2123 p.next()
2124 p.error(p.pos, "var declaration not allowed in if initializer")
2125 }
2126 init, _ = p.parseSimpleStmt(basic)
2127 }
2128
2129 var condStmt ast.Stmt
2130 var semi struct {
2131 pos token.Pos
2132 lit string
2133 }
2134 if p.tok != token.LBRACE {
2135 if p.tok == token.SEMICOLON {
2136 semi.pos = p.pos
2137 semi.lit = p.lit
2138 p.next()
2139 } else {
2140 p.expect(token.SEMICOLON)
2141 }
2142 if p.tok != token.LBRACE {
2143 condStmt, _ = p.parseSimpleStmt(basic)
2144 }
2145 } else {
2146 condStmt = init
2147 init = nil
2148 }
2149
2150 if condStmt != nil {
2151 cond = p.makeExpr(condStmt, "boolean expression")
2152 } else if semi.pos.IsValid() {
2153 if semi.lit == "\n" {
2154 p.error(semi.pos, "unexpected newline, expecting { after if clause")
2155 } else {
2156 p.error(semi.pos, "missing condition in if statement")
2157 }
2158 }
2159
2160
2161 if cond == nil {
2162 cond = &ast.BadExpr{From: p.pos, To: p.pos}
2163 }
2164
2165 p.exprLev = prevLev
2166 return
2167 }
2168
2169 func (p *parser) parseIfStmt() *ast.IfStmt {
2170 defer decNestLev(incNestLev(p))
2171
2172 if p.trace {
2173 defer un(trace(p, "IfStmt"))
2174 }
2175
2176 pos := p.expect(token.IF)
2177
2178 init, cond := p.parseIfHeader()
2179 body := p.parseBlockStmt()
2180
2181 var else_ ast.Stmt
2182 if p.tok == token.ELSE {
2183 p.next()
2184 switch p.tok {
2185 case token.IF:
2186 else_ = p.parseIfStmt()
2187 case token.LBRACE:
2188 else_ = p.parseBlockStmt()
2189 p.expectSemi()
2190 default:
2191 p.errorExpected(p.pos, "if statement or block")
2192 else_ = &ast.BadStmt{From: p.pos, To: p.pos}
2193 }
2194 } else {
2195 p.expectSemi()
2196 }
2197
2198 return &ast.IfStmt{If: pos, Init: init, Cond: cond, Body: body, Else: else_}
2199 }
2200
2201 func (p *parser) parseCaseClause() *ast.CaseClause {
2202 if p.trace {
2203 defer un(trace(p, "CaseClause"))
2204 }
2205
2206 pos := p.pos
2207 var list []ast.Expr
2208 if p.tok == token.CASE {
2209 p.next()
2210 list = p.parseList(true)
2211 } else {
2212 p.expect(token.DEFAULT)
2213 }
2214
2215 colon := p.expect(token.COLON)
2216 body := p.parseStmtList()
2217
2218 return &ast.CaseClause{Case: pos, List: list, Colon: colon, Body: body}
2219 }
2220
2221 func isTypeSwitchAssert(x ast.Expr) bool {
2222 a, ok := x.(*ast.TypeAssertExpr)
2223 return ok && a.Type == nil
2224 }
2225
2226 func (p *parser) isTypeSwitchGuard(s ast.Stmt) bool {
2227 switch t := s.(type) {
2228 case *ast.ExprStmt:
2229
2230 return isTypeSwitchAssert(t.X)
2231 case *ast.AssignStmt:
2232
2233 if len(t.Lhs) == 1 && len(t.Rhs) == 1 && isTypeSwitchAssert(t.Rhs[0]) {
2234 switch t.Tok {
2235 case token.ASSIGN:
2236
2237 p.error(t.TokPos, "expected ':=', found '='")
2238 fallthrough
2239 case token.DEFINE:
2240 return true
2241 }
2242 }
2243 }
2244 return false
2245 }
2246
2247 func (p *parser) parseSwitchStmt() ast.Stmt {
2248 if p.trace {
2249 defer un(trace(p, "SwitchStmt"))
2250 }
2251
2252 pos := p.expect(token.SWITCH)
2253
2254 var s1, s2 ast.Stmt
2255 if p.tok != token.LBRACE {
2256 prevLev := p.exprLev
2257 p.exprLev = -1
2258 if p.tok != token.SEMICOLON {
2259 s2, _ = p.parseSimpleStmt(basic)
2260 }
2261 if p.tok == token.SEMICOLON {
2262 p.next()
2263 s1 = s2
2264 s2 = nil
2265 if p.tok != token.LBRACE {
2266
2267
2268
2269
2270
2271
2272
2273
2274
2275
2276
2277
2278 s2, _ = p.parseSimpleStmt(basic)
2279 }
2280 }
2281 p.exprLev = prevLev
2282 }
2283
2284 typeSwitch := p.isTypeSwitchGuard(s2)
2285 lbrace := p.expect(token.LBRACE)
2286 var list []ast.Stmt
2287 for p.tok == token.CASE || p.tok == token.DEFAULT {
2288 list = append(list, p.parseCaseClause())
2289 }
2290 rbrace := p.expect(token.RBRACE)
2291 p.expectSemi()
2292 body := &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace}
2293
2294 if typeSwitch {
2295 return &ast.TypeSwitchStmt{Switch: pos, Init: s1, Assign: s2, Body: body}
2296 }
2297
2298 return &ast.SwitchStmt{Switch: pos, Init: s1, Tag: p.makeExpr(s2, "switch expression"), Body: body}
2299 }
2300
2301 func (p *parser) parseCommClause() *ast.CommClause {
2302 if p.trace {
2303 defer un(trace(p, "CommClause"))
2304 }
2305
2306 pos := p.pos
2307 var comm ast.Stmt
2308 if p.tok == token.CASE {
2309 p.next()
2310 lhs := p.parseList(false)
2311 if p.tok == token.ARROW {
2312
2313 if len(lhs) > 1 {
2314 p.errorExpected(lhs[0].Pos(), "1 expression")
2315
2316 }
2317 arrow := p.pos
2318 p.next()
2319 rhs := p.parseRhs()
2320 comm = &ast.SendStmt{Chan: lhs[0], Arrow: arrow, Value: rhs}
2321 } else {
2322
2323 if tok := p.tok; tok == token.ASSIGN || tok == token.DEFINE {
2324
2325 if len(lhs) > 2 {
2326 p.errorExpected(lhs[0].Pos(), "1 or 2 expressions")
2327
2328 lhs = lhs[0:2]
2329 }
2330 pos := p.pos
2331 p.next()
2332 rhs := p.parseRhs()
2333 comm = &ast.AssignStmt{Lhs: lhs, TokPos: pos, Tok: tok, Rhs: []ast.Expr{rhs}}
2334 } else {
2335
2336 if len(lhs) > 1 {
2337 p.errorExpected(lhs[0].Pos(), "1 expression")
2338
2339 }
2340 comm = &ast.ExprStmt{X: lhs[0]}
2341 }
2342 }
2343 } else {
2344 p.expect(token.DEFAULT)
2345 }
2346
2347 colon := p.expect(token.COLON)
2348 body := p.parseStmtList()
2349
2350 return &ast.CommClause{Case: pos, Comm: comm, Colon: colon, Body: body}
2351 }
2352
2353 func (p *parser) parseSelectStmt() *ast.SelectStmt {
2354 if p.trace {
2355 defer un(trace(p, "SelectStmt"))
2356 }
2357
2358 pos := p.expect(token.SELECT)
2359 lbrace := p.expect(token.LBRACE)
2360 var list []ast.Stmt
2361 for p.tok == token.CASE || p.tok == token.DEFAULT {
2362 list = append(list, p.parseCommClause())
2363 }
2364 rbrace := p.expect(token.RBRACE)
2365 p.expectSemi()
2366 body := &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace}
2367
2368 return &ast.SelectStmt{Select: pos, Body: body}
2369 }
2370
2371 func (p *parser) parseForStmt() ast.Stmt {
2372 if p.trace {
2373 defer un(trace(p, "ForStmt"))
2374 }
2375
2376 pos := p.expect(token.FOR)
2377
2378 var s1, s2, s3 ast.Stmt
2379 var isRange bool
2380 if p.tok != token.LBRACE {
2381 prevLev := p.exprLev
2382 p.exprLev = -1
2383 if p.tok != token.SEMICOLON {
2384 if p.tok == token.RANGE {
2385
2386 pos := p.pos
2387 p.next()
2388 y := []ast.Expr{&ast.UnaryExpr{OpPos: pos, Op: token.RANGE, X: p.parseRhs()}}
2389 s2 = &ast.AssignStmt{Rhs: y}
2390 isRange = true
2391 } else {
2392 s2, isRange = p.parseSimpleStmt(rangeOk)
2393 }
2394 }
2395 if !isRange && p.tok == token.SEMICOLON {
2396 p.next()
2397 s1 = s2
2398 s2 = nil
2399 if p.tok != token.SEMICOLON {
2400 s2, _ = p.parseSimpleStmt(basic)
2401 }
2402 p.expectSemi()
2403 if p.tok != token.LBRACE {
2404 s3, _ = p.parseSimpleStmt(basic)
2405 }
2406 }
2407 p.exprLev = prevLev
2408 }
2409
2410 body := p.parseBlockStmt()
2411 p.expectSemi()
2412
2413 if isRange {
2414 as := s2.(*ast.AssignStmt)
2415
2416 var key, value ast.Expr
2417 switch len(as.Lhs) {
2418 case 0:
2419
2420 case 1:
2421 key = as.Lhs[0]
2422 case 2:
2423 key, value = as.Lhs[0], as.Lhs[1]
2424 default:
2425 p.errorExpected(as.Lhs[len(as.Lhs)-1].Pos(), "at most 2 expressions")
2426 return &ast.BadStmt{From: pos, To: p.safePos(body.End())}
2427 }
2428
2429
2430 x := as.Rhs[0].(*ast.UnaryExpr).X
2431 return &ast.RangeStmt{
2432 For: pos,
2433 Key: key,
2434 Value: value,
2435 TokPos: as.TokPos,
2436 Tok: as.Tok,
2437 Range: as.Rhs[0].Pos(),
2438 X: x,
2439 Body: body,
2440 }
2441 }
2442
2443
2444 return &ast.ForStmt{
2445 For: pos,
2446 Init: s1,
2447 Cond: p.makeExpr(s2, "boolean or range expression"),
2448 Post: s3,
2449 Body: body,
2450 }
2451 }
2452
2453 func (p *parser) parseStmt() (s ast.Stmt) {
2454 defer decNestLev(incNestLev(p))
2455
2456 if p.trace {
2457 defer un(trace(p, "Statement"))
2458 }
2459
2460 switch p.tok {
2461 case token.CONST, token.TYPE, token.VAR:
2462 s = &ast.DeclStmt{Decl: p.parseDecl(stmtStart)}
2463 case
2464
2465 token.IDENT, token.INT, token.FLOAT, token.IMAG, token.CHAR, token.STRING, token.FUNC, token.LPAREN,
2466 token.LBRACK, token.STRUCT, token.MAP, token.CHAN, token.INTERFACE,
2467 token.ADD, token.SUB, token.MUL, token.AND, token.XOR, token.ARROW, token.NOT:
2468 s, _ = p.parseSimpleStmt(labelOk)
2469
2470
2471
2472 if _, isLabeledStmt := s.(*ast.LabeledStmt); !isLabeledStmt {
2473 p.expectSemi()
2474 }
2475 case token.GO:
2476 s = p.parseGoStmt()
2477 case token.DEFER:
2478 s = p.parseDeferStmt()
2479 case token.RETURN:
2480 s = p.parseReturnStmt()
2481 case token.BREAK, token.CONTINUE, token.GOTO, token.FALLTHROUGH:
2482 s = p.parseBranchStmt(p.tok)
2483 case token.LBRACE:
2484 s = p.parseBlockStmt()
2485 p.expectSemi()
2486 case token.IF:
2487 s = p.parseIfStmt()
2488 case token.SWITCH:
2489 s = p.parseSwitchStmt()
2490 case token.SELECT:
2491 s = p.parseSelectStmt()
2492 case token.FOR:
2493 s = p.parseForStmt()
2494 case token.SEMICOLON:
2495
2496
2497
2498 s = &ast.EmptyStmt{Semicolon: p.pos, Implicit: p.lit == "\n"}
2499 p.next()
2500 case token.RBRACE:
2501
2502 s = &ast.EmptyStmt{Semicolon: p.pos, Implicit: true}
2503 default:
2504
2505 pos := p.pos
2506 p.errorExpected(pos, "statement")
2507 p.advance(stmtStart)
2508 s = &ast.BadStmt{From: pos, To: p.pos}
2509 }
2510
2511 return
2512 }
2513
2514
2515
2516
2517 type parseSpecFunction func(doc *ast.CommentGroup, keyword token.Token, iota int) ast.Spec
2518
2519 func (p *parser) parseImportSpec(doc *ast.CommentGroup, _ token.Token, _ int) ast.Spec {
2520 if p.trace {
2521 defer un(trace(p, "ImportSpec"))
2522 }
2523
2524 var ident *ast.Ident
2525 switch p.tok {
2526 case token.IDENT:
2527 ident = p.parseIdent()
2528 case token.PERIOD:
2529 ident = &ast.Ident{NamePos: p.pos, Name: "."}
2530 p.next()
2531 }
2532
2533 pos := p.pos
2534 var path string
2535 if p.tok == token.STRING {
2536 path = p.lit
2537 p.next()
2538 } else if p.tok.IsLiteral() {
2539 p.error(pos, "import path must be a string")
2540 p.next()
2541 } else {
2542 p.error(pos, "missing import path")
2543 p.advance(exprEnd)
2544 }
2545 comment := p.expectSemi()
2546
2547
2548 spec := &ast.ImportSpec{
2549 Doc: doc,
2550 Name: ident,
2551 Path: &ast.BasicLit{ValuePos: pos, Kind: token.STRING, Value: path},
2552 Comment: comment,
2553 }
2554 p.imports = append(p.imports, spec)
2555
2556 return spec
2557 }
2558
2559 func (p *parser) parseValueSpec(doc *ast.CommentGroup, keyword token.Token, iota int) ast.Spec {
2560 if p.trace {
2561 defer un(trace(p, keyword.String()+"Spec"))
2562 }
2563
2564 idents := p.parseIdentList()
2565 var typ ast.Expr
2566 var values []ast.Expr
2567 switch keyword {
2568 case token.CONST:
2569
2570 if p.tok != token.EOF && p.tok != token.SEMICOLON && p.tok != token.RPAREN {
2571 typ = p.tryIdentOrType()
2572 if p.tok == token.ASSIGN {
2573 p.next()
2574 values = p.parseList(true)
2575 }
2576 }
2577 case token.VAR:
2578 if p.tok != token.ASSIGN {
2579 typ = p.parseType()
2580 }
2581 if p.tok == token.ASSIGN {
2582 p.next()
2583 values = p.parseList(true)
2584 }
2585 default:
2586 panic("unreachable")
2587 }
2588 comment := p.expectSemi()
2589
2590 spec := &ast.ValueSpec{
2591 Doc: doc,
2592 Names: idents,
2593 Type: typ,
2594 Values: values,
2595 Comment: comment,
2596 }
2597 return spec
2598 }
2599
2600 func (p *parser) parseGenericType(spec *ast.TypeSpec, openPos token.Pos, name0 *ast.Ident, typ0 ast.Expr) {
2601 if p.trace {
2602 defer un(trace(p, "parseGenericType"))
2603 }
2604
2605 list := p.parseParameterList(name0, typ0, token.RBRACK, false)
2606 closePos := p.expect(token.RBRACK)
2607 spec.TypeParams = &ast.FieldList{Opening: openPos, List: list, Closing: closePos}
2608 if p.tok == token.ASSIGN {
2609
2610 spec.Assign = p.pos
2611 p.next()
2612 }
2613 spec.Type = p.parseType()
2614 }
2615
2616 func (p *parser) parseTypeSpec(doc *ast.CommentGroup, _ token.Token, _ int) ast.Spec {
2617 if p.trace {
2618 defer un(trace(p, "TypeSpec"))
2619 }
2620
2621 name := p.parseIdent()
2622 spec := &ast.TypeSpec{Doc: doc, Name: name}
2623
2624 if p.tok == token.LBRACK {
2625
2626
2627 lbrack := p.pos
2628 p.next()
2629 if p.tok == token.IDENT {
2630
2631
2632
2633
2634
2635
2636
2637
2638
2639
2640
2641
2642
2643
2644
2645 var x ast.Expr = p.parseIdent()
2646 if p.tok != token.LBRACK {
2647
2648
2649
2650 p.exprLev++
2651 lhs := p.parsePrimaryExpr(x)
2652 x = p.parseBinaryExpr(lhs, token.LowestPrec+1)
2653 p.exprLev--
2654 }
2655
2656
2657
2658
2659
2660
2661
2662 if pname, ptype := extractName(x, p.tok == token.COMMA); pname != nil && (ptype != nil || p.tok != token.RBRACK) {
2663
2664
2665
2666 p.parseGenericType(spec, lbrack, pname, ptype)
2667 } else {
2668
2669
2670 spec.Type = p.parseArrayType(lbrack, x)
2671 }
2672 } else {
2673
2674 spec.Type = p.parseArrayType(lbrack, nil)
2675 }
2676 } else {
2677
2678 if p.tok == token.ASSIGN {
2679
2680 spec.Assign = p.pos
2681 p.next()
2682 }
2683 spec.Type = p.parseType()
2684 }
2685
2686 spec.Comment = p.expectSemi()
2687
2688 return spec
2689 }
2690
2691
2692
2693
2694
2695
2696
2697
2698
2699
2700
2701
2702
2703
2704
2705
2706
2707
2708
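// extractName splits the expression x into (name, expr) if syntactically x
// can be written as name expr, as in a type parameter declaration such as
// "P *E". The split only happens if expr is a type element (per isTypeElem)
// or if force is set. If x is just a name, the result is (name, nil); if the
// split cannot be made, the result is (nil, x).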
2709 func extractName(x ast.Expr, force bool) (*ast.Ident, ast.Expr) {
2710 switch x := x.(type) {
2711 case *ast.Ident:
2712 return x, nil
2713 case *ast.BinaryExpr:
2714 switch x.Op {
2715 case token.MUL:
2716 if name, _ := x.X.(*ast.Ident); name != nil && (force || isTypeElem(x.Y)) {
2717
2718 return name, &ast.StarExpr{Star: x.OpPos, X: x.Y}
2719 }
2720 case token.OR:
2721 if name, lhs := extractName(x.X, force || isTypeElem(x.Y)); name != nil && lhs != nil {
2722
2723 op := *x
2724 op.X = lhs
2725 return name, &op
2726 }
2727 }
2728 case *ast.CallExpr:
2729 if name, _ := x.Fun.(*ast.Ident); name != nil {
2730 if len(x.Args) == 1 && x.Ellipsis == token.NoPos && (force || isTypeElem(x.Args[0])) {
2731
2732
2733
2734 return name, &ast.ParenExpr{
2735 Lparen: x.Lparen,
2736 X: x.Args[0],
2737 Rparen: x.Rparen,
2738 }
2739 }
2740 }
2741 }
2742 return nil, x
2743 }
2744
2745
2746
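// isTypeElem reports whether x is a (possibly parenthesized) type element
// expression. The result is false if x could be either a type element or an
// ordinary (value) expression.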
2747 func isTypeElem(x ast.Expr) bool {
2748 switch x := x.(type) {
2749 case *ast.ArrayType, *ast.StructType, *ast.FuncType, *ast.InterfaceType, *ast.MapType, *ast.ChanType:
2750 return true
2751 case *ast.BinaryExpr:
2752 return isTypeElem(x.X) || isTypeElem(x.Y)
2753 case *ast.UnaryExpr:
2754 return x.Op == token.TILDE
2755 case *ast.ParenExpr:
2756 return isTypeElem(x.X)
2757 }
2758 return false
2759 }
2760
2761 func (p *parser) parseGenDecl(keyword token.Token, f parseSpecFunction) *ast.GenDecl {
2762 if p.trace {
2763 defer un(trace(p, "GenDecl("+keyword.String()+")"))
2764 }
2765
2766 doc := p.leadComment
2767 pos := p.expect(keyword)
2768 var lparen, rparen token.Pos
2769 var list []ast.Spec
2770 if p.tok == token.LPAREN {
2771 lparen = p.pos
2772 p.next()
2773 for iota := 0; p.tok != token.RPAREN && p.tok != token.EOF; iota++ {
2774 list = append(list, f(p.leadComment, keyword, iota))
2775 }
2776 rparen = p.expect(token.RPAREN)
2777 p.expectSemi()
2778 } else {
2779 list = append(list, f(nil, keyword, 0))
2780 }
2781
2782 return &ast.GenDecl{
2783 Doc: doc,
2784 TokPos: pos,
2785 Tok: keyword,
2786 Lparen: lparen,
2787 Specs: list,
2788 Rparen: rparen,
2789 }
2790 }
2791
2792 func (p *parser) parseFuncDecl() *ast.FuncDecl {
2793 if p.trace {
2794 defer un(trace(p, "FunctionDecl"))
2795 }
2796
2797 doc := p.leadComment
2798 pos := p.expect(token.FUNC)
2799
2800 var recv *ast.FieldList
2801 if p.tok == token.LPAREN {
2802 recv = p.parseParameters(false)
2803 }
2804
2805 ident := p.parseIdent()
2806
2807 var tparams *ast.FieldList
2808 if p.tok == token.LBRACK {
2809 tparams = p.parseTypeParameters()
2810 if recv != nil && tparams != nil {
2811
2812
2813 p.error(tparams.Opening, "method must have no type parameters")
2814 tparams = nil
2815 }
2816 }
2817 params := p.parseParameters(false)
2818 results := p.parseParameters(true)
2819
2820 var body *ast.BlockStmt
2821 switch p.tok {
2822 case token.LBRACE:
2823 body = p.parseBody()
2824 p.expectSemi()
2825 case token.SEMICOLON:
2826 p.next()
2827 if p.tok == token.LBRACE {
2828
2829 p.error(p.pos, "unexpected semicolon or newline before {")
2830 body = p.parseBody()
2831 p.expectSemi()
2832 }
2833 default:
2834 p.expectSemi()
2835 }
2836
2837 decl := &ast.FuncDecl{
2838 Doc: doc,
2839 Recv: recv,
2840 Name: ident,
2841 Type: &ast.FuncType{
2842 Func: pos,
2843 TypeParams: tparams,
2844 Params: params,
2845 Results: results,
2846 },
2847 Body: body,
2848 }
2849 return decl
2850 }
2851
2852 func (p *parser) parseDecl(sync map[token.Token]bool) ast.Decl {
2853 if p.trace {
2854 defer un(trace(p, "Declaration"))
2855 }
2856
2857 var f parseSpecFunction
2858 switch p.tok {
2859 case token.IMPORT:
2860 f = p.parseImportSpec
2861
2862 case token.CONST, token.VAR:
2863 f = p.parseValueSpec
2864
2865 case token.TYPE:
2866 f = p.parseTypeSpec
2867
2868 case token.FUNC:
2869 return p.parseFuncDecl()
2870
2871 default:
2872 pos := p.pos
2873 p.errorExpected(pos, "declaration")
2874 p.advance(sync)
2875 return &ast.BadDecl{From: pos, To: p.pos}
2876 }
2877
2878 return p.parseGenDecl(p.tok, f)
2879 }
2880
2881
2882
2883
2884 func (p *parser) parseFile() *ast.File {
2885 if p.trace {
2886 defer un(trace(p, "File"))
2887 }
2888
2889
2890
2891 if p.errors.Len() != 0 {
2892 return nil
2893 }
2894
2895
2896 doc := p.leadComment
2897 pos := p.expect(token.PACKAGE)
2898
2899
2900 ident := p.parseIdent()
2901 if ident.Name == "_" && p.mode&DeclarationErrors != 0 {
2902 p.error(p.pos, "invalid package name _")
2903 }
2904 p.expectSemi()
2905
2906
2907
2908 if p.errors.Len() != 0 {
2909 return nil
2910 }
2911
2912 var decls []ast.Decl
2913 if p.mode&PackageClauseOnly == 0 {
2914
2915 for p.tok == token.IMPORT {
2916 decls = append(decls, p.parseGenDecl(token.IMPORT, p.parseImportSpec))
2917 }
2918
2919 if p.mode&ImportsOnly == 0 {
2920
2921 prev := token.IMPORT
2922 for p.tok != token.EOF {
2923
2924 if p.tok == token.IMPORT && prev != token.IMPORT {
2925 p.error(p.pos, "imports must appear before other declarations")
2926 }
2927 prev = p.tok
2928
2929 decls = append(decls, p.parseDecl(declStart))
2930 }
2931 }
2932 }
2933
2934 f := &ast.File{
2935 Doc: doc,
2936 Package: pos,
2937 Name: ident,
2938 Decls: decls,
2939
2940 Imports: p.imports,
2941 Comments: p.comments,
2942 GoVersion: p.goVersion,
2943 }
2944 var declErr func(token.Pos, string)
2945 if p.mode&DeclarationErrors != 0 {
2946 declErr = p.error
2947 }
2948 if p.mode&SkipObjectResolution == 0 {
2949 resolveFile(f, p.file, declErr)
2950 }
2951
2952 return f
2953 }
2954
2955
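// packIndexExpr returns an IndexExpr for a single index expression, and an
// IndexListExpr for more than one; it panics if exprs is empty.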
2956 func packIndexExpr(x ast.Expr, lbrack token.Pos, exprs []ast.Expr, rbrack token.Pos) ast.Expr {
2957 switch len(exprs) {
2958 case 0:
2959 panic("internal error: packIndexExpr with empty expr slice")
2960 case 1:
2961 return &ast.IndexExpr{
2962 X: x,
2963 Lbrack: lbrack,
2964 Index: exprs[0],
2965 Rbrack: rbrack,
2966 }
2967 default:
2968 return &ast.IndexListExpr{
2969 X: x,
2970 Lbrack: lbrack,
2971 Indices: exprs,
2972 Rbrack: rbrack,
2973 }
2974 }
2975 }
2976