Source file
src/go/parser/parser.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25 package parser
26
27 import (
28 "fmt"
29 "go/ast"
30 "go/build/constraint"
31 "go/internal/scannerhooks"
32 "go/scanner"
33 "go/token"
34 "strings"
35 )
36
37
// The parser structure holds the parser's internal state.
type parser struct {
	file    *token.File
	errors  scanner.ErrorList // accumulated parse errors
	scanner scanner.Scanner

	// Tracing/debugging
	mode   Mode // parsing mode
	trace  bool // == (mode&Trace != 0)
	indent int  // indentation used for tracing output

	// Comments
	comments    []*ast.CommentGroup
	leadComment *ast.CommentGroup // last lead comment
	lineComment *ast.CommentGroup // last line comment
	top         bool              // still at top of file (before first non-comment token)
	goVersion   string            // minimum Go version found in //go:build comment

	// Next token
	pos       token.Pos   // token position
	tok       token.Token // one token look-ahead
	lit       string      // token literal
	stringEnd token.Pos   // end position of a STRING token's literal (set via scannerhooks in next0)

	// Error recovery
	// (used to limit the number of calls to parser.advance
	// w/o making scanning progress - avoids potential endless
	// loops across multiple parser functions during error recovery)
	syncPos token.Pos // last synchronization position
	syncCnt int       // number of parser.advance calls without progress

	// Non-syntactic parser control
	exprLev int  // expression nesting level (incremented/decremented around bracketed expressions)
	inRhs   bool // if set, the parser is parsing a rhs expression

	imports []*ast.ImportSpec // list of imports

	// nestLev is used to track and limit the recursion depth
	// during parsing (see incNestLev/decNestLev).
	nestLev int
}
78
// init prepares p to parse src from file, reporting scan errors to
// p.errors, and loads the first token into the look-ahead.
func (p *parser) init(file *token.File, src []byte, mode Mode) {
	p.file = file
	eh := func(pos token.Position, msg string) { p.errors.Add(pos, msg) }
	p.scanner.Init(p.file, src, eh, scanner.ScanComments)

	p.top = true
	p.mode = mode
	p.trace = mode&Trace != 0 // for convenience (p.trace is used frequently)
	p.next()                  // fetch first token
}
89
90
91
92
93 func (p *parser) printTrace(a ...any) {
94 const dots = ". . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . "
95 const n = len(dots)
96 pos := p.file.Position(p.pos)
97 fmt.Printf("%5d:%3d: ", pos.Line, pos.Column)
98 i := 2 * p.indent
99 for i > n {
100 fmt.Print(dots)
101 i -= n
102 }
103
104 fmt.Print(dots[0:i])
105 fmt.Println(a...)
106 }
107
// trace prints an opening trace message and increases the trace
// indentation. Usage pattern: defer un(trace(p, "..."))
func trace(p *parser, msg string) *parser {
	p.printTrace(msg, "(")
	p.indent++
	return p
}
113
114
// un is the counterpart of trace: it decreases the trace indentation
// and prints a closing parenthesis. Usage pattern: defer un(trace(p, "..."))
func un(p *parser) {
	p.indent--
	p.printTrace(")")
}
119
120
// maxNestLev is the deepest nesting depth the parser accepts before
// aborting (protects against stack exhaustion on adversarial input).
const maxNestLev int = 1e5

// incNestLev is used to track nesting depth during parsing to prevent
// stack exhaustion. It is used along with decNestLev in a similar
// fashion to how un and trace are used.
func incNestLev(p *parser) *parser {
	p.nestLev++
	if p.nestLev > maxNestLev {
		p.error(p.pos, "exceeded max nesting depth")
		panic(bailout{}) // abort parsing
	}
	return p
}
131
132
133
// decNestLev is used to track nesting depth during parsing to prevent
// stack exhaustion. It is used along with incNestLev in a similar
// fashion to how un and trace are used.
func decNestLev(p *parser) {
	p.nestLev--
}
137
138
// Advance to the next token.
func (p *parser) next0() {
	// Because of one-token look-ahead, print the previous token
	// when tracing as it provides a more readable output. The
	// very first token (!p.pos.IsValid()) is not initialized
	// (it is token.ILLEGAL), so don't print it.
	if p.trace && p.pos.IsValid() {
		s := p.tok.String()
		switch {
		case p.tok.IsLiteral():
			p.printTrace(s, p.lit)
		case p.tok.IsOperator(), p.tok.IsKeyword():
			p.printTrace("\"" + s + "\"")
		default:
			p.printTrace(s)
		}
	}

	for {
		p.pos, p.tok, p.lit = p.scanner.Scan()
		if p.tok == token.COMMENT {
			// Remember the Go version from a //go:build comment seen
			// before the first non-comment token.
			if p.top && strings.HasPrefix(p.lit, "//go:build") {
				if x, err := constraint.Parse(p.lit); err == nil {
					p.goVersion = constraint.GoVersion(x)
				}
			}
			if p.mode&ParseComments == 0 {
				continue // skip comments entirely
			}
		} else {
			if p.tok == token.STRING {
				// Record the end of the string literal as reported by the
				// scanner (see scannerhooks.StringEnd); used for BasicLit.ValueEnd.
				p.stringEnd = scannerhooks.StringEnd(&p.scanner)
			}
			// The first non-comment token ends the "top of file" section.
			p.top = false
		}
		break
	}
}
178
179
// lineFor returns the line number for pos, ignoring line directives
// (adjusted == false).
func (p *parser) lineFor(pos token.Pos) int {
	return p.file.PositionFor(pos, false).Line
}
183
184
185 func (p *parser) consumeComment() (comment *ast.Comment, endline int) {
186
187
188 endline = p.lineFor(p.pos)
189 if p.lit[1] == '*' {
190
191 for i := 0; i < len(p.lit); i++ {
192 if p.lit[i] == '\n' {
193 endline++
194 }
195 }
196 }
197
198 comment = &ast.Comment{Slash: p.pos, Text: p.lit}
199 p.next0()
200
201 return
202 }
203
204
205
206
207
208 func (p *parser) consumeCommentGroup(n int) (comments *ast.CommentGroup, endline int) {
209 var list []*ast.Comment
210 endline = p.lineFor(p.pos)
211 for p.tok == token.COMMENT && p.lineFor(p.pos) <= endline+n {
212 var comment *ast.Comment
213 comment, endline = p.consumeComment()
214 list = append(list, comment)
215 }
216
217
218 comments = &ast.CommentGroup{List: list}
219 p.comments = append(p.comments, comments)
220
221 return
222 }
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
// Advance to the next non-comment token. In the process, collect
// any comment groups encountered, and remember the last lead and
// line comments.
//
// A lead comment is a comment group that starts and ends in a
// line without any other tokens and that is followed by a non-comment
// token on the line immediately after the comment group.
//
// A line comment is a comment group that follows a non-comment
// token on the same line, and that has no tokens after it on the line
// where it ends.
//
// Lead and line comments may be considered documentation that is
// stored in the AST.
func (p *parser) next() {
	p.leadComment = nil
	p.lineComment = nil
	prev := p.pos
	p.next0()

	if p.tok == token.COMMENT {
		var comment *ast.CommentGroup
		var endline int

		if p.lineFor(p.pos) == p.lineFor(prev) {
			// The comment is on same line as the previous token; it
			// cannot be a lead comment but may be a line comment.
			comment, endline = p.consumeCommentGroup(0)
			if p.lineFor(p.pos) != endline || p.tok == token.SEMICOLON || p.tok == token.EOF {
				// The next token is on a different line, thus
				// the last comment group is a line comment.
				p.lineComment = comment
			}
		}

		// consume successor comments, if any
		endline = -1
		for p.tok == token.COMMENT {
			comment, endline = p.consumeCommentGroup(1)
		}

		if endline+1 == p.lineFor(p.pos) {
			// The next token is following on the line immediately after the
			// comment group, thus the last comment group is a lead comment.
			p.leadComment = comment
		}
	}
}
272
273
274
// A bailout panic is raised to indicate early termination of parsing
// (e.g., too many errors, or maximum nesting depth exceeded). Within
// this chunk it is always panicked with its zero value; pos and msg
// presumably carry extra detail for the recover site — NOTE(review):
// no recover site is visible here, confirm against the rest of the file.
type bailout struct {
	pos token.Pos
	msg string
}
279
// error reports msg at pos. In non-AllErrors mode it discards errors
// reported on the same line as the previous error, and aborts parsing
// (via a bailout panic) once more than 10 errors have been recorded.
func (p *parser) error(pos token.Pos, msg string) {
	if p.trace {
		defer un(trace(p, "error: "+msg))
	}

	epos := p.file.Position(pos)

	// If AllErrors is not set, discard errors reported on the same line
	// as the last recorded error and stop parsing if there are more than
	// 10 errors.
	if p.mode&AllErrors == 0 {
		n := len(p.errors)
		if n > 0 && p.errors[n-1].Pos.Line == epos.Line {
			return // discard - likely a spurious error
		}
		if n > 10 {
			panic(bailout{})
		}
	}

	p.errors.Add(epos, msg)
}
302
// errorExpected reports an "expected ..." error at pos. If pos is the
// current token position, the message also describes what was found.
func (p *parser) errorExpected(pos token.Pos, msg string) {
	msg = "expected " + msg
	if pos == p.pos {
		// The error happened at the current position;
		// make the error message more specific.
		switch {
		case p.tok == token.SEMICOLON && p.lit == "\n":
			msg += ", found newline"
		case p.tok.IsLiteral():
			// print 123 rather than 'INT', etc.
			msg += ", found " + p.lit
		default:
			msg += ", found '" + p.tok.String() + "'"
		}
	}
	p.error(pos, msg)
}
320
321 func (p *parser) expect(tok token.Token) token.Pos {
322 pos := p.pos
323 if p.tok != tok {
324 p.errorExpected(pos, "'"+tok.String()+"'")
325 }
326 p.next()
327 return pos
328 }
329
330
331
332 func (p *parser) expect2(tok token.Token) (pos token.Pos) {
333 if p.tok == tok {
334 pos = p.pos
335 } else {
336 p.errorExpected(p.pos, "'"+tok.String()+"'")
337 }
338 p.next()
339 return
340 }
341
342
343
344 func (p *parser) expectClosing(tok token.Token, context string) token.Pos {
345 if p.tok != tok && p.tok == token.SEMICOLON && p.lit == "\n" {
346 p.error(p.pos, "missing ',' before newline in "+context)
347 p.next()
348 }
349 return p.expect(tok)
350 }
351
352
// expectSemi consumes a semicolon and returns the applicable line comment.
func (p *parser) expectSemi() (comment *ast.CommentGroup) {
	// semicolon is optional before a closing ')' or '}'
	switch p.tok {
	case token.RPAREN, token.RBRACE:
		return nil
	case token.COMMA:
		// permit a ',' instead of a ';' but complain
		p.errorExpected(p.pos, "';'")
		fallthrough
	case token.SEMICOLON:
		if p.lit == ";" {
			// explicit semicolon
			p.next()
			comment = p.lineComment // use following comments
		} else {
			// artificial semicolon
			comment = p.lineComment // use preceding comments
			p.next()
		}
		return comment
	default:
		p.errorExpected(p.pos, "';'")
		p.advance(stmtStart)
		return nil
	}
}
378
379 func (p *parser) atComma(context string, follow token.Token) bool {
380 if p.tok == token.COMMA {
381 return true
382 }
383 if p.tok != follow {
384 msg := "missing ','"
385 if p.tok == token.SEMICOLON && p.lit == "\n" {
386 msg += " before newline"
387 }
388 p.error(p.pos, msg+" in "+context)
389 return true
390 }
391 return false
392 }
393
// assert panics with an internal-error message if cond does not hold.
func assert(cond bool, msg string) {
	if cond {
		return
	}
	panic("go/parser internal error: " + msg)
}
399
400
401
// advance consumes tokens until the current token p.tok
// is in the 'to' set, or token.EOF. For error recovery.
func (p *parser) advance(to map[token.Token]bool) {
	for ; p.tok != token.EOF; p.next() {
		if to[p.tok] {
			// Return only if parser made some progress since last
			// sync or if it has not reached 10 advance calls without
			// progress. Otherwise consume at least one token to
			// avoid an endless parser loop (it is possible that
			// both parseOperand and parseStmt call advance and
			// correctly do not advance, thus the need for the
			// invocation limit p.syncCnt).
			if p.pos == p.syncPos && p.syncCnt < 10 {
				p.syncCnt++
				return
			}
			if p.pos > p.syncPos {
				p.syncPos = p.pos
				p.syncCnt = 0
				return
			}
			// Reaching here indicates a parser bug, likely an
			// incorrect token list in this function, but it only
			// leads to skipping of possibly correct code if a
			// certain token is missing from the 'to' set, so
			// don't complain loudly and keep scanning.
		}
	}
}
429
// stmtStart is the set of tokens that may begin a statement;
// used as a synchronization set for error recovery (see advance).
var stmtStart = map[token.Token]bool{
	token.BREAK:       true,
	token.CONST:       true,
	token.CONTINUE:    true,
	token.DEFER:       true,
	token.FALLTHROUGH: true,
	token.FOR:         true,
	token.GO:          true,
	token.GOTO:        true,
	token.IF:          true,
	token.RETURN:      true,
	token.SELECT:      true,
	token.SWITCH:      true,
	token.TYPE:        true,
	token.VAR:         true,
}
446
// declStart is the set of tokens that may begin a declaration;
// used as a synchronization set for error recovery (see advance).
var declStart = map[token.Token]bool{
	token.IMPORT: true,
	token.CONST:  true,
	token.TYPE:   true,
	token.VAR:    true,
}
453
// exprEnd is the set of tokens that may follow an expression;
// used as a synchronization set for error recovery (see advance).
var exprEnd = map[token.Token]bool{
	token.COMMA:     true,
	token.COLON:     true,
	token.SEMICOLON: true,
	token.RPAREN:    true,
	token.RBRACK:    true,
	token.RBRACE:    true,
}
462
463
464
465
466 func (p *parser) parseIdent() *ast.Ident {
467 pos := p.pos
468 name := "_"
469 if p.tok == token.IDENT {
470 name = p.lit
471 p.next()
472 } else {
473 p.expect(token.IDENT)
474 }
475 return &ast.Ident{NamePos: pos, Name: name}
476 }
477
478 func (p *parser) parseIdentList() (list []*ast.Ident) {
479 if p.trace {
480 defer un(trace(p, "IdentList"))
481 }
482
483 list = append(list, p.parseIdent())
484 for p.tok == token.COMMA {
485 p.next()
486 list = append(list, p.parseIdent())
487 }
488
489 return
490 }
491
492
493
494
495
496 func (p *parser) parseExprList() (list []ast.Expr) {
497 if p.trace {
498 defer un(trace(p, "ExpressionList"))
499 }
500
501 list = append(list, p.parseExpr())
502 for p.tok == token.COMMA {
503 p.next()
504 list = append(list, p.parseExpr())
505 }
506
507 return
508 }
509
510 func (p *parser) parseList(inRhs bool) []ast.Expr {
511 old := p.inRhs
512 p.inRhs = inRhs
513 list := p.parseExprList()
514 p.inRhs = old
515 return list
516 }
517
518
519
520
521 func (p *parser) parseType() ast.Expr {
522 if p.trace {
523 defer un(trace(p, "Type"))
524 }
525
526 typ := p.tryIdentOrType()
527
528 if typ == nil {
529 pos := p.pos
530 p.errorExpected(pos, "type")
531 p.advance(exprEnd)
532 return &ast.BadExpr{From: pos, To: p.pos}
533 }
534
535 return typ
536 }
537
538 func (p *parser) parseQualifiedIdent(ident *ast.Ident) ast.Expr {
539 if p.trace {
540 defer un(trace(p, "QualifiedIdent"))
541 }
542
543 typ := p.parseTypeName(ident)
544 if p.tok == token.LBRACK {
545 typ = p.parseTypeInstance(typ)
546 }
547
548 return typ
549 }
550
551
552 func (p *parser) parseTypeName(ident *ast.Ident) ast.Expr {
553 if p.trace {
554 defer un(trace(p, "TypeName"))
555 }
556
557 if ident == nil {
558 ident = p.parseIdent()
559 }
560
561 if p.tok == token.PERIOD {
562
563 p.next()
564 sel := p.parseIdent()
565 return &ast.SelectorExpr{X: ident, Sel: sel}
566 }
567
568 return ident
569 }
570
571
572
573 func (p *parser) parseArrayType(lbrack token.Pos, len ast.Expr) *ast.ArrayType {
574 if p.trace {
575 defer un(trace(p, "ArrayType"))
576 }
577
578 if len == nil {
579 p.exprLev++
580
581 if p.tok == token.ELLIPSIS {
582 len = &ast.Ellipsis{Ellipsis: p.pos}
583 p.next()
584 } else if p.tok != token.RBRACK {
585 len = p.parseRhs()
586 }
587 p.exprLev--
588 }
589 if p.tok == token.COMMA {
590
591
592
593 p.error(p.pos, "unexpected comma; expecting ]")
594 p.next()
595 }
596 p.expect(token.RBRACK)
597 elt := p.parseType()
598 return &ast.ArrayType{Lbrack: lbrack, Len: len, Elt: elt}
599 }
600
// parseArrayFieldOrTypeInstance disambiguates, after a name x followed
// by '[', between a struct field of array type (x []E or x [N]E) and an
// embedded instantiated generic type (X[T1, ...]). It returns the field
// name and type for the former, and (nil, index expression) for the latter.
func (p *parser) parseArrayFieldOrTypeInstance(x *ast.Ident) (*ast.Ident, ast.Expr) {
	if p.trace {
		defer un(trace(p, "ArrayFieldOrTypeInstance"))
	}

	lbrack := p.expect(token.LBRACK)
	trailingComma := token.NoPos // if valid, the position of a trailing comma preceding the ']'
	var args []ast.Expr
	if p.tok != token.RBRACK {
		p.exprLev++
		args = append(args, p.parseRhs())
		for p.tok == token.COMMA {
			comma := p.pos
			p.next()
			if p.tok == token.RBRACK {
				trailingComma = comma
				break
			}
			args = append(args, p.parseRhs())
		}
		p.exprLev--
	}
	rbrack := p.expect(token.RBRACK)

	if len(args) == 0 {
		// x []E
		elt := p.parseType()
		return x, &ast.ArrayType{Lbrack: lbrack, Elt: elt}
	}

	// x [P]E or x[P]
	if len(args) == 1 {
		elt := p.tryIdentOrType()
		if elt != nil {
			// x [P]E
			if trailingComma.IsValid() {
				// Trailing commas are invalid in array type fields.
				p.error(trailingComma, "unexpected comma; expecting ]")
			}
			return x, &ast.ArrayType{Lbrack: lbrack, Len: args[0], Elt: elt}
		}
	}

	// x[P], x[P1, P2], ...
	return nil, packIndexExpr(x, lbrack, args, rbrack)
}
647
// parseFieldDecl parses a single struct field declaration: named fields
// with a type, an embedded (possibly pointer, possibly qualified) type,
// or error-recovery variants for incorrectly parenthesized embedded
// types. An optional tag string and the terminating semicolon (with its
// line comment) are consumed as well.
func (p *parser) parseFieldDecl() *ast.Field {
	if p.trace {
		defer un(trace(p, "FieldDecl"))
	}

	doc := p.leadComment

	var names []*ast.Ident
	var typ ast.Expr
	switch p.tok {
	case token.IDENT:
		name := p.parseIdent()
		if p.tok == token.PERIOD || p.tok == token.STRING || p.tok == token.SEMICOLON || p.tok == token.RBRACE {
			// embedded type
			typ = name
			if p.tok == token.PERIOD {
				typ = p.parseQualifiedIdent(name)
			}
		} else {
			// name1, name2, ... T
			names = []*ast.Ident{name}
			for p.tok == token.COMMA {
				p.next()
				names = append(names, p.parseIdent())
			}
			// Careful dance: for a single name followed by '[' we don't
			// know yet if we have an embedded instantiated type
			// T[P1, P2, ...] or a field T of array type []E or [P]E.
			if len(names) == 1 && p.tok == token.LBRACK {
				name, typ = p.parseArrayFieldOrTypeInstance(name)
				if name == nil {
					names = nil
				}
			} else {
				// T P
				typ = p.parseType()
			}
		}
	case token.MUL:
		star := p.pos
		p.next()
		if p.tok == token.LPAREN {
			// *(T) - error, but recover
			p.error(p.pos, "cannot parenthesize embedded type")
			p.next()
			typ = p.parseQualifiedIdent(nil)
			// expect closing ')' but no need to complain if missing
			if p.tok == token.RPAREN {
				p.next()
			}
		} else {
			// *T
			typ = p.parseQualifiedIdent(nil)
		}
		typ = &ast.StarExpr{Star: star, X: typ}

	case token.LPAREN:
		// (T) or (*T) - errors, but recover
		p.error(p.pos, "cannot parenthesize embedded type")
		p.next()
		if p.tok == token.MUL {
			// (*T)
			star := p.pos
			p.next()
			typ = &ast.StarExpr{Star: star, X: p.parseQualifiedIdent(nil)}
		} else {
			// (T)
			typ = p.parseQualifiedIdent(nil)
		}
		// expect closing ')' but no need to complain if missing
		if p.tok == token.RPAREN {
			p.next()
		}

	default:
		// error recovery
		pos := p.pos
		p.errorExpected(pos, "field name or embedded type")
		p.advance(exprEnd)
		typ = &ast.BadExpr{From: pos, To: p.pos}
	}

	// optional tag string
	var tag *ast.BasicLit
	if p.tok == token.STRING {
		tag = &ast.BasicLit{ValuePos: p.pos, ValueEnd: p.stringEnd, Kind: p.tok, Value: p.lit}
		p.next()
	}

	comment := p.expectSemi()

	field := &ast.Field{Doc: doc, Names: names, Type: typ, Tag: tag, Comment: comment}
	return field
}
738
739 func (p *parser) parseStructType() *ast.StructType {
740 if p.trace {
741 defer un(trace(p, "StructType"))
742 }
743
744 pos := p.expect(token.STRUCT)
745 lbrace := p.expect(token.LBRACE)
746 var list []*ast.Field
747 for p.tok == token.IDENT || p.tok == token.MUL || p.tok == token.LPAREN {
748
749
750
751 list = append(list, p.parseFieldDecl())
752 }
753 rbrace := p.expect(token.RBRACE)
754
755 return &ast.StructType{
756 Struct: pos,
757 Fields: &ast.FieldList{
758 Opening: lbrace,
759 List: list,
760 Closing: rbrace,
761 },
762 }
763 }
764
765 func (p *parser) parsePointerType() *ast.StarExpr {
766 if p.trace {
767 defer un(trace(p, "PointerType"))
768 }
769
770 star := p.expect(token.MUL)
771 base := p.parseType()
772
773 return &ast.StarExpr{Star: star, X: base}
774 }
775
776 func (p *parser) parseDotsType() *ast.Ellipsis {
777 if p.trace {
778 defer un(trace(p, "DotsType"))
779 }
780
781 pos := p.expect(token.ELLIPSIS)
782 elt := p.parseType()
783
784 return &ast.Ellipsis{Ellipsis: pos, Elt: elt}
785 }
786
// field is a temporary (name, type) pair used while parsing parameter
// lists, before pairs are grouped into ast.Fields; either part may be nil.
type field struct {
	name *ast.Ident
	typ  ast.Expr
}
791
// parseParamDecl parses a single parameter declaration fragment; either
// the name or the type of the result may be nil. name, if non-nil, is
// the already-consumed first identifier. typeSetsOK controls whether
// "~"-terms and "|"-unions (type parameter constraint syntax) are accepted.
func (p *parser) parseParamDecl(name *ast.Ident, typeSetsOK bool) (f field) {
	if p.trace {
		defer un(trace(p, "ParamDecl"))
	}

	ptok := p.tok
	if name != nil {
		// Pretend the already-consumed name is the current token so
		// the switch below takes the IDENT path; ptok remembers the
		// real current token and is restored there.
		p.tok = token.IDENT
	} else if typeSetsOK && p.tok == token.TILDE {
		// "~" ...
		return field{nil, p.embeddedElem(nil)}
	}

	switch p.tok {
	case token.IDENT:
		// name
		if name != nil {
			f.name = name
			p.tok = ptok // restore the real current token
		} else {
			f.name = p.parseIdent()
		}
		switch p.tok {
		case token.IDENT, token.MUL, token.ARROW, token.FUNC, token.CHAN, token.MAP, token.STRUCT, token.INTERFACE, token.LPAREN:
			// name type
			f.typ = p.parseType()

		case token.LBRACK:
			// name "[" type1, ..., typeN "]" or name "[" n "]" type
			f.name, f.typ = p.parseArrayFieldOrTypeInstance(f.name)

		case token.ELLIPSIS:
			// name "..." type
			f.typ = p.parseDotsType()
			return // don't allow ...type "|" ...

		case token.PERIOD:
			// name "." ... (qualified type)
			f.typ = p.parseQualifiedIdent(f.name)
			f.name = nil

		case token.TILDE:
			if typeSetsOK {
				f.typ = p.embeddedElem(nil)
				return
			}

		case token.OR:
			if typeSetsOK {
				// name "|" typeset
				f.typ = p.embeddedElem(f.name)
				f.name = nil
				return
			}
		}

	case token.MUL, token.ARROW, token.FUNC, token.LBRACK, token.CHAN, token.MAP, token.STRUCT, token.INTERFACE, token.LPAREN:
		// type
		f.typ = p.parseType()

	case token.ELLIPSIS:
		// "..." type
		// (always accepted)
		f.typ = p.parseDotsType()
		return // don't allow ...type "|" ...

	default:
		// TODO(review): this error is imprecise in type parameter
		// lists (should be "']'" there) — confirm against upstream.
		p.errorExpected(p.pos, "')'")
		p.advance(exprEnd)
	}

	// [name] type "|"
	if typeSetsOK && p.tok == token.OR && f.typ != nil {
		f.typ = p.embeddedElem(f.typ)
	}

	return
}
874
875 func (p *parser) parseParameterList(name0 *ast.Ident, typ0 ast.Expr, closing token.Token, dddok bool) (params []*ast.Field) {
876 if p.trace {
877 defer un(trace(p, "ParameterList"))
878 }
879
880
881 tparams := closing == token.RBRACK
882
883 pos0 := p.pos
884 if name0 != nil {
885 pos0 = name0.Pos()
886 } else if typ0 != nil {
887 pos0 = typ0.Pos()
888 }
889
890
891
892
893
894
895
896 var list []field
897 var named int
898 var typed int
899
900 for name0 != nil || p.tok != closing && p.tok != token.EOF {
901 var par field
902 if typ0 != nil {
903 if tparams {
904 typ0 = p.embeddedElem(typ0)
905 }
906 par = field{name0, typ0}
907 } else {
908 par = p.parseParamDecl(name0, tparams)
909 }
910 name0 = nil
911 typ0 = nil
912 if par.name != nil || par.typ != nil {
913 list = append(list, par)
914 if par.name != nil && par.typ != nil {
915 named++
916 }
917 if par.typ != nil {
918 typed++
919 }
920 }
921 if !p.atComma("parameter list", closing) {
922 break
923 }
924 p.next()
925 }
926
927 if len(list) == 0 {
928 return
929 }
930
931
932 if named == 0 {
933
934 for i := range list {
935 par := &list[i]
936 if typ := par.name; typ != nil {
937 par.typ = typ
938 par.name = nil
939 }
940 }
941 if tparams {
942
943
944 var errPos token.Pos
945 var msg string
946 if named == typed {
947 errPos = p.pos
948 msg = "missing type constraint"
949 } else {
950 errPos = pos0
951 msg = "missing type parameter name"
952 if len(list) == 1 {
953 msg += " or invalid array length"
954 }
955 }
956 p.error(errPos, msg)
957 }
958 } else if named != len(list) {
959
960 var errPos token.Pos
961 var typ ast.Expr
962 for i := range list {
963 if par := &list[len(list)-i-1]; par.typ != nil {
964 typ = par.typ
965 if par.name == nil {
966 errPos = typ.Pos()
967 n := ast.NewIdent("_")
968 n.NamePos = errPos
969 par.name = n
970 }
971 } else if typ != nil {
972 par.typ = typ
973 } else {
974
975 errPos = par.name.Pos()
976 par.typ = &ast.BadExpr{From: errPos, To: p.pos}
977 }
978 }
979 if errPos.IsValid() {
980
981
982
983
984
985
986 var msg string
987 if named == typed {
988 errPos = p.pos
989 if tparams {
990 msg = "missing type constraint"
991 } else {
992 msg = "missing parameter type"
993 }
994 } else {
995 if tparams {
996 msg = "missing type parameter name"
997
998 if len(list) == 1 {
999 msg += " or invalid array length"
1000 }
1001 } else {
1002 msg = "missing parameter name"
1003 }
1004 }
1005 p.error(errPos, msg)
1006 }
1007 }
1008
1009
1010 first := true
1011 for i, _ := range list {
1012 f := &list[i]
1013 if t, _ := f.typ.(*ast.Ellipsis); t != nil && (!dddok || i+1 < len(list)) {
1014 if first {
1015 first = false
1016 if dddok {
1017 p.error(t.Ellipsis, "can only use ... with final parameter")
1018 } else {
1019 p.error(t.Ellipsis, "invalid use of ...")
1020 }
1021 }
1022
1023
1024
1025 f.typ = &ast.BadExpr{From: t.Pos(), To: t.End()}
1026 }
1027 }
1028
1029
1030
1031 if named == 0 {
1032
1033 for _, par := range list {
1034 assert(par.typ != nil, "nil type in unnamed parameter list")
1035 params = append(params, &ast.Field{Type: par.typ})
1036 }
1037 return
1038 }
1039
1040
1041
1042 var names []*ast.Ident
1043 var typ ast.Expr
1044 addParams := func() {
1045 assert(typ != nil, "nil type in named parameter list")
1046 field := &ast.Field{Names: names, Type: typ}
1047 params = append(params, field)
1048 names = nil
1049 }
1050 for _, par := range list {
1051 if par.typ != typ {
1052 if len(names) > 0 {
1053 addParams()
1054 }
1055 typ = par.typ
1056 }
1057 names = append(names, par.name)
1058 }
1059 if len(names) > 0 {
1060 addParams()
1061 }
1062 return
1063 }
1064
1065 func (p *parser) parseTypeParameters() *ast.FieldList {
1066 if p.trace {
1067 defer un(trace(p, "TypeParameters"))
1068 }
1069
1070 lbrack := p.expect(token.LBRACK)
1071 var list []*ast.Field
1072 if p.tok != token.RBRACK {
1073 list = p.parseParameterList(nil, nil, token.RBRACK, false)
1074 }
1075 rbrack := p.expect(token.RBRACK)
1076
1077 if len(list) == 0 {
1078 p.error(rbrack, "empty type parameter list")
1079 return nil
1080 }
1081
1082 return &ast.FieldList{Opening: lbrack, List: list, Closing: rbrack}
1083 }
1084
1085 func (p *parser) parseParameters(result bool) *ast.FieldList {
1086 if p.trace {
1087 defer un(trace(p, "Parameters"))
1088 }
1089
1090 if !result || p.tok == token.LPAREN {
1091 lparen := p.expect(token.LPAREN)
1092 var list []*ast.Field
1093 if p.tok != token.RPAREN {
1094 list = p.parseParameterList(nil, nil, token.RPAREN, !result)
1095 }
1096 rparen := p.expect(token.RPAREN)
1097 return &ast.FieldList{Opening: lparen, List: list, Closing: rparen}
1098 }
1099
1100 if typ := p.tryIdentOrType(); typ != nil {
1101 list := make([]*ast.Field, 1)
1102 list[0] = &ast.Field{Type: typ}
1103 return &ast.FieldList{List: list}
1104 }
1105
1106 return nil
1107 }
1108
1109 func (p *parser) parseFuncType() *ast.FuncType {
1110 if p.trace {
1111 defer un(trace(p, "FuncType"))
1112 }
1113
1114 pos := p.expect(token.FUNC)
1115
1116 if p.tok == token.LBRACK {
1117 tparams := p.parseTypeParameters()
1118 if tparams != nil {
1119 p.error(tparams.Opening, "function type must have no type parameters")
1120 }
1121 }
1122 params := p.parseParameters(false)
1123 results := p.parseParameters(true)
1124
1125 return &ast.FuncType{Func: pos, Params: params, Results: results}
1126 }
1127
// parseMethodSpec parses one interface element that starts with an
// identifier: a method signature, an embedded (possibly qualified or
// instantiated) type name, or error-recovery variants thereof.
func (p *parser) parseMethodSpec() *ast.Field {
	if p.trace {
		defer un(trace(p, "MethodSpec"))
	}

	doc := p.leadComment
	var idents []*ast.Ident
	var typ ast.Expr
	x := p.parseTypeName(nil)
	if ident, _ := x.(*ast.Ident); ident != nil {
		switch {
		case p.tok == token.LBRACK:
			// generic method or embedded instantiated type
			lbrack := p.pos
			p.next()
			p.exprLev++
			x := p.parseExpr()
			p.exprLev--
			if name0, _ := x.(*ast.Ident); name0 != nil && p.tok != token.COMMA && p.tok != token.RBRACK {
				// generic method m[T any]
				//
				// Interface methods do not have type parameters. We parse them
				// anyway for a better error message and improved error recovery.
				_ = p.parseParameterList(name0, nil, token.RBRACK, false)
				_ = p.expect(token.RBRACK)
				p.error(lbrack, "interface method must have no type parameters")

				// treat the rest like an ordinary method
				params := p.parseParameters(false)
				results := p.parseParameters(true)
				idents = []*ast.Ident{ident}
				typ = &ast.FuncType{
					Func:    token.NoPos,
					Params:  params,
					Results: results,
				}
			} else {
				// embedded instantiated type: parse the remaining
				// type arguments, if any
				list := []ast.Expr{x}
				if p.atComma("type argument list", token.RBRACK) {
					p.exprLev++
					p.next()
					for p.tok != token.RBRACK && p.tok != token.EOF {
						list = append(list, p.parseType())
						if !p.atComma("type argument list", token.RBRACK) {
							break
						}
						p.next()
					}
					p.exprLev--
				}
				rbrack := p.expectClosing(token.RBRACK, "type argument list")
				typ = packIndexExpr(ident, lbrack, list, rbrack)
			}
		case p.tok == token.LPAREN:
			// ordinary method: ident(params) results
			params := p.parseParameters(false)
			results := p.parseParameters(true)
			idents = []*ast.Ident{ident}
			typ = &ast.FuncType{Func: token.NoPos, Params: params, Results: results}
		default:
			// embedded type name
			typ = x
		}
	} else {
		// embedded, possibly instantiated type
		typ = x
		if p.tok == token.LBRACK {
			// embedded instantiated interface
			typ = p.parseTypeInstance(typ)
		}
	}

	// The comment is attached by the caller (see parseInterfaceType,
	// which sets Field.Comment via expectSemi).
	return &ast.Field{Doc: doc, Names: idents, Type: typ}
}
1209
1210 func (p *parser) embeddedElem(x ast.Expr) ast.Expr {
1211 if p.trace {
1212 defer un(trace(p, "EmbeddedElem"))
1213 }
1214 if x == nil {
1215 x = p.embeddedTerm()
1216 }
1217 for p.tok == token.OR {
1218 t := new(ast.BinaryExpr)
1219 t.OpPos = p.pos
1220 t.Op = token.OR
1221 p.next()
1222 t.X = x
1223 t.Y = p.embeddedTerm()
1224 x = t
1225 }
1226 return x
1227 }
1228
1229 func (p *parser) embeddedTerm() ast.Expr {
1230 if p.trace {
1231 defer un(trace(p, "EmbeddedTerm"))
1232 }
1233 if p.tok == token.TILDE {
1234 t := new(ast.UnaryExpr)
1235 t.OpPos = p.pos
1236 t.Op = token.TILDE
1237 p.next()
1238 t.X = p.parseType()
1239 return t
1240 }
1241
1242 t := p.tryIdentOrType()
1243 if t == nil {
1244 pos := p.pos
1245 p.errorExpected(pos, "~ term or type")
1246 p.advance(exprEnd)
1247 return &ast.BadExpr{From: pos, To: p.pos}
1248 }
1249
1250 return t
1251 }
1252
// parseInterfaceType parses "interface { ... }", whose elements may be
// method specifications, embedded type names, or general type-set
// elements (~T and unions).
func (p *parser) parseInterfaceType() *ast.InterfaceType {
	if p.trace {
		defer un(trace(p, "InterfaceType"))
	}

	pos := p.expect(token.INTERFACE)
	lbrace := p.expect(token.LBRACE)

	var list []*ast.Field

parseElements:
	for {
		switch {
		case p.tok == token.IDENT:
			f := p.parseMethodSpec()
			if f.Names == nil {
				// embedded element: may be extended by '|' union terms
				f.Type = p.embeddedElem(f.Type)
			}
			f.Comment = p.expectSemi()
			list = append(list, f)
		case p.tok == token.TILDE:
			// ~T element
			typ := p.embeddedElem(nil)
			comment := p.expectSemi()
			list = append(list, &ast.Field{Type: typ, Comment: comment})
		default:
			// any other type expression, or end of the element list
			if t := p.tryIdentOrType(); t != nil {
				typ := p.embeddedElem(t)
				comment := p.expectSemi()
				list = append(list, &ast.Field{Type: typ, Comment: comment})
			} else {
				break parseElements
			}
		}
	}

	rbrace := p.expect(token.RBRACE)

	return &ast.InterfaceType{
		Interface: pos,
		Methods: &ast.FieldList{
			Opening: lbrace,
			List:    list,
			Closing: rbrace,
		},
	}
}
1301
1302 func (p *parser) parseMapType() *ast.MapType {
1303 if p.trace {
1304 defer un(trace(p, "MapType"))
1305 }
1306
1307 pos := p.expect(token.MAP)
1308 p.expect(token.LBRACK)
1309 key := p.parseType()
1310 p.expect(token.RBRACK)
1311 value := p.parseType()
1312
1313 return &ast.MapType{Map: pos, Key: key, Value: value}
1314 }
1315
1316 func (p *parser) parseChanType() *ast.ChanType {
1317 if p.trace {
1318 defer un(trace(p, "ChanType"))
1319 }
1320
1321 pos := p.pos
1322 dir := ast.SEND | ast.RECV
1323 var arrow token.Pos
1324 if p.tok == token.CHAN {
1325 p.next()
1326 if p.tok == token.ARROW {
1327 arrow = p.pos
1328 p.next()
1329 dir = ast.SEND
1330 }
1331 } else {
1332 arrow = p.expect(token.ARROW)
1333 p.expect(token.CHAN)
1334 dir = ast.RECV
1335 }
1336 value := p.parseType()
1337
1338 return &ast.ChanType{Begin: pos, Arrow: arrow, Dir: dir, Value: value}
1339 }
1340
// parseTypeInstance parses "typ[T1, T2, ...]", the instantiation of a
// generic type; the '[' has not been consumed yet.
func (p *parser) parseTypeInstance(typ ast.Expr) ast.Expr {
	if p.trace {
		defer un(trace(p, "TypeInstance"))
	}

	opening := p.expect(token.LBRACK)
	p.exprLev++
	var list []ast.Expr
	for p.tok != token.RBRACK && p.tok != token.EOF {
		list = append(list, p.parseType())
		if !p.atComma("type argument list", token.RBRACK) {
			break
		}
		p.next()
	}
	p.exprLev--

	closing := p.expectClosing(token.RBRACK, "type argument list")

	if len(list) == 0 {
		// empty type argument lists are invalid; report and repair
		p.errorExpected(closing, "type argument list")
		return &ast.IndexExpr{
			X:      typ,
			Lbrack: opening,
			Index:  &ast.BadExpr{From: opening + 1, To: closing},
			Rbrack: closing,
		}
	}

	return packIndexExpr(typ, opening, list, closing)
}
1372
// tryIdentOrType parses a type if the current token can start one and
// returns it; it returns nil (without reporting an error) otherwise.
func (p *parser) tryIdentOrType() ast.Expr {
	defer decNestLev(incNestLev(p)) // guard against deeply nested types

	switch p.tok {
	case token.IDENT:
		typ := p.parseTypeName(nil)
		if p.tok == token.LBRACK {
			typ = p.parseTypeInstance(typ)
		}
		return typ
	case token.LBRACK:
		lbrack := p.expect(token.LBRACK)
		return p.parseArrayType(lbrack, nil)
	case token.STRUCT:
		return p.parseStructType()
	case token.MUL:
		return p.parsePointerType()
	case token.FUNC:
		return p.parseFuncType()
	case token.INTERFACE:
		return p.parseInterfaceType()
	case token.MAP:
		return p.parseMapType()
	case token.CHAN, token.ARROW:
		return p.parseChanType()
	case token.LPAREN:
		// parenthesized type
		lparen := p.pos
		p.next()
		typ := p.parseType()
		rparen := p.expect(token.RPAREN)
		return &ast.ParenExpr{Lparen: lparen, X: typ, Rparen: rparen}
	}

	// no type found
	return nil
}
1409
1410
1411
1412
1413 func (p *parser) parseStmtList() (list []ast.Stmt) {
1414 if p.trace {
1415 defer un(trace(p, "StatementList"))
1416 }
1417
1418 for p.tok != token.CASE && p.tok != token.DEFAULT && p.tok != token.RBRACE && p.tok != token.EOF {
1419 list = append(list, p.parseStmt())
1420 }
1421
1422 return
1423 }
1424
1425 func (p *parser) parseBody() *ast.BlockStmt {
1426 if p.trace {
1427 defer un(trace(p, "Body"))
1428 }
1429
1430 lbrace := p.expect(token.LBRACE)
1431 list := p.parseStmtList()
1432 rbrace := p.expect2(token.RBRACE)
1433
1434 return &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace}
1435 }
1436
1437 func (p *parser) parseBlockStmt() *ast.BlockStmt {
1438 if p.trace {
1439 defer un(trace(p, "BlockStmt"))
1440 }
1441
1442 lbrace := p.expect(token.LBRACE)
1443 list := p.parseStmtList()
1444 rbrace := p.expect2(token.RBRACE)
1445
1446 return &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace}
1447 }
1448
1449
1450
1451
1452 func (p *parser) parseFuncTypeOrLit() ast.Expr {
1453 if p.trace {
1454 defer un(trace(p, "FuncTypeOrLit"))
1455 }
1456
1457 typ := p.parseFuncType()
1458 if p.tok != token.LBRACE {
1459
1460 return typ
1461 }
1462
1463 p.exprLev++
1464 body := p.parseBody()
1465 p.exprLev--
1466
1467 return &ast.FuncLit{Type: typ, Body: body}
1468 }
1469
1470
1471
// parseOperand may return an expression or a raw type. Callers must
// verify the result.
func (p *parser) parseOperand() ast.Expr {
	if p.trace {
		defer un(trace(p, "Operand"))
	}

	switch p.tok {
	case token.IDENT:
		x := p.parseIdent()
		return x

	case token.INT, token.FLOAT, token.IMAG, token.CHAR, token.STRING:
		end := p.pos + token.Pos(len(p.lit))
		if p.tok == token.STRING {
			// Use the scanner-reported end position, which may differ
			// from pos+len(lit) (see stringEnd / scannerhooks).
			end = p.stringEnd
		}
		x := &ast.BasicLit{ValuePos: p.pos, ValueEnd: end, Kind: p.tok, Value: p.lit}
		p.next()
		return x

	case token.LPAREN:
		// parenthesized expression
		lparen := p.pos
		p.next()
		p.exprLev++
		x := p.parseRhs()
		p.exprLev--
		rparen := p.expect(token.RPAREN)
		return &ast.ParenExpr{Lparen: lparen, X: x, Rparen: rparen}

	case token.FUNC:
		return p.parseFuncTypeOrLit()
	}

	if typ := p.tryIdentOrType(); typ != nil {
		// could be the type for a composite literal or a conversion;
		// identifiers were already handled in the switch above
		_, isIdent := typ.(*ast.Ident)
		assert(!isIdent, "type cannot be identifier")
		return typ
	}

	// we have an error
	pos := p.pos
	p.errorExpected(pos, "operand")
	p.advance(stmtStart)
	return &ast.BadExpr{From: pos, To: p.pos}
}
1517
1518 func (p *parser) parseSelector(x ast.Expr) ast.Expr {
1519 if p.trace {
1520 defer un(trace(p, "Selector"))
1521 }
1522
1523 sel := p.parseIdent()
1524
1525 return &ast.SelectorExpr{X: x, Sel: sel}
1526 }
1527
1528 func (p *parser) parseTypeAssertion(x ast.Expr) ast.Expr {
1529 if p.trace {
1530 defer un(trace(p, "TypeAssertion"))
1531 }
1532
1533 lparen := p.expect(token.LPAREN)
1534 var typ ast.Expr
1535 if p.tok == token.TYPE {
1536
1537 p.next()
1538 } else {
1539 typ = p.parseType()
1540 }
1541 rparen := p.expect(token.RPAREN)
1542
1543 return &ast.TypeAssertExpr{X: x, Type: typ, Lparen: lparen, Rparen: rparen}
1544 }
1545
// parseIndexOrSliceOrInstance parses what follows x and a '[': an index
// expression x[i], a slice expression x[lo:hi] or x[lo:hi:max], or a
// generic instantiation x[T1, T2, ...].
func (p *parser) parseIndexOrSliceOrInstance(x ast.Expr) ast.Expr {
	if p.trace {
		defer un(trace(p, "parseIndexOrSliceOrInstance"))
	}

	lbrack := p.expect(token.LBRACK)
	if p.tok == token.RBRACK {
		// empty index, slice or index expressions are not permitted;
		// accept them however for more robust parsing and complain
		p.errorExpected(p.pos, "operand")
		rbrack := p.pos
		p.next()
		return &ast.IndexExpr{
			X:      x,
			Lbrack: lbrack,
			Index:  &ast.BadExpr{From: rbrack, To: rbrack},
			Rbrack: rbrack,
		}
	}
	p.exprLev++

	const N = 3 // change the 3 to 2 to disable 3-index slices
	var args []ast.Expr
	var index [N]ast.Expr
	var colons [N - 1]token.Pos
	if p.tok != token.COLON {
		// We can't know if we have an index expression or a type instantiation;
		// so even if we see a (named) type we are not going to be in type context.
		index[0] = p.parseRhs()
	}
	ncolons := 0
	switch p.tok {
	case token.COLON:
		// slice expression
		for p.tok == token.COLON && ncolons < len(colons) {
			colons[ncolons] = p.pos
			ncolons++
			p.next()
			if p.tok != token.COLON && p.tok != token.RBRACK && p.tok != token.EOF {
				index[ncolons] = p.parseRhs()
			}
		}
	case token.COMMA:
		// instance expression
		args = append(args, index[0])
		for p.tok == token.COMMA {
			p.next()
			if p.tok != token.RBRACK && p.tok != token.EOF {
				args = append(args, p.parseType())
			}
		}
	}

	p.exprLev--
	rbrack := p.expect(token.RBRACK)

	if ncolons > 0 {
		// slice expression
		slice3 := false
		if ncolons == 2 {
			slice3 = true
			// Check presence of middle and final index here rather than during
			// type-checking to prevent erroneous programs from passing through
			// gofmt (was go.dev issue 7305).
			if index[1] == nil {
				p.error(colons[0], "middle index required in 3-index slice")
				index[1] = &ast.BadExpr{From: colons[0] + 1, To: colons[1]}
			}
			if index[2] == nil {
				p.error(colons[1], "final index required in 3-index slice")
				index[2] = &ast.BadExpr{From: colons[1] + 1, To: rbrack}
			}
		}
		return &ast.SliceExpr{X: x, Lbrack: lbrack, Low: index[0], High: index[1], Max: index[2], Slice3: slice3, Rbrack: rbrack}
	}

	if len(args) == 0 {
		// index expression
		return &ast.IndexExpr{X: x, Lbrack: lbrack, Index: index[0], Rbrack: rbrack}
	}

	// instance expression
	return packIndexExpr(x, lbrack, args, rbrack)
}
1629
// parseCallOrConversion parses the parenthesized argument list of a call
// or conversion fun(arg1, arg2, ...) or fun(args...). The loop stops
// once an ellipsis has been seen since '...' may only follow the final
// argument.
func (p *parser) parseCallOrConversion(fun ast.Expr) *ast.CallExpr {
	if p.trace {
		defer un(trace(p, "CallOrConversion"))
	}

	lparen := p.expect(token.LPAREN)
	p.exprLev++ // composite literals are permitted inside the parentheses
	var list []ast.Expr
	var ellipsis token.Pos
	for p.tok != token.RPAREN && p.tok != token.EOF && !ellipsis.IsValid() {
		list = append(list, p.parseRhs())
		if p.tok == token.ELLIPSIS {
			ellipsis = p.pos
			p.next()
		}
		if !p.atComma("argument list", token.RPAREN) {
			break
		}
		p.next()
	}
	p.exprLev--
	rparen := p.expectClosing(token.RPAREN, "argument list")

	return &ast.CallExpr{Fun: fun, Lparen: lparen, Args: list, Ellipsis: ellipsis, Rparen: rparen}
}
1655
1656 func (p *parser) parseValue() ast.Expr {
1657 if p.trace {
1658 defer un(trace(p, "Element"))
1659 }
1660
1661 if p.tok == token.LBRACE {
1662 return p.parseLiteralValue(nil)
1663 }
1664
1665 x := p.parseExpr()
1666
1667 return x
1668 }
1669
1670 func (p *parser) parseElement() ast.Expr {
1671 if p.trace {
1672 defer un(trace(p, "Element"))
1673 }
1674
1675 x := p.parseValue()
1676 if p.tok == token.COLON {
1677 colon := p.pos
1678 p.next()
1679 x = &ast.KeyValueExpr{Key: x, Colon: colon, Value: p.parseValue()}
1680 }
1681
1682 return x
1683 }
1684
1685 func (p *parser) parseElementList() (list []ast.Expr) {
1686 if p.trace {
1687 defer un(trace(p, "ElementList"))
1688 }
1689
1690 for p.tok != token.RBRACE && p.tok != token.EOF {
1691 list = append(list, p.parseElement())
1692 if !p.atComma("composite literal", token.RBRACE) {
1693 break
1694 }
1695 p.next()
1696 }
1697
1698 return
1699 }
1700
1701 func (p *parser) parseLiteralValue(typ ast.Expr) ast.Expr {
1702 defer decNestLev(incNestLev(p))
1703
1704 if p.trace {
1705 defer un(trace(p, "LiteralValue"))
1706 }
1707
1708 lbrace := p.expect(token.LBRACE)
1709 var elts []ast.Expr
1710 p.exprLev++
1711 if p.tok != token.RBRACE {
1712 elts = p.parseElementList()
1713 }
1714 p.exprLev--
1715 rbrace := p.expectClosing(token.RBRACE, "composite literal")
1716 return &ast.CompositeLit{Type: typ, Lbrace: lbrace, Elts: elts, Rbrace: rbrace}
1717 }
1718
// parsePrimaryExpr parses x followed by any number of suffixes:
// selectors, type assertions, index/slice/instantiation brackets, call
// parentheses, and composite-literal braces. If x is nil, the operand
// is parsed first.
func (p *parser) parsePrimaryExpr(x ast.Expr) ast.Expr {
	if p.trace {
		defer un(trace(p, "PrimaryExpr"))
	}

	if x == nil {
		x = p.parseOperand()
	}

	// Track nesting per loop iteration rather than per call: the loop
	// builds a nested tree iteratively, and it is the depth of the
	// produced tree we want to bound.
	var n int
	defer func() { p.nestLev -= n }()
	for n = 1; ; n++ {
		incNestLev(p)
		switch p.tok {
		case token.PERIOD:
			p.next()
			switch p.tok {
			case token.IDENT:
				x = p.parseSelector(x)
			case token.LPAREN:
				x = p.parseTypeAssertion(x)
			default:
				pos := p.pos
				p.errorExpected(pos, "selector or type assertion")
				// Recovery: consume the offending token to make
				// progress, but not a '}', which more likely closes
				// the surrounding block; then substitute a blank
				// selector so the AST remains well formed.
				if p.tok != token.RBRACE {
					p.next() // make progress
				}
				sel := &ast.Ident{NamePos: pos, Name: "_"}
				x = &ast.SelectorExpr{X: x, Sel: sel}
			}
		case token.LBRACK:
			x = p.parseIndexOrSliceOrInstance(x)
		case token.LPAREN:
			x = p.parseCallOrConversion(x)
		case token.LBRACE:
			// Decide whether the '{' starts a composite literal whose
			// type is x, or belongs to the surrounding statement.
			t := ast.Unparen(x)
			switch t.(type) {
			case *ast.BadExpr, *ast.Ident, *ast.SelectorExpr:
				if p.exprLev < 0 {
					// inside a control-clause header (exprLev < 0):
					// '{' starts the statement body, not a literal
					return x
				}
				// x is possibly a composite literal type
			case *ast.IndexExpr, *ast.IndexListExpr:
				if p.exprLev < 0 {
					return x
				}
				// x is possibly a (generic) composite literal type
			case *ast.ArrayType, *ast.StructType, *ast.MapType:
				// x is a composite literal type
			default:
				return x
			}
			if t != x {
				// the literal type was parenthesized: not allowed
				p.error(t.Pos(), "cannot parenthesize type in composite literal")
				// already progressed, no need to advance
			}
			x = p.parseLiteralValue(x)
		default:
			return x
		}
	}
}
1791
// parseUnaryExpr parses a unary expression: a prefix operator (+ - ! ^
// & ~), a '<-' (receive expression or receive-only channel type), a '*'
// (dereference or pointer type), or a primary expression.
func (p *parser) parseUnaryExpr() ast.Expr {
	defer decNestLev(incNestLev(p))

	if p.trace {
		defer un(trace(p, "UnaryExpr"))
	}

	switch p.tok {
	case token.ADD, token.SUB, token.NOT, token.XOR, token.AND, token.TILDE:
		pos, op := p.pos, p.tok
		p.next()
		x := p.parseUnaryExpr()
		return &ast.UnaryExpr{OpPos: pos, Op: op, X: x}

	case token.ARROW:
		// channel type or receive expression
		arrow := p.pos
		p.next()

		// The operand is parsed without knowing whether the '<-' we just
		// consumed starts a receive expression or marks the outermost
		// channel of a receive-only channel type (<-chan T); if the
		// operand turns out to be a channel type, the chain of channel
		// directions must be re-associated below.
		x := p.parseUnaryExpr()

		// determine which case we have
		if typ, ok := x.(*ast.ChanType); ok {
			// (<-type)

			// Re-associate position info and direction: walk down the
			// chain of channel types, pushing the consumed '<-' one
			// level deeper each step, until a non-send direction or a
			// non-channel element type is found.
			dir := ast.SEND
			for ok && dir == ast.SEND {
				if typ.Dir == ast.RECV {
					// error: already receive-only at this level
					p.errorExpected(typ.Arrow, "'chan'")
				}
				arrow, typ.Begin, typ.Arrow = typ.Arrow, arrow, arrow
				dir, typ.Dir = typ.Dir, ast.RECV
				typ, ok = typ.Value.(*ast.ChanType)
			}
			if dir == ast.SEND {
				// ran out of channel types while still holding a '<-'
				p.errorExpected(arrow, "channel type")
			}

			return x
		}

		// <-(expr): a receive expression
		return &ast.UnaryExpr{OpPos: arrow, Op: token.ARROW, X: x}

	case token.MUL:
		// pointer type or unary "*" expression
		pos := p.pos
		p.next()
		x := p.parseUnaryExpr()
		return &ast.StarExpr{Star: pos, X: x}
	}

	return p.parsePrimaryExpr(nil)
}
1862
1863 func (p *parser) tokPrec() (token.Token, int) {
1864 tok := p.tok
1865 if p.inRhs && tok == token.ASSIGN {
1866 tok = token.EQL
1867 }
1868 return tok, tok.Precedence()
1869 }
1870
1871
1872
1873
1874
// parseBinaryExpr parses a (possibly empty) sequence of binary
// operators with precedence >= prec1 applied to x; higher-precedence
// operators bind tighter via the recursive call with oprec+1. If x is
// nil, the first operand is parsed first.
func (p *parser) parseBinaryExpr(x ast.Expr, prec1 int) ast.Expr {
	if p.trace {
		defer un(trace(p, "BinaryExpr"))
	}

	if x == nil {
		x = p.parseUnaryExpr()
	}

	// Track nesting per loop iteration rather than per call: the loop
	// builds a nested tree iteratively, and it is the depth of the
	// produced tree we want to bound.
	var n int
	defer func() { p.nestLev -= n }()
	for n = 1; ; n++ {
		incNestLev(p)
		op, oprec := p.tokPrec()
		if oprec < prec1 {
			// not a binary operator of sufficient precedence
			return x
		}
		pos := p.expect(op)
		y := p.parseBinaryExpr(nil, oprec+1)
		x = &ast.BinaryExpr{X: x, OpPos: pos, Op: op, Y: y}
	}
}
1899
1900
// parseExpr parses a complete expression at the lowest precedence
// level. The result may also be a type (parseUnaryExpr can yield e.g. a
// channel or pointer type).
func (p *parser) parseExpr() ast.Expr {
	if p.trace {
		defer un(trace(p, "Expression"))
	}

	return p.parseBinaryExpr(nil, token.LowestPrec+1)
}
1908
1909 func (p *parser) parseRhs() ast.Expr {
1910 old := p.inRhs
1911 p.inRhs = true
1912 x := p.parseExpr()
1913 p.inRhs = old
1914 return x
1915 }
1916
1917
1918
1919
1920
// Parsing modes for parseSimpleStmt.
const (
	basic   = iota // no special handling
	labelOk        // a labeled statement is permitted
	rangeOk        // a range clause is permitted (for statement headers)
)
1926
1927
1928
1929
1930
// parseSimpleStmt parses a simple statement: an assignment or short
// variable declaration (possibly with a range clause when mode ==
// rangeOk), a labeled statement (when mode == labelOk), a send
// statement, an increment/decrement, or an expression statement.
// The second result reports whether a range clause was parsed
// (used by parseForStmt).
func (p *parser) parseSimpleStmt(mode int) (ast.Stmt, bool) {
	if p.trace {
		defer un(trace(p, "SimpleStmt"))
	}

	x := p.parseList(false)

	switch p.tok {
	case
		token.DEFINE, token.ASSIGN, token.ADD_ASSIGN,
		token.SUB_ASSIGN, token.MUL_ASSIGN, token.QUO_ASSIGN,
		token.REM_ASSIGN, token.AND_ASSIGN, token.OR_ASSIGN,
		token.XOR_ASSIGN, token.SHL_ASSIGN, token.SHR_ASSIGN, token.AND_NOT_ASSIGN:
		// assignment statement, possibly part of a range clause
		pos, tok := p.pos, p.tok
		p.next()
		var y []ast.Expr
		isRange := false
		if mode == rangeOk && p.tok == token.RANGE && (tok == token.DEFINE || tok == token.ASSIGN) {
			// range clause: represented as a single unary RANGE
			// expression on the right-hand side
			pos := p.pos
			p.next()
			y = []ast.Expr{&ast.UnaryExpr{OpPos: pos, Op: token.RANGE, X: p.parseRhs()}}
			isRange = true
		} else {
			y = p.parseList(true)
		}
		return &ast.AssignStmt{Lhs: x, TokPos: pos, Tok: tok, Rhs: y}, isRange
	}

	if len(x) > 1 {
		p.errorExpected(x[0].Pos(), "1 expression")
		// continue with first expression
	}

	switch p.tok {
	case token.COLON:
		// labeled statement
		colon := p.pos
		p.next()
		if label, isIdent := x[0].(*ast.Ident); mode == labelOk && isIdent {
			// the label's statement follows immediately
			stmt := &ast.LabeledStmt{Label: label, Colon: colon, Stmt: p.parseStmt()}
			return stmt, false
		}
		// a label is not permitted here, or the expression before the
		// colon was not an identifier
		p.error(colon, "illegal label declaration")
		return &ast.BadStmt{From: x[0].Pos(), To: colon + 1}, false

	case token.ARROW:
		// send statement
		arrow := p.pos
		p.next()
		y := p.parseRhs()
		return &ast.SendStmt{Chan: x[0], Arrow: arrow, Value: y}, false

	case token.INC, token.DEC:
		// increment or decrement
		s := &ast.IncDecStmt{X: x[0], TokPos: p.pos, Tok: p.tok}
		p.next()
		return s, false
	}

	// expression statement
	return &ast.ExprStmt{X: x[0]}, false
}
2003
2004 func (p *parser) parseCallExpr(callType string) *ast.CallExpr {
2005 x := p.parseRhs()
2006 if t := ast.Unparen(x); t != x {
2007 p.error(x.Pos(), fmt.Sprintf("expression in %s must not be parenthesized", callType))
2008 x = t
2009 }
2010 if call, isCall := x.(*ast.CallExpr); isCall {
2011 return call
2012 }
2013 if _, isBad := x.(*ast.BadExpr); !isBad {
2014
2015 p.error(x.End(), fmt.Sprintf("expression in %s must be function call", callType))
2016 }
2017 return nil
2018 }
2019
2020 func (p *parser) parseGoStmt() ast.Stmt {
2021 if p.trace {
2022 defer un(trace(p, "GoStmt"))
2023 }
2024
2025 pos := p.expect(token.GO)
2026 call := p.parseCallExpr("go")
2027 p.expectSemi()
2028 if call == nil {
2029 return &ast.BadStmt{From: pos, To: pos + 2}
2030 }
2031
2032 return &ast.GoStmt{Go: pos, Call: call}
2033 }
2034
2035 func (p *parser) parseDeferStmt() ast.Stmt {
2036 if p.trace {
2037 defer un(trace(p, "DeferStmt"))
2038 }
2039
2040 pos := p.expect(token.DEFER)
2041 call := p.parseCallExpr("defer")
2042 p.expectSemi()
2043 if call == nil {
2044 return &ast.BadStmt{From: pos, To: pos + 5}
2045 }
2046
2047 return &ast.DeferStmt{Defer: pos, Call: call}
2048 }
2049
2050 func (p *parser) parseReturnStmt() *ast.ReturnStmt {
2051 if p.trace {
2052 defer un(trace(p, "ReturnStmt"))
2053 }
2054
2055 pos := p.pos
2056 p.expect(token.RETURN)
2057 var x []ast.Expr
2058 if p.tok != token.SEMICOLON && p.tok != token.RBRACE {
2059 x = p.parseList(true)
2060 }
2061 p.expectSemi()
2062
2063 return &ast.ReturnStmt{Return: pos, Results: x}
2064 }
2065
2066 func (p *parser) parseBranchStmt(tok token.Token) *ast.BranchStmt {
2067 if p.trace {
2068 defer un(trace(p, "BranchStmt"))
2069 }
2070
2071 pos := p.expect(tok)
2072 var label *ast.Ident
2073 if tok == token.GOTO || ((tok == token.CONTINUE || tok == token.BREAK) && p.tok == token.IDENT) {
2074 label = p.parseIdent()
2075 }
2076 p.expectSemi()
2077
2078 return &ast.BranchStmt{TokPos: pos, Tok: tok, Label: label}
2079 }
2080
2081 func (p *parser) makeExpr(s ast.Stmt, want string) ast.Expr {
2082 if s == nil {
2083 return nil
2084 }
2085 if es, isExpr := s.(*ast.ExprStmt); isExpr {
2086 return es.X
2087 }
2088 found := "simple statement"
2089 if _, isAss := s.(*ast.AssignStmt); isAss {
2090 found = "assignment"
2091 }
2092 p.error(s.Pos(), fmt.Sprintf("expected %s, found %s (missing parentheses around composite literal?)", want, found))
2093 return &ast.BadExpr{From: s.Pos(), To: s.End()}
2094 }
2095
2096
2097
2098
// parseIfHeader parses the "[init;] cond" header of an if statement up
// to (but excluding) the opening '{' of the body. cond is never nil: a
// BadExpr is substituted when the condition is missing or invalid.
func (p *parser) parseIfHeader() (init ast.Stmt, cond ast.Expr) {
	if p.tok == token.LBRACE {
		p.error(p.pos, "missing condition in if statement")
		cond = &ast.BadExpr{From: p.pos, To: p.pos}
		return
	}
	// p.tok != token.LBRACE

	prevLev := p.exprLev
	p.exprLev = -1 // disable composite literals inside the header (see parsePrimaryExpr)

	if p.tok != token.SEMICOLON {
		// accept a var declaration for error tolerance, but complain
		if p.tok == token.VAR {
			p.next()
			p.error(p.pos, "var declaration not allowed in if initializer")
		}
		init, _ = p.parseSimpleStmt(basic)
	}

	var condStmt ast.Stmt
	var semi struct {
		pos token.Pos
		lit string // ";" or "\n"; position valid only if a semicolon was seen
	}
	if p.tok != token.LBRACE {
		if p.tok == token.SEMICOLON {
			// remember the semicolon for a better error message below
			semi.pos = p.pos
			semi.lit = p.lit
			p.next()
		} else {
			p.expect(token.SEMICOLON)
		}
		if p.tok != token.LBRACE {
			condStmt, _ = p.parseSimpleStmt(basic)
		}
	} else {
		// no semicolon: what we parsed as init was really the condition
		condStmt = init
		init = nil
	}

	if condStmt != nil {
		cond = p.makeExpr(condStmt, "boolean expression")
	} else if semi.pos.IsValid() {
		// distinguish an accidental newline from an explicit ';' with
		// nothing after it
		if semi.lit == "\n" {
			p.error(semi.pos, "unexpected newline, expecting { after if clause")
		} else {
			p.error(semi.pos, "missing condition in if statement")
		}
	}

	// make sure we have a valid AST
	if cond == nil {
		cond = &ast.BadExpr{From: p.pos, To: p.pos}
	}

	p.exprLev = prevLev
	return
}
2158
2159 func (p *parser) parseIfStmt() *ast.IfStmt {
2160 defer decNestLev(incNestLev(p))
2161
2162 if p.trace {
2163 defer un(trace(p, "IfStmt"))
2164 }
2165
2166 pos := p.expect(token.IF)
2167
2168 init, cond := p.parseIfHeader()
2169 body := p.parseBlockStmt()
2170
2171 var else_ ast.Stmt
2172 if p.tok == token.ELSE {
2173 p.next()
2174 switch p.tok {
2175 case token.IF:
2176 else_ = p.parseIfStmt()
2177 case token.LBRACE:
2178 else_ = p.parseBlockStmt()
2179 p.expectSemi()
2180 default:
2181 p.errorExpected(p.pos, "if statement or block")
2182 else_ = &ast.BadStmt{From: p.pos, To: p.pos}
2183 }
2184 } else {
2185 p.expectSemi()
2186 }
2187
2188 return &ast.IfStmt{If: pos, Init: init, Cond: cond, Body: body, Else: else_}
2189 }
2190
2191 func (p *parser) parseCaseClause() *ast.CaseClause {
2192 if p.trace {
2193 defer un(trace(p, "CaseClause"))
2194 }
2195
2196 pos := p.pos
2197 var list []ast.Expr
2198 if p.tok == token.CASE {
2199 p.next()
2200 list = p.parseList(true)
2201 } else {
2202 p.expect(token.DEFAULT)
2203 }
2204
2205 colon := p.expect(token.COLON)
2206 body := p.parseStmtList()
2207
2208 return &ast.CaseClause{Case: pos, List: list, Colon: colon, Body: body}
2209 }
2210
2211 func isTypeSwitchAssert(x ast.Expr) bool {
2212 a, ok := x.(*ast.TypeAssertExpr)
2213 return ok && a.Type == nil
2214 }
2215
2216 func (p *parser) isTypeSwitchGuard(s ast.Stmt) bool {
2217 switch t := s.(type) {
2218 case *ast.ExprStmt:
2219
2220 return isTypeSwitchAssert(t.X)
2221 case *ast.AssignStmt:
2222
2223 if len(t.Lhs) == 1 && len(t.Rhs) == 1 && isTypeSwitchAssert(t.Rhs[0]) {
2224 switch t.Tok {
2225 case token.ASSIGN:
2226
2227 p.error(t.TokPos, "expected ':=', found '='")
2228 fallthrough
2229 case token.DEFINE:
2230 return true
2231 }
2232 }
2233 }
2234 return false
2235 }
2236
// parseSwitchStmt parses an expression switch or a type switch; which
// one is produced depends on whether the (optional) guard statement s2
// is a type switch guard.
func (p *parser) parseSwitchStmt() ast.Stmt {
	if p.trace {
		defer un(trace(p, "SwitchStmt"))
	}

	pos := p.expect(token.SWITCH)

	var s1, s2 ast.Stmt // optional init statement; tag expression or guard
	if p.tok != token.LBRACE {
		prevLev := p.exprLev
		p.exprLev = -1 // disable composite literals inside the header
		if p.tok != token.SEMICOLON {
			s2, _ = p.parseSimpleStmt(basic)
		}
		if p.tok == token.SEMICOLON {
			// what we parsed so far was the init statement;
			// the tag or guard (if any) follows
			p.next()
			s1 = s2
			s2 = nil
			if p.tok != token.LBRACE {
				// tag expression or type switch guard
				s2, _ = p.parseSimpleStmt(basic)
			}
		}
		p.exprLev = prevLev
	}

	typeSwitch := p.isTypeSwitchGuard(s2)
	lbrace := p.expect(token.LBRACE)
	var list []ast.Stmt
	for p.tok == token.CASE || p.tok == token.DEFAULT {
		list = append(list, p.parseCaseClause())
	}
	rbrace := p.expect(token.RBRACE)
	p.expectSemi()
	body := &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace}

	if typeSwitch {
		return &ast.TypeSwitchStmt{Switch: pos, Init: s1, Assign: s2, Body: body}
	}

	return &ast.SwitchStmt{Switch: pos, Init: s1, Tag: p.makeExpr(s2, "switch expression"), Body: body}
}
2290
// parseCommClause parses a single case (send statement, receive
// statement, or default) clause of a select statement.
func (p *parser) parseCommClause() *ast.CommClause {
	if p.trace {
		defer un(trace(p, "CommClause"))
	}

	pos := p.pos
	var comm ast.Stmt
	if p.tok == token.CASE {
		p.next()
		lhs := p.parseList(false)
		if p.tok == token.ARROW {
			// SendStmt
			if len(lhs) > 1 {
				p.errorExpected(lhs[0].Pos(), "1 expression")
				// continue with first expression
			}
			arrow := p.pos
			p.next()
			rhs := p.parseRhs()
			comm = &ast.SendStmt{Chan: lhs[0], Arrow: arrow, Value: rhs}
		} else {
			// RecvStmt
			if tok := p.tok; tok == token.ASSIGN || tok == token.DEFINE {
				// RecvStmt with assignment: v [, ok] = <-ch
				if len(lhs) > 2 {
					p.errorExpected(lhs[0].Pos(), "1 or 2 expressions")
					// continue with first two expressions
					lhs = lhs[0:2]
				}
				pos := p.pos
				p.next()
				rhs := p.parseRhs()
				comm = &ast.AssignStmt{Lhs: lhs, TokPos: pos, Tok: tok, Rhs: []ast.Expr{rhs}}
			} else {
				// lhs must be a single receive operation
				if len(lhs) > 1 {
					p.errorExpected(lhs[0].Pos(), "1 expression")
					// continue with first expression
				}
				comm = &ast.ExprStmt{X: lhs[0]}
			}
		}
	} else {
		p.expect(token.DEFAULT)
	}

	colon := p.expect(token.COLON)
	body := p.parseStmtList()

	return &ast.CommClause{Case: pos, Comm: comm, Colon: colon, Body: body}
}
2342
2343 func (p *parser) parseSelectStmt() *ast.SelectStmt {
2344 if p.trace {
2345 defer un(trace(p, "SelectStmt"))
2346 }
2347
2348 pos := p.expect(token.SELECT)
2349 lbrace := p.expect(token.LBRACE)
2350 var list []ast.Stmt
2351 for p.tok == token.CASE || p.tok == token.DEFAULT {
2352 list = append(list, p.parseCommClause())
2353 }
2354 rbrace := p.expect(token.RBRACE)
2355 p.expectSemi()
2356 body := &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace}
2357
2358 return &ast.SelectStmt{Select: pos, Body: body}
2359 }
2360
// parseForStmt parses a for statement in any of its forms: bare loop,
// condition-only, 3-clause, or range-based. A range clause yields an
// *ast.RangeStmt, everything else an *ast.ForStmt.
func (p *parser) parseForStmt() ast.Stmt {
	if p.trace {
		defer un(trace(p, "ForStmt"))
	}

	pos := p.expect(token.FOR)

	var s1, s2, s3 ast.Stmt // init; condition (or range clause); post
	var isRange bool
	if p.tok != token.LBRACE {
		prevLev := p.exprLev
		p.exprLev = -1 // disable composite literals inside the header
		if p.tok != token.SEMICOLON {
			if p.tok == token.RANGE {
				// "for range x" — no index/value variables
				pos := p.pos
				p.next()
				y := []ast.Expr{&ast.UnaryExpr{OpPos: pos, Op: token.RANGE, X: p.parseRhs()}}
				s2 = &ast.AssignStmt{Rhs: y}
				isRange = true
			} else {
				s2, isRange = p.parseSimpleStmt(rangeOk)
			}
		}
		if !isRange && p.tok == token.SEMICOLON {
			// 3-clause form: what we parsed was the init statement
			p.next()
			s1 = s2
			s2 = nil
			if p.tok != token.SEMICOLON {
				s2, _ = p.parseSimpleStmt(basic) // condition
			}
			p.expectSemi()
			if p.tok != token.LBRACE {
				s3, _ = p.parseSimpleStmt(basic) // post statement
			}
		}
		p.exprLev = prevLev
	}

	body := p.parseBlockStmt()
	p.expectSemi()

	if isRange {
		as := s2.(*ast.AssignStmt)
		// check lhs: at most key and value
		var key, value ast.Expr
		switch len(as.Lhs) {
		case 0:
			// nothing to do
		case 1:
			key = as.Lhs[0]
		case 2:
			key, value = as.Lhs[0], as.Lhs[1]
		default:
			p.errorExpected(as.Lhs[len(as.Lhs)-1].Pos(), "at most 2 expressions")
			return &ast.BadStmt{From: pos, To: body.End()}
		}
		// parseSimpleStmt returned a right-hand side that is a single
		// unary expression of the form "range x"
		x := as.Rhs[0].(*ast.UnaryExpr).X
		return &ast.RangeStmt{
			For:    pos,
			Key:    key,
			Value:  value,
			TokPos: as.TokPos,
			Tok:    as.Tok,
			Range:  as.Rhs[0].Pos(),
			X:      x,
			Body:   body,
		}
	}

	// regular for statement
	return &ast.ForStmt{
		For:  pos,
		Init: s1,
		Cond: p.makeExpr(s2, "boolean or range expression"),
		Post: s3,
		Body: body,
	}
}
2442
// parseStmt parses a single statement, dispatching on the current
// token. On failure it reports an error, advances to the next statement
// start, and returns a BadStmt.
func (p *parser) parseStmt() (s ast.Stmt) {
	defer decNestLev(incNestLev(p))

	if p.trace {
		defer un(trace(p, "Statement"))
	}

	switch p.tok {
	case token.CONST, token.TYPE, token.VAR:
		s = &ast.DeclStmt{Decl: p.parseDecl(stmtStart)}
	case
		// tokens that may start an expression:
		token.IDENT, token.INT, token.FLOAT, token.IMAG, token.CHAR, token.STRING, token.FUNC, token.LPAREN, // operands
		token.LBRACK, token.STRUCT, token.MAP, token.CHAN, token.INTERFACE, // composite types
		token.ADD, token.SUB, token.MUL, token.AND, token.XOR, token.ARROW, token.NOT: // unary operators
		s, _ = p.parseSimpleStmt(labelOk)
		// A labeled statement already consumed its trailing statement,
		// so don't expect a semicolon after it.
		if _, isLabeledStmt := s.(*ast.LabeledStmt); !isLabeledStmt {
			p.expectSemi()
		}
	case token.GO:
		s = p.parseGoStmt()
	case token.DEFER:
		s = p.parseDeferStmt()
	case token.RETURN:
		s = p.parseReturnStmt()
	case token.BREAK, token.CONTINUE, token.GOTO, token.FALLTHROUGH:
		s = p.parseBranchStmt(p.tok)
	case token.LBRACE:
		s = p.parseBlockStmt()
		p.expectSemi()
	case token.IF:
		s = p.parseIfStmt()
	case token.SWITCH:
		s = p.parseSwitchStmt()
	case token.SELECT:
		s = p.parseSelectStmt()
	case token.FOR:
		s = p.parseForStmt()
	case token.SEMICOLON:
		// an explicit or implicit (newline) semicolon produces an
		// empty statement; Implicit records which one it was
		s = &ast.EmptyStmt{Semicolon: p.pos, Implicit: p.lit == "\n"}
		p.next()
	case token.RBRACE:
		// a semicolon may be omitted before a closing "}"
		s = &ast.EmptyStmt{Semicolon: p.pos, Implicit: true}
	default:
		// no statement found
		pos := p.pos
		p.errorExpected(pos, "statement")
		p.advance(stmtStart)
		s = &ast.BadStmt{From: pos, To: p.pos}
	}

	return
}
2503
2504
2505
2506
// A parseSpecFunction parses one declaration specification (import,
// const, type, or var spec). doc is the spec's leading comment group,
// keyword the declaration keyword, and iota the spec's index within a
// parenthesized declaration group.
type parseSpecFunction func(doc *ast.CommentGroup, keyword token.Token, iota int) ast.Spec
2508
// parseImportSpec parses a single import spec: an optional local name
// ('.' or an identifier) followed by a string import path. The spec is
// also recorded in p.imports. The unused keyword/iota parameters make
// it a parseSpecFunction.
func (p *parser) parseImportSpec(doc *ast.CommentGroup, _ token.Token, _ int) ast.Spec {
	if p.trace {
		defer un(trace(p, "ImportSpec"))
	}

	var ident *ast.Ident
	switch p.tok {
	case token.IDENT:
		ident = p.parseIdent()
	case token.PERIOD:
		ident = &ast.Ident{NamePos: p.pos, Name: "."}
		p.next()
	}

	pos := p.pos
	end := p.pos
	var path string
	if p.tok == token.STRING {
		path = p.lit
		// NOTE(review): p.stringEnd presumably records the end position
		// of the string literal (set via the scanner hooks) — confirm
		// against the scanner integration.
		end = p.stringEnd
		p.next()
	} else if p.tok.IsLiteral() {
		// some other literal: wrong kind, but consume it and move on
		p.error(pos, "import path must be a string")
		p.next()
	} else {
		p.error(pos, "missing import path")
		p.advance(exprEnd)
	}
	comment := p.expectSemi()

	// collect imports
	spec := &ast.ImportSpec{
		Doc:     doc,
		Name:    ident,
		Path:    &ast.BasicLit{ValuePos: pos, ValueEnd: end, Kind: token.STRING, Value: path},
		Comment: comment,
	}
	p.imports = append(p.imports, spec)

	return spec
}
2550
// parseValueSpec parses a single const or var spec:
// names [type] [= values].
func (p *parser) parseValueSpec(doc *ast.CommentGroup, keyword token.Token, iota int) ast.Spec {
	if p.trace {
		defer un(trace(p, keyword.String()+"Spec"))
	}

	idents := p.parseIdentList()
	var typ ast.Expr
	var values []ast.Expr
	switch keyword {
	case token.CONST:
		// both type and values are optional (the value may come from a
		// preceding spec via iota)
		if p.tok != token.EOF && p.tok != token.SEMICOLON && p.tok != token.RPAREN {
			typ = p.tryIdentOrType()
			if p.tok == token.ASSIGN {
				p.next()
				values = p.parseList(true)
			}
		}
	case token.VAR:
		// a var spec has a type, initial values, or both
		if p.tok != token.ASSIGN {
			typ = p.parseType()
		}
		if p.tok == token.ASSIGN {
			p.next()
			values = p.parseList(true)
		}
	default:
		panic("unreachable")
	}
	comment := p.expectSemi()

	spec := &ast.ValueSpec{
		Doc:     doc,
		Names:   idents,
		Type:    typ,
		Values:  values,
		Comment: comment,
	}
	return spec
}
2591
2592 func (p *parser) parseGenericType(spec *ast.TypeSpec, openPos token.Pos, name0 *ast.Ident, typ0 ast.Expr) {
2593 if p.trace {
2594 defer un(trace(p, "parseGenericType"))
2595 }
2596
2597 list := p.parseParameterList(name0, typ0, token.RBRACK, false)
2598 closePos := p.expect(token.RBRACK)
2599 spec.TypeParams = &ast.FieldList{Opening: openPos, List: list, Closing: closePos}
2600 if p.tok == token.ASSIGN {
2601
2602 spec.Assign = p.pos
2603 p.next()
2604 }
2605 spec.Type = p.parseType()
2606 }
2607
// parseTypeSpec parses a single type spec: name [type parameters] type,
// or a type alias name = type. The hard part is that after "name [" we
// may be looking at either a type parameter list or an array length
// expression; the disambiguation is performed via extractName below.
func (p *parser) parseTypeSpec(doc *ast.CommentGroup, _ token.Token, _ int) ast.Spec {
	if p.trace {
		defer un(trace(p, "TypeSpec"))
	}

	name := p.parseIdent()
	spec := &ast.TypeSpec{Doc: doc, Name: name}

	if p.tok == token.LBRACK {
		// spec.Name "[" ...
		// array/slice type or type parameter list
		lbrack := p.pos
		p.next()
		if p.tok == token.IDENT {
			// We may have an array type or a type parameter list.
			// In either case we expect an expression x (which may just
			// be a name) that we can analyze further.
			//
			// A type parameter's constraint may itself start with "["
			// (as in: P []E); parsing a full expression there would
			// fail (P[] is invalid). But since index/slice expressions
			// are never constant and thus never valid array lengths,
			// a name followed by "[" must start a type parameter list.
			// Only without a following "[" do we parse a full expression.
			var x ast.Expr = p.parseIdent()
			if p.tok != token.LBRACK {
				// Continue parsing the expression that starts with the
				// already-consumed identifier by feeding it into
				// parsePrimaryExpr/parseBinaryExpr directly.
				p.exprLev++
				lhs := p.parsePrimaryExpr(x)
				x = p.parseBinaryExpr(lhs, token.LowestPrec+1)
				p.exprLev--
			}
			// Analyze x: if it splits into a type parameter name,
			// possibly followed by a type parameter constraint, treat
			// this as a type parameter list. A single name followed by
			// "]" tilts towards an array declaration; a following ","
			// tilts towards a type parameter list (hence the force
			// argument below).
			if pname, ptype := extractName(x, p.tok == token.COMMA); pname != nil && (ptype != nil || p.tok != token.RBRACK) {
				// spec.Name "[" pname ...
				// spec.Name "[" pname ptype ...
				// spec.Name "[" pname ptype "," ...
				p.parseGenericType(spec, lbrack, pname, ptype) // ptype may be nil
			} else {
				// spec.Name "[" pname "]" ...
				// spec.Name "[" x ...
				spec.Type = p.parseArrayType(lbrack, x)
			}
		} else {
			// array type (length expression does not start with a name)
			spec.Type = p.parseArrayType(lbrack, nil)
		}
	} else {
		// no type parameters
		if p.tok == token.ASSIGN {
			// type alias
			spec.Assign = p.pos
			p.next()
		}
		spec.Type = p.parseType()
	}

	spec.Comment = p.expectSemi()

	return spec
}
2682
2683
2684
2685
2686
2687
2688
2689
2690
2691
2692
2693
2694
2695
2696
2697
2698
2699
2700
// extractName splits the expression x into (name, expr) if x can
// syntactically be read as "name expr" — i.e. a type parameter name
// followed by its constraint. The split only happens if force is set or
// the would-be constraint is recognizably a type element (isTypeElem),
// which makes the reading unambiguous. The result is (nil, x) if x
// cannot be split.
func extractName(x ast.Expr, force bool) (*ast.Ident, ast.Expr) {
	switch x := x.(type) {
	case *ast.Ident:
		// name without a constraint
		return x, nil
	case *ast.BinaryExpr:
		switch x.Op {
		case token.MUL:
			// name *expr: "name" followed by a pointer-type constraint,
			// originally parsed as a multiplication
			if name, _ := x.X.(*ast.Ident); name != nil && (force || isTypeElem(x.Y)) {
				// x = name *x.Y
				return name, &ast.StarExpr{Star: x.OpPos, X: x.Y}
			}
		case token.OR:
			// name (expr|expr|...): a union constraint, originally
			// parsed as a bitwise-or chain; recurse into the left side
			if name, lhs := extractName(x.X, force || isTypeElem(x.Y)); name != nil && lhs != nil {
				// x = name lhs|x.Y
				op := *x
				op.X = lhs
				return name, &op
			}
		}
	case *ast.CallExpr:
		// name (expr): a parenthesized constraint, originally parsed as
		// a call with a single, non-variadic argument
		if name, _ := x.Fun.(*ast.Ident); name != nil {
			if len(x.Args) == 1 && x.Ellipsis == token.NoPos && (force || isTypeElem(x.Args[0])) {
				// x = name (x.Args[0]); keep the parentheses
				return name, &ast.ParenExpr{
					Lparen: x.Lparen,
					X:      x.Args[0],
					Rparen: x.Rparen,
				}
			}
		}
	}
	return nil, x
}
2736
2737
2738
2739 func isTypeElem(x ast.Expr) bool {
2740 switch x := x.(type) {
2741 case *ast.ArrayType, *ast.StructType, *ast.FuncType, *ast.InterfaceType, *ast.MapType, *ast.ChanType:
2742 return true
2743 case *ast.BinaryExpr:
2744 return isTypeElem(x.X) || isTypeElem(x.Y)
2745 case *ast.UnaryExpr:
2746 return x.Op == token.TILDE
2747 case *ast.ParenExpr:
2748 return isTypeElem(x.X)
2749 }
2750 return false
2751 }
2752
2753 func (p *parser) parseGenDecl(keyword token.Token, f parseSpecFunction) *ast.GenDecl {
2754 if p.trace {
2755 defer un(trace(p, "GenDecl("+keyword.String()+")"))
2756 }
2757
2758 doc := p.leadComment
2759 pos := p.expect(keyword)
2760 var lparen, rparen token.Pos
2761 var list []ast.Spec
2762 if p.tok == token.LPAREN {
2763 lparen = p.pos
2764 p.next()
2765 for iota := 0; p.tok != token.RPAREN && p.tok != token.EOF; iota++ {
2766 list = append(list, f(p.leadComment, keyword, iota))
2767 }
2768 rparen = p.expect(token.RPAREN)
2769 p.expectSemi()
2770 } else {
2771 list = append(list, f(nil, keyword, 0))
2772 }
2773
2774 return &ast.GenDecl{
2775 Doc: doc,
2776 TokPos: pos,
2777 Tok: keyword,
2778 Lparen: lparen,
2779 Specs: list,
2780 Rparen: rparen,
2781 }
2782 }
2783
// parseFuncDecl parses a function or method declaration: the "func"
// keyword, an optional receiver, the name, optional type parameters,
// the parameter and result lists, and an optional body.
func (p *parser) parseFuncDecl() *ast.FuncDecl {
	if p.trace {
		defer un(trace(p, "FunctionDecl"))
	}

	doc := p.leadComment
	pos := p.expect(token.FUNC)

	var recv *ast.FieldList
	if p.tok == token.LPAREN {
		// method: parse the receiver
		recv = p.parseParameters(false)
	}

	ident := p.parseIdent()

	var tparams *ast.FieldList
	if p.tok == token.LBRACK {
		tparams = p.parseTypeParameters()
		if recv != nil && tparams != nil {
			// Methods cannot have type parameters. Parse them anyway
			// for a better error message and improved error recovery,
			// then drop them.
			p.error(tparams.Opening, "method must have no type parameters")
			tparams = nil
		}
	}
	params := p.parseParameters(false)
	results := p.parseParameters(true)

	var body *ast.BlockStmt
	switch p.tok {
	case token.LBRACE:
		body = p.parseBody()
		p.expectSemi()
	case token.SEMICOLON:
		p.next()
		if p.tok == token.LBRACE {
			// opening { of function body on the next line: report it
			// but still parse the body for tolerance
			p.error(p.pos, "unexpected semicolon or newline before {")
			body = p.parseBody()
			p.expectSemi()
		}
	default:
		// declaration without body (e.g. assembly-backed function)
		p.expectSemi()
	}

	decl := &ast.FuncDecl{
		Doc:  doc,
		Recv: recv,
		Name: ident,
		Type: &ast.FuncType{
			Func:       pos,
			TypeParams: tparams,
			Params:     params,
			Results:    results,
		},
		Body: body,
	}
	return decl
}
2843
2844 func (p *parser) parseDecl(sync map[token.Token]bool) ast.Decl {
2845 if p.trace {
2846 defer un(trace(p, "Declaration"))
2847 }
2848
2849 var f parseSpecFunction
2850 switch p.tok {
2851 case token.IMPORT:
2852 f = p.parseImportSpec
2853
2854 case token.CONST, token.VAR:
2855 f = p.parseValueSpec
2856
2857 case token.TYPE:
2858 f = p.parseTypeSpec
2859
2860 case token.FUNC:
2861 return p.parseFuncDecl()
2862
2863 default:
2864 pos := p.pos
2865 p.errorExpected(pos, "declaration")
2866 p.advance(sync)
2867 return &ast.BadDecl{From: pos, To: p.pos}
2868 }
2869
2870 return p.parseGenDecl(p.tok, f)
2871 }
2872
2873
2874
2875
// parseFile parses a complete source file: package clause, import
// declarations, and all other top-level declarations. It returns nil if
// scanning or the package clause already produced errors.
func (p *parser) parseFile() *ast.File {
	if p.trace {
		defer un(trace(p, "File"))
	}

	// Don't bother parsing the rest if we had errors scanning the first
	// token. Likely not a Go source file at all.
	if p.errors.Len() != 0 {
		return nil
	}

	// package clause
	doc := p.leadComment
	pos := p.expect(token.PACKAGE)

	ident := p.parseIdent()
	if ident.Name == "_" && p.mode&DeclarationErrors != 0 {
		p.error(p.pos, "invalid package name _")
	}
	p.expectSemi()

	// Don't bother parsing the rest if we had errors parsing the
	// package clause. Likely not a Go source file at all.
	if p.errors.Len() != 0 {
		return nil
	}

	var decls []ast.Decl
	if p.mode&PackageClauseOnly == 0 {
		// import decls
		for p.tok == token.IMPORT {
			decls = append(decls, p.parseGenDecl(token.IMPORT, p.parseImportSpec))
		}

		if p.mode&ImportsOnly == 0 {
			// rest of package body
			prev := token.IMPORT
			for p.tok != token.EOF {
				// Continue to accept import declarations for error
				// tolerance, but complain if they don't precede all
				// other declarations.
				if p.tok == token.IMPORT && prev != token.IMPORT {
					p.error(p.pos, "imports must appear before other declarations")
				}
				prev = p.tok

				decls = append(decls, p.parseDecl(declStart))
			}
		}
	}

	f := &ast.File{
		Doc:     doc,
		Package: pos,
		Name:    ident,
		Decls:   decls,
		// NOTE(review): FileStart/FileEnd are presumably filled in by
		// the caller — confirm.
		Imports:   p.imports,
		Comments:  p.comments,
		GoVersion: p.goVersion,
	}
	var declErr func(token.Pos, string)
	if p.mode&DeclarationErrors != 0 {
		declErr = p.error
	}
	if p.mode&SkipObjectResolution == 0 {
		resolveFile(f, p.file, declErr)
	}

	return f
}
2946
2947
2948 func packIndexExpr(x ast.Expr, lbrack token.Pos, exprs []ast.Expr, rbrack token.Pos) ast.Expr {
2949 switch len(exprs) {
2950 case 0:
2951 panic("internal error: packIndexExpr with empty expr slice")
2952 case 1:
2953 return &ast.IndexExpr{
2954 X: x,
2955 Lbrack: lbrack,
2956 Index: exprs[0],
2957 Rbrack: rbrack,
2958 }
2959 default:
2960 return &ast.IndexListExpr{
2961 X: x,
2962 Lbrack: lbrack,
2963 Indices: exprs,
2964 Rbrack: rbrack,
2965 }
2966 }
2967 }
2968
View as plain text