// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
5 package wasm
6
7 import (
8 "cmd/compile/internal/base"
9 "cmd/compile/internal/ir"
10 "cmd/compile/internal/logopt"
11 "cmd/compile/internal/objw"
12 "cmd/compile/internal/ssa"
13 "cmd/compile/internal/ssagen"
14 "cmd/compile/internal/types"
15 "cmd/internal/obj"
16 "cmd/internal/obj/wasm"
17 "internal/buildcfg"
18 )
19
20
132
133 func Init(arch *ssagen.ArchInfo) {
134 arch.LinkArch = &wasm.Linkwasm
135 arch.REGSP = wasm.REG_SP
136 arch.MAXWIDTH = 1 << 50
137
138 arch.ZeroRange = zeroRange
139 arch.Ginsnop = ginsnop
140
141 arch.SSAMarkMoves = ssaMarkMoves
142 arch.SSAGenValue = ssaGenValue
143 arch.SSAGenBlock = ssaGenBlock
144 }
145
146 func zeroRange(pp *objw.Progs, p *obj.Prog, off, cnt int64, state *uint32) *obj.Prog {
147 if cnt == 0 {
148 return p
149 }
150 if cnt%8 != 0 {
151 base.Fatalf("zerorange count not a multiple of widthptr %d", cnt)
152 }
153
154 for i := int64(0); i < cnt; i += 8 {
155 p = pp.Append(p, wasm.AGet, obj.TYPE_REG, wasm.REG_SP, 0, 0, 0, 0)
156 p = pp.Append(p, wasm.AI64Const, obj.TYPE_CONST, 0, 0, 0, 0, 0)
157 p = pp.Append(p, wasm.AI64Store, 0, 0, 0, obj.TYPE_CONST, 0, off+i)
158 }
159
160 return p
161 }
162
163 func ginsnop(pp *objw.Progs) *obj.Prog {
164 return pp.Prog(wasm.ANop)
165 }
166
// ssaMarkMoves marks values that need special handling during code
// generation. Wasm needs none, so this hook is intentionally empty.
func ssaMarkMoves(s *ssagen.State, b *ssa.Block) {
}
169
// ssaGenBlock emits the control-flow instructions terminating block b.
// next is the block laid out immediately after b, so jumps to next can be
// elided.
func ssaGenBlock(s *ssagen.State, b, next *ssa.Block) {
	switch b.Kind {
	case ssa.BlockPlain, ssa.BlockDefer:
		// Unconditional successor: jump only if it does not fall through.
		if next != b.Succs[0].Block() {
			s.Br(obj.AJMP, b.Succs[0].Block())
		}

	case ssa.BlockIf:
		switch next {
		case b.Succs[0].Block():
			// True successor falls through: invert the condition (i32.eqz)
			// and jump to the false successor when the result is zero.
			getValue32(s, b.Controls[0])
			s.Prog(wasm.AI32Eqz)
			s.Prog(wasm.AIf)
			s.Br(obj.AJMP, b.Succs[1].Block())
			s.Prog(wasm.AEnd)
		case b.Succs[1].Block():
			// False successor falls through: jump to the true successor
			// when the condition is nonzero.
			getValue32(s, b.Controls[0])
			s.Prog(wasm.AIf)
			s.Br(obj.AJMP, b.Succs[0].Block())
			s.Prog(wasm.AEnd)
		default:
			// Neither successor falls through: conditional jump to the
			// true successor, then unconditional jump to the false one.
			getValue32(s, b.Controls[0])
			s.Prog(wasm.AIf)
			s.Br(obj.AJMP, b.Succs[0].Block())
			s.Prog(wasm.AEnd)
			s.Br(obj.AJMP, b.Succs[1].Block())
		}

	case ssa.BlockRet:
		s.Prog(obj.ARET)

	case ssa.BlockExit, ssa.BlockRetJmp:
		// Nothing to emit; the exit/ret-jmp instructions were generated
		// as values within the block.

	default:
		panic("unexpected block")
	}

	// Mark a resume point at every block boundary so execution can re-enter
	// here (NOTE(review): exact unwind/re-entry mechanics live in the wasm
	// obj backend — confirm there).
	s.Prog(wasm.ARESUMEPOINT)

	// Sanity check: everything pushed on the wasm operand stack for this
	// block must have been consumed.
	if s.OnWasmStackSkipped != 0 {
		panic("wasm: bad stack")
	}
}
217
// ssaGenValue emits wasm instructions for SSA value v and, for register
// values, stores the result into v's assigned register. Values flagged
// OnWasmStack are not emitted here; they are materialized later, directly
// on the wasm operand stack, by their consumer (via ssaGenValueOnStack).
func ssaGenValue(s *ssagen.State, v *ssa.Value) {
	switch v.Op {
	case ssa.OpWasmLoweredStaticCall, ssa.OpWasmLoweredClosureCall, ssa.OpWasmLoweredInterCall, ssa.OpWasmLoweredTailCall:
		s.PrepareCall(v)
		if call, ok := v.Aux.(*ssa.AuxCall); ok && call.Fn == ir.Syms.Deferreturn {
			// Place an extra resume point at the deferreturn call so the
			// runtime can re-enter it (NOTE(review): assumed to serve panic
			// recovery — confirm against the runtime's wasm support).
			s.Prog(wasm.ARESUMEPOINT)
		}
		if v.Op == ssa.OpWasmLoweredClosureCall {
			// Closure calls pass the closure context in REG_CTXT.
			getValue64(s, v.Args[1])
			setReg(s, wasm.REG_CTXT)
		}
		if call, ok := v.Aux.(*ssa.AuxCall); ok && call.Fn != nil {
			// Direct call to a known symbol.
			sym := call.Fn
			p := s.Prog(obj.ACALL)
			p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: sym}
			p.Pos = v.Pos
			if v.Op == ssa.OpWasmLoweredTailCall {
				// Tail calls are encoded as a returning call.
				p.As = obj.ARET
			}
		} else {
			// Indirect call: target comes from Args[0].
			getValue64(s, v.Args[0])
			p := s.Prog(obj.ACALL)
			p.To = obj.Addr{Type: obj.TYPE_NONE}
			p.Pos = v.Pos
		}

	case ssa.OpWasmLoweredMove:
		// memory.copy(dst, src, n): dst and src pushed as i32 addresses,
		// AuxInt is the byte count.
		getValue32(s, v.Args[0])
		getValue32(s, v.Args[1])
		i32Const(s, int32(v.AuxInt))
		s.Prog(wasm.AMemoryCopy)

	case ssa.OpWasmLoweredZero:
		// memory.fill(dst, 0, n).
		getValue32(s, v.Args[0])
		i32Const(s, 0)
		i32Const(s, int32(v.AuxInt))
		s.Prog(wasm.AMemoryFill)

	case ssa.OpWasmLoweredNilCheck:
		// if ptr == 0 { call sigpanic } — the call must not get a resume
		// point (CALLNORESUME).
		getValue64(s, v.Args[0])
		s.Prog(wasm.AI64Eqz)
		s.Prog(wasm.AIf)
		p := s.Prog(wasm.ACALLNORESUME)
		p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: ir.Syms.SigPanic}
		s.Prog(wasm.AEnd)
		if logopt.Enabled() {
			logopt.LogOpt(v.Pos, "nilcheck", "genssa", v.Block.Func.Name)
		}
		if base.Debug.Nil != 0 && v.Pos.Line() > 1 {
			base.WarnfAt(v.Pos, "generated nil check")
		}

	case ssa.OpWasmLoweredWB:
		p := s.Prog(wasm.ACall)
		// AuxInt is a 1-based selector into the gcWriteBarrier variants.
		p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: ir.Syms.GCWriteBarrier[v.AuxInt-1]}
		// The barrier's result goes to v's first output register.
		setReg(s, v.Reg0())

	case ssa.OpWasmI64Store8, ssa.OpWasmI64Store16, ssa.OpWasmI64Store32, ssa.OpWasmI64Store, ssa.OpWasmF32Store, ssa.OpWasmF64Store:
		// Stack: i32 address, i64/float value; AuxInt is the store offset.
		getValue32(s, v.Args[0])
		getValue64(s, v.Args[1])
		p := s.Prog(v.Op.Asm())
		p.To = obj.Addr{Type: obj.TYPE_CONST, Offset: v.AuxInt}

	case ssa.OpStoreReg:
		// Spill to the stack frame: address is SP-relative (AddrAuto).
		getReg(s, wasm.REG_SP)
		getValue64(s, v.Args[0])
		p := s.Prog(storeOp(v.Type))
		ssagen.AddrAuto(&p.To, v)

	case ssa.OpClobber, ssa.OpClobberReg:
		// Clobber ops are debugging aids on other architectures; no-op here.

	default:
		if v.Type.IsMemory() {
			return
		}
		if v.OnWasmStack {
			// Defer emission: the consumer will generate this value
			// directly on the wasm operand stack.
			s.OnWasmStackSkipped++
			return
		}
		ssaGenValueOnStack(s, v, true)
		// All deferred stack values must have been consumed by now.
		if s.OnWasmStackSkipped != 0 {
			panic("wasm: bad stack")
		}
		setReg(s, v.Reg())
	}
}
313
// ssaGenValueOnStack emits instructions for v that leave its result on the
// wasm operand stack rather than in a register. If extend is true, ops that
// natively produce an i32 (comparisons, i32 rotate) are zero-extended to
// i64, the width Go values occupy on the wasm stack.
func ssaGenValueOnStack(s *ssagen.State, v *ssa.Value, extend bool) {
	switch v.Op {
	case ssa.OpWasmLoweredGetClosurePtr:
		// The closure pointer lives in the context register.
		getReg(s, wasm.REG_CTXT)

	case ssa.OpWasmLoweredGetCallerPC:
		p := s.Prog(wasm.AI64Load)
		// The caller's return address sits 8 bytes below the first
		// parameter slot (hence offset -8 in the parameter space).
		p.From = obj.Addr{
			Type:   obj.TYPE_MEM,
			Name:   obj.NAME_PARAM,
			Offset: -8,
		}

	case ssa.OpWasmLoweredGetCallerSP:
		p := s.Prog(wasm.AGet)
		// Address of the first parameter, i.e. the caller's SP.
		p.From = obj.Addr{
			Type:   obj.TYPE_ADDR,
			Name:   obj.NAME_PARAM,
			Reg:    wasm.REG_SP,
			Offset: 0,
		}

	case ssa.OpWasmLoweredAddr:
		if v.Aux == nil {
			// No symbol: plain pointer arithmetic, base + AuxInt.
			getValue64(s, v.Args[0])
			i64Const(s, v.AuxInt)
			s.Prog(wasm.AI64Add)
			break
		}
		p := s.Prog(wasm.AGet)
		p.From.Type = obj.TYPE_ADDR
		switch v.Aux.(type) {
		case *obj.LSym:
			// Address of a global symbol.
			ssagen.AddAux(&p.From, v)
		case *ir.Name:
			// Address of a local/param: base register from Args[0].
			p.From.Reg = v.Args[0].Reg()
			ssagen.AddAux(&p.From, v)
		default:
			panic("wasm: bad LoweredAddr")
		}

	case ssa.OpWasmLoweredConvert:
		// Value is already an i64; just forward it.
		getValue64(s, v.Args[0])

	case ssa.OpWasmSelect:
		// select(a, b, cond): cond is an i32 on top of the stack.
		getValue64(s, v.Args[0])
		getValue64(s, v.Args[1])
		getValue32(s, v.Args[2])
		s.Prog(v.Op.Asm())

	case ssa.OpWasmI64AddConst:
		getValue64(s, v.Args[0])
		i64Const(s, v.AuxInt)
		s.Prog(v.Op.Asm())

	case ssa.OpWasmI64Const:
		i64Const(s, v.AuxInt)

	case ssa.OpWasmF32Const:
		f32Const(s, v.AuxFloat())

	case ssa.OpWasmF64Const:
		f64Const(s, v.AuxFloat())

	case ssa.OpWasmI64Load8U, ssa.OpWasmI64Load8S, ssa.OpWasmI64Load16U, ssa.OpWasmI64Load16S, ssa.OpWasmI64Load32U, ssa.OpWasmI64Load32S, ssa.OpWasmI64Load, ssa.OpWasmF32Load, ssa.OpWasmF64Load:
		// Loads take an i32 address on the stack; AuxInt is the offset.
		getValue32(s, v.Args[0])
		p := s.Prog(v.Op.Asm())
		p.From = obj.Addr{Type: obj.TYPE_CONST, Offset: v.AuxInt}

	case ssa.OpWasmI64Eqz:
		getValue64(s, v.Args[0])
		s.Prog(v.Op.Asm())
		// eqz yields an i32; widen when the consumer expects i64.
		if extend {
			s.Prog(wasm.AI64ExtendI32U)
		}

	case ssa.OpWasmI64Eq, ssa.OpWasmI64Ne, ssa.OpWasmI64LtS, ssa.OpWasmI64LtU, ssa.OpWasmI64GtS, ssa.OpWasmI64GtU, ssa.OpWasmI64LeS, ssa.OpWasmI64LeU, ssa.OpWasmI64GeS, ssa.OpWasmI64GeU,
		ssa.OpWasmF32Eq, ssa.OpWasmF32Ne, ssa.OpWasmF32Lt, ssa.OpWasmF32Gt, ssa.OpWasmF32Le, ssa.OpWasmF32Ge,
		ssa.OpWasmF64Eq, ssa.OpWasmF64Ne, ssa.OpWasmF64Lt, ssa.OpWasmF64Gt, ssa.OpWasmF64Le, ssa.OpWasmF64Ge:
		// Binary comparisons: i32 result, widened like eqz above.
		getValue64(s, v.Args[0])
		getValue64(s, v.Args[1])
		s.Prog(v.Op.Asm())
		if extend {
			s.Prog(wasm.AI64ExtendI32U)
		}

	case ssa.OpWasmI64Add, ssa.OpWasmI64Sub, ssa.OpWasmI64Mul, ssa.OpWasmI64DivU, ssa.OpWasmI64RemS, ssa.OpWasmI64RemU, ssa.OpWasmI64And, ssa.OpWasmI64Or, ssa.OpWasmI64Xor, ssa.OpWasmI64Shl, ssa.OpWasmI64ShrS, ssa.OpWasmI64ShrU, ssa.OpWasmI64Rotl,
		ssa.OpWasmF32Add, ssa.OpWasmF32Sub, ssa.OpWasmF32Mul, ssa.OpWasmF32Div, ssa.OpWasmF32Copysign,
		ssa.OpWasmF64Add, ssa.OpWasmF64Sub, ssa.OpWasmF64Mul, ssa.OpWasmF64Div, ssa.OpWasmF64Copysign:
		// Plain binary ops: both operands pushed as i64/float.
		getValue64(s, v.Args[0])
		getValue64(s, v.Args[1])
		s.Prog(v.Op.Asm())

	case ssa.OpWasmI32Rotl:
		// 32-bit rotate operates on i32 operands; widen the result.
		getValue32(s, v.Args[0])
		getValue32(s, v.Args[1])
		s.Prog(wasm.AI32Rotl)
		s.Prog(wasm.AI64ExtendI32U)

	case ssa.OpWasmI64DivS:
		getValue64(s, v.Args[0])
		getValue64(s, v.Args[1])
		if v.Type.Size() == 8 {
			// Full-width signed division goes through the wasmDiv helper;
			// native i64.div_s traps on MinInt64 / -1, which Go defines.
			p := s.Prog(wasm.ACall)
			p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: ir.Syms.WasmDiv}
			break
		}
		s.Prog(wasm.AI64DivS)

	case ssa.OpWasmI64TruncSatF32S, ssa.OpWasmI64TruncSatF64S:
		getValue64(s, v.Args[0])
		if buildcfg.GOWASM.SatConv {
			// Host supports saturating conversions; use them directly.
			s.Prog(v.Op.Asm())
		} else {
			// Fall back to the runtime helper, which takes an f64.
			if v.Op == ssa.OpWasmI64TruncSatF32S {
				s.Prog(wasm.AF64PromoteF32)
			}
			p := s.Prog(wasm.ACall)
			p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: ir.Syms.WasmTruncS}
		}

	case ssa.OpWasmI64TruncSatF32U, ssa.OpWasmI64TruncSatF64U:
		// Unsigned analog of the case above.
		getValue64(s, v.Args[0])
		if buildcfg.GOWASM.SatConv {
			s.Prog(v.Op.Asm())
		} else {
			if v.Op == ssa.OpWasmI64TruncSatF32U {
				s.Prog(wasm.AF64PromoteF32)
			}
			p := s.Prog(wasm.ACall)
			p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: ir.Syms.WasmTruncU}
		}

	case ssa.OpWasmF32DemoteF64:
		getValue64(s, v.Args[0])
		s.Prog(v.Op.Asm())

	case ssa.OpWasmF64PromoteF32:
		getValue64(s, v.Args[0])
		s.Prog(v.Op.Asm())

	case ssa.OpWasmF32ConvertI64S, ssa.OpWasmF32ConvertI64U,
		ssa.OpWasmF64ConvertI64S, ssa.OpWasmF64ConvertI64U,
		ssa.OpWasmI64Extend8S, ssa.OpWasmI64Extend16S, ssa.OpWasmI64Extend32S,
		ssa.OpWasmF32Neg, ssa.OpWasmF32Sqrt, ssa.OpWasmF32Trunc, ssa.OpWasmF32Ceil, ssa.OpWasmF32Floor, ssa.OpWasmF32Nearest, ssa.OpWasmF32Abs,
		ssa.OpWasmF64Neg, ssa.OpWasmF64Sqrt, ssa.OpWasmF64Trunc, ssa.OpWasmF64Ceil, ssa.OpWasmF64Floor, ssa.OpWasmF64Nearest, ssa.OpWasmF64Abs,
		ssa.OpWasmI64Ctz, ssa.OpWasmI64Clz, ssa.OpWasmI64Popcnt:
		// Simple unary ops: one operand, direct translation.
		getValue64(s, v.Args[0])
		s.Prog(v.Op.Asm())

	case ssa.OpLoadReg:
		// Reload from a stack frame slot (AddrAuto computes the address).
		p := s.Prog(loadOp(v.Type))
		ssagen.AddrAuto(&p.From, v.Args[0])

	case ssa.OpCopy:
		getValue64(s, v.Args[0])

	default:
		v.Fatalf("unexpected op: %s", v.Op)

	}
}
479
480 func isCmp(v *ssa.Value) bool {
481 switch v.Op {
482 case ssa.OpWasmI64Eqz, ssa.OpWasmI64Eq, ssa.OpWasmI64Ne, ssa.OpWasmI64LtS, ssa.OpWasmI64LtU, ssa.OpWasmI64GtS, ssa.OpWasmI64GtU, ssa.OpWasmI64LeS, ssa.OpWasmI64LeU, ssa.OpWasmI64GeS, ssa.OpWasmI64GeU,
483 ssa.OpWasmF32Eq, ssa.OpWasmF32Ne, ssa.OpWasmF32Lt, ssa.OpWasmF32Gt, ssa.OpWasmF32Le, ssa.OpWasmF32Ge,
484 ssa.OpWasmF64Eq, ssa.OpWasmF64Ne, ssa.OpWasmF64Lt, ssa.OpWasmF64Gt, ssa.OpWasmF64Le, ssa.OpWasmF64Ge:
485 return true
486 default:
487 return false
488 }
489 }
490
491 func getValue32(s *ssagen.State, v *ssa.Value) {
492 if v.OnWasmStack {
493 s.OnWasmStackSkipped--
494 ssaGenValueOnStack(s, v, false)
495 if !isCmp(v) {
496 s.Prog(wasm.AI32WrapI64)
497 }
498 return
499 }
500
501 reg := v.Reg()
502 getReg(s, reg)
503 if reg != wasm.REG_SP {
504 s.Prog(wasm.AI32WrapI64)
505 }
506 }
507
508 func getValue64(s *ssagen.State, v *ssa.Value) {
509 if v.OnWasmStack {
510 s.OnWasmStackSkipped--
511 ssaGenValueOnStack(s, v, true)
512 return
513 }
514
515 reg := v.Reg()
516 getReg(s, reg)
517 if reg == wasm.REG_SP {
518 s.Prog(wasm.AI64ExtendI32U)
519 }
520 }
521
522 func i32Const(s *ssagen.State, val int32) {
523 p := s.Prog(wasm.AI32Const)
524 p.From = obj.Addr{Type: obj.TYPE_CONST, Offset: int64(val)}
525 }
526
527 func i64Const(s *ssagen.State, val int64) {
528 p := s.Prog(wasm.AI64Const)
529 p.From = obj.Addr{Type: obj.TYPE_CONST, Offset: val}
530 }
531
532 func f32Const(s *ssagen.State, val float64) {
533 p := s.Prog(wasm.AF32Const)
534 p.From = obj.Addr{Type: obj.TYPE_FCONST, Val: val}
535 }
536
537 func f64Const(s *ssagen.State, val float64) {
538 p := s.Prog(wasm.AF64Const)
539 p.From = obj.Addr{Type: obj.TYPE_FCONST, Val: val}
540 }
541
542 func getReg(s *ssagen.State, reg int16) {
543 p := s.Prog(wasm.AGet)
544 p.From = obj.Addr{Type: obj.TYPE_REG, Reg: reg}
545 }
546
547 func setReg(s *ssagen.State, reg int16) {
548 p := s.Prog(wasm.ASet)
549 p.To = obj.Addr{Type: obj.TYPE_REG, Reg: reg}
550 }
551
552 func loadOp(t *types.Type) obj.As {
553 if t.IsFloat() {
554 switch t.Size() {
555 case 4:
556 return wasm.AF32Load
557 case 8:
558 return wasm.AF64Load
559 default:
560 panic("bad load type")
561 }
562 }
563
564 switch t.Size() {
565 case 1:
566 if t.IsSigned() {
567 return wasm.AI64Load8S
568 }
569 return wasm.AI64Load8U
570 case 2:
571 if t.IsSigned() {
572 return wasm.AI64Load16S
573 }
574 return wasm.AI64Load16U
575 case 4:
576 if t.IsSigned() {
577 return wasm.AI64Load32S
578 }
579 return wasm.AI64Load32U
580 case 8:
581 return wasm.AI64Load
582 default:
583 panic("bad load type")
584 }
585 }
586
587 func storeOp(t *types.Type) obj.As {
588 if t.IsFloat() {
589 switch t.Size() {
590 case 4:
591 return wasm.AF32Store
592 case 8:
593 return wasm.AF64Store
594 default:
595 panic("bad store type")
596 }
597 }
598
599 switch t.Size() {
600 case 1:
601 return wasm.AI64Store8
602 case 2:
603 return wasm.AI64Store16
604 case 4:
605 return wasm.AI64Store32
606 case 8:
607 return wasm.AI64Store
608 default:
609 panic("bad store type")
610 }
611 }
612