5 package wasm
6
7 import (
8 "cmd/compile/internal/base"
9 "cmd/compile/internal/ir"
10 "cmd/compile/internal/logopt"
11 "cmd/compile/internal/objw"
12 "cmd/compile/internal/ssa"
13 "cmd/compile/internal/ssagen"
14 "cmd/compile/internal/types"
15 "cmd/internal/obj"
16 "cmd/internal/obj/wasm"
17 )
18
19
131
// Init populates arch with the wasm-specific hooks and constants used by
// the architecture-independent SSA-to-assembly pass.
func Init(arch *ssagen.ArchInfo) {
	arch.LinkArch = &wasm.Linkwasm
	arch.REGSP = wasm.REG_SP
	arch.MAXWIDTH = 1 << 50

	arch.ZeroRange = zeroRange
	arch.Ginsnop = ginsnop

	arch.SSAMarkMoves = ssaMarkMoves
	arch.SSAGenValue = ssaGenValue
	arch.SSAGenBlock = ssaGenBlock
}
144
// zeroRange zeroes cnt bytes of the stack frame starting at offset off from
// SP, appending the generated instructions after p and returning the last
// instruction emitted. cnt must be a multiple of 8; one i64 store is emitted
// per 8-byte word.
func zeroRange(pp *objw.Progs, p *obj.Prog, off, cnt int64, state *uint32) *obj.Prog {
	if cnt == 0 {
		return p
	}
	if cnt%8 != 0 {
		base.Fatalf("zerorange count not a multiple of widthptr %d", cnt)
	}

	// For each word: push SP, push the constant 0, store it at SP+off+i.
	for i := int64(0); i < cnt; i += 8 {
		p = pp.Append(p, wasm.AGet, obj.TYPE_REG, wasm.REG_SP, 0, 0, 0, 0)
		p = pp.Append(p, wasm.AI64Const, obj.TYPE_CONST, 0, 0, 0, 0, 0)
		p = pp.Append(p, wasm.AI64Store, 0, 0, 0, obj.TYPE_CONST, 0, off+i)
	}

	return p
}
161
162 func ginsnop(pp *objw.Progs) *obj.Prog {
163 return pp.Prog(wasm.ANop)
164 }
165
// ssaMarkMoves is a no-op on wasm: there are no moves that need to be marked
// for this architecture.
func ssaMarkMoves(s *ssagen.State, b *ssa.Block) {
}
168
// ssaGenBlock emits the control-flow instructions that terminate block b.
// next is the block that will be laid out immediately after b, which lets
// fallthrough jumps be elided.
func ssaGenBlock(s *ssagen.State, b, next *ssa.Block) {
	switch b.Kind {
	case ssa.BlockPlain, ssa.BlockDefer:
		if next != b.Succs[0].Block() {
			s.Br(obj.AJMP, b.Succs[0].Block())
		}

	case ssa.BlockIf:
		switch next {
		case b.Succs[0].Block():
			// The true successor falls through: invert the condition and
			// branch to the false successor when it holds.
			getValue32(s, b.Controls[0])
			s.Prog(wasm.AI32Eqz)
			s.Prog(wasm.AIf)
			s.Br(obj.AJMP, b.Succs[1].Block())
			s.Prog(wasm.AEnd)
		case b.Succs[1].Block():
			// The false successor falls through: branch to the true
			// successor when the condition holds.
			getValue32(s, b.Controls[0])
			s.Prog(wasm.AIf)
			s.Br(obj.AJMP, b.Succs[0].Block())
			s.Prog(wasm.AEnd)
		default:
			// Neither successor falls through: conditional branch to the
			// true successor, then an unconditional jump to the false one.
			getValue32(s, b.Controls[0])
			s.Prog(wasm.AIf)
			s.Br(obj.AJMP, b.Succs[0].Block())
			s.Prog(wasm.AEnd)
			s.Br(obj.AJMP, b.Succs[1].Block())
		}

	case ssa.BlockRet:
		s.Prog(obj.ARET)

	case ssa.BlockExit, ssa.BlockRetJmp:
		// Nothing to emit.

	default:
		panic("unexpected block")
	}

	// Mark a resume point at the end of every block.
	// NOTE(review): presumably so execution can re-enter here after a call
	// that unwinds/resumes — confirm against cmd/internal/obj/wasm.
	s.Prog(wasm.ARESUMEPOINT)

	// All values produced on the wasm operand stack must have been consumed
	// by the end of the block.
	if s.OnWasmStackSkipped != 0 {
		panic("wasm: bad stack")
	}
}
216
// ssaGenValue emits wasm instructions for value v. Ops that must interact
// with registers or memory are handled here; all remaining ops are delegated
// to ssaGenValueOnStack and their result is then stored into v's register.
func ssaGenValue(s *ssagen.State, v *ssa.Value) {
	switch v.Op {
	case ssa.OpWasmLoweredStaticCall, ssa.OpWasmLoweredClosureCall, ssa.OpWasmLoweredInterCall, ssa.OpWasmLoweredTailCall, ssa.OpWasmLoweredTailCallInter:
		s.PrepareCall(v)
		if call, ok := v.Aux.(*ssa.AuxCall); ok && call.Fn == ir.Syms.Deferreturn {
			// Add an extra resume point before the deferreturn call.
			// NOTE(review): presumably so a recovered panic can resume
			// execution at this call — confirm.
			s.Prog(wasm.ARESUMEPOINT)
		}
		if v.Op == ssa.OpWasmLoweredClosureCall {
			// Closure calls pass the closure pointer in REG_CTXT.
			getValue64(s, v.Args[1])
			setReg(s, wasm.REG_CTXT)
		}
		if call, ok := v.Aux.(*ssa.AuxCall); ok && call.Fn != nil {
			// Direct call to a known symbol; tail calls become ARET.
			sym := call.Fn
			p := s.Prog(obj.ACALL)
			p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: sym}
			p.Pos = v.Pos
			if v.Op == ssa.OpWasmLoweredTailCall {
				p.As = obj.ARET
			}
		} else {
			// Indirect call: the target is the first argument, pushed onto
			// the wasm stack.
			getValue64(s, v.Args[0])
			p := s.Prog(obj.ACALL)
			p.To = obj.Addr{Type: obj.TYPE_NONE}
			p.Pos = v.Pos
			if v.Op == ssa.OpWasmLoweredTailCallInter {
				p.As = obj.ARET
			}
		}

	case ssa.OpWasmLoweredMove:
		// memory.copy dst, src, size (size from AuxInt).
		getValue32(s, v.Args[0])
		getValue32(s, v.Args[1])
		i32Const(s, int32(v.AuxInt))
		s.Prog(wasm.AMemoryCopy)

	case ssa.OpWasmLoweredZero:
		// memory.fill ptr, 0, size (size from AuxInt).
		getValue32(s, v.Args[0])
		i32Const(s, 0)
		i32Const(s, int32(v.AuxInt))
		s.Prog(wasm.AMemoryFill)

	case ssa.OpWasmLoweredNilCheck:
		// If the pointer is zero, call sigpanic.
		getValue64(s, v.Args[0])
		s.Prog(wasm.AI64Eqz)
		s.Prog(wasm.AIf)
		p := s.Prog(wasm.ACALLNORESUME)
		p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: ir.Syms.SigPanic}
		s.Prog(wasm.AEnd)
		if logopt.Enabled() {
			logopt.LogOpt(v.Pos, "nilcheck", "genssa", v.Block.Func.Name)
		}
		if base.Debug.Nil != 0 && v.Pos.Line() > 1 {
			base.WarnfAt(v.Pos, "generated nil check")
		}

	case ssa.OpWasmLoweredWB:
		p := s.Prog(wasm.ACall)
		// AuxInt selects the write-barrier variant (1-based index into
		// ir.Syms.GCWriteBarrier).
		p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: ir.Syms.GCWriteBarrier[v.AuxInt-1]}
		setReg(s, v.Reg0()) // store the call's result into its register

	case ssa.OpWasmI64Store8, ssa.OpWasmI64Store16, ssa.OpWasmI64Store32, ssa.OpWasmI64Store, ssa.OpWasmF32Store, ssa.OpWasmF64Store:
		// store addr, value with a constant offset taken from AuxInt.
		getValue32(s, v.Args[0])
		getValue64(s, v.Args[1])
		p := s.Prog(v.Op.Asm())
		p.To = obj.Addr{Type: obj.TYPE_CONST, Offset: v.AuxInt}

	case ssa.OpStoreReg:
		// Spill a register value to its auto slot in the stack frame.
		getReg(s, wasm.REG_SP)
		getValue64(s, v.Args[0])
		p := s.Prog(storeOp(v.Type))
		ssagen.AddrAuto(&p.To, v)

	case ssa.OpClobber, ssa.OpClobberReg:
		// Nothing to emit.

	default:
		if v.Type.IsMemory() {
			return
		}
		if v.OnWasmStack {
			// The value stays on the wasm operand stack and will be
			// materialized by the op that consumes it (see getValue32/64).
			s.OnWasmStackSkipped++
			return
		}
		ssaGenValueOnStack(s, v, true)
		if s.OnWasmStackSkipped != 0 {
			panic("wasm: bad stack")
		}
		setReg(s, v.Reg())
	}
}
315
// ssaGenValueOnStack emits wasm instructions for v, leaving the result on the
// wasm operand stack. If extend is true, i32-producing comparison ops are
// zero-extended to i64 so every stack value has a uniform 64-bit width.
func ssaGenValueOnStack(s *ssagen.State, v *ssa.Value, extend bool) {
	switch v.Op {
	case ssa.OpWasmLoweredGetClosurePtr:
		getReg(s, wasm.REG_CTXT)

	case ssa.OpWasmLoweredGetCallerPC:
		p := s.Prog(wasm.AI64Load)
		// Load from just below the first parameter.
		// NOTE(review): presumably the return PC slot — confirm layout.
		p.From = obj.Addr{
			Type:   obj.TYPE_MEM,
			Name:   obj.NAME_PARAM,
			Offset: -8,
		}

	case ssa.OpWasmLoweredGetCallerSP:
		// Address of the first parameter, i.e. the caller's stack pointer.
		p := s.Prog(wasm.AGet)
		p.From = obj.Addr{
			Type:   obj.TYPE_ADDR,
			Name:   obj.NAME_PARAM,
			Reg:    wasm.REG_SP,
			Offset: 0,
		}

	case ssa.OpWasmLoweredAddr:
		if v.Aux == nil {
			// No symbol: plain pointer arithmetic on the argument.
			getValue64(s, v.Args[0])
			i64Const(s, v.AuxInt)
			s.Prog(wasm.AI64Add)
			break
		}
		p := s.Prog(wasm.AGet)
		p.From.Type = obj.TYPE_ADDR
		switch v.Aux.(type) {
		case *obj.LSym:
			ssagen.AddAux(&p.From, v)
		case *ir.Name:
			p.From.Reg = v.Args[0].Reg()
			ssagen.AddAux(&p.From, v)
		default:
			panic("wasm: bad LoweredAddr")
		}

	case ssa.OpWasmLoweredConvert:
		getValue64(s, v.Args[0])

	case ssa.OpWasmSelect:
		getValue64(s, v.Args[0])
		getValue64(s, v.Args[1])
		getValue32(s, v.Args[2])
		s.Prog(v.Op.Asm())

	case ssa.OpWasmI64AddConst:
		getValue64(s, v.Args[0])
		i64Const(s, v.AuxInt)
		s.Prog(v.Op.Asm())

	case ssa.OpWasmI64Const:
		i64Const(s, v.AuxInt)

	case ssa.OpWasmF32Const:
		f32Const(s, v.AuxFloat())

	case ssa.OpWasmF64Const:
		f64Const(s, v.AuxFloat())

	case ssa.OpWasmI64Load8U, ssa.OpWasmI64Load8S, ssa.OpWasmI64Load16U, ssa.OpWasmI64Load16S, ssa.OpWasmI64Load32U, ssa.OpWasmI64Load32S, ssa.OpWasmI64Load, ssa.OpWasmF32Load, ssa.OpWasmF64Load:
		// load addr with a constant offset taken from AuxInt.
		getValue32(s, v.Args[0])
		p := s.Prog(v.Op.Asm())
		p.From = obj.Addr{Type: obj.TYPE_CONST, Offset: v.AuxInt}

	case ssa.OpWasmI64Eqz:
		getValue64(s, v.Args[0])
		s.Prog(v.Op.Asm())
		if extend {
			s.Prog(wasm.AI64ExtendI32U) // comparison result is i32
		}

	case ssa.OpWasmI64Eq, ssa.OpWasmI64Ne, ssa.OpWasmI64LtS, ssa.OpWasmI64LtU, ssa.OpWasmI64GtS, ssa.OpWasmI64GtU, ssa.OpWasmI64LeS, ssa.OpWasmI64LeU, ssa.OpWasmI64GeS, ssa.OpWasmI64GeU,
		ssa.OpWasmF32Eq, ssa.OpWasmF32Ne, ssa.OpWasmF32Lt, ssa.OpWasmF32Gt, ssa.OpWasmF32Le, ssa.OpWasmF32Ge,
		ssa.OpWasmF64Eq, ssa.OpWasmF64Ne, ssa.OpWasmF64Lt, ssa.OpWasmF64Gt, ssa.OpWasmF64Le, ssa.OpWasmF64Ge:
		getValue64(s, v.Args[0])
		getValue64(s, v.Args[1])
		s.Prog(v.Op.Asm())
		if extend {
			s.Prog(wasm.AI64ExtendI32U) // comparison result is i32
		}

	case ssa.OpWasmI64Add, ssa.OpWasmI64Sub, ssa.OpWasmI64Mul, ssa.OpWasmI64DivU, ssa.OpWasmI64RemS, ssa.OpWasmI64RemU, ssa.OpWasmI64And, ssa.OpWasmI64Or, ssa.OpWasmI64Xor, ssa.OpWasmI64Shl, ssa.OpWasmI64ShrS, ssa.OpWasmI64ShrU, ssa.OpWasmI64Rotl,
		ssa.OpWasmF32Add, ssa.OpWasmF32Sub, ssa.OpWasmF32Mul, ssa.OpWasmF32Div, ssa.OpWasmF32Copysign,
		ssa.OpWasmF64Add, ssa.OpWasmF64Sub, ssa.OpWasmF64Mul, ssa.OpWasmF64Div, ssa.OpWasmF64Copysign:
		getValue64(s, v.Args[0])
		getValue64(s, v.Args[1])
		s.Prog(v.Op.Asm())

	case ssa.OpWasmI32Rotl:
		getValue32(s, v.Args[0])
		getValue32(s, v.Args[1])
		s.Prog(wasm.AI32Rotl)
		s.Prog(wasm.AI64ExtendI32U) // result of i32.rotl is i32; widen

	case ssa.OpWasmI64DivS:
		getValue64(s, v.Args[0])
		getValue64(s, v.Args[1])
		if v.Type.Size() == 8 {
			// 64-bit signed division goes through the runtime helper.
			// NOTE(review): presumably to handle MinInt64 / -1 — confirm.
			p := s.Prog(wasm.ACall)
			p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: ir.Syms.WasmDiv}
			break
		}
		s.Prog(wasm.AI64DivS)

	case ssa.OpWasmI64TruncSatF32S, ssa.OpWasmI64TruncSatF64S:
		getValue64(s, v.Args[0])
		s.Prog(v.Op.Asm())

	case ssa.OpWasmI64TruncSatF32U, ssa.OpWasmI64TruncSatF64U:
		getValue64(s, v.Args[0])
		s.Prog(v.Op.Asm())

	case ssa.OpWasmF32DemoteF64:
		getValue64(s, v.Args[0])
		s.Prog(v.Op.Asm())

	case ssa.OpWasmF64PromoteF32:
		getValue64(s, v.Args[0])
		s.Prog(v.Op.Asm())

	case ssa.OpWasmF32ConvertI64S, ssa.OpWasmF32ConvertI64U,
		ssa.OpWasmF64ConvertI64S, ssa.OpWasmF64ConvertI64U,
		ssa.OpWasmI64Extend8S, ssa.OpWasmI64Extend16S, ssa.OpWasmI64Extend32S,
		ssa.OpWasmF32Neg, ssa.OpWasmF32Sqrt, ssa.OpWasmF32Trunc, ssa.OpWasmF32Ceil, ssa.OpWasmF32Floor, ssa.OpWasmF32Nearest, ssa.OpWasmF32Abs,
		ssa.OpWasmF64Neg, ssa.OpWasmF64Sqrt, ssa.OpWasmF64Trunc, ssa.OpWasmF64Ceil, ssa.OpWasmF64Floor, ssa.OpWasmF64Nearest, ssa.OpWasmF64Abs,
		ssa.OpWasmI64Ctz, ssa.OpWasmI64Clz, ssa.OpWasmI64Popcnt:
		getValue64(s, v.Args[0])
		s.Prog(v.Op.Asm())

	case ssa.OpLoadReg:
		// Restore a spilled value from its auto slot.
		p := s.Prog(loadOp(v.Type))
		ssagen.AddrAuto(&p.From, v.Args[0])

	case ssa.OpCopy:
		getValue64(s, v.Args[0])

	default:
		v.Fatalf("unexpected op: %s", v.Op)

	}
}
465
466 func isCmp(v *ssa.Value) bool {
467 switch v.Op {
468 case ssa.OpWasmI64Eqz, ssa.OpWasmI64Eq, ssa.OpWasmI64Ne, ssa.OpWasmI64LtS, ssa.OpWasmI64LtU, ssa.OpWasmI64GtS, ssa.OpWasmI64GtU, ssa.OpWasmI64LeS, ssa.OpWasmI64LeU, ssa.OpWasmI64GeS, ssa.OpWasmI64GeU,
469 ssa.OpWasmF32Eq, ssa.OpWasmF32Ne, ssa.OpWasmF32Lt, ssa.OpWasmF32Gt, ssa.OpWasmF32Le, ssa.OpWasmF32Ge,
470 ssa.OpWasmF64Eq, ssa.OpWasmF64Ne, ssa.OpWasmF64Lt, ssa.OpWasmF64Gt, ssa.OpWasmF64Le, ssa.OpWasmF64Ge:
471 return true
472 default:
473 return false
474 }
475 }
476
// getValue32 pushes v onto the wasm operand stack as an i32. Values kept on
// the wasm stack are generated in place; values in registers are read and
// narrowed from i64 where needed.
func getValue32(s *ssagen.State, v *ssa.Value) {
	if v.OnWasmStack {
		// Consume one deferred on-stack value.
		s.OnWasmStackSkipped--
		ssaGenValueOnStack(s, v, false)
		if !isCmp(v) {
			// Non-comparison values are i64; narrow to i32.
			s.Prog(wasm.AI32WrapI64)
		}
		return
	}

	reg := v.Reg()
	getReg(s, reg)
	if reg != wasm.REG_SP {
		// SP is held as i32 already (see getValue64, which extends it);
		// every other register holds an i64 and must be narrowed.
		s.Prog(wasm.AI32WrapI64)
	}
}
493
// getValue64 pushes v onto the wasm operand stack as an i64.
func getValue64(s *ssagen.State, v *ssa.Value) {
	if v.OnWasmStack {
		// Consume one deferred on-stack value.
		s.OnWasmStackSkipped--
		ssaGenValueOnStack(s, v, true)
		return
	}

	reg := v.Reg()
	getReg(s, reg)
	if reg == wasm.REG_SP {
		s.Prog(wasm.AI64ExtendI32U) // SP is held as i32; widen to i64
	}
}
507
508 func i32Const(s *ssagen.State, val int32) {
509 p := s.Prog(wasm.AI32Const)
510 p.From = obj.Addr{Type: obj.TYPE_CONST, Offset: int64(val)}
511 }
512
513 func i64Const(s *ssagen.State, val int64) {
514 p := s.Prog(wasm.AI64Const)
515 p.From = obj.Addr{Type: obj.TYPE_CONST, Offset: val}
516 }
517
518 func f32Const(s *ssagen.State, val float64) {
519 p := s.Prog(wasm.AF32Const)
520 p.From = obj.Addr{Type: obj.TYPE_FCONST, Val: val}
521 }
522
523 func f64Const(s *ssagen.State, val float64) {
524 p := s.Prog(wasm.AF64Const)
525 p.From = obj.Addr{Type: obj.TYPE_FCONST, Val: val}
526 }
527
528 func getReg(s *ssagen.State, reg int16) {
529 p := s.Prog(wasm.AGet)
530 p.From = obj.Addr{Type: obj.TYPE_REG, Reg: reg}
531 }
532
533 func setReg(s *ssagen.State, reg int16) {
534 p := s.Prog(wasm.ASet)
535 p.To = obj.Addr{Type: obj.TYPE_REG, Reg: reg}
536 }
537
538 func loadOp(t *types.Type) obj.As {
539 if t.IsFloat() {
540 switch t.Size() {
541 case 4:
542 return wasm.AF32Load
543 case 8:
544 return wasm.AF64Load
545 default:
546 panic("bad load type")
547 }
548 }
549
550 switch t.Size() {
551 case 1:
552 if t.IsSigned() {
553 return wasm.AI64Load8S
554 }
555 return wasm.AI64Load8U
556 case 2:
557 if t.IsSigned() {
558 return wasm.AI64Load16S
559 }
560 return wasm.AI64Load16U
561 case 4:
562 if t.IsSigned() {
563 return wasm.AI64Load32S
564 }
565 return wasm.AI64Load32U
566 case 8:
567 return wasm.AI64Load
568 default:
569 panic("bad load type")
570 }
571 }
572
573 func storeOp(t *types.Type) obj.As {
574 if t.IsFloat() {
575 switch t.Size() {
576 case 4:
577 return wasm.AF32Store
578 case 8:
579 return wasm.AF64Store
580 default:
581 panic("bad store type")
582 }
583 }
584
585 switch t.Size() {
586 case 1:
587 return wasm.AI64Store8
588 case 2:
589 return wasm.AI64Store16
590 case 4:
591 return wasm.AI64Store32
592 case 8:
593 return wasm.AI64Store
594 default:
595 panic("bad store type")
596 }
597 }
598