1 // Copyright 2018 The Go Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style
3 // license that can be found in the LICENSE file.
8 "cmd/compile/internal/base"
9 "cmd/compile/internal/ir"
10 "cmd/compile/internal/logopt"
11 "cmd/compile/internal/objw"
12 "cmd/compile/internal/ssa"
13 "cmd/compile/internal/ssagen"
14 "cmd/compile/internal/types"
16 "cmd/internal/obj/wasm"
// Init populates the architecture hook table consumed by the portable
// SSA-to-assembly pass (ssagen) with the wasm-specific implementations
// defined in this file.
// NOTE(review): the chunk is elided — some assignments and the closing
// brace fall between the visible lines; confirm against the full file.
20 func Init(arch *ssagen.ArchInfo) {
21 arch.LinkArch = &wasm.Linkwasm
22 arch.REGSP = wasm.REG_SP
// Effectively "no limit": wasm has no short-offset addressing constraint.
23 arch.MAXWIDTH = 1 << 50
25 arch.ZeroRange = zeroRange
26 arch.Ginsnop = ginsnop
// A plain nop also serves as the after-defer nop on wasm.
27 arch.Ginsnopdefer = ginsnop
29 arch.SSAMarkMoves = ssaMarkMoves
30 arch.SSAGenValue = ssaGenValue
31 arch.SSAGenBlock = ssaGenBlock
// zeroRange emits code that zeroes cnt bytes of the stack frame starting
// at offset off from SP, appending the new instructions after p and
// returning the last one. It writes one i64 zero per 8-byte word:
// push SP, push constant 0, then an I64Store at offset off+i.
// NOTE(review): lines are elided here — the early-return for cnt == 0
// and the alignment guard around the Fatalf below are not fully visible.
34 func zeroRange(pp *objw.Progs, p *obj.Prog, off, cnt int64, state *uint32) *obj.Prog {
// Only whole pointer-sized words can be zeroed by the loop below.
39 base.Fatalf("zerorange count not a multiple of widthptr %d", cnt)
42 for i := int64(0); i < cnt; i += 8 {
43 p = pp.Append(p, wasm.AGet, obj.TYPE_REG, wasm.REG_SP, 0, 0, 0, 0)
44 p = pp.Append(p, wasm.AI64Const, obj.TYPE_CONST, 0, 0, 0, 0, 0)
45 p = pp.Append(p, wasm.AI64Store, 0, 0, 0, obj.TYPE_CONST, 0, off+i)
// ginsnop emits a single wasm Nop instruction and returns it.
51 func ginsnop(pp *objw.Progs) *obj.Prog {
52 return pp.Prog(wasm.ANop)
// ssaMarkMoves is the SSAMarkMoves hook for wasm. Its body is elided in
// this chunk; presumably it is a no-op since wasm has no flags register
// whose liveness would constrain constant moves — TODO confirm.
55 func ssaMarkMoves(s *ssagen.State, b *ssa.Block) {
// ssaGenBlock emits the control-flow code that ends block b. next is the
// block laid out immediately after b, so a branch to next can be elided
// as a fallthrough.
// NOTE(review): the switch on b.Kind and several case labels fall in the
// elided lines between the visible statements; the section comments
// below describe only what is shown.
58 func ssaGenBlock(s *ssagen.State, b, next *ssa.Block) {
// Unconditional successor: jump only if we cannot fall through.
61 if next != b.Succs[0].Block() {
62 s.Br(obj.AJMP, b.Succs[0].Block())
// Conditional block: the branch shape depends on which successor (if
// any) is the fallthrough block. The condition is pushed as an i32.
67 case b.Succs[0].Block():
68 // if false, jump to b.Succs[1]
69 getValue32(s, b.Controls[0])
72 s.Br(obj.AJMP, b.Succs[1].Block())
74 case b.Succs[1].Block():
75 // if true, jump to b.Succs[0]
76 getValue32(s, b.Controls[0])
78 s.Br(obj.AJMP, b.Succs[0].Block())
81 // if true, jump to b.Succs[0], else jump to b.Succs[1]
82 getValue32(s, b.Controls[0])
84 s.Br(obj.AJMP, b.Succs[0].Block())
86 s.Br(obj.AJMP, b.Succs[1].Block())
// Retjmp-style exit: the jump target is the external symbol carried in
// b.Aux (the instruction p is created in an elided line — confirm).
94 p.To.Type = obj.TYPE_MEM
95 p.To.Name = obj.NAME_EXTERN
96 p.To.Sym = b.Aux.(*obj.LSym)
// Read the first result register; looks like the deferred-call result
// check of a BlockDefer (the comparison itself is elided — confirm).
101 p := s.Prog(wasm.AGet)
102 p.From = obj.Addr{Type: obj.TYPE_REG, Reg: wasm.REG_RET0}
106 s.Br(obj.AJMP, b.Succs[1].Block())
108 if next != b.Succs[0].Block() {
109 s.Br(obj.AJMP, b.Succs[0].Block())
// Unknown block kind is a compiler bug.
113 panic("unexpected block")
116 // Entry point for the next block. Used by the JMP in goToBlock.
117 s.Prog(wasm.ARESUMEPOINT)
// Sanity check: every value deferred for on-stack generation must have
// been consumed by the end of the block.
119 if s.OnWasmStackSkipped != 0 {
120 panic("wasm: bad stack")
// ssaGenValue emits wasm code for a single SSA value v. Values that
// cannot be produced directly on the wasm operand stack are handled
// here; everything else is delegated to ssaGenValueOnStack at the end.
// NOTE(review): the enclosing switch statement and parts of several case
// bodies are elided between the visible lines.
124 func ssaGenValue(s *ssagen.State, v *ssa.Value) {
126 case ssa.OpWasmLoweredStaticCall, ssa.OpWasmLoweredClosureCall, ssa.OpWasmLoweredInterCall:
127 s.PrepareCall(v)
128 if call, ok := v.Aux.(*ssa.AuxCall); ok && call.Fn == ir.Syms.Deferreturn {
129 // add a resume point before call to deferreturn so it can be called again via jmpdefer
130 s.Prog(wasm.ARESUMEPOINT)
// Closure calls pass the closure pointer in the context register.
132 if v.Op == ssa.OpWasmLoweredClosureCall {
133 getValue64(s, v.Args[1])
134 setReg(s, wasm.REG_CTXT)
// Direct call: target symbol is known from the AuxCall.
136 if call, ok := v.Aux.(*ssa.AuxCall); ok && call.Fn != nil {
138 p := s.Prog(obj.ACALL)
139 p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: sym}
// Indirect call: push the function address, then call with no target.
142 getValue64(s, v.Args[0])
143 p := s.Prog(obj.ACALL)
144 p.To = obj.Addr{Type: obj.TYPE_NONE}
// Bulk copy: call the runtime's wasmMove helper with (dst, src, n),
// all passed as 32-bit values on the wasm stack.
148 case ssa.OpWasmLoweredMove:
149 getValue32(s, v.Args[0])
150 getValue32(s, v.Args[1])
151 i32Const(s, int32(v.AuxInt))
152 p := s.Prog(wasm.ACall)
153 p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: ir.Syms.WasmMove}
// Bulk zero: call wasmZero with (ptr, n).
155 case ssa.OpWasmLoweredZero:
156 getValue32(s, v.Args[0])
157 i32Const(s, int32(v.AuxInt))
158 p := s.Prog(wasm.ACall)
159 p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: ir.Syms.WasmZero}
// Nil check: push the pointer; the test-and-branch between these lines
// is elided — on the nil path sigPanic is called (no resume point).
161 case ssa.OpWasmLoweredNilCheck:
162 getValue64(s, v.Args[0])
165 p := s.Prog(wasm.ACALLNORESUME)
166 p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: ir.Syms.SigPanic}
168 if logopt.Enabled() {
169 logopt.LogOpt(v.Pos, "nilcheck", "genssa", v.Block.Func.Name)
171 if base.Debug.Nil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers
172 base.WarnfAt(v.Pos, "generated nil check")
// Write barrier: call the barrier function recorded in v.Aux with the
// destination and value pushed as i64.
175 case ssa.OpWasmLoweredWB:
176 getValue64(s, v.Args[0])
177 getValue64(s, v.Args[1])
178 p := s.Prog(wasm.ACALLNORESUME) // TODO(neelance): If possible, turn this into a simple wasm.ACall).
179 p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: v.Aux.(*obj.LSym)}
// Stores: push the 32-bit address, then the 64-bit value; the static
// offset travels in the instruction's constant operand (AuxInt).
181 case ssa.OpWasmI64Store8, ssa.OpWasmI64Store16, ssa.OpWasmI64Store32, ssa.OpWasmI64Store, ssa.OpWasmF32Store, ssa.OpWasmF64Store:
182 getValue32(s, v.Args[0])
183 getValue64(s, v.Args[1])
184 p := s.Prog(v.Op.Asm())
185 p.To = obj.Addr{Type: obj.TYPE_CONST, Offset: v.AuxInt}
// Spill to an automatic slot in the frame, addressed off SP.
188 getReg(s, wasm.REG_SP)
189 getValue64(s, v.Args[0])
190 p := s.Prog(storeOp(v.Type))
191 ssagen.AddrAuto(&p.To, v)
193 case ssa.OpClobber, ssa.OpClobberReg:
194 // TODO: implement for clobberdead experiment. Nop is ok for now.
// Default case (elided label): memory pseudo-values generate no code.
197 if v.Type.IsMemory() {
// Defer generation of stack-scheduled values until their use site.
201 s.OnWasmStackSkipped++
202 // If a Value is marked OnWasmStack, we don't generate the value and store it to a register now.
203 // Instead, we delay the generation to when the value is used and then directly generate it on the WebAssembly stack.
206 ssaGenValueOnStack(s, v, true)
// Generating the value must have consumed all pending stack values.
207 if s.OnWasmStackSkipped != 0 {
208 panic("wasm: bad stack")
// ssaGenValueOnStack generates v directly onto the wasm operand stack
// rather than into a register. extend controls whether a 32-bit result
// (as produced by the comparison ops) is widened back to i64 with
// I64ExtendI32U; callers that want an i32 pass extend=false.
// NOTE(review): elided chunk — the enclosing switch and the tail of many
// case bodies (e.g. the trailing setReg/Asm emissions) are not visible.
214 func ssaGenValueOnStack(s *ssagen.State, v *ssa.Value, extend bool) {
215 switch v.Op {
216 case ssa.OpWasmLoweredGetClosurePtr:
217 getReg(s, wasm.REG_CTXT)
219 case ssa.OpWasmLoweredGetCallerPC:
220 p := s.Prog(wasm.AI64Load)
221 // Caller PC is stored 8 bytes below first parameter.
224 Name: obj.NAME_PARAM,
228 case ssa.OpWasmLoweredGetCallerSP:
229 p := s.Prog(wasm.AGet)
230 // Caller SP is the address of the first parameter.
233 Name: obj.NAME_PARAM,
238 case ssa.OpWasmLoweredAddr:
239 if v.Aux == nil { // address of off(SP), no symbol
240 getValue64(s, v.Args[0])
241 i64Const(s, v.AuxInt)
// Symbolic address: materialize with Get + TYPE_ADDR, dispatching on
// the aux kind (case labels elided) to fill in symbol or register.
245 p := s.Prog(wasm.AGet)
246 p.From.Type = obj.TYPE_ADDR
247 switch v.Aux.(type) {
249 ssagen.AddAux(&p.From, v)
251 p.From.Reg = v.Args[0].Reg()
252 ssagen.AddAux(&p.From, v)
254 panic("wasm: bad LoweredAddr")
// Convert is a no-op at the wasm level: just reproduce the argument.
257 case ssa.OpWasmLoweredConvert:
258 getValue64(s, v.Args[0])
// Select(a, b, cond): two i64 operands and an i32 condition.
260 case ssa.OpWasmSelect:
261 getValue64(s, v.Args[0])
262 getValue64(s, v.Args[1])
263 getValue32(s, v.Args[2])
266 case ssa.OpWasmI64AddConst:
267 getValue64(s, v.Args[0])
268 i64Const(s, v.AuxInt)
271 case ssa.OpWasmI64Const:
272 i64Const(s, v.AuxInt)
274 case ssa.OpWasmF32Const:
275 f32Const(s, v.AuxFloat())
277 case ssa.OpWasmF64Const:
278 f64Const(s, v.AuxFloat())
// Loads: 32-bit address on the stack, static offset in the instruction.
280 case ssa.OpWasmI64Load8U, ssa.OpWasmI64Load8S, ssa.OpWasmI64Load16U, ssa.OpWasmI64Load16S, ssa.OpWasmI64Load32U, ssa.OpWasmI64Load32S, ssa.OpWasmI64Load, ssa.OpWasmF32Load, ssa.OpWasmF64Load:
281 getValue32(s, v.Args[0])
282 p := s.Prog(v.Op.Asm())
283 p.From = obj.Addr{Type: obj.TYPE_CONST, Offset: v.AuxInt}
285 case ssa.OpWasmI64Eqz:
286 getValue64(s, v.Args[0])
// Comparisons leave an i32; widen only when the consumer wants i64.
289 s.Prog(wasm.AI64ExtendI32U)
292 case ssa.OpWasmI64Eq, ssa.OpWasmI64Ne, ssa.OpWasmI64LtS, ssa.OpWasmI64LtU, ssa.OpWasmI64GtS, ssa.OpWasmI64GtU, ssa.OpWasmI64LeS, ssa.OpWasmI64LeU, ssa.OpWasmI64GeS, ssa.OpWasmI64GeU,
293 ssa.OpWasmF32Eq, ssa.OpWasmF32Ne, ssa.OpWasmF32Lt, ssa.OpWasmF32Gt, ssa.OpWasmF32Le, ssa.OpWasmF32Ge,
294 ssa.OpWasmF64Eq, ssa.OpWasmF64Ne, ssa.OpWasmF64Lt, ssa.OpWasmF64Gt, ssa.OpWasmF64Le, ssa.OpWasmF64Ge:
295 getValue64(s, v.Args[0])
296 getValue64(s, v.Args[1])
299 s.Prog(wasm.AI64ExtendI32U)
// Plain binary ops: both operands pushed, then the op itself (emission
// of v.Op.Asm() falls in an elided line).
302 case ssa.OpWasmI64Add, ssa.OpWasmI64Sub, ssa.OpWasmI64Mul, ssa.OpWasmI64DivU, ssa.OpWasmI64RemS, ssa.OpWasmI64RemU, ssa.OpWasmI64And, ssa.OpWasmI64Or, ssa.OpWasmI64Xor, ssa.OpWasmI64Shl, ssa.OpWasmI64ShrS, ssa.OpWasmI64ShrU, ssa.OpWasmI64Rotl,
303 ssa.OpWasmF32Add, ssa.OpWasmF32Sub, ssa.OpWasmF32Mul, ssa.OpWasmF32Div, ssa.OpWasmF32Copysign,
304 ssa.OpWasmF64Add, ssa.OpWasmF64Sub, ssa.OpWasmF64Mul, ssa.OpWasmF64Div, ssa.OpWasmF64Copysign:
305 getValue64(s, v.Args[0])
306 getValue64(s, v.Args[1])
// 32-bit rotate runs in i32 and is widened back to i64.
309 case ssa.OpWasmI32Rotl:
310 getValue32(s, v.Args[0])
311 getValue32(s, v.Args[1])
312 s.Prog(wasm.AI32Rotl)
313 s.Prog(wasm.AI64ExtendI32U)
315 case ssa.OpWasmI64DivS:
316 getValue64(s, v.Args[0])
317 getValue64(s, v.Args[1])
318 if v.Type.Size() == 8 {
319 // Division of int64 needs helper function wasmDiv to handle the MinInt64 / -1 case.
320 p := s.Prog(wasm.ACall)
321 p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: ir.Syms.WasmDiv}
324 s.Prog(wasm.AI64DivS)
// Saturating float->int truncation: use the native instruction when the
// target supports it (GOWASM satconv), else call a runtime helper.
326 case ssa.OpWasmI64TruncSatF32S, ssa.OpWasmI64TruncSatF64S:
327 getValue64(s, v.Args[0])
328 if buildcfg.GOWASM.SatConv {
// Helper takes f64; promote f32 input first.
331 if v.Op == ssa.OpWasmI64TruncSatF32S {
332 s.Prog(wasm.AF64PromoteF32)
334 p := s.Prog(wasm.ACall)
335 p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: ir.Syms.WasmTruncS}
338 case ssa.OpWasmI64TruncSatF32U, ssa.OpWasmI64TruncSatF64U:
339 getValue64(s, v.Args[0])
340 if buildcfg.GOWASM.SatConv {
343 if v.Op == ssa.OpWasmI64TruncSatF32U {
344 s.Prog(wasm.AF64PromoteF32)
346 p := s.Prog(wasm.ACall)
347 p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: ir.Syms.WasmTruncU}
350 case ssa.OpWasmF32DemoteF64:
351 getValue64(s, v.Args[0])
354 case ssa.OpWasmF64PromoteF32:
355 getValue64(s, v.Args[0])
// Simple unary ops: operand then v.Op.Asm() (emission elided).
358 case ssa.OpWasmF32ConvertI64S, ssa.OpWasmF32ConvertI64U,
359 ssa.OpWasmF64ConvertI64S, ssa.OpWasmF64ConvertI64U,
360 ssa.OpWasmI64Extend8S, ssa.OpWasmI64Extend16S, ssa.OpWasmI64Extend32S,
361 ssa.OpWasmF32Neg, ssa.OpWasmF32Sqrt, ssa.OpWasmF32Trunc, ssa.OpWasmF32Ceil, ssa.OpWasmF32Floor, ssa.OpWasmF32Nearest, ssa.OpWasmF32Abs,
362 ssa.OpWasmF64Neg, ssa.OpWasmF64Sqrt, ssa.OpWasmF64Trunc, ssa.OpWasmF64Ceil, ssa.OpWasmF64Floor, ssa.OpWasmF64Nearest, ssa.OpWasmF64Abs,
363 ssa.OpWasmI64Ctz, ssa.OpWasmI64Clz, ssa.OpWasmI64Popcnt:
364 getValue64(s, v.Args[0])
// Reload from a frame slot (case label elided — presumably LoadReg).
368 p := s.Prog(loadOp(v.Type))
369 ssagen.AddrAuto(&p.From, v.Args[0])
372 getValue64(s, v.Args[0])
375 v.Fatalf("unexpected op: %s", v.Op)
// isCmp reports whether v is one of the wasm comparison ops, which leave
// an i32 (rather than i64) result on the operand stack — callers use
// this to decide whether widening/wrapping conversions are needed.
380 func isCmp(v *ssa.Value) bool {
382 case ssa.OpWasmI64Eqz, ssa.OpWasmI64Eq, ssa.OpWasmI64Ne, ssa.OpWasmI64LtS, ssa.OpWasmI64LtU, ssa.OpWasmI64GtS, ssa.OpWasmI64GtU, ssa.OpWasmI64LeS, ssa.OpWasmI64LeU, ssa.OpWasmI64GeS, ssa.OpWasmI64GeU,
383 ssa.OpWasmF32Eq, ssa.OpWasmF32Ne, ssa.OpWasmF32Lt, ssa.OpWasmF32Gt, ssa.OpWasmF32Le, ssa.OpWasmF32Ge,
384 ssa.OpWasmF64Eq, ssa.OpWasmF64Ne, ssa.OpWasmF64Lt, ssa.OpWasmF64Gt, ssa.OpWasmF64Le, ssa.OpWasmF64Ge:
// getValue32 pushes v onto the wasm operand stack as an i32.
// NOTE(review): the branch structure is elided — the visible lines show
// the on-stack path (generate directly, no extend, then wrap to i32)
// and the register path (wrap unless the register is SP, which already
// holds a 32-bit value per getValue64's symmetric handling).
391 func getValue32(s *ssagen.State, v *ssa.Value) {
// This value was deferred for on-stack generation; consume the marker.
393 s.OnWasmStackSkipped--
394 ssaGenValueOnStack(s, v, false)
396 s.Prog(wasm.AI32WrapI64)
403 if reg != wasm.REG_SP {
404 s.Prog(wasm.AI32WrapI64)
// getValue64 pushes v onto the wasm operand stack as an i64.
// NOTE(review): elided branch structure — visible: on-stack values are
// generated directly with extend=true; register values read from SP
// (a 32-bit register) are zero-extended to i64.
408 func getValue64(s *ssagen.State, v *ssa.Value) {
410 s.OnWasmStackSkipped--
411 ssaGenValueOnStack(s, v, true)
417 if reg == wasm.REG_SP {
418 s.Prog(wasm.AI64ExtendI32U)
// i32Const pushes the 32-bit constant val onto the wasm operand stack.
422 func i32Const(s *ssagen.State, val int32) {
423 p := s.Prog(wasm.AI32Const)
424 p.From = obj.Addr{Type: obj.TYPE_CONST, Offset: int64(val)}
// i64Const pushes the 64-bit constant val onto the wasm operand stack.
427 func i64Const(s *ssagen.State, val int64) {
428 p := s.Prog(wasm.AI64Const)
429 p.From = obj.Addr{Type: obj.TYPE_CONST, Offset: val}
// f32Const pushes val onto the wasm operand stack as an f32 constant.
// (val arrives as float64 because obj.Addr stores float constants that way.)
432 func f32Const(s *ssagen.State, val float64) {
433 p := s.Prog(wasm.AF32Const)
434 p.From = obj.Addr{Type: obj.TYPE_FCONST, Val: val}
// f64Const pushes val onto the wasm operand stack as an f64 constant.
437 func f64Const(s *ssagen.State, val float64) {
438 p := s.Prog(wasm.AF64Const)
439 p.From = obj.Addr{Type: obj.TYPE_FCONST, Val: val}
// getReg pushes the contents of register reg onto the wasm operand stack
// (wasm "get" instruction).
442 func getReg(s *ssagen.State, reg int16) {
443 p := s.Prog(wasm.AGet)
444 p.From = obj.Addr{Type: obj.TYPE_REG, Reg: reg}
// setReg pops the top of the wasm operand stack into register reg
// (wasm "set" instruction).
447 func setReg(s *ssagen.State, reg int16) {
448 p := s.Prog(wasm.ASet)
449 p.To = obj.Addr{Type: obj.TYPE_REG, Reg: reg}
// loadOp returns the wasm load instruction appropriate for a value of
// type t, selecting on float vs. integer, size, and signedness.
// NOTE(review): the switch scaffolding (float cases, size dispatch, and
// the signedness conditionals) is elided between the visible lines.
452 func loadOp(t *types.Type) obj.As {
// Unsupported float width.
460 panic("bad load type")
467 return wasm.AI64Load8S
469 return wasm.AI64Load8U
472 return wasm.AI64Load16S
474 return wasm.AI64Load16U
477 return wasm.AI64Load32S
479 return wasm.AI64Load32U
// Unsupported integer width.
483 panic("bad load type")
// storeOp returns the wasm store instruction appropriate for a value of
// type t; stores need no signedness distinction, only a width.
// NOTE(review): the switch scaffolding is elided between visible lines.
487 func storeOp(t *types.Type) obj.As {
491 return wasm.AF32Store
493 return wasm.AF64Store
// Unsupported float width.
495 panic("bad store type")
501 return wasm.AI64Store8
503 return wasm.AI64Store16
505 return wasm.AI64Store32
507 return wasm.AI64Store
// Unsupported integer width.
509 panic("bad store type")