Source file src/cmd/compile/internal/wasm/ssa.go
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package wasm

import (
	"cmd/compile/internal/gc"
	"cmd/compile/internal/ssa"
	"cmd/compile/internal/types"
	"cmd/internal/obj"
	"cmd/internal/obj/wasm"
	"cmd/internal/objabi"
)

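// Init registers the WebAssembly backend with the gc compiler: the link
// architecture, the stack pointer register, and the hooks used during
// SSA code generation.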
func Init(arch *gc.Arch) {
	arch.LinkArch = &wasm.Linkwasm
	arch.REGSP = wasm.REG_SP
	arch.MAXWIDTH = 1 << 50

	arch.ZeroRange = zeroRange
	arch.ZeroAuto = zeroAuto
	arch.Ginsnop = ginsnop
	arch.Ginsnopdefer = ginsnop

	arch.SSAMarkMoves = ssaMarkMoves
	arch.SSAGenValue = ssaGenValue
	arch.SSAGenBlock = ssaGenBlock
}

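// zeroRange zeroes a range of stack memory, 8 bytes at a time: for each
// word it pushes SP, pushes an i64 constant 0, and stores it at the
// given offset.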
func zeroRange(pp *gc.Progs, p *obj.Prog, off, cnt int64, state *uint32) *obj.Prog {
	if cnt == 0 {
		return p
	}
	if cnt%8 != 0 {
		gc.Fatalf("zerorange count not a multiple of widthptr %d", cnt)
	}

	for i := int64(0); i < cnt; i += 8 {
		p = pp.Appendpp(p, wasm.AGet, obj.TYPE_REG, wasm.REG_SP, 0, 0, 0, 0)
		p = pp.Appendpp(p, wasm.AI64Const, obj.TYPE_CONST, 0, 0, 0, 0, 0)
		p = pp.Appendpp(p, wasm.AI64Store, 0, 0, 0, obj.TYPE_CONST, 0, off+i)
	}

	return p
}

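// zeroAuto zeroes an auto variable on the stack, 8 bytes at a time, using
// the same Get SP / I64Const 0 / I64Store pattern as zeroRange.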
func zeroAuto(pp *gc.Progs, n *gc.Node) {
	sym := n.Sym.Linksym()
	size := n.Type.Size()
	for i := int64(0); i < size; i += 8 {
		p := pp.Prog(wasm.AGet)
		p.From = obj.Addr{Type: obj.TYPE_REG, Reg: wasm.REG_SP}

		p = pp.Prog(wasm.AI64Const)
		p.From = obj.Addr{Type: obj.TYPE_CONST, Offset: 0}

		p = pp.Prog(wasm.AI64Store)
		p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_AUTO, Offset: n.Xoffset + i, Sym: sym}
	}
}

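// ginsnop emits a no-op instruction.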
func ginsnop(pp *gc.Progs) *obj.Prog {
	return pp.Prog(wasm.ANop)
}

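// ssaMarkMoves marks moves that must avoid clobbering flags; Wasm has no
// flags register, so there is nothing to do.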
func ssaMarkMoves(s *gc.SSAGenState, b *ssa.Block) {
}

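// ssaGenBlock generates the control flow at the end of an SSA block:
// unconditional branches for plain blocks, an i32.eqz + if/end wrapper
// for conditional blocks, and returns for exit blocks.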
func ssaGenBlock(s *gc.SSAGenState, b, next *ssa.Block) {
	goToBlock := func(block *ssa.Block, canFallthrough bool) {
		if canFallthrough && block == next {
			// The target is this block's fallthrough successor; no jump needed.
			return
		}
		s.Br(obj.AJMP, block)
	}

	switch b.Kind {
	case ssa.BlockPlain:
		goToBlock(b.Succs[0].Block(), true)

	case ssa.BlockIf:
		getValue32(s, b.Control)
		// If the control value is zero, branch to the "false" successor.
		s.Prog(wasm.AI32Eqz)
		s.Prog(wasm.AIf)
		goToBlock(b.Succs[1].Block(), false)
		s.Prog(wasm.AEnd)
		goToBlock(b.Succs[0].Block(), true)

	case ssa.BlockRet:
		s.Prog(obj.ARET)

	case ssa.BlockRetJmp:
		p := s.Prog(obj.ARET)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = b.Aux.(*obj.LSym)

	case ssa.BlockExit:

	case ssa.BlockDefer:
		// The deferred call reports its result in RET0: zero means
		// continue normally, non-zero means branch to the exception
		// path in Succs[1].
		p := s.Prog(wasm.AGet)
		p.From = obj.Addr{Type: obj.TYPE_REG, Reg: wasm.REG_RET0}
		s.Prog(wasm.AI64Eqz)
		s.Prog(wasm.AI32Eqz)
		s.Prog(wasm.AIf)
		goToBlock(b.Succs[1].Block(), false)
		s.Prog(wasm.AEnd)
		goToBlock(b.Succs[0].Block(), true)

	default:
		panic("unexpected block")
	}

	// Entry point for the next block. Used by the JMP in goToBlock.
	s.Prog(wasm.ARESUMEPOINT)

	if s.OnWasmStackSkipped != 0 {
		panic("wasm: bad stack")
	}
}

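// ssaGenValue generates Wasm instructions for SSA values with side
// effects: calls, moves, zeroing, nil checks, write barriers, and stores.
// All other values are generated by ssaGenValueOnStack and, unless they
// stay on the Wasm operand stack, stored to their assigned register.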
func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
	switch v.Op {
	case ssa.OpWasmLoweredStaticCall, ssa.OpWasmLoweredClosureCall, ssa.OpWasmLoweredInterCall:
		s.PrepareCall(v)
		if v.Aux == gc.Deferreturn {
			// Add a resume point before the call to deferreturn
			// so it can be called again via jmpdefer.
			s.Prog(wasm.ARESUMEPOINT)
		}
		if v.Op == ssa.OpWasmLoweredClosureCall {
			getValue64(s, v.Args[1])
			setReg(s, wasm.REG_CTXT)
		}
		if sym, ok := v.Aux.(*obj.LSym); ok {
			p := s.Prog(obj.ACALL)
			p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: sym}
			p.Pos = v.Pos
		} else {
			getValue64(s, v.Args[0])
			p := s.Prog(obj.ACALL)
			p.To = obj.Addr{Type: obj.TYPE_NONE}
			p.Pos = v.Pos
		}

	case ssa.OpWasmLoweredMove:
		getValue32(s, v.Args[0])
		getValue32(s, v.Args[1])
		i32Const(s, int32(v.AuxInt))
		p := s.Prog(wasm.ACall)
		p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: gc.WasmMove}

	case ssa.OpWasmLoweredZero:
		getValue32(s, v.Args[0])
		i32Const(s, int32(v.AuxInt))
		p := s.Prog(wasm.ACall)
		p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: gc.WasmZero}

	case ssa.OpWasmLoweredNilCheck:
		getValue64(s, v.Args[0])
		s.Prog(wasm.AI64Eqz)
		s.Prog(wasm.AIf)
		p := s.Prog(wasm.ACALLNORESUME)
		p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: gc.SigPanic}
		s.Prog(wasm.AEnd)
		if gc.Debug_checknil != 0 && v.Pos.Line() > 1 {
			gc.Warnl(v.Pos, "generated nil check")
		}

	case ssa.OpWasmLoweredWB:
		getValue64(s, v.Args[0])
		getValue64(s, v.Args[1])
		p := s.Prog(wasm.ACALLNORESUME)
		p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: v.Aux.(*obj.LSym)}

	case ssa.OpWasmI64Store8, ssa.OpWasmI64Store16, ssa.OpWasmI64Store32, ssa.OpWasmI64Store, ssa.OpWasmF32Store, ssa.OpWasmF64Store:
		getValue32(s, v.Args[0])
		getValue64(s, v.Args[1])
		if v.Op == ssa.OpWasmF32Store {
			s.Prog(wasm.AF32DemoteF64)
		}
		p := s.Prog(v.Op.Asm())
		p.To = obj.Addr{Type: obj.TYPE_CONST, Offset: v.AuxInt}

	case ssa.OpStoreReg:
		getReg(s, wasm.REG_SP)
		getValue64(s, v.Args[0])
		if v.Type.Etype == types.TFLOAT32 {
			s.Prog(wasm.AF32DemoteF64)
		}
		p := s.Prog(storeOp(v.Type))
		gc.AddrAuto(&p.To, v)

	default:
		if v.Type.IsMemory() {
			return
		}
		if v.OnWasmStack {
			s.OnWasmStackSkipped++
			// If a Value is marked OnWasmStack, we don't generate it here.
			// Instead, generation is delayed to the point of use, where the
			// value is produced directly on the WebAssembly stack.
			return
		}
		ssaGenValueOnStack(s, v)
		if s.OnWasmStackSkipped != 0 {
			panic("wasm: bad stack")
		}
		setReg(s, v.Reg())
	}
}

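// ssaGenValueOnStack generates a value and leaves the result on the
// WebAssembly operand stack instead of storing it to a register.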
func ssaGenValueOnStack(s *gc.SSAGenState, v *ssa.Value) {
	switch v.Op {
	case ssa.OpWasmLoweredGetClosurePtr:
		getReg(s, wasm.REG_CTXT)

	case ssa.OpWasmLoweredGetCallerPC:
		p := s.Prog(wasm.AI64Load)
		// The caller PC is stored 8 bytes below the first parameter.
		p.From = obj.Addr{
			Type:   obj.TYPE_MEM,
			Name:   obj.NAME_PARAM,
			Offset: -8,
		}

	case ssa.OpWasmLoweredGetCallerSP:
		p := s.Prog(wasm.AGet)
		// The caller SP is the address of the first parameter.
		p.From = obj.Addr{
			Type:   obj.TYPE_ADDR,
			Name:   obj.NAME_PARAM,
			Reg:    wasm.REG_SP,
			Offset: 0,
		}

	case ssa.OpWasmLoweredAddr:
		p := s.Prog(wasm.AGet)
		p.From.Type = obj.TYPE_ADDR
		switch v.Aux.(type) {
		case *obj.LSym:
			gc.AddAux(&p.From, v)
		case *gc.Node:
			p.From.Reg = v.Args[0].Reg()
			gc.AddAux(&p.From, v)
		default:
			panic("wasm: bad LoweredAddr")
		}

	case ssa.OpWasmLoweredRound32F:
		getValue64(s, v.Args[0])
		s.Prog(wasm.AF32DemoteF64)
		s.Prog(wasm.AF64PromoteF32)

	case ssa.OpWasmLoweredConvert:
		getValue64(s, v.Args[0])

	case ssa.OpWasmSelect:
		getValue64(s, v.Args[0])
		getValue64(s, v.Args[1])
		getValue64(s, v.Args[2])
		// The select instruction takes its condition as an i32.
		s.Prog(wasm.AI32WrapI64)
		s.Prog(v.Op.Asm())

	case ssa.OpWasmI64AddConst:
		getValue64(s, v.Args[0])
		i64Const(s, v.AuxInt)
		s.Prog(v.Op.Asm())

	case ssa.OpWasmI64Const:
		i64Const(s, v.AuxInt)

	case ssa.OpWasmF64Const:
		f64Const(s, v.AuxFloat())

	case ssa.OpWasmI64Load8U, ssa.OpWasmI64Load8S, ssa.OpWasmI64Load16U, ssa.OpWasmI64Load16S, ssa.OpWasmI64Load32U, ssa.OpWasmI64Load32S, ssa.OpWasmI64Load, ssa.OpWasmF32Load, ssa.OpWasmF64Load:
		getValue32(s, v.Args[0])
		p := s.Prog(v.Op.Asm())
		p.From = obj.Addr{Type: obj.TYPE_CONST, Offset: v.AuxInt}
		if v.Op == ssa.OpWasmF32Load {
			s.Prog(wasm.AF64PromoteF32)
		}

	case ssa.OpWasmI64Eqz:
		getValue64(s, v.Args[0])
		s.Prog(v.Op.Asm())
		s.Prog(wasm.AI64ExtendI32U)

	case ssa.OpWasmI64Eq, ssa.OpWasmI64Ne, ssa.OpWasmI64LtS, ssa.OpWasmI64LtU, ssa.OpWasmI64GtS, ssa.OpWasmI64GtU, ssa.OpWasmI64LeS, ssa.OpWasmI64LeU, ssa.OpWasmI64GeS, ssa.OpWasmI64GeU, ssa.OpWasmF64Eq, ssa.OpWasmF64Ne, ssa.OpWasmF64Lt, ssa.OpWasmF64Gt, ssa.OpWasmF64Le, ssa.OpWasmF64Ge:
		getValue64(s, v.Args[0])
		getValue64(s, v.Args[1])
		// Comparison results are i32; extend to i64 for the register model.
		s.Prog(v.Op.Asm())
		s.Prog(wasm.AI64ExtendI32U)

	case ssa.OpWasmI64Add, ssa.OpWasmI64Sub, ssa.OpWasmI64Mul, ssa.OpWasmI64DivU, ssa.OpWasmI64RemS, ssa.OpWasmI64RemU, ssa.OpWasmI64And, ssa.OpWasmI64Or, ssa.OpWasmI64Xor, ssa.OpWasmI64Shl, ssa.OpWasmI64ShrS, ssa.OpWasmI64ShrU, ssa.OpWasmF64Add, ssa.OpWasmF64Sub, ssa.OpWasmF64Mul, ssa.OpWasmF64Div, ssa.OpWasmF64Copysign, ssa.OpWasmI64Rotl:
		getValue64(s, v.Args[0])
		getValue64(s, v.Args[1])
		s.Prog(v.Op.Asm())

	case ssa.OpWasmI64DivS:
		getValue64(s, v.Args[0])
		getValue64(s, v.Args[1])
		if v.Type.Size() == 8 {
			// Division of int64 needs the helper function wasmDiv
			// to handle the MinInt64 / -1 case.
			p := s.Prog(wasm.ACall)
			p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: gc.WasmDiv}
			break
		}
		s.Prog(wasm.AI64DivS)

	case ssa.OpWasmI64TruncSatF64S:
		getValue64(s, v.Args[0])
		if objabi.GOWASM.SatConv {
			s.Prog(v.Op.Asm())
		} else {
			// Saturating conversions are unavailable; call the helper instead.
			p := s.Prog(wasm.ACall)
			p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: gc.WasmTruncS}
		}

	case ssa.OpWasmI64TruncSatF64U:
		getValue64(s, v.Args[0])
		if objabi.GOWASM.SatConv {
			s.Prog(v.Op.Asm())
		} else {
			p := s.Prog(wasm.ACall)
			p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: gc.WasmTruncU}
		}

	case
		ssa.OpWasmF64Neg, ssa.OpWasmF64ConvertI64S, ssa.OpWasmF64ConvertI64U,
		ssa.OpWasmI64Extend8S, ssa.OpWasmI64Extend16S, ssa.OpWasmI64Extend32S,
		ssa.OpWasmF64Sqrt, ssa.OpWasmF64Trunc, ssa.OpWasmF64Ceil, ssa.OpWasmF64Floor, ssa.OpWasmF64Nearest, ssa.OpWasmF64Abs, ssa.OpWasmI64Ctz, ssa.OpWasmI64Clz, ssa.OpWasmI64Popcnt:
		getValue64(s, v.Args[0])
		s.Prog(v.Op.Asm())

	case ssa.OpLoadReg:
		p := s.Prog(loadOp(v.Type))
		gc.AddrAuto(&p.From, v.Args[0])
		if v.Type.Etype == types.TFLOAT32 {
			s.Prog(wasm.AF64PromoteF32)
		}

	case ssa.OpCopy:
		getValue64(s, v.Args[0])

	default:
		v.Fatalf("unexpected op: %s", v.Op)
	}
}

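// getValue32 pushes v onto the Wasm stack as an i32, wrapping the i64
// register value where needed. SP is already 32-bit and needs no wrap.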
func getValue32(s *gc.SSAGenState, v *ssa.Value) {
	if v.OnWasmStack {
		s.OnWasmStackSkipped--
		ssaGenValueOnStack(s, v)
		s.Prog(wasm.AI32WrapI64)
		return
	}

	reg := v.Reg()
	getReg(s, reg)
	if reg != wasm.REG_SP {
		s.Prog(wasm.AI32WrapI64)
	}
}

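// getValue64 pushes v onto the Wasm stack as an i64. The 32-bit SP is
// zero-extended; all other registers already hold 64-bit values.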
func getValue64(s *gc.SSAGenState, v *ssa.Value) {
	if v.OnWasmStack {
		s.OnWasmStackSkipped--
		ssaGenValueOnStack(s, v)
		return
	}

	reg := v.Reg()
	getReg(s, reg)
	if reg == wasm.REG_SP {
		s.Prog(wasm.AI64ExtendI32U)
	}
}

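// i32Const pushes a 32-bit integer constant onto the Wasm stack.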
func i32Const(s *gc.SSAGenState, val int32) {
	p := s.Prog(wasm.AI32Const)
	p.From = obj.Addr{Type: obj.TYPE_CONST, Offset: int64(val)}
}

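// i64Const pushes a 64-bit integer constant onto the Wasm stack.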
func i64Const(s *gc.SSAGenState, val int64) {
	p := s.Prog(wasm.AI64Const)
	p.From = obj.Addr{Type: obj.TYPE_CONST, Offset: val}
}

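// f64Const pushes a 64-bit floating-point constant onto the Wasm stack.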
func f64Const(s *gc.SSAGenState, val float64) {
	p := s.Prog(wasm.AF64Const)
	p.From = obj.Addr{Type: obj.TYPE_FCONST, Val: val}
}

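// getReg pushes the value of a register onto the Wasm stack.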
func getReg(s *gc.SSAGenState, reg int16) {
	p := s.Prog(wasm.AGet)
	p.From = obj.Addr{Type: obj.TYPE_REG, Reg: reg}
}

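// setReg pops the top of the Wasm stack into a register.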
func setReg(s *gc.SSAGenState, reg int16) {
	p := s.Prog(wasm.ASet)
	p.To = obj.Addr{Type: obj.TYPE_REG, Reg: reg}
}

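// loadOp returns the load instruction for type t, selected by size and
// signedness. Integer loads always extend to i64.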
func loadOp(t *types.Type) obj.As {
	if t.IsFloat() {
		switch t.Size() {
		case 4:
			return wasm.AF32Load
		case 8:
			return wasm.AF64Load
		default:
			panic("bad load type")
		}
	}

	switch t.Size() {
	case 1:
		if t.IsSigned() {
			return wasm.AI64Load8S
		}
		return wasm.AI64Load8U
	case 2:
		if t.IsSigned() {
			return wasm.AI64Load16S
		}
		return wasm.AI64Load16U
	case 4:
		if t.IsSigned() {
			return wasm.AI64Load32S
		}
		return wasm.AI64Load32U
	case 8:
		return wasm.AI64Load
	default:
		panic("bad load type")
	}
}

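// storeOp returns the store instruction for type t, selected by size.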
func storeOp(t *types.Type) obj.As {
	if t.IsFloat() {
		switch t.Size() {
		case 4:
			return wasm.AF32Store
		case 8:
			return wasm.AF64Store
		default:
			panic("bad store type")
		}
	}

	switch t.Size() {
	case 1:
		return wasm.AI64Store8
	case 2:
		return wasm.AI64Store16
	case 4:
		return wasm.AI64Store32
	case 8:
		return wasm.AI64Store
	default:
		panic("bad store type")
	}
}