Source file src/cmd/compile/internal/arm64/ssa.go
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package arm64

import (
	"math"

	"cmd/compile/internal/gc"
	"cmd/compile/internal/ssa"
	"cmd/compile/internal/types"
	"cmd/internal/obj"
	"cmd/internal/obj/arm64"
)

// loadByType returns the load instruction of the given type.
func loadByType(t *types.Type) obj.As {
	if t.IsFloat() {
		switch t.Size() {
		case 4:
			return arm64.AFMOVS
		case 8:
			return arm64.AFMOVD
		}
	} else {
		switch t.Size() {
		case 1:
			if t.IsSigned() {
				return arm64.AMOVB
			} else {
				return arm64.AMOVBU
			}
		case 2:
			if t.IsSigned() {
				return arm64.AMOVH
			} else {
				return arm64.AMOVHU
			}
		case 4:
			if t.IsSigned() {
				return arm64.AMOVW
			} else {
				return arm64.AMOVWU
			}
		case 8:
			return arm64.AMOVD
		}
	}
	panic("bad load type")
}

// storeByType returns the store instruction of the given type.
func storeByType(t *types.Type) obj.As {
	if t.IsFloat() {
		switch t.Size() {
		case 4:
			return arm64.AFMOVS
		case 8:
			return arm64.AFMOVD
		}
	} else {
		switch t.Size() {
		case 1:
			return arm64.AMOVB
		case 2:
			return arm64.AMOVH
		case 4:
			return arm64.AMOVW
		case 8:
			return arm64.AMOVD
		}
	}
	panic("bad store type")
}

// makeshift encodes a register shifted by a constant, used as an Offset in Prog.
func makeshift(reg int16, typ int64, s int64) int64 {
	return int64(reg&31)<<16 | typ | (s&63)<<10
}

// genshift generates a Prog for r = r0 op (r1 shifted by n).
func genshift(s *gc.SSAGenState, as obj.As, r0, r1, r int16, typ int64, n int64) *obj.Prog {
	p := s.Prog(as)
	p.From.Type = obj.TYPE_SHIFT
	p.From.Offset = makeshift(r1, typ, n)
	p.Reg = r0
	if r != 0 {
		p.To.Type = obj.TYPE_REG
		p.To.Reg = r
	}
	return p
}
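
// genshift is what turns ops like ADDshiftLL into a single instruction such
// as "ADD R1<<3, R2, R3". Passing r == 0 leaves the destination unset, which
// the CMP/CMN/TST shift cases below rely on, since those instructions only
// write the flags.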

// generate the memory operand for the indexed load/store operations
func genIndexedOperand(v *ssa.Value) obj.Addr {
	// Reg: base register, Index: (shifted) index register
	mop := obj.Addr{Type: obj.TYPE_MEM, Reg: v.Args[0].Reg()}
	switch v.Op {
	case ssa.OpARM64MOVDloadidx8, ssa.OpARM64MOVDstoreidx8, ssa.OpARM64MOVDstorezeroidx8:
		mop.Index = arm64.REG_LSL | 3<<5 | v.Args[1].Reg()&31
	case ssa.OpARM64MOVWloadidx4, ssa.OpARM64MOVWUloadidx4, ssa.OpARM64MOVWstoreidx4, ssa.OpARM64MOVWstorezeroidx4:
		mop.Index = arm64.REG_LSL | 2<<5 | v.Args[1].Reg()&31
	case ssa.OpARM64MOVHloadidx2, ssa.OpARM64MOVHUloadidx2, ssa.OpARM64MOVHstoreidx2, ssa.OpARM64MOVHstorezeroidx2:
		mop.Index = arm64.REG_LSL | 1<<5 | v.Args[1].Reg()&31
	default:
		mop.Index = v.Args[1].Reg()
	}
	return mop
}
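
// The idx2/idx4/idx8 variants scale the index register by the element size
// (a left shift by 1, 2, or 3 encoded alongside REG_LSL above), matching the
// "base + (index << n)" addressing mode; the plain idx forms use the index
// register unscaled.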

func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
	switch v.Op {
	case ssa.OpCopy, ssa.OpARM64MOVDreg:
		if v.Type.IsMemory() {
			return
		}
		x := v.Args[0].Reg()
		y := v.Reg()
		if x == y {
			return
		}
		as := arm64.AMOVD
		if v.Type.IsFloat() {
			switch v.Type.Size() {
			case 4:
				as = arm64.AFMOVS
			case 8:
				as = arm64.AFMOVD
			default:
				panic("bad float size")
			}
		}
		p := s.Prog(as)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = x
		p.To.Type = obj.TYPE_REG
		p.To.Reg = y
	case ssa.OpARM64MOVDnop:
		if v.Reg() != v.Args[0].Reg() {
			v.Fatalf("input[0] and output not in same register %s", v.LongString())
		}
		// nothing to do
	case ssa.OpLoadReg:
		if v.Type.IsFlags() {
			v.Fatalf("load flags not implemented: %v", v.LongString())
			return
		}
		p := s.Prog(loadByType(v.Type))
		gc.AddrAuto(&p.From, v.Args[0])
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpStoreReg:
		if v.Type.IsFlags() {
			v.Fatalf("store flags not implemented: %v", v.LongString())
			return
		}
		p := s.Prog(storeByType(v.Type))
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		gc.AddrAuto(&p.To, v)
	case ssa.OpARM64ADD,
		ssa.OpARM64SUB,
		ssa.OpARM64AND,
		ssa.OpARM64OR,
		ssa.OpARM64XOR,
		ssa.OpARM64BIC,
		ssa.OpARM64EON,
		ssa.OpARM64ORN,
		ssa.OpARM64MUL,
		ssa.OpARM64MULW,
		ssa.OpARM64MNEG,
		ssa.OpARM64MNEGW,
		ssa.OpARM64MULH,
		ssa.OpARM64UMULH,
		ssa.OpARM64MULL,
		ssa.OpARM64UMULL,
		ssa.OpARM64DIV,
		ssa.OpARM64UDIV,
		ssa.OpARM64DIVW,
		ssa.OpARM64UDIVW,
		ssa.OpARM64MOD,
		ssa.OpARM64UMOD,
		ssa.OpARM64MODW,
		ssa.OpARM64UMODW,
		ssa.OpARM64SLL,
		ssa.OpARM64SRL,
		ssa.OpARM64SRA,
		ssa.OpARM64FADDS,
		ssa.OpARM64FADDD,
		ssa.OpARM64FSUBS,
		ssa.OpARM64FSUBD,
		ssa.OpARM64FMULS,
		ssa.OpARM64FMULD,
		ssa.OpARM64FNMULS,
		ssa.OpARM64FNMULD,
		ssa.OpARM64FDIVS,
		ssa.OpARM64FDIVD,
		ssa.OpARM64ROR,
		ssa.OpARM64RORW:
		r := v.Reg()
		r1 := v.Args[0].Reg()
		r2 := v.Args[1].Reg()
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = r2
		p.Reg = r1
		p.To.Type = obj.TYPE_REG
		p.To.Reg = r
	case ssa.OpARM64FMADDS,
		ssa.OpARM64FMADDD,
		ssa.OpARM64FNMADDS,
		ssa.OpARM64FNMADDD,
		ssa.OpARM64FMSUBS,
		ssa.OpARM64FMSUBD,
		ssa.OpARM64FNMSUBS,
		ssa.OpARM64FNMSUBD,
		ssa.OpARM64MADD,
		ssa.OpARM64MADDW,
		ssa.OpARM64MSUB,
		ssa.OpARM64MSUBW:
		rt := v.Reg()
		ra := v.Args[0].Reg()
		rm := v.Args[1].Reg()
		rn := v.Args[2].Reg()
		p := s.Prog(v.Op.Asm())
		p.Reg = ra
		p.From.Type = obj.TYPE_REG
		p.From.Reg = rm
		p.SetFrom3(obj.Addr{Type: obj.TYPE_REG, Reg: rn})
		p.To.Type = obj.TYPE_REG
		p.To.Reg = rt
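
	// The *const cases below take their immediate operand from v.AuxInt
	// rather than a register; for the bitfield ops (BFI, UBFX, etc.) further
	// down, AuxInt additionally packs two fields, lsb (AuxInt>>8) and width
	// (AuxInt&0xff), into one integer.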
	case ssa.OpARM64ADDconst,
		ssa.OpARM64SUBconst,
		ssa.OpARM64ANDconst,
		ssa.OpARM64ORconst,
		ssa.OpARM64XORconst,
		ssa.OpARM64SLLconst,
		ssa.OpARM64SRLconst,
		ssa.OpARM64SRAconst,
		ssa.OpARM64RORconst,
		ssa.OpARM64RORWconst:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64ADDSconstflags:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()
	case ssa.OpARM64ADCzerocarry:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = arm64.REGZERO
		p.Reg = arm64.REGZERO
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64ADCSflags,
		ssa.OpARM64ADDSflags,
		ssa.OpARM64SBCSflags,
		ssa.OpARM64SUBSflags:
		r := v.Reg0()
		r1 := v.Args[0].Reg()
		r2 := v.Args[1].Reg()
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = r2
		p.Reg = r1
		p.To.Type = obj.TYPE_REG
		p.To.Reg = r
	case ssa.OpARM64NEGSflags:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()
	case ssa.OpARM64NGCzerocarry:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = arm64.REGZERO
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64EXTRconst,
		ssa.OpARM64EXTRWconst:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		p.SetFrom3(obj.Addr{Type: obj.TYPE_REG, Reg: v.Args[0].Reg()})
		p.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64MVNshiftLL, ssa.OpARM64NEGshiftLL:
		genshift(s, v.Op.Asm(), 0, v.Args[0].Reg(), v.Reg(), arm64.SHIFT_LL, v.AuxInt)
	case ssa.OpARM64MVNshiftRL, ssa.OpARM64NEGshiftRL:
		genshift(s, v.Op.Asm(), 0, v.Args[0].Reg(), v.Reg(), arm64.SHIFT_LR, v.AuxInt)
	case ssa.OpARM64MVNshiftRA, ssa.OpARM64NEGshiftRA:
		genshift(s, v.Op.Asm(), 0, v.Args[0].Reg(), v.Reg(), arm64.SHIFT_AR, v.AuxInt)
	case ssa.OpARM64ADDshiftLL,
		ssa.OpARM64SUBshiftLL,
		ssa.OpARM64ANDshiftLL,
		ssa.OpARM64ORshiftLL,
		ssa.OpARM64XORshiftLL,
		ssa.OpARM64EONshiftLL,
		ssa.OpARM64ORNshiftLL,
		ssa.OpARM64BICshiftLL:
		genshift(s, v.Op.Asm(), v.Args[0].Reg(), v.Args[1].Reg(), v.Reg(), arm64.SHIFT_LL, v.AuxInt)
	case ssa.OpARM64ADDshiftRL,
		ssa.OpARM64SUBshiftRL,
		ssa.OpARM64ANDshiftRL,
		ssa.OpARM64ORshiftRL,
		ssa.OpARM64XORshiftRL,
		ssa.OpARM64EONshiftRL,
		ssa.OpARM64ORNshiftRL,
		ssa.OpARM64BICshiftRL:
		genshift(s, v.Op.Asm(), v.Args[0].Reg(), v.Args[1].Reg(), v.Reg(), arm64.SHIFT_LR, v.AuxInt)
	case ssa.OpARM64ADDshiftRA,
		ssa.OpARM64SUBshiftRA,
		ssa.OpARM64ANDshiftRA,
		ssa.OpARM64ORshiftRA,
		ssa.OpARM64XORshiftRA,
		ssa.OpARM64EONshiftRA,
		ssa.OpARM64ORNshiftRA,
		ssa.OpARM64BICshiftRA:
		genshift(s, v.Op.Asm(), v.Args[0].Reg(), v.Args[1].Reg(), v.Reg(), arm64.SHIFT_AR, v.AuxInt)
	case ssa.OpARM64MOVDconst:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64FMOVSconst,
		ssa.OpARM64FMOVDconst:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_FCONST
		p.From.Val = math.Float64frombits(uint64(v.AuxInt))
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64FCMPS0,
		ssa.OpARM64FCMPD0:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_FCONST
		p.From.Val = math.Float64frombits(0)
		p.Reg = v.Args[0].Reg()
	case ssa.OpARM64CMP,
		ssa.OpARM64CMPW,
		ssa.OpARM64CMN,
		ssa.OpARM64CMNW,
		ssa.OpARM64TST,
		ssa.OpARM64TSTW,
		ssa.OpARM64FCMPS,
		ssa.OpARM64FCMPD:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.Reg = v.Args[0].Reg()
	case ssa.OpARM64CMPconst,
		ssa.OpARM64CMPWconst,
		ssa.OpARM64CMNconst,
		ssa.OpARM64CMNWconst,
		ssa.OpARM64TSTconst,
		ssa.OpARM64TSTWconst:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		p.Reg = v.Args[0].Reg()
	case ssa.OpARM64CMPshiftLL, ssa.OpARM64CMNshiftLL, ssa.OpARM64TSTshiftLL:
		genshift(s, v.Op.Asm(), v.Args[0].Reg(), v.Args[1].Reg(), 0, arm64.SHIFT_LL, v.AuxInt)
	case ssa.OpARM64CMPshiftRL, ssa.OpARM64CMNshiftRL, ssa.OpARM64TSTshiftRL:
		genshift(s, v.Op.Asm(), v.Args[0].Reg(), v.Args[1].Reg(), 0, arm64.SHIFT_LR, v.AuxInt)
	case ssa.OpARM64CMPshiftRA, ssa.OpARM64CMNshiftRA, ssa.OpARM64TSTshiftRA:
		genshift(s, v.Op.Asm(), v.Args[0].Reg(), v.Args[1].Reg(), 0, arm64.SHIFT_AR, v.AuxInt)
	case ssa.OpARM64MOVDaddr:
		p := s.Prog(arm64.AMOVD)
		p.From.Type = obj.TYPE_ADDR
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()

		var wantreg string
		// MOVD $sym+off(base), R
		// the assembler expands it as the following:
		// - base is SP: add constant offset to SP
		//   when constant is large, the tmp register may be used
		// - base is SB: load external address from constant pool (use relocation)
		switch v.Aux.(type) {
		default:
			v.Fatalf("aux is of unknown type %T", v.Aux)
		case *obj.LSym:
			wantreg = "SB"
			gc.AddAux(&p.From, v)
		case *gc.Node:
			wantreg = "SP"
			gc.AddAux(&p.From, v)
		case nil:
			// No sym, just MOVD $off(SP), R
			wantreg = "SP"
			p.From.Offset = v.AuxInt
		}
		if reg := v.Args[0].RegName(); reg != wantreg {
			v.Fatalf("bad reg %s for symbol type %T, want %s", reg, v.Aux, wantreg)
		}
	case ssa.OpARM64MOVBload,
		ssa.OpARM64MOVBUload,
		ssa.OpARM64MOVHload,
		ssa.OpARM64MOVHUload,
		ssa.OpARM64MOVWload,
		ssa.OpARM64MOVWUload,
		ssa.OpARM64MOVDload,
		ssa.OpARM64FMOVSload,
		ssa.OpARM64FMOVDload:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		gc.AddAux(&p.From, v)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64MOVBloadidx,
		ssa.OpARM64MOVBUloadidx,
		ssa.OpARM64MOVHloadidx,
		ssa.OpARM64MOVHUloadidx,
		ssa.OpARM64MOVWloadidx,
		ssa.OpARM64MOVWUloadidx,
		ssa.OpARM64MOVDloadidx,
		ssa.OpARM64FMOVSloadidx,
		ssa.OpARM64FMOVDloadidx,
		ssa.OpARM64MOVHloadidx2,
		ssa.OpARM64MOVHUloadidx2,
		ssa.OpARM64MOVWloadidx4,
		ssa.OpARM64MOVWUloadidx4,
		ssa.OpARM64MOVDloadidx8:
		p := s.Prog(v.Op.Asm())
		p.From = genIndexedOperand(v)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64LDAR,
		ssa.OpARM64LDARB,
		ssa.OpARM64LDARW:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		gc.AddAux(&p.From, v)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()
	case ssa.OpARM64MOVBstore,
		ssa.OpARM64MOVHstore,
		ssa.OpARM64MOVWstore,
		ssa.OpARM64MOVDstore,
		ssa.OpARM64FMOVSstore,
		ssa.OpARM64FMOVDstore,
		ssa.OpARM64STLR,
		ssa.OpARM64STLRW:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		gc.AddAux(&p.To, v)
	case ssa.OpARM64MOVBstoreidx,
		ssa.OpARM64MOVHstoreidx,
		ssa.OpARM64MOVWstoreidx,
		ssa.OpARM64MOVDstoreidx,
		ssa.OpARM64FMOVSstoreidx,
		ssa.OpARM64FMOVDstoreidx,
		ssa.OpARM64MOVHstoreidx2,
		ssa.OpARM64MOVWstoreidx4,
		ssa.OpARM64MOVDstoreidx8:
		p := s.Prog(v.Op.Asm())
		p.To = genIndexedOperand(v)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[2].Reg()
	case ssa.OpARM64STP:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REGREG
		p.From.Reg = v.Args[1].Reg()
		p.From.Offset = int64(v.Args[2].Reg())
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		gc.AddAux(&p.To, v)
	case ssa.OpARM64MOVBstorezero,
		ssa.OpARM64MOVHstorezero,
		ssa.OpARM64MOVWstorezero,
		ssa.OpARM64MOVDstorezero:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = arm64.REGZERO
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		gc.AddAux(&p.To, v)
	case ssa.OpARM64MOVBstorezeroidx,
		ssa.OpARM64MOVHstorezeroidx,
		ssa.OpARM64MOVWstorezeroidx,
		ssa.OpARM64MOVDstorezeroidx,
		ssa.OpARM64MOVHstorezeroidx2,
		ssa.OpARM64MOVWstorezeroidx4,
		ssa.OpARM64MOVDstorezeroidx8:
		p := s.Prog(v.Op.Asm())
		p.To = genIndexedOperand(v)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = arm64.REGZERO
	case ssa.OpARM64MOVQstorezero:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REGREG
		p.From.Reg = arm64.REGZERO
		p.From.Offset = int64(arm64.REGZERO)
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		gc.AddAux(&p.To, v)
	case ssa.OpARM64BFI,
		ssa.OpARM64BFXIL:
		r := v.Reg()
		if r != v.Args[0].Reg() {
			v.Fatalf("input[0] and output not in same register %s", v.LongString())
		}
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt >> 8
		p.SetFrom3(obj.Addr{Type: obj.TYPE_CONST, Offset: v.AuxInt & 0xff})
		p.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = r
	case ssa.OpARM64SBFIZ,
		ssa.OpARM64SBFX,
		ssa.OpARM64UBFIZ,
		ssa.OpARM64UBFX:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt >> 8
		p.SetFrom3(obj.Addr{Type: obj.TYPE_CONST, Offset: v.AuxInt & 0xff})
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64LoweredMuluhilo:
		r0 := v.Args[0].Reg()
		r1 := v.Args[1].Reg()
		p := s.Prog(arm64.AUMULH)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = r1
		p.Reg = r0
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()
		p1 := s.Prog(arm64.AMUL)
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = r1
		p1.Reg = r0
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = v.Reg1()
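
	// The atomic lowerings below expand into load-exclusive/store-exclusive
	// (LL/SC) retry loops: the STLXR-style store reports its success status
	// in a register (REGTMP here), and a CBNZ on that register re-runs the
	// loop until the exclusive store succeeds.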
	case ssa.OpARM64LoweredAtomicExchange64,
		ssa.OpARM64LoweredAtomicExchange32:
		// LDAXR	(Rarg0), Rout
		// STLXR	Rarg1, (Rarg0), Rtmp
		// CBNZ		Rtmp, -2(PC)
		ld := arm64.ALDAXR
		st := arm64.ASTLXR
		if v.Op == ssa.OpARM64LoweredAtomicExchange32 {
			ld = arm64.ALDAXRW
			st = arm64.ASTLXRW
		}
		r0 := v.Args[0].Reg()
		r1 := v.Args[1].Reg()
		out := v.Reg0()
		p := s.Prog(ld)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = r0
		p.To.Type = obj.TYPE_REG
		p.To.Reg = out
		p1 := s.Prog(st)
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = r1
		p1.To.Type = obj.TYPE_MEM
		p1.To.Reg = r0
		p1.RegTo2 = arm64.REGTMP
		p2 := s.Prog(arm64.ACBNZ)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = arm64.REGTMP
		p2.To.Type = obj.TYPE_BRANCH
		gc.Patch(p2, p)
	case ssa.OpARM64LoweredAtomicAdd64,
		ssa.OpARM64LoweredAtomicAdd32:
		// LDAXR	(Rarg0), Rout
		// ADD		Rarg1, Rout
		// STLXR	Rout, (Rarg0), Rtmp
		// CBNZ		Rtmp, -3(PC)
		ld := arm64.ALDAXR
		st := arm64.ASTLXR
		if v.Op == ssa.OpARM64LoweredAtomicAdd32 {
			ld = arm64.ALDAXRW
			st = arm64.ASTLXRW
		}
		r0 := v.Args[0].Reg()
		r1 := v.Args[1].Reg()
		out := v.Reg0()
		p := s.Prog(ld)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = r0
		p.To.Type = obj.TYPE_REG
		p.To.Reg = out
		p1 := s.Prog(arm64.AADD)
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = r1
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = out
		p2 := s.Prog(st)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = out
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = r0
		p2.RegTo2 = arm64.REGTMP
		p3 := s.Prog(arm64.ACBNZ)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = arm64.REGTMP
		p3.To.Type = obj.TYPE_BRANCH
		gc.Patch(p3, p)
	case ssa.OpARM64LoweredAtomicAdd64Variant,
		ssa.OpARM64LoweredAtomicAdd32Variant:
		// LDADDAL	Rarg1, (Rarg0), Rout
		// ADD		Rarg1, Rout
		op := arm64.ALDADDALD
		if v.Op == ssa.OpARM64LoweredAtomicAdd32Variant {
			op = arm64.ALDADDALW
		}
		r0 := v.Args[0].Reg()
		r1 := v.Args[1].Reg()
		out := v.Reg0()
		p := s.Prog(op)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = r1
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = r0
		p.RegTo2 = out
		p1 := s.Prog(arm64.AADD)
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = r1
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = out
	case ssa.OpARM64LoweredAtomicCas64,
		ssa.OpARM64LoweredAtomicCas32:
		// LDAXR	(Rarg0), Rtmp
		// CMP		Rarg1, Rtmp
		// BNE		3(PC)
		// STLXR	Rarg2, (Rarg0), Rtmp
		// CBNZ		Rtmp, -4(PC)
		// CSET		EQ, Rout
		ld := arm64.ALDAXR
		st := arm64.ASTLXR
		cmp := arm64.ACMP
		if v.Op == ssa.OpARM64LoweredAtomicCas32 {
			ld = arm64.ALDAXRW
			st = arm64.ASTLXRW
			cmp = arm64.ACMPW
		}
		r0 := v.Args[0].Reg()
		r1 := v.Args[1].Reg()
		r2 := v.Args[2].Reg()
		out := v.Reg0()
		p := s.Prog(ld)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = r0
		p.To.Type = obj.TYPE_REG
		p.To.Reg = arm64.REGTMP
		p1 := s.Prog(cmp)
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = r1
		p1.Reg = arm64.REGTMP
		p2 := s.Prog(arm64.ABNE)
		p2.To.Type = obj.TYPE_BRANCH
		p3 := s.Prog(st)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = r2
		p3.To.Type = obj.TYPE_MEM
		p3.To.Reg = r0
		p3.RegTo2 = arm64.REGTMP
		p4 := s.Prog(arm64.ACBNZ)
		p4.From.Type = obj.TYPE_REG
		p4.From.Reg = arm64.REGTMP
		p4.To.Type = obj.TYPE_BRANCH
		gc.Patch(p4, p)
		p5 := s.Prog(arm64.ACSET)
		p5.From.Type = obj.TYPE_REG
		p5.From.Reg = arm64.COND_EQ
		p5.To.Type = obj.TYPE_REG
		p5.To.Reg = out
		gc.Patch(p2, p5)
	case ssa.OpARM64LoweredAtomicAnd8,
		ssa.OpARM64LoweredAtomicOr8:
		// LDAXRB	(Rarg0), Rout
		// AND/OR	Rarg1, Rout
		// STLXRB	Rout, (Rarg0), Rtmp
		// CBNZ		Rtmp, -3(PC)
		r0 := v.Args[0].Reg()
		r1 := v.Args[1].Reg()
		out := v.Reg0()
		p := s.Prog(arm64.ALDAXRB)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = r0
		p.To.Type = obj.TYPE_REG
		p.To.Reg = out
		p1 := s.Prog(v.Op.Asm())
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = r1
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = out
		p2 := s.Prog(arm64.ASTLXRB)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = out
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = r0
		p2.RegTo2 = arm64.REGTMP
		p3 := s.Prog(arm64.ACBNZ)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = arm64.REGTMP
		p3.To.Type = obj.TYPE_BRANCH
		gc.Patch(p3, p)
	case ssa.OpARM64MOVBreg,
		ssa.OpARM64MOVBUreg,
		ssa.OpARM64MOVHreg,
		ssa.OpARM64MOVHUreg,
		ssa.OpARM64MOVWreg,
		ssa.OpARM64MOVWUreg:
		a := v.Args[0]
		for a.Op == ssa.OpCopy || a.Op == ssa.OpARM64MOVDreg {
			a = a.Args[0]
		}
		if a.Op == ssa.OpLoadReg {
			t := a.Type
			switch {
			case v.Op == ssa.OpARM64MOVBreg && t.Size() == 1 && t.IsSigned(),
				v.Op == ssa.OpARM64MOVBUreg && t.Size() == 1 && !t.IsSigned(),
				v.Op == ssa.OpARM64MOVHreg && t.Size() == 2 && t.IsSigned(),
				v.Op == ssa.OpARM64MOVHUreg && t.Size() == 2 && !t.IsSigned(),
				v.Op == ssa.OpARM64MOVWreg && t.Size() == 4 && t.IsSigned(),
				v.Op == ssa.OpARM64MOVWUreg && t.Size() == 4 && !t.IsSigned():
				// arg is a proper-typed load, already zero/sign extended, don't extend again
				if v.Reg() == v.Args[0].Reg() {
					return
				}
				p := s.Prog(arm64.AMOVD)
				p.From.Type = obj.TYPE_REG
				p.From.Reg = v.Args[0].Reg()
				p.To.Type = obj.TYPE_REG
				p.To.Reg = v.Reg()
				return
			default:
			}
		}
		fallthrough
	case ssa.OpARM64MVN,
		ssa.OpARM64NEG,
		ssa.OpARM64FABSD,
		ssa.OpARM64FMOVDfpgp,
		ssa.OpARM64FMOVDgpfp,
		ssa.OpARM64FMOVSfpgp,
		ssa.OpARM64FMOVSgpfp,
		ssa.OpARM64FNEGS,
		ssa.OpARM64FNEGD,
		ssa.OpARM64FSQRTD,
		ssa.OpARM64FCVTZSSW,
		ssa.OpARM64FCVTZSDW,
		ssa.OpARM64FCVTZUSW,
		ssa.OpARM64FCVTZUDW,
		ssa.OpARM64FCVTZSS,
		ssa.OpARM64FCVTZSD,
		ssa.OpARM64FCVTZUS,
		ssa.OpARM64FCVTZUD,
		ssa.OpARM64SCVTFWS,
		ssa.OpARM64SCVTFWD,
		ssa.OpARM64SCVTFS,
		ssa.OpARM64SCVTFD,
		ssa.OpARM64UCVTFWS,
		ssa.OpARM64UCVTFWD,
		ssa.OpARM64UCVTFS,
		ssa.OpARM64UCVTFD,
		ssa.OpARM64FCVTSD,
		ssa.OpARM64FCVTDS,
		ssa.OpARM64REV,
		ssa.OpARM64REVW,
		ssa.OpARM64REV16W,
		ssa.OpARM64RBIT,
		ssa.OpARM64RBITW,
		ssa.OpARM64CLZ,
		ssa.OpARM64CLZW,
		ssa.OpARM64FRINTAD,
		ssa.OpARM64FRINTMD,
		ssa.OpARM64FRINTND,
		ssa.OpARM64FRINTPD,
		ssa.OpARM64FRINTZD:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64LoweredRound32F, ssa.OpARM64LoweredRound64F:
		// input is already rounded
	case ssa.OpARM64VCNT:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = (v.Args[0].Reg()-arm64.REG_F0)&31 + arm64.REG_ARNG + ((arm64.ARNG_8B & 15) << 5)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = (v.Reg()-arm64.REG_F0)&31 + arm64.REG_ARNG + ((arm64.ARNG_8B & 15) << 5)
	case ssa.OpARM64VUADDLV:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = (v.Args[0].Reg()-arm64.REG_F0)&31 + arm64.REG_ARNG + ((arm64.ARNG_8B & 15) << 5)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg() - arm64.REG_F0 + arm64.REG_V0
	case ssa.OpARM64CSEL, ssa.OpARM64CSEL0:
		r1 := int16(arm64.REGZERO)
		if v.Op != ssa.OpARM64CSEL0 {
			r1 = v.Args[1].Reg()
		}
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG // assembler encodes conditional bits in Reg
		p.From.Reg = condBits[v.Aux.(ssa.Op)]
		p.Reg = v.Args[0].Reg()
		p.SetFrom3(obj.Addr{Type: obj.TYPE_REG, Reg: r1})
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64DUFFZERO:
		// v.AuxInt selects the entry point into runtime.duffzero,
		// which zeroes the remaining bytes via a Duff's device.
		p := s.Prog(obj.ADUFFZERO)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = gc.Duffzero
		p.To.Offset = v.AuxInt
	case ssa.OpARM64LoweredZero:
		// STP.P	(ZR,ZR), 16(R16)
		// CMP	Rarg1, R16
		// BLE	-2(PC)
		// arg1 is the address of the last 16-byte unit to zero
		p := s.Prog(arm64.ASTP)
		p.Scond = arm64.C_XPOST
		p.From.Type = obj.TYPE_REGREG
		p.From.Reg = arm64.REGZERO
		p.From.Offset = int64(arm64.REGZERO)
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = arm64.REG_R16
		p.To.Offset = 16
		p2 := s.Prog(arm64.ACMP)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = v.Args[1].Reg()
		p2.Reg = arm64.REG_R16
		p3 := s.Prog(arm64.ABLE)
		p3.To.Type = obj.TYPE_BRANCH
		gc.Patch(p3, p)
	case ssa.OpARM64DUFFCOPY:
		p := s.Prog(obj.ADUFFCOPY)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = gc.Duffcopy
		p.To.Offset = v.AuxInt
	case ssa.OpARM64LoweredMove:
		// MOVD.P	8(R16), Rtmp
		// MOVD.P	Rtmp, 8(R17)
		// CMP	Rarg2, R16
		// BLE	-3(PC)
		// arg2 is the address of the last element of src
		p := s.Prog(arm64.AMOVD)
		p.Scond = arm64.C_XPOST
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = arm64.REG_R16
		p.From.Offset = 8
		p.To.Type = obj.TYPE_REG
		p.To.Reg = arm64.REGTMP
		p2 := s.Prog(arm64.AMOVD)
		p2.Scond = arm64.C_XPOST
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = arm64.REGTMP
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = arm64.REG_R17
		p2.To.Offset = 8
		p3 := s.Prog(arm64.ACMP)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = v.Args[2].Reg()
		p3.Reg = arm64.REG_R16
		p4 := s.Prog(arm64.ABLE)
		p4.To.Type = obj.TYPE_BRANCH
		gc.Patch(p4, p)
	case ssa.OpARM64CALLstatic, ssa.OpARM64CALLclosure, ssa.OpARM64CALLinter:
		s.Call(v)
	case ssa.OpARM64LoweredWB:
		p := s.Prog(obj.ACALL)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = v.Aux.(*obj.LSym)
	case ssa.OpARM64LoweredPanicBoundsA, ssa.OpARM64LoweredPanicBoundsB, ssa.OpARM64LoweredPanicBoundsC:
		p := s.Prog(obj.ACALL)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = gc.BoundsCheckFunc[v.AuxInt]
		s.UseArgs(16) // space used in callee args area by assembly stubs
	case ssa.OpARM64LoweredNilCheck:
		// Issue a load which will fault if arg is nil.
		p := s.Prog(arm64.AMOVB)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		gc.AddAux(&p.From, v)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = arm64.REGTMP
		if gc.Debug_checknil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers
			gc.Warnl(v.Pos, "generated nil check")
		}
	case ssa.OpARM64Equal,
		ssa.OpARM64NotEqual,
		ssa.OpARM64LessThan,
		ssa.OpARM64LessEqual,
		ssa.OpARM64GreaterThan,
		ssa.OpARM64GreaterEqual,
		ssa.OpARM64LessThanU,
		ssa.OpARM64LessEqualU,
		ssa.OpARM64GreaterThanU,
		ssa.OpARM64GreaterEqualU,
		ssa.OpARM64LessThanF,
		ssa.OpARM64LessEqualF,
		ssa.OpARM64GreaterThanF,
		ssa.OpARM64GreaterEqualF:
		// generate boolean values using CSET
		p := s.Prog(arm64.ACSET)
		p.From.Type = obj.TYPE_REG // assembler encodes conditional bits in Reg
		p.From.Reg = condBits[v.Op]
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64LoweredGetClosurePtr:
		// Closure pointer is R26 (arm64.REGCTXT).
		gc.CheckLoweredGetClosurePtr(v)
	case ssa.OpARM64LoweredGetCallerSP:
		// caller's SP is FixedFrameSize below the address of the first arg
		p := s.Prog(arm64.AMOVD)
		p.From.Type = obj.TYPE_ADDR
		p.From.Offset = -gc.Ctxt.FixedFrameSize()
		p.From.Name = obj.NAME_PARAM
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64LoweredGetCallerPC:
		p := s.Prog(obj.AGETCALLERPC)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64FlagEQ,
		ssa.OpARM64FlagLT_ULT,
		ssa.OpARM64FlagLT_UGT,
		ssa.OpARM64FlagGT_ULT,
		ssa.OpARM64FlagGT_UGT:
		v.Fatalf("Flag* ops should never make it to codegen %v", v.LongString())
	case ssa.OpARM64InvertFlags:
		v.Fatalf("InvertFlags should never make it to codegen %v", v.LongString())
	case ssa.OpClobber:
		// TODO: implement for clobberdead experiment. Nop is ok for now.
	default:
		v.Fatalf("genValue not implemented: %s", v.LongString())
	}
}

var condBits = map[ssa.Op]int16{
	ssa.OpARM64Equal:         arm64.COND_EQ,
	ssa.OpARM64NotEqual:      arm64.COND_NE,
	ssa.OpARM64LessThan:      arm64.COND_LT,
	ssa.OpARM64LessThanU:     arm64.COND_LO,
	ssa.OpARM64LessEqual:     arm64.COND_LE,
	ssa.OpARM64LessEqualU:    arm64.COND_LS,
	ssa.OpARM64GreaterThan:   arm64.COND_GT,
	ssa.OpARM64GreaterThanU:  arm64.COND_HI,
	ssa.OpARM64GreaterEqual:  arm64.COND_GE,
	ssa.OpARM64GreaterEqualU: arm64.COND_HS,
	ssa.OpARM64LessThanF:     arm64.COND_MI,
	ssa.OpARM64LessEqualF:    arm64.COND_LS,
	ssa.OpARM64GreaterThanF:  arm64.COND_GT,
	ssa.OpARM64GreaterEqualF: arm64.COND_GE,
}
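
// Note on the floating-point entries above: MI, LS, GT, and GE are chosen so
// that an unordered comparison (either operand NaN) leaves the condition
// false, which is the behavior Go's <, <=, >, and >= require for floats.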

var blockJump = map[ssa.BlockKind]struct {
	asm, invasm obj.As
}{
	ssa.BlockARM64EQ:   {arm64.ABEQ, arm64.ABNE},
	ssa.BlockARM64NE:   {arm64.ABNE, arm64.ABEQ},
	ssa.BlockARM64LT:   {arm64.ABLT, arm64.ABGE},
	ssa.BlockARM64GE:   {arm64.ABGE, arm64.ABLT},
	ssa.BlockARM64LE:   {arm64.ABLE, arm64.ABGT},
	ssa.BlockARM64GT:   {arm64.ABGT, arm64.ABLE},
	ssa.BlockARM64ULT:  {arm64.ABLO, arm64.ABHS},
	ssa.BlockARM64UGE:  {arm64.ABHS, arm64.ABLO},
	ssa.BlockARM64UGT:  {arm64.ABHI, arm64.ABLS},
	ssa.BlockARM64ULE:  {arm64.ABLS, arm64.ABHI},
	ssa.BlockARM64Z:    {arm64.ACBZ, arm64.ACBNZ},
	ssa.BlockARM64NZ:   {arm64.ACBNZ, arm64.ACBZ},
	ssa.BlockARM64ZW:   {arm64.ACBZW, arm64.ACBNZW},
	ssa.BlockARM64NZW:  {arm64.ACBNZW, arm64.ACBZW},
	ssa.BlockARM64TBZ:  {arm64.ATBZ, arm64.ATBNZ},
	ssa.BlockARM64TBNZ: {arm64.ATBNZ, arm64.ATBZ},
	ssa.BlockARM64FLT:  {arm64.ABMI, arm64.ABPL},
	ssa.BlockARM64FGE:  {arm64.ABGE, arm64.ABLT},
	ssa.BlockARM64FLE:  {arm64.ABLS, arm64.ABHI},
	ssa.BlockARM64FGT:  {arm64.ABGT, arm64.ABLE},
}
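
// asm is the branch taken when the block's condition holds and invasm its
// negation; ssaGenBlock below picks whichever one lets the likely successor
// fall through. For the float blocks the two are not simple logical inverses:
// the inverted branch (e.g. BPL for FLT, BLT for FGE) is also taken when the
// comparison was unordered, routing NaN cases to the false successor.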

func ssaGenBlock(s *gc.SSAGenState, b, next *ssa.Block) {
	switch b.Kind {
	case ssa.BlockPlain:
		if b.Succs[0].Block() != next {
			p := s.Prog(obj.AJMP)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[0].Block()})
		}

	case ssa.BlockDefer:
		// defer returns in R0:
		// 0 if we should continue executing
		// 1 if we should jump to deferreturn call
		p := s.Prog(arm64.ACMP)
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = 0
		p.Reg = arm64.REG_R0
		p = s.Prog(arm64.ABNE)
		p.To.Type = obj.TYPE_BRANCH
		s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[1].Block()})
		if b.Succs[0].Block() != next {
			p := s.Prog(obj.AJMP)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[0].Block()})
		}

	case ssa.BlockExit:

	case ssa.BlockRet:
		s.Prog(obj.ARET)

	case ssa.BlockRetJmp:
		p := s.Prog(obj.ARET)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = b.Aux.(*obj.LSym)

	case ssa.BlockARM64EQ, ssa.BlockARM64NE,
		ssa.BlockARM64LT, ssa.BlockARM64GE,
		ssa.BlockARM64LE, ssa.BlockARM64GT,
		ssa.BlockARM64ULT, ssa.BlockARM64UGT,
		ssa.BlockARM64ULE, ssa.BlockARM64UGE,
		ssa.BlockARM64Z, ssa.BlockARM64NZ,
		ssa.BlockARM64ZW, ssa.BlockARM64NZW,
		ssa.BlockARM64FLT, ssa.BlockARM64FGE,
		ssa.BlockARM64FLE, ssa.BlockARM64FGT:
		jmp := blockJump[b.Kind]
		var p *obj.Prog
		switch next {
		case b.Succs[0].Block():
			p = s.Br(jmp.invasm, b.Succs[1].Block())
		case b.Succs[1].Block():
			p = s.Br(jmp.asm, b.Succs[0].Block())
		default:
			if b.Likely != ssa.BranchUnlikely {
				p = s.Br(jmp.asm, b.Succs[0].Block())
				s.Br(obj.AJMP, b.Succs[1].Block())
			} else {
				p = s.Br(jmp.invasm, b.Succs[1].Block())
				s.Br(obj.AJMP, b.Succs[0].Block())
			}
		}
		if !b.Control.Type.IsFlags() {
			p.From.Type = obj.TYPE_REG
			p.From.Reg = b.Control.Reg()
		}
	case ssa.BlockARM64TBZ, ssa.BlockARM64TBNZ:
		jmp := blockJump[b.Kind]
		var p *obj.Prog
		switch next {
		case b.Succs[0].Block():
			p = s.Br(jmp.invasm, b.Succs[1].Block())
		case b.Succs[1].Block():
			p = s.Br(jmp.asm, b.Succs[0].Block())
		default:
			if b.Likely != ssa.BranchUnlikely {
				p = s.Br(jmp.asm, b.Succs[0].Block())
				s.Br(obj.AJMP, b.Succs[1].Block())
			} else {
				p = s.Br(jmp.invasm, b.Succs[1].Block())
				s.Br(obj.AJMP, b.Succs[0].Block())
			}
		}
		p.From.Offset = b.Aux.(int64)
		p.From.Type = obj.TYPE_CONST
		p.Reg = b.Control.Reg()

	default:
		b.Fatalf("branch not implemented: %s. Control: %s", b.LongString(), b.Control.LongString())
	}
}