Source file src/cmd/compile/internal/mips64/ssa.go
package mips64

import (
	"math"

	"cmd/compile/internal/gc"
	"cmd/compile/internal/ssa"
	"cmd/compile/internal/types"
	"cmd/internal/obj"
	"cmd/internal/obj/mips"
)

// isFPreg reports whether r is a floating-point register.
func isFPreg(r int16) bool {
	return mips.REG_F0 <= r && r <= mips.REG_F31
}

// isHILO reports whether r is the special HI or LO register.
func isHILO(r int16) bool {
	return r == mips.REG_HI || r == mips.REG_LO
}

// loadByType returns the load instruction of the given type.
func loadByType(t *types.Type, r int16) obj.As {
	if isFPreg(r) {
		if t.Size() == 4 {
			return mips.AMOVF
		} else {
			return mips.AMOVD
		}
	} else {
		switch t.Size() {
		case 1:
			if t.IsSigned() {
				return mips.AMOVB
			} else {
				return mips.AMOVBU
			}
		case 2:
			if t.IsSigned() {
				return mips.AMOVH
			} else {
				return mips.AMOVHU
			}
		case 4:
			if t.IsSigned() {
				return mips.AMOVW
			} else {
				return mips.AMOVWU
			}
		case 8:
			return mips.AMOVV
		}
	}
	panic("bad load type")
}

// storeByType returns the store instruction of the given type.
func storeByType(t *types.Type, r int16) obj.As {
	if isFPreg(r) {
		if t.Size() == 4 {
			return mips.AMOVF
		} else {
			return mips.AMOVD
		}
	} else {
		switch t.Size() {
		case 1:
			return mips.AMOVB
		case 2:
			return mips.AMOVH
		case 4:
			return mips.AMOVW
		case 8:
			return mips.AMOVV
		}
	}
	panic("bad store type")
}

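// ssaGenValue emits the machine instructions for a single SSA value.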
func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
	switch v.Op {
	case ssa.OpCopy, ssa.OpMIPS64MOVVreg:
		if v.Type.IsMemory() {
			return
		}
		x := v.Args[0].Reg()
		y := v.Reg()
		if x == y {
			return
		}
		as := mips.AMOVV
		if isFPreg(x) && isFPreg(y) {
			as = mips.AMOVD
		}
		p := s.Prog(as)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = x
		p.To.Type = obj.TYPE_REG
		p.To.Reg = y
		if isHILO(x) && isHILO(y) || isHILO(x) && isFPreg(y) || isFPreg(x) && isHILO(y) {
			// cannot move directly between special registers or between
			// a special register and an FP register; go through REGTMP
			p.To.Reg = mips.REGTMP
			p = s.Prog(mips.AMOVV)
			p.From.Type = obj.TYPE_REG
			p.From.Reg = mips.REGTMP
			p.To.Type = obj.TYPE_REG
			p.To.Reg = y
		}
	case ssa.OpMIPS64MOVVnop:
		if v.Reg() != v.Args[0].Reg() {
			v.Fatalf("input[0] and output not in same register %s", v.LongString())
		}
		// nothing to do
	case ssa.OpLoadReg:
		if v.Type.IsFlags() {
			v.Fatalf("load flags not implemented: %v", v.LongString())
			return
		}
		r := v.Reg()
		p := s.Prog(loadByType(v.Type, r))
		gc.AddrAuto(&p.From, v.Args[0])
		p.To.Type = obj.TYPE_REG
		p.To.Reg = r
		if isHILO(r) {
			// cannot load directly into HI/LO; load into REGTMP and move
			p.To.Reg = mips.REGTMP
			p = s.Prog(mips.AMOVV)
			p.From.Type = obj.TYPE_REG
			p.From.Reg = mips.REGTMP
			p.To.Type = obj.TYPE_REG
			p.To.Reg = r
		}
	case ssa.OpStoreReg:
		if v.Type.IsFlags() {
			v.Fatalf("store flags not implemented: %v", v.LongString())
			return
		}
		r := v.Args[0].Reg()
		if isHILO(r) {
			// cannot store directly from HI/LO; move to REGTMP and store
			p := s.Prog(mips.AMOVV)
			p.From.Type = obj.TYPE_REG
			p.From.Reg = r
			p.To.Type = obj.TYPE_REG
			p.To.Reg = mips.REGTMP
			r = mips.REGTMP
		}
		p := s.Prog(storeByType(v.Type, r))
		p.From.Type = obj.TYPE_REG
		p.From.Reg = r
		gc.AddrAuto(&p.To, v)
	case ssa.OpMIPS64ADDV,
		ssa.OpMIPS64SUBV,
		ssa.OpMIPS64AND,
		ssa.OpMIPS64OR,
		ssa.OpMIPS64XOR,
		ssa.OpMIPS64NOR,
		ssa.OpMIPS64SLLV,
		ssa.OpMIPS64SRLV,
		ssa.OpMIPS64SRAV,
		ssa.OpMIPS64ADDF,
		ssa.OpMIPS64ADDD,
		ssa.OpMIPS64SUBF,
		ssa.OpMIPS64SUBD,
		ssa.OpMIPS64MULF,
		ssa.OpMIPS64MULD,
		ssa.OpMIPS64DIVF,
		ssa.OpMIPS64DIVD:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64SGT,
		ssa.OpMIPS64SGTU:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64ADDVconst,
		ssa.OpMIPS64SUBVconst,
		ssa.OpMIPS64ANDconst,
		ssa.OpMIPS64ORconst,
		ssa.OpMIPS64XORconst,
		ssa.OpMIPS64NORconst,
		ssa.OpMIPS64SLLVconst,
		ssa.OpMIPS64SRLVconst,
		ssa.OpMIPS64SRAVconst,
		ssa.OpMIPS64SGTconst,
		ssa.OpMIPS64SGTUconst:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64MULV,
		ssa.OpMIPS64MULVU,
		ssa.OpMIPS64DIVV,
		ssa.OpMIPS64DIVVU:
		// result is left in the HI,LO register pair
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.Reg = v.Args[0].Reg()
	case ssa.OpMIPS64MOVVconst:
		r := v.Reg()
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		p.To.Type = obj.TYPE_REG
		p.To.Reg = r
		if isFPreg(r) || isHILO(r) {
			// cannot move a constant directly into an FP or special register;
			// go through REGTMP
			p.To.Reg = mips.REGTMP
			p = s.Prog(mips.AMOVV)
			p.From.Type = obj.TYPE_REG
			p.From.Reg = mips.REGTMP
			p.To.Type = obj.TYPE_REG
			p.To.Reg = r
		}
	case ssa.OpMIPS64MOVFconst,
		ssa.OpMIPS64MOVDconst:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_FCONST
		p.From.Val = math.Float64frombits(uint64(v.AuxInt))
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64CMPEQF,
		ssa.OpMIPS64CMPEQD,
		ssa.OpMIPS64CMPGEF,
		ssa.OpMIPS64CMPGED,
		ssa.OpMIPS64CMPGTF,
		ssa.OpMIPS64CMPGTD:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.Reg = v.Args[1].Reg()
	case ssa.OpMIPS64MOVVaddr:
		p := s.Prog(mips.AMOVV)
		p.From.Type = obj.TYPE_ADDR
		p.From.Reg = v.Args[0].Reg()
		var wantreg string
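		// The operand is of the form $sym+off(base). The base register must
		// match the kind of symbol carried in v.Aux: SB for external symbols
		// (*obj.LSym), SP for stack addresses (*gc.Node or no aux); this is
		// checked below.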
		switch v.Aux.(type) {
		default:
			v.Fatalf("aux is of unknown type %T", v.Aux)
		case *obj.LSym:
			wantreg = "SB"
			gc.AddAux(&p.From, v)
		case *gc.Node:
			wantreg = "SP"
			gc.AddAux(&p.From, v)
		case nil:
			// No sym, just MOVV $off(SP), R
			wantreg = "SP"
			p.From.Offset = v.AuxInt
		}
		if reg := v.Args[0].RegName(); reg != wantreg {
			v.Fatalf("bad reg %s for symbol type %T, want %s", reg, v.Aux, wantreg)
		}
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64MOVBload,
		ssa.OpMIPS64MOVBUload,
		ssa.OpMIPS64MOVHload,
		ssa.OpMIPS64MOVHUload,
		ssa.OpMIPS64MOVWload,
		ssa.OpMIPS64MOVWUload,
		ssa.OpMIPS64MOVVload,
		ssa.OpMIPS64MOVFload,
		ssa.OpMIPS64MOVDload:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		gc.AddAux(&p.From, v)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64MOVBstore,
		ssa.OpMIPS64MOVHstore,
		ssa.OpMIPS64MOVWstore,
		ssa.OpMIPS64MOVVstore,
		ssa.OpMIPS64MOVFstore,
		ssa.OpMIPS64MOVDstore:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		gc.AddAux(&p.To, v)
	case ssa.OpMIPS64MOVBstorezero,
		ssa.OpMIPS64MOVHstorezero,
		ssa.OpMIPS64MOVWstorezero,
		ssa.OpMIPS64MOVVstorezero:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		gc.AddAux(&p.To, v)
	case ssa.OpMIPS64MOVBreg,
		ssa.OpMIPS64MOVBUreg,
		ssa.OpMIPS64MOVHreg,
		ssa.OpMIPS64MOVHUreg,
		ssa.OpMIPS64MOVWreg,
		ssa.OpMIPS64MOVWUreg:
		a := v.Args[0]
		for a.Op == ssa.OpCopy || a.Op == ssa.OpMIPS64MOVVreg {
			a = a.Args[0]
		}
		if a.Op == ssa.OpLoadReg {
			t := a.Type
			switch {
			case v.Op == ssa.OpMIPS64MOVBreg && t.Size() == 1 && t.IsSigned(),
				v.Op == ssa.OpMIPS64MOVBUreg && t.Size() == 1 && !t.IsSigned(),
				v.Op == ssa.OpMIPS64MOVHreg && t.Size() == 2 && t.IsSigned(),
				v.Op == ssa.OpMIPS64MOVHUreg && t.Size() == 2 && !t.IsSigned(),
				v.Op == ssa.OpMIPS64MOVWreg && t.Size() == 4 && t.IsSigned(),
				v.Op == ssa.OpMIPS64MOVWUreg && t.Size() == 4 && !t.IsSigned():
				// arg is a proper-typed load, already sign/zero-extended;
				// don't extend again, just move it
				if v.Reg() == v.Args[0].Reg() {
					return
				}
				p := s.Prog(mips.AMOVV)
				p.From.Type = obj.TYPE_REG
				p.From.Reg = v.Args[0].Reg()
				p.To.Type = obj.TYPE_REG
				p.To.Reg = v.Reg()
				return
			default:
			}
		}
		fallthrough
	case ssa.OpMIPS64MOVWF,
		ssa.OpMIPS64MOVWD,
		ssa.OpMIPS64TRUNCFW,
		ssa.OpMIPS64TRUNCDW,
		ssa.OpMIPS64MOVVF,
		ssa.OpMIPS64MOVVD,
		ssa.OpMIPS64TRUNCFV,
		ssa.OpMIPS64TRUNCDV,
		ssa.OpMIPS64MOVFD,
		ssa.OpMIPS64MOVDF,
		ssa.OpMIPS64NEGF,
		ssa.OpMIPS64NEGD,
		ssa.OpMIPS64SQRTD:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64NEGV:
		// SUB from REGZERO
		p := s.Prog(mips.ASUBVU)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64DUFFZERO:
		// runtime.duffzero expects the start address minus 8 in R1
		p := s.Prog(mips.ASUBVU)
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = 8
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REG_R1
		p = s.Prog(obj.ADUFFZERO)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = gc.Duffzero
		p.To.Offset = v.AuxInt
	case ssa.OpMIPS64LoweredZero:
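		// The generated loop zeroes sz bytes per iteration, with sz chosen
		// below from the alignment carried in AuxInt:
		//	SUBVU	$sz, R1
		// loop:
		//	MOVx	R0, sz(R1)
		//	ADDVU	$sz, R1
		//	BNE	Rarg1, R1, loop
		// arg1 is the address of the last element to zero.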
		var sz int64
		var mov obj.As
		switch {
		case v.AuxInt%8 == 0:
			sz = 8
			mov = mips.AMOVV
		case v.AuxInt%4 == 0:
			sz = 4
			mov = mips.AMOVW
		case v.AuxInt%2 == 0:
			sz = 2
			mov = mips.AMOVH
		default:
			sz = 1
			mov = mips.AMOVB
		}
		p := s.Prog(mips.ASUBVU)
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = sz
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REG_R1
		p2 := s.Prog(mov)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = mips.REGZERO
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = mips.REG_R1
		p2.To.Offset = sz
		p3 := s.Prog(mips.AADDVU)
		p3.From.Type = obj.TYPE_CONST
		p3.From.Offset = sz
		p3.To.Type = obj.TYPE_REG
		p3.To.Reg = mips.REG_R1
		p4 := s.Prog(mips.ABNE)
		p4.From.Type = obj.TYPE_REG
		p4.From.Reg = v.Args[1].Reg()
		p4.Reg = mips.REG_R1
		p4.To.Type = obj.TYPE_BRANCH
		gc.Patch(p4, p2)
	case ssa.OpMIPS64LoweredMove:
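		// The generated loop copies sz bytes per iteration, with sz chosen
		// below from the alignment carried in AuxInt:
		//	SUBVU	$sz, R1
		// loop:
		//	MOVx	sz(R1), Rtmp
		//	MOVx	Rtmp, (R2)
		//	ADDVU	$sz, R1
		//	ADDVU	$sz, R2
		//	BNE	Rarg2, R1, loop
		// arg2 is the address of the last element of the source.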
		var sz int64
		var mov obj.As
		switch {
		case v.AuxInt%8 == 0:
			sz = 8
			mov = mips.AMOVV
		case v.AuxInt%4 == 0:
			sz = 4
			mov = mips.AMOVW
		case v.AuxInt%2 == 0:
			sz = 2
			mov = mips.AMOVH
		default:
			sz = 1
			mov = mips.AMOVB
		}
		p := s.Prog(mips.ASUBVU)
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = sz
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REG_R1
		p2 := s.Prog(mov)
		p2.From.Type = obj.TYPE_MEM
		p2.From.Reg = mips.REG_R1
		p2.From.Offset = sz
		p2.To.Type = obj.TYPE_REG
		p2.To.Reg = mips.REGTMP
		p3 := s.Prog(mov)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = mips.REGTMP
		p3.To.Type = obj.TYPE_MEM
		p3.To.Reg = mips.REG_R2
		p4 := s.Prog(mips.AADDVU)
		p4.From.Type = obj.TYPE_CONST
		p4.From.Offset = sz
		p4.To.Type = obj.TYPE_REG
		p4.To.Reg = mips.REG_R1
		p5 := s.Prog(mips.AADDVU)
		p5.From.Type = obj.TYPE_CONST
		p5.From.Offset = sz
		p5.To.Type = obj.TYPE_REG
		p5.To.Reg = mips.REG_R2
		p6 := s.Prog(mips.ABNE)
		p6.From.Type = obj.TYPE_REG
		p6.From.Reg = v.Args[2].Reg()
		p6.Reg = mips.REG_R1
		p6.To.Type = obj.TYPE_BRANCH
		gc.Patch(p6, p2)
	case ssa.OpMIPS64CALLstatic, ssa.OpMIPS64CALLclosure, ssa.OpMIPS64CALLinter:
		s.Call(v)
	case ssa.OpMIPS64LoweredWB:
		p := s.Prog(obj.ACALL)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = v.Aux.(*obj.LSym)
	case ssa.OpMIPS64LoweredPanicBoundsA, ssa.OpMIPS64LoweredPanicBoundsB, ssa.OpMIPS64LoweredPanicBoundsC:
		p := s.Prog(obj.ACALL)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = gc.BoundsCheckFunc[v.AuxInt]
		s.UseArgs(16)
	case ssa.OpMIPS64LoweredAtomicLoad8, ssa.OpMIPS64LoweredAtomicLoad32, ssa.OpMIPS64LoweredAtomicLoad64:
		as := mips.AMOVV
		switch v.Op {
		case ssa.OpMIPS64LoweredAtomicLoad8:
			as = mips.AMOVB
		case ssa.OpMIPS64LoweredAtomicLoad32:
			as = mips.AMOVW
		}
		s.Prog(mips.ASYNC)
		p := s.Prog(as)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()
		s.Prog(mips.ASYNC)
	case ssa.OpMIPS64LoweredAtomicStore32, ssa.OpMIPS64LoweredAtomicStore64:
		as := mips.AMOVV
		if v.Op == ssa.OpMIPS64LoweredAtomicStore32 {
			as = mips.AMOVW
		}
		s.Prog(mips.ASYNC)
		p := s.Prog(as)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		s.Prog(mips.ASYNC)
	case ssa.OpMIPS64LoweredAtomicStorezero32, ssa.OpMIPS64LoweredAtomicStorezero64:
		as := mips.AMOVV
		if v.Op == ssa.OpMIPS64LoweredAtomicStorezero32 {
			as = mips.AMOVW
		}
		s.Prog(mips.ASYNC)
		p := s.Prog(as)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		s.Prog(mips.ASYNC)
	case ssa.OpMIPS64LoweredAtomicExchange32, ssa.OpMIPS64LoweredAtomicExchange64:
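		// The generated sequence retries from the MOVV while the SC fails:
		//	SYNC
		// again:
		//	MOVV	Rarg1, Rtmp
		//	LL	(Rarg0), Rout
		//	SC	Rtmp, (Rarg0)
		//	BEQ	Rtmp, again
		//	SYNC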
		ll := mips.ALLV
		sc := mips.ASCV
		if v.Op == ssa.OpMIPS64LoweredAtomicExchange32 {
			ll = mips.ALL
			sc = mips.ASC
		}
		s.Prog(mips.ASYNC)
		p := s.Prog(mips.AMOVV)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REGTMP
		p1 := s.Prog(ll)
		p1.From.Type = obj.TYPE_MEM
		p1.From.Reg = v.Args[0].Reg()
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = v.Reg0()
		p2 := s.Prog(sc)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = mips.REGTMP
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = v.Args[0].Reg()
		p3 := s.Prog(mips.ABEQ)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = mips.REGTMP
		p3.To.Type = obj.TYPE_BRANCH
		gc.Patch(p3, p)
		s.Prog(mips.ASYNC)
	case ssa.OpMIPS64LoweredAtomicAdd32, ssa.OpMIPS64LoweredAtomicAdd64:
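		// The generated sequence retries the LL/SC while the SC fails,
		// then computes the new value into Rout:
		//	SYNC
		// again:
		//	LL	(Rarg0), Rout
		//	ADDVU	Rarg1, Rout, Rtmp
		//	SC	Rtmp, (Rarg0)
		//	BEQ	Rtmp, again
		//	SYNC
		//	ADDVU	Rarg1, Rout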
		ll := mips.ALLV
		sc := mips.ASCV
		if v.Op == ssa.OpMIPS64LoweredAtomicAdd32 {
			ll = mips.ALL
			sc = mips.ASC
		}
		s.Prog(mips.ASYNC)
		p := s.Prog(ll)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()
		p1 := s.Prog(mips.AADDVU)
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = v.Args[1].Reg()
		p1.Reg = v.Reg0()
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = mips.REGTMP
		p2 := s.Prog(sc)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = mips.REGTMP
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = v.Args[0].Reg()
		p3 := s.Prog(mips.ABEQ)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = mips.REGTMP
		p3.To.Type = obj.TYPE_BRANCH
		gc.Patch(p3, p)
		s.Prog(mips.ASYNC)
		p4 := s.Prog(mips.AADDVU)
		p4.From.Type = obj.TYPE_REG
		p4.From.Reg = v.Args[1].Reg()
		p4.Reg = v.Reg0()
		p4.To.Type = obj.TYPE_REG
		p4.To.Reg = v.Reg0()
	case ssa.OpMIPS64LoweredAtomicAddconst32, ssa.OpMIPS64LoweredAtomicAddconst64:
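		// Same as LoweredAtomicAdd, but the addend is the constant in AuxInt:
		//	SYNC
		// again:
		//	LL	(Rarg0), Rout
		//	ADDVU	$auxint, Rout, Rtmp
		//	SC	Rtmp, (Rarg0)
		//	BEQ	Rtmp, again
		//	SYNC
		//	ADDVU	$auxint, Rout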
		ll := mips.ALLV
		sc := mips.ASCV
		if v.Op == ssa.OpMIPS64LoweredAtomicAddconst32 {
			ll = mips.ALL
			sc = mips.ASC
		}
		s.Prog(mips.ASYNC)
		p := s.Prog(ll)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()
		p1 := s.Prog(mips.AADDVU)
		p1.From.Type = obj.TYPE_CONST
		p1.From.Offset = v.AuxInt
		p1.Reg = v.Reg0()
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = mips.REGTMP
		p2 := s.Prog(sc)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = mips.REGTMP
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = v.Args[0].Reg()
		p3 := s.Prog(mips.ABEQ)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = mips.REGTMP
		p3.To.Type = obj.TYPE_BRANCH
		gc.Patch(p3, p)
		s.Prog(mips.ASYNC)
		p4 := s.Prog(mips.AADDVU)
		p4.From.Type = obj.TYPE_CONST
		p4.From.Offset = v.AuxInt
		p4.Reg = v.Reg0()
		p4.To.Type = obj.TYPE_REG
		p4.To.Reg = v.Reg0()
	case ssa.OpMIPS64LoweredAtomicCas32, ssa.OpMIPS64LoweredAtomicCas64:
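		// The generated sequence leaves in Rout whether the swap happened:
		//	MOVV	R0, Rout
		//	SYNC
		// again:
		//	LL	(Rarg0), Rtmp
		//	BNE	Rtmp, Rarg1, done
		//	MOVV	Rarg2, Rout
		//	SC	Rout, (Rarg0)
		//	BEQ	Rout, again
		// done:
		//	SYNC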
		ll := mips.ALLV
		sc := mips.ASCV
		if v.Op == ssa.OpMIPS64LoweredAtomicCas32 {
			ll = mips.ALL
			sc = mips.ASC
		}
		p := s.Prog(mips.AMOVV)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()
		s.Prog(mips.ASYNC)
		p1 := s.Prog(ll)
		p1.From.Type = obj.TYPE_MEM
		p1.From.Reg = v.Args[0].Reg()
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = mips.REGTMP
		p2 := s.Prog(mips.ABNE)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = v.Args[1].Reg()
		p2.Reg = mips.REGTMP
		p2.To.Type = obj.TYPE_BRANCH
		p3 := s.Prog(mips.AMOVV)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = v.Args[2].Reg()
		p3.To.Type = obj.TYPE_REG
		p3.To.Reg = v.Reg0()
		p4 := s.Prog(sc)
		p4.From.Type = obj.TYPE_REG
		p4.From.Reg = v.Reg0()
		p4.To.Type = obj.TYPE_MEM
		p4.To.Reg = v.Args[0].Reg()
		p5 := s.Prog(mips.ABEQ)
		p5.From.Type = obj.TYPE_REG
		p5.From.Reg = v.Reg0()
		p5.To.Type = obj.TYPE_BRANCH
		gc.Patch(p5, p1)
		p6 := s.Prog(mips.ASYNC)
		gc.Patch(p2, p6)
	case ssa.OpMIPS64LoweredNilCheck:
		// Issue a load which will fault if arg is nil.
		p := s.Prog(mips.AMOVB)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		gc.AddAux(&p.From, v)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REGTMP
		if gc.Debug_checknil != 0 && v.Pos.Line() > 1 {
			gc.Warnl(v.Pos, "generated nil check")
		}
	case ssa.OpMIPS64FPFlagTrue,
		ssa.OpMIPS64FPFlagFalse:
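		// Materialize the FP condition flag as 0 or 1 in a register:
		//	MOVV	$0, r
		//	BFPF	skip	(BFPT for FPFlagFalse)
		//	MOVV	$1, r
		// skip:
		//	NOP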
		branch := mips.ABFPF
		if v.Op == ssa.OpMIPS64FPFlagFalse {
			branch = mips.ABFPT
		}
		p := s.Prog(mips.AMOVV)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
		p2 := s.Prog(branch)
		p2.To.Type = obj.TYPE_BRANCH
		p3 := s.Prog(mips.AMOVV)
		p3.From.Type = obj.TYPE_CONST
		p3.From.Offset = 1
		p3.To.Type = obj.TYPE_REG
		p3.To.Reg = v.Reg()
		p4 := s.Prog(obj.ANOP)
		gc.Patch(p2, p4)
	case ssa.OpMIPS64LoweredGetClosurePtr:
		// Closure pointer is in the context register; just check it, no code needed.
		gc.CheckLoweredGetClosurePtr(v)
	case ssa.OpMIPS64LoweredGetCallerSP:
		// caller's SP is FixedFrameSize below the address of the first arg
		p := s.Prog(mips.AMOVV)
		p.From.Type = obj.TYPE_ADDR
		p.From.Offset = -gc.Ctxt.FixedFrameSize()
		p.From.Name = obj.NAME_PARAM
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64LoweredGetCallerPC:
		p := s.Prog(obj.AGETCALLERPC)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpClobber:
		// TODO: implement for clobberdead experiment. Nop is ok for now.
	default:
		v.Fatalf("genValue not implemented: %s", v.LongString())
	}
}

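// blockJump gives, for each conditional block kind, the branch instruction
// to use when the condition is taken and its inverse.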
var blockJump = map[ssa.BlockKind]struct {
	asm, invasm obj.As
}{
	ssa.BlockMIPS64EQ:  {mips.ABEQ, mips.ABNE},
	ssa.BlockMIPS64NE:  {mips.ABNE, mips.ABEQ},
	ssa.BlockMIPS64LTZ: {mips.ABLTZ, mips.ABGEZ},
	ssa.BlockMIPS64GEZ: {mips.ABGEZ, mips.ABLTZ},
	ssa.BlockMIPS64LEZ: {mips.ABLEZ, mips.ABGTZ},
	ssa.BlockMIPS64GTZ: {mips.ABGTZ, mips.ABLEZ},
	ssa.BlockMIPS64FPT: {mips.ABFPT, mips.ABFPF},
	ssa.BlockMIPS64FPF: {mips.ABFPF, mips.ABFPT},
}

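// ssaGenBlock emits the control-flow instructions at the end of block b.
// next is the block that will be laid out immediately after b.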
func ssaGenBlock(s *gc.SSAGenState, b, next *ssa.Block) {
	switch b.Kind {
	case ssa.BlockPlain:
		if b.Succs[0].Block() != next {
			p := s.Prog(obj.AJMP)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[0].Block()})
		}
	case ssa.BlockDefer:
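		// defer returns in R1:
		// 0 if we should continue executing
		// 1 if we should jump to deferreturn call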
		p := s.Prog(mips.ABNE)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = mips.REGZERO
		p.Reg = mips.REG_R1
		p.To.Type = obj.TYPE_BRANCH
		s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[1].Block()})
		if b.Succs[0].Block() != next {
			p := s.Prog(obj.AJMP)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[0].Block()})
		}
	case ssa.BlockExit:
	case ssa.BlockRet:
		s.Prog(obj.ARET)
	case ssa.BlockRetJmp:
		p := s.Prog(obj.ARET)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = b.Aux.(*obj.LSym)
	case ssa.BlockMIPS64EQ, ssa.BlockMIPS64NE,
		ssa.BlockMIPS64LTZ, ssa.BlockMIPS64GEZ,
		ssa.BlockMIPS64LEZ, ssa.BlockMIPS64GTZ,
		ssa.BlockMIPS64FPT, ssa.BlockMIPS64FPF:
		jmp := blockJump[b.Kind]
		var p *obj.Prog
		switch next {
		case b.Succs[0].Block():
			p = s.Br(jmp.invasm, b.Succs[1].Block())
		case b.Succs[1].Block():
			p = s.Br(jmp.asm, b.Succs[0].Block())
		default:
			if b.Likely != ssa.BranchUnlikely {
				p = s.Br(jmp.asm, b.Succs[0].Block())
				s.Br(obj.AJMP, b.Succs[1].Block())
			} else {
				p = s.Br(jmp.invasm, b.Succs[1].Block())
				s.Br(obj.AJMP, b.Succs[0].Block())
			}
		}
		if !b.Control.Type.IsFlags() {
			p.From.Type = obj.TYPE_REG
			p.From.Reg = b.Control.Reg()
		}
	default:
		b.Fatalf("branch not implemented: %s. Control: %s", b.LongString(), b.Control.LongString())
	}
}