// Source file src/pkg/cmd/compile/internal/ssa/rewrite.go
5 package ssa
6
7 import (
8 "cmd/compile/internal/types"
9 "cmd/internal/obj"
10 "cmd/internal/objabi"
11 "cmd/internal/src"
12 "encoding/binary"
13 "fmt"
14 "io"
15 "math"
16 "math/bits"
17 "os"
18 "path/filepath"
19 )
20
// applyRewrite repeatedly applies the block rewriter rb and the value
// rewriter rv to every block and value of f until neither reports a
// change, then removes the values invalidated along the way while
// preserving statement-boundary position marks.
func applyRewrite(f *Func, rb blockRewriter, rv valueRewriter) {
	// repeat rewrites until we find no more rewrites
	pendingLines := f.cachedLineStarts // Holds statement boundaries that need to be moved to a new value.
	pendingLines.clear()
	for {
		change := false
		for _, b := range f.Blocks {
			if b.Control != nil && b.Control.Op == OpCopy {
				// Shortcut a chain of copies feeding the control value
				// through to the ultimate source.
				for b.Control.Op == OpCopy {
					b.SetControl(b.Control.Args[0])
				}
			}
			if rb(b) {
				change = true
			}
			for j, v := range b.Values {
				change = phielimValue(v) || change

				// Eliminate copy inputs: replace each OpCopy argument of v
				// with the copy's (transitive) source, taking care not to
				// lose any statement boundary carried by the copy.
				for i, a := range v.Args {
					if a.Op != OpCopy {
						continue
					}
					aa := copySource(a)
					v.SetArg(i, aa)
					// If a, a copy, carries a statement boundary, try to find
					// a new home for it. The first candidate is aa, the value
					// replacing a, when it is on the same line in the same
					// block; the second is v itself, the user of a. If
					// neither is eligible, park the boundary in pendingLines
					// so the cleanup pass below can reattach it.
					if a.Pos.IsStmt() == src.PosIsStmt {
						if aa.Block == a.Block && aa.Pos.Line() == a.Pos.Line() && aa.Pos.IsStmt() != src.PosNotStmt {
							aa.Pos = aa.Pos.WithIsStmt()
						} else if v.Block == a.Block && v.Pos.Line() == a.Pos.Line() && v.Pos.IsStmt() != src.PosNotStmt {
							v.Pos = v.Pos.WithIsStmt()
						} else {
							// Remember the lost boundary, keyed by position and
							// block, for the removal pass at the end.
							pendingLines.set(a.Pos, int32(a.Block.ID))
						}
						a.Pos = a.Pos.WithNotStmt()
					}
					change = true
					// Walk down the chain of now-unused copies, invalidating
					// each dead one and moving to its argument.
					for a.Uses == 0 {
						b := a.Args[0]
						a.reset(OpInvalid)
						a = b
					}
				}

				// apply rewrite function
				if rv(v) {
					change = true
					// If the rewritten v became a poor choice for a statement
					// boundary, move the boundary to a better value nearby.
					if v.Pos.IsStmt() == src.PosIsStmt {
						if k := nextGoodStatementIndex(v, j, b); k != j {
							v.Pos = v.Pos.WithNotStmt()
							b.Values[k].Pos = b.Values[k].Pos.WithIsStmt()
						}
					}
				}
			}
		}
		if !change {
			break
		}
	}
	// remove clobbered values, compacting each block's Values slice in
	// place and reattaching any pending statement boundaries.
	for _, b := range f.Blocks {
		j := 0
		for i, v := range b.Values {
			vl := v.Pos
			if v.Op == OpInvalid {
				// Save the boundary of a removed value before freeing it.
				if v.Pos.IsStmt() == src.PosIsStmt {
					pendingLines.set(vl, int32(b.ID))
				}
				f.freeValue(v)
				continue
			}
			// Give a parked boundary to the first surviving value at the
			// same position in this block.
			if v.Pos.IsStmt() != src.PosNotStmt && pendingLines.get(vl) == int32(b.ID) {
				pendingLines.remove(vl)
				v.Pos = v.Pos.WithIsStmt()
			}
			if i != j {
				b.Values[j] = v
			}
			j++
		}
		// If a boundary is still pending at the block's own position,
		// attach it to the block.
		if pendingLines.get(b.Pos) == int32(b.ID) {
			b.Pos = b.Pos.WithIsStmt()
			pendingLines.remove(b.Pos)
		}
		if j != len(b.Values) {
			// Nil out the freed tail so the GC can reclaim the values.
			tail := b.Values[j:]
			for j := range tail {
				tail[j] = nil
			}
			b.Values = b.Values[:j]
		}
	}
}
130
131
132
133 func is64BitFloat(t *types.Type) bool {
134 return t.Size() == 8 && t.IsFloat()
135 }
136
137 func is32BitFloat(t *types.Type) bool {
138 return t.Size() == 4 && t.IsFloat()
139 }
140
141 func is64BitInt(t *types.Type) bool {
142 return t.Size() == 8 && t.IsInteger()
143 }
144
145 func is32BitInt(t *types.Type) bool {
146 return t.Size() == 4 && t.IsInteger()
147 }
148
149 func is16BitInt(t *types.Type) bool {
150 return t.Size() == 2 && t.IsInteger()
151 }
152
153 func is8BitInt(t *types.Type) bool {
154 return t.Size() == 1 && t.IsInteger()
155 }
156
157 func isPtr(t *types.Type) bool {
158 return t.IsPtrShaped()
159 }
160
161 func isSigned(t *types.Type) bool {
162 return t.IsSigned()
163 }
164
165
166
// mergeSym merges two symbolic offsets. At most one of the two may be
// non-nil; the non-nil one (or nil, if both are nil) is returned.
// It panics if both are non-nil.
func mergeSym(x, y interface{}) interface{} {
	switch {
	case x == nil:
		return y
	case y == nil:
		return x
	}
	panic(fmt.Sprintf("mergeSym with two non-nil syms %s %s", x, y))
}

// canMergeSym reports whether mergeSym(x, y) would succeed, i.e. at
// least one of the two symbols is nil.
func canMergeSym(x, y interface{}) bool {
	return !(x != nil && y != nil)
}
179
180
181
182
183
// canMergeLoadClobber reports whether the load can be merged into target
// without invalidating the schedule. It also checks that the other,
// non-load argument x is something we are ok with clobbering.
func canMergeLoadClobber(target, load, x *Value) bool {
	// The register containing x is going to get clobbered.
	// Don't merge if we still need the value of x.
	// We don't have liveness information here, but we can
	// approximate x dying with:
	//  1) target is x's only use.
	//  2) target is not in a deeper loop than x.
	if x.Uses != 1 {
		return false
	}
	loopnest := x.Block.Func.loopnest()
	loopnest.calculateDepths()
	if loopnest.depth(target.Block.ID) > loopnest.depth(x.Block.ID) {
		return false
	}
	return canMergeLoad(target, load)
}
201
202
203
// canMergeLoad reports whether the load can be merged into target without
// invalidating the schedule.
func canMergeLoad(target, load *Value) bool {
	if target.Block.ID != load.Block.ID {
		// If the load is in a different block do not merge it.
		return false
	}

	// We can't merge the load into the target if the load
	// has more than one use.
	if load.Uses != 1 {
		return false
	}

	mem := load.MemoryArg()

	// We need the load's memory arg to still be alive at target. That
	// can't be the case if one of target's args depends on a memory
	// state that is a successor of load's memory arg: forcing the load
	// to schedule at target would then reorder it past a conflicting
	// memory operation. Walk backwards through target's same-block,
	// non-load arguments looking for such a memory state.

	// args is the work list of values still to examine.
	var args []*Value
	for _, a := range target.Args {
		if a != load && a.Block.ID == target.Block.ID {
			args = append(args, a)
		}
	}

	// memPreds contains memory states known to be predecessors of load's
	// memory state. It is lazily initialized the first time the walk
	// encounters a memory value.
	var memPreds map[*Value]bool
	for i := 0; len(args) > 0; i++ {
		const limit = 100
		if i >= limit {
			// Give up if we have done a lot of iterations.
			return false
		}
		v := args[len(args)-1]
		args = args[:len(args)-1]
		if target.Block.ID != v.Block.ID {
			// Since target and load are in the same block
			// we can stop searching when we leave the block.
			continue
		}
		if v.Op == OpPhi {
			// A Phi implies we have reached the top of the block.
			// The memory phi, if it exists, is the first
			// value in the block, so it cannot follow load's memory arg.
			continue
		}
		if v.Type.IsTuple() && v.Type.FieldType(1).IsMemory() {
			// We could handle this situation however it is likely
			// to be very rare.
			return false
		}
		if v.Op.SymEffect()&SymAddr != 0 {
			// This case prevents an operation that calculates the
			// address of a local variable from being forced to schedule
			// before its use. Merging would force, e.g., a compare that
			// uses both the load and the address to schedule before the
			// VARDEF of that local, which in turn would force the
			// address computation before the load.
			// NOTE(review): see the upstream discussion around issue
			// 28445 for the motivating example — confirm.
			return false
		}
		if v.Type.IsMemory() {
			if memPreds == nil {
				// Lazily initialize memPreds by walking mem's chain of
				// memory predecessors within this block (bounded).
				memPreds = make(map[*Value]bool)
				m := mem
				const limit = 50
				for i := 0; i < limit; i++ {
					if m.Op == OpPhi {
						// The memory phi, if it exists, is the first
						// value in the block.
						break
					}
					if m.Block.ID != target.Block.ID {
						break
					}
					if !m.Type.IsMemory() {
						break
					}
					memPreds[m] = true
					if len(m.Args) == 0 {
						break
					}
					m = m.MemoryArg()
				}
			}

			// We found a memory value, so we need to check whether it
			// is a predecessor of mem. If so, target's argument only
			// depends on memory that existed before the load and the
			// merge is safe; otherwise it may depend on a successor
			// state and we must refuse.
			if memPreds[v] {
				continue
			}
			return false
		}
		if len(v.Args) > 0 && v.Args[len(v.Args)-1] == mem {
			// If v takes mem as an input then we know mem
			// is valid at this point.
			continue
		}
		// Keep walking backwards through v's same-block arguments.
		for _, a := range v.Args {
			if target.Block.ID == a.Block.ID {
				args = append(args, a)
			}
		}
	}

	return true
}
336
337
// isSameSym reports whether sym is a fmt.Stringer whose String result
// equals name.
func isSameSym(sym interface{}, name string) bool {
	if s, ok := sym.(fmt.Stringer); ok {
		return s.String() == name
	}
	return false
}
342
343
// nlz returns the number of leading zeros in the 64-bit pattern of x.
func nlz(x int64) int64 {
	u := uint64(x)
	return int64(bits.LeadingZeros64(u))
}

// ntz returns the number of trailing zeros in the 64-bit pattern of x.
func ntz(x int64) int64 {
	u := uint64(x)
	return int64(bits.TrailingZeros64(u))
}

// oneBit reports whether exactly one bit is set in x.
func oneBit(x int64) bool {
	u := uint64(x)
	return u != 0 && u&(u-1) == 0
}

// nlo returns the number of leading ones in the 64-bit pattern of x.
func nlo(x int64) int64 {
	return nlz(^x)
}

// nto returns the number of trailing ones in the 64-bit pattern of x.
func nto(x int64) int64 {
	return ntz(^x)
}
366
367
368
// log2 returns the base-2 logarithm of n rounded down, or -1 for n <= 0.
func log2(n int64) int64 {
	return int64(63 - bits.LeadingZeros64(uint64(n)))
}

// log2uint32 returns the base-2 logarithm of the low 32 bits of n
// rounded down, or -1 if those bits are all zero.
func log2uint32(n int64) int64 {
	return int64(31 - bits.LeadingZeros32(uint32(n)))
}

// isPowerOfTwo reports whether n is a positive power of 2.
func isPowerOfTwo(n int64) bool {
	if n <= 0 {
		return false
	}
	return n&(n-1) == 0
}
383
384
// isUint64PowerOfTwo reports whether in, interpreted as a uint64,
// has exactly one bit set.
func isUint64PowerOfTwo(in int64) bool {
	return bits.OnesCount64(uint64(in)) == 1
}

// isUint32PowerOfTwo reports whether the low 32 bits of in, interpreted
// as a uint32, have exactly one bit set.
func isUint32PowerOfTwo(in int64) bool {
	return bits.OnesCount32(uint32(in)) == 1
}
395
396
// is32Bit reports whether n fits in a signed 32-bit integer.
func is32Bit(n int64) bool {
	return -1<<31 <= n && n < 1<<31
}

// is16Bit reports whether n fits in a signed 16-bit integer.
func is16Bit(n int64) bool {
	return -1<<15 <= n && n < 1<<15
}

// isU12Bit reports whether n fits in an unsigned 12-bit integer.
func isU12Bit(n int64) bool {
	return 0 <= n && n < 1<<12
}

// isU16Bit reports whether n fits in an unsigned 16-bit integer.
func isU16Bit(n int64) bool {
	return 0 <= n && n < 1<<16
}

// isU32Bit reports whether n fits in an unsigned 32-bit integer.
func isU32Bit(n int64) bool {
	return 0 <= n && n < 1<<32
}

// is20Bit reports whether n fits in a signed 20-bit integer.
func is20Bit(n int64) bool {
	return -1<<19 <= n && n < 1<<19
}

// b2i translates a boolean to 0 or 1 for assigning to AuxInt.
func b2i(b bool) int64 {
	if !b {
		return 0
	}
	return 1
}
433
434
435
// shiftIsBounded reports whether (left/right) shift Value v is known to
// be bounded, i.e. no clamping/masking of the shift amount is needed.
// NOTE(review): a nonzero AuxInt appears to be set by the rewrite rules
// that prove the shift amount in range — confirm against the generated
// rules that construct these shift ops.
func shiftIsBounded(v *Value) bool {
	return v.AuxInt != 0
}
439
440
441
442 func truncate64Fto32F(f float64) float32 {
443 if !isExactFloat32(f) {
444 panic("truncate64Fto32F: truncation is not exact")
445 }
446 if !math.IsNaN(f) {
447 return float32(f)
448 }
449
450
451 b := math.Float64bits(f)
452 m := b & ((1 << 52) - 1)
453
454 r := uint32(((b >> 32) & (1 << 31)) | 0x7f800000 | (m >> (52 - 23)))
455 return math.Float32frombits(r)
456 }
457
458
459
// extend32Fto64F converts a float32 to a float64 while preserving the
// mantissa payload of NaNs, which the ordinary Go conversion loses.
func extend32Fto64F(f float32) float64 {
	if !math.IsNaN(float64(f)) {
		return float64(f)
	}
	// Widen the NaN by hand: sign bit, all-ones float64 exponent, and
	// the 23 payload bits shifted up to the top of the 52-bit mantissa.
	b := uint64(math.Float32bits(f))
	sign := (b << 32) & (1 << 63)
	exp := uint64(0x7ff) << 52
	frac := (b & 0x7fffff) << (52 - 23)
	return math.Float64frombits(sign | exp | frac)
}
471
472
// NeedsFixUp reports whether the Value v needs fix-up code.
// NOTE(review): AuxInt == 0 appears to mean the rewrite rules could not
// prove the operation safe without extra checks — confirm against the
// division rules that consult this predicate.
func NeedsFixUp(v *Value) bool {
	return v.AuxInt == 0
}
476
477
// i2f reinterprets the bit pattern of i as a float64.
func i2f(i int64) float64 {
	u := uint64(i)
	return math.Float64frombits(u)
}

// auxFrom64F encodes a float64 value so it can be stored in an AuxInt.
func auxFrom64F(f float64) int64 {
	b := math.Float64bits(f)
	return int64(b)
}
486
487
488 func auxFrom32F(f float32) int64 {
489 return int64(math.Float64bits(extend32Fto64F(f)))
490 }
491
492
493 func auxTo32F(i int64) float32 {
494 return truncate64Fto32F(math.Float64frombits(uint64(i)))
495 }
496
497
// auxTo64F decodes a float64 from its AuxInt encoding.
func auxTo64F(i int64) float64 {
	u := uint64(i)
	return math.Float64frombits(u)
}

// uaddOvf reports whether a+b, computed as unsigned 64-bit values,
// overflows.
func uaddOvf(a, b int64) bool {
	sum := uint64(a) + uint64(b)
	return sum < uint64(a)
}
506
507
508
509 func devirt(v *Value, sym interface{}, offset int64) *obj.LSym {
510 f := v.Block.Func
511 n, ok := sym.(*obj.LSym)
512 if !ok {
513 return nil
514 }
515 lsym := f.fe.DerefItab(n, offset)
516 if f.pass.debug > 0 {
517 if lsym != nil {
518 f.Warnl(v.Pos, "de-virtualizing call")
519 } else {
520 f.Warnl(v.Pos, "couldn't de-virtualize call")
521 }
522 }
523 return lsym
524 }
525
526
527 func isSamePtr(p1, p2 *Value) bool {
528 if p1 == p2 {
529 return true
530 }
531 if p1.Op != p2.Op {
532 return false
533 }
534 switch p1.Op {
535 case OpOffPtr:
536 return p1.AuxInt == p2.AuxInt && isSamePtr(p1.Args[0], p2.Args[0])
537 case OpAddr, OpLocalAddr:
538
539
540 return p1.Aux == p2.Aux && p1.Args[0].Op == p2.Args[0].Op
541 case OpAddPtr:
542 return p1.Args[1] == p2.Args[1] && isSamePtr(p1.Args[0], p2.Args[0])
543 }
544 return false
545 }
546
547 func isStackPtr(v *Value) bool {
548 for v.Op == OpOffPtr || v.Op == OpAddPtr {
549 v = v.Args[0]
550 }
551 return v.Op == OpSP || v.Op == OpLocalAddr
552 }
553
554
555
556
557 func disjoint(p1 *Value, n1 int64, p2 *Value, n2 int64) bool {
558 if n1 == 0 || n2 == 0 {
559 return true
560 }
561 if p1 == p2 {
562 return false
563 }
564 baseAndOffset := func(ptr *Value) (base *Value, offset int64) {
565 base, offset = ptr, 0
566 for base.Op == OpOffPtr {
567 offset += base.AuxInt
568 base = base.Args[0]
569 }
570 return base, offset
571 }
572 p1, off1 := baseAndOffset(p1)
573 p2, off2 := baseAndOffset(p2)
574 if isSamePtr(p1, p2) {
575 return !overlap(off1, n1, off2, n2)
576 }
577
578
579
580
581 switch p1.Op {
582 case OpAddr, OpLocalAddr:
583 if p2.Op == OpAddr || p2.Op == OpLocalAddr || p2.Op == OpSP {
584 return true
585 }
586 return p2.Op == OpArg && p1.Args[0].Op == OpSP
587 case OpArg:
588 if p2.Op == OpSP || p2.Op == OpLocalAddr {
589 return true
590 }
591 case OpSP:
592 return p2.Op == OpAddr || p2.Op == OpLocalAddr || p2.Op == OpArg || p2.Op == OpSP
593 }
594 return false
595 }
596
597
598 func moveSize(align int64, c *Config) int64 {
599 switch {
600 case align%8 == 0 && c.PtrSize == 8:
601 return 8
602 case align%4 == 0:
603 return 4
604 case align%2 == 0:
605 return 2
606 }
607 return 1
608 }
609
610
611
612
// mergePoint finds a block among a's blocks that dominates b and is
// itself dominated by all of a's blocks. Returns nil if it can't find
// one. Might return nil even if one does exist (the search is bounded
// and only follows single-predecessor edges).
func mergePoint(b *Block, a ...*Value) *Block {
	// Walk backward from b until we hit one of the blocks containing an
	// element of a, following only unique predecessors.

	// d bounds the total walk distance.
	d := 100

	for d > 0 {
		for _, x := range a {
			if b == x.Block {
				goto found
			}
		}
		if len(b.Preds) > 1 {
			// Multiple predecessors: don't know which way to walk back. Abort.
			return nil
		}
		b = b.Preds[0].b
		d--
	}
	return nil // too far away
found:
	// At this point b is the block of the first value in a reached by
	// the backward walk; if anything is returned, it is r.
	r := b

	// Keep walking backward, counting how many of a's blocks we pass.
	// All of them must dominate r for r to be a valid merge point.
	na := 0
	for d > 0 {
		for _, x := range a {
			if b == x.Block {
				na++
			}
		}
		if na == len(a) {
			// Found all of a in a backward walk; r is dominated by all of them.
			return r
		}
		if len(b.Preds) > 1 {
			return nil
		}
		b = b.Preds[0].b
		d--

	}
	return nil // too far away
}
659
660
661
662
663
664 func clobber(v *Value) bool {
665 v.reset(OpInvalid)
666
667 return true
668 }
669
670
671
672
673 func clobberIfDead(v *Value) bool {
674 if v.Uses == 1 {
675 v.reset(OpInvalid)
676 }
677
678 return true
679 }
680
681
682
683
684
// noteRule prints a marker each time the named rule fires — handy
// temporary instrumentation while debugging rewrite rules. It always
// returns true so it can be &&'ed into a rule condition.
func noteRule(s string) bool {
	fmt.Println(s)
	return true
}
689
690
691
692
693
694
695 func countRule(v *Value, key string) bool {
696 f := v.Block.Func
697 if f.ruleMatches == nil {
698 f.ruleMatches = make(map[string]int)
699 }
700 f.ruleMatches[key]++
701 return true
702 }
703
704
705
706 func warnRule(cond bool, v *Value, s string) bool {
707 if pos := v.Pos; pos.Line() > 1 && cond {
708 v.Block.Func.Warnl(pos, s)
709 }
710 return true
711 }
712
713
714 func flagArg(v *Value) *Value {
715 if len(v.Args) != 1 || !v.Args[0].Type.IsFlags() {
716 return nil
717 }
718 return v.Args[0]
719 }
720
721
722
723
724
// arm64Negate finds the complement to an ARM64 condition code, for
// example Equal -> NotEqual or LessThan -> GreaterEqual.
//
// NOTE(review): for the floating-point codes the mapping treats
// !(<) as >= etc.; with NaN operands those are not strict complements,
// so this presumably relies on the call sites only using it where the
// unordered case is acceptable — confirm against the rules that call it.
func arm64Negate(op Op) Op {
	switch op {
	case OpARM64LessThan:
		return OpARM64GreaterEqual
	case OpARM64LessThanU:
		return OpARM64GreaterEqualU
	case OpARM64GreaterThan:
		return OpARM64LessEqual
	case OpARM64GreaterThanU:
		return OpARM64LessEqualU
	case OpARM64LessEqual:
		return OpARM64GreaterThan
	case OpARM64LessEqualU:
		return OpARM64GreaterThanU
	case OpARM64GreaterEqual:
		return OpARM64LessThan
	case OpARM64GreaterEqualU:
		return OpARM64LessThanU
	case OpARM64Equal:
		return OpARM64NotEqual
	case OpARM64NotEqual:
		return OpARM64Equal
	// Floating-point comparisons.
	case OpARM64LessThanF:
		return OpARM64GreaterEqualF
	case OpARM64GreaterThanF:
		return OpARM64LessEqualF
	case OpARM64LessEqualF:
		return OpARM64GreaterThanF
	case OpARM64GreaterEqualF:
		return OpARM64LessThanF
	default:
		panic("unreachable")
	}
}
759
760
761
762
763
764
765
766
// arm64Invert returns the condition code that gives the same result
// when the two compared operands are exchanged, e.g.
// LessThan <-> GreaterThan. Equal and NotEqual are symmetric in their
// operands and map to themselves.
func arm64Invert(op Op) Op {
	switch op {
	case OpARM64LessThan:
		return OpARM64GreaterThan
	case OpARM64LessThanU:
		return OpARM64GreaterThanU
	case OpARM64GreaterThan:
		return OpARM64LessThan
	case OpARM64GreaterThanU:
		return OpARM64LessThanU
	case OpARM64LessEqual:
		return OpARM64GreaterEqual
	case OpARM64LessEqualU:
		return OpARM64GreaterEqualU
	case OpARM64GreaterEqual:
		return OpARM64LessEqual
	case OpARM64GreaterEqualU:
		return OpARM64LessEqualU
	case OpARM64Equal, OpARM64NotEqual:
		return op
	// Floating-point comparisons invert the same way as the integer ones.
	case OpARM64LessThanF:
		return OpARM64GreaterThanF
	case OpARM64GreaterThanF:
		return OpARM64LessThanF
	case OpARM64LessEqualF:
		return OpARM64GreaterEqualF
	case OpARM64GreaterEqualF:
		return OpARM64LessEqualF
	default:
		panic("unreachable")
	}
}
799
800
801
802
// ccARM64Eval evaluates an ARM64 condition code op cc against a flags
// value that is potentially constant. It returns 1 if the comparison is
// known true, -1 if known false, and 0 if the flags are not a known
// constant.
func ccARM64Eval(cc interface{}, flags *Value) int {
	op := cc.(Op)
	fop := flags.Op
	switch fop {
	case OpARM64InvertFlags:
		// Inverted flags: the result for the underlying flags is negated.
		return -ccARM64Eval(op, flags.Args[0])
	case OpARM64FlagEQ:
		// Operands compared equal.
		switch op {
		case OpARM64Equal, OpARM64GreaterEqual, OpARM64LessEqual,
			OpARM64GreaterEqualU, OpARM64LessEqualU:
			return 1
		default:
			return -1
		}
	case OpARM64FlagLT_ULT:
		// Signed less-than, unsigned less-than.
		switch op {
		case OpARM64LessThan, OpARM64LessThanU,
			OpARM64LessEqual, OpARM64LessEqualU:
			return 1
		default:
			return -1
		}
	case OpARM64FlagLT_UGT:
		// Signed less-than, unsigned greater-than.
		switch op {
		case OpARM64LessThan, OpARM64GreaterThanU,
			OpARM64LessEqual, OpARM64GreaterEqualU:
			return 1
		default:
			return -1
		}
	case OpARM64FlagGT_ULT:
		// Signed greater-than, unsigned less-than.
		switch op {
		case OpARM64GreaterThan, OpARM64LessThanU,
			OpARM64GreaterEqual, OpARM64LessEqualU:
			return 1
		default:
			return -1
		}
	case OpARM64FlagGT_UGT:
		// Signed greater-than, unsigned greater-than.
		switch op {
		case OpARM64GreaterThan, OpARM64GreaterThanU,
			OpARM64GreaterEqual, OpARM64GreaterEqualU:
			return 1
		default:
			return -1
		}
	default:
		// Flags are not a known constant.
		return 0
	}
}
853
854
855
// logRule logs the use of the rule s. This is only called when rewrite
// rules were generated with logging enabled; see the rule generator.
func logRule(s string) {
	if ruleFile == nil {
		// Open a log file to write to. We open in append mode because
		// the build runs the compiler many times and we want the
		// concatenation of all of those logs; users need to remove the
		// old log to get fresh data.
		// NOTE(review): compilers may run in parallel; this lazy open
		// and the appends below are unsynchronized — confirm callers
		// tolerate interleaved output.
		w, err := os.OpenFile(filepath.Join(os.Getenv("GOROOT"), "src", "rulelog"),
			os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0666)
		if err != nil {
			panic(err)
		}
		ruleFile = w
	}
	_, err := fmt.Fprintln(ruleFile, s)
	if err != nil {
		panic(err)
	}
}
876
// ruleFile is the lazily-opened destination for logRule output.
var ruleFile io.Writer
878
// min returns the smaller of x and y.
func min(x, y int64) int64 {
	if y < x {
		return y
	}
	return x
}
885
886 func isConstZero(v *Value) bool {
887 switch v.Op {
888 case OpConstNil:
889 return true
890 case OpConst64, OpConst32, OpConst16, OpConst8, OpConstBool, OpConst32F, OpConst64F:
891 return v.AuxInt == 0
892 }
893 return false
894 }
895
896
// reciprocalExact64 reports whether 1/c is representable exactly as a
// float64, i.e. c is a power of two whose reciprocal neither overflows
// nor denormalizes.
func reciprocalExact64(c float64) bool {
	b := math.Float64bits(c)
	if b&(1<<52-1) != 0 {
		// Nonzero mantissa: not a power of two (or a NaN/denormal).
		return false
	}
	switch exp := b >> 52 & (1<<11 - 1); exp {
	case 0x7ff:
		// Infinity (mantissa is zero here, so not NaN).
		return false
	case 0x7fe:
		// Reciprocal would be denormal.
		return false
	case 0:
		// c is ±0; reciprocal is infinite.
		return false
	default:
		return true
	}
}
917
918
// reciprocalExact32 reports whether 1/c is representable exactly as a
// float32, i.e. c is a power of two whose reciprocal neither overflows
// nor denormalizes.
func reciprocalExact32(c float32) bool {
	b := math.Float32bits(c)
	if b&(1<<23-1) != 0 {
		// Nonzero mantissa: not a power of two (or a NaN/denormal).
		return false
	}
	switch exp := b >> 23 & (1<<8 - 1); exp {
	case 0xff:
		// Infinity (mantissa is zero here, so not NaN).
		return false
	case 0xfe:
		// Reciprocal would be denormal.
		return false
	case 0:
		// c is ±0; reciprocal is infinite.
		return false
	default:
		return true
	}
}
939
940
// isARMImmRot reports whether v can be encoded as an ARM immediate:
// an 8-bit value rotated right by an even number of bit positions.
func isARMImmRot(v uint32) bool {
	x := v
	// Try all 16 even rotations; the check x <= 0xff is equivalent to
	// "fits in the low 8 bits".
	for rot := 0; rot < 32; rot += 2 {
		if x <= 0xff {
			return true
		}
		x = x<<2 | x>>30 // rotate left by 2
	}
	return false
}
951
952
953
// overlap reports whether the byte ranges [offset1, offset1+size1) and
// [offset2, offset2+size2) intersect.
func overlap(offset1, size1, offset2, size2 int64) bool {
	// Either range1 starts inside range2, or range2 starts inside range1.
	return (offset1 >= offset2 && offset2+size2 > offset1) ||
		(offset2 >= offset1 && offset1+size1 > offset2)
}

// areAdjacentOffsets reports whether off1 and off2 are size bytes apart,
// in either order.
func areAdjacentOffsets(off1, off2, size int64) bool {
	if off1+size == off2 {
		return true
	}
	return off1 == off2+size
}
967
968
969
970
971 func zeroUpper32Bits(x *Value, depth int) bool {
972 switch x.Op {
973 case OpAMD64MOVLconst, OpAMD64MOVLload, OpAMD64MOVLQZX, OpAMD64MOVLloadidx1,
974 OpAMD64MOVWload, OpAMD64MOVWloadidx1, OpAMD64MOVBload, OpAMD64MOVBloadidx1,
975 OpAMD64MOVLloadidx4, OpAMD64ADDLload, OpAMD64SUBLload, OpAMD64ANDLload,
976 OpAMD64ORLload, OpAMD64XORLload, OpAMD64CVTTSD2SL,
977 OpAMD64ADDL, OpAMD64ADDLconst, OpAMD64SUBL, OpAMD64SUBLconst,
978 OpAMD64ANDL, OpAMD64ANDLconst, OpAMD64ORL, OpAMD64ORLconst,
979 OpAMD64XORL, OpAMD64XORLconst, OpAMD64NEGL, OpAMD64NOTL:
980 return true
981 case OpArg:
982 return x.Type.Width == 4
983 case OpPhi, OpSelect0, OpSelect1:
984
985
986 if depth <= 0 {
987 return false
988 }
989 for i := range x.Args {
990 if !zeroUpper32Bits(x.Args[i], depth-1) {
991 return false
992 }
993 }
994 return true
995
996 }
997 return false
998 }
999
1000
1001 func zeroUpper48Bits(x *Value, depth int) bool {
1002 switch x.Op {
1003 case OpAMD64MOVWQZX, OpAMD64MOVWload, OpAMD64MOVWloadidx1, OpAMD64MOVWloadidx2:
1004 return true
1005 case OpArg:
1006 return x.Type.Width == 2
1007 case OpPhi, OpSelect0, OpSelect1:
1008
1009
1010 if depth <= 0 {
1011 return false
1012 }
1013 for i := range x.Args {
1014 if !zeroUpper48Bits(x.Args[i], depth-1) {
1015 return false
1016 }
1017 }
1018 return true
1019
1020 }
1021 return false
1022 }
1023
1024
1025 func zeroUpper56Bits(x *Value, depth int) bool {
1026 switch x.Op {
1027 case OpAMD64MOVBQZX, OpAMD64MOVBload, OpAMD64MOVBloadidx1:
1028 return true
1029 case OpArg:
1030 return x.Type.Width == 1
1031 case OpPhi, OpSelect0, OpSelect1:
1032
1033
1034 if depth <= 0 {
1035 return false
1036 }
1037 for i := range x.Args {
1038 if !zeroUpper56Bits(x.Args[i], depth-1) {
1039 return false
1040 }
1041 }
1042 return true
1043
1044 }
1045 return false
1046 }
1047
1048
1049
1050
1051
1052 func isInlinableMemmove(dst, src *Value, sz int64, c *Config) bool {
1053
1054
1055
1056
1057 switch c.arch {
1058 case "amd64", "amd64p32":
1059 return sz <= 16 || (sz < 1024 && disjoint(dst, sz, src, sz))
1060 case "386", "ppc64", "ppc64le", "arm64":
1061 return sz <= 8
1062 case "s390x":
1063 return sz <= 8 || disjoint(dst, sz, src, sz)
1064 case "arm", "mips", "mips64", "mipsle", "mips64le":
1065 return sz <= 4
1066 }
1067 return false
1068 }
1069
1070
1071
1072 func hasSmallRotate(c *Config) bool {
1073 switch c.arch {
1074 case "amd64", "amd64p32", "386":
1075 return true
1076 default:
1077 return false
1078 }
1079 }
1080
1081
// armBFAuxInt packs an ARM(64) bitfield lsb and width into the AuxInt
// encoding: width in the low 8 bits, lsb in the bits above. It panics
// on out-of-range inputs.
func armBFAuxInt(lsb, width int64) int64 {
	if lsb < 0 || lsb > 63 {
		panic("ARM(64) bit field lsb constant out of range")
	}
	if width < 1 || width > 64 {
		panic("ARM(64) bit field width constant out of range")
	}
	return lsb<<8 | width
}

// getARM64BFlsb extracts the lsb from a packed bitfield AuxInt.
func getARM64BFlsb(bfc int64) int64 {
	return int64(uint64(bfc) >> 8)
}

// getARM64BFwidth extracts the width from a packed bitfield AuxInt.
func getARM64BFwidth(bfc int64) int64 {
	return bfc & 0xff
}
1101
1102
1103 func isARM64BFMask(lsb, mask, rshift int64) bool {
1104 shiftedMask := int64(uint64(mask) >> uint64(rshift))
1105 return shiftedMask != 0 && isPowerOfTwo(shiftedMask+1) && nto(shiftedMask)+lsb < 64
1106 }
1107
1108
1109 func arm64BFWidth(mask, rshift int64) int64 {
1110 shiftedMask := int64(uint64(mask) >> uint64(rshift))
1111 if shiftedMask == 0 {
1112 panic("ARM64 BF mask is zero")
1113 }
1114 return nto(shiftedMask)
1115 }
1116
1117
1118
1119 func sizeof(t interface{}) int64 {
1120 return t.(*types.Type).Size()
1121 }
1122
1123
1124
1125 func alignof(t interface{}) int64 {
1126 return t.(*types.Type).Alignment()
1127 }
1128
1129
1130
1131
1132
1133 func registerizable(b *Block, t interface{}) bool {
1134 typ := t.(*types.Type)
1135 if typ.IsPtrShaped() || typ.IsFloat() {
1136 return true
1137 }
1138 if typ.IsInteger() {
1139 return typ.Size() <= b.Func.Config.RegSize
1140 }
1141 return false
1142 }
1143
1144
// needRaceCleanup reports whether this call to racefuncenter/racefuncexit
// can be removed: the race instrumentation is unnecessary when the
// function makes no other calls (apart from a small set of panic
// helpers), because there is nothing for the race detector to attribute
// to this frame.
func needRaceCleanup(sym interface{}, v *Value) bool {
	f := v.Block.Func
	if !f.Config.Race {
		return false
	}
	if !isSameSym(sym, "runtime.racefuncenter") && !isSameSym(sym, "runtime.racefuncexit") {
		return false
	}
	// Scan the whole function for calls that require keeping the
	// race bookkeeping.
	for _, b := range f.Blocks {
		for _, v := range b.Values {
			switch v.Op {
			case OpStaticCall:
				// The race functions themselves and these panic
				// generators are fine; anything else means we must
				// keep racefuncenter/exit for accurate stack traces.
				s := v.Aux.(fmt.Stringer).String()
				switch s {
				case "runtime.racefuncenter", "runtime.racefuncexit",
					"runtime.panicdivide", "runtime.panicwrap",
					"runtime.panicshift":
					continue
				}
				// Any other static call requires the cleanup to stay.
				return false
			case OpPanicBounds, OpPanicExtend:
				// Okay: panic generators, like the calls allowed above.
			case OpClosureCall, OpInterCall:
				// Indirect calls: keep the race functions.
				return false
			}
		}
	}
	return true
}
1179
1180
1181 func symIsRO(sym interface{}) bool {
1182 lsym := sym.(*obj.LSym)
1183 return lsym.Type == objabi.SRODATA && len(lsym.R) == 0
1184 }
1185
1186
1187 func read8(sym interface{}, off int64) uint8 {
1188 lsym := sym.(*obj.LSym)
1189 if off >= int64(len(lsym.P)) || off < 0 {
1190
1191
1192
1193
1194 return 0
1195 }
1196 return lsym.P[off]
1197 }
1198
1199
1200 func read16(sym interface{}, off int64, bigEndian bool) uint16 {
1201 lsym := sym.(*obj.LSym)
1202 if off >= int64(len(lsym.P))-1 || off < 0 {
1203 return 0
1204 }
1205 if bigEndian {
1206 return binary.BigEndian.Uint16(lsym.P[off:])
1207 } else {
1208 return binary.LittleEndian.Uint16(lsym.P[off:])
1209 }
1210 }
1211
1212
1213 func read32(sym interface{}, off int64, bigEndian bool) uint32 {
1214 lsym := sym.(*obj.LSym)
1215 if off >= int64(len(lsym.P))-3 || off < 0 {
1216 return 0
1217 }
1218 if bigEndian {
1219 return binary.BigEndian.Uint32(lsym.P[off:])
1220 } else {
1221 return binary.LittleEndian.Uint32(lsym.P[off:])
1222 }
1223 }
1224
1225
1226 func read64(sym interface{}, off int64, bigEndian bool) uint64 {
1227 lsym := sym.(*obj.LSym)
1228 if off >= int64(len(lsym.P))-7 || off < 0 {
1229 return 0
1230 }
1231 if bigEndian {
1232 return binary.BigEndian.Uint64(lsym.P[off:])
1233 } else {
1234 return binary.LittleEndian.Uint64(lsym.P[off:])
1235 }
1236 }
1237