Source file src/cmd/compile/internal/ssa/rewriteARM64.go
// Code generated from gen/ARM64.rules; DO NOT EDIT.
// generated with: cd gen; go run *.go

package ssa

import "fmt"
import "math"
import "cmd/internal/obj"
import "cmd/internal/objabi"
import "cmd/compile/internal/types"

var _ = fmt.Println   // in case not otherwise used
var _ = math.MinInt8  // in case not otherwise used
var _ = obj.ANOP      // in case not otherwise used
var _ = objabi.GOROOT // in case not otherwise used
var _ = types.TypeMem // in case not otherwise used

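// rewriteValueARM64 dispatches on v.Op and applies the matching ARM64
// rewrite rules, reporting whether any rule fired. The per-opcode helpers
// below are split into _0, _10, _20, ... pieces, each covering up to ten
// rules, to keep the generated functions a manageable size.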
func rewriteValueARM64(v *Value) bool {
	switch v.Op {
20 case OpARM64ADCSflags:
21 return rewriteValueARM64_OpARM64ADCSflags_0(v)
22 case OpARM64ADD:
23 return rewriteValueARM64_OpARM64ADD_0(v) || rewriteValueARM64_OpARM64ADD_10(v) || rewriteValueARM64_OpARM64ADD_20(v)
24 case OpARM64ADDconst:
25 return rewriteValueARM64_OpARM64ADDconst_0(v)
26 case OpARM64ADDshiftLL:
27 return rewriteValueARM64_OpARM64ADDshiftLL_0(v)
28 case OpARM64ADDshiftRA:
29 return rewriteValueARM64_OpARM64ADDshiftRA_0(v)
30 case OpARM64ADDshiftRL:
31 return rewriteValueARM64_OpARM64ADDshiftRL_0(v)
32 case OpARM64AND:
33 return rewriteValueARM64_OpARM64AND_0(v) || rewriteValueARM64_OpARM64AND_10(v)
34 case OpARM64ANDconst:
35 return rewriteValueARM64_OpARM64ANDconst_0(v)
36 case OpARM64ANDshiftLL:
37 return rewriteValueARM64_OpARM64ANDshiftLL_0(v)
38 case OpARM64ANDshiftRA:
39 return rewriteValueARM64_OpARM64ANDshiftRA_0(v)
40 case OpARM64ANDshiftRL:
41 return rewriteValueARM64_OpARM64ANDshiftRL_0(v)
42 case OpARM64BIC:
43 return rewriteValueARM64_OpARM64BIC_0(v)
44 case OpARM64BICshiftLL:
45 return rewriteValueARM64_OpARM64BICshiftLL_0(v)
46 case OpARM64BICshiftRA:
47 return rewriteValueARM64_OpARM64BICshiftRA_0(v)
48 case OpARM64BICshiftRL:
49 return rewriteValueARM64_OpARM64BICshiftRL_0(v)
50 case OpARM64CMN:
51 return rewriteValueARM64_OpARM64CMN_0(v)
52 case OpARM64CMNW:
53 return rewriteValueARM64_OpARM64CMNW_0(v)
54 case OpARM64CMNWconst:
55 return rewriteValueARM64_OpARM64CMNWconst_0(v)
56 case OpARM64CMNconst:
57 return rewriteValueARM64_OpARM64CMNconst_0(v)
58 case OpARM64CMNshiftLL:
59 return rewriteValueARM64_OpARM64CMNshiftLL_0(v)
60 case OpARM64CMNshiftRA:
61 return rewriteValueARM64_OpARM64CMNshiftRA_0(v)
62 case OpARM64CMNshiftRL:
63 return rewriteValueARM64_OpARM64CMNshiftRL_0(v)
64 case OpARM64CMP:
65 return rewriteValueARM64_OpARM64CMP_0(v)
66 case OpARM64CMPW:
67 return rewriteValueARM64_OpARM64CMPW_0(v)
68 case OpARM64CMPWconst:
69 return rewriteValueARM64_OpARM64CMPWconst_0(v)
70 case OpARM64CMPconst:
71 return rewriteValueARM64_OpARM64CMPconst_0(v)
72 case OpARM64CMPshiftLL:
73 return rewriteValueARM64_OpARM64CMPshiftLL_0(v)
74 case OpARM64CMPshiftRA:
75 return rewriteValueARM64_OpARM64CMPshiftRA_0(v)
76 case OpARM64CMPshiftRL:
77 return rewriteValueARM64_OpARM64CMPshiftRL_0(v)
78 case OpARM64CSEL:
79 return rewriteValueARM64_OpARM64CSEL_0(v)
80 case OpARM64CSEL0:
81 return rewriteValueARM64_OpARM64CSEL0_0(v)
82 case OpARM64DIV:
83 return rewriteValueARM64_OpARM64DIV_0(v)
84 case OpARM64DIVW:
85 return rewriteValueARM64_OpARM64DIVW_0(v)
86 case OpARM64EON:
87 return rewriteValueARM64_OpARM64EON_0(v)
88 case OpARM64EONshiftLL:
89 return rewriteValueARM64_OpARM64EONshiftLL_0(v)
90 case OpARM64EONshiftRA:
91 return rewriteValueARM64_OpARM64EONshiftRA_0(v)
92 case OpARM64EONshiftRL:
93 return rewriteValueARM64_OpARM64EONshiftRL_0(v)
94 case OpARM64Equal:
95 return rewriteValueARM64_OpARM64Equal_0(v)
96 case OpARM64FADDD:
97 return rewriteValueARM64_OpARM64FADDD_0(v)
98 case OpARM64FADDS:
99 return rewriteValueARM64_OpARM64FADDS_0(v)
100 case OpARM64FCMPD:
101 return rewriteValueARM64_OpARM64FCMPD_0(v)
102 case OpARM64FCMPS:
103 return rewriteValueARM64_OpARM64FCMPS_0(v)
104 case OpARM64FMOVDfpgp:
105 return rewriteValueARM64_OpARM64FMOVDfpgp_0(v)
106 case OpARM64FMOVDgpfp:
107 return rewriteValueARM64_OpARM64FMOVDgpfp_0(v)
108 case OpARM64FMOVDload:
109 return rewriteValueARM64_OpARM64FMOVDload_0(v)
110 case OpARM64FMOVDloadidx:
111 return rewriteValueARM64_OpARM64FMOVDloadidx_0(v)
112 case OpARM64FMOVDstore:
113 return rewriteValueARM64_OpARM64FMOVDstore_0(v)
114 case OpARM64FMOVDstoreidx:
115 return rewriteValueARM64_OpARM64FMOVDstoreidx_0(v)
116 case OpARM64FMOVSload:
117 return rewriteValueARM64_OpARM64FMOVSload_0(v)
118 case OpARM64FMOVSloadidx:
119 return rewriteValueARM64_OpARM64FMOVSloadidx_0(v)
120 case OpARM64FMOVSstore:
121 return rewriteValueARM64_OpARM64FMOVSstore_0(v)
122 case OpARM64FMOVSstoreidx:
123 return rewriteValueARM64_OpARM64FMOVSstoreidx_0(v)
124 case OpARM64FMULD:
125 return rewriteValueARM64_OpARM64FMULD_0(v)
126 case OpARM64FMULS:
127 return rewriteValueARM64_OpARM64FMULS_0(v)
128 case OpARM64FNEGD:
129 return rewriteValueARM64_OpARM64FNEGD_0(v)
130 case OpARM64FNEGS:
131 return rewriteValueARM64_OpARM64FNEGS_0(v)
132 case OpARM64FNMULD:
133 return rewriteValueARM64_OpARM64FNMULD_0(v)
134 case OpARM64FNMULS:
135 return rewriteValueARM64_OpARM64FNMULS_0(v)
136 case OpARM64FSUBD:
137 return rewriteValueARM64_OpARM64FSUBD_0(v)
138 case OpARM64FSUBS:
139 return rewriteValueARM64_OpARM64FSUBS_0(v)
140 case OpARM64GreaterEqual:
141 return rewriteValueARM64_OpARM64GreaterEqual_0(v)
142 case OpARM64GreaterEqualF:
143 return rewriteValueARM64_OpARM64GreaterEqualF_0(v)
144 case OpARM64GreaterEqualU:
145 return rewriteValueARM64_OpARM64GreaterEqualU_0(v)
146 case OpARM64GreaterThan:
147 return rewriteValueARM64_OpARM64GreaterThan_0(v)
148 case OpARM64GreaterThanF:
149 return rewriteValueARM64_OpARM64GreaterThanF_0(v)
150 case OpARM64GreaterThanU:
151 return rewriteValueARM64_OpARM64GreaterThanU_0(v)
152 case OpARM64LessEqual:
153 return rewriteValueARM64_OpARM64LessEqual_0(v)
154 case OpARM64LessEqualF:
155 return rewriteValueARM64_OpARM64LessEqualF_0(v)
156 case OpARM64LessEqualU:
157 return rewriteValueARM64_OpARM64LessEqualU_0(v)
158 case OpARM64LessThan:
159 return rewriteValueARM64_OpARM64LessThan_0(v)
160 case OpARM64LessThanF:
161 return rewriteValueARM64_OpARM64LessThanF_0(v)
162 case OpARM64LessThanU:
163 return rewriteValueARM64_OpARM64LessThanU_0(v)
164 case OpARM64MADD:
165 return rewriteValueARM64_OpARM64MADD_0(v) || rewriteValueARM64_OpARM64MADD_10(v) || rewriteValueARM64_OpARM64MADD_20(v)
166 case OpARM64MADDW:
167 return rewriteValueARM64_OpARM64MADDW_0(v) || rewriteValueARM64_OpARM64MADDW_10(v) || rewriteValueARM64_OpARM64MADDW_20(v)
168 case OpARM64MNEG:
169 return rewriteValueARM64_OpARM64MNEG_0(v) || rewriteValueARM64_OpARM64MNEG_10(v) || rewriteValueARM64_OpARM64MNEG_20(v)
170 case OpARM64MNEGW:
171 return rewriteValueARM64_OpARM64MNEGW_0(v) || rewriteValueARM64_OpARM64MNEGW_10(v) || rewriteValueARM64_OpARM64MNEGW_20(v)
172 case OpARM64MOD:
173 return rewriteValueARM64_OpARM64MOD_0(v)
174 case OpARM64MODW:
175 return rewriteValueARM64_OpARM64MODW_0(v)
176 case OpARM64MOVBUload:
177 return rewriteValueARM64_OpARM64MOVBUload_0(v)
178 case OpARM64MOVBUloadidx:
179 return rewriteValueARM64_OpARM64MOVBUloadidx_0(v)
180 case OpARM64MOVBUreg:
181 return rewriteValueARM64_OpARM64MOVBUreg_0(v)
182 case OpARM64MOVBload:
183 return rewriteValueARM64_OpARM64MOVBload_0(v)
184 case OpARM64MOVBloadidx:
185 return rewriteValueARM64_OpARM64MOVBloadidx_0(v)
186 case OpARM64MOVBreg:
187 return rewriteValueARM64_OpARM64MOVBreg_0(v)
188 case OpARM64MOVBstore:
189 return rewriteValueARM64_OpARM64MOVBstore_0(v) || rewriteValueARM64_OpARM64MOVBstore_10(v) || rewriteValueARM64_OpARM64MOVBstore_20(v) || rewriteValueARM64_OpARM64MOVBstore_30(v) || rewriteValueARM64_OpARM64MOVBstore_40(v)
190 case OpARM64MOVBstoreidx:
191 return rewriteValueARM64_OpARM64MOVBstoreidx_0(v) || rewriteValueARM64_OpARM64MOVBstoreidx_10(v)
192 case OpARM64MOVBstorezero:
193 return rewriteValueARM64_OpARM64MOVBstorezero_0(v)
194 case OpARM64MOVBstorezeroidx:
195 return rewriteValueARM64_OpARM64MOVBstorezeroidx_0(v)
196 case OpARM64MOVDload:
197 return rewriteValueARM64_OpARM64MOVDload_0(v)
198 case OpARM64MOVDloadidx:
199 return rewriteValueARM64_OpARM64MOVDloadidx_0(v)
200 case OpARM64MOVDloadidx8:
201 return rewriteValueARM64_OpARM64MOVDloadidx8_0(v)
202 case OpARM64MOVDreg:
203 return rewriteValueARM64_OpARM64MOVDreg_0(v)
204 case OpARM64MOVDstore:
205 return rewriteValueARM64_OpARM64MOVDstore_0(v)
206 case OpARM64MOVDstoreidx:
207 return rewriteValueARM64_OpARM64MOVDstoreidx_0(v)
208 case OpARM64MOVDstoreidx8:
209 return rewriteValueARM64_OpARM64MOVDstoreidx8_0(v)
210 case OpARM64MOVDstorezero:
211 return rewriteValueARM64_OpARM64MOVDstorezero_0(v)
212 case OpARM64MOVDstorezeroidx:
213 return rewriteValueARM64_OpARM64MOVDstorezeroidx_0(v)
214 case OpARM64MOVDstorezeroidx8:
215 return rewriteValueARM64_OpARM64MOVDstorezeroidx8_0(v)
216 case OpARM64MOVHUload:
217 return rewriteValueARM64_OpARM64MOVHUload_0(v)
218 case OpARM64MOVHUloadidx:
219 return rewriteValueARM64_OpARM64MOVHUloadidx_0(v)
220 case OpARM64MOVHUloadidx2:
221 return rewriteValueARM64_OpARM64MOVHUloadidx2_0(v)
222 case OpARM64MOVHUreg:
223 return rewriteValueARM64_OpARM64MOVHUreg_0(v) || rewriteValueARM64_OpARM64MOVHUreg_10(v)
224 case OpARM64MOVHload:
225 return rewriteValueARM64_OpARM64MOVHload_0(v)
226 case OpARM64MOVHloadidx:
227 return rewriteValueARM64_OpARM64MOVHloadidx_0(v)
228 case OpARM64MOVHloadidx2:
229 return rewriteValueARM64_OpARM64MOVHloadidx2_0(v)
230 case OpARM64MOVHreg:
231 return rewriteValueARM64_OpARM64MOVHreg_0(v) || rewriteValueARM64_OpARM64MOVHreg_10(v)
232 case OpARM64MOVHstore:
233 return rewriteValueARM64_OpARM64MOVHstore_0(v) || rewriteValueARM64_OpARM64MOVHstore_10(v) || rewriteValueARM64_OpARM64MOVHstore_20(v)
234 case OpARM64MOVHstoreidx:
235 return rewriteValueARM64_OpARM64MOVHstoreidx_0(v) || rewriteValueARM64_OpARM64MOVHstoreidx_10(v)
236 case OpARM64MOVHstoreidx2:
237 return rewriteValueARM64_OpARM64MOVHstoreidx2_0(v)
238 case OpARM64MOVHstorezero:
239 return rewriteValueARM64_OpARM64MOVHstorezero_0(v)
240 case OpARM64MOVHstorezeroidx:
241 return rewriteValueARM64_OpARM64MOVHstorezeroidx_0(v)
242 case OpARM64MOVHstorezeroidx2:
243 return rewriteValueARM64_OpARM64MOVHstorezeroidx2_0(v)
244 case OpARM64MOVQstorezero:
245 return rewriteValueARM64_OpARM64MOVQstorezero_0(v)
246 case OpARM64MOVWUload:
247 return rewriteValueARM64_OpARM64MOVWUload_0(v)
248 case OpARM64MOVWUloadidx:
249 return rewriteValueARM64_OpARM64MOVWUloadidx_0(v)
250 case OpARM64MOVWUloadidx4:
251 return rewriteValueARM64_OpARM64MOVWUloadidx4_0(v)
252 case OpARM64MOVWUreg:
253 return rewriteValueARM64_OpARM64MOVWUreg_0(v) || rewriteValueARM64_OpARM64MOVWUreg_10(v)
254 case OpARM64MOVWload:
255 return rewriteValueARM64_OpARM64MOVWload_0(v)
256 case OpARM64MOVWloadidx:
257 return rewriteValueARM64_OpARM64MOVWloadidx_0(v)
258 case OpARM64MOVWloadidx4:
259 return rewriteValueARM64_OpARM64MOVWloadidx4_0(v)
260 case OpARM64MOVWreg:
261 return rewriteValueARM64_OpARM64MOVWreg_0(v) || rewriteValueARM64_OpARM64MOVWreg_10(v)
262 case OpARM64MOVWstore:
263 return rewriteValueARM64_OpARM64MOVWstore_0(v) || rewriteValueARM64_OpARM64MOVWstore_10(v)
264 case OpARM64MOVWstoreidx:
265 return rewriteValueARM64_OpARM64MOVWstoreidx_0(v)
266 case OpARM64MOVWstoreidx4:
267 return rewriteValueARM64_OpARM64MOVWstoreidx4_0(v)
268 case OpARM64MOVWstorezero:
269 return rewriteValueARM64_OpARM64MOVWstorezero_0(v)
270 case OpARM64MOVWstorezeroidx:
271 return rewriteValueARM64_OpARM64MOVWstorezeroidx_0(v)
272 case OpARM64MOVWstorezeroidx4:
273 return rewriteValueARM64_OpARM64MOVWstorezeroidx4_0(v)
274 case OpARM64MSUB:
275 return rewriteValueARM64_OpARM64MSUB_0(v) || rewriteValueARM64_OpARM64MSUB_10(v) || rewriteValueARM64_OpARM64MSUB_20(v)
276 case OpARM64MSUBW:
277 return rewriteValueARM64_OpARM64MSUBW_0(v) || rewriteValueARM64_OpARM64MSUBW_10(v) || rewriteValueARM64_OpARM64MSUBW_20(v)
278 case OpARM64MUL:
279 return rewriteValueARM64_OpARM64MUL_0(v) || rewriteValueARM64_OpARM64MUL_10(v) || rewriteValueARM64_OpARM64MUL_20(v)
280 case OpARM64MULW:
281 return rewriteValueARM64_OpARM64MULW_0(v) || rewriteValueARM64_OpARM64MULW_10(v) || rewriteValueARM64_OpARM64MULW_20(v)
282 case OpARM64MVN:
283 return rewriteValueARM64_OpARM64MVN_0(v)
284 case OpARM64MVNshiftLL:
285 return rewriteValueARM64_OpARM64MVNshiftLL_0(v)
286 case OpARM64MVNshiftRA:
287 return rewriteValueARM64_OpARM64MVNshiftRA_0(v)
288 case OpARM64MVNshiftRL:
289 return rewriteValueARM64_OpARM64MVNshiftRL_0(v)
290 case OpARM64NEG:
291 return rewriteValueARM64_OpARM64NEG_0(v)
292 case OpARM64NEGshiftLL:
293 return rewriteValueARM64_OpARM64NEGshiftLL_0(v)
294 case OpARM64NEGshiftRA:
295 return rewriteValueARM64_OpARM64NEGshiftRA_0(v)
296 case OpARM64NEGshiftRL:
297 return rewriteValueARM64_OpARM64NEGshiftRL_0(v)
298 case OpARM64NotEqual:
299 return rewriteValueARM64_OpARM64NotEqual_0(v)
300 case OpARM64OR:
301 return rewriteValueARM64_OpARM64OR_0(v) || rewriteValueARM64_OpARM64OR_10(v) || rewriteValueARM64_OpARM64OR_20(v) || rewriteValueARM64_OpARM64OR_30(v) || rewriteValueARM64_OpARM64OR_40(v)
302 case OpARM64ORN:
303 return rewriteValueARM64_OpARM64ORN_0(v)
304 case OpARM64ORNshiftLL:
305 return rewriteValueARM64_OpARM64ORNshiftLL_0(v)
306 case OpARM64ORNshiftRA:
307 return rewriteValueARM64_OpARM64ORNshiftRA_0(v)
308 case OpARM64ORNshiftRL:
309 return rewriteValueARM64_OpARM64ORNshiftRL_0(v)
310 case OpARM64ORconst:
311 return rewriteValueARM64_OpARM64ORconst_0(v)
312 case OpARM64ORshiftLL:
313 return rewriteValueARM64_OpARM64ORshiftLL_0(v) || rewriteValueARM64_OpARM64ORshiftLL_10(v) || rewriteValueARM64_OpARM64ORshiftLL_20(v)
314 case OpARM64ORshiftRA:
315 return rewriteValueARM64_OpARM64ORshiftRA_0(v)
316 case OpARM64ORshiftRL:
317 return rewriteValueARM64_OpARM64ORshiftRL_0(v)
318 case OpARM64RORWconst:
319 return rewriteValueARM64_OpARM64RORWconst_0(v)
320 case OpARM64RORconst:
321 return rewriteValueARM64_OpARM64RORconst_0(v)
322 case OpARM64SBCSflags:
323 return rewriteValueARM64_OpARM64SBCSflags_0(v)
324 case OpARM64SLL:
325 return rewriteValueARM64_OpARM64SLL_0(v)
326 case OpARM64SLLconst:
327 return rewriteValueARM64_OpARM64SLLconst_0(v)
328 case OpARM64SRA:
329 return rewriteValueARM64_OpARM64SRA_0(v)
330 case OpARM64SRAconst:
331 return rewriteValueARM64_OpARM64SRAconst_0(v)
332 case OpARM64SRL:
333 return rewriteValueARM64_OpARM64SRL_0(v)
334 case OpARM64SRLconst:
335 return rewriteValueARM64_OpARM64SRLconst_0(v) || rewriteValueARM64_OpARM64SRLconst_10(v)
336 case OpARM64STP:
337 return rewriteValueARM64_OpARM64STP_0(v)
338 case OpARM64SUB:
339 return rewriteValueARM64_OpARM64SUB_0(v) || rewriteValueARM64_OpARM64SUB_10(v)
340 case OpARM64SUBconst:
341 return rewriteValueARM64_OpARM64SUBconst_0(v)
342 case OpARM64SUBshiftLL:
343 return rewriteValueARM64_OpARM64SUBshiftLL_0(v)
344 case OpARM64SUBshiftRA:
345 return rewriteValueARM64_OpARM64SUBshiftRA_0(v)
346 case OpARM64SUBshiftRL:
347 return rewriteValueARM64_OpARM64SUBshiftRL_0(v)
348 case OpARM64TST:
349 return rewriteValueARM64_OpARM64TST_0(v)
350 case OpARM64TSTW:
351 return rewriteValueARM64_OpARM64TSTW_0(v)
352 case OpARM64TSTWconst:
353 return rewriteValueARM64_OpARM64TSTWconst_0(v)
354 case OpARM64TSTconst:
355 return rewriteValueARM64_OpARM64TSTconst_0(v)
356 case OpARM64TSTshiftLL:
357 return rewriteValueARM64_OpARM64TSTshiftLL_0(v)
358 case OpARM64TSTshiftRA:
359 return rewriteValueARM64_OpARM64TSTshiftRA_0(v)
360 case OpARM64TSTshiftRL:
361 return rewriteValueARM64_OpARM64TSTshiftRL_0(v)
362 case OpARM64UBFIZ:
363 return rewriteValueARM64_OpARM64UBFIZ_0(v)
364 case OpARM64UBFX:
365 return rewriteValueARM64_OpARM64UBFX_0(v)
366 case OpARM64UDIV:
367 return rewriteValueARM64_OpARM64UDIV_0(v)
368 case OpARM64UDIVW:
369 return rewriteValueARM64_OpARM64UDIVW_0(v)
370 case OpARM64UMOD:
371 return rewriteValueARM64_OpARM64UMOD_0(v)
372 case OpARM64UMODW:
373 return rewriteValueARM64_OpARM64UMODW_0(v)
374 case OpARM64XOR:
375 return rewriteValueARM64_OpARM64XOR_0(v) || rewriteValueARM64_OpARM64XOR_10(v)
376 case OpARM64XORconst:
377 return rewriteValueARM64_OpARM64XORconst_0(v)
378 case OpARM64XORshiftLL:
379 return rewriteValueARM64_OpARM64XORshiftLL_0(v)
380 case OpARM64XORshiftRA:
381 return rewriteValueARM64_OpARM64XORshiftRA_0(v)
382 case OpARM64XORshiftRL:
383 return rewriteValueARM64_OpARM64XORshiftRL_0(v)
384 case OpAbs:
385 return rewriteValueARM64_OpAbs_0(v)
386 case OpAdd16:
387 return rewriteValueARM64_OpAdd16_0(v)
388 case OpAdd32:
389 return rewriteValueARM64_OpAdd32_0(v)
390 case OpAdd32F:
391 return rewriteValueARM64_OpAdd32F_0(v)
392 case OpAdd64:
393 return rewriteValueARM64_OpAdd64_0(v)
394 case OpAdd64F:
395 return rewriteValueARM64_OpAdd64F_0(v)
396 case OpAdd8:
397 return rewriteValueARM64_OpAdd8_0(v)
398 case OpAddPtr:
399 return rewriteValueARM64_OpAddPtr_0(v)
400 case OpAddr:
401 return rewriteValueARM64_OpAddr_0(v)
402 case OpAnd16:
403 return rewriteValueARM64_OpAnd16_0(v)
404 case OpAnd32:
405 return rewriteValueARM64_OpAnd32_0(v)
406 case OpAnd64:
407 return rewriteValueARM64_OpAnd64_0(v)
408 case OpAnd8:
409 return rewriteValueARM64_OpAnd8_0(v)
410 case OpAndB:
411 return rewriteValueARM64_OpAndB_0(v)
412 case OpAtomicAdd32:
413 return rewriteValueARM64_OpAtomicAdd32_0(v)
414 case OpAtomicAdd32Variant:
415 return rewriteValueARM64_OpAtomicAdd32Variant_0(v)
416 case OpAtomicAdd64:
417 return rewriteValueARM64_OpAtomicAdd64_0(v)
418 case OpAtomicAdd64Variant:
419 return rewriteValueARM64_OpAtomicAdd64Variant_0(v)
420 case OpAtomicAnd8:
421 return rewriteValueARM64_OpAtomicAnd8_0(v)
422 case OpAtomicCompareAndSwap32:
423 return rewriteValueARM64_OpAtomicCompareAndSwap32_0(v)
424 case OpAtomicCompareAndSwap64:
425 return rewriteValueARM64_OpAtomicCompareAndSwap64_0(v)
426 case OpAtomicExchange32:
427 return rewriteValueARM64_OpAtomicExchange32_0(v)
428 case OpAtomicExchange64:
429 return rewriteValueARM64_OpAtomicExchange64_0(v)
430 case OpAtomicLoad32:
431 return rewriteValueARM64_OpAtomicLoad32_0(v)
432 case OpAtomicLoad64:
433 return rewriteValueARM64_OpAtomicLoad64_0(v)
434 case OpAtomicLoad8:
435 return rewriteValueARM64_OpAtomicLoad8_0(v)
436 case OpAtomicLoadPtr:
437 return rewriteValueARM64_OpAtomicLoadPtr_0(v)
438 case OpAtomicOr8:
439 return rewriteValueARM64_OpAtomicOr8_0(v)
440 case OpAtomicStore32:
441 return rewriteValueARM64_OpAtomicStore32_0(v)
442 case OpAtomicStore64:
443 return rewriteValueARM64_OpAtomicStore64_0(v)
444 case OpAtomicStorePtrNoWB:
445 return rewriteValueARM64_OpAtomicStorePtrNoWB_0(v)
446 case OpAvg64u:
447 return rewriteValueARM64_OpAvg64u_0(v)
448 case OpBitLen32:
449 return rewriteValueARM64_OpBitLen32_0(v)
450 case OpBitLen64:
451 return rewriteValueARM64_OpBitLen64_0(v)
452 case OpBitRev16:
453 return rewriteValueARM64_OpBitRev16_0(v)
454 case OpBitRev32:
455 return rewriteValueARM64_OpBitRev32_0(v)
456 case OpBitRev64:
457 return rewriteValueARM64_OpBitRev64_0(v)
458 case OpBitRev8:
459 return rewriteValueARM64_OpBitRev8_0(v)
460 case OpBswap32:
461 return rewriteValueARM64_OpBswap32_0(v)
462 case OpBswap64:
463 return rewriteValueARM64_OpBswap64_0(v)
464 case OpCeil:
465 return rewriteValueARM64_OpCeil_0(v)
466 case OpClosureCall:
467 return rewriteValueARM64_OpClosureCall_0(v)
468 case OpCom16:
469 return rewriteValueARM64_OpCom16_0(v)
470 case OpCom32:
471 return rewriteValueARM64_OpCom32_0(v)
472 case OpCom64:
473 return rewriteValueARM64_OpCom64_0(v)
474 case OpCom8:
475 return rewriteValueARM64_OpCom8_0(v)
476 case OpCondSelect:
477 return rewriteValueARM64_OpCondSelect_0(v)
478 case OpConst16:
479 return rewriteValueARM64_OpConst16_0(v)
480 case OpConst32:
481 return rewriteValueARM64_OpConst32_0(v)
482 case OpConst32F:
483 return rewriteValueARM64_OpConst32F_0(v)
484 case OpConst64:
485 return rewriteValueARM64_OpConst64_0(v)
486 case OpConst64F:
487 return rewriteValueARM64_OpConst64F_0(v)
488 case OpConst8:
489 return rewriteValueARM64_OpConst8_0(v)
490 case OpConstBool:
491 return rewriteValueARM64_OpConstBool_0(v)
492 case OpConstNil:
493 return rewriteValueARM64_OpConstNil_0(v)
494 case OpCtz16:
495 return rewriteValueARM64_OpCtz16_0(v)
496 case OpCtz16NonZero:
497 return rewriteValueARM64_OpCtz16NonZero_0(v)
498 case OpCtz32:
499 return rewriteValueARM64_OpCtz32_0(v)
500 case OpCtz32NonZero:
501 return rewriteValueARM64_OpCtz32NonZero_0(v)
502 case OpCtz64:
503 return rewriteValueARM64_OpCtz64_0(v)
504 case OpCtz64NonZero:
505 return rewriteValueARM64_OpCtz64NonZero_0(v)
506 case OpCtz8:
507 return rewriteValueARM64_OpCtz8_0(v)
508 case OpCtz8NonZero:
509 return rewriteValueARM64_OpCtz8NonZero_0(v)
510 case OpCvt32Fto32:
511 return rewriteValueARM64_OpCvt32Fto32_0(v)
512 case OpCvt32Fto32U:
513 return rewriteValueARM64_OpCvt32Fto32U_0(v)
514 case OpCvt32Fto64:
515 return rewriteValueARM64_OpCvt32Fto64_0(v)
516 case OpCvt32Fto64F:
517 return rewriteValueARM64_OpCvt32Fto64F_0(v)
518 case OpCvt32Fto64U:
519 return rewriteValueARM64_OpCvt32Fto64U_0(v)
520 case OpCvt32Uto32F:
521 return rewriteValueARM64_OpCvt32Uto32F_0(v)
522 case OpCvt32Uto64F:
523 return rewriteValueARM64_OpCvt32Uto64F_0(v)
524 case OpCvt32to32F:
525 return rewriteValueARM64_OpCvt32to32F_0(v)
526 case OpCvt32to64F:
527 return rewriteValueARM64_OpCvt32to64F_0(v)
528 case OpCvt64Fto32:
529 return rewriteValueARM64_OpCvt64Fto32_0(v)
530 case OpCvt64Fto32F:
531 return rewriteValueARM64_OpCvt64Fto32F_0(v)
532 case OpCvt64Fto32U:
533 return rewriteValueARM64_OpCvt64Fto32U_0(v)
534 case OpCvt64Fto64:
535 return rewriteValueARM64_OpCvt64Fto64_0(v)
536 case OpCvt64Fto64U:
537 return rewriteValueARM64_OpCvt64Fto64U_0(v)
538 case OpCvt64Uto32F:
539 return rewriteValueARM64_OpCvt64Uto32F_0(v)
540 case OpCvt64Uto64F:
541 return rewriteValueARM64_OpCvt64Uto64F_0(v)
542 case OpCvt64to32F:
543 return rewriteValueARM64_OpCvt64to32F_0(v)
544 case OpCvt64to64F:
545 return rewriteValueARM64_OpCvt64to64F_0(v)
546 case OpDiv16:
547 return rewriteValueARM64_OpDiv16_0(v)
548 case OpDiv16u:
549 return rewriteValueARM64_OpDiv16u_0(v)
550 case OpDiv32:
551 return rewriteValueARM64_OpDiv32_0(v)
552 case OpDiv32F:
553 return rewriteValueARM64_OpDiv32F_0(v)
554 case OpDiv32u:
555 return rewriteValueARM64_OpDiv32u_0(v)
556 case OpDiv64:
557 return rewriteValueARM64_OpDiv64_0(v)
558 case OpDiv64F:
559 return rewriteValueARM64_OpDiv64F_0(v)
560 case OpDiv64u:
561 return rewriteValueARM64_OpDiv64u_0(v)
562 case OpDiv8:
563 return rewriteValueARM64_OpDiv8_0(v)
564 case OpDiv8u:
565 return rewriteValueARM64_OpDiv8u_0(v)
566 case OpEq16:
567 return rewriteValueARM64_OpEq16_0(v)
568 case OpEq32:
569 return rewriteValueARM64_OpEq32_0(v)
570 case OpEq32F:
571 return rewriteValueARM64_OpEq32F_0(v)
572 case OpEq64:
573 return rewriteValueARM64_OpEq64_0(v)
574 case OpEq64F:
575 return rewriteValueARM64_OpEq64F_0(v)
576 case OpEq8:
577 return rewriteValueARM64_OpEq8_0(v)
578 case OpEqB:
579 return rewriteValueARM64_OpEqB_0(v)
580 case OpEqPtr:
581 return rewriteValueARM64_OpEqPtr_0(v)
582 case OpFloor:
583 return rewriteValueARM64_OpFloor_0(v)
584 case OpGeq16:
585 return rewriteValueARM64_OpGeq16_0(v)
586 case OpGeq16U:
587 return rewriteValueARM64_OpGeq16U_0(v)
588 case OpGeq32:
589 return rewriteValueARM64_OpGeq32_0(v)
590 case OpGeq32F:
591 return rewriteValueARM64_OpGeq32F_0(v)
592 case OpGeq32U:
593 return rewriteValueARM64_OpGeq32U_0(v)
594 case OpGeq64:
595 return rewriteValueARM64_OpGeq64_0(v)
596 case OpGeq64F:
597 return rewriteValueARM64_OpGeq64F_0(v)
598 case OpGeq64U:
599 return rewriteValueARM64_OpGeq64U_0(v)
600 case OpGeq8:
601 return rewriteValueARM64_OpGeq8_0(v)
602 case OpGeq8U:
603 return rewriteValueARM64_OpGeq8U_0(v)
604 case OpGetCallerPC:
605 return rewriteValueARM64_OpGetCallerPC_0(v)
606 case OpGetCallerSP:
607 return rewriteValueARM64_OpGetCallerSP_0(v)
608 case OpGetClosurePtr:
609 return rewriteValueARM64_OpGetClosurePtr_0(v)
610 case OpGreater16:
611 return rewriteValueARM64_OpGreater16_0(v)
612 case OpGreater16U:
613 return rewriteValueARM64_OpGreater16U_0(v)
614 case OpGreater32:
615 return rewriteValueARM64_OpGreater32_0(v)
616 case OpGreater32F:
617 return rewriteValueARM64_OpGreater32F_0(v)
618 case OpGreater32U:
619 return rewriteValueARM64_OpGreater32U_0(v)
620 case OpGreater64:
621 return rewriteValueARM64_OpGreater64_0(v)
622 case OpGreater64F:
623 return rewriteValueARM64_OpGreater64F_0(v)
624 case OpGreater64U:
625 return rewriteValueARM64_OpGreater64U_0(v)
626 case OpGreater8:
627 return rewriteValueARM64_OpGreater8_0(v)
628 case OpGreater8U:
629 return rewriteValueARM64_OpGreater8U_0(v)
630 case OpHmul32:
631 return rewriteValueARM64_OpHmul32_0(v)
632 case OpHmul32u:
633 return rewriteValueARM64_OpHmul32u_0(v)
634 case OpHmul64:
635 return rewriteValueARM64_OpHmul64_0(v)
636 case OpHmul64u:
637 return rewriteValueARM64_OpHmul64u_0(v)
638 case OpInterCall:
639 return rewriteValueARM64_OpInterCall_0(v)
640 case OpIsInBounds:
641 return rewriteValueARM64_OpIsInBounds_0(v)
642 case OpIsNonNil:
643 return rewriteValueARM64_OpIsNonNil_0(v)
644 case OpIsSliceInBounds:
645 return rewriteValueARM64_OpIsSliceInBounds_0(v)
646 case OpLeq16:
647 return rewriteValueARM64_OpLeq16_0(v)
648 case OpLeq16U:
649 return rewriteValueARM64_OpLeq16U_0(v)
650 case OpLeq32:
651 return rewriteValueARM64_OpLeq32_0(v)
652 case OpLeq32F:
653 return rewriteValueARM64_OpLeq32F_0(v)
654 case OpLeq32U:
655 return rewriteValueARM64_OpLeq32U_0(v)
656 case OpLeq64:
657 return rewriteValueARM64_OpLeq64_0(v)
658 case OpLeq64F:
659 return rewriteValueARM64_OpLeq64F_0(v)
660 case OpLeq64U:
661 return rewriteValueARM64_OpLeq64U_0(v)
662 case OpLeq8:
663 return rewriteValueARM64_OpLeq8_0(v)
664 case OpLeq8U:
665 return rewriteValueARM64_OpLeq8U_0(v)
666 case OpLess16:
667 return rewriteValueARM64_OpLess16_0(v)
668 case OpLess16U:
669 return rewriteValueARM64_OpLess16U_0(v)
670 case OpLess32:
671 return rewriteValueARM64_OpLess32_0(v)
672 case OpLess32F:
673 return rewriteValueARM64_OpLess32F_0(v)
674 case OpLess32U:
675 return rewriteValueARM64_OpLess32U_0(v)
676 case OpLess64:
677 return rewriteValueARM64_OpLess64_0(v)
678 case OpLess64F:
679 return rewriteValueARM64_OpLess64F_0(v)
680 case OpLess64U:
681 return rewriteValueARM64_OpLess64U_0(v)
682 case OpLess8:
683 return rewriteValueARM64_OpLess8_0(v)
684 case OpLess8U:
685 return rewriteValueARM64_OpLess8U_0(v)
686 case OpLoad:
687 return rewriteValueARM64_OpLoad_0(v)
688 case OpLocalAddr:
689 return rewriteValueARM64_OpLocalAddr_0(v)
690 case OpLsh16x16:
691 return rewriteValueARM64_OpLsh16x16_0(v)
692 case OpLsh16x32:
693 return rewriteValueARM64_OpLsh16x32_0(v)
694 case OpLsh16x64:
695 return rewriteValueARM64_OpLsh16x64_0(v)
696 case OpLsh16x8:
697 return rewriteValueARM64_OpLsh16x8_0(v)
698 case OpLsh32x16:
699 return rewriteValueARM64_OpLsh32x16_0(v)
700 case OpLsh32x32:
701 return rewriteValueARM64_OpLsh32x32_0(v)
702 case OpLsh32x64:
703 return rewriteValueARM64_OpLsh32x64_0(v)
704 case OpLsh32x8:
705 return rewriteValueARM64_OpLsh32x8_0(v)
706 case OpLsh64x16:
707 return rewriteValueARM64_OpLsh64x16_0(v)
708 case OpLsh64x32:
709 return rewriteValueARM64_OpLsh64x32_0(v)
710 case OpLsh64x64:
711 return rewriteValueARM64_OpLsh64x64_0(v)
712 case OpLsh64x8:
713 return rewriteValueARM64_OpLsh64x8_0(v)
714 case OpLsh8x16:
715 return rewriteValueARM64_OpLsh8x16_0(v)
716 case OpLsh8x32:
717 return rewriteValueARM64_OpLsh8x32_0(v)
718 case OpLsh8x64:
719 return rewriteValueARM64_OpLsh8x64_0(v)
720 case OpLsh8x8:
721 return rewriteValueARM64_OpLsh8x8_0(v)
722 case OpMod16:
723 return rewriteValueARM64_OpMod16_0(v)
724 case OpMod16u:
725 return rewriteValueARM64_OpMod16u_0(v)
726 case OpMod32:
727 return rewriteValueARM64_OpMod32_0(v)
728 case OpMod32u:
729 return rewriteValueARM64_OpMod32u_0(v)
730 case OpMod64:
731 return rewriteValueARM64_OpMod64_0(v)
732 case OpMod64u:
733 return rewriteValueARM64_OpMod64u_0(v)
734 case OpMod8:
735 return rewriteValueARM64_OpMod8_0(v)
736 case OpMod8u:
737 return rewriteValueARM64_OpMod8u_0(v)
738 case OpMove:
739 return rewriteValueARM64_OpMove_0(v) || rewriteValueARM64_OpMove_10(v)
740 case OpMul16:
741 return rewriteValueARM64_OpMul16_0(v)
742 case OpMul32:
743 return rewriteValueARM64_OpMul32_0(v)
744 case OpMul32F:
745 return rewriteValueARM64_OpMul32F_0(v)
746 case OpMul64:
747 return rewriteValueARM64_OpMul64_0(v)
748 case OpMul64F:
749 return rewriteValueARM64_OpMul64F_0(v)
750 case OpMul64uhilo:
751 return rewriteValueARM64_OpMul64uhilo_0(v)
752 case OpMul8:
753 return rewriteValueARM64_OpMul8_0(v)
754 case OpNeg16:
755 return rewriteValueARM64_OpNeg16_0(v)
756 case OpNeg32:
757 return rewriteValueARM64_OpNeg32_0(v)
758 case OpNeg32F:
759 return rewriteValueARM64_OpNeg32F_0(v)
760 case OpNeg64:
761 return rewriteValueARM64_OpNeg64_0(v)
762 case OpNeg64F:
763 return rewriteValueARM64_OpNeg64F_0(v)
764 case OpNeg8:
765 return rewriteValueARM64_OpNeg8_0(v)
766 case OpNeq16:
767 return rewriteValueARM64_OpNeq16_0(v)
768 case OpNeq32:
769 return rewriteValueARM64_OpNeq32_0(v)
770 case OpNeq32F:
771 return rewriteValueARM64_OpNeq32F_0(v)
772 case OpNeq64:
773 return rewriteValueARM64_OpNeq64_0(v)
774 case OpNeq64F:
775 return rewriteValueARM64_OpNeq64F_0(v)
776 case OpNeq8:
777 return rewriteValueARM64_OpNeq8_0(v)
778 case OpNeqB:
779 return rewriteValueARM64_OpNeqB_0(v)
780 case OpNeqPtr:
781 return rewriteValueARM64_OpNeqPtr_0(v)
782 case OpNilCheck:
783 return rewriteValueARM64_OpNilCheck_0(v)
784 case OpNot:
785 return rewriteValueARM64_OpNot_0(v)
786 case OpOffPtr:
787 return rewriteValueARM64_OpOffPtr_0(v)
788 case OpOr16:
789 return rewriteValueARM64_OpOr16_0(v)
790 case OpOr32:
791 return rewriteValueARM64_OpOr32_0(v)
792 case OpOr64:
793 return rewriteValueARM64_OpOr64_0(v)
794 case OpOr8:
795 return rewriteValueARM64_OpOr8_0(v)
796 case OpOrB:
797 return rewriteValueARM64_OpOrB_0(v)
798 case OpPanicBounds:
799 return rewriteValueARM64_OpPanicBounds_0(v)
800 case OpPopCount16:
801 return rewriteValueARM64_OpPopCount16_0(v)
802 case OpPopCount32:
803 return rewriteValueARM64_OpPopCount32_0(v)
804 case OpPopCount64:
805 return rewriteValueARM64_OpPopCount64_0(v)
806 case OpRotateLeft16:
807 return rewriteValueARM64_OpRotateLeft16_0(v)
808 case OpRotateLeft32:
809 return rewriteValueARM64_OpRotateLeft32_0(v)
810 case OpRotateLeft64:
811 return rewriteValueARM64_OpRotateLeft64_0(v)
812 case OpRotateLeft8:
813 return rewriteValueARM64_OpRotateLeft8_0(v)
814 case OpRound:
815 return rewriteValueARM64_OpRound_0(v)
816 case OpRound32F:
817 return rewriteValueARM64_OpRound32F_0(v)
818 case OpRound64F:
819 return rewriteValueARM64_OpRound64F_0(v)
820 case OpRoundToEven:
821 return rewriteValueARM64_OpRoundToEven_0(v)
822 case OpRsh16Ux16:
823 return rewriteValueARM64_OpRsh16Ux16_0(v)
824 case OpRsh16Ux32:
825 return rewriteValueARM64_OpRsh16Ux32_0(v)
826 case OpRsh16Ux64:
827 return rewriteValueARM64_OpRsh16Ux64_0(v)
828 case OpRsh16Ux8:
829 return rewriteValueARM64_OpRsh16Ux8_0(v)
830 case OpRsh16x16:
831 return rewriteValueARM64_OpRsh16x16_0(v)
832 case OpRsh16x32:
833 return rewriteValueARM64_OpRsh16x32_0(v)
834 case OpRsh16x64:
835 return rewriteValueARM64_OpRsh16x64_0(v)
836 case OpRsh16x8:
837 return rewriteValueARM64_OpRsh16x8_0(v)
838 case OpRsh32Ux16:
839 return rewriteValueARM64_OpRsh32Ux16_0(v)
840 case OpRsh32Ux32:
841 return rewriteValueARM64_OpRsh32Ux32_0(v)
842 case OpRsh32Ux64:
843 return rewriteValueARM64_OpRsh32Ux64_0(v)
844 case OpRsh32Ux8:
845 return rewriteValueARM64_OpRsh32Ux8_0(v)
846 case OpRsh32x16:
847 return rewriteValueARM64_OpRsh32x16_0(v)
848 case OpRsh32x32:
849 return rewriteValueARM64_OpRsh32x32_0(v)
850 case OpRsh32x64:
851 return rewriteValueARM64_OpRsh32x64_0(v)
852 case OpRsh32x8:
853 return rewriteValueARM64_OpRsh32x8_0(v)
854 case OpRsh64Ux16:
855 return rewriteValueARM64_OpRsh64Ux16_0(v)
856 case OpRsh64Ux32:
857 return rewriteValueARM64_OpRsh64Ux32_0(v)
858 case OpRsh64Ux64:
859 return rewriteValueARM64_OpRsh64Ux64_0(v)
860 case OpRsh64Ux8:
861 return rewriteValueARM64_OpRsh64Ux8_0(v)
862 case OpRsh64x16:
863 return rewriteValueARM64_OpRsh64x16_0(v)
864 case OpRsh64x32:
865 return rewriteValueARM64_OpRsh64x32_0(v)
866 case OpRsh64x64:
867 return rewriteValueARM64_OpRsh64x64_0(v)
868 case OpRsh64x8:
869 return rewriteValueARM64_OpRsh64x8_0(v)
870 case OpRsh8Ux16:
871 return rewriteValueARM64_OpRsh8Ux16_0(v)
872 case OpRsh8Ux32:
873 return rewriteValueARM64_OpRsh8Ux32_0(v)
874 case OpRsh8Ux64:
875 return rewriteValueARM64_OpRsh8Ux64_0(v)
876 case OpRsh8Ux8:
877 return rewriteValueARM64_OpRsh8Ux8_0(v)
878 case OpRsh8x16:
879 return rewriteValueARM64_OpRsh8x16_0(v)
880 case OpRsh8x32:
881 return rewriteValueARM64_OpRsh8x32_0(v)
882 case OpRsh8x64:
883 return rewriteValueARM64_OpRsh8x64_0(v)
884 case OpRsh8x8:
885 return rewriteValueARM64_OpRsh8x8_0(v)
886 case OpSelect0:
887 return rewriteValueARM64_OpSelect0_0(v)
888 case OpSelect1:
889 return rewriteValueARM64_OpSelect1_0(v)
890 case OpSignExt16to32:
891 return rewriteValueARM64_OpSignExt16to32_0(v)
892 case OpSignExt16to64:
893 return rewriteValueARM64_OpSignExt16to64_0(v)
894 case OpSignExt32to64:
895 return rewriteValueARM64_OpSignExt32to64_0(v)
896 case OpSignExt8to16:
897 return rewriteValueARM64_OpSignExt8to16_0(v)
898 case OpSignExt8to32:
899 return rewriteValueARM64_OpSignExt8to32_0(v)
900 case OpSignExt8to64:
901 return rewriteValueARM64_OpSignExt8to64_0(v)
902 case OpSlicemask:
903 return rewriteValueARM64_OpSlicemask_0(v)
904 case OpSqrt:
905 return rewriteValueARM64_OpSqrt_0(v)
906 case OpStaticCall:
907 return rewriteValueARM64_OpStaticCall_0(v)
908 case OpStore:
909 return rewriteValueARM64_OpStore_0(v)
910 case OpSub16:
911 return rewriteValueARM64_OpSub16_0(v)
912 case OpSub32:
913 return rewriteValueARM64_OpSub32_0(v)
914 case OpSub32F:
915 return rewriteValueARM64_OpSub32F_0(v)
916 case OpSub64:
917 return rewriteValueARM64_OpSub64_0(v)
918 case OpSub64F:
919 return rewriteValueARM64_OpSub64F_0(v)
920 case OpSub8:
921 return rewriteValueARM64_OpSub8_0(v)
922 case OpSubPtr:
923 return rewriteValueARM64_OpSubPtr_0(v)
924 case OpTrunc:
925 return rewriteValueARM64_OpTrunc_0(v)
926 case OpTrunc16to8:
927 return rewriteValueARM64_OpTrunc16to8_0(v)
928 case OpTrunc32to16:
929 return rewriteValueARM64_OpTrunc32to16_0(v)
930 case OpTrunc32to8:
931 return rewriteValueARM64_OpTrunc32to8_0(v)
932 case OpTrunc64to16:
933 return rewriteValueARM64_OpTrunc64to16_0(v)
934 case OpTrunc64to32:
935 return rewriteValueARM64_OpTrunc64to32_0(v)
936 case OpTrunc64to8:
937 return rewriteValueARM64_OpTrunc64to8_0(v)
938 case OpWB:
939 return rewriteValueARM64_OpWB_0(v)
940 case OpXor16:
941 return rewriteValueARM64_OpXor16_0(v)
942 case OpXor32:
943 return rewriteValueARM64_OpXor32_0(v)
944 case OpXor64:
945 return rewriteValueARM64_OpXor64_0(v)
946 case OpXor8:
947 return rewriteValueARM64_OpXor8_0(v)
948 case OpZero:
949 return rewriteValueARM64_OpZero_0(v) || rewriteValueARM64_OpZero_10(v) || rewriteValueARM64_OpZero_20(v)
950 case OpZeroExt16to32:
951 return rewriteValueARM64_OpZeroExt16to32_0(v)
952 case OpZeroExt16to64:
953 return rewriteValueARM64_OpZeroExt16to64_0(v)
954 case OpZeroExt32to64:
955 return rewriteValueARM64_OpZeroExt32to64_0(v)
956 case OpZeroExt8to16:
957 return rewriteValueARM64_OpZeroExt8to16_0(v)
958 case OpZeroExt8to32:
959 return rewriteValueARM64_OpZeroExt8to32_0(v)
960 case OpZeroExt8to64:
961 return rewriteValueARM64_OpZeroExt8to64_0(v)
	}
	return false
}
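// rewriteValueARM64_OpARM64ADCSflags_0 simplifies redundant carry handling:
// when the carry-in of an ADCSflags is rebuilt via
// (Select1 (ADDSconstflags [-1] ...)) from an ADCzerocarry, the original
// carry value is used directly; when it is rebuilt from a zero constant,
// the op degrades to a plain ADDSflags.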
func rewriteValueARM64_OpARM64ADCSflags_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (ADCSflags x y (Select1 <types.TypeFlags> (ADDSconstflags [-1] (ADCzerocarry <typ.UInt64> c))))
	// cond:
	// result: (ADCSflags x y c)
	for {
		_ = v.Args[2]
		x := v.Args[0]
		y := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpSelect1 {
			break
		}
		if v_2.Type != types.TypeFlags {
			break
		}
		v_2_0 := v_2.Args[0]
		if v_2_0.Op != OpARM64ADDSconstflags {
			break
		}
		if v_2_0.AuxInt != -1 {
			break
		}
		v_2_0_0 := v_2_0.Args[0]
		if v_2_0_0.Op != OpARM64ADCzerocarry {
			break
		}
		if v_2_0_0.Type != typ.UInt64 {
			break
		}
		c := v_2_0_0.Args[0]
		v.reset(OpARM64ADCSflags)
		v.AddArg(x)
		v.AddArg(y)
		v.AddArg(c)
		return true
	}
	// match: (ADCSflags x y (Select1 <types.TypeFlags> (ADDSconstflags [-1] (MOVDconst [0]))))
	// cond:
	// result: (ADDSflags x y)
	for {
		_ = v.Args[2]
		x := v.Args[0]
		y := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpSelect1 {
			break
		}
		if v_2.Type != types.TypeFlags {
			break
		}
		v_2_0 := v_2.Args[0]
		if v_2_0.Op != OpARM64ADDSconstflags {
			break
		}
		if v_2_0.AuxInt != -1 {
			break
		}
		v_2_0_0 := v_2_0.Args[0]
		if v_2_0_0.Op != OpARM64MOVDconst {
			break
		}
		if v_2_0_0.AuxInt != 0 {
			break
		}
		v.reset(OpARM64ADDSflags)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	return false
}
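// rewriteValueARM64_OpARM64ADD_0 covers the first batch of ADD rules:
// folding a MOVDconst operand into ADDconst, and fusing a single-use
// multiply (MUL/MULW) or negated multiply (MNEG/MNEGW) operand into the
// combined ops MADD/MADDW and MSUB/MSUBW.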
func rewriteValueARM64_OpARM64ADD_0(v *Value) bool {
	// match: (ADD x (MOVDconst [c]))
	// cond:
	// result: (ADDconst [c] x)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64ADDconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (ADD (MOVDconst [c]) x)
	// cond:
	// result: (ADDconst [c] x)
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpARM64ADDconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (ADD a l:(MUL x y))
	// cond: l.Uses==1 && clobber(l)
	// result: (MADD a x y)
	for {
		_ = v.Args[1]
		a := v.Args[0]
		l := v.Args[1]
		if l.Op != OpARM64MUL {
			break
		}
		y := l.Args[1]
		x := l.Args[0]
		if !(l.Uses == 1 && clobber(l)) {
			break
		}
		v.reset(OpARM64MADD)
		v.AddArg(a)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (ADD l:(MUL x y) a)
	// cond: l.Uses==1 && clobber(l)
	// result: (MADD a x y)
	for {
		a := v.Args[1]
		l := v.Args[0]
		if l.Op != OpARM64MUL {
			break
		}
		y := l.Args[1]
		x := l.Args[0]
		if !(l.Uses == 1 && clobber(l)) {
			break
		}
		v.reset(OpARM64MADD)
		v.AddArg(a)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (ADD a l:(MNEG x y))
	// cond: l.Uses==1 && clobber(l)
	// result: (MSUB a x y)
	for {
		_ = v.Args[1]
		a := v.Args[0]
		l := v.Args[1]
		if l.Op != OpARM64MNEG {
			break
		}
		y := l.Args[1]
		x := l.Args[0]
		if !(l.Uses == 1 && clobber(l)) {
			break
		}
		v.reset(OpARM64MSUB)
		v.AddArg(a)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (ADD l:(MNEG x y) a)
	// cond: l.Uses==1 && clobber(l)
	// result: (MSUB a x y)
	for {
		a := v.Args[1]
		l := v.Args[0]
		if l.Op != OpARM64MNEG {
			break
		}
		y := l.Args[1]
		x := l.Args[0]
		if !(l.Uses == 1 && clobber(l)) {
			break
		}
		v.reset(OpARM64MSUB)
		v.AddArg(a)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (ADD a l:(MULW x y))
	// cond: a.Type.Size() != 8 && l.Uses==1 && clobber(l)
	// result: (MADDW a x y)
	for {
		_ = v.Args[1]
		a := v.Args[0]
		l := v.Args[1]
		if l.Op != OpARM64MULW {
			break
		}
		y := l.Args[1]
		x := l.Args[0]
		if !(a.Type.Size() != 8 && l.Uses == 1 && clobber(l)) {
			break
		}
		v.reset(OpARM64MADDW)
		v.AddArg(a)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (ADD l:(MULW x y) a)
	// cond: a.Type.Size() != 8 && l.Uses==1 && clobber(l)
	// result: (MADDW a x y)
	for {
		a := v.Args[1]
		l := v.Args[0]
		if l.Op != OpARM64MULW {
			break
		}
		y := l.Args[1]
		x := l.Args[0]
		if !(a.Type.Size() != 8 && l.Uses == 1 && clobber(l)) {
			break
		}
		v.reset(OpARM64MADDW)
		v.AddArg(a)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (ADD a l:(MNEGW x y))
	// cond: a.Type.Size() != 8 && l.Uses==1 && clobber(l)
	// result: (MSUBW a x y)
	for {
		_ = v.Args[1]
		a := v.Args[0]
		l := v.Args[1]
		if l.Op != OpARM64MNEGW {
			break
		}
		y := l.Args[1]
		x := l.Args[0]
		if !(a.Type.Size() != 8 && l.Uses == 1 && clobber(l)) {
			break
		}
		v.reset(OpARM64MSUBW)
		v.AddArg(a)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (ADD l:(MNEGW x y) a)
	// cond: a.Type.Size() != 8 && l.Uses==1 && clobber(l)
	// result: (MSUBW a x y)
	for {
		a := v.Args[1]
		l := v.Args[0]
		if l.Op != OpARM64MNEGW {
			break
		}
		y := l.Args[1]
		x := l.Args[0]
		if !(a.Type.Size() != 8 && l.Uses == 1 && clobber(l)) {
			break
		}
		v.reset(OpARM64MSUBW)
		v.AddArg(a)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	return false
}
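// rewriteValueARM64_OpARM64ADD_10 continues the ADD rules: an ADD of a
// negation becomes SUB, a single-use constant shift operand
// (SLLconst/SRLconst/SRAconst) is folded into ADDshiftLL/ADDshiftRL/
// ADDshiftRA, and the first of the shift-pair patterns that form a
// rotate are rewritten to ROR.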
func rewriteValueARM64_OpARM64ADD_10(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
1239
1240
1241
1242 for {
1243 _ = v.Args[1]
1244 x := v.Args[0]
1245 v_1 := v.Args[1]
1246 if v_1.Op != OpARM64NEG {
1247 break
1248 }
1249 y := v_1.Args[0]
1250 v.reset(OpARM64SUB)
1251 v.AddArg(x)
1252 v.AddArg(y)
1253 return true
1254 }
1255
1256
1257
1258 for {
1259 x := v.Args[1]
1260 v_0 := v.Args[0]
1261 if v_0.Op != OpARM64NEG {
1262 break
1263 }
1264 y := v_0.Args[0]
1265 v.reset(OpARM64SUB)
1266 v.AddArg(x)
1267 v.AddArg(y)
1268 return true
1269 }
1270
1271
1272
1273 for {
1274 _ = v.Args[1]
1275 x0 := v.Args[0]
1276 x1 := v.Args[1]
1277 if x1.Op != OpARM64SLLconst {
1278 break
1279 }
1280 c := x1.AuxInt
1281 y := x1.Args[0]
1282 if !(clobberIfDead(x1)) {
1283 break
1284 }
1285 v.reset(OpARM64ADDshiftLL)
1286 v.AuxInt = c
1287 v.AddArg(x0)
1288 v.AddArg(y)
1289 return true
1290 }
1291
1292
1293
1294 for {
1295 x0 := v.Args[1]
1296 x1 := v.Args[0]
1297 if x1.Op != OpARM64SLLconst {
1298 break
1299 }
1300 c := x1.AuxInt
1301 y := x1.Args[0]
1302 if !(clobberIfDead(x1)) {
1303 break
1304 }
1305 v.reset(OpARM64ADDshiftLL)
1306 v.AuxInt = c
1307 v.AddArg(x0)
1308 v.AddArg(y)
1309 return true
1310 }
1311
1312
1313
1314 for {
1315 _ = v.Args[1]
1316 x0 := v.Args[0]
1317 x1 := v.Args[1]
1318 if x1.Op != OpARM64SRLconst {
1319 break
1320 }
1321 c := x1.AuxInt
1322 y := x1.Args[0]
1323 if !(clobberIfDead(x1)) {
1324 break
1325 }
1326 v.reset(OpARM64ADDshiftRL)
1327 v.AuxInt = c
1328 v.AddArg(x0)
1329 v.AddArg(y)
1330 return true
1331 }
1332
1333
1334
1335 for {
1336 x0 := v.Args[1]
1337 x1 := v.Args[0]
1338 if x1.Op != OpARM64SRLconst {
1339 break
1340 }
1341 c := x1.AuxInt
1342 y := x1.Args[0]
1343 if !(clobberIfDead(x1)) {
1344 break
1345 }
1346 v.reset(OpARM64ADDshiftRL)
1347 v.AuxInt = c
1348 v.AddArg(x0)
1349 v.AddArg(y)
1350 return true
1351 }
1352
1353
1354
1355 for {
1356 _ = v.Args[1]
1357 x0 := v.Args[0]
1358 x1 := v.Args[1]
1359 if x1.Op != OpARM64SRAconst {
1360 break
1361 }
1362 c := x1.AuxInt
1363 y := x1.Args[0]
1364 if !(clobberIfDead(x1)) {
1365 break
1366 }
1367 v.reset(OpARM64ADDshiftRA)
1368 v.AuxInt = c
1369 v.AddArg(x0)
1370 v.AddArg(y)
1371 return true
1372 }
1373
1374
1375
1376 for {
1377 x0 := v.Args[1]
1378 x1 := v.Args[0]
1379 if x1.Op != OpARM64SRAconst {
1380 break
1381 }
1382 c := x1.AuxInt
1383 y := x1.Args[0]
1384 if !(clobberIfDead(x1)) {
1385 break
1386 }
1387 v.reset(OpARM64ADDshiftRA)
1388 v.AuxInt = c
1389 v.AddArg(x0)
1390 v.AddArg(y)
1391 return true
1392 }
1393
1394
1395
1396 for {
1397 _ = v.Args[1]
1398 v_0 := v.Args[0]
1399 if v_0.Op != OpARM64SLL {
1400 break
1401 }
1402 _ = v_0.Args[1]
1403 x := v_0.Args[0]
1404 v_0_1 := v_0.Args[1]
1405 if v_0_1.Op != OpARM64ANDconst {
1406 break
1407 }
1408 t := v_0_1.Type
1409 if v_0_1.AuxInt != 63 {
1410 break
1411 }
1412 y := v_0_1.Args[0]
1413 v_1 := v.Args[1]
1414 if v_1.Op != OpARM64CSEL0 {
1415 break
1416 }
1417 if v_1.Type != typ.UInt64 {
1418 break
1419 }
1420 cc := v_1.Aux
1421 _ = v_1.Args[1]
1422 v_1_0 := v_1.Args[0]
1423 if v_1_0.Op != OpARM64SRL {
1424 break
1425 }
1426 if v_1_0.Type != typ.UInt64 {
1427 break
1428 }
1429 _ = v_1_0.Args[1]
1430 if x != v_1_0.Args[0] {
1431 break
1432 }
1433 v_1_0_1 := v_1_0.Args[1]
1434 if v_1_0_1.Op != OpARM64SUB {
1435 break
1436 }
1437 if v_1_0_1.Type != t {
1438 break
1439 }
1440 _ = v_1_0_1.Args[1]
1441 v_1_0_1_0 := v_1_0_1.Args[0]
1442 if v_1_0_1_0.Op != OpARM64MOVDconst {
1443 break
1444 }
1445 if v_1_0_1_0.AuxInt != 64 {
1446 break
1447 }
1448 v_1_0_1_1 := v_1_0_1.Args[1]
1449 if v_1_0_1_1.Op != OpARM64ANDconst {
1450 break
1451 }
1452 if v_1_0_1_1.Type != t {
1453 break
1454 }
1455 if v_1_0_1_1.AuxInt != 63 {
1456 break
1457 }
1458 if y != v_1_0_1_1.Args[0] {
1459 break
1460 }
1461 v_1_1 := v_1.Args[1]
1462 if v_1_1.Op != OpARM64CMPconst {
1463 break
1464 }
1465 if v_1_1.AuxInt != 64 {
1466 break
1467 }
1468 v_1_1_0 := v_1_1.Args[0]
1469 if v_1_1_0.Op != OpARM64SUB {
1470 break
1471 }
1472 if v_1_1_0.Type != t {
1473 break
1474 }
1475 _ = v_1_1_0.Args[1]
1476 v_1_1_0_0 := v_1_1_0.Args[0]
1477 if v_1_1_0_0.Op != OpARM64MOVDconst {
1478 break
1479 }
1480 if v_1_1_0_0.AuxInt != 64 {
1481 break
1482 }
1483 v_1_1_0_1 := v_1_1_0.Args[1]
1484 if v_1_1_0_1.Op != OpARM64ANDconst {
1485 break
1486 }
1487 if v_1_1_0_1.Type != t {
1488 break
1489 }
1490 if v_1_1_0_1.AuxInt != 63 {
1491 break
1492 }
1493 if y != v_1_1_0_1.Args[0] {
1494 break
1495 }
1496 if !(cc.(Op) == OpARM64LessThanU) {
1497 break
1498 }
1499 v.reset(OpARM64ROR)
1500 v.AddArg(x)
1501 v0 := b.NewValue0(v.Pos, OpARM64NEG, t)
1502 v0.AddArg(y)
1503 v.AddArg(v0)
1504 return true
1505 }
1506
1507
1508
1509 for {
1510 _ = v.Args[1]
1511 v_0 := v.Args[0]
1512 if v_0.Op != OpARM64CSEL0 {
1513 break
1514 }
1515 if v_0.Type != typ.UInt64 {
1516 break
1517 }
1518 cc := v_0.Aux
1519 _ = v_0.Args[1]
1520 v_0_0 := v_0.Args[0]
1521 if v_0_0.Op != OpARM64SRL {
1522 break
1523 }
1524 if v_0_0.Type != typ.UInt64 {
1525 break
1526 }
1527 _ = v_0_0.Args[1]
1528 x := v_0_0.Args[0]
1529 v_0_0_1 := v_0_0.Args[1]
1530 if v_0_0_1.Op != OpARM64SUB {
1531 break
1532 }
1533 t := v_0_0_1.Type
1534 _ = v_0_0_1.Args[1]
1535 v_0_0_1_0 := v_0_0_1.Args[0]
1536 if v_0_0_1_0.Op != OpARM64MOVDconst {
1537 break
1538 }
1539 if v_0_0_1_0.AuxInt != 64 {
1540 break
1541 }
1542 v_0_0_1_1 := v_0_0_1.Args[1]
1543 if v_0_0_1_1.Op != OpARM64ANDconst {
1544 break
1545 }
1546 if v_0_0_1_1.Type != t {
1547 break
1548 }
1549 if v_0_0_1_1.AuxInt != 63 {
1550 break
1551 }
1552 y := v_0_0_1_1.Args[0]
1553 v_0_1 := v_0.Args[1]
1554 if v_0_1.Op != OpARM64CMPconst {
1555 break
1556 }
1557 if v_0_1.AuxInt != 64 {
1558 break
1559 }
1560 v_0_1_0 := v_0_1.Args[0]
1561 if v_0_1_0.Op != OpARM64SUB {
1562 break
1563 }
1564 if v_0_1_0.Type != t {
1565 break
1566 }
1567 _ = v_0_1_0.Args[1]
1568 v_0_1_0_0 := v_0_1_0.Args[0]
1569 if v_0_1_0_0.Op != OpARM64MOVDconst {
1570 break
1571 }
1572 if v_0_1_0_0.AuxInt != 64 {
1573 break
1574 }
1575 v_0_1_0_1 := v_0_1_0.Args[1]
1576 if v_0_1_0_1.Op != OpARM64ANDconst {
1577 break
1578 }
1579 if v_0_1_0_1.Type != t {
1580 break
1581 }
1582 if v_0_1_0_1.AuxInt != 63 {
1583 break
1584 }
1585 if y != v_0_1_0_1.Args[0] {
1586 break
1587 }
1588 v_1 := v.Args[1]
1589 if v_1.Op != OpARM64SLL {
1590 break
1591 }
1592 _ = v_1.Args[1]
1593 if x != v_1.Args[0] {
1594 break
1595 }
1596 v_1_1 := v_1.Args[1]
1597 if v_1_1.Op != OpARM64ANDconst {
1598 break
1599 }
1600 if v_1_1.Type != t {
1601 break
1602 }
1603 if v_1_1.AuxInt != 63 {
1604 break
1605 }
1606 if y != v_1_1.Args[0] {
1607 break
1608 }
1609 if !(cc.(Op) == OpARM64LessThanU) {
1610 break
1611 }
1612 v.reset(OpARM64ROR)
1613 v.AddArg(x)
1614 v0 := b.NewValue0(v.Pos, OpARM64NEG, t)
1615 v0.AddArg(y)
1616 v.AddArg(v0)
1617 return true
1618 }
1619 return false
1620 }
1621 func rewriteValueARM64_OpARM64ADD_20(v *Value) bool {
1622 b := v.Block
1623 typ := &b.Func.Config.Types
1624
1625
1626
1627 for {
1628 _ = v.Args[1]
1629 v_0 := v.Args[0]
1630 if v_0.Op != OpARM64SRL {
1631 break
1632 }
1633 if v_0.Type != typ.UInt64 {
1634 break
1635 }
1636 _ = v_0.Args[1]
1637 x := v_0.Args[0]
1638 v_0_1 := v_0.Args[1]
1639 if v_0_1.Op != OpARM64ANDconst {
1640 break
1641 }
1642 t := v_0_1.Type
1643 if v_0_1.AuxInt != 63 {
1644 break
1645 }
1646 y := v_0_1.Args[0]
1647 v_1 := v.Args[1]
1648 if v_1.Op != OpARM64CSEL0 {
1649 break
1650 }
1651 if v_1.Type != typ.UInt64 {
1652 break
1653 }
1654 cc := v_1.Aux
1655 _ = v_1.Args[1]
1656 v_1_0 := v_1.Args[0]
1657 if v_1_0.Op != OpARM64SLL {
1658 break
1659 }
1660 _ = v_1_0.Args[1]
1661 if x != v_1_0.Args[0] {
1662 break
1663 }
1664 v_1_0_1 := v_1_0.Args[1]
1665 if v_1_0_1.Op != OpARM64SUB {
1666 break
1667 }
1668 if v_1_0_1.Type != t {
1669 break
1670 }
1671 _ = v_1_0_1.Args[1]
1672 v_1_0_1_0 := v_1_0_1.Args[0]
1673 if v_1_0_1_0.Op != OpARM64MOVDconst {
1674 break
1675 }
1676 if v_1_0_1_0.AuxInt != 64 {
1677 break
1678 }
1679 v_1_0_1_1 := v_1_0_1.Args[1]
1680 if v_1_0_1_1.Op != OpARM64ANDconst {
1681 break
1682 }
1683 if v_1_0_1_1.Type != t {
1684 break
1685 }
1686 if v_1_0_1_1.AuxInt != 63 {
1687 break
1688 }
1689 if y != v_1_0_1_1.Args[0] {
1690 break
1691 }
1692 v_1_1 := v_1.Args[1]
1693 if v_1_1.Op != OpARM64CMPconst {
1694 break
1695 }
1696 if v_1_1.AuxInt != 64 {
1697 break
1698 }
1699 v_1_1_0 := v_1_1.Args[0]
1700 if v_1_1_0.Op != OpARM64SUB {
1701 break
1702 }
1703 if v_1_1_0.Type != t {
1704 break
1705 }
1706 _ = v_1_1_0.Args[1]
1707 v_1_1_0_0 := v_1_1_0.Args[0]
1708 if v_1_1_0_0.Op != OpARM64MOVDconst {
1709 break
1710 }
1711 if v_1_1_0_0.AuxInt != 64 {
1712 break
1713 }
1714 v_1_1_0_1 := v_1_1_0.Args[1]
1715 if v_1_1_0_1.Op != OpARM64ANDconst {
1716 break
1717 }
1718 if v_1_1_0_1.Type != t {
1719 break
1720 }
1721 if v_1_1_0_1.AuxInt != 63 {
1722 break
1723 }
1724 if y != v_1_1_0_1.Args[0] {
1725 break
1726 }
1727 if !(cc.(Op) == OpARM64LessThanU) {
1728 break
1729 }
1730 v.reset(OpARM64ROR)
1731 v.AddArg(x)
1732 v.AddArg(y)
1733 return true
1734 }
1735
1736
1737
1738 for {
1739 _ = v.Args[1]
1740 v_0 := v.Args[0]
1741 if v_0.Op != OpARM64CSEL0 {
1742 break
1743 }
1744 if v_0.Type != typ.UInt64 {
1745 break
1746 }
1747 cc := v_0.Aux
1748 _ = v_0.Args[1]
1749 v_0_0 := v_0.Args[0]
1750 if v_0_0.Op != OpARM64SLL {
1751 break
1752 }
1753 _ = v_0_0.Args[1]
1754 x := v_0_0.Args[0]
1755 v_0_0_1 := v_0_0.Args[1]
1756 if v_0_0_1.Op != OpARM64SUB {
1757 break
1758 }
1759 t := v_0_0_1.Type
1760 _ = v_0_0_1.Args[1]
1761 v_0_0_1_0 := v_0_0_1.Args[0]
1762 if v_0_0_1_0.Op != OpARM64MOVDconst {
1763 break
1764 }
1765 if v_0_0_1_0.AuxInt != 64 {
1766 break
1767 }
1768 v_0_0_1_1 := v_0_0_1.Args[1]
1769 if v_0_0_1_1.Op != OpARM64ANDconst {
1770 break
1771 }
1772 if v_0_0_1_1.Type != t {
1773 break
1774 }
1775 if v_0_0_1_1.AuxInt != 63 {
1776 break
1777 }
1778 y := v_0_0_1_1.Args[0]
1779 v_0_1 := v_0.Args[1]
1780 if v_0_1.Op != OpARM64CMPconst {
1781 break
1782 }
1783 if v_0_1.AuxInt != 64 {
1784 break
1785 }
1786 v_0_1_0 := v_0_1.Args[0]
1787 if v_0_1_0.Op != OpARM64SUB {
1788 break
1789 }
1790 if v_0_1_0.Type != t {
1791 break
1792 }
1793 _ = v_0_1_0.Args[1]
1794 v_0_1_0_0 := v_0_1_0.Args[0]
1795 if v_0_1_0_0.Op != OpARM64MOVDconst {
1796 break
1797 }
1798 if v_0_1_0_0.AuxInt != 64 {
1799 break
1800 }
1801 v_0_1_0_1 := v_0_1_0.Args[1]
1802 if v_0_1_0_1.Op != OpARM64ANDconst {
1803 break
1804 }
1805 if v_0_1_0_1.Type != t {
1806 break
1807 }
1808 if v_0_1_0_1.AuxInt != 63 {
1809 break
1810 }
1811 if y != v_0_1_0_1.Args[0] {
1812 break
1813 }
1814 v_1 := v.Args[1]
1815 if v_1.Op != OpARM64SRL {
1816 break
1817 }
1818 if v_1.Type != typ.UInt64 {
1819 break
1820 }
1821 _ = v_1.Args[1]
1822 if x != v_1.Args[0] {
1823 break
1824 }
1825 v_1_1 := v_1.Args[1]
1826 if v_1_1.Op != OpARM64ANDconst {
1827 break
1828 }
1829 if v_1_1.Type != t {
1830 break
1831 }
1832 if v_1_1.AuxInt != 63 {
1833 break
1834 }
1835 if y != v_1_1.Args[0] {
1836 break
1837 }
1838 if !(cc.(Op) == OpARM64LessThanU) {
1839 break
1840 }
1841 v.reset(OpARM64ROR)
1842 v.AddArg(x)
1843 v.AddArg(y)
1844 return true
1845 }
1846
1847
1848
1849 for {
1850 _ = v.Args[1]
1851 v_0 := v.Args[0]
1852 if v_0.Op != OpARM64SLL {
1853 break
1854 }
1855 _ = v_0.Args[1]
1856 x := v_0.Args[0]
1857 v_0_1 := v_0.Args[1]
1858 if v_0_1.Op != OpARM64ANDconst {
1859 break
1860 }
1861 t := v_0_1.Type
1862 if v_0_1.AuxInt != 31 {
1863 break
1864 }
1865 y := v_0_1.Args[0]
1866 v_1 := v.Args[1]
1867 if v_1.Op != OpARM64CSEL0 {
1868 break
1869 }
1870 if v_1.Type != typ.UInt32 {
1871 break
1872 }
1873 cc := v_1.Aux
1874 _ = v_1.Args[1]
1875 v_1_0 := v_1.Args[0]
1876 if v_1_0.Op != OpARM64SRL {
1877 break
1878 }
1879 if v_1_0.Type != typ.UInt32 {
1880 break
1881 }
1882 _ = v_1_0.Args[1]
1883 v_1_0_0 := v_1_0.Args[0]
1884 if v_1_0_0.Op != OpARM64MOVWUreg {
1885 break
1886 }
1887 if x != v_1_0_0.Args[0] {
1888 break
1889 }
1890 v_1_0_1 := v_1_0.Args[1]
1891 if v_1_0_1.Op != OpARM64SUB {
1892 break
1893 }
1894 if v_1_0_1.Type != t {
1895 break
1896 }
1897 _ = v_1_0_1.Args[1]
1898 v_1_0_1_0 := v_1_0_1.Args[0]
1899 if v_1_0_1_0.Op != OpARM64MOVDconst {
1900 break
1901 }
1902 if v_1_0_1_0.AuxInt != 32 {
1903 break
1904 }
1905 v_1_0_1_1 := v_1_0_1.Args[1]
1906 if v_1_0_1_1.Op != OpARM64ANDconst {
1907 break
1908 }
1909 if v_1_0_1_1.Type != t {
1910 break
1911 }
1912 if v_1_0_1_1.AuxInt != 31 {
1913 break
1914 }
1915 if y != v_1_0_1_1.Args[0] {
1916 break
1917 }
1918 v_1_1 := v_1.Args[1]
1919 if v_1_1.Op != OpARM64CMPconst {
1920 break
1921 }
1922 if v_1_1.AuxInt != 64 {
1923 break
1924 }
1925 v_1_1_0 := v_1_1.Args[0]
1926 if v_1_1_0.Op != OpARM64SUB {
1927 break
1928 }
1929 if v_1_1_0.Type != t {
1930 break
1931 }
1932 _ = v_1_1_0.Args[1]
1933 v_1_1_0_0 := v_1_1_0.Args[0]
1934 if v_1_1_0_0.Op != OpARM64MOVDconst {
1935 break
1936 }
1937 if v_1_1_0_0.AuxInt != 32 {
1938 break
1939 }
1940 v_1_1_0_1 := v_1_1_0.Args[1]
1941 if v_1_1_0_1.Op != OpARM64ANDconst {
1942 break
1943 }
1944 if v_1_1_0_1.Type != t {
1945 break
1946 }
1947 if v_1_1_0_1.AuxInt != 31 {
1948 break
1949 }
1950 if y != v_1_1_0_1.Args[0] {
1951 break
1952 }
1953 if !(cc.(Op) == OpARM64LessThanU) {
1954 break
1955 }
1956 v.reset(OpARM64RORW)
1957 v.AddArg(x)
1958 v0 := b.NewValue0(v.Pos, OpARM64NEG, t)
1959 v0.AddArg(y)
1960 v.AddArg(v0)
1961 return true
1962 }
1963
1964
1965
1966 for {
1967 _ = v.Args[1]
1968 v_0 := v.Args[0]
1969 if v_0.Op != OpARM64CSEL0 {
1970 break
1971 }
1972 if v_0.Type != typ.UInt32 {
1973 break
1974 }
1975 cc := v_0.Aux
1976 _ = v_0.Args[1]
1977 v_0_0 := v_0.Args[0]
1978 if v_0_0.Op != OpARM64SRL {
1979 break
1980 }
1981 if v_0_0.Type != typ.UInt32 {
1982 break
1983 }
1984 _ = v_0_0.Args[1]
1985 v_0_0_0 := v_0_0.Args[0]
1986 if v_0_0_0.Op != OpARM64MOVWUreg {
1987 break
1988 }
1989 x := v_0_0_0.Args[0]
1990 v_0_0_1 := v_0_0.Args[1]
1991 if v_0_0_1.Op != OpARM64SUB {
1992 break
1993 }
1994 t := v_0_0_1.Type
1995 _ = v_0_0_1.Args[1]
1996 v_0_0_1_0 := v_0_0_1.Args[0]
1997 if v_0_0_1_0.Op != OpARM64MOVDconst {
1998 break
1999 }
2000 if v_0_0_1_0.AuxInt != 32 {
2001 break
2002 }
2003 v_0_0_1_1 := v_0_0_1.Args[1]
2004 if v_0_0_1_1.Op != OpARM64ANDconst {
2005 break
2006 }
2007 if v_0_0_1_1.Type != t {
2008 break
2009 }
2010 if v_0_0_1_1.AuxInt != 31 {
2011 break
2012 }
2013 y := v_0_0_1_1.Args[0]
2014 v_0_1 := v_0.Args[1]
2015 if v_0_1.Op != OpARM64CMPconst {
2016 break
2017 }
2018 if v_0_1.AuxInt != 64 {
2019 break
2020 }
2021 v_0_1_0 := v_0_1.Args[0]
2022 if v_0_1_0.Op != OpARM64SUB {
2023 break
2024 }
2025 if v_0_1_0.Type != t {
2026 break
2027 }
2028 _ = v_0_1_0.Args[1]
2029 v_0_1_0_0 := v_0_1_0.Args[0]
2030 if v_0_1_0_0.Op != OpARM64MOVDconst {
2031 break
2032 }
2033 if v_0_1_0_0.AuxInt != 32 {
2034 break
2035 }
2036 v_0_1_0_1 := v_0_1_0.Args[1]
2037 if v_0_1_0_1.Op != OpARM64ANDconst {
2038 break
2039 }
2040 if v_0_1_0_1.Type != t {
2041 break
2042 }
2043 if v_0_1_0_1.AuxInt != 31 {
2044 break
2045 }
2046 if y != v_0_1_0_1.Args[0] {
2047 break
2048 }
2049 v_1 := v.Args[1]
2050 if v_1.Op != OpARM64SLL {
2051 break
2052 }
2053 _ = v_1.Args[1]
2054 if x != v_1.Args[0] {
2055 break
2056 }
2057 v_1_1 := v_1.Args[1]
2058 if v_1_1.Op != OpARM64ANDconst {
2059 break
2060 }
2061 if v_1_1.Type != t {
2062 break
2063 }
2064 if v_1_1.AuxInt != 31 {
2065 break
2066 }
2067 if y != v_1_1.Args[0] {
2068 break
2069 }
2070 if !(cc.(Op) == OpARM64LessThanU) {
2071 break
2072 }
2073 v.reset(OpARM64RORW)
2074 v.AddArg(x)
2075 v0 := b.NewValue0(v.Pos, OpARM64NEG, t)
2076 v0.AddArg(y)
2077 v.AddArg(v0)
2078 return true
2079 }
2080
2081
2082
2083 for {
2084 _ = v.Args[1]
2085 v_0 := v.Args[0]
2086 if v_0.Op != OpARM64SRL {
2087 break
2088 }
2089 if v_0.Type != typ.UInt32 {
2090 break
2091 }
2092 _ = v_0.Args[1]
2093 v_0_0 := v_0.Args[0]
2094 if v_0_0.Op != OpARM64MOVWUreg {
2095 break
2096 }
2097 x := v_0_0.Args[0]
2098 v_0_1 := v_0.Args[1]
2099 if v_0_1.Op != OpARM64ANDconst {
2100 break
2101 }
2102 t := v_0_1.Type
2103 if v_0_1.AuxInt != 31 {
2104 break
2105 }
2106 y := v_0_1.Args[0]
2107 v_1 := v.Args[1]
2108 if v_1.Op != OpARM64CSEL0 {
2109 break
2110 }
2111 if v_1.Type != typ.UInt32 {
2112 break
2113 }
2114 cc := v_1.Aux
2115 _ = v_1.Args[1]
2116 v_1_0 := v_1.Args[0]
2117 if v_1_0.Op != OpARM64SLL {
2118 break
2119 }
2120 _ = v_1_0.Args[1]
2121 if x != v_1_0.Args[0] {
2122 break
2123 }
2124 v_1_0_1 := v_1_0.Args[1]
2125 if v_1_0_1.Op != OpARM64SUB {
2126 break
2127 }
2128 if v_1_0_1.Type != t {
2129 break
2130 }
2131 _ = v_1_0_1.Args[1]
2132 v_1_0_1_0 := v_1_0_1.Args[0]
2133 if v_1_0_1_0.Op != OpARM64MOVDconst {
2134 break
2135 }
2136 if v_1_0_1_0.AuxInt != 32 {
2137 break
2138 }
2139 v_1_0_1_1 := v_1_0_1.Args[1]
2140 if v_1_0_1_1.Op != OpARM64ANDconst {
2141 break
2142 }
2143 if v_1_0_1_1.Type != t {
2144 break
2145 }
2146 if v_1_0_1_1.AuxInt != 31 {
2147 break
2148 }
2149 if y != v_1_0_1_1.Args[0] {
2150 break
2151 }
2152 v_1_1 := v_1.Args[1]
2153 if v_1_1.Op != OpARM64CMPconst {
2154 break
2155 }
2156 if v_1_1.AuxInt != 64 {
2157 break
2158 }
2159 v_1_1_0 := v_1_1.Args[0]
2160 if v_1_1_0.Op != OpARM64SUB {
2161 break
2162 }
2163 if v_1_1_0.Type != t {
2164 break
2165 }
2166 _ = v_1_1_0.Args[1]
2167 v_1_1_0_0 := v_1_1_0.Args[0]
2168 if v_1_1_0_0.Op != OpARM64MOVDconst {
2169 break
2170 }
2171 if v_1_1_0_0.AuxInt != 32 {
2172 break
2173 }
2174 v_1_1_0_1 := v_1_1_0.Args[1]
2175 if v_1_1_0_1.Op != OpARM64ANDconst {
2176 break
2177 }
2178 if v_1_1_0_1.Type != t {
2179 break
2180 }
2181 if v_1_1_0_1.AuxInt != 31 {
2182 break
2183 }
2184 if y != v_1_1_0_1.Args[0] {
2185 break
2186 }
2187 if !(cc.(Op) == OpARM64LessThanU) {
2188 break
2189 }
2190 v.reset(OpARM64RORW)
2191 v.AddArg(x)
2192 v.AddArg(y)
2193 return true
2194 }
2195
2196
2197
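// match: (ADD (CSEL0 <typ.UInt32> {cc} (SLL x (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y))) (CMPconst [64] (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y)))) (SRL <typ.UInt32> (MOVWUreg x) (ANDconst <t> [31] y)))
// cond: cc.(Op) == OpARM64LessThanU
// result: (RORW x y)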
2198 for {
2199 _ = v.Args[1]
2200 v_0 := v.Args[0]
2201 if v_0.Op != OpARM64CSEL0 {
2202 break
2203 }
2204 if v_0.Type != typ.UInt32 {
2205 break
2206 }
2207 cc := v_0.Aux
2208 _ = v_0.Args[1]
2209 v_0_0 := v_0.Args[0]
2210 if v_0_0.Op != OpARM64SLL {
2211 break
2212 }
2213 _ = v_0_0.Args[1]
2214 x := v_0_0.Args[0]
2215 v_0_0_1 := v_0_0.Args[1]
2216 if v_0_0_1.Op != OpARM64SUB {
2217 break
2218 }
2219 t := v_0_0_1.Type
2220 _ = v_0_0_1.Args[1]
2221 v_0_0_1_0 := v_0_0_1.Args[0]
2222 if v_0_0_1_0.Op != OpARM64MOVDconst {
2223 break
2224 }
2225 if v_0_0_1_0.AuxInt != 32 {
2226 break
2227 }
2228 v_0_0_1_1 := v_0_0_1.Args[1]
2229 if v_0_0_1_1.Op != OpARM64ANDconst {
2230 break
2231 }
2232 if v_0_0_1_1.Type != t {
2233 break
2234 }
2235 if v_0_0_1_1.AuxInt != 31 {
2236 break
2237 }
2238 y := v_0_0_1_1.Args[0]
2239 v_0_1 := v_0.Args[1]
2240 if v_0_1.Op != OpARM64CMPconst {
2241 break
2242 }
2243 if v_0_1.AuxInt != 64 {
2244 break
2245 }
2246 v_0_1_0 := v_0_1.Args[0]
2247 if v_0_1_0.Op != OpARM64SUB {
2248 break
2249 }
2250 if v_0_1_0.Type != t {
2251 break
2252 }
2253 _ = v_0_1_0.Args[1]
2254 v_0_1_0_0 := v_0_1_0.Args[0]
2255 if v_0_1_0_0.Op != OpARM64MOVDconst {
2256 break
2257 }
2258 if v_0_1_0_0.AuxInt != 32 {
2259 break
2260 }
2261 v_0_1_0_1 := v_0_1_0.Args[1]
2262 if v_0_1_0_1.Op != OpARM64ANDconst {
2263 break
2264 }
2265 if v_0_1_0_1.Type != t {
2266 break
2267 }
2268 if v_0_1_0_1.AuxInt != 31 {
2269 break
2270 }
2271 if y != v_0_1_0_1.Args[0] {
2272 break
2273 }
2274 v_1 := v.Args[1]
2275 if v_1.Op != OpARM64SRL {
2276 break
2277 }
2278 if v_1.Type != typ.UInt32 {
2279 break
2280 }
2281 _ = v_1.Args[1]
2282 v_1_0 := v_1.Args[0]
2283 if v_1_0.Op != OpARM64MOVWUreg {
2284 break
2285 }
2286 if x != v_1_0.Args[0] {
2287 break
2288 }
2289 v_1_1 := v_1.Args[1]
2290 if v_1_1.Op != OpARM64ANDconst {
2291 break
2292 }
2293 if v_1_1.Type != t {
2294 break
2295 }
2296 if v_1_1.AuxInt != 31 {
2297 break
2298 }
2299 if y != v_1_1.Args[0] {
2300 break
2301 }
2302 if !(cc.(Op) == OpARM64LessThanU) {
2303 break
2304 }
2305 v.reset(OpARM64RORW)
2306 v.AddArg(x)
2307 v.AddArg(y)
2308 return true
2309 }
2310 return false
2311 }
2312 func rewriteValueARM64_OpARM64ADDconst_0(v *Value) bool {
2313
2314
2315
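// match: (ADDconst [off1] (MOVDaddr [off2] {sym} ptr))
// result: (MOVDaddr [off1+off2] {sym} ptr)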
2316 for {
2317 off1 := v.AuxInt
2318 v_0 := v.Args[0]
2319 if v_0.Op != OpARM64MOVDaddr {
2320 break
2321 }
2322 off2 := v_0.AuxInt
2323 sym := v_0.Aux
2324 ptr := v_0.Args[0]
2325 v.reset(OpARM64MOVDaddr)
2326 v.AuxInt = off1 + off2
2327 v.Aux = sym
2328 v.AddArg(ptr)
2329 return true
2330 }
2331
2332
2333
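// match: (ADDconst [0] x)
// result: x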
2334 for {
2335 if v.AuxInt != 0 {
2336 break
2337 }
2338 x := v.Args[0]
2339 v.reset(OpCopy)
2340 v.Type = x.Type
2341 v.AddArg(x)
2342 return true
2343 }
2344
2345
2346
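// match: (ADDconst [c] (MOVDconst [d]))
// result: (MOVDconst [c+d])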
2347 for {
2348 c := v.AuxInt
2349 v_0 := v.Args[0]
2350 if v_0.Op != OpARM64MOVDconst {
2351 break
2352 }
2353 d := v_0.AuxInt
2354 v.reset(OpARM64MOVDconst)
2355 v.AuxInt = c + d
2356 return true
2357 }
2358
2359
2360
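// match: (ADDconst [c] (ADDconst [d] x))
// result: (ADDconst [c+d] x)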
2361 for {
2362 c := v.AuxInt
2363 v_0 := v.Args[0]
2364 if v_0.Op != OpARM64ADDconst {
2365 break
2366 }
2367 d := v_0.AuxInt
2368 x := v_0.Args[0]
2369 v.reset(OpARM64ADDconst)
2370 v.AuxInt = c + d
2371 v.AddArg(x)
2372 return true
2373 }
2374
2375
2376
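// match: (ADDconst [c] (SUBconst [d] x))
// result: (ADDconst [c-d] x)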
2377 for {
2378 c := v.AuxInt
2379 v_0 := v.Args[0]
2380 if v_0.Op != OpARM64SUBconst {
2381 break
2382 }
2383 d := v_0.AuxInt
2384 x := v_0.Args[0]
2385 v.reset(OpARM64ADDconst)
2386 v.AuxInt = c - d
2387 v.AddArg(x)
2388 return true
2389 }
2390 return false
2391 }
2392 func rewriteValueARM64_OpARM64ADDshiftLL_0(v *Value) bool {
2393 b := v.Block
2394 typ := &b.Func.Config.Types
2395
2396
2397
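// match: (ADDshiftLL (MOVDconst [c]) x [d])
// result: (ADDconst [c] (SLLconst <x.Type> x [d]))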
2398 for {
2399 d := v.AuxInt
2400 x := v.Args[1]
2401 v_0 := v.Args[0]
2402 if v_0.Op != OpARM64MOVDconst {
2403 break
2404 }
2405 c := v_0.AuxInt
2406 v.reset(OpARM64ADDconst)
2407 v.AuxInt = c
2408 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
2409 v0.AuxInt = d
2410 v0.AddArg(x)
2411 v.AddArg(v0)
2412 return true
2413 }
2414
2415
2416
2417 for {
2418 d := v.AuxInt
2419 _ = v.Args[1]
2420 x := v.Args[0]
2421 v_1 := v.Args[1]
2422 if v_1.Op != OpARM64MOVDconst {
2423 break
2424 }
2425 c := v_1.AuxInt
2426 v.reset(OpARM64ADDconst)
2427 v.AuxInt = int64(uint64(c) << uint64(d))
2428 v.AddArg(x)
2429 return true
2430 }
2431
2432
2433
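// match: (ADDshiftLL [c] (SRLconst x [64-c]) x)
// result: (RORconst [64-c] x)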
2434 for {
2435 c := v.AuxInt
2436 x := v.Args[1]
2437 v_0 := v.Args[0]
2438 if v_0.Op != OpARM64SRLconst {
2439 break
2440 }
2441 if v_0.AuxInt != 64-c {
2442 break
2443 }
2444 if x != v_0.Args[0] {
2445 break
2446 }
2447 v.reset(OpARM64RORconst)
2448 v.AuxInt = 64 - c
2449 v.AddArg(x)
2450 return true
2451 }
2452
2453
2454
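// match: (ADDshiftLL <t> [c] (UBFX [bfc] x) x)
// cond: c < 32 && t.Size() == 4 && bfc == armBFAuxInt(32-c, c)
// result: (RORWconst [32-c] x)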
2455 for {
2456 t := v.Type
2457 c := v.AuxInt
2458 x := v.Args[1]
2459 v_0 := v.Args[0]
2460 if v_0.Op != OpARM64UBFX {
2461 break
2462 }
2463 bfc := v_0.AuxInt
2464 if x != v_0.Args[0] {
2465 break
2466 }
2467 if !(c < 32 && t.Size() == 4 && bfc == armBFAuxInt(32-c, c)) {
2468 break
2469 }
2470 v.reset(OpARM64RORWconst)
2471 v.AuxInt = 32 - c
2472 v.AddArg(x)
2473 return true
2474 }
2475
2476
2477
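// match: (ADDshiftLL <typ.UInt16> [8] (UBFX <typ.UInt16> [armBFAuxInt(8, 8)] x) x)
// result: (REV16W x)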
2478 for {
2479 if v.Type != typ.UInt16 {
2480 break
2481 }
2482 if v.AuxInt != 8 {
2483 break
2484 }
2485 x := v.Args[1]
2486 v_0 := v.Args[0]
2487 if v_0.Op != OpARM64UBFX {
2488 break
2489 }
2490 if v_0.Type != typ.UInt16 {
2491 break
2492 }
2493 if v_0.AuxInt != armBFAuxInt(8, 8) {
2494 break
2495 }
2496 if x != v_0.Args[0] {
2497 break
2498 }
2499 v.reset(OpARM64REV16W)
2500 v.AddArg(x)
2501 return true
2502 }
2503
2504
2505
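// match: (ADDshiftLL [c] (SRLconst x [64-c]) x2)
// result: (EXTRconst [64-c] x2 x)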
2506 for {
2507 c := v.AuxInt
2508 x2 := v.Args[1]
2509 v_0 := v.Args[0]
2510 if v_0.Op != OpARM64SRLconst {
2511 break
2512 }
2513 if v_0.AuxInt != 64-c {
2514 break
2515 }
2516 x := v_0.Args[0]
2517 v.reset(OpARM64EXTRconst)
2518 v.AuxInt = 64 - c
2519 v.AddArg(x2)
2520 v.AddArg(x)
2521 return true
2522 }
2523
2524
2525
2526 for {
2527 t := v.Type
2528 c := v.AuxInt
2529 x2 := v.Args[1]
2530 v_0 := v.Args[0]
2531 if v_0.Op != OpARM64UBFX {
2532 break
2533 }
2534 bfc := v_0.AuxInt
2535 x := v_0.Args[0]
2536 if !(c < 32 && t.Size() == 4 && bfc == armBFAuxInt(32-c, c)) {
2537 break
2538 }
2539 v.reset(OpARM64EXTRWconst)
2540 v.AuxInt = 32 - c
2541 v.AddArg(x2)
2542 v.AddArg(x)
2543 return true
2544 }
2545 return false
2546 }
2547 func rewriteValueARM64_OpARM64ADDshiftRA_0(v *Value) bool {
2548 b := v.Block
2549
2550
2551
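// match: (ADDshiftRA (MOVDconst [c]) x [d])
// result: (ADDconst [c] (SRAconst <x.Type> x [d]))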
2552 for {
2553 d := v.AuxInt
2554 x := v.Args[1]
2555 v_0 := v.Args[0]
2556 if v_0.Op != OpARM64MOVDconst {
2557 break
2558 }
2559 c := v_0.AuxInt
2560 v.reset(OpARM64ADDconst)
2561 v.AuxInt = c
2562 v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type)
2563 v0.AuxInt = d
2564 v0.AddArg(x)
2565 v.AddArg(v0)
2566 return true
2567 }
2568
2569
2570
2571 for {
2572 d := v.AuxInt
2573 _ = v.Args[1]
2574 x := v.Args[0]
2575 v_1 := v.Args[1]
2576 if v_1.Op != OpARM64MOVDconst {
2577 break
2578 }
2579 c := v_1.AuxInt
2580 v.reset(OpARM64ADDconst)
2581 v.AuxInt = c >> uint64(d)
2582 v.AddArg(x)
2583 return true
2584 }
2585 return false
2586 }
2587 func rewriteValueARM64_OpARM64ADDshiftRL_0(v *Value) bool {
2588 b := v.Block
2589
2590
2591
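// match: (ADDshiftRL (MOVDconst [c]) x [d])
// result: (ADDconst [c] (SRLconst <x.Type> x [d]))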
2592 for {
2593 d := v.AuxInt
2594 x := v.Args[1]
2595 v_0 := v.Args[0]
2596 if v_0.Op != OpARM64MOVDconst {
2597 break
2598 }
2599 c := v_0.AuxInt
2600 v.reset(OpARM64ADDconst)
2601 v.AuxInt = c
2602 v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type)
2603 v0.AuxInt = d
2604 v0.AddArg(x)
2605 v.AddArg(v0)
2606 return true
2607 }
2608
2609
2610
2611 for {
2612 d := v.AuxInt
2613 _ = v.Args[1]
2614 x := v.Args[0]
2615 v_1 := v.Args[1]
2616 if v_1.Op != OpARM64MOVDconst {
2617 break
2618 }
2619 c := v_1.AuxInt
2620 v.reset(OpARM64ADDconst)
2621 v.AuxInt = int64(uint64(c) >> uint64(d))
2622 v.AddArg(x)
2623 return true
2624 }
2625
2626
2627
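// match: (ADDshiftRL [c] (SLLconst x [64-c]) x)
// result: (RORconst [c] x)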
2628 for {
2629 c := v.AuxInt
2630 x := v.Args[1]
2631 v_0 := v.Args[0]
2632 if v_0.Op != OpARM64SLLconst {
2633 break
2634 }
2635 if v_0.AuxInt != 64-c {
2636 break
2637 }
2638 if x != v_0.Args[0] {
2639 break
2640 }
2641 v.reset(OpARM64RORconst)
2642 v.AuxInt = c
2643 v.AddArg(x)
2644 return true
2645 }
2646
2647
2648
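// match: (ADDshiftRL <t> [c] (SLLconst x [32-c]) (MOVWUreg x))
// cond: c < 32 && t.Size() == 4
// result: (RORWconst [c] x)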
2649 for {
2650 t := v.Type
2651 c := v.AuxInt
2652 _ = v.Args[1]
2653 v_0 := v.Args[0]
2654 if v_0.Op != OpARM64SLLconst {
2655 break
2656 }
2657 if v_0.AuxInt != 32-c {
2658 break
2659 }
2660 x := v_0.Args[0]
2661 v_1 := v.Args[1]
2662 if v_1.Op != OpARM64MOVWUreg {
2663 break
2664 }
2665 if x != v_1.Args[0] {
2666 break
2667 }
2668 if !(c < 32 && t.Size() == 4) {
2669 break
2670 }
2671 v.reset(OpARM64RORWconst)
2672 v.AuxInt = c
2673 v.AddArg(x)
2674 return true
2675 }
2676 return false
2677 }
2678 func rewriteValueARM64_OpARM64AND_0(v *Value) bool {
2679
2680
2681
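// match: (AND x (MOVDconst [c]))
// result: (ANDconst [c] x)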
2682 for {
2683 _ = v.Args[1]
2684 x := v.Args[0]
2685 v_1 := v.Args[1]
2686 if v_1.Op != OpARM64MOVDconst {
2687 break
2688 }
2689 c := v_1.AuxInt
2690 v.reset(OpARM64ANDconst)
2691 v.AuxInt = c
2692 v.AddArg(x)
2693 return true
2694 }
2695
2696
2697
2698 for {
2699 x := v.Args[1]
2700 v_0 := v.Args[0]
2701 if v_0.Op != OpARM64MOVDconst {
2702 break
2703 }
2704 c := v_0.AuxInt
2705 v.reset(OpARM64ANDconst)
2706 v.AuxInt = c
2707 v.AddArg(x)
2708 return true
2709 }
2710
2711
2712
2713 for {
2714 x := v.Args[1]
2715 if x != v.Args[0] {
2716 break
2717 }
2718 v.reset(OpCopy)
2719 v.Type = x.Type
2720 v.AddArg(x)
2721 return true
2722 }
2723
2724
2725
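// match: (AND x (MVN y))
// result: (BIC x y)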
2726 for {
2727 _ = v.Args[1]
2728 x := v.Args[0]
2729 v_1 := v.Args[1]
2730 if v_1.Op != OpARM64MVN {
2731 break
2732 }
2733 y := v_1.Args[0]
2734 v.reset(OpARM64BIC)
2735 v.AddArg(x)
2736 v.AddArg(y)
2737 return true
2738 }
2739
2740
2741
2742 for {
2743 x := v.Args[1]
2744 v_0 := v.Args[0]
2745 if v_0.Op != OpARM64MVN {
2746 break
2747 }
2748 y := v_0.Args[0]
2749 v.reset(OpARM64BIC)
2750 v.AddArg(x)
2751 v.AddArg(y)
2752 return true
2753 }
2754
2755
2756
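// match: (AND x0 x1:(SLLconst [c] y))
// cond: clobberIfDead(x1)
// result: (ANDshiftLL x0 y [c])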
2757 for {
2758 _ = v.Args[1]
2759 x0 := v.Args[0]
2760 x1 := v.Args[1]
2761 if x1.Op != OpARM64SLLconst {
2762 break
2763 }
2764 c := x1.AuxInt
2765 y := x1.Args[0]
2766 if !(clobberIfDead(x1)) {
2767 break
2768 }
2769 v.reset(OpARM64ANDshiftLL)
2770 v.AuxInt = c
2771 v.AddArg(x0)
2772 v.AddArg(y)
2773 return true
2774 }
2775
2776
2777
2778 for {
2779 x0 := v.Args[1]
2780 x1 := v.Args[0]
2781 if x1.Op != OpARM64SLLconst {
2782 break
2783 }
2784 c := x1.AuxInt
2785 y := x1.Args[0]
2786 if !(clobberIfDead(x1)) {
2787 break
2788 }
2789 v.reset(OpARM64ANDshiftLL)
2790 v.AuxInt = c
2791 v.AddArg(x0)
2792 v.AddArg(y)
2793 return true
2794 }
2795
2796
2797
2798 for {
2799 _ = v.Args[1]
2800 x0 := v.Args[0]
2801 x1 := v.Args[1]
2802 if x1.Op != OpARM64SRLconst {
2803 break
2804 }
2805 c := x1.AuxInt
2806 y := x1.Args[0]
2807 if !(clobberIfDead(x1)) {
2808 break
2809 }
2810 v.reset(OpARM64ANDshiftRL)
2811 v.AuxInt = c
2812 v.AddArg(x0)
2813 v.AddArg(y)
2814 return true
2815 }
2816
2817
2818
2819 for {
2820 x0 := v.Args[1]
2821 x1 := v.Args[0]
2822 if x1.Op != OpARM64SRLconst {
2823 break
2824 }
2825 c := x1.AuxInt
2826 y := x1.Args[0]
2827 if !(clobberIfDead(x1)) {
2828 break
2829 }
2830 v.reset(OpARM64ANDshiftRL)
2831 v.AuxInt = c
2832 v.AddArg(x0)
2833 v.AddArg(y)
2834 return true
2835 }
2836
2837
2838
2839 for {
2840 _ = v.Args[1]
2841 x0 := v.Args[0]
2842 x1 := v.Args[1]
2843 if x1.Op != OpARM64SRAconst {
2844 break
2845 }
2846 c := x1.AuxInt
2847 y := x1.Args[0]
2848 if !(clobberIfDead(x1)) {
2849 break
2850 }
2851 v.reset(OpARM64ANDshiftRA)
2852 v.AuxInt = c
2853 v.AddArg(x0)
2854 v.AddArg(y)
2855 return true
2856 }
2857 return false
2858 }
2859 func rewriteValueARM64_OpARM64AND_10(v *Value) bool {
2860
2861
2862
2863 for {
2864 x0 := v.Args[1]
2865 x1 := v.Args[0]
2866 if x1.Op != OpARM64SRAconst {
2867 break
2868 }
2869 c := x1.AuxInt
2870 y := x1.Args[0]
2871 if !(clobberIfDead(x1)) {
2872 break
2873 }
2874 v.reset(OpARM64ANDshiftRA)
2875 v.AuxInt = c
2876 v.AddArg(x0)
2877 v.AddArg(y)
2878 return true
2879 }
2880 return false
2881 }
2882 func rewriteValueARM64_OpARM64ANDconst_0(v *Value) bool {
2883
2884
2885
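// match: (ANDconst [0] _)
// result: (MOVDconst [0])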
2886 for {
2887 if v.AuxInt != 0 {
2888 break
2889 }
2890 v.reset(OpARM64MOVDconst)
2891 v.AuxInt = 0
2892 return true
2893 }
2894
2895
2896
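// match: (ANDconst [-1] x)
// result: x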
2897 for {
2898 if v.AuxInt != -1 {
2899 break
2900 }
2901 x := v.Args[0]
2902 v.reset(OpCopy)
2903 v.Type = x.Type
2904 v.AddArg(x)
2905 return true
2906 }
2907
2908
2909
2910 for {
2911 c := v.AuxInt
2912 v_0 := v.Args[0]
2913 if v_0.Op != OpARM64MOVDconst {
2914 break
2915 }
2916 d := v_0.AuxInt
2917 v.reset(OpARM64MOVDconst)
2918 v.AuxInt = c & d
2919 return true
2920 }
2921
2922
2923
2924 for {
2925 c := v.AuxInt
2926 v_0 := v.Args[0]
2927 if v_0.Op != OpARM64ANDconst {
2928 break
2929 }
2930 d := v_0.AuxInt
2931 x := v_0.Args[0]
2932 v.reset(OpARM64ANDconst)
2933 v.AuxInt = c & d
2934 v.AddArg(x)
2935 return true
2936 }
2937
2938
2939
2940 for {
2941 c := v.AuxInt
2942 v_0 := v.Args[0]
2943 if v_0.Op != OpARM64MOVWUreg {
2944 break
2945 }
2946 x := v_0.Args[0]
2947 v.reset(OpARM64ANDconst)
2948 v.AuxInt = c & (1<<32 - 1)
2949 v.AddArg(x)
2950 return true
2951 }
2952
2953
2954
2955 for {
2956 c := v.AuxInt
2957 v_0 := v.Args[0]
2958 if v_0.Op != OpARM64MOVHUreg {
2959 break
2960 }
2961 x := v_0.Args[0]
2962 v.reset(OpARM64ANDconst)
2963 v.AuxInt = c & (1<<16 - 1)
2964 v.AddArg(x)
2965 return true
2966 }
2967
2968
2969
2970 for {
2971 c := v.AuxInt
2972 v_0 := v.Args[0]
2973 if v_0.Op != OpARM64MOVBUreg {
2974 break
2975 }
2976 x := v_0.Args[0]
2977 v.reset(OpARM64ANDconst)
2978 v.AuxInt = c & (1<<8 - 1)
2979 v.AddArg(x)
2980 return true
2981 }
2982
2983
2984
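// match: (ANDconst [ac] (SLLconst [sc] x))
// cond: isARM64BFMask(sc, ac, sc)
// result: (UBFIZ [armBFAuxInt(sc, arm64BFWidth(ac, sc))] x)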
2985 for {
2986 ac := v.AuxInt
2987 v_0 := v.Args[0]
2988 if v_0.Op != OpARM64SLLconst {
2989 break
2990 }
2991 sc := v_0.AuxInt
2992 x := v_0.Args[0]
2993 if !(isARM64BFMask(sc, ac, sc)) {
2994 break
2995 }
2996 v.reset(OpARM64UBFIZ)
2997 v.AuxInt = armBFAuxInt(sc, arm64BFWidth(ac, sc))
2998 v.AddArg(x)
2999 return true
3000 }
3001
3002
3003
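// match: (ANDconst [ac] (SRLconst [sc] x))
// cond: isARM64BFMask(sc, ac, 0)
// result: (UBFX [armBFAuxInt(sc, arm64BFWidth(ac, 0))] x)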
3004 for {
3005 ac := v.AuxInt
3006 v_0 := v.Args[0]
3007 if v_0.Op != OpARM64SRLconst {
3008 break
3009 }
3010 sc := v_0.AuxInt
3011 x := v_0.Args[0]
3012 if !(isARM64BFMask(sc, ac, 0)) {
3013 break
3014 }
3015 v.reset(OpARM64UBFX)
3016 v.AuxInt = armBFAuxInt(sc, arm64BFWidth(ac, 0))
3017 v.AddArg(x)
3018 return true
3019 }
3020 return false
3021 }
3022 func rewriteValueARM64_OpARM64ANDshiftLL_0(v *Value) bool {
3023 b := v.Block
3024
3025
3026
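// match: (ANDshiftLL (MOVDconst [c]) x [d])
// result: (ANDconst [c] (SLLconst <x.Type> x [d]))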
3027 for {
3028 d := v.AuxInt
3029 x := v.Args[1]
3030 v_0 := v.Args[0]
3031 if v_0.Op != OpARM64MOVDconst {
3032 break
3033 }
3034 c := v_0.AuxInt
3035 v.reset(OpARM64ANDconst)
3036 v.AuxInt = c
3037 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
3038 v0.AuxInt = d
3039 v0.AddArg(x)
3040 v.AddArg(v0)
3041 return true
3042 }
3043
3044
3045
3046 for {
3047 d := v.AuxInt
3048 _ = v.Args[1]
3049 x := v.Args[0]
3050 v_1 := v.Args[1]
3051 if v_1.Op != OpARM64MOVDconst {
3052 break
3053 }
3054 c := v_1.AuxInt
3055 v.reset(OpARM64ANDconst)
3056 v.AuxInt = int64(uint64(c) << uint64(d))
3057 v.AddArg(x)
3058 return true
3059 }
3060
3061
3062
3063 for {
3064 d := v.AuxInt
3065 _ = v.Args[1]
3066 x := v.Args[0]
3067 y := v.Args[1]
3068 if y.Op != OpARM64SLLconst {
3069 break
3070 }
3071 c := y.AuxInt
3072 if x != y.Args[0] {
3073 break
3074 }
3075 if !(c == d) {
3076 break
3077 }
3078 v.reset(OpCopy)
3079 v.Type = y.Type
3080 v.AddArg(y)
3081 return true
3082 }
3083 return false
3084 }
3085 func rewriteValueARM64_OpARM64ANDshiftRA_0(v *Value) bool {
3086 b := v.Block
3087
3088
3089
3090 for {
3091 d := v.AuxInt
3092 x := v.Args[1]
3093 v_0 := v.Args[0]
3094 if v_0.Op != OpARM64MOVDconst {
3095 break
3096 }
3097 c := v_0.AuxInt
3098 v.reset(OpARM64ANDconst)
3099 v.AuxInt = c
3100 v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type)
3101 v0.AuxInt = d
3102 v0.AddArg(x)
3103 v.AddArg(v0)
3104 return true
3105 }
3106
3107
3108
3109 for {
3110 d := v.AuxInt
3111 _ = v.Args[1]
3112 x := v.Args[0]
3113 v_1 := v.Args[1]
3114 if v_1.Op != OpARM64MOVDconst {
3115 break
3116 }
3117 c := v_1.AuxInt
3118 v.reset(OpARM64ANDconst)
3119 v.AuxInt = c >> uint64(d)
3120 v.AddArg(x)
3121 return true
3122 }
3123
3124
3125
3126 for {
3127 d := v.AuxInt
3128 _ = v.Args[1]
3129 x := v.Args[0]
3130 y := v.Args[1]
3131 if y.Op != OpARM64SRAconst {
3132 break
3133 }
3134 c := y.AuxInt
3135 if x != y.Args[0] {
3136 break
3137 }
3138 if !(c == d) {
3139 break
3140 }
3141 v.reset(OpCopy)
3142 v.Type = y.Type
3143 v.AddArg(y)
3144 return true
3145 }
3146 return false
3147 }
3148 func rewriteValueARM64_OpARM64ANDshiftRL_0(v *Value) bool {
3149 b := v.Block
3150
3151
3152
3153 for {
3154 d := v.AuxInt
3155 x := v.Args[1]
3156 v_0 := v.Args[0]
3157 if v_0.Op != OpARM64MOVDconst {
3158 break
3159 }
3160 c := v_0.AuxInt
3161 v.reset(OpARM64ANDconst)
3162 v.AuxInt = c
3163 v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type)
3164 v0.AuxInt = d
3165 v0.AddArg(x)
3166 v.AddArg(v0)
3167 return true
3168 }
3169
3170
3171
3172 for {
3173 d := v.AuxInt
3174 _ = v.Args[1]
3175 x := v.Args[0]
3176 v_1 := v.Args[1]
3177 if v_1.Op != OpARM64MOVDconst {
3178 break
3179 }
3180 c := v_1.AuxInt
3181 v.reset(OpARM64ANDconst)
3182 v.AuxInt = int64(uint64(c) >> uint64(d))
3183 v.AddArg(x)
3184 return true
3185 }
3186
3187
3188
3189 for {
3190 d := v.AuxInt
3191 _ = v.Args[1]
3192 x := v.Args[0]
3193 y := v.Args[1]
3194 if y.Op != OpARM64SRLconst {
3195 break
3196 }
3197 c := y.AuxInt
3198 if x != y.Args[0] {
3199 break
3200 }
3201 if !(c == d) {
3202 break
3203 }
3204 v.reset(OpCopy)
3205 v.Type = y.Type
3206 v.AddArg(y)
3207 return true
3208 }
3209 return false
3210 }
3211 func rewriteValueARM64_OpARM64BIC_0(v *Value) bool {
3212
3213
3214
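// match: (BIC x (MOVDconst [c]))
// result: (ANDconst [^c] x)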
3215 for {
3216 _ = v.Args[1]
3217 x := v.Args[0]
3218 v_1 := v.Args[1]
3219 if v_1.Op != OpARM64MOVDconst {
3220 break
3221 }
3222 c := v_1.AuxInt
3223 v.reset(OpARM64ANDconst)
3224 v.AuxInt = ^c
3225 v.AddArg(x)
3226 return true
3227 }
3228
3229
3230
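// match: (BIC x x)
// result: (MOVDconst [0])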
3231 for {
3232 x := v.Args[1]
3233 if x != v.Args[0] {
3234 break
3235 }
3236 v.reset(OpARM64MOVDconst)
3237 v.AuxInt = 0
3238 return true
3239 }
3240
3241
3242
3243 for {
3244 _ = v.Args[1]
3245 x0 := v.Args[0]
3246 x1 := v.Args[1]
3247 if x1.Op != OpARM64SLLconst {
3248 break
3249 }
3250 c := x1.AuxInt
3251 y := x1.Args[0]
3252 if !(clobberIfDead(x1)) {
3253 break
3254 }
3255 v.reset(OpARM64BICshiftLL)
3256 v.AuxInt = c
3257 v.AddArg(x0)
3258 v.AddArg(y)
3259 return true
3260 }
3261
3262
3263
3264 for {
3265 _ = v.Args[1]
3266 x0 := v.Args[0]
3267 x1 := v.Args[1]
3268 if x1.Op != OpARM64SRLconst {
3269 break
3270 }
3271 c := x1.AuxInt
3272 y := x1.Args[0]
3273 if !(clobberIfDead(x1)) {
3274 break
3275 }
3276 v.reset(OpARM64BICshiftRL)
3277 v.AuxInt = c
3278 v.AddArg(x0)
3279 v.AddArg(y)
3280 return true
3281 }
3282
3283
3284
3285 for {
3286 _ = v.Args[1]
3287 x0 := v.Args[0]
3288 x1 := v.Args[1]
3289 if x1.Op != OpARM64SRAconst {
3290 break
3291 }
3292 c := x1.AuxInt
3293 y := x1.Args[0]
3294 if !(clobberIfDead(x1)) {
3295 break
3296 }
3297 v.reset(OpARM64BICshiftRA)
3298 v.AuxInt = c
3299 v.AddArg(x0)
3300 v.AddArg(y)
3301 return true
3302 }
3303 return false
3304 }
3305 func rewriteValueARM64_OpARM64BICshiftLL_0(v *Value) bool {
3306
3307
3308
3309 for {
3310 d := v.AuxInt
3311 _ = v.Args[1]
3312 x := v.Args[0]
3313 v_1 := v.Args[1]
3314 if v_1.Op != OpARM64MOVDconst {
3315 break
3316 }
3317 c := v_1.AuxInt
3318 v.reset(OpARM64ANDconst)
3319 v.AuxInt = ^int64(uint64(c) << uint64(d))
3320 v.AddArg(x)
3321 return true
3322 }
3323
3324
3325
3326 for {
3327 d := v.AuxInt
3328 _ = v.Args[1]
3329 x := v.Args[0]
3330 v_1 := v.Args[1]
3331 if v_1.Op != OpARM64SLLconst {
3332 break
3333 }
3334 c := v_1.AuxInt
3335 if x != v_1.Args[0] {
3336 break
3337 }
3338 if !(c == d) {
3339 break
3340 }
3341 v.reset(OpARM64MOVDconst)
3342 v.AuxInt = 0
3343 return true
3344 }
3345 return false
3346 }
3347 func rewriteValueARM64_OpARM64BICshiftRA_0(v *Value) bool {
3348
3349
3350
3351 for {
3352 d := v.AuxInt
3353 _ = v.Args[1]
3354 x := v.Args[0]
3355 v_1 := v.Args[1]
3356 if v_1.Op != OpARM64MOVDconst {
3357 break
3358 }
3359 c := v_1.AuxInt
3360 v.reset(OpARM64ANDconst)
3361 v.AuxInt = ^(c >> uint64(d))
3362 v.AddArg(x)
3363 return true
3364 }
3365
3366
3367
3368 for {
3369 d := v.AuxInt
3370 _ = v.Args[1]
3371 x := v.Args[0]
3372 v_1 := v.Args[1]
3373 if v_1.Op != OpARM64SRAconst {
3374 break
3375 }
3376 c := v_1.AuxInt
3377 if x != v_1.Args[0] {
3378 break
3379 }
3380 if !(c == d) {
3381 break
3382 }
3383 v.reset(OpARM64MOVDconst)
3384 v.AuxInt = 0
3385 return true
3386 }
3387 return false
3388 }
3389 func rewriteValueARM64_OpARM64BICshiftRL_0(v *Value) bool {
3390
3391
3392
3393 for {
3394 d := v.AuxInt
3395 _ = v.Args[1]
3396 x := v.Args[0]
3397 v_1 := v.Args[1]
3398 if v_1.Op != OpARM64MOVDconst {
3399 break
3400 }
3401 c := v_1.AuxInt
3402 v.reset(OpARM64ANDconst)
3403 v.AuxInt = ^int64(uint64(c) >> uint64(d))
3404 v.AddArg(x)
3405 return true
3406 }
3407
3408
3409
3410 for {
3411 d := v.AuxInt
3412 _ = v.Args[1]
3413 x := v.Args[0]
3414 v_1 := v.Args[1]
3415 if v_1.Op != OpARM64SRLconst {
3416 break
3417 }
3418 c := v_1.AuxInt
3419 if x != v_1.Args[0] {
3420 break
3421 }
3422 if !(c == d) {
3423 break
3424 }
3425 v.reset(OpARM64MOVDconst)
3426 v.AuxInt = 0
3427 return true
3428 }
3429 return false
3430 }
3431 func rewriteValueARM64_OpARM64CMN_0(v *Value) bool {
3432
3433
3434
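// match: (CMN x (MOVDconst [c]))
// result: (CMNconst [c] x)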
3435 for {
3436 _ = v.Args[1]
3437 x := v.Args[0]
3438 v_1 := v.Args[1]
3439 if v_1.Op != OpARM64MOVDconst {
3440 break
3441 }
3442 c := v_1.AuxInt
3443 v.reset(OpARM64CMNconst)
3444 v.AuxInt = c
3445 v.AddArg(x)
3446 return true
3447 }
3448
3449
3450
3451 for {
3452 x := v.Args[1]
3453 v_0 := v.Args[0]
3454 if v_0.Op != OpARM64MOVDconst {
3455 break
3456 }
3457 c := v_0.AuxInt
3458 v.reset(OpARM64CMNconst)
3459 v.AuxInt = c
3460 v.AddArg(x)
3461 return true
3462 }
3463
3464
3465
3466 for {
3467 _ = v.Args[1]
3468 x0 := v.Args[0]
3469 x1 := v.Args[1]
3470 if x1.Op != OpARM64SLLconst {
3471 break
3472 }
3473 c := x1.AuxInt
3474 y := x1.Args[0]
3475 if !(clobberIfDead(x1)) {
3476 break
3477 }
3478 v.reset(OpARM64CMNshiftLL)
3479 v.AuxInt = c
3480 v.AddArg(x0)
3481 v.AddArg(y)
3482 return true
3483 }
3484
3485
3486
3487 for {
3488 x0 := v.Args[1]
3489 x1 := v.Args[0]
3490 if x1.Op != OpARM64SLLconst {
3491 break
3492 }
3493 c := x1.AuxInt
3494 y := x1.Args[0]
3495 if !(clobberIfDead(x1)) {
3496 break
3497 }
3498 v.reset(OpARM64CMNshiftLL)
3499 v.AuxInt = c
3500 v.AddArg(x0)
3501 v.AddArg(y)
3502 return true
3503 }
3504
3505
3506
3507 for {
3508 _ = v.Args[1]
3509 x0 := v.Args[0]
3510 x1 := v.Args[1]
3511 if x1.Op != OpARM64SRLconst {
3512 break
3513 }
3514 c := x1.AuxInt
3515 y := x1.Args[0]
3516 if !(clobberIfDead(x1)) {
3517 break
3518 }
3519 v.reset(OpARM64CMNshiftRL)
3520 v.AuxInt = c
3521 v.AddArg(x0)
3522 v.AddArg(y)
3523 return true
3524 }
3525
3526
3527
3528 for {
3529 x0 := v.Args[1]
3530 x1 := v.Args[0]
3531 if x1.Op != OpARM64SRLconst {
3532 break
3533 }
3534 c := x1.AuxInt
3535 y := x1.Args[0]
3536 if !(clobberIfDead(x1)) {
3537 break
3538 }
3539 v.reset(OpARM64CMNshiftRL)
3540 v.AuxInt = c
3541 v.AddArg(x0)
3542 v.AddArg(y)
3543 return true
3544 }
3545
3546
3547
3548 for {
3549 _ = v.Args[1]
3550 x0 := v.Args[0]
3551 x1 := v.Args[1]
3552 if x1.Op != OpARM64SRAconst {
3553 break
3554 }
3555 c := x1.AuxInt
3556 y := x1.Args[0]
3557 if !(clobberIfDead(x1)) {
3558 break
3559 }
3560 v.reset(OpARM64CMNshiftRA)
3561 v.AuxInt = c
3562 v.AddArg(x0)
3563 v.AddArg(y)
3564 return true
3565 }
3566
3567
3568
3569 for {
3570 x0 := v.Args[1]
3571 x1 := v.Args[0]
3572 if x1.Op != OpARM64SRAconst {
3573 break
3574 }
3575 c := x1.AuxInt
3576 y := x1.Args[0]
3577 if !(clobberIfDead(x1)) {
3578 break
3579 }
3580 v.reset(OpARM64CMNshiftRA)
3581 v.AuxInt = c
3582 v.AddArg(x0)
3583 v.AddArg(y)
3584 return true
3585 }
3586 return false
3587 }
3588 func rewriteValueARM64_OpARM64CMNW_0(v *Value) bool {
3589
3590
3591
3592 for {
3593 _ = v.Args[1]
3594 x := v.Args[0]
3595 v_1 := v.Args[1]
3596 if v_1.Op != OpARM64MOVDconst {
3597 break
3598 }
3599 c := v_1.AuxInt
3600 v.reset(OpARM64CMNWconst)
3601 v.AuxInt = c
3602 v.AddArg(x)
3603 return true
3604 }
3605
3606
3607
3608 for {
3609 x := v.Args[1]
3610 v_0 := v.Args[0]
3611 if v_0.Op != OpARM64MOVDconst {
3612 break
3613 }
3614 c := v_0.AuxInt
3615 v.reset(OpARM64CMNWconst)
3616 v.AuxInt = c
3617 v.AddArg(x)
3618 return true
3619 }
3620 return false
3621 }
3622 func rewriteValueARM64_OpARM64CMNWconst_0(v *Value) bool {
3623
3624
3625
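// match: (CMNWconst (MOVDconst [x]) [y])
// cond: int32(x) == int32(-y)
// result: (FlagEQ)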
3626 for {
3627 y := v.AuxInt
3628 v_0 := v.Args[0]
3629 if v_0.Op != OpARM64MOVDconst {
3630 break
3631 }
3632 x := v_0.AuxInt
3633 if !(int32(x) == int32(-y)) {
3634 break
3635 }
3636 v.reset(OpARM64FlagEQ)
3637 return true
3638 }
3639
3640
3641
3642 for {
3643 y := v.AuxInt
3644 v_0 := v.Args[0]
3645 if v_0.Op != OpARM64MOVDconst {
3646 break
3647 }
3648 x := v_0.AuxInt
3649 if !(int32(x) < int32(-y) && uint32(x) < uint32(-y)) {
3650 break
3651 }
3652 v.reset(OpARM64FlagLT_ULT)
3653 return true
3654 }
3655
3656
3657
3658 for {
3659 y := v.AuxInt
3660 v_0 := v.Args[0]
3661 if v_0.Op != OpARM64MOVDconst {
3662 break
3663 }
3664 x := v_0.AuxInt
3665 if !(int32(x) < int32(-y) && uint32(x) > uint32(-y)) {
3666 break
3667 }
3668 v.reset(OpARM64FlagLT_UGT)
3669 return true
3670 }
3671
3672
3673
3674 for {
3675 y := v.AuxInt
3676 v_0 := v.Args[0]
3677 if v_0.Op != OpARM64MOVDconst {
3678 break
3679 }
3680 x := v_0.AuxInt
3681 if !(int32(x) > int32(-y) && uint32(x) < uint32(-y)) {
3682 break
3683 }
3684 v.reset(OpARM64FlagGT_ULT)
3685 return true
3686 }
3687
3688
3689
3690 for {
3691 y := v.AuxInt
3692 v_0 := v.Args[0]
3693 if v_0.Op != OpARM64MOVDconst {
3694 break
3695 }
3696 x := v_0.AuxInt
3697 if !(int32(x) > int32(-y) && uint32(x) > uint32(-y)) {
3698 break
3699 }
3700 v.reset(OpARM64FlagGT_UGT)
3701 return true
3702 }
3703 return false
3704 }
3705 func rewriteValueARM64_OpARM64CMNconst_0(v *Value) bool {
3706
3707
3708
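// match: (CMNconst (MOVDconst [x]) [y])
// cond: int64(x) == int64(-y)
// result: (FlagEQ)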
3709 for {
3710 y := v.AuxInt
3711 v_0 := v.Args[0]
3712 if v_0.Op != OpARM64MOVDconst {
3713 break
3714 }
3715 x := v_0.AuxInt
3716 if !(int64(x) == int64(-y)) {
3717 break
3718 }
3719 v.reset(OpARM64FlagEQ)
3720 return true
3721 }
3722
3723
3724
3725 for {
3726 y := v.AuxInt
3727 v_0 := v.Args[0]
3728 if v_0.Op != OpARM64MOVDconst {
3729 break
3730 }
3731 x := v_0.AuxInt
3732 if !(int64(x) < int64(-y) && uint64(x) < uint64(-y)) {
3733 break
3734 }
3735 v.reset(OpARM64FlagLT_ULT)
3736 return true
3737 }
3738
3739
3740
3741 for {
3742 y := v.AuxInt
3743 v_0 := v.Args[0]
3744 if v_0.Op != OpARM64MOVDconst {
3745 break
3746 }
3747 x := v_0.AuxInt
3748 if !(int64(x) < int64(-y) && uint64(x) > uint64(-y)) {
3749 break
3750 }
3751 v.reset(OpARM64FlagLT_UGT)
3752 return true
3753 }
3754
3755
3756
3757 for {
3758 y := v.AuxInt
3759 v_0 := v.Args[0]
3760 if v_0.Op != OpARM64MOVDconst {
3761 break
3762 }
3763 x := v_0.AuxInt
3764 if !(int64(x) > int64(-y) && uint64(x) < uint64(-y)) {
3765 break
3766 }
3767 v.reset(OpARM64FlagGT_ULT)
3768 return true
3769 }
3770
3771
3772
3773 for {
3774 y := v.AuxInt
3775 v_0 := v.Args[0]
3776 if v_0.Op != OpARM64MOVDconst {
3777 break
3778 }
3779 x := v_0.AuxInt
3780 if !(int64(x) > int64(-y) && uint64(x) > uint64(-y)) {
3781 break
3782 }
3783 v.reset(OpARM64FlagGT_UGT)
3784 return true
3785 }
3786 return false
3787 }
3788 func rewriteValueARM64_OpARM64CMNshiftLL_0(v *Value) bool {
3789 b := v.Block
3790
3791
3792
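// match: (CMNshiftLL (MOVDconst [c]) x [d])
// result: (CMNconst [c] (SLLconst <x.Type> x [d]))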
3793 for {
3794 d := v.AuxInt
3795 x := v.Args[1]
3796 v_0 := v.Args[0]
3797 if v_0.Op != OpARM64MOVDconst {
3798 break
3799 }
3800 c := v_0.AuxInt
3801 v.reset(OpARM64CMNconst)
3802 v.AuxInt = c
3803 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
3804 v0.AuxInt = d
3805 v0.AddArg(x)
3806 v.AddArg(v0)
3807 return true
3808 }
3809
3810
3811
3812 for {
3813 d := v.AuxInt
3814 _ = v.Args[1]
3815 x := v.Args[0]
3816 v_1 := v.Args[1]
3817 if v_1.Op != OpARM64MOVDconst {
3818 break
3819 }
3820 c := v_1.AuxInt
3821 v.reset(OpARM64CMNconst)
3822 v.AuxInt = int64(uint64(c) << uint64(d))
3823 v.AddArg(x)
3824 return true
3825 }
3826 return false
3827 }
3828 func rewriteValueARM64_OpARM64CMNshiftRA_0(v *Value) bool {
3829 b := v.Block
3830
3831
3832
3833 for {
3834 d := v.AuxInt
3835 x := v.Args[1]
3836 v_0 := v.Args[0]
3837 if v_0.Op != OpARM64MOVDconst {
3838 break
3839 }
3840 c := v_0.AuxInt
3841 v.reset(OpARM64CMNconst)
3842 v.AuxInt = c
3843 v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type)
3844 v0.AuxInt = d
3845 v0.AddArg(x)
3846 v.AddArg(v0)
3847 return true
3848 }
3849
3850
3851
3852 for {
3853 d := v.AuxInt
3854 _ = v.Args[1]
3855 x := v.Args[0]
3856 v_1 := v.Args[1]
3857 if v_1.Op != OpARM64MOVDconst {
3858 break
3859 }
3860 c := v_1.AuxInt
3861 v.reset(OpARM64CMNconst)
3862 v.AuxInt = c >> uint64(d)
3863 v.AddArg(x)
3864 return true
3865 }
3866 return false
3867 }
3868 func rewriteValueARM64_OpARM64CMNshiftRL_0(v *Value) bool {
3869 b := v.Block
3870
3871
3872
3873 for {
3874 d := v.AuxInt
3875 x := v.Args[1]
3876 v_0 := v.Args[0]
3877 if v_0.Op != OpARM64MOVDconst {
3878 break
3879 }
3880 c := v_0.AuxInt
3881 v.reset(OpARM64CMNconst)
3882 v.AuxInt = c
3883 v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type)
3884 v0.AuxInt = d
3885 v0.AddArg(x)
3886 v.AddArg(v0)
3887 return true
3888 }
3889
3890
3891
3892 for {
3893 d := v.AuxInt
3894 _ = v.Args[1]
3895 x := v.Args[0]
3896 v_1 := v.Args[1]
3897 if v_1.Op != OpARM64MOVDconst {
3898 break
3899 }
3900 c := v_1.AuxInt
3901 v.reset(OpARM64CMNconst)
3902 v.AuxInt = int64(uint64(c) >> uint64(d))
3903 v.AddArg(x)
3904 return true
3905 }
3906 return false
3907 }
3908 func rewriteValueARM64_OpARM64CMP_0(v *Value) bool {
3909 b := v.Block
3910
3911
3912
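// match: (CMP x (MOVDconst [c]))
// result: (CMPconst [c] x)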
3913 for {
3914 _ = v.Args[1]
3915 x := v.Args[0]
3916 v_1 := v.Args[1]
3917 if v_1.Op != OpARM64MOVDconst {
3918 break
3919 }
3920 c := v_1.AuxInt
3921 v.reset(OpARM64CMPconst)
3922 v.AuxInt = c
3923 v.AddArg(x)
3924 return true
3925 }
3926
3927
3928
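// match: (CMP (MOVDconst [c]) x)
// result: (InvertFlags (CMPconst [c] x))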
3929 for {
3930 x := v.Args[1]
3931 v_0 := v.Args[0]
3932 if v_0.Op != OpARM64MOVDconst {
3933 break
3934 }
3935 c := v_0.AuxInt
3936 v.reset(OpARM64InvertFlags)
3937 v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
3938 v0.AuxInt = c
3939 v0.AddArg(x)
3940 v.AddArg(v0)
3941 return true
3942 }
3943
3944
3945
3946 for {
3947 _ = v.Args[1]
3948 x0 := v.Args[0]
3949 x1 := v.Args[1]
3950 if x1.Op != OpARM64SLLconst {
3951 break
3952 }
3953 c := x1.AuxInt
3954 y := x1.Args[0]
3955 if !(clobberIfDead(x1)) {
3956 break
3957 }
3958 v.reset(OpARM64CMPshiftLL)
3959 v.AuxInt = c
3960 v.AddArg(x0)
3961 v.AddArg(y)
3962 return true
3963 }
3964
3965
3966
3967 for {
3968 x1 := v.Args[1]
3969 x0 := v.Args[0]
3970 if x0.Op != OpARM64SLLconst {
3971 break
3972 }
3973 c := x0.AuxInt
3974 y := x0.Args[0]
3975 if !(clobberIfDead(x0)) {
3976 break
3977 }
3978 v.reset(OpARM64InvertFlags)
3979 v0 := b.NewValue0(v.Pos, OpARM64CMPshiftLL, types.TypeFlags)
3980 v0.AuxInt = c
3981 v0.AddArg(x1)
3982 v0.AddArg(y)
3983 v.AddArg(v0)
3984 return true
3985 }
3986
3987
3988
3989 for {
3990 _ = v.Args[1]
3991 x0 := v.Args[0]
3992 x1 := v.Args[1]
3993 if x1.Op != OpARM64SRLconst {
3994 break
3995 }
3996 c := x1.AuxInt
3997 y := x1.Args[0]
3998 if !(clobberIfDead(x1)) {
3999 break
4000 }
4001 v.reset(OpARM64CMPshiftRL)
4002 v.AuxInt = c
4003 v.AddArg(x0)
4004 v.AddArg(y)
4005 return true
4006 }
4007
4008
4009
4010 for {
4011 x1 := v.Args[1]
4012 x0 := v.Args[0]
4013 if x0.Op != OpARM64SRLconst {
4014 break
4015 }
4016 c := x0.AuxInt
4017 y := x0.Args[0]
4018 if !(clobberIfDead(x0)) {
4019 break
4020 }
4021 v.reset(OpARM64InvertFlags)
4022 v0 := b.NewValue0(v.Pos, OpARM64CMPshiftRL, types.TypeFlags)
4023 v0.AuxInt = c
4024 v0.AddArg(x1)
4025 v0.AddArg(y)
4026 v.AddArg(v0)
4027 return true
4028 }
4029
4030
4031
4032 for {
4033 _ = v.Args[1]
4034 x0 := v.Args[0]
4035 x1 := v.Args[1]
4036 if x1.Op != OpARM64SRAconst {
4037 break
4038 }
4039 c := x1.AuxInt
4040 y := x1.Args[0]
4041 if !(clobberIfDead(x1)) {
4042 break
4043 }
4044 v.reset(OpARM64CMPshiftRA)
4045 v.AuxInt = c
4046 v.AddArg(x0)
4047 v.AddArg(y)
4048 return true
4049 }
4050
4051
4052
4053 for {
4054 x1 := v.Args[1]
4055 x0 := v.Args[0]
4056 if x0.Op != OpARM64SRAconst {
4057 break
4058 }
4059 c := x0.AuxInt
4060 y := x0.Args[0]
4061 if !(clobberIfDead(x0)) {
4062 break
4063 }
4064 v.reset(OpARM64InvertFlags)
4065 v0 := b.NewValue0(v.Pos, OpARM64CMPshiftRA, types.TypeFlags)
4066 v0.AuxInt = c
4067 v0.AddArg(x1)
4068 v0.AddArg(y)
4069 v.AddArg(v0)
4070 return true
4071 }
4072 return false
4073 }
4074 func rewriteValueARM64_OpARM64CMPW_0(v *Value) bool {
4075 b := v.Block
4076
4077
4078
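// match: (CMPW x (MOVDconst [c]))
// result: (CMPWconst [int64(int32(c))] x)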
4079 for {
4080 _ = v.Args[1]
4081 x := v.Args[0]
4082 v_1 := v.Args[1]
4083 if v_1.Op != OpARM64MOVDconst {
4084 break
4085 }
4086 c := v_1.AuxInt
4087 v.reset(OpARM64CMPWconst)
4088 v.AuxInt = int64(int32(c))
4089 v.AddArg(x)
4090 return true
4091 }
4092
4093
4094
4095 for {
4096 x := v.Args[1]
4097 v_0 := v.Args[0]
4098 if v_0.Op != OpARM64MOVDconst {
4099 break
4100 }
4101 c := v_0.AuxInt
4102 v.reset(OpARM64InvertFlags)
4103 v0 := b.NewValue0(v.Pos, OpARM64CMPWconst, types.TypeFlags)
4104 v0.AuxInt = int64(int32(c))
4105 v0.AddArg(x)
4106 v.AddArg(v0)
4107 return true
4108 }
4109 return false
4110 }
4111 func rewriteValueARM64_OpARM64CMPWconst_0(v *Value) bool {
4112
4113
4114
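// match: (CMPWconst (MOVDconst [x]) [y])
// cond: int32(x) == int32(y)
// result: (FlagEQ)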
4115 for {
4116 y := v.AuxInt
4117 v_0 := v.Args[0]
4118 if v_0.Op != OpARM64MOVDconst {
4119 break
4120 }
4121 x := v_0.AuxInt
4122 if !(int32(x) == int32(y)) {
4123 break
4124 }
4125 v.reset(OpARM64FlagEQ)
4126 return true
4127 }
4128
4129
4130
4131 for {
4132 y := v.AuxInt
4133 v_0 := v.Args[0]
4134 if v_0.Op != OpARM64MOVDconst {
4135 break
4136 }
4137 x := v_0.AuxInt
4138 if !(int32(x) < int32(y) && uint32(x) < uint32(y)) {
4139 break
4140 }
4141 v.reset(OpARM64FlagLT_ULT)
4142 return true
4143 }
4144
4145
4146
4147 for {
4148 y := v.AuxInt
4149 v_0 := v.Args[0]
4150 if v_0.Op != OpARM64MOVDconst {
4151 break
4152 }
4153 x := v_0.AuxInt
4154 if !(int32(x) < int32(y) && uint32(x) > uint32(y)) {
4155 break
4156 }
4157 v.reset(OpARM64FlagLT_UGT)
4158 return true
4159 }
4160
4161
4162
4163 for {
4164 y := v.AuxInt
4165 v_0 := v.Args[0]
4166 if v_0.Op != OpARM64MOVDconst {
4167 break
4168 }
4169 x := v_0.AuxInt
4170 if !(int32(x) > int32(y) && uint32(x) < uint32(y)) {
4171 break
4172 }
4173 v.reset(OpARM64FlagGT_ULT)
4174 return true
4175 }
4176
4177
4178
4179 for {
4180 y := v.AuxInt
4181 v_0 := v.Args[0]
4182 if v_0.Op != OpARM64MOVDconst {
4183 break
4184 }
4185 x := v_0.AuxInt
4186 if !(int32(x) > int32(y) && uint32(x) > uint32(y)) {
4187 break
4188 }
4189 v.reset(OpARM64FlagGT_UGT)
4190 return true
4191 }
4192
4193
4194
4195 for {
4196 c := v.AuxInt
4197 v_0 := v.Args[0]
4198 if v_0.Op != OpARM64MOVBUreg {
4199 break
4200 }
4201 if !(0xff < int32(c)) {
4202 break
4203 }
4204 v.reset(OpARM64FlagLT_ULT)
4205 return true
4206 }
4207
4208
4209
4210 for {
4211 c := v.AuxInt
4212 v_0 := v.Args[0]
4213 if v_0.Op != OpARM64MOVHUreg {
4214 break
4215 }
4216 if !(0xffff < int32(c)) {
4217 break
4218 }
4219 v.reset(OpARM64FlagLT_ULT)
4220 return true
4221 }
4222 return false
4223 }
4224 func rewriteValueARM64_OpARM64CMPconst_0(v *Value) bool {
4225
4226
4227
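// match: (CMPconst (MOVDconst [x]) [y])
// cond: x == y
// result: (FlagEQ)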
4228 for {
4229 y := v.AuxInt
4230 v_0 := v.Args[0]
4231 if v_0.Op != OpARM64MOVDconst {
4232 break
4233 }
4234 x := v_0.AuxInt
4235 if !(x == y) {
4236 break
4237 }
4238 v.reset(OpARM64FlagEQ)
4239 return true
4240 }
4241
4242
4243
4244 for {
4245 y := v.AuxInt
4246 v_0 := v.Args[0]
4247 if v_0.Op != OpARM64MOVDconst {
4248 break
4249 }
4250 x := v_0.AuxInt
4251 if !(x < y && uint64(x) < uint64(y)) {
4252 break
4253 }
4254 v.reset(OpARM64FlagLT_ULT)
4255 return true
4256 }
4257
4258
4259
4260 for {
4261 y := v.AuxInt
4262 v_0 := v.Args[0]
4263 if v_0.Op != OpARM64MOVDconst {
4264 break
4265 }
4266 x := v_0.AuxInt
4267 if !(x < y && uint64(x) > uint64(y)) {
4268 break
4269 }
4270 v.reset(OpARM64FlagLT_UGT)
4271 return true
4272 }
4273
4274
4275
4276 for {
4277 y := v.AuxInt
4278 v_0 := v.Args[0]
4279 if v_0.Op != OpARM64MOVDconst {
4280 break
4281 }
4282 x := v_0.AuxInt
4283 if !(x > y && uint64(x) < uint64(y)) {
4284 break
4285 }
4286 v.reset(OpARM64FlagGT_ULT)
4287 return true
4288 }
4289
4290
4291
4292 for {
4293 y := v.AuxInt
4294 v_0 := v.Args[0]
4295 if v_0.Op != OpARM64MOVDconst {
4296 break
4297 }
4298 x := v_0.AuxInt
4299 if !(x > y && uint64(x) > uint64(y)) {
4300 break
4301 }
4302 v.reset(OpARM64FlagGT_UGT)
4303 return true
4304 }
4305
4306
4307
4308 for {
4309 c := v.AuxInt
4310 v_0 := v.Args[0]
4311 if v_0.Op != OpARM64MOVBUreg {
4312 break
4313 }
4314 if !(0xff < c) {
4315 break
4316 }
4317 v.reset(OpARM64FlagLT_ULT)
4318 return true
4319 }
4320
4321
4322
4323 for {
4324 c := v.AuxInt
4325 v_0 := v.Args[0]
4326 if v_0.Op != OpARM64MOVHUreg {
4327 break
4328 }
4329 if !(0xffff < c) {
4330 break
4331 }
4332 v.reset(OpARM64FlagLT_ULT)
4333 return true
4334 }
4335
4336
4337
4338 for {
4339 c := v.AuxInt
4340 v_0 := v.Args[0]
4341 if v_0.Op != OpARM64MOVWUreg {
4342 break
4343 }
4344 if !(0xffffffff < c) {
4345 break
4346 }
4347 v.reset(OpARM64FlagLT_ULT)
4348 return true
4349 }
4350
4351
4352
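// match: (CMPconst (ANDconst _ [m]) [n])
// cond: 0 <= m && m < n
// result: (FlagLT_ULT)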
4353 for {
4354 n := v.AuxInt
4355 v_0 := v.Args[0]
4356 if v_0.Op != OpARM64ANDconst {
4357 break
4358 }
4359 m := v_0.AuxInt
4360 if !(0 <= m && m < n) {
4361 break
4362 }
4363 v.reset(OpARM64FlagLT_ULT)
4364 return true
4365 }
4366
4367
4368
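// match: (CMPconst (SRLconst _ [c]) [n])
// cond: 0 <= n && 0 < c && c <= 63 && (1<<uint64(64-c)) <= uint64(n)
// result: (FlagLT_ULT)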
4369 for {
4370 n := v.AuxInt
4371 v_0 := v.Args[0]
4372 if v_0.Op != OpARM64SRLconst {
4373 break
4374 }
4375 c := v_0.AuxInt
4376 if !(0 <= n && 0 < c && c <= 63 && (1<<uint64(64-c)) <= uint64(n)) {
4377 break
4378 }
4379 v.reset(OpARM64FlagLT_ULT)
4380 return true
4381 }
4382 return false
4383 }
4384 func rewriteValueARM64_OpARM64CMPshiftLL_0(v *Value) bool {
4385 b := v.Block
4386
4387
4388
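// match: (CMPshiftLL (MOVDconst [c]) x [d])
// result: (InvertFlags (CMPconst [c] (SLLconst <x.Type> x [d])))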
4389 for {
4390 d := v.AuxInt
4391 x := v.Args[1]
4392 v_0 := v.Args[0]
4393 if v_0.Op != OpARM64MOVDconst {
4394 break
4395 }
4396 c := v_0.AuxInt
4397 v.reset(OpARM64InvertFlags)
4398 v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
4399 v0.AuxInt = c
4400 v1 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
4401 v1.AuxInt = d
4402 v1.AddArg(x)
4403 v0.AddArg(v1)
4404 v.AddArg(v0)
4405 return true
4406 }
4407
4408
4409
4410 for {
4411 d := v.AuxInt
4412 _ = v.Args[1]
4413 x := v.Args[0]
4414 v_1 := v.Args[1]
4415 if v_1.Op != OpARM64MOVDconst {
4416 break
4417 }
4418 c := v_1.AuxInt
4419 v.reset(OpARM64CMPconst)
4420 v.AuxInt = int64(uint64(c) << uint64(d))
4421 v.AddArg(x)
4422 return true
4423 }
4424 return false
4425 }
4426 func rewriteValueARM64_OpARM64CMPshiftRA_0(v *Value) bool {
4427 b := v.Block
4428
4429
4430
4431 for {
4432 d := v.AuxInt
4433 x := v.Args[1]
4434 v_0 := v.Args[0]
4435 if v_0.Op != OpARM64MOVDconst {
4436 break
4437 }
4438 c := v_0.AuxInt
4439 v.reset(OpARM64InvertFlags)
4440 v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
4441 v0.AuxInt = c
4442 v1 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type)
4443 v1.AuxInt = d
4444 v1.AddArg(x)
4445 v0.AddArg(v1)
4446 v.AddArg(v0)
4447 return true
4448 }
4449
4450
4451
4452 for {
4453 d := v.AuxInt
4454 _ = v.Args[1]
4455 x := v.Args[0]
4456 v_1 := v.Args[1]
4457 if v_1.Op != OpARM64MOVDconst {
4458 break
4459 }
4460 c := v_1.AuxInt
4461 v.reset(OpARM64CMPconst)
4462 v.AuxInt = c >> uint64(d)
4463 v.AddArg(x)
4464 return true
4465 }
4466 return false
4467 }
4468 func rewriteValueARM64_OpARM64CMPshiftRL_0(v *Value) bool {
4469 b := v.Block
4470
4471
4472
4473 for {
4474 d := v.AuxInt
4475 x := v.Args[1]
4476 v_0 := v.Args[0]
4477 if v_0.Op != OpARM64MOVDconst {
4478 break
4479 }
4480 c := v_0.AuxInt
4481 v.reset(OpARM64InvertFlags)
4482 v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
4483 v0.AuxInt = c
4484 v1 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type)
4485 v1.AuxInt = d
4486 v1.AddArg(x)
4487 v0.AddArg(v1)
4488 v.AddArg(v0)
4489 return true
4490 }
4491
4492
4493
4494 for {
4495 d := v.AuxInt
4496 _ = v.Args[1]
4497 x := v.Args[0]
4498 v_1 := v.Args[1]
4499 if v_1.Op != OpARM64MOVDconst {
4500 break
4501 }
4502 c := v_1.AuxInt
4503 v.reset(OpARM64CMPconst)
4504 v.AuxInt = int64(uint64(c) >> uint64(d))
4505 v.AddArg(x)
4506 return true
4507 }
4508 return false
4509 }
4510 func rewriteValueARM64_OpARM64CSEL_0(v *Value) bool {
4511
4512
4513
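// match: (CSEL {cc} x (MOVDconst [0]) flag)
// result: (CSEL0 {cc} x flag)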
4514 for {
4515 cc := v.Aux
4516 flag := v.Args[2]
4517 x := v.Args[0]
4518 v_1 := v.Args[1]
4519 if v_1.Op != OpARM64MOVDconst {
4520 break
4521 }
4522 if v_1.AuxInt != 0 {
4523 break
4524 }
4525 v.reset(OpARM64CSEL0)
4526 v.Aux = cc
4527 v.AddArg(x)
4528 v.AddArg(flag)
4529 return true
4530 }
4531
4532
4533
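// match: (CSEL {cc} (MOVDconst [0]) y flag)
// result: (CSEL0 {arm64Negate(cc.(Op))} y flag)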
4534 for {
4535 cc := v.Aux
4536 flag := v.Args[2]
4537 v_0 := v.Args[0]
4538 if v_0.Op != OpARM64MOVDconst {
4539 break
4540 }
4541 if v_0.AuxInt != 0 {
4542 break
4543 }
4544 y := v.Args[1]
4545 v.reset(OpARM64CSEL0)
4546 v.Aux = arm64Negate(cc.(Op))
4547 v.AddArg(y)
4548 v.AddArg(flag)
4549 return true
4550 }
4551
4552
4553
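// match: (CSEL {cc} x y (InvertFlags cmp))
// result: (CSEL {arm64Invert(cc.(Op))} x y cmp)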
4554 for {
4555 cc := v.Aux
4556 _ = v.Args[2]
4557 x := v.Args[0]
4558 y := v.Args[1]
4559 v_2 := v.Args[2]
4560 if v_2.Op != OpARM64InvertFlags {
4561 break
4562 }
4563 cmp := v_2.Args[0]
4564 v.reset(OpARM64CSEL)
4565 v.Aux = arm64Invert(cc.(Op))
4566 v.AddArg(x)
4567 v.AddArg(y)
4568 v.AddArg(cmp)
4569 return true
4570 }
4571
4572
4573
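// match: (CSEL {cc} x _ flag)
// cond: ccARM64Eval(cc, flag) > 0
// result: x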
4574 for {
4575 cc := v.Aux
4576 flag := v.Args[2]
4577 x := v.Args[0]
4578 if !(ccARM64Eval(cc, flag) > 0) {
4579 break
4580 }
4581 v.reset(OpCopy)
4582 v.Type = x.Type
4583 v.AddArg(x)
4584 return true
4585 }
4586
4587
4588
4589 for {
4590 cc := v.Aux
4591 flag := v.Args[2]
4592 y := v.Args[1]
4593 if !(ccARM64Eval(cc, flag) < 0) {
4594 break
4595 }
4596 v.reset(OpCopy)
4597 v.Type = y.Type
4598 v.AddArg(y)
4599 return true
4600 }
4601
4602
4603
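// match: (CSEL {cc} x y (CMPWconst [0] boolval))
// cond: cc.(Op) == OpARM64NotEqual && flagArg(boolval) != nil
// result: (CSEL {boolval.Op} x y flagArg(boolval))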
4604 for {
4605 cc := v.Aux
4606 _ = v.Args[2]
4607 x := v.Args[0]
4608 y := v.Args[1]
4609 v_2 := v.Args[2]
4610 if v_2.Op != OpARM64CMPWconst {
4611 break
4612 }
4613 if v_2.AuxInt != 0 {
4614 break
4615 }
4616 boolval := v_2.Args[0]
4617 if !(cc.(Op) == OpARM64NotEqual && flagArg(boolval) != nil) {
4618 break
4619 }
4620 v.reset(OpARM64CSEL)
4621 v.Aux = boolval.Op
4622 v.AddArg(x)
4623 v.AddArg(y)
4624 v.AddArg(flagArg(boolval))
4625 return true
4626 }
4627
4628
4629
4630 for {
4631 cc := v.Aux
4632 _ = v.Args[2]
4633 x := v.Args[0]
4634 y := v.Args[1]
4635 v_2 := v.Args[2]
4636 if v_2.Op != OpARM64CMPWconst {
4637 break
4638 }
4639 if v_2.AuxInt != 0 {
4640 break
4641 }
4642 boolval := v_2.Args[0]
4643 if !(cc.(Op) == OpARM64Equal && flagArg(boolval) != nil) {
4644 break
4645 }
4646 v.reset(OpARM64CSEL)
4647 v.Aux = arm64Negate(boolval.Op)
4648 v.AddArg(x)
4649 v.AddArg(y)
4650 v.AddArg(flagArg(boolval))
4651 return true
4652 }
4653 return false
4654 }
4655 func rewriteValueARM64_OpARM64CSEL0_0(v *Value) bool {
4656
4657
4658
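// match: (CSEL0 {cc} x (InvertFlags cmp))
// result: (CSEL0 {arm64Invert(cc.(Op))} x cmp)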
4659 for {
4660 cc := v.Aux
4661 _ = v.Args[1]
4662 x := v.Args[0]
4663 v_1 := v.Args[1]
4664 if v_1.Op != OpARM64InvertFlags {
4665 break
4666 }
4667 cmp := v_1.Args[0]
4668 v.reset(OpARM64CSEL0)
4669 v.Aux = arm64Invert(cc.(Op))
4670 v.AddArg(x)
4671 v.AddArg(cmp)
4672 return true
4673 }
4674
4675
4676
4677 for {
4678 cc := v.Aux
4679 flag := v.Args[1]
4680 x := v.Args[0]
4681 if !(ccARM64Eval(cc, flag) > 0) {
4682 break
4683 }
4684 v.reset(OpCopy)
4685 v.Type = x.Type
4686 v.AddArg(x)
4687 return true
4688 }
4689
4690
4691
4692 for {
4693 cc := v.Aux
4694 flag := v.Args[1]
4695 if !(ccARM64Eval(cc, flag) < 0) {
4696 break
4697 }
4698 v.reset(OpARM64MOVDconst)
4699 v.AuxInt = 0
4700 return true
4701 }
4702
4703
4704
4705 for {
4706 cc := v.Aux
4707 _ = v.Args[1]
4708 x := v.Args[0]
4709 v_1 := v.Args[1]
4710 if v_1.Op != OpARM64CMPWconst {
4711 break
4712 }
4713 if v_1.AuxInt != 0 {
4714 break
4715 }
4716 boolval := v_1.Args[0]
4717 if !(cc.(Op) == OpARM64NotEqual && flagArg(boolval) != nil) {
4718 break
4719 }
4720 v.reset(OpARM64CSEL0)
4721 v.Aux = boolval.Op
4722 v.AddArg(x)
4723 v.AddArg(flagArg(boolval))
4724 return true
4725 }
4726
4727
4728
4729 for {
4730 cc := v.Aux
4731 _ = v.Args[1]
4732 x := v.Args[0]
4733 v_1 := v.Args[1]
4734 if v_1.Op != OpARM64CMPWconst {
4735 break
4736 }
4737 if v_1.AuxInt != 0 {
4738 break
4739 }
4740 boolval := v_1.Args[0]
4741 if !(cc.(Op) == OpARM64Equal && flagArg(boolval) != nil) {
4742 break
4743 }
4744 v.reset(OpARM64CSEL0)
4745 v.Aux = arm64Negate(boolval.Op)
4746 v.AddArg(x)
4747 v.AddArg(flagArg(boolval))
4748 return true
4749 }
4750 return false
4751 }
4752 func rewriteValueARM64_OpARM64DIV_0(v *Value) bool {
4753
4754
4755
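// match: (DIV (MOVDconst [c]) (MOVDconst [d]))
// result: (MOVDconst [c/d])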
4756 for {
4757 _ = v.Args[1]
4758 v_0 := v.Args[0]
4759 if v_0.Op != OpARM64MOVDconst {
4760 break
4761 }
4762 c := v_0.AuxInt
4763 v_1 := v.Args[1]
4764 if v_1.Op != OpARM64MOVDconst {
4765 break
4766 }
4767 d := v_1.AuxInt
4768 v.reset(OpARM64MOVDconst)
4769 v.AuxInt = c / d
4770 return true
4771 }
4772 return false
4773 }
4774 func rewriteValueARM64_OpARM64DIVW_0(v *Value) bool {
4775
4776
4777
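// match: (DIVW (MOVDconst [c]) (MOVDconst [d]))
// result: (MOVDconst [int64(int32(c)/int32(d))])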
4778 for {
4779 _ = v.Args[1]
4780 v_0 := v.Args[0]
4781 if v_0.Op != OpARM64MOVDconst {
4782 break
4783 }
4784 c := v_0.AuxInt
4785 v_1 := v.Args[1]
4786 if v_1.Op != OpARM64MOVDconst {
4787 break
4788 }
4789 d := v_1.AuxInt
4790 v.reset(OpARM64MOVDconst)
4791 v.AuxInt = int64(int32(c) / int32(d))
4792 return true
4793 }
4794 return false
4795 }
4796 func rewriteValueARM64_OpARM64EON_0(v *Value) bool {
4797
4798
4799
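// match: (EON x (MOVDconst [c]))
// result: (XORconst [^c] x)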
4800 for {
4801 _ = v.Args[1]
4802 x := v.Args[0]
4803 v_1 := v.Args[1]
4804 if v_1.Op != OpARM64MOVDconst {
4805 break
4806 }
4807 c := v_1.AuxInt
4808 v.reset(OpARM64XORconst)
4809 v.AuxInt = ^c
4810 v.AddArg(x)
4811 return true
4812 }
4813
4814
4815
4816 for {
4817 x := v.Args[1]
4818 if x != v.Args[0] {
4819 break
4820 }
4821 v.reset(OpARM64MOVDconst)
4822 v.AuxInt = -1
4823 return true
4824 }
4825
4826
4827
4828 for {
4829 _ = v.Args[1]
4830 x0 := v.Args[0]
4831 x1 := v.Args[1]
4832 if x1.Op != OpARM64SLLconst {
4833 break
4834 }
4835 c := x1.AuxInt
4836 y := x1.Args[0]
4837 if !(clobberIfDead(x1)) {
4838 break
4839 }
4840 v.reset(OpARM64EONshiftLL)
4841 v.AuxInt = c
4842 v.AddArg(x0)
4843 v.AddArg(y)
4844 return true
4845 }
4846
4847
4848
4849 for {
4850 _ = v.Args[1]
4851 x0 := v.Args[0]
4852 x1 := v.Args[1]
4853 if x1.Op != OpARM64SRLconst {
4854 break
4855 }
4856 c := x1.AuxInt
4857 y := x1.Args[0]
4858 if !(clobberIfDead(x1)) {
4859 break
4860 }
4861 v.reset(OpARM64EONshiftRL)
4862 v.AuxInt = c
4863 v.AddArg(x0)
4864 v.AddArg(y)
4865 return true
4866 }
4867
4868
4869
4870 for {
4871 _ = v.Args[1]
4872 x0 := v.Args[0]
4873 x1 := v.Args[1]
4874 if x1.Op != OpARM64SRAconst {
4875 break
4876 }
4877 c := x1.AuxInt
4878 y := x1.Args[0]
4879 if !(clobberIfDead(x1)) {
4880 break
4881 }
4882 v.reset(OpARM64EONshiftRA)
4883 v.AuxInt = c
4884 v.AddArg(x0)
4885 v.AddArg(y)
4886 return true
4887 }
4888 return false
4889 }
4890 func rewriteValueARM64_OpARM64EONshiftLL_0(v *Value) bool {
4891
4892
4893
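// match: (EONshiftLL x (MOVDconst [c]) [d])
// result: (XORconst [^int64(uint64(c)<<uint64(d))] x)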
4894 for {
4895 d := v.AuxInt
4896 _ = v.Args[1]
4897 x := v.Args[0]
4898 v_1 := v.Args[1]
4899 if v_1.Op != OpARM64MOVDconst {
4900 break
4901 }
4902 c := v_1.AuxInt
4903 v.reset(OpARM64XORconst)
4904 v.AuxInt = ^int64(uint64(c) << uint64(d))
4905 v.AddArg(x)
4906 return true
4907 }
4908
4909
4910
4911 for {
4912 d := v.AuxInt
4913 _ = v.Args[1]
4914 x := v.Args[0]
4915 v_1 := v.Args[1]
4916 if v_1.Op != OpARM64SLLconst {
4917 break
4918 }
4919 c := v_1.AuxInt
4920 if x != v_1.Args[0] {
4921 break
4922 }
4923 if !(c == d) {
4924 break
4925 }
4926 v.reset(OpARM64MOVDconst)
4927 v.AuxInt = -1
4928 return true
4929 }
4930 return false
4931 }
4932 func rewriteValueARM64_OpARM64EONshiftRA_0(v *Value) bool {
4933
4934
4935
4936 for {
4937 d := v.AuxInt
4938 _ = v.Args[1]
4939 x := v.Args[0]
4940 v_1 := v.Args[1]
4941 if v_1.Op != OpARM64MOVDconst {
4942 break
4943 }
4944 c := v_1.AuxInt
4945 v.reset(OpARM64XORconst)
4946 v.AuxInt = ^(c >> uint64(d))
4947 v.AddArg(x)
4948 return true
4949 }
4950
4951
4952
4953 for {
4954 d := v.AuxInt
4955 _ = v.Args[1]
4956 x := v.Args[0]
4957 v_1 := v.Args[1]
4958 if v_1.Op != OpARM64SRAconst {
4959 break
4960 }
4961 c := v_1.AuxInt
4962 if x != v_1.Args[0] {
4963 break
4964 }
4965 if !(c == d) {
4966 break
4967 }
4968 v.reset(OpARM64MOVDconst)
4969 v.AuxInt = -1
4970 return true
4971 }
4972 return false
4973 }
4974 func rewriteValueARM64_OpARM64EONshiftRL_0(v *Value) bool {
4975
4976
4977
4978 for {
4979 d := v.AuxInt
4980 _ = v.Args[1]
4981 x := v.Args[0]
4982 v_1 := v.Args[1]
4983 if v_1.Op != OpARM64MOVDconst {
4984 break
4985 }
4986 c := v_1.AuxInt
4987 v.reset(OpARM64XORconst)
4988 v.AuxInt = ^int64(uint64(c) >> uint64(d))
4989 v.AddArg(x)
4990 return true
4991 }
4992
4993
4994
4995 for {
4996 d := v.AuxInt
4997 _ = v.Args[1]
4998 x := v.Args[0]
4999 v_1 := v.Args[1]
5000 if v_1.Op != OpARM64SRLconst {
5001 break
5002 }
5003 c := v_1.AuxInt
5004 if x != v_1.Args[0] {
5005 break
5006 }
5007 if !(c == d) {
5008 break
5009 }
5010 v.reset(OpARM64MOVDconst)
5011 v.AuxInt = -1
5012 return true
5013 }
5014 return false
5015 }
5016 func rewriteValueARM64_OpARM64Equal_0(v *Value) bool {
5017
5018
5019
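// match: (Equal (FlagEQ))
// result: (MOVDconst [1])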
5020 for {
5021 v_0 := v.Args[0]
5022 if v_0.Op != OpARM64FlagEQ {
5023 break
5024 }
5025 v.reset(OpARM64MOVDconst)
5026 v.AuxInt = 1
5027 return true
5028 }
5029
5030
5031
5032 for {
5033 v_0 := v.Args[0]
5034 if v_0.Op != OpARM64FlagLT_ULT {
5035 break
5036 }
5037 v.reset(OpARM64MOVDconst)
5038 v.AuxInt = 0
5039 return true
5040 }
5041
5042
5043
5044 for {
5045 v_0 := v.Args[0]
5046 if v_0.Op != OpARM64FlagLT_UGT {
5047 break
5048 }
5049 v.reset(OpARM64MOVDconst)
5050 v.AuxInt = 0
5051 return true
5052 }
5053
5054
5055
5056 for {
5057 v_0 := v.Args[0]
5058 if v_0.Op != OpARM64FlagGT_ULT {
5059 break
5060 }
5061 v.reset(OpARM64MOVDconst)
5062 v.AuxInt = 0
5063 return true
5064 }
5065
5066
5067
5068 for {
5069 v_0 := v.Args[0]
5070 if v_0.Op != OpARM64FlagGT_UGT {
5071 break
5072 }
5073 v.reset(OpARM64MOVDconst)
5074 v.AuxInt = 0
5075 return true
5076 }
5077
5078
5079
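// match: (Equal (InvertFlags x))
// result: (Equal x)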
5080 for {
5081 v_0 := v.Args[0]
5082 if v_0.Op != OpARM64InvertFlags {
5083 break
5084 }
5085 x := v_0.Args[0]
5086 v.reset(OpARM64Equal)
5087 v.AddArg(x)
5088 return true
5089 }
5090 return false
5091 }
5092 func rewriteValueARM64_OpARM64FADDD_0(v *Value) bool {
5093
5094
5095
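// match: (FADDD a (FMULD x y))
// result: (FMADDD a x y)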
5096 for {
5097 _ = v.Args[1]
5098 a := v.Args[0]
5099 v_1 := v.Args[1]
5100 if v_1.Op != OpARM64FMULD {
5101 break
5102 }
5103 y := v_1.Args[1]
5104 x := v_1.Args[0]
5105 v.reset(OpARM64FMADDD)
5106 v.AddArg(a)
5107 v.AddArg(x)
5108 v.AddArg(y)
5109 return true
5110 }
5111
5112
5113
5114 for {
5115 a := v.Args[1]
5116 v_0 := v.Args[0]
5117 if v_0.Op != OpARM64FMULD {
5118 break
5119 }
5120 y := v_0.Args[1]
5121 x := v_0.Args[0]
5122 v.reset(OpARM64FMADDD)
5123 v.AddArg(a)
5124 v.AddArg(x)
5125 v.AddArg(y)
5126 return true
5127 }
5128
5129
5130
5131 for {
5132 _ = v.Args[1]
5133 a := v.Args[0]
5134 v_1 := v.Args[1]
5135 if v_1.Op != OpARM64FNMULD {
5136 break
5137 }
5138 y := v_1.Args[1]
5139 x := v_1.Args[0]
5140 v.reset(OpARM64FMSUBD)
5141 v.AddArg(a)
5142 v.AddArg(x)
5143 v.AddArg(y)
5144 return true
5145 }
5146
5147
5148
5149 for {
5150 a := v.Args[1]
5151 v_0 := v.Args[0]
5152 if v_0.Op != OpARM64FNMULD {
5153 break
5154 }
5155 y := v_0.Args[1]
5156 x := v_0.Args[0]
5157 v.reset(OpARM64FMSUBD)
5158 v.AddArg(a)
5159 v.AddArg(x)
5160 v.AddArg(y)
5161 return true
5162 }
5163 return false
5164 }
5165 func rewriteValueARM64_OpARM64FADDS_0(v *Value) bool {
5166
5167
5168
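// match: (FADDS a (FMULS x y))
// result: (FMADDS a x y)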
5169 for {
5170 _ = v.Args[1]
5171 a := v.Args[0]
5172 v_1 := v.Args[1]
5173 if v_1.Op != OpARM64FMULS {
5174 break
5175 }
5176 y := v_1.Args[1]
5177 x := v_1.Args[0]
5178 v.reset(OpARM64FMADDS)
5179 v.AddArg(a)
5180 v.AddArg(x)
5181 v.AddArg(y)
5182 return true
5183 }
5184
5185
5186
5187 for {
5188 a := v.Args[1]
5189 v_0 := v.Args[0]
5190 if v_0.Op != OpARM64FMULS {
5191 break
5192 }
5193 y := v_0.Args[1]
5194 x := v_0.Args[0]
5195 v.reset(OpARM64FMADDS)
5196 v.AddArg(a)
5197 v.AddArg(x)
5198 v.AddArg(y)
5199 return true
5200 }
5201
5202
5203
5204 for {
5205 _ = v.Args[1]
5206 a := v.Args[0]
5207 v_1 := v.Args[1]
5208 if v_1.Op != OpARM64FNMULS {
5209 break
5210 }
5211 y := v_1.Args[1]
5212 x := v_1.Args[0]
5213 v.reset(OpARM64FMSUBS)
5214 v.AddArg(a)
5215 v.AddArg(x)
5216 v.AddArg(y)
5217 return true
5218 }
5219
5220
5221
5222 for {
5223 a := v.Args[1]
5224 v_0 := v.Args[0]
5225 if v_0.Op != OpARM64FNMULS {
5226 break
5227 }
5228 y := v_0.Args[1]
5229 x := v_0.Args[0]
5230 v.reset(OpARM64FMSUBS)
5231 v.AddArg(a)
5232 v.AddArg(x)
5233 v.AddArg(y)
5234 return true
5235 }
5236 return false
5237 }
5238 func rewriteValueARM64_OpARM64FCMPD_0(v *Value) bool {
5239 b := v.Block
5240
5241
5242
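// match: (FCMPD x (FMOVDconst [0]))
// result: (FCMPD0 x)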
5243 for {
5244 _ = v.Args[1]
5245 x := v.Args[0]
5246 v_1 := v.Args[1]
5247 if v_1.Op != OpARM64FMOVDconst {
5248 break
5249 }
5250 if v_1.AuxInt != 0 {
5251 break
5252 }
5253 v.reset(OpARM64FCMPD0)
5254 v.AddArg(x)
5255 return true
5256 }
5257
5258
5259
5260 for {
5261 x := v.Args[1]
5262 v_0 := v.Args[0]
5263 if v_0.Op != OpARM64FMOVDconst {
5264 break
5265 }
5266 if v_0.AuxInt != 0 {
5267 break
5268 }
5269 v.reset(OpARM64InvertFlags)
5270 v0 := b.NewValue0(v.Pos, OpARM64FCMPD0, types.TypeFlags)
5271 v0.AddArg(x)
5272 v.AddArg(v0)
5273 return true
5274 }
5275 return false
5276 }
5277 func rewriteValueARM64_OpARM64FCMPS_0(v *Value) bool {
5278 b := v.Block
5279
5280
5281
5282 for {
5283 _ = v.Args[1]
5284 x := v.Args[0]
5285 v_1 := v.Args[1]
5286 if v_1.Op != OpARM64FMOVSconst {
5287 break
5288 }
5289 if v_1.AuxInt != 0 {
5290 break
5291 }
5292 v.reset(OpARM64FCMPS0)
5293 v.AddArg(x)
5294 return true
5295 }
5296
5297
5298
5299 for {
5300 x := v.Args[1]
5301 v_0 := v.Args[0]
5302 if v_0.Op != OpARM64FMOVSconst {
5303 break
5304 }
5305 if v_0.AuxInt != 0 {
5306 break
5307 }
5308 v.reset(OpARM64InvertFlags)
5309 v0 := b.NewValue0(v.Pos, OpARM64FCMPS0, types.TypeFlags)
5310 v0.AddArg(x)
5311 v.AddArg(v0)
5312 return true
5313 }
5314 return false
5315 }
5316 func rewriteValueARM64_OpARM64FMOVDfpgp_0(v *Value) bool {
5317 b := v.Block
5318
5319
5320
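// match: (FMOVDfpgp <t> (Arg [off] {sym}))
// result: @b.Func.Entry (Arg <t> [off] {sym})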
5321 for {
5322 t := v.Type
5323 v_0 := v.Args[0]
5324 if v_0.Op != OpArg {
5325 break
5326 }
5327 off := v_0.AuxInt
5328 sym := v_0.Aux
5329 b = b.Func.Entry
5330 v0 := b.NewValue0(v.Pos, OpArg, t)
5331 v.reset(OpCopy)
5332 v.AddArg(v0)
5333 v0.AuxInt = off
5334 v0.Aux = sym
5335 return true
5336 }
5337 return false
5338 }
5339 func rewriteValueARM64_OpARM64FMOVDgpfp_0(v *Value) bool {
5340 b := v.Block
5341
5342
5343
5344 for {
5345 t := v.Type
5346 v_0 := v.Args[0]
5347 if v_0.Op != OpArg {
5348 break
5349 }
5350 off := v_0.AuxInt
5351 sym := v_0.Aux
5352 b = b.Func.Entry
5353 v0 := b.NewValue0(v.Pos, OpArg, t)
5354 v.reset(OpCopy)
5355 v.AddArg(v0)
5356 v0.AuxInt = off
5357 v0.Aux = sym
5358 return true
5359 }
5360 return false
5361 }
5362 func rewriteValueARM64_OpARM64FMOVDload_0(v *Value) bool {
5363 b := v.Block
5364 config := b.Func.Config
5365
5366
5367
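// match: (FMOVDload [off] {sym} ptr (MOVDstore [off] {sym} ptr val _))
// result: (FMOVDgpfp val)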
5368 for {
5369 off := v.AuxInt
5370 sym := v.Aux
5371 _ = v.Args[1]
5372 ptr := v.Args[0]
5373 v_1 := v.Args[1]
5374 if v_1.Op != OpARM64MOVDstore {
5375 break
5376 }
5377 if v_1.AuxInt != off {
5378 break
5379 }
5380 if v_1.Aux != sym {
5381 break
5382 }
5383 _ = v_1.Args[2]
5384 if ptr != v_1.Args[0] {
5385 break
5386 }
5387 val := v_1.Args[1]
5388 v.reset(OpARM64FMOVDgpfp)
5389 v.AddArg(val)
5390 return true
5391 }
5392
5393
5394
5395 for {
5396 off1 := v.AuxInt
5397 sym := v.Aux
5398 mem := v.Args[1]
5399 v_0 := v.Args[0]
5400 if v_0.Op != OpARM64ADDconst {
5401 break
5402 }
5403 off2 := v_0.AuxInt
5404 ptr := v_0.Args[0]
5405 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
5406 break
5407 }
5408 v.reset(OpARM64FMOVDload)
5409 v.AuxInt = off1 + off2
5410 v.Aux = sym
5411 v.AddArg(ptr)
5412 v.AddArg(mem)
5413 return true
5414 }
5415
5416
5417
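// match: (FMOVDload [off] {sym} (ADD ptr idx) mem)
// cond: off == 0 && sym == nil
// result: (FMOVDloadidx ptr idx mem)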
5418 for {
5419 off := v.AuxInt
5420 sym := v.Aux
5421 mem := v.Args[1]
5422 v_0 := v.Args[0]
5423 if v_0.Op != OpARM64ADD {
5424 break
5425 }
5426 idx := v_0.Args[1]
5427 ptr := v_0.Args[0]
5428 if !(off == 0 && sym == nil) {
5429 break
5430 }
5431 v.reset(OpARM64FMOVDloadidx)
5432 v.AddArg(ptr)
5433 v.AddArg(idx)
5434 v.AddArg(mem)
5435 return true
5436 }
5437
5438
5439
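// match: (FMOVDload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
// cond: canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
// result: (FMOVDload [off1+off2] {mergeSym(sym1, sym2)} ptr mem)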
5440 for {
5441 off1 := v.AuxInt
5442 sym1 := v.Aux
5443 mem := v.Args[1]
5444 v_0 := v.Args[0]
5445 if v_0.Op != OpARM64MOVDaddr {
5446 break
5447 }
5448 off2 := v_0.AuxInt
5449 sym2 := v_0.Aux
5450 ptr := v_0.Args[0]
5451 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
5452 break
5453 }
5454 v.reset(OpARM64FMOVDload)
5455 v.AuxInt = off1 + off2
5456 v.Aux = mergeSym(sym1, sym2)
5457 v.AddArg(ptr)
5458 v.AddArg(mem)
5459 return true
5460 }
5461 return false
5462 }
5463 func rewriteValueARM64_OpARM64FMOVDloadidx_0(v *Value) bool {
5464
5465
5466
5467 for {
5468 mem := v.Args[2]
5469 ptr := v.Args[0]
5470 v_1 := v.Args[1]
5471 if v_1.Op != OpARM64MOVDconst {
5472 break
5473 }
5474 c := v_1.AuxInt
5475 v.reset(OpARM64FMOVDload)
5476 v.AuxInt = c
5477 v.AddArg(ptr)
5478 v.AddArg(mem)
5479 return true
5480 }
5481
5482
5483
5484 for {
5485 mem := v.Args[2]
5486 v_0 := v.Args[0]
5487 if v_0.Op != OpARM64MOVDconst {
5488 break
5489 }
5490 c := v_0.AuxInt
5491 ptr := v.Args[1]
5492 v.reset(OpARM64FMOVDload)
5493 v.AuxInt = c
5494 v.AddArg(ptr)
5495 v.AddArg(mem)
5496 return true
5497 }
5498 return false
5499 }
5500 func rewriteValueARM64_OpARM64FMOVDstore_0(v *Value) bool {
5501 b := v.Block
5502 config := b.Func.Config
5503 // match: (FMOVDstore [off] {sym} ptr (FMOVDgpfp val) mem)
5504 // cond:
5505 // result: (MOVDstore [off] {sym} ptr val mem)
5506 for {
5507 off := v.AuxInt
5508 sym := v.Aux
5509 mem := v.Args[2]
5510 ptr := v.Args[0]
5511 v_1 := v.Args[1]
5512 if v_1.Op != OpARM64FMOVDgpfp {
5513 break
5514 }
5515 val := v_1.Args[0]
5516 v.reset(OpARM64MOVDstore)
5517 v.AuxInt = off
5518 v.Aux = sym
5519 v.AddArg(ptr)
5520 v.AddArg(val)
5521 v.AddArg(mem)
5522 return true
5523 }
5524 // match: (FMOVDstore [off1] {sym} (ADDconst [off2] ptr) val mem)
5525 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
5526 // result: (FMOVDstore [off1+off2] {sym} ptr val mem)
5527 for {
5528 off1 := v.AuxInt
5529 sym := v.Aux
5530 mem := v.Args[2]
5531 v_0 := v.Args[0]
5532 if v_0.Op != OpARM64ADDconst {
5533 break
5534 }
5535 off2 := v_0.AuxInt
5536 ptr := v_0.Args[0]
5537 val := v.Args[1]
5538 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
5539 break
5540 }
5541 v.reset(OpARM64FMOVDstore)
5542 v.AuxInt = off1 + off2
5543 v.Aux = sym
5544 v.AddArg(ptr)
5545 v.AddArg(val)
5546 v.AddArg(mem)
5547 return true
5548 }
5549 // match: (FMOVDstore [off] {sym} (ADD ptr idx) val mem)
5550 // cond: off == 0 && sym == nil
5551 // result: (FMOVDstoreidx ptr idx val mem)
5552 for {
5553 off := v.AuxInt
5554 sym := v.Aux
5555 mem := v.Args[2]
5556 v_0 := v.Args[0]
5557 if v_0.Op != OpARM64ADD {
5558 break
5559 }
5560 idx := v_0.Args[1]
5561 ptr := v_0.Args[0]
5562 val := v.Args[1]
5563 if !(off == 0 && sym == nil) {
5564 break
5565 }
5566 v.reset(OpARM64FMOVDstoreidx)
5567 v.AddArg(ptr)
5568 v.AddArg(idx)
5569 v.AddArg(val)
5570 v.AddArg(mem)
5571 return true
5572 }
5573 // match: (FMOVDstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem)
5574 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
5575 // result: (FMOVDstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
5576 for {
5577 off1 := v.AuxInt
5578 sym1 := v.Aux
5579 mem := v.Args[2]
5580 v_0 := v.Args[0]
5581 if v_0.Op != OpARM64MOVDaddr {
5582 break
5583 }
5584 off2 := v_0.AuxInt
5585 sym2 := v_0.Aux
5586 ptr := v_0.Args[0]
5587 val := v.Args[1]
5588 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
5589 break
5590 }
5591 v.reset(OpARM64FMOVDstore)
5592 v.AuxInt = off1 + off2
5593 v.Aux = mergeSym(sym1, sym2)
5594 v.AddArg(ptr)
5595 v.AddArg(val)
5596 v.AddArg(mem)
5597 return true
5598 }
5599 return false
5600 }
5601 func rewriteValueARM64_OpARM64FMOVDstoreidx_0(v *Value) bool {
5602 // match: (FMOVDstoreidx ptr (MOVDconst [c]) val mem)
5603 // cond:
5604 // result: (FMOVDstore [c] ptr val mem)
5605 for {
5606 mem := v.Args[3]
5607 ptr := v.Args[0]
5608 v_1 := v.Args[1]
5609 if v_1.Op != OpARM64MOVDconst {
5610 break
5611 }
5612 c := v_1.AuxInt
5613 val := v.Args[2]
5614 v.reset(OpARM64FMOVDstore)
5615 v.AuxInt = c
5616 v.AddArg(ptr)
5617 v.AddArg(val)
5618 v.AddArg(mem)
5619 return true
5620 }
5621 // match: (FMOVDstoreidx (MOVDconst [c]) idx val mem)
5622 // cond:
5623 // result: (FMOVDstore [c] idx val mem)
5624 for {
5625 mem := v.Args[3]
5626 v_0 := v.Args[0]
5627 if v_0.Op != OpARM64MOVDconst {
5628 break
5629 }
5630 c := v_0.AuxInt
5631 idx := v.Args[1]
5632 val := v.Args[2]
5633 v.reset(OpARM64FMOVDstore)
5634 v.AuxInt = c
5635 v.AddArg(idx)
5636 v.AddArg(val)
5637 v.AddArg(mem)
5638 return true
5639 }
5640 return false
5641 }
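// rewriteValueARM64_OpARM64FMOVSload_0 applies the single-precision counterparts of the
// FMOVDload rules above: a load of a value just stored from a general-purpose register is
// replaced by FMOVSgpfp, ADDconst and MOVDaddr offsets are folded into the load's
// AuxInt/Aux, and (ADD ptr idx) addressing is turned into FMOVSloadidx.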
5642 func rewriteValueARM64_OpARM64FMOVSload_0(v *Value) bool {
5643 b := v.Block
5644 config := b.Func.Config
5645
5646
5647
5648 for {
5649 off := v.AuxInt
5650 sym := v.Aux
5651 _ = v.Args[1]
5652 ptr := v.Args[0]
5653 v_1 := v.Args[1]
5654 if v_1.Op != OpARM64MOVWstore {
5655 break
5656 }
5657 if v_1.AuxInt != off {
5658 break
5659 }
5660 if v_1.Aux != sym {
5661 break
5662 }
5663 _ = v_1.Args[2]
5664 if ptr != v_1.Args[0] {
5665 break
5666 }
5667 val := v_1.Args[1]
5668 v.reset(OpARM64FMOVSgpfp)
5669 v.AddArg(val)
5670 return true
5671 }
5672
5673
5674
5675 for {
5676 off1 := v.AuxInt
5677 sym := v.Aux
5678 mem := v.Args[1]
5679 v_0 := v.Args[0]
5680 if v_0.Op != OpARM64ADDconst {
5681 break
5682 }
5683 off2 := v_0.AuxInt
5684 ptr := v_0.Args[0]
5685 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
5686 break
5687 }
5688 v.reset(OpARM64FMOVSload)
5689 v.AuxInt = off1 + off2
5690 v.Aux = sym
5691 v.AddArg(ptr)
5692 v.AddArg(mem)
5693 return true
5694 }
5695
5696
5697
5698 for {
5699 off := v.AuxInt
5700 sym := v.Aux
5701 mem := v.Args[1]
5702 v_0 := v.Args[0]
5703 if v_0.Op != OpARM64ADD {
5704 break
5705 }
5706 idx := v_0.Args[1]
5707 ptr := v_0.Args[0]
5708 if !(off == 0 && sym == nil) {
5709 break
5710 }
5711 v.reset(OpARM64FMOVSloadidx)
5712 v.AddArg(ptr)
5713 v.AddArg(idx)
5714 v.AddArg(mem)
5715 return true
5716 }
5717
5718
5719
5720 for {
5721 off1 := v.AuxInt
5722 sym1 := v.Aux
5723 mem := v.Args[1]
5724 v_0 := v.Args[0]
5725 if v_0.Op != OpARM64MOVDaddr {
5726 break
5727 }
5728 off2 := v_0.AuxInt
5729 sym2 := v_0.Aux
5730 ptr := v_0.Args[0]
5731 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
5732 break
5733 }
5734 v.reset(OpARM64FMOVSload)
5735 v.AuxInt = off1 + off2
5736 v.Aux = mergeSym(sym1, sym2)
5737 v.AddArg(ptr)
5738 v.AddArg(mem)
5739 return true
5740 }
5741 return false
5742 }
5743 func rewriteValueARM64_OpARM64FMOVSloadidx_0(v *Value) bool {
5744
5745
5746
5747 for {
5748 mem := v.Args[2]
5749 ptr := v.Args[0]
5750 v_1 := v.Args[1]
5751 if v_1.Op != OpARM64MOVDconst {
5752 break
5753 }
5754 c := v_1.AuxInt
5755 v.reset(OpARM64FMOVSload)
5756 v.AuxInt = c
5757 v.AddArg(ptr)
5758 v.AddArg(mem)
5759 return true
5760 }
5761
5762
5763
5764 for {
5765 mem := v.Args[2]
5766 v_0 := v.Args[0]
5767 if v_0.Op != OpARM64MOVDconst {
5768 break
5769 }
5770 c := v_0.AuxInt
5771 ptr := v.Args[1]
5772 v.reset(OpARM64FMOVSload)
5773 v.AuxInt = c
5774 v.AddArg(ptr)
5775 v.AddArg(mem)
5776 return true
5777 }
5778 return false
5779 }
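// rewriteValueARM64_OpARM64FMOVSstore_0 mirrors the FMOVDstore rules for single-precision
// stores: a store of an FMOVSgpfp value becomes a plain MOVWstore, ADDconst and MOVDaddr
// offsets are folded into the store, and (ADD ptr idx) addressing becomes FMOVSstoreidx.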
5780 func rewriteValueARM64_OpARM64FMOVSstore_0(v *Value) bool {
5781 b := v.Block
5782 config := b.Func.Config
5783
5784
5785
5786 for {
5787 off := v.AuxInt
5788 sym := v.Aux
5789 mem := v.Args[2]
5790 ptr := v.Args[0]
5791 v_1 := v.Args[1]
5792 if v_1.Op != OpARM64FMOVSgpfp {
5793 break
5794 }
5795 val := v_1.Args[0]
5796 v.reset(OpARM64MOVWstore)
5797 v.AuxInt = off
5798 v.Aux = sym
5799 v.AddArg(ptr)
5800 v.AddArg(val)
5801 v.AddArg(mem)
5802 return true
5803 }
5804
5805
5806
5807 for {
5808 off1 := v.AuxInt
5809 sym := v.Aux
5810 mem := v.Args[2]
5811 v_0 := v.Args[0]
5812 if v_0.Op != OpARM64ADDconst {
5813 break
5814 }
5815 off2 := v_0.AuxInt
5816 ptr := v_0.Args[0]
5817 val := v.Args[1]
5818 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
5819 break
5820 }
5821 v.reset(OpARM64FMOVSstore)
5822 v.AuxInt = off1 + off2
5823 v.Aux = sym
5824 v.AddArg(ptr)
5825 v.AddArg(val)
5826 v.AddArg(mem)
5827 return true
5828 }
5829
5830
5831
5832 for {
5833 off := v.AuxInt
5834 sym := v.Aux
5835 mem := v.Args[2]
5836 v_0 := v.Args[0]
5837 if v_0.Op != OpARM64ADD {
5838 break
5839 }
5840 idx := v_0.Args[1]
5841 ptr := v_0.Args[0]
5842 val := v.Args[1]
5843 if !(off == 0 && sym == nil) {
5844 break
5845 }
5846 v.reset(OpARM64FMOVSstoreidx)
5847 v.AddArg(ptr)
5848 v.AddArg(idx)
5849 v.AddArg(val)
5850 v.AddArg(mem)
5851 return true
5852 }
5853
5854
5855
5856 for {
5857 off1 := v.AuxInt
5858 sym1 := v.Aux
5859 mem := v.Args[2]
5860 v_0 := v.Args[0]
5861 if v_0.Op != OpARM64MOVDaddr {
5862 break
5863 }
5864 off2 := v_0.AuxInt
5865 sym2 := v_0.Aux
5866 ptr := v_0.Args[0]
5867 val := v.Args[1]
5868 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
5869 break
5870 }
5871 v.reset(OpARM64FMOVSstore)
5872 v.AuxInt = off1 + off2
5873 v.Aux = mergeSym(sym1, sym2)
5874 v.AddArg(ptr)
5875 v.AddArg(val)
5876 v.AddArg(mem)
5877 return true
5878 }
5879 return false
5880 }
5881 func rewriteValueARM64_OpARM64FMOVSstoreidx_0(v *Value) bool {
5882
5883
5884
5885 for {
5886 mem := v.Args[3]
5887 ptr := v.Args[0]
5888 v_1 := v.Args[1]
5889 if v_1.Op != OpARM64MOVDconst {
5890 break
5891 }
5892 c := v_1.AuxInt
5893 val := v.Args[2]
5894 v.reset(OpARM64FMOVSstore)
5895 v.AuxInt = c
5896 v.AddArg(ptr)
5897 v.AddArg(val)
5898 v.AddArg(mem)
5899 return true
5900 }
5901
5902
5903
5904 for {
5905 mem := v.Args[3]
5906 v_0 := v.Args[0]
5907 if v_0.Op != OpARM64MOVDconst {
5908 break
5909 }
5910 c := v_0.AuxInt
5911 idx := v.Args[1]
5912 val := v.Args[2]
5913 v.reset(OpARM64FMOVSstore)
5914 v.AuxInt = c
5915 v.AddArg(idx)
5916 v.AddArg(val)
5917 v.AddArg(mem)
5918 return true
5919 }
5920 return false
5921 }
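// rewriteValueARM64_OpARM64FMULD_0 folds a negation into the multiply: a double-precision
// multiply with a negated operand becomes FNMULD. The FMULS rules below do the same for
// single precision.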
5922 func rewriteValueARM64_OpARM64FMULD_0(v *Value) bool {
5923 // match: (FMULD (FNEGD x) y)
5924 // cond:
5925 // result: (FNMULD x y)
5926 for {
5927 y := v.Args[1]
5928 v_0 := v.Args[0]
5929 if v_0.Op != OpARM64FNEGD {
5930 break
5931 }
5932 x := v_0.Args[0]
5933 v.reset(OpARM64FNMULD)
5934 v.AddArg(x)
5935 v.AddArg(y)
5936 return true
5937 }
5938 // match: (FMULD y (FNEGD x))
5939 // cond:
5940 // result: (FNMULD x y)
5941 for {
5942 _ = v.Args[1]
5943 y := v.Args[0]
5944 v_1 := v.Args[1]
5945 if v_1.Op != OpARM64FNEGD {
5946 break
5947 }
5948 x := v_1.Args[0]
5949 v.reset(OpARM64FNMULD)
5950 v.AddArg(x)
5951 v.AddArg(y)
5952 return true
5953 }
5954 return false
5955 }
5956 func rewriteValueARM64_OpARM64FMULS_0(v *Value) bool {
5957 // match: (FMULS (FNEGS x) y)
5958 // cond:
5959 // result: (FNMULS x y)
5960 for {
5961 y := v.Args[1]
5962 v_0 := v.Args[0]
5963 if v_0.Op != OpARM64FNEGS {
5964 break
5965 }
5966 x := v_0.Args[0]
5967 v.reset(OpARM64FNMULS)
5968 v.AddArg(x)
5969 v.AddArg(y)
5970 return true
5971 }
5972 // match: (FMULS y (FNEGS x))
5973 // cond:
5974 // result: (FNMULS x y)
5975 for {
5976 _ = v.Args[1]
5977 y := v.Args[0]
5978 v_1 := v.Args[1]
5979 if v_1.Op != OpARM64FNEGS {
5980 break
5981 }
5982 x := v_1.Args[0]
5983 v.reset(OpARM64FNMULS)
5984 v.AddArg(x)
5985 v.AddArg(y)
5986 return true
5987 }
5988 return false
5989 }
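// rewriteValueARM64_OpARM64FNEGD_0 cancels a negation against a multiply: negating FMULD
// yields FNMULD and negating FNMULD yields FMULD. FNEGS below mirrors this for single
// precision.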
5990 func rewriteValueARM64_OpARM64FNEGD_0(v *Value) bool {
5991 // match: (FNEGD (FMULD x y))
5992 // cond:
5993 // result: (FNMULD x y)
5994 for {
5995 v_0 := v.Args[0]
5996 if v_0.Op != OpARM64FMULD {
5997 break
5998 }
5999 y := v_0.Args[1]
6000 x := v_0.Args[0]
6001 v.reset(OpARM64FNMULD)
6002 v.AddArg(x)
6003 v.AddArg(y)
6004 return true
6005 }
6006 // match: (FNEGD (FNMULD x y))
6007 // cond:
6008 // result: (FMULD x y)
6009 for {
6010 v_0 := v.Args[0]
6011 if v_0.Op != OpARM64FNMULD {
6012 break
6013 }
6014 y := v_0.Args[1]
6015 x := v_0.Args[0]
6016 v.reset(OpARM64FMULD)
6017 v.AddArg(x)
6018 v.AddArg(y)
6019 return true
6020 }
6021 return false
6022 }
6023 func rewriteValueARM64_OpARM64FNEGS_0(v *Value) bool {
6024 // match: (FNEGS (FMULS x y))
6025 // cond:
6026 // result: (FNMULS x y)
6027 for {
6028 v_0 := v.Args[0]
6029 if v_0.Op != OpARM64FMULS {
6030 break
6031 }
6032 y := v_0.Args[1]
6033 x := v_0.Args[0]
6034 v.reset(OpARM64FNMULS)
6035 v.AddArg(x)
6036 v.AddArg(y)
6037 return true
6038 }
6039 // match: (FNEGS (FNMULS x y))
6040 // cond:
6041 // result: (FMULS x y)
6042 for {
6043 v_0 := v.Args[0]
6044 if v_0.Op != OpARM64FNMULS {
6045 break
6046 }
6047 y := v_0.Args[1]
6048 x := v_0.Args[0]
6049 v.reset(OpARM64FMULS)
6050 v.AddArg(x)
6051 v.AddArg(y)
6052 return true
6053 }
6054 return false
6055 }
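// rewriteValueARM64_OpARM64FNMULD_0 removes a redundant negation: FNMULD of a negated
// operand is just FMULD of the un-negated operands, in either argument position.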
6056 func rewriteValueARM64_OpARM64FNMULD_0(v *Value) bool {
6057 // match: (FNMULD (FNEGD x) y)
6058 // cond:
6059 // result: (FMULD x y)
6060 for {
6061 y := v.Args[1]
6062 v_0 := v.Args[0]
6063 if v_0.Op != OpARM64FNEGD {
6064 break
6065 }
6066 x := v_0.Args[0]
6067 v.reset(OpARM64FMULD)
6068 v.AddArg(x)
6069 v.AddArg(y)
6070 return true
6071 }
6072 // match: (FNMULD y (FNEGD x))
6073 // cond:
6074 // result: (FMULD x y)
6075 for {
6076 _ = v.Args[1]
6077 y := v.Args[0]
6078 v_1 := v.Args[1]
6079 if v_1.Op != OpARM64FNEGD {
6080 break
6081 }
6082 x := v_1.Args[0]
6083 v.reset(OpARM64FMULD)
6084 v.AddArg(x)
6085 v.AddArg(y)
6086 return true
6087 }
6088 return false
6089 }
6090 func rewriteValueARM64_OpARM64FNMULS_0(v *Value) bool {
6091 // match: (FNMULS (FNEGS x) y)
6092 // cond:
6093 // result: (FMULS x y)
6094 for {
6095 y := v.Args[1]
6096 v_0 := v.Args[0]
6097 if v_0.Op != OpARM64FNEGS {
6098 break
6099 }
6100 x := v_0.Args[0]
6101 v.reset(OpARM64FMULS)
6102 v.AddArg(x)
6103 v.AddArg(y)
6104 return true
6105 }
6106 // match: (FNMULS y (FNEGS x))
6107 // cond:
6108 // result: (FMULS x y)
6109 for {
6110 _ = v.Args[1]
6111 y := v.Args[0]
6112 v_1 := v.Args[1]
6113 if v_1.Op != OpARM64FNEGS {
6114 break
6115 }
6116 x := v_1.Args[0]
6117 v.reset(OpARM64FMULS)
6118 v.AddArg(x)
6119 v.AddArg(y)
6120 return true
6121 }
6122 return false
6123 }
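// rewriteValueARM64_OpARM64FSUBD_0 fuses a subtract with a multiply into the ARM64 fused
// multiply-add ops: a-(x*y) becomes FMSUBD, (x*y)-a becomes FNMSUBD, and the corresponding
// FNMULD forms become FMADDD and FNMADDD. The FSUBS rules below do the same for single
// precision.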
6124 func rewriteValueARM64_OpARM64FSUBD_0(v *Value) bool {
6125 // match: (FSUBD a (FMULD x y))
6126 // cond:
6127 // result: (FMSUBD a x y)
6128 for {
6129 _ = v.Args[1]
6130 a := v.Args[0]
6131 v_1 := v.Args[1]
6132 if v_1.Op != OpARM64FMULD {
6133 break
6134 }
6135 y := v_1.Args[1]
6136 x := v_1.Args[0]
6137 v.reset(OpARM64FMSUBD)
6138 v.AddArg(a)
6139 v.AddArg(x)
6140 v.AddArg(y)
6141 return true
6142 }
6143 // match: (FSUBD (FMULD x y) a)
6144 // cond:
6145 // result: (FNMSUBD a x y)
6146 for {
6147 a := v.Args[1]
6148 v_0 := v.Args[0]
6149 if v_0.Op != OpARM64FMULD {
6150 break
6151 }
6152 y := v_0.Args[1]
6153 x := v_0.Args[0]
6154 v.reset(OpARM64FNMSUBD)
6155 v.AddArg(a)
6156 v.AddArg(x)
6157 v.AddArg(y)
6158 return true
6159 }
6160 // match: (FSUBD a (FNMULD x y))
6161 // cond:
6162 // result: (FMADDD a x y)
6163 for {
6164 _ = v.Args[1]
6165 a := v.Args[0]
6166 v_1 := v.Args[1]
6167 if v_1.Op != OpARM64FNMULD {
6168 break
6169 }
6170 y := v_1.Args[1]
6171 x := v_1.Args[0]
6172 v.reset(OpARM64FMADDD)
6173 v.AddArg(a)
6174 v.AddArg(x)
6175 v.AddArg(y)
6176 return true
6177 }
6178 // match: (FSUBD (FNMULD x y) a)
6179 // cond:
6180 // result: (FNMADDD a x y)
6181 for {
6182 a := v.Args[1]
6183 v_0 := v.Args[0]
6184 if v_0.Op != OpARM64FNMULD {
6185 break
6186 }
6187 y := v_0.Args[1]
6188 x := v_0.Args[0]
6189 v.reset(OpARM64FNMADDD)
6190 v.AddArg(a)
6191 v.AddArg(x)
6192 v.AddArg(y)
6193 return true
6194 }
6195 return false
6196 }
6197 func rewriteValueARM64_OpARM64FSUBS_0(v *Value) bool {
6198 // match: (FSUBS a (FMULS x y))
6199 // cond:
6200 // result: (FMSUBS a x y)
6201 for {
6202 _ = v.Args[1]
6203 a := v.Args[0]
6204 v_1 := v.Args[1]
6205 if v_1.Op != OpARM64FMULS {
6206 break
6207 }
6208 y := v_1.Args[1]
6209 x := v_1.Args[0]
6210 v.reset(OpARM64FMSUBS)
6211 v.AddArg(a)
6212 v.AddArg(x)
6213 v.AddArg(y)
6214 return true
6215 }
6216 // match: (FSUBS (FMULS x y) a)
6217 // cond:
6218 // result: (FNMSUBS a x y)
6219 for {
6220 a := v.Args[1]
6221 v_0 := v.Args[0]
6222 if v_0.Op != OpARM64FMULS {
6223 break
6224 }
6225 y := v_0.Args[1]
6226 x := v_0.Args[0]
6227 v.reset(OpARM64FNMSUBS)
6228 v.AddArg(a)
6229 v.AddArg(x)
6230 v.AddArg(y)
6231 return true
6232 }
6233 // match: (FSUBS a (FNMULS x y))
6234 // cond:
6235 // result: (FMADDS a x y)
6236 for {
6237 _ = v.Args[1]
6238 a := v.Args[0]
6239 v_1 := v.Args[1]
6240 if v_1.Op != OpARM64FNMULS {
6241 break
6242 }
6243 y := v_1.Args[1]
6244 x := v_1.Args[0]
6245 v.reset(OpARM64FMADDS)
6246 v.AddArg(a)
6247 v.AddArg(x)
6248 v.AddArg(y)
6249 return true
6250 }
6251 // match: (FSUBS (FNMULS x y) a)
6252 // cond:
6253 // result: (FNMADDS a x y)
6254 for {
6255 a := v.Args[1]
6256 v_0 := v.Args[0]
6257 if v_0.Op != OpARM64FNMULS {
6258 break
6259 }
6260 y := v_0.Args[1]
6261 x := v_0.Args[0]
6262 v.reset(OpARM64FNMADDS)
6263 v.AddArg(a)
6264 v.AddArg(x)
6265 v.AddArg(y)
6266 return true
6267 }
6268 return false
6269 }
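// rewriteValueARM64_OpARM64GreaterEqual_0 and the comparison helpers that follow
// (GreaterEqualU, GreaterThan, GreaterThanU, LessEqual, LessThan and their F/U variants)
// fold known flag values (FlagEQ, FlagLT_ULT, FlagLT_UGT, FlagGT_ULT, FlagGT_UGT) into a
// MOVDconst 0 or 1, and swap the condition when the flags come from InvertFlags.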
6270 func rewriteValueARM64_OpARM64GreaterEqual_0(v *Value) bool {
6271
6272
6273
6274 for {
6275 v_0 := v.Args[0]
6276 if v_0.Op != OpARM64FlagEQ {
6277 break
6278 }
6279 v.reset(OpARM64MOVDconst)
6280 v.AuxInt = 1
6281 return true
6282 }
6283
6284
6285
6286 for {
6287 v_0 := v.Args[0]
6288 if v_0.Op != OpARM64FlagLT_ULT {
6289 break
6290 }
6291 v.reset(OpARM64MOVDconst)
6292 v.AuxInt = 0
6293 return true
6294 }
6295
6296
6297
6298 for {
6299 v_0 := v.Args[0]
6300 if v_0.Op != OpARM64FlagLT_UGT {
6301 break
6302 }
6303 v.reset(OpARM64MOVDconst)
6304 v.AuxInt = 0
6305 return true
6306 }
6307
6308
6309
6310 for {
6311 v_0 := v.Args[0]
6312 if v_0.Op != OpARM64FlagGT_ULT {
6313 break
6314 }
6315 v.reset(OpARM64MOVDconst)
6316 v.AuxInt = 1
6317 return true
6318 }
6319
6320
6321
6322 for {
6323 v_0 := v.Args[0]
6324 if v_0.Op != OpARM64FlagGT_UGT {
6325 break
6326 }
6327 v.reset(OpARM64MOVDconst)
6328 v.AuxInt = 1
6329 return true
6330 }
6331
6332
6333
6334 for {
6335 v_0 := v.Args[0]
6336 if v_0.Op != OpARM64InvertFlags {
6337 break
6338 }
6339 x := v_0.Args[0]
6340 v.reset(OpARM64LessEqual)
6341 v.AddArg(x)
6342 return true
6343 }
6344 return false
6345 }
6346 func rewriteValueARM64_OpARM64GreaterEqualF_0(v *Value) bool {
6347
6348
6349
6350 for {
6351 v_0 := v.Args[0]
6352 if v_0.Op != OpARM64InvertFlags {
6353 break
6354 }
6355 x := v_0.Args[0]
6356 v.reset(OpARM64LessEqualF)
6357 v.AddArg(x)
6358 return true
6359 }
6360 return false
6361 }
6362 func rewriteValueARM64_OpARM64GreaterEqualU_0(v *Value) bool {
6363
6364
6365
6366 for {
6367 v_0 := v.Args[0]
6368 if v_0.Op != OpARM64FlagEQ {
6369 break
6370 }
6371 v.reset(OpARM64MOVDconst)
6372 v.AuxInt = 1
6373 return true
6374 }
6375
6376
6377
6378 for {
6379 v_0 := v.Args[0]
6380 if v_0.Op != OpARM64FlagLT_ULT {
6381 break
6382 }
6383 v.reset(OpARM64MOVDconst)
6384 v.AuxInt = 0
6385 return true
6386 }
6387
6388
6389
6390 for {
6391 v_0 := v.Args[0]
6392 if v_0.Op != OpARM64FlagLT_UGT {
6393 break
6394 }
6395 v.reset(OpARM64MOVDconst)
6396 v.AuxInt = 1
6397 return true
6398 }
6399
6400
6401
6402 for {
6403 v_0 := v.Args[0]
6404 if v_0.Op != OpARM64FlagGT_ULT {
6405 break
6406 }
6407 v.reset(OpARM64MOVDconst)
6408 v.AuxInt = 0
6409 return true
6410 }
6411
6412
6413
6414 for {
6415 v_0 := v.Args[0]
6416 if v_0.Op != OpARM64FlagGT_UGT {
6417 break
6418 }
6419 v.reset(OpARM64MOVDconst)
6420 v.AuxInt = 1
6421 return true
6422 }
6423
6424
6425
6426 for {
6427 v_0 := v.Args[0]
6428 if v_0.Op != OpARM64InvertFlags {
6429 break
6430 }
6431 x := v_0.Args[0]
6432 v.reset(OpARM64LessEqualU)
6433 v.AddArg(x)
6434 return true
6435 }
6436 return false
6437 }
6438 func rewriteValueARM64_OpARM64GreaterThan_0(v *Value) bool {
6439
6440
6441
6442 for {
6443 v_0 := v.Args[0]
6444 if v_0.Op != OpARM64FlagEQ {
6445 break
6446 }
6447 v.reset(OpARM64MOVDconst)
6448 v.AuxInt = 0
6449 return true
6450 }
6451
6452
6453
6454 for {
6455 v_0 := v.Args[0]
6456 if v_0.Op != OpARM64FlagLT_ULT {
6457 break
6458 }
6459 v.reset(OpARM64MOVDconst)
6460 v.AuxInt = 0
6461 return true
6462 }
6463
6464
6465
6466 for {
6467 v_0 := v.Args[0]
6468 if v_0.Op != OpARM64FlagLT_UGT {
6469 break
6470 }
6471 v.reset(OpARM64MOVDconst)
6472 v.AuxInt = 0
6473 return true
6474 }
6475
6476
6477
6478 for {
6479 v_0 := v.Args[0]
6480 if v_0.Op != OpARM64FlagGT_ULT {
6481 break
6482 }
6483 v.reset(OpARM64MOVDconst)
6484 v.AuxInt = 1
6485 return true
6486 }
6487
6488
6489
6490 for {
6491 v_0 := v.Args[0]
6492 if v_0.Op != OpARM64FlagGT_UGT {
6493 break
6494 }
6495 v.reset(OpARM64MOVDconst)
6496 v.AuxInt = 1
6497 return true
6498 }
6499
6500
6501
6502 for {
6503 v_0 := v.Args[0]
6504 if v_0.Op != OpARM64InvertFlags {
6505 break
6506 }
6507 x := v_0.Args[0]
6508 v.reset(OpARM64LessThan)
6509 v.AddArg(x)
6510 return true
6511 }
6512 return false
6513 }
6514 func rewriteValueARM64_OpARM64GreaterThanF_0(v *Value) bool {
6515
6516
6517
6518 for {
6519 v_0 := v.Args[0]
6520 if v_0.Op != OpARM64InvertFlags {
6521 break
6522 }
6523 x := v_0.Args[0]
6524 v.reset(OpARM64LessThanF)
6525 v.AddArg(x)
6526 return true
6527 }
6528 return false
6529 }
6530 func rewriteValueARM64_OpARM64GreaterThanU_0(v *Value) bool {
6531
6532
6533
6534 for {
6535 v_0 := v.Args[0]
6536 if v_0.Op != OpARM64FlagEQ {
6537 break
6538 }
6539 v.reset(OpARM64MOVDconst)
6540 v.AuxInt = 0
6541 return true
6542 }
6543
6544
6545
6546 for {
6547 v_0 := v.Args[0]
6548 if v_0.Op != OpARM64FlagLT_ULT {
6549 break
6550 }
6551 v.reset(OpARM64MOVDconst)
6552 v.AuxInt = 0
6553 return true
6554 }
6555
6556
6557
6558 for {
6559 v_0 := v.Args[0]
6560 if v_0.Op != OpARM64FlagLT_UGT {
6561 break
6562 }
6563 v.reset(OpARM64MOVDconst)
6564 v.AuxInt = 1
6565 return true
6566 }
6567
6568
6569
6570 for {
6571 v_0 := v.Args[0]
6572 if v_0.Op != OpARM64FlagGT_ULT {
6573 break
6574 }
6575 v.reset(OpARM64MOVDconst)
6576 v.AuxInt = 0
6577 return true
6578 }
6579
6580
6581
6582 for {
6583 v_0 := v.Args[0]
6584 if v_0.Op != OpARM64FlagGT_UGT {
6585 break
6586 }
6587 v.reset(OpARM64MOVDconst)
6588 v.AuxInt = 1
6589 return true
6590 }
6591
6592
6593
6594 for {
6595 v_0 := v.Args[0]
6596 if v_0.Op != OpARM64InvertFlags {
6597 break
6598 }
6599 x := v_0.Args[0]
6600 v.reset(OpARM64LessThanU)
6601 v.AddArg(x)
6602 return true
6603 }
6604 return false
6605 }
6606 func rewriteValueARM64_OpARM64LessEqual_0(v *Value) bool {
6607
6608
6609
6610 for {
6611 v_0 := v.Args[0]
6612 if v_0.Op != OpARM64FlagEQ {
6613 break
6614 }
6615 v.reset(OpARM64MOVDconst)
6616 v.AuxInt = 1
6617 return true
6618 }
6619
6620
6621
6622 for {
6623 v_0 := v.Args[0]
6624 if v_0.Op != OpARM64FlagLT_ULT {
6625 break
6626 }
6627 v.reset(OpARM64MOVDconst)
6628 v.AuxInt = 1
6629 return true
6630 }
6631
6632
6633
6634 for {
6635 v_0 := v.Args[0]
6636 if v_0.Op != OpARM64FlagLT_UGT {
6637 break
6638 }
6639 v.reset(OpARM64MOVDconst)
6640 v.AuxInt = 1
6641 return true
6642 }
6643
6644
6645
6646 for {
6647 v_0 := v.Args[0]
6648 if v_0.Op != OpARM64FlagGT_ULT {
6649 break
6650 }
6651 v.reset(OpARM64MOVDconst)
6652 v.AuxInt = 0
6653 return true
6654 }
6655
6656
6657
6658 for {
6659 v_0 := v.Args[0]
6660 if v_0.Op != OpARM64FlagGT_UGT {
6661 break
6662 }
6663 v.reset(OpARM64MOVDconst)
6664 v.AuxInt = 0
6665 return true
6666 }
6667
6668
6669
6670 for {
6671 v_0 := v.Args[0]
6672 if v_0.Op != OpARM64InvertFlags {
6673 break
6674 }
6675 x := v_0.Args[0]
6676 v.reset(OpARM64GreaterEqual)
6677 v.AddArg(x)
6678 return true
6679 }
6680 return false
6681 }
6682 func rewriteValueARM64_OpARM64LessEqualF_0(v *Value) bool {
6683
6684
6685
6686 for {
6687 v_0 := v.Args[0]
6688 if v_0.Op != OpARM64InvertFlags {
6689 break
6690 }
6691 x := v_0.Args[0]
6692 v.reset(OpARM64GreaterEqualF)
6693 v.AddArg(x)
6694 return true
6695 }
6696 return false
6697 }
6698 func rewriteValueARM64_OpARM64LessEqualU_0(v *Value) bool {
6699
6700
6701
6702 for {
6703 v_0 := v.Args[0]
6704 if v_0.Op != OpARM64FlagEQ {
6705 break
6706 }
6707 v.reset(OpARM64MOVDconst)
6708 v.AuxInt = 1
6709 return true
6710 }
6711
6712
6713
6714 for {
6715 v_0 := v.Args[0]
6716 if v_0.Op != OpARM64FlagLT_ULT {
6717 break
6718 }
6719 v.reset(OpARM64MOVDconst)
6720 v.AuxInt = 1
6721 return true
6722 }
6723
6724
6725
6726 for {
6727 v_0 := v.Args[0]
6728 if v_0.Op != OpARM64FlagLT_UGT {
6729 break
6730 }
6731 v.reset(OpARM64MOVDconst)
6732 v.AuxInt = 0
6733 return true
6734 }
6735
6736
6737
6738 for {
6739 v_0 := v.Args[0]
6740 if v_0.Op != OpARM64FlagGT_ULT {
6741 break
6742 }
6743 v.reset(OpARM64MOVDconst)
6744 v.AuxInt = 1
6745 return true
6746 }
6747
6748
6749
6750 for {
6751 v_0 := v.Args[0]
6752 if v_0.Op != OpARM64FlagGT_UGT {
6753 break
6754 }
6755 v.reset(OpARM64MOVDconst)
6756 v.AuxInt = 0
6757 return true
6758 }
6759
6760
6761
6762 for {
6763 v_0 := v.Args[0]
6764 if v_0.Op != OpARM64InvertFlags {
6765 break
6766 }
6767 x := v_0.Args[0]
6768 v.reset(OpARM64GreaterEqualU)
6769 v.AddArg(x)
6770 return true
6771 }
6772 return false
6773 }
6774 func rewriteValueARM64_OpARM64LessThan_0(v *Value) bool {
6775
6776
6777
6778 for {
6779 v_0 := v.Args[0]
6780 if v_0.Op != OpARM64FlagEQ {
6781 break
6782 }
6783 v.reset(OpARM64MOVDconst)
6784 v.AuxInt = 0
6785 return true
6786 }
6787
6788
6789
6790 for {
6791 v_0 := v.Args[0]
6792 if v_0.Op != OpARM64FlagLT_ULT {
6793 break
6794 }
6795 v.reset(OpARM64MOVDconst)
6796 v.AuxInt = 1
6797 return true
6798 }
6799
6800
6801
6802 for {
6803 v_0 := v.Args[0]
6804 if v_0.Op != OpARM64FlagLT_UGT {
6805 break
6806 }
6807 v.reset(OpARM64MOVDconst)
6808 v.AuxInt = 1
6809 return true
6810 }
6811
6812
6813
6814 for {
6815 v_0 := v.Args[0]
6816 if v_0.Op != OpARM64FlagGT_ULT {
6817 break
6818 }
6819 v.reset(OpARM64MOVDconst)
6820 v.AuxInt = 0
6821 return true
6822 }
6823
6824
6825
6826 for {
6827 v_0 := v.Args[0]
6828 if v_0.Op != OpARM64FlagGT_UGT {
6829 break
6830 }
6831 v.reset(OpARM64MOVDconst)
6832 v.AuxInt = 0
6833 return true
6834 }
6835
6836
6837
6838 for {
6839 v_0 := v.Args[0]
6840 if v_0.Op != OpARM64InvertFlags {
6841 break
6842 }
6843 x := v_0.Args[0]
6844 v.reset(OpARM64GreaterThan)
6845 v.AddArg(x)
6846 return true
6847 }
6848 return false
6849 }
6850 func rewriteValueARM64_OpARM64LessThanF_0(v *Value) bool {
6851
6852
6853
6854 for {
6855 v_0 := v.Args[0]
6856 if v_0.Op != OpARM64InvertFlags {
6857 break
6858 }
6859 x := v_0.Args[0]
6860 v.reset(OpARM64GreaterThanF)
6861 v.AddArg(x)
6862 return true
6863 }
6864 return false
6865 }
6866 func rewriteValueARM64_OpARM64LessThanU_0(v *Value) bool {
6867
6868
6869
6870 for {
6871 v_0 := v.Args[0]
6872 if v_0.Op != OpARM64FlagEQ {
6873 break
6874 }
6875 v.reset(OpARM64MOVDconst)
6876 v.AuxInt = 0
6877 return true
6878 }
6879
6880
6881
6882 for {
6883 v_0 := v.Args[0]
6884 if v_0.Op != OpARM64FlagLT_ULT {
6885 break
6886 }
6887 v.reset(OpARM64MOVDconst)
6888 v.AuxInt = 1
6889 return true
6890 }
6891
6892
6893
6894 for {
6895 v_0 := v.Args[0]
6896 if v_0.Op != OpARM64FlagLT_UGT {
6897 break
6898 }
6899 v.reset(OpARM64MOVDconst)
6900 v.AuxInt = 0
6901 return true
6902 }
6903
6904
6905
6906 for {
6907 v_0 := v.Args[0]
6908 if v_0.Op != OpARM64FlagGT_ULT {
6909 break
6910 }
6911 v.reset(OpARM64MOVDconst)
6912 v.AuxInt = 1
6913 return true
6914 }
6915
6916
6917
6918 for {
6919 v_0 := v.Args[0]
6920 if v_0.Op != OpARM64FlagGT_UGT {
6921 break
6922 }
6923 v.reset(OpARM64MOVDconst)
6924 v.AuxInt = 0
6925 return true
6926 }
6927
6928
6929
6930 for {
6931 v_0 := v.Args[0]
6932 if v_0.Op != OpARM64InvertFlags {
6933 break
6934 }
6935 x := v_0.Args[0]
6936 v.reset(OpARM64GreaterThanU)
6937 v.AddArg(x)
6938 return true
6939 }
6940 return false
6941 }
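// rewriteValueARM64_OpARM64MADD_0 strength-reduces MADD a x (MOVDconst [c]) for small
// multipliers: -1, 0 and 1 become SUB, a copy of a, and ADD; powers of two become
// ADDshiftLL; and constants of the form 2^n±1 or 3/5/7/9 times a power of two become
// shift-and-add or shift-and-sub combinations. The _10 helper repeats these rules with the
// constant in the second argument, and _20 folds the remaining constant cases.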
6942 func rewriteValueARM64_OpARM64MADD_0(v *Value) bool {
6943 b := v.Block
6944
6945
6946
6947 for {
6948 _ = v.Args[2]
6949 a := v.Args[0]
6950 x := v.Args[1]
6951 v_2 := v.Args[2]
6952 if v_2.Op != OpARM64MOVDconst {
6953 break
6954 }
6955 if v_2.AuxInt != -1 {
6956 break
6957 }
6958 v.reset(OpARM64SUB)
6959 v.AddArg(a)
6960 v.AddArg(x)
6961 return true
6962 }
6963
6964
6965
6966 for {
6967 _ = v.Args[2]
6968 a := v.Args[0]
6969 v_2 := v.Args[2]
6970 if v_2.Op != OpARM64MOVDconst {
6971 break
6972 }
6973 if v_2.AuxInt != 0 {
6974 break
6975 }
6976 v.reset(OpCopy)
6977 v.Type = a.Type
6978 v.AddArg(a)
6979 return true
6980 }
6981
6982
6983
6984 for {
6985 _ = v.Args[2]
6986 a := v.Args[0]
6987 x := v.Args[1]
6988 v_2 := v.Args[2]
6989 if v_2.Op != OpARM64MOVDconst {
6990 break
6991 }
6992 if v_2.AuxInt != 1 {
6993 break
6994 }
6995 v.reset(OpARM64ADD)
6996 v.AddArg(a)
6997 v.AddArg(x)
6998 return true
6999 }
7000
7001
7002
7003 for {
7004 _ = v.Args[2]
7005 a := v.Args[0]
7006 x := v.Args[1]
7007 v_2 := v.Args[2]
7008 if v_2.Op != OpARM64MOVDconst {
7009 break
7010 }
7011 c := v_2.AuxInt
7012 if !(isPowerOfTwo(c)) {
7013 break
7014 }
7015 v.reset(OpARM64ADDshiftLL)
7016 v.AuxInt = log2(c)
7017 v.AddArg(a)
7018 v.AddArg(x)
7019 return true
7020 }
7021
7022
7023
7024 for {
7025 _ = v.Args[2]
7026 a := v.Args[0]
7027 x := v.Args[1]
7028 v_2 := v.Args[2]
7029 if v_2.Op != OpARM64MOVDconst {
7030 break
7031 }
7032 c := v_2.AuxInt
7033 if !(isPowerOfTwo(c-1) && c >= 3) {
7034 break
7035 }
7036 v.reset(OpARM64ADD)
7037 v.AddArg(a)
7038 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
7039 v0.AuxInt = log2(c - 1)
7040 v0.AddArg(x)
7041 v0.AddArg(x)
7042 v.AddArg(v0)
7043 return true
7044 }
7045
7046
7047
7048 for {
7049 _ = v.Args[2]
7050 a := v.Args[0]
7051 x := v.Args[1]
7052 v_2 := v.Args[2]
7053 if v_2.Op != OpARM64MOVDconst {
7054 break
7055 }
7056 c := v_2.AuxInt
7057 if !(isPowerOfTwo(c+1) && c >= 7) {
7058 break
7059 }
7060 v.reset(OpARM64SUB)
7061 v.AddArg(a)
7062 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
7063 v0.AuxInt = log2(c + 1)
7064 v0.AddArg(x)
7065 v0.AddArg(x)
7066 v.AddArg(v0)
7067 return true
7068 }
7069
7070
7071
7072 for {
7073 _ = v.Args[2]
7074 a := v.Args[0]
7075 x := v.Args[1]
7076 v_2 := v.Args[2]
7077 if v_2.Op != OpARM64MOVDconst {
7078 break
7079 }
7080 c := v_2.AuxInt
7081 if !(c%3 == 0 && isPowerOfTwo(c/3)) {
7082 break
7083 }
7084 v.reset(OpARM64SUBshiftLL)
7085 v.AuxInt = log2(c / 3)
7086 v.AddArg(a)
7087 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
7088 v0.AuxInt = 2
7089 v0.AddArg(x)
7090 v0.AddArg(x)
7091 v.AddArg(v0)
7092 return true
7093 }
7094
7095
7096
7097 for {
7098 _ = v.Args[2]
7099 a := v.Args[0]
7100 x := v.Args[1]
7101 v_2 := v.Args[2]
7102 if v_2.Op != OpARM64MOVDconst {
7103 break
7104 }
7105 c := v_2.AuxInt
7106 if !(c%5 == 0 && isPowerOfTwo(c/5)) {
7107 break
7108 }
7109 v.reset(OpARM64ADDshiftLL)
7110 v.AuxInt = log2(c / 5)
7111 v.AddArg(a)
7112 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
7113 v0.AuxInt = 2
7114 v0.AddArg(x)
7115 v0.AddArg(x)
7116 v.AddArg(v0)
7117 return true
7118 }
7119
7120
7121
7122 for {
7123 _ = v.Args[2]
7124 a := v.Args[0]
7125 x := v.Args[1]
7126 v_2 := v.Args[2]
7127 if v_2.Op != OpARM64MOVDconst {
7128 break
7129 }
7130 c := v_2.AuxInt
7131 if !(c%7 == 0 && isPowerOfTwo(c/7)) {
7132 break
7133 }
7134 v.reset(OpARM64SUBshiftLL)
7135 v.AuxInt = log2(c / 7)
7136 v.AddArg(a)
7137 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
7138 v0.AuxInt = 3
7139 v0.AddArg(x)
7140 v0.AddArg(x)
7141 v.AddArg(v0)
7142 return true
7143 }
7144
7145
7146
7147 for {
7148 _ = v.Args[2]
7149 a := v.Args[0]
7150 x := v.Args[1]
7151 v_2 := v.Args[2]
7152 if v_2.Op != OpARM64MOVDconst {
7153 break
7154 }
7155 c := v_2.AuxInt
7156 if !(c%9 == 0 && isPowerOfTwo(c/9)) {
7157 break
7158 }
7159 v.reset(OpARM64ADDshiftLL)
7160 v.AuxInt = log2(c / 9)
7161 v.AddArg(a)
7162 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
7163 v0.AuxInt = 3
7164 v0.AddArg(x)
7165 v0.AddArg(x)
7166 v.AddArg(v0)
7167 return true
7168 }
7169 return false
7170 }
7171 func rewriteValueARM64_OpARM64MADD_10(v *Value) bool {
7172 b := v.Block
7173
7174
7175
7176 for {
7177 x := v.Args[2]
7178 a := v.Args[0]
7179 v_1 := v.Args[1]
7180 if v_1.Op != OpARM64MOVDconst {
7181 break
7182 }
7183 if v_1.AuxInt != -1 {
7184 break
7185 }
7186 v.reset(OpARM64SUB)
7187 v.AddArg(a)
7188 v.AddArg(x)
7189 return true
7190 }
7191
7192
7193
7194 for {
7195 _ = v.Args[2]
7196 a := v.Args[0]
7197 v_1 := v.Args[1]
7198 if v_1.Op != OpARM64MOVDconst {
7199 break
7200 }
7201 if v_1.AuxInt != 0 {
7202 break
7203 }
7204 v.reset(OpCopy)
7205 v.Type = a.Type
7206 v.AddArg(a)
7207 return true
7208 }
7209
7210
7211
7212 for {
7213 x := v.Args[2]
7214 a := v.Args[0]
7215 v_1 := v.Args[1]
7216 if v_1.Op != OpARM64MOVDconst {
7217 break
7218 }
7219 if v_1.AuxInt != 1 {
7220 break
7221 }
7222 v.reset(OpARM64ADD)
7223 v.AddArg(a)
7224 v.AddArg(x)
7225 return true
7226 }
7227
7228
7229
7230 for {
7231 x := v.Args[2]
7232 a := v.Args[0]
7233 v_1 := v.Args[1]
7234 if v_1.Op != OpARM64MOVDconst {
7235 break
7236 }
7237 c := v_1.AuxInt
7238 if !(isPowerOfTwo(c)) {
7239 break
7240 }
7241 v.reset(OpARM64ADDshiftLL)
7242 v.AuxInt = log2(c)
7243 v.AddArg(a)
7244 v.AddArg(x)
7245 return true
7246 }
7247
7248
7249
7250 for {
7251 x := v.Args[2]
7252 a := v.Args[0]
7253 v_1 := v.Args[1]
7254 if v_1.Op != OpARM64MOVDconst {
7255 break
7256 }
7257 c := v_1.AuxInt
7258 if !(isPowerOfTwo(c-1) && c >= 3) {
7259 break
7260 }
7261 v.reset(OpARM64ADD)
7262 v.AddArg(a)
7263 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
7264 v0.AuxInt = log2(c - 1)
7265 v0.AddArg(x)
7266 v0.AddArg(x)
7267 v.AddArg(v0)
7268 return true
7269 }
7270
7271
7272
7273 for {
7274 x := v.Args[2]
7275 a := v.Args[0]
7276 v_1 := v.Args[1]
7277 if v_1.Op != OpARM64MOVDconst {
7278 break
7279 }
7280 c := v_1.AuxInt
7281 if !(isPowerOfTwo(c+1) && c >= 7) {
7282 break
7283 }
7284 v.reset(OpARM64SUB)
7285 v.AddArg(a)
7286 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
7287 v0.AuxInt = log2(c + 1)
7288 v0.AddArg(x)
7289 v0.AddArg(x)
7290 v.AddArg(v0)
7291 return true
7292 }
7293
7294
7295
7296 for {
7297 x := v.Args[2]
7298 a := v.Args[0]
7299 v_1 := v.Args[1]
7300 if v_1.Op != OpARM64MOVDconst {
7301 break
7302 }
7303 c := v_1.AuxInt
7304 if !(c%3 == 0 && isPowerOfTwo(c/3)) {
7305 break
7306 }
7307 v.reset(OpARM64SUBshiftLL)
7308 v.AuxInt = log2(c / 3)
7309 v.AddArg(a)
7310 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
7311 v0.AuxInt = 2
7312 v0.AddArg(x)
7313 v0.AddArg(x)
7314 v.AddArg(v0)
7315 return true
7316 }
7317
7318
7319
7320 for {
7321 x := v.Args[2]
7322 a := v.Args[0]
7323 v_1 := v.Args[1]
7324 if v_1.Op != OpARM64MOVDconst {
7325 break
7326 }
7327 c := v_1.AuxInt
7328 if !(c%5 == 0 && isPowerOfTwo(c/5)) {
7329 break
7330 }
7331 v.reset(OpARM64ADDshiftLL)
7332 v.AuxInt = log2(c / 5)
7333 v.AddArg(a)
7334 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
7335 v0.AuxInt = 2
7336 v0.AddArg(x)
7337 v0.AddArg(x)
7338 v.AddArg(v0)
7339 return true
7340 }
7341
7342
7343
7344 for {
7345 x := v.Args[2]
7346 a := v.Args[0]
7347 v_1 := v.Args[1]
7348 if v_1.Op != OpARM64MOVDconst {
7349 break
7350 }
7351 c := v_1.AuxInt
7352 if !(c%7 == 0 && isPowerOfTwo(c/7)) {
7353 break
7354 }
7355 v.reset(OpARM64SUBshiftLL)
7356 v.AuxInt = log2(c / 7)
7357 v.AddArg(a)
7358 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
7359 v0.AuxInt = 3
7360 v0.AddArg(x)
7361 v0.AddArg(x)
7362 v.AddArg(v0)
7363 return true
7364 }
7365
7366
7367
7368 for {
7369 x := v.Args[2]
7370 a := v.Args[0]
7371 v_1 := v.Args[1]
7372 if v_1.Op != OpARM64MOVDconst {
7373 break
7374 }
7375 c := v_1.AuxInt
7376 if !(c%9 == 0 && isPowerOfTwo(c/9)) {
7377 break
7378 }
7379 v.reset(OpARM64ADDshiftLL)
7380 v.AuxInt = log2(c / 9)
7381 v.AddArg(a)
7382 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
7383 v0.AuxInt = 3
7384 v0.AddArg(x)
7385 v0.AddArg(x)
7386 v.AddArg(v0)
7387 return true
7388 }
7389 return false
7390 }
7391 func rewriteValueARM64_OpARM64MADD_20(v *Value) bool {
7392 b := v.Block
7393
7394
7395
7396 for {
7397 y := v.Args[2]
7398 v_0 := v.Args[0]
7399 if v_0.Op != OpARM64MOVDconst {
7400 break
7401 }
7402 c := v_0.AuxInt
7403 x := v.Args[1]
7404 v.reset(OpARM64ADDconst)
7405 v.AuxInt = c
7406 v0 := b.NewValue0(v.Pos, OpARM64MUL, x.Type)
7407 v0.AddArg(x)
7408 v0.AddArg(y)
7409 v.AddArg(v0)
7410 return true
7411 }
7412
7413
7414
7415 for {
7416 _ = v.Args[2]
7417 a := v.Args[0]
7418 v_1 := v.Args[1]
7419 if v_1.Op != OpARM64MOVDconst {
7420 break
7421 }
7422 c := v_1.AuxInt
7423 v_2 := v.Args[2]
7424 if v_2.Op != OpARM64MOVDconst {
7425 break
7426 }
7427 d := v_2.AuxInt
7428 v.reset(OpARM64ADDconst)
7429 v.AuxInt = c * d
7430 v.AddArg(a)
7431 return true
7432 }
7433 return false
7434 }
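// rewriteValueARM64_OpARM64MADDW_0 is the 32-bit counterpart of the MADD rules: the same
// strength reductions apply with the constant interpreted as an int32, and the divided
// forms additionally require is32Bit(c).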
7435 func rewriteValueARM64_OpARM64MADDW_0(v *Value) bool {
7436 b := v.Block
7437
7438
7439
7440 for {
7441 _ = v.Args[2]
7442 a := v.Args[0]
7443 x := v.Args[1]
7444 v_2 := v.Args[2]
7445 if v_2.Op != OpARM64MOVDconst {
7446 break
7447 }
7448 c := v_2.AuxInt
7449 if !(int32(c) == -1) {
7450 break
7451 }
7452 v.reset(OpARM64SUB)
7453 v.AddArg(a)
7454 v.AddArg(x)
7455 return true
7456 }
7457
7458
7459
7460 for {
7461 _ = v.Args[2]
7462 a := v.Args[0]
7463 v_2 := v.Args[2]
7464 if v_2.Op != OpARM64MOVDconst {
7465 break
7466 }
7467 c := v_2.AuxInt
7468 if !(int32(c) == 0) {
7469 break
7470 }
7471 v.reset(OpCopy)
7472 v.Type = a.Type
7473 v.AddArg(a)
7474 return true
7475 }
7476
7477
7478
7479 for {
7480 _ = v.Args[2]
7481 a := v.Args[0]
7482 x := v.Args[1]
7483 v_2 := v.Args[2]
7484 if v_2.Op != OpARM64MOVDconst {
7485 break
7486 }
7487 c := v_2.AuxInt
7488 if !(int32(c) == 1) {
7489 break
7490 }
7491 v.reset(OpARM64ADD)
7492 v.AddArg(a)
7493 v.AddArg(x)
7494 return true
7495 }
7496
7497
7498
7499 for {
7500 _ = v.Args[2]
7501 a := v.Args[0]
7502 x := v.Args[1]
7503 v_2 := v.Args[2]
7504 if v_2.Op != OpARM64MOVDconst {
7505 break
7506 }
7507 c := v_2.AuxInt
7508 if !(isPowerOfTwo(c)) {
7509 break
7510 }
7511 v.reset(OpARM64ADDshiftLL)
7512 v.AuxInt = log2(c)
7513 v.AddArg(a)
7514 v.AddArg(x)
7515 return true
7516 }
7517
7518
7519
7520 for {
7521 _ = v.Args[2]
7522 a := v.Args[0]
7523 x := v.Args[1]
7524 v_2 := v.Args[2]
7525 if v_2.Op != OpARM64MOVDconst {
7526 break
7527 }
7528 c := v_2.AuxInt
7529 if !(isPowerOfTwo(c-1) && int32(c) >= 3) {
7530 break
7531 }
7532 v.reset(OpARM64ADD)
7533 v.AddArg(a)
7534 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
7535 v0.AuxInt = log2(c - 1)
7536 v0.AddArg(x)
7537 v0.AddArg(x)
7538 v.AddArg(v0)
7539 return true
7540 }
7541
7542
7543
7544 for {
7545 _ = v.Args[2]
7546 a := v.Args[0]
7547 x := v.Args[1]
7548 v_2 := v.Args[2]
7549 if v_2.Op != OpARM64MOVDconst {
7550 break
7551 }
7552 c := v_2.AuxInt
7553 if !(isPowerOfTwo(c+1) && int32(c) >= 7) {
7554 break
7555 }
7556 v.reset(OpARM64SUB)
7557 v.AddArg(a)
7558 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
7559 v0.AuxInt = log2(c + 1)
7560 v0.AddArg(x)
7561 v0.AddArg(x)
7562 v.AddArg(v0)
7563 return true
7564 }
7565
7566
7567
7568 for {
7569 _ = v.Args[2]
7570 a := v.Args[0]
7571 x := v.Args[1]
7572 v_2 := v.Args[2]
7573 if v_2.Op != OpARM64MOVDconst {
7574 break
7575 }
7576 c := v_2.AuxInt
7577 if !(c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)) {
7578 break
7579 }
7580 v.reset(OpARM64SUBshiftLL)
7581 v.AuxInt = log2(c / 3)
7582 v.AddArg(a)
7583 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
7584 v0.AuxInt = 2
7585 v0.AddArg(x)
7586 v0.AddArg(x)
7587 v.AddArg(v0)
7588 return true
7589 }
7590
7591
7592
7593 for {
7594 _ = v.Args[2]
7595 a := v.Args[0]
7596 x := v.Args[1]
7597 v_2 := v.Args[2]
7598 if v_2.Op != OpARM64MOVDconst {
7599 break
7600 }
7601 c := v_2.AuxInt
7602 if !(c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)) {
7603 break
7604 }
7605 v.reset(OpARM64ADDshiftLL)
7606 v.AuxInt = log2(c / 5)
7607 v.AddArg(a)
7608 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
7609 v0.AuxInt = 2
7610 v0.AddArg(x)
7611 v0.AddArg(x)
7612 v.AddArg(v0)
7613 return true
7614 }
7615
7616
7617
7618 for {
7619 _ = v.Args[2]
7620 a := v.Args[0]
7621 x := v.Args[1]
7622 v_2 := v.Args[2]
7623 if v_2.Op != OpARM64MOVDconst {
7624 break
7625 }
7626 c := v_2.AuxInt
7627 if !(c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)) {
7628 break
7629 }
7630 v.reset(OpARM64SUBshiftLL)
7631 v.AuxInt = log2(c / 7)
7632 v.AddArg(a)
7633 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
7634 v0.AuxInt = 3
7635 v0.AddArg(x)
7636 v0.AddArg(x)
7637 v.AddArg(v0)
7638 return true
7639 }
7640
7641
7642
7643 for {
7644 _ = v.Args[2]
7645 a := v.Args[0]
7646 x := v.Args[1]
7647 v_2 := v.Args[2]
7648 if v_2.Op != OpARM64MOVDconst {
7649 break
7650 }
7651 c := v_2.AuxInt
7652 if !(c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)) {
7653 break
7654 }
7655 v.reset(OpARM64ADDshiftLL)
7656 v.AuxInt = log2(c / 9)
7657 v.AddArg(a)
7658 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
7659 v0.AuxInt = 3
7660 v0.AddArg(x)
7661 v0.AddArg(x)
7662 v.AddArg(v0)
7663 return true
7664 }
7665 return false
7666 }
7667 func rewriteValueARM64_OpARM64MADDW_10(v *Value) bool {
7668 b := v.Block
7669
7670
7671
7672 for {
7673 x := v.Args[2]
7674 a := v.Args[0]
7675 v_1 := v.Args[1]
7676 if v_1.Op != OpARM64MOVDconst {
7677 break
7678 }
7679 c := v_1.AuxInt
7680 if !(int32(c) == -1) {
7681 break
7682 }
7683 v.reset(OpARM64SUB)
7684 v.AddArg(a)
7685 v.AddArg(x)
7686 return true
7687 }
7688
7689
7690
7691 for {
7692 _ = v.Args[2]
7693 a := v.Args[0]
7694 v_1 := v.Args[1]
7695 if v_1.Op != OpARM64MOVDconst {
7696 break
7697 }
7698 c := v_1.AuxInt
7699 if !(int32(c) == 0) {
7700 break
7701 }
7702 v.reset(OpCopy)
7703 v.Type = a.Type
7704 v.AddArg(a)
7705 return true
7706 }
7707
7708
7709
7710 for {
7711 x := v.Args[2]
7712 a := v.Args[0]
7713 v_1 := v.Args[1]
7714 if v_1.Op != OpARM64MOVDconst {
7715 break
7716 }
7717 c := v_1.AuxInt
7718 if !(int32(c) == 1) {
7719 break
7720 }
7721 v.reset(OpARM64ADD)
7722 v.AddArg(a)
7723 v.AddArg(x)
7724 return true
7725 }
7726
7727
7728
7729 for {
7730 x := v.Args[2]
7731 a := v.Args[0]
7732 v_1 := v.Args[1]
7733 if v_1.Op != OpARM64MOVDconst {
7734 break
7735 }
7736 c := v_1.AuxInt
7737 if !(isPowerOfTwo(c)) {
7738 break
7739 }
7740 v.reset(OpARM64ADDshiftLL)
7741 v.AuxInt = log2(c)
7742 v.AddArg(a)
7743 v.AddArg(x)
7744 return true
7745 }
7746
7747
7748
7749 for {
7750 x := v.Args[2]
7751 a := v.Args[0]
7752 v_1 := v.Args[1]
7753 if v_1.Op != OpARM64MOVDconst {
7754 break
7755 }
7756 c := v_1.AuxInt
7757 if !(isPowerOfTwo(c-1) && int32(c) >= 3) {
7758 break
7759 }
7760 v.reset(OpARM64ADD)
7761 v.AddArg(a)
7762 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
7763 v0.AuxInt = log2(c - 1)
7764 v0.AddArg(x)
7765 v0.AddArg(x)
7766 v.AddArg(v0)
7767 return true
7768 }
7769
7770
7771
7772 for {
7773 x := v.Args[2]
7774 a := v.Args[0]
7775 v_1 := v.Args[1]
7776 if v_1.Op != OpARM64MOVDconst {
7777 break
7778 }
7779 c := v_1.AuxInt
7780 if !(isPowerOfTwo(c+1) && int32(c) >= 7) {
7781 break
7782 }
7783 v.reset(OpARM64SUB)
7784 v.AddArg(a)
7785 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
7786 v0.AuxInt = log2(c + 1)
7787 v0.AddArg(x)
7788 v0.AddArg(x)
7789 v.AddArg(v0)
7790 return true
7791 }
7792
7793
7794
7795 for {
7796 x := v.Args[2]
7797 a := v.Args[0]
7798 v_1 := v.Args[1]
7799 if v_1.Op != OpARM64MOVDconst {
7800 break
7801 }
7802 c := v_1.AuxInt
7803 if !(c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)) {
7804 break
7805 }
7806 v.reset(OpARM64SUBshiftLL)
7807 v.AuxInt = log2(c / 3)
7808 v.AddArg(a)
7809 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
7810 v0.AuxInt = 2
7811 v0.AddArg(x)
7812 v0.AddArg(x)
7813 v.AddArg(v0)
7814 return true
7815 }
7816
7817
7818
7819 for {
7820 x := v.Args[2]
7821 a := v.Args[0]
7822 v_1 := v.Args[1]
7823 if v_1.Op != OpARM64MOVDconst {
7824 break
7825 }
7826 c := v_1.AuxInt
7827 if !(c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)) {
7828 break
7829 }
7830 v.reset(OpARM64ADDshiftLL)
7831 v.AuxInt = log2(c / 5)
7832 v.AddArg(a)
7833 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
7834 v0.AuxInt = 2
7835 v0.AddArg(x)
7836 v0.AddArg(x)
7837 v.AddArg(v0)
7838 return true
7839 }
7840
7841
7842
7843 for {
7844 x := v.Args[2]
7845 a := v.Args[0]
7846 v_1 := v.Args[1]
7847 if v_1.Op != OpARM64MOVDconst {
7848 break
7849 }
7850 c := v_1.AuxInt
7851 if !(c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)) {
7852 break
7853 }
7854 v.reset(OpARM64SUBshiftLL)
7855 v.AuxInt = log2(c / 7)
7856 v.AddArg(a)
7857 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
7858 v0.AuxInt = 3
7859 v0.AddArg(x)
7860 v0.AddArg(x)
7861 v.AddArg(v0)
7862 return true
7863 }
7864
7865
7866
7867 for {
7868 x := v.Args[2]
7869 a := v.Args[0]
7870 v_1 := v.Args[1]
7871 if v_1.Op != OpARM64MOVDconst {
7872 break
7873 }
7874 c := v_1.AuxInt
7875 if !(c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)) {
7876 break
7877 }
7878 v.reset(OpARM64ADDshiftLL)
7879 v.AuxInt = log2(c / 9)
7880 v.AddArg(a)
7881 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
7882 v0.AuxInt = 3
7883 v0.AddArg(x)
7884 v0.AddArg(x)
7885 v.AddArg(v0)
7886 return true
7887 }
7888 return false
7889 }
7890 func rewriteValueARM64_OpARM64MADDW_20(v *Value) bool {
7891 b := v.Block
7892
7893
7894
7895 for {
7896 y := v.Args[2]
7897 v_0 := v.Args[0]
7898 if v_0.Op != OpARM64MOVDconst {
7899 break
7900 }
7901 c := v_0.AuxInt
7902 x := v.Args[1]
7903 v.reset(OpARM64ADDconst)
7904 v.AuxInt = c
7905 v0 := b.NewValue0(v.Pos, OpARM64MULW, x.Type)
7906 v0.AddArg(x)
7907 v0.AddArg(y)
7908 v.AddArg(v0)
7909 return true
7910 }
7911
7912
7913
7914 for {
7915 _ = v.Args[2]
7916 a := v.Args[0]
7917 v_1 := v.Args[1]
7918 if v_1.Op != OpARM64MOVDconst {
7919 break
7920 }
7921 c := v_1.AuxInt
7922 v_2 := v.Args[2]
7923 if v_2.Op != OpARM64MOVDconst {
7924 break
7925 }
7926 d := v_2.AuxInt
7927 v.reset(OpARM64ADDconst)
7928 v.AuxInt = int64(int32(c) * int32(d))
7929 v.AddArg(a)
7930 return true
7931 }
7932 return false
7933 }
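// rewriteValueARM64_OpARM64MNEG_0 strength-reduces MNEG x (MOVDconst [c]) in either operand
// order: -1, 0 and 1 become a copy of x, a zero constant, and NEG; powers of two become a
// negated SLLconst; and 2^n±1 or 3/5/7/9 times a power of two become shift-and-add or
// shift-and-sub combinations. The remaining forms are in _10, and _20 folds the
// all-constant case to -c*d.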
7934 func rewriteValueARM64_OpARM64MNEG_0(v *Value) bool {
7935 b := v.Block
7936
7937
7938
7939 for {
7940 _ = v.Args[1]
7941 x := v.Args[0]
7942 v_1 := v.Args[1]
7943 if v_1.Op != OpARM64MOVDconst {
7944 break
7945 }
7946 if v_1.AuxInt != -1 {
7947 break
7948 }
7949 v.reset(OpCopy)
7950 v.Type = x.Type
7951 v.AddArg(x)
7952 return true
7953 }
7954
7955
7956
7957 for {
7958 x := v.Args[1]
7959 v_0 := v.Args[0]
7960 if v_0.Op != OpARM64MOVDconst {
7961 break
7962 }
7963 if v_0.AuxInt != -1 {
7964 break
7965 }
7966 v.reset(OpCopy)
7967 v.Type = x.Type
7968 v.AddArg(x)
7969 return true
7970 }
7971
7972
7973
7974 for {
7975 _ = v.Args[1]
7976 v_1 := v.Args[1]
7977 if v_1.Op != OpARM64MOVDconst {
7978 break
7979 }
7980 if v_1.AuxInt != 0 {
7981 break
7982 }
7983 v.reset(OpARM64MOVDconst)
7984 v.AuxInt = 0
7985 return true
7986 }
7987
7988
7989
7990 for {
7991 _ = v.Args[1]
7992 v_0 := v.Args[0]
7993 if v_0.Op != OpARM64MOVDconst {
7994 break
7995 }
7996 if v_0.AuxInt != 0 {
7997 break
7998 }
7999 v.reset(OpARM64MOVDconst)
8000 v.AuxInt = 0
8001 return true
8002 }
8003
8004
8005
8006 for {
8007 _ = v.Args[1]
8008 x := v.Args[0]
8009 v_1 := v.Args[1]
8010 if v_1.Op != OpARM64MOVDconst {
8011 break
8012 }
8013 if v_1.AuxInt != 1 {
8014 break
8015 }
8016 v.reset(OpARM64NEG)
8017 v.AddArg(x)
8018 return true
8019 }
8020
8021
8022
8023 for {
8024 x := v.Args[1]
8025 v_0 := v.Args[0]
8026 if v_0.Op != OpARM64MOVDconst {
8027 break
8028 }
8029 if v_0.AuxInt != 1 {
8030 break
8031 }
8032 v.reset(OpARM64NEG)
8033 v.AddArg(x)
8034 return true
8035 }
8036
8037
8038
8039 for {
8040 _ = v.Args[1]
8041 x := v.Args[0]
8042 v_1 := v.Args[1]
8043 if v_1.Op != OpARM64MOVDconst {
8044 break
8045 }
8046 c := v_1.AuxInt
8047 if !(isPowerOfTwo(c)) {
8048 break
8049 }
8050 v.reset(OpARM64NEG)
8051 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
8052 v0.AuxInt = log2(c)
8053 v0.AddArg(x)
8054 v.AddArg(v0)
8055 return true
8056 }
8057
8058
8059
8060 for {
8061 x := v.Args[1]
8062 v_0 := v.Args[0]
8063 if v_0.Op != OpARM64MOVDconst {
8064 break
8065 }
8066 c := v_0.AuxInt
8067 if !(isPowerOfTwo(c)) {
8068 break
8069 }
8070 v.reset(OpARM64NEG)
8071 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
8072 v0.AuxInt = log2(c)
8073 v0.AddArg(x)
8074 v.AddArg(v0)
8075 return true
8076 }
8077
8078
8079
8080 for {
8081 _ = v.Args[1]
8082 x := v.Args[0]
8083 v_1 := v.Args[1]
8084 if v_1.Op != OpARM64MOVDconst {
8085 break
8086 }
8087 c := v_1.AuxInt
8088 if !(isPowerOfTwo(c-1) && c >= 3) {
8089 break
8090 }
8091 v.reset(OpARM64NEG)
8092 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
8093 v0.AuxInt = log2(c - 1)
8094 v0.AddArg(x)
8095 v0.AddArg(x)
8096 v.AddArg(v0)
8097 return true
8098 }
8099
8100
8101
8102 for {
8103 x := v.Args[1]
8104 v_0 := v.Args[0]
8105 if v_0.Op != OpARM64MOVDconst {
8106 break
8107 }
8108 c := v_0.AuxInt
8109 if !(isPowerOfTwo(c-1) && c >= 3) {
8110 break
8111 }
8112 v.reset(OpARM64NEG)
8113 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
8114 v0.AuxInt = log2(c - 1)
8115 v0.AddArg(x)
8116 v0.AddArg(x)
8117 v.AddArg(v0)
8118 return true
8119 }
8120 return false
8121 }
8122 func rewriteValueARM64_OpARM64MNEG_10(v *Value) bool {
8123 b := v.Block
8124
8125
8126
8127 for {
8128 _ = v.Args[1]
8129 x := v.Args[0]
8130 v_1 := v.Args[1]
8131 if v_1.Op != OpARM64MOVDconst {
8132 break
8133 }
8134 c := v_1.AuxInt
8135 if !(isPowerOfTwo(c+1) && c >= 7) {
8136 break
8137 }
8138 v.reset(OpARM64NEG)
8139 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
8140 v0.AuxInt = log2(c + 1)
8141 v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
8142 v1.AddArg(x)
8143 v0.AddArg(v1)
8144 v0.AddArg(x)
8145 v.AddArg(v0)
8146 return true
8147 }
8148
8149
8150
8151 for {
8152 x := v.Args[1]
8153 v_0 := v.Args[0]
8154 if v_0.Op != OpARM64MOVDconst {
8155 break
8156 }
8157 c := v_0.AuxInt
8158 if !(isPowerOfTwo(c+1) && c >= 7) {
8159 break
8160 }
8161 v.reset(OpARM64NEG)
8162 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
8163 v0.AuxInt = log2(c + 1)
8164 v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
8165 v1.AddArg(x)
8166 v0.AddArg(v1)
8167 v0.AddArg(x)
8168 v.AddArg(v0)
8169 return true
8170 }
8171
8172
8173
8174 for {
8175 _ = v.Args[1]
8176 x := v.Args[0]
8177 v_1 := v.Args[1]
8178 if v_1.Op != OpARM64MOVDconst {
8179 break
8180 }
8181 c := v_1.AuxInt
8182 if !(c%3 == 0 && isPowerOfTwo(c/3)) {
8183 break
8184 }
8185 v.reset(OpARM64SLLconst)
8186 v.Type = x.Type
8187 v.AuxInt = log2(c / 3)
8188 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
8189 v0.AuxInt = 2
8190 v0.AddArg(x)
8191 v0.AddArg(x)
8192 v.AddArg(v0)
8193 return true
8194 }
8195
8196
8197
8198 for {
8199 x := v.Args[1]
8200 v_0 := v.Args[0]
8201 if v_0.Op != OpARM64MOVDconst {
8202 break
8203 }
8204 c := v_0.AuxInt
8205 if !(c%3 == 0 && isPowerOfTwo(c/3)) {
8206 break
8207 }
8208 v.reset(OpARM64SLLconst)
8209 v.Type = x.Type
8210 v.AuxInt = log2(c / 3)
8211 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
8212 v0.AuxInt = 2
8213 v0.AddArg(x)
8214 v0.AddArg(x)
8215 v.AddArg(v0)
8216 return true
8217 }
8218
8219
8220
8221 for {
8222 _ = v.Args[1]
8223 x := v.Args[0]
8224 v_1 := v.Args[1]
8225 if v_1.Op != OpARM64MOVDconst {
8226 break
8227 }
8228 c := v_1.AuxInt
8229 if !(c%5 == 0 && isPowerOfTwo(c/5)) {
8230 break
8231 }
8232 v.reset(OpARM64NEG)
8233 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
8234 v0.AuxInt = log2(c / 5)
8235 v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
8236 v1.AuxInt = 2
8237 v1.AddArg(x)
8238 v1.AddArg(x)
8239 v0.AddArg(v1)
8240 v.AddArg(v0)
8241 return true
8242 }
8243
8244
8245
8246 for {
8247 x := v.Args[1]
8248 v_0 := v.Args[0]
8249 if v_0.Op != OpARM64MOVDconst {
8250 break
8251 }
8252 c := v_0.AuxInt
8253 if !(c%5 == 0 && isPowerOfTwo(c/5)) {
8254 break
8255 }
8256 v.reset(OpARM64NEG)
8257 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
8258 v0.AuxInt = log2(c / 5)
8259 v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
8260 v1.AuxInt = 2
8261 v1.AddArg(x)
8262 v1.AddArg(x)
8263 v0.AddArg(v1)
8264 v.AddArg(v0)
8265 return true
8266 }
8267
8268
8269
8270 for {
8271 _ = v.Args[1]
8272 x := v.Args[0]
8273 v_1 := v.Args[1]
8274 if v_1.Op != OpARM64MOVDconst {
8275 break
8276 }
8277 c := v_1.AuxInt
8278 if !(c%7 == 0 && isPowerOfTwo(c/7)) {
8279 break
8280 }
8281 v.reset(OpARM64SLLconst)
8282 v.Type = x.Type
8283 v.AuxInt = log2(c / 7)
8284 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
8285 v0.AuxInt = 3
8286 v0.AddArg(x)
8287 v0.AddArg(x)
8288 v.AddArg(v0)
8289 return true
8290 }
8291
8292
8293
8294 for {
8295 x := v.Args[1]
8296 v_0 := v.Args[0]
8297 if v_0.Op != OpARM64MOVDconst {
8298 break
8299 }
8300 c := v_0.AuxInt
8301 if !(c%7 == 0 && isPowerOfTwo(c/7)) {
8302 break
8303 }
8304 v.reset(OpARM64SLLconst)
8305 v.Type = x.Type
8306 v.AuxInt = log2(c / 7)
8307 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
8308 v0.AuxInt = 3
8309 v0.AddArg(x)
8310 v0.AddArg(x)
8311 v.AddArg(v0)
8312 return true
8313 }
8314
8315
8316
8317 for {
8318 _ = v.Args[1]
8319 x := v.Args[0]
8320 v_1 := v.Args[1]
8321 if v_1.Op != OpARM64MOVDconst {
8322 break
8323 }
8324 c := v_1.AuxInt
8325 if !(c%9 == 0 && isPowerOfTwo(c/9)) {
8326 break
8327 }
8328 v.reset(OpARM64NEG)
8329 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
8330 v0.AuxInt = log2(c / 9)
8331 v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
8332 v1.AuxInt = 3
8333 v1.AddArg(x)
8334 v1.AddArg(x)
8335 v0.AddArg(v1)
8336 v.AddArg(v0)
8337 return true
8338 }
8339
8340
8341
8342 for {
8343 x := v.Args[1]
8344 v_0 := v.Args[0]
8345 if v_0.Op != OpARM64MOVDconst {
8346 break
8347 }
8348 c := v_0.AuxInt
8349 if !(c%9 == 0 && isPowerOfTwo(c/9)) {
8350 break
8351 }
8352 v.reset(OpARM64NEG)
8353 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
8354 v0.AuxInt = log2(c / 9)
8355 v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
8356 v1.AuxInt = 3
8357 v1.AddArg(x)
8358 v1.AddArg(x)
8359 v0.AddArg(v1)
8360 v.AddArg(v0)
8361 return true
8362 }
8363 return false
8364 }
8365 func rewriteValueARM64_OpARM64MNEG_20(v *Value) bool {
8366
8367
8368
8369 for {
8370 _ = v.Args[1]
8371 v_0 := v.Args[0]
8372 if v_0.Op != OpARM64MOVDconst {
8373 break
8374 }
8375 c := v_0.AuxInt
8376 v_1 := v.Args[1]
8377 if v_1.Op != OpARM64MOVDconst {
8378 break
8379 }
8380 d := v_1.AuxInt
8381 v.reset(OpARM64MOVDconst)
8382 v.AuxInt = -c * d
8383 return true
8384 }
8385
8386
8387
8388 for {
8389 _ = v.Args[1]
8390 v_0 := v.Args[0]
8391 if v_0.Op != OpARM64MOVDconst {
8392 break
8393 }
8394 d := v_0.AuxInt
8395 v_1 := v.Args[1]
8396 if v_1.Op != OpARM64MOVDconst {
8397 break
8398 }
8399 c := v_1.AuxInt
8400 v.reset(OpARM64MOVDconst)
8401 v.AuxInt = -c * d
8402 return true
8403 }
8404 return false
8405 }
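// rewriteValueARM64_OpARM64MNEGW_0 applies the MNEG strength reductions to MNEGW, with the
// constant compared as an int32 and is32Bit(c) required for the divided forms.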
8406 func rewriteValueARM64_OpARM64MNEGW_0(v *Value) bool {
8407 b := v.Block
8408
8409
8410
8411 for {
8412 _ = v.Args[1]
8413 x := v.Args[0]
8414 v_1 := v.Args[1]
8415 if v_1.Op != OpARM64MOVDconst {
8416 break
8417 }
8418 c := v_1.AuxInt
8419 if !(int32(c) == -1) {
8420 break
8421 }
8422 v.reset(OpCopy)
8423 v.Type = x.Type
8424 v.AddArg(x)
8425 return true
8426 }
8427
8428
8429
8430 for {
8431 x := v.Args[1]
8432 v_0 := v.Args[0]
8433 if v_0.Op != OpARM64MOVDconst {
8434 break
8435 }
8436 c := v_0.AuxInt
8437 if !(int32(c) == -1) {
8438 break
8439 }
8440 v.reset(OpCopy)
8441 v.Type = x.Type
8442 v.AddArg(x)
8443 return true
8444 }
8445
8446
8447
8448 for {
8449 _ = v.Args[1]
8450 v_1 := v.Args[1]
8451 if v_1.Op != OpARM64MOVDconst {
8452 break
8453 }
8454 c := v_1.AuxInt
8455 if !(int32(c) == 0) {
8456 break
8457 }
8458 v.reset(OpARM64MOVDconst)
8459 v.AuxInt = 0
8460 return true
8461 }
8462
8463
8464
8465 for {
8466 _ = v.Args[1]
8467 v_0 := v.Args[0]
8468 if v_0.Op != OpARM64MOVDconst {
8469 break
8470 }
8471 c := v_0.AuxInt
8472 if !(int32(c) == 0) {
8473 break
8474 }
8475 v.reset(OpARM64MOVDconst)
8476 v.AuxInt = 0
8477 return true
8478 }
8479
8480
8481
8482 for {
8483 _ = v.Args[1]
8484 x := v.Args[0]
8485 v_1 := v.Args[1]
8486 if v_1.Op != OpARM64MOVDconst {
8487 break
8488 }
8489 c := v_1.AuxInt
8490 if !(int32(c) == 1) {
8491 break
8492 }
8493 v.reset(OpARM64NEG)
8494 v.AddArg(x)
8495 return true
8496 }
8497
8498
8499
8500 for {
8501 x := v.Args[1]
8502 v_0 := v.Args[0]
8503 if v_0.Op != OpARM64MOVDconst {
8504 break
8505 }
8506 c := v_0.AuxInt
8507 if !(int32(c) == 1) {
8508 break
8509 }
8510 v.reset(OpARM64NEG)
8511 v.AddArg(x)
8512 return true
8513 }
8514
8515
8516
8517 for {
8518 _ = v.Args[1]
8519 x := v.Args[0]
8520 v_1 := v.Args[1]
8521 if v_1.Op != OpARM64MOVDconst {
8522 break
8523 }
8524 c := v_1.AuxInt
8525 if !(isPowerOfTwo(c)) {
8526 break
8527 }
8528 v.reset(OpARM64NEG)
8529 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
8530 v0.AuxInt = log2(c)
8531 v0.AddArg(x)
8532 v.AddArg(v0)
8533 return true
8534 }
8535
8536
8537
8538 for {
8539 x := v.Args[1]
8540 v_0 := v.Args[0]
8541 if v_0.Op != OpARM64MOVDconst {
8542 break
8543 }
8544 c := v_0.AuxInt
8545 if !(isPowerOfTwo(c)) {
8546 break
8547 }
8548 v.reset(OpARM64NEG)
8549 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
8550 v0.AuxInt = log2(c)
8551 v0.AddArg(x)
8552 v.AddArg(v0)
8553 return true
8554 }
8555
8556
8557
8558 for {
8559 _ = v.Args[1]
8560 x := v.Args[0]
8561 v_1 := v.Args[1]
8562 if v_1.Op != OpARM64MOVDconst {
8563 break
8564 }
8565 c := v_1.AuxInt
8566 if !(isPowerOfTwo(c-1) && int32(c) >= 3) {
8567 break
8568 }
8569 v.reset(OpARM64NEG)
8570 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
8571 v0.AuxInt = log2(c - 1)
8572 v0.AddArg(x)
8573 v0.AddArg(x)
8574 v.AddArg(v0)
8575 return true
8576 }
8577
8578
8579
8580 for {
8581 x := v.Args[1]
8582 v_0 := v.Args[0]
8583 if v_0.Op != OpARM64MOVDconst {
8584 break
8585 }
8586 c := v_0.AuxInt
8587 if !(isPowerOfTwo(c-1) && int32(c) >= 3) {
8588 break
8589 }
8590 v.reset(OpARM64NEG)
8591 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
8592 v0.AuxInt = log2(c - 1)
8593 v0.AddArg(x)
8594 v0.AddArg(x)
8595 v.AddArg(v0)
8596 return true
8597 }
8598 return false
8599 }
8600 func rewriteValueARM64_OpARM64MNEGW_10(v *Value) bool {
8601 b := v.Block
8602
8603
8604
8605 for {
8606 _ = v.Args[1]
8607 x := v.Args[0]
8608 v_1 := v.Args[1]
8609 if v_1.Op != OpARM64MOVDconst {
8610 break
8611 }
8612 c := v_1.AuxInt
8613 if !(isPowerOfTwo(c+1) && int32(c) >= 7) {
8614 break
8615 }
8616 v.reset(OpARM64NEG)
8617 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
8618 v0.AuxInt = log2(c + 1)
8619 v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
8620 v1.AddArg(x)
8621 v0.AddArg(v1)
8622 v0.AddArg(x)
8623 v.AddArg(v0)
8624 return true
8625 }
8626
8627
8628
8629 for {
8630 x := v.Args[1]
8631 v_0 := v.Args[0]
8632 if v_0.Op != OpARM64MOVDconst {
8633 break
8634 }
8635 c := v_0.AuxInt
8636 if !(isPowerOfTwo(c+1) && int32(c) >= 7) {
8637 break
8638 }
8639 v.reset(OpARM64NEG)
8640 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
8641 v0.AuxInt = log2(c + 1)
8642 v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
8643 v1.AddArg(x)
8644 v0.AddArg(v1)
8645 v0.AddArg(x)
8646 v.AddArg(v0)
8647 return true
8648 }
8649
8650
8651
8652 for {
8653 _ = v.Args[1]
8654 x := v.Args[0]
8655 v_1 := v.Args[1]
8656 if v_1.Op != OpARM64MOVDconst {
8657 break
8658 }
8659 c := v_1.AuxInt
8660 if !(c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)) {
8661 break
8662 }
8663 v.reset(OpARM64SLLconst)
8664 v.Type = x.Type
8665 v.AuxInt = log2(c / 3)
8666 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
8667 v0.AuxInt = 2
8668 v0.AddArg(x)
8669 v0.AddArg(x)
8670 v.AddArg(v0)
8671 return true
8672 }
8673
8674
8675
8676 for {
8677 x := v.Args[1]
8678 v_0 := v.Args[0]
8679 if v_0.Op != OpARM64MOVDconst {
8680 break
8681 }
8682 c := v_0.AuxInt
8683 if !(c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)) {
8684 break
8685 }
8686 v.reset(OpARM64SLLconst)
8687 v.Type = x.Type
8688 v.AuxInt = log2(c / 3)
8689 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
8690 v0.AuxInt = 2
8691 v0.AddArg(x)
8692 v0.AddArg(x)
8693 v.AddArg(v0)
8694 return true
8695 }
8696
8697
8698
8699 for {
8700 _ = v.Args[1]
8701 x := v.Args[0]
8702 v_1 := v.Args[1]
8703 if v_1.Op != OpARM64MOVDconst {
8704 break
8705 }
8706 c := v_1.AuxInt
8707 if !(c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)) {
8708 break
8709 }
8710 v.reset(OpARM64NEG)
8711 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
8712 v0.AuxInt = log2(c / 5)
8713 v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
8714 v1.AuxInt = 2
8715 v1.AddArg(x)
8716 v1.AddArg(x)
8717 v0.AddArg(v1)
8718 v.AddArg(v0)
8719 return true
8720 }
8721
8722
8723
8724 for {
8725 x := v.Args[1]
8726 v_0 := v.Args[0]
8727 if v_0.Op != OpARM64MOVDconst {
8728 break
8729 }
8730 c := v_0.AuxInt
8731 if !(c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)) {
8732 break
8733 }
8734 v.reset(OpARM64NEG)
8735 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
8736 v0.AuxInt = log2(c / 5)
8737 v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
8738 v1.AuxInt = 2
8739 v1.AddArg(x)
8740 v1.AddArg(x)
8741 v0.AddArg(v1)
8742 v.AddArg(v0)
8743 return true
8744 }
8745
8746
8747
8748 for {
8749 _ = v.Args[1]
8750 x := v.Args[0]
8751 v_1 := v.Args[1]
8752 if v_1.Op != OpARM64MOVDconst {
8753 break
8754 }
8755 c := v_1.AuxInt
8756 if !(c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)) {
8757 break
8758 }
8759 v.reset(OpARM64SLLconst)
8760 v.Type = x.Type
8761 v.AuxInt = log2(c / 7)
8762 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
8763 v0.AuxInt = 3
8764 v0.AddArg(x)
8765 v0.AddArg(x)
8766 v.AddArg(v0)
8767 return true
8768 }
8769
8770
8771
8772 for {
8773 x := v.Args[1]
8774 v_0 := v.Args[0]
8775 if v_0.Op != OpARM64MOVDconst {
8776 break
8777 }
8778 c := v_0.AuxInt
8779 if !(c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)) {
8780 break
8781 }
8782 v.reset(OpARM64SLLconst)
8783 v.Type = x.Type
8784 v.AuxInt = log2(c / 7)
8785 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
8786 v0.AuxInt = 3
8787 v0.AddArg(x)
8788 v0.AddArg(x)
8789 v.AddArg(v0)
8790 return true
8791 }
8792
8793
8794
8795 for {
8796 _ = v.Args[1]
8797 x := v.Args[0]
8798 v_1 := v.Args[1]
8799 if v_1.Op != OpARM64MOVDconst {
8800 break
8801 }
8802 c := v_1.AuxInt
8803 if !(c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)) {
8804 break
8805 }
8806 v.reset(OpARM64NEG)
8807 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
8808 v0.AuxInt = log2(c / 9)
8809 v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
8810 v1.AuxInt = 3
8811 v1.AddArg(x)
8812 v1.AddArg(x)
8813 v0.AddArg(v1)
8814 v.AddArg(v0)
8815 return true
8816 }
8817
8818
8819
8820 for {
8821 x := v.Args[1]
8822 v_0 := v.Args[0]
8823 if v_0.Op != OpARM64MOVDconst {
8824 break
8825 }
8826 c := v_0.AuxInt
8827 if !(c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)) {
8828 break
8829 }
8830 v.reset(OpARM64NEG)
8831 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
8832 v0.AuxInt = log2(c / 9)
8833 v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
8834 v1.AuxInt = 3
8835 v1.AddArg(x)
8836 v1.AddArg(x)
8837 v0.AddArg(v1)
8838 v.AddArg(v0)
8839 return true
8840 }
8841 return false
8842 }
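// rewriteValueARM64_OpARM64MNEGW_20 constant-folds MNEGW when both operands are
// MOVDconst: the result becomes MOVDconst [-int64(int32(c)*int32(d))], for either
// operand order.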
8843 func rewriteValueARM64_OpARM64MNEGW_20(v *Value) bool {
8844
8845
8846
8847 for {
8848 _ = v.Args[1]
8849 v_0 := v.Args[0]
8850 if v_0.Op != OpARM64MOVDconst {
8851 break
8852 }
8853 c := v_0.AuxInt
8854 v_1 := v.Args[1]
8855 if v_1.Op != OpARM64MOVDconst {
8856 break
8857 }
8858 d := v_1.AuxInt
8859 v.reset(OpARM64MOVDconst)
8860 v.AuxInt = -int64(int32(c) * int32(d))
8861 return true
8862 }
8863
8864
8865
8866 for {
8867 _ = v.Args[1]
8868 v_0 := v.Args[0]
8869 if v_0.Op != OpARM64MOVDconst {
8870 break
8871 }
8872 d := v_0.AuxInt
8873 v_1 := v.Args[1]
8874 if v_1.Op != OpARM64MOVDconst {
8875 break
8876 }
8877 c := v_1.AuxInt
8878 v.reset(OpARM64MOVDconst)
8879 v.AuxInt = -int64(int32(c) * int32(d))
8880 return true
8881 }
8882 return false
8883 }
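// rewriteValueARM64_OpARM64MOD_0 constant-folds a signed 64-bit modulus: MOD of two
// MOVDconst operands becomes MOVDconst [c%d].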
8884 func rewriteValueARM64_OpARM64MOD_0(v *Value) bool {
8885
8886
8887
8888 for {
8889 _ = v.Args[1]
8890 v_0 := v.Args[0]
8891 if v_0.Op != OpARM64MOVDconst {
8892 break
8893 }
8894 c := v_0.AuxInt
8895 v_1 := v.Args[1]
8896 if v_1.Op != OpARM64MOVDconst {
8897 break
8898 }
8899 d := v_1.AuxInt
8900 v.reset(OpARM64MOVDconst)
8901 v.AuxInt = c % d
8902 return true
8903 }
8904 return false
8905 }
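// rewriteValueARM64_OpARM64MODW_0 constant-folds a signed 32-bit modulus: MODW of two
// MOVDconst operands becomes MOVDconst [int64(int32(c)%int32(d))].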
8906 func rewriteValueARM64_OpARM64MODW_0(v *Value) bool {
8907
8908
8909
8910 for {
8911 _ = v.Args[1]
8912 v_0 := v.Args[0]
8913 if v_0.Op != OpARM64MOVDconst {
8914 break
8915 }
8916 c := v_0.AuxInt
8917 v_1 := v.Args[1]
8918 if v_1.Op != OpARM64MOVDconst {
8919 break
8920 }
8921 d := v_1.AuxInt
8922 v.reset(OpARM64MOVDconst)
8923 v.AuxInt = int64(int32(c) % int32(d))
8924 return true
8925 }
8926 return false
8927 }
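// rewriteValueARM64_OpARM64MOVBUload_0 optimizes unsigned byte loads: it folds ADDconst
// and MOVDaddr address arithmetic into the load's offset and symbol, converts a load from
// (ADD ptr idx) with zero offset and nil symbol into the indexed form MOVBUloadidx,
// replaces a load of a byte just zeroed by MOVBstorezero with MOVDconst [0], and
// constant-folds a load from a read-only symbol via read8.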
8928 func rewriteValueARM64_OpARM64MOVBUload_0(v *Value) bool {
8929 b := v.Block
8930 config := b.Func.Config
8931
8932
8933
8934 for {
8935 off1 := v.AuxInt
8936 sym := v.Aux
8937 mem := v.Args[1]
8938 v_0 := v.Args[0]
8939 if v_0.Op != OpARM64ADDconst {
8940 break
8941 }
8942 off2 := v_0.AuxInt
8943 ptr := v_0.Args[0]
8944 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
8945 break
8946 }
8947 v.reset(OpARM64MOVBUload)
8948 v.AuxInt = off1 + off2
8949 v.Aux = sym
8950 v.AddArg(ptr)
8951 v.AddArg(mem)
8952 return true
8953 }
8954
8955
8956
8957 for {
8958 off := v.AuxInt
8959 sym := v.Aux
8960 mem := v.Args[1]
8961 v_0 := v.Args[0]
8962 if v_0.Op != OpARM64ADD {
8963 break
8964 }
8965 idx := v_0.Args[1]
8966 ptr := v_0.Args[0]
8967 if !(off == 0 && sym == nil) {
8968 break
8969 }
8970 v.reset(OpARM64MOVBUloadidx)
8971 v.AddArg(ptr)
8972 v.AddArg(idx)
8973 v.AddArg(mem)
8974 return true
8975 }
8976
8977
8978
8979 for {
8980 off1 := v.AuxInt
8981 sym1 := v.Aux
8982 mem := v.Args[1]
8983 v_0 := v.Args[0]
8984 if v_0.Op != OpARM64MOVDaddr {
8985 break
8986 }
8987 off2 := v_0.AuxInt
8988 sym2 := v_0.Aux
8989 ptr := v_0.Args[0]
8990 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
8991 break
8992 }
8993 v.reset(OpARM64MOVBUload)
8994 v.AuxInt = off1 + off2
8995 v.Aux = mergeSym(sym1, sym2)
8996 v.AddArg(ptr)
8997 v.AddArg(mem)
8998 return true
8999 }
9000
9001
9002
9003 for {
9004 off := v.AuxInt
9005 sym := v.Aux
9006 _ = v.Args[1]
9007 ptr := v.Args[0]
9008 v_1 := v.Args[1]
9009 if v_1.Op != OpARM64MOVBstorezero {
9010 break
9011 }
9012 off2 := v_1.AuxInt
9013 sym2 := v_1.Aux
9014 _ = v_1.Args[1]
9015 ptr2 := v_1.Args[0]
9016 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
9017 break
9018 }
9019 v.reset(OpARM64MOVDconst)
9020 v.AuxInt = 0
9021 return true
9022 }
9023
9024
9025
9026 for {
9027 off := v.AuxInt
9028 sym := v.Aux
9029 _ = v.Args[1]
9030 v_0 := v.Args[0]
9031 if v_0.Op != OpSB {
9032 break
9033 }
9034 if !(symIsRO(sym)) {
9035 break
9036 }
9037 v.reset(OpARM64MOVDconst)
9038 v.AuxInt = int64(read8(sym, off))
9039 return true
9040 }
9041 return false
9042 }
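// rewriteValueARM64_OpARM64MOVBUloadidx_0 rewrites indexed unsigned byte loads: a
// constant pointer or index is folded into the immediate offset of a plain MOVBUload,
// and a load following MOVBstorezeroidx of the same address becomes MOVDconst [0].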
9043 func rewriteValueARM64_OpARM64MOVBUloadidx_0(v *Value) bool {
9044
9045
9046
9047 for {
9048 mem := v.Args[2]
9049 ptr := v.Args[0]
9050 v_1 := v.Args[1]
9051 if v_1.Op != OpARM64MOVDconst {
9052 break
9053 }
9054 c := v_1.AuxInt
9055 v.reset(OpARM64MOVBUload)
9056 v.AuxInt = c
9057 v.AddArg(ptr)
9058 v.AddArg(mem)
9059 return true
9060 }
9061
9062
9063
9064 for {
9065 mem := v.Args[2]
9066 v_0 := v.Args[0]
9067 if v_0.Op != OpARM64MOVDconst {
9068 break
9069 }
9070 c := v_0.AuxInt
9071 ptr := v.Args[1]
9072 v.reset(OpARM64MOVBUload)
9073 v.AuxInt = c
9074 v.AddArg(ptr)
9075 v.AddArg(mem)
9076 return true
9077 }
9078
9079
9080
9081 for {
9082 _ = v.Args[2]
9083 ptr := v.Args[0]
9084 idx := v.Args[1]
9085 v_2 := v.Args[2]
9086 if v_2.Op != OpARM64MOVBstorezeroidx {
9087 break
9088 }
9089 _ = v_2.Args[2]
9090 ptr2 := v_2.Args[0]
9091 idx2 := v_2.Args[1]
9092 if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) {
9093 break
9094 }
9095 v.reset(OpARM64MOVDconst)
9096 v.AuxInt = 0
9097 return true
9098 }
9099 return false
9100 }
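// rewriteValueARM64_OpARM64MOVBUreg_0 simplifies zero-extension of a byte: extensions of
// values that are already zero-extended (MOVBUload, MOVBUloadidx, MOVBUreg, booleans)
// become MOVDreg, ANDconst and MOVDconst operands are masked to their low 8 bits, and,
// when the bitfield conditions hold, extensions of SLLconst/SRLconst results become the
// bitfield ops UBFIZ/UBFX.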
9101 func rewriteValueARM64_OpARM64MOVBUreg_0(v *Value) bool {
9102
9103
9104
9105 for {
9106 x := v.Args[0]
9107 if x.Op != OpARM64MOVBUload {
9108 break
9109 }
9110 _ = x.Args[1]
9111 v.reset(OpARM64MOVDreg)
9112 v.AddArg(x)
9113 return true
9114 }
9115
9116
9117
9118 for {
9119 x := v.Args[0]
9120 if x.Op != OpARM64MOVBUloadidx {
9121 break
9122 }
9123 _ = x.Args[2]
9124 v.reset(OpARM64MOVDreg)
9125 v.AddArg(x)
9126 return true
9127 }
9128
9129
9130
9131 for {
9132 x := v.Args[0]
9133 if x.Op != OpARM64MOVBUreg {
9134 break
9135 }
9136 v.reset(OpARM64MOVDreg)
9137 v.AddArg(x)
9138 return true
9139 }
9140
9141
9142
9143 for {
9144 v_0 := v.Args[0]
9145 if v_0.Op != OpARM64ANDconst {
9146 break
9147 }
9148 c := v_0.AuxInt
9149 x := v_0.Args[0]
9150 v.reset(OpARM64ANDconst)
9151 v.AuxInt = c & (1<<8 - 1)
9152 v.AddArg(x)
9153 return true
9154 }
9155
9156
9157
9158 for {
9159 v_0 := v.Args[0]
9160 if v_0.Op != OpARM64MOVDconst {
9161 break
9162 }
9163 c := v_0.AuxInt
9164 v.reset(OpARM64MOVDconst)
9165 v.AuxInt = int64(uint8(c))
9166 return true
9167 }
9168
9169
9170
9171 for {
9172 x := v.Args[0]
9173 if !(x.Type.IsBoolean()) {
9174 break
9175 }
9176 v.reset(OpARM64MOVDreg)
9177 v.AddArg(x)
9178 return true
9179 }
9180
9181
9182
9183 for {
9184 v_0 := v.Args[0]
9185 if v_0.Op != OpARM64SLLconst {
9186 break
9187 }
9188 sc := v_0.AuxInt
9189 x := v_0.Args[0]
9190 if !(isARM64BFMask(sc, 1<<8-1, sc)) {
9191 break
9192 }
9193 v.reset(OpARM64UBFIZ)
9194 v.AuxInt = armBFAuxInt(sc, arm64BFWidth(1<<8-1, sc))
9195 v.AddArg(x)
9196 return true
9197 }
9198
9199
9200
9201 for {
9202 v_0 := v.Args[0]
9203 if v_0.Op != OpARM64SRLconst {
9204 break
9205 }
9206 sc := v_0.AuxInt
9207 x := v_0.Args[0]
9208 if !(isARM64BFMask(sc, 1<<8-1, 0)) {
9209 break
9210 }
9211 v.reset(OpARM64UBFX)
9212 v.AuxInt = armBFAuxInt(sc, 8)
9213 v.AddArg(x)
9214 return true
9215 }
9216 return false
9217 }
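// rewriteValueARM64_OpARM64MOVBload_0 optimizes signed byte loads: it folds ADDconst and
// MOVDaddr address arithmetic into the load's offset and symbol, converts a load from
// (ADD ptr idx) with zero offset and nil symbol into the indexed form MOVBloadidx, and
// replaces a load of a byte just zeroed by MOVBstorezero with MOVDconst [0].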
9218 func rewriteValueARM64_OpARM64MOVBload_0(v *Value) bool {
9219 b := v.Block
9220 config := b.Func.Config
9221
9222
9223
9224 for {
9225 off1 := v.AuxInt
9226 sym := v.Aux
9227 mem := v.Args[1]
9228 v_0 := v.Args[0]
9229 if v_0.Op != OpARM64ADDconst {
9230 break
9231 }
9232 off2 := v_0.AuxInt
9233 ptr := v_0.Args[0]
9234 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
9235 break
9236 }
9237 v.reset(OpARM64MOVBload)
9238 v.AuxInt = off1 + off2
9239 v.Aux = sym
9240 v.AddArg(ptr)
9241 v.AddArg(mem)
9242 return true
9243 }
9244
9245
9246
9247 for {
9248 off := v.AuxInt
9249 sym := v.Aux
9250 mem := v.Args[1]
9251 v_0 := v.Args[0]
9252 if v_0.Op != OpARM64ADD {
9253 break
9254 }
9255 idx := v_0.Args[1]
9256 ptr := v_0.Args[0]
9257 if !(off == 0 && sym == nil) {
9258 break
9259 }
9260 v.reset(OpARM64MOVBloadidx)
9261 v.AddArg(ptr)
9262 v.AddArg(idx)
9263 v.AddArg(mem)
9264 return true
9265 }
9266
9267
9268
9269 for {
9270 off1 := v.AuxInt
9271 sym1 := v.Aux
9272 mem := v.Args[1]
9273 v_0 := v.Args[0]
9274 if v_0.Op != OpARM64MOVDaddr {
9275 break
9276 }
9277 off2 := v_0.AuxInt
9278 sym2 := v_0.Aux
9279 ptr := v_0.Args[0]
9280 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
9281 break
9282 }
9283 v.reset(OpARM64MOVBload)
9284 v.AuxInt = off1 + off2
9285 v.Aux = mergeSym(sym1, sym2)
9286 v.AddArg(ptr)
9287 v.AddArg(mem)
9288 return true
9289 }
9290
9291
9292
9293 for {
9294 off := v.AuxInt
9295 sym := v.Aux
9296 _ = v.Args[1]
9297 ptr := v.Args[0]
9298 v_1 := v.Args[1]
9299 if v_1.Op != OpARM64MOVBstorezero {
9300 break
9301 }
9302 off2 := v_1.AuxInt
9303 sym2 := v_1.Aux
9304 _ = v_1.Args[1]
9305 ptr2 := v_1.Args[0]
9306 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
9307 break
9308 }
9309 v.reset(OpARM64MOVDconst)
9310 v.AuxInt = 0
9311 return true
9312 }
9313 return false
9314 }
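// rewriteValueARM64_OpARM64MOVBloadidx_0 rewrites indexed signed byte loads: a constant
// pointer or index is folded into the immediate offset of a plain MOVBload, and a load
// following MOVBstorezeroidx of the same address becomes MOVDconst [0].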
9315 func rewriteValueARM64_OpARM64MOVBloadidx_0(v *Value) bool {
9316
9317
9318
9319 for {
9320 mem := v.Args[2]
9321 ptr := v.Args[0]
9322 v_1 := v.Args[1]
9323 if v_1.Op != OpARM64MOVDconst {
9324 break
9325 }
9326 c := v_1.AuxInt
9327 v.reset(OpARM64MOVBload)
9328 v.AuxInt = c
9329 v.AddArg(ptr)
9330 v.AddArg(mem)
9331 return true
9332 }
9333
9334
9335
9336 for {
9337 mem := v.Args[2]
9338 v_0 := v.Args[0]
9339 if v_0.Op != OpARM64MOVDconst {
9340 break
9341 }
9342 c := v_0.AuxInt
9343 ptr := v.Args[1]
9344 v.reset(OpARM64MOVBload)
9345 v.AuxInt = c
9346 v.AddArg(ptr)
9347 v.AddArg(mem)
9348 return true
9349 }
9350
9351
9352
9353 for {
9354 _ = v.Args[2]
9355 ptr := v.Args[0]
9356 idx := v.Args[1]
9357 v_2 := v.Args[2]
9358 if v_2.Op != OpARM64MOVBstorezeroidx {
9359 break
9360 }
9361 _ = v_2.Args[2]
9362 ptr2 := v_2.Args[0]
9363 idx2 := v_2.Args[1]
9364 if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) {
9365 break
9366 }
9367 v.reset(OpARM64MOVDconst)
9368 v.AuxInt = 0
9369 return true
9370 }
9371 return false
9372 }
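// rewriteValueARM64_OpARM64MOVBreg_0 simplifies sign-extension of a byte: extensions of
// MOVBload, MOVBloadidx and MOVBreg results become MOVDreg, a MOVDconst operand is folded
// to MOVDconst [int64(int8(c))], and sign-extending a left shift by less than 8 becomes
// SBFIZ.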
9373 func rewriteValueARM64_OpARM64MOVBreg_0(v *Value) bool {
9374
9375
9376
9377 for {
9378 x := v.Args[0]
9379 if x.Op != OpARM64MOVBload {
9380 break
9381 }
9382 _ = x.Args[1]
9383 v.reset(OpARM64MOVDreg)
9384 v.AddArg(x)
9385 return true
9386 }
9387
9388
9389
9390 for {
9391 x := v.Args[0]
9392 if x.Op != OpARM64MOVBloadidx {
9393 break
9394 }
9395 _ = x.Args[2]
9396 v.reset(OpARM64MOVDreg)
9397 v.AddArg(x)
9398 return true
9399 }
9400
9401
9402
9403 for {
9404 x := v.Args[0]
9405 if x.Op != OpARM64MOVBreg {
9406 break
9407 }
9408 v.reset(OpARM64MOVDreg)
9409 v.AddArg(x)
9410 return true
9411 }
9412
9413
9414
9415 for {
9416 v_0 := v.Args[0]
9417 if v_0.Op != OpARM64MOVDconst {
9418 break
9419 }
9420 c := v_0.AuxInt
9421 v.reset(OpARM64MOVDconst)
9422 v.AuxInt = int64(int8(c))
9423 return true
9424 }
9425
9426
9427
9428 for {
9429 v_0 := v.Args[0]
9430 if v_0.Op != OpARM64SLLconst {
9431 break
9432 }
9433 lc := v_0.AuxInt
9434 x := v_0.Args[0]
9435 if !(lc < 8) {
9436 break
9437 }
9438 v.reset(OpARM64SBFIZ)
9439 v.AuxInt = armBFAuxInt(lc, 8-lc)
9440 v.AddArg(x)
9441 return true
9442 }
9443 return false
9444 }
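// rewriteValueARM64_OpARM64MOVBstore_0 optimizes byte stores: it folds ADDconst and
// MOVDaddr address arithmetic into the store's offset and symbol, converts a store to
// (ADD ptr idx) with zero offset and nil symbol into the indexed form MOVBstoreidx,
// turns a store of constant zero into MOVBstorezero, and drops redundant MOVBreg,
// MOVBUreg, MOVHreg, MOVHUreg, MOVWreg and MOVWUreg extensions of the stored value,
// since only the low byte is written.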
9445 func rewriteValueARM64_OpARM64MOVBstore_0(v *Value) bool {
9446 b := v.Block
9447 config := b.Func.Config
9448
9449
9450
9451 for {
9452 off1 := v.AuxInt
9453 sym := v.Aux
9454 mem := v.Args[2]
9455 v_0 := v.Args[0]
9456 if v_0.Op != OpARM64ADDconst {
9457 break
9458 }
9459 off2 := v_0.AuxInt
9460 ptr := v_0.Args[0]
9461 val := v.Args[1]
9462 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
9463 break
9464 }
9465 v.reset(OpARM64MOVBstore)
9466 v.AuxInt = off1 + off2
9467 v.Aux = sym
9468 v.AddArg(ptr)
9469 v.AddArg(val)
9470 v.AddArg(mem)
9471 return true
9472 }
9473
9474
9475
9476 for {
9477 off := v.AuxInt
9478 sym := v.Aux
9479 mem := v.Args[2]
9480 v_0 := v.Args[0]
9481 if v_0.Op != OpARM64ADD {
9482 break
9483 }
9484 idx := v_0.Args[1]
9485 ptr := v_0.Args[0]
9486 val := v.Args[1]
9487 if !(off == 0 && sym == nil) {
9488 break
9489 }
9490 v.reset(OpARM64MOVBstoreidx)
9491 v.AddArg(ptr)
9492 v.AddArg(idx)
9493 v.AddArg(val)
9494 v.AddArg(mem)
9495 return true
9496 }
9497
9498
9499
9500 for {
9501 off1 := v.AuxInt
9502 sym1 := v.Aux
9503 mem := v.Args[2]
9504 v_0 := v.Args[0]
9505 if v_0.Op != OpARM64MOVDaddr {
9506 break
9507 }
9508 off2 := v_0.AuxInt
9509 sym2 := v_0.Aux
9510 ptr := v_0.Args[0]
9511 val := v.Args[1]
9512 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
9513 break
9514 }
9515 v.reset(OpARM64MOVBstore)
9516 v.AuxInt = off1 + off2
9517 v.Aux = mergeSym(sym1, sym2)
9518 v.AddArg(ptr)
9519 v.AddArg(val)
9520 v.AddArg(mem)
9521 return true
9522 }
9523
9524
9525
9526 for {
9527 off := v.AuxInt
9528 sym := v.Aux
9529 mem := v.Args[2]
9530 ptr := v.Args[0]
9531 v_1 := v.Args[1]
9532 if v_1.Op != OpARM64MOVDconst {
9533 break
9534 }
9535 if v_1.AuxInt != 0 {
9536 break
9537 }
9538 v.reset(OpARM64MOVBstorezero)
9539 v.AuxInt = off
9540 v.Aux = sym
9541 v.AddArg(ptr)
9542 v.AddArg(mem)
9543 return true
9544 }
9545
9546
9547
9548 for {
9549 off := v.AuxInt
9550 sym := v.Aux
9551 mem := v.Args[2]
9552 ptr := v.Args[0]
9553 v_1 := v.Args[1]
9554 if v_1.Op != OpARM64MOVBreg {
9555 break
9556 }
9557 x := v_1.Args[0]
9558 v.reset(OpARM64MOVBstore)
9559 v.AuxInt = off
9560 v.Aux = sym
9561 v.AddArg(ptr)
9562 v.AddArg(x)
9563 v.AddArg(mem)
9564 return true
9565 }
9566
9567
9568
9569 for {
9570 off := v.AuxInt
9571 sym := v.Aux
9572 mem := v.Args[2]
9573 ptr := v.Args[0]
9574 v_1 := v.Args[1]
9575 if v_1.Op != OpARM64MOVBUreg {
9576 break
9577 }
9578 x := v_1.Args[0]
9579 v.reset(OpARM64MOVBstore)
9580 v.AuxInt = off
9581 v.Aux = sym
9582 v.AddArg(ptr)
9583 v.AddArg(x)
9584 v.AddArg(mem)
9585 return true
9586 }
9587
9588
9589
9590 for {
9591 off := v.AuxInt
9592 sym := v.Aux
9593 mem := v.Args[2]
9594 ptr := v.Args[0]
9595 v_1 := v.Args[1]
9596 if v_1.Op != OpARM64MOVHreg {
9597 break
9598 }
9599 x := v_1.Args[0]
9600 v.reset(OpARM64MOVBstore)
9601 v.AuxInt = off
9602 v.Aux = sym
9603 v.AddArg(ptr)
9604 v.AddArg(x)
9605 v.AddArg(mem)
9606 return true
9607 }
9608
9609
9610
9611 for {
9612 off := v.AuxInt
9613 sym := v.Aux
9614 mem := v.Args[2]
9615 ptr := v.Args[0]
9616 v_1 := v.Args[1]
9617 if v_1.Op != OpARM64MOVHUreg {
9618 break
9619 }
9620 x := v_1.Args[0]
9621 v.reset(OpARM64MOVBstore)
9622 v.AuxInt = off
9623 v.Aux = sym
9624 v.AddArg(ptr)
9625 v.AddArg(x)
9626 v.AddArg(mem)
9627 return true
9628 }
9629
9630
9631
9632 for {
9633 off := v.AuxInt
9634 sym := v.Aux
9635 mem := v.Args[2]
9636 ptr := v.Args[0]
9637 v_1 := v.Args[1]
9638 if v_1.Op != OpARM64MOVWreg {
9639 break
9640 }
9641 x := v_1.Args[0]
9642 v.reset(OpARM64MOVBstore)
9643 v.AuxInt = off
9644 v.Aux = sym
9645 v.AddArg(ptr)
9646 v.AddArg(x)
9647 v.AddArg(mem)
9648 return true
9649 }
9650
9651
9652
9653 for {
9654 off := v.AuxInt
9655 sym := v.Aux
9656 mem := v.Args[2]
9657 ptr := v.Args[0]
9658 v_1 := v.Args[1]
9659 if v_1.Op != OpARM64MOVWUreg {
9660 break
9661 }
9662 x := v_1.Args[0]
9663 v.reset(OpARM64MOVBstore)
9664 v.AuxInt = off
9665 v.Aux = sym
9666 v.AddArg(ptr)
9667 v.AddArg(x)
9668 v.AddArg(mem)
9669 return true
9670 }
9671 return false
9672 }
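// rewriteValueARM64_OpARM64MOVBstore_10 merges pairs of adjacent byte stores into a
// single MOVHstore (or MOVHstoreidx): a store of the next-higher byte of a value,
// extracted with SRLconst [8], UBFX or SRLconst of a MOVDreg, combined with the store of
// the value itself (or of its SRLconst [j-8] piece) at the preceding offset, becomes one
// halfword store of the lower piece; no byte reversal is needed because the bytes are
// already in little-endian order.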
9673 func rewriteValueARM64_OpARM64MOVBstore_10(v *Value) bool {
9674
9675
9676
9677 for {
9678 i := v.AuxInt
9679 s := v.Aux
9680 _ = v.Args[2]
9681 ptr0 := v.Args[0]
9682 v_1 := v.Args[1]
9683 if v_1.Op != OpARM64SRLconst {
9684 break
9685 }
9686 if v_1.AuxInt != 8 {
9687 break
9688 }
9689 w := v_1.Args[0]
9690 x := v.Args[2]
9691 if x.Op != OpARM64MOVBstore {
9692 break
9693 }
9694 if x.AuxInt != i-1 {
9695 break
9696 }
9697 if x.Aux != s {
9698 break
9699 }
9700 mem := x.Args[2]
9701 ptr1 := x.Args[0]
9702 if w != x.Args[1] {
9703 break
9704 }
9705 if !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) {
9706 break
9707 }
9708 v.reset(OpARM64MOVHstore)
9709 v.AuxInt = i - 1
9710 v.Aux = s
9711 v.AddArg(ptr0)
9712 v.AddArg(w)
9713 v.AddArg(mem)
9714 return true
9715 }
9716
9717
9718
9719 for {
9720 if v.AuxInt != 1 {
9721 break
9722 }
9723 s := v.Aux
9724 _ = v.Args[2]
9725 v_0 := v.Args[0]
9726 if v_0.Op != OpARM64ADD {
9727 break
9728 }
9729 idx0 := v_0.Args[1]
9730 ptr0 := v_0.Args[0]
9731 v_1 := v.Args[1]
9732 if v_1.Op != OpARM64SRLconst {
9733 break
9734 }
9735 if v_1.AuxInt != 8 {
9736 break
9737 }
9738 w := v_1.Args[0]
9739 x := v.Args[2]
9740 if x.Op != OpARM64MOVBstoreidx {
9741 break
9742 }
9743 mem := x.Args[3]
9744 ptr1 := x.Args[0]
9745 idx1 := x.Args[1]
9746 if w != x.Args[2] {
9747 break
9748 }
9749 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
9750 break
9751 }
9752 v.reset(OpARM64MOVHstoreidx)
9753 v.AddArg(ptr1)
9754 v.AddArg(idx1)
9755 v.AddArg(w)
9756 v.AddArg(mem)
9757 return true
9758 }
9759
9760
9761
9762 for {
9763 i := v.AuxInt
9764 s := v.Aux
9765 _ = v.Args[2]
9766 ptr0 := v.Args[0]
9767 v_1 := v.Args[1]
9768 if v_1.Op != OpARM64UBFX {
9769 break
9770 }
9771 if v_1.AuxInt != armBFAuxInt(8, 8) {
9772 break
9773 }
9774 w := v_1.Args[0]
9775 x := v.Args[2]
9776 if x.Op != OpARM64MOVBstore {
9777 break
9778 }
9779 if x.AuxInt != i-1 {
9780 break
9781 }
9782 if x.Aux != s {
9783 break
9784 }
9785 mem := x.Args[2]
9786 ptr1 := x.Args[0]
9787 if w != x.Args[1] {
9788 break
9789 }
9790 if !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) {
9791 break
9792 }
9793 v.reset(OpARM64MOVHstore)
9794 v.AuxInt = i - 1
9795 v.Aux = s
9796 v.AddArg(ptr0)
9797 v.AddArg(w)
9798 v.AddArg(mem)
9799 return true
9800 }
9801
9802
9803
9804 for {
9805 if v.AuxInt != 1 {
9806 break
9807 }
9808 s := v.Aux
9809 _ = v.Args[2]
9810 v_0 := v.Args[0]
9811 if v_0.Op != OpARM64ADD {
9812 break
9813 }
9814 idx0 := v_0.Args[1]
9815 ptr0 := v_0.Args[0]
9816 v_1 := v.Args[1]
9817 if v_1.Op != OpARM64UBFX {
9818 break
9819 }
9820 if v_1.AuxInt != armBFAuxInt(8, 8) {
9821 break
9822 }
9823 w := v_1.Args[0]
9824 x := v.Args[2]
9825 if x.Op != OpARM64MOVBstoreidx {
9826 break
9827 }
9828 mem := x.Args[3]
9829 ptr1 := x.Args[0]
9830 idx1 := x.Args[1]
9831 if w != x.Args[2] {
9832 break
9833 }
9834 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
9835 break
9836 }
9837 v.reset(OpARM64MOVHstoreidx)
9838 v.AddArg(ptr1)
9839 v.AddArg(idx1)
9840 v.AddArg(w)
9841 v.AddArg(mem)
9842 return true
9843 }
9844
9845
9846
9847 for {
9848 i := v.AuxInt
9849 s := v.Aux
9850 _ = v.Args[2]
9851 ptr0 := v.Args[0]
9852 v_1 := v.Args[1]
9853 if v_1.Op != OpARM64UBFX {
9854 break
9855 }
9856 if v_1.AuxInt != armBFAuxInt(8, 24) {
9857 break
9858 }
9859 w := v_1.Args[0]
9860 x := v.Args[2]
9861 if x.Op != OpARM64MOVBstore {
9862 break
9863 }
9864 if x.AuxInt != i-1 {
9865 break
9866 }
9867 if x.Aux != s {
9868 break
9869 }
9870 mem := x.Args[2]
9871 ptr1 := x.Args[0]
9872 if w != x.Args[1] {
9873 break
9874 }
9875 if !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) {
9876 break
9877 }
9878 v.reset(OpARM64MOVHstore)
9879 v.AuxInt = i - 1
9880 v.Aux = s
9881 v.AddArg(ptr0)
9882 v.AddArg(w)
9883 v.AddArg(mem)
9884 return true
9885 }
9886
9887
9888
9889 for {
9890 if v.AuxInt != 1 {
9891 break
9892 }
9893 s := v.Aux
9894 _ = v.Args[2]
9895 v_0 := v.Args[0]
9896 if v_0.Op != OpARM64ADD {
9897 break
9898 }
9899 idx0 := v_0.Args[1]
9900 ptr0 := v_0.Args[0]
9901 v_1 := v.Args[1]
9902 if v_1.Op != OpARM64UBFX {
9903 break
9904 }
9905 if v_1.AuxInt != armBFAuxInt(8, 24) {
9906 break
9907 }
9908 w := v_1.Args[0]
9909 x := v.Args[2]
9910 if x.Op != OpARM64MOVBstoreidx {
9911 break
9912 }
9913 mem := x.Args[3]
9914 ptr1 := x.Args[0]
9915 idx1 := x.Args[1]
9916 if w != x.Args[2] {
9917 break
9918 }
9919 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
9920 break
9921 }
9922 v.reset(OpARM64MOVHstoreidx)
9923 v.AddArg(ptr1)
9924 v.AddArg(idx1)
9925 v.AddArg(w)
9926 v.AddArg(mem)
9927 return true
9928 }
9929
9930
9931
9932 for {
9933 i := v.AuxInt
9934 s := v.Aux
9935 _ = v.Args[2]
9936 ptr0 := v.Args[0]
9937 v_1 := v.Args[1]
9938 if v_1.Op != OpARM64SRLconst {
9939 break
9940 }
9941 if v_1.AuxInt != 8 {
9942 break
9943 }
9944 v_1_0 := v_1.Args[0]
9945 if v_1_0.Op != OpARM64MOVDreg {
9946 break
9947 }
9948 w := v_1_0.Args[0]
9949 x := v.Args[2]
9950 if x.Op != OpARM64MOVBstore {
9951 break
9952 }
9953 if x.AuxInt != i-1 {
9954 break
9955 }
9956 if x.Aux != s {
9957 break
9958 }
9959 mem := x.Args[2]
9960 ptr1 := x.Args[0]
9961 if w != x.Args[1] {
9962 break
9963 }
9964 if !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) {
9965 break
9966 }
9967 v.reset(OpARM64MOVHstore)
9968 v.AuxInt = i - 1
9969 v.Aux = s
9970 v.AddArg(ptr0)
9971 v.AddArg(w)
9972 v.AddArg(mem)
9973 return true
9974 }
9975
9976
9977
9978 for {
9979 if v.AuxInt != 1 {
9980 break
9981 }
9982 s := v.Aux
9983 _ = v.Args[2]
9984 v_0 := v.Args[0]
9985 if v_0.Op != OpARM64ADD {
9986 break
9987 }
9988 idx0 := v_0.Args[1]
9989 ptr0 := v_0.Args[0]
9990 v_1 := v.Args[1]
9991 if v_1.Op != OpARM64SRLconst {
9992 break
9993 }
9994 if v_1.AuxInt != 8 {
9995 break
9996 }
9997 v_1_0 := v_1.Args[0]
9998 if v_1_0.Op != OpARM64MOVDreg {
9999 break
10000 }
10001 w := v_1_0.Args[0]
10002 x := v.Args[2]
10003 if x.Op != OpARM64MOVBstoreidx {
10004 break
10005 }
10006 mem := x.Args[3]
10007 ptr1 := x.Args[0]
10008 idx1 := x.Args[1]
10009 if w != x.Args[2] {
10010 break
10011 }
10012 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
10013 break
10014 }
10015 v.reset(OpARM64MOVHstoreidx)
10016 v.AddArg(ptr1)
10017 v.AddArg(idx1)
10018 v.AddArg(w)
10019 v.AddArg(mem)
10020 return true
10021 }
10022
10023
10024
10025 for {
10026 i := v.AuxInt
10027 s := v.Aux
10028 _ = v.Args[2]
10029 ptr0 := v.Args[0]
10030 v_1 := v.Args[1]
10031 if v_1.Op != OpARM64SRLconst {
10032 break
10033 }
10034 j := v_1.AuxInt
10035 w := v_1.Args[0]
10036 x := v.Args[2]
10037 if x.Op != OpARM64MOVBstore {
10038 break
10039 }
10040 if x.AuxInt != i-1 {
10041 break
10042 }
10043 if x.Aux != s {
10044 break
10045 }
10046 mem := x.Args[2]
10047 ptr1 := x.Args[0]
10048 w0 := x.Args[1]
10049 if w0.Op != OpARM64SRLconst {
10050 break
10051 }
10052 if w0.AuxInt != j-8 {
10053 break
10054 }
10055 if w != w0.Args[0] {
10056 break
10057 }
10058 if !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) {
10059 break
10060 }
10061 v.reset(OpARM64MOVHstore)
10062 v.AuxInt = i - 1
10063 v.Aux = s
10064 v.AddArg(ptr0)
10065 v.AddArg(w0)
10066 v.AddArg(mem)
10067 return true
10068 }
10069
10070
10071
10072 for {
10073 if v.AuxInt != 1 {
10074 break
10075 }
10076 s := v.Aux
10077 _ = v.Args[2]
10078 v_0 := v.Args[0]
10079 if v_0.Op != OpARM64ADD {
10080 break
10081 }
10082 idx0 := v_0.Args[1]
10083 ptr0 := v_0.Args[0]
10084 v_1 := v.Args[1]
10085 if v_1.Op != OpARM64SRLconst {
10086 break
10087 }
10088 j := v_1.AuxInt
10089 w := v_1.Args[0]
10090 x := v.Args[2]
10091 if x.Op != OpARM64MOVBstoreidx {
10092 break
10093 }
10094 mem := x.Args[3]
10095 ptr1 := x.Args[0]
10096 idx1 := x.Args[1]
10097 w0 := x.Args[2]
10098 if w0.Op != OpARM64SRLconst {
10099 break
10100 }
10101 if w0.AuxInt != j-8 {
10102 break
10103 }
10104 if w != w0.Args[0] {
10105 break
10106 }
10107 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
10108 break
10109 }
10110 v.reset(OpARM64MOVHstoreidx)
10111 v.AddArg(ptr1)
10112 v.AddArg(idx1)
10113 v.AddArg(w0)
10114 v.AddArg(mem)
10115 return true
10116 }
10117 return false
10118 }
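// rewriteValueARM64_OpARM64MOVBstore_20 continues the combining: UBFX/UBFX and
// SRLconst(MOVDreg) pairs are merged into MOVHstore/MOVHstoreidx, and runs of byte stores
// that write a value's bytes in big-endian order are collapsed with a byte reversal:
// eight consecutive stores of the SRLconst [8..56] pieces become a MOVDstore of (REV w),
// and four consecutive stores of the UBFX or SRLconst pieces become a MOVWstore of
// (REVW w), each with an indexed variant.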
10119 func rewriteValueARM64_OpARM64MOVBstore_20(v *Value) bool {
10120 b := v.Block
10121
10122
10123
10124 for {
10125 i := v.AuxInt
10126 s := v.Aux
10127 _ = v.Args[2]
10128 ptr0 := v.Args[0]
10129 v_1 := v.Args[1]
10130 if v_1.Op != OpARM64UBFX {
10131 break
10132 }
10133 bfc := v_1.AuxInt
10134 w := v_1.Args[0]
10135 x := v.Args[2]
10136 if x.Op != OpARM64MOVBstore {
10137 break
10138 }
10139 if x.AuxInt != i-1 {
10140 break
10141 }
10142 if x.Aux != s {
10143 break
10144 }
10145 mem := x.Args[2]
10146 ptr1 := x.Args[0]
10147 w0 := x.Args[1]
10148 if w0.Op != OpARM64UBFX {
10149 break
10150 }
10151 bfc2 := w0.AuxInt
10152 if w != w0.Args[0] {
10153 break
10154 }
10155 if !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && getARM64BFwidth(bfc) == 32-getARM64BFlsb(bfc) && getARM64BFwidth(bfc2) == 32-getARM64BFlsb(bfc2) && getARM64BFlsb(bfc2) == getARM64BFlsb(bfc)-8 && clobber(x)) {
10156 break
10157 }
10158 v.reset(OpARM64MOVHstore)
10159 v.AuxInt = i - 1
10160 v.Aux = s
10161 v.AddArg(ptr0)
10162 v.AddArg(w0)
10163 v.AddArg(mem)
10164 return true
10165 }
10166
10167
10168
10169 for {
10170 if v.AuxInt != 1 {
10171 break
10172 }
10173 s := v.Aux
10174 _ = v.Args[2]
10175 v_0 := v.Args[0]
10176 if v_0.Op != OpARM64ADD {
10177 break
10178 }
10179 idx0 := v_0.Args[1]
10180 ptr0 := v_0.Args[0]
10181 v_1 := v.Args[1]
10182 if v_1.Op != OpARM64UBFX {
10183 break
10184 }
10185 bfc := v_1.AuxInt
10186 w := v_1.Args[0]
10187 x := v.Args[2]
10188 if x.Op != OpARM64MOVBstoreidx {
10189 break
10190 }
10191 mem := x.Args[3]
10192 ptr1 := x.Args[0]
10193 idx1 := x.Args[1]
10194 w0 := x.Args[2]
10195 if w0.Op != OpARM64UBFX {
10196 break
10197 }
10198 bfc2 := w0.AuxInt
10199 if w != w0.Args[0] {
10200 break
10201 }
10202 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && getARM64BFwidth(bfc) == 32-getARM64BFlsb(bfc) && getARM64BFwidth(bfc2) == 32-getARM64BFlsb(bfc2) && getARM64BFlsb(bfc2) == getARM64BFlsb(bfc)-8 && clobber(x)) {
10203 break
10204 }
10205 v.reset(OpARM64MOVHstoreidx)
10206 v.AddArg(ptr1)
10207 v.AddArg(idx1)
10208 v.AddArg(w0)
10209 v.AddArg(mem)
10210 return true
10211 }
10212
10213
10214
10215 for {
10216 i := v.AuxInt
10217 s := v.Aux
10218 _ = v.Args[2]
10219 ptr0 := v.Args[0]
10220 v_1 := v.Args[1]
10221 if v_1.Op != OpARM64SRLconst {
10222 break
10223 }
10224 j := v_1.AuxInt
10225 v_1_0 := v_1.Args[0]
10226 if v_1_0.Op != OpARM64MOVDreg {
10227 break
10228 }
10229 w := v_1_0.Args[0]
10230 x := v.Args[2]
10231 if x.Op != OpARM64MOVBstore {
10232 break
10233 }
10234 if x.AuxInt != i-1 {
10235 break
10236 }
10237 if x.Aux != s {
10238 break
10239 }
10240 mem := x.Args[2]
10241 ptr1 := x.Args[0]
10242 w0 := x.Args[1]
10243 if w0.Op != OpARM64SRLconst {
10244 break
10245 }
10246 if w0.AuxInt != j-8 {
10247 break
10248 }
10249 w0_0 := w0.Args[0]
10250 if w0_0.Op != OpARM64MOVDreg {
10251 break
10252 }
10253 if w != w0_0.Args[0] {
10254 break
10255 }
10256 if !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) {
10257 break
10258 }
10259 v.reset(OpARM64MOVHstore)
10260 v.AuxInt = i - 1
10261 v.Aux = s
10262 v.AddArg(ptr0)
10263 v.AddArg(w0)
10264 v.AddArg(mem)
10265 return true
10266 }
10267
10268
10269
10270 for {
10271 if v.AuxInt != 1 {
10272 break
10273 }
10274 s := v.Aux
10275 _ = v.Args[2]
10276 v_0 := v.Args[0]
10277 if v_0.Op != OpARM64ADD {
10278 break
10279 }
10280 idx0 := v_0.Args[1]
10281 ptr0 := v_0.Args[0]
10282 v_1 := v.Args[1]
10283 if v_1.Op != OpARM64SRLconst {
10284 break
10285 }
10286 j := v_1.AuxInt
10287 v_1_0 := v_1.Args[0]
10288 if v_1_0.Op != OpARM64MOVDreg {
10289 break
10290 }
10291 w := v_1_0.Args[0]
10292 x := v.Args[2]
10293 if x.Op != OpARM64MOVBstoreidx {
10294 break
10295 }
10296 mem := x.Args[3]
10297 ptr1 := x.Args[0]
10298 idx1 := x.Args[1]
10299 w0 := x.Args[2]
10300 if w0.Op != OpARM64SRLconst {
10301 break
10302 }
10303 if w0.AuxInt != j-8 {
10304 break
10305 }
10306 w0_0 := w0.Args[0]
10307 if w0_0.Op != OpARM64MOVDreg {
10308 break
10309 }
10310 if w != w0_0.Args[0] {
10311 break
10312 }
10313 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
10314 break
10315 }
10316 v.reset(OpARM64MOVHstoreidx)
10317 v.AddArg(ptr1)
10318 v.AddArg(idx1)
10319 v.AddArg(w0)
10320 v.AddArg(mem)
10321 return true
10322 }
10323
10324
10325
10326 for {
10327 i := v.AuxInt
10328 s := v.Aux
10329 _ = v.Args[2]
10330 ptr := v.Args[0]
10331 w := v.Args[1]
10332 x0 := v.Args[2]
10333 if x0.Op != OpARM64MOVBstore {
10334 break
10335 }
10336 if x0.AuxInt != i-1 {
10337 break
10338 }
10339 if x0.Aux != s {
10340 break
10341 }
10342 _ = x0.Args[2]
10343 if ptr != x0.Args[0] {
10344 break
10345 }
10346 x0_1 := x0.Args[1]
10347 if x0_1.Op != OpARM64SRLconst {
10348 break
10349 }
10350 if x0_1.AuxInt != 8 {
10351 break
10352 }
10353 if w != x0_1.Args[0] {
10354 break
10355 }
10356 x1 := x0.Args[2]
10357 if x1.Op != OpARM64MOVBstore {
10358 break
10359 }
10360 if x1.AuxInt != i-2 {
10361 break
10362 }
10363 if x1.Aux != s {
10364 break
10365 }
10366 _ = x1.Args[2]
10367 if ptr != x1.Args[0] {
10368 break
10369 }
10370 x1_1 := x1.Args[1]
10371 if x1_1.Op != OpARM64SRLconst {
10372 break
10373 }
10374 if x1_1.AuxInt != 16 {
10375 break
10376 }
10377 if w != x1_1.Args[0] {
10378 break
10379 }
10380 x2 := x1.Args[2]
10381 if x2.Op != OpARM64MOVBstore {
10382 break
10383 }
10384 if x2.AuxInt != i-3 {
10385 break
10386 }
10387 if x2.Aux != s {
10388 break
10389 }
10390 _ = x2.Args[2]
10391 if ptr != x2.Args[0] {
10392 break
10393 }
10394 x2_1 := x2.Args[1]
10395 if x2_1.Op != OpARM64SRLconst {
10396 break
10397 }
10398 if x2_1.AuxInt != 24 {
10399 break
10400 }
10401 if w != x2_1.Args[0] {
10402 break
10403 }
10404 x3 := x2.Args[2]
10405 if x3.Op != OpARM64MOVBstore {
10406 break
10407 }
10408 if x3.AuxInt != i-4 {
10409 break
10410 }
10411 if x3.Aux != s {
10412 break
10413 }
10414 _ = x3.Args[2]
10415 if ptr != x3.Args[0] {
10416 break
10417 }
10418 x3_1 := x3.Args[1]
10419 if x3_1.Op != OpARM64SRLconst {
10420 break
10421 }
10422 if x3_1.AuxInt != 32 {
10423 break
10424 }
10425 if w != x3_1.Args[0] {
10426 break
10427 }
10428 x4 := x3.Args[2]
10429 if x4.Op != OpARM64MOVBstore {
10430 break
10431 }
10432 if x4.AuxInt != i-5 {
10433 break
10434 }
10435 if x4.Aux != s {
10436 break
10437 }
10438 _ = x4.Args[2]
10439 if ptr != x4.Args[0] {
10440 break
10441 }
10442 x4_1 := x4.Args[1]
10443 if x4_1.Op != OpARM64SRLconst {
10444 break
10445 }
10446 if x4_1.AuxInt != 40 {
10447 break
10448 }
10449 if w != x4_1.Args[0] {
10450 break
10451 }
10452 x5 := x4.Args[2]
10453 if x5.Op != OpARM64MOVBstore {
10454 break
10455 }
10456 if x5.AuxInt != i-6 {
10457 break
10458 }
10459 if x5.Aux != s {
10460 break
10461 }
10462 _ = x5.Args[2]
10463 if ptr != x5.Args[0] {
10464 break
10465 }
10466 x5_1 := x5.Args[1]
10467 if x5_1.Op != OpARM64SRLconst {
10468 break
10469 }
10470 if x5_1.AuxInt != 48 {
10471 break
10472 }
10473 if w != x5_1.Args[0] {
10474 break
10475 }
10476 x6 := x5.Args[2]
10477 if x6.Op != OpARM64MOVBstore {
10478 break
10479 }
10480 if x6.AuxInt != i-7 {
10481 break
10482 }
10483 if x6.Aux != s {
10484 break
10485 }
10486 mem := x6.Args[2]
10487 if ptr != x6.Args[0] {
10488 break
10489 }
10490 x6_1 := x6.Args[1]
10491 if x6_1.Op != OpARM64SRLconst {
10492 break
10493 }
10494 if x6_1.AuxInt != 56 {
10495 break
10496 }
10497 if w != x6_1.Args[0] {
10498 break
10499 }
10500 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6)) {
10501 break
10502 }
10503 v.reset(OpARM64MOVDstore)
10504 v.AuxInt = i - 7
10505 v.Aux = s
10506 v.AddArg(ptr)
10507 v0 := b.NewValue0(x6.Pos, OpARM64REV, w.Type)
10508 v0.AddArg(w)
10509 v.AddArg(v0)
10510 v.AddArg(mem)
10511 return true
10512 }
10513
10514
10515
10516 for {
10517 if v.AuxInt != 7 {
10518 break
10519 }
10520 s := v.Aux
10521 _ = v.Args[2]
10522 p := v.Args[0]
10523 w := v.Args[1]
10524 x0 := v.Args[2]
10525 if x0.Op != OpARM64MOVBstore {
10526 break
10527 }
10528 if x0.AuxInt != 6 {
10529 break
10530 }
10531 if x0.Aux != s {
10532 break
10533 }
10534 _ = x0.Args[2]
10535 if p != x0.Args[0] {
10536 break
10537 }
10538 x0_1 := x0.Args[1]
10539 if x0_1.Op != OpARM64SRLconst {
10540 break
10541 }
10542 if x0_1.AuxInt != 8 {
10543 break
10544 }
10545 if w != x0_1.Args[0] {
10546 break
10547 }
10548 x1 := x0.Args[2]
10549 if x1.Op != OpARM64MOVBstore {
10550 break
10551 }
10552 if x1.AuxInt != 5 {
10553 break
10554 }
10555 if x1.Aux != s {
10556 break
10557 }
10558 _ = x1.Args[2]
10559 if p != x1.Args[0] {
10560 break
10561 }
10562 x1_1 := x1.Args[1]
10563 if x1_1.Op != OpARM64SRLconst {
10564 break
10565 }
10566 if x1_1.AuxInt != 16 {
10567 break
10568 }
10569 if w != x1_1.Args[0] {
10570 break
10571 }
10572 x2 := x1.Args[2]
10573 if x2.Op != OpARM64MOVBstore {
10574 break
10575 }
10576 if x2.AuxInt != 4 {
10577 break
10578 }
10579 if x2.Aux != s {
10580 break
10581 }
10582 _ = x2.Args[2]
10583 if p != x2.Args[0] {
10584 break
10585 }
10586 x2_1 := x2.Args[1]
10587 if x2_1.Op != OpARM64SRLconst {
10588 break
10589 }
10590 if x2_1.AuxInt != 24 {
10591 break
10592 }
10593 if w != x2_1.Args[0] {
10594 break
10595 }
10596 x3 := x2.Args[2]
10597 if x3.Op != OpARM64MOVBstore {
10598 break
10599 }
10600 if x3.AuxInt != 3 {
10601 break
10602 }
10603 if x3.Aux != s {
10604 break
10605 }
10606 _ = x3.Args[2]
10607 if p != x3.Args[0] {
10608 break
10609 }
10610 x3_1 := x3.Args[1]
10611 if x3_1.Op != OpARM64SRLconst {
10612 break
10613 }
10614 if x3_1.AuxInt != 32 {
10615 break
10616 }
10617 if w != x3_1.Args[0] {
10618 break
10619 }
10620 x4 := x3.Args[2]
10621 if x4.Op != OpARM64MOVBstore {
10622 break
10623 }
10624 if x4.AuxInt != 2 {
10625 break
10626 }
10627 if x4.Aux != s {
10628 break
10629 }
10630 _ = x4.Args[2]
10631 if p != x4.Args[0] {
10632 break
10633 }
10634 x4_1 := x4.Args[1]
10635 if x4_1.Op != OpARM64SRLconst {
10636 break
10637 }
10638 if x4_1.AuxInt != 40 {
10639 break
10640 }
10641 if w != x4_1.Args[0] {
10642 break
10643 }
10644 x5 := x4.Args[2]
10645 if x5.Op != OpARM64MOVBstore {
10646 break
10647 }
10648 if x5.AuxInt != 1 {
10649 break
10650 }
10651 if x5.Aux != s {
10652 break
10653 }
10654 _ = x5.Args[2]
10655 p1 := x5.Args[0]
10656 if p1.Op != OpARM64ADD {
10657 break
10658 }
10659 idx1 := p1.Args[1]
10660 ptr1 := p1.Args[0]
10661 x5_1 := x5.Args[1]
10662 if x5_1.Op != OpARM64SRLconst {
10663 break
10664 }
10665 if x5_1.AuxInt != 48 {
10666 break
10667 }
10668 if w != x5_1.Args[0] {
10669 break
10670 }
10671 x6 := x5.Args[2]
10672 if x6.Op != OpARM64MOVBstoreidx {
10673 break
10674 }
10675 mem := x6.Args[3]
10676 ptr0 := x6.Args[0]
10677 idx0 := x6.Args[1]
10678 x6_2 := x6.Args[2]
10679 if x6_2.Op != OpARM64SRLconst {
10680 break
10681 }
10682 if x6_2.AuxInt != 56 {
10683 break
10684 }
10685 if w != x6_2.Args[0] {
10686 break
10687 }
10688 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6)) {
10689 break
10690 }
10691 v.reset(OpARM64MOVDstoreidx)
10692 v.AddArg(ptr0)
10693 v.AddArg(idx0)
10694 v0 := b.NewValue0(x5.Pos, OpARM64REV, w.Type)
10695 v0.AddArg(w)
10696 v.AddArg(v0)
10697 v.AddArg(mem)
10698 return true
10699 }
10700
10701
10702
10703 for {
10704 i := v.AuxInt
10705 s := v.Aux
10706 _ = v.Args[2]
10707 ptr := v.Args[0]
10708 w := v.Args[1]
10709 x0 := v.Args[2]
10710 if x0.Op != OpARM64MOVBstore {
10711 break
10712 }
10713 if x0.AuxInt != i-1 {
10714 break
10715 }
10716 if x0.Aux != s {
10717 break
10718 }
10719 _ = x0.Args[2]
10720 if ptr != x0.Args[0] {
10721 break
10722 }
10723 x0_1 := x0.Args[1]
10724 if x0_1.Op != OpARM64UBFX {
10725 break
10726 }
10727 if x0_1.AuxInt != armBFAuxInt(8, 24) {
10728 break
10729 }
10730 if w != x0_1.Args[0] {
10731 break
10732 }
10733 x1 := x0.Args[2]
10734 if x1.Op != OpARM64MOVBstore {
10735 break
10736 }
10737 if x1.AuxInt != i-2 {
10738 break
10739 }
10740 if x1.Aux != s {
10741 break
10742 }
10743 _ = x1.Args[2]
10744 if ptr != x1.Args[0] {
10745 break
10746 }
10747 x1_1 := x1.Args[1]
10748 if x1_1.Op != OpARM64UBFX {
10749 break
10750 }
10751 if x1_1.AuxInt != armBFAuxInt(16, 16) {
10752 break
10753 }
10754 if w != x1_1.Args[0] {
10755 break
10756 }
10757 x2 := x1.Args[2]
10758 if x2.Op != OpARM64MOVBstore {
10759 break
10760 }
10761 if x2.AuxInt != i-3 {
10762 break
10763 }
10764 if x2.Aux != s {
10765 break
10766 }
10767 mem := x2.Args[2]
10768 if ptr != x2.Args[0] {
10769 break
10770 }
10771 x2_1 := x2.Args[1]
10772 if x2_1.Op != OpARM64UBFX {
10773 break
10774 }
10775 if x2_1.AuxInt != armBFAuxInt(24, 8) {
10776 break
10777 }
10778 if w != x2_1.Args[0] {
10779 break
10780 }
10781 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0) && clobber(x1) && clobber(x2)) {
10782 break
10783 }
10784 v.reset(OpARM64MOVWstore)
10785 v.AuxInt = i - 3
10786 v.Aux = s
10787 v.AddArg(ptr)
10788 v0 := b.NewValue0(x2.Pos, OpARM64REVW, w.Type)
10789 v0.AddArg(w)
10790 v.AddArg(v0)
10791 v.AddArg(mem)
10792 return true
10793 }
10794
10795
10796
10797 for {
10798 if v.AuxInt != 3 {
10799 break
10800 }
10801 s := v.Aux
10802 _ = v.Args[2]
10803 p := v.Args[0]
10804 w := v.Args[1]
10805 x0 := v.Args[2]
10806 if x0.Op != OpARM64MOVBstore {
10807 break
10808 }
10809 if x0.AuxInt != 2 {
10810 break
10811 }
10812 if x0.Aux != s {
10813 break
10814 }
10815 _ = x0.Args[2]
10816 if p != x0.Args[0] {
10817 break
10818 }
10819 x0_1 := x0.Args[1]
10820 if x0_1.Op != OpARM64UBFX {
10821 break
10822 }
10823 if x0_1.AuxInt != armBFAuxInt(8, 24) {
10824 break
10825 }
10826 if w != x0_1.Args[0] {
10827 break
10828 }
10829 x1 := x0.Args[2]
10830 if x1.Op != OpARM64MOVBstore {
10831 break
10832 }
10833 if x1.AuxInt != 1 {
10834 break
10835 }
10836 if x1.Aux != s {
10837 break
10838 }
10839 _ = x1.Args[2]
10840 p1 := x1.Args[0]
10841 if p1.Op != OpARM64ADD {
10842 break
10843 }
10844 idx1 := p1.Args[1]
10845 ptr1 := p1.Args[0]
10846 x1_1 := x1.Args[1]
10847 if x1_1.Op != OpARM64UBFX {
10848 break
10849 }
10850 if x1_1.AuxInt != armBFAuxInt(16, 16) {
10851 break
10852 }
10853 if w != x1_1.Args[0] {
10854 break
10855 }
10856 x2 := x1.Args[2]
10857 if x2.Op != OpARM64MOVBstoreidx {
10858 break
10859 }
10860 mem := x2.Args[3]
10861 ptr0 := x2.Args[0]
10862 idx0 := x2.Args[1]
10863 x2_2 := x2.Args[2]
10864 if x2_2.Op != OpARM64UBFX {
10865 break
10866 }
10867 if x2_2.AuxInt != armBFAuxInt(24, 8) {
10868 break
10869 }
10870 if w != x2_2.Args[0] {
10871 break
10872 }
10873 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2)) {
10874 break
10875 }
10876 v.reset(OpARM64MOVWstoreidx)
10877 v.AddArg(ptr0)
10878 v.AddArg(idx0)
10879 v0 := b.NewValue0(x1.Pos, OpARM64REVW, w.Type)
10880 v0.AddArg(w)
10881 v.AddArg(v0)
10882 v.AddArg(mem)
10883 return true
10884 }
10885
10886
10887
10888 for {
10889 i := v.AuxInt
10890 s := v.Aux
10891 _ = v.Args[2]
10892 ptr := v.Args[0]
10893 w := v.Args[1]
10894 x0 := v.Args[2]
10895 if x0.Op != OpARM64MOVBstore {
10896 break
10897 }
10898 if x0.AuxInt != i-1 {
10899 break
10900 }
10901 if x0.Aux != s {
10902 break
10903 }
10904 _ = x0.Args[2]
10905 if ptr != x0.Args[0] {
10906 break
10907 }
10908 x0_1 := x0.Args[1]
10909 if x0_1.Op != OpARM64SRLconst {
10910 break
10911 }
10912 if x0_1.AuxInt != 8 {
10913 break
10914 }
10915 x0_1_0 := x0_1.Args[0]
10916 if x0_1_0.Op != OpARM64MOVDreg {
10917 break
10918 }
10919 if w != x0_1_0.Args[0] {
10920 break
10921 }
10922 x1 := x0.Args[2]
10923 if x1.Op != OpARM64MOVBstore {
10924 break
10925 }
10926 if x1.AuxInt != i-2 {
10927 break
10928 }
10929 if x1.Aux != s {
10930 break
10931 }
10932 _ = x1.Args[2]
10933 if ptr != x1.Args[0] {
10934 break
10935 }
10936 x1_1 := x1.Args[1]
10937 if x1_1.Op != OpARM64SRLconst {
10938 break
10939 }
10940 if x1_1.AuxInt != 16 {
10941 break
10942 }
10943 x1_1_0 := x1_1.Args[0]
10944 if x1_1_0.Op != OpARM64MOVDreg {
10945 break
10946 }
10947 if w != x1_1_0.Args[0] {
10948 break
10949 }
10950 x2 := x1.Args[2]
10951 if x2.Op != OpARM64MOVBstore {
10952 break
10953 }
10954 if x2.AuxInt != i-3 {
10955 break
10956 }
10957 if x2.Aux != s {
10958 break
10959 }
10960 mem := x2.Args[2]
10961 if ptr != x2.Args[0] {
10962 break
10963 }
10964 x2_1 := x2.Args[1]
10965 if x2_1.Op != OpARM64SRLconst {
10966 break
10967 }
10968 if x2_1.AuxInt != 24 {
10969 break
10970 }
10971 x2_1_0 := x2_1.Args[0]
10972 if x2_1_0.Op != OpARM64MOVDreg {
10973 break
10974 }
10975 if w != x2_1_0.Args[0] {
10976 break
10977 }
10978 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0) && clobber(x1) && clobber(x2)) {
10979 break
10980 }
10981 v.reset(OpARM64MOVWstore)
10982 v.AuxInt = i - 3
10983 v.Aux = s
10984 v.AddArg(ptr)
10985 v0 := b.NewValue0(x2.Pos, OpARM64REVW, w.Type)
10986 v0.AddArg(w)
10987 v.AddArg(v0)
10988 v.AddArg(mem)
10989 return true
10990 }
10991
10992
10993
10994 for {
10995 if v.AuxInt != 3 {
10996 break
10997 }
10998 s := v.Aux
10999 _ = v.Args[2]
11000 p := v.Args[0]
11001 w := v.Args[1]
11002 x0 := v.Args[2]
11003 if x0.Op != OpARM64MOVBstore {
11004 break
11005 }
11006 if x0.AuxInt != 2 {
11007 break
11008 }
11009 if x0.Aux != s {
11010 break
11011 }
11012 _ = x0.Args[2]
11013 if p != x0.Args[0] {
11014 break
11015 }
11016 x0_1 := x0.Args[1]
11017 if x0_1.Op != OpARM64SRLconst {
11018 break
11019 }
11020 if x0_1.AuxInt != 8 {
11021 break
11022 }
11023 x0_1_0 := x0_1.Args[0]
11024 if x0_1_0.Op != OpARM64MOVDreg {
11025 break
11026 }
11027 if w != x0_1_0.Args[0] {
11028 break
11029 }
11030 x1 := x0.Args[2]
11031 if x1.Op != OpARM64MOVBstore {
11032 break
11033 }
11034 if x1.AuxInt != 1 {
11035 break
11036 }
11037 if x1.Aux != s {
11038 break
11039 }
11040 _ = x1.Args[2]
11041 p1 := x1.Args[0]
11042 if p1.Op != OpARM64ADD {
11043 break
11044 }
11045 idx1 := p1.Args[1]
11046 ptr1 := p1.Args[0]
11047 x1_1 := x1.Args[1]
11048 if x1_1.Op != OpARM64SRLconst {
11049 break
11050 }
11051 if x1_1.AuxInt != 16 {
11052 break
11053 }
11054 x1_1_0 := x1_1.Args[0]
11055 if x1_1_0.Op != OpARM64MOVDreg {
11056 break
11057 }
11058 if w != x1_1_0.Args[0] {
11059 break
11060 }
11061 x2 := x1.Args[2]
11062 if x2.Op != OpARM64MOVBstoreidx {
11063 break
11064 }
11065 mem := x2.Args[3]
11066 ptr0 := x2.Args[0]
11067 idx0 := x2.Args[1]
11068 x2_2 := x2.Args[2]
11069 if x2_2.Op != OpARM64SRLconst {
11070 break
11071 }
11072 if x2_2.AuxInt != 24 {
11073 break
11074 }
11075 x2_2_0 := x2_2.Args[0]
11076 if x2_2_0.Op != OpARM64MOVDreg {
11077 break
11078 }
11079 if w != x2_2_0.Args[0] {
11080 break
11081 }
11082 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2)) {
11083 break
11084 }
11085 v.reset(OpARM64MOVWstoreidx)
11086 v.AddArg(ptr0)
11087 v.AddArg(idx0)
11088 v0 := b.NewValue0(x1.Pos, OpARM64REVW, w.Type)
11089 v0.AddArg(w)
11090 v.AddArg(v0)
11091 v.AddArg(mem)
11092 return true
11093 }
11094 return false
11095 }
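// rewriteValueARM64_OpARM64MOVBstore_30 handles the remaining big-endian patterns: four
// consecutive byte stores of SRLconst [8/16/24] pieces become a MOVWstore/MOVWstoreidx of
// (REVW w), and a byte store of w whose high byte (SRLconst [8], UBFX, or SRLconst of a
// MOVDreg) was stored at the preceding offset becomes a MOVHstore/MOVHstoreidx of
// (REV16W w).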
11096 func rewriteValueARM64_OpARM64MOVBstore_30(v *Value) bool {
11097 b := v.Block
11098
11099
11100
11101 for {
11102 i := v.AuxInt
11103 s := v.Aux
11104 _ = v.Args[2]
11105 ptr := v.Args[0]
11106 w := v.Args[1]
11107 x0 := v.Args[2]
11108 if x0.Op != OpARM64MOVBstore {
11109 break
11110 }
11111 if x0.AuxInt != i-1 {
11112 break
11113 }
11114 if x0.Aux != s {
11115 break
11116 }
11117 _ = x0.Args[2]
11118 if ptr != x0.Args[0] {
11119 break
11120 }
11121 x0_1 := x0.Args[1]
11122 if x0_1.Op != OpARM64SRLconst {
11123 break
11124 }
11125 if x0_1.AuxInt != 8 {
11126 break
11127 }
11128 if w != x0_1.Args[0] {
11129 break
11130 }
11131 x1 := x0.Args[2]
11132 if x1.Op != OpARM64MOVBstore {
11133 break
11134 }
11135 if x1.AuxInt != i-2 {
11136 break
11137 }
11138 if x1.Aux != s {
11139 break
11140 }
11141 _ = x1.Args[2]
11142 if ptr != x1.Args[0] {
11143 break
11144 }
11145 x1_1 := x1.Args[1]
11146 if x1_1.Op != OpARM64SRLconst {
11147 break
11148 }
11149 if x1_1.AuxInt != 16 {
11150 break
11151 }
11152 if w != x1_1.Args[0] {
11153 break
11154 }
11155 x2 := x1.Args[2]
11156 if x2.Op != OpARM64MOVBstore {
11157 break
11158 }
11159 if x2.AuxInt != i-3 {
11160 break
11161 }
11162 if x2.Aux != s {
11163 break
11164 }
11165 mem := x2.Args[2]
11166 if ptr != x2.Args[0] {
11167 break
11168 }
11169 x2_1 := x2.Args[1]
11170 if x2_1.Op != OpARM64SRLconst {
11171 break
11172 }
11173 if x2_1.AuxInt != 24 {
11174 break
11175 }
11176 if w != x2_1.Args[0] {
11177 break
11178 }
11179 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0) && clobber(x1) && clobber(x2)) {
11180 break
11181 }
11182 v.reset(OpARM64MOVWstore)
11183 v.AuxInt = i - 3
11184 v.Aux = s
11185 v.AddArg(ptr)
11186 v0 := b.NewValue0(x2.Pos, OpARM64REVW, w.Type)
11187 v0.AddArg(w)
11188 v.AddArg(v0)
11189 v.AddArg(mem)
11190 return true
11191 }
11192
11193
11194
11195 for {
11196 if v.AuxInt != 3 {
11197 break
11198 }
11199 s := v.Aux
11200 _ = v.Args[2]
11201 p := v.Args[0]
11202 w := v.Args[1]
11203 x0 := v.Args[2]
11204 if x0.Op != OpARM64MOVBstore {
11205 break
11206 }
11207 if x0.AuxInt != 2 {
11208 break
11209 }
11210 if x0.Aux != s {
11211 break
11212 }
11213 _ = x0.Args[2]
11214 if p != x0.Args[0] {
11215 break
11216 }
11217 x0_1 := x0.Args[1]
11218 if x0_1.Op != OpARM64SRLconst {
11219 break
11220 }
11221 if x0_1.AuxInt != 8 {
11222 break
11223 }
11224 if w != x0_1.Args[0] {
11225 break
11226 }
11227 x1 := x0.Args[2]
11228 if x1.Op != OpARM64MOVBstore {
11229 break
11230 }
11231 if x1.AuxInt != 1 {
11232 break
11233 }
11234 if x1.Aux != s {
11235 break
11236 }
11237 _ = x1.Args[2]
11238 p1 := x1.Args[0]
11239 if p1.Op != OpARM64ADD {
11240 break
11241 }
11242 idx1 := p1.Args[1]
11243 ptr1 := p1.Args[0]
11244 x1_1 := x1.Args[1]
11245 if x1_1.Op != OpARM64SRLconst {
11246 break
11247 }
11248 if x1_1.AuxInt != 16 {
11249 break
11250 }
11251 if w != x1_1.Args[0] {
11252 break
11253 }
11254 x2 := x1.Args[2]
11255 if x2.Op != OpARM64MOVBstoreidx {
11256 break
11257 }
11258 mem := x2.Args[3]
11259 ptr0 := x2.Args[0]
11260 idx0 := x2.Args[1]
11261 x2_2 := x2.Args[2]
11262 if x2_2.Op != OpARM64SRLconst {
11263 break
11264 }
11265 if x2_2.AuxInt != 24 {
11266 break
11267 }
11268 if w != x2_2.Args[0] {
11269 break
11270 }
11271 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2)) {
11272 break
11273 }
11274 v.reset(OpARM64MOVWstoreidx)
11275 v.AddArg(ptr0)
11276 v.AddArg(idx0)
11277 v0 := b.NewValue0(x1.Pos, OpARM64REVW, w.Type)
11278 v0.AddArg(w)
11279 v.AddArg(v0)
11280 v.AddArg(mem)
11281 return true
11282 }
11283
11284
11285
11286 for {
11287 i := v.AuxInt
11288 s := v.Aux
11289 _ = v.Args[2]
11290 ptr := v.Args[0]
11291 w := v.Args[1]
11292 x := v.Args[2]
11293 if x.Op != OpARM64MOVBstore {
11294 break
11295 }
11296 if x.AuxInt != i-1 {
11297 break
11298 }
11299 if x.Aux != s {
11300 break
11301 }
11302 mem := x.Args[2]
11303 if ptr != x.Args[0] {
11304 break
11305 }
11306 x_1 := x.Args[1]
11307 if x_1.Op != OpARM64SRLconst {
11308 break
11309 }
11310 if x_1.AuxInt != 8 {
11311 break
11312 }
11313 if w != x_1.Args[0] {
11314 break
11315 }
11316 if !(x.Uses == 1 && clobber(x)) {
11317 break
11318 }
11319 v.reset(OpARM64MOVHstore)
11320 v.AuxInt = i - 1
11321 v.Aux = s
11322 v.AddArg(ptr)
11323 v0 := b.NewValue0(x.Pos, OpARM64REV16W, w.Type)
11324 v0.AddArg(w)
11325 v.AddArg(v0)
11326 v.AddArg(mem)
11327 return true
11328 }
11329
11330
11331
11332 for {
11333 if v.AuxInt != 1 {
11334 break
11335 }
11336 s := v.Aux
11337 _ = v.Args[2]
11338 v_0 := v.Args[0]
11339 if v_0.Op != OpARM64ADD {
11340 break
11341 }
11342 idx1 := v_0.Args[1]
11343 ptr1 := v_0.Args[0]
11344 w := v.Args[1]
11345 x := v.Args[2]
11346 if x.Op != OpARM64MOVBstoreidx {
11347 break
11348 }
11349 mem := x.Args[3]
11350 ptr0 := x.Args[0]
11351 idx0 := x.Args[1]
11352 x_2 := x.Args[2]
11353 if x_2.Op != OpARM64SRLconst {
11354 break
11355 }
11356 if x_2.AuxInt != 8 {
11357 break
11358 }
11359 if w != x_2.Args[0] {
11360 break
11361 }
11362 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
11363 break
11364 }
11365 v.reset(OpARM64MOVHstoreidx)
11366 v.AddArg(ptr0)
11367 v.AddArg(idx0)
11368 v0 := b.NewValue0(v.Pos, OpARM64REV16W, w.Type)
11369 v0.AddArg(w)
11370 v.AddArg(v0)
11371 v.AddArg(mem)
11372 return true
11373 }
11374
11375
11376
11377 for {
11378 i := v.AuxInt
11379 s := v.Aux
11380 _ = v.Args[2]
11381 ptr := v.Args[0]
11382 w := v.Args[1]
11383 x := v.Args[2]
11384 if x.Op != OpARM64MOVBstore {
11385 break
11386 }
11387 if x.AuxInt != i-1 {
11388 break
11389 }
11390 if x.Aux != s {
11391 break
11392 }
11393 mem := x.Args[2]
11394 if ptr != x.Args[0] {
11395 break
11396 }
11397 x_1 := x.Args[1]
11398 if x_1.Op != OpARM64UBFX {
11399 break
11400 }
11401 if x_1.AuxInt != armBFAuxInt(8, 8) {
11402 break
11403 }
11404 if w != x_1.Args[0] {
11405 break
11406 }
11407 if !(x.Uses == 1 && clobber(x)) {
11408 break
11409 }
11410 v.reset(OpARM64MOVHstore)
11411 v.AuxInt = i - 1
11412 v.Aux = s
11413 v.AddArg(ptr)
11414 v0 := b.NewValue0(x.Pos, OpARM64REV16W, w.Type)
11415 v0.AddArg(w)
11416 v.AddArg(v0)
11417 v.AddArg(mem)
11418 return true
11419 }
11420
11421
11422
11423 for {
11424 if v.AuxInt != 1 {
11425 break
11426 }
11427 s := v.Aux
11428 _ = v.Args[2]
11429 v_0 := v.Args[0]
11430 if v_0.Op != OpARM64ADD {
11431 break
11432 }
11433 idx1 := v_0.Args[1]
11434 ptr1 := v_0.Args[0]
11435 w := v.Args[1]
11436 x := v.Args[2]
11437 if x.Op != OpARM64MOVBstoreidx {
11438 break
11439 }
11440 mem := x.Args[3]
11441 ptr0 := x.Args[0]
11442 idx0 := x.Args[1]
11443 x_2 := x.Args[2]
11444 if x_2.Op != OpARM64UBFX {
11445 break
11446 }
11447 if x_2.AuxInt != armBFAuxInt(8, 8) {
11448 break
11449 }
11450 if w != x_2.Args[0] {
11451 break
11452 }
11453 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
11454 break
11455 }
11456 v.reset(OpARM64MOVHstoreidx)
11457 v.AddArg(ptr0)
11458 v.AddArg(idx0)
11459 v0 := b.NewValue0(v.Pos, OpARM64REV16W, w.Type)
11460 v0.AddArg(w)
11461 v.AddArg(v0)
11462 v.AddArg(mem)
11463 return true
11464 }
11465
11466
11467
11468 for {
11469 i := v.AuxInt
11470 s := v.Aux
11471 _ = v.Args[2]
11472 ptr := v.Args[0]
11473 w := v.Args[1]
11474 x := v.Args[2]
11475 if x.Op != OpARM64MOVBstore {
11476 break
11477 }
11478 if x.AuxInt != i-1 {
11479 break
11480 }
11481 if x.Aux != s {
11482 break
11483 }
11484 mem := x.Args[2]
11485 if ptr != x.Args[0] {
11486 break
11487 }
11488 x_1 := x.Args[1]
11489 if x_1.Op != OpARM64SRLconst {
11490 break
11491 }
11492 if x_1.AuxInt != 8 {
11493 break
11494 }
11495 x_1_0 := x_1.Args[0]
11496 if x_1_0.Op != OpARM64MOVDreg {
11497 break
11498 }
11499 if w != x_1_0.Args[0] {
11500 break
11501 }
11502 if !(x.Uses == 1 && clobber(x)) {
11503 break
11504 }
11505 v.reset(OpARM64MOVHstore)
11506 v.AuxInt = i - 1
11507 v.Aux = s
11508 v.AddArg(ptr)
11509 v0 := b.NewValue0(x.Pos, OpARM64REV16W, w.Type)
11510 v0.AddArg(w)
11511 v.AddArg(v0)
11512 v.AddArg(mem)
11513 return true
11514 }
11515
11516
11517
11518 for {
11519 if v.AuxInt != 1 {
11520 break
11521 }
11522 s := v.Aux
11523 _ = v.Args[2]
11524 v_0 := v.Args[0]
11525 if v_0.Op != OpARM64ADD {
11526 break
11527 }
11528 idx1 := v_0.Args[1]
11529 ptr1 := v_0.Args[0]
11530 w := v.Args[1]
11531 x := v.Args[2]
11532 if x.Op != OpARM64MOVBstoreidx {
11533 break
11534 }
11535 mem := x.Args[3]
11536 ptr0 := x.Args[0]
11537 idx0 := x.Args[1]
11538 x_2 := x.Args[2]
11539 if x_2.Op != OpARM64SRLconst {
11540 break
11541 }
11542 if x_2.AuxInt != 8 {
11543 break
11544 }
11545 x_2_0 := x_2.Args[0]
11546 if x_2_0.Op != OpARM64MOVDreg {
11547 break
11548 }
11549 if w != x_2_0.Args[0] {
11550 break
11551 }
11552 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
11553 break
11554 }
11555 v.reset(OpARM64MOVHstoreidx)
11556 v.AddArg(ptr0)
11557 v.AddArg(idx0)
11558 v0 := b.NewValue0(v.Pos, OpARM64REV16W, w.Type)
11559 v0.AddArg(w)
11560 v.AddArg(v0)
11561 v.AddArg(mem)
11562 return true
11563 }
11564
11565
11566
11567 for {
11568 i := v.AuxInt
11569 s := v.Aux
11570 _ = v.Args[2]
11571 ptr := v.Args[0]
11572 w := v.Args[1]
11573 x := v.Args[2]
11574 if x.Op != OpARM64MOVBstore {
11575 break
11576 }
11577 if x.AuxInt != i-1 {
11578 break
11579 }
11580 if x.Aux != s {
11581 break
11582 }
11583 mem := x.Args[2]
11584 if ptr != x.Args[0] {
11585 break
11586 }
11587 x_1 := x.Args[1]
11588 if x_1.Op != OpARM64UBFX {
11589 break
11590 }
11591 if x_1.AuxInt != armBFAuxInt(8, 24) {
11592 break
11593 }
11594 if w != x_1.Args[0] {
11595 break
11596 }
11597 if !(x.Uses == 1 && clobber(x)) {
11598 break
11599 }
11600 v.reset(OpARM64MOVHstore)
11601 v.AuxInt = i - 1
11602 v.Aux = s
11603 v.AddArg(ptr)
11604 v0 := b.NewValue0(x.Pos, OpARM64REV16W, w.Type)
11605 v0.AddArg(w)
11606 v.AddArg(v0)
11607 v.AddArg(mem)
11608 return true
11609 }
11610
11611
11612
11613 for {
11614 if v.AuxInt != 1 {
11615 break
11616 }
11617 s := v.Aux
11618 _ = v.Args[2]
11619 v_0 := v.Args[0]
11620 if v_0.Op != OpARM64ADD {
11621 break
11622 }
11623 idx1 := v_0.Args[1]
11624 ptr1 := v_0.Args[0]
11625 w := v.Args[1]
11626 x := v.Args[2]
11627 if x.Op != OpARM64MOVBstoreidx {
11628 break
11629 }
11630 mem := x.Args[3]
11631 ptr0 := x.Args[0]
11632 idx0 := x.Args[1]
11633 x_2 := x.Args[2]
11634 if x_2.Op != OpARM64UBFX {
11635 break
11636 }
11637 if x_2.AuxInt != armBFAuxInt(8, 24) {
11638 break
11639 }
11640 if w != x_2.Args[0] {
11641 break
11642 }
11643 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
11644 break
11645 }
11646 v.reset(OpARM64MOVHstoreidx)
11647 v.AddArg(ptr0)
11648 v.AddArg(idx0)
11649 v0 := b.NewValue0(v.Pos, OpARM64REV16W, w.Type)
11650 v0.AddArg(w)
11651 v.AddArg(v0)
11652 v.AddArg(mem)
11653 return true
11654 }
11655 return false
11656 }
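// rewriteValueARM64_OpARM64MOVBstore_40 finishes the big-endian pair merging for the
// SRLconst [8] (MOVDreg w) form, producing a MOVHstore (or MOVHstoreidx) of (REV16W w).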
11657 func rewriteValueARM64_OpARM64MOVBstore_40(v *Value) bool {
11658 b := v.Block
11659
11660
11661
11662 for {
11663 i := v.AuxInt
11664 s := v.Aux
11665 _ = v.Args[2]
11666 ptr := v.Args[0]
11667 w := v.Args[1]
11668 x := v.Args[2]
11669 if x.Op != OpARM64MOVBstore {
11670 break
11671 }
11672 if x.AuxInt != i-1 {
11673 break
11674 }
11675 if x.Aux != s {
11676 break
11677 }
11678 mem := x.Args[2]
11679 if ptr != x.Args[0] {
11680 break
11681 }
11682 x_1 := x.Args[1]
11683 if x_1.Op != OpARM64SRLconst {
11684 break
11685 }
11686 if x_1.AuxInt != 8 {
11687 break
11688 }
11689 x_1_0 := x_1.Args[0]
11690 if x_1_0.Op != OpARM64MOVDreg {
11691 break
11692 }
11693 if w != x_1_0.Args[0] {
11694 break
11695 }
11696 if !(x.Uses == 1 && clobber(x)) {
11697 break
11698 }
11699 v.reset(OpARM64MOVHstore)
11700 v.AuxInt = i - 1
11701 v.Aux = s
11702 v.AddArg(ptr)
11703 v0 := b.NewValue0(x.Pos, OpARM64REV16W, w.Type)
11704 v0.AddArg(w)
11705 v.AddArg(v0)
11706 v.AddArg(mem)
11707 return true
11708 }
11709 // match: (MOVBstore [1] {s} (ADD ptr1 idx1) w x:(MOVBstoreidx ptr0 idx0 (SRLconst [8] (MOVDreg w)) mem))
11710 // cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)
11711 // result: (MOVHstoreidx ptr0 idx0 (REV16W <w.Type> w) mem)
11712 for {
11713 if v.AuxInt != 1 {
11714 break
11715 }
11716 s := v.Aux
11717 _ = v.Args[2]
11718 v_0 := v.Args[0]
11719 if v_0.Op != OpARM64ADD {
11720 break
11721 }
11722 idx1 := v_0.Args[1]
11723 ptr1 := v_0.Args[0]
11724 w := v.Args[1]
11725 x := v.Args[2]
11726 if x.Op != OpARM64MOVBstoreidx {
11727 break
11728 }
11729 mem := x.Args[3]
11730 ptr0 := x.Args[0]
11731 idx0 := x.Args[1]
11732 x_2 := x.Args[2]
11733 if x_2.Op != OpARM64SRLconst {
11734 break
11735 }
11736 if x_2.AuxInt != 8 {
11737 break
11738 }
11739 x_2_0 := x_2.Args[0]
11740 if x_2_0.Op != OpARM64MOVDreg {
11741 break
11742 }
11743 if w != x_2_0.Args[0] {
11744 break
11745 }
11746 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
11747 break
11748 }
11749 v.reset(OpARM64MOVHstoreidx)
11750 v.AddArg(ptr0)
11751 v.AddArg(idx0)
11752 v0 := b.NewValue0(v.Pos, OpARM64REV16W, w.Type)
11753 v0.AddArg(w)
11754 v.AddArg(v0)
11755 v.AddArg(mem)
11756 return true
11757 }
11758 return false
11759 }
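// rewriteValueARM64_OpARM64MOVBstoreidx_0 rewrites MOVBstoreidx values: a constant index is
// folded into the store offset, redundant extensions of the stored value are dropped, stores of
// constant zero become MOVBstorezeroidx, and a pair of adjacent byte stores becomes MOVHstoreidx.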
11760 func rewriteValueARM64_OpARM64MOVBstoreidx_0(v *Value) bool {
11761 // match: (MOVBstoreidx ptr (MOVDconst [c]) val mem)
11762 // cond:
11763 // result: (MOVBstore [c] ptr val mem)
11764 for {
11765 mem := v.Args[3]
11766 ptr := v.Args[0]
11767 v_1 := v.Args[1]
11768 if v_1.Op != OpARM64MOVDconst {
11769 break
11770 }
11771 c := v_1.AuxInt
11772 val := v.Args[2]
11773 v.reset(OpARM64MOVBstore)
11774 v.AuxInt = c
11775 v.AddArg(ptr)
11776 v.AddArg(val)
11777 v.AddArg(mem)
11778 return true
11779 }
11780 // match: (MOVBstoreidx (MOVDconst [c]) idx val mem)
11781 // cond:
11782 // result: (MOVBstore [c] idx val mem)
11783 for {
11784 mem := v.Args[3]
11785 v_0 := v.Args[0]
11786 if v_0.Op != OpARM64MOVDconst {
11787 break
11788 }
11789 c := v_0.AuxInt
11790 idx := v.Args[1]
11791 val := v.Args[2]
11792 v.reset(OpARM64MOVBstore)
11793 v.AuxInt = c
11794 v.AddArg(idx)
11795 v.AddArg(val)
11796 v.AddArg(mem)
11797 return true
11798 }
11799 // match: (MOVBstoreidx ptr idx (MOVDconst [0]) mem)
11800 // cond:
11801 // result: (MOVBstorezeroidx ptr idx mem)
11802 for {
11803 mem := v.Args[3]
11804 ptr := v.Args[0]
11805 idx := v.Args[1]
11806 v_2 := v.Args[2]
11807 if v_2.Op != OpARM64MOVDconst {
11808 break
11809 }
11810 if v_2.AuxInt != 0 {
11811 break
11812 }
11813 v.reset(OpARM64MOVBstorezeroidx)
11814 v.AddArg(ptr)
11815 v.AddArg(idx)
11816 v.AddArg(mem)
11817 return true
11818 }
11819 // match: (MOVBstoreidx ptr idx (MOVBreg x) mem)
11820 // cond:
11821 // result: (MOVBstoreidx ptr idx x mem)
11822 for {
11823 mem := v.Args[3]
11824 ptr := v.Args[0]
11825 idx := v.Args[1]
11826 v_2 := v.Args[2]
11827 if v_2.Op != OpARM64MOVBreg {
11828 break
11829 }
11830 x := v_2.Args[0]
11831 v.reset(OpARM64MOVBstoreidx)
11832 v.AddArg(ptr)
11833 v.AddArg(idx)
11834 v.AddArg(x)
11835 v.AddArg(mem)
11836 return true
11837 }
11838 // match: (MOVBstoreidx ptr idx (MOVBUreg x) mem)
11839 // cond:
11840 // result: (MOVBstoreidx ptr idx x mem)
11841 for {
11842 mem := v.Args[3]
11843 ptr := v.Args[0]
11844 idx := v.Args[1]
11845 v_2 := v.Args[2]
11846 if v_2.Op != OpARM64MOVBUreg {
11847 break
11848 }
11849 x := v_2.Args[0]
11850 v.reset(OpARM64MOVBstoreidx)
11851 v.AddArg(ptr)
11852 v.AddArg(idx)
11853 v.AddArg(x)
11854 v.AddArg(mem)
11855 return true
11856 }
11857 // match: (MOVBstoreidx ptr idx (MOVHreg x) mem)
11858 // cond:
11859 // result: (MOVBstoreidx ptr idx x mem)
11860 for {
11861 mem := v.Args[3]
11862 ptr := v.Args[0]
11863 idx := v.Args[1]
11864 v_2 := v.Args[2]
11865 if v_2.Op != OpARM64MOVHreg {
11866 break
11867 }
11868 x := v_2.Args[0]
11869 v.reset(OpARM64MOVBstoreidx)
11870 v.AddArg(ptr)
11871 v.AddArg(idx)
11872 v.AddArg(x)
11873 v.AddArg(mem)
11874 return true
11875 }
11876 // match: (MOVBstoreidx ptr idx (MOVHUreg x) mem)
11877 // cond:
11878 // result: (MOVBstoreidx ptr idx x mem)
11879 for {
11880 mem := v.Args[3]
11881 ptr := v.Args[0]
11882 idx := v.Args[1]
11883 v_2 := v.Args[2]
11884 if v_2.Op != OpARM64MOVHUreg {
11885 break
11886 }
11887 x := v_2.Args[0]
11888 v.reset(OpARM64MOVBstoreidx)
11889 v.AddArg(ptr)
11890 v.AddArg(idx)
11891 v.AddArg(x)
11892 v.AddArg(mem)
11893 return true
11894 }
11895 // match: (MOVBstoreidx ptr idx (MOVWreg x) mem)
11896 // cond:
11897 // result: (MOVBstoreidx ptr idx x mem)
11898 for {
11899 mem := v.Args[3]
11900 ptr := v.Args[0]
11901 idx := v.Args[1]
11902 v_2 := v.Args[2]
11903 if v_2.Op != OpARM64MOVWreg {
11904 break
11905 }
11906 x := v_2.Args[0]
11907 v.reset(OpARM64MOVBstoreidx)
11908 v.AddArg(ptr)
11909 v.AddArg(idx)
11910 v.AddArg(x)
11911 v.AddArg(mem)
11912 return true
11913 }
11914 // match: (MOVBstoreidx ptr idx (MOVWUreg x) mem)
11915 // cond:
11916 // result: (MOVBstoreidx ptr idx x mem)
11917 for {
11918 mem := v.Args[3]
11919 ptr := v.Args[0]
11920 idx := v.Args[1]
11921 v_2 := v.Args[2]
11922 if v_2.Op != OpARM64MOVWUreg {
11923 break
11924 }
11925 x := v_2.Args[0]
11926 v.reset(OpARM64MOVBstoreidx)
11927 v.AddArg(ptr)
11928 v.AddArg(idx)
11929 v.AddArg(x)
11930 v.AddArg(mem)
11931 return true
11932 }
11933 // match: (MOVBstoreidx ptr (ADDconst [1] idx) (SRLconst [8] w) x:(MOVBstoreidx ptr idx w mem))
11934 // cond: x.Uses == 1 && clobber(x)
11935 // result: (MOVHstoreidx ptr idx w mem)
11936 for {
11937 _ = v.Args[3]
11938 ptr := v.Args[0]
11939 v_1 := v.Args[1]
11940 if v_1.Op != OpARM64ADDconst {
11941 break
11942 }
11943 if v_1.AuxInt != 1 {
11944 break
11945 }
11946 idx := v_1.Args[0]
11947 v_2 := v.Args[2]
11948 if v_2.Op != OpARM64SRLconst {
11949 break
11950 }
11951 if v_2.AuxInt != 8 {
11952 break
11953 }
11954 w := v_2.Args[0]
11955 x := v.Args[3]
11956 if x.Op != OpARM64MOVBstoreidx {
11957 break
11958 }
11959 mem := x.Args[3]
11960 if ptr != x.Args[0] {
11961 break
11962 }
11963 if idx != x.Args[1] {
11964 break
11965 }
11966 if w != x.Args[2] {
11967 break
11968 }
11969 if !(x.Uses == 1 && clobber(x)) {
11970 break
11971 }
11972 v.reset(OpARM64MOVHstoreidx)
11973 v.AddArg(ptr)
11974 v.AddArg(idx)
11975 v.AddArg(w)
11976 v.AddArg(mem)
11977 return true
11978 }
11979 return false
11980 }
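// rewriteValueARM64_OpARM64MOVBstoreidx_10 continues the MOVBstoreidx rules: four adjacent byte
// stores of one 32-bit value are combined into MOVWstoreidx (inserting a REVW when the bytes are
// written in reversed order), and two adjacent byte stores into MOVHstoreidx (with REV16W).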
11981 func rewriteValueARM64_OpARM64MOVBstoreidx_10(v *Value) bool {
11982 b := v.Block
11983 // match: (MOVBstoreidx ptr (ADDconst [3] idx) w x0:(MOVBstoreidx ptr (ADDconst [2] idx) (UBFX [armBFAuxInt(8, 24)] w) x1:(MOVBstoreidx ptr (ADDconst [1] idx) (UBFX [armBFAuxInt(16, 16)] w) x2:(MOVBstoreidx ptr idx (UBFX [armBFAuxInt(24, 8)] w) mem))))
11984 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0) && clobber(x1) && clobber(x2)
11985 // result: (MOVWstoreidx ptr idx (REVW <w.Type> w) mem)
11986 for {
11987 _ = v.Args[3]
11988 ptr := v.Args[0]
11989 v_1 := v.Args[1]
11990 if v_1.Op != OpARM64ADDconst {
11991 break
11992 }
11993 if v_1.AuxInt != 3 {
11994 break
11995 }
11996 idx := v_1.Args[0]
11997 w := v.Args[2]
11998 x0 := v.Args[3]
11999 if x0.Op != OpARM64MOVBstoreidx {
12000 break
12001 }
12002 _ = x0.Args[3]
12003 if ptr != x0.Args[0] {
12004 break
12005 }
12006 x0_1 := x0.Args[1]
12007 if x0_1.Op != OpARM64ADDconst {
12008 break
12009 }
12010 if x0_1.AuxInt != 2 {
12011 break
12012 }
12013 if idx != x0_1.Args[0] {
12014 break
12015 }
12016 x0_2 := x0.Args[2]
12017 if x0_2.Op != OpARM64UBFX {
12018 break
12019 }
12020 if x0_2.AuxInt != armBFAuxInt(8, 24) {
12021 break
12022 }
12023 if w != x0_2.Args[0] {
12024 break
12025 }
12026 x1 := x0.Args[3]
12027 if x1.Op != OpARM64MOVBstoreidx {
12028 break
12029 }
12030 _ = x1.Args[3]
12031 if ptr != x1.Args[0] {
12032 break
12033 }
12034 x1_1 := x1.Args[1]
12035 if x1_1.Op != OpARM64ADDconst {
12036 break
12037 }
12038 if x1_1.AuxInt != 1 {
12039 break
12040 }
12041 if idx != x1_1.Args[0] {
12042 break
12043 }
12044 x1_2 := x1.Args[2]
12045 if x1_2.Op != OpARM64UBFX {
12046 break
12047 }
12048 if x1_2.AuxInt != armBFAuxInt(16, 16) {
12049 break
12050 }
12051 if w != x1_2.Args[0] {
12052 break
12053 }
12054 x2 := x1.Args[3]
12055 if x2.Op != OpARM64MOVBstoreidx {
12056 break
12057 }
12058 mem := x2.Args[3]
12059 if ptr != x2.Args[0] {
12060 break
12061 }
12062 if idx != x2.Args[1] {
12063 break
12064 }
12065 x2_2 := x2.Args[2]
12066 if x2_2.Op != OpARM64UBFX {
12067 break
12068 }
12069 if x2_2.AuxInt != armBFAuxInt(24, 8) {
12070 break
12071 }
12072 if w != x2_2.Args[0] {
12073 break
12074 }
12075 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0) && clobber(x1) && clobber(x2)) {
12076 break
12077 }
12078 v.reset(OpARM64MOVWstoreidx)
12079 v.AddArg(ptr)
12080 v.AddArg(idx)
12081 v0 := b.NewValue0(v.Pos, OpARM64REVW, w.Type)
12082 v0.AddArg(w)
12083 v.AddArg(v0)
12084 v.AddArg(mem)
12085 return true
12086 }
12087 // match: (MOVBstoreidx ptr idx w x0:(MOVBstoreidx ptr (ADDconst [1] idx) (UBFX [armBFAuxInt(8, 24)] w) x1:(MOVBstoreidx ptr (ADDconst [2] idx) (UBFX [armBFAuxInt(16, 16)] w) x2:(MOVBstoreidx ptr (ADDconst [3] idx) (UBFX [armBFAuxInt(24, 8)] w) mem))))
12088 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0) && clobber(x1) && clobber(x2)
12089 // result: (MOVWstoreidx ptr idx w mem)
12090 for {
12091 _ = v.Args[3]
12092 ptr := v.Args[0]
12093 idx := v.Args[1]
12094 w := v.Args[2]
12095 x0 := v.Args[3]
12096 if x0.Op != OpARM64MOVBstoreidx {
12097 break
12098 }
12099 _ = x0.Args[3]
12100 if ptr != x0.Args[0] {
12101 break
12102 }
12103 x0_1 := x0.Args[1]
12104 if x0_1.Op != OpARM64ADDconst {
12105 break
12106 }
12107 if x0_1.AuxInt != 1 {
12108 break
12109 }
12110 if idx != x0_1.Args[0] {
12111 break
12112 }
12113 x0_2 := x0.Args[2]
12114 if x0_2.Op != OpARM64UBFX {
12115 break
12116 }
12117 if x0_2.AuxInt != armBFAuxInt(8, 24) {
12118 break
12119 }
12120 if w != x0_2.Args[0] {
12121 break
12122 }
12123 x1 := x0.Args[3]
12124 if x1.Op != OpARM64MOVBstoreidx {
12125 break
12126 }
12127 _ = x1.Args[3]
12128 if ptr != x1.Args[0] {
12129 break
12130 }
12131 x1_1 := x1.Args[1]
12132 if x1_1.Op != OpARM64ADDconst {
12133 break
12134 }
12135 if x1_1.AuxInt != 2 {
12136 break
12137 }
12138 if idx != x1_1.Args[0] {
12139 break
12140 }
12141 x1_2 := x1.Args[2]
12142 if x1_2.Op != OpARM64UBFX {
12143 break
12144 }
12145 if x1_2.AuxInt != armBFAuxInt(16, 16) {
12146 break
12147 }
12148 if w != x1_2.Args[0] {
12149 break
12150 }
12151 x2 := x1.Args[3]
12152 if x2.Op != OpARM64MOVBstoreidx {
12153 break
12154 }
12155 mem := x2.Args[3]
12156 if ptr != x2.Args[0] {
12157 break
12158 }
12159 x2_1 := x2.Args[1]
12160 if x2_1.Op != OpARM64ADDconst {
12161 break
12162 }
12163 if x2_1.AuxInt != 3 {
12164 break
12165 }
12166 if idx != x2_1.Args[0] {
12167 break
12168 }
12169 x2_2 := x2.Args[2]
12170 if x2_2.Op != OpARM64UBFX {
12171 break
12172 }
12173 if x2_2.AuxInt != armBFAuxInt(24, 8) {
12174 break
12175 }
12176 if w != x2_2.Args[0] {
12177 break
12178 }
12179 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0) && clobber(x1) && clobber(x2)) {
12180 break
12181 }
12182 v.reset(OpARM64MOVWstoreidx)
12183 v.AddArg(ptr)
12184 v.AddArg(idx)
12185 v.AddArg(w)
12186 v.AddArg(mem)
12187 return true
12188 }
12189 // match: (MOVBstoreidx ptr (ADDconst [1] idx) w x:(MOVBstoreidx ptr idx (UBFX [armBFAuxInt(8, 8)] w) mem))
12190 // cond: x.Uses == 1 && clobber(x)
12191 // result: (MOVHstoreidx ptr idx (REV16W <w.Type> w) mem)
12192 for {
12193 _ = v.Args[3]
12194 ptr := v.Args[0]
12195 v_1 := v.Args[1]
12196 if v_1.Op != OpARM64ADDconst {
12197 break
12198 }
12199 if v_1.AuxInt != 1 {
12200 break
12201 }
12202 idx := v_1.Args[0]
12203 w := v.Args[2]
12204 x := v.Args[3]
12205 if x.Op != OpARM64MOVBstoreidx {
12206 break
12207 }
12208 mem := x.Args[3]
12209 if ptr != x.Args[0] {
12210 break
12211 }
12212 if idx != x.Args[1] {
12213 break
12214 }
12215 x_2 := x.Args[2]
12216 if x_2.Op != OpARM64UBFX {
12217 break
12218 }
12219 if x_2.AuxInt != armBFAuxInt(8, 8) {
12220 break
12221 }
12222 if w != x_2.Args[0] {
12223 break
12224 }
12225 if !(x.Uses == 1 && clobber(x)) {
12226 break
12227 }
12228 v.reset(OpARM64MOVHstoreidx)
12229 v.AddArg(ptr)
12230 v.AddArg(idx)
12231 v0 := b.NewValue0(v.Pos, OpARM64REV16W, w.Type)
12232 v0.AddArg(w)
12233 v.AddArg(v0)
12234 v.AddArg(mem)
12235 return true
12236 }
12237 // match: (MOVBstoreidx ptr idx w x:(MOVBstoreidx ptr (ADDconst [1] idx) (UBFX [armBFAuxInt(8, 8)] w) mem))
12238 // cond: x.Uses == 1 && clobber(x)
12239 // result: (MOVHstoreidx ptr idx w mem)
12240 for {
12241 _ = v.Args[3]
12242 ptr := v.Args[0]
12243 idx := v.Args[1]
12244 w := v.Args[2]
12245 x := v.Args[3]
12246 if x.Op != OpARM64MOVBstoreidx {
12247 break
12248 }
12249 mem := x.Args[3]
12250 if ptr != x.Args[0] {
12251 break
12252 }
12253 x_1 := x.Args[1]
12254 if x_1.Op != OpARM64ADDconst {
12255 break
12256 }
12257 if x_1.AuxInt != 1 {
12258 break
12259 }
12260 if idx != x_1.Args[0] {
12261 break
12262 }
12263 x_2 := x.Args[2]
12264 if x_2.Op != OpARM64UBFX {
12265 break
12266 }
12267 if x_2.AuxInt != armBFAuxInt(8, 8) {
12268 break
12269 }
12270 if w != x_2.Args[0] {
12271 break
12272 }
12273 if !(x.Uses == 1 && clobber(x)) {
12274 break
12275 }
12276 v.reset(OpARM64MOVHstoreidx)
12277 v.AddArg(ptr)
12278 v.AddArg(idx)
12279 v.AddArg(w)
12280 v.AddArg(mem)
12281 return true
12282 }
12283 return false
12284 }
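// rewriteValueARM64_OpARM64MOVBstorezero_0 folds address arithmetic and symbols into
// MOVBstorezero, selects the indexed form when offset and symbol are zero, and merges adjacent
// zeroing byte stores into MOVHstorezero / MOVHstorezeroidx.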
12285 func rewriteValueARM64_OpARM64MOVBstorezero_0(v *Value) bool {
12286 b := v.Block
12287 config := b.Func.Config
12288 // match: (MOVBstorezero [off1] {sym} (ADDconst [off2] ptr) mem)
12289 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
12290 // result: (MOVBstorezero [off1+off2] {sym} ptr mem)
12291 for {
12292 off1 := v.AuxInt
12293 sym := v.Aux
12294 mem := v.Args[1]
12295 v_0 := v.Args[0]
12296 if v_0.Op != OpARM64ADDconst {
12297 break
12298 }
12299 off2 := v_0.AuxInt
12300 ptr := v_0.Args[0]
12301 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
12302 break
12303 }
12304 v.reset(OpARM64MOVBstorezero)
12305 v.AuxInt = off1 + off2
12306 v.Aux = sym
12307 v.AddArg(ptr)
12308 v.AddArg(mem)
12309 return true
12310 }
12311 // match: (MOVBstorezero [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
12312 // cond: canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
12313 // result: (MOVBstorezero [off1+off2] {mergeSym(sym1, sym2)} ptr mem)
12314 for {
12315 off1 := v.AuxInt
12316 sym1 := v.Aux
12317 mem := v.Args[1]
12318 v_0 := v.Args[0]
12319 if v_0.Op != OpARM64MOVDaddr {
12320 break
12321 }
12322 off2 := v_0.AuxInt
12323 sym2 := v_0.Aux
12324 ptr := v_0.Args[0]
12325 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
12326 break
12327 }
12328 v.reset(OpARM64MOVBstorezero)
12329 v.AuxInt = off1 + off2
12330 v.Aux = mergeSym(sym1, sym2)
12331 v.AddArg(ptr)
12332 v.AddArg(mem)
12333 return true
12334 }
12335 // match: (MOVBstorezero [off] {sym} (ADD ptr idx) mem)
12336 // cond: off == 0 && sym == nil
12337 // result: (MOVBstorezeroidx ptr idx mem)
12338 for {
12339 off := v.AuxInt
12340 sym := v.Aux
12341 mem := v.Args[1]
12342 v_0 := v.Args[0]
12343 if v_0.Op != OpARM64ADD {
12344 break
12345 }
12346 idx := v_0.Args[1]
12347 ptr := v_0.Args[0]
12348 if !(off == 0 && sym == nil) {
12349 break
12350 }
12351 v.reset(OpARM64MOVBstorezeroidx)
12352 v.AddArg(ptr)
12353 v.AddArg(idx)
12354 v.AddArg(mem)
12355 return true
12356 }
12357 // match: (MOVBstorezero [i] {s} ptr0 x:(MOVBstorezero [j] {s} ptr1 mem))
12358 // cond: x.Uses == 1 && areAdjacentOffsets(i, j, 1) && is32Bit(min(i, j)) && isSamePtr(ptr0, ptr1) && clobber(x)
12359 // result: (MOVHstorezero [min(i, j)] {s} ptr0 mem)
12360 for {
12361 i := v.AuxInt
12362 s := v.Aux
12363 _ = v.Args[1]
12364 ptr0 := v.Args[0]
12365 x := v.Args[1]
12366 if x.Op != OpARM64MOVBstorezero {
12367 break
12368 }
12369 j := x.AuxInt
12370 if x.Aux != s {
12371 break
12372 }
12373 mem := x.Args[1]
12374 ptr1 := x.Args[0]
12375 if !(x.Uses == 1 && areAdjacentOffsets(i, j, 1) && is32Bit(min(i, j)) && isSamePtr(ptr0, ptr1) && clobber(x)) {
12376 break
12377 }
12378 v.reset(OpARM64MOVHstorezero)
12379 v.AuxInt = min(i, j)
12380 v.Aux = s
12381 v.AddArg(ptr0)
12382 v.AddArg(mem)
12383 return true
12384 }
12385 // match: (MOVBstorezero [1] {s} (ADD ptr0 idx0) x:(MOVBstorezeroidx ptr1 idx1 mem))
12386 // cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)
12387 // result: (MOVHstorezeroidx ptr1 idx1 mem)
12388 for {
12389 if v.AuxInt != 1 {
12390 break
12391 }
12392 s := v.Aux
12393 _ = v.Args[1]
12394 v_0 := v.Args[0]
12395 if v_0.Op != OpARM64ADD {
12396 break
12397 }
12398 idx0 := v_0.Args[1]
12399 ptr0 := v_0.Args[0]
12400 x := v.Args[1]
12401 if x.Op != OpARM64MOVBstorezeroidx {
12402 break
12403 }
12404 mem := x.Args[2]
12405 ptr1 := x.Args[0]
12406 idx1 := x.Args[1]
12407 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
12408 break
12409 }
12410 v.reset(OpARM64MOVHstorezeroidx)
12411 v.AddArg(ptr1)
12412 v.AddArg(idx1)
12413 v.AddArg(mem)
12414 return true
12415 }
12416 return false
12417 }
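// rewriteValueARM64_OpARM64MOVBstorezeroidx_0 lowers MOVBstorezeroidx with a constant index to
// MOVBstorezero and merges two adjacent zeroing byte stores into MOVHstorezeroidx.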
12418 func rewriteValueARM64_OpARM64MOVBstorezeroidx_0(v *Value) bool {
12419 // match: (MOVBstorezeroidx ptr (MOVDconst [c]) mem)
12420 // cond:
12421 // result: (MOVBstorezero [c] ptr mem)
12422 for {
12423 mem := v.Args[2]
12424 ptr := v.Args[0]
12425 v_1 := v.Args[1]
12426 if v_1.Op != OpARM64MOVDconst {
12427 break
12428 }
12429 c := v_1.AuxInt
12430 v.reset(OpARM64MOVBstorezero)
12431 v.AuxInt = c
12432 v.AddArg(ptr)
12433 v.AddArg(mem)
12434 return true
12435 }
12436 // match: (MOVBstorezeroidx (MOVDconst [c]) idx mem)
12437 // cond:
12438 // result: (MOVBstorezero [c] idx mem)
12439 for {
12440 mem := v.Args[2]
12441 v_0 := v.Args[0]
12442 if v_0.Op != OpARM64MOVDconst {
12443 break
12444 }
12445 c := v_0.AuxInt
12446 idx := v.Args[1]
12447 v.reset(OpARM64MOVBstorezero)
12448 v.AuxInt = c
12449 v.AddArg(idx)
12450 v.AddArg(mem)
12451 return true
12452 }
12453 // match: (MOVBstorezeroidx ptr (ADDconst [1] idx) x:(MOVBstorezeroidx ptr idx mem))
12454 // cond: x.Uses == 1 && clobber(x)
12455 // result: (MOVHstorezeroidx ptr idx mem)
12456 for {
12457 _ = v.Args[2]
12458 ptr := v.Args[0]
12459 v_1 := v.Args[1]
12460 if v_1.Op != OpARM64ADDconst {
12461 break
12462 }
12463 if v_1.AuxInt != 1 {
12464 break
12465 }
12466 idx := v_1.Args[0]
12467 x := v.Args[2]
12468 if x.Op != OpARM64MOVBstorezeroidx {
12469 break
12470 }
12471 mem := x.Args[2]
12472 if ptr != x.Args[0] {
12473 break
12474 }
12475 if idx != x.Args[1] {
12476 break
12477 }
12478 if !(x.Uses == 1 && clobber(x)) {
12479 break
12480 }
12481 v.reset(OpARM64MOVHstorezeroidx)
12482 v.AddArg(ptr)
12483 v.AddArg(idx)
12484 v.AddArg(mem)
12485 return true
12486 }
12487 return false
12488 }
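// rewriteValueARM64_OpARM64MOVDload_0 replaces a reload of a value just stored from an FP
// register with a direct FP-to-GP move, folds address arithmetic and symbols into the load,
// selects indexed addressing modes, loads 0 from a location known to be zeroed, and
// constant-folds loads from read-only symbols.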
12489 func rewriteValueARM64_OpARM64MOVDload_0(v *Value) bool {
12490 b := v.Block
12491 config := b.Func.Config
12492 // match: (MOVDload [off] {sym} ptr (FMOVDstore [off] {sym} ptr val _))
12493 // cond:
12494 // result: (FMOVDfpgp val)
12495 for {
12496 off := v.AuxInt
12497 sym := v.Aux
12498 _ = v.Args[1]
12499 ptr := v.Args[0]
12500 v_1 := v.Args[1]
12501 if v_1.Op != OpARM64FMOVDstore {
12502 break
12503 }
12504 if v_1.AuxInt != off {
12505 break
12506 }
12507 if v_1.Aux != sym {
12508 break
12509 }
12510 _ = v_1.Args[2]
12511 if ptr != v_1.Args[0] {
12512 break
12513 }
12514 val := v_1.Args[1]
12515 v.reset(OpARM64FMOVDfpgp)
12516 v.AddArg(val)
12517 return true
12518 }
12519 // match: (MOVDload [off1] {sym} (ADDconst [off2] ptr) mem)
12520 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
12521 // result: (MOVDload [off1+off2] {sym} ptr mem)
12522 for {
12523 off1 := v.AuxInt
12524 sym := v.Aux
12525 mem := v.Args[1]
12526 v_0 := v.Args[0]
12527 if v_0.Op != OpARM64ADDconst {
12528 break
12529 }
12530 off2 := v_0.AuxInt
12531 ptr := v_0.Args[0]
12532 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
12533 break
12534 }
12535 v.reset(OpARM64MOVDload)
12536 v.AuxInt = off1 + off2
12537 v.Aux = sym
12538 v.AddArg(ptr)
12539 v.AddArg(mem)
12540 return true
12541 }
12542 // match: (MOVDload [off] {sym} (ADD ptr idx) mem)
12543 // cond: off == 0 && sym == nil
12544 // result: (MOVDloadidx ptr idx mem)
12545 for {
12546 off := v.AuxInt
12547 sym := v.Aux
12548 mem := v.Args[1]
12549 v_0 := v.Args[0]
12550 if v_0.Op != OpARM64ADD {
12551 break
12552 }
12553 idx := v_0.Args[1]
12554 ptr := v_0.Args[0]
12555 if !(off == 0 && sym == nil) {
12556 break
12557 }
12558 v.reset(OpARM64MOVDloadidx)
12559 v.AddArg(ptr)
12560 v.AddArg(idx)
12561 v.AddArg(mem)
12562 return true
12563 }
12564 // match: (MOVDload [off] {sym} (ADDshiftLL [3] ptr idx) mem)
12565 // cond: off == 0 && sym == nil
12566 // result: (MOVDloadidx8 ptr idx mem)
12567 for {
12568 off := v.AuxInt
12569 sym := v.Aux
12570 mem := v.Args[1]
12571 v_0 := v.Args[0]
12572 if v_0.Op != OpARM64ADDshiftLL {
12573 break
12574 }
12575 if v_0.AuxInt != 3 {
12576 break
12577 }
12578 idx := v_0.Args[1]
12579 ptr := v_0.Args[0]
12580 if !(off == 0 && sym == nil) {
12581 break
12582 }
12583 v.reset(OpARM64MOVDloadidx8)
12584 v.AddArg(ptr)
12585 v.AddArg(idx)
12586 v.AddArg(mem)
12587 return true
12588 }
12589 // match: (MOVDload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
12590 // cond: canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
12591 // result: (MOVDload [off1+off2] {mergeSym(sym1, sym2)} ptr mem)
12592 for {
12593 off1 := v.AuxInt
12594 sym1 := v.Aux
12595 mem := v.Args[1]
12596 v_0 := v.Args[0]
12597 if v_0.Op != OpARM64MOVDaddr {
12598 break
12599 }
12600 off2 := v_0.AuxInt
12601 sym2 := v_0.Aux
12602 ptr := v_0.Args[0]
12603 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
12604 break
12605 }
12606 v.reset(OpARM64MOVDload)
12607 v.AuxInt = off1 + off2
12608 v.Aux = mergeSym(sym1, sym2)
12609 v.AddArg(ptr)
12610 v.AddArg(mem)
12611 return true
12612 }
12613 // match: (MOVDload [off] {sym} ptr (MOVDstorezero [off2] {sym2} ptr2 _))
12614 // cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
12615 // result: (MOVDconst [0])
12616 for {
12617 off := v.AuxInt
12618 sym := v.Aux
12619 _ = v.Args[1]
12620 ptr := v.Args[0]
12621 v_1 := v.Args[1]
12622 if v_1.Op != OpARM64MOVDstorezero {
12623 break
12624 }
12625 off2 := v_1.AuxInt
12626 sym2 := v_1.Aux
12627 _ = v_1.Args[1]
12628 ptr2 := v_1.Args[0]
12629 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
12630 break
12631 }
12632 v.reset(OpARM64MOVDconst)
12633 v.AuxInt = 0
12634 return true
12635 }
12636 // match: (MOVDload [off] {sym} (SB) _)
12637 // cond: symIsRO(sym)
12638 // result: (MOVDconst [int64(read64(sym, off, config.BigEndian))])
12639 for {
12640 off := v.AuxInt
12641 sym := v.Aux
12642 _ = v.Args[1]
12643 v_0 := v.Args[0]
12644 if v_0.Op != OpSB {
12645 break
12646 }
12647 if !(symIsRO(sym)) {
12648 break
12649 }
12650 v.reset(OpARM64MOVDconst)
12651 v.AuxInt = int64(read64(sym, off, config.BigEndian))
12652 return true
12653 }
12654 return false
12655 }
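// rewriteValueARM64_OpARM64MOVDloadidx_0 folds a constant index into the load offset, selects
// the scaled MOVDloadidx8 form for shifted indexes, and loads 0 when the addressed location was
// just zeroed.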
12656 func rewriteValueARM64_OpARM64MOVDloadidx_0(v *Value) bool {
12657 // match: (MOVDloadidx ptr (MOVDconst [c]) mem)
12658 // cond:
12659 // result: (MOVDload [c] ptr mem)
12660 for {
12661 mem := v.Args[2]
12662 ptr := v.Args[0]
12663 v_1 := v.Args[1]
12664 if v_1.Op != OpARM64MOVDconst {
12665 break
12666 }
12667 c := v_1.AuxInt
12668 v.reset(OpARM64MOVDload)
12669 v.AuxInt = c
12670 v.AddArg(ptr)
12671 v.AddArg(mem)
12672 return true
12673 }
12674 // match: (MOVDloadidx (MOVDconst [c]) ptr mem)
12675 // cond:
12676 // result: (MOVDload [c] ptr mem)
12677 for {
12678 mem := v.Args[2]
12679 v_0 := v.Args[0]
12680 if v_0.Op != OpARM64MOVDconst {
12681 break
12682 }
12683 c := v_0.AuxInt
12684 ptr := v.Args[1]
12685 v.reset(OpARM64MOVDload)
12686 v.AuxInt = c
12687 v.AddArg(ptr)
12688 v.AddArg(mem)
12689 return true
12690 }
12691 // match: (MOVDloadidx ptr (SLLconst [3] idx) mem)
12692 // cond:
12693 // result: (MOVDloadidx8 ptr idx mem)
12694 for {
12695 mem := v.Args[2]
12696 ptr := v.Args[0]
12697 v_1 := v.Args[1]
12698 if v_1.Op != OpARM64SLLconst {
12699 break
12700 }
12701 if v_1.AuxInt != 3 {
12702 break
12703 }
12704 idx := v_1.Args[0]
12705 v.reset(OpARM64MOVDloadidx8)
12706 v.AddArg(ptr)
12707 v.AddArg(idx)
12708 v.AddArg(mem)
12709 return true
12710 }
12711 // match: (MOVDloadidx (SLLconst [3] idx) ptr mem)
12712 // cond:
12713 // result: (MOVDloadidx8 ptr idx mem)
12714 for {
12715 mem := v.Args[2]
12716 v_0 := v.Args[0]
12717 if v_0.Op != OpARM64SLLconst {
12718 break
12719 }
12720 if v_0.AuxInt != 3 {
12721 break
12722 }
12723 idx := v_0.Args[0]
12724 ptr := v.Args[1]
12725 v.reset(OpARM64MOVDloadidx8)
12726 v.AddArg(ptr)
12727 v.AddArg(idx)
12728 v.AddArg(mem)
12729 return true
12730 }
12731 // match: (MOVDloadidx ptr idx (MOVDstorezeroidx ptr2 idx2 _))
12732 // cond: (isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2))
12733 // result: (MOVDconst [0])
12734 for {
12735 _ = v.Args[2]
12736 ptr := v.Args[0]
12737 idx := v.Args[1]
12738 v_2 := v.Args[2]
12739 if v_2.Op != OpARM64MOVDstorezeroidx {
12740 break
12741 }
12742 _ = v_2.Args[2]
12743 ptr2 := v_2.Args[0]
12744 idx2 := v_2.Args[1]
12745 if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) {
12746 break
12747 }
12748 v.reset(OpARM64MOVDconst)
12749 v.AuxInt = 0
12750 return true
12751 }
12752 return false
12753 }
12754 func rewriteValueARM64_OpARM64MOVDloadidx8_0(v *Value) bool {
12755 // match: (MOVDloadidx8 ptr (MOVDconst [c]) mem)
12756 // cond:
12757 // result: (MOVDload [c<<3] ptr mem)
12758 for {
12759 mem := v.Args[2]
12760 ptr := v.Args[0]
12761 v_1 := v.Args[1]
12762 if v_1.Op != OpARM64MOVDconst {
12763 break
12764 }
12765 c := v_1.AuxInt
12766 v.reset(OpARM64MOVDload)
12767 v.AuxInt = c << 3
12768 v.AddArg(ptr)
12769 v.AddArg(mem)
12770 return true
12771 }
12772 // match: (MOVDloadidx8 ptr idx (MOVDstorezeroidx8 ptr2 idx2 _))
12773 // cond: isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2)
12774 // result: (MOVDconst [0])
12775 for {
12776 _ = v.Args[2]
12777 ptr := v.Args[0]
12778 idx := v.Args[1]
12779 v_2 := v.Args[2]
12780 if v_2.Op != OpARM64MOVDstorezeroidx8 {
12781 break
12782 }
12783 _ = v_2.Args[2]
12784 ptr2 := v_2.Args[0]
12785 idx2 := v_2.Args[1]
12786 if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2)) {
12787 break
12788 }
12789 v.reset(OpARM64MOVDconst)
12790 v.AuxInt = 0
12791 return true
12792 }
12793 return false
12794 }
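// rewriteValueARM64_OpARM64MOVDreg_0 turns a single-use MOVDreg into MOVDnop and constant-folds
// MOVDreg of a MOVDconst.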
12795 func rewriteValueARM64_OpARM64MOVDreg_0(v *Value) bool {
12796 // match: (MOVDreg x)
12797 // cond: x.Uses == 1
12798 // result: (MOVDnop x)
12799 for {
12800 x := v.Args[0]
12801 if !(x.Uses == 1) {
12802 break
12803 }
12804 v.reset(OpARM64MOVDnop)
12805 v.AddArg(x)
12806 return true
12807 }
12808 // match: (MOVDreg (MOVDconst [c]))
12809 // cond:
12810 // result: (MOVDconst [c])
12811 for {
12812 v_0 := v.Args[0]
12813 if v_0.Op != OpARM64MOVDconst {
12814 break
12815 }
12816 c := v_0.AuxInt
12817 v.reset(OpARM64MOVDconst)
12818 v.AuxInt = c
12819 return true
12820 }
12821 return false
12822 }
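// rewriteValueARM64_OpARM64MOVDstore_0 stores FP-to-GP moved values with FMOVDstore, folds
// address arithmetic and symbols into the store, selects indexed addressing modes, and turns a
// store of constant zero into MOVDstorezero.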
12823 func rewriteValueARM64_OpARM64MOVDstore_0(v *Value) bool {
12824 b := v.Block
12825 config := b.Func.Config
12826 // match: (MOVDstore [off] {sym} ptr (FMOVDfpgp val) mem)
12827 // cond:
12828 // result: (FMOVDstore [off] {sym} ptr val mem)
12829 for {
12830 off := v.AuxInt
12831 sym := v.Aux
12832 mem := v.Args[2]
12833 ptr := v.Args[0]
12834 v_1 := v.Args[1]
12835 if v_1.Op != OpARM64FMOVDfpgp {
12836 break
12837 }
12838 val := v_1.Args[0]
12839 v.reset(OpARM64FMOVDstore)
12840 v.AuxInt = off
12841 v.Aux = sym
12842 v.AddArg(ptr)
12843 v.AddArg(val)
12844 v.AddArg(mem)
12845 return true
12846 }
12847 // match: (MOVDstore [off1] {sym} (ADDconst [off2] ptr) val mem)
12848 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
12849 // result: (MOVDstore [off1+off2] {sym} ptr val mem)
12850 for {
12851 off1 := v.AuxInt
12852 sym := v.Aux
12853 mem := v.Args[2]
12854 v_0 := v.Args[0]
12855 if v_0.Op != OpARM64ADDconst {
12856 break
12857 }
12858 off2 := v_0.AuxInt
12859 ptr := v_0.Args[0]
12860 val := v.Args[1]
12861 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
12862 break
12863 }
12864 v.reset(OpARM64MOVDstore)
12865 v.AuxInt = off1 + off2
12866 v.Aux = sym
12867 v.AddArg(ptr)
12868 v.AddArg(val)
12869 v.AddArg(mem)
12870 return true
12871 }
12872 // match: (MOVDstore [off] {sym} (ADD ptr idx) val mem)
12873 // cond: off == 0 && sym == nil
12874 // result: (MOVDstoreidx ptr idx val mem)
12875 for {
12876 off := v.AuxInt
12877 sym := v.Aux
12878 mem := v.Args[2]
12879 v_0 := v.Args[0]
12880 if v_0.Op != OpARM64ADD {
12881 break
12882 }
12883 idx := v_0.Args[1]
12884 ptr := v_0.Args[0]
12885 val := v.Args[1]
12886 if !(off == 0 && sym == nil) {
12887 break
12888 }
12889 v.reset(OpARM64MOVDstoreidx)
12890 v.AddArg(ptr)
12891 v.AddArg(idx)
12892 v.AddArg(val)
12893 v.AddArg(mem)
12894 return true
12895 }
12896 // match: (MOVDstore [off] {sym} (ADDshiftLL [3] ptr idx) val mem)
12897 // cond: off == 0 && sym == nil
12898 // result: (MOVDstoreidx8 ptr idx val mem)
12899 for {
12900 off := v.AuxInt
12901 sym := v.Aux
12902 mem := v.Args[2]
12903 v_0 := v.Args[0]
12904 if v_0.Op != OpARM64ADDshiftLL {
12905 break
12906 }
12907 if v_0.AuxInt != 3 {
12908 break
12909 }
12910 idx := v_0.Args[1]
12911 ptr := v_0.Args[0]
12912 val := v.Args[1]
12913 if !(off == 0 && sym == nil) {
12914 break
12915 }
12916 v.reset(OpARM64MOVDstoreidx8)
12917 v.AddArg(ptr)
12918 v.AddArg(idx)
12919 v.AddArg(val)
12920 v.AddArg(mem)
12921 return true
12922 }
12923 // match: (MOVDstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem)
12924 // cond: canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
12925 // result: (MOVDstore [off1+off2] {mergeSym(sym1, sym2)} ptr val mem)
12926 for {
12927 off1 := v.AuxInt
12928 sym1 := v.Aux
12929 mem := v.Args[2]
12930 v_0 := v.Args[0]
12931 if v_0.Op != OpARM64MOVDaddr {
12932 break
12933 }
12934 off2 := v_0.AuxInt
12935 sym2 := v_0.Aux
12936 ptr := v_0.Args[0]
12937 val := v.Args[1]
12938 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
12939 break
12940 }
12941 v.reset(OpARM64MOVDstore)
12942 v.AuxInt = off1 + off2
12943 v.Aux = mergeSym(sym1, sym2)
12944 v.AddArg(ptr)
12945 v.AddArg(val)
12946 v.AddArg(mem)
12947 return true
12948 }
12949 // match: (MOVDstore [off] {sym} ptr (MOVDconst [0]) mem)
12950 // cond:
12951 // result: (MOVDstorezero [off] {sym} ptr mem)
12952 for {
12953 off := v.AuxInt
12954 sym := v.Aux
12955 mem := v.Args[2]
12956 ptr := v.Args[0]
12957 v_1 := v.Args[1]
12958 if v_1.Op != OpARM64MOVDconst {
12959 break
12960 }
12961 if v_1.AuxInt != 0 {
12962 break
12963 }
12964 v.reset(OpARM64MOVDstorezero)
12965 v.AuxInt = off
12966 v.Aux = sym
12967 v.AddArg(ptr)
12968 v.AddArg(mem)
12969 return true
12970 }
12971 return false
12972 }
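// rewriteValueARM64_OpARM64MOVDstoreidx_0 folds a constant index into the store offset, selects
// the scaled MOVDstoreidx8 form, and turns a store of constant zero into MOVDstorezeroidx.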
12973 func rewriteValueARM64_OpARM64MOVDstoreidx_0(v *Value) bool {
12974 // match: (MOVDstoreidx ptr (MOVDconst [c]) val mem)
12975 // cond:
12976 // result: (MOVDstore [c] ptr val mem)
12977 for {
12978 mem := v.Args[3]
12979 ptr := v.Args[0]
12980 v_1 := v.Args[1]
12981 if v_1.Op != OpARM64MOVDconst {
12982 break
12983 }
12984 c := v_1.AuxInt
12985 val := v.Args[2]
12986 v.reset(OpARM64MOVDstore)
12987 v.AuxInt = c
12988 v.AddArg(ptr)
12989 v.AddArg(val)
12990 v.AddArg(mem)
12991 return true
12992 }
12993 // match: (MOVDstoreidx (MOVDconst [c]) idx val mem)
12994 // cond:
12995 // result: (MOVDstore [c] idx val mem)
12996 for {
12997 mem := v.Args[3]
12998 v_0 := v.Args[0]
12999 if v_0.Op != OpARM64MOVDconst {
13000 break
13001 }
13002 c := v_0.AuxInt
13003 idx := v.Args[1]
13004 val := v.Args[2]
13005 v.reset(OpARM64MOVDstore)
13006 v.AuxInt = c
13007 v.AddArg(idx)
13008 v.AddArg(val)
13009 v.AddArg(mem)
13010 return true
13011 }
13012 // match: (MOVDstoreidx ptr (SLLconst [3] idx) val mem)
13013 // cond:
13014 // result: (MOVDstoreidx8 ptr idx val mem)
13015 for {
13016 mem := v.Args[3]
13017 ptr := v.Args[0]
13018 v_1 := v.Args[1]
13019 if v_1.Op != OpARM64SLLconst {
13020 break
13021 }
13022 if v_1.AuxInt != 3 {
13023 break
13024 }
13025 idx := v_1.Args[0]
13026 val := v.Args[2]
13027 v.reset(OpARM64MOVDstoreidx8)
13028 v.AddArg(ptr)
13029 v.AddArg(idx)
13030 v.AddArg(val)
13031 v.AddArg(mem)
13032 return true
13033 }
13034 // match: (MOVDstoreidx (SLLconst [3] idx) ptr val mem)
13035 // cond:
13036 // result: (MOVDstoreidx8 ptr idx val mem)
13037 for {
13038 mem := v.Args[3]
13039 v_0 := v.Args[0]
13040 if v_0.Op != OpARM64SLLconst {
13041 break
13042 }
13043 if v_0.AuxInt != 3 {
13044 break
13045 }
13046 idx := v_0.Args[0]
13047 ptr := v.Args[1]
13048 val := v.Args[2]
13049 v.reset(OpARM64MOVDstoreidx8)
13050 v.AddArg(ptr)
13051 v.AddArg(idx)
13052 v.AddArg(val)
13053 v.AddArg(mem)
13054 return true
13055 }
13056 // match: (MOVDstoreidx ptr idx (MOVDconst [0]) mem)
13057 // cond:
13058 // result: (MOVDstorezeroidx ptr idx mem)
13059 for {
13060 mem := v.Args[3]
13061 ptr := v.Args[0]
13062 idx := v.Args[1]
13063 v_2 := v.Args[2]
13064 if v_2.Op != OpARM64MOVDconst {
13065 break
13066 }
13067 if v_2.AuxInt != 0 {
13068 break
13069 }
13070 v.reset(OpARM64MOVDstorezeroidx)
13071 v.AddArg(ptr)
13072 v.AddArg(idx)
13073 v.AddArg(mem)
13074 return true
13075 }
13076 return false
13077 }
13078 func rewriteValueARM64_OpARM64MOVDstoreidx8_0(v *Value) bool {
13079 // match: (MOVDstoreidx8 ptr (MOVDconst [c]) val mem)
13080 // cond:
13081 // result: (MOVDstore [c<<3] ptr val mem)
13082 for {
13083 mem := v.Args[3]
13084 ptr := v.Args[0]
13085 v_1 := v.Args[1]
13086 if v_1.Op != OpARM64MOVDconst {
13087 break
13088 }
13089 c := v_1.AuxInt
13090 val := v.Args[2]
13091 v.reset(OpARM64MOVDstore)
13092 v.AuxInt = c << 3
13093 v.AddArg(ptr)
13094 v.AddArg(val)
13095 v.AddArg(mem)
13096 return true
13097 }
13098 // match: (MOVDstoreidx8 ptr idx (MOVDconst [0]) mem)
13099 // cond:
13100 // result: (MOVDstorezeroidx8 ptr idx mem)
13101 for {
13102 mem := v.Args[3]
13103 ptr := v.Args[0]
13104 idx := v.Args[1]
13105 v_2 := v.Args[2]
13106 if v_2.Op != OpARM64MOVDconst {
13107 break
13108 }
13109 if v_2.AuxInt != 0 {
13110 break
13111 }
13112 v.reset(OpARM64MOVDstorezeroidx8)
13113 v.AddArg(ptr)
13114 v.AddArg(idx)
13115 v.AddArg(mem)
13116 return true
13117 }
13118 return false
13119 }
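// rewriteValueARM64_OpARM64MOVDstorezero_0 folds address arithmetic and symbols into
// MOVDstorezero, selects indexed forms, and merges two adjacent doubleword zeroing stores into a
// single 16-byte MOVQstorezero.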
13120 func rewriteValueARM64_OpARM64MOVDstorezero_0(v *Value) bool {
13121 b := v.Block
13122 config := b.Func.Config
13123 // match: (MOVDstorezero [off1] {sym} (ADDconst [off2] ptr) mem)
13124 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
13125 // result: (MOVDstorezero [off1+off2] {sym} ptr mem)
13126 for {
13127 off1 := v.AuxInt
13128 sym := v.Aux
13129 mem := v.Args[1]
13130 v_0 := v.Args[0]
13131 if v_0.Op != OpARM64ADDconst {
13132 break
13133 }
13134 off2 := v_0.AuxInt
13135 ptr := v_0.Args[0]
13136 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
13137 break
13138 }
13139 v.reset(OpARM64MOVDstorezero)
13140 v.AuxInt = off1 + off2
13141 v.Aux = sym
13142 v.AddArg(ptr)
13143 v.AddArg(mem)
13144 return true
13145 }
13146 // match: (MOVDstorezero [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
13147 // cond: canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
13148 // result: (MOVDstorezero [off1+off2] {mergeSym(sym1, sym2)} ptr mem)
13149 for {
13150 off1 := v.AuxInt
13151 sym1 := v.Aux
13152 mem := v.Args[1]
13153 v_0 := v.Args[0]
13154 if v_0.Op != OpARM64MOVDaddr {
13155 break
13156 }
13157 off2 := v_0.AuxInt
13158 sym2 := v_0.Aux
13159 ptr := v_0.Args[0]
13160 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
13161 break
13162 }
13163 v.reset(OpARM64MOVDstorezero)
13164 v.AuxInt = off1 + off2
13165 v.Aux = mergeSym(sym1, sym2)
13166 v.AddArg(ptr)
13167 v.AddArg(mem)
13168 return true
13169 }
13170 // match: (MOVDstorezero [off] {sym} (ADD ptr idx) mem)
13171 // cond: off == 0 && sym == nil
13172 // result: (MOVDstorezeroidx ptr idx mem)
13173 for {
13174 off := v.AuxInt
13175 sym := v.Aux
13176 mem := v.Args[1]
13177 v_0 := v.Args[0]
13178 if v_0.Op != OpARM64ADD {
13179 break
13180 }
13181 idx := v_0.Args[1]
13182 ptr := v_0.Args[0]
13183 if !(off == 0 && sym == nil) {
13184 break
13185 }
13186 v.reset(OpARM64MOVDstorezeroidx)
13187 v.AddArg(ptr)
13188 v.AddArg(idx)
13189 v.AddArg(mem)
13190 return true
13191 }
13192 // match: (MOVDstorezero [off] {sym} (ADDshiftLL [3] ptr idx) mem)
13193 // cond: off == 0 && sym == nil
13194 // result: (MOVDstorezeroidx8 ptr idx mem)
13195 for {
13196 off := v.AuxInt
13197 sym := v.Aux
13198 mem := v.Args[1]
13199 v_0 := v.Args[0]
13200 if v_0.Op != OpARM64ADDshiftLL {
13201 break
13202 }
13203 if v_0.AuxInt != 3 {
13204 break
13205 }
13206 idx := v_0.Args[1]
13207 ptr := v_0.Args[0]
13208 if !(off == 0 && sym == nil) {
13209 break
13210 }
13211 v.reset(OpARM64MOVDstorezeroidx8)
13212 v.AddArg(ptr)
13213 v.AddArg(idx)
13214 v.AddArg(mem)
13215 return true
13216 }
13217 // match: (MOVDstorezero [i] {s} ptr0 x:(MOVDstorezero [j] {s} ptr1 mem))
13218 // cond: x.Uses == 1 && areAdjacentOffsets(i, j, 8) && is32Bit(min(i, j)) && isSamePtr(ptr0, ptr1) && clobber(x)
13219 // result: (MOVQstorezero [min(i, j)] {s} ptr0 mem)
13220 for {
13221 i := v.AuxInt
13222 s := v.Aux
13223 _ = v.Args[1]
13224 ptr0 := v.Args[0]
13225 x := v.Args[1]
13226 if x.Op != OpARM64MOVDstorezero {
13227 break
13228 }
13229 j := x.AuxInt
13230 if x.Aux != s {
13231 break
13232 }
13233 mem := x.Args[1]
13234 ptr1 := x.Args[0]
13235 if !(x.Uses == 1 && areAdjacentOffsets(i, j, 8) && is32Bit(min(i, j)) && isSamePtr(ptr0, ptr1) && clobber(x)) {
13236 break
13237 }
13238 v.reset(OpARM64MOVQstorezero)
13239 v.AuxInt = min(i, j)
13240 v.Aux = s
13241 v.AddArg(ptr0)
13242 v.AddArg(mem)
13243 return true
13244 }
13245 // match: (MOVDstorezero [8] {s} p0:(ADD ptr0 idx0) x:(MOVDstorezeroidx ptr1 idx1 mem))
13246 // cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)
13247 // result: (MOVQstorezero [0] {s} p0 mem)
13248 for {
13249 if v.AuxInt != 8 {
13250 break
13251 }
13252 s := v.Aux
13253 _ = v.Args[1]
13254 p0 := v.Args[0]
13255 if p0.Op != OpARM64ADD {
13256 break
13257 }
13258 idx0 := p0.Args[1]
13259 ptr0 := p0.Args[0]
13260 x := v.Args[1]
13261 if x.Op != OpARM64MOVDstorezeroidx {
13262 break
13263 }
13264 mem := x.Args[2]
13265 ptr1 := x.Args[0]
13266 idx1 := x.Args[1]
13267 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
13268 break
13269 }
13270 v.reset(OpARM64MOVQstorezero)
13271 v.AuxInt = 0
13272 v.Aux = s
13273 v.AddArg(p0)
13274 v.AddArg(mem)
13275 return true
13276 }
13277 // match: (MOVDstorezero [8] {s} p0:(ADDshiftLL [3] ptr0 idx0) x:(MOVDstorezeroidx8 ptr1 idx1 mem))
13278 // cond: x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)
13279 // result: (MOVQstorezero [0] {s} p0 mem)
13280 for {
13281 if v.AuxInt != 8 {
13282 break
13283 }
13284 s := v.Aux
13285 _ = v.Args[1]
13286 p0 := v.Args[0]
13287 if p0.Op != OpARM64ADDshiftLL {
13288 break
13289 }
13290 if p0.AuxInt != 3 {
13291 break
13292 }
13293 idx0 := p0.Args[1]
13294 ptr0 := p0.Args[0]
13295 x := v.Args[1]
13296 if x.Op != OpARM64MOVDstorezeroidx8 {
13297 break
13298 }
13299 mem := x.Args[2]
13300 ptr1 := x.Args[0]
13301 idx1 := x.Args[1]
13302 if !(x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)) {
13303 break
13304 }
13305 v.reset(OpARM64MOVQstorezero)
13306 v.AuxInt = 0
13307 v.Aux = s
13308 v.AddArg(p0)
13309 v.AddArg(mem)
13310 return true
13311 }
13312 return false
13313 }
13314 func rewriteValueARM64_OpARM64MOVDstorezeroidx_0(v *Value) bool {
13315 // match: (MOVDstorezeroidx ptr (MOVDconst [c]) mem)
13316 // cond:
13317 // result: (MOVDstorezero [c] ptr mem)
13318 for {
13319 mem := v.Args[2]
13320 ptr := v.Args[0]
13321 v_1 := v.Args[1]
13322 if v_1.Op != OpARM64MOVDconst {
13323 break
13324 }
13325 c := v_1.AuxInt
13326 v.reset(OpARM64MOVDstorezero)
13327 v.AuxInt = c
13328 v.AddArg(ptr)
13329 v.AddArg(mem)
13330 return true
13331 }
13332 // match: (MOVDstorezeroidx (MOVDconst [c]) idx mem)
13333 // cond:
13334 // result: (MOVDstorezero [c] idx mem)
13335 for {
13336 mem := v.Args[2]
13337 v_0 := v.Args[0]
13338 if v_0.Op != OpARM64MOVDconst {
13339 break
13340 }
13341 c := v_0.AuxInt
13342 idx := v.Args[1]
13343 v.reset(OpARM64MOVDstorezero)
13344 v.AuxInt = c
13345 v.AddArg(idx)
13346 v.AddArg(mem)
13347 return true
13348 }
13349 // match: (MOVDstorezeroidx ptr (SLLconst [3] idx) mem)
13350 // cond:
13351 // result: (MOVDstorezeroidx8 ptr idx mem)
13352 for {
13353 mem := v.Args[2]
13354 ptr := v.Args[0]
13355 v_1 := v.Args[1]
13356 if v_1.Op != OpARM64SLLconst {
13357 break
13358 }
13359 if v_1.AuxInt != 3 {
13360 break
13361 }
13362 idx := v_1.Args[0]
13363 v.reset(OpARM64MOVDstorezeroidx8)
13364 v.AddArg(ptr)
13365 v.AddArg(idx)
13366 v.AddArg(mem)
13367 return true
13368 }
13369 // match: (MOVDstorezeroidx (SLLconst [3] idx) ptr mem)
13370 // cond:
13371 // result: (MOVDstorezeroidx8 ptr idx mem)
13372 for {
13373 mem := v.Args[2]
13374 v_0 := v.Args[0]
13375 if v_0.Op != OpARM64SLLconst {
13376 break
13377 }
13378 if v_0.AuxInt != 3 {
13379 break
13380 }
13381 idx := v_0.Args[0]
13382 ptr := v.Args[1]
13383 v.reset(OpARM64MOVDstorezeroidx8)
13384 v.AddArg(ptr)
13385 v.AddArg(idx)
13386 v.AddArg(mem)
13387 return true
13388 }
13389 return false
13390 }
13391 func rewriteValueARM64_OpARM64MOVDstorezeroidx8_0(v *Value) bool {
13392 // match: (MOVDstorezeroidx8 ptr (MOVDconst [c]) mem)
13393 // cond:
13394 // result: (MOVDstorezero [c<<3] ptr mem)
13395 for {
13396 mem := v.Args[2]
13397 ptr := v.Args[0]
13398 v_1 := v.Args[1]
13399 if v_1.Op != OpARM64MOVDconst {
13400 break
13401 }
13402 c := v_1.AuxInt
13403 v.reset(OpARM64MOVDstorezero)
13404 v.AuxInt = c << 3
13405 v.AddArg(ptr)
13406 v.AddArg(mem)
13407 return true
13408 }
13409 return false
13410 }
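// rewriteValueARM64_OpARM64MOVHUload_0 folds address arithmetic and symbols into MOVHUload,
// selects indexed addressing modes, loads 0 from a just-zeroed halfword, and constant-folds
// loads from read-only symbols.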
13411 func rewriteValueARM64_OpARM64MOVHUload_0(v *Value) bool {
13412 b := v.Block
13413 config := b.Func.Config
13414 // match: (MOVHUload [off1] {sym} (ADDconst [off2] ptr) mem)
13415 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
13416 // result: (MOVHUload [off1+off2] {sym} ptr mem)
13417 for {
13418 off1 := v.AuxInt
13419 sym := v.Aux
13420 mem := v.Args[1]
13421 v_0 := v.Args[0]
13422 if v_0.Op != OpARM64ADDconst {
13423 break
13424 }
13425 off2 := v_0.AuxInt
13426 ptr := v_0.Args[0]
13427 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
13428 break
13429 }
13430 v.reset(OpARM64MOVHUload)
13431 v.AuxInt = off1 + off2
13432 v.Aux = sym
13433 v.AddArg(ptr)
13434 v.AddArg(mem)
13435 return true
13436 }
13437 // match: (MOVHUload [off] {sym} (ADD ptr idx) mem)
13438 // cond: off == 0 && sym == nil
13439 // result: (MOVHUloadidx ptr idx mem)
13440 for {
13441 off := v.AuxInt
13442 sym := v.Aux
13443 mem := v.Args[1]
13444 v_0 := v.Args[0]
13445 if v_0.Op != OpARM64ADD {
13446 break
13447 }
13448 idx := v_0.Args[1]
13449 ptr := v_0.Args[0]
13450 if !(off == 0 && sym == nil) {
13451 break
13452 }
13453 v.reset(OpARM64MOVHUloadidx)
13454 v.AddArg(ptr)
13455 v.AddArg(idx)
13456 v.AddArg(mem)
13457 return true
13458 }
13459 // match: (MOVHUload [off] {sym} (ADDshiftLL [1] ptr idx) mem)
13460 // cond: off == 0 && sym == nil
13461 // result: (MOVHUloadidx2 ptr idx mem)
13462 for {
13463 off := v.AuxInt
13464 sym := v.Aux
13465 mem := v.Args[1]
13466 v_0 := v.Args[0]
13467 if v_0.Op != OpARM64ADDshiftLL {
13468 break
13469 }
13470 if v_0.AuxInt != 1 {
13471 break
13472 }
13473 idx := v_0.Args[1]
13474 ptr := v_0.Args[0]
13475 if !(off == 0 && sym == nil) {
13476 break
13477 }
13478 v.reset(OpARM64MOVHUloadidx2)
13479 v.AddArg(ptr)
13480 v.AddArg(idx)
13481 v.AddArg(mem)
13482 return true
13483 }
13484 // match: (MOVHUload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
13485 // cond: canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
13486 // result: (MOVHUload [off1+off2] {mergeSym(sym1, sym2)} ptr mem)
13487 for {
13488 off1 := v.AuxInt
13489 sym1 := v.Aux
13490 mem := v.Args[1]
13491 v_0 := v.Args[0]
13492 if v_0.Op != OpARM64MOVDaddr {
13493 break
13494 }
13495 off2 := v_0.AuxInt
13496 sym2 := v_0.Aux
13497 ptr := v_0.Args[0]
13498 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
13499 break
13500 }
13501 v.reset(OpARM64MOVHUload)
13502 v.AuxInt = off1 + off2
13503 v.Aux = mergeSym(sym1, sym2)
13504 v.AddArg(ptr)
13505 v.AddArg(mem)
13506 return true
13507 }
13508 // match: (MOVHUload [off] {sym} ptr (MOVHstorezero [off2] {sym2} ptr2 _))
13509 // cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
13510 // result: (MOVDconst [0])
13511 for {
13512 off := v.AuxInt
13513 sym := v.Aux
13514 _ = v.Args[1]
13515 ptr := v.Args[0]
13516 v_1 := v.Args[1]
13517 if v_1.Op != OpARM64MOVHstorezero {
13518 break
13519 }
13520 off2 := v_1.AuxInt
13521 sym2 := v_1.Aux
13522 _ = v_1.Args[1]
13523 ptr2 := v_1.Args[0]
13524 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
13525 break
13526 }
13527 v.reset(OpARM64MOVDconst)
13528 v.AuxInt = 0
13529 return true
13530 }
13531 // match: (MOVHUload [off] {sym} (SB) _)
13532 // cond: symIsRO(sym)
13533 // result: (MOVDconst [int64(read16(sym, off, config.BigEndian))])
13534 for {
13535 off := v.AuxInt
13536 sym := v.Aux
13537 _ = v.Args[1]
13538 v_0 := v.Args[0]
13539 if v_0.Op != OpSB {
13540 break
13541 }
13542 if !(symIsRO(sym)) {
13543 break
13544 }
13545 v.reset(OpARM64MOVDconst)
13546 v.AuxInt = int64(read16(sym, off, config.BigEndian))
13547 return true
13548 }
13549 return false
13550 }
13551 func rewriteValueARM64_OpARM64MOVHUloadidx_0(v *Value) bool {
13552 // match: (MOVHUloadidx ptr (MOVDconst [c]) mem)
13553 // cond:
13554 // result: (MOVHUload [c] ptr mem)
13555 for {
13556 mem := v.Args[2]
13557 ptr := v.Args[0]
13558 v_1 := v.Args[1]
13559 if v_1.Op != OpARM64MOVDconst {
13560 break
13561 }
13562 c := v_1.AuxInt
13563 v.reset(OpARM64MOVHUload)
13564 v.AuxInt = c
13565 v.AddArg(ptr)
13566 v.AddArg(mem)
13567 return true
13568 }
13569 // match: (MOVHUloadidx (MOVDconst [c]) ptr mem)
13570 // cond:
13571 // result: (MOVHUload [c] ptr mem)
13572 for {
13573 mem := v.Args[2]
13574 v_0 := v.Args[0]
13575 if v_0.Op != OpARM64MOVDconst {
13576 break
13577 }
13578 c := v_0.AuxInt
13579 ptr := v.Args[1]
13580 v.reset(OpARM64MOVHUload)
13581 v.AuxInt = c
13582 v.AddArg(ptr)
13583 v.AddArg(mem)
13584 return true
13585 }
13586 // match: (MOVHUloadidx ptr (SLLconst [1] idx) mem)
13587 // cond:
13588 // result: (MOVHUloadidx2 ptr idx mem)
13589 for {
13590 mem := v.Args[2]
13591 ptr := v.Args[0]
13592 v_1 := v.Args[1]
13593 if v_1.Op != OpARM64SLLconst {
13594 break
13595 }
13596 if v_1.AuxInt != 1 {
13597 break
13598 }
13599 idx := v_1.Args[0]
13600 v.reset(OpARM64MOVHUloadidx2)
13601 v.AddArg(ptr)
13602 v.AddArg(idx)
13603 v.AddArg(mem)
13604 return true
13605 }
13606 // match: (MOVHUloadidx ptr (ADD idx idx) mem)
13607 // cond:
13608 // result: (MOVHUloadidx2 ptr idx mem)
13609 for {
13610 mem := v.Args[2]
13611 ptr := v.Args[0]
13612 v_1 := v.Args[1]
13613 if v_1.Op != OpARM64ADD {
13614 break
13615 }
13616 idx := v_1.Args[1]
13617 if idx != v_1.Args[0] {
13618 break
13619 }
13620 v.reset(OpARM64MOVHUloadidx2)
13621 v.AddArg(ptr)
13622 v.AddArg(idx)
13623 v.AddArg(mem)
13624 return true
13625 }
13626 // match: (MOVHUloadidx (ADD idx idx) ptr mem)
13627 // cond:
13628 // result: (MOVHUloadidx2 ptr idx mem)
13629 for {
13630 mem := v.Args[2]
13631 v_0 := v.Args[0]
13632 if v_0.Op != OpARM64ADD {
13633 break
13634 }
13635 idx := v_0.Args[1]
13636 if idx != v_0.Args[0] {
13637 break
13638 }
13639 ptr := v.Args[1]
13640 v.reset(OpARM64MOVHUloadidx2)
13641 v.AddArg(ptr)
13642 v.AddArg(idx)
13643 v.AddArg(mem)
13644 return true
13645 }
13646 // match: (MOVHUloadidx ptr idx (MOVHstorezeroidx ptr2 idx2 _))
13647 // cond: (isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2))
13648 // result: (MOVDconst [0])
13649 for {
13650 _ = v.Args[2]
13651 ptr := v.Args[0]
13652 idx := v.Args[1]
13653 v_2 := v.Args[2]
13654 if v_2.Op != OpARM64MOVHstorezeroidx {
13655 break
13656 }
13657 _ = v_2.Args[2]
13658 ptr2 := v_2.Args[0]
13659 idx2 := v_2.Args[1]
13660 if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) {
13661 break
13662 }
13663 v.reset(OpARM64MOVDconst)
13664 v.AuxInt = 0
13665 return true
13666 }
13667 return false
13668 }
13669 func rewriteValueARM64_OpARM64MOVHUloadidx2_0(v *Value) bool {
13670 // match: (MOVHUloadidx2 ptr (MOVDconst [c]) mem)
13671 // cond:
13672 // result: (MOVHUload [c<<1] ptr mem)
13673 for {
13674 mem := v.Args[2]
13675 ptr := v.Args[0]
13676 v_1 := v.Args[1]
13677 if v_1.Op != OpARM64MOVDconst {
13678 break
13679 }
13680 c := v_1.AuxInt
13681 v.reset(OpARM64MOVHUload)
13682 v.AuxInt = c << 1
13683 v.AddArg(ptr)
13684 v.AddArg(mem)
13685 return true
13686 }
13687 // match: (MOVHUloadidx2 ptr idx (MOVHstorezeroidx2 ptr2 idx2 _))
13688 // cond: isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2)
13689 // result: (MOVDconst [0])
13690 for {
13691 _ = v.Args[2]
13692 ptr := v.Args[0]
13693 idx := v.Args[1]
13694 v_2 := v.Args[2]
13695 if v_2.Op != OpARM64MOVHstorezeroidx2 {
13696 break
13697 }
13698 _ = v_2.Args[2]
13699 ptr2 := v_2.Args[0]
13700 idx2 := v_2.Args[1]
13701 if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2)) {
13702 break
13703 }
13704 v.reset(OpARM64MOVDconst)
13705 v.AuxInt = 0
13706 return true
13707 }
13708 return false
13709 }
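// rewriteValueARM64_OpARM64MOVHUreg_0 drops zero extensions that are redundant after a narrowing
// load or extension, constant-folds MOVHUreg, narrows an ANDconst mask to 16 bits, and rewrites
// extension of a left-shifted value into a UBFIZ bitfield insertion.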
13710 func rewriteValueARM64_OpARM64MOVHUreg_0(v *Value) bool {
13711 // match: (MOVHUreg x:(MOVBUload _ _))
13712 // cond:
13713 // result: (MOVDreg x)
13714 for {
13715 x := v.Args[0]
13716 if x.Op != OpARM64MOVBUload {
13717 break
13718 }
13719 _ = x.Args[1]
13720 v.reset(OpARM64MOVDreg)
13721 v.AddArg(x)
13722 return true
13723 }
13724 // match: (MOVHUreg x:(MOVHUload _ _))
13725 // cond:
13726 // result: (MOVDreg x)
13727 for {
13728 x := v.Args[0]
13729 if x.Op != OpARM64MOVHUload {
13730 break
13731 }
13732 _ = x.Args[1]
13733 v.reset(OpARM64MOVDreg)
13734 v.AddArg(x)
13735 return true
13736 }
13737 // match: (MOVHUreg x:(MOVBUloadidx _ _ _))
13738 // cond:
13739 // result: (MOVDreg x)
13740 for {
13741 x := v.Args[0]
13742 if x.Op != OpARM64MOVBUloadidx {
13743 break
13744 }
13745 _ = x.Args[2]
13746 v.reset(OpARM64MOVDreg)
13747 v.AddArg(x)
13748 return true
13749 }
13750 // match: (MOVHUreg x:(MOVHUloadidx _ _ _))
13751 // cond:
13752 // result: (MOVDreg x)
13753 for {
13754 x := v.Args[0]
13755 if x.Op != OpARM64MOVHUloadidx {
13756 break
13757 }
13758 _ = x.Args[2]
13759 v.reset(OpARM64MOVDreg)
13760 v.AddArg(x)
13761 return true
13762 }
13763 // match: (MOVHUreg x:(MOVHUloadidx2 _ _ _))
13764 // cond:
13765 // result: (MOVDreg x)
13766 for {
13767 x := v.Args[0]
13768 if x.Op != OpARM64MOVHUloadidx2 {
13769 break
13770 }
13771 _ = x.Args[2]
13772 v.reset(OpARM64MOVDreg)
13773 v.AddArg(x)
13774 return true
13775 }
13776 // match: (MOVHUreg x:(MOVBUreg _))
13777 // cond:
13778 // result: (MOVDreg x)
13779 for {
13780 x := v.Args[0]
13781 if x.Op != OpARM64MOVBUreg {
13782 break
13783 }
13784 v.reset(OpARM64MOVDreg)
13785 v.AddArg(x)
13786 return true
13787 }
13788 // match: (MOVHUreg x:(MOVHUreg _))
13789 // cond:
13790 // result: (MOVDreg x)
13791 for {
13792 x := v.Args[0]
13793 if x.Op != OpARM64MOVHUreg {
13794 break
13795 }
13796 v.reset(OpARM64MOVDreg)
13797 v.AddArg(x)
13798 return true
13799 }
13800 // match: (MOVHUreg (ANDconst [c] x))
13801 // cond:
13802 // result: (ANDconst [c&(1<<16-1)] x)
13803 for {
13804 v_0 := v.Args[0]
13805 if v_0.Op != OpARM64ANDconst {
13806 break
13807 }
13808 c := v_0.AuxInt
13809 x := v_0.Args[0]
13810 v.reset(OpARM64ANDconst)
13811 v.AuxInt = c & (1<<16 - 1)
13812 v.AddArg(x)
13813 return true
13814 }
13815 // match: (MOVHUreg (MOVDconst [c]))
13816 // cond:
13817 // result: (MOVDconst [int64(uint16(c))])
13818 for {
13819 v_0 := v.Args[0]
13820 if v_0.Op != OpARM64MOVDconst {
13821 break
13822 }
13823 c := v_0.AuxInt
13824 v.reset(OpARM64MOVDconst)
13825 v.AuxInt = int64(uint16(c))
13826 return true
13827 }
13828 // match: (MOVHUreg (SLLconst [sc] x))
13829 // cond: isARM64BFMask(sc, 1<<16-1, sc)
13830 // result: (UBFIZ [armBFAuxInt(sc, arm64BFWidth(1<<16-1, sc))] x)
13831 for {
13832 v_0 := v.Args[0]
13833 if v_0.Op != OpARM64SLLconst {
13834 break
13835 }
13836 sc := v_0.AuxInt
13837 x := v_0.Args[0]
13838 if !(isARM64BFMask(sc, 1<<16-1, sc)) {
13839 break
13840 }
13841 v.reset(OpARM64UBFIZ)
13842 v.AuxInt = armBFAuxInt(sc, arm64BFWidth(1<<16-1, sc))
13843 v.AddArg(x)
13844 return true
13845 }
13846 return false
13847 }
13848 func rewriteValueARM64_OpARM64MOVHUreg_10(v *Value) bool {
13849 // match: (MOVHUreg (SRLconst [sc] x))
13850 // cond: isARM64BFMask(sc, 1<<16-1, 0)
13851 // result: (UBFX [armBFAuxInt(sc, 16)] x)
13852 for {
13853 v_0 := v.Args[0]
13854 if v_0.Op != OpARM64SRLconst {
13855 break
13856 }
13857 sc := v_0.AuxInt
13858 x := v_0.Args[0]
13859 if !(isARM64BFMask(sc, 1<<16-1, 0)) {
13860 break
13861 }
13862 v.reset(OpARM64UBFX)
13863 v.AuxInt = armBFAuxInt(sc, 16)
13864 v.AddArg(x)
13865 return true
13866 }
13867 return false
13868 }
13869 func rewriteValueARM64_OpARM64MOVHload_0(v *Value) bool {
13870 b := v.Block
13871 config := b.Func.Config
13872 // match: (MOVHload [off1] {sym} (ADDconst [off2] ptr) mem)
13873 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
13874 // result: (MOVHload [off1+off2] {sym} ptr mem)
13875 for {
13876 off1 := v.AuxInt
13877 sym := v.Aux
13878 mem := v.Args[1]
13879 v_0 := v.Args[0]
13880 if v_0.Op != OpARM64ADDconst {
13881 break
13882 }
13883 off2 := v_0.AuxInt
13884 ptr := v_0.Args[0]
13885 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
13886 break
13887 }
13888 v.reset(OpARM64MOVHload)
13889 v.AuxInt = off1 + off2
13890 v.Aux = sym
13891 v.AddArg(ptr)
13892 v.AddArg(mem)
13893 return true
13894 }
13895 // match: (MOVHload [off] {sym} (ADD ptr idx) mem)
13896 // cond: off == 0 && sym == nil
13897 // result: (MOVHloadidx ptr idx mem)
13898 for {
13899 off := v.AuxInt
13900 sym := v.Aux
13901 mem := v.Args[1]
13902 v_0 := v.Args[0]
13903 if v_0.Op != OpARM64ADD {
13904 break
13905 }
13906 idx := v_0.Args[1]
13907 ptr := v_0.Args[0]
13908 if !(off == 0 && sym == nil) {
13909 break
13910 }
13911 v.reset(OpARM64MOVHloadidx)
13912 v.AddArg(ptr)
13913 v.AddArg(idx)
13914 v.AddArg(mem)
13915 return true
13916 }
13917 // match: (MOVHload [off] {sym} (ADDshiftLL [1] ptr idx) mem)
13918 // cond: off == 0 && sym == nil
13919 // result: (MOVHloadidx2 ptr idx mem)
13920 for {
13921 off := v.AuxInt
13922 sym := v.Aux
13923 mem := v.Args[1]
13924 v_0 := v.Args[0]
13925 if v_0.Op != OpARM64ADDshiftLL {
13926 break
13927 }
13928 if v_0.AuxInt != 1 {
13929 break
13930 }
13931 idx := v_0.Args[1]
13932 ptr := v_0.Args[0]
13933 if !(off == 0 && sym == nil) {
13934 break
13935 }
13936 v.reset(OpARM64MOVHloadidx2)
13937 v.AddArg(ptr)
13938 v.AddArg(idx)
13939 v.AddArg(mem)
13940 return true
13941 }
13942 // match: (MOVHload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
13943 // cond: canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
13944 // result: (MOVHload [off1+off2] {mergeSym(sym1, sym2)} ptr mem)
13945 for {
13946 off1 := v.AuxInt
13947 sym1 := v.Aux
13948 mem := v.Args[1]
13949 v_0 := v.Args[0]
13950 if v_0.Op != OpARM64MOVDaddr {
13951 break
13952 }
13953 off2 := v_0.AuxInt
13954 sym2 := v_0.Aux
13955 ptr := v_0.Args[0]
13956 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
13957 break
13958 }
13959 v.reset(OpARM64MOVHload)
13960 v.AuxInt = off1 + off2
13961 v.Aux = mergeSym(sym1, sym2)
13962 v.AddArg(ptr)
13963 v.AddArg(mem)
13964 return true
13965 }
13966 // match: (MOVHload [off] {sym} ptr (MOVHstorezero [off2] {sym2} ptr2 _))
13967 // cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
13968 // result: (MOVDconst [0])
13969 for {
13970 off := v.AuxInt
13971 sym := v.Aux
13972 _ = v.Args[1]
13973 ptr := v.Args[0]
13974 v_1 := v.Args[1]
13975 if v_1.Op != OpARM64MOVHstorezero {
13976 break
13977 }
13978 off2 := v_1.AuxInt
13979 sym2 := v_1.Aux
13980 _ = v_1.Args[1]
13981 ptr2 := v_1.Args[0]
13982 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
13983 break
13984 }
13985 v.reset(OpARM64MOVDconst)
13986 v.AuxInt = 0
13987 return true
13988 }
13989 return false
13990 }
13991 func rewriteValueARM64_OpARM64MOVHloadidx_0(v *Value) bool {
13992 // match: (MOVHloadidx ptr (MOVDconst [c]) mem)
13993 // cond:
13994 // result: (MOVHload [c] ptr mem)
13995 for {
13996 mem := v.Args[2]
13997 ptr := v.Args[0]
13998 v_1 := v.Args[1]
13999 if v_1.Op != OpARM64MOVDconst {
14000 break
14001 }
14002 c := v_1.AuxInt
14003 v.reset(OpARM64MOVHload)
14004 v.AuxInt = c
14005 v.AddArg(ptr)
14006 v.AddArg(mem)
14007 return true
14008 }
14009 // match: (MOVHloadidx (MOVDconst [c]) ptr mem)
14010 // cond:
14011 // result: (MOVHload [c] ptr mem)
14012 for {
14013 mem := v.Args[2]
14014 v_0 := v.Args[0]
14015 if v_0.Op != OpARM64MOVDconst {
14016 break
14017 }
14018 c := v_0.AuxInt
14019 ptr := v.Args[1]
14020 v.reset(OpARM64MOVHload)
14021 v.AuxInt = c
14022 v.AddArg(ptr)
14023 v.AddArg(mem)
14024 return true
14025 }
14026 // match: (MOVHloadidx ptr (SLLconst [1] idx) mem)
14027 // cond:
14028 // result: (MOVHloadidx2 ptr idx mem)
14029 for {
14030 mem := v.Args[2]
14031 ptr := v.Args[0]
14032 v_1 := v.Args[1]
14033 if v_1.Op != OpARM64SLLconst {
14034 break
14035 }
14036 if v_1.AuxInt != 1 {
14037 break
14038 }
14039 idx := v_1.Args[0]
14040 v.reset(OpARM64MOVHloadidx2)
14041 v.AddArg(ptr)
14042 v.AddArg(idx)
14043 v.AddArg(mem)
14044 return true
14045 }
14046 // match: (MOVHloadidx ptr (ADD idx idx) mem)
14047 // cond:
14048 // result: (MOVHloadidx2 ptr idx mem)
14049 for {
14050 mem := v.Args[2]
14051 ptr := v.Args[0]
14052 v_1 := v.Args[1]
14053 if v_1.Op != OpARM64ADD {
14054 break
14055 }
14056 idx := v_1.Args[1]
14057 if idx != v_1.Args[0] {
14058 break
14059 }
14060 v.reset(OpARM64MOVHloadidx2)
14061 v.AddArg(ptr)
14062 v.AddArg(idx)
14063 v.AddArg(mem)
14064 return true
14065 }
14066 // match: (MOVHloadidx (ADD idx idx) ptr mem)
14067 // cond:
14068 // result: (MOVHloadidx2 ptr idx mem)
14069 for {
14070 mem := v.Args[2]
14071 v_0 := v.Args[0]
14072 if v_0.Op != OpARM64ADD {
14073 break
14074 }
14075 idx := v_0.Args[1]
14076 if idx != v_0.Args[0] {
14077 break
14078 }
14079 ptr := v.Args[1]
14080 v.reset(OpARM64MOVHloadidx2)
14081 v.AddArg(ptr)
14082 v.AddArg(idx)
14083 v.AddArg(mem)
14084 return true
14085 }
14086 // match: (MOVHloadidx ptr idx (MOVHstorezeroidx ptr2 idx2 _))
14087 // cond: (isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2))
14088 // result: (MOVDconst [0])
14089 for {
14090 _ = v.Args[2]
14091 ptr := v.Args[0]
14092 idx := v.Args[1]
14093 v_2 := v.Args[2]
14094 if v_2.Op != OpARM64MOVHstorezeroidx {
14095 break
14096 }
14097 _ = v_2.Args[2]
14098 ptr2 := v_2.Args[0]
14099 idx2 := v_2.Args[1]
14100 if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) {
14101 break
14102 }
14103 v.reset(OpARM64MOVDconst)
14104 v.AuxInt = 0
14105 return true
14106 }
14107 return false
14108 }
14109 func rewriteValueARM64_OpARM64MOVHloadidx2_0(v *Value) bool {
14110 // match: (MOVHloadidx2 ptr (MOVDconst [c]) mem)
14111 // cond:
14112 // result: (MOVHload [c<<1] ptr mem)
14113 for {
14114 mem := v.Args[2]
14115 ptr := v.Args[0]
14116 v_1 := v.Args[1]
14117 if v_1.Op != OpARM64MOVDconst {
14118 break
14119 }
14120 c := v_1.AuxInt
14121 v.reset(OpARM64MOVHload)
14122 v.AuxInt = c << 1
14123 v.AddArg(ptr)
14124 v.AddArg(mem)
14125 return true
14126 }
14127 // match: (MOVHloadidx2 ptr idx (MOVHstorezeroidx2 ptr2 idx2 _))
14128 // cond: isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2)
14129 // result: (MOVDconst [0])
14130 for {
14131 _ = v.Args[2]
14132 ptr := v.Args[0]
14133 idx := v.Args[1]
14134 v_2 := v.Args[2]
14135 if v_2.Op != OpARM64MOVHstorezeroidx2 {
14136 break
14137 }
14138 _ = v_2.Args[2]
14139 ptr2 := v_2.Args[0]
14140 idx2 := v_2.Args[1]
14141 if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2)) {
14142 break
14143 }
14144 v.reset(OpARM64MOVDconst)
14145 v.AuxInt = 0
14146 return true
14147 }
14148 return false
14149 }
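// rewriteValueARM64_OpARM64MOVHreg_0 removes sign extensions that are redundant after a narrower
// load or extension by rewriting MOVHreg to MOVDreg.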
14150 func rewriteValueARM64_OpARM64MOVHreg_0(v *Value) bool {
14151 // match: (MOVHreg x:(MOVBload _ _))
14152 // cond:
14153 // result: (MOVDreg x)
14154 for {
14155 x := v.Args[0]
14156 if x.Op != OpARM64MOVBload {
14157 break
14158 }
14159 _ = x.Args[1]
14160 v.reset(OpARM64MOVDreg)
14161 v.AddArg(x)
14162 return true
14163 }
14164 // match: (MOVHreg x:(MOVBUload _ _))
14165 // cond:
14166 // result: (MOVDreg x)
14167 for {
14168 x := v.Args[0]
14169 if x.Op != OpARM64MOVBUload {
14170 break
14171 }
14172 _ = x.Args[1]
14173 v.reset(OpARM64MOVDreg)
14174 v.AddArg(x)
14175 return true
14176 }
14177 // match: (MOVHreg x:(MOVHload _ _))
14178 // cond:
14179 // result: (MOVDreg x)
14180 for {
14181 x := v.Args[0]
14182 if x.Op != OpARM64MOVHload {
14183 break
14184 }
14185 _ = x.Args[1]
14186 v.reset(OpARM64MOVDreg)
14187 v.AddArg(x)
14188 return true
14189 }
14190 // match: (MOVHreg x:(MOVBloadidx _ _ _))
14191 // cond:
14192 // result: (MOVDreg x)
14193 for {
14194 x := v.Args[0]
14195 if x.Op != OpARM64MOVBloadidx {
14196 break
14197 }
14198 _ = x.Args[2]
14199 v.reset(OpARM64MOVDreg)
14200 v.AddArg(x)
14201 return true
14202 }
14203 // match: (MOVHreg x:(MOVBUloadidx _ _ _))
14204 // cond:
14205 // result: (MOVDreg x)
14206 for {
14207 x := v.Args[0]
14208 if x.Op != OpARM64MOVBUloadidx {
14209 break
14210 }
14211 _ = x.Args[2]
14212 v.reset(OpARM64MOVDreg)
14213 v.AddArg(x)
14214 return true
14215 }
14216 // match: (MOVHreg x:(MOVHloadidx _ _ _))
14217 // cond:
14218 // result: (MOVDreg x)
14219 for {
14220 x := v.Args[0]
14221 if x.Op != OpARM64MOVHloadidx {
14222 break
14223 }
14224 _ = x.Args[2]
14225 v.reset(OpARM64MOVDreg)
14226 v.AddArg(x)
14227 return true
14228 }
14229 // match: (MOVHreg x:(MOVHloadidx2 _ _ _))
14230 // cond:
14231 // result: (MOVDreg x)
14232 for {
14233 x := v.Args[0]
14234 if x.Op != OpARM64MOVHloadidx2 {
14235 break
14236 }
14237 _ = x.Args[2]
14238 v.reset(OpARM64MOVDreg)
14239 v.AddArg(x)
14240 return true
14241 }
14242 // match: (MOVHreg x:(MOVBreg _))
14243 // cond:
14244 // result: (MOVDreg x)
14245 for {
14246 x := v.Args[0]
14247 if x.Op != OpARM64MOVBreg {
14248 break
14249 }
14250 v.reset(OpARM64MOVDreg)
14251 v.AddArg(x)
14252 return true
14253 }
14254 // match: (MOVHreg x:(MOVBUreg _))
14255 // cond:
14256 // result: (MOVDreg x)
14257 for {
14258 x := v.Args[0]
14259 if x.Op != OpARM64MOVBUreg {
14260 break
14261 }
14262 v.reset(OpARM64MOVDreg)
14263 v.AddArg(x)
14264 return true
14265 }
14266 // match: (MOVHreg x:(MOVHreg _))
14267 // cond:
14268 // result: (MOVDreg x)
14269 for {
14270 x := v.Args[0]
14271 if x.Op != OpARM64MOVHreg {
14272 break
14273 }
14274 v.reset(OpARM64MOVDreg)
14275 v.AddArg(x)
14276 return true
14277 }
14278 return false
14279 }
14280 func rewriteValueARM64_OpARM64MOVHreg_10(v *Value) bool {
14281 // match: (MOVHreg (MOVDconst [c]))
14282 // cond:
14283 // result: (MOVDconst [int64(int16(c))])
14284 for {
14285 v_0 := v.Args[0]
14286 if v_0.Op != OpARM64MOVDconst {
14287 break
14288 }
14289 c := v_0.AuxInt
14290 v.reset(OpARM64MOVDconst)
14291 v.AuxInt = int64(int16(c))
14292 return true
14293 }
14294 // match: (MOVHreg (SLLconst [lc] x))
14295 // cond: lc < 16
14296 // result: (SBFIZ [armBFAuxInt(lc, 16-lc)] x)
14297 for {
14298 v_0 := v.Args[0]
14299 if v_0.Op != OpARM64SLLconst {
14300 break
14301 }
14302 lc := v_0.AuxInt
14303 x := v_0.Args[0]
14304 if !(lc < 16) {
14305 break
14306 }
14307 v.reset(OpARM64SBFIZ)
14308 v.AuxInt = armBFAuxInt(lc, 16-lc)
14309 v.AddArg(x)
14310 return true
14311 }
14312 return false
14313 }
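// rewriteValueARM64_OpARM64MOVHstore_0 folds address arithmetic and symbols into MOVHstore,
// selects indexed addressing modes, turns zero stores into MOVHstorezero, drops redundant
// extensions of the stored value, and begins merging adjacent halfword stores into word stores.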
14314 func rewriteValueARM64_OpARM64MOVHstore_0(v *Value) bool {
14315 b := v.Block
14316 config := b.Func.Config
14317 // match: (MOVHstore [off1] {sym} (ADDconst [off2] ptr) val mem)
14318 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
14319 // result: (MOVHstore [off1+off2] {sym} ptr val mem)
14320 for {
14321 off1 := v.AuxInt
14322 sym := v.Aux
14323 mem := v.Args[2]
14324 v_0 := v.Args[0]
14325 if v_0.Op != OpARM64ADDconst {
14326 break
14327 }
14328 off2 := v_0.AuxInt
14329 ptr := v_0.Args[0]
14330 val := v.Args[1]
14331 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
14332 break
14333 }
14334 v.reset(OpARM64MOVHstore)
14335 v.AuxInt = off1 + off2
14336 v.Aux = sym
14337 v.AddArg(ptr)
14338 v.AddArg(val)
14339 v.AddArg(mem)
14340 return true
14341 }
14342 // match: (MOVHstore [off] {sym} (ADD ptr idx) val mem)
14343 // cond: off == 0 && sym == nil
14344 // result: (MOVHstoreidx ptr idx val mem)
14345 for {
14346 off := v.AuxInt
14347 sym := v.Aux
14348 mem := v.Args[2]
14349 v_0 := v.Args[0]
14350 if v_0.Op != OpARM64ADD {
14351 break
14352 }
14353 idx := v_0.Args[1]
14354 ptr := v_0.Args[0]
14355 val := v.Args[1]
14356 if !(off == 0 && sym == nil) {
14357 break
14358 }
14359 v.reset(OpARM64MOVHstoreidx)
14360 v.AddArg(ptr)
14361 v.AddArg(idx)
14362 v.AddArg(val)
14363 v.AddArg(mem)
14364 return true
14365 }
14366 // match: (MOVHstore [off] {sym} (ADDshiftLL [1] ptr idx) val mem)
14367 // cond: off == 0 && sym == nil
14368 // result: (MOVHstoreidx2 ptr idx val mem)
14369 for {
14370 off := v.AuxInt
14371 sym := v.Aux
14372 mem := v.Args[2]
14373 v_0 := v.Args[0]
14374 if v_0.Op != OpARM64ADDshiftLL {
14375 break
14376 }
14377 if v_0.AuxInt != 1 {
14378 break
14379 }
14380 idx := v_0.Args[1]
14381 ptr := v_0.Args[0]
14382 val := v.Args[1]
14383 if !(off == 0 && sym == nil) {
14384 break
14385 }
14386 v.reset(OpARM64MOVHstoreidx2)
14387 v.AddArg(ptr)
14388 v.AddArg(idx)
14389 v.AddArg(val)
14390 v.AddArg(mem)
14391 return true
14392 }
14393 // match: (MOVHstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem)
14394 // cond: canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
14395 // result: (MOVHstore [off1+off2] {mergeSym(sym1, sym2)} ptr val mem)
14396 for {
14397 off1 := v.AuxInt
14398 sym1 := v.Aux
14399 mem := v.Args[2]
14400 v_0 := v.Args[0]
14401 if v_0.Op != OpARM64MOVDaddr {
14402 break
14403 }
14404 off2 := v_0.AuxInt
14405 sym2 := v_0.Aux
14406 ptr := v_0.Args[0]
14407 val := v.Args[1]
14408 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
14409 break
14410 }
14411 v.reset(OpARM64MOVHstore)
14412 v.AuxInt = off1 + off2
14413 v.Aux = mergeSym(sym1, sym2)
14414 v.AddArg(ptr)
14415 v.AddArg(val)
14416 v.AddArg(mem)
14417 return true
14418 }
14419 // match: (MOVHstore [off] {sym} ptr (MOVDconst [0]) mem)
14420 // cond:
14421 // result: (MOVHstorezero [off] {sym} ptr mem)
14422 for {
14423 off := v.AuxInt
14424 sym := v.Aux
14425 mem := v.Args[2]
14426 ptr := v.Args[0]
14427 v_1 := v.Args[1]
14428 if v_1.Op != OpARM64MOVDconst {
14429 break
14430 }
14431 if v_1.AuxInt != 0 {
14432 break
14433 }
14434 v.reset(OpARM64MOVHstorezero)
14435 v.AuxInt = off
14436 v.Aux = sym
14437 v.AddArg(ptr)
14438 v.AddArg(mem)
14439 return true
14440 }
14441 // match: (MOVHstore [off] {sym} ptr (MOVHreg x) mem)
14442 // cond:
14443 // result: (MOVHstore [off] {sym} ptr x mem)
14444 for {
14445 off := v.AuxInt
14446 sym := v.Aux
14447 mem := v.Args[2]
14448 ptr := v.Args[0]
14449 v_1 := v.Args[1]
14450 if v_1.Op != OpARM64MOVHreg {
14451 break
14452 }
14453 x := v_1.Args[0]
14454 v.reset(OpARM64MOVHstore)
14455 v.AuxInt = off
14456 v.Aux = sym
14457 v.AddArg(ptr)
14458 v.AddArg(x)
14459 v.AddArg(mem)
14460 return true
14461 }
14462 // match: (MOVHstore [off] {sym} ptr (MOVHUreg x) mem)
14463 // cond:
14464 // result: (MOVHstore [off] {sym} ptr x mem)
14465 for {
14466 off := v.AuxInt
14467 sym := v.Aux
14468 mem := v.Args[2]
14469 ptr := v.Args[0]
14470 v_1 := v.Args[1]
14471 if v_1.Op != OpARM64MOVHUreg {
14472 break
14473 }
14474 x := v_1.Args[0]
14475 v.reset(OpARM64MOVHstore)
14476 v.AuxInt = off
14477 v.Aux = sym
14478 v.AddArg(ptr)
14479 v.AddArg(x)
14480 v.AddArg(mem)
14481 return true
14482 }
14483 // match: (MOVHstore [off] {sym} ptr (MOVWreg x) mem)
14484 // cond:
14485 // result: (MOVHstore [off] {sym} ptr x mem)
14486 for {
14487 off := v.AuxInt
14488 sym := v.Aux
14489 mem := v.Args[2]
14490 ptr := v.Args[0]
14491 v_1 := v.Args[1]
14492 if v_1.Op != OpARM64MOVWreg {
14493 break
14494 }
14495 x := v_1.Args[0]
14496 v.reset(OpARM64MOVHstore)
14497 v.AuxInt = off
14498 v.Aux = sym
14499 v.AddArg(ptr)
14500 v.AddArg(x)
14501 v.AddArg(mem)
14502 return true
14503 }
14504 // match: (MOVHstore [off] {sym} ptr (MOVWUreg x) mem)
14505 // cond:
14506 // result: (MOVHstore [off] {sym} ptr x mem)
14507 for {
14508 off := v.AuxInt
14509 sym := v.Aux
14510 mem := v.Args[2]
14511 ptr := v.Args[0]
14512 v_1 := v.Args[1]
14513 if v_1.Op != OpARM64MOVWUreg {
14514 break
14515 }
14516 x := v_1.Args[0]
14517 v.reset(OpARM64MOVHstore)
14518 v.AuxInt = off
14519 v.Aux = sym
14520 v.AddArg(ptr)
14521 v.AddArg(x)
14522 v.AddArg(mem)
14523 return true
14524 }
14525 // match: (MOVHstore [i] {s} ptr0 (SRLconst [16] w) x:(MOVHstore [i-2] {s} ptr1 w mem))
14526 // cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)
14527 // result: (MOVWstore [i-2] {s} ptr0 w mem)
14528 for {
14529 i := v.AuxInt
14530 s := v.Aux
14531 _ = v.Args[2]
14532 ptr0 := v.Args[0]
14533 v_1 := v.Args[1]
14534 if v_1.Op != OpARM64SRLconst {
14535 break
14536 }
14537 if v_1.AuxInt != 16 {
14538 break
14539 }
14540 w := v_1.Args[0]
14541 x := v.Args[2]
14542 if x.Op != OpARM64MOVHstore {
14543 break
14544 }
14545 if x.AuxInt != i-2 {
14546 break
14547 }
14548 if x.Aux != s {
14549 break
14550 }
14551 mem := x.Args[2]
14552 ptr1 := x.Args[0]
14553 if w != x.Args[1] {
14554 break
14555 }
14556 if !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) {
14557 break
14558 }
14559 v.reset(OpARM64MOVWstore)
14560 v.AuxInt = i - 2
14561 v.Aux = s
14562 v.AddArg(ptr0)
14563 v.AddArg(w)
14564 v.AddArg(mem)
14565 return true
14566 }
14567 return false
14568 }
14569 func rewriteValueARM64_OpARM64MOVHstore_10(v *Value) bool {
14570 b := v.Block
14571 // match: (MOVHstore [2] {s} (ADD ptr0 idx0) (SRLconst [16] w) x:(MOVHstoreidx ptr1 idx1 w mem))
14572 // cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)
14573 // result: (MOVWstoreidx ptr1 idx1 w mem)
14574 for {
14575 if v.AuxInt != 2 {
14576 break
14577 }
14578 s := v.Aux
14579 _ = v.Args[2]
14580 v_0 := v.Args[0]
14581 if v_0.Op != OpARM64ADD {
14582 break
14583 }
14584 idx0 := v_0.Args[1]
14585 ptr0 := v_0.Args[0]
14586 v_1 := v.Args[1]
14587 if v_1.Op != OpARM64SRLconst {
14588 break
14589 }
14590 if v_1.AuxInt != 16 {
14591 break
14592 }
14593 w := v_1.Args[0]
14594 x := v.Args[2]
14595 if x.Op != OpARM64MOVHstoreidx {
14596 break
14597 }
14598 mem := x.Args[3]
14599 ptr1 := x.Args[0]
14600 idx1 := x.Args[1]
14601 if w != x.Args[2] {
14602 break
14603 }
14604 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
14605 break
14606 }
14607 v.reset(OpARM64MOVWstoreidx)
14608 v.AddArg(ptr1)
14609 v.AddArg(idx1)
14610 v.AddArg(w)
14611 v.AddArg(mem)
14612 return true
14613 }
14614 // match: (MOVHstore [2] {s} (ADDshiftLL [1] ptr0 idx0) (SRLconst [16] w) x:(MOVHstoreidx2 ptr1 idx1 w mem))
14615 // cond: x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)
14616 // result: (MOVWstoreidx ptr1 (SLLconst <idx1.Type> [1] idx1) w mem)
14617 for {
14618 if v.AuxInt != 2 {
14619 break
14620 }
14621 s := v.Aux
14622 _ = v.Args[2]
14623 v_0 := v.Args[0]
14624 if v_0.Op != OpARM64ADDshiftLL {
14625 break
14626 }
14627 if v_0.AuxInt != 1 {
14628 break
14629 }
14630 idx0 := v_0.Args[1]
14631 ptr0 := v_0.Args[0]
14632 v_1 := v.Args[1]
14633 if v_1.Op != OpARM64SRLconst {
14634 break
14635 }
14636 if v_1.AuxInt != 16 {
14637 break
14638 }
14639 w := v_1.Args[0]
14640 x := v.Args[2]
14641 if x.Op != OpARM64MOVHstoreidx2 {
14642 break
14643 }
14644 mem := x.Args[3]
14645 ptr1 := x.Args[0]
14646 idx1 := x.Args[1]
14647 if w != x.Args[2] {
14648 break
14649 }
14650 if !(x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)) {
14651 break
14652 }
14653 v.reset(OpARM64MOVWstoreidx)
14654 v.AddArg(ptr1)
14655 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, idx1.Type)
14656 v0.AuxInt = 1
14657 v0.AddArg(idx1)
14658 v.AddArg(v0)
14659 v.AddArg(w)
14660 v.AddArg(mem)
14661 return true
14662 }
14663 // match: (MOVHstore [i] {s} ptr0 (UBFX [armBFAuxInt(16, 16)] w) x:(MOVHstore [i-2] {s} ptr1 w mem))
14664 // cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)
14665 // result: (MOVWstore [i-2] {s} ptr0 w mem)
14666 for {
14667 i := v.AuxInt
14668 s := v.Aux
14669 _ = v.Args[2]
14670 ptr0 := v.Args[0]
14671 v_1 := v.Args[1]
14672 if v_1.Op != OpARM64UBFX {
14673 break
14674 }
14675 if v_1.AuxInt != armBFAuxInt(16, 16) {
14676 break
14677 }
14678 w := v_1.Args[0]
14679 x := v.Args[2]
14680 if x.Op != OpARM64MOVHstore {
14681 break
14682 }
14683 if x.AuxInt != i-2 {
14684 break
14685 }
14686 if x.Aux != s {
14687 break
14688 }
14689 mem := x.Args[2]
14690 ptr1 := x.Args[0]
14691 if w != x.Args[1] {
14692 break
14693 }
14694 if !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) {
14695 break
14696 }
14697 v.reset(OpARM64MOVWstore)
14698 v.AuxInt = i - 2
14699 v.Aux = s
14700 v.AddArg(ptr0)
14701 v.AddArg(w)
14702 v.AddArg(mem)
14703 return true
14704 }
14705 // match: (MOVHstore [2] {s} (ADD ptr0 idx0) (UBFX [armBFAuxInt(16, 16)] w) x:(MOVHstoreidx ptr1 idx1 w mem))
14706 // cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)
14707 // result: (MOVWstoreidx ptr1 idx1 w mem)
14708 for {
14709 if v.AuxInt != 2 {
14710 break
14711 }
14712 s := v.Aux
14713 _ = v.Args[2]
14714 v_0 := v.Args[0]
14715 if v_0.Op != OpARM64ADD {
14716 break
14717 }
14718 idx0 := v_0.Args[1]
14719 ptr0 := v_0.Args[0]
14720 v_1 := v.Args[1]
14721 if v_1.Op != OpARM64UBFX {
14722 break
14723 }
14724 if v_1.AuxInt != armBFAuxInt(16, 16) {
14725 break
14726 }
14727 w := v_1.Args[0]
14728 x := v.Args[2]
14729 if x.Op != OpARM64MOVHstoreidx {
14730 break
14731 }
14732 mem := x.Args[3]
14733 ptr1 := x.Args[0]
14734 idx1 := x.Args[1]
14735 if w != x.Args[2] {
14736 break
14737 }
14738 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
14739 break
14740 }
14741 v.reset(OpARM64MOVWstoreidx)
14742 v.AddArg(ptr1)
14743 v.AddArg(idx1)
14744 v.AddArg(w)
14745 v.AddArg(mem)
14746 return true
14747 }
14748 // match: (MOVHstore [2] {s} (ADDshiftLL [1] ptr0 idx0) (UBFX [armBFAuxInt(16, 16)] w) x:(MOVHstoreidx2 ptr1 idx1 w mem))
14749 // cond: x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)
14750 // result: (MOVWstoreidx ptr1 (SLLconst <idx1.Type> [1] idx1) w mem)
14751 for {
14752 if v.AuxInt != 2 {
14753 break
14754 }
14755 s := v.Aux
14756 _ = v.Args[2]
14757 v_0 := v.Args[0]
14758 if v_0.Op != OpARM64ADDshiftLL {
14759 break
14760 }
14761 if v_0.AuxInt != 1 {
14762 break
14763 }
14764 idx0 := v_0.Args[1]
14765 ptr0 := v_0.Args[0]
14766 v_1 := v.Args[1]
14767 if v_1.Op != OpARM64UBFX {
14768 break
14769 }
14770 if v_1.AuxInt != armBFAuxInt(16, 16) {
14771 break
14772 }
14773 w := v_1.Args[0]
14774 x := v.Args[2]
14775 if x.Op != OpARM64MOVHstoreidx2 {
14776 break
14777 }
14778 mem := x.Args[3]
14779 ptr1 := x.Args[0]
14780 idx1 := x.Args[1]
14781 if w != x.Args[2] {
14782 break
14783 }
14784 if !(x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)) {
14785 break
14786 }
14787 v.reset(OpARM64MOVWstoreidx)
14788 v.AddArg(ptr1)
14789 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, idx1.Type)
14790 v0.AuxInt = 1
14791 v0.AddArg(idx1)
14792 v.AddArg(v0)
14793 v.AddArg(w)
14794 v.AddArg(mem)
14795 return true
14796 }
14797 // match: (MOVHstore [i] {s} ptr0 (SRLconst [16] (MOVDreg w)) x:(MOVHstore [i-2] {s} ptr1 w mem))
14798 // cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)
14799 // result: (MOVWstore [i-2] {s} ptr0 w mem)
14800 for {
14801 i := v.AuxInt
14802 s := v.Aux
14803 _ = v.Args[2]
14804 ptr0 := v.Args[0]
14805 v_1 := v.Args[1]
14806 if v_1.Op != OpARM64SRLconst {
14807 break
14808 }
14809 if v_1.AuxInt != 16 {
14810 break
14811 }
14812 v_1_0 := v_1.Args[0]
14813 if v_1_0.Op != OpARM64MOVDreg {
14814 break
14815 }
14816 w := v_1_0.Args[0]
14817 x := v.Args[2]
14818 if x.Op != OpARM64MOVHstore {
14819 break
14820 }
14821 if x.AuxInt != i-2 {
14822 break
14823 }
14824 if x.Aux != s {
14825 break
14826 }
14827 mem := x.Args[2]
14828 ptr1 := x.Args[0]
14829 if w != x.Args[1] {
14830 break
14831 }
14832 if !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) {
14833 break
14834 }
14835 v.reset(OpARM64MOVWstore)
14836 v.AuxInt = i - 2
14837 v.Aux = s
14838 v.AddArg(ptr0)
14839 v.AddArg(w)
14840 v.AddArg(mem)
14841 return true
14842 }
14843 // match: (MOVHstore [2] {s} (ADD ptr0 idx0) (SRLconst [16] (MOVDreg w)) x:(MOVHstoreidx ptr1 idx1 w mem))
14844 // cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)
14845 // result: (MOVWstoreidx ptr1 idx1 w mem)
14846 for {
14847 if v.AuxInt != 2 {
14848 break
14849 }
14850 s := v.Aux
14851 _ = v.Args[2]
14852 v_0 := v.Args[0]
14853 if v_0.Op != OpARM64ADD {
14854 break
14855 }
14856 idx0 := v_0.Args[1]
14857 ptr0 := v_0.Args[0]
14858 v_1 := v.Args[1]
14859 if v_1.Op != OpARM64SRLconst {
14860 break
14861 }
14862 if v_1.AuxInt != 16 {
14863 break
14864 }
14865 v_1_0 := v_1.Args[0]
14866 if v_1_0.Op != OpARM64MOVDreg {
14867 break
14868 }
14869 w := v_1_0.Args[0]
14870 x := v.Args[2]
14871 if x.Op != OpARM64MOVHstoreidx {
14872 break
14873 }
14874 mem := x.Args[3]
14875 ptr1 := x.Args[0]
14876 idx1 := x.Args[1]
14877 if w != x.Args[2] {
14878 break
14879 }
14880 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
14881 break
14882 }
14883 v.reset(OpARM64MOVWstoreidx)
14884 v.AddArg(ptr1)
14885 v.AddArg(idx1)
14886 v.AddArg(w)
14887 v.AddArg(mem)
14888 return true
14889 }
14890 // match: (MOVHstore [2] {s} (ADDshiftLL [1] ptr0 idx0) (SRLconst [16] (MOVDreg w)) x:(MOVHstoreidx2 ptr1 idx1 w mem))
14891 // cond: x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)
14892 // result: (MOVWstoreidx ptr1 (SLLconst <idx1.Type> [1] idx1) w mem)
14893 for {
14894 if v.AuxInt != 2 {
14895 break
14896 }
14897 s := v.Aux
14898 _ = v.Args[2]
14899 v_0 := v.Args[0]
14900 if v_0.Op != OpARM64ADDshiftLL {
14901 break
14902 }
14903 if v_0.AuxInt != 1 {
14904 break
14905 }
14906 idx0 := v_0.Args[1]
14907 ptr0 := v_0.Args[0]
14908 v_1 := v.Args[1]
14909 if v_1.Op != OpARM64SRLconst {
14910 break
14911 }
14912 if v_1.AuxInt != 16 {
14913 break
14914 }
14915 v_1_0 := v_1.Args[0]
14916 if v_1_0.Op != OpARM64MOVDreg {
14917 break
14918 }
14919 w := v_1_0.Args[0]
14920 x := v.Args[2]
14921 if x.Op != OpARM64MOVHstoreidx2 {
14922 break
14923 }
14924 mem := x.Args[3]
14925 ptr1 := x.Args[0]
14926 idx1 := x.Args[1]
14927 if w != x.Args[2] {
14928 break
14929 }
14930 if !(x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)) {
14931 break
14932 }
14933 v.reset(OpARM64MOVWstoreidx)
14934 v.AddArg(ptr1)
14935 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, idx1.Type)
14936 v0.AuxInt = 1
14937 v0.AddArg(idx1)
14938 v.AddArg(v0)
14939 v.AddArg(w)
14940 v.AddArg(mem)
14941 return true
14942 }
14943 // match: (MOVHstore [i] {s} ptr0 (SRLconst [j] w) x:(MOVHstore [i-2] {s} ptr1 w0:(SRLconst [j-16] w) mem))
14944 // cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)
14945 // result: (MOVWstore [i-2] {s} ptr0 w0 mem)
14946 for {
14947 i := v.AuxInt
14948 s := v.Aux
14949 _ = v.Args[2]
14950 ptr0 := v.Args[0]
14951 v_1 := v.Args[1]
14952 if v_1.Op != OpARM64SRLconst {
14953 break
14954 }
14955 j := v_1.AuxInt
14956 w := v_1.Args[0]
14957 x := v.Args[2]
14958 if x.Op != OpARM64MOVHstore {
14959 break
14960 }
14961 if x.AuxInt != i-2 {
14962 break
14963 }
14964 if x.Aux != s {
14965 break
14966 }
14967 mem := x.Args[2]
14968 ptr1 := x.Args[0]
14969 w0 := x.Args[1]
14970 if w0.Op != OpARM64SRLconst {
14971 break
14972 }
14973 if w0.AuxInt != j-16 {
14974 break
14975 }
14976 if w != w0.Args[0] {
14977 break
14978 }
14979 if !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) {
14980 break
14981 }
14982 v.reset(OpARM64MOVWstore)
14983 v.AuxInt = i - 2
14984 v.Aux = s
14985 v.AddArg(ptr0)
14986 v.AddArg(w0)
14987 v.AddArg(mem)
14988 return true
14989 }
14990 // match: (MOVHstore [2] {s} (ADD ptr0 idx0) (SRLconst [j] w) x:(MOVHstoreidx ptr1 idx1 w0:(SRLconst [j-16] w) mem))
14991 // cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)
14992 // result: (MOVWstoreidx ptr1 idx1 w0 mem)
14993 for {
14994 if v.AuxInt != 2 {
14995 break
14996 }
14997 s := v.Aux
14998 _ = v.Args[2]
14999 v_0 := v.Args[0]
15000 if v_0.Op != OpARM64ADD {
15001 break
15002 }
15003 idx0 := v_0.Args[1]
15004 ptr0 := v_0.Args[0]
15005 v_1 := v.Args[1]
15006 if v_1.Op != OpARM64SRLconst {
15007 break
15008 }
15009 j := v_1.AuxInt
15010 w := v_1.Args[0]
15011 x := v.Args[2]
15012 if x.Op != OpARM64MOVHstoreidx {
15013 break
15014 }
15015 mem := x.Args[3]
15016 ptr1 := x.Args[0]
15017 idx1 := x.Args[1]
15018 w0 := x.Args[2]
15019 if w0.Op != OpARM64SRLconst {
15020 break
15021 }
15022 if w0.AuxInt != j-16 {
15023 break
15024 }
15025 if w != w0.Args[0] {
15026 break
15027 }
15028 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
15029 break
15030 }
15031 v.reset(OpARM64MOVWstoreidx)
15032 v.AddArg(ptr1)
15033 v.AddArg(idx1)
15034 v.AddArg(w0)
15035 v.AddArg(mem)
15036 return true
15037 }
15038 return false
15039 }
15040 func rewriteValueARM64_OpARM64MOVHstore_20(v *Value) bool {
15041 b := v.Block
15042 // match: (MOVHstore [2] {s} (ADDshiftLL [1] ptr0 idx0) (SRLconst [j] w) x:(MOVHstoreidx2 ptr1 idx1 w0:(SRLconst [j-16] w) mem))
15043 // cond: x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)
15044 // result: (MOVWstoreidx ptr1 (SLLconst <idx1.Type> [1] idx1) w0 mem)
15045 for {
15046 if v.AuxInt != 2 {
15047 break
15048 }
15049 s := v.Aux
15050 _ = v.Args[2]
15051 v_0 := v.Args[0]
15052 if v_0.Op != OpARM64ADDshiftLL {
15053 break
15054 }
15055 if v_0.AuxInt != 1 {
15056 break
15057 }
15058 idx0 := v_0.Args[1]
15059 ptr0 := v_0.Args[0]
15060 v_1 := v.Args[1]
15061 if v_1.Op != OpARM64SRLconst {
15062 break
15063 }
15064 j := v_1.AuxInt
15065 w := v_1.Args[0]
15066 x := v.Args[2]
15067 if x.Op != OpARM64MOVHstoreidx2 {
15068 break
15069 }
15070 mem := x.Args[3]
15071 ptr1 := x.Args[0]
15072 idx1 := x.Args[1]
15073 w0 := x.Args[2]
15074 if w0.Op != OpARM64SRLconst {
15075 break
15076 }
15077 if w0.AuxInt != j-16 {
15078 break
15079 }
15080 if w != w0.Args[0] {
15081 break
15082 }
15083 if !(x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)) {
15084 break
15085 }
15086 v.reset(OpARM64MOVWstoreidx)
15087 v.AddArg(ptr1)
15088 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, idx1.Type)
15089 v0.AuxInt = 1
15090 v0.AddArg(idx1)
15091 v.AddArg(v0)
15092 v.AddArg(w0)
15093 v.AddArg(mem)
15094 return true
15095 }
15096 return false
15097 }
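// Explanatory note (editorial, not produced by the generator): the MOVHstore
// cases above implement halfword store merging. When one MOVHstore writes the
// low 16 bits of a value w and the store 2 bytes above it writes bits 16-31 of
// the same w (exposed via SRLconst [16], UBFX, or a MOVDreg-wrapped shift),
// and the earlier store has no other uses, the pair collapses into a single
// MOVWstore. A sketch of the idea on a hypothetical pair of stores (names are
// illustrative only):
//
//	*(*uint16)(p + 0) = uint16(w)       // MOVHstore [i-2] ptr w
//	*(*uint16)(p + 2) = uint16(w >> 16) // MOVHstore [i]   ptr (SRLconst [16] w)
//	// becomes:
//	*(*uint32)(p + 0) = uint32(w)       // MOVWstore [i-2] ptr w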
15098 func rewriteValueARM64_OpARM64MOVHstoreidx_0(v *Value) bool {
15099 // match: (MOVHstoreidx ptr (MOVDconst [c]) val mem)
15100 // cond:
15101 // result: (MOVHstore [c] ptr val mem)
15102 for {
15103 mem := v.Args[3]
15104 ptr := v.Args[0]
15105 v_1 := v.Args[1]
15106 if v_1.Op != OpARM64MOVDconst {
15107 break
15108 }
15109 c := v_1.AuxInt
15110 val := v.Args[2]
15111 v.reset(OpARM64MOVHstore)
15112 v.AuxInt = c
15113 v.AddArg(ptr)
15114 v.AddArg(val)
15115 v.AddArg(mem)
15116 return true
15117 }
15118 // match: (MOVHstoreidx (MOVDconst [c]) idx val mem)
15119 // cond:
15120 // result: (MOVHstore [c] idx val mem)
15121 for {
15122 mem := v.Args[3]
15123 v_0 := v.Args[0]
15124 if v_0.Op != OpARM64MOVDconst {
15125 break
15126 }
15127 c := v_0.AuxInt
15128 idx := v.Args[1]
15129 val := v.Args[2]
15130 v.reset(OpARM64MOVHstore)
15131 v.AuxInt = c
15132 v.AddArg(idx)
15133 v.AddArg(val)
15134 v.AddArg(mem)
15135 return true
15136 }
15137 // match: (MOVHstoreidx ptr (SLLconst [1] idx) val mem)
15138 // cond:
15139 // result: (MOVHstoreidx2 ptr idx val mem)
15140 for {
15141 mem := v.Args[3]
15142 ptr := v.Args[0]
15143 v_1 := v.Args[1]
15144 if v_1.Op != OpARM64SLLconst {
15145 break
15146 }
15147 if v_1.AuxInt != 1 {
15148 break
15149 }
15150 idx := v_1.Args[0]
15151 val := v.Args[2]
15152 v.reset(OpARM64MOVHstoreidx2)
15153 v.AddArg(ptr)
15154 v.AddArg(idx)
15155 v.AddArg(val)
15156 v.AddArg(mem)
15157 return true
15158 }
15159 // match: (MOVHstoreidx ptr (ADD idx idx) val mem)
15160 // cond:
15161 // result: (MOVHstoreidx2 ptr idx val mem)
15162 for {
15163 mem := v.Args[3]
15164 ptr := v.Args[0]
15165 v_1 := v.Args[1]
15166 if v_1.Op != OpARM64ADD {
15167 break
15168 }
15169 idx := v_1.Args[1]
15170 if idx != v_1.Args[0] {
15171 break
15172 }
15173 val := v.Args[2]
15174 v.reset(OpARM64MOVHstoreidx2)
15175 v.AddArg(ptr)
15176 v.AddArg(idx)
15177 v.AddArg(val)
15178 v.AddArg(mem)
15179 return true
15180 }
15181 // match: (MOVHstoreidx (SLLconst [1] idx) ptr val mem)
15182 // cond:
15183 // result: (MOVHstoreidx2 ptr idx val mem)
15184 for {
15185 mem := v.Args[3]
15186 v_0 := v.Args[0]
15187 if v_0.Op != OpARM64SLLconst {
15188 break
15189 }
15190 if v_0.AuxInt != 1 {
15191 break
15192 }
15193 idx := v_0.Args[0]
15194 ptr := v.Args[1]
15195 val := v.Args[2]
15196 v.reset(OpARM64MOVHstoreidx2)
15197 v.AddArg(ptr)
15198 v.AddArg(idx)
15199 v.AddArg(val)
15200 v.AddArg(mem)
15201 return true
15202 }
15203 // match: (MOVHstoreidx (ADD idx idx) ptr val mem)
15204 // cond:
15205 // result: (MOVHstoreidx2 ptr idx val mem)
15206 for {
15207 mem := v.Args[3]
15208 v_0 := v.Args[0]
15209 if v_0.Op != OpARM64ADD {
15210 break
15211 }
15212 idx := v_0.Args[1]
15213 if idx != v_0.Args[0] {
15214 break
15215 }
15216 ptr := v.Args[1]
15217 val := v.Args[2]
15218 v.reset(OpARM64MOVHstoreidx2)
15219 v.AddArg(ptr)
15220 v.AddArg(idx)
15221 v.AddArg(val)
15222 v.AddArg(mem)
15223 return true
15224 }
15225 // match: (MOVHstoreidx ptr idx (MOVDconst [0]) mem)
15226 // cond:
15227 // result: (MOVHstorezeroidx ptr idx mem)
15228 for {
15229 mem := v.Args[3]
15230 ptr := v.Args[0]
15231 idx := v.Args[1]
15232 v_2 := v.Args[2]
15233 if v_2.Op != OpARM64MOVDconst {
15234 break
15235 }
15236 if v_2.AuxInt != 0 {
15237 break
15238 }
15239 v.reset(OpARM64MOVHstorezeroidx)
15240 v.AddArg(ptr)
15241 v.AddArg(idx)
15242 v.AddArg(mem)
15243 return true
15244 }
15245 // match: (MOVHstoreidx ptr idx (MOVHreg x) mem)
15246 // cond:
15247 // result: (MOVHstoreidx ptr idx x mem)
15248 for {
15249 mem := v.Args[3]
15250 ptr := v.Args[0]
15251 idx := v.Args[1]
15252 v_2 := v.Args[2]
15253 if v_2.Op != OpARM64MOVHreg {
15254 break
15255 }
15256 x := v_2.Args[0]
15257 v.reset(OpARM64MOVHstoreidx)
15258 v.AddArg(ptr)
15259 v.AddArg(idx)
15260 v.AddArg(x)
15261 v.AddArg(mem)
15262 return true
15263 }
15264 // match: (MOVHstoreidx ptr idx (MOVHUreg x) mem)
15265 // cond:
15266 // result: (MOVHstoreidx ptr idx x mem)
15267 for {
15268 mem := v.Args[3]
15269 ptr := v.Args[0]
15270 idx := v.Args[1]
15271 v_2 := v.Args[2]
15272 if v_2.Op != OpARM64MOVHUreg {
15273 break
15274 }
15275 x := v_2.Args[0]
15276 v.reset(OpARM64MOVHstoreidx)
15277 v.AddArg(ptr)
15278 v.AddArg(idx)
15279 v.AddArg(x)
15280 v.AddArg(mem)
15281 return true
15282 }
15283 // match: (MOVHstoreidx ptr idx (MOVWreg x) mem)
15284 // cond:
15285 // result: (MOVHstoreidx ptr idx x mem)
15286 for {
15287 mem := v.Args[3]
15288 ptr := v.Args[0]
15289 idx := v.Args[1]
15290 v_2 := v.Args[2]
15291 if v_2.Op != OpARM64MOVWreg {
15292 break
15293 }
15294 x := v_2.Args[0]
15295 v.reset(OpARM64MOVHstoreidx)
15296 v.AddArg(ptr)
15297 v.AddArg(idx)
15298 v.AddArg(x)
15299 v.AddArg(mem)
15300 return true
15301 }
15302 return false
15303 }
15304 func rewriteValueARM64_OpARM64MOVHstoreidx_10(v *Value) bool {
15305 // match: (MOVHstoreidx ptr idx (MOVWUreg x) mem)
15306 // cond:
15307 // result: (MOVHstoreidx ptr idx x mem)
15308 for {
15309 mem := v.Args[3]
15310 ptr := v.Args[0]
15311 idx := v.Args[1]
15312 v_2 := v.Args[2]
15313 if v_2.Op != OpARM64MOVWUreg {
15314 break
15315 }
15316 x := v_2.Args[0]
15317 v.reset(OpARM64MOVHstoreidx)
15318 v.AddArg(ptr)
15319 v.AddArg(idx)
15320 v.AddArg(x)
15321 v.AddArg(mem)
15322 return true
15323 }
15324 // match: (MOVHstoreidx ptr (ADDconst [2] idx) (SRLconst [16] w) x:(MOVHstoreidx ptr idx w mem))
15325 // cond: x.Uses == 1 && clobber(x)
15326 // result: (MOVWstoreidx ptr idx w mem)
15327 for {
15328 _ = v.Args[3]
15329 ptr := v.Args[0]
15330 v_1 := v.Args[1]
15331 if v_1.Op != OpARM64ADDconst {
15332 break
15333 }
15334 if v_1.AuxInt != 2 {
15335 break
15336 }
15337 idx := v_1.Args[0]
15338 v_2 := v.Args[2]
15339 if v_2.Op != OpARM64SRLconst {
15340 break
15341 }
15342 if v_2.AuxInt != 16 {
15343 break
15344 }
15345 w := v_2.Args[0]
15346 x := v.Args[3]
15347 if x.Op != OpARM64MOVHstoreidx {
15348 break
15349 }
15350 mem := x.Args[3]
15351 if ptr != x.Args[0] {
15352 break
15353 }
15354 if idx != x.Args[1] {
15355 break
15356 }
15357 if w != x.Args[2] {
15358 break
15359 }
15360 if !(x.Uses == 1 && clobber(x)) {
15361 break
15362 }
15363 v.reset(OpARM64MOVWstoreidx)
15364 v.AddArg(ptr)
15365 v.AddArg(idx)
15366 v.AddArg(w)
15367 v.AddArg(mem)
15368 return true
15369 }
15370 return false
15371 }
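// Explanatory note (editorial): MOVHstoreidx addresses ptr+idx in bytes, while
// MOVHstoreidx2 addresses ptr+idx*2, so an index that is provably even (a
// SLLconst [1] or an ADD idx idx) can be folded into the scaled form. That is
// also why a constant index c in the scaled forms below turns into an AuxInt
// of c<<1 when the store is rewritten back to a plain MOVHstore.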
15372 func rewriteValueARM64_OpARM64MOVHstoreidx2_0(v *Value) bool {
15373 // match: (MOVHstoreidx2 ptr (MOVDconst [c]) val mem)
15374 // cond:
15375 // result: (MOVHstore [c<<1] ptr val mem)
15376 for {
15377 mem := v.Args[3]
15378 ptr := v.Args[0]
15379 v_1 := v.Args[1]
15380 if v_1.Op != OpARM64MOVDconst {
15381 break
15382 }
15383 c := v_1.AuxInt
15384 val := v.Args[2]
15385 v.reset(OpARM64MOVHstore)
15386 v.AuxInt = c << 1
15387 v.AddArg(ptr)
15388 v.AddArg(val)
15389 v.AddArg(mem)
15390 return true
15391 }
15392 // match: (MOVHstoreidx2 ptr idx (MOVDconst [0]) mem)
15393 // cond:
15394 // result: (MOVHstorezeroidx2 ptr idx mem)
15395 for {
15396 mem := v.Args[3]
15397 ptr := v.Args[0]
15398 idx := v.Args[1]
15399 v_2 := v.Args[2]
15400 if v_2.Op != OpARM64MOVDconst {
15401 break
15402 }
15403 if v_2.AuxInt != 0 {
15404 break
15405 }
15406 v.reset(OpARM64MOVHstorezeroidx2)
15407 v.AddArg(ptr)
15408 v.AddArg(idx)
15409 v.AddArg(mem)
15410 return true
15411 }
15412
15413
15414
15415 for {
15416 mem := v.Args[3]
15417 ptr := v.Args[0]
15418 idx := v.Args[1]
15419 v_2 := v.Args[2]
15420 if v_2.Op != OpARM64MOVHreg {
15421 break
15422 }
15423 x := v_2.Args[0]
15424 v.reset(OpARM64MOVHstoreidx2)
15425 v.AddArg(ptr)
15426 v.AddArg(idx)
15427 v.AddArg(x)
15428 v.AddArg(mem)
15429 return true
15430 }
15431
15432
15433
15434 for {
15435 mem := v.Args[3]
15436 ptr := v.Args[0]
15437 idx := v.Args[1]
15438 v_2 := v.Args[2]
15439 if v_2.Op != OpARM64MOVHUreg {
15440 break
15441 }
15442 x := v_2.Args[0]
15443 v.reset(OpARM64MOVHstoreidx2)
15444 v.AddArg(ptr)
15445 v.AddArg(idx)
15446 v.AddArg(x)
15447 v.AddArg(mem)
15448 return true
15449 }
15450
15451
15452
15453 for {
15454 mem := v.Args[3]
15455 ptr := v.Args[0]
15456 idx := v.Args[1]
15457 v_2 := v.Args[2]
15458 if v_2.Op != OpARM64MOVWreg {
15459 break
15460 }
15461 x := v_2.Args[0]
15462 v.reset(OpARM64MOVHstoreidx2)
15463 v.AddArg(ptr)
15464 v.AddArg(idx)
15465 v.AddArg(x)
15466 v.AddArg(mem)
15467 return true
15468 }
15469
15470
15471
15472 for {
15473 mem := v.Args[3]
15474 ptr := v.Args[0]
15475 idx := v.Args[1]
15476 v_2 := v.Args[2]
15477 if v_2.Op != OpARM64MOVWUreg {
15478 break
15479 }
15480 x := v_2.Args[0]
15481 v.reset(OpARM64MOVHstoreidx2)
15482 v.AddArg(ptr)
15483 v.AddArg(idx)
15484 v.AddArg(x)
15485 v.AddArg(mem)
15486 return true
15487 }
15488 return false
15489 }
15490 func rewriteValueARM64_OpARM64MOVHstorezero_0(v *Value) bool {
15491 b := v.Block
15492 config := b.Func.Config
15493 // match: (MOVHstorezero [off1] {sym} (ADDconst [off2] ptr) mem)
15494 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
15495 // result: (MOVHstorezero [off1+off2] {sym} ptr mem)
15496 for {
15497 off1 := v.AuxInt
15498 sym := v.Aux
15499 mem := v.Args[1]
15500 v_0 := v.Args[0]
15501 if v_0.Op != OpARM64ADDconst {
15502 break
15503 }
15504 off2 := v_0.AuxInt
15505 ptr := v_0.Args[0]
15506 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
15507 break
15508 }
15509 v.reset(OpARM64MOVHstorezero)
15510 v.AuxInt = off1 + off2
15511 v.Aux = sym
15512 v.AddArg(ptr)
15513 v.AddArg(mem)
15514 return true
15515 }
15516
15517
15518
15519 for {
15520 off1 := v.AuxInt
15521 sym1 := v.Aux
15522 mem := v.Args[1]
15523 v_0 := v.Args[0]
15524 if v_0.Op != OpARM64MOVDaddr {
15525 break
15526 }
15527 off2 := v_0.AuxInt
15528 sym2 := v_0.Aux
15529 ptr := v_0.Args[0]
15530 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
15531 break
15532 }
15533 v.reset(OpARM64MOVHstorezero)
15534 v.AuxInt = off1 + off2
15535 v.Aux = mergeSym(sym1, sym2)
15536 v.AddArg(ptr)
15537 v.AddArg(mem)
15538 return true
15539 }
15540
15541
15542
15543 for {
15544 off := v.AuxInt
15545 sym := v.Aux
15546 mem := v.Args[1]
15547 v_0 := v.Args[0]
15548 if v_0.Op != OpARM64ADD {
15549 break
15550 }
15551 idx := v_0.Args[1]
15552 ptr := v_0.Args[0]
15553 if !(off == 0 && sym == nil) {
15554 break
15555 }
15556 v.reset(OpARM64MOVHstorezeroidx)
15557 v.AddArg(ptr)
15558 v.AddArg(idx)
15559 v.AddArg(mem)
15560 return true
15561 }
15562
15563
15564
15565 for {
15566 off := v.AuxInt
15567 sym := v.Aux
15568 mem := v.Args[1]
15569 v_0 := v.Args[0]
15570 if v_0.Op != OpARM64ADDshiftLL {
15571 break
15572 }
15573 if v_0.AuxInt != 1 {
15574 break
15575 }
15576 idx := v_0.Args[1]
15577 ptr := v_0.Args[0]
15578 if !(off == 0 && sym == nil) {
15579 break
15580 }
15581 v.reset(OpARM64MOVHstorezeroidx2)
15582 v.AddArg(ptr)
15583 v.AddArg(idx)
15584 v.AddArg(mem)
15585 return true
15586 }
15587 // match: (MOVHstorezero [i] {s} ptr0 x:(MOVHstorezero [j] {s} ptr1 mem))
15588 // cond: x.Uses == 1 && areAdjacentOffsets(i, j, 2) && is32Bit(min(i, j)) && isSamePtr(ptr0, ptr1) && clobber(x)
15589 // result: (MOVWstorezero [min(i, j)] {s} ptr0 mem)
15590 for {
15591 i := v.AuxInt
15592 s := v.Aux
15593 _ = v.Args[1]
15594 ptr0 := v.Args[0]
15595 x := v.Args[1]
15596 if x.Op != OpARM64MOVHstorezero {
15597 break
15598 }
15599 j := x.AuxInt
15600 if x.Aux != s {
15601 break
15602 }
15603 mem := x.Args[1]
15604 ptr1 := x.Args[0]
15605 if !(x.Uses == 1 && areAdjacentOffsets(i, j, 2) && is32Bit(min(i, j)) && isSamePtr(ptr0, ptr1) && clobber(x)) {
15606 break
15607 }
15608 v.reset(OpARM64MOVWstorezero)
15609 v.AuxInt = min(i, j)
15610 v.Aux = s
15611 v.AddArg(ptr0)
15612 v.AddArg(mem)
15613 return true
15614 }
15615
15616
15617
15618 for {
15619 if v.AuxInt != 2 {
15620 break
15621 }
15622 s := v.Aux
15623 _ = v.Args[1]
15624 v_0 := v.Args[0]
15625 if v_0.Op != OpARM64ADD {
15626 break
15627 }
15628 idx0 := v_0.Args[1]
15629 ptr0 := v_0.Args[0]
15630 x := v.Args[1]
15631 if x.Op != OpARM64MOVHstorezeroidx {
15632 break
15633 }
15634 mem := x.Args[2]
15635 ptr1 := x.Args[0]
15636 idx1 := x.Args[1]
15637 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
15638 break
15639 }
15640 v.reset(OpARM64MOVWstorezeroidx)
15641 v.AddArg(ptr1)
15642 v.AddArg(idx1)
15643 v.AddArg(mem)
15644 return true
15645 }
15646
15647
15648
15649 for {
15650 if v.AuxInt != 2 {
15651 break
15652 }
15653 s := v.Aux
15654 _ = v.Args[1]
15655 v_0 := v.Args[0]
15656 if v_0.Op != OpARM64ADDshiftLL {
15657 break
15658 }
15659 if v_0.AuxInt != 1 {
15660 break
15661 }
15662 idx0 := v_0.Args[1]
15663 ptr0 := v_0.Args[0]
15664 x := v.Args[1]
15665 if x.Op != OpARM64MOVHstorezeroidx2 {
15666 break
15667 }
15668 mem := x.Args[2]
15669 ptr1 := x.Args[0]
15670 idx1 := x.Args[1]
15671 if !(x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)) {
15672 break
15673 }
15674 v.reset(OpARM64MOVWstorezeroidx)
15675 v.AddArg(ptr1)
15676 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, idx1.Type)
15677 v0.AuxInt = 1
15678 v0.AddArg(idx1)
15679 v.AddArg(v0)
15680 v.AddArg(mem)
15681 return true
15682 }
15683 return false
15684 }
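// Explanatory note (editorial): the MOVHstorezero cases above fold constant
// offsets and symbols into the store, switch to the indexed forms when the
// address is ptr+idx, and merge two zeroing halfword stores at adjacent
// offsets (areAdjacentOffsets(i, j, 2)) into one MOVWstorezero at min(i, j).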
15685 func rewriteValueARM64_OpARM64MOVHstorezeroidx_0(v *Value) bool {
15686 // match: (MOVHstorezeroidx ptr (MOVDconst [c]) mem)
15687 // cond:
15688 // result: (MOVHstorezero [c] ptr mem)
15689 for {
15690 mem := v.Args[2]
15691 ptr := v.Args[0]
15692 v_1 := v.Args[1]
15693 if v_1.Op != OpARM64MOVDconst {
15694 break
15695 }
15696 c := v_1.AuxInt
15697 v.reset(OpARM64MOVHstorezero)
15698 v.AuxInt = c
15699 v.AddArg(ptr)
15700 v.AddArg(mem)
15701 return true
15702 }
15703
15704
15705
15706 for {
15707 mem := v.Args[2]
15708 v_0 := v.Args[0]
15709 if v_0.Op != OpARM64MOVDconst {
15710 break
15711 }
15712 c := v_0.AuxInt
15713 idx := v.Args[1]
15714 v.reset(OpARM64MOVHstorezero)
15715 v.AuxInt = c
15716 v.AddArg(idx)
15717 v.AddArg(mem)
15718 return true
15719 }
15720
15721
15722
15723 for {
15724 mem := v.Args[2]
15725 ptr := v.Args[0]
15726 v_1 := v.Args[1]
15727 if v_1.Op != OpARM64SLLconst {
15728 break
15729 }
15730 if v_1.AuxInt != 1 {
15731 break
15732 }
15733 idx := v_1.Args[0]
15734 v.reset(OpARM64MOVHstorezeroidx2)
15735 v.AddArg(ptr)
15736 v.AddArg(idx)
15737 v.AddArg(mem)
15738 return true
15739 }
15740
15741
15742
15743 for {
15744 mem := v.Args[2]
15745 ptr := v.Args[0]
15746 v_1 := v.Args[1]
15747 if v_1.Op != OpARM64ADD {
15748 break
15749 }
15750 idx := v_1.Args[1]
15751 if idx != v_1.Args[0] {
15752 break
15753 }
15754 v.reset(OpARM64MOVHstorezeroidx2)
15755 v.AddArg(ptr)
15756 v.AddArg(idx)
15757 v.AddArg(mem)
15758 return true
15759 }
15760
15761
15762
15763 for {
15764 mem := v.Args[2]
15765 v_0 := v.Args[0]
15766 if v_0.Op != OpARM64SLLconst {
15767 break
15768 }
15769 if v_0.AuxInt != 1 {
15770 break
15771 }
15772 idx := v_0.Args[0]
15773 ptr := v.Args[1]
15774 v.reset(OpARM64MOVHstorezeroidx2)
15775 v.AddArg(ptr)
15776 v.AddArg(idx)
15777 v.AddArg(mem)
15778 return true
15779 }
15780
15781
15782
15783 for {
15784 mem := v.Args[2]
15785 v_0 := v.Args[0]
15786 if v_0.Op != OpARM64ADD {
15787 break
15788 }
15789 idx := v_0.Args[1]
15790 if idx != v_0.Args[0] {
15791 break
15792 }
15793 ptr := v.Args[1]
15794 v.reset(OpARM64MOVHstorezeroidx2)
15795 v.AddArg(ptr)
15796 v.AddArg(idx)
15797 v.AddArg(mem)
15798 return true
15799 }
15800
15801
15802
15803 for {
15804 _ = v.Args[2]
15805 ptr := v.Args[0]
15806 v_1 := v.Args[1]
15807 if v_1.Op != OpARM64ADDconst {
15808 break
15809 }
15810 if v_1.AuxInt != 2 {
15811 break
15812 }
15813 idx := v_1.Args[0]
15814 x := v.Args[2]
15815 if x.Op != OpARM64MOVHstorezeroidx {
15816 break
15817 }
15818 mem := x.Args[2]
15819 if ptr != x.Args[0] {
15820 break
15821 }
15822 if idx != x.Args[1] {
15823 break
15824 }
15825 if !(x.Uses == 1 && clobber(x)) {
15826 break
15827 }
15828 v.reset(OpARM64MOVWstorezeroidx)
15829 v.AddArg(ptr)
15830 v.AddArg(idx)
15831 v.AddArg(mem)
15832 return true
15833 }
15834 return false
15835 }
15836 func rewriteValueARM64_OpARM64MOVHstorezeroidx2_0(v *Value) bool {
15837 // match: (MOVHstorezeroidx2 ptr (MOVDconst [c]) mem)
15838 // cond:
15839 // result: (MOVHstorezero [c<<1] ptr mem)
15840 for {
15841 mem := v.Args[2]
15842 ptr := v.Args[0]
15843 v_1 := v.Args[1]
15844 if v_1.Op != OpARM64MOVDconst {
15845 break
15846 }
15847 c := v_1.AuxInt
15848 v.reset(OpARM64MOVHstorezero)
15849 v.AuxInt = c << 1
15850 v.AddArg(ptr)
15851 v.AddArg(mem)
15852 return true
15853 }
15854 return false
15855 }
15856 func rewriteValueARM64_OpARM64MOVQstorezero_0(v *Value) bool {
15857 b := v.Block
15858 config := b.Func.Config
15859 // match: (MOVQstorezero [off1] {sym} (ADDconst [off2] ptr) mem)
15860 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
15861 // result: (MOVQstorezero [off1+off2] {sym} ptr mem)
15862 for {
15863 off1 := v.AuxInt
15864 sym := v.Aux
15865 mem := v.Args[1]
15866 v_0 := v.Args[0]
15867 if v_0.Op != OpARM64ADDconst {
15868 break
15869 }
15870 off2 := v_0.AuxInt
15871 ptr := v_0.Args[0]
15872 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
15873 break
15874 }
15875 v.reset(OpARM64MOVQstorezero)
15876 v.AuxInt = off1 + off2
15877 v.Aux = sym
15878 v.AddArg(ptr)
15879 v.AddArg(mem)
15880 return true
15881 }
15882
15883
15884
15885 for {
15886 off1 := v.AuxInt
15887 sym1 := v.Aux
15888 mem := v.Args[1]
15889 v_0 := v.Args[0]
15890 if v_0.Op != OpARM64MOVDaddr {
15891 break
15892 }
15893 off2 := v_0.AuxInt
15894 sym2 := v_0.Aux
15895 ptr := v_0.Args[0]
15896 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
15897 break
15898 }
15899 v.reset(OpARM64MOVQstorezero)
15900 v.AuxInt = off1 + off2
15901 v.Aux = mergeSym(sym1, sym2)
15902 v.AddArg(ptr)
15903 v.AddArg(mem)
15904 return true
15905 }
15906 return false
15907 }
15908 func rewriteValueARM64_OpARM64MOVWUload_0(v *Value) bool {
15909 b := v.Block
15910 config := b.Func.Config
15911 // match: (MOVWUload [off] {sym} ptr (FMOVSstore [off] {sym} ptr val _))
15912 // cond:
15913 // result: (FMOVSfpgp val)
15914 for {
15915 off := v.AuxInt
15916 sym := v.Aux
15917 _ = v.Args[1]
15918 ptr := v.Args[0]
15919 v_1 := v.Args[1]
15920 if v_1.Op != OpARM64FMOVSstore {
15921 break
15922 }
15923 if v_1.AuxInt != off {
15924 break
15925 }
15926 if v_1.Aux != sym {
15927 break
15928 }
15929 _ = v_1.Args[2]
15930 if ptr != v_1.Args[0] {
15931 break
15932 }
15933 val := v_1.Args[1]
15934 v.reset(OpARM64FMOVSfpgp)
15935 v.AddArg(val)
15936 return true
15937 }
15938
15939
15940
15941 for {
15942 off1 := v.AuxInt
15943 sym := v.Aux
15944 mem := v.Args[1]
15945 v_0 := v.Args[0]
15946 if v_0.Op != OpARM64ADDconst {
15947 break
15948 }
15949 off2 := v_0.AuxInt
15950 ptr := v_0.Args[0]
15951 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
15952 break
15953 }
15954 v.reset(OpARM64MOVWUload)
15955 v.AuxInt = off1 + off2
15956 v.Aux = sym
15957 v.AddArg(ptr)
15958 v.AddArg(mem)
15959 return true
15960 }
15961
15962
15963
15964 for {
15965 off := v.AuxInt
15966 sym := v.Aux
15967 mem := v.Args[1]
15968 v_0 := v.Args[0]
15969 if v_0.Op != OpARM64ADD {
15970 break
15971 }
15972 idx := v_0.Args[1]
15973 ptr := v_0.Args[0]
15974 if !(off == 0 && sym == nil) {
15975 break
15976 }
15977 v.reset(OpARM64MOVWUloadidx)
15978 v.AddArg(ptr)
15979 v.AddArg(idx)
15980 v.AddArg(mem)
15981 return true
15982 }
15983
15984
15985
15986 for {
15987 off := v.AuxInt
15988 sym := v.Aux
15989 mem := v.Args[1]
15990 v_0 := v.Args[0]
15991 if v_0.Op != OpARM64ADDshiftLL {
15992 break
15993 }
15994 if v_0.AuxInt != 2 {
15995 break
15996 }
15997 idx := v_0.Args[1]
15998 ptr := v_0.Args[0]
15999 if !(off == 0 && sym == nil) {
16000 break
16001 }
16002 v.reset(OpARM64MOVWUloadidx4)
16003 v.AddArg(ptr)
16004 v.AddArg(idx)
16005 v.AddArg(mem)
16006 return true
16007 }
16008
16009
16010
16011 for {
16012 off1 := v.AuxInt
16013 sym1 := v.Aux
16014 mem := v.Args[1]
16015 v_0 := v.Args[0]
16016 if v_0.Op != OpARM64MOVDaddr {
16017 break
16018 }
16019 off2 := v_0.AuxInt
16020 sym2 := v_0.Aux
16021 ptr := v_0.Args[0]
16022 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
16023 break
16024 }
16025 v.reset(OpARM64MOVWUload)
16026 v.AuxInt = off1 + off2
16027 v.Aux = mergeSym(sym1, sym2)
16028 v.AddArg(ptr)
16029 v.AddArg(mem)
16030 return true
16031 }
16032 // match: (MOVWUload [off] {sym} ptr (MOVWstorezero [off2] {sym2} ptr2 _))
16033 // cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
16034 // result: (MOVDconst [0])
16035 for {
16036 off := v.AuxInt
16037 sym := v.Aux
16038 _ = v.Args[1]
16039 ptr := v.Args[0]
16040 v_1 := v.Args[1]
16041 if v_1.Op != OpARM64MOVWstorezero {
16042 break
16043 }
16044 off2 := v_1.AuxInt
16045 sym2 := v_1.Aux
16046 _ = v_1.Args[1]
16047 ptr2 := v_1.Args[0]
16048 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
16049 break
16050 }
16051 v.reset(OpARM64MOVDconst)
16052 v.AuxInt = 0
16053 return true
16054 }
16055 // match: (MOVWUload [off] {sym} (SB) _)
16056 // cond: symIsRO(sym)
16057 // result: (MOVDconst [int64(read32(sym, off, config.BigEndian))])
16058 for {
16059 off := v.AuxInt
16060 sym := v.Aux
16061 _ = v.Args[1]
16062 v_0 := v.Args[0]
16063 if v_0.Op != OpSB {
16064 break
16065 }
16066 if !(symIsRO(sym)) {
16067 break
16068 }
16069 v.reset(OpARM64MOVDconst)
16070 v.AuxInt = int64(read32(sym, off, config.BigEndian))
16071 return true
16072 }
16073 return false
16074 }
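// Explanatory note (editorial): besides the usual offset/symbol folding and
// the switch to indexed addressing, MOVWUload is rewritten to a constant zero
// when it reads memory just zeroed by a MOVWstorezero at the same address, and
// to the stored float bits (FMOVSfpgp) when it reloads a value just written by
// an FMOVSstore. Loads from read-only symbols (symIsRO) are folded to the
// 32-bit constant obtained at compile time via read32.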
16075 func rewriteValueARM64_OpARM64MOVWUloadidx_0(v *Value) bool {
16076 // match: (MOVWUloadidx ptr (MOVDconst [c]) mem)
16077 // cond:
16078 // result: (MOVWUload [c] ptr mem)
16079 for {
16080 mem := v.Args[2]
16081 ptr := v.Args[0]
16082 v_1 := v.Args[1]
16083 if v_1.Op != OpARM64MOVDconst {
16084 break
16085 }
16086 c := v_1.AuxInt
16087 v.reset(OpARM64MOVWUload)
16088 v.AuxInt = c
16089 v.AddArg(ptr)
16090 v.AddArg(mem)
16091 return true
16092 }
16093
16094
16095
16096 for {
16097 mem := v.Args[2]
16098 v_0 := v.Args[0]
16099 if v_0.Op != OpARM64MOVDconst {
16100 break
16101 }
16102 c := v_0.AuxInt
16103 ptr := v.Args[1]
16104 v.reset(OpARM64MOVWUload)
16105 v.AuxInt = c
16106 v.AddArg(ptr)
16107 v.AddArg(mem)
16108 return true
16109 }
16110
16111
16112
16113 for {
16114 mem := v.Args[2]
16115 ptr := v.Args[0]
16116 v_1 := v.Args[1]
16117 if v_1.Op != OpARM64SLLconst {
16118 break
16119 }
16120 if v_1.AuxInt != 2 {
16121 break
16122 }
16123 idx := v_1.Args[0]
16124 v.reset(OpARM64MOVWUloadidx4)
16125 v.AddArg(ptr)
16126 v.AddArg(idx)
16127 v.AddArg(mem)
16128 return true
16129 }
16130
16131
16132
16133 for {
16134 mem := v.Args[2]
16135 v_0 := v.Args[0]
16136 if v_0.Op != OpARM64SLLconst {
16137 break
16138 }
16139 if v_0.AuxInt != 2 {
16140 break
16141 }
16142 idx := v_0.Args[0]
16143 ptr := v.Args[1]
16144 v.reset(OpARM64MOVWUloadidx4)
16145 v.AddArg(ptr)
16146 v.AddArg(idx)
16147 v.AddArg(mem)
16148 return true
16149 }
16150
16151
16152
16153 for {
16154 _ = v.Args[2]
16155 ptr := v.Args[0]
16156 idx := v.Args[1]
16157 v_2 := v.Args[2]
16158 if v_2.Op != OpARM64MOVWstorezeroidx {
16159 break
16160 }
16161 _ = v_2.Args[2]
16162 ptr2 := v_2.Args[0]
16163 idx2 := v_2.Args[1]
16164 if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) {
16165 break
16166 }
16167 v.reset(OpARM64MOVDconst)
16168 v.AuxInt = 0
16169 return true
16170 }
16171 return false
16172 }
16173 func rewriteValueARM64_OpARM64MOVWUloadidx4_0(v *Value) bool {
16174 // match: (MOVWUloadidx4 ptr (MOVDconst [c]) mem)
16175 // cond:
16176 // result: (MOVWUload [c<<2] ptr mem)
16177 for {
16178 mem := v.Args[2]
16179 ptr := v.Args[0]
16180 v_1 := v.Args[1]
16181 if v_1.Op != OpARM64MOVDconst {
16182 break
16183 }
16184 c := v_1.AuxInt
16185 v.reset(OpARM64MOVWUload)
16186 v.AuxInt = c << 2
16187 v.AddArg(ptr)
16188 v.AddArg(mem)
16189 return true
16190 }
16191
16192
16193
16194 for {
16195 _ = v.Args[2]
16196 ptr := v.Args[0]
16197 idx := v.Args[1]
16198 v_2 := v.Args[2]
16199 if v_2.Op != OpARM64MOVWstorezeroidx4 {
16200 break
16201 }
16202 _ = v_2.Args[2]
16203 ptr2 := v_2.Args[0]
16204 idx2 := v_2.Args[1]
16205 if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2)) {
16206 break
16207 }
16208 v.reset(OpARM64MOVDconst)
16209 v.AuxInt = 0
16210 return true
16211 }
16212 return false
16213 }
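// Explanatory note (editorial): in the idx4 forms the index counts 4-byte
// elements, so a constant index c corresponds to a byte offset of c*4; that is
// the c<<2 seen when these are rewritten to plain MOVWUload/MOVWload ops.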
16214 func rewriteValueARM64_OpARM64MOVWUreg_0(v *Value) bool {
16215 // match: (MOVWUreg x:(MOVBUload _ _))
16216 // cond:
16217 // result: (MOVDreg x)
16218 for {
16219 x := v.Args[0]
16220 if x.Op != OpARM64MOVBUload {
16221 break
16222 }
16223 _ = x.Args[1]
16224 v.reset(OpARM64MOVDreg)
16225 v.AddArg(x)
16226 return true
16227 }
16228
16229
16230
16231 for {
16232 x := v.Args[0]
16233 if x.Op != OpARM64MOVHUload {
16234 break
16235 }
16236 _ = x.Args[1]
16237 v.reset(OpARM64MOVDreg)
16238 v.AddArg(x)
16239 return true
16240 }
16241
16242
16243
16244 for {
16245 x := v.Args[0]
16246 if x.Op != OpARM64MOVWUload {
16247 break
16248 }
16249 _ = x.Args[1]
16250 v.reset(OpARM64MOVDreg)
16251 v.AddArg(x)
16252 return true
16253 }
16254
16255
16256
16257 for {
16258 x := v.Args[0]
16259 if x.Op != OpARM64MOVBUloadidx {
16260 break
16261 }
16262 _ = x.Args[2]
16263 v.reset(OpARM64MOVDreg)
16264 v.AddArg(x)
16265 return true
16266 }
16267
16268
16269
16270 for {
16271 x := v.Args[0]
16272 if x.Op != OpARM64MOVHUloadidx {
16273 break
16274 }
16275 _ = x.Args[2]
16276 v.reset(OpARM64MOVDreg)
16277 v.AddArg(x)
16278 return true
16279 }
16280
16281
16282
16283 for {
16284 x := v.Args[0]
16285 if x.Op != OpARM64MOVWUloadidx {
16286 break
16287 }
16288 _ = x.Args[2]
16289 v.reset(OpARM64MOVDreg)
16290 v.AddArg(x)
16291 return true
16292 }
16293
16294
16295
16296 for {
16297 x := v.Args[0]
16298 if x.Op != OpARM64MOVHUloadidx2 {
16299 break
16300 }
16301 _ = x.Args[2]
16302 v.reset(OpARM64MOVDreg)
16303 v.AddArg(x)
16304 return true
16305 }
16306
16307
16308
16309 for {
16310 x := v.Args[0]
16311 if x.Op != OpARM64MOVWUloadidx4 {
16312 break
16313 }
16314 _ = x.Args[2]
16315 v.reset(OpARM64MOVDreg)
16316 v.AddArg(x)
16317 return true
16318 }
16319
16320
16321
16322 for {
16323 x := v.Args[0]
16324 if x.Op != OpARM64MOVBUreg {
16325 break
16326 }
16327 v.reset(OpARM64MOVDreg)
16328 v.AddArg(x)
16329 return true
16330 }
16331
16332
16333
16334 for {
16335 x := v.Args[0]
16336 if x.Op != OpARM64MOVHUreg {
16337 break
16338 }
16339 v.reset(OpARM64MOVDreg)
16340 v.AddArg(x)
16341 return true
16342 }
16343 return false
16344 }
16345 func rewriteValueARM64_OpARM64MOVWUreg_10(v *Value) bool {
16346
16347
16348
16349 for {
16350 x := v.Args[0]
16351 if x.Op != OpARM64MOVWUreg {
16352 break
16353 }
16354 v.reset(OpARM64MOVDreg)
16355 v.AddArg(x)
16356 return true
16357 }
16358 // match: (MOVWUreg (ANDconst [c] x))
16359 // cond:
16360 // result: (ANDconst [c&(1<<32-1)] x)
16361 for {
16362 v_0 := v.Args[0]
16363 if v_0.Op != OpARM64ANDconst {
16364 break
16365 }
16366 c := v_0.AuxInt
16367 x := v_0.Args[0]
16368 v.reset(OpARM64ANDconst)
16369 v.AuxInt = c & (1<<32 - 1)
16370 v.AddArg(x)
16371 return true
16372 }
16373 // match: (MOVWUreg (MOVDconst [c]))
16374 // cond:
16375 // result: (MOVDconst [int64(uint32(c))])
16376 for {
16377 v_0 := v.Args[0]
16378 if v_0.Op != OpARM64MOVDconst {
16379 break
16380 }
16381 c := v_0.AuxInt
16382 v.reset(OpARM64MOVDconst)
16383 v.AuxInt = int64(uint32(c))
16384 return true
16385 }
16386 // match: (MOVWUreg (SLLconst [sc] x))
16387 // cond: isARM64BFMask(sc, 1<<32-1, sc)
16388 // result: (UBFIZ [armBFAuxInt(sc, arm64BFWidth(1<<32-1, sc))] x)
16389 for {
16390 v_0 := v.Args[0]
16391 if v_0.Op != OpARM64SLLconst {
16392 break
16393 }
16394 sc := v_0.AuxInt
16395 x := v_0.Args[0]
16396 if !(isARM64BFMask(sc, 1<<32-1, sc)) {
16397 break
16398 }
16399 v.reset(OpARM64UBFIZ)
16400 v.AuxInt = armBFAuxInt(sc, arm64BFWidth(1<<32-1, sc))
16401 v.AddArg(x)
16402 return true
16403 }
16404 // match: (MOVWUreg (SRLconst [sc] x))
16405 // cond: isARM64BFMask(sc, 1<<32-1, 0)
16406 // result: (UBFX [armBFAuxInt(sc, 32)] x)
16407 for {
16408 v_0 := v.Args[0]
16409 if v_0.Op != OpARM64SRLconst {
16410 break
16411 }
16412 sc := v_0.AuxInt
16413 x := v_0.Args[0]
16414 if !(isARM64BFMask(sc, 1<<32-1, 0)) {
16415 break
16416 }
16417 v.reset(OpARM64UBFX)
16418 v.AuxInt = armBFAuxInt(sc, 32)
16419 v.AddArg(x)
16420 return true
16421 }
16422 return false
16423 }
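// Explanatory note (editorial): MOVWUreg zero-extends the low 32 bits. The
// cases above drop the extension when the operand is already zero-extended
// (unsigned loads and narrower unsigned extensions), narrow an ANDconst mask
// to 32 bits, evaluate the extension of a constant, and turn shift-plus-extend
// pairs into the bitfield ops UBFIZ/UBFX when the encoding check
// isARM64BFMask allows it.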
16424 func rewriteValueARM64_OpARM64MOVWload_0(v *Value) bool {
16425 b := v.Block
16426 config := b.Func.Config
16427 // match: (MOVWload [off1] {sym} (ADDconst [off2] ptr) mem)
16428 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
16429 // result: (MOVWload [off1+off2] {sym} ptr mem)
16430 for {
16431 off1 := v.AuxInt
16432 sym := v.Aux
16433 mem := v.Args[1]
16434 v_0 := v.Args[0]
16435 if v_0.Op != OpARM64ADDconst {
16436 break
16437 }
16438 off2 := v_0.AuxInt
16439 ptr := v_0.Args[0]
16440 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
16441 break
16442 }
16443 v.reset(OpARM64MOVWload)
16444 v.AuxInt = off1 + off2
16445 v.Aux = sym
16446 v.AddArg(ptr)
16447 v.AddArg(mem)
16448 return true
16449 }
16450
16451
16452
16453 for {
16454 off := v.AuxInt
16455 sym := v.Aux
16456 mem := v.Args[1]
16457 v_0 := v.Args[0]
16458 if v_0.Op != OpARM64ADD {
16459 break
16460 }
16461 idx := v_0.Args[1]
16462 ptr := v_0.Args[0]
16463 if !(off == 0 && sym == nil) {
16464 break
16465 }
16466 v.reset(OpARM64MOVWloadidx)
16467 v.AddArg(ptr)
16468 v.AddArg(idx)
16469 v.AddArg(mem)
16470 return true
16471 }
16472
16473
16474
16475 for {
16476 off := v.AuxInt
16477 sym := v.Aux
16478 mem := v.Args[1]
16479 v_0 := v.Args[0]
16480 if v_0.Op != OpARM64ADDshiftLL {
16481 break
16482 }
16483 if v_0.AuxInt != 2 {
16484 break
16485 }
16486 idx := v_0.Args[1]
16487 ptr := v_0.Args[0]
16488 if !(off == 0 && sym == nil) {
16489 break
16490 }
16491 v.reset(OpARM64MOVWloadidx4)
16492 v.AddArg(ptr)
16493 v.AddArg(idx)
16494 v.AddArg(mem)
16495 return true
16496 }
16497
16498
16499
16500 for {
16501 off1 := v.AuxInt
16502 sym1 := v.Aux
16503 mem := v.Args[1]
16504 v_0 := v.Args[0]
16505 if v_0.Op != OpARM64MOVDaddr {
16506 break
16507 }
16508 off2 := v_0.AuxInt
16509 sym2 := v_0.Aux
16510 ptr := v_0.Args[0]
16511 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
16512 break
16513 }
16514 v.reset(OpARM64MOVWload)
16515 v.AuxInt = off1 + off2
16516 v.Aux = mergeSym(sym1, sym2)
16517 v.AddArg(ptr)
16518 v.AddArg(mem)
16519 return true
16520 }
16521
16522
16523
16524 for {
16525 off := v.AuxInt
16526 sym := v.Aux
16527 _ = v.Args[1]
16528 ptr := v.Args[0]
16529 v_1 := v.Args[1]
16530 if v_1.Op != OpARM64MOVWstorezero {
16531 break
16532 }
16533 off2 := v_1.AuxInt
16534 sym2 := v_1.Aux
16535 _ = v_1.Args[1]
16536 ptr2 := v_1.Args[0]
16537 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
16538 break
16539 }
16540 v.reset(OpARM64MOVDconst)
16541 v.AuxInt = 0
16542 return true
16543 }
16544 return false
16545 }
16546 func rewriteValueARM64_OpARM64MOVWloadidx_0(v *Value) bool {
16547 // match: (MOVWloadidx ptr (MOVDconst [c]) mem)
16548 // cond:
16549 // result: (MOVWload [c] ptr mem)
16550 for {
16551 mem := v.Args[2]
16552 ptr := v.Args[0]
16553 v_1 := v.Args[1]
16554 if v_1.Op != OpARM64MOVDconst {
16555 break
16556 }
16557 c := v_1.AuxInt
16558 v.reset(OpARM64MOVWload)
16559 v.AuxInt = c
16560 v.AddArg(ptr)
16561 v.AddArg(mem)
16562 return true
16563 }
16564
16565
16566
16567 for {
16568 mem := v.Args[2]
16569 v_0 := v.Args[0]
16570 if v_0.Op != OpARM64MOVDconst {
16571 break
16572 }
16573 c := v_0.AuxInt
16574 ptr := v.Args[1]
16575 v.reset(OpARM64MOVWload)
16576 v.AuxInt = c
16577 v.AddArg(ptr)
16578 v.AddArg(mem)
16579 return true
16580 }
16581
16582
16583
16584 for {
16585 mem := v.Args[2]
16586 ptr := v.Args[0]
16587 v_1 := v.Args[1]
16588 if v_1.Op != OpARM64SLLconst {
16589 break
16590 }
16591 if v_1.AuxInt != 2 {
16592 break
16593 }
16594 idx := v_1.Args[0]
16595 v.reset(OpARM64MOVWloadidx4)
16596 v.AddArg(ptr)
16597 v.AddArg(idx)
16598 v.AddArg(mem)
16599 return true
16600 }
16601
16602
16603
16604 for {
16605 mem := v.Args[2]
16606 v_0 := v.Args[0]
16607 if v_0.Op != OpARM64SLLconst {
16608 break
16609 }
16610 if v_0.AuxInt != 2 {
16611 break
16612 }
16613 idx := v_0.Args[0]
16614 ptr := v.Args[1]
16615 v.reset(OpARM64MOVWloadidx4)
16616 v.AddArg(ptr)
16617 v.AddArg(idx)
16618 v.AddArg(mem)
16619 return true
16620 }
16621
16622
16623
16624 for {
16625 _ = v.Args[2]
16626 ptr := v.Args[0]
16627 idx := v.Args[1]
16628 v_2 := v.Args[2]
16629 if v_2.Op != OpARM64MOVWstorezeroidx {
16630 break
16631 }
16632 _ = v_2.Args[2]
16633 ptr2 := v_2.Args[0]
16634 idx2 := v_2.Args[1]
16635 if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) {
16636 break
16637 }
16638 v.reset(OpARM64MOVDconst)
16639 v.AuxInt = 0
16640 return true
16641 }
16642 return false
16643 }
16644 func rewriteValueARM64_OpARM64MOVWloadidx4_0(v *Value) bool {
16645 // match: (MOVWloadidx4 ptr (MOVDconst [c]) mem)
16646 // cond:
16647 // result: (MOVWload [c<<2] ptr mem)
16648 for {
16649 mem := v.Args[2]
16650 ptr := v.Args[0]
16651 v_1 := v.Args[1]
16652 if v_1.Op != OpARM64MOVDconst {
16653 break
16654 }
16655 c := v_1.AuxInt
16656 v.reset(OpARM64MOVWload)
16657 v.AuxInt = c << 2
16658 v.AddArg(ptr)
16659 v.AddArg(mem)
16660 return true
16661 }
16662
16663
16664
16665 for {
16666 _ = v.Args[2]
16667 ptr := v.Args[0]
16668 idx := v.Args[1]
16669 v_2 := v.Args[2]
16670 if v_2.Op != OpARM64MOVWstorezeroidx4 {
16671 break
16672 }
16673 _ = v_2.Args[2]
16674 ptr2 := v_2.Args[0]
16675 idx2 := v_2.Args[1]
16676 if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2)) {
16677 break
16678 }
16679 v.reset(OpARM64MOVDconst)
16680 v.AuxInt = 0
16681 return true
16682 }
16683 return false
16684 }
16685 func rewriteValueARM64_OpARM64MOVWreg_0(v *Value) bool {
16686 // match: (MOVWreg x:(MOVBload _ _))
16687 // cond:
16688 // result: (MOVDreg x)
16689 for {
16690 x := v.Args[0]
16691 if x.Op != OpARM64MOVBload {
16692 break
16693 }
16694 _ = x.Args[1]
16695 v.reset(OpARM64MOVDreg)
16696 v.AddArg(x)
16697 return true
16698 }
16699
16700
16701
16702 for {
16703 x := v.Args[0]
16704 if x.Op != OpARM64MOVBUload {
16705 break
16706 }
16707 _ = x.Args[1]
16708 v.reset(OpARM64MOVDreg)
16709 v.AddArg(x)
16710 return true
16711 }
16712
16713
16714
16715 for {
16716 x := v.Args[0]
16717 if x.Op != OpARM64MOVHload {
16718 break
16719 }
16720 _ = x.Args[1]
16721 v.reset(OpARM64MOVDreg)
16722 v.AddArg(x)
16723 return true
16724 }
16725
16726
16727
16728 for {
16729 x := v.Args[0]
16730 if x.Op != OpARM64MOVHUload {
16731 break
16732 }
16733 _ = x.Args[1]
16734 v.reset(OpARM64MOVDreg)
16735 v.AddArg(x)
16736 return true
16737 }
16738
16739
16740
16741 for {
16742 x := v.Args[0]
16743 if x.Op != OpARM64MOVWload {
16744 break
16745 }
16746 _ = x.Args[1]
16747 v.reset(OpARM64MOVDreg)
16748 v.AddArg(x)
16749 return true
16750 }
16751
16752
16753
16754 for {
16755 x := v.Args[0]
16756 if x.Op != OpARM64MOVBloadidx {
16757 break
16758 }
16759 _ = x.Args[2]
16760 v.reset(OpARM64MOVDreg)
16761 v.AddArg(x)
16762 return true
16763 }
16764
16765
16766
16767 for {
16768 x := v.Args[0]
16769 if x.Op != OpARM64MOVBUloadidx {
16770 break
16771 }
16772 _ = x.Args[2]
16773 v.reset(OpARM64MOVDreg)
16774 v.AddArg(x)
16775 return true
16776 }
16777
16778
16779
16780 for {
16781 x := v.Args[0]
16782 if x.Op != OpARM64MOVHloadidx {
16783 break
16784 }
16785 _ = x.Args[2]
16786 v.reset(OpARM64MOVDreg)
16787 v.AddArg(x)
16788 return true
16789 }
16790
16791
16792
16793 for {
16794 x := v.Args[0]
16795 if x.Op != OpARM64MOVHUloadidx {
16796 break
16797 }
16798 _ = x.Args[2]
16799 v.reset(OpARM64MOVDreg)
16800 v.AddArg(x)
16801 return true
16802 }
16803
16804
16805
16806 for {
16807 x := v.Args[0]
16808 if x.Op != OpARM64MOVWloadidx {
16809 break
16810 }
16811 _ = x.Args[2]
16812 v.reset(OpARM64MOVDreg)
16813 v.AddArg(x)
16814 return true
16815 }
16816 return false
16817 }
16818 func rewriteValueARM64_OpARM64MOVWreg_10(v *Value) bool {
16819
16820
16821
16822 for {
16823 x := v.Args[0]
16824 if x.Op != OpARM64MOVHloadidx2 {
16825 break
16826 }
16827 _ = x.Args[2]
16828 v.reset(OpARM64MOVDreg)
16829 v.AddArg(x)
16830 return true
16831 }
16832
16833
16834
16835 for {
16836 x := v.Args[0]
16837 if x.Op != OpARM64MOVHUloadidx2 {
16838 break
16839 }
16840 _ = x.Args[2]
16841 v.reset(OpARM64MOVDreg)
16842 v.AddArg(x)
16843 return true
16844 }
16845
16846
16847
16848 for {
16849 x := v.Args[0]
16850 if x.Op != OpARM64MOVWloadidx4 {
16851 break
16852 }
16853 _ = x.Args[2]
16854 v.reset(OpARM64MOVDreg)
16855 v.AddArg(x)
16856 return true
16857 }
16858
16859
16860
16861 for {
16862 x := v.Args[0]
16863 if x.Op != OpARM64MOVBreg {
16864 break
16865 }
16866 v.reset(OpARM64MOVDreg)
16867 v.AddArg(x)
16868 return true
16869 }
16870
16871
16872
16873 for {
16874 x := v.Args[0]
16875 if x.Op != OpARM64MOVBUreg {
16876 break
16877 }
16878 v.reset(OpARM64MOVDreg)
16879 v.AddArg(x)
16880 return true
16881 }
16882
16883
16884
16885 for {
16886 x := v.Args[0]
16887 if x.Op != OpARM64MOVHreg {
16888 break
16889 }
16890 v.reset(OpARM64MOVDreg)
16891 v.AddArg(x)
16892 return true
16893 }
16894
16895
16896
16897 for {
16898 x := v.Args[0]
16899 if x.Op != OpARM64MOVHreg {
16900 break
16901 }
16902 v.reset(OpARM64MOVDreg)
16903 v.AddArg(x)
16904 return true
16905 }
16906
16907
16908
16909 for {
16910 x := v.Args[0]
16911 if x.Op != OpARM64MOVWreg {
16912 break
16913 }
16914 v.reset(OpARM64MOVDreg)
16915 v.AddArg(x)
16916 return true
16917 }
16918 // match: (MOVWreg (MOVDconst [c]))
16919 // cond:
16920 // result: (MOVDconst [int64(int32(c))])
16921 for {
16922 v_0 := v.Args[0]
16923 if v_0.Op != OpARM64MOVDconst {
16924 break
16925 }
16926 c := v_0.AuxInt
16927 v.reset(OpARM64MOVDconst)
16928 v.AuxInt = int64(int32(c))
16929 return true
16930 }
16931 // match: (MOVWreg (SLLconst [lc] x))
16932 // cond: lc < 32
16933 // result: (SBFIZ [armBFAuxInt(lc, 32-lc)] x)
16934 for {
16935 v_0 := v.Args[0]
16936 if v_0.Op != OpARM64SLLconst {
16937 break
16938 }
16939 lc := v_0.AuxInt
16940 x := v_0.Args[0]
16941 if !(lc < 32) {
16942 break
16943 }
16944 v.reset(OpARM64SBFIZ)
16945 v.AuxInt = armBFAuxInt(lc, 32-lc)
16946 v.AddArg(x)
16947 return true
16948 }
16949 return false
16950 }
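// Explanatory note (editorial): MOVWreg is the signed 32-bit extension. It is
// dropped when the operand is already correctly extended (a load or extension
// of 32 bits or narrower), folded into constants via int64(int32(c)), and
// combined with a left shift into SBFIZ when the shift amount is below 32.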
16951 func rewriteValueARM64_OpARM64MOVWstore_0(v *Value) bool {
16952 b := v.Block
16953 config := b.Func.Config
16954 // match: (MOVWstore [off] {sym} ptr (FMOVSfpgp val) mem)
16955 // cond:
16956 // result: (FMOVSstore [off] {sym} ptr val mem)
16957 for {
16958 off := v.AuxInt
16959 sym := v.Aux
16960 mem := v.Args[2]
16961 ptr := v.Args[0]
16962 v_1 := v.Args[1]
16963 if v_1.Op != OpARM64FMOVSfpgp {
16964 break
16965 }
16966 val := v_1.Args[0]
16967 v.reset(OpARM64FMOVSstore)
16968 v.AuxInt = off
16969 v.Aux = sym
16970 v.AddArg(ptr)
16971 v.AddArg(val)
16972 v.AddArg(mem)
16973 return true
16974 }
16975
16976
16977
16978 for {
16979 off1 := v.AuxInt
16980 sym := v.Aux
16981 mem := v.Args[2]
16982 v_0 := v.Args[0]
16983 if v_0.Op != OpARM64ADDconst {
16984 break
16985 }
16986 off2 := v_0.AuxInt
16987 ptr := v_0.Args[0]
16988 val := v.Args[1]
16989 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
16990 break
16991 }
16992 v.reset(OpARM64MOVWstore)
16993 v.AuxInt = off1 + off2
16994 v.Aux = sym
16995 v.AddArg(ptr)
16996 v.AddArg(val)
16997 v.AddArg(mem)
16998 return true
16999 }
17000
17001
17002
17003 for {
17004 off := v.AuxInt
17005 sym := v.Aux
17006 mem := v.Args[2]
17007 v_0 := v.Args[0]
17008 if v_0.Op != OpARM64ADD {
17009 break
17010 }
17011 idx := v_0.Args[1]
17012 ptr := v_0.Args[0]
17013 val := v.Args[1]
17014 if !(off == 0 && sym == nil) {
17015 break
17016 }
17017 v.reset(OpARM64MOVWstoreidx)
17018 v.AddArg(ptr)
17019 v.AddArg(idx)
17020 v.AddArg(val)
17021 v.AddArg(mem)
17022 return true
17023 }
17024
17025
17026
17027 for {
17028 off := v.AuxInt
17029 sym := v.Aux
17030 mem := v.Args[2]
17031 v_0 := v.Args[0]
17032 if v_0.Op != OpARM64ADDshiftLL {
17033 break
17034 }
17035 if v_0.AuxInt != 2 {
17036 break
17037 }
17038 idx := v_0.Args[1]
17039 ptr := v_0.Args[0]
17040 val := v.Args[1]
17041 if !(off == 0 && sym == nil) {
17042 break
17043 }
17044 v.reset(OpARM64MOVWstoreidx4)
17045 v.AddArg(ptr)
17046 v.AddArg(idx)
17047 v.AddArg(val)
17048 v.AddArg(mem)
17049 return true
17050 }
17051
17052
17053
17054 for {
17055 off1 := v.AuxInt
17056 sym1 := v.Aux
17057 mem := v.Args[2]
17058 v_0 := v.Args[0]
17059 if v_0.Op != OpARM64MOVDaddr {
17060 break
17061 }
17062 off2 := v_0.AuxInt
17063 sym2 := v_0.Aux
17064 ptr := v_0.Args[0]
17065 val := v.Args[1]
17066 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
17067 break
17068 }
17069 v.reset(OpARM64MOVWstore)
17070 v.AuxInt = off1 + off2
17071 v.Aux = mergeSym(sym1, sym2)
17072 v.AddArg(ptr)
17073 v.AddArg(val)
17074 v.AddArg(mem)
17075 return true
17076 }
17077
17078
17079
17080 for {
17081 off := v.AuxInt
17082 sym := v.Aux
17083 mem := v.Args[2]
17084 ptr := v.Args[0]
17085 v_1 := v.Args[1]
17086 if v_1.Op != OpARM64MOVDconst {
17087 break
17088 }
17089 if v_1.AuxInt != 0 {
17090 break
17091 }
17092 v.reset(OpARM64MOVWstorezero)
17093 v.AuxInt = off
17094 v.Aux = sym
17095 v.AddArg(ptr)
17096 v.AddArg(mem)
17097 return true
17098 }
17099
17100
17101
17102 for {
17103 off := v.AuxInt
17104 sym := v.Aux
17105 mem := v.Args[2]
17106 ptr := v.Args[0]
17107 v_1 := v.Args[1]
17108 if v_1.Op != OpARM64MOVWreg {
17109 break
17110 }
17111 x := v_1.Args[0]
17112 v.reset(OpARM64MOVWstore)
17113 v.AuxInt = off
17114 v.Aux = sym
17115 v.AddArg(ptr)
17116 v.AddArg(x)
17117 v.AddArg(mem)
17118 return true
17119 }
17120
17121
17122
17123 for {
17124 off := v.AuxInt
17125 sym := v.Aux
17126 mem := v.Args[2]
17127 ptr := v.Args[0]
17128 v_1 := v.Args[1]
17129 if v_1.Op != OpARM64MOVWUreg {
17130 break
17131 }
17132 x := v_1.Args[0]
17133 v.reset(OpARM64MOVWstore)
17134 v.AuxInt = off
17135 v.Aux = sym
17136 v.AddArg(ptr)
17137 v.AddArg(x)
17138 v.AddArg(mem)
17139 return true
17140 }
17141
17142 // match: (MOVWstore [i] {s} ptr0 (SRLconst [32] w) x:(MOVWstore [i-4] {s} ptr1 w mem))
17143 // cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)
17144 // result: (MOVDstore [i-4] {s} ptr0 w mem)
17145 i := v.AuxInt
17146 s := v.Aux
17147 _ = v.Args[2]
17148 ptr0 := v.Args[0]
17149 v_1 := v.Args[1]
17150 if v_1.Op != OpARM64SRLconst {
17151 break
17152 }
17153 if v_1.AuxInt != 32 {
17154 break
17155 }
17156 w := v_1.Args[0]
17157 x := v.Args[2]
17158 if x.Op != OpARM64MOVWstore {
17159 break
17160 }
17161 if x.AuxInt != i-4 {
17162 break
17163 }
17164 if x.Aux != s {
17165 break
17166 }
17167 mem := x.Args[2]
17168 ptr1 := x.Args[0]
17169 if w != x.Args[1] {
17170 break
17171 }
17172 if !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) {
17173 break
17174 }
17175 v.reset(OpARM64MOVDstore)
17176 v.AuxInt = i - 4
17177 v.Aux = s
17178 v.AddArg(ptr0)
17179 v.AddArg(w)
17180 v.AddArg(mem)
17181 return true
17182 }
17183
17184
17185
17186 for {
17187 if v.AuxInt != 4 {
17188 break
17189 }
17190 s := v.Aux
17191 _ = v.Args[2]
17192 v_0 := v.Args[0]
17193 if v_0.Op != OpARM64ADD {
17194 break
17195 }
17196 idx0 := v_0.Args[1]
17197 ptr0 := v_0.Args[0]
17198 v_1 := v.Args[1]
17199 if v_1.Op != OpARM64SRLconst {
17200 break
17201 }
17202 if v_1.AuxInt != 32 {
17203 break
17204 }
17205 w := v_1.Args[0]
17206 x := v.Args[2]
17207 if x.Op != OpARM64MOVWstoreidx {
17208 break
17209 }
17210 mem := x.Args[3]
17211 ptr1 := x.Args[0]
17212 idx1 := x.Args[1]
17213 if w != x.Args[2] {
17214 break
17215 }
17216 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
17217 break
17218 }
17219 v.reset(OpARM64MOVDstoreidx)
17220 v.AddArg(ptr1)
17221 v.AddArg(idx1)
17222 v.AddArg(w)
17223 v.AddArg(mem)
17224 return true
17225 }
17226 return false
17227 }
17228 func rewriteValueARM64_OpARM64MOVWstore_10(v *Value) bool {
17229 b := v.Block
17230
17231 // match: (MOVWstore [4] {s} (ADDshiftLL [2] ptr0 idx0) (SRLconst [32] w) x:(MOVWstoreidx4 ptr1 idx1 w mem))
17232 // cond: x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)
17233 // result: (MOVDstoreidx ptr1 (SLLconst <idx1.Type> [2] idx1) w mem)
17234 if v.AuxInt != 4 {
17235 break
17236 }
17237 s := v.Aux
17238 _ = v.Args[2]
17239 v_0 := v.Args[0]
17240 if v_0.Op != OpARM64ADDshiftLL {
17241 break
17242 }
17243 if v_0.AuxInt != 2 {
17244 break
17245 }
17246 idx0 := v_0.Args[1]
17247 ptr0 := v_0.Args[0]
17248 v_1 := v.Args[1]
17249 if v_1.Op != OpARM64SRLconst {
17250 break
17251 }
17252 if v_1.AuxInt != 32 {
17253 break
17254 }
17255 w := v_1.Args[0]
17256 x := v.Args[2]
17257 if x.Op != OpARM64MOVWstoreidx4 {
17258 break
17259 }
17260 mem := x.Args[3]
17261 ptr1 := x.Args[0]
17262 idx1 := x.Args[1]
17263 if w != x.Args[2] {
17264 break
17265 }
17266 if !(x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)) {
17267 break
17268 }
17269 v.reset(OpARM64MOVDstoreidx)
17270 v.AddArg(ptr1)
17271 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, idx1.Type)
17272 v0.AuxInt = 2
17273 v0.AddArg(idx1)
17274 v.AddArg(v0)
17275 v.AddArg(w)
17276 v.AddArg(mem)
17277 return true
17278 }
17279
17280
17281
17282 for {
17283 i := v.AuxInt
17284 s := v.Aux
17285 _ = v.Args[2]
17286 ptr0 := v.Args[0]
17287 v_1 := v.Args[1]
17288 if v_1.Op != OpARM64SRLconst {
17289 break
17290 }
17291 j := v_1.AuxInt
17292 w := v_1.Args[0]
17293 x := v.Args[2]
17294 if x.Op != OpARM64MOVWstore {
17295 break
17296 }
17297 if x.AuxInt != i-4 {
17298 break
17299 }
17300 if x.Aux != s {
17301 break
17302 }
17303 mem := x.Args[2]
17304 ptr1 := x.Args[0]
17305 w0 := x.Args[1]
17306 if w0.Op != OpARM64SRLconst {
17307 break
17308 }
17309 if w0.AuxInt != j-32 {
17310 break
17311 }
17312 if w != w0.Args[0] {
17313 break
17314 }
17315 if !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) {
17316 break
17317 }
17318 v.reset(OpARM64MOVDstore)
17319 v.AuxInt = i - 4
17320 v.Aux = s
17321 v.AddArg(ptr0)
17322 v.AddArg(w0)
17323 v.AddArg(mem)
17324 return true
17325 }
17326
17327
17328
17329 for {
17330 if v.AuxInt != 4 {
17331 break
17332 }
17333 s := v.Aux
17334 _ = v.Args[2]
17335 v_0 := v.Args[0]
17336 if v_0.Op != OpARM64ADD {
17337 break
17338 }
17339 idx0 := v_0.Args[1]
17340 ptr0 := v_0.Args[0]
17341 v_1 := v.Args[1]
17342 if v_1.Op != OpARM64SRLconst {
17343 break
17344 }
17345 j := v_1.AuxInt
17346 w := v_1.Args[0]
17347 x := v.Args[2]
17348 if x.Op != OpARM64MOVWstoreidx {
17349 break
17350 }
17351 mem := x.Args[3]
17352 ptr1 := x.Args[0]
17353 idx1 := x.Args[1]
17354 w0 := x.Args[2]
17355 if w0.Op != OpARM64SRLconst {
17356 break
17357 }
17358 if w0.AuxInt != j-32 {
17359 break
17360 }
17361 if w != w0.Args[0] {
17362 break
17363 }
17364 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
17365 break
17366 }
17367 v.reset(OpARM64MOVDstoreidx)
17368 v.AddArg(ptr1)
17369 v.AddArg(idx1)
17370 v.AddArg(w0)
17371 v.AddArg(mem)
17372 return true
17373 }
17374
17375
17376
17377 for {
17378 if v.AuxInt != 4 {
17379 break
17380 }
17381 s := v.Aux
17382 _ = v.Args[2]
17383 v_0 := v.Args[0]
17384 if v_0.Op != OpARM64ADDshiftLL {
17385 break
17386 }
17387 if v_0.AuxInt != 2 {
17388 break
17389 }
17390 idx0 := v_0.Args[1]
17391 ptr0 := v_0.Args[0]
17392 v_1 := v.Args[1]
17393 if v_1.Op != OpARM64SRLconst {
17394 break
17395 }
17396 j := v_1.AuxInt
17397 w := v_1.Args[0]
17398 x := v.Args[2]
17399 if x.Op != OpARM64MOVWstoreidx4 {
17400 break
17401 }
17402 mem := x.Args[3]
17403 ptr1 := x.Args[0]
17404 idx1 := x.Args[1]
17405 w0 := x.Args[2]
17406 if w0.Op != OpARM64SRLconst {
17407 break
17408 }
17409 if w0.AuxInt != j-32 {
17410 break
17411 }
17412 if w != w0.Args[0] {
17413 break
17414 }
17415 if !(x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)) {
17416 break
17417 }
17418 v.reset(OpARM64MOVDstoreidx)
17419 v.AddArg(ptr1)
17420 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, idx1.Type)
17421 v0.AuxInt = 2
17422 v0.AddArg(idx1)
17423 v.AddArg(v0)
17424 v.AddArg(w0)
17425 v.AddArg(mem)
17426 return true
17427 }
17428 return false
17429 }
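// Explanatory note (editorial): the MOVWstore cases mirror the halfword
// merging above one level up: two adjacent 32-bit stores of the low and high
// halves of the same 64-bit value w (the high half exposed via SRLconst [32])
// are combined into a single MOVDstore or MOVDstoreidx, with idx4 indices
// rescaled through an explicit SLLconst [2].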
17430 func rewriteValueARM64_OpARM64MOVWstoreidx_0(v *Value) bool {
17431 // match: (MOVWstoreidx ptr (MOVDconst [c]) val mem)
17432 // cond:
17433 // result: (MOVWstore [c] ptr val mem)
17434 for {
17435 mem := v.Args[3]
17436 ptr := v.Args[0]
17437 v_1 := v.Args[1]
17438 if v_1.Op != OpARM64MOVDconst {
17439 break
17440 }
17441 c := v_1.AuxInt
17442 val := v.Args[2]
17443 v.reset(OpARM64MOVWstore)
17444 v.AuxInt = c
17445 v.AddArg(ptr)
17446 v.AddArg(val)
17447 v.AddArg(mem)
17448 return true
17449 }
17450
17451
17452
17453 for {
17454 mem := v.Args[3]
17455 v_0 := v.Args[0]
17456 if v_0.Op != OpARM64MOVDconst {
17457 break
17458 }
17459 c := v_0.AuxInt
17460 idx := v.Args[1]
17461 val := v.Args[2]
17462 v.reset(OpARM64MOVWstore)
17463 v.AuxInt = c
17464 v.AddArg(idx)
17465 v.AddArg(val)
17466 v.AddArg(mem)
17467 return true
17468 }
17469
17470
17471
17472 for {
17473 mem := v.Args[3]
17474 ptr := v.Args[0]
17475 v_1 := v.Args[1]
17476 if v_1.Op != OpARM64SLLconst {
17477 break
17478 }
17479 if v_1.AuxInt != 2 {
17480 break
17481 }
17482 idx := v_1.Args[0]
17483 val := v.Args[2]
17484 v.reset(OpARM64MOVWstoreidx4)
17485 v.AddArg(ptr)
17486 v.AddArg(idx)
17487 v.AddArg(val)
17488 v.AddArg(mem)
17489 return true
17490 }
17491
17492
17493
17494 for {
17495 mem := v.Args[3]
17496 v_0 := v.Args[0]
17497 if v_0.Op != OpARM64SLLconst {
17498 break
17499 }
17500 if v_0.AuxInt != 2 {
17501 break
17502 }
17503 idx := v_0.Args[0]
17504 ptr := v.Args[1]
17505 val := v.Args[2]
17506 v.reset(OpARM64MOVWstoreidx4)
17507 v.AddArg(ptr)
17508 v.AddArg(idx)
17509 v.AddArg(val)
17510 v.AddArg(mem)
17511 return true
17512 }
17513
17514
17515
17516 for {
17517 mem := v.Args[3]
17518 ptr := v.Args[0]
17519 idx := v.Args[1]
17520 v_2 := v.Args[2]
17521 if v_2.Op != OpARM64MOVDconst {
17522 break
17523 }
17524 if v_2.AuxInt != 0 {
17525 break
17526 }
17527 v.reset(OpARM64MOVWstorezeroidx)
17528 v.AddArg(ptr)
17529 v.AddArg(idx)
17530 v.AddArg(mem)
17531 return true
17532 }
17533
17534
17535
17536 for {
17537 mem := v.Args[3]
17538 ptr := v.Args[0]
17539 idx := v.Args[1]
17540 v_2 := v.Args[2]
17541 if v_2.Op != OpARM64MOVWreg {
17542 break
17543 }
17544 x := v_2.Args[0]
17545 v.reset(OpARM64MOVWstoreidx)
17546 v.AddArg(ptr)
17547 v.AddArg(idx)
17548 v.AddArg(x)
17549 v.AddArg(mem)
17550 return true
17551 }
17552
17553
17554
17555 for {
17556 mem := v.Args[3]
17557 ptr := v.Args[0]
17558 idx := v.Args[1]
17559 v_2 := v.Args[2]
17560 if v_2.Op != OpARM64MOVWUreg {
17561 break
17562 }
17563 x := v_2.Args[0]
17564 v.reset(OpARM64MOVWstoreidx)
17565 v.AddArg(ptr)
17566 v.AddArg(idx)
17567 v.AddArg(x)
17568 v.AddArg(mem)
17569 return true
17570 }
17571
17572
17573
17574 for {
17575 _ = v.Args[3]
17576 ptr := v.Args[0]
17577 v_1 := v.Args[1]
17578 if v_1.Op != OpARM64ADDconst {
17579 break
17580 }
17581 if v_1.AuxInt != 4 {
17582 break
17583 }
17584 idx := v_1.Args[0]
17585 v_2 := v.Args[2]
17586 if v_2.Op != OpARM64SRLconst {
17587 break
17588 }
17589 if v_2.AuxInt != 32 {
17590 break
17591 }
17592 w := v_2.Args[0]
17593 x := v.Args[3]
17594 if x.Op != OpARM64MOVWstoreidx {
17595 break
17596 }
17597 mem := x.Args[3]
17598 if ptr != x.Args[0] {
17599 break
17600 }
17601 if idx != x.Args[1] {
17602 break
17603 }
17604 if w != x.Args[2] {
17605 break
17606 }
17607 if !(x.Uses == 1 && clobber(x)) {
17608 break
17609 }
17610 v.reset(OpARM64MOVDstoreidx)
17611 v.AddArg(ptr)
17612 v.AddArg(idx)
17613 v.AddArg(w)
17614 v.AddArg(mem)
17615 return true
17616 }
17617 return false
17618 }
17619 func rewriteValueARM64_OpARM64MOVWstoreidx4_0(v *Value) bool {
17620 // match: (MOVWstoreidx4 ptr (MOVDconst [c]) val mem)
17621 // cond:
17622 // result: (MOVWstore [c<<2] ptr val mem)
17623 for {
17624 mem := v.Args[3]
17625 ptr := v.Args[0]
17626 v_1 := v.Args[1]
17627 if v_1.Op != OpARM64MOVDconst {
17628 break
17629 }
17630 c := v_1.AuxInt
17631 val := v.Args[2]
17632 v.reset(OpARM64MOVWstore)
17633 v.AuxInt = c << 2
17634 v.AddArg(ptr)
17635 v.AddArg(val)
17636 v.AddArg(mem)
17637 return true
17638 }
17639
17640
17641
17642 for {
17643 mem := v.Args[3]
17644 ptr := v.Args[0]
17645 idx := v.Args[1]
17646 v_2 := v.Args[2]
17647 if v_2.Op != OpARM64MOVDconst {
17648 break
17649 }
17650 if v_2.AuxInt != 0 {
17651 break
17652 }
17653 v.reset(OpARM64MOVWstorezeroidx4)
17654 v.AddArg(ptr)
17655 v.AddArg(idx)
17656 v.AddArg(mem)
17657 return true
17658 }
17659
17660
17661
17662 for {
17663 mem := v.Args[3]
17664 ptr := v.Args[0]
17665 idx := v.Args[1]
17666 v_2 := v.Args[2]
17667 if v_2.Op != OpARM64MOVWreg {
17668 break
17669 }
17670 x := v_2.Args[0]
17671 v.reset(OpARM64MOVWstoreidx4)
17672 v.AddArg(ptr)
17673 v.AddArg(idx)
17674 v.AddArg(x)
17675 v.AddArg(mem)
17676 return true
17677 }
17678
17679
17680
17681 for {
17682 mem := v.Args[3]
17683 ptr := v.Args[0]
17684 idx := v.Args[1]
17685 v_2 := v.Args[2]
17686 if v_2.Op != OpARM64MOVWUreg {
17687 break
17688 }
17689 x := v_2.Args[0]
17690 v.reset(OpARM64MOVWstoreidx4)
17691 v.AddArg(ptr)
17692 v.AddArg(idx)
17693 v.AddArg(x)
17694 v.AddArg(mem)
17695 return true
17696 }
17697 return false
17698 }
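// rewriteValueARM64_OpARM64MOVWstorezero_0 folds address arithmetic into
// MOVWstorezero, converts it to the indexed forms, and merges two adjacent
// 32-bit zero stores into a single MOVDstorezero / MOVDstorezeroidx.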
17699 func rewriteValueARM64_OpARM64MOVWstorezero_0(v *Value) bool {
17700 b := v.Block
17701 config := b.Func.Config
17702 // match: (MOVWstorezero [off1] {sym} (ADDconst [off2] ptr) mem)
17703 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
17704 // result: (MOVWstorezero [off1+off2] {sym} ptr mem)
17705 for {
17706 off1 := v.AuxInt
17707 sym := v.Aux
17708 mem := v.Args[1]
17709 v_0 := v.Args[0]
17710 if v_0.Op != OpARM64ADDconst {
17711 break
17712 }
17713 off2 := v_0.AuxInt
17714 ptr := v_0.Args[0]
17715 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
17716 break
17717 }
17718 v.reset(OpARM64MOVWstorezero)
17719 v.AuxInt = off1 + off2
17720 v.Aux = sym
17721 v.AddArg(ptr)
17722 v.AddArg(mem)
17723 return true
17724 }
17725
17726
17727
17728 for {
17729 off1 := v.AuxInt
17730 sym1 := v.Aux
17731 mem := v.Args[1]
17732 v_0 := v.Args[0]
17733 if v_0.Op != OpARM64MOVDaddr {
17734 break
17735 }
17736 off2 := v_0.AuxInt
17737 sym2 := v_0.Aux
17738 ptr := v_0.Args[0]
17739 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
17740 break
17741 }
17742 v.reset(OpARM64MOVWstorezero)
17743 v.AuxInt = off1 + off2
17744 v.Aux = mergeSym(sym1, sym2)
17745 v.AddArg(ptr)
17746 v.AddArg(mem)
17747 return true
17748 }
17749
17750
17751
17752 for {
17753 off := v.AuxInt
17754 sym := v.Aux
17755 mem := v.Args[1]
17756 v_0 := v.Args[0]
17757 if v_0.Op != OpARM64ADD {
17758 break
17759 }
17760 idx := v_0.Args[1]
17761 ptr := v_0.Args[0]
17762 if !(off == 0 && sym == nil) {
17763 break
17764 }
17765 v.reset(OpARM64MOVWstorezeroidx)
17766 v.AddArg(ptr)
17767 v.AddArg(idx)
17768 v.AddArg(mem)
17769 return true
17770 }
17771
17772
17773
17774 for {
17775 off := v.AuxInt
17776 sym := v.Aux
17777 mem := v.Args[1]
17778 v_0 := v.Args[0]
17779 if v_0.Op != OpARM64ADDshiftLL {
17780 break
17781 }
17782 if v_0.AuxInt != 2 {
17783 break
17784 }
17785 idx := v_0.Args[1]
17786 ptr := v_0.Args[0]
17787 if !(off == 0 && sym == nil) {
17788 break
17789 }
17790 v.reset(OpARM64MOVWstorezeroidx4)
17791 v.AddArg(ptr)
17792 v.AddArg(idx)
17793 v.AddArg(mem)
17794 return true
17795 }
17796 // match: (MOVWstorezero [i] {s} ptr0 x:(MOVWstorezero [j] {s} ptr1 mem))
17797 // cond: x.Uses == 1 && areAdjacentOffsets(i, j, 4) && is32Bit(min(i, j)) && isSamePtr(ptr0, ptr1) && clobber(x)
17798 // result: (MOVDstorezero [min(i, j)] {s} ptr0 mem)
17799 for {
17800 i := v.AuxInt
17801 s := v.Aux
17802 _ = v.Args[1]
17803 ptr0 := v.Args[0]
17804 x := v.Args[1]
17805 if x.Op != OpARM64MOVWstorezero {
17806 break
17807 }
17808 j := x.AuxInt
17809 if x.Aux != s {
17810 break
17811 }
17812 mem := x.Args[1]
17813 ptr1 := x.Args[0]
17814 if !(x.Uses == 1 && areAdjacentOffsets(i, j, 4) && is32Bit(min(i, j)) && isSamePtr(ptr0, ptr1) && clobber(x)) {
17815 break
17816 }
17817 v.reset(OpARM64MOVDstorezero)
17818 v.AuxInt = min(i, j)
17819 v.Aux = s
17820 v.AddArg(ptr0)
17821 v.AddArg(mem)
17822 return true
17823 }
17824
17825
17826
17827 for {
17828 if v.AuxInt != 4 {
17829 break
17830 }
17831 s := v.Aux
17832 _ = v.Args[1]
17833 v_0 := v.Args[0]
17834 if v_0.Op != OpARM64ADD {
17835 break
17836 }
17837 idx0 := v_0.Args[1]
17838 ptr0 := v_0.Args[0]
17839 x := v.Args[1]
17840 if x.Op != OpARM64MOVWstorezeroidx {
17841 break
17842 }
17843 mem := x.Args[2]
17844 ptr1 := x.Args[0]
17845 idx1 := x.Args[1]
17846 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
17847 break
17848 }
17849 v.reset(OpARM64MOVDstorezeroidx)
17850 v.AddArg(ptr1)
17851 v.AddArg(idx1)
17852 v.AddArg(mem)
17853 return true
17854 }
17855 // match: (MOVWstorezero [4] {s} (ADDshiftLL [2] ptr0 idx0) x:(MOVWstorezeroidx4 ptr1 idx1 mem))
17856 // cond: x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)
17857 // result: (MOVDstorezeroidx ptr1 (SLLconst <idx1.Type> [2] idx1) mem)
17858 for {
17859 if v.AuxInt != 4 {
17860 break
17861 }
17862 s := v.Aux
17863 _ = v.Args[1]
17864 v_0 := v.Args[0]
17865 if v_0.Op != OpARM64ADDshiftLL {
17866 break
17867 }
17868 if v_0.AuxInt != 2 {
17869 break
17870 }
17871 idx0 := v_0.Args[1]
17872 ptr0 := v_0.Args[0]
17873 x := v.Args[1]
17874 if x.Op != OpARM64MOVWstorezeroidx4 {
17875 break
17876 }
17877 mem := x.Args[2]
17878 ptr1 := x.Args[0]
17879 idx1 := x.Args[1]
17880 if !(x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)) {
17881 break
17882 }
17883 v.reset(OpARM64MOVDstorezeroidx)
17884 v.AddArg(ptr1)
17885 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, idx1.Type)
17886 v0.AuxInt = 2
17887 v0.AddArg(idx1)
17888 v.AddArg(v0)
17889 v.AddArg(mem)
17890 return true
17891 }
17892 return false
17893 }
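// rewriteValueARM64_OpARM64MOVWstorezeroidx_0 simplifies indexed zero stores:
// constant indexes become plain offsets, a shift-by-2 index selects the
// 4-byte-scaled form, and an adjacent pair merges into MOVDstorezeroidx.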
17894 func rewriteValueARM64_OpARM64MOVWstorezeroidx_0(v *Value) bool {
17895 // match: (MOVWstorezeroidx ptr (MOVDconst [c]) mem)
17896 // cond:
17897 // result: (MOVWstorezero [c] ptr mem)
17898 for {
17899 mem := v.Args[2]
17900 ptr := v.Args[0]
17901 v_1 := v.Args[1]
17902 if v_1.Op != OpARM64MOVDconst {
17903 break
17904 }
17905 c := v_1.AuxInt
17906 v.reset(OpARM64MOVWstorezero)
17907 v.AuxInt = c
17908 v.AddArg(ptr)
17909 v.AddArg(mem)
17910 return true
17911 }
17912
17913
17914
17915 for {
17916 mem := v.Args[2]
17917 v_0 := v.Args[0]
17918 if v_0.Op != OpARM64MOVDconst {
17919 break
17920 }
17921 c := v_0.AuxInt
17922 idx := v.Args[1]
17923 v.reset(OpARM64MOVWstorezero)
17924 v.AuxInt = c
17925 v.AddArg(idx)
17926 v.AddArg(mem)
17927 return true
17928 }
17929
17930
17931
17932 for {
17933 mem := v.Args[2]
17934 ptr := v.Args[0]
17935 v_1 := v.Args[1]
17936 if v_1.Op != OpARM64SLLconst {
17937 break
17938 }
17939 if v_1.AuxInt != 2 {
17940 break
17941 }
17942 idx := v_1.Args[0]
17943 v.reset(OpARM64MOVWstorezeroidx4)
17944 v.AddArg(ptr)
17945 v.AddArg(idx)
17946 v.AddArg(mem)
17947 return true
17948 }
17949
17950
17951
17952 for {
17953 mem := v.Args[2]
17954 v_0 := v.Args[0]
17955 if v_0.Op != OpARM64SLLconst {
17956 break
17957 }
17958 if v_0.AuxInt != 2 {
17959 break
17960 }
17961 idx := v_0.Args[0]
17962 ptr := v.Args[1]
17963 v.reset(OpARM64MOVWstorezeroidx4)
17964 v.AddArg(ptr)
17965 v.AddArg(idx)
17966 v.AddArg(mem)
17967 return true
17968 }
17969 // match: (MOVWstorezeroidx ptr (ADDconst [4] idx) x:(MOVWstorezeroidx ptr idx mem))
17970 // cond: x.Uses == 1 && clobber(x)
17971 // result: (MOVDstorezeroidx ptr idx mem)
17972 for {
17973 _ = v.Args[2]
17974 ptr := v.Args[0]
17975 v_1 := v.Args[1]
17976 if v_1.Op != OpARM64ADDconst {
17977 break
17978 }
17979 if v_1.AuxInt != 4 {
17980 break
17981 }
17982 idx := v_1.Args[0]
17983 x := v.Args[2]
17984 if x.Op != OpARM64MOVWstorezeroidx {
17985 break
17986 }
17987 mem := x.Args[2]
17988 if ptr != x.Args[0] {
17989 break
17990 }
17991 if idx != x.Args[1] {
17992 break
17993 }
17994 if !(x.Uses == 1 && clobber(x)) {
17995 break
17996 }
17997 v.reset(OpARM64MOVDstorezeroidx)
17998 v.AddArg(ptr)
17999 v.AddArg(idx)
18000 v.AddArg(mem)
18001 return true
18002 }
18003 return false
18004 }
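// rewriteValueARM64_OpARM64MOVWstorezeroidx4_0 folds a constant index of a
// 4-byte-scaled zero store into the offset of a plain MOVWstorezero.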
18005 func rewriteValueARM64_OpARM64MOVWstorezeroidx4_0(v *Value) bool {
18006 // match: (MOVWstorezeroidx4 ptr (MOVDconst [c]) mem)
18007 // cond:
18008 // result: (MOVWstorezero [c<<2] ptr mem)
18009 for {
18010 mem := v.Args[2]
18011 ptr := v.Args[0]
18012 v_1 := v.Args[1]
18013 if v_1.Op != OpARM64MOVDconst {
18014 break
18015 }
18016 c := v_1.AuxInt
18017 v.reset(OpARM64MOVWstorezero)
18018 v.AuxInt = c << 2
18019 v.AddArg(ptr)
18020 v.AddArg(mem)
18021 return true
18022 }
18023 return false
18024 }
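// rewriteValueARM64_OpARM64MSUB_0 strength-reduces MSUB (a - x*y) when the
// multiplier y is a small or structured constant, replacing the multiply with
// adds, subtracts and shifted-operand forms.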
18025 func rewriteValueARM64_OpARM64MSUB_0(v *Value) bool {
18026 b := v.Block
18027 // match: (MSUB a x (MOVDconst [-1]))
18028 // cond:
18029 // result: (ADD a x)
18030 for {
18031 _ = v.Args[2]
18032 a := v.Args[0]
18033 x := v.Args[1]
18034 v_2 := v.Args[2]
18035 if v_2.Op != OpARM64MOVDconst {
18036 break
18037 }
18038 if v_2.AuxInt != -1 {
18039 break
18040 }
18041 v.reset(OpARM64ADD)
18042 v.AddArg(a)
18043 v.AddArg(x)
18044 return true
18045 }
18046
18047
18048
18049 for {
18050 _ = v.Args[2]
18051 a := v.Args[0]
18052 v_2 := v.Args[2]
18053 if v_2.Op != OpARM64MOVDconst {
18054 break
18055 }
18056 if v_2.AuxInt != 0 {
18057 break
18058 }
18059 v.reset(OpCopy)
18060 v.Type = a.Type
18061 v.AddArg(a)
18062 return true
18063 }
18064
18065
18066
18067 for {
18068 _ = v.Args[2]
18069 a := v.Args[0]
18070 x := v.Args[1]
18071 v_2 := v.Args[2]
18072 if v_2.Op != OpARM64MOVDconst {
18073 break
18074 }
18075 if v_2.AuxInt != 1 {
18076 break
18077 }
18078 v.reset(OpARM64SUB)
18079 v.AddArg(a)
18080 v.AddArg(x)
18081 return true
18082 }
18083 // match: (MSUB a x (MOVDconst [c]))
18084 // cond: isPowerOfTwo(c)
18085 // result: (SUBshiftLL a x [log2(c)])
18086 for {
18087 _ = v.Args[2]
18088 a := v.Args[0]
18089 x := v.Args[1]
18090 v_2 := v.Args[2]
18091 if v_2.Op != OpARM64MOVDconst {
18092 break
18093 }
18094 c := v_2.AuxInt
18095 if !(isPowerOfTwo(c)) {
18096 break
18097 }
18098 v.reset(OpARM64SUBshiftLL)
18099 v.AuxInt = log2(c)
18100 v.AddArg(a)
18101 v.AddArg(x)
18102 return true
18103 }
18104
18105
18106
18107 for {
18108 _ = v.Args[2]
18109 a := v.Args[0]
18110 x := v.Args[1]
18111 v_2 := v.Args[2]
18112 if v_2.Op != OpARM64MOVDconst {
18113 break
18114 }
18115 c := v_2.AuxInt
18116 if !(isPowerOfTwo(c-1) && c >= 3) {
18117 break
18118 }
18119 v.reset(OpARM64SUB)
18120 v.AddArg(a)
18121 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
18122 v0.AuxInt = log2(c - 1)
18123 v0.AddArg(x)
18124 v0.AddArg(x)
18125 v.AddArg(v0)
18126 return true
18127 }
18128
18129
18130
18131 for {
18132 _ = v.Args[2]
18133 a := v.Args[0]
18134 x := v.Args[1]
18135 v_2 := v.Args[2]
18136 if v_2.Op != OpARM64MOVDconst {
18137 break
18138 }
18139 c := v_2.AuxInt
18140 if !(isPowerOfTwo(c+1) && c >= 7) {
18141 break
18142 }
18143 v.reset(OpARM64ADD)
18144 v.AddArg(a)
18145 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
18146 v0.AuxInt = log2(c + 1)
18147 v0.AddArg(x)
18148 v0.AddArg(x)
18149 v.AddArg(v0)
18150 return true
18151 }
18152
18153
18154
18155 for {
18156 _ = v.Args[2]
18157 a := v.Args[0]
18158 x := v.Args[1]
18159 v_2 := v.Args[2]
18160 if v_2.Op != OpARM64MOVDconst {
18161 break
18162 }
18163 c := v_2.AuxInt
18164 if !(c%3 == 0 && isPowerOfTwo(c/3)) {
18165 break
18166 }
18167 v.reset(OpARM64ADDshiftLL)
18168 v.AuxInt = log2(c / 3)
18169 v.AddArg(a)
18170 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
18171 v0.AuxInt = 2
18172 v0.AddArg(x)
18173 v0.AddArg(x)
18174 v.AddArg(v0)
18175 return true
18176 }
18177
18178
18179
18180 for {
18181 _ = v.Args[2]
18182 a := v.Args[0]
18183 x := v.Args[1]
18184 v_2 := v.Args[2]
18185 if v_2.Op != OpARM64MOVDconst {
18186 break
18187 }
18188 c := v_2.AuxInt
18189 if !(c%5 == 0 && isPowerOfTwo(c/5)) {
18190 break
18191 }
18192 v.reset(OpARM64SUBshiftLL)
18193 v.AuxInt = log2(c / 5)
18194 v.AddArg(a)
18195 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
18196 v0.AuxInt = 2
18197 v0.AddArg(x)
18198 v0.AddArg(x)
18199 v.AddArg(v0)
18200 return true
18201 }
18202
18203
18204
18205 for {
18206 _ = v.Args[2]
18207 a := v.Args[0]
18208 x := v.Args[1]
18209 v_2 := v.Args[2]
18210 if v_2.Op != OpARM64MOVDconst {
18211 break
18212 }
18213 c := v_2.AuxInt
18214 if !(c%7 == 0 && isPowerOfTwo(c/7)) {
18215 break
18216 }
18217 v.reset(OpARM64ADDshiftLL)
18218 v.AuxInt = log2(c / 7)
18219 v.AddArg(a)
18220 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
18221 v0.AuxInt = 3
18222 v0.AddArg(x)
18223 v0.AddArg(x)
18224 v.AddArg(v0)
18225 return true
18226 }
18227
18228
18229
18230 for {
18231 _ = v.Args[2]
18232 a := v.Args[0]
18233 x := v.Args[1]
18234 v_2 := v.Args[2]
18235 if v_2.Op != OpARM64MOVDconst {
18236 break
18237 }
18238 c := v_2.AuxInt
18239 if !(c%9 == 0 && isPowerOfTwo(c/9)) {
18240 break
18241 }
18242 v.reset(OpARM64SUBshiftLL)
18243 v.AuxInt = log2(c / 9)
18244 v.AddArg(a)
18245 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
18246 v0.AuxInt = 3
18247 v0.AddArg(x)
18248 v0.AddArg(x)
18249 v.AddArg(v0)
18250 return true
18251 }
18252 return false
18253 }
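// rewriteValueARM64_OpARM64MSUB_10 applies the same constant strength
// reductions as MSUB_0 when the constant appears as the second operand of the
// multiply.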
18254 func rewriteValueARM64_OpARM64MSUB_10(v *Value) bool {
18255 b := v.Block
18256
18257
18258
18259 for {
18260 x := v.Args[2]
18261 a := v.Args[0]
18262 v_1 := v.Args[1]
18263 if v_1.Op != OpARM64MOVDconst {
18264 break
18265 }
18266 if v_1.AuxInt != -1 {
18267 break
18268 }
18269 v.reset(OpARM64ADD)
18270 v.AddArg(a)
18271 v.AddArg(x)
18272 return true
18273 }
18274
18275
18276
18277 for {
18278 _ = v.Args[2]
18279 a := v.Args[0]
18280 v_1 := v.Args[1]
18281 if v_1.Op != OpARM64MOVDconst {
18282 break
18283 }
18284 if v_1.AuxInt != 0 {
18285 break
18286 }
18287 v.reset(OpCopy)
18288 v.Type = a.Type
18289 v.AddArg(a)
18290 return true
18291 }
18292
18293
18294
18295 for {
18296 x := v.Args[2]
18297 a := v.Args[0]
18298 v_1 := v.Args[1]
18299 if v_1.Op != OpARM64MOVDconst {
18300 break
18301 }
18302 if v_1.AuxInt != 1 {
18303 break
18304 }
18305 v.reset(OpARM64SUB)
18306 v.AddArg(a)
18307 v.AddArg(x)
18308 return true
18309 }
18310
18311
18312
18313 for {
18314 x := v.Args[2]
18315 a := v.Args[0]
18316 v_1 := v.Args[1]
18317 if v_1.Op != OpARM64MOVDconst {
18318 break
18319 }
18320 c := v_1.AuxInt
18321 if !(isPowerOfTwo(c)) {
18322 break
18323 }
18324 v.reset(OpARM64SUBshiftLL)
18325 v.AuxInt = log2(c)
18326 v.AddArg(a)
18327 v.AddArg(x)
18328 return true
18329 }
18330
18331
18332
18333 for {
18334 x := v.Args[2]
18335 a := v.Args[0]
18336 v_1 := v.Args[1]
18337 if v_1.Op != OpARM64MOVDconst {
18338 break
18339 }
18340 c := v_1.AuxInt
18341 if !(isPowerOfTwo(c-1) && c >= 3) {
18342 break
18343 }
18344 v.reset(OpARM64SUB)
18345 v.AddArg(a)
18346 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
18347 v0.AuxInt = log2(c - 1)
18348 v0.AddArg(x)
18349 v0.AddArg(x)
18350 v.AddArg(v0)
18351 return true
18352 }
18353
18354
18355
18356 for {
18357 x := v.Args[2]
18358 a := v.Args[0]
18359 v_1 := v.Args[1]
18360 if v_1.Op != OpARM64MOVDconst {
18361 break
18362 }
18363 c := v_1.AuxInt
18364 if !(isPowerOfTwo(c+1) && c >= 7) {
18365 break
18366 }
18367 v.reset(OpARM64ADD)
18368 v.AddArg(a)
18369 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
18370 v0.AuxInt = log2(c + 1)
18371 v0.AddArg(x)
18372 v0.AddArg(x)
18373 v.AddArg(v0)
18374 return true
18375 }
18376
18377
18378
18379 for {
18380 x := v.Args[2]
18381 a := v.Args[0]
18382 v_1 := v.Args[1]
18383 if v_1.Op != OpARM64MOVDconst {
18384 break
18385 }
18386 c := v_1.AuxInt
18387 if !(c%3 == 0 && isPowerOfTwo(c/3)) {
18388 break
18389 }
18390 v.reset(OpARM64ADDshiftLL)
18391 v.AuxInt = log2(c / 3)
18392 v.AddArg(a)
18393 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
18394 v0.AuxInt = 2
18395 v0.AddArg(x)
18396 v0.AddArg(x)
18397 v.AddArg(v0)
18398 return true
18399 }
18400
18401
18402
18403 for {
18404 x := v.Args[2]
18405 a := v.Args[0]
18406 v_1 := v.Args[1]
18407 if v_1.Op != OpARM64MOVDconst {
18408 break
18409 }
18410 c := v_1.AuxInt
18411 if !(c%5 == 0 && isPowerOfTwo(c/5)) {
18412 break
18413 }
18414 v.reset(OpARM64SUBshiftLL)
18415 v.AuxInt = log2(c / 5)
18416 v.AddArg(a)
18417 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
18418 v0.AuxInt = 2
18419 v0.AddArg(x)
18420 v0.AddArg(x)
18421 v.AddArg(v0)
18422 return true
18423 }
18424
18425
18426
18427 for {
18428 x := v.Args[2]
18429 a := v.Args[0]
18430 v_1 := v.Args[1]
18431 if v_1.Op != OpARM64MOVDconst {
18432 break
18433 }
18434 c := v_1.AuxInt
18435 if !(c%7 == 0 && isPowerOfTwo(c/7)) {
18436 break
18437 }
18438 v.reset(OpARM64ADDshiftLL)
18439 v.AuxInt = log2(c / 7)
18440 v.AddArg(a)
18441 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
18442 v0.AuxInt = 3
18443 v0.AddArg(x)
18444 v0.AddArg(x)
18445 v.AddArg(v0)
18446 return true
18447 }
18448
18449
18450
18451 for {
18452 x := v.Args[2]
18453 a := v.Args[0]
18454 v_1 := v.Args[1]
18455 if v_1.Op != OpARM64MOVDconst {
18456 break
18457 }
18458 c := v_1.AuxInt
18459 if !(c%9 == 0 && isPowerOfTwo(c/9)) {
18460 break
18461 }
18462 v.reset(OpARM64SUBshiftLL)
18463 v.AuxInt = log2(c / 9)
18464 v.AddArg(a)
18465 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
18466 v0.AuxInt = 3
18467 v0.AddArg(x)
18468 v0.AddArg(x)
18469 v.AddArg(v0)
18470 return true
18471 }
18472 return false
18473 }
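// rewriteValueARM64_OpARM64MSUB_20 handles MSUB with a constant minuend
// (rewritten via MNEG) and folds MSUB of two constant multiplicands into a
// SUBconst.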
18474 func rewriteValueARM64_OpARM64MSUB_20(v *Value) bool {
18475 b := v.Block
18476 // match: (MSUB (MOVDconst [c]) x y)
18477 // cond:
18478 // result: (ADDconst [c] (MNEG <x.Type> x y))
18479 for {
18480 y := v.Args[2]
18481 v_0 := v.Args[0]
18482 if v_0.Op != OpARM64MOVDconst {
18483 break
18484 }
18485 c := v_0.AuxInt
18486 x := v.Args[1]
18487 v.reset(OpARM64ADDconst)
18488 v.AuxInt = c
18489 v0 := b.NewValue0(v.Pos, OpARM64MNEG, x.Type)
18490 v0.AddArg(x)
18491 v0.AddArg(y)
18492 v.AddArg(v0)
18493 return true
18494 }
18495 // match: (MSUB a (MOVDconst [c]) (MOVDconst [d]))
18496 // cond:
18497 // result: (SUBconst [c*d] a)
18498 for {
18499 _ = v.Args[2]
18500 a := v.Args[0]
18501 v_1 := v.Args[1]
18502 if v_1.Op != OpARM64MOVDconst {
18503 break
18504 }
18505 c := v_1.AuxInt
18506 v_2 := v.Args[2]
18507 if v_2.Op != OpARM64MOVDconst {
18508 break
18509 }
18510 d := v_2.AuxInt
18511 v.reset(OpARM64SUBconst)
18512 v.AuxInt = c * d
18513 v.AddArg(a)
18514 return true
18515 }
18516 return false
18517 }
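// rewriteValueARM64_OpARM64MSUBW_0 is the 32-bit variant of the MSUB strength
// reductions; the conditions test the constant as an int32.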
18518 func rewriteValueARM64_OpARM64MSUBW_0(v *Value) bool {
18519 b := v.Block
18520
18521
18522
18523 for {
18524 _ = v.Args[2]
18525 a := v.Args[0]
18526 x := v.Args[1]
18527 v_2 := v.Args[2]
18528 if v_2.Op != OpARM64MOVDconst {
18529 break
18530 }
18531 c := v_2.AuxInt
18532 if !(int32(c) == -1) {
18533 break
18534 }
18535 v.reset(OpARM64ADD)
18536 v.AddArg(a)
18537 v.AddArg(x)
18538 return true
18539 }
18540
18541
18542
18543 for {
18544 _ = v.Args[2]
18545 a := v.Args[0]
18546 v_2 := v.Args[2]
18547 if v_2.Op != OpARM64MOVDconst {
18548 break
18549 }
18550 c := v_2.AuxInt
18551 if !(int32(c) == 0) {
18552 break
18553 }
18554 v.reset(OpCopy)
18555 v.Type = a.Type
18556 v.AddArg(a)
18557 return true
18558 }
18559
18560
18561
18562 for {
18563 _ = v.Args[2]
18564 a := v.Args[0]
18565 x := v.Args[1]
18566 v_2 := v.Args[2]
18567 if v_2.Op != OpARM64MOVDconst {
18568 break
18569 }
18570 c := v_2.AuxInt
18571 if !(int32(c) == 1) {
18572 break
18573 }
18574 v.reset(OpARM64SUB)
18575 v.AddArg(a)
18576 v.AddArg(x)
18577 return true
18578 }
18579
18580
18581
18582 for {
18583 _ = v.Args[2]
18584 a := v.Args[0]
18585 x := v.Args[1]
18586 v_2 := v.Args[2]
18587 if v_2.Op != OpARM64MOVDconst {
18588 break
18589 }
18590 c := v_2.AuxInt
18591 if !(isPowerOfTwo(c)) {
18592 break
18593 }
18594 v.reset(OpARM64SUBshiftLL)
18595 v.AuxInt = log2(c)
18596 v.AddArg(a)
18597 v.AddArg(x)
18598 return true
18599 }
18600
18601
18602
18603 for {
18604 _ = v.Args[2]
18605 a := v.Args[0]
18606 x := v.Args[1]
18607 v_2 := v.Args[2]
18608 if v_2.Op != OpARM64MOVDconst {
18609 break
18610 }
18611 c := v_2.AuxInt
18612 if !(isPowerOfTwo(c-1) && int32(c) >= 3) {
18613 break
18614 }
18615 v.reset(OpARM64SUB)
18616 v.AddArg(a)
18617 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
18618 v0.AuxInt = log2(c - 1)
18619 v0.AddArg(x)
18620 v0.AddArg(x)
18621 v.AddArg(v0)
18622 return true
18623 }
18624
18625
18626
18627 for {
18628 _ = v.Args[2]
18629 a := v.Args[0]
18630 x := v.Args[1]
18631 v_2 := v.Args[2]
18632 if v_2.Op != OpARM64MOVDconst {
18633 break
18634 }
18635 c := v_2.AuxInt
18636 if !(isPowerOfTwo(c+1) && int32(c) >= 7) {
18637 break
18638 }
18639 v.reset(OpARM64ADD)
18640 v.AddArg(a)
18641 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
18642 v0.AuxInt = log2(c + 1)
18643 v0.AddArg(x)
18644 v0.AddArg(x)
18645 v.AddArg(v0)
18646 return true
18647 }
18648
18649
18650
18651 for {
18652 _ = v.Args[2]
18653 a := v.Args[0]
18654 x := v.Args[1]
18655 v_2 := v.Args[2]
18656 if v_2.Op != OpARM64MOVDconst {
18657 break
18658 }
18659 c := v_2.AuxInt
18660 if !(c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)) {
18661 break
18662 }
18663 v.reset(OpARM64ADDshiftLL)
18664 v.AuxInt = log2(c / 3)
18665 v.AddArg(a)
18666 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
18667 v0.AuxInt = 2
18668 v0.AddArg(x)
18669 v0.AddArg(x)
18670 v.AddArg(v0)
18671 return true
18672 }
18673
18674
18675
18676 for {
18677 _ = v.Args[2]
18678 a := v.Args[0]
18679 x := v.Args[1]
18680 v_2 := v.Args[2]
18681 if v_2.Op != OpARM64MOVDconst {
18682 break
18683 }
18684 c := v_2.AuxInt
18685 if !(c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)) {
18686 break
18687 }
18688 v.reset(OpARM64SUBshiftLL)
18689 v.AuxInt = log2(c / 5)
18690 v.AddArg(a)
18691 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
18692 v0.AuxInt = 2
18693 v0.AddArg(x)
18694 v0.AddArg(x)
18695 v.AddArg(v0)
18696 return true
18697 }
18698
18699
18700
18701 for {
18702 _ = v.Args[2]
18703 a := v.Args[0]
18704 x := v.Args[1]
18705 v_2 := v.Args[2]
18706 if v_2.Op != OpARM64MOVDconst {
18707 break
18708 }
18709 c := v_2.AuxInt
18710 if !(c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)) {
18711 break
18712 }
18713 v.reset(OpARM64ADDshiftLL)
18714 v.AuxInt = log2(c / 7)
18715 v.AddArg(a)
18716 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
18717 v0.AuxInt = 3
18718 v0.AddArg(x)
18719 v0.AddArg(x)
18720 v.AddArg(v0)
18721 return true
18722 }
18723
18724
18725
18726 for {
18727 _ = v.Args[2]
18728 a := v.Args[0]
18729 x := v.Args[1]
18730 v_2 := v.Args[2]
18731 if v_2.Op != OpARM64MOVDconst {
18732 break
18733 }
18734 c := v_2.AuxInt
18735 if !(c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)) {
18736 break
18737 }
18738 v.reset(OpARM64SUBshiftLL)
18739 v.AuxInt = log2(c / 9)
18740 v.AddArg(a)
18741 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
18742 v0.AuxInt = 3
18743 v0.AddArg(x)
18744 v0.AddArg(x)
18745 v.AddArg(v0)
18746 return true
18747 }
18748 return false
18749 }
18750 func rewriteValueARM64_OpARM64MSUBW_10(v *Value) bool {
18751 b := v.Block
18752
18753
18754
18755 for {
18756 x := v.Args[2]
18757 a := v.Args[0]
18758 v_1 := v.Args[1]
18759 if v_1.Op != OpARM64MOVDconst {
18760 break
18761 }
18762 c := v_1.AuxInt
18763 if !(int32(c) == -1) {
18764 break
18765 }
18766 v.reset(OpARM64ADD)
18767 v.AddArg(a)
18768 v.AddArg(x)
18769 return true
18770 }
18771
18772
18773
18774 for {
18775 _ = v.Args[2]
18776 a := v.Args[0]
18777 v_1 := v.Args[1]
18778 if v_1.Op != OpARM64MOVDconst {
18779 break
18780 }
18781 c := v_1.AuxInt
18782 if !(int32(c) == 0) {
18783 break
18784 }
18785 v.reset(OpCopy)
18786 v.Type = a.Type
18787 v.AddArg(a)
18788 return true
18789 }
18790
18791
18792
18793 for {
18794 x := v.Args[2]
18795 a := v.Args[0]
18796 v_1 := v.Args[1]
18797 if v_1.Op != OpARM64MOVDconst {
18798 break
18799 }
18800 c := v_1.AuxInt
18801 if !(int32(c) == 1) {
18802 break
18803 }
18804 v.reset(OpARM64SUB)
18805 v.AddArg(a)
18806 v.AddArg(x)
18807 return true
18808 }
18809
18810
18811
18812 for {
18813 x := v.Args[2]
18814 a := v.Args[0]
18815 v_1 := v.Args[1]
18816 if v_1.Op != OpARM64MOVDconst {
18817 break
18818 }
18819 c := v_1.AuxInt
18820 if !(isPowerOfTwo(c)) {
18821 break
18822 }
18823 v.reset(OpARM64SUBshiftLL)
18824 v.AuxInt = log2(c)
18825 v.AddArg(a)
18826 v.AddArg(x)
18827 return true
18828 }
18829
18830
18831
18832 for {
18833 x := v.Args[2]
18834 a := v.Args[0]
18835 v_1 := v.Args[1]
18836 if v_1.Op != OpARM64MOVDconst {
18837 break
18838 }
18839 c := v_1.AuxInt
18840 if !(isPowerOfTwo(c-1) && int32(c) >= 3) {
18841 break
18842 }
18843 v.reset(OpARM64SUB)
18844 v.AddArg(a)
18845 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
18846 v0.AuxInt = log2(c - 1)
18847 v0.AddArg(x)
18848 v0.AddArg(x)
18849 v.AddArg(v0)
18850 return true
18851 }
18852
18853
18854
18855 for {
18856 x := v.Args[2]
18857 a := v.Args[0]
18858 v_1 := v.Args[1]
18859 if v_1.Op != OpARM64MOVDconst {
18860 break
18861 }
18862 c := v_1.AuxInt
18863 if !(isPowerOfTwo(c+1) && int32(c) >= 7) {
18864 break
18865 }
18866 v.reset(OpARM64ADD)
18867 v.AddArg(a)
18868 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
18869 v0.AuxInt = log2(c + 1)
18870 v0.AddArg(x)
18871 v0.AddArg(x)
18872 v.AddArg(v0)
18873 return true
18874 }
18875
18876
18877
18878 for {
18879 x := v.Args[2]
18880 a := v.Args[0]
18881 v_1 := v.Args[1]
18882 if v_1.Op != OpARM64MOVDconst {
18883 break
18884 }
18885 c := v_1.AuxInt
18886 if !(c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)) {
18887 break
18888 }
18889 v.reset(OpARM64ADDshiftLL)
18890 v.AuxInt = log2(c / 3)
18891 v.AddArg(a)
18892 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
18893 v0.AuxInt = 2
18894 v0.AddArg(x)
18895 v0.AddArg(x)
18896 v.AddArg(v0)
18897 return true
18898 }
18899
18900
18901
18902 for {
18903 x := v.Args[2]
18904 a := v.Args[0]
18905 v_1 := v.Args[1]
18906 if v_1.Op != OpARM64MOVDconst {
18907 break
18908 }
18909 c := v_1.AuxInt
18910 if !(c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)) {
18911 break
18912 }
18913 v.reset(OpARM64SUBshiftLL)
18914 v.AuxInt = log2(c / 5)
18915 v.AddArg(a)
18916 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
18917 v0.AuxInt = 2
18918 v0.AddArg(x)
18919 v0.AddArg(x)
18920 v.AddArg(v0)
18921 return true
18922 }
18923
18924
18925
18926 for {
18927 x := v.Args[2]
18928 a := v.Args[0]
18929 v_1 := v.Args[1]
18930 if v_1.Op != OpARM64MOVDconst {
18931 break
18932 }
18933 c := v_1.AuxInt
18934 if !(c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)) {
18935 break
18936 }
18937 v.reset(OpARM64ADDshiftLL)
18938 v.AuxInt = log2(c / 7)
18939 v.AddArg(a)
18940 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
18941 v0.AuxInt = 3
18942 v0.AddArg(x)
18943 v0.AddArg(x)
18944 v.AddArg(v0)
18945 return true
18946 }
18947
18948
18949
18950 for {
18951 x := v.Args[2]
18952 a := v.Args[0]
18953 v_1 := v.Args[1]
18954 if v_1.Op != OpARM64MOVDconst {
18955 break
18956 }
18957 c := v_1.AuxInt
18958 if !(c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)) {
18959 break
18960 }
18961 v.reset(OpARM64SUBshiftLL)
18962 v.AuxInt = log2(c / 9)
18963 v.AddArg(a)
18964 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
18965 v0.AuxInt = 3
18966 v0.AddArg(x)
18967 v0.AddArg(x)
18968 v.AddArg(v0)
18969 return true
18970 }
18971 return false
18972 }
18973 func rewriteValueARM64_OpARM64MSUBW_20(v *Value) bool {
18974 b := v.Block
18975
18976
18977
18978 for {
18979 y := v.Args[2]
18980 v_0 := v.Args[0]
18981 if v_0.Op != OpARM64MOVDconst {
18982 break
18983 }
18984 c := v_0.AuxInt
18985 x := v.Args[1]
18986 v.reset(OpARM64ADDconst)
18987 v.AuxInt = c
18988 v0 := b.NewValue0(v.Pos, OpARM64MNEGW, x.Type)
18989 v0.AddArg(x)
18990 v0.AddArg(y)
18991 v.AddArg(v0)
18992 return true
18993 }
18994
18995
18996
18997 for {
18998 _ = v.Args[2]
18999 a := v.Args[0]
19000 v_1 := v.Args[1]
19001 if v_1.Op != OpARM64MOVDconst {
19002 break
19003 }
19004 c := v_1.AuxInt
19005 v_2 := v.Args[2]
19006 if v_2.Op != OpARM64MOVDconst {
19007 break
19008 }
19009 d := v_2.AuxInt
19010 v.reset(OpARM64SUBconst)
19011 v.AuxInt = int64(int32(c) * int32(d))
19012 v.AddArg(a)
19013 return true
19014 }
19015 return false
19016 }
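// rewriteValueARM64_OpARM64MUL_0 simplifies MUL: a negated operand becomes
// MNEG, multiplication by -1, 0 and 1 is folded away, and powers of two
// become left shifts.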
19017 func rewriteValueARM64_OpARM64MUL_0(v *Value) bool {
19018 // match: (MUL (NEG x) y)
19019 // cond:
19020 // result: (MNEG x y)
19021 for {
19022 y := v.Args[1]
19023 v_0 := v.Args[0]
19024 if v_0.Op != OpARM64NEG {
19025 break
19026 }
19027 x := v_0.Args[0]
19028 v.reset(OpARM64MNEG)
19029 v.AddArg(x)
19030 v.AddArg(y)
19031 return true
19032 }
19033
19034
19035
19036 for {
19037 _ = v.Args[1]
19038 y := v.Args[0]
19039 v_1 := v.Args[1]
19040 if v_1.Op != OpARM64NEG {
19041 break
19042 }
19043 x := v_1.Args[0]
19044 v.reset(OpARM64MNEG)
19045 v.AddArg(x)
19046 v.AddArg(y)
19047 return true
19048 }
19049
19050
19051
19052 for {
19053 _ = v.Args[1]
19054 x := v.Args[0]
19055 v_1 := v.Args[1]
19056 if v_1.Op != OpARM64MOVDconst {
19057 break
19058 }
19059 if v_1.AuxInt != -1 {
19060 break
19061 }
19062 v.reset(OpARM64NEG)
19063 v.AddArg(x)
19064 return true
19065 }
19066
19067
19068
19069 for {
19070 x := v.Args[1]
19071 v_0 := v.Args[0]
19072 if v_0.Op != OpARM64MOVDconst {
19073 break
19074 }
19075 if v_0.AuxInt != -1 {
19076 break
19077 }
19078 v.reset(OpARM64NEG)
19079 v.AddArg(x)
19080 return true
19081 }
19082
19083
19084
19085 for {
19086 _ = v.Args[1]
19087 v_1 := v.Args[1]
19088 if v_1.Op != OpARM64MOVDconst {
19089 break
19090 }
19091 if v_1.AuxInt != 0 {
19092 break
19093 }
19094 v.reset(OpARM64MOVDconst)
19095 v.AuxInt = 0
19096 return true
19097 }
19098
19099
19100
19101 for {
19102 _ = v.Args[1]
19103 v_0 := v.Args[0]
19104 if v_0.Op != OpARM64MOVDconst {
19105 break
19106 }
19107 if v_0.AuxInt != 0 {
19108 break
19109 }
19110 v.reset(OpARM64MOVDconst)
19111 v.AuxInt = 0
19112 return true
19113 }
19114
19115
19116
19117 for {
19118 _ = v.Args[1]
19119 x := v.Args[0]
19120 v_1 := v.Args[1]
19121 if v_1.Op != OpARM64MOVDconst {
19122 break
19123 }
19124 if v_1.AuxInt != 1 {
19125 break
19126 }
19127 v.reset(OpCopy)
19128 v.Type = x.Type
19129 v.AddArg(x)
19130 return true
19131 }
19132
19133
19134
19135 for {
19136 x := v.Args[1]
19137 v_0 := v.Args[0]
19138 if v_0.Op != OpARM64MOVDconst {
19139 break
19140 }
19141 if v_0.AuxInt != 1 {
19142 break
19143 }
19144 v.reset(OpCopy)
19145 v.Type = x.Type
19146 v.AddArg(x)
19147 return true
19148 }
19149 // match: (MUL x (MOVDconst [c]))
19150 // cond: isPowerOfTwo(c)
19151 // result: (SLLconst [log2(c)] x)
19152 for {
19153 _ = v.Args[1]
19154 x := v.Args[0]
19155 v_1 := v.Args[1]
19156 if v_1.Op != OpARM64MOVDconst {
19157 break
19158 }
19159 c := v_1.AuxInt
19160 if !(isPowerOfTwo(c)) {
19161 break
19162 }
19163 v.reset(OpARM64SLLconst)
19164 v.AuxInt = log2(c)
19165 v.AddArg(x)
19166 return true
19167 }
19168
19169
19170
19171 for {
19172 x := v.Args[1]
19173 v_0 := v.Args[0]
19174 if v_0.Op != OpARM64MOVDconst {
19175 break
19176 }
19177 c := v_0.AuxInt
19178 if !(isPowerOfTwo(c)) {
19179 break
19180 }
19181 v.reset(OpARM64SLLconst)
19182 v.AuxInt = log2(c)
19183 v.AddArg(x)
19184 return true
19185 }
19186 return false
19187 }
19188 func rewriteValueARM64_OpARM64MUL_10(v *Value) bool {
19189 b := v.Block
19190
19191
19192
19193 for {
19194 _ = v.Args[1]
19195 x := v.Args[0]
19196 v_1 := v.Args[1]
19197 if v_1.Op != OpARM64MOVDconst {
19198 break
19199 }
19200 c := v_1.AuxInt
19201 if !(isPowerOfTwo(c-1) && c >= 3) {
19202 break
19203 }
19204 v.reset(OpARM64ADDshiftLL)
19205 v.AuxInt = log2(c - 1)
19206 v.AddArg(x)
19207 v.AddArg(x)
19208 return true
19209 }
19210
19211
19212
19213 for {
19214 x := v.Args[1]
19215 v_0 := v.Args[0]
19216 if v_0.Op != OpARM64MOVDconst {
19217 break
19218 }
19219 c := v_0.AuxInt
19220 if !(isPowerOfTwo(c-1) && c >= 3) {
19221 break
19222 }
19223 v.reset(OpARM64ADDshiftLL)
19224 v.AuxInt = log2(c - 1)
19225 v.AddArg(x)
19226 v.AddArg(x)
19227 return true
19228 }
19229
19230
19231
19232 for {
19233 _ = v.Args[1]
19234 x := v.Args[0]
19235 v_1 := v.Args[1]
19236 if v_1.Op != OpARM64MOVDconst {
19237 break
19238 }
19239 c := v_1.AuxInt
19240 if !(isPowerOfTwo(c+1) && c >= 7) {
19241 break
19242 }
19243 v.reset(OpARM64ADDshiftLL)
19244 v.AuxInt = log2(c + 1)
19245 v0 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
19246 v0.AddArg(x)
19247 v.AddArg(v0)
19248 v.AddArg(x)
19249 return true
19250 }
19251
19252
19253
19254 for {
19255 x := v.Args[1]
19256 v_0 := v.Args[0]
19257 if v_0.Op != OpARM64MOVDconst {
19258 break
19259 }
19260 c := v_0.AuxInt
19261 if !(isPowerOfTwo(c+1) && c >= 7) {
19262 break
19263 }
19264 v.reset(OpARM64ADDshiftLL)
19265 v.AuxInt = log2(c + 1)
19266 v0 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
19267 v0.AddArg(x)
19268 v.AddArg(v0)
19269 v.AddArg(x)
19270 return true
19271 }
19272
19273
19274
19275 for {
19276 _ = v.Args[1]
19277 x := v.Args[0]
19278 v_1 := v.Args[1]
19279 if v_1.Op != OpARM64MOVDconst {
19280 break
19281 }
19282 c := v_1.AuxInt
19283 if !(c%3 == 0 && isPowerOfTwo(c/3)) {
19284 break
19285 }
19286 v.reset(OpARM64SLLconst)
19287 v.AuxInt = log2(c / 3)
19288 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
19289 v0.AuxInt = 1
19290 v0.AddArg(x)
19291 v0.AddArg(x)
19292 v.AddArg(v0)
19293 return true
19294 }
19295
19296
19297
19298 for {
19299 x := v.Args[1]
19300 v_0 := v.Args[0]
19301 if v_0.Op != OpARM64MOVDconst {
19302 break
19303 }
19304 c := v_0.AuxInt
19305 if !(c%3 == 0 && isPowerOfTwo(c/3)) {
19306 break
19307 }
19308 v.reset(OpARM64SLLconst)
19309 v.AuxInt = log2(c / 3)
19310 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
19311 v0.AuxInt = 1
19312 v0.AddArg(x)
19313 v0.AddArg(x)
19314 v.AddArg(v0)
19315 return true
19316 }
19317
19318
19319
19320 for {
19321 _ = v.Args[1]
19322 x := v.Args[0]
19323 v_1 := v.Args[1]
19324 if v_1.Op != OpARM64MOVDconst {
19325 break
19326 }
19327 c := v_1.AuxInt
19328 if !(c%5 == 0 && isPowerOfTwo(c/5)) {
19329 break
19330 }
19331 v.reset(OpARM64SLLconst)
19332 v.AuxInt = log2(c / 5)
19333 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
19334 v0.AuxInt = 2
19335 v0.AddArg(x)
19336 v0.AddArg(x)
19337 v.AddArg(v0)
19338 return true
19339 }
19340
19341
19342
19343 for {
19344 x := v.Args[1]
19345 v_0 := v.Args[0]
19346 if v_0.Op != OpARM64MOVDconst {
19347 break
19348 }
19349 c := v_0.AuxInt
19350 if !(c%5 == 0 && isPowerOfTwo(c/5)) {
19351 break
19352 }
19353 v.reset(OpARM64SLLconst)
19354 v.AuxInt = log2(c / 5)
19355 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
19356 v0.AuxInt = 2
19357 v0.AddArg(x)
19358 v0.AddArg(x)
19359 v.AddArg(v0)
19360 return true
19361 }
19362
19363
19364
19365 for {
19366 _ = v.Args[1]
19367 x := v.Args[0]
19368 v_1 := v.Args[1]
19369 if v_1.Op != OpARM64MOVDconst {
19370 break
19371 }
19372 c := v_1.AuxInt
19373 if !(c%7 == 0 && isPowerOfTwo(c/7)) {
19374 break
19375 }
19376 v.reset(OpARM64SLLconst)
19377 v.AuxInt = log2(c / 7)
19378 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
19379 v0.AuxInt = 3
19380 v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
19381 v1.AddArg(x)
19382 v0.AddArg(v1)
19383 v0.AddArg(x)
19384 v.AddArg(v0)
19385 return true
19386 }
19387
19388
19389
19390 for {
19391 x := v.Args[1]
19392 v_0 := v.Args[0]
19393 if v_0.Op != OpARM64MOVDconst {
19394 break
19395 }
19396 c := v_0.AuxInt
19397 if !(c%7 == 0 && isPowerOfTwo(c/7)) {
19398 break
19399 }
19400 v.reset(OpARM64SLLconst)
19401 v.AuxInt = log2(c / 7)
19402 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
19403 v0.AuxInt = 3
19404 v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
19405 v1.AddArg(x)
19406 v0.AddArg(v1)
19407 v0.AddArg(x)
19408 v.AddArg(v0)
19409 return true
19410 }
19411 return false
19412 }
19413 func rewriteValueARM64_OpARM64MUL_20(v *Value) bool {
19414 b := v.Block
19415
19416
19417
19418 for {
19419 _ = v.Args[1]
19420 x := v.Args[0]
19421 v_1 := v.Args[1]
19422 if v_1.Op != OpARM64MOVDconst {
19423 break
19424 }
19425 c := v_1.AuxInt
19426 if !(c%9 == 0 && isPowerOfTwo(c/9)) {
19427 break
19428 }
19429 v.reset(OpARM64SLLconst)
19430 v.AuxInt = log2(c / 9)
19431 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
19432 v0.AuxInt = 3
19433 v0.AddArg(x)
19434 v0.AddArg(x)
19435 v.AddArg(v0)
19436 return true
19437 }
19438
19439
19440
19441 for {
19442 x := v.Args[1]
19443 v_0 := v.Args[0]
19444 if v_0.Op != OpARM64MOVDconst {
19445 break
19446 }
19447 c := v_0.AuxInt
19448 if !(c%9 == 0 && isPowerOfTwo(c/9)) {
19449 break
19450 }
19451 v.reset(OpARM64SLLconst)
19452 v.AuxInt = log2(c / 9)
19453 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
19454 v0.AuxInt = 3
19455 v0.AddArg(x)
19456 v0.AddArg(x)
19457 v.AddArg(v0)
19458 return true
19459 }
19460 // match: (MUL (MOVDconst [c]) (MOVDconst [d]))
19461 // cond:
19462 // result: (MOVDconst [c*d])
19463 for {
19464 _ = v.Args[1]
19465 v_0 := v.Args[0]
19466 if v_0.Op != OpARM64MOVDconst {
19467 break
19468 }
19469 c := v_0.AuxInt
19470 v_1 := v.Args[1]
19471 if v_1.Op != OpARM64MOVDconst {
19472 break
19473 }
19474 d := v_1.AuxInt
19475 v.reset(OpARM64MOVDconst)
19476 v.AuxInt = c * d
19477 return true
19478 }
19479
19480
19481
19482 for {
19483 _ = v.Args[1]
19484 v_0 := v.Args[0]
19485 if v_0.Op != OpARM64MOVDconst {
19486 break
19487 }
19488 d := v_0.AuxInt
19489 v_1 := v.Args[1]
19490 if v_1.Op != OpARM64MOVDconst {
19491 break
19492 }
19493 c := v_1.AuxInt
19494 v.reset(OpARM64MOVDconst)
19495 v.AuxInt = c * d
19496 return true
19497 }
19498 return false
19499 }
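// rewriteValueARM64_OpARM64MULW_0 applies the MUL simplifications to the
// 32-bit MULW, testing the constants as int32 values.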
19500 func rewriteValueARM64_OpARM64MULW_0(v *Value) bool {
19501
19502
19503
19504 for {
19505 y := v.Args[1]
19506 v_0 := v.Args[0]
19507 if v_0.Op != OpARM64NEG {
19508 break
19509 }
19510 x := v_0.Args[0]
19511 v.reset(OpARM64MNEGW)
19512 v.AddArg(x)
19513 v.AddArg(y)
19514 return true
19515 }
19516
19517
19518
19519 for {
19520 _ = v.Args[1]
19521 y := v.Args[0]
19522 v_1 := v.Args[1]
19523 if v_1.Op != OpARM64NEG {
19524 break
19525 }
19526 x := v_1.Args[0]
19527 v.reset(OpARM64MNEGW)
19528 v.AddArg(x)
19529 v.AddArg(y)
19530 return true
19531 }
19532
19533
19534
19535 for {
19536 _ = v.Args[1]
19537 x := v.Args[0]
19538 v_1 := v.Args[1]
19539 if v_1.Op != OpARM64MOVDconst {
19540 break
19541 }
19542 c := v_1.AuxInt
19543 if !(int32(c) == -1) {
19544 break
19545 }
19546 v.reset(OpARM64NEG)
19547 v.AddArg(x)
19548 return true
19549 }
19550
19551
19552
19553 for {
19554 x := v.Args[1]
19555 v_0 := v.Args[0]
19556 if v_0.Op != OpARM64MOVDconst {
19557 break
19558 }
19559 c := v_0.AuxInt
19560 if !(int32(c) == -1) {
19561 break
19562 }
19563 v.reset(OpARM64NEG)
19564 v.AddArg(x)
19565 return true
19566 }
19567
19568
19569
19570 for {
19571 _ = v.Args[1]
19572 v_1 := v.Args[1]
19573 if v_1.Op != OpARM64MOVDconst {
19574 break
19575 }
19576 c := v_1.AuxInt
19577 if !(int32(c) == 0) {
19578 break
19579 }
19580 v.reset(OpARM64MOVDconst)
19581 v.AuxInt = 0
19582 return true
19583 }
19584
19585
19586
19587 for {
19588 _ = v.Args[1]
19589 v_0 := v.Args[0]
19590 if v_0.Op != OpARM64MOVDconst {
19591 break
19592 }
19593 c := v_0.AuxInt
19594 if !(int32(c) == 0) {
19595 break
19596 }
19597 v.reset(OpARM64MOVDconst)
19598 v.AuxInt = 0
19599 return true
19600 }
19601
19602
19603
19604 for {
19605 _ = v.Args[1]
19606 x := v.Args[0]
19607 v_1 := v.Args[1]
19608 if v_1.Op != OpARM64MOVDconst {
19609 break
19610 }
19611 c := v_1.AuxInt
19612 if !(int32(c) == 1) {
19613 break
19614 }
19615 v.reset(OpCopy)
19616 v.Type = x.Type
19617 v.AddArg(x)
19618 return true
19619 }
19620
19621
19622
19623 for {
19624 x := v.Args[1]
19625 v_0 := v.Args[0]
19626 if v_0.Op != OpARM64MOVDconst {
19627 break
19628 }
19629 c := v_0.AuxInt
19630 if !(int32(c) == 1) {
19631 break
19632 }
19633 v.reset(OpCopy)
19634 v.Type = x.Type
19635 v.AddArg(x)
19636 return true
19637 }
19638
19639
19640
19641 for {
19642 _ = v.Args[1]
19643 x := v.Args[0]
19644 v_1 := v.Args[1]
19645 if v_1.Op != OpARM64MOVDconst {
19646 break
19647 }
19648 c := v_1.AuxInt
19649 if !(isPowerOfTwo(c)) {
19650 break
19651 }
19652 v.reset(OpARM64SLLconst)
19653 v.AuxInt = log2(c)
19654 v.AddArg(x)
19655 return true
19656 }
19657
19658
19659
19660 for {
19661 x := v.Args[1]
19662 v_0 := v.Args[0]
19663 if v_0.Op != OpARM64MOVDconst {
19664 break
19665 }
19666 c := v_0.AuxInt
19667 if !(isPowerOfTwo(c)) {
19668 break
19669 }
19670 v.reset(OpARM64SLLconst)
19671 v.AuxInt = log2(c)
19672 v.AddArg(x)
19673 return true
19674 }
19675 return false
19676 }
19677 func rewriteValueARM64_OpARM64MULW_10(v *Value) bool {
19678 b := v.Block
19679
19680
19681
19682 for {
19683 _ = v.Args[1]
19684 x := v.Args[0]
19685 v_1 := v.Args[1]
19686 if v_1.Op != OpARM64MOVDconst {
19687 break
19688 }
19689 c := v_1.AuxInt
19690 if !(isPowerOfTwo(c-1) && int32(c) >= 3) {
19691 break
19692 }
19693 v.reset(OpARM64ADDshiftLL)
19694 v.AuxInt = log2(c - 1)
19695 v.AddArg(x)
19696 v.AddArg(x)
19697 return true
19698 }
19699
19700
19701
19702 for {
19703 x := v.Args[1]
19704 v_0 := v.Args[0]
19705 if v_0.Op != OpARM64MOVDconst {
19706 break
19707 }
19708 c := v_0.AuxInt
19709 if !(isPowerOfTwo(c-1) && int32(c) >= 3) {
19710 break
19711 }
19712 v.reset(OpARM64ADDshiftLL)
19713 v.AuxInt = log2(c - 1)
19714 v.AddArg(x)
19715 v.AddArg(x)
19716 return true
19717 }
19718
19719
19720
19721 for {
19722 _ = v.Args[1]
19723 x := v.Args[0]
19724 v_1 := v.Args[1]
19725 if v_1.Op != OpARM64MOVDconst {
19726 break
19727 }
19728 c := v_1.AuxInt
19729 if !(isPowerOfTwo(c+1) && int32(c) >= 7) {
19730 break
19731 }
19732 v.reset(OpARM64ADDshiftLL)
19733 v.AuxInt = log2(c + 1)
19734 v0 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
19735 v0.AddArg(x)
19736 v.AddArg(v0)
19737 v.AddArg(x)
19738 return true
19739 }
19740
19741
19742
19743 for {
19744 x := v.Args[1]
19745 v_0 := v.Args[0]
19746 if v_0.Op != OpARM64MOVDconst {
19747 break
19748 }
19749 c := v_0.AuxInt
19750 if !(isPowerOfTwo(c+1) && int32(c) >= 7) {
19751 break
19752 }
19753 v.reset(OpARM64ADDshiftLL)
19754 v.AuxInt = log2(c + 1)
19755 v0 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
19756 v0.AddArg(x)
19757 v.AddArg(v0)
19758 v.AddArg(x)
19759 return true
19760 }
19761
19762
19763
19764 for {
19765 _ = v.Args[1]
19766 x := v.Args[0]
19767 v_1 := v.Args[1]
19768 if v_1.Op != OpARM64MOVDconst {
19769 break
19770 }
19771 c := v_1.AuxInt
19772 if !(c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)) {
19773 break
19774 }
19775 v.reset(OpARM64SLLconst)
19776 v.AuxInt = log2(c / 3)
19777 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
19778 v0.AuxInt = 1
19779 v0.AddArg(x)
19780 v0.AddArg(x)
19781 v.AddArg(v0)
19782 return true
19783 }
19784
19785
19786
19787 for {
19788 x := v.Args[1]
19789 v_0 := v.Args[0]
19790 if v_0.Op != OpARM64MOVDconst {
19791 break
19792 }
19793 c := v_0.AuxInt
19794 if !(c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)) {
19795 break
19796 }
19797 v.reset(OpARM64SLLconst)
19798 v.AuxInt = log2(c / 3)
19799 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
19800 v0.AuxInt = 1
19801 v0.AddArg(x)
19802 v0.AddArg(x)
19803 v.AddArg(v0)
19804 return true
19805 }
19806
19807
19808
19809 for {
19810 _ = v.Args[1]
19811 x := v.Args[0]
19812 v_1 := v.Args[1]
19813 if v_1.Op != OpARM64MOVDconst {
19814 break
19815 }
19816 c := v_1.AuxInt
19817 if !(c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)) {
19818 break
19819 }
19820 v.reset(OpARM64SLLconst)
19821 v.AuxInt = log2(c / 5)
19822 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
19823 v0.AuxInt = 2
19824 v0.AddArg(x)
19825 v0.AddArg(x)
19826 v.AddArg(v0)
19827 return true
19828 }
19829
19830
19831
19832 for {
19833 x := v.Args[1]
19834 v_0 := v.Args[0]
19835 if v_0.Op != OpARM64MOVDconst {
19836 break
19837 }
19838 c := v_0.AuxInt
19839 if !(c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)) {
19840 break
19841 }
19842 v.reset(OpARM64SLLconst)
19843 v.AuxInt = log2(c / 5)
19844 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
19845 v0.AuxInt = 2
19846 v0.AddArg(x)
19847 v0.AddArg(x)
19848 v.AddArg(v0)
19849 return true
19850 }
19851
19852
19853
19854 for {
19855 _ = v.Args[1]
19856 x := v.Args[0]
19857 v_1 := v.Args[1]
19858 if v_1.Op != OpARM64MOVDconst {
19859 break
19860 }
19861 c := v_1.AuxInt
19862 if !(c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)) {
19863 break
19864 }
19865 v.reset(OpARM64SLLconst)
19866 v.AuxInt = log2(c / 7)
19867 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
19868 v0.AuxInt = 3
19869 v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
19870 v1.AddArg(x)
19871 v0.AddArg(v1)
19872 v0.AddArg(x)
19873 v.AddArg(v0)
19874 return true
19875 }
19876
19877
19878
19879 for {
19880 x := v.Args[1]
19881 v_0 := v.Args[0]
19882 if v_0.Op != OpARM64MOVDconst {
19883 break
19884 }
19885 c := v_0.AuxInt
19886 if !(c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)) {
19887 break
19888 }
19889 v.reset(OpARM64SLLconst)
19890 v.AuxInt = log2(c / 7)
19891 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
19892 v0.AuxInt = 3
19893 v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
19894 v1.AddArg(x)
19895 v0.AddArg(v1)
19896 v0.AddArg(x)
19897 v.AddArg(v0)
19898 return true
19899 }
19900 return false
19901 }
19902 func rewriteValueARM64_OpARM64MULW_20(v *Value) bool {
19903 b := v.Block
19904
19905
19906
19907 for {
19908 _ = v.Args[1]
19909 x := v.Args[0]
19910 v_1 := v.Args[1]
19911 if v_1.Op != OpARM64MOVDconst {
19912 break
19913 }
19914 c := v_1.AuxInt
19915 if !(c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)) {
19916 break
19917 }
19918 v.reset(OpARM64SLLconst)
19919 v.AuxInt = log2(c / 9)
19920 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
19921 v0.AuxInt = 3
19922 v0.AddArg(x)
19923 v0.AddArg(x)
19924 v.AddArg(v0)
19925 return true
19926 }
19927
19928
19929
19930 for {
19931 x := v.Args[1]
19932 v_0 := v.Args[0]
19933 if v_0.Op != OpARM64MOVDconst {
19934 break
19935 }
19936 c := v_0.AuxInt
19937 if !(c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)) {
19938 break
19939 }
19940 v.reset(OpARM64SLLconst)
19941 v.AuxInt = log2(c / 9)
19942 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
19943 v0.AuxInt = 3
19944 v0.AddArg(x)
19945 v0.AddArg(x)
19946 v.AddArg(v0)
19947 return true
19948 }
19949
19950
19951
19952 for {
19953 _ = v.Args[1]
19954 v_0 := v.Args[0]
19955 if v_0.Op != OpARM64MOVDconst {
19956 break
19957 }
19958 c := v_0.AuxInt
19959 v_1 := v.Args[1]
19960 if v_1.Op != OpARM64MOVDconst {
19961 break
19962 }
19963 d := v_1.AuxInt
19964 v.reset(OpARM64MOVDconst)
19965 v.AuxInt = int64(int32(c) * int32(d))
19966 return true
19967 }
19968
19969
19970
19971 for {
19972 _ = v.Args[1]
19973 v_0 := v.Args[0]
19974 if v_0.Op != OpARM64MOVDconst {
19975 break
19976 }
19977 d := v_0.AuxInt
19978 v_1 := v.Args[1]
19979 if v_1.Op != OpARM64MOVDconst {
19980 break
19981 }
19982 c := v_1.AuxInt
19983 v.reset(OpARM64MOVDconst)
19984 v.AuxInt = int64(int32(c) * int32(d))
19985 return true
19986 }
19987 return false
19988 }
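// rewriteValueARM64_OpARM64MVN_0 folds MVN of a constant and merges MVN with
// a constant shift into the MVNshift* forms when the shift is otherwise dead.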
19989 func rewriteValueARM64_OpARM64MVN_0(v *Value) bool {
19990 // match: (MVN (MOVDconst [c]))
19991 // cond:
19992 // result: (MOVDconst [^c])
19993 for {
19994 v_0 := v.Args[0]
19995 if v_0.Op != OpARM64MOVDconst {
19996 break
19997 }
19998 c := v_0.AuxInt
19999 v.reset(OpARM64MOVDconst)
20000 v.AuxInt = ^c
20001 return true
20002 }
20003
20004
20005
20006 for {
20007 x := v.Args[0]
20008 if x.Op != OpARM64SLLconst {
20009 break
20010 }
20011 c := x.AuxInt
20012 y := x.Args[0]
20013 if !(clobberIfDead(x)) {
20014 break
20015 }
20016 v.reset(OpARM64MVNshiftLL)
20017 v.AuxInt = c
20018 v.AddArg(y)
20019 return true
20020 }
20021
20022
20023
20024 for {
20025 x := v.Args[0]
20026 if x.Op != OpARM64SRLconst {
20027 break
20028 }
20029 c := x.AuxInt
20030 y := x.Args[0]
20031 if !(clobberIfDead(x)) {
20032 break
20033 }
20034 v.reset(OpARM64MVNshiftRL)
20035 v.AuxInt = c
20036 v.AddArg(y)
20037 return true
20038 }
20039
20040
20041
20042 for {
20043 x := v.Args[0]
20044 if x.Op != OpARM64SRAconst {
20045 break
20046 }
20047 c := x.AuxInt
20048 y := x.Args[0]
20049 if !(clobberIfDead(x)) {
20050 break
20051 }
20052 v.reset(OpARM64MVNshiftRA)
20053 v.AuxInt = c
20054 v.AddArg(y)
20055 return true
20056 }
20057 return false
20058 }
20059 func rewriteValueARM64_OpARM64MVNshiftLL_0(v *Value) bool {
20060 // match: (MVNshiftLL (MOVDconst [c]) [d])
20061 // cond:
20062 // result: (MOVDconst [^int64(uint64(c)<<uint64(d))])
20063 for {
20064 d := v.AuxInt
20065 v_0 := v.Args[0]
20066 if v_0.Op != OpARM64MOVDconst {
20067 break
20068 }
20069 c := v_0.AuxInt
20070 v.reset(OpARM64MOVDconst)
20071 v.AuxInt = ^int64(uint64(c) << uint64(d))
20072 return true
20073 }
20074 return false
20075 }
20076 func rewriteValueARM64_OpARM64MVNshiftRA_0(v *Value) bool {
20077
20078
20079
20080 for {
20081 d := v.AuxInt
20082 v_0 := v.Args[0]
20083 if v_0.Op != OpARM64MOVDconst {
20084 break
20085 }
20086 c := v_0.AuxInt
20087 v.reset(OpARM64MOVDconst)
20088 v.AuxInt = ^(c >> uint64(d))
20089 return true
20090 }
20091 return false
20092 }
20093 func rewriteValueARM64_OpARM64MVNshiftRL_0(v *Value) bool {
20094
20095
20096
20097 for {
20098 d := v.AuxInt
20099 v_0 := v.Args[0]
20100 if v_0.Op != OpARM64MOVDconst {
20101 break
20102 }
20103 c := v_0.AuxInt
20104 v.reset(OpARM64MOVDconst)
20105 v.AuxInt = ^int64(uint64(c) >> uint64(d))
20106 return true
20107 }
20108 return false
20109 }
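// rewriteValueARM64_OpARM64NEG_0 turns NEG of a multiply into MNEG/MNEGW,
// negates constants directly, and folds a dead constant shift into the
// NEGshift* forms.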
20110 func rewriteValueARM64_OpARM64NEG_0(v *Value) bool {
20111 // match: (NEG (MUL x y))
20112 // cond:
20113 // result: (MNEG x y)
20114 for {
20115 v_0 := v.Args[0]
20116 if v_0.Op != OpARM64MUL {
20117 break
20118 }
20119 y := v_0.Args[1]
20120 x := v_0.Args[0]
20121 v.reset(OpARM64MNEG)
20122 v.AddArg(x)
20123 v.AddArg(y)
20124 return true
20125 }
20126
20127
20128
20129 for {
20130 v_0 := v.Args[0]
20131 if v_0.Op != OpARM64MULW {
20132 break
20133 }
20134 y := v_0.Args[1]
20135 x := v_0.Args[0]
20136 v.reset(OpARM64MNEGW)
20137 v.AddArg(x)
20138 v.AddArg(y)
20139 return true
20140 }
20141
20142
20143
20144 for {
20145 v_0 := v.Args[0]
20146 if v_0.Op != OpARM64MOVDconst {
20147 break
20148 }
20149 c := v_0.AuxInt
20150 v.reset(OpARM64MOVDconst)
20151 v.AuxInt = -c
20152 return true
20153 }
20154
20155
20156
20157 for {
20158 x := v.Args[0]
20159 if x.Op != OpARM64SLLconst {
20160 break
20161 }
20162 c := x.AuxInt
20163 y := x.Args[0]
20164 if !(clobberIfDead(x)) {
20165 break
20166 }
20167 v.reset(OpARM64NEGshiftLL)
20168 v.AuxInt = c
20169 v.AddArg(y)
20170 return true
20171 }
20172
20173
20174
20175 for {
20176 x := v.Args[0]
20177 if x.Op != OpARM64SRLconst {
20178 break
20179 }
20180 c := x.AuxInt
20181 y := x.Args[0]
20182 if !(clobberIfDead(x)) {
20183 break
20184 }
20185 v.reset(OpARM64NEGshiftRL)
20186 v.AuxInt = c
20187 v.AddArg(y)
20188 return true
20189 }
20190
20191
20192
20193 for {
20194 x := v.Args[0]
20195 if x.Op != OpARM64SRAconst {
20196 break
20197 }
20198 c := x.AuxInt
20199 y := x.Args[0]
20200 if !(clobberIfDead(x)) {
20201 break
20202 }
20203 v.reset(OpARM64NEGshiftRA)
20204 v.AuxInt = c
20205 v.AddArg(y)
20206 return true
20207 }
20208 return false
20209 }
20210 func rewriteValueARM64_OpARM64NEGshiftLL_0(v *Value) bool {
20211
20212
20213
20214 for {
20215 d := v.AuxInt
20216 v_0 := v.Args[0]
20217 if v_0.Op != OpARM64MOVDconst {
20218 break
20219 }
20220 c := v_0.AuxInt
20221 v.reset(OpARM64MOVDconst)
20222 v.AuxInt = -int64(uint64(c) << uint64(d))
20223 return true
20224 }
20225 return false
20226 }
20227 func rewriteValueARM64_OpARM64NEGshiftRA_0(v *Value) bool {
20228
20229
20230
20231 for {
20232 d := v.AuxInt
20233 v_0 := v.Args[0]
20234 if v_0.Op != OpARM64MOVDconst {
20235 break
20236 }
20237 c := v_0.AuxInt
20238 v.reset(OpARM64MOVDconst)
20239 v.AuxInt = -(c >> uint64(d))
20240 return true
20241 }
20242 return false
20243 }
20244 func rewriteValueARM64_OpARM64NEGshiftRL_0(v *Value) bool {
20245
20246
20247
20248 for {
20249 d := v.AuxInt
20250 v_0 := v.Args[0]
20251 if v_0.Op != OpARM64MOVDconst {
20252 break
20253 }
20254 c := v_0.AuxInt
20255 v.reset(OpARM64MOVDconst)
20256 v.AuxInt = -int64(uint64(c) >> uint64(d))
20257 return true
20258 }
20259 return false
20260 }
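// rewriteValueARM64_OpARM64NotEqual_0 evaluates NotEqual when the flags value
// is known and drops an InvertFlags, since inequality is unaffected by
// swapping the comparison operands.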
20261 func rewriteValueARM64_OpARM64NotEqual_0(v *Value) bool {
20262 // match: (NotEqual (FlagEQ))
20263 // cond:
20264 // result: (MOVDconst [0])
20265 for {
20266 v_0 := v.Args[0]
20267 if v_0.Op != OpARM64FlagEQ {
20268 break
20269 }
20270 v.reset(OpARM64MOVDconst)
20271 v.AuxInt = 0
20272 return true
20273 }
20274
20275
20276
20277 for {
20278 v_0 := v.Args[0]
20279 if v_0.Op != OpARM64FlagLT_ULT {
20280 break
20281 }
20282 v.reset(OpARM64MOVDconst)
20283 v.AuxInt = 1
20284 return true
20285 }
20286
20287
20288
20289 for {
20290 v_0 := v.Args[0]
20291 if v_0.Op != OpARM64FlagLT_UGT {
20292 break
20293 }
20294 v.reset(OpARM64MOVDconst)
20295 v.AuxInt = 1
20296 return true
20297 }
20298
20299
20300
20301 for {
20302 v_0 := v.Args[0]
20303 if v_0.Op != OpARM64FlagGT_ULT {
20304 break
20305 }
20306 v.reset(OpARM64MOVDconst)
20307 v.AuxInt = 1
20308 return true
20309 }
20310
20311
20312
20313 for {
20314 v_0 := v.Args[0]
20315 if v_0.Op != OpARM64FlagGT_UGT {
20316 break
20317 }
20318 v.reset(OpARM64MOVDconst)
20319 v.AuxInt = 1
20320 return true
20321 }
20322
20323
20324
20325 for {
20326 v_0 := v.Args[0]
20327 if v_0.Op != OpARM64InvertFlags {
20328 break
20329 }
20330 x := v_0.Args[0]
20331 v.reset(OpARM64NotEqual)
20332 v.AddArg(x)
20333 return true
20334 }
20335 return false
20336 }
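// rewriteValueARM64_OpARM64OR_0 folds constants into ORconst, simplifies
// (OR x x), uses ORN for an inverted operand, and merges a dead constant
// shift into the ORshift* forms.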
20337 func rewriteValueARM64_OpARM64OR_0(v *Value) bool {
20338 // match: (OR x (MOVDconst [c]))
20339 // cond:
20340 // result: (ORconst [c] x)
20341 for {
20342 _ = v.Args[1]
20343 x := v.Args[0]
20344 v_1 := v.Args[1]
20345 if v_1.Op != OpARM64MOVDconst {
20346 break
20347 }
20348 c := v_1.AuxInt
20349 v.reset(OpARM64ORconst)
20350 v.AuxInt = c
20351 v.AddArg(x)
20352 return true
20353 }
20354
20355
20356
20357 for {
20358 x := v.Args[1]
20359 v_0 := v.Args[0]
20360 if v_0.Op != OpARM64MOVDconst {
20361 break
20362 }
20363 c := v_0.AuxInt
20364 v.reset(OpARM64ORconst)
20365 v.AuxInt = c
20366 v.AddArg(x)
20367 return true
20368 }
20369
20370
20371
20372 for {
20373 x := v.Args[1]
20374 if x != v.Args[0] {
20375 break
20376 }
20377 v.reset(OpCopy)
20378 v.Type = x.Type
20379 v.AddArg(x)
20380 return true
20381 }
20382
20383
20384
20385 for {
20386 _ = v.Args[1]
20387 x := v.Args[0]
20388 v_1 := v.Args[1]
20389 if v_1.Op != OpARM64MVN {
20390 break
20391 }
20392 y := v_1.Args[0]
20393 v.reset(OpARM64ORN)
20394 v.AddArg(x)
20395 v.AddArg(y)
20396 return true
20397 }
20398
20399
20400
20401 for {
20402 x := v.Args[1]
20403 v_0 := v.Args[0]
20404 if v_0.Op != OpARM64MVN {
20405 break
20406 }
20407 y := v_0.Args[0]
20408 v.reset(OpARM64ORN)
20409 v.AddArg(x)
20410 v.AddArg(y)
20411 return true
20412 }
20413
20414
20415
20416 for {
20417 _ = v.Args[1]
20418 x0 := v.Args[0]
20419 x1 := v.Args[1]
20420 if x1.Op != OpARM64SLLconst {
20421 break
20422 }
20423 c := x1.AuxInt
20424 y := x1.Args[0]
20425 if !(clobberIfDead(x1)) {
20426 break
20427 }
20428 v.reset(OpARM64ORshiftLL)
20429 v.AuxInt = c
20430 v.AddArg(x0)
20431 v.AddArg(y)
20432 return true
20433 }
20434
20435
20436
20437 for {
20438 x0 := v.Args[1]
20439 x1 := v.Args[0]
20440 if x1.Op != OpARM64SLLconst {
20441 break
20442 }
20443 c := x1.AuxInt
20444 y := x1.Args[0]
20445 if !(clobberIfDead(x1)) {
20446 break
20447 }
20448 v.reset(OpARM64ORshiftLL)
20449 v.AuxInt = c
20450 v.AddArg(x0)
20451 v.AddArg(y)
20452 return true
20453 }
20454
20455
20456
20457 for {
20458 _ = v.Args[1]
20459 x0 := v.Args[0]
20460 x1 := v.Args[1]
20461 if x1.Op != OpARM64SRLconst {
20462 break
20463 }
20464 c := x1.AuxInt
20465 y := x1.Args[0]
20466 if !(clobberIfDead(x1)) {
20467 break
20468 }
20469 v.reset(OpARM64ORshiftRL)
20470 v.AuxInt = c
20471 v.AddArg(x0)
20472 v.AddArg(y)
20473 return true
20474 }
20475
20476
20477
20478 for {
20479 x0 := v.Args[1]
20480 x1 := v.Args[0]
20481 if x1.Op != OpARM64SRLconst {
20482 break
20483 }
20484 c := x1.AuxInt
20485 y := x1.Args[0]
20486 if !(clobberIfDead(x1)) {
20487 break
20488 }
20489 v.reset(OpARM64ORshiftRL)
20490 v.AuxInt = c
20491 v.AddArg(x0)
20492 v.AddArg(y)
20493 return true
20494 }
20495
20496
20497
20498 for {
20499 _ = v.Args[1]
20500 x0 := v.Args[0]
20501 x1 := v.Args[1]
20502 if x1.Op != OpARM64SRAconst {
20503 break
20504 }
20505 c := x1.AuxInt
20506 y := x1.Args[0]
20507 if !(clobberIfDead(x1)) {
20508 break
20509 }
20510 v.reset(OpARM64ORshiftRA)
20511 v.AuxInt = c
20512 v.AddArg(x0)
20513 v.AddArg(y)
20514 return true
20515 }
20516 return false
20517 }
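// rewriteValueARM64_OpARM64OR_10 continues the OR shift merging and
// recognizes the masked variable-shift idiom (SLL/SRL guarded by CSEL0 on a
// 64-bit shift-amount check) as a single ROR rotate.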
20518 func rewriteValueARM64_OpARM64OR_10(v *Value) bool {
20519 b := v.Block
20520 typ := &b.Func.Config.Types
20521
20522
20523
20524 for {
20525 x0 := v.Args[1]
20526 x1 := v.Args[0]
20527 if x1.Op != OpARM64SRAconst {
20528 break
20529 }
20530 c := x1.AuxInt
20531 y := x1.Args[0]
20532 if !(clobberIfDead(x1)) {
20533 break
20534 }
20535 v.reset(OpARM64ORshiftRA)
20536 v.AuxInt = c
20537 v.AddArg(x0)
20538 v.AddArg(y)
20539 return true
20540 }
20541 // match: (OR (SLL x (ANDconst <t> [63] y)) (CSEL0 <typ.UInt64> {cc} (SRL <typ.UInt64> x (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y))) (CMPconst [64] (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y)))))
20542 // cond: cc.(Op) == OpARM64LessThanU
20543 // result: (ROR x (NEG <t> y))
20544 for {
20545 _ = v.Args[1]
20546 v_0 := v.Args[0]
20547 if v_0.Op != OpARM64SLL {
20548 break
20549 }
20550 _ = v_0.Args[1]
20551 x := v_0.Args[0]
20552 v_0_1 := v_0.Args[1]
20553 if v_0_1.Op != OpARM64ANDconst {
20554 break
20555 }
20556 t := v_0_1.Type
20557 if v_0_1.AuxInt != 63 {
20558 break
20559 }
20560 y := v_0_1.Args[0]
20561 v_1 := v.Args[1]
20562 if v_1.Op != OpARM64CSEL0 {
20563 break
20564 }
20565 if v_1.Type != typ.UInt64 {
20566 break
20567 }
20568 cc := v_1.Aux
20569 _ = v_1.Args[1]
20570 v_1_0 := v_1.Args[0]
20571 if v_1_0.Op != OpARM64SRL {
20572 break
20573 }
20574 if v_1_0.Type != typ.UInt64 {
20575 break
20576 }
20577 _ = v_1_0.Args[1]
20578 if x != v_1_0.Args[0] {
20579 break
20580 }
20581 v_1_0_1 := v_1_0.Args[1]
20582 if v_1_0_1.Op != OpARM64SUB {
20583 break
20584 }
20585 if v_1_0_1.Type != t {
20586 break
20587 }
20588 _ = v_1_0_1.Args[1]
20589 v_1_0_1_0 := v_1_0_1.Args[0]
20590 if v_1_0_1_0.Op != OpARM64MOVDconst {
20591 break
20592 }
20593 if v_1_0_1_0.AuxInt != 64 {
20594 break
20595 }
20596 v_1_0_1_1 := v_1_0_1.Args[1]
20597 if v_1_0_1_1.Op != OpARM64ANDconst {
20598 break
20599 }
20600 if v_1_0_1_1.Type != t {
20601 break
20602 }
20603 if v_1_0_1_1.AuxInt != 63 {
20604 break
20605 }
20606 if y != v_1_0_1_1.Args[0] {
20607 break
20608 }
20609 v_1_1 := v_1.Args[1]
20610 if v_1_1.Op != OpARM64CMPconst {
20611 break
20612 }
20613 if v_1_1.AuxInt != 64 {
20614 break
20615 }
20616 v_1_1_0 := v_1_1.Args[0]
20617 if v_1_1_0.Op != OpARM64SUB {
20618 break
20619 }
20620 if v_1_1_0.Type != t {
20621 break
20622 }
20623 _ = v_1_1_0.Args[1]
20624 v_1_1_0_0 := v_1_1_0.Args[0]
20625 if v_1_1_0_0.Op != OpARM64MOVDconst {
20626 break
20627 }
20628 if v_1_1_0_0.AuxInt != 64 {
20629 break
20630 }
20631 v_1_1_0_1 := v_1_1_0.Args[1]
20632 if v_1_1_0_1.Op != OpARM64ANDconst {
20633 break
20634 }
20635 if v_1_1_0_1.Type != t {
20636 break
20637 }
20638 if v_1_1_0_1.AuxInt != 63 {
20639 break
20640 }
20641 if y != v_1_1_0_1.Args[0] {
20642 break
20643 }
20644 if !(cc.(Op) == OpARM64LessThanU) {
20645 break
20646 }
20647 v.reset(OpARM64ROR)
20648 v.AddArg(x)
20649 v0 := b.NewValue0(v.Pos, OpARM64NEG, t)
20650 v0.AddArg(y)
20651 v.AddArg(v0)
20652 return true
20653 }
20654
20655
20656
20657 for {
20658 _ = v.Args[1]
20659 v_0 := v.Args[0]
20660 if v_0.Op != OpARM64CSEL0 {
20661 break
20662 }
20663 if v_0.Type != typ.UInt64 {
20664 break
20665 }
20666 cc := v_0.Aux
20667 _ = v_0.Args[1]
20668 v_0_0 := v_0.Args[0]
20669 if v_0_0.Op != OpARM64SRL {
20670 break
20671 }
20672 if v_0_0.Type != typ.UInt64 {
20673 break
20674 }
20675 _ = v_0_0.Args[1]
20676 x := v_0_0.Args[0]
20677 v_0_0_1 := v_0_0.Args[1]
20678 if v_0_0_1.Op != OpARM64SUB {
20679 break
20680 }
20681 t := v_0_0_1.Type
20682 _ = v_0_0_1.Args[1]
20683 v_0_0_1_0 := v_0_0_1.Args[0]
20684 if v_0_0_1_0.Op != OpARM64MOVDconst {
20685 break
20686 }
20687 if v_0_0_1_0.AuxInt != 64 {
20688 break
20689 }
20690 v_0_0_1_1 := v_0_0_1.Args[1]
20691 if v_0_0_1_1.Op != OpARM64ANDconst {
20692 break
20693 }
20694 if v_0_0_1_1.Type != t {
20695 break
20696 }
20697 if v_0_0_1_1.AuxInt != 63 {
20698 break
20699 }
20700 y := v_0_0_1_1.Args[0]
20701 v_0_1 := v_0.Args[1]
20702 if v_0_1.Op != OpARM64CMPconst {
20703 break
20704 }
20705 if v_0_1.AuxInt != 64 {
20706 break
20707 }
20708 v_0_1_0 := v_0_1.Args[0]
20709 if v_0_1_0.Op != OpARM64SUB {
20710 break
20711 }
20712 if v_0_1_0.Type != t {
20713 break
20714 }
20715 _ = v_0_1_0.Args[1]
20716 v_0_1_0_0 := v_0_1_0.Args[0]
20717 if v_0_1_0_0.Op != OpARM64MOVDconst {
20718 break
20719 }
20720 if v_0_1_0_0.AuxInt != 64 {
20721 break
20722 }
20723 v_0_1_0_1 := v_0_1_0.Args[1]
20724 if v_0_1_0_1.Op != OpARM64ANDconst {
20725 break
20726 }
20727 if v_0_1_0_1.Type != t {
20728 break
20729 }
20730 if v_0_1_0_1.AuxInt != 63 {
20731 break
20732 }
20733 if y != v_0_1_0_1.Args[0] {
20734 break
20735 }
20736 v_1 := v.Args[1]
20737 if v_1.Op != OpARM64SLL {
20738 break
20739 }
20740 _ = v_1.Args[1]
20741 if x != v_1.Args[0] {
20742 break
20743 }
20744 v_1_1 := v_1.Args[1]
20745 if v_1_1.Op != OpARM64ANDconst {
20746 break
20747 }
20748 if v_1_1.Type != t {
20749 break
20750 }
20751 if v_1_1.AuxInt != 63 {
20752 break
20753 }
20754 if y != v_1_1.Args[0] {
20755 break
20756 }
20757 if !(cc.(Op) == OpARM64LessThanU) {
20758 break
20759 }
20760 v.reset(OpARM64ROR)
20761 v.AddArg(x)
20762 v0 := b.NewValue0(v.Pos, OpARM64NEG, t)
20763 v0.AddArg(y)
20764 v.AddArg(v0)
20765 return true
20766 }
20767
20768
20769
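// 64-bit rotate-right recognized from its shift expansion: (SRL x (ANDconst [63] y))
// ORed with a CSEL0<LessThanU>-guarded (SLL x (SUB (MOVDconst [64]) (ANDconst [63] y)))
// becomes (ROR x y).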
20770 for {
20771 _ = v.Args[1]
20772 v_0 := v.Args[0]
20773 if v_0.Op != OpARM64SRL {
20774 break
20775 }
20776 if v_0.Type != typ.UInt64 {
20777 break
20778 }
20779 _ = v_0.Args[1]
20780 x := v_0.Args[0]
20781 v_0_1 := v_0.Args[1]
20782 if v_0_1.Op != OpARM64ANDconst {
20783 break
20784 }
20785 t := v_0_1.Type
20786 if v_0_1.AuxInt != 63 {
20787 break
20788 }
20789 y := v_0_1.Args[0]
20790 v_1 := v.Args[1]
20791 if v_1.Op != OpARM64CSEL0 {
20792 break
20793 }
20794 if v_1.Type != typ.UInt64 {
20795 break
20796 }
20797 cc := v_1.Aux
20798 _ = v_1.Args[1]
20799 v_1_0 := v_1.Args[0]
20800 if v_1_0.Op != OpARM64SLL {
20801 break
20802 }
20803 _ = v_1_0.Args[1]
20804 if x != v_1_0.Args[0] {
20805 break
20806 }
20807 v_1_0_1 := v_1_0.Args[1]
20808 if v_1_0_1.Op != OpARM64SUB {
20809 break
20810 }
20811 if v_1_0_1.Type != t {
20812 break
20813 }
20814 _ = v_1_0_1.Args[1]
20815 v_1_0_1_0 := v_1_0_1.Args[0]
20816 if v_1_0_1_0.Op != OpARM64MOVDconst {
20817 break
20818 }
20819 if v_1_0_1_0.AuxInt != 64 {
20820 break
20821 }
20822 v_1_0_1_1 := v_1_0_1.Args[1]
20823 if v_1_0_1_1.Op != OpARM64ANDconst {
20824 break
20825 }
20826 if v_1_0_1_1.Type != t {
20827 break
20828 }
20829 if v_1_0_1_1.AuxInt != 63 {
20830 break
20831 }
20832 if y != v_1_0_1_1.Args[0] {
20833 break
20834 }
20835 v_1_1 := v_1.Args[1]
20836 if v_1_1.Op != OpARM64CMPconst {
20837 break
20838 }
20839 if v_1_1.AuxInt != 64 {
20840 break
20841 }
20842 v_1_1_0 := v_1_1.Args[0]
20843 if v_1_1_0.Op != OpARM64SUB {
20844 break
20845 }
20846 if v_1_1_0.Type != t {
20847 break
20848 }
20849 _ = v_1_1_0.Args[1]
20850 v_1_1_0_0 := v_1_1_0.Args[0]
20851 if v_1_1_0_0.Op != OpARM64MOVDconst {
20852 break
20853 }
20854 if v_1_1_0_0.AuxInt != 64 {
20855 break
20856 }
20857 v_1_1_0_1 := v_1_1_0.Args[1]
20858 if v_1_1_0_1.Op != OpARM64ANDconst {
20859 break
20860 }
20861 if v_1_1_0_1.Type != t {
20862 break
20863 }
20864 if v_1_1_0_1.AuxInt != 63 {
20865 break
20866 }
20867 if y != v_1_1_0_1.Args[0] {
20868 break
20869 }
20870 if !(cc.(Op) == OpARM64LessThanU) {
20871 break
20872 }
20873 v.reset(OpARM64ROR)
20874 v.AddArg(x)
20875 v.AddArg(y)
20876 return true
20877 }
20878
20879
20880
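// Commuted form of the rotate-right pattern above; also rewritten to (ROR x y).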
20881 for {
20882 _ = v.Args[1]
20883 v_0 := v.Args[0]
20884 if v_0.Op != OpARM64CSEL0 {
20885 break
20886 }
20887 if v_0.Type != typ.UInt64 {
20888 break
20889 }
20890 cc := v_0.Aux
20891 _ = v_0.Args[1]
20892 v_0_0 := v_0.Args[0]
20893 if v_0_0.Op != OpARM64SLL {
20894 break
20895 }
20896 _ = v_0_0.Args[1]
20897 x := v_0_0.Args[0]
20898 v_0_0_1 := v_0_0.Args[1]
20899 if v_0_0_1.Op != OpARM64SUB {
20900 break
20901 }
20902 t := v_0_0_1.Type
20903 _ = v_0_0_1.Args[1]
20904 v_0_0_1_0 := v_0_0_1.Args[0]
20905 if v_0_0_1_0.Op != OpARM64MOVDconst {
20906 break
20907 }
20908 if v_0_0_1_0.AuxInt != 64 {
20909 break
20910 }
20911 v_0_0_1_1 := v_0_0_1.Args[1]
20912 if v_0_0_1_1.Op != OpARM64ANDconst {
20913 break
20914 }
20915 if v_0_0_1_1.Type != t {
20916 break
20917 }
20918 if v_0_0_1_1.AuxInt != 63 {
20919 break
20920 }
20921 y := v_0_0_1_1.Args[0]
20922 v_0_1 := v_0.Args[1]
20923 if v_0_1.Op != OpARM64CMPconst {
20924 break
20925 }
20926 if v_0_1.AuxInt != 64 {
20927 break
20928 }
20929 v_0_1_0 := v_0_1.Args[0]
20930 if v_0_1_0.Op != OpARM64SUB {
20931 break
20932 }
20933 if v_0_1_0.Type != t {
20934 break
20935 }
20936 _ = v_0_1_0.Args[1]
20937 v_0_1_0_0 := v_0_1_0.Args[0]
20938 if v_0_1_0_0.Op != OpARM64MOVDconst {
20939 break
20940 }
20941 if v_0_1_0_0.AuxInt != 64 {
20942 break
20943 }
20944 v_0_1_0_1 := v_0_1_0.Args[1]
20945 if v_0_1_0_1.Op != OpARM64ANDconst {
20946 break
20947 }
20948 if v_0_1_0_1.Type != t {
20949 break
20950 }
20951 if v_0_1_0_1.AuxInt != 63 {
20952 break
20953 }
20954 if y != v_0_1_0_1.Args[0] {
20955 break
20956 }
20957 v_1 := v.Args[1]
20958 if v_1.Op != OpARM64SRL {
20959 break
20960 }
20961 if v_1.Type != typ.UInt64 {
20962 break
20963 }
20964 _ = v_1.Args[1]
20965 if x != v_1.Args[0] {
20966 break
20967 }
20968 v_1_1 := v_1.Args[1]
20969 if v_1_1.Op != OpARM64ANDconst {
20970 break
20971 }
20972 if v_1_1.Type != t {
20973 break
20974 }
20975 if v_1_1.AuxInt != 63 {
20976 break
20977 }
20978 if y != v_1_1.Args[0] {
20979 break
20980 }
20981 if !(cc.(Op) == OpARM64LessThanU) {
20982 break
20983 }
20984 v.reset(OpARM64ROR)
20985 v.AddArg(x)
20986 v.AddArg(y)
20987 return true
20988 }
20989
20990
20991
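// 32-bit rotate-left: (SLL x (ANDconst [31] y)) ORed with a CSEL0<LessThanU>-guarded
// (SRL (MOVWUreg x) (SUB (MOVDconst [32]) (ANDconst [31] y))) becomes (RORW x (NEG <t> y)).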
20992 for {
20993 _ = v.Args[1]
20994 v_0 := v.Args[0]
20995 if v_0.Op != OpARM64SLL {
20996 break
20997 }
20998 _ = v_0.Args[1]
20999 x := v_0.Args[0]
21000 v_0_1 := v_0.Args[1]
21001 if v_0_1.Op != OpARM64ANDconst {
21002 break
21003 }
21004 t := v_0_1.Type
21005 if v_0_1.AuxInt != 31 {
21006 break
21007 }
21008 y := v_0_1.Args[0]
21009 v_1 := v.Args[1]
21010 if v_1.Op != OpARM64CSEL0 {
21011 break
21012 }
21013 if v_1.Type != typ.UInt32 {
21014 break
21015 }
21016 cc := v_1.Aux
21017 _ = v_1.Args[1]
21018 v_1_0 := v_1.Args[0]
21019 if v_1_0.Op != OpARM64SRL {
21020 break
21021 }
21022 if v_1_0.Type != typ.UInt32 {
21023 break
21024 }
21025 _ = v_1_0.Args[1]
21026 v_1_0_0 := v_1_0.Args[0]
21027 if v_1_0_0.Op != OpARM64MOVWUreg {
21028 break
21029 }
21030 if x != v_1_0_0.Args[0] {
21031 break
21032 }
21033 v_1_0_1 := v_1_0.Args[1]
21034 if v_1_0_1.Op != OpARM64SUB {
21035 break
21036 }
21037 if v_1_0_1.Type != t {
21038 break
21039 }
21040 _ = v_1_0_1.Args[1]
21041 v_1_0_1_0 := v_1_0_1.Args[0]
21042 if v_1_0_1_0.Op != OpARM64MOVDconst {
21043 break
21044 }
21045 if v_1_0_1_0.AuxInt != 32 {
21046 break
21047 }
21048 v_1_0_1_1 := v_1_0_1.Args[1]
21049 if v_1_0_1_1.Op != OpARM64ANDconst {
21050 break
21051 }
21052 if v_1_0_1_1.Type != t {
21053 break
21054 }
21055 if v_1_0_1_1.AuxInt != 31 {
21056 break
21057 }
21058 if y != v_1_0_1_1.Args[0] {
21059 break
21060 }
21061 v_1_1 := v_1.Args[1]
21062 if v_1_1.Op != OpARM64CMPconst {
21063 break
21064 }
21065 if v_1_1.AuxInt != 64 {
21066 break
21067 }
21068 v_1_1_0 := v_1_1.Args[0]
21069 if v_1_1_0.Op != OpARM64SUB {
21070 break
21071 }
21072 if v_1_1_0.Type != t {
21073 break
21074 }
21075 _ = v_1_1_0.Args[1]
21076 v_1_1_0_0 := v_1_1_0.Args[0]
21077 if v_1_1_0_0.Op != OpARM64MOVDconst {
21078 break
21079 }
21080 if v_1_1_0_0.AuxInt != 32 {
21081 break
21082 }
21083 v_1_1_0_1 := v_1_1_0.Args[1]
21084 if v_1_1_0_1.Op != OpARM64ANDconst {
21085 break
21086 }
21087 if v_1_1_0_1.Type != t {
21088 break
21089 }
21090 if v_1_1_0_1.AuxInt != 31 {
21091 break
21092 }
21093 if y != v_1_1_0_1.Args[0] {
21094 break
21095 }
21096 if !(cc.(Op) == OpARM64LessThanU) {
21097 break
21098 }
21099 v.reset(OpARM64RORW)
21100 v.AddArg(x)
21101 v0 := b.NewValue0(v.Pos, OpARM64NEG, t)
21102 v0.AddArg(y)
21103 v.AddArg(v0)
21104 return true
21105 }
21106
21107
21108
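// Commuted form of the 32-bit rotate-left pattern above; also rewritten to
// (RORW x (NEG <t> y)).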
21109 for {
21110 _ = v.Args[1]
21111 v_0 := v.Args[0]
21112 if v_0.Op != OpARM64CSEL0 {
21113 break
21114 }
21115 if v_0.Type != typ.UInt32 {
21116 break
21117 }
21118 cc := v_0.Aux
21119 _ = v_0.Args[1]
21120 v_0_0 := v_0.Args[0]
21121 if v_0_0.Op != OpARM64SRL {
21122 break
21123 }
21124 if v_0_0.Type != typ.UInt32 {
21125 break
21126 }
21127 _ = v_0_0.Args[1]
21128 v_0_0_0 := v_0_0.Args[0]
21129 if v_0_0_0.Op != OpARM64MOVWUreg {
21130 break
21131 }
21132 x := v_0_0_0.Args[0]
21133 v_0_0_1 := v_0_0.Args[1]
21134 if v_0_0_1.Op != OpARM64SUB {
21135 break
21136 }
21137 t := v_0_0_1.Type
21138 _ = v_0_0_1.Args[1]
21139 v_0_0_1_0 := v_0_0_1.Args[0]
21140 if v_0_0_1_0.Op != OpARM64MOVDconst {
21141 break
21142 }
21143 if v_0_0_1_0.AuxInt != 32 {
21144 break
21145 }
21146 v_0_0_1_1 := v_0_0_1.Args[1]
21147 if v_0_0_1_1.Op != OpARM64ANDconst {
21148 break
21149 }
21150 if v_0_0_1_1.Type != t {
21151 break
21152 }
21153 if v_0_0_1_1.AuxInt != 31 {
21154 break
21155 }
21156 y := v_0_0_1_1.Args[0]
21157 v_0_1 := v_0.Args[1]
21158 if v_0_1.Op != OpARM64CMPconst {
21159 break
21160 }
21161 if v_0_1.AuxInt != 64 {
21162 break
21163 }
21164 v_0_1_0 := v_0_1.Args[0]
21165 if v_0_1_0.Op != OpARM64SUB {
21166 break
21167 }
21168 if v_0_1_0.Type != t {
21169 break
21170 }
21171 _ = v_0_1_0.Args[1]
21172 v_0_1_0_0 := v_0_1_0.Args[0]
21173 if v_0_1_0_0.Op != OpARM64MOVDconst {
21174 break
21175 }
21176 if v_0_1_0_0.AuxInt != 32 {
21177 break
21178 }
21179 v_0_1_0_1 := v_0_1_0.Args[1]
21180 if v_0_1_0_1.Op != OpARM64ANDconst {
21181 break
21182 }
21183 if v_0_1_0_1.Type != t {
21184 break
21185 }
21186 if v_0_1_0_1.AuxInt != 31 {
21187 break
21188 }
21189 if y != v_0_1_0_1.Args[0] {
21190 break
21191 }
21192 v_1 := v.Args[1]
21193 if v_1.Op != OpARM64SLL {
21194 break
21195 }
21196 _ = v_1.Args[1]
21197 if x != v_1.Args[0] {
21198 break
21199 }
21200 v_1_1 := v_1.Args[1]
21201 if v_1_1.Op != OpARM64ANDconst {
21202 break
21203 }
21204 if v_1_1.Type != t {
21205 break
21206 }
21207 if v_1_1.AuxInt != 31 {
21208 break
21209 }
21210 if y != v_1_1.Args[0] {
21211 break
21212 }
21213 if !(cc.(Op) == OpARM64LessThanU) {
21214 break
21215 }
21216 v.reset(OpARM64RORW)
21217 v.AddArg(x)
21218 v0 := b.NewValue0(v.Pos, OpARM64NEG, t)
21219 v0.AddArg(y)
21220 v.AddArg(v0)
21221 return true
21222 }
21223
21224
21225
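// 32-bit rotate-right: (SRL (MOVWUreg x) (ANDconst [31] y)) ORed with a
// CSEL0<LessThanU>-guarded (SLL x (SUB (MOVDconst [32]) (ANDconst [31] y)))
// becomes (RORW x y).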
21226 for {
21227 _ = v.Args[1]
21228 v_0 := v.Args[0]
21229 if v_0.Op != OpARM64SRL {
21230 break
21231 }
21232 if v_0.Type != typ.UInt32 {
21233 break
21234 }
21235 _ = v_0.Args[1]
21236 v_0_0 := v_0.Args[0]
21237 if v_0_0.Op != OpARM64MOVWUreg {
21238 break
21239 }
21240 x := v_0_0.Args[0]
21241 v_0_1 := v_0.Args[1]
21242 if v_0_1.Op != OpARM64ANDconst {
21243 break
21244 }
21245 t := v_0_1.Type
21246 if v_0_1.AuxInt != 31 {
21247 break
21248 }
21249 y := v_0_1.Args[0]
21250 v_1 := v.Args[1]
21251 if v_1.Op != OpARM64CSEL0 {
21252 break
21253 }
21254 if v_1.Type != typ.UInt32 {
21255 break
21256 }
21257 cc := v_1.Aux
21258 _ = v_1.Args[1]
21259 v_1_0 := v_1.Args[0]
21260 if v_1_0.Op != OpARM64SLL {
21261 break
21262 }
21263 _ = v_1_0.Args[1]
21264 if x != v_1_0.Args[0] {
21265 break
21266 }
21267 v_1_0_1 := v_1_0.Args[1]
21268 if v_1_0_1.Op != OpARM64SUB {
21269 break
21270 }
21271 if v_1_0_1.Type != t {
21272 break
21273 }
21274 _ = v_1_0_1.Args[1]
21275 v_1_0_1_0 := v_1_0_1.Args[0]
21276 if v_1_0_1_0.Op != OpARM64MOVDconst {
21277 break
21278 }
21279 if v_1_0_1_0.AuxInt != 32 {
21280 break
21281 }
21282 v_1_0_1_1 := v_1_0_1.Args[1]
21283 if v_1_0_1_1.Op != OpARM64ANDconst {
21284 break
21285 }
21286 if v_1_0_1_1.Type != t {
21287 break
21288 }
21289 if v_1_0_1_1.AuxInt != 31 {
21290 break
21291 }
21292 if y != v_1_0_1_1.Args[0] {
21293 break
21294 }
21295 v_1_1 := v_1.Args[1]
21296 if v_1_1.Op != OpARM64CMPconst {
21297 break
21298 }
21299 if v_1_1.AuxInt != 64 {
21300 break
21301 }
21302 v_1_1_0 := v_1_1.Args[0]
21303 if v_1_1_0.Op != OpARM64SUB {
21304 break
21305 }
21306 if v_1_1_0.Type != t {
21307 break
21308 }
21309 _ = v_1_1_0.Args[1]
21310 v_1_1_0_0 := v_1_1_0.Args[0]
21311 if v_1_1_0_0.Op != OpARM64MOVDconst {
21312 break
21313 }
21314 if v_1_1_0_0.AuxInt != 32 {
21315 break
21316 }
21317 v_1_1_0_1 := v_1_1_0.Args[1]
21318 if v_1_1_0_1.Op != OpARM64ANDconst {
21319 break
21320 }
21321 if v_1_1_0_1.Type != t {
21322 break
21323 }
21324 if v_1_1_0_1.AuxInt != 31 {
21325 break
21326 }
21327 if y != v_1_1_0_1.Args[0] {
21328 break
21329 }
21330 if !(cc.(Op) == OpARM64LessThanU) {
21331 break
21332 }
21333 v.reset(OpARM64RORW)
21334 v.AddArg(x)
21335 v.AddArg(y)
21336 return true
21337 }
21338
21339
21340
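// Commuted form of the 32-bit rotate-right pattern above; also rewritten to (RORW x y).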
21341 for {
21342 _ = v.Args[1]
21343 v_0 := v.Args[0]
21344 if v_0.Op != OpARM64CSEL0 {
21345 break
21346 }
21347 if v_0.Type != typ.UInt32 {
21348 break
21349 }
21350 cc := v_0.Aux
21351 _ = v_0.Args[1]
21352 v_0_0 := v_0.Args[0]
21353 if v_0_0.Op != OpARM64SLL {
21354 break
21355 }
21356 _ = v_0_0.Args[1]
21357 x := v_0_0.Args[0]
21358 v_0_0_1 := v_0_0.Args[1]
21359 if v_0_0_1.Op != OpARM64SUB {
21360 break
21361 }
21362 t := v_0_0_1.Type
21363 _ = v_0_0_1.Args[1]
21364 v_0_0_1_0 := v_0_0_1.Args[0]
21365 if v_0_0_1_0.Op != OpARM64MOVDconst {
21366 break
21367 }
21368 if v_0_0_1_0.AuxInt != 32 {
21369 break
21370 }
21371 v_0_0_1_1 := v_0_0_1.Args[1]
21372 if v_0_0_1_1.Op != OpARM64ANDconst {
21373 break
21374 }
21375 if v_0_0_1_1.Type != t {
21376 break
21377 }
21378 if v_0_0_1_1.AuxInt != 31 {
21379 break
21380 }
21381 y := v_0_0_1_1.Args[0]
21382 v_0_1 := v_0.Args[1]
21383 if v_0_1.Op != OpARM64CMPconst {
21384 break
21385 }
21386 if v_0_1.AuxInt != 64 {
21387 break
21388 }
21389 v_0_1_0 := v_0_1.Args[0]
21390 if v_0_1_0.Op != OpARM64SUB {
21391 break
21392 }
21393 if v_0_1_0.Type != t {
21394 break
21395 }
21396 _ = v_0_1_0.Args[1]
21397 v_0_1_0_0 := v_0_1_0.Args[0]
21398 if v_0_1_0_0.Op != OpARM64MOVDconst {
21399 break
21400 }
21401 if v_0_1_0_0.AuxInt != 32 {
21402 break
21403 }
21404 v_0_1_0_1 := v_0_1_0.Args[1]
21405 if v_0_1_0_1.Op != OpARM64ANDconst {
21406 break
21407 }
21408 if v_0_1_0_1.Type != t {
21409 break
21410 }
21411 if v_0_1_0_1.AuxInt != 31 {
21412 break
21413 }
21414 if y != v_0_1_0_1.Args[0] {
21415 break
21416 }
21417 v_1 := v.Args[1]
21418 if v_1.Op != OpARM64SRL {
21419 break
21420 }
21421 if v_1.Type != typ.UInt32 {
21422 break
21423 }
21424 _ = v_1.Args[1]
21425 v_1_0 := v_1.Args[0]
21426 if v_1_0.Op != OpARM64MOVWUreg {
21427 break
21428 }
21429 if x != v_1_0.Args[0] {
21430 break
21431 }
21432 v_1_1 := v_1.Args[1]
21433 if v_1_1.Op != OpARM64ANDconst {
21434 break
21435 }
21436 if v_1_1.Type != t {
21437 break
21438 }
21439 if v_1_1.AuxInt != 31 {
21440 break
21441 }
21442 if y != v_1_1.Args[0] {
21443 break
21444 }
21445 if !(cc.(Op) == OpARM64LessThanU) {
21446 break
21447 }
21448 v.reset(OpARM64RORW)
21449 v.AddArg(x)
21450 v.AddArg(y)
21451 return true
21452 }
21453
21454
21455
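// Bitfield insert: (OR (UBFIZ [bfc] x) (ANDconst [ac] y)) where ac masks off exactly
// the bits the UBFIZ writes becomes (BFI [bfc] y x).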
21456 for {
21457 _ = v.Args[1]
21458 v_0 := v.Args[0]
21459 if v_0.Op != OpARM64UBFIZ {
21460 break
21461 }
21462 bfc := v_0.AuxInt
21463 x := v_0.Args[0]
21464 v_1 := v.Args[1]
21465 if v_1.Op != OpARM64ANDconst {
21466 break
21467 }
21468 ac := v_1.AuxInt
21469 y := v_1.Args[0]
21470 if !(ac == ^((1<<uint(getARM64BFwidth(bfc)) - 1) << uint(getARM64BFlsb(bfc)))) {
21471 break
21472 }
21473 v.reset(OpARM64BFI)
21474 v.AuxInt = bfc
21475 v.AddArg(y)
21476 v.AddArg(x)
21477 return true
21478 }
21479 return false
21480 }
21481 func rewriteValueARM64_OpARM64OR_20(v *Value) bool {
21482 b := v.Block
21483
21484
21485
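// Commuted form of the bitfield-insert pattern: (OR (ANDconst [ac] y) (UBFIZ [bfc] x))
// with the same mask condition becomes (BFI [bfc] y x).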
21486 for {
21487 _ = v.Args[1]
21488 v_0 := v.Args[0]
21489 if v_0.Op != OpARM64ANDconst {
21490 break
21491 }
21492 ac := v_0.AuxInt
21493 y := v_0.Args[0]
21494 v_1 := v.Args[1]
21495 if v_1.Op != OpARM64UBFIZ {
21496 break
21497 }
21498 bfc := v_1.AuxInt
21499 x := v_1.Args[0]
21500 if !(ac == ^((1<<uint(getARM64BFwidth(bfc)) - 1) << uint(getARM64BFlsb(bfc)))) {
21501 break
21502 }
21503 v.reset(OpARM64BFI)
21504 v.AuxInt = bfc
21505 v.AddArg(y)
21506 v.AddArg(x)
21507 return true
21508 }
21509
21510
21511
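// Bitfield extract and insert-low: (OR (UBFX [bfc] x) (ANDconst [ac] y)) where ac
// clears exactly the low getARM64BFwidth(bfc) bits becomes (BFXIL [bfc] y x).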
21512 for {
21513 _ = v.Args[1]
21514 v_0 := v.Args[0]
21515 if v_0.Op != OpARM64UBFX {
21516 break
21517 }
21518 bfc := v_0.AuxInt
21519 x := v_0.Args[0]
21520 v_1 := v.Args[1]
21521 if v_1.Op != OpARM64ANDconst {
21522 break
21523 }
21524 ac := v_1.AuxInt
21525 y := v_1.Args[0]
21526 if !(ac == ^(1<<uint(getARM64BFwidth(bfc)) - 1)) {
21527 break
21528 }
21529 v.reset(OpARM64BFXIL)
21530 v.AuxInt = bfc
21531 v.AddArg(y)
21532 v.AddArg(x)
21533 return true
21534 }
21535
21536
21537
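// Commuted form of the BFXIL pattern: (OR (ANDconst [ac] y) (UBFX [bfc] x)) with the
// same mask condition becomes (BFXIL [bfc] y x).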
21538 for {
21539 _ = v.Args[1]
21540 v_0 := v.Args[0]
21541 if v_0.Op != OpARM64ANDconst {
21542 break
21543 }
21544 ac := v_0.AuxInt
21545 y := v_0.Args[0]
21546 v_1 := v.Args[1]
21547 if v_1.Op != OpARM64UBFX {
21548 break
21549 }
21550 bfc := v_1.AuxInt
21551 x := v_1.Args[0]
21552 if !(ac == ^(1<<uint(getARM64BFwidth(bfc)) - 1)) {
21553 break
21554 }
21555 v.reset(OpARM64BFXIL)
21556 v.AuxInt = bfc
21557 v.AddArg(y)
21558 v.AddArg(x)
21559 return true
21560 }
21561
21562
21563
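// Four adjacent little-endian byte loads (MOVBUload at offsets i0..i0+3 from the same
// pointer and memory), combined through ORshiftLL [8]/[16] and SLLconst [24], are merged
// into a single (MOVWUload {s} (OffPtr [i0] p) mem) at the loads' merge point, provided
// every intermediate value has exactly one use and can be clobbered.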
21564 for {
21565 t := v.Type
21566 _ = v.Args[1]
21567 o0 := v.Args[0]
21568 if o0.Op != OpARM64ORshiftLL {
21569 break
21570 }
21571 if o0.AuxInt != 8 {
21572 break
21573 }
21574 _ = o0.Args[1]
21575 o1 := o0.Args[0]
21576 if o1.Op != OpARM64ORshiftLL {
21577 break
21578 }
21579 if o1.AuxInt != 16 {
21580 break
21581 }
21582 _ = o1.Args[1]
21583 s0 := o1.Args[0]
21584 if s0.Op != OpARM64SLLconst {
21585 break
21586 }
21587 if s0.AuxInt != 24 {
21588 break
21589 }
21590 y0 := s0.Args[0]
21591 if y0.Op != OpARM64MOVDnop {
21592 break
21593 }
21594 x0 := y0.Args[0]
21595 if x0.Op != OpARM64MOVBUload {
21596 break
21597 }
21598 i3 := x0.AuxInt
21599 s := x0.Aux
21600 mem := x0.Args[1]
21601 p := x0.Args[0]
21602 y1 := o1.Args[1]
21603 if y1.Op != OpARM64MOVDnop {
21604 break
21605 }
21606 x1 := y1.Args[0]
21607 if x1.Op != OpARM64MOVBUload {
21608 break
21609 }
21610 i2 := x1.AuxInt
21611 if x1.Aux != s {
21612 break
21613 }
21614 _ = x1.Args[1]
21615 if p != x1.Args[0] {
21616 break
21617 }
21618 if mem != x1.Args[1] {
21619 break
21620 }
21621 y2 := o0.Args[1]
21622 if y2.Op != OpARM64MOVDnop {
21623 break
21624 }
21625 x2 := y2.Args[0]
21626 if x2.Op != OpARM64MOVBUload {
21627 break
21628 }
21629 i1 := x2.AuxInt
21630 if x2.Aux != s {
21631 break
21632 }
21633 _ = x2.Args[1]
21634 if p != x2.Args[0] {
21635 break
21636 }
21637 if mem != x2.Args[1] {
21638 break
21639 }
21640 y3 := v.Args[1]
21641 if y3.Op != OpARM64MOVDnop {
21642 break
21643 }
21644 x3 := y3.Args[0]
21645 if x3.Op != OpARM64MOVBUload {
21646 break
21647 }
21648 i0 := x3.AuxInt
21649 if x3.Aux != s {
21650 break
21651 }
21652 _ = x3.Args[1]
21653 if p != x3.Args[0] {
21654 break
21655 }
21656 if mem != x3.Args[1] {
21657 break
21658 }
21659 if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) {
21660 break
21661 }
21662 b = mergePoint(b, x0, x1, x2, x3)
21663 v0 := b.NewValue0(x3.Pos, OpARM64MOVWUload, t)
21664 v.reset(OpCopy)
21665 v.AddArg(v0)
21666 v0.Aux = s
21667 v1 := b.NewValue0(x3.Pos, OpOffPtr, p.Type)
21668 v1.AuxInt = i0
21669 v1.AddArg(p)
21670 v0.AddArg(v1)
21671 v0.AddArg(mem)
21672 return true
21673 }
21674
21675
21676
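// Same four-byte little-endian load merge with the OR operands commuted (the byte-0
// load appears as the first operand); also rewritten to (MOVWUload {s} (OffPtr [i0] p) mem).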
21677 for {
21678 t := v.Type
21679 _ = v.Args[1]
21680 y3 := v.Args[0]
21681 if y3.Op != OpARM64MOVDnop {
21682 break
21683 }
21684 x3 := y3.Args[0]
21685 if x3.Op != OpARM64MOVBUload {
21686 break
21687 }
21688 i0 := x3.AuxInt
21689 s := x3.Aux
21690 mem := x3.Args[1]
21691 p := x3.Args[0]
21692 o0 := v.Args[1]
21693 if o0.Op != OpARM64ORshiftLL {
21694 break
21695 }
21696 if o0.AuxInt != 8 {
21697 break
21698 }
21699 _ = o0.Args[1]
21700 o1 := o0.Args[0]
21701 if o1.Op != OpARM64ORshiftLL {
21702 break
21703 }
21704 if o1.AuxInt != 16 {
21705 break
21706 }
21707 _ = o1.Args[1]
21708 s0 := o1.Args[0]
21709 if s0.Op != OpARM64SLLconst {
21710 break
21711 }
21712 if s0.AuxInt != 24 {
21713 break
21714 }
21715 y0 := s0.Args[0]
21716 if y0.Op != OpARM64MOVDnop {
21717 break
21718 }
21719 x0 := y0.Args[0]
21720 if x0.Op != OpARM64MOVBUload {
21721 break
21722 }
21723 i3 := x0.AuxInt
21724 if x0.Aux != s {
21725 break
21726 }
21727 _ = x0.Args[1]
21728 if p != x0.Args[0] {
21729 break
21730 }
21731 if mem != x0.Args[1] {
21732 break
21733 }
21734 y1 := o1.Args[1]
21735 if y1.Op != OpARM64MOVDnop {
21736 break
21737 }
21738 x1 := y1.Args[0]
21739 if x1.Op != OpARM64MOVBUload {
21740 break
21741 }
21742 i2 := x1.AuxInt
21743 if x1.Aux != s {
21744 break
21745 }
21746 _ = x1.Args[1]
21747 if p != x1.Args[0] {
21748 break
21749 }
21750 if mem != x1.Args[1] {
21751 break
21752 }
21753 y2 := o0.Args[1]
21754 if y2.Op != OpARM64MOVDnop {
21755 break
21756 }
21757 x2 := y2.Args[0]
21758 if x2.Op != OpARM64MOVBUload {
21759 break
21760 }
21761 i1 := x2.AuxInt
21762 if x2.Aux != s {
21763 break
21764 }
21765 _ = x2.Args[1]
21766 if p != x2.Args[0] {
21767 break
21768 }
21769 if mem != x2.Args[1] {
21770 break
21771 }
21772 if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) {
21773 break
21774 }
21775 b = mergePoint(b, x0, x1, x2, x3)
21776 v0 := b.NewValue0(x2.Pos, OpARM64MOVWUload, t)
21777 v.reset(OpCopy)
21778 v.AddArg(v0)
21779 v0.Aux = s
21780 v1 := b.NewValue0(x2.Pos, OpOffPtr, p.Type)
21781 v1.AuxInt = i0
21782 v1.AddArg(p)
21783 v0.AddArg(v1)
21784 v0.AddArg(mem)
21785 return true
21786 }
21787
21788
21789
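// Indexed four-byte load merge: byte 0 is read with (MOVBUloadidx ptr0 idx0 mem) and
// bytes 1..3 with MOVBUload [1..3] from p = (ADD ptr1 idx1); when isSamePtr shows the
// addresses agree, the whole tree becomes (MOVWUloadidx ptr0 idx0 mem).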
21790 for {
21791 t := v.Type
21792 _ = v.Args[1]
21793 o0 := v.Args[0]
21794 if o0.Op != OpARM64ORshiftLL {
21795 break
21796 }
21797 if o0.AuxInt != 8 {
21798 break
21799 }
21800 _ = o0.Args[1]
21801 o1 := o0.Args[0]
21802 if o1.Op != OpARM64ORshiftLL {
21803 break
21804 }
21805 if o1.AuxInt != 16 {
21806 break
21807 }
21808 _ = o1.Args[1]
21809 s0 := o1.Args[0]
21810 if s0.Op != OpARM64SLLconst {
21811 break
21812 }
21813 if s0.AuxInt != 24 {
21814 break
21815 }
21816 y0 := s0.Args[0]
21817 if y0.Op != OpARM64MOVDnop {
21818 break
21819 }
21820 x0 := y0.Args[0]
21821 if x0.Op != OpARM64MOVBUload {
21822 break
21823 }
21824 if x0.AuxInt != 3 {
21825 break
21826 }
21827 s := x0.Aux
21828 mem := x0.Args[1]
21829 p := x0.Args[0]
21830 y1 := o1.Args[1]
21831 if y1.Op != OpARM64MOVDnop {
21832 break
21833 }
21834 x1 := y1.Args[0]
21835 if x1.Op != OpARM64MOVBUload {
21836 break
21837 }
21838 if x1.AuxInt != 2 {
21839 break
21840 }
21841 if x1.Aux != s {
21842 break
21843 }
21844 _ = x1.Args[1]
21845 if p != x1.Args[0] {
21846 break
21847 }
21848 if mem != x1.Args[1] {
21849 break
21850 }
21851 y2 := o0.Args[1]
21852 if y2.Op != OpARM64MOVDnop {
21853 break
21854 }
21855 x2 := y2.Args[0]
21856 if x2.Op != OpARM64MOVBUload {
21857 break
21858 }
21859 if x2.AuxInt != 1 {
21860 break
21861 }
21862 if x2.Aux != s {
21863 break
21864 }
21865 _ = x2.Args[1]
21866 p1 := x2.Args[0]
21867 if p1.Op != OpARM64ADD {
21868 break
21869 }
21870 idx1 := p1.Args[1]
21871 ptr1 := p1.Args[0]
21872 if mem != x2.Args[1] {
21873 break
21874 }
21875 y3 := v.Args[1]
21876 if y3.Op != OpARM64MOVDnop {
21877 break
21878 }
21879 x3 := y3.Args[0]
21880 if x3.Op != OpARM64MOVBUloadidx {
21881 break
21882 }
21883 _ = x3.Args[2]
21884 ptr0 := x3.Args[0]
21885 idx0 := x3.Args[1]
21886 if mem != x3.Args[2] {
21887 break
21888 }
21889 if !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) {
21890 break
21891 }
21892 b = mergePoint(b, x0, x1, x2, x3)
21893 v0 := b.NewValue0(x2.Pos, OpARM64MOVWUloadidx, t)
21894 v.reset(OpCopy)
21895 v.AddArg(v0)
21896 v0.AddArg(ptr0)
21897 v0.AddArg(idx0)
21898 v0.AddArg(mem)
21899 return true
21900 }
21901
21902
21903
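// Commuted form of the indexed four-byte load merge above; also rewritten to
// (MOVWUloadidx ptr0 idx0 mem).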
21904 for {
21905 t := v.Type
21906 _ = v.Args[1]
21907 y3 := v.Args[0]
21908 if y3.Op != OpARM64MOVDnop {
21909 break
21910 }
21911 x3 := y3.Args[0]
21912 if x3.Op != OpARM64MOVBUloadidx {
21913 break
21914 }
21915 mem := x3.Args[2]
21916 ptr0 := x3.Args[0]
21917 idx0 := x3.Args[1]
21918 o0 := v.Args[1]
21919 if o0.Op != OpARM64ORshiftLL {
21920 break
21921 }
21922 if o0.AuxInt != 8 {
21923 break
21924 }
21925 _ = o0.Args[1]
21926 o1 := o0.Args[0]
21927 if o1.Op != OpARM64ORshiftLL {
21928 break
21929 }
21930 if o1.AuxInt != 16 {
21931 break
21932 }
21933 _ = o1.Args[1]
21934 s0 := o1.Args[0]
21935 if s0.Op != OpARM64SLLconst {
21936 break
21937 }
21938 if s0.AuxInt != 24 {
21939 break
21940 }
21941 y0 := s0.Args[0]
21942 if y0.Op != OpARM64MOVDnop {
21943 break
21944 }
21945 x0 := y0.Args[0]
21946 if x0.Op != OpARM64MOVBUload {
21947 break
21948 }
21949 if x0.AuxInt != 3 {
21950 break
21951 }
21952 s := x0.Aux
21953 _ = x0.Args[1]
21954 p := x0.Args[0]
21955 if mem != x0.Args[1] {
21956 break
21957 }
21958 y1 := o1.Args[1]
21959 if y1.Op != OpARM64MOVDnop {
21960 break
21961 }
21962 x1 := y1.Args[0]
21963 if x1.Op != OpARM64MOVBUload {
21964 break
21965 }
21966 if x1.AuxInt != 2 {
21967 break
21968 }
21969 if x1.Aux != s {
21970 break
21971 }
21972 _ = x1.Args[1]
21973 if p != x1.Args[0] {
21974 break
21975 }
21976 if mem != x1.Args[1] {
21977 break
21978 }
21979 y2 := o0.Args[1]
21980 if y2.Op != OpARM64MOVDnop {
21981 break
21982 }
21983 x2 := y2.Args[0]
21984 if x2.Op != OpARM64MOVBUload {
21985 break
21986 }
21987 if x2.AuxInt != 1 {
21988 break
21989 }
21990 if x2.Aux != s {
21991 break
21992 }
21993 _ = x2.Args[1]
21994 p1 := x2.Args[0]
21995 if p1.Op != OpARM64ADD {
21996 break
21997 }
21998 idx1 := p1.Args[1]
21999 ptr1 := p1.Args[0]
22000 if mem != x2.Args[1] {
22001 break
22002 }
22003 if !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) {
22004 break
22005 }
22006 b = mergePoint(b, x0, x1, x2, x3)
22007 v0 := b.NewValue0(x2.Pos, OpARM64MOVWUloadidx, t)
22008 v.reset(OpCopy)
22009 v.AddArg(v0)
22010 v0.AddArg(ptr0)
22011 v0.AddArg(idx0)
22012 v0.AddArg(mem)
22013 return true
22014 }
22015
22016
22017
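// Fully indexed four-byte load merge: bytes 3..1 come from
// (MOVBUloadidx ptr (ADDconst [3..1] idx) mem) and byte 0 from (MOVBUloadidx ptr idx mem);
// rewritten to (MOVWUloadidx ptr idx mem).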
22018 for {
22019 t := v.Type
22020 _ = v.Args[1]
22021 o0 := v.Args[0]
22022 if o0.Op != OpARM64ORshiftLL {
22023 break
22024 }
22025 if o0.AuxInt != 8 {
22026 break
22027 }
22028 _ = o0.Args[1]
22029 o1 := o0.Args[0]
22030 if o1.Op != OpARM64ORshiftLL {
22031 break
22032 }
22033 if o1.AuxInt != 16 {
22034 break
22035 }
22036 _ = o1.Args[1]
22037 s0 := o1.Args[0]
22038 if s0.Op != OpARM64SLLconst {
22039 break
22040 }
22041 if s0.AuxInt != 24 {
22042 break
22043 }
22044 y0 := s0.Args[0]
22045 if y0.Op != OpARM64MOVDnop {
22046 break
22047 }
22048 x0 := y0.Args[0]
22049 if x0.Op != OpARM64MOVBUloadidx {
22050 break
22051 }
22052 mem := x0.Args[2]
22053 ptr := x0.Args[0]
22054 x0_1 := x0.Args[1]
22055 if x0_1.Op != OpARM64ADDconst {
22056 break
22057 }
22058 if x0_1.AuxInt != 3 {
22059 break
22060 }
22061 idx := x0_1.Args[0]
22062 y1 := o1.Args[1]
22063 if y1.Op != OpARM64MOVDnop {
22064 break
22065 }
22066 x1 := y1.Args[0]
22067 if x1.Op != OpARM64MOVBUloadidx {
22068 break
22069 }
22070 _ = x1.Args[2]
22071 if ptr != x1.Args[0] {
22072 break
22073 }
22074 x1_1 := x1.Args[1]
22075 if x1_1.Op != OpARM64ADDconst {
22076 break
22077 }
22078 if x1_1.AuxInt != 2 {
22079 break
22080 }
22081 if idx != x1_1.Args[0] {
22082 break
22083 }
22084 if mem != x1.Args[2] {
22085 break
22086 }
22087 y2 := o0.Args[1]
22088 if y2.Op != OpARM64MOVDnop {
22089 break
22090 }
22091 x2 := y2.Args[0]
22092 if x2.Op != OpARM64MOVBUloadidx {
22093 break
22094 }
22095 _ = x2.Args[2]
22096 if ptr != x2.Args[0] {
22097 break
22098 }
22099 x2_1 := x2.Args[1]
22100 if x2_1.Op != OpARM64ADDconst {
22101 break
22102 }
22103 if x2_1.AuxInt != 1 {
22104 break
22105 }
22106 if idx != x2_1.Args[0] {
22107 break
22108 }
22109 if mem != x2.Args[2] {
22110 break
22111 }
22112 y3 := v.Args[1]
22113 if y3.Op != OpARM64MOVDnop {
22114 break
22115 }
22116 x3 := y3.Args[0]
22117 if x3.Op != OpARM64MOVBUloadidx {
22118 break
22119 }
22120 _ = x3.Args[2]
22121 if ptr != x3.Args[0] {
22122 break
22123 }
22124 if idx != x3.Args[1] {
22125 break
22126 }
22127 if mem != x3.Args[2] {
22128 break
22129 }
22130 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) {
22131 break
22132 }
22133 b = mergePoint(b, x0, x1, x2, x3)
22134 v0 := b.NewValue0(v.Pos, OpARM64MOVWUloadidx, t)
22135 v.reset(OpCopy)
22136 v.AddArg(v0)
22137 v0.AddArg(ptr)
22138 v0.AddArg(idx)
22139 v0.AddArg(mem)
22140 return true
22141 }
22142
22143
22144
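// Commuted form of the fully indexed four-byte load merge; also rewritten to
// (MOVWUloadidx ptr idx mem).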
22145 for {
22146 t := v.Type
22147 _ = v.Args[1]
22148 y3 := v.Args[0]
22149 if y3.Op != OpARM64MOVDnop {
22150 break
22151 }
22152 x3 := y3.Args[0]
22153 if x3.Op != OpARM64MOVBUloadidx {
22154 break
22155 }
22156 mem := x3.Args[2]
22157 ptr := x3.Args[0]
22158 idx := x3.Args[1]
22159 o0 := v.Args[1]
22160 if o0.Op != OpARM64ORshiftLL {
22161 break
22162 }
22163 if o0.AuxInt != 8 {
22164 break
22165 }
22166 _ = o0.Args[1]
22167 o1 := o0.Args[0]
22168 if o1.Op != OpARM64ORshiftLL {
22169 break
22170 }
22171 if o1.AuxInt != 16 {
22172 break
22173 }
22174 _ = o1.Args[1]
22175 s0 := o1.Args[0]
22176 if s0.Op != OpARM64SLLconst {
22177 break
22178 }
22179 if s0.AuxInt != 24 {
22180 break
22181 }
22182 y0 := s0.Args[0]
22183 if y0.Op != OpARM64MOVDnop {
22184 break
22185 }
22186 x0 := y0.Args[0]
22187 if x0.Op != OpARM64MOVBUloadidx {
22188 break
22189 }
22190 _ = x0.Args[2]
22191 if ptr != x0.Args[0] {
22192 break
22193 }
22194 x0_1 := x0.Args[1]
22195 if x0_1.Op != OpARM64ADDconst {
22196 break
22197 }
22198 if x0_1.AuxInt != 3 {
22199 break
22200 }
22201 if idx != x0_1.Args[0] {
22202 break
22203 }
22204 if mem != x0.Args[2] {
22205 break
22206 }
22207 y1 := o1.Args[1]
22208 if y1.Op != OpARM64MOVDnop {
22209 break
22210 }
22211 x1 := y1.Args[0]
22212 if x1.Op != OpARM64MOVBUloadidx {
22213 break
22214 }
22215 _ = x1.Args[2]
22216 if ptr != x1.Args[0] {
22217 break
22218 }
22219 x1_1 := x1.Args[1]
22220 if x1_1.Op != OpARM64ADDconst {
22221 break
22222 }
22223 if x1_1.AuxInt != 2 {
22224 break
22225 }
22226 if idx != x1_1.Args[0] {
22227 break
22228 }
22229 if mem != x1.Args[2] {
22230 break
22231 }
22232 y2 := o0.Args[1]
22233 if y2.Op != OpARM64MOVDnop {
22234 break
22235 }
22236 x2 := y2.Args[0]
22237 if x2.Op != OpARM64MOVBUloadidx {
22238 break
22239 }
22240 _ = x2.Args[2]
22241 if ptr != x2.Args[0] {
22242 break
22243 }
22244 x2_1 := x2.Args[1]
22245 if x2_1.Op != OpARM64ADDconst {
22246 break
22247 }
22248 if x2_1.AuxInt != 1 {
22249 break
22250 }
22251 if idx != x2_1.Args[0] {
22252 break
22253 }
22254 if mem != x2.Args[2] {
22255 break
22256 }
22257 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) {
22258 break
22259 }
22260 b = mergePoint(b, x0, x1, x2, x3)
22261 v0 := b.NewValue0(v.Pos, OpARM64MOVWUloadidx, t)
22262 v.reset(OpCopy)
22263 v.AddArg(v0)
22264 v0.AddArg(ptr)
22265 v0.AddArg(idx)
22266 v0.AddArg(mem)
22267 return true
22268 }
22269
22270
22271
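// Eight adjacent little-endian byte loads (MOVBUload at offsets i0..i0+7 from the same
// pointer and memory), combined through ORshiftLL [8]..[48] and SLLconst [56], are merged
// into a single (MOVDload {s} (OffPtr [i0] p) mem) at the loads' merge point.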
22272 for {
22273 t := v.Type
22274 _ = v.Args[1]
22275 o0 := v.Args[0]
22276 if o0.Op != OpARM64ORshiftLL {
22277 break
22278 }
22279 if o0.AuxInt != 8 {
22280 break
22281 }
22282 _ = o0.Args[1]
22283 o1 := o0.Args[0]
22284 if o1.Op != OpARM64ORshiftLL {
22285 break
22286 }
22287 if o1.AuxInt != 16 {
22288 break
22289 }
22290 _ = o1.Args[1]
22291 o2 := o1.Args[0]
22292 if o2.Op != OpARM64ORshiftLL {
22293 break
22294 }
22295 if o2.AuxInt != 24 {
22296 break
22297 }
22298 _ = o2.Args[1]
22299 o3 := o2.Args[0]
22300 if o3.Op != OpARM64ORshiftLL {
22301 break
22302 }
22303 if o3.AuxInt != 32 {
22304 break
22305 }
22306 _ = o3.Args[1]
22307 o4 := o3.Args[0]
22308 if o4.Op != OpARM64ORshiftLL {
22309 break
22310 }
22311 if o4.AuxInt != 40 {
22312 break
22313 }
22314 _ = o4.Args[1]
22315 o5 := o4.Args[0]
22316 if o5.Op != OpARM64ORshiftLL {
22317 break
22318 }
22319 if o5.AuxInt != 48 {
22320 break
22321 }
22322 _ = o5.Args[1]
22323 s0 := o5.Args[0]
22324 if s0.Op != OpARM64SLLconst {
22325 break
22326 }
22327 if s0.AuxInt != 56 {
22328 break
22329 }
22330 y0 := s0.Args[0]
22331 if y0.Op != OpARM64MOVDnop {
22332 break
22333 }
22334 x0 := y0.Args[0]
22335 if x0.Op != OpARM64MOVBUload {
22336 break
22337 }
22338 i7 := x0.AuxInt
22339 s := x0.Aux
22340 mem := x0.Args[1]
22341 p := x0.Args[0]
22342 y1 := o5.Args[1]
22343 if y1.Op != OpARM64MOVDnop {
22344 break
22345 }
22346 x1 := y1.Args[0]
22347 if x1.Op != OpARM64MOVBUload {
22348 break
22349 }
22350 i6 := x1.AuxInt
22351 if x1.Aux != s {
22352 break
22353 }
22354 _ = x1.Args[1]
22355 if p != x1.Args[0] {
22356 break
22357 }
22358 if mem != x1.Args[1] {
22359 break
22360 }
22361 y2 := o4.Args[1]
22362 if y2.Op != OpARM64MOVDnop {
22363 break
22364 }
22365 x2 := y2.Args[0]
22366 if x2.Op != OpARM64MOVBUload {
22367 break
22368 }
22369 i5 := x2.AuxInt
22370 if x2.Aux != s {
22371 break
22372 }
22373 _ = x2.Args[1]
22374 if p != x2.Args[0] {
22375 break
22376 }
22377 if mem != x2.Args[1] {
22378 break
22379 }
22380 y3 := o3.Args[1]
22381 if y3.Op != OpARM64MOVDnop {
22382 break
22383 }
22384 x3 := y3.Args[0]
22385 if x3.Op != OpARM64MOVBUload {
22386 break
22387 }
22388 i4 := x3.AuxInt
22389 if x3.Aux != s {
22390 break
22391 }
22392 _ = x3.Args[1]
22393 if p != x3.Args[0] {
22394 break
22395 }
22396 if mem != x3.Args[1] {
22397 break
22398 }
22399 y4 := o2.Args[1]
22400 if y4.Op != OpARM64MOVDnop {
22401 break
22402 }
22403 x4 := y4.Args[0]
22404 if x4.Op != OpARM64MOVBUload {
22405 break
22406 }
22407 i3 := x4.AuxInt
22408 if x4.Aux != s {
22409 break
22410 }
22411 _ = x4.Args[1]
22412 if p != x4.Args[0] {
22413 break
22414 }
22415 if mem != x4.Args[1] {
22416 break
22417 }
22418 y5 := o1.Args[1]
22419 if y5.Op != OpARM64MOVDnop {
22420 break
22421 }
22422 x5 := y5.Args[0]
22423 if x5.Op != OpARM64MOVBUload {
22424 break
22425 }
22426 i2 := x5.AuxInt
22427 if x5.Aux != s {
22428 break
22429 }
22430 _ = x5.Args[1]
22431 if p != x5.Args[0] {
22432 break
22433 }
22434 if mem != x5.Args[1] {
22435 break
22436 }
22437 y6 := o0.Args[1]
22438 if y6.Op != OpARM64MOVDnop {
22439 break
22440 }
22441 x6 := y6.Args[0]
22442 if x6.Op != OpARM64MOVBUload {
22443 break
22444 }
22445 i1 := x6.AuxInt
22446 if x6.Aux != s {
22447 break
22448 }
22449 _ = x6.Args[1]
22450 if p != x6.Args[0] {
22451 break
22452 }
22453 if mem != x6.Args[1] {
22454 break
22455 }
22456 y7 := v.Args[1]
22457 if y7.Op != OpARM64MOVDnop {
22458 break
22459 }
22460 x7 := y7.Args[0]
22461 if x7.Op != OpARM64MOVBUload {
22462 break
22463 }
22464 i0 := x7.AuxInt
22465 if x7.Aux != s {
22466 break
22467 }
22468 _ = x7.Args[1]
22469 if p != x7.Args[0] {
22470 break
22471 }
22472 if mem != x7.Args[1] {
22473 break
22474 }
22475 if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)) {
22476 break
22477 }
22478 b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7)
22479 v0 := b.NewValue0(x7.Pos, OpARM64MOVDload, t)
22480 v.reset(OpCopy)
22481 v.AddArg(v0)
22482 v0.Aux = s
22483 v1 := b.NewValue0(x7.Pos, OpOffPtr, p.Type)
22484 v1.AuxInt = i0
22485 v1.AddArg(p)
22486 v0.AddArg(v1)
22487 v0.AddArg(mem)
22488 return true
22489 }
22490 return false
22491 }
22492 func rewriteValueARM64_OpARM64OR_30(v *Value) bool {
22493 b := v.Block
22494
22495
22496
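// Commuted form of the eight-byte load merge (the byte-0 load appears as the first
// operand); also rewritten to (MOVDload {s} (OffPtr [i0] p) mem).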
22497 for {
22498 t := v.Type
22499 _ = v.Args[1]
22500 y7 := v.Args[0]
22501 if y7.Op != OpARM64MOVDnop {
22502 break
22503 }
22504 x7 := y7.Args[0]
22505 if x7.Op != OpARM64MOVBUload {
22506 break
22507 }
22508 i0 := x7.AuxInt
22509 s := x7.Aux
22510 mem := x7.Args[1]
22511 p := x7.Args[0]
22512 o0 := v.Args[1]
22513 if o0.Op != OpARM64ORshiftLL {
22514 break
22515 }
22516 if o0.AuxInt != 8 {
22517 break
22518 }
22519 _ = o0.Args[1]
22520 o1 := o0.Args[0]
22521 if o1.Op != OpARM64ORshiftLL {
22522 break
22523 }
22524 if o1.AuxInt != 16 {
22525 break
22526 }
22527 _ = o1.Args[1]
22528 o2 := o1.Args[0]
22529 if o2.Op != OpARM64ORshiftLL {
22530 break
22531 }
22532 if o2.AuxInt != 24 {
22533 break
22534 }
22535 _ = o2.Args[1]
22536 o3 := o2.Args[0]
22537 if o3.Op != OpARM64ORshiftLL {
22538 break
22539 }
22540 if o3.AuxInt != 32 {
22541 break
22542 }
22543 _ = o3.Args[1]
22544 o4 := o3.Args[0]
22545 if o4.Op != OpARM64ORshiftLL {
22546 break
22547 }
22548 if o4.AuxInt != 40 {
22549 break
22550 }
22551 _ = o4.Args[1]
22552 o5 := o4.Args[0]
22553 if o5.Op != OpARM64ORshiftLL {
22554 break
22555 }
22556 if o5.AuxInt != 48 {
22557 break
22558 }
22559 _ = o5.Args[1]
22560 s0 := o5.Args[0]
22561 if s0.Op != OpARM64SLLconst {
22562 break
22563 }
22564 if s0.AuxInt != 56 {
22565 break
22566 }
22567 y0 := s0.Args[0]
22568 if y0.Op != OpARM64MOVDnop {
22569 break
22570 }
22571 x0 := y0.Args[0]
22572 if x0.Op != OpARM64MOVBUload {
22573 break
22574 }
22575 i7 := x0.AuxInt
22576 if x0.Aux != s {
22577 break
22578 }
22579 _ = x0.Args[1]
22580 if p != x0.Args[0] {
22581 break
22582 }
22583 if mem != x0.Args[1] {
22584 break
22585 }
22586 y1 := o5.Args[1]
22587 if y1.Op != OpARM64MOVDnop {
22588 break
22589 }
22590 x1 := y1.Args[0]
22591 if x1.Op != OpARM64MOVBUload {
22592 break
22593 }
22594 i6 := x1.AuxInt
22595 if x1.Aux != s {
22596 break
22597 }
22598 _ = x1.Args[1]
22599 if p != x1.Args[0] {
22600 break
22601 }
22602 if mem != x1.Args[1] {
22603 break
22604 }
22605 y2 := o4.Args[1]
22606 if y2.Op != OpARM64MOVDnop {
22607 break
22608 }
22609 x2 := y2.Args[0]
22610 if x2.Op != OpARM64MOVBUload {
22611 break
22612 }
22613 i5 := x2.AuxInt
22614 if x2.Aux != s {
22615 break
22616 }
22617 _ = x2.Args[1]
22618 if p != x2.Args[0] {
22619 break
22620 }
22621 if mem != x2.Args[1] {
22622 break
22623 }
22624 y3 := o3.Args[1]
22625 if y3.Op != OpARM64MOVDnop {
22626 break
22627 }
22628 x3 := y3.Args[0]
22629 if x3.Op != OpARM64MOVBUload {
22630 break
22631 }
22632 i4 := x3.AuxInt
22633 if x3.Aux != s {
22634 break
22635 }
22636 _ = x3.Args[1]
22637 if p != x3.Args[0] {
22638 break
22639 }
22640 if mem != x3.Args[1] {
22641 break
22642 }
22643 y4 := o2.Args[1]
22644 if y4.Op != OpARM64MOVDnop {
22645 break
22646 }
22647 x4 := y4.Args[0]
22648 if x4.Op != OpARM64MOVBUload {
22649 break
22650 }
22651 i3 := x4.AuxInt
22652 if x4.Aux != s {
22653 break
22654 }
22655 _ = x4.Args[1]
22656 if p != x4.Args[0] {
22657 break
22658 }
22659 if mem != x4.Args[1] {
22660 break
22661 }
22662 y5 := o1.Args[1]
22663 if y5.Op != OpARM64MOVDnop {
22664 break
22665 }
22666 x5 := y5.Args[0]
22667 if x5.Op != OpARM64MOVBUload {
22668 break
22669 }
22670 i2 := x5.AuxInt
22671 if x5.Aux != s {
22672 break
22673 }
22674 _ = x5.Args[1]
22675 if p != x5.Args[0] {
22676 break
22677 }
22678 if mem != x5.Args[1] {
22679 break
22680 }
22681 y6 := o0.Args[1]
22682 if y6.Op != OpARM64MOVDnop {
22683 break
22684 }
22685 x6 := y6.Args[0]
22686 if x6.Op != OpARM64MOVBUload {
22687 break
22688 }
22689 i1 := x6.AuxInt
22690 if x6.Aux != s {
22691 break
22692 }
22693 _ = x6.Args[1]
22694 if p != x6.Args[0] {
22695 break
22696 }
22697 if mem != x6.Args[1] {
22698 break
22699 }
22700 if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)) {
22701 break
22702 }
22703 b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7)
22704 v0 := b.NewValue0(x6.Pos, OpARM64MOVDload, t)
22705 v.reset(OpCopy)
22706 v.AddArg(v0)
22707 v0.Aux = s
22708 v1 := b.NewValue0(x6.Pos, OpOffPtr, p.Type)
22709 v1.AuxInt = i0
22710 v1.AddArg(p)
22711 v0.AddArg(v1)
22712 v0.AddArg(mem)
22713 return true
22714 }
22715
22716
22717
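// Indexed eight-byte load merge: byte 0 is read with (MOVBUloadidx ptr0 idx0 mem) and
// bytes 1..7 with MOVBUload [1..7] from p = (ADD ptr1 idx1); when isSamePtr shows the
// addresses agree, the whole tree becomes (MOVDloadidx ptr0 idx0 mem).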
22718 for {
22719 t := v.Type
22720 _ = v.Args[1]
22721 o0 := v.Args[0]
22722 if o0.Op != OpARM64ORshiftLL {
22723 break
22724 }
22725 if o0.AuxInt != 8 {
22726 break
22727 }
22728 _ = o0.Args[1]
22729 o1 := o0.Args[0]
22730 if o1.Op != OpARM64ORshiftLL {
22731 break
22732 }
22733 if o1.AuxInt != 16 {
22734 break
22735 }
22736 _ = o1.Args[1]
22737 o2 := o1.Args[0]
22738 if o2.Op != OpARM64ORshiftLL {
22739 break
22740 }
22741 if o2.AuxInt != 24 {
22742 break
22743 }
22744 _ = o2.Args[1]
22745 o3 := o2.Args[0]
22746 if o3.Op != OpARM64ORshiftLL {
22747 break
22748 }
22749 if o3.AuxInt != 32 {
22750 break
22751 }
22752 _ = o3.Args[1]
22753 o4 := o3.Args[0]
22754 if o4.Op != OpARM64ORshiftLL {
22755 break
22756 }
22757 if o4.AuxInt != 40 {
22758 break
22759 }
22760 _ = o4.Args[1]
22761 o5 := o4.Args[0]
22762 if o5.Op != OpARM64ORshiftLL {
22763 break
22764 }
22765 if o5.AuxInt != 48 {
22766 break
22767 }
22768 _ = o5.Args[1]
22769 s0 := o5.Args[0]
22770 if s0.Op != OpARM64SLLconst {
22771 break
22772 }
22773 if s0.AuxInt != 56 {
22774 break
22775 }
22776 y0 := s0.Args[0]
22777 if y0.Op != OpARM64MOVDnop {
22778 break
22779 }
22780 x0 := y0.Args[0]
22781 if x0.Op != OpARM64MOVBUload {
22782 break
22783 }
22784 if x0.AuxInt != 7 {
22785 break
22786 }
22787 s := x0.Aux
22788 mem := x0.Args[1]
22789 p := x0.Args[0]
22790 y1 := o5.Args[1]
22791 if y1.Op != OpARM64MOVDnop {
22792 break
22793 }
22794 x1 := y1.Args[0]
22795 if x1.Op != OpARM64MOVBUload {
22796 break
22797 }
22798 if x1.AuxInt != 6 {
22799 break
22800 }
22801 if x1.Aux != s {
22802 break
22803 }
22804 _ = x1.Args[1]
22805 if p != x1.Args[0] {
22806 break
22807 }
22808 if mem != x1.Args[1] {
22809 break
22810 }
22811 y2 := o4.Args[1]
22812 if y2.Op != OpARM64MOVDnop {
22813 break
22814 }
22815 x2 := y2.Args[0]
22816 if x2.Op != OpARM64MOVBUload {
22817 break
22818 }
22819 if x2.AuxInt != 5 {
22820 break
22821 }
22822 if x2.Aux != s {
22823 break
22824 }
22825 _ = x2.Args[1]
22826 if p != x2.Args[0] {
22827 break
22828 }
22829 if mem != x2.Args[1] {
22830 break
22831 }
22832 y3 := o3.Args[1]
22833 if y3.Op != OpARM64MOVDnop {
22834 break
22835 }
22836 x3 := y3.Args[0]
22837 if x3.Op != OpARM64MOVBUload {
22838 break
22839 }
22840 if x3.AuxInt != 4 {
22841 break
22842 }
22843 if x3.Aux != s {
22844 break
22845 }
22846 _ = x3.Args[1]
22847 if p != x3.Args[0] {
22848 break
22849 }
22850 if mem != x3.Args[1] {
22851 break
22852 }
22853 y4 := o2.Args[1]
22854 if y4.Op != OpARM64MOVDnop {
22855 break
22856 }
22857 x4 := y4.Args[0]
22858 if x4.Op != OpARM64MOVBUload {
22859 break
22860 }
22861 if x4.AuxInt != 3 {
22862 break
22863 }
22864 if x4.Aux != s {
22865 break
22866 }
22867 _ = x4.Args[1]
22868 if p != x4.Args[0] {
22869 break
22870 }
22871 if mem != x4.Args[1] {
22872 break
22873 }
22874 y5 := o1.Args[1]
22875 if y5.Op != OpARM64MOVDnop {
22876 break
22877 }
22878 x5 := y5.Args[0]
22879 if x5.Op != OpARM64MOVBUload {
22880 break
22881 }
22882 if x5.AuxInt != 2 {
22883 break
22884 }
22885 if x5.Aux != s {
22886 break
22887 }
22888 _ = x5.Args[1]
22889 if p != x5.Args[0] {
22890 break
22891 }
22892 if mem != x5.Args[1] {
22893 break
22894 }
22895 y6 := o0.Args[1]
22896 if y6.Op != OpARM64MOVDnop {
22897 break
22898 }
22899 x6 := y6.Args[0]
22900 if x6.Op != OpARM64MOVBUload {
22901 break
22902 }
22903 if x6.AuxInt != 1 {
22904 break
22905 }
22906 if x6.Aux != s {
22907 break
22908 }
22909 _ = x6.Args[1]
22910 p1 := x6.Args[0]
22911 if p1.Op != OpARM64ADD {
22912 break
22913 }
22914 idx1 := p1.Args[1]
22915 ptr1 := p1.Args[0]
22916 if mem != x6.Args[1] {
22917 break
22918 }
22919 y7 := v.Args[1]
22920 if y7.Op != OpARM64MOVDnop {
22921 break
22922 }
22923 x7 := y7.Args[0]
22924 if x7.Op != OpARM64MOVBUloadidx {
22925 break
22926 }
22927 _ = x7.Args[2]
22928 ptr0 := x7.Args[0]
22929 idx0 := x7.Args[1]
22930 if mem != x7.Args[2] {
22931 break
22932 }
22933 if !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)) {
22934 break
22935 }
22936 b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7)
22937 v0 := b.NewValue0(x6.Pos, OpARM64MOVDloadidx, t)
22938 v.reset(OpCopy)
22939 v.AddArg(v0)
22940 v0.AddArg(ptr0)
22941 v0.AddArg(idx0)
22942 v0.AddArg(mem)
22943 return true
22944 }
22945
22946
22947
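// Commuted form of the indexed eight-byte load merge above; also rewritten to
// (MOVDloadidx ptr0 idx0 mem).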
22948 for {
22949 t := v.Type
22950 _ = v.Args[1]
22951 y7 := v.Args[0]
22952 if y7.Op != OpARM64MOVDnop {
22953 break
22954 }
22955 x7 := y7.Args[0]
22956 if x7.Op != OpARM64MOVBUloadidx {
22957 break
22958 }
22959 mem := x7.Args[2]
22960 ptr0 := x7.Args[0]
22961 idx0 := x7.Args[1]
22962 o0 := v.Args[1]
22963 if o0.Op != OpARM64ORshiftLL {
22964 break
22965 }
22966 if o0.AuxInt != 8 {
22967 break
22968 }
22969 _ = o0.Args[1]
22970 o1 := o0.Args[0]
22971 if o1.Op != OpARM64ORshiftLL {
22972 break
22973 }
22974 if o1.AuxInt != 16 {
22975 break
22976 }
22977 _ = o1.Args[1]
22978 o2 := o1.Args[0]
22979 if o2.Op != OpARM64ORshiftLL {
22980 break
22981 }
22982 if o2.AuxInt != 24 {
22983 break
22984 }
22985 _ = o2.Args[1]
22986 o3 := o2.Args[0]
22987 if o3.Op != OpARM64ORshiftLL {
22988 break
22989 }
22990 if o3.AuxInt != 32 {
22991 break
22992 }
22993 _ = o3.Args[1]
22994 o4 := o3.Args[0]
22995 if o4.Op != OpARM64ORshiftLL {
22996 break
22997 }
22998 if o4.AuxInt != 40 {
22999 break
23000 }
23001 _ = o4.Args[1]
23002 o5 := o4.Args[0]
23003 if o5.Op != OpARM64ORshiftLL {
23004 break
23005 }
23006 if o5.AuxInt != 48 {
23007 break
23008 }
23009 _ = o5.Args[1]
23010 s0 := o5.Args[0]
23011 if s0.Op != OpARM64SLLconst {
23012 break
23013 }
23014 if s0.AuxInt != 56 {
23015 break
23016 }
23017 y0 := s0.Args[0]
23018 if y0.Op != OpARM64MOVDnop {
23019 break
23020 }
23021 x0 := y0.Args[0]
23022 if x0.Op != OpARM64MOVBUload {
23023 break
23024 }
23025 if x0.AuxInt != 7 {
23026 break
23027 }
23028 s := x0.Aux
23029 _ = x0.Args[1]
23030 p := x0.Args[0]
23031 if mem != x0.Args[1] {
23032 break
23033 }
23034 y1 := o5.Args[1]
23035 if y1.Op != OpARM64MOVDnop {
23036 break
23037 }
23038 x1 := y1.Args[0]
23039 if x1.Op != OpARM64MOVBUload {
23040 break
23041 }
23042 if x1.AuxInt != 6 {
23043 break
23044 }
23045 if x1.Aux != s {
23046 break
23047 }
23048 _ = x1.Args[1]
23049 if p != x1.Args[0] {
23050 break
23051 }
23052 if mem != x1.Args[1] {
23053 break
23054 }
23055 y2 := o4.Args[1]
23056 if y2.Op != OpARM64MOVDnop {
23057 break
23058 }
23059 x2 := y2.Args[0]
23060 if x2.Op != OpARM64MOVBUload {
23061 break
23062 }
23063 if x2.AuxInt != 5 {
23064 break
23065 }
23066 if x2.Aux != s {
23067 break
23068 }
23069 _ = x2.Args[1]
23070 if p != x2.Args[0] {
23071 break
23072 }
23073 if mem != x2.Args[1] {
23074 break
23075 }
23076 y3 := o3.Args[1]
23077 if y3.Op != OpARM64MOVDnop {
23078 break
23079 }
23080 x3 := y3.Args[0]
23081 if x3.Op != OpARM64MOVBUload {
23082 break
23083 }
23084 if x3.AuxInt != 4 {
23085 break
23086 }
23087 if x3.Aux != s {
23088 break
23089 }
23090 _ = x3.Args[1]
23091 if p != x3.Args[0] {
23092 break
23093 }
23094 if mem != x3.Args[1] {
23095 break
23096 }
23097 y4 := o2.Args[1]
23098 if y4.Op != OpARM64MOVDnop {
23099 break
23100 }
23101 x4 := y4.Args[0]
23102 if x4.Op != OpARM64MOVBUload {
23103 break
23104 }
23105 if x4.AuxInt != 3 {
23106 break
23107 }
23108 if x4.Aux != s {
23109 break
23110 }
23111 _ = x4.Args[1]
23112 if p != x4.Args[0] {
23113 break
23114 }
23115 if mem != x4.Args[1] {
23116 break
23117 }
23118 y5 := o1.Args[1]
23119 if y5.Op != OpARM64MOVDnop {
23120 break
23121 }
23122 x5 := y5.Args[0]
23123 if x5.Op != OpARM64MOVBUload {
23124 break
23125 }
23126 if x5.AuxInt != 2 {
23127 break
23128 }
23129 if x5.Aux != s {
23130 break
23131 }
23132 _ = x5.Args[1]
23133 if p != x5.Args[0] {
23134 break
23135 }
23136 if mem != x5.Args[1] {
23137 break
23138 }
23139 y6 := o0.Args[1]
23140 if y6.Op != OpARM64MOVDnop {
23141 break
23142 }
23143 x6 := y6.Args[0]
23144 if x6.Op != OpARM64MOVBUload {
23145 break
23146 }
23147 if x6.AuxInt != 1 {
23148 break
23149 }
23150 if x6.Aux != s {
23151 break
23152 }
23153 _ = x6.Args[1]
23154 p1 := x6.Args[0]
23155 if p1.Op != OpARM64ADD {
23156 break
23157 }
23158 idx1 := p1.Args[1]
23159 ptr1 := p1.Args[0]
23160 if mem != x6.Args[1] {
23161 break
23162 }
23163 if !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)) {
23164 break
23165 }
23166 b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7)
23167 v0 := b.NewValue0(x6.Pos, OpARM64MOVDloadidx, t)
23168 v.reset(OpCopy)
23169 v.AddArg(v0)
23170 v0.AddArg(ptr0)
23171 v0.AddArg(idx0)
23172 v0.AddArg(mem)
23173 return true
23174 }
23175
23176
23177
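// Fully indexed eight-byte load merge: bytes 7..1 come from
// (MOVBUloadidx ptr (ADDconst [7..1] idx) mem) and byte 0 from (MOVBUloadidx ptr idx mem);
// rewritten to (MOVDloadidx ptr idx mem).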
23178 for {
23179 t := v.Type
23180 _ = v.Args[1]
23181 o0 := v.Args[0]
23182 if o0.Op != OpARM64ORshiftLL {
23183 break
23184 }
23185 if o0.AuxInt != 8 {
23186 break
23187 }
23188 _ = o0.Args[1]
23189 o1 := o0.Args[0]
23190 if o1.Op != OpARM64ORshiftLL {
23191 break
23192 }
23193 if o1.AuxInt != 16 {
23194 break
23195 }
23196 _ = o1.Args[1]
23197 o2 := o1.Args[0]
23198 if o2.Op != OpARM64ORshiftLL {
23199 break
23200 }
23201 if o2.AuxInt != 24 {
23202 break
23203 }
23204 _ = o2.Args[1]
23205 o3 := o2.Args[0]
23206 if o3.Op != OpARM64ORshiftLL {
23207 break
23208 }
23209 if o3.AuxInt != 32 {
23210 break
23211 }
23212 _ = o3.Args[1]
23213 o4 := o3.Args[0]
23214 if o4.Op != OpARM64ORshiftLL {
23215 break
23216 }
23217 if o4.AuxInt != 40 {
23218 break
23219 }
23220 _ = o4.Args[1]
23221 o5 := o4.Args[0]
23222 if o5.Op != OpARM64ORshiftLL {
23223 break
23224 }
23225 if o5.AuxInt != 48 {
23226 break
23227 }
23228 _ = o5.Args[1]
23229 s0 := o5.Args[0]
23230 if s0.Op != OpARM64SLLconst {
23231 break
23232 }
23233 if s0.AuxInt != 56 {
23234 break
23235 }
23236 y0 := s0.Args[0]
23237 if y0.Op != OpARM64MOVDnop {
23238 break
23239 }
23240 x0 := y0.Args[0]
23241 if x0.Op != OpARM64MOVBUloadidx {
23242 break
23243 }
23244 mem := x0.Args[2]
23245 ptr := x0.Args[0]
23246 x0_1 := x0.Args[1]
23247 if x0_1.Op != OpARM64ADDconst {
23248 break
23249 }
23250 if x0_1.AuxInt != 7 {
23251 break
23252 }
23253 idx := x0_1.Args[0]
23254 y1 := o5.Args[1]
23255 if y1.Op != OpARM64MOVDnop {
23256 break
23257 }
23258 x1 := y1.Args[0]
23259 if x1.Op != OpARM64MOVBUloadidx {
23260 break
23261 }
23262 _ = x1.Args[2]
23263 if ptr != x1.Args[0] {
23264 break
23265 }
23266 x1_1 := x1.Args[1]
23267 if x1_1.Op != OpARM64ADDconst {
23268 break
23269 }
23270 if x1_1.AuxInt != 6 {
23271 break
23272 }
23273 if idx != x1_1.Args[0] {
23274 break
23275 }
23276 if mem != x1.Args[2] {
23277 break
23278 }
23279 y2 := o4.Args[1]
23280 if y2.Op != OpARM64MOVDnop {
23281 break
23282 }
23283 x2 := y2.Args[0]
23284 if x2.Op != OpARM64MOVBUloadidx {
23285 break
23286 }
23287 _ = x2.Args[2]
23288 if ptr != x2.Args[0] {
23289 break
23290 }
23291 x2_1 := x2.Args[1]
23292 if x2_1.Op != OpARM64ADDconst {
23293 break
23294 }
23295 if x2_1.AuxInt != 5 {
23296 break
23297 }
23298 if idx != x2_1.Args[0] {
23299 break
23300 }
23301 if mem != x2.Args[2] {
23302 break
23303 }
23304 y3 := o3.Args[1]
23305 if y3.Op != OpARM64MOVDnop {
23306 break
23307 }
23308 x3 := y3.Args[0]
23309 if x3.Op != OpARM64MOVBUloadidx {
23310 break
23311 }
23312 _ = x3.Args[2]
23313 if ptr != x3.Args[0] {
23314 break
23315 }
23316 x3_1 := x3.Args[1]
23317 if x3_1.Op != OpARM64ADDconst {
23318 break
23319 }
23320 if x3_1.AuxInt != 4 {
23321 break
23322 }
23323 if idx != x3_1.Args[0] {
23324 break
23325 }
23326 if mem != x3.Args[2] {
23327 break
23328 }
23329 y4 := o2.Args[1]
23330 if y4.Op != OpARM64MOVDnop {
23331 break
23332 }
23333 x4 := y4.Args[0]
23334 if x4.Op != OpARM64MOVBUloadidx {
23335 break
23336 }
23337 _ = x4.Args[2]
23338 if ptr != x4.Args[0] {
23339 break
23340 }
23341 x4_1 := x4.Args[1]
23342 if x4_1.Op != OpARM64ADDconst {
23343 break
23344 }
23345 if x4_1.AuxInt != 3 {
23346 break
23347 }
23348 if idx != x4_1.Args[0] {
23349 break
23350 }
23351 if mem != x4.Args[2] {
23352 break
23353 }
23354 y5 := o1.Args[1]
23355 if y5.Op != OpARM64MOVDnop {
23356 break
23357 }
23358 x5 := y5.Args[0]
23359 if x5.Op != OpARM64MOVBUloadidx {
23360 break
23361 }
23362 _ = x5.Args[2]
23363 if ptr != x5.Args[0] {
23364 break
23365 }
23366 x5_1 := x5.Args[1]
23367 if x5_1.Op != OpARM64ADDconst {
23368 break
23369 }
23370 if x5_1.AuxInt != 2 {
23371 break
23372 }
23373 if idx != x5_1.Args[0] {
23374 break
23375 }
23376 if mem != x5.Args[2] {
23377 break
23378 }
23379 y6 := o0.Args[1]
23380 if y6.Op != OpARM64MOVDnop {
23381 break
23382 }
23383 x6 := y6.Args[0]
23384 if x6.Op != OpARM64MOVBUloadidx {
23385 break
23386 }
23387 _ = x6.Args[2]
23388 if ptr != x6.Args[0] {
23389 break
23390 }
23391 x6_1 := x6.Args[1]
23392 if x6_1.Op != OpARM64ADDconst {
23393 break
23394 }
23395 if x6_1.AuxInt != 1 {
23396 break
23397 }
23398 if idx != x6_1.Args[0] {
23399 break
23400 }
23401 if mem != x6.Args[2] {
23402 break
23403 }
23404 y7 := v.Args[1]
23405 if y7.Op != OpARM64MOVDnop {
23406 break
23407 }
23408 x7 := y7.Args[0]
23409 if x7.Op != OpARM64MOVBUloadidx {
23410 break
23411 }
23412 _ = x7.Args[2]
23413 if ptr != x7.Args[0] {
23414 break
23415 }
23416 if idx != x7.Args[1] {
23417 break
23418 }
23419 if mem != x7.Args[2] {
23420 break
23421 }
23422 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)) {
23423 break
23424 }
23425 b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7)
23426 v0 := b.NewValue0(v.Pos, OpARM64MOVDloadidx, t)
23427 v.reset(OpCopy)
23428 v.AddArg(v0)
23429 v0.AddArg(ptr)
23430 v0.AddArg(idx)
23431 v0.AddArg(mem)
23432 return true
23433 }
// As in the previous rule, with the OR operands commuted: eight MOVBUloadidx byte loads at
// idx, idx+1, ..., idx+7, assembled in little-endian order through the ORshiftLL chain,
// are replaced by a single MOVDloadidx ptr idx mem.
23437 for {
23438 t := v.Type
23439 _ = v.Args[1]
23440 y7 := v.Args[0]
23441 if y7.Op != OpARM64MOVDnop {
23442 break
23443 }
23444 x7 := y7.Args[0]
23445 if x7.Op != OpARM64MOVBUloadidx {
23446 break
23447 }
23448 mem := x7.Args[2]
23449 ptr := x7.Args[0]
23450 idx := x7.Args[1]
23451 o0 := v.Args[1]
23452 if o0.Op != OpARM64ORshiftLL {
23453 break
23454 }
23455 if o0.AuxInt != 8 {
23456 break
23457 }
23458 _ = o0.Args[1]
23459 o1 := o0.Args[0]
23460 if o1.Op != OpARM64ORshiftLL {
23461 break
23462 }
23463 if o1.AuxInt != 16 {
23464 break
23465 }
23466 _ = o1.Args[1]
23467 o2 := o1.Args[0]
23468 if o2.Op != OpARM64ORshiftLL {
23469 break
23470 }
23471 if o2.AuxInt != 24 {
23472 break
23473 }
23474 _ = o2.Args[1]
23475 o3 := o2.Args[0]
23476 if o3.Op != OpARM64ORshiftLL {
23477 break
23478 }
23479 if o3.AuxInt != 32 {
23480 break
23481 }
23482 _ = o3.Args[1]
23483 o4 := o3.Args[0]
23484 if o4.Op != OpARM64ORshiftLL {
23485 break
23486 }
23487 if o4.AuxInt != 40 {
23488 break
23489 }
23490 _ = o4.Args[1]
23491 o5 := o4.Args[0]
23492 if o5.Op != OpARM64ORshiftLL {
23493 break
23494 }
23495 if o5.AuxInt != 48 {
23496 break
23497 }
23498 _ = o5.Args[1]
23499 s0 := o5.Args[0]
23500 if s0.Op != OpARM64SLLconst {
23501 break
23502 }
23503 if s0.AuxInt != 56 {
23504 break
23505 }
23506 y0 := s0.Args[0]
23507 if y0.Op != OpARM64MOVDnop {
23508 break
23509 }
23510 x0 := y0.Args[0]
23511 if x0.Op != OpARM64MOVBUloadidx {
23512 break
23513 }
23514 _ = x0.Args[2]
23515 if ptr != x0.Args[0] {
23516 break
23517 }
23518 x0_1 := x0.Args[1]
23519 if x0_1.Op != OpARM64ADDconst {
23520 break
23521 }
23522 if x0_1.AuxInt != 7 {
23523 break
23524 }
23525 if idx != x0_1.Args[0] {
23526 break
23527 }
23528 if mem != x0.Args[2] {
23529 break
23530 }
23531 y1 := o5.Args[1]
23532 if y1.Op != OpARM64MOVDnop {
23533 break
23534 }
23535 x1 := y1.Args[0]
23536 if x1.Op != OpARM64MOVBUloadidx {
23537 break
23538 }
23539 _ = x1.Args[2]
23540 if ptr != x1.Args[0] {
23541 break
23542 }
23543 x1_1 := x1.Args[1]
23544 if x1_1.Op != OpARM64ADDconst {
23545 break
23546 }
23547 if x1_1.AuxInt != 6 {
23548 break
23549 }
23550 if idx != x1_1.Args[0] {
23551 break
23552 }
23553 if mem != x1.Args[2] {
23554 break
23555 }
23556 y2 := o4.Args[1]
23557 if y2.Op != OpARM64MOVDnop {
23558 break
23559 }
23560 x2 := y2.Args[0]
23561 if x2.Op != OpARM64MOVBUloadidx {
23562 break
23563 }
23564 _ = x2.Args[2]
23565 if ptr != x2.Args[0] {
23566 break
23567 }
23568 x2_1 := x2.Args[1]
23569 if x2_1.Op != OpARM64ADDconst {
23570 break
23571 }
23572 if x2_1.AuxInt != 5 {
23573 break
23574 }
23575 if idx != x2_1.Args[0] {
23576 break
23577 }
23578 if mem != x2.Args[2] {
23579 break
23580 }
23581 y3 := o3.Args[1]
23582 if y3.Op != OpARM64MOVDnop {
23583 break
23584 }
23585 x3 := y3.Args[0]
23586 if x3.Op != OpARM64MOVBUloadidx {
23587 break
23588 }
23589 _ = x3.Args[2]
23590 if ptr != x3.Args[0] {
23591 break
23592 }
23593 x3_1 := x3.Args[1]
23594 if x3_1.Op != OpARM64ADDconst {
23595 break
23596 }
23597 if x3_1.AuxInt != 4 {
23598 break
23599 }
23600 if idx != x3_1.Args[0] {
23601 break
23602 }
23603 if mem != x3.Args[2] {
23604 break
23605 }
23606 y4 := o2.Args[1]
23607 if y4.Op != OpARM64MOVDnop {
23608 break
23609 }
23610 x4 := y4.Args[0]
23611 if x4.Op != OpARM64MOVBUloadidx {
23612 break
23613 }
23614 _ = x4.Args[2]
23615 if ptr != x4.Args[0] {
23616 break
23617 }
23618 x4_1 := x4.Args[1]
23619 if x4_1.Op != OpARM64ADDconst {
23620 break
23621 }
23622 if x4_1.AuxInt != 3 {
23623 break
23624 }
23625 if idx != x4_1.Args[0] {
23626 break
23627 }
23628 if mem != x4.Args[2] {
23629 break
23630 }
23631 y5 := o1.Args[1]
23632 if y5.Op != OpARM64MOVDnop {
23633 break
23634 }
23635 x5 := y5.Args[0]
23636 if x5.Op != OpARM64MOVBUloadidx {
23637 break
23638 }
23639 _ = x5.Args[2]
23640 if ptr != x5.Args[0] {
23641 break
23642 }
23643 x5_1 := x5.Args[1]
23644 if x5_1.Op != OpARM64ADDconst {
23645 break
23646 }
23647 if x5_1.AuxInt != 2 {
23648 break
23649 }
23650 if idx != x5_1.Args[0] {
23651 break
23652 }
23653 if mem != x5.Args[2] {
23654 break
23655 }
23656 y6 := o0.Args[1]
23657 if y6.Op != OpARM64MOVDnop {
23658 break
23659 }
23660 x6 := y6.Args[0]
23661 if x6.Op != OpARM64MOVBUloadidx {
23662 break
23663 }
23664 _ = x6.Args[2]
23665 if ptr != x6.Args[0] {
23666 break
23667 }
23668 x6_1 := x6.Args[1]
23669 if x6_1.Op != OpARM64ADDconst {
23670 break
23671 }
23672 if x6_1.AuxInt != 1 {
23673 break
23674 }
23675 if idx != x6_1.Args[0] {
23676 break
23677 }
23678 if mem != x6.Args[2] {
23679 break
23680 }
23681 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)) {
23682 break
23683 }
23684 b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7)
23685 v0 := b.NewValue0(v.Pos, OpARM64MOVDloadidx, t)
23686 v.reset(OpCopy)
23687 v.AddArg(v0)
23688 v0.AddArg(ptr)
23689 v0.AddArg(idx)
23690 v0.AddArg(mem)
23691 return true
23692 }
// match: four MOVBUload byte loads at offsets i0, i0+1, i0+2, i0+3 from the same base,
// OR'ed together in big-endian order (byte at i0 shifted left 24).
// result: REVW of a single MOVWUload at offset i0, i.e. a byte-reversed 32-bit load.
23696 for {
23697 t := v.Type
23698 _ = v.Args[1]
23699 o0 := v.Args[0]
23700 if o0.Op != OpARM64ORshiftLL {
23701 break
23702 }
23703 if o0.AuxInt != 8 {
23704 break
23705 }
23706 _ = o0.Args[1]
23707 o1 := o0.Args[0]
23708 if o1.Op != OpARM64ORshiftLL {
23709 break
23710 }
23711 if o1.AuxInt != 16 {
23712 break
23713 }
23714 _ = o1.Args[1]
23715 s0 := o1.Args[0]
23716 if s0.Op != OpARM64SLLconst {
23717 break
23718 }
23719 if s0.AuxInt != 24 {
23720 break
23721 }
23722 y0 := s0.Args[0]
23723 if y0.Op != OpARM64MOVDnop {
23724 break
23725 }
23726 x0 := y0.Args[0]
23727 if x0.Op != OpARM64MOVBUload {
23728 break
23729 }
23730 i0 := x0.AuxInt
23731 s := x0.Aux
23732 mem := x0.Args[1]
23733 p := x0.Args[0]
23734 y1 := o1.Args[1]
23735 if y1.Op != OpARM64MOVDnop {
23736 break
23737 }
23738 x1 := y1.Args[0]
23739 if x1.Op != OpARM64MOVBUload {
23740 break
23741 }
23742 i1 := x1.AuxInt
23743 if x1.Aux != s {
23744 break
23745 }
23746 _ = x1.Args[1]
23747 if p != x1.Args[0] {
23748 break
23749 }
23750 if mem != x1.Args[1] {
23751 break
23752 }
23753 y2 := o0.Args[1]
23754 if y2.Op != OpARM64MOVDnop {
23755 break
23756 }
23757 x2 := y2.Args[0]
23758 if x2.Op != OpARM64MOVBUload {
23759 break
23760 }
23761 i2 := x2.AuxInt
23762 if x2.Aux != s {
23763 break
23764 }
23765 _ = x2.Args[1]
23766 if p != x2.Args[0] {
23767 break
23768 }
23769 if mem != x2.Args[1] {
23770 break
23771 }
23772 y3 := v.Args[1]
23773 if y3.Op != OpARM64MOVDnop {
23774 break
23775 }
23776 x3 := y3.Args[0]
23777 if x3.Op != OpARM64MOVBUload {
23778 break
23779 }
23780 i3 := x3.AuxInt
23781 if x3.Aux != s {
23782 break
23783 }
23784 _ = x3.Args[1]
23785 if p != x3.Args[0] {
23786 break
23787 }
23788 if mem != x3.Args[1] {
23789 break
23790 }
23791 if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) {
23792 break
23793 }
23794 b = mergePoint(b, x0, x1, x2, x3)
23795 v0 := b.NewValue0(x3.Pos, OpARM64REVW, t)
23796 v.reset(OpCopy)
23797 v.AddArg(v0)
23798 v1 := b.NewValue0(x3.Pos, OpARM64MOVWUload, t)
23799 v1.Aux = s
23800 v2 := b.NewValue0(x3.Pos, OpOffPtr, p.Type)
23801 v2.AuxInt = i0
23802 v2.AddArg(p)
23803 v1.AddArg(v2)
23804 v1.AddArg(mem)
23805 v0.AddArg(v1)
23806 return true
23807 }
// Same big-endian 4-byte combine as above, with the OR operands commuted.
23811 for {
23812 t := v.Type
23813 _ = v.Args[1]
23814 y3 := v.Args[0]
23815 if y3.Op != OpARM64MOVDnop {
23816 break
23817 }
23818 x3 := y3.Args[0]
23819 if x3.Op != OpARM64MOVBUload {
23820 break
23821 }
23822 i3 := x3.AuxInt
23823 s := x3.Aux
23824 mem := x3.Args[1]
23825 p := x3.Args[0]
23826 o0 := v.Args[1]
23827 if o0.Op != OpARM64ORshiftLL {
23828 break
23829 }
23830 if o0.AuxInt != 8 {
23831 break
23832 }
23833 _ = o0.Args[1]
23834 o1 := o0.Args[0]
23835 if o1.Op != OpARM64ORshiftLL {
23836 break
23837 }
23838 if o1.AuxInt != 16 {
23839 break
23840 }
23841 _ = o1.Args[1]
23842 s0 := o1.Args[0]
23843 if s0.Op != OpARM64SLLconst {
23844 break
23845 }
23846 if s0.AuxInt != 24 {
23847 break
23848 }
23849 y0 := s0.Args[0]
23850 if y0.Op != OpARM64MOVDnop {
23851 break
23852 }
23853 x0 := y0.Args[0]
23854 if x0.Op != OpARM64MOVBUload {
23855 break
23856 }
23857 i0 := x0.AuxInt
23858 if x0.Aux != s {
23859 break
23860 }
23861 _ = x0.Args[1]
23862 if p != x0.Args[0] {
23863 break
23864 }
23865 if mem != x0.Args[1] {
23866 break
23867 }
23868 y1 := o1.Args[1]
23869 if y1.Op != OpARM64MOVDnop {
23870 break
23871 }
23872 x1 := y1.Args[0]
23873 if x1.Op != OpARM64MOVBUload {
23874 break
23875 }
23876 i1 := x1.AuxInt
23877 if x1.Aux != s {
23878 break
23879 }
23880 _ = x1.Args[1]
23881 if p != x1.Args[0] {
23882 break
23883 }
23884 if mem != x1.Args[1] {
23885 break
23886 }
23887 y2 := o0.Args[1]
23888 if y2.Op != OpARM64MOVDnop {
23889 break
23890 }
23891 x2 := y2.Args[0]
23892 if x2.Op != OpARM64MOVBUload {
23893 break
23894 }
23895 i2 := x2.AuxInt
23896 if x2.Aux != s {
23897 break
23898 }
23899 _ = x2.Args[1]
23900 if p != x2.Args[0] {
23901 break
23902 }
23903 if mem != x2.Args[1] {
23904 break
23905 }
23906 if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) {
23907 break
23908 }
23909 b = mergePoint(b, x0, x1, x2, x3)
23910 v0 := b.NewValue0(x2.Pos, OpARM64REVW, t)
23911 v.reset(OpCopy)
23912 v.AddArg(v0)
23913 v1 := b.NewValue0(x2.Pos, OpARM64MOVWUload, t)
23914 v1.Aux = s
23915 v2 := b.NewValue0(x2.Pos, OpOffPtr, p.Type)
23916 v2.AuxInt = i0
23917 v2.AddArg(p)
23918 v1.AddArg(v2)
23919 v1.AddArg(mem)
23920 v0.AddArg(v1)
23921 return true
23922 }
// Indexed variant of the big-endian 4-byte combine: the byte shifted left 24 comes from
// MOVBUloadidx ptr0 idx0, the remaining bytes from MOVBUload at offsets 1..3 off
// (ADD ptr1 idx1) with a nil symbol. When the address operands match, the result is
// REVW of a single MOVWUloadidx ptr0 idx0 mem.
23926 for {
23927 t := v.Type
23928 _ = v.Args[1]
23929 o0 := v.Args[0]
23930 if o0.Op != OpARM64ORshiftLL {
23931 break
23932 }
23933 if o0.AuxInt != 8 {
23934 break
23935 }
23936 _ = o0.Args[1]
23937 o1 := o0.Args[0]
23938 if o1.Op != OpARM64ORshiftLL {
23939 break
23940 }
23941 if o1.AuxInt != 16 {
23942 break
23943 }
23944 _ = o1.Args[1]
23945 s0 := o1.Args[0]
23946 if s0.Op != OpARM64SLLconst {
23947 break
23948 }
23949 if s0.AuxInt != 24 {
23950 break
23951 }
23952 y0 := s0.Args[0]
23953 if y0.Op != OpARM64MOVDnop {
23954 break
23955 }
23956 x0 := y0.Args[0]
23957 if x0.Op != OpARM64MOVBUloadidx {
23958 break
23959 }
23960 mem := x0.Args[2]
23961 ptr0 := x0.Args[0]
23962 idx0 := x0.Args[1]
23963 y1 := o1.Args[1]
23964 if y1.Op != OpARM64MOVDnop {
23965 break
23966 }
23967 x1 := y1.Args[0]
23968 if x1.Op != OpARM64MOVBUload {
23969 break
23970 }
23971 if x1.AuxInt != 1 {
23972 break
23973 }
23974 s := x1.Aux
23975 _ = x1.Args[1]
23976 p1 := x1.Args[0]
23977 if p1.Op != OpARM64ADD {
23978 break
23979 }
23980 idx1 := p1.Args[1]
23981 ptr1 := p1.Args[0]
23982 if mem != x1.Args[1] {
23983 break
23984 }
23985 y2 := o0.Args[1]
23986 if y2.Op != OpARM64MOVDnop {
23987 break
23988 }
23989 x2 := y2.Args[0]
23990 if x2.Op != OpARM64MOVBUload {
23991 break
23992 }
23993 if x2.AuxInt != 2 {
23994 break
23995 }
23996 if x2.Aux != s {
23997 break
23998 }
23999 _ = x2.Args[1]
24000 p := x2.Args[0]
24001 if mem != x2.Args[1] {
24002 break
24003 }
24004 y3 := v.Args[1]
24005 if y3.Op != OpARM64MOVDnop {
24006 break
24007 }
24008 x3 := y3.Args[0]
24009 if x3.Op != OpARM64MOVBUload {
24010 break
24011 }
24012 if x3.AuxInt != 3 {
24013 break
24014 }
24015 if x3.Aux != s {
24016 break
24017 }
24018 _ = x3.Args[1]
24019 if p != x3.Args[0] {
24020 break
24021 }
24022 if mem != x3.Args[1] {
24023 break
24024 }
24025 if !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) {
24026 break
24027 }
24028 b = mergePoint(b, x0, x1, x2, x3)
24029 v0 := b.NewValue0(x3.Pos, OpARM64REVW, t)
24030 v.reset(OpCopy)
24031 v.AddArg(v0)
24032 v1 := b.NewValue0(x3.Pos, OpARM64MOVWUloadidx, t)
24033 v1.AddArg(ptr0)
24034 v1.AddArg(idx0)
24035 v1.AddArg(mem)
24036 v0.AddArg(v1)
24037 return true
24038 }
// Same indexed big-endian 4-byte combine as above, with the OR operands commuted.
24042 for {
24043 t := v.Type
24044 _ = v.Args[1]
24045 y3 := v.Args[0]
24046 if y3.Op != OpARM64MOVDnop {
24047 break
24048 }
24049 x3 := y3.Args[0]
24050 if x3.Op != OpARM64MOVBUload {
24051 break
24052 }
24053 if x3.AuxInt != 3 {
24054 break
24055 }
24056 s := x3.Aux
24057 mem := x3.Args[1]
24058 p := x3.Args[0]
24059 o0 := v.Args[1]
24060 if o0.Op != OpARM64ORshiftLL {
24061 break
24062 }
24063 if o0.AuxInt != 8 {
24064 break
24065 }
24066 _ = o0.Args[1]
24067 o1 := o0.Args[0]
24068 if o1.Op != OpARM64ORshiftLL {
24069 break
24070 }
24071 if o1.AuxInt != 16 {
24072 break
24073 }
24074 _ = o1.Args[1]
24075 s0 := o1.Args[0]
24076 if s0.Op != OpARM64SLLconst {
24077 break
24078 }
24079 if s0.AuxInt != 24 {
24080 break
24081 }
24082 y0 := s0.Args[0]
24083 if y0.Op != OpARM64MOVDnop {
24084 break
24085 }
24086 x0 := y0.Args[0]
24087 if x0.Op != OpARM64MOVBUloadidx {
24088 break
24089 }
24090 _ = x0.Args[2]
24091 ptr0 := x0.Args[0]
24092 idx0 := x0.Args[1]
24093 if mem != x0.Args[2] {
24094 break
24095 }
24096 y1 := o1.Args[1]
24097 if y1.Op != OpARM64MOVDnop {
24098 break
24099 }
24100 x1 := y1.Args[0]
24101 if x1.Op != OpARM64MOVBUload {
24102 break
24103 }
24104 if x1.AuxInt != 1 {
24105 break
24106 }
24107 if x1.Aux != s {
24108 break
24109 }
24110 _ = x1.Args[1]
24111 p1 := x1.Args[0]
24112 if p1.Op != OpARM64ADD {
24113 break
24114 }
24115 idx1 := p1.Args[1]
24116 ptr1 := p1.Args[0]
24117 if mem != x1.Args[1] {
24118 break
24119 }
24120 y2 := o0.Args[1]
24121 if y2.Op != OpARM64MOVDnop {
24122 break
24123 }
24124 x2 := y2.Args[0]
24125 if x2.Op != OpARM64MOVBUload {
24126 break
24127 }
24128 if x2.AuxInt != 2 {
24129 break
24130 }
24131 if x2.Aux != s {
24132 break
24133 }
24134 _ = x2.Args[1]
24135 if p != x2.Args[0] {
24136 break
24137 }
24138 if mem != x2.Args[1] {
24139 break
24140 }
24141 if !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) {
24142 break
24143 }
24144 b = mergePoint(b, x0, x1, x2, x3)
24145 v0 := b.NewValue0(x2.Pos, OpARM64REVW, t)
24146 v.reset(OpCopy)
24147 v.AddArg(v0)
24148 v1 := b.NewValue0(x2.Pos, OpARM64MOVWUloadidx, t)
24149 v1.AddArg(ptr0)
24150 v1.AddArg(idx0)
24151 v1.AddArg(mem)
24152 v0.AddArg(v1)
24153 return true
24154 }
// Fully indexed variant: four MOVBUloadidx byte loads at idx, idx+1, idx+2, idx+3 OR'ed in
// big-endian order become REVW of a single MOVWUloadidx ptr idx mem.
24158 for {
24159 t := v.Type
24160 _ = v.Args[1]
24161 o0 := v.Args[0]
24162 if o0.Op != OpARM64ORshiftLL {
24163 break
24164 }
24165 if o0.AuxInt != 8 {
24166 break
24167 }
24168 _ = o0.Args[1]
24169 o1 := o0.Args[0]
24170 if o1.Op != OpARM64ORshiftLL {
24171 break
24172 }
24173 if o1.AuxInt != 16 {
24174 break
24175 }
24176 _ = o1.Args[1]
24177 s0 := o1.Args[0]
24178 if s0.Op != OpARM64SLLconst {
24179 break
24180 }
24181 if s0.AuxInt != 24 {
24182 break
24183 }
24184 y0 := s0.Args[0]
24185 if y0.Op != OpARM64MOVDnop {
24186 break
24187 }
24188 x0 := y0.Args[0]
24189 if x0.Op != OpARM64MOVBUloadidx {
24190 break
24191 }
24192 mem := x0.Args[2]
24193 ptr := x0.Args[0]
24194 idx := x0.Args[1]
24195 y1 := o1.Args[1]
24196 if y1.Op != OpARM64MOVDnop {
24197 break
24198 }
24199 x1 := y1.Args[0]
24200 if x1.Op != OpARM64MOVBUloadidx {
24201 break
24202 }
24203 _ = x1.Args[2]
24204 if ptr != x1.Args[0] {
24205 break
24206 }
24207 x1_1 := x1.Args[1]
24208 if x1_1.Op != OpARM64ADDconst {
24209 break
24210 }
24211 if x1_1.AuxInt != 1 {
24212 break
24213 }
24214 if idx != x1_1.Args[0] {
24215 break
24216 }
24217 if mem != x1.Args[2] {
24218 break
24219 }
24220 y2 := o0.Args[1]
24221 if y2.Op != OpARM64MOVDnop {
24222 break
24223 }
24224 x2 := y2.Args[0]
24225 if x2.Op != OpARM64MOVBUloadidx {
24226 break
24227 }
24228 _ = x2.Args[2]
24229 if ptr != x2.Args[0] {
24230 break
24231 }
24232 x2_1 := x2.Args[1]
24233 if x2_1.Op != OpARM64ADDconst {
24234 break
24235 }
24236 if x2_1.AuxInt != 2 {
24237 break
24238 }
24239 if idx != x2_1.Args[0] {
24240 break
24241 }
24242 if mem != x2.Args[2] {
24243 break
24244 }
24245 y3 := v.Args[1]
24246 if y3.Op != OpARM64MOVDnop {
24247 break
24248 }
24249 x3 := y3.Args[0]
24250 if x3.Op != OpARM64MOVBUloadidx {
24251 break
24252 }
24253 _ = x3.Args[2]
24254 if ptr != x3.Args[0] {
24255 break
24256 }
24257 x3_1 := x3.Args[1]
24258 if x3_1.Op != OpARM64ADDconst {
24259 break
24260 }
24261 if x3_1.AuxInt != 3 {
24262 break
24263 }
24264 if idx != x3_1.Args[0] {
24265 break
24266 }
24267 if mem != x3.Args[2] {
24268 break
24269 }
24270 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) {
24271 break
24272 }
24273 b = mergePoint(b, x0, x1, x2, x3)
24274 v0 := b.NewValue0(v.Pos, OpARM64REVW, t)
24275 v.reset(OpCopy)
24276 v.AddArg(v0)
24277 v1 := b.NewValue0(v.Pos, OpARM64MOVWUloadidx, t)
24278 v1.AddArg(ptr)
24279 v1.AddArg(idx)
24280 v1.AddArg(mem)
24281 v0.AddArg(v1)
24282 return true
24283 }
24284 return false
24285 }
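// The OR rules in the surrounding functions fuse chains of single-byte loads into one
// wider load, inserting a REVW/REV byte reverse when the bytes are assembled in
// big-endian order. As a rough illustration (not taken from this file; b, u and w are
// placeholder names), Go code of the following shape is what typically lowers to the
// SSA patterns matched above and below:
//
//	// little-endian assembly of 8 bytes: rewritten to a single MOVDload/MOVDloadidx
//	u := uint64(b[0]) | uint64(b[1])<<8 | uint64(b[2])<<16 | uint64(b[3])<<24 |
//		uint64(b[4])<<32 | uint64(b[5])<<40 | uint64(b[6])<<48 | uint64(b[7])<<56
//
//	// big-endian assembly of 8 bytes: rewritten to REV of a single MOVDload/MOVDloadidx
//	w := uint64(b[0])<<56 | uint64(b[1])<<48 | uint64(b[2])<<40 | uint64(b[3])<<32 |
//		uint64(b[4])<<24 | uint64(b[5])<<16 | uint64(b[6])<<8 | uint64(b[7])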
24286 func rewriteValueARM64_OpARM64OR_40(v *Value) bool {
24287 b := v.Block
// Same fully indexed big-endian 4-byte combine as at the end of the previous function,
// with the OR operands commuted.
24291 for {
24292 t := v.Type
24293 _ = v.Args[1]
24294 y3 := v.Args[0]
24295 if y3.Op != OpARM64MOVDnop {
24296 break
24297 }
24298 x3 := y3.Args[0]
24299 if x3.Op != OpARM64MOVBUloadidx {
24300 break
24301 }
24302 mem := x3.Args[2]
24303 ptr := x3.Args[0]
24304 x3_1 := x3.Args[1]
24305 if x3_1.Op != OpARM64ADDconst {
24306 break
24307 }
24308 if x3_1.AuxInt != 3 {
24309 break
24310 }
24311 idx := x3_1.Args[0]
24312 o0 := v.Args[1]
24313 if o0.Op != OpARM64ORshiftLL {
24314 break
24315 }
24316 if o0.AuxInt != 8 {
24317 break
24318 }
24319 _ = o0.Args[1]
24320 o1 := o0.Args[0]
24321 if o1.Op != OpARM64ORshiftLL {
24322 break
24323 }
24324 if o1.AuxInt != 16 {
24325 break
24326 }
24327 _ = o1.Args[1]
24328 s0 := o1.Args[0]
24329 if s0.Op != OpARM64SLLconst {
24330 break
24331 }
24332 if s0.AuxInt != 24 {
24333 break
24334 }
24335 y0 := s0.Args[0]
24336 if y0.Op != OpARM64MOVDnop {
24337 break
24338 }
24339 x0 := y0.Args[0]
24340 if x0.Op != OpARM64MOVBUloadidx {
24341 break
24342 }
24343 _ = x0.Args[2]
24344 if ptr != x0.Args[0] {
24345 break
24346 }
24347 if idx != x0.Args[1] {
24348 break
24349 }
24350 if mem != x0.Args[2] {
24351 break
24352 }
24353 y1 := o1.Args[1]
24354 if y1.Op != OpARM64MOVDnop {
24355 break
24356 }
24357 x1 := y1.Args[0]
24358 if x1.Op != OpARM64MOVBUloadidx {
24359 break
24360 }
24361 _ = x1.Args[2]
24362 if ptr != x1.Args[0] {
24363 break
24364 }
24365 x1_1 := x1.Args[1]
24366 if x1_1.Op != OpARM64ADDconst {
24367 break
24368 }
24369 if x1_1.AuxInt != 1 {
24370 break
24371 }
24372 if idx != x1_1.Args[0] {
24373 break
24374 }
24375 if mem != x1.Args[2] {
24376 break
24377 }
24378 y2 := o0.Args[1]
24379 if y2.Op != OpARM64MOVDnop {
24380 break
24381 }
24382 x2 := y2.Args[0]
24383 if x2.Op != OpARM64MOVBUloadidx {
24384 break
24385 }
24386 _ = x2.Args[2]
24387 if ptr != x2.Args[0] {
24388 break
24389 }
24390 x2_1 := x2.Args[1]
24391 if x2_1.Op != OpARM64ADDconst {
24392 break
24393 }
24394 if x2_1.AuxInt != 2 {
24395 break
24396 }
24397 if idx != x2_1.Args[0] {
24398 break
24399 }
24400 if mem != x2.Args[2] {
24401 break
24402 }
24403 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) {
24404 break
24405 }
24406 b = mergePoint(b, x0, x1, x2, x3)
24407 v0 := b.NewValue0(v.Pos, OpARM64REVW, t)
24408 v.reset(OpCopy)
24409 v.AddArg(v0)
24410 v1 := b.NewValue0(v.Pos, OpARM64MOVWUloadidx, t)
24411 v1.AddArg(ptr)
24412 v1.AddArg(idx)
24413 v1.AddArg(mem)
24414 v0.AddArg(v1)
24415 return true
24416 }
// match: eight MOVBUload byte loads at offsets i0, i0+1, ..., i0+7 from the same base,
// OR'ed together in big-endian order (byte at i0 shifted left 56).
// result: REV of a single MOVDload at offset i0, i.e. a byte-reversed 64-bit load.
24420 for {
24421 t := v.Type
24422 _ = v.Args[1]
24423 o0 := v.Args[0]
24424 if o0.Op != OpARM64ORshiftLL {
24425 break
24426 }
24427 if o0.AuxInt != 8 {
24428 break
24429 }
24430 _ = o0.Args[1]
24431 o1 := o0.Args[0]
24432 if o1.Op != OpARM64ORshiftLL {
24433 break
24434 }
24435 if o1.AuxInt != 16 {
24436 break
24437 }
24438 _ = o1.Args[1]
24439 o2 := o1.Args[0]
24440 if o2.Op != OpARM64ORshiftLL {
24441 break
24442 }
24443 if o2.AuxInt != 24 {
24444 break
24445 }
24446 _ = o2.Args[1]
24447 o3 := o2.Args[0]
24448 if o3.Op != OpARM64ORshiftLL {
24449 break
24450 }
24451 if o3.AuxInt != 32 {
24452 break
24453 }
24454 _ = o3.Args[1]
24455 o4 := o3.Args[0]
24456 if o4.Op != OpARM64ORshiftLL {
24457 break
24458 }
24459 if o4.AuxInt != 40 {
24460 break
24461 }
24462 _ = o4.Args[1]
24463 o5 := o4.Args[0]
24464 if o5.Op != OpARM64ORshiftLL {
24465 break
24466 }
24467 if o5.AuxInt != 48 {
24468 break
24469 }
24470 _ = o5.Args[1]
24471 s0 := o5.Args[0]
24472 if s0.Op != OpARM64SLLconst {
24473 break
24474 }
24475 if s0.AuxInt != 56 {
24476 break
24477 }
24478 y0 := s0.Args[0]
24479 if y0.Op != OpARM64MOVDnop {
24480 break
24481 }
24482 x0 := y0.Args[0]
24483 if x0.Op != OpARM64MOVBUload {
24484 break
24485 }
24486 i0 := x0.AuxInt
24487 s := x0.Aux
24488 mem := x0.Args[1]
24489 p := x0.Args[0]
24490 y1 := o5.Args[1]
24491 if y1.Op != OpARM64MOVDnop {
24492 break
24493 }
24494 x1 := y1.Args[0]
24495 if x1.Op != OpARM64MOVBUload {
24496 break
24497 }
24498 i1 := x1.AuxInt
24499 if x1.Aux != s {
24500 break
24501 }
24502 _ = x1.Args[1]
24503 if p != x1.Args[0] {
24504 break
24505 }
24506 if mem != x1.Args[1] {
24507 break
24508 }
24509 y2 := o4.Args[1]
24510 if y2.Op != OpARM64MOVDnop {
24511 break
24512 }
24513 x2 := y2.Args[0]
24514 if x2.Op != OpARM64MOVBUload {
24515 break
24516 }
24517 i2 := x2.AuxInt
24518 if x2.Aux != s {
24519 break
24520 }
24521 _ = x2.Args[1]
24522 if p != x2.Args[0] {
24523 break
24524 }
24525 if mem != x2.Args[1] {
24526 break
24527 }
24528 y3 := o3.Args[1]
24529 if y3.Op != OpARM64MOVDnop {
24530 break
24531 }
24532 x3 := y3.Args[0]
24533 if x3.Op != OpARM64MOVBUload {
24534 break
24535 }
24536 i3 := x3.AuxInt
24537 if x3.Aux != s {
24538 break
24539 }
24540 _ = x3.Args[1]
24541 if p != x3.Args[0] {
24542 break
24543 }
24544 if mem != x3.Args[1] {
24545 break
24546 }
24547 y4 := o2.Args[1]
24548 if y4.Op != OpARM64MOVDnop {
24549 break
24550 }
24551 x4 := y4.Args[0]
24552 if x4.Op != OpARM64MOVBUload {
24553 break
24554 }
24555 i4 := x4.AuxInt
24556 if x4.Aux != s {
24557 break
24558 }
24559 _ = x4.Args[1]
24560 if p != x4.Args[0] {
24561 break
24562 }
24563 if mem != x4.Args[1] {
24564 break
24565 }
24566 y5 := o1.Args[1]
24567 if y5.Op != OpARM64MOVDnop {
24568 break
24569 }
24570 x5 := y5.Args[0]
24571 if x5.Op != OpARM64MOVBUload {
24572 break
24573 }
24574 i5 := x5.AuxInt
24575 if x5.Aux != s {
24576 break
24577 }
24578 _ = x5.Args[1]
24579 if p != x5.Args[0] {
24580 break
24581 }
24582 if mem != x5.Args[1] {
24583 break
24584 }
24585 y6 := o0.Args[1]
24586 if y6.Op != OpARM64MOVDnop {
24587 break
24588 }
24589 x6 := y6.Args[0]
24590 if x6.Op != OpARM64MOVBUload {
24591 break
24592 }
24593 i6 := x6.AuxInt
24594 if x6.Aux != s {
24595 break
24596 }
24597 _ = x6.Args[1]
24598 if p != x6.Args[0] {
24599 break
24600 }
24601 if mem != x6.Args[1] {
24602 break
24603 }
24604 y7 := v.Args[1]
24605 if y7.Op != OpARM64MOVDnop {
24606 break
24607 }
24608 x7 := y7.Args[0]
24609 if x7.Op != OpARM64MOVBUload {
24610 break
24611 }
24612 i7 := x7.AuxInt
24613 if x7.Aux != s {
24614 break
24615 }
24616 _ = x7.Args[1]
24617 if p != x7.Args[0] {
24618 break
24619 }
24620 if mem != x7.Args[1] {
24621 break
24622 }
24623 if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)) {
24624 break
24625 }
24626 b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7)
24627 v0 := b.NewValue0(x7.Pos, OpARM64REV, t)
24628 v.reset(OpCopy)
24629 v.AddArg(v0)
24630 v1 := b.NewValue0(x7.Pos, OpARM64MOVDload, t)
24631 v1.Aux = s
24632 v2 := b.NewValue0(x7.Pos, OpOffPtr, p.Type)
24633 v2.AuxInt = i0
24634 v2.AddArg(p)
24635 v1.AddArg(v2)
24636 v1.AddArg(mem)
24637 v0.AddArg(v1)
24638 return true
24639 }
// Same big-endian 8-byte combine as above, with the OR operands commuted.
24643 for {
24644 t := v.Type
24645 _ = v.Args[1]
24646 y7 := v.Args[0]
24647 if y7.Op != OpARM64MOVDnop {
24648 break
24649 }
24650 x7 := y7.Args[0]
24651 if x7.Op != OpARM64MOVBUload {
24652 break
24653 }
24654 i7 := x7.AuxInt
24655 s := x7.Aux
24656 mem := x7.Args[1]
24657 p := x7.Args[0]
24658 o0 := v.Args[1]
24659 if o0.Op != OpARM64ORshiftLL {
24660 break
24661 }
24662 if o0.AuxInt != 8 {
24663 break
24664 }
24665 _ = o0.Args[1]
24666 o1 := o0.Args[0]
24667 if o1.Op != OpARM64ORshiftLL {
24668 break
24669 }
24670 if o1.AuxInt != 16 {
24671 break
24672 }
24673 _ = o1.Args[1]
24674 o2 := o1.Args[0]
24675 if o2.Op != OpARM64ORshiftLL {
24676 break
24677 }
24678 if o2.AuxInt != 24 {
24679 break
24680 }
24681 _ = o2.Args[1]
24682 o3 := o2.Args[0]
24683 if o3.Op != OpARM64ORshiftLL {
24684 break
24685 }
24686 if o3.AuxInt != 32 {
24687 break
24688 }
24689 _ = o3.Args[1]
24690 o4 := o3.Args[0]
24691 if o4.Op != OpARM64ORshiftLL {
24692 break
24693 }
24694 if o4.AuxInt != 40 {
24695 break
24696 }
24697 _ = o4.Args[1]
24698 o5 := o4.Args[0]
24699 if o5.Op != OpARM64ORshiftLL {
24700 break
24701 }
24702 if o5.AuxInt != 48 {
24703 break
24704 }
24705 _ = o5.Args[1]
24706 s0 := o5.Args[0]
24707 if s0.Op != OpARM64SLLconst {
24708 break
24709 }
24710 if s0.AuxInt != 56 {
24711 break
24712 }
24713 y0 := s0.Args[0]
24714 if y0.Op != OpARM64MOVDnop {
24715 break
24716 }
24717 x0 := y0.Args[0]
24718 if x0.Op != OpARM64MOVBUload {
24719 break
24720 }
24721 i0 := x0.AuxInt
24722 if x0.Aux != s {
24723 break
24724 }
24725 _ = x0.Args[1]
24726 if p != x0.Args[0] {
24727 break
24728 }
24729 if mem != x0.Args[1] {
24730 break
24731 }
24732 y1 := o5.Args[1]
24733 if y1.Op != OpARM64MOVDnop {
24734 break
24735 }
24736 x1 := y1.Args[0]
24737 if x1.Op != OpARM64MOVBUload {
24738 break
24739 }
24740 i1 := x1.AuxInt
24741 if x1.Aux != s {
24742 break
24743 }
24744 _ = x1.Args[1]
24745 if p != x1.Args[0] {
24746 break
24747 }
24748 if mem != x1.Args[1] {
24749 break
24750 }
24751 y2 := o4.Args[1]
24752 if y2.Op != OpARM64MOVDnop {
24753 break
24754 }
24755 x2 := y2.Args[0]
24756 if x2.Op != OpARM64MOVBUload {
24757 break
24758 }
24759 i2 := x2.AuxInt
24760 if x2.Aux != s {
24761 break
24762 }
24763 _ = x2.Args[1]
24764 if p != x2.Args[0] {
24765 break
24766 }
24767 if mem != x2.Args[1] {
24768 break
24769 }
24770 y3 := o3.Args[1]
24771 if y3.Op != OpARM64MOVDnop {
24772 break
24773 }
24774 x3 := y3.Args[0]
24775 if x3.Op != OpARM64MOVBUload {
24776 break
24777 }
24778 i3 := x3.AuxInt
24779 if x3.Aux != s {
24780 break
24781 }
24782 _ = x3.Args[1]
24783 if p != x3.Args[0] {
24784 break
24785 }
24786 if mem != x3.Args[1] {
24787 break
24788 }
24789 y4 := o2.Args[1]
24790 if y4.Op != OpARM64MOVDnop {
24791 break
24792 }
24793 x4 := y4.Args[0]
24794 if x4.Op != OpARM64MOVBUload {
24795 break
24796 }
24797 i4 := x4.AuxInt
24798 if x4.Aux != s {
24799 break
24800 }
24801 _ = x4.Args[1]
24802 if p != x4.Args[0] {
24803 break
24804 }
24805 if mem != x4.Args[1] {
24806 break
24807 }
24808 y5 := o1.Args[1]
24809 if y5.Op != OpARM64MOVDnop {
24810 break
24811 }
24812 x5 := y5.Args[0]
24813 if x5.Op != OpARM64MOVBUload {
24814 break
24815 }
24816 i5 := x5.AuxInt
24817 if x5.Aux != s {
24818 break
24819 }
24820 _ = x5.Args[1]
24821 if p != x5.Args[0] {
24822 break
24823 }
24824 if mem != x5.Args[1] {
24825 break
24826 }
24827 y6 := o0.Args[1]
24828 if y6.Op != OpARM64MOVDnop {
24829 break
24830 }
24831 x6 := y6.Args[0]
24832 if x6.Op != OpARM64MOVBUload {
24833 break
24834 }
24835 i6 := x6.AuxInt
24836 if x6.Aux != s {
24837 break
24838 }
24839 _ = x6.Args[1]
24840 if p != x6.Args[0] {
24841 break
24842 }
24843 if mem != x6.Args[1] {
24844 break
24845 }
24846 if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)) {
24847 break
24848 }
24849 b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7)
24850 v0 := b.NewValue0(x6.Pos, OpARM64REV, t)
24851 v.reset(OpCopy)
24852 v.AddArg(v0)
24853 v1 := b.NewValue0(x6.Pos, OpARM64MOVDload, t)
24854 v1.Aux = s
24855 v2 := b.NewValue0(x6.Pos, OpOffPtr, p.Type)
24856 v2.AuxInt = i0
24857 v2.AddArg(p)
24858 v1.AddArg(v2)
24859 v1.AddArg(mem)
24860 v0.AddArg(v1)
24861 return true
24862 }
// Indexed variant of the big-endian 8-byte combine: the byte shifted left 56 comes from
// MOVBUloadidx ptr0 idx0, the remaining bytes from MOVBUload at offsets 1..7 off
// (ADD ptr1 idx1) with a nil symbol. When the address operands match, the result is
// REV of a single MOVDloadidx ptr0 idx0 mem.
24866 for {
24867 t := v.Type
24868 _ = v.Args[1]
24869 o0 := v.Args[0]
24870 if o0.Op != OpARM64ORshiftLL {
24871 break
24872 }
24873 if o0.AuxInt != 8 {
24874 break
24875 }
24876 _ = o0.Args[1]
24877 o1 := o0.Args[0]
24878 if o1.Op != OpARM64ORshiftLL {
24879 break
24880 }
24881 if o1.AuxInt != 16 {
24882 break
24883 }
24884 _ = o1.Args[1]
24885 o2 := o1.Args[0]
24886 if o2.Op != OpARM64ORshiftLL {
24887 break
24888 }
24889 if o2.AuxInt != 24 {
24890 break
24891 }
24892 _ = o2.Args[1]
24893 o3 := o2.Args[0]
24894 if o3.Op != OpARM64ORshiftLL {
24895 break
24896 }
24897 if o3.AuxInt != 32 {
24898 break
24899 }
24900 _ = o3.Args[1]
24901 o4 := o3.Args[0]
24902 if o4.Op != OpARM64ORshiftLL {
24903 break
24904 }
24905 if o4.AuxInt != 40 {
24906 break
24907 }
24908 _ = o4.Args[1]
24909 o5 := o4.Args[0]
24910 if o5.Op != OpARM64ORshiftLL {
24911 break
24912 }
24913 if o5.AuxInt != 48 {
24914 break
24915 }
24916 _ = o5.Args[1]
24917 s0 := o5.Args[0]
24918 if s0.Op != OpARM64SLLconst {
24919 break
24920 }
24921 if s0.AuxInt != 56 {
24922 break
24923 }
24924 y0 := s0.Args[0]
24925 if y0.Op != OpARM64MOVDnop {
24926 break
24927 }
24928 x0 := y0.Args[0]
24929 if x0.Op != OpARM64MOVBUloadidx {
24930 break
24931 }
24932 mem := x0.Args[2]
24933 ptr0 := x0.Args[0]
24934 idx0 := x0.Args[1]
24935 y1 := o5.Args[1]
24936 if y1.Op != OpARM64MOVDnop {
24937 break
24938 }
24939 x1 := y1.Args[0]
24940 if x1.Op != OpARM64MOVBUload {
24941 break
24942 }
24943 if x1.AuxInt != 1 {
24944 break
24945 }
24946 s := x1.Aux
24947 _ = x1.Args[1]
24948 p1 := x1.Args[0]
24949 if p1.Op != OpARM64ADD {
24950 break
24951 }
24952 idx1 := p1.Args[1]
24953 ptr1 := p1.Args[0]
24954 if mem != x1.Args[1] {
24955 break
24956 }
24957 y2 := o4.Args[1]
24958 if y2.Op != OpARM64MOVDnop {
24959 break
24960 }
24961 x2 := y2.Args[0]
24962 if x2.Op != OpARM64MOVBUload {
24963 break
24964 }
24965 if x2.AuxInt != 2 {
24966 break
24967 }
24968 if x2.Aux != s {
24969 break
24970 }
24971 _ = x2.Args[1]
24972 p := x2.Args[0]
24973 if mem != x2.Args[1] {
24974 break
24975 }
24976 y3 := o3.Args[1]
24977 if y3.Op != OpARM64MOVDnop {
24978 break
24979 }
24980 x3 := y3.Args[0]
24981 if x3.Op != OpARM64MOVBUload {
24982 break
24983 }
24984 if x3.AuxInt != 3 {
24985 break
24986 }
24987 if x3.Aux != s {
24988 break
24989 }
24990 _ = x3.Args[1]
24991 if p != x3.Args[0] {
24992 break
24993 }
24994 if mem != x3.Args[1] {
24995 break
24996 }
24997 y4 := o2.Args[1]
24998 if y4.Op != OpARM64MOVDnop {
24999 break
25000 }
25001 x4 := y4.Args[0]
25002 if x4.Op != OpARM64MOVBUload {
25003 break
25004 }
25005 if x4.AuxInt != 4 {
25006 break
25007 }
25008 if x4.Aux != s {
25009 break
25010 }
25011 _ = x4.Args[1]
25012 if p != x4.Args[0] {
25013 break
25014 }
25015 if mem != x4.Args[1] {
25016 break
25017 }
25018 y5 := o1.Args[1]
25019 if y5.Op != OpARM64MOVDnop {
25020 break
25021 }
25022 x5 := y5.Args[0]
25023 if x5.Op != OpARM64MOVBUload {
25024 break
25025 }
25026 if x5.AuxInt != 5 {
25027 break
25028 }
25029 if x5.Aux != s {
25030 break
25031 }
25032 _ = x5.Args[1]
25033 if p != x5.Args[0] {
25034 break
25035 }
25036 if mem != x5.Args[1] {
25037 break
25038 }
25039 y6 := o0.Args[1]
25040 if y6.Op != OpARM64MOVDnop {
25041 break
25042 }
25043 x6 := y6.Args[0]
25044 if x6.Op != OpARM64MOVBUload {
25045 break
25046 }
25047 if x6.AuxInt != 6 {
25048 break
25049 }
25050 if x6.Aux != s {
25051 break
25052 }
25053 _ = x6.Args[1]
25054 if p != x6.Args[0] {
25055 break
25056 }
25057 if mem != x6.Args[1] {
25058 break
25059 }
25060 y7 := v.Args[1]
25061 if y7.Op != OpARM64MOVDnop {
25062 break
25063 }
25064 x7 := y7.Args[0]
25065 if x7.Op != OpARM64MOVBUload {
25066 break
25067 }
25068 if x7.AuxInt != 7 {
25069 break
25070 }
25071 if x7.Aux != s {
25072 break
25073 }
25074 _ = x7.Args[1]
25075 if p != x7.Args[0] {
25076 break
25077 }
25078 if mem != x7.Args[1] {
25079 break
25080 }
25081 if !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)) {
25082 break
25083 }
25084 b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7)
25085 v0 := b.NewValue0(x7.Pos, OpARM64REV, t)
25086 v.reset(OpCopy)
25087 v.AddArg(v0)
25088 v1 := b.NewValue0(x7.Pos, OpARM64MOVDloadidx, t)
25089 v1.AddArg(ptr0)
25090 v1.AddArg(idx0)
25091 v1.AddArg(mem)
25092 v0.AddArg(v1)
25093 return true
25094 }
// Same indexed big-endian 8-byte combine as above, with the OR operands commuted.
25098 for {
25099 t := v.Type
25100 _ = v.Args[1]
25101 y7 := v.Args[0]
25102 if y7.Op != OpARM64MOVDnop {
25103 break
25104 }
25105 x7 := y7.Args[0]
25106 if x7.Op != OpARM64MOVBUload {
25107 break
25108 }
25109 if x7.AuxInt != 7 {
25110 break
25111 }
25112 s := x7.Aux
25113 mem := x7.Args[1]
25114 p := x7.Args[0]
25115 o0 := v.Args[1]
25116 if o0.Op != OpARM64ORshiftLL {
25117 break
25118 }
25119 if o0.AuxInt != 8 {
25120 break
25121 }
25122 _ = o0.Args[1]
25123 o1 := o0.Args[0]
25124 if o1.Op != OpARM64ORshiftLL {
25125 break
25126 }
25127 if o1.AuxInt != 16 {
25128 break
25129 }
25130 _ = o1.Args[1]
25131 o2 := o1.Args[0]
25132 if o2.Op != OpARM64ORshiftLL {
25133 break
25134 }
25135 if o2.AuxInt != 24 {
25136 break
25137 }
25138 _ = o2.Args[1]
25139 o3 := o2.Args[0]
25140 if o3.Op != OpARM64ORshiftLL {
25141 break
25142 }
25143 if o3.AuxInt != 32 {
25144 break
25145 }
25146 _ = o3.Args[1]
25147 o4 := o3.Args[0]
25148 if o4.Op != OpARM64ORshiftLL {
25149 break
25150 }
25151 if o4.AuxInt != 40 {
25152 break
25153 }
25154 _ = o4.Args[1]
25155 o5 := o4.Args[0]
25156 if o5.Op != OpARM64ORshiftLL {
25157 break
25158 }
25159 if o5.AuxInt != 48 {
25160 break
25161 }
25162 _ = o5.Args[1]
25163 s0 := o5.Args[0]
25164 if s0.Op != OpARM64SLLconst {
25165 break
25166 }
25167 if s0.AuxInt != 56 {
25168 break
25169 }
25170 y0 := s0.Args[0]
25171 if y0.Op != OpARM64MOVDnop {
25172 break
25173 }
25174 x0 := y0.Args[0]
25175 if x0.Op != OpARM64MOVBUloadidx {
25176 break
25177 }
25178 _ = x0.Args[2]
25179 ptr0 := x0.Args[0]
25180 idx0 := x0.Args[1]
25181 if mem != x0.Args[2] {
25182 break
25183 }
25184 y1 := o5.Args[1]
25185 if y1.Op != OpARM64MOVDnop {
25186 break
25187 }
25188 x1 := y1.Args[0]
25189 if x1.Op != OpARM64MOVBUload {
25190 break
25191 }
25192 if x1.AuxInt != 1 {
25193 break
25194 }
25195 if x1.Aux != s {
25196 break
25197 }
25198 _ = x1.Args[1]
25199 p1 := x1.Args[0]
25200 if p1.Op != OpARM64ADD {
25201 break
25202 }
25203 idx1 := p1.Args[1]
25204 ptr1 := p1.Args[0]
25205 if mem != x1.Args[1] {
25206 break
25207 }
25208 y2 := o4.Args[1]
25209 if y2.Op != OpARM64MOVDnop {
25210 break
25211 }
25212 x2 := y2.Args[0]
25213 if x2.Op != OpARM64MOVBUload {
25214 break
25215 }
25216 if x2.AuxInt != 2 {
25217 break
25218 }
25219 if x2.Aux != s {
25220 break
25221 }
25222 _ = x2.Args[1]
25223 if p != x2.Args[0] {
25224 break
25225 }
25226 if mem != x2.Args[1] {
25227 break
25228 }
25229 y3 := o3.Args[1]
25230 if y3.Op != OpARM64MOVDnop {
25231 break
25232 }
25233 x3 := y3.Args[0]
25234 if x3.Op != OpARM64MOVBUload {
25235 break
25236 }
25237 if x3.AuxInt != 3 {
25238 break
25239 }
25240 if x3.Aux != s {
25241 break
25242 }
25243 _ = x3.Args[1]
25244 if p != x3.Args[0] {
25245 break
25246 }
25247 if mem != x3.Args[1] {
25248 break
25249 }
25250 y4 := o2.Args[1]
25251 if y4.Op != OpARM64MOVDnop {
25252 break
25253 }
25254 x4 := y4.Args[0]
25255 if x4.Op != OpARM64MOVBUload {
25256 break
25257 }
25258 if x4.AuxInt != 4 {
25259 break
25260 }
25261 if x4.Aux != s {
25262 break
25263 }
25264 _ = x4.Args[1]
25265 if p != x4.Args[0] {
25266 break
25267 }
25268 if mem != x4.Args[1] {
25269 break
25270 }
25271 y5 := o1.Args[1]
25272 if y5.Op != OpARM64MOVDnop {
25273 break
25274 }
25275 x5 := y5.Args[0]
25276 if x5.Op != OpARM64MOVBUload {
25277 break
25278 }
25279 if x5.AuxInt != 5 {
25280 break
25281 }
25282 if x5.Aux != s {
25283 break
25284 }
25285 _ = x5.Args[1]
25286 if p != x5.Args[0] {
25287 break
25288 }
25289 if mem != x5.Args[1] {
25290 break
25291 }
25292 y6 := o0.Args[1]
25293 if y6.Op != OpARM64MOVDnop {
25294 break
25295 }
25296 x6 := y6.Args[0]
25297 if x6.Op != OpARM64MOVBUload {
25298 break
25299 }
25300 if x6.AuxInt != 6 {
25301 break
25302 }
25303 if x6.Aux != s {
25304 break
25305 }
25306 _ = x6.Args[1]
25307 if p != x6.Args[0] {
25308 break
25309 }
25310 if mem != x6.Args[1] {
25311 break
25312 }
25313 if !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)) {
25314 break
25315 }
25316 b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7)
25317 v0 := b.NewValue0(x6.Pos, OpARM64REV, t)
25318 v.reset(OpCopy)
25319 v.AddArg(v0)
25320 v1 := b.NewValue0(x6.Pos, OpARM64MOVDloadidx, t)
25321 v1.AddArg(ptr0)
25322 v1.AddArg(idx0)
25323 v1.AddArg(mem)
25324 v0.AddArg(v1)
25325 return true
25326 }
// Fully indexed variant: eight MOVBUloadidx byte loads at idx, idx+1, ..., idx+7 OR'ed in
// big-endian order become REV of a single MOVDloadidx ptr idx mem.
25330 for {
25331 t := v.Type
25332 _ = v.Args[1]
25333 o0 := v.Args[0]
25334 if o0.Op != OpARM64ORshiftLL {
25335 break
25336 }
25337 if o0.AuxInt != 8 {
25338 break
25339 }
25340 _ = o0.Args[1]
25341 o1 := o0.Args[0]
25342 if o1.Op != OpARM64ORshiftLL {
25343 break
25344 }
25345 if o1.AuxInt != 16 {
25346 break
25347 }
25348 _ = o1.Args[1]
25349 o2 := o1.Args[0]
25350 if o2.Op != OpARM64ORshiftLL {
25351 break
25352 }
25353 if o2.AuxInt != 24 {
25354 break
25355 }
25356 _ = o2.Args[1]
25357 o3 := o2.Args[0]
25358 if o3.Op != OpARM64ORshiftLL {
25359 break
25360 }
25361 if o3.AuxInt != 32 {
25362 break
25363 }
25364 _ = o3.Args[1]
25365 o4 := o3.Args[0]
25366 if o4.Op != OpARM64ORshiftLL {
25367 break
25368 }
25369 if o4.AuxInt != 40 {
25370 break
25371 }
25372 _ = o4.Args[1]
25373 o5 := o4.Args[0]
25374 if o5.Op != OpARM64ORshiftLL {
25375 break
25376 }
25377 if o5.AuxInt != 48 {
25378 break
25379 }
25380 _ = o5.Args[1]
25381 s0 := o5.Args[0]
25382 if s0.Op != OpARM64SLLconst {
25383 break
25384 }
25385 if s0.AuxInt != 56 {
25386 break
25387 }
25388 y0 := s0.Args[0]
25389 if y0.Op != OpARM64MOVDnop {
25390 break
25391 }
25392 x0 := y0.Args[0]
25393 if x0.Op != OpARM64MOVBUloadidx {
25394 break
25395 }
25396 mem := x0.Args[2]
25397 ptr := x0.Args[0]
25398 idx := x0.Args[1]
25399 y1 := o5.Args[1]
25400 if y1.Op != OpARM64MOVDnop {
25401 break
25402 }
25403 x1 := y1.Args[0]
25404 if x1.Op != OpARM64MOVBUloadidx {
25405 break
25406 }
25407 _ = x1.Args[2]
25408 if ptr != x1.Args[0] {
25409 break
25410 }
25411 x1_1 := x1.Args[1]
25412 if x1_1.Op != OpARM64ADDconst {
25413 break
25414 }
25415 if x1_1.AuxInt != 1 {
25416 break
25417 }
25418 if idx != x1_1.Args[0] {
25419 break
25420 }
25421 if mem != x1.Args[2] {
25422 break
25423 }
25424 y2 := o4.Args[1]
25425 if y2.Op != OpARM64MOVDnop {
25426 break
25427 }
25428 x2 := y2.Args[0]
25429 if x2.Op != OpARM64MOVBUloadidx {
25430 break
25431 }
25432 _ = x2.Args[2]
25433 if ptr != x2.Args[0] {
25434 break
25435 }
25436 x2_1 := x2.Args[1]
25437 if x2_1.Op != OpARM64ADDconst {
25438 break
25439 }
25440 if x2_1.AuxInt != 2 {
25441 break
25442 }
25443 if idx != x2_1.Args[0] {
25444 break
25445 }
25446 if mem != x2.Args[2] {
25447 break
25448 }
25449 y3 := o3.Args[1]
25450 if y3.Op != OpARM64MOVDnop {
25451 break
25452 }
25453 x3 := y3.Args[0]
25454 if x3.Op != OpARM64MOVBUloadidx {
25455 break
25456 }
25457 _ = x3.Args[2]
25458 if ptr != x3.Args[0] {
25459 break
25460 }
25461 x3_1 := x3.Args[1]
25462 if x3_1.Op != OpARM64ADDconst {
25463 break
25464 }
25465 if x3_1.AuxInt != 3 {
25466 break
25467 }
25468 if idx != x3_1.Args[0] {
25469 break
25470 }
25471 if mem != x3.Args[2] {
25472 break
25473 }
25474 y4 := o2.Args[1]
25475 if y4.Op != OpARM64MOVDnop {
25476 break
25477 }
25478 x4 := y4.Args[0]
25479 if x4.Op != OpARM64MOVBUloadidx {
25480 break
25481 }
25482 _ = x4.Args[2]
25483 if ptr != x4.Args[0] {
25484 break
25485 }
25486 x4_1 := x4.Args[1]
25487 if x4_1.Op != OpARM64ADDconst {
25488 break
25489 }
25490 if x4_1.AuxInt != 4 {
25491 break
25492 }
25493 if idx != x4_1.Args[0] {
25494 break
25495 }
25496 if mem != x4.Args[2] {
25497 break
25498 }
25499 y5 := o1.Args[1]
25500 if y5.Op != OpARM64MOVDnop {
25501 break
25502 }
25503 x5 := y5.Args[0]
25504 if x5.Op != OpARM64MOVBUloadidx {
25505 break
25506 }
25507 _ = x5.Args[2]
25508 if ptr != x5.Args[0] {
25509 break
25510 }
25511 x5_1 := x5.Args[1]
25512 if x5_1.Op != OpARM64ADDconst {
25513 break
25514 }
25515 if x5_1.AuxInt != 5 {
25516 break
25517 }
25518 if idx != x5_1.Args[0] {
25519 break
25520 }
25521 if mem != x5.Args[2] {
25522 break
25523 }
25524 y6 := o0.Args[1]
25525 if y6.Op != OpARM64MOVDnop {
25526 break
25527 }
25528 x6 := y6.Args[0]
25529 if x6.Op != OpARM64MOVBUloadidx {
25530 break
25531 }
25532 _ = x6.Args[2]
25533 if ptr != x6.Args[0] {
25534 break
25535 }
25536 x6_1 := x6.Args[1]
25537 if x6_1.Op != OpARM64ADDconst {
25538 break
25539 }
25540 if x6_1.AuxInt != 6 {
25541 break
25542 }
25543 if idx != x6_1.Args[0] {
25544 break
25545 }
25546 if mem != x6.Args[2] {
25547 break
25548 }
25549 y7 := v.Args[1]
25550 if y7.Op != OpARM64MOVDnop {
25551 break
25552 }
25553 x7 := y7.Args[0]
25554 if x7.Op != OpARM64MOVBUloadidx {
25555 break
25556 }
25557 _ = x7.Args[2]
25558 if ptr != x7.Args[0] {
25559 break
25560 }
25561 x7_1 := x7.Args[1]
25562 if x7_1.Op != OpARM64ADDconst {
25563 break
25564 }
25565 if x7_1.AuxInt != 7 {
25566 break
25567 }
25568 if idx != x7_1.Args[0] {
25569 break
25570 }
25571 if mem != x7.Args[2] {
25572 break
25573 }
25574 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)) {
25575 break
25576 }
25577 b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7)
25578 v0 := b.NewValue0(v.Pos, OpARM64REV, t)
25579 v.reset(OpCopy)
25580 v.AddArg(v0)
25581 v1 := b.NewValue0(v.Pos, OpARM64MOVDloadidx, t)
25582 v1.AddArg(ptr)
25583 v1.AddArg(idx)
25584 v1.AddArg(mem)
25585 v0.AddArg(v1)
25586 return true
25587 }
// Same fully indexed big-endian 8-byte combine, with the OR operands commuted.
25591 for {
25592 t := v.Type
25593 _ = v.Args[1]
25594 y7 := v.Args[0]
25595 if y7.Op != OpARM64MOVDnop {
25596 break
25597 }
25598 x7 := y7.Args[0]
25599 if x7.Op != OpARM64MOVBUloadidx {
25600 break
25601 }
25602 mem := x7.Args[2]
25603 ptr := x7.Args[0]
25604 x7_1 := x7.Args[1]
25605 if x7_1.Op != OpARM64ADDconst {
25606 break
25607 }
25608 if x7_1.AuxInt != 7 {
25609 break
25610 }
25611 idx := x7_1.Args[0]
25612 o0 := v.Args[1]
25613 if o0.Op != OpARM64ORshiftLL {
25614 break
25615 }
25616 if o0.AuxInt != 8 {
25617 break
25618 }
25619 _ = o0.Args[1]
25620 o1 := o0.Args[0]
25621 if o1.Op != OpARM64ORshiftLL {
25622 break
25623 }
25624 if o1.AuxInt != 16 {
25625 break
25626 }
25627 _ = o1.Args[1]
25628 o2 := o1.Args[0]
25629 if o2.Op != OpARM64ORshiftLL {
25630 break
25631 }
25632 if o2.AuxInt != 24 {
25633 break
25634 }
25635 _ = o2.Args[1]
25636 o3 := o2.Args[0]
25637 if o3.Op != OpARM64ORshiftLL {
25638 break
25639 }
25640 if o3.AuxInt != 32 {
25641 break
25642 }
25643 _ = o3.Args[1]
25644 o4 := o3.Args[0]
25645 if o4.Op != OpARM64ORshiftLL {
25646 break
25647 }
25648 if o4.AuxInt != 40 {
25649 break
25650 }
25651 _ = o4.Args[1]
25652 o5 := o4.Args[0]
25653 if o5.Op != OpARM64ORshiftLL {
25654 break
25655 }
25656 if o5.AuxInt != 48 {
25657 break
25658 }
25659 _ = o5.Args[1]
25660 s0 := o5.Args[0]
25661 if s0.Op != OpARM64SLLconst {
25662 break
25663 }
25664 if s0.AuxInt != 56 {
25665 break
25666 }
25667 y0 := s0.Args[0]
25668 if y0.Op != OpARM64MOVDnop {
25669 break
25670 }
25671 x0 := y0.Args[0]
25672 if x0.Op != OpARM64MOVBUloadidx {
25673 break
25674 }
25675 _ = x0.Args[2]
25676 if ptr != x0.Args[0] {
25677 break
25678 }
25679 if idx != x0.Args[1] {
25680 break
25681 }
25682 if mem != x0.Args[2] {
25683 break
25684 }
25685 y1 := o5.Args[1]
25686 if y1.Op != OpARM64MOVDnop {
25687 break
25688 }
25689 x1 := y1.Args[0]
25690 if x1.Op != OpARM64MOVBUloadidx {
25691 break
25692 }
25693 _ = x1.Args[2]
25694 if ptr != x1.Args[0] {
25695 break
25696 }
25697 x1_1 := x1.Args[1]
25698 if x1_1.Op != OpARM64ADDconst {
25699 break
25700 }
25701 if x1_1.AuxInt != 1 {
25702 break
25703 }
25704 if idx != x1_1.Args[0] {
25705 break
25706 }
25707 if mem != x1.Args[2] {
25708 break
25709 }
25710 y2 := o4.Args[1]
25711 if y2.Op != OpARM64MOVDnop {
25712 break
25713 }
25714 x2 := y2.Args[0]
25715 if x2.Op != OpARM64MOVBUloadidx {
25716 break
25717 }
25718 _ = x2.Args[2]
25719 if ptr != x2.Args[0] {
25720 break
25721 }
25722 x2_1 := x2.Args[1]
25723 if x2_1.Op != OpARM64ADDconst {
25724 break
25725 }
25726 if x2_1.AuxInt != 2 {
25727 break
25728 }
25729 if idx != x2_1.Args[0] {
25730 break
25731 }
25732 if mem != x2.Args[2] {
25733 break
25734 }
25735 y3 := o3.Args[1]
25736 if y3.Op != OpARM64MOVDnop {
25737 break
25738 }
25739 x3 := y3.Args[0]
25740 if x3.Op != OpARM64MOVBUloadidx {
25741 break
25742 }
25743 _ = x3.Args[2]
25744 if ptr != x3.Args[0] {
25745 break
25746 }
25747 x3_1 := x3.Args[1]
25748 if x3_1.Op != OpARM64ADDconst {
25749 break
25750 }
25751 if x3_1.AuxInt != 3 {
25752 break
25753 }
25754 if idx != x3_1.Args[0] {
25755 break
25756 }
25757 if mem != x3.Args[2] {
25758 break
25759 }
25760 y4 := o2.Args[1]
25761 if y4.Op != OpARM64MOVDnop {
25762 break
25763 }
25764 x4 := y4.Args[0]
25765 if x4.Op != OpARM64MOVBUloadidx {
25766 break
25767 }
25768 _ = x4.Args[2]
25769 if ptr != x4.Args[0] {
25770 break
25771 }
25772 x4_1 := x4.Args[1]
25773 if x4_1.Op != OpARM64ADDconst {
25774 break
25775 }
25776 if x4_1.AuxInt != 4 {
25777 break
25778 }
25779 if idx != x4_1.Args[0] {
25780 break
25781 }
25782 if mem != x4.Args[2] {
25783 break
25784 }
25785 y5 := o1.Args[1]
25786 if y5.Op != OpARM64MOVDnop {
25787 break
25788 }
25789 x5 := y5.Args[0]
25790 if x5.Op != OpARM64MOVBUloadidx {
25791 break
25792 }
25793 _ = x5.Args[2]
25794 if ptr != x5.Args[0] {
25795 break
25796 }
25797 x5_1 := x5.Args[1]
25798 if x5_1.Op != OpARM64ADDconst {
25799 break
25800 }
25801 if x5_1.AuxInt != 5 {
25802 break
25803 }
25804 if idx != x5_1.Args[0] {
25805 break
25806 }
25807 if mem != x5.Args[2] {
25808 break
25809 }
25810 y6 := o0.Args[1]
25811 if y6.Op != OpARM64MOVDnop {
25812 break
25813 }
25814 x6 := y6.Args[0]
25815 if x6.Op != OpARM64MOVBUloadidx {
25816 break
25817 }
25818 _ = x6.Args[2]
25819 if ptr != x6.Args[0] {
25820 break
25821 }
25822 x6_1 := x6.Args[1]
25823 if x6_1.Op != OpARM64ADDconst {
25824 break
25825 }
25826 if x6_1.AuxInt != 6 {
25827 break
25828 }
25829 if idx != x6_1.Args[0] {
25830 break
25831 }
25832 if mem != x6.Args[2] {
25833 break
25834 }
25835 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)) {
25836 break
25837 }
25838 b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7)
25839 v0 := b.NewValue0(v.Pos, OpARM64REV, t)
25840 v.reset(OpCopy)
25841 v.AddArg(v0)
25842 v1 := b.NewValue0(v.Pos, OpARM64MOVDloadidx, t)
25843 v1.AddArg(ptr)
25844 v1.AddArg(idx)
25845 v1.AddArg(mem)
25846 v0.AddArg(v1)
25847 return true
25848 }
25849 return false
25850 }
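// Roughly speaking, the conditions shared by all of the load-combining rules above work as
// follows: the Uses == 1 checks ensure the original byte loads and ORs have no other
// consumers, mergePoint picks a block in which the fused wide load can be placed (a nil
// result vetoes the rewrite), and the clobber calls mark the matched intermediate values
// as dead so later passes can remove them.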
25851 func rewriteValueARM64_OpARM64ORN_0(v *Value) bool {
// match: (ORN x (MOVDconst [c]))
// result: (ORconst [^c] x)
25855 for {
25856 _ = v.Args[1]
25857 x := v.Args[0]
25858 v_1 := v.Args[1]
25859 if v_1.Op != OpARM64MOVDconst {
25860 break
25861 }
25862 c := v_1.AuxInt
25863 v.reset(OpARM64ORconst)
25864 v.AuxInt = ^c
25865 v.AddArg(x)
25866 return true
25867 }
// match: (ORN x x)
// result: (MOVDconst [-1])
25871 for {
25872 x := v.Args[1]
25873 if x != v.Args[0] {
25874 break
25875 }
25876 v.reset(OpARM64MOVDconst)
25877 v.AuxInt = -1
25878 return true
25879 }
// match: (ORN x0 x1:(SLLconst [c] y))
// cond: clobberIfDead(x1)
// result: (ORNshiftLL x0 y [c])
25883 for {
25884 _ = v.Args[1]
25885 x0 := v.Args[0]
25886 x1 := v.Args[1]
25887 if x1.Op != OpARM64SLLconst {
25888 break
25889 }
25890 c := x1.AuxInt
25891 y := x1.Args[0]
25892 if !(clobberIfDead(x1)) {
25893 break
25894 }
25895 v.reset(OpARM64ORNshiftLL)
25896 v.AuxInt = c
25897 v.AddArg(x0)
25898 v.AddArg(y)
25899 return true
25900 }
// match: (ORN x0 x1:(SRLconst [c] y))
// cond: clobberIfDead(x1)
// result: (ORNshiftRL x0 y [c])
25904 for {
25905 _ = v.Args[1]
25906 x0 := v.Args[0]
25907 x1 := v.Args[1]
25908 if x1.Op != OpARM64SRLconst {
25909 break
25910 }
25911 c := x1.AuxInt
25912 y := x1.Args[0]
25913 if !(clobberIfDead(x1)) {
25914 break
25915 }
25916 v.reset(OpARM64ORNshiftRL)
25917 v.AuxInt = c
25918 v.AddArg(x0)
25919 v.AddArg(y)
25920 return true
25921 }
// match: (ORN x0 x1:(SRAconst [c] y))
// cond: clobberIfDead(x1)
// result: (ORNshiftRA x0 y [c])
25925 for {
25926 _ = v.Args[1]
25927 x0 := v.Args[0]
25928 x1 := v.Args[1]
25929 if x1.Op != OpARM64SRAconst {
25930 break
25931 }
25932 c := x1.AuxInt
25933 y := x1.Args[0]
25934 if !(clobberIfDead(x1)) {
25935 break
25936 }
25937 v.reset(OpARM64ORNshiftRA)
25938 v.AuxInt = c
25939 v.AddArg(x0)
25940 v.AddArg(y)
25941 return true
25942 }
25943 return false
25944 }
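// ORN computes arg0 | ^arg1 (bitwise OR NOT), which is why (ORN x x) above folds to the
// all-ones constant -1: x | ^x sets every bit. The ORNshift* rules below perform the same
// constant folding when the second operand carries an explicit shift.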
25945 func rewriteValueARM64_OpARM64ORNshiftLL_0(v *Value) bool {
// match: (ORNshiftLL x (MOVDconst [c]) [d])
// result: (ORconst [^int64(uint64(c)<<uint64(d))] x)
25949 for {
25950 d := v.AuxInt
25951 _ = v.Args[1]
25952 x := v.Args[0]
25953 v_1 := v.Args[1]
25954 if v_1.Op != OpARM64MOVDconst {
25955 break
25956 }
25957 c := v_1.AuxInt
25958 v.reset(OpARM64ORconst)
25959 v.AuxInt = ^int64(uint64(c) << uint64(d))
25960 v.AddArg(x)
25961 return true
25962 }
// match: (ORNshiftLL x (SLLconst x [c]) [d])
// cond: c==d
// result: (MOVDconst [-1])
25966 for {
25967 d := v.AuxInt
25968 _ = v.Args[1]
25969 x := v.Args[0]
25970 v_1 := v.Args[1]
25971 if v_1.Op != OpARM64SLLconst {
25972 break
25973 }
25974 c := v_1.AuxInt
25975 if x != v_1.Args[0] {
25976 break
25977 }
25978 if !(c == d) {
25979 break
25980 }
25981 v.reset(OpARM64MOVDconst)
25982 v.AuxInt = -1
25983 return true
25984 }
25985 return false
25986 }
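// rewriteValueARM64_OpARM64ORNshiftRA_0 folds a constant shifted operand into
// ORconst and collapses an ORNshiftRA of a value with its own identical shift to -1.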
25987 func rewriteValueARM64_OpARM64ORNshiftRA_0(v *Value) bool {
// match: (ORNshiftRA x (MOVDconst [c]) [d])
// cond:
// result: (ORconst x [^(c>>uint64(d))])
25991 for {
25992 d := v.AuxInt
25993 _ = v.Args[1]
25994 x := v.Args[0]
25995 v_1 := v.Args[1]
25996 if v_1.Op != OpARM64MOVDconst {
25997 break
25998 }
25999 c := v_1.AuxInt
26000 v.reset(OpARM64ORconst)
26001 v.AuxInt = ^(c >> uint64(d))
26002 v.AddArg(x)
26003 return true
26004 }
// match: (ORNshiftRA x (SRAconst x [c]) [d])
// cond: c==d
// result: (MOVDconst [-1])
26008 for {
26009 d := v.AuxInt
26010 _ = v.Args[1]
26011 x := v.Args[0]
26012 v_1 := v.Args[1]
26013 if v_1.Op != OpARM64SRAconst {
26014 break
26015 }
26016 c := v_1.AuxInt
26017 if x != v_1.Args[0] {
26018 break
26019 }
26020 if !(c == d) {
26021 break
26022 }
26023 v.reset(OpARM64MOVDconst)
26024 v.AuxInt = -1
26025 return true
26026 }
26027 return false
26028 }
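// rewriteValueARM64_OpARM64ORNshiftRL_0 folds a constant shifted operand into
// ORconst and collapses an ORNshiftRL of a value with its own identical shift to -1.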
26029 func rewriteValueARM64_OpARM64ORNshiftRL_0(v *Value) bool {
// match: (ORNshiftRL x (MOVDconst [c]) [d])
// cond:
// result: (ORconst x [^int64(uint64(c)>>uint64(d))])
26033 for {
26034 d := v.AuxInt
26035 _ = v.Args[1]
26036 x := v.Args[0]
26037 v_1 := v.Args[1]
26038 if v_1.Op != OpARM64MOVDconst {
26039 break
26040 }
26041 c := v_1.AuxInt
26042 v.reset(OpARM64ORconst)
26043 v.AuxInt = ^int64(uint64(c) >> uint64(d))
26044 v.AddArg(x)
26045 return true
26046 }
// match: (ORNshiftRL x (SRLconst x [c]) [d])
// cond: c==d
// result: (MOVDconst [-1])
26050 for {
26051 d := v.AuxInt
26052 _ = v.Args[1]
26053 x := v.Args[0]
26054 v_1 := v.Args[1]
26055 if v_1.Op != OpARM64SRLconst {
26056 break
26057 }
26058 c := v_1.AuxInt
26059 if x != v_1.Args[0] {
26060 break
26061 }
26062 if !(c == d) {
26063 break
26064 }
26065 v.reset(OpARM64MOVDconst)
26066 v.AuxInt = -1
26067 return true
26068 }
26069 return false
26070 }
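// rewriteValueARM64_OpARM64ORconst_0 constant-folds ORconst: OR with 0 becomes a
// copy, OR with -1 becomes the constant -1, constants and nested ORconst merge,
// and an ANDconst that the OR constant makes redundant is dropped.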
26071 func rewriteValueARM64_OpARM64ORconst_0(v *Value) bool {
// match: (ORconst [0] x)
// cond:
// result: x
26075 for {
26076 if v.AuxInt != 0 {
26077 break
26078 }
26079 x := v.Args[0]
26080 v.reset(OpCopy)
26081 v.Type = x.Type
26082 v.AddArg(x)
26083 return true
26084 }
// match: (ORconst [-1] _)
// cond:
// result: (MOVDconst [-1])
26088 for {
26089 if v.AuxInt != -1 {
26090 break
26091 }
26092 v.reset(OpARM64MOVDconst)
26093 v.AuxInt = -1
26094 return true
26095 }
// match: (ORconst [c] (MOVDconst [d]))
// cond:
// result: (MOVDconst [c|d])
26099 for {
26100 c := v.AuxInt
26101 v_0 := v.Args[0]
26102 if v_0.Op != OpARM64MOVDconst {
26103 break
26104 }
26105 d := v_0.AuxInt
26106 v.reset(OpARM64MOVDconst)
26107 v.AuxInt = c | d
26108 return true
26109 }
// match: (ORconst [c] (ORconst [d] x))
// cond:
// result: (ORconst [c|d] x)
26113 for {
26114 c := v.AuxInt
26115 v_0 := v.Args[0]
26116 if v_0.Op != OpARM64ORconst {
26117 break
26118 }
26119 d := v_0.AuxInt
26120 x := v_0.Args[0]
26121 v.reset(OpARM64ORconst)
26122 v.AuxInt = c | d
26123 v.AddArg(x)
26124 return true
26125 }
// match: (ORconst [c1] (ANDconst [c2] x))
// cond: c2|c1 == ^0
// result: (ORconst [c1] x)
26129 for {
26130 c1 := v.AuxInt
26131 v_0 := v.Args[0]
26132 if v_0.Op != OpARM64ANDconst {
26133 break
26134 }
26135 c2 := v_0.AuxInt
26136 x := v_0.Args[0]
26137 if !(c2|c1 == ^0) {
26138 break
26139 }
26140 v.reset(OpARM64ORconst)
26141 v.AuxInt = c1
26142 v.AddArg(x)
26143 return true
26144 }
26145 return false
26146 }
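// rewriteValueARM64_OpARM64ORshiftLL_0 handles the first group of ORshiftLL
// rules: constant folding, rotate recognition (RORconst/RORWconst), byte swap
// (REV16W), extract/insert forms (EXTRconst, EXTRWconst, BFXIL), and the first
// little-endian load-merging pattern (two adjacent byte loads into MOVHUload).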
26147 func rewriteValueARM64_OpARM64ORshiftLL_0(v *Value) bool {
26148 b := v.Block
26149 typ := &b.Func.Config.Types
// match: (ORshiftLL (MOVDconst [c]) x [d])
// cond:
// result: (ORconst [c] (SLLconst <x.Type> x [d]))
26153 for {
26154 d := v.AuxInt
26155 x := v.Args[1]
26156 v_0 := v.Args[0]
26157 if v_0.Op != OpARM64MOVDconst {
26158 break
26159 }
26160 c := v_0.AuxInt
26161 v.reset(OpARM64ORconst)
26162 v.AuxInt = c
26163 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
26164 v0.AuxInt = d
26165 v0.AddArg(x)
26166 v.AddArg(v0)
26167 return true
26168 }
// match: (ORshiftLL x (MOVDconst [c]) [d])
// cond:
// result: (ORconst x [int64(uint64(c)<<uint64(d))])
26172 for {
26173 d := v.AuxInt
26174 _ = v.Args[1]
26175 x := v.Args[0]
26176 v_1 := v.Args[1]
26177 if v_1.Op != OpARM64MOVDconst {
26178 break
26179 }
26180 c := v_1.AuxInt
26181 v.reset(OpARM64ORconst)
26182 v.AuxInt = int64(uint64(c) << uint64(d))
26183 v.AddArg(x)
26184 return true
26185 }
// match: (ORshiftLL x y:(SLLconst x [c]) [d])
// cond: c==d
// result: y
26189 for {
26190 d := v.AuxInt
26191 _ = v.Args[1]
26192 x := v.Args[0]
26193 y := v.Args[1]
26194 if y.Op != OpARM64SLLconst {
26195 break
26196 }
26197 c := y.AuxInt
26198 if x != y.Args[0] {
26199 break
26200 }
26201 if !(c == d) {
26202 break
26203 }
26204 v.reset(OpCopy)
26205 v.Type = y.Type
26206 v.AddArg(y)
26207 return true
26208 }
// match: (ORshiftLL [c] (SRLconst x [64-c]) x)
// cond:
// result: (RORconst [64-c] x)
26212 for {
26213 c := v.AuxInt
26214 x := v.Args[1]
26215 v_0 := v.Args[0]
26216 if v_0.Op != OpARM64SRLconst {
26217 break
26218 }
26219 if v_0.AuxInt != 64-c {
26220 break
26221 }
26222 if x != v_0.Args[0] {
26223 break
26224 }
26225 v.reset(OpARM64RORconst)
26226 v.AuxInt = 64 - c
26227 v.AddArg(x)
26228 return true
26229 }
// match: (ORshiftLL <t> [c] (UBFX [bfc] x) x)
// cond: c < 32 && t.Size() == 4 && bfc == armBFAuxInt(32-c, c)
// result: (RORWconst [32-c] x)
26233 for {
26234 t := v.Type
26235 c := v.AuxInt
26236 x := v.Args[1]
26237 v_0 := v.Args[0]
26238 if v_0.Op != OpARM64UBFX {
26239 break
26240 }
26241 bfc := v_0.AuxInt
26242 if x != v_0.Args[0] {
26243 break
26244 }
26245 if !(c < 32 && t.Size() == 4 && bfc == armBFAuxInt(32-c, c)) {
26246 break
26247 }
26248 v.reset(OpARM64RORWconst)
26249 v.AuxInt = 32 - c
26250 v.AddArg(x)
26251 return true
26252 }
// match: (ORshiftLL <typ.UInt16> [8] (UBFX <typ.UInt16> [armBFAuxInt(8, 8)] x) x)
// cond:
// result: (REV16W x)
26256 for {
26257 if v.Type != typ.UInt16 {
26258 break
26259 }
26260 if v.AuxInt != 8 {
26261 break
26262 }
26263 x := v.Args[1]
26264 v_0 := v.Args[0]
26265 if v_0.Op != OpARM64UBFX {
26266 break
26267 }
26268 if v_0.Type != typ.UInt16 {
26269 break
26270 }
26271 if v_0.AuxInt != armBFAuxInt(8, 8) {
26272 break
26273 }
26274 if x != v_0.Args[0] {
26275 break
26276 }
26277 v.reset(OpARM64REV16W)
26278 v.AddArg(x)
26279 return true
26280 }
// match: (ORshiftLL [c] (SRLconst x [64-c]) x2)
// cond:
// result: (EXTRconst [64-c] x2 x)
26284 for {
26285 c := v.AuxInt
26286 x2 := v.Args[1]
26287 v_0 := v.Args[0]
26288 if v_0.Op != OpARM64SRLconst {
26289 break
26290 }
26291 if v_0.AuxInt != 64-c {
26292 break
26293 }
26294 x := v_0.Args[0]
26295 v.reset(OpARM64EXTRconst)
26296 v.AuxInt = 64 - c
26297 v.AddArg(x2)
26298 v.AddArg(x)
26299 return true
26300 }
// match: (ORshiftLL <t> [c] (UBFX [bfc] x) x2)
// cond: c < 32 && t.Size() == 4 && bfc == armBFAuxInt(32-c, c)
// result: (EXTRWconst [32-c] x2 x)
26304 for {
26305 t := v.Type
26306 c := v.AuxInt
26307 x2 := v.Args[1]
26308 v_0 := v.Args[0]
26309 if v_0.Op != OpARM64UBFX {
26310 break
26311 }
26312 bfc := v_0.AuxInt
26313 x := v_0.Args[0]
26314 if !(c < 32 && t.Size() == 4 && bfc == armBFAuxInt(32-c, c)) {
26315 break
26316 }
26317 v.reset(OpARM64EXTRWconst)
26318 v.AuxInt = 32 - c
26319 v.AddArg(x2)
26320 v.AddArg(x)
26321 return true
26322 }
// match: (ORshiftLL [sc] (UBFX [bfc] x) (SRLconst [sc] y))
// cond: sc == getARM64BFwidth(bfc)
// result: (BFXIL [bfc] y x)
26326 for {
26327 sc := v.AuxInt
26328 _ = v.Args[1]
26329 v_0 := v.Args[0]
26330 if v_0.Op != OpARM64UBFX {
26331 break
26332 }
26333 bfc := v_0.AuxInt
26334 x := v_0.Args[0]
26335 v_1 := v.Args[1]
26336 if v_1.Op != OpARM64SRLconst {
26337 break
26338 }
26339 if v_1.AuxInt != sc {
26340 break
26341 }
26342 y := v_1.Args[0]
26343 if !(sc == getARM64BFwidth(bfc)) {
26344 break
26345 }
26346 v.reset(OpARM64BFXIL)
26347 v.AuxInt = bfc
26348 v.AddArg(y)
26349 v.AddArg(x)
26350 return true
26351 }
// match: (ORshiftLL <t> [8] y0:(MOVDnop x0:(MOVBUload [i0] {s} p mem)) y1:(MOVDnop x1:(MOVBUload [i1] {s} p mem)))
// cond: i1 == i0+1 && x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b, x0, x1) != nil && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1)
// result: @mergePoint(b, x0, x1) (MOVHUload <t> {s} (OffPtr <p.Type> [i0] p) mem)
26355 for {
26356 t := v.Type
26357 if v.AuxInt != 8 {
26358 break
26359 }
26360 _ = v.Args[1]
26361 y0 := v.Args[0]
26362 if y0.Op != OpARM64MOVDnop {
26363 break
26364 }
26365 x0 := y0.Args[0]
26366 if x0.Op != OpARM64MOVBUload {
26367 break
26368 }
26369 i0 := x0.AuxInt
26370 s := x0.Aux
26371 mem := x0.Args[1]
26372 p := x0.Args[0]
26373 y1 := v.Args[1]
26374 if y1.Op != OpARM64MOVDnop {
26375 break
26376 }
26377 x1 := y1.Args[0]
26378 if x1.Op != OpARM64MOVBUload {
26379 break
26380 }
26381 i1 := x1.AuxInt
26382 if x1.Aux != s {
26383 break
26384 }
26385 _ = x1.Args[1]
26386 if p != x1.Args[0] {
26387 break
26388 }
26389 if mem != x1.Args[1] {
26390 break
26391 }
26392 if !(i1 == i0+1 && x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b, x0, x1) != nil && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1)) {
26393 break
26394 }
26395 b = mergePoint(b, x0, x1)
26396 v0 := b.NewValue0(x1.Pos, OpARM64MOVHUload, t)
26397 v.reset(OpCopy)
26398 v.AddArg(v0)
26399 v0.Aux = s
26400 v1 := b.NewValue0(x1.Pos, OpOffPtr, p.Type)
26401 v1.AuxInt = i0
26402 v1.AddArg(p)
26403 v0.AddArg(v1)
26404 v0.AddArg(mem)
26405 return true
26406 }
26407 return false
26408 }
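// rewriteValueARM64_OpARM64ORshiftLL_10 continues the little-endian
// load-merging patterns: adjacent byte and halfword/word loads, in plain and
// indexed addressing forms, are combined into wider MOVHUloadidx,
// MOVWUload(idx) and MOVDload(idx) loads.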
26409 func rewriteValueARM64_OpARM64ORshiftLL_10(v *Value) bool {
26410 b := v.Block
// match: (ORshiftLL <t> [8] y0:(MOVDnop x0:(MOVBUloadidx ptr0 idx0 mem)) y1:(MOVDnop x1:(MOVBUload [1] {s} p1:(ADD ptr1 idx1) mem)))
// cond: s == nil && x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b, x0, x1) != nil && (ptr0, idx0) and (ptr1, idx1) name the same base/index pair (isSamePtr, possibly swapped) && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1)
// result: @mergePoint(b, x0, x1) (MOVHUloadidx <t> ptr0 idx0 mem)
26414 for {
26415 t := v.Type
26416 if v.AuxInt != 8 {
26417 break
26418 }
26419 _ = v.Args[1]
26420 y0 := v.Args[0]
26421 if y0.Op != OpARM64MOVDnop {
26422 break
26423 }
26424 x0 := y0.Args[0]
26425 if x0.Op != OpARM64MOVBUloadidx {
26426 break
26427 }
26428 mem := x0.Args[2]
26429 ptr0 := x0.Args[0]
26430 idx0 := x0.Args[1]
26431 y1 := v.Args[1]
26432 if y1.Op != OpARM64MOVDnop {
26433 break
26434 }
26435 x1 := y1.Args[0]
26436 if x1.Op != OpARM64MOVBUload {
26437 break
26438 }
26439 if x1.AuxInt != 1 {
26440 break
26441 }
26442 s := x1.Aux
26443 _ = x1.Args[1]
26444 p1 := x1.Args[0]
26445 if p1.Op != OpARM64ADD {
26446 break
26447 }
26448 idx1 := p1.Args[1]
26449 ptr1 := p1.Args[0]
26450 if mem != x1.Args[1] {
26451 break
26452 }
26453 if !(s == nil && x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b, x0, x1) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1)) {
26454 break
26455 }
26456 b = mergePoint(b, x0, x1)
26457 v0 := b.NewValue0(x1.Pos, OpARM64MOVHUloadidx, t)
26458 v.reset(OpCopy)
26459 v.AddArg(v0)
26460 v0.AddArg(ptr0)
26461 v0.AddArg(idx0)
26462 v0.AddArg(mem)
26463 return true
26464 }
// match: (ORshiftLL <t> [8] y0:(MOVDnop x0:(MOVBUloadidx ptr idx mem)) y1:(MOVDnop x1:(MOVBUloadidx ptr (ADDconst [1] idx) mem)))
// cond: x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b, x0, x1) != nil && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1)
// result: @mergePoint(b, x0, x1) (MOVHUloadidx <t> ptr idx mem)
26468 for {
26469 t := v.Type
26470 if v.AuxInt != 8 {
26471 break
26472 }
26473 _ = v.Args[1]
26474 y0 := v.Args[0]
26475 if y0.Op != OpARM64MOVDnop {
26476 break
26477 }
26478 x0 := y0.Args[0]
26479 if x0.Op != OpARM64MOVBUloadidx {
26480 break
26481 }
26482 mem := x0.Args[2]
26483 ptr := x0.Args[0]
26484 idx := x0.Args[1]
26485 y1 := v.Args[1]
26486 if y1.Op != OpARM64MOVDnop {
26487 break
26488 }
26489 x1 := y1.Args[0]
26490 if x1.Op != OpARM64MOVBUloadidx {
26491 break
26492 }
26493 _ = x1.Args[2]
26494 if ptr != x1.Args[0] {
26495 break
26496 }
26497 x1_1 := x1.Args[1]
26498 if x1_1.Op != OpARM64ADDconst {
26499 break
26500 }
26501 if x1_1.AuxInt != 1 {
26502 break
26503 }
26504 if idx != x1_1.Args[0] {
26505 break
26506 }
26507 if mem != x1.Args[2] {
26508 break
26509 }
26510 if !(x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b, x0, x1) != nil && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1)) {
26511 break
26512 }
26513 b = mergePoint(b, x0, x1)
26514 v0 := b.NewValue0(v.Pos, OpARM64MOVHUloadidx, t)
26515 v.reset(OpCopy)
26516 v.AddArg(v0)
26517 v0.AddArg(ptr)
26518 v0.AddArg(idx)
26519 v0.AddArg(mem)
26520 return true
26521 }
// match: (ORshiftLL <t> [24] o0:(ORshiftLL [16] x0:(MOVHUload [i0] {s} p mem) y1:(MOVDnop x1:(MOVBUload [i2] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i3] {s} p mem)))
// cond: i2 == i0+2 && i3 == i0+3 && x0, x1, x2, y1, y2 and o0 each have one use && mergePoint(b, x0, x1, x2) != nil && all of them can be clobbered
// result: @mergePoint(b, x0, x1, x2) (MOVWUload <t> {s} (OffPtr <p.Type> [i0] p) mem)
26525 for {
26526 t := v.Type
26527 if v.AuxInt != 24 {
26528 break
26529 }
26530 _ = v.Args[1]
26531 o0 := v.Args[0]
26532 if o0.Op != OpARM64ORshiftLL {
26533 break
26534 }
26535 if o0.AuxInt != 16 {
26536 break
26537 }
26538 _ = o0.Args[1]
26539 x0 := o0.Args[0]
26540 if x0.Op != OpARM64MOVHUload {
26541 break
26542 }
26543 i0 := x0.AuxInt
26544 s := x0.Aux
26545 mem := x0.Args[1]
26546 p := x0.Args[0]
26547 y1 := o0.Args[1]
26548 if y1.Op != OpARM64MOVDnop {
26549 break
26550 }
26551 x1 := y1.Args[0]
26552 if x1.Op != OpARM64MOVBUload {
26553 break
26554 }
26555 i2 := x1.AuxInt
26556 if x1.Aux != s {
26557 break
26558 }
26559 _ = x1.Args[1]
26560 if p != x1.Args[0] {
26561 break
26562 }
26563 if mem != x1.Args[1] {
26564 break
26565 }
26566 y2 := v.Args[1]
26567 if y2.Op != OpARM64MOVDnop {
26568 break
26569 }
26570 x2 := y2.Args[0]
26571 if x2.Op != OpARM64MOVBUload {
26572 break
26573 }
26574 i3 := x2.AuxInt
26575 if x2.Aux != s {
26576 break
26577 }
26578 _ = x2.Args[1]
26579 if p != x2.Args[0] {
26580 break
26581 }
26582 if mem != x2.Args[1] {
26583 break
26584 }
26585 if !(i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b, x0, x1, x2) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y1) && clobber(y2) && clobber(o0)) {
26586 break
26587 }
26588 b = mergePoint(b, x0, x1, x2)
26589 v0 := b.NewValue0(x2.Pos, OpARM64MOVWUload, t)
26590 v.reset(OpCopy)
26591 v.AddArg(v0)
26592 v0.Aux = s
26593 v1 := b.NewValue0(x2.Pos, OpOffPtr, p.Type)
26594 v1.AuxInt = i0
26595 v1.AddArg(p)
26596 v0.AddArg(v1)
26597 v0.AddArg(mem)
26598 return true
26599 }
// match: (ORshiftLL <t> [24] o0:(ORshiftLL [16] x0:(MOVHUloadidx ptr0 idx0 mem) y1:(MOVDnop x1:(MOVBUload [2] {s} p1:(ADD ptr1 idx1) mem))) y2:(MOVDnop x2:(MOVBUload [3] {s} p mem)))
// cond: s == nil && all parts have one use && mergePoint(b, x0, x1, x2) != nil && (ptr0, idx0) and (ptr1, idx1) name the same base/index pair && isSamePtr(p1, p) && all parts can be clobbered
// result: @mergePoint(b, x0, x1, x2) (MOVWUloadidx <t> ptr0 idx0 mem)
26603 for {
26604 t := v.Type
26605 if v.AuxInt != 24 {
26606 break
26607 }
26608 _ = v.Args[1]
26609 o0 := v.Args[0]
26610 if o0.Op != OpARM64ORshiftLL {
26611 break
26612 }
26613 if o0.AuxInt != 16 {
26614 break
26615 }
26616 _ = o0.Args[1]
26617 x0 := o0.Args[0]
26618 if x0.Op != OpARM64MOVHUloadidx {
26619 break
26620 }
26621 mem := x0.Args[2]
26622 ptr0 := x0.Args[0]
26623 idx0 := x0.Args[1]
26624 y1 := o0.Args[1]
26625 if y1.Op != OpARM64MOVDnop {
26626 break
26627 }
26628 x1 := y1.Args[0]
26629 if x1.Op != OpARM64MOVBUload {
26630 break
26631 }
26632 if x1.AuxInt != 2 {
26633 break
26634 }
26635 s := x1.Aux
26636 _ = x1.Args[1]
26637 p1 := x1.Args[0]
26638 if p1.Op != OpARM64ADD {
26639 break
26640 }
26641 idx1 := p1.Args[1]
26642 ptr1 := p1.Args[0]
26643 if mem != x1.Args[1] {
26644 break
26645 }
26646 y2 := v.Args[1]
26647 if y2.Op != OpARM64MOVDnop {
26648 break
26649 }
26650 x2 := y2.Args[0]
26651 if x2.Op != OpARM64MOVBUload {
26652 break
26653 }
26654 if x2.AuxInt != 3 {
26655 break
26656 }
26657 if x2.Aux != s {
26658 break
26659 }
26660 _ = x2.Args[1]
26661 p := x2.Args[0]
26662 if mem != x2.Args[1] {
26663 break
26664 }
26665 if !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b, x0, x1, x2) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y1) && clobber(y2) && clobber(o0)) {
26666 break
26667 }
26668 b = mergePoint(b, x0, x1, x2)
26669 v0 := b.NewValue0(x2.Pos, OpARM64MOVWUloadidx, t)
26670 v.reset(OpCopy)
26671 v.AddArg(v0)
26672 v0.AddArg(ptr0)
26673 v0.AddArg(idx0)
26674 v0.AddArg(mem)
26675 return true
26676 }
// match: (ORshiftLL <t> [24] o0:(ORshiftLL [16] x0:(MOVHUloadidx ptr idx mem) y1:(MOVDnop x1:(MOVBUloadidx ptr (ADDconst [2] idx) mem))) y2:(MOVDnop x2:(MOVBUloadidx ptr (ADDconst [3] idx) mem)))
// cond: all parts have one use && mergePoint(b, x0, x1, x2) != nil && all parts can be clobbered
// result: @mergePoint(b, x0, x1, x2) (MOVWUloadidx <t> ptr idx mem)
26680 for {
26681 t := v.Type
26682 if v.AuxInt != 24 {
26683 break
26684 }
26685 _ = v.Args[1]
26686 o0 := v.Args[0]
26687 if o0.Op != OpARM64ORshiftLL {
26688 break
26689 }
26690 if o0.AuxInt != 16 {
26691 break
26692 }
26693 _ = o0.Args[1]
26694 x0 := o0.Args[0]
26695 if x0.Op != OpARM64MOVHUloadidx {
26696 break
26697 }
26698 mem := x0.Args[2]
26699 ptr := x0.Args[0]
26700 idx := x0.Args[1]
26701 y1 := o0.Args[1]
26702 if y1.Op != OpARM64MOVDnop {
26703 break
26704 }
26705 x1 := y1.Args[0]
26706 if x1.Op != OpARM64MOVBUloadidx {
26707 break
26708 }
26709 _ = x1.Args[2]
26710 if ptr != x1.Args[0] {
26711 break
26712 }
26713 x1_1 := x1.Args[1]
26714 if x1_1.Op != OpARM64ADDconst {
26715 break
26716 }
26717 if x1_1.AuxInt != 2 {
26718 break
26719 }
26720 if idx != x1_1.Args[0] {
26721 break
26722 }
26723 if mem != x1.Args[2] {
26724 break
26725 }
26726 y2 := v.Args[1]
26727 if y2.Op != OpARM64MOVDnop {
26728 break
26729 }
26730 x2 := y2.Args[0]
26731 if x2.Op != OpARM64MOVBUloadidx {
26732 break
26733 }
26734 _ = x2.Args[2]
26735 if ptr != x2.Args[0] {
26736 break
26737 }
26738 x2_1 := x2.Args[1]
26739 if x2_1.Op != OpARM64ADDconst {
26740 break
26741 }
26742 if x2_1.AuxInt != 3 {
26743 break
26744 }
26745 if idx != x2_1.Args[0] {
26746 break
26747 }
26748 if mem != x2.Args[2] {
26749 break
26750 }
26751 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b, x0, x1, x2) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y1) && clobber(y2) && clobber(o0)) {
26752 break
26753 }
26754 b = mergePoint(b, x0, x1, x2)
26755 v0 := b.NewValue0(v.Pos, OpARM64MOVWUloadidx, t)
26756 v.reset(OpCopy)
26757 v.AddArg(v0)
26758 v0.AddArg(ptr)
26759 v0.AddArg(idx)
26760 v0.AddArg(mem)
26761 return true
26762 }
// match: (ORshiftLL <t> [24] o0:(ORshiftLL [16] x0:(MOVHUloadidx2 ptr0 idx0 mem) y1:(MOVDnop x1:(MOVBUload [2] {s} p1:(ADDshiftLL [1] ptr1 idx1) mem))) y2:(MOVDnop x2:(MOVBUload [3] {s} p mem)))
// cond: s == nil && all parts have one use && mergePoint(b, x0, x1, x2) != nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && isSamePtr(p1, p) && all parts can be clobbered
// result: @mergePoint(b, x0, x1, x2) (MOVWUloadidx <t> ptr0 (SLLconst <idx0.Type> [1] idx0) mem)
26766 for {
26767 t := v.Type
26768 if v.AuxInt != 24 {
26769 break
26770 }
26771 _ = v.Args[1]
26772 o0 := v.Args[0]
26773 if o0.Op != OpARM64ORshiftLL {
26774 break
26775 }
26776 if o0.AuxInt != 16 {
26777 break
26778 }
26779 _ = o0.Args[1]
26780 x0 := o0.Args[0]
26781 if x0.Op != OpARM64MOVHUloadidx2 {
26782 break
26783 }
26784 mem := x0.Args[2]
26785 ptr0 := x0.Args[0]
26786 idx0 := x0.Args[1]
26787 y1 := o0.Args[1]
26788 if y1.Op != OpARM64MOVDnop {
26789 break
26790 }
26791 x1 := y1.Args[0]
26792 if x1.Op != OpARM64MOVBUload {
26793 break
26794 }
26795 if x1.AuxInt != 2 {
26796 break
26797 }
26798 s := x1.Aux
26799 _ = x1.Args[1]
26800 p1 := x1.Args[0]
26801 if p1.Op != OpARM64ADDshiftLL {
26802 break
26803 }
26804 if p1.AuxInt != 1 {
26805 break
26806 }
26807 idx1 := p1.Args[1]
26808 ptr1 := p1.Args[0]
26809 if mem != x1.Args[1] {
26810 break
26811 }
26812 y2 := v.Args[1]
26813 if y2.Op != OpARM64MOVDnop {
26814 break
26815 }
26816 x2 := y2.Args[0]
26817 if x2.Op != OpARM64MOVBUload {
26818 break
26819 }
26820 if x2.AuxInt != 3 {
26821 break
26822 }
26823 if x2.Aux != s {
26824 break
26825 }
26826 _ = x2.Args[1]
26827 p := x2.Args[0]
26828 if mem != x2.Args[1] {
26829 break
26830 }
26831 if !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b, x0, x1, x2) != nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y1) && clobber(y2) && clobber(o0)) {
26832 break
26833 }
26834 b = mergePoint(b, x0, x1, x2)
26835 v0 := b.NewValue0(x2.Pos, OpARM64MOVWUloadidx, t)
26836 v.reset(OpCopy)
26837 v.AddArg(v0)
26838 v0.AddArg(ptr0)
26839 v1 := b.NewValue0(x2.Pos, OpARM64SLLconst, idx0.Type)
26840 v1.AuxInt = 1
26841 v1.AddArg(idx0)
26842 v0.AddArg(v1)
26843 v0.AddArg(mem)
26844 return true
26845 }
// match: (ORshiftLL <t> [56] o0:(ORshiftLL [48] o1:(ORshiftLL [40] o2:(ORshiftLL [32] x0:(MOVWUload [i0] {s} p mem) y1:(MOVDnop x1:(MOVBUload [i4] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i5] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i6] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [i7] {s} p mem)))
// cond: i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && all parts have one use && mergePoint(b, x0, x1, x2, x3, x4) != nil && all parts can be clobbered
// result: @mergePoint(b, x0, x1, x2, x3, x4) (MOVDload <t> {s} (OffPtr <p.Type> [i0] p) mem)
26849 for {
26850 t := v.Type
26851 if v.AuxInt != 56 {
26852 break
26853 }
26854 _ = v.Args[1]
26855 o0 := v.Args[0]
26856 if o0.Op != OpARM64ORshiftLL {
26857 break
26858 }
26859 if o0.AuxInt != 48 {
26860 break
26861 }
26862 _ = o0.Args[1]
26863 o1 := o0.Args[0]
26864 if o1.Op != OpARM64ORshiftLL {
26865 break
26866 }
26867 if o1.AuxInt != 40 {
26868 break
26869 }
26870 _ = o1.Args[1]
26871 o2 := o1.Args[0]
26872 if o2.Op != OpARM64ORshiftLL {
26873 break
26874 }
26875 if o2.AuxInt != 32 {
26876 break
26877 }
26878 _ = o2.Args[1]
26879 x0 := o2.Args[0]
26880 if x0.Op != OpARM64MOVWUload {
26881 break
26882 }
26883 i0 := x0.AuxInt
26884 s := x0.Aux
26885 mem := x0.Args[1]
26886 p := x0.Args[0]
26887 y1 := o2.Args[1]
26888 if y1.Op != OpARM64MOVDnop {
26889 break
26890 }
26891 x1 := y1.Args[0]
26892 if x1.Op != OpARM64MOVBUload {
26893 break
26894 }
26895 i4 := x1.AuxInt
26896 if x1.Aux != s {
26897 break
26898 }
26899 _ = x1.Args[1]
26900 if p != x1.Args[0] {
26901 break
26902 }
26903 if mem != x1.Args[1] {
26904 break
26905 }
26906 y2 := o1.Args[1]
26907 if y2.Op != OpARM64MOVDnop {
26908 break
26909 }
26910 x2 := y2.Args[0]
26911 if x2.Op != OpARM64MOVBUload {
26912 break
26913 }
26914 i5 := x2.AuxInt
26915 if x2.Aux != s {
26916 break
26917 }
26918 _ = x2.Args[1]
26919 if p != x2.Args[0] {
26920 break
26921 }
26922 if mem != x2.Args[1] {
26923 break
26924 }
26925 y3 := o0.Args[1]
26926 if y3.Op != OpARM64MOVDnop {
26927 break
26928 }
26929 x3 := y3.Args[0]
26930 if x3.Op != OpARM64MOVBUload {
26931 break
26932 }
26933 i6 := x3.AuxInt
26934 if x3.Aux != s {
26935 break
26936 }
26937 _ = x3.Args[1]
26938 if p != x3.Args[0] {
26939 break
26940 }
26941 if mem != x3.Args[1] {
26942 break
26943 }
26944 y4 := v.Args[1]
26945 if y4.Op != OpARM64MOVDnop {
26946 break
26947 }
26948 x4 := y4.Args[0]
26949 if x4.Op != OpARM64MOVBUload {
26950 break
26951 }
26952 i7 := x4.AuxInt
26953 if x4.Aux != s {
26954 break
26955 }
26956 _ = x4.Args[1]
26957 if p != x4.Args[0] {
26958 break
26959 }
26960 if mem != x4.Args[1] {
26961 break
26962 }
26963 if !(i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2)) {
26964 break
26965 }
26966 b = mergePoint(b, x0, x1, x2, x3, x4)
26967 v0 := b.NewValue0(x4.Pos, OpARM64MOVDload, t)
26968 v.reset(OpCopy)
26969 v.AddArg(v0)
26970 v0.Aux = s
26971 v1 := b.NewValue0(x4.Pos, OpOffPtr, p.Type)
26972 v1.AuxInt = i0
26973 v1.AddArg(p)
26974 v0.AddArg(v1)
26975 v0.AddArg(mem)
26976 return true
26977 }
// match: (ORshiftLL <t> [56] o0:(ORshiftLL [48] o1:(ORshiftLL [40] o2:(ORshiftLL [32] x0:(MOVWUloadidx ptr0 idx0 mem) y1:(MOVDnop x1:(MOVBUload [4] {s} p1:(ADD ptr1 idx1) mem))) y2:(MOVDnop x2:(MOVBUload [5] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [6] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [7] {s} p mem)))
// cond: s == nil && all parts have one use && mergePoint(b, x0, x1, x2, x3, x4) != nil && (ptr0, idx0) and (ptr1, idx1) name the same base/index pair && isSamePtr(p1, p) && all parts can be clobbered
// result: @mergePoint(b, x0, x1, x2, x3, x4) (MOVDloadidx <t> ptr0 idx0 mem)
26981 for {
26982 t := v.Type
26983 if v.AuxInt != 56 {
26984 break
26985 }
26986 _ = v.Args[1]
26987 o0 := v.Args[0]
26988 if o0.Op != OpARM64ORshiftLL {
26989 break
26990 }
26991 if o0.AuxInt != 48 {
26992 break
26993 }
26994 _ = o0.Args[1]
26995 o1 := o0.Args[0]
26996 if o1.Op != OpARM64ORshiftLL {
26997 break
26998 }
26999 if o1.AuxInt != 40 {
27000 break
27001 }
27002 _ = o1.Args[1]
27003 o2 := o1.Args[0]
27004 if o2.Op != OpARM64ORshiftLL {
27005 break
27006 }
27007 if o2.AuxInt != 32 {
27008 break
27009 }
27010 _ = o2.Args[1]
27011 x0 := o2.Args[0]
27012 if x0.Op != OpARM64MOVWUloadidx {
27013 break
27014 }
27015 mem := x0.Args[2]
27016 ptr0 := x0.Args[0]
27017 idx0 := x0.Args[1]
27018 y1 := o2.Args[1]
27019 if y1.Op != OpARM64MOVDnop {
27020 break
27021 }
27022 x1 := y1.Args[0]
27023 if x1.Op != OpARM64MOVBUload {
27024 break
27025 }
27026 if x1.AuxInt != 4 {
27027 break
27028 }
27029 s := x1.Aux
27030 _ = x1.Args[1]
27031 p1 := x1.Args[0]
27032 if p1.Op != OpARM64ADD {
27033 break
27034 }
27035 idx1 := p1.Args[1]
27036 ptr1 := p1.Args[0]
27037 if mem != x1.Args[1] {
27038 break
27039 }
27040 y2 := o1.Args[1]
27041 if y2.Op != OpARM64MOVDnop {
27042 break
27043 }
27044 x2 := y2.Args[0]
27045 if x2.Op != OpARM64MOVBUload {
27046 break
27047 }
27048 if x2.AuxInt != 5 {
27049 break
27050 }
27051 if x2.Aux != s {
27052 break
27053 }
27054 _ = x2.Args[1]
27055 p := x2.Args[0]
27056 if mem != x2.Args[1] {
27057 break
27058 }
27059 y3 := o0.Args[1]
27060 if y3.Op != OpARM64MOVDnop {
27061 break
27062 }
27063 x3 := y3.Args[0]
27064 if x3.Op != OpARM64MOVBUload {
27065 break
27066 }
27067 if x3.AuxInt != 6 {
27068 break
27069 }
27070 if x3.Aux != s {
27071 break
27072 }
27073 _ = x3.Args[1]
27074 if p != x3.Args[0] {
27075 break
27076 }
27077 if mem != x3.Args[1] {
27078 break
27079 }
27080 y4 := v.Args[1]
27081 if y4.Op != OpARM64MOVDnop {
27082 break
27083 }
27084 x4 := y4.Args[0]
27085 if x4.Op != OpARM64MOVBUload {
27086 break
27087 }
27088 if x4.AuxInt != 7 {
27089 break
27090 }
27091 if x4.Aux != s {
27092 break
27093 }
27094 _ = x4.Args[1]
27095 if p != x4.Args[0] {
27096 break
27097 }
27098 if mem != x4.Args[1] {
27099 break
27100 }
27101 if !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2)) {
27102 break
27103 }
27104 b = mergePoint(b, x0, x1, x2, x3, x4)
27105 v0 := b.NewValue0(x4.Pos, OpARM64MOVDloadidx, t)
27106 v.reset(OpCopy)
27107 v.AddArg(v0)
27108 v0.AddArg(ptr0)
27109 v0.AddArg(idx0)
27110 v0.AddArg(mem)
27111 return true
27112 }
// match: (ORshiftLL <t> [56] o0:(ORshiftLL [48] o1:(ORshiftLL [40] o2:(ORshiftLL [32] x0:(MOVWUloadidx4 ptr0 idx0 mem) y1:(MOVDnop x1:(MOVBUload [4] {s} p1:(ADDshiftLL [2] ptr1 idx1) mem))) y2:(MOVDnop x2:(MOVBUload [5] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [6] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [7] {s} p mem)))
// cond: s == nil && all parts have one use && mergePoint(b, x0, x1, x2, x3, x4) != nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && isSamePtr(p1, p) && all parts can be clobbered
// result: @mergePoint(b, x0, x1, x2, x3, x4) (MOVDloadidx <t> ptr0 (SLLconst <idx0.Type> [2] idx0) mem)
27116 for {
27117 t := v.Type
27118 if v.AuxInt != 56 {
27119 break
27120 }
27121 _ = v.Args[1]
27122 o0 := v.Args[0]
27123 if o0.Op != OpARM64ORshiftLL {
27124 break
27125 }
27126 if o0.AuxInt != 48 {
27127 break
27128 }
27129 _ = o0.Args[1]
27130 o1 := o0.Args[0]
27131 if o1.Op != OpARM64ORshiftLL {
27132 break
27133 }
27134 if o1.AuxInt != 40 {
27135 break
27136 }
27137 _ = o1.Args[1]
27138 o2 := o1.Args[0]
27139 if o2.Op != OpARM64ORshiftLL {
27140 break
27141 }
27142 if o2.AuxInt != 32 {
27143 break
27144 }
27145 _ = o2.Args[1]
27146 x0 := o2.Args[0]
27147 if x0.Op != OpARM64MOVWUloadidx4 {
27148 break
27149 }
27150 mem := x0.Args[2]
27151 ptr0 := x0.Args[0]
27152 idx0 := x0.Args[1]
27153 y1 := o2.Args[1]
27154 if y1.Op != OpARM64MOVDnop {
27155 break
27156 }
27157 x1 := y1.Args[0]
27158 if x1.Op != OpARM64MOVBUload {
27159 break
27160 }
27161 if x1.AuxInt != 4 {
27162 break
27163 }
27164 s := x1.Aux
27165 _ = x1.Args[1]
27166 p1 := x1.Args[0]
27167 if p1.Op != OpARM64ADDshiftLL {
27168 break
27169 }
27170 if p1.AuxInt != 2 {
27171 break
27172 }
27173 idx1 := p1.Args[1]
27174 ptr1 := p1.Args[0]
27175 if mem != x1.Args[1] {
27176 break
27177 }
27178 y2 := o1.Args[1]
27179 if y2.Op != OpARM64MOVDnop {
27180 break
27181 }
27182 x2 := y2.Args[0]
27183 if x2.Op != OpARM64MOVBUload {
27184 break
27185 }
27186 if x2.AuxInt != 5 {
27187 break
27188 }
27189 if x2.Aux != s {
27190 break
27191 }
27192 _ = x2.Args[1]
27193 p := x2.Args[0]
27194 if mem != x2.Args[1] {
27195 break
27196 }
27197 y3 := o0.Args[1]
27198 if y3.Op != OpARM64MOVDnop {
27199 break
27200 }
27201 x3 := y3.Args[0]
27202 if x3.Op != OpARM64MOVBUload {
27203 break
27204 }
27205 if x3.AuxInt != 6 {
27206 break
27207 }
27208 if x3.Aux != s {
27209 break
27210 }
27211 _ = x3.Args[1]
27212 if p != x3.Args[0] {
27213 break
27214 }
27215 if mem != x3.Args[1] {
27216 break
27217 }
27218 y4 := v.Args[1]
27219 if y4.Op != OpARM64MOVDnop {
27220 break
27221 }
27222 x4 := y4.Args[0]
27223 if x4.Op != OpARM64MOVBUload {
27224 break
27225 }
27226 if x4.AuxInt != 7 {
27227 break
27228 }
27229 if x4.Aux != s {
27230 break
27231 }
27232 _ = x4.Args[1]
27233 if p != x4.Args[0] {
27234 break
27235 }
27236 if mem != x4.Args[1] {
27237 break
27238 }
27239 if !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4) != nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2)) {
27240 break
27241 }
27242 b = mergePoint(b, x0, x1, x2, x3, x4)
27243 v0 := b.NewValue0(x4.Pos, OpARM64MOVDloadidx, t)
27244 v.reset(OpCopy)
27245 v.AddArg(v0)
27246 v0.AddArg(ptr0)
27247 v1 := b.NewValue0(x4.Pos, OpARM64SLLconst, idx0.Type)
27248 v1.AuxInt = 2
27249 v1.AddArg(idx0)
27250 v0.AddArg(v1)
27251 v0.AddArg(mem)
27252 return true
27253 }
// match: (ORshiftLL <t> [56] o0:(ORshiftLL [48] o1:(ORshiftLL [40] o2:(ORshiftLL [32] x0:(MOVWUloadidx ptr idx mem) y1:(MOVDnop x1:(MOVBUloadidx ptr (ADDconst [4] idx) mem))) y2:(MOVDnop x2:(MOVBUloadidx ptr (ADDconst [5] idx) mem))) y3:(MOVDnop x3:(MOVBUloadidx ptr (ADDconst [6] idx) mem))) y4:(MOVDnop x4:(MOVBUloadidx ptr (ADDconst [7] idx) mem)))
// cond: all parts have one use && mergePoint(b, x0, x1, x2, x3, x4) != nil && all parts can be clobbered
// result: @mergePoint(b, x0, x1, x2, x3, x4) (MOVDloadidx <t> ptr idx mem)
27257 for {
27258 t := v.Type
27259 if v.AuxInt != 56 {
27260 break
27261 }
27262 _ = v.Args[1]
27263 o0 := v.Args[0]
27264 if o0.Op != OpARM64ORshiftLL {
27265 break
27266 }
27267 if o0.AuxInt != 48 {
27268 break
27269 }
27270 _ = o0.Args[1]
27271 o1 := o0.Args[0]
27272 if o1.Op != OpARM64ORshiftLL {
27273 break
27274 }
27275 if o1.AuxInt != 40 {
27276 break
27277 }
27278 _ = o1.Args[1]
27279 o2 := o1.Args[0]
27280 if o2.Op != OpARM64ORshiftLL {
27281 break
27282 }
27283 if o2.AuxInt != 32 {
27284 break
27285 }
27286 _ = o2.Args[1]
27287 x0 := o2.Args[0]
27288 if x0.Op != OpARM64MOVWUloadidx {
27289 break
27290 }
27291 mem := x0.Args[2]
27292 ptr := x0.Args[0]
27293 idx := x0.Args[1]
27294 y1 := o2.Args[1]
27295 if y1.Op != OpARM64MOVDnop {
27296 break
27297 }
27298 x1 := y1.Args[0]
27299 if x1.Op != OpARM64MOVBUloadidx {
27300 break
27301 }
27302 _ = x1.Args[2]
27303 if ptr != x1.Args[0] {
27304 break
27305 }
27306 x1_1 := x1.Args[1]
27307 if x1_1.Op != OpARM64ADDconst {
27308 break
27309 }
27310 if x1_1.AuxInt != 4 {
27311 break
27312 }
27313 if idx != x1_1.Args[0] {
27314 break
27315 }
27316 if mem != x1.Args[2] {
27317 break
27318 }
27319 y2 := o1.Args[1]
27320 if y2.Op != OpARM64MOVDnop {
27321 break
27322 }
27323 x2 := y2.Args[0]
27324 if x2.Op != OpARM64MOVBUloadidx {
27325 break
27326 }
27327 _ = x2.Args[2]
27328 if ptr != x2.Args[0] {
27329 break
27330 }
27331 x2_1 := x2.Args[1]
27332 if x2_1.Op != OpARM64ADDconst {
27333 break
27334 }
27335 if x2_1.AuxInt != 5 {
27336 break
27337 }
27338 if idx != x2_1.Args[0] {
27339 break
27340 }
27341 if mem != x2.Args[2] {
27342 break
27343 }
27344 y3 := o0.Args[1]
27345 if y3.Op != OpARM64MOVDnop {
27346 break
27347 }
27348 x3 := y3.Args[0]
27349 if x3.Op != OpARM64MOVBUloadidx {
27350 break
27351 }
27352 _ = x3.Args[2]
27353 if ptr != x3.Args[0] {
27354 break
27355 }
27356 x3_1 := x3.Args[1]
27357 if x3_1.Op != OpARM64ADDconst {
27358 break
27359 }
27360 if x3_1.AuxInt != 6 {
27361 break
27362 }
27363 if idx != x3_1.Args[0] {
27364 break
27365 }
27366 if mem != x3.Args[2] {
27367 break
27368 }
27369 y4 := v.Args[1]
27370 if y4.Op != OpARM64MOVDnop {
27371 break
27372 }
27373 x4 := y4.Args[0]
27374 if x4.Op != OpARM64MOVBUloadidx {
27375 break
27376 }
27377 _ = x4.Args[2]
27378 if ptr != x4.Args[0] {
27379 break
27380 }
27381 x4_1 := x4.Args[1]
27382 if x4_1.Op != OpARM64ADDconst {
27383 break
27384 }
27385 if x4_1.AuxInt != 7 {
27386 break
27387 }
27388 if idx != x4_1.Args[0] {
27389 break
27390 }
27391 if mem != x4.Args[2] {
27392 break
27393 }
27394 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2)) {
27395 break
27396 }
27397 b = mergePoint(b, x0, x1, x2, x3, x4)
27398 v0 := b.NewValue0(v.Pos, OpARM64MOVDloadidx, t)
27399 v.reset(OpCopy)
27400 v.AddArg(v0)
27401 v0.AddArg(ptr)
27402 v0.AddArg(idx)
27403 v0.AddArg(mem)
27404 return true
27405 }
27406 return false
27407 }
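// rewriteValueARM64_OpARM64ORshiftLL_20 handles the byte-reversed load-merging
// patterns: adjacent byte loads assembled in big-endian byte order are combined
// into a single wider load followed by REV16W, REVW or REV.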
27408 func rewriteValueARM64_OpARM64ORshiftLL_20(v *Value) bool {
27409 b := v.Block
// match: (ORshiftLL <t> [8] y0:(MOVDnop x0:(MOVBUload [i1] {s} p mem)) y1:(MOVDnop x1:(MOVBUload [i0] {s} p mem)))
// cond: i1 == i0+1 && x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b, x0, x1) != nil && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1)
// result: @mergePoint(b, x0, x1) (REV16W <t> (MOVHUload <t> [i0] {s} p mem))
27413 for {
27414 t := v.Type
27415 if v.AuxInt != 8 {
27416 break
27417 }
27418 _ = v.Args[1]
27419 y0 := v.Args[0]
27420 if y0.Op != OpARM64MOVDnop {
27421 break
27422 }
27423 x0 := y0.Args[0]
27424 if x0.Op != OpARM64MOVBUload {
27425 break
27426 }
27427 i1 := x0.AuxInt
27428 s := x0.Aux
27429 mem := x0.Args[1]
27430 p := x0.Args[0]
27431 y1 := v.Args[1]
27432 if y1.Op != OpARM64MOVDnop {
27433 break
27434 }
27435 x1 := y1.Args[0]
27436 if x1.Op != OpARM64MOVBUload {
27437 break
27438 }
27439 i0 := x1.AuxInt
27440 if x1.Aux != s {
27441 break
27442 }
27443 _ = x1.Args[1]
27444 if p != x1.Args[0] {
27445 break
27446 }
27447 if mem != x1.Args[1] {
27448 break
27449 }
27450 if !(i1 == i0+1 && x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b, x0, x1) != nil && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1)) {
27451 break
27452 }
27453 b = mergePoint(b, x0, x1)
27454 v0 := b.NewValue0(x1.Pos, OpARM64REV16W, t)
27455 v.reset(OpCopy)
27456 v.AddArg(v0)
27457 v1 := b.NewValue0(x1.Pos, OpARM64MOVHUload, t)
27458 v1.AuxInt = i0
27459 v1.Aux = s
27460 v1.AddArg(p)
27461 v1.AddArg(mem)
27462 v0.AddArg(v1)
27463 return true
27464 }
// match: (ORshiftLL <t> [8] y0:(MOVDnop x0:(MOVBUload [1] {s} p1:(ADD ptr1 idx1) mem)) y1:(MOVDnop x1:(MOVBUloadidx ptr0 idx0 mem)))
// cond: s == nil && x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b, x0, x1) != nil && (ptr0, idx0) and (ptr1, idx1) name the same base/index pair && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1)
// result: @mergePoint(b, x0, x1) (REV16W <t> (MOVHUloadidx <t> ptr0 idx0 mem))
27468 for {
27469 t := v.Type
27470 if v.AuxInt != 8 {
27471 break
27472 }
27473 _ = v.Args[1]
27474 y0 := v.Args[0]
27475 if y0.Op != OpARM64MOVDnop {
27476 break
27477 }
27478 x0 := y0.Args[0]
27479 if x0.Op != OpARM64MOVBUload {
27480 break
27481 }
27482 if x0.AuxInt != 1 {
27483 break
27484 }
27485 s := x0.Aux
27486 mem := x0.Args[1]
27487 p1 := x0.Args[0]
27488 if p1.Op != OpARM64ADD {
27489 break
27490 }
27491 idx1 := p1.Args[1]
27492 ptr1 := p1.Args[0]
27493 y1 := v.Args[1]
27494 if y1.Op != OpARM64MOVDnop {
27495 break
27496 }
27497 x1 := y1.Args[0]
27498 if x1.Op != OpARM64MOVBUloadidx {
27499 break
27500 }
27501 _ = x1.Args[2]
27502 ptr0 := x1.Args[0]
27503 idx0 := x1.Args[1]
27504 if mem != x1.Args[2] {
27505 break
27506 }
27507 if !(s == nil && x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b, x0, x1) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1)) {
27508 break
27509 }
27510 b = mergePoint(b, x0, x1)
27511 v0 := b.NewValue0(x0.Pos, OpARM64REV16W, t)
27512 v.reset(OpCopy)
27513 v.AddArg(v0)
27514 v1 := b.NewValue0(x0.Pos, OpARM64MOVHUloadidx, t)
27515 v1.AddArg(ptr0)
27516 v1.AddArg(idx0)
27517 v1.AddArg(mem)
27518 v0.AddArg(v1)
27519 return true
27520 }
// match: (ORshiftLL <t> [8] y0:(MOVDnop x0:(MOVBUloadidx ptr (ADDconst [1] idx) mem)) y1:(MOVDnop x1:(MOVBUloadidx ptr idx mem)))
// cond: x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b, x0, x1) != nil && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1)
// result: @mergePoint(b, x0, x1) (REV16W <t> (MOVHUloadidx <t> ptr idx mem))
27524 for {
27525 t := v.Type
27526 if v.AuxInt != 8 {
27527 break
27528 }
27529 _ = v.Args[1]
27530 y0 := v.Args[0]
27531 if y0.Op != OpARM64MOVDnop {
27532 break
27533 }
27534 x0 := y0.Args[0]
27535 if x0.Op != OpARM64MOVBUloadidx {
27536 break
27537 }
27538 mem := x0.Args[2]
27539 ptr := x0.Args[0]
27540 x0_1 := x0.Args[1]
27541 if x0_1.Op != OpARM64ADDconst {
27542 break
27543 }
27544 if x0_1.AuxInt != 1 {
27545 break
27546 }
27547 idx := x0_1.Args[0]
27548 y1 := v.Args[1]
27549 if y1.Op != OpARM64MOVDnop {
27550 break
27551 }
27552 x1 := y1.Args[0]
27553 if x1.Op != OpARM64MOVBUloadidx {
27554 break
27555 }
27556 _ = x1.Args[2]
27557 if ptr != x1.Args[0] {
27558 break
27559 }
27560 if idx != x1.Args[1] {
27561 break
27562 }
27563 if mem != x1.Args[2] {
27564 break
27565 }
27566 if !(x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b, x0, x1) != nil && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1)) {
27567 break
27568 }
27569 b = mergePoint(b, x0, x1)
27570 v0 := b.NewValue0(v.Pos, OpARM64REV16W, t)
27571 v.reset(OpCopy)
27572 v.AddArg(v0)
27573 v1 := b.NewValue0(v.Pos, OpARM64MOVHUloadidx, t)
27574 v1.AddArg(ptr)
27575 v1.AddArg(idx)
27576 v1.AddArg(mem)
27577 v0.AddArg(v1)
27578 return true
27579 }
// match: (ORshiftLL <t> [24] o0:(ORshiftLL [16] y0:(REV16W x0:(MOVHUload [i2] {s} p mem)) y1:(MOVDnop x1:(MOVBUload [i1] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i0] {s} p mem)))
// cond: i1 == i0+1 && i2 == i0+2 && all parts have one use && mergePoint(b, x0, x1, x2) != nil && all parts can be clobbered
// result: @mergePoint(b, x0, x1, x2) (REVW <t> (MOVWUload <t> {s} (OffPtr <p.Type> [i0] p) mem))
27583 for {
27584 t := v.Type
27585 if v.AuxInt != 24 {
27586 break
27587 }
27588 _ = v.Args[1]
27589 o0 := v.Args[0]
27590 if o0.Op != OpARM64ORshiftLL {
27591 break
27592 }
27593 if o0.AuxInt != 16 {
27594 break
27595 }
27596 _ = o0.Args[1]
27597 y0 := o0.Args[0]
27598 if y0.Op != OpARM64REV16W {
27599 break
27600 }
27601 x0 := y0.Args[0]
27602 if x0.Op != OpARM64MOVHUload {
27603 break
27604 }
27605 i2 := x0.AuxInt
27606 s := x0.Aux
27607 mem := x0.Args[1]
27608 p := x0.Args[0]
27609 y1 := o0.Args[1]
27610 if y1.Op != OpARM64MOVDnop {
27611 break
27612 }
27613 x1 := y1.Args[0]
27614 if x1.Op != OpARM64MOVBUload {
27615 break
27616 }
27617 i1 := x1.AuxInt
27618 if x1.Aux != s {
27619 break
27620 }
27621 _ = x1.Args[1]
27622 if p != x1.Args[0] {
27623 break
27624 }
27625 if mem != x1.Args[1] {
27626 break
27627 }
27628 y2 := v.Args[1]
27629 if y2.Op != OpARM64MOVDnop {
27630 break
27631 }
27632 x2 := y2.Args[0]
27633 if x2.Op != OpARM64MOVBUload {
27634 break
27635 }
27636 i0 := x2.AuxInt
27637 if x2.Aux != s {
27638 break
27639 }
27640 _ = x2.Args[1]
27641 if p != x2.Args[0] {
27642 break
27643 }
27644 if mem != x2.Args[1] {
27645 break
27646 }
27647 if !(i1 == i0+1 && i2 == i0+2 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b, x0, x1, x2) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(o0)) {
27648 break
27649 }
27650 b = mergePoint(b, x0, x1, x2)
27651 v0 := b.NewValue0(x2.Pos, OpARM64REVW, t)
27652 v.reset(OpCopy)
27653 v.AddArg(v0)
27654 v1 := b.NewValue0(x2.Pos, OpARM64MOVWUload, t)
27655 v1.Aux = s
27656 v2 := b.NewValue0(x2.Pos, OpOffPtr, p.Type)
27657 v2.AuxInt = i0
27658 v2.AddArg(p)
27659 v1.AddArg(v2)
27660 v1.AddArg(mem)
27661 v0.AddArg(v1)
27662 return true
27663 }
// match: (ORshiftLL <t> [24] o0:(ORshiftLL [16] y0:(REV16W x0:(MOVHUload [2] {s} p mem)) y1:(MOVDnop x1:(MOVBUload [1] {s} p1:(ADD ptr1 idx1) mem))) y2:(MOVDnop x2:(MOVBUloadidx ptr0 idx0 mem)))
// cond: s == nil && all parts have one use && mergePoint(b, x0, x1, x2) != nil && (ptr0, idx0) and (ptr1, idx1) name the same base/index pair && isSamePtr(p1, p) && all parts can be clobbered
// result: @mergePoint(b, x0, x1, x2) (REVW <t> (MOVWUloadidx <t> ptr0 idx0 mem))
27667 for {
27668 t := v.Type
27669 if v.AuxInt != 24 {
27670 break
27671 }
27672 _ = v.Args[1]
27673 o0 := v.Args[0]
27674 if o0.Op != OpARM64ORshiftLL {
27675 break
27676 }
27677 if o0.AuxInt != 16 {
27678 break
27679 }
27680 _ = o0.Args[1]
27681 y0 := o0.Args[0]
27682 if y0.Op != OpARM64REV16W {
27683 break
27684 }
27685 x0 := y0.Args[0]
27686 if x0.Op != OpARM64MOVHUload {
27687 break
27688 }
27689 if x0.AuxInt != 2 {
27690 break
27691 }
27692 s := x0.Aux
27693 mem := x0.Args[1]
27694 p := x0.Args[0]
27695 y1 := o0.Args[1]
27696 if y1.Op != OpARM64MOVDnop {
27697 break
27698 }
27699 x1 := y1.Args[0]
27700 if x1.Op != OpARM64MOVBUload {
27701 break
27702 }
27703 if x1.AuxInt != 1 {
27704 break
27705 }
27706 if x1.Aux != s {
27707 break
27708 }
27709 _ = x1.Args[1]
27710 p1 := x1.Args[0]
27711 if p1.Op != OpARM64ADD {
27712 break
27713 }
27714 idx1 := p1.Args[1]
27715 ptr1 := p1.Args[0]
27716 if mem != x1.Args[1] {
27717 break
27718 }
27719 y2 := v.Args[1]
27720 if y2.Op != OpARM64MOVDnop {
27721 break
27722 }
27723 x2 := y2.Args[0]
27724 if x2.Op != OpARM64MOVBUloadidx {
27725 break
27726 }
27727 _ = x2.Args[2]
27728 ptr0 := x2.Args[0]
27729 idx0 := x2.Args[1]
27730 if mem != x2.Args[2] {
27731 break
27732 }
27733 if !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b, x0, x1, x2) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(o0)) {
27734 break
27735 }
27736 b = mergePoint(b, x0, x1, x2)
27737 v0 := b.NewValue0(x1.Pos, OpARM64REVW, t)
27738 v.reset(OpCopy)
27739 v.AddArg(v0)
27740 v1 := b.NewValue0(x1.Pos, OpARM64MOVWUloadidx, t)
27741 v1.AddArg(ptr0)
27742 v1.AddArg(idx0)
27743 v1.AddArg(mem)
27744 v0.AddArg(v1)
27745 return true
27746 }
// match: (ORshiftLL <t> [24] o0:(ORshiftLL [16] y0:(REV16W x0:(MOVHUloadidx ptr (ADDconst [2] idx) mem)) y1:(MOVDnop x1:(MOVBUloadidx ptr (ADDconst [1] idx) mem))) y2:(MOVDnop x2:(MOVBUloadidx ptr idx mem)))
// cond: all parts have one use && mergePoint(b, x0, x1, x2) != nil && all parts can be clobbered
// result: @mergePoint(b, x0, x1, x2) (REVW <t> (MOVWUloadidx <t> ptr idx mem))
27750 for {
27751 t := v.Type
27752 if v.AuxInt != 24 {
27753 break
27754 }
27755 _ = v.Args[1]
27756 o0 := v.Args[0]
27757 if o0.Op != OpARM64ORshiftLL {
27758 break
27759 }
27760 if o0.AuxInt != 16 {
27761 break
27762 }
27763 _ = o0.Args[1]
27764 y0 := o0.Args[0]
27765 if y0.Op != OpARM64REV16W {
27766 break
27767 }
27768 x0 := y0.Args[0]
27769 if x0.Op != OpARM64MOVHUloadidx {
27770 break
27771 }
27772 mem := x0.Args[2]
27773 ptr := x0.Args[0]
27774 x0_1 := x0.Args[1]
27775 if x0_1.Op != OpARM64ADDconst {
27776 break
27777 }
27778 if x0_1.AuxInt != 2 {
27779 break
27780 }
27781 idx := x0_1.Args[0]
27782 y1 := o0.Args[1]
27783 if y1.Op != OpARM64MOVDnop {
27784 break
27785 }
27786 x1 := y1.Args[0]
27787 if x1.Op != OpARM64MOVBUloadidx {
27788 break
27789 }
27790 _ = x1.Args[2]
27791 if ptr != x1.Args[0] {
27792 break
27793 }
27794 x1_1 := x1.Args[1]
27795 if x1_1.Op != OpARM64ADDconst {
27796 break
27797 }
27798 if x1_1.AuxInt != 1 {
27799 break
27800 }
27801 if idx != x1_1.Args[0] {
27802 break
27803 }
27804 if mem != x1.Args[2] {
27805 break
27806 }
27807 y2 := v.Args[1]
27808 if y2.Op != OpARM64MOVDnop {
27809 break
27810 }
27811 x2 := y2.Args[0]
27812 if x2.Op != OpARM64MOVBUloadidx {
27813 break
27814 }
27815 _ = x2.Args[2]
27816 if ptr != x2.Args[0] {
27817 break
27818 }
27819 if idx != x2.Args[1] {
27820 break
27821 }
27822 if mem != x2.Args[2] {
27823 break
27824 }
27825 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b, x0, x1, x2) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(o0)) {
27826 break
27827 }
27828 b = mergePoint(b, x0, x1, x2)
27829 v0 := b.NewValue0(v.Pos, OpARM64REVW, t)
27830 v.reset(OpCopy)
27831 v.AddArg(v0)
27832 v1 := b.NewValue0(v.Pos, OpARM64MOVWUloadidx, t)
27833 v1.AddArg(ptr)
27834 v1.AddArg(idx)
27835 v1.AddArg(mem)
27836 v0.AddArg(v1)
27837 return true
27838 }
// match: (ORshiftLL <t> [56] o0:(ORshiftLL [48] o1:(ORshiftLL [40] o2:(ORshiftLL [32] y0:(REVW x0:(MOVWUload [i4] {s} p mem)) y1:(MOVDnop x1:(MOVBUload [i3] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i2] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i1] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [i0] {s} p mem)))
// cond: i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && all parts have one use && mergePoint(b, x0, x1, x2, x3, x4) != nil && all parts can be clobbered
// result: @mergePoint(b, x0, x1, x2, x3, x4) (REV <t> (MOVDload <t> {s} (OffPtr <p.Type> [i0] p) mem))
27842 for {
27843 t := v.Type
27844 if v.AuxInt != 56 {
27845 break
27846 }
27847 _ = v.Args[1]
27848 o0 := v.Args[0]
27849 if o0.Op != OpARM64ORshiftLL {
27850 break
27851 }
27852 if o0.AuxInt != 48 {
27853 break
27854 }
27855 _ = o0.Args[1]
27856 o1 := o0.Args[0]
27857 if o1.Op != OpARM64ORshiftLL {
27858 break
27859 }
27860 if o1.AuxInt != 40 {
27861 break
27862 }
27863 _ = o1.Args[1]
27864 o2 := o1.Args[0]
27865 if o2.Op != OpARM64ORshiftLL {
27866 break
27867 }
27868 if o2.AuxInt != 32 {
27869 break
27870 }
27871 _ = o2.Args[1]
27872 y0 := o2.Args[0]
27873 if y0.Op != OpARM64REVW {
27874 break
27875 }
27876 x0 := y0.Args[0]
27877 if x0.Op != OpARM64MOVWUload {
27878 break
27879 }
27880 i4 := x0.AuxInt
27881 s := x0.Aux
27882 mem := x0.Args[1]
27883 p := x0.Args[0]
27884 y1 := o2.Args[1]
27885 if y1.Op != OpARM64MOVDnop {
27886 break
27887 }
27888 x1 := y1.Args[0]
27889 if x1.Op != OpARM64MOVBUload {
27890 break
27891 }
27892 i3 := x1.AuxInt
27893 if x1.Aux != s {
27894 break
27895 }
27896 _ = x1.Args[1]
27897 if p != x1.Args[0] {
27898 break
27899 }
27900 if mem != x1.Args[1] {
27901 break
27902 }
27903 y2 := o1.Args[1]
27904 if y2.Op != OpARM64MOVDnop {
27905 break
27906 }
27907 x2 := y2.Args[0]
27908 if x2.Op != OpARM64MOVBUload {
27909 break
27910 }
27911 i2 := x2.AuxInt
27912 if x2.Aux != s {
27913 break
27914 }
27915 _ = x2.Args[1]
27916 if p != x2.Args[0] {
27917 break
27918 }
27919 if mem != x2.Args[1] {
27920 break
27921 }
27922 y3 := o0.Args[1]
27923 if y3.Op != OpARM64MOVDnop {
27924 break
27925 }
27926 x3 := y3.Args[0]
27927 if x3.Op != OpARM64MOVBUload {
27928 break
27929 }
27930 i1 := x3.AuxInt
27931 if x3.Aux != s {
27932 break
27933 }
27934 _ = x3.Args[1]
27935 if p != x3.Args[0] {
27936 break
27937 }
27938 if mem != x3.Args[1] {
27939 break
27940 }
27941 y4 := v.Args[1]
27942 if y4.Op != OpARM64MOVDnop {
27943 break
27944 }
27945 x4 := y4.Args[0]
27946 if x4.Op != OpARM64MOVBUload {
27947 break
27948 }
27949 i0 := x4.AuxInt
27950 if x4.Aux != s {
27951 break
27952 }
27953 _ = x4.Args[1]
27954 if p != x4.Args[0] {
27955 break
27956 }
27957 if mem != x4.Args[1] {
27958 break
27959 }
27960 if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2)) {
27961 break
27962 }
27963 b = mergePoint(b, x0, x1, x2, x3, x4)
27964 v0 := b.NewValue0(x4.Pos, OpARM64REV, t)
27965 v.reset(OpCopy)
27966 v.AddArg(v0)
27967 v1 := b.NewValue0(x4.Pos, OpARM64MOVDload, t)
27968 v1.Aux = s
27969 v2 := b.NewValue0(x4.Pos, OpOffPtr, p.Type)
27970 v2.AuxInt = i0
27971 v2.AddArg(p)
27972 v1.AddArg(v2)
27973 v1.AddArg(mem)
27974 v0.AddArg(v1)
27975 return true
27976 }
// match: (ORshiftLL <t> [56] o0:(ORshiftLL [48] o1:(ORshiftLL [40] o2:(ORshiftLL [32] y0:(REVW x0:(MOVWUload [4] {s} p mem)) y1:(MOVDnop x1:(MOVBUload [3] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [2] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [1] {s} p1:(ADD ptr1 idx1) mem))) y4:(MOVDnop x4:(MOVBUloadidx ptr0 idx0 mem)))
// cond: s == nil && all parts have one use && mergePoint(b, x0, x1, x2, x3, x4) != nil && (ptr0, idx0) and (ptr1, idx1) name the same base/index pair && isSamePtr(p1, p) && all parts can be clobbered
// result: @mergePoint(b, x0, x1, x2, x3, x4) (REV <t> (MOVDloadidx <t> ptr0 idx0 mem))
27980 for {
27981 t := v.Type
27982 if v.AuxInt != 56 {
27983 break
27984 }
27985 _ = v.Args[1]
27986 o0 := v.Args[0]
27987 if o0.Op != OpARM64ORshiftLL {
27988 break
27989 }
27990 if o0.AuxInt != 48 {
27991 break
27992 }
27993 _ = o0.Args[1]
27994 o1 := o0.Args[0]
27995 if o1.Op != OpARM64ORshiftLL {
27996 break
27997 }
27998 if o1.AuxInt != 40 {
27999 break
28000 }
28001 _ = o1.Args[1]
28002 o2 := o1.Args[0]
28003 if o2.Op != OpARM64ORshiftLL {
28004 break
28005 }
28006 if o2.AuxInt != 32 {
28007 break
28008 }
28009 _ = o2.Args[1]
28010 y0 := o2.Args[0]
28011 if y0.Op != OpARM64REVW {
28012 break
28013 }
28014 x0 := y0.Args[0]
28015 if x0.Op != OpARM64MOVWUload {
28016 break
28017 }
28018 if x0.AuxInt != 4 {
28019 break
28020 }
28021 s := x0.Aux
28022 mem := x0.Args[1]
28023 p := x0.Args[0]
28024 y1 := o2.Args[1]
28025 if y1.Op != OpARM64MOVDnop {
28026 break
28027 }
28028 x1 := y1.Args[0]
28029 if x1.Op != OpARM64MOVBUload {
28030 break
28031 }
28032 if x1.AuxInt != 3 {
28033 break
28034 }
28035 if x1.Aux != s {
28036 break
28037 }
28038 _ = x1.Args[1]
28039 if p != x1.Args[0] {
28040 break
28041 }
28042 if mem != x1.Args[1] {
28043 break
28044 }
28045 y2 := o1.Args[1]
28046 if y2.Op != OpARM64MOVDnop {
28047 break
28048 }
28049 x2 := y2.Args[0]
28050 if x2.Op != OpARM64MOVBUload {
28051 break
28052 }
28053 if x2.AuxInt != 2 {
28054 break
28055 }
28056 if x2.Aux != s {
28057 break
28058 }
28059 _ = x2.Args[1]
28060 if p != x2.Args[0] {
28061 break
28062 }
28063 if mem != x2.Args[1] {
28064 break
28065 }
28066 y3 := o0.Args[1]
28067 if y3.Op != OpARM64MOVDnop {
28068 break
28069 }
28070 x3 := y3.Args[0]
28071 if x3.Op != OpARM64MOVBUload {
28072 break
28073 }
28074 if x3.AuxInt != 1 {
28075 break
28076 }
28077 if x3.Aux != s {
28078 break
28079 }
28080 _ = x3.Args[1]
28081 p1 := x3.Args[0]
28082 if p1.Op != OpARM64ADD {
28083 break
28084 }
28085 idx1 := p1.Args[1]
28086 ptr1 := p1.Args[0]
28087 if mem != x3.Args[1] {
28088 break
28089 }
28090 y4 := v.Args[1]
28091 if y4.Op != OpARM64MOVDnop {
28092 break
28093 }
28094 x4 := y4.Args[0]
28095 if x4.Op != OpARM64MOVBUloadidx {
28096 break
28097 }
28098 _ = x4.Args[2]
28099 ptr0 := x4.Args[0]
28100 idx0 := x4.Args[1]
28101 if mem != x4.Args[2] {
28102 break
28103 }
28104 if !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2)) {
28105 break
28106 }
28107 b = mergePoint(b, x0, x1, x2, x3, x4)
28108 v0 := b.NewValue0(x3.Pos, OpARM64REV, t)
28109 v.reset(OpCopy)
28110 v.AddArg(v0)
28111 v1 := b.NewValue0(x3.Pos, OpARM64MOVDloadidx, t)
28112 v1.AddArg(ptr0)
28113 v1.AddArg(idx0)
28114 v1.AddArg(mem)
28115 v0.AddArg(v1)
28116 return true
28117 }
// match: (ORshiftLL <t> [56] o0:(ORshiftLL [48] o1:(ORshiftLL [40] o2:(ORshiftLL [32] y0:(REVW x0:(MOVWUloadidx ptr (ADDconst [4] idx) mem)) y1:(MOVDnop x1:(MOVBUloadidx ptr (ADDconst [3] idx) mem))) y2:(MOVDnop x2:(MOVBUloadidx ptr (ADDconst [2] idx) mem))) y3:(MOVDnop x3:(MOVBUloadidx ptr (ADDconst [1] idx) mem))) y4:(MOVDnop x4:(MOVBUloadidx ptr idx mem)))
// cond: all parts have one use && mergePoint(b, x0, x1, x2, x3, x4) != nil && all parts can be clobbered
// result: @mergePoint(b, x0, x1, x2, x3, x4) (REV <t> (MOVDloadidx <t> ptr idx mem))
28121 for {
28122 t := v.Type
28123 if v.AuxInt != 56 {
28124 break
28125 }
28126 _ = v.Args[1]
28127 o0 := v.Args[0]
28128 if o0.Op != OpARM64ORshiftLL {
28129 break
28130 }
28131 if o0.AuxInt != 48 {
28132 break
28133 }
28134 _ = o0.Args[1]
28135 o1 := o0.Args[0]
28136 if o1.Op != OpARM64ORshiftLL {
28137 break
28138 }
28139 if o1.AuxInt != 40 {
28140 break
28141 }
28142 _ = o1.Args[1]
28143 o2 := o1.Args[0]
28144 if o2.Op != OpARM64ORshiftLL {
28145 break
28146 }
28147 if o2.AuxInt != 32 {
28148 break
28149 }
28150 _ = o2.Args[1]
28151 y0 := o2.Args[0]
28152 if y0.Op != OpARM64REVW {
28153 break
28154 }
28155 x0 := y0.Args[0]
28156 if x0.Op != OpARM64MOVWUloadidx {
28157 break
28158 }
28159 mem := x0.Args[2]
28160 ptr := x0.Args[0]
28161 x0_1 := x0.Args[1]
28162 if x0_1.Op != OpARM64ADDconst {
28163 break
28164 }
28165 if x0_1.AuxInt != 4 {
28166 break
28167 }
28168 idx := x0_1.Args[0]
28169 y1 := o2.Args[1]
28170 if y1.Op != OpARM64MOVDnop {
28171 break
28172 }
28173 x1 := y1.Args[0]
28174 if x1.Op != OpARM64MOVBUloadidx {
28175 break
28176 }
28177 _ = x1.Args[2]
28178 if ptr != x1.Args[0] {
28179 break
28180 }
28181 x1_1 := x1.Args[1]
28182 if x1_1.Op != OpARM64ADDconst {
28183 break
28184 }
28185 if x1_1.AuxInt != 3 {
28186 break
28187 }
28188 if idx != x1_1.Args[0] {
28189 break
28190 }
28191 if mem != x1.Args[2] {
28192 break
28193 }
28194 y2 := o1.Args[1]
28195 if y2.Op != OpARM64MOVDnop {
28196 break
28197 }
28198 x2 := y2.Args[0]
28199 if x2.Op != OpARM64MOVBUloadidx {
28200 break
28201 }
28202 _ = x2.Args[2]
28203 if ptr != x2.Args[0] {
28204 break
28205 }
28206 x2_1 := x2.Args[1]
28207 if x2_1.Op != OpARM64ADDconst {
28208 break
28209 }
28210 if x2_1.AuxInt != 2 {
28211 break
28212 }
28213 if idx != x2_1.Args[0] {
28214 break
28215 }
28216 if mem != x2.Args[2] {
28217 break
28218 }
28219 y3 := o0.Args[1]
28220 if y3.Op != OpARM64MOVDnop {
28221 break
28222 }
28223 x3 := y3.Args[0]
28224 if x3.Op != OpARM64MOVBUloadidx {
28225 break
28226 }
28227 _ = x3.Args[2]
28228 if ptr != x3.Args[0] {
28229 break
28230 }
28231 x3_1 := x3.Args[1]
28232 if x3_1.Op != OpARM64ADDconst {
28233 break
28234 }
28235 if x3_1.AuxInt != 1 {
28236 break
28237 }
28238 if idx != x3_1.Args[0] {
28239 break
28240 }
28241 if mem != x3.Args[2] {
28242 break
28243 }
28244 y4 := v.Args[1]
28245 if y4.Op != OpARM64MOVDnop {
28246 break
28247 }
28248 x4 := y4.Args[0]
28249 if x4.Op != OpARM64MOVBUloadidx {
28250 break
28251 }
28252 _ = x4.Args[2]
28253 if ptr != x4.Args[0] {
28254 break
28255 }
28256 if idx != x4.Args[1] {
28257 break
28258 }
28259 if mem != x4.Args[2] {
28260 break
28261 }
28262 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2)) {
28263 break
28264 }
28265 b = mergePoint(b, x0, x1, x2, x3, x4)
28266 v0 := b.NewValue0(v.Pos, OpARM64REV, t)
28267 v.reset(OpCopy)
28268 v.AddArg(v0)
28269 v1 := b.NewValue0(v.Pos, OpARM64MOVDloadidx, t)
28270 v1.AddArg(ptr)
28271 v1.AddArg(idx)
28272 v1.AddArg(mem)
28273 v0.AddArg(v1)
28274 return true
28275 }
28276 return false
28277 }
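// rewriteValueARM64_OpARM64ORshiftRA_0 folds constants through ORshiftRA and
// collapses an OR of a value with its own identical arithmetic shift.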
28278 func rewriteValueARM64_OpARM64ORshiftRA_0(v *Value) bool {
28279 b := v.Block
// match: (ORshiftRA (MOVDconst [c]) x [d])
// cond:
// result: (ORconst [c] (SRAconst <x.Type> x [d]))
28283 for {
28284 d := v.AuxInt
28285 x := v.Args[1]
28286 v_0 := v.Args[0]
28287 if v_0.Op != OpARM64MOVDconst {
28288 break
28289 }
28290 c := v_0.AuxInt
28291 v.reset(OpARM64ORconst)
28292 v.AuxInt = c
28293 v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type)
28294 v0.AuxInt = d
28295 v0.AddArg(x)
28296 v.AddArg(v0)
28297 return true
28298 }
// match: (ORshiftRA x (MOVDconst [c]) [d])
// cond:
// result: (ORconst x [c>>uint64(d)])
28302 for {
28303 d := v.AuxInt
28304 _ = v.Args[1]
28305 x := v.Args[0]
28306 v_1 := v.Args[1]
28307 if v_1.Op != OpARM64MOVDconst {
28308 break
28309 }
28310 c := v_1.AuxInt
28311 v.reset(OpARM64ORconst)
28312 v.AuxInt = c >> uint64(d)
28313 v.AddArg(x)
28314 return true
28315 }
// match: (ORshiftRA x y:(SRAconst x [c]) [d])
// cond: c==d
// result: y
28319 for {
28320 d := v.AuxInt
28321 _ = v.Args[1]
28322 x := v.Args[0]
28323 y := v.Args[1]
28324 if y.Op != OpARM64SRAconst {
28325 break
28326 }
28327 c := y.AuxInt
28328 if x != y.Args[0] {
28329 break
28330 }
28331 if !(c == d) {
28332 break
28333 }
28334 v.reset(OpCopy)
28335 v.Type = y.Type
28336 v.AddArg(y)
28337 return true
28338 }
28339 return false
28340 }
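// rewriteValueARM64_OpARM64ORshiftRL_0 folds constants through ORshiftRL,
// recognizes rotate (RORconst/RORWconst) and bit-field insert (BFI) patterns,
// and collapses an OR of a value with its own identical logical shift.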
28341 func rewriteValueARM64_OpARM64ORshiftRL_0(v *Value) bool {
28342 b := v.Block
// match: (ORshiftRL (MOVDconst [c]) x [d])
// cond:
// result: (ORconst [c] (SRLconst <x.Type> x [d]))
28346 for {
28347 d := v.AuxInt
28348 x := v.Args[1]
28349 v_0 := v.Args[0]
28350 if v_0.Op != OpARM64MOVDconst {
28351 break
28352 }
28353 c := v_0.AuxInt
28354 v.reset(OpARM64ORconst)
28355 v.AuxInt = c
28356 v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type)
28357 v0.AuxInt = d
28358 v0.AddArg(x)
28359 v.AddArg(v0)
28360 return true
28361 }
// match: (ORshiftRL x (MOVDconst [c]) [d])
// cond:
// result: (ORconst x [int64(uint64(c)>>uint64(d))])
28365 for {
28366 d := v.AuxInt
28367 _ = v.Args[1]
28368 x := v.Args[0]
28369 v_1 := v.Args[1]
28370 if v_1.Op != OpARM64MOVDconst {
28371 break
28372 }
28373 c := v_1.AuxInt
28374 v.reset(OpARM64ORconst)
28375 v.AuxInt = int64(uint64(c) >> uint64(d))
28376 v.AddArg(x)
28377 return true
28378 }
// match: (ORshiftRL x y:(SRLconst x [c]) [d])
// cond: c==d
// result: y
28382 for {
28383 d := v.AuxInt
28384 _ = v.Args[1]
28385 x := v.Args[0]
28386 y := v.Args[1]
28387 if y.Op != OpARM64SRLconst {
28388 break
28389 }
28390 c := y.AuxInt
28391 if x != y.Args[0] {
28392 break
28393 }
28394 if !(c == d) {
28395 break
28396 }
28397 v.reset(OpCopy)
28398 v.Type = y.Type
28399 v.AddArg(y)
28400 return true
28401 }
// match: (ORshiftRL [c] (SLLconst x [64-c]) x)
// cond:
// result: (RORconst [c] x)
28405 for {
28406 c := v.AuxInt
28407 x := v.Args[1]
28408 v_0 := v.Args[0]
28409 if v_0.Op != OpARM64SLLconst {
28410 break
28411 }
28412 if v_0.AuxInt != 64-c {
28413 break
28414 }
28415 if x != v_0.Args[0] {
28416 break
28417 }
28418 v.reset(OpARM64RORconst)
28419 v.AuxInt = c
28420 v.AddArg(x)
28421 return true
28422 }
// match: (ORshiftRL <t> [c] (SLLconst x [32-c]) (MOVWUreg x))
// cond: c < 32 && t.Size() == 4
// result: (RORWconst [c] x)
28426 for {
28427 t := v.Type
28428 c := v.AuxInt
28429 _ = v.Args[1]
28430 v_0 := v.Args[0]
28431 if v_0.Op != OpARM64SLLconst {
28432 break
28433 }
28434 if v_0.AuxInt != 32-c {
28435 break
28436 }
28437 x := v_0.Args[0]
28438 v_1 := v.Args[1]
28439 if v_1.Op != OpARM64MOVWUreg {
28440 break
28441 }
28442 if x != v_1.Args[0] {
28443 break
28444 }
28445 if !(c < 32 && t.Size() == 4) {
28446 break
28447 }
28448 v.reset(OpARM64RORWconst)
28449 v.AuxInt = c
28450 v.AddArg(x)
28451 return true
28452 }
// match: (ORshiftRL [rc] (ANDconst [ac] x) (SLLconst [lc] y))
// cond: lc > rc && ac == ^((1<<uint(64-lc)-1) << uint64(lc-rc))
// result: (BFI [armBFAuxInt(lc-rc, 64-lc)] x y)
28456 for {
28457 rc := v.AuxInt
28458 _ = v.Args[1]
28459 v_0 := v.Args[0]
28460 if v_0.Op != OpARM64ANDconst {
28461 break
28462 }
28463 ac := v_0.AuxInt
28464 x := v_0.Args[0]
28465 v_1 := v.Args[1]
28466 if v_1.Op != OpARM64SLLconst {
28467 break
28468 }
28469 lc := v_1.AuxInt
28470 y := v_1.Args[0]
28471 if !(lc > rc && ac == ^((1<<uint(64-lc)-1)<<uint64(lc-rc))) {
28472 break
28473 }
28474 v.reset(OpARM64BFI)
28475 v.AuxInt = armBFAuxInt(lc-rc, 64-lc)
28476 v.AddArg(x)
28477 v.AddArg(y)
28478 return true
28479 }
28480 return false
28481 }
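// rewriteValueARM64_OpARM64RORWconst_0 merges nested 32-bit rotates into one
// RORWconst with the rotate amounts added modulo 32.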
28482 func rewriteValueARM64_OpARM64RORWconst_0(v *Value) bool {
// match: (RORWconst [c] (RORWconst [d] x))
// cond:
// result: (RORWconst [(c+d)&31] x)
28486 for {
28487 c := v.AuxInt
28488 v_0 := v.Args[0]
28489 if v_0.Op != OpARM64RORWconst {
28490 break
28491 }
28492 d := v_0.AuxInt
28493 x := v_0.Args[0]
28494 v.reset(OpARM64RORWconst)
28495 v.AuxInt = (c + d) & 31
28496 v.AddArg(x)
28497 return true
28498 }
28499 return false
28500 }
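// rewriteValueARM64_OpARM64RORconst_0 merges nested 64-bit rotates into one
// RORconst with the rotate amounts added modulo 64.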
28501 func rewriteValueARM64_OpARM64RORconst_0(v *Value) bool {
// match: (RORconst [c] (RORconst [d] x))
// cond:
// result: (RORconst [(c+d)&63] x)
28505 for {
28506 c := v.AuxInt
28507 v_0 := v.Args[0]
28508 if v_0.Op != OpARM64RORconst {
28509 break
28510 }
28511 d := v_0.AuxInt
28512 x := v_0.Args[0]
28513 v.reset(OpARM64RORconst)
28514 v.AuxInt = (c + d) & 63
28515 v.AddArg(x)
28516 return true
28517 }
28518 return false
28519 }
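// rewriteValueARM64_OpARM64SBCSflags_0 simplifies the borrow input of
// SBCSflags: a borrow rebuilt via NEGSflags/NEG/NGCzerocarry is passed through
// directly, and a constant-zero borrow turns the operation into SUBSflags.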
28520 func rewriteValueARM64_OpARM64SBCSflags_0(v *Value) bool {
28521 b := v.Block
28522 typ := &b.Func.Config.Types
// match: (SBCSflags x y (Select1 <types.TypeFlags> (NEGSflags (NEG <typ.UInt64> (NGCzerocarry <typ.UInt64> bo)))))
// cond:
// result: (SBCSflags x y bo)
28526 for {
28527 _ = v.Args[2]
28528 x := v.Args[0]
28529 y := v.Args[1]
28530 v_2 := v.Args[2]
28531 if v_2.Op != OpSelect1 {
28532 break
28533 }
28534 if v_2.Type != types.TypeFlags {
28535 break
28536 }
28537 v_2_0 := v_2.Args[0]
28538 if v_2_0.Op != OpARM64NEGSflags {
28539 break
28540 }
28541 v_2_0_0 := v_2_0.Args[0]
28542 if v_2_0_0.Op != OpARM64NEG {
28543 break
28544 }
28545 if v_2_0_0.Type != typ.UInt64 {
28546 break
28547 }
28548 v_2_0_0_0 := v_2_0_0.Args[0]
28549 if v_2_0_0_0.Op != OpARM64NGCzerocarry {
28550 break
28551 }
28552 if v_2_0_0_0.Type != typ.UInt64 {
28553 break
28554 }
28555 bo := v_2_0_0_0.Args[0]
28556 v.reset(OpARM64SBCSflags)
28557 v.AddArg(x)
28558 v.AddArg(y)
28559 v.AddArg(bo)
28560 return true
28561 }
// match: (SBCSflags x y (Select1 <types.TypeFlags> (NEGSflags (MOVDconst [0]))))
// cond:
// result: (SUBSflags x y)
28565 for {
28566 _ = v.Args[2]
28567 x := v.Args[0]
28568 y := v.Args[1]
28569 v_2 := v.Args[2]
28570 if v_2.Op != OpSelect1 {
28571 break
28572 }
28573 if v_2.Type != types.TypeFlags {
28574 break
28575 }
28576 v_2_0 := v_2.Args[0]
28577 if v_2_0.Op != OpARM64NEGSflags {
28578 break
28579 }
28580 v_2_0_0 := v_2_0.Args[0]
28581 if v_2_0_0.Op != OpARM64MOVDconst {
28582 break
28583 }
28584 if v_2_0_0.AuxInt != 0 {
28585 break
28586 }
28587 v.reset(OpARM64SUBSflags)
28588 v.AddArg(x)
28589 v.AddArg(y)
28590 return true
28591 }
28592 return false
28593 }
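// rewriteValueARM64_OpARM64SLL_0 turns a shift by a constant amount into
// SLLconst, taking the amount modulo 64.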
28594 func rewriteValueARM64_OpARM64SLL_0(v *Value) bool {
// match: (SLL x (MOVDconst [c]))
// cond:
// result: (SLLconst x [c&63])
28598 for {
28599 _ = v.Args[1]
28600 x := v.Args[0]
28601 v_1 := v.Args[1]
28602 if v_1.Op != OpARM64MOVDconst {
28603 break
28604 }
28605 c := v_1.AuxInt
28606 v.reset(OpARM64SLLconst)
28607 v.AuxInt = c & 63
28608 v.AddArg(x)
28609 return true
28610 }
28611 return false
28612 }
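// rewriteValueARM64_OpARM64SLLconst_0 constant-folds SLLconst and recognizes
// mask and zero-extension patterns that become ANDconst or UBFIZ bit-field
// operations.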
28613 func rewriteValueARM64_OpARM64SLLconst_0(v *Value) bool {
// match: (SLLconst [c] (MOVDconst [d]))
// cond:
// result: (MOVDconst [d<<uint64(c)])
28617 for {
28618 c := v.AuxInt
28619 v_0 := v.Args[0]
28620 if v_0.Op != OpARM64MOVDconst {
28621 break
28622 }
28623 d := v_0.AuxInt
28624 v.reset(OpARM64MOVDconst)
28625 v.AuxInt = d << uint64(c)
28626 return true
28627 }
// match: (SLLconst [c] (SRLconst [c] x))
// cond: 0 < c && c < 64
// result: (ANDconst [^(1<<uint(c)-1)] x)
28631 for {
28632 c := v.AuxInt
28633 v_0 := v.Args[0]
28634 if v_0.Op != OpARM64SRLconst {
28635 break
28636 }
28637 if v_0.AuxInt != c {
28638 break
28639 }
28640 x := v_0.Args[0]
28641 if !(0 < c && c < 64) {
28642 break
28643 }
28644 v.reset(OpARM64ANDconst)
28645 v.AuxInt = ^(1<<uint(c) - 1)
28646 v.AddArg(x)
28647 return true
28648 }
// match: (SLLconst [sc] (ANDconst [ac] x))
// cond: isARM64BFMask(sc, ac, 0)
// result: (UBFIZ [armBFAuxInt(sc, arm64BFWidth(ac, 0))] x)
28652 for {
28653 sc := v.AuxInt
28654 v_0 := v.Args[0]
28655 if v_0.Op != OpARM64ANDconst {
28656 break
28657 }
28658 ac := v_0.AuxInt
28659 x := v_0.Args[0]
28660 if !(isARM64BFMask(sc, ac, 0)) {
28661 break
28662 }
28663 v.reset(OpARM64UBFIZ)
28664 v.AuxInt = armBFAuxInt(sc, arm64BFWidth(ac, 0))
28665 v.AddArg(x)
28666 return true
28667 }
// match: (SLLconst [sc] (MOVWUreg x))
// cond: isARM64BFMask(sc, 1<<32-1, 0)
// result: (UBFIZ [armBFAuxInt(sc, 32)] x)
28671 for {
28672 sc := v.AuxInt
28673 v_0 := v.Args[0]
28674 if v_0.Op != OpARM64MOVWUreg {
28675 break
28676 }
28677 x := v_0.Args[0]
28678 if !(isARM64BFMask(sc, 1<<32-1, 0)) {
28679 break
28680 }
28681 v.reset(OpARM64UBFIZ)
28682 v.AuxInt = armBFAuxInt(sc, 32)
28683 v.AddArg(x)
28684 return true
28685 }
28686
28687
28688
28689 for {
28690 sc := v.AuxInt
28691 v_0 := v.Args[0]
28692 if v_0.Op != OpARM64MOVHUreg {
28693 break
28694 }
28695 x := v_0.Args[0]
28696 if !(isARM64BFMask(sc, 1<<16-1, 0)) {
28697 break
28698 }
28699 v.reset(OpARM64UBFIZ)
28700 v.AuxInt = armBFAuxInt(sc, 16)
28701 v.AddArg(x)
28702 return true
28703 }
28704
28705
28706
28707 for {
28708 sc := v.AuxInt
28709 v_0 := v.Args[0]
28710 if v_0.Op != OpARM64MOVBUreg {
28711 break
28712 }
28713 x := v_0.Args[0]
28714 if !(isARM64BFMask(sc, 1<<8-1, 0)) {
28715 break
28716 }
28717 v.reset(OpARM64UBFIZ)
28718 v.AuxInt = armBFAuxInt(sc, 8)
28719 v.AddArg(x)
28720 return true
28721 }
28722
28723
28724
28725 for {
28726 sc := v.AuxInt
28727 v_0 := v.Args[0]
28728 if v_0.Op != OpARM64UBFIZ {
28729 break
28730 }
28731 bfc := v_0.AuxInt
28732 x := v_0.Args[0]
28733 if !(sc+getARM64BFwidth(bfc)+getARM64BFlsb(bfc) < 64) {
28734 break
28735 }
28736 v.reset(OpARM64UBFIZ)
28737 v.AuxInt = armBFAuxInt(getARM64BFlsb(bfc)+sc, getARM64BFwidth(bfc))
28738 v.AddArg(x)
28739 return true
28740 }
28741 return false
28742 }
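// Editorial sketch (not generated by rulegen): besides recognising UBFIZ forms
// for left shifts of zero-extended or masked values, one SLLconst rule above
// rewrites (SLLconst [c] (SRLconst [c] x)) into (ANDconst [^(1<<c-1)] x) for
// 0 < c < 64, because shifting right then left by the same amount simply clears
// the low c bits. demoClearLowBits64 is a hypothetical helper stating that identity.
func demoClearLowBits64(x uint64, c uint) uint64 {
	c &= 63 // added guard; the rewrite itself only fires for 0 < c < 64
	shifted := (x >> c) << c
	masked := x &^ (1<<c - 1)
	if shifted != masked {
		panic("unreachable: (x>>c)<<c equals x with the low c bits cleared")
	}
	return masked
}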
28743 func rewriteValueARM64_OpARM64SRA_0(v *Value) bool {
28744 // match: (SRA x (MOVDconst [c]))
28745 // cond:
28746 // result: (SRAconst x [c&63])
28747 for {
28748 _ = v.Args[1]
28749 x := v.Args[0]
28750 v_1 := v.Args[1]
28751 if v_1.Op != OpARM64MOVDconst {
28752 break
28753 }
28754 c := v_1.AuxInt
28755 v.reset(OpARM64SRAconst)
28756 v.AuxInt = c & 63
28757 v.AddArg(x)
28758 return true
28759 }
28760 return false
28761 }
28762 func rewriteValueARM64_OpARM64SRAconst_0(v *Value) bool {
28763
28764
28765
28766 for {
28767 c := v.AuxInt
28768 v_0 := v.Args[0]
28769 if v_0.Op != OpARM64MOVDconst {
28770 break
28771 }
28772 d := v_0.AuxInt
28773 v.reset(OpARM64MOVDconst)
28774 v.AuxInt = d >> uint64(c)
28775 return true
28776 }
28777
28778
28779
28780 for {
28781 rc := v.AuxInt
28782 v_0 := v.Args[0]
28783 if v_0.Op != OpARM64SLLconst {
28784 break
28785 }
28786 lc := v_0.AuxInt
28787 x := v_0.Args[0]
28788 if !(lc > rc) {
28789 break
28790 }
28791 v.reset(OpARM64SBFIZ)
28792 v.AuxInt = armBFAuxInt(lc-rc, 64-lc)
28793 v.AddArg(x)
28794 return true
28795 }
28796
28797
28798
28799 for {
28800 rc := v.AuxInt
28801 v_0 := v.Args[0]
28802 if v_0.Op != OpARM64SLLconst {
28803 break
28804 }
28805 lc := v_0.AuxInt
28806 x := v_0.Args[0]
28807 if !(lc <= rc) {
28808 break
28809 }
28810 v.reset(OpARM64SBFX)
28811 v.AuxInt = armBFAuxInt(rc-lc, 64-rc)
28812 v.AddArg(x)
28813 return true
28814 }
28815
28816
28817
28818 for {
28819 rc := v.AuxInt
28820 v_0 := v.Args[0]
28821 if v_0.Op != OpARM64MOVWreg {
28822 break
28823 }
28824 x := v_0.Args[0]
28825 if !(rc < 32) {
28826 break
28827 }
28828 v.reset(OpARM64SBFX)
28829 v.AuxInt = armBFAuxInt(rc, 32-rc)
28830 v.AddArg(x)
28831 return true
28832 }
28833
28834
28835
28836 for {
28837 rc := v.AuxInt
28838 v_0 := v.Args[0]
28839 if v_0.Op != OpARM64MOVHreg {
28840 break
28841 }
28842 x := v_0.Args[0]
28843 if !(rc < 16) {
28844 break
28845 }
28846 v.reset(OpARM64SBFX)
28847 v.AuxInt = armBFAuxInt(rc, 16-rc)
28848 v.AddArg(x)
28849 return true
28850 }
28851
28852
28853
28854 for {
28855 rc := v.AuxInt
28856 v_0 := v.Args[0]
28857 if v_0.Op != OpARM64MOVBreg {
28858 break
28859 }
28860 x := v_0.Args[0]
28861 if !(rc < 8) {
28862 break
28863 }
28864 v.reset(OpARM64SBFX)
28865 v.AuxInt = armBFAuxInt(rc, 8-rc)
28866 v.AddArg(x)
28867 return true
28868 }
28869
28870
28871
28872 for {
28873 sc := v.AuxInt
28874 v_0 := v.Args[0]
28875 if v_0.Op != OpARM64SBFIZ {
28876 break
28877 }
28878 bfc := v_0.AuxInt
28879 x := v_0.Args[0]
28880 if !(sc < getARM64BFlsb(bfc)) {
28881 break
28882 }
28883 v.reset(OpARM64SBFIZ)
28884 v.AuxInt = armBFAuxInt(getARM64BFlsb(bfc)-sc, getARM64BFwidth(bfc))
28885 v.AddArg(x)
28886 return true
28887 }
28888
28889
28890
28891 for {
28892 sc := v.AuxInt
28893 v_0 := v.Args[0]
28894 if v_0.Op != OpARM64SBFIZ {
28895 break
28896 }
28897 bfc := v_0.AuxInt
28898 x := v_0.Args[0]
28899 if !(sc >= getARM64BFlsb(bfc) && sc < getARM64BFlsb(bfc)+getARM64BFwidth(bfc)) {
28900 break
28901 }
28902 v.reset(OpARM64SBFX)
28903 v.AuxInt = armBFAuxInt(sc-getARM64BFlsb(bfc), getARM64BFlsb(bfc)+getARM64BFwidth(bfc)-sc)
28904 v.AddArg(x)
28905 return true
28906 }
28907 return false
28908 }
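// Editorial sketch (not generated by rulegen): the SRAconst rules either fold a
// constant operand directly or recognise sign-extending bitfield operations
// (SBFIZ/SBFX) over sign-extended narrow values. The constant fold relies on
// Go's signed right shift being arithmetic, matching ARM64's ASR.
// demoArithShiftFold is a hypothetical helper; the masking of c is an added
// guard, the rewrite itself only sees shift amounts in [0,64).
func demoArithShiftFold(d int64, c uint64) int64 {
	// Arithmetic shift keeps the sign: -8 >> 1 == -4.
	return d >> (c & 63)
}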
28909 func rewriteValueARM64_OpARM64SRL_0(v *Value) bool {
28910 // match: (SRL x (MOVDconst [c]))
28911 // cond:
28912 // result: (SRLconst x [c&63])
28913 for {
28914 _ = v.Args[1]
28915 x := v.Args[0]
28916 v_1 := v.Args[1]
28917 if v_1.Op != OpARM64MOVDconst {
28918 break
28919 }
28920 c := v_1.AuxInt
28921 v.reset(OpARM64SRLconst)
28922 v.AuxInt = c & 63
28923 v.AddArg(x)
28924 return true
28925 }
28926 return false
28927 }
28928 func rewriteValueARM64_OpARM64SRLconst_0(v *Value) bool {
28929 // match: (SRLconst [c] (MOVDconst [d]))
28930 // cond:
28931 // result: (MOVDconst [int64(uint64(d)>>uint64(c))])
28932 for {
28933 c := v.AuxInt
28934 v_0 := v.Args[0]
28935 if v_0.Op != OpARM64MOVDconst {
28936 break
28937 }
28938 d := v_0.AuxInt
28939 v.reset(OpARM64MOVDconst)
28940 v.AuxInt = int64(uint64(d) >> uint64(c))
28941 return true
28942 }
28943
28944
28945
28946 for {
28947 c := v.AuxInt
28948 v_0 := v.Args[0]
28949 if v_0.Op != OpARM64SLLconst {
28950 break
28951 }
28952 if v_0.AuxInt != c {
28953 break
28954 }
28955 x := v_0.Args[0]
28956 if !(0 < c && c < 64) {
28957 break
28958 }
28959 v.reset(OpARM64ANDconst)
28960 v.AuxInt = 1<<uint(64-c) - 1
28961 v.AddArg(x)
28962 return true
28963 }
28964
28965
28966
28967 for {
28968 rc := v.AuxInt
28969 v_0 := v.Args[0]
28970 if v_0.Op != OpARM64SLLconst {
28971 break
28972 }
28973 lc := v_0.AuxInt
28974 x := v_0.Args[0]
28975 if !(lc > rc) {
28976 break
28977 }
28978 v.reset(OpARM64UBFIZ)
28979 v.AuxInt = armBFAuxInt(lc-rc, 64-lc)
28980 v.AddArg(x)
28981 return true
28982 }
28983
28984
28985
28986 for {
28987 sc := v.AuxInt
28988 v_0 := v.Args[0]
28989 if v_0.Op != OpARM64ANDconst {
28990 break
28991 }
28992 ac := v_0.AuxInt
28993 x := v_0.Args[0]
28994 if !(isARM64BFMask(sc, ac, sc)) {
28995 break
28996 }
28997 v.reset(OpARM64UBFX)
28998 v.AuxInt = armBFAuxInt(sc, arm64BFWidth(ac, sc))
28999 v.AddArg(x)
29000 return true
29001 }
29002
29003
29004
29005 for {
29006 sc := v.AuxInt
29007 v_0 := v.Args[0]
29008 if v_0.Op != OpARM64MOVWUreg {
29009 break
29010 }
29011 x := v_0.Args[0]
29012 if !(isARM64BFMask(sc, 1<<32-1, sc)) {
29013 break
29014 }
29015 v.reset(OpARM64UBFX)
29016 v.AuxInt = armBFAuxInt(sc, arm64BFWidth(1<<32-1, sc))
29017 v.AddArg(x)
29018 return true
29019 }
29020
29021
29022
29023 for {
29024 sc := v.AuxInt
29025 v_0 := v.Args[0]
29026 if v_0.Op != OpARM64MOVHUreg {
29027 break
29028 }
29029 x := v_0.Args[0]
29030 if !(isARM64BFMask(sc, 1<<16-1, sc)) {
29031 break
29032 }
29033 v.reset(OpARM64UBFX)
29034 v.AuxInt = armBFAuxInt(sc, arm64BFWidth(1<<16-1, sc))
29035 v.AddArg(x)
29036 return true
29037 }
29038
29039
29040
29041 for {
29042 sc := v.AuxInt
29043 v_0 := v.Args[0]
29044 if v_0.Op != OpARM64MOVBUreg {
29045 break
29046 }
29047 x := v_0.Args[0]
29048 if !(isARM64BFMask(sc, 1<<8-1, sc)) {
29049 break
29050 }
29051 v.reset(OpARM64UBFX)
29052 v.AuxInt = armBFAuxInt(sc, arm64BFWidth(1<<8-1, sc))
29053 v.AddArg(x)
29054 return true
29055 }
29056
29057
29058
29059 for {
29060 rc := v.AuxInt
29061 v_0 := v.Args[0]
29062 if v_0.Op != OpARM64SLLconst {
29063 break
29064 }
29065 lc := v_0.AuxInt
29066 x := v_0.Args[0]
29067 if !(lc < rc) {
29068 break
29069 }
29070 v.reset(OpARM64UBFX)
29071 v.AuxInt = armBFAuxInt(rc-lc, 64-rc)
29072 v.AddArg(x)
29073 return true
29074 }
29075
29076
29077
29078 for {
29079 sc := v.AuxInt
29080 v_0 := v.Args[0]
29081 if v_0.Op != OpARM64UBFX {
29082 break
29083 }
29084 bfc := v_0.AuxInt
29085 x := v_0.Args[0]
29086 if !(sc < getARM64BFwidth(bfc)) {
29087 break
29088 }
29089 v.reset(OpARM64UBFX)
29090 v.AuxInt = armBFAuxInt(getARM64BFlsb(bfc)+sc, getARM64BFwidth(bfc)-sc)
29091 v.AddArg(x)
29092 return true
29093 }
29094
29095
29096
29097 for {
29098 sc := v.AuxInt
29099 v_0 := v.Args[0]
29100 if v_0.Op != OpARM64UBFIZ {
29101 break
29102 }
29103 bfc := v_0.AuxInt
29104 x := v_0.Args[0]
29105 if !(sc == getARM64BFlsb(bfc)) {
29106 break
29107 }
29108 v.reset(OpARM64ANDconst)
29109 v.AuxInt = 1<<uint(getARM64BFwidth(bfc)) - 1
29110 v.AddArg(x)
29111 return true
29112 }
29113 return false
29114 }
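// Editorial sketch (not generated by rulegen): alongside the UBFX/UBFIZ
// recognitions, the complementary SRLconst rule keeps only the low 64-c bits:
// (SRLconst [c] (SLLconst [c] x)) => (ANDconst [1<<(64-c)-1] x) for 0 < c < 64.
// demoKeepLowBits64 is a hypothetical helper spelling that out.
func demoKeepLowBits64(x uint64, c uint) uint64 {
	if c == 0 || c >= 64 {
		return x // the rewrite only fires for 0 < c < 64
	}
	shifted := (x << c) >> c
	masked := x & (1<<(64-c) - 1)
	if shifted != masked {
		panic("unreachable: (x<<c)>>c keeps exactly the low 64-c bits")
	}
	return masked
}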
29115 func rewriteValueARM64_OpARM64SRLconst_10(v *Value) bool {
29116
29117
29118
29119 for {
29120 sc := v.AuxInt
29121 v_0 := v.Args[0]
29122 if v_0.Op != OpARM64UBFIZ {
29123 break
29124 }
29125 bfc := v_0.AuxInt
29126 x := v_0.Args[0]
29127 if !(sc < getARM64BFlsb(bfc)) {
29128 break
29129 }
29130 v.reset(OpARM64UBFIZ)
29131 v.AuxInt = armBFAuxInt(getARM64BFlsb(bfc)-sc, getARM64BFwidth(bfc))
29132 v.AddArg(x)
29133 return true
29134 }
29135
29136
29137
29138 for {
29139 sc := v.AuxInt
29140 v_0 := v.Args[0]
29141 if v_0.Op != OpARM64UBFIZ {
29142 break
29143 }
29144 bfc := v_0.AuxInt
29145 x := v_0.Args[0]
29146 if !(sc > getARM64BFlsb(bfc) && sc < getARM64BFlsb(bfc)+getARM64BFwidth(bfc)) {
29147 break
29148 }
29149 v.reset(OpARM64UBFX)
29150 v.AuxInt = armBFAuxInt(sc-getARM64BFlsb(bfc), getARM64BFlsb(bfc)+getARM64BFwidth(bfc)-sc)
29151 v.AddArg(x)
29152 return true
29153 }
29154 return false
29155 }
29156 func rewriteValueARM64_OpARM64STP_0(v *Value) bool {
29157 b := v.Block
29158 config := b.Func.Config
29159
29160
29161
29162 for {
29163 off1 := v.AuxInt
29164 sym := v.Aux
29165 mem := v.Args[3]
29166 v_0 := v.Args[0]
29167 if v_0.Op != OpARM64ADDconst {
29168 break
29169 }
29170 off2 := v_0.AuxInt
29171 ptr := v_0.Args[0]
29172 val1 := v.Args[1]
29173 val2 := v.Args[2]
29174 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
29175 break
29176 }
29177 v.reset(OpARM64STP)
29178 v.AuxInt = off1 + off2
29179 v.Aux = sym
29180 v.AddArg(ptr)
29181 v.AddArg(val1)
29182 v.AddArg(val2)
29183 v.AddArg(mem)
29184 return true
29185 }
29186
29187
29188
29189 for {
29190 off1 := v.AuxInt
29191 sym1 := v.Aux
29192 mem := v.Args[3]
29193 v_0 := v.Args[0]
29194 if v_0.Op != OpARM64MOVDaddr {
29195 break
29196 }
29197 off2 := v_0.AuxInt
29198 sym2 := v_0.Aux
29199 ptr := v_0.Args[0]
29200 val1 := v.Args[1]
29201 val2 := v.Args[2]
29202 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
29203 break
29204 }
29205 v.reset(OpARM64STP)
29206 v.AuxInt = off1 + off2
29207 v.Aux = mergeSym(sym1, sym2)
29208 v.AddArg(ptr)
29209 v.AddArg(val1)
29210 v.AddArg(val2)
29211 v.AddArg(mem)
29212 return true
29213 }
29214
29215
29216
29217 for {
29218 off := v.AuxInt
29219 sym := v.Aux
29220 mem := v.Args[3]
29221 ptr := v.Args[0]
29222 v_1 := v.Args[1]
29223 if v_1.Op != OpARM64MOVDconst {
29224 break
29225 }
29226 if v_1.AuxInt != 0 {
29227 break
29228 }
29229 v_2 := v.Args[2]
29230 if v_2.Op != OpARM64MOVDconst {
29231 break
29232 }
29233 if v_2.AuxInt != 0 {
29234 break
29235 }
29236 v.reset(OpARM64MOVQstorezero)
29237 v.AuxInt = off
29238 v.Aux = sym
29239 v.AddArg(ptr)
29240 v.AddArg(mem)
29241 return true
29242 }
29243 return false
29244 }
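// Editorial sketch (not generated by rulegen): the STP rules fold an
// (ADDconst [off2] ptr) or (MOVDaddr [off2] {sym2} ptr) base into the store-pair's
// own offset, as long as the merged offset still passes is32Bit (and the base is
// not SB when compiling with -shared); storing two zero constants becomes
// MOVQstorezero. demoFoldOffset is a hypothetical helper mirroring the
// is32Bit-style bound on the merged offset.
func demoFoldOffset(off1, off2 int64) (int64, bool) {
	sum := off1 + off2
	if sum == int64(int32(sum)) { // fits in a signed 32-bit offset
		return sum, true
	}
	return 0, false
}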
29245 func rewriteValueARM64_OpARM64SUB_0(v *Value) bool {
29246 b := v.Block
29247
29248
29249
29250 for {
29251 _ = v.Args[1]
29252 x := v.Args[0]
29253 v_1 := v.Args[1]
29254 if v_1.Op != OpARM64MOVDconst {
29255 break
29256 }
29257 c := v_1.AuxInt
29258 v.reset(OpARM64SUBconst)
29259 v.AuxInt = c
29260 v.AddArg(x)
29261 return true
29262 }
29263
29264
29265
29266 for {
29267 _ = v.Args[1]
29268 a := v.Args[0]
29269 l := v.Args[1]
29270 if l.Op != OpARM64MUL {
29271 break
29272 }
29273 y := l.Args[1]
29274 x := l.Args[0]
29275 if !(l.Uses == 1 && clobber(l)) {
29276 break
29277 }
29278 v.reset(OpARM64MSUB)
29279 v.AddArg(a)
29280 v.AddArg(x)
29281 v.AddArg(y)
29282 return true
29283 }
29284
29285
29286
29287 for {
29288 _ = v.Args[1]
29289 a := v.Args[0]
29290 l := v.Args[1]
29291 if l.Op != OpARM64MNEG {
29292 break
29293 }
29294 y := l.Args[1]
29295 x := l.Args[0]
29296 if !(l.Uses == 1 && clobber(l)) {
29297 break
29298 }
29299 v.reset(OpARM64MADD)
29300 v.AddArg(a)
29301 v.AddArg(x)
29302 v.AddArg(y)
29303 return true
29304 }
29305
29306
29307
29308 for {
29309 _ = v.Args[1]
29310 a := v.Args[0]
29311 l := v.Args[1]
29312 if l.Op != OpARM64MULW {
29313 break
29314 }
29315 y := l.Args[1]
29316 x := l.Args[0]
29317 if !(a.Type.Size() != 8 && l.Uses == 1 && clobber(l)) {
29318 break
29319 }
29320 v.reset(OpARM64MSUBW)
29321 v.AddArg(a)
29322 v.AddArg(x)
29323 v.AddArg(y)
29324 return true
29325 }
29326
29327
29328
29329 for {
29330 _ = v.Args[1]
29331 a := v.Args[0]
29332 l := v.Args[1]
29333 if l.Op != OpARM64MNEGW {
29334 break
29335 }
29336 y := l.Args[1]
29337 x := l.Args[0]
29338 if !(a.Type.Size() != 8 && l.Uses == 1 && clobber(l)) {
29339 break
29340 }
29341 v.reset(OpARM64MADDW)
29342 v.AddArg(a)
29343 v.AddArg(x)
29344 v.AddArg(y)
29345 return true
29346 }
29347
29348
29349
29350 for {
29351 x := v.Args[1]
29352 if x != v.Args[0] {
29353 break
29354 }
29355 v.reset(OpARM64MOVDconst)
29356 v.AuxInt = 0
29357 return true
29358 }
29359
29360
29361
29362 for {
29363 _ = v.Args[1]
29364 x := v.Args[0]
29365 v_1 := v.Args[1]
29366 if v_1.Op != OpARM64SUB {
29367 break
29368 }
29369 z := v_1.Args[1]
29370 y := v_1.Args[0]
29371 v.reset(OpARM64SUB)
29372 v0 := b.NewValue0(v.Pos, OpARM64ADD, v.Type)
29373 v0.AddArg(x)
29374 v0.AddArg(z)
29375 v.AddArg(v0)
29376 v.AddArg(y)
29377 return true
29378 }
29379
29380
29381
29382 for {
29383 z := v.Args[1]
29384 v_0 := v.Args[0]
29385 if v_0.Op != OpARM64SUB {
29386 break
29387 }
29388 y := v_0.Args[1]
29389 x := v_0.Args[0]
29390 v.reset(OpARM64SUB)
29391 v.AddArg(x)
29392 v0 := b.NewValue0(v.Pos, OpARM64ADD, y.Type)
29393 v0.AddArg(y)
29394 v0.AddArg(z)
29395 v.AddArg(v0)
29396 return true
29397 }
29398
29399
29400
29401 for {
29402 _ = v.Args[1]
29403 x0 := v.Args[0]
29404 x1 := v.Args[1]
29405 if x1.Op != OpARM64SLLconst {
29406 break
29407 }
29408 c := x1.AuxInt
29409 y := x1.Args[0]
29410 if !(clobberIfDead(x1)) {
29411 break
29412 }
29413 v.reset(OpARM64SUBshiftLL)
29414 v.AuxInt = c
29415 v.AddArg(x0)
29416 v.AddArg(y)
29417 return true
29418 }
29419
29420
29421
29422 for {
29423 _ = v.Args[1]
29424 x0 := v.Args[0]
29425 x1 := v.Args[1]
29426 if x1.Op != OpARM64SRLconst {
29427 break
29428 }
29429 c := x1.AuxInt
29430 y := x1.Args[0]
29431 if !(clobberIfDead(x1)) {
29432 break
29433 }
29434 v.reset(OpARM64SUBshiftRL)
29435 v.AuxInt = c
29436 v.AddArg(x0)
29437 v.AddArg(y)
29438 return true
29439 }
29440 return false
29441 }
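// Editorial sketch (not generated by rulegen): besides turning a multiply-then-
// subtract into MSUB/MADD and folding constant or shifted operands, two SUB rules
// above reassociate nested subtraction: x - (y - z) becomes (x + z) - y and
// (x - y) - z becomes x - (y + z). demoSubReassoc is a hypothetical helper
// checking the first identity (it holds exactly under wrap-around arithmetic).
func demoSubReassoc(x, y, z int64) int64 {
	a := x - (y - z)
	b := (x + z) - y
	if a != b {
		panic("unreachable: subtraction reassociates modulo 2^64")
	}
	return b
}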
29442 func rewriteValueARM64_OpARM64SUB_10(v *Value) bool {
29443
29444
29445
29446 for {
29447 _ = v.Args[1]
29448 x0 := v.Args[0]
29449 x1 := v.Args[1]
29450 if x1.Op != OpARM64SRAconst {
29451 break
29452 }
29453 c := x1.AuxInt
29454 y := x1.Args[0]
29455 if !(clobberIfDead(x1)) {
29456 break
29457 }
29458 v.reset(OpARM64SUBshiftRA)
29459 v.AuxInt = c
29460 v.AddArg(x0)
29461 v.AddArg(y)
29462 return true
29463 }
29464 return false
29465 }
29466 func rewriteValueARM64_OpARM64SUBconst_0(v *Value) bool {
29467
29468
29469
29470 for {
29471 if v.AuxInt != 0 {
29472 break
29473 }
29474 x := v.Args[0]
29475 v.reset(OpCopy)
29476 v.Type = x.Type
29477 v.AddArg(x)
29478 return true
29479 }
29480
29481
29482
29483 for {
29484 c := v.AuxInt
29485 v_0 := v.Args[0]
29486 if v_0.Op != OpARM64MOVDconst {
29487 break
29488 }
29489 d := v_0.AuxInt
29490 v.reset(OpARM64MOVDconst)
29491 v.AuxInt = d - c
29492 return true
29493 }
29494
29495
29496
29497 for {
29498 c := v.AuxInt
29499 v_0 := v.Args[0]
29500 if v_0.Op != OpARM64SUBconst {
29501 break
29502 }
29503 d := v_0.AuxInt
29504 x := v_0.Args[0]
29505 v.reset(OpARM64ADDconst)
29506 v.AuxInt = -c - d
29507 v.AddArg(x)
29508 return true
29509 }
29510
29511
29512
29513 for {
29514 c := v.AuxInt
29515 v_0 := v.Args[0]
29516 if v_0.Op != OpARM64ADDconst {
29517 break
29518 }
29519 d := v_0.AuxInt
29520 x := v_0.Args[0]
29521 v.reset(OpARM64ADDconst)
29522 v.AuxInt = -c + d
29523 v.AddArg(x)
29524 return true
29525 }
29526 return false
29527 }
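// Editorial sketch (not generated by rulegen): SUBconst chains collapse into a
// single constant op: subtracting zero is a copy, subtracting from a constant
// folds outright, and subtracting d then c is the same as adding (-c - d).
// demoSubConstFold is a hypothetical helper showing the combined constant.
func demoSubConstFold(x, c, d int64) int64 {
	if (x-d)-c != x+(-c-d) {
		panic("unreachable: constant subtraction folds under wrap-around")
	}
	return x + (-c - d)
}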
29528 func rewriteValueARM64_OpARM64SUBshiftLL_0(v *Value) bool {
29529
29530
29531
29532 for {
29533 d := v.AuxInt
29534 _ = v.Args[1]
29535 x := v.Args[0]
29536 v_1 := v.Args[1]
29537 if v_1.Op != OpARM64MOVDconst {
29538 break
29539 }
29540 c := v_1.AuxInt
29541 v.reset(OpARM64SUBconst)
29542 v.AuxInt = int64(uint64(c) << uint64(d))
29543 v.AddArg(x)
29544 return true
29545 }
29546
29547
29548
29549 for {
29550 d := v.AuxInt
29551 _ = v.Args[1]
29552 x := v.Args[0]
29553 v_1 := v.Args[1]
29554 if v_1.Op != OpARM64SLLconst {
29555 break
29556 }
29557 c := v_1.AuxInt
29558 if x != v_1.Args[0] {
29559 break
29560 }
29561 if !(c == d) {
29562 break
29563 }
29564 v.reset(OpARM64MOVDconst)
29565 v.AuxInt = 0
29566 return true
29567 }
29568 return false
29569 }
29570 func rewriteValueARM64_OpARM64SUBshiftRA_0(v *Value) bool {
29571
29572
29573
29574 for {
29575 d := v.AuxInt
29576 _ = v.Args[1]
29577 x := v.Args[0]
29578 v_1 := v.Args[1]
29579 if v_1.Op != OpARM64MOVDconst {
29580 break
29581 }
29582 c := v_1.AuxInt
29583 v.reset(OpARM64SUBconst)
29584 v.AuxInt = c >> uint64(d)
29585 v.AddArg(x)
29586 return true
29587 }
29588
29589
29590
29591 for {
29592 d := v.AuxInt
29593 _ = v.Args[1]
29594 x := v.Args[0]
29595 v_1 := v.Args[1]
29596 if v_1.Op != OpARM64SRAconst {
29597 break
29598 }
29599 c := v_1.AuxInt
29600 if x != v_1.Args[0] {
29601 break
29602 }
29603 if !(c == d) {
29604 break
29605 }
29606 v.reset(OpARM64MOVDconst)
29607 v.AuxInt = 0
29608 return true
29609 }
29610 return false
29611 }
29612 func rewriteValueARM64_OpARM64SUBshiftRL_0(v *Value) bool {
29613
29614
29615
29616 for {
29617 d := v.AuxInt
29618 _ = v.Args[1]
29619 x := v.Args[0]
29620 v_1 := v.Args[1]
29621 if v_1.Op != OpARM64MOVDconst {
29622 break
29623 }
29624 c := v_1.AuxInt
29625 v.reset(OpARM64SUBconst)
29626 v.AuxInt = int64(uint64(c) >> uint64(d))
29627 v.AddArg(x)
29628 return true
29629 }
29630
29631
29632
29633 for {
29634 d := v.AuxInt
29635 _ = v.Args[1]
29636 x := v.Args[0]
29637 v_1 := v.Args[1]
29638 if v_1.Op != OpARM64SRLconst {
29639 break
29640 }
29641 c := v_1.AuxInt
29642 if x != v_1.Args[0] {
29643 break
29644 }
29645 if !(c == d) {
29646 break
29647 }
29648 v.reset(OpARM64MOVDconst)
29649 v.AuxInt = 0
29650 return true
29651 }
29652 return false
29653 }
29654 func rewriteValueARM64_OpARM64TST_0(v *Value) bool {
29655
29656
29657
29658 for {
29659 _ = v.Args[1]
29660 x := v.Args[0]
29661 v_1 := v.Args[1]
29662 if v_1.Op != OpARM64MOVDconst {
29663 break
29664 }
29665 c := v_1.AuxInt
29666 v.reset(OpARM64TSTconst)
29667 v.AuxInt = c
29668 v.AddArg(x)
29669 return true
29670 }
29671
29672
29673
29674 for {
29675 x := v.Args[1]
29676 v_0 := v.Args[0]
29677 if v_0.Op != OpARM64MOVDconst {
29678 break
29679 }
29680 c := v_0.AuxInt
29681 v.reset(OpARM64TSTconst)
29682 v.AuxInt = c
29683 v.AddArg(x)
29684 return true
29685 }
29686
29687
29688
29689 for {
29690 _ = v.Args[1]
29691 x0 := v.Args[0]
29692 x1 := v.Args[1]
29693 if x1.Op != OpARM64SLLconst {
29694 break
29695 }
29696 c := x1.AuxInt
29697 y := x1.Args[0]
29698 if !(clobberIfDead(x1)) {
29699 break
29700 }
29701 v.reset(OpARM64TSTshiftLL)
29702 v.AuxInt = c
29703 v.AddArg(x0)
29704 v.AddArg(y)
29705 return true
29706 }
29707
29708
29709
29710 for {
29711 x0 := v.Args[1]
29712 x1 := v.Args[0]
29713 if x1.Op != OpARM64SLLconst {
29714 break
29715 }
29716 c := x1.AuxInt
29717 y := x1.Args[0]
29718 if !(clobberIfDead(x1)) {
29719 break
29720 }
29721 v.reset(OpARM64TSTshiftLL)
29722 v.AuxInt = c
29723 v.AddArg(x0)
29724 v.AddArg(y)
29725 return true
29726 }
29727
29728
29729
29730 for {
29731 _ = v.Args[1]
29732 x0 := v.Args[0]
29733 x1 := v.Args[1]
29734 if x1.Op != OpARM64SRLconst {
29735 break
29736 }
29737 c := x1.AuxInt
29738 y := x1.Args[0]
29739 if !(clobberIfDead(x1)) {
29740 break
29741 }
29742 v.reset(OpARM64TSTshiftRL)
29743 v.AuxInt = c
29744 v.AddArg(x0)
29745 v.AddArg(y)
29746 return true
29747 }
29748
29749
29750
29751 for {
29752 x0 := v.Args[1]
29753 x1 := v.Args[0]
29754 if x1.Op != OpARM64SRLconst {
29755 break
29756 }
29757 c := x1.AuxInt
29758 y := x1.Args[0]
29759 if !(clobberIfDead(x1)) {
29760 break
29761 }
29762 v.reset(OpARM64TSTshiftRL)
29763 v.AuxInt = c
29764 v.AddArg(x0)
29765 v.AddArg(y)
29766 return true
29767 }
29768
29769
29770
29771 for {
29772 _ = v.Args[1]
29773 x0 := v.Args[0]
29774 x1 := v.Args[1]
29775 if x1.Op != OpARM64SRAconst {
29776 break
29777 }
29778 c := x1.AuxInt
29779 y := x1.Args[0]
29780 if !(clobberIfDead(x1)) {
29781 break
29782 }
29783 v.reset(OpARM64TSTshiftRA)
29784 v.AuxInt = c
29785 v.AddArg(x0)
29786 v.AddArg(y)
29787 return true
29788 }
29789
29790
29791
29792 for {
29793 x0 := v.Args[1]
29794 x1 := v.Args[0]
29795 if x1.Op != OpARM64SRAconst {
29796 break
29797 }
29798 c := x1.AuxInt
29799 y := x1.Args[0]
29800 if !(clobberIfDead(x1)) {
29801 break
29802 }
29803 v.reset(OpARM64TSTshiftRA)
29804 v.AuxInt = c
29805 v.AddArg(x0)
29806 v.AddArg(y)
29807 return true
29808 }
29809 return false
29810 }
29811 func rewriteValueARM64_OpARM64TSTW_0(v *Value) bool {
29812
29813
29814
29815 for {
29816 _ = v.Args[1]
29817 x := v.Args[0]
29818 v_1 := v.Args[1]
29819 if v_1.Op != OpARM64MOVDconst {
29820 break
29821 }
29822 c := v_1.AuxInt
29823 v.reset(OpARM64TSTWconst)
29824 v.AuxInt = c
29825 v.AddArg(x)
29826 return true
29827 }
29828
29829
29830
29831 for {
29832 x := v.Args[1]
29833 v_0 := v.Args[0]
29834 if v_0.Op != OpARM64MOVDconst {
29835 break
29836 }
29837 c := v_0.AuxInt
29838 v.reset(OpARM64TSTWconst)
29839 v.AuxInt = c
29840 v.AddArg(x)
29841 return true
29842 }
29843 return false
29844 }
29845 func rewriteValueARM64_OpARM64TSTWconst_0(v *Value) bool {
29846
29847
29848
29849 for {
29850 y := v.AuxInt
29851 v_0 := v.Args[0]
29852 if v_0.Op != OpARM64MOVDconst {
29853 break
29854 }
29855 x := v_0.AuxInt
29856 if !(int32(x&y) == 0) {
29857 break
29858 }
29859 v.reset(OpARM64FlagEQ)
29860 return true
29861 }
29862
29863
29864
29865 for {
29866 y := v.AuxInt
29867 v_0 := v.Args[0]
29868 if v_0.Op != OpARM64MOVDconst {
29869 break
29870 }
29871 x := v_0.AuxInt
29872 if !(int32(x&y) < 0) {
29873 break
29874 }
29875 v.reset(OpARM64FlagLT_UGT)
29876 return true
29877 }
29878
29879
29880
29881 for {
29882 y := v.AuxInt
29883 v_0 := v.Args[0]
29884 if v_0.Op != OpARM64MOVDconst {
29885 break
29886 }
29887 x := v_0.AuxInt
29888 if !(int32(x&y) > 0) {
29889 break
29890 }
29891 v.reset(OpARM64FlagGT_UGT)
29892 return true
29893 }
29894 return false
29895 }
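// Editorial sketch (not generated by rulegen): TSTWconst against a known constant
// folds all the way to a flags constant; the sign and zeroness of the 32-bit AND
// result pick which one (the 64-bit TSTconst rules below do the same on int64).
// demoTSTWFlags is a hypothetical helper mirroring the three-way split; the
// returned names match the flag ops used above.
func demoTSTWFlags(x, y int64) string {
	switch r := int32(x & y); {
	case r == 0:
		return "FlagEQ"
	case r < 0:
		return "FlagLT_UGT"
	default:
		return "FlagGT_UGT"
	}
}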
29896 func rewriteValueARM64_OpARM64TSTconst_0(v *Value) bool {
29897
29898
29899
29900 for {
29901 y := v.AuxInt
29902 v_0 := v.Args[0]
29903 if v_0.Op != OpARM64MOVDconst {
29904 break
29905 }
29906 x := v_0.AuxInt
29907 if !(int64(x&y) == 0) {
29908 break
29909 }
29910 v.reset(OpARM64FlagEQ)
29911 return true
29912 }
29913
29914
29915
29916 for {
29917 y := v.AuxInt
29918 v_0 := v.Args[0]
29919 if v_0.Op != OpARM64MOVDconst {
29920 break
29921 }
29922 x := v_0.AuxInt
29923 if !(int64(x&y) < 0) {
29924 break
29925 }
29926 v.reset(OpARM64FlagLT_UGT)
29927 return true
29928 }
29929
29930
29931
29932 for {
29933 y := v.AuxInt
29934 v_0 := v.Args[0]
29935 if v_0.Op != OpARM64MOVDconst {
29936 break
29937 }
29938 x := v_0.AuxInt
29939 if !(int64(x&y) > 0) {
29940 break
29941 }
29942 v.reset(OpARM64FlagGT_UGT)
29943 return true
29944 }
29945 return false
29946 }
29947 func rewriteValueARM64_OpARM64TSTshiftLL_0(v *Value) bool {
29948 b := v.Block
29949
29950
29951
29952 for {
29953 d := v.AuxInt
29954 x := v.Args[1]
29955 v_0 := v.Args[0]
29956 if v_0.Op != OpARM64MOVDconst {
29957 break
29958 }
29959 c := v_0.AuxInt
29960 v.reset(OpARM64TSTconst)
29961 v.AuxInt = c
29962 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
29963 v0.AuxInt = d
29964 v0.AddArg(x)
29965 v.AddArg(v0)
29966 return true
29967 }
29968
29969
29970
29971 for {
29972 d := v.AuxInt
29973 _ = v.Args[1]
29974 x := v.Args[0]
29975 v_1 := v.Args[1]
29976 if v_1.Op != OpARM64MOVDconst {
29977 break
29978 }
29979 c := v_1.AuxInt
29980 v.reset(OpARM64TSTconst)
29981 v.AuxInt = int64(uint64(c) << uint64(d))
29982 v.AddArg(x)
29983 return true
29984 }
29985 return false
29986 }
29987 func rewriteValueARM64_OpARM64TSTshiftRA_0(v *Value) bool {
29988 b := v.Block
29989
29990
29991
29992 for {
29993 d := v.AuxInt
29994 x := v.Args[1]
29995 v_0 := v.Args[0]
29996 if v_0.Op != OpARM64MOVDconst {
29997 break
29998 }
29999 c := v_0.AuxInt
30000 v.reset(OpARM64TSTconst)
30001 v.AuxInt = c
30002 v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type)
30003 v0.AuxInt = d
30004 v0.AddArg(x)
30005 v.AddArg(v0)
30006 return true
30007 }
30008
30009
30010
30011 for {
30012 d := v.AuxInt
30013 _ = v.Args[1]
30014 x := v.Args[0]
30015 v_1 := v.Args[1]
30016 if v_1.Op != OpARM64MOVDconst {
30017 break
30018 }
30019 c := v_1.AuxInt
30020 v.reset(OpARM64TSTconst)
30021 v.AuxInt = c >> uint64(d)
30022 v.AddArg(x)
30023 return true
30024 }
30025 return false
30026 }
30027 func rewriteValueARM64_OpARM64TSTshiftRL_0(v *Value) bool {
30028 b := v.Block
30029
30030
30031
30032 for {
30033 d := v.AuxInt
30034 x := v.Args[1]
30035 v_0 := v.Args[0]
30036 if v_0.Op != OpARM64MOVDconst {
30037 break
30038 }
30039 c := v_0.AuxInt
30040 v.reset(OpARM64TSTconst)
30041 v.AuxInt = c
30042 v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type)
30043 v0.AuxInt = d
30044 v0.AddArg(x)
30045 v.AddArg(v0)
30046 return true
30047 }
30048
30049
30050
30051 for {
30052 d := v.AuxInt
30053 _ = v.Args[1]
30054 x := v.Args[0]
30055 v_1 := v.Args[1]
30056 if v_1.Op != OpARM64MOVDconst {
30057 break
30058 }
30059 c := v_1.AuxInt
30060 v.reset(OpARM64TSTconst)
30061 v.AuxInt = int64(uint64(c) >> uint64(d))
30062 v.AddArg(x)
30063 return true
30064 }
30065 return false
30066 }
30067 func rewriteValueARM64_OpARM64UBFIZ_0(v *Value) bool {
30068
30069
30070
30071 for {
30072 bfc := v.AuxInt
30073 v_0 := v.Args[0]
30074 if v_0.Op != OpARM64SLLconst {
30075 break
30076 }
30077 sc := v_0.AuxInt
30078 x := v_0.Args[0]
30079 if !(sc < getARM64BFwidth(bfc)) {
30080 break
30081 }
30082 v.reset(OpARM64UBFIZ)
30083 v.AuxInt = armBFAuxInt(getARM64BFlsb(bfc)+sc, getARM64BFwidth(bfc)-sc)
30084 v.AddArg(x)
30085 return true
30086 }
30087 return false
30088 }
30089 func rewriteValueARM64_OpARM64UBFX_0(v *Value) bool {
30090
30091
30092
30093 for {
30094 bfc := v.AuxInt
30095 v_0 := v.Args[0]
30096 if v_0.Op != OpARM64SRLconst {
30097 break
30098 }
30099 sc := v_0.AuxInt
30100 x := v_0.Args[0]
30101 if !(sc+getARM64BFwidth(bfc)+getARM64BFlsb(bfc) < 64) {
30102 break
30103 }
30104 v.reset(OpARM64UBFX)
30105 v.AuxInt = armBFAuxInt(getARM64BFlsb(bfc)+sc, getARM64BFwidth(bfc))
30106 v.AddArg(x)
30107 return true
30108 }
30109
30110
30111
30112 for {
30113 bfc := v.AuxInt
30114 v_0 := v.Args[0]
30115 if v_0.Op != OpARM64SLLconst {
30116 break
30117 }
30118 sc := v_0.AuxInt
30119 x := v_0.Args[0]
30120 if !(sc == getARM64BFlsb(bfc)) {
30121 break
30122 }
30123 v.reset(OpARM64ANDconst)
30124 v.AuxInt = 1<<uint(getARM64BFwidth(bfc)) - 1
30125 v.AddArg(x)
30126 return true
30127 }
30128
30129
30130
30131 for {
30132 bfc := v.AuxInt
30133 v_0 := v.Args[0]
30134 if v_0.Op != OpARM64SLLconst {
30135 break
30136 }
30137 sc := v_0.AuxInt
30138 x := v_0.Args[0]
30139 if !(sc < getARM64BFlsb(bfc)) {
30140 break
30141 }
30142 v.reset(OpARM64UBFX)
30143 v.AuxInt = armBFAuxInt(getARM64BFlsb(bfc)-sc, getARM64BFwidth(bfc))
30144 v.AddArg(x)
30145 return true
30146 }
30147
30148
30149
30150 for {
30151 bfc := v.AuxInt
30152 v_0 := v.Args[0]
30153 if v_0.Op != OpARM64SLLconst {
30154 break
30155 }
30156 sc := v_0.AuxInt
30157 x := v_0.Args[0]
30158 if !(sc > getARM64BFlsb(bfc) && sc < getARM64BFlsb(bfc)+getARM64BFwidth(bfc)) {
30159 break
30160 }
30161 v.reset(OpARM64UBFIZ)
30162 v.AuxInt = armBFAuxInt(sc-getARM64BFlsb(bfc), getARM64BFlsb(bfc)+getARM64BFwidth(bfc)-sc)
30163 v.AddArg(x)
30164 return true
30165 }
30166 return false
30167 }
30168 func rewriteValueARM64_OpARM64UDIV_0(v *Value) bool {
30169 // match: (UDIV x (MOVDconst [1]))
30170 // cond:
30171 // result: x
30172 for {
30173 _ = v.Args[1]
30174 x := v.Args[0]
30175 v_1 := v.Args[1]
30176 if v_1.Op != OpARM64MOVDconst {
30177 break
30178 }
30179 if v_1.AuxInt != 1 {
30180 break
30181 }
30182 v.reset(OpCopy)
30183 v.Type = x.Type
30184 v.AddArg(x)
30185 return true
30186 }
30187
30188
30189
30190 for {
30191 _ = v.Args[1]
30192 x := v.Args[0]
30193 v_1 := v.Args[1]
30194 if v_1.Op != OpARM64MOVDconst {
30195 break
30196 }
30197 c := v_1.AuxInt
30198 if !(isPowerOfTwo(c)) {
30199 break
30200 }
30201 v.reset(OpARM64SRLconst)
30202 v.AuxInt = log2(c)
30203 v.AddArg(x)
30204 return true
30205 }
30206
30207
30208
30209 for {
30210 _ = v.Args[1]
30211 v_0 := v.Args[0]
30212 if v_0.Op != OpARM64MOVDconst {
30213 break
30214 }
30215 c := v_0.AuxInt
30216 v_1 := v.Args[1]
30217 if v_1.Op != OpARM64MOVDconst {
30218 break
30219 }
30220 d := v_1.AuxInt
30221 v.reset(OpARM64MOVDconst)
30222 v.AuxInt = int64(uint64(c) / uint64(d))
30223 return true
30224 }
30225 return false
30226 }
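// Editorial sketch (not generated by rulegen): an unsigned divide by one is a
// copy, a divide by a power of two becomes a logical right shift by its log2,
// and two constant operands fold outright. demoUDivPow2 is a hypothetical helper
// showing the shift form.
func demoUDivPow2(x, c uint64) (uint64, bool) {
	if c == 0 || c&(c-1) != 0 {
		return 0, false // not a power of two; the rewrite does not fire
	}
	shift := uint(0)
	for c>>shift != 1 {
		shift++ // log2(c)
	}
	if x/c != x>>shift {
		panic("unreachable: unsigned division by 1<<k is a right shift by k")
	}
	return x >> shift, true
}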
30227 func rewriteValueARM64_OpARM64UDIVW_0(v *Value) bool {
30228
30229
30230
30231 for {
30232 _ = v.Args[1]
30233 x := v.Args[0]
30234 v_1 := v.Args[1]
30235 if v_1.Op != OpARM64MOVDconst {
30236 break
30237 }
30238 c := v_1.AuxInt
30239 if !(uint32(c) == 1) {
30240 break
30241 }
30242 v.reset(OpCopy)
30243 v.Type = x.Type
30244 v.AddArg(x)
30245 return true
30246 }
30247
30248
30249
30250 for {
30251 _ = v.Args[1]
30252 x := v.Args[0]
30253 v_1 := v.Args[1]
30254 if v_1.Op != OpARM64MOVDconst {
30255 break
30256 }
30257 c := v_1.AuxInt
30258 if !(isPowerOfTwo(c) && is32Bit(c)) {
30259 break
30260 }
30261 v.reset(OpARM64SRLconst)
30262 v.AuxInt = log2(c)
30263 v.AddArg(x)
30264 return true
30265 }
30266
30267
30268
30269 for {
30270 _ = v.Args[1]
30271 v_0 := v.Args[0]
30272 if v_0.Op != OpARM64MOVDconst {
30273 break
30274 }
30275 c := v_0.AuxInt
30276 v_1 := v.Args[1]
30277 if v_1.Op != OpARM64MOVDconst {
30278 break
30279 }
30280 d := v_1.AuxInt
30281 v.reset(OpARM64MOVDconst)
30282 v.AuxInt = int64(uint32(c) / uint32(d))
30283 return true
30284 }
30285 return false
30286 }
30287 func rewriteValueARM64_OpARM64UMOD_0(v *Value) bool {
30288 b := v.Block
30289 typ := &b.Func.Config.Types
30290
30291
30292
30293 for {
30294 if v.Type != typ.UInt64 {
30295 break
30296 }
30297 y := v.Args[1]
30298 x := v.Args[0]
30299 v.reset(OpARM64MSUB)
30300 v.Type = typ.UInt64
30301 v.AddArg(x)
30302 v.AddArg(y)
30303 v0 := b.NewValue0(v.Pos, OpARM64UDIV, typ.UInt64)
30304 v0.AddArg(x)
30305 v0.AddArg(y)
30306 v.AddArg(v0)
30307 return true
30308 }
30309
30310
30311
30312 for {
30313 _ = v.Args[1]
30314 v_1 := v.Args[1]
30315 if v_1.Op != OpARM64MOVDconst {
30316 break
30317 }
30318 if v_1.AuxInt != 1 {
30319 break
30320 }
30321 v.reset(OpARM64MOVDconst)
30322 v.AuxInt = 0
30323 return true
30324 }
30325
30326
30327
30328 for {
30329 _ = v.Args[1]
30330 x := v.Args[0]
30331 v_1 := v.Args[1]
30332 if v_1.Op != OpARM64MOVDconst {
30333 break
30334 }
30335 c := v_1.AuxInt
30336 if !(isPowerOfTwo(c)) {
30337 break
30338 }
30339 v.reset(OpARM64ANDconst)
30340 v.AuxInt = c - 1
30341 v.AddArg(x)
30342 return true
30343 }
30344
30345
30346
30347 for {
30348 _ = v.Args[1]
30349 v_0 := v.Args[0]
30350 if v_0.Op != OpARM64MOVDconst {
30351 break
30352 }
30353 c := v_0.AuxInt
30354 v_1 := v.Args[1]
30355 if v_1.Op != OpARM64MOVDconst {
30356 break
30357 }
30358 d := v_1.AuxInt
30359 v.reset(OpARM64MOVDconst)
30360 v.AuxInt = int64(uint64(c) % uint64(d))
30361 return true
30362 }
30363 return false
30364 }
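// Editorial sketch (not generated by rulegen): UMOD is lowered either to MSUB
// (x - (x/y)*y, the general case) or, for a power-of-two modulus, to an AND with
// modulus-1; a modulus of one gives zero. demoUMod is a hypothetical helper
// checking both identities.
func demoUMod(x, y uint64) uint64 {
	if y == 0 {
		return 0 // the rewrite never sees a zero modulus; guard the demo only
	}
	r := x - (x/y)*y // the MSUB form
	if r != x%y {
		panic("unreachable: Euclidean identity for unsigned values")
	}
	if y&(y-1) == 0 && r != x&(y-1) {
		panic("unreachable: modulus by a power of two is a mask")
	}
	return r
}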
30365 func rewriteValueARM64_OpARM64UMODW_0(v *Value) bool {
30366 b := v.Block
30367 typ := &b.Func.Config.Types
30368
30369
30370
30371 for {
30372 if v.Type != typ.UInt32 {
30373 break
30374 }
30375 y := v.Args[1]
30376 x := v.Args[0]
30377 v.reset(OpARM64MSUBW)
30378 v.Type = typ.UInt32
30379 v.AddArg(x)
30380 v.AddArg(y)
30381 v0 := b.NewValue0(v.Pos, OpARM64UDIVW, typ.UInt32)
30382 v0.AddArg(x)
30383 v0.AddArg(y)
30384 v.AddArg(v0)
30385 return true
30386 }
30387
30388
30389
30390 for {
30391 _ = v.Args[1]
30392 v_1 := v.Args[1]
30393 if v_1.Op != OpARM64MOVDconst {
30394 break
30395 }
30396 c := v_1.AuxInt
30397 if !(uint32(c) == 1) {
30398 break
30399 }
30400 v.reset(OpARM64MOVDconst)
30401 v.AuxInt = 0
30402 return true
30403 }
30404
30405
30406
30407 for {
30408 _ = v.Args[1]
30409 x := v.Args[0]
30410 v_1 := v.Args[1]
30411 if v_1.Op != OpARM64MOVDconst {
30412 break
30413 }
30414 c := v_1.AuxInt
30415 if !(isPowerOfTwo(c) && is32Bit(c)) {
30416 break
30417 }
30418 v.reset(OpARM64ANDconst)
30419 v.AuxInt = c - 1
30420 v.AddArg(x)
30421 return true
30422 }
30423
30424
30425
30426 for {
30427 _ = v.Args[1]
30428 v_0 := v.Args[0]
30429 if v_0.Op != OpARM64MOVDconst {
30430 break
30431 }
30432 c := v_0.AuxInt
30433 v_1 := v.Args[1]
30434 if v_1.Op != OpARM64MOVDconst {
30435 break
30436 }
30437 d := v_1.AuxInt
30438 v.reset(OpARM64MOVDconst)
30439 v.AuxInt = int64(uint32(c) % uint32(d))
30440 return true
30441 }
30442 return false
30443 }
30444 func rewriteValueARM64_OpARM64XOR_0(v *Value) bool {
30445
30446
30447
30448 for {
30449 _ = v.Args[1]
30450 x := v.Args[0]
30451 v_1 := v.Args[1]
30452 if v_1.Op != OpARM64MOVDconst {
30453 break
30454 }
30455 c := v_1.AuxInt
30456 v.reset(OpARM64XORconst)
30457 v.AuxInt = c
30458 v.AddArg(x)
30459 return true
30460 }
30461
30462
30463
30464 for {
30465 x := v.Args[1]
30466 v_0 := v.Args[0]
30467 if v_0.Op != OpARM64MOVDconst {
30468 break
30469 }
30470 c := v_0.AuxInt
30471 v.reset(OpARM64XORconst)
30472 v.AuxInt = c
30473 v.AddArg(x)
30474 return true
30475 }
30476
30477
30478
30479 for {
30480 x := v.Args[1]
30481 if x != v.Args[0] {
30482 break
30483 }
30484 v.reset(OpARM64MOVDconst)
30485 v.AuxInt = 0
30486 return true
30487 }
30488
30489
30490
30491 for {
30492 _ = v.Args[1]
30493 x := v.Args[0]
30494 v_1 := v.Args[1]
30495 if v_1.Op != OpARM64MVN {
30496 break
30497 }
30498 y := v_1.Args[0]
30499 v.reset(OpARM64EON)
30500 v.AddArg(x)
30501 v.AddArg(y)
30502 return true
30503 }
30504
30505
30506
30507 for {
30508 x := v.Args[1]
30509 v_0 := v.Args[0]
30510 if v_0.Op != OpARM64MVN {
30511 break
30512 }
30513 y := v_0.Args[0]
30514 v.reset(OpARM64EON)
30515 v.AddArg(x)
30516 v.AddArg(y)
30517 return true
30518 }
30519
30520
30521
30522 for {
30523 _ = v.Args[1]
30524 x0 := v.Args[0]
30525 x1 := v.Args[1]
30526 if x1.Op != OpARM64SLLconst {
30527 break
30528 }
30529 c := x1.AuxInt
30530 y := x1.Args[0]
30531 if !(clobberIfDead(x1)) {
30532 break
30533 }
30534 v.reset(OpARM64XORshiftLL)
30535 v.AuxInt = c
30536 v.AddArg(x0)
30537 v.AddArg(y)
30538 return true
30539 }
30540
30541
30542
30543 for {
30544 x0 := v.Args[1]
30545 x1 := v.Args[0]
30546 if x1.Op != OpARM64SLLconst {
30547 break
30548 }
30549 c := x1.AuxInt
30550 y := x1.Args[0]
30551 if !(clobberIfDead(x1)) {
30552 break
30553 }
30554 v.reset(OpARM64XORshiftLL)
30555 v.AuxInt = c
30556 v.AddArg(x0)
30557 v.AddArg(y)
30558 return true
30559 }
30560
30561
30562
30563 for {
30564 _ = v.Args[1]
30565 x0 := v.Args[0]
30566 x1 := v.Args[1]
30567 if x1.Op != OpARM64SRLconst {
30568 break
30569 }
30570 c := x1.AuxInt
30571 y := x1.Args[0]
30572 if !(clobberIfDead(x1)) {
30573 break
30574 }
30575 v.reset(OpARM64XORshiftRL)
30576 v.AuxInt = c
30577 v.AddArg(x0)
30578 v.AddArg(y)
30579 return true
30580 }
30581
30582
30583
30584 for {
30585 x0 := v.Args[1]
30586 x1 := v.Args[0]
30587 if x1.Op != OpARM64SRLconst {
30588 break
30589 }
30590 c := x1.AuxInt
30591 y := x1.Args[0]
30592 if !(clobberIfDead(x1)) {
30593 break
30594 }
30595 v.reset(OpARM64XORshiftRL)
30596 v.AuxInt = c
30597 v.AddArg(x0)
30598 v.AddArg(y)
30599 return true
30600 }
30601
30602
30603
30604 for {
30605 _ = v.Args[1]
30606 x0 := v.Args[0]
30607 x1 := v.Args[1]
30608 if x1.Op != OpARM64SRAconst {
30609 break
30610 }
30611 c := x1.AuxInt
30612 y := x1.Args[0]
30613 if !(clobberIfDead(x1)) {
30614 break
30615 }
30616 v.reset(OpARM64XORshiftRA)
30617 v.AuxInt = c
30618 v.AddArg(x0)
30619 v.AddArg(y)
30620 return true
30621 }
30622 return false
30623 }
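// Editorial sketch (not generated by rulegen): XOR with a complemented operand
// folds to EON (x ^ ^y), XOR of a value with itself is zero, a constant operand
// becomes XORconst, and shifted operands merge into XORshift forms. demoXorMvn
// is a hypothetical helper spelling out what the EON form computes.
func demoXorMvn(x, y uint64) uint64 {
	if x^x != 0 {
		panic("unreachable: x XOR x is always zero")
	}
	return x ^ ^y // what (EON x y) computes
}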
30624 func rewriteValueARM64_OpARM64XOR_10(v *Value) bool {
30625 b := v.Block
30626 typ := &b.Func.Config.Types
30627
30628
30629
30630 for {
30631 x0 := v.Args[1]
30632 x1 := v.Args[0]
30633 if x1.Op != OpARM64SRAconst {
30634 break
30635 }
30636 c := x1.AuxInt
30637 y := x1.Args[0]
30638 if !(clobberIfDead(x1)) {
30639 break
30640 }
30641 v.reset(OpARM64XORshiftRA)
30642 v.AuxInt = c
30643 v.AddArg(x0)
30644 v.AddArg(y)
30645 return true
30646 }
30647
30648
30649
30650 for {
30651 _ = v.Args[1]
30652 v_0 := v.Args[0]
30653 if v_0.Op != OpARM64SLL {
30654 break
30655 }
30656 _ = v_0.Args[1]
30657 x := v_0.Args[0]
30658 v_0_1 := v_0.Args[1]
30659 if v_0_1.Op != OpARM64ANDconst {
30660 break
30661 }
30662 t := v_0_1.Type
30663 if v_0_1.AuxInt != 63 {
30664 break
30665 }
30666 y := v_0_1.Args[0]
30667 v_1 := v.Args[1]
30668 if v_1.Op != OpARM64CSEL0 {
30669 break
30670 }
30671 if v_1.Type != typ.UInt64 {
30672 break
30673 }
30674 cc := v_1.Aux
30675 _ = v_1.Args[1]
30676 v_1_0 := v_1.Args[0]
30677 if v_1_0.Op != OpARM64SRL {
30678 break
30679 }
30680 if v_1_0.Type != typ.UInt64 {
30681 break
30682 }
30683 _ = v_1_0.Args[1]
30684 if x != v_1_0.Args[0] {
30685 break
30686 }
30687 v_1_0_1 := v_1_0.Args[1]
30688 if v_1_0_1.Op != OpARM64SUB {
30689 break
30690 }
30691 if v_1_0_1.Type != t {
30692 break
30693 }
30694 _ = v_1_0_1.Args[1]
30695 v_1_0_1_0 := v_1_0_1.Args[0]
30696 if v_1_0_1_0.Op != OpARM64MOVDconst {
30697 break
30698 }
30699 if v_1_0_1_0.AuxInt != 64 {
30700 break
30701 }
30702 v_1_0_1_1 := v_1_0_1.Args[1]
30703 if v_1_0_1_1.Op != OpARM64ANDconst {
30704 break
30705 }
30706 if v_1_0_1_1.Type != t {
30707 break
30708 }
30709 if v_1_0_1_1.AuxInt != 63 {
30710 break
30711 }
30712 if y != v_1_0_1_1.Args[0] {
30713 break
30714 }
30715 v_1_1 := v_1.Args[1]
30716 if v_1_1.Op != OpARM64CMPconst {
30717 break
30718 }
30719 if v_1_1.AuxInt != 64 {
30720 break
30721 }
30722 v_1_1_0 := v_1_1.Args[0]
30723 if v_1_1_0.Op != OpARM64SUB {
30724 break
30725 }
30726 if v_1_1_0.Type != t {
30727 break
30728 }
30729 _ = v_1_1_0.Args[1]
30730 v_1_1_0_0 := v_1_1_0.Args[0]
30731 if v_1_1_0_0.Op != OpARM64MOVDconst {
30732 break
30733 }
30734 if v_1_1_0_0.AuxInt != 64 {
30735 break
30736 }
30737 v_1_1_0_1 := v_1_1_0.Args[1]
30738 if v_1_1_0_1.Op != OpARM64ANDconst {
30739 break
30740 }
30741 if v_1_1_0_1.Type != t {
30742 break
30743 }
30744 if v_1_1_0_1.AuxInt != 63 {
30745 break
30746 }
30747 if y != v_1_1_0_1.Args[0] {
30748 break
30749 }
30750 if !(cc.(Op) == OpARM64LessThanU) {
30751 break
30752 }
30753 v.reset(OpARM64ROR)
30754 v.AddArg(x)
30755 v0 := b.NewValue0(v.Pos, OpARM64NEG, t)
30756 v0.AddArg(y)
30757 v.AddArg(v0)
30758 return true
30759 }
30760
30761
30762
30763 for {
30764 _ = v.Args[1]
30765 v_0 := v.Args[0]
30766 if v_0.Op != OpARM64CSEL0 {
30767 break
30768 }
30769 if v_0.Type != typ.UInt64 {
30770 break
30771 }
30772 cc := v_0.Aux
30773 _ = v_0.Args[1]
30774 v_0_0 := v_0.Args[0]
30775 if v_0_0.Op != OpARM64SRL {
30776 break
30777 }
30778 if v_0_0.Type != typ.UInt64 {
30779 break
30780 }
30781 _ = v_0_0.Args[1]
30782 x := v_0_0.Args[0]
30783 v_0_0_1 := v_0_0.Args[1]
30784 if v_0_0_1.Op != OpARM64SUB {
30785 break
30786 }
30787 t := v_0_0_1.Type
30788 _ = v_0_0_1.Args[1]
30789 v_0_0_1_0 := v_0_0_1.Args[0]
30790 if v_0_0_1_0.Op != OpARM64MOVDconst {
30791 break
30792 }
30793 if v_0_0_1_0.AuxInt != 64 {
30794 break
30795 }
30796 v_0_0_1_1 := v_0_0_1.Args[1]
30797 if v_0_0_1_1.Op != OpARM64ANDconst {
30798 break
30799 }
30800 if v_0_0_1_1.Type != t {
30801 break
30802 }
30803 if v_0_0_1_1.AuxInt != 63 {
30804 break
30805 }
30806 y := v_0_0_1_1.Args[0]
30807 v_0_1 := v_0.Args[1]
30808 if v_0_1.Op != OpARM64CMPconst {
30809 break
30810 }
30811 if v_0_1.AuxInt != 64 {
30812 break
30813 }
30814 v_0_1_0 := v_0_1.Args[0]
30815 if v_0_1_0.Op != OpARM64SUB {
30816 break
30817 }
30818 if v_0_1_0.Type != t {
30819 break
30820 }
30821 _ = v_0_1_0.Args[1]
30822 v_0_1_0_0 := v_0_1_0.Args[0]
30823 if v_0_1_0_0.Op != OpARM64MOVDconst {
30824 break
30825 }
30826 if v_0_1_0_0.AuxInt != 64 {
30827 break
30828 }
30829 v_0_1_0_1 := v_0_1_0.Args[1]
30830 if v_0_1_0_1.Op != OpARM64ANDconst {
30831 break
30832 }
30833 if v_0_1_0_1.Type != t {
30834 break
30835 }
30836 if v_0_1_0_1.AuxInt != 63 {
30837 break
30838 }
30839 if y != v_0_1_0_1.Args[0] {
30840 break
30841 }
30842 v_1 := v.Args[1]
30843 if v_1.Op != OpARM64SLL {
30844 break
30845 }
30846 _ = v_1.Args[1]
30847 if x != v_1.Args[0] {
30848 break
30849 }
30850 v_1_1 := v_1.Args[1]
30851 if v_1_1.Op != OpARM64ANDconst {
30852 break
30853 }
30854 if v_1_1.Type != t {
30855 break
30856 }
30857 if v_1_1.AuxInt != 63 {
30858 break
30859 }
30860 if y != v_1_1.Args[0] {
30861 break
30862 }
30863 if !(cc.(Op) == OpARM64LessThanU) {
30864 break
30865 }
30866 v.reset(OpARM64ROR)
30867 v.AddArg(x)
30868 v0 := b.NewValue0(v.Pos, OpARM64NEG, t)
30869 v0.AddArg(y)
30870 v.AddArg(v0)
30871 return true
30872 }
30873
30874
30875
30876 for {
30877 _ = v.Args[1]
30878 v_0 := v.Args[0]
30879 if v_0.Op != OpARM64SRL {
30880 break
30881 }
30882 if v_0.Type != typ.UInt64 {
30883 break
30884 }
30885 _ = v_0.Args[1]
30886 x := v_0.Args[0]
30887 v_0_1 := v_0.Args[1]
30888 if v_0_1.Op != OpARM64ANDconst {
30889 break
30890 }
30891 t := v_0_1.Type
30892 if v_0_1.AuxInt != 63 {
30893 break
30894 }
30895 y := v_0_1.Args[0]
30896 v_1 := v.Args[1]
30897 if v_1.Op != OpARM64CSEL0 {
30898 break
30899 }
30900 if v_1.Type != typ.UInt64 {
30901 break
30902 }
30903 cc := v_1.Aux
30904 _ = v_1.Args[1]
30905 v_1_0 := v_1.Args[0]
30906 if v_1_0.Op != OpARM64SLL {
30907 break
30908 }
30909 _ = v_1_0.Args[1]
30910 if x != v_1_0.Args[0] {
30911 break
30912 }
30913 v_1_0_1 := v_1_0.Args[1]
30914 if v_1_0_1.Op != OpARM64SUB {
30915 break
30916 }
30917 if v_1_0_1.Type != t {
30918 break
30919 }
30920 _ = v_1_0_1.Args[1]
30921 v_1_0_1_0 := v_1_0_1.Args[0]
30922 if v_1_0_1_0.Op != OpARM64MOVDconst {
30923 break
30924 }
30925 if v_1_0_1_0.AuxInt != 64 {
30926 break
30927 }
30928 v_1_0_1_1 := v_1_0_1.Args[1]
30929 if v_1_0_1_1.Op != OpARM64ANDconst {
30930 break
30931 }
30932 if v_1_0_1_1.Type != t {
30933 break
30934 }
30935 if v_1_0_1_1.AuxInt != 63 {
30936 break
30937 }
30938 if y != v_1_0_1_1.Args[0] {
30939 break
30940 }
30941 v_1_1 := v_1.Args[1]
30942 if v_1_1.Op != OpARM64CMPconst {
30943 break
30944 }
30945 if v_1_1.AuxInt != 64 {
30946 break
30947 }
30948 v_1_1_0 := v_1_1.Args[0]
30949 if v_1_1_0.Op != OpARM64SUB {
30950 break
30951 }
30952 if v_1_1_0.Type != t {
30953 break
30954 }
30955 _ = v_1_1_0.Args[1]
30956 v_1_1_0_0 := v_1_1_0.Args[0]
30957 if v_1_1_0_0.Op != OpARM64MOVDconst {
30958 break
30959 }
30960 if v_1_1_0_0.AuxInt != 64 {
30961 break
30962 }
30963 v_1_1_0_1 := v_1_1_0.Args[1]
30964 if v_1_1_0_1.Op != OpARM64ANDconst {
30965 break
30966 }
30967 if v_1_1_0_1.Type != t {
30968 break
30969 }
30970 if v_1_1_0_1.AuxInt != 63 {
30971 break
30972 }
30973 if y != v_1_1_0_1.Args[0] {
30974 break
30975 }
30976 if !(cc.(Op) == OpARM64LessThanU) {
30977 break
30978 }
30979 v.reset(OpARM64ROR)
30980 v.AddArg(x)
30981 v.AddArg(y)
30982 return true
30983 }
30984
30985
30986
30987 for {
30988 _ = v.Args[1]
30989 v_0 := v.Args[0]
30990 if v_0.Op != OpARM64CSEL0 {
30991 break
30992 }
30993 if v_0.Type != typ.UInt64 {
30994 break
30995 }
30996 cc := v_0.Aux
30997 _ = v_0.Args[1]
30998 v_0_0 := v_0.Args[0]
30999 if v_0_0.Op != OpARM64SLL {
31000 break
31001 }
31002 _ = v_0_0.Args[1]
31003 x := v_0_0.Args[0]
31004 v_0_0_1 := v_0_0.Args[1]
31005 if v_0_0_1.Op != OpARM64SUB {
31006 break
31007 }
31008 t := v_0_0_1.Type
31009 _ = v_0_0_1.Args[1]
31010 v_0_0_1_0 := v_0_0_1.Args[0]
31011 if v_0_0_1_0.Op != OpARM64MOVDconst {
31012 break
31013 }
31014 if v_0_0_1_0.AuxInt != 64 {
31015 break
31016 }
31017 v_0_0_1_1 := v_0_0_1.Args[1]
31018 if v_0_0_1_1.Op != OpARM64ANDconst {
31019 break
31020 }
31021 if v_0_0_1_1.Type != t {
31022 break
31023 }
31024 if v_0_0_1_1.AuxInt != 63 {
31025 break
31026 }
31027 y := v_0_0_1_1.Args[0]
31028 v_0_1 := v_0.Args[1]
31029 if v_0_1.Op != OpARM64CMPconst {
31030 break
31031 }
31032 if v_0_1.AuxInt != 64 {
31033 break
31034 }
31035 v_0_1_0 := v_0_1.Args[0]
31036 if v_0_1_0.Op != OpARM64SUB {
31037 break
31038 }
31039 if v_0_1_0.Type != t {
31040 break
31041 }
31042 _ = v_0_1_0.Args[1]
31043 v_0_1_0_0 := v_0_1_0.Args[0]
31044 if v_0_1_0_0.Op != OpARM64MOVDconst {
31045 break
31046 }
31047 if v_0_1_0_0.AuxInt != 64 {
31048 break
31049 }
31050 v_0_1_0_1 := v_0_1_0.Args[1]
31051 if v_0_1_0_1.Op != OpARM64ANDconst {
31052 break
31053 }
31054 if v_0_1_0_1.Type != t {
31055 break
31056 }
31057 if v_0_1_0_1.AuxInt != 63 {
31058 break
31059 }
31060 if y != v_0_1_0_1.Args[0] {
31061 break
31062 }
31063 v_1 := v.Args[1]
31064 if v_1.Op != OpARM64SRL {
31065 break
31066 }
31067 if v_1.Type != typ.UInt64 {
31068 break
31069 }
31070 _ = v_1.Args[1]
31071 if x != v_1.Args[0] {
31072 break
31073 }
31074 v_1_1 := v_1.Args[1]
31075 if v_1_1.Op != OpARM64ANDconst {
31076 break
31077 }
31078 if v_1_1.Type != t {
31079 break
31080 }
31081 if v_1_1.AuxInt != 63 {
31082 break
31083 }
31084 if y != v_1_1.Args[0] {
31085 break
31086 }
31087 if !(cc.(Op) == OpARM64LessThanU) {
31088 break
31089 }
31090 v.reset(OpARM64ROR)
31091 v.AddArg(x)
31092 v.AddArg(y)
31093 return true
31094 }
31095
31096
31097
31098 for {
31099 _ = v.Args[1]
31100 v_0 := v.Args[0]
31101 if v_0.Op != OpARM64SLL {
31102 break
31103 }
31104 _ = v_0.Args[1]
31105 x := v_0.Args[0]
31106 v_0_1 := v_0.Args[1]
31107 if v_0_1.Op != OpARM64ANDconst {
31108 break
31109 }
31110 t := v_0_1.Type
31111 if v_0_1.AuxInt != 31 {
31112 break
31113 }
31114 y := v_0_1.Args[0]
31115 v_1 := v.Args[1]
31116 if v_1.Op != OpARM64CSEL0 {
31117 break
31118 }
31119 if v_1.Type != typ.UInt32 {
31120 break
31121 }
31122 cc := v_1.Aux
31123 _ = v_1.Args[1]
31124 v_1_0 := v_1.Args[0]
31125 if v_1_0.Op != OpARM64SRL {
31126 break
31127 }
31128 if v_1_0.Type != typ.UInt32 {
31129 break
31130 }
31131 _ = v_1_0.Args[1]
31132 v_1_0_0 := v_1_0.Args[0]
31133 if v_1_0_0.Op != OpARM64MOVWUreg {
31134 break
31135 }
31136 if x != v_1_0_0.Args[0] {
31137 break
31138 }
31139 v_1_0_1 := v_1_0.Args[1]
31140 if v_1_0_1.Op != OpARM64SUB {
31141 break
31142 }
31143 if v_1_0_1.Type != t {
31144 break
31145 }
31146 _ = v_1_0_1.Args[1]
31147 v_1_0_1_0 := v_1_0_1.Args[0]
31148 if v_1_0_1_0.Op != OpARM64MOVDconst {
31149 break
31150 }
31151 if v_1_0_1_0.AuxInt != 32 {
31152 break
31153 }
31154 v_1_0_1_1 := v_1_0_1.Args[1]
31155 if v_1_0_1_1.Op != OpARM64ANDconst {
31156 break
31157 }
31158 if v_1_0_1_1.Type != t {
31159 break
31160 }
31161 if v_1_0_1_1.AuxInt != 31 {
31162 break
31163 }
31164 if y != v_1_0_1_1.Args[0] {
31165 break
31166 }
31167 v_1_1 := v_1.Args[1]
31168 if v_1_1.Op != OpARM64CMPconst {
31169 break
31170 }
31171 if v_1_1.AuxInt != 64 {
31172 break
31173 }
31174 v_1_1_0 := v_1_1.Args[0]
31175 if v_1_1_0.Op != OpARM64SUB {
31176 break
31177 }
31178 if v_1_1_0.Type != t {
31179 break
31180 }
31181 _ = v_1_1_0.Args[1]
31182 v_1_1_0_0 := v_1_1_0.Args[0]
31183 if v_1_1_0_0.Op != OpARM64MOVDconst {
31184 break
31185 }
31186 if v_1_1_0_0.AuxInt != 32 {
31187 break
31188 }
31189 v_1_1_0_1 := v_1_1_0.Args[1]
31190 if v_1_1_0_1.Op != OpARM64ANDconst {
31191 break
31192 }
31193 if v_1_1_0_1.Type != t {
31194 break
31195 }
31196 if v_1_1_0_1.AuxInt != 31 {
31197 break
31198 }
31199 if y != v_1_1_0_1.Args[0] {
31200 break
31201 }
31202 if !(cc.(Op) == OpARM64LessThanU) {
31203 break
31204 }
31205 v.reset(OpARM64RORW)
31206 v.AddArg(x)
31207 v0 := b.NewValue0(v.Pos, OpARM64NEG, t)
31208 v0.AddArg(y)
31209 v.AddArg(v0)
31210 return true
31211 }
31212
31213
31214
31215 for {
31216 _ = v.Args[1]
31217 v_0 := v.Args[0]
31218 if v_0.Op != OpARM64CSEL0 {
31219 break
31220 }
31221 if v_0.Type != typ.UInt32 {
31222 break
31223 }
31224 cc := v_0.Aux
31225 _ = v_0.Args[1]
31226 v_0_0 := v_0.Args[0]
31227 if v_0_0.Op != OpARM64SRL {
31228 break
31229 }
31230 if v_0_0.Type != typ.UInt32 {
31231 break
31232 }
31233 _ = v_0_0.Args[1]
31234 v_0_0_0 := v_0_0.Args[0]
31235 if v_0_0_0.Op != OpARM64MOVWUreg {
31236 break
31237 }
31238 x := v_0_0_0.Args[0]
31239 v_0_0_1 := v_0_0.Args[1]
31240 if v_0_0_1.Op != OpARM64SUB {
31241 break
31242 }
31243 t := v_0_0_1.Type
31244 _ = v_0_0_1.Args[1]
31245 v_0_0_1_0 := v_0_0_1.Args[0]
31246 if v_0_0_1_0.Op != OpARM64MOVDconst {
31247 break
31248 }
31249 if v_0_0_1_0.AuxInt != 32 {
31250 break
31251 }
31252 v_0_0_1_1 := v_0_0_1.Args[1]
31253 if v_0_0_1_1.Op != OpARM64ANDconst {
31254 break
31255 }
31256 if v_0_0_1_1.Type != t {
31257 break
31258 }
31259 if v_0_0_1_1.AuxInt != 31 {
31260 break
31261 }
31262 y := v_0_0_1_1.Args[0]
31263 v_0_1 := v_0.Args[1]
31264 if v_0_1.Op != OpARM64CMPconst {
31265 break
31266 }
31267 if v_0_1.AuxInt != 64 {
31268 break
31269 }
31270 v_0_1_0 := v_0_1.Args[0]
31271 if v_0_1_0.Op != OpARM64SUB {
31272 break
31273 }
31274 if v_0_1_0.Type != t {
31275 break
31276 }
31277 _ = v_0_1_0.Args[1]
31278 v_0_1_0_0 := v_0_1_0.Args[0]
31279 if v_0_1_0_0.Op != OpARM64MOVDconst {
31280 break
31281 }
31282 if v_0_1_0_0.AuxInt != 32 {
31283 break
31284 }
31285 v_0_1_0_1 := v_0_1_0.Args[1]
31286 if v_0_1_0_1.Op != OpARM64ANDconst {
31287 break
31288 }
31289 if v_0_1_0_1.Type != t {
31290 break
31291 }
31292 if v_0_1_0_1.AuxInt != 31 {
31293 break
31294 }
31295 if y != v_0_1_0_1.Args[0] {
31296 break
31297 }
31298 v_1 := v.Args[1]
31299 if v_1.Op != OpARM64SLL {
31300 break
31301 }
31302 _ = v_1.Args[1]
31303 if x != v_1.Args[0] {
31304 break
31305 }
31306 v_1_1 := v_1.Args[1]
31307 if v_1_1.Op != OpARM64ANDconst {
31308 break
31309 }
31310 if v_1_1.Type != t {
31311 break
31312 }
31313 if v_1_1.AuxInt != 31 {
31314 break
31315 }
31316 if y != v_1_1.Args[0] {
31317 break
31318 }
31319 if !(cc.(Op) == OpARM64LessThanU) {
31320 break
31321 }
31322 v.reset(OpARM64RORW)
31323 v.AddArg(x)
31324 v0 := b.NewValue0(v.Pos, OpARM64NEG, t)
31325 v0.AddArg(y)
31326 v.AddArg(v0)
31327 return true
31328 }
31329
31330
31331
31332 for {
31333 _ = v.Args[1]
31334 v_0 := v.Args[0]
31335 if v_0.Op != OpARM64SRL {
31336 break
31337 }
31338 if v_0.Type != typ.UInt32 {
31339 break
31340 }
31341 _ = v_0.Args[1]
31342 v_0_0 := v_0.Args[0]
31343 if v_0_0.Op != OpARM64MOVWUreg {
31344 break
31345 }
31346 x := v_0_0.Args[0]
31347 v_0_1 := v_0.Args[1]
31348 if v_0_1.Op != OpARM64ANDconst {
31349 break
31350 }
31351 t := v_0_1.Type
31352 if v_0_1.AuxInt != 31 {
31353 break
31354 }
31355 y := v_0_1.Args[0]
31356 v_1 := v.Args[1]
31357 if v_1.Op != OpARM64CSEL0 {
31358 break
31359 }
31360 if v_1.Type != typ.UInt32 {
31361 break
31362 }
31363 cc := v_1.Aux
31364 _ = v_1.Args[1]
31365 v_1_0 := v_1.Args[0]
31366 if v_1_0.Op != OpARM64SLL {
31367 break
31368 }
31369 _ = v_1_0.Args[1]
31370 if x != v_1_0.Args[0] {
31371 break
31372 }
31373 v_1_0_1 := v_1_0.Args[1]
31374 if v_1_0_1.Op != OpARM64SUB {
31375 break
31376 }
31377 if v_1_0_1.Type != t {
31378 break
31379 }
31380 _ = v_1_0_1.Args[1]
31381 v_1_0_1_0 := v_1_0_1.Args[0]
31382 if v_1_0_1_0.Op != OpARM64MOVDconst {
31383 break
31384 }
31385 if v_1_0_1_0.AuxInt != 32 {
31386 break
31387 }
31388 v_1_0_1_1 := v_1_0_1.Args[1]
31389 if v_1_0_1_1.Op != OpARM64ANDconst {
31390 break
31391 }
31392 if v_1_0_1_1.Type != t {
31393 break
31394 }
31395 if v_1_0_1_1.AuxInt != 31 {
31396 break
31397 }
31398 if y != v_1_0_1_1.Args[0] {
31399 break
31400 }
31401 v_1_1 := v_1.Args[1]
31402 if v_1_1.Op != OpARM64CMPconst {
31403 break
31404 }
31405 if v_1_1.AuxInt != 64 {
31406 break
31407 }
31408 v_1_1_0 := v_1_1.Args[0]
31409 if v_1_1_0.Op != OpARM64SUB {
31410 break
31411 }
31412 if v_1_1_0.Type != t {
31413 break
31414 }
31415 _ = v_1_1_0.Args[1]
31416 v_1_1_0_0 := v_1_1_0.Args[0]
31417 if v_1_1_0_0.Op != OpARM64MOVDconst {
31418 break
31419 }
31420 if v_1_1_0_0.AuxInt != 32 {
31421 break
31422 }
31423 v_1_1_0_1 := v_1_1_0.Args[1]
31424 if v_1_1_0_1.Op != OpARM64ANDconst {
31425 break
31426 }
31427 if v_1_1_0_1.Type != t {
31428 break
31429 }
31430 if v_1_1_0_1.AuxInt != 31 {
31431 break
31432 }
31433 if y != v_1_1_0_1.Args[0] {
31434 break
31435 }
31436 if !(cc.(Op) == OpARM64LessThanU) {
31437 break
31438 }
31439 v.reset(OpARM64RORW)
31440 v.AddArg(x)
31441 v.AddArg(y)
31442 return true
31443 }
31444
31445
31446
31447 for {
31448 _ = v.Args[1]
31449 v_0 := v.Args[0]
31450 if v_0.Op != OpARM64CSEL0 {
31451 break
31452 }
31453 if v_0.Type != typ.UInt32 {
31454 break
31455 }
31456 cc := v_0.Aux
31457 _ = v_0.Args[1]
31458 v_0_0 := v_0.Args[0]
31459 if v_0_0.Op != OpARM64SLL {
31460 break
31461 }
31462 _ = v_0_0.Args[1]
31463 x := v_0_0.Args[0]
31464 v_0_0_1 := v_0_0.Args[1]
31465 if v_0_0_1.Op != OpARM64SUB {
31466 break
31467 }
31468 t := v_0_0_1.Type
31469 _ = v_0_0_1.Args[1]
31470 v_0_0_1_0 := v_0_0_1.Args[0]
31471 if v_0_0_1_0.Op != OpARM64MOVDconst {
31472 break
31473 }
31474 if v_0_0_1_0.AuxInt != 32 {
31475 break
31476 }
31477 v_0_0_1_1 := v_0_0_1.Args[1]
31478 if v_0_0_1_1.Op != OpARM64ANDconst {
31479 break
31480 }
31481 if v_0_0_1_1.Type != t {
31482 break
31483 }
31484 if v_0_0_1_1.AuxInt != 31 {
31485 break
31486 }
31487 y := v_0_0_1_1.Args[0]
31488 v_0_1 := v_0.Args[1]
31489 if v_0_1.Op != OpARM64CMPconst {
31490 break
31491 }
31492 if v_0_1.AuxInt != 64 {
31493 break
31494 }
31495 v_0_1_0 := v_0_1.Args[0]
31496 if v_0_1_0.Op != OpARM64SUB {
31497 break
31498 }
31499 if v_0_1_0.Type != t {
31500 break
31501 }
31502 _ = v_0_1_0.Args[1]
31503 v_0_1_0_0 := v_0_1_0.Args[0]
31504 if v_0_1_0_0.Op != OpARM64MOVDconst {
31505 break
31506 }
31507 if v_0_1_0_0.AuxInt != 32 {
31508 break
31509 }
31510 v_0_1_0_1 := v_0_1_0.Args[1]
31511 if v_0_1_0_1.Op != OpARM64ANDconst {
31512 break
31513 }
31514 if v_0_1_0_1.Type != t {
31515 break
31516 }
31517 if v_0_1_0_1.AuxInt != 31 {
31518 break
31519 }
31520 if y != v_0_1_0_1.Args[0] {
31521 break
31522 }
31523 v_1 := v.Args[1]
31524 if v_1.Op != OpARM64SRL {
31525 break
31526 }
31527 if v_1.Type != typ.UInt32 {
31528 break
31529 }
31530 _ = v_1.Args[1]
31531 v_1_0 := v_1.Args[0]
31532 if v_1_0.Op != OpARM64MOVWUreg {
31533 break
31534 }
31535 if x != v_1_0.Args[0] {
31536 break
31537 }
31538 v_1_1 := v_1.Args[1]
31539 if v_1_1.Op != OpARM64ANDconst {
31540 break
31541 }
31542 if v_1_1.Type != t {
31543 break
31544 }
31545 if v_1_1.AuxInt != 31 {
31546 break
31547 }
31548 if y != v_1_1.Args[0] {
31549 break
31550 }
31551 if !(cc.(Op) == OpARM64LessThanU) {
31552 break
31553 }
31554 v.reset(OpARM64RORW)
31555 v.AddArg(x)
31556 v.AddArg(y)
31557 return true
31558 }
31559 return false
31560 }
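// Editorial sketch (not generated by rulegen): the long XOR patterns above
// recognise the canonical expansion of a rotate by a variable amount -- a left
// shift by y&63 combined with a right shift by 64-(y&63) that is zeroed via
// CSEL0 when that amount reaches 64 -- and collapse it to ROR x (NEG y) (or
// RORW for the 32-bit variants), since rotating left by k is rotating right by
// (64-k)&63. demoRotlAsRotr is a hypothetical helper checking that 64-bit
// equivalence.
func demoRotlAsRotr(x uint64, y uint) uint64 {
	rotr := func(v uint64, k uint) uint64 {
		k &= 63
		return v>>k | v<<((64-k)&63)
	}
	k := y & 63
	rotl := x
	if k != 0 {
		rotl = x<<k | x>>(64-k)
	}
	if rotl != rotr(x, (64-k)&63) {
		panic("unreachable: rotate left by k equals rotate right by (64-k)&63")
	}
	return rotl
}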
31561 func rewriteValueARM64_OpARM64XORconst_0(v *Value) bool {
31562
31563
31564
31565 for {
31566 if v.AuxInt != 0 {
31567 break
31568 }
31569 x := v.Args[0]
31570 v.reset(OpCopy)
31571 v.Type = x.Type
31572 v.AddArg(x)
31573 return true
31574 }
31575
31576
31577
31578 for {
31579 if v.AuxInt != -1 {
31580 break
31581 }
31582 x := v.Args[0]
31583 v.reset(OpARM64MVN)
31584 v.AddArg(x)
31585 return true
31586 }
31587
31588
31589
31590 for {
31591 c := v.AuxInt
31592 v_0 := v.Args[0]
31593 if v_0.Op != OpARM64MOVDconst {
31594 break
31595 }
31596 d := v_0.AuxInt
31597 v.reset(OpARM64MOVDconst)
31598 v.AuxInt = c ^ d
31599 return true
31600 }
31601
31602
31603
31604 for {
31605 c := v.AuxInt
31606 v_0 := v.Args[0]
31607 if v_0.Op != OpARM64XORconst {
31608 break
31609 }
31610 d := v_0.AuxInt
31611 x := v_0.Args[0]
31612 v.reset(OpARM64XORconst)
31613 v.AuxInt = c ^ d
31614 v.AddArg(x)
31615 return true
31616 }
31617 return false
31618 }
31619 func rewriteValueARM64_OpARM64XORshiftLL_0(v *Value) bool {
31620 b := v.Block
31621 typ := &b.Func.Config.Types
31622
31623
31624
31625 for {
31626 d := v.AuxInt
31627 x := v.Args[1]
31628 v_0 := v.Args[0]
31629 if v_0.Op != OpARM64MOVDconst {
31630 break
31631 }
31632 c := v_0.AuxInt
31633 v.reset(OpARM64XORconst)
31634 v.AuxInt = c
31635 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
31636 v0.AuxInt = d
31637 v0.AddArg(x)
31638 v.AddArg(v0)
31639 return true
31640 }
31641
31642
31643
31644 for {
31645 d := v.AuxInt
31646 _ = v.Args[1]
31647 x := v.Args[0]
31648 v_1 := v.Args[1]
31649 if v_1.Op != OpARM64MOVDconst {
31650 break
31651 }
31652 c := v_1.AuxInt
31653 v.reset(OpARM64XORconst)
31654 v.AuxInt = int64(uint64(c) << uint64(d))
31655 v.AddArg(x)
31656 return true
31657 }
31658
31659
31660
31661 for {
31662 d := v.AuxInt
31663 _ = v.Args[1]
31664 x := v.Args[0]
31665 v_1 := v.Args[1]
31666 if v_1.Op != OpARM64SLLconst {
31667 break
31668 }
31669 c := v_1.AuxInt
31670 if x != v_1.Args[0] {
31671 break
31672 }
31673 if !(c == d) {
31674 break
31675 }
31676 v.reset(OpARM64MOVDconst)
31677 v.AuxInt = 0
31678 return true
31679 }
31680
31681
31682
31683 for {
31684 c := v.AuxInt
31685 x := v.Args[1]
31686 v_0 := v.Args[0]
31687 if v_0.Op != OpARM64SRLconst {
31688 break
31689 }
31690 if v_0.AuxInt != 64-c {
31691 break
31692 }
31693 if x != v_0.Args[0] {
31694 break
31695 }
31696 v.reset(OpARM64RORconst)
31697 v.AuxInt = 64 - c
31698 v.AddArg(x)
31699 return true
31700 }
31701
31702
31703
31704 for {
31705 t := v.Type
31706 c := v.AuxInt
31707 x := v.Args[1]
31708 v_0 := v.Args[0]
31709 if v_0.Op != OpARM64UBFX {
31710 break
31711 }
31712 bfc := v_0.AuxInt
31713 if x != v_0.Args[0] {
31714 break
31715 }
31716 if !(c < 32 && t.Size() == 4 && bfc == armBFAuxInt(32-c, c)) {
31717 break
31718 }
31719 v.reset(OpARM64RORWconst)
31720 v.AuxInt = 32 - c
31721 v.AddArg(x)
31722 return true
31723 }
31724 // match: (XORshiftLL <typ.UInt16> [8] (UBFX <typ.UInt16> [armBFAuxInt(8, 8)] x) x)
31725 // cond:
31726 // result: (REV16W x)
31727 for {
31728 if v.Type != typ.UInt16 {
31729 break
31730 }
31731 if v.AuxInt != 8 {
31732 break
31733 }
31734 x := v.Args[1]
31735 v_0 := v.Args[0]
31736 if v_0.Op != OpARM64UBFX {
31737 break
31738 }
31739 if v_0.Type != typ.UInt16 {
31740 break
31741 }
31742 if v_0.AuxInt != armBFAuxInt(8, 8) {
31743 break
31744 }
31745 if x != v_0.Args[0] {
31746 break
31747 }
31748 v.reset(OpARM64REV16W)
31749 v.AddArg(x)
31750 return true
31751 }
31752 // match: (XORshiftLL [c] (SRLconst x [64-c]) x2)
31753 // cond:
31754 // result: (EXTRconst [64-c] x2 x)
31755 for {
31756 c := v.AuxInt
31757 x2 := v.Args[1]
31758 v_0 := v.Args[0]
31759 if v_0.Op != OpARM64SRLconst {
31760 break
31761 }
31762 if v_0.AuxInt != 64-c {
31763 break
31764 }
31765 x := v_0.Args[0]
31766 v.reset(OpARM64EXTRconst)
31767 v.AuxInt = 64 - c
31768 v.AddArg(x2)
31769 v.AddArg(x)
31770 return true
31771 }
31772 // match: (XORshiftLL <t> [c] (UBFX [bfc] x) x2)
31773 // cond: c < 32 && t.Size() == 4 && bfc == armBFAuxInt(32-c, c)
31774 // result: (EXTRWconst [32-c] x2 x)
31775 for {
31776 t := v.Type
31777 c := v.AuxInt
31778 x2 := v.Args[1]
31779 v_0 := v.Args[0]
31780 if v_0.Op != OpARM64UBFX {
31781 break
31782 }
31783 bfc := v_0.AuxInt
31784 x := v_0.Args[0]
31785 if !(c < 32 && t.Size() == 4 && bfc == armBFAuxInt(32-c, c)) {
31786 break
31787 }
31788 v.reset(OpARM64EXTRWconst)
31789 v.AuxInt = 32 - c
31790 v.AddArg(x2)
31791 v.AddArg(x)
31792 return true
31793 }
31794 return false
31795 }
31796 func rewriteValueARM64_OpARM64XORshiftRA_0(v *Value) bool {
31797 b := v.Block
31798 // match: (XORshiftRA (MOVDconst [c]) x [d])
31799 // cond:
31800 // result: (XORconst [c] (SRAconst <x.Type> x [d]))
31801 for {
31802 d := v.AuxInt
31803 x := v.Args[1]
31804 v_0 := v.Args[0]
31805 if v_0.Op != OpARM64MOVDconst {
31806 break
31807 }
31808 c := v_0.AuxInt
31809 v.reset(OpARM64XORconst)
31810 v.AuxInt = c
31811 v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type)
31812 v0.AuxInt = d
31813 v0.AddArg(x)
31814 v.AddArg(v0)
31815 return true
31816 }
31817 // match: (XORshiftRA x (MOVDconst [c]) [d])
31818 // cond:
31819 // result: (XORconst x [c>>uint64(d)])
31820 for {
31821 d := v.AuxInt
31822 _ = v.Args[1]
31823 x := v.Args[0]
31824 v_1 := v.Args[1]
31825 if v_1.Op != OpARM64MOVDconst {
31826 break
31827 }
31828 c := v_1.AuxInt
31829 v.reset(OpARM64XORconst)
31830 v.AuxInt = c >> uint64(d)
31831 v.AddArg(x)
31832 return true
31833 }
31834 // match: (XORshiftRA x (SRAconst x [c]) [d])
31835 // cond: c==d
31836 // result: (MOVDconst [0])
31837 for {
31838 d := v.AuxInt
31839 _ = v.Args[1]
31840 x := v.Args[0]
31841 v_1 := v.Args[1]
31842 if v_1.Op != OpARM64SRAconst {
31843 break
31844 }
31845 c := v_1.AuxInt
31846 if x != v_1.Args[0] {
31847 break
31848 }
31849 if !(c == d) {
31850 break
31851 }
31852 v.reset(OpARM64MOVDconst)
31853 v.AuxInt = 0
31854 return true
31855 }
31856 return false
31857 }
31858 func rewriteValueARM64_OpARM64XORshiftRL_0(v *Value) bool {
31859 b := v.Block
31860 // match: (XORshiftRL (MOVDconst [c]) x [d])
31861 // cond:
31862 // result: (XORconst [c] (SRLconst <x.Type> x [d]))
31863 for {
31864 d := v.AuxInt
31865 x := v.Args[1]
31866 v_0 := v.Args[0]
31867 if v_0.Op != OpARM64MOVDconst {
31868 break
31869 }
31870 c := v_0.AuxInt
31871 v.reset(OpARM64XORconst)
31872 v.AuxInt = c
31873 v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type)
31874 v0.AuxInt = d
31875 v0.AddArg(x)
31876 v.AddArg(v0)
31877 return true
31878 }
31879 // match: (XORshiftRL x (MOVDconst [c]) [d])
31880 // cond:
31881 // result: (XORconst x [int64(uint64(c)>>uint64(d))])
31882 for {
31883 d := v.AuxInt
31884 _ = v.Args[1]
31885 x := v.Args[0]
31886 v_1 := v.Args[1]
31887 if v_1.Op != OpARM64MOVDconst {
31888 break
31889 }
31890 c := v_1.AuxInt
31891 v.reset(OpARM64XORconst)
31892 v.AuxInt = int64(uint64(c) >> uint64(d))
31893 v.AddArg(x)
31894 return true
31895 }
31896 // match: (XORshiftRL x (SRLconst x [c]) [d])
31897 // cond: c==d
31898 // result: (MOVDconst [0])
31899 for {
31900 d := v.AuxInt
31901 _ = v.Args[1]
31902 x := v.Args[0]
31903 v_1 := v.Args[1]
31904 if v_1.Op != OpARM64SRLconst {
31905 break
31906 }
31907 c := v_1.AuxInt
31908 if x != v_1.Args[0] {
31909 break
31910 }
31911 if !(c == d) {
31912 break
31913 }
31914 v.reset(OpARM64MOVDconst)
31915 v.AuxInt = 0
31916 return true
31917 }
31918 // match: (XORshiftRL [c] (SLLconst x [64-c]) x)
31919 // cond:
31920 // result: (RORconst [c] x)
31921 for {
31922 c := v.AuxInt
31923 x := v.Args[1]
31924 v_0 := v.Args[0]
31925 if v_0.Op != OpARM64SLLconst {
31926 break
31927 }
31928 if v_0.AuxInt != 64-c {
31929 break
31930 }
31931 if x != v_0.Args[0] {
31932 break
31933 }
31934 v.reset(OpARM64RORconst)
31935 v.AuxInt = c
31936 v.AddArg(x)
31937 return true
31938 }
31939 // match: (XORshiftRL <t> [c] (SLLconst x [32-c]) (MOVWUreg x))
31940 // cond: c < 32 && t.Size() == 4
31941 // result: (RORWconst [c] x)
31942 for {
31943 t := v.Type
31944 c := v.AuxInt
31945 _ = v.Args[1]
31946 v_0 := v.Args[0]
31947 if v_0.Op != OpARM64SLLconst {
31948 break
31949 }
31950 if v_0.AuxInt != 32-c {
31951 break
31952 }
31953 x := v_0.Args[0]
31954 v_1 := v.Args[1]
31955 if v_1.Op != OpARM64MOVWUreg {
31956 break
31957 }
31958 if x != v_1.Args[0] {
31959 break
31960 }
31961 if !(c < 32 && t.Size() == 4) {
31962 break
31963 }
31964 v.reset(OpARM64RORWconst)
31965 v.AuxInt = c
31966 v.AddArg(x)
31967 return true
31968 }
31969 return false
31970 }
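// The rewriteValueARM64_Op* functions below lower generic, machine-independent
// SSA ops to ARM64 ops, e.g. Abs -> FABSD and the integer Add/And variants -> ADD/AND.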
31971 func rewriteValueARM64_OpAbs_0(v *Value) bool {
31972 // match: (Abs x)
31973 // cond:
31974 // result: (FABSD x)
31975 for {
31976 x := v.Args[0]
31977 v.reset(OpARM64FABSD)
31978 v.AddArg(x)
31979 return true
31980 }
31981 }
31982 func rewriteValueARM64_OpAdd16_0(v *Value) bool {
31983
31984
31985
31986 for {
31987 y := v.Args[1]
31988 x := v.Args[0]
31989 v.reset(OpARM64ADD)
31990 v.AddArg(x)
31991 v.AddArg(y)
31992 return true
31993 }
31994 }
31995 func rewriteValueARM64_OpAdd32_0(v *Value) bool {
31996
31997
31998
31999 for {
32000 y := v.Args[1]
32001 x := v.Args[0]
32002 v.reset(OpARM64ADD)
32003 v.AddArg(x)
32004 v.AddArg(y)
32005 return true
32006 }
32007 }
32008 func rewriteValueARM64_OpAdd32F_0(v *Value) bool {
32009
32010
32011
32012 for {
32013 y := v.Args[1]
32014 x := v.Args[0]
32015 v.reset(OpARM64FADDS)
32016 v.AddArg(x)
32017 v.AddArg(y)
32018 return true
32019 }
32020 }
32021 func rewriteValueARM64_OpAdd64_0(v *Value) bool {
32022
32023
32024
32025 for {
32026 y := v.Args[1]
32027 x := v.Args[0]
32028 v.reset(OpARM64ADD)
32029 v.AddArg(x)
32030 v.AddArg(y)
32031 return true
32032 }
32033 }
32034 func rewriteValueARM64_OpAdd64F_0(v *Value) bool {
32035
32036
32037
32038 for {
32039 y := v.Args[1]
32040 x := v.Args[0]
32041 v.reset(OpARM64FADDD)
32042 v.AddArg(x)
32043 v.AddArg(y)
32044 return true
32045 }
32046 }
32047 func rewriteValueARM64_OpAdd8_0(v *Value) bool {
32048
32049
32050
32051 for {
32052 y := v.Args[1]
32053 x := v.Args[0]
32054 v.reset(OpARM64ADD)
32055 v.AddArg(x)
32056 v.AddArg(y)
32057 return true
32058 }
32059 }
32060 func rewriteValueARM64_OpAddPtr_0(v *Value) bool {
32061
32062
32063
32064 for {
32065 y := v.Args[1]
32066 x := v.Args[0]
32067 v.reset(OpARM64ADD)
32068 v.AddArg(x)
32069 v.AddArg(y)
32070 return true
32071 }
32072 }
32073 func rewriteValueARM64_OpAddr_0(v *Value) bool {
32074 // match: (Addr {sym} base)
32075 // cond:
32076 // result: (MOVDaddr {sym} base)
32077 for {
32078 sym := v.Aux
32079 base := v.Args[0]
32080 v.reset(OpARM64MOVDaddr)
32081 v.Aux = sym
32082 v.AddArg(base)
32083 return true
32084 }
32085 }
32086 func rewriteValueARM64_OpAnd16_0(v *Value) bool {
32087
32088
32089
32090 for {
32091 y := v.Args[1]
32092 x := v.Args[0]
32093 v.reset(OpARM64AND)
32094 v.AddArg(x)
32095 v.AddArg(y)
32096 return true
32097 }
32098 }
32099 func rewriteValueARM64_OpAnd32_0(v *Value) bool {
32100
32101
32102
32103 for {
32104 y := v.Args[1]
32105 x := v.Args[0]
32106 v.reset(OpARM64AND)
32107 v.AddArg(x)
32108 v.AddArg(y)
32109 return true
32110 }
32111 }
32112 func rewriteValueARM64_OpAnd64_0(v *Value) bool {
32113
32114
32115
32116 for {
32117 y := v.Args[1]
32118 x := v.Args[0]
32119 v.reset(OpARM64AND)
32120 v.AddArg(x)
32121 v.AddArg(y)
32122 return true
32123 }
32124 }
32125 func rewriteValueARM64_OpAnd8_0(v *Value) bool {
32126
32127
32128
32129 for {
32130 y := v.Args[1]
32131 x := v.Args[0]
32132 v.reset(OpARM64AND)
32133 v.AddArg(x)
32134 v.AddArg(y)
32135 return true
32136 }
32137 }
32138 func rewriteValueARM64_OpAndB_0(v *Value) bool {
32139
32140
32141
32142 for {
32143 y := v.Args[1]
32144 x := v.Args[0]
32145 v.reset(OpARM64AND)
32146 v.AddArg(x)
32147 v.AddArg(y)
32148 return true
32149 }
32150 }
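// The AtomicAdd, AtomicCompareAndSwap, and AtomicExchange lowerings map
// one-for-one onto the corresponding ARM64 LoweredAtomic* ops, passing
// ptr, val (or old/new), and mem through unchanged.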
32151 func rewriteValueARM64_OpAtomicAdd32_0(v *Value) bool {
32152
32153
32154
32155 for {
32156 mem := v.Args[2]
32157 ptr := v.Args[0]
32158 val := v.Args[1]
32159 v.reset(OpARM64LoweredAtomicAdd32)
32160 v.AddArg(ptr)
32161 v.AddArg(val)
32162 v.AddArg(mem)
32163 return true
32164 }
32165 }
32166 func rewriteValueARM64_OpAtomicAdd32Variant_0(v *Value) bool {
32167
32168
32169
32170 for {
32171 mem := v.Args[2]
32172 ptr := v.Args[0]
32173 val := v.Args[1]
32174 v.reset(OpARM64LoweredAtomicAdd32Variant)
32175 v.AddArg(ptr)
32176 v.AddArg(val)
32177 v.AddArg(mem)
32178 return true
32179 }
32180 }
32181 func rewriteValueARM64_OpAtomicAdd64_0(v *Value) bool {
32182
32183
32184
32185 for {
32186 mem := v.Args[2]
32187 ptr := v.Args[0]
32188 val := v.Args[1]
32189 v.reset(OpARM64LoweredAtomicAdd64)
32190 v.AddArg(ptr)
32191 v.AddArg(val)
32192 v.AddArg(mem)
32193 return true
32194 }
32195 }
32196 func rewriteValueARM64_OpAtomicAdd64Variant_0(v *Value) bool {
32197
32198
32199
32200 for {
32201 mem := v.Args[2]
32202 ptr := v.Args[0]
32203 val := v.Args[1]
32204 v.reset(OpARM64LoweredAtomicAdd64Variant)
32205 v.AddArg(ptr)
32206 v.AddArg(val)
32207 v.AddArg(mem)
32208 return true
32209 }
32210 }
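// AtomicAnd8 (and AtomicOr8 below) produce only a memory result, so the lowering
// wraps LoweredAtomicAnd8, which returns a (value, memory) tuple, in Select1 to
// extract the memory.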
32211 func rewriteValueARM64_OpAtomicAnd8_0(v *Value) bool {
32212 b := v.Block
32213 typ := &b.Func.Config.Types
32214
32215
32216
32217 for {
32218 mem := v.Args[2]
32219 ptr := v.Args[0]
32220 val := v.Args[1]
32221 v.reset(OpSelect1)
32222 v0 := b.NewValue0(v.Pos, OpARM64LoweredAtomicAnd8, types.NewTuple(typ.UInt8, types.TypeMem))
32223 v0.AddArg(ptr)
32224 v0.AddArg(val)
32225 v0.AddArg(mem)
32226 v.AddArg(v0)
32227 return true
32228 }
32229 }
32230 func rewriteValueARM64_OpAtomicCompareAndSwap32_0(v *Value) bool {
32231
32232
32233
32234 for {
32235 mem := v.Args[3]
32236 ptr := v.Args[0]
32237 old := v.Args[1]
32238 new_ := v.Args[2]
32239 v.reset(OpARM64LoweredAtomicCas32)
32240 v.AddArg(ptr)
32241 v.AddArg(old)
32242 v.AddArg(new_)
32243 v.AddArg(mem)
32244 return true
32245 }
32246 }
32247 func rewriteValueARM64_OpAtomicCompareAndSwap64_0(v *Value) bool {
32248
32249
32250
32251 for {
32252 mem := v.Args[3]
32253 ptr := v.Args[0]
32254 old := v.Args[1]
32255 new_ := v.Args[2]
32256 v.reset(OpARM64LoweredAtomicCas64)
32257 v.AddArg(ptr)
32258 v.AddArg(old)
32259 v.AddArg(new_)
32260 v.AddArg(mem)
32261 return true
32262 }
32263 }
32264 func rewriteValueARM64_OpAtomicExchange32_0(v *Value) bool {
32265
32266
32267
32268 for {
32269 mem := v.Args[2]
32270 ptr := v.Args[0]
32271 val := v.Args[1]
32272 v.reset(OpARM64LoweredAtomicExchange32)
32273 v.AddArg(ptr)
32274 v.AddArg(val)
32275 v.AddArg(mem)
32276 return true
32277 }
32278 }
32279 func rewriteValueARM64_OpAtomicExchange64_0(v *Value) bool {
32280
32281
32282
32283 for {
32284 mem := v.Args[2]
32285 ptr := v.Args[0]
32286 val := v.Args[1]
32287 v.reset(OpARM64LoweredAtomicExchange64)
32288 v.AddArg(ptr)
32289 v.AddArg(val)
32290 v.AddArg(mem)
32291 return true
32292 }
32293 }
32294 func rewriteValueARM64_OpAtomicLoad32_0(v *Value) bool {
32295
32296
32297
32298 for {
32299 mem := v.Args[1]
32300 ptr := v.Args[0]
32301 v.reset(OpARM64LDARW)
32302 v.AddArg(ptr)
32303 v.AddArg(mem)
32304 return true
32305 }
32306 }
32307 func rewriteValueARM64_OpAtomicLoad64_0(v *Value) bool {
32308
32309
32310
32311 for {
32312 mem := v.Args[1]
32313 ptr := v.Args[0]
32314 v.reset(OpARM64LDAR)
32315 v.AddArg(ptr)
32316 v.AddArg(mem)
32317 return true
32318 }
32319 }
32320 func rewriteValueARM64_OpAtomicLoad8_0(v *Value) bool {
32321
32322
32323
32324 for {
32325 mem := v.Args[1]
32326 ptr := v.Args[0]
32327 v.reset(OpARM64LDARB)
32328 v.AddArg(ptr)
32329 v.AddArg(mem)
32330 return true
32331 }
32332 }
32333 func rewriteValueARM64_OpAtomicLoadPtr_0(v *Value) bool {
32334
32335
32336
32337 for {
32338 mem := v.Args[1]
32339 ptr := v.Args[0]
32340 v.reset(OpARM64LDAR)
32341 v.AddArg(ptr)
32342 v.AddArg(mem)
32343 return true
32344 }
32345 }
32346 func rewriteValueARM64_OpAtomicOr8_0(v *Value) bool {
32347 b := v.Block
32348 typ := &b.Func.Config.Types
32349
32350
32351
32352 for {
32353 mem := v.Args[2]
32354 ptr := v.Args[0]
32355 val := v.Args[1]
32356 v.reset(OpSelect1)
32357 v0 := b.NewValue0(v.Pos, OpARM64LoweredAtomicOr8, types.NewTuple(typ.UInt8, types.TypeMem))
32358 v0.AddArg(ptr)
32359 v0.AddArg(val)
32360 v0.AddArg(mem)
32361 v.AddArg(v0)
32362 return true
32363 }
32364 }
32365 func rewriteValueARM64_OpAtomicStore32_0(v *Value) bool {
32366
32367
32368
32369 for {
32370 mem := v.Args[2]
32371 ptr := v.Args[0]
32372 val := v.Args[1]
32373 v.reset(OpARM64STLRW)
32374 v.AddArg(ptr)
32375 v.AddArg(val)
32376 v.AddArg(mem)
32377 return true
32378 }
32379 }
32380 func rewriteValueARM64_OpAtomicStore64_0(v *Value) bool {
32381
32382
32383
32384 for {
32385 mem := v.Args[2]
32386 ptr := v.Args[0]
32387 val := v.Args[1]
32388 v.reset(OpARM64STLR)
32389 v.AddArg(ptr)
32390 v.AddArg(val)
32391 v.AddArg(mem)
32392 return true
32393 }
32394 }
32395 func rewriteValueARM64_OpAtomicStorePtrNoWB_0(v *Value) bool {
32396
32397
32398
32399 for {
32400 mem := v.Args[2]
32401 ptr := v.Args[0]
32402 val := v.Args[1]
32403 v.reset(OpARM64STLR)
32404 v.AddArg(ptr)
32405 v.AddArg(val)
32406 v.AddArg(mem)
32407 return true
32408 }
32409 }
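// Avg64u is lowered to ADD (SRLconst (SUB x y) [1]) y, i.e. (x-y)>>1 + y.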
32410 func rewriteValueARM64_OpAvg64u_0(v *Value) bool {
32411 b := v.Block
32412 // match: (Avg64u <t> x y)
32413 // cond:
32414 // result: (ADD (SRLconst <t> (SUB <t> x y) [1]) y)
32415 for {
32416 t := v.Type
32417 y := v.Args[1]
32418 x := v.Args[0]
32419 v.reset(OpARM64ADD)
32420 v0 := b.NewValue0(v.Pos, OpARM64SRLconst, t)
32421 v0.AuxInt = 1
32422 v1 := b.NewValue0(v.Pos, OpARM64SUB, t)
32423 v1.AddArg(x)
32424 v1.AddArg(y)
32425 v0.AddArg(v1)
32426 v.AddArg(v0)
32427 v.AddArg(y)
32428 return true
32429 }
32430 }
32431 func rewriteValueARM64_OpBitLen32_0(v *Value) bool {
32432 b := v.Block
32433 typ := &b.Func.Config.Types
32434 // match: (BitLen32 x)
32435 // cond:
32436 // result: (SUB (MOVDconst [32]) (CLZW <typ.Int> x))
32437 for {
32438 x := v.Args[0]
32439 v.reset(OpARM64SUB)
32440 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
32441 v0.AuxInt = 32
32442 v.AddArg(v0)
32443 v1 := b.NewValue0(v.Pos, OpARM64CLZW, typ.Int)
32444 v1.AddArg(x)
32445 v.AddArg(v1)
32446 return true
32447 }
32448 }
32449 func rewriteValueARM64_OpBitLen64_0(v *Value) bool {
32450 b := v.Block
32451 typ := &b.Func.Config.Types
32452 // match: (BitLen64 x)
32453 // cond:
32454 // result: (SUB (MOVDconst [64]) (CLZ <typ.Int> x))
32455 for {
32456 x := v.Args[0]
32457 v.reset(OpARM64SUB)
32458 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
32459 v0.AuxInt = 64
32460 v.AddArg(v0)
32461 v1 := b.NewValue0(v.Pos, OpARM64CLZ, typ.Int)
32462 v1.AddArg(x)
32463 v.AddArg(v1)
32464 return true
32465 }
32466 }
32467 func rewriteValueARM64_OpBitRev16_0(v *Value) bool {
32468 b := v.Block
32469 typ := &b.Func.Config.Types
32470 // match: (BitRev16 x)
32471 // cond:
32472 // result: (SRLconst [48] (RBIT <typ.UInt64> x))
32473 for {
32474 x := v.Args[0]
32475 v.reset(OpARM64SRLconst)
32476 v.AuxInt = 48
32477 v0 := b.NewValue0(v.Pos, OpARM64RBIT, typ.UInt64)
32478 v0.AddArg(x)
32479 v.AddArg(v0)
32480 return true
32481 }
32482 }
32483 func rewriteValueARM64_OpBitRev32_0(v *Value) bool {
32484
32485
32486
32487 for {
32488 x := v.Args[0]
32489 v.reset(OpARM64RBITW)
32490 v.AddArg(x)
32491 return true
32492 }
32493 }
32494 func rewriteValueARM64_OpBitRev64_0(v *Value) bool {
32495
32496
32497
32498 for {
32499 x := v.Args[0]
32500 v.reset(OpARM64RBIT)
32501 v.AddArg(x)
32502 return true
32503 }
32504 }
32505 func rewriteValueARM64_OpBitRev8_0(v *Value) bool {
32506 b := v.Block
32507 typ := &b.Func.Config.Types
32508
32509
32510
32511 for {
32512 x := v.Args[0]
32513 v.reset(OpARM64SRLconst)
32514 v.AuxInt = 56
32515 v0 := b.NewValue0(v.Pos, OpARM64RBIT, typ.UInt64)
32516 v0.AddArg(x)
32517 v.AddArg(v0)
32518 return true
32519 }
32520 }
32521 func rewriteValueARM64_OpBswap32_0(v *Value) bool {
32522
32523
32524
32525 for {
32526 x := v.Args[0]
32527 v.reset(OpARM64REVW)
32528 v.AddArg(x)
32529 return true
32530 }
32531 }
32532 func rewriteValueARM64_OpBswap64_0(v *Value) bool {
32533
32534
32535
32536 for {
32537 x := v.Args[0]
32538 v.reset(OpARM64REV)
32539 v.AddArg(x)
32540 return true
32541 }
32542 }
32543 func rewriteValueARM64_OpCeil_0(v *Value) bool {
32544
32545
32546
32547 for {
32548 x := v.Args[0]
32549 v.reset(OpARM64FRINTPD)
32550 v.AddArg(x)
32551 return true
32552 }
32553 }
32554 func rewriteValueARM64_OpClosureCall_0(v *Value) bool {
32555
32556
32557
32558 for {
32559 argwid := v.AuxInt
32560 mem := v.Args[2]
32561 entry := v.Args[0]
32562 closure := v.Args[1]
32563 v.reset(OpARM64CALLclosure)
32564 v.AuxInt = argwid
32565 v.AddArg(entry)
32566 v.AddArg(closure)
32567 v.AddArg(mem)
32568 return true
32569 }
32570 }
32571 func rewriteValueARM64_OpCom16_0(v *Value) bool {
32572
32573
32574
32575 for {
32576 x := v.Args[0]
32577 v.reset(OpARM64MVN)
32578 v.AddArg(x)
32579 return true
32580 }
32581 }
32582 func rewriteValueARM64_OpCom32_0(v *Value) bool {
32583
32584
32585
32586 for {
32587 x := v.Args[0]
32588 v.reset(OpARM64MVN)
32589 v.AddArg(x)
32590 return true
32591 }
32592 }
32593 func rewriteValueARM64_OpCom64_0(v *Value) bool {
32594
32595
32596
32597 for {
32598 x := v.Args[0]
32599 v.reset(OpARM64MVN)
32600 v.AddArg(x)
32601 return true
32602 }
32603 }
32604 func rewriteValueARM64_OpCom8_0(v *Value) bool {
32605
32606
32607
32608 for {
32609 x := v.Args[0]
32610 v.reset(OpARM64MVN)
32611 v.AddArg(x)
32612 return true
32613 }
32614 }
32615 func rewriteValueARM64_OpCondSelect_0(v *Value) bool {
32616 b := v.Block
32617 // match: (CondSelect x y boolval)
32618 // cond: flagArg(boolval) != nil
32619 // result: (CSEL {boolval.Op} x y flagArg(boolval))
32620 for {
32621 boolval := v.Args[2]
32622 x := v.Args[0]
32623 y := v.Args[1]
32624 if !(flagArg(boolval) != nil) {
32625 break
32626 }
32627 v.reset(OpARM64CSEL)
32628 v.Aux = boolval.Op
32629 v.AddArg(x)
32630 v.AddArg(y)
32631 v.AddArg(flagArg(boolval))
32632 return true
32633 }
32634 // match: (CondSelect x y boolval)
32635 // cond: flagArg(boolval) == nil
32636 // result: (CSEL {OpARM64NotEqual} x y (CMPWconst [0] boolval))
32637 for {
32638 boolval := v.Args[2]
32639 x := v.Args[0]
32640 y := v.Args[1]
32641 if !(flagArg(boolval) == nil) {
32642 break
32643 }
32644 v.reset(OpARM64CSEL)
32645 v.Aux = OpARM64NotEqual
32646 v.AddArg(x)
32647 v.AddArg(y)
32648 v0 := b.NewValue0(v.Pos, OpARM64CMPWconst, types.TypeFlags)
32649 v0.AuxInt = 0
32650 v0.AddArg(boolval)
32651 v.AddArg(v0)
32652 return true
32653 }
32654 return false
32655 }
32656 func rewriteValueARM64_OpConst16_0(v *Value) bool {
32657
32658
32659
32660 for {
32661 val := v.AuxInt
32662 v.reset(OpARM64MOVDconst)
32663 v.AuxInt = val
32664 return true
32665 }
32666 }
32667 func rewriteValueARM64_OpConst32_0(v *Value) bool {
32668
32669
32670
32671 for {
32672 val := v.AuxInt
32673 v.reset(OpARM64MOVDconst)
32674 v.AuxInt = val
32675 return true
32676 }
32677 }
32678 func rewriteValueARM64_OpConst32F_0(v *Value) bool {
32679
32680
32681
32682 for {
32683 val := v.AuxInt
32684 v.reset(OpARM64FMOVSconst)
32685 v.AuxInt = val
32686 return true
32687 }
32688 }
32689 func rewriteValueARM64_OpConst64_0(v *Value) bool {
32690
32691
32692
32693 for {
32694 val := v.AuxInt
32695 v.reset(OpARM64MOVDconst)
32696 v.AuxInt = val
32697 return true
32698 }
32699 }
32700 func rewriteValueARM64_OpConst64F_0(v *Value) bool {
32701
32702
32703
32704 for {
32705 val := v.AuxInt
32706 v.reset(OpARM64FMOVDconst)
32707 v.AuxInt = val
32708 return true
32709 }
32710 }
32711 func rewriteValueARM64_OpConst8_0(v *Value) bool {
32712
32713
32714
32715 for {
32716 val := v.AuxInt
32717 v.reset(OpARM64MOVDconst)
32718 v.AuxInt = val
32719 return true
32720 }
32721 }
32722 func rewriteValueARM64_OpConstBool_0(v *Value) bool {
32723
32724
32725
32726 for {
32727 b := v.AuxInt
32728 v.reset(OpARM64MOVDconst)
32729 v.AuxInt = b
32730 return true
32731 }
32732 }
32733 func rewriteValueARM64_OpConstNil_0(v *Value) bool {
32734
32735
32736
32737 for {
32738 v.reset(OpARM64MOVDconst)
32739 v.AuxInt = 0
32740 return true
32741 }
32742 }
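// The Ctz lowerings count trailing zeros as CLZ of the bit-reversed value (RBIT/RBITW).
// For 8- and 16-bit inputs a bit just above the operand width (0x100 / 0x10000) is
// OR'd in first so that a zero input yields 8 or 16 rather than 32.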
32743 func rewriteValueARM64_OpCtz16_0(v *Value) bool {
32744 b := v.Block
32745 typ := &b.Func.Config.Types
32746 // match: (Ctz16 <t> x)
32747 // cond:
32748 // result: (CLZW <t> (RBITW <typ.UInt32> (ORconst <typ.UInt32> [0x10000] x)))
32749 for {
32750 t := v.Type
32751 x := v.Args[0]
32752 v.reset(OpARM64CLZW)
32753 v.Type = t
32754 v0 := b.NewValue0(v.Pos, OpARM64RBITW, typ.UInt32)
32755 v1 := b.NewValue0(v.Pos, OpARM64ORconst, typ.UInt32)
32756 v1.AuxInt = 0x10000
32757 v1.AddArg(x)
32758 v0.AddArg(v1)
32759 v.AddArg(v0)
32760 return true
32761 }
32762 }
32763 func rewriteValueARM64_OpCtz16NonZero_0(v *Value) bool {
32764
32765
32766
32767 for {
32768 x := v.Args[0]
32769 v.reset(OpCtz32)
32770 v.AddArg(x)
32771 return true
32772 }
32773 }
32774 func rewriteValueARM64_OpCtz32_0(v *Value) bool {
32775 b := v.Block
32776
32777
32778
32779 for {
32780 t := v.Type
32781 x := v.Args[0]
32782 v.reset(OpARM64CLZW)
32783 v0 := b.NewValue0(v.Pos, OpARM64RBITW, t)
32784 v0.AddArg(x)
32785 v.AddArg(v0)
32786 return true
32787 }
32788 }
32789 func rewriteValueARM64_OpCtz32NonZero_0(v *Value) bool {
32790
32791
32792
32793 for {
32794 x := v.Args[0]
32795 v.reset(OpCtz32)
32796 v.AddArg(x)
32797 return true
32798 }
32799 }
32800 func rewriteValueARM64_OpCtz64_0(v *Value) bool {
32801 b := v.Block
32802
32803
32804
32805 for {
32806 t := v.Type
32807 x := v.Args[0]
32808 v.reset(OpARM64CLZ)
32809 v0 := b.NewValue0(v.Pos, OpARM64RBIT, t)
32810 v0.AddArg(x)
32811 v.AddArg(v0)
32812 return true
32813 }
32814 }
32815 func rewriteValueARM64_OpCtz64NonZero_0(v *Value) bool {
32816
32817
32818
32819 for {
32820 x := v.Args[0]
32821 v.reset(OpCtz64)
32822 v.AddArg(x)
32823 return true
32824 }
32825 }
32826 func rewriteValueARM64_OpCtz8_0(v *Value) bool {
32827 b := v.Block
32828 typ := &b.Func.Config.Types
32829 // match: (Ctz8 <t> x)
32830 // cond:
32831 // result: (CLZW <t> (RBITW <typ.UInt32> (ORconst <typ.UInt32> [0x100] x)))
32832 for {
32833 t := v.Type
32834 x := v.Args[0]
32835 v.reset(OpARM64CLZW)
32836 v.Type = t
32837 v0 := b.NewValue0(v.Pos, OpARM64RBITW, typ.UInt32)
32838 v1 := b.NewValue0(v.Pos, OpARM64ORconst, typ.UInt32)
32839 v1.AuxInt = 0x100
32840 v1.AddArg(x)
32841 v0.AddArg(v1)
32842 v.AddArg(v0)
32843 return true
32844 }
32845 }
32846 func rewriteValueARM64_OpCtz8NonZero_0(v *Value) bool {
32847
32848
32849
32850 for {
32851 x := v.Args[0]
32852 v.reset(OpCtz32)
32853 v.AddArg(x)
32854 return true
32855 }
32856 }
32857 func rewriteValueARM64_OpCvt32Fto32_0(v *Value) bool {
32858
32859
32860
32861 for {
32862 x := v.Args[0]
32863 v.reset(OpARM64FCVTZSSW)
32864 v.AddArg(x)
32865 return true
32866 }
32867 }
32868 func rewriteValueARM64_OpCvt32Fto32U_0(v *Value) bool {
32869
32870
32871
32872 for {
32873 x := v.Args[0]
32874 v.reset(OpARM64FCVTZUSW)
32875 v.AddArg(x)
32876 return true
32877 }
32878 }
32879 func rewriteValueARM64_OpCvt32Fto64_0(v *Value) bool {
32880
32881
32882
32883 for {
32884 x := v.Args[0]
32885 v.reset(OpARM64FCVTZSS)
32886 v.AddArg(x)
32887 return true
32888 }
32889 }
32890 func rewriteValueARM64_OpCvt32Fto64F_0(v *Value) bool {
32891
32892
32893
32894 for {
32895 x := v.Args[0]
32896 v.reset(OpARM64FCVTSD)
32897 v.AddArg(x)
32898 return true
32899 }
32900 }
32901 func rewriteValueARM64_OpCvt32Fto64U_0(v *Value) bool {
32902
32903
32904
32905 for {
32906 x := v.Args[0]
32907 v.reset(OpARM64FCVTZUS)
32908 v.AddArg(x)
32909 return true
32910 }
32911 }
32912 func rewriteValueARM64_OpCvt32Uto32F_0(v *Value) bool {
32913
32914
32915
32916 for {
32917 x := v.Args[0]
32918 v.reset(OpARM64UCVTFWS)
32919 v.AddArg(x)
32920 return true
32921 }
32922 }
32923 func rewriteValueARM64_OpCvt32Uto64F_0(v *Value) bool {
32924
32925
32926
32927 for {
32928 x := v.Args[0]
32929 v.reset(OpARM64UCVTFWD)
32930 v.AddArg(x)
32931 return true
32932 }
32933 }
32934 func rewriteValueARM64_OpCvt32to32F_0(v *Value) bool {
32935
32936
32937
32938 for {
32939 x := v.Args[0]
32940 v.reset(OpARM64SCVTFWS)
32941 v.AddArg(x)
32942 return true
32943 }
32944 }
32945 func rewriteValueARM64_OpCvt32to64F_0(v *Value) bool {
32946
32947
32948
32949 for {
32950 x := v.Args[0]
32951 v.reset(OpARM64SCVTFWD)
32952 v.AddArg(x)
32953 return true
32954 }
32955 }
32956 func rewriteValueARM64_OpCvt64Fto32_0(v *Value) bool {
32957
32958
32959
32960 for {
32961 x := v.Args[0]
32962 v.reset(OpARM64FCVTZSDW)
32963 v.AddArg(x)
32964 return true
32965 }
32966 }
32967 func rewriteValueARM64_OpCvt64Fto32F_0(v *Value) bool {
32968
32969
32970
32971 for {
32972 x := v.Args[0]
32973 v.reset(OpARM64FCVTDS)
32974 v.AddArg(x)
32975 return true
32976 }
32977 }
32978 func rewriteValueARM64_OpCvt64Fto32U_0(v *Value) bool {
32979
32980
32981
32982 for {
32983 x := v.Args[0]
32984 v.reset(OpARM64FCVTZUDW)
32985 v.AddArg(x)
32986 return true
32987 }
32988 }
32989 func rewriteValueARM64_OpCvt64Fto64_0(v *Value) bool {
32990
32991
32992
32993 for {
32994 x := v.Args[0]
32995 v.reset(OpARM64FCVTZSD)
32996 v.AddArg(x)
32997 return true
32998 }
32999 }
33000 func rewriteValueARM64_OpCvt64Fto64U_0(v *Value) bool {
33001
33002
33003
33004 for {
33005 x := v.Args[0]
33006 v.reset(OpARM64FCVTZUD)
33007 v.AddArg(x)
33008 return true
33009 }
33010 }
33011 func rewriteValueARM64_OpCvt64Uto32F_0(v *Value) bool {
33012
33013
33014
33015 for {
33016 x := v.Args[0]
33017 v.reset(OpARM64UCVTFS)
33018 v.AddArg(x)
33019 return true
33020 }
33021 }
33022 func rewriteValueARM64_OpCvt64Uto64F_0(v *Value) bool {
33023
33024
33025
33026 for {
33027 x := v.Args[0]
33028 v.reset(OpARM64UCVTFD)
33029 v.AddArg(x)
33030 return true
33031 }
33032 }
33033 func rewriteValueARM64_OpCvt64to32F_0(v *Value) bool {
33034
33035
33036
33037 for {
33038 x := v.Args[0]
33039 v.reset(OpARM64SCVTFS)
33040 v.AddArg(x)
33041 return true
33042 }
33043 }
33044 func rewriteValueARM64_OpCvt64to64F_0(v *Value) bool {
33045
33046
33047
33048 for {
33049 x := v.Args[0]
33050 v.reset(OpARM64SCVTFD)
33051 v.AddArg(x)
33052 return true
33053 }
33054 }
33055 func rewriteValueARM64_OpDiv16_0(v *Value) bool {
33056 b := v.Block
33057 typ := &b.Func.Config.Types
33058
33059
33060
33061 for {
33062 y := v.Args[1]
33063 x := v.Args[0]
33064 v.reset(OpARM64DIVW)
33065 v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
33066 v0.AddArg(x)
33067 v.AddArg(v0)
33068 v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
33069 v1.AddArg(y)
33070 v.AddArg(v1)
33071 return true
33072 }
33073 }
33074 func rewriteValueARM64_OpDiv16u_0(v *Value) bool {
33075 b := v.Block
33076 typ := &b.Func.Config.Types
33077
33078
33079
33080 for {
33081 y := v.Args[1]
33082 x := v.Args[0]
33083 v.reset(OpARM64UDIVW)
33084 v0 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
33085 v0.AddArg(x)
33086 v.AddArg(v0)
33087 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
33088 v1.AddArg(y)
33089 v.AddArg(v1)
33090 return true
33091 }
33092 }
33093 func rewriteValueARM64_OpDiv32_0(v *Value) bool {
33094
33095
33096
33097 for {
33098 y := v.Args[1]
33099 x := v.Args[0]
33100 v.reset(OpARM64DIVW)
33101 v.AddArg(x)
33102 v.AddArg(y)
33103 return true
33104 }
33105 }
33106 func rewriteValueARM64_OpDiv32F_0(v *Value) bool {
33107
33108
33109
33110 for {
33111 y := v.Args[1]
33112 x := v.Args[0]
33113 v.reset(OpARM64FDIVS)
33114 v.AddArg(x)
33115 v.AddArg(y)
33116 return true
33117 }
33118 }
33119 func rewriteValueARM64_OpDiv32u_0(v *Value) bool {
33120
33121
33122
33123 for {
33124 y := v.Args[1]
33125 x := v.Args[0]
33126 v.reset(OpARM64UDIVW)
33127 v.AddArg(x)
33128 v.AddArg(y)
33129 return true
33130 }
33131 }
33132 func rewriteValueARM64_OpDiv64_0(v *Value) bool {
33133
33134
33135
33136 for {
33137 y := v.Args[1]
33138 x := v.Args[0]
33139 v.reset(OpARM64DIV)
33140 v.AddArg(x)
33141 v.AddArg(y)
33142 return true
33143 }
33144 }
33145 func rewriteValueARM64_OpDiv64F_0(v *Value) bool {
33146
33147
33148
33149 for {
33150 y := v.Args[1]
33151 x := v.Args[0]
33152 v.reset(OpARM64FDIVD)
33153 v.AddArg(x)
33154 v.AddArg(y)
33155 return true
33156 }
33157 }
33158 func rewriteValueARM64_OpDiv64u_0(v *Value) bool {
33159
33160
33161
33162 for {
33163 y := v.Args[1]
33164 x := v.Args[0]
33165 v.reset(OpARM64UDIV)
33166 v.AddArg(x)
33167 v.AddArg(y)
33168 return true
33169 }
33170 }
33171 func rewriteValueARM64_OpDiv8_0(v *Value) bool {
33172 b := v.Block
33173 typ := &b.Func.Config.Types
33174
33175
33176
33177 for {
33178 y := v.Args[1]
33179 x := v.Args[0]
33180 v.reset(OpARM64DIVW)
33181 v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
33182 v0.AddArg(x)
33183 v.AddArg(v0)
33184 v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
33185 v1.AddArg(y)
33186 v.AddArg(v1)
33187 return true
33188 }
33189 }
33190 func rewriteValueARM64_OpDiv8u_0(v *Value) bool {
33191 b := v.Block
33192 typ := &b.Func.Config.Types
33193
33194
33195
33196 for {
33197 y := v.Args[1]
33198 x := v.Args[0]
33199 v.reset(OpARM64UDIVW)
33200 v0 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
33201 v0.AddArg(x)
33202 v.AddArg(v0)
33203 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
33204 v1.AddArg(y)
33205 v.AddArg(v1)
33206 return true
33207 }
33208 }
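// Comparison lowerings (Eq*, Geq*, Greater*, Leq*, Less*) emit a flag-setting
// CMP/CMPW/FCMPS/FCMPD and wrap it in the matching ARM64 condition op; 8- and
// 16-bit operands are sign- or zero-extended to 32 bits first.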
33209 func rewriteValueARM64_OpEq16_0(v *Value) bool {
33210 b := v.Block
33211 typ := &b.Func.Config.Types
33212
33213
33214
33215 for {
33216 y := v.Args[1]
33217 x := v.Args[0]
33218 v.reset(OpARM64Equal)
33219 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
33220 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
33221 v1.AddArg(x)
33222 v0.AddArg(v1)
33223 v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
33224 v2.AddArg(y)
33225 v0.AddArg(v2)
33226 v.AddArg(v0)
33227 return true
33228 }
33229 }
33230 func rewriteValueARM64_OpEq32_0(v *Value) bool {
33231 b := v.Block
33232
33233
33234
33235 for {
33236 y := v.Args[1]
33237 x := v.Args[0]
33238 v.reset(OpARM64Equal)
33239 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
33240 v0.AddArg(x)
33241 v0.AddArg(y)
33242 v.AddArg(v0)
33243 return true
33244 }
33245 }
33246 func rewriteValueARM64_OpEq32F_0(v *Value) bool {
33247 b := v.Block
33248
33249
33250
33251 for {
33252 y := v.Args[1]
33253 x := v.Args[0]
33254 v.reset(OpARM64Equal)
33255 v0 := b.NewValue0(v.Pos, OpARM64FCMPS, types.TypeFlags)
33256 v0.AddArg(x)
33257 v0.AddArg(y)
33258 v.AddArg(v0)
33259 return true
33260 }
33261 }
33262 func rewriteValueARM64_OpEq64_0(v *Value) bool {
33263 b := v.Block
33264
33265
33266
33267 for {
33268 y := v.Args[1]
33269 x := v.Args[0]
33270 v.reset(OpARM64Equal)
33271 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
33272 v0.AddArg(x)
33273 v0.AddArg(y)
33274 v.AddArg(v0)
33275 return true
33276 }
33277 }
33278 func rewriteValueARM64_OpEq64F_0(v *Value) bool {
33279 b := v.Block
33280
33281
33282
33283 for {
33284 y := v.Args[1]
33285 x := v.Args[0]
33286 v.reset(OpARM64Equal)
33287 v0 := b.NewValue0(v.Pos, OpARM64FCMPD, types.TypeFlags)
33288 v0.AddArg(x)
33289 v0.AddArg(y)
33290 v.AddArg(v0)
33291 return true
33292 }
33293 }
33294 func rewriteValueARM64_OpEq8_0(v *Value) bool {
33295 b := v.Block
33296 typ := &b.Func.Config.Types
33297
33298
33299
33300 for {
33301 y := v.Args[1]
33302 x := v.Args[0]
33303 v.reset(OpARM64Equal)
33304 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
33305 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
33306 v1.AddArg(x)
33307 v0.AddArg(v1)
33308 v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
33309 v2.AddArg(y)
33310 v0.AddArg(v2)
33311 v.AddArg(v0)
33312 return true
33313 }
33314 }
33315 func rewriteValueARM64_OpEqB_0(v *Value) bool {
33316 b := v.Block
33317 typ := &b.Func.Config.Types
33318 // match: (EqB x y)
33319 // cond:
33320 // result: (XOR (MOVDconst [1]) (XOR <typ.Bool> x y))
33321 for {
33322 y := v.Args[1]
33323 x := v.Args[0]
33324 v.reset(OpARM64XOR)
33325 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
33326 v0.AuxInt = 1
33327 v.AddArg(v0)
33328 v1 := b.NewValue0(v.Pos, OpARM64XOR, typ.Bool)
33329 v1.AddArg(x)
33330 v1.AddArg(y)
33331 v.AddArg(v1)
33332 return true
33333 }
33334 }
33335 func rewriteValueARM64_OpEqPtr_0(v *Value) bool {
33336 b := v.Block
33337
33338
33339
33340 for {
33341 y := v.Args[1]
33342 x := v.Args[0]
33343 v.reset(OpARM64Equal)
33344 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
33345 v0.AddArg(x)
33346 v0.AddArg(y)
33347 v.AddArg(v0)
33348 return true
33349 }
33350 }
33351 func rewriteValueARM64_OpFloor_0(v *Value) bool {
33352
33353
33354
33355 for {
33356 x := v.Args[0]
33357 v.reset(OpARM64FRINTMD)
33358 v.AddArg(x)
33359 return true
33360 }
33361 }
33362 func rewriteValueARM64_OpGeq16_0(v *Value) bool {
33363 b := v.Block
33364 typ := &b.Func.Config.Types
33365
33366
33367
33368 for {
33369 y := v.Args[1]
33370 x := v.Args[0]
33371 v.reset(OpARM64GreaterEqual)
33372 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
33373 v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
33374 v1.AddArg(x)
33375 v0.AddArg(v1)
33376 v2 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
33377 v2.AddArg(y)
33378 v0.AddArg(v2)
33379 v.AddArg(v0)
33380 return true
33381 }
33382 }
33383 func rewriteValueARM64_OpGeq16U_0(v *Value) bool {
33384 b := v.Block
33385 typ := &b.Func.Config.Types
33386
33387
33388
33389 for {
33390 y := v.Args[1]
33391 x := v.Args[0]
33392 v.reset(OpARM64GreaterEqualU)
33393 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
33394 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
33395 v1.AddArg(x)
33396 v0.AddArg(v1)
33397 v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
33398 v2.AddArg(y)
33399 v0.AddArg(v2)
33400 v.AddArg(v0)
33401 return true
33402 }
33403 }
33404 func rewriteValueARM64_OpGeq32_0(v *Value) bool {
33405 b := v.Block
33406
33407
33408
33409 for {
33410 y := v.Args[1]
33411 x := v.Args[0]
33412 v.reset(OpARM64GreaterEqual)
33413 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
33414 v0.AddArg(x)
33415 v0.AddArg(y)
33416 v.AddArg(v0)
33417 return true
33418 }
33419 }
33420 func rewriteValueARM64_OpGeq32F_0(v *Value) bool {
33421 b := v.Block
33422
33423
33424
33425 for {
33426 y := v.Args[1]
33427 x := v.Args[0]
33428 v.reset(OpARM64GreaterEqualF)
33429 v0 := b.NewValue0(v.Pos, OpARM64FCMPS, types.TypeFlags)
33430 v0.AddArg(x)
33431 v0.AddArg(y)
33432 v.AddArg(v0)
33433 return true
33434 }
33435 }
33436 func rewriteValueARM64_OpGeq32U_0(v *Value) bool {
33437 b := v.Block
33438
33439
33440
33441 for {
33442 y := v.Args[1]
33443 x := v.Args[0]
33444 v.reset(OpARM64GreaterEqualU)
33445 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
33446 v0.AddArg(x)
33447 v0.AddArg(y)
33448 v.AddArg(v0)
33449 return true
33450 }
33451 }
33452 func rewriteValueARM64_OpGeq64_0(v *Value) bool {
33453 b := v.Block
33454
33455
33456
33457 for {
33458 y := v.Args[1]
33459 x := v.Args[0]
33460 v.reset(OpARM64GreaterEqual)
33461 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
33462 v0.AddArg(x)
33463 v0.AddArg(y)
33464 v.AddArg(v0)
33465 return true
33466 }
33467 }
33468 func rewriteValueARM64_OpGeq64F_0(v *Value) bool {
33469 b := v.Block
33470
33471
33472
33473 for {
33474 y := v.Args[1]
33475 x := v.Args[0]
33476 v.reset(OpARM64GreaterEqualF)
33477 v0 := b.NewValue0(v.Pos, OpARM64FCMPD, types.TypeFlags)
33478 v0.AddArg(x)
33479 v0.AddArg(y)
33480 v.AddArg(v0)
33481 return true
33482 }
33483 }
33484 func rewriteValueARM64_OpGeq64U_0(v *Value) bool {
33485 b := v.Block
33486
33487
33488
33489 for {
33490 y := v.Args[1]
33491 x := v.Args[0]
33492 v.reset(OpARM64GreaterEqualU)
33493 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
33494 v0.AddArg(x)
33495 v0.AddArg(y)
33496 v.AddArg(v0)
33497 return true
33498 }
33499 }
33500 func rewriteValueARM64_OpGeq8_0(v *Value) bool {
33501 b := v.Block
33502 typ := &b.Func.Config.Types
33503
33504
33505
33506 for {
33507 y := v.Args[1]
33508 x := v.Args[0]
33509 v.reset(OpARM64GreaterEqual)
33510 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
33511 v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
33512 v1.AddArg(x)
33513 v0.AddArg(v1)
33514 v2 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
33515 v2.AddArg(y)
33516 v0.AddArg(v2)
33517 v.AddArg(v0)
33518 return true
33519 }
33520 }
33521 func rewriteValueARM64_OpGeq8U_0(v *Value) bool {
33522 b := v.Block
33523 typ := &b.Func.Config.Types
33524
33525
33526
33527 for {
33528 y := v.Args[1]
33529 x := v.Args[0]
33530 v.reset(OpARM64GreaterEqualU)
33531 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
33532 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
33533 v1.AddArg(x)
33534 v0.AddArg(v1)
33535 v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
33536 v2.AddArg(y)
33537 v0.AddArg(v2)
33538 v.AddArg(v0)
33539 return true
33540 }
33541 }
33542 func rewriteValueARM64_OpGetCallerPC_0(v *Value) bool {
33543
33544
33545
33546 for {
33547 v.reset(OpARM64LoweredGetCallerPC)
33548 return true
33549 }
33550 }
33551 func rewriteValueARM64_OpGetCallerSP_0(v *Value) bool {
33552
33553
33554
33555 for {
33556 v.reset(OpARM64LoweredGetCallerSP)
33557 return true
33558 }
33559 }
33560 func rewriteValueARM64_OpGetClosurePtr_0(v *Value) bool {
33561
33562
33563
33564 for {
33565 v.reset(OpARM64LoweredGetClosurePtr)
33566 return true
33567 }
33568 }
33569 func rewriteValueARM64_OpGreater16_0(v *Value) bool {
33570 b := v.Block
33571 typ := &b.Func.Config.Types
33572
33573
33574
33575 for {
33576 y := v.Args[1]
33577 x := v.Args[0]
33578 v.reset(OpARM64GreaterThan)
33579 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
33580 v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
33581 v1.AddArg(x)
33582 v0.AddArg(v1)
33583 v2 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
33584 v2.AddArg(y)
33585 v0.AddArg(v2)
33586 v.AddArg(v0)
33587 return true
33588 }
33589 }
33590 func rewriteValueARM64_OpGreater16U_0(v *Value) bool {
33591 b := v.Block
33592 typ := &b.Func.Config.Types
33593
33594
33595
33596 for {
33597 y := v.Args[1]
33598 x := v.Args[0]
33599 v.reset(OpARM64GreaterThanU)
33600 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
33601 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
33602 v1.AddArg(x)
33603 v0.AddArg(v1)
33604 v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
33605 v2.AddArg(y)
33606 v0.AddArg(v2)
33607 v.AddArg(v0)
33608 return true
33609 }
33610 }
33611 func rewriteValueARM64_OpGreater32_0(v *Value) bool {
33612 b := v.Block
33613
33614
33615
33616 for {
33617 y := v.Args[1]
33618 x := v.Args[0]
33619 v.reset(OpARM64GreaterThan)
33620 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
33621 v0.AddArg(x)
33622 v0.AddArg(y)
33623 v.AddArg(v0)
33624 return true
33625 }
33626 }
33627 func rewriteValueARM64_OpGreater32F_0(v *Value) bool {
33628 b := v.Block
33629
33630
33631
33632 for {
33633 y := v.Args[1]
33634 x := v.Args[0]
33635 v.reset(OpARM64GreaterThanF)
33636 v0 := b.NewValue0(v.Pos, OpARM64FCMPS, types.TypeFlags)
33637 v0.AddArg(x)
33638 v0.AddArg(y)
33639 v.AddArg(v0)
33640 return true
33641 }
33642 }
33643 func rewriteValueARM64_OpGreater32U_0(v *Value) bool {
33644 b := v.Block
33645
33646
33647
33648 for {
33649 y := v.Args[1]
33650 x := v.Args[0]
33651 v.reset(OpARM64GreaterThanU)
33652 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
33653 v0.AddArg(x)
33654 v0.AddArg(y)
33655 v.AddArg(v0)
33656 return true
33657 }
33658 }
33659 func rewriteValueARM64_OpGreater64_0(v *Value) bool {
33660 b := v.Block
33661
33662
33663
33664 for {
33665 y := v.Args[1]
33666 x := v.Args[0]
33667 v.reset(OpARM64GreaterThan)
33668 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
33669 v0.AddArg(x)
33670 v0.AddArg(y)
33671 v.AddArg(v0)
33672 return true
33673 }
33674 }
33675 func rewriteValueARM64_OpGreater64F_0(v *Value) bool {
33676 b := v.Block
33677
33678
33679
33680 for {
33681 y := v.Args[1]
33682 x := v.Args[0]
33683 v.reset(OpARM64GreaterThanF)
33684 v0 := b.NewValue0(v.Pos, OpARM64FCMPD, types.TypeFlags)
33685 v0.AddArg(x)
33686 v0.AddArg(y)
33687 v.AddArg(v0)
33688 return true
33689 }
33690 }
33691 func rewriteValueARM64_OpGreater64U_0(v *Value) bool {
33692 b := v.Block
33693
33694
33695
33696 for {
33697 y := v.Args[1]
33698 x := v.Args[0]
33699 v.reset(OpARM64GreaterThanU)
33700 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
33701 v0.AddArg(x)
33702 v0.AddArg(y)
33703 v.AddArg(v0)
33704 return true
33705 }
33706 }
33707 func rewriteValueARM64_OpGreater8_0(v *Value) bool {
33708 b := v.Block
33709 typ := &b.Func.Config.Types
33710
33711
33712
33713 for {
33714 y := v.Args[1]
33715 x := v.Args[0]
33716 v.reset(OpARM64GreaterThan)
33717 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
33718 v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
33719 v1.AddArg(x)
33720 v0.AddArg(v1)
33721 v2 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
33722 v2.AddArg(y)
33723 v0.AddArg(v2)
33724 v.AddArg(v0)
33725 return true
33726 }
33727 }
33728 func rewriteValueARM64_OpGreater8U_0(v *Value) bool {
33729 b := v.Block
33730 typ := &b.Func.Config.Types
33731
33732
33733
33734 for {
33735 y := v.Args[1]
33736 x := v.Args[0]
33737 v.reset(OpARM64GreaterThanU)
33738 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
33739 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
33740 v1.AddArg(x)
33741 v0.AddArg(v1)
33742 v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
33743 v2.AddArg(y)
33744 v0.AddArg(v2)
33745 v.AddArg(v0)
33746 return true
33747 }
33748 }
33749 func rewriteValueARM64_OpHmul32_0(v *Value) bool {
33750 b := v.Block
33751 typ := &b.Func.Config.Types
33752 // match: (Hmul32 x y)
33753 // cond:
33754 // result: (SRAconst (MULL <typ.Int64> x y) [32])
33755 for {
33756 y := v.Args[1]
33757 x := v.Args[0]
33758 v.reset(OpARM64SRAconst)
33759 v.AuxInt = 32
33760 v0 := b.NewValue0(v.Pos, OpARM64MULL, typ.Int64)
33761 v0.AddArg(x)
33762 v0.AddArg(y)
33763 v.AddArg(v0)
33764 return true
33765 }
33766 }
33767 func rewriteValueARM64_OpHmul32u_0(v *Value) bool {
33768 b := v.Block
33769 typ := &b.Func.Config.Types
33770
33771
33772
33773 for {
33774 y := v.Args[1]
33775 x := v.Args[0]
33776 v.reset(OpARM64SRAconst)
33777 v.AuxInt = 32
33778 v0 := b.NewValue0(v.Pos, OpARM64UMULL, typ.UInt64)
33779 v0.AddArg(x)
33780 v0.AddArg(y)
33781 v.AddArg(v0)
33782 return true
33783 }
33784 }
33785 func rewriteValueARM64_OpHmul64_0(v *Value) bool {
33786
33787
33788
33789 for {
33790 y := v.Args[1]
33791 x := v.Args[0]
33792 v.reset(OpARM64MULH)
33793 v.AddArg(x)
33794 v.AddArg(y)
33795 return true
33796 }
33797 }
33798 func rewriteValueARM64_OpHmul64u_0(v *Value) bool {
33799
33800
33801
33802 for {
33803 y := v.Args[1]
33804 x := v.Args[0]
33805 v.reset(OpARM64UMULH)
33806 v.AddArg(x)
33807 v.AddArg(y)
33808 return true
33809 }
33810 }
33811 func rewriteValueARM64_OpInterCall_0(v *Value) bool {
33812
33813
33814
33815 for {
33816 argwid := v.AuxInt
33817 mem := v.Args[1]
33818 entry := v.Args[0]
33819 v.reset(OpARM64CALLinter)
33820 v.AuxInt = argwid
33821 v.AddArg(entry)
33822 v.AddArg(mem)
33823 return true
33824 }
33825 }
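// IsInBounds is an unsigned idx < len check (LessThanU of CMP idx len);
// IsSliceInBounds below uses unsigned idx <= len (LessEqualU).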
33826 func rewriteValueARM64_OpIsInBounds_0(v *Value) bool {
33827 b := v.Block
33828
33829
33830
33831 for {
33832 len := v.Args[1]
33833 idx := v.Args[0]
33834 v.reset(OpARM64LessThanU)
33835 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
33836 v0.AddArg(idx)
33837 v0.AddArg(len)
33838 v.AddArg(v0)
33839 return true
33840 }
33841 }
33842 func rewriteValueARM64_OpIsNonNil_0(v *Value) bool {
33843 b := v.Block
33844
33845
33846
33847 for {
33848 ptr := v.Args[0]
33849 v.reset(OpARM64NotEqual)
33850 v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
33851 v0.AuxInt = 0
33852 v0.AddArg(ptr)
33853 v.AddArg(v0)
33854 return true
33855 }
33856 }
33857 func rewriteValueARM64_OpIsSliceInBounds_0(v *Value) bool {
33858 b := v.Block
33859
33860
33861
33862 for {
33863 len := v.Args[1]
33864 idx := v.Args[0]
33865 v.reset(OpARM64LessEqualU)
33866 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
33867 v0.AddArg(idx)
33868 v0.AddArg(len)
33869 v.AddArg(v0)
33870 return true
33871 }
33872 }
33873 func rewriteValueARM64_OpLeq16_0(v *Value) bool {
33874 b := v.Block
33875 typ := &b.Func.Config.Types
33876
33877
33878
33879 for {
33880 y := v.Args[1]
33881 x := v.Args[0]
33882 v.reset(OpARM64LessEqual)
33883 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
33884 v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
33885 v1.AddArg(x)
33886 v0.AddArg(v1)
33887 v2 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
33888 v2.AddArg(y)
33889 v0.AddArg(v2)
33890 v.AddArg(v0)
33891 return true
33892 }
33893 }
33894 func rewriteValueARM64_OpLeq16U_0(v *Value) bool {
33895 b := v.Block
33896 typ := &b.Func.Config.Types
33897
33898
33899
33900 for {
33901 y := v.Args[1]
33902 x := v.Args[0]
33903 v.reset(OpARM64LessEqualU)
33904 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
33905 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
33906 v1.AddArg(x)
33907 v0.AddArg(v1)
33908 v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
33909 v2.AddArg(y)
33910 v0.AddArg(v2)
33911 v.AddArg(v0)
33912 return true
33913 }
33914 }
33915 func rewriteValueARM64_OpLeq32_0(v *Value) bool {
33916 b := v.Block
33917
33918
33919
33920 for {
33921 y := v.Args[1]
33922 x := v.Args[0]
33923 v.reset(OpARM64LessEqual)
33924 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
33925 v0.AddArg(x)
33926 v0.AddArg(y)
33927 v.AddArg(v0)
33928 return true
33929 }
33930 }
33931 func rewriteValueARM64_OpLeq32F_0(v *Value) bool {
33932 b := v.Block
33933
33934
33935
33936 for {
33937 y := v.Args[1]
33938 x := v.Args[0]
33939 v.reset(OpARM64LessEqualF)
33940 v0 := b.NewValue0(v.Pos, OpARM64FCMPS, types.TypeFlags)
33941 v0.AddArg(x)
33942 v0.AddArg(y)
33943 v.AddArg(v0)
33944 return true
33945 }
33946 }
33947 func rewriteValueARM64_OpLeq32U_0(v *Value) bool {
33948 b := v.Block
33949
33950
33951
33952 for {
33953 y := v.Args[1]
33954 x := v.Args[0]
33955 v.reset(OpARM64LessEqualU)
33956 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
33957 v0.AddArg(x)
33958 v0.AddArg(y)
33959 v.AddArg(v0)
33960 return true
33961 }
33962 }
33963 func rewriteValueARM64_OpLeq64_0(v *Value) bool {
33964 b := v.Block
33965
33966
33967
33968 for {
33969 y := v.Args[1]
33970 x := v.Args[0]
33971 v.reset(OpARM64LessEqual)
33972 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
33973 v0.AddArg(x)
33974 v0.AddArg(y)
33975 v.AddArg(v0)
33976 return true
33977 }
33978 }
33979 func rewriteValueARM64_OpLeq64F_0(v *Value) bool {
33980 b := v.Block
33981
33982
33983
33984 for {
33985 y := v.Args[1]
33986 x := v.Args[0]
33987 v.reset(OpARM64LessEqualF)
33988 v0 := b.NewValue0(v.Pos, OpARM64FCMPD, types.TypeFlags)
33989 v0.AddArg(x)
33990 v0.AddArg(y)
33991 v.AddArg(v0)
33992 return true
33993 }
33994 }
33995 func rewriteValueARM64_OpLeq64U_0(v *Value) bool {
33996 b := v.Block
33997
33998
33999
34000 for {
34001 y := v.Args[1]
34002 x := v.Args[0]
34003 v.reset(OpARM64LessEqualU)
34004 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
34005 v0.AddArg(x)
34006 v0.AddArg(y)
34007 v.AddArg(v0)
34008 return true
34009 }
34010 }
34011 func rewriteValueARM64_OpLeq8_0(v *Value) bool {
34012 b := v.Block
34013 typ := &b.Func.Config.Types
34014
34015
34016
34017 for {
34018 y := v.Args[1]
34019 x := v.Args[0]
34020 v.reset(OpARM64LessEqual)
34021 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
34022 v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
34023 v1.AddArg(x)
34024 v0.AddArg(v1)
34025 v2 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
34026 v2.AddArg(y)
34027 v0.AddArg(v2)
34028 v.AddArg(v0)
34029 return true
34030 }
34031 }
34032 func rewriteValueARM64_OpLeq8U_0(v *Value) bool {
34033 b := v.Block
34034 typ := &b.Func.Config.Types
34035
34036
34037
34038 for {
34039 y := v.Args[1]
34040 x := v.Args[0]
34041 v.reset(OpARM64LessEqualU)
34042 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
34043 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
34044 v1.AddArg(x)
34045 v0.AddArg(v1)
34046 v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
34047 v2.AddArg(y)
34048 v0.AddArg(v2)
34049 v.AddArg(v0)
34050 return true
34051 }
34052 }
34053 func rewriteValueARM64_OpLess16_0(v *Value) bool {
34054 b := v.Block
34055 typ := &b.Func.Config.Types
34056
34057
34058
34059 for {
34060 y := v.Args[1]
34061 x := v.Args[0]
34062 v.reset(OpARM64LessThan)
34063 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
34064 v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
34065 v1.AddArg(x)
34066 v0.AddArg(v1)
34067 v2 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
34068 v2.AddArg(y)
34069 v0.AddArg(v2)
34070 v.AddArg(v0)
34071 return true
34072 }
34073 }
34074 func rewriteValueARM64_OpLess16U_0(v *Value) bool {
34075 b := v.Block
34076 typ := &b.Func.Config.Types
34077
34078
34079
34080 for {
34081 y := v.Args[1]
34082 x := v.Args[0]
34083 v.reset(OpARM64LessThanU)
34084 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
34085 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
34086 v1.AddArg(x)
34087 v0.AddArg(v1)
34088 v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
34089 v2.AddArg(y)
34090 v0.AddArg(v2)
34091 v.AddArg(v0)
34092 return true
34093 }
34094 }
34095 func rewriteValueARM64_OpLess32_0(v *Value) bool {
34096 b := v.Block
34097
34098
34099
34100 for {
34101 y := v.Args[1]
34102 x := v.Args[0]
34103 v.reset(OpARM64LessThan)
34104 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
34105 v0.AddArg(x)
34106 v0.AddArg(y)
34107 v.AddArg(v0)
34108 return true
34109 }
34110 }
34111 func rewriteValueARM64_OpLess32F_0(v *Value) bool {
34112 b := v.Block
34113
34114
34115
34116 for {
34117 y := v.Args[1]
34118 x := v.Args[0]
34119 v.reset(OpARM64LessThanF)
34120 v0 := b.NewValue0(v.Pos, OpARM64FCMPS, types.TypeFlags)
34121 v0.AddArg(x)
34122 v0.AddArg(y)
34123 v.AddArg(v0)
34124 return true
34125 }
34126 }
34127 func rewriteValueARM64_OpLess32U_0(v *Value) bool {
34128 b := v.Block
34129
34130
34131
34132 for {
34133 y := v.Args[1]
34134 x := v.Args[0]
34135 v.reset(OpARM64LessThanU)
34136 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
34137 v0.AddArg(x)
34138 v0.AddArg(y)
34139 v.AddArg(v0)
34140 return true
34141 }
34142 }
34143 func rewriteValueARM64_OpLess64_0(v *Value) bool {
34144 b := v.Block
34145
34146
34147
34148 for {
34149 y := v.Args[1]
34150 x := v.Args[0]
34151 v.reset(OpARM64LessThan)
34152 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
34153 v0.AddArg(x)
34154 v0.AddArg(y)
34155 v.AddArg(v0)
34156 return true
34157 }
34158 }
34159 func rewriteValueARM64_OpLess64F_0(v *Value) bool {
34160 b := v.Block
34161
34162
34163
34164 for {
34165 y := v.Args[1]
34166 x := v.Args[0]
34167 v.reset(OpARM64LessThanF)
34168 v0 := b.NewValue0(v.Pos, OpARM64FCMPD, types.TypeFlags)
34169 v0.AddArg(x)
34170 v0.AddArg(y)
34171 v.AddArg(v0)
34172 return true
34173 }
34174 }
34175 func rewriteValueARM64_OpLess64U_0(v *Value) bool {
34176 b := v.Block
34177
34178
34179
34180 for {
34181 y := v.Args[1]
34182 x := v.Args[0]
34183 v.reset(OpARM64LessThanU)
34184 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
34185 v0.AddArg(x)
34186 v0.AddArg(y)
34187 v.AddArg(v0)
34188 return true
34189 }
34190 }
34191 func rewriteValueARM64_OpLess8_0(v *Value) bool {
34192 b := v.Block
34193 typ := &b.Func.Config.Types
34194
34195
34196
34197 for {
34198 y := v.Args[1]
34199 x := v.Args[0]
34200 v.reset(OpARM64LessThan)
34201 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
34202 v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
34203 v1.AddArg(x)
34204 v0.AddArg(v1)
34205 v2 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
34206 v2.AddArg(y)
34207 v0.AddArg(v2)
34208 v.AddArg(v0)
34209 return true
34210 }
34211 }
34212 func rewriteValueARM64_OpLess8U_0(v *Value) bool {
34213 b := v.Block
34214 typ := &b.Func.Config.Types
34215
34216
34217
34218 for {
34219 y := v.Args[1]
34220 x := v.Args[0]
34221 v.reset(OpARM64LessThanU)
34222 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
34223 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
34224 v1.AddArg(x)
34225 v0.AddArg(v1)
34226 v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
34227 v2.AddArg(y)
34228 v0.AddArg(v2)
34229 v.AddArg(v0)
34230 return true
34231 }
34232 }
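// Load dispatches on the loaded type: booleans and unsigned integers use the
// zero-extending MOV*Uload ops, signed integers the sign-extending MOV*load ops,
// 64-bit integers and pointers MOVDload, and floats FMOVSload/FMOVDload.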
34233 func rewriteValueARM64_OpLoad_0(v *Value) bool {
34234
34235
34236
34237 for {
34238 t := v.Type
34239 mem := v.Args[1]
34240 ptr := v.Args[0]
34241 if !(t.IsBoolean()) {
34242 break
34243 }
34244 v.reset(OpARM64MOVBUload)
34245 v.AddArg(ptr)
34246 v.AddArg(mem)
34247 return true
34248 }
34249
34250
34251
34252 for {
34253 t := v.Type
34254 mem := v.Args[1]
34255 ptr := v.Args[0]
34256 if !(is8BitInt(t) && isSigned(t)) {
34257 break
34258 }
34259 v.reset(OpARM64MOVBload)
34260 v.AddArg(ptr)
34261 v.AddArg(mem)
34262 return true
34263 }
34264
34265
34266
34267 for {
34268 t := v.Type
34269 mem := v.Args[1]
34270 ptr := v.Args[0]
34271 if !(is8BitInt(t) && !isSigned(t)) {
34272 break
34273 }
34274 v.reset(OpARM64MOVBUload)
34275 v.AddArg(ptr)
34276 v.AddArg(mem)
34277 return true
34278 }
34279
34280
34281
34282 for {
34283 t := v.Type
34284 mem := v.Args[1]
34285 ptr := v.Args[0]
34286 if !(is16BitInt(t) && isSigned(t)) {
34287 break
34288 }
34289 v.reset(OpARM64MOVHload)
34290 v.AddArg(ptr)
34291 v.AddArg(mem)
34292 return true
34293 }
34294
34295
34296
34297 for {
34298 t := v.Type
34299 mem := v.Args[1]
34300 ptr := v.Args[0]
34301 if !(is16BitInt(t) && !isSigned(t)) {
34302 break
34303 }
34304 v.reset(OpARM64MOVHUload)
34305 v.AddArg(ptr)
34306 v.AddArg(mem)
34307 return true
34308 }
34309
34310
34311
34312 for {
34313 t := v.Type
34314 mem := v.Args[1]
34315 ptr := v.Args[0]
34316 if !(is32BitInt(t) && isSigned(t)) {
34317 break
34318 }
34319 v.reset(OpARM64MOVWload)
34320 v.AddArg(ptr)
34321 v.AddArg(mem)
34322 return true
34323 }
34324
34325
34326
34327 for {
34328 t := v.Type
34329 mem := v.Args[1]
34330 ptr := v.Args[0]
34331 if !(is32BitInt(t) && !isSigned(t)) {
34332 break
34333 }
34334 v.reset(OpARM64MOVWUload)
34335 v.AddArg(ptr)
34336 v.AddArg(mem)
34337 return true
34338 }
34339
34340
34341
34342 for {
34343 t := v.Type
34344 mem := v.Args[1]
34345 ptr := v.Args[0]
34346 if !(is64BitInt(t) || isPtr(t)) {
34347 break
34348 }
34349 v.reset(OpARM64MOVDload)
34350 v.AddArg(ptr)
34351 v.AddArg(mem)
34352 return true
34353 }
34354
34355
34356
34357 for {
34358 t := v.Type
34359 mem := v.Args[1]
34360 ptr := v.Args[0]
34361 if !(is32BitFloat(t)) {
34362 break
34363 }
34364 v.reset(OpARM64FMOVSload)
34365 v.AddArg(ptr)
34366 v.AddArg(mem)
34367 return true
34368 }
34369
34370
34371
34372 for {
34373 t := v.Type
34374 mem := v.Args[1]
34375 ptr := v.Args[0]
34376 if !(is64BitFloat(t)) {
34377 break
34378 }
34379 v.reset(OpARM64FMOVDload)
34380 v.AddArg(ptr)
34381 v.AddArg(mem)
34382 return true
34383 }
34384 return false
34385 }
34386 func rewriteValueARM64_OpLocalAddr_0(v *Value) bool {
34387
34388
34389
34390 for {
34391 sym := v.Aux
34392 _ = v.Args[1]
34393 base := v.Args[0]
34394 v.reset(OpARM64MOVDaddr)
34395 v.Aux = sym
34396 v.AddArg(base)
34397 return true
34398 }
34399 }
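// Variable shifts (Lsh*) are lowered to a CSEL guarded by CMPconst [64] on the
// zero-extended shift amount: the SLL result is selected when the amount is
// below 64, and a zero constant otherwise.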
34400 func rewriteValueARM64_OpLsh16x16_0(v *Value) bool {
34401 b := v.Block
34402 typ := &b.Func.Config.Types
34403 // match: (Lsh16x16 <t> x y)
34404 // cond:
34405 // result: (CSEL {OpARM64LessThanU} (SLL <t> x (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
34406 for {
34407 t := v.Type
34408 y := v.Args[1]
34409 x := v.Args[0]
34410 v.reset(OpARM64CSEL)
34411 v.Aux = OpARM64LessThanU
34412 v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
34413 v0.AddArg(x)
34414 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
34415 v1.AddArg(y)
34416 v0.AddArg(v1)
34417 v.AddArg(v0)
34418 v2 := b.NewValue0(v.Pos, OpConst64, t)
34419 v2.AuxInt = 0
34420 v.AddArg(v2)
34421 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
34422 v3.AuxInt = 64
34423 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
34424 v4.AddArg(y)
34425 v3.AddArg(v4)
34426 v.AddArg(v3)
34427 return true
34428 }
34429 }
34430 func rewriteValueARM64_OpLsh16x32_0(v *Value) bool {
34431 b := v.Block
34432 typ := &b.Func.Config.Types
34433
34434
34435
34436 for {
34437 t := v.Type
34438 y := v.Args[1]
34439 x := v.Args[0]
34440 v.reset(OpARM64CSEL)
34441 v.Aux = OpARM64LessThanU
34442 v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
34443 v0.AddArg(x)
34444 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
34445 v1.AddArg(y)
34446 v0.AddArg(v1)
34447 v.AddArg(v0)
34448 v2 := b.NewValue0(v.Pos, OpConst64, t)
34449 v2.AuxInt = 0
34450 v.AddArg(v2)
34451 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
34452 v3.AuxInt = 64
34453 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
34454 v4.AddArg(y)
34455 v3.AddArg(v4)
34456 v.AddArg(v3)
34457 return true
34458 }
34459 }
34460 func rewriteValueARM64_OpLsh16x64_0(v *Value) bool {
34461 b := v.Block
34462
34463
34464
34465 for {
34466 t := v.Type
34467 y := v.Args[1]
34468 x := v.Args[0]
34469 v.reset(OpARM64CSEL)
34470 v.Aux = OpARM64LessThanU
34471 v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
34472 v0.AddArg(x)
34473 v0.AddArg(y)
34474 v.AddArg(v0)
34475 v1 := b.NewValue0(v.Pos, OpConst64, t)
34476 v1.AuxInt = 0
34477 v.AddArg(v1)
34478 v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
34479 v2.AuxInt = 64
34480 v2.AddArg(y)
34481 v.AddArg(v2)
34482 return true
34483 }
34484 }
34485 func rewriteValueARM64_OpLsh16x8_0(v *Value) bool {
34486 b := v.Block
34487 typ := &b.Func.Config.Types
34488
34489
34490
34491 for {
34492 t := v.Type
34493 y := v.Args[1]
34494 x := v.Args[0]
34495 v.reset(OpARM64CSEL)
34496 v.Aux = OpARM64LessThanU
34497 v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
34498 v0.AddArg(x)
34499 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
34500 v1.AddArg(y)
34501 v0.AddArg(v1)
34502 v.AddArg(v0)
34503 v2 := b.NewValue0(v.Pos, OpConst64, t)
34504 v2.AuxInt = 0
34505 v.AddArg(v2)
34506 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
34507 v3.AuxInt = 64
34508 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
34509 v4.AddArg(y)
34510 v3.AddArg(v4)
34511 v.AddArg(v3)
34512 return true
34513 }
34514 }
34515 func rewriteValueARM64_OpLsh32x16_0(v *Value) bool {
34516 b := v.Block
34517 typ := &b.Func.Config.Types
34518
34519
34520
34521 for {
34522 t := v.Type
34523 y := v.Args[1]
34524 x := v.Args[0]
34525 v.reset(OpARM64CSEL)
34526 v.Aux = OpARM64LessThanU
34527 v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
34528 v0.AddArg(x)
34529 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
34530 v1.AddArg(y)
34531 v0.AddArg(v1)
34532 v.AddArg(v0)
34533 v2 := b.NewValue0(v.Pos, OpConst64, t)
34534 v2.AuxInt = 0
34535 v.AddArg(v2)
34536 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
34537 v3.AuxInt = 64
34538 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
34539 v4.AddArg(y)
34540 v3.AddArg(v4)
34541 v.AddArg(v3)
34542 return true
34543 }
34544 }
34545 func rewriteValueARM64_OpLsh32x32_0(v *Value) bool {
34546 b := v.Block
34547 typ := &b.Func.Config.Types
34548
34549
34550
34551 for {
34552 t := v.Type
34553 y := v.Args[1]
34554 x := v.Args[0]
34555 v.reset(OpARM64CSEL)
34556 v.Aux = OpARM64LessThanU
34557 v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
34558 v0.AddArg(x)
34559 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
34560 v1.AddArg(y)
34561 v0.AddArg(v1)
34562 v.AddArg(v0)
34563 v2 := b.NewValue0(v.Pos, OpConst64, t)
34564 v2.AuxInt = 0
34565 v.AddArg(v2)
34566 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
34567 v3.AuxInt = 64
34568 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
34569 v4.AddArg(y)
34570 v3.AddArg(v4)
34571 v.AddArg(v3)
34572 return true
34573 }
34574 }
34575 func rewriteValueARM64_OpLsh32x64_0(v *Value) bool {
34576 b := v.Block
34577
34578
34579
34580 for {
34581 t := v.Type
34582 y := v.Args[1]
34583 x := v.Args[0]
34584 v.reset(OpARM64CSEL)
34585 v.Aux = OpARM64LessThanU
34586 v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
34587 v0.AddArg(x)
34588 v0.AddArg(y)
34589 v.AddArg(v0)
34590 v1 := b.NewValue0(v.Pos, OpConst64, t)
34591 v1.AuxInt = 0
34592 v.AddArg(v1)
34593 v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
34594 v2.AuxInt = 64
34595 v2.AddArg(y)
34596 v.AddArg(v2)
34597 return true
34598 }
34599 }
34600 func rewriteValueARM64_OpLsh32x8_0(v *Value) bool {
34601 b := v.Block
34602 typ := &b.Func.Config.Types
34603
34604
34605
34606 for {
34607 t := v.Type
34608 y := v.Args[1]
34609 x := v.Args[0]
34610 v.reset(OpARM64CSEL)
34611 v.Aux = OpARM64LessThanU
34612 v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
34613 v0.AddArg(x)
34614 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
34615 v1.AddArg(y)
34616 v0.AddArg(v1)
34617 v.AddArg(v0)
34618 v2 := b.NewValue0(v.Pos, OpConst64, t)
34619 v2.AuxInt = 0
34620 v.AddArg(v2)
34621 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
34622 v3.AuxInt = 64
34623 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
34624 v4.AddArg(y)
34625 v3.AddArg(v4)
34626 v.AddArg(v3)
34627 return true
34628 }
34629 }
34630 func rewriteValueARM64_OpLsh64x16_0(v *Value) bool {
34631 b := v.Block
34632 typ := &b.Func.Config.Types
34633
34634
34635
34636 for {
34637 t := v.Type
34638 y := v.Args[1]
34639 x := v.Args[0]
34640 v.reset(OpARM64CSEL)
34641 v.Aux = OpARM64LessThanU
34642 v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
34643 v0.AddArg(x)
34644 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
34645 v1.AddArg(y)
34646 v0.AddArg(v1)
34647 v.AddArg(v0)
34648 v2 := b.NewValue0(v.Pos, OpConst64, t)
34649 v2.AuxInt = 0
34650 v.AddArg(v2)
34651 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
34652 v3.AuxInt = 64
34653 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
34654 v4.AddArg(y)
34655 v3.AddArg(v4)
34656 v.AddArg(v3)
34657 return true
34658 }
34659 }
34660 func rewriteValueARM64_OpLsh64x32_0(v *Value) bool {
34661 b := v.Block
34662 typ := &b.Func.Config.Types
34663
34664
34665
34666 for {
34667 t := v.Type
34668 y := v.Args[1]
34669 x := v.Args[0]
34670 v.reset(OpARM64CSEL)
34671 v.Aux = OpARM64LessThanU
34672 v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
34673 v0.AddArg(x)
34674 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
34675 v1.AddArg(y)
34676 v0.AddArg(v1)
34677 v.AddArg(v0)
34678 v2 := b.NewValue0(v.Pos, OpConst64, t)
34679 v2.AuxInt = 0
34680 v.AddArg(v2)
34681 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
34682 v3.AuxInt = 64
34683 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
34684 v4.AddArg(y)
34685 v3.AddArg(v4)
34686 v.AddArg(v3)
34687 return true
34688 }
34689 }
34690 func rewriteValueARM64_OpLsh64x64_0(v *Value) bool {
34691 b := v.Block
// match: (Lsh64x64 <t> x y)
// cond:
// result: (CSEL {OpARM64LessThanU} (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] y))
34695 for {
34696 t := v.Type
34697 y := v.Args[1]
34698 x := v.Args[0]
34699 v.reset(OpARM64CSEL)
34700 v.Aux = OpARM64LessThanU
34701 v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
34702 v0.AddArg(x)
34703 v0.AddArg(y)
34704 v.AddArg(v0)
34705 v1 := b.NewValue0(v.Pos, OpConst64, t)
34706 v1.AuxInt = 0
34707 v.AddArg(v1)
34708 v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
34709 v2.AuxInt = 64
34710 v2.AddArg(y)
34711 v.AddArg(v2)
34712 return true
34713 }
34714 }
34715 func rewriteValueARM64_OpLsh64x8_0(v *Value) bool {
34716 b := v.Block
34717 typ := &b.Func.Config.Types
34718
34719
34720
34721 for {
34722 t := v.Type
34723 y := v.Args[1]
34724 x := v.Args[0]
34725 v.reset(OpARM64CSEL)
34726 v.Aux = OpARM64LessThanU
34727 v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
34728 v0.AddArg(x)
34729 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
34730 v1.AddArg(y)
34731 v0.AddArg(v1)
34732 v.AddArg(v0)
34733 v2 := b.NewValue0(v.Pos, OpConst64, t)
34734 v2.AuxInt = 0
34735 v.AddArg(v2)
34736 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
34737 v3.AuxInt = 64
34738 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
34739 v4.AddArg(y)
34740 v3.AddArg(v4)
34741 v.AddArg(v3)
34742 return true
34743 }
34744 }
34745 func rewriteValueARM64_OpLsh8x16_0(v *Value) bool {
34746 b := v.Block
34747 typ := &b.Func.Config.Types
34748
34749
34750
34751 for {
34752 t := v.Type
34753 y := v.Args[1]
34754 x := v.Args[0]
34755 v.reset(OpARM64CSEL)
34756 v.Aux = OpARM64LessThanU
34757 v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
34758 v0.AddArg(x)
34759 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
34760 v1.AddArg(y)
34761 v0.AddArg(v1)
34762 v.AddArg(v0)
34763 v2 := b.NewValue0(v.Pos, OpConst64, t)
34764 v2.AuxInt = 0
34765 v.AddArg(v2)
34766 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
34767 v3.AuxInt = 64
34768 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
34769 v4.AddArg(y)
34770 v3.AddArg(v4)
34771 v.AddArg(v3)
34772 return true
34773 }
34774 }
34775 func rewriteValueARM64_OpLsh8x32_0(v *Value) bool {
34776 b := v.Block
34777 typ := &b.Func.Config.Types
34778
34779
34780
34781 for {
34782 t := v.Type
34783 y := v.Args[1]
34784 x := v.Args[0]
34785 v.reset(OpARM64CSEL)
34786 v.Aux = OpARM64LessThanU
34787 v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
34788 v0.AddArg(x)
34789 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
34790 v1.AddArg(y)
34791 v0.AddArg(v1)
34792 v.AddArg(v0)
34793 v2 := b.NewValue0(v.Pos, OpConst64, t)
34794 v2.AuxInt = 0
34795 v.AddArg(v2)
34796 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
34797 v3.AuxInt = 64
34798 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
34799 v4.AddArg(y)
34800 v3.AddArg(v4)
34801 v.AddArg(v3)
34802 return true
34803 }
34804 }
34805 func rewriteValueARM64_OpLsh8x64_0(v *Value) bool {
34806 b := v.Block
34807
34808
34809
34810 for {
34811 t := v.Type
34812 y := v.Args[1]
34813 x := v.Args[0]
34814 v.reset(OpARM64CSEL)
34815 v.Aux = OpARM64LessThanU
34816 v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
34817 v0.AddArg(x)
34818 v0.AddArg(y)
34819 v.AddArg(v0)
34820 v1 := b.NewValue0(v.Pos, OpConst64, t)
34821 v1.AuxInt = 0
34822 v.AddArg(v1)
34823 v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
34824 v2.AuxInt = 64
34825 v2.AddArg(y)
34826 v.AddArg(v2)
34827 return true
34828 }
34829 }
34830 func rewriteValueARM64_OpLsh8x8_0(v *Value) bool {
34831 b := v.Block
34832 typ := &b.Func.Config.Types
34833
34834
34835
34836 for {
34837 t := v.Type
34838 y := v.Args[1]
34839 x := v.Args[0]
34840 v.reset(OpARM64CSEL)
34841 v.Aux = OpARM64LessThanU
34842 v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
34843 v0.AddArg(x)
34844 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
34845 v1.AddArg(y)
34846 v0.AddArg(v1)
34847 v.AddArg(v0)
34848 v2 := b.NewValue0(v.Pos, OpConst64, t)
34849 v2.AuxInt = 0
34850 v.AddArg(v2)
34851 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
34852 v3.AuxInt = 64
34853 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
34854 v4.AddArg(y)
34855 v3.AddArg(v4)
34856 v.AddArg(v3)
34857 return true
34858 }
34859 }
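// Mod on 8- and 16-bit operands is widened to 32 bits (sign-extended for the
// signed forms, zero-extended for the unsigned ones) and computed with
// MODW/UMODW; 32- and 64-bit operands map directly to MODW/UMODW and MOD/UMOD.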
34860 func rewriteValueARM64_OpMod16_0(v *Value) bool {
34861 b := v.Block
34862 typ := &b.Func.Config.Types
// match: (Mod16 x y)
// cond:
// result: (MODW (SignExt16to32 x) (SignExt16to32 y))
34866 for {
34867 y := v.Args[1]
34868 x := v.Args[0]
34869 v.reset(OpARM64MODW)
34870 v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
34871 v0.AddArg(x)
34872 v.AddArg(v0)
34873 v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
34874 v1.AddArg(y)
34875 v.AddArg(v1)
34876 return true
34877 }
34878 }
34879 func rewriteValueARM64_OpMod16u_0(v *Value) bool {
34880 b := v.Block
34881 typ := &b.Func.Config.Types
// match: (Mod16u x y)
// cond:
// result: (UMODW (ZeroExt16to32 x) (ZeroExt16to32 y))
34885 for {
34886 y := v.Args[1]
34887 x := v.Args[0]
34888 v.reset(OpARM64UMODW)
34889 v0 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
34890 v0.AddArg(x)
34891 v.AddArg(v0)
34892 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
34893 v1.AddArg(y)
34894 v.AddArg(v1)
34895 return true
34896 }
34897 }
34898 func rewriteValueARM64_OpMod32_0(v *Value) bool {
// match: (Mod32 x y)
// cond:
// result: (MODW x y)
34902 for {
34903 y := v.Args[1]
34904 x := v.Args[0]
34905 v.reset(OpARM64MODW)
34906 v.AddArg(x)
34907 v.AddArg(y)
34908 return true
34909 }
34910 }
34911 func rewriteValueARM64_OpMod32u_0(v *Value) bool {
34912
34913
34914
34915 for {
34916 y := v.Args[1]
34917 x := v.Args[0]
34918 v.reset(OpARM64UMODW)
34919 v.AddArg(x)
34920 v.AddArg(y)
34921 return true
34922 }
34923 }
34924 func rewriteValueARM64_OpMod64_0(v *Value) bool {
// match: (Mod64 x y)
// cond:
// result: (MOD x y)
34928 for {
34929 y := v.Args[1]
34930 x := v.Args[0]
34931 v.reset(OpARM64MOD)
34932 v.AddArg(x)
34933 v.AddArg(y)
34934 return true
34935 }
34936 }
34937 func rewriteValueARM64_OpMod64u_0(v *Value) bool {
34938
34939
34940
34941 for {
34942 y := v.Args[1]
34943 x := v.Args[0]
34944 v.reset(OpARM64UMOD)
34945 v.AddArg(x)
34946 v.AddArg(y)
34947 return true
34948 }
34949 }
34950 func rewriteValueARM64_OpMod8_0(v *Value) bool {
34951 b := v.Block
34952 typ := &b.Func.Config.Types
// match: (Mod8 x y)
// cond:
// result: (MODW (SignExt8to32 x) (SignExt8to32 y))
34956 for {
34957 y := v.Args[1]
34958 x := v.Args[0]
34959 v.reset(OpARM64MODW)
34960 v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
34961 v0.AddArg(x)
34962 v.AddArg(v0)
34963 v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
34964 v1.AddArg(y)
34965 v.AddArg(v1)
34966 return true
34967 }
34968 }
34969 func rewriteValueARM64_OpMod8u_0(v *Value) bool {
34970 b := v.Block
34971 typ := &b.Func.Config.Types
34972
34973
34974
34975 for {
34976 y := v.Args[1]
34977 x := v.Args[0]
34978 v.reset(OpARM64UMODW)
34979 v0 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
34980 v0.AddArg(x)
34981 v.AddArg(v0)
34982 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
34983 v1.AddArg(y)
34984 v.AddArg(v1)
34985 return true
34986 }
34987 }
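// Move is lowered by size: 0 bytes is a no-op, sizes up to 8 bytes become a
// single load/store pair of the matching width, and other small constant sizes
// are split into two or three stores of decreasing width. Larger copies are
// handled by the OpMove_10 rules below.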
34988 func rewriteValueARM64_OpMove_0(v *Value) bool {
34989 b := v.Block
34990 typ := &b.Func.Config.Types
// match: (Move [0] _ _ mem)
// cond:
// result: mem
34994 for {
34995 if v.AuxInt != 0 {
34996 break
34997 }
34998 mem := v.Args[2]
34999 v.reset(OpCopy)
35000 v.Type = mem.Type
35001 v.AddArg(mem)
35002 return true
35003 }
// match: (Move [1] dst src mem)
// cond:
// result: (MOVBstore dst (MOVBUload src mem) mem)
35007 for {
35008 if v.AuxInt != 1 {
35009 break
35010 }
35011 mem := v.Args[2]
35012 dst := v.Args[0]
35013 src := v.Args[1]
35014 v.reset(OpARM64MOVBstore)
35015 v.AddArg(dst)
35016 v0 := b.NewValue0(v.Pos, OpARM64MOVBUload, typ.UInt8)
35017 v0.AddArg(src)
35018 v0.AddArg(mem)
35019 v.AddArg(v0)
35020 v.AddArg(mem)
35021 return true
35022 }
35023
35024
35025
35026 for {
35027 if v.AuxInt != 2 {
35028 break
35029 }
35030 mem := v.Args[2]
35031 dst := v.Args[0]
35032 src := v.Args[1]
35033 v.reset(OpARM64MOVHstore)
35034 v.AddArg(dst)
35035 v0 := b.NewValue0(v.Pos, OpARM64MOVHUload, typ.UInt16)
35036 v0.AddArg(src)
35037 v0.AddArg(mem)
35038 v.AddArg(v0)
35039 v.AddArg(mem)
35040 return true
35041 }
35042
35043
35044
35045 for {
35046 if v.AuxInt != 4 {
35047 break
35048 }
35049 mem := v.Args[2]
35050 dst := v.Args[0]
35051 src := v.Args[1]
35052 v.reset(OpARM64MOVWstore)
35053 v.AddArg(dst)
35054 v0 := b.NewValue0(v.Pos, OpARM64MOVWUload, typ.UInt32)
35055 v0.AddArg(src)
35056 v0.AddArg(mem)
35057 v.AddArg(v0)
35058 v.AddArg(mem)
35059 return true
35060 }
35061
35062
35063
35064 for {
35065 if v.AuxInt != 8 {
35066 break
35067 }
35068 mem := v.Args[2]
35069 dst := v.Args[0]
35070 src := v.Args[1]
35071 v.reset(OpARM64MOVDstore)
35072 v.AddArg(dst)
35073 v0 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64)
35074 v0.AddArg(src)
35075 v0.AddArg(mem)
35076 v.AddArg(v0)
35077 v.AddArg(mem)
35078 return true
35079 }
// match: (Move [3] dst src mem)
// cond:
// result: (MOVBstore [2] dst (MOVBUload [2] src mem) (MOVHstore dst (MOVHUload src mem) mem))
35083 for {
35084 if v.AuxInt != 3 {
35085 break
35086 }
35087 mem := v.Args[2]
35088 dst := v.Args[0]
35089 src := v.Args[1]
35090 v.reset(OpARM64MOVBstore)
35091 v.AuxInt = 2
35092 v.AddArg(dst)
35093 v0 := b.NewValue0(v.Pos, OpARM64MOVBUload, typ.UInt8)
35094 v0.AuxInt = 2
35095 v0.AddArg(src)
35096 v0.AddArg(mem)
35097 v.AddArg(v0)
35098 v1 := b.NewValue0(v.Pos, OpARM64MOVHstore, types.TypeMem)
35099 v1.AddArg(dst)
35100 v2 := b.NewValue0(v.Pos, OpARM64MOVHUload, typ.UInt16)
35101 v2.AddArg(src)
35102 v2.AddArg(mem)
35103 v1.AddArg(v2)
35104 v1.AddArg(mem)
35105 v.AddArg(v1)
35106 return true
35107 }
35108
35109
35110
35111 for {
35112 if v.AuxInt != 5 {
35113 break
35114 }
35115 mem := v.Args[2]
35116 dst := v.Args[0]
35117 src := v.Args[1]
35118 v.reset(OpARM64MOVBstore)
35119 v.AuxInt = 4
35120 v.AddArg(dst)
35121 v0 := b.NewValue0(v.Pos, OpARM64MOVBUload, typ.UInt8)
35122 v0.AuxInt = 4
35123 v0.AddArg(src)
35124 v0.AddArg(mem)
35125 v.AddArg(v0)
35126 v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem)
35127 v1.AddArg(dst)
35128 v2 := b.NewValue0(v.Pos, OpARM64MOVWUload, typ.UInt32)
35129 v2.AddArg(src)
35130 v2.AddArg(mem)
35131 v1.AddArg(v2)
35132 v1.AddArg(mem)
35133 v.AddArg(v1)
35134 return true
35135 }
35136
35137
35138
35139 for {
35140 if v.AuxInt != 6 {
35141 break
35142 }
35143 mem := v.Args[2]
35144 dst := v.Args[0]
35145 src := v.Args[1]
35146 v.reset(OpARM64MOVHstore)
35147 v.AuxInt = 4
35148 v.AddArg(dst)
35149 v0 := b.NewValue0(v.Pos, OpARM64MOVHUload, typ.UInt16)
35150 v0.AuxInt = 4
35151 v0.AddArg(src)
35152 v0.AddArg(mem)
35153 v.AddArg(v0)
35154 v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem)
35155 v1.AddArg(dst)
35156 v2 := b.NewValue0(v.Pos, OpARM64MOVWUload, typ.UInt32)
35157 v2.AddArg(src)
35158 v2.AddArg(mem)
35159 v1.AddArg(v2)
35160 v1.AddArg(mem)
35161 v.AddArg(v1)
35162 return true
35163 }
35164
35165
35166
35167 for {
35168 if v.AuxInt != 7 {
35169 break
35170 }
35171 mem := v.Args[2]
35172 dst := v.Args[0]
35173 src := v.Args[1]
35174 v.reset(OpARM64MOVBstore)
35175 v.AuxInt = 6
35176 v.AddArg(dst)
35177 v0 := b.NewValue0(v.Pos, OpARM64MOVBUload, typ.UInt8)
35178 v0.AuxInt = 6
35179 v0.AddArg(src)
35180 v0.AddArg(mem)
35181 v.AddArg(v0)
35182 v1 := b.NewValue0(v.Pos, OpARM64MOVHstore, types.TypeMem)
35183 v1.AuxInt = 4
35184 v1.AddArg(dst)
35185 v2 := b.NewValue0(v.Pos, OpARM64MOVHUload, typ.UInt16)
35186 v2.AuxInt = 4
35187 v2.AddArg(src)
35188 v2.AddArg(mem)
35189 v1.AddArg(v2)
35190 v3 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem)
35191 v3.AddArg(dst)
35192 v4 := b.NewValue0(v.Pos, OpARM64MOVWUload, typ.UInt32)
35193 v4.AddArg(src)
35194 v4.AddArg(mem)
35195 v3.AddArg(v4)
35196 v3.AddArg(mem)
35197 v1.AddArg(v3)
35198 v.AddArg(v1)
35199 return true
35200 }
// match: (Move [12] dst src mem)
// cond:
// result: (MOVWstore [8] dst (MOVWUload [8] src mem) (MOVDstore dst (MOVDload src mem) mem))
35204 for {
35205 if v.AuxInt != 12 {
35206 break
35207 }
35208 mem := v.Args[2]
35209 dst := v.Args[0]
35210 src := v.Args[1]
35211 v.reset(OpARM64MOVWstore)
35212 v.AuxInt = 8
35213 v.AddArg(dst)
35214 v0 := b.NewValue0(v.Pos, OpARM64MOVWUload, typ.UInt32)
35215 v0.AuxInt = 8
35216 v0.AddArg(src)
35217 v0.AddArg(mem)
35218 v.AddArg(v0)
35219 v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
35220 v1.AddArg(dst)
35221 v2 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64)
35222 v2.AddArg(src)
35223 v2.AddArg(mem)
35224 v1.AddArg(v2)
35225 v1.AddArg(mem)
35226 v.AddArg(v1)
35227 return true
35228 }
35229 return false
35230 }
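// Larger moves: 16 and 24 bytes use MOVD load/store pairs; sizes that are not a
// multiple of 8 copy the aligned prefix first and recurse on the short tail;
// medium-sized copies jump into runtime.duffcopy, where each 16-byte block
// assembles to 8 bytes of code, so the DUFFCOPY AuxInt 8*(64-s/16) is the entry
// offset that executes exactly s/16 blocks; everything else falls back to the
// LoweredMove loop.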
35231 func rewriteValueARM64_OpMove_10(v *Value) bool {
35232 b := v.Block
35233 config := b.Func.Config
35234 typ := &b.Func.Config.Types
// match: (Move [16] dst src mem)
// cond:
// result: (MOVDstore [8] dst (MOVDload [8] src mem) (MOVDstore dst (MOVDload src mem) mem))
35238 for {
35239 if v.AuxInt != 16 {
35240 break
35241 }
35242 mem := v.Args[2]
35243 dst := v.Args[0]
35244 src := v.Args[1]
35245 v.reset(OpARM64MOVDstore)
35246 v.AuxInt = 8
35247 v.AddArg(dst)
35248 v0 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64)
35249 v0.AuxInt = 8
35250 v0.AddArg(src)
35251 v0.AddArg(mem)
35252 v.AddArg(v0)
35253 v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
35254 v1.AddArg(dst)
35255 v2 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64)
35256 v2.AddArg(src)
35257 v2.AddArg(mem)
35258 v1.AddArg(v2)
35259 v1.AddArg(mem)
35260 v.AddArg(v1)
35261 return true
35262 }
35263
35264
35265
35266 for {
35267 if v.AuxInt != 24 {
35268 break
35269 }
35270 mem := v.Args[2]
35271 dst := v.Args[0]
35272 src := v.Args[1]
35273 v.reset(OpARM64MOVDstore)
35274 v.AuxInt = 16
35275 v.AddArg(dst)
35276 v0 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64)
35277 v0.AuxInt = 16
35278 v0.AddArg(src)
35279 v0.AddArg(mem)
35280 v.AddArg(v0)
35281 v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
35282 v1.AuxInt = 8
35283 v1.AddArg(dst)
35284 v2 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64)
35285 v2.AuxInt = 8
35286 v2.AddArg(src)
35287 v2.AddArg(mem)
35288 v1.AddArg(v2)
35289 v3 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
35290 v3.AddArg(dst)
35291 v4 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64)
35292 v4.AddArg(src)
35293 v4.AddArg(mem)
35294 v3.AddArg(v4)
35295 v3.AddArg(mem)
35296 v1.AddArg(v3)
35297 v.AddArg(v1)
35298 return true
35299 }
// match: (Move [s] dst src mem)
// cond: s%8 != 0 && s > 8
// result: (Move [s%8] (OffPtr <dst.Type> dst [s-s%8]) (OffPtr <src.Type> src [s-s%8]) (Move [s-s%8] dst src mem))
35303 for {
35304 s := v.AuxInt
35305 mem := v.Args[2]
35306 dst := v.Args[0]
35307 src := v.Args[1]
35308 if !(s%8 != 0 && s > 8) {
35309 break
35310 }
35311 v.reset(OpMove)
35312 v.AuxInt = s % 8
35313 v0 := b.NewValue0(v.Pos, OpOffPtr, dst.Type)
35314 v0.AuxInt = s - s%8
35315 v0.AddArg(dst)
35316 v.AddArg(v0)
35317 v1 := b.NewValue0(v.Pos, OpOffPtr, src.Type)
35318 v1.AuxInt = s - s%8
35319 v1.AddArg(src)
35320 v.AddArg(v1)
35321 v2 := b.NewValue0(v.Pos, OpMove, types.TypeMem)
35322 v2.AuxInt = s - s%8
35323 v2.AddArg(dst)
35324 v2.AddArg(src)
35325 v2.AddArg(mem)
35326 v.AddArg(v2)
35327 return true
35328 }
// match: (Move [s] dst src mem)
// cond: s > 32 && s <= 16*64 && s%16 == 8 && !config.noDuffDevice
// result: (MOVDstore [s-8] dst (MOVDload [s-8] src mem) (DUFFCOPY <types.TypeMem> [8*(64-(s-8)/16)] dst src mem))
35332 for {
35333 s := v.AuxInt
35334 mem := v.Args[2]
35335 dst := v.Args[0]
35336 src := v.Args[1]
35337 if !(s > 32 && s <= 16*64 && s%16 == 8 && !config.noDuffDevice) {
35338 break
35339 }
35340 v.reset(OpARM64MOVDstore)
35341 v.AuxInt = s - 8
35342 v.AddArg(dst)
35343 v0 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64)
35344 v0.AuxInt = s - 8
35345 v0.AddArg(src)
35346 v0.AddArg(mem)
35347 v.AddArg(v0)
35348 v1 := b.NewValue0(v.Pos, OpARM64DUFFCOPY, types.TypeMem)
35349 v1.AuxInt = 8 * (64 - (s-8)/16)
35350 v1.AddArg(dst)
35351 v1.AddArg(src)
35352 v1.AddArg(mem)
35353 v.AddArg(v1)
35354 return true
35355 }
// match: (Move [s] dst src mem)
// cond: s > 32 && s <= 16*64 && s%16 == 0 && !config.noDuffDevice
// result: (DUFFCOPY [8 * (64 - s/16)] dst src mem)
35359 for {
35360 s := v.AuxInt
35361 mem := v.Args[2]
35362 dst := v.Args[0]
35363 src := v.Args[1]
35364 if !(s > 32 && s <= 16*64 && s%16 == 0 && !config.noDuffDevice) {
35365 break
35366 }
35367 v.reset(OpARM64DUFFCOPY)
35368 v.AuxInt = 8 * (64 - s/16)
35369 v.AddArg(dst)
35370 v.AddArg(src)
35371 v.AddArg(mem)
35372 return true
35373 }
// match: (Move [s] dst src mem)
// cond: s > 24 && s%8 == 0
// result: (LoweredMove dst src (ADDconst <src.Type> src [s-8]) mem)
35377 for {
35378 s := v.AuxInt
35379 mem := v.Args[2]
35380 dst := v.Args[0]
35381 src := v.Args[1]
35382 if !(s > 24 && s%8 == 0) {
35383 break
35384 }
35385 v.reset(OpARM64LoweredMove)
35386 v.AddArg(dst)
35387 v.AddArg(src)
35388 v0 := b.NewValue0(v.Pos, OpARM64ADDconst, src.Type)
35389 v0.AuxInt = s - 8
35390 v0.AddArg(src)
35391 v.AddArg(v0)
35392 v.AddArg(mem)
35393 return true
35394 }
35395 return false
35396 }
35397 func rewriteValueARM64_OpMul16_0(v *Value) bool {
// match: (Mul16 x y)
// cond:
// result: (MULW x y)
35401 for {
35402 y := v.Args[1]
35403 x := v.Args[0]
35404 v.reset(OpARM64MULW)
35405 v.AddArg(x)
35406 v.AddArg(y)
35407 return true
35408 }
35409 }
35410 func rewriteValueARM64_OpMul32_0(v *Value) bool {
35411
35412
35413
35414 for {
35415 y := v.Args[1]
35416 x := v.Args[0]
35417 v.reset(OpARM64MULW)
35418 v.AddArg(x)
35419 v.AddArg(y)
35420 return true
35421 }
35422 }
35423 func rewriteValueARM64_OpMul32F_0(v *Value) bool {
35424
35425
35426
35427 for {
35428 y := v.Args[1]
35429 x := v.Args[0]
35430 v.reset(OpARM64FMULS)
35431 v.AddArg(x)
35432 v.AddArg(y)
35433 return true
35434 }
35435 }
35436 func rewriteValueARM64_OpMul64_0(v *Value) bool {
35437
35438
35439
35440 for {
35441 y := v.Args[1]
35442 x := v.Args[0]
35443 v.reset(OpARM64MUL)
35444 v.AddArg(x)
35445 v.AddArg(y)
35446 return true
35447 }
35448 }
35449 func rewriteValueARM64_OpMul64F_0(v *Value) bool {
35450
35451
35452
35453 for {
35454 y := v.Args[1]
35455 x := v.Args[0]
35456 v.reset(OpARM64FMULD)
35457 v.AddArg(x)
35458 v.AddArg(y)
35459 return true
35460 }
35461 }
35462 func rewriteValueARM64_OpMul64uhilo_0(v *Value) bool {
35463
35464
35465
35466 for {
35467 y := v.Args[1]
35468 x := v.Args[0]
35469 v.reset(OpARM64LoweredMuluhilo)
35470 v.AddArg(x)
35471 v.AddArg(y)
35472 return true
35473 }
35474 }
35475 func rewriteValueARM64_OpMul8_0(v *Value) bool {
35476
35477
35478
35479 for {
35480 y := v.Args[1]
35481 x := v.Args[0]
35482 v.reset(OpARM64MULW)
35483 v.AddArg(x)
35484 v.AddArg(y)
35485 return true
35486 }
35487 }
35488 func rewriteValueARM64_OpNeg16_0(v *Value) bool {
35489
35490
35491
35492 for {
35493 x := v.Args[0]
35494 v.reset(OpARM64NEG)
35495 v.AddArg(x)
35496 return true
35497 }
35498 }
35499 func rewriteValueARM64_OpNeg32_0(v *Value) bool {
35500
35501
35502
35503 for {
35504 x := v.Args[0]
35505 v.reset(OpARM64NEG)
35506 v.AddArg(x)
35507 return true
35508 }
35509 }
35510 func rewriteValueARM64_OpNeg32F_0(v *Value) bool {
35511
35512
35513
35514 for {
35515 x := v.Args[0]
35516 v.reset(OpARM64FNEGS)
35517 v.AddArg(x)
35518 return true
35519 }
35520 }
35521 func rewriteValueARM64_OpNeg64_0(v *Value) bool {
35522
35523
35524
35525 for {
35526 x := v.Args[0]
35527 v.reset(OpARM64NEG)
35528 v.AddArg(x)
35529 return true
35530 }
35531 }
35532 func rewriteValueARM64_OpNeg64F_0(v *Value) bool {
35533
35534
35535
35536 for {
35537 x := v.Args[0]
35538 v.reset(OpARM64FNEGD)
35539 v.AddArg(x)
35540 return true
35541 }
35542 }
35543 func rewriteValueARM64_OpNeg8_0(v *Value) bool {
35544
35545
35546
35547 for {
35548 x := v.Args[0]
35549 v.reset(OpARM64NEG)
35550 v.AddArg(x)
35551 return true
35552 }
35553 }
35554 func rewriteValueARM64_OpNeq16_0(v *Value) bool {
35555 b := v.Block
35556 typ := &b.Func.Config.Types
// match: (Neq16 x y)
// cond:
// result: (NotEqual (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y)))
35560 for {
35561 y := v.Args[1]
35562 x := v.Args[0]
35563 v.reset(OpARM64NotEqual)
35564 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
35565 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
35566 v1.AddArg(x)
35567 v0.AddArg(v1)
35568 v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
35569 v2.AddArg(y)
35570 v0.AddArg(v2)
35571 v.AddArg(v0)
35572 return true
35573 }
35574 }
35575 func rewriteValueARM64_OpNeq32_0(v *Value) bool {
35576 b := v.Block
35577
35578
35579
35580 for {
35581 y := v.Args[1]
35582 x := v.Args[0]
35583 v.reset(OpARM64NotEqual)
35584 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
35585 v0.AddArg(x)
35586 v0.AddArg(y)
35587 v.AddArg(v0)
35588 return true
35589 }
35590 }
35591 func rewriteValueARM64_OpNeq32F_0(v *Value) bool {
35592 b := v.Block
35593
35594
35595
35596 for {
35597 y := v.Args[1]
35598 x := v.Args[0]
35599 v.reset(OpARM64NotEqual)
35600 v0 := b.NewValue0(v.Pos, OpARM64FCMPS, types.TypeFlags)
35601 v0.AddArg(x)
35602 v0.AddArg(y)
35603 v.AddArg(v0)
35604 return true
35605 }
35606 }
35607 func rewriteValueARM64_OpNeq64_0(v *Value) bool {
35608 b := v.Block
35609
35610
35611
35612 for {
35613 y := v.Args[1]
35614 x := v.Args[0]
35615 v.reset(OpARM64NotEqual)
35616 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
35617 v0.AddArg(x)
35618 v0.AddArg(y)
35619 v.AddArg(v0)
35620 return true
35621 }
35622 }
35623 func rewriteValueARM64_OpNeq64F_0(v *Value) bool {
35624 b := v.Block
35625
35626
35627
35628 for {
35629 y := v.Args[1]
35630 x := v.Args[0]
35631 v.reset(OpARM64NotEqual)
35632 v0 := b.NewValue0(v.Pos, OpARM64FCMPD, types.TypeFlags)
35633 v0.AddArg(x)
35634 v0.AddArg(y)
35635 v.AddArg(v0)
35636 return true
35637 }
35638 }
35639 func rewriteValueARM64_OpNeq8_0(v *Value) bool {
35640 b := v.Block
35641 typ := &b.Func.Config.Types
35642
35643
35644
35645 for {
35646 y := v.Args[1]
35647 x := v.Args[0]
35648 v.reset(OpARM64NotEqual)
35649 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
35650 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
35651 v1.AddArg(x)
35652 v0.AddArg(v1)
35653 v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
35654 v2.AddArg(y)
35655 v0.AddArg(v2)
35656 v.AddArg(v0)
35657 return true
35658 }
35659 }
35660 func rewriteValueARM64_OpNeqB_0(v *Value) bool {
// match: (NeqB x y)
// cond:
// result: (XOR x y)
35664 for {
35665 y := v.Args[1]
35666 x := v.Args[0]
35667 v.reset(OpARM64XOR)
35668 v.AddArg(x)
35669 v.AddArg(y)
35670 return true
35671 }
35672 }
35673 func rewriteValueARM64_OpNeqPtr_0(v *Value) bool {
35674 b := v.Block
35675
35676
35677
35678 for {
35679 y := v.Args[1]
35680 x := v.Args[0]
35681 v.reset(OpARM64NotEqual)
35682 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
35683 v0.AddArg(x)
35684 v0.AddArg(y)
35685 v.AddArg(v0)
35686 return true
35687 }
35688 }
35689 func rewriteValueARM64_OpNilCheck_0(v *Value) bool {
// match: (NilCheck ptr mem)
// cond:
// result: (LoweredNilCheck ptr mem)
35693 for {
35694 mem := v.Args[1]
35695 ptr := v.Args[0]
35696 v.reset(OpARM64LoweredNilCheck)
35697 v.AddArg(ptr)
35698 v.AddArg(mem)
35699 return true
35700 }
35701 }
35702 func rewriteValueARM64_OpNot_0(v *Value) bool {
35703 b := v.Block
35704 typ := &b.Func.Config.Types
// match: (Not x)
// cond:
// result: (XOR (MOVDconst [1]) x)
35708 for {
35709 x := v.Args[0]
35710 v.reset(OpARM64XOR)
35711 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
35712 v0.AuxInt = 1
35713 v.AddArg(v0)
35714 v.AddArg(x)
35715 return true
35716 }
35717 }
35718 func rewriteValueARM64_OpOffPtr_0(v *Value) bool {
// match: (OffPtr [off] ptr:(SP))
// cond:
// result: (MOVDaddr [off] ptr)
35722 for {
35723 off := v.AuxInt
35724 ptr := v.Args[0]
35725 if ptr.Op != OpSP {
35726 break
35727 }
35728 v.reset(OpARM64MOVDaddr)
35729 v.AuxInt = off
35730 v.AddArg(ptr)
35731 return true
35732 }
// match: (OffPtr [off] ptr)
// cond:
// result: (ADDconst [off] ptr)
35736 for {
35737 off := v.AuxInt
35738 ptr := v.Args[0]
35739 v.reset(OpARM64ADDconst)
35740 v.AuxInt = off
35741 v.AddArg(ptr)
35742 return true
35743 }
35744 }
35745 func rewriteValueARM64_OpOr16_0(v *Value) bool {
35746
35747
35748
35749 for {
35750 y := v.Args[1]
35751 x := v.Args[0]
35752 v.reset(OpARM64OR)
35753 v.AddArg(x)
35754 v.AddArg(y)
35755 return true
35756 }
35757 }
35758 func rewriteValueARM64_OpOr32_0(v *Value) bool {
35759
35760
35761
35762 for {
35763 y := v.Args[1]
35764 x := v.Args[0]
35765 v.reset(OpARM64OR)
35766 v.AddArg(x)
35767 v.AddArg(y)
35768 return true
35769 }
35770 }
35771 func rewriteValueARM64_OpOr64_0(v *Value) bool {
35772
35773
35774
35775 for {
35776 y := v.Args[1]
35777 x := v.Args[0]
35778 v.reset(OpARM64OR)
35779 v.AddArg(x)
35780 v.AddArg(y)
35781 return true
35782 }
35783 }
35784 func rewriteValueARM64_OpOr8_0(v *Value) bool {
35785
35786
35787
35788 for {
35789 y := v.Args[1]
35790 x := v.Args[0]
35791 v.reset(OpARM64OR)
35792 v.AddArg(x)
35793 v.AddArg(y)
35794 return true
35795 }
35796 }
35797 func rewriteValueARM64_OpOrB_0(v *Value) bool {
35798
35799
35800
35801 for {
35802 y := v.Args[1]
35803 x := v.Args[0]
35804 v.reset(OpARM64OR)
35805 v.AddArg(x)
35806 v.AddArg(y)
35807 return true
35808 }
35809 }
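// PanicBounds is lowered to one of three variants. boundsABI(kind) groups the
// bounds-check failure kinds by the runtime panic function they call, and the
// A/B/C ops differ only in which registers carry the two reported values.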
35810 func rewriteValueARM64_OpPanicBounds_0(v *Value) bool {
// match: (PanicBounds [kind] x y mem)
// cond: boundsABI(kind) == 0
// result: (LoweredPanicBoundsA [kind] x y mem)
35814 for {
35815 kind := v.AuxInt
35816 mem := v.Args[2]
35817 x := v.Args[0]
35818 y := v.Args[1]
35819 if !(boundsABI(kind) == 0) {
35820 break
35821 }
35822 v.reset(OpARM64LoweredPanicBoundsA)
35823 v.AuxInt = kind
35824 v.AddArg(x)
35825 v.AddArg(y)
35826 v.AddArg(mem)
35827 return true
35828 }
35829
35830
35831
35832 for {
35833 kind := v.AuxInt
35834 mem := v.Args[2]
35835 x := v.Args[0]
35836 y := v.Args[1]
35837 if !(boundsABI(kind) == 1) {
35838 break
35839 }
35840 v.reset(OpARM64LoweredPanicBoundsB)
35841 v.AuxInt = kind
35842 v.AddArg(x)
35843 v.AddArg(y)
35844 v.AddArg(mem)
35845 return true
35846 }
35847
35848
35849
35850 for {
35851 kind := v.AuxInt
35852 mem := v.Args[2]
35853 x := v.Args[0]
35854 y := v.Args[1]
35855 if !(boundsABI(kind) == 2) {
35856 break
35857 }
35858 v.reset(OpARM64LoweredPanicBoundsC)
35859 v.AuxInt = kind
35860 v.AddArg(x)
35861 v.AddArg(y)
35862 v.AddArg(mem)
35863 return true
35864 }
35865 return false
35866 }
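// PopCount is computed in the SIMD unit: the operand is moved to a floating-point
// register, VCNT counts the set bits in each byte, VUADDLV sums those byte
// counts, and the total is moved back to a general-purpose register.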
35867 func rewriteValueARM64_OpPopCount16_0(v *Value) bool {
35868 b := v.Block
35869 typ := &b.Func.Config.Types
// match: (PopCount16 <t> x)
// cond:
// result: (FMOVDfpgp <t> (VUADDLV <typ.Float64> (VCNT <typ.Float64> (FMOVDgpfp <typ.Float64> (ZeroExt16to64 x)))))
35873 for {
35874 t := v.Type
35875 x := v.Args[0]
35876 v.reset(OpARM64FMOVDfpgp)
35877 v.Type = t
35878 v0 := b.NewValue0(v.Pos, OpARM64VUADDLV, typ.Float64)
35879 v1 := b.NewValue0(v.Pos, OpARM64VCNT, typ.Float64)
35880 v2 := b.NewValue0(v.Pos, OpARM64FMOVDgpfp, typ.Float64)
35881 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
35882 v3.AddArg(x)
35883 v2.AddArg(v3)
35884 v1.AddArg(v2)
35885 v0.AddArg(v1)
35886 v.AddArg(v0)
35887 return true
35888 }
35889 }
35890 func rewriteValueARM64_OpPopCount32_0(v *Value) bool {
35891 b := v.Block
35892 typ := &b.Func.Config.Types
35893
35894
35895
35896 for {
35897 t := v.Type
35898 x := v.Args[0]
35899 v.reset(OpARM64FMOVDfpgp)
35900 v.Type = t
35901 v0 := b.NewValue0(v.Pos, OpARM64VUADDLV, typ.Float64)
35902 v1 := b.NewValue0(v.Pos, OpARM64VCNT, typ.Float64)
35903 v2 := b.NewValue0(v.Pos, OpARM64FMOVDgpfp, typ.Float64)
35904 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
35905 v3.AddArg(x)
35906 v2.AddArg(v3)
35907 v1.AddArg(v2)
35908 v0.AddArg(v1)
35909 v.AddArg(v0)
35910 return true
35911 }
35912 }
35913 func rewriteValueARM64_OpPopCount64_0(v *Value) bool {
35914 b := v.Block
35915 typ := &b.Func.Config.Types
35916
35917
35918
35919 for {
35920 t := v.Type
35921 x := v.Args[0]
35922 v.reset(OpARM64FMOVDfpgp)
35923 v.Type = t
35924 v0 := b.NewValue0(v.Pos, OpARM64VUADDLV, typ.Float64)
35925 v1 := b.NewValue0(v.Pos, OpARM64VCNT, typ.Float64)
35926 v2 := b.NewValue0(v.Pos, OpARM64FMOVDgpfp, typ.Float64)
35927 v2.AddArg(x)
35928 v1.AddArg(v2)
35929 v0.AddArg(v1)
35930 v.AddArg(v0)
35931 return true
35932 }
35933 }
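// ARM64 has only rotate-right instructions, so 32- and 64-bit RotateLeft become
// RORW/ROR with a negated count. 8- and 16-bit rotates are rewritten only for
// constant counts, as an Or of a left shift and an unsigned right shift.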
35934 func rewriteValueARM64_OpRotateLeft16_0(v *Value) bool {
35935 b := v.Block
35936 typ := &b.Func.Config.Types
// match: (RotateLeft16 <t> x (MOVDconst [c]))
// cond:
// result: (Or16 (Lsh16x64 <t> x (MOVDconst [c&15])) (Rsh16Ux64 <t> x (MOVDconst [-c&15])))
35940 for {
35941 t := v.Type
35942 _ = v.Args[1]
35943 x := v.Args[0]
35944 v_1 := v.Args[1]
35945 if v_1.Op != OpARM64MOVDconst {
35946 break
35947 }
35948 c := v_1.AuxInt
35949 v.reset(OpOr16)
35950 v0 := b.NewValue0(v.Pos, OpLsh16x64, t)
35951 v0.AddArg(x)
35952 v1 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
35953 v1.AuxInt = c & 15
35954 v0.AddArg(v1)
35955 v.AddArg(v0)
35956 v2 := b.NewValue0(v.Pos, OpRsh16Ux64, t)
35957 v2.AddArg(x)
35958 v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
35959 v3.AuxInt = -c & 15
35960 v2.AddArg(v3)
35961 v.AddArg(v2)
35962 return true
35963 }
35964 return false
35965 }
35966 func rewriteValueARM64_OpRotateLeft32_0(v *Value) bool {
35967 b := v.Block
// match: (RotateLeft32 x y)
// cond:
// result: (RORW x (NEG <y.Type> y))
35971 for {
35972 y := v.Args[1]
35973 x := v.Args[0]
35974 v.reset(OpARM64RORW)
35975 v.AddArg(x)
35976 v0 := b.NewValue0(v.Pos, OpARM64NEG, y.Type)
35977 v0.AddArg(y)
35978 v.AddArg(v0)
35979 return true
35980 }
35981 }
35982 func rewriteValueARM64_OpRotateLeft64_0(v *Value) bool {
35983 b := v.Block
35984
35985
35986
35987 for {
35988 y := v.Args[1]
35989 x := v.Args[0]
35990 v.reset(OpARM64ROR)
35991 v.AddArg(x)
35992 v0 := b.NewValue0(v.Pos, OpARM64NEG, y.Type)
35993 v0.AddArg(y)
35994 v.AddArg(v0)
35995 return true
35996 }
35997 }
35998 func rewriteValueARM64_OpRotateLeft8_0(v *Value) bool {
35999 b := v.Block
36000 typ := &b.Func.Config.Types
36001
36002
36003
36004 for {
36005 t := v.Type
36006 _ = v.Args[1]
36007 x := v.Args[0]
36008 v_1 := v.Args[1]
36009 if v_1.Op != OpARM64MOVDconst {
36010 break
36011 }
36012 c := v_1.AuxInt
36013 v.reset(OpOr8)
36014 v0 := b.NewValue0(v.Pos, OpLsh8x64, t)
36015 v0.AddArg(x)
36016 v1 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
36017 v1.AuxInt = c & 7
36018 v0.AddArg(v1)
36019 v.AddArg(v0)
36020 v2 := b.NewValue0(v.Pos, OpRsh8Ux64, t)
36021 v2.AddArg(x)
36022 v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
36023 v3.AuxInt = -c & 7
36024 v2.AddArg(v3)
36025 v.AddArg(v2)
36026 return true
36027 }
36028 return false
36029 }
36030 func rewriteValueARM64_OpRound_0(v *Value) bool {
36031
36032
36033
36034 for {
36035 x := v.Args[0]
36036 v.reset(OpARM64FRINTAD)
36037 v.AddArg(x)
36038 return true
36039 }
36040 }
36041 func rewriteValueARM64_OpRound32F_0(v *Value) bool {
36042
36043
36044
36045 for {
36046 x := v.Args[0]
36047 v.reset(OpARM64LoweredRound32F)
36048 v.AddArg(x)
36049 return true
36050 }
36051 }
36052 func rewriteValueARM64_OpRound64F_0(v *Value) bool {
36053
36054
36055
36056 for {
36057 x := v.Args[0]
36058 v.reset(OpARM64LoweredRound64F)
36059 v.AddArg(x)
36060 return true
36061 }
36062 }
36063 func rewriteValueARM64_OpRoundToEven_0(v *Value) bool {
36064
36065
36066
36067 for {
36068 x := v.Args[0]
36069 v.reset(OpARM64FRINTND)
36070 v.AddArg(x)
36071 return true
36072 }
36073 }
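// Unsigned right shifts zero-extend the operand and reuse the CSEL-guarded
// pattern of the left shifts above, selecting 0 for counts of 64 or more.
// Signed right shifts (Rsh*x*) instead clamp the count to 63, so over-wide
// shifts still yield the sign-filled result the Go spec requires.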
36074 func rewriteValueARM64_OpRsh16Ux16_0(v *Value) bool {
36075 b := v.Block
36076 typ := &b.Func.Config.Types
// match: (Rsh16Ux16 <t> x y)
// cond:
// result: (CSEL {OpARM64LessThanU} (SRL <t> (ZeroExt16to64 x) (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
36080 for {
36081 t := v.Type
36082 y := v.Args[1]
36083 x := v.Args[0]
36084 v.reset(OpARM64CSEL)
36085 v.Aux = OpARM64LessThanU
36086 v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
36087 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
36088 v1.AddArg(x)
36089 v0.AddArg(v1)
36090 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
36091 v2.AddArg(y)
36092 v0.AddArg(v2)
36093 v.AddArg(v0)
36094 v3 := b.NewValue0(v.Pos, OpConst64, t)
36095 v3.AuxInt = 0
36096 v.AddArg(v3)
36097 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
36098 v4.AuxInt = 64
36099 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
36100 v5.AddArg(y)
36101 v4.AddArg(v5)
36102 v.AddArg(v4)
36103 return true
36104 }
36105 }
36106 func rewriteValueARM64_OpRsh16Ux32_0(v *Value) bool {
36107 b := v.Block
36108 typ := &b.Func.Config.Types
36109
36110
36111
36112 for {
36113 t := v.Type
36114 y := v.Args[1]
36115 x := v.Args[0]
36116 v.reset(OpARM64CSEL)
36117 v.Aux = OpARM64LessThanU
36118 v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
36119 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
36120 v1.AddArg(x)
36121 v0.AddArg(v1)
36122 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
36123 v2.AddArg(y)
36124 v0.AddArg(v2)
36125 v.AddArg(v0)
36126 v3 := b.NewValue0(v.Pos, OpConst64, t)
36127 v3.AuxInt = 0
36128 v.AddArg(v3)
36129 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
36130 v4.AuxInt = 64
36131 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
36132 v5.AddArg(y)
36133 v4.AddArg(v5)
36134 v.AddArg(v4)
36135 return true
36136 }
36137 }
36138 func rewriteValueARM64_OpRsh16Ux64_0(v *Value) bool {
36139 b := v.Block
36140 typ := &b.Func.Config.Types
36141
36142
36143
36144 for {
36145 t := v.Type
36146 y := v.Args[1]
36147 x := v.Args[0]
36148 v.reset(OpARM64CSEL)
36149 v.Aux = OpARM64LessThanU
36150 v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
36151 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
36152 v1.AddArg(x)
36153 v0.AddArg(v1)
36154 v0.AddArg(y)
36155 v.AddArg(v0)
36156 v2 := b.NewValue0(v.Pos, OpConst64, t)
36157 v2.AuxInt = 0
36158 v.AddArg(v2)
36159 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
36160 v3.AuxInt = 64
36161 v3.AddArg(y)
36162 v.AddArg(v3)
36163 return true
36164 }
36165 }
36166 func rewriteValueARM64_OpRsh16Ux8_0(v *Value) bool {
36167 b := v.Block
36168 typ := &b.Func.Config.Types
36169
36170
36171
36172 for {
36173 t := v.Type
36174 y := v.Args[1]
36175 x := v.Args[0]
36176 v.reset(OpARM64CSEL)
36177 v.Aux = OpARM64LessThanU
36178 v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
36179 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
36180 v1.AddArg(x)
36181 v0.AddArg(v1)
36182 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
36183 v2.AddArg(y)
36184 v0.AddArg(v2)
36185 v.AddArg(v0)
36186 v3 := b.NewValue0(v.Pos, OpConst64, t)
36187 v3.AuxInt = 0
36188 v.AddArg(v3)
36189 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
36190 v4.AuxInt = 64
36191 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
36192 v5.AddArg(y)
36193 v4.AddArg(v5)
36194 v.AddArg(v4)
36195 return true
36196 }
36197 }
36198 func rewriteValueARM64_OpRsh16x16_0(v *Value) bool {
36199 b := v.Block
36200 typ := &b.Func.Config.Types
// match: (Rsh16x16 x y)
// cond:
// result: (SRA (SignExt16to64 x) (CSEL {OpARM64LessThanU} <y.Type> (ZeroExt16to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y))))
36204 for {
36205 y := v.Args[1]
36206 x := v.Args[0]
36207 v.reset(OpARM64SRA)
36208 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
36209 v0.AddArg(x)
36210 v.AddArg(v0)
36211 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
36212 v1.Aux = OpARM64LessThanU
36213 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
36214 v2.AddArg(y)
36215 v1.AddArg(v2)
36216 v3 := b.NewValue0(v.Pos, OpConst64, y.Type)
36217 v3.AuxInt = 63
36218 v1.AddArg(v3)
36219 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
36220 v4.AuxInt = 64
36221 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
36222 v5.AddArg(y)
36223 v4.AddArg(v5)
36224 v1.AddArg(v4)
36225 v.AddArg(v1)
36226 return true
36227 }
36228 }
36229 func rewriteValueARM64_OpRsh16x32_0(v *Value) bool {
36230 b := v.Block
36231 typ := &b.Func.Config.Types
36232
36233
36234
36235 for {
36236 y := v.Args[1]
36237 x := v.Args[0]
36238 v.reset(OpARM64SRA)
36239 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
36240 v0.AddArg(x)
36241 v.AddArg(v0)
36242 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
36243 v1.Aux = OpARM64LessThanU
36244 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
36245 v2.AddArg(y)
36246 v1.AddArg(v2)
36247 v3 := b.NewValue0(v.Pos, OpConst64, y.Type)
36248 v3.AuxInt = 63
36249 v1.AddArg(v3)
36250 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
36251 v4.AuxInt = 64
36252 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
36253 v5.AddArg(y)
36254 v4.AddArg(v5)
36255 v1.AddArg(v4)
36256 v.AddArg(v1)
36257 return true
36258 }
36259 }
36260 func rewriteValueARM64_OpRsh16x64_0(v *Value) bool {
36261 b := v.Block
36262 typ := &b.Func.Config.Types
36263
36264
36265
36266 for {
36267 y := v.Args[1]
36268 x := v.Args[0]
36269 v.reset(OpARM64SRA)
36270 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
36271 v0.AddArg(x)
36272 v.AddArg(v0)
36273 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
36274 v1.Aux = OpARM64LessThanU
36275 v1.AddArg(y)
36276 v2 := b.NewValue0(v.Pos, OpConst64, y.Type)
36277 v2.AuxInt = 63
36278 v1.AddArg(v2)
36279 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
36280 v3.AuxInt = 64
36281 v3.AddArg(y)
36282 v1.AddArg(v3)
36283 v.AddArg(v1)
36284 return true
36285 }
36286 }
36287 func rewriteValueARM64_OpRsh16x8_0(v *Value) bool {
36288 b := v.Block
36289 typ := &b.Func.Config.Types
36290
36291
36292
36293 for {
36294 y := v.Args[1]
36295 x := v.Args[0]
36296 v.reset(OpARM64SRA)
36297 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
36298 v0.AddArg(x)
36299 v.AddArg(v0)
36300 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
36301 v1.Aux = OpARM64LessThanU
36302 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
36303 v2.AddArg(y)
36304 v1.AddArg(v2)
36305 v3 := b.NewValue0(v.Pos, OpConst64, y.Type)
36306 v3.AuxInt = 63
36307 v1.AddArg(v3)
36308 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
36309 v4.AuxInt = 64
36310 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
36311 v5.AddArg(y)
36312 v4.AddArg(v5)
36313 v1.AddArg(v4)
36314 v.AddArg(v1)
36315 return true
36316 }
36317 }
36318 func rewriteValueARM64_OpRsh32Ux16_0(v *Value) bool {
36319 b := v.Block
36320 typ := &b.Func.Config.Types
36321
36322
36323
36324 for {
36325 t := v.Type
36326 y := v.Args[1]
36327 x := v.Args[0]
36328 v.reset(OpARM64CSEL)
36329 v.Aux = OpARM64LessThanU
36330 v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
36331 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
36332 v1.AddArg(x)
36333 v0.AddArg(v1)
36334 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
36335 v2.AddArg(y)
36336 v0.AddArg(v2)
36337 v.AddArg(v0)
36338 v3 := b.NewValue0(v.Pos, OpConst64, t)
36339 v3.AuxInt = 0
36340 v.AddArg(v3)
36341 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
36342 v4.AuxInt = 64
36343 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
36344 v5.AddArg(y)
36345 v4.AddArg(v5)
36346 v.AddArg(v4)
36347 return true
36348 }
36349 }
36350 func rewriteValueARM64_OpRsh32Ux32_0(v *Value) bool {
36351 b := v.Block
36352 typ := &b.Func.Config.Types
36353
36354
36355
36356 for {
36357 t := v.Type
36358 y := v.Args[1]
36359 x := v.Args[0]
36360 v.reset(OpARM64CSEL)
36361 v.Aux = OpARM64LessThanU
36362 v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
36363 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
36364 v1.AddArg(x)
36365 v0.AddArg(v1)
36366 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
36367 v2.AddArg(y)
36368 v0.AddArg(v2)
36369 v.AddArg(v0)
36370 v3 := b.NewValue0(v.Pos, OpConst64, t)
36371 v3.AuxInt = 0
36372 v.AddArg(v3)
36373 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
36374 v4.AuxInt = 64
36375 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
36376 v5.AddArg(y)
36377 v4.AddArg(v5)
36378 v.AddArg(v4)
36379 return true
36380 }
36381 }
36382 func rewriteValueARM64_OpRsh32Ux64_0(v *Value) bool {
36383 b := v.Block
36384 typ := &b.Func.Config.Types
36385
36386
36387
36388 for {
36389 t := v.Type
36390 y := v.Args[1]
36391 x := v.Args[0]
36392 v.reset(OpARM64CSEL)
36393 v.Aux = OpARM64LessThanU
36394 v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
36395 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
36396 v1.AddArg(x)
36397 v0.AddArg(v1)
36398 v0.AddArg(y)
36399 v.AddArg(v0)
36400 v2 := b.NewValue0(v.Pos, OpConst64, t)
36401 v2.AuxInt = 0
36402 v.AddArg(v2)
36403 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
36404 v3.AuxInt = 64
36405 v3.AddArg(y)
36406 v.AddArg(v3)
36407 return true
36408 }
36409 }
36410 func rewriteValueARM64_OpRsh32Ux8_0(v *Value) bool {
36411 b := v.Block
36412 typ := &b.Func.Config.Types
36413
36414
36415
36416 for {
36417 t := v.Type
36418 y := v.Args[1]
36419 x := v.Args[0]
36420 v.reset(OpARM64CSEL)
36421 v.Aux = OpARM64LessThanU
36422 v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
36423 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
36424 v1.AddArg(x)
36425 v0.AddArg(v1)
36426 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
36427 v2.AddArg(y)
36428 v0.AddArg(v2)
36429 v.AddArg(v0)
36430 v3 := b.NewValue0(v.Pos, OpConst64, t)
36431 v3.AuxInt = 0
36432 v.AddArg(v3)
36433 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
36434 v4.AuxInt = 64
36435 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
36436 v5.AddArg(y)
36437 v4.AddArg(v5)
36438 v.AddArg(v4)
36439 return true
36440 }
36441 }
36442 func rewriteValueARM64_OpRsh32x16_0(v *Value) bool {
36443 b := v.Block
36444 typ := &b.Func.Config.Types
36445
36446
36447
36448 for {
36449 y := v.Args[1]
36450 x := v.Args[0]
36451 v.reset(OpARM64SRA)
36452 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
36453 v0.AddArg(x)
36454 v.AddArg(v0)
36455 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
36456 v1.Aux = OpARM64LessThanU
36457 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
36458 v2.AddArg(y)
36459 v1.AddArg(v2)
36460 v3 := b.NewValue0(v.Pos, OpConst64, y.Type)
36461 v3.AuxInt = 63
36462 v1.AddArg(v3)
36463 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
36464 v4.AuxInt = 64
36465 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
36466 v5.AddArg(y)
36467 v4.AddArg(v5)
36468 v1.AddArg(v4)
36469 v.AddArg(v1)
36470 return true
36471 }
36472 }
36473 func rewriteValueARM64_OpRsh32x32_0(v *Value) bool {
36474 b := v.Block
36475 typ := &b.Func.Config.Types
36476
36477
36478
36479 for {
36480 y := v.Args[1]
36481 x := v.Args[0]
36482 v.reset(OpARM64SRA)
36483 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
36484 v0.AddArg(x)
36485 v.AddArg(v0)
36486 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
36487 v1.Aux = OpARM64LessThanU
36488 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
36489 v2.AddArg(y)
36490 v1.AddArg(v2)
36491 v3 := b.NewValue0(v.Pos, OpConst64, y.Type)
36492 v3.AuxInt = 63
36493 v1.AddArg(v3)
36494 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
36495 v4.AuxInt = 64
36496 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
36497 v5.AddArg(y)
36498 v4.AddArg(v5)
36499 v1.AddArg(v4)
36500 v.AddArg(v1)
36501 return true
36502 }
36503 }
36504 func rewriteValueARM64_OpRsh32x64_0(v *Value) bool {
36505 b := v.Block
36506 typ := &b.Func.Config.Types
36507
36508
36509
36510 for {
36511 y := v.Args[1]
36512 x := v.Args[0]
36513 v.reset(OpARM64SRA)
36514 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
36515 v0.AddArg(x)
36516 v.AddArg(v0)
36517 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
36518 v1.Aux = OpARM64LessThanU
36519 v1.AddArg(y)
36520 v2 := b.NewValue0(v.Pos, OpConst64, y.Type)
36521 v2.AuxInt = 63
36522 v1.AddArg(v2)
36523 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
36524 v3.AuxInt = 64
36525 v3.AddArg(y)
36526 v1.AddArg(v3)
36527 v.AddArg(v1)
36528 return true
36529 }
36530 }
36531 func rewriteValueARM64_OpRsh32x8_0(v *Value) bool {
36532 b := v.Block
36533 typ := &b.Func.Config.Types
36534
36535
36536
36537 for {
36538 y := v.Args[1]
36539 x := v.Args[0]
36540 v.reset(OpARM64SRA)
36541 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
36542 v0.AddArg(x)
36543 v.AddArg(v0)
36544 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
36545 v1.Aux = OpARM64LessThanU
36546 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
36547 v2.AddArg(y)
36548 v1.AddArg(v2)
36549 v3 := b.NewValue0(v.Pos, OpConst64, y.Type)
36550 v3.AuxInt = 63
36551 v1.AddArg(v3)
36552 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
36553 v4.AuxInt = 64
36554 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
36555 v5.AddArg(y)
36556 v4.AddArg(v5)
36557 v1.AddArg(v4)
36558 v.AddArg(v1)
36559 return true
36560 }
36561 }
36562 func rewriteValueARM64_OpRsh64Ux16_0(v *Value) bool {
36563 b := v.Block
36564 typ := &b.Func.Config.Types
36565
36566
36567
36568 for {
36569 t := v.Type
36570 y := v.Args[1]
36571 x := v.Args[0]
36572 v.reset(OpARM64CSEL)
36573 v.Aux = OpARM64LessThanU
36574 v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
36575 v0.AddArg(x)
36576 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
36577 v1.AddArg(y)
36578 v0.AddArg(v1)
36579 v.AddArg(v0)
36580 v2 := b.NewValue0(v.Pos, OpConst64, t)
36581 v2.AuxInt = 0
36582 v.AddArg(v2)
36583 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
36584 v3.AuxInt = 64
36585 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
36586 v4.AddArg(y)
36587 v3.AddArg(v4)
36588 v.AddArg(v3)
36589 return true
36590 }
36591 }
36592 func rewriteValueARM64_OpRsh64Ux32_0(v *Value) bool {
36593 b := v.Block
36594 typ := &b.Func.Config.Types
36595
36596
36597
36598 for {
36599 t := v.Type
36600 y := v.Args[1]
36601 x := v.Args[0]
36602 v.reset(OpARM64CSEL)
36603 v.Aux = OpARM64LessThanU
36604 v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
36605 v0.AddArg(x)
36606 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
36607 v1.AddArg(y)
36608 v0.AddArg(v1)
36609 v.AddArg(v0)
36610 v2 := b.NewValue0(v.Pos, OpConst64, t)
36611 v2.AuxInt = 0
36612 v.AddArg(v2)
36613 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
36614 v3.AuxInt = 64
36615 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
36616 v4.AddArg(y)
36617 v3.AddArg(v4)
36618 v.AddArg(v3)
36619 return true
36620 }
36621 }
36622 func rewriteValueARM64_OpRsh64Ux64_0(v *Value) bool {
36623 b := v.Block
36624
36625
36626
36627 for {
36628 t := v.Type
36629 y := v.Args[1]
36630 x := v.Args[0]
36631 v.reset(OpARM64CSEL)
36632 v.Aux = OpARM64LessThanU
36633 v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
36634 v0.AddArg(x)
36635 v0.AddArg(y)
36636 v.AddArg(v0)
36637 v1 := b.NewValue0(v.Pos, OpConst64, t)
36638 v1.AuxInt = 0
36639 v.AddArg(v1)
36640 v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
36641 v2.AuxInt = 64
36642 v2.AddArg(y)
36643 v.AddArg(v2)
36644 return true
36645 }
36646 }
36647 func rewriteValueARM64_OpRsh64Ux8_0(v *Value) bool {
36648 b := v.Block
36649 typ := &b.Func.Config.Types
36650
36651
36652
36653 for {
36654 t := v.Type
36655 y := v.Args[1]
36656 x := v.Args[0]
36657 v.reset(OpARM64CSEL)
36658 v.Aux = OpARM64LessThanU
36659 v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
36660 v0.AddArg(x)
36661 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
36662 v1.AddArg(y)
36663 v0.AddArg(v1)
36664 v.AddArg(v0)
36665 v2 := b.NewValue0(v.Pos, OpConst64, t)
36666 v2.AuxInt = 0
36667 v.AddArg(v2)
36668 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
36669 v3.AuxInt = 64
36670 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
36671 v4.AddArg(y)
36672 v3.AddArg(v4)
36673 v.AddArg(v3)
36674 return true
36675 }
36676 }
36677 func rewriteValueARM64_OpRsh64x16_0(v *Value) bool {
36678 b := v.Block
36679 typ := &b.Func.Config.Types
36680
36681
36682
36683 for {
36684 y := v.Args[1]
36685 x := v.Args[0]
36686 v.reset(OpARM64SRA)
36687 v.AddArg(x)
36688 v0 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
36689 v0.Aux = OpARM64LessThanU
36690 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
36691 v1.AddArg(y)
36692 v0.AddArg(v1)
36693 v2 := b.NewValue0(v.Pos, OpConst64, y.Type)
36694 v2.AuxInt = 63
36695 v0.AddArg(v2)
36696 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
36697 v3.AuxInt = 64
36698 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
36699 v4.AddArg(y)
36700 v3.AddArg(v4)
36701 v0.AddArg(v3)
36702 v.AddArg(v0)
36703 return true
36704 }
36705 }
36706 func rewriteValueARM64_OpRsh64x32_0(v *Value) bool {
36707 b := v.Block
36708 typ := &b.Func.Config.Types
36709
36710
36711
36712 for {
36713 y := v.Args[1]
36714 x := v.Args[0]
36715 v.reset(OpARM64SRA)
36716 v.AddArg(x)
36717 v0 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
36718 v0.Aux = OpARM64LessThanU
36719 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
36720 v1.AddArg(y)
36721 v0.AddArg(v1)
36722 v2 := b.NewValue0(v.Pos, OpConst64, y.Type)
36723 v2.AuxInt = 63
36724 v0.AddArg(v2)
36725 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
36726 v3.AuxInt = 64
36727 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
36728 v4.AddArg(y)
36729 v3.AddArg(v4)
36730 v0.AddArg(v3)
36731 v.AddArg(v0)
36732 return true
36733 }
36734 }
36735 func rewriteValueARM64_OpRsh64x64_0(v *Value) bool {
36736 b := v.Block
// match: (Rsh64x64 x y)
// cond:
// result: (SRA x (CSEL {OpARM64LessThanU} <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] y)))
36740 for {
36741 y := v.Args[1]
36742 x := v.Args[0]
36743 v.reset(OpARM64SRA)
36744 v.AddArg(x)
36745 v0 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
36746 v0.Aux = OpARM64LessThanU
36747 v0.AddArg(y)
36748 v1 := b.NewValue0(v.Pos, OpConst64, y.Type)
36749 v1.AuxInt = 63
36750 v0.AddArg(v1)
36751 v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
36752 v2.AuxInt = 64
36753 v2.AddArg(y)
36754 v0.AddArg(v2)
36755 v.AddArg(v0)
36756 return true
36757 }
36758 }
36759 func rewriteValueARM64_OpRsh64x8_0(v *Value) bool {
36760 b := v.Block
36761 typ := &b.Func.Config.Types
36762
36763
36764
36765 for {
36766 y := v.Args[1]
36767 x := v.Args[0]
36768 v.reset(OpARM64SRA)
36769 v.AddArg(x)
36770 v0 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
36771 v0.Aux = OpARM64LessThanU
36772 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
36773 v1.AddArg(y)
36774 v0.AddArg(v1)
36775 v2 := b.NewValue0(v.Pos, OpConst64, y.Type)
36776 v2.AuxInt = 63
36777 v0.AddArg(v2)
36778 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
36779 v3.AuxInt = 64
36780 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
36781 v4.AddArg(y)
36782 v3.AddArg(v4)
36783 v0.AddArg(v3)
36784 v.AddArg(v0)
36785 return true
36786 }
36787 }
36788 func rewriteValueARM64_OpRsh8Ux16_0(v *Value) bool {
36789 b := v.Block
36790 typ := &b.Func.Config.Types
36791
36792
36793
36794 for {
36795 t := v.Type
36796 y := v.Args[1]
36797 x := v.Args[0]
36798 v.reset(OpARM64CSEL)
36799 v.Aux = OpARM64LessThanU
36800 v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
36801 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
36802 v1.AddArg(x)
36803 v0.AddArg(v1)
36804 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
36805 v2.AddArg(y)
36806 v0.AddArg(v2)
36807 v.AddArg(v0)
36808 v3 := b.NewValue0(v.Pos, OpConst64, t)
36809 v3.AuxInt = 0
36810 v.AddArg(v3)
36811 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
36812 v4.AuxInt = 64
36813 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
36814 v5.AddArg(y)
36815 v4.AddArg(v5)
36816 v.AddArg(v4)
36817 return true
36818 }
36819 }
36820 func rewriteValueARM64_OpRsh8Ux32_0(v *Value) bool {
36821 b := v.Block
36822 typ := &b.Func.Config.Types
36823
36824
36825
36826 for {
36827 t := v.Type
36828 y := v.Args[1]
36829 x := v.Args[0]
36830 v.reset(OpARM64CSEL)
36831 v.Aux = OpARM64LessThanU
36832 v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
36833 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
36834 v1.AddArg(x)
36835 v0.AddArg(v1)
36836 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
36837 v2.AddArg(y)
36838 v0.AddArg(v2)
36839 v.AddArg(v0)
36840 v3 := b.NewValue0(v.Pos, OpConst64, t)
36841 v3.AuxInt = 0
36842 v.AddArg(v3)
36843 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
36844 v4.AuxInt = 64
36845 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
36846 v5.AddArg(y)
36847 v4.AddArg(v5)
36848 v.AddArg(v4)
36849 return true
36850 }
36851 }
36852 func rewriteValueARM64_OpRsh8Ux64_0(v *Value) bool {
36853 b := v.Block
36854 typ := &b.Func.Config.Types
36855
36856
36857
36858 for {
36859 t := v.Type
36860 y := v.Args[1]
36861 x := v.Args[0]
36862 v.reset(OpARM64CSEL)
36863 v.Aux = OpARM64LessThanU
36864 v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
36865 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
36866 v1.AddArg(x)
36867 v0.AddArg(v1)
36868 v0.AddArg(y)
36869 v.AddArg(v0)
36870 v2 := b.NewValue0(v.Pos, OpConst64, t)
36871 v2.AuxInt = 0
36872 v.AddArg(v2)
36873 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
36874 v3.AuxInt = 64
36875 v3.AddArg(y)
36876 v.AddArg(v3)
36877 return true
36878 }
36879 }
36880 func rewriteValueARM64_OpRsh8Ux8_0(v *Value) bool {
36881 b := v.Block
36882 typ := &b.Func.Config.Types
36883
36884
36885
36886 for {
36887 t := v.Type
36888 y := v.Args[1]
36889 x := v.Args[0]
36890 v.reset(OpARM64CSEL)
36891 v.Aux = OpARM64LessThanU
36892 v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
36893 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
36894 v1.AddArg(x)
36895 v0.AddArg(v1)
36896 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
36897 v2.AddArg(y)
36898 v0.AddArg(v2)
36899 v.AddArg(v0)
36900 v3 := b.NewValue0(v.Pos, OpConst64, t)
36901 v3.AuxInt = 0
36902 v.AddArg(v3)
36903 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
36904 v4.AuxInt = 64
36905 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
36906 v5.AddArg(y)
36907 v4.AddArg(v5)
36908 v.AddArg(v4)
36909 return true
36910 }
36911 }
36912 func rewriteValueARM64_OpRsh8x16_0(v *Value) bool {
36913 b := v.Block
36914 typ := &b.Func.Config.Types
36915
36916
36917
36918 for {
36919 y := v.Args[1]
36920 x := v.Args[0]
36921 v.reset(OpARM64SRA)
36922 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
36923 v0.AddArg(x)
36924 v.AddArg(v0)
36925 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
36926 v1.Aux = OpARM64LessThanU
36927 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
36928 v2.AddArg(y)
36929 v1.AddArg(v2)
36930 v3 := b.NewValue0(v.Pos, OpConst64, y.Type)
36931 v3.AuxInt = 63
36932 v1.AddArg(v3)
36933 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
36934 v4.AuxInt = 64
36935 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
36936 v5.AddArg(y)
36937 v4.AddArg(v5)
36938 v1.AddArg(v4)
36939 v.AddArg(v1)
36940 return true
36941 }
36942 }
36943 func rewriteValueARM64_OpRsh8x32_0(v *Value) bool {
36944 b := v.Block
36945 typ := &b.Func.Config.Types
36946
36947
36948
36949 for {
36950 y := v.Args[1]
36951 x := v.Args[0]
36952 v.reset(OpARM64SRA)
36953 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
36954 v0.AddArg(x)
36955 v.AddArg(v0)
36956 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
36957 v1.Aux = OpARM64LessThanU
36958 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
36959 v2.AddArg(y)
36960 v1.AddArg(v2)
36961 v3 := b.NewValue0(v.Pos, OpConst64, y.Type)
36962 v3.AuxInt = 63
36963 v1.AddArg(v3)
36964 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
36965 v4.AuxInt = 64
36966 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
36967 v5.AddArg(y)
36968 v4.AddArg(v5)
36969 v1.AddArg(v4)
36970 v.AddArg(v1)
36971 return true
36972 }
36973 }
36974 func rewriteValueARM64_OpRsh8x64_0(v *Value) bool {
36975 b := v.Block
36976 typ := &b.Func.Config.Types
36977
36978
36979
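// match: (Rsh8x64 x y)
// cond:
// result: (SRA (SignExt8to64 x) (CSEL {OpARM64LessThanU} <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] y)))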
36980 for {
36981 y := v.Args[1]
36982 x := v.Args[0]
36983 v.reset(OpARM64SRA)
36984 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
36985 v0.AddArg(x)
36986 v.AddArg(v0)
36987 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
36988 v1.Aux = OpARM64LessThanU
36989 v1.AddArg(y)
36990 v2 := b.NewValue0(v.Pos, OpConst64, y.Type)
36991 v2.AuxInt = 63
36992 v1.AddArg(v2)
36993 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
36994 v3.AuxInt = 64
36995 v3.AddArg(y)
36996 v1.AddArg(v3)
36997 v.AddArg(v1)
36998 return true
36999 }
37000 }
37001 func rewriteValueARM64_OpRsh8x8_0(v *Value) bool {
37002 b := v.Block
37003 typ := &b.Func.Config.Types
37004
37005
37006
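// match: (Rsh8x8 x y)
// cond:
// result: (SRA (SignExt8to64 x) (CSEL {OpARM64LessThanU} <y.Type> (ZeroExt8to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y))))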
37007 for {
37008 y := v.Args[1]
37009 x := v.Args[0]
37010 v.reset(OpARM64SRA)
37011 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
37012 v0.AddArg(x)
37013 v.AddArg(v0)
37014 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
37015 v1.Aux = OpARM64LessThanU
37016 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
37017 v2.AddArg(y)
37018 v1.AddArg(v2)
37019 v3 := b.NewValue0(v.Pos, OpConst64, y.Type)
37020 v3.AuxInt = 63
37021 v1.AddArg(v3)
37022 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
37023 v4.AuxInt = 64
37024 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
37025 v5.AddArg(y)
37026 v4.AddArg(v5)
37027 v1.AddArg(v4)
37028 v.AddArg(v1)
37029 return true
37030 }
37031 }
37032 func rewriteValueARM64_OpSelect0_0(v *Value) bool {
37033 b := v.Block
37034 typ := &b.Func.Config.Types
37035
37036
37037
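// match: (Select0 (Add64carry x y c))
// cond:
// result: (Select0 <typ.UInt64> (ADCSflags x y (Select1 <types.TypeFlags> (ADDSconstflags [-1] c))))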
37038 for {
37039 v_0 := v.Args[0]
37040 if v_0.Op != OpAdd64carry {
37041 break
37042 }
37043 c := v_0.Args[2]
37044 x := v_0.Args[0]
37045 y := v_0.Args[1]
37046 v.reset(OpSelect0)
37047 v.Type = typ.UInt64
37048 v0 := b.NewValue0(v.Pos, OpARM64ADCSflags, types.NewTuple(typ.UInt64, types.TypeFlags))
37049 v0.AddArg(x)
37050 v0.AddArg(y)
37051 v1 := b.NewValue0(v.Pos, OpSelect1, types.TypeFlags)
37052 v2 := b.NewValue0(v.Pos, OpARM64ADDSconstflags, types.NewTuple(typ.UInt64, types.TypeFlags))
37053 v2.AuxInt = -1
37054 v2.AddArg(c)
37055 v1.AddArg(v2)
37056 v0.AddArg(v1)
37057 v.AddArg(v0)
37058 return true
37059 }
37060
37061
37062
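// match: (Select0 (Sub64borrow x y bo))
// cond:
// result: (Select0 <typ.UInt64> (SBCSflags x y (Select1 <types.TypeFlags> (NEGSflags bo))))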
37063 for {
37064 v_0 := v.Args[0]
37065 if v_0.Op != OpSub64borrow {
37066 break
37067 }
37068 bo := v_0.Args[2]
37069 x := v_0.Args[0]
37070 y := v_0.Args[1]
37071 v.reset(OpSelect0)
37072 v.Type = typ.UInt64
37073 v0 := b.NewValue0(v.Pos, OpARM64SBCSflags, types.NewTuple(typ.UInt64, types.TypeFlags))
37074 v0.AddArg(x)
37075 v0.AddArg(y)
37076 v1 := b.NewValue0(v.Pos, OpSelect1, types.TypeFlags)
37077 v2 := b.NewValue0(v.Pos, OpARM64NEGSflags, types.NewTuple(typ.UInt64, types.TypeFlags))
37078 v2.AddArg(bo)
37079 v1.AddArg(v2)
37080 v0.AddArg(v1)
37081 v.AddArg(v0)
37082 return true
37083 }
37084 return false
37085 }
37086 func rewriteValueARM64_OpSelect1_0(v *Value) bool {
37087 b := v.Block
37088 typ := &b.Func.Config.Types
37089
37090
37091
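// match: (Select1 (Add64carry x y c))
// cond:
// result: (ADCzerocarry <typ.UInt64> (Select1 <types.TypeFlags> (ADCSflags x y (Select1 <types.TypeFlags> (ADDSconstflags [-1] c)))))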
37092 for {
37093 v_0 := v.Args[0]
37094 if v_0.Op != OpAdd64carry {
37095 break
37096 }
37097 c := v_0.Args[2]
37098 x := v_0.Args[0]
37099 y := v_0.Args[1]
37100 v.reset(OpARM64ADCzerocarry)
37101 v.Type = typ.UInt64
37102 v0 := b.NewValue0(v.Pos, OpSelect1, types.TypeFlags)
37103 v1 := b.NewValue0(v.Pos, OpARM64ADCSflags, types.NewTuple(typ.UInt64, types.TypeFlags))
37104 v1.AddArg(x)
37105 v1.AddArg(y)
37106 v2 := b.NewValue0(v.Pos, OpSelect1, types.TypeFlags)
37107 v3 := b.NewValue0(v.Pos, OpARM64ADDSconstflags, types.NewTuple(typ.UInt64, types.TypeFlags))
37108 v3.AuxInt = -1
37109 v3.AddArg(c)
37110 v2.AddArg(v3)
37111 v1.AddArg(v2)
37112 v0.AddArg(v1)
37113 v.AddArg(v0)
37114 return true
37115 }
37116
37117
37118
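// match: (Select1 (Sub64borrow x y bo))
// cond:
// result: (NEG <typ.UInt64> (NGCzerocarry <typ.UInt64> (Select1 <types.TypeFlags> (SBCSflags x y (Select1 <types.TypeFlags> (NEGSflags bo))))))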
37119 for {
37120 v_0 := v.Args[0]
37121 if v_0.Op != OpSub64borrow {
37122 break
37123 }
37124 bo := v_0.Args[2]
37125 x := v_0.Args[0]
37126 y := v_0.Args[1]
37127 v.reset(OpARM64NEG)
37128 v.Type = typ.UInt64
37129 v0 := b.NewValue0(v.Pos, OpARM64NGCzerocarry, typ.UInt64)
37130 v1 := b.NewValue0(v.Pos, OpSelect1, types.TypeFlags)
37131 v2 := b.NewValue0(v.Pos, OpARM64SBCSflags, types.NewTuple(typ.UInt64, types.TypeFlags))
37132 v2.AddArg(x)
37133 v2.AddArg(y)
37134 v3 := b.NewValue0(v.Pos, OpSelect1, types.TypeFlags)
37135 v4 := b.NewValue0(v.Pos, OpARM64NEGSflags, types.NewTuple(typ.UInt64, types.TypeFlags))
37136 v4.AddArg(bo)
37137 v3.AddArg(v4)
37138 v2.AddArg(v3)
37139 v1.AddArg(v2)
37140 v0.AddArg(v1)
37141 v.AddArg(v0)
37142 return true
37143 }
37144 return false
37145 }
37146 func rewriteValueARM64_OpSignExt16to32_0(v *Value) bool {
37147
37148
37149
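// match: (SignExt16to32 x)
// cond:
// result: (MOVHreg x)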
37150 for {
37151 x := v.Args[0]
37152 v.reset(OpARM64MOVHreg)
37153 v.AddArg(x)
37154 return true
37155 }
37156 }
37157 func rewriteValueARM64_OpSignExt16to64_0(v *Value) bool {
37158
37159
37160
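// match: (SignExt16to64 x)
// cond:
// result: (MOVHreg x)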
37161 for {
37162 x := v.Args[0]
37163 v.reset(OpARM64MOVHreg)
37164 v.AddArg(x)
37165 return true
37166 }
37167 }
37168 func rewriteValueARM64_OpSignExt32to64_0(v *Value) bool {
37169
37170
37171
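// match: (SignExt32to64 x)
// cond:
// result: (MOVWreg x)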
37172 for {
37173 x := v.Args[0]
37174 v.reset(OpARM64MOVWreg)
37175 v.AddArg(x)
37176 return true
37177 }
37178 }
37179 func rewriteValueARM64_OpSignExt8to16_0(v *Value) bool {
37180
37181
37182
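// match: (SignExt8to16 x)
// cond:
// result: (MOVBreg x)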
37183 for {
37184 x := v.Args[0]
37185 v.reset(OpARM64MOVBreg)
37186 v.AddArg(x)
37187 return true
37188 }
37189 }
37190 func rewriteValueARM64_OpSignExt8to32_0(v *Value) bool {
37191
37192
37193
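// match: (SignExt8to32 x)
// cond:
// result: (MOVBreg x)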
37194 for {
37195 x := v.Args[0]
37196 v.reset(OpARM64MOVBreg)
37197 v.AddArg(x)
37198 return true
37199 }
37200 }
37201 func rewriteValueARM64_OpSignExt8to64_0(v *Value) bool {
37202
37203
37204
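// match: (SignExt8to64 x)
// cond:
// result: (MOVBreg x)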
37205 for {
37206 x := v.Args[0]
37207 v.reset(OpARM64MOVBreg)
37208 v.AddArg(x)
37209 return true
37210 }
37211 }
37212 func rewriteValueARM64_OpSlicemask_0(v *Value) bool {
37213 b := v.Block
37214
37215
37216
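// match: (Slicemask <t> x)
// cond:
// result: (SRAconst (NEG <t> x) [63])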
37217 for {
37218 t := v.Type
37219 x := v.Args[0]
37220 v.reset(OpARM64SRAconst)
37221 v.AuxInt = 63
37222 v0 := b.NewValue0(v.Pos, OpARM64NEG, t)
37223 v0.AddArg(x)
37224 v.AddArg(v0)
37225 return true
37226 }
37227 }
37228 func rewriteValueARM64_OpSqrt_0(v *Value) bool {
37229
37230
37231
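// match: (Sqrt x)
// cond:
// result: (FSQRTD x)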
37232 for {
37233 x := v.Args[0]
37234 v.reset(OpARM64FSQRTD)
37235 v.AddArg(x)
37236 return true
37237 }
37238 }
37239 func rewriteValueARM64_OpStaticCall_0(v *Value) bool {
37240
37241
37242
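// match: (StaticCall [argwid] {target} mem)
// cond:
// result: (CALLstatic [argwid] {target} mem)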
37243 for {
37244 argwid := v.AuxInt
37245 target := v.Aux
37246 mem := v.Args[0]
37247 v.reset(OpARM64CALLstatic)
37248 v.AuxInt = argwid
37249 v.Aux = target
37250 v.AddArg(mem)
37251 return true
37252 }
37253 }
37254 func rewriteValueARM64_OpStore_0(v *Value) bool {
37255
37256
37257
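// match: (Store {t} ptr val mem)
// cond: t.(*types.Type).Size() == 1
// result: (MOVBstore ptr val mem)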
37258 for {
37259 t := v.Aux
37260 mem := v.Args[2]
37261 ptr := v.Args[0]
37262 val := v.Args[1]
37263 if !(t.(*types.Type).Size() == 1) {
37264 break
37265 }
37266 v.reset(OpARM64MOVBstore)
37267 v.AddArg(ptr)
37268 v.AddArg(val)
37269 v.AddArg(mem)
37270 return true
37271 }
37272
37273
37274
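// match: (Store {t} ptr val mem)
// cond: t.(*types.Type).Size() == 2
// result: (MOVHstore ptr val mem)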
37275 for {
37276 t := v.Aux
37277 mem := v.Args[2]
37278 ptr := v.Args[0]
37279 val := v.Args[1]
37280 if !(t.(*types.Type).Size() == 2) {
37281 break
37282 }
37283 v.reset(OpARM64MOVHstore)
37284 v.AddArg(ptr)
37285 v.AddArg(val)
37286 v.AddArg(mem)
37287 return true
37288 }
37289
37290
37291
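// match: (Store {t} ptr val mem)
// cond: t.(*types.Type).Size() == 4 && !is32BitFloat(val.Type)
// result: (MOVWstore ptr val mem)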
37292 for {
37293 t := v.Aux
37294 mem := v.Args[2]
37295 ptr := v.Args[0]
37296 val := v.Args[1]
37297 if !(t.(*types.Type).Size() == 4 && !is32BitFloat(val.Type)) {
37298 break
37299 }
37300 v.reset(OpARM64MOVWstore)
37301 v.AddArg(ptr)
37302 v.AddArg(val)
37303 v.AddArg(mem)
37304 return true
37305 }
37306
37307
37308
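// match: (Store {t} ptr val mem)
// cond: t.(*types.Type).Size() == 8 && !is64BitFloat(val.Type)
// result: (MOVDstore ptr val mem)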
37309 for {
37310 t := v.Aux
37311 mem := v.Args[2]
37312 ptr := v.Args[0]
37313 val := v.Args[1]
37314 if !(t.(*types.Type).Size() == 8 && !is64BitFloat(val.Type)) {
37315 break
37316 }
37317 v.reset(OpARM64MOVDstore)
37318 v.AddArg(ptr)
37319 v.AddArg(val)
37320 v.AddArg(mem)
37321 return true
37322 }
37323
37324
37325
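// match: (Store {t} ptr val mem)
// cond: t.(*types.Type).Size() == 4 && is32BitFloat(val.Type)
// result: (FMOVSstore ptr val mem)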
37326 for {
37327 t := v.Aux
37328 mem := v.Args[2]
37329 ptr := v.Args[0]
37330 val := v.Args[1]
37331 if !(t.(*types.Type).Size() == 4 && is32BitFloat(val.Type)) {
37332 break
37333 }
37334 v.reset(OpARM64FMOVSstore)
37335 v.AddArg(ptr)
37336 v.AddArg(val)
37337 v.AddArg(mem)
37338 return true
37339 }
37340
37341
37342
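// match: (Store {t} ptr val mem)
// cond: t.(*types.Type).Size() == 8 && is64BitFloat(val.Type)
// result: (FMOVDstore ptr val mem)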
37343 for {
37344 t := v.Aux
37345 mem := v.Args[2]
37346 ptr := v.Args[0]
37347 val := v.Args[1]
37348 if !(t.(*types.Type).Size() == 8 && is64BitFloat(val.Type)) {
37349 break
37350 }
37351 v.reset(OpARM64FMOVDstore)
37352 v.AddArg(ptr)
37353 v.AddArg(val)
37354 v.AddArg(mem)
37355 return true
37356 }
37357 return false
37358 }
37359 func rewriteValueARM64_OpSub16_0(v *Value) bool {
37360
37361
37362
37363 for {
37364 y := v.Args[1]
37365 x := v.Args[0]
37366 v.reset(OpARM64SUB)
37367 v.AddArg(x)
37368 v.AddArg(y)
37369 return true
37370 }
37371 }
37372 func rewriteValueARM64_OpSub32_0(v *Value) bool {
37373
37374
37375
37376 for {
37377 y := v.Args[1]
37378 x := v.Args[0]
37379 v.reset(OpARM64SUB)
37380 v.AddArg(x)
37381 v.AddArg(y)
37382 return true
37383 }
37384 }
37385 func rewriteValueARM64_OpSub32F_0(v *Value) bool {
37386
37387
37388
37389 for {
37390 y := v.Args[1]
37391 x := v.Args[0]
37392 v.reset(OpARM64FSUBS)
37393 v.AddArg(x)
37394 v.AddArg(y)
37395 return true
37396 }
37397 }
37398 func rewriteValueARM64_OpSub64_0(v *Value) bool {
37399
37400
37401
37402 for {
37403 y := v.Args[1]
37404 x := v.Args[0]
37405 v.reset(OpARM64SUB)
37406 v.AddArg(x)
37407 v.AddArg(y)
37408 return true
37409 }
37410 }
37411 func rewriteValueARM64_OpSub64F_0(v *Value) bool {
37412
37413
37414
37415 for {
37416 y := v.Args[1]
37417 x := v.Args[0]
37418 v.reset(OpARM64FSUBD)
37419 v.AddArg(x)
37420 v.AddArg(y)
37421 return true
37422 }
37423 }
37424 func rewriteValueARM64_OpSub8_0(v *Value) bool {
37425
37426
37427
37428 for {
37429 y := v.Args[1]
37430 x := v.Args[0]
37431 v.reset(OpARM64SUB)
37432 v.AddArg(x)
37433 v.AddArg(y)
37434 return true
37435 }
37436 }
37437 func rewriteValueARM64_OpSubPtr_0(v *Value) bool {
37438
37439
37440
37441 for {
37442 y := v.Args[1]
37443 x := v.Args[0]
37444 v.reset(OpARM64SUB)
37445 v.AddArg(x)
37446 v.AddArg(y)
37447 return true
37448 }
37449 }
37450 func rewriteValueARM64_OpTrunc_0(v *Value) bool {
37451
37452
37453
37454 for {
37455 x := v.Args[0]
37456 v.reset(OpARM64FRINTZD)
37457 v.AddArg(x)
37458 return true
37459 }
37460 }
37461 func rewriteValueARM64_OpTrunc16to8_0(v *Value) bool {
37462
37463
37464
37465 for {
37466 x := v.Args[0]
37467 v.reset(OpCopy)
37468 v.Type = x.Type
37469 v.AddArg(x)
37470 return true
37471 }
37472 }
37473 func rewriteValueARM64_OpTrunc32to16_0(v *Value) bool {
37474
37475
37476
37477 for {
37478 x := v.Args[0]
37479 v.reset(OpCopy)
37480 v.Type = x.Type
37481 v.AddArg(x)
37482 return true
37483 }
37484 }
37485 func rewriteValueARM64_OpTrunc32to8_0(v *Value) bool {
37486
37487
37488
37489 for {
37490 x := v.Args[0]
37491 v.reset(OpCopy)
37492 v.Type = x.Type
37493 v.AddArg(x)
37494 return true
37495 }
37496 }
37497 func rewriteValueARM64_OpTrunc64to16_0(v *Value) bool {
37498
37499
37500
37501 for {
37502 x := v.Args[0]
37503 v.reset(OpCopy)
37504 v.Type = x.Type
37505 v.AddArg(x)
37506 return true
37507 }
37508 }
37509 func rewriteValueARM64_OpTrunc64to32_0(v *Value) bool {
37510
37511
37512
37513 for {
37514 x := v.Args[0]
37515 v.reset(OpCopy)
37516 v.Type = x.Type
37517 v.AddArg(x)
37518 return true
37519 }
37520 }
37521 func rewriteValueARM64_OpTrunc64to8_0(v *Value) bool {
37522
37523
37524
37525 for {
37526 x := v.Args[0]
37527 v.reset(OpCopy)
37528 v.Type = x.Type
37529 v.AddArg(x)
37530 return true
37531 }
37532 }
37533 func rewriteValueARM64_OpWB_0(v *Value) bool {
37534
37535
37536
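// match: (WB {fn} destptr srcptr mem)
// cond:
// result: (LoweredWB {fn} destptr srcptr mem)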
37537 for {
37538 fn := v.Aux
37539 mem := v.Args[2]
37540 destptr := v.Args[0]
37541 srcptr := v.Args[1]
37542 v.reset(OpARM64LoweredWB)
37543 v.Aux = fn
37544 v.AddArg(destptr)
37545 v.AddArg(srcptr)
37546 v.AddArg(mem)
37547 return true
37548 }
37549 }
37550 func rewriteValueARM64_OpXor16_0(v *Value) bool {
37551
37552
37553
37554 for {
37555 y := v.Args[1]
37556 x := v.Args[0]
37557 v.reset(OpARM64XOR)
37558 v.AddArg(x)
37559 v.AddArg(y)
37560 return true
37561 }
37562 }
37563 func rewriteValueARM64_OpXor32_0(v *Value) bool {
37564
37565
37566
37567 for {
37568 y := v.Args[1]
37569 x := v.Args[0]
37570 v.reset(OpARM64XOR)
37571 v.AddArg(x)
37572 v.AddArg(y)
37573 return true
37574 }
37575 }
37576 func rewriteValueARM64_OpXor64_0(v *Value) bool {
37577
37578
37579
37580 for {
37581 y := v.Args[1]
37582 x := v.Args[0]
37583 v.reset(OpARM64XOR)
37584 v.AddArg(x)
37585 v.AddArg(y)
37586 return true
37587 }
37588 }
37589 func rewriteValueARM64_OpXor8_0(v *Value) bool {
37590
37591
37592
37593 for {
37594 y := v.Args[1]
37595 x := v.Args[0]
37596 v.reset(OpARM64XOR)
37597 v.AddArg(x)
37598 v.AddArg(y)
37599 return true
37600 }
37601 }
37602 func rewriteValueARM64_OpZero_0(v *Value) bool {
37603 b := v.Block
37604 typ := &b.Func.Config.Types
37605
37606
37607
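// match: (Zero [0] _ mem)
// cond:
// result: mem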
37608 for {
37609 if v.AuxInt != 0 {
37610 break
37611 }
37612 mem := v.Args[1]
37613 v.reset(OpCopy)
37614 v.Type = mem.Type
37615 v.AddArg(mem)
37616 return true
37617 }
37618
37619
37620
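// match: (Zero [1] ptr mem)
// cond:
// result: (MOVBstore ptr (MOVDconst [0]) mem)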
37621 for {
37622 if v.AuxInt != 1 {
37623 break
37624 }
37625 mem := v.Args[1]
37626 ptr := v.Args[0]
37627 v.reset(OpARM64MOVBstore)
37628 v.AddArg(ptr)
37629 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
37630 v0.AuxInt = 0
37631 v.AddArg(v0)
37632 v.AddArg(mem)
37633 return true
37634 }
37635
37636
37637
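// match: (Zero [2] ptr mem)
// cond:
// result: (MOVHstore ptr (MOVDconst [0]) mem)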
37638 for {
37639 if v.AuxInt != 2 {
37640 break
37641 }
37642 mem := v.Args[1]
37643 ptr := v.Args[0]
37644 v.reset(OpARM64MOVHstore)
37645 v.AddArg(ptr)
37646 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
37647 v0.AuxInt = 0
37648 v.AddArg(v0)
37649 v.AddArg(mem)
37650 return true
37651 }
37652
37653
37654
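// match: (Zero [4] ptr mem)
// cond:
// result: (MOVWstore ptr (MOVDconst [0]) mem)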
37655 for {
37656 if v.AuxInt != 4 {
37657 break
37658 }
37659 mem := v.Args[1]
37660 ptr := v.Args[0]
37661 v.reset(OpARM64MOVWstore)
37662 v.AddArg(ptr)
37663 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
37664 v0.AuxInt = 0
37665 v.AddArg(v0)
37666 v.AddArg(mem)
37667 return true
37668 }
37669
37670
37671
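// match: (Zero [8] ptr mem)
// cond:
// result: (MOVDstore ptr (MOVDconst [0]) mem)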
37672 for {
37673 if v.AuxInt != 8 {
37674 break
37675 }
37676 mem := v.Args[1]
37677 ptr := v.Args[0]
37678 v.reset(OpARM64MOVDstore)
37679 v.AddArg(ptr)
37680 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
37681 v0.AuxInt = 0
37682 v.AddArg(v0)
37683 v.AddArg(mem)
37684 return true
37685 }
37686
37687
37688
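// match: (Zero [3] ptr mem)
// cond:
// result: (MOVBstore [2] ptr (MOVDconst [0]) (MOVHstore ptr (MOVDconst [0]) mem))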
37689 for {
37690 if v.AuxInt != 3 {
37691 break
37692 }
37693 mem := v.Args[1]
37694 ptr := v.Args[0]
37695 v.reset(OpARM64MOVBstore)
37696 v.AuxInt = 2
37697 v.AddArg(ptr)
37698 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
37699 v0.AuxInt = 0
37700 v.AddArg(v0)
37701 v1 := b.NewValue0(v.Pos, OpARM64MOVHstore, types.TypeMem)
37702 v1.AddArg(ptr)
37703 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
37704 v2.AuxInt = 0
37705 v1.AddArg(v2)
37706 v1.AddArg(mem)
37707 v.AddArg(v1)
37708 return true
37709 }
37710
37711
37712
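// match: (Zero [5] ptr mem)
// cond:
// result: (MOVBstore [4] ptr (MOVDconst [0]) (MOVWstore ptr (MOVDconst [0]) mem))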
37713 for {
37714 if v.AuxInt != 5 {
37715 break
37716 }
37717 mem := v.Args[1]
37718 ptr := v.Args[0]
37719 v.reset(OpARM64MOVBstore)
37720 v.AuxInt = 4
37721 v.AddArg(ptr)
37722 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
37723 v0.AuxInt = 0
37724 v.AddArg(v0)
37725 v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem)
37726 v1.AddArg(ptr)
37727 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
37728 v2.AuxInt = 0
37729 v1.AddArg(v2)
37730 v1.AddArg(mem)
37731 v.AddArg(v1)
37732 return true
37733 }
37734
37735
37736
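// match: (Zero [6] ptr mem)
// cond:
// result: (MOVHstore [4] ptr (MOVDconst [0]) (MOVWstore ptr (MOVDconst [0]) mem))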
37737 for {
37738 if v.AuxInt != 6 {
37739 break
37740 }
37741 mem := v.Args[1]
37742 ptr := v.Args[0]
37743 v.reset(OpARM64MOVHstore)
37744 v.AuxInt = 4
37745 v.AddArg(ptr)
37746 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
37747 v0.AuxInt = 0
37748 v.AddArg(v0)
37749 v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem)
37750 v1.AddArg(ptr)
37751 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
37752 v2.AuxInt = 0
37753 v1.AddArg(v2)
37754 v1.AddArg(mem)
37755 v.AddArg(v1)
37756 return true
37757 }
37758
37759
37760
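// match: (Zero [7] ptr mem)
// cond:
// result: (MOVBstore [6] ptr (MOVDconst [0]) (MOVHstore [4] ptr (MOVDconst [0]) (MOVWstore ptr (MOVDconst [0]) mem)))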
37761 for {
37762 if v.AuxInt != 7 {
37763 break
37764 }
37765 mem := v.Args[1]
37766 ptr := v.Args[0]
37767 v.reset(OpARM64MOVBstore)
37768 v.AuxInt = 6
37769 v.AddArg(ptr)
37770 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
37771 v0.AuxInt = 0
37772 v.AddArg(v0)
37773 v1 := b.NewValue0(v.Pos, OpARM64MOVHstore, types.TypeMem)
37774 v1.AuxInt = 4
37775 v1.AddArg(ptr)
37776 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
37777 v2.AuxInt = 0
37778 v1.AddArg(v2)
37779 v3 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem)
37780 v3.AddArg(ptr)
37781 v4 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
37782 v4.AuxInt = 0
37783 v3.AddArg(v4)
37784 v3.AddArg(mem)
37785 v1.AddArg(v3)
37786 v.AddArg(v1)
37787 return true
37788 }
37789
37790
37791
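// match: (Zero [9] ptr mem)
// cond:
// result: (MOVBstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem))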
37792 for {
37793 if v.AuxInt != 9 {
37794 break
37795 }
37796 mem := v.Args[1]
37797 ptr := v.Args[0]
37798 v.reset(OpARM64MOVBstore)
37799 v.AuxInt = 8
37800 v.AddArg(ptr)
37801 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
37802 v0.AuxInt = 0
37803 v.AddArg(v0)
37804 v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
37805 v1.AddArg(ptr)
37806 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
37807 v2.AuxInt = 0
37808 v1.AddArg(v2)
37809 v1.AddArg(mem)
37810 v.AddArg(v1)
37811 return true
37812 }
37813 return false
37814 }
37815 func rewriteValueARM64_OpZero_10(v *Value) bool {
37816 b := v.Block
37817 typ := &b.Func.Config.Types
37818
37819
37820
37821 for {
37822 if v.AuxInt != 10 {
37823 break
37824 }
37825 mem := v.Args[1]
37826 ptr := v.Args[0]
37827 v.reset(OpARM64MOVHstore)
37828 v.AuxInt = 8
37829 v.AddArg(ptr)
37830 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
37831 v0.AuxInt = 0
37832 v.AddArg(v0)
37833 v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
37834 v1.AddArg(ptr)
37835 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
37836 v2.AuxInt = 0
37837 v1.AddArg(v2)
37838 v1.AddArg(mem)
37839 v.AddArg(v1)
37840 return true
37841 }
37842
37843
37844
37845 for {
37846 if v.AuxInt != 11 {
37847 break
37848 }
37849 mem := v.Args[1]
37850 ptr := v.Args[0]
37851 v.reset(OpARM64MOVBstore)
37852 v.AuxInt = 10
37853 v.AddArg(ptr)
37854 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
37855 v0.AuxInt = 0
37856 v.AddArg(v0)
37857 v1 := b.NewValue0(v.Pos, OpARM64MOVHstore, types.TypeMem)
37858 v1.AuxInt = 8
37859 v1.AddArg(ptr)
37860 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
37861 v2.AuxInt = 0
37862 v1.AddArg(v2)
37863 v3 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
37864 v3.AddArg(ptr)
37865 v4 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
37866 v4.AuxInt = 0
37867 v3.AddArg(v4)
37868 v3.AddArg(mem)
37869 v1.AddArg(v3)
37870 v.AddArg(v1)
37871 return true
37872 }
37873
37874
37875
37876 for {
37877 if v.AuxInt != 12 {
37878 break
37879 }
37880 mem := v.Args[1]
37881 ptr := v.Args[0]
37882 v.reset(OpARM64MOVWstore)
37883 v.AuxInt = 8
37884 v.AddArg(ptr)
37885 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
37886 v0.AuxInt = 0
37887 v.AddArg(v0)
37888 v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
37889 v1.AddArg(ptr)
37890 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
37891 v2.AuxInt = 0
37892 v1.AddArg(v2)
37893 v1.AddArg(mem)
37894 v.AddArg(v1)
37895 return true
37896 }
37897
37898
37899
37900 for {
37901 if v.AuxInt != 13 {
37902 break
37903 }
37904 mem := v.Args[1]
37905 ptr := v.Args[0]
37906 v.reset(OpARM64MOVBstore)
37907 v.AuxInt = 12
37908 v.AddArg(ptr)
37909 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
37910 v0.AuxInt = 0
37911 v.AddArg(v0)
37912 v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem)
37913 v1.AuxInt = 8
37914 v1.AddArg(ptr)
37915 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
37916 v2.AuxInt = 0
37917 v1.AddArg(v2)
37918 v3 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
37919 v3.AddArg(ptr)
37920 v4 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
37921 v4.AuxInt = 0
37922 v3.AddArg(v4)
37923 v3.AddArg(mem)
37924 v1.AddArg(v3)
37925 v.AddArg(v1)
37926 return true
37927 }
37928
37929
37930
37931 for {
37932 if v.AuxInt != 14 {
37933 break
37934 }
37935 mem := v.Args[1]
37936 ptr := v.Args[0]
37937 v.reset(OpARM64MOVHstore)
37938 v.AuxInt = 12
37939 v.AddArg(ptr)
37940 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
37941 v0.AuxInt = 0
37942 v.AddArg(v0)
37943 v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem)
37944 v1.AuxInt = 8
37945 v1.AddArg(ptr)
37946 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
37947 v2.AuxInt = 0
37948 v1.AddArg(v2)
37949 v3 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
37950 v3.AddArg(ptr)
37951 v4 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
37952 v4.AuxInt = 0
37953 v3.AddArg(v4)
37954 v3.AddArg(mem)
37955 v1.AddArg(v3)
37956 v.AddArg(v1)
37957 return true
37958 }
37959
37960
37961
37962 for {
37963 if v.AuxInt != 15 {
37964 break
37965 }
37966 mem := v.Args[1]
37967 ptr := v.Args[0]
37968 v.reset(OpARM64MOVBstore)
37969 v.AuxInt = 14
37970 v.AddArg(ptr)
37971 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
37972 v0.AuxInt = 0
37973 v.AddArg(v0)
37974 v1 := b.NewValue0(v.Pos, OpARM64MOVHstore, types.TypeMem)
37975 v1.AuxInt = 12
37976 v1.AddArg(ptr)
37977 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
37978 v2.AuxInt = 0
37979 v1.AddArg(v2)
37980 v3 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem)
37981 v3.AuxInt = 8
37982 v3.AddArg(ptr)
37983 v4 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
37984 v4.AuxInt = 0
37985 v3.AddArg(v4)
37986 v5 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
37987 v5.AddArg(ptr)
37988 v6 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
37989 v6.AuxInt = 0
37990 v5.AddArg(v6)
37991 v5.AddArg(mem)
37992 v3.AddArg(v5)
37993 v1.AddArg(v3)
37994 v.AddArg(v1)
37995 return true
37996 }
37997
37998
37999
38000 for {
38001 if v.AuxInt != 16 {
38002 break
38003 }
38004 mem := v.Args[1]
38005 ptr := v.Args[0]
38006 v.reset(OpARM64STP)
38007 v.AuxInt = 0
38008 v.AddArg(ptr)
38009 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
38010 v0.AuxInt = 0
38011 v.AddArg(v0)
38012 v1 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
38013 v1.AuxInt = 0
38014 v.AddArg(v1)
38015 v.AddArg(mem)
38016 return true
38017 }
38018
38019
38020
38021 for {
38022 if v.AuxInt != 32 {
38023 break
38024 }
38025 mem := v.Args[1]
38026 ptr := v.Args[0]
38027 v.reset(OpARM64STP)
38028 v.AuxInt = 16
38029 v.AddArg(ptr)
38030 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
38031 v0.AuxInt = 0
38032 v.AddArg(v0)
38033 v1 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
38034 v1.AuxInt = 0
38035 v.AddArg(v1)
38036 v2 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem)
38037 v2.AuxInt = 0
38038 v2.AddArg(ptr)
38039 v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
38040 v3.AuxInt = 0
38041 v2.AddArg(v3)
38042 v4 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
38043 v4.AuxInt = 0
38044 v2.AddArg(v4)
38045 v2.AddArg(mem)
38046 v.AddArg(v2)
38047 return true
38048 }
38049
38050
38051
38052 for {
38053 if v.AuxInt != 48 {
38054 break
38055 }
38056 mem := v.Args[1]
38057 ptr := v.Args[0]
38058 v.reset(OpARM64STP)
38059 v.AuxInt = 32
38060 v.AddArg(ptr)
38061 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
38062 v0.AuxInt = 0
38063 v.AddArg(v0)
38064 v1 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
38065 v1.AuxInt = 0
38066 v.AddArg(v1)
38067 v2 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem)
38068 v2.AuxInt = 16
38069 v2.AddArg(ptr)
38070 v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
38071 v3.AuxInt = 0
38072 v2.AddArg(v3)
38073 v4 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
38074 v4.AuxInt = 0
38075 v2.AddArg(v4)
38076 v5 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem)
38077 v5.AuxInt = 0
38078 v5.AddArg(ptr)
38079 v6 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
38080 v6.AuxInt = 0
38081 v5.AddArg(v6)
38082 v7 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
38083 v7.AuxInt = 0
38084 v5.AddArg(v7)
38085 v5.AddArg(mem)
38086 v2.AddArg(v5)
38087 v.AddArg(v2)
38088 return true
38089 }
38090
38091
38092
38093 for {
38094 if v.AuxInt != 64 {
38095 break
38096 }
38097 mem := v.Args[1]
38098 ptr := v.Args[0]
38099 v.reset(OpARM64STP)
38100 v.AuxInt = 48
38101 v.AddArg(ptr)
38102 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
38103 v0.AuxInt = 0
38104 v.AddArg(v0)
38105 v1 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
38106 v1.AuxInt = 0
38107 v.AddArg(v1)
38108 v2 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem)
38109 v2.AuxInt = 32
38110 v2.AddArg(ptr)
38111 v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
38112 v3.AuxInt = 0
38113 v2.AddArg(v3)
38114 v4 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
38115 v4.AuxInt = 0
38116 v2.AddArg(v4)
38117 v5 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem)
38118 v5.AuxInt = 16
38119 v5.AddArg(ptr)
38120 v6 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
38121 v6.AuxInt = 0
38122 v5.AddArg(v6)
38123 v7 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
38124 v7.AuxInt = 0
38125 v5.AddArg(v7)
38126 v8 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem)
38127 v8.AuxInt = 0
38128 v8.AddArg(ptr)
38129 v9 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
38130 v9.AuxInt = 0
38131 v8.AddArg(v9)
38132 v10 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
38133 v10.AuxInt = 0
38134 v8.AddArg(v10)
38135 v8.AddArg(mem)
38136 v5.AddArg(v8)
38137 v2.AddArg(v5)
38138 v.AddArg(v2)
38139 return true
38140 }
38141 return false
38142 }
38143 func rewriteValueARM64_OpZero_20(v *Value) bool {
38144 b := v.Block
38145 config := b.Func.Config
38146
38147
38148
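// match: (Zero [s] ptr mem)
// cond: s%16 != 0 && s%16 <= 8 && s > 16
// result: (Zero [8] (OffPtr <ptr.Type> ptr [s-8]) (Zero [s-s%16] ptr mem))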
38149 for {
38150 s := v.AuxInt
38151 mem := v.Args[1]
38152 ptr := v.Args[0]
38153 if !(s%16 != 0 && s%16 <= 8 && s > 16) {
38154 break
38155 }
38156 v.reset(OpZero)
38157 v.AuxInt = 8
38158 v0 := b.NewValue0(v.Pos, OpOffPtr, ptr.Type)
38159 v0.AuxInt = s - 8
38160 v0.AddArg(ptr)
38161 v.AddArg(v0)
38162 v1 := b.NewValue0(v.Pos, OpZero, types.TypeMem)
38163 v1.AuxInt = s - s%16
38164 v1.AddArg(ptr)
38165 v1.AddArg(mem)
38166 v.AddArg(v1)
38167 return true
38168 }
38169
38170
38171
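// match: (Zero [s] ptr mem)
// cond: s%16 != 0 && s%16 > 8 && s > 16
// result: (Zero [16] (OffPtr <ptr.Type> ptr [s-16]) (Zero [s-s%16] ptr mem))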
38172 for {
38173 s := v.AuxInt
38174 mem := v.Args[1]
38175 ptr := v.Args[0]
38176 if !(s%16 != 0 && s%16 > 8 && s > 16) {
38177 break
38178 }
38179 v.reset(OpZero)
38180 v.AuxInt = 16
38181 v0 := b.NewValue0(v.Pos, OpOffPtr, ptr.Type)
38182 v0.AuxInt = s - 16
38183 v0.AddArg(ptr)
38184 v.AddArg(v0)
38185 v1 := b.NewValue0(v.Pos, OpZero, types.TypeMem)
38186 v1.AuxInt = s - s%16
38187 v1.AddArg(ptr)
38188 v1.AddArg(mem)
38189 v.AddArg(v1)
38190 return true
38191 }
38192
38193
38194
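// match: (Zero [s] ptr mem)
// cond: s%16 == 0 && s > 64 && s <= 16*64 && !config.noDuffDevice
// result: (DUFFZERO [4 * (64 - s/16)] ptr mem)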
38195 for {
38196 s := v.AuxInt
38197 mem := v.Args[1]
38198 ptr := v.Args[0]
38199 if !(s%16 == 0 && s > 64 && s <= 16*64 && !config.noDuffDevice) {
38200 break
38201 }
38202 v.reset(OpARM64DUFFZERO)
38203 v.AuxInt = 4 * (64 - s/16)
38204 v.AddArg(ptr)
38205 v.AddArg(mem)
38206 return true
38207 }
38208
38209
38210
38211 for {
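// match: (Zero [s] ptr mem)
// cond: s%16 == 0 && (s > 16*64 || config.noDuffDevice)
// result: (LoweredZero ptr (ADDconst <ptr.Type> [s-16] ptr) mem)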
38212 s := v.AuxInt
38213 mem := v.Args[1]
38214 ptr := v.Args[0]
38215 if !(s%16 == 0 && (s > 16*64 || config.noDuffDevice)) {
38216 break
38217 }
38218 v.reset(OpARM64LoweredZero)
38219 v.AddArg(ptr)
38220 v0 := b.NewValue0(v.Pos, OpARM64ADDconst, ptr.Type)
38221 v0.AuxInt = s - 16
38222 v0.AddArg(ptr)
38223 v.AddArg(v0)
38224 v.AddArg(mem)
38225 return true
38226 }
38227 return false
38228 }
38229 func rewriteValueARM64_OpZeroExt16to32_0(v *Value) bool {
38230
38231
38232
38233 for {
38234 x := v.Args[0]
38235 v.reset(OpARM64MOVHUreg)
38236 v.AddArg(x)
38237 return true
38238 }
38239 }
38240 func rewriteValueARM64_OpZeroExt16to64_0(v *Value) bool {
38241
38242
38243
38244 for {
38245 x := v.Args[0]
38246 v.reset(OpARM64MOVHUreg)
38247 v.AddArg(x)
38248 return true
38249 }
38250 }
38251 func rewriteValueARM64_OpZeroExt32to64_0(v *Value) bool {
38252
38253
38254
38255 for {
38256 x := v.Args[0]
38257 v.reset(OpARM64MOVWUreg)
38258 v.AddArg(x)
38259 return true
38260 }
38261 }
38262 func rewriteValueARM64_OpZeroExt8to16_0(v *Value) bool {
38263
38264
38265
38266 for {
38267 x := v.Args[0]
38268 v.reset(OpARM64MOVBUreg)
38269 v.AddArg(x)
38270 return true
38271 }
38272 }
38273 func rewriteValueARM64_OpZeroExt8to32_0(v *Value) bool {
38274
38275
38276
38277 for {
38278 x := v.Args[0]
38279 v.reset(OpARM64MOVBUreg)
38280 v.AddArg(x)
38281 return true
38282 }
38283 }
38284 func rewriteValueARM64_OpZeroExt8to64_0(v *Value) bool {
38285
38286
38287
38288 for {
38289 x := v.Args[0]
38290 v.reset(OpARM64MOVBUreg)
38291 v.AddArg(x)
38292 return true
38293 }
38294 }
38295 func rewriteBlockARM64(b *Block) bool {
38296 config := b.Func.Config
38297 typ := &config.Types
38298 _ = typ
38299 v := b.Control
38300 _ = v
38301 switch b.Kind {
38302 case BlockARM64EQ:
38303
38304
38305
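// match: (EQ (CMPWconst [0] x:(ANDconst [c] y)) yes no)
// cond: x.Uses == 1
// result: (EQ (TSTWconst [c] y) yes no)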
38306 for v.Op == OpARM64CMPWconst {
38307 if v.AuxInt != 0 {
38308 break
38309 }
38310 x := v.Args[0]
38311 if x.Op != OpARM64ANDconst {
38312 break
38313 }
38314 c := x.AuxInt
38315 y := x.Args[0]
38316 if !(x.Uses == 1) {
38317 break
38318 }
38319 b.Kind = BlockARM64EQ
38320 v0 := b.NewValue0(v.Pos, OpARM64TSTWconst, types.TypeFlags)
38321 v0.AuxInt = c
38322 v0.AddArg(y)
38323 b.SetControl(v0)
38324 b.Aux = nil
38325 return true
38326 }
38327
38328
38329
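// match: (EQ (CMPconst [0] z:(AND x y)) yes no)
// cond: z.Uses == 1
// result: (EQ (TST x y) yes no)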
38330 for v.Op == OpARM64CMPconst {
38331 if v.AuxInt != 0 {
38332 break
38333 }
38334 z := v.Args[0]
38335 if z.Op != OpARM64AND {
38336 break
38337 }
38338 y := z.Args[1]
38339 x := z.Args[0]
38340 if !(z.Uses == 1) {
38341 break
38342 }
38343 b.Kind = BlockARM64EQ
38344 v0 := b.NewValue0(v.Pos, OpARM64TST, types.TypeFlags)
38345 v0.AddArg(x)
38346 v0.AddArg(y)
38347 b.SetControl(v0)
38348 b.Aux = nil
38349 return true
38350 }
38351
38352
38353
38354 for v.Op == OpARM64CMPWconst {
38355 if v.AuxInt != 0 {
38356 break
38357 }
38358 z := v.Args[0]
38359 if z.Op != OpARM64AND {
38360 break
38361 }
38362 y := z.Args[1]
38363 x := z.Args[0]
38364 if !(z.Uses == 1) {
38365 break
38366 }
38367 b.Kind = BlockARM64EQ
38368 v0 := b.NewValue0(v.Pos, OpARM64TSTW, types.TypeFlags)
38369 v0.AddArg(x)
38370 v0.AddArg(y)
38371 b.SetControl(v0)
38372 b.Aux = nil
38373 return true
38374 }
38375
38376
38377
38378 for v.Op == OpARM64CMPconst {
38379 if v.AuxInt != 0 {
38380 break
38381 }
38382 x := v.Args[0]
38383 if x.Op != OpARM64ANDconst {
38384 break
38385 }
38386 c := x.AuxInt
38387 y := x.Args[0]
38388 if !(x.Uses == 1) {
38389 break
38390 }
38391 b.Kind = BlockARM64EQ
38392 v0 := b.NewValue0(v.Pos, OpARM64TSTconst, types.TypeFlags)
38393 v0.AuxInt = c
38394 v0.AddArg(y)
38395 b.SetControl(v0)
38396 b.Aux = nil
38397 return true
38398 }
38399
38400
38401
38402 for v.Op == OpARM64CMPconst {
38403 if v.AuxInt != 0 {
38404 break
38405 }
38406 x := v.Args[0]
38407 if x.Op != OpARM64ADDconst {
38408 break
38409 }
38410 c := x.AuxInt
38411 y := x.Args[0]
38412 if !(x.Uses == 1) {
38413 break
38414 }
38415 b.Kind = BlockARM64EQ
38416 v0 := b.NewValue0(v.Pos, OpARM64CMNconst, types.TypeFlags)
38417 v0.AuxInt = c
38418 v0.AddArg(y)
38419 b.SetControl(v0)
38420 b.Aux = nil
38421 return true
38422 }
38423
38424
38425
38426 for v.Op == OpARM64CMPWconst {
38427 if v.AuxInt != 0 {
38428 break
38429 }
38430 x := v.Args[0]
38431 if x.Op != OpARM64ADDconst {
38432 break
38433 }
38434 c := x.AuxInt
38435 y := x.Args[0]
38436 if !(x.Uses == 1) {
38437 break
38438 }
38439 b.Kind = BlockARM64EQ
38440 v0 := b.NewValue0(v.Pos, OpARM64CMNWconst, types.TypeFlags)
38441 v0.AuxInt = c
38442 v0.AddArg(y)
38443 b.SetControl(v0)
38444 b.Aux = nil
38445 return true
38446 }
38447
38448
38449
38450 for v.Op == OpARM64CMPconst {
38451 if v.AuxInt != 0 {
38452 break
38453 }
38454 z := v.Args[0]
38455 if z.Op != OpARM64ADD {
38456 break
38457 }
38458 y := z.Args[1]
38459 x := z.Args[0]
38460 if !(z.Uses == 1) {
38461 break
38462 }
38463 b.Kind = BlockARM64EQ
38464 v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags)
38465 v0.AddArg(x)
38466 v0.AddArg(y)
38467 b.SetControl(v0)
38468 b.Aux = nil
38469 return true
38470 }
38471
38472
38473
38474 for v.Op == OpARM64CMPWconst {
38475 if v.AuxInt != 0 {
38476 break
38477 }
38478 z := v.Args[0]
38479 if z.Op != OpARM64ADD {
38480 break
38481 }
38482 y := z.Args[1]
38483 x := z.Args[0]
38484 if !(z.Uses == 1) {
38485 break
38486 }
38487 b.Kind = BlockARM64EQ
38488 v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags)
38489 v0.AddArg(x)
38490 v0.AddArg(y)
38491 b.SetControl(v0)
38492 b.Aux = nil
38493 return true
38494 }
38495
38496
38497
38498 for v.Op == OpARM64CMP {
38499 _ = v.Args[1]
38500 x := v.Args[0]
38501 z := v.Args[1]
38502 if z.Op != OpARM64NEG {
38503 break
38504 }
38505 y := z.Args[0]
38506 if !(z.Uses == 1) {
38507 break
38508 }
38509 b.Kind = BlockARM64EQ
38510 v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags)
38511 v0.AddArg(x)
38512 v0.AddArg(y)
38513 b.SetControl(v0)
38514 b.Aux = nil
38515 return true
38516 }
38517
38518
38519
38520 for v.Op == OpARM64CMPW {
38521 _ = v.Args[1]
38522 x := v.Args[0]
38523 z := v.Args[1]
38524 if z.Op != OpARM64NEG {
38525 break
38526 }
38527 y := z.Args[0]
38528 if !(z.Uses == 1) {
38529 break
38530 }
38531 b.Kind = BlockARM64EQ
38532 v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags)
38533 v0.AddArg(x)
38534 v0.AddArg(y)
38535 b.SetControl(v0)
38536 b.Aux = nil
38537 return true
38538 }
38539
38540
38541
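// match: (EQ (CMPconst [0] x) yes no)
// cond:
// result: (Z x yes no)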
38542 for v.Op == OpARM64CMPconst {
38543 if v.AuxInt != 0 {
38544 break
38545 }
38546 x := v.Args[0]
38547 b.Kind = BlockARM64Z
38548 b.SetControl(x)
38549 b.Aux = nil
38550 return true
38551 }
38552
38553
38554
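// match: (EQ (CMPWconst [0] x) yes no)
// cond:
// result: (ZW x yes no)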
38555 for v.Op == OpARM64CMPWconst {
38556 if v.AuxInt != 0 {
38557 break
38558 }
38559 x := v.Args[0]
38560 b.Kind = BlockARM64ZW
38561 b.SetControl(x)
38562 b.Aux = nil
38563 return true
38564 }
38565
38566
38567
38568 for v.Op == OpARM64CMPconst {
38569 if v.AuxInt != 0 {
38570 break
38571 }
38572 z := v.Args[0]
38573 if z.Op != OpARM64MADD {
38574 break
38575 }
38576 y := z.Args[2]
38577 a := z.Args[0]
38578 x := z.Args[1]
38579 if !(z.Uses == 1) {
38580 break
38581 }
38582 b.Kind = BlockARM64EQ
38583 v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags)
38584 v0.AddArg(a)
38585 v1 := b.NewValue0(v.Pos, OpARM64MUL, x.Type)
38586 v1.AddArg(x)
38587 v1.AddArg(y)
38588 v0.AddArg(v1)
38589 b.SetControl(v0)
38590 b.Aux = nil
38591 return true
38592 }
38593
38594
38595
38596 for v.Op == OpARM64CMPconst {
38597 if v.AuxInt != 0 {
38598 break
38599 }
38600 z := v.Args[0]
38601 if z.Op != OpARM64MSUB {
38602 break
38603 }
38604 y := z.Args[2]
38605 a := z.Args[0]
38606 x := z.Args[1]
38607 if !(z.Uses == 1) {
38608 break
38609 }
38610 b.Kind = BlockARM64EQ
38611 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
38612 v0.AddArg(a)
38613 v1 := b.NewValue0(v.Pos, OpARM64MUL, x.Type)
38614 v1.AddArg(x)
38615 v1.AddArg(y)
38616 v0.AddArg(v1)
38617 b.SetControl(v0)
38618 b.Aux = nil
38619 return true
38620 }
38621
38622
38623
38624 for v.Op == OpARM64CMPWconst {
38625 if v.AuxInt != 0 {
38626 break
38627 }
38628 z := v.Args[0]
38629 if z.Op != OpARM64MADDW {
38630 break
38631 }
38632 y := z.Args[2]
38633 a := z.Args[0]
38634 x := z.Args[1]
38635 if !(z.Uses == 1) {
38636 break
38637 }
38638 b.Kind = BlockARM64EQ
38639 v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags)
38640 v0.AddArg(a)
38641 v1 := b.NewValue0(v.Pos, OpARM64MULW, x.Type)
38642 v1.AddArg(x)
38643 v1.AddArg(y)
38644 v0.AddArg(v1)
38645 b.SetControl(v0)
38646 b.Aux = nil
38647 return true
38648 }
38649
38650
38651
38652 for v.Op == OpARM64CMPWconst {
38653 if v.AuxInt != 0 {
38654 break
38655 }
38656 z := v.Args[0]
38657 if z.Op != OpARM64MSUBW {
38658 break
38659 }
38660 y := z.Args[2]
38661 a := z.Args[0]
38662 x := z.Args[1]
38663 if !(z.Uses == 1) {
38664 break
38665 }
38666 b.Kind = BlockARM64EQ
38667 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
38668 v0.AddArg(a)
38669 v1 := b.NewValue0(v.Pos, OpARM64MULW, x.Type)
38670 v1.AddArg(x)
38671 v1.AddArg(y)
38672 v0.AddArg(v1)
38673 b.SetControl(v0)
38674 b.Aux = nil
38675 return true
38676 }
38677
38678
38679
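// match: (EQ (TSTconst [c] x) yes no)
// cond: oneBit(c)
// result: (TBZ {ntz(c)} x yes no)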
38680 for v.Op == OpARM64TSTconst {
38681 c := v.AuxInt
38682 x := v.Args[0]
38683 if !(oneBit(c)) {
38684 break
38685 }
38686 b.Kind = BlockARM64TBZ
38687 b.SetControl(x)
38688 b.Aux = ntz(c)
38689 return true
38690 }
38691
38692
38693
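// match: (EQ (TSTWconst [c] x) yes no)
// cond: oneBit(int64(uint32(c)))
// result: (TBZ {ntz(int64(uint32(c)))} x yes no)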
38694 for v.Op == OpARM64TSTWconst {
38695 c := v.AuxInt
38696 x := v.Args[0]
38697 if !(oneBit(int64(uint32(c)))) {
38698 break
38699 }
38700 b.Kind = BlockARM64TBZ
38701 b.SetControl(x)
38702 b.Aux = ntz(int64(uint32(c)))
38703 return true
38704 }
38705
38706
38707
38708 for v.Op == OpARM64FlagEQ {
38709 b.Kind = BlockFirst
38710 b.SetControl(nil)
38711 b.Aux = nil
38712 return true
38713 }
38714
38715
38716
38717 for v.Op == OpARM64FlagLT_ULT {
38718 b.Kind = BlockFirst
38719 b.SetControl(nil)
38720 b.Aux = nil
38721 b.swapSuccessors()
38722 return true
38723 }
38724
38725
38726
38727 for v.Op == OpARM64FlagLT_UGT {
38728 b.Kind = BlockFirst
38729 b.SetControl(nil)
38730 b.Aux = nil
38731 b.swapSuccessors()
38732 return true
38733 }
38734
38735
38736
38737 for v.Op == OpARM64FlagGT_ULT {
38738 b.Kind = BlockFirst
38739 b.SetControl(nil)
38740 b.Aux = nil
38741 b.swapSuccessors()
38742 return true
38743 }
38744
38745
38746
38747 for v.Op == OpARM64FlagGT_UGT {
38748 b.Kind = BlockFirst
38749 b.SetControl(nil)
38750 b.Aux = nil
38751 b.swapSuccessors()
38752 return true
38753 }
38754
38755
38756
38757 for v.Op == OpARM64InvertFlags {
38758 cmp := v.Args[0]
38759 b.Kind = BlockARM64EQ
38760 b.SetControl(cmp)
38761 b.Aux = nil
38762 return true
38763 }
38764 case BlockARM64FGE:
38765
38766
38767
38768 for v.Op == OpARM64InvertFlags {
38769 cmp := v.Args[0]
38770 b.Kind = BlockARM64FLE
38771 b.SetControl(cmp)
38772 b.Aux = nil
38773 return true
38774 }
38775 case BlockARM64FGT:
38776
38777
38778
38779 for v.Op == OpARM64InvertFlags {
38780 cmp := v.Args[0]
38781 b.Kind = BlockARM64FLT
38782 b.SetControl(cmp)
38783 b.Aux = nil
38784 return true
38785 }
38786 case BlockARM64FLE:
38787
38788
38789
38790 for v.Op == OpARM64InvertFlags {
38791 cmp := v.Args[0]
38792 b.Kind = BlockARM64FGE
38793 b.SetControl(cmp)
38794 b.Aux = nil
38795 return true
38796 }
38797 case BlockARM64FLT:
38798
38799
38800
38801 for v.Op == OpARM64InvertFlags {
38802 cmp := v.Args[0]
38803 b.Kind = BlockARM64FGT
38804 b.SetControl(cmp)
38805 b.Aux = nil
38806 return true
38807 }
38808 case BlockARM64GE:
38809
38810
38811
38812 for v.Op == OpARM64CMPWconst {
38813 if v.AuxInt != 0 {
38814 break
38815 }
38816 x := v.Args[0]
38817 if x.Op != OpARM64ANDconst {
38818 break
38819 }
38820 c := x.AuxInt
38821 y := x.Args[0]
38822 if !(x.Uses == 1) {
38823 break
38824 }
38825 b.Kind = BlockARM64GE
38826 v0 := b.NewValue0(v.Pos, OpARM64TSTWconst, types.TypeFlags)
38827 v0.AuxInt = c
38828 v0.AddArg(y)
38829 b.SetControl(v0)
38830 b.Aux = nil
38831 return true
38832 }
38833
38834
38835
38836 for v.Op == OpARM64CMPconst {
38837 if v.AuxInt != 0 {
38838 break
38839 }
38840 z := v.Args[0]
38841 if z.Op != OpARM64AND {
38842 break
38843 }
38844 y := z.Args[1]
38845 x := z.Args[0]
38846 if !(z.Uses == 1) {
38847 break
38848 }
38849 b.Kind = BlockARM64GE
38850 v0 := b.NewValue0(v.Pos, OpARM64TST, types.TypeFlags)
38851 v0.AddArg(x)
38852 v0.AddArg(y)
38853 b.SetControl(v0)
38854 b.Aux = nil
38855 return true
38856 }
38857
38858
38859
38860 for v.Op == OpARM64CMPWconst {
38861 if v.AuxInt != 0 {
38862 break
38863 }
38864 z := v.Args[0]
38865 if z.Op != OpARM64AND {
38866 break
38867 }
38868 y := z.Args[1]
38869 x := z.Args[0]
38870 if !(z.Uses == 1) {
38871 break
38872 }
38873 b.Kind = BlockARM64GE
38874 v0 := b.NewValue0(v.Pos, OpARM64TSTW, types.TypeFlags)
38875 v0.AddArg(x)
38876 v0.AddArg(y)
38877 b.SetControl(v0)
38878 b.Aux = nil
38879 return true
38880 }
38881
38882
38883
38884 for v.Op == OpARM64CMPconst {
38885 if v.AuxInt != 0 {
38886 break
38887 }
38888 x := v.Args[0]
38889 if x.Op != OpARM64ANDconst {
38890 break
38891 }
38892 c := x.AuxInt
38893 y := x.Args[0]
38894 if !(x.Uses == 1) {
38895 break
38896 }
38897 b.Kind = BlockARM64GE
38898 v0 := b.NewValue0(v.Pos, OpARM64TSTconst, types.TypeFlags)
38899 v0.AuxInt = c
38900 v0.AddArg(y)
38901 b.SetControl(v0)
38902 b.Aux = nil
38903 return true
38904 }
38905
38906
38907
38908 for v.Op == OpARM64CMPconst {
38909 if v.AuxInt != 0 {
38910 break
38911 }
38912 x := v.Args[0]
38913 if x.Op != OpARM64ADDconst {
38914 break
38915 }
38916 c := x.AuxInt
38917 y := x.Args[0]
38918 if !(x.Uses == 1) {
38919 break
38920 }
38921 b.Kind = BlockARM64GE
38922 v0 := b.NewValue0(v.Pos, OpARM64CMNconst, types.TypeFlags)
38923 v0.AuxInt = c
38924 v0.AddArg(y)
38925 b.SetControl(v0)
38926 b.Aux = nil
38927 return true
38928 }
38929
38930
38931
38932 for v.Op == OpARM64CMPWconst {
38933 if v.AuxInt != 0 {
38934 break
38935 }
38936 x := v.Args[0]
38937 if x.Op != OpARM64ADDconst {
38938 break
38939 }
38940 c := x.AuxInt
38941 y := x.Args[0]
38942 if !(x.Uses == 1) {
38943 break
38944 }
38945 b.Kind = BlockARM64GE
38946 v0 := b.NewValue0(v.Pos, OpARM64CMNWconst, types.TypeFlags)
38947 v0.AuxInt = c
38948 v0.AddArg(y)
38949 b.SetControl(v0)
38950 b.Aux = nil
38951 return true
38952 }
38953
38954
38955
38956 for v.Op == OpARM64CMPconst {
38957 if v.AuxInt != 0 {
38958 break
38959 }
38960 z := v.Args[0]
38961 if z.Op != OpARM64ADD {
38962 break
38963 }
38964 y := z.Args[1]
38965 x := z.Args[0]
38966 if !(z.Uses == 1) {
38967 break
38968 }
38969 b.Kind = BlockARM64GE
38970 v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags)
38971 v0.AddArg(x)
38972 v0.AddArg(y)
38973 b.SetControl(v0)
38974 b.Aux = nil
38975 return true
38976 }
38977
38978
38979
38980 for v.Op == OpARM64CMPWconst {
38981 if v.AuxInt != 0 {
38982 break
38983 }
38984 z := v.Args[0]
38985 if z.Op != OpARM64ADD {
38986 break
38987 }
38988 y := z.Args[1]
38989 x := z.Args[0]
38990 if !(z.Uses == 1) {
38991 break
38992 }
38993 b.Kind = BlockARM64GE
38994 v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags)
38995 v0.AddArg(x)
38996 v0.AddArg(y)
38997 b.SetControl(v0)
38998 b.Aux = nil
38999 return true
39000 }
39001
39002
39003
39004 for v.Op == OpARM64CMP {
39005 _ = v.Args[1]
39006 x := v.Args[0]
39007 z := v.Args[1]
39008 if z.Op != OpARM64NEG {
39009 break
39010 }
39011 y := z.Args[0]
39012 if !(z.Uses == 1) {
39013 break
39014 }
39015 b.Kind = BlockARM64GE
39016 v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags)
39017 v0.AddArg(x)
39018 v0.AddArg(y)
39019 b.SetControl(v0)
39020 b.Aux = nil
39021 return true
39022 }
39023
39024
39025
39026 for v.Op == OpARM64CMPW {
39027 _ = v.Args[1]
39028 x := v.Args[0]
39029 z := v.Args[1]
39030 if z.Op != OpARM64NEG {
39031 break
39032 }
39033 y := z.Args[0]
39034 if !(z.Uses == 1) {
39035 break
39036 }
39037 b.Kind = BlockARM64GE
39038 v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags)
39039 v0.AddArg(x)
39040 v0.AddArg(y)
39041 b.SetControl(v0)
39042 b.Aux = nil
39043 return true
39044 }
39045
39046
39047
39048 for v.Op == OpARM64CMPconst {
39049 if v.AuxInt != 0 {
39050 break
39051 }
39052 z := v.Args[0]
39053 if z.Op != OpARM64MADD {
39054 break
39055 }
39056 y := z.Args[2]
39057 a := z.Args[0]
39058 x := z.Args[1]
39059 if !(z.Uses == 1) {
39060 break
39061 }
39062 b.Kind = BlockARM64GE
39063 v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags)
39064 v0.AddArg(a)
39065 v1 := b.NewValue0(v.Pos, OpARM64MUL, x.Type)
39066 v1.AddArg(x)
39067 v1.AddArg(y)
39068 v0.AddArg(v1)
39069 b.SetControl(v0)
39070 b.Aux = nil
39071 return true
39072 }
39073
39074
39075
39076 for v.Op == OpARM64CMPconst {
39077 if v.AuxInt != 0 {
39078 break
39079 }
39080 z := v.Args[0]
39081 if z.Op != OpARM64MSUB {
39082 break
39083 }
39084 y := z.Args[2]
39085 a := z.Args[0]
39086 x := z.Args[1]
39087 if !(z.Uses == 1) {
39088 break
39089 }
39090 b.Kind = BlockARM64GE
39091 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
39092 v0.AddArg(a)
39093 v1 := b.NewValue0(v.Pos, OpARM64MUL, x.Type)
39094 v1.AddArg(x)
39095 v1.AddArg(y)
39096 v0.AddArg(v1)
39097 b.SetControl(v0)
39098 b.Aux = nil
39099 return true
39100 }
39101
39102
39103
39104 for v.Op == OpARM64CMPWconst {
39105 if v.AuxInt != 0 {
39106 break
39107 }
39108 z := v.Args[0]
39109 if z.Op != OpARM64MADDW {
39110 break
39111 }
39112 y := z.Args[2]
39113 a := z.Args[0]
39114 x := z.Args[1]
39115 if !(z.Uses == 1) {
39116 break
39117 }
39118 b.Kind = BlockARM64GE
39119 v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags)
39120 v0.AddArg(a)
39121 v1 := b.NewValue0(v.Pos, OpARM64MULW, x.Type)
39122 v1.AddArg(x)
39123 v1.AddArg(y)
39124 v0.AddArg(v1)
39125 b.SetControl(v0)
39126 b.Aux = nil
39127 return true
39128 }
39129
39130
39131
39132 for v.Op == OpARM64CMPWconst {
39133 if v.AuxInt != 0 {
39134 break
39135 }
39136 z := v.Args[0]
39137 if z.Op != OpARM64MSUBW {
39138 break
39139 }
39140 y := z.Args[2]
39141 a := z.Args[0]
39142 x := z.Args[1]
39143 if !(z.Uses == 1) {
39144 break
39145 }
39146 b.Kind = BlockARM64GE
39147 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
39148 v0.AddArg(a)
39149 v1 := b.NewValue0(v.Pos, OpARM64MULW, x.Type)
39150 v1.AddArg(x)
39151 v1.AddArg(y)
39152 v0.AddArg(v1)
39153 b.SetControl(v0)
39154 b.Aux = nil
39155 return true
39156 }
39157
39158
39159
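// match: (GE (CMPWconst [0] x) yes no)
// cond:
// result: (TBZ {int64(31)} x yes no)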
39160 for v.Op == OpARM64CMPWconst {
39161 if v.AuxInt != 0 {
39162 break
39163 }
39164 x := v.Args[0]
39165 b.Kind = BlockARM64TBZ
39166 b.SetControl(x)
39167 b.Aux = int64(31)
39168 return true
39169 }
39170
39171
39172
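// match: (GE (CMPconst [0] x) yes no)
// cond:
// result: (TBZ {int64(63)} x yes no)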
39173 for v.Op == OpARM64CMPconst {
39174 if v.AuxInt != 0 {
39175 break
39176 }
39177 x := v.Args[0]
39178 b.Kind = BlockARM64TBZ
39179 b.SetControl(x)
39180 b.Aux = int64(63)
39181 return true
39182 }
39183
39184
39185
39186 for v.Op == OpARM64FlagEQ {
39187 b.Kind = BlockFirst
39188 b.SetControl(nil)
39189 b.Aux = nil
39190 return true
39191 }
39192
39193
39194
39195 for v.Op == OpARM64FlagLT_ULT {
39196 b.Kind = BlockFirst
39197 b.SetControl(nil)
39198 b.Aux = nil
39199 b.swapSuccessors()
39200 return true
39201 }
39202
39203
39204
39205 for v.Op == OpARM64FlagLT_UGT {
39206 b.Kind = BlockFirst
39207 b.SetControl(nil)
39208 b.Aux = nil
39209 b.swapSuccessors()
39210 return true
39211 }
39212
39213
39214
39215 for v.Op == OpARM64FlagGT_ULT {
39216 b.Kind = BlockFirst
39217 b.SetControl(nil)
39218 b.Aux = nil
39219 return true
39220 }
39221
39222
39223
39224 for v.Op == OpARM64FlagGT_UGT {
39225 b.Kind = BlockFirst
39226 b.SetControl(nil)
39227 b.Aux = nil
39228 return true
39229 }
39230
39231
39232
39233 for v.Op == OpARM64InvertFlags {
39234 cmp := v.Args[0]
39235 b.Kind = BlockARM64LE
39236 b.SetControl(cmp)
39237 b.Aux = nil
39238 return true
39239 }
39240 case BlockARM64GT:
39241
39242
39243
39244 for v.Op == OpARM64CMPWconst {
39245 if v.AuxInt != 0 {
39246 break
39247 }
39248 x := v.Args[0]
39249 if x.Op != OpARM64ANDconst {
39250 break
39251 }
39252 c := x.AuxInt
39253 y := x.Args[0]
39254 if !(x.Uses == 1) {
39255 break
39256 }
39257 b.Kind = BlockARM64GT
39258 v0 := b.NewValue0(v.Pos, OpARM64TSTWconst, types.TypeFlags)
39259 v0.AuxInt = c
39260 v0.AddArg(y)
39261 b.SetControl(v0)
39262 b.Aux = nil
39263 return true
39264 }
39265
39266
39267
39268 for v.Op == OpARM64CMPconst {
39269 if v.AuxInt != 0 {
39270 break
39271 }
39272 z := v.Args[0]
39273 if z.Op != OpARM64AND {
39274 break
39275 }
39276 y := z.Args[1]
39277 x := z.Args[0]
39278 if !(z.Uses == 1) {
39279 break
39280 }
39281 b.Kind = BlockARM64GT
39282 v0 := b.NewValue0(v.Pos, OpARM64TST, types.TypeFlags)
39283 v0.AddArg(x)
39284 v0.AddArg(y)
39285 b.SetControl(v0)
39286 b.Aux = nil
39287 return true
39288 }
39289
39290
39291
39292 for v.Op == OpARM64CMPWconst {
39293 if v.AuxInt != 0 {
39294 break
39295 }
39296 z := v.Args[0]
39297 if z.Op != OpARM64AND {
39298 break
39299 }
39300 y := z.Args[1]
39301 x := z.Args[0]
39302 if !(z.Uses == 1) {
39303 break
39304 }
39305 b.Kind = BlockARM64GT
39306 v0 := b.NewValue0(v.Pos, OpARM64TSTW, types.TypeFlags)
39307 v0.AddArg(x)
39308 v0.AddArg(y)
39309 b.SetControl(v0)
39310 b.Aux = nil
39311 return true
39312 }
39313
39314
39315
39316 for v.Op == OpARM64CMPconst {
39317 if v.AuxInt != 0 {
39318 break
39319 }
39320 x := v.Args[0]
39321 if x.Op != OpARM64ANDconst {
39322 break
39323 }
39324 c := x.AuxInt
39325 y := x.Args[0]
39326 if !(x.Uses == 1) {
39327 break
39328 }
39329 b.Kind = BlockARM64GT
39330 v0 := b.NewValue0(v.Pos, OpARM64TSTconst, types.TypeFlags)
39331 v0.AuxInt = c
39332 v0.AddArg(y)
39333 b.SetControl(v0)
39334 b.Aux = nil
39335 return true
39336 }
39337
39338
39339
39340 for v.Op == OpARM64CMPconst {
39341 if v.AuxInt != 0 {
39342 break
39343 }
39344 x := v.Args[0]
39345 if x.Op != OpARM64ADDconst {
39346 break
39347 }
39348 c := x.AuxInt
39349 y := x.Args[0]
39350 if !(x.Uses == 1) {
39351 break
39352 }
39353 b.Kind = BlockARM64GT
39354 v0 := b.NewValue0(v.Pos, OpARM64CMNconst, types.TypeFlags)
39355 v0.AuxInt = c
39356 v0.AddArg(y)
39357 b.SetControl(v0)
39358 b.Aux = nil
39359 return true
39360 }
39361
39362
39363
39364 for v.Op == OpARM64CMPWconst {
39365 if v.AuxInt != 0 {
39366 break
39367 }
39368 x := v.Args[0]
39369 if x.Op != OpARM64ADDconst {
39370 break
39371 }
39372 c := x.AuxInt
39373 y := x.Args[0]
39374 if !(x.Uses == 1) {
39375 break
39376 }
39377 b.Kind = BlockARM64GT
39378 v0 := b.NewValue0(v.Pos, OpARM64CMNWconst, types.TypeFlags)
39379 v0.AuxInt = c
39380 v0.AddArg(y)
39381 b.SetControl(v0)
39382 b.Aux = nil
39383 return true
39384 }
39385
39386
39387
39388 for v.Op == OpARM64CMPconst {
39389 if v.AuxInt != 0 {
39390 break
39391 }
39392 z := v.Args[0]
39393 if z.Op != OpARM64ADD {
39394 break
39395 }
39396 y := z.Args[1]
39397 x := z.Args[0]
39398 if !(z.Uses == 1) {
39399 break
39400 }
39401 b.Kind = BlockARM64GT
39402 v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags)
39403 v0.AddArg(x)
39404 v0.AddArg(y)
39405 b.SetControl(v0)
39406 b.Aux = nil
39407 return true
39408 }
39409
39410
39411
39412 for v.Op == OpARM64CMPWconst {
39413 if v.AuxInt != 0 {
39414 break
39415 }
39416 z := v.Args[0]
39417 if z.Op != OpARM64ADD {
39418 break
39419 }
39420 y := z.Args[1]
39421 x := z.Args[0]
39422 if !(z.Uses == 1) {
39423 break
39424 }
39425 b.Kind = BlockARM64GT
39426 v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags)
39427 v0.AddArg(x)
39428 v0.AddArg(y)
39429 b.SetControl(v0)
39430 b.Aux = nil
39431 return true
39432 }
39433
39434
39435
39436 for v.Op == OpARM64CMP {
39437 _ = v.Args[1]
39438 x := v.Args[0]
39439 z := v.Args[1]
39440 if z.Op != OpARM64NEG {
39441 break
39442 }
39443 y := z.Args[0]
39444 if !(z.Uses == 1) {
39445 break
39446 }
39447 b.Kind = BlockARM64GT
39448 v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags)
39449 v0.AddArg(x)
39450 v0.AddArg(y)
39451 b.SetControl(v0)
39452 b.Aux = nil
39453 return true
39454 }
39455
39456
39457
39458 for v.Op == OpARM64CMPW {
39459 _ = v.Args[1]
39460 x := v.Args[0]
39461 z := v.Args[1]
39462 if z.Op != OpARM64NEG {
39463 break
39464 }
39465 y := z.Args[0]
39466 if !(z.Uses == 1) {
39467 break
39468 }
39469 b.Kind = BlockARM64GT
39470 v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags)
39471 v0.AddArg(x)
39472 v0.AddArg(y)
39473 b.SetControl(v0)
39474 b.Aux = nil
39475 return true
39476 }
39477
39478
39479
39480 for v.Op == OpARM64CMPconst {
39481 if v.AuxInt != 0 {
39482 break
39483 }
39484 z := v.Args[0]
39485 if z.Op != OpARM64MADD {
39486 break
39487 }
39488 y := z.Args[2]
39489 a := z.Args[0]
39490 x := z.Args[1]
39491 if !(z.Uses == 1) {
39492 break
39493 }
39494 b.Kind = BlockARM64GT
39495 v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags)
39496 v0.AddArg(a)
39497 v1 := b.NewValue0(v.Pos, OpARM64MUL, x.Type)
39498 v1.AddArg(x)
39499 v1.AddArg(y)
39500 v0.AddArg(v1)
39501 b.SetControl(v0)
39502 b.Aux = nil
39503 return true
39504 }
39505
39506
39507
39508 for v.Op == OpARM64CMPconst {
39509 if v.AuxInt != 0 {
39510 break
39511 }
39512 z := v.Args[0]
39513 if z.Op != OpARM64MSUB {
39514 break
39515 }
39516 y := z.Args[2]
39517 a := z.Args[0]
39518 x := z.Args[1]
39519 if !(z.Uses == 1) {
39520 break
39521 }
39522 b.Kind = BlockARM64GT
39523 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
39524 v0.AddArg(a)
39525 v1 := b.NewValue0(v.Pos, OpARM64MUL, x.Type)
39526 v1.AddArg(x)
39527 v1.AddArg(y)
39528 v0.AddArg(v1)
39529 b.SetControl(v0)
39530 b.Aux = nil
39531 return true
39532 }
39533
39534
39535
39536 for v.Op == OpARM64CMPWconst {
39537 if v.AuxInt != 0 {
39538 break
39539 }
39540 z := v.Args[0]
39541 if z.Op != OpARM64MADDW {
39542 break
39543 }
39544 y := z.Args[2]
39545 a := z.Args[0]
39546 x := z.Args[1]
39547 if !(z.Uses == 1) {
39548 break
39549 }
39550 b.Kind = BlockARM64GT
39551 v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags)
39552 v0.AddArg(a)
39553 v1 := b.NewValue0(v.Pos, OpARM64MULW, x.Type)
39554 v1.AddArg(x)
39555 v1.AddArg(y)
39556 v0.AddArg(v1)
39557 b.SetControl(v0)
39558 b.Aux = nil
39559 return true
39560 }
39561
39562
39563
39564 for v.Op == OpARM64CMPWconst {
39565 if v.AuxInt != 0 {
39566 break
39567 }
39568 z := v.Args[0]
39569 if z.Op != OpARM64MSUBW {
39570 break
39571 }
39572 y := z.Args[2]
39573 a := z.Args[0]
39574 x := z.Args[1]
39575 if !(z.Uses == 1) {
39576 break
39577 }
39578 b.Kind = BlockARM64GT
39579 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
39580 v0.AddArg(a)
39581 v1 := b.NewValue0(v.Pos, OpARM64MULW, x.Type)
39582 v1.AddArg(x)
39583 v1.AddArg(y)
39584 v0.AddArg(v1)
39585 b.SetControl(v0)
39586 b.Aux = nil
39587 return true
39588 }
39589
39590
39591
39592 for v.Op == OpARM64FlagEQ {
39593 b.Kind = BlockFirst
39594 b.SetControl(nil)
39595 b.Aux = nil
39596 b.swapSuccessors()
39597 return true
39598 }
39599
39600
39601
39602 for v.Op == OpARM64FlagLT_ULT {
39603 b.Kind = BlockFirst
39604 b.SetControl(nil)
39605 b.Aux = nil
39606 b.swapSuccessors()
39607 return true
39608 }
39609
39610
39611
39612 for v.Op == OpARM64FlagLT_UGT {
39613 b.Kind = BlockFirst
39614 b.SetControl(nil)
39615 b.Aux = nil
39616 b.swapSuccessors()
39617 return true
39618 }
39619
39620
39621
39622 for v.Op == OpARM64FlagGT_ULT {
39623 b.Kind = BlockFirst
39624 b.SetControl(nil)
39625 b.Aux = nil
39626 return true
39627 }
39628
39629
39630
39631 for v.Op == OpARM64FlagGT_UGT {
39632 b.Kind = BlockFirst
39633 b.SetControl(nil)
39634 b.Aux = nil
39635 return true
39636 }
39637
39638
39639
39640 for v.Op == OpARM64InvertFlags {
39641 cmp := v.Args[0]
39642 b.Kind = BlockARM64LT
39643 b.SetControl(cmp)
39644 b.Aux = nil
39645 return true
39646 }
39647 case BlockIf:
39648
39649
39650
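// match: (If (Equal cc) yes no)
// cond:
// result: (EQ cc yes no)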
39651 for v.Op == OpARM64Equal {
39652 cc := v.Args[0]
39653 b.Kind = BlockARM64EQ
39654 b.SetControl(cc)
39655 b.Aux = nil
39656 return true
39657 }
39658
39659
39660
39661 for v.Op == OpARM64NotEqual {
39662 cc := v.Args[0]
39663 b.Kind = BlockARM64NE
39664 b.SetControl(cc)
39665 b.Aux = nil
39666 return true
39667 }
39668
39669
39670
39671 for v.Op == OpARM64LessThan {
39672 cc := v.Args[0]
39673 b.Kind = BlockARM64LT
39674 b.SetControl(cc)
39675 b.Aux = nil
39676 return true
39677 }
39678
39679
39680
39681 for v.Op == OpARM64LessThanU {
39682 cc := v.Args[0]
39683 b.Kind = BlockARM64ULT
39684 b.SetControl(cc)
39685 b.Aux = nil
39686 return true
39687 }
39688
39689
39690
39691 for v.Op == OpARM64LessEqual {
39692 cc := v.Args[0]
39693 b.Kind = BlockARM64LE
39694 b.SetControl(cc)
39695 b.Aux = nil
39696 return true
39697 }
39698
39699
39700
39701 for v.Op == OpARM64LessEqualU {
39702 cc := v.Args[0]
39703 b.Kind = BlockARM64ULE
39704 b.SetControl(cc)
39705 b.Aux = nil
39706 return true
39707 }
39708
39709
39710
39711 for v.Op == OpARM64GreaterThan {
39712 cc := v.Args[0]
39713 b.Kind = BlockARM64GT
39714 b.SetControl(cc)
39715 b.Aux = nil
39716 return true
39717 }
39718
39719
39720
39721 for v.Op == OpARM64GreaterThanU {
39722 cc := v.Args[0]
39723 b.Kind = BlockARM64UGT
39724 b.SetControl(cc)
39725 b.Aux = nil
39726 return true
39727 }
39728
39729
39730
39731 for v.Op == OpARM64GreaterEqual {
39732 cc := v.Args[0]
39733 b.Kind = BlockARM64GE
39734 b.SetControl(cc)
39735 b.Aux = nil
39736 return true
39737 }
39738
39739
39740
39741 for v.Op == OpARM64GreaterEqualU {
39742 cc := v.Args[0]
39743 b.Kind = BlockARM64UGE
39744 b.SetControl(cc)
39745 b.Aux = nil
39746 return true
39747 }
39748
39749
39750
39751 for v.Op == OpARM64LessThanF {
39752 cc := v.Args[0]
39753 b.Kind = BlockARM64FLT
39754 b.SetControl(cc)
39755 b.Aux = nil
39756 return true
39757 }
39758
39759
39760
39761 for v.Op == OpARM64LessEqualF {
39762 cc := v.Args[0]
39763 b.Kind = BlockARM64FLE
39764 b.SetControl(cc)
39765 b.Aux = nil
39766 return true
39767 }
39768
39769
39770
39771 for v.Op == OpARM64GreaterThanF {
39772 cc := v.Args[0]
39773 b.Kind = BlockARM64FGT
39774 b.SetControl(cc)
39775 b.Aux = nil
39776 return true
39777 }
39778
39779
39780
39781 for v.Op == OpARM64GreaterEqualF {
39782 cc := v.Args[0]
39783 b.Kind = BlockARM64FGE
39784 b.SetControl(cc)
39785 b.Aux = nil
39786 return true
39787 }
39788
39789
39790
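// match: (If cond yes no)
// cond:
// result: (NZ cond yes no)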
39791 for {
39792 cond := b.Control
39793 b.Kind = BlockARM64NZ
39794 b.SetControl(cond)
39795 b.Aux = nil
39796 return true
39797 }
39798 case BlockARM64LE:
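// For an LE block, a comparison of a value against zero is folded into the
// flag-setting form of the operation that produced the value: AND becomes TST,
// ADD becomes CMN, and MADD/MSUB become CMN/CMP of a multiply, provided the
// value has no other uses. A known flag constant resolves the block to First
// (swapping successors when the constant does not satisfy <=), and InvertFlags
// turns LE into GE on the underlying comparison.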
39799
39800
39801
39802 for v.Op == OpARM64CMPWconst {
39803 if v.AuxInt != 0 {
39804 break
39805 }
39806 x := v.Args[0]
39807 if x.Op != OpARM64ANDconst {
39808 break
39809 }
39810 c := x.AuxInt
39811 y := x.Args[0]
39812 if !(x.Uses == 1) {
39813 break
39814 }
39815 b.Kind = BlockARM64LE
39816 v0 := b.NewValue0(v.Pos, OpARM64TSTWconst, types.TypeFlags)
39817 v0.AuxInt = c
39818 v0.AddArg(y)
39819 b.SetControl(v0)
39820 b.Aux = nil
39821 return true
39822 }
39823
39824
39825
39826 for v.Op == OpARM64CMPconst {
39827 if v.AuxInt != 0 {
39828 break
39829 }
39830 z := v.Args[0]
39831 if z.Op != OpARM64AND {
39832 break
39833 }
39834 y := z.Args[1]
39835 x := z.Args[0]
39836 if !(z.Uses == 1) {
39837 break
39838 }
39839 b.Kind = BlockARM64LE
39840 v0 := b.NewValue0(v.Pos, OpARM64TST, types.TypeFlags)
39841 v0.AddArg(x)
39842 v0.AddArg(y)
39843 b.SetControl(v0)
39844 b.Aux = nil
39845 return true
39846 }
39847
39848
39849
39850 for v.Op == OpARM64CMPWconst {
39851 if v.AuxInt != 0 {
39852 break
39853 }
39854 z := v.Args[0]
39855 if z.Op != OpARM64AND {
39856 break
39857 }
39858 y := z.Args[1]
39859 x := z.Args[0]
39860 if !(z.Uses == 1) {
39861 break
39862 }
39863 b.Kind = BlockARM64LE
39864 v0 := b.NewValue0(v.Pos, OpARM64TSTW, types.TypeFlags)
39865 v0.AddArg(x)
39866 v0.AddArg(y)
39867 b.SetControl(v0)
39868 b.Aux = nil
39869 return true
39870 }
39871
39872
39873
39874 for v.Op == OpARM64CMPconst {
39875 if v.AuxInt != 0 {
39876 break
39877 }
39878 x := v.Args[0]
39879 if x.Op != OpARM64ANDconst {
39880 break
39881 }
39882 c := x.AuxInt
39883 y := x.Args[0]
39884 if !(x.Uses == 1) {
39885 break
39886 }
39887 b.Kind = BlockARM64LE
39888 v0 := b.NewValue0(v.Pos, OpARM64TSTconst, types.TypeFlags)
39889 v0.AuxInt = c
39890 v0.AddArg(y)
39891 b.SetControl(v0)
39892 b.Aux = nil
39893 return true
39894 }
39895
39896
39897
39898 for v.Op == OpARM64CMPconst {
39899 if v.AuxInt != 0 {
39900 break
39901 }
39902 x := v.Args[0]
39903 if x.Op != OpARM64ADDconst {
39904 break
39905 }
39906 c := x.AuxInt
39907 y := x.Args[0]
39908 if !(x.Uses == 1) {
39909 break
39910 }
39911 b.Kind = BlockARM64LE
39912 v0 := b.NewValue0(v.Pos, OpARM64CMNconst, types.TypeFlags)
39913 v0.AuxInt = c
39914 v0.AddArg(y)
39915 b.SetControl(v0)
39916 b.Aux = nil
39917 return true
39918 }
39919
39920
39921
39922 for v.Op == OpARM64CMPWconst {
39923 if v.AuxInt != 0 {
39924 break
39925 }
39926 x := v.Args[0]
39927 if x.Op != OpARM64ADDconst {
39928 break
39929 }
39930 c := x.AuxInt
39931 y := x.Args[0]
39932 if !(x.Uses == 1) {
39933 break
39934 }
39935 b.Kind = BlockARM64LE
39936 v0 := b.NewValue0(v.Pos, OpARM64CMNWconst, types.TypeFlags)
39937 v0.AuxInt = c
39938 v0.AddArg(y)
39939 b.SetControl(v0)
39940 b.Aux = nil
39941 return true
39942 }
39943
39944
39945
39946 for v.Op == OpARM64CMPconst {
39947 if v.AuxInt != 0 {
39948 break
39949 }
39950 z := v.Args[0]
39951 if z.Op != OpARM64ADD {
39952 break
39953 }
39954 y := z.Args[1]
39955 x := z.Args[0]
39956 if !(z.Uses == 1) {
39957 break
39958 }
39959 b.Kind = BlockARM64LE
39960 v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags)
39961 v0.AddArg(x)
39962 v0.AddArg(y)
39963 b.SetControl(v0)
39964 b.Aux = nil
39965 return true
39966 }
39967
39968
39969
39970 for v.Op == OpARM64CMPWconst {
39971 if v.AuxInt != 0 {
39972 break
39973 }
39974 z := v.Args[0]
39975 if z.Op != OpARM64ADD {
39976 break
39977 }
39978 y := z.Args[1]
39979 x := z.Args[0]
39980 if !(z.Uses == 1) {
39981 break
39982 }
39983 b.Kind = BlockARM64LE
39984 v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags)
39985 v0.AddArg(x)
39986 v0.AddArg(y)
39987 b.SetControl(v0)
39988 b.Aux = nil
39989 return true
39990 }
39991
39992
39993
39994 for v.Op == OpARM64CMP {
39995 _ = v.Args[1]
39996 x := v.Args[0]
39997 z := v.Args[1]
39998 if z.Op != OpARM64NEG {
39999 break
40000 }
40001 y := z.Args[0]
40002 if !(z.Uses == 1) {
40003 break
40004 }
40005 b.Kind = BlockARM64LE
40006 v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags)
40007 v0.AddArg(x)
40008 v0.AddArg(y)
40009 b.SetControl(v0)
40010 b.Aux = nil
40011 return true
40012 }
40013
40014
40015
40016 for v.Op == OpARM64CMPW {
40017 _ = v.Args[1]
40018 x := v.Args[0]
40019 z := v.Args[1]
40020 if z.Op != OpARM64NEG {
40021 break
40022 }
40023 y := z.Args[0]
40024 if !(z.Uses == 1) {
40025 break
40026 }
40027 b.Kind = BlockARM64LE
40028 v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags)
40029 v0.AddArg(x)
40030 v0.AddArg(y)
40031 b.SetControl(v0)
40032 b.Aux = nil
40033 return true
40034 }
40035
40036
40037
40038 for v.Op == OpARM64CMPconst {
40039 if v.AuxInt != 0 {
40040 break
40041 }
40042 z := v.Args[0]
40043 if z.Op != OpARM64MADD {
40044 break
40045 }
40046 y := z.Args[2]
40047 a := z.Args[0]
40048 x := z.Args[1]
40049 if !(z.Uses == 1) {
40050 break
40051 }
40052 b.Kind = BlockARM64LE
40053 v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags)
40054 v0.AddArg(a)
40055 v1 := b.NewValue0(v.Pos, OpARM64MUL, x.Type)
40056 v1.AddArg(x)
40057 v1.AddArg(y)
40058 v0.AddArg(v1)
40059 b.SetControl(v0)
40060 b.Aux = nil
40061 return true
40062 }
40063
40064
40065
40066 for v.Op == OpARM64CMPconst {
40067 if v.AuxInt != 0 {
40068 break
40069 }
40070 z := v.Args[0]
40071 if z.Op != OpARM64MSUB {
40072 break
40073 }
40074 y := z.Args[2]
40075 a := z.Args[0]
40076 x := z.Args[1]
40077 if !(z.Uses == 1) {
40078 break
40079 }
40080 b.Kind = BlockARM64LE
40081 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
40082 v0.AddArg(a)
40083 v1 := b.NewValue0(v.Pos, OpARM64MUL, x.Type)
40084 v1.AddArg(x)
40085 v1.AddArg(y)
40086 v0.AddArg(v1)
40087 b.SetControl(v0)
40088 b.Aux = nil
40089 return true
40090 }
40091
40092
40093
40094 for v.Op == OpARM64CMPWconst {
40095 if v.AuxInt != 0 {
40096 break
40097 }
40098 z := v.Args[0]
40099 if z.Op != OpARM64MADDW {
40100 break
40101 }
40102 y := z.Args[2]
40103 a := z.Args[0]
40104 x := z.Args[1]
40105 if !(z.Uses == 1) {
40106 break
40107 }
40108 b.Kind = BlockARM64LE
40109 v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags)
40110 v0.AddArg(a)
40111 v1 := b.NewValue0(v.Pos, OpARM64MULW, x.Type)
40112 v1.AddArg(x)
40113 v1.AddArg(y)
40114 v0.AddArg(v1)
40115 b.SetControl(v0)
40116 b.Aux = nil
40117 return true
40118 }
40119
40120
40121
40122 for v.Op == OpARM64CMPWconst {
40123 if v.AuxInt != 0 {
40124 break
40125 }
40126 z := v.Args[0]
40127 if z.Op != OpARM64MSUBW {
40128 break
40129 }
40130 y := z.Args[2]
40131 a := z.Args[0]
40132 x := z.Args[1]
40133 if !(z.Uses == 1) {
40134 break
40135 }
40136 b.Kind = BlockARM64LE
40137 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
40138 v0.AddArg(a)
40139 v1 := b.NewValue0(v.Pos, OpARM64MULW, x.Type)
40140 v1.AddArg(x)
40141 v1.AddArg(y)
40142 v0.AddArg(v1)
40143 b.SetControl(v0)
40144 b.Aux = nil
40145 return true
40146 }
40147
40148
40149
40150 for v.Op == OpARM64FlagEQ {
40151 b.Kind = BlockFirst
40152 b.SetControl(nil)
40153 b.Aux = nil
40154 return true
40155 }
40156
40157
40158
40159 for v.Op == OpARM64FlagLT_ULT {
40160 b.Kind = BlockFirst
40161 b.SetControl(nil)
40162 b.Aux = nil
40163 return true
40164 }
40165
40166
40167
40168 for v.Op == OpARM64FlagLT_UGT {
40169 b.Kind = BlockFirst
40170 b.SetControl(nil)
40171 b.Aux = nil
40172 return true
40173 }
40174
40175
40176
40177 for v.Op == OpARM64FlagGT_ULT {
40178 b.Kind = BlockFirst
40179 b.SetControl(nil)
40180 b.Aux = nil
40181 b.swapSuccessors()
40182 return true
40183 }
40184
40185
40186
40187 for v.Op == OpARM64FlagGT_UGT {
40188 b.Kind = BlockFirst
40189 b.SetControl(nil)
40190 b.Aux = nil
40191 b.swapSuccessors()
40192 return true
40193 }
40194
40195
40196
40197 for v.Op == OpARM64InvertFlags {
40198 cmp := v.Args[0]
40199 b.Kind = BlockARM64GE
40200 b.SetControl(cmp)
40201 b.Aux = nil
40202 return true
40203 }
40204 case BlockARM64LT:
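// LT blocks get the same fold of zero-comparisons into TST/CMN forms as LE.
// In addition, "less than zero" depends only on the sign bit, so CMPWconst [0]
// and CMPconst [0] become TBNZ on bit 31 or 63. Flag constants resolve the
// block to First, and InvertFlags turns LT into GT.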
40205
40206
40207
40208 for v.Op == OpARM64CMPWconst {
40209 if v.AuxInt != 0 {
40210 break
40211 }
40212 x := v.Args[0]
40213 if x.Op != OpARM64ANDconst {
40214 break
40215 }
40216 c := x.AuxInt
40217 y := x.Args[0]
40218 if !(x.Uses == 1) {
40219 break
40220 }
40221 b.Kind = BlockARM64LT
40222 v0 := b.NewValue0(v.Pos, OpARM64TSTWconst, types.TypeFlags)
40223 v0.AuxInt = c
40224 v0.AddArg(y)
40225 b.SetControl(v0)
40226 b.Aux = nil
40227 return true
40228 }
40229
40230
40231
40232 for v.Op == OpARM64CMPconst {
40233 if v.AuxInt != 0 {
40234 break
40235 }
40236 z := v.Args[0]
40237 if z.Op != OpARM64AND {
40238 break
40239 }
40240 y := z.Args[1]
40241 x := z.Args[0]
40242 if !(z.Uses == 1) {
40243 break
40244 }
40245 b.Kind = BlockARM64LT
40246 v0 := b.NewValue0(v.Pos, OpARM64TST, types.TypeFlags)
40247 v0.AddArg(x)
40248 v0.AddArg(y)
40249 b.SetControl(v0)
40250 b.Aux = nil
40251 return true
40252 }
40253
40254
40255
40256 for v.Op == OpARM64CMPWconst {
40257 if v.AuxInt != 0 {
40258 break
40259 }
40260 z := v.Args[0]
40261 if z.Op != OpARM64AND {
40262 break
40263 }
40264 y := z.Args[1]
40265 x := z.Args[0]
40266 if !(z.Uses == 1) {
40267 break
40268 }
40269 b.Kind = BlockARM64LT
40270 v0 := b.NewValue0(v.Pos, OpARM64TSTW, types.TypeFlags)
40271 v0.AddArg(x)
40272 v0.AddArg(y)
40273 b.SetControl(v0)
40274 b.Aux = nil
40275 return true
40276 }
40277
40278
40279
40280 for v.Op == OpARM64CMPconst {
40281 if v.AuxInt != 0 {
40282 break
40283 }
40284 x := v.Args[0]
40285 if x.Op != OpARM64ANDconst {
40286 break
40287 }
40288 c := x.AuxInt
40289 y := x.Args[0]
40290 if !(x.Uses == 1) {
40291 break
40292 }
40293 b.Kind = BlockARM64LT
40294 v0 := b.NewValue0(v.Pos, OpARM64TSTconst, types.TypeFlags)
40295 v0.AuxInt = c
40296 v0.AddArg(y)
40297 b.SetControl(v0)
40298 b.Aux = nil
40299 return true
40300 }
40301
40302
40303
40304 for v.Op == OpARM64CMPconst {
40305 if v.AuxInt != 0 {
40306 break
40307 }
40308 x := v.Args[0]
40309 if x.Op != OpARM64ADDconst {
40310 break
40311 }
40312 c := x.AuxInt
40313 y := x.Args[0]
40314 if !(x.Uses == 1) {
40315 break
40316 }
40317 b.Kind = BlockARM64LT
40318 v0 := b.NewValue0(v.Pos, OpARM64CMNconst, types.TypeFlags)
40319 v0.AuxInt = c
40320 v0.AddArg(y)
40321 b.SetControl(v0)
40322 b.Aux = nil
40323 return true
40324 }
40325
40326
40327
40328 for v.Op == OpARM64CMPWconst {
40329 if v.AuxInt != 0 {
40330 break
40331 }
40332 x := v.Args[0]
40333 if x.Op != OpARM64ADDconst {
40334 break
40335 }
40336 c := x.AuxInt
40337 y := x.Args[0]
40338 if !(x.Uses == 1) {
40339 break
40340 }
40341 b.Kind = BlockARM64LT
40342 v0 := b.NewValue0(v.Pos, OpARM64CMNWconst, types.TypeFlags)
40343 v0.AuxInt = c
40344 v0.AddArg(y)
40345 b.SetControl(v0)
40346 b.Aux = nil
40347 return true
40348 }
40349
40350
40351
40352 for v.Op == OpARM64CMPconst {
40353 if v.AuxInt != 0 {
40354 break
40355 }
40356 z := v.Args[0]
40357 if z.Op != OpARM64ADD {
40358 break
40359 }
40360 y := z.Args[1]
40361 x := z.Args[0]
40362 if !(z.Uses == 1) {
40363 break
40364 }
40365 b.Kind = BlockARM64LT
40366 v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags)
40367 v0.AddArg(x)
40368 v0.AddArg(y)
40369 b.SetControl(v0)
40370 b.Aux = nil
40371 return true
40372 }
40373
40374
40375
40376 for v.Op == OpARM64CMPWconst {
40377 if v.AuxInt != 0 {
40378 break
40379 }
40380 z := v.Args[0]
40381 if z.Op != OpARM64ADD {
40382 break
40383 }
40384 y := z.Args[1]
40385 x := z.Args[0]
40386 if !(z.Uses == 1) {
40387 break
40388 }
40389 b.Kind = BlockARM64LT
40390 v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags)
40391 v0.AddArg(x)
40392 v0.AddArg(y)
40393 b.SetControl(v0)
40394 b.Aux = nil
40395 return true
40396 }
40397
40398
40399
40400 for v.Op == OpARM64CMP {
40401 _ = v.Args[1]
40402 x := v.Args[0]
40403 z := v.Args[1]
40404 if z.Op != OpARM64NEG {
40405 break
40406 }
40407 y := z.Args[0]
40408 if !(z.Uses == 1) {
40409 break
40410 }
40411 b.Kind = BlockARM64LT
40412 v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags)
40413 v0.AddArg(x)
40414 v0.AddArg(y)
40415 b.SetControl(v0)
40416 b.Aux = nil
40417 return true
40418 }
40419
40420
40421
40422 for v.Op == OpARM64CMPW {
40423 _ = v.Args[1]
40424 x := v.Args[0]
40425 z := v.Args[1]
40426 if z.Op != OpARM64NEG {
40427 break
40428 }
40429 y := z.Args[0]
40430 if !(z.Uses == 1) {
40431 break
40432 }
40433 b.Kind = BlockARM64LT
40434 v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags)
40435 v0.AddArg(x)
40436 v0.AddArg(y)
40437 b.SetControl(v0)
40438 b.Aux = nil
40439 return true
40440 }
40441
40442
40443
40444 for v.Op == OpARM64CMPconst {
40445 if v.AuxInt != 0 {
40446 break
40447 }
40448 z := v.Args[0]
40449 if z.Op != OpARM64MADD {
40450 break
40451 }
40452 y := z.Args[2]
40453 a := z.Args[0]
40454 x := z.Args[1]
40455 if !(z.Uses == 1) {
40456 break
40457 }
40458 b.Kind = BlockARM64LT
40459 v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags)
40460 v0.AddArg(a)
40461 v1 := b.NewValue0(v.Pos, OpARM64MUL, x.Type)
40462 v1.AddArg(x)
40463 v1.AddArg(y)
40464 v0.AddArg(v1)
40465 b.SetControl(v0)
40466 b.Aux = nil
40467 return true
40468 }
40469
40470
40471
40472 for v.Op == OpARM64CMPconst {
40473 if v.AuxInt != 0 {
40474 break
40475 }
40476 z := v.Args[0]
40477 if z.Op != OpARM64MSUB {
40478 break
40479 }
40480 y := z.Args[2]
40481 a := z.Args[0]
40482 x := z.Args[1]
40483 if !(z.Uses == 1) {
40484 break
40485 }
40486 b.Kind = BlockARM64LT
40487 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
40488 v0.AddArg(a)
40489 v1 := b.NewValue0(v.Pos, OpARM64MUL, x.Type)
40490 v1.AddArg(x)
40491 v1.AddArg(y)
40492 v0.AddArg(v1)
40493 b.SetControl(v0)
40494 b.Aux = nil
40495 return true
40496 }
40497
40498
40499
40500 for v.Op == OpARM64CMPWconst {
40501 if v.AuxInt != 0 {
40502 break
40503 }
40504 z := v.Args[0]
40505 if z.Op != OpARM64MADDW {
40506 break
40507 }
40508 y := z.Args[2]
40509 a := z.Args[0]
40510 x := z.Args[1]
40511 if !(z.Uses == 1) {
40512 break
40513 }
40514 b.Kind = BlockARM64LT
40515 v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags)
40516 v0.AddArg(a)
40517 v1 := b.NewValue0(v.Pos, OpARM64MULW, x.Type)
40518 v1.AddArg(x)
40519 v1.AddArg(y)
40520 v0.AddArg(v1)
40521 b.SetControl(v0)
40522 b.Aux = nil
40523 return true
40524 }
40525
40526
40527
40528 for v.Op == OpARM64CMPWconst {
40529 if v.AuxInt != 0 {
40530 break
40531 }
40532 z := v.Args[0]
40533 if z.Op != OpARM64MSUBW {
40534 break
40535 }
40536 y := z.Args[2]
40537 a := z.Args[0]
40538 x := z.Args[1]
40539 if !(z.Uses == 1) {
40540 break
40541 }
40542 b.Kind = BlockARM64LT
40543 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
40544 v0.AddArg(a)
40545 v1 := b.NewValue0(v.Pos, OpARM64MULW, x.Type)
40546 v1.AddArg(x)
40547 v1.AddArg(y)
40548 v0.AddArg(v1)
40549 b.SetControl(v0)
40550 b.Aux = nil
40551 return true
40552 }
40553
40554
40555
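// A signed test for "less than zero" needs only the sign bit, so the
// comparison against zero is replaced by TBNZ on bit 31 (32-bit) or bit 63 (64-bit).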
40556 for v.Op == OpARM64CMPWconst {
40557 if v.AuxInt != 0 {
40558 break
40559 }
40560 x := v.Args[0]
40561 b.Kind = BlockARM64TBNZ
40562 b.SetControl(x)
40563 b.Aux = int64(31)
40564 return true
40565 }
40566
40567
40568
40569 for v.Op == OpARM64CMPconst {
40570 if v.AuxInt != 0 {
40571 break
40572 }
40573 x := v.Args[0]
40574 b.Kind = BlockARM64TBNZ
40575 b.SetControl(x)
40576 b.Aux = int64(63)
40577 return true
40578 }
40579
40580
40581
40582 for v.Op == OpARM64FlagEQ {
40583 b.Kind = BlockFirst
40584 b.SetControl(nil)
40585 b.Aux = nil
40586 b.swapSuccessors()
40587 return true
40588 }
40589
40590
40591
40592 for v.Op == OpARM64FlagLT_ULT {
40593 b.Kind = BlockFirst
40594 b.SetControl(nil)
40595 b.Aux = nil
40596 return true
40597 }
40598
40599
40600
40601 for v.Op == OpARM64FlagLT_UGT {
40602 b.Kind = BlockFirst
40603 b.SetControl(nil)
40604 b.Aux = nil
40605 return true
40606 }
40607
40608
40609
40610 for v.Op == OpARM64FlagGT_ULT {
40611 b.Kind = BlockFirst
40612 b.SetControl(nil)
40613 b.Aux = nil
40614 b.swapSuccessors()
40615 return true
40616 }
40617
40618
40619
40620 for v.Op == OpARM64FlagGT_UGT {
40621 b.Kind = BlockFirst
40622 b.SetControl(nil)
40623 b.Aux = nil
40624 b.swapSuccessors()
40625 return true
40626 }
40627
40628
40629
40630 for v.Op == OpARM64InvertFlags {
40631 cmp := v.Args[0]
40632 b.Kind = BlockARM64GT
40633 b.SetControl(cmp)
40634 b.Aux = nil
40635 return true
40636 }
40637 case BlockARM64NE:
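// NE blocks fold zero-comparisons into TST/CMN forms as above. A plain
// CMPconst [0] x becomes an NZ block and CMPWconst [0] x becomes NZW. A TST
// against a single-bit constant becomes TBNZ on that bit. Flag constants
// resolve to First (only FlagEQ swaps successors), and InvertFlags leaves the
// block as NE, since inequality is unaffected by swapping the operands.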
40638
40639
40640
40641 for v.Op == OpARM64CMPWconst {
40642 if v.AuxInt != 0 {
40643 break
40644 }
40645 x := v.Args[0]
40646 if x.Op != OpARM64ANDconst {
40647 break
40648 }
40649 c := x.AuxInt
40650 y := x.Args[0]
40651 if !(x.Uses == 1) {
40652 break
40653 }
40654 b.Kind = BlockARM64NE
40655 v0 := b.NewValue0(v.Pos, OpARM64TSTWconst, types.TypeFlags)
40656 v0.AuxInt = c
40657 v0.AddArg(y)
40658 b.SetControl(v0)
40659 b.Aux = nil
40660 return true
40661 }
40662
40663
40664
40665 for v.Op == OpARM64CMPconst {
40666 if v.AuxInt != 0 {
40667 break
40668 }
40669 z := v.Args[0]
40670 if z.Op != OpARM64AND {
40671 break
40672 }
40673 y := z.Args[1]
40674 x := z.Args[0]
40675 if !(z.Uses == 1) {
40676 break
40677 }
40678 b.Kind = BlockARM64NE
40679 v0 := b.NewValue0(v.Pos, OpARM64TST, types.TypeFlags)
40680 v0.AddArg(x)
40681 v0.AddArg(y)
40682 b.SetControl(v0)
40683 b.Aux = nil
40684 return true
40685 }
40686
40687
40688
40689 for v.Op == OpARM64CMPWconst {
40690 if v.AuxInt != 0 {
40691 break
40692 }
40693 z := v.Args[0]
40694 if z.Op != OpARM64AND {
40695 break
40696 }
40697 y := z.Args[1]
40698 x := z.Args[0]
40699 if !(z.Uses == 1) {
40700 break
40701 }
40702 b.Kind = BlockARM64NE
40703 v0 := b.NewValue0(v.Pos, OpARM64TSTW, types.TypeFlags)
40704 v0.AddArg(x)
40705 v0.AddArg(y)
40706 b.SetControl(v0)
40707 b.Aux = nil
40708 return true
40709 }
40710
40711
40712
40713 for v.Op == OpARM64CMPconst {
40714 if v.AuxInt != 0 {
40715 break
40716 }
40717 x := v.Args[0]
40718 if x.Op != OpARM64ANDconst {
40719 break
40720 }
40721 c := x.AuxInt
40722 y := x.Args[0]
40723 if !(x.Uses == 1) {
40724 break
40725 }
40726 b.Kind = BlockARM64NE
40727 v0 := b.NewValue0(v.Pos, OpARM64TSTconst, types.TypeFlags)
40728 v0.AuxInt = c
40729 v0.AddArg(y)
40730 b.SetControl(v0)
40731 b.Aux = nil
40732 return true
40733 }
40734
40735
40736
40737 for v.Op == OpARM64CMPconst {
40738 if v.AuxInt != 0 {
40739 break
40740 }
40741 x := v.Args[0]
40742 if x.Op != OpARM64ADDconst {
40743 break
40744 }
40745 c := x.AuxInt
40746 y := x.Args[0]
40747 if !(x.Uses == 1) {
40748 break
40749 }
40750 b.Kind = BlockARM64NE
40751 v0 := b.NewValue0(v.Pos, OpARM64CMNconst, types.TypeFlags)
40752 v0.AuxInt = c
40753 v0.AddArg(y)
40754 b.SetControl(v0)
40755 b.Aux = nil
40756 return true
40757 }
40758
40759
40760
40761 for v.Op == OpARM64CMPWconst {
40762 if v.AuxInt != 0 {
40763 break
40764 }
40765 x := v.Args[0]
40766 if x.Op != OpARM64ADDconst {
40767 break
40768 }
40769 c := x.AuxInt
40770 y := x.Args[0]
40771 if !(x.Uses == 1) {
40772 break
40773 }
40774 b.Kind = BlockARM64NE
40775 v0 := b.NewValue0(v.Pos, OpARM64CMNWconst, types.TypeFlags)
40776 v0.AuxInt = c
40777 v0.AddArg(y)
40778 b.SetControl(v0)
40779 b.Aux = nil
40780 return true
40781 }
40782
40783
40784
40785 for v.Op == OpARM64CMPconst {
40786 if v.AuxInt != 0 {
40787 break
40788 }
40789 z := v.Args[0]
40790 if z.Op != OpARM64ADD {
40791 break
40792 }
40793 y := z.Args[1]
40794 x := z.Args[0]
40795 if !(z.Uses == 1) {
40796 break
40797 }
40798 b.Kind = BlockARM64NE
40799 v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags)
40800 v0.AddArg(x)
40801 v0.AddArg(y)
40802 b.SetControl(v0)
40803 b.Aux = nil
40804 return true
40805 }
40806
40807
40808
40809 for v.Op == OpARM64CMPWconst {
40810 if v.AuxInt != 0 {
40811 break
40812 }
40813 z := v.Args[0]
40814 if z.Op != OpARM64ADD {
40815 break
40816 }
40817 y := z.Args[1]
40818 x := z.Args[0]
40819 if !(z.Uses == 1) {
40820 break
40821 }
40822 b.Kind = BlockARM64NE
40823 v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags)
40824 v0.AddArg(x)
40825 v0.AddArg(y)
40826 b.SetControl(v0)
40827 b.Aux = nil
40828 return true
40829 }
40830
40831
40832
40833 for v.Op == OpARM64CMP {
40834 _ = v.Args[1]
40835 x := v.Args[0]
40836 z := v.Args[1]
40837 if z.Op != OpARM64NEG {
40838 break
40839 }
40840 y := z.Args[0]
40841 if !(z.Uses == 1) {
40842 break
40843 }
40844 b.Kind = BlockARM64NE
40845 v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags)
40846 v0.AddArg(x)
40847 v0.AddArg(y)
40848 b.SetControl(v0)
40849 b.Aux = nil
40850 return true
40851 }
40852
40853
40854
40855 for v.Op == OpARM64CMPW {
40856 _ = v.Args[1]
40857 x := v.Args[0]
40858 z := v.Args[1]
40859 if z.Op != OpARM64NEG {
40860 break
40861 }
40862 y := z.Args[0]
40863 if !(z.Uses == 1) {
40864 break
40865 }
40866 b.Kind = BlockARM64NE
40867 v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags)
40868 v0.AddArg(x)
40869 v0.AddArg(y)
40870 b.SetControl(v0)
40871 b.Aux = nil
40872 return true
40873 }
40874
40875
40876
40877 for v.Op == OpARM64CMPconst {
40878 if v.AuxInt != 0 {
40879 break
40880 }
40881 x := v.Args[0]
40882 b.Kind = BlockARM64NZ
40883 b.SetControl(x)
40884 b.Aux = nil
40885 return true
40886 }
40887
40888
40889
40890 for v.Op == OpARM64CMPWconst {
40891 if v.AuxInt != 0 {
40892 break
40893 }
40894 x := v.Args[0]
40895 b.Kind = BlockARM64NZW
40896 b.SetControl(x)
40897 b.Aux = nil
40898 return true
40899 }
40900
40901
40902
40903 for v.Op == OpARM64CMPconst {
40904 if v.AuxInt != 0 {
40905 break
40906 }
40907 z := v.Args[0]
40908 if z.Op != OpARM64MADD {
40909 break
40910 }
40911 y := z.Args[2]
40912 a := z.Args[0]
40913 x := z.Args[1]
40914 if !(z.Uses == 1) {
40915 break
40916 }
40917 b.Kind = BlockARM64NE
40918 v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags)
40919 v0.AddArg(a)
40920 v1 := b.NewValue0(v.Pos, OpARM64MUL, x.Type)
40921 v1.AddArg(x)
40922 v1.AddArg(y)
40923 v0.AddArg(v1)
40924 b.SetControl(v0)
40925 b.Aux = nil
40926 return true
40927 }
40928
40929
40930
40931 for v.Op == OpARM64CMPconst {
40932 if v.AuxInt != 0 {
40933 break
40934 }
40935 z := v.Args[0]
40936 if z.Op != OpARM64MSUB {
40937 break
40938 }
40939 y := z.Args[2]
40940 a := z.Args[0]
40941 x := z.Args[1]
40942 if !(z.Uses == 1) {
40943 break
40944 }
40945 b.Kind = BlockARM64NE
40946 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
40947 v0.AddArg(a)
40948 v1 := b.NewValue0(v.Pos, OpARM64MUL, x.Type)
40949 v1.AddArg(x)
40950 v1.AddArg(y)
40951 v0.AddArg(v1)
40952 b.SetControl(v0)
40953 b.Aux = nil
40954 return true
40955 }
40956
40957
40958
40959 for v.Op == OpARM64CMPWconst {
40960 if v.AuxInt != 0 {
40961 break
40962 }
40963 z := v.Args[0]
40964 if z.Op != OpARM64MADDW {
40965 break
40966 }
40967 y := z.Args[2]
40968 a := z.Args[0]
40969 x := z.Args[1]
40970 if !(z.Uses == 1) {
40971 break
40972 }
40973 b.Kind = BlockARM64NE
40974 v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags)
40975 v0.AddArg(a)
40976 v1 := b.NewValue0(v.Pos, OpARM64MULW, x.Type)
40977 v1.AddArg(x)
40978 v1.AddArg(y)
40979 v0.AddArg(v1)
40980 b.SetControl(v0)
40981 b.Aux = nil
40982 return true
40983 }
40984
40985
40986
40987 for v.Op == OpARM64CMPWconst {
40988 if v.AuxInt != 0 {
40989 break
40990 }
40991 z := v.Args[0]
40992 if z.Op != OpARM64MSUBW {
40993 break
40994 }
40995 y := z.Args[2]
40996 a := z.Args[0]
40997 x := z.Args[1]
40998 if !(z.Uses == 1) {
40999 break
41000 }
41001 b.Kind = BlockARM64NE
41002 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
41003 v0.AddArg(a)
41004 v1 := b.NewValue0(v.Pos, OpARM64MULW, x.Type)
41005 v1.AddArg(x)
41006 v1.AddArg(y)
41007 v0.AddArg(v1)
41008 b.SetControl(v0)
41009 b.Aux = nil
41010 return true
41011 }
41012
41013
41014
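// Testing a power-of-two constant examines a single bit, so the TST is
// replaced by TBNZ on that bit (ntz(c) is the bit index).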
41015 for v.Op == OpARM64TSTconst {
41016 c := v.AuxInt
41017 x := v.Args[0]
41018 if !(oneBit(c)) {
41019 break
41020 }
41021 b.Kind = BlockARM64TBNZ
41022 b.SetControl(x)
41023 b.Aux = ntz(c)
41024 return true
41025 }
41026
41027
41028
41029 for v.Op == OpARM64TSTWconst {
41030 c := v.AuxInt
41031 x := v.Args[0]
41032 if !(oneBit(int64(uint32(c)))) {
41033 break
41034 }
41035 b.Kind = BlockARM64TBNZ
41036 b.SetControl(x)
41037 b.Aux = ntz(int64(uint32(c)))
41038 return true
41039 }
41040
41041
41042
41043 for v.Op == OpARM64FlagEQ {
41044 b.Kind = BlockFirst
41045 b.SetControl(nil)
41046 b.Aux = nil
41047 b.swapSuccessors()
41048 return true
41049 }
41050
41051
41052
41053 for v.Op == OpARM64FlagLT_ULT {
41054 b.Kind = BlockFirst
41055 b.SetControl(nil)
41056 b.Aux = nil
41057 return true
41058 }
41059
41060
41061
41062 for v.Op == OpARM64FlagLT_UGT {
41063 b.Kind = BlockFirst
41064 b.SetControl(nil)
41065 b.Aux = nil
41066 return true
41067 }
41068
41069
41070
41071 for v.Op == OpARM64FlagGT_ULT {
41072 b.Kind = BlockFirst
41073 b.SetControl(nil)
41074 b.Aux = nil
41075 return true
41076 }
41077
41078
41079
41080 for v.Op == OpARM64FlagGT_UGT {
41081 b.Kind = BlockFirst
41082 b.SetControl(nil)
41083 b.Aux = nil
41084 return true
41085 }
41086
41087
41088
41089 for v.Op == OpARM64InvertFlags {
41090 cmp := v.Args[0]
41091 b.Kind = BlockARM64NE
41092 b.SetControl(cmp)
41093 b.Aux = nil
41094 return true
41095 }
41096 case BlockARM64NZ:
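// An NZ block whose control is a comparison pseudo-op is rewritten to the
// matching flag-based block, the same mapping as for If. A single-bit ANDconst
// becomes TBNZ on that bit, and a constant control resolves the branch to
// First (swapping successors when the constant is zero).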
41097
41098
41099
41100 for v.Op == OpARM64Equal {
41101 cc := v.Args[0]
41102 b.Kind = BlockARM64EQ
41103 b.SetControl(cc)
41104 b.Aux = nil
41105 return true
41106 }
41107
41108
41109
41110 for v.Op == OpARM64NotEqual {
41111 cc := v.Args[0]
41112 b.Kind = BlockARM64NE
41113 b.SetControl(cc)
41114 b.Aux = nil
41115 return true
41116 }
41117
41118
41119
41120 for v.Op == OpARM64LessThan {
41121 cc := v.Args[0]
41122 b.Kind = BlockARM64LT
41123 b.SetControl(cc)
41124 b.Aux = nil
41125 return true
41126 }
41127
41128
41129
41130 for v.Op == OpARM64LessThanU {
41131 cc := v.Args[0]
41132 b.Kind = BlockARM64ULT
41133 b.SetControl(cc)
41134 b.Aux = nil
41135 return true
41136 }
41137
41138
41139
41140 for v.Op == OpARM64LessEqual {
41141 cc := v.Args[0]
41142 b.Kind = BlockARM64LE
41143 b.SetControl(cc)
41144 b.Aux = nil
41145 return true
41146 }
41147
41148
41149
41150 for v.Op == OpARM64LessEqualU {
41151 cc := v.Args[0]
41152 b.Kind = BlockARM64ULE
41153 b.SetControl(cc)
41154 b.Aux = nil
41155 return true
41156 }
41157
41158
41159
41160 for v.Op == OpARM64GreaterThan {
41161 cc := v.Args[0]
41162 b.Kind = BlockARM64GT
41163 b.SetControl(cc)
41164 b.Aux = nil
41165 return true
41166 }
41167
41168
41169
41170 for v.Op == OpARM64GreaterThanU {
41171 cc := v.Args[0]
41172 b.Kind = BlockARM64UGT
41173 b.SetControl(cc)
41174 b.Aux = nil
41175 return true
41176 }
41177
41178
41179
41180 for v.Op == OpARM64GreaterEqual {
41181 cc := v.Args[0]
41182 b.Kind = BlockARM64GE
41183 b.SetControl(cc)
41184 b.Aux = nil
41185 return true
41186 }
41187
41188
41189
41190 for v.Op == OpARM64GreaterEqualU {
41191 cc := v.Args[0]
41192 b.Kind = BlockARM64UGE
41193 b.SetControl(cc)
41194 b.Aux = nil
41195 return true
41196 }
41197
41198
41199
41200 for v.Op == OpARM64LessThanF {
41201 cc := v.Args[0]
41202 b.Kind = BlockARM64FLT
41203 b.SetControl(cc)
41204 b.Aux = nil
41205 return true
41206 }
41207
41208
41209
41210 for v.Op == OpARM64LessEqualF {
41211 cc := v.Args[0]
41212 b.Kind = BlockARM64FLE
41213 b.SetControl(cc)
41214 b.Aux = nil
41215 return true
41216 }
41217
41218
41219
41220 for v.Op == OpARM64GreaterThanF {
41221 cc := v.Args[0]
41222 b.Kind = BlockARM64FGT
41223 b.SetControl(cc)
41224 b.Aux = nil
41225 return true
41226 }
41227
41228
41229
41230 for v.Op == OpARM64GreaterEqualF {
41231 cc := v.Args[0]
41232 b.Kind = BlockARM64FGE
41233 b.SetControl(cc)
41234 b.Aux = nil
41235 return true
41236 }
41237
41238
41239
41240 for v.Op == OpARM64ANDconst {
41241 c := v.AuxInt
41242 x := v.Args[0]
41243 if !(oneBit(c)) {
41244 break
41245 }
41246 b.Kind = BlockARM64TBNZ
41247 b.SetControl(x)
41248 b.Aux = ntz(c)
41249 return true
41250 }
41251
41252
41253
41254 for v.Op == OpARM64MOVDconst {
41255 if v.AuxInt != 0 {
41256 break
41257 }
41258 b.Kind = BlockFirst
41259 b.SetControl(nil)
41260 b.Aux = nil
41261 b.swapSuccessors()
41262 return true
41263 }
41264
41265
41266
41267 for v.Op == OpARM64MOVDconst {
41268 c := v.AuxInt
41269 if !(c != 0) {
41270 break
41271 }
41272 b.Kind = BlockFirst
41273 b.SetControl(nil)
41274 b.Aux = nil
41275 return true
41276 }
41277 case BlockARM64NZW:
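// NZW looks only at the low 32 bits: a single-bit ANDconst becomes TBNZ on
// that bit, and a constant control resolves to First, swapping successors when
// its low 32 bits are zero.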
41278
41279
41280
41281 for v.Op == OpARM64ANDconst {
41282 c := v.AuxInt
41283 x := v.Args[0]
41284 if !(oneBit(int64(uint32(c)))) {
41285 break
41286 }
41287 b.Kind = BlockARM64TBNZ
41288 b.SetControl(x)
41289 b.Aux = ntz(int64(uint32(c)))
41290 return true
41291 }
41292
41293
41294
41295 for v.Op == OpARM64MOVDconst {
41296 c := v.AuxInt
41297 if !(int32(c) == 0) {
41298 break
41299 }
41300 b.Kind = BlockFirst
41301 b.SetControl(nil)
41302 b.Aux = nil
41303 b.swapSuccessors()
41304 return true
41305 }
41306
41307
41308
41309 for v.Op == OpARM64MOVDconst {
41310 c := v.AuxInt
41311 if !(int32(c) != 0) {
41312 break
41313 }
41314 b.Kind = BlockFirst
41315 b.SetControl(nil)
41316 b.Aux = nil
41317 return true
41318 }
41319 case BlockARM64UGE:
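// A known flag constant resolves a UGE block to First, swapping successors
// when the flags record an unsigned less-than. InvertFlags turns UGE into ULE.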
41320
41321
41322
41323 for v.Op == OpARM64FlagEQ {
41324 b.Kind = BlockFirst
41325 b.SetControl(nil)
41326 b.Aux = nil
41327 return true
41328 }
41329
41330
41331
41332 for v.Op == OpARM64FlagLT_ULT {
41333 b.Kind = BlockFirst
41334 b.SetControl(nil)
41335 b.Aux = nil
41336 b.swapSuccessors()
41337 return true
41338 }
41339
41340
41341
41342 for v.Op == OpARM64FlagLT_UGT {
41343 b.Kind = BlockFirst
41344 b.SetControl(nil)
41345 b.Aux = nil
41346 return true
41347 }
41348
41349
41350
41351 for v.Op == OpARM64FlagGT_ULT {
41352 b.Kind = BlockFirst
41353 b.SetControl(nil)
41354 b.Aux = nil
41355 b.swapSuccessors()
41356 return true
41357 }
41358
41359
41360
41361 for v.Op == OpARM64FlagGT_UGT {
41362 b.Kind = BlockFirst
41363 b.SetControl(nil)
41364 b.Aux = nil
41365 return true
41366 }
41367
41368
41369
41370 for v.Op == OpARM64InvertFlags {
41371 cmp := v.Args[0]
41372 b.Kind = BlockARM64ULE
41373 b.SetControl(cmp)
41374 b.Aux = nil
41375 return true
41376 }
41377 case BlockARM64UGT:
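// Flag constants resolve a UGT block to First, swapping successors unless the
// flags record an unsigned greater-than. InvertFlags turns UGT into ULT.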
41378
41379
41380
41381 for v.Op == OpARM64FlagEQ {
41382 b.Kind = BlockFirst
41383 b.SetControl(nil)
41384 b.Aux = nil
41385 b.swapSuccessors()
41386 return true
41387 }
41388
41389
41390
41391 for v.Op == OpARM64FlagLT_ULT {
41392 b.Kind = BlockFirst
41393 b.SetControl(nil)
41394 b.Aux = nil
41395 b.swapSuccessors()
41396 return true
41397 }
41398
41399
41400
41401 for v.Op == OpARM64FlagLT_UGT {
41402 b.Kind = BlockFirst
41403 b.SetControl(nil)
41404 b.Aux = nil
41405 return true
41406 }
41407
41408
41409
41410 for v.Op == OpARM64FlagGT_ULT {
41411 b.Kind = BlockFirst
41412 b.SetControl(nil)
41413 b.Aux = nil
41414 b.swapSuccessors()
41415 return true
41416 }
41417
41418
41419
41420 for v.Op == OpARM64FlagGT_UGT {
41421 b.Kind = BlockFirst
41422 b.SetControl(nil)
41423 b.Aux = nil
41424 return true
41425 }
41426
41427
41428
41429 for v.Op == OpARM64InvertFlags {
41430 cmp := v.Args[0]
41431 b.Kind = BlockARM64ULT
41432 b.SetControl(cmp)
41433 b.Aux = nil
41434 return true
41435 }
41436 case BlockARM64ULE:
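// Flag constants resolve a ULE block to First, swapping successors when the
// flags record an unsigned greater-than. InvertFlags turns ULE into UGE.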
41437
41438
41439
41440 for v.Op == OpARM64FlagEQ {
41441 b.Kind = BlockFirst
41442 b.SetControl(nil)
41443 b.Aux = nil
41444 return true
41445 }
41446
41447
41448
41449 for v.Op == OpARM64FlagLT_ULT {
41450 b.Kind = BlockFirst
41451 b.SetControl(nil)
41452 b.Aux = nil
41453 return true
41454 }
41455
41456
41457
41458 for v.Op == OpARM64FlagLT_UGT {
41459 b.Kind = BlockFirst
41460 b.SetControl(nil)
41461 b.Aux = nil
41462 b.swapSuccessors()
41463 return true
41464 }
41465
41466
41467
41468 for v.Op == OpARM64FlagGT_ULT {
41469 b.Kind = BlockFirst
41470 b.SetControl(nil)
41471 b.Aux = nil
41472 return true
41473 }
41474
41475
41476
41477 for v.Op == OpARM64FlagGT_UGT {
41478 b.Kind = BlockFirst
41479 b.SetControl(nil)
41480 b.Aux = nil
41481 b.swapSuccessors()
41482 return true
41483 }
41484
41485
41486
41487 for v.Op == OpARM64InvertFlags {
41488 cmp := v.Args[0]
41489 b.Kind = BlockARM64UGE
41490 b.SetControl(cmp)
41491 b.Aux = nil
41492 return true
41493 }
41494 case BlockARM64ULT:
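// Flag constants resolve a ULT block to First, swapping successors unless the
// flags record an unsigned less-than. InvertFlags turns ULT into UGT.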
41495
41496
41497
41498 for v.Op == OpARM64FlagEQ {
41499 b.Kind = BlockFirst
41500 b.SetControl(nil)
41501 b.Aux = nil
41502 b.swapSuccessors()
41503 return true
41504 }
41505
41506
41507
41508 for v.Op == OpARM64FlagLT_ULT {
41509 b.Kind = BlockFirst
41510 b.SetControl(nil)
41511 b.Aux = nil
41512 return true
41513 }
41514
41515
41516
41517 for v.Op == OpARM64FlagLT_UGT {
41518 b.Kind = BlockFirst
41519 b.SetControl(nil)
41520 b.Aux = nil
41521 b.swapSuccessors()
41522 return true
41523 }
41524
41525
41526
41527 for v.Op == OpARM64FlagGT_ULT {
41528 b.Kind = BlockFirst
41529 b.SetControl(nil)
41530 b.Aux = nil
41531 return true
41532 }
41533
41534
41535
41536 for v.Op == OpARM64FlagGT_UGT {
41537 b.Kind = BlockFirst
41538 b.SetControl(nil)
41539 b.Aux = nil
41540 b.swapSuccessors()
41541 return true
41542 }
41543
41544
41545
41546 for v.Op == OpARM64InvertFlags {
41547 cmp := v.Args[0]
41548 b.Kind = BlockARM64UGT
41549 b.SetControl(cmp)
41550 b.Aux = nil
41551 return true
41552 }
41553 case BlockARM64Z:
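// A Z block tests for zero. A single-bit ANDconst becomes TBZ on that bit; a
// zero constant resolves to First on the yes branch, and a non-zero constant
// to First with successors swapped.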
41554
41555
41556
41557 for v.Op == OpARM64ANDconst {
41558 c := v.AuxInt
41559 x := v.Args[0]
41560 if !(oneBit(c)) {
41561 break
41562 }
41563 b.Kind = BlockARM64TBZ
41564 b.SetControl(x)
41565 b.Aux = ntz(c)
41566 return true
41567 }
41568
41569
41570
41571 for v.Op == OpARM64MOVDconst {
41572 if v.AuxInt != 0 {
41573 break
41574 }
41575 b.Kind = BlockFirst
41576 b.SetControl(nil)
41577 b.Aux = nil
41578 return true
41579 }
41580
41581
41582
41583 for v.Op == OpARM64MOVDconst {
41584 c := v.AuxInt
41585 if !(c != 0) {
41586 break
41587 }
41588 b.Kind = BlockFirst
41589 b.SetControl(nil)
41590 b.Aux = nil
41591 b.swapSuccessors()
41592 return true
41593 }
41594 case BlockARM64ZW:
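// ZW is the 32-bit form of Z: a single-bit ANDconst (within the low 32 bits)
// becomes TBZ, and a constant control resolves to First, swapping successors
// when its low 32 bits are non-zero.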
41595
41596
41597
41598 for v.Op == OpARM64ANDconst {
41599 c := v.AuxInt
41600 x := v.Args[0]
41601 if !(oneBit(int64(uint32(c)))) {
41602 break
41603 }
41604 b.Kind = BlockARM64TBZ
41605 b.SetControl(x)
41606 b.Aux = ntz(int64(uint32(c)))
41607 return true
41608 }
41609
41610
41611
41612 for v.Op == OpARM64MOVDconst {
41613 c := v.AuxInt
41614 if !(int32(c) == 0) {
41615 break
41616 }
41617 b.Kind = BlockFirst
41618 b.SetControl(nil)
41619 b.Aux = nil
41620 return true
41621 }
41622
41623
41624
41625 for v.Op == OpARM64MOVDconst {
41626 c := v.AuxInt
41627 if !(int32(c) != 0) {
41628 break
41629 }
41630 b.Kind = BlockFirst
41631 b.SetControl(nil)
41632 b.Aux = nil
41633 b.swapSuccessors()
41634 return true
41635 }
41636 }
41637 return false
41638 }
41639