...
Text file src/pkg/cmd/compile/internal/ssa/gen/Wasm.rules
1 // Copyright 2018 The Go Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style
3 // license that can be found in the LICENSE file.
4
5 // Lowering arithmetic
//
// The wasm target used here has only 64-bit integer ALU ops, so every Go
// integer width (8/16/32/64, plus pointers and booleans) lowers to the
// corresponding I64 instruction. Junk in the unused high bits is fine for
// these ops because this file ignores high parts of narrow values (see the
// truncation rules below, which are plain copies).
6 (Add(64|32|16|8|Ptr) x y) -> (I64Add x y)
7 (Add(64|32)F x y) -> (F64Add x y)
8
9 (Sub(64|32|16|8|Ptr) x y) -> (I64Sub x y)
10 (Sub(64|32)F x y) -> (F64Sub x y)
11
12 (Mul(64|32|16|8) x y) -> (I64Mul x y)
13 (Mul(64|32)F x y) -> (F64Mul x y)
14
// Division and remainder depend on the full operand values, so narrow
// operands must first be extended to 64 bits: sign-extended for the signed
// ops (I64DivS/I64RemS), zero-extended for the unsigned ops (I64DivU/I64RemU).
15 (Div64 x y) -> (I64DivS x y)
16 (Div64u x y) -> (I64DivU x y)
17 (Div32 x y) -> (I64DivS (SignExt32to64 x) (SignExt32to64 y))
18 (Div32u x y) -> (I64DivU (ZeroExt32to64 x) (ZeroExt32to64 y))
19 (Div16 x y) -> (I64DivS (SignExt16to64 x) (SignExt16to64 y))
20 (Div16u x y) -> (I64DivU (ZeroExt16to64 x) (ZeroExt16to64 y))
21 (Div8 x y) -> (I64DivS (SignExt8to64 x) (SignExt8to64 y))
22 (Div8u x y) -> (I64DivU (ZeroExt8to64 x) (ZeroExt8to64 y))
23 (Div(64|32)F x y) -> (F64Div x y)
24
25 (Mod64 x y) -> (I64RemS x y)
26 (Mod64u x y) -> (I64RemU x y)
27 (Mod32 x y) -> (I64RemS (SignExt32to64 x) (SignExt32to64 y))
28 (Mod32u x y) -> (I64RemU (ZeroExt32to64 x) (ZeroExt32to64 y))
29 (Mod16 x y) -> (I64RemS (SignExt16to64 x) (SignExt16to64 y))
30 (Mod16u x y) -> (I64RemU (ZeroExt16to64 x) (ZeroExt16to64 y))
31 (Mod8 x y) -> (I64RemS (SignExt8to64 x) (SignExt8to64 y))
32 (Mod8u x y) -> (I64RemU (ZeroExt8to64 x) (ZeroExt8to64 y))
33
34 (And(64|32|16|8|B) x y) -> (I64And x y)
35
36 (Or(64|32|16|8|B) x y) -> (I64Or x y)
37
38 (Xor(64|32|16|8) x y) -> (I64Xor x y)
39
// Integer negation is 0 - x; there is no dedicated I64Neg here.
40 (Neg(64|32|16|8) x) -> (I64Sub (I64Const [0]) x)
// Both float widths negate as f64; 32-bit floats are carried in f64 form
// (see LoweredRound32F in the conversion rules below).
41 (Neg32F x) -> (F64Neg x)
42 (Neg64F x) -> (F64Neg x)
43
// Bitwise complement: x ^ -1 flips every bit.
44 (Com(64|32|16|8) x) -> (I64Xor x (I64Const [-1]))
45
// Boolean not: a bool is 0 or 1, so "equals zero" inverts it.
46 (Not x) -> (I64Eqz x)
47
48 // Lowering pointer arithmetic
49 (OffPtr [off] ptr) -> (I64AddConst [off] ptr)
50
51 // Lowering extension
52 // It is unnecessary to extend loads
// (the sized load ops below already deliver a correctly sign/zero-extended
// 64-bit value, so re-extending would be a no-op).
53 (SignExt32to64 x:(I64Load32S _ _)) -> x
54 (SignExt16to(64|32) x:(I64Load16S _ _)) -> x
55 (SignExt8to(64|32|16) x:(I64Load8S _ _)) -> x
56 (ZeroExt32to64 x:(I64Load32U _ _)) -> x
57 (ZeroExt16to(64|32) x:(I64Load16U _ _)) -> x
58 (ZeroExt8to(64|32|16) x:(I64Load8U _ _)) -> x
// Use the dedicated wasm sign-extension instructions when the target
// feature is enabled (gated on objabi.GOWASM.SignExt).
59 (SignExt32to64 x) && objabi.GOWASM.SignExt -> (I64Extend32S x)
60 (SignExt8to(64|32|16) x) && objabi.GOWASM.SignExt -> (I64Extend8S x)
61 (SignExt16to(64|32) x) && objabi.GOWASM.SignExt -> (I64Extend16S x)
// Fallback sign extension: shift the value's sign bit up to bit 63, then
// arithmetic-shift it back down (shift amount = 64 - width).
62 (SignExt32to64 x) -> (I64ShrS (I64Shl x (I64Const [32])) (I64Const [32]))
63 (SignExt16to(64|32) x) -> (I64ShrS (I64Shl x (I64Const [48])) (I64Const [48]))
64 (SignExt8to(64|32|16) x) -> (I64ShrS (I64Shl x (I64Const [56])) (I64Const [56]))
// Zero extension is just masking off the high bits.
65 (ZeroExt32to64 x) -> (I64And x (I64Const [0xffffffff]))
66 (ZeroExt16to(64|32) x) -> (I64And x (I64Const [0xffff]))
67 (ZeroExt8to(64|32|16) x) -> (I64And x (I64Const [0xff]))
68
// Slicemask: 0 stays 0, any positive x becomes -1 — negating x sets the
// sign bit for x > 0, and the arithmetic shift by 63 smears it across all
// bits. (Assumes x is a non-negative length, as Slicemask's users provide.)
69 (Slicemask x) -> (I64ShrS (I64Sub (I64Const [0]) x) (I64Const [63]))
70
71 // Lowering truncation
72 // Because we ignore the high parts, truncates are just copies.
73 (Trunc64to(32|16|8) x) -> x
74 (Trunc32to(16|8) x) -> x
75 (Trunc16to8 x) -> x
76
77 // Lowering float <-> int
//
// All float arithmetic here is done in f64. A value of Go type float32 is
// carried as an f64 and squeezed back to float32 precision with
// LoweredRound32F whenever a float32 result is produced.
// Integer operands narrower than 64 bits are extended first (signed
// conversions sign-extend, unsigned conversions zero-extend) because the
// conversion depends on the full value.
78 (Cvt32to32F x) -> (LoweredRound32F (F64ConvertI64S (SignExt32to64 x)))
79 (Cvt32to64F x) -> (F64ConvertI64S (SignExt32to64 x))
80 (Cvt64to32F x) -> (LoweredRound32F (F64ConvertI64S x))
81 (Cvt64to64F x) -> (F64ConvertI64S x)
82 (Cvt32Uto32F x) -> (LoweredRound32F (F64ConvertI64U (ZeroExt32to64 x)))
83 (Cvt32Uto64F x) -> (F64ConvertI64U (ZeroExt32to64 x))
84 (Cvt64Uto32F x) -> (LoweredRound32F (F64ConvertI64U x))
85 (Cvt64Uto64F x) -> (F64ConvertI64U x)
86
// Float -> int uses the saturating truncation ops, so out-of-range inputs
// do not trap.
87 (Cvt32Fto32 x) -> (I64TruncSatF64S x)
88 (Cvt32Fto64 x) -> (I64TruncSatF64S x)
89 (Cvt64Fto32 x) -> (I64TruncSatF64S x)
90 (Cvt64Fto64 x) -> (I64TruncSatF64S x)
91 (Cvt32Fto32U x) -> (I64TruncSatF64U x)
92 (Cvt32Fto64U x) -> (I64TruncSatF64U x)
93 (Cvt64Fto32U x) -> (I64TruncSatF64U x)
94 (Cvt64Fto64U x) -> (I64TruncSatF64U x)
95
// float32 -> float64 is a no-op (the float32 already lives in an f64);
// float64 -> float32 only needs the precision round.
96 (Cvt32Fto64F x) -> x
97 (Cvt64Fto32F x) -> (LoweredRound32F x)
98
99 (Round32F x) -> (LoweredRound32F x)
100 (Round64F x) -> x
101
102 // Lowering shifts
103 // Unsigned shifts need to return 0 if shift amount is >= width of shifted value.
//
// Select yields its first operand when the condition is true, else the
// second (wasm "select" semantics), so the shifted value is used only when
// the count y is < 64 and the result is forced to 0 otherwise.
// All narrower shifts funnel through the 64-bit rules; shift counts are
// always zero-extended (a count is interpreted as unsigned).
104
105 (Lsh64x64 x y) -> (Select (I64Shl x y) (I64Const [0]) (I64LtU y (I64Const [64])))
106 (Lsh64x32 x y) -> (Lsh64x64 x (ZeroExt32to64 y))
107 (Lsh64x16 x y) -> (Lsh64x64 x (ZeroExt16to64 y))
108 (Lsh64x8 x y) -> (Lsh64x64 x (ZeroExt8to64 y))
109
// Left shifts of narrow values need no operand extension: only the low
// bits of the result are observed.
110 (Lsh32x64 x y) -> (Lsh64x64 x y)
111 (Lsh32x32 x y) -> (Lsh64x64 x (ZeroExt32to64 y))
112 (Lsh32x16 x y) -> (Lsh64x64 x (ZeroExt16to64 y))
113 (Lsh32x8 x y) -> (Lsh64x64 x (ZeroExt8to64 y))
114
115 (Lsh16x64 x y) -> (Lsh64x64 x y)
116 (Lsh16x32 x y) -> (Lsh64x64 x (ZeroExt32to64 y))
117 (Lsh16x16 x y) -> (Lsh64x64 x (ZeroExt16to64 y))
118 (Lsh16x8 x y) -> (Lsh64x64 x (ZeroExt8to64 y))
119
120 (Lsh8x64 x y) -> (Lsh64x64 x y)
121 (Lsh8x32 x y) -> (Lsh64x64 x (ZeroExt32to64 y))
122 (Lsh8x16 x y) -> (Lsh64x64 x (ZeroExt16to64 y))
123 (Lsh8x8 x y) -> (Lsh64x64 x (ZeroExt8to64 y))
124
125 (Rsh64Ux64 x y) -> (Select (I64ShrU x y) (I64Const [0]) (I64LtU y (I64Const [64])))
126 (Rsh64Ux32 x y) -> (Rsh64Ux64 x (ZeroExt32to64 y))
127 (Rsh64Ux16 x y) -> (Rsh64Ux64 x (ZeroExt16to64 y))
128 (Rsh64Ux8 x y) -> (Rsh64Ux64 x (ZeroExt8to64 y))
129
// Unsigned right shifts of narrow values zero-extend the shifted operand
// so stale high bits cannot shift down into the result.
130 (Rsh32Ux64 x y) -> (Rsh64Ux64 (ZeroExt32to64 x) y)
131 (Rsh32Ux32 x y) -> (Rsh64Ux64 (ZeroExt32to64 x) (ZeroExt32to64 y))
132 (Rsh32Ux16 x y) -> (Rsh64Ux64 (ZeroExt32to64 x) (ZeroExt16to64 y))
133 (Rsh32Ux8 x y) -> (Rsh64Ux64 (ZeroExt32to64 x) (ZeroExt8to64 y))
134
135 (Rsh16Ux64 x y) -> (Rsh64Ux64 (ZeroExt16to64 x) y)
136 (Rsh16Ux32 x y) -> (Rsh64Ux64 (ZeroExt16to64 x) (ZeroExt32to64 y))
137 (Rsh16Ux16 x y) -> (Rsh64Ux64 (ZeroExt16to64 x) (ZeroExt16to64 y))
138 (Rsh16Ux8 x y) -> (Rsh64Ux64 (ZeroExt16to64 x) (ZeroExt8to64 y))
139
140 (Rsh8Ux64 x y) -> (Rsh64Ux64 (ZeroExt8to64 x) y)
141 (Rsh8Ux32 x y) -> (Rsh64Ux64 (ZeroExt8to64 x) (ZeroExt32to64 y))
142 (Rsh8Ux16 x y) -> (Rsh64Ux64 (ZeroExt8to64 x) (ZeroExt16to64 y))
143 (Rsh8Ux8 x y) -> (Rsh64Ux64 (ZeroExt8to64 x) (ZeroExt8to64 y))
144
145 // Signed right shift needs to return 0/-1 if shift amount is >= width of shifted value.
146 // We implement this by setting the shift value to (width - 1) if the shift value is >= width.
// Clamping the count to 63 works because an arithmetic shift by width-1
// already yields 0 or -1 depending on the sign bit. Narrow operands are
// sign-extended so the sign bit lands in bit 63.
147
148 (Rsh64x64 x y) -> (I64ShrS x (Select <typ.Int64> y (I64Const [63]) (I64LtU y (I64Const [64]))))
149 (Rsh64x32 x y) -> (Rsh64x64 x (ZeroExt32to64 y))
150 (Rsh64x16 x y) -> (Rsh64x64 x (ZeroExt16to64 y))
151 (Rsh64x8 x y) -> (Rsh64x64 x (ZeroExt8to64 y))
152
153 (Rsh32x64 x y) -> (Rsh64x64 (SignExt32to64 x) y)
154 (Rsh32x32 x y) -> (Rsh64x64 (SignExt32to64 x) (ZeroExt32to64 y))
155 (Rsh32x16 x y) -> (Rsh64x64 (SignExt32to64 x) (ZeroExt16to64 y))
156 (Rsh32x8 x y) -> (Rsh64x64 (SignExt32to64 x) (ZeroExt8to64 y))
157
158 (Rsh16x64 x y) -> (Rsh64x64 (SignExt16to64 x) y)
159 (Rsh16x32 x y) -> (Rsh64x64 (SignExt16to64 x) (ZeroExt32to64 y))
160 (Rsh16x16 x y) -> (Rsh64x64 (SignExt16to64 x) (ZeroExt16to64 y))
161 (Rsh16x8 x y) -> (Rsh64x64 (SignExt16to64 x) (ZeroExt8to64 y))
162
163 (Rsh8x64 x y) -> (Rsh64x64 (SignExt8to64 x) y)
164 (Rsh8x32 x y) -> (Rsh64x64 (SignExt8to64 x) (ZeroExt32to64 y))
165 (Rsh8x16 x y) -> (Rsh64x64 (SignExt8to64 x) (ZeroExt16to64 y))
166 (Rsh8x8 x y) -> (Rsh64x64 (SignExt8to64 x) (ZeroExt8to64 y))
167
168 // Lowering rotates
// Constant-count rotates on sub-word types decompose into
// (x << (c mod width)) | (x >>u (-c mod width)); the masks (&7, &15, &31)
// reduce the counts mod the type width. Only constant counts are handled
// here — variable-count RotateLeft64 is lowered to I64Rotl in the
// intrinsics section further down.
169 (RotateLeft8 <t> x (I64Const [c])) -> (Or8 (Lsh8x64 <t> x (I64Const [c&7])) (Rsh8Ux64 <t> x (I64Const [-c&7])))
170 (RotateLeft16 <t> x (I64Const [c])) -> (Or16 (Lsh16x64 <t> x (I64Const [c&15])) (Rsh16Ux64 <t> x (I64Const [-c&15])))
171 (RotateLeft32 <t> x (I64Const [c])) -> (Or32 (Lsh32x64 <t> x (I64Const [c&31])) (Rsh32Ux64 <t> x (I64Const [-c&31])))
172
173 // Lowering comparisons
//
// Comparisons depend on full operand values, so narrow operands are
// extended first: sign-extended for signed compares, zero-extended for
// unsigned compares. 32-bit float compares round both operands back to
// float32 precision (LoweredRound32F) before comparing in f64.
174 (Less64 x y) -> (I64LtS x y)
175 (Less32 x y) -> (I64LtS (SignExt32to64 x) (SignExt32to64 y))
176 (Less16 x y) -> (I64LtS (SignExt16to64 x) (SignExt16to64 y))
177 (Less8 x y) -> (I64LtS (SignExt8to64 x) (SignExt8to64 y))
178 (Less64U x y) -> (I64LtU x y)
179 (Less32U x y) -> (I64LtU (ZeroExt32to64 x) (ZeroExt32to64 y))
180 (Less16U x y) -> (I64LtU (ZeroExt16to64 x) (ZeroExt16to64 y))
181 (Less8U x y) -> (I64LtU (ZeroExt8to64 x) (ZeroExt8to64 y))
182 (Less64F x y) -> (F64Lt x y)
183 (Less32F x y) -> (F64Lt (LoweredRound32F x) (LoweredRound32F y))
184
185 (Leq64 x y) -> (I64LeS x y)
186 (Leq32 x y) -> (I64LeS (SignExt32to64 x) (SignExt32to64 y))
187 (Leq16 x y) -> (I64LeS (SignExt16to64 x) (SignExt16to64 y))
188 (Leq8 x y) -> (I64LeS (SignExt8to64 x) (SignExt8to64 y))
189 (Leq64U x y) -> (I64LeU x y)
190 (Leq32U x y) -> (I64LeU (ZeroExt32to64 x) (ZeroExt32to64 y))
191 (Leq16U x y) -> (I64LeU (ZeroExt16to64 x) (ZeroExt16to64 y))
192 (Leq8U x y) -> (I64LeU (ZeroExt8to64 x) (ZeroExt8to64 y))
193 (Leq64F x y) -> (F64Le x y)
194 (Leq32F x y) -> (F64Le (LoweredRound32F x) (LoweredRound32F y))
195
196 (Greater64 x y) -> (I64GtS x y)
197 (Greater32 x y) -> (I64GtS (SignExt32to64 x) (SignExt32to64 y))
198 (Greater16 x y) -> (I64GtS (SignExt16to64 x) (SignExt16to64 y))
199 (Greater8 x y) -> (I64GtS (SignExt8to64 x) (SignExt8to64 y))
200 (Greater64U x y) -> (I64GtU x y)
201 (Greater32U x y) -> (I64GtU (ZeroExt32to64 x) (ZeroExt32to64 y))
202 (Greater16U x y) -> (I64GtU (ZeroExt16to64 x) (ZeroExt16to64 y))
203 (Greater8U x y) -> (I64GtU (ZeroExt8to64 x) (ZeroExt8to64 y))
204 (Greater64F x y) -> (F64Gt x y)
205 (Greater32F x y) -> (F64Gt (LoweredRound32F x) (LoweredRound32F y))
206
207 (Geq64 x y) -> (I64GeS x y)
208 (Geq32 x y) -> (I64GeS (SignExt32to64 x) (SignExt32to64 y))
209 (Geq16 x y) -> (I64GeS (SignExt16to64 x) (SignExt16to64 y))
210 (Geq8 x y) -> (I64GeS (SignExt8to64 x) (SignExt8to64 y))
211 (Geq64U x y) -> (I64GeU x y)
212 (Geq32U x y) -> (I64GeU (ZeroExt32to64 x) (ZeroExt32to64 y))
213 (Geq16U x y) -> (I64GeU (ZeroExt16to64 x) (ZeroExt16to64 y))
214 (Geq8U x y) -> (I64GeU (ZeroExt8to64 x) (ZeroExt8to64 y))
215 (Geq64F x y) -> (F64Ge x y)
216 (Geq32F x y) -> (F64Ge (LoweredRound32F x) (LoweredRound32F y))
217
// Equality is sign-agnostic; both operands just need the same extension,
// and zero extension is used here.
218 (Eq64 x y) -> (I64Eq x y)
219 (Eq32 x y) -> (I64Eq (ZeroExt32to64 x) (ZeroExt32to64 y))
220 (Eq16 x y) -> (I64Eq (ZeroExt16to64 x) (ZeroExt16to64 y))
221 (Eq8 x y) -> (I64Eq (ZeroExt8to64 x) (ZeroExt8to64 y))
222 (EqB x y) -> (I64Eq x y)
223 (EqPtr x y) -> (I64Eq x y)
224 (Eq64F x y) -> (F64Eq x y)
225 (Eq32F x y) -> (F64Eq (LoweredRound32F x) (LoweredRound32F y))
226
227 (Neq64 x y) -> (I64Ne x y)
228 (Neq32 x y) -> (I64Ne (ZeroExt32to64 x) (ZeroExt32to64 y))
229 (Neq16 x y) -> (I64Ne (ZeroExt16to64 x) (ZeroExt16to64 y))
230 (Neq8 x y) -> (I64Ne (ZeroExt8to64 x) (ZeroExt8to64 y))
231 (NeqB x y) -> (I64Ne x y)
232 (NeqPtr x y) -> (I64Ne x y)
233 (Neq64F x y) -> (F64Ne x y)
234 (Neq32F x y) -> (F64Ne (LoweredRound32F x) (LoweredRound32F y))
235
236 // Lowering loads
// Floats are dispatched first; integer loads pick the access width from
// t.Size() and the signed/unsigned variant from t.IsSigned(), so the
// loaded value arrives already extended to 64 bits.
237 (Load <t> ptr mem) && is32BitFloat(t) -> (F32Load ptr mem)
238 (Load <t> ptr mem) && is64BitFloat(t) -> (F64Load ptr mem)
239 (Load <t> ptr mem) && t.Size() == 8 -> (I64Load ptr mem)
240 (Load <t> ptr mem) && t.Size() == 4 && !t.IsSigned() -> (I64Load32U ptr mem)
241 (Load <t> ptr mem) && t.Size() == 4 && t.IsSigned() -> (I64Load32S ptr mem)
242 (Load <t> ptr mem) && t.Size() == 2 && !t.IsSigned() -> (I64Load16U ptr mem)
243 (Load <t> ptr mem) && t.Size() == 2 && t.IsSigned() -> (I64Load16S ptr mem)
244 (Load <t> ptr mem) && t.Size() == 1 && !t.IsSigned() -> (I64Load8U ptr mem)
245 (Load <t> ptr mem) && t.Size() == 1 && t.IsSigned() -> (I64Load8S ptr mem)
246
247 // Lowering stores
// Stores only need the width (truncation is implicit in the narrow store
// ops), keyed on the type carried in the Store's aux field.
248 (Store {t} ptr val mem) && is64BitFloat(t.(*types.Type)) -> (F64Store ptr val mem)
249 (Store {t} ptr val mem) && is32BitFloat(t.(*types.Type)) -> (F32Store ptr val mem)
250 (Store {t} ptr val mem) && t.(*types.Type).Size() == 8 -> (I64Store ptr val mem)
251 (Store {t} ptr val mem) && t.(*types.Type).Size() == 4 -> (I64Store32 ptr val mem)
252 (Store {t} ptr val mem) && t.(*types.Type).Size() == 2 -> (I64Store16 ptr val mem)
253 (Store {t} ptr val mem) && t.(*types.Type).Size() == 1 -> (I64Store8 ptr val mem)
254
255 // Lowering moves
// Small fixed sizes are inlined as load/store pairs. Odd sizes use a pair
// of possibly-overlapping accesses (e.g. Move [7] does two 4-byte copies
// at offsets 0 and 3); overlapping the same bytes twice is harmless since
// dst and src themselves do not overlap.
256 (Move [0] _ _ mem) -> mem
257 (Move [1] dst src mem) -> (I64Store8 dst (I64Load8U src mem) mem)
258 (Move [2] dst src mem) -> (I64Store16 dst (I64Load16U src mem) mem)
259 (Move [4] dst src mem) -> (I64Store32 dst (I64Load32U src mem) mem)
260 (Move [8] dst src mem) -> (I64Store dst (I64Load src mem) mem)
261 (Move [16] dst src mem) ->
262 (I64Store [8] dst (I64Load [8] src mem)
263 (I64Store dst (I64Load src mem) mem))
264 (Move [3] dst src mem) ->
265 (I64Store8 [2] dst (I64Load8U [2] src mem)
266 (I64Store16 dst (I64Load16U src mem) mem))
267 (Move [5] dst src mem) ->
268 (I64Store8 [4] dst (I64Load8U [4] src mem)
269 (I64Store32 dst (I64Load32U src mem) mem))
270 (Move [6] dst src mem) ->
271 (I64Store16 [4] dst (I64Load16U [4] src mem)
272 (I64Store32 dst (I64Load32U src mem) mem))
273 (Move [7] dst src mem) ->
274 (I64Store32 [3] dst (I64Load32U [3] src mem)
275 (I64Store32 dst (I64Load32U src mem) mem))
// Sizes 9..15: two overlapping 8-byte copies, at offset 0 and offset s-8.
276 (Move [s] dst src mem) && s > 8 && s < 16 ->
277 (I64Store [s-8] dst (I64Load [s-8] src mem)
278 (I64Store dst (I64Load src mem) mem))
279
280 // Adjust moves to be a multiple of 16 bytes.
// Copy the first 8 (or 16) bytes inline, then recurse on the remaining
// s-s%16 bytes starting at offset s%16; the two steps may overlap, which
// is fine for a copy between non-overlapping dst/src.
281 (Move [s] dst src mem)
282 && s > 16 && s%16 != 0 && s%16 <= 8 ->
283 (Move [s-s%16]
284 (OffPtr <dst.Type> dst [s%16])
285 (OffPtr <src.Type> src [s%16])
286 (I64Store dst (I64Load src mem) mem))
287 (Move [s] dst src mem)
288 && s > 16 && s%16 != 0 && s%16 > 8 ->
289 (Move [s-s%16]
290 (OffPtr <dst.Type> dst [s%16])
291 (OffPtr <src.Type> src [s%16])
292 (I64Store [8] dst (I64Load [8] src mem)
293 (I64Store dst (I64Load src mem) mem)))
294
295 // Large copying uses helper.
// NOTE(review): this rule's only condition is s%8 == 0; it relies on the
// more specific Move rules above being tried first (rulegen applies rules
// for an op in file order) — confirm before reordering.
296 (Move [s] dst src mem) && s%8 == 0 ->
297 (LoweredMove [s/8] dst src mem)
298
299 // Lowering Zero instructions
// Mirrors the Move lowering: small fixed sizes store zero constants
// directly, odd sizes use a pair of possibly-overlapping stores.
300 (Zero [0] _ mem) -> mem
301 (Zero [1] destptr mem) -> (I64Store8 destptr (I64Const [0]) mem)
302 (Zero [2] destptr mem) -> (I64Store16 destptr (I64Const [0]) mem)
303 (Zero [4] destptr mem) -> (I64Store32 destptr (I64Const [0]) mem)
304 (Zero [8] destptr mem) -> (I64Store destptr (I64Const [0]) mem)
305
306 (Zero [3] destptr mem) ->
307 (I64Store8 [2] destptr (I64Const [0])
308 (I64Store16 destptr (I64Const [0]) mem))
309 (Zero [5] destptr mem) ->
310 (I64Store8 [4] destptr (I64Const [0])
311 (I64Store32 destptr (I64Const [0]) mem))
312 (Zero [6] destptr mem) ->
313 (I64Store16 [4] destptr (I64Const [0])
314 (I64Store32 destptr (I64Const [0]) mem))
315 (Zero [7] destptr mem) ->
316 (I64Store32 [3] destptr (I64Const [0])
317 (I64Store32 destptr (I64Const [0]) mem))
318
319 // Strip off any fractional word zeroing.
// Zero the first 8 bytes, then recurse on the remaining s-s%8 bytes at
// offset s%8 (overlap with the first store is harmless).
320 (Zero [s] destptr mem) && s%8 != 0 && s > 8 ->
321 (Zero [s-s%8] (OffPtr <destptr.Type> destptr [s%8])
322 (I64Store destptr (I64Const [0]) mem))
323
324 // Zero small numbers of words directly.
325 (Zero [16] destptr mem) ->
326 (I64Store [8] destptr (I64Const [0])
327 (I64Store destptr (I64Const [0]) mem))
328 (Zero [24] destptr mem) ->
329 (I64Store [16] destptr (I64Const [0])
330 (I64Store [8] destptr (I64Const [0])
331 (I64Store destptr (I64Const [0]) mem)))
332 (Zero [32] destptr mem) ->
333 (I64Store [24] destptr (I64Const [0])
334 (I64Store [16] destptr (I64Const [0])
335 (I64Store [8] destptr (I64Const [0])
336 (I64Store destptr (I64Const [0]) mem))))
337
338 // Large zeroing uses helper.
// s/8 is the word count passed to the runtime-assisted LoweredZero.
339 (Zero [s] destptr mem) && s%8 == 0 && s > 32 ->
340 (LoweredZero [s/8] destptr mem)
341
342 // Lowering constants
// All integer constant widths collapse to I64Const; both float widths to
// F64Const. nil is the zero pointer; a bool aux value b is stored as its
// 0/1 integer encoding.
343 (Const(64|32|16|8) [val]) -> (I64Const [val])
344 (Const(64|32)F [val]) -> (F64Const [val])
345 (ConstNil) -> (I64Const [0])
346 (ConstBool [b]) -> (I64Const [b])
347
348 // Lowering calls
// Each generic call op maps 1:1 to its wasm-specific Lowered form,
// preserving the argument width and target/entry operands.
349 (StaticCall [argwid] {target} mem) -> (LoweredStaticCall [argwid] {target} mem)
350 (ClosureCall [argwid] entry closure mem) -> (LoweredClosureCall [argwid] entry closure mem)
351 (InterCall [argwid] entry mem) -> (LoweredInterCall [argwid] entry mem)
352
353 // Miscellaneous
354 (Convert <t> x mem) -> (LoweredConvert <t> x mem)
// Double Eqz normalizes "p != 0" to a canonical 0/1 boolean.
355 (IsNonNil p) -> (I64Eqz (I64Eqz p))
// Index bound: idx < len; slice bound uses <= because idx may equal len.
356 (IsInBounds idx len) -> (I64LtU idx len)
357 (IsSliceInBounds idx len) -> (I64LeU idx len)
358 (NilCheck ptr mem) -> (LoweredNilCheck ptr mem)
359 (GetClosurePtr) -> (LoweredGetClosurePtr)
360 (GetCallerPC) -> (LoweredGetCallerPC)
361 (GetCallerSP) -> (LoweredGetCallerSP)
// Both global and local address ops lower to LoweredAddr off a base.
362 (Addr {sym} base) -> (LoweredAddr {sym} base)
363 (LocalAddr {sym} base _) -> (LoweredAddr {sym} base)
364
365 // Write barrier.
366 (WB {fn} destptr srcptr mem) -> (LoweredWB {fn} destptr srcptr mem)
367
368 // --- Intrinsics ---
// math intrinsics map directly onto wasm f64 instructions.
369 (Sqrt x) -> (F64Sqrt x)
370 (Trunc x) -> (F64Trunc x)
371 (Ceil x) -> (F64Ceil x)
372 (Floor x) -> (F64Floor x)
373 (RoundToEven x) -> (F64Nearest x)
374 (Abs x) -> (F64Abs x)
375 (Copysign x y) -> (F64Copysign x y)
376
// Count trailing zeros: for narrow types, OR in a sentinel bit just above
// the type width so a zero input yields the width (32/16/8) instead of 64.
377 (Ctz64 x) -> (I64Ctz x)
378 (Ctz32 x) -> (I64Ctz (I64Or x (I64Const [0x100000000])))
379 (Ctz16 x) -> (I64Ctz (I64Or x (I64Const [0x10000])))
380 (Ctz8 x) -> (I64Ctz (I64Or x (I64Const [0x100])))
381
// Input known nonzero: no sentinel needed.
382 (Ctz(64|32|16|8)NonZero x) -> (I64Ctz x)
383
// BitLen64(x) = 64 - leading zeros.
384 (BitLen64 x) -> (I64Sub (I64Const [64]) (I64Clz x))
385
// Variable-count 64-bit rotate has a native instruction (constant narrow
// rotates are decomposed in the rotate-lowering section above).
386 (RotateLeft64 x y) -> (I64Rotl x y)
387
// Popcount of narrow values must zero-extend so stale high bits are not
// counted.
388 (PopCount64 x) -> (I64Popcnt x)
389 (PopCount32 x) -> (I64Popcnt (ZeroExt32to64 x))
390 (PopCount16 x) -> (I64Popcnt (ZeroExt16to64 x))
391 (PopCount8 x) -> (I64Popcnt (ZeroExt8to64 x))
392
393 // --- Optimizations ---
// Constant folding on two-constant operands.
394 (I64Add (I64Const [x]) (I64Const [y])) -> (I64Const [x + y])
395 (I64Mul (I64Const [x]) (I64Const [y])) -> (I64Const [x * y])
396 (I64And (I64Const [x]) (I64Const [y])) -> (I64Const [x & y])
397 (I64Or (I64Const [x]) (I64Const [y])) -> (I64Const [x | y])
398 (I64Xor (I64Const [x]) (I64Const [y])) -> (I64Const [x ^ y])
// Float folding converts the aux int64 bit patterns to float64, folds,
// and converts back, so the arithmetic happens in the float domain.
399 (F64Add (F64Const [x]) (F64Const [y])) -> (F64Const [auxFrom64F(auxTo64F(x) + auxTo64F(y))])
400 (F64Mul (F64Const [x]) (F64Const [y])) -> (F64Const [auxFrom64F(auxTo64F(x) * auxTo64F(y))])
401 (I64Eq (I64Const [x]) (I64Const [y])) && x == y -> (I64Const [1])
402 (I64Eq (I64Const [x]) (I64Const [y])) && x != y -> (I64Const [0])
403 (I64Ne (I64Const [x]) (I64Const [y])) && x == y -> (I64Const [0])
404 (I64Ne (I64Const [x]) (I64Const [y])) && x != y -> (I64Const [1])
405
// Shift folding: unsigned right shift folds in the unsigned domain.
406 (I64Shl (I64Const [x]) (I64Const [y])) -> (I64Const [x << uint64(y)])
407 (I64ShrU (I64Const [x]) (I64Const [y])) -> (I64Const [int64(uint64(x) >> uint64(y))])
408 (I64ShrS (I64Const [x]) (I64Const [y])) -> (I64Const [x >> uint64(y)])
409
// Canonicalize commutative ops: move the constant to the second operand,
// so later rules (e.g. the I64AddConst and I64Eqz rules below) only need
// to match one shape.
410 (I64Add (I64Const [x]) y) -> (I64Add y (I64Const [x]))
411 (I64Mul (I64Const [x]) y) -> (I64Mul y (I64Const [x]))
412 (I64And (I64Const [x]) y) -> (I64And y (I64Const [x]))
413 (I64Or (I64Const [x]) y) -> (I64Or y (I64Const [x]))
414 (I64Xor (I64Const [x]) y) -> (I64Xor y (I64Const [x]))
415 (F64Add (F64Const [x]) y) -> (F64Add y (F64Const [x]))
416 (F64Mul (F64Const [x]) y) -> (F64Mul y (F64Const [x]))
417 (I64Eq (I64Const [x]) y) -> (I64Eq y (I64Const [x]))
418 (I64Ne (I64Const [x]) y) -> (I64Ne y (I64Const [x]))
419
// Comparisons against zero become (possibly doubled) Eqz.
420 (I64Eq x (I64Const [0])) -> (I64Eqz x)
421 (I64Ne x (I64Const [0])) -> (I64Eqz (I64Eqz x))
422
423 (I64Add x (I64Const [y])) -> (I64AddConst [y] x)
424 (I64AddConst [0] x) -> x
// Eqz of an Eqz result (always 0 or 1) is an involution, so a triple
// collapses to a single Eqz.
425 (I64Eqz (I64Eqz (I64Eqz x))) -> (I64Eqz x)
426
427 // folding offset into load/store
// The isU32Bit guard keeps the combined offset within what a wasm
// load/store offset immediate can hold (an unsigned 32-bit value —
// NOTE(review): confirm against the ops' aux encoding).
428 ((I64Load|I64Load32U|I64Load32S|I64Load16U|I64Load16S|I64Load8U|I64Load8S) [off] (I64AddConst [off2] ptr) mem)
429 && isU32Bit(off+off2) ->
430 ((I64Load|I64Load32U|I64Load32S|I64Load16U|I64Load16S|I64Load8U|I64Load8S) [off+off2] ptr mem)
431
432 ((I64Store|I64Store32|I64Store16|I64Store8) [off] (I64AddConst [off2] ptr) val mem)
433 && isU32Bit(off+off2) ->
434 ((I64Store|I64Store32|I64Store16|I64Store8) [off+off2] ptr val mem)
435
436 // folding offset into address
437 (I64AddConst [off] (LoweredAddr {sym} [off2] base)) && isU32Bit(off+off2) ->
438 (LoweredAddr {sym} [off+off2] base)
View as plain text