blob: 885bbaf4a1d0bf1cd3732264da50f23ed8deeaa0 [file] [log] [blame]
Patrice Arruda748609c2020-06-25 12:12:21 -07001// Code generated from gen/RISCV64.rules; DO NOT EDIT.
2// generated with: cd gen; go run *.go
3
4package ssa
5
6import "math"
Colin Cross1f805522021-05-14 11:10:59 -07007import "cmd/compile/internal/types"
Patrice Arruda748609c2020-06-25 12:12:21 -07008
// rewriteValueRISCV64 lowers a generic SSA value to its RISCV64-specific
// form. Ops with a trivial 1:1 machine equivalent are lowered inline by
// replacing v.Op; ops that need structural changes (new operands, extensions,
// constant materialization) delegate to a per-op rewrite helper.
// It reports whether v was rewritten.
func rewriteValueRISCV64(v *Value) bool {
	switch v.Op {
	case OpAbs:
		v.Op = OpRISCV64FABSD
		return true
	case OpAdd16:
		v.Op = OpRISCV64ADD
		return true
	case OpAdd32:
		v.Op = OpRISCV64ADD
		return true
	case OpAdd32F:
		v.Op = OpRISCV64FADDS
		return true
	case OpAdd64:
		v.Op = OpRISCV64ADD
		return true
	case OpAdd64F:
		v.Op = OpRISCV64FADDD
		return true
	case OpAdd8:
		v.Op = OpRISCV64ADD
		return true
	case OpAddPtr:
		v.Op = OpRISCV64ADD
		return true
	case OpAddr:
		return rewriteValueRISCV64_OpAddr(v)
	case OpAnd16:
		v.Op = OpRISCV64AND
		return true
	case OpAnd32:
		v.Op = OpRISCV64AND
		return true
	case OpAnd64:
		v.Op = OpRISCV64AND
		return true
	case OpAnd8:
		v.Op = OpRISCV64AND
		return true
	case OpAndB:
		v.Op = OpRISCV64AND
		return true
	case OpAtomicAdd32:
		v.Op = OpRISCV64LoweredAtomicAdd32
		return true
	case OpAtomicAdd64:
		v.Op = OpRISCV64LoweredAtomicAdd64
		return true
	case OpAtomicAnd32:
		v.Op = OpRISCV64LoweredAtomicAnd32
		return true
	case OpAtomicAnd8:
		// No byte-wide atomic AND; synthesized from a 32-bit one.
		return rewriteValueRISCV64_OpAtomicAnd8(v)
	case OpAtomicCompareAndSwap32:
		v.Op = OpRISCV64LoweredAtomicCas32
		return true
	case OpAtomicCompareAndSwap64:
		v.Op = OpRISCV64LoweredAtomicCas64
		return true
	case OpAtomicExchange32:
		v.Op = OpRISCV64LoweredAtomicExchange32
		return true
	case OpAtomicExchange64:
		v.Op = OpRISCV64LoweredAtomicExchange64
		return true
	case OpAtomicLoad32:
		v.Op = OpRISCV64LoweredAtomicLoad32
		return true
	case OpAtomicLoad64:
		v.Op = OpRISCV64LoweredAtomicLoad64
		return true
	case OpAtomicLoad8:
		v.Op = OpRISCV64LoweredAtomicLoad8
		return true
	case OpAtomicLoadPtr:
		// Pointers are 64-bit on RISCV64.
		v.Op = OpRISCV64LoweredAtomicLoad64
		return true
	case OpAtomicOr32:
		v.Op = OpRISCV64LoweredAtomicOr32
		return true
	case OpAtomicOr8:
		// No byte-wide atomic OR; synthesized from a 32-bit one.
		return rewriteValueRISCV64_OpAtomicOr8(v)
	case OpAtomicStore32:
		v.Op = OpRISCV64LoweredAtomicStore32
		return true
	case OpAtomicStore64:
		v.Op = OpRISCV64LoweredAtomicStore64
		return true
	case OpAtomicStore8:
		v.Op = OpRISCV64LoweredAtomicStore8
		return true
	case OpAtomicStorePtrNoWB:
		v.Op = OpRISCV64LoweredAtomicStore64
		return true
	case OpAvg64u:
		return rewriteValueRISCV64_OpAvg64u(v)
	case OpClosureCall:
		v.Op = OpRISCV64CALLclosure
		return true
	case OpCom16:
		v.Op = OpRISCV64NOT
		return true
	case OpCom32:
		v.Op = OpRISCV64NOT
		return true
	case OpCom64:
		v.Op = OpRISCV64NOT
		return true
	case OpCom8:
		v.Op = OpRISCV64NOT
		return true
	case OpConst16:
		return rewriteValueRISCV64_OpConst16(v)
	case OpConst32:
		return rewriteValueRISCV64_OpConst32(v)
	case OpConst32F:
		return rewriteValueRISCV64_OpConst32F(v)
	case OpConst64:
		return rewriteValueRISCV64_OpConst64(v)
	case OpConst64F:
		return rewriteValueRISCV64_OpConst64F(v)
	case OpConst8:
		return rewriteValueRISCV64_OpConst8(v)
	case OpConstBool:
		return rewriteValueRISCV64_OpConstBool(v)
	case OpConstNil:
		return rewriteValueRISCV64_OpConstNil(v)
	case OpConvert:
		v.Op = OpRISCV64MOVconvert
		return true
	case OpCopysign:
		v.Op = OpRISCV64FSGNJD
		return true
	case OpCvt32Fto32:
		v.Op = OpRISCV64FCVTWS
		return true
	case OpCvt32Fto64:
		v.Op = OpRISCV64FCVTLS
		return true
	case OpCvt32Fto64F:
		v.Op = OpRISCV64FCVTDS
		return true
	case OpCvt32to32F:
		v.Op = OpRISCV64FCVTSW
		return true
	case OpCvt32to64F:
		v.Op = OpRISCV64FCVTDW
		return true
	case OpCvt64Fto32:
		v.Op = OpRISCV64FCVTWD
		return true
	case OpCvt64Fto32F:
		v.Op = OpRISCV64FCVTSD
		return true
	case OpCvt64Fto64:
		v.Op = OpRISCV64FCVTLD
		return true
	case OpCvt64to32F:
		v.Op = OpRISCV64FCVTSL
		return true
	case OpCvt64to64F:
		v.Op = OpRISCV64FCVTDL
		return true
	case OpCvtBoolToUint8:
		// Bool is already a 0/1 byte; no instruction needed.
		v.Op = OpCopy
		return true
	case OpDiv16:
		return rewriteValueRISCV64_OpDiv16(v)
	case OpDiv16u:
		return rewriteValueRISCV64_OpDiv16u(v)
	case OpDiv32:
		return rewriteValueRISCV64_OpDiv32(v)
	case OpDiv32F:
		v.Op = OpRISCV64FDIVS
		return true
	case OpDiv32u:
		v.Op = OpRISCV64DIVUW
		return true
	case OpDiv64:
		return rewriteValueRISCV64_OpDiv64(v)
	case OpDiv64F:
		v.Op = OpRISCV64FDIVD
		return true
	case OpDiv64u:
		v.Op = OpRISCV64DIVU
		return true
	case OpDiv8:
		return rewriteValueRISCV64_OpDiv8(v)
	case OpDiv8u:
		return rewriteValueRISCV64_OpDiv8u(v)
	case OpEq16:
		return rewriteValueRISCV64_OpEq16(v)
	case OpEq32:
		return rewriteValueRISCV64_OpEq32(v)
	case OpEq32F:
		v.Op = OpRISCV64FEQS
		return true
	case OpEq64:
		return rewriteValueRISCV64_OpEq64(v)
	case OpEq64F:
		v.Op = OpRISCV64FEQD
		return true
	case OpEq8:
		return rewriteValueRISCV64_OpEq8(v)
	case OpEqB:
		return rewriteValueRISCV64_OpEqB(v)
	case OpEqPtr:
		return rewriteValueRISCV64_OpEqPtr(v)
	case OpFMA:
		v.Op = OpRISCV64FMADDD
		return true
	case OpGetCallerPC:
		v.Op = OpRISCV64LoweredGetCallerPC
		return true
	case OpGetCallerSP:
		v.Op = OpRISCV64LoweredGetCallerSP
		return true
	case OpGetClosurePtr:
		v.Op = OpRISCV64LoweredGetClosurePtr
		return true
	case OpHmul32:
		return rewriteValueRISCV64_OpHmul32(v)
	case OpHmul32u:
		return rewriteValueRISCV64_OpHmul32u(v)
	case OpHmul64:
		v.Op = OpRISCV64MULH
		return true
	case OpHmul64u:
		v.Op = OpRISCV64MULHU
		return true
	case OpInterCall:
		v.Op = OpRISCV64CALLinter
		return true
	case OpIsInBounds:
		// Bounds checks become unsigned 64-bit comparisons.
		v.Op = OpLess64U
		return true
	case OpIsNonNil:
		v.Op = OpRISCV64SNEZ
		return true
	case OpIsSliceInBounds:
		v.Op = OpLeq64U
		return true
	case OpLeq16:
		return rewriteValueRISCV64_OpLeq16(v)
	case OpLeq16U:
		return rewriteValueRISCV64_OpLeq16U(v)
	case OpLeq32:
		return rewriteValueRISCV64_OpLeq32(v)
	case OpLeq32F:
		v.Op = OpRISCV64FLES
		return true
	case OpLeq32U:
		return rewriteValueRISCV64_OpLeq32U(v)
	case OpLeq64:
		return rewriteValueRISCV64_OpLeq64(v)
	case OpLeq64F:
		v.Op = OpRISCV64FLED
		return true
	case OpLeq64U:
		return rewriteValueRISCV64_OpLeq64U(v)
	case OpLeq8:
		return rewriteValueRISCV64_OpLeq8(v)
	case OpLeq8U:
		return rewriteValueRISCV64_OpLeq8U(v)
	case OpLess16:
		return rewriteValueRISCV64_OpLess16(v)
	case OpLess16U:
		return rewriteValueRISCV64_OpLess16U(v)
	case OpLess32:
		return rewriteValueRISCV64_OpLess32(v)
	case OpLess32F:
		v.Op = OpRISCV64FLTS
		return true
	case OpLess32U:
		return rewriteValueRISCV64_OpLess32U(v)
	case OpLess64:
		v.Op = OpRISCV64SLT
		return true
	case OpLess64F:
		v.Op = OpRISCV64FLTD
		return true
	case OpLess64U:
		v.Op = OpRISCV64SLTU
		return true
	case OpLess8:
		return rewriteValueRISCV64_OpLess8(v)
	case OpLess8U:
		return rewriteValueRISCV64_OpLess8U(v)
	case OpLoad:
		return rewriteValueRISCV64_OpLoad(v)
	case OpLocalAddr:
		return rewriteValueRISCV64_OpLocalAddr(v)
	case OpLsh16x16:
		return rewriteValueRISCV64_OpLsh16x16(v)
	case OpLsh16x32:
		return rewriteValueRISCV64_OpLsh16x32(v)
	case OpLsh16x64:
		return rewriteValueRISCV64_OpLsh16x64(v)
	case OpLsh16x8:
		return rewriteValueRISCV64_OpLsh16x8(v)
	case OpLsh32x16:
		return rewriteValueRISCV64_OpLsh32x16(v)
	case OpLsh32x32:
		return rewriteValueRISCV64_OpLsh32x32(v)
	case OpLsh32x64:
		return rewriteValueRISCV64_OpLsh32x64(v)
	case OpLsh32x8:
		return rewriteValueRISCV64_OpLsh32x8(v)
	case OpLsh64x16:
		return rewriteValueRISCV64_OpLsh64x16(v)
	case OpLsh64x32:
		return rewriteValueRISCV64_OpLsh64x32(v)
	case OpLsh64x64:
		return rewriteValueRISCV64_OpLsh64x64(v)
	case OpLsh64x8:
		return rewriteValueRISCV64_OpLsh64x8(v)
	case OpLsh8x16:
		return rewriteValueRISCV64_OpLsh8x16(v)
	case OpLsh8x32:
		return rewriteValueRISCV64_OpLsh8x32(v)
	case OpLsh8x64:
		return rewriteValueRISCV64_OpLsh8x64(v)
	case OpLsh8x8:
		return rewriteValueRISCV64_OpLsh8x8(v)
	case OpMod16:
		return rewriteValueRISCV64_OpMod16(v)
	case OpMod16u:
		return rewriteValueRISCV64_OpMod16u(v)
	case OpMod32:
		return rewriteValueRISCV64_OpMod32(v)
	case OpMod32u:
		v.Op = OpRISCV64REMUW
		return true
	case OpMod64:
		return rewriteValueRISCV64_OpMod64(v)
	case OpMod64u:
		v.Op = OpRISCV64REMU
		return true
	case OpMod8:
		return rewriteValueRISCV64_OpMod8(v)
	case OpMod8u:
		return rewriteValueRISCV64_OpMod8u(v)
	case OpMove:
		return rewriteValueRISCV64_OpMove(v)
	case OpMul16:
		return rewriteValueRISCV64_OpMul16(v)
	case OpMul32:
		v.Op = OpRISCV64MULW
		return true
	case OpMul32F:
		v.Op = OpRISCV64FMULS
		return true
	case OpMul64:
		v.Op = OpRISCV64MUL
		return true
	case OpMul64F:
		v.Op = OpRISCV64FMULD
		return true
	case OpMul64uhilo:
		v.Op = OpRISCV64LoweredMuluhilo
		return true
	case OpMul64uover:
		v.Op = OpRISCV64LoweredMuluover
		return true
	case OpMul8:
		return rewriteValueRISCV64_OpMul8(v)
	case OpNeg16:
		v.Op = OpRISCV64NEG
		return true
	case OpNeg32:
		v.Op = OpRISCV64NEG
		return true
	case OpNeg32F:
		v.Op = OpRISCV64FNEGS
		return true
	case OpNeg64:
		v.Op = OpRISCV64NEG
		return true
	case OpNeg64F:
		v.Op = OpRISCV64FNEGD
		return true
	case OpNeg8:
		v.Op = OpRISCV64NEG
		return true
	case OpNeq16:
		return rewriteValueRISCV64_OpNeq16(v)
	case OpNeq32:
		return rewriteValueRISCV64_OpNeq32(v)
	case OpNeq32F:
		v.Op = OpRISCV64FNES
		return true
	case OpNeq64:
		return rewriteValueRISCV64_OpNeq64(v)
	case OpNeq64F:
		v.Op = OpRISCV64FNED
		return true
	case OpNeq8:
		return rewriteValueRISCV64_OpNeq8(v)
	case OpNeqB:
		// Booleans are 0/1, so != is XOR.
		v.Op = OpRISCV64XOR
		return true
	case OpNeqPtr:
		return rewriteValueRISCV64_OpNeqPtr(v)
	case OpNilCheck:
		v.Op = OpRISCV64LoweredNilCheck
		return true
	case OpNot:
		v.Op = OpRISCV64SEQZ
		return true
	case OpOffPtr:
		return rewriteValueRISCV64_OpOffPtr(v)
	case OpOr16:
		v.Op = OpRISCV64OR
		return true
	case OpOr32:
		v.Op = OpRISCV64OR
		return true
	case OpOr64:
		v.Op = OpRISCV64OR
		return true
	case OpOr8:
		v.Op = OpRISCV64OR
		return true
	case OpOrB:
		v.Op = OpRISCV64OR
		return true
	case OpPanicBounds:
		return rewriteValueRISCV64_OpPanicBounds(v)
	case OpRISCV64ADD:
		return rewriteValueRISCV64_OpRISCV64ADD(v)
	case OpRISCV64ADDI:
		return rewriteValueRISCV64_OpRISCV64ADDI(v)
	case OpRISCV64AND:
		return rewriteValueRISCV64_OpRISCV64AND(v)
	case OpRISCV64ANDI:
		return rewriteValueRISCV64_OpRISCV64ANDI(v)
	case OpRISCV64FMADDD:
		return rewriteValueRISCV64_OpRISCV64FMADDD(v)
	case OpRISCV64FMSUBD:
		return rewriteValueRISCV64_OpRISCV64FMSUBD(v)
	case OpRISCV64FNMADDD:
		return rewriteValueRISCV64_OpRISCV64FNMADDD(v)
	case OpRISCV64FNMSUBD:
		return rewriteValueRISCV64_OpRISCV64FNMSUBD(v)
	case OpRISCV64MOVBUload:
		return rewriteValueRISCV64_OpRISCV64MOVBUload(v)
	case OpRISCV64MOVBUreg:
		return rewriteValueRISCV64_OpRISCV64MOVBUreg(v)
	case OpRISCV64MOVBload:
		return rewriteValueRISCV64_OpRISCV64MOVBload(v)
	case OpRISCV64MOVBreg:
		return rewriteValueRISCV64_OpRISCV64MOVBreg(v)
	case OpRISCV64MOVBstore:
		return rewriteValueRISCV64_OpRISCV64MOVBstore(v)
	case OpRISCV64MOVBstorezero:
		return rewriteValueRISCV64_OpRISCV64MOVBstorezero(v)
	case OpRISCV64MOVDload:
		return rewriteValueRISCV64_OpRISCV64MOVDload(v)
	case OpRISCV64MOVDnop:
		return rewriteValueRISCV64_OpRISCV64MOVDnop(v)
	case OpRISCV64MOVDreg:
		return rewriteValueRISCV64_OpRISCV64MOVDreg(v)
	case OpRISCV64MOVDstore:
		return rewriteValueRISCV64_OpRISCV64MOVDstore(v)
	case OpRISCV64MOVDstorezero:
		return rewriteValueRISCV64_OpRISCV64MOVDstorezero(v)
	case OpRISCV64MOVHUload:
		return rewriteValueRISCV64_OpRISCV64MOVHUload(v)
	case OpRISCV64MOVHUreg:
		return rewriteValueRISCV64_OpRISCV64MOVHUreg(v)
	case OpRISCV64MOVHload:
		return rewriteValueRISCV64_OpRISCV64MOVHload(v)
	case OpRISCV64MOVHreg:
		return rewriteValueRISCV64_OpRISCV64MOVHreg(v)
	case OpRISCV64MOVHstore:
		return rewriteValueRISCV64_OpRISCV64MOVHstore(v)
	case OpRISCV64MOVHstorezero:
		return rewriteValueRISCV64_OpRISCV64MOVHstorezero(v)
	case OpRISCV64MOVWUload:
		return rewriteValueRISCV64_OpRISCV64MOVWUload(v)
	case OpRISCV64MOVWUreg:
		return rewriteValueRISCV64_OpRISCV64MOVWUreg(v)
	case OpRISCV64MOVWload:
		return rewriteValueRISCV64_OpRISCV64MOVWload(v)
	case OpRISCV64MOVWreg:
		return rewriteValueRISCV64_OpRISCV64MOVWreg(v)
	case OpRISCV64MOVWstore:
		return rewriteValueRISCV64_OpRISCV64MOVWstore(v)
	case OpRISCV64MOVWstorezero:
		return rewriteValueRISCV64_OpRISCV64MOVWstorezero(v)
	case OpRISCV64NEG:
		return rewriteValueRISCV64_OpRISCV64NEG(v)
	case OpRISCV64NEGW:
		return rewriteValueRISCV64_OpRISCV64NEGW(v)
	case OpRISCV64OR:
		return rewriteValueRISCV64_OpRISCV64OR(v)
	case OpRISCV64ORI:
		return rewriteValueRISCV64_OpRISCV64ORI(v)
	case OpRISCV64SLL:
		return rewriteValueRISCV64_OpRISCV64SLL(v)
	case OpRISCV64SLLI:
		return rewriteValueRISCV64_OpRISCV64SLLI(v)
	case OpRISCV64SLTI:
		return rewriteValueRISCV64_OpRISCV64SLTI(v)
	case OpRISCV64SLTIU:
		return rewriteValueRISCV64_OpRISCV64SLTIU(v)
	case OpRISCV64SRA:
		return rewriteValueRISCV64_OpRISCV64SRA(v)
	case OpRISCV64SRAI:
		return rewriteValueRISCV64_OpRISCV64SRAI(v)
	case OpRISCV64SRL:
		return rewriteValueRISCV64_OpRISCV64SRL(v)
	case OpRISCV64SRLI:
		return rewriteValueRISCV64_OpRISCV64SRLI(v)
	case OpRISCV64SUB:
		return rewriteValueRISCV64_OpRISCV64SUB(v)
	case OpRISCV64SUBW:
		return rewriteValueRISCV64_OpRISCV64SUBW(v)
	case OpRISCV64XOR:
		return rewriteValueRISCV64_OpRISCV64XOR(v)
	case OpRotateLeft16:
		return rewriteValueRISCV64_OpRotateLeft16(v)
	case OpRotateLeft32:
		return rewriteValueRISCV64_OpRotateLeft32(v)
	case OpRotateLeft64:
		return rewriteValueRISCV64_OpRotateLeft64(v)
	case OpRotateLeft8:
		return rewriteValueRISCV64_OpRotateLeft8(v)
	case OpRound32F:
		v.Op = OpCopy
		return true
	case OpRound64F:
		v.Op = OpCopy
		return true
	case OpRsh16Ux16:
		return rewriteValueRISCV64_OpRsh16Ux16(v)
	case OpRsh16Ux32:
		return rewriteValueRISCV64_OpRsh16Ux32(v)
	case OpRsh16Ux64:
		return rewriteValueRISCV64_OpRsh16Ux64(v)
	case OpRsh16Ux8:
		return rewriteValueRISCV64_OpRsh16Ux8(v)
	case OpRsh16x16:
		return rewriteValueRISCV64_OpRsh16x16(v)
	case OpRsh16x32:
		return rewriteValueRISCV64_OpRsh16x32(v)
	case OpRsh16x64:
		return rewriteValueRISCV64_OpRsh16x64(v)
	case OpRsh16x8:
		return rewriteValueRISCV64_OpRsh16x8(v)
	case OpRsh32Ux16:
		return rewriteValueRISCV64_OpRsh32Ux16(v)
	case OpRsh32Ux32:
		return rewriteValueRISCV64_OpRsh32Ux32(v)
	case OpRsh32Ux64:
		return rewriteValueRISCV64_OpRsh32Ux64(v)
	case OpRsh32Ux8:
		return rewriteValueRISCV64_OpRsh32Ux8(v)
	case OpRsh32x16:
		return rewriteValueRISCV64_OpRsh32x16(v)
	case OpRsh32x32:
		return rewriteValueRISCV64_OpRsh32x32(v)
	case OpRsh32x64:
		return rewriteValueRISCV64_OpRsh32x64(v)
	case OpRsh32x8:
		return rewriteValueRISCV64_OpRsh32x8(v)
	case OpRsh64Ux16:
		return rewriteValueRISCV64_OpRsh64Ux16(v)
	case OpRsh64Ux32:
		return rewriteValueRISCV64_OpRsh64Ux32(v)
	case OpRsh64Ux64:
		return rewriteValueRISCV64_OpRsh64Ux64(v)
	case OpRsh64Ux8:
		return rewriteValueRISCV64_OpRsh64Ux8(v)
	case OpRsh64x16:
		return rewriteValueRISCV64_OpRsh64x16(v)
	case OpRsh64x32:
		return rewriteValueRISCV64_OpRsh64x32(v)
	case OpRsh64x64:
		return rewriteValueRISCV64_OpRsh64x64(v)
	case OpRsh64x8:
		return rewriteValueRISCV64_OpRsh64x8(v)
	case OpRsh8Ux16:
		return rewriteValueRISCV64_OpRsh8Ux16(v)
	case OpRsh8Ux32:
		return rewriteValueRISCV64_OpRsh8Ux32(v)
	case OpRsh8Ux64:
		return rewriteValueRISCV64_OpRsh8Ux64(v)
	case OpRsh8Ux8:
		return rewriteValueRISCV64_OpRsh8Ux8(v)
	case OpRsh8x16:
		return rewriteValueRISCV64_OpRsh8x16(v)
	case OpRsh8x32:
		return rewriteValueRISCV64_OpRsh8x32(v)
	case OpRsh8x64:
		return rewriteValueRISCV64_OpRsh8x64(v)
	case OpRsh8x8:
		return rewriteValueRISCV64_OpRsh8x8(v)
	case OpSignExt16to32:
		v.Op = OpRISCV64MOVHreg
		return true
	case OpSignExt16to64:
		v.Op = OpRISCV64MOVHreg
		return true
	case OpSignExt32to64:
		v.Op = OpRISCV64MOVWreg
		return true
	case OpSignExt8to16:
		v.Op = OpRISCV64MOVBreg
		return true
	case OpSignExt8to32:
		v.Op = OpRISCV64MOVBreg
		return true
	case OpSignExt8to64:
		v.Op = OpRISCV64MOVBreg
		return true
	case OpSlicemask:
		return rewriteValueRISCV64_OpSlicemask(v)
	case OpSqrt:
		v.Op = OpRISCV64FSQRTD
		return true
	case OpSqrt32:
		v.Op = OpRISCV64FSQRTS
		return true
	case OpStaticCall:
		v.Op = OpRISCV64CALLstatic
		return true
	case OpStore:
		return rewriteValueRISCV64_OpStore(v)
	case OpSub16:
		v.Op = OpRISCV64SUB
		return true
	case OpSub32:
		v.Op = OpRISCV64SUB
		return true
	case OpSub32F:
		v.Op = OpRISCV64FSUBS
		return true
	case OpSub64:
		v.Op = OpRISCV64SUB
		return true
	case OpSub64F:
		v.Op = OpRISCV64FSUBD
		return true
	case OpSub8:
		v.Op = OpRISCV64SUB
		return true
	case OpSubPtr:
		v.Op = OpRISCV64SUB
		return true
	case OpTailCall:
		v.Op = OpRISCV64CALLtail
		return true
	case OpTrunc16to8:
		// Truncations are no-ops: narrow values live in full-width registers.
		v.Op = OpCopy
		return true
	case OpTrunc32to16:
		v.Op = OpCopy
		return true
	case OpTrunc32to8:
		v.Op = OpCopy
		return true
	case OpTrunc64to16:
		v.Op = OpCopy
		return true
	case OpTrunc64to32:
		v.Op = OpCopy
		return true
	case OpTrunc64to8:
		v.Op = OpCopy
		return true
	case OpWB:
		v.Op = OpRISCV64LoweredWB
		return true
	case OpXor16:
		v.Op = OpRISCV64XOR
		return true
	case OpXor32:
		v.Op = OpRISCV64XOR
		return true
	case OpXor64:
		v.Op = OpRISCV64XOR
		return true
	case OpXor8:
		v.Op = OpRISCV64XOR
		return true
	case OpZero:
		return rewriteValueRISCV64_OpZero(v)
	case OpZeroExt16to32:
		v.Op = OpRISCV64MOVHUreg
		return true
	case OpZeroExt16to64:
		v.Op = OpRISCV64MOVHUreg
		return true
	case OpZeroExt32to64:
		v.Op = OpRISCV64MOVWUreg
		return true
	case OpZeroExt8to16:
		v.Op = OpRISCV64MOVBUreg
		return true
	case OpZeroExt8to32:
		v.Op = OpRISCV64MOVBUreg
		return true
	case OpZeroExt8to64:
		v.Op = OpRISCV64MOVBUreg
		return true
	}
	return false
}
// rewriteValueRISCV64_OpAddr lowers (Addr {sym} base) to
// (MOVaddr {sym} [0] base). The rule is unconditional, so it always
// rewrites and returns true.
func rewriteValueRISCV64_OpAddr(v *Value) bool {
	v_0 := v.Args[0]
	// match: (Addr {sym} base)
	// result: (MOVaddr {sym} [0] base)
	for {
		sym := auxToSym(v.Aux)
		base := v_0
		v.reset(OpRISCV64MOVaddr)
		v.AuxInt = int32ToAuxInt(0)
		v.Aux = symToAux(sym)
		v.AddArg(base)
		return true
	}
}
// rewriteValueRISCV64_OpAtomicAnd8 lowers an 8-bit atomic AND, which has no
// native instruction, to a 32-bit LoweredAtomicAnd32 on the containing
// 4-byte-aligned word: the pointer is rounded down (ANDI [^3]), and the byte
// operand is turned into a full-word mask — the byte is inverted (XORI 0xff),
// shifted to its byte position within the word ((ptr&3)*8, via SLLI [3]),
// and the whole thing complemented (NOT) so the other three bytes are all
// ones and remain unchanged by the AND. Always rewrites and returns true.
func rewriteValueRISCV64_OpAtomicAnd8(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (AtomicAnd8 ptr val mem)
	// result: (LoweredAtomicAnd32 (ANDI <typ.Uintptr> [^3] ptr) (NOT <typ.UInt32> (SLL <typ.UInt32> (XORI <typ.UInt32> [0xff] (ZeroExt8to32 val)) (SLLI <typ.UInt64> [3] (ANDI <typ.UInt64> [3] ptr)))) mem)
	for {
		ptr := v_0
		val := v_1
		mem := v_2
		v.reset(OpRISCV64LoweredAtomicAnd32)
		// v0: word-aligned address (clear the low two pointer bits).
		v0 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.Uintptr)
		v0.AuxInt = int64ToAuxInt(^3)
		v0.AddArg(ptr)
		// v1..v6: build the 32-bit mask described above.
		v1 := b.NewValue0(v.Pos, OpRISCV64NOT, typ.UInt32)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLL, typ.UInt32)
		v3 := b.NewValue0(v.Pos, OpRISCV64XORI, typ.UInt32)
		v3.AuxInt = int64ToAuxInt(0xff)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v4.AddArg(val)
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpRISCV64SLLI, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(3)
		v6 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.UInt64)
		v6.AuxInt = int64ToAuxInt(3)
		v6.AddArg(ptr)
		v5.AddArg(v6)
		v2.AddArg2(v3, v5)
		v1.AddArg(v2)
		v.AddArg3(v0, v1, mem)
		return true
	}
}
// rewriteValueRISCV64_OpAtomicOr8 lowers an 8-bit atomic OR, which has no
// native instruction, to a 32-bit LoweredAtomicOr32 on the containing
// 4-byte-aligned word: the pointer is rounded down (ANDI [^3]) and the byte
// operand is zero-extended and shifted into its byte position within the
// word ((ptr&3)*8, via SLLI [3]); the zero bytes leave the rest of the word
// unchanged under OR. Always rewrites and returns true.
func rewriteValueRISCV64_OpAtomicOr8(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (AtomicOr8 ptr val mem)
	// result: (LoweredAtomicOr32 (ANDI <typ.Uintptr> [^3] ptr) (SLL <typ.UInt32> (ZeroExt8to32 val) (SLLI <typ.UInt64> [3] (ANDI <typ.UInt64> [3] ptr))) mem)
	for {
		ptr := v_0
		val := v_1
		mem := v_2
		v.reset(OpRISCV64LoweredAtomicOr32)
		// v0: word-aligned address (clear the low two pointer bits).
		v0 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.Uintptr)
		v0.AuxInt = int64ToAuxInt(^3)
		v0.AddArg(ptr)
		// v1..v4: the operand byte shifted into position within the word.
		v1 := b.NewValue0(v.Pos, OpRISCV64SLL, typ.UInt32)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v2.AddArg(val)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLLI, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(3)
		v4 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(3)
		v4.AddArg(ptr)
		v3.AddArg(v4)
		v1.AddArg2(v2, v3)
		v.AddArg3(v0, v1, mem)
		return true
	}
}
// rewriteValueRISCV64_OpAvg64u lowers the unsigned 64-bit average
// (Avg64u x y) as (x>>1) + (y>>1) + (x&y&1), which computes the result of
// (x+y)/2 without the intermediate sum overflowing 64 bits: halving each
// operand first and adding back the carry bit both halvings dropped.
// Always rewrites and returns true.
func rewriteValueRISCV64_OpAvg64u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Avg64u <t> x y)
	// result: (ADD (ADD <t> (SRLI <t> [1] x) (SRLI <t> [1] y)) (ANDI <t> [1] (AND <t> x y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpRISCV64ADD)
		v0 := b.NewValue0(v.Pos, OpRISCV64ADD, t)
		v1 := b.NewValue0(v.Pos, OpRISCV64SRLI, t)
		v1.AuxInt = int64ToAuxInt(1)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpRISCV64SRLI, t)
		v2.AuxInt = int64ToAuxInt(1)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		// Carry correction: 1 when both x and y are odd.
		v3 := b.NewValue0(v.Pos, OpRISCV64ANDI, t)
		v3.AuxInt = int64ToAuxInt(1)
		v4 := b.NewValue0(v.Pos, OpRISCV64AND, t)
		v4.AddArg2(x, y)
		v3.AddArg(v4)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueRISCV64_OpConst16 materializes a 16-bit constant as a
// sign-extended 64-bit MOVDconst. Always rewrites and returns true.
func rewriteValueRISCV64_OpConst16(v *Value) bool {
	// match: (Const16 [val])
	// result: (MOVDconst [int64(val)])
	for {
		val := auxIntToInt16(v.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(val))
		return true
	}
}
// rewriteValueRISCV64_OpConst32 materializes a 32-bit constant as a
// sign-extended 64-bit MOVDconst. Always rewrites and returns true.
func rewriteValueRISCV64_OpConst32(v *Value) bool {
	// match: (Const32 [val])
	// result: (MOVDconst [int64(val)])
	for {
		val := auxIntToInt32(v.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(val))
		return true
	}
}
// rewriteValueRISCV64_OpConst32F materializes a float32 constant by loading
// its IEEE-754 bit pattern into an integer register (MOVDconst) and moving
// it to a float register with FMVSX. Always rewrites and returns true.
func rewriteValueRISCV64_OpConst32F(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Const32F [val])
	// result: (FMVSX (MOVDconst [int64(math.Float32bits(val))]))
	for {
		val := auxIntToFloat32(v.AuxInt)
		v.reset(OpRISCV64FMVSX)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(int64(math.Float32bits(val)))
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpConst64 materializes a 64-bit constant as a
// MOVDconst. Always rewrites and returns true.
func rewriteValueRISCV64_OpConst64(v *Value) bool {
	// match: (Const64 [val])
	// result: (MOVDconst [int64(val)])
	for {
		val := auxIntToInt64(v.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(val))
		return true
	}
}
// rewriteValueRISCV64_OpConst64F materializes a float64 constant by loading
// its IEEE-754 bit pattern into an integer register (MOVDconst) and moving
// it to a float register with FMVDX. Always rewrites and returns true.
func rewriteValueRISCV64_OpConst64F(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Const64F [val])
	// result: (FMVDX (MOVDconst [int64(math.Float64bits(val))]))
	for {
		val := auxIntToFloat64(v.AuxInt)
		v.reset(OpRISCV64FMVDX)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(int64(math.Float64bits(val)))
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpConst8 materializes an 8-bit constant as a
// sign-extended 64-bit MOVDconst. Always rewrites and returns true.
func rewriteValueRISCV64_OpConst8(v *Value) bool {
	// match: (Const8 [val])
	// result: (MOVDconst [int64(val)])
	for {
		val := auxIntToInt8(v.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(val))
		return true
	}
}
// rewriteValueRISCV64_OpConstBool materializes a boolean constant as a
// MOVDconst of 0 or 1 (via b2i). Always rewrites and returns true.
func rewriteValueRISCV64_OpConstBool(v *Value) bool {
	// match: (ConstBool [val])
	// result: (MOVDconst [int64(b2i(val))])
	for {
		val := auxIntToBool(v.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(b2i(val)))
		return true
	}
}
// rewriteValueRISCV64_OpConstNil materializes the nil pointer as
// (MOVDconst [0]). Always rewrites and returns true.
func rewriteValueRISCV64_OpConstNil(v *Value) bool {
	// match: (ConstNil)
	// result: (MOVDconst [0])
	for {
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
}
// rewriteValueRISCV64_OpDiv16 lowers signed 16-bit division by
// sign-extending both operands to 32 bits and using DIVW. The rule only
// matches when the op's AuxInt flag is false; otherwise no rewrite is
// performed and false is returned.
func rewriteValueRISCV64_OpDiv16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div16 x y [false])
	// result: (DIVW (SignExt16to32 x) (SignExt16to32 y))
	for {
		if auxIntToBool(v.AuxInt) != false {
			break
		}
		x := v_0
		y := v_1
		v.reset(OpRISCV64DIVW)
		v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpDiv16u lowers unsigned 16-bit division by
// zero-extending both operands to 32 bits and using DIVUW.
// Always rewrites and returns true.
func rewriteValueRISCV64_OpDiv16u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div16u x y)
	// result: (DIVUW (ZeroExt16to32 x) (ZeroExt16to32 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64DIVUW)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpDiv32 lowers signed 32-bit division to DIVW.
// The rule only matches when the op's AuxInt flag is false; otherwise no
// rewrite is performed and false is returned.
func rewriteValueRISCV64_OpDiv32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Div32 x y [false])
	// result: (DIVW x y)
	for {
		if auxIntToBool(v.AuxInt) != false {
			break
		}
		x := v_0
		y := v_1
		v.reset(OpRISCV64DIVW)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpDiv64 lowers signed 64-bit division to DIV.
// The rule only matches when the op's AuxInt flag is false; otherwise no
// rewrite is performed and false is returned.
func rewriteValueRISCV64_OpDiv64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Div64 x y [false])
	// result: (DIV x y)
	for {
		if auxIntToBool(v.AuxInt) != false {
			break
		}
		x := v_0
		y := v_1
		v.reset(OpRISCV64DIV)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpDiv8 lowers signed 8-bit division by sign-extending
// both operands to 32 bits and using DIVW. Always rewrites and returns true.
func rewriteValueRISCV64_OpDiv8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div8 x y)
	// result: (DIVW (SignExt8to32 x) (SignExt8to32 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64DIVW)
		v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpDiv8u lowers unsigned 8-bit division by
// zero-extending both operands to 32 bits and using DIVUW.
// Always rewrites and returns true.
func rewriteValueRISCV64_OpDiv8u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div8u x y)
	// result: (DIVUW (ZeroExt8to32 x) (ZeroExt8to32 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64DIVUW)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpEq16 lowers (Eq16 x y) to
// (SEQZ (SUB <x.Type> (ZeroExt16to64 x) (ZeroExt16to64 y))):
// equality becomes "difference of the zero-extended operands is zero".
// Machine-generated from gen/RISCV64.rules; edit the rules and regenerate.
func rewriteValueRISCV64_OpEq16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Eq16 x y)
	// result: (SEQZ (SUB <x.Type> (ZeroExt16to64 x) (ZeroExt16to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SEQZ)
		v0 := b.NewValue0(v.Pos, OpRISCV64SUB, x.Type)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpEq32 lowers (Eq32 x y) to
// (SEQZ (SUB <x.Type> (ZeroExt32to64 x) (ZeroExt32to64 y))).
// Machine-generated from gen/RISCV64.rules; edit the rules and regenerate.
func rewriteValueRISCV64_OpEq32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Eq32 x y)
	// result: (SEQZ (SUB <x.Type> (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SEQZ)
		v0 := b.NewValue0(v.Pos, OpRISCV64SUB, x.Type)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpEq64 lowers (Eq64 x y) to
// (SEQZ (SUB <x.Type> x y)); no extension needed at full width.
// Machine-generated from gen/RISCV64.rules; edit the rules and regenerate.
func rewriteValueRISCV64_OpEq64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Eq64 x y)
	// result: (SEQZ (SUB <x.Type> x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SEQZ)
		v0 := b.NewValue0(v.Pos, OpRISCV64SUB, x.Type)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpEq8 lowers (Eq8 x y) to
// (SEQZ (SUB <x.Type> (ZeroExt8to64 x) (ZeroExt8to64 y))).
// Machine-generated from gen/RISCV64.rules; edit the rules and regenerate.
func rewriteValueRISCV64_OpEq8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Eq8 x y)
	// result: (SEQZ (SUB <x.Type> (ZeroExt8to64 x) (ZeroExt8to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SEQZ)
		v0 := b.NewValue0(v.Pos, OpRISCV64SUB, x.Type)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpEqB lowers boolean equality (EqB x y) to
// (SEQZ (XOR <typ.Bool> x y)): booleans are equal iff their XOR is zero.
// Machine-generated from gen/RISCV64.rules; edit the rules and regenerate.
func rewriteValueRISCV64_OpEqB(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (EqB x y)
	// result: (SEQZ (XOR <typ.Bool> x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SEQZ)
		v0 := b.NewValue0(v.Pos, OpRISCV64XOR, typ.Bool)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpEqPtr lowers pointer equality (EqPtr x y) to
// (SEQZ (SUB <x.Type> x y)), the same pattern as Eq64.
// Machine-generated from gen/RISCV64.rules; edit the rules and regenerate.
func rewriteValueRISCV64_OpEqPtr(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (EqPtr x y)
	// result: (SEQZ (SUB <x.Type> x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SEQZ)
		v0 := b.NewValue0(v.Pos, OpRISCV64SUB, x.Type)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpHmul32 lowers the signed high-multiply (Hmul32 x y)
// to (SRAI [32] (MUL (SignExt32to64 x) (SignExt32to64 y))): a full 64-bit
// product of the sign-extended operands, arithmetically shifted to keep the
// upper 32 bits.
// Machine-generated from gen/RISCV64.rules; edit the rules and regenerate.
func rewriteValueRISCV64_OpHmul32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Hmul32 x y)
	// result: (SRAI [32] (MUL (SignExt32to64 x) (SignExt32to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SRAI)
		v.AuxInt = int64ToAuxInt(32)
		v0 := b.NewValue0(v.Pos, OpRISCV64MUL, typ.Int64)
		v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpHmul32u lowers the unsigned high-multiply
// (Hmul32u x y) to (SRLI [32] (MUL (ZeroExt32to64 x) (ZeroExt32to64 y))):
// like Hmul32 but with zero-extension and a logical right shift.
// Machine-generated from gen/RISCV64.rules; edit the rules and regenerate.
func rewriteValueRISCV64_OpHmul32u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Hmul32u x y)
	// result: (SRLI [32] (MUL (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SRLI)
		v.AuxInt = int64ToAuxInt(32)
		v0 := b.NewValue0(v.Pos, OpRISCV64MUL, typ.Int64)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpLeq16 lowers (Leq16 x y) to (Not (Less16 y x)):
// x <= y is expressed as !(y < x), with operands swapped.
// Machine-generated from gen/RISCV64.rules; edit the rules and regenerate.
func rewriteValueRISCV64_OpLeq16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq16 x y)
	// result: (Not (Less16 y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpLess16, typ.Bool)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpLeq16U lowers (Leq16U x y) to (Not (Less16U y x)).
// Machine-generated from gen/RISCV64.rules; edit the rules and regenerate.
func rewriteValueRISCV64_OpLeq16U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq16U x y)
	// result: (Not (Less16U y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpLess16U, typ.Bool)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpLeq32 lowers (Leq32 x y) to (Not (Less32 y x)).
// Machine-generated from gen/RISCV64.rules; edit the rules and regenerate.
func rewriteValueRISCV64_OpLeq32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq32 x y)
	// result: (Not (Less32 y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpLess32, typ.Bool)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpLeq32U lowers (Leq32U x y) to (Not (Less32U y x)).
// Machine-generated from gen/RISCV64.rules; edit the rules and regenerate.
func rewriteValueRISCV64_OpLeq32U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq32U x y)
	// result: (Not (Less32U y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpLess32U, typ.Bool)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpLeq64 lowers (Leq64 x y) to (Not (Less64 y x)).
// Machine-generated from gen/RISCV64.rules; edit the rules and regenerate.
func rewriteValueRISCV64_OpLeq64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq64 x y)
	// result: (Not (Less64 y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpLess64, typ.Bool)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpLeq64U lowers (Leq64U x y) to (Not (Less64U y x)).
// Machine-generated from gen/RISCV64.rules; edit the rules and regenerate.
func rewriteValueRISCV64_OpLeq64U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq64U x y)
	// result: (Not (Less64U y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpLess64U, typ.Bool)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpLeq8 lowers (Leq8 x y) to (Not (Less8 y x)).
// Machine-generated from gen/RISCV64.rules; edit the rules and regenerate.
func rewriteValueRISCV64_OpLeq8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq8 x y)
	// result: (Not (Less8 y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpLess8, typ.Bool)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpLeq8U lowers (Leq8U x y) to (Not (Less8U y x)).
// Machine-generated from gen/RISCV64.rules; edit the rules and regenerate.
func rewriteValueRISCV64_OpLeq8U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq8U x y)
	// result: (Not (Less8U y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpLess8U, typ.Bool)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpLess16 lowers (Less16 x y) to
// (SLT (SignExt16to64 x) (SignExt16to64 y)): a signed compare after
// sign-extending both operands to register width.
// Machine-generated from gen/RISCV64.rules; edit the rules and regenerate.
func rewriteValueRISCV64_OpLess16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less16 x y)
	// result: (SLT (SignExt16to64 x) (SignExt16to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SLT)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpLess16U lowers (Less16U x y) to
// (SLTU (ZeroExt16to64 x) (ZeroExt16to64 y)): an unsigned compare after
// zero-extending both operands.
// Machine-generated from gen/RISCV64.rules; edit the rules and regenerate.
func rewriteValueRISCV64_OpLess16U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less16U x y)
	// result: (SLTU (ZeroExt16to64 x) (ZeroExt16to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SLTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpLess32 lowers (Less32 x y) to
// (SLT (SignExt32to64 x) (SignExt32to64 y)).
// Machine-generated from gen/RISCV64.rules; edit the rules and regenerate.
func rewriteValueRISCV64_OpLess32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less32 x y)
	// result: (SLT (SignExt32to64 x) (SignExt32to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SLT)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpLess32U lowers (Less32U x y) to
// (SLTU (ZeroExt32to64 x) (ZeroExt32to64 y)).
// Machine-generated from gen/RISCV64.rules; edit the rules and regenerate.
func rewriteValueRISCV64_OpLess32U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less32U x y)
	// result: (SLTU (ZeroExt32to64 x) (ZeroExt32to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SLTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpLess8 lowers (Less8 x y) to
// (SLT (SignExt8to64 x) (SignExt8to64 y)).
// Machine-generated from gen/RISCV64.rules; edit the rules and regenerate.
func rewriteValueRISCV64_OpLess8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less8 x y)
	// result: (SLT (SignExt8to64 x) (SignExt8to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SLT)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpLess8U lowers (Less8U x y) to
// (SLTU (ZeroExt8to64 x) (ZeroExt8to64 y)).
// Machine-generated from gen/RISCV64.rules; edit the rules and regenerate.
func rewriteValueRISCV64_OpLess8U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less8U x y)
	// result: (SLTU (ZeroExt8to64 x) (ZeroExt8to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SLTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpLoad lowers a generic Load to the RISCV64 load of the
// appropriate width and signedness, chosen from the loaded type t:
// bool and unsigned 8-bit -> MOVBUload, signed 8-bit -> MOVBload,
// 16-bit -> MOVHload/MOVHUload, 32-bit -> MOVWload/MOVWUload,
// 64-bit ints and pointers -> MOVDload, floats -> FMOVWload/FMOVDload.
// Returns false if no rule matches (an unhandled type).
// Machine-generated from gen/RISCV64.rules; edit the rules and regenerate.
func rewriteValueRISCV64_OpLoad(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Load <t> ptr mem)
	// cond: t.IsBoolean()
	// result: (MOVBUload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(t.IsBoolean()) {
			break
		}
		v.reset(OpRISCV64MOVBUload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: ( is8BitInt(t) && isSigned(t))
	// result: (MOVBload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is8BitInt(t) && isSigned(t)) {
			break
		}
		v.reset(OpRISCV64MOVBload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: ( is8BitInt(t) && !isSigned(t))
	// result: (MOVBUload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is8BitInt(t) && !isSigned(t)) {
			break
		}
		v.reset(OpRISCV64MOVBUload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is16BitInt(t) && isSigned(t))
	// result: (MOVHload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is16BitInt(t) && isSigned(t)) {
			break
		}
		v.reset(OpRISCV64MOVHload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is16BitInt(t) && !isSigned(t))
	// result: (MOVHUload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is16BitInt(t) && !isSigned(t)) {
			break
		}
		v.reset(OpRISCV64MOVHUload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is32BitInt(t) && isSigned(t))
	// result: (MOVWload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is32BitInt(t) && isSigned(t)) {
			break
		}
		v.reset(OpRISCV64MOVWload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is32BitInt(t) && !isSigned(t))
	// result: (MOVWUload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is32BitInt(t) && !isSigned(t)) {
			break
		}
		v.reset(OpRISCV64MOVWUload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is64BitInt(t) || isPtr(t))
	// result: (MOVDload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is64BitInt(t) || isPtr(t)) {
			break
		}
		v.reset(OpRISCV64MOVDload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: is32BitFloat(t)
	// result: (FMOVWload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is32BitFloat(t)) {
			break
		}
		v.reset(OpRISCV64FMOVWload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: is64BitFloat(t)
	// result: (FMOVDload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is64BitFloat(t)) {
			break
		}
		v.reset(OpRISCV64FMOVDload)
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLocalAddr lowers (LocalAddr {sym} base _) to
// (MOVaddr {sym} base), dropping the memory argument.
// Machine-generated from gen/RISCV64.rules; edit the rules and regenerate.
func rewriteValueRISCV64_OpLocalAddr(v *Value) bool {
	v_0 := v.Args[0]
	// match: (LocalAddr {sym} base _)
	// result: (MOVaddr {sym} base)
	for {
		sym := auxToSym(v.Aux)
		base := v_0
		v.reset(OpRISCV64MOVaddr)
		v.Aux = symToAux(sym)
		v.AddArg(base)
		return true
	}
}
// rewriteValueRISCV64_OpLsh16x16 lowers (Lsh16x16 <t> x y) to
// (AND (SLL <t> x y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt16to64 y)))):
// the SLTIU/Neg pair produces an all-ones mask when the (zero-extended)
// shift amount is < 64 and zero otherwise, so oversized shifts yield 0.
// Machine-generated from gen/RISCV64.rules; edit the rules and regenerate.
func rewriteValueRISCV64_OpLsh16x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh16x16 <t> x y)
	// result: (AND (SLL <t> x y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg16, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpLsh16x32 lowers (Lsh16x32 <t> x y) to
// (AND (SLL <t> x y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt32to64 y)))),
// masking the result to zero when the shift amount is >= 64.
// Machine-generated from gen/RISCV64.rules; edit the rules and regenerate.
func rewriteValueRISCV64_OpLsh16x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh16x32 <t> x y)
	// result: (AND (SLL <t> x y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg16, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpLsh16x64 lowers (Lsh16x64 <t> x y) to
// (AND (SLL <t> x y) (Neg16 <t> (SLTIU <t> [64] y))); the 64-bit shift
// amount needs no extension before the bounds check.
// Machine-generated from gen/RISCV64.rules; edit the rules and regenerate.
func rewriteValueRISCV64_OpLsh16x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Lsh16x64 <t> x y)
	// result: (AND (SLL <t> x y) (Neg16 <t> (SLTIU <t> [64] y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg16, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpLsh16x8 lowers (Lsh16x8 <t> x y) to
// (AND (SLL <t> x y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt8to64 y)))),
// masking the result to zero when the shift amount is >= 64.
// Machine-generated from gen/RISCV64.rules; edit the rules and regenerate.
func rewriteValueRISCV64_OpLsh16x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh16x8 <t> x y)
	// result: (AND (SLL <t> x y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg16, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpLsh32x16 lowers (Lsh32x16 <t> x y) to
// (AND (SLL <t> x y) (Neg32 <t> (SLTIU <t> [64] (ZeroExt16to64 y)))),
// masking the result to zero when the shift amount is >= 64.
// Machine-generated from gen/RISCV64.rules; edit the rules and regenerate.
func rewriteValueRISCV64_OpLsh32x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh32x16 <t> x y)
	// result: (AND (SLL <t> x y) (Neg32 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg32, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpLsh32x32 lowers (Lsh32x32 <t> x y) to
// (AND (SLL <t> x y) (Neg32 <t> (SLTIU <t> [64] (ZeroExt32to64 y)))),
// masking the result to zero when the shift amount is >= 64.
// Machine-generated from gen/RISCV64.rules; edit the rules and regenerate.
func rewriteValueRISCV64_OpLsh32x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh32x32 <t> x y)
	// result: (AND (SLL <t> x y) (Neg32 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg32, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpLsh32x64 lowers (Lsh32x64 <t> x y) to
// (AND (SLL <t> x y) (Neg32 <t> (SLTIU <t> [64] y))); the 64-bit shift
// amount needs no extension before the bounds check.
// Machine-generated from gen/RISCV64.rules; edit the rules and regenerate.
func rewriteValueRISCV64_OpLsh32x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Lsh32x64 <t> x y)
	// result: (AND (SLL <t> x y) (Neg32 <t> (SLTIU <t> [64] y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg32, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpLsh32x8 lowers (Lsh32x8 <t> x y) to
// (AND (SLL <t> x y) (Neg32 <t> (SLTIU <t> [64] (ZeroExt8to64 y)))),
// masking the result to zero when the shift amount is >= 64.
// Machine-generated from gen/RISCV64.rules; edit the rules and regenerate.
func rewriteValueRISCV64_OpLsh32x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh32x8 <t> x y)
	// result: (AND (SLL <t> x y) (Neg32 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg32, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpLsh64x16 lowers (Lsh64x16 <t> x y) to
// (AND (SLL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt16to64 y)))),
// masking the result to zero when the shift amount is >= 64.
// Machine-generated from gen/RISCV64.rules; edit the rules and regenerate.
func rewriteValueRISCV64_OpLsh64x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh64x16 <t> x y)
	// result: (AND (SLL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg64, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpLsh64x32 lowers (Lsh64x32 <t> x y) to
// (AND (SLL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt32to64 y)))),
// masking the result to zero when the shift amount is >= 64.
// Machine-generated from gen/RISCV64.rules; edit the rules and regenerate.
func rewriteValueRISCV64_OpLsh64x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh64x32 <t> x y)
	// result: (AND (SLL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg64, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpLsh64x64 lowers (Lsh64x64 <t> x y) to
// (AND (SLL <t> x y) (Neg64 <t> (SLTIU <t> [64] y))); the 64-bit shift
// amount needs no extension before the bounds check.
// Machine-generated from gen/RISCV64.rules; edit the rules and regenerate.
func rewriteValueRISCV64_OpLsh64x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Lsh64x64 <t> x y)
	// result: (AND (SLL <t> x y) (Neg64 <t> (SLTIU <t> [64] y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg64, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpLsh64x8 lowers (Lsh64x8 <t> x y) to
// (AND (SLL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt8to64 y)))),
// masking the result to zero when the shift amount is >= 64.
// Machine-generated from gen/RISCV64.rules; edit the rules and regenerate.
func rewriteValueRISCV64_OpLsh64x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh64x8 <t> x y)
	// result: (AND (SLL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg64, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpLsh8x16 lowers (Lsh8x16 <t> x y) to
// (AND (SLL <t> x y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt16to64 y)))),
// masking the result to zero when the shift amount is >= 64.
// Machine-generated from gen/RISCV64.rules; edit the rules and regenerate.
func rewriteValueRISCV64_OpLsh8x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh8x16 <t> x y)
	// result: (AND (SLL <t> x y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg8, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpLsh8x32 lowers (Lsh8x32 <t> x y) to
// (AND (SLL <t> x y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt32to64 y)))),
// masking the result to zero when the shift amount is >= 64.
// Machine-generated from gen/RISCV64.rules; edit the rules and regenerate.
func rewriteValueRISCV64_OpLsh8x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh8x32 <t> x y)
	// result: (AND (SLL <t> x y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg8, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpLsh8x64 lowers (Lsh8x64 <t> x y) to
// (AND (SLL <t> x y) (Neg8 <t> (SLTIU <t> [64] y))); the 64-bit shift
// amount needs no extension before the bounds check.
// Machine-generated from gen/RISCV64.rules; edit the rules and regenerate.
func rewriteValueRISCV64_OpLsh8x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Lsh8x64 <t> x y)
	// result: (AND (SLL <t> x y) (Neg8 <t> (SLTIU <t> [64] y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg8, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpLsh8x8 lowers (Lsh8x8 <t> x y) to
// (AND (SLL <t> x y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt8to64 y)))),
// masking the result to zero when the shift amount is >= 64.
// Machine-generated from gen/RISCV64.rules; edit the rules and regenerate.
func rewriteValueRISCV64_OpLsh8x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh8x8 <t> x y)
	// result: (AND (SLL <t> x y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg8, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpMod16 lowers (Mod16 x y [false]) to
// (REMW (SignExt16to32 x) (SignExt16to32 y)); the rule only fires when
// the AuxInt flag is false.
// Machine-generated from gen/RISCV64.rules; edit the rules and regenerate.
func rewriteValueRISCV64_OpMod16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod16 x y [false])
	// result: (REMW (SignExt16to32 x) (SignExt16to32 y))
	for {
		if auxIntToBool(v.AuxInt) != false {
			break
		}
		x := v_0
		y := v_1
		v.reset(OpRISCV64REMW)
		v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpMod16u lowers (Mod16u x y) to
// (REMUW (ZeroExt16to32 x) (ZeroExt16to32 y)).
// Machine-generated from gen/RISCV64.rules; edit the rules and regenerate.
func rewriteValueRISCV64_OpMod16u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod16u x y)
	// result: (REMUW (ZeroExt16to32 x) (ZeroExt16to32 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64REMUW)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpMod32 lowers (Mod32 x y [false]) to (REMW x y);
// the rule only fires when the AuxInt flag is false.
// Machine-generated from gen/RISCV64.rules; edit the rules and regenerate.
func rewriteValueRISCV64_OpMod32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Mod32 x y [false])
	// result: (REMW x y)
	for {
		if auxIntToBool(v.AuxInt) != false {
			break
		}
		x := v_0
		y := v_1
		v.reset(OpRISCV64REMW)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpMod64 lowers (Mod64 x y [false]) to (REM x y);
// the rule only fires when the AuxInt flag is false.
// Machine-generated from gen/RISCV64.rules; edit the rules and regenerate.
func rewriteValueRISCV64_OpMod64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Mod64 x y [false])
	// result: (REM x y)
	for {
		if auxIntToBool(v.AuxInt) != false {
			break
		}
		x := v_0
		y := v_1
		v.reset(OpRISCV64REM)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpMod8 lowers (Mod8 x y) to
// (REMW (SignExt8to32 x) (SignExt8to32 y)).
// Machine-generated from gen/RISCV64.rules; edit the rules and regenerate.
func rewriteValueRISCV64_OpMod8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod8 x y)
	// result: (REMW (SignExt8to32 x) (SignExt8to32 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64REMW)
		v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpMod8u lowers (Mod8u x y) to
// (REMUW (ZeroExt8to32 x) (ZeroExt8to32 y)).
// Machine-generated from gen/RISCV64.rules; edit the rules and regenerate.
func rewriteValueRISCV64_OpMod8u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod8u x y)
	// result: (REMUW (ZeroExt8to32 x) (ZeroExt8to32 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64REMUW)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
2093func rewriteValueRISCV64_OpMove(v *Value) bool {
2094 v_2 := v.Args[2]
2095 v_1 := v.Args[1]
2096 v_0 := v.Args[0]
2097 b := v.Block
2098 config := b.Func.Config
2099 typ := &b.Func.Config.Types
2100 // match: (Move [0] _ _ mem)
2101 // result: mem
2102 for {
2103 if auxIntToInt64(v.AuxInt) != 0 {
2104 break
2105 }
2106 mem := v_2
2107 v.copyOf(mem)
2108 return true
2109 }
2110 // match: (Move [1] dst src mem)
2111 // result: (MOVBstore dst (MOVBload src mem) mem)
2112 for {
2113 if auxIntToInt64(v.AuxInt) != 1 {
2114 break
2115 }
2116 dst := v_0
2117 src := v_1
2118 mem := v_2
2119 v.reset(OpRISCV64MOVBstore)
2120 v0 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
2121 v0.AddArg2(src, mem)
2122 v.AddArg3(dst, v0, mem)
2123 return true
2124 }
Colin Cross1f805522021-05-14 11:10:59 -07002125 // match: (Move [2] {t} dst src mem)
2126 // cond: t.Alignment()%2 == 0
Patrice Arruda748609c2020-06-25 12:12:21 -07002127 // result: (MOVHstore dst (MOVHload src mem) mem)
2128 for {
2129 if auxIntToInt64(v.AuxInt) != 2 {
2130 break
2131 }
Colin Cross1f805522021-05-14 11:10:59 -07002132 t := auxToType(v.Aux)
2133 dst := v_0
2134 src := v_1
2135 mem := v_2
2136 if !(t.Alignment()%2 == 0) {
2137 break
2138 }
2139 v.reset(OpRISCV64MOVHstore)
2140 v0 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
2141 v0.AddArg2(src, mem)
2142 v.AddArg3(dst, v0, mem)
2143 return true
2144 }
2145 // match: (Move [2] dst src mem)
2146 // result: (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem))
2147 for {
2148 if auxIntToInt64(v.AuxInt) != 2 {
2149 break
2150 }
Patrice Arruda748609c2020-06-25 12:12:21 -07002151 dst := v_0
2152 src := v_1
2153 mem := v_2
Colin Cross1f805522021-05-14 11:10:59 -07002154 v.reset(OpRISCV64MOVBstore)
2155 v.AuxInt = int32ToAuxInt(1)
2156 v0 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
2157 v0.AuxInt = int32ToAuxInt(1)
2158 v0.AddArg2(src, mem)
2159 v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
2160 v2 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
2161 v2.AddArg2(src, mem)
2162 v1.AddArg3(dst, v2, mem)
2163 v.AddArg3(dst, v0, v1)
2164 return true
2165 }
2166 // match: (Move [4] {t} dst src mem)
2167 // cond: t.Alignment()%4 == 0
2168 // result: (MOVWstore dst (MOVWload src mem) mem)
2169 for {
2170 if auxIntToInt64(v.AuxInt) != 4 {
2171 break
2172 }
2173 t := auxToType(v.Aux)
2174 dst := v_0
2175 src := v_1
2176 mem := v_2
2177 if !(t.Alignment()%4 == 0) {
2178 break
2179 }
2180 v.reset(OpRISCV64MOVWstore)
2181 v0 := b.NewValue0(v.Pos, OpRISCV64MOVWload, typ.Int32)
Patrice Arruda748609c2020-06-25 12:12:21 -07002182 v0.AddArg2(src, mem)
2183 v.AddArg3(dst, v0, mem)
2184 return true
2185 }
Colin Cross1f805522021-05-14 11:10:59 -07002186 // match: (Move [4] {t} dst src mem)
2187 // cond: t.Alignment()%2 == 0
2188 // result: (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem))
2189 for {
2190 if auxIntToInt64(v.AuxInt) != 4 {
2191 break
2192 }
2193 t := auxToType(v.Aux)
2194 dst := v_0
2195 src := v_1
2196 mem := v_2
2197 if !(t.Alignment()%2 == 0) {
2198 break
2199 }
2200 v.reset(OpRISCV64MOVHstore)
2201 v.AuxInt = int32ToAuxInt(2)
2202 v0 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
2203 v0.AuxInt = int32ToAuxInt(2)
2204 v0.AddArg2(src, mem)
2205 v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
2206 v2 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
2207 v2.AddArg2(src, mem)
2208 v1.AddArg3(dst, v2, mem)
2209 v.AddArg3(dst, v0, v1)
2210 return true
2211 }
Patrice Arruda748609c2020-06-25 12:12:21 -07002212 // match: (Move [4] dst src mem)
Colin Cross1f805522021-05-14 11:10:59 -07002213 // result: (MOVBstore [3] dst (MOVBload [3] src mem) (MOVBstore [2] dst (MOVBload [2] src mem) (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem))))
Patrice Arruda748609c2020-06-25 12:12:21 -07002214 for {
2215 if auxIntToInt64(v.AuxInt) != 4 {
2216 break
2217 }
2218 dst := v_0
2219 src := v_1
2220 mem := v_2
Colin Cross1f805522021-05-14 11:10:59 -07002221 v.reset(OpRISCV64MOVBstore)
2222 v.AuxInt = int32ToAuxInt(3)
2223 v0 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
2224 v0.AuxInt = int32ToAuxInt(3)
Patrice Arruda748609c2020-06-25 12:12:21 -07002225 v0.AddArg2(src, mem)
Colin Cross1f805522021-05-14 11:10:59 -07002226 v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
2227 v1.AuxInt = int32ToAuxInt(2)
2228 v2 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
2229 v2.AuxInt = int32ToAuxInt(2)
2230 v2.AddArg2(src, mem)
2231 v3 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
2232 v3.AuxInt = int32ToAuxInt(1)
2233 v4 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
2234 v4.AuxInt = int32ToAuxInt(1)
2235 v4.AddArg2(src, mem)
2236 v5 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
2237 v6 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
2238 v6.AddArg2(src, mem)
2239 v5.AddArg3(dst, v6, mem)
2240 v3.AddArg3(dst, v4, v5)
2241 v1.AddArg3(dst, v2, v3)
2242 v.AddArg3(dst, v0, v1)
Patrice Arruda748609c2020-06-25 12:12:21 -07002243 return true
2244 }
Colin Cross1f805522021-05-14 11:10:59 -07002245 // match: (Move [8] {t} dst src mem)
2246 // cond: t.Alignment()%8 == 0
Patrice Arruda748609c2020-06-25 12:12:21 -07002247 // result: (MOVDstore dst (MOVDload src mem) mem)
2248 for {
2249 if auxIntToInt64(v.AuxInt) != 8 {
2250 break
2251 }
Colin Cross1f805522021-05-14 11:10:59 -07002252 t := auxToType(v.Aux)
Patrice Arruda748609c2020-06-25 12:12:21 -07002253 dst := v_0
2254 src := v_1
2255 mem := v_2
Colin Cross1f805522021-05-14 11:10:59 -07002256 if !(t.Alignment()%8 == 0) {
2257 break
2258 }
Patrice Arruda748609c2020-06-25 12:12:21 -07002259 v.reset(OpRISCV64MOVDstore)
2260 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
2261 v0.AddArg2(src, mem)
2262 v.AddArg3(dst, v0, mem)
2263 return true
2264 }
Colin Cross1f805522021-05-14 11:10:59 -07002265 // match: (Move [8] {t} dst src mem)
2266 // cond: t.Alignment()%4 == 0
2267 // result: (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem))
2268 for {
2269 if auxIntToInt64(v.AuxInt) != 8 {
2270 break
2271 }
2272 t := auxToType(v.Aux)
2273 dst := v_0
2274 src := v_1
2275 mem := v_2
2276 if !(t.Alignment()%4 == 0) {
2277 break
2278 }
2279 v.reset(OpRISCV64MOVWstore)
2280 v.AuxInt = int32ToAuxInt(4)
2281 v0 := b.NewValue0(v.Pos, OpRISCV64MOVWload, typ.Int32)
2282 v0.AuxInt = int32ToAuxInt(4)
2283 v0.AddArg2(src, mem)
2284 v1 := b.NewValue0(v.Pos, OpRISCV64MOVWstore, types.TypeMem)
2285 v2 := b.NewValue0(v.Pos, OpRISCV64MOVWload, typ.Int32)
2286 v2.AddArg2(src, mem)
2287 v1.AddArg3(dst, v2, mem)
2288 v.AddArg3(dst, v0, v1)
2289 return true
2290 }
2291 // match: (Move [8] {t} dst src mem)
2292 // cond: t.Alignment()%2 == 0
2293 // result: (MOVHstore [6] dst (MOVHload [6] src mem) (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem))))
2294 for {
2295 if auxIntToInt64(v.AuxInt) != 8 {
2296 break
2297 }
2298 t := auxToType(v.Aux)
2299 dst := v_0
2300 src := v_1
2301 mem := v_2
2302 if !(t.Alignment()%2 == 0) {
2303 break
2304 }
2305 v.reset(OpRISCV64MOVHstore)
2306 v.AuxInt = int32ToAuxInt(6)
2307 v0 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
2308 v0.AuxInt = int32ToAuxInt(6)
2309 v0.AddArg2(src, mem)
2310 v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
2311 v1.AuxInt = int32ToAuxInt(4)
2312 v2 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
2313 v2.AuxInt = int32ToAuxInt(4)
2314 v2.AddArg2(src, mem)
2315 v3 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
2316 v3.AuxInt = int32ToAuxInt(2)
2317 v4 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
2318 v4.AuxInt = int32ToAuxInt(2)
2319 v4.AddArg2(src, mem)
2320 v5 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
2321 v6 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
2322 v6.AddArg2(src, mem)
2323 v5.AddArg3(dst, v6, mem)
2324 v3.AddArg3(dst, v4, v5)
2325 v1.AddArg3(dst, v2, v3)
2326 v.AddArg3(dst, v0, v1)
2327 return true
2328 }
2329 // match: (Move [3] dst src mem)
2330 // result: (MOVBstore [2] dst (MOVBload [2] src mem) (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem)))
2331 for {
2332 if auxIntToInt64(v.AuxInt) != 3 {
2333 break
2334 }
2335 dst := v_0
2336 src := v_1
2337 mem := v_2
2338 v.reset(OpRISCV64MOVBstore)
2339 v.AuxInt = int32ToAuxInt(2)
2340 v0 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
2341 v0.AuxInt = int32ToAuxInt(2)
2342 v0.AddArg2(src, mem)
2343 v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
2344 v1.AuxInt = int32ToAuxInt(1)
2345 v2 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
2346 v2.AuxInt = int32ToAuxInt(1)
2347 v2.AddArg2(src, mem)
2348 v3 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
2349 v4 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
2350 v4.AddArg2(src, mem)
2351 v3.AddArg3(dst, v4, mem)
2352 v1.AddArg3(dst, v2, v3)
2353 v.AddArg3(dst, v0, v1)
2354 return true
2355 }
2356 // match: (Move [6] {t} dst src mem)
2357 // cond: t.Alignment()%2 == 0
2358 // result: (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem)))
2359 for {
2360 if auxIntToInt64(v.AuxInt) != 6 {
2361 break
2362 }
2363 t := auxToType(v.Aux)
2364 dst := v_0
2365 src := v_1
2366 mem := v_2
2367 if !(t.Alignment()%2 == 0) {
2368 break
2369 }
2370 v.reset(OpRISCV64MOVHstore)
2371 v.AuxInt = int32ToAuxInt(4)
2372 v0 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
2373 v0.AuxInt = int32ToAuxInt(4)
2374 v0.AddArg2(src, mem)
2375 v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
2376 v1.AuxInt = int32ToAuxInt(2)
2377 v2 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
2378 v2.AuxInt = int32ToAuxInt(2)
2379 v2.AddArg2(src, mem)
2380 v3 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
2381 v4 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
2382 v4.AddArg2(src, mem)
2383 v3.AddArg3(dst, v4, mem)
2384 v1.AddArg3(dst, v2, v3)
2385 v.AddArg3(dst, v0, v1)
2386 return true
2387 }
2388 // match: (Move [12] {t} dst src mem)
2389 // cond: t.Alignment()%4 == 0
2390 // result: (MOVWstore [8] dst (MOVWload [8] src mem) (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem)))
2391 for {
2392 if auxIntToInt64(v.AuxInt) != 12 {
2393 break
2394 }
2395 t := auxToType(v.Aux)
2396 dst := v_0
2397 src := v_1
2398 mem := v_2
2399 if !(t.Alignment()%4 == 0) {
2400 break
2401 }
2402 v.reset(OpRISCV64MOVWstore)
2403 v.AuxInt = int32ToAuxInt(8)
2404 v0 := b.NewValue0(v.Pos, OpRISCV64MOVWload, typ.Int32)
2405 v0.AuxInt = int32ToAuxInt(8)
2406 v0.AddArg2(src, mem)
2407 v1 := b.NewValue0(v.Pos, OpRISCV64MOVWstore, types.TypeMem)
2408 v1.AuxInt = int32ToAuxInt(4)
2409 v2 := b.NewValue0(v.Pos, OpRISCV64MOVWload, typ.Int32)
2410 v2.AuxInt = int32ToAuxInt(4)
2411 v2.AddArg2(src, mem)
2412 v3 := b.NewValue0(v.Pos, OpRISCV64MOVWstore, types.TypeMem)
2413 v4 := b.NewValue0(v.Pos, OpRISCV64MOVWload, typ.Int32)
2414 v4.AddArg2(src, mem)
2415 v3.AddArg3(dst, v4, mem)
2416 v1.AddArg3(dst, v2, v3)
2417 v.AddArg3(dst, v0, v1)
2418 return true
2419 }
2420 // match: (Move [16] {t} dst src mem)
2421 // cond: t.Alignment()%8 == 0
2422 // result: (MOVDstore [8] dst (MOVDload [8] src mem) (MOVDstore dst (MOVDload src mem) mem))
2423 for {
2424 if auxIntToInt64(v.AuxInt) != 16 {
2425 break
2426 }
2427 t := auxToType(v.Aux)
2428 dst := v_0
2429 src := v_1
2430 mem := v_2
2431 if !(t.Alignment()%8 == 0) {
2432 break
2433 }
2434 v.reset(OpRISCV64MOVDstore)
2435 v.AuxInt = int32ToAuxInt(8)
2436 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
2437 v0.AuxInt = int32ToAuxInt(8)
2438 v0.AddArg2(src, mem)
2439 v1 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
2440 v2 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
2441 v2.AddArg2(src, mem)
2442 v1.AddArg3(dst, v2, mem)
2443 v.AddArg3(dst, v0, v1)
2444 return true
2445 }
2446 // match: (Move [24] {t} dst src mem)
2447 // cond: t.Alignment()%8 == 0
2448 // result: (MOVDstore [16] dst (MOVDload [16] src mem) (MOVDstore [8] dst (MOVDload [8] src mem) (MOVDstore dst (MOVDload src mem) mem)))
2449 for {
2450 if auxIntToInt64(v.AuxInt) != 24 {
2451 break
2452 }
2453 t := auxToType(v.Aux)
2454 dst := v_0
2455 src := v_1
2456 mem := v_2
2457 if !(t.Alignment()%8 == 0) {
2458 break
2459 }
2460 v.reset(OpRISCV64MOVDstore)
2461 v.AuxInt = int32ToAuxInt(16)
2462 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
2463 v0.AuxInt = int32ToAuxInt(16)
2464 v0.AddArg2(src, mem)
2465 v1 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
2466 v1.AuxInt = int32ToAuxInt(8)
2467 v2 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
2468 v2.AuxInt = int32ToAuxInt(8)
2469 v2.AddArg2(src, mem)
2470 v3 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
2471 v4 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
2472 v4.AddArg2(src, mem)
2473 v3.AddArg3(dst, v4, mem)
2474 v1.AddArg3(dst, v2, v3)
2475 v.AddArg3(dst, v0, v1)
2476 return true
2477 }
2478 // match: (Move [32] {t} dst src mem)
2479 // cond: t.Alignment()%8 == 0
2480 // result: (MOVDstore [24] dst (MOVDload [24] src mem) (MOVDstore [16] dst (MOVDload [16] src mem) (MOVDstore [8] dst (MOVDload [8] src mem) (MOVDstore dst (MOVDload src mem) mem))))
2481 for {
2482 if auxIntToInt64(v.AuxInt) != 32 {
2483 break
2484 }
2485 t := auxToType(v.Aux)
2486 dst := v_0
2487 src := v_1
2488 mem := v_2
2489 if !(t.Alignment()%8 == 0) {
2490 break
2491 }
2492 v.reset(OpRISCV64MOVDstore)
2493 v.AuxInt = int32ToAuxInt(24)
2494 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
2495 v0.AuxInt = int32ToAuxInt(24)
2496 v0.AddArg2(src, mem)
2497 v1 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
2498 v1.AuxInt = int32ToAuxInt(16)
2499 v2 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
2500 v2.AuxInt = int32ToAuxInt(16)
2501 v2.AddArg2(src, mem)
2502 v3 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
2503 v3.AuxInt = int32ToAuxInt(8)
2504 v4 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
2505 v4.AuxInt = int32ToAuxInt(8)
2506 v4.AddArg2(src, mem)
2507 v5 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
2508 v6 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
2509 v6.AddArg2(src, mem)
2510 v5.AddArg3(dst, v6, mem)
2511 v3.AddArg3(dst, v4, v5)
2512 v1.AddArg3(dst, v2, v3)
2513 v.AddArg3(dst, v0, v1)
2514 return true
2515 }
2516 // match: (Move [s] {t} dst src mem)
2517 // cond: s%8 == 0 && s <= 8*128 && t.Alignment()%8 == 0 && !config.noDuffDevice && logLargeCopy(v, s)
2518 // result: (DUFFCOPY [16 * (128 - s/8)] dst src mem)
2519 for {
2520 s := auxIntToInt64(v.AuxInt)
2521 t := auxToType(v.Aux)
2522 dst := v_0
2523 src := v_1
2524 mem := v_2
2525 if !(s%8 == 0 && s <= 8*128 && t.Alignment()%8 == 0 && !config.noDuffDevice && logLargeCopy(v, s)) {
2526 break
2527 }
2528 v.reset(OpRISCV64DUFFCOPY)
2529 v.AuxInt = int64ToAuxInt(16 * (128 - s/8))
2530 v.AddArg3(dst, src, mem)
2531 return true
2532 }
Patrice Arruda748609c2020-06-25 12:12:21 -07002533 // match: (Move [s] {t} dst src mem)
2534 // cond: (s <= 16 || logLargeCopy(v, s))
2535 // result: (LoweredMove [t.Alignment()] dst src (ADDI <src.Type> [s-moveSize(t.Alignment(), config)] src) mem)
2536 for {
2537 s := auxIntToInt64(v.AuxInt)
2538 t := auxToType(v.Aux)
2539 dst := v_0
2540 src := v_1
2541 mem := v_2
2542 if !(s <= 16 || logLargeCopy(v, s)) {
2543 break
2544 }
2545 v.reset(OpRISCV64LoweredMove)
2546 v.AuxInt = int64ToAuxInt(t.Alignment())
2547 v0 := b.NewValue0(v.Pos, OpRISCV64ADDI, src.Type)
2548 v0.AuxInt = int64ToAuxInt(s - moveSize(t.Alignment(), config))
2549 v0.AddArg(src)
2550 v.AddArg4(dst, src, v0, mem)
2551 return true
2552 }
2553 return false
2554}
2555func rewriteValueRISCV64_OpMul16(v *Value) bool {
2556 v_1 := v.Args[1]
2557 v_0 := v.Args[0]
2558 b := v.Block
2559 typ := &b.Func.Config.Types
2560 // match: (Mul16 x y)
2561 // result: (MULW (SignExt16to32 x) (SignExt16to32 y))
2562 for {
2563 x := v_0
2564 y := v_1
2565 v.reset(OpRISCV64MULW)
2566 v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
2567 v0.AddArg(x)
2568 v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
2569 v1.AddArg(y)
2570 v.AddArg2(v0, v1)
2571 return true
2572 }
2573}
2574func rewriteValueRISCV64_OpMul8(v *Value) bool {
2575 v_1 := v.Args[1]
2576 v_0 := v.Args[0]
2577 b := v.Block
2578 typ := &b.Func.Config.Types
2579 // match: (Mul8 x y)
2580 // result: (MULW (SignExt8to32 x) (SignExt8to32 y))
2581 for {
2582 x := v_0
2583 y := v_1
2584 v.reset(OpRISCV64MULW)
2585 v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
2586 v0.AddArg(x)
2587 v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
2588 v1.AddArg(y)
2589 v.AddArg2(v0, v1)
2590 return true
2591 }
2592}
2593func rewriteValueRISCV64_OpNeq16(v *Value) bool {
2594 v_1 := v.Args[1]
2595 v_0 := v.Args[0]
2596 b := v.Block
2597 typ := &b.Func.Config.Types
2598 // match: (Neq16 x y)
Colin Cross1f805522021-05-14 11:10:59 -07002599 // result: (SNEZ (SUB <x.Type> (ZeroExt16to64 x) (ZeroExt16to64 y)))
Patrice Arruda748609c2020-06-25 12:12:21 -07002600 for {
2601 x := v_0
2602 y := v_1
2603 v.reset(OpRISCV64SNEZ)
Colin Cross1f805522021-05-14 11:10:59 -07002604 v0 := b.NewValue0(v.Pos, OpRISCV64SUB, x.Type)
2605 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
2606 v1.AddArg(x)
2607 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
2608 v2.AddArg(y)
2609 v0.AddArg2(v1, v2)
Patrice Arruda748609c2020-06-25 12:12:21 -07002610 v.AddArg(v0)
2611 return true
2612 }
2613}
2614func rewriteValueRISCV64_OpNeq32(v *Value) bool {
2615 v_1 := v.Args[1]
2616 v_0 := v.Args[0]
2617 b := v.Block
Dan Willemsencc753b72021-08-31 13:25:42 -07002618 typ := &b.Func.Config.Types
Patrice Arruda748609c2020-06-25 12:12:21 -07002619 // match: (Neq32 x y)
Dan Willemsencc753b72021-08-31 13:25:42 -07002620 // result: (SNEZ (SUB <x.Type> (ZeroExt32to64 x) (ZeroExt32to64 y)))
Patrice Arruda748609c2020-06-25 12:12:21 -07002621 for {
2622 x := v_0
2623 y := v_1
2624 v.reset(OpRISCV64SNEZ)
Dan Willemsencc753b72021-08-31 13:25:42 -07002625 v0 := b.NewValue0(v.Pos, OpRISCV64SUB, x.Type)
2626 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
2627 v1.AddArg(x)
2628 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
2629 v2.AddArg(y)
2630 v0.AddArg2(v1, v2)
Patrice Arruda748609c2020-06-25 12:12:21 -07002631 v.AddArg(v0)
2632 return true
2633 }
2634}
2635func rewriteValueRISCV64_OpNeq64(v *Value) bool {
2636 v_1 := v.Args[1]
2637 v_0 := v.Args[0]
2638 b := v.Block
2639 // match: (Neq64 x y)
2640 // result: (SNEZ (SUB <x.Type> x y))
2641 for {
2642 x := v_0
2643 y := v_1
2644 v.reset(OpRISCV64SNEZ)
2645 v0 := b.NewValue0(v.Pos, OpRISCV64SUB, x.Type)
2646 v0.AddArg2(x, y)
2647 v.AddArg(v0)
2648 return true
2649 }
2650}
2651func rewriteValueRISCV64_OpNeq8(v *Value) bool {
2652 v_1 := v.Args[1]
2653 v_0 := v.Args[0]
2654 b := v.Block
2655 typ := &b.Func.Config.Types
2656 // match: (Neq8 x y)
Colin Cross1f805522021-05-14 11:10:59 -07002657 // result: (SNEZ (SUB <x.Type> (ZeroExt8to64 x) (ZeroExt8to64 y)))
Patrice Arruda748609c2020-06-25 12:12:21 -07002658 for {
2659 x := v_0
2660 y := v_1
2661 v.reset(OpRISCV64SNEZ)
Colin Cross1f805522021-05-14 11:10:59 -07002662 v0 := b.NewValue0(v.Pos, OpRISCV64SUB, x.Type)
2663 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
2664 v1.AddArg(x)
2665 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
2666 v2.AddArg(y)
2667 v0.AddArg2(v1, v2)
Patrice Arruda748609c2020-06-25 12:12:21 -07002668 v.AddArg(v0)
2669 return true
2670 }
2671}
2672func rewriteValueRISCV64_OpNeqPtr(v *Value) bool {
2673 v_1 := v.Args[1]
2674 v_0 := v.Args[0]
2675 b := v.Block
2676 // match: (NeqPtr x y)
2677 // result: (SNEZ (SUB <x.Type> x y))
2678 for {
2679 x := v_0
2680 y := v_1
2681 v.reset(OpRISCV64SNEZ)
2682 v0 := b.NewValue0(v.Pos, OpRISCV64SUB, x.Type)
2683 v0.AddArg2(x, y)
2684 v.AddArg(v0)
2685 return true
2686 }
2687}
// rewriteValueRISCV64_OpOffPtr lowers (OffPtr [off] ptr).
// The three rules are ordered from most to least specific and the first
// match wins: an offset from SP becomes a MOVaddr, any other 32-bit
// offset becomes an ADDI immediate, and offsets that do not fit in 32
// bits fall back to materializing the offset in a register and adding.
func rewriteValueRISCV64_OpOffPtr(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (OffPtr [off] ptr:(SP))
	// cond: is32Bit(off)
	// result: (MOVaddr [int32(off)] ptr)
	for {
		off := auxIntToInt64(v.AuxInt)
		ptr := v_0
		if ptr.Op != OpSP || !(is32Bit(off)) {
			break
		}
		v.reset(OpRISCV64MOVaddr)
		v.AuxInt = int32ToAuxInt(int32(off))
		v.AddArg(ptr)
		return true
	}
	// match: (OffPtr [off] ptr)
	// cond: is32Bit(off)
	// result: (ADDI [off] ptr)
	for {
		off := auxIntToInt64(v.AuxInt)
		ptr := v_0
		if !(is32Bit(off)) {
			break
		}
		v.reset(OpRISCV64ADDI)
		v.AuxInt = int64ToAuxInt(off)
		v.AddArg(ptr)
		return true
	}
	// match: (OffPtr [off] ptr)
	// result: (ADD (MOVDconst [off]) ptr)
	// Unconditional fallback: load the 64-bit offset into a register.
	for {
		off := auxIntToInt64(v.AuxInt)
		ptr := v_0
		v.reset(OpRISCV64ADD)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(off)
		v.AddArg2(v0, ptr)
		return true
	}
}
// rewriteValueRISCV64_OpPanicBounds lowers a bounds-check panic to one of
// three arch-specific variants (A/B/C) selected by boundsABI(kind), which
// determines which register pair carries the index and length arguments.
func rewriteValueRISCV64_OpPanicBounds(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (PanicBounds [kind] x y mem)
	// cond: boundsABI(kind) == 0
	// result: (LoweredPanicBoundsA [kind] x y mem)
	for {
		kind := auxIntToInt64(v.AuxInt)
		x := v_0
		y := v_1
		mem := v_2
		if !(boundsABI(kind) == 0) {
			break
		}
		v.reset(OpRISCV64LoweredPanicBoundsA)
		v.AuxInt = int64ToAuxInt(kind)
		v.AddArg3(x, y, mem)
		return true
	}
	// match: (PanicBounds [kind] x y mem)
	// cond: boundsABI(kind) == 1
	// result: (LoweredPanicBoundsB [kind] x y mem)
	for {
		kind := auxIntToInt64(v.AuxInt)
		x := v_0
		y := v_1
		mem := v_2
		if !(boundsABI(kind) == 1) {
			break
		}
		v.reset(OpRISCV64LoweredPanicBoundsB)
		v.AuxInt = int64ToAuxInt(kind)
		v.AddArg3(x, y, mem)
		return true
	}
	// match: (PanicBounds [kind] x y mem)
	// cond: boundsABI(kind) == 2
	// result: (LoweredPanicBoundsC [kind] x y mem)
	for {
		kind := auxIntToInt64(v.AuxInt)
		x := v_0
		y := v_1
		mem := v_2
		if !(boundsABI(kind) == 2) {
			break
		}
		v.reset(OpRISCV64LoweredPanicBoundsC)
		v.AuxInt = int64ToAuxInt(kind)
		v.AddArg3(x, y, mem)
		return true
	}
	return false
}
2786func rewriteValueRISCV64_OpRISCV64ADD(v *Value) bool {
2787 v_1 := v.Args[1]
2788 v_0 := v.Args[0]
Patrice Arruda748609c2020-06-25 12:12:21 -07002789 // match: (ADD (MOVDconst [val]) x)
2790 // cond: is32Bit(val)
2791 // result: (ADDI [val] x)
2792 for {
2793 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
2794 if v_0.Op != OpRISCV64MOVDconst {
2795 continue
2796 }
2797 val := auxIntToInt64(v_0.AuxInt)
2798 x := v_1
2799 if !(is32Bit(val)) {
2800 continue
2801 }
2802 v.reset(OpRISCV64ADDI)
2803 v.AuxInt = int64ToAuxInt(val)
2804 v.AddArg(x)
2805 return true
2806 }
2807 break
2808 }
2809 return false
2810}
// rewriteValueRISCV64_OpRISCV64ADDI simplifies add-immediate: it folds the
// immediate into a MOVaddr offset, elides an add of zero, and constant-folds
// an add of two known constants. Rules are tried in order; first match wins.
func rewriteValueRISCV64_OpRISCV64ADDI(v *Value) bool {
	v_0 := v.Args[0]
	// match: (ADDI [c] (MOVaddr [d] {s} x))
	// cond: is32Bit(c+int64(d))
	// result: (MOVaddr [int32(c)+d] {s} x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		d := auxIntToInt32(v_0.AuxInt)
		s := auxToSym(v_0.Aux)
		x := v_0.Args[0]
		// The combined offset must still fit MOVaddr's 32-bit AuxInt.
		if !(is32Bit(c + int64(d))) {
			break
		}
		v.reset(OpRISCV64MOVaddr)
		v.AuxInt = int32ToAuxInt(int32(c) + d)
		v.Aux = symToAux(s)
		v.AddArg(x)
		return true
	}
	// match: (ADDI [0] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	// match: (ADDI [x] (MOVDconst [y]))
	// cond: is32Bit(x + y)
	// result: (MOVDconst [x + y])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		if !(is32Bit(x + y)) {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(x + y)
		return true
	}
	return false
}
2861func rewriteValueRISCV64_OpRISCV64AND(v *Value) bool {
2862 v_1 := v.Args[1]
2863 v_0 := v.Args[0]
Patrice Arruda748609c2020-06-25 12:12:21 -07002864 // match: (AND (MOVDconst [val]) x)
2865 // cond: is32Bit(val)
2866 // result: (ANDI [val] x)
2867 for {
2868 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
2869 if v_0.Op != OpRISCV64MOVDconst {
2870 continue
2871 }
2872 val := auxIntToInt64(v_0.AuxInt)
2873 x := v_1
2874 if !(is32Bit(val)) {
2875 continue
2876 }
2877 v.reset(OpRISCV64ANDI)
2878 v.AuxInt = int64ToAuxInt(val)
2879 v.AddArg(x)
2880 return true
2881 }
2882 break
2883 }
2884 return false
2885}
Dan Willemsenbc60c3c2021-12-15 01:09:00 -08002886func rewriteValueRISCV64_OpRISCV64ANDI(v *Value) bool {
2887 v_0 := v.Args[0]
2888 // match: (ANDI [0] x)
2889 // result: (MOVDconst [0])
2890 for {
2891 if auxIntToInt64(v.AuxInt) != 0 {
2892 break
2893 }
2894 v.reset(OpRISCV64MOVDconst)
2895 v.AuxInt = int64ToAuxInt(0)
2896 return true
2897 }
2898 // match: (ANDI [-1] x)
2899 // result: x
2900 for {
2901 if auxIntToInt64(v.AuxInt) != -1 {
2902 break
2903 }
2904 x := v_0
2905 v.copyOf(x)
2906 return true
2907 }
2908 // match: (ANDI [x] (MOVDconst [y]))
2909 // result: (MOVDconst [x & y])
2910 for {
2911 x := auxIntToInt64(v.AuxInt)
2912 if v_0.Op != OpRISCV64MOVDconst {
2913 break
2914 }
2915 y := auxIntToInt64(v_0.AuxInt)
2916 v.reset(OpRISCV64MOVDconst)
2917 v.AuxInt = int64ToAuxInt(x & y)
2918 return true
2919 }
2920 return false
2921}
// rewriteValueRISCV64_OpRISCV64FMADDD folds an FNEGD feeding a fused
// multiply-add into the negated FMA variants. The neg.Uses == 1 condition
// ensures the negate has no other consumers and can be absorbed.
func rewriteValueRISCV64_OpRISCV64FMADDD(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FMADDD neg:(FNEGD x) y z)
	// cond: neg.Uses == 1
	// result: (FNMADDD x y z)
	// The multiply operands commute, so v_0/v_1 are swapped on the
	// second iteration of the inner loop.
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			neg := v_0
			if neg.Op != OpRISCV64FNEGD {
				continue
			}
			x := neg.Args[0]
			y := v_1
			z := v_2
			if !(neg.Uses == 1) {
				continue
			}
			v.reset(OpRISCV64FNMADDD)
			v.AddArg3(x, y, z)
			return true
		}
		break
	}
	// match: (FMADDD x y neg:(FNEGD z))
	// cond: neg.Uses == 1
	// result: (FMSUBD x y z)
	for {
		x := v_0
		y := v_1
		neg := v_2
		if neg.Op != OpRISCV64FNEGD {
			break
		}
		z := neg.Args[0]
		if !(neg.Uses == 1) {
			break
		}
		v.reset(OpRISCV64FMSUBD)
		v.AddArg3(x, y, z)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FMSUBD folds an FNEGD feeding a fused
// multiply-subtract into the negated FMA variants. The neg.Uses == 1
// condition ensures the negate has no other consumers and can be absorbed.
func rewriteValueRISCV64_OpRISCV64FMSUBD(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FMSUBD neg:(FNEGD x) y z)
	// cond: neg.Uses == 1
	// result: (FNMSUBD x y z)
	// The multiply operands commute, so v_0/v_1 are swapped on the
	// second iteration of the inner loop.
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			neg := v_0
			if neg.Op != OpRISCV64FNEGD {
				continue
			}
			x := neg.Args[0]
			y := v_1
			z := v_2
			if !(neg.Uses == 1) {
				continue
			}
			v.reset(OpRISCV64FNMSUBD)
			v.AddArg3(x, y, z)
			return true
		}
		break
	}
	// match: (FMSUBD x y neg:(FNEGD z))
	// cond: neg.Uses == 1
	// result: (FMADDD x y z)
	for {
		x := v_0
		y := v_1
		neg := v_2
		if neg.Op != OpRISCV64FNEGD {
			break
		}
		z := neg.Args[0]
		if !(neg.Uses == 1) {
			break
		}
		v.reset(OpRISCV64FMADDD)
		v.AddArg3(x, y, z)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FNMADDD folds an FNEGD feeding a negated
// fused multiply-add back into the matching FMA variants. The
// neg.Uses == 1 condition ensures the negate can be absorbed.
func rewriteValueRISCV64_OpRISCV64FNMADDD(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FNMADDD neg:(FNEGD x) y z)
	// cond: neg.Uses == 1
	// result: (FMADDD x y z)
	// The multiply operands commute, so v_0/v_1 are swapped on the
	// second iteration of the inner loop.
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			neg := v_0
			if neg.Op != OpRISCV64FNEGD {
				continue
			}
			x := neg.Args[0]
			y := v_1
			z := v_2
			if !(neg.Uses == 1) {
				continue
			}
			v.reset(OpRISCV64FMADDD)
			v.AddArg3(x, y, z)
			return true
		}
		break
	}
	// match: (FNMADDD x y neg:(FNEGD z))
	// cond: neg.Uses == 1
	// result: (FNMSUBD x y z)
	for {
		x := v_0
		y := v_1
		neg := v_2
		if neg.Op != OpRISCV64FNEGD {
			break
		}
		z := neg.Args[0]
		if !(neg.Uses == 1) {
			break
		}
		v.reset(OpRISCV64FNMSUBD)
		v.AddArg3(x, y, z)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FNMSUBD folds an FNEGD feeding a negated
// fused multiply-subtract back into the matching FMA variants. The
// neg.Uses == 1 condition ensures the negate can be absorbed.
func rewriteValueRISCV64_OpRISCV64FNMSUBD(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FNMSUBD neg:(FNEGD x) y z)
	// cond: neg.Uses == 1
	// result: (FMSUBD x y z)
	// The multiply operands commute, so v_0/v_1 are swapped on the
	// second iteration of the inner loop.
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			neg := v_0
			if neg.Op != OpRISCV64FNEGD {
				continue
			}
			x := neg.Args[0]
			y := v_1
			z := v_2
			if !(neg.Uses == 1) {
				continue
			}
			v.reset(OpRISCV64FMSUBD)
			v.AddArg3(x, y, z)
			return true
		}
		break
	}
	// match: (FNMSUBD x y neg:(FNEGD z))
	// cond: neg.Uses == 1
	// result: (FNMADDD x y z)
	for {
		x := v_0
		y := v_1
		neg := v_2
		if neg.Op != OpRISCV64FNEGD {
			break
		}
		z := neg.Args[0]
		if !(neg.Uses == 1) {
			break
		}
		v.reset(OpRISCV64FNMADDD)
		v.AddArg3(x, y, z)
		return true
	}
	return false
}
Patrice Arruda748609c2020-06-25 12:12:21 -07003102func rewriteValueRISCV64_OpRISCV64MOVBUload(v *Value) bool {
3103 v_1 := v.Args[1]
3104 v_0 := v.Args[0]
3105 // match: (MOVBUload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
3106 // cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
Colin Cross1f805522021-05-14 11:10:59 -07003107 // result: (MOVBUload [off1+off2] {mergeSym(sym1,sym2)} base mem)
Patrice Arruda748609c2020-06-25 12:12:21 -07003108 for {
3109 off1 := auxIntToInt32(v.AuxInt)
3110 sym1 := auxToSym(v.Aux)
3111 if v_0.Op != OpRISCV64MOVaddr {
3112 break
3113 }
3114 off2 := auxIntToInt32(v_0.AuxInt)
3115 sym2 := auxToSym(v_0.Aux)
3116 base := v_0.Args[0]
3117 mem := v_1
3118 if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
3119 break
3120 }
3121 v.reset(OpRISCV64MOVBUload)
3122 v.AuxInt = int32ToAuxInt(off1 + off2)
Colin Cross1f805522021-05-14 11:10:59 -07003123 v.Aux = symToAux(mergeSym(sym1, sym2))
Patrice Arruda748609c2020-06-25 12:12:21 -07003124 v.AddArg2(base, mem)
3125 return true
3126 }
3127 // match: (MOVBUload [off1] {sym} (ADDI [off2] base) mem)
3128 // cond: is32Bit(int64(off1)+off2)
3129 // result: (MOVBUload [off1+int32(off2)] {sym} base mem)
3130 for {
3131 off1 := auxIntToInt32(v.AuxInt)
3132 sym := auxToSym(v.Aux)
3133 if v_0.Op != OpRISCV64ADDI {
3134 break
3135 }
3136 off2 := auxIntToInt64(v_0.AuxInt)
3137 base := v_0.Args[0]
3138 mem := v_1
3139 if !(is32Bit(int64(off1) + off2)) {
3140 break
3141 }
3142 v.reset(OpRISCV64MOVBUload)
3143 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3144 v.Aux = symToAux(sym)
3145 v.AddArg2(base, mem)
3146 return true
3147 }
3148 return false
3149}
Colin Cross1f805522021-05-14 11:10:59 -07003150func rewriteValueRISCV64_OpRISCV64MOVBUreg(v *Value) bool {
3151 v_0 := v.Args[0]
3152 b := v.Block
Dan Willemsencc753b72021-08-31 13:25:42 -07003153 // match: (MOVBUreg (MOVDconst [c]))
Colin Cross1f805522021-05-14 11:10:59 -07003154 // result: (MOVDconst [int64(uint8(c))])
3155 for {
Dan Willemsencc753b72021-08-31 13:25:42 -07003156 if v_0.Op != OpRISCV64MOVDconst {
Colin Cross1f805522021-05-14 11:10:59 -07003157 break
3158 }
Dan Willemsencc753b72021-08-31 13:25:42 -07003159 c := auxIntToInt64(v_0.AuxInt)
Colin Cross1f805522021-05-14 11:10:59 -07003160 v.reset(OpRISCV64MOVDconst)
3161 v.AuxInt = int64ToAuxInt(int64(uint8(c)))
3162 return true
3163 }
3164 // match: (MOVBUreg x:(MOVBUload _ _))
3165 // result: (MOVDreg x)
3166 for {
3167 x := v_0
3168 if x.Op != OpRISCV64MOVBUload {
3169 break
3170 }
3171 v.reset(OpRISCV64MOVDreg)
3172 v.AddArg(x)
3173 return true
3174 }
3175 // match: (MOVBUreg x:(MOVBUreg _))
3176 // result: (MOVDreg x)
3177 for {
3178 x := v_0
3179 if x.Op != OpRISCV64MOVBUreg {
3180 break
3181 }
3182 v.reset(OpRISCV64MOVDreg)
3183 v.AddArg(x)
3184 return true
3185 }
3186 // match: (MOVBUreg <t> x:(MOVBload [off] {sym} ptr mem))
3187 // cond: x.Uses == 1 && clobber(x)
3188 // result: @x.Block (MOVBUload <t> [off] {sym} ptr mem)
3189 for {
3190 t := v.Type
3191 x := v_0
3192 if x.Op != OpRISCV64MOVBload {
3193 break
3194 }
3195 off := auxIntToInt32(x.AuxInt)
3196 sym := auxToSym(x.Aux)
3197 mem := x.Args[1]
3198 ptr := x.Args[0]
3199 if !(x.Uses == 1 && clobber(x)) {
3200 break
3201 }
3202 b = x.Block
3203 v0 := b.NewValue0(x.Pos, OpRISCV64MOVBUload, t)
3204 v.copyOf(v0)
3205 v0.AuxInt = int32ToAuxInt(off)
3206 v0.Aux = symToAux(sym)
3207 v0.AddArg2(ptr, mem)
3208 return true
3209 }
3210 return false
3211}
Patrice Arruda748609c2020-06-25 12:12:21 -07003212func rewriteValueRISCV64_OpRISCV64MOVBload(v *Value) bool {
3213 v_1 := v.Args[1]
3214 v_0 := v.Args[0]
3215 // match: (MOVBload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
3216 // cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
Colin Cross1f805522021-05-14 11:10:59 -07003217 // result: (MOVBload [off1+off2] {mergeSym(sym1,sym2)} base mem)
Patrice Arruda748609c2020-06-25 12:12:21 -07003218 for {
3219 off1 := auxIntToInt32(v.AuxInt)
3220 sym1 := auxToSym(v.Aux)
3221 if v_0.Op != OpRISCV64MOVaddr {
3222 break
3223 }
3224 off2 := auxIntToInt32(v_0.AuxInt)
3225 sym2 := auxToSym(v_0.Aux)
3226 base := v_0.Args[0]
3227 mem := v_1
3228 if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
3229 break
3230 }
3231 v.reset(OpRISCV64MOVBload)
3232 v.AuxInt = int32ToAuxInt(off1 + off2)
Colin Cross1f805522021-05-14 11:10:59 -07003233 v.Aux = symToAux(mergeSym(sym1, sym2))
Patrice Arruda748609c2020-06-25 12:12:21 -07003234 v.AddArg2(base, mem)
3235 return true
3236 }
3237 // match: (MOVBload [off1] {sym} (ADDI [off2] base) mem)
3238 // cond: is32Bit(int64(off1)+off2)
3239 // result: (MOVBload [off1+int32(off2)] {sym} base mem)
3240 for {
3241 off1 := auxIntToInt32(v.AuxInt)
3242 sym := auxToSym(v.Aux)
3243 if v_0.Op != OpRISCV64ADDI {
3244 break
3245 }
3246 off2 := auxIntToInt64(v_0.AuxInt)
3247 base := v_0.Args[0]
3248 mem := v_1
3249 if !(is32Bit(int64(off1) + off2)) {
3250 break
3251 }
3252 v.reset(OpRISCV64MOVBload)
3253 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3254 v.Aux = symToAux(sym)
3255 v.AddArg2(base, mem)
3256 return true
3257 }
3258 return false
3259}
Colin Cross1f805522021-05-14 11:10:59 -07003260func rewriteValueRISCV64_OpRISCV64MOVBreg(v *Value) bool {
3261 v_0 := v.Args[0]
3262 b := v.Block
Dan Willemsencc753b72021-08-31 13:25:42 -07003263 // match: (MOVBreg (MOVDconst [c]))
3264 // result: (MOVDconst [int64(int8(c))])
Colin Cross1f805522021-05-14 11:10:59 -07003265 for {
Dan Willemsencc753b72021-08-31 13:25:42 -07003266 if v_0.Op != OpRISCV64MOVDconst {
Colin Cross1f805522021-05-14 11:10:59 -07003267 break
3268 }
Dan Willemsencc753b72021-08-31 13:25:42 -07003269 c := auxIntToInt64(v_0.AuxInt)
Colin Cross1f805522021-05-14 11:10:59 -07003270 v.reset(OpRISCV64MOVDconst)
Dan Willemsencc753b72021-08-31 13:25:42 -07003271 v.AuxInt = int64ToAuxInt(int64(int8(c)))
Colin Cross1f805522021-05-14 11:10:59 -07003272 return true
3273 }
3274 // match: (MOVBreg x:(MOVBload _ _))
3275 // result: (MOVDreg x)
3276 for {
3277 x := v_0
3278 if x.Op != OpRISCV64MOVBload {
3279 break
3280 }
3281 v.reset(OpRISCV64MOVDreg)
3282 v.AddArg(x)
3283 return true
3284 }
3285 // match: (MOVBreg x:(MOVBreg _))
3286 // result: (MOVDreg x)
3287 for {
3288 x := v_0
3289 if x.Op != OpRISCV64MOVBreg {
3290 break
3291 }
3292 v.reset(OpRISCV64MOVDreg)
3293 v.AddArg(x)
3294 return true
3295 }
3296 // match: (MOVBreg <t> x:(MOVBUload [off] {sym} ptr mem))
3297 // cond: x.Uses == 1 && clobber(x)
3298 // result: @x.Block (MOVBload <t> [off] {sym} ptr mem)
3299 for {
3300 t := v.Type
3301 x := v_0
3302 if x.Op != OpRISCV64MOVBUload {
3303 break
3304 }
3305 off := auxIntToInt32(x.AuxInt)
3306 sym := auxToSym(x.Aux)
3307 mem := x.Args[1]
3308 ptr := x.Args[0]
3309 if !(x.Uses == 1 && clobber(x)) {
3310 break
3311 }
3312 b = x.Block
3313 v0 := b.NewValue0(x.Pos, OpRISCV64MOVBload, t)
3314 v.copyOf(v0)
3315 v0.AuxInt = int32ToAuxInt(off)
3316 v0.Aux = symToAux(sym)
3317 v0.AddArg2(ptr, mem)
3318 return true
3319 }
3320 return false
3321}
// rewriteValueRISCV64_OpRISCV64MOVBstore rewrites MOVBstore (store byte)
// values: it folds MOVaddr/ADDI address arithmetic into the store's offset,
// turns a store of constant zero into MOVBstorezero, and drops a register
// extension of the stored value (a byte store only uses the low bits, so the
// extension is redundant here).
func rewriteValueRISCV64_OpRISCV64MOVBstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVBstore [off1] {sym1} (MOVaddr [off2] {sym2} base) val mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
	// result: (MOVBstore [off1+off2] {mergeSym(sym1,sym2)} base val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
			break
		}
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(base, val, mem)
		return true
	}
	// match: (MOVBstore [off1] {sym} (ADDI [off2] base) val mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVBstore [off1+int32(off2)] {sym} base val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(base, val, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVDconst [0]) mem)
	// result: (MOVBstorezero [off] {sym} ptr mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		mem := v_2
		v.reset(OpRISCV64MOVBstorezero)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVBreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVBreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVHreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVHreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVWreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVBUreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVBUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVHUreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVHUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVWUreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVBstorezero rewrites MOVBstorezero (store
// zero byte) values by folding MOVaddr/ADDI address arithmetic into the
// store's offset, provided the combined offset still fits in 32 bits.
func rewriteValueRISCV64_OpRISCV64MOVBstorezero(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVBstorezero [off1] {sym1} (MOVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2))
	// result: (MOVBstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
			break
		}
		v.reset(OpRISCV64MOVBstorezero)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBstorezero [off1] {sym} (ADDI [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVBstorezero [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVBstorezero)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVDload rewrites MOVDload (load doubleword)
// values by folding MOVaddr/ADDI address arithmetic into the load's offset,
// provided the combined offset still fits in 32 bits.
func rewriteValueRISCV64_OpRISCV64MOVDload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVDload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
	// result: (MOVDload [off1+off2] {mergeSym(sym1,sym2)} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
			break
		}
		v.reset(OpRISCV64MOVDload)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(base, mem)
		return true
	}
	// match: (MOVDload [off1] {sym} (ADDI [off2] base) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVDload [off1+int32(off2)] {sym} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVDload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(base, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVDnop rewrites MOVDnop values: a no-op move
// of a constant collapses directly to that constant.
func rewriteValueRISCV64_OpRISCV64MOVDnop(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVDnop (MOVDconst [c]))
	// result: (MOVDconst [c])
	for {
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(c)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVDreg rewrites MOVDreg values: when the
// operand has exactly one use, the register move is downgraded to MOVDnop.
func rewriteValueRISCV64_OpRISCV64MOVDreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVDreg x)
	// cond: x.Uses == 1
	// result: (MOVDnop x)
	for {
		x := v_0
		if !(x.Uses == 1) {
			break
		}
		v.reset(OpRISCV64MOVDnop)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVDstore rewrites MOVDstore (store
// doubleword) values: it folds MOVaddr/ADDI address arithmetic into the
// store's offset and turns a store of constant zero into MOVDstorezero.
func rewriteValueRISCV64_OpRISCV64MOVDstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVDstore [off1] {sym1} (MOVaddr [off2] {sym2} base) val mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
	// result: (MOVDstore [off1+off2] {mergeSym(sym1,sym2)} base val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
			break
		}
		v.reset(OpRISCV64MOVDstore)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(base, val, mem)
		return true
	}
	// match: (MOVDstore [off1] {sym} (ADDI [off2] base) val mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVDstore [off1+int32(off2)] {sym} base val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVDstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(base, val, mem)
		return true
	}
	// match: (MOVDstore [off] {sym} ptr (MOVDconst [0]) mem)
	// result: (MOVDstorezero [off] {sym} ptr mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		mem := v_2
		v.reset(OpRISCV64MOVDstorezero)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVDstorezero rewrites MOVDstorezero (store
// zero doubleword) values by folding MOVaddr/ADDI address arithmetic into the
// store's offset, provided the combined offset still fits in 32 bits.
func rewriteValueRISCV64_OpRISCV64MOVDstorezero(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVDstorezero [off1] {sym1} (MOVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2))
	// result: (MOVDstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
			break
		}
		v.reset(OpRISCV64MOVDstorezero)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVDstorezero [off1] {sym} (ADDI [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVDstorezero [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVDstorezero)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVHUload rewrites MOVHUload (load unsigned
// halfword) values by folding MOVaddr/ADDI address arithmetic into the load's
// offset, provided the combined offset still fits in 32 bits.
func rewriteValueRISCV64_OpRISCV64MOVHUload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVHUload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
	// result: (MOVHUload [off1+off2] {mergeSym(sym1,sym2)} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
			break
		}
		v.reset(OpRISCV64MOVHUload)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(base, mem)
		return true
	}
	// match: (MOVHUload [off1] {sym} (ADDI [off2] base) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVHUload [off1+int32(off2)] {sym} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVHUload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(base, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVHUreg rewrites MOVHUreg (zero-extend
// halfword) values: it constant-folds a MOVDconst operand, drops the
// extension when the operand is already zero-extended to at most 16 bits,
// and converts a single-use signed halfword load feeding the zero extension
// into an unsigned load placed in the load's original block.
func rewriteValueRISCV64_OpRISCV64MOVHUreg(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (MOVHUreg (MOVDconst [c]))
	// result: (MOVDconst [int64(uint16(c))])
	for {
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(uint16(c)))
		return true
	}
	// match: (MOVHUreg x:(MOVBUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBUload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVHUload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVBUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBUreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVHUreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg <t> x:(MOVHload [off] {sym} ptr mem))
	// cond: x.Uses == 1 && clobber(x)
	// result: @x.Block (MOVHUload <t> [off] {sym} ptr mem)
	for {
		t := v.Type
		x := v_0
		if x.Op != OpRISCV64MOVHload {
			break
		}
		off := auxIntToInt32(x.AuxInt)
		sym := auxToSym(x.Aux)
		mem := x.Args[1]
		ptr := x.Args[0]
		if !(x.Uses == 1 && clobber(x)) {
			break
		}
		// The replacement load must live in the original load's block.
		b = x.Block
		v0 := b.NewValue0(x.Pos, OpRISCV64MOVHUload, t)
		v.copyOf(v0)
		v0.AuxInt = int32ToAuxInt(off)
		v0.Aux = symToAux(sym)
		v0.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVHload rewrites MOVHload (load signed
// halfword) values by folding MOVaddr/ADDI address arithmetic into the load's
// offset, provided the combined offset still fits in 32 bits.
func rewriteValueRISCV64_OpRISCV64MOVHload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVHload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
	// result: (MOVHload [off1+off2] {mergeSym(sym1,sym2)} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
			break
		}
		v.reset(OpRISCV64MOVHload)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(base, mem)
		return true
	}
	// match: (MOVHload [off1] {sym} (ADDI [off2] base) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVHload [off1+int32(off2)] {sym} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVHload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(base, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVHreg rewrites MOVHreg (sign-extend
// halfword) values: it constant-folds a MOVDconst operand, drops the
// extension when the operand already fits in a sign-extended halfword, and
// converts a single-use unsigned halfword load feeding the sign extension
// into a signed load placed in the load's original block.
func rewriteValueRISCV64_OpRISCV64MOVHreg(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (MOVHreg (MOVDconst [c]))
	// result: (MOVDconst [int64(int16(c))])
	for {
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(int16(c)))
		return true
	}
	// match: (MOVHreg x:(MOVBload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBUload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVHload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBUreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVHreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg <t> x:(MOVHUload [off] {sym} ptr mem))
	// cond: x.Uses == 1 && clobber(x)
	// result: @x.Block (MOVHload <t> [off] {sym} ptr mem)
	for {
		t := v.Type
		x := v_0
		if x.Op != OpRISCV64MOVHUload {
			break
		}
		off := auxIntToInt32(x.AuxInt)
		sym := auxToSym(x.Aux)
		mem := x.Args[1]
		ptr := x.Args[0]
		if !(x.Uses == 1 && clobber(x)) {
			break
		}
		// The replacement load must live in the original load's block.
		b = x.Block
		v0 := b.NewValue0(x.Pos, OpRISCV64MOVHload, t)
		v.copyOf(v0)
		v0.AuxInt = int32ToAuxInt(off)
		v0.Aux = symToAux(sym)
		v0.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVHstore rewrites MOVHstore (store halfword)
// values: it folds MOVaddr/ADDI address arithmetic into the store's offset,
// turns a store of constant zero into MOVHstorezero, and drops a register
// extension of the stored value when the extension is at least halfword-wide
// (a halfword store only uses the low bits).
func rewriteValueRISCV64_OpRISCV64MOVHstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVHstore [off1] {sym1} (MOVaddr [off2] {sym2} base) val mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
	// result: (MOVHstore [off1+off2] {mergeSym(sym1,sym2)} base val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
			break
		}
		v.reset(OpRISCV64MOVHstore)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(base, val, mem)
		return true
	}
	// match: (MOVHstore [off1] {sym} (ADDI [off2] base) val mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVHstore [off1+int32(off2)] {sym} base val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVHstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(base, val, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVDconst [0]) mem)
	// result: (MOVHstorezero [off] {sym} ptr mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		mem := v_2
		v.reset(OpRISCV64MOVHstorezero)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVHreg x) mem)
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVHreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVHstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVWreg x) mem)
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVHstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVHUreg x) mem)
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVHUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVHstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVWUreg x) mem)
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVHstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVHstorezero rewrites MOVHstorezero (store
// zero halfword) values by folding MOVaddr/ADDI address arithmetic into the
// store's offset, provided the combined offset still fits in 32 bits.
func rewriteValueRISCV64_OpRISCV64MOVHstorezero(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVHstorezero [off1] {sym1} (MOVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2))
	// result: (MOVHstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
			break
		}
		v.reset(OpRISCV64MOVHstorezero)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHstorezero [off1] {sym} (ADDI [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVHstorezero [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVHstorezero)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVWUload rewrites MOVWUload (load unsigned
// word) values by folding MOVaddr/ADDI address arithmetic into the load's
// offset, provided the combined offset still fits in 32 bits.
func rewriteValueRISCV64_OpRISCV64MOVWUload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVWUload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
	// result: (MOVWUload [off1+off2] {mergeSym(sym1,sym2)} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
			break
		}
		v.reset(OpRISCV64MOVWUload)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(base, mem)
		return true
	}
	// match: (MOVWUload [off1] {sym} (ADDI [off2] base) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVWUload [off1+int32(off2)] {sym} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVWUload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(base, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVWUreg rewrites MOVWUreg (zero-extend word)
// values: it constant-folds a MOVDconst operand, drops the extension when the
// operand is already zero-extended to at most 32 bits, and converts a
// single-use signed word load feeding the zero extension into an unsigned
// load placed in the load's original block.
func rewriteValueRISCV64_OpRISCV64MOVWUreg(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (MOVWUreg (MOVDconst [c]))
	// result: (MOVDconst [int64(uint32(c))])
	for {
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(uint32(c)))
		return true
	}
	// match: (MOVWUreg x:(MOVBUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBUload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVHUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVHUload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVWUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVWUload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVBUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBUreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVHUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVHUreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVWUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVWUreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg <t> x:(MOVWload [off] {sym} ptr mem))
	// cond: x.Uses == 1 && clobber(x)
	// result: @x.Block (MOVWUload <t> [off] {sym} ptr mem)
	for {
		t := v.Type
		x := v_0
		if x.Op != OpRISCV64MOVWload {
			break
		}
		off := auxIntToInt32(x.AuxInt)
		sym := auxToSym(x.Aux)
		mem := x.Args[1]
		ptr := x.Args[0]
		if !(x.Uses == 1 && clobber(x)) {
			break
		}
		// The replacement load must live in the original load's block.
		b = x.Block
		v0 := b.NewValue0(x.Pos, OpRISCV64MOVWUload, t)
		v.copyOf(v0)
		v0.AuxInt = int32ToAuxInt(off)
		v0.Aux = symToAux(sym)
		v0.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVWload applies the RISCV64 rewrite rules for
// MOVWload (load a sign-extended 32-bit word): constant address arithmetic
// from a MOVaddr or ADDI feeding the address is folded into the load's own
// offset, provided the combined offset still passes is32Bit.
func rewriteValueRISCV64_OpRISCV64MOVWload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVWload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
	// result: (MOVWload [off1+off2] {mergeSym(sym1,sym2)} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
			break
		}
		v.reset(OpRISCV64MOVWload)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(base, mem)
		return true
	}
	// match: (MOVWload [off1] {sym} (ADDI [off2] base) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVWload [off1+int32(off2)] {sym} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVWload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(base, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVWreg applies the RISCV64 rewrite rules for
// MOVWreg (sign-extend the low 32 bits of a register to 64 bits). It
// constant-folds the extension, replaces it with a plain register move
// (MOVDreg) when the argument already fits — any load or extension of 32
// bits or fewer whose result the sign-extension cannot change — and folds it
// into a preceding single-use unsigned word load by turning that load into a
// signed one. Rules are tried in source order; the first match wins.
func rewriteValueRISCV64_OpRISCV64MOVWreg(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (MOVWreg (MOVDconst [c]))
	// result: (MOVDconst [int64(int32(c))])
	for {
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(int32(c)))
		return true
	}
	// match: (MOVWreg x:(MOVBload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBUload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVHload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVHUload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVWload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVWload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBUreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVHreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVWreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVWreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg <t> x:(MOVWUload [off] {sym} ptr mem))
	// cond: x.Uses == 1 && clobber(x)
	// result: @x.Block (MOVWload <t> [off] {sym} ptr mem)
	for {
		t := v.Type
		x := v_0
		if x.Op != OpRISCV64MOVWUload {
			break
		}
		off := auxIntToInt32(x.AuxInt)
		sym := auxToSym(x.Aux)
		mem := x.Args[1]
		ptr := x.Args[0]
		if !(x.Uses == 1 && clobber(x)) {
			break
		}
		// Build the replacement in the original load's block (@x.Block rule).
		b = x.Block
		v0 := b.NewValue0(x.Pos, OpRISCV64MOVWload, t)
		v.copyOf(v0)
		v0.AuxInt = int32ToAuxInt(off)
		v0.Aux = symToAux(sym)
		v0.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVWstore applies the RISCV64 rewrite rules
// for MOVWstore (store the low 32 bits): it folds constant address
// arithmetic (MOVaddr, ADDI) into the store offset, rewrites a store of
// constant zero into MOVWstorezero, and strips a MOVWreg/MOVWUreg on the
// stored value (only the low 32 bits are stored, so the extension is
// irrelevant). Rules are tried in source order.
func rewriteValueRISCV64_OpRISCV64MOVWstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVWstore [off1] {sym1} (MOVaddr [off2] {sym2} base) val mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
	// result: (MOVWstore [off1+off2] {mergeSym(sym1,sym2)} base val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
			break
		}
		v.reset(OpRISCV64MOVWstore)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(base, val, mem)
		return true
	}
	// match: (MOVWstore [off1] {sym} (ADDI [off2] base) val mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVWstore [off1+int32(off2)] {sym} base val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVWstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(base, val, mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVDconst [0]) mem)
	// result: (MOVWstorezero [off] {sym} ptr mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		mem := v_2
		v.reset(OpRISCV64MOVWstorezero)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVWreg x) mem)
	// result: (MOVWstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVWstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVWUreg x) mem)
	// result: (MOVWstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVWstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVWstorezero applies the RISCV64 rewrite
// rules for MOVWstorezero (store a 32-bit zero): constant address arithmetic
// from a MOVaddr or ADDI feeding the address is folded into the store's own
// offset when the combined offset still passes is32Bit.
func rewriteValueRISCV64_OpRISCV64MOVWstorezero(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVWstorezero [off1] {sym1} (MOVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2))
	// result: (MOVWstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
			break
		}
		v.reset(OpRISCV64MOVWstorezero)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWstorezero [off1] {sym} (ADDI [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVWstorezero [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVWstorezero)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
Dan Willemsenbc60c3c2021-12-15 01:09:00 -08004692func rewriteValueRISCV64_OpRISCV64NEG(v *Value) bool {
4693 v_0 := v.Args[0]
4694 // match: (NEG (MOVDconst [x]))
4695 // result: (MOVDconst [-x])
4696 for {
4697 if v_0.Op != OpRISCV64MOVDconst {
4698 break
4699 }
4700 x := auxIntToInt64(v_0.AuxInt)
4701 v.reset(OpRISCV64MOVDconst)
4702 v.AuxInt = int64ToAuxInt(-x)
4703 return true
4704 }
4705 return false
4706}
4707func rewriteValueRISCV64_OpRISCV64NEGW(v *Value) bool {
4708 v_0 := v.Args[0]
4709 // match: (NEGW (MOVDconst [x]))
4710 // result: (MOVDconst [int64(int32(-x))])
4711 for {
4712 if v_0.Op != OpRISCV64MOVDconst {
4713 break
4714 }
4715 x := auxIntToInt64(v_0.AuxInt)
4716 v.reset(OpRISCV64MOVDconst)
4717 v.AuxInt = int64ToAuxInt(int64(int32(-x)))
4718 return true
4719 }
4720 return false
4721}
// rewriteValueRISCV64_OpRISCV64OR applies the RISCV64 rewrite rules for OR:
// an OR with a constant that fits in 32 bits (per is32Bit) is rewritten to
// the immediate form ORI. OR is commutative, so the inner loop tries both
// argument orders by swapping v_0 and v_1 on the second iteration.
func rewriteValueRISCV64_OpRISCV64OR(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (OR (MOVDconst [val]) x)
	// cond: is32Bit(val)
	// result: (ORI [val] x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpRISCV64MOVDconst {
				continue
			}
			val := auxIntToInt64(v_0.AuxInt)
			x := v_1
			if !(is32Bit(val)) {
				continue
			}
			v.reset(OpRISCV64ORI)
			v.AuxInt = int64ToAuxInt(val)
			v.AddArg(x)
			return true
		}
		break
	}
	return false
}
Dan Willemsenbc60c3c2021-12-15 01:09:00 -08004747func rewriteValueRISCV64_OpRISCV64ORI(v *Value) bool {
4748 v_0 := v.Args[0]
4749 // match: (ORI [0] x)
4750 // result: x
4751 for {
4752 if auxIntToInt64(v.AuxInt) != 0 {
4753 break
4754 }
4755 x := v_0
4756 v.copyOf(x)
4757 return true
4758 }
4759 // match: (ORI [-1] x)
4760 // result: (MOVDconst [-1])
4761 for {
4762 if auxIntToInt64(v.AuxInt) != -1 {
4763 break
4764 }
4765 v.reset(OpRISCV64MOVDconst)
4766 v.AuxInt = int64ToAuxInt(-1)
4767 return true
4768 }
4769 // match: (ORI [x] (MOVDconst [y]))
4770 // result: (MOVDconst [x | y])
4771 for {
4772 x := auxIntToInt64(v.AuxInt)
4773 if v_0.Op != OpRISCV64MOVDconst {
4774 break
4775 }
4776 y := auxIntToInt64(v_0.AuxInt)
4777 v.reset(OpRISCV64MOVDconst)
4778 v.AuxInt = int64ToAuxInt(x | y)
4779 return true
4780 }
4781 return false
4782}
Patrice Arruda748609c2020-06-25 12:12:21 -07004783func rewriteValueRISCV64_OpRISCV64SLL(v *Value) bool {
4784 v_1 := v.Args[1]
4785 v_0 := v.Args[0]
Patrice Arruda748609c2020-06-25 12:12:21 -07004786 // match: (SLL x (MOVDconst [val]))
4787 // result: (SLLI [int64(val&63)] x)
4788 for {
4789 x := v_0
4790 if v_1.Op != OpRISCV64MOVDconst {
4791 break
4792 }
4793 val := auxIntToInt64(v_1.AuxInt)
4794 v.reset(OpRISCV64SLLI)
4795 v.AuxInt = int64ToAuxInt(int64(val & 63))
4796 v.AddArg(x)
4797 return true
4798 }
4799 return false
4800}
Dan Willemsenbc60c3c2021-12-15 01:09:00 -08004801func rewriteValueRISCV64_OpRISCV64SLLI(v *Value) bool {
4802 v_0 := v.Args[0]
4803 // match: (SLLI [x] (MOVDconst [y]))
4804 // cond: is32Bit(y << x)
4805 // result: (MOVDconst [y << x])
4806 for {
4807 x := auxIntToInt64(v.AuxInt)
4808 if v_0.Op != OpRISCV64MOVDconst {
4809 break
4810 }
4811 y := auxIntToInt64(v_0.AuxInt)
4812 if !(is32Bit(y << x)) {
4813 break
4814 }
4815 v.reset(OpRISCV64MOVDconst)
4816 v.AuxInt = int64ToAuxInt(y << x)
4817 return true
4818 }
4819 return false
4820}
4821func rewriteValueRISCV64_OpRISCV64SLTI(v *Value) bool {
4822 v_0 := v.Args[0]
4823 // match: (SLTI [x] (MOVDconst [y]))
4824 // result: (MOVDconst [b2i(int64(y) < int64(x))])
4825 for {
4826 x := auxIntToInt64(v.AuxInt)
4827 if v_0.Op != OpRISCV64MOVDconst {
4828 break
4829 }
4830 y := auxIntToInt64(v_0.AuxInt)
4831 v.reset(OpRISCV64MOVDconst)
4832 v.AuxInt = int64ToAuxInt(b2i(int64(y) < int64(x)))
4833 return true
4834 }
4835 return false
4836}
4837func rewriteValueRISCV64_OpRISCV64SLTIU(v *Value) bool {
4838 v_0 := v.Args[0]
4839 // match: (SLTIU [x] (MOVDconst [y]))
4840 // result: (MOVDconst [b2i(uint64(y) < uint64(x))])
4841 for {
4842 x := auxIntToInt64(v.AuxInt)
4843 if v_0.Op != OpRISCV64MOVDconst {
4844 break
4845 }
4846 y := auxIntToInt64(v_0.AuxInt)
4847 v.reset(OpRISCV64MOVDconst)
4848 v.AuxInt = int64ToAuxInt(b2i(uint64(y) < uint64(x)))
4849 return true
4850 }
4851 return false
4852}
Patrice Arruda748609c2020-06-25 12:12:21 -07004853func rewriteValueRISCV64_OpRISCV64SRA(v *Value) bool {
4854 v_1 := v.Args[1]
4855 v_0 := v.Args[0]
Patrice Arruda748609c2020-06-25 12:12:21 -07004856 // match: (SRA x (MOVDconst [val]))
4857 // result: (SRAI [int64(val&63)] x)
4858 for {
4859 x := v_0
4860 if v_1.Op != OpRISCV64MOVDconst {
4861 break
4862 }
4863 val := auxIntToInt64(v_1.AuxInt)
4864 v.reset(OpRISCV64SRAI)
4865 v.AuxInt = int64ToAuxInt(int64(val & 63))
4866 v.AddArg(x)
4867 return true
4868 }
4869 return false
4870}
Dan Willemsenbc60c3c2021-12-15 01:09:00 -08004871func rewriteValueRISCV64_OpRISCV64SRAI(v *Value) bool {
4872 v_0 := v.Args[0]
4873 // match: (SRAI [x] (MOVDconst [y]))
4874 // result: (MOVDconst [int64(y) >> x])
4875 for {
4876 x := auxIntToInt64(v.AuxInt)
4877 if v_0.Op != OpRISCV64MOVDconst {
4878 break
4879 }
4880 y := auxIntToInt64(v_0.AuxInt)
4881 v.reset(OpRISCV64MOVDconst)
4882 v.AuxInt = int64ToAuxInt(int64(y) >> x)
4883 return true
4884 }
4885 return false
4886}
Patrice Arruda748609c2020-06-25 12:12:21 -07004887func rewriteValueRISCV64_OpRISCV64SRL(v *Value) bool {
4888 v_1 := v.Args[1]
4889 v_0 := v.Args[0]
Patrice Arruda748609c2020-06-25 12:12:21 -07004890 // match: (SRL x (MOVDconst [val]))
4891 // result: (SRLI [int64(val&63)] x)
4892 for {
4893 x := v_0
4894 if v_1.Op != OpRISCV64MOVDconst {
4895 break
4896 }
4897 val := auxIntToInt64(v_1.AuxInt)
4898 v.reset(OpRISCV64SRLI)
4899 v.AuxInt = int64ToAuxInt(int64(val & 63))
4900 v.AddArg(x)
4901 return true
4902 }
4903 return false
4904}
Dan Willemsenbc60c3c2021-12-15 01:09:00 -08004905func rewriteValueRISCV64_OpRISCV64SRLI(v *Value) bool {
4906 v_0 := v.Args[0]
4907 // match: (SRLI [x] (MOVDconst [y]))
4908 // result: (MOVDconst [int64(uint64(y) >> x)])
4909 for {
4910 x := auxIntToInt64(v.AuxInt)
4911 if v_0.Op != OpRISCV64MOVDconst {
4912 break
4913 }
4914 y := auxIntToInt64(v_0.AuxInt)
4915 v.reset(OpRISCV64MOVDconst)
4916 v.AuxInt = int64ToAuxInt(int64(uint64(y) >> x))
4917 return true
4918 }
4919 return false
4920}
// rewriteValueRISCV64_OpRISCV64SUB applies the RISCV64 rewrite rules for
// SUB: subtracting a constant becomes an ADDI of its negation when -val
// fits in 32 bits, subtracting zero is the identity, and subtracting from
// zero is a negation. Rules are tried in source order.
func rewriteValueRISCV64_OpRISCV64SUB(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SUB x (MOVDconst [val]))
	// cond: is32Bit(-val)
	// result: (ADDI [-val] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_1.AuxInt)
		if !(is32Bit(-val)) {
			break
		}
		v.reset(OpRISCV64ADDI)
		v.AuxInt = int64ToAuxInt(-val)
		v.AddArg(x)
		return true
	}
	// match: (SUB x (MOVDconst [0]))
	// result: x
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (SUB (MOVDconst [0]) x)
	// result: (NEG x)
	for {
		if v_0.Op != OpRISCV64MOVDconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		x := v_1
		v.reset(OpRISCV64NEG)
		v.AddArg(x)
		return true
	}
	return false
}
4964func rewriteValueRISCV64_OpRISCV64SUBW(v *Value) bool {
4965 v_1 := v.Args[1]
4966 v_0 := v.Args[0]
Dan Willemsencc753b72021-08-31 13:25:42 -07004967 // match: (SUBW x (MOVDconst [0]))
Patrice Arruda748609c2020-06-25 12:12:21 -07004968 // result: (ADDIW [0] x)
4969 for {
4970 x := v_0
Dan Willemsencc753b72021-08-31 13:25:42 -07004971 if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
Patrice Arruda748609c2020-06-25 12:12:21 -07004972 break
4973 }
4974 v.reset(OpRISCV64ADDIW)
4975 v.AuxInt = int64ToAuxInt(0)
4976 v.AddArg(x)
4977 return true
4978 }
4979 // match: (SUBW (MOVDconst [0]) x)
4980 // result: (NEGW x)
4981 for {
4982 if v_0.Op != OpRISCV64MOVDconst || auxIntToInt64(v_0.AuxInt) != 0 {
4983 break
4984 }
4985 x := v_1
4986 v.reset(OpRISCV64NEGW)
4987 v.AddArg(x)
4988 return true
4989 }
4990 return false
4991}
// rewriteValueRISCV64_OpRISCV64XOR applies the RISCV64 rewrite rules for
// XOR: an XOR with a constant that fits in 32 bits (per is32Bit) is
// rewritten to the immediate form XORI. XOR is commutative, so the inner
// loop tries both argument orders by swapping v_0 and v_1 on the second
// iteration.
func rewriteValueRISCV64_OpRISCV64XOR(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (XOR (MOVDconst [val]) x)
	// cond: is32Bit(val)
	// result: (XORI [val] x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpRISCV64MOVDconst {
				continue
			}
			val := auxIntToInt64(v_0.AuxInt)
			x := v_1
			if !(is32Bit(val)) {
				continue
			}
			v.reset(OpRISCV64XORI)
			v.AuxInt = int64ToAuxInt(val)
			v.AddArg(x)
			return true
		}
		break
	}
	return false
}
// rewriteValueRISCV64_OpRotateLeft16 lowers a 16-bit rotate by a constant c
// into a shift pair: (x << (c&15)) | (x >> (-c&15)). Only constant rotate
// amounts are handled; variable rotates do not match and return false.
func rewriteValueRISCV64_OpRotateLeft16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (RotateLeft16 <t> x (MOVDconst [c]))
	// result: (Or16 (Lsh16x64 <t> x (MOVDconst [c&15])) (Rsh16Ux64 <t> x (MOVDconst [-c&15])))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpOr16)
		v0 := b.NewValue0(v.Pos, OpLsh16x64, t)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(c & 15)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpRsh16Ux64, t)
		v3 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(-c & 15)
		v2.AddArg2(x, v3)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRotateLeft32 lowers a 32-bit rotate by a constant c
// into a shift pair: (x << (c&31)) | (x >> (-c&31)). Only constant rotate
// amounts are handled; variable rotates do not match and return false.
func rewriteValueRISCV64_OpRotateLeft32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (RotateLeft32 <t> x (MOVDconst [c]))
	// result: (Or32 (Lsh32x64 <t> x (MOVDconst [c&31])) (Rsh32Ux64 <t> x (MOVDconst [-c&31])))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpOr32)
		v0 := b.NewValue0(v.Pos, OpLsh32x64, t)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(c & 31)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpRsh32Ux64, t)
		v3 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(-c & 31)
		v2.AddArg2(x, v3)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRotateLeft64 lowers a 64-bit rotate by a constant c
// into a shift pair: (x << (c&63)) | (x >> (-c&63)). Only constant rotate
// amounts are handled; variable rotates do not match and return false.
func rewriteValueRISCV64_OpRotateLeft64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (RotateLeft64 <t> x (MOVDconst [c]))
	// result: (Or64 (Lsh64x64 <t> x (MOVDconst [c&63])) (Rsh64Ux64 <t> x (MOVDconst [-c&63])))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpOr64)
		v0 := b.NewValue0(v.Pos, OpLsh64x64, t)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(c & 63)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpRsh64Ux64, t)
		v3 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(-c & 63)
		v2.AddArg2(x, v3)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRotateLeft8 lowers an 8-bit rotate by a constant c
// into a shift pair: (x << (c&7)) | (x >> (-c&7)). Only constant rotate
// amounts are handled; variable rotates do not match and return false.
func rewriteValueRISCV64_OpRotateLeft8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (RotateLeft8 <t> x (MOVDconst [c]))
	// result: (Or8 (Lsh8x64 <t> x (MOVDconst [c&7])) (Rsh8Ux64 <t> x (MOVDconst [-c&7])))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpOr8)
		v0 := b.NewValue0(v.Pos, OpLsh8x64, t)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(c & 7)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpRsh8Ux64, t)
		v3 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(-c & 7)
		v2.AddArg2(x, v3)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh16Ux16 lowers an unsigned right shift of a
// 16-bit value by a 16-bit amount: x is zero-extended to 64 bits and
// shifted with SRL, then ANDed with Neg16(SLTIU [64] zext(y)) — a mask that
// is all ones when the zero-extended shift amount is below 64 and zero
// otherwise, so oversized shifts produce 0. The rule always matches.
func rewriteValueRISCV64_OpRsh16Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux16 <t> x y)
	// result: (AND (SRL <t> (ZeroExt16to64 x) y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpNeg16, t)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v.AddArg2(v0, v2)
		return true
	}
}
// rewriteValueRISCV64_OpRsh16Ux32 lowers an unsigned right shift of a
// 16-bit value by a 32-bit amount: x is zero-extended to 64 bits and
// shifted with SRL, then ANDed with Neg16(SLTIU [64] zext32(y)) — a mask
// that is all ones when the zero-extended shift amount is below 64 and zero
// otherwise, so oversized shifts produce 0. The rule always matches.
func rewriteValueRISCV64_OpRsh16Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux32 <t> x y)
	// result: (AND (SRL <t> (ZeroExt16to64 x) y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpNeg16, t)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v.AddArg2(v0, v2)
		return true
	}
}
// rewriteValueRISCV64_OpRsh16Ux64 lowers an unsigned right shift of a
// 16-bit value by a 64-bit amount: x is zero-extended to 64 bits and
// shifted with SRL, then ANDed with Neg16(SLTIU [64] y) — a mask that is
// all ones when the shift amount is below 64 and zero otherwise, so
// oversized shifts produce 0. No extension of y is needed at 64 bits.
// The rule always matches.
func rewriteValueRISCV64_OpRsh16Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux64 <t> x y)
	// result: (AND (SRL <t> (ZeroExt16to64 x) y) (Neg16 <t> (SLTIU <t> [64] y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpNeg16, t)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v3.AuxInt = int64ToAuxInt(64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v.AddArg2(v0, v2)
		return true
	}
}
// rewriteValueRISCV64_OpRsh16Ux8 lowers an unsigned right shift of a 16-bit
// value by an 8-bit amount: x is zero-extended to 64 bits and shifted with
// SRL, then ANDed with Neg16(SLTIU [64] zext8(y)) — a mask that is all ones
// when the zero-extended shift amount is below 64 and zero otherwise, so
// oversized shifts produce 0. The rule always matches.
func rewriteValueRISCV64_OpRsh16Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux8 <t> x y)
	// result: (AND (SRL <t> (ZeroExt16to64 x) y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpNeg16, t)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v.AddArg2(v0, v2)
		return true
	}
}
// rewriteValueRISCV64_OpRsh16x16 lowers an arithmetic (signed) right shift
// of a 16-bit value by a 16-bit amount: x is sign-extended to 64 bits, and
// the shift amount is ORed with ADDI [-1] (SLTIU [64] zext(y)) — 0 when the
// amount is below 64, all ones otherwise — so oversized shifts behave like
// a maximal shift and every result bit is a copy of the sign bit.
// The rule always matches.
func rewriteValueRISCV64_OpRsh16x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x16 <t> x y)
	// result: (SRA <t> (SignExt16to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt16to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v2.AuxInt = int64ToAuxInt(-1)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v1.AddArg2(y, v2)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpRsh16x32 lowers an arithmetic (signed) right shift
// of a 16-bit value by a 32-bit amount: x is sign-extended to 64 bits, and
// the shift amount is ORed with ADDI [-1] (SLTIU [64] zext32(y)) — 0 when
// the amount is below 64, all ones otherwise — so oversized shifts behave
// like a maximal shift and every result bit is a copy of the sign bit.
// The rule always matches.
func rewriteValueRISCV64_OpRsh16x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x32 <t> x y)
	// result: (SRA <t> (SignExt16to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt32to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v2.AuxInt = int64ToAuxInt(-1)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v1.AddArg2(y, v2)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpRsh16x64 lowers an arithmetic (signed) right shift
// of a 16-bit value by a 64-bit amount: x is sign-extended to 64 bits, and
// the shift amount is ORed with ADDI [-1] (SLTIU [64] y) — 0 when the
// amount is below 64, all ones otherwise — so oversized shifts behave like
// a maximal shift and every result bit is a copy of the sign bit. No
// extension of y is needed at 64 bits. The rule always matches.
func rewriteValueRISCV64_OpRsh16x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x64 <t> x y)
	// result: (SRA <t> (SignExt16to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v2.AuxInt = int64ToAuxInt(-1)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v3.AuxInt = int64ToAuxInt(64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg2(y, v2)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpRsh16x8 lowers an arithmetic (signed) right shift
// of a 16-bit value by an 8-bit amount: x is sign-extended to 64 bits, and
// the shift amount is ORed with ADDI [-1] (SLTIU [64] zext8(y)) — 0 when
// the amount is below 64, all ones otherwise — so oversized shifts behave
// like a maximal shift and every result bit is a copy of the sign bit.
// The rule always matches.
func rewriteValueRISCV64_OpRsh16x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x8 <t> x y)
	// result: (SRA <t> (SignExt16to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt8to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v2.AuxInt = int64ToAuxInt(-1)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v1.AddArg2(y, v2)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpRsh32Ux16 lowers an unsigned right shift of a
// 32-bit value by a 16-bit amount: x is zero-extended to 64 bits and
// shifted with SRL, then ANDed with Neg32(SLTIU [64] zext16(y)) — a mask
// that is all ones when the zero-extended shift amount is below 64 and zero
// otherwise, so oversized shifts produce 0. The rule always matches.
func rewriteValueRISCV64_OpRsh32Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32Ux16 <t> x y)
	// result: (AND (SRL <t> (ZeroExt32to64 x) y) (Neg32 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpNeg32, t)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v.AddArg2(v0, v2)
		return true
	}
}
// rewriteValueRISCV64_OpRsh32Ux32 lowers an unsigned right shift of a
// 32-bit value by a 32-bit amount: x is zero-extended to 64 bits and
// shifted with SRL, then ANDed with Neg32(SLTIU [64] zext32(y)) — a mask
// that is all ones when the zero-extended shift amount is below 64 and zero
// otherwise, so oversized shifts produce 0. The rule always matches.
func rewriteValueRISCV64_OpRsh32Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32Ux32 <t> x y)
	// result: (AND (SRL <t> (ZeroExt32to64 x) y) (Neg32 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpNeg32, t)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v.AddArg2(v0, v2)
		return true
	}
}
// rewriteValueRISCV64_OpRsh32Ux64 lowers Rsh32Ux64 (logical right shift of a
// 32-bit value by a 64-bit amount). The amount is already 64-bit, so it feeds
// SLTIU directly; the SRL result is ANDed with Neg(y < 64) so shifts of 64 or
// more produce 0.
func rewriteValueRISCV64_OpRsh32Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32Ux64 <t> x y)
	// result: (AND (SRL <t> (ZeroExt32to64 x) y) (Neg32 <t> (SLTIU <t> [64] y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpNeg32, t)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v3.AuxInt = int64ToAuxInt(64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v.AddArg2(v0, v2)
		return true
	}
}
// rewriteValueRISCV64_OpRsh32Ux8 lowers Rsh32Ux8 (logical right shift of a
// 32-bit value by an 8-bit amount). The zero-extended value is shifted with
// SRL and the result is ANDed with Neg(y < 64), so shifts of 64 or more
// produce 0.
func rewriteValueRISCV64_OpRsh32Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32Ux8 <t> x y)
	// result: (AND (SRL <t> (ZeroExt32to64 x) y) (Neg32 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpNeg32, t)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v.AddArg2(v0, v2)
		return true
	}
}
// rewriteValueRISCV64_OpRsh32x16 lowers Rsh32x16 (arithmetic right shift of a
// 32-bit value by a 16-bit amount). The value is sign-extended to 64 bits and
// the shift amount is ORed with -(y >= 64), so an over-long shift saturates
// to an all-ones amount and fills with the sign bit.
func rewriteValueRISCV64_OpRsh32x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32x16 <t> x y)
	// result: (SRA <t> (SignExt32to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt16to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v2.AuxInt = int64ToAuxInt(-1)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v1.AddArg2(y, v2)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpRsh32x32 lowers Rsh32x32 (arithmetic right shift of a
// 32-bit value by a 32-bit amount). The value is sign-extended to 64 bits and
// the shift amount is ORed with -(y >= 64), so an over-long shift saturates
// to an all-ones amount and fills with the sign bit.
func rewriteValueRISCV64_OpRsh32x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32x32 <t> x y)
	// result: (SRA <t> (SignExt32to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt32to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v2.AuxInt = int64ToAuxInt(-1)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v1.AddArg2(y, v2)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpRsh32x64 lowers Rsh32x64 (arithmetic right shift of a
// 32-bit value by a 64-bit amount). The amount is already 64-bit, so it feeds
// SLTIU directly; ORing it with -(y >= 64) saturates over-long shifts to an
// all-ones amount, filling with the sign bit.
func rewriteValueRISCV64_OpRsh32x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32x64 <t> x y)
	// result: (SRA <t> (SignExt32to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v2.AuxInt = int64ToAuxInt(-1)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v3.AuxInt = int64ToAuxInt(64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg2(y, v2)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpRsh32x8 lowers Rsh32x8 (arithmetic right shift of a
// 32-bit value by an 8-bit amount). The value is sign-extended to 64 bits and
// the shift amount is ORed with -(y >= 64), so an over-long shift saturates
// to an all-ones amount and fills with the sign bit.
func rewriteValueRISCV64_OpRsh32x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32x8 <t> x y)
	// result: (SRA <t> (SignExt32to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt8to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v2.AuxInt = int64ToAuxInt(-1)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v1.AddArg2(y, v2)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpRsh64Ux16 lowers Rsh64Ux16 (logical right shift of a
// 64-bit value by a 16-bit amount). x is already full-width, so SRL shifts it
// directly; the result is ANDed with Neg(y < 64) so shifts of 64 or more
// produce 0.
func rewriteValueRISCV64_OpRsh64Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64Ux16 <t> x y)
	// result: (AND (SRL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg64, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpRsh64Ux32 lowers Rsh64Ux32 (logical right shift of a
// 64-bit value by a 32-bit amount). x is shifted directly with SRL and the
// result is ANDed with Neg(y < 64), so shifts of 64 or more produce 0.
func rewriteValueRISCV64_OpRsh64Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64Ux32 <t> x y)
	// result: (AND (SRL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg64, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpRsh64Ux64 lowers Rsh64Ux64 (logical right shift of a
// 64-bit value by a 64-bit amount). Both operands are full-width, so no
// extensions are needed; the SRL result is ANDed with Neg(y < 64) so shifts
// of 64 or more produce 0.
func rewriteValueRISCV64_OpRsh64Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Rsh64Ux64 <t> x y)
	// result: (AND (SRL <t> x y) (Neg64 <t> (SLTIU <t> [64] y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg64, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpRsh64Ux8 lowers Rsh64Ux8 (logical right shift of a
// 64-bit value by an 8-bit amount). x is shifted directly with SRL and the
// result is ANDed with Neg(y < 64), so shifts of 64 or more produce 0.
func rewriteValueRISCV64_OpRsh64Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64Ux8 <t> x y)
	// result: (AND (SRL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg64, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpRsh64x16 lowers Rsh64x16 (arithmetic right shift of a
// 64-bit value by a 16-bit amount). x is full-width, so SRA shifts it
// directly; the shift amount is ORed with -(y >= 64) so an over-long shift
// saturates to an all-ones amount and fills with the sign bit.
func rewriteValueRISCV64_OpRsh64x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64x16 <t> x y)
	// result: (SRA <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt16to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v1.AuxInt = int64ToAuxInt(-1)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v0.AddArg2(y, v1)
		v.AddArg2(x, v0)
		return true
	}
}
// rewriteValueRISCV64_OpRsh64x32 lowers Rsh64x32 (arithmetic right shift of a
// 64-bit value by a 32-bit amount). x is shifted directly with SRA; the shift
// amount is ORed with -(y >= 64) so an over-long shift saturates to an
// all-ones amount and fills with the sign bit.
func rewriteValueRISCV64_OpRsh64x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64x32 <t> x y)
	// result: (SRA <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt32to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v1.AuxInt = int64ToAuxInt(-1)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v0.AddArg2(y, v1)
		v.AddArg2(x, v0)
		return true
	}
}
// rewriteValueRISCV64_OpRsh64x64 lowers Rsh64x64 (arithmetic right shift of a
// 64-bit value by a 64-bit amount). Both operands are full-width, so no
// extensions are needed; the shift amount is ORed with -(y >= 64) so an
// over-long shift saturates to an all-ones amount, filling with the sign bit.
func rewriteValueRISCV64_OpRsh64x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Rsh64x64 <t> x y)
	// result: (SRA <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v1.AuxInt = int64ToAuxInt(-1)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v2.AuxInt = int64ToAuxInt(64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v0.AddArg2(y, v1)
		v.AddArg2(x, v0)
		return true
	}
}
// rewriteValueRISCV64_OpRsh64x8 lowers Rsh64x8 (arithmetic right shift of a
// 64-bit value by an 8-bit amount). x is shifted directly with SRA; the shift
// amount is ORed with -(y >= 64) so an over-long shift saturates to an
// all-ones amount and fills with the sign bit.
func rewriteValueRISCV64_OpRsh64x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64x8 <t> x y)
	// result: (SRA <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt8to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v1.AuxInt = int64ToAuxInt(-1)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v0.AddArg2(y, v1)
		v.AddArg2(x, v0)
		return true
	}
}
// rewriteValueRISCV64_OpRsh8Ux16 lowers Rsh8Ux16 (logical right shift of an
// 8-bit value by a 16-bit amount). The zero-extended value is shifted with
// SRL and the result is ANDed with Neg(y < 64), so shifts of 64 or more
// produce 0.
func rewriteValueRISCV64_OpRsh8Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux16 <t> x y)
	// result: (AND (SRL <t> (ZeroExt8to64 x) y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpNeg8, t)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v.AddArg2(v0, v2)
		return true
	}
}
// rewriteValueRISCV64_OpRsh8Ux32 lowers Rsh8Ux32 (logical right shift of an
// 8-bit value by a 32-bit amount). The zero-extended value is shifted with
// SRL and the result is ANDed with Neg(y < 64), so shifts of 64 or more
// produce 0.
func rewriteValueRISCV64_OpRsh8Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux32 <t> x y)
	// result: (AND (SRL <t> (ZeroExt8to64 x) y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpNeg8, t)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v.AddArg2(v0, v2)
		return true
	}
}
// rewriteValueRISCV64_OpRsh8Ux64 lowers Rsh8Ux64 (logical right shift of an
// 8-bit value by a 64-bit amount). The amount is already 64-bit, so it feeds
// SLTIU directly; the SRL result is ANDed with Neg(y < 64) so shifts of 64 or
// more produce 0.
func rewriteValueRISCV64_OpRsh8Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux64 <t> x y)
	// result: (AND (SRL <t> (ZeroExt8to64 x) y) (Neg8 <t> (SLTIU <t> [64] y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpNeg8, t)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v3.AuxInt = int64ToAuxInt(64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v.AddArg2(v0, v2)
		return true
	}
}
// rewriteValueRISCV64_OpRsh8Ux8 lowers Rsh8Ux8 (logical right shift of an
// 8-bit value by an 8-bit amount). The zero-extended value is shifted with
// SRL and the result is ANDed with Neg(y < 64), so shifts of 64 or more
// produce 0.
func rewriteValueRISCV64_OpRsh8Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux8 <t> x y)
	// result: (AND (SRL <t> (ZeroExt8to64 x) y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpNeg8, t)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v.AddArg2(v0, v2)
		return true
	}
}
// rewriteValueRISCV64_OpRsh8x16 lowers Rsh8x16 (arithmetic right shift of an
// 8-bit value by a 16-bit amount). The value is sign-extended to 64 bits and
// the shift amount is ORed with -(y >= 64), so an over-long shift saturates
// to an all-ones amount and fills with the sign bit.
func rewriteValueRISCV64_OpRsh8x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x16 <t> x y)
	// result: (SRA <t> (SignExt8to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt16to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v2.AuxInt = int64ToAuxInt(-1)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v1.AddArg2(y, v2)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpRsh8x32 lowers Rsh8x32 (arithmetic right shift of an
// 8-bit value by a 32-bit amount). The value is sign-extended to 64 bits and
// the shift amount is ORed with -(y >= 64), so an over-long shift saturates
// to an all-ones amount and fills with the sign bit.
func rewriteValueRISCV64_OpRsh8x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x32 <t> x y)
	// result: (SRA <t> (SignExt8to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt32to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v2.AuxInt = int64ToAuxInt(-1)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v1.AddArg2(y, v2)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpRsh8x64 lowers Rsh8x64 (arithmetic right shift of an
// 8-bit value by a 64-bit amount). The amount is already 64-bit, so it feeds
// SLTIU directly; ORing it with -(y >= 64) saturates over-long shifts to an
// all-ones amount, filling with the sign bit.
func rewriteValueRISCV64_OpRsh8x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x64 <t> x y)
	// result: (SRA <t> (SignExt8to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v2.AuxInt = int64ToAuxInt(-1)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v3.AuxInt = int64ToAuxInt(64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg2(y, v2)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpRsh8x8 lowers Rsh8x8 (arithmetic right shift of an
// 8-bit value by an 8-bit amount). The value is sign-extended to 64 bits and
// the shift amount is ORed with -(y >= 64), so an over-long shift saturates
// to an all-ones amount and fills with the sign bit.
func rewriteValueRISCV64_OpRsh8x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x8 <t> x y)
	// result: (SRA <t> (SignExt8to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt8to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v2.AuxInt = int64ToAuxInt(-1)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v1.AddArg2(y, v2)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpSlicemask lowers Slicemask as NOT((x-1) >>arith 63):
// the result is all ones when x > 0 and all zeros when x == 0. x is a slice
// length or capacity, so the negative case does not arise.
func rewriteValueRISCV64_OpSlicemask(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (Slicemask <t> x)
	// result: (NOT (SRAI <t> [63] (ADDI <t> [-1] x)))
	for {
		t := v.Type
		x := v_0
		v.reset(OpRISCV64NOT)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRAI, t)
		v0.AuxInt = int64ToAuxInt(63)
		v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, t)
		v1.AuxInt = int64ToAuxInt(-1)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpStore lowers a generic Store to the appropriately
// sized RISCV64 store, selected by the aux type's size and whether the stored
// value is floating point: MOV{B,H,W,D}store for 1/2/4/8-byte integer values
// and FMOV{W,D}store for 4/8-byte float values. Returns false if no rule
// applies (e.g. an unsupported store size).
func rewriteValueRISCV64_OpStore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 1
	// result: (MOVBstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 1) {
			break
		}
		v.reset(OpRISCV64MOVBstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 2
	// result: (MOVHstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 2) {
			break
		}
		v.reset(OpRISCV64MOVHstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 4 && !is32BitFloat(val.Type)
	// result: (MOVWstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 4 && !is32BitFloat(val.Type)) {
			break
		}
		v.reset(OpRISCV64MOVWstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 8 && !is64BitFloat(val.Type)
	// result: (MOVDstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 8 && !is64BitFloat(val.Type)) {
			break
		}
		v.reset(OpRISCV64MOVDstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 4 && is32BitFloat(val.Type)
	// result: (FMOVWstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 4 && is32BitFloat(val.Type)) {
			break
		}
		v.reset(OpRISCV64FMOVWstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 8 && is64BitFloat(val.Type)
	// result: (FMOVDstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 8 && is64BitFloat(val.Type)) {
			break
		}
		v.reset(OpRISCV64FMOVDstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	return false
}
6106func rewriteValueRISCV64_OpZero(v *Value) bool {
6107 v_1 := v.Args[1]
6108 v_0 := v.Args[0]
6109 b := v.Block
6110 config := b.Func.Config
6111 typ := &b.Func.Config.Types
6112 // match: (Zero [0] _ mem)
6113 // result: mem
6114 for {
6115 if auxIntToInt64(v.AuxInt) != 0 {
6116 break
6117 }
6118 mem := v_1
6119 v.copyOf(mem)
6120 return true
6121 }
6122 // match: (Zero [1] ptr mem)
Dan Willemsencc753b72021-08-31 13:25:42 -07006123 // result: (MOVBstore ptr (MOVDconst [0]) mem)
Patrice Arruda748609c2020-06-25 12:12:21 -07006124 for {
6125 if auxIntToInt64(v.AuxInt) != 1 {
6126 break
6127 }
6128 ptr := v_0
6129 mem := v_1
6130 v.reset(OpRISCV64MOVBstore)
Dan Willemsencc753b72021-08-31 13:25:42 -07006131 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
6132 v0.AuxInt = int64ToAuxInt(0)
Colin Cross1f805522021-05-14 11:10:59 -07006133 v.AddArg3(ptr, v0, mem)
6134 return true
6135 }
6136 // match: (Zero [2] {t} ptr mem)
6137 // cond: t.Alignment()%2 == 0
Dan Willemsencc753b72021-08-31 13:25:42 -07006138 // result: (MOVHstore ptr (MOVDconst [0]) mem)
Colin Cross1f805522021-05-14 11:10:59 -07006139 for {
6140 if auxIntToInt64(v.AuxInt) != 2 {
6141 break
6142 }
6143 t := auxToType(v.Aux)
6144 ptr := v_0
6145 mem := v_1
6146 if !(t.Alignment()%2 == 0) {
6147 break
6148 }
6149 v.reset(OpRISCV64MOVHstore)
Dan Willemsencc753b72021-08-31 13:25:42 -07006150 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
6151 v0.AuxInt = int64ToAuxInt(0)
Patrice Arruda748609c2020-06-25 12:12:21 -07006152 v.AddArg3(ptr, v0, mem)
6153 return true
6154 }
6155 // match: (Zero [2] ptr mem)
Dan Willemsencc753b72021-08-31 13:25:42 -07006156 // result: (MOVBstore [1] ptr (MOVDconst [0]) (MOVBstore ptr (MOVDconst [0]) mem))
Patrice Arruda748609c2020-06-25 12:12:21 -07006157 for {
6158 if auxIntToInt64(v.AuxInt) != 2 {
6159 break
6160 }
6161 ptr := v_0
6162 mem := v_1
Colin Cross1f805522021-05-14 11:10:59 -07006163 v.reset(OpRISCV64MOVBstore)
6164 v.AuxInt = int32ToAuxInt(1)
Dan Willemsencc753b72021-08-31 13:25:42 -07006165 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
6166 v0.AuxInt = int64ToAuxInt(0)
Colin Cross1f805522021-05-14 11:10:59 -07006167 v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
6168 v1.AddArg3(ptr, v0, mem)
6169 v.AddArg3(ptr, v0, v1)
6170 return true
6171 }
6172 // match: (Zero [4] {t} ptr mem)
6173 // cond: t.Alignment()%4 == 0
Dan Willemsencc753b72021-08-31 13:25:42 -07006174 // result: (MOVWstore ptr (MOVDconst [0]) mem)
Colin Cross1f805522021-05-14 11:10:59 -07006175 for {
6176 if auxIntToInt64(v.AuxInt) != 4 {
6177 break
6178 }
6179 t := auxToType(v.Aux)
6180 ptr := v_0
6181 mem := v_1
6182 if !(t.Alignment()%4 == 0) {
6183 break
6184 }
6185 v.reset(OpRISCV64MOVWstore)
Dan Willemsencc753b72021-08-31 13:25:42 -07006186 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
6187 v0.AuxInt = int64ToAuxInt(0)
Patrice Arruda748609c2020-06-25 12:12:21 -07006188 v.AddArg3(ptr, v0, mem)
6189 return true
6190 }
Colin Cross1f805522021-05-14 11:10:59 -07006191 // match: (Zero [4] {t} ptr mem)
6192 // cond: t.Alignment()%2 == 0
Dan Willemsencc753b72021-08-31 13:25:42 -07006193 // result: (MOVHstore [2] ptr (MOVDconst [0]) (MOVHstore ptr (MOVDconst [0]) mem))
Colin Cross1f805522021-05-14 11:10:59 -07006194 for {
6195 if auxIntToInt64(v.AuxInt) != 4 {
6196 break
6197 }
6198 t := auxToType(v.Aux)
6199 ptr := v_0
6200 mem := v_1
6201 if !(t.Alignment()%2 == 0) {
6202 break
6203 }
6204 v.reset(OpRISCV64MOVHstore)
6205 v.AuxInt = int32ToAuxInt(2)
Dan Willemsencc753b72021-08-31 13:25:42 -07006206 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
6207 v0.AuxInt = int64ToAuxInt(0)
Colin Cross1f805522021-05-14 11:10:59 -07006208 v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
6209 v1.AddArg3(ptr, v0, mem)
6210 v.AddArg3(ptr, v0, v1)
6211 return true
6212 }
Patrice Arruda748609c2020-06-25 12:12:21 -07006213 // match: (Zero [4] ptr mem)
Dan Willemsencc753b72021-08-31 13:25:42 -07006214 // result: (MOVBstore [3] ptr (MOVDconst [0]) (MOVBstore [2] ptr (MOVDconst [0]) (MOVBstore [1] ptr (MOVDconst [0]) (MOVBstore ptr (MOVDconst [0]) mem))))
Patrice Arruda748609c2020-06-25 12:12:21 -07006215 for {
6216 if auxIntToInt64(v.AuxInt) != 4 {
6217 break
6218 }
6219 ptr := v_0
6220 mem := v_1
Colin Cross1f805522021-05-14 11:10:59 -07006221 v.reset(OpRISCV64MOVBstore)
6222 v.AuxInt = int32ToAuxInt(3)
Dan Willemsencc753b72021-08-31 13:25:42 -07006223 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
6224 v0.AuxInt = int64ToAuxInt(0)
Colin Cross1f805522021-05-14 11:10:59 -07006225 v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
6226 v1.AuxInt = int32ToAuxInt(2)
6227 v2 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
6228 v2.AuxInt = int32ToAuxInt(1)
6229 v3 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
6230 v3.AddArg3(ptr, v0, mem)
6231 v2.AddArg3(ptr, v0, v3)
6232 v1.AddArg3(ptr, v0, v2)
6233 v.AddArg3(ptr, v0, v1)
Patrice Arruda748609c2020-06-25 12:12:21 -07006234 return true
6235 }
Colin Cross1f805522021-05-14 11:10:59 -07006236 // match: (Zero [8] {t} ptr mem)
6237 // cond: t.Alignment()%8 == 0
6238 // result: (MOVDstore ptr (MOVDconst [0]) mem)
Patrice Arruda748609c2020-06-25 12:12:21 -07006239 for {
6240 if auxIntToInt64(v.AuxInt) != 8 {
6241 break
6242 }
Colin Cross1f805522021-05-14 11:10:59 -07006243 t := auxToType(v.Aux)
Patrice Arruda748609c2020-06-25 12:12:21 -07006244 ptr := v_0
6245 mem := v_1
Colin Cross1f805522021-05-14 11:10:59 -07006246 if !(t.Alignment()%8 == 0) {
6247 break
6248 }
Patrice Arruda748609c2020-06-25 12:12:21 -07006249 v.reset(OpRISCV64MOVDstore)
6250 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
Colin Cross1f805522021-05-14 11:10:59 -07006251 v0.AuxInt = int64ToAuxInt(0)
Patrice Arruda748609c2020-06-25 12:12:21 -07006252 v.AddArg3(ptr, v0, mem)
6253 return true
6254 }
Colin Cross1f805522021-05-14 11:10:59 -07006255 // match: (Zero [8] {t} ptr mem)
6256 // cond: t.Alignment()%4 == 0
Dan Willemsencc753b72021-08-31 13:25:42 -07006257 // result: (MOVWstore [4] ptr (MOVDconst [0]) (MOVWstore ptr (MOVDconst [0]) mem))
Colin Cross1f805522021-05-14 11:10:59 -07006258 for {
6259 if auxIntToInt64(v.AuxInt) != 8 {
6260 break
6261 }
6262 t := auxToType(v.Aux)
6263 ptr := v_0
6264 mem := v_1
6265 if !(t.Alignment()%4 == 0) {
6266 break
6267 }
6268 v.reset(OpRISCV64MOVWstore)
6269 v.AuxInt = int32ToAuxInt(4)
Dan Willemsencc753b72021-08-31 13:25:42 -07006270 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
6271 v0.AuxInt = int64ToAuxInt(0)
Colin Cross1f805522021-05-14 11:10:59 -07006272 v1 := b.NewValue0(v.Pos, OpRISCV64MOVWstore, types.TypeMem)
6273 v1.AddArg3(ptr, v0, mem)
6274 v.AddArg3(ptr, v0, v1)
6275 return true
6276 }
6277 // match: (Zero [8] {t} ptr mem)
6278 // cond: t.Alignment()%2 == 0
Dan Willemsencc753b72021-08-31 13:25:42 -07006279 // result: (MOVHstore [6] ptr (MOVDconst [0]) (MOVHstore [4] ptr (MOVDconst [0]) (MOVHstore [2] ptr (MOVDconst [0]) (MOVHstore ptr (MOVDconst [0]) mem))))
Colin Cross1f805522021-05-14 11:10:59 -07006280 for {
6281 if auxIntToInt64(v.AuxInt) != 8 {
6282 break
6283 }
6284 t := auxToType(v.Aux)
6285 ptr := v_0
6286 mem := v_1
6287 if !(t.Alignment()%2 == 0) {
6288 break
6289 }
6290 v.reset(OpRISCV64MOVHstore)
6291 v.AuxInt = int32ToAuxInt(6)
Dan Willemsencc753b72021-08-31 13:25:42 -07006292 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
6293 v0.AuxInt = int64ToAuxInt(0)
Colin Cross1f805522021-05-14 11:10:59 -07006294 v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
6295 v1.AuxInt = int32ToAuxInt(4)
6296 v2 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
6297 v2.AuxInt = int32ToAuxInt(2)
6298 v3 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
6299 v3.AddArg3(ptr, v0, mem)
6300 v2.AddArg3(ptr, v0, v3)
6301 v1.AddArg3(ptr, v0, v2)
6302 v.AddArg3(ptr, v0, v1)
6303 return true
6304 }
6305 // match: (Zero [3] ptr mem)
Dan Willemsencc753b72021-08-31 13:25:42 -07006306 // result: (MOVBstore [2] ptr (MOVDconst [0]) (MOVBstore [1] ptr (MOVDconst [0]) (MOVBstore ptr (MOVDconst [0]) mem)))
Colin Cross1f805522021-05-14 11:10:59 -07006307 for {
6308 if auxIntToInt64(v.AuxInt) != 3 {
6309 break
6310 }
6311 ptr := v_0
6312 mem := v_1
6313 v.reset(OpRISCV64MOVBstore)
6314 v.AuxInt = int32ToAuxInt(2)
Dan Willemsencc753b72021-08-31 13:25:42 -07006315 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
6316 v0.AuxInt = int64ToAuxInt(0)
Colin Cross1f805522021-05-14 11:10:59 -07006317 v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
6318 v1.AuxInt = int32ToAuxInt(1)
6319 v2 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
6320 v2.AddArg3(ptr, v0, mem)
6321 v1.AddArg3(ptr, v0, v2)
6322 v.AddArg3(ptr, v0, v1)
6323 return true
6324 }
6325 // match: (Zero [6] {t} ptr mem)
6326 // cond: t.Alignment()%2 == 0
Dan Willemsencc753b72021-08-31 13:25:42 -07006327 // result: (MOVHstore [4] ptr (MOVDconst [0]) (MOVHstore [2] ptr (MOVDconst [0]) (MOVHstore ptr (MOVDconst [0]) mem)))
Colin Cross1f805522021-05-14 11:10:59 -07006328 for {
6329 if auxIntToInt64(v.AuxInt) != 6 {
6330 break
6331 }
6332 t := auxToType(v.Aux)
6333 ptr := v_0
6334 mem := v_1
6335 if !(t.Alignment()%2 == 0) {
6336 break
6337 }
6338 v.reset(OpRISCV64MOVHstore)
6339 v.AuxInt = int32ToAuxInt(4)
Dan Willemsencc753b72021-08-31 13:25:42 -07006340 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
6341 v0.AuxInt = int64ToAuxInt(0)
Colin Cross1f805522021-05-14 11:10:59 -07006342 v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
6343 v1.AuxInt = int32ToAuxInt(2)
6344 v2 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
6345 v2.AddArg3(ptr, v0, mem)
6346 v1.AddArg3(ptr, v0, v2)
6347 v.AddArg3(ptr, v0, v1)
6348 return true
6349 }
6350 // match: (Zero [12] {t} ptr mem)
6351 // cond: t.Alignment()%4 == 0
Dan Willemsencc753b72021-08-31 13:25:42 -07006352 // result: (MOVWstore [8] ptr (MOVDconst [0]) (MOVWstore [4] ptr (MOVDconst [0]) (MOVWstore ptr (MOVDconst [0]) mem)))
Colin Cross1f805522021-05-14 11:10:59 -07006353 for {
6354 if auxIntToInt64(v.AuxInt) != 12 {
6355 break
6356 }
6357 t := auxToType(v.Aux)
6358 ptr := v_0
6359 mem := v_1
6360 if !(t.Alignment()%4 == 0) {
6361 break
6362 }
6363 v.reset(OpRISCV64MOVWstore)
6364 v.AuxInt = int32ToAuxInt(8)
Dan Willemsencc753b72021-08-31 13:25:42 -07006365 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
6366 v0.AuxInt = int64ToAuxInt(0)
Colin Cross1f805522021-05-14 11:10:59 -07006367 v1 := b.NewValue0(v.Pos, OpRISCV64MOVWstore, types.TypeMem)
6368 v1.AuxInt = int32ToAuxInt(4)
6369 v2 := b.NewValue0(v.Pos, OpRISCV64MOVWstore, types.TypeMem)
6370 v2.AddArg3(ptr, v0, mem)
6371 v1.AddArg3(ptr, v0, v2)
6372 v.AddArg3(ptr, v0, v1)
6373 return true
6374 }
6375 // match: (Zero [16] {t} ptr mem)
6376 // cond: t.Alignment()%8 == 0
6377 // result: (MOVDstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem))
6378 for {
6379 if auxIntToInt64(v.AuxInt) != 16 {
6380 break
6381 }
6382 t := auxToType(v.Aux)
6383 ptr := v_0
6384 mem := v_1
6385 if !(t.Alignment()%8 == 0) {
6386 break
6387 }
6388 v.reset(OpRISCV64MOVDstore)
6389 v.AuxInt = int32ToAuxInt(8)
6390 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
6391 v0.AuxInt = int64ToAuxInt(0)
6392 v1 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
6393 v1.AddArg3(ptr, v0, mem)
6394 v.AddArg3(ptr, v0, v1)
6395 return true
6396 }
6397 // match: (Zero [24] {t} ptr mem)
6398 // cond: t.Alignment()%8 == 0
6399 // result: (MOVDstore [16] ptr (MOVDconst [0]) (MOVDstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem)))
6400 for {
6401 if auxIntToInt64(v.AuxInt) != 24 {
6402 break
6403 }
6404 t := auxToType(v.Aux)
6405 ptr := v_0
6406 mem := v_1
6407 if !(t.Alignment()%8 == 0) {
6408 break
6409 }
6410 v.reset(OpRISCV64MOVDstore)
6411 v.AuxInt = int32ToAuxInt(16)
6412 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
6413 v0.AuxInt = int64ToAuxInt(0)
6414 v1 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
6415 v1.AuxInt = int32ToAuxInt(8)
6416 v2 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
6417 v2.AddArg3(ptr, v0, mem)
6418 v1.AddArg3(ptr, v0, v2)
6419 v.AddArg3(ptr, v0, v1)
6420 return true
6421 }
6422 // match: (Zero [32] {t} ptr mem)
6423 // cond: t.Alignment()%8 == 0
6424 // result: (MOVDstore [24] ptr (MOVDconst [0]) (MOVDstore [16] ptr (MOVDconst [0]) (MOVDstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem))))
6425 for {
6426 if auxIntToInt64(v.AuxInt) != 32 {
6427 break
6428 }
6429 t := auxToType(v.Aux)
6430 ptr := v_0
6431 mem := v_1
6432 if !(t.Alignment()%8 == 0) {
6433 break
6434 }
6435 v.reset(OpRISCV64MOVDstore)
6436 v.AuxInt = int32ToAuxInt(24)
6437 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
6438 v0.AuxInt = int64ToAuxInt(0)
6439 v1 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
6440 v1.AuxInt = int32ToAuxInt(16)
6441 v2 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
6442 v2.AuxInt = int32ToAuxInt(8)
6443 v3 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
6444 v3.AddArg3(ptr, v0, mem)
6445 v2.AddArg3(ptr, v0, v3)
6446 v1.AddArg3(ptr, v0, v2)
6447 v.AddArg3(ptr, v0, v1)
6448 return true
6449 }
6450 // match: (Zero [s] {t} ptr mem)
6451 // cond: s%8 == 0 && s <= 8*128 && t.Alignment()%8 == 0 && !config.noDuffDevice
6452 // result: (DUFFZERO [8 * (128 - s/8)] ptr mem)
6453 for {
6454 s := auxIntToInt64(v.AuxInt)
6455 t := auxToType(v.Aux)
6456 ptr := v_0
6457 mem := v_1
6458 if !(s%8 == 0 && s <= 8*128 && t.Alignment()%8 == 0 && !config.noDuffDevice) {
6459 break
6460 }
6461 v.reset(OpRISCV64DUFFZERO)
6462 v.AuxInt = int64ToAuxInt(8 * (128 - s/8))
6463 v.AddArg2(ptr, mem)
6464 return true
6465 }
Patrice Arruda748609c2020-06-25 12:12:21 -07006466 // match: (Zero [s] {t} ptr mem)
6467 // result: (LoweredZero [t.Alignment()] ptr (ADD <ptr.Type> ptr (MOVDconst [s-moveSize(t.Alignment(), config)])) mem)
6468 for {
6469 s := auxIntToInt64(v.AuxInt)
6470 t := auxToType(v.Aux)
6471 ptr := v_0
6472 mem := v_1
6473 v.reset(OpRISCV64LoweredZero)
6474 v.AuxInt = int64ToAuxInt(t.Alignment())
6475 v0 := b.NewValue0(v.Pos, OpRISCV64ADD, ptr.Type)
6476 v1 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
6477 v1.AuxInt = int64ToAuxInt(s - moveSize(t.Alignment(), config))
6478 v0.AddArg2(ptr, v1)
6479 v.AddArg3(ptr, v0, mem)
6480 return true
6481 }
6482}
Patrice Arruda748609c2020-06-25 12:12:21 -07006483func rewriteBlockRISCV64(b *Block) bool {
6484 switch b.Kind {
6485 case BlockRISCV64BEQ:
6486 // match: (BEQ (MOVDconst [0]) cond yes no)
6487 // result: (BEQZ cond yes no)
6488 for b.Controls[0].Op == OpRISCV64MOVDconst {
6489 v_0 := b.Controls[0]
6490 if auxIntToInt64(v_0.AuxInt) != 0 {
6491 break
6492 }
6493 cond := b.Controls[1]
6494 b.resetWithControl(BlockRISCV64BEQZ, cond)
6495 return true
6496 }
6497 // match: (BEQ cond (MOVDconst [0]) yes no)
6498 // result: (BEQZ cond yes no)
6499 for b.Controls[1].Op == OpRISCV64MOVDconst {
6500 cond := b.Controls[0]
6501 v_1 := b.Controls[1]
6502 if auxIntToInt64(v_1.AuxInt) != 0 {
6503 break
6504 }
6505 b.resetWithControl(BlockRISCV64BEQZ, cond)
6506 return true
6507 }
6508 case BlockRISCV64BEQZ:
6509 // match: (BEQZ (SEQZ x) yes no)
6510 // result: (BNEZ x yes no)
6511 for b.Controls[0].Op == OpRISCV64SEQZ {
6512 v_0 := b.Controls[0]
6513 x := v_0.Args[0]
6514 b.resetWithControl(BlockRISCV64BNEZ, x)
6515 return true
6516 }
6517 // match: (BEQZ (SNEZ x) yes no)
6518 // result: (BEQZ x yes no)
6519 for b.Controls[0].Op == OpRISCV64SNEZ {
6520 v_0 := b.Controls[0]
6521 x := v_0.Args[0]
6522 b.resetWithControl(BlockRISCV64BEQZ, x)
6523 return true
6524 }
Dan Willemsenbc60c3c2021-12-15 01:09:00 -08006525 // match: (BEQZ x:(NEG y) yes no)
6526 // cond: x.Uses == 1
6527 // result: (BEQZ y yes no)
6528 for b.Controls[0].Op == OpRISCV64NEG {
6529 x := b.Controls[0]
6530 y := x.Args[0]
6531 if !(x.Uses == 1) {
6532 break
6533 }
6534 b.resetWithControl(BlockRISCV64BEQZ, y)
6535 return true
6536 }
Patrice Arruda748609c2020-06-25 12:12:21 -07006537 // match: (BEQZ (SUB x y) yes no)
6538 // result: (BEQ x y yes no)
6539 for b.Controls[0].Op == OpRISCV64SUB {
6540 v_0 := b.Controls[0]
6541 y := v_0.Args[1]
6542 x := v_0.Args[0]
6543 b.resetWithControl2(BlockRISCV64BEQ, x, y)
6544 return true
6545 }
6546 // match: (BEQZ (SLT x y) yes no)
6547 // result: (BGE x y yes no)
6548 for b.Controls[0].Op == OpRISCV64SLT {
6549 v_0 := b.Controls[0]
6550 y := v_0.Args[1]
6551 x := v_0.Args[0]
6552 b.resetWithControl2(BlockRISCV64BGE, x, y)
6553 return true
6554 }
6555 // match: (BEQZ (SLTU x y) yes no)
6556 // result: (BGEU x y yes no)
6557 for b.Controls[0].Op == OpRISCV64SLTU {
6558 v_0 := b.Controls[0]
6559 y := v_0.Args[1]
6560 x := v_0.Args[0]
6561 b.resetWithControl2(BlockRISCV64BGEU, x, y)
6562 return true
6563 }
Dan Willemsenbc60c3c2021-12-15 01:09:00 -08006564 case BlockRISCV64BGE:
6565 // match: (BGE (MOVDconst [0]) cond yes no)
6566 // result: (BLEZ cond yes no)
6567 for b.Controls[0].Op == OpRISCV64MOVDconst {
6568 v_0 := b.Controls[0]
6569 if auxIntToInt64(v_0.AuxInt) != 0 {
6570 break
6571 }
6572 cond := b.Controls[1]
6573 b.resetWithControl(BlockRISCV64BLEZ, cond)
6574 return true
6575 }
6576 // match: (BGE cond (MOVDconst [0]) yes no)
6577 // result: (BGEZ cond yes no)
6578 for b.Controls[1].Op == OpRISCV64MOVDconst {
6579 cond := b.Controls[0]
6580 v_1 := b.Controls[1]
6581 if auxIntToInt64(v_1.AuxInt) != 0 {
6582 break
6583 }
6584 b.resetWithControl(BlockRISCV64BGEZ, cond)
6585 return true
6586 }
6587 case BlockRISCV64BLT:
6588 // match: (BLT (MOVDconst [0]) cond yes no)
6589 // result: (BGTZ cond yes no)
6590 for b.Controls[0].Op == OpRISCV64MOVDconst {
6591 v_0 := b.Controls[0]
6592 if auxIntToInt64(v_0.AuxInt) != 0 {
6593 break
6594 }
6595 cond := b.Controls[1]
6596 b.resetWithControl(BlockRISCV64BGTZ, cond)
6597 return true
6598 }
6599 // match: (BLT cond (MOVDconst [0]) yes no)
6600 // result: (BLTZ cond yes no)
6601 for b.Controls[1].Op == OpRISCV64MOVDconst {
6602 cond := b.Controls[0]
6603 v_1 := b.Controls[1]
6604 if auxIntToInt64(v_1.AuxInt) != 0 {
6605 break
6606 }
6607 b.resetWithControl(BlockRISCV64BLTZ, cond)
6608 return true
6609 }
Patrice Arruda748609c2020-06-25 12:12:21 -07006610 case BlockRISCV64BNE:
6611 // match: (BNE (MOVDconst [0]) cond yes no)
6612 // result: (BNEZ cond yes no)
6613 for b.Controls[0].Op == OpRISCV64MOVDconst {
6614 v_0 := b.Controls[0]
6615 if auxIntToInt64(v_0.AuxInt) != 0 {
6616 break
6617 }
6618 cond := b.Controls[1]
6619 b.resetWithControl(BlockRISCV64BNEZ, cond)
6620 return true
6621 }
6622 // match: (BNE cond (MOVDconst [0]) yes no)
6623 // result: (BNEZ cond yes no)
6624 for b.Controls[1].Op == OpRISCV64MOVDconst {
6625 cond := b.Controls[0]
6626 v_1 := b.Controls[1]
6627 if auxIntToInt64(v_1.AuxInt) != 0 {
6628 break
6629 }
6630 b.resetWithControl(BlockRISCV64BNEZ, cond)
6631 return true
6632 }
6633 case BlockRISCV64BNEZ:
6634 // match: (BNEZ (SEQZ x) yes no)
6635 // result: (BEQZ x yes no)
6636 for b.Controls[0].Op == OpRISCV64SEQZ {
6637 v_0 := b.Controls[0]
6638 x := v_0.Args[0]
6639 b.resetWithControl(BlockRISCV64BEQZ, x)
6640 return true
6641 }
6642 // match: (BNEZ (SNEZ x) yes no)
6643 // result: (BNEZ x yes no)
6644 for b.Controls[0].Op == OpRISCV64SNEZ {
6645 v_0 := b.Controls[0]
6646 x := v_0.Args[0]
6647 b.resetWithControl(BlockRISCV64BNEZ, x)
6648 return true
6649 }
Dan Willemsenbc60c3c2021-12-15 01:09:00 -08006650 // match: (BNEZ x:(NEG y) yes no)
6651 // cond: x.Uses == 1
6652 // result: (BNEZ y yes no)
6653 for b.Controls[0].Op == OpRISCV64NEG {
6654 x := b.Controls[0]
6655 y := x.Args[0]
6656 if !(x.Uses == 1) {
6657 break
6658 }
6659 b.resetWithControl(BlockRISCV64BNEZ, y)
6660 return true
6661 }
Patrice Arruda748609c2020-06-25 12:12:21 -07006662 // match: (BNEZ (SUB x y) yes no)
6663 // result: (BNE x y yes no)
6664 for b.Controls[0].Op == OpRISCV64SUB {
6665 v_0 := b.Controls[0]
6666 y := v_0.Args[1]
6667 x := v_0.Args[0]
6668 b.resetWithControl2(BlockRISCV64BNE, x, y)
6669 return true
6670 }
6671 // match: (BNEZ (SLT x y) yes no)
6672 // result: (BLT x y yes no)
6673 for b.Controls[0].Op == OpRISCV64SLT {
6674 v_0 := b.Controls[0]
6675 y := v_0.Args[1]
6676 x := v_0.Args[0]
6677 b.resetWithControl2(BlockRISCV64BLT, x, y)
6678 return true
6679 }
6680 // match: (BNEZ (SLTU x y) yes no)
6681 // result: (BLTU x y yes no)
6682 for b.Controls[0].Op == OpRISCV64SLTU {
6683 v_0 := b.Controls[0]
6684 y := v_0.Args[1]
6685 x := v_0.Args[0]
6686 b.resetWithControl2(BlockRISCV64BLTU, x, y)
6687 return true
6688 }
6689 case BlockIf:
6690 // match: (If cond yes no)
6691 // result: (BNEZ cond yes no)
6692 for {
6693 cond := b.Controls[0]
6694 b.resetWithControl(BlockRISCV64BNEZ, cond)
6695 return true
6696 }
6697 }
6698 return false
6699}