1 // Code generated from _gen/RISCV64.rules using 'go generate'; DO NOT EDIT.
6 import "cmd/compile/internal/types"
8 func rewriteValueRISCV64(v *Value) bool {
35 return rewriteValueRISCV64_OpAddr(v)
52 v.Op = OpRISCV64LoweredAtomicAdd32
55 v.Op = OpRISCV64LoweredAtomicAdd64
58 v.Op = OpRISCV64LoweredAtomicAnd32
61 return rewriteValueRISCV64_OpAtomicAnd8(v)
62 case OpAtomicCompareAndSwap32:
63 return rewriteValueRISCV64_OpAtomicCompareAndSwap32(v)
64 case OpAtomicCompareAndSwap64:
65 v.Op = OpRISCV64LoweredAtomicCas64
67 case OpAtomicExchange32:
68 v.Op = OpRISCV64LoweredAtomicExchange32
70 case OpAtomicExchange64:
71 v.Op = OpRISCV64LoweredAtomicExchange64
74 v.Op = OpRISCV64LoweredAtomicLoad32
77 v.Op = OpRISCV64LoweredAtomicLoad64
80 v.Op = OpRISCV64LoweredAtomicLoad8
83 v.Op = OpRISCV64LoweredAtomicLoad64
86 v.Op = OpRISCV64LoweredAtomicOr32
89 return rewriteValueRISCV64_OpAtomicOr8(v)
91 v.Op = OpRISCV64LoweredAtomicStore32
94 v.Op = OpRISCV64LoweredAtomicStore64
97 v.Op = OpRISCV64LoweredAtomicStore8
99 case OpAtomicStorePtrNoWB:
100 v.Op = OpRISCV64LoweredAtomicStore64
103 return rewriteValueRISCV64_OpAvg64u(v)
105 v.Op = OpRISCV64CALLclosure
120 return rewriteValueRISCV64_OpConst16(v)
122 return rewriteValueRISCV64_OpConst32(v)
124 return rewriteValueRISCV64_OpConst32F(v)
126 return rewriteValueRISCV64_OpConst64(v)
128 return rewriteValueRISCV64_OpConst64F(v)
130 return rewriteValueRISCV64_OpConst8(v)
132 return rewriteValueRISCV64_OpConstBool(v)
134 return rewriteValueRISCV64_OpConstNil(v)
136 v.Op = OpRISCV64MOVconvert
139 v.Op = OpRISCV64FSGNJD
142 v.Op = OpRISCV64FCVTWS
145 v.Op = OpRISCV64FCVTLS
148 v.Op = OpRISCV64FCVTDS
151 v.Op = OpRISCV64FCVTSW
154 v.Op = OpRISCV64FCVTDW
157 v.Op = OpRISCV64FCVTWD
160 v.Op = OpRISCV64FCVTSD
163 v.Op = OpRISCV64FCVTLD
166 v.Op = OpRISCV64FCVTSL
169 v.Op = OpRISCV64FCVTDL
171 case OpCvtBoolToUint8:
175 return rewriteValueRISCV64_OpDiv16(v)
177 return rewriteValueRISCV64_OpDiv16u(v)
179 return rewriteValueRISCV64_OpDiv32(v)
181 v.Op = OpRISCV64FDIVS
184 v.Op = OpRISCV64DIVUW
187 return rewriteValueRISCV64_OpDiv64(v)
189 v.Op = OpRISCV64FDIVD
195 return rewriteValueRISCV64_OpDiv8(v)
197 return rewriteValueRISCV64_OpDiv8u(v)
199 return rewriteValueRISCV64_OpEq16(v)
201 return rewriteValueRISCV64_OpEq32(v)
206 return rewriteValueRISCV64_OpEq64(v)
211 return rewriteValueRISCV64_OpEq8(v)
213 return rewriteValueRISCV64_OpEqB(v)
215 return rewriteValueRISCV64_OpEqPtr(v)
217 v.Op = OpRISCV64FMADDD
220 v.Op = OpRISCV64LoweredGetCallerPC
223 v.Op = OpRISCV64LoweredGetCallerSP
225 case OpGetClosurePtr:
226 v.Op = OpRISCV64LoweredGetClosurePtr
229 return rewriteValueRISCV64_OpHmul32(v)
231 return rewriteValueRISCV64_OpHmul32u(v)
236 v.Op = OpRISCV64MULHU
239 v.Op = OpRISCV64CALLinter
247 case OpIsSliceInBounds:
251 return rewriteValueRISCV64_OpLeq16(v)
253 return rewriteValueRISCV64_OpLeq16U(v)
255 return rewriteValueRISCV64_OpLeq32(v)
260 return rewriteValueRISCV64_OpLeq32U(v)
262 return rewriteValueRISCV64_OpLeq64(v)
267 return rewriteValueRISCV64_OpLeq64U(v)
269 return rewriteValueRISCV64_OpLeq8(v)
271 return rewriteValueRISCV64_OpLeq8U(v)
273 return rewriteValueRISCV64_OpLess16(v)
275 return rewriteValueRISCV64_OpLess16U(v)
277 return rewriteValueRISCV64_OpLess32(v)
282 return rewriteValueRISCV64_OpLess32U(v)
293 return rewriteValueRISCV64_OpLess8(v)
295 return rewriteValueRISCV64_OpLess8U(v)
297 return rewriteValueRISCV64_OpLoad(v)
299 return rewriteValueRISCV64_OpLocalAddr(v)
301 return rewriteValueRISCV64_OpLsh16x16(v)
303 return rewriteValueRISCV64_OpLsh16x32(v)
305 return rewriteValueRISCV64_OpLsh16x64(v)
307 return rewriteValueRISCV64_OpLsh16x8(v)
309 return rewriteValueRISCV64_OpLsh32x16(v)
311 return rewriteValueRISCV64_OpLsh32x32(v)
313 return rewriteValueRISCV64_OpLsh32x64(v)
315 return rewriteValueRISCV64_OpLsh32x8(v)
317 return rewriteValueRISCV64_OpLsh64x16(v)
319 return rewriteValueRISCV64_OpLsh64x32(v)
321 return rewriteValueRISCV64_OpLsh64x64(v)
323 return rewriteValueRISCV64_OpLsh64x8(v)
325 return rewriteValueRISCV64_OpLsh8x16(v)
327 return rewriteValueRISCV64_OpLsh8x32(v)
329 return rewriteValueRISCV64_OpLsh8x64(v)
331 return rewriteValueRISCV64_OpLsh8x8(v)
333 return rewriteValueRISCV64_OpMod16(v)
335 return rewriteValueRISCV64_OpMod16u(v)
337 return rewriteValueRISCV64_OpMod32(v)
339 v.Op = OpRISCV64REMUW
342 return rewriteValueRISCV64_OpMod64(v)
347 return rewriteValueRISCV64_OpMod8(v)
349 return rewriteValueRISCV64_OpMod8u(v)
351 return rewriteValueRISCV64_OpMove(v)
353 return rewriteValueRISCV64_OpMul16(v)
358 v.Op = OpRISCV64FMULS
364 v.Op = OpRISCV64FMULD
367 v.Op = OpRISCV64LoweredMuluhilo
370 v.Op = OpRISCV64LoweredMuluover
373 return rewriteValueRISCV64_OpMul8(v)
381 v.Op = OpRISCV64FNEGS
387 v.Op = OpRISCV64FNEGD
393 return rewriteValueRISCV64_OpNeq16(v)
395 return rewriteValueRISCV64_OpNeq32(v)
400 return rewriteValueRISCV64_OpNeq64(v)
405 return rewriteValueRISCV64_OpNeq8(v)
407 return rewriteValueRISCV64_OpNeqB(v)
409 return rewriteValueRISCV64_OpNeqPtr(v)
411 v.Op = OpRISCV64LoweredNilCheck
417 return rewriteValueRISCV64_OpOffPtr(v)
434 return rewriteValueRISCV64_OpPanicBounds(v)
436 return rewriteValueRISCV64_OpRISCV64ADD(v)
438 return rewriteValueRISCV64_OpRISCV64ADDI(v)
440 return rewriteValueRISCV64_OpRISCV64AND(v)
442 return rewriteValueRISCV64_OpRISCV64ANDI(v)
443 case OpRISCV64FMADDD:
444 return rewriteValueRISCV64_OpRISCV64FMADDD(v)
445 case OpRISCV64FMSUBD:
446 return rewriteValueRISCV64_OpRISCV64FMSUBD(v)
447 case OpRISCV64FNMADDD:
448 return rewriteValueRISCV64_OpRISCV64FNMADDD(v)
449 case OpRISCV64FNMSUBD:
450 return rewriteValueRISCV64_OpRISCV64FNMSUBD(v)
451 case OpRISCV64MOVBUload:
452 return rewriteValueRISCV64_OpRISCV64MOVBUload(v)
453 case OpRISCV64MOVBUreg:
454 return rewriteValueRISCV64_OpRISCV64MOVBUreg(v)
455 case OpRISCV64MOVBload:
456 return rewriteValueRISCV64_OpRISCV64MOVBload(v)
457 case OpRISCV64MOVBreg:
458 return rewriteValueRISCV64_OpRISCV64MOVBreg(v)
459 case OpRISCV64MOVBstore:
460 return rewriteValueRISCV64_OpRISCV64MOVBstore(v)
461 case OpRISCV64MOVBstorezero:
462 return rewriteValueRISCV64_OpRISCV64MOVBstorezero(v)
463 case OpRISCV64MOVDload:
464 return rewriteValueRISCV64_OpRISCV64MOVDload(v)
465 case OpRISCV64MOVDnop:
466 return rewriteValueRISCV64_OpRISCV64MOVDnop(v)
467 case OpRISCV64MOVDreg:
468 return rewriteValueRISCV64_OpRISCV64MOVDreg(v)
469 case OpRISCV64MOVDstore:
470 return rewriteValueRISCV64_OpRISCV64MOVDstore(v)
471 case OpRISCV64MOVDstorezero:
472 return rewriteValueRISCV64_OpRISCV64MOVDstorezero(v)
473 case OpRISCV64MOVHUload:
474 return rewriteValueRISCV64_OpRISCV64MOVHUload(v)
475 case OpRISCV64MOVHUreg:
476 return rewriteValueRISCV64_OpRISCV64MOVHUreg(v)
477 case OpRISCV64MOVHload:
478 return rewriteValueRISCV64_OpRISCV64MOVHload(v)
479 case OpRISCV64MOVHreg:
480 return rewriteValueRISCV64_OpRISCV64MOVHreg(v)
481 case OpRISCV64MOVHstore:
482 return rewriteValueRISCV64_OpRISCV64MOVHstore(v)
483 case OpRISCV64MOVHstorezero:
484 return rewriteValueRISCV64_OpRISCV64MOVHstorezero(v)
485 case OpRISCV64MOVWUload:
486 return rewriteValueRISCV64_OpRISCV64MOVWUload(v)
487 case OpRISCV64MOVWUreg:
488 return rewriteValueRISCV64_OpRISCV64MOVWUreg(v)
489 case OpRISCV64MOVWload:
490 return rewriteValueRISCV64_OpRISCV64MOVWload(v)
491 case OpRISCV64MOVWreg:
492 return rewriteValueRISCV64_OpRISCV64MOVWreg(v)
493 case OpRISCV64MOVWstore:
494 return rewriteValueRISCV64_OpRISCV64MOVWstore(v)
495 case OpRISCV64MOVWstorezero:
496 return rewriteValueRISCV64_OpRISCV64MOVWstorezero(v)
498 return rewriteValueRISCV64_OpRISCV64NEG(v)
500 return rewriteValueRISCV64_OpRISCV64NEGW(v)
502 return rewriteValueRISCV64_OpRISCV64OR(v)
504 return rewriteValueRISCV64_OpRISCV64ORI(v)
506 return rewriteValueRISCV64_OpRISCV64SEQZ(v)
508 return rewriteValueRISCV64_OpRISCV64SLL(v)
510 return rewriteValueRISCV64_OpRISCV64SLLI(v)
512 return rewriteValueRISCV64_OpRISCV64SLT(v)
514 return rewriteValueRISCV64_OpRISCV64SLTI(v)
516 return rewriteValueRISCV64_OpRISCV64SLTIU(v)
518 return rewriteValueRISCV64_OpRISCV64SLTU(v)
520 return rewriteValueRISCV64_OpRISCV64SNEZ(v)
522 return rewriteValueRISCV64_OpRISCV64SRA(v)
524 return rewriteValueRISCV64_OpRISCV64SRAI(v)
526 return rewriteValueRISCV64_OpRISCV64SRL(v)
528 return rewriteValueRISCV64_OpRISCV64SRLI(v)
530 return rewriteValueRISCV64_OpRISCV64SUB(v)
532 return rewriteValueRISCV64_OpRISCV64SUBW(v)
534 return rewriteValueRISCV64_OpRISCV64XOR(v)
536 return rewriteValueRISCV64_OpRotateLeft16(v)
538 return rewriteValueRISCV64_OpRotateLeft32(v)
540 return rewriteValueRISCV64_OpRotateLeft64(v)
542 return rewriteValueRISCV64_OpRotateLeft8(v)
550 return rewriteValueRISCV64_OpRsh16Ux16(v)
552 return rewriteValueRISCV64_OpRsh16Ux32(v)
554 return rewriteValueRISCV64_OpRsh16Ux64(v)
556 return rewriteValueRISCV64_OpRsh16Ux8(v)
558 return rewriteValueRISCV64_OpRsh16x16(v)
560 return rewriteValueRISCV64_OpRsh16x32(v)
562 return rewriteValueRISCV64_OpRsh16x64(v)
564 return rewriteValueRISCV64_OpRsh16x8(v)
566 return rewriteValueRISCV64_OpRsh32Ux16(v)
568 return rewriteValueRISCV64_OpRsh32Ux32(v)
570 return rewriteValueRISCV64_OpRsh32Ux64(v)
572 return rewriteValueRISCV64_OpRsh32Ux8(v)
574 return rewriteValueRISCV64_OpRsh32x16(v)
576 return rewriteValueRISCV64_OpRsh32x32(v)
578 return rewriteValueRISCV64_OpRsh32x64(v)
580 return rewriteValueRISCV64_OpRsh32x8(v)
582 return rewriteValueRISCV64_OpRsh64Ux16(v)
584 return rewriteValueRISCV64_OpRsh64Ux32(v)
586 return rewriteValueRISCV64_OpRsh64Ux64(v)
588 return rewriteValueRISCV64_OpRsh64Ux8(v)
590 return rewriteValueRISCV64_OpRsh64x16(v)
592 return rewriteValueRISCV64_OpRsh64x32(v)
594 return rewriteValueRISCV64_OpRsh64x64(v)
596 return rewriteValueRISCV64_OpRsh64x8(v)
598 return rewriteValueRISCV64_OpRsh8Ux16(v)
600 return rewriteValueRISCV64_OpRsh8Ux32(v)
602 return rewriteValueRISCV64_OpRsh8Ux64(v)
604 return rewriteValueRISCV64_OpRsh8Ux8(v)
606 return rewriteValueRISCV64_OpRsh8x16(v)
608 return rewriteValueRISCV64_OpRsh8x32(v)
610 return rewriteValueRISCV64_OpRsh8x64(v)
612 return rewriteValueRISCV64_OpRsh8x8(v)
614 return rewriteValueRISCV64_OpSelect0(v)
616 return rewriteValueRISCV64_OpSelect1(v)
617 case OpSignExt16to32:
618 v.Op = OpRISCV64MOVHreg
620 case OpSignExt16to64:
621 v.Op = OpRISCV64MOVHreg
623 case OpSignExt32to64:
624 v.Op = OpRISCV64MOVWreg
627 v.Op = OpRISCV64MOVBreg
630 v.Op = OpRISCV64MOVBreg
633 v.Op = OpRISCV64MOVBreg
636 return rewriteValueRISCV64_OpSlicemask(v)
638 v.Op = OpRISCV64FSQRTD
641 v.Op = OpRISCV64FSQRTS
644 v.Op = OpRISCV64CALLstatic
647 return rewriteValueRISCV64_OpStore(v)
655 v.Op = OpRISCV64FSUBS
661 v.Op = OpRISCV64FSUBD
670 v.Op = OpRISCV64CALLtail
691 v.Op = OpRISCV64LoweredWB
706 return rewriteValueRISCV64_OpZero(v)
707 case OpZeroExt16to32:
708 v.Op = OpRISCV64MOVHUreg
710 case OpZeroExt16to64:
711 v.Op = OpRISCV64MOVHUreg
713 case OpZeroExt32to64:
714 v.Op = OpRISCV64MOVWUreg
717 v.Op = OpRISCV64MOVBUreg
720 v.Op = OpRISCV64MOVBUreg
723 v.Op = OpRISCV64MOVBUreg
728 func rewriteValueRISCV64_OpAddr(v *Value) bool {
730 // match: (Addr {sym} base)
731 // result: (MOVaddr {sym} [0] base)
733 sym := auxToSym(v.Aux)
735 v.reset(OpRISCV64MOVaddr)
736 v.AuxInt = int32ToAuxInt(0)
737 v.Aux = symToAux(sym)
742 func rewriteValueRISCV64_OpAtomicAnd8(v *Value) bool {
747 typ := &b.Func.Config.Types
748 // match: (AtomicAnd8 ptr val mem)
749 // result: (LoweredAtomicAnd32 (ANDI <typ.Uintptr> [^3] ptr) (NOT <typ.UInt32> (SLL <typ.UInt32> (XORI <typ.UInt32> [0xff] (ZeroExt8to32 val)) (SLLI <typ.UInt64> [3] (ANDI <typ.UInt64> [3] ptr)))) mem)
754 v.reset(OpRISCV64LoweredAtomicAnd32)
755 v0 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.Uintptr)
756 v0.AuxInt = int64ToAuxInt(^3)
758 v1 := b.NewValue0(v.Pos, OpRISCV64NOT, typ.UInt32)
759 v2 := b.NewValue0(v.Pos, OpRISCV64SLL, typ.UInt32)
760 v3 := b.NewValue0(v.Pos, OpRISCV64XORI, typ.UInt32)
761 v3.AuxInt = int64ToAuxInt(0xff)
762 v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
765 v5 := b.NewValue0(v.Pos, OpRISCV64SLLI, typ.UInt64)
766 v5.AuxInt = int64ToAuxInt(3)
767 v6 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.UInt64)
768 v6.AuxInt = int64ToAuxInt(3)
773 v.AddArg3(v0, v1, mem)
777 func rewriteValueRISCV64_OpAtomicCompareAndSwap32(v *Value) bool {
783 typ := &b.Func.Config.Types
784 // match: (AtomicCompareAndSwap32 ptr old new mem)
785 // result: (LoweredAtomicCas32 ptr (SignExt32to64 old) new mem)
791 v.reset(OpRISCV64LoweredAtomicCas32)
792 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
794 v.AddArg4(ptr, v0, new, mem)
798 func rewriteValueRISCV64_OpAtomicOr8(v *Value) bool {
803 typ := &b.Func.Config.Types
804 // match: (AtomicOr8 ptr val mem)
805 // result: (LoweredAtomicOr32 (ANDI <typ.Uintptr> [^3] ptr) (SLL <typ.UInt32> (ZeroExt8to32 val) (SLLI <typ.UInt64> [3] (ANDI <typ.UInt64> [3] ptr))) mem)
810 v.reset(OpRISCV64LoweredAtomicOr32)
811 v0 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.Uintptr)
812 v0.AuxInt = int64ToAuxInt(^3)
814 v1 := b.NewValue0(v.Pos, OpRISCV64SLL, typ.UInt32)
815 v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
817 v3 := b.NewValue0(v.Pos, OpRISCV64SLLI, typ.UInt64)
818 v3.AuxInt = int64ToAuxInt(3)
819 v4 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.UInt64)
820 v4.AuxInt = int64ToAuxInt(3)
824 v.AddArg3(v0, v1, mem)
828 func rewriteValueRISCV64_OpAvg64u(v *Value) bool {
832 // match: (Avg64u <t> x y)
833 // result: (ADD (ADD <t> (SRLI <t> [1] x) (SRLI <t> [1] y)) (ANDI <t> [1] (AND <t> x y)))
838 v.reset(OpRISCV64ADD)
839 v0 := b.NewValue0(v.Pos, OpRISCV64ADD, t)
840 v1 := b.NewValue0(v.Pos, OpRISCV64SRLI, t)
841 v1.AuxInt = int64ToAuxInt(1)
843 v2 := b.NewValue0(v.Pos, OpRISCV64SRLI, t)
844 v2.AuxInt = int64ToAuxInt(1)
847 v3 := b.NewValue0(v.Pos, OpRISCV64ANDI, t)
848 v3.AuxInt = int64ToAuxInt(1)
849 v4 := b.NewValue0(v.Pos, OpRISCV64AND, t)
856 func rewriteValueRISCV64_OpConst16(v *Value) bool {
857 // match: (Const16 [val])
858 // result: (MOVDconst [int64(val)])
860 val := auxIntToInt16(v.AuxInt)
861 v.reset(OpRISCV64MOVDconst)
862 v.AuxInt = int64ToAuxInt(int64(val))
866 func rewriteValueRISCV64_OpConst32(v *Value) bool {
867 // match: (Const32 [val])
868 // result: (MOVDconst [int64(val)])
870 val := auxIntToInt32(v.AuxInt)
871 v.reset(OpRISCV64MOVDconst)
872 v.AuxInt = int64ToAuxInt(int64(val))
876 func rewriteValueRISCV64_OpConst32F(v *Value) bool {
878 typ := &b.Func.Config.Types
879 // match: (Const32F [val])
880 // result: (FMVSX (MOVDconst [int64(math.Float32bits(val))]))
882 val := auxIntToFloat32(v.AuxInt)
883 v.reset(OpRISCV64FMVSX)
884 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
885 v0.AuxInt = int64ToAuxInt(int64(math.Float32bits(val)))
890 func rewriteValueRISCV64_OpConst64(v *Value) bool {
891 // match: (Const64 [val])
892 // result: (MOVDconst [int64(val)])
894 val := auxIntToInt64(v.AuxInt)
895 v.reset(OpRISCV64MOVDconst)
896 v.AuxInt = int64ToAuxInt(int64(val))
900 func rewriteValueRISCV64_OpConst64F(v *Value) bool {
902 typ := &b.Func.Config.Types
903 // match: (Const64F [val])
904 // result: (FMVDX (MOVDconst [int64(math.Float64bits(val))]))
906 val := auxIntToFloat64(v.AuxInt)
907 v.reset(OpRISCV64FMVDX)
908 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
909 v0.AuxInt = int64ToAuxInt(int64(math.Float64bits(val)))
914 func rewriteValueRISCV64_OpConst8(v *Value) bool {
915 // match: (Const8 [val])
916 // result: (MOVDconst [int64(val)])
918 val := auxIntToInt8(v.AuxInt)
919 v.reset(OpRISCV64MOVDconst)
920 v.AuxInt = int64ToAuxInt(int64(val))
924 func rewriteValueRISCV64_OpConstBool(v *Value) bool {
925 // match: (ConstBool [val])
926 // result: (MOVDconst [int64(b2i(val))])
928 val := auxIntToBool(v.AuxInt)
929 v.reset(OpRISCV64MOVDconst)
930 v.AuxInt = int64ToAuxInt(int64(b2i(val)))
934 func rewriteValueRISCV64_OpConstNil(v *Value) bool {
936 // result: (MOVDconst [0])
938 v.reset(OpRISCV64MOVDconst)
939 v.AuxInt = int64ToAuxInt(0)
943 func rewriteValueRISCV64_OpDiv16(v *Value) bool {
947 typ := &b.Func.Config.Types
948 // match: (Div16 x y [false])
949 // result: (DIVW (SignExt16to32 x) (SignExt16to32 y))
951 if auxIntToBool(v.AuxInt) != false {
956 v.reset(OpRISCV64DIVW)
957 v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
959 v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
966 func rewriteValueRISCV64_OpDiv16u(v *Value) bool {
970 typ := &b.Func.Config.Types
971 // match: (Div16u x y)
972 // result: (DIVUW (ZeroExt16to32 x) (ZeroExt16to32 y))
976 v.reset(OpRISCV64DIVUW)
977 v0 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
979 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
985 func rewriteValueRISCV64_OpDiv32(v *Value) bool {
988 // match: (Div32 x y [false])
989 // result: (DIVW x y)
991 if auxIntToBool(v.AuxInt) != false {
996 v.reset(OpRISCV64DIVW)
1002 func rewriteValueRISCV64_OpDiv64(v *Value) bool {
1005 // match: (Div64 x y [false])
1006 // result: (DIV x y)
1008 if auxIntToBool(v.AuxInt) != false {
1013 v.reset(OpRISCV64DIV)
1019 func rewriteValueRISCV64_OpDiv8(v *Value) bool {
1023 typ := &b.Func.Config.Types
1024 // match: (Div8 x y)
1025 // result: (DIVW (SignExt8to32 x) (SignExt8to32 y))
1029 v.reset(OpRISCV64DIVW)
1030 v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
1032 v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
1038 func rewriteValueRISCV64_OpDiv8u(v *Value) bool {
1042 typ := &b.Func.Config.Types
1043 // match: (Div8u x y)
1044 // result: (DIVUW (ZeroExt8to32 x) (ZeroExt8to32 y))
1048 v.reset(OpRISCV64DIVUW)
1049 v0 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
1051 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
1057 func rewriteValueRISCV64_OpEq16(v *Value) bool {
1061 typ := &b.Func.Config.Types
1062 // match: (Eq16 x y)
1063 // result: (SEQZ (SUB <x.Type> (ZeroExt16to64 x) (ZeroExt16to64 y)))
1067 v.reset(OpRISCV64SEQZ)
1068 v0 := b.NewValue0(v.Pos, OpRISCV64SUB, x.Type)
1069 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
1071 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
1078 func rewriteValueRISCV64_OpEq32(v *Value) bool {
1082 typ := &b.Func.Config.Types
1083 // match: (Eq32 x y)
1084 // result: (SEQZ (SUB <x.Type> (ZeroExt32to64 x) (ZeroExt32to64 y)))
1088 v.reset(OpRISCV64SEQZ)
1089 v0 := b.NewValue0(v.Pos, OpRISCV64SUB, x.Type)
1090 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1092 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1099 func rewriteValueRISCV64_OpEq64(v *Value) bool {
1103 // match: (Eq64 x y)
1104 // result: (SEQZ (SUB <x.Type> x y))
1108 v.reset(OpRISCV64SEQZ)
1109 v0 := b.NewValue0(v.Pos, OpRISCV64SUB, x.Type)
1115 func rewriteValueRISCV64_OpEq8(v *Value) bool {
1119 typ := &b.Func.Config.Types
1121 // result: (SEQZ (SUB <x.Type> (ZeroExt8to64 x) (ZeroExt8to64 y)))
1125 v.reset(OpRISCV64SEQZ)
1126 v0 := b.NewValue0(v.Pos, OpRISCV64SUB, x.Type)
1127 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
1129 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
1136 func rewriteValueRISCV64_OpEqB(v *Value) bool {
1140 typ := &b.Func.Config.Types
1142 // result: (SEQZ (SUB <typ.Bool> x y))
1146 v.reset(OpRISCV64SEQZ)
1147 v0 := b.NewValue0(v.Pos, OpRISCV64SUB, typ.Bool)
1153 func rewriteValueRISCV64_OpEqPtr(v *Value) bool {
1157 typ := &b.Func.Config.Types
1158 // match: (EqPtr x y)
1159 // result: (SEQZ (SUB <typ.Uintptr> x y))
1163 v.reset(OpRISCV64SEQZ)
1164 v0 := b.NewValue0(v.Pos, OpRISCV64SUB, typ.Uintptr)
1170 func rewriteValueRISCV64_OpHmul32(v *Value) bool {
1174 typ := &b.Func.Config.Types
1175 // match: (Hmul32 x y)
1176 // result: (SRAI [32] (MUL (SignExt32to64 x) (SignExt32to64 y)))
1180 v.reset(OpRISCV64SRAI)
1181 v.AuxInt = int64ToAuxInt(32)
1182 v0 := b.NewValue0(v.Pos, OpRISCV64MUL, typ.Int64)
1183 v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
1185 v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
1192 func rewriteValueRISCV64_OpHmul32u(v *Value) bool {
1196 typ := &b.Func.Config.Types
1197 // match: (Hmul32u x y)
1198 // result: (SRLI [32] (MUL (ZeroExt32to64 x) (ZeroExt32to64 y)))
1202 v.reset(OpRISCV64SRLI)
1203 v.AuxInt = int64ToAuxInt(32)
1204 v0 := b.NewValue0(v.Pos, OpRISCV64MUL, typ.Int64)
1205 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1207 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1214 func rewriteValueRISCV64_OpLeq16(v *Value) bool {
1218 typ := &b.Func.Config.Types
1219 // match: (Leq16 x y)
1220 // result: (Not (Less16 y x))
1225 v0 := b.NewValue0(v.Pos, OpLess16, typ.Bool)
1231 func rewriteValueRISCV64_OpLeq16U(v *Value) bool {
1235 typ := &b.Func.Config.Types
1236 // match: (Leq16U x y)
1237 // result: (Not (Less16U y x))
1242 v0 := b.NewValue0(v.Pos, OpLess16U, typ.Bool)
1248 func rewriteValueRISCV64_OpLeq32(v *Value) bool {
1252 typ := &b.Func.Config.Types
1253 // match: (Leq32 x y)
1254 // result: (Not (Less32 y x))
1259 v0 := b.NewValue0(v.Pos, OpLess32, typ.Bool)
1265 func rewriteValueRISCV64_OpLeq32U(v *Value) bool {
1269 typ := &b.Func.Config.Types
1270 // match: (Leq32U x y)
1271 // result: (Not (Less32U y x))
1276 v0 := b.NewValue0(v.Pos, OpLess32U, typ.Bool)
1282 func rewriteValueRISCV64_OpLeq64(v *Value) bool {
1286 typ := &b.Func.Config.Types
1287 // match: (Leq64 x y)
1288 // result: (Not (Less64 y x))
1293 v0 := b.NewValue0(v.Pos, OpLess64, typ.Bool)
1299 func rewriteValueRISCV64_OpLeq64U(v *Value) bool {
1303 typ := &b.Func.Config.Types
1304 // match: (Leq64U x y)
1305 // result: (Not (Less64U y x))
1310 v0 := b.NewValue0(v.Pos, OpLess64U, typ.Bool)
1316 func rewriteValueRISCV64_OpLeq8(v *Value) bool {
1320 typ := &b.Func.Config.Types
1321 // match: (Leq8 x y)
1322 // result: (Not (Less8 y x))
1327 v0 := b.NewValue0(v.Pos, OpLess8, typ.Bool)
1333 func rewriteValueRISCV64_OpLeq8U(v *Value) bool {
1337 typ := &b.Func.Config.Types
1338 // match: (Leq8U x y)
1339 // result: (Not (Less8U y x))
1344 v0 := b.NewValue0(v.Pos, OpLess8U, typ.Bool)
1350 func rewriteValueRISCV64_OpLess16(v *Value) bool {
1354 typ := &b.Func.Config.Types
1355 // match: (Less16 x y)
1356 // result: (SLT (SignExt16to64 x) (SignExt16to64 y))
1360 v.reset(OpRISCV64SLT)
1361 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
1363 v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
1369 func rewriteValueRISCV64_OpLess16U(v *Value) bool {
1373 typ := &b.Func.Config.Types
1374 // match: (Less16U x y)
1375 // result: (SLTU (ZeroExt16to64 x) (ZeroExt16to64 y))
1379 v.reset(OpRISCV64SLTU)
1380 v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
1382 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
1388 func rewriteValueRISCV64_OpLess32(v *Value) bool {
1392 typ := &b.Func.Config.Types
1393 // match: (Less32 x y)
1394 // result: (SLT (SignExt32to64 x) (SignExt32to64 y))
1398 v.reset(OpRISCV64SLT)
1399 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
1401 v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
1407 func rewriteValueRISCV64_OpLess32U(v *Value) bool {
1411 typ := &b.Func.Config.Types
1412 // match: (Less32U x y)
1413 // result: (SLTU (ZeroExt32to64 x) (ZeroExt32to64 y))
1417 v.reset(OpRISCV64SLTU)
1418 v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1420 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1426 func rewriteValueRISCV64_OpLess8(v *Value) bool {
1430 typ := &b.Func.Config.Types
1431 // match: (Less8 x y)
1432 // result: (SLT (SignExt8to64 x) (SignExt8to64 y))
1436 v.reset(OpRISCV64SLT)
1437 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
1439 v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
1445 func rewriteValueRISCV64_OpLess8U(v *Value) bool {
1449 typ := &b.Func.Config.Types
1450 // match: (Less8U x y)
1451 // result: (SLTU (ZeroExt8to64 x) (ZeroExt8to64 y))
1455 v.reset(OpRISCV64SLTU)
1456 v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
1458 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
1464 func rewriteValueRISCV64_OpLoad(v *Value) bool {
1467 // match: (Load <t> ptr mem)
1468 // cond: t.IsBoolean()
1469 // result: (MOVBUload ptr mem)
1474 if !(t.IsBoolean()) {
1477 v.reset(OpRISCV64MOVBUload)
1481 // match: (Load <t> ptr mem)
1482 // cond: ( is8BitInt(t) && t.IsSigned())
1483 // result: (MOVBload ptr mem)
1488 if !(is8BitInt(t) && t.IsSigned()) {
1491 v.reset(OpRISCV64MOVBload)
1495 // match: (Load <t> ptr mem)
1496 // cond: ( is8BitInt(t) && !t.IsSigned())
1497 // result: (MOVBUload ptr mem)
1502 if !(is8BitInt(t) && !t.IsSigned()) {
1505 v.reset(OpRISCV64MOVBUload)
1509 // match: (Load <t> ptr mem)
1510 // cond: (is16BitInt(t) && t.IsSigned())
1511 // result: (MOVHload ptr mem)
1516 if !(is16BitInt(t) && t.IsSigned()) {
1519 v.reset(OpRISCV64MOVHload)
1523 // match: (Load <t> ptr mem)
1524 // cond: (is16BitInt(t) && !t.IsSigned())
1525 // result: (MOVHUload ptr mem)
1530 if !(is16BitInt(t) && !t.IsSigned()) {
1533 v.reset(OpRISCV64MOVHUload)
1537 // match: (Load <t> ptr mem)
1538 // cond: (is32BitInt(t) && t.IsSigned())
1539 // result: (MOVWload ptr mem)
1544 if !(is32BitInt(t) && t.IsSigned()) {
1547 v.reset(OpRISCV64MOVWload)
1551 // match: (Load <t> ptr mem)
1552 // cond: (is32BitInt(t) && !t.IsSigned())
1553 // result: (MOVWUload ptr mem)
1558 if !(is32BitInt(t) && !t.IsSigned()) {
1561 v.reset(OpRISCV64MOVWUload)
1565 // match: (Load <t> ptr mem)
1566 // cond: (is64BitInt(t) || isPtr(t))
1567 // result: (MOVDload ptr mem)
1572 if !(is64BitInt(t) || isPtr(t)) {
1575 v.reset(OpRISCV64MOVDload)
1579 // match: (Load <t> ptr mem)
1580 // cond: is32BitFloat(t)
1581 // result: (FMOVWload ptr mem)
1586 if !(is32BitFloat(t)) {
1589 v.reset(OpRISCV64FMOVWload)
1593 // match: (Load <t> ptr mem)
1594 // cond: is64BitFloat(t)
1595 // result: (FMOVDload ptr mem)
1600 if !(is64BitFloat(t)) {
1603 v.reset(OpRISCV64FMOVDload)
1609 func rewriteValueRISCV64_OpLocalAddr(v *Value) bool {
1613 typ := &b.Func.Config.Types
1614 // match: (LocalAddr <t> {sym} base mem)
1615 // cond: t.Elem().HasPointers()
1616 // result: (MOVaddr {sym} (SPanchored base mem))
1619 sym := auxToSym(v.Aux)
1622 if !(t.Elem().HasPointers()) {
1625 v.reset(OpRISCV64MOVaddr)
1626 v.Aux = symToAux(sym)
1627 v0 := b.NewValue0(v.Pos, OpSPanchored, typ.Uintptr)
1628 v0.AddArg2(base, mem)
1632 // match: (LocalAddr <t> {sym} base _)
1633 // cond: !t.Elem().HasPointers()
1634 // result: (MOVaddr {sym} base)
1637 sym := auxToSym(v.Aux)
1639 if !(!t.Elem().HasPointers()) {
1642 v.reset(OpRISCV64MOVaddr)
1643 v.Aux = symToAux(sym)
1649 func rewriteValueRISCV64_OpLsh16x16(v *Value) bool {
1653 typ := &b.Func.Config.Types
1654 // match: (Lsh16x16 <t> x y)
1655 // cond: !shiftIsBounded(v)
1656 // result: (AND (SLL <t> x y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
1661 if !(!shiftIsBounded(v)) {
1664 v.reset(OpRISCV64AND)
1665 v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
1667 v1 := b.NewValue0(v.Pos, OpNeg16, t)
1668 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
1669 v2.AuxInt = int64ToAuxInt(64)
1670 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
1677 // match: (Lsh16x16 x y)
1678 // cond: shiftIsBounded(v)
1679 // result: (SLL x y)
1683 if !(shiftIsBounded(v)) {
1686 v.reset(OpRISCV64SLL)
1692 func rewriteValueRISCV64_OpLsh16x32(v *Value) bool {
1696 typ := &b.Func.Config.Types
1697 // match: (Lsh16x32 <t> x y)
1698 // cond: !shiftIsBounded(v)
1699 // result: (AND (SLL <t> x y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
1704 if !(!shiftIsBounded(v)) {
1707 v.reset(OpRISCV64AND)
1708 v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
1710 v1 := b.NewValue0(v.Pos, OpNeg16, t)
1711 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
1712 v2.AuxInt = int64ToAuxInt(64)
1713 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1720 // match: (Lsh16x32 x y)
1721 // cond: shiftIsBounded(v)
1722 // result: (SLL x y)
1726 if !(shiftIsBounded(v)) {
1729 v.reset(OpRISCV64SLL)
1735 func rewriteValueRISCV64_OpLsh16x64(v *Value) bool {
1739 // match: (Lsh16x64 <t> x y)
1740 // cond: !shiftIsBounded(v)
1741 // result: (AND (SLL <t> x y) (Neg16 <t> (SLTIU <t> [64] y)))
1746 if !(!shiftIsBounded(v)) {
1749 v.reset(OpRISCV64AND)
1750 v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
1752 v1 := b.NewValue0(v.Pos, OpNeg16, t)
1753 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
1754 v2.AuxInt = int64ToAuxInt(64)
1760 // match: (Lsh16x64 x y)
1761 // cond: shiftIsBounded(v)
1762 // result: (SLL x y)
1766 if !(shiftIsBounded(v)) {
1769 v.reset(OpRISCV64SLL)
1775 func rewriteValueRISCV64_OpLsh16x8(v *Value) bool {
1779 typ := &b.Func.Config.Types
1780 // match: (Lsh16x8 <t> x y)
1781 // cond: !shiftIsBounded(v)
1782 // result: (AND (SLL <t> x y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
1787 if !(!shiftIsBounded(v)) {
1790 v.reset(OpRISCV64AND)
1791 v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
1793 v1 := b.NewValue0(v.Pos, OpNeg16, t)
1794 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
1795 v2.AuxInt = int64ToAuxInt(64)
1796 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
1803 // match: (Lsh16x8 x y)
1804 // cond: shiftIsBounded(v)
1805 // result: (SLL x y)
1809 if !(shiftIsBounded(v)) {
1812 v.reset(OpRISCV64SLL)
1818 func rewriteValueRISCV64_OpLsh32x16(v *Value) bool {
1822 typ := &b.Func.Config.Types
1823 // match: (Lsh32x16 <t> x y)
1824 // cond: !shiftIsBounded(v)
1825 // result: (AND (SLL <t> x y) (Neg32 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
1830 if !(!shiftIsBounded(v)) {
1833 v.reset(OpRISCV64AND)
1834 v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
1836 v1 := b.NewValue0(v.Pos, OpNeg32, t)
1837 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
1838 v2.AuxInt = int64ToAuxInt(64)
1839 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
1846 // match: (Lsh32x16 x y)
1847 // cond: shiftIsBounded(v)
1848 // result: (SLL x y)
1852 if !(shiftIsBounded(v)) {
1855 v.reset(OpRISCV64SLL)
1861 func rewriteValueRISCV64_OpLsh32x32(v *Value) bool {
1865 typ := &b.Func.Config.Types
1866 // match: (Lsh32x32 <t> x y)
1867 // cond: !shiftIsBounded(v)
1868 // result: (AND (SLL <t> x y) (Neg32 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
1873 if !(!shiftIsBounded(v)) {
1876 v.reset(OpRISCV64AND)
1877 v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
1879 v1 := b.NewValue0(v.Pos, OpNeg32, t)
1880 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
1881 v2.AuxInt = int64ToAuxInt(64)
1882 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1889 // match: (Lsh32x32 x y)
1890 // cond: shiftIsBounded(v)
1891 // result: (SLL x y)
1895 if !(shiftIsBounded(v)) {
1898 v.reset(OpRISCV64SLL)
1904 func rewriteValueRISCV64_OpLsh32x64(v *Value) bool {
1908 // match: (Lsh32x64 <t> x y)
1909 // cond: !shiftIsBounded(v)
1910 // result: (AND (SLL <t> x y) (Neg32 <t> (SLTIU <t> [64] y)))
1915 if !(!shiftIsBounded(v)) {
1918 v.reset(OpRISCV64AND)
1919 v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
1921 v1 := b.NewValue0(v.Pos, OpNeg32, t)
1922 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
1923 v2.AuxInt = int64ToAuxInt(64)
1929 // match: (Lsh32x64 x y)
1930 // cond: shiftIsBounded(v)
1931 // result: (SLL x y)
1935 if !(shiftIsBounded(v)) {
1938 v.reset(OpRISCV64SLL)
1944 func rewriteValueRISCV64_OpLsh32x8(v *Value) bool {
1948 typ := &b.Func.Config.Types
1949 // match: (Lsh32x8 <t> x y)
1950 // cond: !shiftIsBounded(v)
1951 // result: (AND (SLL <t> x y) (Neg32 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
1956 if !(!shiftIsBounded(v)) {
1959 v.reset(OpRISCV64AND)
1960 v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
1962 v1 := b.NewValue0(v.Pos, OpNeg32, t)
1963 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
1964 v2.AuxInt = int64ToAuxInt(64)
1965 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
1972 // match: (Lsh32x8 x y)
1973 // cond: shiftIsBounded(v)
1974 // result: (SLL x y)
1978 if !(shiftIsBounded(v)) {
1981 v.reset(OpRISCV64SLL)
1987 func rewriteValueRISCV64_OpLsh64x16(v *Value) bool {
1991 typ := &b.Func.Config.Types
1992 // match: (Lsh64x16 <t> x y)
1993 // cond: !shiftIsBounded(v)
1994 // result: (AND (SLL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
1999 if !(!shiftIsBounded(v)) {
2002 v.reset(OpRISCV64AND)
2003 v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
2005 v1 := b.NewValue0(v.Pos, OpNeg64, t)
2006 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
2007 v2.AuxInt = int64ToAuxInt(64)
2008 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
2015 // match: (Lsh64x16 x y)
2016 // cond: shiftIsBounded(v)
2017 // result: (SLL x y)
2021 if !(shiftIsBounded(v)) {
2024 v.reset(OpRISCV64SLL)
2030 func rewriteValueRISCV64_OpLsh64x32(v *Value) bool {
2034 typ := &b.Func.Config.Types
2035 // match: (Lsh64x32 <t> x y)
2036 // cond: !shiftIsBounded(v)
2037 // result: (AND (SLL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
2042 if !(!shiftIsBounded(v)) {
2045 v.reset(OpRISCV64AND)
2046 v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
2048 v1 := b.NewValue0(v.Pos, OpNeg64, t)
2049 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
2050 v2.AuxInt = int64ToAuxInt(64)
2051 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
2058 // match: (Lsh64x32 x y)
2059 // cond: shiftIsBounded(v)
2060 // result: (SLL x y)
2064 if !(shiftIsBounded(v)) {
2067 v.reset(OpRISCV64SLL)
2073 func rewriteValueRISCV64_OpLsh64x64(v *Value) bool {
2077 // match: (Lsh64x64 <t> x y)
2078 // cond: !shiftIsBounded(v)
2079 // result: (AND (SLL <t> x y) (Neg64 <t> (SLTIU <t> [64] y)))
2084 if !(!shiftIsBounded(v)) {
2087 v.reset(OpRISCV64AND)
2088 v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
2090 v1 := b.NewValue0(v.Pos, OpNeg64, t)
2091 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
2092 v2.AuxInt = int64ToAuxInt(64)
2098 // match: (Lsh64x64 x y)
2099 // cond: shiftIsBounded(v)
2100 // result: (SLL x y)
2104 if !(shiftIsBounded(v)) {
2107 v.reset(OpRISCV64SLL)
2113 func rewriteValueRISCV64_OpLsh64x8(v *Value) bool {
2117 typ := &b.Func.Config.Types
2118 // match: (Lsh64x8 <t> x y)
2119 // cond: !shiftIsBounded(v)
2120 // result: (AND (SLL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
2125 if !(!shiftIsBounded(v)) {
2128 v.reset(OpRISCV64AND)
2129 v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
2131 v1 := b.NewValue0(v.Pos, OpNeg64, t)
2132 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
2133 v2.AuxInt = int64ToAuxInt(64)
2134 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
2141 // match: (Lsh64x8 x y)
2142 // cond: shiftIsBounded(v)
2143 // result: (SLL x y)
2147 if !(shiftIsBounded(v)) {
2150 v.reset(OpRISCV64SLL)
2156 func rewriteValueRISCV64_OpLsh8x16(v *Value) bool {
2160 typ := &b.Func.Config.Types
2161 // match: (Lsh8x16 <t> x y)
2162 // cond: !shiftIsBounded(v)
2163 // result: (AND (SLL <t> x y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
2168 if !(!shiftIsBounded(v)) {
2171 v.reset(OpRISCV64AND)
2172 v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
2174 v1 := b.NewValue0(v.Pos, OpNeg8, t)
2175 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
2176 v2.AuxInt = int64ToAuxInt(64)
2177 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
2184 // match: (Lsh8x16 x y)
2185 // cond: shiftIsBounded(v)
2186 // result: (SLL x y)
2190 if !(shiftIsBounded(v)) {
2193 v.reset(OpRISCV64SLL)
2199 func rewriteValueRISCV64_OpLsh8x32(v *Value) bool {
2203 typ := &b.Func.Config.Types
2204 // match: (Lsh8x32 <t> x y)
2205 // cond: !shiftIsBounded(v)
2206 // result: (AND (SLL <t> x y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
2211 if !(!shiftIsBounded(v)) {
2214 v.reset(OpRISCV64AND)
2215 v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
2217 v1 := b.NewValue0(v.Pos, OpNeg8, t)
2218 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
2219 v2.AuxInt = int64ToAuxInt(64)
2220 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
2227 // match: (Lsh8x32 x y)
2228 // cond: shiftIsBounded(v)
2229 // result: (SLL x y)
2233 if !(shiftIsBounded(v)) {
2236 v.reset(OpRISCV64SLL)
2242 func rewriteValueRISCV64_OpLsh8x64(v *Value) bool {
2246 // match: (Lsh8x64 <t> x y)
2247 // cond: !shiftIsBounded(v)
2248 // result: (AND (SLL <t> x y) (Neg8 <t> (SLTIU <t> [64] y)))
2253 if !(!shiftIsBounded(v)) {
2256 v.reset(OpRISCV64AND)
2257 v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
2259 v1 := b.NewValue0(v.Pos, OpNeg8, t)
2260 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
2261 v2.AuxInt = int64ToAuxInt(64)
2267 // match: (Lsh8x64 x y)
2268 // cond: shiftIsBounded(v)
2269 // result: (SLL x y)
2273 if !(shiftIsBounded(v)) {
2276 v.reset(OpRISCV64SLL)
2282 func rewriteValueRISCV64_OpLsh8x8(v *Value) bool {
2286 typ := &b.Func.Config.Types
2287 // match: (Lsh8x8 <t> x y)
2288 // cond: !shiftIsBounded(v)
2289 // result: (AND (SLL <t> x y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
2294 if !(!shiftIsBounded(v)) {
2297 v.reset(OpRISCV64AND)
2298 v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
2300 v1 := b.NewValue0(v.Pos, OpNeg8, t)
2301 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
2302 v2.AuxInt = int64ToAuxInt(64)
2303 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
2310 // match: (Lsh8x8 x y)
2311 // cond: shiftIsBounded(v)
2312 // result: (SLL x y)
2316 if !(shiftIsBounded(v)) {
2319 v.reset(OpRISCV64SLL)
2325 func rewriteValueRISCV64_OpMod16(v *Value) bool {
2329 typ := &b.Func.Config.Types
2330 // match: (Mod16 x y [false])
2331 // result: (REMW (SignExt16to32 x) (SignExt16to32 y))
2333 if auxIntToBool(v.AuxInt) != false {
2338 v.reset(OpRISCV64REMW)
2339 v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
2341 v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
2348 func rewriteValueRISCV64_OpMod16u(v *Value) bool {
2352 typ := &b.Func.Config.Types
2353 // match: (Mod16u x y)
2354 // result: (REMUW (ZeroExt16to32 x) (ZeroExt16to32 y))
2358 v.reset(OpRISCV64REMUW)
2359 v0 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
2361 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
2367 func rewriteValueRISCV64_OpMod32(v *Value) bool {
2370 // match: (Mod32 x y [false])
2371 // result: (REMW x y)
2373 if auxIntToBool(v.AuxInt) != false {
2378 v.reset(OpRISCV64REMW)
2384 func rewriteValueRISCV64_OpMod64(v *Value) bool {
2387 // match: (Mod64 x y [false])
2388 // result: (REM x y)
2390 if auxIntToBool(v.AuxInt) != false {
2395 v.reset(OpRISCV64REM)
2401 func rewriteValueRISCV64_OpMod8(v *Value) bool {
2405 typ := &b.Func.Config.Types
2406 // match: (Mod8 x y)
2407 // result: (REMW (SignExt8to32 x) (SignExt8to32 y))
2411 v.reset(OpRISCV64REMW)
2412 v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
2414 v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
2420 func rewriteValueRISCV64_OpMod8u(v *Value) bool {
2424 typ := &b.Func.Config.Types
2425 // match: (Mod8u x y)
2426 // result: (REMUW (ZeroExt8to32 x) (ZeroExt8to32 y))
2430 v.reset(OpRISCV64REMUW)
2431 v0 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
2433 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
2439 func rewriteValueRISCV64_OpMove(v *Value) bool {
2444 config := b.Func.Config
2445 typ := &b.Func.Config.Types
2446 // match: (Move [0] _ _ mem)
2449 if auxIntToInt64(v.AuxInt) != 0 {
2456 // match: (Move [1] dst src mem)
2457 // result: (MOVBstore dst (MOVBload src mem) mem)
2459 if auxIntToInt64(v.AuxInt) != 1 {
2465 v.reset(OpRISCV64MOVBstore)
2466 v0 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
2467 v0.AddArg2(src, mem)
2468 v.AddArg3(dst, v0, mem)
2471 // match: (Move [2] {t} dst src mem)
2472 // cond: t.Alignment()%2 == 0
2473 // result: (MOVHstore dst (MOVHload src mem) mem)
2475 if auxIntToInt64(v.AuxInt) != 2 {
2478 t := auxToType(v.Aux)
2482 if !(t.Alignment()%2 == 0) {
2485 v.reset(OpRISCV64MOVHstore)
2486 v0 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
2487 v0.AddArg2(src, mem)
2488 v.AddArg3(dst, v0, mem)
2491 // match: (Move [2] dst src mem)
2492 // result: (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem))
2494 if auxIntToInt64(v.AuxInt) != 2 {
2500 v.reset(OpRISCV64MOVBstore)
2501 v.AuxInt = int32ToAuxInt(1)
2502 v0 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
2503 v0.AuxInt = int32ToAuxInt(1)
2504 v0.AddArg2(src, mem)
2505 v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
2506 v2 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
2507 v2.AddArg2(src, mem)
2508 v1.AddArg3(dst, v2, mem)
2509 v.AddArg3(dst, v0, v1)
2512 // match: (Move [4] {t} dst src mem)
2513 // cond: t.Alignment()%4 == 0
2514 // result: (MOVWstore dst (MOVWload src mem) mem)
2516 if auxIntToInt64(v.AuxInt) != 4 {
2519 t := auxToType(v.Aux)
2523 if !(t.Alignment()%4 == 0) {
2526 v.reset(OpRISCV64MOVWstore)
2527 v0 := b.NewValue0(v.Pos, OpRISCV64MOVWload, typ.Int32)
2528 v0.AddArg2(src, mem)
2529 v.AddArg3(dst, v0, mem)
2532 // match: (Move [4] {t} dst src mem)
2533 // cond: t.Alignment()%2 == 0
2534 // result: (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem))
2536 if auxIntToInt64(v.AuxInt) != 4 {
2539 t := auxToType(v.Aux)
2543 if !(t.Alignment()%2 == 0) {
2546 v.reset(OpRISCV64MOVHstore)
2547 v.AuxInt = int32ToAuxInt(2)
2548 v0 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
2549 v0.AuxInt = int32ToAuxInt(2)
2550 v0.AddArg2(src, mem)
2551 v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
2552 v2 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
2553 v2.AddArg2(src, mem)
2554 v1.AddArg3(dst, v2, mem)
2555 v.AddArg3(dst, v0, v1)
2558 // match: (Move [4] dst src mem)
2559 // result: (MOVBstore [3] dst (MOVBload [3] src mem) (MOVBstore [2] dst (MOVBload [2] src mem) (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem))))
2561 if auxIntToInt64(v.AuxInt) != 4 {
2567 v.reset(OpRISCV64MOVBstore)
2568 v.AuxInt = int32ToAuxInt(3)
2569 v0 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
2570 v0.AuxInt = int32ToAuxInt(3)
2571 v0.AddArg2(src, mem)
2572 v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
2573 v1.AuxInt = int32ToAuxInt(2)
2574 v2 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
2575 v2.AuxInt = int32ToAuxInt(2)
2576 v2.AddArg2(src, mem)
2577 v3 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
2578 v3.AuxInt = int32ToAuxInt(1)
2579 v4 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
2580 v4.AuxInt = int32ToAuxInt(1)
2581 v4.AddArg2(src, mem)
2582 v5 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
2583 v6 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
2584 v6.AddArg2(src, mem)
2585 v5.AddArg3(dst, v6, mem)
2586 v3.AddArg3(dst, v4, v5)
2587 v1.AddArg3(dst, v2, v3)
2588 v.AddArg3(dst, v0, v1)
2591 // match: (Move [8] {t} dst src mem)
2592 // cond: t.Alignment()%8 == 0
2593 // result: (MOVDstore dst (MOVDload src mem) mem)
2595 if auxIntToInt64(v.AuxInt) != 8 {
2598 t := auxToType(v.Aux)
2602 if !(t.Alignment()%8 == 0) {
2605 v.reset(OpRISCV64MOVDstore)
2606 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
2607 v0.AddArg2(src, mem)
2608 v.AddArg3(dst, v0, mem)
2611 // match: (Move [8] {t} dst src mem)
2612 // cond: t.Alignment()%4 == 0
2613 // result: (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem))
2615 if auxIntToInt64(v.AuxInt) != 8 {
2618 t := auxToType(v.Aux)
2622 if !(t.Alignment()%4 == 0) {
2625 v.reset(OpRISCV64MOVWstore)
2626 v.AuxInt = int32ToAuxInt(4)
2627 v0 := b.NewValue0(v.Pos, OpRISCV64MOVWload, typ.Int32)
2628 v0.AuxInt = int32ToAuxInt(4)
2629 v0.AddArg2(src, mem)
2630 v1 := b.NewValue0(v.Pos, OpRISCV64MOVWstore, types.TypeMem)
2631 v2 := b.NewValue0(v.Pos, OpRISCV64MOVWload, typ.Int32)
2632 v2.AddArg2(src, mem)
2633 v1.AddArg3(dst, v2, mem)
2634 v.AddArg3(dst, v0, v1)
2637 // match: (Move [8] {t} dst src mem)
2638 // cond: t.Alignment()%2 == 0
2639 // result: (MOVHstore [6] dst (MOVHload [6] src mem) (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem))))
2641 if auxIntToInt64(v.AuxInt) != 8 {
2644 t := auxToType(v.Aux)
2648 if !(t.Alignment()%2 == 0) {
2651 v.reset(OpRISCV64MOVHstore)
2652 v.AuxInt = int32ToAuxInt(6)
2653 v0 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
2654 v0.AuxInt = int32ToAuxInt(6)
2655 v0.AddArg2(src, mem)
2656 v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
2657 v1.AuxInt = int32ToAuxInt(4)
2658 v2 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
2659 v2.AuxInt = int32ToAuxInt(4)
2660 v2.AddArg2(src, mem)
2661 v3 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
2662 v3.AuxInt = int32ToAuxInt(2)
2663 v4 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
2664 v4.AuxInt = int32ToAuxInt(2)
2665 v4.AddArg2(src, mem)
2666 v5 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
2667 v6 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
2668 v6.AddArg2(src, mem)
2669 v5.AddArg3(dst, v6, mem)
2670 v3.AddArg3(dst, v4, v5)
2671 v1.AddArg3(dst, v2, v3)
2672 v.AddArg3(dst, v0, v1)
2675 // match: (Move [3] dst src mem)
2676 // result: (MOVBstore [2] dst (MOVBload [2] src mem) (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem)))
2678 if auxIntToInt64(v.AuxInt) != 3 {
2684 v.reset(OpRISCV64MOVBstore)
2685 v.AuxInt = int32ToAuxInt(2)
2686 v0 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
2687 v0.AuxInt = int32ToAuxInt(2)
2688 v0.AddArg2(src, mem)
2689 v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
2690 v1.AuxInt = int32ToAuxInt(1)
2691 v2 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
2692 v2.AuxInt = int32ToAuxInt(1)
2693 v2.AddArg2(src, mem)
2694 v3 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
2695 v4 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
2696 v4.AddArg2(src, mem)
2697 v3.AddArg3(dst, v4, mem)
2698 v1.AddArg3(dst, v2, v3)
2699 v.AddArg3(dst, v0, v1)
2702 // match: (Move [6] {t} dst src mem)
2703 // cond: t.Alignment()%2 == 0
2704 // result: (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem)))
2706 if auxIntToInt64(v.AuxInt) != 6 {
2709 t := auxToType(v.Aux)
2713 if !(t.Alignment()%2 == 0) {
2716 v.reset(OpRISCV64MOVHstore)
2717 v.AuxInt = int32ToAuxInt(4)
2718 v0 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
2719 v0.AuxInt = int32ToAuxInt(4)
2720 v0.AddArg2(src, mem)
2721 v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
2722 v1.AuxInt = int32ToAuxInt(2)
2723 v2 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
2724 v2.AuxInt = int32ToAuxInt(2)
2725 v2.AddArg2(src, mem)
2726 v3 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
2727 v4 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
2728 v4.AddArg2(src, mem)
2729 v3.AddArg3(dst, v4, mem)
2730 v1.AddArg3(dst, v2, v3)
2731 v.AddArg3(dst, v0, v1)
2734 // match: (Move [12] {t} dst src mem)
2735 // cond: t.Alignment()%4 == 0
2736 // result: (MOVWstore [8] dst (MOVWload [8] src mem) (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem)))
2738 if auxIntToInt64(v.AuxInt) != 12 {
2741 t := auxToType(v.Aux)
2745 if !(t.Alignment()%4 == 0) {
2748 v.reset(OpRISCV64MOVWstore)
2749 v.AuxInt = int32ToAuxInt(8)
2750 v0 := b.NewValue0(v.Pos, OpRISCV64MOVWload, typ.Int32)
2751 v0.AuxInt = int32ToAuxInt(8)
2752 v0.AddArg2(src, mem)
2753 v1 := b.NewValue0(v.Pos, OpRISCV64MOVWstore, types.TypeMem)
2754 v1.AuxInt = int32ToAuxInt(4)
2755 v2 := b.NewValue0(v.Pos, OpRISCV64MOVWload, typ.Int32)
2756 v2.AuxInt = int32ToAuxInt(4)
2757 v2.AddArg2(src, mem)
2758 v3 := b.NewValue0(v.Pos, OpRISCV64MOVWstore, types.TypeMem)
2759 v4 := b.NewValue0(v.Pos, OpRISCV64MOVWload, typ.Int32)
2760 v4.AddArg2(src, mem)
2761 v3.AddArg3(dst, v4, mem)
2762 v1.AddArg3(dst, v2, v3)
2763 v.AddArg3(dst, v0, v1)
2766 // match: (Move [16] {t} dst src mem)
2767 // cond: t.Alignment()%8 == 0
2768 // result: (MOVDstore [8] dst (MOVDload [8] src mem) (MOVDstore dst (MOVDload src mem) mem))
2770 if auxIntToInt64(v.AuxInt) != 16 {
2773 t := auxToType(v.Aux)
2777 if !(t.Alignment()%8 == 0) {
2780 v.reset(OpRISCV64MOVDstore)
2781 v.AuxInt = int32ToAuxInt(8)
2782 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
2783 v0.AuxInt = int32ToAuxInt(8)
2784 v0.AddArg2(src, mem)
2785 v1 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
2786 v2 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
2787 v2.AddArg2(src, mem)
2788 v1.AddArg3(dst, v2, mem)
2789 v.AddArg3(dst, v0, v1)
2792 // match: (Move [24] {t} dst src mem)
2793 // cond: t.Alignment()%8 == 0
2794 // result: (MOVDstore [16] dst (MOVDload [16] src mem) (MOVDstore [8] dst (MOVDload [8] src mem) (MOVDstore dst (MOVDload src mem) mem)))
2796 if auxIntToInt64(v.AuxInt) != 24 {
2799 t := auxToType(v.Aux)
2803 if !(t.Alignment()%8 == 0) {
2806 v.reset(OpRISCV64MOVDstore)
2807 v.AuxInt = int32ToAuxInt(16)
2808 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
2809 v0.AuxInt = int32ToAuxInt(16)
2810 v0.AddArg2(src, mem)
2811 v1 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
2812 v1.AuxInt = int32ToAuxInt(8)
2813 v2 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
2814 v2.AuxInt = int32ToAuxInt(8)
2815 v2.AddArg2(src, mem)
2816 v3 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
2817 v4 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
2818 v4.AddArg2(src, mem)
2819 v3.AddArg3(dst, v4, mem)
2820 v1.AddArg3(dst, v2, v3)
2821 v.AddArg3(dst, v0, v1)
2824 // match: (Move [32] {t} dst src mem)
2825 // cond: t.Alignment()%8 == 0
2826 // result: (MOVDstore [24] dst (MOVDload [24] src mem) (MOVDstore [16] dst (MOVDload [16] src mem) (MOVDstore [8] dst (MOVDload [8] src mem) (MOVDstore dst (MOVDload src mem) mem))))
2828 if auxIntToInt64(v.AuxInt) != 32 {
2831 t := auxToType(v.Aux)
2835 if !(t.Alignment()%8 == 0) {
2838 v.reset(OpRISCV64MOVDstore)
2839 v.AuxInt = int32ToAuxInt(24)
2840 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
2841 v0.AuxInt = int32ToAuxInt(24)
2842 v0.AddArg2(src, mem)
2843 v1 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
2844 v1.AuxInt = int32ToAuxInt(16)
2845 v2 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
2846 v2.AuxInt = int32ToAuxInt(16)
2847 v2.AddArg2(src, mem)
2848 v3 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
2849 v3.AuxInt = int32ToAuxInt(8)
2850 v4 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
2851 v4.AuxInt = int32ToAuxInt(8)
2852 v4.AddArg2(src, mem)
2853 v5 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
2854 v6 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
2855 v6.AddArg2(src, mem)
2856 v5.AddArg3(dst, v6, mem)
2857 v3.AddArg3(dst, v4, v5)
2858 v1.AddArg3(dst, v2, v3)
2859 v.AddArg3(dst, v0, v1)
2862 // match: (Move [s] {t} dst src mem)
2863 // cond: s%8 == 0 && s <= 8*128 && t.Alignment()%8 == 0 && !config.noDuffDevice && logLargeCopy(v, s)
2864 // result: (DUFFCOPY [16 * (128 - s/8)] dst src mem)
2866 s := auxIntToInt64(v.AuxInt)
2867 t := auxToType(v.Aux)
2871 if !(s%8 == 0 && s <= 8*128 && t.Alignment()%8 == 0 && !config.noDuffDevice && logLargeCopy(v, s)) {
2874 v.reset(OpRISCV64DUFFCOPY)
2875 v.AuxInt = int64ToAuxInt(16 * (128 - s/8))
2876 v.AddArg3(dst, src, mem)
2879 // match: (Move [s] {t} dst src mem)
2880 // cond: (s <= 16 || logLargeCopy(v, s))
2881 // result: (LoweredMove [t.Alignment()] dst src (ADDI <src.Type> [s-moveSize(t.Alignment(), config)] src) mem)
2883 s := auxIntToInt64(v.AuxInt)
2884 t := auxToType(v.Aux)
2888 if !(s <= 16 || logLargeCopy(v, s)) {
2891 v.reset(OpRISCV64LoweredMove)
2892 v.AuxInt = int64ToAuxInt(t.Alignment())
2893 v0 := b.NewValue0(v.Pos, OpRISCV64ADDI, src.Type)
2894 v0.AuxInt = int64ToAuxInt(s - moveSize(t.Alignment(), config))
2896 v.AddArg4(dst, src, v0, mem)
2901 func rewriteValueRISCV64_OpMul16(v *Value) bool {
2905 typ := &b.Func.Config.Types
2906 // match: (Mul16 x y)
2907 // result: (MULW (SignExt16to32 x) (SignExt16to32 y))
2911 v.reset(OpRISCV64MULW)
2912 v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
2914 v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
2920 func rewriteValueRISCV64_OpMul8(v *Value) bool {
2924 typ := &b.Func.Config.Types
2925 // match: (Mul8 x y)
2926 // result: (MULW (SignExt8to32 x) (SignExt8to32 y))
2930 v.reset(OpRISCV64MULW)
2931 v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
2933 v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
2939 func rewriteValueRISCV64_OpNeq16(v *Value) bool {
2943 typ := &b.Func.Config.Types
2944 // match: (Neq16 x y)
2945 // result: (SNEZ (SUB <x.Type> (ZeroExt16to64 x) (ZeroExt16to64 y)))
2949 v.reset(OpRISCV64SNEZ)
2950 v0 := b.NewValue0(v.Pos, OpRISCV64SUB, x.Type)
2951 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
2953 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
2960 func rewriteValueRISCV64_OpNeq32(v *Value) bool {
2964 typ := &b.Func.Config.Types
2965 // match: (Neq32 x y)
2966 // result: (SNEZ (SUB <x.Type> (ZeroExt32to64 x) (ZeroExt32to64 y)))
2970 v.reset(OpRISCV64SNEZ)
2971 v0 := b.NewValue0(v.Pos, OpRISCV64SUB, x.Type)
2972 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
2974 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
2981 func rewriteValueRISCV64_OpNeq64(v *Value) bool {
2985 // match: (Neq64 x y)
2986 // result: (SNEZ (SUB <x.Type> x y))
2990 v.reset(OpRISCV64SNEZ)
2991 v0 := b.NewValue0(v.Pos, OpRISCV64SUB, x.Type)
2997 func rewriteValueRISCV64_OpNeq8(v *Value) bool {
3001 typ := &b.Func.Config.Types
3002 // match: (Neq8 x y)
3003 // result: (SNEZ (SUB <x.Type> (ZeroExt8to64 x) (ZeroExt8to64 y)))
3007 v.reset(OpRISCV64SNEZ)
3008 v0 := b.NewValue0(v.Pos, OpRISCV64SUB, x.Type)
3009 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
3011 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
3018 func rewriteValueRISCV64_OpNeqB(v *Value) bool {
3022 typ := &b.Func.Config.Types
3023 // match: (NeqB x y)
3024 // result: (SNEZ (SUB <typ.Bool> x y))
3028 v.reset(OpRISCV64SNEZ)
3029 v0 := b.NewValue0(v.Pos, OpRISCV64SUB, typ.Bool)
3035 func rewriteValueRISCV64_OpNeqPtr(v *Value) bool {
3039 typ := &b.Func.Config.Types
3040 // match: (NeqPtr x y)
3041 // result: (SNEZ (SUB <typ.Uintptr> x y))
3045 v.reset(OpRISCV64SNEZ)
3046 v0 := b.NewValue0(v.Pos, OpRISCV64SUB, typ.Uintptr)
3052 func rewriteValueRISCV64_OpOffPtr(v *Value) bool {
3055 typ := &b.Func.Config.Types
3056 // match: (OffPtr [off] ptr:(SP))
3057 // cond: is32Bit(off)
3058 // result: (MOVaddr [int32(off)] ptr)
3060 off := auxIntToInt64(v.AuxInt)
3062 if ptr.Op != OpSP || !(is32Bit(off)) {
3065 v.reset(OpRISCV64MOVaddr)
3066 v.AuxInt = int32ToAuxInt(int32(off))
3070 // match: (OffPtr [off] ptr)
3071 // cond: is32Bit(off)
3072 // result: (ADDI [off] ptr)
3074 off := auxIntToInt64(v.AuxInt)
3076 if !(is32Bit(off)) {
3079 v.reset(OpRISCV64ADDI)
3080 v.AuxInt = int64ToAuxInt(off)
3084 // match: (OffPtr [off] ptr)
3085 // result: (ADD (MOVDconst [off]) ptr)
3087 off := auxIntToInt64(v.AuxInt)
3089 v.reset(OpRISCV64ADD)
3090 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
3091 v0.AuxInt = int64ToAuxInt(off)
3096 func rewriteValueRISCV64_OpPanicBounds(v *Value) bool {
3100 // match: (PanicBounds [kind] x y mem)
3101 // cond: boundsABI(kind) == 0
3102 // result: (LoweredPanicBoundsA [kind] x y mem)
3104 kind := auxIntToInt64(v.AuxInt)
3108 if !(boundsABI(kind) == 0) {
3111 v.reset(OpRISCV64LoweredPanicBoundsA)
3112 v.AuxInt = int64ToAuxInt(kind)
3113 v.AddArg3(x, y, mem)
3116 // match: (PanicBounds [kind] x y mem)
3117 // cond: boundsABI(kind) == 1
3118 // result: (LoweredPanicBoundsB [kind] x y mem)
3120 kind := auxIntToInt64(v.AuxInt)
3124 if !(boundsABI(kind) == 1) {
3127 v.reset(OpRISCV64LoweredPanicBoundsB)
3128 v.AuxInt = int64ToAuxInt(kind)
3129 v.AddArg3(x, y, mem)
3132 // match: (PanicBounds [kind] x y mem)
3133 // cond: boundsABI(kind) == 2
3134 // result: (LoweredPanicBoundsC [kind] x y mem)
3136 kind := auxIntToInt64(v.AuxInt)
3140 if !(boundsABI(kind) == 2) {
3143 v.reset(OpRISCV64LoweredPanicBoundsC)
3144 v.AuxInt = int64ToAuxInt(kind)
3145 v.AddArg3(x, y, mem)
3150 func rewriteValueRISCV64_OpRISCV64ADD(v *Value) bool {
3153 // match: (ADD (MOVDconst <t> [val]) x)
3154 // cond: is32Bit(val) && !t.IsPtr()
3155 // result: (ADDI [val] x)
3157 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
3158 if v_0.Op != OpRISCV64MOVDconst {
3162 val := auxIntToInt64(v_0.AuxInt)
3164 if !(is32Bit(val) && !t.IsPtr()) {
3167 v.reset(OpRISCV64ADDI)
3168 v.AuxInt = int64ToAuxInt(val)
3176 func rewriteValueRISCV64_OpRISCV64ADDI(v *Value) bool {
3178 // match: (ADDI [c] (MOVaddr [d] {s} x))
3179 // cond: is32Bit(c+int64(d))
3180 // result: (MOVaddr [int32(c)+d] {s} x)
3182 c := auxIntToInt64(v.AuxInt)
3183 if v_0.Op != OpRISCV64MOVaddr {
3186 d := auxIntToInt32(v_0.AuxInt)
3187 s := auxToSym(v_0.Aux)
3189 if !(is32Bit(c + int64(d))) {
3192 v.reset(OpRISCV64MOVaddr)
3193 v.AuxInt = int32ToAuxInt(int32(c) + d)
3198 // match: (ADDI [0] x)
3201 if auxIntToInt64(v.AuxInt) != 0 {
3208 // match: (ADDI [x] (MOVDconst [y]))
3209 // cond: is32Bit(x + y)
3210 // result: (MOVDconst [x + y])
3212 x := auxIntToInt64(v.AuxInt)
3213 if v_0.Op != OpRISCV64MOVDconst {
3216 y := auxIntToInt64(v_0.AuxInt)
3217 if !(is32Bit(x + y)) {
3220 v.reset(OpRISCV64MOVDconst)
3221 v.AuxInt = int64ToAuxInt(x + y)
3224 // match: (ADDI [x] (ADDI [y] z))
3225 // cond: is32Bit(x + y)
3226 // result: (ADDI [x + y] z)
3228 x := auxIntToInt64(v.AuxInt)
3229 if v_0.Op != OpRISCV64ADDI {
3232 y := auxIntToInt64(v_0.AuxInt)
3234 if !(is32Bit(x + y)) {
3237 v.reset(OpRISCV64ADDI)
3238 v.AuxInt = int64ToAuxInt(x + y)
3244 func rewriteValueRISCV64_OpRISCV64AND(v *Value) bool {
3247 // match: (AND (MOVDconst [val]) x)
3248 // cond: is32Bit(val)
3249 // result: (ANDI [val] x)
3251 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
3252 if v_0.Op != OpRISCV64MOVDconst {
3255 val := auxIntToInt64(v_0.AuxInt)
3257 if !(is32Bit(val)) {
3260 v.reset(OpRISCV64ANDI)
3261 v.AuxInt = int64ToAuxInt(val)
3269 func rewriteValueRISCV64_OpRISCV64ANDI(v *Value) bool {
3271 // match: (ANDI [0] x)
3272 // result: (MOVDconst [0])
3274 if auxIntToInt64(v.AuxInt) != 0 {
3277 v.reset(OpRISCV64MOVDconst)
3278 v.AuxInt = int64ToAuxInt(0)
3281 // match: (ANDI [-1] x)
3284 if auxIntToInt64(v.AuxInt) != -1 {
3291 // match: (ANDI [x] (MOVDconst [y]))
3292 // result: (MOVDconst [x & y])
3294 x := auxIntToInt64(v.AuxInt)
3295 if v_0.Op != OpRISCV64MOVDconst {
3298 y := auxIntToInt64(v_0.AuxInt)
3299 v.reset(OpRISCV64MOVDconst)
3300 v.AuxInt = int64ToAuxInt(x & y)
3303 // match: (ANDI [x] (ANDI [y] z))
3304 // result: (ANDI [x & y] z)
3306 x := auxIntToInt64(v.AuxInt)
3307 if v_0.Op != OpRISCV64ANDI {
3310 y := auxIntToInt64(v_0.AuxInt)
3312 v.reset(OpRISCV64ANDI)
3313 v.AuxInt = int64ToAuxInt(x & y)
3319 func rewriteValueRISCV64_OpRISCV64FMADDD(v *Value) bool {
3323 // match: (FMADDD neg:(FNEGD x) y z)
3324 // cond: neg.Uses == 1
3325 // result: (FNMSUBD x y z)
3327 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
3329 if neg.Op != OpRISCV64FNEGD {
3335 if !(neg.Uses == 1) {
3338 v.reset(OpRISCV64FNMSUBD)
3344 // match: (FMADDD x y neg:(FNEGD z))
3345 // cond: neg.Uses == 1
3346 // result: (FMSUBD x y z)
3351 if neg.Op != OpRISCV64FNEGD {
3355 if !(neg.Uses == 1) {
3358 v.reset(OpRISCV64FMSUBD)
3364 func rewriteValueRISCV64_OpRISCV64FMSUBD(v *Value) bool {
3368 // match: (FMSUBD neg:(FNEGD x) y z)
3369 // cond: neg.Uses == 1
3370 // result: (FNMADDD x y z)
3372 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
3374 if neg.Op != OpRISCV64FNEGD {
3380 if !(neg.Uses == 1) {
3383 v.reset(OpRISCV64FNMADDD)
3389 // match: (FMSUBD x y neg:(FNEGD z))
3390 // cond: neg.Uses == 1
3391 // result: (FMADDD x y z)
3396 if neg.Op != OpRISCV64FNEGD {
3400 if !(neg.Uses == 1) {
3403 v.reset(OpRISCV64FMADDD)
3409 func rewriteValueRISCV64_OpRISCV64FNMADDD(v *Value) bool {
3413 // match: (FNMADDD neg:(FNEGD x) y z)
3414 // cond: neg.Uses == 1
3415 // result: (FMSUBD x y z)
3417 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
3419 if neg.Op != OpRISCV64FNEGD {
3425 if !(neg.Uses == 1) {
3428 v.reset(OpRISCV64FMSUBD)
3434 // match: (FNMADDD x y neg:(FNEGD z))
3435 // cond: neg.Uses == 1
3436 // result: (FNMSUBD x y z)
3441 if neg.Op != OpRISCV64FNEGD {
3445 if !(neg.Uses == 1) {
3448 v.reset(OpRISCV64FNMSUBD)
3454 func rewriteValueRISCV64_OpRISCV64FNMSUBD(v *Value) bool {
3458 // match: (FNMSUBD neg:(FNEGD x) y z)
3459 // cond: neg.Uses == 1
3460 // result: (FMADDD x y z)
3462 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
3464 if neg.Op != OpRISCV64FNEGD {
3470 if !(neg.Uses == 1) {
3473 v.reset(OpRISCV64FMADDD)
3479 // match: (FNMSUBD x y neg:(FNEGD z))
3480 // cond: neg.Uses == 1
3481 // result: (FNMADDD x y z)
3486 if neg.Op != OpRISCV64FNEGD {
3490 if !(neg.Uses == 1) {
3493 v.reset(OpRISCV64FNMADDD)
3499 func rewriteValueRISCV64_OpRISCV64MOVBUload(v *Value) bool {
3502 // match: (MOVBUload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
3503 // cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
3504 // result: (MOVBUload [off1+off2] {mergeSym(sym1,sym2)} base mem)
3506 off1 := auxIntToInt32(v.AuxInt)
3507 sym1 := auxToSym(v.Aux)
3508 if v_0.Op != OpRISCV64MOVaddr {
3511 off2 := auxIntToInt32(v_0.AuxInt)
3512 sym2 := auxToSym(v_0.Aux)
3515 if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
3518 v.reset(OpRISCV64MOVBUload)
3519 v.AuxInt = int32ToAuxInt(off1 + off2)
3520 v.Aux = symToAux(mergeSym(sym1, sym2))
3521 v.AddArg2(base, mem)
3524 // match: (MOVBUload [off1] {sym} (ADDI [off2] base) mem)
3525 // cond: is32Bit(int64(off1)+off2)
3526 // result: (MOVBUload [off1+int32(off2)] {sym} base mem)
3528 off1 := auxIntToInt32(v.AuxInt)
3529 sym := auxToSym(v.Aux)
3530 if v_0.Op != OpRISCV64ADDI {
3533 off2 := auxIntToInt64(v_0.AuxInt)
3536 if !(is32Bit(int64(off1) + off2)) {
3539 v.reset(OpRISCV64MOVBUload)
3540 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3541 v.Aux = symToAux(sym)
3542 v.AddArg2(base, mem)
3547 func rewriteValueRISCV64_OpRISCV64MOVBUreg(v *Value) bool {
3550 // match: (MOVBUreg x:(FLES _ _))
3554 if x.Op != OpRISCV64FLES {
3560 // match: (MOVBUreg x:(FLTS _ _))
3564 if x.Op != OpRISCV64FLTS {
3570 // match: (MOVBUreg x:(FEQS _ _))
3574 if x.Op != OpRISCV64FEQS {
3580 // match: (MOVBUreg x:(FNES _ _))
3584 if x.Op != OpRISCV64FNES {
3590 // match: (MOVBUreg x:(FLED _ _))
3594 if x.Op != OpRISCV64FLED {
3600 // match: (MOVBUreg x:(FLTD _ _))
3604 if x.Op != OpRISCV64FLTD {
3610 // match: (MOVBUreg x:(FEQD _ _))
3614 if x.Op != OpRISCV64FEQD {
3620 // match: (MOVBUreg x:(FNED _ _))
3624 if x.Op != OpRISCV64FNED {
3630 // match: (MOVBUreg x:(SEQZ _))
3634 if x.Op != OpRISCV64SEQZ {
3640 // match: (MOVBUreg x:(SNEZ _))
3644 if x.Op != OpRISCV64SNEZ {
3650 // match: (MOVBUreg x:(SLT _ _))
3654 if x.Op != OpRISCV64SLT {
3660 // match: (MOVBUreg x:(SLTU _ _))
3664 if x.Op != OpRISCV64SLTU {
3670 // match: (MOVBUreg x:(ANDI [c] y))
3671 // cond: c >= 0 && int64(uint8(c)) == c
3675 if x.Op != OpRISCV64ANDI {
3678 c := auxIntToInt64(x.AuxInt)
3679 if !(c >= 0 && int64(uint8(c)) == c) {
3685 // match: (MOVBUreg (ANDI [c] x))
3687 // result: (ANDI [int64(uint8(c))] x)
3689 if v_0.Op != OpRISCV64ANDI {
3692 c := auxIntToInt64(v_0.AuxInt)
3697 v.reset(OpRISCV64ANDI)
3698 v.AuxInt = int64ToAuxInt(int64(uint8(c)))
3702 // match: (MOVBUreg (MOVDconst [c]))
3703 // result: (MOVDconst [int64(uint8(c))])
3705 if v_0.Op != OpRISCV64MOVDconst {
3708 c := auxIntToInt64(v_0.AuxInt)
3709 v.reset(OpRISCV64MOVDconst)
3710 v.AuxInt = int64ToAuxInt(int64(uint8(c)))
3713 // match: (MOVBUreg x:(MOVBUload _ _))
3714 // result: (MOVDreg x)
3717 if x.Op != OpRISCV64MOVBUload {
3720 v.reset(OpRISCV64MOVDreg)
3724 // match: (MOVBUreg x:(Select0 (LoweredAtomicLoad8 _ _)))
3725 // result: (MOVDreg x)
3728 if x.Op != OpSelect0 {
3732 if x_0.Op != OpRISCV64LoweredAtomicLoad8 {
3735 v.reset(OpRISCV64MOVDreg)
3739 // match: (MOVBUreg x:(Select0 (LoweredAtomicCas32 _ _ _ _)))
3740 // result: (MOVDreg x)
3743 if x.Op != OpSelect0 {
3747 if x_0.Op != OpRISCV64LoweredAtomicCas32 {
3750 v.reset(OpRISCV64MOVDreg)
3754 // match: (MOVBUreg x:(Select0 (LoweredAtomicCas64 _ _ _ _)))
3755 // result: (MOVDreg x)
3758 if x.Op != OpSelect0 {
3762 if x_0.Op != OpRISCV64LoweredAtomicCas64 {
3765 v.reset(OpRISCV64MOVDreg)
3769 // match: (MOVBUreg x:(MOVBUreg _))
3770 // result: (MOVDreg x)
3773 if x.Op != OpRISCV64MOVBUreg {
3776 v.reset(OpRISCV64MOVDreg)
3780 // match: (MOVBUreg <t> x:(MOVBload [off] {sym} ptr mem))
3781 // cond: x.Uses == 1 && clobber(x)
3782 // result: @x.Block (MOVBUload <t> [off] {sym} ptr mem)
3786 if x.Op != OpRISCV64MOVBload {
3789 off := auxIntToInt32(x.AuxInt)
3790 sym := auxToSym(x.Aux)
3793 if !(x.Uses == 1 && clobber(x)) {
3797 v0 := b.NewValue0(x.Pos, OpRISCV64MOVBUload, t)
3799 v0.AuxInt = int32ToAuxInt(off)
3800 v0.Aux = symToAux(sym)
3801 v0.AddArg2(ptr, mem)
3806 func rewriteValueRISCV64_OpRISCV64MOVBload(v *Value) bool {
3809 // match: (MOVBload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
3810 // cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
3811 // result: (MOVBload [off1+off2] {mergeSym(sym1,sym2)} base mem)
3813 off1 := auxIntToInt32(v.AuxInt)
3814 sym1 := auxToSym(v.Aux)
3815 if v_0.Op != OpRISCV64MOVaddr {
3818 off2 := auxIntToInt32(v_0.AuxInt)
3819 sym2 := auxToSym(v_0.Aux)
3822 if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
3825 v.reset(OpRISCV64MOVBload)
3826 v.AuxInt = int32ToAuxInt(off1 + off2)
3827 v.Aux = symToAux(mergeSym(sym1, sym2))
3828 v.AddArg2(base, mem)
3831 // match: (MOVBload [off1] {sym} (ADDI [off2] base) mem)
3832 // cond: is32Bit(int64(off1)+off2)
3833 // result: (MOVBload [off1+int32(off2)] {sym} base mem)
3835 off1 := auxIntToInt32(v.AuxInt)
3836 sym := auxToSym(v.Aux)
3837 if v_0.Op != OpRISCV64ADDI {
3840 off2 := auxIntToInt64(v_0.AuxInt)
3843 if !(is32Bit(int64(off1) + off2)) {
3846 v.reset(OpRISCV64MOVBload)
3847 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3848 v.Aux = symToAux(sym)
3849 v.AddArg2(base, mem)
3854 func rewriteValueRISCV64_OpRISCV64MOVBreg(v *Value) bool {
3857 // match: (MOVBreg x:(ANDI [c] y))
3858 // cond: c >= 0 && int64(int8(c)) == c
3862 if x.Op != OpRISCV64ANDI {
3865 c := auxIntToInt64(x.AuxInt)
3866 if !(c >= 0 && int64(int8(c)) == c) {
3872 // match: (MOVBreg (MOVDconst [c]))
3873 // result: (MOVDconst [int64(int8(c))])
3875 if v_0.Op != OpRISCV64MOVDconst {
3878 c := auxIntToInt64(v_0.AuxInt)
3879 v.reset(OpRISCV64MOVDconst)
3880 v.AuxInt = int64ToAuxInt(int64(int8(c)))
3883 // match: (MOVBreg x:(MOVBload _ _))
3884 // result: (MOVDreg x)
3887 if x.Op != OpRISCV64MOVBload {
3890 v.reset(OpRISCV64MOVDreg)
3894 // match: (MOVBreg x:(MOVBreg _))
3895 // result: (MOVDreg x)
3898 if x.Op != OpRISCV64MOVBreg {
3901 v.reset(OpRISCV64MOVDreg)
3905 // match: (MOVBreg <t> x:(MOVBUload [off] {sym} ptr mem))
3906 // cond: x.Uses == 1 && clobber(x)
3907 // result: @x.Block (MOVBload <t> [off] {sym} ptr mem)
3911 if x.Op != OpRISCV64MOVBUload {
3914 off := auxIntToInt32(x.AuxInt)
3915 sym := auxToSym(x.Aux)
3918 if !(x.Uses == 1 && clobber(x)) {
3922 v0 := b.NewValue0(x.Pos, OpRISCV64MOVBload, t)
3924 v0.AuxInt = int32ToAuxInt(off)
3925 v0.Aux = symToAux(sym)
3926 v0.AddArg2(ptr, mem)
3931 func rewriteValueRISCV64_OpRISCV64MOVBstore(v *Value) bool {
3935 // match: (MOVBstore [off1] {sym1} (MOVaddr [off2] {sym2} base) val mem)
3936 // cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
3937 // result: (MOVBstore [off1+off2] {mergeSym(sym1,sym2)} base val mem)
3939 off1 := auxIntToInt32(v.AuxInt)
3940 sym1 := auxToSym(v.Aux)
3941 if v_0.Op != OpRISCV64MOVaddr {
3944 off2 := auxIntToInt32(v_0.AuxInt)
3945 sym2 := auxToSym(v_0.Aux)
3949 if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
3952 v.reset(OpRISCV64MOVBstore)
3953 v.AuxInt = int32ToAuxInt(off1 + off2)
3954 v.Aux = symToAux(mergeSym(sym1, sym2))
3955 v.AddArg3(base, val, mem)
3958 // match: (MOVBstore [off1] {sym} (ADDI [off2] base) val mem)
3959 // cond: is32Bit(int64(off1)+off2)
3960 // result: (MOVBstore [off1+int32(off2)] {sym} base val mem)
3962 off1 := auxIntToInt32(v.AuxInt)
3963 sym := auxToSym(v.Aux)
3964 if v_0.Op != OpRISCV64ADDI {
3967 off2 := auxIntToInt64(v_0.AuxInt)
3971 if !(is32Bit(int64(off1) + off2)) {
3974 v.reset(OpRISCV64MOVBstore)
3975 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3976 v.Aux = symToAux(sym)
3977 v.AddArg3(base, val, mem)
3980 // match: (MOVBstore [off] {sym} ptr (MOVDconst [0]) mem)
3981 // result: (MOVBstorezero [off] {sym} ptr mem)
3983 off := auxIntToInt32(v.AuxInt)
3984 sym := auxToSym(v.Aux)
3986 if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
3990 v.reset(OpRISCV64MOVBstorezero)
3991 v.AuxInt = int32ToAuxInt(off)
3992 v.Aux = symToAux(sym)
3996 // match: (MOVBstore [off] {sym} ptr (MOVBreg x) mem)
3997 // result: (MOVBstore [off] {sym} ptr x mem)
3999 off := auxIntToInt32(v.AuxInt)
4000 sym := auxToSym(v.Aux)
4002 if v_1.Op != OpRISCV64MOVBreg {
4007 v.reset(OpRISCV64MOVBstore)
4008 v.AuxInt = int32ToAuxInt(off)
4009 v.Aux = symToAux(sym)
4010 v.AddArg3(ptr, x, mem)
4013 // match: (MOVBstore [off] {sym} ptr (MOVHreg x) mem)
4014 // result: (MOVBstore [off] {sym} ptr x mem)
4016 off := auxIntToInt32(v.AuxInt)
4017 sym := auxToSym(v.Aux)
4019 if v_1.Op != OpRISCV64MOVHreg {
4024 v.reset(OpRISCV64MOVBstore)
4025 v.AuxInt = int32ToAuxInt(off)
4026 v.Aux = symToAux(sym)
4027 v.AddArg3(ptr, x, mem)
4030 // match: (MOVBstore [off] {sym} ptr (MOVWreg x) mem)
4031 // result: (MOVBstore [off] {sym} ptr x mem)
4033 off := auxIntToInt32(v.AuxInt)
4034 sym := auxToSym(v.Aux)
4036 if v_1.Op != OpRISCV64MOVWreg {
4041 v.reset(OpRISCV64MOVBstore)
4042 v.AuxInt = int32ToAuxInt(off)
4043 v.Aux = symToAux(sym)
4044 v.AddArg3(ptr, x, mem)
4047 // match: (MOVBstore [off] {sym} ptr (MOVBUreg x) mem)
4048 // result: (MOVBstore [off] {sym} ptr x mem)
4050 off := auxIntToInt32(v.AuxInt)
4051 sym := auxToSym(v.Aux)
4053 if v_1.Op != OpRISCV64MOVBUreg {
4058 v.reset(OpRISCV64MOVBstore)
4059 v.AuxInt = int32ToAuxInt(off)
4060 v.Aux = symToAux(sym)
4061 v.AddArg3(ptr, x, mem)
4064 // match: (MOVBstore [off] {sym} ptr (MOVHUreg x) mem)
4065 // result: (MOVBstore [off] {sym} ptr x mem)
4067 off := auxIntToInt32(v.AuxInt)
4068 sym := auxToSym(v.Aux)
4070 if v_1.Op != OpRISCV64MOVHUreg {
4075 v.reset(OpRISCV64MOVBstore)
4076 v.AuxInt = int32ToAuxInt(off)
4077 v.Aux = symToAux(sym)
4078 v.AddArg3(ptr, x, mem)
4081 // match: (MOVBstore [off] {sym} ptr (MOVWUreg x) mem)
4082 // result: (MOVBstore [off] {sym} ptr x mem)
4084 off := auxIntToInt32(v.AuxInt)
4085 sym := auxToSym(v.Aux)
4087 if v_1.Op != OpRISCV64MOVWUreg {
4092 v.reset(OpRISCV64MOVBstore)
4093 v.AuxInt = int32ToAuxInt(off)
4094 v.Aux = symToAux(sym)
4095 v.AddArg3(ptr, x, mem)
4100 func rewriteValueRISCV64_OpRISCV64MOVBstorezero(v *Value) bool {
4103 // match: (MOVBstorezero [off1] {sym1} (MOVaddr [off2] {sym2} ptr) mem)
4104 // cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2))
4105 // result: (MOVBstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
4107 off1 := auxIntToInt32(v.AuxInt)
4108 sym1 := auxToSym(v.Aux)
4109 if v_0.Op != OpRISCV64MOVaddr {
4112 off2 := auxIntToInt32(v_0.AuxInt)
4113 sym2 := auxToSym(v_0.Aux)
4116 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
4119 v.reset(OpRISCV64MOVBstorezero)
4120 v.AuxInt = int32ToAuxInt(off1 + off2)
4121 v.Aux = symToAux(mergeSym(sym1, sym2))
4125 // match: (MOVBstorezero [off1] {sym} (ADDI [off2] ptr) mem)
4126 // cond: is32Bit(int64(off1)+off2)
4127 // result: (MOVBstorezero [off1+int32(off2)] {sym} ptr mem)
4129 off1 := auxIntToInt32(v.AuxInt)
4130 sym := auxToSym(v.Aux)
4131 if v_0.Op != OpRISCV64ADDI {
4134 off2 := auxIntToInt64(v_0.AuxInt)
4137 if !(is32Bit(int64(off1) + off2)) {
4140 v.reset(OpRISCV64MOVBstorezero)
4141 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4142 v.Aux = symToAux(sym)
4148 func rewriteValueRISCV64_OpRISCV64MOVDload(v *Value) bool {
4151 // match: (MOVDload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
4152 // cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
4153 // result: (MOVDload [off1+off2] {mergeSym(sym1,sym2)} base mem)
4155 off1 := auxIntToInt32(v.AuxInt)
4156 sym1 := auxToSym(v.Aux)
4157 if v_0.Op != OpRISCV64MOVaddr {
4160 off2 := auxIntToInt32(v_0.AuxInt)
4161 sym2 := auxToSym(v_0.Aux)
4164 if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
4167 v.reset(OpRISCV64MOVDload)
4168 v.AuxInt = int32ToAuxInt(off1 + off2)
4169 v.Aux = symToAux(mergeSym(sym1, sym2))
4170 v.AddArg2(base, mem)
4173 // match: (MOVDload [off1] {sym} (ADDI [off2] base) mem)
4174 // cond: is32Bit(int64(off1)+off2)
4175 // result: (MOVDload [off1+int32(off2)] {sym} base mem)
4177 off1 := auxIntToInt32(v.AuxInt)
4178 sym := auxToSym(v.Aux)
4179 if v_0.Op != OpRISCV64ADDI {
4182 off2 := auxIntToInt64(v_0.AuxInt)
4185 if !(is32Bit(int64(off1) + off2)) {
4188 v.reset(OpRISCV64MOVDload)
4189 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4190 v.Aux = symToAux(sym)
4191 v.AddArg2(base, mem)
4196 func rewriteValueRISCV64_OpRISCV64MOVDnop(v *Value) bool {
4198 // match: (MOVDnop (MOVDconst [c]))
4199 // result: (MOVDconst [c])
4201 if v_0.Op != OpRISCV64MOVDconst {
4204 c := auxIntToInt64(v_0.AuxInt)
4205 v.reset(OpRISCV64MOVDconst)
4206 v.AuxInt = int64ToAuxInt(c)
4211 func rewriteValueRISCV64_OpRISCV64MOVDreg(v *Value) bool {
4213 // match: (MOVDreg x)
4214 // cond: x.Uses == 1
4215 // result: (MOVDnop x)
4221 v.reset(OpRISCV64MOVDnop)
4227 func rewriteValueRISCV64_OpRISCV64MOVDstore(v *Value) bool {
4231 // match: (MOVDstore [off1] {sym1} (MOVaddr [off2] {sym2} base) val mem)
4232 // cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
4233 // result: (MOVDstore [off1+off2] {mergeSym(sym1,sym2)} base val mem)
4235 off1 := auxIntToInt32(v.AuxInt)
4236 sym1 := auxToSym(v.Aux)
4237 if v_0.Op != OpRISCV64MOVaddr {
4240 off2 := auxIntToInt32(v_0.AuxInt)
4241 sym2 := auxToSym(v_0.Aux)
4245 if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
4248 v.reset(OpRISCV64MOVDstore)
4249 v.AuxInt = int32ToAuxInt(off1 + off2)
4250 v.Aux = symToAux(mergeSym(sym1, sym2))
4251 v.AddArg3(base, val, mem)
4254 // match: (MOVDstore [off1] {sym} (ADDI [off2] base) val mem)
4255 // cond: is32Bit(int64(off1)+off2)
4256 // result: (MOVDstore [off1+int32(off2)] {sym} base val mem)
4258 off1 := auxIntToInt32(v.AuxInt)
4259 sym := auxToSym(v.Aux)
4260 if v_0.Op != OpRISCV64ADDI {
4263 off2 := auxIntToInt64(v_0.AuxInt)
4267 if !(is32Bit(int64(off1) + off2)) {
4270 v.reset(OpRISCV64MOVDstore)
4271 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4272 v.Aux = symToAux(sym)
4273 v.AddArg3(base, val, mem)
4276 // match: (MOVDstore [off] {sym} ptr (MOVDconst [0]) mem)
4277 // result: (MOVDstorezero [off] {sym} ptr mem)
4279 off := auxIntToInt32(v.AuxInt)
4280 sym := auxToSym(v.Aux)
4282 if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
4286 v.reset(OpRISCV64MOVDstorezero)
4287 v.AuxInt = int32ToAuxInt(off)
4288 v.Aux = symToAux(sym)
4294 func rewriteValueRISCV64_OpRISCV64MOVDstorezero(v *Value) bool {
4297 // match: (MOVDstorezero [off1] {sym1} (MOVaddr [off2] {sym2} ptr) mem)
4298 // cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2))
4299 // result: (MOVDstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
4301 off1 := auxIntToInt32(v.AuxInt)
4302 sym1 := auxToSym(v.Aux)
4303 if v_0.Op != OpRISCV64MOVaddr {
4306 off2 := auxIntToInt32(v_0.AuxInt)
4307 sym2 := auxToSym(v_0.Aux)
4310 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
4313 v.reset(OpRISCV64MOVDstorezero)
4314 v.AuxInt = int32ToAuxInt(off1 + off2)
4315 v.Aux = symToAux(mergeSym(sym1, sym2))
4319 // match: (MOVDstorezero [off1] {sym} (ADDI [off2] ptr) mem)
4320 // cond: is32Bit(int64(off1)+off2)
4321 // result: (MOVDstorezero [off1+int32(off2)] {sym} ptr mem)
4323 off1 := auxIntToInt32(v.AuxInt)
4324 sym := auxToSym(v.Aux)
4325 if v_0.Op != OpRISCV64ADDI {
4328 off2 := auxIntToInt64(v_0.AuxInt)
4331 if !(is32Bit(int64(off1) + off2)) {
4334 v.reset(OpRISCV64MOVDstorezero)
4335 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4336 v.Aux = symToAux(sym)
4342 func rewriteValueRISCV64_OpRISCV64MOVHUload(v *Value) bool {
4345 // match: (MOVHUload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
4346 // cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
4347 // result: (MOVHUload [off1+off2] {mergeSym(sym1,sym2)} base mem)
4349 off1 := auxIntToInt32(v.AuxInt)
4350 sym1 := auxToSym(v.Aux)
4351 if v_0.Op != OpRISCV64MOVaddr {
4354 off2 := auxIntToInt32(v_0.AuxInt)
4355 sym2 := auxToSym(v_0.Aux)
4358 if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
4361 v.reset(OpRISCV64MOVHUload)
4362 v.AuxInt = int32ToAuxInt(off1 + off2)
4363 v.Aux = symToAux(mergeSym(sym1, sym2))
4364 v.AddArg2(base, mem)
4367 // match: (MOVHUload [off1] {sym} (ADDI [off2] base) mem)
4368 // cond: is32Bit(int64(off1)+off2)
4369 // result: (MOVHUload [off1+int32(off2)] {sym} base mem)
4371 off1 := auxIntToInt32(v.AuxInt)
4372 sym := auxToSym(v.Aux)
4373 if v_0.Op != OpRISCV64ADDI {
4376 off2 := auxIntToInt64(v_0.AuxInt)
4379 if !(is32Bit(int64(off1) + off2)) {
4382 v.reset(OpRISCV64MOVHUload)
4383 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4384 v.Aux = symToAux(sym)
4385 v.AddArg2(base, mem)
4390 func rewriteValueRISCV64_OpRISCV64MOVHUreg(v *Value) bool {
4393 // match: (MOVHUreg x:(ANDI [c] y))
4394 // cond: c >= 0 && int64(uint16(c)) == c
4398 if x.Op != OpRISCV64ANDI {
4401 c := auxIntToInt64(x.AuxInt)
4402 if !(c >= 0 && int64(uint16(c)) == c) {
4408 // match: (MOVHUreg (ANDI [c] x))
4410 // result: (ANDI [int64(uint16(c))] x)
4412 if v_0.Op != OpRISCV64ANDI {
4415 c := auxIntToInt64(v_0.AuxInt)
4420 v.reset(OpRISCV64ANDI)
4421 v.AuxInt = int64ToAuxInt(int64(uint16(c)))
4425 // match: (MOVHUreg (MOVDconst [c]))
4426 // result: (MOVDconst [int64(uint16(c))])
4428 if v_0.Op != OpRISCV64MOVDconst {
4431 c := auxIntToInt64(v_0.AuxInt)
4432 v.reset(OpRISCV64MOVDconst)
4433 v.AuxInt = int64ToAuxInt(int64(uint16(c)))
4436 // match: (MOVHUreg x:(MOVBUload _ _))
4437 // result: (MOVDreg x)
4440 if x.Op != OpRISCV64MOVBUload {
4443 v.reset(OpRISCV64MOVDreg)
4447 // match: (MOVHUreg x:(MOVHUload _ _))
4448 // result: (MOVDreg x)
4451 if x.Op != OpRISCV64MOVHUload {
4454 v.reset(OpRISCV64MOVDreg)
4458 // match: (MOVHUreg x:(MOVBUreg _))
4459 // result: (MOVDreg x)
4462 if x.Op != OpRISCV64MOVBUreg {
4465 v.reset(OpRISCV64MOVDreg)
4469 // match: (MOVHUreg x:(MOVHUreg _))
4470 // result: (MOVDreg x)
4473 if x.Op != OpRISCV64MOVHUreg {
4476 v.reset(OpRISCV64MOVDreg)
4480 // match: (MOVHUreg <t> x:(MOVHload [off] {sym} ptr mem))
4481 // cond: x.Uses == 1 && clobber(x)
4482 // result: @x.Block (MOVHUload <t> [off] {sym} ptr mem)
4486 if x.Op != OpRISCV64MOVHload {
4489 off := auxIntToInt32(x.AuxInt)
4490 sym := auxToSym(x.Aux)
4493 if !(x.Uses == 1 && clobber(x)) {
4497 v0 := b.NewValue0(x.Pos, OpRISCV64MOVHUload, t)
4499 v0.AuxInt = int32ToAuxInt(off)
4500 v0.Aux = symToAux(sym)
4501 v0.AddArg2(ptr, mem)
4506 func rewriteValueRISCV64_OpRISCV64MOVHload(v *Value) bool {
4509 // match: (MOVHload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
4510 // cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
4511 // result: (MOVHload [off1+off2] {mergeSym(sym1,sym2)} base mem)
4513 off1 := auxIntToInt32(v.AuxInt)
4514 sym1 := auxToSym(v.Aux)
4515 if v_0.Op != OpRISCV64MOVaddr {
4518 off2 := auxIntToInt32(v_0.AuxInt)
4519 sym2 := auxToSym(v_0.Aux)
4522 if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
4525 v.reset(OpRISCV64MOVHload)
4526 v.AuxInt = int32ToAuxInt(off1 + off2)
4527 v.Aux = symToAux(mergeSym(sym1, sym2))
4528 v.AddArg2(base, mem)
4531 // match: (MOVHload [off1] {sym} (ADDI [off2] base) mem)
4532 // cond: is32Bit(int64(off1)+off2)
4533 // result: (MOVHload [off1+int32(off2)] {sym} base mem)
4535 off1 := auxIntToInt32(v.AuxInt)
4536 sym := auxToSym(v.Aux)
4537 if v_0.Op != OpRISCV64ADDI {
4540 off2 := auxIntToInt64(v_0.AuxInt)
4543 if !(is32Bit(int64(off1) + off2)) {
4546 v.reset(OpRISCV64MOVHload)
4547 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4548 v.Aux = symToAux(sym)
4549 v.AddArg2(base, mem)
4554 func rewriteValueRISCV64_OpRISCV64MOVHreg(v *Value) bool {
4557 // match: (MOVHreg x:(ANDI [c] y))
4558 // cond: c >= 0 && int64(int16(c)) == c
4562 if x.Op != OpRISCV64ANDI {
4565 c := auxIntToInt64(x.AuxInt)
4566 if !(c >= 0 && int64(int16(c)) == c) {
4572 // match: (MOVHreg (MOVDconst [c]))
4573 // result: (MOVDconst [int64(int16(c))])
4575 if v_0.Op != OpRISCV64MOVDconst {
4578 c := auxIntToInt64(v_0.AuxInt)
4579 v.reset(OpRISCV64MOVDconst)
4580 v.AuxInt = int64ToAuxInt(int64(int16(c)))
4583 // match: (MOVHreg x:(MOVBload _ _))
4584 // result: (MOVDreg x)
4587 if x.Op != OpRISCV64MOVBload {
4590 v.reset(OpRISCV64MOVDreg)
4594 // match: (MOVHreg x:(MOVBUload _ _))
4595 // result: (MOVDreg x)
4598 if x.Op != OpRISCV64MOVBUload {
4601 v.reset(OpRISCV64MOVDreg)
4605 // match: (MOVHreg x:(MOVHload _ _))
4606 // result: (MOVDreg x)
4609 if x.Op != OpRISCV64MOVHload {
4612 v.reset(OpRISCV64MOVDreg)
4616 // match: (MOVHreg x:(MOVBreg _))
4617 // result: (MOVDreg x)
4620 if x.Op != OpRISCV64MOVBreg {
4623 v.reset(OpRISCV64MOVDreg)
4627 // match: (MOVHreg x:(MOVBUreg _))
4628 // result: (MOVDreg x)
4631 if x.Op != OpRISCV64MOVBUreg {
4634 v.reset(OpRISCV64MOVDreg)
4638 // match: (MOVHreg x:(MOVHreg _))
4639 // result: (MOVDreg x)
4642 if x.Op != OpRISCV64MOVHreg {
4645 v.reset(OpRISCV64MOVDreg)
4649 // match: (MOVHreg <t> x:(MOVHUload [off] {sym} ptr mem))
4650 // cond: x.Uses == 1 && clobber(x)
4651 // result: @x.Block (MOVHload <t> [off] {sym} ptr mem)
4655 if x.Op != OpRISCV64MOVHUload {
4658 off := auxIntToInt32(x.AuxInt)
4659 sym := auxToSym(x.Aux)
4662 if !(x.Uses == 1 && clobber(x)) {
4666 v0 := b.NewValue0(x.Pos, OpRISCV64MOVHload, t)
4668 v0.AuxInt = int32ToAuxInt(off)
4669 v0.Aux = symToAux(sym)
4670 v0.AddArg2(ptr, mem)
4675 func rewriteValueRISCV64_OpRISCV64MOVHstore(v *Value) bool {
4679 // match: (MOVHstore [off1] {sym1} (MOVaddr [off2] {sym2} base) val mem)
4680 // cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
4681 // result: (MOVHstore [off1+off2] {mergeSym(sym1,sym2)} base val mem)
4683 off1 := auxIntToInt32(v.AuxInt)
4684 sym1 := auxToSym(v.Aux)
4685 if v_0.Op != OpRISCV64MOVaddr {
4688 off2 := auxIntToInt32(v_0.AuxInt)
4689 sym2 := auxToSym(v_0.Aux)
4693 if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
4696 v.reset(OpRISCV64MOVHstore)
4697 v.AuxInt = int32ToAuxInt(off1 + off2)
4698 v.Aux = symToAux(mergeSym(sym1, sym2))
4699 v.AddArg3(base, val, mem)
4702 // match: (MOVHstore [off1] {sym} (ADDI [off2] base) val mem)
4703 // cond: is32Bit(int64(off1)+off2)
4704 // result: (MOVHstore [off1+int32(off2)] {sym} base val mem)
4706 off1 := auxIntToInt32(v.AuxInt)
4707 sym := auxToSym(v.Aux)
4708 if v_0.Op != OpRISCV64ADDI {
4711 off2 := auxIntToInt64(v_0.AuxInt)
4715 if !(is32Bit(int64(off1) + off2)) {
4718 v.reset(OpRISCV64MOVHstore)
4719 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4720 v.Aux = symToAux(sym)
4721 v.AddArg3(base, val, mem)
4724 // match: (MOVHstore [off] {sym} ptr (MOVDconst [0]) mem)
4725 // result: (MOVHstorezero [off] {sym} ptr mem)
4727 off := auxIntToInt32(v.AuxInt)
4728 sym := auxToSym(v.Aux)
4730 if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
4734 v.reset(OpRISCV64MOVHstorezero)
4735 v.AuxInt = int32ToAuxInt(off)
4736 v.Aux = symToAux(sym)
4740 // match: (MOVHstore [off] {sym} ptr (MOVHreg x) mem)
4741 // result: (MOVHstore [off] {sym} ptr x mem)
4743 off := auxIntToInt32(v.AuxInt)
4744 sym := auxToSym(v.Aux)
4746 if v_1.Op != OpRISCV64MOVHreg {
4751 v.reset(OpRISCV64MOVHstore)
4752 v.AuxInt = int32ToAuxInt(off)
4753 v.Aux = symToAux(sym)
4754 v.AddArg3(ptr, x, mem)
4757 // match: (MOVHstore [off] {sym} ptr (MOVWreg x) mem)
4758 // result: (MOVHstore [off] {sym} ptr x mem)
4760 off := auxIntToInt32(v.AuxInt)
4761 sym := auxToSym(v.Aux)
4763 if v_1.Op != OpRISCV64MOVWreg {
4768 v.reset(OpRISCV64MOVHstore)
4769 v.AuxInt = int32ToAuxInt(off)
4770 v.Aux = symToAux(sym)
4771 v.AddArg3(ptr, x, mem)
4774 // match: (MOVHstore [off] {sym} ptr (MOVHUreg x) mem)
4775 // result: (MOVHstore [off] {sym} ptr x mem)
4777 off := auxIntToInt32(v.AuxInt)
4778 sym := auxToSym(v.Aux)
4780 if v_1.Op != OpRISCV64MOVHUreg {
4785 v.reset(OpRISCV64MOVHstore)
4786 v.AuxInt = int32ToAuxInt(off)
4787 v.Aux = symToAux(sym)
4788 v.AddArg3(ptr, x, mem)
4791 // match: (MOVHstore [off] {sym} ptr (MOVWUreg x) mem)
4792 // result: (MOVHstore [off] {sym} ptr x mem)
4794 off := auxIntToInt32(v.AuxInt)
4795 sym := auxToSym(v.Aux)
4797 if v_1.Op != OpRISCV64MOVWUreg {
4802 v.reset(OpRISCV64MOVHstore)
4803 v.AuxInt = int32ToAuxInt(off)
4804 v.Aux = symToAux(sym)
4805 v.AddArg3(ptr, x, mem)
4810 func rewriteValueRISCV64_OpRISCV64MOVHstorezero(v *Value) bool {
4813 // match: (MOVHstorezero [off1] {sym1} (MOVaddr [off2] {sym2} ptr) mem)
4814 // cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2))
4815 // result: (MOVHstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
4817 off1 := auxIntToInt32(v.AuxInt)
4818 sym1 := auxToSym(v.Aux)
4819 if v_0.Op != OpRISCV64MOVaddr {
4822 off2 := auxIntToInt32(v_0.AuxInt)
4823 sym2 := auxToSym(v_0.Aux)
4826 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
4829 v.reset(OpRISCV64MOVHstorezero)
4830 v.AuxInt = int32ToAuxInt(off1 + off2)
4831 v.Aux = symToAux(mergeSym(sym1, sym2))
4835 // match: (MOVHstorezero [off1] {sym} (ADDI [off2] ptr) mem)
4836 // cond: is32Bit(int64(off1)+off2)
4837 // result: (MOVHstorezero [off1+int32(off2)] {sym} ptr mem)
4839 off1 := auxIntToInt32(v.AuxInt)
4840 sym := auxToSym(v.Aux)
4841 if v_0.Op != OpRISCV64ADDI {
4844 off2 := auxIntToInt64(v_0.AuxInt)
4847 if !(is32Bit(int64(off1) + off2)) {
4850 v.reset(OpRISCV64MOVHstorezero)
4851 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4852 v.Aux = symToAux(sym)
4858 func rewriteValueRISCV64_OpRISCV64MOVWUload(v *Value) bool {
4861 // match: (MOVWUload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
4862 // cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
4863 // result: (MOVWUload [off1+off2] {mergeSym(sym1,sym2)} base mem)
4865 off1 := auxIntToInt32(v.AuxInt)
4866 sym1 := auxToSym(v.Aux)
4867 if v_0.Op != OpRISCV64MOVaddr {
4870 off2 := auxIntToInt32(v_0.AuxInt)
4871 sym2 := auxToSym(v_0.Aux)
4874 if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
4877 v.reset(OpRISCV64MOVWUload)
4878 v.AuxInt = int32ToAuxInt(off1 + off2)
4879 v.Aux = symToAux(mergeSym(sym1, sym2))
4880 v.AddArg2(base, mem)
4883 // match: (MOVWUload [off1] {sym} (ADDI [off2] base) mem)
4884 // cond: is32Bit(int64(off1)+off2)
4885 // result: (MOVWUload [off1+int32(off2)] {sym} base mem)
4887 off1 := auxIntToInt32(v.AuxInt)
4888 sym := auxToSym(v.Aux)
4889 if v_0.Op != OpRISCV64ADDI {
4892 off2 := auxIntToInt64(v_0.AuxInt)
4895 if !(is32Bit(int64(off1) + off2)) {
4898 v.reset(OpRISCV64MOVWUload)
4899 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4900 v.Aux = symToAux(sym)
4901 v.AddArg2(base, mem)
4906 func rewriteValueRISCV64_OpRISCV64MOVWUreg(v *Value) bool {
4909 typ := &b.Func.Config.Types
4910 // match: (MOVWUreg x:(ANDI [c] y))
4911 // cond: c >= 0 && int64(uint32(c)) == c
4915 if x.Op != OpRISCV64ANDI {
4918 c := auxIntToInt64(x.AuxInt)
4919 if !(c >= 0 && int64(uint32(c)) == c) {
4925 // match: (MOVWUreg (ANDI [c] x))
4927 // result: (AND (MOVDconst [int64(uint32(c))]) x)
4929 if v_0.Op != OpRISCV64ANDI {
4932 c := auxIntToInt64(v_0.AuxInt)
4937 v.reset(OpRISCV64AND)
4938 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
4939 v0.AuxInt = int64ToAuxInt(int64(uint32(c)))
4943 // match: (MOVWUreg (MOVDconst [c]))
4944 // result: (MOVDconst [int64(uint32(c))])
4946 if v_0.Op != OpRISCV64MOVDconst {
4949 c := auxIntToInt64(v_0.AuxInt)
4950 v.reset(OpRISCV64MOVDconst)
4951 v.AuxInt = int64ToAuxInt(int64(uint32(c)))
4954 // match: (MOVWUreg x:(MOVBUload _ _))
4955 // result: (MOVDreg x)
4958 if x.Op != OpRISCV64MOVBUload {
4961 v.reset(OpRISCV64MOVDreg)
4965 // match: (MOVWUreg x:(MOVHUload _ _))
4966 // result: (MOVDreg x)
4969 if x.Op != OpRISCV64MOVHUload {
4972 v.reset(OpRISCV64MOVDreg)
4976 // match: (MOVWUreg x:(MOVWUload _ _))
4977 // result: (MOVDreg x)
4980 if x.Op != OpRISCV64MOVWUload {
4983 v.reset(OpRISCV64MOVDreg)
4987 // match: (MOVWUreg x:(MOVBUreg _))
4988 // result: (MOVDreg x)
4991 if x.Op != OpRISCV64MOVBUreg {
4994 v.reset(OpRISCV64MOVDreg)
4998 // match: (MOVWUreg x:(MOVHUreg _))
4999 // result: (MOVDreg x)
5002 if x.Op != OpRISCV64MOVHUreg {
5005 v.reset(OpRISCV64MOVDreg)
5009 // match: (MOVWUreg x:(MOVWUreg _))
5010 // result: (MOVDreg x)
5013 if x.Op != OpRISCV64MOVWUreg {
5016 v.reset(OpRISCV64MOVDreg)
5020 // match: (MOVWUreg <t> x:(MOVWload [off] {sym} ptr mem))
5021 // cond: x.Uses == 1 && clobber(x)
5022 // result: @x.Block (MOVWUload <t> [off] {sym} ptr mem)
5026 if x.Op != OpRISCV64MOVWload {
5029 off := auxIntToInt32(x.AuxInt)
5030 sym := auxToSym(x.Aux)
5033 if !(x.Uses == 1 && clobber(x)) {
5037 v0 := b.NewValue0(x.Pos, OpRISCV64MOVWUload, t)
5039 v0.AuxInt = int32ToAuxInt(off)
5040 v0.Aux = symToAux(sym)
5041 v0.AddArg2(ptr, mem)
5046 func rewriteValueRISCV64_OpRISCV64MOVWload(v *Value) bool {
5049 // match: (MOVWload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
5050 // cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
5051 // result: (MOVWload [off1+off2] {mergeSym(sym1,sym2)} base mem)
5053 off1 := auxIntToInt32(v.AuxInt)
5054 sym1 := auxToSym(v.Aux)
5055 if v_0.Op != OpRISCV64MOVaddr {
5058 off2 := auxIntToInt32(v_0.AuxInt)
5059 sym2 := auxToSym(v_0.Aux)
5062 if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
5065 v.reset(OpRISCV64MOVWload)
5066 v.AuxInt = int32ToAuxInt(off1 + off2)
5067 v.Aux = symToAux(mergeSym(sym1, sym2))
5068 v.AddArg2(base, mem)
5071 // match: (MOVWload [off1] {sym} (ADDI [off2] base) mem)
5072 // cond: is32Bit(int64(off1)+off2)
5073 // result: (MOVWload [off1+int32(off2)] {sym} base mem)
5075 off1 := auxIntToInt32(v.AuxInt)
5076 sym := auxToSym(v.Aux)
5077 if v_0.Op != OpRISCV64ADDI {
5080 off2 := auxIntToInt64(v_0.AuxInt)
5083 if !(is32Bit(int64(off1) + off2)) {
5086 v.reset(OpRISCV64MOVWload)
5087 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
5088 v.Aux = symToAux(sym)
5089 v.AddArg2(base, mem)
5094 func rewriteValueRISCV64_OpRISCV64MOVWreg(v *Value) bool {
5097 // match: (MOVWreg x:(ANDI [c] y))
5098 // cond: c >= 0 && int64(int32(c)) == c
5102 if x.Op != OpRISCV64ANDI {
5105 c := auxIntToInt64(x.AuxInt)
5106 if !(c >= 0 && int64(int32(c)) == c) {
5112 // match: (MOVWreg (MOVDconst [c]))
5113 // result: (MOVDconst [int64(int32(c))])
5115 if v_0.Op != OpRISCV64MOVDconst {
5118 c := auxIntToInt64(v_0.AuxInt)
5119 v.reset(OpRISCV64MOVDconst)
5120 v.AuxInt = int64ToAuxInt(int64(int32(c)))
5123 // match: (MOVWreg x:(MOVBload _ _))
5124 // result: (MOVDreg x)
5127 if x.Op != OpRISCV64MOVBload {
5130 v.reset(OpRISCV64MOVDreg)
5134 // match: (MOVWreg x:(MOVBUload _ _))
5135 // result: (MOVDreg x)
5138 if x.Op != OpRISCV64MOVBUload {
5141 v.reset(OpRISCV64MOVDreg)
5145 // match: (MOVWreg x:(MOVHload _ _))
5146 // result: (MOVDreg x)
5149 if x.Op != OpRISCV64MOVHload {
5152 v.reset(OpRISCV64MOVDreg)
5156 // match: (MOVWreg x:(MOVHUload _ _))
5157 // result: (MOVDreg x)
5160 if x.Op != OpRISCV64MOVHUload {
5163 v.reset(OpRISCV64MOVDreg)
5167 // match: (MOVWreg x:(MOVWload _ _))
5168 // result: (MOVDreg x)
5171 if x.Op != OpRISCV64MOVWload {
5174 v.reset(OpRISCV64MOVDreg)
5178 // match: (MOVWreg x:(ADDIW _))
5179 // result: (MOVDreg x)
5182 if x.Op != OpRISCV64ADDIW {
5185 v.reset(OpRISCV64MOVDreg)
5189 // match: (MOVWreg x:(SUBW _ _))
5190 // result: (MOVDreg x)
5193 if x.Op != OpRISCV64SUBW {
5196 v.reset(OpRISCV64MOVDreg)
5200 // match: (MOVWreg x:(NEGW _))
5201 // result: (MOVDreg x)
5204 if x.Op != OpRISCV64NEGW {
5207 v.reset(OpRISCV64MOVDreg)
5211 // match: (MOVWreg x:(MULW _ _))
5212 // result: (MOVDreg x)
5215 if x.Op != OpRISCV64MULW {
5218 v.reset(OpRISCV64MOVDreg)
5222 // match: (MOVWreg x:(DIVW _ _))
5223 // result: (MOVDreg x)
5226 if x.Op != OpRISCV64DIVW {
5229 v.reset(OpRISCV64MOVDreg)
5233 // match: (MOVWreg x:(DIVUW _ _))
5234 // result: (MOVDreg x)
5237 if x.Op != OpRISCV64DIVUW {
5240 v.reset(OpRISCV64MOVDreg)
5244 // match: (MOVWreg x:(REMW _ _))
5245 // result: (MOVDreg x)
5248 if x.Op != OpRISCV64REMW {
5251 v.reset(OpRISCV64MOVDreg)
5255 // match: (MOVWreg x:(REMUW _ _))
5256 // result: (MOVDreg x)
5259 if x.Op != OpRISCV64REMUW {
5262 v.reset(OpRISCV64MOVDreg)
5266 // match: (MOVWreg x:(MOVBreg _))
5267 // result: (MOVDreg x)
5270 if x.Op != OpRISCV64MOVBreg {
5273 v.reset(OpRISCV64MOVDreg)
5277 // match: (MOVWreg x:(MOVBUreg _))
5278 // result: (MOVDreg x)
5281 if x.Op != OpRISCV64MOVBUreg {
5284 v.reset(OpRISCV64MOVDreg)
5288 // match: (MOVWreg x:(MOVHreg _))
5289 // result: (MOVDreg x)
5292 if x.Op != OpRISCV64MOVHreg {
5295 v.reset(OpRISCV64MOVDreg)
5299 // match: (MOVWreg x:(MOVWreg _))
5300 // result: (MOVDreg x)
5303 if x.Op != OpRISCV64MOVWreg {
5306 v.reset(OpRISCV64MOVDreg)
5310 // match: (MOVWreg <t> x:(MOVWUload [off] {sym} ptr mem))
5311 // cond: x.Uses == 1 && clobber(x)
5312 // result: @x.Block (MOVWload <t> [off] {sym} ptr mem)
5316 if x.Op != OpRISCV64MOVWUload {
5319 off := auxIntToInt32(x.AuxInt)
5320 sym := auxToSym(x.Aux)
5323 if !(x.Uses == 1 && clobber(x)) {
5327 v0 := b.NewValue0(x.Pos, OpRISCV64MOVWload, t)
5329 v0.AuxInt = int32ToAuxInt(off)
5330 v0.Aux = symToAux(sym)
5331 v0.AddArg2(ptr, mem)
5336 func rewriteValueRISCV64_OpRISCV64MOVWstore(v *Value) bool {
5340 // match: (MOVWstore [off1] {sym1} (MOVaddr [off2] {sym2} base) val mem)
5341 // cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
5342 // result: (MOVWstore [off1+off2] {mergeSym(sym1,sym2)} base val mem)
5344 off1 := auxIntToInt32(v.AuxInt)
5345 sym1 := auxToSym(v.Aux)
5346 if v_0.Op != OpRISCV64MOVaddr {
5349 off2 := auxIntToInt32(v_0.AuxInt)
5350 sym2 := auxToSym(v_0.Aux)
5354 if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
5357 v.reset(OpRISCV64MOVWstore)
5358 v.AuxInt = int32ToAuxInt(off1 + off2)
5359 v.Aux = symToAux(mergeSym(sym1, sym2))
5360 v.AddArg3(base, val, mem)
5363 // match: (MOVWstore [off1] {sym} (ADDI [off2] base) val mem)
5364 // cond: is32Bit(int64(off1)+off2)
5365 // result: (MOVWstore [off1+int32(off2)] {sym} base val mem)
5367 off1 := auxIntToInt32(v.AuxInt)
5368 sym := auxToSym(v.Aux)
5369 if v_0.Op != OpRISCV64ADDI {
5372 off2 := auxIntToInt64(v_0.AuxInt)
5376 if !(is32Bit(int64(off1) + off2)) {
5379 v.reset(OpRISCV64MOVWstore)
5380 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
5381 v.Aux = symToAux(sym)
5382 v.AddArg3(base, val, mem)
5385 // match: (MOVWstore [off] {sym} ptr (MOVDconst [0]) mem)
5386 // result: (MOVWstorezero [off] {sym} ptr mem)
5388 off := auxIntToInt32(v.AuxInt)
5389 sym := auxToSym(v.Aux)
5391 if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
5395 v.reset(OpRISCV64MOVWstorezero)
5396 v.AuxInt = int32ToAuxInt(off)
5397 v.Aux = symToAux(sym)
5401 // match: (MOVWstore [off] {sym} ptr (MOVWreg x) mem)
5402 // result: (MOVWstore [off] {sym} ptr x mem)
5404 off := auxIntToInt32(v.AuxInt)
5405 sym := auxToSym(v.Aux)
5407 if v_1.Op != OpRISCV64MOVWreg {
5412 v.reset(OpRISCV64MOVWstore)
5413 v.AuxInt = int32ToAuxInt(off)
5414 v.Aux = symToAux(sym)
5415 v.AddArg3(ptr, x, mem)
5418 // match: (MOVWstore [off] {sym} ptr (MOVWUreg x) mem)
5419 // result: (MOVWstore [off] {sym} ptr x mem)
5421 off := auxIntToInt32(v.AuxInt)
5422 sym := auxToSym(v.Aux)
5424 if v_1.Op != OpRISCV64MOVWUreg {
5429 v.reset(OpRISCV64MOVWstore)
5430 v.AuxInt = int32ToAuxInt(off)
5431 v.Aux = symToAux(sym)
5432 v.AddArg3(ptr, x, mem)
5437 func rewriteValueRISCV64_OpRISCV64MOVWstorezero(v *Value) bool {
5440 // match: (MOVWstorezero [off1] {sym1} (MOVaddr [off2] {sym2} ptr) mem)
5441 // cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2))
5442 // result: (MOVWstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
5444 off1 := auxIntToInt32(v.AuxInt)
5445 sym1 := auxToSym(v.Aux)
5446 if v_0.Op != OpRISCV64MOVaddr {
5449 off2 := auxIntToInt32(v_0.AuxInt)
5450 sym2 := auxToSym(v_0.Aux)
5453 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
5456 v.reset(OpRISCV64MOVWstorezero)
5457 v.AuxInt = int32ToAuxInt(off1 + off2)
5458 v.Aux = symToAux(mergeSym(sym1, sym2))
5462 // match: (MOVWstorezero [off1] {sym} (ADDI [off2] ptr) mem)
5463 // cond: is32Bit(int64(off1)+off2)
5464 // result: (MOVWstorezero [off1+int32(off2)] {sym} ptr mem)
5466 off1 := auxIntToInt32(v.AuxInt)
5467 sym := auxToSym(v.Aux)
5468 if v_0.Op != OpRISCV64ADDI {
5471 off2 := auxIntToInt64(v_0.AuxInt)
5474 if !(is32Bit(int64(off1) + off2)) {
5477 v.reset(OpRISCV64MOVWstorezero)
5478 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
5479 v.Aux = symToAux(sym)
5485 func rewriteValueRISCV64_OpRISCV64NEG(v *Value) bool {
5488 // match: (NEG (SUB x y))
5489 // result: (SUB y x)
5491 if v_0.Op != OpRISCV64SUB {
5496 v.reset(OpRISCV64SUB)
5500 // match: (NEG <t> s:(ADDI [val] (SUB x y)))
5501 // cond: s.Uses == 1 && is32Bit(-val)
5502 // result: (ADDI [-val] (SUB <t> y x))
5506 if s.Op != OpRISCV64ADDI {
5509 val := auxIntToInt64(s.AuxInt)
5511 if s_0.Op != OpRISCV64SUB {
5516 if !(s.Uses == 1 && is32Bit(-val)) {
5519 v.reset(OpRISCV64ADDI)
5520 v.AuxInt = int64ToAuxInt(-val)
5521 v0 := b.NewValue0(v.Pos, OpRISCV64SUB, t)
5526 // match: (NEG (NEG x))
5529 if v_0.Op != OpRISCV64NEG {
5536 // match: (NEG (MOVDconst [x]))
5537 // result: (MOVDconst [-x])
5539 if v_0.Op != OpRISCV64MOVDconst {
5542 x := auxIntToInt64(v_0.AuxInt)
5543 v.reset(OpRISCV64MOVDconst)
5544 v.AuxInt = int64ToAuxInt(-x)
5549 func rewriteValueRISCV64_OpRISCV64NEGW(v *Value) bool {
5551 // match: (NEGW (MOVDconst [x]))
5552 // result: (MOVDconst [int64(int32(-x))])
5554 if v_0.Op != OpRISCV64MOVDconst {
5557 x := auxIntToInt64(v_0.AuxInt)
5558 v.reset(OpRISCV64MOVDconst)
5559 v.AuxInt = int64ToAuxInt(int64(int32(-x)))
5564 func rewriteValueRISCV64_OpRISCV64OR(v *Value) bool {
5567 // match: (OR (MOVDconst [val]) x)
5568 // cond: is32Bit(val)
5569 // result: (ORI [val] x)
5571 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
5572 if v_0.Op != OpRISCV64MOVDconst {
5575 val := auxIntToInt64(v_0.AuxInt)
5577 if !(is32Bit(val)) {
5580 v.reset(OpRISCV64ORI)
5581 v.AuxInt = int64ToAuxInt(val)
5589 func rewriteValueRISCV64_OpRISCV64ORI(v *Value) bool {
5591 // match: (ORI [0] x)
5594 if auxIntToInt64(v.AuxInt) != 0 {
5601 // match: (ORI [-1] x)
5602 // result: (MOVDconst [-1])
5604 if auxIntToInt64(v.AuxInt) != -1 {
5607 v.reset(OpRISCV64MOVDconst)
5608 v.AuxInt = int64ToAuxInt(-1)
5611 // match: (ORI [x] (MOVDconst [y]))
5612 // result: (MOVDconst [x | y])
5614 x := auxIntToInt64(v.AuxInt)
5615 if v_0.Op != OpRISCV64MOVDconst {
5618 y := auxIntToInt64(v_0.AuxInt)
5619 v.reset(OpRISCV64MOVDconst)
5620 v.AuxInt = int64ToAuxInt(x | y)
5623 // match: (ORI [x] (ORI [y] z))
5624 // result: (ORI [x | y] z)
5626 x := auxIntToInt64(v.AuxInt)
5627 if v_0.Op != OpRISCV64ORI {
5630 y := auxIntToInt64(v_0.AuxInt)
5632 v.reset(OpRISCV64ORI)
5633 v.AuxInt = int64ToAuxInt(x | y)
5639 func rewriteValueRISCV64_OpRISCV64SEQZ(v *Value) bool {
5641 // match: (SEQZ (NEG x))
5644 if v_0.Op != OpRISCV64NEG {
5648 v.reset(OpRISCV64SEQZ)
5652 // match: (SEQZ (SEQZ x))
5655 if v_0.Op != OpRISCV64SEQZ {
5659 v.reset(OpRISCV64SNEZ)
5663 // match: (SEQZ (SNEZ x))
5666 if v_0.Op != OpRISCV64SNEZ {
5670 v.reset(OpRISCV64SEQZ)
5676 func rewriteValueRISCV64_OpRISCV64SLL(v *Value) bool {
5679 // match: (SLL x (MOVDconst [val]))
5680 // result: (SLLI [int64(val&63)] x)
5683 if v_1.Op != OpRISCV64MOVDconst {
5686 val := auxIntToInt64(v_1.AuxInt)
5687 v.reset(OpRISCV64SLLI)
5688 v.AuxInt = int64ToAuxInt(int64(val & 63))
5694 func rewriteValueRISCV64_OpRISCV64SLLI(v *Value) bool {
5696 // match: (SLLI [x] (MOVDconst [y]))
5697 // cond: is32Bit(y << uint32(x))
5698 // result: (MOVDconst [y << uint32(x)])
5700 x := auxIntToInt64(v.AuxInt)
5701 if v_0.Op != OpRISCV64MOVDconst {
5704 y := auxIntToInt64(v_0.AuxInt)
5705 if !(is32Bit(y << uint32(x))) {
5708 v.reset(OpRISCV64MOVDconst)
5709 v.AuxInt = int64ToAuxInt(y << uint32(x))
5714 func rewriteValueRISCV64_OpRISCV64SLT(v *Value) bool {
5717 // match: (SLT x (MOVDconst [val]))
5718 // cond: val >= -2048 && val <= 2047
5719 // result: (SLTI [val] x)
5722 if v_1.Op != OpRISCV64MOVDconst {
5725 val := auxIntToInt64(v_1.AuxInt)
5726 if !(val >= -2048 && val <= 2047) {
5729 v.reset(OpRISCV64SLTI)
5730 v.AuxInt = int64ToAuxInt(val)
5735 // result: (MOVDconst [0])
5741 v.reset(OpRISCV64MOVDconst)
5742 v.AuxInt = int64ToAuxInt(0)
5747 func rewriteValueRISCV64_OpRISCV64SLTI(v *Value) bool {
5749 // match: (SLTI [x] (MOVDconst [y]))
5750 // result: (MOVDconst [b2i(int64(y) < int64(x))])
5752 x := auxIntToInt64(v.AuxInt)
5753 if v_0.Op != OpRISCV64MOVDconst {
5756 y := auxIntToInt64(v_0.AuxInt)
5757 v.reset(OpRISCV64MOVDconst)
5758 v.AuxInt = int64ToAuxInt(b2i(int64(y) < int64(x)))
5761 // match: (SLTI [x] (ANDI [y] _))
5762 // cond: y >= 0 && int64(y) < int64(x)
5763 // result: (MOVDconst [1])
5765 x := auxIntToInt64(v.AuxInt)
5766 if v_0.Op != OpRISCV64ANDI {
5769 y := auxIntToInt64(v_0.AuxInt)
5770 if !(y >= 0 && int64(y) < int64(x)) {
5773 v.reset(OpRISCV64MOVDconst)
5774 v.AuxInt = int64ToAuxInt(1)
5777 // match: (SLTI [x] (ORI [y] _))
5778 // cond: y >= 0 && int64(y) >= int64(x)
5779 // result: (MOVDconst [0])
5781 x := auxIntToInt64(v.AuxInt)
5782 if v_0.Op != OpRISCV64ORI {
5785 y := auxIntToInt64(v_0.AuxInt)
5786 if !(y >= 0 && int64(y) >= int64(x)) {
5789 v.reset(OpRISCV64MOVDconst)
5790 v.AuxInt = int64ToAuxInt(0)
5795 func rewriteValueRISCV64_OpRISCV64SLTIU(v *Value) bool {
5797 // match: (SLTIU [x] (MOVDconst [y]))
5798 // result: (MOVDconst [b2i(uint64(y) < uint64(x))])
5800 x := auxIntToInt64(v.AuxInt)
5801 if v_0.Op != OpRISCV64MOVDconst {
5804 y := auxIntToInt64(v_0.AuxInt)
5805 v.reset(OpRISCV64MOVDconst)
5806 v.AuxInt = int64ToAuxInt(b2i(uint64(y) < uint64(x)))
5809 // match: (SLTIU [x] (ANDI [y] _))
5810 // cond: y >= 0 && uint64(y) < uint64(x)
5811 // result: (MOVDconst [1])
5813 x := auxIntToInt64(v.AuxInt)
5814 if v_0.Op != OpRISCV64ANDI {
5817 y := auxIntToInt64(v_0.AuxInt)
5818 if !(y >= 0 && uint64(y) < uint64(x)) {
5821 v.reset(OpRISCV64MOVDconst)
5822 v.AuxInt = int64ToAuxInt(1)
5825 // match: (SLTIU [x] (ORI [y] _))
5826 // cond: y >= 0 && uint64(y) >= uint64(x)
5827 // result: (MOVDconst [0])
5829 x := auxIntToInt64(v.AuxInt)
5830 if v_0.Op != OpRISCV64ORI {
5833 y := auxIntToInt64(v_0.AuxInt)
5834 if !(y >= 0 && uint64(y) >= uint64(x)) {
5837 v.reset(OpRISCV64MOVDconst)
5838 v.AuxInt = int64ToAuxInt(0)
5843 func rewriteValueRISCV64_OpRISCV64SLTU(v *Value) bool {
5846 // match: (SLTU x (MOVDconst [val]))
5847 // cond: val >= -2048 && val <= 2047
5848 // result: (SLTIU [val] x)
5851 if v_1.Op != OpRISCV64MOVDconst {
5854 val := auxIntToInt64(v_1.AuxInt)
5855 if !(val >= -2048 && val <= 2047) {
5858 v.reset(OpRISCV64SLTIU)
5859 v.AuxInt = int64ToAuxInt(val)
5863 // match: (SLTU x x)
5864 // result: (MOVDconst [0])
5870 v.reset(OpRISCV64MOVDconst)
5871 v.AuxInt = int64ToAuxInt(0)
5876 func rewriteValueRISCV64_OpRISCV64SNEZ(v *Value) bool {
5878 // match: (SNEZ (NEG x))
5881 if v_0.Op != OpRISCV64NEG {
5885 v.reset(OpRISCV64SNEZ)
5889 // match: (SNEZ (SEQZ x))
5892 if v_0.Op != OpRISCV64SEQZ {
5896 v.reset(OpRISCV64SEQZ)
5900 // match: (SNEZ (SNEZ x))
5903 if v_0.Op != OpRISCV64SNEZ {
5907 v.reset(OpRISCV64SNEZ)
5913 func rewriteValueRISCV64_OpRISCV64SRA(v *Value) bool {
5916 // match: (SRA x (MOVDconst [val]))
5917 // result: (SRAI [int64(val&63)] x)
5920 if v_1.Op != OpRISCV64MOVDconst {
5923 val := auxIntToInt64(v_1.AuxInt)
5924 v.reset(OpRISCV64SRAI)
5925 v.AuxInt = int64ToAuxInt(int64(val & 63))
5931 func rewriteValueRISCV64_OpRISCV64SRAI(v *Value) bool {
5933 // match: (SRAI [x] (MOVDconst [y]))
5934 // result: (MOVDconst [int64(y) >> uint32(x)])
5936 x := auxIntToInt64(v.AuxInt)
5937 if v_0.Op != OpRISCV64MOVDconst {
5940 y := auxIntToInt64(v_0.AuxInt)
5941 v.reset(OpRISCV64MOVDconst)
5942 v.AuxInt = int64ToAuxInt(int64(y) >> uint32(x))
5947 func rewriteValueRISCV64_OpRISCV64SRL(v *Value) bool {
5950 // match: (SRL x (MOVDconst [val]))
5951 // result: (SRLI [int64(val&63)] x)
5954 if v_1.Op != OpRISCV64MOVDconst {
5957 val := auxIntToInt64(v_1.AuxInt)
5958 v.reset(OpRISCV64SRLI)
5959 v.AuxInt = int64ToAuxInt(int64(val & 63))
5965 func rewriteValueRISCV64_OpRISCV64SRLI(v *Value) bool {
5967 // match: (SRLI [x] (MOVDconst [y]))
5968 // result: (MOVDconst [int64(uint64(y) >> uint32(x))])
5970 x := auxIntToInt64(v.AuxInt)
5971 if v_0.Op != OpRISCV64MOVDconst {
5974 y := auxIntToInt64(v_0.AuxInt)
5975 v.reset(OpRISCV64MOVDconst)
5976 v.AuxInt = int64ToAuxInt(int64(uint64(y) >> uint32(x)))
5981 func rewriteValueRISCV64_OpRISCV64SUB(v *Value) bool {
5985 // match: (SUB x (MOVDconst [val]))
5986 // cond: is32Bit(-val)
5987 // result: (ADDI [-val] x)
5990 if v_1.Op != OpRISCV64MOVDconst {
5993 val := auxIntToInt64(v_1.AuxInt)
5994 if !(is32Bit(-val)) {
5997 v.reset(OpRISCV64ADDI)
5998 v.AuxInt = int64ToAuxInt(-val)
6002 // match: (SUB <t> (MOVDconst [val]) y)
6003 // cond: is32Bit(-val)
6004 // result: (NEG (ADDI <t> [-val] y))
6007 if v_0.Op != OpRISCV64MOVDconst {
6010 val := auxIntToInt64(v_0.AuxInt)
6012 if !(is32Bit(-val)) {
6015 v.reset(OpRISCV64NEG)
6016 v0 := b.NewValue0(v.Pos, OpRISCV64ADDI, t)
6017 v0.AuxInt = int64ToAuxInt(-val)
6022 // match: (SUB x (MOVDconst [0]))
6026 if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
6032 // match: (SUB (MOVDconst [0]) x)
6035 if v_0.Op != OpRISCV64MOVDconst || auxIntToInt64(v_0.AuxInt) != 0 {
6039 v.reset(OpRISCV64NEG)
6045 func rewriteValueRISCV64_OpRISCV64SUBW(v *Value) bool {
6048 // match: (SUBW x (MOVDconst [0]))
6049 // result: (ADDIW [0] x)
6052 if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
6055 v.reset(OpRISCV64ADDIW)
6056 v.AuxInt = int64ToAuxInt(0)
6060 // match: (SUBW (MOVDconst [0]) x)
6063 if v_0.Op != OpRISCV64MOVDconst || auxIntToInt64(v_0.AuxInt) != 0 {
6067 v.reset(OpRISCV64NEGW)
6073 func rewriteValueRISCV64_OpRISCV64XOR(v *Value) bool {
6076 // match: (XOR (MOVDconst [val]) x)
6077 // cond: is32Bit(val)
6078 // result: (XORI [val] x)
6080 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
6081 if v_0.Op != OpRISCV64MOVDconst {
6084 val := auxIntToInt64(v_0.AuxInt)
6086 if !(is32Bit(val)) {
6089 v.reset(OpRISCV64XORI)
6090 v.AuxInt = int64ToAuxInt(val)
6098 func rewriteValueRISCV64_OpRotateLeft16(v *Value) bool {
6102 typ := &b.Func.Config.Types
6103 // match: (RotateLeft16 <t> x (MOVDconst [c]))
6104 // result: (Or16 (Lsh16x64 <t> x (MOVDconst [c&15])) (Rsh16Ux64 <t> x (MOVDconst [-c&15])))
6108 if v_1.Op != OpRISCV64MOVDconst {
6111 c := auxIntToInt64(v_1.AuxInt)
6113 v0 := b.NewValue0(v.Pos, OpLsh16x64, t)
6114 v1 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
6115 v1.AuxInt = int64ToAuxInt(c & 15)
6117 v2 := b.NewValue0(v.Pos, OpRsh16Ux64, t)
6118 v3 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
6119 v3.AuxInt = int64ToAuxInt(-c & 15)
6126 func rewriteValueRISCV64_OpRotateLeft32(v *Value) bool {
6130 typ := &b.Func.Config.Types
6131 // match: (RotateLeft32 <t> x (MOVDconst [c]))
6132 // result: (Or32 (Lsh32x64 <t> x (MOVDconst [c&31])) (Rsh32Ux64 <t> x (MOVDconst [-c&31])))
6136 if v_1.Op != OpRISCV64MOVDconst {
6139 c := auxIntToInt64(v_1.AuxInt)
6141 v0 := b.NewValue0(v.Pos, OpLsh32x64, t)
6142 v1 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
6143 v1.AuxInt = int64ToAuxInt(c & 31)
6145 v2 := b.NewValue0(v.Pos, OpRsh32Ux64, t)
6146 v3 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
6147 v3.AuxInt = int64ToAuxInt(-c & 31)
6154 func rewriteValueRISCV64_OpRotateLeft64(v *Value) bool {
6158 typ := &b.Func.Config.Types
6159 // match: (RotateLeft64 <t> x (MOVDconst [c]))
6160 // result: (Or64 (Lsh64x64 <t> x (MOVDconst [c&63])) (Rsh64Ux64 <t> x (MOVDconst [-c&63])))
6164 if v_1.Op != OpRISCV64MOVDconst {
6167 c := auxIntToInt64(v_1.AuxInt)
6169 v0 := b.NewValue0(v.Pos, OpLsh64x64, t)
6170 v1 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
6171 v1.AuxInt = int64ToAuxInt(c & 63)
6173 v2 := b.NewValue0(v.Pos, OpRsh64Ux64, t)
6174 v3 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
6175 v3.AuxInt = int64ToAuxInt(-c & 63)
6182 func rewriteValueRISCV64_OpRotateLeft8(v *Value) bool {
6186 typ := &b.Func.Config.Types
6187 // match: (RotateLeft8 <t> x (MOVDconst [c]))
6188 // result: (Or8 (Lsh8x64 <t> x (MOVDconst [c&7])) (Rsh8Ux64 <t> x (MOVDconst [-c&7])))
6192 if v_1.Op != OpRISCV64MOVDconst {
6195 c := auxIntToInt64(v_1.AuxInt)
6197 v0 := b.NewValue0(v.Pos, OpLsh8x64, t)
6198 v1 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
6199 v1.AuxInt = int64ToAuxInt(c & 7)
6201 v2 := b.NewValue0(v.Pos, OpRsh8Ux64, t)
6202 v3 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
6203 v3.AuxInt = int64ToAuxInt(-c & 7)
6210 func rewriteValueRISCV64_OpRsh16Ux16(v *Value) bool {
6214 typ := &b.Func.Config.Types
6215 // match: (Rsh16Ux16 <t> x y)
6216 // cond: !shiftIsBounded(v)
6217 // result: (AND (SRL <t> (ZeroExt16to64 x) y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
6222 if !(!shiftIsBounded(v)) {
6225 v.reset(OpRISCV64AND)
6226 v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
6227 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6230 v2 := b.NewValue0(v.Pos, OpNeg16, t)
6231 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
6232 v3.AuxInt = int64ToAuxInt(64)
6233 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6240 // match: (Rsh16Ux16 x y)
6241 // cond: shiftIsBounded(v)
6242 // result: (SRL (ZeroExt16to64 x) y)
6246 if !(shiftIsBounded(v)) {
6249 v.reset(OpRISCV64SRL)
6250 v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6257 func rewriteValueRISCV64_OpRsh16Ux32(v *Value) bool {
6261 typ := &b.Func.Config.Types
6262 // match: (Rsh16Ux32 <t> x y)
6263 // cond: !shiftIsBounded(v)
6264 // result: (AND (SRL <t> (ZeroExt16to64 x) y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
6269 if !(!shiftIsBounded(v)) {
6272 v.reset(OpRISCV64AND)
6273 v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
6274 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6277 v2 := b.NewValue0(v.Pos, OpNeg16, t)
6278 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
6279 v3.AuxInt = int64ToAuxInt(64)
6280 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6287 // match: (Rsh16Ux32 x y)
6288 // cond: shiftIsBounded(v)
6289 // result: (SRL (ZeroExt16to64 x) y)
6293 if !(shiftIsBounded(v)) {
6296 v.reset(OpRISCV64SRL)
6297 v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6304 func rewriteValueRISCV64_OpRsh16Ux64(v *Value) bool {
6308 typ := &b.Func.Config.Types
6309 // match: (Rsh16Ux64 <t> x y)
6310 // cond: !shiftIsBounded(v)
6311 // result: (AND (SRL <t> (ZeroExt16to64 x) y) (Neg16 <t> (SLTIU <t> [64] y)))
6316 if !(!shiftIsBounded(v)) {
6319 v.reset(OpRISCV64AND)
6320 v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
6321 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6324 v2 := b.NewValue0(v.Pos, OpNeg16, t)
6325 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
6326 v3.AuxInt = int64ToAuxInt(64)
6332 // match: (Rsh16Ux64 x y)
6333 // cond: shiftIsBounded(v)
6334 // result: (SRL (ZeroExt16to64 x) y)
6338 if !(shiftIsBounded(v)) {
6341 v.reset(OpRISCV64SRL)
6342 v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6349 func rewriteValueRISCV64_OpRsh16Ux8(v *Value) bool {
6353 typ := &b.Func.Config.Types
6354 // match: (Rsh16Ux8 <t> x y)
6355 // cond: !shiftIsBounded(v)
6356 // result: (AND (SRL <t> (ZeroExt16to64 x) y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
6361 if !(!shiftIsBounded(v)) {
6364 v.reset(OpRISCV64AND)
6365 v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
6366 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6369 v2 := b.NewValue0(v.Pos, OpNeg16, t)
6370 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
6371 v3.AuxInt = int64ToAuxInt(64)
6372 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
6379 // match: (Rsh16Ux8 x y)
6380 // cond: shiftIsBounded(v)
6381 // result: (SRL (ZeroExt16to64 x) y)
6385 if !(shiftIsBounded(v)) {
6388 v.reset(OpRISCV64SRL)
6389 v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6396 func rewriteValueRISCV64_OpRsh16x16(v *Value) bool {
6400 typ := &b.Func.Config.Types
6401 // match: (Rsh16x16 <t> x y)
6402 // cond: !shiftIsBounded(v)
6403 // result: (SRA <t> (SignExt16to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt16to64 y)))))
6408 if !(!shiftIsBounded(v)) {
6411 v.reset(OpRISCV64SRA)
6413 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
6415 v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
6416 v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
6417 v2.AuxInt = int64ToAuxInt(-1)
6418 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
6419 v3.AuxInt = int64ToAuxInt(64)
6420 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6428 // match: (Rsh16x16 x y)
6429 // cond: shiftIsBounded(v)
6430 // result: (SRA (SignExt16to64 x) y)
6434 if !(shiftIsBounded(v)) {
6437 v.reset(OpRISCV64SRA)
6438 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
6445 func rewriteValueRISCV64_OpRsh16x32(v *Value) bool {
6449 typ := &b.Func.Config.Types
6450 // match: (Rsh16x32 <t> x y)
6451 // cond: !shiftIsBounded(v)
6452 // result: (SRA <t> (SignExt16to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt32to64 y)))))
6457 if !(!shiftIsBounded(v)) {
6460 v.reset(OpRISCV64SRA)
6462 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
6464 v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
6465 v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
6466 v2.AuxInt = int64ToAuxInt(-1)
6467 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
6468 v3.AuxInt = int64ToAuxInt(64)
6469 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6477 // match: (Rsh16x32 x y)
6478 // cond: shiftIsBounded(v)
6479 // result: (SRA (SignExt16to64 x) y)
6483 if !(shiftIsBounded(v)) {
6486 v.reset(OpRISCV64SRA)
6487 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
6494 func rewriteValueRISCV64_OpRsh16x64(v *Value) bool {
6498 typ := &b.Func.Config.Types
6499 // match: (Rsh16x64 <t> x y)
6500 // cond: !shiftIsBounded(v)
6501 // result: (SRA <t> (SignExt16to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] y))))
6506 if !(!shiftIsBounded(v)) {
6509 v.reset(OpRISCV64SRA)
6511 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
6513 v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
6514 v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
6515 v2.AuxInt = int64ToAuxInt(-1)
6516 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
6517 v3.AuxInt = int64ToAuxInt(64)
6524 // match: (Rsh16x64 x y)
6525 // cond: shiftIsBounded(v)
6526 // result: (SRA (SignExt16to64 x) y)
6530 if !(shiftIsBounded(v)) {
6533 v.reset(OpRISCV64SRA)
6534 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
6541 func rewriteValueRISCV64_OpRsh16x8(v *Value) bool {
6545 typ := &b.Func.Config.Types
6546 // match: (Rsh16x8 <t> x y)
6547 // cond: !shiftIsBounded(v)
6548 // result: (SRA <t> (SignExt16to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt8to64 y)))))
6553 if !(!shiftIsBounded(v)) {
6556 v.reset(OpRISCV64SRA)
6558 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
6560 v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
6561 v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
6562 v2.AuxInt = int64ToAuxInt(-1)
6563 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
6564 v3.AuxInt = int64ToAuxInt(64)
6565 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
6573 // match: (Rsh16x8 x y)
6574 // cond: shiftIsBounded(v)
6575 // result: (SRA (SignExt16to64 x) y)
6579 if !(shiftIsBounded(v)) {
6582 v.reset(OpRISCV64SRA)
6583 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
6590 func rewriteValueRISCV64_OpRsh32Ux16(v *Value) bool {
6594 typ := &b.Func.Config.Types
6595 // match: (Rsh32Ux16 <t> x y)
6596 // cond: !shiftIsBounded(v)
6597 // result: (AND (SRL <t> (ZeroExt32to64 x) y) (Neg32 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
6602 if !(!shiftIsBounded(v)) {
6605 v.reset(OpRISCV64AND)
6606 v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
6607 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6610 v2 := b.NewValue0(v.Pos, OpNeg32, t)
6611 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
6612 v3.AuxInt = int64ToAuxInt(64)
6613 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6620 // match: (Rsh32Ux16 x y)
6621 // cond: shiftIsBounded(v)
6622 // result: (SRL (ZeroExt32to64 x) y)
6626 if !(shiftIsBounded(v)) {
6629 v.reset(OpRISCV64SRL)
6630 v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6637 func rewriteValueRISCV64_OpRsh32Ux32(v *Value) bool {
6641 typ := &b.Func.Config.Types
6642 // match: (Rsh32Ux32 <t> x y)
6643 // cond: !shiftIsBounded(v)
6644 // result: (AND (SRL <t> (ZeroExt32to64 x) y) (Neg32 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
6649 if !(!shiftIsBounded(v)) {
6652 v.reset(OpRISCV64AND)
6653 v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
6654 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6657 v2 := b.NewValue0(v.Pos, OpNeg32, t)
6658 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
6659 v3.AuxInt = int64ToAuxInt(64)
6660 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6667 // match: (Rsh32Ux32 x y)
6668 // cond: shiftIsBounded(v)
6669 // result: (SRL (ZeroExt32to64 x) y)
6673 if !(shiftIsBounded(v)) {
6676 v.reset(OpRISCV64SRL)
6677 v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6684 func rewriteValueRISCV64_OpRsh32Ux64(v *Value) bool {
6688 typ := &b.Func.Config.Types
6689 // match: (Rsh32Ux64 <t> x y)
6690 // cond: !shiftIsBounded(v)
6691 // result: (AND (SRL <t> (ZeroExt32to64 x) y) (Neg32 <t> (SLTIU <t> [64] y)))
6696 if !(!shiftIsBounded(v)) {
6699 v.reset(OpRISCV64AND)
6700 v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
6701 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6704 v2 := b.NewValue0(v.Pos, OpNeg32, t)
6705 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
6706 v3.AuxInt = int64ToAuxInt(64)
6712 // match: (Rsh32Ux64 x y)
6713 // cond: shiftIsBounded(v)
6714 // result: (SRL (ZeroExt32to64 x) y)
6718 if !(shiftIsBounded(v)) {
6721 v.reset(OpRISCV64SRL)
6722 v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6729 func rewriteValueRISCV64_OpRsh32Ux8(v *Value) bool {
6733 typ := &b.Func.Config.Types
6734 // match: (Rsh32Ux8 <t> x y)
6735 // cond: !shiftIsBounded(v)
6736 // result: (AND (SRL <t> (ZeroExt32to64 x) y) (Neg32 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
6741 if !(!shiftIsBounded(v)) {
6744 v.reset(OpRISCV64AND)
6745 v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
6746 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6749 v2 := b.NewValue0(v.Pos, OpNeg32, t)
6750 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
6751 v3.AuxInt = int64ToAuxInt(64)
6752 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
6759 // match: (Rsh32Ux8 x y)
6760 // cond: shiftIsBounded(v)
6761 // result: (SRL (ZeroExt32to64 x) y)
6765 if !(shiftIsBounded(v)) {
6768 v.reset(OpRISCV64SRL)
6769 v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6776 func rewriteValueRISCV64_OpRsh32x16(v *Value) bool {
6780 typ := &b.Func.Config.Types
6781 // match: (Rsh32x16 <t> x y)
6782 // cond: !shiftIsBounded(v)
6783 // result: (SRA <t> (SignExt32to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt16to64 y)))))
6788 if !(!shiftIsBounded(v)) {
6791 v.reset(OpRISCV64SRA)
6793 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
6795 v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
6796 v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
6797 v2.AuxInt = int64ToAuxInt(-1)
6798 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
6799 v3.AuxInt = int64ToAuxInt(64)
6800 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6808 // match: (Rsh32x16 x y)
6809 // cond: shiftIsBounded(v)
6810 // result: (SRA (SignExt32to64 x) y)
6814 if !(shiftIsBounded(v)) {
6817 v.reset(OpRISCV64SRA)
6818 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
6825 func rewriteValueRISCV64_OpRsh32x32(v *Value) bool {
6829 typ := &b.Func.Config.Types
6830 // match: (Rsh32x32 <t> x y)
6831 // cond: !shiftIsBounded(v)
6832 // result: (SRA <t> (SignExt32to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt32to64 y)))))
6837 if !(!shiftIsBounded(v)) {
6840 v.reset(OpRISCV64SRA)
6842 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
6844 v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
6845 v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
6846 v2.AuxInt = int64ToAuxInt(-1)
6847 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
6848 v3.AuxInt = int64ToAuxInt(64)
6849 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6857 // match: (Rsh32x32 x y)
6858 // cond: shiftIsBounded(v)
6859 // result: (SRA (SignExt32to64 x) y)
6863 if !(shiftIsBounded(v)) {
6866 v.reset(OpRISCV64SRA)
6867 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
6874 func rewriteValueRISCV64_OpRsh32x64(v *Value) bool {
6878 typ := &b.Func.Config.Types
6879 // match: (Rsh32x64 <t> x y)
6880 // cond: !shiftIsBounded(v)
6881 // result: (SRA <t> (SignExt32to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] y))))
6886 if !(!shiftIsBounded(v)) {
6889 v.reset(OpRISCV64SRA)
6891 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
6893 v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
6894 v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
6895 v2.AuxInt = int64ToAuxInt(-1)
6896 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
6897 v3.AuxInt = int64ToAuxInt(64)
6904 // match: (Rsh32x64 x y)
6905 // cond: shiftIsBounded(v)
6906 // result: (SRA (SignExt32to64 x) y)
6910 if !(shiftIsBounded(v)) {
6913 v.reset(OpRISCV64SRA)
6914 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
6921 func rewriteValueRISCV64_OpRsh32x8(v *Value) bool {
6925 typ := &b.Func.Config.Types
6926 // match: (Rsh32x8 <t> x y)
6927 // cond: !shiftIsBounded(v)
6928 // result: (SRA <t> (SignExt32to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt8to64 y)))))
6933 if !(!shiftIsBounded(v)) {
6936 v.reset(OpRISCV64SRA)
6938 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
6940 v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
6941 v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
6942 v2.AuxInt = int64ToAuxInt(-1)
6943 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
6944 v3.AuxInt = int64ToAuxInt(64)
6945 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
6953 // match: (Rsh32x8 x y)
6954 // cond: shiftIsBounded(v)
6955 // result: (SRA (SignExt32to64 x) y)
6959 if !(shiftIsBounded(v)) {
6962 v.reset(OpRISCV64SRA)
6963 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
6970 func rewriteValueRISCV64_OpRsh64Ux16(v *Value) bool {
6974 typ := &b.Func.Config.Types
6975 // match: (Rsh64Ux16 <t> x y)
6976 // cond: !shiftIsBounded(v)
6977 // result: (AND (SRL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
6982 if !(!shiftIsBounded(v)) {
6985 v.reset(OpRISCV64AND)
6986 v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
6988 v1 := b.NewValue0(v.Pos, OpNeg64, t)
6989 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
6990 v2.AuxInt = int64ToAuxInt(64)
6991 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6998 // match: (Rsh64Ux16 x y)
6999 // cond: shiftIsBounded(v)
7000 // result: (SRL x y)
7004 if !(shiftIsBounded(v)) {
7007 v.reset(OpRISCV64SRL)
7013 func rewriteValueRISCV64_OpRsh64Ux32(v *Value) bool {
7017 typ := &b.Func.Config.Types
7018 // match: (Rsh64Ux32 <t> x y)
7019 // cond: !shiftIsBounded(v)
7020 // result: (AND (SRL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
7025 if !(!shiftIsBounded(v)) {
7028 v.reset(OpRISCV64AND)
7029 v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
7031 v1 := b.NewValue0(v.Pos, OpNeg64, t)
7032 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
7033 v2.AuxInt = int64ToAuxInt(64)
7034 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
7041 // match: (Rsh64Ux32 x y)
7042 // cond: shiftIsBounded(v)
7043 // result: (SRL x y)
7047 if !(shiftIsBounded(v)) {
7050 v.reset(OpRISCV64SRL)
7056 func rewriteValueRISCV64_OpRsh64Ux64(v *Value) bool {
7060 // match: (Rsh64Ux64 <t> x y)
7061 // cond: !shiftIsBounded(v)
7062 // result: (AND (SRL <t> x y) (Neg64 <t> (SLTIU <t> [64] y)))
7067 if !(!shiftIsBounded(v)) {
7070 v.reset(OpRISCV64AND)
7071 v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
7073 v1 := b.NewValue0(v.Pos, OpNeg64, t)
7074 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
7075 v2.AuxInt = int64ToAuxInt(64)
7081 // match: (Rsh64Ux64 x y)
7082 // cond: shiftIsBounded(v)
7083 // result: (SRL x y)
7087 if !(shiftIsBounded(v)) {
7090 v.reset(OpRISCV64SRL)
7096 func rewriteValueRISCV64_OpRsh64Ux8(v *Value) bool {
7100 typ := &b.Func.Config.Types
7101 // match: (Rsh64Ux8 <t> x y)
7102 // cond: !shiftIsBounded(v)
7103 // result: (AND (SRL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
7108 if !(!shiftIsBounded(v)) {
7111 v.reset(OpRISCV64AND)
7112 v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
7114 v1 := b.NewValue0(v.Pos, OpNeg64, t)
7115 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
7116 v2.AuxInt = int64ToAuxInt(64)
7117 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
7124 // match: (Rsh64Ux8 x y)
7125 // cond: shiftIsBounded(v)
7126 // result: (SRL x y)
7130 if !(shiftIsBounded(v)) {
7133 v.reset(OpRISCV64SRL)
7139 func rewriteValueRISCV64_OpRsh64x16(v *Value) bool {
7143 typ := &b.Func.Config.Types
7144 // match: (Rsh64x16 <t> x y)
7145 // cond: !shiftIsBounded(v)
7146 // result: (SRA <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt16to64 y)))))
7151 if !(!shiftIsBounded(v)) {
7154 v.reset(OpRISCV64SRA)
7156 v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
7157 v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
7158 v1.AuxInt = int64ToAuxInt(-1)
7159 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
7160 v2.AuxInt = int64ToAuxInt(64)
7161 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
7169 // match: (Rsh64x16 x y)
7170 // cond: shiftIsBounded(v)
7171 // result: (SRA x y)
7175 if !(shiftIsBounded(v)) {
7178 v.reset(OpRISCV64SRA)
7184 func rewriteValueRISCV64_OpRsh64x32(v *Value) bool {
7188 typ := &b.Func.Config.Types
7189 // match: (Rsh64x32 <t> x y)
7190 // cond: !shiftIsBounded(v)
7191 // result: (SRA <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt32to64 y)))))
7196 if !(!shiftIsBounded(v)) {
7199 v.reset(OpRISCV64SRA)
7201 v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
7202 v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
7203 v1.AuxInt = int64ToAuxInt(-1)
7204 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
7205 v2.AuxInt = int64ToAuxInt(64)
7206 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
7214 // match: (Rsh64x32 x y)
7215 // cond: shiftIsBounded(v)
7216 // result: (SRA x y)
7220 if !(shiftIsBounded(v)) {
7223 v.reset(OpRISCV64SRA)
7229 func rewriteValueRISCV64_OpRsh64x64(v *Value) bool {
7233 // match: (Rsh64x64 <t> x y)
7234 // cond: !shiftIsBounded(v)
7235 // result: (SRA <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] y))))
7240 if !(!shiftIsBounded(v)) {
7243 v.reset(OpRISCV64SRA)
7245 v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
7246 v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
7247 v1.AuxInt = int64ToAuxInt(-1)
7248 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
7249 v2.AuxInt = int64ToAuxInt(64)
7256 // match: (Rsh64x64 x y)
7257 // cond: shiftIsBounded(v)
7258 // result: (SRA x y)
7262 if !(shiftIsBounded(v)) {
7265 v.reset(OpRISCV64SRA)
7271 func rewriteValueRISCV64_OpRsh64x8(v *Value) bool {
7275 typ := &b.Func.Config.Types
7276 // match: (Rsh64x8 <t> x y)
7277 // cond: !shiftIsBounded(v)
7278 // result: (SRA <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt8to64 y)))))
7283 if !(!shiftIsBounded(v)) {
7286 v.reset(OpRISCV64SRA)
7288 v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
7289 v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
7290 v1.AuxInt = int64ToAuxInt(-1)
7291 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
7292 v2.AuxInt = int64ToAuxInt(64)
7293 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
7301 // match: (Rsh64x8 x y)
7302 // cond: shiftIsBounded(v)
7303 // result: (SRA x y)
7307 if !(shiftIsBounded(v)) {
7310 v.reset(OpRISCV64SRA)
7316 func rewriteValueRISCV64_OpRsh8Ux16(v *Value) bool {
7320 typ := &b.Func.Config.Types
7321 // match: (Rsh8Ux16 <t> x y)
7322 // cond: !shiftIsBounded(v)
7323 // result: (AND (SRL <t> (ZeroExt8to64 x) y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
7328 if !(!shiftIsBounded(v)) {
7331 v.reset(OpRISCV64AND)
7332 v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
7333 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
7336 v2 := b.NewValue0(v.Pos, OpNeg8, t)
7337 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
7338 v3.AuxInt = int64ToAuxInt(64)
7339 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
7346 // match: (Rsh8Ux16 x y)
7347 // cond: shiftIsBounded(v)
7348 // result: (SRL (ZeroExt8to64 x) y)
7352 if !(shiftIsBounded(v)) {
7355 v.reset(OpRISCV64SRL)
7356 v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
7363 func rewriteValueRISCV64_OpRsh8Ux32(v *Value) bool {
7367 typ := &b.Func.Config.Types
7368 // match: (Rsh8Ux32 <t> x y)
7369 // cond: !shiftIsBounded(v)
7370 // result: (AND (SRL <t> (ZeroExt8to64 x) y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
7375 if !(!shiftIsBounded(v)) {
7378 v.reset(OpRISCV64AND)
7379 v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
7380 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
7383 v2 := b.NewValue0(v.Pos, OpNeg8, t)
7384 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
7385 v3.AuxInt = int64ToAuxInt(64)
7386 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
7393 // match: (Rsh8Ux32 x y)
7394 // cond: shiftIsBounded(v)
7395 // result: (SRL (ZeroExt8to64 x) y)
7399 if !(shiftIsBounded(v)) {
7402 v.reset(OpRISCV64SRL)
7403 v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
7410 func rewriteValueRISCV64_OpRsh8Ux64(v *Value) bool {
7414 typ := &b.Func.Config.Types
7415 // match: (Rsh8Ux64 <t> x y)
7416 // cond: !shiftIsBounded(v)
7417 // result: (AND (SRL <t> (ZeroExt8to64 x) y) (Neg8 <t> (SLTIU <t> [64] y)))
7422 if !(!shiftIsBounded(v)) {
7425 v.reset(OpRISCV64AND)
7426 v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
7427 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
7430 v2 := b.NewValue0(v.Pos, OpNeg8, t)
7431 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
7432 v3.AuxInt = int64ToAuxInt(64)
7438 // match: (Rsh8Ux64 x y)
7439 // cond: shiftIsBounded(v)
7440 // result: (SRL (ZeroExt8to64 x) y)
7444 if !(shiftIsBounded(v)) {
7447 v.reset(OpRISCV64SRL)
7448 v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
7455 func rewriteValueRISCV64_OpRsh8Ux8(v *Value) bool {
7459 typ := &b.Func.Config.Types
7460 // match: (Rsh8Ux8 <t> x y)
7461 // cond: !shiftIsBounded(v)
7462 // result: (AND (SRL <t> (ZeroExt8to64 x) y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
7467 if !(!shiftIsBounded(v)) {
7470 v.reset(OpRISCV64AND)
7471 v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
7472 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
7475 v2 := b.NewValue0(v.Pos, OpNeg8, t)
7476 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
7477 v3.AuxInt = int64ToAuxInt(64)
7478 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
7485 // match: (Rsh8Ux8 x y)
7486 // cond: shiftIsBounded(v)
7487 // result: (SRL (ZeroExt8to64 x) y)
7491 if !(shiftIsBounded(v)) {
7494 v.reset(OpRISCV64SRL)
7495 v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
7502 func rewriteValueRISCV64_OpRsh8x16(v *Value) bool {
7506 typ := &b.Func.Config.Types
7507 // match: (Rsh8x16 <t> x y)
7508 // cond: !shiftIsBounded(v)
7509 // result: (SRA <t> (SignExt8to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt16to64 y)))))
7514 if !(!shiftIsBounded(v)) {
7517 v.reset(OpRISCV64SRA)
7519 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
7521 v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
7522 v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
7523 v2.AuxInt = int64ToAuxInt(-1)
7524 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
7525 v3.AuxInt = int64ToAuxInt(64)
7526 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
7534 // match: (Rsh8x16 x y)
7535 // cond: shiftIsBounded(v)
7536 // result: (SRA (SignExt8to64 x) y)
7540 if !(shiftIsBounded(v)) {
7543 v.reset(OpRISCV64SRA)
7544 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
7551 func rewriteValueRISCV64_OpRsh8x32(v *Value) bool {
7555 typ := &b.Func.Config.Types
7556 // match: (Rsh8x32 <t> x y)
7557 // cond: !shiftIsBounded(v)
7558 // result: (SRA <t> (SignExt8to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt32to64 y)))))
7563 if !(!shiftIsBounded(v)) {
7566 v.reset(OpRISCV64SRA)
7568 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
7570 v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
7571 v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
7572 v2.AuxInt = int64ToAuxInt(-1)
7573 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
7574 v3.AuxInt = int64ToAuxInt(64)
7575 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
7583 // match: (Rsh8x32 x y)
7584 // cond: shiftIsBounded(v)
7585 // result: (SRA (SignExt8to64 x) y)
7589 if !(shiftIsBounded(v)) {
7592 v.reset(OpRISCV64SRA)
7593 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
7600 func rewriteValueRISCV64_OpRsh8x64(v *Value) bool {
7604 typ := &b.Func.Config.Types
7605 // match: (Rsh8x64 <t> x y)
7606 // cond: !shiftIsBounded(v)
7607 // result: (SRA <t> (SignExt8to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] y))))
7612 if !(!shiftIsBounded(v)) {
7615 v.reset(OpRISCV64SRA)
7617 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
7619 v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
7620 v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
7621 v2.AuxInt = int64ToAuxInt(-1)
7622 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
7623 v3.AuxInt = int64ToAuxInt(64)
7630 // match: (Rsh8x64 x y)
7631 // cond: shiftIsBounded(v)
7632 // result: (SRA (SignExt8to64 x) y)
7636 if !(shiftIsBounded(v)) {
7639 v.reset(OpRISCV64SRA)
7640 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
7647 func rewriteValueRISCV64_OpRsh8x8(v *Value) bool {
7651 typ := &b.Func.Config.Types
7652 // match: (Rsh8x8 <t> x y)
7653 // cond: !shiftIsBounded(v)
7654 // result: (SRA <t> (SignExt8to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt8to64 y)))))
7659 if !(!shiftIsBounded(v)) {
7662 v.reset(OpRISCV64SRA)
7664 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
7666 v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
7667 v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
7668 v2.AuxInt = int64ToAuxInt(-1)
7669 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
7670 v3.AuxInt = int64ToAuxInt(64)
7671 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
7679 // match: (Rsh8x8 x y)
7680 // cond: shiftIsBounded(v)
7681 // result: (SRA (SignExt8to64 x) y)
7685 if !(shiftIsBounded(v)) {
7688 v.reset(OpRISCV64SRA)
7689 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
7696 func rewriteValueRISCV64_OpSelect0(v *Value) bool {
7699 typ := &b.Func.Config.Types
7700 // match: (Select0 (Add64carry x y c))
7701 // result: (ADD (ADD <typ.UInt64> x y) c)
7703 if v_0.Op != OpAdd64carry {
7709 v.reset(OpRISCV64ADD)
7710 v0 := b.NewValue0(v.Pos, OpRISCV64ADD, typ.UInt64)
7715 // match: (Select0 (Sub64borrow x y c))
7716 // result: (SUB (SUB <typ.UInt64> x y) c)
7718 if v_0.Op != OpSub64borrow {
7724 v.reset(OpRISCV64SUB)
7725 v0 := b.NewValue0(v.Pos, OpRISCV64SUB, typ.UInt64)
7730 // match: (Select0 m:(LoweredMuluhilo x y))
7731 // cond: m.Uses == 1
7732 // result: (MULHU x y)
7735 if m.Op != OpRISCV64LoweredMuluhilo {
7743 v.reset(OpRISCV64MULHU)
7749 func rewriteValueRISCV64_OpSelect1(v *Value) bool {
7752 typ := &b.Func.Config.Types
7753 // match: (Select1 (Add64carry x y c))
7754 // result: (OR (SLTU <typ.UInt64> s:(ADD <typ.UInt64> x y) x) (SLTU <typ.UInt64> (ADD <typ.UInt64> s c) s))
7756 if v_0.Op != OpAdd64carry {
7762 v.reset(OpRISCV64OR)
7763 v0 := b.NewValue0(v.Pos, OpRISCV64SLTU, typ.UInt64)
7764 s := b.NewValue0(v.Pos, OpRISCV64ADD, typ.UInt64)
7767 v2 := b.NewValue0(v.Pos, OpRISCV64SLTU, typ.UInt64)
7768 v3 := b.NewValue0(v.Pos, OpRISCV64ADD, typ.UInt64)
7774 // match: (Select1 (Sub64borrow x y c))
7775 // result: (OR (SLTU <typ.UInt64> x s:(SUB <typ.UInt64> x y)) (SLTU <typ.UInt64> s (SUB <typ.UInt64> s c)))
7777 if v_0.Op != OpSub64borrow {
7783 v.reset(OpRISCV64OR)
7784 v0 := b.NewValue0(v.Pos, OpRISCV64SLTU, typ.UInt64)
7785 s := b.NewValue0(v.Pos, OpRISCV64SUB, typ.UInt64)
7788 v2 := b.NewValue0(v.Pos, OpRISCV64SLTU, typ.UInt64)
7789 v3 := b.NewValue0(v.Pos, OpRISCV64SUB, typ.UInt64)
7795 // match: (Select1 m:(LoweredMuluhilo x y))
7796 // cond: m.Uses == 1
7797 // result: (MUL x y)
7800 if m.Op != OpRISCV64LoweredMuluhilo {
7808 v.reset(OpRISCV64MUL)
7814 func rewriteValueRISCV64_OpSlicemask(v *Value) bool {
7817 // match: (Slicemask <t> x)
7818 // result: (SRAI [63] (NEG <t> x))
7822 v.reset(OpRISCV64SRAI)
7823 v.AuxInt = int64ToAuxInt(63)
7824 v0 := b.NewValue0(v.Pos, OpRISCV64NEG, t)
7830 func rewriteValueRISCV64_OpStore(v *Value) bool {
7834 // match: (Store {t} ptr val mem)
7835 // cond: t.Size() == 1
7836 // result: (MOVBstore ptr val mem)
7838 t := auxToType(v.Aux)
7842 if !(t.Size() == 1) {
7845 v.reset(OpRISCV64MOVBstore)
7846 v.AddArg3(ptr, val, mem)
7849 // match: (Store {t} ptr val mem)
7850 // cond: t.Size() == 2
7851 // result: (MOVHstore ptr val mem)
7853 t := auxToType(v.Aux)
7857 if !(t.Size() == 2) {
7860 v.reset(OpRISCV64MOVHstore)
7861 v.AddArg3(ptr, val, mem)
7864 // match: (Store {t} ptr val mem)
7865 // cond: t.Size() == 4 && !t.IsFloat()
7866 // result: (MOVWstore ptr val mem)
7868 t := auxToType(v.Aux)
7872 if !(t.Size() == 4 && !t.IsFloat()) {
7875 v.reset(OpRISCV64MOVWstore)
7876 v.AddArg3(ptr, val, mem)
7879 // match: (Store {t} ptr val mem)
7880 // cond: t.Size() == 8 && !t.IsFloat()
7881 // result: (MOVDstore ptr val mem)
7883 t := auxToType(v.Aux)
7887 if !(t.Size() == 8 && !t.IsFloat()) {
7890 v.reset(OpRISCV64MOVDstore)
7891 v.AddArg3(ptr, val, mem)
7894 // match: (Store {t} ptr val mem)
7895 // cond: t.Size() == 4 && t.IsFloat()
7896 // result: (FMOVWstore ptr val mem)
7898 t := auxToType(v.Aux)
7902 if !(t.Size() == 4 && t.IsFloat()) {
7905 v.reset(OpRISCV64FMOVWstore)
7906 v.AddArg3(ptr, val, mem)
7909 // match: (Store {t} ptr val mem)
7910 // cond: t.Size() == 8 && t.IsFloat()
7911 // result: (FMOVDstore ptr val mem)
7913 t := auxToType(v.Aux)
7917 if !(t.Size() == 8 && t.IsFloat()) {
7920 v.reset(OpRISCV64FMOVDstore)
7921 v.AddArg3(ptr, val, mem)
7926 func rewriteValueRISCV64_OpZero(v *Value) bool {
7930 config := b.Func.Config
7931 typ := &b.Func.Config.Types
7932 // match: (Zero [0] _ mem)
7935 if auxIntToInt64(v.AuxInt) != 0 {
7942 // match: (Zero [1] ptr mem)
7943 // result: (MOVBstore ptr (MOVDconst [0]) mem)
7945 if auxIntToInt64(v.AuxInt) != 1 {
7950 v.reset(OpRISCV64MOVBstore)
7951 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
7952 v0.AuxInt = int64ToAuxInt(0)
7953 v.AddArg3(ptr, v0, mem)
7956 // match: (Zero [2] {t} ptr mem)
7957 // cond: t.Alignment()%2 == 0
7958 // result: (MOVHstore ptr (MOVDconst [0]) mem)
7960 if auxIntToInt64(v.AuxInt) != 2 {
7963 t := auxToType(v.Aux)
7966 if !(t.Alignment()%2 == 0) {
7969 v.reset(OpRISCV64MOVHstore)
7970 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
7971 v0.AuxInt = int64ToAuxInt(0)
7972 v.AddArg3(ptr, v0, mem)
7975 // match: (Zero [2] ptr mem)
7976 // result: (MOVBstore [1] ptr (MOVDconst [0]) (MOVBstore ptr (MOVDconst [0]) mem))
7978 if auxIntToInt64(v.AuxInt) != 2 {
7983 v.reset(OpRISCV64MOVBstore)
7984 v.AuxInt = int32ToAuxInt(1)
7985 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
7986 v0.AuxInt = int64ToAuxInt(0)
7987 v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
7988 v1.AddArg3(ptr, v0, mem)
7989 v.AddArg3(ptr, v0, v1)
7992 // match: (Zero [4] {t} ptr mem)
7993 // cond: t.Alignment()%4 == 0
7994 // result: (MOVWstore ptr (MOVDconst [0]) mem)
7996 if auxIntToInt64(v.AuxInt) != 4 {
7999 t := auxToType(v.Aux)
8002 if !(t.Alignment()%4 == 0) {
8005 v.reset(OpRISCV64MOVWstore)
8006 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
8007 v0.AuxInt = int64ToAuxInt(0)
8008 v.AddArg3(ptr, v0, mem)
8011 // match: (Zero [4] {t} ptr mem)
8012 // cond: t.Alignment()%2 == 0
8013 // result: (MOVHstore [2] ptr (MOVDconst [0]) (MOVHstore ptr (MOVDconst [0]) mem))
8015 if auxIntToInt64(v.AuxInt) != 4 {
8018 t := auxToType(v.Aux)
8021 if !(t.Alignment()%2 == 0) {
8024 v.reset(OpRISCV64MOVHstore)
8025 v.AuxInt = int32ToAuxInt(2)
8026 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
8027 v0.AuxInt = int64ToAuxInt(0)
8028 v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
8029 v1.AddArg3(ptr, v0, mem)
8030 v.AddArg3(ptr, v0, v1)
8033 // match: (Zero [4] ptr mem)
8034 // result: (MOVBstore [3] ptr (MOVDconst [0]) (MOVBstore [2] ptr (MOVDconst [0]) (MOVBstore [1] ptr (MOVDconst [0]) (MOVBstore ptr (MOVDconst [0]) mem))))
8036 if auxIntToInt64(v.AuxInt) != 4 {
8041 v.reset(OpRISCV64MOVBstore)
8042 v.AuxInt = int32ToAuxInt(3)
8043 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
8044 v0.AuxInt = int64ToAuxInt(0)
8045 v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
8046 v1.AuxInt = int32ToAuxInt(2)
8047 v2 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
8048 v2.AuxInt = int32ToAuxInt(1)
8049 v3 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
8050 v3.AddArg3(ptr, v0, mem)
8051 v2.AddArg3(ptr, v0, v3)
8052 v1.AddArg3(ptr, v0, v2)
8053 v.AddArg3(ptr, v0, v1)
8056 // match: (Zero [8] {t} ptr mem)
8057 // cond: t.Alignment()%8 == 0
8058 // result: (MOVDstore ptr (MOVDconst [0]) mem)
8060 if auxIntToInt64(v.AuxInt) != 8 {
8063 t := auxToType(v.Aux)
8066 if !(t.Alignment()%8 == 0) {
8069 v.reset(OpRISCV64MOVDstore)
8070 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
8071 v0.AuxInt = int64ToAuxInt(0)
8072 v.AddArg3(ptr, v0, mem)
8075 // match: (Zero [8] {t} ptr mem)
8076 // cond: t.Alignment()%4 == 0
8077 // result: (MOVWstore [4] ptr (MOVDconst [0]) (MOVWstore ptr (MOVDconst [0]) mem))
8079 if auxIntToInt64(v.AuxInt) != 8 {
8082 t := auxToType(v.Aux)
8085 if !(t.Alignment()%4 == 0) {
8088 v.reset(OpRISCV64MOVWstore)
8089 v.AuxInt = int32ToAuxInt(4)
8090 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
8091 v0.AuxInt = int64ToAuxInt(0)
8092 v1 := b.NewValue0(v.Pos, OpRISCV64MOVWstore, types.TypeMem)
8093 v1.AddArg3(ptr, v0, mem)
8094 v.AddArg3(ptr, v0, v1)
8097 // match: (Zero [8] {t} ptr mem)
8098 // cond: t.Alignment()%2 == 0
8099 // result: (MOVHstore [6] ptr (MOVDconst [0]) (MOVHstore [4] ptr (MOVDconst [0]) (MOVHstore [2] ptr (MOVDconst [0]) (MOVHstore ptr (MOVDconst [0]) mem))))
8101 if auxIntToInt64(v.AuxInt) != 8 {
8104 t := auxToType(v.Aux)
8107 if !(t.Alignment()%2 == 0) {
8110 v.reset(OpRISCV64MOVHstore)
8111 v.AuxInt = int32ToAuxInt(6)
8112 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
8113 v0.AuxInt = int64ToAuxInt(0)
8114 v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
8115 v1.AuxInt = int32ToAuxInt(4)
8116 v2 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
8117 v2.AuxInt = int32ToAuxInt(2)
8118 v3 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
8119 v3.AddArg3(ptr, v0, mem)
8120 v2.AddArg3(ptr, v0, v3)
8121 v1.AddArg3(ptr, v0, v2)
8122 v.AddArg3(ptr, v0, v1)
8125 // match: (Zero [3] ptr mem)
8126 // result: (MOVBstore [2] ptr (MOVDconst [0]) (MOVBstore [1] ptr (MOVDconst [0]) (MOVBstore ptr (MOVDconst [0]) mem)))
8128 if auxIntToInt64(v.AuxInt) != 3 {
8133 v.reset(OpRISCV64MOVBstore)
8134 v.AuxInt = int32ToAuxInt(2)
8135 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
8136 v0.AuxInt = int64ToAuxInt(0)
8137 v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
8138 v1.AuxInt = int32ToAuxInt(1)
8139 v2 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
8140 v2.AddArg3(ptr, v0, mem)
8141 v1.AddArg3(ptr, v0, v2)
8142 v.AddArg3(ptr, v0, v1)
8145 // match: (Zero [6] {t} ptr mem)
8146 // cond: t.Alignment()%2 == 0
8147 // result: (MOVHstore [4] ptr (MOVDconst [0]) (MOVHstore [2] ptr (MOVDconst [0]) (MOVHstore ptr (MOVDconst [0]) mem)))
8149 if auxIntToInt64(v.AuxInt) != 6 {
8152 t := auxToType(v.Aux)
8155 if !(t.Alignment()%2 == 0) {
8158 v.reset(OpRISCV64MOVHstore)
8159 v.AuxInt = int32ToAuxInt(4)
8160 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
8161 v0.AuxInt = int64ToAuxInt(0)
8162 v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
8163 v1.AuxInt = int32ToAuxInt(2)
8164 v2 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
8165 v2.AddArg3(ptr, v0, mem)
8166 v1.AddArg3(ptr, v0, v2)
8167 v.AddArg3(ptr, v0, v1)
8170 // match: (Zero [12] {t} ptr mem)
8171 // cond: t.Alignment()%4 == 0
8172 // result: (MOVWstore [8] ptr (MOVDconst [0]) (MOVWstore [4] ptr (MOVDconst [0]) (MOVWstore ptr (MOVDconst [0]) mem)))
8174 if auxIntToInt64(v.AuxInt) != 12 {
8177 t := auxToType(v.Aux)
8180 if !(t.Alignment()%4 == 0) {
8183 v.reset(OpRISCV64MOVWstore)
8184 v.AuxInt = int32ToAuxInt(8)
8185 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
8186 v0.AuxInt = int64ToAuxInt(0)
8187 v1 := b.NewValue0(v.Pos, OpRISCV64MOVWstore, types.TypeMem)
8188 v1.AuxInt = int32ToAuxInt(4)
8189 v2 := b.NewValue0(v.Pos, OpRISCV64MOVWstore, types.TypeMem)
8190 v2.AddArg3(ptr, v0, mem)
8191 v1.AddArg3(ptr, v0, v2)
8192 v.AddArg3(ptr, v0, v1)
8195 // match: (Zero [16] {t} ptr mem)
8196 // cond: t.Alignment()%8 == 0
8197 // result: (MOVDstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem))
8199 if auxIntToInt64(v.AuxInt) != 16 {
8202 t := auxToType(v.Aux)
8205 if !(t.Alignment()%8 == 0) {
8208 v.reset(OpRISCV64MOVDstore)
8209 v.AuxInt = int32ToAuxInt(8)
8210 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
8211 v0.AuxInt = int64ToAuxInt(0)
8212 v1 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
8213 v1.AddArg3(ptr, v0, mem)
8214 v.AddArg3(ptr, v0, v1)
8217 // match: (Zero [24] {t} ptr mem)
8218 // cond: t.Alignment()%8 == 0
8219 // result: (MOVDstore [16] ptr (MOVDconst [0]) (MOVDstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem)))
8221 if auxIntToInt64(v.AuxInt) != 24 {
8224 t := auxToType(v.Aux)
8227 if !(t.Alignment()%8 == 0) {
8230 v.reset(OpRISCV64MOVDstore)
8231 v.AuxInt = int32ToAuxInt(16)
8232 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
8233 v0.AuxInt = int64ToAuxInt(0)
8234 v1 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
8235 v1.AuxInt = int32ToAuxInt(8)
8236 v2 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
8237 v2.AddArg3(ptr, v0, mem)
8238 v1.AddArg3(ptr, v0, v2)
8239 v.AddArg3(ptr, v0, v1)
8242 // match: (Zero [32] {t} ptr mem)
8243 // cond: t.Alignment()%8 == 0
8244 // result: (MOVDstore [24] ptr (MOVDconst [0]) (MOVDstore [16] ptr (MOVDconst [0]) (MOVDstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem))))
8246 if auxIntToInt64(v.AuxInt) != 32 {
8249 t := auxToType(v.Aux)
8252 if !(t.Alignment()%8 == 0) {
8255 v.reset(OpRISCV64MOVDstore)
8256 v.AuxInt = int32ToAuxInt(24)
8257 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
8258 v0.AuxInt = int64ToAuxInt(0)
8259 v1 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
8260 v1.AuxInt = int32ToAuxInt(16)
8261 v2 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
8262 v2.AuxInt = int32ToAuxInt(8)
8263 v3 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
8264 v3.AddArg3(ptr, v0, mem)
8265 v2.AddArg3(ptr, v0, v3)
8266 v1.AddArg3(ptr, v0, v2)
8267 v.AddArg3(ptr, v0, v1)
8270 // match: (Zero [s] {t} ptr mem)
8271 // cond: s%8 == 0 && s <= 8*128 && t.Alignment()%8 == 0 && !config.noDuffDevice
8272 // result: (DUFFZERO [8 * (128 - s/8)] ptr mem)
8274 s := auxIntToInt64(v.AuxInt)
8275 t := auxToType(v.Aux)
8278 if !(s%8 == 0 && s <= 8*128 && t.Alignment()%8 == 0 && !config.noDuffDevice) {
8281 v.reset(OpRISCV64DUFFZERO)
8282 v.AuxInt = int64ToAuxInt(8 * (128 - s/8))
8286 // match: (Zero [s] {t} ptr mem)
8287 // result: (LoweredZero [t.Alignment()] ptr (ADD <ptr.Type> ptr (MOVDconst [s-moveSize(t.Alignment(), config)])) mem)
8289 s := auxIntToInt64(v.AuxInt)
8290 t := auxToType(v.Aux)
8293 v.reset(OpRISCV64LoweredZero)
8294 v.AuxInt = int64ToAuxInt(t.Alignment())
8295 v0 := b.NewValue0(v.Pos, OpRISCV64ADD, ptr.Type)
8296 v1 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
8297 v1.AuxInt = int64ToAuxInt(s - moveSize(t.Alignment(), config))
8299 v.AddArg3(ptr, v0, mem)
8303 func rewriteBlockRISCV64(b *Block) bool {
8304 typ := &b.Func.Config.Types
8306 case BlockRISCV64BEQ:
8307 // match: (BEQ (MOVDconst [0]) cond yes no)
8308 // result: (BEQZ cond yes no)
8309 for b.Controls[0].Op == OpRISCV64MOVDconst {
8310 v_0 := b.Controls[0]
8311 if auxIntToInt64(v_0.AuxInt) != 0 {
8314 cond := b.Controls[1]
8315 b.resetWithControl(BlockRISCV64BEQZ, cond)
8318 // match: (BEQ cond (MOVDconst [0]) yes no)
8319 // result: (BEQZ cond yes no)
8320 for b.Controls[1].Op == OpRISCV64MOVDconst {
8321 cond := b.Controls[0]
8322 v_1 := b.Controls[1]
8323 if auxIntToInt64(v_1.AuxInt) != 0 {
8326 b.resetWithControl(BlockRISCV64BEQZ, cond)
8329 case BlockRISCV64BEQZ:
8330 // match: (BEQZ (SEQZ x) yes no)
8331 // result: (BNEZ x yes no)
8332 for b.Controls[0].Op == OpRISCV64SEQZ {
8333 v_0 := b.Controls[0]
8335 b.resetWithControl(BlockRISCV64BNEZ, x)
8338 // match: (BEQZ (SNEZ x) yes no)
8339 // result: (BEQZ x yes no)
8340 for b.Controls[0].Op == OpRISCV64SNEZ {
8341 v_0 := b.Controls[0]
8343 b.resetWithControl(BlockRISCV64BEQZ, x)
8346 // match: (BEQZ (NEG x) yes no)
8347 // result: (BEQZ x yes no)
8348 for b.Controls[0].Op == OpRISCV64NEG {
8349 v_0 := b.Controls[0]
8351 b.resetWithControl(BlockRISCV64BEQZ, x)
8354 // match: (BEQZ (FNES <t> x y) yes no)
8355 // result: (BNEZ (FEQS <t> x y) yes no)
8356 for b.Controls[0].Op == OpRISCV64FNES {
8357 v_0 := b.Controls[0]
8360 v_0_0 := v_0.Args[0]
8361 v_0_1 := v_0.Args[1]
8362 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
8365 v0 := b.NewValue0(v_0.Pos, OpRISCV64FEQS, t)
8367 b.resetWithControl(BlockRISCV64BNEZ, v0)
8371 // match: (BEQZ (FNED <t> x y) yes no)
8372 // result: (BNEZ (FEQD <t> x y) yes no)
8373 for b.Controls[0].Op == OpRISCV64FNED {
8374 v_0 := b.Controls[0]
8377 v_0_0 := v_0.Args[0]
8378 v_0_1 := v_0.Args[1]
8379 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
8382 v0 := b.NewValue0(v_0.Pos, OpRISCV64FEQD, t)
8384 b.resetWithControl(BlockRISCV64BNEZ, v0)
8388 // match: (BEQZ (SUB x y) yes no)
8389 // result: (BEQ x y yes no)
8390 for b.Controls[0].Op == OpRISCV64SUB {
8391 v_0 := b.Controls[0]
8394 b.resetWithControl2(BlockRISCV64BEQ, x, y)
8397 // match: (BEQZ (SLT x y) yes no)
8398 // result: (BGE x y yes no)
8399 for b.Controls[0].Op == OpRISCV64SLT {
8400 v_0 := b.Controls[0]
8403 b.resetWithControl2(BlockRISCV64BGE, x, y)
8406 // match: (BEQZ (SLTU x y) yes no)
8407 // result: (BGEU x y yes no)
8408 for b.Controls[0].Op == OpRISCV64SLTU {
8409 v_0 := b.Controls[0]
8412 b.resetWithControl2(BlockRISCV64BGEU, x, y)
8415 // match: (BEQZ (SLTI [x] y) yes no)
8416 // result: (BGE y (MOVDconst [x]) yes no)
8417 for b.Controls[0].Op == OpRISCV64SLTI {
8418 v_0 := b.Controls[0]
8419 x := auxIntToInt64(v_0.AuxInt)
8421 v0 := b.NewValue0(b.Pos, OpRISCV64MOVDconst, typ.UInt64)
8422 v0.AuxInt = int64ToAuxInt(x)
8423 b.resetWithControl2(BlockRISCV64BGE, y, v0)
8426 // match: (BEQZ (SLTIU [x] y) yes no)
8427 // result: (BGEU y (MOVDconst [x]) yes no)
8428 for b.Controls[0].Op == OpRISCV64SLTIU {
8429 v_0 := b.Controls[0]
8430 x := auxIntToInt64(v_0.AuxInt)
8432 v0 := b.NewValue0(b.Pos, OpRISCV64MOVDconst, typ.UInt64)
8433 v0.AuxInt = int64ToAuxInt(x)
8434 b.resetWithControl2(BlockRISCV64BGEU, y, v0)
8437 case BlockRISCV64BGE:
8438 // match: (BGE (MOVDconst [0]) cond yes no)
8439 // result: (BLEZ cond yes no)
8440 for b.Controls[0].Op == OpRISCV64MOVDconst {
8441 v_0 := b.Controls[0]
8442 if auxIntToInt64(v_0.AuxInt) != 0 {
8445 cond := b.Controls[1]
8446 b.resetWithControl(BlockRISCV64BLEZ, cond)
8449 // match: (BGE cond (MOVDconst [0]) yes no)
8450 // result: (BGEZ cond yes no)
8451 for b.Controls[1].Op == OpRISCV64MOVDconst {
8452 cond := b.Controls[0]
8453 v_1 := b.Controls[1]
8454 if auxIntToInt64(v_1.AuxInt) != 0 {
8457 b.resetWithControl(BlockRISCV64BGEZ, cond)
8460 case BlockRISCV64BLT:
8461 // match: (BLT (MOVDconst [0]) cond yes no)
8462 // result: (BGTZ cond yes no)
8463 for b.Controls[0].Op == OpRISCV64MOVDconst {
8464 v_0 := b.Controls[0]
8465 if auxIntToInt64(v_0.AuxInt) != 0 {
8468 cond := b.Controls[1]
8469 b.resetWithControl(BlockRISCV64BGTZ, cond)
8472 // match: (BLT cond (MOVDconst [0]) yes no)
8473 // result: (BLTZ cond yes no)
8474 for b.Controls[1].Op == OpRISCV64MOVDconst {
8475 cond := b.Controls[0]
8476 v_1 := b.Controls[1]
8477 if auxIntToInt64(v_1.AuxInt) != 0 {
8480 b.resetWithControl(BlockRISCV64BLTZ, cond)
8483 case BlockRISCV64BNE:
8484 // match: (BNE (MOVDconst [0]) cond yes no)
8485 // result: (BNEZ cond yes no)
8486 for b.Controls[0].Op == OpRISCV64MOVDconst {
8487 v_0 := b.Controls[0]
8488 if auxIntToInt64(v_0.AuxInt) != 0 {
8491 cond := b.Controls[1]
8492 b.resetWithControl(BlockRISCV64BNEZ, cond)
8495 // match: (BNE cond (MOVDconst [0]) yes no)
8496 // result: (BNEZ cond yes no)
8497 for b.Controls[1].Op == OpRISCV64MOVDconst {
8498 cond := b.Controls[0]
8499 v_1 := b.Controls[1]
8500 if auxIntToInt64(v_1.AuxInt) != 0 {
8503 b.resetWithControl(BlockRISCV64BNEZ, cond)
8506 case BlockRISCV64BNEZ:
8507 // match: (BNEZ (SEQZ x) yes no)
8508 // result: (BEQZ x yes no)
8509 for b.Controls[0].Op == OpRISCV64SEQZ {
8510 v_0 := b.Controls[0]
8512 b.resetWithControl(BlockRISCV64BEQZ, x)
8515 // match: (BNEZ (SNEZ x) yes no)
8516 // result: (BNEZ x yes no)
8517 for b.Controls[0].Op == OpRISCV64SNEZ {
8518 v_0 := b.Controls[0]
8520 b.resetWithControl(BlockRISCV64BNEZ, x)
8523 // match: (BNEZ (NEG x) yes no)
8524 // result: (BNEZ x yes no)
8525 for b.Controls[0].Op == OpRISCV64NEG {
8526 v_0 := b.Controls[0]
8528 b.resetWithControl(BlockRISCV64BNEZ, x)
8531 // match: (BNEZ (FNES <t> x y) yes no)
8532 // result: (BEQZ (FEQS <t> x y) yes no)
8533 for b.Controls[0].Op == OpRISCV64FNES {
8534 v_0 := b.Controls[0]
8537 v_0_0 := v_0.Args[0]
8538 v_0_1 := v_0.Args[1]
8539 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
8542 v0 := b.NewValue0(v_0.Pos, OpRISCV64FEQS, t)
8544 b.resetWithControl(BlockRISCV64BEQZ, v0)
8548 // match: (BNEZ (FNED <t> x y) yes no)
8549 // result: (BEQZ (FEQD <t> x y) yes no)
8550 for b.Controls[0].Op == OpRISCV64FNED {
8551 v_0 := b.Controls[0]
8554 v_0_0 := v_0.Args[0]
8555 v_0_1 := v_0.Args[1]
8556 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
8559 v0 := b.NewValue0(v_0.Pos, OpRISCV64FEQD, t)
8561 b.resetWithControl(BlockRISCV64BEQZ, v0)
8565 // match: (BNEZ (SUB x y) yes no)
8566 // result: (BNE x y yes no)
8567 for b.Controls[0].Op == OpRISCV64SUB {
8568 v_0 := b.Controls[0]
8571 b.resetWithControl2(BlockRISCV64BNE, x, y)
8574 // match: (BNEZ (SLT x y) yes no)
8575 // result: (BLT x y yes no)
8576 for b.Controls[0].Op == OpRISCV64SLT {
8577 v_0 := b.Controls[0]
8580 b.resetWithControl2(BlockRISCV64BLT, x, y)
8583 // match: (BNEZ (SLTU x y) yes no)
8584 // result: (BLTU x y yes no)
8585 for b.Controls[0].Op == OpRISCV64SLTU {
8586 v_0 := b.Controls[0]
8589 b.resetWithControl2(BlockRISCV64BLTU, x, y)
8592 // match: (BNEZ (SLTI [x] y) yes no)
8593 // result: (BLT y (MOVDconst [x]) yes no)
8594 for b.Controls[0].Op == OpRISCV64SLTI {
8595 v_0 := b.Controls[0]
8596 x := auxIntToInt64(v_0.AuxInt)
8598 v0 := b.NewValue0(b.Pos, OpRISCV64MOVDconst, typ.UInt64)
8599 v0.AuxInt = int64ToAuxInt(x)
8600 b.resetWithControl2(BlockRISCV64BLT, y, v0)
8603 // match: (BNEZ (SLTIU [x] y) yes no)
8604 // result: (BLTU y (MOVDconst [x]) yes no)
8605 for b.Controls[0].Op == OpRISCV64SLTIU {
8606 v_0 := b.Controls[0]
8607 x := auxIntToInt64(v_0.AuxInt)
8609 v0 := b.NewValue0(b.Pos, OpRISCV64MOVDconst, typ.UInt64)
8610 v0.AuxInt = int64ToAuxInt(x)
8611 b.resetWithControl2(BlockRISCV64BLTU, y, v0)
8615 // match: (If cond yes no)
8616 // result: (BNEZ (MOVBUreg <typ.UInt64> cond) yes no)
8618 cond := b.Controls[0]
8619 v0 := b.NewValue0(cond.Pos, OpRISCV64MOVBUreg, typ.UInt64)
8621 b.resetWithControl(BlockRISCV64BNEZ, v0)