1 // Code generated from _gen/RISCV64.rules using 'go generate'; DO NOT EDIT.
6 import "cmd/compile/internal/types"
8 func rewriteValueRISCV64(v *Value) bool {
35 return rewriteValueRISCV64_OpAddr(v)
52 v.Op = OpRISCV64LoweredAtomicAdd32
55 v.Op = OpRISCV64LoweredAtomicAdd64
58 v.Op = OpRISCV64LoweredAtomicAnd32
61 return rewriteValueRISCV64_OpAtomicAnd8(v)
62 case OpAtomicCompareAndSwap32:
63 return rewriteValueRISCV64_OpAtomicCompareAndSwap32(v)
64 case OpAtomicCompareAndSwap64:
65 v.Op = OpRISCV64LoweredAtomicCas64
67 case OpAtomicExchange32:
68 v.Op = OpRISCV64LoweredAtomicExchange32
70 case OpAtomicExchange64:
71 v.Op = OpRISCV64LoweredAtomicExchange64
74 v.Op = OpRISCV64LoweredAtomicLoad32
77 v.Op = OpRISCV64LoweredAtomicLoad64
80 v.Op = OpRISCV64LoweredAtomicLoad8
83 v.Op = OpRISCV64LoweredAtomicLoad64
86 v.Op = OpRISCV64LoweredAtomicOr32
89 return rewriteValueRISCV64_OpAtomicOr8(v)
91 v.Op = OpRISCV64LoweredAtomicStore32
94 v.Op = OpRISCV64LoweredAtomicStore64
97 v.Op = OpRISCV64LoweredAtomicStore8
99 case OpAtomicStorePtrNoWB:
100 v.Op = OpRISCV64LoweredAtomicStore64
103 return rewriteValueRISCV64_OpAvg64u(v)
105 v.Op = OpRISCV64CALLclosure
120 return rewriteValueRISCV64_OpConst16(v)
122 return rewriteValueRISCV64_OpConst32(v)
124 return rewriteValueRISCV64_OpConst32F(v)
126 return rewriteValueRISCV64_OpConst64(v)
128 return rewriteValueRISCV64_OpConst64F(v)
130 return rewriteValueRISCV64_OpConst8(v)
132 return rewriteValueRISCV64_OpConstBool(v)
134 return rewriteValueRISCV64_OpConstNil(v)
136 v.Op = OpRISCV64MOVconvert
139 v.Op = OpRISCV64FSGNJD
142 v.Op = OpRISCV64FCVTWS
145 v.Op = OpRISCV64FCVTLS
148 v.Op = OpRISCV64FCVTDS
151 v.Op = OpRISCV64FCVTSW
154 v.Op = OpRISCV64FCVTDW
157 v.Op = OpRISCV64FCVTWD
160 v.Op = OpRISCV64FCVTSD
163 v.Op = OpRISCV64FCVTLD
166 v.Op = OpRISCV64FCVTSL
169 v.Op = OpRISCV64FCVTDL
171 case OpCvtBoolToUint8:
175 return rewriteValueRISCV64_OpDiv16(v)
177 return rewriteValueRISCV64_OpDiv16u(v)
179 return rewriteValueRISCV64_OpDiv32(v)
181 v.Op = OpRISCV64FDIVS
184 v.Op = OpRISCV64DIVUW
187 return rewriteValueRISCV64_OpDiv64(v)
189 v.Op = OpRISCV64FDIVD
195 return rewriteValueRISCV64_OpDiv8(v)
197 return rewriteValueRISCV64_OpDiv8u(v)
199 return rewriteValueRISCV64_OpEq16(v)
201 return rewriteValueRISCV64_OpEq32(v)
206 return rewriteValueRISCV64_OpEq64(v)
211 return rewriteValueRISCV64_OpEq8(v)
213 return rewriteValueRISCV64_OpEqB(v)
215 return rewriteValueRISCV64_OpEqPtr(v)
217 v.Op = OpRISCV64FMADDD
220 v.Op = OpRISCV64LoweredGetCallerPC
223 v.Op = OpRISCV64LoweredGetCallerSP
225 case OpGetClosurePtr:
226 v.Op = OpRISCV64LoweredGetClosurePtr
229 return rewriteValueRISCV64_OpHmul32(v)
231 return rewriteValueRISCV64_OpHmul32u(v)
236 v.Op = OpRISCV64MULHU
239 v.Op = OpRISCV64CALLinter
247 case OpIsSliceInBounds:
251 return rewriteValueRISCV64_OpLeq16(v)
253 return rewriteValueRISCV64_OpLeq16U(v)
255 return rewriteValueRISCV64_OpLeq32(v)
260 return rewriteValueRISCV64_OpLeq32U(v)
262 return rewriteValueRISCV64_OpLeq64(v)
267 return rewriteValueRISCV64_OpLeq64U(v)
269 return rewriteValueRISCV64_OpLeq8(v)
271 return rewriteValueRISCV64_OpLeq8U(v)
273 return rewriteValueRISCV64_OpLess16(v)
275 return rewriteValueRISCV64_OpLess16U(v)
277 return rewriteValueRISCV64_OpLess32(v)
282 return rewriteValueRISCV64_OpLess32U(v)
293 return rewriteValueRISCV64_OpLess8(v)
295 return rewriteValueRISCV64_OpLess8U(v)
297 return rewriteValueRISCV64_OpLoad(v)
299 return rewriteValueRISCV64_OpLocalAddr(v)
301 return rewriteValueRISCV64_OpLsh16x16(v)
303 return rewriteValueRISCV64_OpLsh16x32(v)
305 return rewriteValueRISCV64_OpLsh16x64(v)
307 return rewriteValueRISCV64_OpLsh16x8(v)
309 return rewriteValueRISCV64_OpLsh32x16(v)
311 return rewriteValueRISCV64_OpLsh32x32(v)
313 return rewriteValueRISCV64_OpLsh32x64(v)
315 return rewriteValueRISCV64_OpLsh32x8(v)
317 return rewriteValueRISCV64_OpLsh64x16(v)
319 return rewriteValueRISCV64_OpLsh64x32(v)
321 return rewriteValueRISCV64_OpLsh64x64(v)
323 return rewriteValueRISCV64_OpLsh64x8(v)
325 return rewriteValueRISCV64_OpLsh8x16(v)
327 return rewriteValueRISCV64_OpLsh8x32(v)
329 return rewriteValueRISCV64_OpLsh8x64(v)
331 return rewriteValueRISCV64_OpLsh8x8(v)
333 return rewriteValueRISCV64_OpMod16(v)
335 return rewriteValueRISCV64_OpMod16u(v)
337 return rewriteValueRISCV64_OpMod32(v)
339 v.Op = OpRISCV64REMUW
342 return rewriteValueRISCV64_OpMod64(v)
347 return rewriteValueRISCV64_OpMod8(v)
349 return rewriteValueRISCV64_OpMod8u(v)
351 return rewriteValueRISCV64_OpMove(v)
353 return rewriteValueRISCV64_OpMul16(v)
358 v.Op = OpRISCV64FMULS
364 v.Op = OpRISCV64FMULD
367 v.Op = OpRISCV64LoweredMuluhilo
370 v.Op = OpRISCV64LoweredMuluover
373 return rewriteValueRISCV64_OpMul8(v)
381 v.Op = OpRISCV64FNEGS
387 v.Op = OpRISCV64FNEGD
393 return rewriteValueRISCV64_OpNeq16(v)
395 return rewriteValueRISCV64_OpNeq32(v)
400 return rewriteValueRISCV64_OpNeq64(v)
405 return rewriteValueRISCV64_OpNeq8(v)
407 return rewriteValueRISCV64_OpNeqB(v)
409 return rewriteValueRISCV64_OpNeqPtr(v)
411 v.Op = OpRISCV64LoweredNilCheck
417 return rewriteValueRISCV64_OpOffPtr(v)
434 return rewriteValueRISCV64_OpPanicBounds(v)
436 return rewriteValueRISCV64_OpRISCV64ADD(v)
438 return rewriteValueRISCV64_OpRISCV64ADDI(v)
440 return rewriteValueRISCV64_OpRISCV64AND(v)
442 return rewriteValueRISCV64_OpRISCV64ANDI(v)
444 return rewriteValueRISCV64_OpRISCV64FADDD(v)
445 case OpRISCV64FMADDD:
446 return rewriteValueRISCV64_OpRISCV64FMADDD(v)
447 case OpRISCV64FMSUBD:
448 return rewriteValueRISCV64_OpRISCV64FMSUBD(v)
449 case OpRISCV64FNMADDD:
450 return rewriteValueRISCV64_OpRISCV64FNMADDD(v)
451 case OpRISCV64FNMSUBD:
452 return rewriteValueRISCV64_OpRISCV64FNMSUBD(v)
454 return rewriteValueRISCV64_OpRISCV64FSUBD(v)
455 case OpRISCV64MOVBUload:
456 return rewriteValueRISCV64_OpRISCV64MOVBUload(v)
457 case OpRISCV64MOVBUreg:
458 return rewriteValueRISCV64_OpRISCV64MOVBUreg(v)
459 case OpRISCV64MOVBload:
460 return rewriteValueRISCV64_OpRISCV64MOVBload(v)
461 case OpRISCV64MOVBreg:
462 return rewriteValueRISCV64_OpRISCV64MOVBreg(v)
463 case OpRISCV64MOVBstore:
464 return rewriteValueRISCV64_OpRISCV64MOVBstore(v)
465 case OpRISCV64MOVBstorezero:
466 return rewriteValueRISCV64_OpRISCV64MOVBstorezero(v)
467 case OpRISCV64MOVDload:
468 return rewriteValueRISCV64_OpRISCV64MOVDload(v)
469 case OpRISCV64MOVDnop:
470 return rewriteValueRISCV64_OpRISCV64MOVDnop(v)
471 case OpRISCV64MOVDreg:
472 return rewriteValueRISCV64_OpRISCV64MOVDreg(v)
473 case OpRISCV64MOVDstore:
474 return rewriteValueRISCV64_OpRISCV64MOVDstore(v)
475 case OpRISCV64MOVDstorezero:
476 return rewriteValueRISCV64_OpRISCV64MOVDstorezero(v)
477 case OpRISCV64MOVHUload:
478 return rewriteValueRISCV64_OpRISCV64MOVHUload(v)
479 case OpRISCV64MOVHUreg:
480 return rewriteValueRISCV64_OpRISCV64MOVHUreg(v)
481 case OpRISCV64MOVHload:
482 return rewriteValueRISCV64_OpRISCV64MOVHload(v)
483 case OpRISCV64MOVHreg:
484 return rewriteValueRISCV64_OpRISCV64MOVHreg(v)
485 case OpRISCV64MOVHstore:
486 return rewriteValueRISCV64_OpRISCV64MOVHstore(v)
487 case OpRISCV64MOVHstorezero:
488 return rewriteValueRISCV64_OpRISCV64MOVHstorezero(v)
489 case OpRISCV64MOVWUload:
490 return rewriteValueRISCV64_OpRISCV64MOVWUload(v)
491 case OpRISCV64MOVWUreg:
492 return rewriteValueRISCV64_OpRISCV64MOVWUreg(v)
493 case OpRISCV64MOVWload:
494 return rewriteValueRISCV64_OpRISCV64MOVWload(v)
495 case OpRISCV64MOVWreg:
496 return rewriteValueRISCV64_OpRISCV64MOVWreg(v)
497 case OpRISCV64MOVWstore:
498 return rewriteValueRISCV64_OpRISCV64MOVWstore(v)
499 case OpRISCV64MOVWstorezero:
500 return rewriteValueRISCV64_OpRISCV64MOVWstorezero(v)
502 return rewriteValueRISCV64_OpRISCV64NEG(v)
504 return rewriteValueRISCV64_OpRISCV64NEGW(v)
506 return rewriteValueRISCV64_OpRISCV64OR(v)
508 return rewriteValueRISCV64_OpRISCV64ORI(v)
510 return rewriteValueRISCV64_OpRISCV64SEQZ(v)
512 return rewriteValueRISCV64_OpRISCV64SLL(v)
514 return rewriteValueRISCV64_OpRISCV64SLLI(v)
516 return rewriteValueRISCV64_OpRISCV64SLT(v)
518 return rewriteValueRISCV64_OpRISCV64SLTI(v)
520 return rewriteValueRISCV64_OpRISCV64SLTIU(v)
522 return rewriteValueRISCV64_OpRISCV64SLTU(v)
524 return rewriteValueRISCV64_OpRISCV64SNEZ(v)
526 return rewriteValueRISCV64_OpRISCV64SRA(v)
528 return rewriteValueRISCV64_OpRISCV64SRAI(v)
530 return rewriteValueRISCV64_OpRISCV64SRL(v)
532 return rewriteValueRISCV64_OpRISCV64SRLI(v)
534 return rewriteValueRISCV64_OpRISCV64SUB(v)
536 return rewriteValueRISCV64_OpRISCV64SUBW(v)
538 return rewriteValueRISCV64_OpRISCV64XOR(v)
540 return rewriteValueRISCV64_OpRotateLeft16(v)
542 return rewriteValueRISCV64_OpRotateLeft32(v)
544 return rewriteValueRISCV64_OpRotateLeft64(v)
546 return rewriteValueRISCV64_OpRotateLeft8(v)
548 v.Op = OpRISCV64LoweredRound32F
551 v.Op = OpRISCV64LoweredRound64F
554 return rewriteValueRISCV64_OpRsh16Ux16(v)
556 return rewriteValueRISCV64_OpRsh16Ux32(v)
558 return rewriteValueRISCV64_OpRsh16Ux64(v)
560 return rewriteValueRISCV64_OpRsh16Ux8(v)
562 return rewriteValueRISCV64_OpRsh16x16(v)
564 return rewriteValueRISCV64_OpRsh16x32(v)
566 return rewriteValueRISCV64_OpRsh16x64(v)
568 return rewriteValueRISCV64_OpRsh16x8(v)
570 return rewriteValueRISCV64_OpRsh32Ux16(v)
572 return rewriteValueRISCV64_OpRsh32Ux32(v)
574 return rewriteValueRISCV64_OpRsh32Ux64(v)
576 return rewriteValueRISCV64_OpRsh32Ux8(v)
578 return rewriteValueRISCV64_OpRsh32x16(v)
580 return rewriteValueRISCV64_OpRsh32x32(v)
582 return rewriteValueRISCV64_OpRsh32x64(v)
584 return rewriteValueRISCV64_OpRsh32x8(v)
586 return rewriteValueRISCV64_OpRsh64Ux16(v)
588 return rewriteValueRISCV64_OpRsh64Ux32(v)
590 return rewriteValueRISCV64_OpRsh64Ux64(v)
592 return rewriteValueRISCV64_OpRsh64Ux8(v)
594 return rewriteValueRISCV64_OpRsh64x16(v)
596 return rewriteValueRISCV64_OpRsh64x32(v)
598 return rewriteValueRISCV64_OpRsh64x64(v)
600 return rewriteValueRISCV64_OpRsh64x8(v)
602 return rewriteValueRISCV64_OpRsh8Ux16(v)
604 return rewriteValueRISCV64_OpRsh8Ux32(v)
606 return rewriteValueRISCV64_OpRsh8Ux64(v)
608 return rewriteValueRISCV64_OpRsh8Ux8(v)
610 return rewriteValueRISCV64_OpRsh8x16(v)
612 return rewriteValueRISCV64_OpRsh8x32(v)
614 return rewriteValueRISCV64_OpRsh8x64(v)
616 return rewriteValueRISCV64_OpRsh8x8(v)
618 return rewriteValueRISCV64_OpSelect0(v)
620 return rewriteValueRISCV64_OpSelect1(v)
621 case OpSignExt16to32:
622 v.Op = OpRISCV64MOVHreg
624 case OpSignExt16to64:
625 v.Op = OpRISCV64MOVHreg
627 case OpSignExt32to64:
628 v.Op = OpRISCV64MOVWreg
631 v.Op = OpRISCV64MOVBreg
634 v.Op = OpRISCV64MOVBreg
637 v.Op = OpRISCV64MOVBreg
640 return rewriteValueRISCV64_OpSlicemask(v)
642 v.Op = OpRISCV64FSQRTD
645 v.Op = OpRISCV64FSQRTS
648 v.Op = OpRISCV64CALLstatic
651 return rewriteValueRISCV64_OpStore(v)
659 v.Op = OpRISCV64FSUBS
665 v.Op = OpRISCV64FSUBD
674 v.Op = OpRISCV64CALLtail
695 v.Op = OpRISCV64LoweredWB
710 return rewriteValueRISCV64_OpZero(v)
711 case OpZeroExt16to32:
712 v.Op = OpRISCV64MOVHUreg
714 case OpZeroExt16to64:
715 v.Op = OpRISCV64MOVHUreg
717 case OpZeroExt32to64:
718 v.Op = OpRISCV64MOVWUreg
721 v.Op = OpRISCV64MOVBUreg
724 v.Op = OpRISCV64MOVBUreg
727 v.Op = OpRISCV64MOVBUreg
732 func rewriteValueRISCV64_OpAddr(v *Value) bool {
734 // match: (Addr {sym} base)
735 // result: (MOVaddr {sym} [0] base)
737 sym := auxToSym(v.Aux)
739 v.reset(OpRISCV64MOVaddr)
740 v.AuxInt = int32ToAuxInt(0)
741 v.Aux = symToAux(sym)
746 func rewriteValueRISCV64_OpAtomicAnd8(v *Value) bool {
751 typ := &b.Func.Config.Types
752 // match: (AtomicAnd8 ptr val mem)
753 // result: (LoweredAtomicAnd32 (ANDI <typ.Uintptr> [^3] ptr) (NOT <typ.UInt32> (SLL <typ.UInt32> (XORI <typ.UInt32> [0xff] (ZeroExt8to32 val)) (SLLI <typ.UInt64> [3] (ANDI <typ.UInt64> [3] ptr)))) mem)
758 v.reset(OpRISCV64LoweredAtomicAnd32)
759 v0 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.Uintptr)
760 v0.AuxInt = int64ToAuxInt(^3)
762 v1 := b.NewValue0(v.Pos, OpRISCV64NOT, typ.UInt32)
763 v2 := b.NewValue0(v.Pos, OpRISCV64SLL, typ.UInt32)
764 v3 := b.NewValue0(v.Pos, OpRISCV64XORI, typ.UInt32)
765 v3.AuxInt = int64ToAuxInt(0xff)
766 v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
769 v5 := b.NewValue0(v.Pos, OpRISCV64SLLI, typ.UInt64)
770 v5.AuxInt = int64ToAuxInt(3)
771 v6 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.UInt64)
772 v6.AuxInt = int64ToAuxInt(3)
777 v.AddArg3(v0, v1, mem)
781 func rewriteValueRISCV64_OpAtomicCompareAndSwap32(v *Value) bool {
787 typ := &b.Func.Config.Types
788 // match: (AtomicCompareAndSwap32 ptr old new mem)
789 // result: (LoweredAtomicCas32 ptr (SignExt32to64 old) new mem)
795 v.reset(OpRISCV64LoweredAtomicCas32)
796 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
798 v.AddArg4(ptr, v0, new, mem)
802 func rewriteValueRISCV64_OpAtomicOr8(v *Value) bool {
807 typ := &b.Func.Config.Types
808 // match: (AtomicOr8 ptr val mem)
809 // result: (LoweredAtomicOr32 (ANDI <typ.Uintptr> [^3] ptr) (SLL <typ.UInt32> (ZeroExt8to32 val) (SLLI <typ.UInt64> [3] (ANDI <typ.UInt64> [3] ptr))) mem)
814 v.reset(OpRISCV64LoweredAtomicOr32)
815 v0 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.Uintptr)
816 v0.AuxInt = int64ToAuxInt(^3)
818 v1 := b.NewValue0(v.Pos, OpRISCV64SLL, typ.UInt32)
819 v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
821 v3 := b.NewValue0(v.Pos, OpRISCV64SLLI, typ.UInt64)
822 v3.AuxInt = int64ToAuxInt(3)
823 v4 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.UInt64)
824 v4.AuxInt = int64ToAuxInt(3)
828 v.AddArg3(v0, v1, mem)
832 func rewriteValueRISCV64_OpAvg64u(v *Value) bool {
836 // match: (Avg64u <t> x y)
837 // result: (ADD (ADD <t> (SRLI <t> [1] x) (SRLI <t> [1] y)) (ANDI <t> [1] (AND <t> x y)))
842 v.reset(OpRISCV64ADD)
843 v0 := b.NewValue0(v.Pos, OpRISCV64ADD, t)
844 v1 := b.NewValue0(v.Pos, OpRISCV64SRLI, t)
845 v1.AuxInt = int64ToAuxInt(1)
847 v2 := b.NewValue0(v.Pos, OpRISCV64SRLI, t)
848 v2.AuxInt = int64ToAuxInt(1)
851 v3 := b.NewValue0(v.Pos, OpRISCV64ANDI, t)
852 v3.AuxInt = int64ToAuxInt(1)
853 v4 := b.NewValue0(v.Pos, OpRISCV64AND, t)
860 func rewriteValueRISCV64_OpConst16(v *Value) bool {
861 // match: (Const16 [val])
862 // result: (MOVDconst [int64(val)])
864 val := auxIntToInt16(v.AuxInt)
865 v.reset(OpRISCV64MOVDconst)
866 v.AuxInt = int64ToAuxInt(int64(val))
870 func rewriteValueRISCV64_OpConst32(v *Value) bool {
871 // match: (Const32 [val])
872 // result: (MOVDconst [int64(val)])
874 val := auxIntToInt32(v.AuxInt)
875 v.reset(OpRISCV64MOVDconst)
876 v.AuxInt = int64ToAuxInt(int64(val))
880 func rewriteValueRISCV64_OpConst32F(v *Value) bool {
882 typ := &b.Func.Config.Types
883 // match: (Const32F [val])
884 // result: (FMVSX (MOVDconst [int64(math.Float32bits(val))]))
886 val := auxIntToFloat32(v.AuxInt)
887 v.reset(OpRISCV64FMVSX)
888 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
889 v0.AuxInt = int64ToAuxInt(int64(math.Float32bits(val)))
894 func rewriteValueRISCV64_OpConst64(v *Value) bool {
895 // match: (Const64 [val])
896 // result: (MOVDconst [int64(val)])
898 val := auxIntToInt64(v.AuxInt)
899 v.reset(OpRISCV64MOVDconst)
900 v.AuxInt = int64ToAuxInt(int64(val))
904 func rewriteValueRISCV64_OpConst64F(v *Value) bool {
906 typ := &b.Func.Config.Types
907 // match: (Const64F [val])
908 // result: (FMVDX (MOVDconst [int64(math.Float64bits(val))]))
910 val := auxIntToFloat64(v.AuxInt)
911 v.reset(OpRISCV64FMVDX)
912 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
913 v0.AuxInt = int64ToAuxInt(int64(math.Float64bits(val)))
918 func rewriteValueRISCV64_OpConst8(v *Value) bool {
919 // match: (Const8 [val])
920 // result: (MOVDconst [int64(val)])
922 val := auxIntToInt8(v.AuxInt)
923 v.reset(OpRISCV64MOVDconst)
924 v.AuxInt = int64ToAuxInt(int64(val))
928 func rewriteValueRISCV64_OpConstBool(v *Value) bool {
929 // match: (ConstBool [val])
930 // result: (MOVDconst [int64(b2i(val))])
932 val := auxIntToBool(v.AuxInt)
933 v.reset(OpRISCV64MOVDconst)
934 v.AuxInt = int64ToAuxInt(int64(b2i(val)))
938 func rewriteValueRISCV64_OpConstNil(v *Value) bool {
940 // result: (MOVDconst [0])
942 v.reset(OpRISCV64MOVDconst)
943 v.AuxInt = int64ToAuxInt(0)
947 func rewriteValueRISCV64_OpDiv16(v *Value) bool {
951 typ := &b.Func.Config.Types
952 // match: (Div16 x y [false])
953 // result: (DIVW (SignExt16to32 x) (SignExt16to32 y))
955 if auxIntToBool(v.AuxInt) != false {
960 v.reset(OpRISCV64DIVW)
961 v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
963 v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
970 func rewriteValueRISCV64_OpDiv16u(v *Value) bool {
974 typ := &b.Func.Config.Types
975 // match: (Div16u x y)
976 // result: (DIVUW (ZeroExt16to32 x) (ZeroExt16to32 y))
980 v.reset(OpRISCV64DIVUW)
981 v0 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
983 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
989 func rewriteValueRISCV64_OpDiv32(v *Value) bool {
992 // match: (Div32 x y [false])
993 // result: (DIVW x y)
995 if auxIntToBool(v.AuxInt) != false {
1000 v.reset(OpRISCV64DIVW)
1006 func rewriteValueRISCV64_OpDiv64(v *Value) bool {
1009 // match: (Div64 x y [false])
1010 // result: (DIV x y)
1012 if auxIntToBool(v.AuxInt) != false {
1017 v.reset(OpRISCV64DIV)
1023 func rewriteValueRISCV64_OpDiv8(v *Value) bool {
1027 typ := &b.Func.Config.Types
1028 // match: (Div8 x y)
1029 // result: (DIVW (SignExt8to32 x) (SignExt8to32 y))
1033 v.reset(OpRISCV64DIVW)
1034 v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
1036 v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
1042 func rewriteValueRISCV64_OpDiv8u(v *Value) bool {
1046 typ := &b.Func.Config.Types
1047 // match: (Div8u x y)
1048 // result: (DIVUW (ZeroExt8to32 x) (ZeroExt8to32 y))
1052 v.reset(OpRISCV64DIVUW)
1053 v0 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
1055 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
1061 func rewriteValueRISCV64_OpEq16(v *Value) bool {
1065 typ := &b.Func.Config.Types
1066 // match: (Eq16 x y)
1067 // result: (SEQZ (SUB <x.Type> (ZeroExt16to64 x) (ZeroExt16to64 y)))
1071 v.reset(OpRISCV64SEQZ)
1072 v0 := b.NewValue0(v.Pos, OpRISCV64SUB, x.Type)
1073 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
1075 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
1082 func rewriteValueRISCV64_OpEq32(v *Value) bool {
1086 typ := &b.Func.Config.Types
1087 // match: (Eq32 x y)
1088 // cond: x.Type.IsSigned()
1089 // result: (SEQZ (SUB <x.Type> (SignExt32to64 x) (SignExt32to64 y)))
1091 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
1094 if !(x.Type.IsSigned()) {
1097 v.reset(OpRISCV64SEQZ)
1098 v0 := b.NewValue0(v.Pos, OpRISCV64SUB, x.Type)
1099 v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
1101 v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
1109 // match: (Eq32 x y)
1110 // cond: !x.Type.IsSigned()
1111 // result: (SEQZ (SUB <x.Type> (ZeroExt32to64 x) (ZeroExt32to64 y)))
1113 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
1116 if !(!x.Type.IsSigned()) {
1119 v.reset(OpRISCV64SEQZ)
1120 v0 := b.NewValue0(v.Pos, OpRISCV64SUB, x.Type)
1121 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1123 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1133 func rewriteValueRISCV64_OpEq64(v *Value) bool {
1137 // match: (Eq64 x y)
1138 // result: (SEQZ (SUB <x.Type> x y))
1142 v.reset(OpRISCV64SEQZ)
1143 v0 := b.NewValue0(v.Pos, OpRISCV64SUB, x.Type)
1149 func rewriteValueRISCV64_OpEq8(v *Value) bool {
1153 typ := &b.Func.Config.Types
1155 // result: (SEQZ (SUB <x.Type> (ZeroExt8to64 x) (ZeroExt8to64 y)))
1159 v.reset(OpRISCV64SEQZ)
1160 v0 := b.NewValue0(v.Pos, OpRISCV64SUB, x.Type)
1161 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
1163 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
1170 func rewriteValueRISCV64_OpEqB(v *Value) bool {
1174 typ := &b.Func.Config.Types
1176 // result: (SEQZ (SUB <typ.Bool> x y))
1180 v.reset(OpRISCV64SEQZ)
1181 v0 := b.NewValue0(v.Pos, OpRISCV64SUB, typ.Bool)
1187 func rewriteValueRISCV64_OpEqPtr(v *Value) bool {
1191 typ := &b.Func.Config.Types
1192 // match: (EqPtr x y)
1193 // result: (SEQZ (SUB <typ.Uintptr> x y))
1197 v.reset(OpRISCV64SEQZ)
1198 v0 := b.NewValue0(v.Pos, OpRISCV64SUB, typ.Uintptr)
1204 func rewriteValueRISCV64_OpHmul32(v *Value) bool {
1208 typ := &b.Func.Config.Types
1209 // match: (Hmul32 x y)
1210 // result: (SRAI [32] (MUL (SignExt32to64 x) (SignExt32to64 y)))
1214 v.reset(OpRISCV64SRAI)
1215 v.AuxInt = int64ToAuxInt(32)
1216 v0 := b.NewValue0(v.Pos, OpRISCV64MUL, typ.Int64)
1217 v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
1219 v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
1226 func rewriteValueRISCV64_OpHmul32u(v *Value) bool {
1230 typ := &b.Func.Config.Types
1231 // match: (Hmul32u x y)
1232 // result: (SRLI [32] (MUL (ZeroExt32to64 x) (ZeroExt32to64 y)))
1236 v.reset(OpRISCV64SRLI)
1237 v.AuxInt = int64ToAuxInt(32)
1238 v0 := b.NewValue0(v.Pos, OpRISCV64MUL, typ.Int64)
1239 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1241 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1248 func rewriteValueRISCV64_OpLeq16(v *Value) bool {
1252 typ := &b.Func.Config.Types
1253 // match: (Leq16 x y)
1254 // result: (Not (Less16 y x))
1259 v0 := b.NewValue0(v.Pos, OpLess16, typ.Bool)
1265 func rewriteValueRISCV64_OpLeq16U(v *Value) bool {
1269 typ := &b.Func.Config.Types
1270 // match: (Leq16U x y)
1271 // result: (Not (Less16U y x))
1276 v0 := b.NewValue0(v.Pos, OpLess16U, typ.Bool)
1282 func rewriteValueRISCV64_OpLeq32(v *Value) bool {
1286 typ := &b.Func.Config.Types
1287 // match: (Leq32 x y)
1288 // result: (Not (Less32 y x))
1293 v0 := b.NewValue0(v.Pos, OpLess32, typ.Bool)
1299 func rewriteValueRISCV64_OpLeq32U(v *Value) bool {
1303 typ := &b.Func.Config.Types
1304 // match: (Leq32U x y)
1305 // result: (Not (Less32U y x))
1310 v0 := b.NewValue0(v.Pos, OpLess32U, typ.Bool)
1316 func rewriteValueRISCV64_OpLeq64(v *Value) bool {
1320 typ := &b.Func.Config.Types
1321 // match: (Leq64 x y)
1322 // result: (Not (Less64 y x))
1327 v0 := b.NewValue0(v.Pos, OpLess64, typ.Bool)
1333 func rewriteValueRISCV64_OpLeq64U(v *Value) bool {
1337 typ := &b.Func.Config.Types
1338 // match: (Leq64U x y)
1339 // result: (Not (Less64U y x))
1344 v0 := b.NewValue0(v.Pos, OpLess64U, typ.Bool)
1350 func rewriteValueRISCV64_OpLeq8(v *Value) bool {
1354 typ := &b.Func.Config.Types
1355 // match: (Leq8 x y)
1356 // result: (Not (Less8 y x))
1361 v0 := b.NewValue0(v.Pos, OpLess8, typ.Bool)
1367 func rewriteValueRISCV64_OpLeq8U(v *Value) bool {
1371 typ := &b.Func.Config.Types
1372 // match: (Leq8U x y)
1373 // result: (Not (Less8U y x))
1378 v0 := b.NewValue0(v.Pos, OpLess8U, typ.Bool)
1384 func rewriteValueRISCV64_OpLess16(v *Value) bool {
1388 typ := &b.Func.Config.Types
1389 // match: (Less16 x y)
1390 // result: (SLT (SignExt16to64 x) (SignExt16to64 y))
1394 v.reset(OpRISCV64SLT)
1395 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
1397 v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
1403 func rewriteValueRISCV64_OpLess16U(v *Value) bool {
1407 typ := &b.Func.Config.Types
1408 // match: (Less16U x y)
1409 // result: (SLTU (ZeroExt16to64 x) (ZeroExt16to64 y))
1413 v.reset(OpRISCV64SLTU)
1414 v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
1416 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
1422 func rewriteValueRISCV64_OpLess32(v *Value) bool {
1426 typ := &b.Func.Config.Types
1427 // match: (Less32 x y)
1428 // result: (SLT (SignExt32to64 x) (SignExt32to64 y))
1432 v.reset(OpRISCV64SLT)
1433 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
1435 v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
1441 func rewriteValueRISCV64_OpLess32U(v *Value) bool {
1445 typ := &b.Func.Config.Types
1446 // match: (Less32U x y)
1447 // result: (SLTU (ZeroExt32to64 x) (ZeroExt32to64 y))
1451 v.reset(OpRISCV64SLTU)
1452 v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1454 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1460 func rewriteValueRISCV64_OpLess8(v *Value) bool {
1464 typ := &b.Func.Config.Types
1465 // match: (Less8 x y)
1466 // result: (SLT (SignExt8to64 x) (SignExt8to64 y))
1470 v.reset(OpRISCV64SLT)
1471 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
1473 v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
1479 func rewriteValueRISCV64_OpLess8U(v *Value) bool {
1483 typ := &b.Func.Config.Types
1484 // match: (Less8U x y)
1485 // result: (SLTU (ZeroExt8to64 x) (ZeroExt8to64 y))
1489 v.reset(OpRISCV64SLTU)
1490 v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
1492 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
1498 func rewriteValueRISCV64_OpLoad(v *Value) bool {
1501 // match: (Load <t> ptr mem)
1502 // cond: t.IsBoolean()
1503 // result: (MOVBUload ptr mem)
1508 if !(t.IsBoolean()) {
1511 v.reset(OpRISCV64MOVBUload)
1515 // match: (Load <t> ptr mem)
1516 // cond: ( is8BitInt(t) && t.IsSigned())
1517 // result: (MOVBload ptr mem)
1522 if !(is8BitInt(t) && t.IsSigned()) {
1525 v.reset(OpRISCV64MOVBload)
1529 // match: (Load <t> ptr mem)
1530 // cond: ( is8BitInt(t) && !t.IsSigned())
1531 // result: (MOVBUload ptr mem)
1536 if !(is8BitInt(t) && !t.IsSigned()) {
1539 v.reset(OpRISCV64MOVBUload)
1543 // match: (Load <t> ptr mem)
1544 // cond: (is16BitInt(t) && t.IsSigned())
1545 // result: (MOVHload ptr mem)
1550 if !(is16BitInt(t) && t.IsSigned()) {
1553 v.reset(OpRISCV64MOVHload)
1557 // match: (Load <t> ptr mem)
1558 // cond: (is16BitInt(t) && !t.IsSigned())
1559 // result: (MOVHUload ptr mem)
1564 if !(is16BitInt(t) && !t.IsSigned()) {
1567 v.reset(OpRISCV64MOVHUload)
1571 // match: (Load <t> ptr mem)
1572 // cond: (is32BitInt(t) && t.IsSigned())
1573 // result: (MOVWload ptr mem)
1578 if !(is32BitInt(t) && t.IsSigned()) {
1581 v.reset(OpRISCV64MOVWload)
1585 // match: (Load <t> ptr mem)
1586 // cond: (is32BitInt(t) && !t.IsSigned())
1587 // result: (MOVWUload ptr mem)
1592 if !(is32BitInt(t) && !t.IsSigned()) {
1595 v.reset(OpRISCV64MOVWUload)
1599 // match: (Load <t> ptr mem)
1600 // cond: (is64BitInt(t) || isPtr(t))
1601 // result: (MOVDload ptr mem)
1606 if !(is64BitInt(t) || isPtr(t)) {
1609 v.reset(OpRISCV64MOVDload)
1613 // match: (Load <t> ptr mem)
1614 // cond: is32BitFloat(t)
1615 // result: (FMOVWload ptr mem)
1620 if !(is32BitFloat(t)) {
1623 v.reset(OpRISCV64FMOVWload)
1627 // match: (Load <t> ptr mem)
1628 // cond: is64BitFloat(t)
1629 // result: (FMOVDload ptr mem)
1634 if !(is64BitFloat(t)) {
1637 v.reset(OpRISCV64FMOVDload)
1643 func rewriteValueRISCV64_OpLocalAddr(v *Value) bool {
1647 typ := &b.Func.Config.Types
1648 // match: (LocalAddr <t> {sym} base mem)
1649 // cond: t.Elem().HasPointers()
1650 // result: (MOVaddr {sym} (SPanchored base mem))
1653 sym := auxToSym(v.Aux)
1656 if !(t.Elem().HasPointers()) {
1659 v.reset(OpRISCV64MOVaddr)
1660 v.Aux = symToAux(sym)
1661 v0 := b.NewValue0(v.Pos, OpSPanchored, typ.Uintptr)
1662 v0.AddArg2(base, mem)
1666 // match: (LocalAddr <t> {sym} base _)
1667 // cond: !t.Elem().HasPointers()
1668 // result: (MOVaddr {sym} base)
1671 sym := auxToSym(v.Aux)
1673 if !(!t.Elem().HasPointers()) {
1676 v.reset(OpRISCV64MOVaddr)
1677 v.Aux = symToAux(sym)
1683 func rewriteValueRISCV64_OpLsh16x16(v *Value) bool {
1687 typ := &b.Func.Config.Types
1688 // match: (Lsh16x16 <t> x y)
1689 // cond: !shiftIsBounded(v)
1690 // result: (AND (SLL <t> x y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
1695 if !(!shiftIsBounded(v)) {
1698 v.reset(OpRISCV64AND)
1699 v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
1701 v1 := b.NewValue0(v.Pos, OpNeg16, t)
1702 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
1703 v2.AuxInt = int64ToAuxInt(64)
1704 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
1711 // match: (Lsh16x16 x y)
1712 // cond: shiftIsBounded(v)
1713 // result: (SLL x y)
1717 if !(shiftIsBounded(v)) {
1720 v.reset(OpRISCV64SLL)
1726 func rewriteValueRISCV64_OpLsh16x32(v *Value) bool {
1730 typ := &b.Func.Config.Types
1731 // match: (Lsh16x32 <t> x y)
1732 // cond: !shiftIsBounded(v)
1733 // result: (AND (SLL <t> x y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
1738 if !(!shiftIsBounded(v)) {
1741 v.reset(OpRISCV64AND)
1742 v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
1744 v1 := b.NewValue0(v.Pos, OpNeg16, t)
1745 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
1746 v2.AuxInt = int64ToAuxInt(64)
1747 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1754 // match: (Lsh16x32 x y)
1755 // cond: shiftIsBounded(v)
1756 // result: (SLL x y)
1760 if !(shiftIsBounded(v)) {
1763 v.reset(OpRISCV64SLL)
1769 func rewriteValueRISCV64_OpLsh16x64(v *Value) bool {
1773 // match: (Lsh16x64 <t> x y)
1774 // cond: !shiftIsBounded(v)
1775 // result: (AND (SLL <t> x y) (Neg16 <t> (SLTIU <t> [64] y)))
1780 if !(!shiftIsBounded(v)) {
1783 v.reset(OpRISCV64AND)
1784 v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
1786 v1 := b.NewValue0(v.Pos, OpNeg16, t)
1787 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
1788 v2.AuxInt = int64ToAuxInt(64)
1794 // match: (Lsh16x64 x y)
1795 // cond: shiftIsBounded(v)
1796 // result: (SLL x y)
1800 if !(shiftIsBounded(v)) {
1803 v.reset(OpRISCV64SLL)
1809 func rewriteValueRISCV64_OpLsh16x8(v *Value) bool {
1813 typ := &b.Func.Config.Types
1814 // match: (Lsh16x8 <t> x y)
1815 // cond: !shiftIsBounded(v)
1816 // result: (AND (SLL <t> x y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
1821 if !(!shiftIsBounded(v)) {
1824 v.reset(OpRISCV64AND)
1825 v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
1827 v1 := b.NewValue0(v.Pos, OpNeg16, t)
1828 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
1829 v2.AuxInt = int64ToAuxInt(64)
1830 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
1837 // match: (Lsh16x8 x y)
1838 // cond: shiftIsBounded(v)
1839 // result: (SLL x y)
1843 if !(shiftIsBounded(v)) {
1846 v.reset(OpRISCV64SLL)
1852 func rewriteValueRISCV64_OpLsh32x16(v *Value) bool {
1856 typ := &b.Func.Config.Types
1857 // match: (Lsh32x16 <t> x y)
1858 // cond: !shiftIsBounded(v)
1859 // result: (AND (SLL <t> x y) (Neg32 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
1864 if !(!shiftIsBounded(v)) {
1867 v.reset(OpRISCV64AND)
1868 v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
1870 v1 := b.NewValue0(v.Pos, OpNeg32, t)
1871 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
1872 v2.AuxInt = int64ToAuxInt(64)
1873 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
1880 // match: (Lsh32x16 x y)
1881 // cond: shiftIsBounded(v)
1882 // result: (SLL x y)
1886 if !(shiftIsBounded(v)) {
1889 v.reset(OpRISCV64SLL)
1895 func rewriteValueRISCV64_OpLsh32x32(v *Value) bool {
1899 typ := &b.Func.Config.Types
1900 // match: (Lsh32x32 <t> x y)
1901 // cond: !shiftIsBounded(v)
1902 // result: (AND (SLL <t> x y) (Neg32 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
1907 if !(!shiftIsBounded(v)) {
1910 v.reset(OpRISCV64AND)
1911 v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
1913 v1 := b.NewValue0(v.Pos, OpNeg32, t)
1914 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
1915 v2.AuxInt = int64ToAuxInt(64)
1916 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1923 // match: (Lsh32x32 x y)
1924 // cond: shiftIsBounded(v)
1925 // result: (SLL x y)
1929 if !(shiftIsBounded(v)) {
1932 v.reset(OpRISCV64SLL)
1938 func rewriteValueRISCV64_OpLsh32x64(v *Value) bool {
1942 // match: (Lsh32x64 <t> x y)
1943 // cond: !shiftIsBounded(v)
1944 // result: (AND (SLL <t> x y) (Neg32 <t> (SLTIU <t> [64] y)))
1949 if !(!shiftIsBounded(v)) {
1952 v.reset(OpRISCV64AND)
1953 v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
1955 v1 := b.NewValue0(v.Pos, OpNeg32, t)
1956 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
1957 v2.AuxInt = int64ToAuxInt(64)
1963 // match: (Lsh32x64 x y)
1964 // cond: shiftIsBounded(v)
1965 // result: (SLL x y)
1969 if !(shiftIsBounded(v)) {
1972 v.reset(OpRISCV64SLL)
1978 func rewriteValueRISCV64_OpLsh32x8(v *Value) bool {
1982 typ := &b.Func.Config.Types
1983 // match: (Lsh32x8 <t> x y)
1984 // cond: !shiftIsBounded(v)
1985 // result: (AND (SLL <t> x y) (Neg32 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
1990 if !(!shiftIsBounded(v)) {
1993 v.reset(OpRISCV64AND)
1994 v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
1996 v1 := b.NewValue0(v.Pos, OpNeg32, t)
1997 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
1998 v2.AuxInt = int64ToAuxInt(64)
1999 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
2006 // match: (Lsh32x8 x y)
2007 // cond: shiftIsBounded(v)
2008 // result: (SLL x y)
2012 if !(shiftIsBounded(v)) {
2015 v.reset(OpRISCV64SLL)
2021 func rewriteValueRISCV64_OpLsh64x16(v *Value) bool {
2025 typ := &b.Func.Config.Types
2026 // match: (Lsh64x16 <t> x y)
2027 // cond: !shiftIsBounded(v)
2028 // result: (AND (SLL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
2033 if !(!shiftIsBounded(v)) {
2036 v.reset(OpRISCV64AND)
2037 v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
2039 v1 := b.NewValue0(v.Pos, OpNeg64, t)
2040 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
2041 v2.AuxInt = int64ToAuxInt(64)
2042 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
2049 // match: (Lsh64x16 x y)
2050 // cond: shiftIsBounded(v)
2051 // result: (SLL x y)
2055 if !(shiftIsBounded(v)) {
2058 v.reset(OpRISCV64SLL)
2064 func rewriteValueRISCV64_OpLsh64x32(v *Value) bool {
2068 typ := &b.Func.Config.Types
2069 // match: (Lsh64x32 <t> x y)
2070 // cond: !shiftIsBounded(v)
2071 // result: (AND (SLL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
2076 if !(!shiftIsBounded(v)) {
2079 v.reset(OpRISCV64AND)
2080 v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
2082 v1 := b.NewValue0(v.Pos, OpNeg64, t)
2083 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
2084 v2.AuxInt = int64ToAuxInt(64)
2085 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
2092 // match: (Lsh64x32 x y)
2093 // cond: shiftIsBounded(v)
2094 // result: (SLL x y)
2098 if !(shiftIsBounded(v)) {
2101 v.reset(OpRISCV64SLL)
2107 func rewriteValueRISCV64_OpLsh64x64(v *Value) bool {
2111 // match: (Lsh64x64 <t> x y)
2112 // cond: !shiftIsBounded(v)
2113 // result: (AND (SLL <t> x y) (Neg64 <t> (SLTIU <t> [64] y)))
2118 if !(!shiftIsBounded(v)) {
2121 v.reset(OpRISCV64AND)
2122 v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
2124 v1 := b.NewValue0(v.Pos, OpNeg64, t)
2125 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
2126 v2.AuxInt = int64ToAuxInt(64)
2132 // match: (Lsh64x64 x y)
2133 // cond: shiftIsBounded(v)
2134 // result: (SLL x y)
2138 if !(shiftIsBounded(v)) {
2141 v.reset(OpRISCV64SLL)
2147 func rewriteValueRISCV64_OpLsh64x8(v *Value) bool {
2151 typ := &b.Func.Config.Types
2152 // match: (Lsh64x8 <t> x y)
2153 // cond: !shiftIsBounded(v)
2154 // result: (AND (SLL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
2159 if !(!shiftIsBounded(v)) {
2162 v.reset(OpRISCV64AND)
2163 v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
2165 v1 := b.NewValue0(v.Pos, OpNeg64, t)
2166 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
2167 v2.AuxInt = int64ToAuxInt(64)
2168 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
2175 // match: (Lsh64x8 x y)
2176 // cond: shiftIsBounded(v)
2177 // result: (SLL x y)
2181 if !(shiftIsBounded(v)) {
2184 v.reset(OpRISCV64SLL)
2190 func rewriteValueRISCV64_OpLsh8x16(v *Value) bool {
2194 typ := &b.Func.Config.Types
2195 // match: (Lsh8x16 <t> x y)
2196 // cond: !shiftIsBounded(v)
2197 // result: (AND (SLL <t> x y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
2202 if !(!shiftIsBounded(v)) {
2205 v.reset(OpRISCV64AND)
2206 v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
2208 v1 := b.NewValue0(v.Pos, OpNeg8, t)
2209 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
2210 v2.AuxInt = int64ToAuxInt(64)
2211 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
2218 // match: (Lsh8x16 x y)
2219 // cond: shiftIsBounded(v)
2220 // result: (SLL x y)
2224 if !(shiftIsBounded(v)) {
2227 v.reset(OpRISCV64SLL)
2233 func rewriteValueRISCV64_OpLsh8x32(v *Value) bool {
2237 typ := &b.Func.Config.Types
2238 // match: (Lsh8x32 <t> x y)
2239 // cond: !shiftIsBounded(v)
2240 // result: (AND (SLL <t> x y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
2245 if !(!shiftIsBounded(v)) {
2248 v.reset(OpRISCV64AND)
2249 v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
2251 v1 := b.NewValue0(v.Pos, OpNeg8, t)
2252 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
2253 v2.AuxInt = int64ToAuxInt(64)
2254 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
2261 // match: (Lsh8x32 x y)
2262 // cond: shiftIsBounded(v)
2263 // result: (SLL x y)
2267 if !(shiftIsBounded(v)) {
2270 v.reset(OpRISCV64SLL)
2276 func rewriteValueRISCV64_OpLsh8x64(v *Value) bool {
2280 // match: (Lsh8x64 <t> x y)
2281 // cond: !shiftIsBounded(v)
2282 // result: (AND (SLL <t> x y) (Neg8 <t> (SLTIU <t> [64] y)))
2287 if !(!shiftIsBounded(v)) {
2290 v.reset(OpRISCV64AND)
2291 v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
2293 v1 := b.NewValue0(v.Pos, OpNeg8, t)
2294 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
2295 v2.AuxInt = int64ToAuxInt(64)
2301 // match: (Lsh8x64 x y)
2302 // cond: shiftIsBounded(v)
2303 // result: (SLL x y)
2307 if !(shiftIsBounded(v)) {
2310 v.reset(OpRISCV64SLL)
2316 func rewriteValueRISCV64_OpLsh8x8(v *Value) bool {
2320 typ := &b.Func.Config.Types
2321 // match: (Lsh8x8 <t> x y)
2322 // cond: !shiftIsBounded(v)
2323 // result: (AND (SLL <t> x y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
2328 if !(!shiftIsBounded(v)) {
2331 v.reset(OpRISCV64AND)
2332 v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
2334 v1 := b.NewValue0(v.Pos, OpNeg8, t)
2335 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
2336 v2.AuxInt = int64ToAuxInt(64)
2337 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
2344 // match: (Lsh8x8 x y)
2345 // cond: shiftIsBounded(v)
2346 // result: (SLL x y)
2350 if !(shiftIsBounded(v)) {
2353 v.reset(OpRISCV64SLL)
2359 func rewriteValueRISCV64_OpMod16(v *Value) bool {
2363 typ := &b.Func.Config.Types
2364 // match: (Mod16 x y [false])
2365 // result: (REMW (SignExt16to32 x) (SignExt16to32 y))
2367 if auxIntToBool(v.AuxInt) != false {
2372 v.reset(OpRISCV64REMW)
2373 v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
2375 v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
2382 func rewriteValueRISCV64_OpMod16u(v *Value) bool {
2386 typ := &b.Func.Config.Types
2387 // match: (Mod16u x y)
2388 // result: (REMUW (ZeroExt16to32 x) (ZeroExt16to32 y))
2392 v.reset(OpRISCV64REMUW)
2393 v0 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
2395 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
2401 func rewriteValueRISCV64_OpMod32(v *Value) bool {
2404 // match: (Mod32 x y [false])
2405 // result: (REMW x y)
2407 if auxIntToBool(v.AuxInt) != false {
2412 v.reset(OpRISCV64REMW)
2418 func rewriteValueRISCV64_OpMod64(v *Value) bool {
2421 // match: (Mod64 x y [false])
2422 // result: (REM x y)
2424 if auxIntToBool(v.AuxInt) != false {
2429 v.reset(OpRISCV64REM)
2435 func rewriteValueRISCV64_OpMod8(v *Value) bool {
2439 typ := &b.Func.Config.Types
2440 // match: (Mod8 x y)
2441 // result: (REMW (SignExt8to32 x) (SignExt8to32 y))
2445 v.reset(OpRISCV64REMW)
2446 v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
2448 v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
2454 func rewriteValueRISCV64_OpMod8u(v *Value) bool {
2458 typ := &b.Func.Config.Types
2459 // match: (Mod8u x y)
2460 // result: (REMUW (ZeroExt8to32 x) (ZeroExt8to32 y))
2464 v.reset(OpRISCV64REMUW)
2465 v0 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
2467 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
2473 func rewriteValueRISCV64_OpMove(v *Value) bool {
2478 config := b.Func.Config
2479 typ := &b.Func.Config.Types
2480 // match: (Move [0] _ _ mem)
2483 if auxIntToInt64(v.AuxInt) != 0 {
2490 // match: (Move [1] dst src mem)
2491 // result: (MOVBstore dst (MOVBload src mem) mem)
2493 if auxIntToInt64(v.AuxInt) != 1 {
2499 v.reset(OpRISCV64MOVBstore)
2500 v0 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
2501 v0.AddArg2(src, mem)
2502 v.AddArg3(dst, v0, mem)
2505 // match: (Move [2] {t} dst src mem)
2506 // cond: t.Alignment()%2 == 0
2507 // result: (MOVHstore dst (MOVHload src mem) mem)
2509 if auxIntToInt64(v.AuxInt) != 2 {
2512 t := auxToType(v.Aux)
2516 if !(t.Alignment()%2 == 0) {
2519 v.reset(OpRISCV64MOVHstore)
2520 v0 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
2521 v0.AddArg2(src, mem)
2522 v.AddArg3(dst, v0, mem)
2525 // match: (Move [2] dst src mem)
2526 // result: (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem))
2528 if auxIntToInt64(v.AuxInt) != 2 {
2534 v.reset(OpRISCV64MOVBstore)
2535 v.AuxInt = int32ToAuxInt(1)
2536 v0 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
2537 v0.AuxInt = int32ToAuxInt(1)
2538 v0.AddArg2(src, mem)
2539 v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
2540 v2 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
2541 v2.AddArg2(src, mem)
2542 v1.AddArg3(dst, v2, mem)
2543 v.AddArg3(dst, v0, v1)
2546 // match: (Move [4] {t} dst src mem)
2547 // cond: t.Alignment()%4 == 0
2548 // result: (MOVWstore dst (MOVWload src mem) mem)
2550 if auxIntToInt64(v.AuxInt) != 4 {
2553 t := auxToType(v.Aux)
2557 if !(t.Alignment()%4 == 0) {
2560 v.reset(OpRISCV64MOVWstore)
2561 v0 := b.NewValue0(v.Pos, OpRISCV64MOVWload, typ.Int32)
2562 v0.AddArg2(src, mem)
2563 v.AddArg3(dst, v0, mem)
2566 // match: (Move [4] {t} dst src mem)
2567 // cond: t.Alignment()%2 == 0
2568 // result: (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem))
2570 if auxIntToInt64(v.AuxInt) != 4 {
2573 t := auxToType(v.Aux)
2577 if !(t.Alignment()%2 == 0) {
2580 v.reset(OpRISCV64MOVHstore)
2581 v.AuxInt = int32ToAuxInt(2)
2582 v0 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
2583 v0.AuxInt = int32ToAuxInt(2)
2584 v0.AddArg2(src, mem)
2585 v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
2586 v2 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
2587 v2.AddArg2(src, mem)
2588 v1.AddArg3(dst, v2, mem)
2589 v.AddArg3(dst, v0, v1)
2592 // match: (Move [4] dst src mem)
2593 // result: (MOVBstore [3] dst (MOVBload [3] src mem) (MOVBstore [2] dst (MOVBload [2] src mem) (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem))))
2595 if auxIntToInt64(v.AuxInt) != 4 {
2601 v.reset(OpRISCV64MOVBstore)
2602 v.AuxInt = int32ToAuxInt(3)
2603 v0 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
2604 v0.AuxInt = int32ToAuxInt(3)
2605 v0.AddArg2(src, mem)
2606 v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
2607 v1.AuxInt = int32ToAuxInt(2)
2608 v2 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
2609 v2.AuxInt = int32ToAuxInt(2)
2610 v2.AddArg2(src, mem)
2611 v3 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
2612 v3.AuxInt = int32ToAuxInt(1)
2613 v4 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
2614 v4.AuxInt = int32ToAuxInt(1)
2615 v4.AddArg2(src, mem)
2616 v5 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
2617 v6 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
2618 v6.AddArg2(src, mem)
2619 v5.AddArg3(dst, v6, mem)
2620 v3.AddArg3(dst, v4, v5)
2621 v1.AddArg3(dst, v2, v3)
2622 v.AddArg3(dst, v0, v1)
2625 // match: (Move [8] {t} dst src mem)
2626 // cond: t.Alignment()%8 == 0
2627 // result: (MOVDstore dst (MOVDload src mem) mem)
2629 if auxIntToInt64(v.AuxInt) != 8 {
2632 t := auxToType(v.Aux)
2636 if !(t.Alignment()%8 == 0) {
2639 v.reset(OpRISCV64MOVDstore)
2640 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
2641 v0.AddArg2(src, mem)
2642 v.AddArg3(dst, v0, mem)
2645 // match: (Move [8] {t} dst src mem)
2646 // cond: t.Alignment()%4 == 0
2647 // result: (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem))
2649 if auxIntToInt64(v.AuxInt) != 8 {
2652 t := auxToType(v.Aux)
2656 if !(t.Alignment()%4 == 0) {
2659 v.reset(OpRISCV64MOVWstore)
2660 v.AuxInt = int32ToAuxInt(4)
2661 v0 := b.NewValue0(v.Pos, OpRISCV64MOVWload, typ.Int32)
2662 v0.AuxInt = int32ToAuxInt(4)
2663 v0.AddArg2(src, mem)
2664 v1 := b.NewValue0(v.Pos, OpRISCV64MOVWstore, types.TypeMem)
2665 v2 := b.NewValue0(v.Pos, OpRISCV64MOVWload, typ.Int32)
2666 v2.AddArg2(src, mem)
2667 v1.AddArg3(dst, v2, mem)
2668 v.AddArg3(dst, v0, v1)
2671 // match: (Move [8] {t} dst src mem)
2672 // cond: t.Alignment()%2 == 0
2673 // result: (MOVHstore [6] dst (MOVHload [6] src mem) (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem))))
2675 if auxIntToInt64(v.AuxInt) != 8 {
2678 t := auxToType(v.Aux)
2682 if !(t.Alignment()%2 == 0) {
2685 v.reset(OpRISCV64MOVHstore)
2686 v.AuxInt = int32ToAuxInt(6)
2687 v0 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
2688 v0.AuxInt = int32ToAuxInt(6)
2689 v0.AddArg2(src, mem)
2690 v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
2691 v1.AuxInt = int32ToAuxInt(4)
2692 v2 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
2693 v2.AuxInt = int32ToAuxInt(4)
2694 v2.AddArg2(src, mem)
2695 v3 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
2696 v3.AuxInt = int32ToAuxInt(2)
2697 v4 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
2698 v4.AuxInt = int32ToAuxInt(2)
2699 v4.AddArg2(src, mem)
2700 v5 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
2701 v6 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
2702 v6.AddArg2(src, mem)
2703 v5.AddArg3(dst, v6, mem)
2704 v3.AddArg3(dst, v4, v5)
2705 v1.AddArg3(dst, v2, v3)
2706 v.AddArg3(dst, v0, v1)
2709 // match: (Move [3] dst src mem)
2710 // result: (MOVBstore [2] dst (MOVBload [2] src mem) (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem)))
2712 if auxIntToInt64(v.AuxInt) != 3 {
2718 v.reset(OpRISCV64MOVBstore)
2719 v.AuxInt = int32ToAuxInt(2)
2720 v0 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
2721 v0.AuxInt = int32ToAuxInt(2)
2722 v0.AddArg2(src, mem)
2723 v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
2724 v1.AuxInt = int32ToAuxInt(1)
2725 v2 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
2726 v2.AuxInt = int32ToAuxInt(1)
2727 v2.AddArg2(src, mem)
2728 v3 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
2729 v4 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
2730 v4.AddArg2(src, mem)
2731 v3.AddArg3(dst, v4, mem)
2732 v1.AddArg3(dst, v2, v3)
2733 v.AddArg3(dst, v0, v1)
2736 // match: (Move [6] {t} dst src mem)
2737 // cond: t.Alignment()%2 == 0
2738 // result: (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem)))
2740 if auxIntToInt64(v.AuxInt) != 6 {
2743 t := auxToType(v.Aux)
2747 if !(t.Alignment()%2 == 0) {
2750 v.reset(OpRISCV64MOVHstore)
2751 v.AuxInt = int32ToAuxInt(4)
2752 v0 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
2753 v0.AuxInt = int32ToAuxInt(4)
2754 v0.AddArg2(src, mem)
2755 v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
2756 v1.AuxInt = int32ToAuxInt(2)
2757 v2 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
2758 v2.AuxInt = int32ToAuxInt(2)
2759 v2.AddArg2(src, mem)
2760 v3 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
2761 v4 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
2762 v4.AddArg2(src, mem)
2763 v3.AddArg3(dst, v4, mem)
2764 v1.AddArg3(dst, v2, v3)
2765 v.AddArg3(dst, v0, v1)
2768 // match: (Move [12] {t} dst src mem)
2769 // cond: t.Alignment()%4 == 0
2770 // result: (MOVWstore [8] dst (MOVWload [8] src mem) (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem)))
2772 if auxIntToInt64(v.AuxInt) != 12 {
2775 t := auxToType(v.Aux)
2779 if !(t.Alignment()%4 == 0) {
2782 v.reset(OpRISCV64MOVWstore)
2783 v.AuxInt = int32ToAuxInt(8)
2784 v0 := b.NewValue0(v.Pos, OpRISCV64MOVWload, typ.Int32)
2785 v0.AuxInt = int32ToAuxInt(8)
2786 v0.AddArg2(src, mem)
2787 v1 := b.NewValue0(v.Pos, OpRISCV64MOVWstore, types.TypeMem)
2788 v1.AuxInt = int32ToAuxInt(4)
2789 v2 := b.NewValue0(v.Pos, OpRISCV64MOVWload, typ.Int32)
2790 v2.AuxInt = int32ToAuxInt(4)
2791 v2.AddArg2(src, mem)
2792 v3 := b.NewValue0(v.Pos, OpRISCV64MOVWstore, types.TypeMem)
2793 v4 := b.NewValue0(v.Pos, OpRISCV64MOVWload, typ.Int32)
2794 v4.AddArg2(src, mem)
2795 v3.AddArg3(dst, v4, mem)
2796 v1.AddArg3(dst, v2, v3)
2797 v.AddArg3(dst, v0, v1)
2800 // match: (Move [16] {t} dst src mem)
2801 // cond: t.Alignment()%8 == 0
2802 // result: (MOVDstore [8] dst (MOVDload [8] src mem) (MOVDstore dst (MOVDload src mem) mem))
2804 if auxIntToInt64(v.AuxInt) != 16 {
2807 t := auxToType(v.Aux)
2811 if !(t.Alignment()%8 == 0) {
2814 v.reset(OpRISCV64MOVDstore)
2815 v.AuxInt = int32ToAuxInt(8)
2816 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
2817 v0.AuxInt = int32ToAuxInt(8)
2818 v0.AddArg2(src, mem)
2819 v1 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
2820 v2 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
2821 v2.AddArg2(src, mem)
2822 v1.AddArg3(dst, v2, mem)
2823 v.AddArg3(dst, v0, v1)
2826 // match: (Move [24] {t} dst src mem)
2827 // cond: t.Alignment()%8 == 0
2828 // result: (MOVDstore [16] dst (MOVDload [16] src mem) (MOVDstore [8] dst (MOVDload [8] src mem) (MOVDstore dst (MOVDload src mem) mem)))
2830 if auxIntToInt64(v.AuxInt) != 24 {
2833 t := auxToType(v.Aux)
2837 if !(t.Alignment()%8 == 0) {
2840 v.reset(OpRISCV64MOVDstore)
2841 v.AuxInt = int32ToAuxInt(16)
2842 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
2843 v0.AuxInt = int32ToAuxInt(16)
2844 v0.AddArg2(src, mem)
2845 v1 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
2846 v1.AuxInt = int32ToAuxInt(8)
2847 v2 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
2848 v2.AuxInt = int32ToAuxInt(8)
2849 v2.AddArg2(src, mem)
2850 v3 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
2851 v4 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
2852 v4.AddArg2(src, mem)
2853 v3.AddArg3(dst, v4, mem)
2854 v1.AddArg3(dst, v2, v3)
2855 v.AddArg3(dst, v0, v1)
2858 // match: (Move [32] {t} dst src mem)
2859 // cond: t.Alignment()%8 == 0
2860 // result: (MOVDstore [24] dst (MOVDload [24] src mem) (MOVDstore [16] dst (MOVDload [16] src mem) (MOVDstore [8] dst (MOVDload [8] src mem) (MOVDstore dst (MOVDload src mem) mem))))
2862 if auxIntToInt64(v.AuxInt) != 32 {
2865 t := auxToType(v.Aux)
2869 if !(t.Alignment()%8 == 0) {
2872 v.reset(OpRISCV64MOVDstore)
2873 v.AuxInt = int32ToAuxInt(24)
2874 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
2875 v0.AuxInt = int32ToAuxInt(24)
2876 v0.AddArg2(src, mem)
2877 v1 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
2878 v1.AuxInt = int32ToAuxInt(16)
2879 v2 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
2880 v2.AuxInt = int32ToAuxInt(16)
2881 v2.AddArg2(src, mem)
2882 v3 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
2883 v3.AuxInt = int32ToAuxInt(8)
2884 v4 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
2885 v4.AuxInt = int32ToAuxInt(8)
2886 v4.AddArg2(src, mem)
2887 v5 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
2888 v6 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
2889 v6.AddArg2(src, mem)
2890 v5.AddArg3(dst, v6, mem)
2891 v3.AddArg3(dst, v4, v5)
2892 v1.AddArg3(dst, v2, v3)
2893 v.AddArg3(dst, v0, v1)
2896 // match: (Move [s] {t} dst src mem)
2897 // cond: s%8 == 0 && s <= 8*128 && t.Alignment()%8 == 0 && !config.noDuffDevice && logLargeCopy(v, s)
2898 // result: (DUFFCOPY [16 * (128 - s/8)] dst src mem)
2900 s := auxIntToInt64(v.AuxInt)
2901 t := auxToType(v.Aux)
2905 if !(s%8 == 0 && s <= 8*128 && t.Alignment()%8 == 0 && !config.noDuffDevice && logLargeCopy(v, s)) {
2908 v.reset(OpRISCV64DUFFCOPY)
2909 v.AuxInt = int64ToAuxInt(16 * (128 - s/8))
2910 v.AddArg3(dst, src, mem)
2913 // match: (Move [s] {t} dst src mem)
2914 // cond: (s <= 16 || logLargeCopy(v, s))
2915 // result: (LoweredMove [t.Alignment()] dst src (ADDI <src.Type> [s-moveSize(t.Alignment(), config)] src) mem)
2917 s := auxIntToInt64(v.AuxInt)
2918 t := auxToType(v.Aux)
2922 if !(s <= 16 || logLargeCopy(v, s)) {
2925 v.reset(OpRISCV64LoweredMove)
2926 v.AuxInt = int64ToAuxInt(t.Alignment())
2927 v0 := b.NewValue0(v.Pos, OpRISCV64ADDI, src.Type)
2928 v0.AuxInt = int64ToAuxInt(s - moveSize(t.Alignment(), config))
2930 v.AddArg4(dst, src, v0, mem)
2935 func rewriteValueRISCV64_OpMul16(v *Value) bool {
2939 typ := &b.Func.Config.Types
2940 // match: (Mul16 x y)
2941 // result: (MULW (SignExt16to32 x) (SignExt16to32 y))
2945 v.reset(OpRISCV64MULW)
2946 v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
2948 v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
2954 func rewriteValueRISCV64_OpMul8(v *Value) bool {
2958 typ := &b.Func.Config.Types
2959 // match: (Mul8 x y)
2960 // result: (MULW (SignExt8to32 x) (SignExt8to32 y))
2964 v.reset(OpRISCV64MULW)
2965 v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
2967 v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
2973 func rewriteValueRISCV64_OpNeq16(v *Value) bool {
2977 typ := &b.Func.Config.Types
2978 // match: (Neq16 x y)
2979 // result: (Not (Eq16 x y))
2984 v0 := b.NewValue0(v.Pos, OpEq16, typ.Bool)
2990 func rewriteValueRISCV64_OpNeq32(v *Value) bool {
2994 typ := &b.Func.Config.Types
2995 // match: (Neq32 x y)
2996 // result: (Not (Eq32 x y))
3001 v0 := b.NewValue0(v.Pos, OpEq32, typ.Bool)
3007 func rewriteValueRISCV64_OpNeq64(v *Value) bool {
3011 typ := &b.Func.Config.Types
3012 // match: (Neq64 x y)
3013 // result: (Not (Eq64 x y))
3018 v0 := b.NewValue0(v.Pos, OpEq64, typ.Bool)
3024 func rewriteValueRISCV64_OpNeq8(v *Value) bool {
3028 typ := &b.Func.Config.Types
3029 // match: (Neq8 x y)
3030 // result: (Not (Eq8 x y))
3035 v0 := b.NewValue0(v.Pos, OpEq8, typ.Bool)
3041 func rewriteValueRISCV64_OpNeqB(v *Value) bool {
3045 typ := &b.Func.Config.Types
3046 // match: (NeqB x y)
3047 // result: (SNEZ (SUB <typ.Bool> x y))
3051 v.reset(OpRISCV64SNEZ)
3052 v0 := b.NewValue0(v.Pos, OpRISCV64SUB, typ.Bool)
3058 func rewriteValueRISCV64_OpNeqPtr(v *Value) bool {
3062 typ := &b.Func.Config.Types
3063 // match: (NeqPtr x y)
3064 // result: (Not (EqPtr x y))
3069 v0 := b.NewValue0(v.Pos, OpEqPtr, typ.Bool)
3075 func rewriteValueRISCV64_OpOffPtr(v *Value) bool {
3078 typ := &b.Func.Config.Types
3079 // match: (OffPtr [off] ptr:(SP))
3080 // cond: is32Bit(off)
3081 // result: (MOVaddr [int32(off)] ptr)
3083 off := auxIntToInt64(v.AuxInt)
3085 if ptr.Op != OpSP || !(is32Bit(off)) {
3088 v.reset(OpRISCV64MOVaddr)
3089 v.AuxInt = int32ToAuxInt(int32(off))
3093 // match: (OffPtr [off] ptr)
3094 // cond: is32Bit(off)
3095 // result: (ADDI [off] ptr)
3097 off := auxIntToInt64(v.AuxInt)
3099 if !(is32Bit(off)) {
3102 v.reset(OpRISCV64ADDI)
3103 v.AuxInt = int64ToAuxInt(off)
3107 // match: (OffPtr [off] ptr)
3108 // result: (ADD (MOVDconst [off]) ptr)
3110 off := auxIntToInt64(v.AuxInt)
3112 v.reset(OpRISCV64ADD)
3113 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
3114 v0.AuxInt = int64ToAuxInt(off)
3119 func rewriteValueRISCV64_OpPanicBounds(v *Value) bool {
3123 // match: (PanicBounds [kind] x y mem)
3124 // cond: boundsABI(kind) == 0
3125 // result: (LoweredPanicBoundsA [kind] x y mem)
3127 kind := auxIntToInt64(v.AuxInt)
3131 if !(boundsABI(kind) == 0) {
3134 v.reset(OpRISCV64LoweredPanicBoundsA)
3135 v.AuxInt = int64ToAuxInt(kind)
3136 v.AddArg3(x, y, mem)
3139 // match: (PanicBounds [kind] x y mem)
3140 // cond: boundsABI(kind) == 1
3141 // result: (LoweredPanicBoundsB [kind] x y mem)
3143 kind := auxIntToInt64(v.AuxInt)
3147 if !(boundsABI(kind) == 1) {
3150 v.reset(OpRISCV64LoweredPanicBoundsB)
3151 v.AuxInt = int64ToAuxInt(kind)
3152 v.AddArg3(x, y, mem)
3155 // match: (PanicBounds [kind] x y mem)
3156 // cond: boundsABI(kind) == 2
3157 // result: (LoweredPanicBoundsC [kind] x y mem)
3159 kind := auxIntToInt64(v.AuxInt)
3163 if !(boundsABI(kind) == 2) {
3166 v.reset(OpRISCV64LoweredPanicBoundsC)
3167 v.AuxInt = int64ToAuxInt(kind)
3168 v.AddArg3(x, y, mem)
3173 func rewriteValueRISCV64_OpRISCV64ADD(v *Value) bool {
3176 // match: (ADD (MOVDconst <t> [val]) x)
3177 // cond: is32Bit(val) && !t.IsPtr()
3178 // result: (ADDI [val] x)
3180 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
3181 if v_0.Op != OpRISCV64MOVDconst {
3185 val := auxIntToInt64(v_0.AuxInt)
3187 if !(is32Bit(val) && !t.IsPtr()) {
3190 v.reset(OpRISCV64ADDI)
3191 v.AuxInt = int64ToAuxInt(val)
3199 func rewriteValueRISCV64_OpRISCV64ADDI(v *Value) bool {
3201 // match: (ADDI [c] (MOVaddr [d] {s} x))
3202 // cond: is32Bit(c+int64(d))
3203 // result: (MOVaddr [int32(c)+d] {s} x)
3205 c := auxIntToInt64(v.AuxInt)
3206 if v_0.Op != OpRISCV64MOVaddr {
3209 d := auxIntToInt32(v_0.AuxInt)
3210 s := auxToSym(v_0.Aux)
3212 if !(is32Bit(c + int64(d))) {
3215 v.reset(OpRISCV64MOVaddr)
3216 v.AuxInt = int32ToAuxInt(int32(c) + d)
3221 // match: (ADDI [0] x)
3224 if auxIntToInt64(v.AuxInt) != 0 {
3231 // match: (ADDI [x] (MOVDconst [y]))
3232 // cond: is32Bit(x + y)
3233 // result: (MOVDconst [x + y])
3235 x := auxIntToInt64(v.AuxInt)
3236 if v_0.Op != OpRISCV64MOVDconst {
3239 y := auxIntToInt64(v_0.AuxInt)
3240 if !(is32Bit(x + y)) {
3243 v.reset(OpRISCV64MOVDconst)
3244 v.AuxInt = int64ToAuxInt(x + y)
3247 // match: (ADDI [x] (ADDI [y] z))
3248 // cond: is32Bit(x + y)
3249 // result: (ADDI [x + y] z)
3251 x := auxIntToInt64(v.AuxInt)
3252 if v_0.Op != OpRISCV64ADDI {
3255 y := auxIntToInt64(v_0.AuxInt)
3257 if !(is32Bit(x + y)) {
3260 v.reset(OpRISCV64ADDI)
3261 v.AuxInt = int64ToAuxInt(x + y)
3267 func rewriteValueRISCV64_OpRISCV64AND(v *Value) bool {
3270 // match: (AND (MOVDconst [val]) x)
3271 // cond: is32Bit(val)
3272 // result: (ANDI [val] x)
3274 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
3275 if v_0.Op != OpRISCV64MOVDconst {
3278 val := auxIntToInt64(v_0.AuxInt)
3280 if !(is32Bit(val)) {
3283 v.reset(OpRISCV64ANDI)
3284 v.AuxInt = int64ToAuxInt(val)
3292 func rewriteValueRISCV64_OpRISCV64ANDI(v *Value) bool {
3294 // match: (ANDI [0] x)
3295 // result: (MOVDconst [0])
3297 if auxIntToInt64(v.AuxInt) != 0 {
3300 v.reset(OpRISCV64MOVDconst)
3301 v.AuxInt = int64ToAuxInt(0)
3304 // match: (ANDI [-1] x)
3307 if auxIntToInt64(v.AuxInt) != -1 {
3314 // match: (ANDI [x] (MOVDconst [y]))
3315 // result: (MOVDconst [x & y])
3317 x := auxIntToInt64(v.AuxInt)
3318 if v_0.Op != OpRISCV64MOVDconst {
3321 y := auxIntToInt64(v_0.AuxInt)
3322 v.reset(OpRISCV64MOVDconst)
3323 v.AuxInt = int64ToAuxInt(x & y)
3326 // match: (ANDI [x] (ANDI [y] z))
3327 // result: (ANDI [x & y] z)
3329 x := auxIntToInt64(v.AuxInt)
3330 if v_0.Op != OpRISCV64ANDI {
3333 y := auxIntToInt64(v_0.AuxInt)
3335 v.reset(OpRISCV64ANDI)
3336 v.AuxInt = int64ToAuxInt(x & y)
3342 func rewriteValueRISCV64_OpRISCV64FADDD(v *Value) bool {
3345 // match: (FADDD a (FMULD x y))
3346 // cond: a.Block.Func.useFMA(v)
3347 // result: (FMADDD x y a)
3349 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
3351 if v_1.Op != OpRISCV64FMULD {
3356 if !(a.Block.Func.useFMA(v)) {
3359 v.reset(OpRISCV64FMADDD)
3367 func rewriteValueRISCV64_OpRISCV64FMADDD(v *Value) bool {
3371 // match: (FMADDD neg:(FNEGD x) y z)
3372 // cond: neg.Uses == 1
3373 // result: (FNMSUBD x y z)
3375 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
3377 if neg.Op != OpRISCV64FNEGD {
3383 if !(neg.Uses == 1) {
3386 v.reset(OpRISCV64FNMSUBD)
3392 // match: (FMADDD x y neg:(FNEGD z))
3393 // cond: neg.Uses == 1
3394 // result: (FMSUBD x y z)
3399 if neg.Op != OpRISCV64FNEGD {
3403 if !(neg.Uses == 1) {
3406 v.reset(OpRISCV64FMSUBD)
3412 func rewriteValueRISCV64_OpRISCV64FMSUBD(v *Value) bool {
3416 // match: (FMSUBD neg:(FNEGD x) y z)
3417 // cond: neg.Uses == 1
3418 // result: (FNMADDD x y z)
3420 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
3422 if neg.Op != OpRISCV64FNEGD {
3428 if !(neg.Uses == 1) {
3431 v.reset(OpRISCV64FNMADDD)
3437 // match: (FMSUBD x y neg:(FNEGD z))
3438 // cond: neg.Uses == 1
3439 // result: (FMADDD x y z)
3444 if neg.Op != OpRISCV64FNEGD {
3448 if !(neg.Uses == 1) {
3451 v.reset(OpRISCV64FMADDD)
3457 func rewriteValueRISCV64_OpRISCV64FNMADDD(v *Value) bool {
3461 // match: (FNMADDD neg:(FNEGD x) y z)
3462 // cond: neg.Uses == 1
3463 // result: (FMSUBD x y z)
3465 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
3467 if neg.Op != OpRISCV64FNEGD {
3473 if !(neg.Uses == 1) {
3476 v.reset(OpRISCV64FMSUBD)
3482 // match: (FNMADDD x y neg:(FNEGD z))
3483 // cond: neg.Uses == 1
3484 // result: (FNMSUBD x y z)
3489 if neg.Op != OpRISCV64FNEGD {
3493 if !(neg.Uses == 1) {
3496 v.reset(OpRISCV64FNMSUBD)
3502 func rewriteValueRISCV64_OpRISCV64FNMSUBD(v *Value) bool {
3506 // match: (FNMSUBD neg:(FNEGD x) y z)
3507 // cond: neg.Uses == 1
3508 // result: (FMADDD x y z)
3510 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
3512 if neg.Op != OpRISCV64FNEGD {
3518 if !(neg.Uses == 1) {
3521 v.reset(OpRISCV64FMADDD)
3527 // match: (FNMSUBD x y neg:(FNEGD z))
3528 // cond: neg.Uses == 1
3529 // result: (FNMADDD x y z)
3534 if neg.Op != OpRISCV64FNEGD {
3538 if !(neg.Uses == 1) {
3541 v.reset(OpRISCV64FNMADDD)
3547 func rewriteValueRISCV64_OpRISCV64FSUBD(v *Value) bool {
3550 // match: (FSUBD a (FMULD x y))
3551 // cond: a.Block.Func.useFMA(v)
3552 // result: (FNMSUBD x y a)
3555 if v_1.Op != OpRISCV64FMULD {
3560 if !(a.Block.Func.useFMA(v)) {
3563 v.reset(OpRISCV64FNMSUBD)
3567 // match: (FSUBD (FMULD x y) a)
3568 // cond: a.Block.Func.useFMA(v)
3569 // result: (FMSUBD x y a)
3571 if v_0.Op != OpRISCV64FMULD {
3577 if !(a.Block.Func.useFMA(v)) {
3580 v.reset(OpRISCV64FMSUBD)
3586 func rewriteValueRISCV64_OpRISCV64MOVBUload(v *Value) bool {
3589 // match: (MOVBUload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
3590 // cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
3591 // result: (MOVBUload [off1+off2] {mergeSym(sym1,sym2)} base mem)
3593 off1 := auxIntToInt32(v.AuxInt)
3594 sym1 := auxToSym(v.Aux)
3595 if v_0.Op != OpRISCV64MOVaddr {
3598 off2 := auxIntToInt32(v_0.AuxInt)
3599 sym2 := auxToSym(v_0.Aux)
3602 if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
3605 v.reset(OpRISCV64MOVBUload)
3606 v.AuxInt = int32ToAuxInt(off1 + off2)
3607 v.Aux = symToAux(mergeSym(sym1, sym2))
3608 v.AddArg2(base, mem)
3611 // match: (MOVBUload [off1] {sym} (ADDI [off2] base) mem)
3612 // cond: is32Bit(int64(off1)+off2)
3613 // result: (MOVBUload [off1+int32(off2)] {sym} base mem)
3615 off1 := auxIntToInt32(v.AuxInt)
3616 sym := auxToSym(v.Aux)
3617 if v_0.Op != OpRISCV64ADDI {
3620 off2 := auxIntToInt64(v_0.AuxInt)
3623 if !(is32Bit(int64(off1) + off2)) {
3626 v.reset(OpRISCV64MOVBUload)
3627 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3628 v.Aux = symToAux(sym)
3629 v.AddArg2(base, mem)
// rewriteValueRISCV64_OpRISCV64MOVBUreg simplifies MOVBUreg (zero-extend byte) values.
// It elides the extension when the operand is already 0/1-valued (FP/int comparisons,
// SEQZ/SNEZ) or already byte-sized (small ANDI masks, byte loads, atomic results, a
// prior MOVBUreg), constant-folds, and converts a single-use sign-extending MOVBload
// into a MOVBUload. Each rule is documented by its match:/cond:/result: comments.
// NOTE(review): generated file — change _gen/RISCV64.rules, not this code.
3634 func rewriteValueRISCV64_OpRISCV64MOVBUreg(v *Value) bool {
3637 // match: (MOVBUreg x:(FLES _ _))
3641 if x.Op != OpRISCV64FLES {
3647 // match: (MOVBUreg x:(FLTS _ _))
3651 if x.Op != OpRISCV64FLTS {
3657 // match: (MOVBUreg x:(FEQS _ _))
3661 if x.Op != OpRISCV64FEQS {
3667 // match: (MOVBUreg x:(FNES _ _))
3671 if x.Op != OpRISCV64FNES {
3677 // match: (MOVBUreg x:(FLED _ _))
3681 if x.Op != OpRISCV64FLED {
3687 // match: (MOVBUreg x:(FLTD _ _))
3691 if x.Op != OpRISCV64FLTD {
3697 // match: (MOVBUreg x:(FEQD _ _))
3701 if x.Op != OpRISCV64FEQD {
3707 // match: (MOVBUreg x:(FNED _ _))
3711 if x.Op != OpRISCV64FNED {
3717 // match: (MOVBUreg x:(SEQZ _))
3721 if x.Op != OpRISCV64SEQZ {
3727 // match: (MOVBUreg x:(SNEZ _))
3731 if x.Op != OpRISCV64SNEZ {
3737 // match: (MOVBUreg x:(SLT _ _))
3741 if x.Op != OpRISCV64SLT {
3747 // match: (MOVBUreg x:(SLTU _ _))
3751 if x.Op != OpRISCV64SLTU {
3757 // match: (MOVBUreg x:(ANDI [c] y))
3758 // cond: c >= 0 && int64(uint8(c)) == c
3762 if x.Op != OpRISCV64ANDI {
3765 c := auxIntToInt64(x.AuxInt)
3766 if !(c >= 0 && int64(uint8(c)) == c) {
3772 // match: (MOVBUreg (ANDI [c] x))
3774 // result: (ANDI [int64(uint8(c))] x)
3776 if v_0.Op != OpRISCV64ANDI {
3779 c := auxIntToInt64(v_0.AuxInt)
3784 v.reset(OpRISCV64ANDI)
3785 v.AuxInt = int64ToAuxInt(int64(uint8(c)))
3789 // match: (MOVBUreg (MOVDconst [c]))
3790 // result: (MOVDconst [int64(uint8(c))])
3792 if v_0.Op != OpRISCV64MOVDconst {
3795 c := auxIntToInt64(v_0.AuxInt)
3796 v.reset(OpRISCV64MOVDconst)
3797 v.AuxInt = int64ToAuxInt(int64(uint8(c)))
3800 // match: (MOVBUreg x:(MOVBUload _ _))
3801 // result: (MOVDreg x)
3804 if x.Op != OpRISCV64MOVBUload {
3807 v.reset(OpRISCV64MOVDreg)
3811 // match: (MOVBUreg x:(Select0 (LoweredAtomicLoad8 _ _)))
3812 // result: (MOVDreg x)
3815 if x.Op != OpSelect0 {
3819 if x_0.Op != OpRISCV64LoweredAtomicLoad8 {
3822 v.reset(OpRISCV64MOVDreg)
3826 // match: (MOVBUreg x:(Select0 (LoweredAtomicCas32 _ _ _ _)))
3827 // result: (MOVDreg x)
3830 if x.Op != OpSelect0 {
3834 if x_0.Op != OpRISCV64LoweredAtomicCas32 {
3837 v.reset(OpRISCV64MOVDreg)
3841 // match: (MOVBUreg x:(Select0 (LoweredAtomicCas64 _ _ _ _)))
3842 // result: (MOVDreg x)
3845 if x.Op != OpSelect0 {
3849 if x_0.Op != OpRISCV64LoweredAtomicCas64 {
3852 v.reset(OpRISCV64MOVDreg)
3856 // match: (MOVBUreg x:(MOVBUreg _))
3857 // result: (MOVDreg x)
3860 if x.Op != OpRISCV64MOVBUreg {
3863 v.reset(OpRISCV64MOVDreg)
3867 // match: (MOVBUreg <t> x:(MOVBload [off] {sym} ptr mem))
3868 // cond: x.Uses == 1 && clobber(x)
3869 // result: @x.Block (MOVBUload <t> [off] {sym} ptr mem)
3873 if x.Op != OpRISCV64MOVBload {
3876 off := auxIntToInt32(x.AuxInt)
3877 sym := auxToSym(x.Aux)
3880 if !(x.Uses == 1 && clobber(x)) {
3884 v0 := b.NewValue0(x.Pos, OpRISCV64MOVBUload, t)
3886 v0.AuxInt = int32ToAuxInt(off)
3887 v0.Aux = symToAux(sym)
3888 v0.AddArg2(ptr, mem)
// rewriteValueRISCV64_OpRISCV64MOVBload folds constant address arithmetic into
// MOVBload (sign-extending byte load): it merges a MOVaddr base (combining offsets
// and symbols) and absorbs an ADDI offset, guarded by is32Bit overflow checks.
// NOTE(review): generated file — change _gen/RISCV64.rules, not this code.
3893 func rewriteValueRISCV64_OpRISCV64MOVBload(v *Value) bool {
3896 // match: (MOVBload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
3897 // cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
3898 // result: (MOVBload [off1+off2] {mergeSym(sym1,sym2)} base mem)
3900 off1 := auxIntToInt32(v.AuxInt)
3901 sym1 := auxToSym(v.Aux)
3902 if v_0.Op != OpRISCV64MOVaddr {
3905 off2 := auxIntToInt32(v_0.AuxInt)
3906 sym2 := auxToSym(v_0.Aux)
3909 if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
3912 v.reset(OpRISCV64MOVBload)
3913 v.AuxInt = int32ToAuxInt(off1 + off2)
3914 v.Aux = symToAux(mergeSym(sym1, sym2))
3915 v.AddArg2(base, mem)
3918 // match: (MOVBload [off1] {sym} (ADDI [off2] base) mem)
3919 // cond: is32Bit(int64(off1)+off2)
3920 // result: (MOVBload [off1+int32(off2)] {sym} base mem)
3922 off1 := auxIntToInt32(v.AuxInt)
3923 sym := auxToSym(v.Aux)
3924 if v_0.Op != OpRISCV64ADDI {
3927 off2 := auxIntToInt64(v_0.AuxInt)
3930 if !(is32Bit(int64(off1) + off2)) {
3933 v.reset(OpRISCV64MOVBload)
3934 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3935 v.Aux = symToAux(sym)
3936 v.AddArg2(base, mem)
// rewriteValueRISCV64_OpRISCV64MOVBreg simplifies MOVBreg (sign-extend byte) values:
// drops the extension when the operand already fits in a signed byte (small ANDI
// masks, MOVBload results, a prior MOVBreg), constant-folds MOVDconst operands, and
// converts a single-use MOVBUload into a sign-extending MOVBload.
// NOTE(review): generated file — change _gen/RISCV64.rules, not this code.
3941 func rewriteValueRISCV64_OpRISCV64MOVBreg(v *Value) bool {
3944 // match: (MOVBreg x:(ANDI [c] y))
3945 // cond: c >= 0 && int64(int8(c)) == c
3949 if x.Op != OpRISCV64ANDI {
3952 c := auxIntToInt64(x.AuxInt)
3953 if !(c >= 0 && int64(int8(c)) == c) {
3959 // match: (MOVBreg (MOVDconst [c]))
3960 // result: (MOVDconst [int64(int8(c))])
3962 if v_0.Op != OpRISCV64MOVDconst {
3965 c := auxIntToInt64(v_0.AuxInt)
3966 v.reset(OpRISCV64MOVDconst)
3967 v.AuxInt = int64ToAuxInt(int64(int8(c)))
3970 // match: (MOVBreg x:(MOVBload _ _))
3971 // result: (MOVDreg x)
3974 if x.Op != OpRISCV64MOVBload {
3977 v.reset(OpRISCV64MOVDreg)
3981 // match: (MOVBreg x:(MOVBreg _))
3982 // result: (MOVDreg x)
3985 if x.Op != OpRISCV64MOVBreg {
3988 v.reset(OpRISCV64MOVDreg)
3992 // match: (MOVBreg <t> x:(MOVBUload [off] {sym} ptr mem))
3993 // cond: x.Uses == 1 && clobber(x)
3994 // result: @x.Block (MOVBload <t> [off] {sym} ptr mem)
3998 if x.Op != OpRISCV64MOVBUload {
4001 off := auxIntToInt32(x.AuxInt)
4002 sym := auxToSym(x.Aux)
4005 if !(x.Uses == 1 && clobber(x)) {
4009 v0 := b.NewValue0(x.Pos, OpRISCV64MOVBload, t)
4011 v0.AuxInt = int32ToAuxInt(off)
4012 v0.Aux = symToAux(sym)
4013 v0.AddArg2(ptr, mem)
// rewriteValueRISCV64_OpRISCV64MOVBstore simplifies MOVBstore (byte store): folds
// MOVaddr/ADDI address arithmetic into the store offset, turns a store of constant
// zero into MOVBstorezero, and strips redundant sign/zero extensions of the stored
// value (only the low byte is written, so any MOV[BHW]{,U}reg wrapper is dead).
// NOTE(review): generated file — change _gen/RISCV64.rules, not this code.
4022 // match: (MOVBstore [off1] {sym1} (MOVaddr [off2] {sym2} base) val mem)
4023 // cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
4024 // result: (MOVBstore [off1+off2] {mergeSym(sym1,sym2)} base val mem)
4026 off1 := auxIntToInt32(v.AuxInt)
4027 sym1 := auxToSym(v.Aux)
4028 if v_0.Op != OpRISCV64MOVaddr {
4031 off2 := auxIntToInt32(v_0.AuxInt)
4032 sym2 := auxToSym(v_0.Aux)
4036 if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
4039 v.reset(OpRISCV64MOVBstore)
4040 v.AuxInt = int32ToAuxInt(off1 + off2)
4041 v.Aux = symToAux(mergeSym(sym1, sym2))
4042 v.AddArg3(base, val, mem)
4045 // match: (MOVBstore [off1] {sym} (ADDI [off2] base) val mem)
4046 // cond: is32Bit(int64(off1)+off2)
4047 // result: (MOVBstore [off1+int32(off2)] {sym} base val mem)
4049 off1 := auxIntToInt32(v.AuxInt)
4050 sym := auxToSym(v.Aux)
4051 if v_0.Op != OpRISCV64ADDI {
4054 off2 := auxIntToInt64(v_0.AuxInt)
4058 if !(is32Bit(int64(off1) + off2)) {
4061 v.reset(OpRISCV64MOVBstore)
4062 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4063 v.Aux = symToAux(sym)
4064 v.AddArg3(base, val, mem)
4067 // match: (MOVBstore [off] {sym} ptr (MOVDconst [0]) mem)
4068 // result: (MOVBstorezero [off] {sym} ptr mem)
4070 off := auxIntToInt32(v.AuxInt)
4071 sym := auxToSym(v.Aux)
4073 if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
4077 v.reset(OpRISCV64MOVBstorezero)
4078 v.AuxInt = int32ToAuxInt(off)
4079 v.Aux = symToAux(sym)
4083 // match: (MOVBstore [off] {sym} ptr (MOVBreg x) mem)
4084 // result: (MOVBstore [off] {sym} ptr x mem)
4086 off := auxIntToInt32(v.AuxInt)
4087 sym := auxToSym(v.Aux)
4089 if v_1.Op != OpRISCV64MOVBreg {
4094 v.reset(OpRISCV64MOVBstore)
4095 v.AuxInt = int32ToAuxInt(off)
4096 v.Aux = symToAux(sym)
4097 v.AddArg3(ptr, x, mem)
4100 // match: (MOVBstore [off] {sym} ptr (MOVHreg x) mem)
4101 // result: (MOVBstore [off] {sym} ptr x mem)
4103 off := auxIntToInt32(v.AuxInt)
4104 sym := auxToSym(v.Aux)
4106 if v_1.Op != OpRISCV64MOVHreg {
4111 v.reset(OpRISCV64MOVBstore)
4112 v.AuxInt = int32ToAuxInt(off)
4113 v.Aux = symToAux(sym)
4114 v.AddArg3(ptr, x, mem)
4117 // match: (MOVBstore [off] {sym} ptr (MOVWreg x) mem)
4118 // result: (MOVBstore [off] {sym} ptr x mem)
4120 off := auxIntToInt32(v.AuxInt)
4121 sym := auxToSym(v.Aux)
4123 if v_1.Op != OpRISCV64MOVWreg {
4128 v.reset(OpRISCV64MOVBstore)
4129 v.AuxInt = int32ToAuxInt(off)
4130 v.Aux = symToAux(sym)
4131 v.AddArg3(ptr, x, mem)
4134 // match: (MOVBstore [off] {sym} ptr (MOVBUreg x) mem)
4135 // result: (MOVBstore [off] {sym} ptr x mem)
4137 off := auxIntToInt32(v.AuxInt)
4138 sym := auxToSym(v.Aux)
4140 if v_1.Op != OpRISCV64MOVBUreg {
4145 v.reset(OpRISCV64MOVBstore)
4146 v.AuxInt = int32ToAuxInt(off)
4147 v.Aux = symToAux(sym)
4148 v.AddArg3(ptr, x, mem)
4151 // match: (MOVBstore [off] {sym} ptr (MOVHUreg x) mem)
4152 // result: (MOVBstore [off] {sym} ptr x mem)
4154 off := auxIntToInt32(v.AuxInt)
4155 sym := auxToSym(v.Aux)
4157 if v_1.Op != OpRISCV64MOVHUreg {
4162 v.reset(OpRISCV64MOVBstore)
4163 v.AuxInt = int32ToAuxInt(off)
4164 v.Aux = symToAux(sym)
4165 v.AddArg3(ptr, x, mem)
4168 // match: (MOVBstore [off] {sym} ptr (MOVWUreg x) mem)
4169 // result: (MOVBstore [off] {sym} ptr x mem)
4171 off := auxIntToInt32(v.AuxInt)
4172 sym := auxToSym(v.Aux)
4174 if v_1.Op != OpRISCV64MOVWUreg {
4179 v.reset(OpRISCV64MOVBstore)
4180 v.AuxInt = int32ToAuxInt(off)
4181 v.Aux = symToAux(sym)
4182 v.AddArg3(ptr, x, mem)
// rewriteValueRISCV64_OpRISCV64MOVBstorezero folds MOVaddr/ADDI constant address
// arithmetic into the offset of MOVBstorezero (store of a zero byte), with is32Bit
// guards against offset overflow.
// NOTE(review): generated file — change _gen/RISCV64.rules, not this code.
4187 func rewriteValueRISCV64_OpRISCV64MOVBstorezero(v *Value) bool {
4190 // match: (MOVBstorezero [off1] {sym1} (MOVaddr [off2] {sym2} ptr) mem)
4191 // cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2))
4192 // result: (MOVBstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
4194 off1 := auxIntToInt32(v.AuxInt)
4195 sym1 := auxToSym(v.Aux)
4196 if v_0.Op != OpRISCV64MOVaddr {
4199 off2 := auxIntToInt32(v_0.AuxInt)
4200 sym2 := auxToSym(v_0.Aux)
4203 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
4206 v.reset(OpRISCV64MOVBstorezero)
4207 v.AuxInt = int32ToAuxInt(off1 + off2)
4208 v.Aux = symToAux(mergeSym(sym1, sym2))
4212 // match: (MOVBstorezero [off1] {sym} (ADDI [off2] ptr) mem)
4213 // cond: is32Bit(int64(off1)+off2)
4214 // result: (MOVBstorezero [off1+int32(off2)] {sym} ptr mem)
4216 off1 := auxIntToInt32(v.AuxInt)
4217 sym := auxToSym(v.Aux)
4218 if v_0.Op != OpRISCV64ADDI {
4221 off2 := auxIntToInt64(v_0.AuxInt)
4224 if !(is32Bit(int64(off1) + off2)) {
4227 v.reset(OpRISCV64MOVBstorezero)
4228 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4229 v.Aux = symToAux(sym)
// rewriteValueRISCV64_OpRISCV64MOVDload folds MOVaddr/ADDI constant address
// arithmetic into the offset of MOVDload (64-bit load), guarded by is32Bit checks.
// NOTE(review): generated file — change _gen/RISCV64.rules, not this code.
4238 // match: (MOVDload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
4239 // cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
4240 // result: (MOVDload [off1+off2] {mergeSym(sym1,sym2)} base mem)
4242 off1 := auxIntToInt32(v.AuxInt)
4243 sym1 := auxToSym(v.Aux)
4244 if v_0.Op != OpRISCV64MOVaddr {
4247 off2 := auxIntToInt32(v_0.AuxInt)
4248 sym2 := auxToSym(v_0.Aux)
4251 if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
4254 v.reset(OpRISCV64MOVDload)
4255 v.AuxInt = int32ToAuxInt(off1 + off2)
4256 v.Aux = symToAux(mergeSym(sym1, sym2))
4257 v.AddArg2(base, mem)
4260 // match: (MOVDload [off1] {sym} (ADDI [off2] base) mem)
4261 // cond: is32Bit(int64(off1)+off2)
4262 // result: (MOVDload [off1+int32(off2)] {sym} base mem)
4264 off1 := auxIntToInt32(v.AuxInt)
4265 sym := auxToSym(v.Aux)
4266 if v_0.Op != OpRISCV64ADDI {
4269 off2 := auxIntToInt64(v_0.AuxInt)
4272 if !(is32Bit(int64(off1) + off2)) {
4275 v.reset(OpRISCV64MOVDload)
4276 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4277 v.Aux = symToAux(sym)
4278 v.AddArg2(base, mem)
4283 func rewriteValueRISCV64_OpRISCV64MOVDnop(v *Value) bool {
4285 // match: (MOVDnop (MOVDconst [c]))
4286 // result: (MOVDconst [c])
4288 if v_0.Op != OpRISCV64MOVDconst {
4291 c := auxIntToInt64(v_0.AuxInt)
4292 v.reset(OpRISCV64MOVDconst)
4293 v.AuxInt = int64ToAuxInt(c)
// rewriteValueRISCV64_OpRISCV64MOVDreg collapses a single-use MOVDreg into a
// MOVDnop (rule: MOVDreg x with x.Uses == 1 becomes MOVDnop x).
// NOTE(review): generated file — change _gen/RISCV64.rules, not this code.
4298 func rewriteValueRISCV64_OpRISCV64MOVDreg(v *Value) bool {
4300 // match: (MOVDreg x)
4301 // cond: x.Uses == 1
4302 // result: (MOVDnop x)
4308 v.reset(OpRISCV64MOVDnop)
// rewriteValueRISCV64_OpRISCV64MOVDstore simplifies MOVDstore (64-bit store): folds
// MOVaddr/ADDI address arithmetic into the offset and turns a store of constant
// zero into MOVDstorezero.
// NOTE(review): generated file — change _gen/RISCV64.rules, not this code.
4314 func rewriteValueRISCV64_OpRISCV64MOVDstore(v *Value) bool {
4318 // match: (MOVDstore [off1] {sym1} (MOVaddr [off2] {sym2} base) val mem)
4319 // cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
4320 // result: (MOVDstore [off1+off2] {mergeSym(sym1,sym2)} base val mem)
4322 off1 := auxIntToInt32(v.AuxInt)
4323 sym1 := auxToSym(v.Aux)
4324 if v_0.Op != OpRISCV64MOVaddr {
4327 off2 := auxIntToInt32(v_0.AuxInt)
4328 sym2 := auxToSym(v_0.Aux)
4332 if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
4335 v.reset(OpRISCV64MOVDstore)
4336 v.AuxInt = int32ToAuxInt(off1 + off2)
4337 v.Aux = symToAux(mergeSym(sym1, sym2))
4338 v.AddArg3(base, val, mem)
4341 // match: (MOVDstore [off1] {sym} (ADDI [off2] base) val mem)
4342 // cond: is32Bit(int64(off1)+off2)
4343 // result: (MOVDstore [off1+int32(off2)] {sym} base val mem)
4345 off1 := auxIntToInt32(v.AuxInt)
4346 sym := auxToSym(v.Aux)
4347 if v_0.Op != OpRISCV64ADDI {
4350 off2 := auxIntToInt64(v_0.AuxInt)
4354 if !(is32Bit(int64(off1) + off2)) {
4357 v.reset(OpRISCV64MOVDstore)
4358 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4359 v.Aux = symToAux(sym)
4360 v.AddArg3(base, val, mem)
4363 // match: (MOVDstore [off] {sym} ptr (MOVDconst [0]) mem)
4364 // result: (MOVDstorezero [off] {sym} ptr mem)
4366 off := auxIntToInt32(v.AuxInt)
4367 sym := auxToSym(v.Aux)
4369 if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
4373 v.reset(OpRISCV64MOVDstorezero)
4374 v.AuxInt = int32ToAuxInt(off)
4375 v.Aux = symToAux(sym)
4381 func rewriteValueRISCV64_OpRISCV64MOVDstorezero(v *Value) bool {
4384 // match: (MOVDstorezero [off1] {sym1} (MOVaddr [off2] {sym2} ptr) mem)
4385 // cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2))
4386 // result: (MOVDstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
4388 off1 := auxIntToInt32(v.AuxInt)
4389 sym1 := auxToSym(v.Aux)
4390 if v_0.Op != OpRISCV64MOVaddr {
4393 off2 := auxIntToInt32(v_0.AuxInt)
4394 sym2 := auxToSym(v_0.Aux)
4397 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
4400 v.reset(OpRISCV64MOVDstorezero)
4401 v.AuxInt = int32ToAuxInt(off1 + off2)
4402 v.Aux = symToAux(mergeSym(sym1, sym2))
4406 // match: (MOVDstorezero [off1] {sym} (ADDI [off2] ptr) mem)
4407 // cond: is32Bit(int64(off1)+off2)
4408 // result: (MOVDstorezero [off1+int32(off2)] {sym} ptr mem)
4410 off1 := auxIntToInt32(v.AuxInt)
4411 sym := auxToSym(v.Aux)
4412 if v_0.Op != OpRISCV64ADDI {
4415 off2 := auxIntToInt64(v_0.AuxInt)
4418 if !(is32Bit(int64(off1) + off2)) {
4421 v.reset(OpRISCV64MOVDstorezero)
4422 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4423 v.Aux = symToAux(sym)
4429 func rewriteValueRISCV64_OpRISCV64MOVHUload(v *Value) bool {
4432 // match: (MOVHUload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
4433 // cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
4434 // result: (MOVHUload [off1+off2] {mergeSym(sym1,sym2)} base mem)
4436 off1 := auxIntToInt32(v.AuxInt)
4437 sym1 := auxToSym(v.Aux)
4438 if v_0.Op != OpRISCV64MOVaddr {
4441 off2 := auxIntToInt32(v_0.AuxInt)
4442 sym2 := auxToSym(v_0.Aux)
4445 if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
4448 v.reset(OpRISCV64MOVHUload)
4449 v.AuxInt = int32ToAuxInt(off1 + off2)
4450 v.Aux = symToAux(mergeSym(sym1, sym2))
4451 v.AddArg2(base, mem)
4454 // match: (MOVHUload [off1] {sym} (ADDI [off2] base) mem)
4455 // cond: is32Bit(int64(off1)+off2)
4456 // result: (MOVHUload [off1+int32(off2)] {sym} base mem)
4458 off1 := auxIntToInt32(v.AuxInt)
4459 sym := auxToSym(v.Aux)
4460 if v_0.Op != OpRISCV64ADDI {
4463 off2 := auxIntToInt64(v_0.AuxInt)
4466 if !(is32Bit(int64(off1) + off2)) {
4469 v.reset(OpRISCV64MOVHUload)
4470 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4471 v.Aux = symToAux(sym)
4472 v.AddArg2(base, mem)
// rewriteValueRISCV64_OpRISCV64MOVHUreg simplifies MOVHUreg (zero-extend halfword):
// drops the extension when the operand already fits in 16 unsigned bits (small ANDI
// masks, byte/halfword unsigned loads, prior MOVBUreg/MOVHUreg), constant-folds,
// and converts a single-use sign-extending MOVHload into a MOVHUload.
// NOTE(review): generated file — change _gen/RISCV64.rules, not this code.
4477 func rewriteValueRISCV64_OpRISCV64MOVHUreg(v *Value) bool {
4480 // match: (MOVHUreg x:(ANDI [c] y))
4481 // cond: c >= 0 && int64(uint16(c)) == c
4485 if x.Op != OpRISCV64ANDI {
4488 c := auxIntToInt64(x.AuxInt)
4489 if !(c >= 0 && int64(uint16(c)) == c) {
4495 // match: (MOVHUreg (ANDI [c] x))
4497 // result: (ANDI [int64(uint16(c))] x)
4499 if v_0.Op != OpRISCV64ANDI {
4502 c := auxIntToInt64(v_0.AuxInt)
4507 v.reset(OpRISCV64ANDI)
4508 v.AuxInt = int64ToAuxInt(int64(uint16(c)))
4512 // match: (MOVHUreg (MOVDconst [c]))
4513 // result: (MOVDconst [int64(uint16(c))])
4515 if v_0.Op != OpRISCV64MOVDconst {
4518 c := auxIntToInt64(v_0.AuxInt)
4519 v.reset(OpRISCV64MOVDconst)
4520 v.AuxInt = int64ToAuxInt(int64(uint16(c)))
4523 // match: (MOVHUreg x:(MOVBUload _ _))
4524 // result: (MOVDreg x)
4527 if x.Op != OpRISCV64MOVBUload {
4530 v.reset(OpRISCV64MOVDreg)
4534 // match: (MOVHUreg x:(MOVHUload _ _))
4535 // result: (MOVDreg x)
4538 if x.Op != OpRISCV64MOVHUload {
4541 v.reset(OpRISCV64MOVDreg)
4545 // match: (MOVHUreg x:(MOVBUreg _))
4546 // result: (MOVDreg x)
4549 if x.Op != OpRISCV64MOVBUreg {
4552 v.reset(OpRISCV64MOVDreg)
4556 // match: (MOVHUreg x:(MOVHUreg _))
4557 // result: (MOVDreg x)
4560 if x.Op != OpRISCV64MOVHUreg {
4563 v.reset(OpRISCV64MOVDreg)
4567 // match: (MOVHUreg <t> x:(MOVHload [off] {sym} ptr mem))
4568 // cond: x.Uses == 1 && clobber(x)
4569 // result: @x.Block (MOVHUload <t> [off] {sym} ptr mem)
4573 if x.Op != OpRISCV64MOVHload {
4576 off := auxIntToInt32(x.AuxInt)
4577 sym := auxToSym(x.Aux)
4580 if !(x.Uses == 1 && clobber(x)) {
4584 v0 := b.NewValue0(x.Pos, OpRISCV64MOVHUload, t)
4586 v0.AuxInt = int32ToAuxInt(off)
4587 v0.Aux = symToAux(sym)
4588 v0.AddArg2(ptr, mem)
4593 func rewriteValueRISCV64_OpRISCV64MOVHload(v *Value) bool {
4596 // match: (MOVHload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
4597 // cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
4598 // result: (MOVHload [off1+off2] {mergeSym(sym1,sym2)} base mem)
4600 off1 := auxIntToInt32(v.AuxInt)
4601 sym1 := auxToSym(v.Aux)
4602 if v_0.Op != OpRISCV64MOVaddr {
4605 off2 := auxIntToInt32(v_0.AuxInt)
4606 sym2 := auxToSym(v_0.Aux)
4609 if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
4612 v.reset(OpRISCV64MOVHload)
4613 v.AuxInt = int32ToAuxInt(off1 + off2)
4614 v.Aux = symToAux(mergeSym(sym1, sym2))
4615 v.AddArg2(base, mem)
4618 // match: (MOVHload [off1] {sym} (ADDI [off2] base) mem)
4619 // cond: is32Bit(int64(off1)+off2)
4620 // result: (MOVHload [off1+int32(off2)] {sym} base mem)
4622 off1 := auxIntToInt32(v.AuxInt)
4623 sym := auxToSym(v.Aux)
4624 if v_0.Op != OpRISCV64ADDI {
4627 off2 := auxIntToInt64(v_0.AuxInt)
4630 if !(is32Bit(int64(off1) + off2)) {
4633 v.reset(OpRISCV64MOVHload)
4634 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4635 v.Aux = symToAux(sym)
4636 v.AddArg2(base, mem)
4641 func rewriteValueRISCV64_OpRISCV64MOVHreg(v *Value) bool {
4644 // match: (MOVHreg x:(ANDI [c] y))
4645 // cond: c >= 0 && int64(int16(c)) == c
4649 if x.Op != OpRISCV64ANDI {
4652 c := auxIntToInt64(x.AuxInt)
4653 if !(c >= 0 && int64(int16(c)) == c) {
4659 // match: (MOVHreg (MOVDconst [c]))
4660 // result: (MOVDconst [int64(int16(c))])
4662 if v_0.Op != OpRISCV64MOVDconst {
4665 c := auxIntToInt64(v_0.AuxInt)
4666 v.reset(OpRISCV64MOVDconst)
4667 v.AuxInt = int64ToAuxInt(int64(int16(c)))
4670 // match: (MOVHreg x:(MOVBload _ _))
4671 // result: (MOVDreg x)
4674 if x.Op != OpRISCV64MOVBload {
4677 v.reset(OpRISCV64MOVDreg)
4681 // match: (MOVHreg x:(MOVBUload _ _))
4682 // result: (MOVDreg x)
4685 if x.Op != OpRISCV64MOVBUload {
4688 v.reset(OpRISCV64MOVDreg)
4692 // match: (MOVHreg x:(MOVHload _ _))
4693 // result: (MOVDreg x)
4696 if x.Op != OpRISCV64MOVHload {
4699 v.reset(OpRISCV64MOVDreg)
4703 // match: (MOVHreg x:(MOVBreg _))
4704 // result: (MOVDreg x)
4707 if x.Op != OpRISCV64MOVBreg {
4710 v.reset(OpRISCV64MOVDreg)
4714 // match: (MOVHreg x:(MOVBUreg _))
4715 // result: (MOVDreg x)
4718 if x.Op != OpRISCV64MOVBUreg {
4721 v.reset(OpRISCV64MOVDreg)
4725 // match: (MOVHreg x:(MOVHreg _))
4726 // result: (MOVDreg x)
4729 if x.Op != OpRISCV64MOVHreg {
4732 v.reset(OpRISCV64MOVDreg)
4736 // match: (MOVHreg <t> x:(MOVHUload [off] {sym} ptr mem))
4737 // cond: x.Uses == 1 && clobber(x)
4738 // result: @x.Block (MOVHload <t> [off] {sym} ptr mem)
4742 if x.Op != OpRISCV64MOVHUload {
4745 off := auxIntToInt32(x.AuxInt)
4746 sym := auxToSym(x.Aux)
4749 if !(x.Uses == 1 && clobber(x)) {
4753 v0 := b.NewValue0(x.Pos, OpRISCV64MOVHload, t)
4755 v0.AuxInt = int32ToAuxInt(off)
4756 v0.Aux = symToAux(sym)
4757 v0.AddArg2(ptr, mem)
// rewriteValueRISCV64_OpRISCV64MOVHstore simplifies MOVHstore (halfword store):
// folds MOVaddr/ADDI address arithmetic into the offset, turns a store of constant
// zero into MOVHstorezero, and strips redundant extensions of the stored value
// (only the low halfword is written).
// NOTE(review): generated file — change _gen/RISCV64.rules, not this code.
4762 func rewriteValueRISCV64_OpRISCV64MOVHstore(v *Value) bool {
4766 // match: (MOVHstore [off1] {sym1} (MOVaddr [off2] {sym2} base) val mem)
4767 // cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
4768 // result: (MOVHstore [off1+off2] {mergeSym(sym1,sym2)} base val mem)
4770 off1 := auxIntToInt32(v.AuxInt)
4771 sym1 := auxToSym(v.Aux)
4772 if v_0.Op != OpRISCV64MOVaddr {
4775 off2 := auxIntToInt32(v_0.AuxInt)
4776 sym2 := auxToSym(v_0.Aux)
4780 if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
4783 v.reset(OpRISCV64MOVHstore)
4784 v.AuxInt = int32ToAuxInt(off1 + off2)
4785 v.Aux = symToAux(mergeSym(sym1, sym2))
4786 v.AddArg3(base, val, mem)
4789 // match: (MOVHstore [off1] {sym} (ADDI [off2] base) val mem)
4790 // cond: is32Bit(int64(off1)+off2)
4791 // result: (MOVHstore [off1+int32(off2)] {sym} base val mem)
4793 off1 := auxIntToInt32(v.AuxInt)
4794 sym := auxToSym(v.Aux)
4795 if v_0.Op != OpRISCV64ADDI {
4798 off2 := auxIntToInt64(v_0.AuxInt)
4802 if !(is32Bit(int64(off1) + off2)) {
4805 v.reset(OpRISCV64MOVHstore)
4806 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4807 v.Aux = symToAux(sym)
4808 v.AddArg3(base, val, mem)
4811 // match: (MOVHstore [off] {sym} ptr (MOVDconst [0]) mem)
4812 // result: (MOVHstorezero [off] {sym} ptr mem)
4814 off := auxIntToInt32(v.AuxInt)
4815 sym := auxToSym(v.Aux)
4817 if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
4821 v.reset(OpRISCV64MOVHstorezero)
4822 v.AuxInt = int32ToAuxInt(off)
4823 v.Aux = symToAux(sym)
4827 // match: (MOVHstore [off] {sym} ptr (MOVHreg x) mem)
4828 // result: (MOVHstore [off] {sym} ptr x mem)
4830 off := auxIntToInt32(v.AuxInt)
4831 sym := auxToSym(v.Aux)
4833 if v_1.Op != OpRISCV64MOVHreg {
4838 v.reset(OpRISCV64MOVHstore)
4839 v.AuxInt = int32ToAuxInt(off)
4840 v.Aux = symToAux(sym)
4841 v.AddArg3(ptr, x, mem)
4844 // match: (MOVHstore [off] {sym} ptr (MOVWreg x) mem)
4845 // result: (MOVHstore [off] {sym} ptr x mem)
4847 off := auxIntToInt32(v.AuxInt)
4848 sym := auxToSym(v.Aux)
4850 if v_1.Op != OpRISCV64MOVWreg {
4855 v.reset(OpRISCV64MOVHstore)
4856 v.AuxInt = int32ToAuxInt(off)
4857 v.Aux = symToAux(sym)
4858 v.AddArg3(ptr, x, mem)
4861 // match: (MOVHstore [off] {sym} ptr (MOVHUreg x) mem)
4862 // result: (MOVHstore [off] {sym} ptr x mem)
4864 off := auxIntToInt32(v.AuxInt)
4865 sym := auxToSym(v.Aux)
4867 if v_1.Op != OpRISCV64MOVHUreg {
4872 v.reset(OpRISCV64MOVHstore)
4873 v.AuxInt = int32ToAuxInt(off)
4874 v.Aux = symToAux(sym)
4875 v.AddArg3(ptr, x, mem)
4878 // match: (MOVHstore [off] {sym} ptr (MOVWUreg x) mem)
4879 // result: (MOVHstore [off] {sym} ptr x mem)
4881 off := auxIntToInt32(v.AuxInt)
4882 sym := auxToSym(v.Aux)
4884 if v_1.Op != OpRISCV64MOVWUreg {
4889 v.reset(OpRISCV64MOVHstore)
4890 v.AuxInt = int32ToAuxInt(off)
4891 v.Aux = symToAux(sym)
4892 v.AddArg3(ptr, x, mem)
// rewriteValueRISCV64_OpRISCV64MOVHstorezero folds MOVaddr/ADDI constant address
// arithmetic into the offset of MOVHstorezero (store of a zero halfword).
// NOTE(review): generated file — change _gen/RISCV64.rules, not this code.
4897 func rewriteValueRISCV64_OpRISCV64MOVHstorezero(v *Value) bool {
4900 // match: (MOVHstorezero [off1] {sym1} (MOVaddr [off2] {sym2} ptr) mem)
4901 // cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2))
4902 // result: (MOVHstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
4904 off1 := auxIntToInt32(v.AuxInt)
4905 sym1 := auxToSym(v.Aux)
4906 if v_0.Op != OpRISCV64MOVaddr {
4909 off2 := auxIntToInt32(v_0.AuxInt)
4910 sym2 := auxToSym(v_0.Aux)
4913 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
4916 v.reset(OpRISCV64MOVHstorezero)
4917 v.AuxInt = int32ToAuxInt(off1 + off2)
4918 v.Aux = symToAux(mergeSym(sym1, sym2))
4922 // match: (MOVHstorezero [off1] {sym} (ADDI [off2] ptr) mem)
4923 // cond: is32Bit(int64(off1)+off2)
4924 // result: (MOVHstorezero [off1+int32(off2)] {sym} ptr mem)
4926 off1 := auxIntToInt32(v.AuxInt)
4927 sym := auxToSym(v.Aux)
4928 if v_0.Op != OpRISCV64ADDI {
4931 off2 := auxIntToInt64(v_0.AuxInt)
4934 if !(is32Bit(int64(off1) + off2)) {
4937 v.reset(OpRISCV64MOVHstorezero)
4938 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4939 v.Aux = symToAux(sym)
4945 func rewriteValueRISCV64_OpRISCV64MOVWUload(v *Value) bool {
4948 // match: (MOVWUload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
4949 // cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
4950 // result: (MOVWUload [off1+off2] {mergeSym(sym1,sym2)} base mem)
4952 off1 := auxIntToInt32(v.AuxInt)
4953 sym1 := auxToSym(v.Aux)
4954 if v_0.Op != OpRISCV64MOVaddr {
4957 off2 := auxIntToInt32(v_0.AuxInt)
4958 sym2 := auxToSym(v_0.Aux)
4961 if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
4964 v.reset(OpRISCV64MOVWUload)
4965 v.AuxInt = int32ToAuxInt(off1 + off2)
4966 v.Aux = symToAux(mergeSym(sym1, sym2))
4967 v.AddArg2(base, mem)
4970 // match: (MOVWUload [off1] {sym} (ADDI [off2] base) mem)
4971 // cond: is32Bit(int64(off1)+off2)
4972 // result: (MOVWUload [off1+int32(off2)] {sym} base mem)
4974 off1 := auxIntToInt32(v.AuxInt)
4975 sym := auxToSym(v.Aux)
4976 if v_0.Op != OpRISCV64ADDI {
4979 off2 := auxIntToInt64(v_0.AuxInt)
4982 if !(is32Bit(int64(off1) + off2)) {
4985 v.reset(OpRISCV64MOVWUload)
4986 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4987 v.Aux = symToAux(sym)
4988 v.AddArg2(base, mem)
4993 func rewriteValueRISCV64_OpRISCV64MOVWUreg(v *Value) bool {
4996 typ := &b.Func.Config.Types
4997 // match: (MOVWUreg x:(ANDI [c] y))
4998 // cond: c >= 0 && int64(uint32(c)) == c
5002 if x.Op != OpRISCV64ANDI {
5005 c := auxIntToInt64(x.AuxInt)
5006 if !(c >= 0 && int64(uint32(c)) == c) {
5012 // match: (MOVWUreg (ANDI [c] x))
5014 // result: (AND (MOVDconst [int64(uint32(c))]) x)
5016 if v_0.Op != OpRISCV64ANDI {
5019 c := auxIntToInt64(v_0.AuxInt)
5024 v.reset(OpRISCV64AND)
5025 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
5026 v0.AuxInt = int64ToAuxInt(int64(uint32(c)))
5030 // match: (MOVWUreg (MOVDconst [c]))
5031 // result: (MOVDconst [int64(uint32(c))])
5033 if v_0.Op != OpRISCV64MOVDconst {
5036 c := auxIntToInt64(v_0.AuxInt)
5037 v.reset(OpRISCV64MOVDconst)
5038 v.AuxInt = int64ToAuxInt(int64(uint32(c)))
5041 // match: (MOVWUreg x:(MOVBUload _ _))
5042 // result: (MOVDreg x)
5045 if x.Op != OpRISCV64MOVBUload {
5048 v.reset(OpRISCV64MOVDreg)
5052 // match: (MOVWUreg x:(MOVHUload _ _))
5053 // result: (MOVDreg x)
5056 if x.Op != OpRISCV64MOVHUload {
5059 v.reset(OpRISCV64MOVDreg)
5063 // match: (MOVWUreg x:(MOVWUload _ _))
5064 // result: (MOVDreg x)
5067 if x.Op != OpRISCV64MOVWUload {
5070 v.reset(OpRISCV64MOVDreg)
5074 // match: (MOVWUreg x:(MOVBUreg _))
5075 // result: (MOVDreg x)
5078 if x.Op != OpRISCV64MOVBUreg {
5081 v.reset(OpRISCV64MOVDreg)
5085 // match: (MOVWUreg x:(MOVHUreg _))
5086 // result: (MOVDreg x)
5089 if x.Op != OpRISCV64MOVHUreg {
5092 v.reset(OpRISCV64MOVDreg)
5096 // match: (MOVWUreg x:(MOVWUreg _))
5097 // result: (MOVDreg x)
5100 if x.Op != OpRISCV64MOVWUreg {
5103 v.reset(OpRISCV64MOVDreg)
5107 // match: (MOVWUreg <t> x:(MOVWload [off] {sym} ptr mem))
5108 // cond: x.Uses == 1 && clobber(x)
5109 // result: @x.Block (MOVWUload <t> [off] {sym} ptr mem)
5113 if x.Op != OpRISCV64MOVWload {
5116 off := auxIntToInt32(x.AuxInt)
5117 sym := auxToSym(x.Aux)
5120 if !(x.Uses == 1 && clobber(x)) {
5124 v0 := b.NewValue0(x.Pos, OpRISCV64MOVWUload, t)
5126 v0.AuxInt = int32ToAuxInt(off)
5127 v0.Aux = symToAux(sym)
5128 v0.AddArg2(ptr, mem)
5133 func rewriteValueRISCV64_OpRISCV64MOVWload(v *Value) bool {
5136 // match: (MOVWload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
5137 // cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
5138 // result: (MOVWload [off1+off2] {mergeSym(sym1,sym2)} base mem)
5140 off1 := auxIntToInt32(v.AuxInt)
5141 sym1 := auxToSym(v.Aux)
5142 if v_0.Op != OpRISCV64MOVaddr {
5145 off2 := auxIntToInt32(v_0.AuxInt)
5146 sym2 := auxToSym(v_0.Aux)
5149 if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
5152 v.reset(OpRISCV64MOVWload)
5153 v.AuxInt = int32ToAuxInt(off1 + off2)
5154 v.Aux = symToAux(mergeSym(sym1, sym2))
5155 v.AddArg2(base, mem)
5158 // match: (MOVWload [off1] {sym} (ADDI [off2] base) mem)
5159 // cond: is32Bit(int64(off1)+off2)
5160 // result: (MOVWload [off1+int32(off2)] {sym} base mem)
5162 off1 := auxIntToInt32(v.AuxInt)
5163 sym := auxToSym(v.Aux)
5164 if v_0.Op != OpRISCV64ADDI {
5167 off2 := auxIntToInt64(v_0.AuxInt)
5170 if !(is32Bit(int64(off1) + off2)) {
5173 v.reset(OpRISCV64MOVWload)
5174 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
5175 v.Aux = symToAux(sym)
5176 v.AddArg2(base, mem)
5181 func rewriteValueRISCV64_OpRISCV64MOVWreg(v *Value) bool {
5184 // match: (MOVWreg x:(ANDI [c] y))
5185 // cond: c >= 0 && int64(int32(c)) == c
5189 if x.Op != OpRISCV64ANDI {
5192 c := auxIntToInt64(x.AuxInt)
5193 if !(c >= 0 && int64(int32(c)) == c) {
5199 // match: (MOVWreg (MOVDconst [c]))
5200 // result: (MOVDconst [int64(int32(c))])
5202 if v_0.Op != OpRISCV64MOVDconst {
5205 c := auxIntToInt64(v_0.AuxInt)
5206 v.reset(OpRISCV64MOVDconst)
5207 v.AuxInt = int64ToAuxInt(int64(int32(c)))
5210 // match: (MOVWreg x:(MOVBload _ _))
5211 // result: (MOVDreg x)
5214 if x.Op != OpRISCV64MOVBload {
5217 v.reset(OpRISCV64MOVDreg)
5221 // match: (MOVWreg x:(MOVBUload _ _))
5222 // result: (MOVDreg x)
5225 if x.Op != OpRISCV64MOVBUload {
5228 v.reset(OpRISCV64MOVDreg)
5232 // match: (MOVWreg x:(MOVHload _ _))
5233 // result: (MOVDreg x)
5236 if x.Op != OpRISCV64MOVHload {
5239 v.reset(OpRISCV64MOVDreg)
5243 // match: (MOVWreg x:(MOVHUload _ _))
5244 // result: (MOVDreg x)
5247 if x.Op != OpRISCV64MOVHUload {
5250 v.reset(OpRISCV64MOVDreg)
5254 // match: (MOVWreg x:(MOVWload _ _))
5255 // result: (MOVDreg x)
5258 if x.Op != OpRISCV64MOVWload {
5261 v.reset(OpRISCV64MOVDreg)
5265 // match: (MOVWreg x:(ADDIW _))
5266 // result: (MOVDreg x)
5269 if x.Op != OpRISCV64ADDIW {
5272 v.reset(OpRISCV64MOVDreg)
5276 // match: (MOVWreg x:(SUBW _ _))
5277 // result: (MOVDreg x)
5280 if x.Op != OpRISCV64SUBW {
5283 v.reset(OpRISCV64MOVDreg)
5287 // match: (MOVWreg x:(NEGW _))
5288 // result: (MOVDreg x)
5291 if x.Op != OpRISCV64NEGW {
5294 v.reset(OpRISCV64MOVDreg)
5298 // match: (MOVWreg x:(MULW _ _))
5299 // result: (MOVDreg x)
5302 if x.Op != OpRISCV64MULW {
5305 v.reset(OpRISCV64MOVDreg)
5309 // match: (MOVWreg x:(DIVW _ _))
5310 // result: (MOVDreg x)
5313 if x.Op != OpRISCV64DIVW {
5316 v.reset(OpRISCV64MOVDreg)
5320 // match: (MOVWreg x:(DIVUW _ _))
5321 // result: (MOVDreg x)
5324 if x.Op != OpRISCV64DIVUW {
5327 v.reset(OpRISCV64MOVDreg)
5331 // match: (MOVWreg x:(REMW _ _))
5332 // result: (MOVDreg x)
5335 if x.Op != OpRISCV64REMW {
5338 v.reset(OpRISCV64MOVDreg)
5342 // match: (MOVWreg x:(REMUW _ _))
5343 // result: (MOVDreg x)
5346 if x.Op != OpRISCV64REMUW {
5349 v.reset(OpRISCV64MOVDreg)
5353 // match: (MOVWreg x:(MOVBreg _))
5354 // result: (MOVDreg x)
5357 if x.Op != OpRISCV64MOVBreg {
5360 v.reset(OpRISCV64MOVDreg)
5364 // match: (MOVWreg x:(MOVBUreg _))
5365 // result: (MOVDreg x)
5368 if x.Op != OpRISCV64MOVBUreg {
5371 v.reset(OpRISCV64MOVDreg)
5375 // match: (MOVWreg x:(MOVHreg _))
5376 // result: (MOVDreg x)
5379 if x.Op != OpRISCV64MOVHreg {
5382 v.reset(OpRISCV64MOVDreg)
5386 // match: (MOVWreg x:(MOVWreg _))
5387 // result: (MOVDreg x)
5390 if x.Op != OpRISCV64MOVWreg {
5393 v.reset(OpRISCV64MOVDreg)
5397 // match: (MOVWreg <t> x:(MOVWUload [off] {sym} ptr mem))
5398 // cond: x.Uses == 1 && clobber(x)
5399 // result: @x.Block (MOVWload <t> [off] {sym} ptr mem)
5403 if x.Op != OpRISCV64MOVWUload {
5406 off := auxIntToInt32(x.AuxInt)
5407 sym := auxToSym(x.Aux)
5410 if !(x.Uses == 1 && clobber(x)) {
5414 v0 := b.NewValue0(x.Pos, OpRISCV64MOVWload, t)
5416 v0.AuxInt = int32ToAuxInt(off)
5417 v0.Aux = symToAux(sym)
5418 v0.AddArg2(ptr, mem)
// rewriteValueRISCV64_OpRISCV64MOVWstore simplifies MOVWstore (word store): folds
// MOVaddr/ADDI address arithmetic into the offset, turns a store of constant zero
// into MOVWstorezero, and strips redundant MOVWreg/MOVWUreg extensions of the
// stored value (only the low word is written).
// NOTE(review): generated file — change _gen/RISCV64.rules, not this code.
5423 func rewriteValueRISCV64_OpRISCV64MOVWstore(v *Value) bool {
5427 // match: (MOVWstore [off1] {sym1} (MOVaddr [off2] {sym2} base) val mem)
5428 // cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
5429 // result: (MOVWstore [off1+off2] {mergeSym(sym1,sym2)} base val mem)
5431 off1 := auxIntToInt32(v.AuxInt)
5432 sym1 := auxToSym(v.Aux)
5433 if v_0.Op != OpRISCV64MOVaddr {
5436 off2 := auxIntToInt32(v_0.AuxInt)
5437 sym2 := auxToSym(v_0.Aux)
5441 if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
5444 v.reset(OpRISCV64MOVWstore)
5445 v.AuxInt = int32ToAuxInt(off1 + off2)
5446 v.Aux = symToAux(mergeSym(sym1, sym2))
5447 v.AddArg3(base, val, mem)
5450 // match: (MOVWstore [off1] {sym} (ADDI [off2] base) val mem)
5451 // cond: is32Bit(int64(off1)+off2)
5452 // result: (MOVWstore [off1+int32(off2)] {sym} base val mem)
5454 off1 := auxIntToInt32(v.AuxInt)
5455 sym := auxToSym(v.Aux)
5456 if v_0.Op != OpRISCV64ADDI {
5459 off2 := auxIntToInt64(v_0.AuxInt)
5463 if !(is32Bit(int64(off1) + off2)) {
5466 v.reset(OpRISCV64MOVWstore)
5467 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
5468 v.Aux = symToAux(sym)
5469 v.AddArg3(base, val, mem)
5472 // match: (MOVWstore [off] {sym} ptr (MOVDconst [0]) mem)
5473 // result: (MOVWstorezero [off] {sym} ptr mem)
5475 off := auxIntToInt32(v.AuxInt)
5476 sym := auxToSym(v.Aux)
5478 if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
5482 v.reset(OpRISCV64MOVWstorezero)
5483 v.AuxInt = int32ToAuxInt(off)
5484 v.Aux = symToAux(sym)
5488 // match: (MOVWstore [off] {sym} ptr (MOVWreg x) mem)
5489 // result: (MOVWstore [off] {sym} ptr x mem)
5491 off := auxIntToInt32(v.AuxInt)
5492 sym := auxToSym(v.Aux)
5494 if v_1.Op != OpRISCV64MOVWreg {
5499 v.reset(OpRISCV64MOVWstore)
5500 v.AuxInt = int32ToAuxInt(off)
5501 v.Aux = symToAux(sym)
5502 v.AddArg3(ptr, x, mem)
5505 // match: (MOVWstore [off] {sym} ptr (MOVWUreg x) mem)
5506 // result: (MOVWstore [off] {sym} ptr x mem)
5508 off := auxIntToInt32(v.AuxInt)
5509 sym := auxToSym(v.Aux)
5511 if v_1.Op != OpRISCV64MOVWUreg {
5516 v.reset(OpRISCV64MOVWstore)
5517 v.AuxInt = int32ToAuxInt(off)
5518 v.Aux = symToAux(sym)
5519 v.AddArg3(ptr, x, mem)
// rewriteValueRISCV64_OpRISCV64MOVWstorezero folds MOVaddr/ADDI constant address
// arithmetic into the offset of MOVWstorezero (store of a zero word).
// NOTE(review): generated file — change _gen/RISCV64.rules, not this code.
5524 func rewriteValueRISCV64_OpRISCV64MOVWstorezero(v *Value) bool {
5527 // match: (MOVWstorezero [off1] {sym1} (MOVaddr [off2] {sym2} ptr) mem)
5528 // cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2))
5529 // result: (MOVWstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
5531 off1 := auxIntToInt32(v.AuxInt)
5532 sym1 := auxToSym(v.Aux)
5533 if v_0.Op != OpRISCV64MOVaddr {
5536 off2 := auxIntToInt32(v_0.AuxInt)
5537 sym2 := auxToSym(v_0.Aux)
5540 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
5543 v.reset(OpRISCV64MOVWstorezero)
5544 v.AuxInt = int32ToAuxInt(off1 + off2)
5545 v.Aux = symToAux(mergeSym(sym1, sym2))
5549 // match: (MOVWstorezero [off1] {sym} (ADDI [off2] ptr) mem)
5550 // cond: is32Bit(int64(off1)+off2)
5551 // result: (MOVWstorezero [off1+int32(off2)] {sym} ptr mem)
5553 off1 := auxIntToInt32(v.AuxInt)
5554 sym := auxToSym(v.Aux)
5555 if v_0.Op != OpRISCV64ADDI {
5558 off2 := auxIntToInt64(v_0.AuxInt)
5561 if !(is32Bit(int64(off1) + off2)) {
5564 v.reset(OpRISCV64MOVWstorezero)
5565 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
5566 v.Aux = symToAux(sym)
5572 func rewriteValueRISCV64_OpRISCV64NEG(v *Value) bool {
5575 // match: (NEG (SUB x y))
5576 // result: (SUB y x)
5578 if v_0.Op != OpRISCV64SUB {
5583 v.reset(OpRISCV64SUB)
5587 // match: (NEG <t> s:(ADDI [val] (SUB x y)))
5588 // cond: s.Uses == 1 && is32Bit(-val)
5589 // result: (ADDI [-val] (SUB <t> y x))
5593 if s.Op != OpRISCV64ADDI {
5596 val := auxIntToInt64(s.AuxInt)
5598 if s_0.Op != OpRISCV64SUB {
5603 if !(s.Uses == 1 && is32Bit(-val)) {
5606 v.reset(OpRISCV64ADDI)
5607 v.AuxInt = int64ToAuxInt(-val)
5608 v0 := b.NewValue0(v.Pos, OpRISCV64SUB, t)
5613 // match: (NEG (NEG x))
5616 if v_0.Op != OpRISCV64NEG {
5623 // match: (NEG (MOVDconst [x]))
5624 // result: (MOVDconst [-x])
5626 if v_0.Op != OpRISCV64MOVDconst {
5629 x := auxIntToInt64(v_0.AuxInt)
5630 v.reset(OpRISCV64MOVDconst)
5631 v.AuxInt = int64ToAuxInt(-x)
5636 func rewriteValueRISCV64_OpRISCV64NEGW(v *Value) bool {
5638 // match: (NEGW (MOVDconst [x]))
5639 // result: (MOVDconst [int64(int32(-x))])
5641 if v_0.Op != OpRISCV64MOVDconst {
5644 x := auxIntToInt64(v_0.AuxInt)
5645 v.reset(OpRISCV64MOVDconst)
5646 v.AuxInt = int64ToAuxInt(int64(int32(-x)))
5651 func rewriteValueRISCV64_OpRISCV64OR(v *Value) bool {
5654 // match: (OR (MOVDconst [val]) x)
5655 // cond: is32Bit(val)
5656 // result: (ORI [val] x)
5658 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
5659 if v_0.Op != OpRISCV64MOVDconst {
5662 val := auxIntToInt64(v_0.AuxInt)
5664 if !(is32Bit(val)) {
5667 v.reset(OpRISCV64ORI)
5668 v.AuxInt = int64ToAuxInt(val)
5676 func rewriteValueRISCV64_OpRISCV64ORI(v *Value) bool {
5678 // match: (ORI [0] x)
5681 if auxIntToInt64(v.AuxInt) != 0 {
5688 // match: (ORI [-1] x)
5689 // result: (MOVDconst [-1])
5691 if auxIntToInt64(v.AuxInt) != -1 {
5694 v.reset(OpRISCV64MOVDconst)
5695 v.AuxInt = int64ToAuxInt(-1)
5698 // match: (ORI [x] (MOVDconst [y]))
5699 // result: (MOVDconst [x | y])
5701 x := auxIntToInt64(v.AuxInt)
5702 if v_0.Op != OpRISCV64MOVDconst {
5705 y := auxIntToInt64(v_0.AuxInt)
5706 v.reset(OpRISCV64MOVDconst)
5707 v.AuxInt = int64ToAuxInt(x | y)
5710 // match: (ORI [x] (ORI [y] z))
5711 // result: (ORI [x | y] z)
5713 x := auxIntToInt64(v.AuxInt)
5714 if v_0.Op != OpRISCV64ORI {
5717 y := auxIntToInt64(v_0.AuxInt)
5719 v.reset(OpRISCV64ORI)
5720 v.AuxInt = int64ToAuxInt(x | y)
5726 func rewriteValueRISCV64_OpRISCV64SEQZ(v *Value) bool {
5728 // match: (SEQZ (NEG x))
5731 if v_0.Op != OpRISCV64NEG {
5735 v.reset(OpRISCV64SEQZ)
5739 // match: (SEQZ (SEQZ x))
5742 if v_0.Op != OpRISCV64SEQZ {
5746 v.reset(OpRISCV64SNEZ)
5750 // match: (SEQZ (SNEZ x))
5753 if v_0.Op != OpRISCV64SNEZ {
5757 v.reset(OpRISCV64SEQZ)
5763 func rewriteValueRISCV64_OpRISCV64SLL(v *Value) bool {
5766 // match: (SLL x (MOVDconst [val]))
5767 // result: (SLLI [int64(val&63)] x)
5770 if v_1.Op != OpRISCV64MOVDconst {
5773 val := auxIntToInt64(v_1.AuxInt)
5774 v.reset(OpRISCV64SLLI)
5775 v.AuxInt = int64ToAuxInt(int64(val & 63))
5781 func rewriteValueRISCV64_OpRISCV64SLLI(v *Value) bool {
5783 // match: (SLLI [x] (MOVDconst [y]))
5784 // cond: is32Bit(y << uint32(x))
5785 // result: (MOVDconst [y << uint32(x)])
5787 x := auxIntToInt64(v.AuxInt)
5788 if v_0.Op != OpRISCV64MOVDconst {
5791 y := auxIntToInt64(v_0.AuxInt)
5792 if !(is32Bit(y << uint32(x))) {
5795 v.reset(OpRISCV64MOVDconst)
5796 v.AuxInt = int64ToAuxInt(y << uint32(x))
5801 func rewriteValueRISCV64_OpRISCV64SLT(v *Value) bool {
5804 // match: (SLT x (MOVDconst [val]))
5805 // cond: val >= -2048 && val <= 2047
5806 // result: (SLTI [val] x)
5809 if v_1.Op != OpRISCV64MOVDconst {
5812 val := auxIntToInt64(v_1.AuxInt)
5813 if !(val >= -2048 && val <= 2047) {
5816 v.reset(OpRISCV64SLTI)
5817 v.AuxInt = int64ToAuxInt(val)
5822 // result: (MOVDconst [0])
5828 v.reset(OpRISCV64MOVDconst)
5829 v.AuxInt = int64ToAuxInt(0)
5834 func rewriteValueRISCV64_OpRISCV64SLTI(v *Value) bool {
5836 // match: (SLTI [x] (MOVDconst [y]))
5837 // result: (MOVDconst [b2i(int64(y) < int64(x))])
5839 x := auxIntToInt64(v.AuxInt)
5840 if v_0.Op != OpRISCV64MOVDconst {
5843 y := auxIntToInt64(v_0.AuxInt)
5844 v.reset(OpRISCV64MOVDconst)
5845 v.AuxInt = int64ToAuxInt(b2i(int64(y) < int64(x)))
5848 // match: (SLTI [x] (ANDI [y] _))
5849 // cond: y >= 0 && int64(y) < int64(x)
5850 // result: (MOVDconst [1])
5852 x := auxIntToInt64(v.AuxInt)
5853 if v_0.Op != OpRISCV64ANDI {
5856 y := auxIntToInt64(v_0.AuxInt)
5857 if !(y >= 0 && int64(y) < int64(x)) {
5860 v.reset(OpRISCV64MOVDconst)
5861 v.AuxInt = int64ToAuxInt(1)
5864 // match: (SLTI [x] (ORI [y] _))
5865 // cond: y >= 0 && int64(y) >= int64(x)
5866 // result: (MOVDconst [0])
5868 x := auxIntToInt64(v.AuxInt)
5869 if v_0.Op != OpRISCV64ORI {
5872 y := auxIntToInt64(v_0.AuxInt)
5873 if !(y >= 0 && int64(y) >= int64(x)) {
5876 v.reset(OpRISCV64MOVDconst)
5877 v.AuxInt = int64ToAuxInt(0)
5882 func rewriteValueRISCV64_OpRISCV64SLTIU(v *Value) bool {
5884 // match: (SLTIU [x] (MOVDconst [y]))
5885 // result: (MOVDconst [b2i(uint64(y) < uint64(x))])
5887 x := auxIntToInt64(v.AuxInt)
5888 if v_0.Op != OpRISCV64MOVDconst {
5891 y := auxIntToInt64(v_0.AuxInt)
5892 v.reset(OpRISCV64MOVDconst)
5893 v.AuxInt = int64ToAuxInt(b2i(uint64(y) < uint64(x)))
5896 // match: (SLTIU [x] (ANDI [y] _))
5897 // cond: y >= 0 && uint64(y) < uint64(x)
5898 // result: (MOVDconst [1])
5900 x := auxIntToInt64(v.AuxInt)
5901 if v_0.Op != OpRISCV64ANDI {
5904 y := auxIntToInt64(v_0.AuxInt)
5905 if !(y >= 0 && uint64(y) < uint64(x)) {
5908 v.reset(OpRISCV64MOVDconst)
5909 v.AuxInt = int64ToAuxInt(1)
5912 // match: (SLTIU [x] (ORI [y] _))
5913 // cond: y >= 0 && uint64(y) >= uint64(x)
5914 // result: (MOVDconst [0])
5916 x := auxIntToInt64(v.AuxInt)
5917 if v_0.Op != OpRISCV64ORI {
5920 y := auxIntToInt64(v_0.AuxInt)
5921 if !(y >= 0 && uint64(y) >= uint64(x)) {
5924 v.reset(OpRISCV64MOVDconst)
5925 v.AuxInt = int64ToAuxInt(0)
5930 func rewriteValueRISCV64_OpRISCV64SLTU(v *Value) bool {
5933 // match: (SLTU x (MOVDconst [val]))
5934 // cond: val >= -2048 && val <= 2047
5935 // result: (SLTIU [val] x)
5938 if v_1.Op != OpRISCV64MOVDconst {
5941 val := auxIntToInt64(v_1.AuxInt)
5942 if !(val >= -2048 && val <= 2047) {
5945 v.reset(OpRISCV64SLTIU)
5946 v.AuxInt = int64ToAuxInt(val)
5950 // match: (SLTU x x)
5951 // result: (MOVDconst [0])
5957 v.reset(OpRISCV64MOVDconst)
5958 v.AuxInt = int64ToAuxInt(0)
5963 func rewriteValueRISCV64_OpRISCV64SNEZ(v *Value) bool {
5965 // match: (SNEZ (NEG x))
5968 if v_0.Op != OpRISCV64NEG {
5972 v.reset(OpRISCV64SNEZ)
5976 // match: (SNEZ (SEQZ x))
5979 if v_0.Op != OpRISCV64SEQZ {
5983 v.reset(OpRISCV64SEQZ)
5987 // match: (SNEZ (SNEZ x))
5990 if v_0.Op != OpRISCV64SNEZ {
5994 v.reset(OpRISCV64SNEZ)
6000 func rewriteValueRISCV64_OpRISCV64SRA(v *Value) bool {
6003 // match: (SRA x (MOVDconst [val]))
6004 // result: (SRAI [int64(val&63)] x)
6007 if v_1.Op != OpRISCV64MOVDconst {
6010 val := auxIntToInt64(v_1.AuxInt)
6011 v.reset(OpRISCV64SRAI)
6012 v.AuxInt = int64ToAuxInt(int64(val & 63))
6018 func rewriteValueRISCV64_OpRISCV64SRAI(v *Value) bool {
6020 // match: (SRAI [x] (MOVDconst [y]))
6021 // result: (MOVDconst [int64(y) >> uint32(x)])
6023 x := auxIntToInt64(v.AuxInt)
6024 if v_0.Op != OpRISCV64MOVDconst {
6027 y := auxIntToInt64(v_0.AuxInt)
6028 v.reset(OpRISCV64MOVDconst)
6029 v.AuxInt = int64ToAuxInt(int64(y) >> uint32(x))
6034 func rewriteValueRISCV64_OpRISCV64SRL(v *Value) bool {
6037 // match: (SRL x (MOVDconst [val]))
6038 // result: (SRLI [int64(val&63)] x)
6041 if v_1.Op != OpRISCV64MOVDconst {
6044 val := auxIntToInt64(v_1.AuxInt)
6045 v.reset(OpRISCV64SRLI)
6046 v.AuxInt = int64ToAuxInt(int64(val & 63))
6052 func rewriteValueRISCV64_OpRISCV64SRLI(v *Value) bool {
6054 // match: (SRLI [x] (MOVDconst [y]))
6055 // result: (MOVDconst [int64(uint64(y) >> uint32(x))])
6057 x := auxIntToInt64(v.AuxInt)
6058 if v_0.Op != OpRISCV64MOVDconst {
6061 y := auxIntToInt64(v_0.AuxInt)
6062 v.reset(OpRISCV64MOVDconst)
6063 v.AuxInt = int64ToAuxInt(int64(uint64(y) >> uint32(x)))
6068 func rewriteValueRISCV64_OpRISCV64SUB(v *Value) bool {
6072 // match: (SUB x (MOVDconst [val]))
6073 // cond: is32Bit(-val)
6074 // result: (ADDI [-val] x)
6077 if v_1.Op != OpRISCV64MOVDconst {
6080 val := auxIntToInt64(v_1.AuxInt)
6081 if !(is32Bit(-val)) {
6084 v.reset(OpRISCV64ADDI)
6085 v.AuxInt = int64ToAuxInt(-val)
6089 // match: (SUB <t> (MOVDconst [val]) y)
6090 // cond: is32Bit(-val)
6091 // result: (NEG (ADDI <t> [-val] y))
6094 if v_0.Op != OpRISCV64MOVDconst {
6097 val := auxIntToInt64(v_0.AuxInt)
6099 if !(is32Bit(-val)) {
6102 v.reset(OpRISCV64NEG)
6103 v0 := b.NewValue0(v.Pos, OpRISCV64ADDI, t)
6104 v0.AuxInt = int64ToAuxInt(-val)
6109 // match: (SUB x (MOVDconst [0]))
6113 if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
6119 // match: (SUB (MOVDconst [0]) x)
6122 if v_0.Op != OpRISCV64MOVDconst || auxIntToInt64(v_0.AuxInt) != 0 {
6126 v.reset(OpRISCV64NEG)
6132 func rewriteValueRISCV64_OpRISCV64SUBW(v *Value) bool {
6135 // match: (SUBW x (MOVDconst [0]))
6136 // result: (ADDIW [0] x)
6139 if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
6142 v.reset(OpRISCV64ADDIW)
6143 v.AuxInt = int64ToAuxInt(0)
6147 // match: (SUBW (MOVDconst [0]) x)
6150 if v_0.Op != OpRISCV64MOVDconst || auxIntToInt64(v_0.AuxInt) != 0 {
6154 v.reset(OpRISCV64NEGW)
6160 func rewriteValueRISCV64_OpRISCV64XOR(v *Value) bool {
6163 // match: (XOR (MOVDconst [val]) x)
6164 // cond: is32Bit(val)
6165 // result: (XORI [val] x)
6167 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
6168 if v_0.Op != OpRISCV64MOVDconst {
6171 val := auxIntToInt64(v_0.AuxInt)
6173 if !(is32Bit(val)) {
6176 v.reset(OpRISCV64XORI)
6177 v.AuxInt = int64ToAuxInt(val)
6185 func rewriteValueRISCV64_OpRotateLeft16(v *Value) bool {
6189 typ := &b.Func.Config.Types
6190 // match: (RotateLeft16 <t> x (MOVDconst [c]))
6191 // result: (Or16 (Lsh16x64 <t> x (MOVDconst [c&15])) (Rsh16Ux64 <t> x (MOVDconst [-c&15])))
6195 if v_1.Op != OpRISCV64MOVDconst {
6198 c := auxIntToInt64(v_1.AuxInt)
6200 v0 := b.NewValue0(v.Pos, OpLsh16x64, t)
6201 v1 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
6202 v1.AuxInt = int64ToAuxInt(c & 15)
6204 v2 := b.NewValue0(v.Pos, OpRsh16Ux64, t)
6205 v3 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
6206 v3.AuxInt = int64ToAuxInt(-c & 15)
6213 func rewriteValueRISCV64_OpRotateLeft32(v *Value) bool {
6217 typ := &b.Func.Config.Types
6218 // match: (RotateLeft32 <t> x (MOVDconst [c]))
6219 // result: (Or32 (Lsh32x64 <t> x (MOVDconst [c&31])) (Rsh32Ux64 <t> x (MOVDconst [-c&31])))
6223 if v_1.Op != OpRISCV64MOVDconst {
6226 c := auxIntToInt64(v_1.AuxInt)
6228 v0 := b.NewValue0(v.Pos, OpLsh32x64, t)
6229 v1 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
6230 v1.AuxInt = int64ToAuxInt(c & 31)
6232 v2 := b.NewValue0(v.Pos, OpRsh32Ux64, t)
6233 v3 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
6234 v3.AuxInt = int64ToAuxInt(-c & 31)
6241 func rewriteValueRISCV64_OpRotateLeft64(v *Value) bool {
6245 typ := &b.Func.Config.Types
6246 // match: (RotateLeft64 <t> x (MOVDconst [c]))
6247 // result: (Or64 (Lsh64x64 <t> x (MOVDconst [c&63])) (Rsh64Ux64 <t> x (MOVDconst [-c&63])))
6251 if v_1.Op != OpRISCV64MOVDconst {
6254 c := auxIntToInt64(v_1.AuxInt)
6256 v0 := b.NewValue0(v.Pos, OpLsh64x64, t)
6257 v1 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
6258 v1.AuxInt = int64ToAuxInt(c & 63)
6260 v2 := b.NewValue0(v.Pos, OpRsh64Ux64, t)
6261 v3 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
6262 v3.AuxInt = int64ToAuxInt(-c & 63)
6269 func rewriteValueRISCV64_OpRotateLeft8(v *Value) bool {
6273 typ := &b.Func.Config.Types
6274 // match: (RotateLeft8 <t> x (MOVDconst [c]))
6275 // result: (Or8 (Lsh8x64 <t> x (MOVDconst [c&7])) (Rsh8Ux64 <t> x (MOVDconst [-c&7])))
6279 if v_1.Op != OpRISCV64MOVDconst {
6282 c := auxIntToInt64(v_1.AuxInt)
6284 v0 := b.NewValue0(v.Pos, OpLsh8x64, t)
6285 v1 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
6286 v1.AuxInt = int64ToAuxInt(c & 7)
6288 v2 := b.NewValue0(v.Pos, OpRsh8Ux64, t)
6289 v3 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
6290 v3.AuxInt = int64ToAuxInt(-c & 7)
6297 func rewriteValueRISCV64_OpRsh16Ux16(v *Value) bool {
6301 typ := &b.Func.Config.Types
6302 // match: (Rsh16Ux16 <t> x y)
6303 // cond: !shiftIsBounded(v)
6304 // result: (AND (SRL <t> (ZeroExt16to64 x) y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
6309 if !(!shiftIsBounded(v)) {
6312 v.reset(OpRISCV64AND)
6313 v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
6314 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6317 v2 := b.NewValue0(v.Pos, OpNeg16, t)
6318 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
6319 v3.AuxInt = int64ToAuxInt(64)
6320 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6327 // match: (Rsh16Ux16 x y)
6328 // cond: shiftIsBounded(v)
6329 // result: (SRL (ZeroExt16to64 x) y)
6333 if !(shiftIsBounded(v)) {
6336 v.reset(OpRISCV64SRL)
6337 v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6344 func rewriteValueRISCV64_OpRsh16Ux32(v *Value) bool {
6348 typ := &b.Func.Config.Types
6349 // match: (Rsh16Ux32 <t> x y)
6350 // cond: !shiftIsBounded(v)
6351 // result: (AND (SRL <t> (ZeroExt16to64 x) y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
6356 if !(!shiftIsBounded(v)) {
6359 v.reset(OpRISCV64AND)
6360 v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
6361 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6364 v2 := b.NewValue0(v.Pos, OpNeg16, t)
6365 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
6366 v3.AuxInt = int64ToAuxInt(64)
6367 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6374 // match: (Rsh16Ux32 x y)
6375 // cond: shiftIsBounded(v)
6376 // result: (SRL (ZeroExt16to64 x) y)
6380 if !(shiftIsBounded(v)) {
6383 v.reset(OpRISCV64SRL)
6384 v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6391 func rewriteValueRISCV64_OpRsh16Ux64(v *Value) bool {
6395 typ := &b.Func.Config.Types
6396 // match: (Rsh16Ux64 <t> x y)
6397 // cond: !shiftIsBounded(v)
6398 // result: (AND (SRL <t> (ZeroExt16to64 x) y) (Neg16 <t> (SLTIU <t> [64] y)))
6403 if !(!shiftIsBounded(v)) {
6406 v.reset(OpRISCV64AND)
6407 v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
6408 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6411 v2 := b.NewValue0(v.Pos, OpNeg16, t)
6412 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
6413 v3.AuxInt = int64ToAuxInt(64)
6419 // match: (Rsh16Ux64 x y)
6420 // cond: shiftIsBounded(v)
6421 // result: (SRL (ZeroExt16to64 x) y)
6425 if !(shiftIsBounded(v)) {
6428 v.reset(OpRISCV64SRL)
6429 v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6436 func rewriteValueRISCV64_OpRsh16Ux8(v *Value) bool {
6440 typ := &b.Func.Config.Types
6441 // match: (Rsh16Ux8 <t> x y)
6442 // cond: !shiftIsBounded(v)
6443 // result: (AND (SRL <t> (ZeroExt16to64 x) y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
6448 if !(!shiftIsBounded(v)) {
6451 v.reset(OpRISCV64AND)
6452 v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
6453 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6456 v2 := b.NewValue0(v.Pos, OpNeg16, t)
6457 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
6458 v3.AuxInt = int64ToAuxInt(64)
6459 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
6466 // match: (Rsh16Ux8 x y)
6467 // cond: shiftIsBounded(v)
6468 // result: (SRL (ZeroExt16to64 x) y)
6472 if !(shiftIsBounded(v)) {
6475 v.reset(OpRISCV64SRL)
6476 v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6483 func rewriteValueRISCV64_OpRsh16x16(v *Value) bool {
6487 typ := &b.Func.Config.Types
6488 // match: (Rsh16x16 <t> x y)
6489 // cond: !shiftIsBounded(v)
6490 // result: (SRA <t> (SignExt16to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt16to64 y)))))
6495 if !(!shiftIsBounded(v)) {
6498 v.reset(OpRISCV64SRA)
6500 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
6502 v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
6503 v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
6504 v2.AuxInt = int64ToAuxInt(-1)
6505 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
6506 v3.AuxInt = int64ToAuxInt(64)
6507 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6515 // match: (Rsh16x16 x y)
6516 // cond: shiftIsBounded(v)
6517 // result: (SRA (SignExt16to64 x) y)
6521 if !(shiftIsBounded(v)) {
6524 v.reset(OpRISCV64SRA)
6525 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
6532 func rewriteValueRISCV64_OpRsh16x32(v *Value) bool {
6536 typ := &b.Func.Config.Types
6537 // match: (Rsh16x32 <t> x y)
6538 // cond: !shiftIsBounded(v)
6539 // result: (SRA <t> (SignExt16to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt32to64 y)))))
6544 if !(!shiftIsBounded(v)) {
6547 v.reset(OpRISCV64SRA)
6549 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
6551 v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
6552 v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
6553 v2.AuxInt = int64ToAuxInt(-1)
6554 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
6555 v3.AuxInt = int64ToAuxInt(64)
6556 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6564 // match: (Rsh16x32 x y)
6565 // cond: shiftIsBounded(v)
6566 // result: (SRA (SignExt16to64 x) y)
6570 if !(shiftIsBounded(v)) {
6573 v.reset(OpRISCV64SRA)
6574 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
6581 func rewriteValueRISCV64_OpRsh16x64(v *Value) bool {
6585 typ := &b.Func.Config.Types
6586 // match: (Rsh16x64 <t> x y)
6587 // cond: !shiftIsBounded(v)
6588 // result: (SRA <t> (SignExt16to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] y))))
6593 if !(!shiftIsBounded(v)) {
6596 v.reset(OpRISCV64SRA)
6598 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
6600 v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
6601 v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
6602 v2.AuxInt = int64ToAuxInt(-1)
6603 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
6604 v3.AuxInt = int64ToAuxInt(64)
6611 // match: (Rsh16x64 x y)
6612 // cond: shiftIsBounded(v)
6613 // result: (SRA (SignExt16to64 x) y)
6617 if !(shiftIsBounded(v)) {
6620 v.reset(OpRISCV64SRA)
6621 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
6628 func rewriteValueRISCV64_OpRsh16x8(v *Value) bool {
6632 typ := &b.Func.Config.Types
6633 // match: (Rsh16x8 <t> x y)
6634 // cond: !shiftIsBounded(v)
6635 // result: (SRA <t> (SignExt16to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt8to64 y)))))
6640 if !(!shiftIsBounded(v)) {
6643 v.reset(OpRISCV64SRA)
6645 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
6647 v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
6648 v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
6649 v2.AuxInt = int64ToAuxInt(-1)
6650 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
6651 v3.AuxInt = int64ToAuxInt(64)
6652 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
6660 // match: (Rsh16x8 x y)
6661 // cond: shiftIsBounded(v)
6662 // result: (SRA (SignExt16to64 x) y)
6666 if !(shiftIsBounded(v)) {
6669 v.reset(OpRISCV64SRA)
6670 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
6677 func rewriteValueRISCV64_OpRsh32Ux16(v *Value) bool {
6681 typ := &b.Func.Config.Types
6682 // match: (Rsh32Ux16 <t> x y)
6683 // cond: !shiftIsBounded(v)
6684 // result: (AND (SRL <t> (ZeroExt32to64 x) y) (Neg32 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
6689 if !(!shiftIsBounded(v)) {
6692 v.reset(OpRISCV64AND)
6693 v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
6694 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6697 v2 := b.NewValue0(v.Pos, OpNeg32, t)
6698 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
6699 v3.AuxInt = int64ToAuxInt(64)
6700 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6707 // match: (Rsh32Ux16 x y)
6708 // cond: shiftIsBounded(v)
6709 // result: (SRL (ZeroExt32to64 x) y)
6713 if !(shiftIsBounded(v)) {
6716 v.reset(OpRISCV64SRL)
6717 v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6724 func rewriteValueRISCV64_OpRsh32Ux32(v *Value) bool {
6728 typ := &b.Func.Config.Types
6729 // match: (Rsh32Ux32 <t> x y)
6730 // cond: !shiftIsBounded(v)
6731 // result: (AND (SRL <t> (ZeroExt32to64 x) y) (Neg32 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
6736 if !(!shiftIsBounded(v)) {
6739 v.reset(OpRISCV64AND)
6740 v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
6741 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6744 v2 := b.NewValue0(v.Pos, OpNeg32, t)
6745 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
6746 v3.AuxInt = int64ToAuxInt(64)
6747 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6754 // match: (Rsh32Ux32 x y)
6755 // cond: shiftIsBounded(v)
6756 // result: (SRL (ZeroExt32to64 x) y)
6760 if !(shiftIsBounded(v)) {
6763 v.reset(OpRISCV64SRL)
6764 v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6771 func rewriteValueRISCV64_OpRsh32Ux64(v *Value) bool {
6775 typ := &b.Func.Config.Types
6776 // match: (Rsh32Ux64 <t> x y)
6777 // cond: !shiftIsBounded(v)
6778 // result: (AND (SRL <t> (ZeroExt32to64 x) y) (Neg32 <t> (SLTIU <t> [64] y)))
6783 if !(!shiftIsBounded(v)) {
6786 v.reset(OpRISCV64AND)
6787 v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
6788 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6791 v2 := b.NewValue0(v.Pos, OpNeg32, t)
6792 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
6793 v3.AuxInt = int64ToAuxInt(64)
6799 // match: (Rsh32Ux64 x y)
6800 // cond: shiftIsBounded(v)
6801 // result: (SRL (ZeroExt32to64 x) y)
6805 if !(shiftIsBounded(v)) {
6808 v.reset(OpRISCV64SRL)
6809 v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6816 func rewriteValueRISCV64_OpRsh32Ux8(v *Value) bool {
6820 typ := &b.Func.Config.Types
6821 // match: (Rsh32Ux8 <t> x y)
6822 // cond: !shiftIsBounded(v)
6823 // result: (AND (SRL <t> (ZeroExt32to64 x) y) (Neg32 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
6828 if !(!shiftIsBounded(v)) {
6831 v.reset(OpRISCV64AND)
6832 v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
6833 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6836 v2 := b.NewValue0(v.Pos, OpNeg32, t)
6837 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
6838 v3.AuxInt = int64ToAuxInt(64)
6839 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
6846 // match: (Rsh32Ux8 x y)
6847 // cond: shiftIsBounded(v)
6848 // result: (SRL (ZeroExt32to64 x) y)
6852 if !(shiftIsBounded(v)) {
6855 v.reset(OpRISCV64SRL)
6856 v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6863 func rewriteValueRISCV64_OpRsh32x16(v *Value) bool {
6867 typ := &b.Func.Config.Types
6868 // match: (Rsh32x16 <t> x y)
6869 // cond: !shiftIsBounded(v)
6870 // result: (SRA <t> (SignExt32to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt16to64 y)))))
6875 if !(!shiftIsBounded(v)) {
6878 v.reset(OpRISCV64SRA)
6880 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
6882 v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
6883 v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
6884 v2.AuxInt = int64ToAuxInt(-1)
6885 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
6886 v3.AuxInt = int64ToAuxInt(64)
6887 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6895 // match: (Rsh32x16 x y)
6896 // cond: shiftIsBounded(v)
6897 // result: (SRA (SignExt32to64 x) y)
6901 if !(shiftIsBounded(v)) {
6904 v.reset(OpRISCV64SRA)
6905 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
6912 func rewriteValueRISCV64_OpRsh32x32(v *Value) bool {
6916 typ := &b.Func.Config.Types
6917 // match: (Rsh32x32 <t> x y)
6918 // cond: !shiftIsBounded(v)
6919 // result: (SRA <t> (SignExt32to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt32to64 y)))))
6924 if !(!shiftIsBounded(v)) {
6927 v.reset(OpRISCV64SRA)
6929 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
6931 v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
6932 v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
6933 v2.AuxInt = int64ToAuxInt(-1)
6934 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
6935 v3.AuxInt = int64ToAuxInt(64)
6936 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6944 // match: (Rsh32x32 x y)
6945 // cond: shiftIsBounded(v)
6946 // result: (SRA (SignExt32to64 x) y)
6950 if !(shiftIsBounded(v)) {
6953 v.reset(OpRISCV64SRA)
6954 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
6961 func rewriteValueRISCV64_OpRsh32x64(v *Value) bool {
6965 typ := &b.Func.Config.Types
6966 // match: (Rsh32x64 <t> x y)
6967 // cond: !shiftIsBounded(v)
6968 // result: (SRA <t> (SignExt32to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] y))))
6973 if !(!shiftIsBounded(v)) {
6976 v.reset(OpRISCV64SRA)
6978 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
6980 v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
6981 v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
6982 v2.AuxInt = int64ToAuxInt(-1)
6983 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
6984 v3.AuxInt = int64ToAuxInt(64)
6991 // match: (Rsh32x64 x y)
6992 // cond: shiftIsBounded(v)
6993 // result: (SRA (SignExt32to64 x) y)
6997 if !(shiftIsBounded(v)) {
7000 v.reset(OpRISCV64SRA)
7001 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
7008 func rewriteValueRISCV64_OpRsh32x8(v *Value) bool {
7012 typ := &b.Func.Config.Types
7013 // match: (Rsh32x8 <t> x y)
7014 // cond: !shiftIsBounded(v)
7015 // result: (SRA <t> (SignExt32to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt8to64 y)))))
7020 if !(!shiftIsBounded(v)) {
7023 v.reset(OpRISCV64SRA)
7025 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
7027 v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
7028 v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
7029 v2.AuxInt = int64ToAuxInt(-1)
7030 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
7031 v3.AuxInt = int64ToAuxInt(64)
7032 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
7040 // match: (Rsh32x8 x y)
7041 // cond: shiftIsBounded(v)
7042 // result: (SRA (SignExt32to64 x) y)
7046 if !(shiftIsBounded(v)) {
7049 v.reset(OpRISCV64SRA)
7050 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
7057 func rewriteValueRISCV64_OpRsh64Ux16(v *Value) bool {
7061 typ := &b.Func.Config.Types
7062 // match: (Rsh64Ux16 <t> x y)
7063 // cond: !shiftIsBounded(v)
7064 // result: (AND (SRL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
7069 if !(!shiftIsBounded(v)) {
7072 v.reset(OpRISCV64AND)
7073 v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
7075 v1 := b.NewValue0(v.Pos, OpNeg64, t)
7076 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
7077 v2.AuxInt = int64ToAuxInt(64)
7078 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
7085 // match: (Rsh64Ux16 x y)
7086 // cond: shiftIsBounded(v)
7087 // result: (SRL x y)
7091 if !(shiftIsBounded(v)) {
7094 v.reset(OpRISCV64SRL)
7100 func rewriteValueRISCV64_OpRsh64Ux32(v *Value) bool {
7104 typ := &b.Func.Config.Types
7105 // match: (Rsh64Ux32 <t> x y)
7106 // cond: !shiftIsBounded(v)
7107 // result: (AND (SRL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
7112 if !(!shiftIsBounded(v)) {
7115 v.reset(OpRISCV64AND)
7116 v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
7118 v1 := b.NewValue0(v.Pos, OpNeg64, t)
7119 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
7120 v2.AuxInt = int64ToAuxInt(64)
7121 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
7128 // match: (Rsh64Ux32 x y)
7129 // cond: shiftIsBounded(v)
7130 // result: (SRL x y)
7134 if !(shiftIsBounded(v)) {
7137 v.reset(OpRISCV64SRL)
7143 func rewriteValueRISCV64_OpRsh64Ux64(v *Value) bool {
7147 // match: (Rsh64Ux64 <t> x y)
7148 // cond: !shiftIsBounded(v)
7149 // result: (AND (SRL <t> x y) (Neg64 <t> (SLTIU <t> [64] y)))
7154 if !(!shiftIsBounded(v)) {
7157 v.reset(OpRISCV64AND)
7158 v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
7160 v1 := b.NewValue0(v.Pos, OpNeg64, t)
7161 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
7162 v2.AuxInt = int64ToAuxInt(64)
7168 // match: (Rsh64Ux64 x y)
7169 // cond: shiftIsBounded(v)
7170 // result: (SRL x y)
7174 if !(shiftIsBounded(v)) {
7177 v.reset(OpRISCV64SRL)
7183 func rewriteValueRISCV64_OpRsh64Ux8(v *Value) bool {
7187 typ := &b.Func.Config.Types
7188 // match: (Rsh64Ux8 <t> x y)
7189 // cond: !shiftIsBounded(v)
7190 // result: (AND (SRL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
7195 if !(!shiftIsBounded(v)) {
7198 v.reset(OpRISCV64AND)
7199 v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
7201 v1 := b.NewValue0(v.Pos, OpNeg64, t)
7202 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
7203 v2.AuxInt = int64ToAuxInt(64)
7204 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
7211 // match: (Rsh64Ux8 x y)
7212 // cond: shiftIsBounded(v)
7213 // result: (SRL x y)
7217 if !(shiftIsBounded(v)) {
7220 v.reset(OpRISCV64SRL)
7226 func rewriteValueRISCV64_OpRsh64x16(v *Value) bool {
7230 typ := &b.Func.Config.Types
7231 // match: (Rsh64x16 <t> x y)
7232 // cond: !shiftIsBounded(v)
7233 // result: (SRA <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt16to64 y)))))
7238 if !(!shiftIsBounded(v)) {
7241 v.reset(OpRISCV64SRA)
7243 v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
7244 v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
7245 v1.AuxInt = int64ToAuxInt(-1)
7246 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
7247 v2.AuxInt = int64ToAuxInt(64)
7248 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
7256 // match: (Rsh64x16 x y)
7257 // cond: shiftIsBounded(v)
7258 // result: (SRA x y)
7262 if !(shiftIsBounded(v)) {
7265 v.reset(OpRISCV64SRA)
7271 func rewriteValueRISCV64_OpRsh64x32(v *Value) bool {
7275 typ := &b.Func.Config.Types
7276 // match: (Rsh64x32 <t> x y)
7277 // cond: !shiftIsBounded(v)
7278 // result: (SRA <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt32to64 y)))))
7283 if !(!shiftIsBounded(v)) {
7286 v.reset(OpRISCV64SRA)
7288 v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
7289 v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
7290 v1.AuxInt = int64ToAuxInt(-1)
7291 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
7292 v2.AuxInt = int64ToAuxInt(64)
7293 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
7301 // match: (Rsh64x32 x y)
7302 // cond: shiftIsBounded(v)
7303 // result: (SRA x y)
7307 if !(shiftIsBounded(v)) {
7310 v.reset(OpRISCV64SRA)
7316 func rewriteValueRISCV64_OpRsh64x64(v *Value) bool {
7320 // match: (Rsh64x64 <t> x y)
7321 // cond: !shiftIsBounded(v)
7322 // result: (SRA <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] y))))
7327 if !(!shiftIsBounded(v)) {
7330 v.reset(OpRISCV64SRA)
7332 v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
7333 v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
7334 v1.AuxInt = int64ToAuxInt(-1)
7335 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
7336 v2.AuxInt = int64ToAuxInt(64)
7343 // match: (Rsh64x64 x y)
7344 // cond: shiftIsBounded(v)
7345 // result: (SRA x y)
7349 if !(shiftIsBounded(v)) {
7352 v.reset(OpRISCV64SRA)
// rewriteValueRISCV64_OpRsh64x8 lowers Rsh64x8 (signed right shift of a 64-bit
// value by an 8-bit count) to RISCV64 SRA; the count is zero-extended to 64
// bits before the out-of-range check.
// Generated code — edit _gen/RISCV64.rules and regenerate; lines elided here.
7358 func rewriteValueRISCV64_OpRsh64x8(v *Value) bool {
7362 typ := &b.Func.Config.Types
7363 // match: (Rsh64x8 <t> x y)
7364 // cond: !shiftIsBounded(v)
7365 // result: (SRA <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt8to64 y)))))
7370 if !(!shiftIsBounded(v)) {
7373 v.reset(OpRISCV64SRA)
7375 v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
// OR/ADDI/SLTIU saturate an out-of-range count to all ones (see Rsh64x64).
7376 v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
7377 v1.AuxInt = int64ToAuxInt(-1)
7378 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
7379 v2.AuxInt = int64ToAuxInt(64)
7380 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
// Bounded counts need no masking.
7388 // match: (Rsh64x8 x y)
7389 // cond: shiftIsBounded(v)
7390 // result: (SRA x y)
7394 if !(shiftIsBounded(v)) {
7397 v.reset(OpRISCV64SRA)
// rewriteValueRISCV64_OpRsh8Ux16 lowers Rsh8Ux16 (unsigned right shift of an
// 8-bit value by a 16-bit count) to SRL on the zero-extended operand, ANDed
// with a mask that zeroes the result when the count is >= 64.
// Generated code — edit _gen/RISCV64.rules and regenerate; lines elided here.
7403 func rewriteValueRISCV64_OpRsh8Ux16(v *Value) bool {
7407 typ := &b.Func.Config.Types
7408 // match: (Rsh8Ux16 <t> x y)
7409 // cond: !shiftIsBounded(v)
7410 // result: (AND (SRL <t> (ZeroExt8to64 x) y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
7415 if !(!shiftIsBounded(v)) {
7418 v.reset(OpRISCV64AND)
7419 v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
7420 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
// Neg8(SLTIU [64] count) is all-ones when count < 64 and zero otherwise, so
// the AND passes the shifted value through or forces the unsigned result to 0.
7423 v2 := b.NewValue0(v.Pos, OpNeg8, t)
7424 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
7425 v3.AuxInt = int64ToAuxInt(64)
7426 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
// Bounded counts: plain SRL on the zero-extended value.
7433 // match: (Rsh8Ux16 x y)
7434 // cond: shiftIsBounded(v)
7435 // result: (SRL (ZeroExt8to64 x) y)
7439 if !(shiftIsBounded(v)) {
7442 v.reset(OpRISCV64SRL)
7443 v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
// rewriteValueRISCV64_OpRsh8Ux32 lowers Rsh8Ux32 (unsigned right shift of an
// 8-bit value by a 32-bit count); identical shape to Rsh8Ux16 except the
// count is zero-extended from 32 bits.
// Generated code — edit _gen/RISCV64.rules and regenerate; lines elided here.
7450 func rewriteValueRISCV64_OpRsh8Ux32(v *Value) bool {
7454 typ := &b.Func.Config.Types
7455 // match: (Rsh8Ux32 <t> x y)
7456 // cond: !shiftIsBounded(v)
7457 // result: (AND (SRL <t> (ZeroExt8to64 x) y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
7462 if !(!shiftIsBounded(v)) {
7465 v.reset(OpRISCV64AND)
7466 v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
7467 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
// AND with Neg8(SLTIU [64] count) zeroes the result for counts >= 64.
7470 v2 := b.NewValue0(v.Pos, OpNeg8, t)
7471 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
7472 v3.AuxInt = int64ToAuxInt(64)
7473 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
// Bounded counts: plain SRL on the zero-extended value.
7480 // match: (Rsh8Ux32 x y)
7481 // cond: shiftIsBounded(v)
7482 // result: (SRL (ZeroExt8to64 x) y)
7486 if !(shiftIsBounded(v)) {
7489 v.reset(OpRISCV64SRL)
7490 v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
// rewriteValueRISCV64_OpRsh8Ux64 lowers Rsh8Ux64 (unsigned right shift of an
// 8-bit value by a 64-bit count); the count is already 64-bit so no extension
// is needed before the SLTIU range check.
// Generated code — edit _gen/RISCV64.rules and regenerate; lines elided here.
7497 func rewriteValueRISCV64_OpRsh8Ux64(v *Value) bool {
7501 typ := &b.Func.Config.Types
7502 // match: (Rsh8Ux64 <t> x y)
7503 // cond: !shiftIsBounded(v)
7504 // result: (AND (SRL <t> (ZeroExt8to64 x) y) (Neg8 <t> (SLTIU <t> [64] y)))
7509 if !(!shiftIsBounded(v)) {
7512 v.reset(OpRISCV64AND)
7513 v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
7514 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
// AND with Neg8(SLTIU [64] y) zeroes the result for counts >= 64.
7517 v2 := b.NewValue0(v.Pos, OpNeg8, t)
7518 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
7519 v3.AuxInt = int64ToAuxInt(64)
// Bounded counts: plain SRL on the zero-extended value.
7525 // match: (Rsh8Ux64 x y)
7526 // cond: shiftIsBounded(v)
7527 // result: (SRL (ZeroExt8to64 x) y)
7531 if !(shiftIsBounded(v)) {
7534 v.reset(OpRISCV64SRL)
7535 v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
// rewriteValueRISCV64_OpRsh8Ux8 lowers Rsh8Ux8 (unsigned right shift of an
// 8-bit value by an 8-bit count); both the value and the count are
// zero-extended to 64 bits.
// Generated code — edit _gen/RISCV64.rules and regenerate; lines elided here.
7542 func rewriteValueRISCV64_OpRsh8Ux8(v *Value) bool {
7546 typ := &b.Func.Config.Types
7547 // match: (Rsh8Ux8 <t> x y)
7548 // cond: !shiftIsBounded(v)
7549 // result: (AND (SRL <t> (ZeroExt8to64 x) y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
7554 if !(!shiftIsBounded(v)) {
7557 v.reset(OpRISCV64AND)
7558 v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
7559 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
// AND with Neg8(SLTIU [64] count) zeroes the result for counts >= 64.
7562 v2 := b.NewValue0(v.Pos, OpNeg8, t)
7563 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
7564 v3.AuxInt = int64ToAuxInt(64)
7565 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
// Bounded counts: plain SRL on the zero-extended value.
7572 // match: (Rsh8Ux8 x y)
7573 // cond: shiftIsBounded(v)
7574 // result: (SRL (ZeroExt8to64 x) y)
7578 if !(shiftIsBounded(v)) {
7581 v.reset(OpRISCV64SRL)
7582 v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
// rewriteValueRISCV64_OpRsh8x16 lowers Rsh8x16 (signed right shift of an 8-bit
// value by a 16-bit count) to SRA on the sign-extended operand, with the count
// saturated to all ones when out of range.
// Generated code — edit _gen/RISCV64.rules and regenerate; lines elided here.
7589 func rewriteValueRISCV64_OpRsh8x16(v *Value) bool {
7593 typ := &b.Func.Config.Types
7594 // match: (Rsh8x16 <t> x y)
7595 // cond: !shiftIsBounded(v)
7596 // result: (SRA <t> (SignExt8to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt16to64 y)))))
7601 if !(!shiftIsBounded(v)) {
7604 v.reset(OpRISCV64SRA)
7606 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
// OR/ADDI/SLTIU saturate an out-of-range count to all ones, so SRA yields the
// sign-fill value (see Rsh64x64 for the identity).
7608 v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
7609 v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
7610 v2.AuxInt = int64ToAuxInt(-1)
7611 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
7612 v3.AuxInt = int64ToAuxInt(64)
7613 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
// Bounded counts: plain SRA on the sign-extended value.
7621 // match: (Rsh8x16 x y)
7622 // cond: shiftIsBounded(v)
7623 // result: (SRA (SignExt8to64 x) y)
7627 if !(shiftIsBounded(v)) {
7630 v.reset(OpRISCV64SRA)
7631 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
// rewriteValueRISCV64_OpRsh8x32 lowers Rsh8x32 (signed right shift of an 8-bit
// value by a 32-bit count); same shape as Rsh8x16 with a 32-bit count extend.
// Generated code — edit _gen/RISCV64.rules and regenerate; lines elided here.
7638 func rewriteValueRISCV64_OpRsh8x32(v *Value) bool {
7642 typ := &b.Func.Config.Types
7643 // match: (Rsh8x32 <t> x y)
7644 // cond: !shiftIsBounded(v)
7645 // result: (SRA <t> (SignExt8to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt32to64 y)))))
7650 if !(!shiftIsBounded(v)) {
7653 v.reset(OpRISCV64SRA)
7655 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
// OR/ADDI/SLTIU saturate an out-of-range count to all ones (sign-fill shift).
7657 v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
7658 v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
7659 v2.AuxInt = int64ToAuxInt(-1)
7660 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
7661 v3.AuxInt = int64ToAuxInt(64)
7662 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
// Bounded counts: plain SRA on the sign-extended value.
7670 // match: (Rsh8x32 x y)
7671 // cond: shiftIsBounded(v)
7672 // result: (SRA (SignExt8to64 x) y)
7676 if !(shiftIsBounded(v)) {
7679 v.reset(OpRISCV64SRA)
7680 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
// rewriteValueRISCV64_OpRsh8x64 lowers Rsh8x64 (signed right shift of an 8-bit
// value by a 64-bit count); the count needs no extension before the range check.
// Generated code — edit _gen/RISCV64.rules and regenerate; lines elided here.
7687 func rewriteValueRISCV64_OpRsh8x64(v *Value) bool {
7691 typ := &b.Func.Config.Types
7692 // match: (Rsh8x64 <t> x y)
7693 // cond: !shiftIsBounded(v)
7694 // result: (SRA <t> (SignExt8to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] y))))
7699 if !(!shiftIsBounded(v)) {
7702 v.reset(OpRISCV64SRA)
7704 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
// OR/ADDI/SLTIU saturate an out-of-range count to all ones (sign-fill shift).
7706 v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
7707 v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
7708 v2.AuxInt = int64ToAuxInt(-1)
7709 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
7710 v3.AuxInt = int64ToAuxInt(64)
// Bounded counts: plain SRA on the sign-extended value.
7717 // match: (Rsh8x64 x y)
7718 // cond: shiftIsBounded(v)
7719 // result: (SRA (SignExt8to64 x) y)
7723 if !(shiftIsBounded(v)) {
7726 v.reset(OpRISCV64SRA)
7727 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
// rewriteValueRISCV64_OpRsh8x8 lowers Rsh8x8 (signed right shift of an 8-bit
// value by an 8-bit count); the count is zero-extended before the range check.
// Generated code — edit _gen/RISCV64.rules and regenerate; lines elided here.
7734 func rewriteValueRISCV64_OpRsh8x8(v *Value) bool {
7738 typ := &b.Func.Config.Types
7739 // match: (Rsh8x8 <t> x y)
7740 // cond: !shiftIsBounded(v)
7741 // result: (SRA <t> (SignExt8to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt8to64 y)))))
7746 if !(!shiftIsBounded(v)) {
7749 v.reset(OpRISCV64SRA)
7751 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
// OR/ADDI/SLTIU saturate an out-of-range count to all ones (sign-fill shift).
7753 v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
7754 v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
7755 v2.AuxInt = int64ToAuxInt(-1)
7756 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
7757 v3.AuxInt = int64ToAuxInt(64)
7758 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
// Bounded counts: plain SRA on the sign-extended value.
7766 // match: (Rsh8x8 x y)
7767 // cond: shiftIsBounded(v)
7768 // result: (SRA (SignExt8to64 x) y)
7772 if !(shiftIsBounded(v)) {
7775 v.reset(OpRISCV64SRA)
7776 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
// rewriteValueRISCV64_OpSelect0 rewrites Select0 (the first result of a
// multi-result op): the sum for Add64carry, the difference for Sub64borrow,
// and the high word (MULHU) of a single-use LoweredMuluhilo.
// Generated code — edit _gen/RISCV64.rules and regenerate; lines elided here.
7783 func rewriteValueRISCV64_OpSelect0(v *Value) bool {
7786 typ := &b.Func.Config.Types
// Select0 of Add64carry is the low 64 bits: (x + y) + c.
7787 // match: (Select0 (Add64carry x y c))
7788 // result: (ADD (ADD <typ.UInt64> x y) c)
7790 if v_0.Op != OpAdd64carry {
7796 v.reset(OpRISCV64ADD)
7797 v0 := b.NewValue0(v.Pos, OpRISCV64ADD, typ.UInt64)
// Select0 of Sub64borrow is the low 64 bits: (x - y) - c.
7802 // match: (Select0 (Sub64borrow x y c))
7803 // result: (SUB (SUB <typ.UInt64> x y) c)
7805 if v_0.Op != OpSub64borrow {
7811 v.reset(OpRISCV64SUB)
7812 v0 := b.NewValue0(v.Pos, OpRISCV64SUB, typ.UInt64)
// Only when the LoweredMuluhilo has a single use can it be replaced by a
// plain MULHU without duplicating the multiply.
7817 // match: (Select0 m:(LoweredMuluhilo x y))
7818 // cond: m.Uses == 1
7819 // result: (MULHU x y)
7822 if m.Op != OpRISCV64LoweredMuluhilo {
7830 v.reset(OpRISCV64MULHU)
// rewriteValueRISCV64_OpSelect1 rewrites Select1 (the second result of a
// multi-result op): the carry for Add64carry, the borrow for Sub64borrow,
// and the low word (MUL) of a single-use LoweredMuluhilo.
// Generated code — edit _gen/RISCV64.rules and regenerate; lines elided here.
7836 func rewriteValueRISCV64_OpSelect1(v *Value) bool {
7839 typ := &b.Func.Config.Types
// Carry-out is computed with unsigned compares: s = x+y overflowed iff s < x,
// and s+c overflowed iff (s+c) < s; OR of the two gives the carry bit.
7840 // match: (Select1 (Add64carry x y c))
7841 // result: (OR (SLTU <typ.UInt64> s:(ADD <typ.UInt64> x y) x) (SLTU <typ.UInt64> (ADD <typ.UInt64> s c) s))
7843 if v_0.Op != OpAdd64carry {
7849 v.reset(OpRISCV64OR)
7850 v0 := b.NewValue0(v.Pos, OpRISCV64SLTU, typ.UInt64)
7851 s := b.NewValue0(v.Pos, OpRISCV64ADD, typ.UInt64)
7854 v2 := b.NewValue0(v.Pos, OpRISCV64SLTU, typ.UInt64)
7855 v3 := b.NewValue0(v.Pos, OpRISCV64ADD, typ.UInt64)
// Borrow-out mirrors carry: s = x-y borrowed iff x < s, and s-c borrowed iff
// s < (s-c); OR of the two gives the borrow bit.
7861 // match: (Select1 (Sub64borrow x y c))
7862 // result: (OR (SLTU <typ.UInt64> x s:(SUB <typ.UInt64> x y)) (SLTU <typ.UInt64> s (SUB <typ.UInt64> s c)))
7864 if v_0.Op != OpSub64borrow {
7870 v.reset(OpRISCV64OR)
7871 v0 := b.NewValue0(v.Pos, OpRISCV64SLTU, typ.UInt64)
7872 s := b.NewValue0(v.Pos, OpRISCV64SUB, typ.UInt64)
7875 v2 := b.NewValue0(v.Pos, OpRISCV64SLTU, typ.UInt64)
7876 v3 := b.NewValue0(v.Pos, OpRISCV64SUB, typ.UInt64)
// Single-use LoweredMuluhilo: its low result is just MUL.
7882 // match: (Select1 m:(LoweredMuluhilo x y))
7883 // cond: m.Uses == 1
7884 // result: (MUL x y)
7887 if m.Op != OpRISCV64LoweredMuluhilo {
7895 v.reset(OpRISCV64MUL)
// rewriteValueRISCV64_OpSlicemask lowers Slicemask: SRAI [63] (NEG x)
// broadcasts "x != 0" into a full-width mask — NEG makes any nonzero x
// negative, and an arithmetic shift by 63 smears the sign bit.
// Generated code — edit _gen/RISCV64.rules and regenerate; lines elided here.
7901 func rewriteValueRISCV64_OpSlicemask(v *Value) bool {
7904 // match: (Slicemask <t> x)
7905 // result: (SRAI [63] (NEG <t> x))
7909 v.reset(OpRISCV64SRAI)
7910 v.AuxInt = int64ToAuxInt(63)
7911 v0 := b.NewValue0(v.Pos, OpRISCV64NEG, t)
// rewriteValueRISCV64_OpStore lowers the generic Store to a width- and
// kind-specific RISCV64 store, dispatching on the stored type's size and on
// whether it is a float (integer MOV*store vs. FMOV*store).
// Generated code — edit _gen/RISCV64.rules and regenerate; lines elided here.
7917 func rewriteValueRISCV64_OpStore(v *Value) bool {
// 1-byte stores.
7921 // match: (Store {t} ptr val mem)
7922 // cond: t.Size() == 1
7923 // result: (MOVBstore ptr val mem)
7925 t := auxToType(v.Aux)
7929 if !(t.Size() == 1) {
7932 v.reset(OpRISCV64MOVBstore)
7933 v.AddArg3(ptr, val, mem)
// 2-byte stores.
7936 // match: (Store {t} ptr val mem)
7937 // cond: t.Size() == 2
7938 // result: (MOVHstore ptr val mem)
7940 t := auxToType(v.Aux)
7944 if !(t.Size() == 2) {
7947 v.reset(OpRISCV64MOVHstore)
7948 v.AddArg3(ptr, val, mem)
// 4-byte integer stores.
7951 // match: (Store {t} ptr val mem)
7952 // cond: t.Size() == 4 && !t.IsFloat()
7953 // result: (MOVWstore ptr val mem)
7955 t := auxToType(v.Aux)
7959 if !(t.Size() == 4 && !t.IsFloat()) {
7962 v.reset(OpRISCV64MOVWstore)
7963 v.AddArg3(ptr, val, mem)
// 8-byte integer stores.
7966 // match: (Store {t} ptr val mem)
7967 // cond: t.Size() == 8 && !t.IsFloat()
7968 // result: (MOVDstore ptr val mem)
7970 t := auxToType(v.Aux)
7974 if !(t.Size() == 8 && !t.IsFloat()) {
7977 v.reset(OpRISCV64MOVDstore)
7978 v.AddArg3(ptr, val, mem)
// 4-byte float stores.
7981 // match: (Store {t} ptr val mem)
7982 // cond: t.Size() == 4 && t.IsFloat()
7983 // result: (FMOVWstore ptr val mem)
7985 t := auxToType(v.Aux)
7989 if !(t.Size() == 4 && t.IsFloat()) {
7992 v.reset(OpRISCV64FMOVWstore)
7993 v.AddArg3(ptr, val, mem)
// 8-byte float stores.
7996 // match: (Store {t} ptr val mem)
7997 // cond: t.Size() == 8 && t.IsFloat()
7998 // result: (FMOVDstore ptr val mem)
8000 t := auxToType(v.Aux)
8004 if !(t.Size() == 8 && t.IsFloat()) {
8007 v.reset(OpRISCV64FMOVDstore)
8008 v.AddArg3(ptr, val, mem)
// rewriteValueRISCV64_OpZero lowers Zero [size] {type} ptr mem.
// Strategy, in rule order: sizes 0..32 are fully unrolled into the widest
// stores the type's alignment permits (falling back to narrower stores for
// weaker alignment); then 8-byte-aligned sizes up to 1 KiB use DUFFZERO
// (unless config.noDuffDevice); everything else falls through to the generic
// LoweredZero loop.
// Generated code — edit _gen/RISCV64.rules and regenerate; lines elided here.
8013 func rewriteValueRISCV64_OpZero(v *Value) bool {
8017 config := b.Func.Config
8018 typ := &b.Func.Config.Types
// Zero of size 0 stores nothing.
8019 // match: (Zero [0] _ mem)
8022 if auxIntToInt64(v.AuxInt) != 0 {
8029 // match: (Zero [1] ptr mem)
8030 // result: (MOVBstore ptr (MOVDconst [0]) mem)
8032 if auxIntToInt64(v.AuxInt) != 1 {
8037 v.reset(OpRISCV64MOVBstore)
8038 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
8039 v0.AuxInt = int64ToAuxInt(0)
8040 v.AddArg3(ptr, v0, mem)
// Size 2: one halfword store when 2-aligned, else two byte stores.
8043 // match: (Zero [2] {t} ptr mem)
8044 // cond: t.Alignment()%2 == 0
8045 // result: (MOVHstore ptr (MOVDconst [0]) mem)
8047 if auxIntToInt64(v.AuxInt) != 2 {
8050 t := auxToType(v.Aux)
8053 if !(t.Alignment()%2 == 0) {
8056 v.reset(OpRISCV64MOVHstore)
8057 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
8058 v0.AuxInt = int64ToAuxInt(0)
8059 v.AddArg3(ptr, v0, mem)
8062 // match: (Zero [2] ptr mem)
8063 // result: (MOVBstore [1] ptr (MOVDconst [0]) (MOVBstore ptr (MOVDconst [0]) mem))
8065 if auxIntToInt64(v.AuxInt) != 2 {
8070 v.reset(OpRISCV64MOVBstore)
8071 v.AuxInt = int32ToAuxInt(1)
8072 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
8073 v0.AuxInt = int64ToAuxInt(0)
8074 v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
8075 v1.AddArg3(ptr, v0, mem)
8076 v.AddArg3(ptr, v0, v1)
// Size 4: word store when 4-aligned, two halfwords when 2-aligned, else bytes.
8079 // match: (Zero [4] {t} ptr mem)
8080 // cond: t.Alignment()%4 == 0
8081 // result: (MOVWstore ptr (MOVDconst [0]) mem)
8083 if auxIntToInt64(v.AuxInt) != 4 {
8086 t := auxToType(v.Aux)
8089 if !(t.Alignment()%4 == 0) {
8092 v.reset(OpRISCV64MOVWstore)
8093 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
8094 v0.AuxInt = int64ToAuxInt(0)
8095 v.AddArg3(ptr, v0, mem)
8098 // match: (Zero [4] {t} ptr mem)
8099 // cond: t.Alignment()%2 == 0
8100 // result: (MOVHstore [2] ptr (MOVDconst [0]) (MOVHstore ptr (MOVDconst [0]) mem))
8102 if auxIntToInt64(v.AuxInt) != 4 {
8105 t := auxToType(v.Aux)
8108 if !(t.Alignment()%2 == 0) {
8111 v.reset(OpRISCV64MOVHstore)
8112 v.AuxInt = int32ToAuxInt(2)
8113 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
8114 v0.AuxInt = int64ToAuxInt(0)
8115 v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
8116 v1.AddArg3(ptr, v0, mem)
8117 v.AddArg3(ptr, v0, v1)
8120 // match: (Zero [4] ptr mem)
8121 // result: (MOVBstore [3] ptr (MOVDconst [0]) (MOVBstore [2] ptr (MOVDconst [0]) (MOVBstore [1] ptr (MOVDconst [0]) (MOVBstore ptr (MOVDconst [0]) mem))))
8123 if auxIntToInt64(v.AuxInt) != 4 {
8128 v.reset(OpRISCV64MOVBstore)
8129 v.AuxInt = int32ToAuxInt(3)
8130 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
8131 v0.AuxInt = int64ToAuxInt(0)
8132 v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
8133 v1.AuxInt = int32ToAuxInt(2)
8134 v2 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
8135 v2.AuxInt = int32ToAuxInt(1)
8136 v3 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
8137 v3.AddArg3(ptr, v0, mem)
8138 v2.AddArg3(ptr, v0, v3)
8139 v1.AddArg3(ptr, v0, v2)
8140 v.AddArg3(ptr, v0, v1)
// Size 8: one doubleword when 8-aligned, two words when 4-aligned, four
// halfwords when 2-aligned.
8143 // match: (Zero [8] {t} ptr mem)
8144 // cond: t.Alignment()%8 == 0
8145 // result: (MOVDstore ptr (MOVDconst [0]) mem)
8147 if auxIntToInt64(v.AuxInt) != 8 {
8150 t := auxToType(v.Aux)
8153 if !(t.Alignment()%8 == 0) {
8156 v.reset(OpRISCV64MOVDstore)
8157 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
8158 v0.AuxInt = int64ToAuxInt(0)
8159 v.AddArg3(ptr, v0, mem)
8162 // match: (Zero [8] {t} ptr mem)
8163 // cond: t.Alignment()%4 == 0
8164 // result: (MOVWstore [4] ptr (MOVDconst [0]) (MOVWstore ptr (MOVDconst [0]) mem))
8166 if auxIntToInt64(v.AuxInt) != 8 {
8169 t := auxToType(v.Aux)
8172 if !(t.Alignment()%4 == 0) {
8175 v.reset(OpRISCV64MOVWstore)
8176 v.AuxInt = int32ToAuxInt(4)
8177 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
8178 v0.AuxInt = int64ToAuxInt(0)
8179 v1 := b.NewValue0(v.Pos, OpRISCV64MOVWstore, types.TypeMem)
8180 v1.AddArg3(ptr, v0, mem)
8181 v.AddArg3(ptr, v0, v1)
8184 // match: (Zero [8] {t} ptr mem)
8185 // cond: t.Alignment()%2 == 0
8186 // result: (MOVHstore [6] ptr (MOVDconst [0]) (MOVHstore [4] ptr (MOVDconst [0]) (MOVHstore [2] ptr (MOVDconst [0]) (MOVHstore ptr (MOVDconst [0]) mem))))
8188 if auxIntToInt64(v.AuxInt) != 8 {
8191 t := auxToType(v.Aux)
8194 if !(t.Alignment()%2 == 0) {
8197 v.reset(OpRISCV64MOVHstore)
8198 v.AuxInt = int32ToAuxInt(6)
8199 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
8200 v0.AuxInt = int64ToAuxInt(0)
8201 v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
8202 v1.AuxInt = int32ToAuxInt(4)
8203 v2 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
8204 v2.AuxInt = int32ToAuxInt(2)
8205 v3 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
8206 v3.AddArg3(ptr, v0, mem)
8207 v2.AddArg3(ptr, v0, v3)
8208 v1.AddArg3(ptr, v0, v2)
8209 v.AddArg3(ptr, v0, v1)
// Odd/intermediate sizes (3, 6, 12) unroll with the widest aligned store.
8212 // match: (Zero [3] ptr mem)
8213 // result: (MOVBstore [2] ptr (MOVDconst [0]) (MOVBstore [1] ptr (MOVDconst [0]) (MOVBstore ptr (MOVDconst [0]) mem)))
8215 if auxIntToInt64(v.AuxInt) != 3 {
8220 v.reset(OpRISCV64MOVBstore)
8221 v.AuxInt = int32ToAuxInt(2)
8222 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
8223 v0.AuxInt = int64ToAuxInt(0)
8224 v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
8225 v1.AuxInt = int32ToAuxInt(1)
8226 v2 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
8227 v2.AddArg3(ptr, v0, mem)
8228 v1.AddArg3(ptr, v0, v2)
8229 v.AddArg3(ptr, v0, v1)
8232 // match: (Zero [6] {t} ptr mem)
8233 // cond: t.Alignment()%2 == 0
8234 // result: (MOVHstore [4] ptr (MOVDconst [0]) (MOVHstore [2] ptr (MOVDconst [0]) (MOVHstore ptr (MOVDconst [0]) mem)))
8236 if auxIntToInt64(v.AuxInt) != 6 {
8239 t := auxToType(v.Aux)
8242 if !(t.Alignment()%2 == 0) {
8245 v.reset(OpRISCV64MOVHstore)
8246 v.AuxInt = int32ToAuxInt(4)
8247 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
8248 v0.AuxInt = int64ToAuxInt(0)
8249 v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
8250 v1.AuxInt = int32ToAuxInt(2)
8251 v2 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
8252 v2.AddArg3(ptr, v0, mem)
8253 v1.AddArg3(ptr, v0, v2)
8254 v.AddArg3(ptr, v0, v1)
8257 // match: (Zero [12] {t} ptr mem)
8258 // cond: t.Alignment()%4 == 0
8259 // result: (MOVWstore [8] ptr (MOVDconst [0]) (MOVWstore [4] ptr (MOVDconst [0]) (MOVWstore ptr (MOVDconst [0]) mem)))
8261 if auxIntToInt64(v.AuxInt) != 12 {
8264 t := auxToType(v.Aux)
8267 if !(t.Alignment()%4 == 0) {
8270 v.reset(OpRISCV64MOVWstore)
8271 v.AuxInt = int32ToAuxInt(8)
8272 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
8273 v0.AuxInt = int64ToAuxInt(0)
8274 v1 := b.NewValue0(v.Pos, OpRISCV64MOVWstore, types.TypeMem)
8275 v1.AuxInt = int32ToAuxInt(4)
8276 v2 := b.NewValue0(v.Pos, OpRISCV64MOVWstore, types.TypeMem)
8277 v2.AddArg3(ptr, v0, mem)
8278 v1.AddArg3(ptr, v0, v2)
8279 v.AddArg3(ptr, v0, v1)
// 8-aligned 16/24/32-byte zeroes: chains of doubleword stores.
8282 // match: (Zero [16] {t} ptr mem)
8283 // cond: t.Alignment()%8 == 0
8284 // result: (MOVDstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem))
8286 if auxIntToInt64(v.AuxInt) != 16 {
8289 t := auxToType(v.Aux)
8292 if !(t.Alignment()%8 == 0) {
8295 v.reset(OpRISCV64MOVDstore)
8296 v.AuxInt = int32ToAuxInt(8)
8297 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
8298 v0.AuxInt = int64ToAuxInt(0)
8299 v1 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
8300 v1.AddArg3(ptr, v0, mem)
8301 v.AddArg3(ptr, v0, v1)
8304 // match: (Zero [24] {t} ptr mem)
8305 // cond: t.Alignment()%8 == 0
8306 // result: (MOVDstore [16] ptr (MOVDconst [0]) (MOVDstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem)))
8308 if auxIntToInt64(v.AuxInt) != 24 {
8311 t := auxToType(v.Aux)
8314 if !(t.Alignment()%8 == 0) {
8317 v.reset(OpRISCV64MOVDstore)
8318 v.AuxInt = int32ToAuxInt(16)
8319 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
8320 v0.AuxInt = int64ToAuxInt(0)
8321 v1 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
8322 v1.AuxInt = int32ToAuxInt(8)
8323 v2 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
8324 v2.AddArg3(ptr, v0, mem)
8325 v1.AddArg3(ptr, v0, v2)
8326 v.AddArg3(ptr, v0, v1)
8329 // match: (Zero [32] {t} ptr mem)
8330 // cond: t.Alignment()%8 == 0
8331 // result: (MOVDstore [24] ptr (MOVDconst [0]) (MOVDstore [16] ptr (MOVDconst [0]) (MOVDstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem))))
8333 if auxIntToInt64(v.AuxInt) != 32 {
8336 t := auxToType(v.Aux)
8339 if !(t.Alignment()%8 == 0) {
8342 v.reset(OpRISCV64MOVDstore)
8343 v.AuxInt = int32ToAuxInt(24)
8344 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
8345 v0.AuxInt = int64ToAuxInt(0)
8346 v1 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
8347 v1.AuxInt = int32ToAuxInt(16)
8348 v2 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
8349 v2.AuxInt = int32ToAuxInt(8)
8350 v3 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
8351 v3.AddArg3(ptr, v0, mem)
8352 v2.AddArg3(ptr, v0, v3)
8353 v1.AddArg3(ptr, v0, v2)
8354 v.AddArg3(ptr, v0, v1)
// Medium 8-aligned sizes (multiples of 8 up to 8*128 bytes) jump into the
// Duff's-device zeroing routine; the AuxInt is the entry offset into it.
8357 // match: (Zero [s] {t} ptr mem)
8358 // cond: s%8 == 0 && s <= 8*128 && t.Alignment()%8 == 0 && !config.noDuffDevice
8359 // result: (DUFFZERO [8 * (128 - s/8)] ptr mem)
8361 s := auxIntToInt64(v.AuxInt)
8362 t := auxToType(v.Aux)
8365 if !(s%8 == 0 && s <= 8*128 && t.Alignment()%8 == 0 && !config.noDuffDevice) {
8368 v.reset(OpRISCV64DUFFZERO)
8369 v.AuxInt = int64ToAuxInt(8 * (128 - s/8))
// Fallback for everything else: a generic zeroing loop whose end pointer is
// ptr + s - moveSize(alignment).
8373 // match: (Zero [s] {t} ptr mem)
8374 // result: (LoweredZero [t.Alignment()] ptr (ADD <ptr.Type> ptr (MOVDconst [s-moveSize(t.Alignment(), config)])) mem)
8376 s := auxIntToInt64(v.AuxInt)
8377 t := auxToType(v.Aux)
8380 v.reset(OpRISCV64LoweredZero)
8381 v.AuxInt = int64ToAuxInt(t.Alignment())
8382 v0 := b.NewValue0(v.Pos, OpRISCV64ADD, ptr.Type)
8383 v1 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
8384 v1.AuxInt = int64ToAuxInt(s - moveSize(t.Alignment(), config))
8386 v.AddArg3(ptr, v0, mem)
8390 func rewriteBlockRISCV64(b *Block) bool {
8391 typ := &b.Func.Config.Types
8393 case BlockRISCV64BEQ:
8394 // match: (BEQ (MOVDconst [0]) cond yes no)
8395 // result: (BEQZ cond yes no)
8396 for b.Controls[0].Op == OpRISCV64MOVDconst {
8397 v_0 := b.Controls[0]
8398 if auxIntToInt64(v_0.AuxInt) != 0 {
8401 cond := b.Controls[1]
8402 b.resetWithControl(BlockRISCV64BEQZ, cond)
8405 // match: (BEQ cond (MOVDconst [0]) yes no)
8406 // result: (BEQZ cond yes no)
8407 for b.Controls[1].Op == OpRISCV64MOVDconst {
8408 cond := b.Controls[0]
8409 v_1 := b.Controls[1]
8410 if auxIntToInt64(v_1.AuxInt) != 0 {
8413 b.resetWithControl(BlockRISCV64BEQZ, cond)
8416 case BlockRISCV64BEQZ:
8417 // match: (BEQZ (SEQZ x) yes no)
8418 // result: (BNEZ x yes no)
8419 for b.Controls[0].Op == OpRISCV64SEQZ {
8420 v_0 := b.Controls[0]
8422 b.resetWithControl(BlockRISCV64BNEZ, x)
8425 // match: (BEQZ (SNEZ x) yes no)
8426 // result: (BEQZ x yes no)
8427 for b.Controls[0].Op == OpRISCV64SNEZ {
8428 v_0 := b.Controls[0]
8430 b.resetWithControl(BlockRISCV64BEQZ, x)
8433 // match: (BEQZ (NEG x) yes no)
8434 // result: (BEQZ x yes no)
8435 for b.Controls[0].Op == OpRISCV64NEG {
8436 v_0 := b.Controls[0]
8438 b.resetWithControl(BlockRISCV64BEQZ, x)
8441 // match: (BEQZ (FNES <t> x y) yes no)
8442 // result: (BNEZ (FEQS <t> x y) yes no)
8443 for b.Controls[0].Op == OpRISCV64FNES {
8444 v_0 := b.Controls[0]
8447 v_0_0 := v_0.Args[0]
8448 v_0_1 := v_0.Args[1]
8449 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
8452 v0 := b.NewValue0(v_0.Pos, OpRISCV64FEQS, t)
8454 b.resetWithControl(BlockRISCV64BNEZ, v0)
8458 // match: (BEQZ (FNED <t> x y) yes no)
8459 // result: (BNEZ (FEQD <t> x y) yes no)
8460 for b.Controls[0].Op == OpRISCV64FNED {
8461 v_0 := b.Controls[0]
8464 v_0_0 := v_0.Args[0]
8465 v_0_1 := v_0.Args[1]
8466 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
8469 v0 := b.NewValue0(v_0.Pos, OpRISCV64FEQD, t)
8471 b.resetWithControl(BlockRISCV64BNEZ, v0)
8475 // match: (BEQZ (SUB x y) yes no)
8476 // result: (BEQ x y yes no)
8477 for b.Controls[0].Op == OpRISCV64SUB {
8478 v_0 := b.Controls[0]
8481 b.resetWithControl2(BlockRISCV64BEQ, x, y)
8484 // match: (BEQZ (SLT x y) yes no)
8485 // result: (BGE x y yes no)
8486 for b.Controls[0].Op == OpRISCV64SLT {
8487 v_0 := b.Controls[0]
8490 b.resetWithControl2(BlockRISCV64BGE, x, y)
8493 // match: (BEQZ (SLTU x y) yes no)
8494 // result: (BGEU x y yes no)
8495 for b.Controls[0].Op == OpRISCV64SLTU {
8496 v_0 := b.Controls[0]
8499 b.resetWithControl2(BlockRISCV64BGEU, x, y)
8502 // match: (BEQZ (SLTI [x] y) yes no)
8503 // result: (BGE y (MOVDconst [x]) yes no)
8504 for b.Controls[0].Op == OpRISCV64SLTI {
8505 v_0 := b.Controls[0]
8506 x := auxIntToInt64(v_0.AuxInt)
8508 v0 := b.NewValue0(b.Pos, OpRISCV64MOVDconst, typ.UInt64)
8509 v0.AuxInt = int64ToAuxInt(x)
8510 b.resetWithControl2(BlockRISCV64BGE, y, v0)
8513 // match: (BEQZ (SLTIU [x] y) yes no)
8514 // result: (BGEU y (MOVDconst [x]) yes no)
8515 for b.Controls[0].Op == OpRISCV64SLTIU {
8516 v_0 := b.Controls[0]
8517 x := auxIntToInt64(v_0.AuxInt)
8519 v0 := b.NewValue0(b.Pos, OpRISCV64MOVDconst, typ.UInt64)
8520 v0.AuxInt = int64ToAuxInt(x)
8521 b.resetWithControl2(BlockRISCV64BGEU, y, v0)
8524 case BlockRISCV64BGE:
8525 // match: (BGE (MOVDconst [0]) cond yes no)
8526 // result: (BLEZ cond yes no)
8527 for b.Controls[0].Op == OpRISCV64MOVDconst {
8528 v_0 := b.Controls[0]
8529 if auxIntToInt64(v_0.AuxInt) != 0 {
8532 cond := b.Controls[1]
8533 b.resetWithControl(BlockRISCV64BLEZ, cond)
8536 // match: (BGE cond (MOVDconst [0]) yes no)
8537 // result: (BGEZ cond yes no)
8538 for b.Controls[1].Op == OpRISCV64MOVDconst {
8539 cond := b.Controls[0]
8540 v_1 := b.Controls[1]
8541 if auxIntToInt64(v_1.AuxInt) != 0 {
8544 b.resetWithControl(BlockRISCV64BGEZ, cond)
8547 case BlockRISCV64BLT:
8548 // match: (BLT (MOVDconst [0]) cond yes no)
8549 // result: (BGTZ cond yes no)
8550 for b.Controls[0].Op == OpRISCV64MOVDconst {
8551 v_0 := b.Controls[0]
8552 if auxIntToInt64(v_0.AuxInt) != 0 {
8555 cond := b.Controls[1]
8556 b.resetWithControl(BlockRISCV64BGTZ, cond)
8559 // match: (BLT cond (MOVDconst [0]) yes no)
8560 // result: (BLTZ cond yes no)
8561 for b.Controls[1].Op == OpRISCV64MOVDconst {
8562 cond := b.Controls[0]
8563 v_1 := b.Controls[1]
8564 if auxIntToInt64(v_1.AuxInt) != 0 {
8567 b.resetWithControl(BlockRISCV64BLTZ, cond)
8570 case BlockRISCV64BNE:
8571 // match: (BNE (MOVDconst [0]) cond yes no)
8572 // result: (BNEZ cond yes no)
8573 for b.Controls[0].Op == OpRISCV64MOVDconst {
8574 v_0 := b.Controls[0]
8575 if auxIntToInt64(v_0.AuxInt) != 0 {
8578 cond := b.Controls[1]
8579 b.resetWithControl(BlockRISCV64BNEZ, cond)
8582 // match: (BNE cond (MOVDconst [0]) yes no)
8583 // result: (BNEZ cond yes no)
8584 for b.Controls[1].Op == OpRISCV64MOVDconst {
8585 cond := b.Controls[0]
8586 v_1 := b.Controls[1]
8587 if auxIntToInt64(v_1.AuxInt) != 0 {
8590 b.resetWithControl(BlockRISCV64BNEZ, cond)
8593 case BlockRISCV64BNEZ:
8594 // match: (BNEZ (SEQZ x) yes no)
8595 // result: (BEQZ x yes no)
8596 for b.Controls[0].Op == OpRISCV64SEQZ {
8597 v_0 := b.Controls[0]
8599 b.resetWithControl(BlockRISCV64BEQZ, x)
8602 // match: (BNEZ (SNEZ x) yes no)
8603 // result: (BNEZ x yes no)
8604 for b.Controls[0].Op == OpRISCV64SNEZ {
8605 v_0 := b.Controls[0]
8607 b.resetWithControl(BlockRISCV64BNEZ, x)
8610 // match: (BNEZ (NEG x) yes no)
8611 // result: (BNEZ x yes no)
8612 for b.Controls[0].Op == OpRISCV64NEG {
8613 v_0 := b.Controls[0]
8615 b.resetWithControl(BlockRISCV64BNEZ, x)
8618 // match: (BNEZ (FNES <t> x y) yes no)
8619 // result: (BEQZ (FEQS <t> x y) yes no)
8620 for b.Controls[0].Op == OpRISCV64FNES {
8621 v_0 := b.Controls[0]
8624 v_0_0 := v_0.Args[0]
8625 v_0_1 := v_0.Args[1]
8626 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
8629 v0 := b.NewValue0(v_0.Pos, OpRISCV64FEQS, t)
8631 b.resetWithControl(BlockRISCV64BEQZ, v0)
8635 // match: (BNEZ (FNED <t> x y) yes no)
8636 // result: (BEQZ (FEQD <t> x y) yes no)
8637 for b.Controls[0].Op == OpRISCV64FNED {
8638 v_0 := b.Controls[0]
8641 v_0_0 := v_0.Args[0]
8642 v_0_1 := v_0.Args[1]
8643 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
8646 v0 := b.NewValue0(v_0.Pos, OpRISCV64FEQD, t)
8648 b.resetWithControl(BlockRISCV64BEQZ, v0)
8652 // match: (BNEZ (SUB x y) yes no)
8653 // result: (BNE x y yes no)
8654 for b.Controls[0].Op == OpRISCV64SUB {
8655 v_0 := b.Controls[0]
8658 b.resetWithControl2(BlockRISCV64BNE, x, y)
8661 // match: (BNEZ (SLT x y) yes no)
8662 // result: (BLT x y yes no)
8663 for b.Controls[0].Op == OpRISCV64SLT {
8664 v_0 := b.Controls[0]
8667 b.resetWithControl2(BlockRISCV64BLT, x, y)
8670 // match: (BNEZ (SLTU x y) yes no)
8671 // result: (BLTU x y yes no)
8672 for b.Controls[0].Op == OpRISCV64SLTU {
8673 v_0 := b.Controls[0]
8676 b.resetWithControl2(BlockRISCV64BLTU, x, y)
8679 // match: (BNEZ (SLTI [x] y) yes no)
8680 // result: (BLT y (MOVDconst [x]) yes no)
8681 for b.Controls[0].Op == OpRISCV64SLTI {
8682 v_0 := b.Controls[0]
8683 x := auxIntToInt64(v_0.AuxInt)
8685 v0 := b.NewValue0(b.Pos, OpRISCV64MOVDconst, typ.UInt64)
8686 v0.AuxInt = int64ToAuxInt(x)
8687 b.resetWithControl2(BlockRISCV64BLT, y, v0)
8690 // match: (BNEZ (SLTIU [x] y) yes no)
8691 // result: (BLTU y (MOVDconst [x]) yes no)
8692 for b.Controls[0].Op == OpRISCV64SLTIU {
8693 v_0 := b.Controls[0]
8694 x := auxIntToInt64(v_0.AuxInt)
8696 v0 := b.NewValue0(b.Pos, OpRISCV64MOVDconst, typ.UInt64)
8697 v0.AuxInt = int64ToAuxInt(x)
8698 b.resetWithControl2(BlockRISCV64BLTU, y, v0)
8702 // match: (If cond yes no)
8703 // result: (BNEZ (MOVBUreg <typ.UInt64> cond) yes no)
8705 cond := b.Controls[0]
8706 v0 := b.NewValue0(cond.Pos, OpRISCV64MOVBUreg, typ.UInt64)
8708 b.resetWithControl(BlockRISCV64BNEZ, v0)