1 // Code generated from _gen/RISCV64.rules using 'go generate'; DO NOT EDIT.
6 import "cmd/compile/internal/types"
8 func rewriteValueRISCV64(v *Value) bool {
35 return rewriteValueRISCV64_OpAddr(v)
52 v.Op = OpRISCV64LoweredAtomicAdd32
55 v.Op = OpRISCV64LoweredAtomicAdd64
58 v.Op = OpRISCV64LoweredAtomicAnd32
61 return rewriteValueRISCV64_OpAtomicAnd8(v)
62 case OpAtomicCompareAndSwap32:
63 return rewriteValueRISCV64_OpAtomicCompareAndSwap32(v)
64 case OpAtomicCompareAndSwap64:
65 v.Op = OpRISCV64LoweredAtomicCas64
67 case OpAtomicExchange32:
68 v.Op = OpRISCV64LoweredAtomicExchange32
70 case OpAtomicExchange64:
71 v.Op = OpRISCV64LoweredAtomicExchange64
74 v.Op = OpRISCV64LoweredAtomicLoad32
77 v.Op = OpRISCV64LoweredAtomicLoad64
80 v.Op = OpRISCV64LoweredAtomicLoad8
83 v.Op = OpRISCV64LoweredAtomicLoad64
86 v.Op = OpRISCV64LoweredAtomicOr32
89 return rewriteValueRISCV64_OpAtomicOr8(v)
91 v.Op = OpRISCV64LoweredAtomicStore32
94 v.Op = OpRISCV64LoweredAtomicStore64
97 v.Op = OpRISCV64LoweredAtomicStore8
99 case OpAtomicStorePtrNoWB:
100 v.Op = OpRISCV64LoweredAtomicStore64
103 return rewriteValueRISCV64_OpAvg64u(v)
105 v.Op = OpRISCV64CALLclosure
120 return rewriteValueRISCV64_OpConst16(v)
122 return rewriteValueRISCV64_OpConst32(v)
124 return rewriteValueRISCV64_OpConst32F(v)
126 return rewriteValueRISCV64_OpConst64(v)
128 return rewriteValueRISCV64_OpConst64F(v)
130 return rewriteValueRISCV64_OpConst8(v)
132 return rewriteValueRISCV64_OpConstBool(v)
134 return rewriteValueRISCV64_OpConstNil(v)
136 v.Op = OpRISCV64MOVconvert
139 v.Op = OpRISCV64FSGNJD
142 v.Op = OpRISCV64FCVTWS
145 v.Op = OpRISCV64FCVTLS
148 v.Op = OpRISCV64FCVTDS
151 v.Op = OpRISCV64FCVTSW
154 v.Op = OpRISCV64FCVTDW
157 v.Op = OpRISCV64FCVTWD
160 v.Op = OpRISCV64FCVTSD
163 v.Op = OpRISCV64FCVTLD
166 v.Op = OpRISCV64FCVTSL
169 v.Op = OpRISCV64FCVTDL
171 case OpCvtBoolToUint8:
175 return rewriteValueRISCV64_OpDiv16(v)
177 return rewriteValueRISCV64_OpDiv16u(v)
179 return rewriteValueRISCV64_OpDiv32(v)
181 v.Op = OpRISCV64FDIVS
184 v.Op = OpRISCV64DIVUW
187 return rewriteValueRISCV64_OpDiv64(v)
189 v.Op = OpRISCV64FDIVD
195 return rewriteValueRISCV64_OpDiv8(v)
197 return rewriteValueRISCV64_OpDiv8u(v)
199 return rewriteValueRISCV64_OpEq16(v)
201 return rewriteValueRISCV64_OpEq32(v)
206 return rewriteValueRISCV64_OpEq64(v)
211 return rewriteValueRISCV64_OpEq8(v)
213 return rewriteValueRISCV64_OpEqB(v)
215 return rewriteValueRISCV64_OpEqPtr(v)
217 v.Op = OpRISCV64FMADDD
220 v.Op = OpRISCV64LoweredGetCallerPC
223 v.Op = OpRISCV64LoweredGetCallerSP
225 case OpGetClosurePtr:
226 v.Op = OpRISCV64LoweredGetClosurePtr
229 return rewriteValueRISCV64_OpHmul32(v)
231 return rewriteValueRISCV64_OpHmul32u(v)
236 v.Op = OpRISCV64MULHU
239 v.Op = OpRISCV64CALLinter
247 case OpIsSliceInBounds:
251 return rewriteValueRISCV64_OpLeq16(v)
253 return rewriteValueRISCV64_OpLeq16U(v)
255 return rewriteValueRISCV64_OpLeq32(v)
260 return rewriteValueRISCV64_OpLeq32U(v)
262 return rewriteValueRISCV64_OpLeq64(v)
267 return rewriteValueRISCV64_OpLeq64U(v)
269 return rewriteValueRISCV64_OpLeq8(v)
271 return rewriteValueRISCV64_OpLeq8U(v)
273 return rewriteValueRISCV64_OpLess16(v)
275 return rewriteValueRISCV64_OpLess16U(v)
277 return rewriteValueRISCV64_OpLess32(v)
282 return rewriteValueRISCV64_OpLess32U(v)
293 return rewriteValueRISCV64_OpLess8(v)
295 return rewriteValueRISCV64_OpLess8U(v)
297 return rewriteValueRISCV64_OpLoad(v)
299 return rewriteValueRISCV64_OpLocalAddr(v)
301 return rewriteValueRISCV64_OpLsh16x16(v)
303 return rewriteValueRISCV64_OpLsh16x32(v)
305 return rewriteValueRISCV64_OpLsh16x64(v)
307 return rewriteValueRISCV64_OpLsh16x8(v)
309 return rewriteValueRISCV64_OpLsh32x16(v)
311 return rewriteValueRISCV64_OpLsh32x32(v)
313 return rewriteValueRISCV64_OpLsh32x64(v)
315 return rewriteValueRISCV64_OpLsh32x8(v)
317 return rewriteValueRISCV64_OpLsh64x16(v)
319 return rewriteValueRISCV64_OpLsh64x32(v)
321 return rewriteValueRISCV64_OpLsh64x64(v)
323 return rewriteValueRISCV64_OpLsh64x8(v)
325 return rewriteValueRISCV64_OpLsh8x16(v)
327 return rewriteValueRISCV64_OpLsh8x32(v)
329 return rewriteValueRISCV64_OpLsh8x64(v)
331 return rewriteValueRISCV64_OpLsh8x8(v)
333 return rewriteValueRISCV64_OpMod16(v)
335 return rewriteValueRISCV64_OpMod16u(v)
337 return rewriteValueRISCV64_OpMod32(v)
339 v.Op = OpRISCV64REMUW
342 return rewriteValueRISCV64_OpMod64(v)
347 return rewriteValueRISCV64_OpMod8(v)
349 return rewriteValueRISCV64_OpMod8u(v)
351 return rewriteValueRISCV64_OpMove(v)
353 return rewriteValueRISCV64_OpMul16(v)
358 v.Op = OpRISCV64FMULS
364 v.Op = OpRISCV64FMULD
367 v.Op = OpRISCV64LoweredMuluhilo
370 v.Op = OpRISCV64LoweredMuluover
373 return rewriteValueRISCV64_OpMul8(v)
381 v.Op = OpRISCV64FNEGS
387 v.Op = OpRISCV64FNEGD
393 return rewriteValueRISCV64_OpNeq16(v)
395 return rewriteValueRISCV64_OpNeq32(v)
400 return rewriteValueRISCV64_OpNeq64(v)
405 return rewriteValueRISCV64_OpNeq8(v)
407 return rewriteValueRISCV64_OpNeqB(v)
409 return rewriteValueRISCV64_OpNeqPtr(v)
411 v.Op = OpRISCV64LoweredNilCheck
417 return rewriteValueRISCV64_OpOffPtr(v)
434 return rewriteValueRISCV64_OpPanicBounds(v)
436 v.Op = OpRISCV64LoweredPubBarrier
439 return rewriteValueRISCV64_OpRISCV64ADD(v)
441 return rewriteValueRISCV64_OpRISCV64ADDI(v)
443 return rewriteValueRISCV64_OpRISCV64AND(v)
445 return rewriteValueRISCV64_OpRISCV64ANDI(v)
447 return rewriteValueRISCV64_OpRISCV64FADDD(v)
449 return rewriteValueRISCV64_OpRISCV64FADDS(v)
450 case OpRISCV64FMADDD:
451 return rewriteValueRISCV64_OpRISCV64FMADDD(v)
452 case OpRISCV64FMADDS:
453 return rewriteValueRISCV64_OpRISCV64FMADDS(v)
454 case OpRISCV64FMSUBD:
455 return rewriteValueRISCV64_OpRISCV64FMSUBD(v)
456 case OpRISCV64FMSUBS:
457 return rewriteValueRISCV64_OpRISCV64FMSUBS(v)
458 case OpRISCV64FNMADDD:
459 return rewriteValueRISCV64_OpRISCV64FNMADDD(v)
460 case OpRISCV64FNMADDS:
461 return rewriteValueRISCV64_OpRISCV64FNMADDS(v)
462 case OpRISCV64FNMSUBD:
463 return rewriteValueRISCV64_OpRISCV64FNMSUBD(v)
464 case OpRISCV64FNMSUBS:
465 return rewriteValueRISCV64_OpRISCV64FNMSUBS(v)
467 return rewriteValueRISCV64_OpRISCV64FSUBD(v)
469 return rewriteValueRISCV64_OpRISCV64FSUBS(v)
470 case OpRISCV64MOVBUload:
471 return rewriteValueRISCV64_OpRISCV64MOVBUload(v)
472 case OpRISCV64MOVBUreg:
473 return rewriteValueRISCV64_OpRISCV64MOVBUreg(v)
474 case OpRISCV64MOVBload:
475 return rewriteValueRISCV64_OpRISCV64MOVBload(v)
476 case OpRISCV64MOVBreg:
477 return rewriteValueRISCV64_OpRISCV64MOVBreg(v)
478 case OpRISCV64MOVBstore:
479 return rewriteValueRISCV64_OpRISCV64MOVBstore(v)
480 case OpRISCV64MOVBstorezero:
481 return rewriteValueRISCV64_OpRISCV64MOVBstorezero(v)
482 case OpRISCV64MOVDload:
483 return rewriteValueRISCV64_OpRISCV64MOVDload(v)
484 case OpRISCV64MOVDnop:
485 return rewriteValueRISCV64_OpRISCV64MOVDnop(v)
486 case OpRISCV64MOVDreg:
487 return rewriteValueRISCV64_OpRISCV64MOVDreg(v)
488 case OpRISCV64MOVDstore:
489 return rewriteValueRISCV64_OpRISCV64MOVDstore(v)
490 case OpRISCV64MOVDstorezero:
491 return rewriteValueRISCV64_OpRISCV64MOVDstorezero(v)
492 case OpRISCV64MOVHUload:
493 return rewriteValueRISCV64_OpRISCV64MOVHUload(v)
494 case OpRISCV64MOVHUreg:
495 return rewriteValueRISCV64_OpRISCV64MOVHUreg(v)
496 case OpRISCV64MOVHload:
497 return rewriteValueRISCV64_OpRISCV64MOVHload(v)
498 case OpRISCV64MOVHreg:
499 return rewriteValueRISCV64_OpRISCV64MOVHreg(v)
500 case OpRISCV64MOVHstore:
501 return rewriteValueRISCV64_OpRISCV64MOVHstore(v)
502 case OpRISCV64MOVHstorezero:
503 return rewriteValueRISCV64_OpRISCV64MOVHstorezero(v)
504 case OpRISCV64MOVWUload:
505 return rewriteValueRISCV64_OpRISCV64MOVWUload(v)
506 case OpRISCV64MOVWUreg:
507 return rewriteValueRISCV64_OpRISCV64MOVWUreg(v)
508 case OpRISCV64MOVWload:
509 return rewriteValueRISCV64_OpRISCV64MOVWload(v)
510 case OpRISCV64MOVWreg:
511 return rewriteValueRISCV64_OpRISCV64MOVWreg(v)
512 case OpRISCV64MOVWstore:
513 return rewriteValueRISCV64_OpRISCV64MOVWstore(v)
514 case OpRISCV64MOVWstorezero:
515 return rewriteValueRISCV64_OpRISCV64MOVWstorezero(v)
517 return rewriteValueRISCV64_OpRISCV64NEG(v)
519 return rewriteValueRISCV64_OpRISCV64NEGW(v)
521 return rewriteValueRISCV64_OpRISCV64OR(v)
523 return rewriteValueRISCV64_OpRISCV64ORI(v)
525 return rewriteValueRISCV64_OpRISCV64SEQZ(v)
527 return rewriteValueRISCV64_OpRISCV64SLL(v)
529 return rewriteValueRISCV64_OpRISCV64SLLI(v)
531 return rewriteValueRISCV64_OpRISCV64SLT(v)
533 return rewriteValueRISCV64_OpRISCV64SLTI(v)
535 return rewriteValueRISCV64_OpRISCV64SLTIU(v)
537 return rewriteValueRISCV64_OpRISCV64SLTU(v)
539 return rewriteValueRISCV64_OpRISCV64SNEZ(v)
541 return rewriteValueRISCV64_OpRISCV64SRA(v)
543 return rewriteValueRISCV64_OpRISCV64SRAI(v)
545 return rewriteValueRISCV64_OpRISCV64SRL(v)
547 return rewriteValueRISCV64_OpRISCV64SRLI(v)
549 return rewriteValueRISCV64_OpRISCV64SRLW(v)
551 return rewriteValueRISCV64_OpRISCV64SUB(v)
553 return rewriteValueRISCV64_OpRISCV64SUBW(v)
555 return rewriteValueRISCV64_OpRISCV64XOR(v)
557 return rewriteValueRISCV64_OpRotateLeft16(v)
559 return rewriteValueRISCV64_OpRotateLeft32(v)
561 return rewriteValueRISCV64_OpRotateLeft64(v)
563 return rewriteValueRISCV64_OpRotateLeft8(v)
565 v.Op = OpRISCV64LoweredRound32F
568 v.Op = OpRISCV64LoweredRound64F
571 return rewriteValueRISCV64_OpRsh16Ux16(v)
573 return rewriteValueRISCV64_OpRsh16Ux32(v)
575 return rewriteValueRISCV64_OpRsh16Ux64(v)
577 return rewriteValueRISCV64_OpRsh16Ux8(v)
579 return rewriteValueRISCV64_OpRsh16x16(v)
581 return rewriteValueRISCV64_OpRsh16x32(v)
583 return rewriteValueRISCV64_OpRsh16x64(v)
585 return rewriteValueRISCV64_OpRsh16x8(v)
587 return rewriteValueRISCV64_OpRsh32Ux16(v)
589 return rewriteValueRISCV64_OpRsh32Ux32(v)
591 return rewriteValueRISCV64_OpRsh32Ux64(v)
593 return rewriteValueRISCV64_OpRsh32Ux8(v)
595 return rewriteValueRISCV64_OpRsh32x16(v)
597 return rewriteValueRISCV64_OpRsh32x32(v)
599 return rewriteValueRISCV64_OpRsh32x64(v)
601 return rewriteValueRISCV64_OpRsh32x8(v)
603 return rewriteValueRISCV64_OpRsh64Ux16(v)
605 return rewriteValueRISCV64_OpRsh64Ux32(v)
607 return rewriteValueRISCV64_OpRsh64Ux64(v)
609 return rewriteValueRISCV64_OpRsh64Ux8(v)
611 return rewriteValueRISCV64_OpRsh64x16(v)
613 return rewriteValueRISCV64_OpRsh64x32(v)
615 return rewriteValueRISCV64_OpRsh64x64(v)
617 return rewriteValueRISCV64_OpRsh64x8(v)
619 return rewriteValueRISCV64_OpRsh8Ux16(v)
621 return rewriteValueRISCV64_OpRsh8Ux32(v)
623 return rewriteValueRISCV64_OpRsh8Ux64(v)
625 return rewriteValueRISCV64_OpRsh8Ux8(v)
627 return rewriteValueRISCV64_OpRsh8x16(v)
629 return rewriteValueRISCV64_OpRsh8x32(v)
631 return rewriteValueRISCV64_OpRsh8x64(v)
633 return rewriteValueRISCV64_OpRsh8x8(v)
635 return rewriteValueRISCV64_OpSelect0(v)
637 return rewriteValueRISCV64_OpSelect1(v)
638 case OpSignExt16to32:
639 v.Op = OpRISCV64MOVHreg
641 case OpSignExt16to64:
642 v.Op = OpRISCV64MOVHreg
644 case OpSignExt32to64:
645 v.Op = OpRISCV64MOVWreg
648 v.Op = OpRISCV64MOVBreg
651 v.Op = OpRISCV64MOVBreg
654 v.Op = OpRISCV64MOVBreg
657 return rewriteValueRISCV64_OpSlicemask(v)
659 v.Op = OpRISCV64FSQRTD
662 v.Op = OpRISCV64FSQRTS
665 v.Op = OpRISCV64CALLstatic
668 return rewriteValueRISCV64_OpStore(v)
676 v.Op = OpRISCV64FSUBS
682 v.Op = OpRISCV64FSUBD
691 v.Op = OpRISCV64CALLtail
712 v.Op = OpRISCV64LoweredWB
727 return rewriteValueRISCV64_OpZero(v)
728 case OpZeroExt16to32:
729 v.Op = OpRISCV64MOVHUreg
731 case OpZeroExt16to64:
732 v.Op = OpRISCV64MOVHUreg
734 case OpZeroExt32to64:
735 v.Op = OpRISCV64MOVWUreg
738 v.Op = OpRISCV64MOVBUreg
741 v.Op = OpRISCV64MOVBUreg
744 v.Op = OpRISCV64MOVBUreg
749 func rewriteValueRISCV64_OpAddr(v *Value) bool {
751 // match: (Addr {sym} base)
752 // result: (MOVaddr {sym} [0] base)
754 sym := auxToSym(v.Aux)
756 v.reset(OpRISCV64MOVaddr)
757 v.AuxInt = int32ToAuxInt(0)
758 v.Aux = symToAux(sym)
763 func rewriteValueRISCV64_OpAtomicAnd8(v *Value) bool {
768 typ := &b.Func.Config.Types
769 // match: (AtomicAnd8 ptr val mem)
770 // result: (LoweredAtomicAnd32 (ANDI <typ.Uintptr> [^3] ptr) (NOT <typ.UInt32> (SLL <typ.UInt32> (XORI <typ.UInt32> [0xff] (ZeroExt8to32 val)) (SLLI <typ.UInt64> [3] (ANDI <typ.UInt64> [3] ptr)))) mem)
775 v.reset(OpRISCV64LoweredAtomicAnd32)
776 v0 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.Uintptr)
777 v0.AuxInt = int64ToAuxInt(^3)
779 v1 := b.NewValue0(v.Pos, OpRISCV64NOT, typ.UInt32)
780 v2 := b.NewValue0(v.Pos, OpRISCV64SLL, typ.UInt32)
781 v3 := b.NewValue0(v.Pos, OpRISCV64XORI, typ.UInt32)
782 v3.AuxInt = int64ToAuxInt(0xff)
783 v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
786 v5 := b.NewValue0(v.Pos, OpRISCV64SLLI, typ.UInt64)
787 v5.AuxInt = int64ToAuxInt(3)
788 v6 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.UInt64)
789 v6.AuxInt = int64ToAuxInt(3)
794 v.AddArg3(v0, v1, mem)
798 func rewriteValueRISCV64_OpAtomicCompareAndSwap32(v *Value) bool {
804 typ := &b.Func.Config.Types
805 // match: (AtomicCompareAndSwap32 ptr old new mem)
806 // result: (LoweredAtomicCas32 ptr (SignExt32to64 old) new mem)
812 v.reset(OpRISCV64LoweredAtomicCas32)
813 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
815 v.AddArg4(ptr, v0, new, mem)
819 func rewriteValueRISCV64_OpAtomicOr8(v *Value) bool {
824 typ := &b.Func.Config.Types
825 // match: (AtomicOr8 ptr val mem)
826 // result: (LoweredAtomicOr32 (ANDI <typ.Uintptr> [^3] ptr) (SLL <typ.UInt32> (ZeroExt8to32 val) (SLLI <typ.UInt64> [3] (ANDI <typ.UInt64> [3] ptr))) mem)
831 v.reset(OpRISCV64LoweredAtomicOr32)
832 v0 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.Uintptr)
833 v0.AuxInt = int64ToAuxInt(^3)
835 v1 := b.NewValue0(v.Pos, OpRISCV64SLL, typ.UInt32)
836 v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
838 v3 := b.NewValue0(v.Pos, OpRISCV64SLLI, typ.UInt64)
839 v3.AuxInt = int64ToAuxInt(3)
840 v4 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.UInt64)
841 v4.AuxInt = int64ToAuxInt(3)
845 v.AddArg3(v0, v1, mem)
849 func rewriteValueRISCV64_OpAvg64u(v *Value) bool {
853 // match: (Avg64u <t> x y)
854 // result: (ADD (ADD <t> (SRLI <t> [1] x) (SRLI <t> [1] y)) (ANDI <t> [1] (AND <t> x y)))
859 v.reset(OpRISCV64ADD)
860 v0 := b.NewValue0(v.Pos, OpRISCV64ADD, t)
861 v1 := b.NewValue0(v.Pos, OpRISCV64SRLI, t)
862 v1.AuxInt = int64ToAuxInt(1)
864 v2 := b.NewValue0(v.Pos, OpRISCV64SRLI, t)
865 v2.AuxInt = int64ToAuxInt(1)
868 v3 := b.NewValue0(v.Pos, OpRISCV64ANDI, t)
869 v3.AuxInt = int64ToAuxInt(1)
870 v4 := b.NewValue0(v.Pos, OpRISCV64AND, t)
877 func rewriteValueRISCV64_OpConst16(v *Value) bool {
878 // match: (Const16 [val])
879 // result: (MOVDconst [int64(val)])
881 val := auxIntToInt16(v.AuxInt)
882 v.reset(OpRISCV64MOVDconst)
883 v.AuxInt = int64ToAuxInt(int64(val))
887 func rewriteValueRISCV64_OpConst32(v *Value) bool {
888 // match: (Const32 [val])
889 // result: (MOVDconst [int64(val)])
891 val := auxIntToInt32(v.AuxInt)
892 v.reset(OpRISCV64MOVDconst)
893 v.AuxInt = int64ToAuxInt(int64(val))
897 func rewriteValueRISCV64_OpConst32F(v *Value) bool {
899 typ := &b.Func.Config.Types
900 // match: (Const32F [val])
901 // result: (FMVSX (MOVDconst [int64(math.Float32bits(val))]))
903 val := auxIntToFloat32(v.AuxInt)
904 v.reset(OpRISCV64FMVSX)
905 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
906 v0.AuxInt = int64ToAuxInt(int64(math.Float32bits(val)))
911 func rewriteValueRISCV64_OpConst64(v *Value) bool {
912 // match: (Const64 [val])
913 // result: (MOVDconst [int64(val)])
915 val := auxIntToInt64(v.AuxInt)
916 v.reset(OpRISCV64MOVDconst)
917 v.AuxInt = int64ToAuxInt(int64(val))
921 func rewriteValueRISCV64_OpConst64F(v *Value) bool {
923 typ := &b.Func.Config.Types
924 // match: (Const64F [val])
925 // result: (FMVDX (MOVDconst [int64(math.Float64bits(val))]))
927 val := auxIntToFloat64(v.AuxInt)
928 v.reset(OpRISCV64FMVDX)
929 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
930 v0.AuxInt = int64ToAuxInt(int64(math.Float64bits(val)))
935 func rewriteValueRISCV64_OpConst8(v *Value) bool {
936 // match: (Const8 [val])
937 // result: (MOVDconst [int64(val)])
939 val := auxIntToInt8(v.AuxInt)
940 v.reset(OpRISCV64MOVDconst)
941 v.AuxInt = int64ToAuxInt(int64(val))
945 func rewriteValueRISCV64_OpConstBool(v *Value) bool {
946 // match: (ConstBool [val])
947 // result: (MOVDconst [int64(b2i(val))])
949 val := auxIntToBool(v.AuxInt)
950 v.reset(OpRISCV64MOVDconst)
951 v.AuxInt = int64ToAuxInt(int64(b2i(val)))
955 func rewriteValueRISCV64_OpConstNil(v *Value) bool {
957 // result: (MOVDconst [0])
959 v.reset(OpRISCV64MOVDconst)
960 v.AuxInt = int64ToAuxInt(0)
964 func rewriteValueRISCV64_OpDiv16(v *Value) bool {
968 typ := &b.Func.Config.Types
969 // match: (Div16 x y [false])
970 // result: (DIVW (SignExt16to32 x) (SignExt16to32 y))
972 if auxIntToBool(v.AuxInt) != false {
977 v.reset(OpRISCV64DIVW)
978 v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
980 v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
987 func rewriteValueRISCV64_OpDiv16u(v *Value) bool {
991 typ := &b.Func.Config.Types
992 // match: (Div16u x y)
993 // result: (DIVUW (ZeroExt16to32 x) (ZeroExt16to32 y))
997 v.reset(OpRISCV64DIVUW)
998 v0 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
1000 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
1006 func rewriteValueRISCV64_OpDiv32(v *Value) bool {
1009 // match: (Div32 x y [false])
1010 // result: (DIVW x y)
1012 if auxIntToBool(v.AuxInt) != false {
1017 v.reset(OpRISCV64DIVW)
1023 func rewriteValueRISCV64_OpDiv64(v *Value) bool {
1026 // match: (Div64 x y [false])
1027 // result: (DIV x y)
1029 if auxIntToBool(v.AuxInt) != false {
1034 v.reset(OpRISCV64DIV)
1040 func rewriteValueRISCV64_OpDiv8(v *Value) bool {
1044 typ := &b.Func.Config.Types
1045 // match: (Div8 x y)
1046 // result: (DIVW (SignExt8to32 x) (SignExt8to32 y))
1050 v.reset(OpRISCV64DIVW)
1051 v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
1053 v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
1059 func rewriteValueRISCV64_OpDiv8u(v *Value) bool {
1063 typ := &b.Func.Config.Types
1064 // match: (Div8u x y)
1065 // result: (DIVUW (ZeroExt8to32 x) (ZeroExt8to32 y))
1069 v.reset(OpRISCV64DIVUW)
1070 v0 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
1072 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
1078 func rewriteValueRISCV64_OpEq16(v *Value) bool {
1082 typ := &b.Func.Config.Types
1083 // match: (Eq16 x y)
1084 // result: (SEQZ (SUB <x.Type> (ZeroExt16to64 x) (ZeroExt16to64 y)))
1088 v.reset(OpRISCV64SEQZ)
1089 v0 := b.NewValue0(v.Pos, OpRISCV64SUB, x.Type)
1090 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
1092 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
1099 func rewriteValueRISCV64_OpEq32(v *Value) bool {
1103 typ := &b.Func.Config.Types
1104 // match: (Eq32 x y)
1105 // cond: x.Type.IsSigned()
1106 // result: (SEQZ (SUB <x.Type> (SignExt32to64 x) (SignExt32to64 y)))
1108 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
1111 if !(x.Type.IsSigned()) {
1114 v.reset(OpRISCV64SEQZ)
1115 v0 := b.NewValue0(v.Pos, OpRISCV64SUB, x.Type)
1116 v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
1118 v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
1126 // match: (Eq32 x y)
1127 // cond: !x.Type.IsSigned()
1128 // result: (SEQZ (SUB <x.Type> (ZeroExt32to64 x) (ZeroExt32to64 y)))
1130 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
1133 if !(!x.Type.IsSigned()) {
1136 v.reset(OpRISCV64SEQZ)
1137 v0 := b.NewValue0(v.Pos, OpRISCV64SUB, x.Type)
1138 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1140 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1150 func rewriteValueRISCV64_OpEq64(v *Value) bool {
1154 // match: (Eq64 x y)
1155 // result: (SEQZ (SUB <x.Type> x y))
1159 v.reset(OpRISCV64SEQZ)
1160 v0 := b.NewValue0(v.Pos, OpRISCV64SUB, x.Type)
1166 func rewriteValueRISCV64_OpEq8(v *Value) bool {
1170 typ := &b.Func.Config.Types
1172 // result: (SEQZ (SUB <x.Type> (ZeroExt8to64 x) (ZeroExt8to64 y)))
1176 v.reset(OpRISCV64SEQZ)
1177 v0 := b.NewValue0(v.Pos, OpRISCV64SUB, x.Type)
1178 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
1180 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
1187 func rewriteValueRISCV64_OpEqB(v *Value) bool {
1191 typ := &b.Func.Config.Types
1193 // result: (SEQZ (SUB <typ.Bool> x y))
1197 v.reset(OpRISCV64SEQZ)
1198 v0 := b.NewValue0(v.Pos, OpRISCV64SUB, typ.Bool)
1204 func rewriteValueRISCV64_OpEqPtr(v *Value) bool {
1208 typ := &b.Func.Config.Types
1209 // match: (EqPtr x y)
1210 // result: (SEQZ (SUB <typ.Uintptr> x y))
1214 v.reset(OpRISCV64SEQZ)
1215 v0 := b.NewValue0(v.Pos, OpRISCV64SUB, typ.Uintptr)
1221 func rewriteValueRISCV64_OpHmul32(v *Value) bool {
1225 typ := &b.Func.Config.Types
1226 // match: (Hmul32 x y)
1227 // result: (SRAI [32] (MUL (SignExt32to64 x) (SignExt32to64 y)))
1231 v.reset(OpRISCV64SRAI)
1232 v.AuxInt = int64ToAuxInt(32)
1233 v0 := b.NewValue0(v.Pos, OpRISCV64MUL, typ.Int64)
1234 v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
1236 v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
1243 func rewriteValueRISCV64_OpHmul32u(v *Value) bool {
1247 typ := &b.Func.Config.Types
1248 // match: (Hmul32u x y)
1249 // result: (SRLI [32] (MUL (ZeroExt32to64 x) (ZeroExt32to64 y)))
1253 v.reset(OpRISCV64SRLI)
1254 v.AuxInt = int64ToAuxInt(32)
1255 v0 := b.NewValue0(v.Pos, OpRISCV64MUL, typ.Int64)
1256 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1258 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1265 func rewriteValueRISCV64_OpLeq16(v *Value) bool {
1269 typ := &b.Func.Config.Types
1270 // match: (Leq16 x y)
1271 // result: (Not (Less16 y x))
1276 v0 := b.NewValue0(v.Pos, OpLess16, typ.Bool)
1282 func rewriteValueRISCV64_OpLeq16U(v *Value) bool {
1286 typ := &b.Func.Config.Types
1287 // match: (Leq16U x y)
1288 // result: (Not (Less16U y x))
1293 v0 := b.NewValue0(v.Pos, OpLess16U, typ.Bool)
1299 func rewriteValueRISCV64_OpLeq32(v *Value) bool {
1303 typ := &b.Func.Config.Types
1304 // match: (Leq32 x y)
1305 // result: (Not (Less32 y x))
1310 v0 := b.NewValue0(v.Pos, OpLess32, typ.Bool)
1316 func rewriteValueRISCV64_OpLeq32U(v *Value) bool {
1320 typ := &b.Func.Config.Types
1321 // match: (Leq32U x y)
1322 // result: (Not (Less32U y x))
1327 v0 := b.NewValue0(v.Pos, OpLess32U, typ.Bool)
1333 func rewriteValueRISCV64_OpLeq64(v *Value) bool {
1337 typ := &b.Func.Config.Types
1338 // match: (Leq64 x y)
1339 // result: (Not (Less64 y x))
1344 v0 := b.NewValue0(v.Pos, OpLess64, typ.Bool)
1350 func rewriteValueRISCV64_OpLeq64U(v *Value) bool {
1354 typ := &b.Func.Config.Types
1355 // match: (Leq64U x y)
1356 // result: (Not (Less64U y x))
1361 v0 := b.NewValue0(v.Pos, OpLess64U, typ.Bool)
1367 func rewriteValueRISCV64_OpLeq8(v *Value) bool {
1371 typ := &b.Func.Config.Types
1372 // match: (Leq8 x y)
1373 // result: (Not (Less8 y x))
1378 v0 := b.NewValue0(v.Pos, OpLess8, typ.Bool)
1384 func rewriteValueRISCV64_OpLeq8U(v *Value) bool {
1388 typ := &b.Func.Config.Types
1389 // match: (Leq8U x y)
1390 // result: (Not (Less8U y x))
1395 v0 := b.NewValue0(v.Pos, OpLess8U, typ.Bool)
1401 func rewriteValueRISCV64_OpLess16(v *Value) bool {
1405 typ := &b.Func.Config.Types
1406 // match: (Less16 x y)
1407 // result: (SLT (SignExt16to64 x) (SignExt16to64 y))
1411 v.reset(OpRISCV64SLT)
1412 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
1414 v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
1420 func rewriteValueRISCV64_OpLess16U(v *Value) bool {
1424 typ := &b.Func.Config.Types
1425 // match: (Less16U x y)
1426 // result: (SLTU (ZeroExt16to64 x) (ZeroExt16to64 y))
1430 v.reset(OpRISCV64SLTU)
1431 v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
1433 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
1439 func rewriteValueRISCV64_OpLess32(v *Value) bool {
1443 typ := &b.Func.Config.Types
1444 // match: (Less32 x y)
1445 // result: (SLT (SignExt32to64 x) (SignExt32to64 y))
1449 v.reset(OpRISCV64SLT)
1450 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
1452 v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
1458 func rewriteValueRISCV64_OpLess32U(v *Value) bool {
1462 typ := &b.Func.Config.Types
1463 // match: (Less32U x y)
1464 // result: (SLTU (ZeroExt32to64 x) (ZeroExt32to64 y))
1468 v.reset(OpRISCV64SLTU)
1469 v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1471 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1477 func rewriteValueRISCV64_OpLess8(v *Value) bool {
1481 typ := &b.Func.Config.Types
1482 // match: (Less8 x y)
1483 // result: (SLT (SignExt8to64 x) (SignExt8to64 y))
1487 v.reset(OpRISCV64SLT)
1488 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
1490 v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
1496 func rewriteValueRISCV64_OpLess8U(v *Value) bool {
1500 typ := &b.Func.Config.Types
1501 // match: (Less8U x y)
1502 // result: (SLTU (ZeroExt8to64 x) (ZeroExt8to64 y))
1506 v.reset(OpRISCV64SLTU)
1507 v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
1509 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
1515 func rewriteValueRISCV64_OpLoad(v *Value) bool {
1518 // match: (Load <t> ptr mem)
1519 // cond: t.IsBoolean()
1520 // result: (MOVBUload ptr mem)
1525 if !(t.IsBoolean()) {
1528 v.reset(OpRISCV64MOVBUload)
1532 // match: (Load <t> ptr mem)
1533 // cond: ( is8BitInt(t) && t.IsSigned())
1534 // result: (MOVBload ptr mem)
1539 if !(is8BitInt(t) && t.IsSigned()) {
1542 v.reset(OpRISCV64MOVBload)
1546 // match: (Load <t> ptr mem)
1547 // cond: ( is8BitInt(t) && !t.IsSigned())
1548 // result: (MOVBUload ptr mem)
1553 if !(is8BitInt(t) && !t.IsSigned()) {
1556 v.reset(OpRISCV64MOVBUload)
1560 // match: (Load <t> ptr mem)
1561 // cond: (is16BitInt(t) && t.IsSigned())
1562 // result: (MOVHload ptr mem)
1567 if !(is16BitInt(t) && t.IsSigned()) {
1570 v.reset(OpRISCV64MOVHload)
1574 // match: (Load <t> ptr mem)
1575 // cond: (is16BitInt(t) && !t.IsSigned())
1576 // result: (MOVHUload ptr mem)
1581 if !(is16BitInt(t) && !t.IsSigned()) {
1584 v.reset(OpRISCV64MOVHUload)
1588 // match: (Load <t> ptr mem)
1589 // cond: (is32BitInt(t) && t.IsSigned())
1590 // result: (MOVWload ptr mem)
1595 if !(is32BitInt(t) && t.IsSigned()) {
1598 v.reset(OpRISCV64MOVWload)
1602 // match: (Load <t> ptr mem)
1603 // cond: (is32BitInt(t) && !t.IsSigned())
1604 // result: (MOVWUload ptr mem)
1609 if !(is32BitInt(t) && !t.IsSigned()) {
1612 v.reset(OpRISCV64MOVWUload)
1616 // match: (Load <t> ptr mem)
1617 // cond: (is64BitInt(t) || isPtr(t))
1618 // result: (MOVDload ptr mem)
1623 if !(is64BitInt(t) || isPtr(t)) {
1626 v.reset(OpRISCV64MOVDload)
1630 // match: (Load <t> ptr mem)
1631 // cond: is32BitFloat(t)
1632 // result: (FMOVWload ptr mem)
1637 if !(is32BitFloat(t)) {
1640 v.reset(OpRISCV64FMOVWload)
1644 // match: (Load <t> ptr mem)
1645 // cond: is64BitFloat(t)
1646 // result: (FMOVDload ptr mem)
1651 if !(is64BitFloat(t)) {
1654 v.reset(OpRISCV64FMOVDload)
1660 func rewriteValueRISCV64_OpLocalAddr(v *Value) bool {
1664 typ := &b.Func.Config.Types
1665 // match: (LocalAddr <t> {sym} base mem)
1666 // cond: t.Elem().HasPointers()
1667 // result: (MOVaddr {sym} (SPanchored base mem))
1670 sym := auxToSym(v.Aux)
1673 if !(t.Elem().HasPointers()) {
1676 v.reset(OpRISCV64MOVaddr)
1677 v.Aux = symToAux(sym)
1678 v0 := b.NewValue0(v.Pos, OpSPanchored, typ.Uintptr)
1679 v0.AddArg2(base, mem)
1683 // match: (LocalAddr <t> {sym} base _)
1684 // cond: !t.Elem().HasPointers()
1685 // result: (MOVaddr {sym} base)
1688 sym := auxToSym(v.Aux)
1690 if !(!t.Elem().HasPointers()) {
1693 v.reset(OpRISCV64MOVaddr)
1694 v.Aux = symToAux(sym)
1700 func rewriteValueRISCV64_OpLsh16x16(v *Value) bool {
1704 typ := &b.Func.Config.Types
1705 // match: (Lsh16x16 <t> x y)
1706 // cond: !shiftIsBounded(v)
1707 // result: (AND (SLL <t> x y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
1712 if !(!shiftIsBounded(v)) {
1715 v.reset(OpRISCV64AND)
1716 v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
1718 v1 := b.NewValue0(v.Pos, OpNeg16, t)
1719 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
1720 v2.AuxInt = int64ToAuxInt(64)
1721 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
1728 // match: (Lsh16x16 x y)
1729 // cond: shiftIsBounded(v)
1730 // result: (SLL x y)
1734 if !(shiftIsBounded(v)) {
1737 v.reset(OpRISCV64SLL)
1743 func rewriteValueRISCV64_OpLsh16x32(v *Value) bool {
1747 typ := &b.Func.Config.Types
1748 // match: (Lsh16x32 <t> x y)
1749 // cond: !shiftIsBounded(v)
1750 // result: (AND (SLL <t> x y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
1755 if !(!shiftIsBounded(v)) {
1758 v.reset(OpRISCV64AND)
1759 v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
1761 v1 := b.NewValue0(v.Pos, OpNeg16, t)
1762 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
1763 v2.AuxInt = int64ToAuxInt(64)
1764 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1771 // match: (Lsh16x32 x y)
1772 // cond: shiftIsBounded(v)
1773 // result: (SLL x y)
1777 if !(shiftIsBounded(v)) {
1780 v.reset(OpRISCV64SLL)
1786 func rewriteValueRISCV64_OpLsh16x64(v *Value) bool {
1790 // match: (Lsh16x64 <t> x y)
1791 // cond: !shiftIsBounded(v)
1792 // result: (AND (SLL <t> x y) (Neg16 <t> (SLTIU <t> [64] y)))
1797 if !(!shiftIsBounded(v)) {
1800 v.reset(OpRISCV64AND)
1801 v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
1803 v1 := b.NewValue0(v.Pos, OpNeg16, t)
1804 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
1805 v2.AuxInt = int64ToAuxInt(64)
1811 // match: (Lsh16x64 x y)
1812 // cond: shiftIsBounded(v)
1813 // result: (SLL x y)
1817 if !(shiftIsBounded(v)) {
1820 v.reset(OpRISCV64SLL)
1826 func rewriteValueRISCV64_OpLsh16x8(v *Value) bool {
1830 typ := &b.Func.Config.Types
1831 // match: (Lsh16x8 <t> x y)
1832 // cond: !shiftIsBounded(v)
1833 // result: (AND (SLL <t> x y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
1838 if !(!shiftIsBounded(v)) {
1841 v.reset(OpRISCV64AND)
1842 v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
1844 v1 := b.NewValue0(v.Pos, OpNeg16, t)
1845 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
1846 v2.AuxInt = int64ToAuxInt(64)
1847 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
1854 // match: (Lsh16x8 x y)
1855 // cond: shiftIsBounded(v)
1856 // result: (SLL x y)
1860 if !(shiftIsBounded(v)) {
1863 v.reset(OpRISCV64SLL)
1869 func rewriteValueRISCV64_OpLsh32x16(v *Value) bool {
1873 typ := &b.Func.Config.Types
1874 // match: (Lsh32x16 <t> x y)
1875 // cond: !shiftIsBounded(v)
1876 // result: (AND (SLL <t> x y) (Neg32 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
1881 if !(!shiftIsBounded(v)) {
1884 v.reset(OpRISCV64AND)
1885 v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
1887 v1 := b.NewValue0(v.Pos, OpNeg32, t)
1888 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
1889 v2.AuxInt = int64ToAuxInt(64)
1890 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
1897 // match: (Lsh32x16 x y)
1898 // cond: shiftIsBounded(v)
1899 // result: (SLL x y)
1903 if !(shiftIsBounded(v)) {
1906 v.reset(OpRISCV64SLL)
1912 func rewriteValueRISCV64_OpLsh32x32(v *Value) bool {
1916 typ := &b.Func.Config.Types
1917 // match: (Lsh32x32 <t> x y)
1918 // cond: !shiftIsBounded(v)
1919 // result: (AND (SLL <t> x y) (Neg32 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
1924 if !(!shiftIsBounded(v)) {
1927 v.reset(OpRISCV64AND)
1928 v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
1930 v1 := b.NewValue0(v.Pos, OpNeg32, t)
1931 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
1932 v2.AuxInt = int64ToAuxInt(64)
1933 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1940 // match: (Lsh32x32 x y)
1941 // cond: shiftIsBounded(v)
1942 // result: (SLL x y)
1946 if !(shiftIsBounded(v)) {
1949 v.reset(OpRISCV64SLL)
1955 func rewriteValueRISCV64_OpLsh32x64(v *Value) bool {
1959 // match: (Lsh32x64 <t> x y)
1960 // cond: !shiftIsBounded(v)
1961 // result: (AND (SLL <t> x y) (Neg32 <t> (SLTIU <t> [64] y)))
1966 if !(!shiftIsBounded(v)) {
1969 v.reset(OpRISCV64AND)
1970 v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
1972 v1 := b.NewValue0(v.Pos, OpNeg32, t)
1973 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
1974 v2.AuxInt = int64ToAuxInt(64)
1980 // match: (Lsh32x64 x y)
1981 // cond: shiftIsBounded(v)
1982 // result: (SLL x y)
1986 if !(shiftIsBounded(v)) {
1989 v.reset(OpRISCV64SLL)
1995 func rewriteValueRISCV64_OpLsh32x8(v *Value) bool {
1999 typ := &b.Func.Config.Types
2000 // match: (Lsh32x8 <t> x y)
2001 // cond: !shiftIsBounded(v)
2002 // result: (AND (SLL <t> x y) (Neg32 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
2007 if !(!shiftIsBounded(v)) {
2010 v.reset(OpRISCV64AND)
2011 v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
2013 v1 := b.NewValue0(v.Pos, OpNeg32, t)
2014 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
2015 v2.AuxInt = int64ToAuxInt(64)
2016 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
2023 // match: (Lsh32x8 x y)
2024 // cond: shiftIsBounded(v)
2025 // result: (SLL x y)
2029 if !(shiftIsBounded(v)) {
2032 v.reset(OpRISCV64SLL)
2038 func rewriteValueRISCV64_OpLsh64x16(v *Value) bool {
2042 typ := &b.Func.Config.Types
2043 // match: (Lsh64x16 <t> x y)
2044 // cond: !shiftIsBounded(v)
2045 // result: (AND (SLL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
2050 if !(!shiftIsBounded(v)) {
2053 v.reset(OpRISCV64AND)
2054 v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
2056 v1 := b.NewValue0(v.Pos, OpNeg64, t)
2057 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
2058 v2.AuxInt = int64ToAuxInt(64)
2059 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
2066 // match: (Lsh64x16 x y)
2067 // cond: shiftIsBounded(v)
2068 // result: (SLL x y)
2072 if !(shiftIsBounded(v)) {
2075 v.reset(OpRISCV64SLL)
2081 func rewriteValueRISCV64_OpLsh64x32(v *Value) bool {
2085 typ := &b.Func.Config.Types
2086 // match: (Lsh64x32 <t> x y)
2087 // cond: !shiftIsBounded(v)
2088 // result: (AND (SLL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
2093 if !(!shiftIsBounded(v)) {
2096 v.reset(OpRISCV64AND)
2097 v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
2099 v1 := b.NewValue0(v.Pos, OpNeg64, t)
2100 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
2101 v2.AuxInt = int64ToAuxInt(64)
2102 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
2109 // match: (Lsh64x32 x y)
2110 // cond: shiftIsBounded(v)
2111 // result: (SLL x y)
2115 if !(shiftIsBounded(v)) {
2118 v.reset(OpRISCV64SLL)
2124 func rewriteValueRISCV64_OpLsh64x64(v *Value) bool {
2128 // match: (Lsh64x64 <t> x y)
2129 // cond: !shiftIsBounded(v)
2130 // result: (AND (SLL <t> x y) (Neg64 <t> (SLTIU <t> [64] y)))
2135 if !(!shiftIsBounded(v)) {
2138 v.reset(OpRISCV64AND)
2139 v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
2141 v1 := b.NewValue0(v.Pos, OpNeg64, t)
2142 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
2143 v2.AuxInt = int64ToAuxInt(64)
2149 // match: (Lsh64x64 x y)
2150 // cond: shiftIsBounded(v)
2151 // result: (SLL x y)
2155 if !(shiftIsBounded(v)) {
2158 v.reset(OpRISCV64SLL)
2164 func rewriteValueRISCV64_OpLsh64x8(v *Value) bool {
2168 typ := &b.Func.Config.Types
2169 // match: (Lsh64x8 <t> x y)
2170 // cond: !shiftIsBounded(v)
2171 // result: (AND (SLL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
2176 if !(!shiftIsBounded(v)) {
2179 v.reset(OpRISCV64AND)
2180 v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
2182 v1 := b.NewValue0(v.Pos, OpNeg64, t)
2183 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
2184 v2.AuxInt = int64ToAuxInt(64)
2185 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
2192 // match: (Lsh64x8 x y)
2193 // cond: shiftIsBounded(v)
2194 // result: (SLL x y)
2198 if !(shiftIsBounded(v)) {
2201 v.reset(OpRISCV64SLL)
2207 func rewriteValueRISCV64_OpLsh8x16(v *Value) bool {
2211 typ := &b.Func.Config.Types
2212 // match: (Lsh8x16 <t> x y)
2213 // cond: !shiftIsBounded(v)
2214 // result: (AND (SLL <t> x y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
2219 if !(!shiftIsBounded(v)) {
2222 v.reset(OpRISCV64AND)
2223 v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
2225 v1 := b.NewValue0(v.Pos, OpNeg8, t)
2226 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
2227 v2.AuxInt = int64ToAuxInt(64)
2228 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
2235 // match: (Lsh8x16 x y)
2236 // cond: shiftIsBounded(v)
2237 // result: (SLL x y)
2241 if !(shiftIsBounded(v)) {
2244 v.reset(OpRISCV64SLL)
2250 func rewriteValueRISCV64_OpLsh8x32(v *Value) bool {
2254 typ := &b.Func.Config.Types
2255 // match: (Lsh8x32 <t> x y)
2256 // cond: !shiftIsBounded(v)
2257 // result: (AND (SLL <t> x y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
2262 if !(!shiftIsBounded(v)) {
2265 v.reset(OpRISCV64AND)
2266 v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
2268 v1 := b.NewValue0(v.Pos, OpNeg8, t)
2269 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
2270 v2.AuxInt = int64ToAuxInt(64)
2271 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
2278 // match: (Lsh8x32 x y)
2279 // cond: shiftIsBounded(v)
2280 // result: (SLL x y)
2284 if !(shiftIsBounded(v)) {
2287 v.reset(OpRISCV64SLL)
2293 func rewriteValueRISCV64_OpLsh8x64(v *Value) bool {
2297 // match: (Lsh8x64 <t> x y)
2298 // cond: !shiftIsBounded(v)
2299 // result: (AND (SLL <t> x y) (Neg8 <t> (SLTIU <t> [64] y)))
2304 if !(!shiftIsBounded(v)) {
2307 v.reset(OpRISCV64AND)
2308 v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
2310 v1 := b.NewValue0(v.Pos, OpNeg8, t)
2311 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
2312 v2.AuxInt = int64ToAuxInt(64)
2318 // match: (Lsh8x64 x y)
2319 // cond: shiftIsBounded(v)
2320 // result: (SLL x y)
2324 if !(shiftIsBounded(v)) {
2327 v.reset(OpRISCV64SLL)
2333 func rewriteValueRISCV64_OpLsh8x8(v *Value) bool {
2337 typ := &b.Func.Config.Types
2338 // match: (Lsh8x8 <t> x y)
2339 // cond: !shiftIsBounded(v)
2340 // result: (AND (SLL <t> x y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
2345 if !(!shiftIsBounded(v)) {
2348 v.reset(OpRISCV64AND)
2349 v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
2351 v1 := b.NewValue0(v.Pos, OpNeg8, t)
2352 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
2353 v2.AuxInt = int64ToAuxInt(64)
2354 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
2361 // match: (Lsh8x8 x y)
2362 // cond: shiftIsBounded(v)
2363 // result: (SLL x y)
2367 if !(shiftIsBounded(v)) {
2370 v.reset(OpRISCV64SLL)
2376 func rewriteValueRISCV64_OpMod16(v *Value) bool {
2380 typ := &b.Func.Config.Types
2381 // match: (Mod16 x y [false])
2382 // result: (REMW (SignExt16to32 x) (SignExt16to32 y))
2384 if auxIntToBool(v.AuxInt) != false {
2389 v.reset(OpRISCV64REMW)
2390 v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
2392 v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
2399 func rewriteValueRISCV64_OpMod16u(v *Value) bool {
2403 typ := &b.Func.Config.Types
2404 // match: (Mod16u x y)
2405 // result: (REMUW (ZeroExt16to32 x) (ZeroExt16to32 y))
2409 v.reset(OpRISCV64REMUW)
2410 v0 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
2412 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
2418 func rewriteValueRISCV64_OpMod32(v *Value) bool {
2421 // match: (Mod32 x y [false])
2422 // result: (REMW x y)
2424 if auxIntToBool(v.AuxInt) != false {
2429 v.reset(OpRISCV64REMW)
2435 func rewriteValueRISCV64_OpMod64(v *Value) bool {
2438 // match: (Mod64 x y [false])
2439 // result: (REM x y)
2441 if auxIntToBool(v.AuxInt) != false {
2446 v.reset(OpRISCV64REM)
2452 func rewriteValueRISCV64_OpMod8(v *Value) bool {
2456 typ := &b.Func.Config.Types
2457 // match: (Mod8 x y)
2458 // result: (REMW (SignExt8to32 x) (SignExt8to32 y))
2462 v.reset(OpRISCV64REMW)
2463 v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
2465 v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
2471 func rewriteValueRISCV64_OpMod8u(v *Value) bool {
2475 typ := &b.Func.Config.Types
2476 // match: (Mod8u x y)
2477 // result: (REMUW (ZeroExt8to32 x) (ZeroExt8to32 y))
2481 v.reset(OpRISCV64REMUW)
2482 v0 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
2484 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
2490 func rewriteValueRISCV64_OpMove(v *Value) bool {
2495 config := b.Func.Config
2496 typ := &b.Func.Config.Types
2497 // match: (Move [0] _ _ mem)
2500 if auxIntToInt64(v.AuxInt) != 0 {
2507 // match: (Move [1] dst src mem)
2508 // result: (MOVBstore dst (MOVBload src mem) mem)
2510 if auxIntToInt64(v.AuxInt) != 1 {
2516 v.reset(OpRISCV64MOVBstore)
2517 v0 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
2518 v0.AddArg2(src, mem)
2519 v.AddArg3(dst, v0, mem)
2522 // match: (Move [2] {t} dst src mem)
2523 // cond: t.Alignment()%2 == 0
2524 // result: (MOVHstore dst (MOVHload src mem) mem)
2526 if auxIntToInt64(v.AuxInt) != 2 {
2529 t := auxToType(v.Aux)
2533 if !(t.Alignment()%2 == 0) {
2536 v.reset(OpRISCV64MOVHstore)
2537 v0 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
2538 v0.AddArg2(src, mem)
2539 v.AddArg3(dst, v0, mem)
2542 // match: (Move [2] dst src mem)
2543 // result: (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem))
2545 if auxIntToInt64(v.AuxInt) != 2 {
2551 v.reset(OpRISCV64MOVBstore)
2552 v.AuxInt = int32ToAuxInt(1)
2553 v0 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
2554 v0.AuxInt = int32ToAuxInt(1)
2555 v0.AddArg2(src, mem)
2556 v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
2557 v2 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
2558 v2.AddArg2(src, mem)
2559 v1.AddArg3(dst, v2, mem)
2560 v.AddArg3(dst, v0, v1)
2563 // match: (Move [4] {t} dst src mem)
2564 // cond: t.Alignment()%4 == 0
2565 // result: (MOVWstore dst (MOVWload src mem) mem)
2567 if auxIntToInt64(v.AuxInt) != 4 {
2570 t := auxToType(v.Aux)
2574 if !(t.Alignment()%4 == 0) {
2577 v.reset(OpRISCV64MOVWstore)
2578 v0 := b.NewValue0(v.Pos, OpRISCV64MOVWload, typ.Int32)
2579 v0.AddArg2(src, mem)
2580 v.AddArg3(dst, v0, mem)
2583 // match: (Move [4] {t} dst src mem)
2584 // cond: t.Alignment()%2 == 0
2585 // result: (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem))
2587 if auxIntToInt64(v.AuxInt) != 4 {
2590 t := auxToType(v.Aux)
2594 if !(t.Alignment()%2 == 0) {
2597 v.reset(OpRISCV64MOVHstore)
2598 v.AuxInt = int32ToAuxInt(2)
2599 v0 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
2600 v0.AuxInt = int32ToAuxInt(2)
2601 v0.AddArg2(src, mem)
2602 v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
2603 v2 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
2604 v2.AddArg2(src, mem)
2605 v1.AddArg3(dst, v2, mem)
2606 v.AddArg3(dst, v0, v1)
2609 // match: (Move [4] dst src mem)
2610 // result: (MOVBstore [3] dst (MOVBload [3] src mem) (MOVBstore [2] dst (MOVBload [2] src mem) (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem))))
2612 if auxIntToInt64(v.AuxInt) != 4 {
2618 v.reset(OpRISCV64MOVBstore)
2619 v.AuxInt = int32ToAuxInt(3)
2620 v0 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
2621 v0.AuxInt = int32ToAuxInt(3)
2622 v0.AddArg2(src, mem)
2623 v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
2624 v1.AuxInt = int32ToAuxInt(2)
2625 v2 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
2626 v2.AuxInt = int32ToAuxInt(2)
2627 v2.AddArg2(src, mem)
2628 v3 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
2629 v3.AuxInt = int32ToAuxInt(1)
2630 v4 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
2631 v4.AuxInt = int32ToAuxInt(1)
2632 v4.AddArg2(src, mem)
2633 v5 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
2634 v6 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
2635 v6.AddArg2(src, mem)
2636 v5.AddArg3(dst, v6, mem)
2637 v3.AddArg3(dst, v4, v5)
2638 v1.AddArg3(dst, v2, v3)
2639 v.AddArg3(dst, v0, v1)
2642 // match: (Move [8] {t} dst src mem)
2643 // cond: t.Alignment()%8 == 0
2644 // result: (MOVDstore dst (MOVDload src mem) mem)
2646 if auxIntToInt64(v.AuxInt) != 8 {
2649 t := auxToType(v.Aux)
2653 if !(t.Alignment()%8 == 0) {
2656 v.reset(OpRISCV64MOVDstore)
2657 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
2658 v0.AddArg2(src, mem)
2659 v.AddArg3(dst, v0, mem)
2662 // match: (Move [8] {t} dst src mem)
2663 // cond: t.Alignment()%4 == 0
2664 // result: (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem))
2666 if auxIntToInt64(v.AuxInt) != 8 {
2669 t := auxToType(v.Aux)
2673 if !(t.Alignment()%4 == 0) {
2676 v.reset(OpRISCV64MOVWstore)
2677 v.AuxInt = int32ToAuxInt(4)
2678 v0 := b.NewValue0(v.Pos, OpRISCV64MOVWload, typ.Int32)
2679 v0.AuxInt = int32ToAuxInt(4)
2680 v0.AddArg2(src, mem)
2681 v1 := b.NewValue0(v.Pos, OpRISCV64MOVWstore, types.TypeMem)
2682 v2 := b.NewValue0(v.Pos, OpRISCV64MOVWload, typ.Int32)
2683 v2.AddArg2(src, mem)
2684 v1.AddArg3(dst, v2, mem)
2685 v.AddArg3(dst, v0, v1)
2688 // match: (Move [8] {t} dst src mem)
2689 // cond: t.Alignment()%2 == 0
2690 // result: (MOVHstore [6] dst (MOVHload [6] src mem) (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem))))
2692 if auxIntToInt64(v.AuxInt) != 8 {
2695 t := auxToType(v.Aux)
2699 if !(t.Alignment()%2 == 0) {
2702 v.reset(OpRISCV64MOVHstore)
2703 v.AuxInt = int32ToAuxInt(6)
2704 v0 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
2705 v0.AuxInt = int32ToAuxInt(6)
2706 v0.AddArg2(src, mem)
2707 v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
2708 v1.AuxInt = int32ToAuxInt(4)
2709 v2 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
2710 v2.AuxInt = int32ToAuxInt(4)
2711 v2.AddArg2(src, mem)
2712 v3 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
2713 v3.AuxInt = int32ToAuxInt(2)
2714 v4 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
2715 v4.AuxInt = int32ToAuxInt(2)
2716 v4.AddArg2(src, mem)
2717 v5 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
2718 v6 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
2719 v6.AddArg2(src, mem)
2720 v5.AddArg3(dst, v6, mem)
2721 v3.AddArg3(dst, v4, v5)
2722 v1.AddArg3(dst, v2, v3)
2723 v.AddArg3(dst, v0, v1)
2726 // match: (Move [3] dst src mem)
2727 // result: (MOVBstore [2] dst (MOVBload [2] src mem) (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem)))
2729 if auxIntToInt64(v.AuxInt) != 3 {
2735 v.reset(OpRISCV64MOVBstore)
2736 v.AuxInt = int32ToAuxInt(2)
2737 v0 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
2738 v0.AuxInt = int32ToAuxInt(2)
2739 v0.AddArg2(src, mem)
2740 v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
2741 v1.AuxInt = int32ToAuxInt(1)
2742 v2 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
2743 v2.AuxInt = int32ToAuxInt(1)
2744 v2.AddArg2(src, mem)
2745 v3 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
2746 v4 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
2747 v4.AddArg2(src, mem)
2748 v3.AddArg3(dst, v4, mem)
2749 v1.AddArg3(dst, v2, v3)
2750 v.AddArg3(dst, v0, v1)
2753 // match: (Move [6] {t} dst src mem)
2754 // cond: t.Alignment()%2 == 0
2755 // result: (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem)))
2757 if auxIntToInt64(v.AuxInt) != 6 {
2760 t := auxToType(v.Aux)
2764 if !(t.Alignment()%2 == 0) {
2767 v.reset(OpRISCV64MOVHstore)
2768 v.AuxInt = int32ToAuxInt(4)
2769 v0 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
2770 v0.AuxInt = int32ToAuxInt(4)
2771 v0.AddArg2(src, mem)
2772 v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
2773 v1.AuxInt = int32ToAuxInt(2)
2774 v2 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
2775 v2.AuxInt = int32ToAuxInt(2)
2776 v2.AddArg2(src, mem)
2777 v3 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
2778 v4 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
2779 v4.AddArg2(src, mem)
2780 v3.AddArg3(dst, v4, mem)
2781 v1.AddArg3(dst, v2, v3)
2782 v.AddArg3(dst, v0, v1)
2785 // match: (Move [12] {t} dst src mem)
2786 // cond: t.Alignment()%4 == 0
2787 // result: (MOVWstore [8] dst (MOVWload [8] src mem) (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem)))
2789 if auxIntToInt64(v.AuxInt) != 12 {
2792 t := auxToType(v.Aux)
2796 if !(t.Alignment()%4 == 0) {
2799 v.reset(OpRISCV64MOVWstore)
2800 v.AuxInt = int32ToAuxInt(8)
2801 v0 := b.NewValue0(v.Pos, OpRISCV64MOVWload, typ.Int32)
2802 v0.AuxInt = int32ToAuxInt(8)
2803 v0.AddArg2(src, mem)
2804 v1 := b.NewValue0(v.Pos, OpRISCV64MOVWstore, types.TypeMem)
2805 v1.AuxInt = int32ToAuxInt(4)
2806 v2 := b.NewValue0(v.Pos, OpRISCV64MOVWload, typ.Int32)
2807 v2.AuxInt = int32ToAuxInt(4)
2808 v2.AddArg2(src, mem)
2809 v3 := b.NewValue0(v.Pos, OpRISCV64MOVWstore, types.TypeMem)
2810 v4 := b.NewValue0(v.Pos, OpRISCV64MOVWload, typ.Int32)
2811 v4.AddArg2(src, mem)
2812 v3.AddArg3(dst, v4, mem)
2813 v1.AddArg3(dst, v2, v3)
2814 v.AddArg3(dst, v0, v1)
2817 // match: (Move [16] {t} dst src mem)
2818 // cond: t.Alignment()%8 == 0
2819 // result: (MOVDstore [8] dst (MOVDload [8] src mem) (MOVDstore dst (MOVDload src mem) mem))
2821 if auxIntToInt64(v.AuxInt) != 16 {
2824 t := auxToType(v.Aux)
2828 if !(t.Alignment()%8 == 0) {
2831 v.reset(OpRISCV64MOVDstore)
2832 v.AuxInt = int32ToAuxInt(8)
2833 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
2834 v0.AuxInt = int32ToAuxInt(8)
2835 v0.AddArg2(src, mem)
2836 v1 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
2837 v2 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
2838 v2.AddArg2(src, mem)
2839 v1.AddArg3(dst, v2, mem)
2840 v.AddArg3(dst, v0, v1)
2843 // match: (Move [24] {t} dst src mem)
2844 // cond: t.Alignment()%8 == 0
2845 // result: (MOVDstore [16] dst (MOVDload [16] src mem) (MOVDstore [8] dst (MOVDload [8] src mem) (MOVDstore dst (MOVDload src mem) mem)))
2847 if auxIntToInt64(v.AuxInt) != 24 {
2850 t := auxToType(v.Aux)
2854 if !(t.Alignment()%8 == 0) {
2857 v.reset(OpRISCV64MOVDstore)
2858 v.AuxInt = int32ToAuxInt(16)
2859 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
2860 v0.AuxInt = int32ToAuxInt(16)
2861 v0.AddArg2(src, mem)
2862 v1 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
2863 v1.AuxInt = int32ToAuxInt(8)
2864 v2 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
2865 v2.AuxInt = int32ToAuxInt(8)
2866 v2.AddArg2(src, mem)
2867 v3 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
2868 v4 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
2869 v4.AddArg2(src, mem)
2870 v3.AddArg3(dst, v4, mem)
2871 v1.AddArg3(dst, v2, v3)
2872 v.AddArg3(dst, v0, v1)
2875 // match: (Move [32] {t} dst src mem)
2876 // cond: t.Alignment()%8 == 0
2877 // result: (MOVDstore [24] dst (MOVDload [24] src mem) (MOVDstore [16] dst (MOVDload [16] src mem) (MOVDstore [8] dst (MOVDload [8] src mem) (MOVDstore dst (MOVDload src mem) mem))))
2879 if auxIntToInt64(v.AuxInt) != 32 {
2882 t := auxToType(v.Aux)
2886 if !(t.Alignment()%8 == 0) {
2889 v.reset(OpRISCV64MOVDstore)
2890 v.AuxInt = int32ToAuxInt(24)
2891 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
2892 v0.AuxInt = int32ToAuxInt(24)
2893 v0.AddArg2(src, mem)
2894 v1 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
2895 v1.AuxInt = int32ToAuxInt(16)
2896 v2 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
2897 v2.AuxInt = int32ToAuxInt(16)
2898 v2.AddArg2(src, mem)
2899 v3 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
2900 v3.AuxInt = int32ToAuxInt(8)
2901 v4 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
2902 v4.AuxInt = int32ToAuxInt(8)
2903 v4.AddArg2(src, mem)
2904 v5 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
2905 v6 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
2906 v6.AddArg2(src, mem)
2907 v5.AddArg3(dst, v6, mem)
2908 v3.AddArg3(dst, v4, v5)
2909 v1.AddArg3(dst, v2, v3)
2910 v.AddArg3(dst, v0, v1)
2913 // match: (Move [s] {t} dst src mem)
2914 // cond: s%8 == 0 && s <= 8*128 && t.Alignment()%8 == 0 && !config.noDuffDevice && logLargeCopy(v, s)
2915 // result: (DUFFCOPY [16 * (128 - s/8)] dst src mem)
2917 s := auxIntToInt64(v.AuxInt)
2918 t := auxToType(v.Aux)
2922 if !(s%8 == 0 && s <= 8*128 && t.Alignment()%8 == 0 && !config.noDuffDevice && logLargeCopy(v, s)) {
2925 v.reset(OpRISCV64DUFFCOPY)
2926 v.AuxInt = int64ToAuxInt(16 * (128 - s/8))
2927 v.AddArg3(dst, src, mem)
2930 // match: (Move [s] {t} dst src mem)
2931 // cond: (s <= 16 || logLargeCopy(v, s))
2932 // result: (LoweredMove [t.Alignment()] dst src (ADDI <src.Type> [s-moveSize(t.Alignment(), config)] src) mem)
2934 s := auxIntToInt64(v.AuxInt)
2935 t := auxToType(v.Aux)
2939 if !(s <= 16 || logLargeCopy(v, s)) {
2942 v.reset(OpRISCV64LoweredMove)
2943 v.AuxInt = int64ToAuxInt(t.Alignment())
2944 v0 := b.NewValue0(v.Pos, OpRISCV64ADDI, src.Type)
2945 v0.AuxInt = int64ToAuxInt(s - moveSize(t.Alignment(), config))
2947 v.AddArg4(dst, src, v0, mem)
2952 func rewriteValueRISCV64_OpMul16(v *Value) bool {
2956 typ := &b.Func.Config.Types
2957 // match: (Mul16 x y)
2958 // result: (MULW (SignExt16to32 x) (SignExt16to32 y))
2962 v.reset(OpRISCV64MULW)
2963 v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
2965 v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
2971 func rewriteValueRISCV64_OpMul8(v *Value) bool {
2975 typ := &b.Func.Config.Types
2976 // match: (Mul8 x y)
2977 // result: (MULW (SignExt8to32 x) (SignExt8to32 y))
2981 v.reset(OpRISCV64MULW)
2982 v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
2984 v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
2990 func rewriteValueRISCV64_OpNeq16(v *Value) bool {
2994 typ := &b.Func.Config.Types
2995 // match: (Neq16 x y)
2996 // result: (Not (Eq16 x y))
3001 v0 := b.NewValue0(v.Pos, OpEq16, typ.Bool)
3007 func rewriteValueRISCV64_OpNeq32(v *Value) bool {
3011 typ := &b.Func.Config.Types
3012 // match: (Neq32 x y)
3013 // result: (Not (Eq32 x y))
3018 v0 := b.NewValue0(v.Pos, OpEq32, typ.Bool)
3024 func rewriteValueRISCV64_OpNeq64(v *Value) bool {
3028 typ := &b.Func.Config.Types
3029 // match: (Neq64 x y)
3030 // result: (Not (Eq64 x y))
3035 v0 := b.NewValue0(v.Pos, OpEq64, typ.Bool)
3041 func rewriteValueRISCV64_OpNeq8(v *Value) bool {
3045 typ := &b.Func.Config.Types
3046 // match: (Neq8 x y)
3047 // result: (Not (Eq8 x y))
3052 v0 := b.NewValue0(v.Pos, OpEq8, typ.Bool)
3058 func rewriteValueRISCV64_OpNeqB(v *Value) bool {
3062 typ := &b.Func.Config.Types
3063 // match: (NeqB x y)
3064 // result: (SNEZ (SUB <typ.Bool> x y))
3068 v.reset(OpRISCV64SNEZ)
3069 v0 := b.NewValue0(v.Pos, OpRISCV64SUB, typ.Bool)
3075 func rewriteValueRISCV64_OpNeqPtr(v *Value) bool {
3079 typ := &b.Func.Config.Types
3080 // match: (NeqPtr x y)
3081 // result: (Not (EqPtr x y))
3086 v0 := b.NewValue0(v.Pos, OpEqPtr, typ.Bool)
3092 func rewriteValueRISCV64_OpOffPtr(v *Value) bool {
3095 typ := &b.Func.Config.Types
3096 // match: (OffPtr [off] ptr:(SP))
3097 // cond: is32Bit(off)
3098 // result: (MOVaddr [int32(off)] ptr)
3100 off := auxIntToInt64(v.AuxInt)
3102 if ptr.Op != OpSP || !(is32Bit(off)) {
3105 v.reset(OpRISCV64MOVaddr)
3106 v.AuxInt = int32ToAuxInt(int32(off))
3110 // match: (OffPtr [off] ptr)
3111 // cond: is32Bit(off)
3112 // result: (ADDI [off] ptr)
3114 off := auxIntToInt64(v.AuxInt)
3116 if !(is32Bit(off)) {
3119 v.reset(OpRISCV64ADDI)
3120 v.AuxInt = int64ToAuxInt(off)
3124 // match: (OffPtr [off] ptr)
3125 // result: (ADD (MOVDconst [off]) ptr)
3127 off := auxIntToInt64(v.AuxInt)
3129 v.reset(OpRISCV64ADD)
3130 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
3131 v0.AuxInt = int64ToAuxInt(off)
3136 func rewriteValueRISCV64_OpPanicBounds(v *Value) bool {
3140 // match: (PanicBounds [kind] x y mem)
3141 // cond: boundsABI(kind) == 0
3142 // result: (LoweredPanicBoundsA [kind] x y mem)
3144 kind := auxIntToInt64(v.AuxInt)
3148 if !(boundsABI(kind) == 0) {
3151 v.reset(OpRISCV64LoweredPanicBoundsA)
3152 v.AuxInt = int64ToAuxInt(kind)
3153 v.AddArg3(x, y, mem)
3156 // match: (PanicBounds [kind] x y mem)
3157 // cond: boundsABI(kind) == 1
3158 // result: (LoweredPanicBoundsB [kind] x y mem)
3160 kind := auxIntToInt64(v.AuxInt)
3164 if !(boundsABI(kind) == 1) {
3167 v.reset(OpRISCV64LoweredPanicBoundsB)
3168 v.AuxInt = int64ToAuxInt(kind)
3169 v.AddArg3(x, y, mem)
3172 // match: (PanicBounds [kind] x y mem)
3173 // cond: boundsABI(kind) == 2
3174 // result: (LoweredPanicBoundsC [kind] x y mem)
3176 kind := auxIntToInt64(v.AuxInt)
3180 if !(boundsABI(kind) == 2) {
3183 v.reset(OpRISCV64LoweredPanicBoundsC)
3184 v.AuxInt = int64ToAuxInt(kind)
3185 v.AddArg3(x, y, mem)
3190 func rewriteValueRISCV64_OpRISCV64ADD(v *Value) bool {
3193 // match: (ADD (MOVDconst <t> [val]) x)
3194 // cond: is32Bit(val) && !t.IsPtr()
3195 // result: (ADDI [val] x)
3197 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
3198 if v_0.Op != OpRISCV64MOVDconst {
3202 val := auxIntToInt64(v_0.AuxInt)
3204 if !(is32Bit(val) && !t.IsPtr()) {
3207 v.reset(OpRISCV64ADDI)
3208 v.AuxInt = int64ToAuxInt(val)
3216 func rewriteValueRISCV64_OpRISCV64ADDI(v *Value) bool {
3218 // match: (ADDI [c] (MOVaddr [d] {s} x))
3219 // cond: is32Bit(c+int64(d))
3220 // result: (MOVaddr [int32(c)+d] {s} x)
3222 c := auxIntToInt64(v.AuxInt)
3223 if v_0.Op != OpRISCV64MOVaddr {
3226 d := auxIntToInt32(v_0.AuxInt)
3227 s := auxToSym(v_0.Aux)
3229 if !(is32Bit(c + int64(d))) {
3232 v.reset(OpRISCV64MOVaddr)
3233 v.AuxInt = int32ToAuxInt(int32(c) + d)
3238 // match: (ADDI [0] x)
3241 if auxIntToInt64(v.AuxInt) != 0 {
3248 // match: (ADDI [x] (MOVDconst [y]))
3249 // cond: is32Bit(x + y)
3250 // result: (MOVDconst [x + y])
3252 x := auxIntToInt64(v.AuxInt)
3253 if v_0.Op != OpRISCV64MOVDconst {
3256 y := auxIntToInt64(v_0.AuxInt)
3257 if !(is32Bit(x + y)) {
3260 v.reset(OpRISCV64MOVDconst)
3261 v.AuxInt = int64ToAuxInt(x + y)
3264 // match: (ADDI [x] (ADDI [y] z))
3265 // cond: is32Bit(x + y)
3266 // result: (ADDI [x + y] z)
3268 x := auxIntToInt64(v.AuxInt)
3269 if v_0.Op != OpRISCV64ADDI {
3272 y := auxIntToInt64(v_0.AuxInt)
3274 if !(is32Bit(x + y)) {
3277 v.reset(OpRISCV64ADDI)
3278 v.AuxInt = int64ToAuxInt(x + y)
3284 func rewriteValueRISCV64_OpRISCV64AND(v *Value) bool {
3287 // match: (AND (MOVDconst [val]) x)
3288 // cond: is32Bit(val)
3289 // result: (ANDI [val] x)
3291 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
3292 if v_0.Op != OpRISCV64MOVDconst {
3295 val := auxIntToInt64(v_0.AuxInt)
3297 if !(is32Bit(val)) {
3300 v.reset(OpRISCV64ANDI)
3301 v.AuxInt = int64ToAuxInt(val)
3309 func rewriteValueRISCV64_OpRISCV64ANDI(v *Value) bool {
3311 // match: (ANDI [0] x)
3312 // result: (MOVDconst [0])
3314 if auxIntToInt64(v.AuxInt) != 0 {
3317 v.reset(OpRISCV64MOVDconst)
3318 v.AuxInt = int64ToAuxInt(0)
3321 // match: (ANDI [-1] x)
3324 if auxIntToInt64(v.AuxInt) != -1 {
3331 // match: (ANDI [x] (MOVDconst [y]))
3332 // result: (MOVDconst [x & y])
3334 x := auxIntToInt64(v.AuxInt)
3335 if v_0.Op != OpRISCV64MOVDconst {
3338 y := auxIntToInt64(v_0.AuxInt)
3339 v.reset(OpRISCV64MOVDconst)
3340 v.AuxInt = int64ToAuxInt(x & y)
3343 // match: (ANDI [x] (ANDI [y] z))
3344 // result: (ANDI [x & y] z)
3346 x := auxIntToInt64(v.AuxInt)
3347 if v_0.Op != OpRISCV64ANDI {
3350 y := auxIntToInt64(v_0.AuxInt)
3352 v.reset(OpRISCV64ANDI)
3353 v.AuxInt = int64ToAuxInt(x & y)
3359 func rewriteValueRISCV64_OpRISCV64FADDD(v *Value) bool {
3362 // match: (FADDD a (FMULD x y))
3363 // cond: a.Block.Func.useFMA(v)
3364 // result: (FMADDD x y a)
3366 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
3368 if v_1.Op != OpRISCV64FMULD {
3373 if !(a.Block.Func.useFMA(v)) {
3376 v.reset(OpRISCV64FMADDD)
3384 func rewriteValueRISCV64_OpRISCV64FADDS(v *Value) bool {
3387 // match: (FADDS a (FMULS x y))
3388 // cond: a.Block.Func.useFMA(v)
3389 // result: (FMADDS x y a)
3391 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
3393 if v_1.Op != OpRISCV64FMULS {
3398 if !(a.Block.Func.useFMA(v)) {
3401 v.reset(OpRISCV64FMADDS)
3409 func rewriteValueRISCV64_OpRISCV64FMADDD(v *Value) bool {
3413 // match: (FMADDD neg:(FNEGD x) y z)
3414 // cond: neg.Uses == 1
3415 // result: (FNMSUBD x y z)
3417 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
3419 if neg.Op != OpRISCV64FNEGD {
3425 if !(neg.Uses == 1) {
3428 v.reset(OpRISCV64FNMSUBD)
3434 // match: (FMADDD x y neg:(FNEGD z))
3435 // cond: neg.Uses == 1
3436 // result: (FMSUBD x y z)
3441 if neg.Op != OpRISCV64FNEGD {
3445 if !(neg.Uses == 1) {
3448 v.reset(OpRISCV64FMSUBD)
3454 func rewriteValueRISCV64_OpRISCV64FMADDS(v *Value) bool {
3458 // match: (FMADDS neg:(FNEGS x) y z)
3459 // cond: neg.Uses == 1
3460 // result: (FNMSUBS x y z)
3462 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
3464 if neg.Op != OpRISCV64FNEGS {
3470 if !(neg.Uses == 1) {
3473 v.reset(OpRISCV64FNMSUBS)
3479 // match: (FMADDS x y neg:(FNEGS z))
3480 // cond: neg.Uses == 1
3481 // result: (FMSUBS x y z)
3486 if neg.Op != OpRISCV64FNEGS {
3490 if !(neg.Uses == 1) {
3493 v.reset(OpRISCV64FMSUBS)
3499 func rewriteValueRISCV64_OpRISCV64FMSUBD(v *Value) bool {
3503 // match: (FMSUBD neg:(FNEGD x) y z)
3504 // cond: neg.Uses == 1
3505 // result: (FNMADDD x y z)
3507 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
3509 if neg.Op != OpRISCV64FNEGD {
3515 if !(neg.Uses == 1) {
3518 v.reset(OpRISCV64FNMADDD)
3524 // match: (FMSUBD x y neg:(FNEGD z))
3525 // cond: neg.Uses == 1
3526 // result: (FMADDD x y z)
3531 if neg.Op != OpRISCV64FNEGD {
3535 if !(neg.Uses == 1) {
3538 v.reset(OpRISCV64FMADDD)
3544 func rewriteValueRISCV64_OpRISCV64FMSUBS(v *Value) bool {
3548 // match: (FMSUBS neg:(FNEGS x) y z)
3549 // cond: neg.Uses == 1
3550 // result: (FNMADDS x y z)
3552 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
3554 if neg.Op != OpRISCV64FNEGS {
3560 if !(neg.Uses == 1) {
3563 v.reset(OpRISCV64FNMADDS)
3569 // match: (FMSUBS x y neg:(FNEGS z))
3570 // cond: neg.Uses == 1
3571 // result: (FMADDS x y z)
3576 if neg.Op != OpRISCV64FNEGS {
3580 if !(neg.Uses == 1) {
3583 v.reset(OpRISCV64FMADDS)
3589 func rewriteValueRISCV64_OpRISCV64FNMADDD(v *Value) bool {
3593 // match: (FNMADDD neg:(FNEGD x) y z)
3594 // cond: neg.Uses == 1
3595 // result: (FMSUBD x y z)
3597 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
3599 if neg.Op != OpRISCV64FNEGD {
3605 if !(neg.Uses == 1) {
3608 v.reset(OpRISCV64FMSUBD)
3614 // match: (FNMADDD x y neg:(FNEGD z))
3615 // cond: neg.Uses == 1
3616 // result: (FNMSUBD x y z)
3621 if neg.Op != OpRISCV64FNEGD {
3625 if !(neg.Uses == 1) {
3628 v.reset(OpRISCV64FNMSUBD)
3634 func rewriteValueRISCV64_OpRISCV64FNMADDS(v *Value) bool {
3638 // match: (FNMADDS neg:(FNEGS x) y z)
3639 // cond: neg.Uses == 1
3640 // result: (FMSUBS x y z)
3642 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
3644 if neg.Op != OpRISCV64FNEGS {
3650 if !(neg.Uses == 1) {
3653 v.reset(OpRISCV64FMSUBS)
3659 // match: (FNMADDS x y neg:(FNEGS z))
3660 // cond: neg.Uses == 1
3661 // result: (FNMSUBS x y z)
3666 if neg.Op != OpRISCV64FNEGS {
3670 if !(neg.Uses == 1) {
3673 v.reset(OpRISCV64FNMSUBS)
3679 func rewriteValueRISCV64_OpRISCV64FNMSUBD(v *Value) bool {
3683 // match: (FNMSUBD neg:(FNEGD x) y z)
3684 // cond: neg.Uses == 1
3685 // result: (FMADDD x y z)
3687 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
3689 if neg.Op != OpRISCV64FNEGD {
3695 if !(neg.Uses == 1) {
3698 v.reset(OpRISCV64FMADDD)
3704 // match: (FNMSUBD x y neg:(FNEGD z))
3705 // cond: neg.Uses == 1
3706 // result: (FNMADDD x y z)
3711 if neg.Op != OpRISCV64FNEGD {
3715 if !(neg.Uses == 1) {
3718 v.reset(OpRISCV64FNMADDD)
3724 func rewriteValueRISCV64_OpRISCV64FNMSUBS(v *Value) bool {
3728 // match: (FNMSUBS neg:(FNEGS x) y z)
3729 // cond: neg.Uses == 1
3730 // result: (FMADDS x y z)
3732 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
3734 if neg.Op != OpRISCV64FNEGS {
3740 if !(neg.Uses == 1) {
3743 v.reset(OpRISCV64FMADDS)
3749 // match: (FNMSUBS x y neg:(FNEGS z))
3750 // cond: neg.Uses == 1
3751 // result: (FNMADDS x y z)
3756 if neg.Op != OpRISCV64FNEGS {
3760 if !(neg.Uses == 1) {
3763 v.reset(OpRISCV64FNMADDS)
3769 func rewriteValueRISCV64_OpRISCV64FSUBD(v *Value) bool {
3772 // match: (FSUBD a (FMULD x y))
3773 // cond: a.Block.Func.useFMA(v)
3774 // result: (FNMSUBD x y a)
3777 if v_1.Op != OpRISCV64FMULD {
3782 if !(a.Block.Func.useFMA(v)) {
3785 v.reset(OpRISCV64FNMSUBD)
3789 // match: (FSUBD (FMULD x y) a)
3790 // cond: a.Block.Func.useFMA(v)
3791 // result: (FMSUBD x y a)
3793 if v_0.Op != OpRISCV64FMULD {
3799 if !(a.Block.Func.useFMA(v)) {
3802 v.reset(OpRISCV64FMSUBD)
// rewriteValueRISCV64_OpRISCV64FSUBS is the single-precision analogue of the
// FSUBD rules: fuse a subtract with a multiply operand into FNMSUBS / FMSUBS
// when useFMA permits. Generated code — edit _gen/RISCV64.rules instead.
3808 func rewriteValueRISCV64_OpRISCV64FSUBS(v *Value) bool {
3811 // match: (FSUBS a (FMULS x y))
3812 // cond: a.Block.Func.useFMA(v)
3813 // result: (FNMSUBS x y a)
3816 if v_1.Op != OpRISCV64FMULS {
3821 if !(a.Block.Func.useFMA(v)) {
3824 v.reset(OpRISCV64FNMSUBS)
3828 // match: (FSUBS (FMULS x y) a)
3829 // cond: a.Block.Func.useFMA(v)
3830 // result: (FMSUBS x y a)
3832 if v_0.Op != OpRISCV64FMULS {
3838 if !(a.Block.Func.useFMA(v)) {
3841 v.reset(OpRISCV64FMSUBS)
// rewriteValueRISCV64_OpRISCV64MOVBUload folds address arithmetic into the
// unsigned byte load: a MOVaddr base merges its offset and symbol into the
// load, and an ADDI base merges its immediate into the offset. Both are gated
// on the combined offset fitting in 32 bits (and, for MOVaddr, on the symbols
// being mergeable). Generated code — edit _gen/RISCV64.rules instead.
3847 func rewriteValueRISCV64_OpRISCV64MOVBUload(v *Value) bool {
3850 // match: (MOVBUload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
3851 // cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
3852 // result: (MOVBUload [off1+off2] {mergeSym(sym1,sym2)} base mem)
3854 off1 := auxIntToInt32(v.AuxInt)
3855 sym1 := auxToSym(v.Aux)
3856 if v_0.Op != OpRISCV64MOVaddr {
3859 off2 := auxIntToInt32(v_0.AuxInt)
3860 sym2 := auxToSym(v_0.Aux)
3863 if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
3866 v.reset(OpRISCV64MOVBUload)
3867 v.AuxInt = int32ToAuxInt(off1 + off2)
3868 v.Aux = symToAux(mergeSym(sym1, sym2))
3869 v.AddArg2(base, mem)
3872 // match: (MOVBUload [off1] {sym} (ADDI [off2] base) mem)
3873 // cond: is32Bit(int64(off1)+off2)
3874 // result: (MOVBUload [off1+int32(off2)] {sym} base mem)
3876 off1 := auxIntToInt32(v.AuxInt)
3877 sym := auxToSym(v.Aux)
3878 if v_0.Op != OpRISCV64ADDI {
// ADDI carries a 64-bit aux immediate; the fold is only legal if the sum
// still fits in the load's 32-bit offset field.
3881 off2 := auxIntToInt64(v_0.AuxInt)
3884 if !(is32Bit(int64(off1) + off2)) {
3887 v.reset(OpRISCV64MOVBUload)
3888 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3889 v.Aux = symToAux(sym)
3890 v.AddArg2(base, mem)
// rewriteValueRISCV64_OpRISCV64MOVBUreg eliminates redundant zero-extensions
// of a byte. Producers that already yield a value in [0,255] (FP comparisons,
// SEQZ/SNEZ/SLT/SLTU, small ANDI masks, unsigned byte loads, 8-bit atomics,
// CAS results, prior MOVBUreg) make the extension a no-op (MOVDreg), constants
// are folded, and an extension of a signed byte load is turned into the
// unsigned load directly. Generated code — edit _gen/RISCV64.rules instead.
3895 func rewriteValueRISCV64_OpRISCV64MOVBUreg(v *Value) bool {
// FP comparisons produce 0/1, already zero-extended.
3898 // match: (MOVBUreg x:(FLES _ _))
3902 if x.Op != OpRISCV64FLES {
3908 // match: (MOVBUreg x:(FLTS _ _))
3912 if x.Op != OpRISCV64FLTS {
3918 // match: (MOVBUreg x:(FEQS _ _))
3922 if x.Op != OpRISCV64FEQS {
3928 // match: (MOVBUreg x:(FNES _ _))
3932 if x.Op != OpRISCV64FNES {
3938 // match: (MOVBUreg x:(FLED _ _))
3942 if x.Op != OpRISCV64FLED {
3948 // match: (MOVBUreg x:(FLTD _ _))
3952 if x.Op != OpRISCV64FLTD {
3958 // match: (MOVBUreg x:(FEQD _ _))
3962 if x.Op != OpRISCV64FEQD {
3968 // match: (MOVBUreg x:(FNED _ _))
3972 if x.Op != OpRISCV64FNED {
// Integer set-if ops also produce 0/1.
3978 // match: (MOVBUreg x:(SEQZ _))
3982 if x.Op != OpRISCV64SEQZ {
3988 // match: (MOVBUreg x:(SNEZ _))
3992 if x.Op != OpRISCV64SNEZ {
3998 // match: (MOVBUreg x:(SLT _ _))
4002 if x.Op != OpRISCV64SLT {
4008 // match: (MOVBUreg x:(SLTU _ _))
4012 if x.Op != OpRISCV64SLTU {
// An ANDI whose mask already fits in a byte leaves the value zero-extended.
4018 // match: (MOVBUreg x:(ANDI [c] y))
4019 // cond: c >= 0 && int64(uint8(c)) == c
4023 if x.Op != OpRISCV64ANDI {
4026 c := auxIntToInt64(x.AuxInt)
4027 if !(c >= 0 && int64(uint8(c)) == c) {
// Otherwise narrow the ANDI mask to the low byte and drop the extension.
4033 // match: (MOVBUreg (ANDI [c] x))
4035 // result: (ANDI [int64(uint8(c))] x)
4037 if v_0.Op != OpRISCV64ANDI {
4040 c := auxIntToInt64(v_0.AuxInt)
4045 v.reset(OpRISCV64ANDI)
4046 v.AuxInt = int64ToAuxInt(int64(uint8(c)))
// Constant-fold the zero-extension.
4050 // match: (MOVBUreg (MOVDconst [c]))
4051 // result: (MOVDconst [int64(uint8(c))])
4053 if v_0.Op != OpRISCV64MOVDconst {
4056 c := auxIntToInt64(v_0.AuxInt)
4057 v.reset(OpRISCV64MOVDconst)
4058 v.AuxInt = int64ToAuxInt(int64(uint8(c)))
// Unsigned byte loads and 8-bit atomic results are already zero-extended.
4061 // match: (MOVBUreg x:(MOVBUload _ _))
4062 // result: (MOVDreg x)
4065 if x.Op != OpRISCV64MOVBUload {
4068 v.reset(OpRISCV64MOVDreg)
4072 // match: (MOVBUreg x:(Select0 (LoweredAtomicLoad8 _ _)))
4073 // result: (MOVDreg x)
4076 if x.Op != OpSelect0 {
4080 if x_0.Op != OpRISCV64LoweredAtomicLoad8 {
4083 v.reset(OpRISCV64MOVDreg)
// CAS results are boolean 0/1, hence already byte-clean.
4087 // match: (MOVBUreg x:(Select0 (LoweredAtomicCas32 _ _ _ _)))
4088 // result: (MOVDreg x)
4091 if x.Op != OpSelect0 {
4095 if x_0.Op != OpRISCV64LoweredAtomicCas32 {
4098 v.reset(OpRISCV64MOVDreg)
4102 // match: (MOVBUreg x:(Select0 (LoweredAtomicCas64 _ _ _ _)))
4103 // result: (MOVDreg x)
4106 if x.Op != OpSelect0 {
4110 if x_0.Op != OpRISCV64LoweredAtomicCas64 {
4113 v.reset(OpRISCV64MOVDreg)
// Zero-extending twice is idempotent.
4117 // match: (MOVBUreg x:(MOVBUreg _))
4118 // result: (MOVDreg x)
4121 if x.Op != OpRISCV64MOVBUreg {
4124 v.reset(OpRISCV64MOVDreg)
// Replace a single-use signed byte load + zero-extend with the unsigned load,
// built in the load's original block (@x.Block).
4128 // match: (MOVBUreg <t> x:(MOVBload [off] {sym} ptr mem))
4129 // cond: x.Uses == 1 && clobber(x)
4130 // result: @x.Block (MOVBUload <t> [off] {sym} ptr mem)
4134 if x.Op != OpRISCV64MOVBload {
4137 off := auxIntToInt32(x.AuxInt)
4138 sym := auxToSym(x.Aux)
4141 if !(x.Uses == 1 && clobber(x)) {
4145 v0 := b.NewValue0(x.Pos, OpRISCV64MOVBUload, t)
4147 v0.AuxInt = int32ToAuxInt(off)
4148 v0.Aux = symToAux(sym)
4149 v0.AddArg2(ptr, mem)
// rewriteValueRISCV64_OpRISCV64MOVBload folds address arithmetic into the
// signed byte load: MOVaddr merges offset+symbol, ADDI merges its immediate,
// both gated on the combined offset staying within 32 bits.
// Generated code — edit _gen/RISCV64.rules instead.
4154 func rewriteValueRISCV64_OpRISCV64MOVBload(v *Value) bool {
4157 // match: (MOVBload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
4158 // cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
4159 // result: (MOVBload [off1+off2] {mergeSym(sym1,sym2)} base mem)
4161 off1 := auxIntToInt32(v.AuxInt)
4162 sym1 := auxToSym(v.Aux)
4163 if v_0.Op != OpRISCV64MOVaddr {
4166 off2 := auxIntToInt32(v_0.AuxInt)
4167 sym2 := auxToSym(v_0.Aux)
4170 if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
4173 v.reset(OpRISCV64MOVBload)
4174 v.AuxInt = int32ToAuxInt(off1 + off2)
4175 v.Aux = symToAux(mergeSym(sym1, sym2))
4176 v.AddArg2(base, mem)
4179 // match: (MOVBload [off1] {sym} (ADDI [off2] base) mem)
4180 // cond: is32Bit(int64(off1)+off2)
4181 // result: (MOVBload [off1+int32(off2)] {sym} base mem)
4183 off1 := auxIntToInt32(v.AuxInt)
4184 sym := auxToSym(v.Aux)
4185 if v_0.Op != OpRISCV64ADDI {
4188 off2 := auxIntToInt64(v_0.AuxInt)
4191 if !(is32Bit(int64(off1) + off2)) {
4194 v.reset(OpRISCV64MOVBload)
4195 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4196 v.Aux = symToAux(sym)
4197 v.AddArg2(base, mem)
// rewriteValueRISCV64_OpRISCV64MOVBreg eliminates redundant sign-extensions of
// a byte: small non-negative ANDI results and values from signed byte loads or
// prior MOVBreg are already correct (MOVDreg), constants are folded, and a
// single-use unsigned load + sign-extend becomes a signed load.
// Generated code — edit _gen/RISCV64.rules instead.
4202 func rewriteValueRISCV64_OpRISCV64MOVBreg(v *Value) bool {
// A non-negative ANDI mask fitting in int8 leaves the sign bit clear.
4205 // match: (MOVBreg x:(ANDI [c] y))
4206 // cond: c >= 0 && int64(int8(c)) == c
4210 if x.Op != OpRISCV64ANDI {
4213 c := auxIntToInt64(x.AuxInt)
4214 if !(c >= 0 && int64(int8(c)) == c) {
// Constant-fold the sign-extension.
4220 // match: (MOVBreg (MOVDconst [c]))
4221 // result: (MOVDconst [int64(int8(c))])
4223 if v_0.Op != OpRISCV64MOVDconst {
4226 c := auxIntToInt64(v_0.AuxInt)
4227 v.reset(OpRISCV64MOVDconst)
4228 v.AuxInt = int64ToAuxInt(int64(int8(c)))
// Signed byte loads already sign-extend; re-extending is a no-op.
4231 // match: (MOVBreg x:(MOVBload _ _))
4232 // result: (MOVDreg x)
4235 if x.Op != OpRISCV64MOVBload {
4238 v.reset(OpRISCV64MOVDreg)
4242 // match: (MOVBreg x:(MOVBreg _))
4243 // result: (MOVDreg x)
4246 if x.Op != OpRISCV64MOVBreg {
4249 v.reset(OpRISCV64MOVDreg)
// Replace a single-use unsigned byte load + sign-extend with the signed load,
// built in the load's original block (@x.Block).
4253 // match: (MOVBreg <t> x:(MOVBUload [off] {sym} ptr mem))
4254 // cond: x.Uses == 1 && clobber(x)
4255 // result: @x.Block (MOVBload <t> [off] {sym} ptr mem)
4259 if x.Op != OpRISCV64MOVBUload {
4262 off := auxIntToInt32(x.AuxInt)
4263 sym := auxToSym(x.Aux)
4266 if !(x.Uses == 1 && clobber(x)) {
4270 v0 := b.NewValue0(x.Pos, OpRISCV64MOVBload, t)
4272 v0.AuxInt = int32ToAuxInt(off)
4273 v0.Aux = symToAux(sym)
4274 v0.AddArg2(ptr, mem)
// rewriteValueRISCV64_OpRISCV64MOVBstore optimizes byte stores: it folds
// MOVaddr/ADDI address arithmetic into the store offset, turns a store of
// constant zero into MOVBstorezero, and drops any sign/zero extension on the
// stored value — a byte store only writes the low 8 bits, so the extension is
// irrelevant. Generated code — edit _gen/RISCV64.rules instead.
4279 func rewriteValueRISCV64_OpRISCV64MOVBstore(v *Value) bool {
4283 // match: (MOVBstore [off1] {sym1} (MOVaddr [off2] {sym2} base) val mem)
4284 // cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
4285 // result: (MOVBstore [off1+off2] {mergeSym(sym1,sym2)} base val mem)
4287 off1 := auxIntToInt32(v.AuxInt)
4288 sym1 := auxToSym(v.Aux)
4289 if v_0.Op != OpRISCV64MOVaddr {
4292 off2 := auxIntToInt32(v_0.AuxInt)
4293 sym2 := auxToSym(v_0.Aux)
4297 if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
4300 v.reset(OpRISCV64MOVBstore)
4301 v.AuxInt = int32ToAuxInt(off1 + off2)
4302 v.Aux = symToAux(mergeSym(sym1, sym2))
4303 v.AddArg3(base, val, mem)
4306 // match: (MOVBstore [off1] {sym} (ADDI [off2] base) val mem)
4307 // cond: is32Bit(int64(off1)+off2)
4308 // result: (MOVBstore [off1+int32(off2)] {sym} base val mem)
4310 off1 := auxIntToInt32(v.AuxInt)
4311 sym := auxToSym(v.Aux)
4312 if v_0.Op != OpRISCV64ADDI {
4315 off2 := auxIntToInt64(v_0.AuxInt)
4319 if !(is32Bit(int64(off1) + off2)) {
4322 v.reset(OpRISCV64MOVBstore)
4323 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4324 v.Aux = symToAux(sym)
4325 v.AddArg3(base, val, mem)
// Storing constant zero uses the dedicated zero-store (frees a register).
4328 // match: (MOVBstore [off] {sym} ptr (MOVDconst [0]) mem)
4329 // result: (MOVBstorezero [off] {sym} ptr mem)
4331 off := auxIntToInt32(v.AuxInt)
4332 sym := auxToSym(v.Aux)
4334 if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
4338 v.reset(OpRISCV64MOVBstorezero)
4339 v.AuxInt = int32ToAuxInt(off)
4340 v.Aux = symToAux(sym)
// The remaining rules all strip a redundant extension of the stored value:
// only the low byte reaches memory, so MOV{B,H,W}{,U}reg wrappers are dropped.
4344 // match: (MOVBstore [off] {sym} ptr (MOVBreg x) mem)
4345 // result: (MOVBstore [off] {sym} ptr x mem)
4347 off := auxIntToInt32(v.AuxInt)
4348 sym := auxToSym(v.Aux)
4350 if v_1.Op != OpRISCV64MOVBreg {
4355 v.reset(OpRISCV64MOVBstore)
4356 v.AuxInt = int32ToAuxInt(off)
4357 v.Aux = symToAux(sym)
4358 v.AddArg3(ptr, x, mem)
4361 // match: (MOVBstore [off] {sym} ptr (MOVHreg x) mem)
4362 // result: (MOVBstore [off] {sym} ptr x mem)
4364 off := auxIntToInt32(v.AuxInt)
4365 sym := auxToSym(v.Aux)
4367 if v_1.Op != OpRISCV64MOVHreg {
4372 v.reset(OpRISCV64MOVBstore)
4373 v.AuxInt = int32ToAuxInt(off)
4374 v.Aux = symToAux(sym)
4375 v.AddArg3(ptr, x, mem)
4378 // match: (MOVBstore [off] {sym} ptr (MOVWreg x) mem)
4379 // result: (MOVBstore [off] {sym} ptr x mem)
4381 off := auxIntToInt32(v.AuxInt)
4382 sym := auxToSym(v.Aux)
4384 if v_1.Op != OpRISCV64MOVWreg {
4389 v.reset(OpRISCV64MOVBstore)
4390 v.AuxInt = int32ToAuxInt(off)
4391 v.Aux = symToAux(sym)
4392 v.AddArg3(ptr, x, mem)
4395 // match: (MOVBstore [off] {sym} ptr (MOVBUreg x) mem)
4396 // result: (MOVBstore [off] {sym} ptr x mem)
4398 off := auxIntToInt32(v.AuxInt)
4399 sym := auxToSym(v.Aux)
4401 if v_1.Op != OpRISCV64MOVBUreg {
4406 v.reset(OpRISCV64MOVBstore)
4407 v.AuxInt = int32ToAuxInt(off)
4408 v.Aux = symToAux(sym)
4409 v.AddArg3(ptr, x, mem)
4412 // match: (MOVBstore [off] {sym} ptr (MOVHUreg x) mem)
4413 // result: (MOVBstore [off] {sym} ptr x mem)
4415 off := auxIntToInt32(v.AuxInt)
4416 sym := auxToSym(v.Aux)
4418 if v_1.Op != OpRISCV64MOVHUreg {
4423 v.reset(OpRISCV64MOVBstore)
4424 v.AuxInt = int32ToAuxInt(off)
4425 v.Aux = symToAux(sym)
4426 v.AddArg3(ptr, x, mem)
4429 // match: (MOVBstore [off] {sym} ptr (MOVWUreg x) mem)
4430 // result: (MOVBstore [off] {sym} ptr x mem)
4432 off := auxIntToInt32(v.AuxInt)
4433 sym := auxToSym(v.Aux)
4435 if v_1.Op != OpRISCV64MOVWUreg {
4440 v.reset(OpRISCV64MOVBstore)
4441 v.AuxInt = int32ToAuxInt(off)
4442 v.Aux = symToAux(sym)
4443 v.AddArg3(ptr, x, mem)
// rewriteValueRISCV64_OpRISCV64MOVBstorezero folds MOVaddr/ADDI address
// arithmetic into the zero-byte store's offset, subject to the combined offset
// fitting in 32 bits. Generated code — edit _gen/RISCV64.rules instead.
4448 func rewriteValueRISCV64_OpRISCV64MOVBstorezero(v *Value) bool {
4451 // match: (MOVBstorezero [off1] {sym1} (MOVaddr [off2] {sym2} ptr) mem)
4452 // cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2))
4453 // result: (MOVBstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
4455 off1 := auxIntToInt32(v.AuxInt)
4456 sym1 := auxToSym(v.Aux)
4457 if v_0.Op != OpRISCV64MOVaddr {
4460 off2 := auxIntToInt32(v_0.AuxInt)
4461 sym2 := auxToSym(v_0.Aux)
4464 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
4467 v.reset(OpRISCV64MOVBstorezero)
4468 v.AuxInt = int32ToAuxInt(off1 + off2)
4469 v.Aux = symToAux(mergeSym(sym1, sym2))
4473 // match: (MOVBstorezero [off1] {sym} (ADDI [off2] ptr) mem)
4474 // cond: is32Bit(int64(off1)+off2)
4475 // result: (MOVBstorezero [off1+int32(off2)] {sym} ptr mem)
4477 off1 := auxIntToInt32(v.AuxInt)
4478 sym := auxToSym(v.Aux)
4479 if v_0.Op != OpRISCV64ADDI {
4482 off2 := auxIntToInt64(v_0.AuxInt)
4485 if !(is32Bit(int64(off1) + off2)) {
4488 v.reset(OpRISCV64MOVBstorezero)
4489 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4490 v.Aux = symToAux(sym)
// rewriteValueRISCV64_OpRISCV64MOVDload folds MOVaddr/ADDI address arithmetic
// into the 64-bit load's offset when the combined offset fits in 32 bits.
// Generated code — edit _gen/RISCV64.rules instead.
4496 func rewriteValueRISCV64_OpRISCV64MOVDload(v *Value) bool {
4499 // match: (MOVDload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
4500 // cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
4501 // result: (MOVDload [off1+off2] {mergeSym(sym1,sym2)} base mem)
4503 off1 := auxIntToInt32(v.AuxInt)
4504 sym1 := auxToSym(v.Aux)
4505 if v_0.Op != OpRISCV64MOVaddr {
4508 off2 := auxIntToInt32(v_0.AuxInt)
4509 sym2 := auxToSym(v_0.Aux)
4512 if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
4515 v.reset(OpRISCV64MOVDload)
4516 v.AuxInt = int32ToAuxInt(off1 + off2)
4517 v.Aux = symToAux(mergeSym(sym1, sym2))
4518 v.AddArg2(base, mem)
4521 // match: (MOVDload [off1] {sym} (ADDI [off2] base) mem)
4522 // cond: is32Bit(int64(off1)+off2)
4523 // result: (MOVDload [off1+int32(off2)] {sym} base mem)
4525 off1 := auxIntToInt32(v.AuxInt)
4526 sym := auxToSym(v.Aux)
4527 if v_0.Op != OpRISCV64ADDI {
4530 off2 := auxIntToInt64(v_0.AuxInt)
4533 if !(is32Bit(int64(off1) + off2)) {
4536 v.reset(OpRISCV64MOVDload)
4537 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4538 v.Aux = symToAux(sym)
4539 v.AddArg2(base, mem)
// rewriteValueRISCV64_OpRISCV64MOVDnop collapses a MOVDnop of a constant into
// the constant itself. Generated code — edit _gen/RISCV64.rules instead.
4544 func rewriteValueRISCV64_OpRISCV64MOVDnop(v *Value) bool {
4546 // match: (MOVDnop (MOVDconst [c]))
4547 // result: (MOVDconst [c])
4549 if v_0.Op != OpRISCV64MOVDconst {
4552 c := auxIntToInt64(v_0.AuxInt)
4553 v.reset(OpRISCV64MOVDconst)
4554 v.AuxInt = int64ToAuxInt(c)
// rewriteValueRISCV64_OpRISCV64MOVDreg demotes a MOVDreg whose operand has no
// other uses to a MOVDnop (the register move can then be elided).
// Generated code — edit _gen/RISCV64.rules instead.
4559 func rewriteValueRISCV64_OpRISCV64MOVDreg(v *Value) bool {
4561 // match: (MOVDreg x)
4562 // cond: x.Uses == 1
4563 // result: (MOVDnop x)
4569 v.reset(OpRISCV64MOVDnop)
// rewriteValueRISCV64_OpRISCV64MOVDstore optimizes 64-bit stores: it folds
// MOVaddr/ADDI address arithmetic into the offset and converts a store of
// constant zero into MOVDstorezero. Unlike the narrower stores there are no
// extension-stripping rules — a full-width store consumes all 64 bits.
// Generated code — edit _gen/RISCV64.rules instead.
4575 func rewriteValueRISCV64_OpRISCV64MOVDstore(v *Value) bool {
4579 // match: (MOVDstore [off1] {sym1} (MOVaddr [off2] {sym2} base) val mem)
4580 // cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
4581 // result: (MOVDstore [off1+off2] {mergeSym(sym1,sym2)} base val mem)
4583 off1 := auxIntToInt32(v.AuxInt)
4584 sym1 := auxToSym(v.Aux)
4585 if v_0.Op != OpRISCV64MOVaddr {
4588 off2 := auxIntToInt32(v_0.AuxInt)
4589 sym2 := auxToSym(v_0.Aux)
4593 if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
4596 v.reset(OpRISCV64MOVDstore)
4597 v.AuxInt = int32ToAuxInt(off1 + off2)
4598 v.Aux = symToAux(mergeSym(sym1, sym2))
4599 v.AddArg3(base, val, mem)
4602 // match: (MOVDstore [off1] {sym} (ADDI [off2] base) val mem)
4603 // cond: is32Bit(int64(off1)+off2)
4604 // result: (MOVDstore [off1+int32(off2)] {sym} base val mem)
4606 off1 := auxIntToInt32(v.AuxInt)
4607 sym := auxToSym(v.Aux)
4608 if v_0.Op != OpRISCV64ADDI {
4611 off2 := auxIntToInt64(v_0.AuxInt)
4615 if !(is32Bit(int64(off1) + off2)) {
4618 v.reset(OpRISCV64MOVDstore)
4619 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4620 v.Aux = symToAux(sym)
4621 v.AddArg3(base, val, mem)
// Storing constant zero uses the dedicated zero-store.
4624 // match: (MOVDstore [off] {sym} ptr (MOVDconst [0]) mem)
4625 // result: (MOVDstorezero [off] {sym} ptr mem)
4627 off := auxIntToInt32(v.AuxInt)
4628 sym := auxToSym(v.Aux)
4630 if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
4634 v.reset(OpRISCV64MOVDstorezero)
4635 v.AuxInt = int32ToAuxInt(off)
4636 v.Aux = symToAux(sym)
// rewriteValueRISCV64_OpRISCV64MOVDstorezero folds MOVaddr/ADDI address
// arithmetic into the zero-doubleword store's offset, subject to the combined
// offset fitting in 32 bits. Generated code — edit _gen/RISCV64.rules instead.
4642 func rewriteValueRISCV64_OpRISCV64MOVDstorezero(v *Value) bool {
4645 // match: (MOVDstorezero [off1] {sym1} (MOVaddr [off2] {sym2} ptr) mem)
4646 // cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2))
4647 // result: (MOVDstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
4649 off1 := auxIntToInt32(v.AuxInt)
4650 sym1 := auxToSym(v.Aux)
4651 if v_0.Op != OpRISCV64MOVaddr {
4654 off2 := auxIntToInt32(v_0.AuxInt)
4655 sym2 := auxToSym(v_0.Aux)
4658 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
4661 v.reset(OpRISCV64MOVDstorezero)
4662 v.AuxInt = int32ToAuxInt(off1 + off2)
4663 v.Aux = symToAux(mergeSym(sym1, sym2))
4667 // match: (MOVDstorezero [off1] {sym} (ADDI [off2] ptr) mem)
4668 // cond: is32Bit(int64(off1)+off2)
4669 // result: (MOVDstorezero [off1+int32(off2)] {sym} ptr mem)
4671 off1 := auxIntToInt32(v.AuxInt)
4672 sym := auxToSym(v.Aux)
4673 if v_0.Op != OpRISCV64ADDI {
4676 off2 := auxIntToInt64(v_0.AuxInt)
4679 if !(is32Bit(int64(off1) + off2)) {
4682 v.reset(OpRISCV64MOVDstorezero)
4683 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4684 v.Aux = symToAux(sym)
// rewriteValueRISCV64_OpRISCV64MOVHUload folds MOVaddr/ADDI address arithmetic
// into the unsigned halfword load's offset when the combined offset fits in 32
// bits. Generated code — edit _gen/RISCV64.rules instead.
4690 func rewriteValueRISCV64_OpRISCV64MOVHUload(v *Value) bool {
4693 // match: (MOVHUload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
4694 // cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
4695 // result: (MOVHUload [off1+off2] {mergeSym(sym1,sym2)} base mem)
4697 off1 := auxIntToInt32(v.AuxInt)
4698 sym1 := auxToSym(v.Aux)
4699 if v_0.Op != OpRISCV64MOVaddr {
4702 off2 := auxIntToInt32(v_0.AuxInt)
4703 sym2 := auxToSym(v_0.Aux)
4706 if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
4709 v.reset(OpRISCV64MOVHUload)
4710 v.AuxInt = int32ToAuxInt(off1 + off2)
4711 v.Aux = symToAux(mergeSym(sym1, sym2))
4712 v.AddArg2(base, mem)
4715 // match: (MOVHUload [off1] {sym} (ADDI [off2] base) mem)
4716 // cond: is32Bit(int64(off1)+off2)
4717 // result: (MOVHUload [off1+int32(off2)] {sym} base mem)
4719 off1 := auxIntToInt32(v.AuxInt)
4720 sym := auxToSym(v.Aux)
4721 if v_0.Op != OpRISCV64ADDI {
4724 off2 := auxIntToInt64(v_0.AuxInt)
4727 if !(is32Bit(int64(off1) + off2)) {
4730 v.reset(OpRISCV64MOVHUload)
4731 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4732 v.Aux = symToAux(sym)
4733 v.AddArg2(base, mem)
// rewriteValueRISCV64_OpRISCV64MOVHUreg eliminates redundant zero-extensions
// to 16 bits: values already known to fit (small ANDI masks, unsigned byte or
// halfword loads, prior MOVBUreg/MOVHUreg) become plain MOVDreg, constants are
// folded, an ANDI mask is narrowed to 16 bits, and a single-use signed
// halfword load is converted to the unsigned load.
// Generated code — edit _gen/RISCV64.rules instead.
4738 func rewriteValueRISCV64_OpRISCV64MOVHUreg(v *Value) bool {
// An ANDI mask already within uint16 leaves the value zero-extended.
4741 // match: (MOVHUreg x:(ANDI [c] y))
4742 // cond: c >= 0 && int64(uint16(c)) == c
4746 if x.Op != OpRISCV64ANDI {
4749 c := auxIntToInt64(x.AuxInt)
4750 if !(c >= 0 && int64(uint16(c)) == c) {
// Otherwise narrow the mask to the low 16 bits and drop the extension.
4756 // match: (MOVHUreg (ANDI [c] x))
4758 // result: (ANDI [int64(uint16(c))] x)
4760 if v_0.Op != OpRISCV64ANDI {
4763 c := auxIntToInt64(v_0.AuxInt)
4768 v.reset(OpRISCV64ANDI)
4769 v.AuxInt = int64ToAuxInt(int64(uint16(c)))
// Constant-fold the zero-extension.
4773 // match: (MOVHUreg (MOVDconst [c]))
4774 // result: (MOVDconst [int64(uint16(c))])
4776 if v_0.Op != OpRISCV64MOVDconst {
4779 c := auxIntToInt64(v_0.AuxInt)
4780 v.reset(OpRISCV64MOVDconst)
4781 v.AuxInt = int64ToAuxInt(int64(uint16(c)))
// Unsigned loads and prior unsigned extensions are already 16-bit clean.
4784 // match: (MOVHUreg x:(MOVBUload _ _))
4785 // result: (MOVDreg x)
4788 if x.Op != OpRISCV64MOVBUload {
4791 v.reset(OpRISCV64MOVDreg)
4795 // match: (MOVHUreg x:(MOVHUload _ _))
4796 // result: (MOVDreg x)
4799 if x.Op != OpRISCV64MOVHUload {
4802 v.reset(OpRISCV64MOVDreg)
4806 // match: (MOVHUreg x:(MOVBUreg _))
4807 // result: (MOVDreg x)
4810 if x.Op != OpRISCV64MOVBUreg {
4813 v.reset(OpRISCV64MOVDreg)
4817 // match: (MOVHUreg x:(MOVHUreg _))
4818 // result: (MOVDreg x)
4821 if x.Op != OpRISCV64MOVHUreg {
4824 v.reset(OpRISCV64MOVDreg)
// Replace a single-use signed halfword load + zero-extend with the unsigned
// load, built in the load's original block (@x.Block).
4828 // match: (MOVHUreg <t> x:(MOVHload [off] {sym} ptr mem))
4829 // cond: x.Uses == 1 && clobber(x)
4830 // result: @x.Block (MOVHUload <t> [off] {sym} ptr mem)
4834 if x.Op != OpRISCV64MOVHload {
4837 off := auxIntToInt32(x.AuxInt)
4838 sym := auxToSym(x.Aux)
4841 if !(x.Uses == 1 && clobber(x)) {
4845 v0 := b.NewValue0(x.Pos, OpRISCV64MOVHUload, t)
4847 v0.AuxInt = int32ToAuxInt(off)
4848 v0.Aux = symToAux(sym)
4849 v0.AddArg2(ptr, mem)
// rewriteValueRISCV64_OpRISCV64MOVHload folds MOVaddr/ADDI address arithmetic
// into the signed halfword load's offset when the combined offset fits in 32
// bits. Generated code — edit _gen/RISCV64.rules instead.
4854 func rewriteValueRISCV64_OpRISCV64MOVHload(v *Value) bool {
4857 // match: (MOVHload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
4858 // cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
4859 // result: (MOVHload [off1+off2] {mergeSym(sym1,sym2)} base mem)
4861 off1 := auxIntToInt32(v.AuxInt)
4862 sym1 := auxToSym(v.Aux)
4863 if v_0.Op != OpRISCV64MOVaddr {
4866 off2 := auxIntToInt32(v_0.AuxInt)
4867 sym2 := auxToSym(v_0.Aux)
4870 if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
4873 v.reset(OpRISCV64MOVHload)
4874 v.AuxInt = int32ToAuxInt(off1 + off2)
4875 v.Aux = symToAux(mergeSym(sym1, sym2))
4876 v.AddArg2(base, mem)
4879 // match: (MOVHload [off1] {sym} (ADDI [off2] base) mem)
4880 // cond: is32Bit(int64(off1)+off2)
4881 // result: (MOVHload [off1+int32(off2)] {sym} base mem)
4883 off1 := auxIntToInt32(v.AuxInt)
4884 sym := auxToSym(v.Aux)
4885 if v_0.Op != OpRISCV64ADDI {
4888 off2 := auxIntToInt64(v_0.AuxInt)
4891 if !(is32Bit(int64(off1) + off2)) {
4894 v.reset(OpRISCV64MOVHload)
4895 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4896 v.Aux = symToAux(sym)
4897 v.AddArg2(base, mem)
// rewriteValueRISCV64_OpRISCV64MOVHreg eliminates redundant sign-extensions to
// 16 bits: values already sign-correct (small ANDI masks, byte/halfword loads,
// prior MOVBreg/MOVBUreg/MOVHreg) become MOVDreg, constants are folded, and a
// single-use unsigned halfword load + sign-extend becomes the signed load.
// Generated code — edit _gen/RISCV64.rules instead.
4902 func rewriteValueRISCV64_OpRISCV64MOVHreg(v *Value) bool {
// A non-negative ANDI mask fitting in int16 leaves the sign bit clear.
4905 // match: (MOVHreg x:(ANDI [c] y))
4906 // cond: c >= 0 && int64(int16(c)) == c
4910 if x.Op != OpRISCV64ANDI {
4913 c := auxIntToInt64(x.AuxInt)
4914 if !(c >= 0 && int64(int16(c)) == c) {
// Constant-fold the sign-extension.
4920 // match: (MOVHreg (MOVDconst [c]))
4921 // result: (MOVDconst [int64(int16(c))])
4923 if v_0.Op != OpRISCV64MOVDconst {
4926 c := auxIntToInt64(v_0.AuxInt)
4927 v.reset(OpRISCV64MOVDconst)
4928 v.AuxInt = int64ToAuxInt(int64(int16(c)))
// Narrower loads and extensions already produce a value that sign-extends
// correctly to 16 bits.
4931 // match: (MOVHreg x:(MOVBload _ _))
4932 // result: (MOVDreg x)
4935 if x.Op != OpRISCV64MOVBload {
4938 v.reset(OpRISCV64MOVDreg)
4942 // match: (MOVHreg x:(MOVBUload _ _))
4943 // result: (MOVDreg x)
4946 if x.Op != OpRISCV64MOVBUload {
4949 v.reset(OpRISCV64MOVDreg)
4953 // match: (MOVHreg x:(MOVHload _ _))
4954 // result: (MOVDreg x)
4957 if x.Op != OpRISCV64MOVHload {
4960 v.reset(OpRISCV64MOVDreg)
4964 // match: (MOVHreg x:(MOVBreg _))
4965 // result: (MOVDreg x)
4968 if x.Op != OpRISCV64MOVBreg {
4971 v.reset(OpRISCV64MOVDreg)
4975 // match: (MOVHreg x:(MOVBUreg _))
4976 // result: (MOVDreg x)
4979 if x.Op != OpRISCV64MOVBUreg {
4982 v.reset(OpRISCV64MOVDreg)
4986 // match: (MOVHreg x:(MOVHreg _))
4987 // result: (MOVDreg x)
4990 if x.Op != OpRISCV64MOVHreg {
4993 v.reset(OpRISCV64MOVDreg)
// Replace a single-use unsigned halfword load + sign-extend with the signed
// load, built in the load's original block (@x.Block).
4997 // match: (MOVHreg <t> x:(MOVHUload [off] {sym} ptr mem))
4998 // cond: x.Uses == 1 && clobber(x)
4999 // result: @x.Block (MOVHload <t> [off] {sym} ptr mem)
5003 if x.Op != OpRISCV64MOVHUload {
5006 off := auxIntToInt32(x.AuxInt)
5007 sym := auxToSym(x.Aux)
5010 if !(x.Uses == 1 && clobber(x)) {
5014 v0 := b.NewValue0(x.Pos, OpRISCV64MOVHload, t)
5016 v0.AuxInt = int32ToAuxInt(off)
5017 v0.Aux = symToAux(sym)
5018 v0.AddArg2(ptr, mem)
// rewriteValueRISCV64_OpRISCV64MOVHstore optimizes halfword stores: it folds
// MOVaddr/ADDI address arithmetic into the offset, turns a store of constant
// zero into MOVHstorezero, and strips sign/zero extensions of 16 bits or wider
// on the stored value — only the low 16 bits reach memory.
// Generated code — edit _gen/RISCV64.rules instead.
5023 func rewriteValueRISCV64_OpRISCV64MOVHstore(v *Value) bool {
5027 // match: (MOVHstore [off1] {sym1} (MOVaddr [off2] {sym2} base) val mem)
5028 // cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
5029 // result: (MOVHstore [off1+off2] {mergeSym(sym1,sym2)} base val mem)
5031 off1 := auxIntToInt32(v.AuxInt)
5032 sym1 := auxToSym(v.Aux)
5033 if v_0.Op != OpRISCV64MOVaddr {
5036 off2 := auxIntToInt32(v_0.AuxInt)
5037 sym2 := auxToSym(v_0.Aux)
5041 if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
5044 v.reset(OpRISCV64MOVHstore)
5045 v.AuxInt = int32ToAuxInt(off1 + off2)
5046 v.Aux = symToAux(mergeSym(sym1, sym2))
5047 v.AddArg3(base, val, mem)
5050 // match: (MOVHstore [off1] {sym} (ADDI [off2] base) val mem)
5051 // cond: is32Bit(int64(off1)+off2)
5052 // result: (MOVHstore [off1+int32(off2)] {sym} base val mem)
5054 off1 := auxIntToInt32(v.AuxInt)
5055 sym := auxToSym(v.Aux)
5056 if v_0.Op != OpRISCV64ADDI {
5059 off2 := auxIntToInt64(v_0.AuxInt)
5063 if !(is32Bit(int64(off1) + off2)) {
5066 v.reset(OpRISCV64MOVHstore)
5067 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
5068 v.Aux = symToAux(sym)
5069 v.AddArg3(base, val, mem)
// Storing constant zero uses the dedicated zero-store.
5072 // match: (MOVHstore [off] {sym} ptr (MOVDconst [0]) mem)
5073 // result: (MOVHstorezero [off] {sym} ptr mem)
5075 off := auxIntToInt32(v.AuxInt)
5076 sym := auxToSym(v.Aux)
5078 if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
5082 v.reset(OpRISCV64MOVHstorezero)
5083 v.AuxInt = int32ToAuxInt(off)
5084 v.Aux = symToAux(sym)
// Extensions of 16+ bits don't change the stored low halfword; drop them.
5088 // match: (MOVHstore [off] {sym} ptr (MOVHreg x) mem)
5089 // result: (MOVHstore [off] {sym} ptr x mem)
5091 off := auxIntToInt32(v.AuxInt)
5092 sym := auxToSym(v.Aux)
5094 if v_1.Op != OpRISCV64MOVHreg {
5099 v.reset(OpRISCV64MOVHstore)
5100 v.AuxInt = int32ToAuxInt(off)
5101 v.Aux = symToAux(sym)
5102 v.AddArg3(ptr, x, mem)
5105 // match: (MOVHstore [off] {sym} ptr (MOVWreg x) mem)
5106 // result: (MOVHstore [off] {sym} ptr x mem)
5108 off := auxIntToInt32(v.AuxInt)
5109 sym := auxToSym(v.Aux)
5111 if v_1.Op != OpRISCV64MOVWreg {
5116 v.reset(OpRISCV64MOVHstore)
5117 v.AuxInt = int32ToAuxInt(off)
5118 v.Aux = symToAux(sym)
5119 v.AddArg3(ptr, x, mem)
5122 // match: (MOVHstore [off] {sym} ptr (MOVHUreg x) mem)
5123 // result: (MOVHstore [off] {sym} ptr x mem)
5125 off := auxIntToInt32(v.AuxInt)
5126 sym := auxToSym(v.Aux)
5128 if v_1.Op != OpRISCV64MOVHUreg {
5133 v.reset(OpRISCV64MOVHstore)
5134 v.AuxInt = int32ToAuxInt(off)
5135 v.Aux = symToAux(sym)
5136 v.AddArg3(ptr, x, mem)
5139 // match: (MOVHstore [off] {sym} ptr (MOVWUreg x) mem)
5140 // result: (MOVHstore [off] {sym} ptr x mem)
5142 off := auxIntToInt32(v.AuxInt)
5143 sym := auxToSym(v.Aux)
5145 if v_1.Op != OpRISCV64MOVWUreg {
5150 v.reset(OpRISCV64MOVHstore)
5151 v.AuxInt = int32ToAuxInt(off)
5152 v.Aux = symToAux(sym)
5153 v.AddArg3(ptr, x, mem)
// rewriteValueRISCV64_OpRISCV64MOVHstorezero folds MOVaddr/ADDI address
// arithmetic into the zero-halfword store's offset, subject to the combined
// offset fitting in 32 bits. Generated code — edit _gen/RISCV64.rules instead.
5158 func rewriteValueRISCV64_OpRISCV64MOVHstorezero(v *Value) bool {
5161 // match: (MOVHstorezero [off1] {sym1} (MOVaddr [off2] {sym2} ptr) mem)
5162 // cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2))
5163 // result: (MOVHstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
5165 off1 := auxIntToInt32(v.AuxInt)
5166 sym1 := auxToSym(v.Aux)
5167 if v_0.Op != OpRISCV64MOVaddr {
5170 off2 := auxIntToInt32(v_0.AuxInt)
5171 sym2 := auxToSym(v_0.Aux)
5174 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
5177 v.reset(OpRISCV64MOVHstorezero)
5178 v.AuxInt = int32ToAuxInt(off1 + off2)
5179 v.Aux = symToAux(mergeSym(sym1, sym2))
5183 // match: (MOVHstorezero [off1] {sym} (ADDI [off2] ptr) mem)
5184 // cond: is32Bit(int64(off1)+off2)
5185 // result: (MOVHstorezero [off1+int32(off2)] {sym} ptr mem)
5187 off1 := auxIntToInt32(v.AuxInt)
5188 sym := auxToSym(v.Aux)
5189 if v_0.Op != OpRISCV64ADDI {
5192 off2 := auxIntToInt64(v_0.AuxInt)
5195 if !(is32Bit(int64(off1) + off2)) {
5198 v.reset(OpRISCV64MOVHstorezero)
5199 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
5200 v.Aux = symToAux(sym)
// rewriteValueRISCV64_OpRISCV64MOVWUload folds MOVaddr/ADDI address arithmetic
// into the unsigned word load's offset when the combined offset fits in 32
// bits. Generated code — edit _gen/RISCV64.rules instead.
5206 func rewriteValueRISCV64_OpRISCV64MOVWUload(v *Value) bool {
5209 // match: (MOVWUload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
5210 // cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
5211 // result: (MOVWUload [off1+off2] {mergeSym(sym1,sym2)} base mem)
5213 off1 := auxIntToInt32(v.AuxInt)
5214 sym1 := auxToSym(v.Aux)
5215 if v_0.Op != OpRISCV64MOVaddr {
5218 off2 := auxIntToInt32(v_0.AuxInt)
5219 sym2 := auxToSym(v_0.Aux)
5222 if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
5225 v.reset(OpRISCV64MOVWUload)
5226 v.AuxInt = int32ToAuxInt(off1 + off2)
5227 v.Aux = symToAux(mergeSym(sym1, sym2))
5228 v.AddArg2(base, mem)
5231 // match: (MOVWUload [off1] {sym} (ADDI [off2] base) mem)
5232 // cond: is32Bit(int64(off1)+off2)
5233 // result: (MOVWUload [off1+int32(off2)] {sym} base mem)
5235 off1 := auxIntToInt32(v.AuxInt)
5236 sym := auxToSym(v.Aux)
5237 if v_0.Op != OpRISCV64ADDI {
5240 off2 := auxIntToInt64(v_0.AuxInt)
5243 if !(is32Bit(int64(off1) + off2)) {
5246 v.reset(OpRISCV64MOVWUload)
5247 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
5248 v.Aux = symToAux(sym)
5249 v.AddArg2(base, mem)
// rewriteValueRISCV64_OpRISCV64MOVWUreg eliminates redundant zero-extensions
// to 32 bits: values already known to fit (small ANDI masks, unsigned narrower
// loads, prior unsigned extensions) become MOVDreg, constants are folded, an
// ANDI whose mask exceeds 32 bits is rebuilt as a register AND with the
// truncated constant (a 32-bit mask may not fit ANDI's immediate field), and a
// single-use signed word load is converted to the unsigned load.
// Generated code — edit _gen/RISCV64.rules instead.
5254 func rewriteValueRISCV64_OpRISCV64MOVWUreg(v *Value) bool {
5257 typ := &b.Func.Config.Types
// An ANDI mask already within uint32 leaves the value zero-extended.
5258 // match: (MOVWUreg x:(ANDI [c] y))
5259 // cond: c >= 0 && int64(uint32(c)) == c
5263 if x.Op != OpRISCV64ANDI {
5266 c := auxIntToInt64(x.AuxInt)
5267 if !(c >= 0 && int64(uint32(c)) == c) {
// Otherwise mask via a full register AND with the truncated constant.
5273 // match: (MOVWUreg (ANDI [c] x))
5275 // result: (AND (MOVDconst [int64(uint32(c))]) x)
5277 if v_0.Op != OpRISCV64ANDI {
5280 c := auxIntToInt64(v_0.AuxInt)
5285 v.reset(OpRISCV64AND)
5286 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
5287 v0.AuxInt = int64ToAuxInt(int64(uint32(c)))
// Constant-fold the zero-extension.
5291 // match: (MOVWUreg (MOVDconst [c]))
5292 // result: (MOVDconst [int64(uint32(c))])
5294 if v_0.Op != OpRISCV64MOVDconst {
5297 c := auxIntToInt64(v_0.AuxInt)
5298 v.reset(OpRISCV64MOVDconst)
5299 v.AuxInt = int64ToAuxInt(int64(uint32(c)))
// Unsigned loads and prior unsigned extensions are already 32-bit clean.
5302 // match: (MOVWUreg x:(MOVBUload _ _))
5303 // result: (MOVDreg x)
5306 if x.Op != OpRISCV64MOVBUload {
5309 v.reset(OpRISCV64MOVDreg)
5313 // match: (MOVWUreg x:(MOVHUload _ _))
5314 // result: (MOVDreg x)
5317 if x.Op != OpRISCV64MOVHUload {
5320 v.reset(OpRISCV64MOVDreg)
5324 // match: (MOVWUreg x:(MOVWUload _ _))
5325 // result: (MOVDreg x)
5328 if x.Op != OpRISCV64MOVWUload {
5331 v.reset(OpRISCV64MOVDreg)
5335 // match: (MOVWUreg x:(MOVBUreg _))
5336 // result: (MOVDreg x)
5339 if x.Op != OpRISCV64MOVBUreg {
5342 v.reset(OpRISCV64MOVDreg)
5346 // match: (MOVWUreg x:(MOVHUreg _))
5347 // result: (MOVDreg x)
5350 if x.Op != OpRISCV64MOVHUreg {
5353 v.reset(OpRISCV64MOVDreg)
5357 // match: (MOVWUreg x:(MOVWUreg _))
5358 // result: (MOVDreg x)
5361 if x.Op != OpRISCV64MOVWUreg {
5364 v.reset(OpRISCV64MOVDreg)
// Replace a single-use signed word load + zero-extend with the unsigned load,
// built in the load's original block (@x.Block).
5368 // match: (MOVWUreg <t> x:(MOVWload [off] {sym} ptr mem))
5369 // cond: x.Uses == 1 && clobber(x)
5370 // result: @x.Block (MOVWUload <t> [off] {sym} ptr mem)
5374 if x.Op != OpRISCV64MOVWload {
5377 off := auxIntToInt32(x.AuxInt)
5378 sym := auxToSym(x.Aux)
5381 if !(x.Uses == 1 && clobber(x)) {
5385 v0 := b.NewValue0(x.Pos, OpRISCV64MOVWUload, t)
5387 v0.AuxInt = int32ToAuxInt(off)
5388 v0.Aux = symToAux(sym)
5389 v0.AddArg2(ptr, mem)
// rewriteValueRISCV64_OpRISCV64MOVWload folds MOVaddr/ADDI address arithmetic
// into the signed word load's offset when the combined offset fits in 32 bits.
// Generated code — edit _gen/RISCV64.rules instead.
5394 func rewriteValueRISCV64_OpRISCV64MOVWload(v *Value) bool {
5397 // match: (MOVWload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
5398 // cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
5399 // result: (MOVWload [off1+off2] {mergeSym(sym1,sym2)} base mem)
5401 off1 := auxIntToInt32(v.AuxInt)
5402 sym1 := auxToSym(v.Aux)
5403 if v_0.Op != OpRISCV64MOVaddr {
5406 off2 := auxIntToInt32(v_0.AuxInt)
5407 sym2 := auxToSym(v_0.Aux)
5410 if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
5413 v.reset(OpRISCV64MOVWload)
5414 v.AuxInt = int32ToAuxInt(off1 + off2)
5415 v.Aux = symToAux(mergeSym(sym1, sym2))
5416 v.AddArg2(base, mem)
5419 // match: (MOVWload [off1] {sym} (ADDI [off2] base) mem)
5420 // cond: is32Bit(int64(off1)+off2)
5421 // result: (MOVWload [off1+int32(off2)] {sym} base mem)
5423 off1 := auxIntToInt32(v.AuxInt)
5424 sym := auxToSym(v.Aux)
5425 if v_0.Op != OpRISCV64ADDI {
5428 off2 := auxIntToInt64(v_0.AuxInt)
5431 if !(is32Bit(int64(off1) + off2)) {
5434 v.reset(OpRISCV64MOVWload)
5435 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
5436 v.Aux = symToAux(sym)
5437 v.AddArg2(base, mem)
// rewriteValueRISCV64_OpRISCV64MOVWreg eliminates redundant sign-extensions to
// 32 bits. Producers whose results are already correctly sign-extended —
// narrower loads, the W-suffixed 32-bit arithmetic ops (ADDIW, SUBW, NEGW,
// MULW, DIVW, DIVUW, REMW, REMUW), small ANDI masks, and prior narrowing
// extensions — become plain MOVDreg; constants are folded; and a single-use
// unsigned word load + sign-extend becomes the signed load.
// Generated code — edit _gen/RISCV64.rules instead.
5442 func rewriteValueRISCV64_OpRISCV64MOVWreg(v *Value) bool {
// A non-negative ANDI mask fitting in int32 leaves the sign bit clear.
5445 // match: (MOVWreg x:(ANDI [c] y))
5446 // cond: c >= 0 && int64(int32(c)) == c
5450 if x.Op != OpRISCV64ANDI {
5453 c := auxIntToInt64(x.AuxInt)
5454 if !(c >= 0 && int64(int32(c)) == c) {
// Constant-fold the sign-extension.
5460 // match: (MOVWreg (MOVDconst [c]))
5461 // result: (MOVDconst [int64(int32(c))])
5463 if v_0.Op != OpRISCV64MOVDconst {
5466 c := auxIntToInt64(v_0.AuxInt)
5467 v.reset(OpRISCV64MOVDconst)
5468 v.AuxInt = int64ToAuxInt(int64(int32(c)))
// Narrower loads fit in 32 bits with the correct sign.
5471 // match: (MOVWreg x:(MOVBload _ _))
5472 // result: (MOVDreg x)
5475 if x.Op != OpRISCV64MOVBload {
5478 v.reset(OpRISCV64MOVDreg)
5482 // match: (MOVWreg x:(MOVBUload _ _))
5483 // result: (MOVDreg x)
5486 if x.Op != OpRISCV64MOVBUload {
5489 v.reset(OpRISCV64MOVDreg)
5493 // match: (MOVWreg x:(MOVHload _ _))
5494 // result: (MOVDreg x)
5497 if x.Op != OpRISCV64MOVHload {
5500 v.reset(OpRISCV64MOVDreg)
5504 // match: (MOVWreg x:(MOVHUload _ _))
5505 // result: (MOVDreg x)
5508 if x.Op != OpRISCV64MOVHUload {
5511 v.reset(OpRISCV64MOVDreg)
5515 // match: (MOVWreg x:(MOVWload _ _))
5516 // result: (MOVDreg x)
5519 if x.Op != OpRISCV64MOVWload {
5522 v.reset(OpRISCV64MOVDreg)
// W-suffixed ops already sign-extend their 32-bit result to 64 bits.
5526 // match: (MOVWreg x:(ADDIW _))
5527 // result: (MOVDreg x)
5530 if x.Op != OpRISCV64ADDIW {
5533 v.reset(OpRISCV64MOVDreg)
5537 // match: (MOVWreg x:(SUBW _ _))
5538 // result: (MOVDreg x)
5541 if x.Op != OpRISCV64SUBW {
5544 v.reset(OpRISCV64MOVDreg)
5548 // match: (MOVWreg x:(NEGW _))
5549 // result: (MOVDreg x)
5552 if x.Op != OpRISCV64NEGW {
5555 v.reset(OpRISCV64MOVDreg)
5559 // match: (MOVWreg x:(MULW _ _))
5560 // result: (MOVDreg x)
5563 if x.Op != OpRISCV64MULW {
5566 v.reset(OpRISCV64MOVDreg)
5570 // match: (MOVWreg x:(DIVW _ _))
5571 // result: (MOVDreg x)
5574 if x.Op != OpRISCV64DIVW {
5577 v.reset(OpRISCV64MOVDreg)
5581 // match: (MOVWreg x:(DIVUW _ _))
5582 // result: (MOVDreg x)
5585 if x.Op != OpRISCV64DIVUW {
5588 v.reset(OpRISCV64MOVDreg)
5592 // match: (MOVWreg x:(REMW _ _))
5593 // result: (MOVDreg x)
5596 if x.Op != OpRISCV64REMW {
5599 v.reset(OpRISCV64MOVDreg)
5603 // match: (MOVWreg x:(REMUW _ _))
5604 // result: (MOVDreg x)
5607 if x.Op != OpRISCV64REMUW {
5610 v.reset(OpRISCV64MOVDreg)
// Narrowing extensions already produce a sign-correct 32-bit value.
5614 // match: (MOVWreg x:(MOVBreg _))
5615 // result: (MOVDreg x)
5618 if x.Op != OpRISCV64MOVBreg {
5621 v.reset(OpRISCV64MOVDreg)
5625 // match: (MOVWreg x:(MOVBUreg _))
5626 // result: (MOVDreg x)
5629 if x.Op != OpRISCV64MOVBUreg {
5632 v.reset(OpRISCV64MOVDreg)
5636 // match: (MOVWreg x:(MOVHreg _))
5637 // result: (MOVDreg x)
5640 if x.Op != OpRISCV64MOVHreg {
5643 v.reset(OpRISCV64MOVDreg)
5647 // match: (MOVWreg x:(MOVWreg _))
5648 // result: (MOVDreg x)
5651 if x.Op != OpRISCV64MOVWreg {
5654 v.reset(OpRISCV64MOVDreg)
// Replace a single-use unsigned word load + sign-extend with the signed load,
// built in the load's original block (@x.Block).
5658 // match: (MOVWreg <t> x:(MOVWUload [off] {sym} ptr mem))
5659 // cond: x.Uses == 1 && clobber(x)
5660 // result: @x.Block (MOVWload <t> [off] {sym} ptr mem)
5664 if x.Op != OpRISCV64MOVWUload {
5667 off := auxIntToInt32(x.AuxInt)
5668 sym := auxToSym(x.Aux)
5671 if !(x.Uses == 1 && clobber(x)) {
5675 v0 := b.NewValue0(x.Pos, OpRISCV64MOVWload, t)
5677 v0.AuxInt = int32ToAuxInt(off)
5678 v0.Aux = symToAux(sym)
5679 v0.AddArg2(ptr, mem)
5684 func rewriteValueRISCV64_OpRISCV64MOVWstore(v *Value) bool {
5688 // match: (MOVWstore [off1] {sym1} (MOVaddr [off2] {sym2} base) val mem)
5689 // cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
5690 // result: (MOVWstore [off1+off2] {mergeSym(sym1,sym2)} base val mem)
5692 off1 := auxIntToInt32(v.AuxInt)
5693 sym1 := auxToSym(v.Aux)
5694 if v_0.Op != OpRISCV64MOVaddr {
5697 off2 := auxIntToInt32(v_0.AuxInt)
5698 sym2 := auxToSym(v_0.Aux)
5702 if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
5705 v.reset(OpRISCV64MOVWstore)
5706 v.AuxInt = int32ToAuxInt(off1 + off2)
5707 v.Aux = symToAux(mergeSym(sym1, sym2))
5708 v.AddArg3(base, val, mem)
5711 // match: (MOVWstore [off1] {sym} (ADDI [off2] base) val mem)
5712 // cond: is32Bit(int64(off1)+off2)
5713 // result: (MOVWstore [off1+int32(off2)] {sym} base val mem)
5715 off1 := auxIntToInt32(v.AuxInt)
5716 sym := auxToSym(v.Aux)
5717 if v_0.Op != OpRISCV64ADDI {
5720 off2 := auxIntToInt64(v_0.AuxInt)
5724 if !(is32Bit(int64(off1) + off2)) {
5727 v.reset(OpRISCV64MOVWstore)
5728 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
5729 v.Aux = symToAux(sym)
5730 v.AddArg3(base, val, mem)
5733 // match: (MOVWstore [off] {sym} ptr (MOVDconst [0]) mem)
5734 // result: (MOVWstorezero [off] {sym} ptr mem)
5736 off := auxIntToInt32(v.AuxInt)
5737 sym := auxToSym(v.Aux)
5739 if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
5743 v.reset(OpRISCV64MOVWstorezero)
5744 v.AuxInt = int32ToAuxInt(off)
5745 v.Aux = symToAux(sym)
5749 // match: (MOVWstore [off] {sym} ptr (MOVWreg x) mem)
5750 // result: (MOVWstore [off] {sym} ptr x mem)
5752 off := auxIntToInt32(v.AuxInt)
5753 sym := auxToSym(v.Aux)
5755 if v_1.Op != OpRISCV64MOVWreg {
5760 v.reset(OpRISCV64MOVWstore)
5761 v.AuxInt = int32ToAuxInt(off)
5762 v.Aux = symToAux(sym)
5763 v.AddArg3(ptr, x, mem)
5766 // match: (MOVWstore [off] {sym} ptr (MOVWUreg x) mem)
5767 // result: (MOVWstore [off] {sym} ptr x mem)
5769 off := auxIntToInt32(v.AuxInt)
5770 sym := auxToSym(v.Aux)
5772 if v_1.Op != OpRISCV64MOVWUreg {
5777 v.reset(OpRISCV64MOVWstore)
5778 v.AuxInt = int32ToAuxInt(off)
5779 v.Aux = symToAux(sym)
5780 v.AddArg3(ptr, x, mem)
5785 func rewriteValueRISCV64_OpRISCV64MOVWstorezero(v *Value) bool {
5788 // match: (MOVWstorezero [off1] {sym1} (MOVaddr [off2] {sym2} ptr) mem)
5789 // cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2))
5790 // result: (MOVWstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
5792 off1 := auxIntToInt32(v.AuxInt)
5793 sym1 := auxToSym(v.Aux)
5794 if v_0.Op != OpRISCV64MOVaddr {
5797 off2 := auxIntToInt32(v_0.AuxInt)
5798 sym2 := auxToSym(v_0.Aux)
5801 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
5804 v.reset(OpRISCV64MOVWstorezero)
5805 v.AuxInt = int32ToAuxInt(off1 + off2)
5806 v.Aux = symToAux(mergeSym(sym1, sym2))
5810 // match: (MOVWstorezero [off1] {sym} (ADDI [off2] ptr) mem)
5811 // cond: is32Bit(int64(off1)+off2)
5812 // result: (MOVWstorezero [off1+int32(off2)] {sym} ptr mem)
5814 off1 := auxIntToInt32(v.AuxInt)
5815 sym := auxToSym(v.Aux)
5816 if v_0.Op != OpRISCV64ADDI {
5819 off2 := auxIntToInt64(v_0.AuxInt)
5822 if !(is32Bit(int64(off1) + off2)) {
5825 v.reset(OpRISCV64MOVWstorezero)
5826 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
5827 v.Aux = symToAux(sym)
5833 func rewriteValueRISCV64_OpRISCV64NEG(v *Value) bool {
5836 // match: (NEG (SUB x y))
5837 // result: (SUB y x)
5839 if v_0.Op != OpRISCV64SUB {
5844 v.reset(OpRISCV64SUB)
5848 // match: (NEG <t> s:(ADDI [val] (SUB x y)))
5849 // cond: s.Uses == 1 && is32Bit(-val)
5850 // result: (ADDI [-val] (SUB <t> y x))
5854 if s.Op != OpRISCV64ADDI {
5857 val := auxIntToInt64(s.AuxInt)
5859 if s_0.Op != OpRISCV64SUB {
5864 if !(s.Uses == 1 && is32Bit(-val)) {
5867 v.reset(OpRISCV64ADDI)
5868 v.AuxInt = int64ToAuxInt(-val)
5869 v0 := b.NewValue0(v.Pos, OpRISCV64SUB, t)
5874 // match: (NEG (NEG x))
5877 if v_0.Op != OpRISCV64NEG {
5884 // match: (NEG (MOVDconst [x]))
5885 // result: (MOVDconst [-x])
5887 if v_0.Op != OpRISCV64MOVDconst {
5890 x := auxIntToInt64(v_0.AuxInt)
5891 v.reset(OpRISCV64MOVDconst)
5892 v.AuxInt = int64ToAuxInt(-x)
5897 func rewriteValueRISCV64_OpRISCV64NEGW(v *Value) bool {
5899 // match: (NEGW (MOVDconst [x]))
5900 // result: (MOVDconst [int64(int32(-x))])
5902 if v_0.Op != OpRISCV64MOVDconst {
5905 x := auxIntToInt64(v_0.AuxInt)
5906 v.reset(OpRISCV64MOVDconst)
5907 v.AuxInt = int64ToAuxInt(int64(int32(-x)))
5912 func rewriteValueRISCV64_OpRISCV64OR(v *Value) bool {
5915 // match: (OR (MOVDconst [val]) x)
5916 // cond: is32Bit(val)
5917 // result: (ORI [val] x)
5919 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
5920 if v_0.Op != OpRISCV64MOVDconst {
5923 val := auxIntToInt64(v_0.AuxInt)
5925 if !(is32Bit(val)) {
5928 v.reset(OpRISCV64ORI)
5929 v.AuxInt = int64ToAuxInt(val)
5937 func rewriteValueRISCV64_OpRISCV64ORI(v *Value) bool {
5939 // match: (ORI [0] x)
5942 if auxIntToInt64(v.AuxInt) != 0 {
5949 // match: (ORI [-1] x)
5950 // result: (MOVDconst [-1])
5952 if auxIntToInt64(v.AuxInt) != -1 {
5955 v.reset(OpRISCV64MOVDconst)
5956 v.AuxInt = int64ToAuxInt(-1)
5959 // match: (ORI [x] (MOVDconst [y]))
5960 // result: (MOVDconst [x | y])
5962 x := auxIntToInt64(v.AuxInt)
5963 if v_0.Op != OpRISCV64MOVDconst {
5966 y := auxIntToInt64(v_0.AuxInt)
5967 v.reset(OpRISCV64MOVDconst)
5968 v.AuxInt = int64ToAuxInt(x | y)
5971 // match: (ORI [x] (ORI [y] z))
5972 // result: (ORI [x | y] z)
5974 x := auxIntToInt64(v.AuxInt)
5975 if v_0.Op != OpRISCV64ORI {
5978 y := auxIntToInt64(v_0.AuxInt)
5980 v.reset(OpRISCV64ORI)
5981 v.AuxInt = int64ToAuxInt(x | y)
5987 func rewriteValueRISCV64_OpRISCV64SEQZ(v *Value) bool {
5989 // match: (SEQZ (NEG x))
5992 if v_0.Op != OpRISCV64NEG {
5996 v.reset(OpRISCV64SEQZ)
6000 // match: (SEQZ (SEQZ x))
6003 if v_0.Op != OpRISCV64SEQZ {
6007 v.reset(OpRISCV64SNEZ)
6011 // match: (SEQZ (SNEZ x))
6014 if v_0.Op != OpRISCV64SNEZ {
6018 v.reset(OpRISCV64SEQZ)
6024 func rewriteValueRISCV64_OpRISCV64SLL(v *Value) bool {
6027 // match: (SLL x (MOVDconst [val]))
6028 // result: (SLLI [int64(val&63)] x)
6031 if v_1.Op != OpRISCV64MOVDconst {
6034 val := auxIntToInt64(v_1.AuxInt)
6035 v.reset(OpRISCV64SLLI)
6036 v.AuxInt = int64ToAuxInt(int64(val & 63))
6042 func rewriteValueRISCV64_OpRISCV64SLLI(v *Value) bool {
6044 // match: (SLLI [x] (MOVDconst [y]))
6045 // cond: is32Bit(y << uint32(x))
6046 // result: (MOVDconst [y << uint32(x)])
6048 x := auxIntToInt64(v.AuxInt)
6049 if v_0.Op != OpRISCV64MOVDconst {
6052 y := auxIntToInt64(v_0.AuxInt)
6053 if !(is32Bit(y << uint32(x))) {
6056 v.reset(OpRISCV64MOVDconst)
6057 v.AuxInt = int64ToAuxInt(y << uint32(x))
6062 func rewriteValueRISCV64_OpRISCV64SLT(v *Value) bool {
6065 // match: (SLT x (MOVDconst [val]))
6066 // cond: val >= -2048 && val <= 2047
6067 // result: (SLTI [val] x)
6070 if v_1.Op != OpRISCV64MOVDconst {
6073 val := auxIntToInt64(v_1.AuxInt)
6074 if !(val >= -2048 && val <= 2047) {
6077 v.reset(OpRISCV64SLTI)
6078 v.AuxInt = int64ToAuxInt(val)
6083 // result: (MOVDconst [0])
6089 v.reset(OpRISCV64MOVDconst)
6090 v.AuxInt = int64ToAuxInt(0)
6095 func rewriteValueRISCV64_OpRISCV64SLTI(v *Value) bool {
6097 // match: (SLTI [x] (MOVDconst [y]))
6098 // result: (MOVDconst [b2i(int64(y) < int64(x))])
6100 x := auxIntToInt64(v.AuxInt)
6101 if v_0.Op != OpRISCV64MOVDconst {
6104 y := auxIntToInt64(v_0.AuxInt)
6105 v.reset(OpRISCV64MOVDconst)
6106 v.AuxInt = int64ToAuxInt(b2i(int64(y) < int64(x)))
6109 // match: (SLTI [x] (ANDI [y] _))
6110 // cond: y >= 0 && int64(y) < int64(x)
6111 // result: (MOVDconst [1])
6113 x := auxIntToInt64(v.AuxInt)
6114 if v_0.Op != OpRISCV64ANDI {
6117 y := auxIntToInt64(v_0.AuxInt)
6118 if !(y >= 0 && int64(y) < int64(x)) {
6121 v.reset(OpRISCV64MOVDconst)
6122 v.AuxInt = int64ToAuxInt(1)
6125 // match: (SLTI [x] (ORI [y] _))
6126 // cond: y >= 0 && int64(y) >= int64(x)
6127 // result: (MOVDconst [0])
6129 x := auxIntToInt64(v.AuxInt)
6130 if v_0.Op != OpRISCV64ORI {
6133 y := auxIntToInt64(v_0.AuxInt)
6134 if !(y >= 0 && int64(y) >= int64(x)) {
6137 v.reset(OpRISCV64MOVDconst)
6138 v.AuxInt = int64ToAuxInt(0)
6143 func rewriteValueRISCV64_OpRISCV64SLTIU(v *Value) bool {
6145 // match: (SLTIU [x] (MOVDconst [y]))
6146 // result: (MOVDconst [b2i(uint64(y) < uint64(x))])
6148 x := auxIntToInt64(v.AuxInt)
6149 if v_0.Op != OpRISCV64MOVDconst {
6152 y := auxIntToInt64(v_0.AuxInt)
6153 v.reset(OpRISCV64MOVDconst)
6154 v.AuxInt = int64ToAuxInt(b2i(uint64(y) < uint64(x)))
6157 // match: (SLTIU [x] (ANDI [y] _))
6158 // cond: y >= 0 && uint64(y) < uint64(x)
6159 // result: (MOVDconst [1])
6161 x := auxIntToInt64(v.AuxInt)
6162 if v_0.Op != OpRISCV64ANDI {
6165 y := auxIntToInt64(v_0.AuxInt)
6166 if !(y >= 0 && uint64(y) < uint64(x)) {
6169 v.reset(OpRISCV64MOVDconst)
6170 v.AuxInt = int64ToAuxInt(1)
6173 // match: (SLTIU [x] (ORI [y] _))
6174 // cond: y >= 0 && uint64(y) >= uint64(x)
6175 // result: (MOVDconst [0])
6177 x := auxIntToInt64(v.AuxInt)
6178 if v_0.Op != OpRISCV64ORI {
6181 y := auxIntToInt64(v_0.AuxInt)
6182 if !(y >= 0 && uint64(y) >= uint64(x)) {
6185 v.reset(OpRISCV64MOVDconst)
6186 v.AuxInt = int64ToAuxInt(0)
6191 func rewriteValueRISCV64_OpRISCV64SLTU(v *Value) bool {
6194 // match: (SLTU x (MOVDconst [val]))
6195 // cond: val >= -2048 && val <= 2047
6196 // result: (SLTIU [val] x)
6199 if v_1.Op != OpRISCV64MOVDconst {
6202 val := auxIntToInt64(v_1.AuxInt)
6203 if !(val >= -2048 && val <= 2047) {
6206 v.reset(OpRISCV64SLTIU)
6207 v.AuxInt = int64ToAuxInt(val)
6211 // match: (SLTU x x)
6212 // result: (MOVDconst [0])
6218 v.reset(OpRISCV64MOVDconst)
6219 v.AuxInt = int64ToAuxInt(0)
6224 func rewriteValueRISCV64_OpRISCV64SNEZ(v *Value) bool {
6226 // match: (SNEZ (NEG x))
6229 if v_0.Op != OpRISCV64NEG {
6233 v.reset(OpRISCV64SNEZ)
6237 // match: (SNEZ (SEQZ x))
6240 if v_0.Op != OpRISCV64SEQZ {
6244 v.reset(OpRISCV64SEQZ)
6248 // match: (SNEZ (SNEZ x))
6251 if v_0.Op != OpRISCV64SNEZ {
6255 v.reset(OpRISCV64SNEZ)
6261 func rewriteValueRISCV64_OpRISCV64SRA(v *Value) bool {
6264 // match: (SRA x (MOVDconst [val]))
6265 // result: (SRAI [int64(val&63)] x)
6268 if v_1.Op != OpRISCV64MOVDconst {
6271 val := auxIntToInt64(v_1.AuxInt)
6272 v.reset(OpRISCV64SRAI)
6273 v.AuxInt = int64ToAuxInt(int64(val & 63))
6279 func rewriteValueRISCV64_OpRISCV64SRAI(v *Value) bool {
6281 // match: (SRAI [x] (MOVDconst [y]))
6282 // result: (MOVDconst [int64(y) >> uint32(x)])
6284 x := auxIntToInt64(v.AuxInt)
6285 if v_0.Op != OpRISCV64MOVDconst {
6288 y := auxIntToInt64(v_0.AuxInt)
6289 v.reset(OpRISCV64MOVDconst)
6290 v.AuxInt = int64ToAuxInt(int64(y) >> uint32(x))
6295 func rewriteValueRISCV64_OpRISCV64SRL(v *Value) bool {
6298 // match: (SRL <t> (MOVWUreg x) y)
6299 // result: (SRLW <t> x y)
6302 if v_0.Op != OpRISCV64MOVWUreg {
6307 v.reset(OpRISCV64SRLW)
6312 // match: (SRL x (MOVDconst [val]))
6313 // result: (SRLI [int64(val&63)] x)
6316 if v_1.Op != OpRISCV64MOVDconst {
6319 val := auxIntToInt64(v_1.AuxInt)
6320 v.reset(OpRISCV64SRLI)
6321 v.AuxInt = int64ToAuxInt(int64(val & 63))
6327 func rewriteValueRISCV64_OpRISCV64SRLI(v *Value) bool {
6329 // match: (SRLI <t> [x] (MOVWUreg y))
6330 // result: (SRLIW <t> [x] y)
6333 x := auxIntToInt64(v.AuxInt)
6334 if v_0.Op != OpRISCV64MOVWUreg {
6338 v.reset(OpRISCV64SRLIW)
6340 v.AuxInt = int64ToAuxInt(x)
6344 // match: (SRLI [x] (MOVDconst [y]))
6345 // result: (MOVDconst [int64(uint64(y) >> uint32(x))])
6347 x := auxIntToInt64(v.AuxInt)
6348 if v_0.Op != OpRISCV64MOVDconst {
6351 y := auxIntToInt64(v_0.AuxInt)
6352 v.reset(OpRISCV64MOVDconst)
6353 v.AuxInt = int64ToAuxInt(int64(uint64(y) >> uint32(x)))
6358 func rewriteValueRISCV64_OpRISCV64SRLW(v *Value) bool {
6361 // match: (SRLW x (MOVDconst [val]))
6362 // result: (SRLIW [int64(val&31)] x)
6365 if v_1.Op != OpRISCV64MOVDconst {
6368 val := auxIntToInt64(v_1.AuxInt)
6369 v.reset(OpRISCV64SRLIW)
6370 v.AuxInt = int64ToAuxInt(int64(val & 31))
6376 func rewriteValueRISCV64_OpRISCV64SUB(v *Value) bool {
6380 // match: (SUB x (MOVDconst [val]))
6381 // cond: is32Bit(-val)
6382 // result: (ADDI [-val] x)
6385 if v_1.Op != OpRISCV64MOVDconst {
6388 val := auxIntToInt64(v_1.AuxInt)
6389 if !(is32Bit(-val)) {
6392 v.reset(OpRISCV64ADDI)
6393 v.AuxInt = int64ToAuxInt(-val)
6397 // match: (SUB <t> (MOVDconst [val]) y)
6398 // cond: is32Bit(-val)
6399 // result: (NEG (ADDI <t> [-val] y))
6402 if v_0.Op != OpRISCV64MOVDconst {
6405 val := auxIntToInt64(v_0.AuxInt)
6407 if !(is32Bit(-val)) {
6410 v.reset(OpRISCV64NEG)
6411 v0 := b.NewValue0(v.Pos, OpRISCV64ADDI, t)
6412 v0.AuxInt = int64ToAuxInt(-val)
6417 // match: (SUB x (MOVDconst [0]))
6421 if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
6427 // match: (SUB (MOVDconst [0]) x)
6430 if v_0.Op != OpRISCV64MOVDconst || auxIntToInt64(v_0.AuxInt) != 0 {
6434 v.reset(OpRISCV64NEG)
6440 func rewriteValueRISCV64_OpRISCV64SUBW(v *Value) bool {
6443 // match: (SUBW x (MOVDconst [0]))
6444 // result: (ADDIW [0] x)
6447 if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
6450 v.reset(OpRISCV64ADDIW)
6451 v.AuxInt = int64ToAuxInt(0)
6455 // match: (SUBW (MOVDconst [0]) x)
6458 if v_0.Op != OpRISCV64MOVDconst || auxIntToInt64(v_0.AuxInt) != 0 {
6462 v.reset(OpRISCV64NEGW)
6468 func rewriteValueRISCV64_OpRISCV64XOR(v *Value) bool {
6471 // match: (XOR (MOVDconst [val]) x)
6472 // cond: is32Bit(val)
6473 // result: (XORI [val] x)
6475 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
6476 if v_0.Op != OpRISCV64MOVDconst {
6479 val := auxIntToInt64(v_0.AuxInt)
6481 if !(is32Bit(val)) {
6484 v.reset(OpRISCV64XORI)
6485 v.AuxInt = int64ToAuxInt(val)
6493 func rewriteValueRISCV64_OpRotateLeft16(v *Value) bool {
6497 typ := &b.Func.Config.Types
6498 // match: (RotateLeft16 <t> x (MOVDconst [c]))
6499 // result: (Or16 (Lsh16x64 <t> x (MOVDconst [c&15])) (Rsh16Ux64 <t> x (MOVDconst [-c&15])))
6503 if v_1.Op != OpRISCV64MOVDconst {
6506 c := auxIntToInt64(v_1.AuxInt)
6508 v0 := b.NewValue0(v.Pos, OpLsh16x64, t)
6509 v1 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
6510 v1.AuxInt = int64ToAuxInt(c & 15)
6512 v2 := b.NewValue0(v.Pos, OpRsh16Ux64, t)
6513 v3 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
6514 v3.AuxInt = int64ToAuxInt(-c & 15)
6521 func rewriteValueRISCV64_OpRotateLeft32(v *Value) bool {
6525 typ := &b.Func.Config.Types
6526 // match: (RotateLeft32 <t> x (MOVDconst [c]))
6527 // result: (Or32 (Lsh32x64 <t> x (MOVDconst [c&31])) (Rsh32Ux64 <t> x (MOVDconst [-c&31])))
6531 if v_1.Op != OpRISCV64MOVDconst {
6534 c := auxIntToInt64(v_1.AuxInt)
6536 v0 := b.NewValue0(v.Pos, OpLsh32x64, t)
6537 v1 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
6538 v1.AuxInt = int64ToAuxInt(c & 31)
6540 v2 := b.NewValue0(v.Pos, OpRsh32Ux64, t)
6541 v3 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
6542 v3.AuxInt = int64ToAuxInt(-c & 31)
6549 func rewriteValueRISCV64_OpRotateLeft64(v *Value) bool {
6553 typ := &b.Func.Config.Types
6554 // match: (RotateLeft64 <t> x (MOVDconst [c]))
6555 // result: (Or64 (Lsh64x64 <t> x (MOVDconst [c&63])) (Rsh64Ux64 <t> x (MOVDconst [-c&63])))
6559 if v_1.Op != OpRISCV64MOVDconst {
6562 c := auxIntToInt64(v_1.AuxInt)
6564 v0 := b.NewValue0(v.Pos, OpLsh64x64, t)
6565 v1 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
6566 v1.AuxInt = int64ToAuxInt(c & 63)
6568 v2 := b.NewValue0(v.Pos, OpRsh64Ux64, t)
6569 v3 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
6570 v3.AuxInt = int64ToAuxInt(-c & 63)
6577 func rewriteValueRISCV64_OpRotateLeft8(v *Value) bool {
6581 typ := &b.Func.Config.Types
6582 // match: (RotateLeft8 <t> x (MOVDconst [c]))
6583 // result: (Or8 (Lsh8x64 <t> x (MOVDconst [c&7])) (Rsh8Ux64 <t> x (MOVDconst [-c&7])))
6587 if v_1.Op != OpRISCV64MOVDconst {
6590 c := auxIntToInt64(v_1.AuxInt)
6592 v0 := b.NewValue0(v.Pos, OpLsh8x64, t)
6593 v1 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
6594 v1.AuxInt = int64ToAuxInt(c & 7)
6596 v2 := b.NewValue0(v.Pos, OpRsh8Ux64, t)
6597 v3 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
6598 v3.AuxInt = int64ToAuxInt(-c & 7)
6605 func rewriteValueRISCV64_OpRsh16Ux16(v *Value) bool {
6609 typ := &b.Func.Config.Types
6610 // match: (Rsh16Ux16 <t> x y)
6611 // cond: !shiftIsBounded(v)
6612 // result: (AND (SRL <t> (ZeroExt16to64 x) y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
6617 if !(!shiftIsBounded(v)) {
6620 v.reset(OpRISCV64AND)
6621 v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
6622 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6625 v2 := b.NewValue0(v.Pos, OpNeg16, t)
6626 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
6627 v3.AuxInt = int64ToAuxInt(64)
6628 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6635 // match: (Rsh16Ux16 x y)
6636 // cond: shiftIsBounded(v)
6637 // result: (SRL (ZeroExt16to64 x) y)
6641 if !(shiftIsBounded(v)) {
6644 v.reset(OpRISCV64SRL)
6645 v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6652 func rewriteValueRISCV64_OpRsh16Ux32(v *Value) bool {
6656 typ := &b.Func.Config.Types
6657 // match: (Rsh16Ux32 <t> x y)
6658 // cond: !shiftIsBounded(v)
6659 // result: (AND (SRL <t> (ZeroExt16to64 x) y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
6664 if !(!shiftIsBounded(v)) {
6667 v.reset(OpRISCV64AND)
6668 v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
6669 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6672 v2 := b.NewValue0(v.Pos, OpNeg16, t)
6673 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
6674 v3.AuxInt = int64ToAuxInt(64)
6675 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6682 // match: (Rsh16Ux32 x y)
6683 // cond: shiftIsBounded(v)
6684 // result: (SRL (ZeroExt16to64 x) y)
6688 if !(shiftIsBounded(v)) {
6691 v.reset(OpRISCV64SRL)
6692 v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6699 func rewriteValueRISCV64_OpRsh16Ux64(v *Value) bool {
6703 typ := &b.Func.Config.Types
6704 // match: (Rsh16Ux64 <t> x y)
6705 // cond: !shiftIsBounded(v)
6706 // result: (AND (SRL <t> (ZeroExt16to64 x) y) (Neg16 <t> (SLTIU <t> [64] y)))
6711 if !(!shiftIsBounded(v)) {
6714 v.reset(OpRISCV64AND)
6715 v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
6716 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6719 v2 := b.NewValue0(v.Pos, OpNeg16, t)
6720 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
6721 v3.AuxInt = int64ToAuxInt(64)
6727 // match: (Rsh16Ux64 x y)
6728 // cond: shiftIsBounded(v)
6729 // result: (SRL (ZeroExt16to64 x) y)
6733 if !(shiftIsBounded(v)) {
6736 v.reset(OpRISCV64SRL)
6737 v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6744 func rewriteValueRISCV64_OpRsh16Ux8(v *Value) bool {
6748 typ := &b.Func.Config.Types
6749 // match: (Rsh16Ux8 <t> x y)
6750 // cond: !shiftIsBounded(v)
6751 // result: (AND (SRL <t> (ZeroExt16to64 x) y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
6756 if !(!shiftIsBounded(v)) {
6759 v.reset(OpRISCV64AND)
6760 v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
6761 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6764 v2 := b.NewValue0(v.Pos, OpNeg16, t)
6765 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
6766 v3.AuxInt = int64ToAuxInt(64)
6767 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
6774 // match: (Rsh16Ux8 x y)
6775 // cond: shiftIsBounded(v)
6776 // result: (SRL (ZeroExt16to64 x) y)
6780 if !(shiftIsBounded(v)) {
6783 v.reset(OpRISCV64SRL)
6784 v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6791 func rewriteValueRISCV64_OpRsh16x16(v *Value) bool {
6795 typ := &b.Func.Config.Types
6796 // match: (Rsh16x16 <t> x y)
6797 // cond: !shiftIsBounded(v)
6798 // result: (SRA <t> (SignExt16to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt16to64 y)))))
6803 if !(!shiftIsBounded(v)) {
6806 v.reset(OpRISCV64SRA)
6808 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
6810 v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
6811 v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
6812 v2.AuxInt = int64ToAuxInt(-1)
6813 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
6814 v3.AuxInt = int64ToAuxInt(64)
6815 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6823 // match: (Rsh16x16 x y)
6824 // cond: shiftIsBounded(v)
6825 // result: (SRA (SignExt16to64 x) y)
6829 if !(shiftIsBounded(v)) {
6832 v.reset(OpRISCV64SRA)
6833 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
6840 func rewriteValueRISCV64_OpRsh16x32(v *Value) bool {
6844 typ := &b.Func.Config.Types
6845 // match: (Rsh16x32 <t> x y)
6846 // cond: !shiftIsBounded(v)
6847 // result: (SRA <t> (SignExt16to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt32to64 y)))))
6852 if !(!shiftIsBounded(v)) {
6855 v.reset(OpRISCV64SRA)
6857 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
6859 v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
6860 v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
6861 v2.AuxInt = int64ToAuxInt(-1)
6862 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
6863 v3.AuxInt = int64ToAuxInt(64)
6864 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6872 // match: (Rsh16x32 x y)
6873 // cond: shiftIsBounded(v)
6874 // result: (SRA (SignExt16to64 x) y)
6878 if !(shiftIsBounded(v)) {
6881 v.reset(OpRISCV64SRA)
6882 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
6889 func rewriteValueRISCV64_OpRsh16x64(v *Value) bool {
6893 typ := &b.Func.Config.Types
6894 // match: (Rsh16x64 <t> x y)
6895 // cond: !shiftIsBounded(v)
6896 // result: (SRA <t> (SignExt16to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] y))))
6901 if !(!shiftIsBounded(v)) {
6904 v.reset(OpRISCV64SRA)
6906 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
6908 v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
6909 v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
6910 v2.AuxInt = int64ToAuxInt(-1)
6911 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
6912 v3.AuxInt = int64ToAuxInt(64)
6919 // match: (Rsh16x64 x y)
6920 // cond: shiftIsBounded(v)
6921 // result: (SRA (SignExt16to64 x) y)
6925 if !(shiftIsBounded(v)) {
6928 v.reset(OpRISCV64SRA)
6929 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
6936 func rewriteValueRISCV64_OpRsh16x8(v *Value) bool {
6940 typ := &b.Func.Config.Types
6941 // match: (Rsh16x8 <t> x y)
6942 // cond: !shiftIsBounded(v)
6943 // result: (SRA <t> (SignExt16to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt8to64 y)))))
6948 if !(!shiftIsBounded(v)) {
6951 v.reset(OpRISCV64SRA)
6953 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
6955 v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
6956 v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
6957 v2.AuxInt = int64ToAuxInt(-1)
6958 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
6959 v3.AuxInt = int64ToAuxInt(64)
6960 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
6968 // match: (Rsh16x8 x y)
6969 // cond: shiftIsBounded(v)
6970 // result: (SRA (SignExt16to64 x) y)
6974 if !(shiftIsBounded(v)) {
6977 v.reset(OpRISCV64SRA)
6978 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
6985 func rewriteValueRISCV64_OpRsh32Ux16(v *Value) bool {
6989 typ := &b.Func.Config.Types
6990 // match: (Rsh32Ux16 <t> x y)
6991 // cond: !shiftIsBounded(v)
6992 // result: (AND (SRL <t> (ZeroExt32to64 x) y) (Neg32 <t> (SLTIU <t> [32] (ZeroExt16to64 y))))
6997 if !(!shiftIsBounded(v)) {
7000 v.reset(OpRISCV64AND)
7001 v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
7002 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
7005 v2 := b.NewValue0(v.Pos, OpNeg32, t)
7006 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
7007 v3.AuxInt = int64ToAuxInt(32)
7008 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
7015 // match: (Rsh32Ux16 x y)
7016 // cond: shiftIsBounded(v)
7017 // result: (SRL (ZeroExt32to64 x) y)
7021 if !(shiftIsBounded(v)) {
7024 v.reset(OpRISCV64SRL)
7025 v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
7032 func rewriteValueRISCV64_OpRsh32Ux32(v *Value) bool {
7036 typ := &b.Func.Config.Types
7037 // match: (Rsh32Ux32 <t> x y)
7038 // cond: !shiftIsBounded(v)
7039 // result: (AND (SRL <t> (ZeroExt32to64 x) y) (Neg32 <t> (SLTIU <t> [32] (ZeroExt32to64 y))))
7044 if !(!shiftIsBounded(v)) {
7047 v.reset(OpRISCV64AND)
7048 v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
7049 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
7052 v2 := b.NewValue0(v.Pos, OpNeg32, t)
7053 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
7054 v3.AuxInt = int64ToAuxInt(32)
7055 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
7062 // match: (Rsh32Ux32 x y)
7063 // cond: shiftIsBounded(v)
7064 // result: (SRL (ZeroExt32to64 x) y)
7068 if !(shiftIsBounded(v)) {
7071 v.reset(OpRISCV64SRL)
7072 v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
7079 func rewriteValueRISCV64_OpRsh32Ux64(v *Value) bool {
7083 typ := &b.Func.Config.Types
7084 // match: (Rsh32Ux64 <t> x y)
7085 // cond: !shiftIsBounded(v)
7086 // result: (AND (SRL <t> (ZeroExt32to64 x) y) (Neg32 <t> (SLTIU <t> [32] y)))
7091 if !(!shiftIsBounded(v)) {
7094 v.reset(OpRISCV64AND)
7095 v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
7096 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
7099 v2 := b.NewValue0(v.Pos, OpNeg32, t)
7100 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
7101 v3.AuxInt = int64ToAuxInt(32)
7107 // match: (Rsh32Ux64 x y)
7108 // cond: shiftIsBounded(v)
7109 // result: (SRL (ZeroExt32to64 x) y)
7113 if !(shiftIsBounded(v)) {
7116 v.reset(OpRISCV64SRL)
7117 v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
7124 func rewriteValueRISCV64_OpRsh32Ux8(v *Value) bool {
7128 typ := &b.Func.Config.Types
7129 // match: (Rsh32Ux8 <t> x y)
7130 // cond: !shiftIsBounded(v)
7131 // result: (AND (SRL <t> (ZeroExt32to64 x) y) (Neg32 <t> (SLTIU <t> [32] (ZeroExt8to64 y))))
7136 if !(!shiftIsBounded(v)) {
7139 v.reset(OpRISCV64AND)
7140 v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
7141 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
7144 v2 := b.NewValue0(v.Pos, OpNeg32, t)
7145 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
7146 v3.AuxInt = int64ToAuxInt(32)
7147 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
7154 // match: (Rsh32Ux8 x y)
7155 // cond: shiftIsBounded(v)
7156 // result: (SRL (ZeroExt32to64 x) y)
7160 if !(shiftIsBounded(v)) {
7163 v.reset(OpRISCV64SRL)
7164 v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
7171 func rewriteValueRISCV64_OpRsh32x16(v *Value) bool {
7175 typ := &b.Func.Config.Types
7176 // match: (Rsh32x16 <t> x y)
7177 // cond: !shiftIsBounded(v)
7178 // result: (SRA <t> (SignExt32to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt16to64 y)))))
7183 if !(!shiftIsBounded(v)) {
7186 v.reset(OpRISCV64SRA)
7188 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
7190 v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
7191 v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
7192 v2.AuxInt = int64ToAuxInt(-1)
7193 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
7194 v3.AuxInt = int64ToAuxInt(64)
7195 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
7203 // match: (Rsh32x16 x y)
7204 // cond: shiftIsBounded(v)
7205 // result: (SRA (SignExt32to64 x) y)
7209 if !(shiftIsBounded(v)) {
7212 v.reset(OpRISCV64SRA)
7213 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
7220 func rewriteValueRISCV64_OpRsh32x32(v *Value) bool {
7224 typ := &b.Func.Config.Types
7225 // match: (Rsh32x32 <t> x y)
7226 // cond: !shiftIsBounded(v)
7227 // result: (SRA <t> (SignExt32to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt32to64 y)))))
7232 if !(!shiftIsBounded(v)) {
7235 v.reset(OpRISCV64SRA)
7237 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
7239 v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
7240 v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
7241 v2.AuxInt = int64ToAuxInt(-1)
7242 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
7243 v3.AuxInt = int64ToAuxInt(64)
7244 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
7252 // match: (Rsh32x32 x y)
7253 // cond: shiftIsBounded(v)
7254 // result: (SRA (SignExt32to64 x) y)
7258 if !(shiftIsBounded(v)) {
7261 v.reset(OpRISCV64SRA)
7262 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
7269 func rewriteValueRISCV64_OpRsh32x64(v *Value) bool {
7273 typ := &b.Func.Config.Types
7274 // match: (Rsh32x64 <t> x y)
7275 // cond: !shiftIsBounded(v)
7276 // result: (SRA <t> (SignExt32to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] y))))
7281 if !(!shiftIsBounded(v)) {
7284 v.reset(OpRISCV64SRA)
7286 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
7288 v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
7289 v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
7290 v2.AuxInt = int64ToAuxInt(-1)
7291 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
7292 v3.AuxInt = int64ToAuxInt(64)
7299 // match: (Rsh32x64 x y)
7300 // cond: shiftIsBounded(v)
7301 // result: (SRA (SignExt32to64 x) y)
7305 if !(shiftIsBounded(v)) {
7308 v.reset(OpRISCV64SRA)
7309 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
7316 func rewriteValueRISCV64_OpRsh32x8(v *Value) bool {
7320 typ := &b.Func.Config.Types
7321 // match: (Rsh32x8 <t> x y)
7322 // cond: !shiftIsBounded(v)
7323 // result: (SRA <t> (SignExt32to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt8to64 y)))))
7328 if !(!shiftIsBounded(v)) {
7331 v.reset(OpRISCV64SRA)
7333 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
7335 v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
7336 v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
7337 v2.AuxInt = int64ToAuxInt(-1)
7338 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
7339 v3.AuxInt = int64ToAuxInt(64)
7340 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
7348 // match: (Rsh32x8 x y)
7349 // cond: shiftIsBounded(v)
7350 // result: (SRA (SignExt32to64 x) y)
7354 if !(shiftIsBounded(v)) {
7357 v.reset(OpRISCV64SRA)
7358 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
7365 func rewriteValueRISCV64_OpRsh64Ux16(v *Value) bool {
7369 typ := &b.Func.Config.Types
7370 // match: (Rsh64Ux16 <t> x y)
7371 // cond: !shiftIsBounded(v)
7372 // result: (AND (SRL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
7377 if !(!shiftIsBounded(v)) {
7380 v.reset(OpRISCV64AND)
7381 v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
7383 v1 := b.NewValue0(v.Pos, OpNeg64, t)
7384 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
7385 v2.AuxInt = int64ToAuxInt(64)
7386 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
7393 // match: (Rsh64Ux16 x y)
7394 // cond: shiftIsBounded(v)
7395 // result: (SRL x y)
7399 if !(shiftIsBounded(v)) {
7402 v.reset(OpRISCV64SRL)
7408 func rewriteValueRISCV64_OpRsh64Ux32(v *Value) bool {
7412 typ := &b.Func.Config.Types
7413 // match: (Rsh64Ux32 <t> x y)
7414 // cond: !shiftIsBounded(v)
7415 // result: (AND (SRL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
7420 if !(!shiftIsBounded(v)) {
7423 v.reset(OpRISCV64AND)
7424 v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
7426 v1 := b.NewValue0(v.Pos, OpNeg64, t)
7427 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
7428 v2.AuxInt = int64ToAuxInt(64)
7429 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
7436 // match: (Rsh64Ux32 x y)
7437 // cond: shiftIsBounded(v)
7438 // result: (SRL x y)
7442 if !(shiftIsBounded(v)) {
7445 v.reset(OpRISCV64SRL)
// rewriteValueRISCV64_OpRsh64Ux64 lowers Rsh64Ux64 (64-bit unsigned right
// shift by a 64-bit amount). No extension of y is needed; unbounded amounts
// mask the SRL result with Neg64(SLTIU [64] y) so shifts >= 64 yield 0,
// bounded amounts lower to a plain SRL.
// NOTE: generated from _gen/RISCV64.rules — change the rules file, not this.
7451 func rewriteValueRISCV64_OpRsh64Ux64(v *Value) bool {
7455 // match: (Rsh64Ux64 <t> x y)
7456 // cond: !shiftIsBounded(v)
7457 // result: (AND (SRL <t> x y) (Neg64 <t> (SLTIU <t> [64] y)))
7462 if !(!shiftIsBounded(v)) {
7465 v.reset(OpRISCV64AND)
7466 v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
7468 v1 := b.NewValue0(v.Pos, OpNeg64, t)
7469 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
7470 v2.AuxInt = int64ToAuxInt(64)
7476 // match: (Rsh64Ux64 x y)
7477 // cond: shiftIsBounded(v)
7478 // result: (SRL x y)
7482 if !(shiftIsBounded(v)) {
7485 v.reset(OpRISCV64SRL)
// rewriteValueRISCV64_OpRsh64Ux8 lowers Rsh64Ux8 (64-bit unsigned right
// shift by an 8-bit amount). Unbounded amounts mask the SRL result with
// Neg64(SLTIU [64] (ZeroExt8to64 y)) so shifts >= 64 yield 0; bounded
// amounts lower to a plain SRL.
// NOTE: generated from _gen/RISCV64.rules — change the rules file, not this.
7491 func rewriteValueRISCV64_OpRsh64Ux8(v *Value) bool {
7495 typ := &b.Func.Config.Types
7496 // match: (Rsh64Ux8 <t> x y)
7497 // cond: !shiftIsBounded(v)
7498 // result: (AND (SRL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
7503 if !(!shiftIsBounded(v)) {
7506 v.reset(OpRISCV64AND)
7507 v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
7509 v1 := b.NewValue0(v.Pos, OpNeg64, t)
7510 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
7511 v2.AuxInt = int64ToAuxInt(64)
7512 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
7519 // match: (Rsh64Ux8 x y)
7520 // cond: shiftIsBounded(v)
7521 // result: (SRL x y)
7525 if !(shiftIsBounded(v)) {
7528 v.reset(OpRISCV64SRL)
// rewriteValueRISCV64_OpRsh64x16 lowers Rsh64x16 (64-bit arithmetic right
// shift by a 16-bit amount). For unbounded amounts the shift count is
// saturated: SLTIU [64] gives 1 when y < 64, ADDI [-1] turns that into
// 0 (in range) or -1 (out of range), and ORing with y leaves y unchanged
// or forces an all-ones count, so SRA fills with the sign bit as required
// for oversized signed shifts. Bounded amounts lower to a plain SRA.
// NOTE: generated from _gen/RISCV64.rules — change the rules file, not this.
7534 func rewriteValueRISCV64_OpRsh64x16(v *Value) bool {
7538 typ := &b.Func.Config.Types
7539 // match: (Rsh64x16 <t> x y)
7540 // cond: !shiftIsBounded(v)
7541 // result: (SRA <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt16to64 y)))))
7546 if !(!shiftIsBounded(v)) {
7549 v.reset(OpRISCV64SRA)
7551 v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
7552 v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
7553 v1.AuxInt = int64ToAuxInt(-1)
7554 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
7555 v2.AuxInt = int64ToAuxInt(64)
7556 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
7564 // match: (Rsh64x16 x y)
7565 // cond: shiftIsBounded(v)
7566 // result: (SRA x y)
7570 if !(shiftIsBounded(v)) {
7573 v.reset(OpRISCV64SRA)
// rewriteValueRISCV64_OpRsh64x32 lowers Rsh64x32 (64-bit arithmetic right
// shift by a 32-bit amount). Unbounded amounts are saturated via
// OR y (ADDI [-1] (SLTIU [64] (ZeroExt32to64 y))) — y when in range,
// all-ones otherwise — so SRA fills with the sign bit for shifts >= 64.
// Bounded amounts lower to a plain SRA.
// NOTE: generated from _gen/RISCV64.rules — change the rules file, not this.
7579 func rewriteValueRISCV64_OpRsh64x32(v *Value) bool {
7583 typ := &b.Func.Config.Types
7584 // match: (Rsh64x32 <t> x y)
7585 // cond: !shiftIsBounded(v)
7586 // result: (SRA <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt32to64 y)))))
7591 if !(!shiftIsBounded(v)) {
7594 v.reset(OpRISCV64SRA)
7596 v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
7597 v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
7598 v1.AuxInt = int64ToAuxInt(-1)
7599 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
7600 v2.AuxInt = int64ToAuxInt(64)
7601 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
7609 // match: (Rsh64x32 x y)
7610 // cond: shiftIsBounded(v)
7611 // result: (SRA x y)
7615 if !(shiftIsBounded(v)) {
7618 v.reset(OpRISCV64SRA)
// rewriteValueRISCV64_OpRsh64x64 lowers Rsh64x64 (64-bit arithmetic right
// shift by a 64-bit amount). No extension of y is needed; unbounded amounts
// are saturated via OR y (ADDI [-1] (SLTIU [64] y)) so SRA fills with the
// sign bit for shifts >= 64. Bounded amounts lower to a plain SRA.
// NOTE: generated from _gen/RISCV64.rules — change the rules file, not this.
7624 func rewriteValueRISCV64_OpRsh64x64(v *Value) bool {
7628 // match: (Rsh64x64 <t> x y)
7629 // cond: !shiftIsBounded(v)
7630 // result: (SRA <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] y))))
7635 if !(!shiftIsBounded(v)) {
7638 v.reset(OpRISCV64SRA)
7640 v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
7641 v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
7642 v1.AuxInt = int64ToAuxInt(-1)
7643 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
7644 v2.AuxInt = int64ToAuxInt(64)
7651 // match: (Rsh64x64 x y)
7652 // cond: shiftIsBounded(v)
7653 // result: (SRA x y)
7657 if !(shiftIsBounded(v)) {
7660 v.reset(OpRISCV64SRA)
// rewriteValueRISCV64_OpRsh64x8 lowers Rsh64x8 (64-bit arithmetic right
// shift by an 8-bit amount). Unbounded amounts are saturated via
// OR y (ADDI [-1] (SLTIU [64] (ZeroExt8to64 y))) so SRA fills with the
// sign bit for shifts >= 64. Bounded amounts lower to a plain SRA.
// NOTE: generated from _gen/RISCV64.rules — change the rules file, not this.
7666 func rewriteValueRISCV64_OpRsh64x8(v *Value) bool {
7670 typ := &b.Func.Config.Types
7671 // match: (Rsh64x8 <t> x y)
7672 // cond: !shiftIsBounded(v)
7673 // result: (SRA <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt8to64 y)))))
7678 if !(!shiftIsBounded(v)) {
7681 v.reset(OpRISCV64SRA)
7683 v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
7684 v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
7685 v1.AuxInt = int64ToAuxInt(-1)
7686 v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
7687 v2.AuxInt = int64ToAuxInt(64)
7688 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
7696 // match: (Rsh64x8 x y)
7697 // cond: shiftIsBounded(v)
7698 // result: (SRA x y)
7702 if !(shiftIsBounded(v)) {
7705 v.reset(OpRISCV64SRA)
// rewriteValueRISCV64_OpRsh8Ux16 lowers Rsh8Ux16 (8-bit unsigned right
// shift by a 16-bit amount). x is zero-extended to 64 bits before SRL so
// the vacated high bits are zero; unbounded amounts additionally mask the
// result with Neg8(SLTIU [64] (ZeroExt16to64 y)) so shifts >= 64 yield 0.
// Bounded amounts lower to SRL of the zero-extended x.
// NOTE: generated from _gen/RISCV64.rules — change the rules file, not this.
7711 func rewriteValueRISCV64_OpRsh8Ux16(v *Value) bool {
7715 typ := &b.Func.Config.Types
7716 // match: (Rsh8Ux16 <t> x y)
7717 // cond: !shiftIsBounded(v)
7718 // result: (AND (SRL <t> (ZeroExt8to64 x) y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
7723 if !(!shiftIsBounded(v)) {
7726 v.reset(OpRISCV64AND)
7727 v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
7728 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
7731 v2 := b.NewValue0(v.Pos, OpNeg8, t)
7732 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
7733 v3.AuxInt = int64ToAuxInt(64)
7734 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
7741 // match: (Rsh8Ux16 x y)
7742 // cond: shiftIsBounded(v)
7743 // result: (SRL (ZeroExt8to64 x) y)
7747 if !(shiftIsBounded(v)) {
7750 v.reset(OpRISCV64SRL)
7751 v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
// rewriteValueRISCV64_OpRsh8Ux32 lowers Rsh8Ux32 (8-bit unsigned right
// shift by a 32-bit amount). x is zero-extended to 64 bits before SRL;
// unbounded amounts mask the result with Neg8(SLTIU [64] (ZeroExt32to64 y))
// so shifts >= 64 yield 0. Bounded amounts lower to SRL of the extended x.
// NOTE: generated from _gen/RISCV64.rules — change the rules file, not this.
7758 func rewriteValueRISCV64_OpRsh8Ux32(v *Value) bool {
7762 typ := &b.Func.Config.Types
7763 // match: (Rsh8Ux32 <t> x y)
7764 // cond: !shiftIsBounded(v)
7765 // result: (AND (SRL <t> (ZeroExt8to64 x) y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
7770 if !(!shiftIsBounded(v)) {
7773 v.reset(OpRISCV64AND)
7774 v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
7775 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
7778 v2 := b.NewValue0(v.Pos, OpNeg8, t)
7779 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
7780 v3.AuxInt = int64ToAuxInt(64)
7781 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
7788 // match: (Rsh8Ux32 x y)
7789 // cond: shiftIsBounded(v)
7790 // result: (SRL (ZeroExt8to64 x) y)
7794 if !(shiftIsBounded(v)) {
7797 v.reset(OpRISCV64SRL)
7798 v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
// rewriteValueRISCV64_OpRsh8Ux64 lowers Rsh8Ux64 (8-bit unsigned right
// shift by a 64-bit amount). x is zero-extended to 64 bits before SRL;
// unbounded amounts mask the result with Neg8(SLTIU [64] y) so shifts
// >= 64 yield 0. Bounded amounts lower to SRL of the extended x.
// NOTE: generated from _gen/RISCV64.rules — change the rules file, not this.
7805 func rewriteValueRISCV64_OpRsh8Ux64(v *Value) bool {
7809 typ := &b.Func.Config.Types
7810 // match: (Rsh8Ux64 <t> x y)
7811 // cond: !shiftIsBounded(v)
7812 // result: (AND (SRL <t> (ZeroExt8to64 x) y) (Neg8 <t> (SLTIU <t> [64] y)))
7817 if !(!shiftIsBounded(v)) {
7820 v.reset(OpRISCV64AND)
7821 v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
7822 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
7825 v2 := b.NewValue0(v.Pos, OpNeg8, t)
7826 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
7827 v3.AuxInt = int64ToAuxInt(64)
7833 // match: (Rsh8Ux64 x y)
7834 // cond: shiftIsBounded(v)
7835 // result: (SRL (ZeroExt8to64 x) y)
7839 if !(shiftIsBounded(v)) {
7842 v.reset(OpRISCV64SRL)
7843 v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
// rewriteValueRISCV64_OpRsh8Ux8 lowers Rsh8Ux8 (8-bit unsigned right shift
// by an 8-bit amount). x is zero-extended to 64 bits before SRL; unbounded
// amounts mask the result with Neg8(SLTIU [64] (ZeroExt8to64 y)) so shifts
// >= 64 yield 0. Bounded amounts lower to SRL of the extended x.
// NOTE: generated from _gen/RISCV64.rules — change the rules file, not this.
7850 func rewriteValueRISCV64_OpRsh8Ux8(v *Value) bool {
7854 typ := &b.Func.Config.Types
7855 // match: (Rsh8Ux8 <t> x y)
7856 // cond: !shiftIsBounded(v)
7857 // result: (AND (SRL <t> (ZeroExt8to64 x) y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
7862 if !(!shiftIsBounded(v)) {
7865 v.reset(OpRISCV64AND)
7866 v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
7867 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
7870 v2 := b.NewValue0(v.Pos, OpNeg8, t)
7871 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
7872 v3.AuxInt = int64ToAuxInt(64)
7873 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
7880 // match: (Rsh8Ux8 x y)
7881 // cond: shiftIsBounded(v)
7882 // result: (SRL (ZeroExt8to64 x) y)
7886 if !(shiftIsBounded(v)) {
7889 v.reset(OpRISCV64SRL)
7890 v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
// rewriteValueRISCV64_OpRsh8x16 lowers Rsh8x16 (8-bit arithmetic right
// shift by a 16-bit amount). x is sign-extended to 64 bits before SRA so
// the sign bit propagates; unbounded amounts are saturated via
// OR y (ADDI [-1] (SLTIU [64] (ZeroExt16to64 y))) so shifts >= 64 use an
// all-ones count and yield the sign fill. Bounded amounts lower to SRA of
// the sign-extended x.
// NOTE: generated from _gen/RISCV64.rules — change the rules file, not this.
7897 func rewriteValueRISCV64_OpRsh8x16(v *Value) bool {
7901 typ := &b.Func.Config.Types
7902 // match: (Rsh8x16 <t> x y)
7903 // cond: !shiftIsBounded(v)
7904 // result: (SRA <t> (SignExt8to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt16to64 y)))))
7909 if !(!shiftIsBounded(v)) {
7912 v.reset(OpRISCV64SRA)
7914 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
7916 v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
7917 v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
7918 v2.AuxInt = int64ToAuxInt(-1)
7919 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
7920 v3.AuxInt = int64ToAuxInt(64)
7921 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
7929 // match: (Rsh8x16 x y)
7930 // cond: shiftIsBounded(v)
7931 // result: (SRA (SignExt8to64 x) y)
7935 if !(shiftIsBounded(v)) {
7938 v.reset(OpRISCV64SRA)
7939 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
// rewriteValueRISCV64_OpRsh8x32 lowers Rsh8x32 (8-bit arithmetic right
// shift by a 32-bit amount). x is sign-extended to 64 bits before SRA;
// unbounded amounts are saturated via OR y (ADDI [-1] (SLTIU [64]
// (ZeroExt32to64 y))) so shifts >= 64 yield the sign fill. Bounded
// amounts lower to SRA of the sign-extended x.
// NOTE: generated from _gen/RISCV64.rules — change the rules file, not this.
7946 func rewriteValueRISCV64_OpRsh8x32(v *Value) bool {
7950 typ := &b.Func.Config.Types
7951 // match: (Rsh8x32 <t> x y)
7952 // cond: !shiftIsBounded(v)
7953 // result: (SRA <t> (SignExt8to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt32to64 y)))))
7958 if !(!shiftIsBounded(v)) {
7961 v.reset(OpRISCV64SRA)
7963 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
7965 v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
7966 v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
7967 v2.AuxInt = int64ToAuxInt(-1)
7968 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
7969 v3.AuxInt = int64ToAuxInt(64)
7970 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
7978 // match: (Rsh8x32 x y)
7979 // cond: shiftIsBounded(v)
7980 // result: (SRA (SignExt8to64 x) y)
7984 if !(shiftIsBounded(v)) {
7987 v.reset(OpRISCV64SRA)
7988 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
// rewriteValueRISCV64_OpRsh8x64 lowers Rsh8x64 (8-bit arithmetic right
// shift by a 64-bit amount). x is sign-extended to 64 bits before SRA;
// unbounded amounts are saturated via OR y (ADDI [-1] (SLTIU [64] y)) so
// shifts >= 64 yield the sign fill. Bounded amounts lower to SRA of the
// sign-extended x.
// NOTE: generated from _gen/RISCV64.rules — change the rules file, not this.
7995 func rewriteValueRISCV64_OpRsh8x64(v *Value) bool {
7999 typ := &b.Func.Config.Types
8000 // match: (Rsh8x64 <t> x y)
8001 // cond: !shiftIsBounded(v)
8002 // result: (SRA <t> (SignExt8to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] y))))
8007 if !(!shiftIsBounded(v)) {
8010 v.reset(OpRISCV64SRA)
8012 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
8014 v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
8015 v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
8016 v2.AuxInt = int64ToAuxInt(-1)
8017 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
8018 v3.AuxInt = int64ToAuxInt(64)
8025 // match: (Rsh8x64 x y)
8026 // cond: shiftIsBounded(v)
8027 // result: (SRA (SignExt8to64 x) y)
8031 if !(shiftIsBounded(v)) {
8034 v.reset(OpRISCV64SRA)
8035 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
// rewriteValueRISCV64_OpRsh8x8 lowers Rsh8x8 (8-bit arithmetic right shift
// by an 8-bit amount). x is sign-extended to 64 bits before SRA; unbounded
// amounts are saturated via OR y (ADDI [-1] (SLTIU [64] (ZeroExt8to64 y)))
// so shifts >= 64 yield the sign fill. Bounded amounts lower to SRA of the
// sign-extended x.
// NOTE: generated from _gen/RISCV64.rules — change the rules file, not this.
8042 func rewriteValueRISCV64_OpRsh8x8(v *Value) bool {
8046 typ := &b.Func.Config.Types
8047 // match: (Rsh8x8 <t> x y)
8048 // cond: !shiftIsBounded(v)
8049 // result: (SRA <t> (SignExt8to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt8to64 y)))))
8054 if !(!shiftIsBounded(v)) {
8057 v.reset(OpRISCV64SRA)
8059 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
8061 v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
8062 v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
8063 v2.AuxInt = int64ToAuxInt(-1)
8064 v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
8065 v3.AuxInt = int64ToAuxInt(64)
8066 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
8074 // match: (Rsh8x8 x y)
8075 // cond: shiftIsBounded(v)
8076 // result: (SRA (SignExt8to64 x) y)
8080 if !(shiftIsBounded(v)) {
8083 v.reset(OpRISCV64SRA)
8084 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
// rewriteValueRISCV64_OpSelect0 lowers Select0 (the first result of a
// multi-result op): the low 64 bits of Add64carry become (ADD (ADD x y) c),
// the low 64 bits of Sub64borrow become (SUB (SUB x y) c), and the high
// word of a single-use LoweredMuluhilo becomes a standalone MULHU (the
// m.Uses == 1 condition ensures the paired low result is not also needed).
// NOTE: generated from _gen/RISCV64.rules — change the rules file, not this.
8091 func rewriteValueRISCV64_OpSelect0(v *Value) bool {
8094 typ := &b.Func.Config.Types
8095 // match: (Select0 (Add64carry x y c))
8096 // result: (ADD (ADD <typ.UInt64> x y) c)
8098 if v_0.Op != OpAdd64carry {
8104 v.reset(OpRISCV64ADD)
8105 v0 := b.NewValue0(v.Pos, OpRISCV64ADD, typ.UInt64)
8110 // match: (Select0 (Sub64borrow x y c))
8111 // result: (SUB (SUB <typ.UInt64> x y) c)
8113 if v_0.Op != OpSub64borrow {
8119 v.reset(OpRISCV64SUB)
8120 v0 := b.NewValue0(v.Pos, OpRISCV64SUB, typ.UInt64)
8125 // match: (Select0 m:(LoweredMuluhilo x y))
8126 // cond: m.Uses == 1
8127 // result: (MULHU x y)
8130 if m.Op != OpRISCV64LoweredMuluhilo {
8138 v.reset(OpRISCV64MULHU)
// rewriteValueRISCV64_OpSelect1 lowers Select1 (the second result of a
// multi-result op). The carry-out of Add64carry is recomputed from the sums
// with unsigned compares: (s = x+y) overflowed iff s < x, and (s+c)
// overflowed iff s+c < s; ORing the two SLTU results gives the carry.
// The borrow-out of Sub64borrow is symmetric with SUB and the operand
// order of SLTU flipped. The low word of a single-use LoweredMuluhilo
// becomes a standalone MUL.
// NOTE: generated from _gen/RISCV64.rules — change the rules file, not this.
8144 func rewriteValueRISCV64_OpSelect1(v *Value) bool {
8147 typ := &b.Func.Config.Types
8148 // match: (Select1 (Add64carry x y c))
8149 // result: (OR (SLTU <typ.UInt64> s:(ADD <typ.UInt64> x y) x) (SLTU <typ.UInt64> (ADD <typ.UInt64> s c) s))
8151 if v_0.Op != OpAdd64carry {
8157 v.reset(OpRISCV64OR)
8158 v0 := b.NewValue0(v.Pos, OpRISCV64SLTU, typ.UInt64)
8159 s := b.NewValue0(v.Pos, OpRISCV64ADD, typ.UInt64)
8162 v2 := b.NewValue0(v.Pos, OpRISCV64SLTU, typ.UInt64)
8163 v3 := b.NewValue0(v.Pos, OpRISCV64ADD, typ.UInt64)
8169 // match: (Select1 (Sub64borrow x y c))
8170 // result: (OR (SLTU <typ.UInt64> x s:(SUB <typ.UInt64> x y)) (SLTU <typ.UInt64> s (SUB <typ.UInt64> s c)))
8172 if v_0.Op != OpSub64borrow {
8178 v.reset(OpRISCV64OR)
8179 v0 := b.NewValue0(v.Pos, OpRISCV64SLTU, typ.UInt64)
8180 s := b.NewValue0(v.Pos, OpRISCV64SUB, typ.UInt64)
8183 v2 := b.NewValue0(v.Pos, OpRISCV64SLTU, typ.UInt64)
8184 v3 := b.NewValue0(v.Pos, OpRISCV64SUB, typ.UInt64)
8190 // match: (Select1 m:(LoweredMuluhilo x y))
8191 // cond: m.Uses == 1
8192 // result: (MUL x y)
8195 if m.Op != OpRISCV64LoweredMuluhilo {
8203 v.reset(OpRISCV64MUL)
// rewriteValueRISCV64_OpSlicemask lowers Slicemask (all-ones if x > 0,
// zero if x == 0, for slice bounds masking) to (SRAI [63] (NEG x)):
// negating a positive x sets the sign bit, and the 63-bit arithmetic
// shift then smears it into a full 0/-1 mask.
// NOTE: generated from _gen/RISCV64.rules — change the rules file, not this.
8209 func rewriteValueRISCV64_OpSlicemask(v *Value) bool {
8212 // match: (Slicemask <t> x)
8213 // result: (SRAI [63] (NEG <t> x))
8217 v.reset(OpRISCV64SRAI)
8218 v.AuxInt = int64ToAuxInt(63)
8219 v0 := b.NewValue0(v.Pos, OpRISCV64NEG, t)
// rewriteValueRISCV64_OpStore lowers the generic Store op to the RISCV64
// store of matching width, dispatching on the stored type's size (from the
// aux) and, for 4- and 8-byte types, on whether it is a float: MOVB/MOVH
// for 1 and 2 bytes, MOVW/MOVD for 4/8-byte integers, FMOVW/FMOVD for
// 4/8-byte floats.
// NOTE: generated from _gen/RISCV64.rules — change the rules file, not this.
8225 func rewriteValueRISCV64_OpStore(v *Value) bool {
8229 // match: (Store {t} ptr val mem)
8230 // cond: t.Size() == 1
8231 // result: (MOVBstore ptr val mem)
8233 t := auxToType(v.Aux)
8237 if !(t.Size() == 1) {
8240 v.reset(OpRISCV64MOVBstore)
8241 v.AddArg3(ptr, val, mem)
8244 // match: (Store {t} ptr val mem)
8245 // cond: t.Size() == 2
8246 // result: (MOVHstore ptr val mem)
8248 t := auxToType(v.Aux)
8252 if !(t.Size() == 2) {
8255 v.reset(OpRISCV64MOVHstore)
8256 v.AddArg3(ptr, val, mem)
8259 // match: (Store {t} ptr val mem)
8260 // cond: t.Size() == 4 && !t.IsFloat()
8261 // result: (MOVWstore ptr val mem)
8263 t := auxToType(v.Aux)
8267 if !(t.Size() == 4 && !t.IsFloat()) {
8270 v.reset(OpRISCV64MOVWstore)
8271 v.AddArg3(ptr, val, mem)
8274 // match: (Store {t} ptr val mem)
8275 // cond: t.Size() == 8 && !t.IsFloat()
8276 // result: (MOVDstore ptr val mem)
8278 t := auxToType(v.Aux)
8282 if !(t.Size() == 8 && !t.IsFloat()) {
8285 v.reset(OpRISCV64MOVDstore)
8286 v.AddArg3(ptr, val, mem)
8289 // match: (Store {t} ptr val mem)
8290 // cond: t.Size() == 4 && t.IsFloat()
8291 // result: (FMOVWstore ptr val mem)
8293 t := auxToType(v.Aux)
8297 if !(t.Size() == 4 && t.IsFloat()) {
8300 v.reset(OpRISCV64FMOVWstore)
8301 v.AddArg3(ptr, val, mem)
8304 // match: (Store {t} ptr val mem)
8305 // cond: t.Size() == 8 && t.IsFloat()
8306 // result: (FMOVDstore ptr val mem)
8308 t := auxToType(v.Aux)
8312 if !(t.Size() == 8 && t.IsFloat()) {
8315 v.reset(OpRISCV64FMOVDstore)
8316 v.AddArg3(ptr, val, mem)
// rewriteValueRISCV64_OpZero lowers the generic Zero [size] op (zero `size`
// bytes at ptr). Small fixed sizes are expanded inline into chains of
// zero-constant stores, using the widest store the type's alignment
// permits (MOVDstore needs 8-byte alignment, MOVWstore 4, MOVHstore 2,
// MOVBstore any); each chain threads the memory argument through nested
// stores with descending offsets. Larger 8-byte-aligned multiples of 8
// (up to 1 KiB) jump into the Duff's-device zeroing routine, and anything
// else falls through to the generic LoweredZero loop whose end pointer is
// ptr + size - moveSize(alignment).
// NOTE: generated from _gen/RISCV64.rules — change the rules file, not this.
8321 func rewriteValueRISCV64_OpZero(v *Value) bool {
8325 config := b.Func.Config
8326 typ := &b.Func.Config.Types
8327 // match: (Zero [0] _ mem)
8330 if auxIntToInt64(v.AuxInt) != 0 {
8337 // match: (Zero [1] ptr mem)
8338 // result: (MOVBstore ptr (MOVDconst [0]) mem)
8340 if auxIntToInt64(v.AuxInt) != 1 {
8345 v.reset(OpRISCV64MOVBstore)
8346 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
8347 v0.AuxInt = int64ToAuxInt(0)
8348 v.AddArg3(ptr, v0, mem)
8351 // match: (Zero [2] {t} ptr mem)
8352 // cond: t.Alignment()%2 == 0
8353 // result: (MOVHstore ptr (MOVDconst [0]) mem)
8355 if auxIntToInt64(v.AuxInt) != 2 {
8358 t := auxToType(v.Aux)
8361 if !(t.Alignment()%2 == 0) {
8364 v.reset(OpRISCV64MOVHstore)
8365 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
8366 v0.AuxInt = int64ToAuxInt(0)
8367 v.AddArg3(ptr, v0, mem)
8370 // match: (Zero [2] ptr mem)
8371 // result: (MOVBstore [1] ptr (MOVDconst [0]) (MOVBstore ptr (MOVDconst [0]) mem))
8373 if auxIntToInt64(v.AuxInt) != 2 {
8378 v.reset(OpRISCV64MOVBstore)
8379 v.AuxInt = int32ToAuxInt(1)
8380 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
8381 v0.AuxInt = int64ToAuxInt(0)
8382 v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
8383 v1.AddArg3(ptr, v0, mem)
8384 v.AddArg3(ptr, v0, v1)
8387 // match: (Zero [4] {t} ptr mem)
8388 // cond: t.Alignment()%4 == 0
8389 // result: (MOVWstore ptr (MOVDconst [0]) mem)
8391 if auxIntToInt64(v.AuxInt) != 4 {
8394 t := auxToType(v.Aux)
8397 if !(t.Alignment()%4 == 0) {
8400 v.reset(OpRISCV64MOVWstore)
8401 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
8402 v0.AuxInt = int64ToAuxInt(0)
8403 v.AddArg3(ptr, v0, mem)
8406 // match: (Zero [4] {t} ptr mem)
8407 // cond: t.Alignment()%2 == 0
8408 // result: (MOVHstore [2] ptr (MOVDconst [0]) (MOVHstore ptr (MOVDconst [0]) mem))
8410 if auxIntToInt64(v.AuxInt) != 4 {
8413 t := auxToType(v.Aux)
8416 if !(t.Alignment()%2 == 0) {
8419 v.reset(OpRISCV64MOVHstore)
8420 v.AuxInt = int32ToAuxInt(2)
8421 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
8422 v0.AuxInt = int64ToAuxInt(0)
8423 v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
8424 v1.AddArg3(ptr, v0, mem)
8425 v.AddArg3(ptr, v0, v1)
8428 // match: (Zero [4] ptr mem)
8429 // result: (MOVBstore [3] ptr (MOVDconst [0]) (MOVBstore [2] ptr (MOVDconst [0]) (MOVBstore [1] ptr (MOVDconst [0]) (MOVBstore ptr (MOVDconst [0]) mem))))
8431 if auxIntToInt64(v.AuxInt) != 4 {
8436 v.reset(OpRISCV64MOVBstore)
8437 v.AuxInt = int32ToAuxInt(3)
8438 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
8439 v0.AuxInt = int64ToAuxInt(0)
8440 v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
8441 v1.AuxInt = int32ToAuxInt(2)
8442 v2 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
8443 v2.AuxInt = int32ToAuxInt(1)
8444 v3 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
8445 v3.AddArg3(ptr, v0, mem)
8446 v2.AddArg3(ptr, v0, v3)
8447 v1.AddArg3(ptr, v0, v2)
8448 v.AddArg3(ptr, v0, v1)
8451 // match: (Zero [8] {t} ptr mem)
8452 // cond: t.Alignment()%8 == 0
8453 // result: (MOVDstore ptr (MOVDconst [0]) mem)
8455 if auxIntToInt64(v.AuxInt) != 8 {
8458 t := auxToType(v.Aux)
8461 if !(t.Alignment()%8 == 0) {
8464 v.reset(OpRISCV64MOVDstore)
8465 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
8466 v0.AuxInt = int64ToAuxInt(0)
8467 v.AddArg3(ptr, v0, mem)
8470 // match: (Zero [8] {t} ptr mem)
8471 // cond: t.Alignment()%4 == 0
8472 // result: (MOVWstore [4] ptr (MOVDconst [0]) (MOVWstore ptr (MOVDconst [0]) mem))
8474 if auxIntToInt64(v.AuxInt) != 8 {
8477 t := auxToType(v.Aux)
8480 if !(t.Alignment()%4 == 0) {
8483 v.reset(OpRISCV64MOVWstore)
8484 v.AuxInt = int32ToAuxInt(4)
8485 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
8486 v0.AuxInt = int64ToAuxInt(0)
8487 v1 := b.NewValue0(v.Pos, OpRISCV64MOVWstore, types.TypeMem)
8488 v1.AddArg3(ptr, v0, mem)
8489 v.AddArg3(ptr, v0, v1)
8492 // match: (Zero [8] {t} ptr mem)
8493 // cond: t.Alignment()%2 == 0
8494 // result: (MOVHstore [6] ptr (MOVDconst [0]) (MOVHstore [4] ptr (MOVDconst [0]) (MOVHstore [2] ptr (MOVDconst [0]) (MOVHstore ptr (MOVDconst [0]) mem))))
8496 if auxIntToInt64(v.AuxInt) != 8 {
8499 t := auxToType(v.Aux)
8502 if !(t.Alignment()%2 == 0) {
8505 v.reset(OpRISCV64MOVHstore)
8506 v.AuxInt = int32ToAuxInt(6)
8507 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
8508 v0.AuxInt = int64ToAuxInt(0)
8509 v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
8510 v1.AuxInt = int32ToAuxInt(4)
8511 v2 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
8512 v2.AuxInt = int32ToAuxInt(2)
8513 v3 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
8514 v3.AddArg3(ptr, v0, mem)
8515 v2.AddArg3(ptr, v0, v3)
8516 v1.AddArg3(ptr, v0, v2)
8517 v.AddArg3(ptr, v0, v1)
8520 // match: (Zero [3] ptr mem)
8521 // result: (MOVBstore [2] ptr (MOVDconst [0]) (MOVBstore [1] ptr (MOVDconst [0]) (MOVBstore ptr (MOVDconst [0]) mem)))
8523 if auxIntToInt64(v.AuxInt) != 3 {
8528 v.reset(OpRISCV64MOVBstore)
8529 v.AuxInt = int32ToAuxInt(2)
8530 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
8531 v0.AuxInt = int64ToAuxInt(0)
8532 v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
8533 v1.AuxInt = int32ToAuxInt(1)
8534 v2 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
8535 v2.AddArg3(ptr, v0, mem)
8536 v1.AddArg3(ptr, v0, v2)
8537 v.AddArg3(ptr, v0, v1)
8540 // match: (Zero [6] {t} ptr mem)
8541 // cond: t.Alignment()%2 == 0
8542 // result: (MOVHstore [4] ptr (MOVDconst [0]) (MOVHstore [2] ptr (MOVDconst [0]) (MOVHstore ptr (MOVDconst [0]) mem)))
8544 if auxIntToInt64(v.AuxInt) != 6 {
8547 t := auxToType(v.Aux)
8550 if !(t.Alignment()%2 == 0) {
8553 v.reset(OpRISCV64MOVHstore)
8554 v.AuxInt = int32ToAuxInt(4)
8555 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
8556 v0.AuxInt = int64ToAuxInt(0)
8557 v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
8558 v1.AuxInt = int32ToAuxInt(2)
8559 v2 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
8560 v2.AddArg3(ptr, v0, mem)
8561 v1.AddArg3(ptr, v0, v2)
8562 v.AddArg3(ptr, v0, v1)
8565 // match: (Zero [12] {t} ptr mem)
8566 // cond: t.Alignment()%4 == 0
8567 // result: (MOVWstore [8] ptr (MOVDconst [0]) (MOVWstore [4] ptr (MOVDconst [0]) (MOVWstore ptr (MOVDconst [0]) mem)))
8569 if auxIntToInt64(v.AuxInt) != 12 {
8572 t := auxToType(v.Aux)
8575 if !(t.Alignment()%4 == 0) {
8578 v.reset(OpRISCV64MOVWstore)
8579 v.AuxInt = int32ToAuxInt(8)
8580 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
8581 v0.AuxInt = int64ToAuxInt(0)
8582 v1 := b.NewValue0(v.Pos, OpRISCV64MOVWstore, types.TypeMem)
8583 v1.AuxInt = int32ToAuxInt(4)
8584 v2 := b.NewValue0(v.Pos, OpRISCV64MOVWstore, types.TypeMem)
8585 v2.AddArg3(ptr, v0, mem)
8586 v1.AddArg3(ptr, v0, v2)
8587 v.AddArg3(ptr, v0, v1)
8590 // match: (Zero [16] {t} ptr mem)
8591 // cond: t.Alignment()%8 == 0
8592 // result: (MOVDstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem))
8594 if auxIntToInt64(v.AuxInt) != 16 {
8597 t := auxToType(v.Aux)
8600 if !(t.Alignment()%8 == 0) {
8603 v.reset(OpRISCV64MOVDstore)
8604 v.AuxInt = int32ToAuxInt(8)
8605 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
8606 v0.AuxInt = int64ToAuxInt(0)
8607 v1 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
8608 v1.AddArg3(ptr, v0, mem)
8609 v.AddArg3(ptr, v0, v1)
8612 // match: (Zero [24] {t} ptr mem)
8613 // cond: t.Alignment()%8 == 0
8614 // result: (MOVDstore [16] ptr (MOVDconst [0]) (MOVDstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem)))
8616 if auxIntToInt64(v.AuxInt) != 24 {
8619 t := auxToType(v.Aux)
8622 if !(t.Alignment()%8 == 0) {
8625 v.reset(OpRISCV64MOVDstore)
8626 v.AuxInt = int32ToAuxInt(16)
8627 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
8628 v0.AuxInt = int64ToAuxInt(0)
8629 v1 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
8630 v1.AuxInt = int32ToAuxInt(8)
8631 v2 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
8632 v2.AddArg3(ptr, v0, mem)
8633 v1.AddArg3(ptr, v0, v2)
8634 v.AddArg3(ptr, v0, v1)
8637 // match: (Zero [32] {t} ptr mem)
8638 // cond: t.Alignment()%8 == 0
8639 // result: (MOVDstore [24] ptr (MOVDconst [0]) (MOVDstore [16] ptr (MOVDconst [0]) (MOVDstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem))))
8641 if auxIntToInt64(v.AuxInt) != 32 {
8644 t := auxToType(v.Aux)
8647 if !(t.Alignment()%8 == 0) {
8650 v.reset(OpRISCV64MOVDstore)
8651 v.AuxInt = int32ToAuxInt(24)
8652 v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
8653 v0.AuxInt = int64ToAuxInt(0)
8654 v1 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
8655 v1.AuxInt = int32ToAuxInt(16)
8656 v2 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
8657 v2.AuxInt = int32ToAuxInt(8)
8658 v3 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
8659 v3.AddArg3(ptr, v0, mem)
8660 v2.AddArg3(ptr, v0, v3)
8661 v1.AddArg3(ptr, v0, v2)
8662 v.AddArg3(ptr, v0, v1)
8665 // match: (Zero [s] {t} ptr mem)
8666 // cond: s%8 == 0 && s <= 8*128 && t.Alignment()%8 == 0 && !config.noDuffDevice
8667 // result: (DUFFZERO [8 * (128 - s/8)] ptr mem)
8669 s := auxIntToInt64(v.AuxInt)
8670 t := auxToType(v.Aux)
8673 if !(s%8 == 0 && s <= 8*128 && t.Alignment()%8 == 0 && !config.noDuffDevice) {
8676 v.reset(OpRISCV64DUFFZERO)
8677 v.AuxInt = int64ToAuxInt(8 * (128 - s/8))
8681 // match: (Zero [s] {t} ptr mem)
8682 // result: (LoweredZero [t.Alignment()] ptr (ADD <ptr.Type> ptr (MOVDconst [s-moveSize(t.Alignment(), config)])) mem)
8684 s := auxIntToInt64(v.AuxInt)
8685 t := auxToType(v.Aux)
8688 v.reset(OpRISCV64LoweredZero)
8689 v.AuxInt = int64ToAuxInt(t.Alignment())
8690 v0 := b.NewValue0(v.Pos, OpRISCV64ADD, ptr.Type)
8691 v1 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
8692 v1.AuxInt = int64ToAuxInt(s - moveSize(t.Alignment(), config))
8694 v.AddArg3(ptr, v0, mem)
8698 func rewriteBlockRISCV64(b *Block) bool {
8699 typ := &b.Func.Config.Types
8701 case BlockRISCV64BEQ:
8702 // match: (BEQ (MOVDconst [0]) cond yes no)
8703 // result: (BEQZ cond yes no)
8704 for b.Controls[0].Op == OpRISCV64MOVDconst {
8705 v_0 := b.Controls[0]
8706 if auxIntToInt64(v_0.AuxInt) != 0 {
8709 cond := b.Controls[1]
8710 b.resetWithControl(BlockRISCV64BEQZ, cond)
8713 // match: (BEQ cond (MOVDconst [0]) yes no)
8714 // result: (BEQZ cond yes no)
8715 for b.Controls[1].Op == OpRISCV64MOVDconst {
8716 cond := b.Controls[0]
8717 v_1 := b.Controls[1]
8718 if auxIntToInt64(v_1.AuxInt) != 0 {
8721 b.resetWithControl(BlockRISCV64BEQZ, cond)
8724 case BlockRISCV64BEQZ:
8725 // match: (BEQZ (SEQZ x) yes no)
8726 // result: (BNEZ x yes no)
8727 for b.Controls[0].Op == OpRISCV64SEQZ {
8728 v_0 := b.Controls[0]
8730 b.resetWithControl(BlockRISCV64BNEZ, x)
8733 // match: (BEQZ (SNEZ x) yes no)
8734 // result: (BEQZ x yes no)
8735 for b.Controls[0].Op == OpRISCV64SNEZ {
8736 v_0 := b.Controls[0]
8738 b.resetWithControl(BlockRISCV64BEQZ, x)
8741 // match: (BEQZ (NEG x) yes no)
8742 // result: (BEQZ x yes no)
8743 for b.Controls[0].Op == OpRISCV64NEG {
8744 v_0 := b.Controls[0]
8746 b.resetWithControl(BlockRISCV64BEQZ, x)
8749 // match: (BEQZ (FNES <t> x y) yes no)
8750 // result: (BNEZ (FEQS <t> x y) yes no)
8751 for b.Controls[0].Op == OpRISCV64FNES {
8752 v_0 := b.Controls[0]
8755 v_0_0 := v_0.Args[0]
8756 v_0_1 := v_0.Args[1]
8757 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
8760 v0 := b.NewValue0(v_0.Pos, OpRISCV64FEQS, t)
8762 b.resetWithControl(BlockRISCV64BNEZ, v0)
8766 // match: (BEQZ (FNED <t> x y) yes no)
8767 // result: (BNEZ (FEQD <t> x y) yes no)
8768 for b.Controls[0].Op == OpRISCV64FNED {
8769 v_0 := b.Controls[0]
8772 v_0_0 := v_0.Args[0]
8773 v_0_1 := v_0.Args[1]
8774 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
8777 v0 := b.NewValue0(v_0.Pos, OpRISCV64FEQD, t)
8779 b.resetWithControl(BlockRISCV64BNEZ, v0)
8783 // match: (BEQZ (SUB x y) yes no)
8784 // result: (BEQ x y yes no)
8785 for b.Controls[0].Op == OpRISCV64SUB {
8786 v_0 := b.Controls[0]
8789 b.resetWithControl2(BlockRISCV64BEQ, x, y)
8792 // match: (BEQZ (SLT x y) yes no)
8793 // result: (BGE x y yes no)
8794 for b.Controls[0].Op == OpRISCV64SLT {
8795 v_0 := b.Controls[0]
8798 b.resetWithControl2(BlockRISCV64BGE, x, y)
8801 // match: (BEQZ (SLTU x y) yes no)
8802 // result: (BGEU x y yes no)
8803 for b.Controls[0].Op == OpRISCV64SLTU {
8804 v_0 := b.Controls[0]
8807 b.resetWithControl2(BlockRISCV64BGEU, x, y)
8810 // match: (BEQZ (SLTI [x] y) yes no)
8811 // result: (BGE y (MOVDconst [x]) yes no)
8812 for b.Controls[0].Op == OpRISCV64SLTI {
8813 v_0 := b.Controls[0]
8814 x := auxIntToInt64(v_0.AuxInt)
8816 v0 := b.NewValue0(b.Pos, OpRISCV64MOVDconst, typ.UInt64)
8817 v0.AuxInt = int64ToAuxInt(x)
8818 b.resetWithControl2(BlockRISCV64BGE, y, v0)
8821 // match: (BEQZ (SLTIU [x] y) yes no)
8822 // result: (BGEU y (MOVDconst [x]) yes no)
8823 for b.Controls[0].Op == OpRISCV64SLTIU {
8824 v_0 := b.Controls[0]
8825 x := auxIntToInt64(v_0.AuxInt)
8827 v0 := b.NewValue0(b.Pos, OpRISCV64MOVDconst, typ.UInt64)
8828 v0.AuxInt = int64ToAuxInt(x)
8829 b.resetWithControl2(BlockRISCV64BGEU, y, v0)
8832 case BlockRISCV64BGE:
8833 // match: (BGE (MOVDconst [0]) cond yes no)
8834 // result: (BLEZ cond yes no)
8835 for b.Controls[0].Op == OpRISCV64MOVDconst {
8836 v_0 := b.Controls[0]
8837 if auxIntToInt64(v_0.AuxInt) != 0 {
8840 cond := b.Controls[1]
8841 b.resetWithControl(BlockRISCV64BLEZ, cond)
8844 // match: (BGE cond (MOVDconst [0]) yes no)
8845 // result: (BGEZ cond yes no)
8846 for b.Controls[1].Op == OpRISCV64MOVDconst {
8847 cond := b.Controls[0]
8848 v_1 := b.Controls[1]
8849 if auxIntToInt64(v_1.AuxInt) != 0 {
8852 b.resetWithControl(BlockRISCV64BGEZ, cond)
8855 case BlockRISCV64BLT:
8856 // match: (BLT (MOVDconst [0]) cond yes no)
8857 // result: (BGTZ cond yes no)
8858 for b.Controls[0].Op == OpRISCV64MOVDconst {
8859 v_0 := b.Controls[0]
8860 if auxIntToInt64(v_0.AuxInt) != 0 {
8863 cond := b.Controls[1]
8864 b.resetWithControl(BlockRISCV64BGTZ, cond)
8867 // match: (BLT cond (MOVDconst [0]) yes no)
8868 // result: (BLTZ cond yes no)
8869 for b.Controls[1].Op == OpRISCV64MOVDconst {
8870 cond := b.Controls[0]
8871 v_1 := b.Controls[1]
8872 if auxIntToInt64(v_1.AuxInt) != 0 {
8875 b.resetWithControl(BlockRISCV64BLTZ, cond)
8878 case BlockRISCV64BNE:
8879 // match: (BNE (MOVDconst [0]) cond yes no)
8880 // result: (BNEZ cond yes no)
8881 for b.Controls[0].Op == OpRISCV64MOVDconst {
8882 v_0 := b.Controls[0]
8883 if auxIntToInt64(v_0.AuxInt) != 0 {
8886 cond := b.Controls[1]
8887 b.resetWithControl(BlockRISCV64BNEZ, cond)
8890 // match: (BNE cond (MOVDconst [0]) yes no)
8891 // result: (BNEZ cond yes no)
8892 for b.Controls[1].Op == OpRISCV64MOVDconst {
8893 cond := b.Controls[0]
8894 v_1 := b.Controls[1]
8895 if auxIntToInt64(v_1.AuxInt) != 0 {
8898 b.resetWithControl(BlockRISCV64BNEZ, cond)
8901 case BlockRISCV64BNEZ:
8902 // match: (BNEZ (SEQZ x) yes no)
8903 // result: (BEQZ x yes no)
8904 for b.Controls[0].Op == OpRISCV64SEQZ {
8905 v_0 := b.Controls[0]
8907 b.resetWithControl(BlockRISCV64BEQZ, x)
8910 // match: (BNEZ (SNEZ x) yes no)
8911 // result: (BNEZ x yes no)
8912 for b.Controls[0].Op == OpRISCV64SNEZ {
8913 v_0 := b.Controls[0]
8915 b.resetWithControl(BlockRISCV64BNEZ, x)
8918 // match: (BNEZ (NEG x) yes no)
8919 // result: (BNEZ x yes no)
8920 for b.Controls[0].Op == OpRISCV64NEG {
8921 v_0 := b.Controls[0]
8923 b.resetWithControl(BlockRISCV64BNEZ, x)
8926 // match: (BNEZ (FNES <t> x y) yes no)
8927 // result: (BEQZ (FEQS <t> x y) yes no)
8928 for b.Controls[0].Op == OpRISCV64FNES {
8929 v_0 := b.Controls[0]
8932 v_0_0 := v_0.Args[0]
8933 v_0_1 := v_0.Args[1]
8934 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
8937 v0 := b.NewValue0(v_0.Pos, OpRISCV64FEQS, t)
8939 b.resetWithControl(BlockRISCV64BEQZ, v0)
8943 // match: (BNEZ (FNED <t> x y) yes no)
8944 // result: (BEQZ (FEQD <t> x y) yes no)
8945 for b.Controls[0].Op == OpRISCV64FNED {
8946 v_0 := b.Controls[0]
8949 v_0_0 := v_0.Args[0]
8950 v_0_1 := v_0.Args[1]
8951 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
8954 v0 := b.NewValue0(v_0.Pos, OpRISCV64FEQD, t)
8956 b.resetWithControl(BlockRISCV64BEQZ, v0)
8960 // match: (BNEZ (SUB x y) yes no)
8961 // result: (BNE x y yes no)
8962 for b.Controls[0].Op == OpRISCV64SUB {
8963 v_0 := b.Controls[0]
8966 b.resetWithControl2(BlockRISCV64BNE, x, y)
8969 // match: (BNEZ (SLT x y) yes no)
8970 // result: (BLT x y yes no)
8971 for b.Controls[0].Op == OpRISCV64SLT {
8972 v_0 := b.Controls[0]
8975 b.resetWithControl2(BlockRISCV64BLT, x, y)
8978 // match: (BNEZ (SLTU x y) yes no)
8979 // result: (BLTU x y yes no)
8980 for b.Controls[0].Op == OpRISCV64SLTU {
8981 v_0 := b.Controls[0]
8984 b.resetWithControl2(BlockRISCV64BLTU, x, y)
8987 // match: (BNEZ (SLTI [x] y) yes no)
8988 // result: (BLT y (MOVDconst [x]) yes no)
8989 for b.Controls[0].Op == OpRISCV64SLTI {
8990 v_0 := b.Controls[0]
8991 x := auxIntToInt64(v_0.AuxInt)
8993 v0 := b.NewValue0(b.Pos, OpRISCV64MOVDconst, typ.UInt64)
8994 v0.AuxInt = int64ToAuxInt(x)
8995 b.resetWithControl2(BlockRISCV64BLT, y, v0)
8998 // match: (BNEZ (SLTIU [x] y) yes no)
8999 // result: (BLTU y (MOVDconst [x]) yes no)
9000 for b.Controls[0].Op == OpRISCV64SLTIU {
9001 v_0 := b.Controls[0]
9002 x := auxIntToInt64(v_0.AuxInt)
9004 v0 := b.NewValue0(b.Pos, OpRISCV64MOVDconst, typ.UInt64)
9005 v0.AuxInt = int64ToAuxInt(x)
9006 b.resetWithControl2(BlockRISCV64BLTU, y, v0)
9010 // match: (If cond yes no)
9011 // result: (BNEZ (MOVBUreg <typ.UInt64> cond) yes no)
9013 cond := b.Controls[0]
9014 v0 := b.NewValue0(cond.Pos, OpRISCV64MOVBUreg, typ.UInt64)
9016 b.resetWithControl(BlockRISCV64BNEZ, v0)