// Code generated from _gen/generic.rules using 'go generate'; DO NOT EDIT.
6 import "cmd/internal/obj"
7 import "cmd/compile/internal/types"
8 import "cmd/compile/internal/ir"
10 func rewriteValuegeneric(v *Value) bool {
13 return rewriteValuegeneric_OpAdd16(v)
15 return rewriteValuegeneric_OpAdd32(v)
17 return rewriteValuegeneric_OpAdd32F(v)
19 return rewriteValuegeneric_OpAdd64(v)
21 return rewriteValuegeneric_OpAdd64F(v)
23 return rewriteValuegeneric_OpAdd8(v)
25 return rewriteValuegeneric_OpAddPtr(v)
27 return rewriteValuegeneric_OpAnd16(v)
29 return rewriteValuegeneric_OpAnd32(v)
31 return rewriteValuegeneric_OpAnd64(v)
33 return rewriteValuegeneric_OpAnd8(v)
35 return rewriteValuegeneric_OpAndB(v)
37 return rewriteValuegeneric_OpArraySelect(v)
39 return rewriteValuegeneric_OpCeil(v)
41 return rewriteValuegeneric_OpCom16(v)
43 return rewriteValuegeneric_OpCom32(v)
45 return rewriteValuegeneric_OpCom64(v)
47 return rewriteValuegeneric_OpCom8(v)
48 case OpConstInterface:
49 return rewriteValuegeneric_OpConstInterface(v)
51 return rewriteValuegeneric_OpConstSlice(v)
53 return rewriteValuegeneric_OpConstString(v)
55 return rewriteValuegeneric_OpConvert(v)
57 return rewriteValuegeneric_OpCtz16(v)
59 return rewriteValuegeneric_OpCtz32(v)
61 return rewriteValuegeneric_OpCtz64(v)
63 return rewriteValuegeneric_OpCtz8(v)
65 return rewriteValuegeneric_OpCvt32Fto32(v)
67 return rewriteValuegeneric_OpCvt32Fto64(v)
69 return rewriteValuegeneric_OpCvt32Fto64F(v)
71 return rewriteValuegeneric_OpCvt32to32F(v)
73 return rewriteValuegeneric_OpCvt32to64F(v)
75 return rewriteValuegeneric_OpCvt64Fto32(v)
77 return rewriteValuegeneric_OpCvt64Fto32F(v)
79 return rewriteValuegeneric_OpCvt64Fto64(v)
81 return rewriteValuegeneric_OpCvt64to32F(v)
83 return rewriteValuegeneric_OpCvt64to64F(v)
84 case OpCvtBoolToUint8:
85 return rewriteValuegeneric_OpCvtBoolToUint8(v)
87 return rewriteValuegeneric_OpDiv16(v)
89 return rewriteValuegeneric_OpDiv16u(v)
91 return rewriteValuegeneric_OpDiv32(v)
93 return rewriteValuegeneric_OpDiv32F(v)
95 return rewriteValuegeneric_OpDiv32u(v)
97 return rewriteValuegeneric_OpDiv64(v)
99 return rewriteValuegeneric_OpDiv64F(v)
101 return rewriteValuegeneric_OpDiv64u(v)
103 return rewriteValuegeneric_OpDiv8(v)
105 return rewriteValuegeneric_OpDiv8u(v)
107 return rewriteValuegeneric_OpEq16(v)
109 return rewriteValuegeneric_OpEq32(v)
111 return rewriteValuegeneric_OpEq32F(v)
113 return rewriteValuegeneric_OpEq64(v)
115 return rewriteValuegeneric_OpEq64F(v)
117 return rewriteValuegeneric_OpEq8(v)
119 return rewriteValuegeneric_OpEqB(v)
121 return rewriteValuegeneric_OpEqInter(v)
123 return rewriteValuegeneric_OpEqPtr(v)
125 return rewriteValuegeneric_OpEqSlice(v)
127 return rewriteValuegeneric_OpFloor(v)
129 return rewriteValuegeneric_OpIMake(v)
131 return rewriteValuegeneric_OpInterLECall(v)
133 return rewriteValuegeneric_OpIsInBounds(v)
135 return rewriteValuegeneric_OpIsNonNil(v)
136 case OpIsSliceInBounds:
137 return rewriteValuegeneric_OpIsSliceInBounds(v)
139 return rewriteValuegeneric_OpLeq16(v)
141 return rewriteValuegeneric_OpLeq16U(v)
143 return rewriteValuegeneric_OpLeq32(v)
145 return rewriteValuegeneric_OpLeq32F(v)
147 return rewriteValuegeneric_OpLeq32U(v)
149 return rewriteValuegeneric_OpLeq64(v)
151 return rewriteValuegeneric_OpLeq64F(v)
153 return rewriteValuegeneric_OpLeq64U(v)
155 return rewriteValuegeneric_OpLeq8(v)
157 return rewriteValuegeneric_OpLeq8U(v)
159 return rewriteValuegeneric_OpLess16(v)
161 return rewriteValuegeneric_OpLess16U(v)
163 return rewriteValuegeneric_OpLess32(v)
165 return rewriteValuegeneric_OpLess32F(v)
167 return rewriteValuegeneric_OpLess32U(v)
169 return rewriteValuegeneric_OpLess64(v)
171 return rewriteValuegeneric_OpLess64F(v)
173 return rewriteValuegeneric_OpLess64U(v)
175 return rewriteValuegeneric_OpLess8(v)
177 return rewriteValuegeneric_OpLess8U(v)
179 return rewriteValuegeneric_OpLoad(v)
181 return rewriteValuegeneric_OpLsh16x16(v)
183 return rewriteValuegeneric_OpLsh16x32(v)
185 return rewriteValuegeneric_OpLsh16x64(v)
187 return rewriteValuegeneric_OpLsh16x8(v)
189 return rewriteValuegeneric_OpLsh32x16(v)
191 return rewriteValuegeneric_OpLsh32x32(v)
193 return rewriteValuegeneric_OpLsh32x64(v)
195 return rewriteValuegeneric_OpLsh32x8(v)
197 return rewriteValuegeneric_OpLsh64x16(v)
199 return rewriteValuegeneric_OpLsh64x32(v)
201 return rewriteValuegeneric_OpLsh64x64(v)
203 return rewriteValuegeneric_OpLsh64x8(v)
205 return rewriteValuegeneric_OpLsh8x16(v)
207 return rewriteValuegeneric_OpLsh8x32(v)
209 return rewriteValuegeneric_OpLsh8x64(v)
211 return rewriteValuegeneric_OpLsh8x8(v)
213 return rewriteValuegeneric_OpMod16(v)
215 return rewriteValuegeneric_OpMod16u(v)
217 return rewriteValuegeneric_OpMod32(v)
219 return rewriteValuegeneric_OpMod32u(v)
221 return rewriteValuegeneric_OpMod64(v)
223 return rewriteValuegeneric_OpMod64u(v)
225 return rewriteValuegeneric_OpMod8(v)
227 return rewriteValuegeneric_OpMod8u(v)
229 return rewriteValuegeneric_OpMove(v)
231 return rewriteValuegeneric_OpMul16(v)
233 return rewriteValuegeneric_OpMul32(v)
235 return rewriteValuegeneric_OpMul32F(v)
237 return rewriteValuegeneric_OpMul64(v)
239 return rewriteValuegeneric_OpMul64F(v)
241 return rewriteValuegeneric_OpMul8(v)
243 return rewriteValuegeneric_OpNeg16(v)
245 return rewriteValuegeneric_OpNeg32(v)
247 return rewriteValuegeneric_OpNeg32F(v)
249 return rewriteValuegeneric_OpNeg64(v)
251 return rewriteValuegeneric_OpNeg64F(v)
253 return rewriteValuegeneric_OpNeg8(v)
255 return rewriteValuegeneric_OpNeq16(v)
257 return rewriteValuegeneric_OpNeq32(v)
259 return rewriteValuegeneric_OpNeq32F(v)
261 return rewriteValuegeneric_OpNeq64(v)
263 return rewriteValuegeneric_OpNeq64F(v)
265 return rewriteValuegeneric_OpNeq8(v)
267 return rewriteValuegeneric_OpNeqB(v)
269 return rewriteValuegeneric_OpNeqInter(v)
271 return rewriteValuegeneric_OpNeqPtr(v)
273 return rewriteValuegeneric_OpNeqSlice(v)
275 return rewriteValuegeneric_OpNilCheck(v)
277 return rewriteValuegeneric_OpNot(v)
279 return rewriteValuegeneric_OpOffPtr(v)
281 return rewriteValuegeneric_OpOr16(v)
283 return rewriteValuegeneric_OpOr32(v)
285 return rewriteValuegeneric_OpOr64(v)
287 return rewriteValuegeneric_OpOr8(v)
289 return rewriteValuegeneric_OpOrB(v)
291 return rewriteValuegeneric_OpPhi(v)
293 return rewriteValuegeneric_OpPtrIndex(v)
295 return rewriteValuegeneric_OpRotateLeft16(v)
297 return rewriteValuegeneric_OpRotateLeft32(v)
299 return rewriteValuegeneric_OpRotateLeft64(v)
301 return rewriteValuegeneric_OpRotateLeft8(v)
303 return rewriteValuegeneric_OpRound32F(v)
305 return rewriteValuegeneric_OpRound64F(v)
307 return rewriteValuegeneric_OpRoundToEven(v)
309 return rewriteValuegeneric_OpRsh16Ux16(v)
311 return rewriteValuegeneric_OpRsh16Ux32(v)
313 return rewriteValuegeneric_OpRsh16Ux64(v)
315 return rewriteValuegeneric_OpRsh16Ux8(v)
317 return rewriteValuegeneric_OpRsh16x16(v)
319 return rewriteValuegeneric_OpRsh16x32(v)
321 return rewriteValuegeneric_OpRsh16x64(v)
323 return rewriteValuegeneric_OpRsh16x8(v)
325 return rewriteValuegeneric_OpRsh32Ux16(v)
327 return rewriteValuegeneric_OpRsh32Ux32(v)
329 return rewriteValuegeneric_OpRsh32Ux64(v)
331 return rewriteValuegeneric_OpRsh32Ux8(v)
333 return rewriteValuegeneric_OpRsh32x16(v)
335 return rewriteValuegeneric_OpRsh32x32(v)
337 return rewriteValuegeneric_OpRsh32x64(v)
339 return rewriteValuegeneric_OpRsh32x8(v)
341 return rewriteValuegeneric_OpRsh64Ux16(v)
343 return rewriteValuegeneric_OpRsh64Ux32(v)
345 return rewriteValuegeneric_OpRsh64Ux64(v)
347 return rewriteValuegeneric_OpRsh64Ux8(v)
349 return rewriteValuegeneric_OpRsh64x16(v)
351 return rewriteValuegeneric_OpRsh64x32(v)
353 return rewriteValuegeneric_OpRsh64x64(v)
355 return rewriteValuegeneric_OpRsh64x8(v)
357 return rewriteValuegeneric_OpRsh8Ux16(v)
359 return rewriteValuegeneric_OpRsh8Ux32(v)
361 return rewriteValuegeneric_OpRsh8Ux64(v)
363 return rewriteValuegeneric_OpRsh8Ux8(v)
365 return rewriteValuegeneric_OpRsh8x16(v)
367 return rewriteValuegeneric_OpRsh8x32(v)
369 return rewriteValuegeneric_OpRsh8x64(v)
371 return rewriteValuegeneric_OpRsh8x8(v)
373 return rewriteValuegeneric_OpSelect0(v)
375 return rewriteValuegeneric_OpSelect1(v)
377 return rewriteValuegeneric_OpSelectN(v)
378 case OpSignExt16to32:
379 return rewriteValuegeneric_OpSignExt16to32(v)
380 case OpSignExt16to64:
381 return rewriteValuegeneric_OpSignExt16to64(v)
382 case OpSignExt32to64:
383 return rewriteValuegeneric_OpSignExt32to64(v)
385 return rewriteValuegeneric_OpSignExt8to16(v)
387 return rewriteValuegeneric_OpSignExt8to32(v)
389 return rewriteValuegeneric_OpSignExt8to64(v)
391 return rewriteValuegeneric_OpSliceCap(v)
393 return rewriteValuegeneric_OpSliceLen(v)
395 return rewriteValuegeneric_OpSlicePtr(v)
397 return rewriteValuegeneric_OpSlicemask(v)
399 return rewriteValuegeneric_OpSqrt(v)
401 return rewriteValuegeneric_OpStaticCall(v)
403 return rewriteValuegeneric_OpStaticLECall(v)
405 return rewriteValuegeneric_OpStore(v)
407 return rewriteValuegeneric_OpStringLen(v)
409 return rewriteValuegeneric_OpStringPtr(v)
411 return rewriteValuegeneric_OpStructSelect(v)
413 return rewriteValuegeneric_OpSub16(v)
415 return rewriteValuegeneric_OpSub32(v)
417 return rewriteValuegeneric_OpSub32F(v)
419 return rewriteValuegeneric_OpSub64(v)
421 return rewriteValuegeneric_OpSub64F(v)
423 return rewriteValuegeneric_OpSub8(v)
425 return rewriteValuegeneric_OpTrunc(v)
427 return rewriteValuegeneric_OpTrunc16to8(v)
429 return rewriteValuegeneric_OpTrunc32to16(v)
431 return rewriteValuegeneric_OpTrunc32to8(v)
433 return rewriteValuegeneric_OpTrunc64to16(v)
435 return rewriteValuegeneric_OpTrunc64to32(v)
437 return rewriteValuegeneric_OpTrunc64to8(v)
439 return rewriteValuegeneric_OpXor16(v)
441 return rewriteValuegeneric_OpXor32(v)
443 return rewriteValuegeneric_OpXor64(v)
445 return rewriteValuegeneric_OpXor8(v)
447 return rewriteValuegeneric_OpZero(v)
448 case OpZeroExt16to32:
449 return rewriteValuegeneric_OpZeroExt16to32(v)
450 case OpZeroExt16to64:
451 return rewriteValuegeneric_OpZeroExt16to64(v)
452 case OpZeroExt32to64:
453 return rewriteValuegeneric_OpZeroExt32to64(v)
455 return rewriteValuegeneric_OpZeroExt8to16(v)
457 return rewriteValuegeneric_OpZeroExt8to32(v)
459 return rewriteValuegeneric_OpZeroExt8to64(v)
// rewriteValuegeneric_OpAdd16 applies the generic rewrite rules for Add16
// and reports whether v was rewritten. The surviving rules visible here:
// constant folding (Const16+Const16), distributing Add over Mul16,
// additive identity (Const16 [0]), x+(-y) => x-y, (Com16 x)+x => -1,
// add/sub reassociation and constant canonicalization, and recognition of
// Lsh/Rsh shift pairs as RotateLeft16 (guarded by canRotate(config, 16)).
//
// NOTE(review): this paste carries embedded stale line numbers and has
// dropped interleaved lines (loop wrappers, continue/brace lines, v.reset
// and return statements), so the block cannot compile as written. Do not
// hand-repair: regenerate from _gen/generic.rules with 'go generate'.
463 func rewriteValuegeneric_OpAdd16(v *Value) bool {
467 config := b.Func.Config
468 // match: (Add16 (Const16 [c]) (Const16 [d]))
469 // result: (Const16 [c+d])
471 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
472 if v_0.Op != OpConst16 {
475 c := auxIntToInt16(v_0.AuxInt)
476 if v_1.Op != OpConst16 {
479 d := auxIntToInt16(v_1.AuxInt)
481 v.AuxInt = int16ToAuxInt(c + d)
486 // match: (Add16 <t> (Mul16 x y) (Mul16 x z))
487 // result: (Mul16 x (Add16 <t> y z))
490 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
491 if v_0.Op != OpMul16 {
497 for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
500 if v_1.Op != OpMul16 {
506 for _i2 := 0; _i2 <= 1; _i2, v_1_0, v_1_1 = _i2+1, v_1_1, v_1_0 {
512 v0 := b.NewValue0(v.Pos, OpAdd16, t)
521 // match: (Add16 (Const16 [0]) x)
524 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
525 if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
534 // match: (Add16 x (Neg16 y))
535 // result: (Sub16 x y)
537 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
539 if v_1.Op != OpNeg16 {
549 // match: (Add16 (Com16 x) x)
550 // result: (Const16 [-1])
552 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
553 if v_0.Op != OpCom16 {
561 v.AuxInt = int16ToAuxInt(-1)
566 // match: (Add16 (Sub16 x t) (Add16 t y))
567 // result: (Add16 x y)
569 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
570 if v_0.Op != OpSub16 {
575 if v_1.Op != OpAdd16 {
581 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
593 // match: (Add16 (Const16 [1]) (Com16 x))
596 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
597 if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 1 || v_1.Op != OpCom16 {
607 // match: (Add16 x (Sub16 y x))
610 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
612 if v_1.Op != OpSub16 {
617 if x != v_1.Args[1] {
625 // match: (Add16 x (Add16 y (Sub16 z x)))
626 // result: (Add16 y z)
628 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
630 if v_1.Op != OpAdd16 {
636 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
638 if v_1_1.Op != OpSub16 {
643 if x != v_1_1.Args[1] {
653 // match: (Add16 (Add16 i:(Const16 <t>) z) x)
654 // cond: (z.Op != OpConst16 && x.Op != OpConst16)
655 // result: (Add16 i (Add16 <t> z x))
657 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
658 if v_0.Op != OpAdd16 {
664 for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
666 if i.Op != OpConst16 {
672 if !(z.Op != OpConst16 && x.Op != OpConst16) {
676 v0 := b.NewValue0(v.Pos, OpAdd16, t)
684 // match: (Add16 (Sub16 i:(Const16 <t>) z) x)
685 // cond: (z.Op != OpConst16 && x.Op != OpConst16)
686 // result: (Add16 i (Sub16 <t> x z))
688 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
689 if v_0.Op != OpSub16 {
694 if i.Op != OpConst16 {
699 if !(z.Op != OpConst16 && x.Op != OpConst16) {
703 v0 := b.NewValue0(v.Pos, OpSub16, t)
710 // match: (Add16 (Const16 <t> [c]) (Add16 (Const16 <t> [d]) x))
711 // result: (Add16 (Const16 <t> [c+d]) x)
713 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
714 if v_0.Op != OpConst16 {
718 c := auxIntToInt16(v_0.AuxInt)
719 if v_1.Op != OpAdd16 {
725 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
726 if v_1_0.Op != OpConst16 || v_1_0.Type != t {
729 d := auxIntToInt16(v_1_0.AuxInt)
732 v0 := b.NewValue0(v.Pos, OpConst16, t)
733 v0.AuxInt = int16ToAuxInt(c + d)
740 // match: (Add16 (Const16 <t> [c]) (Sub16 (Const16 <t> [d]) x))
741 // result: (Sub16 (Const16 <t> [c+d]) x)
743 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
744 if v_0.Op != OpConst16 {
748 c := auxIntToInt16(v_0.AuxInt)
749 if v_1.Op != OpSub16 {
754 if v_1_0.Op != OpConst16 || v_1_0.Type != t {
757 d := auxIntToInt16(v_1_0.AuxInt)
759 v0 := b.NewValue0(v.Pos, OpConst16, t)
760 v0.AuxInt = int16ToAuxInt(c + d)
// The remaining rules recognize shift pairs as 16-bit rotates; each is
// gated on canRotate(config, 16) and, for the variable-shift forms, on
// shiftIsBounded of at least one side.
766 // match: (Add16 (Lsh16x64 x z:(Const64 <t> [c])) (Rsh16Ux64 x (Const64 [d])))
767 // cond: c < 16 && d == 16-c && canRotate(config, 16)
768 // result: (RotateLeft16 x z)
770 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
771 if v_0.Op != OpLsh16x64 {
777 if z.Op != OpConst64 {
780 c := auxIntToInt64(z.AuxInt)
781 if v_1.Op != OpRsh16Ux64 {
785 if x != v_1.Args[0] {
789 if v_1_1.Op != OpConst64 {
792 d := auxIntToInt64(v_1_1.AuxInt)
793 if !(c < 16 && d == 16-c && canRotate(config, 16)) {
796 v.reset(OpRotateLeft16)
802 // match: (Add16 left:(Lsh16x64 x y) right:(Rsh16Ux64 x (Sub64 (Const64 [16]) y)))
803 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
804 // result: (RotateLeft16 x y)
806 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
808 if left.Op != OpLsh16x64 {
814 if right.Op != OpRsh16Ux64 {
818 if x != right.Args[0] {
821 right_1 := right.Args[1]
822 if right_1.Op != OpSub64 {
826 right_1_0 := right_1.Args[0]
827 if right_1_0.Op != OpConst64 || auxIntToInt64(right_1_0.AuxInt) != 16 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
830 v.reset(OpRotateLeft16)
836 // match: (Add16 left:(Lsh16x32 x y) right:(Rsh16Ux32 x (Sub32 (Const32 [16]) y)))
837 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
838 // result: (RotateLeft16 x y)
840 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
842 if left.Op != OpLsh16x32 {
848 if right.Op != OpRsh16Ux32 {
852 if x != right.Args[0] {
855 right_1 := right.Args[1]
856 if right_1.Op != OpSub32 {
860 right_1_0 := right_1.Args[0]
861 if right_1_0.Op != OpConst32 || auxIntToInt32(right_1_0.AuxInt) != 16 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
864 v.reset(OpRotateLeft16)
870 // match: (Add16 left:(Lsh16x16 x y) right:(Rsh16Ux16 x (Sub16 (Const16 [16]) y)))
871 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
872 // result: (RotateLeft16 x y)
874 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
876 if left.Op != OpLsh16x16 {
882 if right.Op != OpRsh16Ux16 {
886 if x != right.Args[0] {
889 right_1 := right.Args[1]
890 if right_1.Op != OpSub16 {
894 right_1_0 := right_1.Args[0]
895 if right_1_0.Op != OpConst16 || auxIntToInt16(right_1_0.AuxInt) != 16 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
898 v.reset(OpRotateLeft16)
904 // match: (Add16 left:(Lsh16x8 x y) right:(Rsh16Ux8 x (Sub8 (Const8 [16]) y)))
905 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
906 // result: (RotateLeft16 x y)
908 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
910 if left.Op != OpLsh16x8 {
916 if right.Op != OpRsh16Ux8 {
920 if x != right.Args[0] {
923 right_1 := right.Args[1]
924 if right_1.Op != OpSub8 {
928 right_1_0 := right_1.Args[0]
929 if right_1_0.Op != OpConst8 || auxIntToInt8(right_1_0.AuxInt) != 16 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
932 v.reset(OpRotateLeft16)
938 // match: (Add16 right:(Rsh16Ux64 x y) left:(Lsh16x64 x z:(Sub64 (Const64 [16]) y)))
939 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
940 // result: (RotateLeft16 x z)
942 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
944 if right.Op != OpRsh16Ux64 {
950 if left.Op != OpLsh16x64 {
954 if x != left.Args[0] {
963 if z_0.Op != OpConst64 || auxIntToInt64(z_0.AuxInt) != 16 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
966 v.reset(OpRotateLeft16)
972 // match: (Add16 right:(Rsh16Ux32 x y) left:(Lsh16x32 x z:(Sub32 (Const32 [16]) y)))
973 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
974 // result: (RotateLeft16 x z)
976 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
978 if right.Op != OpRsh16Ux32 {
984 if left.Op != OpLsh16x32 {
988 if x != left.Args[0] {
997 if z_0.Op != OpConst32 || auxIntToInt32(z_0.AuxInt) != 16 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
1000 v.reset(OpRotateLeft16)
1006 // match: (Add16 right:(Rsh16Ux16 x y) left:(Lsh16x16 x z:(Sub16 (Const16 [16]) y)))
1007 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
1008 // result: (RotateLeft16 x z)
1010 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
1012 if right.Op != OpRsh16Ux16 {
1018 if left.Op != OpLsh16x16 {
1022 if x != left.Args[0] {
1026 if z.Op != OpSub16 {
1031 if z_0.Op != OpConst16 || auxIntToInt16(z_0.AuxInt) != 16 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
1034 v.reset(OpRotateLeft16)
1040 // match: (Add16 right:(Rsh16Ux8 x y) left:(Lsh16x8 x z:(Sub8 (Const8 [16]) y)))
1041 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
1042 // result: (RotateLeft16 x z)
1044 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
1046 if right.Op != OpRsh16Ux8 {
1052 if left.Op != OpLsh16x8 {
1056 if x != left.Args[0] {
1065 if z_0.Op != OpConst8 || auxIntToInt8(z_0.AuxInt) != 16 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
1068 v.reset(OpRotateLeft16)
// rewriteValuegeneric_OpAdd32 applies the generic rewrite rules for Add32
// and reports whether v was rewritten. It mirrors the Add16 rules at 32
// bits: constant folding, distributing over Mul32, additive identity,
// x+(-y) => x-y, (Com32 x)+x => -1, add/sub reassociation and constant
// canonicalization, and Lsh/Rsh pair recognition as RotateLeft32 (gated
// on canRotate(config, 32)).
//
// NOTE(review): this paste carries embedded stale line numbers and has
// dropped interleaved lines (loop wrappers, continue/brace lines, v.reset
// and return statements), so the block cannot compile as written. Do not
// hand-repair: regenerate from _gen/generic.rules with 'go generate'.
1076 func rewriteValuegeneric_OpAdd32(v *Value) bool {
1080 config := b.Func.Config
1081 // match: (Add32 (Const32 [c]) (Const32 [d]))
1082 // result: (Const32 [c+d])
1084 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
1085 if v_0.Op != OpConst32 {
1088 c := auxIntToInt32(v_0.AuxInt)
1089 if v_1.Op != OpConst32 {
1092 d := auxIntToInt32(v_1.AuxInt)
1094 v.AuxInt = int32ToAuxInt(c + d)
1099 // match: (Add32 <t> (Mul32 x y) (Mul32 x z))
1100 // result: (Mul32 x (Add32 <t> y z))
1103 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
1104 if v_0.Op != OpMul32 {
1108 v_0_0 := v_0.Args[0]
1109 v_0_1 := v_0.Args[1]
1110 for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
1113 if v_1.Op != OpMul32 {
1117 v_1_0 := v_1.Args[0]
1118 v_1_1 := v_1.Args[1]
1119 for _i2 := 0; _i2 <= 1; _i2, v_1_0, v_1_1 = _i2+1, v_1_1, v_1_0 {
1125 v0 := b.NewValue0(v.Pos, OpAdd32, t)
1134 // match: (Add32 (Const32 [0]) x)
1137 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
1138 if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
1147 // match: (Add32 x (Neg32 y))
1148 // result: (Sub32 x y)
1150 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
1152 if v_1.Op != OpNeg32 {
1162 // match: (Add32 (Com32 x) x)
1163 // result: (Const32 [-1])
1165 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
1166 if v_0.Op != OpCom32 {
1174 v.AuxInt = int32ToAuxInt(-1)
1179 // match: (Add32 (Sub32 x t) (Add32 t y))
1180 // result: (Add32 x y)
1182 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
1183 if v_0.Op != OpSub32 {
1188 if v_1.Op != OpAdd32 {
1192 v_1_0 := v_1.Args[0]
1193 v_1_1 := v_1.Args[1]
1194 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
1206 // match: (Add32 (Const32 [1]) (Com32 x))
1207 // result: (Neg32 x)
1209 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
1210 if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 1 || v_1.Op != OpCom32 {
1220 // match: (Add32 x (Sub32 y x))
1223 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
1225 if v_1.Op != OpSub32 {
1230 if x != v_1.Args[1] {
1238 // match: (Add32 x (Add32 y (Sub32 z x)))
1239 // result: (Add32 y z)
1241 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
1243 if v_1.Op != OpAdd32 {
1247 v_1_0 := v_1.Args[0]
1248 v_1_1 := v_1.Args[1]
1249 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
1251 if v_1_1.Op != OpSub32 {
1256 if x != v_1_1.Args[1] {
1266 // match: (Add32 (Add32 i:(Const32 <t>) z) x)
1267 // cond: (z.Op != OpConst32 && x.Op != OpConst32)
1268 // result: (Add32 i (Add32 <t> z x))
1270 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
1271 if v_0.Op != OpAdd32 {
1275 v_0_0 := v_0.Args[0]
1276 v_0_1 := v_0.Args[1]
1277 for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
1279 if i.Op != OpConst32 {
1285 if !(z.Op != OpConst32 && x.Op != OpConst32) {
1289 v0 := b.NewValue0(v.Pos, OpAdd32, t)
1297 // match: (Add32 (Sub32 i:(Const32 <t>) z) x)
1298 // cond: (z.Op != OpConst32 && x.Op != OpConst32)
1299 // result: (Add32 i (Sub32 <t> x z))
1301 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
1302 if v_0.Op != OpSub32 {
1307 if i.Op != OpConst32 {
1312 if !(z.Op != OpConst32 && x.Op != OpConst32) {
1316 v0 := b.NewValue0(v.Pos, OpSub32, t)
1323 // match: (Add32 (Const32 <t> [c]) (Add32 (Const32 <t> [d]) x))
1324 // result: (Add32 (Const32 <t> [c+d]) x)
1326 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
1327 if v_0.Op != OpConst32 {
1331 c := auxIntToInt32(v_0.AuxInt)
1332 if v_1.Op != OpAdd32 {
1336 v_1_0 := v_1.Args[0]
1337 v_1_1 := v_1.Args[1]
1338 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
1339 if v_1_0.Op != OpConst32 || v_1_0.Type != t {
1342 d := auxIntToInt32(v_1_0.AuxInt)
1345 v0 := b.NewValue0(v.Pos, OpConst32, t)
1346 v0.AuxInt = int32ToAuxInt(c + d)
1353 // match: (Add32 (Const32 <t> [c]) (Sub32 (Const32 <t> [d]) x))
1354 // result: (Sub32 (Const32 <t> [c+d]) x)
1356 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
1357 if v_0.Op != OpConst32 {
1361 c := auxIntToInt32(v_0.AuxInt)
1362 if v_1.Op != OpSub32 {
1366 v_1_0 := v_1.Args[0]
1367 if v_1_0.Op != OpConst32 || v_1_0.Type != t {
1370 d := auxIntToInt32(v_1_0.AuxInt)
1372 v0 := b.NewValue0(v.Pos, OpConst32, t)
1373 v0.AuxInt = int32ToAuxInt(c + d)
// The remaining rules recognize shift pairs as 32-bit rotates; each is
// gated on canRotate(config, 32) and, for the variable-shift forms, on
// shiftIsBounded of at least one side.
1379 // match: (Add32 (Lsh32x64 x z:(Const64 <t> [c])) (Rsh32Ux64 x (Const64 [d])))
1380 // cond: c < 32 && d == 32-c && canRotate(config, 32)
1381 // result: (RotateLeft32 x z)
1383 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
1384 if v_0.Op != OpLsh32x64 {
1390 if z.Op != OpConst64 {
1393 c := auxIntToInt64(z.AuxInt)
1394 if v_1.Op != OpRsh32Ux64 {
1398 if x != v_1.Args[0] {
1401 v_1_1 := v_1.Args[1]
1402 if v_1_1.Op != OpConst64 {
1405 d := auxIntToInt64(v_1_1.AuxInt)
1406 if !(c < 32 && d == 32-c && canRotate(config, 32)) {
1409 v.reset(OpRotateLeft32)
1415 // match: (Add32 left:(Lsh32x64 x y) right:(Rsh32Ux64 x (Sub64 (Const64 [32]) y)))
1416 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
1417 // result: (RotateLeft32 x y)
1419 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
1421 if left.Op != OpLsh32x64 {
1427 if right.Op != OpRsh32Ux64 {
1431 if x != right.Args[0] {
1434 right_1 := right.Args[1]
1435 if right_1.Op != OpSub64 {
1439 right_1_0 := right_1.Args[0]
1440 if right_1_0.Op != OpConst64 || auxIntToInt64(right_1_0.AuxInt) != 32 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
1443 v.reset(OpRotateLeft32)
1449 // match: (Add32 left:(Lsh32x32 x y) right:(Rsh32Ux32 x (Sub32 (Const32 [32]) y)))
1450 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
1451 // result: (RotateLeft32 x y)
1453 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
1455 if left.Op != OpLsh32x32 {
1461 if right.Op != OpRsh32Ux32 {
1465 if x != right.Args[0] {
1468 right_1 := right.Args[1]
1469 if right_1.Op != OpSub32 {
1473 right_1_0 := right_1.Args[0]
1474 if right_1_0.Op != OpConst32 || auxIntToInt32(right_1_0.AuxInt) != 32 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
1477 v.reset(OpRotateLeft32)
1483 // match: (Add32 left:(Lsh32x16 x y) right:(Rsh32Ux16 x (Sub16 (Const16 [32]) y)))
1484 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
1485 // result: (RotateLeft32 x y)
1487 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
1489 if left.Op != OpLsh32x16 {
1495 if right.Op != OpRsh32Ux16 {
1499 if x != right.Args[0] {
1502 right_1 := right.Args[1]
1503 if right_1.Op != OpSub16 {
1507 right_1_0 := right_1.Args[0]
1508 if right_1_0.Op != OpConst16 || auxIntToInt16(right_1_0.AuxInt) != 32 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
1511 v.reset(OpRotateLeft32)
1517 // match: (Add32 left:(Lsh32x8 x y) right:(Rsh32Ux8 x (Sub8 (Const8 [32]) y)))
1518 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
1519 // result: (RotateLeft32 x y)
1521 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
1523 if left.Op != OpLsh32x8 {
1529 if right.Op != OpRsh32Ux8 {
1533 if x != right.Args[0] {
1536 right_1 := right.Args[1]
1537 if right_1.Op != OpSub8 {
1541 right_1_0 := right_1.Args[0]
1542 if right_1_0.Op != OpConst8 || auxIntToInt8(right_1_0.AuxInt) != 32 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
1545 v.reset(OpRotateLeft32)
1551 // match: (Add32 right:(Rsh32Ux64 x y) left:(Lsh32x64 x z:(Sub64 (Const64 [32]) y)))
1552 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
1553 // result: (RotateLeft32 x z)
1555 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
1557 if right.Op != OpRsh32Ux64 {
1563 if left.Op != OpLsh32x64 {
1567 if x != left.Args[0] {
1571 if z.Op != OpSub64 {
1576 if z_0.Op != OpConst64 || auxIntToInt64(z_0.AuxInt) != 32 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
1579 v.reset(OpRotateLeft32)
1585 // match: (Add32 right:(Rsh32Ux32 x y) left:(Lsh32x32 x z:(Sub32 (Const32 [32]) y)))
1586 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
1587 // result: (RotateLeft32 x z)
1589 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
1591 if right.Op != OpRsh32Ux32 {
1597 if left.Op != OpLsh32x32 {
1601 if x != left.Args[0] {
1605 if z.Op != OpSub32 {
1610 if z_0.Op != OpConst32 || auxIntToInt32(z_0.AuxInt) != 32 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
1613 v.reset(OpRotateLeft32)
1619 // match: (Add32 right:(Rsh32Ux16 x y) left:(Lsh32x16 x z:(Sub16 (Const16 [32]) y)))
1620 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
1621 // result: (RotateLeft32 x z)
1623 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
1625 if right.Op != OpRsh32Ux16 {
1631 if left.Op != OpLsh32x16 {
1635 if x != left.Args[0] {
1639 if z.Op != OpSub16 {
1644 if z_0.Op != OpConst16 || auxIntToInt16(z_0.AuxInt) != 32 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
1647 v.reset(OpRotateLeft32)
1653 // match: (Add32 right:(Rsh32Ux8 x y) left:(Lsh32x8 x z:(Sub8 (Const8 [32]) y)))
1654 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
1655 // result: (RotateLeft32 x z)
1657 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
1659 if right.Op != OpRsh32Ux8 {
1665 if left.Op != OpLsh32x8 {
1669 if x != left.Args[0] {
1678 if z_0.Op != OpConst8 || auxIntToInt8(z_0.AuxInt) != 32 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
1681 v.reset(OpRotateLeft32)
1689 func rewriteValuegeneric_OpAdd32F(v *Value) bool {
1692 // match: (Add32F (Const32F [c]) (Const32F [d]))
1694 // result: (Const32F [c+d])
1696 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
1697 if v_0.Op != OpConst32F {
1700 c := auxIntToFloat32(v_0.AuxInt)
1701 if v_1.Op != OpConst32F {
1704 d := auxIntToFloat32(v_1.AuxInt)
1709 v.AuxInt = float32ToAuxInt(c + d)
1716 func rewriteValuegeneric_OpAdd64(v *Value) bool {
1720 config := b.Func.Config
1721 // match: (Add64 (Const64 [c]) (Const64 [d]))
1722 // result: (Const64 [c+d])
1724 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
1725 if v_0.Op != OpConst64 {
1728 c := auxIntToInt64(v_0.AuxInt)
1729 if v_1.Op != OpConst64 {
1732 d := auxIntToInt64(v_1.AuxInt)
1734 v.AuxInt = int64ToAuxInt(c + d)
1739 // match: (Add64 <t> (Mul64 x y) (Mul64 x z))
1740 // result: (Mul64 x (Add64 <t> y z))
1743 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
1744 if v_0.Op != OpMul64 {
1748 v_0_0 := v_0.Args[0]
1749 v_0_1 := v_0.Args[1]
1750 for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
1753 if v_1.Op != OpMul64 {
1757 v_1_0 := v_1.Args[0]
1758 v_1_1 := v_1.Args[1]
1759 for _i2 := 0; _i2 <= 1; _i2, v_1_0, v_1_1 = _i2+1, v_1_1, v_1_0 {
1765 v0 := b.NewValue0(v.Pos, OpAdd64, t)
1774 // match: (Add64 (Const64 [0]) x)
1777 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
1778 if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
1787 // match: (Add64 x (Neg64 y))
1788 // result: (Sub64 x y)
1790 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
1792 if v_1.Op != OpNeg64 {
1802 // match: (Add64 (Com64 x) x)
1803 // result: (Const64 [-1])
1805 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
1806 if v_0.Op != OpCom64 {
1814 v.AuxInt = int64ToAuxInt(-1)
1819 // match: (Add64 (Sub64 x t) (Add64 t y))
1820 // result: (Add64 x y)
1822 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
1823 if v_0.Op != OpSub64 {
1828 if v_1.Op != OpAdd64 {
1832 v_1_0 := v_1.Args[0]
1833 v_1_1 := v_1.Args[1]
1834 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
1846 // match: (Add64 (Const64 [1]) (Com64 x))
1847 // result: (Neg64 x)
1849 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
1850 if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 1 || v_1.Op != OpCom64 {
1860 // match: (Add64 x (Sub64 y x))
1863 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
1865 if v_1.Op != OpSub64 {
1870 if x != v_1.Args[1] {
1878 // match: (Add64 x (Add64 y (Sub64 z x)))
1879 // result: (Add64 y z)
1881 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
1883 if v_1.Op != OpAdd64 {
1887 v_1_0 := v_1.Args[0]
1888 v_1_1 := v_1.Args[1]
1889 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
1891 if v_1_1.Op != OpSub64 {
1896 if x != v_1_1.Args[1] {
1906 // match: (Add64 (Add64 i:(Const64 <t>) z) x)
1907 // cond: (z.Op != OpConst64 && x.Op != OpConst64)
1908 // result: (Add64 i (Add64 <t> z x))
1910 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
1911 if v_0.Op != OpAdd64 {
1915 v_0_0 := v_0.Args[0]
1916 v_0_1 := v_0.Args[1]
1917 for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
1919 if i.Op != OpConst64 {
1925 if !(z.Op != OpConst64 && x.Op != OpConst64) {
1929 v0 := b.NewValue0(v.Pos, OpAdd64, t)
1937 // match: (Add64 (Sub64 i:(Const64 <t>) z) x)
1938 // cond: (z.Op != OpConst64 && x.Op != OpConst64)
1939 // result: (Add64 i (Sub64 <t> x z))
1941 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
1942 if v_0.Op != OpSub64 {
1947 if i.Op != OpConst64 {
1952 if !(z.Op != OpConst64 && x.Op != OpConst64) {
1956 v0 := b.NewValue0(v.Pos, OpSub64, t)
1963 // match: (Add64 (Const64 <t> [c]) (Add64 (Const64 <t> [d]) x))
1964 // result: (Add64 (Const64 <t> [c+d]) x)
1966 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
1967 if v_0.Op != OpConst64 {
1971 c := auxIntToInt64(v_0.AuxInt)
1972 if v_1.Op != OpAdd64 {
1976 v_1_0 := v_1.Args[0]
1977 v_1_1 := v_1.Args[1]
1978 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
1979 if v_1_0.Op != OpConst64 || v_1_0.Type != t {
1982 d := auxIntToInt64(v_1_0.AuxInt)
1985 v0 := b.NewValue0(v.Pos, OpConst64, t)
1986 v0.AuxInt = int64ToAuxInt(c + d)
1993 // match: (Add64 (Const64 <t> [c]) (Sub64 (Const64 <t> [d]) x))
1994 // result: (Sub64 (Const64 <t> [c+d]) x)
1996 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
1997 if v_0.Op != OpConst64 {
2001 c := auxIntToInt64(v_0.AuxInt)
2002 if v_1.Op != OpSub64 {
2006 v_1_0 := v_1.Args[0]
2007 if v_1_0.Op != OpConst64 || v_1_0.Type != t {
2010 d := auxIntToInt64(v_1_0.AuxInt)
2012 v0 := b.NewValue0(v.Pos, OpConst64, t)
2013 v0.AuxInt = int64ToAuxInt(c + d)
2019 // match: (Add64 (Lsh64x64 x z:(Const64 <t> [c])) (Rsh64Ux64 x (Const64 [d])))
2020 // cond: c < 64 && d == 64-c && canRotate(config, 64)
2021 // result: (RotateLeft64 x z)
2023 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
2024 if v_0.Op != OpLsh64x64 {
2030 if z.Op != OpConst64 {
2033 c := auxIntToInt64(z.AuxInt)
2034 if v_1.Op != OpRsh64Ux64 {
2038 if x != v_1.Args[0] {
2041 v_1_1 := v_1.Args[1]
2042 if v_1_1.Op != OpConst64 {
2045 d := auxIntToInt64(v_1_1.AuxInt)
2046 if !(c < 64 && d == 64-c && canRotate(config, 64)) {
2049 v.reset(OpRotateLeft64)
2055 // match: (Add64 left:(Lsh64x64 x y) right:(Rsh64Ux64 x (Sub64 (Const64 [64]) y)))
2056 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
2057 // result: (RotateLeft64 x y)
2059 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
2061 if left.Op != OpLsh64x64 {
2067 if right.Op != OpRsh64Ux64 {
2071 if x != right.Args[0] {
2074 right_1 := right.Args[1]
2075 if right_1.Op != OpSub64 {
2079 right_1_0 := right_1.Args[0]
2080 if right_1_0.Op != OpConst64 || auxIntToInt64(right_1_0.AuxInt) != 64 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
2083 v.reset(OpRotateLeft64)
2089 // match: (Add64 left:(Lsh64x32 x y) right:(Rsh64Ux32 x (Sub32 (Const32 [64]) y)))
2090 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
2091 // result: (RotateLeft64 x y)
2093 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
2095 if left.Op != OpLsh64x32 {
2101 if right.Op != OpRsh64Ux32 {
2105 if x != right.Args[0] {
2108 right_1 := right.Args[1]
2109 if right_1.Op != OpSub32 {
2113 right_1_0 := right_1.Args[0]
2114 if right_1_0.Op != OpConst32 || auxIntToInt32(right_1_0.AuxInt) != 64 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
2117 v.reset(OpRotateLeft64)
2123 // match: (Add64 left:(Lsh64x16 x y) right:(Rsh64Ux16 x (Sub16 (Const16 [64]) y)))
2124 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
2125 // result: (RotateLeft64 x y)
2127 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
2129 if left.Op != OpLsh64x16 {
2135 if right.Op != OpRsh64Ux16 {
2139 if x != right.Args[0] {
2142 right_1 := right.Args[1]
2143 if right_1.Op != OpSub16 {
2147 right_1_0 := right_1.Args[0]
2148 if right_1_0.Op != OpConst16 || auxIntToInt16(right_1_0.AuxInt) != 64 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
2151 v.reset(OpRotateLeft64)
2157 // match: (Add64 left:(Lsh64x8 x y) right:(Rsh64Ux8 x (Sub8 (Const8 [64]) y)))
2158 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
2159 // result: (RotateLeft64 x y)
2161 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
2163 if left.Op != OpLsh64x8 {
2169 if right.Op != OpRsh64Ux8 {
2173 if x != right.Args[0] {
2176 right_1 := right.Args[1]
2177 if right_1.Op != OpSub8 {
2181 right_1_0 := right_1.Args[0]
2182 if right_1_0.Op != OpConst8 || auxIntToInt8(right_1_0.AuxInt) != 64 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
2185 v.reset(OpRotateLeft64)
2191 // match: (Add64 right:(Rsh64Ux64 x y) left:(Lsh64x64 x z:(Sub64 (Const64 [64]) y)))
2192 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
2193 // result: (RotateLeft64 x z)
2195 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
2197 if right.Op != OpRsh64Ux64 {
2203 if left.Op != OpLsh64x64 {
2207 if x != left.Args[0] {
2211 if z.Op != OpSub64 {
2216 if z_0.Op != OpConst64 || auxIntToInt64(z_0.AuxInt) != 64 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
2219 v.reset(OpRotateLeft64)
2225 // match: (Add64 right:(Rsh64Ux32 x y) left:(Lsh64x32 x z:(Sub32 (Const32 [64]) y)))
2226 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
2227 // result: (RotateLeft64 x z)
2229 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
2231 if right.Op != OpRsh64Ux32 {
2237 if left.Op != OpLsh64x32 {
2241 if x != left.Args[0] {
2245 if z.Op != OpSub32 {
2250 if z_0.Op != OpConst32 || auxIntToInt32(z_0.AuxInt) != 64 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
2253 v.reset(OpRotateLeft64)
2259 // match: (Add64 right:(Rsh64Ux16 x y) left:(Lsh64x16 x z:(Sub16 (Const16 [64]) y)))
2260 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
2261 // result: (RotateLeft64 x z)
2263 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
2265 if right.Op != OpRsh64Ux16 {
2271 if left.Op != OpLsh64x16 {
2275 if x != left.Args[0] {
2279 if z.Op != OpSub16 {
2284 if z_0.Op != OpConst16 || auxIntToInt16(z_0.AuxInt) != 64 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
2287 v.reset(OpRotateLeft64)
2293 // match: (Add64 right:(Rsh64Ux8 x y) left:(Lsh64x8 x z:(Sub8 (Const8 [64]) y)))
2294 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
2295 // result: (RotateLeft64 x z)
2297 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
2299 if right.Op != OpRsh64Ux8 {
2305 if left.Op != OpLsh64x8 {
2309 if x != left.Args[0] {
2318 if z_0.Op != OpConst8 || auxIntToInt8(z_0.AuxInt) != 64 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
2321 v.reset(OpRotateLeft64)
2329 func rewriteValuegeneric_OpAdd64F(v *Value) bool {
2332 // match: (Add64F (Const64F [c]) (Const64F [d]))
2334 // result: (Const64F [c+d])
2336 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
2337 if v_0.Op != OpConst64F {
2340 c := auxIntToFloat64(v_0.AuxInt)
2341 if v_1.Op != OpConst64F {
2344 d := auxIntToFloat64(v_1.AuxInt)
2349 v.AuxInt = float64ToAuxInt(c + d)
2356 func rewriteValuegeneric_OpAdd8(v *Value) bool {
2360 config := b.Func.Config
2361 // match: (Add8 (Const8 [c]) (Const8 [d]))
2362 // result: (Const8 [c+d])
2364 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
2365 if v_0.Op != OpConst8 {
2368 c := auxIntToInt8(v_0.AuxInt)
2369 if v_1.Op != OpConst8 {
2372 d := auxIntToInt8(v_1.AuxInt)
2374 v.AuxInt = int8ToAuxInt(c + d)
2379 // match: (Add8 <t> (Mul8 x y) (Mul8 x z))
2380 // result: (Mul8 x (Add8 <t> y z))
2383 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
2384 if v_0.Op != OpMul8 {
2388 v_0_0 := v_0.Args[0]
2389 v_0_1 := v_0.Args[1]
2390 for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
2393 if v_1.Op != OpMul8 {
2397 v_1_0 := v_1.Args[0]
2398 v_1_1 := v_1.Args[1]
2399 for _i2 := 0; _i2 <= 1; _i2, v_1_0, v_1_1 = _i2+1, v_1_1, v_1_0 {
2405 v0 := b.NewValue0(v.Pos, OpAdd8, t)
2414 // match: (Add8 (Const8 [0]) x)
2417 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
2418 if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
2427 // match: (Add8 x (Neg8 y))
2428 // result: (Sub8 x y)
2430 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
2432 if v_1.Op != OpNeg8 {
2442 // match: (Add8 (Com8 x) x)
2443 // result: (Const8 [-1])
2445 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
2446 if v_0.Op != OpCom8 {
2454 v.AuxInt = int8ToAuxInt(-1)
2459 // match: (Add8 (Sub8 x t) (Add8 t y))
2460 // result: (Add8 x y)
2462 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
2463 if v_0.Op != OpSub8 {
2468 if v_1.Op != OpAdd8 {
2472 v_1_0 := v_1.Args[0]
2473 v_1_1 := v_1.Args[1]
2474 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
2486 // match: (Add8 (Const8 [1]) (Com8 x))
2489 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
2490 if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 1 || v_1.Op != OpCom8 {
2500 // match: (Add8 x (Sub8 y x))
2503 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
2505 if v_1.Op != OpSub8 {
2510 if x != v_1.Args[1] {
2518 // match: (Add8 x (Add8 y (Sub8 z x)))
2519 // result: (Add8 y z)
2521 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
2523 if v_1.Op != OpAdd8 {
2527 v_1_0 := v_1.Args[0]
2528 v_1_1 := v_1.Args[1]
2529 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
2531 if v_1_1.Op != OpSub8 {
2536 if x != v_1_1.Args[1] {
2546 // match: (Add8 (Add8 i:(Const8 <t>) z) x)
2547 // cond: (z.Op != OpConst8 && x.Op != OpConst8)
2548 // result: (Add8 i (Add8 <t> z x))
2550 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
2551 if v_0.Op != OpAdd8 {
2555 v_0_0 := v_0.Args[0]
2556 v_0_1 := v_0.Args[1]
2557 for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
2559 if i.Op != OpConst8 {
2565 if !(z.Op != OpConst8 && x.Op != OpConst8) {
2569 v0 := b.NewValue0(v.Pos, OpAdd8, t)
2577 // match: (Add8 (Sub8 i:(Const8 <t>) z) x)
2578 // cond: (z.Op != OpConst8 && x.Op != OpConst8)
2579 // result: (Add8 i (Sub8 <t> x z))
2581 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
2582 if v_0.Op != OpSub8 {
2587 if i.Op != OpConst8 {
2592 if !(z.Op != OpConst8 && x.Op != OpConst8) {
2596 v0 := b.NewValue0(v.Pos, OpSub8, t)
2603 // match: (Add8 (Const8 <t> [c]) (Add8 (Const8 <t> [d]) x))
2604 // result: (Add8 (Const8 <t> [c+d]) x)
2606 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
2607 if v_0.Op != OpConst8 {
2611 c := auxIntToInt8(v_0.AuxInt)
2612 if v_1.Op != OpAdd8 {
2616 v_1_0 := v_1.Args[0]
2617 v_1_1 := v_1.Args[1]
2618 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
2619 if v_1_0.Op != OpConst8 || v_1_0.Type != t {
2622 d := auxIntToInt8(v_1_0.AuxInt)
2625 v0 := b.NewValue0(v.Pos, OpConst8, t)
2626 v0.AuxInt = int8ToAuxInt(c + d)
2633 // match: (Add8 (Const8 <t> [c]) (Sub8 (Const8 <t> [d]) x))
2634 // result: (Sub8 (Const8 <t> [c+d]) x)
2636 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
2637 if v_0.Op != OpConst8 {
2641 c := auxIntToInt8(v_0.AuxInt)
2642 if v_1.Op != OpSub8 {
2646 v_1_0 := v_1.Args[0]
2647 if v_1_0.Op != OpConst8 || v_1_0.Type != t {
2650 d := auxIntToInt8(v_1_0.AuxInt)
2652 v0 := b.NewValue0(v.Pos, OpConst8, t)
2653 v0.AuxInt = int8ToAuxInt(c + d)
2659 // match: (Add8 (Lsh8x64 x z:(Const64 <t> [c])) (Rsh8Ux64 x (Const64 [d])))
2660 // cond: c < 8 && d == 8-c && canRotate(config, 8)
2661 // result: (RotateLeft8 x z)
2663 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
2664 if v_0.Op != OpLsh8x64 {
2670 if z.Op != OpConst64 {
2673 c := auxIntToInt64(z.AuxInt)
2674 if v_1.Op != OpRsh8Ux64 {
2678 if x != v_1.Args[0] {
2681 v_1_1 := v_1.Args[1]
2682 if v_1_1.Op != OpConst64 {
2685 d := auxIntToInt64(v_1_1.AuxInt)
2686 if !(c < 8 && d == 8-c && canRotate(config, 8)) {
2689 v.reset(OpRotateLeft8)
2695 // match: (Add8 left:(Lsh8x64 x y) right:(Rsh8Ux64 x (Sub64 (Const64 [8]) y)))
2696 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
2697 // result: (RotateLeft8 x y)
2699 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
2701 if left.Op != OpLsh8x64 {
2707 if right.Op != OpRsh8Ux64 {
2711 if x != right.Args[0] {
2714 right_1 := right.Args[1]
2715 if right_1.Op != OpSub64 {
2719 right_1_0 := right_1.Args[0]
2720 if right_1_0.Op != OpConst64 || auxIntToInt64(right_1_0.AuxInt) != 8 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
2723 v.reset(OpRotateLeft8)
2729 // match: (Add8 left:(Lsh8x32 x y) right:(Rsh8Ux32 x (Sub32 (Const32 [8]) y)))
2730 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
2731 // result: (RotateLeft8 x y)
2733 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
2735 if left.Op != OpLsh8x32 {
2741 if right.Op != OpRsh8Ux32 {
2745 if x != right.Args[0] {
2748 right_1 := right.Args[1]
2749 if right_1.Op != OpSub32 {
2753 right_1_0 := right_1.Args[0]
2754 if right_1_0.Op != OpConst32 || auxIntToInt32(right_1_0.AuxInt) != 8 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
2757 v.reset(OpRotateLeft8)
2763 // match: (Add8 left:(Lsh8x16 x y) right:(Rsh8Ux16 x (Sub16 (Const16 [8]) y)))
2764 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
2765 // result: (RotateLeft8 x y)
2767 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
2769 if left.Op != OpLsh8x16 {
2775 if right.Op != OpRsh8Ux16 {
2779 if x != right.Args[0] {
2782 right_1 := right.Args[1]
2783 if right_1.Op != OpSub16 {
2787 right_1_0 := right_1.Args[0]
2788 if right_1_0.Op != OpConst16 || auxIntToInt16(right_1_0.AuxInt) != 8 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
2791 v.reset(OpRotateLeft8)
2797 // match: (Add8 left:(Lsh8x8 x y) right:(Rsh8Ux8 x (Sub8 (Const8 [8]) y)))
2798 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
2799 // result: (RotateLeft8 x y)
2801 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
2803 if left.Op != OpLsh8x8 {
2809 if right.Op != OpRsh8Ux8 {
2813 if x != right.Args[0] {
2816 right_1 := right.Args[1]
2817 if right_1.Op != OpSub8 {
2821 right_1_0 := right_1.Args[0]
2822 if right_1_0.Op != OpConst8 || auxIntToInt8(right_1_0.AuxInt) != 8 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
2825 v.reset(OpRotateLeft8)
2831 // match: (Add8 right:(Rsh8Ux64 x y) left:(Lsh8x64 x z:(Sub64 (Const64 [8]) y)))
2832 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
2833 // result: (RotateLeft8 x z)
2835 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
2837 if right.Op != OpRsh8Ux64 {
2843 if left.Op != OpLsh8x64 {
2847 if x != left.Args[0] {
2851 if z.Op != OpSub64 {
2856 if z_0.Op != OpConst64 || auxIntToInt64(z_0.AuxInt) != 8 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
2859 v.reset(OpRotateLeft8)
2865 // match: (Add8 right:(Rsh8Ux32 x y) left:(Lsh8x32 x z:(Sub32 (Const32 [8]) y)))
2866 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
2867 // result: (RotateLeft8 x z)
2869 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
2871 if right.Op != OpRsh8Ux32 {
2877 if left.Op != OpLsh8x32 {
2881 if x != left.Args[0] {
2885 if z.Op != OpSub32 {
2890 if z_0.Op != OpConst32 || auxIntToInt32(z_0.AuxInt) != 8 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
2893 v.reset(OpRotateLeft8)
2899 // match: (Add8 right:(Rsh8Ux16 x y) left:(Lsh8x16 x z:(Sub16 (Const16 [8]) y)))
2900 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
2901 // result: (RotateLeft8 x z)
2903 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
2905 if right.Op != OpRsh8Ux16 {
2911 if left.Op != OpLsh8x16 {
2915 if x != left.Args[0] {
2919 if z.Op != OpSub16 {
2924 if z_0.Op != OpConst16 || auxIntToInt16(z_0.AuxInt) != 8 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
2927 v.reset(OpRotateLeft8)
2933 // match: (Add8 right:(Rsh8Ux8 x y) left:(Lsh8x8 x z:(Sub8 (Const8 [8]) y)))
2934 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
2935 // result: (RotateLeft8 x z)
2937 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
2939 if right.Op != OpRsh8Ux8 {
2945 if left.Op != OpLsh8x8 {
2949 if x != left.Args[0] {
2958 if z_0.Op != OpConst8 || auxIntToInt8(z_0.AuxInt) != 8 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
2961 v.reset(OpRotateLeft8)
2969 func rewriteValuegeneric_OpAddPtr(v *Value) bool {
2972 // match: (AddPtr <t> x (Const64 [c]))
2973 // result: (OffPtr <t> x [c])
2977 if v_1.Op != OpConst64 {
2980 c := auxIntToInt64(v_1.AuxInt)
2983 v.AuxInt = int64ToAuxInt(c)
2987 // match: (AddPtr <t> x (Const32 [c]))
2988 // result: (OffPtr <t> x [int64(c)])
2992 if v_1.Op != OpConst32 {
2995 c := auxIntToInt32(v_1.AuxInt)
2998 v.AuxInt = int64ToAuxInt(int64(c))
3004 func rewriteValuegeneric_OpAnd16(v *Value) bool {
3008 // match: (And16 (Const16 [c]) (Const16 [d]))
3009 // result: (Const16 [c&d])
3011 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
3012 if v_0.Op != OpConst16 {
3015 c := auxIntToInt16(v_0.AuxInt)
3016 if v_1.Op != OpConst16 {
3019 d := auxIntToInt16(v_1.AuxInt)
3021 v.AuxInt = int16ToAuxInt(c & d)
3026 // match: (And16 <t> (Com16 x) (Com16 y))
3027 // result: (Com16 (Or16 <t> x y))
3030 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
3031 if v_0.Op != OpCom16 {
3035 if v_1.Op != OpCom16 {
3040 v0 := b.NewValue0(v.Pos, OpOr16, t)
3047 // match: (And16 (Const16 [m]) (Rsh16Ux64 _ (Const64 [c])))
3048 // cond: c >= int64(16-ntz16(m))
3049 // result: (Const16 [0])
3051 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
3052 if v_0.Op != OpConst16 {
3055 m := auxIntToInt16(v_0.AuxInt)
3056 if v_1.Op != OpRsh16Ux64 {
3060 v_1_1 := v_1.Args[1]
3061 if v_1_1.Op != OpConst64 {
3064 c := auxIntToInt64(v_1_1.AuxInt)
3065 if !(c >= int64(16-ntz16(m))) {
3069 v.AuxInt = int16ToAuxInt(0)
3074 // match: (And16 (Const16 [m]) (Lsh16x64 _ (Const64 [c])))
3075 // cond: c >= int64(16-nlz16(m))
3076 // result: (Const16 [0])
3078 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
3079 if v_0.Op != OpConst16 {
3082 m := auxIntToInt16(v_0.AuxInt)
3083 if v_1.Op != OpLsh16x64 {
3087 v_1_1 := v_1.Args[1]
3088 if v_1_1.Op != OpConst64 {
3091 c := auxIntToInt64(v_1_1.AuxInt)
3092 if !(c >= int64(16-nlz16(m))) {
3096 v.AuxInt = int16ToAuxInt(0)
3101 // match: (And16 x x)
3111 // match: (And16 (Const16 [-1]) x)
3114 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
3115 if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != -1 {
3124 // match: (And16 (Const16 [0]) _)
3125 // result: (Const16 [0])
3127 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
3128 if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
3132 v.AuxInt = int16ToAuxInt(0)
3137 // match: (And16 (Com16 x) x)
3138 // result: (Const16 [0])
3140 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
3141 if v_0.Op != OpCom16 {
3149 v.AuxInt = int16ToAuxInt(0)
3154 // match: (And16 x (And16 x y))
3155 // result: (And16 x y)
3157 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
3159 if v_1.Op != OpAnd16 {
3163 v_1_0 := v_1.Args[0]
3164 v_1_1 := v_1.Args[1]
3165 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
3177 // match: (And16 (And16 i:(Const16 <t>) z) x)
3178 // cond: (z.Op != OpConst16 && x.Op != OpConst16)
3179 // result: (And16 i (And16 <t> z x))
3181 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
3182 if v_0.Op != OpAnd16 {
3186 v_0_0 := v_0.Args[0]
3187 v_0_1 := v_0.Args[1]
3188 for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
3190 if i.Op != OpConst16 {
3196 if !(z.Op != OpConst16 && x.Op != OpConst16) {
3200 v0 := b.NewValue0(v.Pos, OpAnd16, t)
3208 // match: (And16 (Const16 <t> [c]) (And16 (Const16 <t> [d]) x))
3209 // result: (And16 (Const16 <t> [c&d]) x)
3211 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
3212 if v_0.Op != OpConst16 {
3216 c := auxIntToInt16(v_0.AuxInt)
3217 if v_1.Op != OpAnd16 {
3221 v_1_0 := v_1.Args[0]
3222 v_1_1 := v_1.Args[1]
3223 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
3224 if v_1_0.Op != OpConst16 || v_1_0.Type != t {
3227 d := auxIntToInt16(v_1_0.AuxInt)
3230 v0 := b.NewValue0(v.Pos, OpConst16, t)
3231 v0.AuxInt = int16ToAuxInt(c & d)
3240 func rewriteValuegeneric_OpAnd32(v *Value) bool {
3244 // match: (And32 (Const32 [c]) (Const32 [d]))
3245 // result: (Const32 [c&d])
3247 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
3248 if v_0.Op != OpConst32 {
3251 c := auxIntToInt32(v_0.AuxInt)
3252 if v_1.Op != OpConst32 {
3255 d := auxIntToInt32(v_1.AuxInt)
3257 v.AuxInt = int32ToAuxInt(c & d)
3262 // match: (And32 <t> (Com32 x) (Com32 y))
3263 // result: (Com32 (Or32 <t> x y))
3266 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
3267 if v_0.Op != OpCom32 {
3271 if v_1.Op != OpCom32 {
3276 v0 := b.NewValue0(v.Pos, OpOr32, t)
3283 // match: (And32 (Const32 [m]) (Rsh32Ux64 _ (Const64 [c])))
3284 // cond: c >= int64(32-ntz32(m))
3285 // result: (Const32 [0])
3287 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
3288 if v_0.Op != OpConst32 {
3291 m := auxIntToInt32(v_0.AuxInt)
3292 if v_1.Op != OpRsh32Ux64 {
3296 v_1_1 := v_1.Args[1]
3297 if v_1_1.Op != OpConst64 {
3300 c := auxIntToInt64(v_1_1.AuxInt)
3301 if !(c >= int64(32-ntz32(m))) {
3305 v.AuxInt = int32ToAuxInt(0)
3310 // match: (And32 (Const32 [m]) (Lsh32x64 _ (Const64 [c])))
3311 // cond: c >= int64(32-nlz32(m))
3312 // result: (Const32 [0])
3314 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
3315 if v_0.Op != OpConst32 {
3318 m := auxIntToInt32(v_0.AuxInt)
3319 if v_1.Op != OpLsh32x64 {
3323 v_1_1 := v_1.Args[1]
3324 if v_1_1.Op != OpConst64 {
3327 c := auxIntToInt64(v_1_1.AuxInt)
3328 if !(c >= int64(32-nlz32(m))) {
3332 v.AuxInt = int32ToAuxInt(0)
3337 // match: (And32 x x)
3347 // match: (And32 (Const32 [-1]) x)
3350 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
3351 if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != -1 {
3360 // match: (And32 (Const32 [0]) _)
3361 // result: (Const32 [0])
3363 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
3364 if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
3368 v.AuxInt = int32ToAuxInt(0)
3373 // match: (And32 (Com32 x) x)
3374 // result: (Const32 [0])
3376 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
3377 if v_0.Op != OpCom32 {
3385 v.AuxInt = int32ToAuxInt(0)
3390 // match: (And32 x (And32 x y))
3391 // result: (And32 x y)
3393 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
3395 if v_1.Op != OpAnd32 {
3399 v_1_0 := v_1.Args[0]
3400 v_1_1 := v_1.Args[1]
3401 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
3413 // match: (And32 (And32 i:(Const32 <t>) z) x)
3414 // cond: (z.Op != OpConst32 && x.Op != OpConst32)
3415 // result: (And32 i (And32 <t> z x))
3417 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
3418 if v_0.Op != OpAnd32 {
3422 v_0_0 := v_0.Args[0]
3423 v_0_1 := v_0.Args[1]
3424 for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
3426 if i.Op != OpConst32 {
3432 if !(z.Op != OpConst32 && x.Op != OpConst32) {
3436 v0 := b.NewValue0(v.Pos, OpAnd32, t)
3444 // match: (And32 (Const32 <t> [c]) (And32 (Const32 <t> [d]) x))
3445 // result: (And32 (Const32 <t> [c&d]) x)
3447 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
3448 if v_0.Op != OpConst32 {
3452 c := auxIntToInt32(v_0.AuxInt)
3453 if v_1.Op != OpAnd32 {
3457 v_1_0 := v_1.Args[0]
3458 v_1_1 := v_1.Args[1]
3459 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
3460 if v_1_0.Op != OpConst32 || v_1_0.Type != t {
3463 d := auxIntToInt32(v_1_0.AuxInt)
3466 v0 := b.NewValue0(v.Pos, OpConst32, t)
3467 v0.AuxInt = int32ToAuxInt(c & d)
3476 func rewriteValuegeneric_OpAnd64(v *Value) bool {
3480 // match: (And64 (Const64 [c]) (Const64 [d]))
3481 // result: (Const64 [c&d])
3483 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
3484 if v_0.Op != OpConst64 {
3487 c := auxIntToInt64(v_0.AuxInt)
3488 if v_1.Op != OpConst64 {
3491 d := auxIntToInt64(v_1.AuxInt)
3493 v.AuxInt = int64ToAuxInt(c & d)
3498 // match: (And64 <t> (Com64 x) (Com64 y))
3499 // result: (Com64 (Or64 <t> x y))
3502 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
3503 if v_0.Op != OpCom64 {
3507 if v_1.Op != OpCom64 {
3512 v0 := b.NewValue0(v.Pos, OpOr64, t)
3519 // match: (And64 (Const64 [m]) (Rsh64Ux64 _ (Const64 [c])))
3520 // cond: c >= int64(64-ntz64(m))
3521 // result: (Const64 [0])
3523 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
3524 if v_0.Op != OpConst64 {
3527 m := auxIntToInt64(v_0.AuxInt)
3528 if v_1.Op != OpRsh64Ux64 {
3532 v_1_1 := v_1.Args[1]
3533 if v_1_1.Op != OpConst64 {
3536 c := auxIntToInt64(v_1_1.AuxInt)
3537 if !(c >= int64(64-ntz64(m))) {
3541 v.AuxInt = int64ToAuxInt(0)
3546 // match: (And64 (Const64 [m]) (Lsh64x64 _ (Const64 [c])))
3547 // cond: c >= int64(64-nlz64(m))
3548 // result: (Const64 [0])
3550 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
3551 if v_0.Op != OpConst64 {
3554 m := auxIntToInt64(v_0.AuxInt)
3555 if v_1.Op != OpLsh64x64 {
3559 v_1_1 := v_1.Args[1]
3560 if v_1_1.Op != OpConst64 {
3563 c := auxIntToInt64(v_1_1.AuxInt)
3564 if !(c >= int64(64-nlz64(m))) {
3568 v.AuxInt = int64ToAuxInt(0)
3573 // match: (And64 x x)
3583 // match: (And64 (Const64 [-1]) x)
3586 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
3587 if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != -1 {
3596 // match: (And64 (Const64 [0]) _)
3597 // result: (Const64 [0])
3599 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
3600 if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
3604 v.AuxInt = int64ToAuxInt(0)
3609 // match: (And64 (Com64 x) x)
3610 // result: (Const64 [0])
3612 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
3613 if v_0.Op != OpCom64 {
3621 v.AuxInt = int64ToAuxInt(0)
3626 // match: (And64 x (And64 x y))
3627 // result: (And64 x y)
3629 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
3631 if v_1.Op != OpAnd64 {
3635 v_1_0 := v_1.Args[0]
3636 v_1_1 := v_1.Args[1]
3637 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
3649 // match: (And64 (And64 i:(Const64 <t>) z) x)
3650 // cond: (z.Op != OpConst64 && x.Op != OpConst64)
3651 // result: (And64 i (And64 <t> z x))
3653 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
3654 if v_0.Op != OpAnd64 {
3658 v_0_0 := v_0.Args[0]
3659 v_0_1 := v_0.Args[1]
3660 for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
3662 if i.Op != OpConst64 {
3668 if !(z.Op != OpConst64 && x.Op != OpConst64) {
3672 v0 := b.NewValue0(v.Pos, OpAnd64, t)
3680 // match: (And64 (Const64 <t> [c]) (And64 (Const64 <t> [d]) x))
3681 // result: (And64 (Const64 <t> [c&d]) x)
3683 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
3684 if v_0.Op != OpConst64 {
3688 c := auxIntToInt64(v_0.AuxInt)
3689 if v_1.Op != OpAnd64 {
3693 v_1_0 := v_1.Args[0]
3694 v_1_1 := v_1.Args[1]
3695 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
3696 if v_1_0.Op != OpConst64 || v_1_0.Type != t {
3699 d := auxIntToInt64(v_1_0.AuxInt)
3702 v0 := b.NewValue0(v.Pos, OpConst64, t)
3703 v0.AuxInt = int64ToAuxInt(c & d)
3712 func rewriteValuegeneric_OpAnd8(v *Value) bool {
3716 // match: (And8 (Const8 [c]) (Const8 [d]))
3717 // result: (Const8 [c&d])
3719 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
3720 if v_0.Op != OpConst8 {
3723 c := auxIntToInt8(v_0.AuxInt)
3724 if v_1.Op != OpConst8 {
3727 d := auxIntToInt8(v_1.AuxInt)
3729 v.AuxInt = int8ToAuxInt(c & d)
3734 // match: (And8 <t> (Com8 x) (Com8 y))
3735 // result: (Com8 (Or8 <t> x y))
3738 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
3739 if v_0.Op != OpCom8 {
3743 if v_1.Op != OpCom8 {
3748 v0 := b.NewValue0(v.Pos, OpOr8, t)
3755 // match: (And8 (Const8 [m]) (Rsh8Ux64 _ (Const64 [c])))
3756 // cond: c >= int64(8-ntz8(m))
3757 // result: (Const8 [0])
3759 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
3760 if v_0.Op != OpConst8 {
3763 m := auxIntToInt8(v_0.AuxInt)
3764 if v_1.Op != OpRsh8Ux64 {
3768 v_1_1 := v_1.Args[1]
3769 if v_1_1.Op != OpConst64 {
3772 c := auxIntToInt64(v_1_1.AuxInt)
3773 if !(c >= int64(8-ntz8(m))) {
3777 v.AuxInt = int8ToAuxInt(0)
3782 // match: (And8 (Const8 [m]) (Lsh8x64 _ (Const64 [c])))
3783 // cond: c >= int64(8-nlz8(m))
3784 // result: (Const8 [0])
3786 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
3787 if v_0.Op != OpConst8 {
3790 m := auxIntToInt8(v_0.AuxInt)
3791 if v_1.Op != OpLsh8x64 {
3795 v_1_1 := v_1.Args[1]
3796 if v_1_1.Op != OpConst64 {
3799 c := auxIntToInt64(v_1_1.AuxInt)
3800 if !(c >= int64(8-nlz8(m))) {
3804 v.AuxInt = int8ToAuxInt(0)
3809 // match: (And8 x x)
3819 // match: (And8 (Const8 [-1]) x)
3822 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
3823 if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != -1 {
3832 // match: (And8 (Const8 [0]) _)
3833 // result: (Const8 [0])
3835 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
3836 if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
3840 v.AuxInt = int8ToAuxInt(0)
3845 // match: (And8 (Com8 x) x)
3846 // result: (Const8 [0])
3848 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
3849 if v_0.Op != OpCom8 {
3857 v.AuxInt = int8ToAuxInt(0)
3862 // match: (And8 x (And8 x y))
3863 // result: (And8 x y)
3865 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
3867 if v_1.Op != OpAnd8 {
3871 v_1_0 := v_1.Args[0]
3872 v_1_1 := v_1.Args[1]
3873 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
3885 // match: (And8 (And8 i:(Const8 <t>) z) x)
3886 // cond: (z.Op != OpConst8 && x.Op != OpConst8)
3887 // result: (And8 i (And8 <t> z x))
3889 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
3890 if v_0.Op != OpAnd8 {
3894 v_0_0 := v_0.Args[0]
3895 v_0_1 := v_0.Args[1]
3896 for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
3898 if i.Op != OpConst8 {
3904 if !(z.Op != OpConst8 && x.Op != OpConst8) {
3908 v0 := b.NewValue0(v.Pos, OpAnd8, t)
3916 // match: (And8 (Const8 <t> [c]) (And8 (Const8 <t> [d]) x))
3917 // result: (And8 (Const8 <t> [c&d]) x)
3919 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
3920 if v_0.Op != OpConst8 {
3924 c := auxIntToInt8(v_0.AuxInt)
3925 if v_1.Op != OpAnd8 {
3929 v_1_0 := v_1.Args[0]
3930 v_1_1 := v_1.Args[1]
3931 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
3932 if v_1_0.Op != OpConst8 || v_1_0.Type != t {
3935 d := auxIntToInt8(v_1_0.AuxInt)
3938 v0 := b.NewValue0(v.Pos, OpConst8, t)
3939 v0.AuxInt = int8ToAuxInt(c & d)
// rewriteValuegeneric_OpAndB rewrites AndB values (boolean AND of two
// comparisons). Every rule below fuses a two-sided range check
// `lo <op> x && x <op> hi` into a single unsigned comparison of
// (x - lo') against (hi - lo') — the standard subtract-and-compare-unsigned
// range-check reduction, which removes one branch/comparison.
// NOTE(review): this file is generated from _gen/generic.rules; change the
// rules file and re-run `go generate` rather than editing these bodies.
func rewriteValuegeneric_OpAndB(v *Value) bool {
// Group 1: signed ranges with an inclusive lower bound (Leq).
// c <= x && x <(=) d  =>  unsigned compare of x-c against d-c.
// match: (AndB (Leq64 (Const64 [c]) x) (Less64 x (Const64 [d])))
// result: (Less64U (Sub64 <x.Type> x (Const64 <x.Type> [c])) (Const64 <x.Type> [d-c]))
// AndB is commutative: the _i0 loop retries the match with v_0/v_1 swapped.
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
if v_0.Op != OpLeq64 {
v_0_0 := v_0.Args[0]
if v_0_0.Op != OpConst64 {
c := auxIntToInt64(v_0_0.AuxInt)
if v_1.Op != OpLess64 {
if x != v_1.Args[0] {
v_1_1 := v_1.Args[1]
if v_1_1.Op != OpConst64 {
d := auxIntToInt64(v_1_1.AuxInt)
v0 := b.NewValue0(v.Pos, OpSub64, x.Type)
v1 := b.NewValue0(v.Pos, OpConst64, x.Type)
v1.AuxInt = int64ToAuxInt(c)
v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
v2.AuxInt = int64ToAuxInt(d - c)
// match: (AndB (Leq64 (Const64 [c]) x) (Leq64 x (Const64 [d])))
// result: (Leq64U (Sub64 <x.Type> x (Const64 <x.Type> [c])) (Const64 <x.Type> [d-c]))
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
if v_0.Op != OpLeq64 {
v_0_0 := v_0.Args[0]
if v_0_0.Op != OpConst64 {
c := auxIntToInt64(v_0_0.AuxInt)
if v_1.Op != OpLeq64 {
if x != v_1.Args[0] {
v_1_1 := v_1.Args[1]
if v_1_1.Op != OpConst64 {
d := auxIntToInt64(v_1_1.AuxInt)
v0 := b.NewValue0(v.Pos, OpSub64, x.Type)
v1 := b.NewValue0(v.Pos, OpConst64, x.Type)
v1.AuxInt = int64ToAuxInt(c)
v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
v2.AuxInt = int64ToAuxInt(d - c)
// match: (AndB (Leq32 (Const32 [c]) x) (Less32 x (Const32 [d])))
// result: (Less32U (Sub32 <x.Type> x (Const32 <x.Type> [c])) (Const32 <x.Type> [d-c]))
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
if v_0.Op != OpLeq32 {
v_0_0 := v_0.Args[0]
if v_0_0.Op != OpConst32 {
c := auxIntToInt32(v_0_0.AuxInt)
if v_1.Op != OpLess32 {
if x != v_1.Args[0] {
v_1_1 := v_1.Args[1]
if v_1_1.Op != OpConst32 {
d := auxIntToInt32(v_1_1.AuxInt)
v0 := b.NewValue0(v.Pos, OpSub32, x.Type)
v1 := b.NewValue0(v.Pos, OpConst32, x.Type)
v1.AuxInt = int32ToAuxInt(c)
v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
v2.AuxInt = int32ToAuxInt(d - c)
// match: (AndB (Leq32 (Const32 [c]) x) (Leq32 x (Const32 [d])))
// result: (Leq32U (Sub32 <x.Type> x (Const32 <x.Type> [c])) (Const32 <x.Type> [d-c]))
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
if v_0.Op != OpLeq32 {
v_0_0 := v_0.Args[0]
if v_0_0.Op != OpConst32 {
c := auxIntToInt32(v_0_0.AuxInt)
if v_1.Op != OpLeq32 {
if x != v_1.Args[0] {
v_1_1 := v_1.Args[1]
if v_1_1.Op != OpConst32 {
d := auxIntToInt32(v_1_1.AuxInt)
v0 := b.NewValue0(v.Pos, OpSub32, x.Type)
v1 := b.NewValue0(v.Pos, OpConst32, x.Type)
v1.AuxInt = int32ToAuxInt(c)
v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
v2.AuxInt = int32ToAuxInt(d - c)
// match: (AndB (Leq16 (Const16 [c]) x) (Less16 x (Const16 [d])))
// result: (Less16U (Sub16 <x.Type> x (Const16 <x.Type> [c])) (Const16 <x.Type> [d-c]))
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
if v_0.Op != OpLeq16 {
v_0_0 := v_0.Args[0]
if v_0_0.Op != OpConst16 {
c := auxIntToInt16(v_0_0.AuxInt)
if v_1.Op != OpLess16 {
if x != v_1.Args[0] {
v_1_1 := v_1.Args[1]
if v_1_1.Op != OpConst16 {
d := auxIntToInt16(v_1_1.AuxInt)
v0 := b.NewValue0(v.Pos, OpSub16, x.Type)
v1 := b.NewValue0(v.Pos, OpConst16, x.Type)
v1.AuxInt = int16ToAuxInt(c)
v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
v2.AuxInt = int16ToAuxInt(d - c)
// match: (AndB (Leq16 (Const16 [c]) x) (Leq16 x (Const16 [d])))
// result: (Leq16U (Sub16 <x.Type> x (Const16 <x.Type> [c])) (Const16 <x.Type> [d-c]))
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
if v_0.Op != OpLeq16 {
v_0_0 := v_0.Args[0]
if v_0_0.Op != OpConst16 {
c := auxIntToInt16(v_0_0.AuxInt)
if v_1.Op != OpLeq16 {
if x != v_1.Args[0] {
v_1_1 := v_1.Args[1]
if v_1_1.Op != OpConst16 {
d := auxIntToInt16(v_1_1.AuxInt)
v0 := b.NewValue0(v.Pos, OpSub16, x.Type)
v1 := b.NewValue0(v.Pos, OpConst16, x.Type)
v1.AuxInt = int16ToAuxInt(c)
v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
v2.AuxInt = int16ToAuxInt(d - c)
// match: (AndB (Leq8 (Const8 [c]) x) (Less8 x (Const8 [d])))
// result: (Less8U (Sub8 <x.Type> x (Const8 <x.Type> [c])) (Const8 <x.Type> [d-c]))
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
if v_0.Op != OpLeq8 {
v_0_0 := v_0.Args[0]
if v_0_0.Op != OpConst8 {
c := auxIntToInt8(v_0_0.AuxInt)
if v_1.Op != OpLess8 {
if x != v_1.Args[0] {
v_1_1 := v_1.Args[1]
if v_1_1.Op != OpConst8 {
d := auxIntToInt8(v_1_1.AuxInt)
v0 := b.NewValue0(v.Pos, OpSub8, x.Type)
v1 := b.NewValue0(v.Pos, OpConst8, x.Type)
v1.AuxInt = int8ToAuxInt(c)
v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
v2.AuxInt = int8ToAuxInt(d - c)
// match: (AndB (Leq8 (Const8 [c]) x) (Leq8 x (Const8 [d])))
// result: (Leq8U (Sub8 <x.Type> x (Const8 <x.Type> [c])) (Const8 <x.Type> [d-c]))
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
if v_0.Op != OpLeq8 {
v_0_0 := v_0.Args[0]
if v_0_0.Op != OpConst8 {
c := auxIntToInt8(v_0_0.AuxInt)
if v_1.Op != OpLeq8 {
if x != v_1.Args[0] {
v_1_1 := v_1.Args[1]
if v_1_1.Op != OpConst8 {
d := auxIntToInt8(v_1_1.AuxInt)
v0 := b.NewValue0(v.Pos, OpSub8, x.Type)
v1 := b.NewValue0(v.Pos, OpConst8, x.Type)
v1.AuxInt = int8ToAuxInt(c)
v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
v2.AuxInt = int8ToAuxInt(d - c)
// Group 2: signed ranges with an exclusive lower bound (Less). The lower
// bound is shifted to c+1; the condition `c+1 > c` rejects the overflow
// case where c is the maximum value of the type.
// match: (AndB (Less64 (Const64 [c]) x) (Less64 x (Const64 [d])))
// cond: d >= c+1 && c+1 > c
// result: (Less64U (Sub64 <x.Type> x (Const64 <x.Type> [c+1])) (Const64 <x.Type> [d-c-1]))
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
if v_0.Op != OpLess64 {
v_0_0 := v_0.Args[0]
if v_0_0.Op != OpConst64 {
c := auxIntToInt64(v_0_0.AuxInt)
if v_1.Op != OpLess64 {
if x != v_1.Args[0] {
v_1_1 := v_1.Args[1]
if v_1_1.Op != OpConst64 {
d := auxIntToInt64(v_1_1.AuxInt)
if !(d >= c+1 && c+1 > c) {
v0 := b.NewValue0(v.Pos, OpSub64, x.Type)
v1 := b.NewValue0(v.Pos, OpConst64, x.Type)
v1.AuxInt = int64ToAuxInt(c + 1)
v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
v2.AuxInt = int64ToAuxInt(d - c - 1)
// match: (AndB (Less64 (Const64 [c]) x) (Leq64 x (Const64 [d])))
// cond: d >= c+1 && c+1 > c
// result: (Leq64U (Sub64 <x.Type> x (Const64 <x.Type> [c+1])) (Const64 <x.Type> [d-c-1]))
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
if v_0.Op != OpLess64 {
v_0_0 := v_0.Args[0]
if v_0_0.Op != OpConst64 {
c := auxIntToInt64(v_0_0.AuxInt)
if v_1.Op != OpLeq64 {
if x != v_1.Args[0] {
v_1_1 := v_1.Args[1]
if v_1_1.Op != OpConst64 {
d := auxIntToInt64(v_1_1.AuxInt)
if !(d >= c+1 && c+1 > c) {
v0 := b.NewValue0(v.Pos, OpSub64, x.Type)
v1 := b.NewValue0(v.Pos, OpConst64, x.Type)
v1.AuxInt = int64ToAuxInt(c + 1)
v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
v2.AuxInt = int64ToAuxInt(d - c - 1)
// match: (AndB (Less32 (Const32 [c]) x) (Less32 x (Const32 [d])))
// cond: d >= c+1 && c+1 > c
// result: (Less32U (Sub32 <x.Type> x (Const32 <x.Type> [c+1])) (Const32 <x.Type> [d-c-1]))
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
if v_0.Op != OpLess32 {
v_0_0 := v_0.Args[0]
if v_0_0.Op != OpConst32 {
c := auxIntToInt32(v_0_0.AuxInt)
if v_1.Op != OpLess32 {
if x != v_1.Args[0] {
v_1_1 := v_1.Args[1]
if v_1_1.Op != OpConst32 {
d := auxIntToInt32(v_1_1.AuxInt)
if !(d >= c+1 && c+1 > c) {
v0 := b.NewValue0(v.Pos, OpSub32, x.Type)
v1 := b.NewValue0(v.Pos, OpConst32, x.Type)
v1.AuxInt = int32ToAuxInt(c + 1)
v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
v2.AuxInt = int32ToAuxInt(d - c - 1)
// match: (AndB (Less32 (Const32 [c]) x) (Leq32 x (Const32 [d])))
// cond: d >= c+1 && c+1 > c
// result: (Leq32U (Sub32 <x.Type> x (Const32 <x.Type> [c+1])) (Const32 <x.Type> [d-c-1]))
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
if v_0.Op != OpLess32 {
v_0_0 := v_0.Args[0]
if v_0_0.Op != OpConst32 {
c := auxIntToInt32(v_0_0.AuxInt)
if v_1.Op != OpLeq32 {
if x != v_1.Args[0] {
v_1_1 := v_1.Args[1]
if v_1_1.Op != OpConst32 {
d := auxIntToInt32(v_1_1.AuxInt)
if !(d >= c+1 && c+1 > c) {
v0 := b.NewValue0(v.Pos, OpSub32, x.Type)
v1 := b.NewValue0(v.Pos, OpConst32, x.Type)
v1.AuxInt = int32ToAuxInt(c + 1)
v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
v2.AuxInt = int32ToAuxInt(d - c - 1)
// match: (AndB (Less16 (Const16 [c]) x) (Less16 x (Const16 [d])))
// cond: d >= c+1 && c+1 > c
// result: (Less16U (Sub16 <x.Type> x (Const16 <x.Type> [c+1])) (Const16 <x.Type> [d-c-1]))
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
if v_0.Op != OpLess16 {
v_0_0 := v_0.Args[0]
if v_0_0.Op != OpConst16 {
c := auxIntToInt16(v_0_0.AuxInt)
if v_1.Op != OpLess16 {
if x != v_1.Args[0] {
v_1_1 := v_1.Args[1]
if v_1_1.Op != OpConst16 {
d := auxIntToInt16(v_1_1.AuxInt)
if !(d >= c+1 && c+1 > c) {
v0 := b.NewValue0(v.Pos, OpSub16, x.Type)
v1 := b.NewValue0(v.Pos, OpConst16, x.Type)
v1.AuxInt = int16ToAuxInt(c + 1)
v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
v2.AuxInt = int16ToAuxInt(d - c - 1)
// match: (AndB (Less16 (Const16 [c]) x) (Leq16 x (Const16 [d])))
// cond: d >= c+1 && c+1 > c
// result: (Leq16U (Sub16 <x.Type> x (Const16 <x.Type> [c+1])) (Const16 <x.Type> [d-c-1]))
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
if v_0.Op != OpLess16 {
v_0_0 := v_0.Args[0]
if v_0_0.Op != OpConst16 {
c := auxIntToInt16(v_0_0.AuxInt)
if v_1.Op != OpLeq16 {
if x != v_1.Args[0] {
v_1_1 := v_1.Args[1]
if v_1_1.Op != OpConst16 {
d := auxIntToInt16(v_1_1.AuxInt)
if !(d >= c+1 && c+1 > c) {
v0 := b.NewValue0(v.Pos, OpSub16, x.Type)
v1 := b.NewValue0(v.Pos, OpConst16, x.Type)
v1.AuxInt = int16ToAuxInt(c + 1)
v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
v2.AuxInt = int16ToAuxInt(d - c - 1)
// match: (AndB (Less8 (Const8 [c]) x) (Less8 x (Const8 [d])))
// cond: d >= c+1 && c+1 > c
// result: (Less8U (Sub8 <x.Type> x (Const8 <x.Type> [c+1])) (Const8 <x.Type> [d-c-1]))
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
if v_0.Op != OpLess8 {
v_0_0 := v_0.Args[0]
if v_0_0.Op != OpConst8 {
c := auxIntToInt8(v_0_0.AuxInt)
if v_1.Op != OpLess8 {
if x != v_1.Args[0] {
v_1_1 := v_1.Args[1]
if v_1_1.Op != OpConst8 {
d := auxIntToInt8(v_1_1.AuxInt)
if !(d >= c+1 && c+1 > c) {
v0 := b.NewValue0(v.Pos, OpSub8, x.Type)
v1 := b.NewValue0(v.Pos, OpConst8, x.Type)
v1.AuxInt = int8ToAuxInt(c + 1)
v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
v2.AuxInt = int8ToAuxInt(d - c - 1)
// match: (AndB (Less8 (Const8 [c]) x) (Leq8 x (Const8 [d])))
// cond: d >= c+1 && c+1 > c
// result: (Leq8U (Sub8 <x.Type> x (Const8 <x.Type> [c+1])) (Const8 <x.Type> [d-c-1]))
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
if v_0.Op != OpLess8 {
v_0_0 := v_0.Args[0]
if v_0_0.Op != OpConst8 {
c := auxIntToInt8(v_0_0.AuxInt)
if v_1.Op != OpLeq8 {
if x != v_1.Args[0] {
v_1_1 := v_1.Args[1]
if v_1_1.Op != OpConst8 {
d := auxIntToInt8(v_1_1.AuxInt)
if !(d >= c+1 && c+1 > c) {
v0 := b.NewValue0(v.Pos, OpSub8, x.Type)
v1 := b.NewValue0(v.Pos, OpConst8, x.Type)
v1.AuxInt = int8ToAuxInt(c + 1)
v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
v2.AuxInt = int8ToAuxInt(d - c - 1)
// Group 3: unsigned ranges with an inclusive lower bound (LeqU). The
// condition d >= c (unsigned) guarantees the width d-c does not wrap.
// match: (AndB (Leq64U (Const64 [c]) x) (Less64U x (Const64 [d])))
// cond: uint64(d) >= uint64(c)
// result: (Less64U (Sub64 <x.Type> x (Const64 <x.Type> [c])) (Const64 <x.Type> [d-c]))
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
if v_0.Op != OpLeq64U {
v_0_0 := v_0.Args[0]
if v_0_0.Op != OpConst64 {
c := auxIntToInt64(v_0_0.AuxInt)
if v_1.Op != OpLess64U {
if x != v_1.Args[0] {
v_1_1 := v_1.Args[1]
if v_1_1.Op != OpConst64 {
d := auxIntToInt64(v_1_1.AuxInt)
if !(uint64(d) >= uint64(c)) {
v0 := b.NewValue0(v.Pos, OpSub64, x.Type)
v1 := b.NewValue0(v.Pos, OpConst64, x.Type)
v1.AuxInt = int64ToAuxInt(c)
v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
v2.AuxInt = int64ToAuxInt(d - c)
// match: (AndB (Leq64U (Const64 [c]) x) (Leq64U x (Const64 [d])))
// cond: uint64(d) >= uint64(c)
// result: (Leq64U (Sub64 <x.Type> x (Const64 <x.Type> [c])) (Const64 <x.Type> [d-c]))
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
if v_0.Op != OpLeq64U {
v_0_0 := v_0.Args[0]
if v_0_0.Op != OpConst64 {
c := auxIntToInt64(v_0_0.AuxInt)
if v_1.Op != OpLeq64U {
if x != v_1.Args[0] {
v_1_1 := v_1.Args[1]
if v_1_1.Op != OpConst64 {
d := auxIntToInt64(v_1_1.AuxInt)
if !(uint64(d) >= uint64(c)) {
v0 := b.NewValue0(v.Pos, OpSub64, x.Type)
v1 := b.NewValue0(v.Pos, OpConst64, x.Type)
v1.AuxInt = int64ToAuxInt(c)
v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
v2.AuxInt = int64ToAuxInt(d - c)
// match: (AndB (Leq32U (Const32 [c]) x) (Less32U x (Const32 [d])))
// cond: uint32(d) >= uint32(c)
// result: (Less32U (Sub32 <x.Type> x (Const32 <x.Type> [c])) (Const32 <x.Type> [d-c]))
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
if v_0.Op != OpLeq32U {
v_0_0 := v_0.Args[0]
if v_0_0.Op != OpConst32 {
c := auxIntToInt32(v_0_0.AuxInt)
if v_1.Op != OpLess32U {
if x != v_1.Args[0] {
v_1_1 := v_1.Args[1]
if v_1_1.Op != OpConst32 {
d := auxIntToInt32(v_1_1.AuxInt)
if !(uint32(d) >= uint32(c)) {
v0 := b.NewValue0(v.Pos, OpSub32, x.Type)
v1 := b.NewValue0(v.Pos, OpConst32, x.Type)
v1.AuxInt = int32ToAuxInt(c)
v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
v2.AuxInt = int32ToAuxInt(d - c)
// match: (AndB (Leq32U (Const32 [c]) x) (Leq32U x (Const32 [d])))
// cond: uint32(d) >= uint32(c)
// result: (Leq32U (Sub32 <x.Type> x (Const32 <x.Type> [c])) (Const32 <x.Type> [d-c]))
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
if v_0.Op != OpLeq32U {
v_0_0 := v_0.Args[0]
if v_0_0.Op != OpConst32 {
c := auxIntToInt32(v_0_0.AuxInt)
if v_1.Op != OpLeq32U {
if x != v_1.Args[0] {
v_1_1 := v_1.Args[1]
if v_1_1.Op != OpConst32 {
d := auxIntToInt32(v_1_1.AuxInt)
if !(uint32(d) >= uint32(c)) {
v0 := b.NewValue0(v.Pos, OpSub32, x.Type)
v1 := b.NewValue0(v.Pos, OpConst32, x.Type)
v1.AuxInt = int32ToAuxInt(c)
v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
v2.AuxInt = int32ToAuxInt(d - c)
// match: (AndB (Leq16U (Const16 [c]) x) (Less16U x (Const16 [d])))
// cond: uint16(d) >= uint16(c)
// result: (Less16U (Sub16 <x.Type> x (Const16 <x.Type> [c])) (Const16 <x.Type> [d-c]))
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
if v_0.Op != OpLeq16U {
v_0_0 := v_0.Args[0]
if v_0_0.Op != OpConst16 {
c := auxIntToInt16(v_0_0.AuxInt)
if v_1.Op != OpLess16U {
if x != v_1.Args[0] {
v_1_1 := v_1.Args[1]
if v_1_1.Op != OpConst16 {
d := auxIntToInt16(v_1_1.AuxInt)
if !(uint16(d) >= uint16(c)) {
v0 := b.NewValue0(v.Pos, OpSub16, x.Type)
v1 := b.NewValue0(v.Pos, OpConst16, x.Type)
v1.AuxInt = int16ToAuxInt(c)
v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
v2.AuxInt = int16ToAuxInt(d - c)
// match: (AndB (Leq16U (Const16 [c]) x) (Leq16U x (Const16 [d])))
// cond: uint16(d) >= uint16(c)
// result: (Leq16U (Sub16 <x.Type> x (Const16 <x.Type> [c])) (Const16 <x.Type> [d-c]))
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
if v_0.Op != OpLeq16U {
v_0_0 := v_0.Args[0]
if v_0_0.Op != OpConst16 {
c := auxIntToInt16(v_0_0.AuxInt)
if v_1.Op != OpLeq16U {
if x != v_1.Args[0] {
v_1_1 := v_1.Args[1]
if v_1_1.Op != OpConst16 {
d := auxIntToInt16(v_1_1.AuxInt)
if !(uint16(d) >= uint16(c)) {
v0 := b.NewValue0(v.Pos, OpSub16, x.Type)
v1 := b.NewValue0(v.Pos, OpConst16, x.Type)
v1.AuxInt = int16ToAuxInt(c)
v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
v2.AuxInt = int16ToAuxInt(d - c)
// match: (AndB (Leq8U (Const8 [c]) x) (Less8U x (Const8 [d])))
// cond: uint8(d) >= uint8(c)
// result: (Less8U (Sub8 <x.Type> x (Const8 <x.Type> [c])) (Const8 <x.Type> [d-c]))
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
if v_0.Op != OpLeq8U {
v_0_0 := v_0.Args[0]
if v_0_0.Op != OpConst8 {
c := auxIntToInt8(v_0_0.AuxInt)
if v_1.Op != OpLess8U {
if x != v_1.Args[0] {
v_1_1 := v_1.Args[1]
if v_1_1.Op != OpConst8 {
d := auxIntToInt8(v_1_1.AuxInt)
if !(uint8(d) >= uint8(c)) {
v0 := b.NewValue0(v.Pos, OpSub8, x.Type)
v1 := b.NewValue0(v.Pos, OpConst8, x.Type)
v1.AuxInt = int8ToAuxInt(c)
v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
v2.AuxInt = int8ToAuxInt(d - c)
// match: (AndB (Leq8U (Const8 [c]) x) (Leq8U x (Const8 [d])))
// cond: uint8(d) >= uint8(c)
// result: (Leq8U (Sub8 <x.Type> x (Const8 <x.Type> [c])) (Const8 <x.Type> [d-c]))
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
if v_0.Op != OpLeq8U {
v_0_0 := v_0.Args[0]
if v_0_0.Op != OpConst8 {
c := auxIntToInt8(v_0_0.AuxInt)
if v_1.Op != OpLeq8U {
if x != v_1.Args[0] {
v_1_1 := v_1.Args[1]
if v_1_1.Op != OpConst8 {
d := auxIntToInt8(v_1_1.AuxInt)
if !(uint8(d) >= uint8(c)) {
v0 := b.NewValue0(v.Pos, OpSub8, x.Type)
v1 := b.NewValue0(v.Pos, OpConst8, x.Type)
v1.AuxInt = int8ToAuxInt(c)
v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
v2.AuxInt = int8ToAuxInt(d - c)
// Group 4: unsigned ranges with an exclusive lower bound (LessU). As in
// group 2, the bound is shifted to c+1 with an unsigned overflow guard.
// match: (AndB (Less64U (Const64 [c]) x) (Less64U x (Const64 [d])))
// cond: uint64(d) >= uint64(c+1) && uint64(c+1) > uint64(c)
// result: (Less64U (Sub64 <x.Type> x (Const64 <x.Type> [c+1])) (Const64 <x.Type> [d-c-1]))
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
if v_0.Op != OpLess64U {
v_0_0 := v_0.Args[0]
if v_0_0.Op != OpConst64 {
c := auxIntToInt64(v_0_0.AuxInt)
if v_1.Op != OpLess64U {
if x != v_1.Args[0] {
v_1_1 := v_1.Args[1]
if v_1_1.Op != OpConst64 {
d := auxIntToInt64(v_1_1.AuxInt)
if !(uint64(d) >= uint64(c+1) && uint64(c+1) > uint64(c)) {
v0 := b.NewValue0(v.Pos, OpSub64, x.Type)
v1 := b.NewValue0(v.Pos, OpConst64, x.Type)
v1.AuxInt = int64ToAuxInt(c + 1)
v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
v2.AuxInt = int64ToAuxInt(d - c - 1)
// match: (AndB (Less64U (Const64 [c]) x) (Leq64U x (Const64 [d])))
// cond: uint64(d) >= uint64(c+1) && uint64(c+1) > uint64(c)
// result: (Leq64U (Sub64 <x.Type> x (Const64 <x.Type> [c+1])) (Const64 <x.Type> [d-c-1]))
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
if v_0.Op != OpLess64U {
v_0_0 := v_0.Args[0]
if v_0_0.Op != OpConst64 {
c := auxIntToInt64(v_0_0.AuxInt)
if v_1.Op != OpLeq64U {
if x != v_1.Args[0] {
v_1_1 := v_1.Args[1]
if v_1_1.Op != OpConst64 {
d := auxIntToInt64(v_1_1.AuxInt)
if !(uint64(d) >= uint64(c+1) && uint64(c+1) > uint64(c)) {
v0 := b.NewValue0(v.Pos, OpSub64, x.Type)
v1 := b.NewValue0(v.Pos, OpConst64, x.Type)
v1.AuxInt = int64ToAuxInt(c + 1)
v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
v2.AuxInt = int64ToAuxInt(d - c - 1)
// match: (AndB (Less32U (Const32 [c]) x) (Less32U x (Const32 [d])))
// cond: uint32(d) >= uint32(c+1) && uint32(c+1) > uint32(c)
// result: (Less32U (Sub32 <x.Type> x (Const32 <x.Type> [c+1])) (Const32 <x.Type> [d-c-1]))
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
if v_0.Op != OpLess32U {
v_0_0 := v_0.Args[0]
if v_0_0.Op != OpConst32 {
c := auxIntToInt32(v_0_0.AuxInt)
if v_1.Op != OpLess32U {
if x != v_1.Args[0] {
v_1_1 := v_1.Args[1]
if v_1_1.Op != OpConst32 {
d := auxIntToInt32(v_1_1.AuxInt)
if !(uint32(d) >= uint32(c+1) && uint32(c+1) > uint32(c)) {
v0 := b.NewValue0(v.Pos, OpSub32, x.Type)
v1 := b.NewValue0(v.Pos, OpConst32, x.Type)
v1.AuxInt = int32ToAuxInt(c + 1)
v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
v2.AuxInt = int32ToAuxInt(d - c - 1)
// match: (AndB (Less32U (Const32 [c]) x) (Leq32U x (Const32 [d])))
// cond: uint32(d) >= uint32(c+1) && uint32(c+1) > uint32(c)
// result: (Leq32U (Sub32 <x.Type> x (Const32 <x.Type> [c+1])) (Const32 <x.Type> [d-c-1]))
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
if v_0.Op != OpLess32U {
v_0_0 := v_0.Args[0]
if v_0_0.Op != OpConst32 {
c := auxIntToInt32(v_0_0.AuxInt)
if v_1.Op != OpLeq32U {
if x != v_1.Args[0] {
v_1_1 := v_1.Args[1]
if v_1_1.Op != OpConst32 {
d := auxIntToInt32(v_1_1.AuxInt)
if !(uint32(d) >= uint32(c+1) && uint32(c+1) > uint32(c)) {
v0 := b.NewValue0(v.Pos, OpSub32, x.Type)
v1 := b.NewValue0(v.Pos, OpConst32, x.Type)
v1.AuxInt = int32ToAuxInt(c + 1)
v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
v2.AuxInt = int32ToAuxInt(d - c - 1)
// match: (AndB (Less16U (Const16 [c]) x) (Less16U x (Const16 [d])))
// cond: uint16(d) >= uint16(c+1) && uint16(c+1) > uint16(c)
// result: (Less16U (Sub16 <x.Type> x (Const16 <x.Type> [c+1])) (Const16 <x.Type> [d-c-1]))
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
if v_0.Op != OpLess16U {
v_0_0 := v_0.Args[0]
if v_0_0.Op != OpConst16 {
c := auxIntToInt16(v_0_0.AuxInt)
if v_1.Op != OpLess16U {
if x != v_1.Args[0] {
v_1_1 := v_1.Args[1]
if v_1_1.Op != OpConst16 {
d := auxIntToInt16(v_1_1.AuxInt)
if !(uint16(d) >= uint16(c+1) && uint16(c+1) > uint16(c)) {
v0 := b.NewValue0(v.Pos, OpSub16, x.Type)
v1 := b.NewValue0(v.Pos, OpConst16, x.Type)
v1.AuxInt = int16ToAuxInt(c + 1)
v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
v2.AuxInt = int16ToAuxInt(d - c - 1)
// match: (AndB (Less16U (Const16 [c]) x) (Leq16U x (Const16 [d])))
// cond: uint16(d) >= uint16(c+1) && uint16(c+1) > uint16(c)
// result: (Leq16U (Sub16 <x.Type> x (Const16 <x.Type> [c+1])) (Const16 <x.Type> [d-c-1]))
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
if v_0.Op != OpLess16U {
v_0_0 := v_0.Args[0]
if v_0_0.Op != OpConst16 {
c := auxIntToInt16(v_0_0.AuxInt)
if v_1.Op != OpLeq16U {
if x != v_1.Args[0] {
v_1_1 := v_1.Args[1]
if v_1_1.Op != OpConst16 {
d := auxIntToInt16(v_1_1.AuxInt)
if !(uint16(d) >= uint16(c+1) && uint16(c+1) > uint16(c)) {
v0 := b.NewValue0(v.Pos, OpSub16, x.Type)
v1 := b.NewValue0(v.Pos, OpConst16, x.Type)
v1.AuxInt = int16ToAuxInt(c + 1)
v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
v2.AuxInt = int16ToAuxInt(d - c - 1)
// match: (AndB (Less8U (Const8 [c]) x) (Less8U x (Const8 [d])))
// cond: uint8(d) >= uint8(c+1) && uint8(c+1) > uint8(c)
// result: (Less8U (Sub8 <x.Type> x (Const8 <x.Type> [c+1])) (Const8 <x.Type> [d-c-1]))
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
if v_0.Op != OpLess8U {
v_0_0 := v_0.Args[0]
if v_0_0.Op != OpConst8 {
c := auxIntToInt8(v_0_0.AuxInt)
if v_1.Op != OpLess8U {
if x != v_1.Args[0] {
v_1_1 := v_1.Args[1]
if v_1_1.Op != OpConst8 {
d := auxIntToInt8(v_1_1.AuxInt)
if !(uint8(d) >= uint8(c+1) && uint8(c+1) > uint8(c)) {
v0 := b.NewValue0(v.Pos, OpSub8, x.Type)
v1 := b.NewValue0(v.Pos, OpConst8, x.Type)
v1.AuxInt = int8ToAuxInt(c + 1)
v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
v2.AuxInt = int8ToAuxInt(d - c - 1)
// match: (AndB (Less8U (Const8 [c]) x) (Leq8U x (Const8 [d])))
// cond: uint8(d) >= uint8(c+1) && uint8(c+1) > uint8(c)
// result: (Leq8U (Sub8 <x.Type> x (Const8 <x.Type> [c+1])) (Const8 <x.Type> [d-c-1]))
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
if v_0.Op != OpLess8U {
v_0_0 := v_0.Args[0]
if v_0_0.Op != OpConst8 {
c := auxIntToInt8(v_0_0.AuxInt)
if v_1.Op != OpLeq8U {
if x != v_1.Args[0] {
v_1_1 := v_1.Args[1]
if v_1_1.Op != OpConst8 {
d := auxIntToInt8(v_1_1.AuxInt)
if !(uint8(d) >= uint8(c+1) && uint8(c+1) > uint8(c)) {
v0 := b.NewValue0(v.Pos, OpSub8, x.Type)
v1 := b.NewValue0(v.Pos, OpConst8, x.Type)
v1.AuxInt = int8ToAuxInt(c + 1)
v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
v2.AuxInt = int8ToAuxInt(d - c - 1)
// rewriteValuegeneric_OpArraySelect rewrites ArraySelect values:
// selecting from a one-element ArrayMake1 yields its element directly,
// and selecting index 0 from an IData passes the IData through.
func rewriteValuegeneric_OpArraySelect(v *Value) bool {
// match: (ArraySelect (ArrayMake1 x))
if v_0.Op != OpArrayMake1 {
// match: (ArraySelect [0] (IData x))
// result: (IData x)
// AuxInt holds the selected array index; only index 0 is valid here.
if auxIntToInt64(v.AuxInt) != 0 || v_0.Op != OpIData {
// rewriteValuegeneric_OpCeil constant-folds Ceil applied to a float64
// constant, computing the result at compile time via math.Ceil.
func rewriteValuegeneric_OpCeil(v *Value) bool {
// match: (Ceil (Const64F [c]))
// result: (Const64F [math.Ceil(c)])
if v_0.Op != OpConst64F {
c := auxIntToFloat64(v_0.AuxInt)
v.AuxInt = float64ToAuxInt(math.Ceil(c))
// rewriteValuegeneric_OpCom16 rewrites Com16 (bitwise complement) values:
// a double complement cancels, complement of a constant folds to ^c, and
// ^(x + -1) simplifies to -x (per the Neg16 result rule).
func rewriteValuegeneric_OpCom16(v *Value) bool {
// match: (Com16 (Com16 x))
if v_0.Op != OpCom16 {
// match: (Com16 (Const16 [c]))
// result: (Const16 [^c])
if v_0.Op != OpConst16 {
c := auxIntToInt16(v_0.AuxInt)
v.AuxInt = int16ToAuxInt(^c)
// match: (Com16 (Add16 (Const16 [-1]) x))
// result: (Neg16 x)
if v_0.Op != OpAdd16 {
v_0_0 := v_0.Args[0]
v_0_1 := v_0.Args[1]
// Add16 is commutative: try both argument orders for the -1 constant.
for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
if v_0_0.Op != OpConst16 || auxIntToInt16(v_0_0.AuxInt) != -1 {
// rewriteValuegeneric_OpCom32 rewrites Com32 (bitwise complement) values:
// a double complement cancels, complement of a constant folds to ^c, and
// ^(x + -1) simplifies to -x (per the Neg32 result rule).
func rewriteValuegeneric_OpCom32(v *Value) bool {
// match: (Com32 (Com32 x))
if v_0.Op != OpCom32 {
// match: (Com32 (Const32 [c]))
// result: (Const32 [^c])
if v_0.Op != OpConst32 {
c := auxIntToInt32(v_0.AuxInt)
v.AuxInt = int32ToAuxInt(^c)
// match: (Com32 (Add32 (Const32 [-1]) x))
// result: (Neg32 x)
if v_0.Op != OpAdd32 {
v_0_0 := v_0.Args[0]
v_0_1 := v_0.Args[1]
// Add32 is commutative: try both argument orders for the -1 constant.
for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
if v_0_0.Op != OpConst32 || auxIntToInt32(v_0_0.AuxInt) != -1 {
// rewriteValuegeneric_OpCom64 rewrites Com64 (bitwise complement) values:
// a double complement cancels, complement of a constant folds to ^c, and
// ^(x + -1) simplifies to -x (per the Neg64 result rule).
func rewriteValuegeneric_OpCom64(v *Value) bool {
// match: (Com64 (Com64 x))
if v_0.Op != OpCom64 {
// match: (Com64 (Const64 [c]))
// result: (Const64 [^c])
if v_0.Op != OpConst64 {
c := auxIntToInt64(v_0.AuxInt)
v.AuxInt = int64ToAuxInt(^c)
// match: (Com64 (Add64 (Const64 [-1]) x))
// result: (Neg64 x)
if v_0.Op != OpAdd64 {
v_0_0 := v_0.Args[0]
v_0_1 := v_0.Args[1]
// Add64 is commutative: try both argument orders for the -1 constant.
for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
if v_0_0.Op != OpConst64 || auxIntToInt64(v_0_0.AuxInt) != -1 {
// rewriteValuegeneric_OpCom8 rewrites Com8 (bitwise complement) values:
// a double complement cancels, complement of a constant folds to ^c, and
// ^(x + -1) simplifies — presumably to Neg8, mirroring the wider-width
// Com rules (result comment not visible here; confirm against generic.rules).
func rewriteValuegeneric_OpCom8(v *Value) bool {
// match: (Com8 (Com8 x))
if v_0.Op != OpCom8 {
// match: (Com8 (Const8 [c]))
// result: (Const8 [^c])
if v_0.Op != OpConst8 {
c := auxIntToInt8(v_0.AuxInt)
v.AuxInt = int8ToAuxInt(^c)
// match: (Com8 (Add8 (Const8 [-1]) x))
if v_0.Op != OpAdd8 {
v_0_0 := v_0.Args[0]
v_0_1 := v_0.Args[1]
// Add8 is commutative: try both argument orders for the -1 constant.
for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
if v_0_0.Op != OpConst8 || auxIntToInt8(v_0_0.AuxInt) != -1 {
// rewriteValuegeneric_OpConstInterface lowers a ConstInterface (the zero,
// i.e. nil, interface value) into an explicit IMake of two nil pointers:
// a nil type word (Uintptr) and a nil data word (BytePtr).
func rewriteValuegeneric_OpConstInterface(v *Value) bool {
typ := &b.Func.Config.Types
// match: (ConstInterface)
// result: (IMake (ConstNil <typ.Uintptr>) (ConstNil <typ.BytePtr>))
v0 := b.NewValue0(v.Pos, OpConstNil, typ.Uintptr)
v1 := b.NewValue0(v.Pos, OpConstNil, typ.BytePtr)
// rewriteValuegeneric_OpConstSlice lowers a ConstSlice (the zero, i.e. nil,
// slice value) into an explicit SliceMake with a nil data pointer and zero
// length and capacity. The len/cap constant width follows the target's
// pointer size (Const32 on 32-bit, Const64 on 64-bit).
func rewriteValuegeneric_OpConstSlice(v *Value) bool {
config := b.Func.Config
typ := &b.Func.Config.Types
// match: (ConstSlice)
// cond: config.PtrSize == 4
// result: (SliceMake (ConstNil <v.Type.Elem().PtrTo()>) (Const32 <typ.Int> [0]) (Const32 <typ.Int> [0]))
if !(config.PtrSize == 4) {
v.reset(OpSliceMake)
v0 := b.NewValue0(v.Pos, OpConstNil, v.Type.Elem().PtrTo())
v1 := b.NewValue0(v.Pos, OpConst32, typ.Int)
v1.AuxInt = int32ToAuxInt(0)
// The same zero constant is reused for both len and cap.
v.AddArg3(v0, v1, v1)
// match: (ConstSlice)
// cond: config.PtrSize == 8
// result: (SliceMake (ConstNil <v.Type.Elem().PtrTo()>) (Const64 <typ.Int> [0]) (Const64 <typ.Int> [0]))
if !(config.PtrSize == 8) {
v.reset(OpSliceMake)
v0 := b.NewValue0(v.Pos, OpConstNil, v.Type.Elem().PtrTo())
v1 := b.NewValue0(v.Pos, OpConst64, typ.Int)
v1.AuxInt = int64ToAuxInt(0)
v.AddArg3(v0, v1, v1)
// rewriteValuegeneric_OpConstString lowers a ConstString into an explicit
// StringMake. The empty string becomes a nil pointer with zero length; a
// non-empty string becomes an Addr of the string's static data symbol plus
// its constant length. Length width follows the target's pointer size.
// NOTE(review): `fe` used below is presumably the frontend handle bound
// earlier in this function (its binding line is not visible here) —
// fe.StringData(str) looks like it returns the symbol for the string's
// read-only data; confirm against the ssa frontend interface.
func rewriteValuegeneric_OpConstString(v *Value) bool {
config := b.Func.Config
typ := &b.Func.Config.Types
// match: (ConstString {str})
// cond: config.PtrSize == 4 && str == ""
// result: (StringMake (ConstNil) (Const32 <typ.Int> [0]))
str := auxToString(v.Aux)
if !(config.PtrSize == 4 && str == "") {
v.reset(OpStringMake)
v0 := b.NewValue0(v.Pos, OpConstNil, typ.BytePtr)
v1 := b.NewValue0(v.Pos, OpConst32, typ.Int)
v1.AuxInt = int32ToAuxInt(0)
// match: (ConstString {str})
// cond: config.PtrSize == 8 && str == ""
// result: (StringMake (ConstNil) (Const64 <typ.Int> [0]))
str := auxToString(v.Aux)
if !(config.PtrSize == 8 && str == "") {
v.reset(OpStringMake)
v0 := b.NewValue0(v.Pos, OpConstNil, typ.BytePtr)
v1 := b.NewValue0(v.Pos, OpConst64, typ.Int)
v1.AuxInt = int64ToAuxInt(0)
// match: (ConstString {str})
// cond: config.PtrSize == 4 && str != ""
// result: (StringMake (Addr <typ.BytePtr> {fe.StringData(str)} (SB)) (Const32 <typ.Int> [int32(len(str))]))
str := auxToString(v.Aux)
if !(config.PtrSize == 4 && str != "") {
v.reset(OpStringMake)
v0 := b.NewValue0(v.Pos, OpAddr, typ.BytePtr)
v0.Aux = symToAux(fe.StringData(str))
v1 := b.NewValue0(v.Pos, OpSB, typ.Uintptr)
v2 := b.NewValue0(v.Pos, OpConst32, typ.Int)
v2.AuxInt = int32ToAuxInt(int32(len(str)))
// match: (ConstString {str})
// cond: config.PtrSize == 8 && str != ""
// result: (StringMake (Addr <typ.BytePtr> {fe.StringData(str)} (SB)) (Const64 <typ.Int> [int64(len(str))]))
str := auxToString(v.Aux)
if !(config.PtrSize == 8 && str != "") {
v.reset(OpStringMake)
v0 := b.NewValue0(v.Pos, OpAddr, typ.BytePtr)
v0.Aux = symToAux(fe.StringData(str))
v1 := b.NewValue0(v.Pos, OpSB, typ.Uintptr)
v2 := b.NewValue0(v.Pos, OpConst64, typ.Int)
v2.AuxInt = int64ToAuxInt(int64(len(str)))
// rewriteValuegeneric_OpConvert rewrites Convert (unsafe pointer/integer
// conversion carrying a memory argument) values: integer arithmetic on a
// converted pointer, under the same memory state, collapses back into
// pointer arithmetic (AddPtr); a Convert of a Convert with the same memory
// collapses as well.
func rewriteValuegeneric_OpConvert(v *Value) bool {
// match: (Convert (Add64 (Convert ptr mem) off) mem)
// result: (AddPtr ptr off)
if v_0.Op != OpAdd64 {
v_0_0 := v_0.Args[0]
v_0_1 := v_0.Args[1]
// Add64 is commutative: try both argument orders for the inner Convert.
for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
if v_0_0.Op != OpConvert {
mem := v_0_0.Args[1]
ptr := v_0_0.Args[0]
// match: (Convert (Add32 (Convert ptr mem) off) mem)
// result: (AddPtr ptr off)
if v_0.Op != OpAdd32 {
v_0_0 := v_0.Args[0]
v_0_1 := v_0.Args[1]
for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
if v_0_0.Op != OpConvert {
mem := v_0_0.Args[1]
ptr := v_0_0.Args[0]
// match: (Convert (Convert ptr mem) mem)
// (result elided in this view; presumably ptr — confirm in generic.rules)
if v_0.Op != OpConvert {
// rewriteValuegeneric_OpCtz16 constant-folds Ctz16: the trailing-zero count
// of a Const16 becomes a Const32 when config.PtrSize == 4 and a Const64 when
// config.PtrSize == 8, matching the target's native count-result width.
// Machine-generated from _gen/generic.rules; fix the rules, not this file.
5675 func rewriteValuegeneric_OpCtz16(v *Value) bool {
5678 config := b.Func.Config
5679 // match: (Ctz16 (Const16 [c]))
5680 // cond: config.PtrSize == 4
5681 // result: (Const32 [int32(ntz16(c))])
5683 if v_0.Op != OpConst16 {
5686 c := auxIntToInt16(v_0.AuxInt)
5687 if !(config.PtrSize == 4) {
5691 v.AuxInt = int32ToAuxInt(int32(ntz16(c)))
5694 // match: (Ctz16 (Const16 [c]))
5695 // cond: config.PtrSize == 8
5696 // result: (Const64 [int64(ntz16(c))])
5698 if v_0.Op != OpConst16 {
5701 c := auxIntToInt16(v_0.AuxInt)
5702 if !(config.PtrSize == 8) {
5706 v.AuxInt = int64ToAuxInt(int64(ntz16(c)))
// rewriteValuegeneric_OpCtz32 constant-folds Ctz32 of a Const32 into a
// Const32 (config.PtrSize == 4) or Const64 (config.PtrSize == 8) holding
// the trailing-zero count computed by ntz32.
// Machine-generated from _gen/generic.rules; fix the rules, not this file.
5711 func rewriteValuegeneric_OpCtz32(v *Value) bool {
5714 config := b.Func.Config
5715 // match: (Ctz32 (Const32 [c]))
5716 // cond: config.PtrSize == 4
5717 // result: (Const32 [int32(ntz32(c))])
5719 if v_0.Op != OpConst32 {
5722 c := auxIntToInt32(v_0.AuxInt)
5723 if !(config.PtrSize == 4) {
5727 v.AuxInt = int32ToAuxInt(int32(ntz32(c)))
5730 // match: (Ctz32 (Const32 [c]))
5731 // cond: config.PtrSize == 8
5732 // result: (Const64 [int64(ntz32(c))])
5734 if v_0.Op != OpConst32 {
5737 c := auxIntToInt32(v_0.AuxInt)
5738 if !(config.PtrSize == 8) {
5742 v.AuxInt = int64ToAuxInt(int64(ntz32(c)))
// rewriteValuegeneric_OpCtz64 constant-folds Ctz64 of a Const64 into a
// Const32 (config.PtrSize == 4) or Const64 (config.PtrSize == 8) holding
// the trailing-zero count computed by ntz64.
// Machine-generated from _gen/generic.rules; fix the rules, not this file.
5747 func rewriteValuegeneric_OpCtz64(v *Value) bool {
5750 config := b.Func.Config
5751 // match: (Ctz64 (Const64 [c]))
5752 // cond: config.PtrSize == 4
5753 // result: (Const32 [int32(ntz64(c))])
5755 if v_0.Op != OpConst64 {
5758 c := auxIntToInt64(v_0.AuxInt)
5759 if !(config.PtrSize == 4) {
5763 v.AuxInt = int32ToAuxInt(int32(ntz64(c)))
5766 // match: (Ctz64 (Const64 [c]))
5767 // cond: config.PtrSize == 8
5768 // result: (Const64 [int64(ntz64(c))])
5770 if v_0.Op != OpConst64 {
5773 c := auxIntToInt64(v_0.AuxInt)
5774 if !(config.PtrSize == 8) {
5778 v.AuxInt = int64ToAuxInt(int64(ntz64(c)))
// rewriteValuegeneric_OpCtz8 constant-folds Ctz8 of a Const8 into a
// Const32 (config.PtrSize == 4) or Const64 (config.PtrSize == 8) holding
// the trailing-zero count computed by ntz8.
// Machine-generated from _gen/generic.rules; fix the rules, not this file.
5783 func rewriteValuegeneric_OpCtz8(v *Value) bool {
5786 config := b.Func.Config
5787 // match: (Ctz8 (Const8 [c]))
5788 // cond: config.PtrSize == 4
5789 // result: (Const32 [int32(ntz8(c))])
5791 if v_0.Op != OpConst8 {
5794 c := auxIntToInt8(v_0.AuxInt)
5795 if !(config.PtrSize == 4) {
5799 v.AuxInt = int32ToAuxInt(int32(ntz8(c)))
5802 // match: (Ctz8 (Const8 [c]))
5803 // cond: config.PtrSize == 8
5804 // result: (Const64 [int64(ntz8(c))])
5806 if v_0.Op != OpConst8 {
5809 c := auxIntToInt8(v_0.AuxInt)
5810 if !(config.PtrSize == 8) {
5814 v.AuxInt = int64ToAuxInt(int64(ntz8(c)))
// rewriteValuegeneric_OpCvt32Fto32 constant-folds a float32->int32
// conversion of a Const32F into a Const32 via Go's int32(c) truncation.
// Machine-generated from _gen/generic.rules; fix the rules, not this file.
5819 func rewriteValuegeneric_OpCvt32Fto32(v *Value) bool {
5821 // match: (Cvt32Fto32 (Const32F [c]))
5822 // result: (Const32 [int32(c)])
5824 if v_0.Op != OpConst32F {
5827 c := auxIntToFloat32(v_0.AuxInt)
5829 v.AuxInt = int32ToAuxInt(int32(c))
// rewriteValuegeneric_OpCvt32Fto64 constant-folds a float32->int64
// conversion of a Const32F into a Const64 via Go's int64(c) truncation.
// Machine-generated from _gen/generic.rules; fix the rules, not this file.
5834 func rewriteValuegeneric_OpCvt32Fto64(v *Value) bool {
5836 // match: (Cvt32Fto64 (Const32F [c]))
5837 // result: (Const64 [int64(c)])
5839 if v_0.Op != OpConst32F {
5842 c := auxIntToFloat32(v_0.AuxInt)
5844 v.AuxInt = int64ToAuxInt(int64(c))
// rewriteValuegeneric_OpCvt32Fto64F constant-folds a float32->float64
// widening of a Const32F into a Const64F (exact: every float32 is
// representable as a float64).
// Machine-generated from _gen/generic.rules; fix the rules, not this file.
5849 func rewriteValuegeneric_OpCvt32Fto64F(v *Value) bool {
5851 // match: (Cvt32Fto64F (Const32F [c]))
5852 // result: (Const64F [float64(c)])
5854 if v_0.Op != OpConst32F {
5857 c := auxIntToFloat32(v_0.AuxInt)
5859 v.AuxInt = float64ToAuxInt(float64(c))
// rewriteValuegeneric_OpCvt32to32F constant-folds an int32->float32
// conversion of a Const32 into a Const32F.
// Machine-generated from _gen/generic.rules; fix the rules, not this file.
5864 func rewriteValuegeneric_OpCvt32to32F(v *Value) bool {
5866 // match: (Cvt32to32F (Const32 [c]))
5867 // result: (Const32F [float32(c)])
5869 if v_0.Op != OpConst32 {
5872 c := auxIntToInt32(v_0.AuxInt)
5874 v.AuxInt = float32ToAuxInt(float32(c))
// rewriteValuegeneric_OpCvt32to64F constant-folds an int32->float64
// conversion of a Const32 into a Const64F (exact: float64 holds all int32).
// Machine-generated from _gen/generic.rules; fix the rules, not this file.
5879 func rewriteValuegeneric_OpCvt32to64F(v *Value) bool {
5881 // match: (Cvt32to64F (Const32 [c]))
5882 // result: (Const64F [float64(c)])
5884 if v_0.Op != OpConst32 {
5887 c := auxIntToInt32(v_0.AuxInt)
5889 v.AuxInt = float64ToAuxInt(float64(c))
// rewriteValuegeneric_OpCvt64Fto32 constant-folds a float64->int32
// conversion of a Const64F into a Const32 via Go's int32(c) truncation.
// Machine-generated from _gen/generic.rules; fix the rules, not this file.
5894 func rewriteValuegeneric_OpCvt64Fto32(v *Value) bool {
5896 // match: (Cvt64Fto32 (Const64F [c]))
5897 // result: (Const32 [int32(c)])
5899 if v_0.Op != OpConst64F {
5902 c := auxIntToFloat64(v_0.AuxInt)
5904 v.AuxInt = int32ToAuxInt(int32(c))
// rewriteValuegeneric_OpCvt64Fto32F handles two rules: constant-fold a
// Const64F narrowed to float32, and collapse the widen-sqrt-narrow pattern
// Cvt64Fto32F(Sqrt(Cvt32Fto64F x)) into a single-precision Sqrt32 when the
// intermediate Sqrt has no other uses.
// Machine-generated from _gen/generic.rules; fix the rules, not this file.
5909 func rewriteValuegeneric_OpCvt64Fto32F(v *Value) bool {
5911 // match: (Cvt64Fto32F (Const64F [c]))
5912 // result: (Const32F [float32(c)])
5914 if v_0.Op != OpConst64F {
5917 c := auxIntToFloat64(v_0.AuxInt)
5919 v.AuxInt = float32ToAuxInt(float32(c))
5922 // match: (Cvt64Fto32F sqrt0:(Sqrt (Cvt32Fto64F x)))
5923 // cond: sqrt0.Uses==1
5924 // result: (Sqrt32 x)
5927 if sqrt0.Op != OpSqrt {
5930 sqrt0_0 := sqrt0.Args[0]
5931 if sqrt0_0.Op != OpCvt32Fto64F {
5934 x := sqrt0_0.Args[0]
// Uses==1 guards the rewrite: the double-precision Sqrt must be dead
// afterwards, otherwise it would still be computed anyway.
5935 if !(sqrt0.Uses == 1) {
// rewriteValuegeneric_OpCvt64Fto64 constant-folds a float64->int64
// conversion of a Const64F into a Const64 via Go's int64(c) truncation.
// Machine-generated from _gen/generic.rules; fix the rules, not this file.
5944 func rewriteValuegeneric_OpCvt64Fto64(v *Value) bool {
5946 // match: (Cvt64Fto64 (Const64F [c]))
5947 // result: (Const64 [int64(c)])
5949 if v_0.Op != OpConst64F {
5952 c := auxIntToFloat64(v_0.AuxInt)
5954 v.AuxInt = int64ToAuxInt(int64(c))
// rewriteValuegeneric_OpCvt64to32F constant-folds an int64->float32
// conversion of a Const64 into a Const32F.
// Machine-generated from _gen/generic.rules; fix the rules, not this file.
5959 func rewriteValuegeneric_OpCvt64to32F(v *Value) bool {
5961 // match: (Cvt64to32F (Const64 [c]))
5962 // result: (Const32F [float32(c)])
5964 if v_0.Op != OpConst64 {
5967 c := auxIntToInt64(v_0.AuxInt)
5969 v.AuxInt = float32ToAuxInt(float32(c))
// rewriteValuegeneric_OpCvt64to64F constant-folds an int64->float64
// conversion of a Const64 into a Const64F.
// Machine-generated from _gen/generic.rules; fix the rules, not this file.
5974 func rewriteValuegeneric_OpCvt64to64F(v *Value) bool {
5976 // match: (Cvt64to64F (Const64 [c]))
5977 // result: (Const64F [float64(c)])
5979 if v_0.Op != OpConst64 {
5982 c := auxIntToInt64(v_0.AuxInt)
5984 v.AuxInt = float64ToAuxInt(float64(c))
// rewriteValuegeneric_OpCvtBoolToUint8 constant-folds the bool->uint8
// conversion: ConstBool false becomes Const8 0 and true becomes Const8 1.
// Machine-generated from _gen/generic.rules; fix the rules, not this file.
5989 func rewriteValuegeneric_OpCvtBoolToUint8(v *Value) bool {
5991 // match: (CvtBoolToUint8 (ConstBool [false]))
5992 // result: (Const8 [0])
5994 if v_0.Op != OpConstBool || auxIntToBool(v_0.AuxInt) != false {
5998 v.AuxInt = int8ToAuxInt(0)
6001 // match: (CvtBoolToUint8 (ConstBool [true]))
6002 // result: (Const8 [1])
6004 if v_0.Op != OpConstBool || auxIntToBool(v_0.AuxInt) != true {
6008 v.AuxInt = int8ToAuxInt(1)
// rewriteValuegeneric_OpDiv16 strength-reduces signed 16-bit division:
// constant/constant folding, shifts for power-of-two divisors (with a
// rounding adjustment for possibly-negative dividends), negation for
// negative divisors, a bit trick for the -1<<15 divisor, and a 32-bit
// signed magic-multiply (smagic16) for general constant divisors.
// Machine-generated from _gen/generic.rules; fix the rules, not this file.
6013 func rewriteValuegeneric_OpDiv16(v *Value) bool {
6017 typ := &b.Func.Config.Types
6018 // match: (Div16 (Const16 [c]) (Const16 [d]))
6020 // result: (Const16 [c/d])
6022 if v_0.Op != OpConst16 {
6025 c := auxIntToInt16(v_0.AuxInt)
6026 if v_1.Op != OpConst16 {
6029 d := auxIntToInt16(v_1.AuxInt)
6034 v.AuxInt = int16ToAuxInt(c / d)
6037 // match: (Div16 n (Const16 [c]))
6038 // cond: isNonNegative(n) && isPowerOfTwo16(c)
6039 // result: (Rsh16Ux64 n (Const64 <typ.UInt64> [log16(c)]))
6042 if v_1.Op != OpConst16 {
6045 c := auxIntToInt16(v_1.AuxInt)
6046 if !(isNonNegative(n) && isPowerOfTwo16(c)) {
6049 v.reset(OpRsh16Ux64)
6050 v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
6051 v0.AuxInt = int64ToAuxInt(log16(c))
6055 // match: (Div16 <t> n (Const16 [c]))
6056 // cond: c < 0 && c != -1<<15
6057 // result: (Neg16 (Div16 <t> n (Const16 <t> [-c])))
6061 if v_1.Op != OpConst16 {
6064 c := auxIntToInt16(v_1.AuxInt)
6065 if !(c < 0 && c != -1<<15) {
6069 v0 := b.NewValue0(v.Pos, OpDiv16, t)
6070 v1 := b.NewValue0(v.Pos, OpConst16, t)
6071 v1.AuxInt = int16ToAuxInt(-c)
6076 // match: (Div16 <t> x (Const16 [-1<<15]))
6077 // result: (Rsh16Ux64 (And16 <t> x (Neg16 <t> x)) (Const64 <typ.UInt64> [15]))
6081 if v_1.Op != OpConst16 || auxIntToInt16(v_1.AuxInt) != -1<<15 {
6084 v.reset(OpRsh16Ux64)
6085 v0 := b.NewValue0(v.Pos, OpAnd16, t)
6086 v1 := b.NewValue0(v.Pos, OpNeg16, t)
6089 v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
6090 v2.AuxInt = int64ToAuxInt(15)
6094 // match: (Div16 <t> n (Const16 [c]))
6095 // cond: isPowerOfTwo16(c)
6096 // result: (Rsh16x64 (Add16 <t> n (Rsh16Ux64 <t> (Rsh16x64 <t> n (Const64 <typ.UInt64> [15])) (Const64 <typ.UInt64> [int64(16-log16(c))]))) (Const64 <typ.UInt64> [int64(log16(c))]))
6100 if v_1.Op != OpConst16 {
6103 c := auxIntToInt16(v_1.AuxInt)
6104 if !(isPowerOfTwo16(c)) {
6108 v0 := b.NewValue0(v.Pos, OpAdd16, t)
6109 v1 := b.NewValue0(v.Pos, OpRsh16Ux64, t)
6110 v2 := b.NewValue0(v.Pos, OpRsh16x64, t)
6111 v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
6112 v3.AuxInt = int64ToAuxInt(15)
6114 v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
6115 v4.AuxInt = int64ToAuxInt(int64(16 - log16(c)))
6118 v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
6119 v5.AuxInt = int64ToAuxInt(int64(log16(c)))
6123 // match: (Div16 <t> x (Const16 [c]))
6124 // cond: smagicOK16(c)
6125 // result: (Sub16 <t> (Rsh32x64 <t> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int32(smagic16(c).m)]) (SignExt16to32 x)) (Const64 <typ.UInt64> [16+smagic16(c).s])) (Rsh32x64 <t> (SignExt16to32 x) (Const64 <typ.UInt64> [31])))
6129 if v_1.Op != OpConst16 {
6132 c := auxIntToInt16(v_1.AuxInt)
6133 if !(smagicOK16(c)) {
6138 v0 := b.NewValue0(v.Pos, OpRsh32x64, t)
6139 v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
6140 v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
6141 v2.AuxInt = int32ToAuxInt(int32(smagic16(c).m))
6142 v3 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
6145 v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
6146 v4.AuxInt = int64ToAuxInt(16 + smagic16(c).s)
6148 v5 := b.NewValue0(v.Pos, OpRsh32x64, t)
6149 v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
6150 v6.AuxInt = int64ToAuxInt(31)
// rewriteValuegeneric_OpDiv16u strength-reduces unsigned 16-bit division:
// constant folding, a shift for power-of-two divisors, and unsigned
// magic-multiply (umagic16) variants selected by register size, divisor
// parity, and config.useAvg availability.
// Machine-generated from _gen/generic.rules; fix the rules, not this file.
6157 func rewriteValuegeneric_OpDiv16u(v *Value) bool {
6161 config := b.Func.Config
6162 typ := &b.Func.Config.Types
6163 // match: (Div16u (Const16 [c]) (Const16 [d]))
6165 // result: (Const16 [int16(uint16(c)/uint16(d))])
6167 if v_0.Op != OpConst16 {
6170 c := auxIntToInt16(v_0.AuxInt)
6171 if v_1.Op != OpConst16 {
6174 d := auxIntToInt16(v_1.AuxInt)
6179 v.AuxInt = int16ToAuxInt(int16(uint16(c) / uint16(d)))
6182 // match: (Div16u n (Const16 [c]))
6183 // cond: isPowerOfTwo16(c)
6184 // result: (Rsh16Ux64 n (Const64 <typ.UInt64> [log16(c)]))
6187 if v_1.Op != OpConst16 {
6190 c := auxIntToInt16(v_1.AuxInt)
6191 if !(isPowerOfTwo16(c)) {
6194 v.reset(OpRsh16Ux64)
6195 v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
6196 v0.AuxInt = int64ToAuxInt(log16(c))
6200 // match: (Div16u x (Const16 [c]))
6201 // cond: umagicOK16(c) && config.RegSize == 8
6202 // result: (Trunc64to16 (Rsh64Ux64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(1<<16+umagic16(c).m)]) (ZeroExt16to64 x)) (Const64 <typ.UInt64> [16+umagic16(c).s])))
6205 if v_1.Op != OpConst16 {
6208 c := auxIntToInt16(v_1.AuxInt)
6209 if !(umagicOK16(c) && config.RegSize == 8) {
6212 v.reset(OpTrunc64to16)
6213 v0 := b.NewValue0(v.Pos, OpRsh64Ux64, typ.UInt64)
6214 v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
6215 v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
6216 v2.AuxInt = int64ToAuxInt(int64(1<<16 + umagic16(c).m))
6217 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6220 v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
6221 v4.AuxInt = int64ToAuxInt(16 + umagic16(c).s)
6226 // match: (Div16u x (Const16 [c]))
6227 // cond: umagicOK16(c) && config.RegSize == 4 && umagic16(c).m&1 == 0
6228 // result: (Trunc32to16 (Rsh32Ux64 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int32(1<<15+umagic16(c).m/2)]) (ZeroExt16to32 x)) (Const64 <typ.UInt64> [16+umagic16(c).s-1])))
6231 if v_1.Op != OpConst16 {
6234 c := auxIntToInt16(v_1.AuxInt)
6235 if !(umagicOK16(c) && config.RegSize == 4 && umagic16(c).m&1 == 0) {
6238 v.reset(OpTrunc32to16)
6239 v0 := b.NewValue0(v.Pos, OpRsh32Ux64, typ.UInt32)
6240 v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
6241 v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
6242 v2.AuxInt = int32ToAuxInt(int32(1<<15 + umagic16(c).m/2))
6243 v3 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
6246 v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
6247 v4.AuxInt = int64ToAuxInt(16 + umagic16(c).s - 1)
6252 // match: (Div16u x (Const16 [c]))
6253 // cond: umagicOK16(c) && config.RegSize == 4 && c&1 == 0
6254 // result: (Trunc32to16 (Rsh32Ux64 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int32(1<<15+(umagic16(c).m+1)/2)]) (Rsh32Ux64 <typ.UInt32> (ZeroExt16to32 x) (Const64 <typ.UInt64> [1]))) (Const64 <typ.UInt64> [16+umagic16(c).s-2])))
6257 if v_1.Op != OpConst16 {
6260 c := auxIntToInt16(v_1.AuxInt)
6261 if !(umagicOK16(c) && config.RegSize == 4 && c&1 == 0) {
6264 v.reset(OpTrunc32to16)
6265 v0 := b.NewValue0(v.Pos, OpRsh32Ux64, typ.UInt32)
6266 v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
6267 v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
6268 v2.AuxInt = int32ToAuxInt(int32(1<<15 + (umagic16(c).m+1)/2))
6269 v3 := b.NewValue0(v.Pos, OpRsh32Ux64, typ.UInt32)
6270 v4 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
6272 v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
6273 v5.AuxInt = int64ToAuxInt(1)
6276 v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
6277 v6.AuxInt = int64ToAuxInt(16 + umagic16(c).s - 2)
6282 // match: (Div16u x (Const16 [c]))
6283 // cond: umagicOK16(c) && config.RegSize == 4 && config.useAvg
6284 // result: (Trunc32to16 (Rsh32Ux64 <typ.UInt32> (Avg32u (Lsh32x64 <typ.UInt32> (ZeroExt16to32 x) (Const64 <typ.UInt64> [16])) (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int32(umagic16(c).m)]) (ZeroExt16to32 x))) (Const64 <typ.UInt64> [16+umagic16(c).s-1])))
6287 if v_1.Op != OpConst16 {
6290 c := auxIntToInt16(v_1.AuxInt)
6291 if !(umagicOK16(c) && config.RegSize == 4 && config.useAvg) {
6294 v.reset(OpTrunc32to16)
6295 v0 := b.NewValue0(v.Pos, OpRsh32Ux64, typ.UInt32)
6296 v1 := b.NewValue0(v.Pos, OpAvg32u, typ.UInt32)
6297 v2 := b.NewValue0(v.Pos, OpLsh32x64, typ.UInt32)
6298 v3 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
6300 v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
6301 v4.AuxInt = int64ToAuxInt(16)
6303 v5 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
6304 v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
6305 v6.AuxInt = int32ToAuxInt(int32(umagic16(c).m))
6308 v7 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
6309 v7.AuxInt = int64ToAuxInt(16 + umagic16(c).s - 1)
// rewriteValuegeneric_OpDiv32 strength-reduces signed 32-bit division:
// constant folding, shifts for power-of-two divisors (with the signed
// rounding fix-up), negation for negative divisors, a bit trick for the
// -1<<31 divisor, and signed magic-multiply (smagic32) variants chosen by
// register size, magic-constant parity, and config.useHmul.
// Machine-generated from _gen/generic.rules; fix the rules, not this file.
6316 func rewriteValuegeneric_OpDiv32(v *Value) bool {
6320 config := b.Func.Config
6321 typ := &b.Func.Config.Types
6322 // match: (Div32 (Const32 [c]) (Const32 [d]))
6324 // result: (Const32 [c/d])
6326 if v_0.Op != OpConst32 {
6329 c := auxIntToInt32(v_0.AuxInt)
6330 if v_1.Op != OpConst32 {
6333 d := auxIntToInt32(v_1.AuxInt)
6338 v.AuxInt = int32ToAuxInt(c / d)
6341 // match: (Div32 n (Const32 [c]))
6342 // cond: isNonNegative(n) && isPowerOfTwo32(c)
6343 // result: (Rsh32Ux64 n (Const64 <typ.UInt64> [log32(c)]))
6346 if v_1.Op != OpConst32 {
6349 c := auxIntToInt32(v_1.AuxInt)
6350 if !(isNonNegative(n) && isPowerOfTwo32(c)) {
6353 v.reset(OpRsh32Ux64)
6354 v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
6355 v0.AuxInt = int64ToAuxInt(log32(c))
6359 // match: (Div32 <t> n (Const32 [c]))
6360 // cond: c < 0 && c != -1<<31
6361 // result: (Neg32 (Div32 <t> n (Const32 <t> [-c])))
6365 if v_1.Op != OpConst32 {
6368 c := auxIntToInt32(v_1.AuxInt)
6369 if !(c < 0 && c != -1<<31) {
6373 v0 := b.NewValue0(v.Pos, OpDiv32, t)
6374 v1 := b.NewValue0(v.Pos, OpConst32, t)
6375 v1.AuxInt = int32ToAuxInt(-c)
6380 // match: (Div32 <t> x (Const32 [-1<<31]))
6381 // result: (Rsh32Ux64 (And32 <t> x (Neg32 <t> x)) (Const64 <typ.UInt64> [31]))
6385 if v_1.Op != OpConst32 || auxIntToInt32(v_1.AuxInt) != -1<<31 {
6388 v.reset(OpRsh32Ux64)
6389 v0 := b.NewValue0(v.Pos, OpAnd32, t)
6390 v1 := b.NewValue0(v.Pos, OpNeg32, t)
6393 v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
6394 v2.AuxInt = int64ToAuxInt(31)
6398 // match: (Div32 <t> n (Const32 [c]))
6399 // cond: isPowerOfTwo32(c)
6400 // result: (Rsh32x64 (Add32 <t> n (Rsh32Ux64 <t> (Rsh32x64 <t> n (Const64 <typ.UInt64> [31])) (Const64 <typ.UInt64> [int64(32-log32(c))]))) (Const64 <typ.UInt64> [int64(log32(c))]))
6404 if v_1.Op != OpConst32 {
6407 c := auxIntToInt32(v_1.AuxInt)
6408 if !(isPowerOfTwo32(c)) {
6412 v0 := b.NewValue0(v.Pos, OpAdd32, t)
6413 v1 := b.NewValue0(v.Pos, OpRsh32Ux64, t)
6414 v2 := b.NewValue0(v.Pos, OpRsh32x64, t)
6415 v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
6416 v3.AuxInt = int64ToAuxInt(31)
6418 v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
6419 v4.AuxInt = int64ToAuxInt(int64(32 - log32(c)))
6422 v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
6423 v5.AuxInt = int64ToAuxInt(int64(log32(c)))
6427 // match: (Div32 <t> x (Const32 [c]))
6428 // cond: smagicOK32(c) && config.RegSize == 8
6429 // result: (Sub32 <t> (Rsh64x64 <t> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(smagic32(c).m)]) (SignExt32to64 x)) (Const64 <typ.UInt64> [32+smagic32(c).s])) (Rsh64x64 <t> (SignExt32to64 x) (Const64 <typ.UInt64> [63])))
6433 if v_1.Op != OpConst32 {
6436 c := auxIntToInt32(v_1.AuxInt)
6437 if !(smagicOK32(c) && config.RegSize == 8) {
6442 v0 := b.NewValue0(v.Pos, OpRsh64x64, t)
6443 v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
6444 v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
6445 v2.AuxInt = int64ToAuxInt(int64(smagic32(c).m))
6446 v3 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
6449 v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
6450 v4.AuxInt = int64ToAuxInt(32 + smagic32(c).s)
6452 v5 := b.NewValue0(v.Pos, OpRsh64x64, t)
6453 v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
6454 v6.AuxInt = int64ToAuxInt(63)
6459 // match: (Div32 <t> x (Const32 [c]))
6460 // cond: smagicOK32(c) && config.RegSize == 4 && smagic32(c).m&1 == 0 && config.useHmul
6461 // result: (Sub32 <t> (Rsh32x64 <t> (Hmul32 <t> (Const32 <typ.UInt32> [int32(smagic32(c).m/2)]) x) (Const64 <typ.UInt64> [smagic32(c).s-1])) (Rsh32x64 <t> x (Const64 <typ.UInt64> [31])))
6465 if v_1.Op != OpConst32 {
6468 c := auxIntToInt32(v_1.AuxInt)
6469 if !(smagicOK32(c) && config.RegSize == 4 && smagic32(c).m&1 == 0 && config.useHmul) {
6474 v0 := b.NewValue0(v.Pos, OpRsh32x64, t)
6475 v1 := b.NewValue0(v.Pos, OpHmul32, t)
6476 v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
6477 v2.AuxInt = int32ToAuxInt(int32(smagic32(c).m / 2))
6479 v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
6480 v3.AuxInt = int64ToAuxInt(smagic32(c).s - 1)
6482 v4 := b.NewValue0(v.Pos, OpRsh32x64, t)
6483 v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
6484 v5.AuxInt = int64ToAuxInt(31)
6489 // match: (Div32 <t> x (Const32 [c]))
6490 // cond: smagicOK32(c) && config.RegSize == 4 && smagic32(c).m&1 != 0 && config.useHmul
6491 // result: (Sub32 <t> (Rsh32x64 <t> (Add32 <t> (Hmul32 <t> (Const32 <typ.UInt32> [int32(smagic32(c).m)]) x) x) (Const64 <typ.UInt64> [smagic32(c).s])) (Rsh32x64 <t> x (Const64 <typ.UInt64> [31])))
6495 if v_1.Op != OpConst32 {
6498 c := auxIntToInt32(v_1.AuxInt)
6499 if !(smagicOK32(c) && config.RegSize == 4 && smagic32(c).m&1 != 0 && config.useHmul) {
6504 v0 := b.NewValue0(v.Pos, OpRsh32x64, t)
6505 v1 := b.NewValue0(v.Pos, OpAdd32, t)
6506 v2 := b.NewValue0(v.Pos, OpHmul32, t)
6507 v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
6508 v3.AuxInt = int32ToAuxInt(int32(smagic32(c).m))
6511 v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
6512 v4.AuxInt = int64ToAuxInt(smagic32(c).s)
6514 v5 := b.NewValue0(v.Pos, OpRsh32x64, t)
6515 v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
6516 v6.AuxInt = int64ToAuxInt(31)
// rewriteValuegeneric_OpDiv32F simplifies float32 division: folds a
// constant/constant divide, and turns division by a constant into
// multiplication by its reciprocal only when reciprocalExact32 proves
// 1/c is exactly representable (so results are bit-identical).
// Machine-generated from _gen/generic.rules; fix the rules, not this file.
6523 func rewriteValuegeneric_OpDiv32F(v *Value) bool {
6527 // match: (Div32F (Const32F [c]) (Const32F [d]))
6529 // result: (Const32F [c/d])
6531 if v_0.Op != OpConst32F {
6534 c := auxIntToFloat32(v_0.AuxInt)
6535 if v_1.Op != OpConst32F {
6538 d := auxIntToFloat32(v_1.AuxInt)
6543 v.AuxInt = float32ToAuxInt(c / d)
6546 // match: (Div32F x (Const32F <t> [c]))
6547 // cond: reciprocalExact32(c)
6548 // result: (Mul32F x (Const32F <t> [1/c]))
6551 if v_1.Op != OpConst32F {
6555 c := auxIntToFloat32(v_1.AuxInt)
6556 if !(reciprocalExact32(c)) {
6560 v0 := b.NewValue0(v.Pos, OpConst32F, t)
6561 v0.AuxInt = float32ToAuxInt(1 / c)
// rewriteValuegeneric_OpDiv32u strength-reduces unsigned 32-bit division:
// constant folding, a shift for power-of-two divisors, and unsigned
// magic-multiply (umagic32) variants — Hmul32u-based forms on 4-byte
// register targets, Mul64-based forms on 8-byte register targets — chosen
// by magic-constant/divisor parity and config.useAvg/config.useHmul.
// Machine-generated from _gen/generic.rules; fix the rules, not this file.
6567 func rewriteValuegeneric_OpDiv32u(v *Value) bool {
6571 config := b.Func.Config
6572 typ := &b.Func.Config.Types
6573 // match: (Div32u (Const32 [c]) (Const32 [d]))
6575 // result: (Const32 [int32(uint32(c)/uint32(d))])
6577 if v_0.Op != OpConst32 {
6580 c := auxIntToInt32(v_0.AuxInt)
6581 if v_1.Op != OpConst32 {
6584 d := auxIntToInt32(v_1.AuxInt)
6589 v.AuxInt = int32ToAuxInt(int32(uint32(c) / uint32(d)))
6592 // match: (Div32u n (Const32 [c]))
6593 // cond: isPowerOfTwo32(c)
6594 // result: (Rsh32Ux64 n (Const64 <typ.UInt64> [log32(c)]))
6597 if v_1.Op != OpConst32 {
6600 c := auxIntToInt32(v_1.AuxInt)
6601 if !(isPowerOfTwo32(c)) {
6604 v.reset(OpRsh32Ux64)
6605 v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
6606 v0.AuxInt = int64ToAuxInt(log32(c))
6610 // match: (Div32u x (Const32 [c]))
6611 // cond: umagicOK32(c) && config.RegSize == 4 && umagic32(c).m&1 == 0 && config.useHmul
6612 // result: (Rsh32Ux64 <typ.UInt32> (Hmul32u <typ.UInt32> (Const32 <typ.UInt32> [int32(1<<31+umagic32(c).m/2)]) x) (Const64 <typ.UInt64> [umagic32(c).s-1]))
6615 if v_1.Op != OpConst32 {
6618 c := auxIntToInt32(v_1.AuxInt)
6619 if !(umagicOK32(c) && config.RegSize == 4 && umagic32(c).m&1 == 0 && config.useHmul) {
6622 v.reset(OpRsh32Ux64)
6624 v0 := b.NewValue0(v.Pos, OpHmul32u, typ.UInt32)
6625 v1 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
6626 v1.AuxInt = int32ToAuxInt(int32(1<<31 + umagic32(c).m/2))
6628 v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
6629 v2.AuxInt = int64ToAuxInt(umagic32(c).s - 1)
6633 // match: (Div32u x (Const32 [c]))
6634 // cond: umagicOK32(c) && config.RegSize == 4 && c&1 == 0 && config.useHmul
6635 // result: (Rsh32Ux64 <typ.UInt32> (Hmul32u <typ.UInt32> (Const32 <typ.UInt32> [int32(1<<31+(umagic32(c).m+1)/2)]) (Rsh32Ux64 <typ.UInt32> x (Const64 <typ.UInt64> [1]))) (Const64 <typ.UInt64> [umagic32(c).s-2]))
6638 if v_1.Op != OpConst32 {
6641 c := auxIntToInt32(v_1.AuxInt)
6642 if !(umagicOK32(c) && config.RegSize == 4 && c&1 == 0 && config.useHmul) {
6645 v.reset(OpRsh32Ux64)
6647 v0 := b.NewValue0(v.Pos, OpHmul32u, typ.UInt32)
6648 v1 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
6649 v1.AuxInt = int32ToAuxInt(int32(1<<31 + (umagic32(c).m+1)/2))
6650 v2 := b.NewValue0(v.Pos, OpRsh32Ux64, typ.UInt32)
6651 v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
6652 v3.AuxInt = int64ToAuxInt(1)
6655 v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
6656 v4.AuxInt = int64ToAuxInt(umagic32(c).s - 2)
6660 // match: (Div32u x (Const32 [c]))
6661 // cond: umagicOK32(c) && config.RegSize == 4 && config.useAvg && config.useHmul
6662 // result: (Rsh32Ux64 <typ.UInt32> (Avg32u x (Hmul32u <typ.UInt32> (Const32 <typ.UInt32> [int32(umagic32(c).m)]) x)) (Const64 <typ.UInt64> [umagic32(c).s-1]))
6665 if v_1.Op != OpConst32 {
6668 c := auxIntToInt32(v_1.AuxInt)
6669 if !(umagicOK32(c) && config.RegSize == 4 && config.useAvg && config.useHmul) {
6672 v.reset(OpRsh32Ux64)
6674 v0 := b.NewValue0(v.Pos, OpAvg32u, typ.UInt32)
6675 v1 := b.NewValue0(v.Pos, OpHmul32u, typ.UInt32)
6676 v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
6677 v2.AuxInt = int32ToAuxInt(int32(umagic32(c).m))
6680 v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
6681 v3.AuxInt = int64ToAuxInt(umagic32(c).s - 1)
6685 // match: (Div32u x (Const32 [c]))
6686 // cond: umagicOK32(c) && config.RegSize == 8 && umagic32(c).m&1 == 0
6687 // result: (Trunc64to32 (Rsh64Ux64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(1<<31+umagic32(c).m/2)]) (ZeroExt32to64 x)) (Const64 <typ.UInt64> [32+umagic32(c).s-1])))
6690 if v_1.Op != OpConst32 {
6693 c := auxIntToInt32(v_1.AuxInt)
6694 if !(umagicOK32(c) && config.RegSize == 8 && umagic32(c).m&1 == 0) {
6697 v.reset(OpTrunc64to32)
6698 v0 := b.NewValue0(v.Pos, OpRsh64Ux64, typ.UInt64)
6699 v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
6700 v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
6701 v2.AuxInt = int64ToAuxInt(int64(1<<31 + umagic32(c).m/2))
6702 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6705 v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
6706 v4.AuxInt = int64ToAuxInt(32 + umagic32(c).s - 1)
6711 // match: (Div32u x (Const32 [c]))
6712 // cond: umagicOK32(c) && config.RegSize == 8 && c&1 == 0
6713 // result: (Trunc64to32 (Rsh64Ux64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(1<<31+(umagic32(c).m+1)/2)]) (Rsh64Ux64 <typ.UInt64> (ZeroExt32to64 x) (Const64 <typ.UInt64> [1]))) (Const64 <typ.UInt64> [32+umagic32(c).s-2])))
6716 if v_1.Op != OpConst32 {
6719 c := auxIntToInt32(v_1.AuxInt)
6720 if !(umagicOK32(c) && config.RegSize == 8 && c&1 == 0) {
6723 v.reset(OpTrunc64to32)
6724 v0 := b.NewValue0(v.Pos, OpRsh64Ux64, typ.UInt64)
6725 v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
6726 v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
6727 v2.AuxInt = int64ToAuxInt(int64(1<<31 + (umagic32(c).m+1)/2))
6728 v3 := b.NewValue0(v.Pos, OpRsh64Ux64, typ.UInt64)
6729 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6731 v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
6732 v5.AuxInt = int64ToAuxInt(1)
6735 v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
6736 v6.AuxInt = int64ToAuxInt(32 + umagic32(c).s - 2)
6741 // match: (Div32u x (Const32 [c]))
6742 // cond: umagicOK32(c) && config.RegSize == 8 && config.useAvg
6743 // result: (Trunc64to32 (Rsh64Ux64 <typ.UInt64> (Avg64u (Lsh64x64 <typ.UInt64> (ZeroExt32to64 x) (Const64 <typ.UInt64> [32])) (Mul64 <typ.UInt64> (Const64 <typ.UInt32> [int64(umagic32(c).m)]) (ZeroExt32to64 x))) (Const64 <typ.UInt64> [32+umagic32(c).s-1])))
6746 if v_1.Op != OpConst32 {
6749 c := auxIntToInt32(v_1.AuxInt)
6750 if !(umagicOK32(c) && config.RegSize == 8 && config.useAvg) {
6753 v.reset(OpTrunc64to32)
6754 v0 := b.NewValue0(v.Pos, OpRsh64Ux64, typ.UInt64)
6755 v1 := b.NewValue0(v.Pos, OpAvg64u, typ.UInt64)
6756 v2 := b.NewValue0(v.Pos, OpLsh64x64, typ.UInt64)
6757 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6759 v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
6760 v4.AuxInt = int64ToAuxInt(32)
6762 v5 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
// NOTE(review): this Const64 is built with typ.UInt32, mirroring the
// `<typ.UInt32>` in the rule above — looks like a type typo for typ.UInt64
// in _gen/generic.rules (a Mul64 operand); likely harmless since constant
// types are largely ignored here, but confirm and fix in the rules source,
// not in this generated file.
6763 v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt32)
6764 v6.AuxInt = int64ToAuxInt(int64(umagic32(c).m))
6767 v7 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
6768 v7.AuxInt = int64ToAuxInt(32 + umagic32(c).s - 1)
// rewriteValuegeneric_OpDiv64 strength-reduces signed 64-bit division:
// constant folding, shifts for power-of-two divisors (with the signed
// rounding fix-up), the -1<<63 special cases (including the nonnegative
// dividend shortcut to Const64 0), negation for negative divisors, and
// signed magic-multiply (smagic64) Hmul64 variants gated on config.useHmul.
// Machine-generated from _gen/generic.rules; fix the rules, not this file.
6775 func rewriteValuegeneric_OpDiv64(v *Value) bool {
6779 config := b.Func.Config
6780 typ := &b.Func.Config.Types
6781 // match: (Div64 (Const64 [c]) (Const64 [d]))
6783 // result: (Const64 [c/d])
6785 if v_0.Op != OpConst64 {
6788 c := auxIntToInt64(v_0.AuxInt)
6789 if v_1.Op != OpConst64 {
6792 d := auxIntToInt64(v_1.AuxInt)
6797 v.AuxInt = int64ToAuxInt(c / d)
6800 // match: (Div64 n (Const64 [c]))
6801 // cond: isNonNegative(n) && isPowerOfTwo64(c)
6802 // result: (Rsh64Ux64 n (Const64 <typ.UInt64> [log64(c)]))
6805 if v_1.Op != OpConst64 {
6808 c := auxIntToInt64(v_1.AuxInt)
6809 if !(isNonNegative(n) && isPowerOfTwo64(c)) {
6812 v.reset(OpRsh64Ux64)
6813 v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
6814 v0.AuxInt = int64ToAuxInt(log64(c))
6818 // match: (Div64 n (Const64 [-1<<63]))
6819 // cond: isNonNegative(n)
6820 // result: (Const64 [0])
6823 if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != -1<<63 || !(isNonNegative(n)) {
6827 v.AuxInt = int64ToAuxInt(0)
6830 // match: (Div64 <t> n (Const64 [c]))
6831 // cond: c < 0 && c != -1<<63
6832 // result: (Neg64 (Div64 <t> n (Const64 <t> [-c])))
6836 if v_1.Op != OpConst64 {
6839 c := auxIntToInt64(v_1.AuxInt)
6840 if !(c < 0 && c != -1<<63) {
6844 v0 := b.NewValue0(v.Pos, OpDiv64, t)
6845 v1 := b.NewValue0(v.Pos, OpConst64, t)
6846 v1.AuxInt = int64ToAuxInt(-c)
6851 // match: (Div64 <t> x (Const64 [-1<<63]))
6852 // result: (Rsh64Ux64 (And64 <t> x (Neg64 <t> x)) (Const64 <typ.UInt64> [63]))
6856 if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != -1<<63 {
6859 v.reset(OpRsh64Ux64)
6860 v0 := b.NewValue0(v.Pos, OpAnd64, t)
6861 v1 := b.NewValue0(v.Pos, OpNeg64, t)
6864 v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
6865 v2.AuxInt = int64ToAuxInt(63)
6869 // match: (Div64 <t> n (Const64 [c]))
6870 // cond: isPowerOfTwo64(c)
6871 // result: (Rsh64x64 (Add64 <t> n (Rsh64Ux64 <t> (Rsh64x64 <t> n (Const64 <typ.UInt64> [63])) (Const64 <typ.UInt64> [int64(64-log64(c))]))) (Const64 <typ.UInt64> [int64(log64(c))]))
6875 if v_1.Op != OpConst64 {
6878 c := auxIntToInt64(v_1.AuxInt)
6879 if !(isPowerOfTwo64(c)) {
6883 v0 := b.NewValue0(v.Pos, OpAdd64, t)
6884 v1 := b.NewValue0(v.Pos, OpRsh64Ux64, t)
6885 v2 := b.NewValue0(v.Pos, OpRsh64x64, t)
6886 v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
6887 v3.AuxInt = int64ToAuxInt(63)
6889 v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
6890 v4.AuxInt = int64ToAuxInt(int64(64 - log64(c)))
6893 v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
6894 v5.AuxInt = int64ToAuxInt(int64(log64(c)))
6898 // match: (Div64 <t> x (Const64 [c]))
6899 // cond: smagicOK64(c) && smagic64(c).m&1 == 0 && config.useHmul
6900 // result: (Sub64 <t> (Rsh64x64 <t> (Hmul64 <t> (Const64 <typ.UInt64> [int64(smagic64(c).m/2)]) x) (Const64 <typ.UInt64> [smagic64(c).s-1])) (Rsh64x64 <t> x (Const64 <typ.UInt64> [63])))
6904 if v_1.Op != OpConst64 {
6907 c := auxIntToInt64(v_1.AuxInt)
6908 if !(smagicOK64(c) && smagic64(c).m&1 == 0 && config.useHmul) {
6913 v0 := b.NewValue0(v.Pos, OpRsh64x64, t)
6914 v1 := b.NewValue0(v.Pos, OpHmul64, t)
6915 v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
6916 v2.AuxInt = int64ToAuxInt(int64(smagic64(c).m / 2))
6918 v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
6919 v3.AuxInt = int64ToAuxInt(smagic64(c).s - 1)
6921 v4 := b.NewValue0(v.Pos, OpRsh64x64, t)
6922 v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
6923 v5.AuxInt = int64ToAuxInt(63)
6928 // match: (Div64 <t> x (Const64 [c]))
6929 // cond: smagicOK64(c) && smagic64(c).m&1 != 0 && config.useHmul
6930 // result: (Sub64 <t> (Rsh64x64 <t> (Add64 <t> (Hmul64 <t> (Const64 <typ.UInt64> [int64(smagic64(c).m)]) x) x) (Const64 <typ.UInt64> [smagic64(c).s])) (Rsh64x64 <t> x (Const64 <typ.UInt64> [63])))
6934 if v_1.Op != OpConst64 {
6937 c := auxIntToInt64(v_1.AuxInt)
6938 if !(smagicOK64(c) && smagic64(c).m&1 != 0 && config.useHmul) {
6943 v0 := b.NewValue0(v.Pos, OpRsh64x64, t)
6944 v1 := b.NewValue0(v.Pos, OpAdd64, t)
6945 v2 := b.NewValue0(v.Pos, OpHmul64, t)
6946 v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
6947 v3.AuxInt = int64ToAuxInt(int64(smagic64(c).m))
6950 v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
6951 v4.AuxInt = int64ToAuxInt(smagic64(c).s)
6953 v5 := b.NewValue0(v.Pos, OpRsh64x64, t)
6954 v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
6955 v6.AuxInt = int64ToAuxInt(63)
// rewriteValuegeneric_OpDiv64F simplifies float64 division: folds a
// constant/constant divide, and turns division by a constant into
// multiplication by its reciprocal only when reciprocalExact64 proves
// 1/c is exactly representable (so results are bit-identical).
// Machine-generated from _gen/generic.rules; fix the rules, not this file.
6962 func rewriteValuegeneric_OpDiv64F(v *Value) bool {
6966 // match: (Div64F (Const64F [c]) (Const64F [d]))
6968 // result: (Const64F [c/d])
6970 if v_0.Op != OpConst64F {
6973 c := auxIntToFloat64(v_0.AuxInt)
6974 if v_1.Op != OpConst64F {
6977 d := auxIntToFloat64(v_1.AuxInt)
6982 v.AuxInt = float64ToAuxInt(c / d)
6985 // match: (Div64F x (Const64F <t> [c]))
6986 // cond: reciprocalExact64(c)
6987 // result: (Mul64F x (Const64F <t> [1/c]))
6990 if v_1.Op != OpConst64F {
6994 c := auxIntToFloat64(v_1.AuxInt)
6995 if !(reciprocalExact64(c)) {
6999 v0 := b.NewValue0(v.Pos, OpConst64F, t)
7000 v0.AuxInt = float64ToAuxInt(1 / c)
7006 func rewriteValuegeneric_OpDiv64u(v *Value) bool {
7010 config := b.Func.Config
7011 typ := &b.Func.Config.Types
7012 // match: (Div64u (Const64 [c]) (Const64 [d]))
7014 // result: (Const64 [int64(uint64(c)/uint64(d))])
7016 if v_0.Op != OpConst64 {
7019 c := auxIntToInt64(v_0.AuxInt)
7020 if v_1.Op != OpConst64 {
7023 d := auxIntToInt64(v_1.AuxInt)
7028 v.AuxInt = int64ToAuxInt(int64(uint64(c) / uint64(d)))
7031 // match: (Div64u n (Const64 [c]))
7032 // cond: isPowerOfTwo64(c)
7033 // result: (Rsh64Ux64 n (Const64 <typ.UInt64> [log64(c)]))
7036 if v_1.Op != OpConst64 {
7039 c := auxIntToInt64(v_1.AuxInt)
7040 if !(isPowerOfTwo64(c)) {
7043 v.reset(OpRsh64Ux64)
7044 v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
7045 v0.AuxInt = int64ToAuxInt(log64(c))
7049 // match: (Div64u n (Const64 [-1<<63]))
7050 // result: (Rsh64Ux64 n (Const64 <typ.UInt64> [63]))
7053 if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != -1<<63 {
7056 v.reset(OpRsh64Ux64)
7057 v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
7058 v0.AuxInt = int64ToAuxInt(63)
7062 // match: (Div64u x (Const64 [c]))
7063 // cond: c > 0 && c <= 0xFFFF && umagicOK32(int32(c)) && config.RegSize == 4 && config.useHmul
7064 // result: (Add64 (Add64 <typ.UInt64> (Add64 <typ.UInt64> (Lsh64x64 <typ.UInt64> (ZeroExt32to64 (Div32u <typ.UInt32> (Trunc64to32 <typ.UInt32> (Rsh64Ux64 <typ.UInt64> x (Const64 <typ.UInt64> [32]))) (Const32 <typ.UInt32> [int32(c)]))) (Const64 <typ.UInt64> [32])) (ZeroExt32to64 (Div32u <typ.UInt32> (Trunc64to32 <typ.UInt32> x) (Const32 <typ.UInt32> [int32(c)])))) (Mul64 <typ.UInt64> (ZeroExt32to64 <typ.UInt64> (Mod32u <typ.UInt32> (Trunc64to32 <typ.UInt32> (Rsh64Ux64 <typ.UInt64> x (Const64 <typ.UInt64> [32]))) (Const32 <typ.UInt32> [int32(c)]))) (Const64 <typ.UInt64> [int64((1<<32)/c)]))) (ZeroExt32to64 (Div32u <typ.UInt32> (Add32 <typ.UInt32> (Mod32u <typ.UInt32> (Trunc64to32 <typ.UInt32> x) (Const32 <typ.UInt32> [int32(c)])) (Mul32 <typ.UInt32> (Mod32u <typ.UInt32> (Trunc64to32 <typ.UInt32> (Rsh64Ux64 <typ.UInt64> x (Const64 <typ.UInt64> [32]))) (Const32 <typ.UInt32> [int32(c)])) (Const32 <typ.UInt32> [int32((1<<32)%c)]))) (Const32 <typ.UInt32> [int32(c)]))))
7067 if v_1.Op != OpConst64 {
7070 c := auxIntToInt64(v_1.AuxInt)
7071 if !(c > 0 && c <= 0xFFFF && umagicOK32(int32(c)) && config.RegSize == 4 && config.useHmul) {
7075 v0 := b.NewValue0(v.Pos, OpAdd64, typ.UInt64)
7076 v1 := b.NewValue0(v.Pos, OpAdd64, typ.UInt64)
7077 v2 := b.NewValue0(v.Pos, OpLsh64x64, typ.UInt64)
7078 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
7079 v4 := b.NewValue0(v.Pos, OpDiv32u, typ.UInt32)
7080 v5 := b.NewValue0(v.Pos, OpTrunc64to32, typ.UInt32)
7081 v6 := b.NewValue0(v.Pos, OpRsh64Ux64, typ.UInt64)
7082 v7 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
7083 v7.AuxInt = int64ToAuxInt(32)
7086 v8 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
7087 v8.AuxInt = int32ToAuxInt(int32(c))
7091 v9 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
7092 v10 := b.NewValue0(v.Pos, OpDiv32u, typ.UInt32)
7093 v11 := b.NewValue0(v.Pos, OpTrunc64to32, typ.UInt32)
7095 v10.AddArg2(v11, v8)
7098 v12 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
7099 v13 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
7100 v14 := b.NewValue0(v.Pos, OpMod32u, typ.UInt32)
7103 v15 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
7104 v15.AuxInt = int64ToAuxInt(int64((1 << 32) / c))
7105 v12.AddArg2(v13, v15)
7107 v16 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
7108 v17 := b.NewValue0(v.Pos, OpDiv32u, typ.UInt32)
7109 v18 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
7110 v19 := b.NewValue0(v.Pos, OpMod32u, typ.UInt32)
7111 v19.AddArg2(v11, v8)
7112 v20 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
7113 v21 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
7114 v21.AuxInt = int32ToAuxInt(int32((1 << 32) % c))
7115 v20.AddArg2(v14, v21)
7116 v18.AddArg2(v19, v20)
7117 v17.AddArg2(v18, v8)
7122 // match: (Div64u x (Const64 [c]))
7123 // cond: umagicOK64(c) && config.RegSize == 8 && umagic64(c).m&1 == 0 && config.useHmul
7124 // result: (Rsh64Ux64 <typ.UInt64> (Hmul64u <typ.UInt64> (Const64 <typ.UInt64> [int64(1<<63+umagic64(c).m/2)]) x) (Const64 <typ.UInt64> [umagic64(c).s-1]))
7127 if v_1.Op != OpConst64 {
7130 c := auxIntToInt64(v_1.AuxInt)
7131 if !(umagicOK64(c) && config.RegSize == 8 && umagic64(c).m&1 == 0 && config.useHmul) {
7134 v.reset(OpRsh64Ux64)
7136 v0 := b.NewValue0(v.Pos, OpHmul64u, typ.UInt64)
7137 v1 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
7138 v1.AuxInt = int64ToAuxInt(int64(1<<63 + umagic64(c).m/2))
7140 v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
7141 v2.AuxInt = int64ToAuxInt(umagic64(c).s - 1)
7145 // match: (Div64u x (Const64 [c]))
7146 // cond: umagicOK64(c) && config.RegSize == 8 && c&1 == 0 && config.useHmul
7147 // result: (Rsh64Ux64 <typ.UInt64> (Hmul64u <typ.UInt64> (Const64 <typ.UInt64> [int64(1<<63+(umagic64(c).m+1)/2)]) (Rsh64Ux64 <typ.UInt64> x (Const64 <typ.UInt64> [1]))) (Const64 <typ.UInt64> [umagic64(c).s-2]))
7150 if v_1.Op != OpConst64 {
7153 c := auxIntToInt64(v_1.AuxInt)
7154 if !(umagicOK64(c) && config.RegSize == 8 && c&1 == 0 && config.useHmul) {
7157 v.reset(OpRsh64Ux64)
7159 v0 := b.NewValue0(v.Pos, OpHmul64u, typ.UInt64)
7160 v1 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
7161 v1.AuxInt = int64ToAuxInt(int64(1<<63 + (umagic64(c).m+1)/2))
7162 v2 := b.NewValue0(v.Pos, OpRsh64Ux64, typ.UInt64)
7163 v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
7164 v3.AuxInt = int64ToAuxInt(1)
7167 v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
7168 v4.AuxInt = int64ToAuxInt(umagic64(c).s - 2)
7172 // match: (Div64u x (Const64 [c]))
7173 // cond: umagicOK64(c) && config.RegSize == 8 && config.useAvg && config.useHmul
7174 // result: (Rsh64Ux64 <typ.UInt64> (Avg64u x (Hmul64u <typ.UInt64> (Const64 <typ.UInt64> [int64(umagic64(c).m)]) x)) (Const64 <typ.UInt64> [umagic64(c).s-1]))
7177 if v_1.Op != OpConst64 {
7180 c := auxIntToInt64(v_1.AuxInt)
7181 if !(umagicOK64(c) && config.RegSize == 8 && config.useAvg && config.useHmul) {
7184 v.reset(OpRsh64Ux64)
7186 v0 := b.NewValue0(v.Pos, OpAvg64u, typ.UInt64)
7187 v1 := b.NewValue0(v.Pos, OpHmul64u, typ.UInt64)
7188 v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
7189 v2.AuxInt = int64ToAuxInt(int64(umagic64(c).m))
7192 v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
7193 v3.AuxInt = int64ToAuxInt(umagic64(c).s - 1)
// rewriteValuegeneric_OpDiv8 applies the generic SSA rewrite rules for
// Div8 (signed 8-bit division).  Each rule is documented by the generated
// match/cond/result comment triple immediately above its code: constant
// folding, power-of-two strength reduction to shifts, negative-divisor
// normalization, and "magic number" multiplication (smagic8) that replaces
// the division entirely when smagicOK8(c) holds.
// NOTE(review): this file is machine-generated ("DO NOT EDIT" header) —
// change _gen/generic.rules and regenerate rather than editing by hand.
// NOTE(review): the leading numeric token on each line and the gaps in
// that numbering indicate this excerpt was mechanically sampled from the
// full generated file; the elided control flow (for { } loop headers,
// break/return statements) between visible lines is not shown here —
// confirm against the regenerated output before relying on this text.
7199 func rewriteValuegeneric_OpDiv8(v *Value) bool {
7203 typ := &b.Func.Config.Types
7204 // match: (Div8 (Const8 [c]) (Const8 [d]))
7206 // result: (Const8 [c/d])
7208 if v_0.Op != OpConst8 {
7211 c := auxIntToInt8(v_0.AuxInt)
7212 if v_1.Op != OpConst8 {
7215 d := auxIntToInt8(v_1.AuxInt)
7220 v.AuxInt = int8ToAuxInt(c / d)
7223 // match: (Div8 n (Const8 [c]))
7224 // cond: isNonNegative(n) && isPowerOfTwo8(c)
7225 // result: (Rsh8Ux64 n (Const64 <typ.UInt64> [log8(c)]))
7228 if v_1.Op != OpConst8 {
7231 c := auxIntToInt8(v_1.AuxInt)
7232 if !(isNonNegative(n) && isPowerOfTwo8(c)) {
7236 v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
7237 v0.AuxInt = int64ToAuxInt(log8(c))
7241 // match: (Div8 <t> n (Const8 [c]))
7242 // cond: c < 0 && c != -1<<7
7243 // result: (Neg8 (Div8 <t> n (Const8 <t> [-c])))
7247 if v_1.Op != OpConst8 {
7250 c := auxIntToInt8(v_1.AuxInt)
7251 if !(c < 0 && c != -1<<7) {
7255 v0 := b.NewValue0(v.Pos, OpDiv8, t)
7256 v1 := b.NewValue0(v.Pos, OpConst8, t)
7257 v1.AuxInt = int8ToAuxInt(-c)
7262 // match: (Div8 <t> x (Const8 [-1<<7 ]))
7263 // result: (Rsh8Ux64 (And8 <t> x (Neg8 <t> x)) (Const64 <typ.UInt64> [7 ]))
7267 if v_1.Op != OpConst8 || auxIntToInt8(v_1.AuxInt) != -1<<7 {
7271 v0 := b.NewValue0(v.Pos, OpAnd8, t)
7272 v1 := b.NewValue0(v.Pos, OpNeg8, t)
7275 v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
7276 v2.AuxInt = int64ToAuxInt(7)
7280 // match: (Div8 <t> n (Const8 [c]))
7281 // cond: isPowerOfTwo8(c)
7282 // result: (Rsh8x64 (Add8 <t> n (Rsh8Ux64 <t> (Rsh8x64 <t> n (Const64 <typ.UInt64> [ 7])) (Const64 <typ.UInt64> [int64( 8-log8(c))]))) (Const64 <typ.UInt64> [int64(log8(c))]))
7286 if v_1.Op != OpConst8 {
7289 c := auxIntToInt8(v_1.AuxInt)
7290 if !(isPowerOfTwo8(c)) {
7294 v0 := b.NewValue0(v.Pos, OpAdd8, t)
7295 v1 := b.NewValue0(v.Pos, OpRsh8Ux64, t)
7296 v2 := b.NewValue0(v.Pos, OpRsh8x64, t)
7297 v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
7298 v3.AuxInt = int64ToAuxInt(7)
7300 v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
7301 v4.AuxInt = int64ToAuxInt(int64(8 - log8(c)))
7304 v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
7305 v5.AuxInt = int64ToAuxInt(int64(log8(c)))
7309 // match: (Div8 <t> x (Const8 [c]))
7310 // cond: smagicOK8(c)
7311 // result: (Sub8 <t> (Rsh32x64 <t> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int32(smagic8(c).m)]) (SignExt8to32 x)) (Const64 <typ.UInt64> [8+smagic8(c).s])) (Rsh32x64 <t> (SignExt8to32 x) (Const64 <typ.UInt64> [31])))
7315 if v_1.Op != OpConst8 {
7318 c := auxIntToInt8(v_1.AuxInt)
7319 if !(smagicOK8(c)) {
7324 v0 := b.NewValue0(v.Pos, OpRsh32x64, t)
7325 v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
7326 v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
7327 v2.AuxInt = int32ToAuxInt(int32(smagic8(c).m))
7328 v3 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
7331 v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
7332 v4.AuxInt = int64ToAuxInt(8 + smagic8(c).s)
7334 v5 := b.NewValue0(v.Pos, OpRsh32x64, t)
7335 v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
7336 v6.AuxInt = int64ToAuxInt(31)
// rewriteValuegeneric_OpDiv8u applies the generic SSA rewrite rules for
// Div8u (unsigned 8-bit division): constant folding, power-of-two shift
// reduction, and unsigned magic-number multiplication (umagic8) carried
// out in 32-bit arithmetic and truncated back to 8 bits.
// NOTE(review): machine-generated code ("DO NOT EDIT") — regenerate from
// _gen/generic.rules.  The leading numeric tokens and their gaps indicate
// this excerpt was sampled from the full file; elided lines (loop
// headers, break/return) are not shown here.
7343 func rewriteValuegeneric_OpDiv8u(v *Value) bool {
7347 typ := &b.Func.Config.Types
7348 // match: (Div8u (Const8 [c]) (Const8 [d]))
7350 // result: (Const8 [int8(uint8(c)/uint8(d))])
7352 if v_0.Op != OpConst8 {
7355 c := auxIntToInt8(v_0.AuxInt)
7356 if v_1.Op != OpConst8 {
7359 d := auxIntToInt8(v_1.AuxInt)
7364 v.AuxInt = int8ToAuxInt(int8(uint8(c) / uint8(d)))
7367 // match: (Div8u n (Const8 [c]))
7368 // cond: isPowerOfTwo8(c)
7369 // result: (Rsh8Ux64 n (Const64 <typ.UInt64> [log8(c)]))
7372 if v_1.Op != OpConst8 {
7375 c := auxIntToInt8(v_1.AuxInt)
7376 if !(isPowerOfTwo8(c)) {
7380 v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
7381 v0.AuxInt = int64ToAuxInt(log8(c))
7385 // match: (Div8u x (Const8 [c]))
7386 // cond: umagicOK8(c)
7387 // result: (Trunc32to8 (Rsh32Ux64 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int32(1<<8+umagic8(c).m)]) (ZeroExt8to32 x)) (Const64 <typ.UInt64> [8+umagic8(c).s])))
7390 if v_1.Op != OpConst8 {
7393 c := auxIntToInt8(v_1.AuxInt)
7394 if !(umagicOK8(c)) {
7397 v.reset(OpTrunc32to8)
7398 v0 := b.NewValue0(v.Pos, OpRsh32Ux64, typ.UInt32)
7399 v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
7400 v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
7401 v2.AuxInt = int32ToAuxInt(int32(1<<8 + umagic8(c).m))
7402 v3 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
7405 v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
7406 v4.AuxInt = int64ToAuxInt(8 + umagic8(c).s)
// rewriteValuegeneric_OpEq16 applies the generic SSA rewrite rules for
// Eq16 (16-bit equality).  The rules, each documented by its generated
// match/cond/result triple, cover: reflexive equality, constant folding,
// folding an Add16 constant across the comparison, turning modulus
// checks into 32-bit modulus when rotates are unavailable, recognizing
// the expanded unsigned/signed magic-number division patterns and
// replacing "x == c*(x/c)" divisibility tests with a rotate-and-compare
// (udivisible16/sdivisible16), simplifying round-to-power-of-two
// patterns, and canonicalizing Sub16/And16 comparisons against zero.
// The _i0/_i1/_i2 loops try both argument orders of commutative ops.
// NOTE(review): machine-generated code ("DO NOT EDIT") — regenerate from
// _gen/generic.rules instead of editing.  The leading numeric tokens and
// their non-contiguous numbering indicate this excerpt is a sample of
// the full generated file; elided control-flow lines are not shown.
7413 func rewriteValuegeneric_OpEq16(v *Value) bool {
7417 config := b.Func.Config
7418 typ := &b.Func.Config.Types
7419 // match: (Eq16 x x)
7420 // result: (ConstBool [true])
7426 v.reset(OpConstBool)
7427 v.AuxInt = boolToAuxInt(true)
7430 // match: (Eq16 (Const16 <t> [c]) (Add16 (Const16 <t> [d]) x))
7431 // result: (Eq16 (Const16 <t> [c-d]) x)
7433 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
7434 if v_0.Op != OpConst16 {
7438 c := auxIntToInt16(v_0.AuxInt)
7439 if v_1.Op != OpAdd16 {
7443 v_1_0 := v_1.Args[0]
7444 v_1_1 := v_1.Args[1]
7445 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
7446 if v_1_0.Op != OpConst16 || v_1_0.Type != t {
7449 d := auxIntToInt16(v_1_0.AuxInt)
7452 v0 := b.NewValue0(v.Pos, OpConst16, t)
7453 v0.AuxInt = int16ToAuxInt(c - d)
7460 // match: (Eq16 (Const16 [c]) (Const16 [d]))
7461 // result: (ConstBool [c == d])
7463 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
7464 if v_0.Op != OpConst16 {
7467 c := auxIntToInt16(v_0.AuxInt)
7468 if v_1.Op != OpConst16 {
7471 d := auxIntToInt16(v_1.AuxInt)
7472 v.reset(OpConstBool)
7473 v.AuxInt = boolToAuxInt(c == d)
7478 // match: (Eq16 (Mod16u x (Const16 [c])) (Const16 [0]))
7479 // cond: x.Op != OpConst16 && udivisibleOK16(c) && !hasSmallRotate(config)
7480 // result: (Eq32 (Mod32u <typ.UInt32> (ZeroExt16to32 <typ.UInt32> x) (Const32 <typ.UInt32> [int32(uint16(c))])) (Const32 <typ.UInt32> [0]))
7482 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
7483 if v_0.Op != OpMod16u {
7488 v_0_1 := v_0.Args[1]
7489 if v_0_1.Op != OpConst16 {
7492 c := auxIntToInt16(v_0_1.AuxInt)
7493 if v_1.Op != OpConst16 || auxIntToInt16(v_1.AuxInt) != 0 || !(x.Op != OpConst16 && udivisibleOK16(c) && !hasSmallRotate(config)) {
7497 v0 := b.NewValue0(v.Pos, OpMod32u, typ.UInt32)
7498 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
7500 v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
7501 v2.AuxInt = int32ToAuxInt(int32(uint16(c)))
7503 v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
7504 v3.AuxInt = int32ToAuxInt(0)
7510 // match: (Eq16 (Mod16 x (Const16 [c])) (Const16 [0]))
7511 // cond: x.Op != OpConst16 && sdivisibleOK16(c) && !hasSmallRotate(config)
7512 // result: (Eq32 (Mod32 <typ.Int32> (SignExt16to32 <typ.Int32> x) (Const32 <typ.Int32> [int32(c)])) (Const32 <typ.Int32> [0]))
7514 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
7515 if v_0.Op != OpMod16 {
7520 v_0_1 := v_0.Args[1]
7521 if v_0_1.Op != OpConst16 {
7524 c := auxIntToInt16(v_0_1.AuxInt)
7525 if v_1.Op != OpConst16 || auxIntToInt16(v_1.AuxInt) != 0 || !(x.Op != OpConst16 && sdivisibleOK16(c) && !hasSmallRotate(config)) {
7529 v0 := b.NewValue0(v.Pos, OpMod32, typ.Int32)
7530 v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
7532 v2 := b.NewValue0(v.Pos, OpConst32, typ.Int32)
7533 v2.AuxInt = int32ToAuxInt(int32(c))
7535 v3 := b.NewValue0(v.Pos, OpConst32, typ.Int32)
7536 v3.AuxInt = int32ToAuxInt(0)
7542 // match: (Eq16 x (Mul16 (Const16 [c]) (Trunc64to16 (Rsh64Ux64 mul:(Mul64 (Const64 [m]) (ZeroExt16to64 x)) (Const64 [s]))) ) )
7543 // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<16+umagic16(c).m) && s == 16+umagic16(c).s && x.Op != OpConst16 && udivisibleOK16(c)
7544 // result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int16(udivisible16(c).m)]) x) (Const16 <typ.UInt16> [int16(16-udivisible16(c).k)]) ) (Const16 <typ.UInt16> [int16(udivisible16(c).max)]) )
7546 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
7548 if v_1.Op != OpMul16 {
7552 v_1_0 := v_1.Args[0]
7553 v_1_1 := v_1.Args[1]
7554 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
7555 if v_1_0.Op != OpConst16 {
7558 c := auxIntToInt16(v_1_0.AuxInt)
7559 if v_1_1.Op != OpTrunc64to16 {
7562 v_1_1_0 := v_1_1.Args[0]
7563 if v_1_1_0.Op != OpRsh64Ux64 {
7567 mul := v_1_1_0.Args[0]
7568 if mul.Op != OpMul64 {
7572 mul_0 := mul.Args[0]
7573 mul_1 := mul.Args[1]
7574 for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
7575 if mul_0.Op != OpConst64 {
7578 m := auxIntToInt64(mul_0.AuxInt)
7579 if mul_1.Op != OpZeroExt16to64 || x != mul_1.Args[0] {
7582 v_1_1_0_1 := v_1_1_0.Args[1]
7583 if v_1_1_0_1.Op != OpConst64 {
7586 s := auxIntToInt64(v_1_1_0_1.AuxInt)
7587 if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<16+umagic16(c).m) && s == 16+umagic16(c).s && x.Op != OpConst16 && udivisibleOK16(c)) {
7591 v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
7592 v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
7593 v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
7594 v2.AuxInt = int16ToAuxInt(int16(udivisible16(c).m))
7596 v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
7597 v3.AuxInt = int16ToAuxInt(int16(16 - udivisible16(c).k))
7599 v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
7600 v4.AuxInt = int16ToAuxInt(int16(udivisible16(c).max))
7608 // match: (Eq16 x (Mul16 (Const16 [c]) (Trunc32to16 (Rsh32Ux64 mul:(Mul32 (Const32 [m]) (ZeroExt16to32 x)) (Const64 [s]))) ) )
7609 // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(1<<15+umagic16(c).m/2) && s == 16+umagic16(c).s-1 && x.Op != OpConst16 && udivisibleOK16(c)
7610 // result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int16(udivisible16(c).m)]) x) (Const16 <typ.UInt16> [int16(16-udivisible16(c).k)]) ) (Const16 <typ.UInt16> [int16(udivisible16(c).max)]) )
7612 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
7614 if v_1.Op != OpMul16 {
7618 v_1_0 := v_1.Args[0]
7619 v_1_1 := v_1.Args[1]
7620 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
7621 if v_1_0.Op != OpConst16 {
7624 c := auxIntToInt16(v_1_0.AuxInt)
7625 if v_1_1.Op != OpTrunc32to16 {
7628 v_1_1_0 := v_1_1.Args[0]
7629 if v_1_1_0.Op != OpRsh32Ux64 {
7633 mul := v_1_1_0.Args[0]
7634 if mul.Op != OpMul32 {
7638 mul_0 := mul.Args[0]
7639 mul_1 := mul.Args[1]
7640 for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
7641 if mul_0.Op != OpConst32 {
7644 m := auxIntToInt32(mul_0.AuxInt)
7645 if mul_1.Op != OpZeroExt16to32 || x != mul_1.Args[0] {
7648 v_1_1_0_1 := v_1_1_0.Args[1]
7649 if v_1_1_0_1.Op != OpConst64 {
7652 s := auxIntToInt64(v_1_1_0_1.AuxInt)
7653 if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(1<<15+umagic16(c).m/2) && s == 16+umagic16(c).s-1 && x.Op != OpConst16 && udivisibleOK16(c)) {
7657 v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
7658 v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
7659 v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
7660 v2.AuxInt = int16ToAuxInt(int16(udivisible16(c).m))
7662 v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
7663 v3.AuxInt = int16ToAuxInt(int16(16 - udivisible16(c).k))
7665 v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
7666 v4.AuxInt = int16ToAuxInt(int16(udivisible16(c).max))
7674 // match: (Eq16 x (Mul16 (Const16 [c]) (Trunc32to16 (Rsh32Ux64 mul:(Mul32 (Const32 [m]) (Rsh32Ux64 (ZeroExt16to32 x) (Const64 [1]))) (Const64 [s]))) ) )
7675 // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(1<<15+(umagic16(c).m+1)/2) && s == 16+umagic16(c).s-2 && x.Op != OpConst16 && udivisibleOK16(c)
7676 // result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int16(udivisible16(c).m)]) x) (Const16 <typ.UInt16> [int16(16-udivisible16(c).k)]) ) (Const16 <typ.UInt16> [int16(udivisible16(c).max)]) )
7678 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
7680 if v_1.Op != OpMul16 {
7684 v_1_0 := v_1.Args[0]
7685 v_1_1 := v_1.Args[1]
7686 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
7687 if v_1_0.Op != OpConst16 {
7690 c := auxIntToInt16(v_1_0.AuxInt)
7691 if v_1_1.Op != OpTrunc32to16 {
7694 v_1_1_0 := v_1_1.Args[0]
7695 if v_1_1_0.Op != OpRsh32Ux64 {
7699 mul := v_1_1_0.Args[0]
7700 if mul.Op != OpMul32 {
7704 mul_0 := mul.Args[0]
7705 mul_1 := mul.Args[1]
7706 for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
7707 if mul_0.Op != OpConst32 {
7710 m := auxIntToInt32(mul_0.AuxInt)
7711 if mul_1.Op != OpRsh32Ux64 {
7715 mul_1_0 := mul_1.Args[0]
7716 if mul_1_0.Op != OpZeroExt16to32 || x != mul_1_0.Args[0] {
7719 mul_1_1 := mul_1.Args[1]
7720 if mul_1_1.Op != OpConst64 || auxIntToInt64(mul_1_1.AuxInt) != 1 {
7723 v_1_1_0_1 := v_1_1_0.Args[1]
7724 if v_1_1_0_1.Op != OpConst64 {
7727 s := auxIntToInt64(v_1_1_0_1.AuxInt)
7728 if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(1<<15+(umagic16(c).m+1)/2) && s == 16+umagic16(c).s-2 && x.Op != OpConst16 && udivisibleOK16(c)) {
7732 v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
7733 v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
7734 v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
7735 v2.AuxInt = int16ToAuxInt(int16(udivisible16(c).m))
7737 v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
7738 v3.AuxInt = int16ToAuxInt(int16(16 - udivisible16(c).k))
7740 v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
7741 v4.AuxInt = int16ToAuxInt(int16(udivisible16(c).max))
7749 // match: (Eq16 x (Mul16 (Const16 [c]) (Trunc32to16 (Rsh32Ux64 (Avg32u (Lsh32x64 (ZeroExt16to32 x) (Const64 [16])) mul:(Mul32 (Const32 [m]) (ZeroExt16to32 x))) (Const64 [s]))) ) )
7750 // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(umagic16(c).m) && s == 16+umagic16(c).s-1 && x.Op != OpConst16 && udivisibleOK16(c)
7751 // result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int16(udivisible16(c).m)]) x) (Const16 <typ.UInt16> [int16(16-udivisible16(c).k)]) ) (Const16 <typ.UInt16> [int16(udivisible16(c).max)]) )
7753 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
7755 if v_1.Op != OpMul16 {
7759 v_1_0 := v_1.Args[0]
7760 v_1_1 := v_1.Args[1]
7761 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
7762 if v_1_0.Op != OpConst16 {
7765 c := auxIntToInt16(v_1_0.AuxInt)
7766 if v_1_1.Op != OpTrunc32to16 {
7769 v_1_1_0 := v_1_1.Args[0]
7770 if v_1_1_0.Op != OpRsh32Ux64 {
7774 v_1_1_0_0 := v_1_1_0.Args[0]
7775 if v_1_1_0_0.Op != OpAvg32u {
7778 _ = v_1_1_0_0.Args[1]
7779 v_1_1_0_0_0 := v_1_1_0_0.Args[0]
7780 if v_1_1_0_0_0.Op != OpLsh32x64 {
7783 _ = v_1_1_0_0_0.Args[1]
7784 v_1_1_0_0_0_0 := v_1_1_0_0_0.Args[0]
7785 if v_1_1_0_0_0_0.Op != OpZeroExt16to32 || x != v_1_1_0_0_0_0.Args[0] {
7788 v_1_1_0_0_0_1 := v_1_1_0_0_0.Args[1]
7789 if v_1_1_0_0_0_1.Op != OpConst64 || auxIntToInt64(v_1_1_0_0_0_1.AuxInt) != 16 {
7792 mul := v_1_1_0_0.Args[1]
7793 if mul.Op != OpMul32 {
7797 mul_0 := mul.Args[0]
7798 mul_1 := mul.Args[1]
7799 for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
7800 if mul_0.Op != OpConst32 {
7803 m := auxIntToInt32(mul_0.AuxInt)
7804 if mul_1.Op != OpZeroExt16to32 || x != mul_1.Args[0] {
7807 v_1_1_0_1 := v_1_1_0.Args[1]
7808 if v_1_1_0_1.Op != OpConst64 {
7811 s := auxIntToInt64(v_1_1_0_1.AuxInt)
7812 if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(umagic16(c).m) && s == 16+umagic16(c).s-1 && x.Op != OpConst16 && udivisibleOK16(c)) {
7816 v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
7817 v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
7818 v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
7819 v2.AuxInt = int16ToAuxInt(int16(udivisible16(c).m))
7821 v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
7822 v3.AuxInt = int16ToAuxInt(int16(16 - udivisible16(c).k))
7824 v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
7825 v4.AuxInt = int16ToAuxInt(int16(udivisible16(c).max))
7833 // match: (Eq16 x (Mul16 (Const16 [c]) (Sub16 (Rsh32x64 mul:(Mul32 (Const32 [m]) (SignExt16to32 x)) (Const64 [s])) (Rsh32x64 (SignExt16to32 x) (Const64 [31]))) ) )
7834 // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(smagic16(c).m) && s == 16+smagic16(c).s && x.Op != OpConst16 && sdivisibleOK16(c)
7835 // result: (Leq16U (RotateLeft16 <typ.UInt16> (Add16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int16(sdivisible16(c).m)]) x) (Const16 <typ.UInt16> [int16(sdivisible16(c).a)]) ) (Const16 <typ.UInt16> [int16(16-sdivisible16(c).k)]) ) (Const16 <typ.UInt16> [int16(sdivisible16(c).max)]) )
7837 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
7839 if v_1.Op != OpMul16 {
7843 v_1_0 := v_1.Args[0]
7844 v_1_1 := v_1.Args[1]
7845 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
7846 if v_1_0.Op != OpConst16 {
7849 c := auxIntToInt16(v_1_0.AuxInt)
7850 if v_1_1.Op != OpSub16 {
7854 v_1_1_0 := v_1_1.Args[0]
7855 if v_1_1_0.Op != OpRsh32x64 {
7859 mul := v_1_1_0.Args[0]
7860 if mul.Op != OpMul32 {
7864 mul_0 := mul.Args[0]
7865 mul_1 := mul.Args[1]
7866 for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
7867 if mul_0.Op != OpConst32 {
7870 m := auxIntToInt32(mul_0.AuxInt)
7871 if mul_1.Op != OpSignExt16to32 || x != mul_1.Args[0] {
7874 v_1_1_0_1 := v_1_1_0.Args[1]
7875 if v_1_1_0_1.Op != OpConst64 {
7878 s := auxIntToInt64(v_1_1_0_1.AuxInt)
7879 v_1_1_1 := v_1_1.Args[1]
7880 if v_1_1_1.Op != OpRsh32x64 {
7884 v_1_1_1_0 := v_1_1_1.Args[0]
7885 if v_1_1_1_0.Op != OpSignExt16to32 || x != v_1_1_1_0.Args[0] {
7888 v_1_1_1_1 := v_1_1_1.Args[1]
7889 if v_1_1_1_1.Op != OpConst64 || auxIntToInt64(v_1_1_1_1.AuxInt) != 31 || !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(smagic16(c).m) && s == 16+smagic16(c).s && x.Op != OpConst16 && sdivisibleOK16(c)) {
7893 v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
7894 v1 := b.NewValue0(v.Pos, OpAdd16, typ.UInt16)
7895 v2 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
7896 v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
7897 v3.AuxInt = int16ToAuxInt(int16(sdivisible16(c).m))
7899 v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
7900 v4.AuxInt = int16ToAuxInt(int16(sdivisible16(c).a))
7902 v5 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
7903 v5.AuxInt = int16ToAuxInt(int16(16 - sdivisible16(c).k))
7905 v6 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
7906 v6.AuxInt = int16ToAuxInt(int16(sdivisible16(c).max))
7914 // match: (Eq16 n (Lsh16x64 (Rsh16x64 (Add16 <t> n (Rsh16Ux64 <t> (Rsh16x64 <t> n (Const64 <typ.UInt64> [15])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) )
7915 // cond: k > 0 && k < 15 && kbar == 16 - k
7916 // result: (Eq16 (And16 <t> n (Const16 <t> [1<<uint(k)-1])) (Const16 <t> [0]))
7918 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
7920 if v_1.Op != OpLsh16x64 {
7924 v_1_0 := v_1.Args[0]
7925 if v_1_0.Op != OpRsh16x64 {
7929 v_1_0_0 := v_1_0.Args[0]
7930 if v_1_0_0.Op != OpAdd16 {
7935 v_1_0_0_0 := v_1_0_0.Args[0]
7936 v_1_0_0_1 := v_1_0_0.Args[1]
7937 for _i1 := 0; _i1 <= 1; _i1, v_1_0_0_0, v_1_0_0_1 = _i1+1, v_1_0_0_1, v_1_0_0_0 {
7938 if n != v_1_0_0_0 || v_1_0_0_1.Op != OpRsh16Ux64 || v_1_0_0_1.Type != t {
7941 _ = v_1_0_0_1.Args[1]
7942 v_1_0_0_1_0 := v_1_0_0_1.Args[0]
7943 if v_1_0_0_1_0.Op != OpRsh16x64 || v_1_0_0_1_0.Type != t {
7946 _ = v_1_0_0_1_0.Args[1]
7947 if n != v_1_0_0_1_0.Args[0] {
7950 v_1_0_0_1_0_1 := v_1_0_0_1_0.Args[1]
7951 if v_1_0_0_1_0_1.Op != OpConst64 || v_1_0_0_1_0_1.Type != typ.UInt64 || auxIntToInt64(v_1_0_0_1_0_1.AuxInt) != 15 {
7954 v_1_0_0_1_1 := v_1_0_0_1.Args[1]
7955 if v_1_0_0_1_1.Op != OpConst64 || v_1_0_0_1_1.Type != typ.UInt64 {
7958 kbar := auxIntToInt64(v_1_0_0_1_1.AuxInt)
7959 v_1_0_1 := v_1_0.Args[1]
7960 if v_1_0_1.Op != OpConst64 || v_1_0_1.Type != typ.UInt64 {
7963 k := auxIntToInt64(v_1_0_1.AuxInt)
7964 v_1_1 := v_1.Args[1]
7965 if v_1_1.Op != OpConst64 || v_1_1.Type != typ.UInt64 || auxIntToInt64(v_1_1.AuxInt) != k || !(k > 0 && k < 15 && kbar == 16-k) {
7969 v0 := b.NewValue0(v.Pos, OpAnd16, t)
7970 v1 := b.NewValue0(v.Pos, OpConst16, t)
7971 v1.AuxInt = int16ToAuxInt(1<<uint(k) - 1)
7973 v2 := b.NewValue0(v.Pos, OpConst16, t)
7974 v2.AuxInt = int16ToAuxInt(0)
7981 // match: (Eq16 s:(Sub16 x y) (Const16 [0]))
7982 // cond: s.Uses == 1
7983 // result: (Eq16 x y)
7985 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
7987 if s.Op != OpSub16 {
7992 if v_1.Op != OpConst16 || auxIntToInt16(v_1.AuxInt) != 0 || !(s.Uses == 1) {
8001 // match: (Eq16 (And16 <t> x (Const16 <t> [y])) (Const16 <t> [y]))
8002 // cond: oneBit16(y)
8003 // result: (Neq16 (And16 <t> x (Const16 <t> [y])) (Const16 <t> [0]))
8005 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
8006 if v_0.Op != OpAnd16 {
8011 v_0_0 := v_0.Args[0]
8012 v_0_1 := v_0.Args[1]
8013 for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
8015 if v_0_1.Op != OpConst16 || v_0_1.Type != t {
8018 y := auxIntToInt16(v_0_1.AuxInt)
8019 if v_1.Op != OpConst16 || v_1.Type != t || auxIntToInt16(v_1.AuxInt) != y || !(oneBit16(y)) {
8023 v0 := b.NewValue0(v.Pos, OpAnd16, t)
8024 v1 := b.NewValue0(v.Pos, OpConst16, t)
8025 v1.AuxInt = int16ToAuxInt(y)
8027 v2 := b.NewValue0(v.Pos, OpConst16, t)
8028 v2.AuxInt = int16ToAuxInt(0)
8037 func rewriteValuegeneric_OpEq32(v *Value) bool {
8041 typ := &b.Func.Config.Types
8042 // match: (Eq32 x x)
8043 // result: (ConstBool [true])
8049 v.reset(OpConstBool)
8050 v.AuxInt = boolToAuxInt(true)
8053 // match: (Eq32 (Const32 <t> [c]) (Add32 (Const32 <t> [d]) x))
8054 // result: (Eq32 (Const32 <t> [c-d]) x)
8056 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
8057 if v_0.Op != OpConst32 {
8061 c := auxIntToInt32(v_0.AuxInt)
8062 if v_1.Op != OpAdd32 {
8066 v_1_0 := v_1.Args[0]
8067 v_1_1 := v_1.Args[1]
8068 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
8069 if v_1_0.Op != OpConst32 || v_1_0.Type != t {
8072 d := auxIntToInt32(v_1_0.AuxInt)
8075 v0 := b.NewValue0(v.Pos, OpConst32, t)
8076 v0.AuxInt = int32ToAuxInt(c - d)
8083 // match: (Eq32 (Const32 [c]) (Const32 [d]))
8084 // result: (ConstBool [c == d])
8086 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
8087 if v_0.Op != OpConst32 {
8090 c := auxIntToInt32(v_0.AuxInt)
8091 if v_1.Op != OpConst32 {
8094 d := auxIntToInt32(v_1.AuxInt)
8095 v.reset(OpConstBool)
8096 v.AuxInt = boolToAuxInt(c == d)
8101 // match: (Eq32 x (Mul32 (Const32 [c]) (Rsh32Ux64 mul:(Hmul32u (Const32 [m]) x) (Const64 [s])) ) )
8102 // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(1<<31+umagic32(c).m/2) && s == umagic32(c).s-1 && x.Op != OpConst32 && udivisibleOK32(c)
8103 // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int32(udivisible32(c).m)]) x) (Const32 <typ.UInt32> [int32(32-udivisible32(c).k)]) ) (Const32 <typ.UInt32> [int32(udivisible32(c).max)]) )
8105 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
8107 if v_1.Op != OpMul32 {
8111 v_1_0 := v_1.Args[0]
8112 v_1_1 := v_1.Args[1]
8113 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
8114 if v_1_0.Op != OpConst32 {
8117 c := auxIntToInt32(v_1_0.AuxInt)
8118 if v_1_1.Op != OpRsh32Ux64 {
8122 mul := v_1_1.Args[0]
8123 if mul.Op != OpHmul32u {
8127 mul_0 := mul.Args[0]
8128 mul_1 := mul.Args[1]
8129 for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
8130 if mul_0.Op != OpConst32 {
8133 m := auxIntToInt32(mul_0.AuxInt)
8137 v_1_1_1 := v_1_1.Args[1]
8138 if v_1_1_1.Op != OpConst64 {
8141 s := auxIntToInt64(v_1_1_1.AuxInt)
8142 if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(1<<31+umagic32(c).m/2) && s == umagic32(c).s-1 && x.Op != OpConst32 && udivisibleOK32(c)) {
8146 v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
8147 v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
8148 v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
8149 v2.AuxInt = int32ToAuxInt(int32(udivisible32(c).m))
8151 v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
8152 v3.AuxInt = int32ToAuxInt(int32(32 - udivisible32(c).k))
8154 v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
8155 v4.AuxInt = int32ToAuxInt(int32(udivisible32(c).max))
8163 // match: (Eq32 x (Mul32 (Const32 [c]) (Rsh32Ux64 mul:(Hmul32u (Const32 <typ.UInt32> [m]) (Rsh32Ux64 x (Const64 [1]))) (Const64 [s])) ) )
8164 // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(1<<31+(umagic32(c).m+1)/2) && s == umagic32(c).s-2 && x.Op != OpConst32 && udivisibleOK32(c)
8165 // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int32(udivisible32(c).m)]) x) (Const32 <typ.UInt32> [int32(32-udivisible32(c).k)]) ) (Const32 <typ.UInt32> [int32(udivisible32(c).max)]) )
8167 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
8169 if v_1.Op != OpMul32 {
8173 v_1_0 := v_1.Args[0]
8174 v_1_1 := v_1.Args[1]
8175 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
8176 if v_1_0.Op != OpConst32 {
8179 c := auxIntToInt32(v_1_0.AuxInt)
8180 if v_1_1.Op != OpRsh32Ux64 {
8184 mul := v_1_1.Args[0]
8185 if mul.Op != OpHmul32u {
8189 mul_0 := mul.Args[0]
8190 mul_1 := mul.Args[1]
8191 for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
8192 if mul_0.Op != OpConst32 || mul_0.Type != typ.UInt32 {
8195 m := auxIntToInt32(mul_0.AuxInt)
8196 if mul_1.Op != OpRsh32Ux64 {
8200 if x != mul_1.Args[0] {
8203 mul_1_1 := mul_1.Args[1]
8204 if mul_1_1.Op != OpConst64 || auxIntToInt64(mul_1_1.AuxInt) != 1 {
8207 v_1_1_1 := v_1_1.Args[1]
8208 if v_1_1_1.Op != OpConst64 {
8211 s := auxIntToInt64(v_1_1_1.AuxInt)
8212 if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(1<<31+(umagic32(c).m+1)/2) && s == umagic32(c).s-2 && x.Op != OpConst32 && udivisibleOK32(c)) {
8216 v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
8217 v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
8218 v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
8219 v2.AuxInt = int32ToAuxInt(int32(udivisible32(c).m))
8221 v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
8222 v3.AuxInt = int32ToAuxInt(int32(32 - udivisible32(c).k))
8224 v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
8225 v4.AuxInt = int32ToAuxInt(int32(udivisible32(c).max))
8233 // match: (Eq32 x (Mul32 (Const32 [c]) (Rsh32Ux64 (Avg32u x mul:(Hmul32u (Const32 [m]) x)) (Const64 [s])) ) )
8234 // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(umagic32(c).m) && s == umagic32(c).s-1 && x.Op != OpConst32 && udivisibleOK32(c)
8235 // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int32(udivisible32(c).m)]) x) (Const32 <typ.UInt32> [int32(32-udivisible32(c).k)]) ) (Const32 <typ.UInt32> [int32(udivisible32(c).max)]) )
8237 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
8239 if v_1.Op != OpMul32 {
8243 v_1_0 := v_1.Args[0]
8244 v_1_1 := v_1.Args[1]
8245 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
8246 if v_1_0.Op != OpConst32 {
8249 c := auxIntToInt32(v_1_0.AuxInt)
8250 if v_1_1.Op != OpRsh32Ux64 {
8254 v_1_1_0 := v_1_1.Args[0]
8255 if v_1_1_0.Op != OpAvg32u {
8259 if x != v_1_1_0.Args[0] {
8262 mul := v_1_1_0.Args[1]
8263 if mul.Op != OpHmul32u {
8267 mul_0 := mul.Args[0]
8268 mul_1 := mul.Args[1]
8269 for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
8270 if mul_0.Op != OpConst32 {
8273 m := auxIntToInt32(mul_0.AuxInt)
8277 v_1_1_1 := v_1_1.Args[1]
8278 if v_1_1_1.Op != OpConst64 {
8281 s := auxIntToInt64(v_1_1_1.AuxInt)
8282 if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(umagic32(c).m) && s == umagic32(c).s-1 && x.Op != OpConst32 && udivisibleOK32(c)) {
8286 v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
8287 v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
8288 v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
8289 v2.AuxInt = int32ToAuxInt(int32(udivisible32(c).m))
8291 v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
8292 v3.AuxInt = int32ToAuxInt(int32(32 - udivisible32(c).k))
8294 v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
8295 v4.AuxInt = int32ToAuxInt(int32(udivisible32(c).max))
8303 // match: (Eq32 x (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (Const64 [m]) (ZeroExt32to64 x)) (Const64 [s]))) ) )
8304 // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic32(c).m/2) && s == 32+umagic32(c).s-1 && x.Op != OpConst32 && udivisibleOK32(c)
8305 // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int32(udivisible32(c).m)]) x) (Const32 <typ.UInt32> [int32(32-udivisible32(c).k)]) ) (Const32 <typ.UInt32> [int32(udivisible32(c).max)]) )
8307 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
8309 if v_1.Op != OpMul32 {
8313 v_1_0 := v_1.Args[0]
8314 v_1_1 := v_1.Args[1]
8315 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
8316 if v_1_0.Op != OpConst32 {
8319 c := auxIntToInt32(v_1_0.AuxInt)
8320 if v_1_1.Op != OpTrunc64to32 {
8323 v_1_1_0 := v_1_1.Args[0]
8324 if v_1_1_0.Op != OpRsh64Ux64 {
8328 mul := v_1_1_0.Args[0]
8329 if mul.Op != OpMul64 {
8333 mul_0 := mul.Args[0]
8334 mul_1 := mul.Args[1]
8335 for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
8336 if mul_0.Op != OpConst64 {
8339 m := auxIntToInt64(mul_0.AuxInt)
8340 if mul_1.Op != OpZeroExt32to64 || x != mul_1.Args[0] {
8343 v_1_1_0_1 := v_1_1_0.Args[1]
8344 if v_1_1_0_1.Op != OpConst64 {
8347 s := auxIntToInt64(v_1_1_0_1.AuxInt)
8348 if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic32(c).m/2) && s == 32+umagic32(c).s-1 && x.Op != OpConst32 && udivisibleOK32(c)) {
8352 v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
8353 v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
8354 v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
8355 v2.AuxInt = int32ToAuxInt(int32(udivisible32(c).m))
8357 v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
8358 v3.AuxInt = int32ToAuxInt(int32(32 - udivisible32(c).k))
8360 v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
8361 v4.AuxInt = int32ToAuxInt(int32(udivisible32(c).max))
8369 // match: (Eq32 x (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (Const64 [m]) (Rsh64Ux64 (ZeroExt32to64 x) (Const64 [1]))) (Const64 [s]))) ) )
8370 // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic32(c).m+1)/2) && s == 32+umagic32(c).s-2 && x.Op != OpConst32 && udivisibleOK32(c)
8371 // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int32(udivisible32(c).m)]) x) (Const32 <typ.UInt32> [int32(32-udivisible32(c).k)]) ) (Const32 <typ.UInt32> [int32(udivisible32(c).max)]) )
8373 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
8375 if v_1.Op != OpMul32 {
8379 v_1_0 := v_1.Args[0]
8380 v_1_1 := v_1.Args[1]
8381 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
8382 if v_1_0.Op != OpConst32 {
8385 c := auxIntToInt32(v_1_0.AuxInt)
8386 if v_1_1.Op != OpTrunc64to32 {
8389 v_1_1_0 := v_1_1.Args[0]
8390 if v_1_1_0.Op != OpRsh64Ux64 {
8394 mul := v_1_1_0.Args[0]
8395 if mul.Op != OpMul64 {
8399 mul_0 := mul.Args[0]
8400 mul_1 := mul.Args[1]
8401 for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
8402 if mul_0.Op != OpConst64 {
8405 m := auxIntToInt64(mul_0.AuxInt)
8406 if mul_1.Op != OpRsh64Ux64 {
8410 mul_1_0 := mul_1.Args[0]
8411 if mul_1_0.Op != OpZeroExt32to64 || x != mul_1_0.Args[0] {
8414 mul_1_1 := mul_1.Args[1]
8415 if mul_1_1.Op != OpConst64 || auxIntToInt64(mul_1_1.AuxInt) != 1 {
8418 v_1_1_0_1 := v_1_1_0.Args[1]
8419 if v_1_1_0_1.Op != OpConst64 {
8422 s := auxIntToInt64(v_1_1_0_1.AuxInt)
8423 if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic32(c).m+1)/2) && s == 32+umagic32(c).s-2 && x.Op != OpConst32 && udivisibleOK32(c)) {
8427 v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
8428 v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
8429 v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
8430 v2.AuxInt = int32ToAuxInt(int32(udivisible32(c).m))
8432 v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
8433 v3.AuxInt = int32ToAuxInt(int32(32 - udivisible32(c).k))
8435 v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
8436 v4.AuxInt = int32ToAuxInt(int32(udivisible32(c).max))
8444 // match: (Eq32 x (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 (Avg64u (Lsh64x64 (ZeroExt32to64 x) (Const64 [32])) mul:(Mul64 (Const64 [m]) (ZeroExt32to64 x))) (Const64 [s]))) ) )
8445 // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic32(c).m) && s == 32+umagic32(c).s-1 && x.Op != OpConst32 && udivisibleOK32(c)
8446 // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int32(udivisible32(c).m)]) x) (Const32 <typ.UInt32> [int32(32-udivisible32(c).k)]) ) (Const32 <typ.UInt32> [int32(udivisible32(c).max)]) )
8448 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
8450 if v_1.Op != OpMul32 {
8454 v_1_0 := v_1.Args[0]
8455 v_1_1 := v_1.Args[1]
8456 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
8457 if v_1_0.Op != OpConst32 {
8460 c := auxIntToInt32(v_1_0.AuxInt)
8461 if v_1_1.Op != OpTrunc64to32 {
8464 v_1_1_0 := v_1_1.Args[0]
8465 if v_1_1_0.Op != OpRsh64Ux64 {
8469 v_1_1_0_0 := v_1_1_0.Args[0]
8470 if v_1_1_0_0.Op != OpAvg64u {
8473 _ = v_1_1_0_0.Args[1]
8474 v_1_1_0_0_0 := v_1_1_0_0.Args[0]
8475 if v_1_1_0_0_0.Op != OpLsh64x64 {
8478 _ = v_1_1_0_0_0.Args[1]
8479 v_1_1_0_0_0_0 := v_1_1_0_0_0.Args[0]
8480 if v_1_1_0_0_0_0.Op != OpZeroExt32to64 || x != v_1_1_0_0_0_0.Args[0] {
8483 v_1_1_0_0_0_1 := v_1_1_0_0_0.Args[1]
8484 if v_1_1_0_0_0_1.Op != OpConst64 || auxIntToInt64(v_1_1_0_0_0_1.AuxInt) != 32 {
8487 mul := v_1_1_0_0.Args[1]
8488 if mul.Op != OpMul64 {
8492 mul_0 := mul.Args[0]
8493 mul_1 := mul.Args[1]
8494 for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
8495 if mul_0.Op != OpConst64 {
8498 m := auxIntToInt64(mul_0.AuxInt)
8499 if mul_1.Op != OpZeroExt32to64 || x != mul_1.Args[0] {
8502 v_1_1_0_1 := v_1_1_0.Args[1]
8503 if v_1_1_0_1.Op != OpConst64 {
8506 s := auxIntToInt64(v_1_1_0_1.AuxInt)
8507 if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic32(c).m) && s == 32+umagic32(c).s-1 && x.Op != OpConst32 && udivisibleOK32(c)) {
8511 v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
8512 v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
8513 v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
8514 v2.AuxInt = int32ToAuxInt(int32(udivisible32(c).m))
8516 v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
8517 v3.AuxInt = int32ToAuxInt(int32(32 - udivisible32(c).k))
8519 v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
8520 v4.AuxInt = int32ToAuxInt(int32(udivisible32(c).max))
8528 // match: (Eq32 x (Mul32 (Const32 [c]) (Sub32 (Rsh64x64 mul:(Mul64 (Const64 [m]) (SignExt32to64 x)) (Const64 [s])) (Rsh64x64 (SignExt32to64 x) (Const64 [63]))) ) )
8529 // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic32(c).m) && s == 32+smagic32(c).s && x.Op != OpConst32 && sdivisibleOK32(c)
8530 // result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int32(sdivisible32(c).m)]) x) (Const32 <typ.UInt32> [int32(sdivisible32(c).a)]) ) (Const32 <typ.UInt32> [int32(32-sdivisible32(c).k)]) ) (Const32 <typ.UInt32> [int32(sdivisible32(c).max)]) )
8532 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
8534 if v_1.Op != OpMul32 {
8538 v_1_0 := v_1.Args[0]
8539 v_1_1 := v_1.Args[1]
8540 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
8541 if v_1_0.Op != OpConst32 {
8544 c := auxIntToInt32(v_1_0.AuxInt)
8545 if v_1_1.Op != OpSub32 {
8549 v_1_1_0 := v_1_1.Args[0]
8550 if v_1_1_0.Op != OpRsh64x64 {
8554 mul := v_1_1_0.Args[0]
8555 if mul.Op != OpMul64 {
8559 mul_0 := mul.Args[0]
8560 mul_1 := mul.Args[1]
8561 for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
8562 if mul_0.Op != OpConst64 {
8565 m := auxIntToInt64(mul_0.AuxInt)
8566 if mul_1.Op != OpSignExt32to64 || x != mul_1.Args[0] {
8569 v_1_1_0_1 := v_1_1_0.Args[1]
8570 if v_1_1_0_1.Op != OpConst64 {
8573 s := auxIntToInt64(v_1_1_0_1.AuxInt)
8574 v_1_1_1 := v_1_1.Args[1]
8575 if v_1_1_1.Op != OpRsh64x64 {
8579 v_1_1_1_0 := v_1_1_1.Args[0]
8580 if v_1_1_1_0.Op != OpSignExt32to64 || x != v_1_1_1_0.Args[0] {
8583 v_1_1_1_1 := v_1_1_1.Args[1]
8584 if v_1_1_1_1.Op != OpConst64 || auxIntToInt64(v_1_1_1_1.AuxInt) != 63 || !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic32(c).m) && s == 32+smagic32(c).s && x.Op != OpConst32 && sdivisibleOK32(c)) {
8588 v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
8589 v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
8590 v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
8591 v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
8592 v3.AuxInt = int32ToAuxInt(int32(sdivisible32(c).m))
8594 v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
8595 v4.AuxInt = int32ToAuxInt(int32(sdivisible32(c).a))
8597 v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
8598 v5.AuxInt = int32ToAuxInt(int32(32 - sdivisible32(c).k))
8600 v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
8601 v6.AuxInt = int32ToAuxInt(int32(sdivisible32(c).max))
8609 // match: (Eq32 x (Mul32 (Const32 [c]) (Sub32 (Rsh32x64 mul:(Hmul32 (Const32 [m]) x) (Const64 [s])) (Rsh32x64 x (Const64 [31]))) ) )
8610 // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(smagic32(c).m/2) && s == smagic32(c).s-1 && x.Op != OpConst32 && sdivisibleOK32(c)
8611 // result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int32(sdivisible32(c).m)]) x) (Const32 <typ.UInt32> [int32(sdivisible32(c).a)]) ) (Const32 <typ.UInt32> [int32(32-sdivisible32(c).k)]) ) (Const32 <typ.UInt32> [int32(sdivisible32(c).max)]) )
8613 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
8615 if v_1.Op != OpMul32 {
8619 v_1_0 := v_1.Args[0]
8620 v_1_1 := v_1.Args[1]
8621 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
8622 if v_1_0.Op != OpConst32 {
8625 c := auxIntToInt32(v_1_0.AuxInt)
8626 if v_1_1.Op != OpSub32 {
8630 v_1_1_0 := v_1_1.Args[0]
8631 if v_1_1_0.Op != OpRsh32x64 {
8635 mul := v_1_1_0.Args[0]
8636 if mul.Op != OpHmul32 {
8640 mul_0 := mul.Args[0]
8641 mul_1 := mul.Args[1]
8642 for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
8643 if mul_0.Op != OpConst32 {
8646 m := auxIntToInt32(mul_0.AuxInt)
8650 v_1_1_0_1 := v_1_1_0.Args[1]
8651 if v_1_1_0_1.Op != OpConst64 {
8654 s := auxIntToInt64(v_1_1_0_1.AuxInt)
8655 v_1_1_1 := v_1_1.Args[1]
8656 if v_1_1_1.Op != OpRsh32x64 {
8660 if x != v_1_1_1.Args[0] {
8663 v_1_1_1_1 := v_1_1_1.Args[1]
8664 if v_1_1_1_1.Op != OpConst64 || auxIntToInt64(v_1_1_1_1.AuxInt) != 31 || !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(smagic32(c).m/2) && s == smagic32(c).s-1 && x.Op != OpConst32 && sdivisibleOK32(c)) {
8668 v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
8669 v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
8670 v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
8671 v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
8672 v3.AuxInt = int32ToAuxInt(int32(sdivisible32(c).m))
8674 v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
8675 v4.AuxInt = int32ToAuxInt(int32(sdivisible32(c).a))
8677 v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
8678 v5.AuxInt = int32ToAuxInt(int32(32 - sdivisible32(c).k))
8680 v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
8681 v6.AuxInt = int32ToAuxInt(int32(sdivisible32(c).max))
8689 // match: (Eq32 x (Mul32 (Const32 [c]) (Sub32 (Rsh32x64 (Add32 mul:(Hmul32 (Const32 [m]) x) x) (Const64 [s])) (Rsh32x64 x (Const64 [31]))) ) )
8690 // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(smagic32(c).m) && s == smagic32(c).s && x.Op != OpConst32 && sdivisibleOK32(c)
8691 // result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int32(sdivisible32(c).m)]) x) (Const32 <typ.UInt32> [int32(sdivisible32(c).a)]) ) (Const32 <typ.UInt32> [int32(32-sdivisible32(c).k)]) ) (Const32 <typ.UInt32> [int32(sdivisible32(c).max)]) )
8693 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
8695 if v_1.Op != OpMul32 {
8699 v_1_0 := v_1.Args[0]
8700 v_1_1 := v_1.Args[1]
8701 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
8702 if v_1_0.Op != OpConst32 {
8705 c := auxIntToInt32(v_1_0.AuxInt)
8706 if v_1_1.Op != OpSub32 {
8710 v_1_1_0 := v_1_1.Args[0]
8711 if v_1_1_0.Op != OpRsh32x64 {
8715 v_1_1_0_0 := v_1_1_0.Args[0]
8716 if v_1_1_0_0.Op != OpAdd32 {
8719 _ = v_1_1_0_0.Args[1]
8720 v_1_1_0_0_0 := v_1_1_0_0.Args[0]
8721 v_1_1_0_0_1 := v_1_1_0_0.Args[1]
8722 for _i2 := 0; _i2 <= 1; _i2, v_1_1_0_0_0, v_1_1_0_0_1 = _i2+1, v_1_1_0_0_1, v_1_1_0_0_0 {
8724 if mul.Op != OpHmul32 {
8728 mul_0 := mul.Args[0]
8729 mul_1 := mul.Args[1]
8730 for _i3 := 0; _i3 <= 1; _i3, mul_0, mul_1 = _i3+1, mul_1, mul_0 {
8731 if mul_0.Op != OpConst32 {
8734 m := auxIntToInt32(mul_0.AuxInt)
8735 if x != mul_1 || x != v_1_1_0_0_1 {
8738 v_1_1_0_1 := v_1_1_0.Args[1]
8739 if v_1_1_0_1.Op != OpConst64 {
8742 s := auxIntToInt64(v_1_1_0_1.AuxInt)
8743 v_1_1_1 := v_1_1.Args[1]
8744 if v_1_1_1.Op != OpRsh32x64 {
8748 if x != v_1_1_1.Args[0] {
8751 v_1_1_1_1 := v_1_1_1.Args[1]
8752 if v_1_1_1_1.Op != OpConst64 || auxIntToInt64(v_1_1_1_1.AuxInt) != 31 || !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(smagic32(c).m) && s == smagic32(c).s && x.Op != OpConst32 && sdivisibleOK32(c)) {
8756 v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
8757 v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
8758 v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
8759 v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
8760 v3.AuxInt = int32ToAuxInt(int32(sdivisible32(c).m))
8762 v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
8763 v4.AuxInt = int32ToAuxInt(int32(sdivisible32(c).a))
8765 v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
8766 v5.AuxInt = int32ToAuxInt(int32(32 - sdivisible32(c).k))
8768 v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
8769 v6.AuxInt = int32ToAuxInt(int32(sdivisible32(c).max))
8778 // match: (Eq32 n (Lsh32x64 (Rsh32x64 (Add32 <t> n (Rsh32Ux64 <t> (Rsh32x64 <t> n (Const64 <typ.UInt64> [31])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) )
8779 // cond: k > 0 && k < 31 && kbar == 32 - k
8780 // result: (Eq32 (And32 <t> n (Const32 <t> [1<<uint(k)-1])) (Const32 <t> [0]))
8782 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
8784 if v_1.Op != OpLsh32x64 {
8788 v_1_0 := v_1.Args[0]
8789 if v_1_0.Op != OpRsh32x64 {
8793 v_1_0_0 := v_1_0.Args[0]
8794 if v_1_0_0.Op != OpAdd32 {
8799 v_1_0_0_0 := v_1_0_0.Args[0]
8800 v_1_0_0_1 := v_1_0_0.Args[1]
8801 for _i1 := 0; _i1 <= 1; _i1, v_1_0_0_0, v_1_0_0_1 = _i1+1, v_1_0_0_1, v_1_0_0_0 {
8802 if n != v_1_0_0_0 || v_1_0_0_1.Op != OpRsh32Ux64 || v_1_0_0_1.Type != t {
8805 _ = v_1_0_0_1.Args[1]
8806 v_1_0_0_1_0 := v_1_0_0_1.Args[0]
8807 if v_1_0_0_1_0.Op != OpRsh32x64 || v_1_0_0_1_0.Type != t {
8810 _ = v_1_0_0_1_0.Args[1]
8811 if n != v_1_0_0_1_0.Args[0] {
8814 v_1_0_0_1_0_1 := v_1_0_0_1_0.Args[1]
8815 if v_1_0_0_1_0_1.Op != OpConst64 || v_1_0_0_1_0_1.Type != typ.UInt64 || auxIntToInt64(v_1_0_0_1_0_1.AuxInt) != 31 {
8818 v_1_0_0_1_1 := v_1_0_0_1.Args[1]
8819 if v_1_0_0_1_1.Op != OpConst64 || v_1_0_0_1_1.Type != typ.UInt64 {
8822 kbar := auxIntToInt64(v_1_0_0_1_1.AuxInt)
8823 v_1_0_1 := v_1_0.Args[1]
8824 if v_1_0_1.Op != OpConst64 || v_1_0_1.Type != typ.UInt64 {
8827 k := auxIntToInt64(v_1_0_1.AuxInt)
8828 v_1_1 := v_1.Args[1]
8829 if v_1_1.Op != OpConst64 || v_1_1.Type != typ.UInt64 || auxIntToInt64(v_1_1.AuxInt) != k || !(k > 0 && k < 31 && kbar == 32-k) {
8833 v0 := b.NewValue0(v.Pos, OpAnd32, t)
8834 v1 := b.NewValue0(v.Pos, OpConst32, t)
8835 v1.AuxInt = int32ToAuxInt(1<<uint(k) - 1)
8837 v2 := b.NewValue0(v.Pos, OpConst32, t)
8838 v2.AuxInt = int32ToAuxInt(0)
8845 // match: (Eq32 s:(Sub32 x y) (Const32 [0]))
8846 // cond: s.Uses == 1
8847 // result: (Eq32 x y)
8849 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
8851 if s.Op != OpSub32 {
8856 if v_1.Op != OpConst32 || auxIntToInt32(v_1.AuxInt) != 0 || !(s.Uses == 1) {
8865 // match: (Eq32 (And32 <t> x (Const32 <t> [y])) (Const32 <t> [y]))
8866 // cond: oneBit32(y)
8867 // result: (Neq32 (And32 <t> x (Const32 <t> [y])) (Const32 <t> [0]))
8869 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
8870 if v_0.Op != OpAnd32 {
8875 v_0_0 := v_0.Args[0]
8876 v_0_1 := v_0.Args[1]
8877 for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
8879 if v_0_1.Op != OpConst32 || v_0_1.Type != t {
8882 y := auxIntToInt32(v_0_1.AuxInt)
8883 if v_1.Op != OpConst32 || v_1.Type != t || auxIntToInt32(v_1.AuxInt) != y || !(oneBit32(y)) {
8887 v0 := b.NewValue0(v.Pos, OpAnd32, t)
8888 v1 := b.NewValue0(v.Pos, OpConst32, t)
8889 v1.AuxInt = int32ToAuxInt(y)
8891 v2 := b.NewValue0(v.Pos, OpConst32, t)
8892 v2.AuxInt = int32ToAuxInt(0)
8901 func rewriteValuegeneric_OpEq32F(v *Value) bool {
8904 // match: (Eq32F (Const32F [c]) (Const32F [d]))
8905 // result: (ConstBool [c == d])
8907 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
8908 if v_0.Op != OpConst32F {
8911 c := auxIntToFloat32(v_0.AuxInt)
8912 if v_1.Op != OpConst32F {
8915 d := auxIntToFloat32(v_1.AuxInt)
8916 v.reset(OpConstBool)
8917 v.AuxInt = boolToAuxInt(c == d)
// rewriteValuegeneric_OpEq64 applies the generic rewrite rules for Eq64.
// Each rule is documented by its generated match/cond/result comment; the
// function returns true when a rule fires (after v.reset) and false otherwise.
// NOTE(review): generated code (DO NOT EDIT header at top of file) — edit
// _gen/generic.rules instead and re-run 'go generate'.
// NOTE(review): this extraction is elided — closing braces, break/continue,
// AddArg chains and return statements are missing between the visible lines;
// do not hand-patch from this view.
8924 func rewriteValuegeneric_OpEq64(v *Value) bool {
8928 typ := &b.Func.Config.Types
// Reflexivity: x == x is always true.
8929 // match: (Eq64 x x)
8930 // result: (ConstBool [true])
8936 v.reset(OpConstBool)
8937 v.AuxInt = boolToAuxInt(true)
// Fold a constant addend across the comparison: c == d+x  =>  c-d == x.
8940 // match: (Eq64 (Const64 <t> [c]) (Add64 (Const64 <t> [d]) x))
8941 // result: (Eq64 (Const64 <t> [c-d]) x)
8943 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
8944 if v_0.Op != OpConst64 {
8948 c := auxIntToInt64(v_0.AuxInt)
8949 if v_1.Op != OpAdd64 {
8953 v_1_0 := v_1.Args[0]
8954 v_1_1 := v_1.Args[1]
8955 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
8956 if v_1_0.Op != OpConst64 || v_1_0.Type != t {
8959 d := auxIntToInt64(v_1_0.AuxInt)
8962 v0 := b.NewValue0(v.Pos, OpConst64, t)
8963 v0.AuxInt = int64ToAuxInt(c - d)
// Constant folding of two Const64 operands.
8970 // match: (Eq64 (Const64 [c]) (Const64 [d]))
8971 // result: (ConstBool [c == d])
8973 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
8974 if v_0.Op != OpConst64 {
8977 c := auxIntToInt64(v_0.AuxInt)
8978 if v_1.Op != OpConst64 {
8981 d := auxIntToInt64(v_1.AuxInt)
8982 v.reset(OpConstBool)
8983 v.AuxInt = boolToAuxInt(c == d)
// The next three rules recognize the strength-reduced form of an unsigned
// division by constant c (see umagic64/udivisible64 helpers; this appears
// to be the magic-number divisibility transform) and replace
// "x == c * (x/c)" with a rotate-and-compare divisibility test.
// The pass-name guard skips "opt" so the division pattern can first be
// formed by earlier rewrites.
8988 // match: (Eq64 x (Mul64 (Const64 [c]) (Rsh64Ux64 mul:(Hmul64u (Const64 [m]) x) (Const64 [s])) ) )
8989 // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic64(c).m/2) && s == umagic64(c).s-1 && x.Op != OpConst64 && udivisibleOK64(c)
8990 // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible64(c).m)]) x) (Const64 <typ.UInt64> [64-udivisible64(c).k]) ) (Const64 <typ.UInt64> [int64(udivisible64(c).max)]) )
8992 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
8994 if v_1.Op != OpMul64 {
8998 v_1_0 := v_1.Args[0]
8999 v_1_1 := v_1.Args[1]
9000 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
9001 if v_1_0.Op != OpConst64 {
9004 c := auxIntToInt64(v_1_0.AuxInt)
9005 if v_1_1.Op != OpRsh64Ux64 {
9009 mul := v_1_1.Args[0]
9010 if mul.Op != OpHmul64u {
9014 mul_0 := mul.Args[0]
9015 mul_1 := mul.Args[1]
9016 for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
9017 if mul_0.Op != OpConst64 {
9020 m := auxIntToInt64(mul_0.AuxInt)
9024 v_1_1_1 := v_1_1.Args[1]
9025 if v_1_1_1.Op != OpConst64 {
9028 s := auxIntToInt64(v_1_1_1.AuxInt)
9029 if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic64(c).m/2) && s == umagic64(c).s-1 && x.Op != OpConst64 && udivisibleOK64(c)) {
9033 v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
9034 v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
9035 v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
9036 v2.AuxInt = int64ToAuxInt(int64(udivisible64(c).m))
9038 v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
9039 v3.AuxInt = int64ToAuxInt(64 - udivisible64(c).k)
9041 v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
9042 v4.AuxInt = int64ToAuxInt(int64(udivisible64(c).max))
// Variant: divisor whose magic multiply needs x pre-shifted right by 1.
9050 // match: (Eq64 x (Mul64 (Const64 [c]) (Rsh64Ux64 mul:(Hmul64u (Const64 [m]) (Rsh64Ux64 x (Const64 [1]))) (Const64 [s])) ) )
9051 // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic64(c).m+1)/2) && s == umagic64(c).s-2 && x.Op != OpConst64 && udivisibleOK64(c)
9052 // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible64(c).m)]) x) (Const64 <typ.UInt64> [64-udivisible64(c).k]) ) (Const64 <typ.UInt64> [int64(udivisible64(c).max)]) )
9054 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
9056 if v_1.Op != OpMul64 {
9060 v_1_0 := v_1.Args[0]
9061 v_1_1 := v_1.Args[1]
9062 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
9063 if v_1_0.Op != OpConst64 {
9066 c := auxIntToInt64(v_1_0.AuxInt)
9067 if v_1_1.Op != OpRsh64Ux64 {
9071 mul := v_1_1.Args[0]
9072 if mul.Op != OpHmul64u {
9076 mul_0 := mul.Args[0]
9077 mul_1 := mul.Args[1]
9078 for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
9079 if mul_0.Op != OpConst64 {
9082 m := auxIntToInt64(mul_0.AuxInt)
9083 if mul_1.Op != OpRsh64Ux64 {
9087 if x != mul_1.Args[0] {
9090 mul_1_1 := mul_1.Args[1]
9091 if mul_1_1.Op != OpConst64 || auxIntToInt64(mul_1_1.AuxInt) != 1 {
9094 v_1_1_1 := v_1_1.Args[1]
9095 if v_1_1_1.Op != OpConst64 {
9098 s := auxIntToInt64(v_1_1_1.AuxInt)
9099 if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic64(c).m+1)/2) && s == umagic64(c).s-2 && x.Op != OpConst64 && udivisibleOK64(c)) {
9103 v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
9104 v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
9105 v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
9106 v2.AuxInt = int64ToAuxInt(int64(udivisible64(c).m))
9108 v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
9109 v3.AuxInt = int64ToAuxInt(64 - udivisible64(c).k)
9111 v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
9112 v4.AuxInt = int64ToAuxInt(int64(udivisible64(c).max))
// Variant: divisor form that uses Avg64u to correct the high-multiply.
9120 // match: (Eq64 x (Mul64 (Const64 [c]) (Rsh64Ux64 (Avg64u x mul:(Hmul64u (Const64 [m]) x)) (Const64 [s])) ) )
9121 // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic64(c).m) && s == umagic64(c).s-1 && x.Op != OpConst64 && udivisibleOK64(c)
9122 // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible64(c).m)]) x) (Const64 <typ.UInt64> [64-udivisible64(c).k]) ) (Const64 <typ.UInt64> [int64(udivisible64(c).max)]) )
9124 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
9126 if v_1.Op != OpMul64 {
9130 v_1_0 := v_1.Args[0]
9131 v_1_1 := v_1.Args[1]
9132 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
9133 if v_1_0.Op != OpConst64 {
9136 c := auxIntToInt64(v_1_0.AuxInt)
9137 if v_1_1.Op != OpRsh64Ux64 {
9141 v_1_1_0 := v_1_1.Args[0]
9142 if v_1_1_0.Op != OpAvg64u {
9146 if x != v_1_1_0.Args[0] {
9149 mul := v_1_1_0.Args[1]
9150 if mul.Op != OpHmul64u {
9154 mul_0 := mul.Args[0]
9155 mul_1 := mul.Args[1]
9156 for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
9157 if mul_0.Op != OpConst64 {
9160 m := auxIntToInt64(mul_0.AuxInt)
9164 v_1_1_1 := v_1_1.Args[1]
9165 if v_1_1_1.Op != OpConst64 {
9168 s := auxIntToInt64(v_1_1_1.AuxInt)
9169 if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic64(c).m) && s == umagic64(c).s-1 && x.Op != OpConst64 && udivisibleOK64(c)) {
9173 v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
9174 v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
9175 v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
9176 v2.AuxInt = int64ToAuxInt(int64(udivisible64(c).m))
9178 v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
9179 v3.AuxInt = int64ToAuxInt(64 - udivisible64(c).k)
9181 v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
9182 v4.AuxInt = int64ToAuxInt(int64(udivisible64(c).max))
// The next two rules are the signed counterparts (smagic64/sdivisible64):
// the matched tree includes the sign-correction term (Rsh64x64 x [63]).
9190 // match: (Eq64 x (Mul64 (Const64 [c]) (Sub64 (Rsh64x64 mul:(Hmul64 (Const64 [m]) x) (Const64 [s])) (Rsh64x64 x (Const64 [63]))) ) )
9191 // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic64(c).m/2) && s == smagic64(c).s-1 && x.Op != OpConst64 && sdivisibleOK64(c)
9192 // result: (Leq64U (RotateLeft64 <typ.UInt64> (Add64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(sdivisible64(c).m)]) x) (Const64 <typ.UInt64> [int64(sdivisible64(c).a)]) ) (Const64 <typ.UInt64> [64-sdivisible64(c).k]) ) (Const64 <typ.UInt64> [int64(sdivisible64(c).max)]) )
9194 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
9196 if v_1.Op != OpMul64 {
9200 v_1_0 := v_1.Args[0]
9201 v_1_1 := v_1.Args[1]
9202 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
9203 if v_1_0.Op != OpConst64 {
9206 c := auxIntToInt64(v_1_0.AuxInt)
9207 if v_1_1.Op != OpSub64 {
9211 v_1_1_0 := v_1_1.Args[0]
9212 if v_1_1_0.Op != OpRsh64x64 {
9216 mul := v_1_1_0.Args[0]
9217 if mul.Op != OpHmul64 {
9221 mul_0 := mul.Args[0]
9222 mul_1 := mul.Args[1]
9223 for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
9224 if mul_0.Op != OpConst64 {
9227 m := auxIntToInt64(mul_0.AuxInt)
9231 v_1_1_0_1 := v_1_1_0.Args[1]
9232 if v_1_1_0_1.Op != OpConst64 {
9235 s := auxIntToInt64(v_1_1_0_1.AuxInt)
9236 v_1_1_1 := v_1_1.Args[1]
9237 if v_1_1_1.Op != OpRsh64x64 {
9241 if x != v_1_1_1.Args[0] {
9244 v_1_1_1_1 := v_1_1_1.Args[1]
9245 if v_1_1_1_1.Op != OpConst64 || auxIntToInt64(v_1_1_1_1.AuxInt) != 63 || !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic64(c).m/2) && s == smagic64(c).s-1 && x.Op != OpConst64 && sdivisibleOK64(c)) {
9249 v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
9250 v1 := b.NewValue0(v.Pos, OpAdd64, typ.UInt64)
9251 v2 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
9252 v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
9253 v3.AuxInt = int64ToAuxInt(int64(sdivisible64(c).m))
9255 v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
9256 v4.AuxInt = int64ToAuxInt(int64(sdivisible64(c).a))
9258 v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
9259 v5.AuxInt = int64ToAuxInt(64 - sdivisible64(c).k)
9261 v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
9262 v6.AuxInt = int64ToAuxInt(int64(sdivisible64(c).max))
// Signed variant where the magic multiply is followed by an Add64 of x.
9270 // match: (Eq64 x (Mul64 (Const64 [c]) (Sub64 (Rsh64x64 (Add64 mul:(Hmul64 (Const64 [m]) x) x) (Const64 [s])) (Rsh64x64 x (Const64 [63]))) ) )
9271 // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic64(c).m) && s == smagic64(c).s && x.Op != OpConst64 && sdivisibleOK64(c)
9272 // result: (Leq64U (RotateLeft64 <typ.UInt64> (Add64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(sdivisible64(c).m)]) x) (Const64 <typ.UInt64> [int64(sdivisible64(c).a)]) ) (Const64 <typ.UInt64> [64-sdivisible64(c).k]) ) (Const64 <typ.UInt64> [int64(sdivisible64(c).max)]) )
9274 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
9276 if v_1.Op != OpMul64 {
9280 v_1_0 := v_1.Args[0]
9281 v_1_1 := v_1.Args[1]
9282 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
9283 if v_1_0.Op != OpConst64 {
9286 c := auxIntToInt64(v_1_0.AuxInt)
9287 if v_1_1.Op != OpSub64 {
9291 v_1_1_0 := v_1_1.Args[0]
9292 if v_1_1_0.Op != OpRsh64x64 {
9296 v_1_1_0_0 := v_1_1_0.Args[0]
9297 if v_1_1_0_0.Op != OpAdd64 {
9300 _ = v_1_1_0_0.Args[1]
9301 v_1_1_0_0_0 := v_1_1_0_0.Args[0]
9302 v_1_1_0_0_1 := v_1_1_0_0.Args[1]
9303 for _i2 := 0; _i2 <= 1; _i2, v_1_1_0_0_0, v_1_1_0_0_1 = _i2+1, v_1_1_0_0_1, v_1_1_0_0_0 {
9305 if mul.Op != OpHmul64 {
9309 mul_0 := mul.Args[0]
9310 mul_1 := mul.Args[1]
9311 for _i3 := 0; _i3 <= 1; _i3, mul_0, mul_1 = _i3+1, mul_1, mul_0 {
9312 if mul_0.Op != OpConst64 {
9315 m := auxIntToInt64(mul_0.AuxInt)
9316 if x != mul_1 || x != v_1_1_0_0_1 {
9319 v_1_1_0_1 := v_1_1_0.Args[1]
9320 if v_1_1_0_1.Op != OpConst64 {
9323 s := auxIntToInt64(v_1_1_0_1.AuxInt)
9324 v_1_1_1 := v_1_1.Args[1]
9325 if v_1_1_1.Op != OpRsh64x64 {
9329 if x != v_1_1_1.Args[0] {
9332 v_1_1_1_1 := v_1_1_1.Args[1]
9333 if v_1_1_1_1.Op != OpConst64 || auxIntToInt64(v_1_1_1_1.AuxInt) != 63 || !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic64(c).m) && s == smagic64(c).s && x.Op != OpConst64 && sdivisibleOK64(c)) {
9337 v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
9338 v1 := b.NewValue0(v.Pos, OpAdd64, typ.UInt64)
9339 v2 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
9340 v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
9341 v3.AuxInt = int64ToAuxInt(int64(sdivisible64(c).m))
9343 v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
9344 v4.AuxInt = int64ToAuxInt(int64(sdivisible64(c).a))
9346 v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
9347 v5.AuxInt = int64ToAuxInt(64 - sdivisible64(c).k)
9349 v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
9350 v6.AuxInt = int64ToAuxInt(int64(sdivisible64(c).max))
// Recognize the rounded signed power-of-two division pattern and turn
// "n == (n rounded down to a multiple of 2^k)" into a low-bits mask test.
9359 // match: (Eq64 n (Lsh64x64 (Rsh64x64 (Add64 <t> n (Rsh64Ux64 <t> (Rsh64x64 <t> n (Const64 <typ.UInt64> [63])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) )
9360 // cond: k > 0 && k < 63 && kbar == 64 - k
9361 // result: (Eq64 (And64 <t> n (Const64 <t> [1<<uint(k)-1])) (Const64 <t> [0]))
9363 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
9365 if v_1.Op != OpLsh64x64 {
9369 v_1_0 := v_1.Args[0]
9370 if v_1_0.Op != OpRsh64x64 {
9374 v_1_0_0 := v_1_0.Args[0]
9375 if v_1_0_0.Op != OpAdd64 {
9380 v_1_0_0_0 := v_1_0_0.Args[0]
9381 v_1_0_0_1 := v_1_0_0.Args[1]
9382 for _i1 := 0; _i1 <= 1; _i1, v_1_0_0_0, v_1_0_0_1 = _i1+1, v_1_0_0_1, v_1_0_0_0 {
9383 if n != v_1_0_0_0 || v_1_0_0_1.Op != OpRsh64Ux64 || v_1_0_0_1.Type != t {
9386 _ = v_1_0_0_1.Args[1]
9387 v_1_0_0_1_0 := v_1_0_0_1.Args[0]
9388 if v_1_0_0_1_0.Op != OpRsh64x64 || v_1_0_0_1_0.Type != t {
9391 _ = v_1_0_0_1_0.Args[1]
9392 if n != v_1_0_0_1_0.Args[0] {
9395 v_1_0_0_1_0_1 := v_1_0_0_1_0.Args[1]
9396 if v_1_0_0_1_0_1.Op != OpConst64 || v_1_0_0_1_0_1.Type != typ.UInt64 || auxIntToInt64(v_1_0_0_1_0_1.AuxInt) != 63 {
9399 v_1_0_0_1_1 := v_1_0_0_1.Args[1]
9400 if v_1_0_0_1_1.Op != OpConst64 || v_1_0_0_1_1.Type != typ.UInt64 {
9403 kbar := auxIntToInt64(v_1_0_0_1_1.AuxInt)
9404 v_1_0_1 := v_1_0.Args[1]
9405 if v_1_0_1.Op != OpConst64 || v_1_0_1.Type != typ.UInt64 {
9408 k := auxIntToInt64(v_1_0_1.AuxInt)
9409 v_1_1 := v_1.Args[1]
9410 if v_1_1.Op != OpConst64 || v_1_1.Type != typ.UInt64 || auxIntToInt64(v_1_1.AuxInt) != k || !(k > 0 && k < 63 && kbar == 64-k) {
9414 v0 := b.NewValue0(v.Pos, OpAnd64, t)
9415 v1 := b.NewValue0(v.Pos, OpConst64, t)
9416 v1.AuxInt = int64ToAuxInt(1<<uint(k) - 1)
9418 v2 := b.NewValue0(v.Pos, OpConst64, t)
9419 v2.AuxInt = int64ToAuxInt(0)
// x-y == 0  =>  x == y, but only when the Sub64 has no other uses.
9426 // match: (Eq64 s:(Sub64 x y) (Const64 [0]))
9427 // cond: s.Uses == 1
9428 // result: (Eq64 x y)
9430 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
9432 if s.Op != OpSub64 {
9437 if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 || !(s.Uses == 1) {
// Single-bit test: (x&y) == y with y a power of two flips into a Neq64
// against zero, which back ends can lower more cheaply.
9446 // match: (Eq64 (And64 <t> x (Const64 <t> [y])) (Const64 <t> [y]))
9447 // cond: oneBit64(y)
9448 // result: (Neq64 (And64 <t> x (Const64 <t> [y])) (Const64 <t> [0]))
9450 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
9451 if v_0.Op != OpAnd64 {
9456 v_0_0 := v_0.Args[0]
9457 v_0_1 := v_0.Args[1]
9458 for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
9460 if v_0_1.Op != OpConst64 || v_0_1.Type != t {
9463 y := auxIntToInt64(v_0_1.AuxInt)
9464 if v_1.Op != OpConst64 || v_1.Type != t || auxIntToInt64(v_1.AuxInt) != y || !(oneBit64(y)) {
9468 v0 := b.NewValue0(v.Pos, OpAnd64, t)
9469 v1 := b.NewValue0(v.Pos, OpConst64, t)
9470 v1.AuxInt = int64ToAuxInt(y)
9472 v2 := b.NewValue0(v.Pos, OpConst64, t)
9473 v2.AuxInt = int64ToAuxInt(0)
// rewriteValuegeneric_OpEq64F constant-folds a float64 equality:
// when both operands of Eq64F are Const64F values, the comparison is
// replaced by a ConstBool holding c == d.
// NOTE(review): generated code (DO NOT EDIT header at top of file) — any
// change belongs in _gen/generic.rules, not here.
// NOTE(review): the _i0 loop tries both operand orders because Eq64F is
// commutative; presumably v_0/v_1 are v.Args[0]/v.Args[1] bound in lines
// elided from this extraction — confirm against the full generated file.
9482 func rewriteValuegeneric_OpEq64F(v *Value) bool {
9485 // match: (Eq64F (Const64F [c]) (Const64F [d]))
9486 // result: (ConstBool [c == d])
9488 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
9489 if v_0.Op != OpConst64F {
9492 c := auxIntToFloat64(v_0.AuxInt)
9493 if v_1.Op != OpConst64F {
9496 d := auxIntToFloat64(v_1.AuxInt)
9497 v.reset(OpConstBool)
9498 v.AuxInt = boolToAuxInt(c == d)
9505 func rewriteValuegeneric_OpEq8(v *Value) bool {
9509 config := b.Func.Config
9510 typ := &b.Func.Config.Types
9512 // result: (ConstBool [true])
9518 v.reset(OpConstBool)
9519 v.AuxInt = boolToAuxInt(true)
9522 // match: (Eq8 (Const8 <t> [c]) (Add8 (Const8 <t> [d]) x))
9523 // result: (Eq8 (Const8 <t> [c-d]) x)
9525 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
9526 if v_0.Op != OpConst8 {
9530 c := auxIntToInt8(v_0.AuxInt)
9531 if v_1.Op != OpAdd8 {
9535 v_1_0 := v_1.Args[0]
9536 v_1_1 := v_1.Args[1]
9537 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
9538 if v_1_0.Op != OpConst8 || v_1_0.Type != t {
9541 d := auxIntToInt8(v_1_0.AuxInt)
9544 v0 := b.NewValue0(v.Pos, OpConst8, t)
9545 v0.AuxInt = int8ToAuxInt(c - d)
9552 // match: (Eq8 (Const8 [c]) (Const8 [d]))
9553 // result: (ConstBool [c == d])
9555 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
9556 if v_0.Op != OpConst8 {
9559 c := auxIntToInt8(v_0.AuxInt)
9560 if v_1.Op != OpConst8 {
9563 d := auxIntToInt8(v_1.AuxInt)
9564 v.reset(OpConstBool)
9565 v.AuxInt = boolToAuxInt(c == d)
9570 // match: (Eq8 (Mod8u x (Const8 [c])) (Const8 [0]))
9571 // cond: x.Op != OpConst8 && udivisibleOK8(c) && !hasSmallRotate(config)
9572 // result: (Eq32 (Mod32u <typ.UInt32> (ZeroExt8to32 <typ.UInt32> x) (Const32 <typ.UInt32> [int32(uint8(c))])) (Const32 <typ.UInt32> [0]))
9574 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
9575 if v_0.Op != OpMod8u {
9580 v_0_1 := v_0.Args[1]
9581 if v_0_1.Op != OpConst8 {
9584 c := auxIntToInt8(v_0_1.AuxInt)
9585 if v_1.Op != OpConst8 || auxIntToInt8(v_1.AuxInt) != 0 || !(x.Op != OpConst8 && udivisibleOK8(c) && !hasSmallRotate(config)) {
9589 v0 := b.NewValue0(v.Pos, OpMod32u, typ.UInt32)
9590 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
9592 v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
9593 v2.AuxInt = int32ToAuxInt(int32(uint8(c)))
9595 v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
9596 v3.AuxInt = int32ToAuxInt(0)
9602 // match: (Eq8 (Mod8 x (Const8 [c])) (Const8 [0]))
9603 // cond: x.Op != OpConst8 && sdivisibleOK8(c) && !hasSmallRotate(config)
9604 // result: (Eq32 (Mod32 <typ.Int32> (SignExt8to32 <typ.Int32> x) (Const32 <typ.Int32> [int32(c)])) (Const32 <typ.Int32> [0]))
9606 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
9607 if v_0.Op != OpMod8 {
9612 v_0_1 := v_0.Args[1]
9613 if v_0_1.Op != OpConst8 {
9616 c := auxIntToInt8(v_0_1.AuxInt)
9617 if v_1.Op != OpConst8 || auxIntToInt8(v_1.AuxInt) != 0 || !(x.Op != OpConst8 && sdivisibleOK8(c) && !hasSmallRotate(config)) {
9621 v0 := b.NewValue0(v.Pos, OpMod32, typ.Int32)
9622 v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
9624 v2 := b.NewValue0(v.Pos, OpConst32, typ.Int32)
9625 v2.AuxInt = int32ToAuxInt(int32(c))
9627 v3 := b.NewValue0(v.Pos, OpConst32, typ.Int32)
9628 v3.AuxInt = int32ToAuxInt(0)
9634 // match: (Eq8 x (Mul8 (Const8 [c]) (Trunc32to8 (Rsh32Ux64 mul:(Mul32 (Const32 [m]) (ZeroExt8to32 x)) (Const64 [s]))) ) )
9635 // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(1<<8+umagic8(c).m) && s == 8+umagic8(c).s && x.Op != OpConst8 && udivisibleOK8(c)
9636 // result: (Leq8U (RotateLeft8 <typ.UInt8> (Mul8 <typ.UInt8> (Const8 <typ.UInt8> [int8(udivisible8(c).m)]) x) (Const8 <typ.UInt8> [int8(8-udivisible8(c).k)]) ) (Const8 <typ.UInt8> [int8(udivisible8(c).max)]) )
9638 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
9640 if v_1.Op != OpMul8 {
9644 v_1_0 := v_1.Args[0]
9645 v_1_1 := v_1.Args[1]
9646 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
9647 if v_1_0.Op != OpConst8 {
9650 c := auxIntToInt8(v_1_0.AuxInt)
9651 if v_1_1.Op != OpTrunc32to8 {
9654 v_1_1_0 := v_1_1.Args[0]
9655 if v_1_1_0.Op != OpRsh32Ux64 {
9659 mul := v_1_1_0.Args[0]
9660 if mul.Op != OpMul32 {
9664 mul_0 := mul.Args[0]
9665 mul_1 := mul.Args[1]
9666 for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
9667 if mul_0.Op != OpConst32 {
9670 m := auxIntToInt32(mul_0.AuxInt)
9671 if mul_1.Op != OpZeroExt8to32 || x != mul_1.Args[0] {
9674 v_1_1_0_1 := v_1_1_0.Args[1]
9675 if v_1_1_0_1.Op != OpConst64 {
9678 s := auxIntToInt64(v_1_1_0_1.AuxInt)
9679 if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(1<<8+umagic8(c).m) && s == 8+umagic8(c).s && x.Op != OpConst8 && udivisibleOK8(c)) {
9683 v0 := b.NewValue0(v.Pos, OpRotateLeft8, typ.UInt8)
9684 v1 := b.NewValue0(v.Pos, OpMul8, typ.UInt8)
9685 v2 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
9686 v2.AuxInt = int8ToAuxInt(int8(udivisible8(c).m))
9688 v3 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
9689 v3.AuxInt = int8ToAuxInt(int8(8 - udivisible8(c).k))
9691 v4 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
9692 v4.AuxInt = int8ToAuxInt(int8(udivisible8(c).max))
9700 // match: (Eq8 x (Mul8 (Const8 [c]) (Sub8 (Rsh32x64 mul:(Mul32 (Const32 [m]) (SignExt8to32 x)) (Const64 [s])) (Rsh32x64 (SignExt8to32 x) (Const64 [31]))) ) )
9701 // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(smagic8(c).m) && s == 8+smagic8(c).s && x.Op != OpConst8 && sdivisibleOK8(c)
9702 // result: (Leq8U (RotateLeft8 <typ.UInt8> (Add8 <typ.UInt8> (Mul8 <typ.UInt8> (Const8 <typ.UInt8> [int8(sdivisible8(c).m)]) x) (Const8 <typ.UInt8> [int8(sdivisible8(c).a)]) ) (Const8 <typ.UInt8> [int8(8-sdivisible8(c).k)]) ) (Const8 <typ.UInt8> [int8(sdivisible8(c).max)]) )
9704 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
9706 if v_1.Op != OpMul8 {
9710 v_1_0 := v_1.Args[0]
9711 v_1_1 := v_1.Args[1]
9712 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
9713 if v_1_0.Op != OpConst8 {
9716 c := auxIntToInt8(v_1_0.AuxInt)
9717 if v_1_1.Op != OpSub8 {
9721 v_1_1_0 := v_1_1.Args[0]
9722 if v_1_1_0.Op != OpRsh32x64 {
9726 mul := v_1_1_0.Args[0]
9727 if mul.Op != OpMul32 {
9731 mul_0 := mul.Args[0]
9732 mul_1 := mul.Args[1]
9733 for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
9734 if mul_0.Op != OpConst32 {
9737 m := auxIntToInt32(mul_0.AuxInt)
9738 if mul_1.Op != OpSignExt8to32 || x != mul_1.Args[0] {
9741 v_1_1_0_1 := v_1_1_0.Args[1]
9742 if v_1_1_0_1.Op != OpConst64 {
9745 s := auxIntToInt64(v_1_1_0_1.AuxInt)
9746 v_1_1_1 := v_1_1.Args[1]
9747 if v_1_1_1.Op != OpRsh32x64 {
9751 v_1_1_1_0 := v_1_1_1.Args[0]
9752 if v_1_1_1_0.Op != OpSignExt8to32 || x != v_1_1_1_0.Args[0] {
9755 v_1_1_1_1 := v_1_1_1.Args[1]
9756 if v_1_1_1_1.Op != OpConst64 || auxIntToInt64(v_1_1_1_1.AuxInt) != 31 || !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(smagic8(c).m) && s == 8+smagic8(c).s && x.Op != OpConst8 && sdivisibleOK8(c)) {
9760 v0 := b.NewValue0(v.Pos, OpRotateLeft8, typ.UInt8)
9761 v1 := b.NewValue0(v.Pos, OpAdd8, typ.UInt8)
9762 v2 := b.NewValue0(v.Pos, OpMul8, typ.UInt8)
9763 v3 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
9764 v3.AuxInt = int8ToAuxInt(int8(sdivisible8(c).m))
9766 v4 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
9767 v4.AuxInt = int8ToAuxInt(int8(sdivisible8(c).a))
9769 v5 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
9770 v5.AuxInt = int8ToAuxInt(int8(8 - sdivisible8(c).k))
9772 v6 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
9773 v6.AuxInt = int8ToAuxInt(int8(sdivisible8(c).max))
9781 // match: (Eq8 n (Lsh8x64 (Rsh8x64 (Add8 <t> n (Rsh8Ux64 <t> (Rsh8x64 <t> n (Const64 <typ.UInt64> [ 7])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) )
9782 // cond: k > 0 && k < 7 && kbar == 8 - k
9783 // result: (Eq8 (And8 <t> n (Const8 <t> [1<<uint(k)-1])) (Const8 <t> [0]))
9785 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
9787 if v_1.Op != OpLsh8x64 {
9791 v_1_0 := v_1.Args[0]
9792 if v_1_0.Op != OpRsh8x64 {
9796 v_1_0_0 := v_1_0.Args[0]
9797 if v_1_0_0.Op != OpAdd8 {
9802 v_1_0_0_0 := v_1_0_0.Args[0]
9803 v_1_0_0_1 := v_1_0_0.Args[1]
9804 for _i1 := 0; _i1 <= 1; _i1, v_1_0_0_0, v_1_0_0_1 = _i1+1, v_1_0_0_1, v_1_0_0_0 {
9805 if n != v_1_0_0_0 || v_1_0_0_1.Op != OpRsh8Ux64 || v_1_0_0_1.Type != t {
9808 _ = v_1_0_0_1.Args[1]
9809 v_1_0_0_1_0 := v_1_0_0_1.Args[0]
9810 if v_1_0_0_1_0.Op != OpRsh8x64 || v_1_0_0_1_0.Type != t {
9813 _ = v_1_0_0_1_0.Args[1]
9814 if n != v_1_0_0_1_0.Args[0] {
9817 v_1_0_0_1_0_1 := v_1_0_0_1_0.Args[1]
9818 if v_1_0_0_1_0_1.Op != OpConst64 || v_1_0_0_1_0_1.Type != typ.UInt64 || auxIntToInt64(v_1_0_0_1_0_1.AuxInt) != 7 {
9821 v_1_0_0_1_1 := v_1_0_0_1.Args[1]
9822 if v_1_0_0_1_1.Op != OpConst64 || v_1_0_0_1_1.Type != typ.UInt64 {
9825 kbar := auxIntToInt64(v_1_0_0_1_1.AuxInt)
9826 v_1_0_1 := v_1_0.Args[1]
9827 if v_1_0_1.Op != OpConst64 || v_1_0_1.Type != typ.UInt64 {
9830 k := auxIntToInt64(v_1_0_1.AuxInt)
9831 v_1_1 := v_1.Args[1]
9832 if v_1_1.Op != OpConst64 || v_1_1.Type != typ.UInt64 || auxIntToInt64(v_1_1.AuxInt) != k || !(k > 0 && k < 7 && kbar == 8-k) {
9836 v0 := b.NewValue0(v.Pos, OpAnd8, t)
9837 v1 := b.NewValue0(v.Pos, OpConst8, t)
9838 v1.AuxInt = int8ToAuxInt(1<<uint(k) - 1)
9840 v2 := b.NewValue0(v.Pos, OpConst8, t)
9841 v2.AuxInt = int8ToAuxInt(0)
9848 // match: (Eq8 s:(Sub8 x y) (Const8 [0]))
9849 // cond: s.Uses == 1
9850 // result: (Eq8 x y)
9852 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
9859 if v_1.Op != OpConst8 || auxIntToInt8(v_1.AuxInt) != 0 || !(s.Uses == 1) {
9868 // match: (Eq8 (And8 <t> x (Const8 <t> [y])) (Const8 <t> [y]))
9870 // result: (Neq8 (And8 <t> x (Const8 <t> [y])) (Const8 <t> [0]))
9872 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
9873 if v_0.Op != OpAnd8 {
9878 v_0_0 := v_0.Args[0]
9879 v_0_1 := v_0.Args[1]
9880 for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
9882 if v_0_1.Op != OpConst8 || v_0_1.Type != t {
9885 y := auxIntToInt8(v_0_1.AuxInt)
9886 if v_1.Op != OpConst8 || v_1.Type != t || auxIntToInt8(v_1.AuxInt) != y || !(oneBit8(y)) {
9890 v0 := b.NewValue0(v.Pos, OpAnd8, t)
9891 v1 := b.NewValue0(v.Pos, OpConst8, t)
9892 v1.AuxInt = int8ToAuxInt(y)
9894 v2 := b.NewValue0(v.Pos, OpConst8, t)
9895 v2.AuxInt = int8ToAuxInt(0)
9904 func rewriteValuegeneric_OpEqB(v *Value) bool {
9907 // match: (EqB (ConstBool [c]) (ConstBool [d]))
9908 // result: (ConstBool [c == d])
9910 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
9911 if v_0.Op != OpConstBool {
9914 c := auxIntToBool(v_0.AuxInt)
9915 if v_1.Op != OpConstBool {
9918 d := auxIntToBool(v_1.AuxInt)
9919 v.reset(OpConstBool)
9920 v.AuxInt = boolToAuxInt(c == d)
9925 // match: (EqB (ConstBool [false]) x)
9928 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
9929 if v_0.Op != OpConstBool || auxIntToBool(v_0.AuxInt) != false {
9939 // match: (EqB (ConstBool [true]) x)
9942 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
9943 if v_0.Op != OpConstBool || auxIntToBool(v_0.AuxInt) != true {
9954 func rewriteValuegeneric_OpEqInter(v *Value) bool {
9958 typ := &b.Func.Config.Types
9959 // match: (EqInter x y)
9960 // result: (EqPtr (ITab x) (ITab y))
9965 v0 := b.NewValue0(v.Pos, OpITab, typ.Uintptr)
9967 v1 := b.NewValue0(v.Pos, OpITab, typ.Uintptr)
9973 func rewriteValuegeneric_OpEqPtr(v *Value) bool {
9977 typ := &b.Func.Config.Types
9978 // match: (EqPtr x x)
9979 // result: (ConstBool [true])
9985 v.reset(OpConstBool)
9986 v.AuxInt = boolToAuxInt(true)
9989 // match: (EqPtr (Addr {x} _) (Addr {y} _))
9990 // result: (ConstBool [x == y])
9992 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
9993 if v_0.Op != OpAddr {
9996 x := auxToSym(v_0.Aux)
9997 if v_1.Op != OpAddr {
10000 y := auxToSym(v_1.Aux)
10001 v.reset(OpConstBool)
10002 v.AuxInt = boolToAuxInt(x == y)
10007 // match: (EqPtr (Addr {x} _) (OffPtr [o] (Addr {y} _)))
10008 // result: (ConstBool [x == y && o == 0])
10010 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
10011 if v_0.Op != OpAddr {
10014 x := auxToSym(v_0.Aux)
10015 if v_1.Op != OpOffPtr {
10018 o := auxIntToInt64(v_1.AuxInt)
10019 v_1_0 := v_1.Args[0]
10020 if v_1_0.Op != OpAddr {
10023 y := auxToSym(v_1_0.Aux)
10024 v.reset(OpConstBool)
10025 v.AuxInt = boolToAuxInt(x == y && o == 0)
10030 // match: (EqPtr (OffPtr [o1] (Addr {x} _)) (OffPtr [o2] (Addr {y} _)))
10031 // result: (ConstBool [x == y && o1 == o2])
10033 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
10034 if v_0.Op != OpOffPtr {
10037 o1 := auxIntToInt64(v_0.AuxInt)
10038 v_0_0 := v_0.Args[0]
10039 if v_0_0.Op != OpAddr {
10042 x := auxToSym(v_0_0.Aux)
10043 if v_1.Op != OpOffPtr {
10046 o2 := auxIntToInt64(v_1.AuxInt)
10047 v_1_0 := v_1.Args[0]
10048 if v_1_0.Op != OpAddr {
10051 y := auxToSym(v_1_0.Aux)
10052 v.reset(OpConstBool)
10053 v.AuxInt = boolToAuxInt(x == y && o1 == o2)
10058 // match: (EqPtr (LocalAddr {x} _ _) (LocalAddr {y} _ _))
10059 // result: (ConstBool [x == y])
10061 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
10062 if v_0.Op != OpLocalAddr {
10065 x := auxToSym(v_0.Aux)
10066 if v_1.Op != OpLocalAddr {
10069 y := auxToSym(v_1.Aux)
10070 v.reset(OpConstBool)
10071 v.AuxInt = boolToAuxInt(x == y)
10076 // match: (EqPtr (LocalAddr {x} _ _) (OffPtr [o] (LocalAddr {y} _ _)))
10077 // result: (ConstBool [x == y && o == 0])
10079 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
10080 if v_0.Op != OpLocalAddr {
10083 x := auxToSym(v_0.Aux)
10084 if v_1.Op != OpOffPtr {
10087 o := auxIntToInt64(v_1.AuxInt)
10088 v_1_0 := v_1.Args[0]
10089 if v_1_0.Op != OpLocalAddr {
10092 y := auxToSym(v_1_0.Aux)
10093 v.reset(OpConstBool)
10094 v.AuxInt = boolToAuxInt(x == y && o == 0)
10099 // match: (EqPtr (OffPtr [o1] (LocalAddr {x} _ _)) (OffPtr [o2] (LocalAddr {y} _ _)))
10100 // result: (ConstBool [x == y && o1 == o2])
10102 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
10103 if v_0.Op != OpOffPtr {
10106 o1 := auxIntToInt64(v_0.AuxInt)
10107 v_0_0 := v_0.Args[0]
10108 if v_0_0.Op != OpLocalAddr {
10111 x := auxToSym(v_0_0.Aux)
10112 if v_1.Op != OpOffPtr {
10115 o2 := auxIntToInt64(v_1.AuxInt)
10116 v_1_0 := v_1.Args[0]
10117 if v_1_0.Op != OpLocalAddr {
10120 y := auxToSym(v_1_0.Aux)
10121 v.reset(OpConstBool)
10122 v.AuxInt = boolToAuxInt(x == y && o1 == o2)
10127 // match: (EqPtr (OffPtr [o1] p1) p2)
10128 // cond: isSamePtr(p1, p2)
10129 // result: (ConstBool [o1 == 0])
10131 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
10132 if v_0.Op != OpOffPtr {
10135 o1 := auxIntToInt64(v_0.AuxInt)
10138 if !(isSamePtr(p1, p2)) {
10141 v.reset(OpConstBool)
10142 v.AuxInt = boolToAuxInt(o1 == 0)
10147 // match: (EqPtr (OffPtr [o1] p1) (OffPtr [o2] p2))
10148 // cond: isSamePtr(p1, p2)
10149 // result: (ConstBool [o1 == o2])
10151 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
10152 if v_0.Op != OpOffPtr {
10155 o1 := auxIntToInt64(v_0.AuxInt)
10157 if v_1.Op != OpOffPtr {
10160 o2 := auxIntToInt64(v_1.AuxInt)
10162 if !(isSamePtr(p1, p2)) {
10165 v.reset(OpConstBool)
10166 v.AuxInt = boolToAuxInt(o1 == o2)
10171 // match: (EqPtr (Const32 [c]) (Const32 [d]))
10172 // result: (ConstBool [c == d])
10174 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
10175 if v_0.Op != OpConst32 {
10178 c := auxIntToInt32(v_0.AuxInt)
10179 if v_1.Op != OpConst32 {
10182 d := auxIntToInt32(v_1.AuxInt)
10183 v.reset(OpConstBool)
10184 v.AuxInt = boolToAuxInt(c == d)
10189 // match: (EqPtr (Const64 [c]) (Const64 [d]))
10190 // result: (ConstBool [c == d])
10192 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
10193 if v_0.Op != OpConst64 {
10196 c := auxIntToInt64(v_0.AuxInt)
10197 if v_1.Op != OpConst64 {
10200 d := auxIntToInt64(v_1.AuxInt)
10201 v.reset(OpConstBool)
10202 v.AuxInt = boolToAuxInt(c == d)
10207 // match: (EqPtr (Convert (Addr {x} _) _) (Addr {y} _))
10208 // result: (ConstBool [x==y])
10210 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
10211 if v_0.Op != OpConvert {
10214 v_0_0 := v_0.Args[0]
10215 if v_0_0.Op != OpAddr {
10218 x := auxToSym(v_0_0.Aux)
10219 if v_1.Op != OpAddr {
10222 y := auxToSym(v_1.Aux)
10223 v.reset(OpConstBool)
10224 v.AuxInt = boolToAuxInt(x == y)
10229 // match: (EqPtr (LocalAddr _ _) (Addr _))
10230 // result: (ConstBool [false])
10232 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
10233 if v_0.Op != OpLocalAddr || v_1.Op != OpAddr {
10236 v.reset(OpConstBool)
10237 v.AuxInt = boolToAuxInt(false)
10242 // match: (EqPtr (OffPtr (LocalAddr _ _)) (Addr _))
10243 // result: (ConstBool [false])
10245 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
10246 if v_0.Op != OpOffPtr {
10249 v_0_0 := v_0.Args[0]
10250 if v_0_0.Op != OpLocalAddr || v_1.Op != OpAddr {
10253 v.reset(OpConstBool)
10254 v.AuxInt = boolToAuxInt(false)
10259 // match: (EqPtr (LocalAddr _ _) (OffPtr (Addr _)))
10260 // result: (ConstBool [false])
10262 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
10263 if v_0.Op != OpLocalAddr || v_1.Op != OpOffPtr {
10266 v_1_0 := v_1.Args[0]
10267 if v_1_0.Op != OpAddr {
10270 v.reset(OpConstBool)
10271 v.AuxInt = boolToAuxInt(false)
10276 // match: (EqPtr (OffPtr (LocalAddr _ _)) (OffPtr (Addr _)))
10277 // result: (ConstBool [false])
10279 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
10280 if v_0.Op != OpOffPtr {
10283 v_0_0 := v_0.Args[0]
10284 if v_0_0.Op != OpLocalAddr || v_1.Op != OpOffPtr {
10287 v_1_0 := v_1.Args[0]
10288 if v_1_0.Op != OpAddr {
10291 v.reset(OpConstBool)
10292 v.AuxInt = boolToAuxInt(false)
10297 // match: (EqPtr (AddPtr p1 o1) p2)
10298 // cond: isSamePtr(p1, p2)
10299 // result: (Not (IsNonNil o1))
10301 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
10302 if v_0.Op != OpAddPtr {
10308 if !(isSamePtr(p1, p2)) {
10312 v0 := b.NewValue0(v.Pos, OpIsNonNil, typ.Bool)
10319 // match: (EqPtr (Const32 [0]) p)
10320 // result: (Not (IsNonNil p))
10322 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
10323 if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
10328 v0 := b.NewValue0(v.Pos, OpIsNonNil, typ.Bool)
10335 // match: (EqPtr (Const64 [0]) p)
10336 // result: (Not (IsNonNil p))
10338 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
10339 if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
10344 v0 := b.NewValue0(v.Pos, OpIsNonNil, typ.Bool)
10351 // match: (EqPtr (ConstNil) p)
10352 // result: (Not (IsNonNil p))
10354 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
10355 if v_0.Op != OpConstNil {
10360 v0 := b.NewValue0(v.Pos, OpIsNonNil, typ.Bool)
10369 func rewriteValuegeneric_OpEqSlice(v *Value) bool {
10373 typ := &b.Func.Config.Types
10374 // match: (EqSlice x y)
10375 // result: (EqPtr (SlicePtr x) (SlicePtr y))
10380 v0 := b.NewValue0(v.Pos, OpSlicePtr, typ.BytePtr)
10382 v1 := b.NewValue0(v.Pos, OpSlicePtr, typ.BytePtr)
10388 func rewriteValuegeneric_OpFloor(v *Value) bool {
10390 // match: (Floor (Const64F [c]))
10391 // result: (Const64F [math.Floor(c)])
10393 if v_0.Op != OpConst64F {
10396 c := auxIntToFloat64(v_0.AuxInt)
10397 v.reset(OpConst64F)
10398 v.AuxInt = float64ToAuxInt(math.Floor(c))
10403 func rewriteValuegeneric_OpIMake(v *Value) bool {
10406 // match: (IMake _typ (StructMake1 val))
10407 // result: (IMake _typ val)
10410 if v_1.Op != OpStructMake1 {
10415 v.AddArg2(_typ, val)
10418 // match: (IMake _typ (ArrayMake1 val))
10419 // result: (IMake _typ val)
10422 if v_1.Op != OpArrayMake1 {
10427 v.AddArg2(_typ, val)
10432 func rewriteValuegeneric_OpInterLECall(v *Value) bool {
10433 // match: (InterLECall [argsize] {auxCall} (Addr {fn} (SB)) ___)
10434 // result: devirtLECall(v, fn.(*obj.LSym))
10436 if len(v.Args) < 1 {
10440 if v_0.Op != OpAddr {
10443 fn := auxToSym(v_0.Aux)
10444 v_0_0 := v_0.Args[0]
10445 if v_0_0.Op != OpSB {
10448 v.copyOf(devirtLECall(v, fn.(*obj.LSym)))
10453 func rewriteValuegeneric_OpIsInBounds(v *Value) bool {
10456 // match: (IsInBounds (ZeroExt8to32 _) (Const32 [c]))
10457 // cond: (1 << 8) <= c
10458 // result: (ConstBool [true])
10460 if v_0.Op != OpZeroExt8to32 || v_1.Op != OpConst32 {
10463 c := auxIntToInt32(v_1.AuxInt)
10464 if !((1 << 8) <= c) {
10467 v.reset(OpConstBool)
10468 v.AuxInt = boolToAuxInt(true)
10471 // match: (IsInBounds (ZeroExt8to64 _) (Const64 [c]))
10472 // cond: (1 << 8) <= c
10473 // result: (ConstBool [true])
10475 if v_0.Op != OpZeroExt8to64 || v_1.Op != OpConst64 {
10478 c := auxIntToInt64(v_1.AuxInt)
10479 if !((1 << 8) <= c) {
10482 v.reset(OpConstBool)
10483 v.AuxInt = boolToAuxInt(true)
10486 // match: (IsInBounds (ZeroExt16to32 _) (Const32 [c]))
10487 // cond: (1 << 16) <= c
10488 // result: (ConstBool [true])
10490 if v_0.Op != OpZeroExt16to32 || v_1.Op != OpConst32 {
10493 c := auxIntToInt32(v_1.AuxInt)
10494 if !((1 << 16) <= c) {
10497 v.reset(OpConstBool)
10498 v.AuxInt = boolToAuxInt(true)
10501 // match: (IsInBounds (ZeroExt16to64 _) (Const64 [c]))
10502 // cond: (1 << 16) <= c
10503 // result: (ConstBool [true])
10505 if v_0.Op != OpZeroExt16to64 || v_1.Op != OpConst64 {
10508 c := auxIntToInt64(v_1.AuxInt)
10509 if !((1 << 16) <= c) {
10512 v.reset(OpConstBool)
10513 v.AuxInt = boolToAuxInt(true)
10516 // match: (IsInBounds x x)
10517 // result: (ConstBool [false])
10523 v.reset(OpConstBool)
10524 v.AuxInt = boolToAuxInt(false)
10527 // match: (IsInBounds (And8 (Const8 [c]) _) (Const8 [d]))
10528 // cond: 0 <= c && c < d
10529 // result: (ConstBool [true])
10531 if v_0.Op != OpAnd8 {
10534 v_0_0 := v_0.Args[0]
10535 v_0_1 := v_0.Args[1]
10536 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
10537 if v_0_0.Op != OpConst8 {
10540 c := auxIntToInt8(v_0_0.AuxInt)
10541 if v_1.Op != OpConst8 {
10544 d := auxIntToInt8(v_1.AuxInt)
10545 if !(0 <= c && c < d) {
10548 v.reset(OpConstBool)
10549 v.AuxInt = boolToAuxInt(true)
10554 // match: (IsInBounds (ZeroExt8to16 (And8 (Const8 [c]) _)) (Const16 [d]))
10555 // cond: 0 <= c && int16(c) < d
10556 // result: (ConstBool [true])
10558 if v_0.Op != OpZeroExt8to16 {
10561 v_0_0 := v_0.Args[0]
10562 if v_0_0.Op != OpAnd8 {
10565 v_0_0_0 := v_0_0.Args[0]
10566 v_0_0_1 := v_0_0.Args[1]
10567 for _i0 := 0; _i0 <= 1; _i0, v_0_0_0, v_0_0_1 = _i0+1, v_0_0_1, v_0_0_0 {
10568 if v_0_0_0.Op != OpConst8 {
10571 c := auxIntToInt8(v_0_0_0.AuxInt)
10572 if v_1.Op != OpConst16 {
10575 d := auxIntToInt16(v_1.AuxInt)
10576 if !(0 <= c && int16(c) < d) {
10579 v.reset(OpConstBool)
10580 v.AuxInt = boolToAuxInt(true)
10585 // match: (IsInBounds (ZeroExt8to32 (And8 (Const8 [c]) _)) (Const32 [d]))
10586 // cond: 0 <= c && int32(c) < d
10587 // result: (ConstBool [true])
10589 if v_0.Op != OpZeroExt8to32 {
10592 v_0_0 := v_0.Args[0]
10593 if v_0_0.Op != OpAnd8 {
10596 v_0_0_0 := v_0_0.Args[0]
10597 v_0_0_1 := v_0_0.Args[1]
10598 for _i0 := 0; _i0 <= 1; _i0, v_0_0_0, v_0_0_1 = _i0+1, v_0_0_1, v_0_0_0 {
10599 if v_0_0_0.Op != OpConst8 {
10602 c := auxIntToInt8(v_0_0_0.AuxInt)
10603 if v_1.Op != OpConst32 {
10606 d := auxIntToInt32(v_1.AuxInt)
10607 if !(0 <= c && int32(c) < d) {
10610 v.reset(OpConstBool)
10611 v.AuxInt = boolToAuxInt(true)
10616 // match: (IsInBounds (ZeroExt8to64 (And8 (Const8 [c]) _)) (Const64 [d]))
10617 // cond: 0 <= c && int64(c) < d
10618 // result: (ConstBool [true])
10620 if v_0.Op != OpZeroExt8to64 {
10623 v_0_0 := v_0.Args[0]
10624 if v_0_0.Op != OpAnd8 {
10627 v_0_0_0 := v_0_0.Args[0]
10628 v_0_0_1 := v_0_0.Args[1]
10629 for _i0 := 0; _i0 <= 1; _i0, v_0_0_0, v_0_0_1 = _i0+1, v_0_0_1, v_0_0_0 {
10630 if v_0_0_0.Op != OpConst8 {
10633 c := auxIntToInt8(v_0_0_0.AuxInt)
10634 if v_1.Op != OpConst64 {
10637 d := auxIntToInt64(v_1.AuxInt)
10638 if !(0 <= c && int64(c) < d) {
10641 v.reset(OpConstBool)
10642 v.AuxInt = boolToAuxInt(true)
10647 // match: (IsInBounds (And16 (Const16 [c]) _) (Const16 [d]))
10648 // cond: 0 <= c && c < d
10649 // result: (ConstBool [true])
10651 if v_0.Op != OpAnd16 {
10654 v_0_0 := v_0.Args[0]
10655 v_0_1 := v_0.Args[1]
10656 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
10657 if v_0_0.Op != OpConst16 {
10660 c := auxIntToInt16(v_0_0.AuxInt)
10661 if v_1.Op != OpConst16 {
10664 d := auxIntToInt16(v_1.AuxInt)
10665 if !(0 <= c && c < d) {
10668 v.reset(OpConstBool)
10669 v.AuxInt = boolToAuxInt(true)
10674 // match: (IsInBounds (ZeroExt16to32 (And16 (Const16 [c]) _)) (Const32 [d]))
10675 // cond: 0 <= c && int32(c) < d
10676 // result: (ConstBool [true])
10678 if v_0.Op != OpZeroExt16to32 {
10681 v_0_0 := v_0.Args[0]
10682 if v_0_0.Op != OpAnd16 {
10685 v_0_0_0 := v_0_0.Args[0]
10686 v_0_0_1 := v_0_0.Args[1]
10687 for _i0 := 0; _i0 <= 1; _i0, v_0_0_0, v_0_0_1 = _i0+1, v_0_0_1, v_0_0_0 {
10688 if v_0_0_0.Op != OpConst16 {
10691 c := auxIntToInt16(v_0_0_0.AuxInt)
10692 if v_1.Op != OpConst32 {
10695 d := auxIntToInt32(v_1.AuxInt)
10696 if !(0 <= c && int32(c) < d) {
10699 v.reset(OpConstBool)
10700 v.AuxInt = boolToAuxInt(true)
10705 // match: (IsInBounds (ZeroExt16to64 (And16 (Const16 [c]) _)) (Const64 [d]))
10706 // cond: 0 <= c && int64(c) < d
10707 // result: (ConstBool [true])
10709 if v_0.Op != OpZeroExt16to64 {
10712 v_0_0 := v_0.Args[0]
10713 if v_0_0.Op != OpAnd16 {
10716 v_0_0_0 := v_0_0.Args[0]
10717 v_0_0_1 := v_0_0.Args[1]
10718 for _i0 := 0; _i0 <= 1; _i0, v_0_0_0, v_0_0_1 = _i0+1, v_0_0_1, v_0_0_0 {
10719 if v_0_0_0.Op != OpConst16 {
10722 c := auxIntToInt16(v_0_0_0.AuxInt)
10723 if v_1.Op != OpConst64 {
10726 d := auxIntToInt64(v_1.AuxInt)
10727 if !(0 <= c && int64(c) < d) {
10730 v.reset(OpConstBool)
10731 v.AuxInt = boolToAuxInt(true)
10736 // match: (IsInBounds (And32 (Const32 [c]) _) (Const32 [d]))
10737 // cond: 0 <= c && c < d
10738 // result: (ConstBool [true])
10740 if v_0.Op != OpAnd32 {
10743 v_0_0 := v_0.Args[0]
10744 v_0_1 := v_0.Args[1]
10745 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
10746 if v_0_0.Op != OpConst32 {
10749 c := auxIntToInt32(v_0_0.AuxInt)
10750 if v_1.Op != OpConst32 {
10753 d := auxIntToInt32(v_1.AuxInt)
10754 if !(0 <= c && c < d) {
10757 v.reset(OpConstBool)
10758 v.AuxInt = boolToAuxInt(true)
10763 // match: (IsInBounds (ZeroExt32to64 (And32 (Const32 [c]) _)) (Const64 [d]))
10764 // cond: 0 <= c && int64(c) < d
10765 // result: (ConstBool [true])
10767 if v_0.Op != OpZeroExt32to64 {
10770 v_0_0 := v_0.Args[0]
10771 if v_0_0.Op != OpAnd32 {
10774 v_0_0_0 := v_0_0.Args[0]
10775 v_0_0_1 := v_0_0.Args[1]
10776 for _i0 := 0; _i0 <= 1; _i0, v_0_0_0, v_0_0_1 = _i0+1, v_0_0_1, v_0_0_0 {
10777 if v_0_0_0.Op != OpConst32 {
10780 c := auxIntToInt32(v_0_0_0.AuxInt)
10781 if v_1.Op != OpConst64 {
10784 d := auxIntToInt64(v_1.AuxInt)
10785 if !(0 <= c && int64(c) < d) {
10788 v.reset(OpConstBool)
10789 v.AuxInt = boolToAuxInt(true)
10794 // match: (IsInBounds (And64 (Const64 [c]) _) (Const64 [d]))
10795 // cond: 0 <= c && c < d
10796 // result: (ConstBool [true])
10798 if v_0.Op != OpAnd64 {
10801 v_0_0 := v_0.Args[0]
10802 v_0_1 := v_0.Args[1]
10803 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
10804 if v_0_0.Op != OpConst64 {
10807 c := auxIntToInt64(v_0_0.AuxInt)
10808 if v_1.Op != OpConst64 {
10811 d := auxIntToInt64(v_1.AuxInt)
10812 if !(0 <= c && c < d) {
10815 v.reset(OpConstBool)
10816 v.AuxInt = boolToAuxInt(true)
10821 // match: (IsInBounds (Const32 [c]) (Const32 [d]))
10822 // result: (ConstBool [0 <= c && c < d])
10824 if v_0.Op != OpConst32 {
10827 c := auxIntToInt32(v_0.AuxInt)
10828 if v_1.Op != OpConst32 {
10831 d := auxIntToInt32(v_1.AuxInt)
10832 v.reset(OpConstBool)
10833 v.AuxInt = boolToAuxInt(0 <= c && c < d)
10836 // match: (IsInBounds (Const64 [c]) (Const64 [d]))
10837 // result: (ConstBool [0 <= c && c < d])
10839 if v_0.Op != OpConst64 {
10842 c := auxIntToInt64(v_0.AuxInt)
10843 if v_1.Op != OpConst64 {
10846 d := auxIntToInt64(v_1.AuxInt)
10847 v.reset(OpConstBool)
10848 v.AuxInt = boolToAuxInt(0 <= c && c < d)
10851 // match: (IsInBounds (Mod32u _ y) y)
10852 // result: (ConstBool [true])
10854 if v_0.Op != OpMod32u {
10861 v.reset(OpConstBool)
10862 v.AuxInt = boolToAuxInt(true)
10865 // match: (IsInBounds (Mod64u _ y) y)
10866 // result: (ConstBool [true])
10868 if v_0.Op != OpMod64u {
10875 v.reset(OpConstBool)
10876 v.AuxInt = boolToAuxInt(true)
10879 // match: (IsInBounds (ZeroExt8to64 (Rsh8Ux64 _ (Const64 [c]))) (Const64 [d]))
10880 // cond: 0 < c && c < 8 && 1<<uint( 8-c)-1 < d
10881 // result: (ConstBool [true])
10883 if v_0.Op != OpZeroExt8to64 {
10886 v_0_0 := v_0.Args[0]
10887 if v_0_0.Op != OpRsh8Ux64 {
10891 v_0_0_1 := v_0_0.Args[1]
10892 if v_0_0_1.Op != OpConst64 {
10895 c := auxIntToInt64(v_0_0_1.AuxInt)
10896 if v_1.Op != OpConst64 {
10899 d := auxIntToInt64(v_1.AuxInt)
10900 if !(0 < c && c < 8 && 1<<uint(8-c)-1 < d) {
10903 v.reset(OpConstBool)
10904 v.AuxInt = boolToAuxInt(true)
10907 // match: (IsInBounds (ZeroExt8to32 (Rsh8Ux64 _ (Const64 [c]))) (Const32 [d]))
10908 // cond: 0 < c && c < 8 && 1<<uint( 8-c)-1 < d
10909 // result: (ConstBool [true])
10911 if v_0.Op != OpZeroExt8to32 {
10914 v_0_0 := v_0.Args[0]
10915 if v_0_0.Op != OpRsh8Ux64 {
10919 v_0_0_1 := v_0_0.Args[1]
10920 if v_0_0_1.Op != OpConst64 {
10923 c := auxIntToInt64(v_0_0_1.AuxInt)
10924 if v_1.Op != OpConst32 {
10927 d := auxIntToInt32(v_1.AuxInt)
10928 if !(0 < c && c < 8 && 1<<uint(8-c)-1 < d) {
10931 v.reset(OpConstBool)
10932 v.AuxInt = boolToAuxInt(true)
10935 // match: (IsInBounds (ZeroExt8to16 (Rsh8Ux64 _ (Const64 [c]))) (Const16 [d]))
10936 // cond: 0 < c && c < 8 && 1<<uint( 8-c)-1 < d
10937 // result: (ConstBool [true])
10939 if v_0.Op != OpZeroExt8to16 {
10942 v_0_0 := v_0.Args[0]
10943 if v_0_0.Op != OpRsh8Ux64 {
10947 v_0_0_1 := v_0_0.Args[1]
10948 if v_0_0_1.Op != OpConst64 {
10951 c := auxIntToInt64(v_0_0_1.AuxInt)
10952 if v_1.Op != OpConst16 {
10955 d := auxIntToInt16(v_1.AuxInt)
10956 if !(0 < c && c < 8 && 1<<uint(8-c)-1 < d) {
10959 v.reset(OpConstBool)
10960 v.AuxInt = boolToAuxInt(true)
10963 // match: (IsInBounds (Rsh8Ux64 _ (Const64 [c])) (Const64 [d]))
10964 // cond: 0 < c && c < 8 && 1<<uint( 8-c)-1 < d
10965 // result: (ConstBool [true])
10967 if v_0.Op != OpRsh8Ux64 {
10971 v_0_1 := v_0.Args[1]
10972 if v_0_1.Op != OpConst64 {
10975 c := auxIntToInt64(v_0_1.AuxInt)
10976 if v_1.Op != OpConst64 {
10979 d := auxIntToInt64(v_1.AuxInt)
10980 if !(0 < c && c < 8 && 1<<uint(8-c)-1 < d) {
10983 v.reset(OpConstBool)
10984 v.AuxInt = boolToAuxInt(true)
10987 // match: (IsInBounds (ZeroExt16to64 (Rsh16Ux64 _ (Const64 [c]))) (Const64 [d]))
10988 // cond: 0 < c && c < 16 && 1<<uint(16-c)-1 < d
10989 // result: (ConstBool [true])
10991 if v_0.Op != OpZeroExt16to64 {
10994 v_0_0 := v_0.Args[0]
10995 if v_0_0.Op != OpRsh16Ux64 {
10999 v_0_0_1 := v_0_0.Args[1]
11000 if v_0_0_1.Op != OpConst64 {
11003 c := auxIntToInt64(v_0_0_1.AuxInt)
11004 if v_1.Op != OpConst64 {
11007 d := auxIntToInt64(v_1.AuxInt)
11008 if !(0 < c && c < 16 && 1<<uint(16-c)-1 < d) {
11011 v.reset(OpConstBool)
11012 v.AuxInt = boolToAuxInt(true)
11015 // match: (IsInBounds (ZeroExt16to32 (Rsh16Ux64 _ (Const64 [c]))) (Const64 [d]))
11016 // cond: 0 < c && c < 16 && 1<<uint(16-c)-1 < d
11017 // result: (ConstBool [true])
11019 if v_0.Op != OpZeroExt16to32 {
11022 v_0_0 := v_0.Args[0]
11023 if v_0_0.Op != OpRsh16Ux64 {
11027 v_0_0_1 := v_0_0.Args[1]
11028 if v_0_0_1.Op != OpConst64 {
11031 c := auxIntToInt64(v_0_0_1.AuxInt)
11032 if v_1.Op != OpConst64 {
11035 d := auxIntToInt64(v_1.AuxInt)
11036 if !(0 < c && c < 16 && 1<<uint(16-c)-1 < d) {
11039 v.reset(OpConstBool)
11040 v.AuxInt = boolToAuxInt(true)
11043 // match: (IsInBounds (Rsh16Ux64 _ (Const64 [c])) (Const64 [d]))
11044 // cond: 0 < c && c < 16 && 1<<uint(16-c)-1 < d
11045 // result: (ConstBool [true])
11047 if v_0.Op != OpRsh16Ux64 {
11051 v_0_1 := v_0.Args[1]
11052 if v_0_1.Op != OpConst64 {
11055 c := auxIntToInt64(v_0_1.AuxInt)
11056 if v_1.Op != OpConst64 {
11059 d := auxIntToInt64(v_1.AuxInt)
11060 if !(0 < c && c < 16 && 1<<uint(16-c)-1 < d) {
11063 v.reset(OpConstBool)
11064 v.AuxInt = boolToAuxInt(true)
11067 // match: (IsInBounds (ZeroExt32to64 (Rsh32Ux64 _ (Const64 [c]))) (Const64 [d]))
11068 // cond: 0 < c && c < 32 && 1<<uint(32-c)-1 < d
11069 // result: (ConstBool [true])
11071 if v_0.Op != OpZeroExt32to64 {
11074 v_0_0 := v_0.Args[0]
11075 if v_0_0.Op != OpRsh32Ux64 {
11079 v_0_0_1 := v_0_0.Args[1]
11080 if v_0_0_1.Op != OpConst64 {
11083 c := auxIntToInt64(v_0_0_1.AuxInt)
11084 if v_1.Op != OpConst64 {
11087 d := auxIntToInt64(v_1.AuxInt)
11088 if !(0 < c && c < 32 && 1<<uint(32-c)-1 < d) {
11091 v.reset(OpConstBool)
11092 v.AuxInt = boolToAuxInt(true)
11095 // match: (IsInBounds (Rsh32Ux64 _ (Const64 [c])) (Const64 [d]))
11096 // cond: 0 < c && c < 32 && 1<<uint(32-c)-1 < d
11097 // result: (ConstBool [true])
11099 if v_0.Op != OpRsh32Ux64 {
11103 v_0_1 := v_0.Args[1]
11104 if v_0_1.Op != OpConst64 {
11107 c := auxIntToInt64(v_0_1.AuxInt)
11108 if v_1.Op != OpConst64 {
11111 d := auxIntToInt64(v_1.AuxInt)
11112 if !(0 < c && c < 32 && 1<<uint(32-c)-1 < d) {
11115 v.reset(OpConstBool)
11116 v.AuxInt = boolToAuxInt(true)
11119 // match: (IsInBounds (Rsh64Ux64 _ (Const64 [c])) (Const64 [d]))
11120 // cond: 0 < c && c < 64 && 1<<uint(64-c)-1 < d
11121 // result: (ConstBool [true])
11123 if v_0.Op != OpRsh64Ux64 {
11127 v_0_1 := v_0.Args[1]
11128 if v_0_1.Op != OpConst64 {
11131 c := auxIntToInt64(v_0_1.AuxInt)
11132 if v_1.Op != OpConst64 {
11135 d := auxIntToInt64(v_1.AuxInt)
11136 if !(0 < c && c < 64 && 1<<uint(64-c)-1 < d) {
11139 v.reset(OpConstBool)
11140 v.AuxInt = boolToAuxInt(true)
11145 func rewriteValuegeneric_OpIsNonNil(v *Value) bool {
11147 // match: (IsNonNil (ConstNil))
11148 // result: (ConstBool [false])
11150 if v_0.Op != OpConstNil {
11153 v.reset(OpConstBool)
11154 v.AuxInt = boolToAuxInt(false)
11157 // match: (IsNonNil (Const32 [c]))
11158 // result: (ConstBool [c != 0])
11160 if v_0.Op != OpConst32 {
11163 c := auxIntToInt32(v_0.AuxInt)
11164 v.reset(OpConstBool)
11165 v.AuxInt = boolToAuxInt(c != 0)
11168 // match: (IsNonNil (Const64 [c]))
11169 // result: (ConstBool [c != 0])
11171 if v_0.Op != OpConst64 {
11174 c := auxIntToInt64(v_0.AuxInt)
11175 v.reset(OpConstBool)
11176 v.AuxInt = boolToAuxInt(c != 0)
11179 // match: (IsNonNil (Addr _) )
11180 // result: (ConstBool [true])
11182 if v_0.Op != OpAddr {
11185 v.reset(OpConstBool)
11186 v.AuxInt = boolToAuxInt(true)
11189 // match: (IsNonNil (Convert (Addr _) _))
11190 // result: (ConstBool [true])
11192 if v_0.Op != OpConvert {
11195 v_0_0 := v_0.Args[0]
11196 if v_0_0.Op != OpAddr {
11199 v.reset(OpConstBool)
11200 v.AuxInt = boolToAuxInt(true)
11203 // match: (IsNonNil (LocalAddr _ _))
11204 // result: (ConstBool [true])
11206 if v_0.Op != OpLocalAddr {
11209 v.reset(OpConstBool)
11210 v.AuxInt = boolToAuxInt(true)
// rewriteValuegeneric_OpIsSliceInBounds folds slice bounds checks that are
// statically decidable: x<=x, masked (And) values vs. constants, a constant 0
// index, two constants, and (SliceLen x)<=(SliceCap x).
// Generated from _gen/generic.rules; regenerate with 'go generate' — do not hand-edit.
11215 func rewriteValuegeneric_OpIsSliceInBounds(v *Value) bool {
11218 // match: (IsSliceInBounds x x)
11219 // result: (ConstBool [true])
11225 v.reset(OpConstBool)
11226 v.AuxInt = boolToAuxInt(true)
11229 // match: (IsSliceInBounds (And32 (Const32 [c]) _) (Const32 [d]))
11230 // cond: 0 <= c && c <= d
11231 // result: (ConstBool [true])
11233 if v_0.Op != OpAnd32 {
11236 v_0_0 := v_0.Args[0]
11237 v_0_1 := v_0.Args[1]
11238 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
11239 if v_0_0.Op != OpConst32 {
11242 c := auxIntToInt32(v_0_0.AuxInt)
11243 if v_1.Op != OpConst32 {
11246 d := auxIntToInt32(v_1.AuxInt)
11247 if !(0 <= c && c <= d) {
11250 v.reset(OpConstBool)
11251 v.AuxInt = boolToAuxInt(true)
11256 // match: (IsSliceInBounds (And64 (Const64 [c]) _) (Const64 [d]))
11257 // cond: 0 <= c && c <= d
11258 // result: (ConstBool [true])
11260 if v_0.Op != OpAnd64 {
11263 v_0_0 := v_0.Args[0]
11264 v_0_1 := v_0.Args[1]
11265 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
11266 if v_0_0.Op != OpConst64 {
11269 c := auxIntToInt64(v_0_0.AuxInt)
11270 if v_1.Op != OpConst64 {
11273 d := auxIntToInt64(v_1.AuxInt)
11274 if !(0 <= c && c <= d) {
11277 v.reset(OpConstBool)
11278 v.AuxInt = boolToAuxInt(true)
11283 // match: (IsSliceInBounds (Const32 [0]) _)
11284 // result: (ConstBool [true])
11286 if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
11289 v.reset(OpConstBool)
11290 v.AuxInt = boolToAuxInt(true)
11293 // match: (IsSliceInBounds (Const64 [0]) _)
11294 // result: (ConstBool [true])
11296 if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
11299 v.reset(OpConstBool)
11300 v.AuxInt = boolToAuxInt(true)
11303 // match: (IsSliceInBounds (Const32 [c]) (Const32 [d]))
11304 // result: (ConstBool [0 <= c && c <= d])
11306 if v_0.Op != OpConst32 {
11309 c := auxIntToInt32(v_0.AuxInt)
11310 if v_1.Op != OpConst32 {
11313 d := auxIntToInt32(v_1.AuxInt)
11314 v.reset(OpConstBool)
11315 v.AuxInt = boolToAuxInt(0 <= c && c <= d)
11318 // match: (IsSliceInBounds (Const64 [c]) (Const64 [d]))
11319 // result: (ConstBool [0 <= c && c <= d])
11321 if v_0.Op != OpConst64 {
11324 c := auxIntToInt64(v_0.AuxInt)
11325 if v_1.Op != OpConst64 {
11328 d := auxIntToInt64(v_1.AuxInt)
11329 v.reset(OpConstBool)
11330 v.AuxInt = boolToAuxInt(0 <= c && c <= d)
11333 // match: (IsSliceInBounds (SliceLen x) (SliceCap x))
11334 // result: (ConstBool [true])
11336 if v_0.Op != OpSliceLen {
11340 if v_1.Op != OpSliceCap || x != v_1.Args[0] {
11343 v.reset(OpConstBool)
11344 v.AuxInt = boolToAuxInt(true)
// rewriteValuegeneric_OpLeq16 rewrites signed 16-bit <=: folds two constants,
// folds 0 <= (x & c) and 0 <= (x >> c) to true, and canonicalizes x <= -1 and
// 1 <= x into strict Less16 comparisons against 0.
// Generated from _gen/generic.rules; regenerate with 'go generate' — do not hand-edit.
11349 func rewriteValuegeneric_OpLeq16(v *Value) bool {
11353 // match: (Leq16 (Const16 [c]) (Const16 [d]))
11354 // result: (ConstBool [c <= d])
11356 if v_0.Op != OpConst16 {
11359 c := auxIntToInt16(v_0.AuxInt)
11360 if v_1.Op != OpConst16 {
11363 d := auxIntToInt16(v_1.AuxInt)
11364 v.reset(OpConstBool)
11365 v.AuxInt = boolToAuxInt(c <= d)
11368 // match: (Leq16 (Const16 [0]) (And16 _ (Const16 [c])))
11370 // result: (ConstBool [true])
11372 if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 || v_1.Op != OpAnd16 {
11376 v_1_0 := v_1.Args[0]
11377 v_1_1 := v_1.Args[1]
11378 for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
11379 if v_1_1.Op != OpConst16 {
11382 c := auxIntToInt16(v_1_1.AuxInt)
11386 v.reset(OpConstBool)
11387 v.AuxInt = boolToAuxInt(true)
11392 // match: (Leq16 (Const16 [0]) (Rsh16Ux64 _ (Const64 [c])))
11394 // result: (ConstBool [true])
11396 if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 || v_1.Op != OpRsh16Ux64 {
11400 v_1_1 := v_1.Args[1]
11401 if v_1_1.Op != OpConst64 {
11404 c := auxIntToInt64(v_1_1.AuxInt)
11408 v.reset(OpConstBool)
11409 v.AuxInt = boolToAuxInt(true)
11412 // match: (Leq16 x (Const16 <t> [-1]))
11413 // result: (Less16 x (Const16 <t> [0]))
11416 if v_1.Op != OpConst16 {
11420 if auxIntToInt16(v_1.AuxInt) != -1 {
11424 v0 := b.NewValue0(v.Pos, OpConst16, t)
11425 v0.AuxInt = int16ToAuxInt(0)
11429 // match: (Leq16 (Const16 <t> [1]) x)
11430 // result: (Less16 (Const16 <t> [0]) x)
11432 if v_0.Op != OpConst16 {
11436 if auxIntToInt16(v_0.AuxInt) != 1 {
11441 v0 := b.NewValue0(v.Pos, OpConst16, t)
11442 v0.AuxInt = int16ToAuxInt(0)
// rewriteValuegeneric_OpLeq16U rewrites unsigned 16-bit <=: folds two
// constants, turns 1 <= x into x != 0, and folds 0 <= _ to true.
// Generated from _gen/generic.rules; regenerate with 'go generate' — do not hand-edit.
11448 func rewriteValuegeneric_OpLeq16U(v *Value) bool {
11452 // match: (Leq16U (Const16 [c]) (Const16 [d]))
11453 // result: (ConstBool [uint16(c) <= uint16(d)])
11455 if v_0.Op != OpConst16 {
11458 c := auxIntToInt16(v_0.AuxInt)
11459 if v_1.Op != OpConst16 {
11462 d := auxIntToInt16(v_1.AuxInt)
11463 v.reset(OpConstBool)
11464 v.AuxInt = boolToAuxInt(uint16(c) <= uint16(d))
11467 // match: (Leq16U (Const16 <t> [1]) x)
11468 // result: (Neq16 (Const16 <t> [0]) x)
11470 if v_0.Op != OpConst16 {
11474 if auxIntToInt16(v_0.AuxInt) != 1 {
11479 v0 := b.NewValue0(v.Pos, OpConst16, t)
11480 v0.AuxInt = int16ToAuxInt(0)
11484 // match: (Leq16U (Const16 [0]) _)
11485 // result: (ConstBool [true])
11487 if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
11490 v.reset(OpConstBool)
11491 v.AuxInt = boolToAuxInt(true)
// rewriteValuegeneric_OpLeq32 rewrites signed 32-bit <=: folds two constants,
// folds 0 <= (x & c) and 0 <= (x >> c) to true, and canonicalizes x <= -1 and
// 1 <= x into strict Less32 comparisons against 0.
// Generated from _gen/generic.rules; regenerate with 'go generate' — do not hand-edit.
11496 func rewriteValuegeneric_OpLeq32(v *Value) bool {
11500 // match: (Leq32 (Const32 [c]) (Const32 [d]))
11501 // result: (ConstBool [c <= d])
11503 if v_0.Op != OpConst32 {
11506 c := auxIntToInt32(v_0.AuxInt)
11507 if v_1.Op != OpConst32 {
11510 d := auxIntToInt32(v_1.AuxInt)
11511 v.reset(OpConstBool)
11512 v.AuxInt = boolToAuxInt(c <= d)
11515 // match: (Leq32 (Const32 [0]) (And32 _ (Const32 [c])))
11517 // result: (ConstBool [true])
11519 if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 || v_1.Op != OpAnd32 {
11523 v_1_0 := v_1.Args[0]
11524 v_1_1 := v_1.Args[1]
11525 for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
11526 if v_1_1.Op != OpConst32 {
11529 c := auxIntToInt32(v_1_1.AuxInt)
11533 v.reset(OpConstBool)
11534 v.AuxInt = boolToAuxInt(true)
11539 // match: (Leq32 (Const32 [0]) (Rsh32Ux64 _ (Const64 [c])))
11541 // result: (ConstBool [true])
11543 if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 || v_1.Op != OpRsh32Ux64 {
11547 v_1_1 := v_1.Args[1]
11548 if v_1_1.Op != OpConst64 {
11551 c := auxIntToInt64(v_1_1.AuxInt)
11555 v.reset(OpConstBool)
11556 v.AuxInt = boolToAuxInt(true)
11559 // match: (Leq32 x (Const32 <t> [-1]))
11560 // result: (Less32 x (Const32 <t> [0]))
11563 if v_1.Op != OpConst32 {
11567 if auxIntToInt32(v_1.AuxInt) != -1 {
11571 v0 := b.NewValue0(v.Pos, OpConst32, t)
11572 v0.AuxInt = int32ToAuxInt(0)
11576 // match: (Leq32 (Const32 <t> [1]) x)
11577 // result: (Less32 (Const32 <t> [0]) x)
11579 if v_0.Op != OpConst32 {
11583 if auxIntToInt32(v_0.AuxInt) != 1 {
11588 v0 := b.NewValue0(v.Pos, OpConst32, t)
11589 v0.AuxInt = int32ToAuxInt(0)
// rewriteValuegeneric_OpLeq32F constant-folds float32 <= when both operands
// are Const32F; this is the only rule for Leq32F visible here.
// Generated from _gen/generic.rules; regenerate with 'go generate' — do not hand-edit.
11595 func rewriteValuegeneric_OpLeq32F(v *Value) bool {
11598 // match: (Leq32F (Const32F [c]) (Const32F [d]))
11599 // result: (ConstBool [c <= d])
11601 if v_0.Op != OpConst32F {
11604 c := auxIntToFloat32(v_0.AuxInt)
11605 if v_1.Op != OpConst32F {
11608 d := auxIntToFloat32(v_1.AuxInt)
11609 v.reset(OpConstBool)
11610 v.AuxInt = boolToAuxInt(c <= d)
// rewriteValuegeneric_OpLeq32U rewrites unsigned 32-bit <=: folds two
// constants, turns 1 <= x into x != 0, and folds 0 <= _ to true.
// Generated from _gen/generic.rules; regenerate with 'go generate' — do not hand-edit.
11615 func rewriteValuegeneric_OpLeq32U(v *Value) bool {
11619 // match: (Leq32U (Const32 [c]) (Const32 [d]))
11620 // result: (ConstBool [uint32(c) <= uint32(d)])
11622 if v_0.Op != OpConst32 {
11625 c := auxIntToInt32(v_0.AuxInt)
11626 if v_1.Op != OpConst32 {
11629 d := auxIntToInt32(v_1.AuxInt)
11630 v.reset(OpConstBool)
11631 v.AuxInt = boolToAuxInt(uint32(c) <= uint32(d))
11634 // match: (Leq32U (Const32 <t> [1]) x)
11635 // result: (Neq32 (Const32 <t> [0]) x)
11637 if v_0.Op != OpConst32 {
11641 if auxIntToInt32(v_0.AuxInt) != 1 {
11646 v0 := b.NewValue0(v.Pos, OpConst32, t)
11647 v0.AuxInt = int32ToAuxInt(0)
11651 // match: (Leq32U (Const32 [0]) _)
11652 // result: (ConstBool [true])
11654 if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
11657 v.reset(OpConstBool)
11658 v.AuxInt = boolToAuxInt(true)
// rewriteValuegeneric_OpLeq64 rewrites signed 64-bit <=: folds two constants,
// folds 0 <= (x & c) and 0 <= (x >> c) to true, and canonicalizes x <= -1 and
// 1 <= x into strict Less64 comparisons against 0.
// Generated from _gen/generic.rules; regenerate with 'go generate' — do not hand-edit.
11663 func rewriteValuegeneric_OpLeq64(v *Value) bool {
11667 // match: (Leq64 (Const64 [c]) (Const64 [d]))
11668 // result: (ConstBool [c <= d])
11670 if v_0.Op != OpConst64 {
11673 c := auxIntToInt64(v_0.AuxInt)
11674 if v_1.Op != OpConst64 {
11677 d := auxIntToInt64(v_1.AuxInt)
11678 v.reset(OpConstBool)
11679 v.AuxInt = boolToAuxInt(c <= d)
11682 // match: (Leq64 (Const64 [0]) (And64 _ (Const64 [c])))
11684 // result: (ConstBool [true])
11686 if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 || v_1.Op != OpAnd64 {
11690 v_1_0 := v_1.Args[0]
11691 v_1_1 := v_1.Args[1]
11692 for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
11693 if v_1_1.Op != OpConst64 {
11696 c := auxIntToInt64(v_1_1.AuxInt)
11700 v.reset(OpConstBool)
11701 v.AuxInt = boolToAuxInt(true)
11706 // match: (Leq64 (Const64 [0]) (Rsh64Ux64 _ (Const64 [c])))
11708 // result: (ConstBool [true])
11710 if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 || v_1.Op != OpRsh64Ux64 {
11714 v_1_1 := v_1.Args[1]
11715 if v_1_1.Op != OpConst64 {
11718 c := auxIntToInt64(v_1_1.AuxInt)
11722 v.reset(OpConstBool)
11723 v.AuxInt = boolToAuxInt(true)
11726 // match: (Leq64 x (Const64 <t> [-1]))
11727 // result: (Less64 x (Const64 <t> [0]))
11730 if v_1.Op != OpConst64 {
11734 if auxIntToInt64(v_1.AuxInt) != -1 {
11738 v0 := b.NewValue0(v.Pos, OpConst64, t)
11739 v0.AuxInt = int64ToAuxInt(0)
11743 // match: (Leq64 (Const64 <t> [1]) x)
11744 // result: (Less64 (Const64 <t> [0]) x)
11746 if v_0.Op != OpConst64 {
11750 if auxIntToInt64(v_0.AuxInt) != 1 {
11755 v0 := b.NewValue0(v.Pos, OpConst64, t)
11756 v0.AuxInt = int64ToAuxInt(0)
// rewriteValuegeneric_OpLeq64F constant-folds float64 <= when both operands
// are Const64F; this is the only rule for Leq64F visible here.
// Generated from _gen/generic.rules; regenerate with 'go generate' — do not hand-edit.
11762 func rewriteValuegeneric_OpLeq64F(v *Value) bool {
11765 // match: (Leq64F (Const64F [c]) (Const64F [d]))
11766 // result: (ConstBool [c <= d])
11768 if v_0.Op != OpConst64F {
11771 c := auxIntToFloat64(v_0.AuxInt)
11772 if v_1.Op != OpConst64F {
11775 d := auxIntToFloat64(v_1.AuxInt)
11776 v.reset(OpConstBool)
11777 v.AuxInt = boolToAuxInt(c <= d)
// rewriteValuegeneric_OpLeq64U rewrites unsigned 64-bit <=: folds two
// constants, turns 1 <= x into x != 0, and folds 0 <= _ to true.
// Generated from _gen/generic.rules; regenerate with 'go generate' — do not hand-edit.
11782 func rewriteValuegeneric_OpLeq64U(v *Value) bool {
11786 // match: (Leq64U (Const64 [c]) (Const64 [d]))
11787 // result: (ConstBool [uint64(c) <= uint64(d)])
11789 if v_0.Op != OpConst64 {
11792 c := auxIntToInt64(v_0.AuxInt)
11793 if v_1.Op != OpConst64 {
11796 d := auxIntToInt64(v_1.AuxInt)
11797 v.reset(OpConstBool)
11798 v.AuxInt = boolToAuxInt(uint64(c) <= uint64(d))
11801 // match: (Leq64U (Const64 <t> [1]) x)
11802 // result: (Neq64 (Const64 <t> [0]) x)
11804 if v_0.Op != OpConst64 {
11808 if auxIntToInt64(v_0.AuxInt) != 1 {
11813 v0 := b.NewValue0(v.Pos, OpConst64, t)
11814 v0.AuxInt = int64ToAuxInt(0)
11818 // match: (Leq64U (Const64 [0]) _)
11819 // result: (ConstBool [true])
11821 if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
11824 v.reset(OpConstBool)
11825 v.AuxInt = boolToAuxInt(true)
// rewriteValuegeneric_OpLeq8 rewrites signed 8-bit <=: folds two constants,
// folds 0 <= (x & c) and 0 <= (x >> c) to true, and canonicalizes x <= -1 and
// 1 <= x into strict Less8 comparisons against 0.
// Generated from _gen/generic.rules; regenerate with 'go generate' — do not hand-edit.
11830 func rewriteValuegeneric_OpLeq8(v *Value) bool {
11834 // match: (Leq8 (Const8 [c]) (Const8 [d]))
11835 // result: (ConstBool [c <= d])
11837 if v_0.Op != OpConst8 {
11840 c := auxIntToInt8(v_0.AuxInt)
11841 if v_1.Op != OpConst8 {
11844 d := auxIntToInt8(v_1.AuxInt)
11845 v.reset(OpConstBool)
11846 v.AuxInt = boolToAuxInt(c <= d)
11849 // match: (Leq8 (Const8 [0]) (And8 _ (Const8 [c])))
11851 // result: (ConstBool [true])
11853 if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 || v_1.Op != OpAnd8 {
11857 v_1_0 := v_1.Args[0]
11858 v_1_1 := v_1.Args[1]
11859 for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
11860 if v_1_1.Op != OpConst8 {
11863 c := auxIntToInt8(v_1_1.AuxInt)
11867 v.reset(OpConstBool)
11868 v.AuxInt = boolToAuxInt(true)
11873 // match: (Leq8 (Const8 [0]) (Rsh8Ux64 _ (Const64 [c])))
11875 // result: (ConstBool [true])
11877 if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 || v_1.Op != OpRsh8Ux64 {
11881 v_1_1 := v_1.Args[1]
11882 if v_1_1.Op != OpConst64 {
11885 c := auxIntToInt64(v_1_1.AuxInt)
11889 v.reset(OpConstBool)
11890 v.AuxInt = boolToAuxInt(true)
11893 // match: (Leq8 x (Const8 <t> [-1]))
11894 // result: (Less8 x (Const8 <t> [0]))
11897 if v_1.Op != OpConst8 {
11901 if auxIntToInt8(v_1.AuxInt) != -1 {
11905 v0 := b.NewValue0(v.Pos, OpConst8, t)
11906 v0.AuxInt = int8ToAuxInt(0)
11910 // match: (Leq8 (Const8 <t> [1]) x)
11911 // result: (Less8 (Const8 <t> [0]) x)
11913 if v_0.Op != OpConst8 {
11917 if auxIntToInt8(v_0.AuxInt) != 1 {
11922 v0 := b.NewValue0(v.Pos, OpConst8, t)
11923 v0.AuxInt = int8ToAuxInt(0)
// rewriteValuegeneric_OpLeq8U rewrites unsigned 8-bit <=: folds two
// constants, turns 1 <= x into x != 0, and folds 0 <= _ to true.
// Generated from _gen/generic.rules; regenerate with 'go generate' — do not hand-edit.
11929 func rewriteValuegeneric_OpLeq8U(v *Value) bool {
11933 // match: (Leq8U (Const8 [c]) (Const8 [d]))
11934 // result: (ConstBool [ uint8(c) <= uint8(d)])
11936 if v_0.Op != OpConst8 {
11939 c := auxIntToInt8(v_0.AuxInt)
11940 if v_1.Op != OpConst8 {
11943 d := auxIntToInt8(v_1.AuxInt)
11944 v.reset(OpConstBool)
11945 v.AuxInt = boolToAuxInt(uint8(c) <= uint8(d))
11948 // match: (Leq8U (Const8 <t> [1]) x)
11949 // result: (Neq8 (Const8 <t> [0]) x)
11951 if v_0.Op != OpConst8 {
11955 if auxIntToInt8(v_0.AuxInt) != 1 {
11960 v0 := b.NewValue0(v.Pos, OpConst8, t)
11961 v0.AuxInt = int8ToAuxInt(0)
11965 // match: (Leq8U (Const8 [0]) _)
11966 // result: (ConstBool [true])
11968 if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
11971 v.reset(OpConstBool)
11972 v.AuxInt = boolToAuxInt(true)
// rewriteValuegeneric_OpLess16 rewrites signed 16-bit <: folds two constants,
// strengthens 0 < x to x != 0 and x < 1 to x == 0 when x is provably
// non-negative, and otherwise canonicalizes x < 1 to x <= 0 and -1 < x to 0 <= x.
// Generated from _gen/generic.rules; regenerate with 'go generate' — do not hand-edit.
11977 func rewriteValuegeneric_OpLess16(v *Value) bool {
11981 // match: (Less16 (Const16 [c]) (Const16 [d]))
11982 // result: (ConstBool [c < d])
11984 if v_0.Op != OpConst16 {
11987 c := auxIntToInt16(v_0.AuxInt)
11988 if v_1.Op != OpConst16 {
11991 d := auxIntToInt16(v_1.AuxInt)
11992 v.reset(OpConstBool)
11993 v.AuxInt = boolToAuxInt(c < d)
11996 // match: (Less16 (Const16 <t> [0]) x)
11997 // cond: isNonNegative(x)
11998 // result: (Neq16 (Const16 <t> [0]) x)
12000 if v_0.Op != OpConst16 {
12004 if auxIntToInt16(v_0.AuxInt) != 0 {
12008 if !(isNonNegative(x)) {
12012 v0 := b.NewValue0(v.Pos, OpConst16, t)
12013 v0.AuxInt = int16ToAuxInt(0)
12017 // match: (Less16 x (Const16 <t> [1]))
12018 // cond: isNonNegative(x)
12019 // result: (Eq16 (Const16 <t> [0]) x)
12022 if v_1.Op != OpConst16 {
12026 if auxIntToInt16(v_1.AuxInt) != 1 || !(isNonNegative(x)) {
12030 v0 := b.NewValue0(v.Pos, OpConst16, t)
12031 v0.AuxInt = int16ToAuxInt(0)
12035 // match: (Less16 x (Const16 <t> [1]))
12036 // result: (Leq16 x (Const16 <t> [0]))
12039 if v_1.Op != OpConst16 {
12043 if auxIntToInt16(v_1.AuxInt) != 1 {
12047 v0 := b.NewValue0(v.Pos, OpConst16, t)
12048 v0.AuxInt = int16ToAuxInt(0)
12052 // match: (Less16 (Const16 <t> [-1]) x)
12053 // result: (Leq16 (Const16 <t> [0]) x)
12055 if v_0.Op != OpConst16 {
12059 if auxIntToInt16(v_0.AuxInt) != -1 {
12064 v0 := b.NewValue0(v.Pos, OpConst16, t)
12065 v0.AuxInt = int16ToAuxInt(0)
// rewriteValuegeneric_OpLess16U rewrites unsigned 16-bit <: folds two
// constants, turns x < 1 into x == 0, and folds _ < 0 to false.
// Generated from _gen/generic.rules; regenerate with 'go generate' — do not hand-edit.
12071 func rewriteValuegeneric_OpLess16U(v *Value) bool {
12075 // match: (Less16U (Const16 [c]) (Const16 [d]))
12076 // result: (ConstBool [uint16(c) < uint16(d)])
12078 if v_0.Op != OpConst16 {
12081 c := auxIntToInt16(v_0.AuxInt)
12082 if v_1.Op != OpConst16 {
12085 d := auxIntToInt16(v_1.AuxInt)
12086 v.reset(OpConstBool)
12087 v.AuxInt = boolToAuxInt(uint16(c) < uint16(d))
12090 // match: (Less16U x (Const16 <t> [1]))
12091 // result: (Eq16 (Const16 <t> [0]) x)
12094 if v_1.Op != OpConst16 {
12098 if auxIntToInt16(v_1.AuxInt) != 1 {
12102 v0 := b.NewValue0(v.Pos, OpConst16, t)
12103 v0.AuxInt = int16ToAuxInt(0)
12107 // match: (Less16U _ (Const16 [0]))
12108 // result: (ConstBool [false])
12110 if v_1.Op != OpConst16 || auxIntToInt16(v_1.AuxInt) != 0 {
12113 v.reset(OpConstBool)
12114 v.AuxInt = boolToAuxInt(false)
// rewriteValuegeneric_OpLess32 rewrites signed 32-bit <: folds two constants,
// strengthens 0 < x to x != 0 and x < 1 to x == 0 when x is provably
// non-negative, and otherwise canonicalizes x < 1 to x <= 0 and -1 < x to 0 <= x.
// Generated from _gen/generic.rules; regenerate with 'go generate' — do not hand-edit.
12119 func rewriteValuegeneric_OpLess32(v *Value) bool {
12123 // match: (Less32 (Const32 [c]) (Const32 [d]))
12124 // result: (ConstBool [c < d])
12126 if v_0.Op != OpConst32 {
12129 c := auxIntToInt32(v_0.AuxInt)
12130 if v_1.Op != OpConst32 {
12133 d := auxIntToInt32(v_1.AuxInt)
12134 v.reset(OpConstBool)
12135 v.AuxInt = boolToAuxInt(c < d)
12138 // match: (Less32 (Const32 <t> [0]) x)
12139 // cond: isNonNegative(x)
12140 // result: (Neq32 (Const32 <t> [0]) x)
12142 if v_0.Op != OpConst32 {
12146 if auxIntToInt32(v_0.AuxInt) != 0 {
12150 if !(isNonNegative(x)) {
12154 v0 := b.NewValue0(v.Pos, OpConst32, t)
12155 v0.AuxInt = int32ToAuxInt(0)
12159 // match: (Less32 x (Const32 <t> [1]))
12160 // cond: isNonNegative(x)
12161 // result: (Eq32 (Const32 <t> [0]) x)
12164 if v_1.Op != OpConst32 {
12168 if auxIntToInt32(v_1.AuxInt) != 1 || !(isNonNegative(x)) {
12172 v0 := b.NewValue0(v.Pos, OpConst32, t)
12173 v0.AuxInt = int32ToAuxInt(0)
12177 // match: (Less32 x (Const32 <t> [1]))
12178 // result: (Leq32 x (Const32 <t> [0]))
12181 if v_1.Op != OpConst32 {
12185 if auxIntToInt32(v_1.AuxInt) != 1 {
12189 v0 := b.NewValue0(v.Pos, OpConst32, t)
12190 v0.AuxInt = int32ToAuxInt(0)
12194 // match: (Less32 (Const32 <t> [-1]) x)
12195 // result: (Leq32 (Const32 <t> [0]) x)
12197 if v_0.Op != OpConst32 {
12201 if auxIntToInt32(v_0.AuxInt) != -1 {
12206 v0 := b.NewValue0(v.Pos, OpConst32, t)
12207 v0.AuxInt = int32ToAuxInt(0)
// rewriteValuegeneric_OpLess32F constant-folds float32 < when both operands
// are Const32F; this is the only rule for Less32F visible here.
// Generated from _gen/generic.rules; regenerate with 'go generate' — do not hand-edit.
12213 func rewriteValuegeneric_OpLess32F(v *Value) bool {
12216 // match: (Less32F (Const32F [c]) (Const32F [d]))
12217 // result: (ConstBool [c < d])
12219 if v_0.Op != OpConst32F {
12222 c := auxIntToFloat32(v_0.AuxInt)
12223 if v_1.Op != OpConst32F {
12226 d := auxIntToFloat32(v_1.AuxInt)
12227 v.reset(OpConstBool)
12228 v.AuxInt = boolToAuxInt(c < d)
// rewriteValuegeneric_OpLess32U rewrites unsigned 32-bit <: folds two
// constants, turns x < 1 into x == 0, and folds _ < 0 to false.
// Generated from _gen/generic.rules; regenerate with 'go generate' — do not hand-edit.
12233 func rewriteValuegeneric_OpLess32U(v *Value) bool {
12237 // match: (Less32U (Const32 [c]) (Const32 [d]))
12238 // result: (ConstBool [uint32(c) < uint32(d)])
12240 if v_0.Op != OpConst32 {
12243 c := auxIntToInt32(v_0.AuxInt)
12244 if v_1.Op != OpConst32 {
12247 d := auxIntToInt32(v_1.AuxInt)
12248 v.reset(OpConstBool)
12249 v.AuxInt = boolToAuxInt(uint32(c) < uint32(d))
12252 // match: (Less32U x (Const32 <t> [1]))
12253 // result: (Eq32 (Const32 <t> [0]) x)
12256 if v_1.Op != OpConst32 {
12260 if auxIntToInt32(v_1.AuxInt) != 1 {
12264 v0 := b.NewValue0(v.Pos, OpConst32, t)
12265 v0.AuxInt = int32ToAuxInt(0)
12269 // match: (Less32U _ (Const32 [0]))
12270 // result: (ConstBool [false])
12272 if v_1.Op != OpConst32 || auxIntToInt32(v_1.AuxInt) != 0 {
12275 v.reset(OpConstBool)
12276 v.AuxInt = boolToAuxInt(false)
// rewriteValuegeneric_OpLess64 rewrites signed 64-bit <: folds two constants,
// strengthens 0 < x to x != 0 and x < 1 to x == 0 when x is provably
// non-negative, and otherwise canonicalizes x < 1 to x <= 0 and -1 < x to 0 <= x.
// Generated from _gen/generic.rules; regenerate with 'go generate' — do not hand-edit.
12281 func rewriteValuegeneric_OpLess64(v *Value) bool {
12285 // match: (Less64 (Const64 [c]) (Const64 [d]))
12286 // result: (ConstBool [c < d])
12288 if v_0.Op != OpConst64 {
12291 c := auxIntToInt64(v_0.AuxInt)
12292 if v_1.Op != OpConst64 {
12295 d := auxIntToInt64(v_1.AuxInt)
12296 v.reset(OpConstBool)
12297 v.AuxInt = boolToAuxInt(c < d)
12300 // match: (Less64 (Const64 <t> [0]) x)
12301 // cond: isNonNegative(x)
12302 // result: (Neq64 (Const64 <t> [0]) x)
12304 if v_0.Op != OpConst64 {
12308 if auxIntToInt64(v_0.AuxInt) != 0 {
12312 if !(isNonNegative(x)) {
12316 v0 := b.NewValue0(v.Pos, OpConst64, t)
12317 v0.AuxInt = int64ToAuxInt(0)
12321 // match: (Less64 x (Const64 <t> [1]))
12322 // cond: isNonNegative(x)
12323 // result: (Eq64 (Const64 <t> [0]) x)
12326 if v_1.Op != OpConst64 {
12330 if auxIntToInt64(v_1.AuxInt) != 1 || !(isNonNegative(x)) {
12334 v0 := b.NewValue0(v.Pos, OpConst64, t)
12335 v0.AuxInt = int64ToAuxInt(0)
12339 // match: (Less64 x (Const64 <t> [1]))
12340 // result: (Leq64 x (Const64 <t> [0]))
12343 if v_1.Op != OpConst64 {
12347 if auxIntToInt64(v_1.AuxInt) != 1 {
12351 v0 := b.NewValue0(v.Pos, OpConst64, t)
12352 v0.AuxInt = int64ToAuxInt(0)
12356 // match: (Less64 (Const64 <t> [-1]) x)
12357 // result: (Leq64 (Const64 <t> [0]) x)
12359 if v_0.Op != OpConst64 {
12363 if auxIntToInt64(v_0.AuxInt) != -1 {
12368 v0 := b.NewValue0(v.Pos, OpConst64, t)
12369 v0.AuxInt = int64ToAuxInt(0)
// rewriteValuegeneric_OpLess64F constant-folds float64 < when both operands
// are Const64F; this is the only rule for Less64F visible here.
// Generated from _gen/generic.rules; regenerate with 'go generate' — do not hand-edit.
12375 func rewriteValuegeneric_OpLess64F(v *Value) bool {
12378 // match: (Less64F (Const64F [c]) (Const64F [d]))
12379 // result: (ConstBool [c < d])
12381 if v_0.Op != OpConst64F {
12384 c := auxIntToFloat64(v_0.AuxInt)
12385 if v_1.Op != OpConst64F {
12388 d := auxIntToFloat64(v_1.AuxInt)
12389 v.reset(OpConstBool)
12390 v.AuxInt = boolToAuxInt(c < d)
// rewriteValuegeneric_OpLess64U rewrites unsigned 64-bit <: folds two
// constants, turns x < 1 into x == 0, and folds _ < 0 to false.
// Generated from _gen/generic.rules; regenerate with 'go generate' — do not hand-edit.
12395 func rewriteValuegeneric_OpLess64U(v *Value) bool {
12399 // match: (Less64U (Const64 [c]) (Const64 [d]))
12400 // result: (ConstBool [uint64(c) < uint64(d)])
12402 if v_0.Op != OpConst64 {
12405 c := auxIntToInt64(v_0.AuxInt)
12406 if v_1.Op != OpConst64 {
12409 d := auxIntToInt64(v_1.AuxInt)
12410 v.reset(OpConstBool)
12411 v.AuxInt = boolToAuxInt(uint64(c) < uint64(d))
12414 // match: (Less64U x (Const64 <t> [1]))
12415 // result: (Eq64 (Const64 <t> [0]) x)
12418 if v_1.Op != OpConst64 {
12422 if auxIntToInt64(v_1.AuxInt) != 1 {
12426 v0 := b.NewValue0(v.Pos, OpConst64, t)
12427 v0.AuxInt = int64ToAuxInt(0)
12431 // match: (Less64U _ (Const64 [0]))
12432 // result: (ConstBool [false])
12434 if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 {
12437 v.reset(OpConstBool)
12438 v.AuxInt = boolToAuxInt(false)
// rewriteValuegeneric_OpLess8 rewrites signed 8-bit <: folds two constants,
// strengthens 0 < x to x != 0 and x < 1 to x == 0 when x is provably
// non-negative, and otherwise canonicalizes x < 1 to x <= 0 and -1 < x to 0 <= x.
// Generated from _gen/generic.rules; regenerate with 'go generate' — do not hand-edit.
12443 func rewriteValuegeneric_OpLess8(v *Value) bool {
12447 // match: (Less8 (Const8 [c]) (Const8 [d]))
12448 // result: (ConstBool [c < d])
12450 if v_0.Op != OpConst8 {
12453 c := auxIntToInt8(v_0.AuxInt)
12454 if v_1.Op != OpConst8 {
12457 d := auxIntToInt8(v_1.AuxInt)
12458 v.reset(OpConstBool)
12459 v.AuxInt = boolToAuxInt(c < d)
12462 // match: (Less8 (Const8 <t> [0]) x)
12463 // cond: isNonNegative(x)
12464 // result: (Neq8 (Const8 <t> [0]) x)
12466 if v_0.Op != OpConst8 {
12470 if auxIntToInt8(v_0.AuxInt) != 0 {
12474 if !(isNonNegative(x)) {
12478 v0 := b.NewValue0(v.Pos, OpConst8, t)
12479 v0.AuxInt = int8ToAuxInt(0)
12483 // match: (Less8 x (Const8 <t> [1]))
12484 // cond: isNonNegative(x)
12485 // result: (Eq8 (Const8 <t> [0]) x)
12488 if v_1.Op != OpConst8 {
12492 if auxIntToInt8(v_1.AuxInt) != 1 || !(isNonNegative(x)) {
12496 v0 := b.NewValue0(v.Pos, OpConst8, t)
12497 v0.AuxInt = int8ToAuxInt(0)
12501 // match: (Less8 x (Const8 <t> [1]))
12502 // result: (Leq8 x (Const8 <t> [0]))
12505 if v_1.Op != OpConst8 {
12509 if auxIntToInt8(v_1.AuxInt) != 1 {
12513 v0 := b.NewValue0(v.Pos, OpConst8, t)
12514 v0.AuxInt = int8ToAuxInt(0)
12518 // match: (Less8 (Const8 <t> [-1]) x)
12519 // result: (Leq8 (Const8 <t> [0]) x)
12521 if v_0.Op != OpConst8 {
12525 if auxIntToInt8(v_0.AuxInt) != -1 {
12530 v0 := b.NewValue0(v.Pos, OpConst8, t)
12531 v0.AuxInt = int8ToAuxInt(0)
// rewriteValuegeneric_OpLess8U rewrites unsigned 8-bit <: folds two
// constants, turns x < 1 into x == 0, and folds _ < 0 to false.
// Generated from _gen/generic.rules; regenerate with 'go generate' — do not hand-edit.
12537 func rewriteValuegeneric_OpLess8U(v *Value) bool {
12541 // match: (Less8U (Const8 [c]) (Const8 [d]))
12542 // result: (ConstBool [ uint8(c) < uint8(d)])
12544 if v_0.Op != OpConst8 {
12547 c := auxIntToInt8(v_0.AuxInt)
12548 if v_1.Op != OpConst8 {
12551 d := auxIntToInt8(v_1.AuxInt)
12552 v.reset(OpConstBool)
12553 v.AuxInt = boolToAuxInt(uint8(c) < uint8(d))
12556 // match: (Less8U x (Const8 <t> [1]))
12557 // result: (Eq8 (Const8 <t> [0]) x)
12560 if v_1.Op != OpConst8 {
12564 if auxIntToInt8(v_1.AuxInt) != 1 {
12568 v0 := b.NewValue0(v.Pos, OpConst8, t)
12569 v0.AuxInt = int8ToAuxInt(0)
12573 // match: (Less8U _ (Const8 [0]))
12574 // result: (ConstBool [false])
12576 if v_1.Op != OpConst8 || auxIntToInt8(v_1.AuxInt) != 0 {
12579 v.reset(OpConstBool)
12580 v.AuxInt = boolToAuxInt(false)
12585 func rewriteValuegeneric_OpLoad(v *Value) bool {
12589 config := b.Func.Config
12591 // match: (Load <t1> p1 (Store {t2} p2 x _))
12592 // cond: isSamePtr(p1, p2) && t1.Compare(x.Type) == types.CMPeq && t1.Size() == t2.Size()
12597 if v_1.Op != OpStore {
12600 t2 := auxToType(v_1.Aux)
12603 if !(isSamePtr(p1, p2) && t1.Compare(x.Type) == types.CMPeq && t1.Size() == t2.Size()) {
12609 // match: (Load <t1> p1 (Store {t2} p2 _ (Store {t3} p3 x _)))
12610 // cond: isSamePtr(p1, p3) && t1.Compare(x.Type) == types.CMPeq && t1.Size() == t2.Size() && disjoint(p3, t3.Size(), p2, t2.Size())
12615 if v_1.Op != OpStore {
12618 t2 := auxToType(v_1.Aux)
12621 v_1_2 := v_1.Args[2]
12622 if v_1_2.Op != OpStore {
12625 t3 := auxToType(v_1_2.Aux)
12627 p3 := v_1_2.Args[0]
12628 if !(isSamePtr(p1, p3) && t1.Compare(x.Type) == types.CMPeq && t1.Size() == t2.Size() && disjoint(p3, t3.Size(), p2, t2.Size())) {
12634 // match: (Load <t1> p1 (Store {t2} p2 _ (Store {t3} p3 _ (Store {t4} p4 x _))))
12635 // cond: isSamePtr(p1, p4) && t1.Compare(x.Type) == types.CMPeq && t1.Size() == t2.Size() && disjoint(p4, t4.Size(), p2, t2.Size()) && disjoint(p4, t4.Size(), p3, t3.Size())
12640 if v_1.Op != OpStore {
12643 t2 := auxToType(v_1.Aux)
12646 v_1_2 := v_1.Args[2]
12647 if v_1_2.Op != OpStore {
12650 t3 := auxToType(v_1_2.Aux)
12652 p3 := v_1_2.Args[0]
12653 v_1_2_2 := v_1_2.Args[2]
12654 if v_1_2_2.Op != OpStore {
12657 t4 := auxToType(v_1_2_2.Aux)
12658 x := v_1_2_2.Args[1]
12659 p4 := v_1_2_2.Args[0]
12660 if !(isSamePtr(p1, p4) && t1.Compare(x.Type) == types.CMPeq && t1.Size() == t2.Size() && disjoint(p4, t4.Size(), p2, t2.Size()) && disjoint(p4, t4.Size(), p3, t3.Size())) {
12666 // match: (Load <t1> p1 (Store {t2} p2 _ (Store {t3} p3 _ (Store {t4} p4 _ (Store {t5} p5 x _)))))
12667 // cond: isSamePtr(p1, p5) && t1.Compare(x.Type) == types.CMPeq && t1.Size() == t2.Size() && disjoint(p5, t5.Size(), p2, t2.Size()) && disjoint(p5, t5.Size(), p3, t3.Size()) && disjoint(p5, t5.Size(), p4, t4.Size())
12672 if v_1.Op != OpStore {
12675 t2 := auxToType(v_1.Aux)
12678 v_1_2 := v_1.Args[2]
12679 if v_1_2.Op != OpStore {
12682 t3 := auxToType(v_1_2.Aux)
12684 p3 := v_1_2.Args[0]
12685 v_1_2_2 := v_1_2.Args[2]
12686 if v_1_2_2.Op != OpStore {
12689 t4 := auxToType(v_1_2_2.Aux)
12690 _ = v_1_2_2.Args[2]
12691 p4 := v_1_2_2.Args[0]
12692 v_1_2_2_2 := v_1_2_2.Args[2]
12693 if v_1_2_2_2.Op != OpStore {
12696 t5 := auxToType(v_1_2_2_2.Aux)
12697 x := v_1_2_2_2.Args[1]
12698 p5 := v_1_2_2_2.Args[0]
12699 if !(isSamePtr(p1, p5) && t1.Compare(x.Type) == types.CMPeq && t1.Size() == t2.Size() && disjoint(p5, t5.Size(), p2, t2.Size()) && disjoint(p5, t5.Size(), p3, t3.Size()) && disjoint(p5, t5.Size(), p4, t4.Size())) {
12705 // match: (Load <t1> p1 (Store {t2} p2 (Const64 [x]) _))
12706 // cond: isSamePtr(p1,p2) && sizeof(t2) == 8 && is64BitFloat(t1) && !math.IsNaN(math.Float64frombits(uint64(x)))
12707 // result: (Const64F [math.Float64frombits(uint64(x))])
12711 if v_1.Op != OpStore {
12714 t2 := auxToType(v_1.Aux)
12717 v_1_1 := v_1.Args[1]
12718 if v_1_1.Op != OpConst64 {
12721 x := auxIntToInt64(v_1_1.AuxInt)
12722 if !(isSamePtr(p1, p2) && sizeof(t2) == 8 && is64BitFloat(t1) && !math.IsNaN(math.Float64frombits(uint64(x)))) {
12725 v.reset(OpConst64F)
12726 v.AuxInt = float64ToAuxInt(math.Float64frombits(uint64(x)))
12729 // match: (Load <t1> p1 (Store {t2} p2 (Const32 [x]) _))
12730 // cond: isSamePtr(p1,p2) && sizeof(t2) == 4 && is32BitFloat(t1) && !math.IsNaN(float64(math.Float32frombits(uint32(x))))
12731 // result: (Const32F [math.Float32frombits(uint32(x))])
12735 if v_1.Op != OpStore {
12738 t2 := auxToType(v_1.Aux)
12741 v_1_1 := v_1.Args[1]
12742 if v_1_1.Op != OpConst32 {
12745 x := auxIntToInt32(v_1_1.AuxInt)
12746 if !(isSamePtr(p1, p2) && sizeof(t2) == 4 && is32BitFloat(t1) && !math.IsNaN(float64(math.Float32frombits(uint32(x))))) {
12749 v.reset(OpConst32F)
12750 v.AuxInt = float32ToAuxInt(math.Float32frombits(uint32(x)))
12753 // match: (Load <t1> p1 (Store {t2} p2 (Const64F [x]) _))
12754 // cond: isSamePtr(p1,p2) && sizeof(t2) == 8 && is64BitInt(t1)
12755 // result: (Const64 [int64(math.Float64bits(x))])
12759 if v_1.Op != OpStore {
12762 t2 := auxToType(v_1.Aux)
12765 v_1_1 := v_1.Args[1]
12766 if v_1_1.Op != OpConst64F {
12769 x := auxIntToFloat64(v_1_1.AuxInt)
12770 if !(isSamePtr(p1, p2) && sizeof(t2) == 8 && is64BitInt(t1)) {
12774 v.AuxInt = int64ToAuxInt(int64(math.Float64bits(x)))
12777 // match: (Load <t1> p1 (Store {t2} p2 (Const32F [x]) _))
12778 // cond: isSamePtr(p1,p2) && sizeof(t2) == 4 && is32BitInt(t1)
12779 // result: (Const32 [int32(math.Float32bits(x))])
12783 if v_1.Op != OpStore {
12786 t2 := auxToType(v_1.Aux)
12789 v_1_1 := v_1.Args[1]
12790 if v_1_1.Op != OpConst32F {
12793 x := auxIntToFloat32(v_1_1.AuxInt)
12794 if !(isSamePtr(p1, p2) && sizeof(t2) == 4 && is32BitInt(t1)) {
12798 v.AuxInt = int32ToAuxInt(int32(math.Float32bits(x)))
12801 // match: (Load <t1> op:(OffPtr [o1] p1) (Store {t2} p2 _ mem:(Zero [n] p3 _)))
12802 // cond: o1 >= 0 && o1+t1.Size() <= n && isSamePtr(p1, p3) && fe.CanSSA(t1) && disjoint(op, t1.Size(), p2, t2.Size())
12803 // result: @mem.Block (Load <t1> (OffPtr <op.Type> [o1] p3) mem)
12807 if op.Op != OpOffPtr {
12810 o1 := auxIntToInt64(op.AuxInt)
12812 if v_1.Op != OpStore {
12815 t2 := auxToType(v_1.Aux)
12819 if mem.Op != OpZero {
12822 n := auxIntToInt64(mem.AuxInt)
12824 if !(o1 >= 0 && o1+t1.Size() <= n && isSamePtr(p1, p3) && fe.CanSSA(t1) && disjoint(op, t1.Size(), p2, t2.Size())) {
12828 v0 := b.NewValue0(v.Pos, OpLoad, t1)
12830 v1 := b.NewValue0(v.Pos, OpOffPtr, op.Type)
12831 v1.AuxInt = int64ToAuxInt(o1)
12833 v0.AddArg2(v1, mem)
12836 // match: (Load <t1> op:(OffPtr [o1] p1) (Store {t2} p2 _ (Store {t3} p3 _ mem:(Zero [n] p4 _))))
12837 // cond: o1 >= 0 && o1+t1.Size() <= n && isSamePtr(p1, p4) && fe.CanSSA(t1) && disjoint(op, t1.Size(), p2, t2.Size()) && disjoint(op, t1.Size(), p3, t3.Size())
12838 // result: @mem.Block (Load <t1> (OffPtr <op.Type> [o1] p4) mem)
12842 if op.Op != OpOffPtr {
12845 o1 := auxIntToInt64(op.AuxInt)
12847 if v_1.Op != OpStore {
12850 t2 := auxToType(v_1.Aux)
12853 v_1_2 := v_1.Args[2]
12854 if v_1_2.Op != OpStore {
12857 t3 := auxToType(v_1_2.Aux)
12859 p3 := v_1_2.Args[0]
12860 mem := v_1_2.Args[2]
12861 if mem.Op != OpZero {
12864 n := auxIntToInt64(mem.AuxInt)
12866 if !(o1 >= 0 && o1+t1.Size() <= n && isSamePtr(p1, p4) && fe.CanSSA(t1) && disjoint(op, t1.Size(), p2, t2.Size()) && disjoint(op, t1.Size(), p3, t3.Size())) {
12870 v0 := b.NewValue0(v.Pos, OpLoad, t1)
12872 v1 := b.NewValue0(v.Pos, OpOffPtr, op.Type)
12873 v1.AuxInt = int64ToAuxInt(o1)
12875 v0.AddArg2(v1, mem)
12878 // match: (Load <t1> op:(OffPtr [o1] p1) (Store {t2} p2 _ (Store {t3} p3 _ (Store {t4} p4 _ mem:(Zero [n] p5 _)))))
12879 // cond: o1 >= 0 && o1+t1.Size() <= n && isSamePtr(p1, p5) && fe.CanSSA(t1) && disjoint(op, t1.Size(), p2, t2.Size()) && disjoint(op, t1.Size(), p3, t3.Size()) && disjoint(op, t1.Size(), p4, t4.Size())
12880 // result: @mem.Block (Load <t1> (OffPtr <op.Type> [o1] p5) mem)
12884 if op.Op != OpOffPtr {
12887 o1 := auxIntToInt64(op.AuxInt)
12889 if v_1.Op != OpStore {
12892 t2 := auxToType(v_1.Aux)
12895 v_1_2 := v_1.Args[2]
12896 if v_1_2.Op != OpStore {
12899 t3 := auxToType(v_1_2.Aux)
12901 p3 := v_1_2.Args[0]
12902 v_1_2_2 := v_1_2.Args[2]
12903 if v_1_2_2.Op != OpStore {
12906 t4 := auxToType(v_1_2_2.Aux)
12907 _ = v_1_2_2.Args[2]
12908 p4 := v_1_2_2.Args[0]
12909 mem := v_1_2_2.Args[2]
12910 if mem.Op != OpZero {
12913 n := auxIntToInt64(mem.AuxInt)
12915 if !(o1 >= 0 && o1+t1.Size() <= n && isSamePtr(p1, p5) && fe.CanSSA(t1) && disjoint(op, t1.Size(), p2, t2.Size()) && disjoint(op, t1.Size(), p3, t3.Size()) && disjoint(op, t1.Size(), p4, t4.Size())) {
12919 v0 := b.NewValue0(v.Pos, OpLoad, t1)
12921 v1 := b.NewValue0(v.Pos, OpOffPtr, op.Type)
12922 v1.AuxInt = int64ToAuxInt(o1)
12924 v0.AddArg2(v1, mem)
12927 // match: (Load <t1> op:(OffPtr [o1] p1) (Store {t2} p2 _ (Store {t3} p3 _ (Store {t4} p4 _ (Store {t5} p5 _ mem:(Zero [n] p6 _))))))
12928 // cond: o1 >= 0 && o1+t1.Size() <= n && isSamePtr(p1, p6) && fe.CanSSA(t1) && disjoint(op, t1.Size(), p2, t2.Size()) && disjoint(op, t1.Size(), p3, t3.Size()) && disjoint(op, t1.Size(), p4, t4.Size()) && disjoint(op, t1.Size(), p5, t5.Size())
12929 // result: @mem.Block (Load <t1> (OffPtr <op.Type> [o1] p6) mem)
12933 if op.Op != OpOffPtr {
12936 o1 := auxIntToInt64(op.AuxInt)
12938 if v_1.Op != OpStore {
12941 t2 := auxToType(v_1.Aux)
12944 v_1_2 := v_1.Args[2]
12945 if v_1_2.Op != OpStore {
12948 t3 := auxToType(v_1_2.Aux)
12950 p3 := v_1_2.Args[0]
12951 v_1_2_2 := v_1_2.Args[2]
12952 if v_1_2_2.Op != OpStore {
12955 t4 := auxToType(v_1_2_2.Aux)
12956 _ = v_1_2_2.Args[2]
12957 p4 := v_1_2_2.Args[0]
12958 v_1_2_2_2 := v_1_2_2.Args[2]
12959 if v_1_2_2_2.Op != OpStore {
12962 t5 := auxToType(v_1_2_2_2.Aux)
12963 _ = v_1_2_2_2.Args[2]
12964 p5 := v_1_2_2_2.Args[0]
12965 mem := v_1_2_2_2.Args[2]
12966 if mem.Op != OpZero {
12969 n := auxIntToInt64(mem.AuxInt)
12971 if !(o1 >= 0 && o1+t1.Size() <= n && isSamePtr(p1, p6) && fe.CanSSA(t1) && disjoint(op, t1.Size(), p2, t2.Size()) && disjoint(op, t1.Size(), p3, t3.Size()) && disjoint(op, t1.Size(), p4, t4.Size()) && disjoint(op, t1.Size(), p5, t5.Size())) {
12975 v0 := b.NewValue0(v.Pos, OpLoad, t1)
12977 v1 := b.NewValue0(v.Pos, OpOffPtr, op.Type)
12978 v1.AuxInt = int64ToAuxInt(o1)
12980 v0.AddArg2(v1, mem)
12983 // match: (Load <t1> (OffPtr [o] p1) (Zero [n] p2 _))
12984 // cond: t1.IsBoolean() && isSamePtr(p1, p2) && n >= o + 1
12985 // result: (ConstBool [false])
12988 if v_0.Op != OpOffPtr {
12991 o := auxIntToInt64(v_0.AuxInt)
12993 if v_1.Op != OpZero {
12996 n := auxIntToInt64(v_1.AuxInt)
12998 if !(t1.IsBoolean() && isSamePtr(p1, p2) && n >= o+1) {
13001 v.reset(OpConstBool)
13002 v.AuxInt = boolToAuxInt(false)
13005 // match: (Load <t1> (OffPtr [o] p1) (Zero [n] p2 _))
13006 // cond: is8BitInt(t1) && isSamePtr(p1, p2) && n >= o + 1
13007 // result: (Const8 [0])
13010 if v_0.Op != OpOffPtr {
13013 o := auxIntToInt64(v_0.AuxInt)
13015 if v_1.Op != OpZero {
13018 n := auxIntToInt64(v_1.AuxInt)
13020 if !(is8BitInt(t1) && isSamePtr(p1, p2) && n >= o+1) {
13024 v.AuxInt = int8ToAuxInt(0)
13027 // match: (Load <t1> (OffPtr [o] p1) (Zero [n] p2 _))
13028 // cond: is16BitInt(t1) && isSamePtr(p1, p2) && n >= o + 2
13029 // result: (Const16 [0])
13032 if v_0.Op != OpOffPtr {
13035 o := auxIntToInt64(v_0.AuxInt)
13037 if v_1.Op != OpZero {
13040 n := auxIntToInt64(v_1.AuxInt)
13042 if !(is16BitInt(t1) && isSamePtr(p1, p2) && n >= o+2) {
13046 v.AuxInt = int16ToAuxInt(0)
13049 // match: (Load <t1> (OffPtr [o] p1) (Zero [n] p2 _))
13050 // cond: is32BitInt(t1) && isSamePtr(p1, p2) && n >= o + 4
13051 // result: (Const32 [0])
13054 if v_0.Op != OpOffPtr {
13057 o := auxIntToInt64(v_0.AuxInt)
13059 if v_1.Op != OpZero {
13062 n := auxIntToInt64(v_1.AuxInt)
13064 if !(is32BitInt(t1) && isSamePtr(p1, p2) && n >= o+4) {
13068 v.AuxInt = int32ToAuxInt(0)
13071 // match: (Load <t1> (OffPtr [o] p1) (Zero [n] p2 _))
13072 // cond: is64BitInt(t1) && isSamePtr(p1, p2) && n >= o + 8
13073 // result: (Const64 [0])
13076 if v_0.Op != OpOffPtr {
13079 o := auxIntToInt64(v_0.AuxInt)
13081 if v_1.Op != OpZero {
13084 n := auxIntToInt64(v_1.AuxInt)
13086 if !(is64BitInt(t1) && isSamePtr(p1, p2) && n >= o+8) {
13090 v.AuxInt = int64ToAuxInt(0)
13093 // match: (Load <t1> (OffPtr [o] p1) (Zero [n] p2 _))
13094 // cond: is32BitFloat(t1) && isSamePtr(p1, p2) && n >= o + 4
13095 // result: (Const32F [0])
13098 if v_0.Op != OpOffPtr {
13101 o := auxIntToInt64(v_0.AuxInt)
13103 if v_1.Op != OpZero {
13106 n := auxIntToInt64(v_1.AuxInt)
13108 if !(is32BitFloat(t1) && isSamePtr(p1, p2) && n >= o+4) {
13111 v.reset(OpConst32F)
13112 v.AuxInt = float32ToAuxInt(0)
13115 // match: (Load <t1> (OffPtr [o] p1) (Zero [n] p2 _))
13116 // cond: is64BitFloat(t1) && isSamePtr(p1, p2) && n >= o + 8
13117 // result: (Const64F [0])
13120 if v_0.Op != OpOffPtr {
13123 o := auxIntToInt64(v_0.AuxInt)
13125 if v_1.Op != OpZero {
13128 n := auxIntToInt64(v_1.AuxInt)
13130 if !(is64BitFloat(t1) && isSamePtr(p1, p2) && n >= o+8) {
13133 v.reset(OpConst64F)
13134 v.AuxInt = float64ToAuxInt(0)
13137 // match: (Load <t> _ _)
13138 // cond: t.IsStruct() && t.NumFields() == 0 && fe.CanSSA(t)
13139 // result: (StructMake0)
13142 if !(t.IsStruct() && t.NumFields() == 0 && fe.CanSSA(t)) {
13145 v.reset(OpStructMake0)
13148 // match: (Load <t> ptr mem)
13149 // cond: t.IsStruct() && t.NumFields() == 1 && fe.CanSSA(t)
13150 // result: (StructMake1 (Load <t.FieldType(0)> (OffPtr <t.FieldType(0).PtrTo()> [0] ptr) mem))
13155 if !(t.IsStruct() && t.NumFields() == 1 && fe.CanSSA(t)) {
13158 v.reset(OpStructMake1)
13159 v0 := b.NewValue0(v.Pos, OpLoad, t.FieldType(0))
13160 v1 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(0).PtrTo())
13161 v1.AuxInt = int64ToAuxInt(0)
13163 v0.AddArg2(v1, mem)
13167 // match: (Load <t> ptr mem)
13168 // cond: t.IsStruct() && t.NumFields() == 2 && fe.CanSSA(t)
13169 // result: (StructMake2 (Load <t.FieldType(0)> (OffPtr <t.FieldType(0).PtrTo()> [0] ptr) mem) (Load <t.FieldType(1)> (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] ptr) mem))
13174 if !(t.IsStruct() && t.NumFields() == 2 && fe.CanSSA(t)) {
13177 v.reset(OpStructMake2)
13178 v0 := b.NewValue0(v.Pos, OpLoad, t.FieldType(0))
13179 v1 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(0).PtrTo())
13180 v1.AuxInt = int64ToAuxInt(0)
13182 v0.AddArg2(v1, mem)
13183 v2 := b.NewValue0(v.Pos, OpLoad, t.FieldType(1))
13184 v3 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(1).PtrTo())
13185 v3.AuxInt = int64ToAuxInt(t.FieldOff(1))
13187 v2.AddArg2(v3, mem)
13191 // match: (Load <t> ptr mem)
13192 // cond: t.IsStruct() && t.NumFields() == 3 && fe.CanSSA(t)
13193 // result: (StructMake3 (Load <t.FieldType(0)> (OffPtr <t.FieldType(0).PtrTo()> [0] ptr) mem) (Load <t.FieldType(1)> (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] ptr) mem) (Load <t.FieldType(2)> (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] ptr) mem))
13198 if !(t.IsStruct() && t.NumFields() == 3 && fe.CanSSA(t)) {
13201 v.reset(OpStructMake3)
13202 v0 := b.NewValue0(v.Pos, OpLoad, t.FieldType(0))
13203 v1 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(0).PtrTo())
13204 v1.AuxInt = int64ToAuxInt(0)
13206 v0.AddArg2(v1, mem)
13207 v2 := b.NewValue0(v.Pos, OpLoad, t.FieldType(1))
13208 v3 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(1).PtrTo())
13209 v3.AuxInt = int64ToAuxInt(t.FieldOff(1))
13211 v2.AddArg2(v3, mem)
13212 v4 := b.NewValue0(v.Pos, OpLoad, t.FieldType(2))
13213 v5 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(2).PtrTo())
13214 v5.AuxInt = int64ToAuxInt(t.FieldOff(2))
13216 v4.AddArg2(v5, mem)
13217 v.AddArg3(v0, v2, v4)
13220 // match: (Load <t> ptr mem)
13221 // cond: t.IsStruct() && t.NumFields() == 4 && fe.CanSSA(t)
13222 // result: (StructMake4 (Load <t.FieldType(0)> (OffPtr <t.FieldType(0).PtrTo()> [0] ptr) mem) (Load <t.FieldType(1)> (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] ptr) mem) (Load <t.FieldType(2)> (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] ptr) mem) (Load <t.FieldType(3)> (OffPtr <t.FieldType(3).PtrTo()> [t.FieldOff(3)] ptr) mem))
13227 if !(t.IsStruct() && t.NumFields() == 4 && fe.CanSSA(t)) {
13230 v.reset(OpStructMake4)
13231 v0 := b.NewValue0(v.Pos, OpLoad, t.FieldType(0))
13232 v1 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(0).PtrTo())
13233 v1.AuxInt = int64ToAuxInt(0)
13235 v0.AddArg2(v1, mem)
13236 v2 := b.NewValue0(v.Pos, OpLoad, t.FieldType(1))
13237 v3 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(1).PtrTo())
13238 v3.AuxInt = int64ToAuxInt(t.FieldOff(1))
13240 v2.AddArg2(v3, mem)
13241 v4 := b.NewValue0(v.Pos, OpLoad, t.FieldType(2))
13242 v5 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(2).PtrTo())
13243 v5.AuxInt = int64ToAuxInt(t.FieldOff(2))
13245 v4.AddArg2(v5, mem)
13246 v6 := b.NewValue0(v.Pos, OpLoad, t.FieldType(3))
13247 v7 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(3).PtrTo())
13248 v7.AuxInt = int64ToAuxInt(t.FieldOff(3))
13250 v6.AddArg2(v7, mem)
13251 v.AddArg4(v0, v2, v4, v6)
13254 // match: (Load <t> _ _)
13255 // cond: t.IsArray() && t.NumElem() == 0
13256 // result: (ArrayMake0)
13259 if !(t.IsArray() && t.NumElem() == 0) {
13262 v.reset(OpArrayMake0)
13265 // match: (Load <t> ptr mem)
13266 // cond: t.IsArray() && t.NumElem() == 1 && fe.CanSSA(t)
13267 // result: (ArrayMake1 (Load <t.Elem()> ptr mem))
13272 if !(t.IsArray() && t.NumElem() == 1 && fe.CanSSA(t)) {
13275 v.reset(OpArrayMake1)
13276 v0 := b.NewValue0(v.Pos, OpLoad, t.Elem())
13277 v0.AddArg2(ptr, mem)
13281 // match: (Load <t> (OffPtr [off] (Addr {s} sb) ) _)
13282 // cond: t.IsUintptr() && isFixedSym(s, off)
13283 // result: (Addr {fixedSym(b.Func, s, off)} sb)
13286 if v_0.Op != OpOffPtr {
13289 off := auxIntToInt64(v_0.AuxInt)
13290 v_0_0 := v_0.Args[0]
13291 if v_0_0.Op != OpAddr {
13294 s := auxToSym(v_0_0.Aux)
13295 sb := v_0_0.Args[0]
13296 if !(t.IsUintptr() && isFixedSym(s, off)) {
13300 v.Aux = symToAux(fixedSym(b.Func, s, off))
13304 // match: (Load <t> (OffPtr [off] (Convert (Addr {s} sb) _) ) _)
13305 // cond: t.IsUintptr() && isFixedSym(s, off)
13306 // result: (Addr {fixedSym(b.Func, s, off)} sb)
13309 if v_0.Op != OpOffPtr {
13312 off := auxIntToInt64(v_0.AuxInt)
13313 v_0_0 := v_0.Args[0]
13314 if v_0_0.Op != OpConvert {
13317 v_0_0_0 := v_0_0.Args[0]
13318 if v_0_0_0.Op != OpAddr {
13321 s := auxToSym(v_0_0_0.Aux)
13322 sb := v_0_0_0.Args[0]
13323 if !(t.IsUintptr() && isFixedSym(s, off)) {
13327 v.Aux = symToAux(fixedSym(b.Func, s, off))
13331 // match: (Load <t> (OffPtr [off] (ITab (IMake (Addr {s} sb) _))) _)
13332 // cond: t.IsUintptr() && isFixedSym(s, off)
13333 // result: (Addr {fixedSym(b.Func, s, off)} sb)
13336 if v_0.Op != OpOffPtr {
13339 off := auxIntToInt64(v_0.AuxInt)
13340 v_0_0 := v_0.Args[0]
13341 if v_0_0.Op != OpITab {
13344 v_0_0_0 := v_0_0.Args[0]
13345 if v_0_0_0.Op != OpIMake {
13348 v_0_0_0_0 := v_0_0_0.Args[0]
13349 if v_0_0_0_0.Op != OpAddr {
13352 s := auxToSym(v_0_0_0_0.Aux)
13353 sb := v_0_0_0_0.Args[0]
13354 if !(t.IsUintptr() && isFixedSym(s, off)) {
13358 v.Aux = symToAux(fixedSym(b.Func, s, off))
13362 // match: (Load <t> (OffPtr [off] (ITab (IMake (Convert (Addr {s} sb) _) _))) _)
13363 // cond: t.IsUintptr() && isFixedSym(s, off)
13364 // result: (Addr {fixedSym(b.Func, s, off)} sb)
13367 if v_0.Op != OpOffPtr {
13370 off := auxIntToInt64(v_0.AuxInt)
13371 v_0_0 := v_0.Args[0]
13372 if v_0_0.Op != OpITab {
13375 v_0_0_0 := v_0_0.Args[0]
13376 if v_0_0_0.Op != OpIMake {
13379 v_0_0_0_0 := v_0_0_0.Args[0]
13380 if v_0_0_0_0.Op != OpConvert {
13383 v_0_0_0_0_0 := v_0_0_0_0.Args[0]
13384 if v_0_0_0_0_0.Op != OpAddr {
13387 s := auxToSym(v_0_0_0_0_0.Aux)
13388 sb := v_0_0_0_0_0.Args[0]
13389 if !(t.IsUintptr() && isFixedSym(s, off)) {
13393 v.Aux = symToAux(fixedSym(b.Func, s, off))
13397 // match: (Load <t> (OffPtr [off] (Addr {sym} _) ) _)
13398 // cond: t.IsInteger() && t.Size() == 4 && isFixed32(config, sym, off)
13399 // result: (Const32 [fixed32(config, sym, off)])
13402 if v_0.Op != OpOffPtr {
13405 off := auxIntToInt64(v_0.AuxInt)
13406 v_0_0 := v_0.Args[0]
13407 if v_0_0.Op != OpAddr {
13410 sym := auxToSym(v_0_0.Aux)
13411 if !(t.IsInteger() && t.Size() == 4 && isFixed32(config, sym, off)) {
13415 v.AuxInt = int32ToAuxInt(fixed32(config, sym, off))
13418 // match: (Load <t> (OffPtr [off] (Convert (Addr {sym} _) _) ) _)
13419 // cond: t.IsInteger() && t.Size() == 4 && isFixed32(config, sym, off)
13420 // result: (Const32 [fixed32(config, sym, off)])
13423 if v_0.Op != OpOffPtr {
13426 off := auxIntToInt64(v_0.AuxInt)
13427 v_0_0 := v_0.Args[0]
13428 if v_0_0.Op != OpConvert {
13431 v_0_0_0 := v_0_0.Args[0]
13432 if v_0_0_0.Op != OpAddr {
13435 sym := auxToSym(v_0_0_0.Aux)
13436 if !(t.IsInteger() && t.Size() == 4 && isFixed32(config, sym, off)) {
13440 v.AuxInt = int32ToAuxInt(fixed32(config, sym, off))
13443 // match: (Load <t> (OffPtr [off] (ITab (IMake (Addr {sym} _) _))) _)
13444 // cond: t.IsInteger() && t.Size() == 4 && isFixed32(config, sym, off)
13445 // result: (Const32 [fixed32(config, sym, off)])
13448 if v_0.Op != OpOffPtr {
13451 off := auxIntToInt64(v_0.AuxInt)
13452 v_0_0 := v_0.Args[0]
13453 if v_0_0.Op != OpITab {
13456 v_0_0_0 := v_0_0.Args[0]
13457 if v_0_0_0.Op != OpIMake {
13460 v_0_0_0_0 := v_0_0_0.Args[0]
13461 if v_0_0_0_0.Op != OpAddr {
13464 sym := auxToSym(v_0_0_0_0.Aux)
13465 if !(t.IsInteger() && t.Size() == 4 && isFixed32(config, sym, off)) {
13469 v.AuxInt = int32ToAuxInt(fixed32(config, sym, off))
13472 // match: (Load <t> (OffPtr [off] (ITab (IMake (Convert (Addr {sym} _) _) _))) _)
13473 // cond: t.IsInteger() && t.Size() == 4 && isFixed32(config, sym, off)
13474 // result: (Const32 [fixed32(config, sym, off)])
13477 if v_0.Op != OpOffPtr {
13480 off := auxIntToInt64(v_0.AuxInt)
13481 v_0_0 := v_0.Args[0]
13482 if v_0_0.Op != OpITab {
13485 v_0_0_0 := v_0_0.Args[0]
13486 if v_0_0_0.Op != OpIMake {
13489 v_0_0_0_0 := v_0_0_0.Args[0]
13490 if v_0_0_0_0.Op != OpConvert {
13493 v_0_0_0_0_0 := v_0_0_0_0.Args[0]
13494 if v_0_0_0_0_0.Op != OpAddr {
13497 sym := auxToSym(v_0_0_0_0_0.Aux)
13498 if !(t.IsInteger() && t.Size() == 4 && isFixed32(config, sym, off)) {
13502 v.AuxInt = int32ToAuxInt(fixed32(config, sym, off))
// rewriteValuegeneric_OpLsh16x16 rewrites Lsh16x16 values: a constant 16-bit
// shift amount is widened to a Const64 (Lsh16x16 -> Lsh16x64), and shifting a
// zero constant yields (Const16 [0]). Generated from _gen/generic.rules; do
// not hand-edit — NOTE(review): regeneration will discard manual changes.
13507 func rewriteValuegeneric_OpLsh16x16(v *Value) bool {
13511 // match: (Lsh16x16 <t> x (Const16 [c]))
13512 // result: (Lsh16x64 x (Const64 <t> [int64(uint16(c))]))
13516 if v_1.Op != OpConst16 {
13519 c := auxIntToInt16(v_1.AuxInt)
13520 v.reset(OpLsh16x64)
13521 v0 := b.NewValue0(v.Pos, OpConst64, t)
13522 v0.AuxInt = int64ToAuxInt(int64(uint16(c)))
13526 // match: (Lsh16x16 (Const16 [0]) _)
13527 // result: (Const16 [0])
13529 if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
13533 v.AuxInt = int16ToAuxInt(0)
// rewriteValuegeneric_OpLsh16x32 rewrites Lsh16x32 values: a constant 32-bit
// shift amount is zero-extended into a Const64 (Lsh16x32 -> Lsh16x64), and a
// zero shiftee folds to (Const16 [0]). Generated from _gen/generic.rules; do
// not hand-edit.
13538 func rewriteValuegeneric_OpLsh16x32(v *Value) bool {
13542 // match: (Lsh16x32 <t> x (Const32 [c]))
13543 // result: (Lsh16x64 x (Const64 <t> [int64(uint32(c))]))
13547 if v_1.Op != OpConst32 {
13550 c := auxIntToInt32(v_1.AuxInt)
13551 v.reset(OpLsh16x64)
13552 v0 := b.NewValue0(v.Pos, OpConst64, t)
13553 v0.AuxInt = int64ToAuxInt(int64(uint32(c)))
13557 // match: (Lsh16x32 (Const16 [0]) _)
13558 // result: (Const16 [0])
13560 if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
13564 v.AuxInt = int16ToAuxInt(0)
// rewriteValuegeneric_OpLsh16x64 rewrites Lsh16x64 values (the canonical
// 16-bit left-shift form). Rules, in match order: fold two constants; drop a
// shift by zero; a zero shiftee or a shift amount >= 16 folds to
// (Const16 [0]); merge nested constant left shifts when c+d does not overflow
// unsigned addition (!uaddOvf); turn a single-use Rsh-then-Lsh by the same
// amount into a mask (And16 with int16(-1) << c); and collapse a
// Lsh/Rsh/Lsh constant chain into one shift by c1-c2+c3. Generated from
// _gen/generic.rules; do not hand-edit.
13569 func rewriteValuegeneric_OpLsh16x64(v *Value) bool {
13573 typ := &b.Func.Config.Types
13574 // match: (Lsh16x64 (Const16 [c]) (Const64 [d]))
13575 // result: (Const16 [c << uint64(d)])
13577 if v_0.Op != OpConst16 {
13580 c := auxIntToInt16(v_0.AuxInt)
13581 if v_1.Op != OpConst64 {
13584 d := auxIntToInt64(v_1.AuxInt)
13586 v.AuxInt = int16ToAuxInt(c << uint64(d))
13589 // match: (Lsh16x64 x (Const64 [0]))
13593 if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 {
13599 // match: (Lsh16x64 (Const16 [0]) _)
13600 // result: (Const16 [0])
13602 if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
13606 v.AuxInt = int16ToAuxInt(0)
13609 // match: (Lsh16x64 _ (Const64 [c]))
13610 // cond: uint64(c) >= 16
13611 // result: (Const16 [0])
13613 if v_1.Op != OpConst64 {
13616 c := auxIntToInt64(v_1.AuxInt)
13617 if !(uint64(c) >= 16) {
13621 v.AuxInt = int16ToAuxInt(0)
13624 // match: (Lsh16x64 <t> (Lsh16x64 x (Const64 [c])) (Const64 [d]))
13625 // cond: !uaddOvf(c,d)
13626 // result: (Lsh16x64 x (Const64 <t> [c+d]))
13629 if v_0.Op != OpLsh16x64 {
13634 v_0_1 := v_0.Args[1]
13635 if v_0_1.Op != OpConst64 {
13638 c := auxIntToInt64(v_0_1.AuxInt)
13639 if v_1.Op != OpConst64 {
13642 d := auxIntToInt64(v_1.AuxInt)
13643 if !(!uaddOvf(c, d)) {
13646 v.reset(OpLsh16x64)
13647 v0 := b.NewValue0(v.Pos, OpConst64, t)
13648 v0.AuxInt = int64ToAuxInt(c + d)
13652 // match: (Lsh16x64 i:(Rsh16x64 x (Const64 [c])) (Const64 [c]))
13653 // cond: c >= 0 && c < 16 && i.Uses == 1
13654 // result: (And16 x (Const16 <v.Type> [int16(-1) << c]))
13657 if i.Op != OpRsh16x64 {
13663 if i_1.Op != OpConst64 {
13666 c := auxIntToInt64(i_1.AuxInt)
13667 if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != c || !(c >= 0 && c < 16 && i.Uses == 1) {
13671 v0 := b.NewValue0(v.Pos, OpConst16, v.Type)
13672 v0.AuxInt = int16ToAuxInt(int16(-1) << c)
13676 // match: (Lsh16x64 i:(Rsh16Ux64 x (Const64 [c])) (Const64 [c]))
13677 // cond: c >= 0 && c < 16 && i.Uses == 1
13678 // result: (And16 x (Const16 <v.Type> [int16(-1) << c]))
13681 if i.Op != OpRsh16Ux64 {
13687 if i_1.Op != OpConst64 {
13690 c := auxIntToInt64(i_1.AuxInt)
13691 if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != c || !(c >= 0 && c < 16 && i.Uses == 1) {
13695 v0 := b.NewValue0(v.Pos, OpConst16, v.Type)
13696 v0.AuxInt = int16ToAuxInt(int16(-1) << c)
13700 // match: (Lsh16x64 (Rsh16Ux64 (Lsh16x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
13701 // cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
13702 // result: (Lsh16x64 x (Const64 <typ.UInt64> [c1-c2+c3]))
13704 if v_0.Op != OpRsh16Ux64 {
13708 v_0_0 := v_0.Args[0]
13709 if v_0_0.Op != OpLsh16x64 {
13714 v_0_0_1 := v_0_0.Args[1]
13715 if v_0_0_1.Op != OpConst64 {
13718 c1 := auxIntToInt64(v_0_0_1.AuxInt)
13719 v_0_1 := v_0.Args[1]
13720 if v_0_1.Op != OpConst64 {
13723 c2 := auxIntToInt64(v_0_1.AuxInt)
13724 if v_1.Op != OpConst64 {
13727 c3 := auxIntToInt64(v_1.AuxInt)
13728 if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
13731 v.reset(OpLsh16x64)
13732 v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
13733 v0.AuxInt = int64ToAuxInt(c1 - c2 + c3)
// rewriteValuegeneric_OpLsh16x8 rewrites Lsh16x8 values: a constant 8-bit
// shift amount is zero-extended into a Const64 (Lsh16x8 -> Lsh16x64), and a
// zero shiftee folds to (Const16 [0]). Generated from _gen/generic.rules; do
// not hand-edit.
13739 func rewriteValuegeneric_OpLsh16x8(v *Value) bool {
13743 // match: (Lsh16x8 <t> x (Const8 [c]))
13744 // result: (Lsh16x64 x (Const64 <t> [int64(uint8(c))]))
13748 if v_1.Op != OpConst8 {
13751 c := auxIntToInt8(v_1.AuxInt)
13752 v.reset(OpLsh16x64)
13753 v0 := b.NewValue0(v.Pos, OpConst64, t)
13754 v0.AuxInt = int64ToAuxInt(int64(uint8(c)))
13758 // match: (Lsh16x8 (Const16 [0]) _)
13759 // result: (Const16 [0])
13761 if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
13765 v.AuxInt = int16ToAuxInt(0)
// rewriteValuegeneric_OpLsh32x16 rewrites Lsh32x16 values: a constant 16-bit
// shift amount is zero-extended into a Const64 (Lsh32x16 -> Lsh32x64), and a
// zero shiftee folds to (Const32 [0]). Generated from _gen/generic.rules; do
// not hand-edit.
13770 func rewriteValuegeneric_OpLsh32x16(v *Value) bool {
13774 // match: (Lsh32x16 <t> x (Const16 [c]))
13775 // result: (Lsh32x64 x (Const64 <t> [int64(uint16(c))]))
13779 if v_1.Op != OpConst16 {
13782 c := auxIntToInt16(v_1.AuxInt)
13783 v.reset(OpLsh32x64)
13784 v0 := b.NewValue0(v.Pos, OpConst64, t)
13785 v0.AuxInt = int64ToAuxInt(int64(uint16(c)))
13789 // match: (Lsh32x16 (Const32 [0]) _)
13790 // result: (Const32 [0])
13792 if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
13796 v.AuxInt = int32ToAuxInt(0)
// rewriteValuegeneric_OpLsh32x32 rewrites Lsh32x32 values: a constant 32-bit
// shift amount is zero-extended into a Const64 (Lsh32x32 -> Lsh32x64), and a
// zero shiftee folds to (Const32 [0]). Generated from _gen/generic.rules; do
// not hand-edit.
13801 func rewriteValuegeneric_OpLsh32x32(v *Value) bool {
13805 // match: (Lsh32x32 <t> x (Const32 [c]))
13806 // result: (Lsh32x64 x (Const64 <t> [int64(uint32(c))]))
13810 if v_1.Op != OpConst32 {
13813 c := auxIntToInt32(v_1.AuxInt)
13814 v.reset(OpLsh32x64)
13815 v0 := b.NewValue0(v.Pos, OpConst64, t)
13816 v0.AuxInt = int64ToAuxInt(int64(uint32(c)))
13820 // match: (Lsh32x32 (Const32 [0]) _)
13821 // result: (Const32 [0])
13823 if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
13827 v.AuxInt = int32ToAuxInt(0)
// rewriteValuegeneric_OpLsh32x64 rewrites Lsh32x64 values (the canonical
// 32-bit left-shift form). Rules, in match order: fold two constants; drop a
// shift by zero; a zero shiftee or a shift amount >= 32 folds to
// (Const32 [0]); merge nested constant left shifts when c+d does not overflow
// unsigned addition (!uaddOvf); turn a single-use Rsh-then-Lsh by the same
// amount into a mask (And32 with int32(-1) << c); and collapse a
// Lsh/Rsh/Lsh constant chain into one shift by c1-c2+c3. Generated from
// _gen/generic.rules; do not hand-edit.
13832 func rewriteValuegeneric_OpLsh32x64(v *Value) bool {
13836 typ := &b.Func.Config.Types
13837 // match: (Lsh32x64 (Const32 [c]) (Const64 [d]))
13838 // result: (Const32 [c << uint64(d)])
13840 if v_0.Op != OpConst32 {
13843 c := auxIntToInt32(v_0.AuxInt)
13844 if v_1.Op != OpConst64 {
13847 d := auxIntToInt64(v_1.AuxInt)
13849 v.AuxInt = int32ToAuxInt(c << uint64(d))
13852 // match: (Lsh32x64 x (Const64 [0]))
13856 if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 {
13862 // match: (Lsh32x64 (Const32 [0]) _)
13863 // result: (Const32 [0])
13865 if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
13869 v.AuxInt = int32ToAuxInt(0)
13872 // match: (Lsh32x64 _ (Const64 [c]))
13873 // cond: uint64(c) >= 32
13874 // result: (Const32 [0])
13876 if v_1.Op != OpConst64 {
13879 c := auxIntToInt64(v_1.AuxInt)
13880 if !(uint64(c) >= 32) {
13884 v.AuxInt = int32ToAuxInt(0)
13887 // match: (Lsh32x64 <t> (Lsh32x64 x (Const64 [c])) (Const64 [d]))
13888 // cond: !uaddOvf(c,d)
13889 // result: (Lsh32x64 x (Const64 <t> [c+d]))
13892 if v_0.Op != OpLsh32x64 {
13897 v_0_1 := v_0.Args[1]
13898 if v_0_1.Op != OpConst64 {
13901 c := auxIntToInt64(v_0_1.AuxInt)
13902 if v_1.Op != OpConst64 {
13905 d := auxIntToInt64(v_1.AuxInt)
13906 if !(!uaddOvf(c, d)) {
13909 v.reset(OpLsh32x64)
13910 v0 := b.NewValue0(v.Pos, OpConst64, t)
13911 v0.AuxInt = int64ToAuxInt(c + d)
13915 // match: (Lsh32x64 i:(Rsh32x64 x (Const64 [c])) (Const64 [c]))
13916 // cond: c >= 0 && c < 32 && i.Uses == 1
13917 // result: (And32 x (Const32 <v.Type> [int32(-1) << c]))
13920 if i.Op != OpRsh32x64 {
13926 if i_1.Op != OpConst64 {
13929 c := auxIntToInt64(i_1.AuxInt)
13930 if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != c || !(c >= 0 && c < 32 && i.Uses == 1) {
13934 v0 := b.NewValue0(v.Pos, OpConst32, v.Type)
13935 v0.AuxInt = int32ToAuxInt(int32(-1) << c)
13939 // match: (Lsh32x64 i:(Rsh32Ux64 x (Const64 [c])) (Const64 [c]))
13940 // cond: c >= 0 && c < 32 && i.Uses == 1
13941 // result: (And32 x (Const32 <v.Type> [int32(-1) << c]))
13944 if i.Op != OpRsh32Ux64 {
13950 if i_1.Op != OpConst64 {
13953 c := auxIntToInt64(i_1.AuxInt)
13954 if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != c || !(c >= 0 && c < 32 && i.Uses == 1) {
13958 v0 := b.NewValue0(v.Pos, OpConst32, v.Type)
13959 v0.AuxInt = int32ToAuxInt(int32(-1) << c)
13963 // match: (Lsh32x64 (Rsh32Ux64 (Lsh32x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
13964 // cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
13965 // result: (Lsh32x64 x (Const64 <typ.UInt64> [c1-c2+c3]))
13967 if v_0.Op != OpRsh32Ux64 {
13971 v_0_0 := v_0.Args[0]
13972 if v_0_0.Op != OpLsh32x64 {
13977 v_0_0_1 := v_0_0.Args[1]
13978 if v_0_0_1.Op != OpConst64 {
13981 c1 := auxIntToInt64(v_0_0_1.AuxInt)
13982 v_0_1 := v_0.Args[1]
13983 if v_0_1.Op != OpConst64 {
13986 c2 := auxIntToInt64(v_0_1.AuxInt)
13987 if v_1.Op != OpConst64 {
13990 c3 := auxIntToInt64(v_1.AuxInt)
13991 if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
13994 v.reset(OpLsh32x64)
13995 v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
13996 v0.AuxInt = int64ToAuxInt(c1 - c2 + c3)
// rewriteValuegeneric_OpLsh32x8 rewrites Lsh32x8 values: a constant 8-bit
// shift amount is zero-extended into a Const64 (Lsh32x8 -> Lsh32x64), and a
// zero shiftee folds to (Const32 [0]). Generated from _gen/generic.rules; do
// not hand-edit.
14002 func rewriteValuegeneric_OpLsh32x8(v *Value) bool {
14006 // match: (Lsh32x8 <t> x (Const8 [c]))
14007 // result: (Lsh32x64 x (Const64 <t> [int64(uint8(c))]))
14011 if v_1.Op != OpConst8 {
14014 c := auxIntToInt8(v_1.AuxInt)
14015 v.reset(OpLsh32x64)
14016 v0 := b.NewValue0(v.Pos, OpConst64, t)
14017 v0.AuxInt = int64ToAuxInt(int64(uint8(c)))
14021 // match: (Lsh32x8 (Const32 [0]) _)
14022 // result: (Const32 [0])
14024 if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
14028 v.AuxInt = int32ToAuxInt(0)
// rewriteValuegeneric_OpLsh64x16 rewrites Lsh64x16 values: a constant 16-bit
// shift amount is zero-extended into a Const64 (Lsh64x16 -> Lsh64x64), and a
// zero shiftee folds to (Const64 [0]). Generated from _gen/generic.rules; do
// not hand-edit.
14033 func rewriteValuegeneric_OpLsh64x16(v *Value) bool {
14037 // match: (Lsh64x16 <t> x (Const16 [c]))
14038 // result: (Lsh64x64 x (Const64 <t> [int64(uint16(c))]))
14042 if v_1.Op != OpConst16 {
14045 c := auxIntToInt16(v_1.AuxInt)
14046 v.reset(OpLsh64x64)
14047 v0 := b.NewValue0(v.Pos, OpConst64, t)
14048 v0.AuxInt = int64ToAuxInt(int64(uint16(c)))
14052 // match: (Lsh64x16 (Const64 [0]) _)
14053 // result: (Const64 [0])
14055 if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
14059 v.AuxInt = int64ToAuxInt(0)
// rewriteValuegeneric_OpLsh64x32 rewrites Lsh64x32 values: a constant 32-bit
// shift amount is zero-extended into a Const64 (Lsh64x32 -> Lsh64x64), and a
// zero shiftee folds to (Const64 [0]). Generated from _gen/generic.rules; do
// not hand-edit.
14064 func rewriteValuegeneric_OpLsh64x32(v *Value) bool {
14068 // match: (Lsh64x32 <t> x (Const32 [c]))
14069 // result: (Lsh64x64 x (Const64 <t> [int64(uint32(c))]))
14073 if v_1.Op != OpConst32 {
14076 c := auxIntToInt32(v_1.AuxInt)
14077 v.reset(OpLsh64x64)
14078 v0 := b.NewValue0(v.Pos, OpConst64, t)
14079 v0.AuxInt = int64ToAuxInt(int64(uint32(c)))
14083 // match: (Lsh64x32 (Const64 [0]) _)
14084 // result: (Const64 [0])
14086 if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
14090 v.AuxInt = int64ToAuxInt(0)
// rewriteValuegeneric_OpLsh64x64 rewrites Lsh64x64 values (the canonical
// 64-bit left-shift form). Rules, in match order: fold two constants; drop a
// shift by zero; a zero shiftee or a shift amount >= 64 folds to
// (Const64 [0]); merge nested constant left shifts when c+d does not overflow
// unsigned addition (!uaddOvf); turn a single-use Rsh-then-Lsh by the same
// amount into a mask (And64 with int64(-1) << c); and collapse a
// Lsh/Rsh/Lsh constant chain into one shift by c1-c2+c3. Generated from
// _gen/generic.rules; do not hand-edit.
14095 func rewriteValuegeneric_OpLsh64x64(v *Value) bool {
14099 typ := &b.Func.Config.Types
14100 // match: (Lsh64x64 (Const64 [c]) (Const64 [d]))
14101 // result: (Const64 [c << uint64(d)])
14103 if v_0.Op != OpConst64 {
14106 c := auxIntToInt64(v_0.AuxInt)
14107 if v_1.Op != OpConst64 {
14110 d := auxIntToInt64(v_1.AuxInt)
14112 v.AuxInt = int64ToAuxInt(c << uint64(d))
14115 // match: (Lsh64x64 x (Const64 [0]))
14119 if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 {
14125 // match: (Lsh64x64 (Const64 [0]) _)
14126 // result: (Const64 [0])
14128 if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
14132 v.AuxInt = int64ToAuxInt(0)
14135 // match: (Lsh64x64 _ (Const64 [c]))
14136 // cond: uint64(c) >= 64
14137 // result: (Const64 [0])
14139 if v_1.Op != OpConst64 {
14142 c := auxIntToInt64(v_1.AuxInt)
14143 if !(uint64(c) >= 64) {
14147 v.AuxInt = int64ToAuxInt(0)
14150 // match: (Lsh64x64 <t> (Lsh64x64 x (Const64 [c])) (Const64 [d]))
14151 // cond: !uaddOvf(c,d)
14152 // result: (Lsh64x64 x (Const64 <t> [c+d]))
14155 if v_0.Op != OpLsh64x64 {
14160 v_0_1 := v_0.Args[1]
14161 if v_0_1.Op != OpConst64 {
14164 c := auxIntToInt64(v_0_1.AuxInt)
14165 if v_1.Op != OpConst64 {
14168 d := auxIntToInt64(v_1.AuxInt)
14169 if !(!uaddOvf(c, d)) {
14172 v.reset(OpLsh64x64)
14173 v0 := b.NewValue0(v.Pos, OpConst64, t)
14174 v0.AuxInt = int64ToAuxInt(c + d)
14178 // match: (Lsh64x64 i:(Rsh64x64 x (Const64 [c])) (Const64 [c]))
14179 // cond: c >= 0 && c < 64 && i.Uses == 1
14180 // result: (And64 x (Const64 <v.Type> [int64(-1) << c]))
14183 if i.Op != OpRsh64x64 {
14189 if i_1.Op != OpConst64 {
14192 c := auxIntToInt64(i_1.AuxInt)
14193 if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != c || !(c >= 0 && c < 64 && i.Uses == 1) {
14197 v0 := b.NewValue0(v.Pos, OpConst64, v.Type)
14198 v0.AuxInt = int64ToAuxInt(int64(-1) << c)
14202 // match: (Lsh64x64 i:(Rsh64Ux64 x (Const64 [c])) (Const64 [c]))
14203 // cond: c >= 0 && c < 64 && i.Uses == 1
14204 // result: (And64 x (Const64 <v.Type> [int64(-1) << c]))
14207 if i.Op != OpRsh64Ux64 {
14213 if i_1.Op != OpConst64 {
14216 c := auxIntToInt64(i_1.AuxInt)
14217 if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != c || !(c >= 0 && c < 64 && i.Uses == 1) {
14221 v0 := b.NewValue0(v.Pos, OpConst64, v.Type)
14222 v0.AuxInt = int64ToAuxInt(int64(-1) << c)
14226 // match: (Lsh64x64 (Rsh64Ux64 (Lsh64x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
14227 // cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
14228 // result: (Lsh64x64 x (Const64 <typ.UInt64> [c1-c2+c3]))
14230 if v_0.Op != OpRsh64Ux64 {
14234 v_0_0 := v_0.Args[0]
14235 if v_0_0.Op != OpLsh64x64 {
14240 v_0_0_1 := v_0_0.Args[1]
14241 if v_0_0_1.Op != OpConst64 {
14244 c1 := auxIntToInt64(v_0_0_1.AuxInt)
14245 v_0_1 := v_0.Args[1]
14246 if v_0_1.Op != OpConst64 {
14249 c2 := auxIntToInt64(v_0_1.AuxInt)
14250 if v_1.Op != OpConst64 {
14253 c3 := auxIntToInt64(v_1.AuxInt)
14254 if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
14257 v.reset(OpLsh64x64)
14258 v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
14259 v0.AuxInt = int64ToAuxInt(c1 - c2 + c3)
// rewriteValuegeneric_OpLsh64x8 rewrites Lsh64x8 values: a constant 8-bit
// shift amount is zero-extended into a Const64 (Lsh64x8 -> Lsh64x64), and a
// zero shiftee folds to (Const64 [0]). Generated from _gen/generic.rules; do
// not hand-edit.
14265 func rewriteValuegeneric_OpLsh64x8(v *Value) bool {
14269 // match: (Lsh64x8 <t> x (Const8 [c]))
14270 // result: (Lsh64x64 x (Const64 <t> [int64(uint8(c))]))
14274 if v_1.Op != OpConst8 {
14277 c := auxIntToInt8(v_1.AuxInt)
14278 v.reset(OpLsh64x64)
14279 v0 := b.NewValue0(v.Pos, OpConst64, t)
14280 v0.AuxInt = int64ToAuxInt(int64(uint8(c)))
14284 // match: (Lsh64x8 (Const64 [0]) _)
14285 // result: (Const64 [0])
14287 if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
14291 v.AuxInt = int64ToAuxInt(0)
// rewriteValuegeneric_OpLsh8x16 rewrites Lsh8x16 values: a constant 16-bit
// shift amount is zero-extended into a Const64 (Lsh8x16 -> Lsh8x64), and a
// zero shiftee folds to (Const8 [0]). Generated from _gen/generic.rules; do
// not hand-edit.
14296 func rewriteValuegeneric_OpLsh8x16(v *Value) bool {
14300 // match: (Lsh8x16 <t> x (Const16 [c]))
14301 // result: (Lsh8x64 x (Const64 <t> [int64(uint16(c))]))
14305 if v_1.Op != OpConst16 {
14308 c := auxIntToInt16(v_1.AuxInt)
14310 v0 := b.NewValue0(v.Pos, OpConst64, t)
14311 v0.AuxInt = int64ToAuxInt(int64(uint16(c)))
14315 // match: (Lsh8x16 (Const8 [0]) _)
14316 // result: (Const8 [0])
14318 if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
14322 v.AuxInt = int8ToAuxInt(0)
// rewriteValuegeneric_OpLsh8x32 rewrites Lsh8x32 values: a constant 32-bit
// shift amount is zero-extended into a Const64 (Lsh8x32 -> Lsh8x64), and a
// zero shiftee folds to (Const8 [0]). Generated from _gen/generic.rules; do
// not hand-edit.
14327 func rewriteValuegeneric_OpLsh8x32(v *Value) bool {
14331 // match: (Lsh8x32 <t> x (Const32 [c]))
14332 // result: (Lsh8x64 x (Const64 <t> [int64(uint32(c))]))
14336 if v_1.Op != OpConst32 {
14339 c := auxIntToInt32(v_1.AuxInt)
14341 v0 := b.NewValue0(v.Pos, OpConst64, t)
14342 v0.AuxInt = int64ToAuxInt(int64(uint32(c)))
14346 // match: (Lsh8x32 (Const8 [0]) _)
14347 // result: (Const8 [0])
14349 if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
14353 v.AuxInt = int8ToAuxInt(0)
// rewriteValuegeneric_OpLsh8x64 rewrites Lsh8x64 values (the canonical 8-bit
// left-shift form). Rules, in match order: fold two constants; drop a shift
// by zero; a zero shiftee or a shift amount >= 8 folds to (Const8 [0]);
// merge nested constant left shifts when c+d does not overflow unsigned
// addition (!uaddOvf); turn a single-use Rsh-then-Lsh by the same amount into
// a mask (And8 with int8(-1) << c); and collapse a Lsh/Rsh/Lsh constant chain
// into one shift by c1-c2+c3. Generated from _gen/generic.rules; do not
// hand-edit.
14358 func rewriteValuegeneric_OpLsh8x64(v *Value) bool {
14362 typ := &b.Func.Config.Types
14363 // match: (Lsh8x64 (Const8 [c]) (Const64 [d]))
14364 // result: (Const8 [c << uint64(d)])
14366 if v_0.Op != OpConst8 {
14369 c := auxIntToInt8(v_0.AuxInt)
14370 if v_1.Op != OpConst64 {
14373 d := auxIntToInt64(v_1.AuxInt)
14375 v.AuxInt = int8ToAuxInt(c << uint64(d))
14378 // match: (Lsh8x64 x (Const64 [0]))
14382 if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 {
14388 // match: (Lsh8x64 (Const8 [0]) _)
14389 // result: (Const8 [0])
14391 if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
14395 v.AuxInt = int8ToAuxInt(0)
14398 // match: (Lsh8x64 _ (Const64 [c]))
14399 // cond: uint64(c) >= 8
14400 // result: (Const8 [0])
14402 if v_1.Op != OpConst64 {
14405 c := auxIntToInt64(v_1.AuxInt)
14406 if !(uint64(c) >= 8) {
14410 v.AuxInt = int8ToAuxInt(0)
14413 // match: (Lsh8x64 <t> (Lsh8x64 x (Const64 [c])) (Const64 [d]))
14414 // cond: !uaddOvf(c,d)
14415 // result: (Lsh8x64 x (Const64 <t> [c+d]))
14418 if v_0.Op != OpLsh8x64 {
14423 v_0_1 := v_0.Args[1]
14424 if v_0_1.Op != OpConst64 {
14427 c := auxIntToInt64(v_0_1.AuxInt)
14428 if v_1.Op != OpConst64 {
14431 d := auxIntToInt64(v_1.AuxInt)
14432 if !(!uaddOvf(c, d)) {
14436 v0 := b.NewValue0(v.Pos, OpConst64, t)
14437 v0.AuxInt = int64ToAuxInt(c + d)
14441 // match: (Lsh8x64 i:(Rsh8x64 x (Const64 [c])) (Const64 [c]))
14442 // cond: c >= 0 && c < 8 && i.Uses == 1
14443 // result: (And8 x (Const8 <v.Type> [int8(-1) << c]))
14446 if i.Op != OpRsh8x64 {
14452 if i_1.Op != OpConst64 {
14455 c := auxIntToInt64(i_1.AuxInt)
14456 if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != c || !(c >= 0 && c < 8 && i.Uses == 1) {
14460 v0 := b.NewValue0(v.Pos, OpConst8, v.Type)
14461 v0.AuxInt = int8ToAuxInt(int8(-1) << c)
14465 // match: (Lsh8x64 i:(Rsh8Ux64 x (Const64 [c])) (Const64 [c]))
14466 // cond: c >= 0 && c < 8 && i.Uses == 1
14467 // result: (And8 x (Const8 <v.Type> [int8(-1) << c]))
14470 if i.Op != OpRsh8Ux64 {
14476 if i_1.Op != OpConst64 {
14479 c := auxIntToInt64(i_1.AuxInt)
14480 if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != c || !(c >= 0 && c < 8 && i.Uses == 1) {
14484 v0 := b.NewValue0(v.Pos, OpConst8, v.Type)
14485 v0.AuxInt = int8ToAuxInt(int8(-1) << c)
14489 // match: (Lsh8x64 (Rsh8Ux64 (Lsh8x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
14490 // cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
14491 // result: (Lsh8x64 x (Const64 <typ.UInt64> [c1-c2+c3]))
14493 if v_0.Op != OpRsh8Ux64 {
14497 v_0_0 := v_0.Args[0]
14498 if v_0_0.Op != OpLsh8x64 {
14503 v_0_0_1 := v_0_0.Args[1]
14504 if v_0_0_1.Op != OpConst64 {
14507 c1 := auxIntToInt64(v_0_0_1.AuxInt)
14508 v_0_1 := v_0.Args[1]
14509 if v_0_1.Op != OpConst64 {
14512 c2 := auxIntToInt64(v_0_1.AuxInt)
14513 if v_1.Op != OpConst64 {
14516 c3 := auxIntToInt64(v_1.AuxInt)
14517 if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
14521 v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
14522 v0.AuxInt = int64ToAuxInt(c1 - c2 + c3)
// rewriteValuegeneric_OpLsh8x8 applies the generic.rules rewrites for
// Lsh8x8: the Const8 shift amount is widened to a Const64 (so later
// rules only need to handle 64-bit shift counts), and shifting a
// Const8 [0] left folds to Const8 [0].
// NOTE(review): machine-generated from _gen/generic.rules — change the
// rules file and re-run 'go generate' rather than editing this body.
14528 func rewriteValuegeneric_OpLsh8x8(v *Value) bool {
14532 // match: (Lsh8x8 <t> x (Const8 [c]))
14533 // result: (Lsh8x64 x (Const64 <t> [int64(uint8(c))]))
14537 if v_1.Op != OpConst8 {
14540 c := auxIntToInt8(v_1.AuxInt)
// Shift count is zero-extended (uint8) before widening to int64.
14542 v0 := b.NewValue0(v.Pos, OpConst64, t)
14543 v0.AuxInt = int64ToAuxInt(int64(uint8(c)))
14547 // match: (Lsh8x8 (Const8 [0]) _)
14548 // result: (Const8 [0])
14550 if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
14554 v.AuxInt = int8ToAuxInt(0)
// rewriteValuegeneric_OpMod16 applies the generic.rules rewrites for
// Mod16 (signed 16-bit modulus): constant folding; reduction to And16
// with c-1 for a power-of-two divisor when n is known non-negative;
// negation of a safely negatable negative divisor; and the general
// lowering x % c -> x - (x/c)*c.
// NOTE(review): machine-generated from _gen/generic.rules — change the
// rules file and re-run 'go generate' rather than editing this body.
14559 func rewriteValuegeneric_OpMod16(v *Value) bool {
14563 // match: (Mod16 (Const16 [c]) (Const16 [d]))
14565 // result: (Const16 [c % d])
14567 if v_0.Op != OpConst16 {
14570 c := auxIntToInt16(v_0.AuxInt)
14571 if v_1.Op != OpConst16 {
14574 d := auxIntToInt16(v_1.AuxInt)
14579 v.AuxInt = int16ToAuxInt(c % d)
14582 // match: (Mod16 <t> n (Const16 [c]))
14583 // cond: isNonNegative(n) && isPowerOfTwo16(c)
14584 // result: (And16 n (Const16 <t> [c-1]))
14588 if v_1.Op != OpConst16 {
14591 c := auxIntToInt16(v_1.AuxInt)
14592 if !(isNonNegative(n) && isPowerOfTwo16(c)) {
14596 v0 := b.NewValue0(v.Pos, OpConst16, t)
14597 v0.AuxInt = int16ToAuxInt(c - 1)
// The minimum value -1<<15 is excluded below because -c would overflow.
14601 // match: (Mod16 <t> n (Const16 [c]))
14602 // cond: c < 0 && c != -1<<15
14603 // result: (Mod16 <t> n (Const16 <t> [-c]))
14607 if v_1.Op != OpConst16 {
14610 c := auxIntToInt16(v_1.AuxInt)
14611 if !(c < 0 && c != -1<<15) {
14616 v0 := b.NewValue0(v.Pos, OpConst16, t)
14617 v0.AuxInt = int16ToAuxInt(-c)
14621 // match: (Mod16 <t> x (Const16 [c]))
14622 // cond: x.Op != OpConst16 && (c > 0 || c == -1<<15)
14623 // result: (Sub16 x (Mul16 <t> (Div16 <t> x (Const16 <t> [c])) (Const16 <t> [c])))
14627 if v_1.Op != OpConst16 {
14630 c := auxIntToInt16(v_1.AuxInt)
14631 if !(x.Op != OpConst16 && (c > 0 || c == -1<<15)) {
14635 v0 := b.NewValue0(v.Pos, OpMul16, t)
14636 v1 := b.NewValue0(v.Pos, OpDiv16, t)
14637 v2 := b.NewValue0(v.Pos, OpConst16, t)
14638 v2.AuxInt = int16ToAuxInt(c)
// rewriteValuegeneric_OpMod16u applies the generic.rules rewrites for
// Mod16u (unsigned 16-bit modulus): unsigned constant folding;
// reduction to And16 with c-1 for any power-of-two divisor; and the
// general lowering x % c -> x - (x/c)*c via Div16u when the divisor
// qualifies for magic-number division (umagicOK16).
// NOTE(review): machine-generated from _gen/generic.rules — change the
// rules file and re-run 'go generate' rather than editing this body.
14646 func rewriteValuegeneric_OpMod16u(v *Value) bool {
14650 // match: (Mod16u (Const16 [c]) (Const16 [d]))
14652 // result: (Const16 [int16(uint16(c) % uint16(d))])
14654 if v_0.Op != OpConst16 {
14657 c := auxIntToInt16(v_0.AuxInt)
14658 if v_1.Op != OpConst16 {
14661 d := auxIntToInt16(v_1.AuxInt)
14666 v.AuxInt = int16ToAuxInt(int16(uint16(c) % uint16(d)))
14669 // match: (Mod16u <t> n (Const16 [c]))
14670 // cond: isPowerOfTwo16(c)
14671 // result: (And16 n (Const16 <t> [c-1]))
14675 if v_1.Op != OpConst16 {
14678 c := auxIntToInt16(v_1.AuxInt)
14679 if !(isPowerOfTwo16(c)) {
14683 v0 := b.NewValue0(v.Pos, OpConst16, t)
14684 v0.AuxInt = int16ToAuxInt(c - 1)
14688 // match: (Mod16u <t> x (Const16 [c]))
14689 // cond: x.Op != OpConst16 && c > 0 && umagicOK16(c)
14690 // result: (Sub16 x (Mul16 <t> (Div16u <t> x (Const16 <t> [c])) (Const16 <t> [c])))
14694 if v_1.Op != OpConst16 {
14697 c := auxIntToInt16(v_1.AuxInt)
14698 if !(x.Op != OpConst16 && c > 0 && umagicOK16(c)) {
14702 v0 := b.NewValue0(v.Pos, OpMul16, t)
14703 v1 := b.NewValue0(v.Pos, OpDiv16u, t)
14704 v2 := b.NewValue0(v.Pos, OpConst16, t)
14705 v2.AuxInt = int16ToAuxInt(c)
// rewriteValuegeneric_OpMod32 applies the generic.rules rewrites for
// Mod32 (signed 32-bit modulus): constant folding; reduction to And32
// with c-1 for a power-of-two divisor when n is known non-negative;
// negation of a safely negatable negative divisor; and the general
// lowering x % c -> x - (x/c)*c.
// NOTE(review): machine-generated from _gen/generic.rules — change the
// rules file and re-run 'go generate' rather than editing this body.
14713 func rewriteValuegeneric_OpMod32(v *Value) bool {
14717 // match: (Mod32 (Const32 [c]) (Const32 [d]))
14719 // result: (Const32 [c % d])
14721 if v_0.Op != OpConst32 {
14724 c := auxIntToInt32(v_0.AuxInt)
14725 if v_1.Op != OpConst32 {
14728 d := auxIntToInt32(v_1.AuxInt)
14733 v.AuxInt = int32ToAuxInt(c % d)
14736 // match: (Mod32 <t> n (Const32 [c]))
14737 // cond: isNonNegative(n) && isPowerOfTwo32(c)
14738 // result: (And32 n (Const32 <t> [c-1]))
14742 if v_1.Op != OpConst32 {
14745 c := auxIntToInt32(v_1.AuxInt)
14746 if !(isNonNegative(n) && isPowerOfTwo32(c)) {
14750 v0 := b.NewValue0(v.Pos, OpConst32, t)
14751 v0.AuxInt = int32ToAuxInt(c - 1)
// The minimum value -1<<31 is excluded below because -c would overflow.
14755 // match: (Mod32 <t> n (Const32 [c]))
14756 // cond: c < 0 && c != -1<<31
14757 // result: (Mod32 <t> n (Const32 <t> [-c]))
14761 if v_1.Op != OpConst32 {
14764 c := auxIntToInt32(v_1.AuxInt)
14765 if !(c < 0 && c != -1<<31) {
14770 v0 := b.NewValue0(v.Pos, OpConst32, t)
14771 v0.AuxInt = int32ToAuxInt(-c)
14775 // match: (Mod32 <t> x (Const32 [c]))
14776 // cond: x.Op != OpConst32 && (c > 0 || c == -1<<31)
14777 // result: (Sub32 x (Mul32 <t> (Div32 <t> x (Const32 <t> [c])) (Const32 <t> [c])))
14781 if v_1.Op != OpConst32 {
14784 c := auxIntToInt32(v_1.AuxInt)
14785 if !(x.Op != OpConst32 && (c > 0 || c == -1<<31)) {
14789 v0 := b.NewValue0(v.Pos, OpMul32, t)
14790 v1 := b.NewValue0(v.Pos, OpDiv32, t)
14791 v2 := b.NewValue0(v.Pos, OpConst32, t)
14792 v2.AuxInt = int32ToAuxInt(c)
// rewriteValuegeneric_OpMod32u applies the generic.rules rewrites for
// Mod32u (unsigned 32-bit modulus): unsigned constant folding;
// reduction to And32 with c-1 for any power-of-two divisor; and the
// general lowering x % c -> x - (x/c)*c via Div32u when the divisor
// qualifies for magic-number division (umagicOK32).
// NOTE(review): machine-generated from _gen/generic.rules — change the
// rules file and re-run 'go generate' rather than editing this body.
14800 func rewriteValuegeneric_OpMod32u(v *Value) bool {
14804 // match: (Mod32u (Const32 [c]) (Const32 [d]))
14806 // result: (Const32 [int32(uint32(c) % uint32(d))])
14808 if v_0.Op != OpConst32 {
14811 c := auxIntToInt32(v_0.AuxInt)
14812 if v_1.Op != OpConst32 {
14815 d := auxIntToInt32(v_1.AuxInt)
14820 v.AuxInt = int32ToAuxInt(int32(uint32(c) % uint32(d)))
14823 // match: (Mod32u <t> n (Const32 [c]))
14824 // cond: isPowerOfTwo32(c)
14825 // result: (And32 n (Const32 <t> [c-1]))
14829 if v_1.Op != OpConst32 {
14832 c := auxIntToInt32(v_1.AuxInt)
14833 if !(isPowerOfTwo32(c)) {
14837 v0 := b.NewValue0(v.Pos, OpConst32, t)
14838 v0.AuxInt = int32ToAuxInt(c - 1)
14842 // match: (Mod32u <t> x (Const32 [c]))
14843 // cond: x.Op != OpConst32 && c > 0 && umagicOK32(c)
14844 // result: (Sub32 x (Mul32 <t> (Div32u <t> x (Const32 <t> [c])) (Const32 <t> [c])))
14848 if v_1.Op != OpConst32 {
14851 c := auxIntToInt32(v_1.AuxInt)
14852 if !(x.Op != OpConst32 && c > 0 && umagicOK32(c)) {
14856 v0 := b.NewValue0(v.Pos, OpMul32, t)
14857 v1 := b.NewValue0(v.Pos, OpDiv32u, t)
14858 v2 := b.NewValue0(v.Pos, OpConst32, t)
14859 v2.AuxInt = int32ToAuxInt(c)
// rewriteValuegeneric_OpMod64 applies the generic.rules rewrites for
// Mod64 (signed 64-bit modulus): constant folding; reduction to And64
// with c-1 for a power-of-two divisor when n is known non-negative; a
// dedicated rule for a Const64 [-1<<63] divisor with non-negative n;
// negation of a safely negatable negative divisor; and the general
// lowering x % c -> x - (x/c)*c.
// NOTE(review): machine-generated from _gen/generic.rules — change the
// rules file and re-run 'go generate' rather than editing this body.
14867 func rewriteValuegeneric_OpMod64(v *Value) bool {
14871 // match: (Mod64 (Const64 [c]) (Const64 [d]))
14873 // result: (Const64 [c % d])
14875 if v_0.Op != OpConst64 {
14878 c := auxIntToInt64(v_0.AuxInt)
14879 if v_1.Op != OpConst64 {
14882 d := auxIntToInt64(v_1.AuxInt)
14887 v.AuxInt = int64ToAuxInt(c % d)
14890 // match: (Mod64 <t> n (Const64 [c]))
14891 // cond: isNonNegative(n) && isPowerOfTwo64(c)
14892 // result: (And64 n (Const64 <t> [c-1]))
14896 if v_1.Op != OpConst64 {
14899 c := auxIntToInt64(v_1.AuxInt)
14900 if !(isNonNegative(n) && isPowerOfTwo64(c)) {
14904 v0 := b.NewValue0(v.Pos, OpConst64, t)
14905 v0.AuxInt = int64ToAuxInt(c - 1)
14909 // match: (Mod64 n (Const64 [-1<<63]))
14910 // cond: isNonNegative(n)
14914 if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != -1<<63 || !(isNonNegative(n)) {
// The minimum value -1<<63 is excluded below because -c would overflow.
14920 // match: (Mod64 <t> n (Const64 [c]))
14921 // cond: c < 0 && c != -1<<63
14922 // result: (Mod64 <t> n (Const64 <t> [-c]))
14926 if v_1.Op != OpConst64 {
14929 c := auxIntToInt64(v_1.AuxInt)
14930 if !(c < 0 && c != -1<<63) {
14935 v0 := b.NewValue0(v.Pos, OpConst64, t)
14936 v0.AuxInt = int64ToAuxInt(-c)
14940 // match: (Mod64 <t> x (Const64 [c]))
14941 // cond: x.Op != OpConst64 && (c > 0 || c == -1<<63)
14942 // result: (Sub64 x (Mul64 <t> (Div64 <t> x (Const64 <t> [c])) (Const64 <t> [c])))
14946 if v_1.Op != OpConst64 {
14949 c := auxIntToInt64(v_1.AuxInt)
14950 if !(x.Op != OpConst64 && (c > 0 || c == -1<<63)) {
14954 v0 := b.NewValue0(v.Pos, OpMul64, t)
14955 v1 := b.NewValue0(v.Pos, OpDiv64, t)
14956 v2 := b.NewValue0(v.Pos, OpConst64, t)
14957 v2.AuxInt = int64ToAuxInt(c)
// rewriteValuegeneric_OpMod64u applies the generic.rules rewrites for
// Mod64u (unsigned 64-bit modulus): unsigned constant folding;
// reduction to And64 with c-1 for any power-of-two divisor; a
// dedicated mask rule (And64 with 1<<63-1) for the divisor -1<<63,
// whose unsigned value is the power of two 1<<63; and the general
// lowering x % c -> x - (x/c)*c via Div64u when the divisor qualifies
// for magic-number division (umagicOK64).
// NOTE(review): machine-generated from _gen/generic.rules — change the
// rules file and re-run 'go generate' rather than editing this body.
14965 func rewriteValuegeneric_OpMod64u(v *Value) bool {
14969 // match: (Mod64u (Const64 [c]) (Const64 [d]))
14971 // result: (Const64 [int64(uint64(c) % uint64(d))])
14973 if v_0.Op != OpConst64 {
14976 c := auxIntToInt64(v_0.AuxInt)
14977 if v_1.Op != OpConst64 {
14980 d := auxIntToInt64(v_1.AuxInt)
14985 v.AuxInt = int64ToAuxInt(int64(uint64(c) % uint64(d)))
14988 // match: (Mod64u <t> n (Const64 [c]))
14989 // cond: isPowerOfTwo64(c)
14990 // result: (And64 n (Const64 <t> [c-1]))
14994 if v_1.Op != OpConst64 {
14997 c := auxIntToInt64(v_1.AuxInt)
14998 if !(isPowerOfTwo64(c)) {
15002 v0 := b.NewValue0(v.Pos, OpConst64, t)
15003 v0.AuxInt = int64ToAuxInt(c - 1)
15007 // match: (Mod64u <t> n (Const64 [-1<<63]))
15008 // result: (And64 n (Const64 <t> [1<<63-1]))
15012 if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != -1<<63 {
15016 v0 := b.NewValue0(v.Pos, OpConst64, t)
15017 v0.AuxInt = int64ToAuxInt(1<<63 - 1)
15021 // match: (Mod64u <t> x (Const64 [c]))
15022 // cond: x.Op != OpConst64 && c > 0 && umagicOK64(c)
15023 // result: (Sub64 x (Mul64 <t> (Div64u <t> x (Const64 <t> [c])) (Const64 <t> [c])))
15027 if v_1.Op != OpConst64 {
15030 c := auxIntToInt64(v_1.AuxInt)
15031 if !(x.Op != OpConst64 && c > 0 && umagicOK64(c)) {
15035 v0 := b.NewValue0(v.Pos, OpMul64, t)
15036 v1 := b.NewValue0(v.Pos, OpDiv64u, t)
15037 v2 := b.NewValue0(v.Pos, OpConst64, t)
15038 v2.AuxInt = int64ToAuxInt(c)
// rewriteValuegeneric_OpMod8 applies the generic.rules rewrites for
// Mod8 (signed 8-bit modulus): constant folding; reduction to And8
// with c-1 for a power-of-two divisor when n is known non-negative;
// negation of a safely negatable negative divisor; and the general
// lowering x % c -> x - (x/c)*c.
// NOTE(review): machine-generated from _gen/generic.rules — change the
// rules file and re-run 'go generate' rather than editing this body.
15046 func rewriteValuegeneric_OpMod8(v *Value) bool {
15050 // match: (Mod8 (Const8 [c]) (Const8 [d]))
15052 // result: (Const8 [c % d])
15054 if v_0.Op != OpConst8 {
15057 c := auxIntToInt8(v_0.AuxInt)
15058 if v_1.Op != OpConst8 {
15061 d := auxIntToInt8(v_1.AuxInt)
15066 v.AuxInt = int8ToAuxInt(c % d)
15069 // match: (Mod8 <t> n (Const8 [c]))
15070 // cond: isNonNegative(n) && isPowerOfTwo8(c)
15071 // result: (And8 n (Const8 <t> [c-1]))
15075 if v_1.Op != OpConst8 {
15078 c := auxIntToInt8(v_1.AuxInt)
15079 if !(isNonNegative(n) && isPowerOfTwo8(c)) {
15083 v0 := b.NewValue0(v.Pos, OpConst8, t)
15084 v0.AuxInt = int8ToAuxInt(c - 1)
// The minimum value -1<<7 is excluded below because -c would overflow.
15088 // match: (Mod8 <t> n (Const8 [c]))
15089 // cond: c < 0 && c != -1<<7
15090 // result: (Mod8 <t> n (Const8 <t> [-c]))
15094 if v_1.Op != OpConst8 {
15097 c := auxIntToInt8(v_1.AuxInt)
15098 if !(c < 0 && c != -1<<7) {
15103 v0 := b.NewValue0(v.Pos, OpConst8, t)
15104 v0.AuxInt = int8ToAuxInt(-c)
15108 // match: (Mod8 <t> x (Const8 [c]))
15109 // cond: x.Op != OpConst8 && (c > 0 || c == -1<<7)
15110 // result: (Sub8 x (Mul8 <t> (Div8 <t> x (Const8 <t> [c])) (Const8 <t> [c])))
15114 if v_1.Op != OpConst8 {
15117 c := auxIntToInt8(v_1.AuxInt)
15118 if !(x.Op != OpConst8 && (c > 0 || c == -1<<7)) {
15122 v0 := b.NewValue0(v.Pos, OpMul8, t)
15123 v1 := b.NewValue0(v.Pos, OpDiv8, t)
15124 v2 := b.NewValue0(v.Pos, OpConst8, t)
15125 v2.AuxInt = int8ToAuxInt(c)
// rewriteValuegeneric_OpMod8u applies the generic.rules rewrites for
// Mod8u (unsigned 8-bit modulus): unsigned constant folding; reduction
// to And8 with c-1 for any power-of-two divisor; and the general
// lowering x % c -> x - (x/c)*c via Div8u when the divisor qualifies
// for magic-number division (umagicOK8).
// NOTE(review): machine-generated from _gen/generic.rules — change the
// rules file and re-run 'go generate' rather than editing this body.
15133 func rewriteValuegeneric_OpMod8u(v *Value) bool {
15137 // match: (Mod8u (Const8 [c]) (Const8 [d]))
15139 // result: (Const8 [int8(uint8(c) % uint8(d))])
15141 if v_0.Op != OpConst8 {
15144 c := auxIntToInt8(v_0.AuxInt)
15145 if v_1.Op != OpConst8 {
15148 d := auxIntToInt8(v_1.AuxInt)
15153 v.AuxInt = int8ToAuxInt(int8(uint8(c) % uint8(d)))
15156 // match: (Mod8u <t> n (Const8 [c]))
15157 // cond: isPowerOfTwo8(c)
15158 // result: (And8 n (Const8 <t> [c-1]))
15162 if v_1.Op != OpConst8 {
15165 c := auxIntToInt8(v_1.AuxInt)
15166 if !(isPowerOfTwo8(c)) {
15170 v0 := b.NewValue0(v.Pos, OpConst8, t)
15171 v0.AuxInt = int8ToAuxInt(c - 1)
15175 // match: (Mod8u <t> x (Const8 [c]))
15176 // cond: x.Op != OpConst8 && c > 0 && umagicOK8( c)
15177 // result: (Sub8 x (Mul8 <t> (Div8u <t> x (Const8 <t> [c])) (Const8 <t> [c])))
15181 if v_1.Op != OpConst8 {
15184 c := auxIntToInt8(v_1.AuxInt)
15185 if !(x.Op != OpConst8 && c > 0 && umagicOK8(c)) {
15189 v0 := b.NewValue0(v.Pos, OpMul8, t)
15190 v1 := b.NewValue0(v.Pos, OpDiv8u, t)
15191 v2 := b.NewValue0(v.Pos, OpConst8, t)
15192 v2.AuxInt = int8ToAuxInt(c)
15200 func rewriteValuegeneric_OpMove(v *Value) bool {
15205 config := b.Func.Config
15206 // match: (Move {t} [n] dst1 src mem:(Zero {t} [n] dst2 _))
15207 // cond: isSamePtr(src, dst2)
15208 // result: (Zero {t} [n] dst1 mem)
15210 n := auxIntToInt64(v.AuxInt)
15211 t := auxToType(v.Aux)
15215 if mem.Op != OpZero || auxIntToInt64(mem.AuxInt) != n || auxToType(mem.Aux) != t {
15218 dst2 := mem.Args[0]
15219 if !(isSamePtr(src, dst2)) {
15223 v.AuxInt = int64ToAuxInt(n)
15224 v.Aux = typeToAux(t)
15225 v.AddArg2(dst1, mem)
15228 // match: (Move {t} [n] dst1 src mem:(VarDef (Zero {t} [n] dst0 _)))
15229 // cond: isSamePtr(src, dst0)
15230 // result: (Zero {t} [n] dst1 mem)
15232 n := auxIntToInt64(v.AuxInt)
15233 t := auxToType(v.Aux)
15237 if mem.Op != OpVarDef {
15240 mem_0 := mem.Args[0]
15241 if mem_0.Op != OpZero || auxIntToInt64(mem_0.AuxInt) != n || auxToType(mem_0.Aux) != t {
15244 dst0 := mem_0.Args[0]
15245 if !(isSamePtr(src, dst0)) {
15249 v.AuxInt = int64ToAuxInt(n)
15250 v.Aux = typeToAux(t)
15251 v.AddArg2(dst1, mem)
15254 // match: (Move {t} [n] dst (Addr {sym} (SB)) mem)
15255 // cond: symIsROZero(sym)
15256 // result: (Zero {t} [n] dst mem)
15258 n := auxIntToInt64(v.AuxInt)
15259 t := auxToType(v.Aux)
15261 if v_1.Op != OpAddr {
15264 sym := auxToSym(v_1.Aux)
15265 v_1_0 := v_1.Args[0]
15266 if v_1_0.Op != OpSB {
15270 if !(symIsROZero(sym)) {
15274 v.AuxInt = int64ToAuxInt(n)
15275 v.Aux = typeToAux(t)
15276 v.AddArg2(dst, mem)
15279 // match: (Move {t1} [n] dst1 src1 store:(Store {t2} op:(OffPtr [o2] dst2) _ mem))
15280 // cond: isSamePtr(dst1, dst2) && store.Uses == 1 && n >= o2 + t2.Size() && disjoint(src1, n, op, t2.Size()) && clobber(store)
15281 // result: (Move {t1} [n] dst1 src1 mem)
15283 n := auxIntToInt64(v.AuxInt)
15284 t1 := auxToType(v.Aux)
15288 if store.Op != OpStore {
15291 t2 := auxToType(store.Aux)
15292 mem := store.Args[2]
15293 op := store.Args[0]
15294 if op.Op != OpOffPtr {
15297 o2 := auxIntToInt64(op.AuxInt)
15299 if !(isSamePtr(dst1, dst2) && store.Uses == 1 && n >= o2+t2.Size() && disjoint(src1, n, op, t2.Size()) && clobber(store)) {
15303 v.AuxInt = int64ToAuxInt(n)
15304 v.Aux = typeToAux(t1)
15305 v.AddArg3(dst1, src1, mem)
15308 // match: (Move {t} [n] dst1 src1 move:(Move {t} [n] dst2 _ mem))
15309 // cond: move.Uses == 1 && isSamePtr(dst1, dst2) && disjoint(src1, n, dst2, n) && clobber(move)
15310 // result: (Move {t} [n] dst1 src1 mem)
15312 n := auxIntToInt64(v.AuxInt)
15313 t := auxToType(v.Aux)
15317 if move.Op != OpMove || auxIntToInt64(move.AuxInt) != n || auxToType(move.Aux) != t {
15320 mem := move.Args[2]
15321 dst2 := move.Args[0]
15322 if !(move.Uses == 1 && isSamePtr(dst1, dst2) && disjoint(src1, n, dst2, n) && clobber(move)) {
15326 v.AuxInt = int64ToAuxInt(n)
15327 v.Aux = typeToAux(t)
15328 v.AddArg3(dst1, src1, mem)
15331 // match: (Move {t} [n] dst1 src1 vardef:(VarDef {x} move:(Move {t} [n] dst2 _ mem)))
15332 // cond: move.Uses == 1 && vardef.Uses == 1 && isSamePtr(dst1, dst2) && disjoint(src1, n, dst2, n) && clobber(move, vardef)
15333 // result: (Move {t} [n] dst1 src1 (VarDef {x} mem))
15335 n := auxIntToInt64(v.AuxInt)
15336 t := auxToType(v.Aux)
15340 if vardef.Op != OpVarDef {
15343 x := auxToSym(vardef.Aux)
15344 move := vardef.Args[0]
15345 if move.Op != OpMove || auxIntToInt64(move.AuxInt) != n || auxToType(move.Aux) != t {
15348 mem := move.Args[2]
15349 dst2 := move.Args[0]
15350 if !(move.Uses == 1 && vardef.Uses == 1 && isSamePtr(dst1, dst2) && disjoint(src1, n, dst2, n) && clobber(move, vardef)) {
15354 v.AuxInt = int64ToAuxInt(n)
15355 v.Aux = typeToAux(t)
15356 v0 := b.NewValue0(v.Pos, OpVarDef, types.TypeMem)
15357 v0.Aux = symToAux(x)
15359 v.AddArg3(dst1, src1, v0)
15362 // match: (Move {t} [n] dst1 src1 zero:(Zero {t} [n] dst2 mem))
15363 // cond: zero.Uses == 1 && isSamePtr(dst1, dst2) && disjoint(src1, n, dst2, n) && clobber(zero)
15364 // result: (Move {t} [n] dst1 src1 mem)
15366 n := auxIntToInt64(v.AuxInt)
15367 t := auxToType(v.Aux)
15371 if zero.Op != OpZero || auxIntToInt64(zero.AuxInt) != n || auxToType(zero.Aux) != t {
15374 mem := zero.Args[1]
15375 dst2 := zero.Args[0]
15376 if !(zero.Uses == 1 && isSamePtr(dst1, dst2) && disjoint(src1, n, dst2, n) && clobber(zero)) {
15380 v.AuxInt = int64ToAuxInt(n)
15381 v.Aux = typeToAux(t)
15382 v.AddArg3(dst1, src1, mem)
15385 // match: (Move {t} [n] dst1 src1 vardef:(VarDef {x} zero:(Zero {t} [n] dst2 mem)))
15386 // cond: zero.Uses == 1 && vardef.Uses == 1 && isSamePtr(dst1, dst2) && disjoint(src1, n, dst2, n) && clobber(zero, vardef)
15387 // result: (Move {t} [n] dst1 src1 (VarDef {x} mem))
15389 n := auxIntToInt64(v.AuxInt)
15390 t := auxToType(v.Aux)
15394 if vardef.Op != OpVarDef {
15397 x := auxToSym(vardef.Aux)
15398 zero := vardef.Args[0]
15399 if zero.Op != OpZero || auxIntToInt64(zero.AuxInt) != n || auxToType(zero.Aux) != t {
15402 mem := zero.Args[1]
15403 dst2 := zero.Args[0]
15404 if !(zero.Uses == 1 && vardef.Uses == 1 && isSamePtr(dst1, dst2) && disjoint(src1, n, dst2, n) && clobber(zero, vardef)) {
15408 v.AuxInt = int64ToAuxInt(n)
15409 v.Aux = typeToAux(t)
15410 v0 := b.NewValue0(v.Pos, OpVarDef, types.TypeMem)
15411 v0.Aux = symToAux(x)
15413 v.AddArg3(dst1, src1, v0)
15416 // match: (Move {t1} [n] dst p1 mem:(Store {t2} op2:(OffPtr <tt2> [o2] p2) d1 (Store {t3} op3:(OffPtr <tt3> [0] p3) d2 _)))
15417 // cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && o2 == t3.Size() && n == t2.Size() + t3.Size()
15418 // result: (Store {t2} (OffPtr <tt2> [o2] dst) d1 (Store {t3} (OffPtr <tt3> [0] dst) d2 mem))
15420 n := auxIntToInt64(v.AuxInt)
15421 t1 := auxToType(v.Aux)
15425 if mem.Op != OpStore {
15428 t2 := auxToType(mem.Aux)
15431 if op2.Op != OpOffPtr {
15435 o2 := auxIntToInt64(op2.AuxInt)
15438 mem_2 := mem.Args[2]
15439 if mem_2.Op != OpStore {
15442 t3 := auxToType(mem_2.Aux)
15443 d2 := mem_2.Args[1]
15444 op3 := mem_2.Args[0]
15445 if op3.Op != OpOffPtr {
15449 if auxIntToInt64(op3.AuxInt) != 0 {
15453 if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && o2 == t3.Size() && n == t2.Size()+t3.Size()) {
15457 v.Aux = typeToAux(t2)
15458 v0 := b.NewValue0(v.Pos, OpOffPtr, tt2)
15459 v0.AuxInt = int64ToAuxInt(o2)
15461 v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
15462 v1.Aux = typeToAux(t3)
15463 v2 := b.NewValue0(v.Pos, OpOffPtr, tt3)
15464 v2.AuxInt = int64ToAuxInt(0)
15466 v1.AddArg3(v2, d2, mem)
15467 v.AddArg3(v0, d1, v1)
15470 // match: (Move {t1} [n] dst p1 mem:(Store {t2} op2:(OffPtr <tt2> [o2] p2) d1 (Store {t3} op3:(OffPtr <tt3> [o3] p3) d2 (Store {t4} op4:(OffPtr <tt4> [0] p4) d3 _))))
15471 // cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && o3 == t4.Size() && o2-o3 == t3.Size() && n == t2.Size() + t3.Size() + t4.Size()
15472 // result: (Store {t2} (OffPtr <tt2> [o2] dst) d1 (Store {t3} (OffPtr <tt3> [o3] dst) d2 (Store {t4} (OffPtr <tt4> [0] dst) d3 mem)))
15474 n := auxIntToInt64(v.AuxInt)
15475 t1 := auxToType(v.Aux)
15479 if mem.Op != OpStore {
15482 t2 := auxToType(mem.Aux)
15485 if op2.Op != OpOffPtr {
15489 o2 := auxIntToInt64(op2.AuxInt)
15492 mem_2 := mem.Args[2]
15493 if mem_2.Op != OpStore {
15496 t3 := auxToType(mem_2.Aux)
15498 op3 := mem_2.Args[0]
15499 if op3.Op != OpOffPtr {
15503 o3 := auxIntToInt64(op3.AuxInt)
15505 d2 := mem_2.Args[1]
15506 mem_2_2 := mem_2.Args[2]
15507 if mem_2_2.Op != OpStore {
15510 t4 := auxToType(mem_2_2.Aux)
15511 d3 := mem_2_2.Args[1]
15512 op4 := mem_2_2.Args[0]
15513 if op4.Op != OpOffPtr {
15517 if auxIntToInt64(op4.AuxInt) != 0 {
15521 if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && o3 == t4.Size() && o2-o3 == t3.Size() && n == t2.Size()+t3.Size()+t4.Size()) {
15525 v.Aux = typeToAux(t2)
15526 v0 := b.NewValue0(v.Pos, OpOffPtr, tt2)
15527 v0.AuxInt = int64ToAuxInt(o2)
15529 v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
15530 v1.Aux = typeToAux(t3)
15531 v2 := b.NewValue0(v.Pos, OpOffPtr, tt3)
15532 v2.AuxInt = int64ToAuxInt(o3)
15534 v3 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
15535 v3.Aux = typeToAux(t4)
15536 v4 := b.NewValue0(v.Pos, OpOffPtr, tt4)
15537 v4.AuxInt = int64ToAuxInt(0)
15539 v3.AddArg3(v4, d3, mem)
15540 v1.AddArg3(v2, d2, v3)
15541 v.AddArg3(v0, d1, v1)
15544 // match: (Move {t1} [n] dst p1 mem:(Store {t2} op2:(OffPtr <tt2> [o2] p2) d1 (Store {t3} op3:(OffPtr <tt3> [o3] p3) d2 (Store {t4} op4:(OffPtr <tt4> [o4] p4) d3 (Store {t5} op5:(OffPtr <tt5> [0] p5) d4 _)))))
15545 // cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && t5.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && registerizable(b, t5) && o4 == t5.Size() && o3-o4 == t4.Size() && o2-o3 == t3.Size() && n == t2.Size() + t3.Size() + t4.Size() + t5.Size()
15546 // result: (Store {t2} (OffPtr <tt2> [o2] dst) d1 (Store {t3} (OffPtr <tt3> [o3] dst) d2 (Store {t4} (OffPtr <tt4> [o4] dst) d3 (Store {t5} (OffPtr <tt5> [0] dst) d4 mem))))
15548 n := auxIntToInt64(v.AuxInt)
15549 t1 := auxToType(v.Aux)
15553 if mem.Op != OpStore {
15556 t2 := auxToType(mem.Aux)
15559 if op2.Op != OpOffPtr {
15563 o2 := auxIntToInt64(op2.AuxInt)
15566 mem_2 := mem.Args[2]
15567 if mem_2.Op != OpStore {
15570 t3 := auxToType(mem_2.Aux)
15572 op3 := mem_2.Args[0]
15573 if op3.Op != OpOffPtr {
15577 o3 := auxIntToInt64(op3.AuxInt)
15579 d2 := mem_2.Args[1]
15580 mem_2_2 := mem_2.Args[2]
15581 if mem_2_2.Op != OpStore {
15584 t4 := auxToType(mem_2_2.Aux)
15585 _ = mem_2_2.Args[2]
15586 op4 := mem_2_2.Args[0]
15587 if op4.Op != OpOffPtr {
15591 o4 := auxIntToInt64(op4.AuxInt)
15593 d3 := mem_2_2.Args[1]
15594 mem_2_2_2 := mem_2_2.Args[2]
15595 if mem_2_2_2.Op != OpStore {
15598 t5 := auxToType(mem_2_2_2.Aux)
15599 d4 := mem_2_2_2.Args[1]
15600 op5 := mem_2_2_2.Args[0]
15601 if op5.Op != OpOffPtr {
15605 if auxIntToInt64(op5.AuxInt) != 0 {
15609 if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && t5.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && registerizable(b, t5) && o4 == t5.Size() && o3-o4 == t4.Size() && o2-o3 == t3.Size() && n == t2.Size()+t3.Size()+t4.Size()+t5.Size()) {
15613 v.Aux = typeToAux(t2)
15614 v0 := b.NewValue0(v.Pos, OpOffPtr, tt2)
15615 v0.AuxInt = int64ToAuxInt(o2)
15617 v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
15618 v1.Aux = typeToAux(t3)
15619 v2 := b.NewValue0(v.Pos, OpOffPtr, tt3)
15620 v2.AuxInt = int64ToAuxInt(o3)
15622 v3 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
15623 v3.Aux = typeToAux(t4)
15624 v4 := b.NewValue0(v.Pos, OpOffPtr, tt4)
15625 v4.AuxInt = int64ToAuxInt(o4)
15627 v5 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
15628 v5.Aux = typeToAux(t5)
15629 v6 := b.NewValue0(v.Pos, OpOffPtr, tt5)
15630 v6.AuxInt = int64ToAuxInt(0)
15632 v5.AddArg3(v6, d4, mem)
15633 v3.AddArg3(v4, d3, v5)
15634 v1.AddArg3(v2, d2, v3)
15635 v.AddArg3(v0, d1, v1)
15638 // match: (Move {t1} [n] dst p1 mem:(VarDef (Store {t2} op2:(OffPtr <tt2> [o2] p2) d1 (Store {t3} op3:(OffPtr <tt3> [0] p3) d2 _))))
15639 // cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && o2 == t3.Size() && n == t2.Size() + t3.Size()
15640 // result: (Store {t2} (OffPtr <tt2> [o2] dst) d1 (Store {t3} (OffPtr <tt3> [0] dst) d2 mem))
15642 n := auxIntToInt64(v.AuxInt)
15643 t1 := auxToType(v.Aux)
15647 if mem.Op != OpVarDef {
15650 mem_0 := mem.Args[0]
15651 if mem_0.Op != OpStore {
15654 t2 := auxToType(mem_0.Aux)
15656 op2 := mem_0.Args[0]
15657 if op2.Op != OpOffPtr {
15661 o2 := auxIntToInt64(op2.AuxInt)
15663 d1 := mem_0.Args[1]
15664 mem_0_2 := mem_0.Args[2]
15665 if mem_0_2.Op != OpStore {
15668 t3 := auxToType(mem_0_2.Aux)
15669 d2 := mem_0_2.Args[1]
15670 op3 := mem_0_2.Args[0]
15671 if op3.Op != OpOffPtr {
15675 if auxIntToInt64(op3.AuxInt) != 0 {
15679 if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && o2 == t3.Size() && n == t2.Size()+t3.Size()) {
15683 v.Aux = typeToAux(t2)
15684 v0 := b.NewValue0(v.Pos, OpOffPtr, tt2)
15685 v0.AuxInt = int64ToAuxInt(o2)
15687 v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
15688 v1.Aux = typeToAux(t3)
15689 v2 := b.NewValue0(v.Pos, OpOffPtr, tt3)
15690 v2.AuxInt = int64ToAuxInt(0)
15692 v1.AddArg3(v2, d2, mem)
15693 v.AddArg3(v0, d1, v1)
15696 // match: (Move {t1} [n] dst p1 mem:(VarDef (Store {t2} op2:(OffPtr <tt2> [o2] p2) d1 (Store {t3} op3:(OffPtr <tt3> [o3] p3) d2 (Store {t4} op4:(OffPtr <tt4> [0] p4) d3 _)))))
15697 // cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && o3 == t4.Size() && o2-o3 == t3.Size() && n == t2.Size() + t3.Size() + t4.Size()
15698 // result: (Store {t2} (OffPtr <tt2> [o2] dst) d1 (Store {t3} (OffPtr <tt3> [o3] dst) d2 (Store {t4} (OffPtr <tt4> [0] dst) d3 mem)))
15700 n := auxIntToInt64(v.AuxInt)
15701 t1 := auxToType(v.Aux)
15705 if mem.Op != OpVarDef {
15708 mem_0 := mem.Args[0]
15709 if mem_0.Op != OpStore {
15712 t2 := auxToType(mem_0.Aux)
15714 op2 := mem_0.Args[0]
15715 if op2.Op != OpOffPtr {
15719 o2 := auxIntToInt64(op2.AuxInt)
15721 d1 := mem_0.Args[1]
15722 mem_0_2 := mem_0.Args[2]
15723 if mem_0_2.Op != OpStore {
15726 t3 := auxToType(mem_0_2.Aux)
15727 _ = mem_0_2.Args[2]
15728 op3 := mem_0_2.Args[0]
15729 if op3.Op != OpOffPtr {
15733 o3 := auxIntToInt64(op3.AuxInt)
15735 d2 := mem_0_2.Args[1]
15736 mem_0_2_2 := mem_0_2.Args[2]
15737 if mem_0_2_2.Op != OpStore {
15740 t4 := auxToType(mem_0_2_2.Aux)
15741 d3 := mem_0_2_2.Args[1]
15742 op4 := mem_0_2_2.Args[0]
15743 if op4.Op != OpOffPtr {
15747 if auxIntToInt64(op4.AuxInt) != 0 {
15751 if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && o3 == t4.Size() && o2-o3 == t3.Size() && n == t2.Size()+t3.Size()+t4.Size()) {
15755 v.Aux = typeToAux(t2)
15756 v0 := b.NewValue0(v.Pos, OpOffPtr, tt2)
15757 v0.AuxInt = int64ToAuxInt(o2)
15759 v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
15760 v1.Aux = typeToAux(t3)
15761 v2 := b.NewValue0(v.Pos, OpOffPtr, tt3)
15762 v2.AuxInt = int64ToAuxInt(o3)
15764 v3 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
15765 v3.Aux = typeToAux(t4)
15766 v4 := b.NewValue0(v.Pos, OpOffPtr, tt4)
15767 v4.AuxInt = int64ToAuxInt(0)
15769 v3.AddArg3(v4, d3, mem)
15770 v1.AddArg3(v2, d2, v3)
15771 v.AddArg3(v0, d1, v1)
15774 // match: (Move {t1} [n] dst p1 mem:(VarDef (Store {t2} op2:(OffPtr <tt2> [o2] p2) d1 (Store {t3} op3:(OffPtr <tt3> [o3] p3) d2 (Store {t4} op4:(OffPtr <tt4> [o4] p4) d3 (Store {t5} op5:(OffPtr <tt5> [0] p5) d4 _))))))
15775 // cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && t5.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && registerizable(b, t5) && o4 == t5.Size() && o3-o4 == t4.Size() && o2-o3 == t3.Size() && n == t2.Size() + t3.Size() + t4.Size() + t5.Size()
15776 // result: (Store {t2} (OffPtr <tt2> [o2] dst) d1 (Store {t3} (OffPtr <tt3> [o3] dst) d2 (Store {t4} (OffPtr <tt4> [o4] dst) d3 (Store {t5} (OffPtr <tt5> [0] dst) d4 mem))))
15778 n := auxIntToInt64(v.AuxInt)
15779 t1 := auxToType(v.Aux)
15783 if mem.Op != OpVarDef {
15786 mem_0 := mem.Args[0]
15787 if mem_0.Op != OpStore {
15790 t2 := auxToType(mem_0.Aux)
15792 op2 := mem_0.Args[0]
15793 if op2.Op != OpOffPtr {
15797 o2 := auxIntToInt64(op2.AuxInt)
15799 d1 := mem_0.Args[1]
15800 mem_0_2 := mem_0.Args[2]
15801 if mem_0_2.Op != OpStore {
15804 t3 := auxToType(mem_0_2.Aux)
15805 _ = mem_0_2.Args[2]
15806 op3 := mem_0_2.Args[0]
15807 if op3.Op != OpOffPtr {
15811 o3 := auxIntToInt64(op3.AuxInt)
15813 d2 := mem_0_2.Args[1]
15814 mem_0_2_2 := mem_0_2.Args[2]
15815 if mem_0_2_2.Op != OpStore {
15818 t4 := auxToType(mem_0_2_2.Aux)
15819 _ = mem_0_2_2.Args[2]
15820 op4 := mem_0_2_2.Args[0]
15821 if op4.Op != OpOffPtr {
15825 o4 := auxIntToInt64(op4.AuxInt)
15827 d3 := mem_0_2_2.Args[1]
15828 mem_0_2_2_2 := mem_0_2_2.Args[2]
15829 if mem_0_2_2_2.Op != OpStore {
15832 t5 := auxToType(mem_0_2_2_2.Aux)
15833 d4 := mem_0_2_2_2.Args[1]
15834 op5 := mem_0_2_2_2.Args[0]
15835 if op5.Op != OpOffPtr {
15839 if auxIntToInt64(op5.AuxInt) != 0 {
15843 if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && t5.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && registerizable(b, t5) && o4 == t5.Size() && o3-o4 == t4.Size() && o2-o3 == t3.Size() && n == t2.Size()+t3.Size()+t4.Size()+t5.Size()) {
15847 v.Aux = typeToAux(t2)
15848 v0 := b.NewValue0(v.Pos, OpOffPtr, tt2)
15849 v0.AuxInt = int64ToAuxInt(o2)
15851 v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
15852 v1.Aux = typeToAux(t3)
15853 v2 := b.NewValue0(v.Pos, OpOffPtr, tt3)
15854 v2.AuxInt = int64ToAuxInt(o3)
15856 v3 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
15857 v3.Aux = typeToAux(t4)
15858 v4 := b.NewValue0(v.Pos, OpOffPtr, tt4)
15859 v4.AuxInt = int64ToAuxInt(o4)
15861 v5 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
15862 v5.Aux = typeToAux(t5)
15863 v6 := b.NewValue0(v.Pos, OpOffPtr, tt5)
15864 v6.AuxInt = int64ToAuxInt(0)
15866 v5.AddArg3(v6, d4, mem)
15867 v3.AddArg3(v4, d3, v5)
15868 v1.AddArg3(v2, d2, v3)
15869 v.AddArg3(v0, d1, v1)
15872 // match: (Move {t1} [n] dst p1 mem:(Store {t2} op2:(OffPtr <tt2> [o2] p2) d1 (Zero {t3} [n] p3 _)))
15873 // cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && registerizable(b, t2) && n >= o2 + t2.Size()
15874 // result: (Store {t2} (OffPtr <tt2> [o2] dst) d1 (Zero {t1} [n] dst mem))
15876 n := auxIntToInt64(v.AuxInt)
15877 t1 := auxToType(v.Aux)
15881 if mem.Op != OpStore {
15884 t2 := auxToType(mem.Aux)
15887 if op2.Op != OpOffPtr {
15891 o2 := auxIntToInt64(op2.AuxInt)
15894 mem_2 := mem.Args[2]
15895 if mem_2.Op != OpZero || auxIntToInt64(mem_2.AuxInt) != n {
15898 t3 := auxToType(mem_2.Aux)
15899 p3 := mem_2.Args[0]
15900 if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && registerizable(b, t2) && n >= o2+t2.Size()) {
15904 v.Aux = typeToAux(t2)
15905 v0 := b.NewValue0(v.Pos, OpOffPtr, tt2)
15906 v0.AuxInt = int64ToAuxInt(o2)
15908 v1 := b.NewValue0(v.Pos, OpZero, types.TypeMem)
15909 v1.AuxInt = int64ToAuxInt(n)
15910 v1.Aux = typeToAux(t1)
15911 v1.AddArg2(dst, mem)
15912 v.AddArg3(v0, d1, v1)
15915 // match: (Move {t1} [n] dst p1 mem:(Store {t2} (OffPtr <tt2> [o2] p2) d1 (Store {t3} (OffPtr <tt3> [o3] p3) d2 (Zero {t4} [n] p4 _))))
15916 // cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && n >= o2 + t2.Size() && n >= o3 + t3.Size()
15917 // result: (Store {t2} (OffPtr <tt2> [o2] dst) d1 (Store {t3} (OffPtr <tt3> [o3] dst) d2 (Zero {t1} [n] dst mem)))
15919 n := auxIntToInt64(v.AuxInt)
15920 t1 := auxToType(v.Aux)
15924 if mem.Op != OpStore {
15927 t2 := auxToType(mem.Aux)
15929 mem_0 := mem.Args[0]
15930 if mem_0.Op != OpOffPtr {
15934 o2 := auxIntToInt64(mem_0.AuxInt)
15935 p2 := mem_0.Args[0]
15937 mem_2 := mem.Args[2]
15938 if mem_2.Op != OpStore {
15941 t3 := auxToType(mem_2.Aux)
15943 mem_2_0 := mem_2.Args[0]
15944 if mem_2_0.Op != OpOffPtr {
15947 tt3 := mem_2_0.Type
15948 o3 := auxIntToInt64(mem_2_0.AuxInt)
15949 p3 := mem_2_0.Args[0]
15950 d2 := mem_2.Args[1]
15951 mem_2_2 := mem_2.Args[2]
15952 if mem_2_2.Op != OpZero || auxIntToInt64(mem_2_2.AuxInt) != n {
15955 t4 := auxToType(mem_2_2.Aux)
15956 p4 := mem_2_2.Args[0]
15957 if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && n >= o2+t2.Size() && n >= o3+t3.Size()) {
15961 v.Aux = typeToAux(t2)
15962 v0 := b.NewValue0(v.Pos, OpOffPtr, tt2)
15963 v0.AuxInt = int64ToAuxInt(o2)
15965 v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
15966 v1.Aux = typeToAux(t3)
15967 v2 := b.NewValue0(v.Pos, OpOffPtr, tt3)
15968 v2.AuxInt = int64ToAuxInt(o3)
15970 v3 := b.NewValue0(v.Pos, OpZero, types.TypeMem)
15971 v3.AuxInt = int64ToAuxInt(n)
15972 v3.Aux = typeToAux(t1)
15973 v3.AddArg2(dst, mem)
15974 v1.AddArg3(v2, d2, v3)
15975 v.AddArg3(v0, d1, v1)
15978 // match: (Move {t1} [n] dst p1 mem:(Store {t2} (OffPtr <tt2> [o2] p2) d1 (Store {t3} (OffPtr <tt3> [o3] p3) d2 (Store {t4} (OffPtr <tt4> [o4] p4) d3 (Zero {t5} [n] p5 _)))))
15979 // cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && t5.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && n >= o2 + t2.Size() && n >= o3 + t3.Size() && n >= o4 + t4.Size()
15980 // result: (Store {t2} (OffPtr <tt2> [o2] dst) d1 (Store {t3} (OffPtr <tt3> [o3] dst) d2 (Store {t4} (OffPtr <tt4> [o4] dst) d3 (Zero {t1} [n] dst mem))))
15982 n := auxIntToInt64(v.AuxInt)
15983 t1 := auxToType(v.Aux)
15987 if mem.Op != OpStore {
15990 t2 := auxToType(mem.Aux)
15992 mem_0 := mem.Args[0]
15993 if mem_0.Op != OpOffPtr {
15997 o2 := auxIntToInt64(mem_0.AuxInt)
15998 p2 := mem_0.Args[0]
16000 mem_2 := mem.Args[2]
16001 if mem_2.Op != OpStore {
16004 t3 := auxToType(mem_2.Aux)
16006 mem_2_0 := mem_2.Args[0]
16007 if mem_2_0.Op != OpOffPtr {
16010 tt3 := mem_2_0.Type
16011 o3 := auxIntToInt64(mem_2_0.AuxInt)
16012 p3 := mem_2_0.Args[0]
16013 d2 := mem_2.Args[1]
16014 mem_2_2 := mem_2.Args[2]
16015 if mem_2_2.Op != OpStore {
16018 t4 := auxToType(mem_2_2.Aux)
16019 _ = mem_2_2.Args[2]
16020 mem_2_2_0 := mem_2_2.Args[0]
16021 if mem_2_2_0.Op != OpOffPtr {
16024 tt4 := mem_2_2_0.Type
16025 o4 := auxIntToInt64(mem_2_2_0.AuxInt)
16026 p4 := mem_2_2_0.Args[0]
16027 d3 := mem_2_2.Args[1]
16028 mem_2_2_2 := mem_2_2.Args[2]
16029 if mem_2_2_2.Op != OpZero || auxIntToInt64(mem_2_2_2.AuxInt) != n {
16032 t5 := auxToType(mem_2_2_2.Aux)
16033 p5 := mem_2_2_2.Args[0]
16034 if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && t5.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && n >= o2+t2.Size() && n >= o3+t3.Size() && n >= o4+t4.Size()) {
16038 v.Aux = typeToAux(t2)
16039 v0 := b.NewValue0(v.Pos, OpOffPtr, tt2)
16040 v0.AuxInt = int64ToAuxInt(o2)
16042 v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
16043 v1.Aux = typeToAux(t3)
16044 v2 := b.NewValue0(v.Pos, OpOffPtr, tt3)
16045 v2.AuxInt = int64ToAuxInt(o3)
16047 v3 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
16048 v3.Aux = typeToAux(t4)
16049 v4 := b.NewValue0(v.Pos, OpOffPtr, tt4)
16050 v4.AuxInt = int64ToAuxInt(o4)
16052 v5 := b.NewValue0(v.Pos, OpZero, types.TypeMem)
16053 v5.AuxInt = int64ToAuxInt(n)
16054 v5.Aux = typeToAux(t1)
16055 v5.AddArg2(dst, mem)
16056 v3.AddArg3(v4, d3, v5)
16057 v1.AddArg3(v2, d2, v3)
16058 v.AddArg3(v0, d1, v1)
16061 // match: (Move {t1} [n] dst p1 mem:(Store {t2} (OffPtr <tt2> [o2] p2) d1 (Store {t3} (OffPtr <tt3> [o3] p3) d2 (Store {t4} (OffPtr <tt4> [o4] p4) d3 (Store {t5} (OffPtr <tt5> [o5] p5) d4 (Zero {t6} [n] p6 _))))))
16062 // cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && isSamePtr(p5, p6) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && t5.Alignment() <= t1.Alignment() && t6.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && registerizable(b, t5) && n >= o2 + t2.Size() && n >= o3 + t3.Size() && n >= o4 + t4.Size() && n >= o5 + t5.Size()
16063 // result: (Store {t2} (OffPtr <tt2> [o2] dst) d1 (Store {t3} (OffPtr <tt3> [o3] dst) d2 (Store {t4} (OffPtr <tt4> [o4] dst) d3 (Store {t5} (OffPtr <tt5> [o5] dst) d4 (Zero {t1} [n] dst mem)))))
16065 n := auxIntToInt64(v.AuxInt)
16066 t1 := auxToType(v.Aux)
16070 if mem.Op != OpStore {
16073 t2 := auxToType(mem.Aux)
16075 mem_0 := mem.Args[0]
16076 if mem_0.Op != OpOffPtr {
16080 o2 := auxIntToInt64(mem_0.AuxInt)
16081 p2 := mem_0.Args[0]
16083 mem_2 := mem.Args[2]
16084 if mem_2.Op != OpStore {
16087 t3 := auxToType(mem_2.Aux)
16089 mem_2_0 := mem_2.Args[0]
16090 if mem_2_0.Op != OpOffPtr {
16093 tt3 := mem_2_0.Type
16094 o3 := auxIntToInt64(mem_2_0.AuxInt)
16095 p3 := mem_2_0.Args[0]
16096 d2 := mem_2.Args[1]
16097 mem_2_2 := mem_2.Args[2]
16098 if mem_2_2.Op != OpStore {
16101 t4 := auxToType(mem_2_2.Aux)
16102 _ = mem_2_2.Args[2]
16103 mem_2_2_0 := mem_2_2.Args[0]
16104 if mem_2_2_0.Op != OpOffPtr {
16107 tt4 := mem_2_2_0.Type
16108 o4 := auxIntToInt64(mem_2_2_0.AuxInt)
16109 p4 := mem_2_2_0.Args[0]
16110 d3 := mem_2_2.Args[1]
16111 mem_2_2_2 := mem_2_2.Args[2]
16112 if mem_2_2_2.Op != OpStore {
16115 t5 := auxToType(mem_2_2_2.Aux)
16116 _ = mem_2_2_2.Args[2]
16117 mem_2_2_2_0 := mem_2_2_2.Args[0]
16118 if mem_2_2_2_0.Op != OpOffPtr {
16121 tt5 := mem_2_2_2_0.Type
16122 o5 := auxIntToInt64(mem_2_2_2_0.AuxInt)
16123 p5 := mem_2_2_2_0.Args[0]
16124 d4 := mem_2_2_2.Args[1]
16125 mem_2_2_2_2 := mem_2_2_2.Args[2]
16126 if mem_2_2_2_2.Op != OpZero || auxIntToInt64(mem_2_2_2_2.AuxInt) != n {
16129 t6 := auxToType(mem_2_2_2_2.Aux)
16130 p6 := mem_2_2_2_2.Args[0]
16131 if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && isSamePtr(p5, p6) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && t5.Alignment() <= t1.Alignment() && t6.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && registerizable(b, t5) && n >= o2+t2.Size() && n >= o3+t3.Size() && n >= o4+t4.Size() && n >= o5+t5.Size()) {
16135 v.Aux = typeToAux(t2)
16136 v0 := b.NewValue0(v.Pos, OpOffPtr, tt2)
16137 v0.AuxInt = int64ToAuxInt(o2)
16139 v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
16140 v1.Aux = typeToAux(t3)
16141 v2 := b.NewValue0(v.Pos, OpOffPtr, tt3)
16142 v2.AuxInt = int64ToAuxInt(o3)
16144 v3 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
16145 v3.Aux = typeToAux(t4)
16146 v4 := b.NewValue0(v.Pos, OpOffPtr, tt4)
16147 v4.AuxInt = int64ToAuxInt(o4)
16149 v5 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
16150 v5.Aux = typeToAux(t5)
16151 v6 := b.NewValue0(v.Pos, OpOffPtr, tt5)
16152 v6.AuxInt = int64ToAuxInt(o5)
16154 v7 := b.NewValue0(v.Pos, OpZero, types.TypeMem)
16155 v7.AuxInt = int64ToAuxInt(n)
16156 v7.Aux = typeToAux(t1)
16157 v7.AddArg2(dst, mem)
16158 v5.AddArg3(v6, d4, v7)
16159 v3.AddArg3(v4, d3, v5)
16160 v1.AddArg3(v2, d2, v3)
16161 v.AddArg3(v0, d1, v1)
16164 // match: (Move {t1} [n] dst p1 mem:(VarDef (Store {t2} op2:(OffPtr <tt2> [o2] p2) d1 (Zero {t3} [n] p3 _))))
16165 // cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && registerizable(b, t2) && n >= o2 + t2.Size()
16166 // result: (Store {t2} (OffPtr <tt2> [o2] dst) d1 (Zero {t1} [n] dst mem))
16168 n := auxIntToInt64(v.AuxInt)
16169 t1 := auxToType(v.Aux)
16173 if mem.Op != OpVarDef {
16176 mem_0 := mem.Args[0]
16177 if mem_0.Op != OpStore {
16180 t2 := auxToType(mem_0.Aux)
16182 op2 := mem_0.Args[0]
16183 if op2.Op != OpOffPtr {
16187 o2 := auxIntToInt64(op2.AuxInt)
16189 d1 := mem_0.Args[1]
16190 mem_0_2 := mem_0.Args[2]
16191 if mem_0_2.Op != OpZero || auxIntToInt64(mem_0_2.AuxInt) != n {
16194 t3 := auxToType(mem_0_2.Aux)
16195 p3 := mem_0_2.Args[0]
16196 if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && registerizable(b, t2) && n >= o2+t2.Size()) {
16200 v.Aux = typeToAux(t2)
16201 v0 := b.NewValue0(v.Pos, OpOffPtr, tt2)
16202 v0.AuxInt = int64ToAuxInt(o2)
16204 v1 := b.NewValue0(v.Pos, OpZero, types.TypeMem)
16205 v1.AuxInt = int64ToAuxInt(n)
16206 v1.Aux = typeToAux(t1)
16207 v1.AddArg2(dst, mem)
16208 v.AddArg3(v0, d1, v1)
16211 // match: (Move {t1} [n] dst p1 mem:(VarDef (Store {t2} (OffPtr <tt2> [o2] p2) d1 (Store {t3} (OffPtr <tt3> [o3] p3) d2 (Zero {t4} [n] p4 _)))))
16212 // cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && n >= o2 + t2.Size() && n >= o3 + t3.Size()
16213 // result: (Store {t2} (OffPtr <tt2> [o2] dst) d1 (Store {t3} (OffPtr <tt3> [o3] dst) d2 (Zero {t1} [n] dst mem)))
16215 n := auxIntToInt64(v.AuxInt)
16216 t1 := auxToType(v.Aux)
16220 if mem.Op != OpVarDef {
16223 mem_0 := mem.Args[0]
16224 if mem_0.Op != OpStore {
16227 t2 := auxToType(mem_0.Aux)
16229 mem_0_0 := mem_0.Args[0]
16230 if mem_0_0.Op != OpOffPtr {
16233 tt2 := mem_0_0.Type
16234 o2 := auxIntToInt64(mem_0_0.AuxInt)
16235 p2 := mem_0_0.Args[0]
16236 d1 := mem_0.Args[1]
16237 mem_0_2 := mem_0.Args[2]
16238 if mem_0_2.Op != OpStore {
16241 t3 := auxToType(mem_0_2.Aux)
16242 _ = mem_0_2.Args[2]
16243 mem_0_2_0 := mem_0_2.Args[0]
16244 if mem_0_2_0.Op != OpOffPtr {
16247 tt3 := mem_0_2_0.Type
16248 o3 := auxIntToInt64(mem_0_2_0.AuxInt)
16249 p3 := mem_0_2_0.Args[0]
16250 d2 := mem_0_2.Args[1]
16251 mem_0_2_2 := mem_0_2.Args[2]
16252 if mem_0_2_2.Op != OpZero || auxIntToInt64(mem_0_2_2.AuxInt) != n {
16255 t4 := auxToType(mem_0_2_2.Aux)
16256 p4 := mem_0_2_2.Args[0]
16257 if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && n >= o2+t2.Size() && n >= o3+t3.Size()) {
16261 v.Aux = typeToAux(t2)
16262 v0 := b.NewValue0(v.Pos, OpOffPtr, tt2)
16263 v0.AuxInt = int64ToAuxInt(o2)
16265 v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
16266 v1.Aux = typeToAux(t3)
16267 v2 := b.NewValue0(v.Pos, OpOffPtr, tt3)
16268 v2.AuxInt = int64ToAuxInt(o3)
16270 v3 := b.NewValue0(v.Pos, OpZero, types.TypeMem)
16271 v3.AuxInt = int64ToAuxInt(n)
16272 v3.Aux = typeToAux(t1)
16273 v3.AddArg2(dst, mem)
16274 v1.AddArg3(v2, d2, v3)
16275 v.AddArg3(v0, d1, v1)
16278 // match: (Move {t1} [n] dst p1 mem:(VarDef (Store {t2} (OffPtr <tt2> [o2] p2) d1 (Store {t3} (OffPtr <tt3> [o3] p3) d2 (Store {t4} (OffPtr <tt4> [o4] p4) d3 (Zero {t5} [n] p5 _))))))
16279 // cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && t5.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && n >= o2 + t2.Size() && n >= o3 + t3.Size() && n >= o4 + t4.Size()
16280 // result: (Store {t2} (OffPtr <tt2> [o2] dst) d1 (Store {t3} (OffPtr <tt3> [o3] dst) d2 (Store {t4} (OffPtr <tt4> [o4] dst) d3 (Zero {t1} [n] dst mem))))
16282 n := auxIntToInt64(v.AuxInt)
16283 t1 := auxToType(v.Aux)
16287 if mem.Op != OpVarDef {
16290 mem_0 := mem.Args[0]
16291 if mem_0.Op != OpStore {
16294 t2 := auxToType(mem_0.Aux)
16296 mem_0_0 := mem_0.Args[0]
16297 if mem_0_0.Op != OpOffPtr {
16300 tt2 := mem_0_0.Type
16301 o2 := auxIntToInt64(mem_0_0.AuxInt)
16302 p2 := mem_0_0.Args[0]
16303 d1 := mem_0.Args[1]
16304 mem_0_2 := mem_0.Args[2]
16305 if mem_0_2.Op != OpStore {
16308 t3 := auxToType(mem_0_2.Aux)
16309 _ = mem_0_2.Args[2]
16310 mem_0_2_0 := mem_0_2.Args[0]
16311 if mem_0_2_0.Op != OpOffPtr {
16314 tt3 := mem_0_2_0.Type
16315 o3 := auxIntToInt64(mem_0_2_0.AuxInt)
16316 p3 := mem_0_2_0.Args[0]
16317 d2 := mem_0_2.Args[1]
16318 mem_0_2_2 := mem_0_2.Args[2]
16319 if mem_0_2_2.Op != OpStore {
16322 t4 := auxToType(mem_0_2_2.Aux)
16323 _ = mem_0_2_2.Args[2]
16324 mem_0_2_2_0 := mem_0_2_2.Args[0]
16325 if mem_0_2_2_0.Op != OpOffPtr {
16328 tt4 := mem_0_2_2_0.Type
16329 o4 := auxIntToInt64(mem_0_2_2_0.AuxInt)
16330 p4 := mem_0_2_2_0.Args[0]
16331 d3 := mem_0_2_2.Args[1]
16332 mem_0_2_2_2 := mem_0_2_2.Args[2]
16333 if mem_0_2_2_2.Op != OpZero || auxIntToInt64(mem_0_2_2_2.AuxInt) != n {
16336 t5 := auxToType(mem_0_2_2_2.Aux)
16337 p5 := mem_0_2_2_2.Args[0]
16338 if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && t5.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && n >= o2+t2.Size() && n >= o3+t3.Size() && n >= o4+t4.Size()) {
16342 v.Aux = typeToAux(t2)
16343 v0 := b.NewValue0(v.Pos, OpOffPtr, tt2)
16344 v0.AuxInt = int64ToAuxInt(o2)
16346 v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
16347 v1.Aux = typeToAux(t3)
16348 v2 := b.NewValue0(v.Pos, OpOffPtr, tt3)
16349 v2.AuxInt = int64ToAuxInt(o3)
16351 v3 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
16352 v3.Aux = typeToAux(t4)
16353 v4 := b.NewValue0(v.Pos, OpOffPtr, tt4)
16354 v4.AuxInt = int64ToAuxInt(o4)
16356 v5 := b.NewValue0(v.Pos, OpZero, types.TypeMem)
16357 v5.AuxInt = int64ToAuxInt(n)
16358 v5.Aux = typeToAux(t1)
16359 v5.AddArg2(dst, mem)
16360 v3.AddArg3(v4, d3, v5)
16361 v1.AddArg3(v2, d2, v3)
16362 v.AddArg3(v0, d1, v1)
16365 // match: (Move {t1} [n] dst p1 mem:(VarDef (Store {t2} (OffPtr <tt2> [o2] p2) d1 (Store {t3} (OffPtr <tt3> [o3] p3) d2 (Store {t4} (OffPtr <tt4> [o4] p4) d3 (Store {t5} (OffPtr <tt5> [o5] p5) d4 (Zero {t6} [n] p6 _)))))))
16366 // cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && isSamePtr(p5, p6) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && t5.Alignment() <= t1.Alignment() && t6.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && registerizable(b, t5) && n >= o2 + t2.Size() && n >= o3 + t3.Size() && n >= o4 + t4.Size() && n >= o5 + t5.Size()
16367 // result: (Store {t2} (OffPtr <tt2> [o2] dst) d1 (Store {t3} (OffPtr <tt3> [o3] dst) d2 (Store {t4} (OffPtr <tt4> [o4] dst) d3 (Store {t5} (OffPtr <tt5> [o5] dst) d4 (Zero {t1} [n] dst mem)))))
16369 n := auxIntToInt64(v.AuxInt)
16370 t1 := auxToType(v.Aux)
16374 if mem.Op != OpVarDef {
16377 mem_0 := mem.Args[0]
16378 if mem_0.Op != OpStore {
16381 t2 := auxToType(mem_0.Aux)
16383 mem_0_0 := mem_0.Args[0]
16384 if mem_0_0.Op != OpOffPtr {
16387 tt2 := mem_0_0.Type
16388 o2 := auxIntToInt64(mem_0_0.AuxInt)
16389 p2 := mem_0_0.Args[0]
16390 d1 := mem_0.Args[1]
16391 mem_0_2 := mem_0.Args[2]
16392 if mem_0_2.Op != OpStore {
16395 t3 := auxToType(mem_0_2.Aux)
16396 _ = mem_0_2.Args[2]
16397 mem_0_2_0 := mem_0_2.Args[0]
16398 if mem_0_2_0.Op != OpOffPtr {
16401 tt3 := mem_0_2_0.Type
16402 o3 := auxIntToInt64(mem_0_2_0.AuxInt)
16403 p3 := mem_0_2_0.Args[0]
16404 d2 := mem_0_2.Args[1]
16405 mem_0_2_2 := mem_0_2.Args[2]
16406 if mem_0_2_2.Op != OpStore {
16409 t4 := auxToType(mem_0_2_2.Aux)
16410 _ = mem_0_2_2.Args[2]
16411 mem_0_2_2_0 := mem_0_2_2.Args[0]
16412 if mem_0_2_2_0.Op != OpOffPtr {
16415 tt4 := mem_0_2_2_0.Type
16416 o4 := auxIntToInt64(mem_0_2_2_0.AuxInt)
16417 p4 := mem_0_2_2_0.Args[0]
16418 d3 := mem_0_2_2.Args[1]
16419 mem_0_2_2_2 := mem_0_2_2.Args[2]
16420 if mem_0_2_2_2.Op != OpStore {
16423 t5 := auxToType(mem_0_2_2_2.Aux)
16424 _ = mem_0_2_2_2.Args[2]
16425 mem_0_2_2_2_0 := mem_0_2_2_2.Args[0]
16426 if mem_0_2_2_2_0.Op != OpOffPtr {
16429 tt5 := mem_0_2_2_2_0.Type
16430 o5 := auxIntToInt64(mem_0_2_2_2_0.AuxInt)
16431 p5 := mem_0_2_2_2_0.Args[0]
16432 d4 := mem_0_2_2_2.Args[1]
16433 mem_0_2_2_2_2 := mem_0_2_2_2.Args[2]
16434 if mem_0_2_2_2_2.Op != OpZero || auxIntToInt64(mem_0_2_2_2_2.AuxInt) != n {
16437 t6 := auxToType(mem_0_2_2_2_2.Aux)
16438 p6 := mem_0_2_2_2_2.Args[0]
16439 if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && isSamePtr(p5, p6) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && t5.Alignment() <= t1.Alignment() && t6.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && registerizable(b, t5) && n >= o2+t2.Size() && n >= o3+t3.Size() && n >= o4+t4.Size() && n >= o5+t5.Size()) {
16443 v.Aux = typeToAux(t2)
16444 v0 := b.NewValue0(v.Pos, OpOffPtr, tt2)
16445 v0.AuxInt = int64ToAuxInt(o2)
16447 v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
16448 v1.Aux = typeToAux(t3)
16449 v2 := b.NewValue0(v.Pos, OpOffPtr, tt3)
16450 v2.AuxInt = int64ToAuxInt(o3)
16452 v3 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
16453 v3.Aux = typeToAux(t4)
16454 v4 := b.NewValue0(v.Pos, OpOffPtr, tt4)
16455 v4.AuxInt = int64ToAuxInt(o4)
16457 v5 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
16458 v5.Aux = typeToAux(t5)
16459 v6 := b.NewValue0(v.Pos, OpOffPtr, tt5)
16460 v6.AuxInt = int64ToAuxInt(o5)
16462 v7 := b.NewValue0(v.Pos, OpZero, types.TypeMem)
16463 v7.AuxInt = int64ToAuxInt(n)
16464 v7.Aux = typeToAux(t1)
16465 v7.AddArg2(dst, mem)
16466 v5.AddArg3(v6, d4, v7)
16467 v3.AddArg3(v4, d3, v5)
16468 v1.AddArg3(v2, d2, v3)
16469 v.AddArg3(v0, d1, v1)
16472 // match: (Move {t1} [s] dst tmp1 midmem:(Move {t2} [s] tmp2 src _))
16473 // cond: t1.Compare(t2) == types.CMPeq && isSamePtr(tmp1, tmp2) && isStackPtr(src) && !isVolatile(src) && disjoint(src, s, tmp2, s) && (disjoint(src, s, dst, s) || isInlinableMemmove(dst, src, s, config))
16474 // result: (Move {t1} [s] dst src midmem)
16476 s := auxIntToInt64(v.AuxInt)
16477 t1 := auxToType(v.Aux)
16481 if midmem.Op != OpMove || auxIntToInt64(midmem.AuxInt) != s {
16484 t2 := auxToType(midmem.Aux)
16485 src := midmem.Args[1]
16486 tmp2 := midmem.Args[0]
16487 if !(t1.Compare(t2) == types.CMPeq && isSamePtr(tmp1, tmp2) && isStackPtr(src) && !isVolatile(src) && disjoint(src, s, tmp2, s) && (disjoint(src, s, dst, s) || isInlinableMemmove(dst, src, s, config))) {
16491 v.AuxInt = int64ToAuxInt(s)
16492 v.Aux = typeToAux(t1)
16493 v.AddArg3(dst, src, midmem)
16496 // match: (Move {t1} [s] dst tmp1 midmem:(VarDef (Move {t2} [s] tmp2 src _)))
16497 // cond: t1.Compare(t2) == types.CMPeq && isSamePtr(tmp1, tmp2) && isStackPtr(src) && !isVolatile(src) && disjoint(src, s, tmp2, s) && (disjoint(src, s, dst, s) || isInlinableMemmove(dst, src, s, config))
16498 // result: (Move {t1} [s] dst src midmem)
16500 s := auxIntToInt64(v.AuxInt)
16501 t1 := auxToType(v.Aux)
16505 if midmem.Op != OpVarDef {
16508 midmem_0 := midmem.Args[0]
16509 if midmem_0.Op != OpMove || auxIntToInt64(midmem_0.AuxInt) != s {
16512 t2 := auxToType(midmem_0.Aux)
16513 src := midmem_0.Args[1]
16514 tmp2 := midmem_0.Args[0]
16515 if !(t1.Compare(t2) == types.CMPeq && isSamePtr(tmp1, tmp2) && isStackPtr(src) && !isVolatile(src) && disjoint(src, s, tmp2, s) && (disjoint(src, s, dst, s) || isInlinableMemmove(dst, src, s, config))) {
16519 v.AuxInt = int64ToAuxInt(s)
16520 v.Aux = typeToAux(t1)
16521 v.AddArg3(dst, src, midmem)
16524 // match: (Move dst src mem)
16525 // cond: isSamePtr(dst, src)
16531 if !(isSamePtr(dst, src)) {
16539 func rewriteValuegeneric_OpMul16(v *Value) bool {
16543 typ := &b.Func.Config.Types
16544 // match: (Mul16 (Const16 [c]) (Const16 [d]))
16545 // result: (Const16 [c*d])
16547 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
16548 if v_0.Op != OpConst16 {
16551 c := auxIntToInt16(v_0.AuxInt)
16552 if v_1.Op != OpConst16 {
16555 d := auxIntToInt16(v_1.AuxInt)
16557 v.AuxInt = int16ToAuxInt(c * d)
16562 // match: (Mul16 (Const16 [1]) x)
16565 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
16566 if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 1 {
16575 // match: (Mul16 (Const16 [-1]) x)
16576 // result: (Neg16 x)
16578 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
16579 if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != -1 {
16589 // match: (Mul16 <t> n (Const16 [c]))
16590 // cond: isPowerOfTwo16(c)
16591 // result: (Lsh16x64 <t> n (Const64 <typ.UInt64> [log16(c)]))
16594 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
16596 if v_1.Op != OpConst16 {
16599 c := auxIntToInt16(v_1.AuxInt)
16600 if !(isPowerOfTwo16(c)) {
16603 v.reset(OpLsh16x64)
16605 v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
16606 v0.AuxInt = int64ToAuxInt(log16(c))
16612 // match: (Mul16 <t> n (Const16 [c]))
16613 // cond: t.IsSigned() && isPowerOfTwo16(-c)
16614 // result: (Neg16 (Lsh16x64 <t> n (Const64 <typ.UInt64> [log16(-c)])))
16617 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
16619 if v_1.Op != OpConst16 {
16622 c := auxIntToInt16(v_1.AuxInt)
16623 if !(t.IsSigned() && isPowerOfTwo16(-c)) {
16627 v0 := b.NewValue0(v.Pos, OpLsh16x64, t)
16628 v1 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
16629 v1.AuxInt = int64ToAuxInt(log16(-c))
16636 // match: (Mul16 (Const16 [0]) _)
16637 // result: (Const16 [0])
16639 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
16640 if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
16644 v.AuxInt = int16ToAuxInt(0)
16649 // match: (Mul16 (Mul16 i:(Const16 <t>) z) x)
16650 // cond: (z.Op != OpConst16 && x.Op != OpConst16)
16651 // result: (Mul16 i (Mul16 <t> x z))
16653 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
16654 if v_0.Op != OpMul16 {
16658 v_0_0 := v_0.Args[0]
16659 v_0_1 := v_0.Args[1]
16660 for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
16662 if i.Op != OpConst16 {
16668 if !(z.Op != OpConst16 && x.Op != OpConst16) {
16672 v0 := b.NewValue0(v.Pos, OpMul16, t)
16680 // match: (Mul16 (Const16 <t> [c]) (Mul16 (Const16 <t> [d]) x))
16681 // result: (Mul16 (Const16 <t> [c*d]) x)
16683 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
16684 if v_0.Op != OpConst16 {
16688 c := auxIntToInt16(v_0.AuxInt)
16689 if v_1.Op != OpMul16 {
16693 v_1_0 := v_1.Args[0]
16694 v_1_1 := v_1.Args[1]
16695 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
16696 if v_1_0.Op != OpConst16 || v_1_0.Type != t {
16699 d := auxIntToInt16(v_1_0.AuxInt)
16702 v0 := b.NewValue0(v.Pos, OpConst16, t)
16703 v0.AuxInt = int16ToAuxInt(c * d)
// rewriteValuegeneric_OpMul32 applies the generic rewrite rules for Mul32.
// Mirrors rewriteValuegeneric_OpMul16 at 32-bit width: constant folding,
// multiply-by-1/-1/0 simplification, power-of-two strength reduction to
// Lsh32x64 (Neg32 of a shift for negative powers of two on signed types),
// distribution of a constant over (Add32 (Const32 [d]) x), and
// reassociation of constant factors. Returns a bool per the generator's
// convention (true when a rule fired and v was rewritten).
// NOTE(review): machine-generated from _gen/generic.rules ("DO NOT EDIT")
// — change the rules and regenerate rather than editing by hand.
16712 func rewriteValuegeneric_OpMul32(v *Value) bool {
16716 typ := &b.Func.Config.Types
16717 // match: (Mul32 (Const32 [c]) (Const32 [d]))
16718 // result: (Const32 [c*d])
// The _i0 loop tries both operand orders (Mul32 is commutative).
16720 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
16721 if v_0.Op != OpConst32 {
16724 c := auxIntToInt32(v_0.AuxInt)
16725 if v_1.Op != OpConst32 {
16728 d := auxIntToInt32(v_1.AuxInt)
16730 v.AuxInt = int32ToAuxInt(c * d)
16735 // match: (Mul32 (Const32 [1]) x)
16738 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
16739 if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 1 {
16748 // match: (Mul32 (Const32 [-1]) x)
16749 // result: (Neg32 x)
16751 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
16752 if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != -1 {
16762 // match: (Mul32 <t> n (Const32 [c]))
16763 // cond: isPowerOfTwo32(c)
16764 // result: (Lsh32x64 <t> n (Const64 <typ.UInt64> [log32(c)]))
16767 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
16769 if v_1.Op != OpConst32 {
16772 c := auxIntToInt32(v_1.AuxInt)
16773 if !(isPowerOfTwo32(c)) {
16776 v.reset(OpLsh32x64)
16778 v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
16779 v0.AuxInt = int64ToAuxInt(log32(c))
16785 // match: (Mul32 <t> n (Const32 [c]))
16786 // cond: t.IsSigned() && isPowerOfTwo32(-c)
16787 // result: (Neg32 (Lsh32x64 <t> n (Const64 <typ.UInt64> [log32(-c)])))
16790 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
16792 if v_1.Op != OpConst32 {
16795 c := auxIntToInt32(v_1.AuxInt)
16796 if !(t.IsSigned() && isPowerOfTwo32(-c)) {
16800 v0 := b.NewValue0(v.Pos, OpLsh32x64, t)
16801 v1 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
16802 v1.AuxInt = int64ToAuxInt(log32(-c))
// Distribute a constant factor over an addition with a constant term:
// c*(d+x) -> c*d + c*x, so the c*d part folds to a single constant.
16809 // match: (Mul32 (Const32 <t> [c]) (Add32 <t> (Const32 <t> [d]) x))
16810 // result: (Add32 (Const32 <t> [c*d]) (Mul32 <t> (Const32 <t> [c]) x))
16812 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
16813 if v_0.Op != OpConst32 {
16817 c := auxIntToInt32(v_0.AuxInt)
16818 if v_1.Op != OpAdd32 || v_1.Type != t {
16822 v_1_0 := v_1.Args[0]
16823 v_1_1 := v_1.Args[1]
16824 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
16825 if v_1_0.Op != OpConst32 || v_1_0.Type != t {
16828 d := auxIntToInt32(v_1_0.AuxInt)
16831 v0 := b.NewValue0(v.Pos, OpConst32, t)
16832 v0.AuxInt = int32ToAuxInt(c * d)
16833 v1 := b.NewValue0(v.Pos, OpMul32, t)
16834 v2 := b.NewValue0(v.Pos, OpConst32, t)
16835 v2.AuxInt = int32ToAuxInt(c)
16843 // match: (Mul32 (Const32 [0]) _)
16844 // result: (Const32 [0])
16846 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
16847 if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
16851 v.AuxInt = int32ToAuxInt(0)
16856 // match: (Mul32 (Mul32 i:(Const32 <t>) z) x)
16857 // cond: (z.Op != OpConst32 && x.Op != OpConst32)
16858 // result: (Mul32 i (Mul32 <t> x z))
16860 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
16861 if v_0.Op != OpMul32 {
16865 v_0_0 := v_0.Args[0]
16866 v_0_1 := v_0.Args[1]
16867 for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
16869 if i.Op != OpConst32 {
16875 if !(z.Op != OpConst32 && x.Op != OpConst32) {
16879 v0 := b.NewValue0(v.Pos, OpMul32, t)
16887 // match: (Mul32 (Const32 <t> [c]) (Mul32 (Const32 <t> [d]) x))
16888 // result: (Mul32 (Const32 <t> [c*d]) x)
16890 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
16891 if v_0.Op != OpConst32 {
16895 c := auxIntToInt32(v_0.AuxInt)
16896 if v_1.Op != OpMul32 {
16900 v_1_0 := v_1.Args[0]
16901 v_1_1 := v_1.Args[1]
16902 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
16903 if v_1_0.Op != OpConst32 || v_1_0.Type != t {
16906 d := auxIntToInt32(v_1_0.AuxInt)
16909 v0 := b.NewValue0(v.Pos, OpConst32, t)
16910 v0.AuxInt = int32ToAuxInt(c * d)
// rewriteValuegeneric_OpMul32F applies the generic rewrite rules for
// Mul32F: fold a product of two float32 constants (only when the result
// is not NaN), and simplify multiplication by the exact constants 1, -1,
// and 2. Returns a bool per the generator's convention (true when a rule
// fired and v was rewritten).
// NOTE(review): machine-generated from _gen/generic.rules ("DO NOT EDIT")
// — change the rules and regenerate rather than editing by hand.
16919 func rewriteValuegeneric_OpMul32F(v *Value) bool {
16922 // match: (Mul32F (Const32F [c]) (Const32F [d]))
16923 // cond: c*d == c*d
// The cond c*d == c*d is false exactly when c*d is NaN (NaN != NaN),
// so NaN-producing constant products are deliberately not folded.
16924 // result: (Const32F [c*d])
// The _i0 loop tries both operand orders (Mul32F is commutative).
16926 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
16927 if v_0.Op != OpConst32F {
16930 c := auxIntToFloat32(v_0.AuxInt)
16931 if v_1.Op != OpConst32F {
16934 d := auxIntToFloat32(v_1.AuxInt)
16938 v.reset(OpConst32F)
16939 v.AuxInt = float32ToAuxInt(c * d)
16944 // match: (Mul32F x (Const32F [1]))
16947 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
16949 if v_1.Op != OpConst32F || auxIntToFloat32(v_1.AuxInt) != 1 {
16957 // match: (Mul32F x (Const32F [-1]))
16958 // result: (Neg32F x)
16960 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
16962 if v_1.Op != OpConst32F || auxIntToFloat32(v_1.AuxInt) != -1 {
16971 // match: (Mul32F x (Const32F [2]))
16972 // result: (Add32F x x)
16974 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
16976 if v_1.Op != OpConst32F || auxIntToFloat32(v_1.AuxInt) != 2 {
// rewriteValuegeneric_OpMul64 applies the generic rewrite rules for Mul64.
// Mirrors rewriteValuegeneric_OpMul16/OpMul32 at 64-bit width: constant
// folding, multiply-by-1/-1/0 simplification, power-of-two strength
// reduction to Lsh64x64 (Neg64 of a shift for negative powers of two on
// signed types), distribution of a constant over (Add64 (Const64 [d]) x),
// and reassociation of constant factors. Returns a bool per the
// generator's convention (true when a rule fired and v was rewritten).
// NOTE(review): machine-generated from _gen/generic.rules ("DO NOT EDIT")
// — change the rules and regenerate rather than editing by hand.
16987 func rewriteValuegeneric_OpMul64(v *Value) bool {
16991 typ := &b.Func.Config.Types
16992 // match: (Mul64 (Const64 [c]) (Const64 [d]))
16993 // result: (Const64 [c*d])
// The _i0 loop tries both operand orders (Mul64 is commutative).
16995 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
16996 if v_0.Op != OpConst64 {
16999 c := auxIntToInt64(v_0.AuxInt)
17000 if v_1.Op != OpConst64 {
17003 d := auxIntToInt64(v_1.AuxInt)
17005 v.AuxInt = int64ToAuxInt(c * d)
17010 // match: (Mul64 (Const64 [1]) x)
17013 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
17014 if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 1 {
17023 // match: (Mul64 (Const64 [-1]) x)
17024 // result: (Neg64 x)
17026 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
17027 if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != -1 {
17037 // match: (Mul64 <t> n (Const64 [c]))
17038 // cond: isPowerOfTwo64(c)
17039 // result: (Lsh64x64 <t> n (Const64 <typ.UInt64> [log64(c)]))
17042 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
17044 if v_1.Op != OpConst64 {
17047 c := auxIntToInt64(v_1.AuxInt)
17048 if !(isPowerOfTwo64(c)) {
17051 v.reset(OpLsh64x64)
17053 v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
17054 v0.AuxInt = int64ToAuxInt(log64(c))
17060 // match: (Mul64 <t> n (Const64 [c]))
17061 // cond: t.IsSigned() && isPowerOfTwo64(-c)
17062 // result: (Neg64 (Lsh64x64 <t> n (Const64 <typ.UInt64> [log64(-c)])))
17065 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
17067 if v_1.Op != OpConst64 {
17070 c := auxIntToInt64(v_1.AuxInt)
17071 if !(t.IsSigned() && isPowerOfTwo64(-c)) {
17075 v0 := b.NewValue0(v.Pos, OpLsh64x64, t)
17076 v1 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
17077 v1.AuxInt = int64ToAuxInt(log64(-c))
// Distribute a constant factor over an addition with a constant term:
// c*(d+x) -> c*d + c*x, so the c*d part folds to a single constant.
17084 // match: (Mul64 (Const64 <t> [c]) (Add64 <t> (Const64 <t> [d]) x))
17085 // result: (Add64 (Const64 <t> [c*d]) (Mul64 <t> (Const64 <t> [c]) x))
17087 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
17088 if v_0.Op != OpConst64 {
17092 c := auxIntToInt64(v_0.AuxInt)
17093 if v_1.Op != OpAdd64 || v_1.Type != t {
17097 v_1_0 := v_1.Args[0]
17098 v_1_1 := v_1.Args[1]
17099 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
17100 if v_1_0.Op != OpConst64 || v_1_0.Type != t {
17103 d := auxIntToInt64(v_1_0.AuxInt)
17106 v0 := b.NewValue0(v.Pos, OpConst64, t)
17107 v0.AuxInt = int64ToAuxInt(c * d)
17108 v1 := b.NewValue0(v.Pos, OpMul64, t)
17109 v2 := b.NewValue0(v.Pos, OpConst64, t)
17110 v2.AuxInt = int64ToAuxInt(c)
17118 // match: (Mul64 (Const64 [0]) _)
17119 // result: (Const64 [0])
17121 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
17122 if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
17126 v.AuxInt = int64ToAuxInt(0)
17131 // match: (Mul64 (Mul64 i:(Const64 <t>) z) x)
17132 // cond: (z.Op != OpConst64 && x.Op != OpConst64)
17133 // result: (Mul64 i (Mul64 <t> x z))
17135 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
17136 if v_0.Op != OpMul64 {
17140 v_0_0 := v_0.Args[0]
17141 v_0_1 := v_0.Args[1]
17142 for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
17144 if i.Op != OpConst64 {
17150 if !(z.Op != OpConst64 && x.Op != OpConst64) {
17154 v0 := b.NewValue0(v.Pos, OpMul64, t)
17162 // match: (Mul64 (Const64 <t> [c]) (Mul64 (Const64 <t> [d]) x))
17163 // result: (Mul64 (Const64 <t> [c*d]) x)
17165 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
17166 if v_0.Op != OpConst64 {
17170 c := auxIntToInt64(v_0.AuxInt)
17171 if v_1.Op != OpMul64 {
17175 v_1_0 := v_1.Args[0]
17176 v_1_1 := v_1.Args[1]
17177 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
17178 if v_1_0.Op != OpConst64 || v_1_0.Type != t {
17181 d := auxIntToInt64(v_1_0.AuxInt)
17184 v0 := b.NewValue0(v.Pos, OpConst64, t)
17185 v0.AuxInt = int64ToAuxInt(c * d)
17194 func rewriteValuegeneric_OpMul64F(v *Value) bool {
17197 // match: (Mul64F (Const64F [c]) (Const64F [d]))
17198 // cond: c*d == c*d
17199 // result: (Const64F [c*d])
17201 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
17202 if v_0.Op != OpConst64F {
17205 c := auxIntToFloat64(v_0.AuxInt)
17206 if v_1.Op != OpConst64F {
17209 d := auxIntToFloat64(v_1.AuxInt)
17213 v.reset(OpConst64F)
17214 v.AuxInt = float64ToAuxInt(c * d)
17219 // match: (Mul64F x (Const64F [1]))
17222 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
17224 if v_1.Op != OpConst64F || auxIntToFloat64(v_1.AuxInt) != 1 {
17232 // match: (Mul64F x (Const64F [-1]))
17233 // result: (Neg64F x)
17235 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
17237 if v_1.Op != OpConst64F || auxIntToFloat64(v_1.AuxInt) != -1 {
17246 // match: (Mul64F x (Const64F [2]))
17247 // result: (Add64F x x)
17249 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
17251 if v_1.Op != OpConst64F || auxIntToFloat64(v_1.AuxInt) != 2 {
17262 func rewriteValuegeneric_OpMul8(v *Value) bool {
17266 typ := &b.Func.Config.Types
17267 // match: (Mul8 (Const8 [c]) (Const8 [d]))
17268 // result: (Const8 [c*d])
17270 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
17271 if v_0.Op != OpConst8 {
17274 c := auxIntToInt8(v_0.AuxInt)
17275 if v_1.Op != OpConst8 {
17278 d := auxIntToInt8(v_1.AuxInt)
17280 v.AuxInt = int8ToAuxInt(c * d)
17285 // match: (Mul8 (Const8 [1]) x)
17288 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
17289 if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 1 {
17298 // match: (Mul8 (Const8 [-1]) x)
17299 // result: (Neg8 x)
17301 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
17302 if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != -1 {
17312 // match: (Mul8 <t> n (Const8 [c]))
17313 // cond: isPowerOfTwo8(c)
17314 // result: (Lsh8x64 <t> n (Const64 <typ.UInt64> [log8(c)]))
17317 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
17319 if v_1.Op != OpConst8 {
17322 c := auxIntToInt8(v_1.AuxInt)
17323 if !(isPowerOfTwo8(c)) {
17328 v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
17329 v0.AuxInt = int64ToAuxInt(log8(c))
17335 // match: (Mul8 <t> n (Const8 [c]))
17336 // cond: t.IsSigned() && isPowerOfTwo8(-c)
17337 // result: (Neg8 (Lsh8x64 <t> n (Const64 <typ.UInt64> [log8(-c)])))
17340 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
17342 if v_1.Op != OpConst8 {
17345 c := auxIntToInt8(v_1.AuxInt)
17346 if !(t.IsSigned() && isPowerOfTwo8(-c)) {
17350 v0 := b.NewValue0(v.Pos, OpLsh8x64, t)
17351 v1 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
17352 v1.AuxInt = int64ToAuxInt(log8(-c))
17359 // match: (Mul8 (Const8 [0]) _)
17360 // result: (Const8 [0])
17362 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
17363 if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
17367 v.AuxInt = int8ToAuxInt(0)
17372 // match: (Mul8 (Mul8 i:(Const8 <t>) z) x)
17373 // cond: (z.Op != OpConst8 && x.Op != OpConst8)
17374 // result: (Mul8 i (Mul8 <t> x z))
17376 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
17377 if v_0.Op != OpMul8 {
17381 v_0_0 := v_0.Args[0]
17382 v_0_1 := v_0.Args[1]
17383 for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
17385 if i.Op != OpConst8 {
17391 if !(z.Op != OpConst8 && x.Op != OpConst8) {
17395 v0 := b.NewValue0(v.Pos, OpMul8, t)
17403 // match: (Mul8 (Const8 <t> [c]) (Mul8 (Const8 <t> [d]) x))
17404 // result: (Mul8 (Const8 <t> [c*d]) x)
17406 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
17407 if v_0.Op != OpConst8 {
17411 c := auxIntToInt8(v_0.AuxInt)
17412 if v_1.Op != OpMul8 {
17416 v_1_0 := v_1.Args[0]
17417 v_1_1 := v_1.Args[1]
17418 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
17419 if v_1_0.Op != OpConst8 || v_1_0.Type != t {
17422 d := auxIntToInt8(v_1_0.AuxInt)
17425 v0 := b.NewValue0(v.Pos, OpConst8, t)
17426 v0.AuxInt = int8ToAuxInt(c * d)
17435 func rewriteValuegeneric_OpNeg16(v *Value) bool {
17438 // match: (Neg16 (Const16 [c]))
17439 // result: (Const16 [-c])
17441 if v_0.Op != OpConst16 {
17444 c := auxIntToInt16(v_0.AuxInt)
17446 v.AuxInt = int16ToAuxInt(-c)
17449 // match: (Neg16 (Sub16 x y))
17450 // result: (Sub16 y x)
17452 if v_0.Op != OpSub16 {
17461 // match: (Neg16 (Neg16 x))
17464 if v_0.Op != OpNeg16 {
17471 // match: (Neg16 <t> (Com16 x))
17472 // result: (Add16 (Const16 <t> [1]) x)
17475 if v_0.Op != OpCom16 {
17480 v0 := b.NewValue0(v.Pos, OpConst16, t)
17481 v0.AuxInt = int16ToAuxInt(1)
17487 func rewriteValuegeneric_OpNeg32(v *Value) bool {
17490 // match: (Neg32 (Const32 [c]))
17491 // result: (Const32 [-c])
17493 if v_0.Op != OpConst32 {
17496 c := auxIntToInt32(v_0.AuxInt)
17498 v.AuxInt = int32ToAuxInt(-c)
17501 // match: (Neg32 (Sub32 x y))
17502 // result: (Sub32 y x)
17504 if v_0.Op != OpSub32 {
17513 // match: (Neg32 (Neg32 x))
17516 if v_0.Op != OpNeg32 {
17523 // match: (Neg32 <t> (Com32 x))
17524 // result: (Add32 (Const32 <t> [1]) x)
17527 if v_0.Op != OpCom32 {
17532 v0 := b.NewValue0(v.Pos, OpConst32, t)
17533 v0.AuxInt = int32ToAuxInt(1)
17539 func rewriteValuegeneric_OpNeg32F(v *Value) bool {
17541 // match: (Neg32F (Const32F [c]))
17543 // result: (Const32F [-c])
17545 if v_0.Op != OpConst32F {
17548 c := auxIntToFloat32(v_0.AuxInt)
17552 v.reset(OpConst32F)
17553 v.AuxInt = float32ToAuxInt(-c)
17558 func rewriteValuegeneric_OpNeg64(v *Value) bool {
17561 // match: (Neg64 (Const64 [c]))
17562 // result: (Const64 [-c])
17564 if v_0.Op != OpConst64 {
17567 c := auxIntToInt64(v_0.AuxInt)
17569 v.AuxInt = int64ToAuxInt(-c)
17572 // match: (Neg64 (Sub64 x y))
17573 // result: (Sub64 y x)
17575 if v_0.Op != OpSub64 {
17584 // match: (Neg64 (Neg64 x))
17587 if v_0.Op != OpNeg64 {
17594 // match: (Neg64 <t> (Com64 x))
17595 // result: (Add64 (Const64 <t> [1]) x)
17598 if v_0.Op != OpCom64 {
17603 v0 := b.NewValue0(v.Pos, OpConst64, t)
17604 v0.AuxInt = int64ToAuxInt(1)
17610 func rewriteValuegeneric_OpNeg64F(v *Value) bool {
17612 // match: (Neg64F (Const64F [c]))
17614 // result: (Const64F [-c])
17616 if v_0.Op != OpConst64F {
17619 c := auxIntToFloat64(v_0.AuxInt)
17623 v.reset(OpConst64F)
17624 v.AuxInt = float64ToAuxInt(-c)
17629 func rewriteValuegeneric_OpNeg8(v *Value) bool {
17632 // match: (Neg8 (Const8 [c]))
17633 // result: (Const8 [-c])
17635 if v_0.Op != OpConst8 {
17638 c := auxIntToInt8(v_0.AuxInt)
17640 v.AuxInt = int8ToAuxInt(-c)
17643 // match: (Neg8 (Sub8 x y))
17644 // result: (Sub8 y x)
17646 if v_0.Op != OpSub8 {
17655 // match: (Neg8 (Neg8 x))
17658 if v_0.Op != OpNeg8 {
17665 // match: (Neg8 <t> (Com8 x))
17666 // result: (Add8 (Const8 <t> [1]) x)
17669 if v_0.Op != OpCom8 {
17674 v0 := b.NewValue0(v.Pos, OpConst8, t)
17675 v0.AuxInt = int8ToAuxInt(1)
17681 func rewriteValuegeneric_OpNeq16(v *Value) bool {
17685 typ := &b.Func.Config.Types
17686 // match: (Neq16 x x)
17687 // result: (ConstBool [false])
17693 v.reset(OpConstBool)
17694 v.AuxInt = boolToAuxInt(false)
17697 // match: (Neq16 (Const16 <t> [c]) (Add16 (Const16 <t> [d]) x))
17698 // result: (Neq16 (Const16 <t> [c-d]) x)
17700 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
17701 if v_0.Op != OpConst16 {
17705 c := auxIntToInt16(v_0.AuxInt)
17706 if v_1.Op != OpAdd16 {
17710 v_1_0 := v_1.Args[0]
17711 v_1_1 := v_1.Args[1]
17712 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
17713 if v_1_0.Op != OpConst16 || v_1_0.Type != t {
17716 d := auxIntToInt16(v_1_0.AuxInt)
17719 v0 := b.NewValue0(v.Pos, OpConst16, t)
17720 v0.AuxInt = int16ToAuxInt(c - d)
17727 // match: (Neq16 (Const16 [c]) (Const16 [d]))
17728 // result: (ConstBool [c != d])
17730 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
17731 if v_0.Op != OpConst16 {
17734 c := auxIntToInt16(v_0.AuxInt)
17735 if v_1.Op != OpConst16 {
17738 d := auxIntToInt16(v_1.AuxInt)
17739 v.reset(OpConstBool)
17740 v.AuxInt = boolToAuxInt(c != d)
17745 // match: (Neq16 n (Lsh16x64 (Rsh16x64 (Add16 <t> n (Rsh16Ux64 <t> (Rsh16x64 <t> n (Const64 <typ.UInt64> [15])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) )
17746 // cond: k > 0 && k < 15 && kbar == 16 - k
17747 // result: (Neq16 (And16 <t> n (Const16 <t> [1<<uint(k)-1])) (Const16 <t> [0]))
17749 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
17751 if v_1.Op != OpLsh16x64 {
17755 v_1_0 := v_1.Args[0]
17756 if v_1_0.Op != OpRsh16x64 {
17760 v_1_0_0 := v_1_0.Args[0]
17761 if v_1_0_0.Op != OpAdd16 {
17765 _ = v_1_0_0.Args[1]
17766 v_1_0_0_0 := v_1_0_0.Args[0]
17767 v_1_0_0_1 := v_1_0_0.Args[1]
17768 for _i1 := 0; _i1 <= 1; _i1, v_1_0_0_0, v_1_0_0_1 = _i1+1, v_1_0_0_1, v_1_0_0_0 {
17769 if n != v_1_0_0_0 || v_1_0_0_1.Op != OpRsh16Ux64 || v_1_0_0_1.Type != t {
17772 _ = v_1_0_0_1.Args[1]
17773 v_1_0_0_1_0 := v_1_0_0_1.Args[0]
17774 if v_1_0_0_1_0.Op != OpRsh16x64 || v_1_0_0_1_0.Type != t {
17777 _ = v_1_0_0_1_0.Args[1]
17778 if n != v_1_0_0_1_0.Args[0] {
17781 v_1_0_0_1_0_1 := v_1_0_0_1_0.Args[1]
17782 if v_1_0_0_1_0_1.Op != OpConst64 || v_1_0_0_1_0_1.Type != typ.UInt64 || auxIntToInt64(v_1_0_0_1_0_1.AuxInt) != 15 {
17785 v_1_0_0_1_1 := v_1_0_0_1.Args[1]
17786 if v_1_0_0_1_1.Op != OpConst64 || v_1_0_0_1_1.Type != typ.UInt64 {
17789 kbar := auxIntToInt64(v_1_0_0_1_1.AuxInt)
17790 v_1_0_1 := v_1_0.Args[1]
17791 if v_1_0_1.Op != OpConst64 || v_1_0_1.Type != typ.UInt64 {
17794 k := auxIntToInt64(v_1_0_1.AuxInt)
17795 v_1_1 := v_1.Args[1]
17796 if v_1_1.Op != OpConst64 || v_1_1.Type != typ.UInt64 || auxIntToInt64(v_1_1.AuxInt) != k || !(k > 0 && k < 15 && kbar == 16-k) {
17800 v0 := b.NewValue0(v.Pos, OpAnd16, t)
17801 v1 := b.NewValue0(v.Pos, OpConst16, t)
17802 v1.AuxInt = int16ToAuxInt(1<<uint(k) - 1)
17804 v2 := b.NewValue0(v.Pos, OpConst16, t)
17805 v2.AuxInt = int16ToAuxInt(0)
17812 // match: (Neq16 s:(Sub16 x y) (Const16 [0]))
17813 // cond: s.Uses == 1
17814 // result: (Neq16 x y)
17816 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
17818 if s.Op != OpSub16 {
17823 if v_1.Op != OpConst16 || auxIntToInt16(v_1.AuxInt) != 0 || !(s.Uses == 1) {
17832 // match: (Neq16 (And16 <t> x (Const16 <t> [y])) (Const16 <t> [y]))
17833 // cond: oneBit16(y)
17834 // result: (Eq16 (And16 <t> x (Const16 <t> [y])) (Const16 <t> [0]))
17836 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
17837 if v_0.Op != OpAnd16 {
17842 v_0_0 := v_0.Args[0]
17843 v_0_1 := v_0.Args[1]
17844 for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
17846 if v_0_1.Op != OpConst16 || v_0_1.Type != t {
17849 y := auxIntToInt16(v_0_1.AuxInt)
17850 if v_1.Op != OpConst16 || v_1.Type != t || auxIntToInt16(v_1.AuxInt) != y || !(oneBit16(y)) {
17854 v0 := b.NewValue0(v.Pos, OpAnd16, t)
17855 v1 := b.NewValue0(v.Pos, OpConst16, t)
17856 v1.AuxInt = int16ToAuxInt(y)
17858 v2 := b.NewValue0(v.Pos, OpConst16, t)
17859 v2.AuxInt = int16ToAuxInt(0)
17868 func rewriteValuegeneric_OpNeq32(v *Value) bool {
17872 typ := &b.Func.Config.Types
17873 // match: (Neq32 x x)
17874 // result: (ConstBool [false])
17880 v.reset(OpConstBool)
17881 v.AuxInt = boolToAuxInt(false)
17884 // match: (Neq32 (Const32 <t> [c]) (Add32 (Const32 <t> [d]) x))
17885 // result: (Neq32 (Const32 <t> [c-d]) x)
17887 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
17888 if v_0.Op != OpConst32 {
17892 c := auxIntToInt32(v_0.AuxInt)
17893 if v_1.Op != OpAdd32 {
17897 v_1_0 := v_1.Args[0]
17898 v_1_1 := v_1.Args[1]
17899 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
17900 if v_1_0.Op != OpConst32 || v_1_0.Type != t {
17903 d := auxIntToInt32(v_1_0.AuxInt)
17906 v0 := b.NewValue0(v.Pos, OpConst32, t)
17907 v0.AuxInt = int32ToAuxInt(c - d)
17914 // match: (Neq32 (Const32 [c]) (Const32 [d]))
17915 // result: (ConstBool [c != d])
17917 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
17918 if v_0.Op != OpConst32 {
17921 c := auxIntToInt32(v_0.AuxInt)
17922 if v_1.Op != OpConst32 {
17925 d := auxIntToInt32(v_1.AuxInt)
17926 v.reset(OpConstBool)
17927 v.AuxInt = boolToAuxInt(c != d)
17932 // match: (Neq32 n (Lsh32x64 (Rsh32x64 (Add32 <t> n (Rsh32Ux64 <t> (Rsh32x64 <t> n (Const64 <typ.UInt64> [31])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) )
17933 // cond: k > 0 && k < 31 && kbar == 32 - k
17934 // result: (Neq32 (And32 <t> n (Const32 <t> [1<<uint(k)-1])) (Const32 <t> [0]))
17936 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
17938 if v_1.Op != OpLsh32x64 {
17942 v_1_0 := v_1.Args[0]
17943 if v_1_0.Op != OpRsh32x64 {
17947 v_1_0_0 := v_1_0.Args[0]
17948 if v_1_0_0.Op != OpAdd32 {
17952 _ = v_1_0_0.Args[1]
17953 v_1_0_0_0 := v_1_0_0.Args[0]
17954 v_1_0_0_1 := v_1_0_0.Args[1]
17955 for _i1 := 0; _i1 <= 1; _i1, v_1_0_0_0, v_1_0_0_1 = _i1+1, v_1_0_0_1, v_1_0_0_0 {
17956 if n != v_1_0_0_0 || v_1_0_0_1.Op != OpRsh32Ux64 || v_1_0_0_1.Type != t {
17959 _ = v_1_0_0_1.Args[1]
17960 v_1_0_0_1_0 := v_1_0_0_1.Args[0]
17961 if v_1_0_0_1_0.Op != OpRsh32x64 || v_1_0_0_1_0.Type != t {
17964 _ = v_1_0_0_1_0.Args[1]
17965 if n != v_1_0_0_1_0.Args[0] {
17968 v_1_0_0_1_0_1 := v_1_0_0_1_0.Args[1]
17969 if v_1_0_0_1_0_1.Op != OpConst64 || v_1_0_0_1_0_1.Type != typ.UInt64 || auxIntToInt64(v_1_0_0_1_0_1.AuxInt) != 31 {
17972 v_1_0_0_1_1 := v_1_0_0_1.Args[1]
17973 if v_1_0_0_1_1.Op != OpConst64 || v_1_0_0_1_1.Type != typ.UInt64 {
17976 kbar := auxIntToInt64(v_1_0_0_1_1.AuxInt)
17977 v_1_0_1 := v_1_0.Args[1]
17978 if v_1_0_1.Op != OpConst64 || v_1_0_1.Type != typ.UInt64 {
17981 k := auxIntToInt64(v_1_0_1.AuxInt)
17982 v_1_1 := v_1.Args[1]
17983 if v_1_1.Op != OpConst64 || v_1_1.Type != typ.UInt64 || auxIntToInt64(v_1_1.AuxInt) != k || !(k > 0 && k < 31 && kbar == 32-k) {
17987 v0 := b.NewValue0(v.Pos, OpAnd32, t)
17988 v1 := b.NewValue0(v.Pos, OpConst32, t)
17989 v1.AuxInt = int32ToAuxInt(1<<uint(k) - 1)
17991 v2 := b.NewValue0(v.Pos, OpConst32, t)
17992 v2.AuxInt = int32ToAuxInt(0)
17999 // match: (Neq32 s:(Sub32 x y) (Const32 [0]))
18000 // cond: s.Uses == 1
18001 // result: (Neq32 x y)
18003 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
18005 if s.Op != OpSub32 {
18010 if v_1.Op != OpConst32 || auxIntToInt32(v_1.AuxInt) != 0 || !(s.Uses == 1) {
18019 // match: (Neq32 (And32 <t> x (Const32 <t> [y])) (Const32 <t> [y]))
18020 // cond: oneBit32(y)
18021 // result: (Eq32 (And32 <t> x (Const32 <t> [y])) (Const32 <t> [0]))
18023 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
18024 if v_0.Op != OpAnd32 {
18029 v_0_0 := v_0.Args[0]
18030 v_0_1 := v_0.Args[1]
18031 for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
18033 if v_0_1.Op != OpConst32 || v_0_1.Type != t {
18036 y := auxIntToInt32(v_0_1.AuxInt)
18037 if v_1.Op != OpConst32 || v_1.Type != t || auxIntToInt32(v_1.AuxInt) != y || !(oneBit32(y)) {
18041 v0 := b.NewValue0(v.Pos, OpAnd32, t)
18042 v1 := b.NewValue0(v.Pos, OpConst32, t)
18043 v1.AuxInt = int32ToAuxInt(y)
18045 v2 := b.NewValue0(v.Pos, OpConst32, t)
18046 v2.AuxInt = int32ToAuxInt(0)
18055 func rewriteValuegeneric_OpNeq32F(v *Value) bool {
18058 // match: (Neq32F (Const32F [c]) (Const32F [d]))
18059 // result: (ConstBool [c != d])
18061 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
18062 if v_0.Op != OpConst32F {
18065 c := auxIntToFloat32(v_0.AuxInt)
18066 if v_1.Op != OpConst32F {
18069 d := auxIntToFloat32(v_1.AuxInt)
18070 v.reset(OpConstBool)
18071 v.AuxInt = boolToAuxInt(c != d)
18078 func rewriteValuegeneric_OpNeq64(v *Value) bool {
18082 typ := &b.Func.Config.Types
18083 // match: (Neq64 x x)
18084 // result: (ConstBool [false])
18090 v.reset(OpConstBool)
18091 v.AuxInt = boolToAuxInt(false)
18094 // match: (Neq64 (Const64 <t> [c]) (Add64 (Const64 <t> [d]) x))
18095 // result: (Neq64 (Const64 <t> [c-d]) x)
18097 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
18098 if v_0.Op != OpConst64 {
18102 c := auxIntToInt64(v_0.AuxInt)
18103 if v_1.Op != OpAdd64 {
18107 v_1_0 := v_1.Args[0]
18108 v_1_1 := v_1.Args[1]
18109 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
18110 if v_1_0.Op != OpConst64 || v_1_0.Type != t {
18113 d := auxIntToInt64(v_1_0.AuxInt)
18116 v0 := b.NewValue0(v.Pos, OpConst64, t)
18117 v0.AuxInt = int64ToAuxInt(c - d)
18124 // match: (Neq64 (Const64 [c]) (Const64 [d]))
18125 // result: (ConstBool [c != d])
18127 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
18128 if v_0.Op != OpConst64 {
18131 c := auxIntToInt64(v_0.AuxInt)
18132 if v_1.Op != OpConst64 {
18135 d := auxIntToInt64(v_1.AuxInt)
18136 v.reset(OpConstBool)
18137 v.AuxInt = boolToAuxInt(c != d)
18142 // match: (Neq64 n (Lsh64x64 (Rsh64x64 (Add64 <t> n (Rsh64Ux64 <t> (Rsh64x64 <t> n (Const64 <typ.UInt64> [63])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) )
18143 // cond: k > 0 && k < 63 && kbar == 64 - k
18144 // result: (Neq64 (And64 <t> n (Const64 <t> [1<<uint(k)-1])) (Const64 <t> [0]))
18146 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
18148 if v_1.Op != OpLsh64x64 {
18152 v_1_0 := v_1.Args[0]
18153 if v_1_0.Op != OpRsh64x64 {
18157 v_1_0_0 := v_1_0.Args[0]
18158 if v_1_0_0.Op != OpAdd64 {
18162 _ = v_1_0_0.Args[1]
18163 v_1_0_0_0 := v_1_0_0.Args[0]
18164 v_1_0_0_1 := v_1_0_0.Args[1]
18165 for _i1 := 0; _i1 <= 1; _i1, v_1_0_0_0, v_1_0_0_1 = _i1+1, v_1_0_0_1, v_1_0_0_0 {
18166 if n != v_1_0_0_0 || v_1_0_0_1.Op != OpRsh64Ux64 || v_1_0_0_1.Type != t {
18169 _ = v_1_0_0_1.Args[1]
18170 v_1_0_0_1_0 := v_1_0_0_1.Args[0]
18171 if v_1_0_0_1_0.Op != OpRsh64x64 || v_1_0_0_1_0.Type != t {
18174 _ = v_1_0_0_1_0.Args[1]
18175 if n != v_1_0_0_1_0.Args[0] {
18178 v_1_0_0_1_0_1 := v_1_0_0_1_0.Args[1]
18179 if v_1_0_0_1_0_1.Op != OpConst64 || v_1_0_0_1_0_1.Type != typ.UInt64 || auxIntToInt64(v_1_0_0_1_0_1.AuxInt) != 63 {
18182 v_1_0_0_1_1 := v_1_0_0_1.Args[1]
18183 if v_1_0_0_1_1.Op != OpConst64 || v_1_0_0_1_1.Type != typ.UInt64 {
18186 kbar := auxIntToInt64(v_1_0_0_1_1.AuxInt)
18187 v_1_0_1 := v_1_0.Args[1]
18188 if v_1_0_1.Op != OpConst64 || v_1_0_1.Type != typ.UInt64 {
18191 k := auxIntToInt64(v_1_0_1.AuxInt)
18192 v_1_1 := v_1.Args[1]
18193 if v_1_1.Op != OpConst64 || v_1_1.Type != typ.UInt64 || auxIntToInt64(v_1_1.AuxInt) != k || !(k > 0 && k < 63 && kbar == 64-k) {
18197 v0 := b.NewValue0(v.Pos, OpAnd64, t)
18198 v1 := b.NewValue0(v.Pos, OpConst64, t)
18199 v1.AuxInt = int64ToAuxInt(1<<uint(k) - 1)
18201 v2 := b.NewValue0(v.Pos, OpConst64, t)
18202 v2.AuxInt = int64ToAuxInt(0)
18209 // match: (Neq64 s:(Sub64 x y) (Const64 [0]))
18210 // cond: s.Uses == 1
18211 // result: (Neq64 x y)
18213 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
18215 if s.Op != OpSub64 {
18220 if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 || !(s.Uses == 1) {
18229 // match: (Neq64 (And64 <t> x (Const64 <t> [y])) (Const64 <t> [y]))
18230 // cond: oneBit64(y)
18231 // result: (Eq64 (And64 <t> x (Const64 <t> [y])) (Const64 <t> [0]))
18233 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
18234 if v_0.Op != OpAnd64 {
18239 v_0_0 := v_0.Args[0]
18240 v_0_1 := v_0.Args[1]
18241 for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
18243 if v_0_1.Op != OpConst64 || v_0_1.Type != t {
18246 y := auxIntToInt64(v_0_1.AuxInt)
18247 if v_1.Op != OpConst64 || v_1.Type != t || auxIntToInt64(v_1.AuxInt) != y || !(oneBit64(y)) {
18251 v0 := b.NewValue0(v.Pos, OpAnd64, t)
18252 v1 := b.NewValue0(v.Pos, OpConst64, t)
18253 v1.AuxInt = int64ToAuxInt(y)
18255 v2 := b.NewValue0(v.Pos, OpConst64, t)
18256 v2.AuxInt = int64ToAuxInt(0)
18265 func rewriteValuegeneric_OpNeq64F(v *Value) bool {
18268 // match: (Neq64F (Const64F [c]) (Const64F [d]))
18269 // result: (ConstBool [c != d])
18271 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
18272 if v_0.Op != OpConst64F {
18275 c := auxIntToFloat64(v_0.AuxInt)
18276 if v_1.Op != OpConst64F {
18279 d := auxIntToFloat64(v_1.AuxInt)
18280 v.reset(OpConstBool)
18281 v.AuxInt = boolToAuxInt(c != d)
18288 func rewriteValuegeneric_OpNeq8(v *Value) bool {
18292 typ := &b.Func.Config.Types
18293 // match: (Neq8 x x)
18294 // result: (ConstBool [false])
18300 v.reset(OpConstBool)
18301 v.AuxInt = boolToAuxInt(false)
18304 // match: (Neq8 (Const8 <t> [c]) (Add8 (Const8 <t> [d]) x))
18305 // result: (Neq8 (Const8 <t> [c-d]) x)
18307 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
18308 if v_0.Op != OpConst8 {
18312 c := auxIntToInt8(v_0.AuxInt)
18313 if v_1.Op != OpAdd8 {
18317 v_1_0 := v_1.Args[0]
18318 v_1_1 := v_1.Args[1]
18319 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
18320 if v_1_0.Op != OpConst8 || v_1_0.Type != t {
18323 d := auxIntToInt8(v_1_0.AuxInt)
18326 v0 := b.NewValue0(v.Pos, OpConst8, t)
18327 v0.AuxInt = int8ToAuxInt(c - d)
18334 // match: (Neq8 (Const8 [c]) (Const8 [d]))
18335 // result: (ConstBool [c != d])
18337 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
18338 if v_0.Op != OpConst8 {
18341 c := auxIntToInt8(v_0.AuxInt)
18342 if v_1.Op != OpConst8 {
18345 d := auxIntToInt8(v_1.AuxInt)
18346 v.reset(OpConstBool)
18347 v.AuxInt = boolToAuxInt(c != d)
18352 // match: (Neq8 n (Lsh8x64 (Rsh8x64 (Add8 <t> n (Rsh8Ux64 <t> (Rsh8x64 <t> n (Const64 <typ.UInt64> [ 7])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) )
18353 // cond: k > 0 && k < 7 && kbar == 8 - k
18354 // result: (Neq8 (And8 <t> n (Const8 <t> [1<<uint(k)-1])) (Const8 <t> [0]))
18356 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
18358 if v_1.Op != OpLsh8x64 {
18362 v_1_0 := v_1.Args[0]
18363 if v_1_0.Op != OpRsh8x64 {
18367 v_1_0_0 := v_1_0.Args[0]
18368 if v_1_0_0.Op != OpAdd8 {
18372 _ = v_1_0_0.Args[1]
18373 v_1_0_0_0 := v_1_0_0.Args[0]
18374 v_1_0_0_1 := v_1_0_0.Args[1]
18375 for _i1 := 0; _i1 <= 1; _i1, v_1_0_0_0, v_1_0_0_1 = _i1+1, v_1_0_0_1, v_1_0_0_0 {
18376 if n != v_1_0_0_0 || v_1_0_0_1.Op != OpRsh8Ux64 || v_1_0_0_1.Type != t {
18379 _ = v_1_0_0_1.Args[1]
18380 v_1_0_0_1_0 := v_1_0_0_1.Args[0]
18381 if v_1_0_0_1_0.Op != OpRsh8x64 || v_1_0_0_1_0.Type != t {
18384 _ = v_1_0_0_1_0.Args[1]
18385 if n != v_1_0_0_1_0.Args[0] {
18388 v_1_0_0_1_0_1 := v_1_0_0_1_0.Args[1]
18389 if v_1_0_0_1_0_1.Op != OpConst64 || v_1_0_0_1_0_1.Type != typ.UInt64 || auxIntToInt64(v_1_0_0_1_0_1.AuxInt) != 7 {
18392 v_1_0_0_1_1 := v_1_0_0_1.Args[1]
18393 if v_1_0_0_1_1.Op != OpConst64 || v_1_0_0_1_1.Type != typ.UInt64 {
18396 kbar := auxIntToInt64(v_1_0_0_1_1.AuxInt)
18397 v_1_0_1 := v_1_0.Args[1]
18398 if v_1_0_1.Op != OpConst64 || v_1_0_1.Type != typ.UInt64 {
18401 k := auxIntToInt64(v_1_0_1.AuxInt)
18402 v_1_1 := v_1.Args[1]
18403 if v_1_1.Op != OpConst64 || v_1_1.Type != typ.UInt64 || auxIntToInt64(v_1_1.AuxInt) != k || !(k > 0 && k < 7 && kbar == 8-k) {
18407 v0 := b.NewValue0(v.Pos, OpAnd8, t)
18408 v1 := b.NewValue0(v.Pos, OpConst8, t)
18409 v1.AuxInt = int8ToAuxInt(1<<uint(k) - 1)
18411 v2 := b.NewValue0(v.Pos, OpConst8, t)
18412 v2.AuxInt = int8ToAuxInt(0)
18419 // match: (Neq8 s:(Sub8 x y) (Const8 [0]))
18420 // cond: s.Uses == 1
18421 // result: (Neq8 x y)
18423 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
18425 if s.Op != OpSub8 {
18430 if v_1.Op != OpConst8 || auxIntToInt8(v_1.AuxInt) != 0 || !(s.Uses == 1) {
18439 // match: (Neq8 (And8 <t> x (Const8 <t> [y])) (Const8 <t> [y]))
18440 // cond: oneBit8(y)
18441 // result: (Eq8 (And8 <t> x (Const8 <t> [y])) (Const8 <t> [0]))
18443 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
18444 if v_0.Op != OpAnd8 {
18449 v_0_0 := v_0.Args[0]
18450 v_0_1 := v_0.Args[1]
18451 for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
18453 if v_0_1.Op != OpConst8 || v_0_1.Type != t {
18456 y := auxIntToInt8(v_0_1.AuxInt)
18457 if v_1.Op != OpConst8 || v_1.Type != t || auxIntToInt8(v_1.AuxInt) != y || !(oneBit8(y)) {
18461 v0 := b.NewValue0(v.Pos, OpAnd8, t)
18462 v1 := b.NewValue0(v.Pos, OpConst8, t)
18463 v1.AuxInt = int8ToAuxInt(y)
18465 v2 := b.NewValue0(v.Pos, OpConst8, t)
18466 v2.AuxInt = int8ToAuxInt(0)
18475 func rewriteValuegeneric_OpNeqB(v *Value) bool {
18478 // match: (NeqB (ConstBool [c]) (ConstBool [d]))
18479 // result: (ConstBool [c != d])
18481 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
18482 if v_0.Op != OpConstBool {
18485 c := auxIntToBool(v_0.AuxInt)
18486 if v_1.Op != OpConstBool {
18489 d := auxIntToBool(v_1.AuxInt)
18490 v.reset(OpConstBool)
18491 v.AuxInt = boolToAuxInt(c != d)
18496 // match: (NeqB (ConstBool [false]) x)
18499 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
18500 if v_0.Op != OpConstBool || auxIntToBool(v_0.AuxInt) != false {
18509 // match: (NeqB (ConstBool [true]) x)
18512 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
18513 if v_0.Op != OpConstBool || auxIntToBool(v_0.AuxInt) != true {
18523 // match: (NeqB (Not x) (Not y))
18524 // result: (NeqB x y)
18526 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
18527 if v_0.Op != OpNot {
18531 if v_1.Op != OpNot {
18543 func rewriteValuegeneric_OpNeqInter(v *Value) bool {
18547 typ := &b.Func.Config.Types
18548 // match: (NeqInter x y)
18549 // result: (NeqPtr (ITab x) (ITab y))
18554 v0 := b.NewValue0(v.Pos, OpITab, typ.Uintptr)
18556 v1 := b.NewValue0(v.Pos, OpITab, typ.Uintptr)
18562 func rewriteValuegeneric_OpNeqPtr(v *Value) bool {
18565 // match: (NeqPtr x x)
18566 // result: (ConstBool [false])
18572 v.reset(OpConstBool)
18573 v.AuxInt = boolToAuxInt(false)
18576 // match: (NeqPtr (Addr {x} _) (Addr {y} _))
18577 // result: (ConstBool [x != y])
18579 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
18580 if v_0.Op != OpAddr {
18583 x := auxToSym(v_0.Aux)
18584 if v_1.Op != OpAddr {
18587 y := auxToSym(v_1.Aux)
18588 v.reset(OpConstBool)
18589 v.AuxInt = boolToAuxInt(x != y)
18594 // match: (NeqPtr (Addr {x} _) (OffPtr [o] (Addr {y} _)))
18595 // result: (ConstBool [x != y || o != 0])
18597 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
18598 if v_0.Op != OpAddr {
18601 x := auxToSym(v_0.Aux)
18602 if v_1.Op != OpOffPtr {
18605 o := auxIntToInt64(v_1.AuxInt)
18606 v_1_0 := v_1.Args[0]
18607 if v_1_0.Op != OpAddr {
18610 y := auxToSym(v_1_0.Aux)
18611 v.reset(OpConstBool)
18612 v.AuxInt = boolToAuxInt(x != y || o != 0)
18617 // match: (NeqPtr (OffPtr [o1] (Addr {x} _)) (OffPtr [o2] (Addr {y} _)))
18618 // result: (ConstBool [x != y || o1 != o2])
18620 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
18621 if v_0.Op != OpOffPtr {
18624 o1 := auxIntToInt64(v_0.AuxInt)
18625 v_0_0 := v_0.Args[0]
18626 if v_0_0.Op != OpAddr {
18629 x := auxToSym(v_0_0.Aux)
18630 if v_1.Op != OpOffPtr {
18633 o2 := auxIntToInt64(v_1.AuxInt)
18634 v_1_0 := v_1.Args[0]
18635 if v_1_0.Op != OpAddr {
18638 y := auxToSym(v_1_0.Aux)
18639 v.reset(OpConstBool)
18640 v.AuxInt = boolToAuxInt(x != y || o1 != o2)
18645 // match: (NeqPtr (LocalAddr {x} _ _) (LocalAddr {y} _ _))
18646 // result: (ConstBool [x != y])
18648 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
18649 if v_0.Op != OpLocalAddr {
18652 x := auxToSym(v_0.Aux)
18653 if v_1.Op != OpLocalAddr {
18656 y := auxToSym(v_1.Aux)
18657 v.reset(OpConstBool)
18658 v.AuxInt = boolToAuxInt(x != y)
18663 // match: (NeqPtr (LocalAddr {x} _ _) (OffPtr [o] (LocalAddr {y} _ _)))
18664 // result: (ConstBool [x != y || o != 0])
18666 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
18667 if v_0.Op != OpLocalAddr {
18670 x := auxToSym(v_0.Aux)
18671 if v_1.Op != OpOffPtr {
18674 o := auxIntToInt64(v_1.AuxInt)
18675 v_1_0 := v_1.Args[0]
18676 if v_1_0.Op != OpLocalAddr {
18679 y := auxToSym(v_1_0.Aux)
18680 v.reset(OpConstBool)
18681 v.AuxInt = boolToAuxInt(x != y || o != 0)
18686 // match: (NeqPtr (OffPtr [o1] (LocalAddr {x} _ _)) (OffPtr [o2] (LocalAddr {y} _ _)))
18687 // result: (ConstBool [x != y || o1 != o2])
18689 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
18690 if v_0.Op != OpOffPtr {
18693 o1 := auxIntToInt64(v_0.AuxInt)
18694 v_0_0 := v_0.Args[0]
18695 if v_0_0.Op != OpLocalAddr {
18698 x := auxToSym(v_0_0.Aux)
18699 if v_1.Op != OpOffPtr {
18702 o2 := auxIntToInt64(v_1.AuxInt)
18703 v_1_0 := v_1.Args[0]
18704 if v_1_0.Op != OpLocalAddr {
18707 y := auxToSym(v_1_0.Aux)
18708 v.reset(OpConstBool)
18709 v.AuxInt = boolToAuxInt(x != y || o1 != o2)
18714 // match: (NeqPtr (OffPtr [o1] p1) p2)
18715 // cond: isSamePtr(p1, p2)
18716 // result: (ConstBool [o1 != 0])
18718 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
18719 if v_0.Op != OpOffPtr {
18722 o1 := auxIntToInt64(v_0.AuxInt)
18725 if !(isSamePtr(p1, p2)) {
18728 v.reset(OpConstBool)
18729 v.AuxInt = boolToAuxInt(o1 != 0)
18734 // match: (NeqPtr (OffPtr [o1] p1) (OffPtr [o2] p2))
18735 // cond: isSamePtr(p1, p2)
18736 // result: (ConstBool [o1 != o2])
18738 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
18739 if v_0.Op != OpOffPtr {
18742 o1 := auxIntToInt64(v_0.AuxInt)
18744 if v_1.Op != OpOffPtr {
18747 o2 := auxIntToInt64(v_1.AuxInt)
18749 if !(isSamePtr(p1, p2)) {
18752 v.reset(OpConstBool)
18753 v.AuxInt = boolToAuxInt(o1 != o2)
18758 // match: (NeqPtr (Const32 [c]) (Const32 [d]))
18759 // result: (ConstBool [c != d])
18761 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
18762 if v_0.Op != OpConst32 {
18765 c := auxIntToInt32(v_0.AuxInt)
18766 if v_1.Op != OpConst32 {
18769 d := auxIntToInt32(v_1.AuxInt)
18770 v.reset(OpConstBool)
18771 v.AuxInt = boolToAuxInt(c != d)
18776 // match: (NeqPtr (Const64 [c]) (Const64 [d]))
18777 // result: (ConstBool [c != d])
18779 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
18780 if v_0.Op != OpConst64 {
18783 c := auxIntToInt64(v_0.AuxInt)
18784 if v_1.Op != OpConst64 {
18787 d := auxIntToInt64(v_1.AuxInt)
18788 v.reset(OpConstBool)
18789 v.AuxInt = boolToAuxInt(c != d)
18794 // match: (NeqPtr (Convert (Addr {x} _) _) (Addr {y} _))
18795 // result: (ConstBool [x!=y])
18797 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
18798 if v_0.Op != OpConvert {
18801 v_0_0 := v_0.Args[0]
18802 if v_0_0.Op != OpAddr {
18805 x := auxToSym(v_0_0.Aux)
18806 if v_1.Op != OpAddr {
18809 y := auxToSym(v_1.Aux)
18810 v.reset(OpConstBool)
18811 v.AuxInt = boolToAuxInt(x != y)
18816 // match: (NeqPtr (LocalAddr _ _) (Addr _))
18817 // result: (ConstBool [true])
18819 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
18820 if v_0.Op != OpLocalAddr || v_1.Op != OpAddr {
18823 v.reset(OpConstBool)
18824 v.AuxInt = boolToAuxInt(true)
18829 // match: (NeqPtr (OffPtr (LocalAddr _ _)) (Addr _))
18830 // result: (ConstBool [true])
18832 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
18833 if v_0.Op != OpOffPtr {
18836 v_0_0 := v_0.Args[0]
18837 if v_0_0.Op != OpLocalAddr || v_1.Op != OpAddr {
18840 v.reset(OpConstBool)
18841 v.AuxInt = boolToAuxInt(true)
18846 // match: (NeqPtr (LocalAddr _ _) (OffPtr (Addr _)))
18847 // result: (ConstBool [true])
18849 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
18850 if v_0.Op != OpLocalAddr || v_1.Op != OpOffPtr {
18853 v_1_0 := v_1.Args[0]
18854 if v_1_0.Op != OpAddr {
18857 v.reset(OpConstBool)
18858 v.AuxInt = boolToAuxInt(true)
18863 // match: (NeqPtr (OffPtr (LocalAddr _ _)) (OffPtr (Addr _)))
18864 // result: (ConstBool [true])
18866 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
18867 if v_0.Op != OpOffPtr {
18870 v_0_0 := v_0.Args[0]
18871 if v_0_0.Op != OpLocalAddr || v_1.Op != OpOffPtr {
18874 v_1_0 := v_1.Args[0]
18875 if v_1_0.Op != OpAddr {
18878 v.reset(OpConstBool)
18879 v.AuxInt = boolToAuxInt(true)
18884 // match: (NeqPtr (AddPtr p1 o1) p2)
18885 // cond: isSamePtr(p1, p2)
18886 // result: (IsNonNil o1)
18888 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
18889 if v_0.Op != OpAddPtr {
18895 if !(isSamePtr(p1, p2)) {
18898 v.reset(OpIsNonNil)
18904 // match: (NeqPtr (Const32 [0]) p)
18905 // result: (IsNonNil p)
18907 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
18908 if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
18912 v.reset(OpIsNonNil)
18918 // match: (NeqPtr (Const64 [0]) p)
18919 // result: (IsNonNil p)
18921 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
18922 if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
18926 v.reset(OpIsNonNil)
18932 // match: (NeqPtr (ConstNil) p)
18933 // result: (IsNonNil p)
18935 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
18936 if v_0.Op != OpConstNil {
18940 v.reset(OpIsNonNil)
18948 func rewriteValuegeneric_OpNeqSlice(v *Value) bool {
18952 typ := &b.Func.Config.Types
18953 // match: (NeqSlice x y)
18954 // result: (NeqPtr (SlicePtr x) (SlicePtr y))
18959 v0 := b.NewValue0(v.Pos, OpSlicePtr, typ.BytePtr)
18961 v1 := b.NewValue0(v.Pos, OpSlicePtr, typ.BytePtr)
// rewriteValuegeneric_OpNilCheck removes nil checks whose pointer operand is
// provably non-nil: the g register, the result of runtime.newobject (directly
// or through an OffPtr), and static addresses based on SB (directly or through
// a Convert). Rules that fire replace the check with (Invalid); warnRule emits
// the "removed nil check" diagnostic when Debug_checknil is set.
// Generated code; do not hand-edit logic.
18967 func rewriteValuegeneric_OpNilCheck(v *Value) bool {
18972 // match: (NilCheck (GetG mem) mem)
18975 if v_0.Op != OpGetG {
18985 // match: (NilCheck (SelectN [0] call:(StaticLECall _ _)) _)
18986 // cond: isSameCall(call.Aux, "runtime.newobject") && warnRule(fe.Debug_checknil(), v, "removed nil check")
18987 // result: (Invalid)
18989 if v_0.Op != OpSelectN || auxIntToInt64(v_0.AuxInt) != 0 {
18992 call := v_0.Args[0]
18993 if call.Op != OpStaticLECall || len(call.Args) != 2 || !(isSameCall(call.Aux, "runtime.newobject") && warnRule(fe.Debug_checknil(), v, "removed nil check")) {
// Same rule as above, but looking through an OffPtr wrapped around the
// newobject result.
18999 // match: (NilCheck (OffPtr (SelectN [0] call:(StaticLECall _ _))) _)
19000 // cond: isSameCall(call.Aux, "runtime.newobject") && warnRule(fe.Debug_checknil(), v, "removed nil check")
19001 // result: (Invalid)
19003 if v_0.Op != OpOffPtr {
19006 v_0_0 := v_0.Args[0]
19007 if v_0_0.Op != OpSelectN || auxIntToInt64(v_0_0.AuxInt) != 0 {
19010 call := v_0_0.Args[0]
19011 if call.Op != OpStaticLECall || len(call.Args) != 2 || !(isSameCall(call.Aux, "runtime.newobject") && warnRule(fe.Debug_checknil(), v, "removed nil check")) {
// Static globals (Addr off SB) can never be nil.
19017 // match: (NilCheck (Addr {_} (SB)) _)
19018 // result: (Invalid)
19020 if v_0.Op != OpAddr {
19023 v_0_0 := v_0.Args[0]
19024 if v_0_0.Op != OpSB {
19030 // match: (NilCheck (Convert (Addr {_} (SB)) _) _)
19031 // result: (Invalid)
19033 if v_0.Op != OpConvert {
19036 v_0_0 := v_0.Args[0]
19037 if v_0_0.Op != OpAddr {
19040 v_0_0_0 := v_0_0.Args[0]
19041 if v_0_0_0.Op != OpSB {
// rewriteValuegeneric_OpNot pushes a boolean negation into its operand:
//   - (Not (ConstBool [c])) folds to the negated constant;
//   - Not of an equality/inequality flips to the opposite predicate
//     (Eq* <-> Neq*, EqB <-> NeqB, EqPtr <-> NeqPtr, including float forms);
//   - Not of an ordering flips predicate AND swaps operands
//     (!(x < y) == (y <= x), !(x <= y) == (y < x)), for signed and unsigned
//     widths 8/16/32/64.
// Generated code; each rule is described by its match/result comments.
19049 func rewriteValuegeneric_OpNot(v *Value) bool {
19051 // match: (Not (ConstBool [c]))
19052 // result: (ConstBool [!c])
19054 if v_0.Op != OpConstBool {
19057 c := auxIntToBool(v_0.AuxInt)
19058 v.reset(OpConstBool)
19059 v.AuxInt = boolToAuxInt(!c)
// Equality flips: operands keep their order.
19062 // match: (Not (Eq64 x y))
19063 // result: (Neq64 x y)
19065 if v_0.Op != OpEq64 {
19074 // match: (Not (Eq32 x y))
19075 // result: (Neq32 x y)
19077 if v_0.Op != OpEq32 {
19086 // match: (Not (Eq16 x y))
19087 // result: (Neq16 x y)
19089 if v_0.Op != OpEq16 {
19098 // match: (Not (Eq8 x y))
19099 // result: (Neq8 x y)
19101 if v_0.Op != OpEq8 {
19110 // match: (Not (EqB x y))
19111 // result: (NeqB x y)
19113 if v_0.Op != OpEqB {
19122 // match: (Not (EqPtr x y))
19123 // result: (NeqPtr x y)
19125 if v_0.Op != OpEqPtr {
19134 // match: (Not (Eq64F x y))
19135 // result: (Neq64F x y)
19137 if v_0.Op != OpEq64F {
19146 // match: (Not (Eq32F x y))
19147 // result: (Neq32F x y)
19149 if v_0.Op != OpEq32F {
19158 // match: (Not (Neq64 x y))
19159 // result: (Eq64 x y)
19161 if v_0.Op != OpNeq64 {
19170 // match: (Not (Neq32 x y))
19171 // result: (Eq32 x y)
19173 if v_0.Op != OpNeq32 {
19182 // match: (Not (Neq16 x y))
19183 // result: (Eq16 x y)
19185 if v_0.Op != OpNeq16 {
19194 // match: (Not (Neq8 x y))
19195 // result: (Eq8 x y)
19197 if v_0.Op != OpNeq8 {
19206 // match: (Not (NeqB x y))
19207 // result: (EqB x y)
19209 if v_0.Op != OpNeqB {
19218 // match: (Not (NeqPtr x y))
19219 // result: (EqPtr x y)
19221 if v_0.Op != OpNeqPtr {
19230 // match: (Not (Neq64F x y))
19231 // result: (Eq64F x y)
19233 if v_0.Op != OpNeq64F {
19242 // match: (Not (Neq32F x y))
19243 // result: (Eq32F x y)
19245 if v_0.Op != OpNeq32F {
// Ordering flips: note the result swaps y and x, since !(x < y) is (y <= x).
// Float orderings are deliberately absent: !(x < y) != (y <= x) under NaN.
19254 // match: (Not (Less64 x y))
19255 // result: (Leq64 y x)
19257 if v_0.Op != OpLess64 {
19266 // match: (Not (Less32 x y))
19267 // result: (Leq32 y x)
19269 if v_0.Op != OpLess32 {
19278 // match: (Not (Less16 x y))
19279 // result: (Leq16 y x)
19281 if v_0.Op != OpLess16 {
19290 // match: (Not (Less8 x y))
19291 // result: (Leq8 y x)
19293 if v_0.Op != OpLess8 {
19302 // match: (Not (Less64U x y))
19303 // result: (Leq64U y x)
19305 if v_0.Op != OpLess64U {
19314 // match: (Not (Less32U x y))
19315 // result: (Leq32U y x)
19317 if v_0.Op != OpLess32U {
19326 // match: (Not (Less16U x y))
19327 // result: (Leq16U y x)
19329 if v_0.Op != OpLess16U {
19338 // match: (Not (Less8U x y))
19339 // result: (Leq8U y x)
19341 if v_0.Op != OpLess8U {
19350 // match: (Not (Leq64 x y))
19351 // result: (Less64 y x)
19353 if v_0.Op != OpLeq64 {
19362 // match: (Not (Leq32 x y))
19363 // result: (Less32 y x)
19365 if v_0.Op != OpLeq32 {
19374 // match: (Not (Leq16 x y))
19375 // result: (Less16 y x)
19377 if v_0.Op != OpLeq16 {
19386 // match: (Not (Leq8 x y))
19387 // result: (Less8 y x)
19389 if v_0.Op != OpLeq8 {
19398 // match: (Not (Leq64U x y))
19399 // result: (Less64U y x)
19401 if v_0.Op != OpLeq64U {
19410 // match: (Not (Leq32U x y))
19411 // result: (Less32U y x)
19413 if v_0.Op != OpLeq32U {
19422 // match: (Not (Leq16U x y))
19423 // result: (Less16U y x)
19425 if v_0.Op != OpLeq16U {
19434 // match: (Not (Leq8U x y))
19435 // result: (Less8U y x)
19437 if v_0.Op != OpLeq8U {
// rewriteValuegeneric_OpOffPtr simplifies pointer-offset chains:
//   - nested offsets collapse into one ((OffPtr (OffPtr p [y]) [x]) => (OffPtr p [x+y]));
//   - a zero offset is dropped entirely, but only when the OffPtr's type is
//     identical to the underlying pointer's type (types.CMPeq), so the rewrite
//     cannot change the value's type.
// Generated code.
19448 func rewriteValuegeneric_OpOffPtr(v *Value) bool {
19450 // match: (OffPtr (OffPtr p [y]) [x])
19451 // result: (OffPtr p [x+y])
19453 x := auxIntToInt64(v.AuxInt)
19454 if v_0.Op != OpOffPtr {
19457 y := auxIntToInt64(v_0.AuxInt)
19460 v.AuxInt = int64ToAuxInt(x + y)
19464 // match: (OffPtr p [0])
19465 // cond: v.Type.Compare(p.Type) == types.CMPeq
19468 if auxIntToInt64(v.AuxInt) != 0 {
19472 if !(v.Type.Compare(p.Type) == types.CMPeq) {
// rewriteValuegeneric_OpOr16 simplifies 16-bit bitwise OR. Rule groups, in
// order: constant folding (c|d); De Morgan (^x | ^y => ^(x & y)); identities
// (x|x, x|0, x|-1, x|^x); reassociation that floats constants outward and
// merges them; and rotate recognition — an OR of matching left/right shift
// pairs becomes RotateLeft16 when the shift amounts provably sum to 16
// (constant amounts, or Sub-of-16 patterns guarded by shiftIsBounded) and the
// target supports rotates (canRotate(config, 16)).
// Each `for _i0` loop tries the rule with the two OR operands in both orders,
// since OR is commutative. Generated code.
19480 func rewriteValuegeneric_OpOr16(v *Value) bool {
19484 config := b.Func.Config
19485 // match: (Or16 (Const16 [c]) (Const16 [d]))
19486 // result: (Const16 [c|d])
19488 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
19489 if v_0.Op != OpConst16 {
19492 c := auxIntToInt16(v_0.AuxInt)
19493 if v_1.Op != OpConst16 {
19496 d := auxIntToInt16(v_1.AuxInt)
19498 v.AuxInt = int16ToAuxInt(c | d)
19503 // match: (Or16 <t> (Com16 x) (Com16 y))
19504 // result: (Com16 (And16 <t> x y))
19507 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
19508 if v_0.Op != OpCom16 {
19512 if v_1.Op != OpCom16 {
19517 v0 := b.NewValue0(v.Pos, OpAnd16, t)
19524 // match: (Or16 x x)
19534 // match: (Or16 (Const16 [0]) x)
19537 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
19538 if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
19547 // match: (Or16 (Const16 [-1]) _)
19548 // result: (Const16 [-1])
19550 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
19551 if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != -1 {
19555 v.AuxInt = int16ToAuxInt(-1)
19560 // match: (Or16 (Com16 x) x)
19561 // result: (Const16 [-1])
19563 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
19564 if v_0.Op != OpCom16 {
19572 v.AuxInt = int16ToAuxInt(-1)
19577 // match: (Or16 x (Or16 x y))
19578 // result: (Or16 x y)
19580 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
19582 if v_1.Op != OpOr16 {
19586 v_1_0 := v_1.Args[0]
19587 v_1_1 := v_1.Args[1]
19588 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
// If the OR'd constant already covers every bit the AND can clear, the AND is
// redundant: ^(c1|c2) == 0 means c1|c2 is all ones.
19600 // match: (Or16 (And16 x (Const16 [c2])) (Const16 <t> [c1]))
19601 // cond: ^(c1 | c2) == 0
19602 // result: (Or16 (Const16 <t> [c1]) x)
19604 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
19605 if v_0.Op != OpAnd16 {
19609 v_0_0 := v_0.Args[0]
19610 v_0_1 := v_0.Args[1]
19611 for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
19613 if v_0_1.Op != OpConst16 {
19616 c2 := auxIntToInt16(v_0_1.AuxInt)
19617 if v_1.Op != OpConst16 {
19621 c1 := auxIntToInt16(v_1.AuxInt)
19622 if !(^(c1 | c2) == 0) {
19626 v0 := b.NewValue0(v.Pos, OpConst16, t)
19627 v0.AuxInt = int16ToAuxInt(c1)
// Reassociate so the constant floats to the top of the OR tree; the cond
// prevents an infinite rewrite loop when z or x is itself a constant.
19634 // match: (Or16 (Or16 i:(Const16 <t>) z) x)
19635 // cond: (z.Op != OpConst16 && x.Op != OpConst16)
19636 // result: (Or16 i (Or16 <t> z x))
19638 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
19639 if v_0.Op != OpOr16 {
19643 v_0_0 := v_0.Args[0]
19644 v_0_1 := v_0.Args[1]
19645 for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
19647 if i.Op != OpConst16 {
19653 if !(z.Op != OpConst16 && x.Op != OpConst16) {
19657 v0 := b.NewValue0(v.Pos, OpOr16, t)
19665 // match: (Or16 (Const16 <t> [c]) (Or16 (Const16 <t> [d]) x))
19666 // result: (Or16 (Const16 <t> [c|d]) x)
19668 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
19669 if v_0.Op != OpConst16 {
19673 c := auxIntToInt16(v_0.AuxInt)
19674 if v_1.Op != OpOr16 {
19678 v_1_0 := v_1.Args[0]
19679 v_1_1 := v_1.Args[1]
19680 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
19681 if v_1_0.Op != OpConst16 || v_1_0.Type != t {
19684 d := auxIntToInt16(v_1_0.AuxInt)
19687 v0 := b.NewValue0(v.Pos, OpConst16, t)
19688 v0.AuxInt = int16ToAuxInt(c | d)
// Rotate recognition: constant shift amounts summing to 16.
19695 // match: (Or16 (Lsh16x64 x z:(Const64 <t> [c])) (Rsh16Ux64 x (Const64 [d])))
19696 // cond: c < 16 && d == 16-c && canRotate(config, 16)
19697 // result: (RotateLeft16 x z)
19699 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
19700 if v_0.Op != OpLsh16x64 {
19706 if z.Op != OpConst64 {
19709 c := auxIntToInt64(z.AuxInt)
19710 if v_1.Op != OpRsh16Ux64 {
19714 if x != v_1.Args[0] {
19717 v_1_1 := v_1.Args[1]
19718 if v_1_1.Op != OpConst64 {
19721 d := auxIntToInt64(v_1_1.AuxInt)
19722 if !(c < 16 && d == 16-c && canRotate(config, 16)) {
19725 v.reset(OpRotateLeft16)
// Rotate recognition: variable amount y with matching (16 - y) on the other
// side, one rule per shift-count width (64/32/16/8), Lsh-first then Rsh-first.
19731 // match: (Or16 left:(Lsh16x64 x y) right:(Rsh16Ux64 x (Sub64 (Const64 [16]) y)))
19732 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
19733 // result: (RotateLeft16 x y)
19735 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
19737 if left.Op != OpLsh16x64 {
19743 if right.Op != OpRsh16Ux64 {
19747 if x != right.Args[0] {
19750 right_1 := right.Args[1]
19751 if right_1.Op != OpSub64 {
19754 _ = right_1.Args[1]
19755 right_1_0 := right_1.Args[0]
19756 if right_1_0.Op != OpConst64 || auxIntToInt64(right_1_0.AuxInt) != 16 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
19759 v.reset(OpRotateLeft16)
19765 // match: (Or16 left:(Lsh16x32 x y) right:(Rsh16Ux32 x (Sub32 (Const32 [16]) y)))
19766 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
19767 // result: (RotateLeft16 x y)
19769 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
19771 if left.Op != OpLsh16x32 {
19777 if right.Op != OpRsh16Ux32 {
19781 if x != right.Args[0] {
19784 right_1 := right.Args[1]
19785 if right_1.Op != OpSub32 {
19788 _ = right_1.Args[1]
19789 right_1_0 := right_1.Args[0]
19790 if right_1_0.Op != OpConst32 || auxIntToInt32(right_1_0.AuxInt) != 16 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
19793 v.reset(OpRotateLeft16)
19799 // match: (Or16 left:(Lsh16x16 x y) right:(Rsh16Ux16 x (Sub16 (Const16 [16]) y)))
19800 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
19801 // result: (RotateLeft16 x y)
19803 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
19805 if left.Op != OpLsh16x16 {
19811 if right.Op != OpRsh16Ux16 {
19815 if x != right.Args[0] {
19818 right_1 := right.Args[1]
19819 if right_1.Op != OpSub16 {
19822 _ = right_1.Args[1]
19823 right_1_0 := right_1.Args[0]
19824 if right_1_0.Op != OpConst16 || auxIntToInt16(right_1_0.AuxInt) != 16 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
19827 v.reset(OpRotateLeft16)
19833 // match: (Or16 left:(Lsh16x8 x y) right:(Rsh16Ux8 x (Sub8 (Const8 [16]) y)))
19834 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
19835 // result: (RotateLeft16 x y)
19837 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
19839 if left.Op != OpLsh16x8 {
19845 if right.Op != OpRsh16Ux8 {
19849 if x != right.Args[0] {
19852 right_1 := right.Args[1]
19853 if right_1.Op != OpSub8 {
19856 _ = right_1.Args[1]
19857 right_1_0 := right_1.Args[0]
19858 if right_1_0.Op != OpConst8 || auxIntToInt8(right_1_0.AuxInt) != 16 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
19861 v.reset(OpRotateLeft16)
19867 // match: (Or16 right:(Rsh16Ux64 x y) left:(Lsh16x64 x z:(Sub64 (Const64 [16]) y)))
19868 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
19869 // result: (RotateLeft16 x z)
19871 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
19873 if right.Op != OpRsh16Ux64 {
19879 if left.Op != OpLsh16x64 {
19883 if x != left.Args[0] {
19887 if z.Op != OpSub64 {
19892 if z_0.Op != OpConst64 || auxIntToInt64(z_0.AuxInt) != 16 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
19895 v.reset(OpRotateLeft16)
19901 // match: (Or16 right:(Rsh16Ux32 x y) left:(Lsh16x32 x z:(Sub32 (Const32 [16]) y)))
19902 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
19903 // result: (RotateLeft16 x z)
19905 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
19907 if right.Op != OpRsh16Ux32 {
19913 if left.Op != OpLsh16x32 {
19917 if x != left.Args[0] {
19921 if z.Op != OpSub32 {
19926 if z_0.Op != OpConst32 || auxIntToInt32(z_0.AuxInt) != 16 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
19929 v.reset(OpRotateLeft16)
19935 // match: (Or16 right:(Rsh16Ux16 x y) left:(Lsh16x16 x z:(Sub16 (Const16 [16]) y)))
19936 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
19937 // result: (RotateLeft16 x z)
19939 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
19941 if right.Op != OpRsh16Ux16 {
19947 if left.Op != OpLsh16x16 {
19951 if x != left.Args[0] {
19955 if z.Op != OpSub16 {
19960 if z_0.Op != OpConst16 || auxIntToInt16(z_0.AuxInt) != 16 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
19963 v.reset(OpRotateLeft16)
19969 // match: (Or16 right:(Rsh16Ux8 x y) left:(Lsh16x8 x z:(Sub8 (Const8 [16]) y)))
19970 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
19971 // result: (RotateLeft16 x z)
19973 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
19975 if right.Op != OpRsh16Ux8 {
19981 if left.Op != OpLsh16x8 {
19985 if x != left.Args[0] {
19989 if z.Op != OpSub8 {
19994 if z_0.Op != OpConst8 || auxIntToInt8(z_0.AuxInt) != 16 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
19997 v.reset(OpRotateLeft16)
// rewriteValuegeneric_OpOr32 simplifies 32-bit bitwise OR. Same rule groups as
// the Or16 version, with width 32: constant folding, De Morgan, identities
// (x|x, x|0, x|-1, x|^x), constant reassociation/merging, and rotate
// recognition (shift pairs whose amounts sum to 32 become RotateLeft32, gated
// by canRotate(config, 32) and, for variable amounts, shiftIsBounded).
// Each `for _i0` loop tries both operand orders since OR is commutative.
// Generated code.
20005 func rewriteValuegeneric_OpOr32(v *Value) bool {
20009 config := b.Func.Config
20010 // match: (Or32 (Const32 [c]) (Const32 [d]))
20011 // result: (Const32 [c|d])
20013 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
20014 if v_0.Op != OpConst32 {
20017 c := auxIntToInt32(v_0.AuxInt)
20018 if v_1.Op != OpConst32 {
20021 d := auxIntToInt32(v_1.AuxInt)
20023 v.AuxInt = int32ToAuxInt(c | d)
20028 // match: (Or32 <t> (Com32 x) (Com32 y))
20029 // result: (Com32 (And32 <t> x y))
20032 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
20033 if v_0.Op != OpCom32 {
20037 if v_1.Op != OpCom32 {
20042 v0 := b.NewValue0(v.Pos, OpAnd32, t)
20049 // match: (Or32 x x)
20059 // match: (Or32 (Const32 [0]) x)
20062 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
20063 if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
20072 // match: (Or32 (Const32 [-1]) _)
20073 // result: (Const32 [-1])
20075 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
20076 if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != -1 {
20080 v.AuxInt = int32ToAuxInt(-1)
20085 // match: (Or32 (Com32 x) x)
20086 // result: (Const32 [-1])
20088 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
20089 if v_0.Op != OpCom32 {
20097 v.AuxInt = int32ToAuxInt(-1)
20102 // match: (Or32 x (Or32 x y))
20103 // result: (Or32 x y)
20105 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
20107 if v_1.Op != OpOr32 {
20111 v_1_0 := v_1.Args[0]
20112 v_1_1 := v_1.Args[1]
20113 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
// Drop a masking AND when the OR'd constant already sets every maskable bit.
20125 // match: (Or32 (And32 x (Const32 [c2])) (Const32 <t> [c1]))
20126 // cond: ^(c1 | c2) == 0
20127 // result: (Or32 (Const32 <t> [c1]) x)
20129 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
20130 if v_0.Op != OpAnd32 {
20134 v_0_0 := v_0.Args[0]
20135 v_0_1 := v_0.Args[1]
20136 for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
20138 if v_0_1.Op != OpConst32 {
20141 c2 := auxIntToInt32(v_0_1.AuxInt)
20142 if v_1.Op != OpConst32 {
20146 c1 := auxIntToInt32(v_1.AuxInt)
20147 if !(^(c1 | c2) == 0) {
20151 v0 := b.NewValue0(v.Pos, OpConst32, t)
20152 v0.AuxInt = int32ToAuxInt(c1)
// Float constants to the top of the OR tree; cond avoids a rewrite loop.
20159 // match: (Or32 (Or32 i:(Const32 <t>) z) x)
20160 // cond: (z.Op != OpConst32 && x.Op != OpConst32)
20161 // result: (Or32 i (Or32 <t> z x))
20163 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
20164 if v_0.Op != OpOr32 {
20168 v_0_0 := v_0.Args[0]
20169 v_0_1 := v_0.Args[1]
20170 for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
20172 if i.Op != OpConst32 {
20178 if !(z.Op != OpConst32 && x.Op != OpConst32) {
20182 v0 := b.NewValue0(v.Pos, OpOr32, t)
20190 // match: (Or32 (Const32 <t> [c]) (Or32 (Const32 <t> [d]) x))
20191 // result: (Or32 (Const32 <t> [c|d]) x)
20193 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
20194 if v_0.Op != OpConst32 {
20198 c := auxIntToInt32(v_0.AuxInt)
20199 if v_1.Op != OpOr32 {
20203 v_1_0 := v_1.Args[0]
20204 v_1_1 := v_1.Args[1]
20205 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
20206 if v_1_0.Op != OpConst32 || v_1_0.Type != t {
20209 d := auxIntToInt32(v_1_0.AuxInt)
20212 v0 := b.NewValue0(v.Pos, OpConst32, t)
20213 v0.AuxInt = int32ToAuxInt(c | d)
// Rotate recognition: constant shift amounts summing to 32.
20220 // match: (Or32 (Lsh32x64 x z:(Const64 <t> [c])) (Rsh32Ux64 x (Const64 [d])))
20221 // cond: c < 32 && d == 32-c && canRotate(config, 32)
20222 // result: (RotateLeft32 x z)
20224 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
20225 if v_0.Op != OpLsh32x64 {
20231 if z.Op != OpConst64 {
20234 c := auxIntToInt64(z.AuxInt)
20235 if v_1.Op != OpRsh32Ux64 {
20239 if x != v_1.Args[0] {
20242 v_1_1 := v_1.Args[1]
20243 if v_1_1.Op != OpConst64 {
20246 d := auxIntToInt64(v_1_1.AuxInt)
20247 if !(c < 32 && d == 32-c && canRotate(config, 32)) {
20250 v.reset(OpRotateLeft32)
// Rotate recognition: variable amount y paired with (32 - y); one rule per
// shift-count width (64/32/16/8), Lsh-first then the Rsh-first mirror.
20256 // match: (Or32 left:(Lsh32x64 x y) right:(Rsh32Ux64 x (Sub64 (Const64 [32]) y)))
20257 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
20258 // result: (RotateLeft32 x y)
20260 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
20262 if left.Op != OpLsh32x64 {
20268 if right.Op != OpRsh32Ux64 {
20272 if x != right.Args[0] {
20275 right_1 := right.Args[1]
20276 if right_1.Op != OpSub64 {
20279 _ = right_1.Args[1]
20280 right_1_0 := right_1.Args[0]
20281 if right_1_0.Op != OpConst64 || auxIntToInt64(right_1_0.AuxInt) != 32 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
20284 v.reset(OpRotateLeft32)
20290 // match: (Or32 left:(Lsh32x32 x y) right:(Rsh32Ux32 x (Sub32 (Const32 [32]) y)))
20291 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
20292 // result: (RotateLeft32 x y)
20294 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
20296 if left.Op != OpLsh32x32 {
20302 if right.Op != OpRsh32Ux32 {
20306 if x != right.Args[0] {
20309 right_1 := right.Args[1]
20310 if right_1.Op != OpSub32 {
20313 _ = right_1.Args[1]
20314 right_1_0 := right_1.Args[0]
20315 if right_1_0.Op != OpConst32 || auxIntToInt32(right_1_0.AuxInt) != 32 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
20318 v.reset(OpRotateLeft32)
20324 // match: (Or32 left:(Lsh32x16 x y) right:(Rsh32Ux16 x (Sub16 (Const16 [32]) y)))
20325 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
20326 // result: (RotateLeft32 x y)
20328 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
20330 if left.Op != OpLsh32x16 {
20336 if right.Op != OpRsh32Ux16 {
20340 if x != right.Args[0] {
20343 right_1 := right.Args[1]
20344 if right_1.Op != OpSub16 {
20347 _ = right_1.Args[1]
20348 right_1_0 := right_1.Args[0]
20349 if right_1_0.Op != OpConst16 || auxIntToInt16(right_1_0.AuxInt) != 32 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
20352 v.reset(OpRotateLeft32)
20358 // match: (Or32 left:(Lsh32x8 x y) right:(Rsh32Ux8 x (Sub8 (Const8 [32]) y)))
20359 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
20360 // result: (RotateLeft32 x y)
20362 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
20364 if left.Op != OpLsh32x8 {
20370 if right.Op != OpRsh32Ux8 {
20374 if x != right.Args[0] {
20377 right_1 := right.Args[1]
20378 if right_1.Op != OpSub8 {
20381 _ = right_1.Args[1]
20382 right_1_0 := right_1.Args[0]
20383 if right_1_0.Op != OpConst8 || auxIntToInt8(right_1_0.AuxInt) != 32 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
20386 v.reset(OpRotateLeft32)
20392 // match: (Or32 right:(Rsh32Ux64 x y) left:(Lsh32x64 x z:(Sub64 (Const64 [32]) y)))
20393 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
20394 // result: (RotateLeft32 x z)
20396 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
20398 if right.Op != OpRsh32Ux64 {
20404 if left.Op != OpLsh32x64 {
20408 if x != left.Args[0] {
20412 if z.Op != OpSub64 {
20417 if z_0.Op != OpConst64 || auxIntToInt64(z_0.AuxInt) != 32 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
20420 v.reset(OpRotateLeft32)
20426 // match: (Or32 right:(Rsh32Ux32 x y) left:(Lsh32x32 x z:(Sub32 (Const32 [32]) y)))
20427 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
20428 // result: (RotateLeft32 x z)
20430 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
20432 if right.Op != OpRsh32Ux32 {
20438 if left.Op != OpLsh32x32 {
20442 if x != left.Args[0] {
20446 if z.Op != OpSub32 {
20451 if z_0.Op != OpConst32 || auxIntToInt32(z_0.AuxInt) != 32 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
20454 v.reset(OpRotateLeft32)
20460 // match: (Or32 right:(Rsh32Ux16 x y) left:(Lsh32x16 x z:(Sub16 (Const16 [32]) y)))
20461 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
20462 // result: (RotateLeft32 x z)
20464 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
20466 if right.Op != OpRsh32Ux16 {
20472 if left.Op != OpLsh32x16 {
20476 if x != left.Args[0] {
20480 if z.Op != OpSub16 {
20485 if z_0.Op != OpConst16 || auxIntToInt16(z_0.AuxInt) != 32 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
20488 v.reset(OpRotateLeft32)
20494 // match: (Or32 right:(Rsh32Ux8 x y) left:(Lsh32x8 x z:(Sub8 (Const8 [32]) y)))
20495 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
20496 // result: (RotateLeft32 x z)
20498 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
20500 if right.Op != OpRsh32Ux8 {
20506 if left.Op != OpLsh32x8 {
20510 if x != left.Args[0] {
20514 if z.Op != OpSub8 {
20519 if z_0.Op != OpConst8 || auxIntToInt8(z_0.AuxInt) != 32 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
20522 v.reset(OpRotateLeft32)
// rewriteValuegeneric_OpOr64 simplifies 64-bit bitwise OR. Same rule groups as
// the Or16/Or32 versions, with width 64: constant folding, De Morgan,
// identities (x|x, x|0, x|-1, x|^x), constant reassociation/merging, and
// rotate recognition (shift pairs whose amounts sum to 64 become
// RotateLeft64, gated by canRotate(config, 64) and, for variable amounts,
// shiftIsBounded). Each `for _i0` loop tries both operand orders since OR is
// commutative. Generated code.
20530 func rewriteValuegeneric_OpOr64(v *Value) bool {
20534 config := b.Func.Config
20535 // match: (Or64 (Const64 [c]) (Const64 [d]))
20536 // result: (Const64 [c|d])
20538 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
20539 if v_0.Op != OpConst64 {
20542 c := auxIntToInt64(v_0.AuxInt)
20543 if v_1.Op != OpConst64 {
20546 d := auxIntToInt64(v_1.AuxInt)
20548 v.AuxInt = int64ToAuxInt(c | d)
20553 // match: (Or64 <t> (Com64 x) (Com64 y))
20554 // result: (Com64 (And64 <t> x y))
20557 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
20558 if v_0.Op != OpCom64 {
20562 if v_1.Op != OpCom64 {
20567 v0 := b.NewValue0(v.Pos, OpAnd64, t)
20574 // match: (Or64 x x)
20584 // match: (Or64 (Const64 [0]) x)
20587 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
20588 if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
20597 // match: (Or64 (Const64 [-1]) _)
20598 // result: (Const64 [-1])
20600 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
20601 if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != -1 {
20605 v.AuxInt = int64ToAuxInt(-1)
20610 // match: (Or64 (Com64 x) x)
20611 // result: (Const64 [-1])
20613 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
20614 if v_0.Op != OpCom64 {
20622 v.AuxInt = int64ToAuxInt(-1)
20627 // match: (Or64 x (Or64 x y))
20628 // result: (Or64 x y)
20630 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
20632 if v_1.Op != OpOr64 {
20636 v_1_0 := v_1.Args[0]
20637 v_1_1 := v_1.Args[1]
20638 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
// Drop a masking AND when the OR'd constant already sets every maskable bit.
20650 // match: (Or64 (And64 x (Const64 [c2])) (Const64 <t> [c1]))
20651 // cond: ^(c1 | c2) == 0
20652 // result: (Or64 (Const64 <t> [c1]) x)
20654 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
20655 if v_0.Op != OpAnd64 {
20659 v_0_0 := v_0.Args[0]
20660 v_0_1 := v_0.Args[1]
20661 for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
20663 if v_0_1.Op != OpConst64 {
20666 c2 := auxIntToInt64(v_0_1.AuxInt)
20667 if v_1.Op != OpConst64 {
20671 c1 := auxIntToInt64(v_1.AuxInt)
20672 if !(^(c1 | c2) == 0) {
20676 v0 := b.NewValue0(v.Pos, OpConst64, t)
20677 v0.AuxInt = int64ToAuxInt(c1)
// Float constants to the top of the OR tree; cond avoids a rewrite loop.
20684 // match: (Or64 (Or64 i:(Const64 <t>) z) x)
20685 // cond: (z.Op != OpConst64 && x.Op != OpConst64)
20686 // result: (Or64 i (Or64 <t> z x))
20688 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
20689 if v_0.Op != OpOr64 {
20693 v_0_0 := v_0.Args[0]
20694 v_0_1 := v_0.Args[1]
20695 for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
20697 if i.Op != OpConst64 {
20703 if !(z.Op != OpConst64 && x.Op != OpConst64) {
20707 v0 := b.NewValue0(v.Pos, OpOr64, t)
20715 // match: (Or64 (Const64 <t> [c]) (Or64 (Const64 <t> [d]) x))
20716 // result: (Or64 (Const64 <t> [c|d]) x)
20718 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
20719 if v_0.Op != OpConst64 {
20723 c := auxIntToInt64(v_0.AuxInt)
20724 if v_1.Op != OpOr64 {
20728 v_1_0 := v_1.Args[0]
20729 v_1_1 := v_1.Args[1]
20730 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
20731 if v_1_0.Op != OpConst64 || v_1_0.Type != t {
20734 d := auxIntToInt64(v_1_0.AuxInt)
20737 v0 := b.NewValue0(v.Pos, OpConst64, t)
20738 v0.AuxInt = int64ToAuxInt(c | d)
// Rotate recognition: constant shift amounts summing to 64.
20745 // match: (Or64 (Lsh64x64 x z:(Const64 <t> [c])) (Rsh64Ux64 x (Const64 [d])))
20746 // cond: c < 64 && d == 64-c && canRotate(config, 64)
20747 // result: (RotateLeft64 x z)
20749 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
20750 if v_0.Op != OpLsh64x64 {
20756 if z.Op != OpConst64 {
20759 c := auxIntToInt64(z.AuxInt)
20760 if v_1.Op != OpRsh64Ux64 {
20764 if x != v_1.Args[0] {
20767 v_1_1 := v_1.Args[1]
20768 if v_1_1.Op != OpConst64 {
20771 d := auxIntToInt64(v_1_1.AuxInt)
20772 if !(c < 64 && d == 64-c && canRotate(config, 64)) {
20775 v.reset(OpRotateLeft64)
// Rotate recognition: variable amount y paired with (64 - y); one rule per
// shift-count width (64/32/16/8), Lsh-first then the Rsh-first mirror.
20781 // match: (Or64 left:(Lsh64x64 x y) right:(Rsh64Ux64 x (Sub64 (Const64 [64]) y)))
20782 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
20783 // result: (RotateLeft64 x y)
20785 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
20787 if left.Op != OpLsh64x64 {
20793 if right.Op != OpRsh64Ux64 {
20797 if x != right.Args[0] {
20800 right_1 := right.Args[1]
20801 if right_1.Op != OpSub64 {
20804 _ = right_1.Args[1]
20805 right_1_0 := right_1.Args[0]
20806 if right_1_0.Op != OpConst64 || auxIntToInt64(right_1_0.AuxInt) != 64 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
20809 v.reset(OpRotateLeft64)
20815 // match: (Or64 left:(Lsh64x32 x y) right:(Rsh64Ux32 x (Sub32 (Const32 [64]) y)))
20816 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
20817 // result: (RotateLeft64 x y)
20819 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
20821 if left.Op != OpLsh64x32 {
20827 if right.Op != OpRsh64Ux32 {
20831 if x != right.Args[0] {
20834 right_1 := right.Args[1]
20835 if right_1.Op != OpSub32 {
20838 _ = right_1.Args[1]
20839 right_1_0 := right_1.Args[0]
20840 if right_1_0.Op != OpConst32 || auxIntToInt32(right_1_0.AuxInt) != 64 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
20843 v.reset(OpRotateLeft64)
20849 // match: (Or64 left:(Lsh64x16 x y) right:(Rsh64Ux16 x (Sub16 (Const16 [64]) y)))
20850 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
20851 // result: (RotateLeft64 x y)
20853 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
20855 if left.Op != OpLsh64x16 {
20861 if right.Op != OpRsh64Ux16 {
20865 if x != right.Args[0] {
20868 right_1 := right.Args[1]
20869 if right_1.Op != OpSub16 {
20872 _ = right_1.Args[1]
20873 right_1_0 := right_1.Args[0]
20874 if right_1_0.Op != OpConst16 || auxIntToInt16(right_1_0.AuxInt) != 64 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
20877 v.reset(OpRotateLeft64)
20883 // match: (Or64 left:(Lsh64x8 x y) right:(Rsh64Ux8 x (Sub8 (Const8 [64]) y)))
20884 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
20885 // result: (RotateLeft64 x y)
20887 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
20889 if left.Op != OpLsh64x8 {
20895 if right.Op != OpRsh64Ux8 {
20899 if x != right.Args[0] {
20902 right_1 := right.Args[1]
20903 if right_1.Op != OpSub8 {
20906 _ = right_1.Args[1]
20907 right_1_0 := right_1.Args[0]
20908 if right_1_0.Op != OpConst8 || auxIntToInt8(right_1_0.AuxInt) != 64 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
20911 v.reset(OpRotateLeft64)
20917 // match: (Or64 right:(Rsh64Ux64 x y) left:(Lsh64x64 x z:(Sub64 (Const64 [64]) y)))
20918 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
20919 // result: (RotateLeft64 x z)
20921 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
20923 if right.Op != OpRsh64Ux64 {
20929 if left.Op != OpLsh64x64 {
20933 if x != left.Args[0] {
20937 if z.Op != OpSub64 {
20942 if z_0.Op != OpConst64 || auxIntToInt64(z_0.AuxInt) != 64 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
20945 v.reset(OpRotateLeft64)
20951 // match: (Or64 right:(Rsh64Ux32 x y) left:(Lsh64x32 x z:(Sub32 (Const32 [64]) y)))
20952 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
20953 // result: (RotateLeft64 x z)
20955 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
20957 if right.Op != OpRsh64Ux32 {
20963 if left.Op != OpLsh64x32 {
20967 if x != left.Args[0] {
20971 if z.Op != OpSub32 {
20976 if z_0.Op != OpConst32 || auxIntToInt32(z_0.AuxInt) != 64 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
20979 v.reset(OpRotateLeft64)
20985 // match: (Or64 right:(Rsh64Ux16 x y) left:(Lsh64x16 x z:(Sub16 (Const16 [64]) y)))
20986 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
20987 // result: (RotateLeft64 x z)
20989 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
20991 if right.Op != OpRsh64Ux16 {
20997 if left.Op != OpLsh64x16 {
21001 if x != left.Args[0] {
21005 if z.Op != OpSub16 {
21010 if z_0.Op != OpConst16 || auxIntToInt16(z_0.AuxInt) != 64 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
21013 v.reset(OpRotateLeft64)
21019 // match: (Or64 right:(Rsh64Ux8 x y) left:(Lsh64x8 x z:(Sub8 (Const8 [64]) y)))
21020 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
21021 // result: (RotateLeft64 x z)
21023 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
21025 if right.Op != OpRsh64Ux8 {
21031 if left.Op != OpLsh64x8 {
21035 if x != left.Args[0] {
21039 if z.Op != OpSub8 {
21044 if z_0.Op != OpConst8 || auxIntToInt8(z_0.AuxInt) != 64 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
21047 v.reset(OpRotateLeft64)
// rewriteValuegeneric_OpOr8 applies the generic rewrite rules for Or8 to v,
// returning true if v was rewritten. Each rule below is documented by the
// generator's own // match / // cond / // result triple, giving the S-expression
// pattern, the side condition, and the replacement value.
//
// NOTE(review): this file is machine-generated from _gen/generic.rules
// ("DO NOT EDIT" header) — any change to the rules themselves must be made in
// the .rules source and regenerated, not here. This view of the function is
// also elided (interior lines are not shown), so comments below annotate only
// what is visible.
21055 func rewriteValuegeneric_OpOr8(v *Value) bool {
21059 config := b.Func.Config
// Constant folding: Or8 of two constants becomes a single constant.
21060 // match: (Or8 (Const8 [c]) (Const8 [d]))
21061 // result: (Const8 [c|d])
// The _i0 loop tries both operand orders, since Or8 is commutative:
// on the second iteration v_0 and v_1 are swapped.
21063 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
21064 if v_0.Op != OpConst8 {
21067 c := auxIntToInt8(v_0.AuxInt)
21068 if v_1.Op != OpConst8 {
21071 d := auxIntToInt8(v_1.AuxInt)
21073 v.AuxInt = int8ToAuxInt(c | d)
// De Morgan: (^x | ^y) == ^(x & y), saving one Com8.
21078 // match: (Or8 <t> (Com8 x) (Com8 y))
21079 // result: (Com8 (And8 <t> x y))
21082 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
21083 if v_0.Op != OpCom8 {
21087 if v_1.Op != OpCom8 {
21092 v0 := b.NewValue0(v.Pos, OpAnd8, t)
// Idempotence: x|x == x.
21099 // match: (Or8 x x)
// Identity: 0|x == x.
21109 // match: (Or8 (Const8 [0]) x)
21112 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
21113 if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
// Absorption: all-ones ORed with anything is all-ones.
21122 // match: (Or8 (Const8 [-1]) _)
21123 // result: (Const8 [-1])
21125 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
21126 if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != -1 {
21130 v.AuxInt = int8ToAuxInt(-1)
// Complement law: x | ^x == -1 (all ones).
21135 // match: (Or8 (Com8 x) x)
21136 // result: (Const8 [-1])
21138 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
21139 if v_0.Op != OpCom8 {
21147 v.AuxInt = int8ToAuxInt(-1)
// Absorb a duplicated operand inside a nested Or8.
21152 // match: (Or8 x (Or8 x y))
21153 // result: (Or8 x y)
21155 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
21157 if v_1.Op != OpOr8 {
21161 v_1_0 := v_1.Args[0]
21162 v_1_1 := v_1.Args[1]
// Inner _i1 loop: the nested Or8 is also commutative, so try both
// orders of its arguments as well.
21163 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
// If c1 covers every bit c2 masks off (c1|c2 == all ones), the And8 is
// redundant under the Or: (x&c2)|c1 == (x|c1).
21175 // match: (Or8 (And8 x (Const8 [c2])) (Const8 <t> [c1]))
21176 // cond: ^(c1 | c2) == 0
21177 // result: (Or8 (Const8 <t> [c1]) x)
21179 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
21180 if v_0.Op != OpAnd8 {
21184 v_0_0 := v_0.Args[0]
21185 v_0_1 := v_0.Args[1]
21186 for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
21188 if v_0_1.Op != OpConst8 {
21191 c2 := auxIntToInt8(v_0_1.AuxInt)
21192 if v_1.Op != OpConst8 {
21196 c1 := auxIntToInt8(v_1.AuxInt)
21197 if !(^(c1 | c2) == 0) {
21201 v0 := b.NewValue0(v.Pos, OpConst8, t)
21202 v0.AuxInt = int8ToAuxInt(c1)
// Reassociation: float the constant outward so later passes can fold it.
// The cond guards against ping-ponging with other reassociation rules.
21209 // match: (Or8 (Or8 i:(Const8 <t>) z) x)
21210 // cond: (z.Op != OpConst8 && x.Op != OpConst8)
21211 // result: (Or8 i (Or8 <t> z x))
21213 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
21214 if v_0.Op != OpOr8 {
21218 v_0_0 := v_0.Args[0]
21219 v_0_1 := v_0.Args[1]
21220 for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
21222 if i.Op != OpConst8 {
21228 if !(z.Op != OpConst8 && x.Op != OpConst8) {
21232 v0 := b.NewValue0(v.Pos, OpOr8, t)
// Fold two constants that meet after reassociation: c|(d|x) -> (c|d)|x.
21240 // match: (Or8 (Const8 <t> [c]) (Or8 (Const8 <t> [d]) x))
21241 // result: (Or8 (Const8 <t> [c|d]) x)
21243 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
21244 if v_0.Op != OpConst8 {
21248 c := auxIntToInt8(v_0.AuxInt)
21249 if v_1.Op != OpOr8 {
21253 v_1_0 := v_1.Args[0]
21254 v_1_1 := v_1.Args[1]
21255 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
21256 if v_1_0.Op != OpConst8 || v_1_0.Type != t {
21259 d := auxIntToInt8(v_1_0.AuxInt)
21262 v0 := b.NewValue0(v.Pos, OpConst8, t)
21263 v0.AuxInt = int8ToAuxInt(c | d)
// --- Rotate recognition ---
// The remaining rules turn (x<<k | x>>(8-k)) shapes into RotateLeft8,
// gated by canRotate(config, 8) (target must support 8-bit rotates).
// First the constant-shift form:
21270 // match: (Or8 (Lsh8x64 x z:(Const64 <t> [c])) (Rsh8Ux64 x (Const64 [d])))
21271 // cond: c < 8 && d == 8-c && canRotate(config, 8)
21272 // result: (RotateLeft8 x z)
21274 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
21275 if v_0.Op != OpLsh8x64 {
21281 if z.Op != OpConst64 {
21284 c := auxIntToInt64(z.AuxInt)
21285 if v_1.Op != OpRsh8Ux64 {
21289 if x != v_1.Args[0] {
21292 v_1_1 := v_1.Args[1]
21293 if v_1_1.Op != OpConst64 {
21296 d := auxIntToInt64(v_1_1.AuxInt)
21297 if !(c < 8 && d == 8-c && canRotate(config, 8)) {
21300 v.reset(OpRotateLeft8)
// Variable-shift rotates: (x<<y | x>>(8-y)) -> RotateLeft8 x y, one rule
// per shift-amount width (64/32/16/8). shiftIsBounded guarantees the
// shift amount never reaches the out-of-range clamping path, so the
// identity holds for a variable y.
21306 // match: (Or8 left:(Lsh8x64 x y) right:(Rsh8Ux64 x (Sub64 (Const64 [8]) y)))
21307 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
21308 // result: (RotateLeft8 x y)
21310 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
21312 if left.Op != OpLsh8x64 {
21318 if right.Op != OpRsh8Ux64 {
21322 if x != right.Args[0] {
21325 right_1 := right.Args[1]
21326 if right_1.Op != OpSub64 {
21329 _ = right_1.Args[1]
21330 right_1_0 := right_1.Args[0]
21331 if right_1_0.Op != OpConst64 || auxIntToInt64(right_1_0.AuxInt) != 8 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
21334 v.reset(OpRotateLeft8)
21340 // match: (Or8 left:(Lsh8x32 x y) right:(Rsh8Ux32 x (Sub32 (Const32 [8]) y)))
21341 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
21342 // result: (RotateLeft8 x y)
21344 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
21346 if left.Op != OpLsh8x32 {
21352 if right.Op != OpRsh8Ux32 {
21356 if x != right.Args[0] {
21359 right_1 := right.Args[1]
21360 if right_1.Op != OpSub32 {
21363 _ = right_1.Args[1]
21364 right_1_0 := right_1.Args[0]
21365 if right_1_0.Op != OpConst32 || auxIntToInt32(right_1_0.AuxInt) != 8 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
21368 v.reset(OpRotateLeft8)
21374 // match: (Or8 left:(Lsh8x16 x y) right:(Rsh8Ux16 x (Sub16 (Const16 [8]) y)))
21375 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
21376 // result: (RotateLeft8 x y)
21378 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
21380 if left.Op != OpLsh8x16 {
21386 if right.Op != OpRsh8Ux16 {
21390 if x != right.Args[0] {
21393 right_1 := right.Args[1]
21394 if right_1.Op != OpSub16 {
21397 _ = right_1.Args[1]
21398 right_1_0 := right_1.Args[0]
21399 if right_1_0.Op != OpConst16 || auxIntToInt16(right_1_0.AuxInt) != 8 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
21402 v.reset(OpRotateLeft8)
21408 // match: (Or8 left:(Lsh8x8 x y) right:(Rsh8Ux8 x (Sub8 (Const8 [8]) y)))
21409 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
21410 // result: (RotateLeft8 x y)
21412 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
21414 if left.Op != OpLsh8x8 {
21420 if right.Op != OpRsh8Ux8 {
21424 if x != right.Args[0] {
21427 right_1 := right.Args[1]
21428 if right_1.Op != OpSub8 {
21431 _ = right_1.Args[1]
21432 right_1_0 := right_1.Args[0]
21433 if right_1_0.Op != OpConst8 || auxIntToInt8(right_1_0.AuxInt) != 8 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
21436 v.reset(OpRotateLeft8)
// Mirror image of the rules above: the right-shift appears as the
// syntactic left operand, and the (8-y) subtraction sits under the
// left-shift. Result rotates by z = 8-y instead of y.
21442 // match: (Or8 right:(Rsh8Ux64 x y) left:(Lsh8x64 x z:(Sub64 (Const64 [8]) y)))
21443 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
21444 // result: (RotateLeft8 x z)
21446 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
21448 if right.Op != OpRsh8Ux64 {
21454 if left.Op != OpLsh8x64 {
21458 if x != left.Args[0] {
21462 if z.Op != OpSub64 {
21467 if z_0.Op != OpConst64 || auxIntToInt64(z_0.AuxInt) != 8 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
21470 v.reset(OpRotateLeft8)
21476 // match: (Or8 right:(Rsh8Ux32 x y) left:(Lsh8x32 x z:(Sub32 (Const32 [8]) y)))
21477 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
21478 // result: (RotateLeft8 x z)
21480 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
21482 if right.Op != OpRsh8Ux32 {
21488 if left.Op != OpLsh8x32 {
21492 if x != left.Args[0] {
21496 if z.Op != OpSub32 {
21501 if z_0.Op != OpConst32 || auxIntToInt32(z_0.AuxInt) != 8 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
21504 v.reset(OpRotateLeft8)
21510 // match: (Or8 right:(Rsh8Ux16 x y) left:(Lsh8x16 x z:(Sub16 (Const16 [8]) y)))
21511 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
21512 // result: (RotateLeft8 x z)
21514 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
21516 if right.Op != OpRsh8Ux16 {
21522 if left.Op != OpLsh8x16 {
21526 if x != left.Args[0] {
21530 if z.Op != OpSub16 {
21535 if z_0.Op != OpConst16 || auxIntToInt16(z_0.AuxInt) != 8 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
21538 v.reset(OpRotateLeft8)
21544 // match: (Or8 right:(Rsh8Ux8 x y) left:(Lsh8x8 x z:(Sub8 (Const8 [8]) y)))
21545 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
21546 // result: (RotateLeft8 x z)
21548 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
21550 if right.Op != OpRsh8Ux8 {
21556 if left.Op != OpLsh8x8 {
21560 if x != left.Args[0] {
21564 if z.Op != OpSub8 {
21569 if z_0.Op != OpConst8 || auxIntToInt8(z_0.AuxInt) != 8 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
21572 v.reset(OpRotateLeft8)
21580 func rewriteValuegeneric_OpOrB(v *Value) bool {
21584 // match: (OrB (Less64 (Const64 [c]) x) (Less64 x (Const64 [d])))
21586 // result: (Less64U (Const64 <x.Type> [c-d]) (Sub64 <x.Type> x (Const64 <x.Type> [d])))
21588 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
21589 if v_0.Op != OpLess64 {
21593 v_0_0 := v_0.Args[0]
21594 if v_0_0.Op != OpConst64 {
21597 c := auxIntToInt64(v_0_0.AuxInt)
21598 if v_1.Op != OpLess64 {
21602 if x != v_1.Args[0] {
21605 v_1_1 := v_1.Args[1]
21606 if v_1_1.Op != OpConst64 {
21609 d := auxIntToInt64(v_1_1.AuxInt)
21614 v0 := b.NewValue0(v.Pos, OpConst64, x.Type)
21615 v0.AuxInt = int64ToAuxInt(c - d)
21616 v1 := b.NewValue0(v.Pos, OpSub64, x.Type)
21617 v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
21618 v2.AuxInt = int64ToAuxInt(d)
21625 // match: (OrB (Leq64 (Const64 [c]) x) (Less64 x (Const64 [d])))
21627 // result: (Leq64U (Const64 <x.Type> [c-d]) (Sub64 <x.Type> x (Const64 <x.Type> [d])))
21629 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
21630 if v_0.Op != OpLeq64 {
21634 v_0_0 := v_0.Args[0]
21635 if v_0_0.Op != OpConst64 {
21638 c := auxIntToInt64(v_0_0.AuxInt)
21639 if v_1.Op != OpLess64 {
21643 if x != v_1.Args[0] {
21646 v_1_1 := v_1.Args[1]
21647 if v_1_1.Op != OpConst64 {
21650 d := auxIntToInt64(v_1_1.AuxInt)
21655 v0 := b.NewValue0(v.Pos, OpConst64, x.Type)
21656 v0.AuxInt = int64ToAuxInt(c - d)
21657 v1 := b.NewValue0(v.Pos, OpSub64, x.Type)
21658 v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
21659 v2.AuxInt = int64ToAuxInt(d)
21666 // match: (OrB (Less32 (Const32 [c]) x) (Less32 x (Const32 [d])))
21668 // result: (Less32U (Const32 <x.Type> [c-d]) (Sub32 <x.Type> x (Const32 <x.Type> [d])))
21670 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
21671 if v_0.Op != OpLess32 {
21675 v_0_0 := v_0.Args[0]
21676 if v_0_0.Op != OpConst32 {
21679 c := auxIntToInt32(v_0_0.AuxInt)
21680 if v_1.Op != OpLess32 {
21684 if x != v_1.Args[0] {
21687 v_1_1 := v_1.Args[1]
21688 if v_1_1.Op != OpConst32 {
21691 d := auxIntToInt32(v_1_1.AuxInt)
21696 v0 := b.NewValue0(v.Pos, OpConst32, x.Type)
21697 v0.AuxInt = int32ToAuxInt(c - d)
21698 v1 := b.NewValue0(v.Pos, OpSub32, x.Type)
21699 v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
21700 v2.AuxInt = int32ToAuxInt(d)
21707 // match: (OrB (Leq32 (Const32 [c]) x) (Less32 x (Const32 [d])))
21709 // result: (Leq32U (Const32 <x.Type> [c-d]) (Sub32 <x.Type> x (Const32 <x.Type> [d])))
21711 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
21712 if v_0.Op != OpLeq32 {
21716 v_0_0 := v_0.Args[0]
21717 if v_0_0.Op != OpConst32 {
21720 c := auxIntToInt32(v_0_0.AuxInt)
21721 if v_1.Op != OpLess32 {
21725 if x != v_1.Args[0] {
21728 v_1_1 := v_1.Args[1]
21729 if v_1_1.Op != OpConst32 {
21732 d := auxIntToInt32(v_1_1.AuxInt)
21737 v0 := b.NewValue0(v.Pos, OpConst32, x.Type)
21738 v0.AuxInt = int32ToAuxInt(c - d)
21739 v1 := b.NewValue0(v.Pos, OpSub32, x.Type)
21740 v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
21741 v2.AuxInt = int32ToAuxInt(d)
21748 // match: (OrB (Less16 (Const16 [c]) x) (Less16 x (Const16 [d])))
21750 // result: (Less16U (Const16 <x.Type> [c-d]) (Sub16 <x.Type> x (Const16 <x.Type> [d])))
21752 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
21753 if v_0.Op != OpLess16 {
21757 v_0_0 := v_0.Args[0]
21758 if v_0_0.Op != OpConst16 {
21761 c := auxIntToInt16(v_0_0.AuxInt)
21762 if v_1.Op != OpLess16 {
21766 if x != v_1.Args[0] {
21769 v_1_1 := v_1.Args[1]
21770 if v_1_1.Op != OpConst16 {
21773 d := auxIntToInt16(v_1_1.AuxInt)
21778 v0 := b.NewValue0(v.Pos, OpConst16, x.Type)
21779 v0.AuxInt = int16ToAuxInt(c - d)
21780 v1 := b.NewValue0(v.Pos, OpSub16, x.Type)
21781 v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
21782 v2.AuxInt = int16ToAuxInt(d)
21789 // match: (OrB (Leq16 (Const16 [c]) x) (Less16 x (Const16 [d])))
21791 // result: (Leq16U (Const16 <x.Type> [c-d]) (Sub16 <x.Type> x (Const16 <x.Type> [d])))
21793 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
21794 if v_0.Op != OpLeq16 {
21798 v_0_0 := v_0.Args[0]
21799 if v_0_0.Op != OpConst16 {
21802 c := auxIntToInt16(v_0_0.AuxInt)
21803 if v_1.Op != OpLess16 {
21807 if x != v_1.Args[0] {
21810 v_1_1 := v_1.Args[1]
21811 if v_1_1.Op != OpConst16 {
21814 d := auxIntToInt16(v_1_1.AuxInt)
21819 v0 := b.NewValue0(v.Pos, OpConst16, x.Type)
21820 v0.AuxInt = int16ToAuxInt(c - d)
21821 v1 := b.NewValue0(v.Pos, OpSub16, x.Type)
21822 v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
21823 v2.AuxInt = int16ToAuxInt(d)
21830 // match: (OrB (Less8 (Const8 [c]) x) (Less8 x (Const8 [d])))
21832 // result: (Less8U (Const8 <x.Type> [c-d]) (Sub8 <x.Type> x (Const8 <x.Type> [d])))
21834 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
21835 if v_0.Op != OpLess8 {
21839 v_0_0 := v_0.Args[0]
21840 if v_0_0.Op != OpConst8 {
21843 c := auxIntToInt8(v_0_0.AuxInt)
21844 if v_1.Op != OpLess8 {
21848 if x != v_1.Args[0] {
21851 v_1_1 := v_1.Args[1]
21852 if v_1_1.Op != OpConst8 {
21855 d := auxIntToInt8(v_1_1.AuxInt)
21860 v0 := b.NewValue0(v.Pos, OpConst8, x.Type)
21861 v0.AuxInt = int8ToAuxInt(c - d)
21862 v1 := b.NewValue0(v.Pos, OpSub8, x.Type)
21863 v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
21864 v2.AuxInt = int8ToAuxInt(d)
21871 // match: (OrB (Leq8 (Const8 [c]) x) (Less8 x (Const8 [d])))
21873 // result: (Leq8U (Const8 <x.Type> [c-d]) (Sub8 <x.Type> x (Const8 <x.Type> [d])))
21875 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
21876 if v_0.Op != OpLeq8 {
21880 v_0_0 := v_0.Args[0]
21881 if v_0_0.Op != OpConst8 {
21884 c := auxIntToInt8(v_0_0.AuxInt)
21885 if v_1.Op != OpLess8 {
21889 if x != v_1.Args[0] {
21892 v_1_1 := v_1.Args[1]
21893 if v_1_1.Op != OpConst8 {
21896 d := auxIntToInt8(v_1_1.AuxInt)
21901 v0 := b.NewValue0(v.Pos, OpConst8, x.Type)
21902 v0.AuxInt = int8ToAuxInt(c - d)
21903 v1 := b.NewValue0(v.Pos, OpSub8, x.Type)
21904 v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
21905 v2.AuxInt = int8ToAuxInt(d)
21912 // match: (OrB (Less64 (Const64 [c]) x) (Leq64 x (Const64 [d])))
21913 // cond: c >= d+1 && d+1 > d
21914 // result: (Less64U (Const64 <x.Type> [c-d-1]) (Sub64 <x.Type> x (Const64 <x.Type> [d+1])))
21916 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
21917 if v_0.Op != OpLess64 {
21921 v_0_0 := v_0.Args[0]
21922 if v_0_0.Op != OpConst64 {
21925 c := auxIntToInt64(v_0_0.AuxInt)
21926 if v_1.Op != OpLeq64 {
21930 if x != v_1.Args[0] {
21933 v_1_1 := v_1.Args[1]
21934 if v_1_1.Op != OpConst64 {
21937 d := auxIntToInt64(v_1_1.AuxInt)
21938 if !(c >= d+1 && d+1 > d) {
21942 v0 := b.NewValue0(v.Pos, OpConst64, x.Type)
21943 v0.AuxInt = int64ToAuxInt(c - d - 1)
21944 v1 := b.NewValue0(v.Pos, OpSub64, x.Type)
21945 v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
21946 v2.AuxInt = int64ToAuxInt(d + 1)
21953 // match: (OrB (Leq64 (Const64 [c]) x) (Leq64 x (Const64 [d])))
21954 // cond: c >= d+1 && d+1 > d
21955 // result: (Leq64U (Const64 <x.Type> [c-d-1]) (Sub64 <x.Type> x (Const64 <x.Type> [d+1])))
21957 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
21958 if v_0.Op != OpLeq64 {
21962 v_0_0 := v_0.Args[0]
21963 if v_0_0.Op != OpConst64 {
21966 c := auxIntToInt64(v_0_0.AuxInt)
21967 if v_1.Op != OpLeq64 {
21971 if x != v_1.Args[0] {
21974 v_1_1 := v_1.Args[1]
21975 if v_1_1.Op != OpConst64 {
21978 d := auxIntToInt64(v_1_1.AuxInt)
21979 if !(c >= d+1 && d+1 > d) {
21983 v0 := b.NewValue0(v.Pos, OpConst64, x.Type)
21984 v0.AuxInt = int64ToAuxInt(c - d - 1)
21985 v1 := b.NewValue0(v.Pos, OpSub64, x.Type)
21986 v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
21987 v2.AuxInt = int64ToAuxInt(d + 1)
21994 // match: (OrB (Less32 (Const32 [c]) x) (Leq32 x (Const32 [d])))
21995 // cond: c >= d+1 && d+1 > d
21996 // result: (Less32U (Const32 <x.Type> [c-d-1]) (Sub32 <x.Type> x (Const32 <x.Type> [d+1])))
21998 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
21999 if v_0.Op != OpLess32 {
22003 v_0_0 := v_0.Args[0]
22004 if v_0_0.Op != OpConst32 {
22007 c := auxIntToInt32(v_0_0.AuxInt)
22008 if v_1.Op != OpLeq32 {
22012 if x != v_1.Args[0] {
22015 v_1_1 := v_1.Args[1]
22016 if v_1_1.Op != OpConst32 {
22019 d := auxIntToInt32(v_1_1.AuxInt)
22020 if !(c >= d+1 && d+1 > d) {
22024 v0 := b.NewValue0(v.Pos, OpConst32, x.Type)
22025 v0.AuxInt = int32ToAuxInt(c - d - 1)
22026 v1 := b.NewValue0(v.Pos, OpSub32, x.Type)
22027 v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
22028 v2.AuxInt = int32ToAuxInt(d + 1)
22035 // match: (OrB (Leq32 (Const32 [c]) x) (Leq32 x (Const32 [d])))
22036 // cond: c >= d+1 && d+1 > d
22037 // result: (Leq32U (Const32 <x.Type> [c-d-1]) (Sub32 <x.Type> x (Const32 <x.Type> [d+1])))
22039 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
22040 if v_0.Op != OpLeq32 {
22044 v_0_0 := v_0.Args[0]
22045 if v_0_0.Op != OpConst32 {
22048 c := auxIntToInt32(v_0_0.AuxInt)
22049 if v_1.Op != OpLeq32 {
22053 if x != v_1.Args[0] {
22056 v_1_1 := v_1.Args[1]
22057 if v_1_1.Op != OpConst32 {
22060 d := auxIntToInt32(v_1_1.AuxInt)
22061 if !(c >= d+1 && d+1 > d) {
22065 v0 := b.NewValue0(v.Pos, OpConst32, x.Type)
22066 v0.AuxInt = int32ToAuxInt(c - d - 1)
22067 v1 := b.NewValue0(v.Pos, OpSub32, x.Type)
22068 v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
22069 v2.AuxInt = int32ToAuxInt(d + 1)
22076 // match: (OrB (Less16 (Const16 [c]) x) (Leq16 x (Const16 [d])))
22077 // cond: c >= d+1 && d+1 > d
22078 // result: (Less16U (Const16 <x.Type> [c-d-1]) (Sub16 <x.Type> x (Const16 <x.Type> [d+1])))
22080 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
22081 if v_0.Op != OpLess16 {
22085 v_0_0 := v_0.Args[0]
22086 if v_0_0.Op != OpConst16 {
22089 c := auxIntToInt16(v_0_0.AuxInt)
22090 if v_1.Op != OpLeq16 {
22094 if x != v_1.Args[0] {
22097 v_1_1 := v_1.Args[1]
22098 if v_1_1.Op != OpConst16 {
22101 d := auxIntToInt16(v_1_1.AuxInt)
22102 if !(c >= d+1 && d+1 > d) {
22106 v0 := b.NewValue0(v.Pos, OpConst16, x.Type)
22107 v0.AuxInt = int16ToAuxInt(c - d - 1)
22108 v1 := b.NewValue0(v.Pos, OpSub16, x.Type)
22109 v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
22110 v2.AuxInt = int16ToAuxInt(d + 1)
22117 // match: (OrB (Leq16 (Const16 [c]) x) (Leq16 x (Const16 [d])))
22118 // cond: c >= d+1 && d+1 > d
22119 // result: (Leq16U (Const16 <x.Type> [c-d-1]) (Sub16 <x.Type> x (Const16 <x.Type> [d+1])))
22121 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
22122 if v_0.Op != OpLeq16 {
22126 v_0_0 := v_0.Args[0]
22127 if v_0_0.Op != OpConst16 {
22130 c := auxIntToInt16(v_0_0.AuxInt)
22131 if v_1.Op != OpLeq16 {
22135 if x != v_1.Args[0] {
22138 v_1_1 := v_1.Args[1]
22139 if v_1_1.Op != OpConst16 {
22142 d := auxIntToInt16(v_1_1.AuxInt)
22143 if !(c >= d+1 && d+1 > d) {
22147 v0 := b.NewValue0(v.Pos, OpConst16, x.Type)
22148 v0.AuxInt = int16ToAuxInt(c - d - 1)
22149 v1 := b.NewValue0(v.Pos, OpSub16, x.Type)
22150 v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
22151 v2.AuxInt = int16ToAuxInt(d + 1)
22158 // match: (OrB (Less8 (Const8 [c]) x) (Leq8 x (Const8 [d])))
22159 // cond: c >= d+1 && d+1 > d
22160 // result: (Less8U (Const8 <x.Type> [c-d-1]) (Sub8 <x.Type> x (Const8 <x.Type> [d+1])))
22162 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
22163 if v_0.Op != OpLess8 {
22167 v_0_0 := v_0.Args[0]
22168 if v_0_0.Op != OpConst8 {
22171 c := auxIntToInt8(v_0_0.AuxInt)
22172 if v_1.Op != OpLeq8 {
22176 if x != v_1.Args[0] {
22179 v_1_1 := v_1.Args[1]
22180 if v_1_1.Op != OpConst8 {
22183 d := auxIntToInt8(v_1_1.AuxInt)
22184 if !(c >= d+1 && d+1 > d) {
22188 v0 := b.NewValue0(v.Pos, OpConst8, x.Type)
22189 v0.AuxInt = int8ToAuxInt(c - d - 1)
22190 v1 := b.NewValue0(v.Pos, OpSub8, x.Type)
22191 v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
22192 v2.AuxInt = int8ToAuxInt(d + 1)
22199 // match: (OrB (Leq8 (Const8 [c]) x) (Leq8 x (Const8 [d])))
22200 // cond: c >= d+1 && d+1 > d
22201 // result: (Leq8U (Const8 <x.Type> [c-d-1]) (Sub8 <x.Type> x (Const8 <x.Type> [d+1])))
22203 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
22204 if v_0.Op != OpLeq8 {
22208 v_0_0 := v_0.Args[0]
22209 if v_0_0.Op != OpConst8 {
22212 c := auxIntToInt8(v_0_0.AuxInt)
22213 if v_1.Op != OpLeq8 {
22217 if x != v_1.Args[0] {
22220 v_1_1 := v_1.Args[1]
22221 if v_1_1.Op != OpConst8 {
22224 d := auxIntToInt8(v_1_1.AuxInt)
22225 if !(c >= d+1 && d+1 > d) {
22229 v0 := b.NewValue0(v.Pos, OpConst8, x.Type)
22230 v0.AuxInt = int8ToAuxInt(c - d - 1)
22231 v1 := b.NewValue0(v.Pos, OpSub8, x.Type)
22232 v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
22233 v2.AuxInt = int8ToAuxInt(d + 1)
22240 // match: (OrB (Less64U (Const64 [c]) x) (Less64U x (Const64 [d])))
22241 // cond: uint64(c) >= uint64(d)
22242 // result: (Less64U (Const64 <x.Type> [c-d]) (Sub64 <x.Type> x (Const64 <x.Type> [d])))
22244 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
22245 if v_0.Op != OpLess64U {
22249 v_0_0 := v_0.Args[0]
22250 if v_0_0.Op != OpConst64 {
22253 c := auxIntToInt64(v_0_0.AuxInt)
22254 if v_1.Op != OpLess64U {
22258 if x != v_1.Args[0] {
22261 v_1_1 := v_1.Args[1]
22262 if v_1_1.Op != OpConst64 {
22265 d := auxIntToInt64(v_1_1.AuxInt)
22266 if !(uint64(c) >= uint64(d)) {
22270 v0 := b.NewValue0(v.Pos, OpConst64, x.Type)
22271 v0.AuxInt = int64ToAuxInt(c - d)
22272 v1 := b.NewValue0(v.Pos, OpSub64, x.Type)
22273 v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
22274 v2.AuxInt = int64ToAuxInt(d)
22281 // match: (OrB (Leq64U (Const64 [c]) x) (Less64U x (Const64 [d])))
22282 // cond: uint64(c) >= uint64(d)
22283 // result: (Leq64U (Const64 <x.Type> [c-d]) (Sub64 <x.Type> x (Const64 <x.Type> [d])))
22285 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
22286 if v_0.Op != OpLeq64U {
22290 v_0_0 := v_0.Args[0]
22291 if v_0_0.Op != OpConst64 {
22294 c := auxIntToInt64(v_0_0.AuxInt)
22295 if v_1.Op != OpLess64U {
22299 if x != v_1.Args[0] {
22302 v_1_1 := v_1.Args[1]
22303 if v_1_1.Op != OpConst64 {
22306 d := auxIntToInt64(v_1_1.AuxInt)
22307 if !(uint64(c) >= uint64(d)) {
22311 v0 := b.NewValue0(v.Pos, OpConst64, x.Type)
22312 v0.AuxInt = int64ToAuxInt(c - d)
22313 v1 := b.NewValue0(v.Pos, OpSub64, x.Type)
22314 v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
22315 v2.AuxInt = int64ToAuxInt(d)
22322 // match: (OrB (Less32U (Const32 [c]) x) (Less32U x (Const32 [d])))
22323 // cond: uint32(c) >= uint32(d)
22324 // result: (Less32U (Const32 <x.Type> [c-d]) (Sub32 <x.Type> x (Const32 <x.Type> [d])))
22326 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
22327 if v_0.Op != OpLess32U {
22331 v_0_0 := v_0.Args[0]
22332 if v_0_0.Op != OpConst32 {
22335 c := auxIntToInt32(v_0_0.AuxInt)
22336 if v_1.Op != OpLess32U {
22340 if x != v_1.Args[0] {
22343 v_1_1 := v_1.Args[1]
22344 if v_1_1.Op != OpConst32 {
22347 d := auxIntToInt32(v_1_1.AuxInt)
22348 if !(uint32(c) >= uint32(d)) {
22352 v0 := b.NewValue0(v.Pos, OpConst32, x.Type)
22353 v0.AuxInt = int32ToAuxInt(c - d)
22354 v1 := b.NewValue0(v.Pos, OpSub32, x.Type)
22355 v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
22356 v2.AuxInt = int32ToAuxInt(d)
22363 // match: (OrB (Leq32U (Const32 [c]) x) (Less32U x (Const32 [d])))
22364 // cond: uint32(c) >= uint32(d)
22365 // result: (Leq32U (Const32 <x.Type> [c-d]) (Sub32 <x.Type> x (Const32 <x.Type> [d])))
22367 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
22368 if v_0.Op != OpLeq32U {
22372 v_0_0 := v_0.Args[0]
22373 if v_0_0.Op != OpConst32 {
22376 c := auxIntToInt32(v_0_0.AuxInt)
22377 if v_1.Op != OpLess32U {
22381 if x != v_1.Args[0] {
22384 v_1_1 := v_1.Args[1]
22385 if v_1_1.Op != OpConst32 {
22388 d := auxIntToInt32(v_1_1.AuxInt)
22389 if !(uint32(c) >= uint32(d)) {
22393 v0 := b.NewValue0(v.Pos, OpConst32, x.Type)
22394 v0.AuxInt = int32ToAuxInt(c - d)
22395 v1 := b.NewValue0(v.Pos, OpSub32, x.Type)
22396 v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
22397 v2.AuxInt = int32ToAuxInt(d)
22404 // match: (OrB (Less16U (Const16 [c]) x) (Less16U x (Const16 [d])))
22405 // cond: uint16(c) >= uint16(d)
22406 // result: (Less16U (Const16 <x.Type> [c-d]) (Sub16 <x.Type> x (Const16 <x.Type> [d])))
22408 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
22409 if v_0.Op != OpLess16U {
22413 v_0_0 := v_0.Args[0]
22414 if v_0_0.Op != OpConst16 {
22417 c := auxIntToInt16(v_0_0.AuxInt)
22418 if v_1.Op != OpLess16U {
22422 if x != v_1.Args[0] {
22425 v_1_1 := v_1.Args[1]
22426 if v_1_1.Op != OpConst16 {
22429 d := auxIntToInt16(v_1_1.AuxInt)
22430 if !(uint16(c) >= uint16(d)) {
22434 v0 := b.NewValue0(v.Pos, OpConst16, x.Type)
22435 v0.AuxInt = int16ToAuxInt(c - d)
22436 v1 := b.NewValue0(v.Pos, OpSub16, x.Type)
22437 v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
22438 v2.AuxInt = int16ToAuxInt(d)
22445 // match: (OrB (Leq16U (Const16 [c]) x) (Less16U x (Const16 [d])))
22446 // cond: uint16(c) >= uint16(d)
22447 // result: (Leq16U (Const16 <x.Type> [c-d]) (Sub16 <x.Type> x (Const16 <x.Type> [d])))
22449 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
22450 if v_0.Op != OpLeq16U {
22454 v_0_0 := v_0.Args[0]
22455 if v_0_0.Op != OpConst16 {
22458 c := auxIntToInt16(v_0_0.AuxInt)
22459 if v_1.Op != OpLess16U {
22463 if x != v_1.Args[0] {
22466 v_1_1 := v_1.Args[1]
22467 if v_1_1.Op != OpConst16 {
22470 d := auxIntToInt16(v_1_1.AuxInt)
22471 if !(uint16(c) >= uint16(d)) {
22475 v0 := b.NewValue0(v.Pos, OpConst16, x.Type)
22476 v0.AuxInt = int16ToAuxInt(c - d)
22477 v1 := b.NewValue0(v.Pos, OpSub16, x.Type)
22478 v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
22479 v2.AuxInt = int16ToAuxInt(d)
22486 // match: (OrB (Less8U (Const8 [c]) x) (Less8U x (Const8 [d])))
22487 // cond: uint8(c) >= uint8(d)
22488 // result: (Less8U (Const8 <x.Type> [c-d]) (Sub8 <x.Type> x (Const8 <x.Type> [d])))
22490 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
22491 if v_0.Op != OpLess8U {
22495 v_0_0 := v_0.Args[0]
22496 if v_0_0.Op != OpConst8 {
22499 c := auxIntToInt8(v_0_0.AuxInt)
22500 if v_1.Op != OpLess8U {
22504 if x != v_1.Args[0] {
22507 v_1_1 := v_1.Args[1]
22508 if v_1_1.Op != OpConst8 {
22511 d := auxIntToInt8(v_1_1.AuxInt)
22512 if !(uint8(c) >= uint8(d)) {
22516 v0 := b.NewValue0(v.Pos, OpConst8, x.Type)
22517 v0.AuxInt = int8ToAuxInt(c - d)
22518 v1 := b.NewValue0(v.Pos, OpSub8, x.Type)
22519 v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
22520 v2.AuxInt = int8ToAuxInt(d)
22527 // match: (OrB (Leq8U (Const8 [c]) x) (Less8U x (Const8 [d])))
22528 // cond: uint8(c) >= uint8(d)
22529 // result: (Leq8U (Const8 <x.Type> [c-d]) (Sub8 <x.Type> x (Const8 <x.Type> [d])))
22531 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
22532 if v_0.Op != OpLeq8U {
22536 v_0_0 := v_0.Args[0]
22537 if v_0_0.Op != OpConst8 {
22540 c := auxIntToInt8(v_0_0.AuxInt)
22541 if v_1.Op != OpLess8U {
22545 if x != v_1.Args[0] {
22548 v_1_1 := v_1.Args[1]
22549 if v_1_1.Op != OpConst8 {
22552 d := auxIntToInt8(v_1_1.AuxInt)
22553 if !(uint8(c) >= uint8(d)) {
22557 v0 := b.NewValue0(v.Pos, OpConst8, x.Type)
22558 v0.AuxInt = int8ToAuxInt(c - d)
22559 v1 := b.NewValue0(v.Pos, OpSub8, x.Type)
22560 v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
22561 v2.AuxInt = int8ToAuxInt(d)
22568 // match: (OrB (Less64U (Const64 [c]) x) (Leq64U x (Const64 [d])))
22569 // cond: uint64(c) >= uint64(d+1) && uint64(d+1) > uint64(d)
22570 // result: (Less64U (Const64 <x.Type> [c-d-1]) (Sub64 <x.Type> x (Const64 <x.Type> [d+1])))
22572 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
22573 if v_0.Op != OpLess64U {
22577 v_0_0 := v_0.Args[0]
22578 if v_0_0.Op != OpConst64 {
22581 c := auxIntToInt64(v_0_0.AuxInt)
22582 if v_1.Op != OpLeq64U {
22586 if x != v_1.Args[0] {
22589 v_1_1 := v_1.Args[1]
22590 if v_1_1.Op != OpConst64 {
22593 d := auxIntToInt64(v_1_1.AuxInt)
22594 if !(uint64(c) >= uint64(d+1) && uint64(d+1) > uint64(d)) {
22598 v0 := b.NewValue0(v.Pos, OpConst64, x.Type)
22599 v0.AuxInt = int64ToAuxInt(c - d - 1)
22600 v1 := b.NewValue0(v.Pos, OpSub64, x.Type)
22601 v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
22602 v2.AuxInt = int64ToAuxInt(d + 1)
22609 // match: (OrB (Leq64U (Const64 [c]) x) (Leq64U x (Const64 [d])))
22610 // cond: uint64(c) >= uint64(d+1) && uint64(d+1) > uint64(d)
22611 // result: (Leq64U (Const64 <x.Type> [c-d-1]) (Sub64 <x.Type> x (Const64 <x.Type> [d+1])))
22613 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
22614 if v_0.Op != OpLeq64U {
22618 v_0_0 := v_0.Args[0]
22619 if v_0_0.Op != OpConst64 {
22622 c := auxIntToInt64(v_0_0.AuxInt)
22623 if v_1.Op != OpLeq64U {
22627 if x != v_1.Args[0] {
22630 v_1_1 := v_1.Args[1]
22631 if v_1_1.Op != OpConst64 {
22634 d := auxIntToInt64(v_1_1.AuxInt)
22635 if !(uint64(c) >= uint64(d+1) && uint64(d+1) > uint64(d)) {
22639 v0 := b.NewValue0(v.Pos, OpConst64, x.Type)
22640 v0.AuxInt = int64ToAuxInt(c - d - 1)
22641 v1 := b.NewValue0(v.Pos, OpSub64, x.Type)
22642 v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
22643 v2.AuxInt = int64ToAuxInt(d + 1)
22650 // match: (OrB (Less32U (Const32 [c]) x) (Leq32U x (Const32 [d])))
22651 // cond: uint32(c) >= uint32(d+1) && uint32(d+1) > uint32(d)
22652 // result: (Less32U (Const32 <x.Type> [c-d-1]) (Sub32 <x.Type> x (Const32 <x.Type> [d+1])))
22654 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
22655 if v_0.Op != OpLess32U {
22659 v_0_0 := v_0.Args[0]
22660 if v_0_0.Op != OpConst32 {
22663 c := auxIntToInt32(v_0_0.AuxInt)
22664 if v_1.Op != OpLeq32U {
22668 if x != v_1.Args[0] {
22671 v_1_1 := v_1.Args[1]
22672 if v_1_1.Op != OpConst32 {
22675 d := auxIntToInt32(v_1_1.AuxInt)
22676 if !(uint32(c) >= uint32(d+1) && uint32(d+1) > uint32(d)) {
22680 v0 := b.NewValue0(v.Pos, OpConst32, x.Type)
22681 v0.AuxInt = int32ToAuxInt(c - d - 1)
22682 v1 := b.NewValue0(v.Pos, OpSub32, x.Type)
22683 v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
22684 v2.AuxInt = int32ToAuxInt(d + 1)
22691 // match: (OrB (Leq32U (Const32 [c]) x) (Leq32U x (Const32 [d])))
22692 // cond: uint32(c) >= uint32(d+1) && uint32(d+1) > uint32(d)
22693 // result: (Leq32U (Const32 <x.Type> [c-d-1]) (Sub32 <x.Type> x (Const32 <x.Type> [d+1])))
22695 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
22696 if v_0.Op != OpLeq32U {
22700 v_0_0 := v_0.Args[0]
22701 if v_0_0.Op != OpConst32 {
22704 c := auxIntToInt32(v_0_0.AuxInt)
22705 if v_1.Op != OpLeq32U {
22709 if x != v_1.Args[0] {
22712 v_1_1 := v_1.Args[1]
22713 if v_1_1.Op != OpConst32 {
22716 d := auxIntToInt32(v_1_1.AuxInt)
22717 if !(uint32(c) >= uint32(d+1) && uint32(d+1) > uint32(d)) {
22721 v0 := b.NewValue0(v.Pos, OpConst32, x.Type)
22722 v0.AuxInt = int32ToAuxInt(c - d - 1)
22723 v1 := b.NewValue0(v.Pos, OpSub32, x.Type)
22724 v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
22725 v2.AuxInt = int32ToAuxInt(d + 1)
22732 // match: (OrB (Less16U (Const16 [c]) x) (Leq16U x (Const16 [d])))
22733 // cond: uint16(c) >= uint16(d+1) && uint16(d+1) > uint16(d)
22734 // result: (Less16U (Const16 <x.Type> [c-d-1]) (Sub16 <x.Type> x (Const16 <x.Type> [d+1])))
22736 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
22737 if v_0.Op != OpLess16U {
22741 v_0_0 := v_0.Args[0]
22742 if v_0_0.Op != OpConst16 {
22745 c := auxIntToInt16(v_0_0.AuxInt)
22746 if v_1.Op != OpLeq16U {
22750 if x != v_1.Args[0] {
22753 v_1_1 := v_1.Args[1]
22754 if v_1_1.Op != OpConst16 {
22757 d := auxIntToInt16(v_1_1.AuxInt)
22758 if !(uint16(c) >= uint16(d+1) && uint16(d+1) > uint16(d)) {
22762 v0 := b.NewValue0(v.Pos, OpConst16, x.Type)
22763 v0.AuxInt = int16ToAuxInt(c - d - 1)
22764 v1 := b.NewValue0(v.Pos, OpSub16, x.Type)
22765 v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
22766 v2.AuxInt = int16ToAuxInt(d + 1)
22773 // match: (OrB (Leq16U (Const16 [c]) x) (Leq16U x (Const16 [d])))
22774 // cond: uint16(c) >= uint16(d+1) && uint16(d+1) > uint16(d)
22775 // result: (Leq16U (Const16 <x.Type> [c-d-1]) (Sub16 <x.Type> x (Const16 <x.Type> [d+1])))
22777 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
22778 if v_0.Op != OpLeq16U {
22782 v_0_0 := v_0.Args[0]
22783 if v_0_0.Op != OpConst16 {
22786 c := auxIntToInt16(v_0_0.AuxInt)
22787 if v_1.Op != OpLeq16U {
22791 if x != v_1.Args[0] {
22794 v_1_1 := v_1.Args[1]
22795 if v_1_1.Op != OpConst16 {
22798 d := auxIntToInt16(v_1_1.AuxInt)
22799 if !(uint16(c) >= uint16(d+1) && uint16(d+1) > uint16(d)) {
22803 v0 := b.NewValue0(v.Pos, OpConst16, x.Type)
22804 v0.AuxInt = int16ToAuxInt(c - d - 1)
22805 v1 := b.NewValue0(v.Pos, OpSub16, x.Type)
22806 v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
22807 v2.AuxInt = int16ToAuxInt(d + 1)
22814 // match: (OrB (Less8U (Const8 [c]) x) (Leq8U x (Const8 [d])))
22815 // cond: uint8(c) >= uint8(d+1) && uint8(d+1) > uint8(d)
22816 // result: (Less8U (Const8 <x.Type> [c-d-1]) (Sub8 <x.Type> x (Const8 <x.Type> [d+1])))
22818 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
22819 if v_0.Op != OpLess8U {
22823 v_0_0 := v_0.Args[0]
22824 if v_0_0.Op != OpConst8 {
22827 c := auxIntToInt8(v_0_0.AuxInt)
22828 if v_1.Op != OpLeq8U {
22832 if x != v_1.Args[0] {
22835 v_1_1 := v_1.Args[1]
22836 if v_1_1.Op != OpConst8 {
22839 d := auxIntToInt8(v_1_1.AuxInt)
22840 if !(uint8(c) >= uint8(d+1) && uint8(d+1) > uint8(d)) {
22844 v0 := b.NewValue0(v.Pos, OpConst8, x.Type)
22845 v0.AuxInt = int8ToAuxInt(c - d - 1)
22846 v1 := b.NewValue0(v.Pos, OpSub8, x.Type)
22847 v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
22848 v2.AuxInt = int8ToAuxInt(d + 1)
22855 // match: (OrB (Leq8U (Const8 [c]) x) (Leq8U x (Const8 [d])))
22856 // cond: uint8(c) >= uint8(d+1) && uint8(d+1) > uint8(d)
22857 // result: (Leq8U (Const8 <x.Type> [c-d-1]) (Sub8 <x.Type> x (Const8 <x.Type> [d+1])))
22859 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
22860 if v_0.Op != OpLeq8U {
22864 v_0_0 := v_0.Args[0]
22865 if v_0_0.Op != OpConst8 {
22868 c := auxIntToInt8(v_0_0.AuxInt)
22869 if v_1.Op != OpLeq8U {
22873 if x != v_1.Args[0] {
22876 v_1_1 := v_1.Args[1]
22877 if v_1_1.Op != OpConst8 {
22880 d := auxIntToInt8(v_1_1.AuxInt)
22881 if !(uint8(c) >= uint8(d+1) && uint8(d+1) > uint8(d)) {
22885 v0 := b.NewValue0(v.Pos, OpConst8, x.Type)
22886 v0.AuxInt = int8ToAuxInt(c - d - 1)
22887 v1 := b.NewValue0(v.Pos, OpSub8, x.Type)
22888 v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
22889 v2.AuxInt = int8ToAuxInt(d + 1)
22898 func rewriteValuegeneric_OpPhi(v *Value) bool {
22900 // match: (Phi (Const8 [c]) (Const8 [c]))
22901 // result: (Const8 [c])
22903 if len(v.Args) != 2 {
22908 if v_0.Op != OpConst8 {
22911 c := auxIntToInt8(v_0.AuxInt)
22913 if v_1.Op != OpConst8 || auxIntToInt8(v_1.AuxInt) != c {
22917 v.AuxInt = int8ToAuxInt(c)
22920 // match: (Phi (Const16 [c]) (Const16 [c]))
22921 // result: (Const16 [c])
22923 if len(v.Args) != 2 {
22928 if v_0.Op != OpConst16 {
22931 c := auxIntToInt16(v_0.AuxInt)
22933 if v_1.Op != OpConst16 || auxIntToInt16(v_1.AuxInt) != c {
22937 v.AuxInt = int16ToAuxInt(c)
22940 // match: (Phi (Const32 [c]) (Const32 [c]))
22941 // result: (Const32 [c])
22943 if len(v.Args) != 2 {
22948 if v_0.Op != OpConst32 {
22951 c := auxIntToInt32(v_0.AuxInt)
22953 if v_1.Op != OpConst32 || auxIntToInt32(v_1.AuxInt) != c {
22957 v.AuxInt = int32ToAuxInt(c)
22960 // match: (Phi (Const64 [c]) (Const64 [c]))
22961 // result: (Const64 [c])
22963 if len(v.Args) != 2 {
22968 if v_0.Op != OpConst64 {
22971 c := auxIntToInt64(v_0.AuxInt)
22973 if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != c {
22977 v.AuxInt = int64ToAuxInt(c)
22980 // match: (Phi <t> nx:(Not x) ny:(Not y))
22981 // cond: nx.Uses == 1 && ny.Uses == 1
22982 // result: (Not (Phi <t> x y))
22984 if len(v.Args) != 2 {
22990 if nx.Op != OpNot {
22995 if ny.Op != OpNot {
22999 if !(nx.Uses == 1 && ny.Uses == 1) {
23003 v0 := b.NewValue0(v.Pos, OpPhi, t)
23010 func rewriteValuegeneric_OpPtrIndex(v *Value) bool {
23014 config := b.Func.Config
23015 typ := &b.Func.Config.Types
23016 // match: (PtrIndex <t> ptr idx)
23017 // cond: config.PtrSize == 4 && is32Bit(t.Elem().Size())
23018 // result: (AddPtr ptr (Mul32 <typ.Int> idx (Const32 <typ.Int> [int32(t.Elem().Size())])))
23023 if !(config.PtrSize == 4 && is32Bit(t.Elem().Size())) {
23027 v0 := b.NewValue0(v.Pos, OpMul32, typ.Int)
23028 v1 := b.NewValue0(v.Pos, OpConst32, typ.Int)
23029 v1.AuxInt = int32ToAuxInt(int32(t.Elem().Size()))
23030 v0.AddArg2(idx, v1)
23034 // match: (PtrIndex <t> ptr idx)
23035 // cond: config.PtrSize == 8
23036 // result: (AddPtr ptr (Mul64 <typ.Int> idx (Const64 <typ.Int> [t.Elem().Size()])))
23041 if !(config.PtrSize == 8) {
23045 v0 := b.NewValue0(v.Pos, OpMul64, typ.Int)
23046 v1 := b.NewValue0(v.Pos, OpConst64, typ.Int)
23047 v1.AuxInt = int64ToAuxInt(t.Elem().Size())
23048 v0.AddArg2(idx, v1)
23054 func rewriteValuegeneric_OpRotateLeft16(v *Value) bool {
23058 config := b.Func.Config
23059 // match: (RotateLeft16 x (Const16 [c]))
23064 if v_1.Op != OpConst16 {
23067 c := auxIntToInt16(v_1.AuxInt)
23074 // match: (RotateLeft16 x (And64 y (Const64 [c])))
23075 // cond: c&15 == 15
23076 // result: (RotateLeft16 x y)
23079 if v_1.Op != OpAnd64 {
23083 v_1_0 := v_1.Args[0]
23084 v_1_1 := v_1.Args[1]
23085 for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
23087 if v_1_1.Op != OpConst64 {
23090 c := auxIntToInt64(v_1_1.AuxInt)
23094 v.reset(OpRotateLeft16)
23100 // match: (RotateLeft16 x (And32 y (Const32 [c])))
23101 // cond: c&15 == 15
23102 // result: (RotateLeft16 x y)
23105 if v_1.Op != OpAnd32 {
23109 v_1_0 := v_1.Args[0]
23110 v_1_1 := v_1.Args[1]
23111 for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
23113 if v_1_1.Op != OpConst32 {
23116 c := auxIntToInt32(v_1_1.AuxInt)
23120 v.reset(OpRotateLeft16)
23126 // match: (RotateLeft16 x (And16 y (Const16 [c])))
23127 // cond: c&15 == 15
23128 // result: (RotateLeft16 x y)
23131 if v_1.Op != OpAnd16 {
23135 v_1_0 := v_1.Args[0]
23136 v_1_1 := v_1.Args[1]
23137 for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
23139 if v_1_1.Op != OpConst16 {
23142 c := auxIntToInt16(v_1_1.AuxInt)
23146 v.reset(OpRotateLeft16)
23152 // match: (RotateLeft16 x (And8 y (Const8 [c])))
23153 // cond: c&15 == 15
23154 // result: (RotateLeft16 x y)
23157 if v_1.Op != OpAnd8 {
23161 v_1_0 := v_1.Args[0]
23162 v_1_1 := v_1.Args[1]
23163 for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
23165 if v_1_1.Op != OpConst8 {
23168 c := auxIntToInt8(v_1_1.AuxInt)
23172 v.reset(OpRotateLeft16)
23178 // match: (RotateLeft16 x (Neg64 (And64 y (Const64 [c]))))
23179 // cond: c&15 == 15
23180 // result: (RotateLeft16 x (Neg64 <y.Type> y))
23183 if v_1.Op != OpNeg64 {
23186 v_1_0 := v_1.Args[0]
23187 if v_1_0.Op != OpAnd64 {
23191 v_1_0_0 := v_1_0.Args[0]
23192 v_1_0_1 := v_1_0.Args[1]
23193 for _i0 := 0; _i0 <= 1; _i0, v_1_0_0, v_1_0_1 = _i0+1, v_1_0_1, v_1_0_0 {
23195 if v_1_0_1.Op != OpConst64 {
23198 c := auxIntToInt64(v_1_0_1.AuxInt)
23202 v.reset(OpRotateLeft16)
23203 v0 := b.NewValue0(v.Pos, OpNeg64, y.Type)
23210 // match: (RotateLeft16 x (Neg32 (And32 y (Const32 [c]))))
23211 // cond: c&15 == 15
23212 // result: (RotateLeft16 x (Neg32 <y.Type> y))
23215 if v_1.Op != OpNeg32 {
23218 v_1_0 := v_1.Args[0]
23219 if v_1_0.Op != OpAnd32 {
23223 v_1_0_0 := v_1_0.Args[0]
23224 v_1_0_1 := v_1_0.Args[1]
23225 for _i0 := 0; _i0 <= 1; _i0, v_1_0_0, v_1_0_1 = _i0+1, v_1_0_1, v_1_0_0 {
23227 if v_1_0_1.Op != OpConst32 {
23230 c := auxIntToInt32(v_1_0_1.AuxInt)
23234 v.reset(OpRotateLeft16)
23235 v0 := b.NewValue0(v.Pos, OpNeg32, y.Type)
23242 // match: (RotateLeft16 x (Neg16 (And16 y (Const16 [c]))))
23243 // cond: c&15 == 15
23244 // result: (RotateLeft16 x (Neg16 <y.Type> y))
23247 if v_1.Op != OpNeg16 {
23250 v_1_0 := v_1.Args[0]
23251 if v_1_0.Op != OpAnd16 {
23255 v_1_0_0 := v_1_0.Args[0]
23256 v_1_0_1 := v_1_0.Args[1]
23257 for _i0 := 0; _i0 <= 1; _i0, v_1_0_0, v_1_0_1 = _i0+1, v_1_0_1, v_1_0_0 {
23259 if v_1_0_1.Op != OpConst16 {
23262 c := auxIntToInt16(v_1_0_1.AuxInt)
23266 v.reset(OpRotateLeft16)
23267 v0 := b.NewValue0(v.Pos, OpNeg16, y.Type)
23274 // match: (RotateLeft16 x (Neg8 (And8 y (Const8 [c]))))
23275 // cond: c&15 == 15
23276 // result: (RotateLeft16 x (Neg8 <y.Type> y))
23279 if v_1.Op != OpNeg8 {
23282 v_1_0 := v_1.Args[0]
23283 if v_1_0.Op != OpAnd8 {
23287 v_1_0_0 := v_1_0.Args[0]
23288 v_1_0_1 := v_1_0.Args[1]
23289 for _i0 := 0; _i0 <= 1; _i0, v_1_0_0, v_1_0_1 = _i0+1, v_1_0_1, v_1_0_0 {
23291 if v_1_0_1.Op != OpConst8 {
23294 c := auxIntToInt8(v_1_0_1.AuxInt)
23298 v.reset(OpRotateLeft16)
23299 v0 := b.NewValue0(v.Pos, OpNeg8, y.Type)
23306 // match: (RotateLeft16 x (Add64 y (Const64 [c])))
23308 // result: (RotateLeft16 x y)
23311 if v_1.Op != OpAdd64 {
23315 v_1_0 := v_1.Args[0]
23316 v_1_1 := v_1.Args[1]
23317 for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
23319 if v_1_1.Op != OpConst64 {
23322 c := auxIntToInt64(v_1_1.AuxInt)
23326 v.reset(OpRotateLeft16)
23332 // match: (RotateLeft16 x (Add32 y (Const32 [c])))
23334 // result: (RotateLeft16 x y)
23337 if v_1.Op != OpAdd32 {
23341 v_1_0 := v_1.Args[0]
23342 v_1_1 := v_1.Args[1]
23343 for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
23345 if v_1_1.Op != OpConst32 {
23348 c := auxIntToInt32(v_1_1.AuxInt)
23352 v.reset(OpRotateLeft16)
23358 // match: (RotateLeft16 x (Add16 y (Const16 [c])))
23360 // result: (RotateLeft16 x y)
23363 if v_1.Op != OpAdd16 {
23367 v_1_0 := v_1.Args[0]
23368 v_1_1 := v_1.Args[1]
23369 for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
23371 if v_1_1.Op != OpConst16 {
23374 c := auxIntToInt16(v_1_1.AuxInt)
23378 v.reset(OpRotateLeft16)
23384 // match: (RotateLeft16 x (Add8 y (Const8 [c])))
23386 // result: (RotateLeft16 x y)
23389 if v_1.Op != OpAdd8 {
23393 v_1_0 := v_1.Args[0]
23394 v_1_1 := v_1.Args[1]
23395 for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
23397 if v_1_1.Op != OpConst8 {
23400 c := auxIntToInt8(v_1_1.AuxInt)
23404 v.reset(OpRotateLeft16)
23410 // match: (RotateLeft16 x (Sub64 (Const64 [c]) y))
23412 // result: (RotateLeft16 x (Neg64 <y.Type> y))
23415 if v_1.Op != OpSub64 {
23419 v_1_0 := v_1.Args[0]
23420 if v_1_0.Op != OpConst64 {
23423 c := auxIntToInt64(v_1_0.AuxInt)
23427 v.reset(OpRotateLeft16)
23428 v0 := b.NewValue0(v.Pos, OpNeg64, y.Type)
23433 // match: (RotateLeft16 x (Sub32 (Const32 [c]) y))
23435 // result: (RotateLeft16 x (Neg32 <y.Type> y))
23438 if v_1.Op != OpSub32 {
23442 v_1_0 := v_1.Args[0]
23443 if v_1_0.Op != OpConst32 {
23446 c := auxIntToInt32(v_1_0.AuxInt)
23450 v.reset(OpRotateLeft16)
23451 v0 := b.NewValue0(v.Pos, OpNeg32, y.Type)
23456 // match: (RotateLeft16 x (Sub16 (Const16 [c]) y))
23458 // result: (RotateLeft16 x (Neg16 <y.Type> y))
23461 if v_1.Op != OpSub16 {
23465 v_1_0 := v_1.Args[0]
23466 if v_1_0.Op != OpConst16 {
23469 c := auxIntToInt16(v_1_0.AuxInt)
23473 v.reset(OpRotateLeft16)
23474 v0 := b.NewValue0(v.Pos, OpNeg16, y.Type)
23479 // match: (RotateLeft16 x (Sub8 (Const8 [c]) y))
23481 // result: (RotateLeft16 x (Neg8 <y.Type> y))
23484 if v_1.Op != OpSub8 {
23488 v_1_0 := v_1.Args[0]
23489 if v_1_0.Op != OpConst8 {
23492 c := auxIntToInt8(v_1_0.AuxInt)
23496 v.reset(OpRotateLeft16)
23497 v0 := b.NewValue0(v.Pos, OpNeg8, y.Type)
23502 // match: (RotateLeft16 x (Const64 <t> [c]))
23503 // cond: config.PtrSize == 4
23504 // result: (RotateLeft16 x (Const32 <t> [int32(c)]))
23507 if v_1.Op != OpConst64 {
23511 c := auxIntToInt64(v_1.AuxInt)
23512 if !(config.PtrSize == 4) {
23515 v.reset(OpRotateLeft16)
23516 v0 := b.NewValue0(v.Pos, OpConst32, t)
23517 v0.AuxInt = int32ToAuxInt(int32(c))
23521 // match: (RotateLeft16 (RotateLeft16 x c) d)
23522 // cond: c.Type.Size() == 8 && d.Type.Size() == 8
23523 // result: (RotateLeft16 x (Add64 <c.Type> c d))
23525 if v_0.Op != OpRotateLeft16 {
23531 if !(c.Type.Size() == 8 && d.Type.Size() == 8) {
23534 v.reset(OpRotateLeft16)
23535 v0 := b.NewValue0(v.Pos, OpAdd64, c.Type)
23540 // match: (RotateLeft16 (RotateLeft16 x c) d)
23541 // cond: c.Type.Size() == 4 && d.Type.Size() == 4
23542 // result: (RotateLeft16 x (Add32 <c.Type> c d))
23544 if v_0.Op != OpRotateLeft16 {
23550 if !(c.Type.Size() == 4 && d.Type.Size() == 4) {
23553 v.reset(OpRotateLeft16)
23554 v0 := b.NewValue0(v.Pos, OpAdd32, c.Type)
23559 // match: (RotateLeft16 (RotateLeft16 x c) d)
23560 // cond: c.Type.Size() == 2 && d.Type.Size() == 2
23561 // result: (RotateLeft16 x (Add16 <c.Type> c d))
23563 if v_0.Op != OpRotateLeft16 {
23569 if !(c.Type.Size() == 2 && d.Type.Size() == 2) {
23572 v.reset(OpRotateLeft16)
23573 v0 := b.NewValue0(v.Pos, OpAdd16, c.Type)
23578 // match: (RotateLeft16 (RotateLeft16 x c) d)
23579 // cond: c.Type.Size() == 1 && d.Type.Size() == 1
23580 // result: (RotateLeft16 x (Add8 <c.Type> c d))
23582 if v_0.Op != OpRotateLeft16 {
23588 if !(c.Type.Size() == 1 && d.Type.Size() == 1) {
23591 v.reset(OpRotateLeft16)
23592 v0 := b.NewValue0(v.Pos, OpAdd8, c.Type)
23599 func rewriteValuegeneric_OpRotateLeft32(v *Value) bool {
23603 config := b.Func.Config
23604 // match: (RotateLeft32 x (Const32 [c]))
23609 if v_1.Op != OpConst32 {
23612 c := auxIntToInt32(v_1.AuxInt)
23619 // match: (RotateLeft32 x (And64 y (Const64 [c])))
23620 // cond: c&31 == 31
23621 // result: (RotateLeft32 x y)
23624 if v_1.Op != OpAnd64 {
23628 v_1_0 := v_1.Args[0]
23629 v_1_1 := v_1.Args[1]
23630 for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
23632 if v_1_1.Op != OpConst64 {
23635 c := auxIntToInt64(v_1_1.AuxInt)
23639 v.reset(OpRotateLeft32)
23645 // match: (RotateLeft32 x (And32 y (Const32 [c])))
23646 // cond: c&31 == 31
23647 // result: (RotateLeft32 x y)
23650 if v_1.Op != OpAnd32 {
23654 v_1_0 := v_1.Args[0]
23655 v_1_1 := v_1.Args[1]
23656 for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
23658 if v_1_1.Op != OpConst32 {
23661 c := auxIntToInt32(v_1_1.AuxInt)
23665 v.reset(OpRotateLeft32)
23671 // match: (RotateLeft32 x (And16 y (Const16 [c])))
23672 // cond: c&31 == 31
23673 // result: (RotateLeft32 x y)
23676 if v_1.Op != OpAnd16 {
23680 v_1_0 := v_1.Args[0]
23681 v_1_1 := v_1.Args[1]
23682 for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
23684 if v_1_1.Op != OpConst16 {
23687 c := auxIntToInt16(v_1_1.AuxInt)
23691 v.reset(OpRotateLeft32)
23697 // match: (RotateLeft32 x (And8 y (Const8 [c])))
23698 // cond: c&31 == 31
23699 // result: (RotateLeft32 x y)
23702 if v_1.Op != OpAnd8 {
23706 v_1_0 := v_1.Args[0]
23707 v_1_1 := v_1.Args[1]
23708 for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
23710 if v_1_1.Op != OpConst8 {
23713 c := auxIntToInt8(v_1_1.AuxInt)
23717 v.reset(OpRotateLeft32)
23723 // match: (RotateLeft32 x (Neg64 (And64 y (Const64 [c]))))
23724 // cond: c&31 == 31
23725 // result: (RotateLeft32 x (Neg64 <y.Type> y))
23728 if v_1.Op != OpNeg64 {
23731 v_1_0 := v_1.Args[0]
23732 if v_1_0.Op != OpAnd64 {
23736 v_1_0_0 := v_1_0.Args[0]
23737 v_1_0_1 := v_1_0.Args[1]
23738 for _i0 := 0; _i0 <= 1; _i0, v_1_0_0, v_1_0_1 = _i0+1, v_1_0_1, v_1_0_0 {
23740 if v_1_0_1.Op != OpConst64 {
23743 c := auxIntToInt64(v_1_0_1.AuxInt)
23747 v.reset(OpRotateLeft32)
23748 v0 := b.NewValue0(v.Pos, OpNeg64, y.Type)
23755 // match: (RotateLeft32 x (Neg32 (And32 y (Const32 [c]))))
23756 // cond: c&31 == 31
23757 // result: (RotateLeft32 x (Neg32 <y.Type> y))
23760 if v_1.Op != OpNeg32 {
23763 v_1_0 := v_1.Args[0]
23764 if v_1_0.Op != OpAnd32 {
23768 v_1_0_0 := v_1_0.Args[0]
23769 v_1_0_1 := v_1_0.Args[1]
23770 for _i0 := 0; _i0 <= 1; _i0, v_1_0_0, v_1_0_1 = _i0+1, v_1_0_1, v_1_0_0 {
23772 if v_1_0_1.Op != OpConst32 {
23775 c := auxIntToInt32(v_1_0_1.AuxInt)
23779 v.reset(OpRotateLeft32)
23780 v0 := b.NewValue0(v.Pos, OpNeg32, y.Type)
23787 // match: (RotateLeft32 x (Neg16 (And16 y (Const16 [c]))))
23788 // cond: c&31 == 31
23789 // result: (RotateLeft32 x (Neg16 <y.Type> y))
23792 if v_1.Op != OpNeg16 {
23795 v_1_0 := v_1.Args[0]
23796 if v_1_0.Op != OpAnd16 {
23800 v_1_0_0 := v_1_0.Args[0]
23801 v_1_0_1 := v_1_0.Args[1]
23802 for _i0 := 0; _i0 <= 1; _i0, v_1_0_0, v_1_0_1 = _i0+1, v_1_0_1, v_1_0_0 {
23804 if v_1_0_1.Op != OpConst16 {
23807 c := auxIntToInt16(v_1_0_1.AuxInt)
23811 v.reset(OpRotateLeft32)
23812 v0 := b.NewValue0(v.Pos, OpNeg16, y.Type)
23819 // match: (RotateLeft32 x (Neg8 (And8 y (Const8 [c]))))
23820 // cond: c&31 == 31
23821 // result: (RotateLeft32 x (Neg8 <y.Type> y))
23824 if v_1.Op != OpNeg8 {
23827 v_1_0 := v_1.Args[0]
23828 if v_1_0.Op != OpAnd8 {
23832 v_1_0_0 := v_1_0.Args[0]
23833 v_1_0_1 := v_1_0.Args[1]
23834 for _i0 := 0; _i0 <= 1; _i0, v_1_0_0, v_1_0_1 = _i0+1, v_1_0_1, v_1_0_0 {
23836 if v_1_0_1.Op != OpConst8 {
23839 c := auxIntToInt8(v_1_0_1.AuxInt)
23843 v.reset(OpRotateLeft32)
23844 v0 := b.NewValue0(v.Pos, OpNeg8, y.Type)
23851 // match: (RotateLeft32 x (Add64 y (Const64 [c])))
23853 // result: (RotateLeft32 x y)
23856 if v_1.Op != OpAdd64 {
23860 v_1_0 := v_1.Args[0]
23861 v_1_1 := v_1.Args[1]
23862 for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
23864 if v_1_1.Op != OpConst64 {
23867 c := auxIntToInt64(v_1_1.AuxInt)
23871 v.reset(OpRotateLeft32)
23877 // match: (RotateLeft32 x (Add32 y (Const32 [c])))
23879 // result: (RotateLeft32 x y)
23882 if v_1.Op != OpAdd32 {
23886 v_1_0 := v_1.Args[0]
23887 v_1_1 := v_1.Args[1]
23888 for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
23890 if v_1_1.Op != OpConst32 {
23893 c := auxIntToInt32(v_1_1.AuxInt)
23897 v.reset(OpRotateLeft32)
23903 // match: (RotateLeft32 x (Add16 y (Const16 [c])))
23905 // result: (RotateLeft32 x y)
23908 if v_1.Op != OpAdd16 {
23912 v_1_0 := v_1.Args[0]
23913 v_1_1 := v_1.Args[1]
23914 for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
23916 if v_1_1.Op != OpConst16 {
23919 c := auxIntToInt16(v_1_1.AuxInt)
23923 v.reset(OpRotateLeft32)
23929 // match: (RotateLeft32 x (Add8 y (Const8 [c])))
23931 // result: (RotateLeft32 x y)
23934 if v_1.Op != OpAdd8 {
23938 v_1_0 := v_1.Args[0]
23939 v_1_1 := v_1.Args[1]
23940 for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
23942 if v_1_1.Op != OpConst8 {
23945 c := auxIntToInt8(v_1_1.AuxInt)
23949 v.reset(OpRotateLeft32)
23955 // match: (RotateLeft32 x (Sub64 (Const64 [c]) y))
23957 // result: (RotateLeft32 x (Neg64 <y.Type> y))
23960 if v_1.Op != OpSub64 {
23964 v_1_0 := v_1.Args[0]
23965 if v_1_0.Op != OpConst64 {
23968 c := auxIntToInt64(v_1_0.AuxInt)
23972 v.reset(OpRotateLeft32)
23973 v0 := b.NewValue0(v.Pos, OpNeg64, y.Type)
23978 // match: (RotateLeft32 x (Sub32 (Const32 [c]) y))
23980 // result: (RotateLeft32 x (Neg32 <y.Type> y))
23983 if v_1.Op != OpSub32 {
23987 v_1_0 := v_1.Args[0]
23988 if v_1_0.Op != OpConst32 {
23991 c := auxIntToInt32(v_1_0.AuxInt)
23995 v.reset(OpRotateLeft32)
23996 v0 := b.NewValue0(v.Pos, OpNeg32, y.Type)
24001 // match: (RotateLeft32 x (Sub16 (Const16 [c]) y))
24003 // result: (RotateLeft32 x (Neg16 <y.Type> y))
24006 if v_1.Op != OpSub16 {
24010 v_1_0 := v_1.Args[0]
24011 if v_1_0.Op != OpConst16 {
24014 c := auxIntToInt16(v_1_0.AuxInt)
24018 v.reset(OpRotateLeft32)
24019 v0 := b.NewValue0(v.Pos, OpNeg16, y.Type)
24024 // match: (RotateLeft32 x (Sub8 (Const8 [c]) y))
24026 // result: (RotateLeft32 x (Neg8 <y.Type> y))
24029 if v_1.Op != OpSub8 {
24033 v_1_0 := v_1.Args[0]
24034 if v_1_0.Op != OpConst8 {
24037 c := auxIntToInt8(v_1_0.AuxInt)
24041 v.reset(OpRotateLeft32)
24042 v0 := b.NewValue0(v.Pos, OpNeg8, y.Type)
24047 // match: (RotateLeft32 x (Const64 <t> [c]))
24048 // cond: config.PtrSize == 4
24049 // result: (RotateLeft32 x (Const32 <t> [int32(c)]))
24052 if v_1.Op != OpConst64 {
24056 c := auxIntToInt64(v_1.AuxInt)
24057 if !(config.PtrSize == 4) {
24060 v.reset(OpRotateLeft32)
24061 v0 := b.NewValue0(v.Pos, OpConst32, t)
24062 v0.AuxInt = int32ToAuxInt(int32(c))
24066 // match: (RotateLeft32 (RotateLeft32 x c) d)
24067 // cond: c.Type.Size() == 8 && d.Type.Size() == 8
24068 // result: (RotateLeft32 x (Add64 <c.Type> c d))
24070 if v_0.Op != OpRotateLeft32 {
24076 if !(c.Type.Size() == 8 && d.Type.Size() == 8) {
24079 v.reset(OpRotateLeft32)
24080 v0 := b.NewValue0(v.Pos, OpAdd64, c.Type)
24085 // match: (RotateLeft32 (RotateLeft32 x c) d)
24086 // cond: c.Type.Size() == 4 && d.Type.Size() == 4
24087 // result: (RotateLeft32 x (Add32 <c.Type> c d))
24089 if v_0.Op != OpRotateLeft32 {
24095 if !(c.Type.Size() == 4 && d.Type.Size() == 4) {
24098 v.reset(OpRotateLeft32)
24099 v0 := b.NewValue0(v.Pos, OpAdd32, c.Type)
24104 // match: (RotateLeft32 (RotateLeft32 x c) d)
24105 // cond: c.Type.Size() == 2 && d.Type.Size() == 2
24106 // result: (RotateLeft32 x (Add16 <c.Type> c d))
24108 if v_0.Op != OpRotateLeft32 {
24114 if !(c.Type.Size() == 2 && d.Type.Size() == 2) {
24117 v.reset(OpRotateLeft32)
24118 v0 := b.NewValue0(v.Pos, OpAdd16, c.Type)
24123 // match: (RotateLeft32 (RotateLeft32 x c) d)
24124 // cond: c.Type.Size() == 1 && d.Type.Size() == 1
24125 // result: (RotateLeft32 x (Add8 <c.Type> c d))
24127 if v_0.Op != OpRotateLeft32 {
24133 if !(c.Type.Size() == 1 && d.Type.Size() == 1) {
24136 v.reset(OpRotateLeft32)
24137 v0 := b.NewValue0(v.Pos, OpAdd8, c.Type)
24144 func rewriteValuegeneric_OpRotateLeft64(v *Value) bool {
24148 config := b.Func.Config
24149 // match: (RotateLeft64 x (Const64 [c]))
24154 if v_1.Op != OpConst64 {
24157 c := auxIntToInt64(v_1.AuxInt)
24164 // match: (RotateLeft64 x (And64 y (Const64 [c])))
24165 // cond: c&63 == 63
24166 // result: (RotateLeft64 x y)
24169 if v_1.Op != OpAnd64 {
24173 v_1_0 := v_1.Args[0]
24174 v_1_1 := v_1.Args[1]
24175 for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
24177 if v_1_1.Op != OpConst64 {
24180 c := auxIntToInt64(v_1_1.AuxInt)
24184 v.reset(OpRotateLeft64)
24190 // match: (RotateLeft64 x (And32 y (Const32 [c])))
24191 // cond: c&63 == 63
24192 // result: (RotateLeft64 x y)
24195 if v_1.Op != OpAnd32 {
24199 v_1_0 := v_1.Args[0]
24200 v_1_1 := v_1.Args[1]
24201 for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
24203 if v_1_1.Op != OpConst32 {
24206 c := auxIntToInt32(v_1_1.AuxInt)
24210 v.reset(OpRotateLeft64)
24216 // match: (RotateLeft64 x (And16 y (Const16 [c])))
24217 // cond: c&63 == 63
24218 // result: (RotateLeft64 x y)
24221 if v_1.Op != OpAnd16 {
24225 v_1_0 := v_1.Args[0]
24226 v_1_1 := v_1.Args[1]
24227 for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
24229 if v_1_1.Op != OpConst16 {
24232 c := auxIntToInt16(v_1_1.AuxInt)
24236 v.reset(OpRotateLeft64)
24242 // match: (RotateLeft64 x (And8 y (Const8 [c])))
24243 // cond: c&63 == 63
24244 // result: (RotateLeft64 x y)
24247 if v_1.Op != OpAnd8 {
24251 v_1_0 := v_1.Args[0]
24252 v_1_1 := v_1.Args[1]
24253 for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
24255 if v_1_1.Op != OpConst8 {
24258 c := auxIntToInt8(v_1_1.AuxInt)
24262 v.reset(OpRotateLeft64)
24268 // match: (RotateLeft64 x (Neg64 (And64 y (Const64 [c]))))
24269 // cond: c&63 == 63
24270 // result: (RotateLeft64 x (Neg64 <y.Type> y))
24273 if v_1.Op != OpNeg64 {
24276 v_1_0 := v_1.Args[0]
24277 if v_1_0.Op != OpAnd64 {
24281 v_1_0_0 := v_1_0.Args[0]
24282 v_1_0_1 := v_1_0.Args[1]
24283 for _i0 := 0; _i0 <= 1; _i0, v_1_0_0, v_1_0_1 = _i0+1, v_1_0_1, v_1_0_0 {
24285 if v_1_0_1.Op != OpConst64 {
24288 c := auxIntToInt64(v_1_0_1.AuxInt)
24292 v.reset(OpRotateLeft64)
24293 v0 := b.NewValue0(v.Pos, OpNeg64, y.Type)
24300 // match: (RotateLeft64 x (Neg32 (And32 y (Const32 [c]))))
24301 // cond: c&63 == 63
24302 // result: (RotateLeft64 x (Neg32 <y.Type> y))
24305 if v_1.Op != OpNeg32 {
24308 v_1_0 := v_1.Args[0]
24309 if v_1_0.Op != OpAnd32 {
24313 v_1_0_0 := v_1_0.Args[0]
24314 v_1_0_1 := v_1_0.Args[1]
24315 for _i0 := 0; _i0 <= 1; _i0, v_1_0_0, v_1_0_1 = _i0+1, v_1_0_1, v_1_0_0 {
24317 if v_1_0_1.Op != OpConst32 {
24320 c := auxIntToInt32(v_1_0_1.AuxInt)
24324 v.reset(OpRotateLeft64)
24325 v0 := b.NewValue0(v.Pos, OpNeg32, y.Type)
24332 // match: (RotateLeft64 x (Neg16 (And16 y (Const16 [c]))))
24333 // cond: c&63 == 63
24334 // result: (RotateLeft64 x (Neg16 <y.Type> y))
24337 if v_1.Op != OpNeg16 {
24340 v_1_0 := v_1.Args[0]
24341 if v_1_0.Op != OpAnd16 {
24345 v_1_0_0 := v_1_0.Args[0]
24346 v_1_0_1 := v_1_0.Args[1]
24347 for _i0 := 0; _i0 <= 1; _i0, v_1_0_0, v_1_0_1 = _i0+1, v_1_0_1, v_1_0_0 {
24349 if v_1_0_1.Op != OpConst16 {
24352 c := auxIntToInt16(v_1_0_1.AuxInt)
24356 v.reset(OpRotateLeft64)
24357 v0 := b.NewValue0(v.Pos, OpNeg16, y.Type)
24364 // match: (RotateLeft64 x (Neg8 (And8 y (Const8 [c]))))
24365 // cond: c&63 == 63
24366 // result: (RotateLeft64 x (Neg8 <y.Type> y))
24369 if v_1.Op != OpNeg8 {
24372 v_1_0 := v_1.Args[0]
24373 if v_1_0.Op != OpAnd8 {
24377 v_1_0_0 := v_1_0.Args[0]
24378 v_1_0_1 := v_1_0.Args[1]
24379 for _i0 := 0; _i0 <= 1; _i0, v_1_0_0, v_1_0_1 = _i0+1, v_1_0_1, v_1_0_0 {
24381 if v_1_0_1.Op != OpConst8 {
24384 c := auxIntToInt8(v_1_0_1.AuxInt)
24388 v.reset(OpRotateLeft64)
24389 v0 := b.NewValue0(v.Pos, OpNeg8, y.Type)
24396 // match: (RotateLeft64 x (Add64 y (Const64 [c])))
24398 // result: (RotateLeft64 x y)
24401 if v_1.Op != OpAdd64 {
24405 v_1_0 := v_1.Args[0]
24406 v_1_1 := v_1.Args[1]
24407 for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
24409 if v_1_1.Op != OpConst64 {
24412 c := auxIntToInt64(v_1_1.AuxInt)
24416 v.reset(OpRotateLeft64)
24422 // match: (RotateLeft64 x (Add32 y (Const32 [c])))
24424 // result: (RotateLeft64 x y)
24427 if v_1.Op != OpAdd32 {
24431 v_1_0 := v_1.Args[0]
24432 v_1_1 := v_1.Args[1]
24433 for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
24435 if v_1_1.Op != OpConst32 {
24438 c := auxIntToInt32(v_1_1.AuxInt)
24442 v.reset(OpRotateLeft64)
24448 // match: (RotateLeft64 x (Add16 y (Const16 [c])))
24450 // result: (RotateLeft64 x y)
24453 if v_1.Op != OpAdd16 {
24457 v_1_0 := v_1.Args[0]
24458 v_1_1 := v_1.Args[1]
24459 for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
24461 if v_1_1.Op != OpConst16 {
24464 c := auxIntToInt16(v_1_1.AuxInt)
24468 v.reset(OpRotateLeft64)
24474 // match: (RotateLeft64 x (Add8 y (Const8 [c])))
24476 // result: (RotateLeft64 x y)
24479 if v_1.Op != OpAdd8 {
24483 v_1_0 := v_1.Args[0]
24484 v_1_1 := v_1.Args[1]
24485 for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
24487 if v_1_1.Op != OpConst8 {
24490 c := auxIntToInt8(v_1_1.AuxInt)
24494 v.reset(OpRotateLeft64)
24500 // match: (RotateLeft64 x (Sub64 (Const64 [c]) y))
24502 // result: (RotateLeft64 x (Neg64 <y.Type> y))
24505 if v_1.Op != OpSub64 {
24509 v_1_0 := v_1.Args[0]
24510 if v_1_0.Op != OpConst64 {
24513 c := auxIntToInt64(v_1_0.AuxInt)
24517 v.reset(OpRotateLeft64)
24518 v0 := b.NewValue0(v.Pos, OpNeg64, y.Type)
24523 // match: (RotateLeft64 x (Sub32 (Const32 [c]) y))
24525 // result: (RotateLeft64 x (Neg32 <y.Type> y))
24528 if v_1.Op != OpSub32 {
24532 v_1_0 := v_1.Args[0]
24533 if v_1_0.Op != OpConst32 {
24536 c := auxIntToInt32(v_1_0.AuxInt)
24540 v.reset(OpRotateLeft64)
24541 v0 := b.NewValue0(v.Pos, OpNeg32, y.Type)
24546 // match: (RotateLeft64 x (Sub16 (Const16 [c]) y))
24548 // result: (RotateLeft64 x (Neg16 <y.Type> y))
24551 if v_1.Op != OpSub16 {
24555 v_1_0 := v_1.Args[0]
24556 if v_1_0.Op != OpConst16 {
24559 c := auxIntToInt16(v_1_0.AuxInt)
24563 v.reset(OpRotateLeft64)
24564 v0 := b.NewValue0(v.Pos, OpNeg16, y.Type)
24569 // match: (RotateLeft64 x (Sub8 (Const8 [c]) y))
24571 // result: (RotateLeft64 x (Neg8 <y.Type> y))
24574 if v_1.Op != OpSub8 {
24578 v_1_0 := v_1.Args[0]
24579 if v_1_0.Op != OpConst8 {
24582 c := auxIntToInt8(v_1_0.AuxInt)
24586 v.reset(OpRotateLeft64)
24587 v0 := b.NewValue0(v.Pos, OpNeg8, y.Type)
24592 // match: (RotateLeft64 x (Const64 <t> [c]))
24593 // cond: config.PtrSize == 4
24594 // result: (RotateLeft64 x (Const32 <t> [int32(c)]))
24597 if v_1.Op != OpConst64 {
24601 c := auxIntToInt64(v_1.AuxInt)
24602 if !(config.PtrSize == 4) {
24605 v.reset(OpRotateLeft64)
24606 v0 := b.NewValue0(v.Pos, OpConst32, t)
24607 v0.AuxInt = int32ToAuxInt(int32(c))
24611 // match: (RotateLeft64 (RotateLeft64 x c) d)
24612 // cond: c.Type.Size() == 8 && d.Type.Size() == 8
24613 // result: (RotateLeft64 x (Add64 <c.Type> c d))
24615 if v_0.Op != OpRotateLeft64 {
24621 if !(c.Type.Size() == 8 && d.Type.Size() == 8) {
24624 v.reset(OpRotateLeft64)
24625 v0 := b.NewValue0(v.Pos, OpAdd64, c.Type)
24630 // match: (RotateLeft64 (RotateLeft64 x c) d)
24631 // cond: c.Type.Size() == 4 && d.Type.Size() == 4
24632 // result: (RotateLeft64 x (Add32 <c.Type> c d))
24634 if v_0.Op != OpRotateLeft64 {
24640 if !(c.Type.Size() == 4 && d.Type.Size() == 4) {
24643 v.reset(OpRotateLeft64)
24644 v0 := b.NewValue0(v.Pos, OpAdd32, c.Type)
24649 // match: (RotateLeft64 (RotateLeft64 x c) d)
24650 // cond: c.Type.Size() == 2 && d.Type.Size() == 2
24651 // result: (RotateLeft64 x (Add16 <c.Type> c d))
24653 if v_0.Op != OpRotateLeft64 {
24659 if !(c.Type.Size() == 2 && d.Type.Size() == 2) {
24662 v.reset(OpRotateLeft64)
24663 v0 := b.NewValue0(v.Pos, OpAdd16, c.Type)
24668 // match: (RotateLeft64 (RotateLeft64 x c) d)
24669 // cond: c.Type.Size() == 1 && d.Type.Size() == 1
24670 // result: (RotateLeft64 x (Add8 <c.Type> c d))
24672 if v_0.Op != OpRotateLeft64 {
24678 if !(c.Type.Size() == 1 && d.Type.Size() == 1) {
24681 v.reset(OpRotateLeft64)
24682 v0 := b.NewValue0(v.Pos, OpAdd8, c.Type)
// rewriteValuegeneric_OpRotateLeft8 applies the generic rewrite rules for
// RotateLeft8: it strips redundant And-masks, Neg(And), Add and Sub wrappers
// from the rotate count, narrows a Const64 count to Const32 on 32-bit targets
// (config.PtrSize == 4), and merges nested RotateLeft8s by adding their counts.
// NOTE(review): generated from _gen/generic.rules — change the rules, not this
// file. Lines in this listing are elided; matched rules return true.
24689 func rewriteValuegeneric_OpRotateLeft8(v *Value) bool {
24693 config := b.Func.Config
24694 // match: (RotateLeft8 x (Const8 [c]))
24699 if v_1.Op != OpConst8 {
24702 c := auxIntToInt8(v_1.AuxInt)
24709 // match: (RotateLeft8 x (And64 y (Const64 [c])))
24711 // result: (RotateLeft8 x y)
24714 if v_1.Op != OpAnd64 {
24718 v_1_0 := v_1.Args[0]
24719 v_1_1 := v_1.Args[1]
24720 for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
24722 if v_1_1.Op != OpConst64 {
24725 c := auxIntToInt64(v_1_1.AuxInt)
24729 v.reset(OpRotateLeft8)
24735 // match: (RotateLeft8 x (And32 y (Const32 [c])))
24737 // result: (RotateLeft8 x y)
24740 if v_1.Op != OpAnd32 {
24744 v_1_0 := v_1.Args[0]
24745 v_1_1 := v_1.Args[1]
24746 for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
24748 if v_1_1.Op != OpConst32 {
24751 c := auxIntToInt32(v_1_1.AuxInt)
24755 v.reset(OpRotateLeft8)
24761 // match: (RotateLeft8 x (And16 y (Const16 [c])))
24763 // result: (RotateLeft8 x y)
24766 if v_1.Op != OpAnd16 {
24770 v_1_0 := v_1.Args[0]
24771 v_1_1 := v_1.Args[1]
24772 for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
24774 if v_1_1.Op != OpConst16 {
24777 c := auxIntToInt16(v_1_1.AuxInt)
24781 v.reset(OpRotateLeft8)
24787 // match: (RotateLeft8 x (And8 y (Const8 [c])))
24789 // result: (RotateLeft8 x y)
24792 if v_1.Op != OpAnd8 {
24796 v_1_0 := v_1.Args[0]
24797 v_1_1 := v_1.Args[1]
24798 for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
24800 if v_1_1.Op != OpConst8 {
24803 c := auxIntToInt8(v_1_1.AuxInt)
24807 v.reset(OpRotateLeft8)
24813 // match: (RotateLeft8 x (Neg64 (And64 y (Const64 [c]))))
24815 // result: (RotateLeft8 x (Neg64 <y.Type> y))
24818 if v_1.Op != OpNeg64 {
24821 v_1_0 := v_1.Args[0]
24822 if v_1_0.Op != OpAnd64 {
24826 v_1_0_0 := v_1_0.Args[0]
24827 v_1_0_1 := v_1_0.Args[1]
24828 for _i0 := 0; _i0 <= 1; _i0, v_1_0_0, v_1_0_1 = _i0+1, v_1_0_1, v_1_0_0 {
24830 if v_1_0_1.Op != OpConst64 {
24833 c := auxIntToInt64(v_1_0_1.AuxInt)
24837 v.reset(OpRotateLeft8)
24838 v0 := b.NewValue0(v.Pos, OpNeg64, y.Type)
24845 // match: (RotateLeft8 x (Neg32 (And32 y (Const32 [c]))))
24847 // result: (RotateLeft8 x (Neg32 <y.Type> y))
24850 if v_1.Op != OpNeg32 {
24853 v_1_0 := v_1.Args[0]
24854 if v_1_0.Op != OpAnd32 {
24858 v_1_0_0 := v_1_0.Args[0]
24859 v_1_0_1 := v_1_0.Args[1]
24860 for _i0 := 0; _i0 <= 1; _i0, v_1_0_0, v_1_0_1 = _i0+1, v_1_0_1, v_1_0_0 {
24862 if v_1_0_1.Op != OpConst32 {
24865 c := auxIntToInt32(v_1_0_1.AuxInt)
24869 v.reset(OpRotateLeft8)
24870 v0 := b.NewValue0(v.Pos, OpNeg32, y.Type)
24877 // match: (RotateLeft8 x (Neg16 (And16 y (Const16 [c]))))
24879 // result: (RotateLeft8 x (Neg16 <y.Type> y))
24882 if v_1.Op != OpNeg16 {
24885 v_1_0 := v_1.Args[0]
24886 if v_1_0.Op != OpAnd16 {
24890 v_1_0_0 := v_1_0.Args[0]
24891 v_1_0_1 := v_1_0.Args[1]
24892 for _i0 := 0; _i0 <= 1; _i0, v_1_0_0, v_1_0_1 = _i0+1, v_1_0_1, v_1_0_0 {
24894 if v_1_0_1.Op != OpConst16 {
24897 c := auxIntToInt16(v_1_0_1.AuxInt)
24901 v.reset(OpRotateLeft8)
24902 v0 := b.NewValue0(v.Pos, OpNeg16, y.Type)
24909 // match: (RotateLeft8 x (Neg8 (And8 y (Const8 [c]))))
24911 // result: (RotateLeft8 x (Neg8 <y.Type> y))
24914 if v_1.Op != OpNeg8 {
24917 v_1_0 := v_1.Args[0]
24918 if v_1_0.Op != OpAnd8 {
24922 v_1_0_0 := v_1_0.Args[0]
24923 v_1_0_1 := v_1_0.Args[1]
24924 for _i0 := 0; _i0 <= 1; _i0, v_1_0_0, v_1_0_1 = _i0+1, v_1_0_1, v_1_0_0 {
24926 if v_1_0_1.Op != OpConst8 {
24929 c := auxIntToInt8(v_1_0_1.AuxInt)
24933 v.reset(OpRotateLeft8)
24934 v0 := b.NewValue0(v.Pos, OpNeg8, y.Type)
24941 // match: (RotateLeft8 x (Add64 y (Const64 [c])))
24943 // result: (RotateLeft8 x y)
24946 if v_1.Op != OpAdd64 {
24950 v_1_0 := v_1.Args[0]
24951 v_1_1 := v_1.Args[1]
24952 for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
24954 if v_1_1.Op != OpConst64 {
24957 c := auxIntToInt64(v_1_1.AuxInt)
24961 v.reset(OpRotateLeft8)
24967 // match: (RotateLeft8 x (Add32 y (Const32 [c])))
24969 // result: (RotateLeft8 x y)
24972 if v_1.Op != OpAdd32 {
24976 v_1_0 := v_1.Args[0]
24977 v_1_1 := v_1.Args[1]
24978 for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
24980 if v_1_1.Op != OpConst32 {
24983 c := auxIntToInt32(v_1_1.AuxInt)
24987 v.reset(OpRotateLeft8)
24993 // match: (RotateLeft8 x (Add16 y (Const16 [c])))
24995 // result: (RotateLeft8 x y)
24998 if v_1.Op != OpAdd16 {
25002 v_1_0 := v_1.Args[0]
25003 v_1_1 := v_1.Args[1]
25004 for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
25006 if v_1_1.Op != OpConst16 {
25009 c := auxIntToInt16(v_1_1.AuxInt)
25013 v.reset(OpRotateLeft8)
25019 // match: (RotateLeft8 x (Add8 y (Const8 [c])))
25021 // result: (RotateLeft8 x y)
25024 if v_1.Op != OpAdd8 {
25028 v_1_0 := v_1.Args[0]
25029 v_1_1 := v_1.Args[1]
25030 for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
25032 if v_1_1.Op != OpConst8 {
25035 c := auxIntToInt8(v_1_1.AuxInt)
25039 v.reset(OpRotateLeft8)
25045 // match: (RotateLeft8 x (Sub64 (Const64 [c]) y))
25047 // result: (RotateLeft8 x (Neg64 <y.Type> y))
25050 if v_1.Op != OpSub64 {
25054 v_1_0 := v_1.Args[0]
25055 if v_1_0.Op != OpConst64 {
25058 c := auxIntToInt64(v_1_0.AuxInt)
25062 v.reset(OpRotateLeft8)
25063 v0 := b.NewValue0(v.Pos, OpNeg64, y.Type)
25068 // match: (RotateLeft8 x (Sub32 (Const32 [c]) y))
25070 // result: (RotateLeft8 x (Neg32 <y.Type> y))
25073 if v_1.Op != OpSub32 {
25077 v_1_0 := v_1.Args[0]
25078 if v_1_0.Op != OpConst32 {
25081 c := auxIntToInt32(v_1_0.AuxInt)
25085 v.reset(OpRotateLeft8)
25086 v0 := b.NewValue0(v.Pos, OpNeg32, y.Type)
25091 // match: (RotateLeft8 x (Sub16 (Const16 [c]) y))
25093 // result: (RotateLeft8 x (Neg16 <y.Type> y))
25096 if v_1.Op != OpSub16 {
25100 v_1_0 := v_1.Args[0]
25101 if v_1_0.Op != OpConst16 {
25104 c := auxIntToInt16(v_1_0.AuxInt)
25108 v.reset(OpRotateLeft8)
25109 v0 := b.NewValue0(v.Pos, OpNeg16, y.Type)
25114 // match: (RotateLeft8 x (Sub8 (Const8 [c]) y))
25116 // result: (RotateLeft8 x (Neg8 <y.Type> y))
25119 if v_1.Op != OpSub8 {
25123 v_1_0 := v_1.Args[0]
25124 if v_1_0.Op != OpConst8 {
25127 c := auxIntToInt8(v_1_0.AuxInt)
25131 v.reset(OpRotateLeft8)
25132 v0 := b.NewValue0(v.Pos, OpNeg8, y.Type)
25137 // match: (RotateLeft8 x (Const64 <t> [c]))
25138 // cond: config.PtrSize == 4
25139 // result: (RotateLeft8 x (Const32 <t> [int32(c)]))
25142 if v_1.Op != OpConst64 {
25146 c := auxIntToInt64(v_1.AuxInt)
25147 if !(config.PtrSize == 4) {
25150 v.reset(OpRotateLeft8)
25151 v0 := b.NewValue0(v.Pos, OpConst32, t)
25152 v0.AuxInt = int32ToAuxInt(int32(c))
25156 // match: (RotateLeft8 (RotateLeft8 x c) d)
25157 // cond: c.Type.Size() == 8 && d.Type.Size() == 8
25158 // result: (RotateLeft8 x (Add64 <c.Type> c d))
25160 if v_0.Op != OpRotateLeft8 {
25166 if !(c.Type.Size() == 8 && d.Type.Size() == 8) {
25169 v.reset(OpRotateLeft8)
25170 v0 := b.NewValue0(v.Pos, OpAdd64, c.Type)
25175 // match: (RotateLeft8 (RotateLeft8 x c) d)
25176 // cond: c.Type.Size() == 4 && d.Type.Size() == 4
25177 // result: (RotateLeft8 x (Add32 <c.Type> c d))
25179 if v_0.Op != OpRotateLeft8 {
25185 if !(c.Type.Size() == 4 && d.Type.Size() == 4) {
25188 v.reset(OpRotateLeft8)
25189 v0 := b.NewValue0(v.Pos, OpAdd32, c.Type)
25194 // match: (RotateLeft8 (RotateLeft8 x c) d)
25195 // cond: c.Type.Size() == 2 && d.Type.Size() == 2
25196 // result: (RotateLeft8 x (Add16 <c.Type> c d))
25198 if v_0.Op != OpRotateLeft8 {
25204 if !(c.Type.Size() == 2 && d.Type.Size() == 2) {
25207 v.reset(OpRotateLeft8)
25208 v0 := b.NewValue0(v.Pos, OpAdd16, c.Type)
25213 // match: (RotateLeft8 (RotateLeft8 x c) d)
25214 // cond: c.Type.Size() == 1 && d.Type.Size() == 1
25215 // result: (RotateLeft8 x (Add8 <c.Type> c d))
25217 if v_0.Op != OpRotateLeft8 {
25223 if !(c.Type.Size() == 1 && d.Type.Size() == 1) {
25226 v.reset(OpRotateLeft8)
25227 v0 := b.NewValue0(v.Pos, OpAdd8, c.Type)
// rewriteValuegeneric_OpRound32F applies the generic rewrite rules for
// Round32F; the visible rule matches a Const32F operand (the result line is
// elided in this listing). Generated from _gen/generic.rules — do not hand-edit.
25234 func rewriteValuegeneric_OpRound32F(v *Value) bool {
25236 // match: (Round32F x:(Const32F))
25240 if x.Op != OpConst32F {
// rewriteValuegeneric_OpRound64F applies the generic rewrite rules for
// Round64F; the visible rule matches a Const64F operand (the result line is
// elided in this listing). Generated from _gen/generic.rules — do not hand-edit.
25248 func rewriteValuegeneric_OpRound64F(v *Value) bool {
25250 // match: (Round64F x:(Const64F))
25254 if x.Op != OpConst64F {
// rewriteValuegeneric_OpRoundToEven constant-folds RoundToEven of a Const64F
// using math.RoundToEven. Generated from _gen/generic.rules — do not hand-edit.
25262 func rewriteValuegeneric_OpRoundToEven(v *Value) bool {
25264 // match: (RoundToEven (Const64F [c]))
25265 // result: (Const64F [math.RoundToEven(c)])
25267 if v_0.Op != OpConst64F {
25270 c := auxIntToFloat64(v_0.AuxInt)
25271 v.reset(OpConst64F)
25272 v.AuxInt = float64ToAuxInt(math.RoundToEven(c))
// rewriteValuegeneric_OpRsh16Ux16 canonicalizes a constant 16-bit shift amount
// to a Const64 (Rsh16Ux64) and folds a shift of constant zero to Const16 [0].
// Generated from _gen/generic.rules — do not hand-edit.
25277 func rewriteValuegeneric_OpRsh16Ux16(v *Value) bool {
25281 // match: (Rsh16Ux16 <t> x (Const16 [c]))
25282 // result: (Rsh16Ux64 x (Const64 <t> [int64(uint16(c))]))
25286 if v_1.Op != OpConst16 {
25289 c := auxIntToInt16(v_1.AuxInt)
25290 v.reset(OpRsh16Ux64)
25291 v0 := b.NewValue0(v.Pos, OpConst64, t)
25292 v0.AuxInt = int64ToAuxInt(int64(uint16(c)))
25296 // match: (Rsh16Ux16 (Const16 [0]) _)
25297 // result: (Const16 [0])
25299 if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
25303 v.AuxInt = int16ToAuxInt(0)
// rewriteValuegeneric_OpRsh16Ux32 canonicalizes a constant 32-bit shift amount
// to a Const64 (Rsh16Ux64) and folds a shift of constant zero to Const16 [0].
// Generated from _gen/generic.rules — do not hand-edit.
25308 func rewriteValuegeneric_OpRsh16Ux32(v *Value) bool {
25312 // match: (Rsh16Ux32 <t> x (Const32 [c]))
25313 // result: (Rsh16Ux64 x (Const64 <t> [int64(uint32(c))]))
25317 if v_1.Op != OpConst32 {
25320 c := auxIntToInt32(v_1.AuxInt)
25321 v.reset(OpRsh16Ux64)
25322 v0 := b.NewValue0(v.Pos, OpConst64, t)
25323 v0.AuxInt = int64ToAuxInt(int64(uint32(c)))
25327 // match: (Rsh16Ux32 (Const16 [0]) _)
25328 // result: (Const16 [0])
25330 if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
25334 v.AuxInt = int16ToAuxInt(0)
// rewriteValuegeneric_OpRsh16Ux64 applies the generic rules for unsigned
// 16-bit right shift by a 64-bit amount: constant folding, shift-by-zero,
// zero operand, over-width (>= 16) shifts to zero, merging successive shifts
// (guarded by uaddOvf), and rewriting Lsh/Rsh pairs into And masks or
// ZeroExt/Trunc. Generated from _gen/generic.rules — do not hand-edit.
25339 func rewriteValuegeneric_OpRsh16Ux64(v *Value) bool {
25343 typ := &b.Func.Config.Types
25344 // match: (Rsh16Ux64 (Const16 [c]) (Const64 [d]))
25345 // result: (Const16 [int16(uint16(c) >> uint64(d))])
25347 if v_0.Op != OpConst16 {
25350 c := auxIntToInt16(v_0.AuxInt)
25351 if v_1.Op != OpConst64 {
25354 d := auxIntToInt64(v_1.AuxInt)
25356 v.AuxInt = int16ToAuxInt(int16(uint16(c) >> uint64(d)))
25359 // match: (Rsh16Ux64 x (Const64 [0]))
25363 if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 {
25369 // match: (Rsh16Ux64 (Const16 [0]) _)
25370 // result: (Const16 [0])
25372 if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
25376 v.AuxInt = int16ToAuxInt(0)
25379 // match: (Rsh16Ux64 _ (Const64 [c]))
25380 // cond: uint64(c) >= 16
25381 // result: (Const16 [0])
25383 if v_1.Op != OpConst64 {
25386 c := auxIntToInt64(v_1.AuxInt)
25387 if !(uint64(c) >= 16) {
25391 v.AuxInt = int16ToAuxInt(0)
25394 // match: (Rsh16Ux64 <t> (Rsh16Ux64 x (Const64 [c])) (Const64 [d]))
25395 // cond: !uaddOvf(c,d)
25396 // result: (Rsh16Ux64 x (Const64 <t> [c+d]))
25399 if v_0.Op != OpRsh16Ux64 {
25404 v_0_1 := v_0.Args[1]
25405 if v_0_1.Op != OpConst64 {
25408 c := auxIntToInt64(v_0_1.AuxInt)
25409 if v_1.Op != OpConst64 {
25412 d := auxIntToInt64(v_1.AuxInt)
25413 if !(!uaddOvf(c, d)) {
25416 v.reset(OpRsh16Ux64)
25417 v0 := b.NewValue0(v.Pos, OpConst64, t)
25418 v0.AuxInt = int64ToAuxInt(c + d)
25422 // match: (Rsh16Ux64 (Rsh16x64 x _) (Const64 <t> [15]))
25423 // result: (Rsh16Ux64 x (Const64 <t> [15]))
25425 if v_0.Op != OpRsh16x64 {
25429 if v_1.Op != OpConst64 {
25433 if auxIntToInt64(v_1.AuxInt) != 15 {
25436 v.reset(OpRsh16Ux64)
25437 v0 := b.NewValue0(v.Pos, OpConst64, t)
25438 v0.AuxInt = int64ToAuxInt(15)
25442 // match: (Rsh16Ux64 i:(Lsh16x64 x (Const64 [c])) (Const64 [c]))
25443 // cond: c >= 0 && c < 16 && i.Uses == 1
25444 // result: (And16 x (Const16 <v.Type> [int16(^uint16(0)>>c)]))
25447 if i.Op != OpLsh16x64 {
25453 if i_1.Op != OpConst64 {
25456 c := auxIntToInt64(i_1.AuxInt)
25457 if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != c || !(c >= 0 && c < 16 && i.Uses == 1) {
25461 v0 := b.NewValue0(v.Pos, OpConst16, v.Type)
25462 v0.AuxInt = int16ToAuxInt(int16(^uint16(0) >> c))
25466 // match: (Rsh16Ux64 (Lsh16x64 (Rsh16Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
25467 // cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
25468 // result: (Rsh16Ux64 x (Const64 <typ.UInt64> [c1-c2+c3]))
25470 if v_0.Op != OpLsh16x64 {
25474 v_0_0 := v_0.Args[0]
25475 if v_0_0.Op != OpRsh16Ux64 {
25480 v_0_0_1 := v_0_0.Args[1]
25481 if v_0_0_1.Op != OpConst64 {
25484 c1 := auxIntToInt64(v_0_0_1.AuxInt)
25485 v_0_1 := v_0.Args[1]
25486 if v_0_1.Op != OpConst64 {
25489 c2 := auxIntToInt64(v_0_1.AuxInt)
25490 if v_1.Op != OpConst64 {
25493 c3 := auxIntToInt64(v_1.AuxInt)
25494 if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
25497 v.reset(OpRsh16Ux64)
25498 v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
25499 v0.AuxInt = int64ToAuxInt(c1 - c2 + c3)
25503 // match: (Rsh16Ux64 (Lsh16x64 x (Const64 [8])) (Const64 [8]))
25504 // result: (ZeroExt8to16 (Trunc16to8 <typ.UInt8> x))
25506 if v_0.Op != OpLsh16x64 {
25511 v_0_1 := v_0.Args[1]
25512 if v_0_1.Op != OpConst64 || auxIntToInt64(v_0_1.AuxInt) != 8 || v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 8 {
25515 v.reset(OpZeroExt8to16)
25516 v0 := b.NewValue0(v.Pos, OpTrunc16to8, typ.UInt8)
// rewriteValuegeneric_OpRsh16Ux8 canonicalizes a constant 8-bit shift amount
// to a Const64 (Rsh16Ux64) and folds a shift of constant zero to Const16 [0].
// Generated from _gen/generic.rules — do not hand-edit.
25523 func rewriteValuegeneric_OpRsh16Ux8(v *Value) bool {
25527 // match: (Rsh16Ux8 <t> x (Const8 [c]))
25528 // result: (Rsh16Ux64 x (Const64 <t> [int64(uint8(c))]))
25532 if v_1.Op != OpConst8 {
25535 c := auxIntToInt8(v_1.AuxInt)
25536 v.reset(OpRsh16Ux64)
25537 v0 := b.NewValue0(v.Pos, OpConst64, t)
25538 v0.AuxInt = int64ToAuxInt(int64(uint8(c)))
25542 // match: (Rsh16Ux8 (Const16 [0]) _)
25543 // result: (Const16 [0])
25545 if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
25549 v.AuxInt = int16ToAuxInt(0)
// rewriteValuegeneric_OpRsh16x16 canonicalizes a constant 16-bit shift amount
// to a Const64 (Rsh16x64) and folds a shift of constant zero to Const16 [0].
// Generated from _gen/generic.rules — do not hand-edit.
25554 func rewriteValuegeneric_OpRsh16x16(v *Value) bool {
25558 // match: (Rsh16x16 <t> x (Const16 [c]))
25559 // result: (Rsh16x64 x (Const64 <t> [int64(uint16(c))]))
25563 if v_1.Op != OpConst16 {
25566 c := auxIntToInt16(v_1.AuxInt)
25567 v.reset(OpRsh16x64)
25568 v0 := b.NewValue0(v.Pos, OpConst64, t)
25569 v0.AuxInt = int64ToAuxInt(int64(uint16(c)))
25573 // match: (Rsh16x16 (Const16 [0]) _)
25574 // result: (Const16 [0])
25576 if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
25580 v.AuxInt = int16ToAuxInt(0)
// rewriteValuegeneric_OpRsh16x32 canonicalizes a constant 32-bit shift amount
// to a Const64 (Rsh16x64) and folds a shift of constant zero to Const16 [0].
// Generated from _gen/generic.rules — do not hand-edit.
25585 func rewriteValuegeneric_OpRsh16x32(v *Value) bool {
25589 // match: (Rsh16x32 <t> x (Const32 [c]))
25590 // result: (Rsh16x64 x (Const64 <t> [int64(uint32(c))]))
25594 if v_1.Op != OpConst32 {
25597 c := auxIntToInt32(v_1.AuxInt)
25598 v.reset(OpRsh16x64)
25599 v0 := b.NewValue0(v.Pos, OpConst64, t)
25600 v0.AuxInt = int64ToAuxInt(int64(uint32(c)))
25604 // match: (Rsh16x32 (Const16 [0]) _)
25605 // result: (Const16 [0])
25607 if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
25611 v.AuxInt = int16ToAuxInt(0)
// rewriteValuegeneric_OpRsh16x64 applies the generic rules for signed 16-bit
// right shift by a 64-bit amount: constant folding, shift-by-zero, zero
// operand, merging successive signed shifts (guarded by uaddOvf), and
// rewriting an Lsh16x64/Rsh16x64 pair by 8 into SignExt8to16(Trunc16to8 x).
// Generated from _gen/generic.rules — do not hand-edit.
25616 func rewriteValuegeneric_OpRsh16x64(v *Value) bool {
25620 typ := &b.Func.Config.Types
25621 // match: (Rsh16x64 (Const16 [c]) (Const64 [d]))
25622 // result: (Const16 [c >> uint64(d)])
25624 if v_0.Op != OpConst16 {
25627 c := auxIntToInt16(v_0.AuxInt)
25628 if v_1.Op != OpConst64 {
25631 d := auxIntToInt64(v_1.AuxInt)
25633 v.AuxInt = int16ToAuxInt(c >> uint64(d))
25636 // match: (Rsh16x64 x (Const64 [0]))
25640 if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 {
25646 // match: (Rsh16x64 (Const16 [0]) _)
25647 // result: (Const16 [0])
25649 if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
25653 v.AuxInt = int16ToAuxInt(0)
25656 // match: (Rsh16x64 <t> (Rsh16x64 x (Const64 [c])) (Const64 [d]))
25657 // cond: !uaddOvf(c,d)
25658 // result: (Rsh16x64 x (Const64 <t> [c+d]))
25661 if v_0.Op != OpRsh16x64 {
25666 v_0_1 := v_0.Args[1]
25667 if v_0_1.Op != OpConst64 {
25670 c := auxIntToInt64(v_0_1.AuxInt)
25671 if v_1.Op != OpConst64 {
25674 d := auxIntToInt64(v_1.AuxInt)
25675 if !(!uaddOvf(c, d)) {
25678 v.reset(OpRsh16x64)
25679 v0 := b.NewValue0(v.Pos, OpConst64, t)
25680 v0.AuxInt = int64ToAuxInt(c + d)
25684 // match: (Rsh16x64 (Lsh16x64 x (Const64 [8])) (Const64 [8]))
25685 // result: (SignExt8to16 (Trunc16to8 <typ.Int8> x))
25687 if v_0.Op != OpLsh16x64 {
25692 v_0_1 := v_0.Args[1]
25693 if v_0_1.Op != OpConst64 || auxIntToInt64(v_0_1.AuxInt) != 8 || v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 8 {
25696 v.reset(OpSignExt8to16)
25697 v0 := b.NewValue0(v.Pos, OpTrunc16to8, typ.Int8)
// rewriteValuegeneric_OpRsh16x8 canonicalizes a constant 8-bit shift amount
// to a Const64 (Rsh16x64) and folds a shift of constant zero to Const16 [0].
// Generated from _gen/generic.rules — do not hand-edit.
25704 func rewriteValuegeneric_OpRsh16x8(v *Value) bool {
25708 // match: (Rsh16x8 <t> x (Const8 [c]))
25709 // result: (Rsh16x64 x (Const64 <t> [int64(uint8(c))]))
25713 if v_1.Op != OpConst8 {
25716 c := auxIntToInt8(v_1.AuxInt)
25717 v.reset(OpRsh16x64)
25718 v0 := b.NewValue0(v.Pos, OpConst64, t)
25719 v0.AuxInt = int64ToAuxInt(int64(uint8(c)))
25723 // match: (Rsh16x8 (Const16 [0]) _)
25724 // result: (Const16 [0])
25726 if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
25730 v.AuxInt = int16ToAuxInt(0)
// rewriteValuegeneric_OpRsh32Ux16 canonicalizes a constant 16-bit shift amount
// to a Const64 (Rsh32Ux64) and folds a shift of constant zero to Const32 [0].
// Generated from _gen/generic.rules — do not hand-edit.
25735 func rewriteValuegeneric_OpRsh32Ux16(v *Value) bool {
25739 // match: (Rsh32Ux16 <t> x (Const16 [c]))
25740 // result: (Rsh32Ux64 x (Const64 <t> [int64(uint16(c))]))
25744 if v_1.Op != OpConst16 {
25747 c := auxIntToInt16(v_1.AuxInt)
25748 v.reset(OpRsh32Ux64)
25749 v0 := b.NewValue0(v.Pos, OpConst64, t)
25750 v0.AuxInt = int64ToAuxInt(int64(uint16(c)))
25754 // match: (Rsh32Ux16 (Const32 [0]) _)
25755 // result: (Const32 [0])
25757 if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
25761 v.AuxInt = int32ToAuxInt(0)
// rewriteValuegeneric_OpRsh32Ux32 canonicalizes a constant 32-bit shift amount
// to a Const64 (Rsh32Ux64) and folds a shift of constant zero to Const32 [0].
// Generated from _gen/generic.rules — do not hand-edit.
25766 func rewriteValuegeneric_OpRsh32Ux32(v *Value) bool {
25770 // match: (Rsh32Ux32 <t> x (Const32 [c]))
25771 // result: (Rsh32Ux64 x (Const64 <t> [int64(uint32(c))]))
25775 if v_1.Op != OpConst32 {
25778 c := auxIntToInt32(v_1.AuxInt)
25779 v.reset(OpRsh32Ux64)
25780 v0 := b.NewValue0(v.Pos, OpConst64, t)
25781 v0.AuxInt = int64ToAuxInt(int64(uint32(c)))
25785 // match: (Rsh32Ux32 (Const32 [0]) _)
25786 // result: (Const32 [0])
25788 if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
25792 v.AuxInt = int32ToAuxInt(0)
// rewriteValuegeneric_OpRsh32Ux64 applies the generic rules for unsigned
// 32-bit right shift by a 64-bit amount: constant folding, shift-by-zero,
// zero operand, over-width (>= 32) shifts to zero, merging successive shifts
// (guarded by uaddOvf), and rewriting Lsh/Rsh pairs into And masks or
// ZeroExt/Trunc (by 24 and by 16). Generated from _gen/generic.rules —
// do not hand-edit.
25797 func rewriteValuegeneric_OpRsh32Ux64(v *Value) bool {
25801 typ := &b.Func.Config.Types
25802 // match: (Rsh32Ux64 (Const32 [c]) (Const64 [d]))
25803 // result: (Const32 [int32(uint32(c) >> uint64(d))])
25805 if v_0.Op != OpConst32 {
25808 c := auxIntToInt32(v_0.AuxInt)
25809 if v_1.Op != OpConst64 {
25812 d := auxIntToInt64(v_1.AuxInt)
25814 v.AuxInt = int32ToAuxInt(int32(uint32(c) >> uint64(d)))
25817 // match: (Rsh32Ux64 x (Const64 [0]))
25821 if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 {
25827 // match: (Rsh32Ux64 (Const32 [0]) _)
25828 // result: (Const32 [0])
25830 if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
25834 v.AuxInt = int32ToAuxInt(0)
25837 // match: (Rsh32Ux64 _ (Const64 [c]))
25838 // cond: uint64(c) >= 32
25839 // result: (Const32 [0])
25841 if v_1.Op != OpConst64 {
25844 c := auxIntToInt64(v_1.AuxInt)
25845 if !(uint64(c) >= 32) {
25849 v.AuxInt = int32ToAuxInt(0)
25852 // match: (Rsh32Ux64 <t> (Rsh32Ux64 x (Const64 [c])) (Const64 [d]))
25853 // cond: !uaddOvf(c,d)
25854 // result: (Rsh32Ux64 x (Const64 <t> [c+d]))
25857 if v_0.Op != OpRsh32Ux64 {
25862 v_0_1 := v_0.Args[1]
25863 if v_0_1.Op != OpConst64 {
25866 c := auxIntToInt64(v_0_1.AuxInt)
25867 if v_1.Op != OpConst64 {
25870 d := auxIntToInt64(v_1.AuxInt)
25871 if !(!uaddOvf(c, d)) {
25874 v.reset(OpRsh32Ux64)
25875 v0 := b.NewValue0(v.Pos, OpConst64, t)
25876 v0.AuxInt = int64ToAuxInt(c + d)
25880 // match: (Rsh32Ux64 (Rsh32x64 x _) (Const64 <t> [31]))
25881 // result: (Rsh32Ux64 x (Const64 <t> [31]))
25883 if v_0.Op != OpRsh32x64 {
25887 if v_1.Op != OpConst64 {
25891 if auxIntToInt64(v_1.AuxInt) != 31 {
25894 v.reset(OpRsh32Ux64)
25895 v0 := b.NewValue0(v.Pos, OpConst64, t)
25896 v0.AuxInt = int64ToAuxInt(31)
25900 // match: (Rsh32Ux64 i:(Lsh32x64 x (Const64 [c])) (Const64 [c]))
25901 // cond: c >= 0 && c < 32 && i.Uses == 1
25902 // result: (And32 x (Const32 <v.Type> [int32(^uint32(0)>>c)]))
25905 if i.Op != OpLsh32x64 {
25911 if i_1.Op != OpConst64 {
25914 c := auxIntToInt64(i_1.AuxInt)
25915 if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != c || !(c >= 0 && c < 32 && i.Uses == 1) {
25919 v0 := b.NewValue0(v.Pos, OpConst32, v.Type)
25920 v0.AuxInt = int32ToAuxInt(int32(^uint32(0) >> c))
25924 // match: (Rsh32Ux64 (Lsh32x64 (Rsh32Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
25925 // cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
25926 // result: (Rsh32Ux64 x (Const64 <typ.UInt64> [c1-c2+c3]))
25928 if v_0.Op != OpLsh32x64 {
25932 v_0_0 := v_0.Args[0]
25933 if v_0_0.Op != OpRsh32Ux64 {
25938 v_0_0_1 := v_0_0.Args[1]
25939 if v_0_0_1.Op != OpConst64 {
25942 c1 := auxIntToInt64(v_0_0_1.AuxInt)
25943 v_0_1 := v_0.Args[1]
25944 if v_0_1.Op != OpConst64 {
25947 c2 := auxIntToInt64(v_0_1.AuxInt)
25948 if v_1.Op != OpConst64 {
25951 c3 := auxIntToInt64(v_1.AuxInt)
25952 if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
25955 v.reset(OpRsh32Ux64)
25956 v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
25957 v0.AuxInt = int64ToAuxInt(c1 - c2 + c3)
25961 // match: (Rsh32Ux64 (Lsh32x64 x (Const64 [24])) (Const64 [24]))
25962 // result: (ZeroExt8to32 (Trunc32to8 <typ.UInt8> x))
25964 if v_0.Op != OpLsh32x64 {
25969 v_0_1 := v_0.Args[1]
25970 if v_0_1.Op != OpConst64 || auxIntToInt64(v_0_1.AuxInt) != 24 || v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 24 {
25973 v.reset(OpZeroExt8to32)
25974 v0 := b.NewValue0(v.Pos, OpTrunc32to8, typ.UInt8)
25979 // match: (Rsh32Ux64 (Lsh32x64 x (Const64 [16])) (Const64 [16]))
25980 // result: (ZeroExt16to32 (Trunc32to16 <typ.UInt16> x))
25982 if v_0.Op != OpLsh32x64 {
25987 v_0_1 := v_0.Args[1]
25988 if v_0_1.Op != OpConst64 || auxIntToInt64(v_0_1.AuxInt) != 16 || v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 16 {
25991 v.reset(OpZeroExt16to32)
25992 v0 := b.NewValue0(v.Pos, OpTrunc32to16, typ.UInt16)
// rewriteValuegeneric_OpRsh32Ux8 canonicalizes a constant 8-bit shift amount
// to a Const64 (Rsh32Ux64) and folds a shift of constant zero to Const32 [0].
// Generated from _gen/generic.rules — do not hand-edit.
25999 func rewriteValuegeneric_OpRsh32Ux8(v *Value) bool {
26003 // match: (Rsh32Ux8 <t> x (Const8 [c]))
26004 // result: (Rsh32Ux64 x (Const64 <t> [int64(uint8(c))]))
26008 if v_1.Op != OpConst8 {
26011 c := auxIntToInt8(v_1.AuxInt)
26012 v.reset(OpRsh32Ux64)
26013 v0 := b.NewValue0(v.Pos, OpConst64, t)
26014 v0.AuxInt = int64ToAuxInt(int64(uint8(c)))
26018 // match: (Rsh32Ux8 (Const32 [0]) _)
26019 // result: (Const32 [0])
26021 if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
26025 v.AuxInt = int32ToAuxInt(0)
// rewriteValuegeneric_OpRsh32x16 canonicalizes a constant 16-bit shift amount
// to a Const64 (Rsh32x64) and folds a shift of constant zero to Const32 [0].
// Generated from _gen/generic.rules — do not hand-edit.
26030 func rewriteValuegeneric_OpRsh32x16(v *Value) bool {
26034 // match: (Rsh32x16 <t> x (Const16 [c]))
26035 // result: (Rsh32x64 x (Const64 <t> [int64(uint16(c))]))
26039 if v_1.Op != OpConst16 {
26042 c := auxIntToInt16(v_1.AuxInt)
26043 v.reset(OpRsh32x64)
26044 v0 := b.NewValue0(v.Pos, OpConst64, t)
26045 v0.AuxInt = int64ToAuxInt(int64(uint16(c)))
26049 // match: (Rsh32x16 (Const32 [0]) _)
26050 // result: (Const32 [0])
26052 if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
26056 v.AuxInt = int32ToAuxInt(0)
// rewriteValuegeneric_OpRsh32x32 canonicalizes a constant 32-bit shift amount
// to a Const64 (Rsh32x64) and folds a shift of constant zero to Const32 [0].
// Generated from _gen/generic.rules — do not hand-edit.
26061 func rewriteValuegeneric_OpRsh32x32(v *Value) bool {
26065 // match: (Rsh32x32 <t> x (Const32 [c]))
26066 // result: (Rsh32x64 x (Const64 <t> [int64(uint32(c))]))
26070 if v_1.Op != OpConst32 {
26073 c := auxIntToInt32(v_1.AuxInt)
26074 v.reset(OpRsh32x64)
26075 v0 := b.NewValue0(v.Pos, OpConst64, t)
26076 v0.AuxInt = int64ToAuxInt(int64(uint32(c)))
26080 // match: (Rsh32x32 (Const32 [0]) _)
26081 // result: (Const32 [0])
26083 if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
26087 v.AuxInt = int32ToAuxInt(0)
// rewriteValuegeneric_OpRsh32x64 applies the generic rules for signed 32-bit
// right shift by a 64-bit amount: constant folding, shift-by-zero, zero
// operand, merging successive signed shifts (guarded by uaddOvf), and
// rewriting Lsh32x64/Rsh32x64 pairs by 24 and 16 into SignExt(Trunc x).
// Generated from _gen/generic.rules — do not hand-edit.
26092 func rewriteValuegeneric_OpRsh32x64(v *Value) bool {
26096 typ := &b.Func.Config.Types
26097 // match: (Rsh32x64 (Const32 [c]) (Const64 [d]))
26098 // result: (Const32 [c >> uint64(d)])
26100 if v_0.Op != OpConst32 {
26103 c := auxIntToInt32(v_0.AuxInt)
26104 if v_1.Op != OpConst64 {
26107 d := auxIntToInt64(v_1.AuxInt)
26109 v.AuxInt = int32ToAuxInt(c >> uint64(d))
26112 // match: (Rsh32x64 x (Const64 [0]))
26116 if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 {
26122 // match: (Rsh32x64 (Const32 [0]) _)
26123 // result: (Const32 [0])
26125 if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
26129 v.AuxInt = int32ToAuxInt(0)
26132 // match: (Rsh32x64 <t> (Rsh32x64 x (Const64 [c])) (Const64 [d]))
26133 // cond: !uaddOvf(c,d)
26134 // result: (Rsh32x64 x (Const64 <t> [c+d]))
26137 if v_0.Op != OpRsh32x64 {
26142 v_0_1 := v_0.Args[1]
26143 if v_0_1.Op != OpConst64 {
26146 c := auxIntToInt64(v_0_1.AuxInt)
26147 if v_1.Op != OpConst64 {
26150 d := auxIntToInt64(v_1.AuxInt)
26151 if !(!uaddOvf(c, d)) {
26154 v.reset(OpRsh32x64)
26155 v0 := b.NewValue0(v.Pos, OpConst64, t)
26156 v0.AuxInt = int64ToAuxInt(c + d)
26160 // match: (Rsh32x64 (Lsh32x64 x (Const64 [24])) (Const64 [24]))
26161 // result: (SignExt8to32 (Trunc32to8 <typ.Int8> x))
26163 if v_0.Op != OpLsh32x64 {
26168 v_0_1 := v_0.Args[1]
26169 if v_0_1.Op != OpConst64 || auxIntToInt64(v_0_1.AuxInt) != 24 || v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 24 {
26172 v.reset(OpSignExt8to32)
26173 v0 := b.NewValue0(v.Pos, OpTrunc32to8, typ.Int8)
26178 // match: (Rsh32x64 (Lsh32x64 x (Const64 [16])) (Const64 [16]))
26179 // result: (SignExt16to32 (Trunc32to16 <typ.Int16> x))
26181 if v_0.Op != OpLsh32x64 {
26186 v_0_1 := v_0.Args[1]
26187 if v_0_1.Op != OpConst64 || auxIntToInt64(v_0_1.AuxInt) != 16 || v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 16 {
26190 v.reset(OpSignExt16to32)
26191 v0 := b.NewValue0(v.Pos, OpTrunc32to16, typ.Int16)
// rewriteValuegeneric_OpRsh32x8 canonicalizes a constant 8-bit shift amount
// to a Const64 (Rsh32x64) and folds a shift of constant zero to Const32 [0].
// Generated from _gen/generic.rules — do not hand-edit.
26198 func rewriteValuegeneric_OpRsh32x8(v *Value) bool {
26202 // match: (Rsh32x8 <t> x (Const8 [c]))
26203 // result: (Rsh32x64 x (Const64 <t> [int64(uint8(c))]))
26207 if v_1.Op != OpConst8 {
26210 c := auxIntToInt8(v_1.AuxInt)
26211 v.reset(OpRsh32x64)
26212 v0 := b.NewValue0(v.Pos, OpConst64, t)
26213 v0.AuxInt = int64ToAuxInt(int64(uint8(c)))
26217 // match: (Rsh32x8 (Const32 [0]) _)
26218 // result: (Const32 [0])
26220 if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
26224 v.AuxInt = int32ToAuxInt(0)
// rewriteValuegeneric_OpRsh64Ux16 canonicalizes a constant 16-bit shift amount
// to a Const64 (Rsh64Ux64) and folds a shift of constant zero to Const64 [0].
// Generated from _gen/generic.rules — do not hand-edit.
26229 func rewriteValuegeneric_OpRsh64Ux16(v *Value) bool {
26233 // match: (Rsh64Ux16 <t> x (Const16 [c]))
26234 // result: (Rsh64Ux64 x (Const64 <t> [int64(uint16(c))]))
26238 if v_1.Op != OpConst16 {
26241 c := auxIntToInt16(v_1.AuxInt)
26242 v.reset(OpRsh64Ux64)
26243 v0 := b.NewValue0(v.Pos, OpConst64, t)
26244 v0.AuxInt = int64ToAuxInt(int64(uint16(c)))
26248 // match: (Rsh64Ux16 (Const64 [0]) _)
26249 // result: (Const64 [0])
26251 if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
26255 v.AuxInt = int64ToAuxInt(0)
// rewriteValuegeneric_OpRsh64Ux32 canonicalizes a constant 32-bit shift amount
// to a Const64 (Rsh64Ux64) and folds a shift of constant zero to Const64 [0].
// Generated from _gen/generic.rules — do not hand-edit.
26260 func rewriteValuegeneric_OpRsh64Ux32(v *Value) bool {
26264 // match: (Rsh64Ux32 <t> x (Const32 [c]))
26265 // result: (Rsh64Ux64 x (Const64 <t> [int64(uint32(c))]))
26269 if v_1.Op != OpConst32 {
26272 c := auxIntToInt32(v_1.AuxInt)
26273 v.reset(OpRsh64Ux64)
26274 v0 := b.NewValue0(v.Pos, OpConst64, t)
26275 v0.AuxInt = int64ToAuxInt(int64(uint32(c)))
26279 // match: (Rsh64Ux32 (Const64 [0]) _)
26280 // result: (Const64 [0])
26282 if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
26286 v.AuxInt = int64ToAuxInt(0)
// rewriteValuegeneric_OpRsh64Ux64 applies the generic rules for unsigned
// 64-bit right shift by a 64-bit amount: constant folding, shift-by-zero,
// zero operand, over-width (>= 64) shifts to zero, merging successive shifts
// (guarded by uaddOvf), and rewriting Lsh/Rsh pairs into And masks or
// ZeroExt/Trunc (by 56, 48 and 32). Generated from _gen/generic.rules —
// do not hand-edit.
26291 func rewriteValuegeneric_OpRsh64Ux64(v *Value) bool {
26295 typ := &b.Func.Config.Types
26296 // match: (Rsh64Ux64 (Const64 [c]) (Const64 [d]))
26297 // result: (Const64 [int64(uint64(c) >> uint64(d))])
26299 if v_0.Op != OpConst64 {
26302 c := auxIntToInt64(v_0.AuxInt)
26303 if v_1.Op != OpConst64 {
26306 d := auxIntToInt64(v_1.AuxInt)
26308 v.AuxInt = int64ToAuxInt(int64(uint64(c) >> uint64(d)))
26311 // match: (Rsh64Ux64 x (Const64 [0]))
26315 if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 {
26321 // match: (Rsh64Ux64 (Const64 [0]) _)
26322 // result: (Const64 [0])
26324 if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
26328 v.AuxInt = int64ToAuxInt(0)
26331 // match: (Rsh64Ux64 _ (Const64 [c]))
26332 // cond: uint64(c) >= 64
26333 // result: (Const64 [0])
26335 if v_1.Op != OpConst64 {
26338 c := auxIntToInt64(v_1.AuxInt)
26339 if !(uint64(c) >= 64) {
26343 v.AuxInt = int64ToAuxInt(0)
26346 // match: (Rsh64Ux64 <t> (Rsh64Ux64 x (Const64 [c])) (Const64 [d]))
26347 // cond: !uaddOvf(c,d)
26348 // result: (Rsh64Ux64 x (Const64 <t> [c+d]))
26351 if v_0.Op != OpRsh64Ux64 {
26356 v_0_1 := v_0.Args[1]
26357 if v_0_1.Op != OpConst64 {
26360 c := auxIntToInt64(v_0_1.AuxInt)
26361 if v_1.Op != OpConst64 {
26364 d := auxIntToInt64(v_1.AuxInt)
26365 if !(!uaddOvf(c, d)) {
26368 v.reset(OpRsh64Ux64)
26369 v0 := b.NewValue0(v.Pos, OpConst64, t)
26370 v0.AuxInt = int64ToAuxInt(c + d)
26374 // match: (Rsh64Ux64 (Rsh64x64 x _) (Const64 <t> [63]))
26375 // result: (Rsh64Ux64 x (Const64 <t> [63]))
26377 if v_0.Op != OpRsh64x64 {
26381 if v_1.Op != OpConst64 {
26385 if auxIntToInt64(v_1.AuxInt) != 63 {
26388 v.reset(OpRsh64Ux64)
26389 v0 := b.NewValue0(v.Pos, OpConst64, t)
26390 v0.AuxInt = int64ToAuxInt(63)
26394 // match: (Rsh64Ux64 i:(Lsh64x64 x (Const64 [c])) (Const64 [c]))
26395 // cond: c >= 0 && c < 64 && i.Uses == 1
26396 // result: (And64 x (Const64 <v.Type> [int64(^uint64(0)>>c)]))
26399 if i.Op != OpLsh64x64 {
26405 if i_1.Op != OpConst64 {
26408 c := auxIntToInt64(i_1.AuxInt)
26409 if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != c || !(c >= 0 && c < 64 && i.Uses == 1) {
26413 v0 := b.NewValue0(v.Pos, OpConst64, v.Type)
26414 v0.AuxInt = int64ToAuxInt(int64(^uint64(0) >> c))
26418 // match: (Rsh64Ux64 (Lsh64x64 (Rsh64Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
26419 // cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
26420 // result: (Rsh64Ux64 x (Const64 <typ.UInt64> [c1-c2+c3]))
26422 if v_0.Op != OpLsh64x64 {
26426 v_0_0 := v_0.Args[0]
26427 if v_0_0.Op != OpRsh64Ux64 {
26432 v_0_0_1 := v_0_0.Args[1]
26433 if v_0_0_1.Op != OpConst64 {
26436 c1 := auxIntToInt64(v_0_0_1.AuxInt)
26437 v_0_1 := v_0.Args[1]
26438 if v_0_1.Op != OpConst64 {
26441 c2 := auxIntToInt64(v_0_1.AuxInt)
26442 if v_1.Op != OpConst64 {
26445 c3 := auxIntToInt64(v_1.AuxInt)
26446 if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
26449 v.reset(OpRsh64Ux64)
26450 v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
26451 v0.AuxInt = int64ToAuxInt(c1 - c2 + c3)
26455 // match: (Rsh64Ux64 (Lsh64x64 x (Const64 [56])) (Const64 [56]))
26456 // result: (ZeroExt8to64 (Trunc64to8 <typ.UInt8> x))
26458 if v_0.Op != OpLsh64x64 {
26463 v_0_1 := v_0.Args[1]
26464 if v_0_1.Op != OpConst64 || auxIntToInt64(v_0_1.AuxInt) != 56 || v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 56 {
26467 v.reset(OpZeroExt8to64)
26468 v0 := b.NewValue0(v.Pos, OpTrunc64to8, typ.UInt8)
26473 // match: (Rsh64Ux64 (Lsh64x64 x (Const64 [48])) (Const64 [48]))
26474 // result: (ZeroExt16to64 (Trunc64to16 <typ.UInt16> x))
26476 if v_0.Op != OpLsh64x64 {
26481 v_0_1 := v_0.Args[1]
26482 if v_0_1.Op != OpConst64 || auxIntToInt64(v_0_1.AuxInt) != 48 || v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 48 {
26485 v.reset(OpZeroExt16to64)
26486 v0 := b.NewValue0(v.Pos, OpTrunc64to16, typ.UInt16)
26491 // match: (Rsh64Ux64 (Lsh64x64 x (Const64 [32])) (Const64 [32]))
26492 // result: (ZeroExt32to64 (Trunc64to32 <typ.UInt32> x))
26494 if v_0.Op != OpLsh64x64 {
26499 v_0_1 := v_0.Args[1]
26500 if v_0_1.Op != OpConst64 || auxIntToInt64(v_0_1.AuxInt) != 32 || v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 32 {
26503 v.reset(OpZeroExt32to64)
26504 v0 := b.NewValue0(v.Pos, OpTrunc64to32, typ.UInt32)
// rewriteValuegeneric_OpRsh64Ux8 canonicalizes a constant 8-bit shift amount
// to a Const64 (Rsh64Ux64) and folds a shift of constant zero to Const64 [0].
// Generated from _gen/generic.rules — do not hand-edit.
26511 func rewriteValuegeneric_OpRsh64Ux8(v *Value) bool {
26515 // match: (Rsh64Ux8 <t> x (Const8 [c]))
26516 // result: (Rsh64Ux64 x (Const64 <t> [int64(uint8(c))]))
26520 if v_1.Op != OpConst8 {
26523 c := auxIntToInt8(v_1.AuxInt)
26524 v.reset(OpRsh64Ux64)
26525 v0 := b.NewValue0(v.Pos, OpConst64, t)
26526 v0.AuxInt = int64ToAuxInt(int64(uint8(c)))
26530 // match: (Rsh64Ux8 (Const64 [0]) _)
26531 // result: (Const64 [0])
26533 if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
26537 v.AuxInt = int64ToAuxInt(0)
// rewriteValuegeneric_OpRsh64x16 canonicalizes a constant 16-bit shift amount
// to a Const64 (Rsh64x64) and folds a shift of constant zero to Const64 [0].
// Generated from _gen/generic.rules — do not hand-edit.
26542 func rewriteValuegeneric_OpRsh64x16(v *Value) bool {
26546 // match: (Rsh64x16 <t> x (Const16 [c]))
26547 // result: (Rsh64x64 x (Const64 <t> [int64(uint16(c))]))
26551 if v_1.Op != OpConst16 {
26554 c := auxIntToInt16(v_1.AuxInt)
26555 v.reset(OpRsh64x64)
26556 v0 := b.NewValue0(v.Pos, OpConst64, t)
26557 v0.AuxInt = int64ToAuxInt(int64(uint16(c)))
26561 // match: (Rsh64x16 (Const64 [0]) _)
26562 // result: (Const64 [0])
26564 if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
26568 v.AuxInt = int64ToAuxInt(0)
// rewriteValuegeneric_OpRsh64x32 canonicalizes a constant 32-bit shift amount
// to a Const64 (Rsh64x64) and folds a shift of constant zero to Const64 [0].
// Generated from _gen/generic.rules — do not hand-edit.
26573 func rewriteValuegeneric_OpRsh64x32(v *Value) bool {
26577 // match: (Rsh64x32 <t> x (Const32 [c]))
26578 // result: (Rsh64x64 x (Const64 <t> [int64(uint32(c))]))
26582 if v_1.Op != OpConst32 {
26585 c := auxIntToInt32(v_1.AuxInt)
26586 v.reset(OpRsh64x64)
26587 v0 := b.NewValue0(v.Pos, OpConst64, t)
26588 v0.AuxInt = int64ToAuxInt(int64(uint32(c)))
26592 // match: (Rsh64x32 (Const64 [0]) _)
26593 // result: (Const64 [0])
26595 if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
26599 v.AuxInt = int64ToAuxInt(0)
// rewriteValuegeneric_OpRsh64x64 applies the generic rules for signed 64-bit
// right shift by a 64-bit amount: constant folding, shift-by-zero, zero
// operand, merging successive signed shifts (guarded by uaddOvf), and
// rewriting Lsh64x64/Rsh64x64 pairs by 56, 48 and 32 into SignExt(Trunc x).
// Generated from _gen/generic.rules — do not hand-edit.
26604 func rewriteValuegeneric_OpRsh64x64(v *Value) bool {
26608 typ := &b.Func.Config.Types
26609 // match: (Rsh64x64 (Const64 [c]) (Const64 [d]))
26610 // result: (Const64 [c >> uint64(d)])
26612 if v_0.Op != OpConst64 {
26615 c := auxIntToInt64(v_0.AuxInt)
26616 if v_1.Op != OpConst64 {
26619 d := auxIntToInt64(v_1.AuxInt)
26621 v.AuxInt = int64ToAuxInt(c >> uint64(d))
26624 // match: (Rsh64x64 x (Const64 [0]))
26628 if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 {
26634 // match: (Rsh64x64 (Const64 [0]) _)
26635 // result: (Const64 [0])
26637 if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
26641 v.AuxInt = int64ToAuxInt(0)
26644 // match: (Rsh64x64 <t> (Rsh64x64 x (Const64 [c])) (Const64 [d]))
26645 // cond: !uaddOvf(c,d)
26646 // result: (Rsh64x64 x (Const64 <t> [c+d]))
26649 if v_0.Op != OpRsh64x64 {
26654 v_0_1 := v_0.Args[1]
26655 if v_0_1.Op != OpConst64 {
26658 c := auxIntToInt64(v_0_1.AuxInt)
26659 if v_1.Op != OpConst64 {
26662 d := auxIntToInt64(v_1.AuxInt)
26663 if !(!uaddOvf(c, d)) {
26666 v.reset(OpRsh64x64)
26667 v0 := b.NewValue0(v.Pos, OpConst64, t)
26668 v0.AuxInt = int64ToAuxInt(c + d)
26672 // match: (Rsh64x64 (Lsh64x64 x (Const64 [56])) (Const64 [56]))
26673 // result: (SignExt8to64 (Trunc64to8 <typ.Int8> x))
26675 if v_0.Op != OpLsh64x64 {
26680 v_0_1 := v_0.Args[1]
26681 if v_0_1.Op != OpConst64 || auxIntToInt64(v_0_1.AuxInt) != 56 || v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 56 {
26684 v.reset(OpSignExt8to64)
26685 v0 := b.NewValue0(v.Pos, OpTrunc64to8, typ.Int8)
26690 // match: (Rsh64x64 (Lsh64x64 x (Const64 [48])) (Const64 [48]))
26691 // result: (SignExt16to64 (Trunc64to16 <typ.Int16> x))
26693 if v_0.Op != OpLsh64x64 {
26698 v_0_1 := v_0.Args[1]
26699 if v_0_1.Op != OpConst64 || auxIntToInt64(v_0_1.AuxInt) != 48 || v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 48 {
26702 v.reset(OpSignExt16to64)
26703 v0 := b.NewValue0(v.Pos, OpTrunc64to16, typ.Int16)
26708 // match: (Rsh64x64 (Lsh64x64 x (Const64 [32])) (Const64 [32]))
26709 // result: (SignExt32to64 (Trunc64to32 <typ.Int32> x))
26711 if v_0.Op != OpLsh64x64 {
26716 v_0_1 := v_0.Args[1]
26717 if v_0_1.Op != OpConst64 || auxIntToInt64(v_0_1.AuxInt) != 32 || v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 32 {
26720 v.reset(OpSignExt32to64)
26721 v0 := b.NewValue0(v.Pos, OpTrunc64to32, typ.Int32)
26728 func rewriteValuegeneric_OpRsh64x8(v *Value) bool {
26732 // match: (Rsh64x8 <t> x (Const8 [c]))
26733 // result: (Rsh64x64 x (Const64 <t> [int64(uint8(c))]))
26737 if v_1.Op != OpConst8 {
26740 c := auxIntToInt8(v_1.AuxInt)
26741 v.reset(OpRsh64x64)
26742 v0 := b.NewValue0(v.Pos, OpConst64, t)
26743 v0.AuxInt = int64ToAuxInt(int64(uint8(c)))
26747 // match: (Rsh64x8 (Const64 [0]) _)
26748 // result: (Const64 [0])
26750 if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
26754 v.AuxInt = int64ToAuxInt(0)
26759 func rewriteValuegeneric_OpRsh8Ux16(v *Value) bool {
26763 // match: (Rsh8Ux16 <t> x (Const16 [c]))
26764 // result: (Rsh8Ux64 x (Const64 <t> [int64(uint16(c))]))
26768 if v_1.Op != OpConst16 {
26771 c := auxIntToInt16(v_1.AuxInt)
26772 v.reset(OpRsh8Ux64)
26773 v0 := b.NewValue0(v.Pos, OpConst64, t)
26774 v0.AuxInt = int64ToAuxInt(int64(uint16(c)))
26778 // match: (Rsh8Ux16 (Const8 [0]) _)
26779 // result: (Const8 [0])
26781 if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
26785 v.AuxInt = int8ToAuxInt(0)
26790 func rewriteValuegeneric_OpRsh8Ux32(v *Value) bool {
26794 // match: (Rsh8Ux32 <t> x (Const32 [c]))
26795 // result: (Rsh8Ux64 x (Const64 <t> [int64(uint32(c))]))
26799 if v_1.Op != OpConst32 {
26802 c := auxIntToInt32(v_1.AuxInt)
26803 v.reset(OpRsh8Ux64)
26804 v0 := b.NewValue0(v.Pos, OpConst64, t)
26805 v0.AuxInt = int64ToAuxInt(int64(uint32(c)))
26809 // match: (Rsh8Ux32 (Const8 [0]) _)
26810 // result: (Const8 [0])
26812 if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
26816 v.AuxInt = int8ToAuxInt(0)
26821 func rewriteValuegeneric_OpRsh8Ux64(v *Value) bool {
26825 typ := &b.Func.Config.Types
26826 // match: (Rsh8Ux64 (Const8 [c]) (Const64 [d]))
26827 // result: (Const8 [int8(uint8(c) >> uint64(d))])
26829 if v_0.Op != OpConst8 {
26832 c := auxIntToInt8(v_0.AuxInt)
26833 if v_1.Op != OpConst64 {
26836 d := auxIntToInt64(v_1.AuxInt)
26838 v.AuxInt = int8ToAuxInt(int8(uint8(c) >> uint64(d)))
26841 // match: (Rsh8Ux64 x (Const64 [0]))
26845 if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 {
26851 // match: (Rsh8Ux64 (Const8 [0]) _)
26852 // result: (Const8 [0])
26854 if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
26858 v.AuxInt = int8ToAuxInt(0)
26861 // match: (Rsh8Ux64 _ (Const64 [c]))
26862 // cond: uint64(c) >= 8
26863 // result: (Const8 [0])
26865 if v_1.Op != OpConst64 {
26868 c := auxIntToInt64(v_1.AuxInt)
26869 if !(uint64(c) >= 8) {
26873 v.AuxInt = int8ToAuxInt(0)
26876 // match: (Rsh8Ux64 <t> (Rsh8Ux64 x (Const64 [c])) (Const64 [d]))
26877 // cond: !uaddOvf(c,d)
26878 // result: (Rsh8Ux64 x (Const64 <t> [c+d]))
26881 if v_0.Op != OpRsh8Ux64 {
26886 v_0_1 := v_0.Args[1]
26887 if v_0_1.Op != OpConst64 {
26890 c := auxIntToInt64(v_0_1.AuxInt)
26891 if v_1.Op != OpConst64 {
26894 d := auxIntToInt64(v_1.AuxInt)
26895 if !(!uaddOvf(c, d)) {
26898 v.reset(OpRsh8Ux64)
26899 v0 := b.NewValue0(v.Pos, OpConst64, t)
26900 v0.AuxInt = int64ToAuxInt(c + d)
26904 // match: (Rsh8Ux64 (Rsh8x64 x _) (Const64 <t> [7] ))
26905 // result: (Rsh8Ux64 x (Const64 <t> [7] ))
26907 if v_0.Op != OpRsh8x64 {
26911 if v_1.Op != OpConst64 {
26915 if auxIntToInt64(v_1.AuxInt) != 7 {
26918 v.reset(OpRsh8Ux64)
26919 v0 := b.NewValue0(v.Pos, OpConst64, t)
26920 v0.AuxInt = int64ToAuxInt(7)
26924 // match: (Rsh8Ux64 i:(Lsh8x64 x (Const64 [c])) (Const64 [c]))
26925 // cond: c >= 0 && c < 8 && i.Uses == 1
26926 // result: (And8 x (Const8 <v.Type> [int8 (^uint8 (0)>>c)]))
26929 if i.Op != OpLsh8x64 {
26935 if i_1.Op != OpConst64 {
26938 c := auxIntToInt64(i_1.AuxInt)
26939 if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != c || !(c >= 0 && c < 8 && i.Uses == 1) {
26943 v0 := b.NewValue0(v.Pos, OpConst8, v.Type)
26944 v0.AuxInt = int8ToAuxInt(int8(^uint8(0) >> c))
26948 // match: (Rsh8Ux64 (Lsh8x64 (Rsh8Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
26949 // cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
26950 // result: (Rsh8Ux64 x (Const64 <typ.UInt64> [c1-c2+c3]))
26952 if v_0.Op != OpLsh8x64 {
26956 v_0_0 := v_0.Args[0]
26957 if v_0_0.Op != OpRsh8Ux64 {
26962 v_0_0_1 := v_0_0.Args[1]
26963 if v_0_0_1.Op != OpConst64 {
26966 c1 := auxIntToInt64(v_0_0_1.AuxInt)
26967 v_0_1 := v_0.Args[1]
26968 if v_0_1.Op != OpConst64 {
26971 c2 := auxIntToInt64(v_0_1.AuxInt)
26972 if v_1.Op != OpConst64 {
26975 c3 := auxIntToInt64(v_1.AuxInt)
26976 if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
26979 v.reset(OpRsh8Ux64)
26980 v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
26981 v0.AuxInt = int64ToAuxInt(c1 - c2 + c3)
26987 func rewriteValuegeneric_OpRsh8Ux8(v *Value) bool {
26991 // match: (Rsh8Ux8 <t> x (Const8 [c]))
26992 // result: (Rsh8Ux64 x (Const64 <t> [int64(uint8(c))]))
26996 if v_1.Op != OpConst8 {
26999 c := auxIntToInt8(v_1.AuxInt)
27000 v.reset(OpRsh8Ux64)
27001 v0 := b.NewValue0(v.Pos, OpConst64, t)
27002 v0.AuxInt = int64ToAuxInt(int64(uint8(c)))
27006 // match: (Rsh8Ux8 (Const8 [0]) _)
27007 // result: (Const8 [0])
27009 if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
27013 v.AuxInt = int8ToAuxInt(0)
27018 func rewriteValuegeneric_OpRsh8x16(v *Value) bool {
27022 // match: (Rsh8x16 <t> x (Const16 [c]))
27023 // result: (Rsh8x64 x (Const64 <t> [int64(uint16(c))]))
27027 if v_1.Op != OpConst16 {
27030 c := auxIntToInt16(v_1.AuxInt)
27032 v0 := b.NewValue0(v.Pos, OpConst64, t)
27033 v0.AuxInt = int64ToAuxInt(int64(uint16(c)))
27037 // match: (Rsh8x16 (Const8 [0]) _)
27038 // result: (Const8 [0])
27040 if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
27044 v.AuxInt = int8ToAuxInt(0)
27049 func rewriteValuegeneric_OpRsh8x32(v *Value) bool {
27053 // match: (Rsh8x32 <t> x (Const32 [c]))
27054 // result: (Rsh8x64 x (Const64 <t> [int64(uint32(c))]))
27058 if v_1.Op != OpConst32 {
27061 c := auxIntToInt32(v_1.AuxInt)
27063 v0 := b.NewValue0(v.Pos, OpConst64, t)
27064 v0.AuxInt = int64ToAuxInt(int64(uint32(c)))
27068 // match: (Rsh8x32 (Const8 [0]) _)
27069 // result: (Const8 [0])
27071 if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
27075 v.AuxInt = int8ToAuxInt(0)
27080 func rewriteValuegeneric_OpRsh8x64(v *Value) bool {
27084 // match: (Rsh8x64 (Const8 [c]) (Const64 [d]))
27085 // result: (Const8 [c >> uint64(d)])
27087 if v_0.Op != OpConst8 {
27090 c := auxIntToInt8(v_0.AuxInt)
27091 if v_1.Op != OpConst64 {
27094 d := auxIntToInt64(v_1.AuxInt)
27096 v.AuxInt = int8ToAuxInt(c >> uint64(d))
27099 // match: (Rsh8x64 x (Const64 [0]))
27103 if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 {
27109 // match: (Rsh8x64 (Const8 [0]) _)
27110 // result: (Const8 [0])
27112 if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
27116 v.AuxInt = int8ToAuxInt(0)
27119 // match: (Rsh8x64 <t> (Rsh8x64 x (Const64 [c])) (Const64 [d]))
27120 // cond: !uaddOvf(c,d)
27121 // result: (Rsh8x64 x (Const64 <t> [c+d]))
27124 if v_0.Op != OpRsh8x64 {
27129 v_0_1 := v_0.Args[1]
27130 if v_0_1.Op != OpConst64 {
27133 c := auxIntToInt64(v_0_1.AuxInt)
27134 if v_1.Op != OpConst64 {
27137 d := auxIntToInt64(v_1.AuxInt)
27138 if !(!uaddOvf(c, d)) {
27142 v0 := b.NewValue0(v.Pos, OpConst64, t)
27143 v0.AuxInt = int64ToAuxInt(c + d)
27149 func rewriteValuegeneric_OpRsh8x8(v *Value) bool {
27153 // match: (Rsh8x8 <t> x (Const8 [c]))
27154 // result: (Rsh8x64 x (Const64 <t> [int64(uint8(c))]))
27158 if v_1.Op != OpConst8 {
27161 c := auxIntToInt8(v_1.AuxInt)
27163 v0 := b.NewValue0(v.Pos, OpConst64, t)
27164 v0.AuxInt = int64ToAuxInt(int64(uint8(c)))
27168 // match: (Rsh8x8 (Const8 [0]) _)
27169 // result: (Const8 [0])
27171 if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
27175 v.AuxInt = int8ToAuxInt(0)
27180 func rewriteValuegeneric_OpSelect0(v *Value) bool {
27182 // match: (Select0 (Div128u (Const64 [0]) lo y))
27183 // result: (Div64u lo y)
27185 if v_0.Op != OpDiv128u {
27189 v_0_0 := v_0.Args[0]
27190 if v_0_0.Op != OpConst64 || auxIntToInt64(v_0_0.AuxInt) != 0 {
27198 // match: (Select0 (Mul32uover (Const32 [1]) x))
27201 if v_0.Op != OpMul32uover {
27205 v_0_0 := v_0.Args[0]
27206 v_0_1 := v_0.Args[1]
27207 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
27208 if v_0_0.Op != OpConst32 || auxIntToInt32(v_0_0.AuxInt) != 1 {
27217 // match: (Select0 (Mul64uover (Const64 [1]) x))
27220 if v_0.Op != OpMul64uover {
27224 v_0_0 := v_0.Args[0]
27225 v_0_1 := v_0.Args[1]
27226 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
27227 if v_0_0.Op != OpConst64 || auxIntToInt64(v_0_0.AuxInt) != 1 {
27236 // match: (Select0 (Mul64uover (Const64 [0]) x))
27237 // result: (Const64 [0])
27239 if v_0.Op != OpMul64uover {
27243 v_0_0 := v_0.Args[0]
27244 v_0_1 := v_0.Args[1]
27245 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
27246 if v_0_0.Op != OpConst64 || auxIntToInt64(v_0_0.AuxInt) != 0 {
27250 v.AuxInt = int64ToAuxInt(0)
27255 // match: (Select0 (Mul32uover (Const32 [0]) x))
27256 // result: (Const32 [0])
27258 if v_0.Op != OpMul32uover {
27262 v_0_0 := v_0.Args[0]
27263 v_0_1 := v_0.Args[1]
27264 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
27265 if v_0_0.Op != OpConst32 || auxIntToInt32(v_0_0.AuxInt) != 0 {
27269 v.AuxInt = int32ToAuxInt(0)
27276 func rewriteValuegeneric_OpSelect1(v *Value) bool {
27278 // match: (Select1 (Div128u (Const64 [0]) lo y))
27279 // result: (Mod64u lo y)
27281 if v_0.Op != OpDiv128u {
27285 v_0_0 := v_0.Args[0]
27286 if v_0_0.Op != OpConst64 || auxIntToInt64(v_0_0.AuxInt) != 0 {
27294 // match: (Select1 (Mul32uover (Const32 [1]) x))
27295 // result: (ConstBool [false])
27297 if v_0.Op != OpMul32uover {
27301 v_0_0 := v_0.Args[0]
27302 v_0_1 := v_0.Args[1]
27303 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
27304 if v_0_0.Op != OpConst32 || auxIntToInt32(v_0_0.AuxInt) != 1 {
27307 v.reset(OpConstBool)
27308 v.AuxInt = boolToAuxInt(false)
27313 // match: (Select1 (Mul64uover (Const64 [1]) x))
27314 // result: (ConstBool [false])
27316 if v_0.Op != OpMul64uover {
27320 v_0_0 := v_0.Args[0]
27321 v_0_1 := v_0.Args[1]
27322 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
27323 if v_0_0.Op != OpConst64 || auxIntToInt64(v_0_0.AuxInt) != 1 {
27326 v.reset(OpConstBool)
27327 v.AuxInt = boolToAuxInt(false)
27332 // match: (Select1 (Mul64uover (Const64 [0]) x))
27333 // result: (ConstBool [false])
27335 if v_0.Op != OpMul64uover {
27339 v_0_0 := v_0.Args[0]
27340 v_0_1 := v_0.Args[1]
27341 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
27342 if v_0_0.Op != OpConst64 || auxIntToInt64(v_0_0.AuxInt) != 0 {
27345 v.reset(OpConstBool)
27346 v.AuxInt = boolToAuxInt(false)
27351 // match: (Select1 (Mul32uover (Const32 [0]) x))
27352 // result: (ConstBool [false])
27354 if v_0.Op != OpMul32uover {
27358 v_0_0 := v_0.Args[0]
27359 v_0_1 := v_0.Args[1]
27360 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
27361 if v_0_0.Op != OpConst32 || auxIntToInt32(v_0_0.AuxInt) != 0 {
27364 v.reset(OpConstBool)
27365 v.AuxInt = boolToAuxInt(false)
27372 func rewriteValuegeneric_OpSelectN(v *Value) bool {
27375 config := b.Func.Config
27376 // match: (SelectN [0] (MakeResult x ___))
27379 if auxIntToInt64(v.AuxInt) != 0 || v_0.Op != OpMakeResult || len(v_0.Args) < 1 {
27386 // match: (SelectN [1] (MakeResult x y ___))
27389 if auxIntToInt64(v.AuxInt) != 1 || v_0.Op != OpMakeResult || len(v_0.Args) < 2 {
27396 // match: (SelectN [2] (MakeResult x y z ___))
27399 if auxIntToInt64(v.AuxInt) != 2 || v_0.Op != OpMakeResult || len(v_0.Args) < 3 {
27406 // match: (SelectN [0] call:(StaticCall {sym} sptr (Const64 [c]) mem))
27407 // cond: isInlinableMemclr(config, int64(c)) && isSameCall(sym, "runtime.memclrNoHeapPointers") && call.Uses == 1 && clobber(call)
27408 // result: (Zero {types.Types[types.TUINT8]} [int64(c)] sptr mem)
27410 if auxIntToInt64(v.AuxInt) != 0 {
27414 if call.Op != OpStaticCall || len(call.Args) != 3 {
27417 sym := auxToCall(call.Aux)
27418 mem := call.Args[2]
27419 sptr := call.Args[0]
27420 call_1 := call.Args[1]
27421 if call_1.Op != OpConst64 {
27424 c := auxIntToInt64(call_1.AuxInt)
27425 if !(isInlinableMemclr(config, int64(c)) && isSameCall(sym, "runtime.memclrNoHeapPointers") && call.Uses == 1 && clobber(call)) {
27429 v.AuxInt = int64ToAuxInt(int64(c))
27430 v.Aux = typeToAux(types.Types[types.TUINT8])
27431 v.AddArg2(sptr, mem)
27434 // match: (SelectN [0] call:(StaticCall {sym} sptr (Const32 [c]) mem))
27435 // cond: isInlinableMemclr(config, int64(c)) && isSameCall(sym, "runtime.memclrNoHeapPointers") && call.Uses == 1 && clobber(call)
27436 // result: (Zero {types.Types[types.TUINT8]} [int64(c)] sptr mem)
27438 if auxIntToInt64(v.AuxInt) != 0 {
27442 if call.Op != OpStaticCall || len(call.Args) != 3 {
27445 sym := auxToCall(call.Aux)
27446 mem := call.Args[2]
27447 sptr := call.Args[0]
27448 call_1 := call.Args[1]
27449 if call_1.Op != OpConst32 {
27452 c := auxIntToInt32(call_1.AuxInt)
27453 if !(isInlinableMemclr(config, int64(c)) && isSameCall(sym, "runtime.memclrNoHeapPointers") && call.Uses == 1 && clobber(call)) {
27457 v.AuxInt = int64ToAuxInt(int64(c))
27458 v.Aux = typeToAux(types.Types[types.TUINT8])
27459 v.AddArg2(sptr, mem)
27462 // match: (SelectN [0] call:(StaticCall {sym} s1:(Store _ (Const64 [sz]) s2:(Store _ src s3:(Store {t} _ dst mem)))))
27463 // cond: sz >= 0 && isSameCall(sym, "runtime.memmove") && s1.Uses == 1 && s2.Uses == 1 && s3.Uses == 1 && isInlinableMemmove(dst, src, int64(sz), config) && clobber(s1, s2, s3, call)
27464 // result: (Move {types.Types[types.TUINT8]} [int64(sz)] dst src mem)
27466 if auxIntToInt64(v.AuxInt) != 0 {
27470 if call.Op != OpStaticCall || len(call.Args) != 1 {
27473 sym := auxToCall(call.Aux)
27475 if s1.Op != OpStore {
27480 if s1_1.Op != OpConst64 {
27483 sz := auxIntToInt64(s1_1.AuxInt)
27485 if s2.Op != OpStore {
27491 if s3.Op != OpStore {
27496 if !(sz >= 0 && isSameCall(sym, "runtime.memmove") && s1.Uses == 1 && s2.Uses == 1 && s3.Uses == 1 && isInlinableMemmove(dst, src, int64(sz), config) && clobber(s1, s2, s3, call)) {
27500 v.AuxInt = int64ToAuxInt(int64(sz))
27501 v.Aux = typeToAux(types.Types[types.TUINT8])
27502 v.AddArg3(dst, src, mem)
27505 // match: (SelectN [0] call:(StaticCall {sym} s1:(Store _ (Const32 [sz]) s2:(Store _ src s3:(Store {t} _ dst mem)))))
27506 // cond: sz >= 0 && isSameCall(sym, "runtime.memmove") && s1.Uses == 1 && s2.Uses == 1 && s3.Uses == 1 && isInlinableMemmove(dst, src, int64(sz), config) && clobber(s1, s2, s3, call)
27507 // result: (Move {types.Types[types.TUINT8]} [int64(sz)] dst src mem)
27509 if auxIntToInt64(v.AuxInt) != 0 {
27513 if call.Op != OpStaticCall || len(call.Args) != 1 {
27516 sym := auxToCall(call.Aux)
27518 if s1.Op != OpStore {
27523 if s1_1.Op != OpConst32 {
27526 sz := auxIntToInt32(s1_1.AuxInt)
27528 if s2.Op != OpStore {
27534 if s3.Op != OpStore {
27539 if !(sz >= 0 && isSameCall(sym, "runtime.memmove") && s1.Uses == 1 && s2.Uses == 1 && s3.Uses == 1 && isInlinableMemmove(dst, src, int64(sz), config) && clobber(s1, s2, s3, call)) {
27543 v.AuxInt = int64ToAuxInt(int64(sz))
27544 v.Aux = typeToAux(types.Types[types.TUINT8])
27545 v.AddArg3(dst, src, mem)
27548 // match: (SelectN [0] call:(StaticCall {sym} dst src (Const64 [sz]) mem))
27549 // cond: sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)
27550 // result: (Move {types.Types[types.TUINT8]} [int64(sz)] dst src mem)
27552 if auxIntToInt64(v.AuxInt) != 0 {
27556 if call.Op != OpStaticCall || len(call.Args) != 4 {
27559 sym := auxToCall(call.Aux)
27560 mem := call.Args[3]
27561 dst := call.Args[0]
27562 src := call.Args[1]
27563 call_2 := call.Args[2]
27564 if call_2.Op != OpConst64 {
27567 sz := auxIntToInt64(call_2.AuxInt)
27568 if !(sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)) {
27572 v.AuxInt = int64ToAuxInt(int64(sz))
27573 v.Aux = typeToAux(types.Types[types.TUINT8])
27574 v.AddArg3(dst, src, mem)
27577 // match: (SelectN [0] call:(StaticCall {sym} dst src (Const32 [sz]) mem))
27578 // cond: sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)
27579 // result: (Move {types.Types[types.TUINT8]} [int64(sz)] dst src mem)
27581 if auxIntToInt64(v.AuxInt) != 0 {
27585 if call.Op != OpStaticCall || len(call.Args) != 4 {
27588 sym := auxToCall(call.Aux)
27589 mem := call.Args[3]
27590 dst := call.Args[0]
27591 src := call.Args[1]
27592 call_2 := call.Args[2]
27593 if call_2.Op != OpConst32 {
27596 sz := auxIntToInt32(call_2.AuxInt)
27597 if !(sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)) {
27601 v.AuxInt = int64ToAuxInt(int64(sz))
27602 v.Aux = typeToAux(types.Types[types.TUINT8])
27603 v.AddArg3(dst, src, mem)
27606 // match: (SelectN [0] call:(StaticLECall {sym} dst src (Const64 [sz]) mem))
27607 // cond: sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)
27608 // result: (Move {types.Types[types.TUINT8]} [int64(sz)] dst src mem)
27610 if auxIntToInt64(v.AuxInt) != 0 {
27614 if call.Op != OpStaticLECall || len(call.Args) != 4 {
27617 sym := auxToCall(call.Aux)
27618 mem := call.Args[3]
27619 dst := call.Args[0]
27620 src := call.Args[1]
27621 call_2 := call.Args[2]
27622 if call_2.Op != OpConst64 {
27625 sz := auxIntToInt64(call_2.AuxInt)
27626 if !(sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)) {
27630 v.AuxInt = int64ToAuxInt(int64(sz))
27631 v.Aux = typeToAux(types.Types[types.TUINT8])
27632 v.AddArg3(dst, src, mem)
27635 // match: (SelectN [0] call:(StaticLECall {sym} dst src (Const32 [sz]) mem))
27636 // cond: sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)
27637 // result: (Move {types.Types[types.TUINT8]} [int64(sz)] dst src mem)
27639 if auxIntToInt64(v.AuxInt) != 0 {
27643 if call.Op != OpStaticLECall || len(call.Args) != 4 {
27646 sym := auxToCall(call.Aux)
27647 mem := call.Args[3]
27648 dst := call.Args[0]
27649 src := call.Args[1]
27650 call_2 := call.Args[2]
27651 if call_2.Op != OpConst32 {
27654 sz := auxIntToInt32(call_2.AuxInt)
27655 if !(sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)) {
27659 v.AuxInt = int64ToAuxInt(int64(sz))
27660 v.Aux = typeToAux(types.Types[types.TUINT8])
27661 v.AddArg3(dst, src, mem)
27664 // match: (SelectN [0] call:(StaticLECall {sym} a x))
27665 // cond: needRaceCleanup(sym, call) && clobber(call)
27668 if auxIntToInt64(v.AuxInt) != 0 {
27672 if call.Op != OpStaticLECall || len(call.Args) != 2 {
27675 sym := auxToCall(call.Aux)
27677 if !(needRaceCleanup(sym, call) && clobber(call)) {
27683 // match: (SelectN [0] call:(StaticLECall {sym} x))
27684 // cond: needRaceCleanup(sym, call) && clobber(call)
27687 if auxIntToInt64(v.AuxInt) != 0 {
27691 if call.Op != OpStaticLECall || len(call.Args) != 1 {
27694 sym := auxToCall(call.Aux)
27696 if !(needRaceCleanup(sym, call) && clobber(call)) {
27702 // match: (SelectN [1] (StaticCall {sym} _ newLen:(Const64) _ _ _ _))
27703 // cond: v.Type.IsInteger() && isSameCall(sym, "runtime.growslice")
27706 if auxIntToInt64(v.AuxInt) != 1 || v_0.Op != OpStaticCall || len(v_0.Args) != 6 {
27709 sym := auxToCall(v_0.Aux)
27711 newLen := v_0.Args[1]
27712 if newLen.Op != OpConst64 || !(v.Type.IsInteger() && isSameCall(sym, "runtime.growslice")) {
27718 // match: (SelectN [1] (StaticCall {sym} _ newLen:(Const32) _ _ _ _))
27719 // cond: v.Type.IsInteger() && isSameCall(sym, "runtime.growslice")
27722 if auxIntToInt64(v.AuxInt) != 1 || v_0.Op != OpStaticCall || len(v_0.Args) != 6 {
27725 sym := auxToCall(v_0.Aux)
27727 newLen := v_0.Args[1]
27728 if newLen.Op != OpConst32 || !(v.Type.IsInteger() && isSameCall(sym, "runtime.growslice")) {
27736 func rewriteValuegeneric_OpSignExt16to32(v *Value) bool {
27738 // match: (SignExt16to32 (Const16 [c]))
27739 // result: (Const32 [int32(c)])
27741 if v_0.Op != OpConst16 {
27744 c := auxIntToInt16(v_0.AuxInt)
27746 v.AuxInt = int32ToAuxInt(int32(c))
27749 // match: (SignExt16to32 (Trunc32to16 x:(Rsh32x64 _ (Const64 [s]))))
27753 if v_0.Op != OpTrunc32to16 {
27757 if x.Op != OpRsh32x64 {
27762 if x_1.Op != OpConst64 {
27765 s := auxIntToInt64(x_1.AuxInt)
27774 func rewriteValuegeneric_OpSignExt16to64(v *Value) bool {
27776 // match: (SignExt16to64 (Const16 [c]))
27777 // result: (Const64 [int64(c)])
27779 if v_0.Op != OpConst16 {
27782 c := auxIntToInt16(v_0.AuxInt)
27784 v.AuxInt = int64ToAuxInt(int64(c))
27787 // match: (SignExt16to64 (Trunc64to16 x:(Rsh64x64 _ (Const64 [s]))))
27791 if v_0.Op != OpTrunc64to16 {
27795 if x.Op != OpRsh64x64 {
27800 if x_1.Op != OpConst64 {
27803 s := auxIntToInt64(x_1.AuxInt)
27812 func rewriteValuegeneric_OpSignExt32to64(v *Value) bool {
27814 // match: (SignExt32to64 (Const32 [c]))
27815 // result: (Const64 [int64(c)])
27817 if v_0.Op != OpConst32 {
27820 c := auxIntToInt32(v_0.AuxInt)
27822 v.AuxInt = int64ToAuxInt(int64(c))
27825 // match: (SignExt32to64 (Trunc64to32 x:(Rsh64x64 _ (Const64 [s]))))
27829 if v_0.Op != OpTrunc64to32 {
27833 if x.Op != OpRsh64x64 {
27838 if x_1.Op != OpConst64 {
27841 s := auxIntToInt64(x_1.AuxInt)
27850 func rewriteValuegeneric_OpSignExt8to16(v *Value) bool {
27852 // match: (SignExt8to16 (Const8 [c]))
27853 // result: (Const16 [int16(c)])
27855 if v_0.Op != OpConst8 {
27858 c := auxIntToInt8(v_0.AuxInt)
27860 v.AuxInt = int16ToAuxInt(int16(c))
27863 // match: (SignExt8to16 (Trunc16to8 x:(Rsh16x64 _ (Const64 [s]))))
27867 if v_0.Op != OpTrunc16to8 {
27871 if x.Op != OpRsh16x64 {
27876 if x_1.Op != OpConst64 {
27879 s := auxIntToInt64(x_1.AuxInt)
27888 func rewriteValuegeneric_OpSignExt8to32(v *Value) bool {
27890 // match: (SignExt8to32 (Const8 [c]))
27891 // result: (Const32 [int32(c)])
27893 if v_0.Op != OpConst8 {
27896 c := auxIntToInt8(v_0.AuxInt)
27898 v.AuxInt = int32ToAuxInt(int32(c))
27901 // match: (SignExt8to32 (Trunc32to8 x:(Rsh32x64 _ (Const64 [s]))))
27905 if v_0.Op != OpTrunc32to8 {
27909 if x.Op != OpRsh32x64 {
27914 if x_1.Op != OpConst64 {
27917 s := auxIntToInt64(x_1.AuxInt)
27926 func rewriteValuegeneric_OpSignExt8to64(v *Value) bool {
27928 // match: (SignExt8to64 (Const8 [c]))
27929 // result: (Const64 [int64(c)])
27931 if v_0.Op != OpConst8 {
27934 c := auxIntToInt8(v_0.AuxInt)
27936 v.AuxInt = int64ToAuxInt(int64(c))
27939 // match: (SignExt8to64 (Trunc64to8 x:(Rsh64x64 _ (Const64 [s]))))
27943 if v_0.Op != OpTrunc64to8 {
27947 if x.Op != OpRsh64x64 {
27952 if x_1.Op != OpConst64 {
27955 s := auxIntToInt64(x_1.AuxInt)
27964 func rewriteValuegeneric_OpSliceCap(v *Value) bool {
27966 // match: (SliceCap (SliceMake _ _ (Const64 <t> [c])))
27967 // result: (Const64 <t> [c])
27969 if v_0.Op != OpSliceMake {
27973 v_0_2 := v_0.Args[2]
27974 if v_0_2.Op != OpConst64 {
27978 c := auxIntToInt64(v_0_2.AuxInt)
27981 v.AuxInt = int64ToAuxInt(c)
27984 // match: (SliceCap (SliceMake _ _ (Const32 <t> [c])))
27985 // result: (Const32 <t> [c])
27987 if v_0.Op != OpSliceMake {
27991 v_0_2 := v_0.Args[2]
27992 if v_0_2.Op != OpConst32 {
27996 c := auxIntToInt32(v_0_2.AuxInt)
27999 v.AuxInt = int32ToAuxInt(c)
28002 // match: (SliceCap (SliceMake _ _ (SliceCap x)))
28003 // result: (SliceCap x)
28005 if v_0.Op != OpSliceMake {
28009 v_0_2 := v_0.Args[2]
28010 if v_0_2.Op != OpSliceCap {
28014 v.reset(OpSliceCap)
28018 // match: (SliceCap (SliceMake _ _ (SliceLen x)))
28019 // result: (SliceLen x)
28021 if v_0.Op != OpSliceMake {
28025 v_0_2 := v_0.Args[2]
28026 if v_0_2.Op != OpSliceLen {
28030 v.reset(OpSliceLen)
28036 func rewriteValuegeneric_OpSliceLen(v *Value) bool {
28038 // match: (SliceLen (SliceMake _ (Const64 <t> [c]) _))
28039 // result: (Const64 <t> [c])
28041 if v_0.Op != OpSliceMake {
28045 v_0_1 := v_0.Args[1]
28046 if v_0_1.Op != OpConst64 {
28050 c := auxIntToInt64(v_0_1.AuxInt)
28053 v.AuxInt = int64ToAuxInt(c)
28056 // match: (SliceLen (SliceMake _ (Const32 <t> [c]) _))
28057 // result: (Const32 <t> [c])
28059 if v_0.Op != OpSliceMake {
28063 v_0_1 := v_0.Args[1]
28064 if v_0_1.Op != OpConst32 {
28068 c := auxIntToInt32(v_0_1.AuxInt)
28071 v.AuxInt = int32ToAuxInt(c)
28074 // match: (SliceLen (SliceMake _ (SliceLen x) _))
28075 // result: (SliceLen x)
28077 if v_0.Op != OpSliceMake {
28081 v_0_1 := v_0.Args[1]
28082 if v_0_1.Op != OpSliceLen {
28086 v.reset(OpSliceLen)
28090 // match: (SliceLen (SelectN [0] (StaticLECall {sym} _ newLen:(Const64) _ _ _ _)))
28091 // cond: isSameCall(sym, "runtime.growslice")
28094 if v_0.Op != OpSelectN || auxIntToInt64(v_0.AuxInt) != 0 {
28097 v_0_0 := v_0.Args[0]
28098 if v_0_0.Op != OpStaticLECall || len(v_0_0.Args) != 6 {
28101 sym := auxToCall(v_0_0.Aux)
28103 newLen := v_0_0.Args[1]
28104 if newLen.Op != OpConst64 || !(isSameCall(sym, "runtime.growslice")) {
28110 // match: (SliceLen (SelectN [0] (StaticLECall {sym} _ newLen:(Const32) _ _ _ _)))
28111 // cond: isSameCall(sym, "runtime.growslice")
28114 if v_0.Op != OpSelectN || auxIntToInt64(v_0.AuxInt) != 0 {
28117 v_0_0 := v_0.Args[0]
28118 if v_0_0.Op != OpStaticLECall || len(v_0_0.Args) != 6 {
28121 sym := auxToCall(v_0_0.Aux)
28123 newLen := v_0_0.Args[1]
28124 if newLen.Op != OpConst32 || !(isSameCall(sym, "runtime.growslice")) {
28132 func rewriteValuegeneric_OpSlicePtr(v *Value) bool {
28134 // match: (SlicePtr (SliceMake (SlicePtr x) _ _))
28135 // result: (SlicePtr x)
28137 if v_0.Op != OpSliceMake {
28140 v_0_0 := v_0.Args[0]
28141 if v_0_0.Op != OpSlicePtr {
28145 v.reset(OpSlicePtr)
28151 func rewriteValuegeneric_OpSlicemask(v *Value) bool {
28153 // match: (Slicemask (Const32 [x]))
28155 // result: (Const32 [-1])
28157 if v_0.Op != OpConst32 {
28160 x := auxIntToInt32(v_0.AuxInt)
28165 v.AuxInt = int32ToAuxInt(-1)
28168 // match: (Slicemask (Const32 [0]))
28169 // result: (Const32 [0])
28171 if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
28175 v.AuxInt = int32ToAuxInt(0)
28178 // match: (Slicemask (Const64 [x]))
28180 // result: (Const64 [-1])
28182 if v_0.Op != OpConst64 {
28185 x := auxIntToInt64(v_0.AuxInt)
28190 v.AuxInt = int64ToAuxInt(-1)
28193 // match: (Slicemask (Const64 [0]))
28194 // result: (Const64 [0])
28196 if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
28200 v.AuxInt = int64ToAuxInt(0)
28205 func rewriteValuegeneric_OpSqrt(v *Value) bool {
28207 // match: (Sqrt (Const64F [c]))
28208 // cond: !math.IsNaN(math.Sqrt(c))
28209 // result: (Const64F [math.Sqrt(c)])
28211 if v_0.Op != OpConst64F {
28214 c := auxIntToFloat64(v_0.AuxInt)
28215 if !(!math.IsNaN(math.Sqrt(c))) {
28218 v.reset(OpConst64F)
28219 v.AuxInt = float64ToAuxInt(math.Sqrt(c))
28224 func rewriteValuegeneric_OpStaticCall(v *Value) bool {
28226 typ := &b.Func.Config.Types
28227 // match: (StaticCall {callAux} p q _ mem)
28228 // cond: isSameCall(callAux, "runtime.memequal") && isSamePtr(p, q)
28229 // result: (MakeResult (ConstBool <typ.Bool> [true]) mem)
28231 if len(v.Args) != 4 {
28234 callAux := auxToCall(v.Aux)
28238 if !(isSameCall(callAux, "runtime.memequal") && isSamePtr(p, q)) {
28241 v.reset(OpMakeResult)
28242 v0 := b.NewValue0(v.Pos, OpConstBool, typ.Bool)
28243 v0.AuxInt = boolToAuxInt(true)
28249 func rewriteValuegeneric_OpStaticLECall(v *Value) bool {
28251 config := b.Func.Config
28252 typ := &b.Func.Config.Types
28253 // match: (StaticLECall {callAux} sptr (Addr {scon} (SB)) (Const64 [1]) mem)
28254 // cond: isSameCall(callAux, "runtime.memequal") && symIsRO(scon)
28255 // result: (MakeResult (Eq8 (Load <typ.Int8> sptr mem) (Const8 <typ.Int8> [int8(read8(scon,0))])) mem)
28257 if len(v.Args) != 4 {
28260 callAux := auxToCall(v.Aux)
28264 if v_1.Op != OpAddr {
28267 scon := auxToSym(v_1.Aux)
28268 v_1_0 := v_1.Args[0]
28269 if v_1_0.Op != OpSB {
28273 if v_2.Op != OpConst64 || auxIntToInt64(v_2.AuxInt) != 1 || !(isSameCall(callAux, "runtime.memequal") && symIsRO(scon)) {
28276 v.reset(OpMakeResult)
28277 v0 := b.NewValue0(v.Pos, OpEq8, typ.Bool)
28278 v1 := b.NewValue0(v.Pos, OpLoad, typ.Int8)
28279 v1.AddArg2(sptr, mem)
28280 v2 := b.NewValue0(v.Pos, OpConst8, typ.Int8)
28281 v2.AuxInt = int8ToAuxInt(int8(read8(scon, 0)))
28286 // match: (StaticLECall {callAux} (Addr {scon} (SB)) sptr (Const64 [1]) mem)
28287 // cond: isSameCall(callAux, "runtime.memequal") && symIsRO(scon)
28288 // result: (MakeResult (Eq8 (Load <typ.Int8> sptr mem) (Const8 <typ.Int8> [int8(read8(scon,0))])) mem)
28290 if len(v.Args) != 4 {
28293 callAux := auxToCall(v.Aux)
28296 if v_0.Op != OpAddr {
28299 scon := auxToSym(v_0.Aux)
28300 v_0_0 := v_0.Args[0]
28301 if v_0_0.Op != OpSB {
28306 if v_2.Op != OpConst64 || auxIntToInt64(v_2.AuxInt) != 1 || !(isSameCall(callAux, "runtime.memequal") && symIsRO(scon)) {
28309 v.reset(OpMakeResult)
28310 v0 := b.NewValue0(v.Pos, OpEq8, typ.Bool)
28311 v1 := b.NewValue0(v.Pos, OpLoad, typ.Int8)
28312 v1.AddArg2(sptr, mem)
28313 v2 := b.NewValue0(v.Pos, OpConst8, typ.Int8)
28314 v2.AuxInt = int8ToAuxInt(int8(read8(scon, 0)))
28319 // match: (StaticLECall {callAux} sptr (Addr {scon} (SB)) (Const64 [2]) mem)
28320 // cond: isSameCall(callAux, "runtime.memequal") && symIsRO(scon) && canLoadUnaligned(config)
28321 // result: (MakeResult (Eq16 (Load <typ.Int16> sptr mem) (Const16 <typ.Int16> [int16(read16(scon,0,config.ctxt.Arch.ByteOrder))])) mem)
28323 if len(v.Args) != 4 {
28326 callAux := auxToCall(v.Aux)
28330 if v_1.Op != OpAddr {
28333 scon := auxToSym(v_1.Aux)
28334 v_1_0 := v_1.Args[0]
28335 if v_1_0.Op != OpSB {
28339 if v_2.Op != OpConst64 || auxIntToInt64(v_2.AuxInt) != 2 || !(isSameCall(callAux, "runtime.memequal") && symIsRO(scon) && canLoadUnaligned(config)) {
28342 v.reset(OpMakeResult)
28343 v0 := b.NewValue0(v.Pos, OpEq16, typ.Bool)
28344 v1 := b.NewValue0(v.Pos, OpLoad, typ.Int16)
28345 v1.AddArg2(sptr, mem)
28346 v2 := b.NewValue0(v.Pos, OpConst16, typ.Int16)
28347 v2.AuxInt = int16ToAuxInt(int16(read16(scon, 0, config.ctxt.Arch.ByteOrder)))
28352 // match: (StaticLECall {callAux} (Addr {scon} (SB)) sptr (Const64 [2]) mem)
28353 // cond: isSameCall(callAux, "runtime.memequal") && symIsRO(scon) && canLoadUnaligned(config)
28354 // result: (MakeResult (Eq16 (Load <typ.Int16> sptr mem) (Const16 <typ.Int16> [int16(read16(scon,0,config.ctxt.Arch.ByteOrder))])) mem)
28356 if len(v.Args) != 4 {
28359 callAux := auxToCall(v.Aux)
28362 if v_0.Op != OpAddr {
28365 scon := auxToSym(v_0.Aux)
28366 v_0_0 := v_0.Args[0]
28367 if v_0_0.Op != OpSB {
28372 if v_2.Op != OpConst64 || auxIntToInt64(v_2.AuxInt) != 2 || !(isSameCall(callAux, "runtime.memequal") && symIsRO(scon) && canLoadUnaligned(config)) {
28375 v.reset(OpMakeResult)
28376 v0 := b.NewValue0(v.Pos, OpEq16, typ.Bool)
28377 v1 := b.NewValue0(v.Pos, OpLoad, typ.Int16)
28378 v1.AddArg2(sptr, mem)
28379 v2 := b.NewValue0(v.Pos, OpConst16, typ.Int16)
28380 v2.AuxInt = int16ToAuxInt(int16(read16(scon, 0, config.ctxt.Arch.ByteOrder)))
28385 // match: (StaticLECall {callAux} sptr (Addr {scon} (SB)) (Const64 [4]) mem)
28386 // cond: isSameCall(callAux, "runtime.memequal") && symIsRO(scon) && canLoadUnaligned(config)
28387 // result: (MakeResult (Eq32 (Load <typ.Int32> sptr mem) (Const32 <typ.Int32> [int32(read32(scon,0,config.ctxt.Arch.ByteOrder))])) mem)
28389 if len(v.Args) != 4 {
28392 callAux := auxToCall(v.Aux)
28396 if v_1.Op != OpAddr {
28399 scon := auxToSym(v_1.Aux)
28400 v_1_0 := v_1.Args[0]
28401 if v_1_0.Op != OpSB {
28405 if v_2.Op != OpConst64 || auxIntToInt64(v_2.AuxInt) != 4 || !(isSameCall(callAux, "runtime.memequal") && symIsRO(scon) && canLoadUnaligned(config)) {
28408 v.reset(OpMakeResult)
28409 v0 := b.NewValue0(v.Pos, OpEq32, typ.Bool)
28410 v1 := b.NewValue0(v.Pos, OpLoad, typ.Int32)
28411 v1.AddArg2(sptr, mem)
28412 v2 := b.NewValue0(v.Pos, OpConst32, typ.Int32)
28413 v2.AuxInt = int32ToAuxInt(int32(read32(scon, 0, config.ctxt.Arch.ByteOrder)))
28418 // match: (StaticLECall {callAux} (Addr {scon} (SB)) sptr (Const64 [4]) mem)
28419 // cond: isSameCall(callAux, "runtime.memequal") && symIsRO(scon) && canLoadUnaligned(config)
28420 // result: (MakeResult (Eq32 (Load <typ.Int32> sptr mem) (Const32 <typ.Int32> [int32(read32(scon,0,config.ctxt.Arch.ByteOrder))])) mem)
28422 if len(v.Args) != 4 {
28425 callAux := auxToCall(v.Aux)
28428 if v_0.Op != OpAddr {
28431 scon := auxToSym(v_0.Aux)
28432 v_0_0 := v_0.Args[0]
28433 if v_0_0.Op != OpSB {
28438 if v_2.Op != OpConst64 || auxIntToInt64(v_2.AuxInt) != 4 || !(isSameCall(callAux, "runtime.memequal") && symIsRO(scon) && canLoadUnaligned(config)) {
28441 v.reset(OpMakeResult)
28442 v0 := b.NewValue0(v.Pos, OpEq32, typ.Bool)
28443 v1 := b.NewValue0(v.Pos, OpLoad, typ.Int32)
28444 v1.AddArg2(sptr, mem)
28445 v2 := b.NewValue0(v.Pos, OpConst32, typ.Int32)
28446 v2.AuxInt = int32ToAuxInt(int32(read32(scon, 0, config.ctxt.Arch.ByteOrder)))
28451 // match: (StaticLECall {callAux} sptr (Addr {scon} (SB)) (Const64 [8]) mem)
28452 // cond: isSameCall(callAux, "runtime.memequal") && symIsRO(scon) && canLoadUnaligned(config) && config.PtrSize == 8
28453 // result: (MakeResult (Eq64 (Load <typ.Int64> sptr mem) (Const64 <typ.Int64> [int64(read64(scon,0,config.ctxt.Arch.ByteOrder))])) mem)
28455 if len(v.Args) != 4 {
28458 callAux := auxToCall(v.Aux)
28462 if v_1.Op != OpAddr {
28465 scon := auxToSym(v_1.Aux)
28466 v_1_0 := v_1.Args[0]
28467 if v_1_0.Op != OpSB {
28471 if v_2.Op != OpConst64 || auxIntToInt64(v_2.AuxInt) != 8 || !(isSameCall(callAux, "runtime.memequal") && symIsRO(scon) && canLoadUnaligned(config) && config.PtrSize == 8) {
28474 v.reset(OpMakeResult)
28475 v0 := b.NewValue0(v.Pos, OpEq64, typ.Bool)
28476 v1 := b.NewValue0(v.Pos, OpLoad, typ.Int64)
28477 v1.AddArg2(sptr, mem)
28478 v2 := b.NewValue0(v.Pos, OpConst64, typ.Int64)
28479 v2.AuxInt = int64ToAuxInt(int64(read64(scon, 0, config.ctxt.Arch.ByteOrder)))
28484 // match: (StaticLECall {callAux} (Addr {scon} (SB)) sptr (Const64 [8]) mem)
28485 // cond: isSameCall(callAux, "runtime.memequal") && symIsRO(scon) && canLoadUnaligned(config) && config.PtrSize == 8
28486 // result: (MakeResult (Eq64 (Load <typ.Int64> sptr mem) (Const64 <typ.Int64> [int64(read64(scon,0,config.ctxt.Arch.ByteOrder))])) mem)
28488 if len(v.Args) != 4 {
28491 callAux := auxToCall(v.Aux)
28494 if v_0.Op != OpAddr {
28497 scon := auxToSym(v_0.Aux)
28498 v_0_0 := v_0.Args[0]
28499 if v_0_0.Op != OpSB {
28504 if v_2.Op != OpConst64 || auxIntToInt64(v_2.AuxInt) != 8 || !(isSameCall(callAux, "runtime.memequal") && symIsRO(scon) && canLoadUnaligned(config) && config.PtrSize == 8) {
28507 v.reset(OpMakeResult)
28508 v0 := b.NewValue0(v.Pos, OpEq64, typ.Bool)
28509 v1 := b.NewValue0(v.Pos, OpLoad, typ.Int64)
28510 v1.AddArg2(sptr, mem)
28511 v2 := b.NewValue0(v.Pos, OpConst64, typ.Int64)
28512 v2.AuxInt = int64ToAuxInt(int64(read64(scon, 0, config.ctxt.Arch.ByteOrder)))
28517 // match: (StaticLECall {callAux} _ _ (Const64 [0]) mem)
28518 // cond: isSameCall(callAux, "runtime.memequal")
28519 // result: (MakeResult (ConstBool <typ.Bool> [true]) mem)
28521 if len(v.Args) != 4 {
28524 callAux := auxToCall(v.Aux)
28527 if v_2.Op != OpConst64 || auxIntToInt64(v_2.AuxInt) != 0 || !(isSameCall(callAux, "runtime.memequal")) {
28530 v.reset(OpMakeResult)
28531 v0 := b.NewValue0(v.Pos, OpConstBool, typ.Bool)
28532 v0.AuxInt = boolToAuxInt(true)
28536 // match: (StaticLECall {callAux} p q _ mem)
28537 // cond: isSameCall(callAux, "runtime.memequal") && isSamePtr(p, q)
28538 // result: (MakeResult (ConstBool <typ.Bool> [true]) mem)
28540 if len(v.Args) != 4 {
28543 callAux := auxToCall(v.Aux)
28547 if !(isSameCall(callAux, "runtime.memequal") && isSamePtr(p, q)) {
28550 v.reset(OpMakeResult)
28551 v0 := b.NewValue0(v.Pos, OpConstBool, typ.Bool)
28552 v0.AuxInt = boolToAuxInt(true)
28556 // match: (StaticLECall {callAux} _ (Const64 [0]) (Const64 [0]) mem)
28557 // cond: isSameCall(callAux, "runtime.makeslice")
28558 // result: (MakeResult (Addr <v.Type.FieldType(0)> {ir.Syms.Zerobase} (SB)) mem)
28560 if len(v.Args) != 4 {
28563 callAux := auxToCall(v.Aux)
28566 if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 {
28570 if v_2.Op != OpConst64 || auxIntToInt64(v_2.AuxInt) != 0 || !(isSameCall(callAux, "runtime.makeslice")) {
28573 v.reset(OpMakeResult)
28574 v0 := b.NewValue0(v.Pos, OpAddr, v.Type.FieldType(0))
28575 v0.Aux = symToAux(ir.Syms.Zerobase)
28576 v1 := b.NewValue0(v.Pos, OpSB, typ.Uintptr)
28581 // match: (StaticLECall {callAux} _ (Const32 [0]) (Const32 [0]) mem)
28582 // cond: isSameCall(callAux, "runtime.makeslice")
28583 // result: (MakeResult (Addr <v.Type.FieldType(0)> {ir.Syms.Zerobase} (SB)) mem)
28585 if len(v.Args) != 4 {
28588 callAux := auxToCall(v.Aux)
28591 if v_1.Op != OpConst32 || auxIntToInt32(v_1.AuxInt) != 0 {
28595 if v_2.Op != OpConst32 || auxIntToInt32(v_2.AuxInt) != 0 || !(isSameCall(callAux, "runtime.makeslice")) {
28598 v.reset(OpMakeResult)
28599 v0 := b.NewValue0(v.Pos, OpAddr, v.Type.FieldType(0))
28600 v0.Aux = symToAux(ir.Syms.Zerobase)
28601 v1 := b.NewValue0(v.Pos, OpSB, typ.Uintptr)
28608 func rewriteValuegeneric_OpStore(v *Value) bool {
28614 // match: (Store {t1} p1 (Load <t2> p2 mem) mem)
28615 // cond: isSamePtr(p1, p2) && t2.Size() == t1.Size()
28618 t1 := auxToType(v.Aux)
28620 if v_1.Op != OpLoad {
28626 if mem != v_2 || !(isSamePtr(p1, p2) && t2.Size() == t1.Size()) {
28632 // match: (Store {t1} p1 (Load <t2> p2 oldmem) mem:(Store {t3} p3 _ oldmem))
28633 // cond: isSamePtr(p1, p2) && t2.Size() == t1.Size() && disjoint(p1, t1.Size(), p3, t3.Size())
28636 t1 := auxToType(v.Aux)
28638 if v_1.Op != OpLoad {
28642 oldmem := v_1.Args[1]
28645 if mem.Op != OpStore {
28648 t3 := auxToType(mem.Aux)
28651 if oldmem != mem.Args[2] || !(isSamePtr(p1, p2) && t2.Size() == t1.Size() && disjoint(p1, t1.Size(), p3, t3.Size())) {
28657 // match: (Store {t1} p1 (Load <t2> p2 oldmem) mem:(Store {t3} p3 _ (Store {t4} p4 _ oldmem)))
28658 // cond: isSamePtr(p1, p2) && t2.Size() == t1.Size() && disjoint(p1, t1.Size(), p3, t3.Size()) && disjoint(p1, t1.Size(), p4, t4.Size())
28661 t1 := auxToType(v.Aux)
28663 if v_1.Op != OpLoad {
28667 oldmem := v_1.Args[1]
28670 if mem.Op != OpStore {
28673 t3 := auxToType(mem.Aux)
28676 mem_2 := mem.Args[2]
28677 if mem_2.Op != OpStore {
28680 t4 := auxToType(mem_2.Aux)
28682 p4 := mem_2.Args[0]
28683 if oldmem != mem_2.Args[2] || !(isSamePtr(p1, p2) && t2.Size() == t1.Size() && disjoint(p1, t1.Size(), p3, t3.Size()) && disjoint(p1, t1.Size(), p4, t4.Size())) {
28689 // match: (Store {t1} p1 (Load <t2> p2 oldmem) mem:(Store {t3} p3 _ (Store {t4} p4 _ (Store {t5} p5 _ oldmem))))
28690 // cond: isSamePtr(p1, p2) && t2.Size() == t1.Size() && disjoint(p1, t1.Size(), p3, t3.Size()) && disjoint(p1, t1.Size(), p4, t4.Size()) && disjoint(p1, t1.Size(), p5, t5.Size())
28693 t1 := auxToType(v.Aux)
28695 if v_1.Op != OpLoad {
28699 oldmem := v_1.Args[1]
28702 if mem.Op != OpStore {
28705 t3 := auxToType(mem.Aux)
28708 mem_2 := mem.Args[2]
28709 if mem_2.Op != OpStore {
28712 t4 := auxToType(mem_2.Aux)
28714 p4 := mem_2.Args[0]
28715 mem_2_2 := mem_2.Args[2]
28716 if mem_2_2.Op != OpStore {
28719 t5 := auxToType(mem_2_2.Aux)
28720 _ = mem_2_2.Args[2]
28721 p5 := mem_2_2.Args[0]
28722 if oldmem != mem_2_2.Args[2] || !(isSamePtr(p1, p2) && t2.Size() == t1.Size() && disjoint(p1, t1.Size(), p3, t3.Size()) && disjoint(p1, t1.Size(), p4, t4.Size()) && disjoint(p1, t1.Size(), p5, t5.Size())) {
28728 // match: (Store {t} (OffPtr [o] p1) x mem:(Zero [n] p2 _))
28729 // cond: isConstZero(x) && o >= 0 && t.Size() + o <= n && isSamePtr(p1, p2)
28732 t := auxToType(v.Aux)
28733 if v_0.Op != OpOffPtr {
28736 o := auxIntToInt64(v_0.AuxInt)
28740 if mem.Op != OpZero {
28743 n := auxIntToInt64(mem.AuxInt)
28745 if !(isConstZero(x) && o >= 0 && t.Size()+o <= n && isSamePtr(p1, p2)) {
28751 // match: (Store {t1} op:(OffPtr [o1] p1) x mem:(Store {t2} p2 _ (Zero [n] p3 _)))
28752 // cond: isConstZero(x) && o1 >= 0 && t1.Size() + o1 <= n && isSamePtr(p1, p3) && disjoint(op, t1.Size(), p2, t2.Size())
28755 t1 := auxToType(v.Aux)
28757 if op.Op != OpOffPtr {
28760 o1 := auxIntToInt64(op.AuxInt)
28764 if mem.Op != OpStore {
28767 t2 := auxToType(mem.Aux)
28770 mem_2 := mem.Args[2]
28771 if mem_2.Op != OpZero {
28774 n := auxIntToInt64(mem_2.AuxInt)
28775 p3 := mem_2.Args[0]
28776 if !(isConstZero(x) && o1 >= 0 && t1.Size()+o1 <= n && isSamePtr(p1, p3) && disjoint(op, t1.Size(), p2, t2.Size())) {
28782 // match: (Store {t1} op:(OffPtr [o1] p1) x mem:(Store {t2} p2 _ (Store {t3} p3 _ (Zero [n] p4 _))))
28783 // cond: isConstZero(x) && o1 >= 0 && t1.Size() + o1 <= n && isSamePtr(p1, p4) && disjoint(op, t1.Size(), p2, t2.Size()) && disjoint(op, t1.Size(), p3, t3.Size())
28786 t1 := auxToType(v.Aux)
28788 if op.Op != OpOffPtr {
28791 o1 := auxIntToInt64(op.AuxInt)
28795 if mem.Op != OpStore {
28798 t2 := auxToType(mem.Aux)
28801 mem_2 := mem.Args[2]
28802 if mem_2.Op != OpStore {
28805 t3 := auxToType(mem_2.Aux)
28807 p3 := mem_2.Args[0]
28808 mem_2_2 := mem_2.Args[2]
28809 if mem_2_2.Op != OpZero {
28812 n := auxIntToInt64(mem_2_2.AuxInt)
28813 p4 := mem_2_2.Args[0]
28814 if !(isConstZero(x) && o1 >= 0 && t1.Size()+o1 <= n && isSamePtr(p1, p4) && disjoint(op, t1.Size(), p2, t2.Size()) && disjoint(op, t1.Size(), p3, t3.Size())) {
28820 // match: (Store {t1} op:(OffPtr [o1] p1) x mem:(Store {t2} p2 _ (Store {t3} p3 _ (Store {t4} p4 _ (Zero [n] p5 _)))))
28821 // cond: isConstZero(x) && o1 >= 0 && t1.Size() + o1 <= n && isSamePtr(p1, p5) && disjoint(op, t1.Size(), p2, t2.Size()) && disjoint(op, t1.Size(), p3, t3.Size()) && disjoint(op, t1.Size(), p4, t4.Size())
28824 t1 := auxToType(v.Aux)
28826 if op.Op != OpOffPtr {
28829 o1 := auxIntToInt64(op.AuxInt)
28833 if mem.Op != OpStore {
28836 t2 := auxToType(mem.Aux)
28839 mem_2 := mem.Args[2]
28840 if mem_2.Op != OpStore {
28843 t3 := auxToType(mem_2.Aux)
28845 p3 := mem_2.Args[0]
28846 mem_2_2 := mem_2.Args[2]
28847 if mem_2_2.Op != OpStore {
28850 t4 := auxToType(mem_2_2.Aux)
28851 _ = mem_2_2.Args[2]
28852 p4 := mem_2_2.Args[0]
28853 mem_2_2_2 := mem_2_2.Args[2]
28854 if mem_2_2_2.Op != OpZero {
28857 n := auxIntToInt64(mem_2_2_2.AuxInt)
28858 p5 := mem_2_2_2.Args[0]
28859 if !(isConstZero(x) && o1 >= 0 && t1.Size()+o1 <= n && isSamePtr(p1, p5) && disjoint(op, t1.Size(), p2, t2.Size()) && disjoint(op, t1.Size(), p3, t3.Size()) && disjoint(op, t1.Size(), p4, t4.Size())) {
28865 // match: (Store _ (StructMake0) mem)
28868 if v_1.Op != OpStructMake0 {
28875 // match: (Store dst (StructMake1 <t> f0) mem)
28876 // result: (Store {t.FieldType(0)} (OffPtr <t.FieldType(0).PtrTo()> [0] dst) f0 mem)
28879 if v_1.Op != OpStructMake1 {
28886 v.Aux = typeToAux(t.FieldType(0))
28887 v0 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(0).PtrTo())
28888 v0.AuxInt = int64ToAuxInt(0)
28890 v.AddArg3(v0, f0, mem)
28893 // match: (Store dst (StructMake2 <t> f0 f1) mem)
28894 // result: (Store {t.FieldType(1)} (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] dst) f1 (Store {t.FieldType(0)} (OffPtr <t.FieldType(0).PtrTo()> [0] dst) f0 mem))
28897 if v_1.Op != OpStructMake2 {
28905 v.Aux = typeToAux(t.FieldType(1))
28906 v0 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(1).PtrTo())
28907 v0.AuxInt = int64ToAuxInt(t.FieldOff(1))
28909 v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
28910 v1.Aux = typeToAux(t.FieldType(0))
28911 v2 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(0).PtrTo())
28912 v2.AuxInt = int64ToAuxInt(0)
28914 v1.AddArg3(v2, f0, mem)
28915 v.AddArg3(v0, f1, v1)
28918 // match: (Store dst (StructMake3 <t> f0 f1 f2) mem)
28919 // result: (Store {t.FieldType(2)} (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] dst) f2 (Store {t.FieldType(1)} (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] dst) f1 (Store {t.FieldType(0)} (OffPtr <t.FieldType(0).PtrTo()> [0] dst) f0 mem)))
28922 if v_1.Op != OpStructMake3 {
28931 v.Aux = typeToAux(t.FieldType(2))
28932 v0 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(2).PtrTo())
28933 v0.AuxInt = int64ToAuxInt(t.FieldOff(2))
28935 v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
28936 v1.Aux = typeToAux(t.FieldType(1))
28937 v2 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(1).PtrTo())
28938 v2.AuxInt = int64ToAuxInt(t.FieldOff(1))
28940 v3 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
28941 v3.Aux = typeToAux(t.FieldType(0))
28942 v4 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(0).PtrTo())
28943 v4.AuxInt = int64ToAuxInt(0)
28945 v3.AddArg3(v4, f0, mem)
28946 v1.AddArg3(v2, f1, v3)
28947 v.AddArg3(v0, f2, v1)
28950 // match: (Store dst (StructMake4 <t> f0 f1 f2 f3) mem)
28951 // result: (Store {t.FieldType(3)} (OffPtr <t.FieldType(3).PtrTo()> [t.FieldOff(3)] dst) f3 (Store {t.FieldType(2)} (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] dst) f2 (Store {t.FieldType(1)} (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] dst) f1 (Store {t.FieldType(0)} (OffPtr <t.FieldType(0).PtrTo()> [0] dst) f0 mem))))
28954 if v_1.Op != OpStructMake4 {
28964 v.Aux = typeToAux(t.FieldType(3))
28965 v0 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(3).PtrTo())
28966 v0.AuxInt = int64ToAuxInt(t.FieldOff(3))
28968 v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
28969 v1.Aux = typeToAux(t.FieldType(2))
28970 v2 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(2).PtrTo())
28971 v2.AuxInt = int64ToAuxInt(t.FieldOff(2))
28973 v3 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
28974 v3.Aux = typeToAux(t.FieldType(1))
28975 v4 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(1).PtrTo())
28976 v4.AuxInt = int64ToAuxInt(t.FieldOff(1))
28978 v5 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
28979 v5.Aux = typeToAux(t.FieldType(0))
28980 v6 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(0).PtrTo())
28981 v6.AuxInt = int64ToAuxInt(0)
28983 v5.AddArg3(v6, f0, mem)
28984 v3.AddArg3(v4, f1, v5)
28985 v1.AddArg3(v2, f2, v3)
28986 v.AddArg3(v0, f3, v1)
28989 // match: (Store {t} dst (Load src mem) mem)
28990 // cond: !fe.CanSSA(t)
28991 // result: (Move {t} [t.Size()] dst src mem)
28993 t := auxToType(v.Aux)
28995 if v_1.Op != OpLoad {
29000 if mem != v_2 || !(!fe.CanSSA(t)) {
29004 v.AuxInt = int64ToAuxInt(t.Size())
29005 v.Aux = typeToAux(t)
29006 v.AddArg3(dst, src, mem)
29009 // match: (Store {t} dst (Load src mem) (VarDef {x} mem))
29010 // cond: !fe.CanSSA(t)
29011 // result: (Move {t} [t.Size()] dst src (VarDef {x} mem))
29013 t := auxToType(v.Aux)
29015 if v_1.Op != OpLoad {
29020 if v_2.Op != OpVarDef {
29023 x := auxToSym(v_2.Aux)
29024 if mem != v_2.Args[0] || !(!fe.CanSSA(t)) {
29028 v.AuxInt = int64ToAuxInt(t.Size())
29029 v.Aux = typeToAux(t)
29030 v0 := b.NewValue0(v.Pos, OpVarDef, types.TypeMem)
29031 v0.Aux = symToAux(x)
29033 v.AddArg3(dst, src, v0)
29036 // match: (Store _ (ArrayMake0) mem)
29039 if v_1.Op != OpArrayMake0 {
29046 // match: (Store dst (ArrayMake1 e) mem)
29047 // result: (Store {e.Type} dst e mem)
29050 if v_1.Op != OpArrayMake1 {
29056 v.Aux = typeToAux(e.Type)
29057 v.AddArg3(dst, e, mem)
29060 // match: (Store (SelectN [0] call:(StaticLECall _ _)) x mem:(SelectN [1] call))
29061 // cond: isConstZero(x) && isSameCall(call.Aux, "runtime.newobject")
29064 if v_0.Op != OpSelectN || auxIntToInt64(v_0.AuxInt) != 0 {
29067 call := v_0.Args[0]
29068 if call.Op != OpStaticLECall || len(call.Args) != 2 {
29073 if mem.Op != OpSelectN || auxIntToInt64(mem.AuxInt) != 1 || call != mem.Args[0] || !(isConstZero(x) && isSameCall(call.Aux, "runtime.newobject")) {
29079 // match: (Store (OffPtr (SelectN [0] call:(StaticLECall _ _))) x mem:(SelectN [1] call))
29080 // cond: isConstZero(x) && isSameCall(call.Aux, "runtime.newobject")
29083 if v_0.Op != OpOffPtr {
29086 v_0_0 := v_0.Args[0]
29087 if v_0_0.Op != OpSelectN || auxIntToInt64(v_0_0.AuxInt) != 0 {
29090 call := v_0_0.Args[0]
29091 if call.Op != OpStaticLECall || len(call.Args) != 2 {
29096 if mem.Op != OpSelectN || auxIntToInt64(mem.AuxInt) != 1 || call != mem.Args[0] || !(isConstZero(x) && isSameCall(call.Aux, "runtime.newobject")) {
29102 // match: (Store {t1} op1:(OffPtr [o1] p1) d1 m2:(Store {t2} op2:(OffPtr [0] p2) d2 m3:(Move [n] p3 _ mem)))
29103 // cond: m2.Uses == 1 && m3.Uses == 1 && o1 == t2.Size() && n == t2.Size() + t1.Size() && isSamePtr(p1, p2) && isSamePtr(p2, p3) && clobber(m2, m3)
29104 // result: (Store {t1} op1 d1 (Store {t2} op2 d2 mem))
29106 t1 := auxToType(v.Aux)
29108 if op1.Op != OpOffPtr {
29111 o1 := auxIntToInt64(op1.AuxInt)
29115 if m2.Op != OpStore {
29118 t2 := auxToType(m2.Aux)
29121 if op2.Op != OpOffPtr || auxIntToInt64(op2.AuxInt) != 0 {
29127 if m3.Op != OpMove {
29130 n := auxIntToInt64(m3.AuxInt)
29133 if !(m2.Uses == 1 && m3.Uses == 1 && o1 == t2.Size() && n == t2.Size()+t1.Size() && isSamePtr(p1, p2) && isSamePtr(p2, p3) && clobber(m2, m3)) {
29137 v.Aux = typeToAux(t1)
29138 v0 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
29139 v0.Aux = typeToAux(t2)
29140 v0.AddArg3(op2, d2, mem)
29141 v.AddArg3(op1, d1, v0)
29144 // match: (Store {t1} op1:(OffPtr [o1] p1) d1 m2:(Store {t2} op2:(OffPtr [o2] p2) d2 m3:(Store {t3} op3:(OffPtr [0] p3) d3 m4:(Move [n] p4 _ mem))))
29145 // cond: m2.Uses == 1 && m3.Uses == 1 && m4.Uses == 1 && o2 == t3.Size() && o1-o2 == t2.Size() && n == t3.Size() + t2.Size() + t1.Size() && isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && clobber(m2, m3, m4)
29146 // result: (Store {t1} op1 d1 (Store {t2} op2 d2 (Store {t3} op3 d3 mem)))
29148 t1 := auxToType(v.Aux)
29150 if op1.Op != OpOffPtr {
29153 o1 := auxIntToInt64(op1.AuxInt)
29157 if m2.Op != OpStore {
29160 t2 := auxToType(m2.Aux)
29163 if op2.Op != OpOffPtr {
29166 o2 := auxIntToInt64(op2.AuxInt)
29170 if m3.Op != OpStore {
29173 t3 := auxToType(m3.Aux)
29176 if op3.Op != OpOffPtr || auxIntToInt64(op3.AuxInt) != 0 {
29182 if m4.Op != OpMove {
29185 n := auxIntToInt64(m4.AuxInt)
29188 if !(m2.Uses == 1 && m3.Uses == 1 && m4.Uses == 1 && o2 == t3.Size() && o1-o2 == t2.Size() && n == t3.Size()+t2.Size()+t1.Size() && isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && clobber(m2, m3, m4)) {
29192 v.Aux = typeToAux(t1)
29193 v0 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
29194 v0.Aux = typeToAux(t2)
29195 v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
29196 v1.Aux = typeToAux(t3)
29197 v1.AddArg3(op3, d3, mem)
29198 v0.AddArg3(op2, d2, v1)
29199 v.AddArg3(op1, d1, v0)
29202 // match: (Store {t1} op1:(OffPtr [o1] p1) d1 m2:(Store {t2} op2:(OffPtr [o2] p2) d2 m3:(Store {t3} op3:(OffPtr [o3] p3) d3 m4:(Store {t4} op4:(OffPtr [0] p4) d4 m5:(Move [n] p5 _ mem)))))
29203 // cond: m2.Uses == 1 && m3.Uses == 1 && m4.Uses == 1 && m5.Uses == 1 && o3 == t4.Size() && o2-o3 == t3.Size() && o1-o2 == t2.Size() && n == t4.Size() + t3.Size() + t2.Size() + t1.Size() && isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && clobber(m2, m3, m4, m5)
29204 // result: (Store {t1} op1 d1 (Store {t2} op2 d2 (Store {t3} op3 d3 (Store {t4} op4 d4 mem))))
29206 t1 := auxToType(v.Aux)
29208 if op1.Op != OpOffPtr {
29211 o1 := auxIntToInt64(op1.AuxInt)
29215 if m2.Op != OpStore {
29218 t2 := auxToType(m2.Aux)
29221 if op2.Op != OpOffPtr {
29224 o2 := auxIntToInt64(op2.AuxInt)
29228 if m3.Op != OpStore {
29231 t3 := auxToType(m3.Aux)
29234 if op3.Op != OpOffPtr {
29237 o3 := auxIntToInt64(op3.AuxInt)
29241 if m4.Op != OpStore {
29244 t4 := auxToType(m4.Aux)
29247 if op4.Op != OpOffPtr || auxIntToInt64(op4.AuxInt) != 0 {
29253 if m5.Op != OpMove {
29256 n := auxIntToInt64(m5.AuxInt)
29259 if !(m2.Uses == 1 && m3.Uses == 1 && m4.Uses == 1 && m5.Uses == 1 && o3 == t4.Size() && o2-o3 == t3.Size() && o1-o2 == t2.Size() && n == t4.Size()+t3.Size()+t2.Size()+t1.Size() && isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && clobber(m2, m3, m4, m5)) {
29263 v.Aux = typeToAux(t1)
29264 v0 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
29265 v0.Aux = typeToAux(t2)
29266 v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
29267 v1.Aux = typeToAux(t3)
29268 v2 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
29269 v2.Aux = typeToAux(t4)
29270 v2.AddArg3(op4, d4, mem)
29271 v1.AddArg3(op3, d3, v2)
29272 v0.AddArg3(op2, d2, v1)
29273 v.AddArg3(op1, d1, v0)
29276 // match: (Store {t1} op1:(OffPtr [o1] p1) d1 m2:(Store {t2} op2:(OffPtr [0] p2) d2 m3:(Zero [n] p3 mem)))
29277 // cond: m2.Uses == 1 && m3.Uses == 1 && o1 == t2.Size() && n == t2.Size() + t1.Size() && isSamePtr(p1, p2) && isSamePtr(p2, p3) && clobber(m2, m3)
29278 // result: (Store {t1} op1 d1 (Store {t2} op2 d2 mem))
29280 t1 := auxToType(v.Aux)
29282 if op1.Op != OpOffPtr {
29285 o1 := auxIntToInt64(op1.AuxInt)
29289 if m2.Op != OpStore {
29292 t2 := auxToType(m2.Aux)
29295 if op2.Op != OpOffPtr || auxIntToInt64(op2.AuxInt) != 0 {
29301 if m3.Op != OpZero {
29304 n := auxIntToInt64(m3.AuxInt)
29307 if !(m2.Uses == 1 && m3.Uses == 1 && o1 == t2.Size() && n == t2.Size()+t1.Size() && isSamePtr(p1, p2) && isSamePtr(p2, p3) && clobber(m2, m3)) {
29311 v.Aux = typeToAux(t1)
29312 v0 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
29313 v0.Aux = typeToAux(t2)
29314 v0.AddArg3(op2, d2, mem)
29315 v.AddArg3(op1, d1, v0)
29318 // match: (Store {t1} op1:(OffPtr [o1] p1) d1 m2:(Store {t2} op2:(OffPtr [o2] p2) d2 m3:(Store {t3} op3:(OffPtr [0] p3) d3 m4:(Zero [n] p4 mem))))
29319 // cond: m2.Uses == 1 && m3.Uses == 1 && m4.Uses == 1 && o2 == t3.Size() && o1-o2 == t2.Size() && n == t3.Size() + t2.Size() + t1.Size() && isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && clobber(m2, m3, m4)
29320 // result: (Store {t1} op1 d1 (Store {t2} op2 d2 (Store {t3} op3 d3 mem)))
29322 t1 := auxToType(v.Aux)
29324 if op1.Op != OpOffPtr {
29327 o1 := auxIntToInt64(op1.AuxInt)
29331 if m2.Op != OpStore {
29334 t2 := auxToType(m2.Aux)
29337 if op2.Op != OpOffPtr {
29340 o2 := auxIntToInt64(op2.AuxInt)
29344 if m3.Op != OpStore {
29347 t3 := auxToType(m3.Aux)
29350 if op3.Op != OpOffPtr || auxIntToInt64(op3.AuxInt) != 0 {
29356 if m4.Op != OpZero {
29359 n := auxIntToInt64(m4.AuxInt)
29362 if !(m2.Uses == 1 && m3.Uses == 1 && m4.Uses == 1 && o2 == t3.Size() && o1-o2 == t2.Size() && n == t3.Size()+t2.Size()+t1.Size() && isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && clobber(m2, m3, m4)) {
29366 v.Aux = typeToAux(t1)
29367 v0 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
29368 v0.Aux = typeToAux(t2)
29369 v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
29370 v1.Aux = typeToAux(t3)
29371 v1.AddArg3(op3, d3, mem)
29372 v0.AddArg3(op2, d2, v1)
29373 v.AddArg3(op1, d1, v0)
29376 // match: (Store {t1} op1:(OffPtr [o1] p1) d1 m2:(Store {t2} op2:(OffPtr [o2] p2) d2 m3:(Store {t3} op3:(OffPtr [o3] p3) d3 m4:(Store {t4} op4:(OffPtr [0] p4) d4 m5:(Zero [n] p5 mem)))))
29377 // cond: m2.Uses == 1 && m3.Uses == 1 && m4.Uses == 1 && m5.Uses == 1 && o3 == t4.Size() && o2-o3 == t3.Size() && o1-o2 == t2.Size() && n == t4.Size() + t3.Size() + t2.Size() + t1.Size() && isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && clobber(m2, m3, m4, m5)
29378 // result: (Store {t1} op1 d1 (Store {t2} op2 d2 (Store {t3} op3 d3 (Store {t4} op4 d4 mem))))
29380 t1 := auxToType(v.Aux)
29382 if op1.Op != OpOffPtr {
29385 o1 := auxIntToInt64(op1.AuxInt)
29389 if m2.Op != OpStore {
29392 t2 := auxToType(m2.Aux)
29395 if op2.Op != OpOffPtr {
29398 o2 := auxIntToInt64(op2.AuxInt)
29402 if m3.Op != OpStore {
29405 t3 := auxToType(m3.Aux)
29408 if op3.Op != OpOffPtr {
29411 o3 := auxIntToInt64(op3.AuxInt)
29415 if m4.Op != OpStore {
29418 t4 := auxToType(m4.Aux)
29421 if op4.Op != OpOffPtr || auxIntToInt64(op4.AuxInt) != 0 {
29427 if m5.Op != OpZero {
29430 n := auxIntToInt64(m5.AuxInt)
29433 if !(m2.Uses == 1 && m3.Uses == 1 && m4.Uses == 1 && m5.Uses == 1 && o3 == t4.Size() && o2-o3 == t3.Size() && o1-o2 == t2.Size() && n == t4.Size()+t3.Size()+t2.Size()+t1.Size() && isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && clobber(m2, m3, m4, m5)) {
29437 v.Aux = typeToAux(t1)
29438 v0 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
29439 v0.Aux = typeToAux(t2)
29440 v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
29441 v1.Aux = typeToAux(t3)
29442 v2 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
29443 v2.Aux = typeToAux(t4)
29444 v2.AddArg3(op4, d4, mem)
29445 v1.AddArg3(op3, d3, v2)
29446 v0.AddArg3(op2, d2, v1)
29447 v.AddArg3(op1, d1, v0)
29452 func rewriteValuegeneric_OpStringLen(v *Value) bool {
29454 // match: (StringLen (StringMake _ (Const64 <t> [c])))
29455 // result: (Const64 <t> [c])
29457 if v_0.Op != OpStringMake {
29461 v_0_1 := v_0.Args[1]
29462 if v_0_1.Op != OpConst64 {
29466 c := auxIntToInt64(v_0_1.AuxInt)
29469 v.AuxInt = int64ToAuxInt(c)
29474 func rewriteValuegeneric_OpStringPtr(v *Value) bool {
29476 // match: (StringPtr (StringMake (Addr <t> {s} base) _))
29477 // result: (Addr <t> {s} base)
29479 if v_0.Op != OpStringMake {
29482 v_0_0 := v_0.Args[0]
29483 if v_0_0.Op != OpAddr {
29487 s := auxToSym(v_0_0.Aux)
29488 base := v_0_0.Args[0]
29491 v.Aux = symToAux(s)
29497 func rewriteValuegeneric_OpStructSelect(v *Value) bool {
29501 // match: (StructSelect (StructMake1 x))
29504 if v_0.Op != OpStructMake1 {
29511 // match: (StructSelect [0] (StructMake2 x _))
29514 if auxIntToInt64(v.AuxInt) != 0 || v_0.Op != OpStructMake2 {
29521 // match: (StructSelect [1] (StructMake2 _ x))
29524 if auxIntToInt64(v.AuxInt) != 1 || v_0.Op != OpStructMake2 {
29531 // match: (StructSelect [0] (StructMake3 x _ _))
29534 if auxIntToInt64(v.AuxInt) != 0 || v_0.Op != OpStructMake3 {
29541 // match: (StructSelect [1] (StructMake3 _ x _))
29544 if auxIntToInt64(v.AuxInt) != 1 || v_0.Op != OpStructMake3 {
29551 // match: (StructSelect [2] (StructMake3 _ _ x))
29554 if auxIntToInt64(v.AuxInt) != 2 || v_0.Op != OpStructMake3 {
29561 // match: (StructSelect [0] (StructMake4 x _ _ _))
29564 if auxIntToInt64(v.AuxInt) != 0 || v_0.Op != OpStructMake4 {
29571 // match: (StructSelect [1] (StructMake4 _ x _ _))
29574 if auxIntToInt64(v.AuxInt) != 1 || v_0.Op != OpStructMake4 {
29581 // match: (StructSelect [2] (StructMake4 _ _ x _))
29584 if auxIntToInt64(v.AuxInt) != 2 || v_0.Op != OpStructMake4 {
29591 // match: (StructSelect [3] (StructMake4 _ _ _ x))
29594 if auxIntToInt64(v.AuxInt) != 3 || v_0.Op != OpStructMake4 {
29601 // match: (StructSelect [i] x:(Load <t> ptr mem))
29602 // cond: !fe.CanSSA(t)
29603 // result: @x.Block (Load <v.Type> (OffPtr <v.Type.PtrTo()> [t.FieldOff(int(i))] ptr) mem)
29605 i := auxIntToInt64(v.AuxInt)
29607 if x.Op != OpLoad {
29613 if !(!fe.CanSSA(t)) {
29617 v0 := b.NewValue0(v.Pos, OpLoad, v.Type)
29619 v1 := b.NewValue0(v.Pos, OpOffPtr, v.Type.PtrTo())
29620 v1.AuxInt = int64ToAuxInt(t.FieldOff(int(i)))
29622 v0.AddArg2(v1, mem)
29625 // match: (StructSelect [0] (IData x))
29626 // result: (IData x)
29628 if auxIntToInt64(v.AuxInt) != 0 || v_0.Op != OpIData {
29638 func rewriteValuegeneric_OpSub16(v *Value) bool {
29642 // match: (Sub16 (Const16 [c]) (Const16 [d]))
29643 // result: (Const16 [c-d])
29645 if v_0.Op != OpConst16 {
29648 c := auxIntToInt16(v_0.AuxInt)
29649 if v_1.Op != OpConst16 {
29652 d := auxIntToInt16(v_1.AuxInt)
29654 v.AuxInt = int16ToAuxInt(c - d)
29657 // match: (Sub16 x (Const16 <t> [c]))
29658 // cond: x.Op != OpConst16
29659 // result: (Add16 (Const16 <t> [-c]) x)
29662 if v_1.Op != OpConst16 {
29666 c := auxIntToInt16(v_1.AuxInt)
29667 if !(x.Op != OpConst16) {
29671 v0 := b.NewValue0(v.Pos, OpConst16, t)
29672 v0.AuxInt = int16ToAuxInt(-c)
29676 // match: (Sub16 <t> (Mul16 x y) (Mul16 x z))
29677 // result: (Mul16 x (Sub16 <t> y z))
29680 if v_0.Op != OpMul16 {
29684 v_0_0 := v_0.Args[0]
29685 v_0_1 := v_0.Args[1]
29686 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
29689 if v_1.Op != OpMul16 {
29693 v_1_0 := v_1.Args[0]
29694 v_1_1 := v_1.Args[1]
29695 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
29701 v0 := b.NewValue0(v.Pos, OpSub16, t)
29709 // match: (Sub16 x x)
29710 // result: (Const16 [0])
29717 v.AuxInt = int16ToAuxInt(0)
29720 // match: (Sub16 (Neg16 x) (Com16 x))
29721 // result: (Const16 [1])
29723 if v_0.Op != OpNeg16 {
29727 if v_1.Op != OpCom16 || x != v_1.Args[0] {
29731 v.AuxInt = int16ToAuxInt(1)
29734 // match: (Sub16 (Com16 x) (Neg16 x))
29735 // result: (Const16 [-1])
29737 if v_0.Op != OpCom16 {
29741 if v_1.Op != OpNeg16 || x != v_1.Args[0] {
29745 v.AuxInt = int16ToAuxInt(-1)
29748 // match: (Sub16 (Add16 t x) (Add16 t y))
29749 // result: (Sub16 x y)
29751 if v_0.Op != OpAdd16 {
29755 v_0_0 := v_0.Args[0]
29756 v_0_1 := v_0.Args[1]
29757 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
29760 if v_1.Op != OpAdd16 {
29764 v_1_0 := v_1.Args[0]
29765 v_1_1 := v_1.Args[1]
29766 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
29778 // match: (Sub16 (Add16 x y) x)
29781 if v_0.Op != OpAdd16 {
29785 v_0_0 := v_0.Args[0]
29786 v_0_1 := v_0.Args[1]
29787 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
29798 // match: (Sub16 (Add16 x y) y)
29801 if v_0.Op != OpAdd16 {
29805 v_0_0 := v_0.Args[0]
29806 v_0_1 := v_0.Args[1]
29807 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
29818 // match: (Sub16 (Sub16 x y) x)
29819 // result: (Neg16 y)
29821 if v_0.Op != OpSub16 {
29833 // match: (Sub16 x (Add16 x y))
29834 // result: (Neg16 y)
29837 if v_1.Op != OpAdd16 {
29841 v_1_0 := v_1.Args[0]
29842 v_1_1 := v_1.Args[1]
29843 for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
29854 // match: (Sub16 x (Sub16 i:(Const16 <t>) z))
29855 // cond: (z.Op != OpConst16 && x.Op != OpConst16)
29856 // result: (Sub16 (Add16 <t> x z) i)
29859 if v_1.Op != OpSub16 {
29864 if i.Op != OpConst16 {
29868 if !(z.Op != OpConst16 && x.Op != OpConst16) {
29872 v0 := b.NewValue0(v.Pos, OpAdd16, t)
29877 // match: (Sub16 x (Add16 z i:(Const16 <t>)))
29878 // cond: (z.Op != OpConst16 && x.Op != OpConst16)
29879 // result: (Sub16 (Sub16 <t> x z) i)
29882 if v_1.Op != OpAdd16 {
29886 v_1_0 := v_1.Args[0]
29887 v_1_1 := v_1.Args[1]
29888 for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
29891 if i.Op != OpConst16 {
29895 if !(z.Op != OpConst16 && x.Op != OpConst16) {
29899 v0 := b.NewValue0(v.Pos, OpSub16, t)
29906 // match: (Sub16 (Sub16 i:(Const16 <t>) z) x)
29907 // cond: (z.Op != OpConst16 && x.Op != OpConst16)
29908 // result: (Sub16 i (Add16 <t> z x))
29910 if v_0.Op != OpSub16 {
29915 if i.Op != OpConst16 {
29920 if !(z.Op != OpConst16 && x.Op != OpConst16) {
29924 v0 := b.NewValue0(v.Pos, OpAdd16, t)
29929 // match: (Sub16 (Add16 z i:(Const16 <t>)) x)
29930 // cond: (z.Op != OpConst16 && x.Op != OpConst16)
29931 // result: (Add16 i (Sub16 <t> z x))
29933 if v_0.Op != OpAdd16 {
29937 v_0_0 := v_0.Args[0]
29938 v_0_1 := v_0.Args[1]
29939 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
29942 if i.Op != OpConst16 {
29947 if !(z.Op != OpConst16 && x.Op != OpConst16) {
29951 v0 := b.NewValue0(v.Pos, OpSub16, t)
29958 // match: (Sub16 (Const16 <t> [c]) (Sub16 (Const16 <t> [d]) x))
29959 // result: (Add16 (Const16 <t> [c-d]) x)
29961 if v_0.Op != OpConst16 {
29965 c := auxIntToInt16(v_0.AuxInt)
29966 if v_1.Op != OpSub16 {
29970 v_1_0 := v_1.Args[0]
29971 if v_1_0.Op != OpConst16 || v_1_0.Type != t {
29974 d := auxIntToInt16(v_1_0.AuxInt)
29976 v0 := b.NewValue0(v.Pos, OpConst16, t)
29977 v0.AuxInt = int16ToAuxInt(c - d)
29981 // match: (Sub16 (Const16 <t> [c]) (Add16 (Const16 <t> [d]) x))
29982 // result: (Sub16 (Const16 <t> [c-d]) x)
29984 if v_0.Op != OpConst16 {
29988 c := auxIntToInt16(v_0.AuxInt)
29989 if v_1.Op != OpAdd16 {
29993 v_1_0 := v_1.Args[0]
29994 v_1_1 := v_1.Args[1]
29995 for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
29996 if v_1_0.Op != OpConst16 || v_1_0.Type != t {
29999 d := auxIntToInt16(v_1_0.AuxInt)
30002 v0 := b.NewValue0(v.Pos, OpConst16, t)
30003 v0.AuxInt = int16ToAuxInt(c - d)
// rewriteValuegeneric_OpSub32 applies the generic rewrite rules for Sub32
// values: constant folding, canonicalizing x-c into c'+x, algebraic
// simplifications (x-x, factoring a common Mul32 operand, Neg/Com pairs),
// and reassociation that floats constant operands outward so later rules
// can fold them. It reports whether v was rewritten.
// Generated from _gen/generic.rules; edit the rules file, not this code.
30011 func rewriteValuegeneric_OpSub32(v *Value) bool {
30015 // match: (Sub32 (Const32 [c]) (Const32 [d]))
30016 // result: (Const32 [c-d])
30018 if v_0.Op != OpConst32 {
30021 c := auxIntToInt32(v_0.AuxInt)
30022 if v_1.Op != OpConst32 {
30025 d := auxIntToInt32(v_1.AuxInt)
30027 v.AuxInt = int32ToAuxInt(c - d)
30030 // match: (Sub32 x (Const32 <t> [c]))
30031 // cond: x.Op != OpConst32
30032 // result: (Add32 (Const32 <t> [-c]) x)
30035 if v_1.Op != OpConst32 {
30039 c := auxIntToInt32(v_1.AuxInt)
30040 if !(x.Op != OpConst32) {
30044 v0 := b.NewValue0(v.Pos, OpConst32, t)
30045 v0.AuxInt = int32ToAuxInt(-c)
30049 // match: (Sub32 <t> (Mul32 x y) (Mul32 x z))
30050 // result: (Mul32 x (Sub32 <t> y z))
// The nested _i0/_i1 loops try both argument orders of the commutative
// Mul32 operands so the shared factor x is found regardless of position.
30053 if v_0.Op != OpMul32 {
30057 v_0_0 := v_0.Args[0]
30058 v_0_1 := v_0.Args[1]
30059 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
30062 if v_1.Op != OpMul32 {
30066 v_1_0 := v_1.Args[0]
30067 v_1_1 := v_1.Args[1]
30068 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
30074 v0 := b.NewValue0(v.Pos, OpSub32, t)
30082 // match: (Sub32 x x)
30083 // result: (Const32 [0])
30090 v.AuxInt = int32ToAuxInt(0)
30093 // match: (Sub32 (Neg32 x) (Com32 x))
30094 // result: (Const32 [1])
30096 if v_0.Op != OpNeg32 {
30100 if v_1.Op != OpCom32 || x != v_1.Args[0] {
30104 v.AuxInt = int32ToAuxInt(1)
30107 // match: (Sub32 (Com32 x) (Neg32 x))
30108 // result: (Const32 [-1])
30110 if v_0.Op != OpCom32 {
30114 if v_1.Op != OpNeg32 || x != v_1.Args[0] {
30118 v.AuxInt = int32ToAuxInt(-1)
30121 // match: (Sub32 (Add32 t x) (Add32 t y))
30122 // result: (Sub32 x y)
30124 if v_0.Op != OpAdd32 {
30128 v_0_0 := v_0.Args[0]
30129 v_0_1 := v_0.Args[1]
30130 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
30133 if v_1.Op != OpAdd32 {
30137 v_1_0 := v_1.Args[0]
30138 v_1_1 := v_1.Args[1]
30139 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
30151 // match: (Sub32 (Add32 x y) x)
30154 if v_0.Op != OpAdd32 {
30158 v_0_0 := v_0.Args[0]
30159 v_0_1 := v_0.Args[1]
30160 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
30171 // match: (Sub32 (Add32 x y) y)
30174 if v_0.Op != OpAdd32 {
30178 v_0_0 := v_0.Args[0]
30179 v_0_1 := v_0.Args[1]
30180 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
30191 // match: (Sub32 (Sub32 x y) x)
30192 // result: (Neg32 y)
30194 if v_0.Op != OpSub32 {
30206 // match: (Sub32 x (Add32 x y))
30207 // result: (Neg32 y)
30210 if v_1.Op != OpAdd32 {
30214 v_1_0 := v_1.Args[0]
30215 v_1_1 := v_1.Args[1]
30216 for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
// Reassociation rules below require both the other operands to be
// non-constant (z.Op != OpConst32 && x.Op != OpConst32) to avoid
// rewrite loops between this rule set and the constant-folding rules.
30227 // match: (Sub32 x (Sub32 i:(Const32 <t>) z))
30228 // cond: (z.Op != OpConst32 && x.Op != OpConst32)
30229 // result: (Sub32 (Add32 <t> x z) i)
30232 if v_1.Op != OpSub32 {
30237 if i.Op != OpConst32 {
30241 if !(z.Op != OpConst32 && x.Op != OpConst32) {
30245 v0 := b.NewValue0(v.Pos, OpAdd32, t)
30250 // match: (Sub32 x (Add32 z i:(Const32 <t>)))
30251 // cond: (z.Op != OpConst32 && x.Op != OpConst32)
30252 // result: (Sub32 (Sub32 <t> x z) i)
30255 if v_1.Op != OpAdd32 {
30259 v_1_0 := v_1.Args[0]
30260 v_1_1 := v_1.Args[1]
30261 for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
30264 if i.Op != OpConst32 {
30268 if !(z.Op != OpConst32 && x.Op != OpConst32) {
30272 v0 := b.NewValue0(v.Pos, OpSub32, t)
30279 // match: (Sub32 (Sub32 i:(Const32 <t>) z) x)
30280 // cond: (z.Op != OpConst32 && x.Op != OpConst32)
30281 // result: (Sub32 i (Add32 <t> z x))
30283 if v_0.Op != OpSub32 {
30288 if i.Op != OpConst32 {
30293 if !(z.Op != OpConst32 && x.Op != OpConst32) {
30297 v0 := b.NewValue0(v.Pos, OpAdd32, t)
30302 // match: (Sub32 (Add32 z i:(Const32 <t>)) x)
30303 // cond: (z.Op != OpConst32 && x.Op != OpConst32)
30304 // result: (Add32 i (Sub32 <t> z x))
30306 if v_0.Op != OpAdd32 {
30310 v_0_0 := v_0.Args[0]
30311 v_0_1 := v_0.Args[1]
30312 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
30315 if i.Op != OpConst32 {
30320 if !(z.Op != OpConst32 && x.Op != OpConst32) {
30324 v0 := b.NewValue0(v.Pos, OpSub32, t)
30331 // match: (Sub32 (Const32 <t> [c]) (Sub32 (Const32 <t> [d]) x))
30332 // result: (Add32 (Const32 <t> [c-d]) x)
30334 if v_0.Op != OpConst32 {
30338 c := auxIntToInt32(v_0.AuxInt)
30339 if v_1.Op != OpSub32 {
30343 v_1_0 := v_1.Args[0]
30344 if v_1_0.Op != OpConst32 || v_1_0.Type != t {
30347 d := auxIntToInt32(v_1_0.AuxInt)
30349 v0 := b.NewValue0(v.Pos, OpConst32, t)
30350 v0.AuxInt = int32ToAuxInt(c - d)
30354 // match: (Sub32 (Const32 <t> [c]) (Add32 (Const32 <t> [d]) x))
30355 // result: (Sub32 (Const32 <t> [c-d]) x)
30357 if v_0.Op != OpConst32 {
30361 c := auxIntToInt32(v_0.AuxInt)
30362 if v_1.Op != OpAdd32 {
30366 v_1_0 := v_1.Args[0]
30367 v_1_1 := v_1.Args[1]
30368 for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
30369 if v_1_0.Op != OpConst32 || v_1_0.Type != t {
30372 d := auxIntToInt32(v_1_0.AuxInt)
30375 v0 := b.NewValue0(v.Pos, OpConst32, t)
30376 v0.AuxInt = int32ToAuxInt(c - d)
// rewriteValuegeneric_OpSub32F constant-folds Sub32F of two float32
// constants. It reports whether v was rewritten.
// Generated from _gen/generic.rules; edit the rules file, not this code.
30384 func rewriteValuegeneric_OpSub32F(v *Value) bool {
30387 // match: (Sub32F (Const32F [c]) (Const32F [d]))
// The cond c-d == c-d is false exactly when the difference is NaN
// (NaN compares unequal to itself), so NaN results are never folded.
30388 // cond: c-d == c-d
30389 // result: (Const32F [c-d])
30391 if v_0.Op != OpConst32F {
30394 c := auxIntToFloat32(v_0.AuxInt)
30395 if v_1.Op != OpConst32F {
30398 d := auxIntToFloat32(v_1.AuxInt)
30402 v.reset(OpConst32F)
30403 v.AuxInt = float32ToAuxInt(c - d)
// rewriteValuegeneric_OpSub64 applies the generic rewrite rules for Sub64
// values: constant folding, canonicalizing x-c into c'+x, algebraic
// simplifications (x-x, factoring a common Mul64 operand, Neg/Com pairs),
// and reassociation that floats constant operands outward so later rules
// can fold them. It reports whether v was rewritten.
// Generated from _gen/generic.rules; edit the rules file, not this code.
30408 func rewriteValuegeneric_OpSub64(v *Value) bool {
30412 // match: (Sub64 (Const64 [c]) (Const64 [d]))
30413 // result: (Const64 [c-d])
30415 if v_0.Op != OpConst64 {
30418 c := auxIntToInt64(v_0.AuxInt)
30419 if v_1.Op != OpConst64 {
30422 d := auxIntToInt64(v_1.AuxInt)
30424 v.AuxInt = int64ToAuxInt(c - d)
30427 // match: (Sub64 x (Const64 <t> [c]))
30428 // cond: x.Op != OpConst64
30429 // result: (Add64 (Const64 <t> [-c]) x)
30432 if v_1.Op != OpConst64 {
30436 c := auxIntToInt64(v_1.AuxInt)
30437 if !(x.Op != OpConst64) {
30441 v0 := b.NewValue0(v.Pos, OpConst64, t)
30442 v0.AuxInt = int64ToAuxInt(-c)
30446 // match: (Sub64 <t> (Mul64 x y) (Mul64 x z))
30447 // result: (Mul64 x (Sub64 <t> y z))
// The nested _i0/_i1 loops try both argument orders of the commutative
// Mul64 operands so the shared factor x is found regardless of position.
30450 if v_0.Op != OpMul64 {
30454 v_0_0 := v_0.Args[0]
30455 v_0_1 := v_0.Args[1]
30456 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
30459 if v_1.Op != OpMul64 {
30463 v_1_0 := v_1.Args[0]
30464 v_1_1 := v_1.Args[1]
30465 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
30471 v0 := b.NewValue0(v.Pos, OpSub64, t)
30479 // match: (Sub64 x x)
30480 // result: (Const64 [0])
30487 v.AuxInt = int64ToAuxInt(0)
30490 // match: (Sub64 (Neg64 x) (Com64 x))
30491 // result: (Const64 [1])
30493 if v_0.Op != OpNeg64 {
30497 if v_1.Op != OpCom64 || x != v_1.Args[0] {
30501 v.AuxInt = int64ToAuxInt(1)
30504 // match: (Sub64 (Com64 x) (Neg64 x))
30505 // result: (Const64 [-1])
30507 if v_0.Op != OpCom64 {
30511 if v_1.Op != OpNeg64 || x != v_1.Args[0] {
30515 v.AuxInt = int64ToAuxInt(-1)
30518 // match: (Sub64 (Add64 t x) (Add64 t y))
30519 // result: (Sub64 x y)
30521 if v_0.Op != OpAdd64 {
30525 v_0_0 := v_0.Args[0]
30526 v_0_1 := v_0.Args[1]
30527 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
30530 if v_1.Op != OpAdd64 {
30534 v_1_0 := v_1.Args[0]
30535 v_1_1 := v_1.Args[1]
30536 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
30548 // match: (Sub64 (Add64 x y) x)
30551 if v_0.Op != OpAdd64 {
30555 v_0_0 := v_0.Args[0]
30556 v_0_1 := v_0.Args[1]
30557 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
30568 // match: (Sub64 (Add64 x y) y)
30571 if v_0.Op != OpAdd64 {
30575 v_0_0 := v_0.Args[0]
30576 v_0_1 := v_0.Args[1]
30577 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
30588 // match: (Sub64 (Sub64 x y) x)
30589 // result: (Neg64 y)
30591 if v_0.Op != OpSub64 {
30603 // match: (Sub64 x (Add64 x y))
30604 // result: (Neg64 y)
30607 if v_1.Op != OpAdd64 {
30611 v_1_0 := v_1.Args[0]
30612 v_1_1 := v_1.Args[1]
30613 for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
// Reassociation rules below require both the other operands to be
// non-constant (z.Op != OpConst64 && x.Op != OpConst64) to avoid
// rewrite loops between this rule set and the constant-folding rules.
30624 // match: (Sub64 x (Sub64 i:(Const64 <t>) z))
30625 // cond: (z.Op != OpConst64 && x.Op != OpConst64)
30626 // result: (Sub64 (Add64 <t> x z) i)
30629 if v_1.Op != OpSub64 {
30634 if i.Op != OpConst64 {
30638 if !(z.Op != OpConst64 && x.Op != OpConst64) {
30642 v0 := b.NewValue0(v.Pos, OpAdd64, t)
30647 // match: (Sub64 x (Add64 z i:(Const64 <t>)))
30648 // cond: (z.Op != OpConst64 && x.Op != OpConst64)
30649 // result: (Sub64 (Sub64 <t> x z) i)
30652 if v_1.Op != OpAdd64 {
30656 v_1_0 := v_1.Args[0]
30657 v_1_1 := v_1.Args[1]
30658 for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
30661 if i.Op != OpConst64 {
30665 if !(z.Op != OpConst64 && x.Op != OpConst64) {
30669 v0 := b.NewValue0(v.Pos, OpSub64, t)
30676 // match: (Sub64 (Sub64 i:(Const64 <t>) z) x)
30677 // cond: (z.Op != OpConst64 && x.Op != OpConst64)
30678 // result: (Sub64 i (Add64 <t> z x))
30680 if v_0.Op != OpSub64 {
30685 if i.Op != OpConst64 {
30690 if !(z.Op != OpConst64 && x.Op != OpConst64) {
30694 v0 := b.NewValue0(v.Pos, OpAdd64, t)
30699 // match: (Sub64 (Add64 z i:(Const64 <t>)) x)
30700 // cond: (z.Op != OpConst64 && x.Op != OpConst64)
30701 // result: (Add64 i (Sub64 <t> z x))
30703 if v_0.Op != OpAdd64 {
30707 v_0_0 := v_0.Args[0]
30708 v_0_1 := v_0.Args[1]
30709 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
30712 if i.Op != OpConst64 {
30717 if !(z.Op != OpConst64 && x.Op != OpConst64) {
30721 v0 := b.NewValue0(v.Pos, OpSub64, t)
30728 // match: (Sub64 (Const64 <t> [c]) (Sub64 (Const64 <t> [d]) x))
30729 // result: (Add64 (Const64 <t> [c-d]) x)
30731 if v_0.Op != OpConst64 {
30735 c := auxIntToInt64(v_0.AuxInt)
30736 if v_1.Op != OpSub64 {
30740 v_1_0 := v_1.Args[0]
30741 if v_1_0.Op != OpConst64 || v_1_0.Type != t {
30744 d := auxIntToInt64(v_1_0.AuxInt)
30746 v0 := b.NewValue0(v.Pos, OpConst64, t)
30747 v0.AuxInt = int64ToAuxInt(c - d)
30751 // match: (Sub64 (Const64 <t> [c]) (Add64 (Const64 <t> [d]) x))
30752 // result: (Sub64 (Const64 <t> [c-d]) x)
30754 if v_0.Op != OpConst64 {
30758 c := auxIntToInt64(v_0.AuxInt)
30759 if v_1.Op != OpAdd64 {
30763 v_1_0 := v_1.Args[0]
30764 v_1_1 := v_1.Args[1]
30765 for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
30766 if v_1_0.Op != OpConst64 || v_1_0.Type != t {
30769 d := auxIntToInt64(v_1_0.AuxInt)
30772 v0 := b.NewValue0(v.Pos, OpConst64, t)
30773 v0.AuxInt = int64ToAuxInt(c - d)
// rewriteValuegeneric_OpSub64F constant-folds Sub64F of two float64
// constants. It reports whether v was rewritten.
// Generated from _gen/generic.rules; edit the rules file, not this code.
30781 func rewriteValuegeneric_OpSub64F(v *Value) bool {
30784 // match: (Sub64F (Const64F [c]) (Const64F [d]))
// The cond c-d == c-d is false exactly when the difference is NaN
// (NaN compares unequal to itself), so NaN results are never folded.
30785 // cond: c-d == c-d
30786 // result: (Const64F [c-d])
30788 if v_0.Op != OpConst64F {
30791 c := auxIntToFloat64(v_0.AuxInt)
30792 if v_1.Op != OpConst64F {
30795 d := auxIntToFloat64(v_1.AuxInt)
30799 v.reset(OpConst64F)
30800 v.AuxInt = float64ToAuxInt(c - d)
// rewriteValuegeneric_OpSub8 applies the generic rewrite rules for Sub8
// values: constant folding, canonicalizing x-c into c'+x, algebraic
// simplifications (x-x, factoring a common Mul8 operand, Neg/Com pairs),
// and reassociation that floats constant operands outward so later rules
// can fold them. It reports whether v was rewritten.
// Generated from _gen/generic.rules; edit the rules file, not this code.
30805 func rewriteValuegeneric_OpSub8(v *Value) bool {
30809 // match: (Sub8 (Const8 [c]) (Const8 [d]))
30810 // result: (Const8 [c-d])
30812 if v_0.Op != OpConst8 {
30815 c := auxIntToInt8(v_0.AuxInt)
30816 if v_1.Op != OpConst8 {
30819 d := auxIntToInt8(v_1.AuxInt)
30821 v.AuxInt = int8ToAuxInt(c - d)
30824 // match: (Sub8 x (Const8 <t> [c]))
30825 // cond: x.Op != OpConst8
30826 // result: (Add8 (Const8 <t> [-c]) x)
30829 if v_1.Op != OpConst8 {
30833 c := auxIntToInt8(v_1.AuxInt)
30834 if !(x.Op != OpConst8) {
30838 v0 := b.NewValue0(v.Pos, OpConst8, t)
30839 v0.AuxInt = int8ToAuxInt(-c)
30843 // match: (Sub8 <t> (Mul8 x y) (Mul8 x z))
30844 // result: (Mul8 x (Sub8 <t> y z))
// The nested _i0/_i1 loops try both argument orders of the commutative
// Mul8 operands so the shared factor x is found regardless of position.
30847 if v_0.Op != OpMul8 {
30851 v_0_0 := v_0.Args[0]
30852 v_0_1 := v_0.Args[1]
30853 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
30856 if v_1.Op != OpMul8 {
30860 v_1_0 := v_1.Args[0]
30861 v_1_1 := v_1.Args[1]
30862 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
30868 v0 := b.NewValue0(v.Pos, OpSub8, t)
30876 // match: (Sub8 x x)
30877 // result: (Const8 [0])
30884 v.AuxInt = int8ToAuxInt(0)
30887 // match: (Sub8 (Neg8 x) (Com8 x))
30888 // result: (Const8 [1])
30890 if v_0.Op != OpNeg8 {
30894 if v_1.Op != OpCom8 || x != v_1.Args[0] {
30898 v.AuxInt = int8ToAuxInt(1)
30901 // match: (Sub8 (Com8 x) (Neg8 x))
30902 // result: (Const8 [-1])
30904 if v_0.Op != OpCom8 {
30908 if v_1.Op != OpNeg8 || x != v_1.Args[0] {
30912 v.AuxInt = int8ToAuxInt(-1)
30915 // match: (Sub8 (Add8 t x) (Add8 t y))
30916 // result: (Sub8 x y)
30918 if v_0.Op != OpAdd8 {
30922 v_0_0 := v_0.Args[0]
30923 v_0_1 := v_0.Args[1]
30924 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
30927 if v_1.Op != OpAdd8 {
30931 v_1_0 := v_1.Args[0]
30932 v_1_1 := v_1.Args[1]
30933 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
30945 // match: (Sub8 (Add8 x y) x)
30948 if v_0.Op != OpAdd8 {
30952 v_0_0 := v_0.Args[0]
30953 v_0_1 := v_0.Args[1]
30954 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
30965 // match: (Sub8 (Add8 x y) y)
30968 if v_0.Op != OpAdd8 {
30972 v_0_0 := v_0.Args[0]
30973 v_0_1 := v_0.Args[1]
30974 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
30985 // match: (Sub8 (Sub8 x y) x)
30986 // result: (Neg8 y)
30988 if v_0.Op != OpSub8 {
31000 // match: (Sub8 x (Add8 x y))
31001 // result: (Neg8 y)
31004 if v_1.Op != OpAdd8 {
31008 v_1_0 := v_1.Args[0]
31009 v_1_1 := v_1.Args[1]
31010 for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
// Reassociation rules below require both the other operands to be
// non-constant (z.Op != OpConst8 && x.Op != OpConst8) to avoid
// rewrite loops between this rule set and the constant-folding rules.
31021 // match: (Sub8 x (Sub8 i:(Const8 <t>) z))
31022 // cond: (z.Op != OpConst8 && x.Op != OpConst8)
31023 // result: (Sub8 (Add8 <t> x z) i)
31026 if v_1.Op != OpSub8 {
31031 if i.Op != OpConst8 {
31035 if !(z.Op != OpConst8 && x.Op != OpConst8) {
31039 v0 := b.NewValue0(v.Pos, OpAdd8, t)
31044 // match: (Sub8 x (Add8 z i:(Const8 <t>)))
31045 // cond: (z.Op != OpConst8 && x.Op != OpConst8)
31046 // result: (Sub8 (Sub8 <t> x z) i)
31049 if v_1.Op != OpAdd8 {
31053 v_1_0 := v_1.Args[0]
31054 v_1_1 := v_1.Args[1]
31055 for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
31058 if i.Op != OpConst8 {
31062 if !(z.Op != OpConst8 && x.Op != OpConst8) {
31066 v0 := b.NewValue0(v.Pos, OpSub8, t)
31073 // match: (Sub8 (Sub8 i:(Const8 <t>) z) x)
31074 // cond: (z.Op != OpConst8 && x.Op != OpConst8)
31075 // result: (Sub8 i (Add8 <t> z x))
31077 if v_0.Op != OpSub8 {
31082 if i.Op != OpConst8 {
31087 if !(z.Op != OpConst8 && x.Op != OpConst8) {
31091 v0 := b.NewValue0(v.Pos, OpAdd8, t)
31096 // match: (Sub8 (Add8 z i:(Const8 <t>)) x)
31097 // cond: (z.Op != OpConst8 && x.Op != OpConst8)
31098 // result: (Add8 i (Sub8 <t> z x))
31100 if v_0.Op != OpAdd8 {
31104 v_0_0 := v_0.Args[0]
31105 v_0_1 := v_0.Args[1]
31106 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
31109 if i.Op != OpConst8 {
31114 if !(z.Op != OpConst8 && x.Op != OpConst8) {
31118 v0 := b.NewValue0(v.Pos, OpSub8, t)
31125 // match: (Sub8 (Const8 <t> [c]) (Sub8 (Const8 <t> [d]) x))
31126 // result: (Add8 (Const8 <t> [c-d]) x)
31128 if v_0.Op != OpConst8 {
31132 c := auxIntToInt8(v_0.AuxInt)
31133 if v_1.Op != OpSub8 {
31137 v_1_0 := v_1.Args[0]
31138 if v_1_0.Op != OpConst8 || v_1_0.Type != t {
31141 d := auxIntToInt8(v_1_0.AuxInt)
31143 v0 := b.NewValue0(v.Pos, OpConst8, t)
31144 v0.AuxInt = int8ToAuxInt(c - d)
31148 // match: (Sub8 (Const8 <t> [c]) (Add8 (Const8 <t> [d]) x))
31149 // result: (Sub8 (Const8 <t> [c-d]) x)
31151 if v_0.Op != OpConst8 {
31155 c := auxIntToInt8(v_0.AuxInt)
31156 if v_1.Op != OpAdd8 {
31160 v_1_0 := v_1.Args[0]
31161 v_1_1 := v_1.Args[1]
31162 for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
31163 if v_1_0.Op != OpConst8 || v_1_0.Type != t {
31166 d := auxIntToInt8(v_1_0.AuxInt)
31169 v0 := b.NewValue0(v.Pos, OpConst8, t)
31170 v0.AuxInt = int8ToAuxInt(c - d)
// rewriteValuegeneric_OpTrunc constant-folds the floating-point Trunc
// (round toward zero) of a float64 constant using math.Trunc. It reports
// whether v was rewritten.
// Generated from _gen/generic.rules; edit the rules file, not this code.
31178 func rewriteValuegeneric_OpTrunc(v *Value) bool {
31180 // match: (Trunc (Const64F [c]))
31181 // result: (Const64F [math.Trunc(c)])
31183 if v_0.Op != OpConst64F {
31186 c := auxIntToFloat64(v_0.AuxInt)
31187 v.reset(OpConst64F)
31188 v.AuxInt = float64ToAuxInt(math.Trunc(c))
// rewriteValuegeneric_OpTrunc16to8 simplifies 16->8 bit truncations:
// fold a constant operand, cancel a preceding 8->16 zero/sign extension,
// and drop an And16 mask that already preserves the low 8 bits. It
// reports whether v was rewritten.
// Generated from _gen/generic.rules; edit the rules file, not this code.
31193 func rewriteValuegeneric_OpTrunc16to8(v *Value) bool {
31195 // match: (Trunc16to8 (Const16 [c]))
31196 // result: (Const8 [int8(c)])
31198 if v_0.Op != OpConst16 {
31201 c := auxIntToInt16(v_0.AuxInt)
31203 v.AuxInt = int8ToAuxInt(int8(c))
31206 // match: (Trunc16to8 (ZeroExt8to16 x))
31209 if v_0.Op != OpZeroExt8to16 {
31216 // match: (Trunc16to8 (SignExt8to16 x))
31219 if v_0.Op != OpSignExt8to16 {
31226 // match: (Trunc16to8 (And16 (Const16 [y]) x))
// A mask whose low 8 bits are all set cannot change the truncated
// result, so the And16 can be bypassed.
31227 // cond: y&0xFF == 0xFF
31228 // result: (Trunc16to8 x)
31230 if v_0.Op != OpAnd16 {
31234 v_0_0 := v_0.Args[0]
31235 v_0_1 := v_0.Args[1]
31236 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
31237 if v_0_0.Op != OpConst16 {
31240 y := auxIntToInt16(v_0_0.AuxInt)
31242 if !(y&0xFF == 0xFF) {
31245 v.reset(OpTrunc16to8)
// rewriteValuegeneric_OpTrunc32to16 simplifies 32->16 bit truncations:
// fold a constant operand, shorten or cancel preceding zero/sign
// extensions, and drop an And32 mask that already preserves the low
// 16 bits. It reports whether v was rewritten.
// Generated from _gen/generic.rules; edit the rules file, not this code.
31253 func rewriteValuegeneric_OpTrunc32to16(v *Value) bool {
31255 // match: (Trunc32to16 (Const32 [c]))
31256 // result: (Const16 [int16(c)])
31258 if v_0.Op != OpConst32 {
31261 c := auxIntToInt32(v_0.AuxInt)
31263 v.AuxInt = int16ToAuxInt(int16(c))
31266 // match: (Trunc32to16 (ZeroExt8to32 x))
31267 // result: (ZeroExt8to16 x)
31269 if v_0.Op != OpZeroExt8to32 {
31273 v.reset(OpZeroExt8to16)
31277 // match: (Trunc32to16 (ZeroExt16to32 x))
31280 if v_0.Op != OpZeroExt16to32 {
31287 // match: (Trunc32to16 (SignExt8to32 x))
31288 // result: (SignExt8to16 x)
31290 if v_0.Op != OpSignExt8to32 {
31294 v.reset(OpSignExt8to16)
31298 // match: (Trunc32to16 (SignExt16to32 x))
31301 if v_0.Op != OpSignExt16to32 {
31308 // match: (Trunc32to16 (And32 (Const32 [y]) x))
// A mask whose low 16 bits are all set cannot change the truncated
// result, so the And32 can be bypassed.
31309 // cond: y&0xFFFF == 0xFFFF
31310 // result: (Trunc32to16 x)
31312 if v_0.Op != OpAnd32 {
31316 v_0_0 := v_0.Args[0]
31317 v_0_1 := v_0.Args[1]
31318 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
31319 if v_0_0.Op != OpConst32 {
31322 y := auxIntToInt32(v_0_0.AuxInt)
31324 if !(y&0xFFFF == 0xFFFF) {
31327 v.reset(OpTrunc32to16)
// rewriteValuegeneric_OpTrunc32to8 simplifies 32->8 bit truncations:
// fold a constant operand, cancel a preceding 8->32 zero/sign extension,
// and drop an And32 mask that already preserves the low 8 bits. It
// reports whether v was rewritten.
// Generated from _gen/generic.rules; edit the rules file, not this code.
31335 func rewriteValuegeneric_OpTrunc32to8(v *Value) bool {
31337 // match: (Trunc32to8 (Const32 [c]))
31338 // result: (Const8 [int8(c)])
31340 if v_0.Op != OpConst32 {
31343 c := auxIntToInt32(v_0.AuxInt)
31345 v.AuxInt = int8ToAuxInt(int8(c))
31348 // match: (Trunc32to8 (ZeroExt8to32 x))
31351 if v_0.Op != OpZeroExt8to32 {
31358 // match: (Trunc32to8 (SignExt8to32 x))
31361 if v_0.Op != OpSignExt8to32 {
31368 // match: (Trunc32to8 (And32 (Const32 [y]) x))
// A mask whose low 8 bits are all set cannot change the truncated
// result, so the And32 can be bypassed.
31369 // cond: y&0xFF == 0xFF
31370 // result: (Trunc32to8 x)
31372 if v_0.Op != OpAnd32 {
31376 v_0_0 := v_0.Args[0]
31377 v_0_1 := v_0.Args[1]
31378 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
31379 if v_0_0.Op != OpConst32 {
31382 y := auxIntToInt32(v_0_0.AuxInt)
31384 if !(y&0xFF == 0xFF) {
31387 v.reset(OpTrunc32to8)
// rewriteValuegeneric_OpTrunc64to16 simplifies 64->16 bit truncations:
// fold a constant operand, shorten or cancel preceding zero/sign
// extensions, and drop an And64 mask that already preserves the low
// 16 bits. It reports whether v was rewritten.
// Generated from _gen/generic.rules; edit the rules file, not this code.
31395 func rewriteValuegeneric_OpTrunc64to16(v *Value) bool {
31397 // match: (Trunc64to16 (Const64 [c]))
31398 // result: (Const16 [int16(c)])
31400 if v_0.Op != OpConst64 {
31403 c := auxIntToInt64(v_0.AuxInt)
31405 v.AuxInt = int16ToAuxInt(int16(c))
31408 // match: (Trunc64to16 (ZeroExt8to64 x))
31409 // result: (ZeroExt8to16 x)
31411 if v_0.Op != OpZeroExt8to64 {
31415 v.reset(OpZeroExt8to16)
31419 // match: (Trunc64to16 (ZeroExt16to64 x))
31422 if v_0.Op != OpZeroExt16to64 {
31429 // match: (Trunc64to16 (SignExt8to64 x))
31430 // result: (SignExt8to16 x)
31432 if v_0.Op != OpSignExt8to64 {
31436 v.reset(OpSignExt8to16)
31440 // match: (Trunc64to16 (SignExt16to64 x))
31443 if v_0.Op != OpSignExt16to64 {
31450 // match: (Trunc64to16 (And64 (Const64 [y]) x))
// A mask whose low 16 bits are all set cannot change the truncated
// result, so the And64 can be bypassed.
31451 // cond: y&0xFFFF == 0xFFFF
31452 // result: (Trunc64to16 x)
31454 if v_0.Op != OpAnd64 {
31458 v_0_0 := v_0.Args[0]
31459 v_0_1 := v_0.Args[1]
31460 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
31461 if v_0_0.Op != OpConst64 {
31464 y := auxIntToInt64(v_0_0.AuxInt)
31466 if !(y&0xFFFF == 0xFFFF) {
31469 v.reset(OpTrunc64to16)
// rewriteValuegeneric_OpTrunc64to32 simplifies 64->32 bit truncations:
// fold a constant operand, shorten or cancel preceding zero/sign
// extensions, and drop an And64 mask that already preserves the low
// 32 bits. It reports whether v was rewritten.
// Generated from _gen/generic.rules; edit the rules file, not this code.
31477 func rewriteValuegeneric_OpTrunc64to32(v *Value) bool {
31479 // match: (Trunc64to32 (Const64 [c]))
31480 // result: (Const32 [int32(c)])
31482 if v_0.Op != OpConst64 {
31485 c := auxIntToInt64(v_0.AuxInt)
31487 v.AuxInt = int32ToAuxInt(int32(c))
31490 // match: (Trunc64to32 (ZeroExt8to64 x))
31491 // result: (ZeroExt8to32 x)
31493 if v_0.Op != OpZeroExt8to64 {
31497 v.reset(OpZeroExt8to32)
31501 // match: (Trunc64to32 (ZeroExt16to64 x))
31502 // result: (ZeroExt16to32 x)
31504 if v_0.Op != OpZeroExt16to64 {
31508 v.reset(OpZeroExt16to32)
31512 // match: (Trunc64to32 (ZeroExt32to64 x))
31515 if v_0.Op != OpZeroExt32to64 {
31522 // match: (Trunc64to32 (SignExt8to64 x))
31523 // result: (SignExt8to32 x)
31525 if v_0.Op != OpSignExt8to64 {
31529 v.reset(OpSignExt8to32)
31533 // match: (Trunc64to32 (SignExt16to64 x))
31534 // result: (SignExt16to32 x)
31536 if v_0.Op != OpSignExt16to64 {
31540 v.reset(OpSignExt16to32)
31544 // match: (Trunc64to32 (SignExt32to64 x))
31547 if v_0.Op != OpSignExt32to64 {
31554 // match: (Trunc64to32 (And64 (Const64 [y]) x))
// A mask whose low 32 bits are all set cannot change the truncated
// result, so the And64 can be bypassed.
31555 // cond: y&0xFFFFFFFF == 0xFFFFFFFF
31556 // result: (Trunc64to32 x)
31558 if v_0.Op != OpAnd64 {
31562 v_0_0 := v_0.Args[0]
31563 v_0_1 := v_0.Args[1]
31564 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
31565 if v_0_0.Op != OpConst64 {
31568 y := auxIntToInt64(v_0_0.AuxInt)
31570 if !(y&0xFFFFFFFF == 0xFFFFFFFF) {
31573 v.reset(OpTrunc64to32)
// rewriteValuegeneric_OpTrunc64to8 simplifies 64->8 bit truncations:
// fold a constant operand, cancel a preceding 8->64 zero/sign extension,
// and drop an And64 mask that already preserves the low 8 bits. It
// reports whether v was rewritten.
// Generated from _gen/generic.rules; edit the rules file, not this code.
31581 func rewriteValuegeneric_OpTrunc64to8(v *Value) bool {
31583 // match: (Trunc64to8 (Const64 [c]))
31584 // result: (Const8 [int8(c)])
31586 if v_0.Op != OpConst64 {
31589 c := auxIntToInt64(v_0.AuxInt)
31591 v.AuxInt = int8ToAuxInt(int8(c))
31594 // match: (Trunc64to8 (ZeroExt8to64 x))
31597 if v_0.Op != OpZeroExt8to64 {
31604 // match: (Trunc64to8 (SignExt8to64 x))
31607 if v_0.Op != OpSignExt8to64 {
31614 // match: (Trunc64to8 (And64 (Const64 [y]) x))
// A mask whose low 8 bits are all set cannot change the truncated
// result, so the And64 can be bypassed.
31615 // cond: y&0xFF == 0xFF
31616 // result: (Trunc64to8 x)
31618 if v_0.Op != OpAnd64 {
31622 v_0_0 := v_0.Args[0]
31623 v_0_1 := v_0.Args[1]
31624 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
31625 if v_0_0.Op != OpConst64 {
31628 y := auxIntToInt64(v_0_0.AuxInt)
31630 if !(y&0xFF == 0xFF) {
31633 v.reset(OpTrunc64to8)
// rewriteValuegeneric_OpXor16 applies the generic rewrite rules for Xor16
// values: constant folding, identities (x^x=0, x^0=x, x^Com(x)=-1,
// x^-1=Com(x)), reassociation of constant operands, and recognition of
// Lsh/Rsh shift pairs as RotateLeft16 when the target supports rotates
// (canRotate). Because Xor16 is commutative, the outer _i0 loops retry
// each pattern with v's arguments swapped. It reports whether v was
// rewritten.
// Generated from _gen/generic.rules; edit the rules file, not this code.
31641 func rewriteValuegeneric_OpXor16(v *Value) bool {
31645 config := b.Func.Config
31646 // match: (Xor16 (Const16 [c]) (Const16 [d]))
31647 // result: (Const16 [c^d])
31649 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
31650 if v_0.Op != OpConst16 {
31653 c := auxIntToInt16(v_0.AuxInt)
31654 if v_1.Op != OpConst16 {
31657 d := auxIntToInt16(v_1.AuxInt)
31659 v.AuxInt = int16ToAuxInt(c ^ d)
31664 // match: (Xor16 x x)
31665 // result: (Const16 [0])
31672 v.AuxInt = int16ToAuxInt(0)
31675 // match: (Xor16 (Const16 [0]) x)
31678 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
31679 if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
31688 // match: (Xor16 (Com16 x) x)
31689 // result: (Const16 [-1])
31691 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
31692 if v_0.Op != OpCom16 {
31700 v.AuxInt = int16ToAuxInt(-1)
31705 // match: (Xor16 (Const16 [-1]) x)
31706 // result: (Com16 x)
31708 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
31709 if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != -1 {
31719 // match: (Xor16 x (Xor16 x y))
31722 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
31724 if v_1.Op != OpXor16 {
31728 v_1_0 := v_1.Args[0]
31729 v_1_1 := v_1.Args[1]
31730 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
// Reassociation rules below require the other operands to be
// non-constant to avoid rewrite loops with the folding rules above.
31741 // match: (Xor16 (Xor16 i:(Const16 <t>) z) x)
31742 // cond: (z.Op != OpConst16 && x.Op != OpConst16)
31743 // result: (Xor16 i (Xor16 <t> z x))
31745 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
31746 if v_0.Op != OpXor16 {
31750 v_0_0 := v_0.Args[0]
31751 v_0_1 := v_0.Args[1]
31752 for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
31754 if i.Op != OpConst16 {
31760 if !(z.Op != OpConst16 && x.Op != OpConst16) {
31764 v0 := b.NewValue0(v.Pos, OpXor16, t)
31772 // match: (Xor16 (Const16 <t> [c]) (Xor16 (Const16 <t> [d]) x))
31773 // result: (Xor16 (Const16 <t> [c^d]) x)
31775 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
31776 if v_0.Op != OpConst16 {
31780 c := auxIntToInt16(v_0.AuxInt)
31781 if v_1.Op != OpXor16 {
31785 v_1_0 := v_1.Args[0]
31786 v_1_1 := v_1.Args[1]
31787 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
31788 if v_1_0.Op != OpConst16 || v_1_0.Type != t {
31791 d := auxIntToInt16(v_1_0.AuxInt)
31794 v0 := b.NewValue0(v.Pos, OpConst16, t)
31795 v0.AuxInt = int16ToAuxInt(c ^ d)
// Rotate recognition: a left shift by c xor'd with an unsigned right
// shift by 16-c of the same value is a 16-bit rotate. The variable-shift
// forms additionally require the shift amount to be provably in range
// (shiftIsBounded on either side).
31802 // match: (Xor16 (Lsh16x64 x z:(Const64 <t> [c])) (Rsh16Ux64 x (Const64 [d])))
31803 // cond: c < 16 && d == 16-c && canRotate(config, 16)
31804 // result: (RotateLeft16 x z)
31806 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
31807 if v_0.Op != OpLsh16x64 {
31813 if z.Op != OpConst64 {
31816 c := auxIntToInt64(z.AuxInt)
31817 if v_1.Op != OpRsh16Ux64 {
31821 if x != v_1.Args[0] {
31824 v_1_1 := v_1.Args[1]
31825 if v_1_1.Op != OpConst64 {
31828 d := auxIntToInt64(v_1_1.AuxInt)
31829 if !(c < 16 && d == 16-c && canRotate(config, 16)) {
31832 v.reset(OpRotateLeft16)
31838 // match: (Xor16 left:(Lsh16x64 x y) right:(Rsh16Ux64 x (Sub64 (Const64 [16]) y)))
31839 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
31840 // result: (RotateLeft16 x y)
31842 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
31844 if left.Op != OpLsh16x64 {
31850 if right.Op != OpRsh16Ux64 {
31854 if x != right.Args[0] {
31857 right_1 := right.Args[1]
31858 if right_1.Op != OpSub64 {
31861 _ = right_1.Args[1]
31862 right_1_0 := right_1.Args[0]
31863 if right_1_0.Op != OpConst64 || auxIntToInt64(right_1_0.AuxInt) != 16 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
31866 v.reset(OpRotateLeft16)
31872 // match: (Xor16 left:(Lsh16x32 x y) right:(Rsh16Ux32 x (Sub32 (Const32 [16]) y)))
31873 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
31874 // result: (RotateLeft16 x y)
31876 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
31878 if left.Op != OpLsh16x32 {
31884 if right.Op != OpRsh16Ux32 {
31888 if x != right.Args[0] {
31891 right_1 := right.Args[1]
31892 if right_1.Op != OpSub32 {
31895 _ = right_1.Args[1]
31896 right_1_0 := right_1.Args[0]
31897 if right_1_0.Op != OpConst32 || auxIntToInt32(right_1_0.AuxInt) != 16 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
31900 v.reset(OpRotateLeft16)
31906 // match: (Xor16 left:(Lsh16x16 x y) right:(Rsh16Ux16 x (Sub16 (Const16 [16]) y)))
31907 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
31908 // result: (RotateLeft16 x y)
31910 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
31912 if left.Op != OpLsh16x16 {
31918 if right.Op != OpRsh16Ux16 {
31922 if x != right.Args[0] {
31925 right_1 := right.Args[1]
31926 if right_1.Op != OpSub16 {
31929 _ = right_1.Args[1]
31930 right_1_0 := right_1.Args[0]
31931 if right_1_0.Op != OpConst16 || auxIntToInt16(right_1_0.AuxInt) != 16 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
31934 v.reset(OpRotateLeft16)
31940 // match: (Xor16 left:(Lsh16x8 x y) right:(Rsh16Ux8 x (Sub8 (Const8 [16]) y)))
31941 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
31942 // result: (RotateLeft16 x y)
31944 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
31946 if left.Op != OpLsh16x8 {
31952 if right.Op != OpRsh16Ux8 {
31956 if x != right.Args[0] {
31959 right_1 := right.Args[1]
31960 if right_1.Op != OpSub8 {
31963 _ = right_1.Args[1]
31964 right_1_0 := right_1.Args[0]
31965 if right_1_0.Op != OpConst8 || auxIntToInt8(right_1_0.AuxInt) != 16 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
31968 v.reset(OpRotateLeft16)
31974 // match: (Xor16 right:(Rsh16Ux64 x y) left:(Lsh16x64 x z:(Sub64 (Const64 [16]) y)))
31975 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
31976 // result: (RotateLeft16 x z)
31978 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
31980 if right.Op != OpRsh16Ux64 {
31986 if left.Op != OpLsh16x64 {
31990 if x != left.Args[0] {
31994 if z.Op != OpSub64 {
31999 if z_0.Op != OpConst64 || auxIntToInt64(z_0.AuxInt) != 16 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
32002 v.reset(OpRotateLeft16)
32008 // match: (Xor16 right:(Rsh16Ux32 x y) left:(Lsh16x32 x z:(Sub32 (Const32 [16]) y)))
32009 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
32010 // result: (RotateLeft16 x z)
32012 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
32014 if right.Op != OpRsh16Ux32 {
32020 if left.Op != OpLsh16x32 {
32024 if x != left.Args[0] {
32028 if z.Op != OpSub32 {
32033 if z_0.Op != OpConst32 || auxIntToInt32(z_0.AuxInt) != 16 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
32036 v.reset(OpRotateLeft16)
32042 // match: (Xor16 right:(Rsh16Ux16 x y) left:(Lsh16x16 x z:(Sub16 (Const16 [16]) y)))
32043 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
32044 // result: (RotateLeft16 x z)
32046 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
32048 if right.Op != OpRsh16Ux16 {
32054 if left.Op != OpLsh16x16 {
32058 if x != left.Args[0] {
32062 if z.Op != OpSub16 {
32067 if z_0.Op != OpConst16 || auxIntToInt16(z_0.AuxInt) != 16 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
32070 v.reset(OpRotateLeft16)
32076 // match: (Xor16 right:(Rsh16Ux8 x y) left:(Lsh16x8 x z:(Sub8 (Const8 [16]) y)))
32077 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
32078 // result: (RotateLeft16 x z)
32080 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
32082 if right.Op != OpRsh16Ux8 {
32088 if left.Op != OpLsh16x8 {
32092 if x != left.Args[0] {
32096 if z.Op != OpSub8 {
32101 if z_0.Op != OpConst8 || auxIntToInt8(z_0.AuxInt) != 16 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
32104 v.reset(OpRotateLeft16)
32112 func rewriteValuegeneric_OpXor32(v *Value) bool {
32116 config := b.Func.Config
32117 // match: (Xor32 (Const32 [c]) (Const32 [d]))
32118 // result: (Const32 [c^d])
32120 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
32121 if v_0.Op != OpConst32 {
32124 c := auxIntToInt32(v_0.AuxInt)
32125 if v_1.Op != OpConst32 {
32128 d := auxIntToInt32(v_1.AuxInt)
32130 v.AuxInt = int32ToAuxInt(c ^ d)
32135 // match: (Xor32 x x)
32136 // result: (Const32 [0])
32143 v.AuxInt = int32ToAuxInt(0)
32146 // match: (Xor32 (Const32 [0]) x)
32149 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
32150 if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
32159 // match: (Xor32 (Com32 x) x)
32160 // result: (Const32 [-1])
32162 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
32163 if v_0.Op != OpCom32 {
32171 v.AuxInt = int32ToAuxInt(-1)
32176 // match: (Xor32 (Const32 [-1]) x)
32177 // result: (Com32 x)
32179 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
32180 if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != -1 {
32190 // match: (Xor32 x (Xor32 x y))
32193 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
32195 if v_1.Op != OpXor32 {
32199 v_1_0 := v_1.Args[0]
32200 v_1_1 := v_1.Args[1]
32201 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
32212 // match: (Xor32 (Xor32 i:(Const32 <t>) z) x)
32213 // cond: (z.Op != OpConst32 && x.Op != OpConst32)
32214 // result: (Xor32 i (Xor32 <t> z x))
32216 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
32217 if v_0.Op != OpXor32 {
32221 v_0_0 := v_0.Args[0]
32222 v_0_1 := v_0.Args[1]
32223 for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
32225 if i.Op != OpConst32 {
32231 if !(z.Op != OpConst32 && x.Op != OpConst32) {
32235 v0 := b.NewValue0(v.Pos, OpXor32, t)
32243 // match: (Xor32 (Const32 <t> [c]) (Xor32 (Const32 <t> [d]) x))
32244 // result: (Xor32 (Const32 <t> [c^d]) x)
32246 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
32247 if v_0.Op != OpConst32 {
32251 c := auxIntToInt32(v_0.AuxInt)
32252 if v_1.Op != OpXor32 {
32256 v_1_0 := v_1.Args[0]
32257 v_1_1 := v_1.Args[1]
32258 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
32259 if v_1_0.Op != OpConst32 || v_1_0.Type != t {
32262 d := auxIntToInt32(v_1_0.AuxInt)
32265 v0 := b.NewValue0(v.Pos, OpConst32, t)
32266 v0.AuxInt = int32ToAuxInt(c ^ d)
32273 // match: (Xor32 (Lsh32x64 x z:(Const64 <t> [c])) (Rsh32Ux64 x (Const64 [d])))
32274 // cond: c < 32 && d == 32-c && canRotate(config, 32)
32275 // result: (RotateLeft32 x z)
32277 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
32278 if v_0.Op != OpLsh32x64 {
32284 if z.Op != OpConst64 {
32287 c := auxIntToInt64(z.AuxInt)
32288 if v_1.Op != OpRsh32Ux64 {
32292 if x != v_1.Args[0] {
32295 v_1_1 := v_1.Args[1]
32296 if v_1_1.Op != OpConst64 {
32299 d := auxIntToInt64(v_1_1.AuxInt)
32300 if !(c < 32 && d == 32-c && canRotate(config, 32)) {
32303 v.reset(OpRotateLeft32)
32309 // match: (Xor32 left:(Lsh32x64 x y) right:(Rsh32Ux64 x (Sub64 (Const64 [32]) y)))
32310 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
32311 // result: (RotateLeft32 x y)
32313 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
32315 if left.Op != OpLsh32x64 {
32321 if right.Op != OpRsh32Ux64 {
32325 if x != right.Args[0] {
32328 right_1 := right.Args[1]
32329 if right_1.Op != OpSub64 {
32332 _ = right_1.Args[1]
32333 right_1_0 := right_1.Args[0]
32334 if right_1_0.Op != OpConst64 || auxIntToInt64(right_1_0.AuxInt) != 32 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
32337 v.reset(OpRotateLeft32)
32343 // match: (Xor32 left:(Lsh32x32 x y) right:(Rsh32Ux32 x (Sub32 (Const32 [32]) y)))
32344 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
32345 // result: (RotateLeft32 x y)
32347 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
32349 if left.Op != OpLsh32x32 {
32355 if right.Op != OpRsh32Ux32 {
32359 if x != right.Args[0] {
32362 right_1 := right.Args[1]
32363 if right_1.Op != OpSub32 {
32366 _ = right_1.Args[1]
32367 right_1_0 := right_1.Args[0]
32368 if right_1_0.Op != OpConst32 || auxIntToInt32(right_1_0.AuxInt) != 32 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
32371 v.reset(OpRotateLeft32)
32377 // match: (Xor32 left:(Lsh32x16 x y) right:(Rsh32Ux16 x (Sub16 (Const16 [32]) y)))
32378 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
32379 // result: (RotateLeft32 x y)
32381 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
32383 if left.Op != OpLsh32x16 {
32389 if right.Op != OpRsh32Ux16 {
32393 if x != right.Args[0] {
32396 right_1 := right.Args[1]
32397 if right_1.Op != OpSub16 {
32400 _ = right_1.Args[1]
32401 right_1_0 := right_1.Args[0]
32402 if right_1_0.Op != OpConst16 || auxIntToInt16(right_1_0.AuxInt) != 32 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
32405 v.reset(OpRotateLeft32)
32411 // match: (Xor32 left:(Lsh32x8 x y) right:(Rsh32Ux8 x (Sub8 (Const8 [32]) y)))
32412 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
32413 // result: (RotateLeft32 x y)
32415 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
32417 if left.Op != OpLsh32x8 {
32423 if right.Op != OpRsh32Ux8 {
32427 if x != right.Args[0] {
32430 right_1 := right.Args[1]
32431 if right_1.Op != OpSub8 {
32434 _ = right_1.Args[1]
32435 right_1_0 := right_1.Args[0]
32436 if right_1_0.Op != OpConst8 || auxIntToInt8(right_1_0.AuxInt) != 32 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
32439 v.reset(OpRotateLeft32)
32445 // match: (Xor32 right:(Rsh32Ux64 x y) left:(Lsh32x64 x z:(Sub64 (Const64 [32]) y)))
32446 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
32447 // result: (RotateLeft32 x z)
32449 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
32451 if right.Op != OpRsh32Ux64 {
32457 if left.Op != OpLsh32x64 {
32461 if x != left.Args[0] {
32465 if z.Op != OpSub64 {
32470 if z_0.Op != OpConst64 || auxIntToInt64(z_0.AuxInt) != 32 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
32473 v.reset(OpRotateLeft32)
32479 // match: (Xor32 right:(Rsh32Ux32 x y) left:(Lsh32x32 x z:(Sub32 (Const32 [32]) y)))
32480 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
32481 // result: (RotateLeft32 x z)
32483 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
32485 if right.Op != OpRsh32Ux32 {
32491 if left.Op != OpLsh32x32 {
32495 if x != left.Args[0] {
32499 if z.Op != OpSub32 {
32504 if z_0.Op != OpConst32 || auxIntToInt32(z_0.AuxInt) != 32 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
32507 v.reset(OpRotateLeft32)
32513 // match: (Xor32 right:(Rsh32Ux16 x y) left:(Lsh32x16 x z:(Sub16 (Const16 [32]) y)))
32514 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
32515 // result: (RotateLeft32 x z)
32517 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
32519 if right.Op != OpRsh32Ux16 {
32525 if left.Op != OpLsh32x16 {
32529 if x != left.Args[0] {
32533 if z.Op != OpSub16 {
32538 if z_0.Op != OpConst16 || auxIntToInt16(z_0.AuxInt) != 32 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
32541 v.reset(OpRotateLeft32)
32547 // match: (Xor32 right:(Rsh32Ux8 x y) left:(Lsh32x8 x z:(Sub8 (Const8 [32]) y)))
32548 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
32549 // result: (RotateLeft32 x z)
32551 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
32553 if right.Op != OpRsh32Ux8 {
32559 if left.Op != OpLsh32x8 {
32563 if x != left.Args[0] {
32567 if z.Op != OpSub8 {
32572 if z_0.Op != OpConst8 || auxIntToInt8(z_0.AuxInt) != 32 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
32575 v.reset(OpRotateLeft32)
32583 func rewriteValuegeneric_OpXor64(v *Value) bool {
32587 config := b.Func.Config
32588 // match: (Xor64 (Const64 [c]) (Const64 [d]))
32589 // result: (Const64 [c^d])
32591 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
32592 if v_0.Op != OpConst64 {
32595 c := auxIntToInt64(v_0.AuxInt)
32596 if v_1.Op != OpConst64 {
32599 d := auxIntToInt64(v_1.AuxInt)
32601 v.AuxInt = int64ToAuxInt(c ^ d)
32606 // match: (Xor64 x x)
32607 // result: (Const64 [0])
32614 v.AuxInt = int64ToAuxInt(0)
32617 // match: (Xor64 (Const64 [0]) x)
32620 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
32621 if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
32630 // match: (Xor64 (Com64 x) x)
32631 // result: (Const64 [-1])
32633 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
32634 if v_0.Op != OpCom64 {
32642 v.AuxInt = int64ToAuxInt(-1)
32647 // match: (Xor64 (Const64 [-1]) x)
32648 // result: (Com64 x)
32650 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
32651 if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != -1 {
32661 // match: (Xor64 x (Xor64 x y))
32664 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
32666 if v_1.Op != OpXor64 {
32670 v_1_0 := v_1.Args[0]
32671 v_1_1 := v_1.Args[1]
32672 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
32683 // match: (Xor64 (Xor64 i:(Const64 <t>) z) x)
32684 // cond: (z.Op != OpConst64 && x.Op != OpConst64)
32685 // result: (Xor64 i (Xor64 <t> z x))
32687 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
32688 if v_0.Op != OpXor64 {
32692 v_0_0 := v_0.Args[0]
32693 v_0_1 := v_0.Args[1]
32694 for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
32696 if i.Op != OpConst64 {
32702 if !(z.Op != OpConst64 && x.Op != OpConst64) {
32706 v0 := b.NewValue0(v.Pos, OpXor64, t)
32714 // match: (Xor64 (Const64 <t> [c]) (Xor64 (Const64 <t> [d]) x))
32715 // result: (Xor64 (Const64 <t> [c^d]) x)
32717 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
32718 if v_0.Op != OpConst64 {
32722 c := auxIntToInt64(v_0.AuxInt)
32723 if v_1.Op != OpXor64 {
32727 v_1_0 := v_1.Args[0]
32728 v_1_1 := v_1.Args[1]
32729 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
32730 if v_1_0.Op != OpConst64 || v_1_0.Type != t {
32733 d := auxIntToInt64(v_1_0.AuxInt)
32736 v0 := b.NewValue0(v.Pos, OpConst64, t)
32737 v0.AuxInt = int64ToAuxInt(c ^ d)
32744 // match: (Xor64 (Lsh64x64 x z:(Const64 <t> [c])) (Rsh64Ux64 x (Const64 [d])))
32745 // cond: c < 64 && d == 64-c && canRotate(config, 64)
32746 // result: (RotateLeft64 x z)
32748 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
32749 if v_0.Op != OpLsh64x64 {
32755 if z.Op != OpConst64 {
32758 c := auxIntToInt64(z.AuxInt)
32759 if v_1.Op != OpRsh64Ux64 {
32763 if x != v_1.Args[0] {
32766 v_1_1 := v_1.Args[1]
32767 if v_1_1.Op != OpConst64 {
32770 d := auxIntToInt64(v_1_1.AuxInt)
32771 if !(c < 64 && d == 64-c && canRotate(config, 64)) {
32774 v.reset(OpRotateLeft64)
32780 // match: (Xor64 left:(Lsh64x64 x y) right:(Rsh64Ux64 x (Sub64 (Const64 [64]) y)))
32781 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
32782 // result: (RotateLeft64 x y)
32784 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
32786 if left.Op != OpLsh64x64 {
32792 if right.Op != OpRsh64Ux64 {
32796 if x != right.Args[0] {
32799 right_1 := right.Args[1]
32800 if right_1.Op != OpSub64 {
32803 _ = right_1.Args[1]
32804 right_1_0 := right_1.Args[0]
32805 if right_1_0.Op != OpConst64 || auxIntToInt64(right_1_0.AuxInt) != 64 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
32808 v.reset(OpRotateLeft64)
32814 // match: (Xor64 left:(Lsh64x32 x y) right:(Rsh64Ux32 x (Sub32 (Const32 [64]) y)))
32815 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
32816 // result: (RotateLeft64 x y)
32818 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
32820 if left.Op != OpLsh64x32 {
32826 if right.Op != OpRsh64Ux32 {
32830 if x != right.Args[0] {
32833 right_1 := right.Args[1]
32834 if right_1.Op != OpSub32 {
32837 _ = right_1.Args[1]
32838 right_1_0 := right_1.Args[0]
32839 if right_1_0.Op != OpConst32 || auxIntToInt32(right_1_0.AuxInt) != 64 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
32842 v.reset(OpRotateLeft64)
32848 // match: (Xor64 left:(Lsh64x16 x y) right:(Rsh64Ux16 x (Sub16 (Const16 [64]) y)))
32849 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
32850 // result: (RotateLeft64 x y)
32852 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
32854 if left.Op != OpLsh64x16 {
32860 if right.Op != OpRsh64Ux16 {
32864 if x != right.Args[0] {
32867 right_1 := right.Args[1]
32868 if right_1.Op != OpSub16 {
32871 _ = right_1.Args[1]
32872 right_1_0 := right_1.Args[0]
32873 if right_1_0.Op != OpConst16 || auxIntToInt16(right_1_0.AuxInt) != 64 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
32876 v.reset(OpRotateLeft64)
32882 // match: (Xor64 left:(Lsh64x8 x y) right:(Rsh64Ux8 x (Sub8 (Const8 [64]) y)))
32883 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
32884 // result: (RotateLeft64 x y)
32886 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
32888 if left.Op != OpLsh64x8 {
32894 if right.Op != OpRsh64Ux8 {
32898 if x != right.Args[0] {
32901 right_1 := right.Args[1]
32902 if right_1.Op != OpSub8 {
32905 _ = right_1.Args[1]
32906 right_1_0 := right_1.Args[0]
32907 if right_1_0.Op != OpConst8 || auxIntToInt8(right_1_0.AuxInt) != 64 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
32910 v.reset(OpRotateLeft64)
32916 // match: (Xor64 right:(Rsh64Ux64 x y) left:(Lsh64x64 x z:(Sub64 (Const64 [64]) y)))
32917 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
32918 // result: (RotateLeft64 x z)
32920 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
32922 if right.Op != OpRsh64Ux64 {
32928 if left.Op != OpLsh64x64 {
32932 if x != left.Args[0] {
32936 if z.Op != OpSub64 {
32941 if z_0.Op != OpConst64 || auxIntToInt64(z_0.AuxInt) != 64 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
32944 v.reset(OpRotateLeft64)
32950 // match: (Xor64 right:(Rsh64Ux32 x y) left:(Lsh64x32 x z:(Sub32 (Const32 [64]) y)))
32951 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
32952 // result: (RotateLeft64 x z)
32954 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
32956 if right.Op != OpRsh64Ux32 {
32962 if left.Op != OpLsh64x32 {
32966 if x != left.Args[0] {
32970 if z.Op != OpSub32 {
32975 if z_0.Op != OpConst32 || auxIntToInt32(z_0.AuxInt) != 64 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
32978 v.reset(OpRotateLeft64)
32984 // match: (Xor64 right:(Rsh64Ux16 x y) left:(Lsh64x16 x z:(Sub16 (Const16 [64]) y)))
32985 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
32986 // result: (RotateLeft64 x z)
32988 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
32990 if right.Op != OpRsh64Ux16 {
32996 if left.Op != OpLsh64x16 {
33000 if x != left.Args[0] {
33004 if z.Op != OpSub16 {
33009 if z_0.Op != OpConst16 || auxIntToInt16(z_0.AuxInt) != 64 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
33012 v.reset(OpRotateLeft64)
33018 // match: (Xor64 right:(Rsh64Ux8 x y) left:(Lsh64x8 x z:(Sub8 (Const8 [64]) y)))
33019 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
33020 // result: (RotateLeft64 x z)
33022 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
33024 if right.Op != OpRsh64Ux8 {
33030 if left.Op != OpLsh64x8 {
33034 if x != left.Args[0] {
33038 if z.Op != OpSub8 {
33043 if z_0.Op != OpConst8 || auxIntToInt8(z_0.AuxInt) != 64 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
33046 v.reset(OpRotateLeft64)
33054 func rewriteValuegeneric_OpXor8(v *Value) bool {
33058 config := b.Func.Config
33059 // match: (Xor8 (Const8 [c]) (Const8 [d]))
33060 // result: (Const8 [c^d])
33062 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
33063 if v_0.Op != OpConst8 {
33066 c := auxIntToInt8(v_0.AuxInt)
33067 if v_1.Op != OpConst8 {
33070 d := auxIntToInt8(v_1.AuxInt)
33072 v.AuxInt = int8ToAuxInt(c ^ d)
33077 // match: (Xor8 x x)
33078 // result: (Const8 [0])
33085 v.AuxInt = int8ToAuxInt(0)
33088 // match: (Xor8 (Const8 [0]) x)
33091 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
33092 if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
33101 // match: (Xor8 (Com8 x) x)
33102 // result: (Const8 [-1])
33104 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
33105 if v_0.Op != OpCom8 {
33113 v.AuxInt = int8ToAuxInt(-1)
33118 // match: (Xor8 (Const8 [-1]) x)
33119 // result: (Com8 x)
33121 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
33122 if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != -1 {
33132 // match: (Xor8 x (Xor8 x y))
33135 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
33137 if v_1.Op != OpXor8 {
33141 v_1_0 := v_1.Args[0]
33142 v_1_1 := v_1.Args[1]
33143 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
33154 // match: (Xor8 (Xor8 i:(Const8 <t>) z) x)
33155 // cond: (z.Op != OpConst8 && x.Op != OpConst8)
33156 // result: (Xor8 i (Xor8 <t> z x))
33158 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
33159 if v_0.Op != OpXor8 {
33163 v_0_0 := v_0.Args[0]
33164 v_0_1 := v_0.Args[1]
33165 for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
33167 if i.Op != OpConst8 {
33173 if !(z.Op != OpConst8 && x.Op != OpConst8) {
33177 v0 := b.NewValue0(v.Pos, OpXor8, t)
33185 // match: (Xor8 (Const8 <t> [c]) (Xor8 (Const8 <t> [d]) x))
33186 // result: (Xor8 (Const8 <t> [c^d]) x)
33188 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
33189 if v_0.Op != OpConst8 {
33193 c := auxIntToInt8(v_0.AuxInt)
33194 if v_1.Op != OpXor8 {
33198 v_1_0 := v_1.Args[0]
33199 v_1_1 := v_1.Args[1]
33200 for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
33201 if v_1_0.Op != OpConst8 || v_1_0.Type != t {
33204 d := auxIntToInt8(v_1_0.AuxInt)
33207 v0 := b.NewValue0(v.Pos, OpConst8, t)
33208 v0.AuxInt = int8ToAuxInt(c ^ d)
33215 // match: (Xor8 (Lsh8x64 x z:(Const64 <t> [c])) (Rsh8Ux64 x (Const64 [d])))
33216 // cond: c < 8 && d == 8-c && canRotate(config, 8)
33217 // result: (RotateLeft8 x z)
33219 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
33220 if v_0.Op != OpLsh8x64 {
33226 if z.Op != OpConst64 {
33229 c := auxIntToInt64(z.AuxInt)
33230 if v_1.Op != OpRsh8Ux64 {
33234 if x != v_1.Args[0] {
33237 v_1_1 := v_1.Args[1]
33238 if v_1_1.Op != OpConst64 {
33241 d := auxIntToInt64(v_1_1.AuxInt)
33242 if !(c < 8 && d == 8-c && canRotate(config, 8)) {
33245 v.reset(OpRotateLeft8)
33251 // match: (Xor8 left:(Lsh8x64 x y) right:(Rsh8Ux64 x (Sub64 (Const64 [8]) y)))
33252 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
33253 // result: (RotateLeft8 x y)
33255 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
33257 if left.Op != OpLsh8x64 {
33263 if right.Op != OpRsh8Ux64 {
33267 if x != right.Args[0] {
33270 right_1 := right.Args[1]
33271 if right_1.Op != OpSub64 {
33274 _ = right_1.Args[1]
33275 right_1_0 := right_1.Args[0]
33276 if right_1_0.Op != OpConst64 || auxIntToInt64(right_1_0.AuxInt) != 8 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
33279 v.reset(OpRotateLeft8)
33285 // match: (Xor8 left:(Lsh8x32 x y) right:(Rsh8Ux32 x (Sub32 (Const32 [8]) y)))
33286 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
33287 // result: (RotateLeft8 x y)
33289 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
33291 if left.Op != OpLsh8x32 {
33297 if right.Op != OpRsh8Ux32 {
33301 if x != right.Args[0] {
33304 right_1 := right.Args[1]
33305 if right_1.Op != OpSub32 {
33308 _ = right_1.Args[1]
33309 right_1_0 := right_1.Args[0]
33310 if right_1_0.Op != OpConst32 || auxIntToInt32(right_1_0.AuxInt) != 8 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
33313 v.reset(OpRotateLeft8)
33319 // match: (Xor8 left:(Lsh8x16 x y) right:(Rsh8Ux16 x (Sub16 (Const16 [8]) y)))
33320 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
33321 // result: (RotateLeft8 x y)
33323 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
33325 if left.Op != OpLsh8x16 {
33331 if right.Op != OpRsh8Ux16 {
33335 if x != right.Args[0] {
33338 right_1 := right.Args[1]
33339 if right_1.Op != OpSub16 {
33342 _ = right_1.Args[1]
33343 right_1_0 := right_1.Args[0]
33344 if right_1_0.Op != OpConst16 || auxIntToInt16(right_1_0.AuxInt) != 8 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
33347 v.reset(OpRotateLeft8)
33353 // match: (Xor8 left:(Lsh8x8 x y) right:(Rsh8Ux8 x (Sub8 (Const8 [8]) y)))
33354 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
33355 // result: (RotateLeft8 x y)
33357 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
33359 if left.Op != OpLsh8x8 {
33365 if right.Op != OpRsh8Ux8 {
33369 if x != right.Args[0] {
33372 right_1 := right.Args[1]
33373 if right_1.Op != OpSub8 {
33376 _ = right_1.Args[1]
33377 right_1_0 := right_1.Args[0]
33378 if right_1_0.Op != OpConst8 || auxIntToInt8(right_1_0.AuxInt) != 8 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
33381 v.reset(OpRotateLeft8)
33387 // match: (Xor8 right:(Rsh8Ux64 x y) left:(Lsh8x64 x z:(Sub64 (Const64 [8]) y)))
33388 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
33389 // result: (RotateLeft8 x z)
33391 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
33393 if right.Op != OpRsh8Ux64 {
33399 if left.Op != OpLsh8x64 {
33403 if x != left.Args[0] {
33407 if z.Op != OpSub64 {
33412 if z_0.Op != OpConst64 || auxIntToInt64(z_0.AuxInt) != 8 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
33415 v.reset(OpRotateLeft8)
33421 // match: (Xor8 right:(Rsh8Ux32 x y) left:(Lsh8x32 x z:(Sub32 (Const32 [8]) y)))
33422 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
33423 // result: (RotateLeft8 x z)
33425 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
33427 if right.Op != OpRsh8Ux32 {
33433 if left.Op != OpLsh8x32 {
33437 if x != left.Args[0] {
33441 if z.Op != OpSub32 {
33446 if z_0.Op != OpConst32 || auxIntToInt32(z_0.AuxInt) != 8 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
33449 v.reset(OpRotateLeft8)
33455 // match: (Xor8 right:(Rsh8Ux16 x y) left:(Lsh8x16 x z:(Sub16 (Const16 [8]) y)))
33456 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
33457 // result: (RotateLeft8 x z)
33459 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
33461 if right.Op != OpRsh8Ux16 {
33467 if left.Op != OpLsh8x16 {
33471 if x != left.Args[0] {
33475 if z.Op != OpSub16 {
33480 if z_0.Op != OpConst16 || auxIntToInt16(z_0.AuxInt) != 8 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
33483 v.reset(OpRotateLeft8)
33489 // match: (Xor8 right:(Rsh8Ux8 x y) left:(Lsh8x8 x z:(Sub8 (Const8 [8]) y)))
33490 // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
33491 // result: (RotateLeft8 x z)
33493 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
33495 if right.Op != OpRsh8Ux8 {
33501 if left.Op != OpLsh8x8 {
33505 if x != left.Args[0] {
33509 if z.Op != OpSub8 {
33514 if z_0.Op != OpConst8 || auxIntToInt8(z_0.AuxInt) != 8 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
33517 v.reset(OpRotateLeft8)
33525 func rewriteValuegeneric_OpZero(v *Value) bool {
33529 // match: (Zero (SelectN [0] call:(StaticLECall _ _)) mem:(SelectN [1] call))
33530 // cond: isSameCall(call.Aux, "runtime.newobject")
33533 if v_0.Op != OpSelectN || auxIntToInt64(v_0.AuxInt) != 0 {
33536 call := v_0.Args[0]
33537 if call.Op != OpStaticLECall || len(call.Args) != 2 {
33541 if mem.Op != OpSelectN || auxIntToInt64(mem.AuxInt) != 1 || call != mem.Args[0] || !(isSameCall(call.Aux, "runtime.newobject")) {
33547 // match: (Zero {t1} [n] p1 store:(Store {t2} (OffPtr [o2] p2) _ mem))
33548 // cond: isSamePtr(p1, p2) && store.Uses == 1 && n >= o2 + t2.Size() && clobber(store)
33549 // result: (Zero {t1} [n] p1 mem)
33551 n := auxIntToInt64(v.AuxInt)
33552 t1 := auxToType(v.Aux)
33555 if store.Op != OpStore {
33558 t2 := auxToType(store.Aux)
33559 mem := store.Args[2]
33560 store_0 := store.Args[0]
33561 if store_0.Op != OpOffPtr {
33564 o2 := auxIntToInt64(store_0.AuxInt)
33565 p2 := store_0.Args[0]
33566 if !(isSamePtr(p1, p2) && store.Uses == 1 && n >= o2+t2.Size() && clobber(store)) {
33570 v.AuxInt = int64ToAuxInt(n)
33571 v.Aux = typeToAux(t1)
33575 // match: (Zero {t} [n] dst1 move:(Move {t} [n] dst2 _ mem))
33576 // cond: move.Uses == 1 && isSamePtr(dst1, dst2) && clobber(move)
33577 // result: (Zero {t} [n] dst1 mem)
33579 n := auxIntToInt64(v.AuxInt)
33580 t := auxToType(v.Aux)
33583 if move.Op != OpMove || auxIntToInt64(move.AuxInt) != n || auxToType(move.Aux) != t {
33586 mem := move.Args[2]
33587 dst2 := move.Args[0]
33588 if !(move.Uses == 1 && isSamePtr(dst1, dst2) && clobber(move)) {
33592 v.AuxInt = int64ToAuxInt(n)
33593 v.Aux = typeToAux(t)
33594 v.AddArg2(dst1, mem)
33597 // match: (Zero {t} [n] dst1 vardef:(VarDef {x} move:(Move {t} [n] dst2 _ mem)))
33598 // cond: move.Uses == 1 && vardef.Uses == 1 && isSamePtr(dst1, dst2) && clobber(move, vardef)
33599 // result: (Zero {t} [n] dst1 (VarDef {x} mem))
33601 n := auxIntToInt64(v.AuxInt)
33602 t := auxToType(v.Aux)
33605 if vardef.Op != OpVarDef {
33608 x := auxToSym(vardef.Aux)
33609 move := vardef.Args[0]
33610 if move.Op != OpMove || auxIntToInt64(move.AuxInt) != n || auxToType(move.Aux) != t {
33613 mem := move.Args[2]
33614 dst2 := move.Args[0]
33615 if !(move.Uses == 1 && vardef.Uses == 1 && isSamePtr(dst1, dst2) && clobber(move, vardef)) {
33619 v.AuxInt = int64ToAuxInt(n)
33620 v.Aux = typeToAux(t)
33621 v0 := b.NewValue0(v.Pos, OpVarDef, types.TypeMem)
33622 v0.Aux = symToAux(x)
33624 v.AddArg2(dst1, v0)
33627 // match: (Zero {t} [s] dst1 zero:(Zero {t} [s] dst2 _))
33628 // cond: isSamePtr(dst1, dst2)
33631 s := auxIntToInt64(v.AuxInt)
33632 t := auxToType(v.Aux)
33635 if zero.Op != OpZero || auxIntToInt64(zero.AuxInt) != s || auxToType(zero.Aux) != t {
33638 dst2 := zero.Args[0]
33639 if !(isSamePtr(dst1, dst2)) {
33645 // match: (Zero {t} [s] dst1 vardef:(VarDef (Zero {t} [s] dst2 _)))
33646 // cond: isSamePtr(dst1, dst2)
33649 s := auxIntToInt64(v.AuxInt)
33650 t := auxToType(v.Aux)
33653 if vardef.Op != OpVarDef {
33656 vardef_0 := vardef.Args[0]
33657 if vardef_0.Op != OpZero || auxIntToInt64(vardef_0.AuxInt) != s || auxToType(vardef_0.Aux) != t {
33660 dst2 := vardef_0.Args[0]
33661 if !(isSamePtr(dst1, dst2)) {
33669 func rewriteValuegeneric_OpZeroExt16to32(v *Value) bool {
33671 // match: (ZeroExt16to32 (Const16 [c]))
33672 // result: (Const32 [int32(uint16(c))])
33674 if v_0.Op != OpConst16 {
33677 c := auxIntToInt16(v_0.AuxInt)
33679 v.AuxInt = int32ToAuxInt(int32(uint16(c)))
33682 // match: (ZeroExt16to32 (Trunc32to16 x:(Rsh32Ux64 _ (Const64 [s]))))
33686 if v_0.Op != OpTrunc32to16 {
33690 if x.Op != OpRsh32Ux64 {
33695 if x_1.Op != OpConst64 {
33698 s := auxIntToInt64(x_1.AuxInt)
33707 func rewriteValuegeneric_OpZeroExt16to64(v *Value) bool {
33709 // match: (ZeroExt16to64 (Const16 [c]))
33710 // result: (Const64 [int64(uint16(c))])
33712 if v_0.Op != OpConst16 {
33715 c := auxIntToInt16(v_0.AuxInt)
33717 v.AuxInt = int64ToAuxInt(int64(uint16(c)))
33720 // match: (ZeroExt16to64 (Trunc64to16 x:(Rsh64Ux64 _ (Const64 [s]))))
33724 if v_0.Op != OpTrunc64to16 {
33728 if x.Op != OpRsh64Ux64 {
33733 if x_1.Op != OpConst64 {
33736 s := auxIntToInt64(x_1.AuxInt)
33745 func rewriteValuegeneric_OpZeroExt32to64(v *Value) bool {
33747 // match: (ZeroExt32to64 (Const32 [c]))
33748 // result: (Const64 [int64(uint32(c))])
33750 if v_0.Op != OpConst32 {
33753 c := auxIntToInt32(v_0.AuxInt)
33755 v.AuxInt = int64ToAuxInt(int64(uint32(c)))
33758 // match: (ZeroExt32to64 (Trunc64to32 x:(Rsh64Ux64 _ (Const64 [s]))))
33762 if v_0.Op != OpTrunc64to32 {
33766 if x.Op != OpRsh64Ux64 {
33771 if x_1.Op != OpConst64 {
33774 s := auxIntToInt64(x_1.AuxInt)
33783 func rewriteValuegeneric_OpZeroExt8to16(v *Value) bool {
33785 // match: (ZeroExt8to16 (Const8 [c]))
33786 // result: (Const16 [int16( uint8(c))])
33788 if v_0.Op != OpConst8 {
33791 c := auxIntToInt8(v_0.AuxInt)
33793 v.AuxInt = int16ToAuxInt(int16(uint8(c)))
33796 // match: (ZeroExt8to16 (Trunc16to8 x:(Rsh16Ux64 _ (Const64 [s]))))
33800 if v_0.Op != OpTrunc16to8 {
33804 if x.Op != OpRsh16Ux64 {
33809 if x_1.Op != OpConst64 {
33812 s := auxIntToInt64(x_1.AuxInt)
33821 func rewriteValuegeneric_OpZeroExt8to32(v *Value) bool {
33823 // match: (ZeroExt8to32 (Const8 [c]))
33824 // result: (Const32 [int32( uint8(c))])
33826 if v_0.Op != OpConst8 {
33829 c := auxIntToInt8(v_0.AuxInt)
33831 v.AuxInt = int32ToAuxInt(int32(uint8(c)))
33834 // match: (ZeroExt8to32 (Trunc32to8 x:(Rsh32Ux64 _ (Const64 [s]))))
33838 if v_0.Op != OpTrunc32to8 {
33842 if x.Op != OpRsh32Ux64 {
33847 if x_1.Op != OpConst64 {
33850 s := auxIntToInt64(x_1.AuxInt)
33859 func rewriteValuegeneric_OpZeroExt8to64(v *Value) bool {
33861 // match: (ZeroExt8to64 (Const8 [c]))
33862 // result: (Const64 [int64( uint8(c))])
33864 if v_0.Op != OpConst8 {
33867 c := auxIntToInt8(v_0.AuxInt)
33869 v.AuxInt = int64ToAuxInt(int64(uint8(c)))
33872 // match: (ZeroExt8to64 (Trunc64to8 x:(Rsh64Ux64 _ (Const64 [s]))))
33876 if v_0.Op != OpTrunc64to8 {
33880 if x.Op != OpRsh64Ux64 {
33885 if x_1.Op != OpConst64 {
33888 s := auxIntToInt64(x_1.AuxInt)
33897 func rewriteBlockgeneric(b *Block) bool {
33900 // match: (If (Not cond) yes no)
33901 // result: (If cond no yes)
33902 for b.Controls[0].Op == OpNot {
33903 v_0 := b.Controls[0]
33904 cond := v_0.Args[0]
33905 b.resetWithControl(BlockIf, cond)
33909 // match: (If (ConstBool [c]) yes no)
33911 // result: (First yes no)
33912 for b.Controls[0].Op == OpConstBool {
33913 v_0 := b.Controls[0]
33914 c := auxIntToBool(v_0.AuxInt)
33918 b.Reset(BlockFirst)
33921 // match: (If (ConstBool [c]) yes no)
33923 // result: (First no yes)
33924 for b.Controls[0].Op == OpConstBool {
33925 v_0 := b.Controls[0]
33926 c := auxIntToBool(v_0.AuxInt)
33930 b.Reset(BlockFirst)