1 // Code generated from _gen/PPC64latelower.rules using 'go generate'; DO NOT EDIT.
5 import "internal/buildcfg"
6 import "cmd/compile/internal/types"
// rewriteValuePPC64latelower dispatches v by opcode to the matching
// per-op rewrite helper below and reports whether a rewrite applied.
// NOTE(review): the switch/case scaffolding between these return
// statements appears elided in this view of the generated file;
// regenerate from _gen/PPC64latelower.rules rather than editing here.
8 func rewriteValuePPC64latelower(v *Value) bool {
11 return rewriteValuePPC64latelower_OpPPC64ADD(v)
13 return rewriteValuePPC64latelower_OpPPC64AND(v)
15 return rewriteValuePPC64latelower_OpPPC64CMPconst(v)
17 return rewriteValuePPC64latelower_OpPPC64ISEL(v)
19 return rewriteValuePPC64latelower_OpPPC64RLDICL(v)
21 return rewriteValuePPC64latelower_OpPPC64SETBC(v)
23 return rewriteValuePPC64latelower_OpPPC64SETBCR(v)
25 return rewriteValuePPC64latelower_OpSelect0(v)
// rewriteValuePPC64latelower_OpPPC64ADD folds (ADD (MOVDconst [m]) x)
// into (ADDconst [m] x). The loop over _i0 tries both argument orders
// since ADD is commutative. The guard (m<<30)>>30 == m verifies m is
// representable as a signed 34-bit immediate, which is only usable
// when PC-relative addressing is available (supportsPPC64PCRel).
// NOTE(review): interior lines (breaks, AddArg, returns) appear elided
// in this view of the generated file.
29 func rewriteValuePPC64latelower_OpPPC64ADD(v *Value) bool {
32 // match: (ADD (MOVDconst [m]) x)
33 // cond: supportsPPC64PCRel() && (m<<30)>>30 == m
34 // result: (ADDconst [m] x)
36 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
37 if v_0.Op != OpPPC64MOVDconst {
40 m := auxIntToInt64(v_0.AuxInt)
42 if !(supportsPPC64PCRel() && (m<<30)>>30 == m) {
45 v.reset(OpPPC64ADDconst)
46 v.AuxInt = int64ToAuxInt(m)
// rewriteValuePPC64latelower_OpPPC64AND lowers AND-with-constant into
// cheaper forms, trying four rules in order (each commutative over the
// two operands, hence the _i0 swap loops):
//   - narrow (<= 2 byte) results: ANDCCconst with the constant
//     truncated to 16 bits, keeping only Select0 of the tuple;
//   - a valid contiguous shift mask m: RLDICL (rotate + clear left);
//   - a valid inverted mask ^m (m != 0): RLDICR (rotate + clear right);
//   - a 4-byte word rotate mask: RLWINM.
// NOTE(review): interior lines (breaks, AddArg, returns, closing
// braces) appear elided in this view of the generated file.
54 func rewriteValuePPC64latelower_OpPPC64AND(v *Value) bool {
58 typ := &b.Func.Config.Types
59 // match: (AND <t> x:(MOVDconst [m]) n)
60 // cond: t.Size() <= 2
61 // result: (Select0 (ANDCCconst [int64(int16(m))] n))
64 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
66 if x.Op != OpPPC64MOVDconst {
69 m := auxIntToInt64(x.AuxInt)
75 v0 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
76 v0.AuxInt = int64ToAuxInt(int64(int16(m)))
83 // match: (AND x:(MOVDconst [m]) n)
84 // cond: isPPC64ValidShiftMask(m)
85 // result: (RLDICL [encodePPC64RotateMask(0,m,64)] n)
87 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
89 if x.Op != OpPPC64MOVDconst {
92 m := auxIntToInt64(x.AuxInt)
94 if !(isPPC64ValidShiftMask(m)) {
97 v.reset(OpPPC64RLDICL)
98 v.AuxInt = int64ToAuxInt(encodePPC64RotateMask(0, m, 64))
104 // match: (AND x:(MOVDconst [m]) n)
105 // cond: m != 0 && isPPC64ValidShiftMask(^m)
106 // result: (RLDICR [encodePPC64RotateMask(0,m,64)] n)
108 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
110 if x.Op != OpPPC64MOVDconst {
113 m := auxIntToInt64(x.AuxInt)
115 if !(m != 0 && isPPC64ValidShiftMask(^m)) {
118 v.reset(OpPPC64RLDICR)
119 v.AuxInt = int64ToAuxInt(encodePPC64RotateMask(0, m, 64))
125 // match: (AND <t> x:(MOVDconst [m]) n)
126 // cond: t.Size() == 4 && isPPC64WordRotateMask(m)
127 // result: (RLWINM [encodePPC64RotateMask(0,m,32)] n)
130 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
132 if x.Op != OpPPC64MOVDconst {
135 m := auxIntToInt64(x.AuxInt)
137 if !(t.Size() == 4 && isPPC64WordRotateMask(m)) {
140 v.reset(OpPPC64RLWINM)
141 v.AuxInt = int64ToAuxInt(encodePPC64RotateMask(0, m, 32))
// rewriteValuePPC64latelower_OpPPC64CMPconst eliminates redundant
// compares against zero. Two families of rules are applied:
//   - (CMPconst [0] z:(OP x y)) where OP is ADD/AND/ANDN/OR/SUB/NOR/
//     XOR/NEG/CNTLZD (and ADDconst with a 16-bit immediate): replace z
//     with its CC-setting twin via convertPPC64OpToOpCC(z), so the
//     flags come for free from the arithmetic op. The v.Block ==
//     z.Block condition restricts the conversion to ops in the same
//     basic block.
//   - (CMPconst [0] (Select0 z:(OPCC ...))): the op already sets CC,
//     so the compare reduces to (Select1 <t> z), the flags result of
//     the existing tuple.
// NOTE(review): interior lines (breaks, AddArg/Select1 construction,
// returns) appear elided in this view of the generated file.
149 func rewriteValuePPC64latelower_OpPPC64CMPconst(v *Value) bool {
151 // match: (CMPconst [0] z:(ADD x y))
152 // cond: v.Block == z.Block
153 // result: (CMPconst [0] convertPPC64OpToOpCC(z))
155 if auxIntToInt64(v.AuxInt) != 0 {
159 if z.Op != OpPPC64ADD {
162 if !(v.Block == z.Block) {
165 v.reset(OpPPC64CMPconst)
166 v.AuxInt = int64ToAuxInt(0)
167 v.AddArg(convertPPC64OpToOpCC(z))
170 // match: (CMPconst [0] z:(AND x y))
171 // cond: v.Block == z.Block
172 // result: (CMPconst [0] convertPPC64OpToOpCC(z))
174 if auxIntToInt64(v.AuxInt) != 0 {
178 if z.Op != OpPPC64AND {
181 if !(v.Block == z.Block) {
184 v.reset(OpPPC64CMPconst)
185 v.AuxInt = int64ToAuxInt(0)
186 v.AddArg(convertPPC64OpToOpCC(z))
189 // match: (CMPconst [0] z:(ANDN x y))
190 // cond: v.Block == z.Block
191 // result: (CMPconst [0] convertPPC64OpToOpCC(z))
193 if auxIntToInt64(v.AuxInt) != 0 {
197 if z.Op != OpPPC64ANDN {
200 if !(v.Block == z.Block) {
203 v.reset(OpPPC64CMPconst)
204 v.AuxInt = int64ToAuxInt(0)
205 v.AddArg(convertPPC64OpToOpCC(z))
208 // match: (CMPconst [0] z:(OR x y))
209 // cond: v.Block == z.Block
210 // result: (CMPconst [0] convertPPC64OpToOpCC(z))
212 if auxIntToInt64(v.AuxInt) != 0 {
216 if z.Op != OpPPC64OR {
219 if !(v.Block == z.Block) {
222 v.reset(OpPPC64CMPconst)
223 v.AuxInt = int64ToAuxInt(0)
224 v.AddArg(convertPPC64OpToOpCC(z))
227 // match: (CMPconst [0] z:(SUB x y))
228 // cond: v.Block == z.Block
229 // result: (CMPconst [0] convertPPC64OpToOpCC(z))
231 if auxIntToInt64(v.AuxInt) != 0 {
235 if z.Op != OpPPC64SUB {
238 if !(v.Block == z.Block) {
241 v.reset(OpPPC64CMPconst)
242 v.AuxInt = int64ToAuxInt(0)
243 v.AddArg(convertPPC64OpToOpCC(z))
246 // match: (CMPconst [0] z:(NOR x y))
247 // cond: v.Block == z.Block
248 // result: (CMPconst [0] convertPPC64OpToOpCC(z))
250 if auxIntToInt64(v.AuxInt) != 0 {
254 if z.Op != OpPPC64NOR {
257 if !(v.Block == z.Block) {
260 v.reset(OpPPC64CMPconst)
261 v.AuxInt = int64ToAuxInt(0)
262 v.AddArg(convertPPC64OpToOpCC(z))
265 // match: (CMPconst [0] z:(XOR x y))
266 // cond: v.Block == z.Block
267 // result: (CMPconst [0] convertPPC64OpToOpCC(z))
269 if auxIntToInt64(v.AuxInt) != 0 {
273 if z.Op != OpPPC64XOR {
276 if !(v.Block == z.Block) {
279 v.reset(OpPPC64CMPconst)
280 v.AuxInt = int64ToAuxInt(0)
281 v.AddArg(convertPPC64OpToOpCC(z))
284 // match: (CMPconst [0] z:(NEG x))
285 // cond: v.Block == z.Block
286 // result: (CMPconst [0] convertPPC64OpToOpCC(z))
288 if auxIntToInt64(v.AuxInt) != 0 {
292 if z.Op != OpPPC64NEG {
295 if !(v.Block == z.Block) {
298 v.reset(OpPPC64CMPconst)
299 v.AuxInt = int64ToAuxInt(0)
300 v.AddArg(convertPPC64OpToOpCC(z))
303 // match: (CMPconst [0] z:(CNTLZD x))
304 // cond: v.Block == z.Block
305 // result: (CMPconst [0] convertPPC64OpToOpCC(z))
307 if auxIntToInt64(v.AuxInt) != 0 {
311 if z.Op != OpPPC64CNTLZD {
314 if !(v.Block == z.Block) {
317 v.reset(OpPPC64CMPconst)
318 v.AuxInt = int64ToAuxInt(0)
319 v.AddArg(convertPPC64OpToOpCC(z))
322 // match: (CMPconst [0] z:(ADDconst [c] x))
323 // cond: int64(int16(c)) == c && v.Block == z.Block
324 // result: (CMPconst [0] convertPPC64OpToOpCC(z))
326 if auxIntToInt64(v.AuxInt) != 0 {
330 if z.Op != OpPPC64ADDconst {
333 c := auxIntToInt64(z.AuxInt)
334 if !(int64(int16(c)) == c && v.Block == z.Block) {
337 v.reset(OpPPC64CMPconst)
338 v.AuxInt = int64ToAuxInt(0)
339 v.AddArg(convertPPC64OpToOpCC(z))
342 // match: (CMPconst <t> [0] (Select0 z:(ADDCC x y)))
343 // result: (Select1 <t> z)
346 if auxIntToInt64(v.AuxInt) != 0 || v_0.Op != OpSelect0 {
350 if z.Op != OpPPC64ADDCC {
358 // match: (CMPconst <t> [0] (Select0 z:(ANDCC x y)))
359 // result: (Select1 <t> z)
362 if auxIntToInt64(v.AuxInt) != 0 || v_0.Op != OpSelect0 {
366 if z.Op != OpPPC64ANDCC {
374 // match: (CMPconst <t> [0] (Select0 z:(ANDNCC x y)))
375 // result: (Select1 <t> z)
378 if auxIntToInt64(v.AuxInt) != 0 || v_0.Op != OpSelect0 {
382 if z.Op != OpPPC64ANDNCC {
390 // match: (CMPconst <t> [0] (Select0 z:(ORCC x y)))
391 // result: (Select1 <t> z)
394 if auxIntToInt64(v.AuxInt) != 0 || v_0.Op != OpSelect0 {
398 if z.Op != OpPPC64ORCC {
406 // match: (CMPconst <t> [0] (Select0 z:(SUBCC x y)))
407 // result: (Select1 <t> z)
410 if auxIntToInt64(v.AuxInt) != 0 || v_0.Op != OpSelect0 {
414 if z.Op != OpPPC64SUBCC {
422 // match: (CMPconst <t> [0] (Select0 z:(NORCC x y)))
423 // result: (Select1 <t> z)
426 if auxIntToInt64(v.AuxInt) != 0 || v_0.Op != OpSelect0 {
430 if z.Op != OpPPC64NORCC {
438 // match: (CMPconst <t> [0] (Select0 z:(XORCC x y)))
439 // result: (Select1 <t> z)
442 if auxIntToInt64(v.AuxInt) != 0 || v_0.Op != OpSelect0 {
446 if z.Op != OpPPC64XORCC {
454 // match: (CMPconst <t> [0] (Select0 z:(ADDCCconst y)))
455 // result: (Select1 <t> z)
458 if auxIntToInt64(v.AuxInt) != 0 || v_0.Op != OpSelect0 {
462 if z.Op != OpPPC64ADDCCconst {
470 // match: (CMPconst <t> [0] (Select0 z:(NEGCC y)))
471 // result: (Select1 <t> z)
474 if auxIntToInt64(v.AuxInt) != 0 || v_0.Op != OpSelect0 {
478 if z.Op != OpPPC64NEGCC {
486 // match: (CMPconst <t> [0] (Select0 z:(CNTLZDCC y)))
487 // result: (Select1 <t> z)
490 if auxIntToInt64(v.AuxInt) != 0 || v_0.Op != OpSelect0 {
494 if z.Op != OpPPC64CNTLZDCC {
// rewriteValuePPC64latelower_OpPPC64ISEL simplifies ISEL when one
// selected operand is the constant zero:
//   - zero in the second operand: (ISELZ [a] x z);
//   - zero in the first operand: (ISELZ [a^0x4] y z), flipping bit
//     0x4 of the aux value so the condition selects the other operand.
// NOTE(review): interior lines (AddArg, returns, closing braces)
// appear elided in this view of the generated file.
504 func rewriteValuePPC64latelower_OpPPC64ISEL(v *Value) bool {
508 // match: (ISEL [a] x (MOVDconst [0]) z)
509 // result: (ISELZ [a] x z)
511 a := auxIntToInt32(v.AuxInt)
513 if v_1.Op != OpPPC64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
517 v.reset(OpPPC64ISELZ)
518 v.AuxInt = int32ToAuxInt(a)
522 // match: (ISEL [a] (MOVDconst [0]) y z)
523 // result: (ISELZ [a^0x4] y z)
525 a := auxIntToInt32(v.AuxInt)
526 if v_0.Op != OpPPC64MOVDconst || auxIntToInt64(v_0.AuxInt) != 0 {
531 v.reset(OpPPC64ISELZ)
532 v.AuxInt = int32ToAuxInt(a ^ 0x4)
// rewriteValuePPC64latelower_OpPPC64RLDICL merges an RLDICL applied to
// the result of an SRDconst into a single RLDICL whose aux encodes the
// combined rotate/mask (mergePPC64RLDICLandSRDconst). The guard
// (em&0xFF0000) == 0 restricts the existing encoded aux before
// merging; presumably that byte holds a rotate amount that must be
// zero for the merge to be valid — confirm against the encoder.
// NOTE(review): interior lines appear elided in this view of the
// generated file.
538 func rewriteValuePPC64latelower_OpPPC64RLDICL(v *Value) bool {
540 // match: (RLDICL [em] x:(SRDconst [s] a))
541 // cond: (em&0xFF0000) == 0
542 // result: (RLDICL [mergePPC64RLDICLandSRDconst(em, s)] a)
544 em := auxIntToInt64(v.AuxInt)
546 if x.Op != OpPPC64SRDconst {
549 s := auxIntToInt64(x.AuxInt)
551 if !((em & 0xFF0000) == 0) {
554 v.reset(OpPPC64RLDICL)
555 v.AuxInt = int64ToAuxInt(mergePPC64RLDICLandSRDconst(em, s))
// rewriteValuePPC64latelower_OpPPC64SETBC lowers SETBC (set byte on
// condition) for targets where GOPPC64 <= 9, which lack the native
// instruction: each form becomes (ISELZ [aux] (MOVDconst [1]) cmp),
// i.e. select 1 or 0 based on the same condition bit (aux 0, 1, or 2).
// NOTE(review): interior lines (AddArg, returns, closing braces)
// appear elided in this view of the generated file.
561 func rewriteValuePPC64latelower_OpPPC64SETBC(v *Value) bool {
564 typ := &b.Func.Config.Types
565 // match: (SETBC [2] cmp)
566 // cond: buildcfg.GOPPC64 <= 9
567 // result: (ISELZ [2] (MOVDconst [1]) cmp)
569 if auxIntToInt32(v.AuxInt) != 2 {
573 if !(buildcfg.GOPPC64 <= 9) {
576 v.reset(OpPPC64ISELZ)
577 v.AuxInt = int32ToAuxInt(2)
578 v0 := b.NewValue0(v.Pos, OpPPC64MOVDconst, typ.Int64)
579 v0.AuxInt = int64ToAuxInt(1)
583 // match: (SETBC [0] cmp)
584 // cond: buildcfg.GOPPC64 <= 9
585 // result: (ISELZ [0] (MOVDconst [1]) cmp)
587 if auxIntToInt32(v.AuxInt) != 0 {
591 if !(buildcfg.GOPPC64 <= 9) {
594 v.reset(OpPPC64ISELZ)
595 v.AuxInt = int32ToAuxInt(0)
596 v0 := b.NewValue0(v.Pos, OpPPC64MOVDconst, typ.Int64)
597 v0.AuxInt = int64ToAuxInt(1)
601 // match: (SETBC [1] cmp)
602 // cond: buildcfg.GOPPC64 <= 9
603 // result: (ISELZ [1] (MOVDconst [1]) cmp)
605 if auxIntToInt32(v.AuxInt) != 1 {
609 if !(buildcfg.GOPPC64 <= 9) {
612 v.reset(OpPPC64ISELZ)
613 v.AuxInt = int32ToAuxInt(1)
614 v0 := b.NewValue0(v.Pos, OpPPC64MOVDconst, typ.Int64)
615 v0.AuxInt = int64ToAuxInt(1)
// rewriteValuePPC64latelower_OpPPC64SETBCR lowers SETBCR (set byte on
// condition, reversed) for targets where GOPPC64 <= 9. Each form
// becomes (ISELZ [aux|0x4] (MOVDconst [1]) cmp): the 0x4 bit in the
// ISELZ aux (2->6, 0->4, 1->5) inverts the selection relative to the
// non-reversed SETBC lowering, matching ISEL's aux^0x4 convention.
// NOTE(review): interior lines (AddArg, returns, closing braces)
// appear elided in this view of the generated file.
621 func rewriteValuePPC64latelower_OpPPC64SETBCR(v *Value) bool {
624 typ := &b.Func.Config.Types
625 // match: (SETBCR [2] cmp)
626 // cond: buildcfg.GOPPC64 <= 9
627 // result: (ISELZ [6] (MOVDconst [1]) cmp)
629 if auxIntToInt32(v.AuxInt) != 2 {
633 if !(buildcfg.GOPPC64 <= 9) {
636 v.reset(OpPPC64ISELZ)
637 v.AuxInt = int32ToAuxInt(6)
638 v0 := b.NewValue0(v.Pos, OpPPC64MOVDconst, typ.Int64)
639 v0.AuxInt = int64ToAuxInt(1)
643 // match: (SETBCR [0] cmp)
644 // cond: buildcfg.GOPPC64 <= 9
645 // result: (ISELZ [4] (MOVDconst [1]) cmp)
647 if auxIntToInt32(v.AuxInt) != 0 {
651 if !(buildcfg.GOPPC64 <= 9) {
654 v.reset(OpPPC64ISELZ)
655 v.AuxInt = int32ToAuxInt(4)
656 v0 := b.NewValue0(v.Pos, OpPPC64MOVDconst, typ.Int64)
657 v0.AuxInt = int64ToAuxInt(1)
661 // match: (SETBCR [1] cmp)
662 // cond: buildcfg.GOPPC64 <= 9
663 // result: (ISELZ [5] (MOVDconst [1]) cmp)
665 if auxIntToInt32(v.AuxInt) != 1 {
669 if !(buildcfg.GOPPC64 <= 9) {
672 v.reset(OpPPC64ISELZ)
673 v.AuxInt = int32ToAuxInt(5)
674 v0 := b.NewValue0(v.Pos, OpPPC64MOVDconst, typ.Int64)
675 v0.AuxInt = int64ToAuxInt(1)
// rewriteValuePPC64latelower_OpSelect0 simplifies (Select0
// z:(ANDCCconst [m] x)) to (RLDICL [...] x) when z has exactly one
// use (the flags result is unwanted) and m is a valid contiguous
// shift mask, avoiding an unnecessary CC-setting op.
// NOTE(review): interior lines (AddArg, returns) appear elided in
// this view of the generated file.
681 func rewriteValuePPC64latelower_OpSelect0(v *Value) bool {
683 // match: (Select0 z:(ANDCCconst [m] x))
684 // cond: z.Uses == 1 && isPPC64ValidShiftMask(m)
685 // result: (RLDICL [encodePPC64RotateMask(0,m,64)] x)
688 if z.Op != OpPPC64ANDCCconst {
691 m := auxIntToInt64(z.AuxInt)
693 if !(z.Uses == 1 && isPPC64ValidShiftMask(m)) {
696 v.reset(OpPPC64RLDICL)
697 v.AuxInt = int64ToAuxInt(encodePPC64RotateMask(0, m, 64))
703 func rewriteBlockPPC64latelower(b *Block) bool {