(TESTW (MOVLconst [c]) x) => (TESTWconst [int16(c)] x)
(TESTB (MOVLconst [c]) x) => (TESTBconst [int8(c)] x)
-// shorten bitwise AND/TESTQ if upper 32 bits are known to be zero.
-(ANDQ x y) && (zeroUpper32Bits(x, 3) || zeroUpper32Bits(y, 3)) => (ANDL x y)
-(TESTQ x y) && (zeroUpper32Bits(x, 3) || zeroUpper32Bits(y, 3)) => (TESTL x y)
-
// TEST %reg,%reg is shorter than CMP
(CMPQconst x [0]) => (TESTQ x x)
(CMPLconst x [0]) => (TESTL x x)
v.copyOf(x)
return true
}
- // match: (ANDQ x y)
- // cond: (zeroUpper32Bits(x, 3) || zeroUpper32Bits(y, 3))
- // result: (ANDL x y)
- for {
- for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
- x := v_0
- y := v_1
- if !(zeroUpper32Bits(x, 3) || zeroUpper32Bits(y, 3)) {
- continue
- }
- v.reset(OpAMD64ANDL)
- v.AddArg2(x, y)
- return true
- }
- break
- }
// match: (ANDQ x l:(MOVQload [off] {sym} ptr mem))
// cond: canMergeLoadClobber(v, l, x) && clobber(l)
// result: (ANDQload x [off] {sym} ptr mem)
}
break
}
- // match: (TESTQ x y)
- // cond: (zeroUpper32Bits(x, 3) || zeroUpper32Bits(y, 3))
- // result: (TESTL x y)
- for {
- for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
- x := v_0
- y := v_1
- if !(zeroUpper32Bits(x, 3) || zeroUpper32Bits(y, 3)) {
- continue
- }
- v.reset(OpAMD64TESTL)
- v.AddArg2(x, y)
- return true
- }
- break
- }
// match: (TESTQ l:(MOVQload {sym} [off] ptr mem) l2)
// cond: l == l2 && l.Uses == 2 && clobber(l)
// result: @l.Block (CMPQconstload {sym} [makeValAndOff(0, off)] ptr mem)
--- /dev/null
+// run
+
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// NOTE(review): regression test added by this patch — presumably it
+// exercises the AMD64 ANDQ/TESTQ narrowing rules removed above;
+// confirm against the associated issue/CL.
+package main
+
+import (
+ "fmt"
+ "math/big"
+)
+
+// f boxes x into a *big.Int. The uint32 argument is converted to
+// int64, which zero-extends: 0xffffffff must become 4294967295,
+// never sign-extend to -1. Marked noinline so the conversion is
+// compiled in isolation instead of being folded at the call site.
+//
+//go:noinline
+func f(x uint32) *big.Int {
+ return big.NewInt(int64(x))
+}
+func main() {
+ // Compare the value produced through the noinline conversion
+ // against a big.Int built directly from the same constant;
+ // a miscompiled zero-extension would make these differ.
+ b := f(0xffffffff)
+ c := big.NewInt(0xffffffff)
+ if b.Cmp(c) != 0 {
+ panic(fmt.Sprintf("b:%x c:%x", b, c))
+ }
+}