p.To.Reg = r
p.SetFrom3Reg(v.Args[0].Reg())
+ case ssa.OpAMD64ANDQconst:
+ asm := v.Op.Asm()
+ // If the constant is positive and fits into 32 bits, use ANDL: the mask's
+ // upper 32 bits are then zero, and ANDL zero-extends its result to 64 bits, so it computes the same value as ANDQ while saving a REX.W prefix byte in the encoding.
+ if 0 <= v.AuxInt && v.AuxInt <= (1<<32-1) {
+ asm = x86.AANDL
+ }
+ p := s.Prog(asm)
+ p.From.Type = obj.TYPE_CONST
+ p.From.Offset = v.AuxInt
+ p.To.Type = obj.TYPE_REG
+ p.To.Reg = v.Reg()
+
case ssa.OpAMD64SUBQconst, ssa.OpAMD64SUBLconst,
- ssa.OpAMD64ANDQconst, ssa.OpAMD64ANDLconst,
+ ssa.OpAMD64ANDLconst,
ssa.OpAMD64ORQconst, ssa.OpAMD64ORLconst,
ssa.OpAMD64XORQconst, ssa.OpAMD64XORLconst,
ssa.OpAMD64SHLQconst, ssa.OpAMD64SHLLconst,
func Pow2Mods(n1 uint, n2 int) (uint, int) {
// 386:"ANDL\t[$]31",-"DIVL"
- // amd64:"ANDQ\t[$]31",-"DIVQ"
+ // amd64:"ANDL\t[$]31",-"DIVQ"
// arm:"AND\t[$]31",-".*udiv"
// arm64:"AND\t[$]31",-"UDIV"
// ppc64:"ANDCC\t[$]31"
func LenMod1(a []int) int {
// 386:"ANDL\t[$]1023"
- // amd64:"ANDQ\t[$]1023"
+ // amd64:"ANDL\t[$]1023"
// arm64:"AND\t[$]1023",-"SDIV"
// arm/6:"AND",-".*udiv"
// arm/7:"BFC",-".*udiv",-"AND"
func LenMod2(s string) int {
// 386:"ANDL\t[$]2047"
- // amd64:"ANDQ\t[$]2047"
+ // amd64:"ANDL\t[$]2047"
// arm64:"AND\t[$]2047",-"SDIV"
// arm/6:"AND",-".*udiv"
// arm/7:"BFC",-".*udiv",-"AND"
func CapMod(a []int) int {
// 386:"ANDL\t[$]4095"
- // amd64:"ANDQ\t[$]4095"
+ // amd64:"ANDL\t[$]4095"
// arm64:"AND\t[$]4095",-"SDIV"
// arm/6:"AND",-".*udiv"
// arm/7:"BFC",-".*udiv",-"AND"
}
func bitSetTest(x int) bool {
- // amd64:"ANDQ\t[$]9, AX"
+ // amd64:"ANDL\t[$]9, AX"
// amd64:"CMPQ\tAX, [$]9"
return x&9 == 9
}