(GetCallerPC ...) => (LoweredGetCallerPC ...)
(GetCallerSP ...) => (LoweredGetCallerSP ...)
(Addr {sym} base) => (LEAL {sym} base)
-(LocalAddr {sym} base _) => (LEAL {sym} base)
+(LocalAddr <t> {sym} base mem) && t.Elem().HasPointers() => (LEAL {sym} (SPanchored base mem))
+(LocalAddr <t> {sym} base _) && !t.Elem().HasPointers() => (LEAL {sym} base)
// block rewrites
(If (SETL cmp) yes no) => (LT cmp yes no)
(HasCPUFeature {s}) => (SETNE (CMPLconst [0] (LoweredHasCPUFeature {s})))
(Addr {sym} base) => (LEAQ {sym} base)
-(LocalAddr {sym} base _) => (LEAQ {sym} base)
+(LocalAddr <t> {sym} base mem) && t.Elem().HasPointers() => (LEAQ {sym} (SPanchored base mem))
+(LocalAddr <t> {sym} base _) && !t.Elem().HasPointers() => (LEAQ {sym} base)
(MOVBstore [off] {sym} ptr y:(SETL x) mem) && y.Uses == 1 => (SETLstore [off] {sym} ptr x mem)
(MOVBstore [off] {sym} ptr y:(SETLE x) mem) && y.Uses == 1 => (SETLEstore [off] {sym} ptr x mem)
(OffPtr [off] ptr) => (ADDconst [int32(off)] ptr)
(Addr {sym} base) => (MOVWaddr {sym} base)
-(LocalAddr {sym} base _) => (MOVWaddr {sym} base)
+(LocalAddr <t> {sym} base mem) && t.Elem().HasPointers() => (MOVWaddr {sym} (SPanchored base mem))
+(LocalAddr <t> {sym} base _) && !t.Elem().HasPointers() => (MOVWaddr {sym} base)
// loads
(Load <t> ptr mem) && t.IsBoolean() => (MOVBUload ptr mem)
(OffPtr [off] ptr) => (ADDconst [off] ptr)
(Addr {sym} base) => (MOVDaddr {sym} base)
-(LocalAddr {sym} base _) => (MOVDaddr {sym} base)
+(LocalAddr <t> {sym} base mem) && t.Elem().HasPointers() => (MOVDaddr {sym} (SPanchored base mem))
+(LocalAddr <t> {sym} base _) && !t.Elem().HasPointers() => (MOVDaddr {sym} base)
// loads
(Load <t> ptr mem) && t.IsBoolean() => (MOVBUload ptr mem)
(OffPtr [off] ptr) => (ADDVconst [off] ptr)
(Addr {sym} base) => (MOVVaddr {sym} base)
-(LocalAddr {sym} base _) => (MOVVaddr {sym} base)
+(LocalAddr <t> {sym} base mem) && t.Elem().HasPointers() => (MOVVaddr {sym} (SPanchored base mem))
+(LocalAddr <t> {sym} base _) && !t.Elem().HasPointers() => (MOVVaddr {sym} base)
// loads
(Load <t> ptr mem) && t.IsBoolean() => (MOVBUload ptr mem)
(OffPtr [off] ptr) => (ADDconst [int32(off)] ptr)
(Addr {sym} base) => (MOVWaddr {sym} base)
-(LocalAddr {sym} base _) => (MOVWaddr {sym} base)
+(LocalAddr <t> {sym} base mem) && t.Elem().HasPointers() => (MOVWaddr {sym} (SPanchored base mem))
+(LocalAddr <t> {sym} base _) && !t.Elem().HasPointers() => (MOVWaddr {sym} base)
// loads
(Load <t> ptr mem) && t.IsBoolean() => (MOVBUload ptr mem)
(OffPtr [off] ptr) => (ADDVconst [off] ptr)
(Addr {sym} base) => (MOVVaddr {sym} base)
-(LocalAddr {sym} base _) => (MOVVaddr {sym} base)
+(LocalAddr <t> {sym} base mem) && t.Elem().HasPointers() => (MOVVaddr {sym} (SPanchored base mem))
+(LocalAddr <t> {sym} base _) && !t.Elem().HasPointers() => (MOVVaddr {sym} base)
// loads
(Load <t> ptr mem) && t.IsBoolean() => (MOVBUload ptr mem)
(S(RAW|RW|LW) x (MOVDconst [c])) => (S(RAW|RW|LW)const [c&31 | (c>>5&1*31)] x)
(Addr {sym} base) => (MOVDaddr {sym} [0] base)
-(LocalAddr {sym} base _) => (MOVDaddr {sym} base)
+(LocalAddr <t> {sym} base mem) && t.Elem().HasPointers() => (MOVDaddr {sym} (SPanchored base mem))
+(LocalAddr <t> {sym} base _) && !t.Elem().HasPointers() => (MOVDaddr {sym} base)
(OffPtr [off] ptr) => (ADD (MOVDconst <typ.Int64> [off]) ptr)
// TODO: optimize these cases?
(ConstBool [val]) => (MOVDconst [int64(b2i(val))])
(Addr {sym} base) => (MOVaddr {sym} [0] base)
-(LocalAddr {sym} base _) => (MOVaddr {sym} base)
+(LocalAddr <t> {sym} base mem) && t.Elem().HasPointers() => (MOVaddr {sym} (SPanchored base mem))
+(LocalAddr <t> {sym} base _) && !t.Elem().HasPointers() => (MOVaddr {sym} base)
// Calls
(StaticCall ...) => (CALLstatic ...)
(GetCallerSP ...) => (LoweredGetCallerSP ...)
(GetCallerPC ...) => (LoweredGetCallerPC ...)
(Addr {sym} base) => (MOVDaddr {sym} base)
-(LocalAddr {sym} base _) => (MOVDaddr {sym} base)
+(LocalAddr <t> {sym} base mem) && t.Elem().HasPointers() => (MOVDaddr {sym} (SPanchored base mem))
+(LocalAddr <t> {sym} base _) && !t.Elem().HasPointers() => (MOVDaddr {sym} base)
(ITab (Load ptr mem)) => (MOVDload ptr mem)
// block rewrites
(GetCallerPC ...) => (LoweredGetCallerPC ...)
(GetCallerSP ...) => (LoweredGetCallerSP ...)
(Addr {sym} base) => (LoweredAddr {sym} [0] base)
-(LocalAddr {sym} base _) => (LoweredAddr {sym} base)
+(LocalAddr <t> {sym} base mem) && t.Elem().HasPointers() => (LoweredAddr {sym} (SPanchored base mem))
+(LocalAddr <t> {sym} base _) && !t.Elem().HasPointers() => (LoweredAddr {sym} base)
// Write barrier.
(WB ...) => (LoweredWB ...)
{name: "Addr", argLength: 1, aux: "Sym", symEffect: "Addr"}, // Address of a variable. Arg0=SB. Aux identifies the variable.
{name: "LocalAddr", argLength: 2, aux: "Sym", symEffect: "Addr"}, // Address of a variable. Arg0=SP. Arg1=mem. Aux identifies the variable.
- {name: "SP", zeroWidth: true}, // stack pointer
- {name: "SB", typ: "Uintptr", zeroWidth: true}, // static base pointer (a.k.a. globals pointer)
- {name: "Invalid"}, // unused value
+ {name: "SP", zeroWidth: true}, // stack pointer
+ {name: "SB", typ: "Uintptr", zeroWidth: true}, // static base pointer (a.k.a. globals pointer)
+ {name: "Invalid"}, // unused value
+ {name: "SPanchored", typ: "Uintptr", argLength: 2, zeroWidth: true}, // arg0 = SP, arg1 = mem. Result is identical to arg0, but cannot be scheduled before memory state arg1.
// Memory operations
{name: "Load", argLength: 2}, // Load from arg0. arg1=memory
continue // lowered
}
switch v.Op {
- case OpSP, OpSB, OpInitMem, OpArg, OpArgIntReg, OpArgFloatReg, OpPhi, OpVarDef, OpVarLive, OpKeepAlive, OpSelect0, OpSelect1, OpSelectN, OpConvert, OpInlMark:
+ case OpSP, OpSPanchored, OpSB, OpInitMem, OpArg, OpArgIntReg, OpArgFloatReg, OpPhi, OpVarDef, OpVarLive, OpKeepAlive, OpSelect0, OpSelect1, OpSelectN, OpConvert, OpInlMark:
continue // ok not to lower
case OpMakeResult:
if b.Controls[0] == v {
OpLocalAddr
OpSP
OpSB
+ OpSPanchored
OpLoad
OpDereference
OpStore
zeroWidth: true,
generic: true,
},
+ {
+ name: "SPanchored",
+ argLen: 2,
+ zeroWidth: true,
+ generic: true,
+ },
{
name: "Load",
argLen: 2,
case OpOffPtr:
return p1.AuxInt == p2.AuxInt && isSamePtr(p1.Args[0], p2.Args[0])
case OpAddr, OpLocalAddr:
- // OpAddr's 0th arg is either OpSP or OpSB, which means that it is uniquely identified by its Op.
- // Checking for value equality only works after [z]cse has run.
- return p1.Aux == p2.Aux && p1.Args[0].Op == p2.Args[0].Op
+ return p1.Aux == p2.Aux
case OpAddPtr:
return p1.Args[1] == p2.Args[1] && isSamePtr(p1.Args[0], p2.Args[0])
}
return false
}
func rewriteValue386_OpLocalAddr(v *Value) bool {
+ v_1 := v.Args[1]
v_0 := v.Args[0]
- // match: (LocalAddr {sym} base _)
+ b := v.Block
+ typ := &b.Func.Config.Types
+ // match: (LocalAddr <t> {sym} base mem)
+ // cond: t.Elem().HasPointers()
+ // result: (LEAL {sym} (SPanchored base mem))
+ for {
+ t := v.Type
+ sym := auxToSym(v.Aux)
+ base := v_0
+ mem := v_1
+ if !(t.Elem().HasPointers()) {
+ break
+ }
+ v.reset(Op386LEAL)
+ v.Aux = symToAux(sym)
+ v0 := b.NewValue0(v.Pos, OpSPanchored, typ.Uintptr)
+ v0.AddArg2(base, mem)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (LocalAddr <t> {sym} base _)
+ // cond: !t.Elem().HasPointers()
// result: (LEAL {sym} base)
for {
+ t := v.Type
sym := auxToSym(v.Aux)
base := v_0
+ if !(!t.Elem().HasPointers()) {
+ break
+ }
v.reset(Op386LEAL)
v.Aux = symToAux(sym)
v.AddArg(base)
return true
}
+ return false
}
func rewriteValue386_OpLsh16x16(v *Value) bool {
v_1 := v.Args[1]
return false
}
func rewriteValueAMD64_OpLocalAddr(v *Value) bool {
+ v_1 := v.Args[1]
v_0 := v.Args[0]
- // match: (LocalAddr {sym} base _)
+ b := v.Block
+ typ := &b.Func.Config.Types
+ // match: (LocalAddr <t> {sym} base mem)
+ // cond: t.Elem().HasPointers()
+ // result: (LEAQ {sym} (SPanchored base mem))
+ for {
+ t := v.Type
+ sym := auxToSym(v.Aux)
+ base := v_0
+ mem := v_1
+ if !(t.Elem().HasPointers()) {
+ break
+ }
+ v.reset(OpAMD64LEAQ)
+ v.Aux = symToAux(sym)
+ v0 := b.NewValue0(v.Pos, OpSPanchored, typ.Uintptr)
+ v0.AddArg2(base, mem)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (LocalAddr <t> {sym} base _)
+ // cond: !t.Elem().HasPointers()
// result: (LEAQ {sym} base)
for {
+ t := v.Type
sym := auxToSym(v.Aux)
base := v_0
+ if !(!t.Elem().HasPointers()) {
+ break
+ }
v.reset(OpAMD64LEAQ)
v.Aux = symToAux(sym)
v.AddArg(base)
return true
}
+ return false
}
func rewriteValueAMD64_OpLsh16x16(v *Value) bool {
v_1 := v.Args[1]
return false
}
func rewriteValueARM_OpLocalAddr(v *Value) bool {
+ v_1 := v.Args[1]
v_0 := v.Args[0]
- // match: (LocalAddr {sym} base _)
+ b := v.Block
+ typ := &b.Func.Config.Types
+ // match: (LocalAddr <t> {sym} base mem)
+ // cond: t.Elem().HasPointers()
+ // result: (MOVWaddr {sym} (SPanchored base mem))
+ for {
+ t := v.Type
+ sym := auxToSym(v.Aux)
+ base := v_0
+ mem := v_1
+ if !(t.Elem().HasPointers()) {
+ break
+ }
+ v.reset(OpARMMOVWaddr)
+ v.Aux = symToAux(sym)
+ v0 := b.NewValue0(v.Pos, OpSPanchored, typ.Uintptr)
+ v0.AddArg2(base, mem)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (LocalAddr <t> {sym} base _)
+ // cond: !t.Elem().HasPointers()
// result: (MOVWaddr {sym} base)
for {
+ t := v.Type
sym := auxToSym(v.Aux)
base := v_0
+ if !(!t.Elem().HasPointers()) {
+ break
+ }
v.reset(OpARMMOVWaddr)
v.Aux = symToAux(sym)
v.AddArg(base)
return true
}
+ return false
}
func rewriteValueARM_OpLsh16x16(v *Value) bool {
v_1 := v.Args[1]
return false
}
func rewriteValueARM64_OpLocalAddr(v *Value) bool {
+ v_1 := v.Args[1]
v_0 := v.Args[0]
- // match: (LocalAddr {sym} base _)
+ b := v.Block
+ typ := &b.Func.Config.Types
+ // match: (LocalAddr <t> {sym} base mem)
+ // cond: t.Elem().HasPointers()
+ // result: (MOVDaddr {sym} (SPanchored base mem))
+ for {
+ t := v.Type
+ sym := auxToSym(v.Aux)
+ base := v_0
+ mem := v_1
+ if !(t.Elem().HasPointers()) {
+ break
+ }
+ v.reset(OpARM64MOVDaddr)
+ v.Aux = symToAux(sym)
+ v0 := b.NewValue0(v.Pos, OpSPanchored, typ.Uintptr)
+ v0.AddArg2(base, mem)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (LocalAddr <t> {sym} base _)
+ // cond: !t.Elem().HasPointers()
// result: (MOVDaddr {sym} base)
for {
+ t := v.Type
sym := auxToSym(v.Aux)
base := v_0
+ if !(!t.Elem().HasPointers()) {
+ break
+ }
v.reset(OpARM64MOVDaddr)
v.Aux = symToAux(sym)
v.AddArg(base)
return true
}
+ return false
}
func rewriteValueARM64_OpLsh16x16(v *Value) bool {
v_1 := v.Args[1]
return false
}
func rewriteValueLOONG64_OpLocalAddr(v *Value) bool {
+ v_1 := v.Args[1]
v_0 := v.Args[0]
- // match: (LocalAddr {sym} base _)
+ b := v.Block
+ typ := &b.Func.Config.Types
+ // match: (LocalAddr <t> {sym} base mem)
+ // cond: t.Elem().HasPointers()
+ // result: (MOVVaddr {sym} (SPanchored base mem))
+ for {
+ t := v.Type
+ sym := auxToSym(v.Aux)
+ base := v_0
+ mem := v_1
+ if !(t.Elem().HasPointers()) {
+ break
+ }
+ v.reset(OpLOONG64MOVVaddr)
+ v.Aux = symToAux(sym)
+ v0 := b.NewValue0(v.Pos, OpSPanchored, typ.Uintptr)
+ v0.AddArg2(base, mem)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (LocalAddr <t> {sym} base _)
+ // cond: !t.Elem().HasPointers()
// result: (MOVVaddr {sym} base)
for {
+ t := v.Type
sym := auxToSym(v.Aux)
base := v_0
+ if !(!t.Elem().HasPointers()) {
+ break
+ }
v.reset(OpLOONG64MOVVaddr)
v.Aux = symToAux(sym)
v.AddArg(base)
return true
}
+ return false
}
func rewriteValueLOONG64_OpLsh16x16(v *Value) bool {
v_1 := v.Args[1]
return false
}
func rewriteValueMIPS_OpLocalAddr(v *Value) bool {
+ v_1 := v.Args[1]
v_0 := v.Args[0]
- // match: (LocalAddr {sym} base _)
+ b := v.Block
+ typ := &b.Func.Config.Types
+ // match: (LocalAddr <t> {sym} base mem)
+ // cond: t.Elem().HasPointers()
+ // result: (MOVWaddr {sym} (SPanchored base mem))
+ for {
+ t := v.Type
+ sym := auxToSym(v.Aux)
+ base := v_0
+ mem := v_1
+ if !(t.Elem().HasPointers()) {
+ break
+ }
+ v.reset(OpMIPSMOVWaddr)
+ v.Aux = symToAux(sym)
+ v0 := b.NewValue0(v.Pos, OpSPanchored, typ.Uintptr)
+ v0.AddArg2(base, mem)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (LocalAddr <t> {sym} base _)
+ // cond: !t.Elem().HasPointers()
// result: (MOVWaddr {sym} base)
for {
+ t := v.Type
sym := auxToSym(v.Aux)
base := v_0
+ if !(!t.Elem().HasPointers()) {
+ break
+ }
v.reset(OpMIPSMOVWaddr)
v.Aux = symToAux(sym)
v.AddArg(base)
return true
}
+ return false
}
func rewriteValueMIPS_OpLsh16x16(v *Value) bool {
v_1 := v.Args[1]
return false
}
func rewriteValueMIPS64_OpLocalAddr(v *Value) bool {
+ v_1 := v.Args[1]
v_0 := v.Args[0]
- // match: (LocalAddr {sym} base _)
+ b := v.Block
+ typ := &b.Func.Config.Types
+ // match: (LocalAddr <t> {sym} base mem)
+ // cond: t.Elem().HasPointers()
+ // result: (MOVVaddr {sym} (SPanchored base mem))
+ for {
+ t := v.Type
+ sym := auxToSym(v.Aux)
+ base := v_0
+ mem := v_1
+ if !(t.Elem().HasPointers()) {
+ break
+ }
+ v.reset(OpMIPS64MOVVaddr)
+ v.Aux = symToAux(sym)
+ v0 := b.NewValue0(v.Pos, OpSPanchored, typ.Uintptr)
+ v0.AddArg2(base, mem)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (LocalAddr <t> {sym} base _)
+ // cond: !t.Elem().HasPointers()
// result: (MOVVaddr {sym} base)
for {
+ t := v.Type
sym := auxToSym(v.Aux)
base := v_0
+ if !(!t.Elem().HasPointers()) {
+ break
+ }
v.reset(OpMIPS64MOVVaddr)
v.Aux = symToAux(sym)
v.AddArg(base)
return true
}
+ return false
}
func rewriteValueMIPS64_OpLsh16x16(v *Value) bool {
v_1 := v.Args[1]
return false
}
func rewriteValuePPC64_OpLocalAddr(v *Value) bool {
+ v_1 := v.Args[1]
v_0 := v.Args[0]
- // match: (LocalAddr {sym} base _)
+ b := v.Block
+ typ := &b.Func.Config.Types
+ // match: (LocalAddr <t> {sym} base mem)
+ // cond: t.Elem().HasPointers()
+ // result: (MOVDaddr {sym} (SPanchored base mem))
+ for {
+ t := v.Type
+ sym := auxToSym(v.Aux)
+ base := v_0
+ mem := v_1
+ if !(t.Elem().HasPointers()) {
+ break
+ }
+ v.reset(OpPPC64MOVDaddr)
+ v.Aux = symToAux(sym)
+ v0 := b.NewValue0(v.Pos, OpSPanchored, typ.Uintptr)
+ v0.AddArg2(base, mem)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (LocalAddr <t> {sym} base _)
+ // cond: !t.Elem().HasPointers()
// result: (MOVDaddr {sym} base)
for {
+ t := v.Type
sym := auxToSym(v.Aux)
base := v_0
+ if !(!t.Elem().HasPointers()) {
+ break
+ }
v.reset(OpPPC64MOVDaddr)
v.Aux = symToAux(sym)
v.AddArg(base)
return true
}
+ return false
}
func rewriteValuePPC64_OpLsh16x16(v *Value) bool {
v_1 := v.Args[1]
return false
}
func rewriteValueRISCV64_OpLocalAddr(v *Value) bool {
+ v_1 := v.Args[1]
v_0 := v.Args[0]
- // match: (LocalAddr {sym} base _)
+ b := v.Block
+ typ := &b.Func.Config.Types
+ // match: (LocalAddr <t> {sym} base mem)
+ // cond: t.Elem().HasPointers()
+ // result: (MOVaddr {sym} (SPanchored base mem))
+ for {
+ t := v.Type
+ sym := auxToSym(v.Aux)
+ base := v_0
+ mem := v_1
+ if !(t.Elem().HasPointers()) {
+ break
+ }
+ v.reset(OpRISCV64MOVaddr)
+ v.Aux = symToAux(sym)
+ v0 := b.NewValue0(v.Pos, OpSPanchored, typ.Uintptr)
+ v0.AddArg2(base, mem)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (LocalAddr <t> {sym} base _)
+ // cond: !t.Elem().HasPointers()
// result: (MOVaddr {sym} base)
for {
+ t := v.Type
sym := auxToSym(v.Aux)
base := v_0
+ if !(!t.Elem().HasPointers()) {
+ break
+ }
v.reset(OpRISCV64MOVaddr)
v.Aux = symToAux(sym)
v.AddArg(base)
return true
}
+ return false
}
func rewriteValueRISCV64_OpLsh16x16(v *Value) bool {
v_1 := v.Args[1]
return false
}
func rewriteValueS390X_OpLocalAddr(v *Value) bool {
+ v_1 := v.Args[1]
v_0 := v.Args[0]
- // match: (LocalAddr {sym} base _)
+ b := v.Block
+ typ := &b.Func.Config.Types
+ // match: (LocalAddr <t> {sym} base mem)
+ // cond: t.Elem().HasPointers()
+ // result: (MOVDaddr {sym} (SPanchored base mem))
+ for {
+ t := v.Type
+ sym := auxToSym(v.Aux)
+ base := v_0
+ mem := v_1
+ if !(t.Elem().HasPointers()) {
+ break
+ }
+ v.reset(OpS390XMOVDaddr)
+ v.Aux = symToAux(sym)
+ v0 := b.NewValue0(v.Pos, OpSPanchored, typ.Uintptr)
+ v0.AddArg2(base, mem)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (LocalAddr <t> {sym} base _)
+ // cond: !t.Elem().HasPointers()
// result: (MOVDaddr {sym} base)
for {
+ t := v.Type
sym := auxToSym(v.Aux)
base := v_0
+ if !(!t.Elem().HasPointers()) {
+ break
+ }
v.reset(OpS390XMOVDaddr)
v.Aux = symToAux(sym)
v.AddArg(base)
return true
}
+ return false
}
func rewriteValueS390X_OpLsh16x16(v *Value) bool {
v_1 := v.Args[1]
return false
}
func rewriteValueWasm_OpLocalAddr(v *Value) bool {
+ v_1 := v.Args[1]
v_0 := v.Args[0]
- // match: (LocalAddr {sym} base _)
+ b := v.Block
+ typ := &b.Func.Config.Types
+ // match: (LocalAddr <t> {sym} base mem)
+ // cond: t.Elem().HasPointers()
+ // result: (LoweredAddr {sym} (SPanchored base mem))
+ for {
+ t := v.Type
+ sym := auxToSym(v.Aux)
+ base := v_0
+ mem := v_1
+ if !(t.Elem().HasPointers()) {
+ break
+ }
+ v.reset(OpWasmLoweredAddr)
+ v.Aux = symToAux(sym)
+ v0 := b.NewValue0(v.Pos, OpSPanchored, typ.Uintptr)
+ v0.AddArg2(base, mem)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (LocalAddr <t> {sym} base _)
+ // cond: !t.Elem().HasPointers()
// result: (LoweredAddr {sym} base)
for {
+ t := v.Type
sym := auxToSym(v.Aux)
base := v_0
+ if !(!t.Elem().HasPointers()) {
+ break
+ }
v.reset(OpWasmLoweredAddr)
v.Aux = symToAux(sym)
v.AddArg(base)
return true
}
+ return false
}
func rewriteValueWasm_OpLsh16x16(v *Value) bool {
v_1 := v.Args[1]
package ssa
import (
+ "cmd/compile/internal/base"
"cmd/compile/internal/types"
"container/heap"
"sort"
}
}
+ // Remove SPanchored now that we've scheduled.
+ for _, b := range f.Blocks {
+ for _, v := range b.Values {
+ for i, a := range v.Args {
+ if a.Op == OpSPanchored {
+ v.SetArg(i, a.Args[0])
+ }
+ }
+ }
+ }
+ for _, b := range f.Blocks {
+ i := 0
+ for _, v := range b.Values {
+ if v.Op == OpSPanchored {
+ // Free this value
+ if v.Uses != 0 {
+ base.Fatalf("SPanchored still has %d uses", v.Uses)
+ }
+ v.resetArgs()
+ f.freeValue(v)
+ } else {
+ b.Values[i] = v
+ i++
+ }
+ }
+ b.truncateValues(i)
+ }
+
f.scheduled = true
}
}
// Check that divisibility checks x%c==0 are converted to MULs and rotates
-func Divisible(n1 uint, n2 int) (bool, bool, bool, bool) {
+func DivisibleU(n uint) (bool, bool) {
// amd64:"MOVQ\t[$]-6148914691236517205","IMULQ","ROLQ\t[$]63",-"DIVQ"
// 386:"IMUL3L\t[$]-1431655765","ROLL\t[$]31",-"DIVQ"
// arm64:"MOVD\t[$]-6148914691236517205","MOVD\t[$]3074457345618258602","MUL","ROR",-"DIV"
// arm:"MUL","CMP\t[$]715827882",-".*udiv"
// ppc64:"MULLD","ROTL\t[$]63"
// ppc64le:"MULLD","ROTL\t[$]63"
- evenU := n1%6 == 0
+ even := n%6 == 0
// amd64:"MOVQ\t[$]-8737931403336103397","IMULQ",-"ROLQ",-"DIVQ"
// 386:"IMUL3L\t[$]678152731",-"ROLL",-"DIVQ"
// arm:"MUL","CMP\t[$]226050910",-".*udiv"
// ppc64:"MULLD",-"ROTL"
// ppc64le:"MULLD",-"ROTL"
- oddU := n1%19 == 0
+ odd := n%19 == 0
+ return even, odd
+}
+
+func Divisible(n int) (bool, bool) {
// amd64:"IMULQ","ADD","ROLQ\t[$]63",-"DIVQ"
// 386:"IMUL3L\t[$]-1431655765","ADDL\t[$]715827882","ROLL\t[$]31",-"DIVQ"
- // arm64:"MUL","ADD\tR","ROR",-"DIV"
+ // arm64:"MOVD\t[$]-6148914691236517205","MOVD\t[$]3074457345618258602","MUL","ADD\tR","ROR",-"DIV"
// arm:"MUL","ADD\t[$]715827882",-".*udiv"
// ppc64/power8:"MULLD","ADD","ROTL\t[$]63"
// ppc64le/power8:"MULLD","ADD","ROTL\t[$]63"
// ppc64/power9:"MADDLD","ROTL\t[$]63"
// ppc64le/power9:"MADDLD","ROTL\t[$]63"
- evenS := n2%6 == 0
+ even := n%6 == 0
// amd64:"IMULQ","ADD",-"ROLQ",-"DIVQ"
// 386:"IMUL3L\t[$]678152731","ADDL\t[$]113025455",-"ROLL",-"DIVQ"
// ppc64/power9:"MADDLD",-"ROTL"
// ppc64le/power8:"MULLD","ADD",-"ROTL"
// ppc64le/power9:"MADDLD",-"ROTL"
- oddS := n2%19 == 0
+ odd := n%19 == 0
- return evenU, oddU, evenS, oddS
+ return even, odd
}
// Check that fix-up code is not generated for divisions where it has been proven that
package codegen
-var gx, gy int
-
// Test to make sure that (CMPQ (ANDQ x y) [0]) does not get rewritten to
// (TESTQ x y) if the ANDQ has other uses. If that rewrite happens, then one
// of the args of the ANDQ needs to be saved so it can be used as the arg to TESTQ.
func andWithUse(x, y int) int {
- // Load x,y into registers, so those MOVQ will not appear at the z := x&y line.
- gx, gy = x, y
- // amd64:-"MOVQ"
z := x & y
+ // amd64:`TESTQ\s(AX, AX|BX, BX|CX, CX|DX, DX|SI, SI|DI, DI|R8, R8|R9, R9|R10, R10|R11, R11|R12, R12|R13, R13|R15, R15)`
if z == 0 {
return 77
}