&& warnRule(fe.Debug_checknil(), v, "removed nil check")
=> (Invalid)
+// Addresses of globals are always non-nil.
+// The same holds through a Convert (pointer/uintptr round trip),
+// which preserves the pointer value.
+(NilCheck (Addr {_} (SB)) _) => (Invalid)
+(NilCheck (Convert (Addr {_} (SB)) _) _) => (Invalid)
+
// for late-expanded calls, recognize memequal applied to a single constant byte
// Support is limited to 1, 2, 4, 8 byte sizes
(StaticLECall {callAux} sptr (Addr {scon} (SB)) (Const64 [1]) mem)
(NeqPtr (OffPtr [o1] p1) (OffPtr [o2] p2)) && isSamePtr(p1, p2) => (ConstBool [o1 != o2])
(EqPtr (Const(32|64) [c]) (Const(32|64) [d])) => (ConstBool [c == d])
(NeqPtr (Const(32|64) [c]) (Const(32|64) [d])) => (ConstBool [c != d])
+(EqPtr (Convert (Addr {x} _) _) (Addr {y} _)) => (ConstBool [x==y])
+(NeqPtr (Convert (Addr {x} _) _) (Addr {y} _)) => (ConstBool [x!=y])
(EqPtr (LocalAddr _ _) (Addr _)) => (ConstBool [false])
(EqPtr (OffPtr (LocalAddr _ _)) (Addr _)) => (ConstBool [false])
// Evaluate constant user nil checks.
(IsNonNil (ConstNil)) => (ConstBool [false])
(IsNonNil (Const(32|64) [c])) => (ConstBool [c != 0])
-(IsNonNil (Addr _)) => (ConstBool [true])
+(IsNonNil (Addr _) ) => (ConstBool [true])
+(IsNonNil (Convert (Addr _) _)) => (ConstBool [true])
(IsNonNil (LocalAddr _ _)) => (ConstBool [true])
// Inline small or disjoint runtime.memmove calls with constant length.
=> (Move {types.Types[types.TUINT8]} [int64(sz)] dst src mem)
// De-virtualize late-expanded interface calls into late-expanded static calls.
-// Note that (ITab (IMake)) doesn't get rewritten until after the first opt pass,
-// so this rule should trigger reliably.
-// devirtLECall removes the first argument, adds the devirtualized symbol to the AuxCall, and changes the opcode
-(InterLECall [argsize] {auxCall} (Load (OffPtr [off] (ITab (IMake (Addr {itab} (SB)) _))) _) ___) && devirtLESym(v, auxCall, itab, off) !=
- nil => devirtLECall(v, devirtLESym(v, auxCall, itab, off))
+(InterLECall [argsize] {auxCall} (Addr {fn} (SB)) ___) => devirtLECall(v, fn.(*obj.LSym))
// Move and Zero optimizations.
// Move source and destination may overlap.
(RotateLeft(64|32|16|8) (RotateLeft(64|32|16|8) x c) d) && c.Type.Size() == 4 && d.Type.Size() == 4 => (RotateLeft(64|32|16|8) x (Add32 <c.Type> c d))
(RotateLeft(64|32|16|8) (RotateLeft(64|32|16|8) x c) d) && c.Type.Size() == 2 && d.Type.Size() == 2 => (RotateLeft(64|32|16|8) x (Add16 <c.Type> c d))
(RotateLeft(64|32|16|8) (RotateLeft(64|32|16|8) x c) d) && c.Type.Size() == 1 && d.Type.Size() == 1 => (RotateLeft(64|32|16|8) x (Add8 <c.Type> c d))
+
+// Loading constant values from dictionaries and itabs.
+// When the loaded uintptr word is backed by a relocation to another
+// symbol (see isFixedSym), fold the load to that symbol's address.
+(Load <t> (OffPtr [off] (Addr {s} sb) ) _) && t.IsUintptr() && isFixedSym(s, off) => (Addr {fixedSym(b.Func, s, off)} sb)
+(Load <t> (OffPtr [off] (Convert (Addr {s} sb) _) ) _) && t.IsUintptr() && isFixedSym(s, off) => (Addr {fixedSym(b.Func, s, off)} sb)
+(Load <t> (OffPtr [off] (ITab (IMake (Addr {s} sb) _))) _) && t.IsUintptr() && isFixedSym(s, off) => (Addr {fixedSym(b.Func, s, off)} sb)
+(Load <t> (OffPtr [off] (ITab (IMake (Convert (Addr {s} sb) _) _))) _) && t.IsUintptr() && isFixedSym(s, off) => (Addr {fixedSym(b.Func, s, off)} sb)
+
+// Loading constant values from runtime._type.hash.
+// The 4-byte hash word of a type descriptor is computable at compile
+// time (see isFixed32/fixed32), so fold the load to a constant.
+(Load <t> (OffPtr [off] (Addr {sym} _) ) _) && t.IsInteger() && t.Size() == 4 && isFixed32(config, sym, off) => (Const32 [fixed32(config, sym, off)])
+(Load <t> (OffPtr [off] (Convert (Addr {sym} _) _) ) _) && t.IsInteger() && t.Size() == 4 && isFixed32(config, sym, off) => (Const32 [fixed32(config, sym, off)])
+(Load <t> (OffPtr [off] (ITab (IMake (Addr {sym} _) _))) _) && t.IsInteger() && t.Size() == 4 && isFixed32(config, sym, off) => (Const32 [fixed32(config, sym, off)])
+(Load <t> (OffPtr [off] (ITab (IMake (Convert (Addr {sym} _) _) _))) _) && t.IsInteger() && t.Size() == 4 && isFixed32(config, sym, off) => (Const32 [fixed32(config, sym, off)])
import (
"cmd/compile/internal/base"
"cmd/compile/internal/logopt"
+ "cmd/compile/internal/reflectdata"
"cmd/compile/internal/types"
"cmd/internal/obj"
"cmd/internal/obj/s390x"
"math/bits"
"os"
"path/filepath"
+ "strings"
)
type deadValueChoice bool
return nil
}
-// de-virtualize an InterLECall
-// 'sym' is the symbol for the itab.
-func devirtLESym(v *Value, aux Aux, sym Sym, offset int64) *obj.LSym {
- n, ok := sym.(*obj.LSym)
- if !ok {
- return nil
- }
-
- lsym := loadLSymOffset(n, offset)
- if f := v.Block.Func; f.pass.debug > 0 {
- if lsym != nil {
- f.Warnl(v.Pos, "de-virtualizing call")
- } else {
- f.Warnl(v.Pos, "couldn't de-virtualize call")
- }
- }
- return lsym
-}
-
func devirtLECall(v *Value, sym *obj.LSym) *Value {
v.Op = OpStaticLECall
auxcall := v.Aux.(*AuxCall)
copy(v.Args[0:], v.Args[1:])
v.Args[len(v.Args)-1] = nil // aid GC
v.Args = v.Args[:len(v.Args)-1]
+ if f := v.Block.Func; f.pass.debug > 0 {
+ f.Warnl(v.Pos, "de-virtualizing call")
+ }
return v
}
return true
}
+// isFixed32 reports whether the int32 at offset off in symbol sym
+// is known and constant at compile time.
+func isFixed32(c *Config, sym Sym, off int64) bool {
+	return isFixed(c, sym, off, 4)
+}
+
+// isFixed reports whether the size bytes at offset off in symbol sym
+// are known and constant at compile time.
+// Currently the only data it recognizes is the 4-byte hash field of a
+// runtime._type descriptor, at offset 2*PtrSize (see fixed32).
+func isFixed(c *Config, sym Sym, off, size int64) bool {
+	lsym := sym.(*obj.LSym)
+	if lsym.Extra == nil {
+		// No extra metadata attached; nothing is known about sym's contents.
+		return false
+	}
+	if _, ok := (*lsym.Extra).(*obj.TypeInfo); ok {
+		if off == 2*c.PtrSize && size == 4 {
+			return true // type hash field
+		}
+	}
+	return false
+}
+// fixed32 returns the constant int32 stored at offset off in symbol sym.
+// Callers must first establish isFixed32(c, sym, off); otherwise the
+// unchecked *lsym.Extra dereference may panic or Fatalf fires.
+func fixed32(c *Config, sym Sym, off int64) int32 {
+	lsym := sym.(*obj.LSym)
+	if ti, ok := (*lsym.Extra).(*obj.TypeInfo); ok {
+		if off == 2*c.PtrSize {
+			// The hash field of a runtime._type: recompute it from the type.
+			return int32(types.TypeHash(ti.Type.(*types.Type)))
+		}
+	}
+	base.Fatalf("fixed32 data not known for %s:%d", sym, off)
+	return 0
+}
+
+// isFixedSym reports whether the contents of sym at the given offset
+// are known and are the constant address of another symbol (i.e. a
+// zero-addend address relocation in read-only data).
+func isFixedSym(sym Sym, off int64) bool {
+	lsym := sym.(*obj.LSym)
+	switch {
+	case lsym.Type == objabi.SRODATA:
+		// itabs, dictionaries: read-only data whose relocations are
+		// resolved at link time and never change afterwards.
+	default:
+		return false
+	}
+	for _, r := range lsym.R {
+		if (r.Type == objabi.R_ADDR || r.Type == objabi.R_WEAKADDR) && int64(r.Off) == off && r.Add == 0 {
+			return true
+		}
+	}
+	return false
+}
+// fixedSym returns the symbol whose address is stored at offset off in
+// symbol sym. Callers must first establish isFixedSym(sym, off);
+// otherwise Fatalf fires.
+func fixedSym(f *Func, sym Sym, off int64) Sym {
+	lsym := sym.(*obj.LSym)
+	for _, r := range lsym.R {
+		// Match exactly the relocations accepted by isFixedSym, including
+		// the zero-addend requirement, so the two functions cannot
+		// disagree about which relocation is meant.
+		if (r.Type == objabi.R_ADDR || r.Type == objabi.R_WEAKADDR) && int64(r.Off) == off && r.Add == 0 {
+			if strings.HasPrefix(r.Sym.Name, "type:") {
+				// In case we're loading a type out of a dictionary, we need to record
+				// that the containing function might put that type in an interface.
+				// That information is currently recorded in relocations in the dictionary,
+				// but if we perform this load at compile time then the dictionary
+				// might be dead.
+				reflectdata.MarkTypeSymUsedInInterface(r.Sym, f.fe.Func().Linksym())
+			}
+			return r.Sym
+		}
+	}
+	base.Fatalf("fixedSym data not known for %s:%d", sym, off)
+	return nil
+}
+
// read8 reads one byte from the read-only global sym at offset off.
func read8(sym interface{}, off int64) uint8 {
lsym := sym.(*obj.LSym)
package ssa
import "math"
+import "cmd/internal/obj"
import "cmd/compile/internal/types"
import "cmd/compile/internal/ir"
}
break
}
+ // match: (EqPtr (Convert (Addr {x} _) _) (Addr {y} _))
+ // result: (ConstBool [x==y])
+ for {
+ for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+ if v_0.Op != OpConvert {
+ continue
+ }
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpAddr {
+ continue
+ }
+ x := auxToSym(v_0_0.Aux)
+ if v_1.Op != OpAddr {
+ continue
+ }
+ y := auxToSym(v_1.Aux)
+ v.reset(OpConstBool)
+ v.AuxInt = boolToAuxInt(x == y)
+ return true
+ }
+ break
+ }
// match: (EqPtr (LocalAddr _ _) (Addr _))
// result: (ConstBool [false])
for {
return false
}
func rewriteValuegeneric_OpInterLECall(v *Value) bool {
- // match: (InterLECall [argsize] {auxCall} (Load (OffPtr [off] (ITab (IMake (Addr {itab} (SB)) _))) _) ___)
- // cond: devirtLESym(v, auxCall, itab, off) != nil
- // result: devirtLECall(v, devirtLESym(v, auxCall, itab, off))
+ // match: (InterLECall [argsize] {auxCall} (Addr {fn} (SB)) ___)
+ // result: devirtLECall(v, fn.(*obj.LSym))
for {
if len(v.Args) < 1 {
break
}
- auxCall := auxToCall(v.Aux)
v_0 := v.Args[0]
- if v_0.Op != OpLoad {
+ if v_0.Op != OpAddr {
break
}
+ fn := auxToSym(v_0.Aux)
v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpOffPtr {
- break
- }
- off := auxIntToInt64(v_0_0.AuxInt)
- v_0_0_0 := v_0_0.Args[0]
- if v_0_0_0.Op != OpITab {
- break
- }
- v_0_0_0_0 := v_0_0_0.Args[0]
- if v_0_0_0_0.Op != OpIMake {
- break
- }
- v_0_0_0_0_0 := v_0_0_0_0.Args[0]
- if v_0_0_0_0_0.Op != OpAddr {
- break
- }
- itab := auxToSym(v_0_0_0_0_0.Aux)
- v_0_0_0_0_0_0 := v_0_0_0_0_0.Args[0]
- if v_0_0_0_0_0_0.Op != OpSB || !(devirtLESym(v, auxCall, itab, off) != nil) {
+ if v_0_0.Op != OpSB {
break
}
- v.copyOf(devirtLECall(v, devirtLESym(v, auxCall, itab, off)))
+ v.copyOf(devirtLECall(v, fn.(*obj.LSym)))
return true
}
return false
v.AuxInt = boolToAuxInt(c != 0)
return true
}
- // match: (IsNonNil (Addr _))
+ // match: (IsNonNil (Addr _) )
// result: (ConstBool [true])
for {
if v_0.Op != OpAddr {
v.AuxInt = boolToAuxInt(true)
return true
}
+ // match: (IsNonNil (Convert (Addr _) _))
+ // result: (ConstBool [true])
+ for {
+ if v_0.Op != OpConvert {
+ break
+ }
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpAddr {
+ break
+ }
+ v.reset(OpConstBool)
+ v.AuxInt = boolToAuxInt(true)
+ return true
+ }
+ // match: (IsNonNil (LocalAddr _ _))
+ // result: (ConstBool [true])
+ for {
+ if v_0.Op != OpLocalAddr {
+ break
+ }
+ v.reset(OpConstBool)
+ v.AuxInt = boolToAuxInt(true)
+ return true
+ }
return false
}
func rewriteValuegeneric_OpIsSliceInBounds(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
b := v.Block
+ config := b.Func.Config
fe := b.Func.fe
// match: (Load <t1> p1 (Store {t2} p2 x _))
// cond: isSamePtr(p1, p2) && t1.Compare(x.Type) == types.CMPeq && t1.Size() == t2.Size()
v.AddArg(v0)
return true
}
+ // match: (Load <t> (OffPtr [off] (Addr {s} sb) ) _)
+ // cond: t.IsUintptr() && isFixedSym(s, off)
+ // result: (Addr {fixedSym(b.Func, s, off)} sb)
+ for {
+ t := v.Type
+ if v_0.Op != OpOffPtr {
+ break
+ }
+ off := auxIntToInt64(v_0.AuxInt)
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpAddr {
+ break
+ }
+ s := auxToSym(v_0_0.Aux)
+ sb := v_0_0.Args[0]
+ if !(t.IsUintptr() && isFixedSym(s, off)) {
+ break
+ }
+ v.reset(OpAddr)
+ v.Aux = symToAux(fixedSym(b.Func, s, off))
+ v.AddArg(sb)
+ return true
+ }
+ // match: (Load <t> (OffPtr [off] (Convert (Addr {s} sb) _) ) _)
+ // cond: t.IsUintptr() && isFixedSym(s, off)
+ // result: (Addr {fixedSym(b.Func, s, off)} sb)
+ for {
+ t := v.Type
+ if v_0.Op != OpOffPtr {
+ break
+ }
+ off := auxIntToInt64(v_0.AuxInt)
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpConvert {
+ break
+ }
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpAddr {
+ break
+ }
+ s := auxToSym(v_0_0_0.Aux)
+ sb := v_0_0_0.Args[0]
+ if !(t.IsUintptr() && isFixedSym(s, off)) {
+ break
+ }
+ v.reset(OpAddr)
+ v.Aux = symToAux(fixedSym(b.Func, s, off))
+ v.AddArg(sb)
+ return true
+ }
+ // match: (Load <t> (OffPtr [off] (ITab (IMake (Addr {s} sb) _))) _)
+ // cond: t.IsUintptr() && isFixedSym(s, off)
+ // result: (Addr {fixedSym(b.Func, s, off)} sb)
+ for {
+ t := v.Type
+ if v_0.Op != OpOffPtr {
+ break
+ }
+ off := auxIntToInt64(v_0.AuxInt)
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpITab {
+ break
+ }
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpIMake {
+ break
+ }
+ v_0_0_0_0 := v_0_0_0.Args[0]
+ if v_0_0_0_0.Op != OpAddr {
+ break
+ }
+ s := auxToSym(v_0_0_0_0.Aux)
+ sb := v_0_0_0_0.Args[0]
+ if !(t.IsUintptr() && isFixedSym(s, off)) {
+ break
+ }
+ v.reset(OpAddr)
+ v.Aux = symToAux(fixedSym(b.Func, s, off))
+ v.AddArg(sb)
+ return true
+ }
+ // match: (Load <t> (OffPtr [off] (ITab (IMake (Convert (Addr {s} sb) _) _))) _)
+ // cond: t.IsUintptr() && isFixedSym(s, off)
+ // result: (Addr {fixedSym(b.Func, s, off)} sb)
+ for {
+ t := v.Type
+ if v_0.Op != OpOffPtr {
+ break
+ }
+ off := auxIntToInt64(v_0.AuxInt)
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpITab {
+ break
+ }
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpIMake {
+ break
+ }
+ v_0_0_0_0 := v_0_0_0.Args[0]
+ if v_0_0_0_0.Op != OpConvert {
+ break
+ }
+ v_0_0_0_0_0 := v_0_0_0_0.Args[0]
+ if v_0_0_0_0_0.Op != OpAddr {
+ break
+ }
+ s := auxToSym(v_0_0_0_0_0.Aux)
+ sb := v_0_0_0_0_0.Args[0]
+ if !(t.IsUintptr() && isFixedSym(s, off)) {
+ break
+ }
+ v.reset(OpAddr)
+ v.Aux = symToAux(fixedSym(b.Func, s, off))
+ v.AddArg(sb)
+ return true
+ }
+ // match: (Load <t> (OffPtr [off] (Addr {sym} _) ) _)
+ // cond: t.IsInteger() && t.Size() == 4 && isFixed32(config, sym, off)
+ // result: (Const32 [fixed32(config, sym, off)])
+ for {
+ t := v.Type
+ if v_0.Op != OpOffPtr {
+ break
+ }
+ off := auxIntToInt64(v_0.AuxInt)
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpAddr {
+ break
+ }
+ sym := auxToSym(v_0_0.Aux)
+ if !(t.IsInteger() && t.Size() == 4 && isFixed32(config, sym, off)) {
+ break
+ }
+ v.reset(OpConst32)
+ v.AuxInt = int32ToAuxInt(fixed32(config, sym, off))
+ return true
+ }
+ // match: (Load <t> (OffPtr [off] (Convert (Addr {sym} _) _) ) _)
+ // cond: t.IsInteger() && t.Size() == 4 && isFixed32(config, sym, off)
+ // result: (Const32 [fixed32(config, sym, off)])
+ for {
+ t := v.Type
+ if v_0.Op != OpOffPtr {
+ break
+ }
+ off := auxIntToInt64(v_0.AuxInt)
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpConvert {
+ break
+ }
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpAddr {
+ break
+ }
+ sym := auxToSym(v_0_0_0.Aux)
+ if !(t.IsInteger() && t.Size() == 4 && isFixed32(config, sym, off)) {
+ break
+ }
+ v.reset(OpConst32)
+ v.AuxInt = int32ToAuxInt(fixed32(config, sym, off))
+ return true
+ }
+ // match: (Load <t> (OffPtr [off] (ITab (IMake (Addr {sym} _) _))) _)
+ // cond: t.IsInteger() && t.Size() == 4 && isFixed32(config, sym, off)
+ // result: (Const32 [fixed32(config, sym, off)])
+ for {
+ t := v.Type
+ if v_0.Op != OpOffPtr {
+ break
+ }
+ off := auxIntToInt64(v_0.AuxInt)
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpITab {
+ break
+ }
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpIMake {
+ break
+ }
+ v_0_0_0_0 := v_0_0_0.Args[0]
+ if v_0_0_0_0.Op != OpAddr {
+ break
+ }
+ sym := auxToSym(v_0_0_0_0.Aux)
+ if !(t.IsInteger() && t.Size() == 4 && isFixed32(config, sym, off)) {
+ break
+ }
+ v.reset(OpConst32)
+ v.AuxInt = int32ToAuxInt(fixed32(config, sym, off))
+ return true
+ }
+ // match: (Load <t> (OffPtr [off] (ITab (IMake (Convert (Addr {sym} _) _) _))) _)
+ // cond: t.IsInteger() && t.Size() == 4 && isFixed32(config, sym, off)
+ // result: (Const32 [fixed32(config, sym, off)])
+ for {
+ t := v.Type
+ if v_0.Op != OpOffPtr {
+ break
+ }
+ off := auxIntToInt64(v_0.AuxInt)
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpITab {
+ break
+ }
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpIMake {
+ break
+ }
+ v_0_0_0_0 := v_0_0_0.Args[0]
+ if v_0_0_0_0.Op != OpConvert {
+ break
+ }
+ v_0_0_0_0_0 := v_0_0_0_0.Args[0]
+ if v_0_0_0_0_0.Op != OpAddr {
+ break
+ }
+ sym := auxToSym(v_0_0_0_0_0.Aux)
+ if !(t.IsInteger() && t.Size() == 4 && isFixed32(config, sym, off)) {
+ break
+ }
+ v.reset(OpConst32)
+ v.AuxInt = int32ToAuxInt(fixed32(config, sym, off))
+ return true
+ }
return false
}
func rewriteValuegeneric_OpLsh16x16(v *Value) bool {
}
break
}
+ // match: (NeqPtr (Convert (Addr {x} _) _) (Addr {y} _))
+ // result: (ConstBool [x!=y])
+ for {
+ for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+ if v_0.Op != OpConvert {
+ continue
+ }
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpAddr {
+ continue
+ }
+ x := auxToSym(v_0_0.Aux)
+ if v_1.Op != OpAddr {
+ continue
+ }
+ y := auxToSym(v_1.Aux)
+ v.reset(OpConstBool)
+ v.AuxInt = boolToAuxInt(x != y)
+ return true
+ }
+ break
+ }
// match: (NeqPtr (LocalAddr _ _) (Addr _))
// result: (ConstBool [true])
for {
v.reset(OpInvalid)
return true
}
+ // match: (NilCheck (Addr {_} (SB)) _)
+ // result: (Invalid)
+ for {
+ if v_0.Op != OpAddr {
+ break
+ }
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpSB {
+ break
+ }
+ v.reset(OpInvalid)
+ return true
+ }
+ // match: (NilCheck (Convert (Addr {_} (SB)) _) _)
+ // result: (Invalid)
+ for {
+ if v_0.Op != OpConvert {
+ break
+ }
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpAddr {
+ break
+ }
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpSB {
+ break
+ }
+ v.reset(OpInvalid)
+ return true
+ }
return false
}
func rewriteValuegeneric_OpNot(v *Value) bool {