1 // Copyright 2009 The Go Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style
3 // license that can be found in the LICENSE file.
14 "cmd/compile/internal/base"
15 "cmd/compile/internal/ir"
16 "cmd/compile/internal/objw"
17 "cmd/compile/internal/reflectdata"
18 "cmd/compile/internal/rttype"
19 "cmd/compile/internal/ssagen"
20 "cmd/compile/internal/typecheck"
21 "cmd/compile/internal/types"
26 // walkSwitch walks a switch statement.
27 func walkSwitch(sw *ir.SwitchStmt) {
28 // Guard against double walk, see #25776.
30 return // Was fatal, but eliminating every possible source of double-walking is hard
// Dispatch on switch kind: a tag of OTYPESW marks a type switch,
// which is lowered separately from an expression switch.
34 if sw.Tag != nil && sw.Tag.Op() == ir.OTYPESW {
41 // walkSwitchExpr generates an AST implementing sw. sw is an
// expression switch; the compiled form is accumulated into sw.Compiled.
43 func walkSwitchExpr(sw *ir.SwitchStmt) {
49 // convert switch {...} to switch true {...}
51 cond = ir.NewBool(base.Pos, true)
52 cond = typecheck.Expr(cond)
53 cond = typecheck.DefaultLit(cond, nil)
56 // Given "switch string(byteslice)",
57 // with all cases being side-effect free,
58 // use a zero-cost alias of the byte slice.
59 // Do this before calling walkExpr on cond,
60 // because walkExpr will lower the string
61 // conversion into a runtime call.
62 // See issue 24937 for more discussion.
63 if cond.Op() == ir.OBYTES2STR && allCaseExprsAreSideEffectFree(sw) {
64 cond := cond.(*ir.ConvExpr)
65 cond.SetOp(ir.OBYTES2STRTMP)
// Evaluate the switch tag once into a temporary unless it is a
// constant, so case comparisons don't re-evaluate it.
68 cond = walkExpr(cond, sw.PtrInit())
69 if cond.Op() != ir.OLITERAL && cond.Op() != ir.ONIL {
70 cond = copyExpr(cond, cond.Type(), &sw.Compiled)
80 var defaultGoto ir.Node
// Each case gets a fresh auto-generated label; matching a case value
// jumps to the label that precedes its body.
82 for _, ncase := range sw.Cases {
83 label := typecheck.AutoLabel(".s")
84 jmp := ir.NewBranchStmt(ncase.Pos(), ir.OGOTO, label)
86 // Process case dispatch.
87 if len(ncase.List) == 0 {
88 if defaultGoto != nil {
89 base.Fatalf("duplicate default case not detected during typechecking")
94 for i, n1 := range ncase.List {
// RTypes (when present) carries a per-case runtime type operand;
// assumes it parallels ncase.List — TODO confirm against ir.CaseClause.
96 if i < len(ncase.RTypes) {
97 rtype = ncase.RTypes[i]
99 s.Add(ncase.Pos(), n1, rtype, jmp)
// Process the case body: label, body statements, and an implicit
// break unless the body ends in "fallthrough".
103 body.Append(ir.NewLabelStmt(ncase.Pos(), label))
104 body.Append(ncase.Body...)
105 if fall, pos := endsInFallthrough(ncase.Body); !fall {
106 br := ir.NewBranchStmt(base.Pos, ir.OBREAK, nil)
// No explicit default case: "default" is simply a break out of the switch.
113 if defaultGoto == nil {
114 br := ir.NewBranchStmt(base.Pos, ir.OBREAK, nil)
115 br.SetPos(br.Pos().WithNotStmt())
// Emit dispatch first, then the default jump, then all case bodies.
120 sw.Compiled.Append(defaultGoto)
121 sw.Compiled.Append(body.Take()...)
122 walkStmtList(sw.Compiled)
125 // An exprSwitch walks an expression switch.
// It accumulates case clauses via Add and emits compiled dispatch
// code via Emit/flush.
126 type exprSwitch struct {
128 exprname ir.Node // value being switched on
// An exprClause is a single (possibly merged) case clause:
// it matches values in [lo, hi] and jumps to jmp on a match.
134 type exprClause struct {
137 rtype ir.Node // *runtime._type for OEQ node
// Add records one case clause (expr -> jmp) for later compilation.
// Ordered-comparable constant cases are buffered so flush can sort and
// merge them; other cases are flushed eagerly to preserve evaluation order.
141 func (s *exprSwitch) Add(pos src.XPos, expr, rtype, jmp ir.Node) {
142 c := exprClause{pos: pos, lo: expr, hi: expr, rtype: rtype, jmp: jmp}
143 if types.IsOrdered[s.exprname.Type().Kind()] && expr.Op() == ir.OLITERAL {
144 s.clauses = append(s.clauses, c)
149 s.clauses = append(s.clauses, c)
// Emit flushes any pending clauses and appends the compiled
// dispatch code to out.
153 func (s *exprSwitch) Emit(out *ir.Nodes) {
155 out.Append(s.done.Take()...)
// flush compiles the buffered constant clauses into efficient dispatch
// code: strings are grouped by length (outer length switch, inner
// per-length search), integers are sorted and consecutive ranges merged,
// then everything is handed to search (jump table or binary search).
158 func (s *exprSwitch) flush() {
165 // Caution: If len(cc) == 1, then cc[0] might not an OLITERAL.
166 // The code below is structured to implicitly handle this case
167 // (e.g., sort.Slice doesn't need to invoke the less function
168 // when there's only a single slice element).
170 if s.exprname.Type().IsString() && len(cc) >= 2 {
171 // Sort strings by length and then by value. It is
172 // much cheaper to compare lengths than values, and
173 // all we need here is consistency. We respect this
175 sort.Slice(cc, func(i, j int) bool {
176 si := ir.StringVal(cc[i].lo)
177 sj := ir.StringVal(cc[j].lo)
178 if len(si) != len(sj) {
179 return len(si) < len(sj)
184 // runLen returns the string length associated with a
185 // particular run of exprClauses.
186 runLen := func(run []exprClause) int64 { return int64(len(ir.StringVal(run[0].lo))) }
188 // Collapse runs of consecutive strings with the same length.
189 var runs [][]exprClause
191 for i := 1; i < len(cc); i++ {
192 if runLen(cc[start:]) != runLen(cc[i:]) {
193 runs = append(runs, cc[start:i])
197 runs = append(runs, cc[start:])
199 // We have strings of more than one length. Generate an
200 // outer switch which switches on the length of the string
201 // and an inner switch in each case which resolves all the
202 // strings of the same length. The code looks something like this:
206 // ... search among length 5 strings ...
209 // ... search among length 8 strings ...
211 // ... other lengths ...
216 // ... other lengths ...
220 outerLabel := typecheck.AutoLabel(".s")
221 endLabel := typecheck.AutoLabel(".s")
223 // Jump around all the individual switches for each length.
224 s.done.Append(ir.NewBranchStmt(s.pos, ir.OGOTO, outerLabel))
// The outer switch dispatches on len(s.exprname).
227 outer.exprname = ir.NewUnaryExpr(s.pos, ir.OLEN, s.exprname)
228 outer.exprname.SetType(types.Types[types.TINT])
230 for _, run := range runs {
231 // Target label to jump to when we match this length.
232 label := typecheck.AutoLabel(".s")
234 // Search within this run of same-length strings.
236 s.done.Append(ir.NewLabelStmt(pos, label))
237 stringSearch(s.exprname, run, &s.done)
238 s.done.Append(ir.NewBranchStmt(pos, ir.OGOTO, endLabel))
240 // Add length case to outer switch.
241 cas := ir.NewInt(pos, runLen(run))
242 jmp := ir.NewBranchStmt(pos, ir.OGOTO, label)
243 outer.Add(pos, cas, nil, jmp)
245 s.done.Append(ir.NewLabelStmt(s.pos, outerLabel))
247 s.done.Append(ir.NewLabelStmt(s.pos, endLabel))
// Non-string path: sort constant cases into increasing order so that
// range merging and binary search below are valid.
251 sort.Slice(cc, func(i, j int) bool {
252 return constant.Compare(cc[i].lo.Val(), token.LSS, cc[j].lo.Val())
255 // Merge consecutive integer cases.
256 if s.exprname.Type().IsInteger() {
257 consecutive := func(last, next constant.Value) bool {
258 delta := constant.BinaryOp(next, token.SUB, last)
259 return constant.Compare(delta, token.EQL, constant.MakeInt64(1))
// Cases that are adjacent integers and share a jump target are
// widened into a single [lo, hi] range clause.
263 for _, c := range cc[1:] {
264 last := &merged[len(merged)-1]
265 if last.jmp == c.jmp && consecutive(last.hi.Val(), c.lo.Val()) {
268 merged = append(merged, c)
274 s.search(cc, &s.done)
// search emits dispatch code for the sorted clauses cc, preferring a
// jump table when tryJumpTable accepts, and falling back to a binary
// search keyed on cc[i].hi otherwise.
277 func (s *exprSwitch) search(cc []exprClause, out *ir.Nodes) {
278 if s.tryJumpTable(cc, out) {
281 binarySearch(len(cc), out,
282 func(i int) ir.Node {
// less(i): exprname <= cc[i-1].hi sends the search into the lower half.
283 return ir.NewBinaryExpr(base.Pos, ir.OLE, s.exprname, cc[i-1].hi)
285 func(i int, nif *ir.IfStmt) {
287 nif.Cond = c.test(s.exprname)
288 nif.Body = []ir.Node{c.jmp}
293 // Try to implement the clauses with a jump table. Returns true if successful.
294 func (s *exprSwitch) tryJumpTable(cc []exprClause, out *ir.Nodes) bool {
295 const minCases = 8 // have at least minCases cases in the switch
296 const minDensity = 4 // use at least 1 out of every minDensity entries
// Jump tables are disabled with -N, on targets without jump-table
// support, and under retpoline (indirect jumps are expensive there).
298 if base.Flag.N != 0 || !ssagen.Arch.LinkArch.CanJumpTable || base.Ctxt.Retpoline {
301 if len(cc) < minCases {
302 return false // not enough cases for it to be worth it
304 if cc[0].lo.Val().Kind() != constant.Int {
305 return false // e.g. float
307 if s.exprname.Type().Size() > int64(types.PtrSize) {
308 return false // 64-bit switches on 32-bit archs
// Density check: width = max-min+1 entries must be populated by at
// least 1/minDensity of the cases, else the table wastes too much space.
310 min := cc[0].lo.Val()
311 max := cc[len(cc)-1].hi.Val()
312 width := constant.BinaryOp(constant.BinaryOp(max, token.SUB, min), token.ADD, constant.MakeInt64(1))
313 limit := constant.MakeInt64(int64(len(cc)) * minDensity)
314 if constant.Compare(width, token.GTR, limit) {
315 // We disable jump tables if we use less than a minimum fraction of the entries.
316 // i.e. for switch x {case 0: case 1000: case 2000:} we don't want to use a jump table.
319 jt := ir.NewJumpTableStmt(base.Pos, s.exprname)
320 for _, c := range cc {
321 jmp := c.jmp.(*ir.BranchStmt)
322 if jmp.Op() != ir.OGOTO || jmp.Label == nil {
323 panic("bad switch case body")
// Expand each [lo, hi] range clause into one table entry per value.
325 for i := c.lo.Val(); constant.Compare(i, token.LEQ, c.hi.Val()); i = constant.BinaryOp(i, token.ADD, constant.MakeInt64(1)) {
326 jt.Cases = append(jt.Cases, i)
327 jt.Targets = append(jt.Targets, jmp.Label)
// test returns the boolean condition that matches this clause against
// exprname: a range check for merged clauses, or an equality test
// (specialized for "switch true/false") for single-value clauses.
334 func (c *exprClause) test(exprname ir.Node) ir.Node {
// Range clause: lo <= exprname && exprname <= hi.
337 low := ir.NewBinaryExpr(c.pos, ir.OGE, exprname, c.lo)
338 high := ir.NewBinaryExpr(c.pos, ir.OLE, exprname, c.hi)
339 return ir.NewLogicalExpr(c.pos, ir.OANDAND, low, high)
342 // Optimize "switch true { ...}" and "switch false { ... }".
343 if ir.IsConst(exprname, constant.Bool) && !c.lo.Type().IsInterface() {
344 if ir.BoolVal(exprname) {
// "switch false": the case matches when !c.lo.
347 return ir.NewUnaryExpr(c.pos, ir.ONOT, c.lo)
351 n := ir.NewBinaryExpr(c.pos, ir.OEQ, exprname, c.lo)
// allCaseExprsAreSideEffectFree reports whether every case expression
// in sw is a constant (OLITERAL), and hence free of side effects.
356 func allCaseExprsAreSideEffectFree(sw *ir.SwitchStmt) bool {
357 // In theory, we could be more aggressive, allowing any
358 // side-effect-free expressions in cases, but it's a bit
359 // tricky because some of that information is unavailable due
360 // to the introduction of temporaries during order.
361 // Restricting to constants is simple and probably powerful
364 for _, ncase := range sw.Cases {
365 for _, v := range ncase.List {
366 if v.Op() != ir.OLITERAL {
374 // endsInFallthrough reports whether stmts ends with a "fallthrough" statement.
// It also returns the position of that final statement (NoXPos when
// stmts is empty).
375 func endsInFallthrough(stmts []ir.Node) (bool, src.XPos) {
377 return false, src.NoXPos
380 return stmts[i].Op() == ir.OFALL, stmts[i].Pos()
383 // walkSwitchType generates an AST that implements sw, where sw is a
// type switch. The lowered form is appended to sw.Compiled.
385 func walkSwitchType(sw *ir.SwitchStmt) {
// Evaluate the switched-on value once into a temporary, plus
// temporaries for the comma-ok flag and the matched itab.
387 s.srcName = sw.Tag.(*ir.TypeSwitchGuard).X
388 s.srcName = walkExpr(s.srcName, sw.PtrInit())
389 s.srcName = copyExpr(s.srcName, s.srcName.Type(), &sw.Compiled)
390 s.okName = typecheck.TempAt(base.Pos, ir.CurFunc, types.Types[types.TBOOL])
391 s.itabName = typecheck.TempAt(base.Pos, ir.CurFunc, types.Types[types.TUINT8].PtrTo())
393 // Get interface descriptor word.
394 // For empty interfaces this will be the type.
395 // For non-empty interfaces this will be the itab.
396 srcItab := ir.NewUnaryExpr(base.Pos, ir.OITAB, s.srcName)
397 srcData := ir.NewUnaryExpr(base.Pos, ir.OIDATA, s.srcName)
398 srcData.SetType(types.Types[types.TUINT8].PtrTo())
399 srcData.SetTypecheck(1)
401 // For empty interfaces, do:
402 // if e._type == nil {
403 // do nil case if it exists, otherwise default
406 // Use a similar strategy for non-empty interfaces.
407 ifNil := ir.NewIfStmt(base.Pos, nil, nil, nil)
408 ifNil.Cond = ir.NewBinaryExpr(base.Pos, ir.OEQ, srcItab, typecheck.NodNil())
409 base.Pos = base.Pos.WithNotStmt() // disable statement marks after the first check.
410 ifNil.Cond = typecheck.Expr(ifNil.Cond)
411 ifNil.Cond = typecheck.DefaultLit(ifNil.Cond, nil)
412 // ifNil.Nbody assigned later.
413 sw.Compiled.Append(ifNil)
415 // Load hash from type or itab.
416 dotHash := typeHashFieldOf(base.Pos, srcItab)
417 s.hashName = copyExpr(dotHash, dotHash.Type(), &sw.Compiled)
419 // Make a label for each case body.
420 labels := make([]*types.Sym, len(sw.Cases))
421 for i := range sw.Cases {
422 labels[i] = typecheck.AutoLabel(".s")
425 // "jump" to execute if no case matches.
426 br := ir.NewBranchStmt(base.Pos, ir.OBREAK, nil)
428 // Assemble a list of all the types we're looking for.
429 // This pass flattens the case lists, as well as handles
430 // some unusual cases, like default and nil cases.
431 type oneCase struct {
433 jmp ir.Node // jump to body of selected case
435 // The case we're matching. Normally the type we're looking for
436 // is typ.Type(), but when typ is ODYNAMICTYPE the actual type
437 // we're looking for is not a compile-time constant (typ.Type()
438 // will be its shape).
442 var defaultGoto, nilGoto ir.Node
443 for i, ncase := range sw.Cases {
444 jmp := ir.NewBranchStmt(ncase.Pos(), ir.OGOTO, labels[i])
445 if len(ncase.List) == 0 { // default:
446 if defaultGoto != nil {
447 base.Fatalf("duplicate default case not detected during typechecking")
451 for _, n1 := range ncase.List {
452 if ir.IsNil(n1) { // case nil:
454 base.Fatalf("duplicate nil case not detected during typechecking")
459 if n1.Op() == ir.ODYNAMICTYPE {
460 // Convert dynamic to static, if the dynamic is actually static.
461 // TODO: why isn't this OTYPE to begin with?
462 dt := n1.(*ir.DynamicType)
463 if dt.RType != nil && dt.RType.Op() == ir.OADDR {
464 addr := dt.RType.(*ir.AddrExpr)
465 if addr.X.Op() == ir.OLINKSYMOFFSET {
466 n1 = ir.TypeNode(n1.Type())
469 if dt.ITab != nil && dt.ITab.Op() == ir.OADDR {
470 addr := dt.ITab.(*ir.AddrExpr)
471 if addr.X.Op() == ir.OLINKSYMOFFSET {
472 n1 = ir.TypeNode(n1.Type())
476 cases = append(cases, oneCase{
// With no explicit default, "no match" is a break; a nil value with
// no explicit nil case also falls through to the default behavior.
483 if defaultGoto == nil {
487 nilGoto = defaultGoto
489 ifNil.Body = []ir.Node{nilGoto}
491 // Now go through the list of cases, processing groups as we find them.
492 var concreteCases []oneCase
493 var interfaceCases []oneCase
495 // Process all the concrete types first. Because we handle shadowing
496 // below, it is correct to do all the concrete types before all of
497 // the interface types.
498 // The concrete cases can all be handled without a runtime call.
499 if len(concreteCases) > 0 {
500 var clauses []typeClause
501 for _, c := range concreteCases {
502 as := ir.NewAssignListStmt(c.pos, ir.OAS2,
503 []ir.Node{ir.BlankNode, s.okName}, // _, ok =
504 []ir.Node{ir.NewTypeAssertExpr(c.pos, s.srcName, c.typ.Type())}) // iface.(type)
505 nif := ir.NewIfStmt(c.pos, s.okName, []ir.Node{c.jmp}, nil)
506 clauses = append(clauses, typeClause{
507 hash: types.TypeHash(c.typ.Type()),
508 body: []ir.Node{typecheck.Stmt(as), typecheck.Stmt(nif)},
511 s.flush(clauses, &sw.Compiled)
512 concreteCases = concreteCases[:0]
515 // The "any" case, if it exists, must be the last interface case, because
516 // it would shadow all subsequent cases. Strip it off here so the runtime
517 // call only needs to handle non-empty interfaces.
519 if len(interfaceCases) > 0 && interfaceCases[len(interfaceCases)-1].typ.Type().IsEmptyInterface() {
520 anyGoto = interfaceCases[len(interfaceCases)-1].jmp
521 interfaceCases = interfaceCases[:len(interfaceCases)-1]
524 // Next, process all the interface types with a single call to the runtime.
525 if len(interfaceCases) > 0 {
527 // Build an internal/abi.InterfaceSwitch descriptor to pass to the runtime.
528 lsym := types.LocalPkg.Lookup(fmt.Sprintf(".interfaceSwitch.%d", interfaceSwitchGen)).LinksymABI(obj.ABI0)
531 off = objw.SymPtr(lsym, off, typecheck.LookupRuntimeVar("emptyInterfaceSwitchCache"), 0)
532 off = objw.Uintptr(lsym, off, uint64(len(interfaceCases)))
533 for _, c := range interfaceCases {
534 off = objw.SymPtr(lsym, off, reflectdata.TypeSym(c.typ.Type()).Linksym(), 0)
536 objw.Global(lsym, int32(off), obj.LOCAL)
537 // Set the type to be just a single pointer, as the cache pointer is the
538 // only one that GC needs to see.
539 lsym.Gotype = reflectdata.TypeLinksym(types.Types[types.TUINT8].PtrTo())
541 // Call runtime to do switch
542 // case, itab = runtime.interfaceSwitch(&descriptor, typeof(arg))
544 if s.srcName.Type().IsEmptyInterface() {
545 typeArg = ir.NewConvExpr(base.Pos, ir.OCONVNOP, types.Types[types.TUINT8].PtrTo(), srcItab)
547 typeArg = itabType(srcItab)
549 caseVar := typecheck.TempAt(base.Pos, ir.CurFunc, types.Types[types.TINT])
550 isw := ir.NewInterfaceSwitchStmt(base.Pos, caseVar, s.itabName, typeArg, lsym)
551 sw.Compiled.Append(isw)
553 // Switch on the result of the call (or cache lookup).
554 var newCases []*ir.CaseClause
555 for i, c := range interfaceCases {
556 newCases = append(newCases, &ir.CaseClause{
557 List: []ir.Node{ir.NewInt(base.Pos, int64(i))},
558 Body: []ir.Node{c.jmp},
561 // TODO: add len(newCases) case, mark switch as bounded
562 sw2 := ir.NewSwitchStmt(base.Pos, caseVar, newCases)
563 sw.Compiled.Append(typecheck.Stmt(sw2))
564 interfaceCases = interfaceCases[:0]
568 // We've already handled the nil case, so everything
569 // that reaches here matches the "any" case.
570 sw.Compiled.Append(anyGoto)
// Partition the flattened cases: dynamic-type cases are compiled
// inline immediately; the rest are grouped into concrete vs interface
// buckets, flushed by the group processing above.
574 for _, c := range cases {
575 if c.typ.Op() == ir.ODYNAMICTYPE {
576 flush() // process all previous cases
577 dt := c.typ.(*ir.DynamicType)
578 dot := ir.NewDynamicTypeAssertExpr(c.pos, ir.ODYNAMICDOTTYPE, s.srcName, dt.RType)
580 dot.SetType(c.typ.Type())
583 as := ir.NewAssignListStmt(c.pos, ir.OAS2, nil, nil)
584 as.Lhs = []ir.Node{ir.BlankNode, s.okName} // _, ok =
585 as.Rhs = []ir.Node{dot}
588 nif := ir.NewIfStmt(c.pos, s.okName, []ir.Node{c.jmp}, nil)
589 sw.Compiled.Append(as, nif)
593 // Check for shadowing (a case that will never fire because
594 // a previous case would have always fired first). This check
595 // allows us to reorder concrete and interface cases.
596 // (TODO: these should be vet failures, maybe?)
597 for _, ic := range interfaceCases {
598 // An interface type case will shadow all
599 // subsequent types that implement that interface.
600 if typecheck.Implements(c.typ.Type(), ic.typ.Type()) {
603 // Note that we don't need to worry about:
604 // 1. Two concrete types shadowing each other. That's
605 // disallowed by the spec.
606 // 2. A concrete type shadowing an interface type.
607 // That can never happen, as interface types can
608 // be satisfied by an infinite set of concrete types.
609 // The correctness of this step also depends on handling
610 // the dynamic type cases separately, as we do above.
613 if c.typ.Type().IsInterface() {
614 interfaceCases = append(interfaceCases, c)
616 concreteCases = append(concreteCases, c)
621 sw.Compiled.Append(defaultGoto) // if none of the cases matched
623 // Now generate all the case bodies
624 for i, ncase := range sw.Cases {
625 sw.Compiled.Append(ir.NewLabelStmt(ncase.Pos(), labels[i]))
// If the case declares a variable (switch v := x.(type)), compute the
// value to bind, downcast to the case's type where necessary.
626 if caseVar := ncase.Var; caseVar != nil {
628 if len(ncase.List) == 1 {
629 // single type. We have to downcast the input value to the target type.
630 if ncase.List[0].Op() == ir.OTYPE { // single compile-time known type
631 t := ncase.List[0].Type()
633 // This case is an interface. Build case value from input interface.
634 // The data word will always be the same, but the itab/type changes.
635 if t.IsEmptyInterface() {
637 if s.srcName.Type().IsEmptyInterface() {
638 // E->E, nothing to do, type is already correct.
641 // I->E, load type out of itab
642 typ = itabType(srcItab)
643 typ.SetPos(ncase.Pos())
645 val = ir.NewBinaryExpr(ncase.Pos(), ir.OMAKEFACE, typ, srcData)
647 // The itab we need was returned by a runtime.interfaceSwitch call.
648 val = ir.NewBinaryExpr(ncase.Pos(), ir.OMAKEFACE, s.itabName, srcData)
651 // This case is a concrete type, just read its value out of the interface.
652 val = ifaceData(ncase.Pos(), s.srcName, t)
654 } else if ncase.List[0].Op() == ir.ODYNAMICTYPE { // single runtime known type
655 dt := ncase.List[0].(*ir.DynamicType)
656 x := ir.NewDynamicTypeAssertExpr(ncase.Pos(), ir.ODYNAMICDOTTYPE, val, dt.RType)
659 } else if ir.IsNil(ncase.List[0]) {
661 base.Fatalf("unhandled type switch case %v", ncase.List[0])
663 val.SetType(caseVar.Type())
667 ir.NewDecl(ncase.Pos(), ir.ODCL, caseVar),
668 ir.NewAssignStmt(ncase.Pos(), caseVar, val),
671 sw.Compiled.Append(l...)
673 sw.Compiled.Append(ncase.Body...)
674 sw.Compiled.Append(br)
677 walkStmtList(sw.Compiled)
// interfaceSwitchGen numbers the per-switch descriptor symbols
// (".interfaceSwitch.N") emitted by walkSwitchType so each gets a
// unique name.
682 var interfaceSwitchGen int
684 // typeHashFieldOf returns an expression to select the type hash field
685 // from an interface's descriptor word (whether a *runtime._type or
686 // *runtime.itab pointer).
687 func typeHashFieldOf(pos src.XPos, itab *ir.UnaryExpr) *ir.SelectorExpr {
688 if itab.Op() != ir.OITAB {
689 base.Fatalf("expected OITAB, got %v", itab.Op())
691 var hashField *types.Field
692 if itab.X.Type().IsEmptyInterface() {
693 // runtime._type's hash field
694 if rtypeHashField == nil {
695 rtypeHashField = runtimeField("hash", rttype.Type.Offset("Hash"), types.Types[types.TUINT32])
697 hashField = rtypeHashField
699 // runtime.itab's hash field
700 if itabHashField == nil {
// Hash lives at offset 2*PtrSize within runtime.itab — assumes this
// matches the runtime's itab layout; verify against internal/abi.
701 itabHashField = runtimeField("hash", int64(2*types.PtrSize), types.Types[types.TUINT32])
703 hashField = itabHashField
// boundedDotPtr presumably emits a nil-check-free field load; the
// caller has already established itab is non-nil.
705 return boundedDotPtr(pos, itab, hashField)
// Lazily initialized field descriptors cached by typeHashFieldOf.
708 var rtypeHashField, itabHashField *types.Field
710 // A typeSwitch walks a type switch.
711 type typeSwitch struct {
712 // Temporary variables (i.e., ONAMEs) used by type switch dispatch logic:
713 srcName ir.Node // value being type-switched on
714 hashName ir.Node // type hash of the value being type-switched on
715 okName ir.Node // boolean used for comma-ok type assertions
716 itabName ir.Node // itab value to use for first word of non-empty interface
// A typeClause pairs a concrete case's type hash with the statements
// (assert + conditional jump) that test and dispatch it.
719 type typeClause struct {
// flush compiles the buffered concrete-type clauses: sort by type hash,
// merge hash collisions, then dispatch via jump table or binary search
// on the hash, with per-clause equality tests inside each hash bucket.
724 func (s *typeSwitch) flush(cc []typeClause, compiled *ir.Nodes) {
729 sort.Slice(cc, func(i, j int) bool { return cc[i].hash < cc[j].hash })
731 // Combine adjacent cases with the same hash.
733 for _, c := range cc[1:] {
734 last := &merged[len(merged)-1]
735 if last.hash == c.hash {
// Hash collision: run both cases' tests sequentially under one bucket.
736 last.body.Append(c.body.Take()...)
738 merged = append(merged, c)
743 if s.tryJumpTable(cc, compiled) {
746 binarySearch(len(cc), compiled,
747 func(i int) ir.Node {
748 return ir.NewBinaryExpr(base.Pos, ir.OLE, s.hashName, ir.NewInt(base.Pos, int64(cc[i-1].hash)))
750 func(i int, nif *ir.IfStmt) {
751 // TODO(mdempsky): Omit hash equality check if
752 // there's only one type.
754 nif.Cond = ir.NewBinaryExpr(base.Pos, ir.OEQ, s.hashName, ir.NewInt(base.Pos, int64(c.hash)))
755 nif.Body.Append(c.body.Take()...)
760 // Try to implement the clauses with a jump table. Returns true if successful.
// The strategy is to find a perfect hash of the case type hashes: pick
// b bits at some position i such that cc[k].hash>>i & (1<<b - 1) is
// distinct for every case, then index a 2^b-entry jump table with it.
761 func (s *typeSwitch) tryJumpTable(cc []typeClause, out *ir.Nodes) bool {
762 const minCases = 5 // have at least minCases cases in the switch
763 if base.Flag.N != 0 || !ssagen.Arch.LinkArch.CanJumpTable || base.Ctxt.Retpoline {
766 if len(cc) < minCases {
767 return false // not enough cases for it to be worth it
769 hashes := make([]uint32, len(cc))
770 // b = # of bits to use. Start with the minimum number of
771 // bits possible, but try a few larger sizes if needed.
772 b0 := bits.Len(uint(len(cc) - 1))
773 for b := b0; b < b0+3; b++ {
775 for i := 0; i <= 32-b; i++ { // starting bit position
776 // Compute the hash we'd get from all the cases,
777 // selecting b bits starting at bit i.
779 for _, c := range cc {
780 h := c.hash >> i & (1<<b - 1)
781 hashes = append(hashes, h)
783 // Order by increasing hash.
784 sort.Slice(hashes, func(j, k int) bool {
785 return hashes[j] < hashes[k]
787 for j := 1; j < len(hashes); j++ {
788 if hashes[j] == hashes[j-1] {
789 // There is a duplicate hash; try a different b/i pair.
794 // All hashes are distinct. Use these values of b and i.
// Build the runtime index expression: (typeHash >> i) & (1<<b - 1).
797 h = ir.NewBinaryExpr(base.Pos, ir.ORSH, h, ir.NewInt(base.Pos, int64(i)))
799 h = ir.NewBinaryExpr(base.Pos, ir.OAND, h, ir.NewInt(base.Pos, int64(1<<b-1)))
800 h = typecheck.Expr(h)
803 jt := ir.NewJumpTableStmt(base.Pos, h)
804 jt.Cases = make([]constant.Value, 1<<b)
805 jt.Targets = make([]*types.Sym, 1<<b)
808 // Start with all hashes going to the didn't-match target.
809 noMatch := typecheck.AutoLabel(".s")
810 for j := 0; j < 1<<b; j++ {
811 jt.Cases[j] = constant.MakeInt64(int64(j))
812 jt.Targets[j] = noMatch
814 // This statement is not reachable, but it will make it obvious that we don't
815 // fall through to the first case.
816 out.Append(ir.NewBranchStmt(base.Pos, ir.OGOTO, noMatch))
818 // Emit each of the actual cases.
819 for _, c := range cc {
820 h := c.hash >> i & (1<<b - 1)
821 label := typecheck.AutoLabel(".s")
822 jt.Targets[h] = label
823 out.Append(ir.NewLabelStmt(base.Pos, label))
824 out.Append(c.body...)
825 // We reach here if the hash matches but the type equality test fails.
826 out.Append(ir.NewBranchStmt(base.Pos, ir.OGOTO, noMatch))
828 // Emit point to go to if type doesn't match any case.
829 out.Append(ir.NewLabelStmt(base.Pos, noMatch))
833 // Couldn't find a perfect hash. Fall back to binary search.
837 // binarySearch constructs a binary search tree for handling n cases,
838 // and appends it to out. It's used for efficiently implementing
839 // switch statements.
841 // less(i) should return a boolean expression. If it evaluates true,
842 // then cases before i will be tested; otherwise, cases i and later.
844 // leaf(i, nif) should setup nif (an OIF node) to test case i. In
845 // particular, it should set nif.Cond and nif.Body.
846 func binarySearch(n int, out *ir.Nodes, less func(i int) ir.Node, leaf func(i int, nif *ir.IfStmt)) {
847 const binarySearchMin = 4 // minimum number of cases for binary search
849 var do func(lo, hi int, out *ir.Nodes)
850 do = func(lo, hi int, out *ir.Nodes) {
// Base case: few enough cases that a linear chain of if tests is cheaper
// than further subdivision.
852 if n < binarySearchMin {
853 for i := lo; i < hi; i++ {
854 nif := ir.NewIfStmt(base.Pos, nil, nil, nil)
856 base.Pos = base.Pos.WithNotStmt()
857 nif.Cond = typecheck.Expr(nif.Cond)
858 nif.Cond = typecheck.DefaultLit(nif.Cond, nil)
// Recursive case: split at the midpoint and emit
// if less(half) { lower half } else { upper half }.
866 nif := ir.NewIfStmt(base.Pos, nil, nil, nil)
867 nif.Cond = less(half)
868 base.Pos = base.Pos.WithNotStmt()
869 nif.Cond = typecheck.Expr(nif.Cond)
870 nif.Cond = typecheck.DefaultLit(nif.Cond, nil)
871 do(lo, half, &nif.Body)
872 do(half, hi, &nif.Else)
// stringSearch emits dispatch code matching expr against the equal-length
// constant strings in cc, by recursively splitting the candidate set on
// a single-byte comparison chosen to divide it as evenly as possible.
879 func stringSearch(expr ir.Node, cc []exprClause, out *ir.Nodes) {
881 // Short list, just do brute force equality checks.
882 for _, c := range cc {
883 nif := ir.NewIfStmt(base.Pos.WithNotStmt(), typecheck.DefaultLit(typecheck.Expr(c.test(expr)), nil), []ir.Node{c.jmp}, nil)
890 // The strategy here is to find a simple test to divide the set of possible strings
891 // that might match expr approximately in half.
892 // The test we're going to use is to do an ordered comparison of a single byte
893 // of expr to a constant. We will pick the index of that byte and the value we're
894 // comparing against to make the split as even as possible.
895 // if expr[3] <= 'd' { ... search strings with expr[3] at 'd' or lower ... }
896 // else { ... search strings with expr[3] at 'e' or higher ... }
898 // To add complication, we will do the ordered comparison in the signed domain.
899 // The reason for this is to prevent CSE from merging the load used for the
900 // ordered comparison with the load used for the later equality check.
901 // if expr[3] <= 'd' { ... if expr[0] == 'f' && expr[1] == 'o' && expr[2] == 'o' && expr[3] == 'd' { ... } }
902 // If we did both expr[3] loads in the unsigned domain, they would be CSEd, and that
903 // would in turn defeat the combining of expr[0]...expr[3] into a single 4-byte load.
905 // By using signed loads for the ordered comparison and unsigned loads for the
906 // equality comparison, they don't get CSEd and the equality comparisons will be
907 // done using wider loads.
909 n := len(ir.StringVal(cc[0].lo)) // Length of the constant strings.
910 bestScore := int64(0)            // measure of how good the split is.
911 bestIdx := 0                     // split using expr[bestIdx]
912 bestByte := int8(0)              // compare expr[bestIdx] against bestByte
// Exhaustively score every (byte index, threshold) pair; the score
// le*(len(cc)-le) is maximized by the most balanced split.
913 for idx := 0; idx < n; idx++ {
914 for b := int8(-128); b < 127; b++ {
916 for _, c := range cc {
917 s := ir.StringVal(c.lo)
918 if int8(s[idx]) <= b {
922 score := int64(le) * int64(len(cc)-le)
923 if score > bestScore {
931 // The split must be at least 1:n-1 because we have at least 2 distinct strings; they
932 // have to be different somewhere.
933 // TODO: what if the best split is still pretty bad?
935 base.Fatalf("unable to split string set")
938 // Convert expr to a []int8
939 slice := ir.NewConvExpr(base.Pos, ir.OSTR2BYTESTMP, types.NewSlice(types.Types[types.TINT8]), expr)
940 slice.SetTypecheck(1) // legacy typechecker doesn't handle this op
942 // Load the byte we're splitting on.
943 load := ir.NewIndexExpr(base.Pos, slice, ir.NewInt(base.Pos, int64(bestIdx)))
944 // Compare with the value we're splitting on.
945 cmp := ir.Node(ir.NewBinaryExpr(base.Pos, ir.OLE, load, ir.NewInt(base.Pos, int64(bestByte))))
946 cmp = typecheck.DefaultLit(typecheck.Expr(cmp), nil)
947 nif := ir.NewIfStmt(base.Pos, cmp, nil, nil)
// Partition the clauses by the chosen test and recurse into each branch.
951 for _, c := range cc {
952 s := ir.StringVal(c.lo)
953 if int8(s[bestIdx]) <= bestByte {
959 stringSearch(expr, le, &nif.Body)
960 stringSearch(expr, gt, &nif.Else)
963 // TODO: if expr[bestIdx] has enough different possible values, use a jump table.