1 // Copyright 2009 The Go Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style
3 // license that can be found in the LICENSE file.
13 "cmd/compile/internal/base"
14 "cmd/compile/internal/ir"
15 "cmd/compile/internal/ssagen"
16 "cmd/compile/internal/typecheck"
17 "cmd/compile/internal/types"
// NOTE(review): this listing is sampled — interior lines of this function are
// elided (the embedded original line numbers jump 23→25→29), so the guard
// condition and the dispatch to the expr/type variants are only partially visible.
21 // walkSwitch walks a switch statement.
22 func walkSwitch(sw *ir.SwitchStmt) {
23 // Guard against double walk, see #25776.
// Already-walked switches are silently skipped rather than treated as fatal.
25 return // Was fatal, but eliminating every possible source of double-walking is hard
// A tag of Op OTYPESW marks a type switch; presumably this branch dispatches
// to walkSwitchType and the other to walkSwitchExpr — elided here, confirm in full source.
29 if sw.Tag != nil && sw.Tag.Op() == ir.OTYPESW {
// NOTE(review): interior lines of this function are elided in this listing;
// the visible fragments show the overall flow: normalize the condition,
// optionally alias a byte-slice→string conversion, build per-case goto
// labels via an exprSwitch, then emit the compiled statement list.
36 // walkSwitchExpr generates an AST implementing sw. sw is an
38 func walkSwitchExpr(sw *ir.SwitchStmt) {
44 // convert switch {...} to switch true {...}
// A tagless switch is rewritten with an explicit `true` condition so the
// rest of the lowering can assume cond is always present.
46 cond = ir.NewBool(base.Pos, true)
47 cond = typecheck.Expr(cond)
48 cond = typecheck.DefaultLit(cond, nil)
51 // Given "switch string(byteslice)",
52 // with all cases being side-effect free,
53 // use a zero-cost alias of the byte slice.
54 // Do this before calling walkExpr on cond,
55 // because walkExpr will lower the string
56 // conversion into a runtime call.
57 // See issue 24937 for more discussion.
58 if cond.Op() == ir.OBYTES2STR && allCaseExprsAreSideEffectFree(sw) {
// Shadowed local: narrow cond to *ir.ConvExpr just to flip the conversion op
// to the no-copy OBYTES2STRTMP variant.
59 cond := cond.(*ir.ConvExpr)
60 cond.SetOp(ir.OBYTES2STRTMP)
// Evaluate cond once up front; non-constant conditions are copied into a
// temporary so repeated comparisons below don't re-evaluate it.
63 cond = walkExpr(cond, sw.PtrInit())
64 if cond.Op() != ir.OLITERAL && cond.Op() != ir.ONIL {
65 cond = copyExpr(cond, cond.Type(), &sw.Compiled)
// defaultGoto holds the branch taken when no case matches (the `default:`
// clause if present; otherwise a plain break, set after the loop).
75 var defaultGoto ir.Node
77 for _, ncase := range sw.Cases {
// Every case body gets an autogenerated label; dispatch code jumps to it.
78 label := typecheck.AutoLabel(".s")
79 jmp := ir.NewBranchStmt(ncase.Pos(), ir.OGOTO, label)
81 // Process case dispatch.
82 if len(ncase.List) == 0 {
83 if defaultGoto != nil {
84 base.Fatalf("duplicate default case not detected during typechecking")
89 for i, n1 := range ncase.List {
// RTypes may be shorter than List; only pass a runtime-type operand when
// one was recorded for this case expression.
91 if i < len(ncase.RTypes) {
92 rtype = ncase.RTypes[i]
94 s.Add(ncase.Pos(), n1, rtype, jmp)
// Case body: label, then the user's statements, then an implicit break
// unless the body ends in fallthrough.
98 body.Append(ir.NewLabelStmt(ncase.Pos(), label))
99 body.Append(ncase.Body...)
100 if fall, pos := endsInFallthrough(ncase.Body); !fall {
101 br := ir.NewBranchStmt(base.Pos, ir.OBREAK, nil)
// No default clause: falling out of the dispatch just breaks out of the switch.
108 if defaultGoto == nil {
109 br := ir.NewBranchStmt(base.Pos, ir.OBREAK, nil)
110 br.SetPos(br.Pos().WithNotStmt())
// Emit order: dispatch (elided above, presumably s.Emit), default jump,
// then all case bodies; finally walk the generated statements.
115 sw.Compiled.Append(defaultGoto)
116 sw.Compiled.Append(body.Take()...)
117 walkStmtList(sw.Compiled)
// NOTE(review): both struct definitions below are partial in this listing;
// additional fields (pos, clauses, done, lo/hi/jmp, etc. — referenced by the
// methods further down) are elided.
120 // An exprSwitch walks an expression switch.
121 type exprSwitch struct {
123 exprname ir.Node // value being switched on
// An exprClause describes one dispatch arm: a (lo..hi) constant range that,
// when matched against exprname, jumps to the case body.
129 type exprClause struct {
132 rtype ir.Node // *runtime._type for OEQ node
// Add registers one case expression with its jump target. Ordered constant
// cases are accumulated into clauses for batched (sorted/merged) dispatch;
// NOTE(review): the non-constant path between the two appends is elided here —
// presumably it flushes pending clauses before emitting a direct comparison.
136 func (s *exprSwitch) Add(pos src.XPos, expr, rtype, jmp ir.Node) {
// A single expression is stored as the degenerate range [expr, expr].
137 c := exprClause{pos: pos, lo: expr, hi: expr, rtype: rtype, jmp: jmp}
138 if types.IsOrdered[s.exprname.Type().Kind()] && expr.Op() == ir.OLITERAL {
139 s.clauses = append(s.clauses, c)
144 s.clauses = append(s.clauses, c)
// Emit flushes any pending clauses (elided line) and appends the generated
// dispatch statements to out.
148 func (s *exprSwitch) Emit(out *ir.Nodes) {
150 out.Append(s.done.Take()...)
// flush lowers the accumulated constant clauses into dispatch code:
// strings get a two-level search (outer switch on length, inner search per
// length); integers get consecutive-range merging; everything then goes
// through s.search. NOTE(review): interior lines are elided throughout.
153 func (s *exprSwitch) flush() {
160 // Caution: If len(cc) == 1, then cc[0] might not an OLITERAL.
161 // The code below is structured to implicitly handle this case
162 // (e.g., sort.Slice doesn't need to invoke the less function
163 // when there's only a single slice element).
165 if s.exprname.Type().IsString() && len(cc) >= 2 {
166 // Sort strings by length and then by value. It is
167 // much cheaper to compare lengths than values, and
168 // all we need here is consistency. We respect this
170 sort.Slice(cc, func(i, j int) bool {
171 si := ir.StringVal(cc[i].lo)
172 sj := ir.StringVal(cc[j].lo)
173 if len(si) != len(sj) {
174 return len(si) < len(sj)
179 // runLen returns the string length associated with a
180 // particular run of exprClauses.
181 runLen := func(run []exprClause) int64 { return int64(len(ir.StringVal(run[0].lo))) }
183 // Collapse runs of consecutive strings with the same length.
184 var runs [][]exprClause
186 for i := 1; i < len(cc); i++ {
// Run boundary: the length of the remaining suffix starting at i differs
// from the current run's length.
187 if runLen(cc[start:]) != runLen(cc[i:]) {
188 runs = append(runs, cc[start:i])
192 runs = append(runs, cc[start:])
194 // We have strings of more than one length. Generate an
195 // outer switch which switches on the length of the string
196 // and an inner switch in each case which resolves all the
197 // strings of the same length. The code looks something like this:
201 // ... search among length 5 strings ...
204 // ... search among length 8 strings ...
206 // ... other lengths ...
211 // ... other lengths ...
215 outerLabel := typecheck.AutoLabel(".s")
216 endLabel := typecheck.AutoLabel(".s")
218 // Jump around all the individual switches for each length.
219 s.done.Append(ir.NewBranchStmt(s.pos, ir.OGOTO, outerLabel))
// The outer switch dispatches on len(exprname) (typed as int).
222 outer.exprname = ir.NewUnaryExpr(s.pos, ir.OLEN, s.exprname)
223 outer.exprname.SetType(types.Types[types.TINT])
225 for _, run := range runs {
226 // Target label to jump to when we match this length.
227 label := typecheck.AutoLabel(".s")
229 // Search within this run of same-length strings.
231 s.done.Append(ir.NewLabelStmt(pos, label))
232 stringSearch(s.exprname, run, &s.done)
// No inner match for this length means no case matches: jump to end.
233 s.done.Append(ir.NewBranchStmt(pos, ir.OGOTO, endLabel))
235 // Add length case to outer switch.
236 cas := ir.NewInt(pos, runLen(run))
237 jmp := ir.NewBranchStmt(pos, ir.OGOTO, label)
238 outer.Add(pos, cas, nil, jmp)
240 s.done.Append(ir.NewLabelStmt(s.pos, outerLabel))
242 s.done.Append(ir.NewLabelStmt(s.pos, endLabel))
// Non-string (ordered) path: sort clauses by constant value.
246 sort.Slice(cc, func(i, j int) bool {
247 return constant.Compare(cc[i].lo.Val(), token.LSS, cc[j].lo.Val())
250 // Merge consecutive integer cases.
251 if s.exprname.Type().IsInteger() {
// Two clauses are mergeable when the next value is exactly last+1.
252 consecutive := func(last, next constant.Value) bool {
253 delta := constant.BinaryOp(next, token.SUB, last)
254 return constant.Compare(delta, token.EQL, constant.MakeInt64(1))
258 for _, c := range cc[1:] {
259 last := &merged[len(merged)-1]
// Same jump target and adjacent values: widen the previous clause's range
// (the hi-extension line is elided) instead of adding a new clause.
260 if last.jmp == c.jmp && consecutive(last.hi.Val(), c.lo.Val()) {
263 merged = append(merged, c)
269 s.search(cc, &s.done)
// search emits dispatch code for the sorted clauses cc: first it attempts a
// jump table; failing that it builds a binary search tree of comparisons.
272 func (s *exprSwitch) search(cc []exprClause, out *ir.Nodes) {
273 if s.tryJumpTable(cc, out) {
// Pivot test: exprname <= cc[i-1].hi selects the lower half.
276 binarySearch(len(cc), out,
277 func(i int) ir.Node {
278 return ir.NewBinaryExpr(base.Pos, ir.OLE, s.exprname, cc[i-1].hi)
// Leaf: test one clause and jump to its case body on match.
280 func(i int, nif *ir.IfStmt) {
282 nif.Cond = c.test(s.exprname)
283 nif.Body = []ir.Node{c.jmp}
288 // Try to implement the clauses with a jump table. Returns true if successful.
289 func (s *exprSwitch) tryJumpTable(cc []exprClause, out *ir.Nodes) bool {
290 const minCases = 8 // have at least minCases cases in the switch
291 const minDensity = 4 // use at least 1 out of every minDensity entries
// Bail out when optimization is off (-N), the target can't emit jump
// tables, or retpoline mitigation forbids indirect jumps.
293 if base.Flag.N != 0 || !ssagen.Arch.LinkArch.CanJumpTable || base.Ctxt.Retpoline {
296 if len(cc) < minCases {
297 return false // not enough cases for it to be worth it
299 if cc[0].lo.Val().Kind() != constant.Int {
300 return false // e.g. float
302 if s.exprname.Type().Size() > int64(types.PtrSize) {
303 return false // 64-bit switches on 32-bit archs
// Density check: table width (max-min+1) must not exceed minDensity
// entries per clause, or the table would be mostly wasted slots.
305 min := cc[0].lo.Val()
306 max := cc[len(cc)-1].hi.Val()
307 width := constant.BinaryOp(constant.BinaryOp(max, token.SUB, min), token.ADD, constant.MakeInt64(1))
308 limit := constant.MakeInt64(int64(len(cc)) * minDensity)
309 if constant.Compare(width, token.GTR, limit) {
310 // We disable jump tables if we use less than a minimum fraction of the entries.
311 // i.e. for switch x {case 0: case 1000: case 2000:} we don't want to use a jump table.
// Build the jump table: one entry per integer value in each clause's
// [lo, hi] range, all pointing at the clause's goto label.
314 jt := ir.NewJumpTableStmt(base.Pos, s.exprname)
315 for _, c := range cc {
316 jmp := c.jmp.(*ir.BranchStmt)
317 if jmp.Op() != ir.OGOTO || jmp.Label == nil {
318 panic("bad switch case body")
320 for i := c.lo.Val(); constant.Compare(i, token.LEQ, c.hi.Val()); i = constant.BinaryOp(i, token.ADD, constant.MakeInt64(1)) {
321 jt.Cases = append(jt.Cases, i)
322 jt.Targets = append(jt.Targets, jmp.Label)
// test builds the boolean expression that checks whether exprname matches
// this clause: a range check (lo <= x && x <= hi) for merged ranges, a
// boolean simplification for `switch true/false`, or a plain equality.
// NOTE(review): the branch structure between fragments is elided.
329 func (c *exprClause) test(exprname ir.Node) ir.Node {
// Range clause: lo <= exprname && exprname <= hi.
332 low := ir.NewBinaryExpr(c.pos, ir.OGE, exprname, c.lo)
333 high := ir.NewBinaryExpr(c.pos, ir.OLE, exprname, c.hi)
334 return ir.NewLogicalExpr(c.pos, ir.OANDAND, low, high)
337 // Optimize "switch true { ...}" and "switch false { ... }".
338 if ir.IsConst(exprname, constant.Bool) && !c.lo.Type().IsInterface() {
339 if ir.BoolVal(exprname) {
// `switch false`: the case matches when the case expression is false.
342 return ir.NewUnaryExpr(c.pos, ir.ONOT, c.lo)
// General case: exprname == lo.
346 n := ir.NewBinaryExpr(c.pos, ir.OEQ, exprname, c.lo)
// allCaseExprsAreSideEffectFree reports whether every case expression in sw
// is a constant literal, the conservative stand-in for "side-effect free".
351 func allCaseExprsAreSideEffectFree(sw *ir.SwitchStmt) bool {
352 // In theory, we could be more aggressive, allowing any
353 // side-effect-free expressions in cases, but it's a bit
354 // tricky because some of that information is unavailable due
355 // to the introduction of temporaries during order.
356 // Restricting to constants is simple and probably powerful
359 for _, ncase := range sw.Cases {
360 for _, v := range ncase.List {
// Any non-literal case expression disqualifies the whole switch
// (the `return false` line is elided in this listing).
361 if v.Op() != ir.OLITERAL {
369 // endsInFallthrough reports whether stmts ends with a "fallthrough" statement.
370 func endsInFallthrough(stmts []ir.Node) (bool, src.XPos) {
// Empty statement list: no fallthrough, no position.
372 return false, src.NoXPos
// i presumably indexes the last statement (its computation is elided here).
375 return stmts[i].Op() == ir.OFALL, stmts[i].Pos()
// NOTE(review): interior lines are elided throughout this function; the
// visible fragments show the shape: hoist the switched-on value and its
// type-descriptor hash into temporaries, emit a nil check, then build
// per-case labels/jumps through a typeSwitch dispatcher.
378 // walkSwitchType generates an AST that implements sw, where sw is a
380 func walkSwitchType(sw *ir.SwitchStmt) {
// The value being type-switched on comes from the OTYPESW guard.
382 s.facename = sw.Tag.(*ir.TypeSwitchGuard).X
385 s.facename = walkExpr(s.facename, sw.PtrInit())
// Copy into a temporary so each case's assertion re-reads the same value.
386 s.facename = copyExpr(s.facename, s.facename.Type(), &sw.Compiled)
387 s.okname = typecheck.TempAt(base.Pos, ir.CurFunc, types.Types[types.TBOOL])
389 // Get interface descriptor word.
390 // For empty interfaces this will be the type.
391 // For non-empty interfaces this will be the itab.
392 itab := ir.NewUnaryExpr(base.Pos, ir.OITAB, s.facename)
394 // For empty interfaces, do:
395 // if e._type == nil {
396 // do nil case if it exists, otherwise default
399 // Use a similar strategy for non-empty interfaces.
400 ifNil := ir.NewIfStmt(base.Pos, nil, nil, nil)
401 ifNil.Cond = ir.NewBinaryExpr(base.Pos, ir.OEQ, itab, typecheck.NodNil())
402 base.Pos = base.Pos.WithNotStmt() // disable statement marks after the first check.
403 ifNil.Cond = typecheck.Expr(ifNil.Cond)
404 ifNil.Cond = typecheck.DefaultLit(ifNil.Cond, nil)
405 // ifNil.Nbody assigned at end.
406 sw.Compiled.Append(ifNil)
408 // Load hash from type or itab.
409 dotHash := typeHashFieldOf(base.Pos, itab)
410 s.hashname = copyExpr(dotHash, dotHash.Type(), &sw.Compiled)
412 br := ir.NewBranchStmt(base.Pos, ir.OBREAK, nil)
// defaultGoto/nilGoto are the jump targets for the default and `case nil:`
// clauses respectively; nilGoto falls back to defaultGoto when absent.
413 var defaultGoto, nilGoto ir.Node
415 for _, ncase := range sw.Cases {
418 // For single-type cases with an interface type,
419 // we initialize the case variable as part of the type assertion.
420 // In other cases, we initialize it in the body.
421 var singleType *types.Type
422 if len(ncase.List) == 1 && ncase.List[0].Op() == ir.OTYPE {
423 singleType = ncase.List[0].Type()
425 caseVarInitialized := false
427 label := typecheck.AutoLabel(".s")
428 jmp := ir.NewBranchStmt(ncase.Pos(), ir.OGOTO, label)
430 if len(ncase.List) == 0 { // default:
431 if defaultGoto != nil {
432 base.Fatalf("duplicate default case not detected during typechecking")
437 for _, n1 := range ncase.List {
438 if ir.IsNil(n1) { // case nil:
440 base.Fatalf("duplicate nil case not detected during typechecking")
// Interface-typed single case: the assertion itself assigns caseVar.
446 if singleType != nil && singleType.IsInterface() {
447 s.Add(ncase.Pos(), n1, caseVar, jmp)
448 caseVarInitialized = true
450 s.Add(ncase.Pos(), n1, nil, jmp)
454 body.Append(ir.NewLabelStmt(ncase.Pos(), label))
455 if caseVar != nil && !caseVarInitialized {
457 if singleType != nil {
458 // We have a single concrete type. Extract the data.
459 if singleType.IsInterface() {
460 base.Fatalf("singleType interface should have been handled in Add")
462 val = ifaceData(ncase.Pos(), s.facename, singleType)
// Generic path: a dynamic type in the case list needs a dynamic assertion.
464 if len(ncase.List) == 1 && ncase.List[0].Op() == ir.ODYNAMICTYPE {
465 dt := ncase.List[0].(*ir.DynamicType)
466 x := ir.NewDynamicTypeAssertExpr(ncase.Pos(), ir.ODYNAMICDOTTYPE, val, dt.RType)
468 x.SetType(caseVar.Type())
// Declare and assign the case variable at the top of the case body.
473 ir.NewDecl(ncase.Pos(), ir.ODCL, caseVar),
474 ir.NewAssignStmt(ncase.Pos(), caseVar, val),
479 body.Append(ncase.Body...)
484 if defaultGoto == nil {
// No explicit nil case: a nil interface takes the default path.
488 nilGoto = defaultGoto
490 ifNil.Body = []ir.Node{nilGoto}
493 sw.Compiled.Append(defaultGoto)
494 sw.Compiled.Append(body.Take()...)
496 walkStmtList(sw.Compiled)
499 // typeHashFieldOf returns an expression to select the type hash field
500 // from an interface's descriptor word (whether a *runtime._type or
501 // *runtime.itab pointer).
502 func typeHashFieldOf(pos src.XPos, itab *ir.UnaryExpr) *ir.SelectorExpr {
503 if itab.Op() != ir.OITAB {
504 base.Fatalf("expected OITAB, got %v", itab.Op())
506 var hashField *types.Field
507 if itab.X.Type().IsEmptyInterface() {
508 // runtime._type's hash field
// Lazily built and cached in the package-level var; offset 2*PtrSize,
// type uint32, mirroring the runtime struct layout.
509 if rtypeHashField == nil {
510 rtypeHashField = runtimeField("hash", int64(2*types.PtrSize), types.Types[types.TUINT32])
512 hashField = rtypeHashField
514 // runtime.itab's hash field
515 if itabHashField == nil {
516 itabHashField = runtimeField("hash", int64(2*types.PtrSize), types.Types[types.TUINT32])
518 hashField = itabHashField
// boundedDotPtr presumably emits a nil-check-free field load — elided helper.
520 return boundedDotPtr(pos, itab, hashField)
// Cached synthetic *types.Field descriptors for the runtime._type and
// runtime.itab hash fields, built on first use by typeHashFieldOf.
523 var rtypeHashField, itabHashField *types.Field
// NOTE(review): both struct definitions are partial in this listing; fields
// used by the methods below (clauses, done, hash, body, …) are elided.
525 // A typeSwitch walks a type switch.
526 type typeSwitch struct {
527 // Temporary variables (i.e., ONAMEs) used by type switch dispatch logic:
528 facename ir.Node // value being type-switched on
529 hashname ir.Node // type hash of the value being type-switched on
530 okname ir.Node // boolean used for comma-ok type assertions
// A typeClause pairs a concrete type's hash with the statements that test
// for and dispatch to that type's case.
536 type typeClause struct {
// Add registers one type case: it builds the `cv, ok = iface.(T)` assertion
// plus `if ok { goto label }`, deferring static non-interface cases into
// s.clauses for hash-based binary search and emitting the rest directly.
// NOTE(review): interior lines are elided throughout.
541 func (s *typeSwitch) Add(pos src.XPos, n1 ir.Node, caseVar *ir.Name, jmp ir.Node) {
// Declare the case variable before the assertion assigns it.
546 ir.NewDecl(pos, ir.ODCL, caseVar),
547 ir.NewAssignStmt(pos, caseVar, nil),
// No case variable: assign the asserted value to the blank identifier.
552 caseVar = ir.BlankNode
555 // cv, ok = iface.(type)
556 as := ir.NewAssignListStmt(pos, ir.OAS2, nil, nil)
557 as.Lhs = []ir.Node{caseVar, s.okname} // cv, ok =
560 // Static type assertion (non-generic)
561 dot := ir.NewTypeAssertExpr(pos, s.facename, typ) // iface.(type)
562 as.Rhs = []ir.Node{dot}
563 case ir.ODYNAMICTYPE:
564 // Dynamic type assertion (generic)
565 dt := n1.(*ir.DynamicType)
566 dot := ir.NewDynamicTypeAssertExpr(pos, ir.ODYNAMICDOTTYPE, s.facename, dt.RType)
570 as.Rhs = []ir.Node{dot}
572 base.Fatalf("unhandled type case %s", n1.Op())
574 appendWalkStmt(&body, as)
576 // if ok { goto label }
577 nif := ir.NewIfStmt(pos, nil, nil, nil)
579 nif.Body = []ir.Node{jmp}
582 if n1.Op() == ir.OTYPE && !typ.IsInterface() {
583 // Defer static, noninterface cases so they can be binary searched by hash.
584 s.clauses = append(s.clauses, typeClause{
585 hash: types.TypeHash(n1.Type()),
// Interface or dynamic cases can't be hash-dispatched; emit them inline.
592 s.done.Append(body.Take()...)
// Emit flushes any pending hash clauses (elided line) and appends the
// generated dispatch statements to out.
595 func (s *typeSwitch) Emit(out *ir.Nodes) {
597 out.Append(s.done.Take()...)
// flush lowers the deferred static-type clauses: sort by type hash, merge
// duplicate hashes, then dispatch via jump table or binary search on the
// hash value. NOTE(review): interior lines are elided.
600 func (s *typeSwitch) flush() {
607 sort.Slice(cc, func(i, j int) bool { return cc[i].hash < cc[j].hash })
609 // Combine adjacent cases with the same hash.
611 for _, c := range cc[1:] {
612 last := &merged[len(merged)-1]
// Hash collision between distinct types: concatenate their test bodies so
// each candidate's equality check runs in sequence.
613 if last.hash == c.hash {
614 last.body.Append(c.body.Take()...)
616 merged = append(merged, c)
621 if s.tryJumpTable(cc, &s.done) {
624 binarySearch(len(cc), &s.done,
// Pivot: hashname <= cc[i-1].hash selects the lower half.
625 func(i int) ir.Node {
626 return ir.NewBinaryExpr(base.Pos, ir.OLE, s.hashname, ir.NewInt(base.Pos, int64(cc[i-1].hash)))
628 func(i int, nif *ir.IfStmt) {
629 // TODO(mdempsky): Omit hash equality check if
630 // there's only one type.
632 nif.Cond = ir.NewBinaryExpr(base.Pos, ir.OEQ, s.hashname, ir.NewInt(base.Pos, int64(c.hash)))
633 nif.Body.Append(c.body.Take()...)
638 // Try to implement the clauses with a jump table. Returns true if successful.
// Strategy: search for a (bit-width b, shift i) pair such that the selected
// b bits of every clause's type hash are distinct — a perfect hash — then
// index a 2^b-entry jump table with those bits.
639 func (s *typeSwitch) tryJumpTable(cc []typeClause, out *ir.Nodes) bool {
640 const minCases = 5 // have at least minCases cases in the switch
// Same preconditions as the expression-switch jump table: optimization
// enabled, target supports jump tables, retpolines not required.
641 if base.Flag.N != 0 || !ssagen.Arch.LinkArch.CanJumpTable || base.Ctxt.Retpoline {
644 if len(cc) < minCases {
645 return false // not enough cases for it to be worth it
647 hashes := make([]uint32, len(cc))
648 // b = # of bits to use. Start with the minimum number of
649 // bits possible, but try a few larger sizes if needed.
650 b0 := bits.Len(uint(len(cc) - 1))
651 for b := b0; b < b0+3; b++ {
653 for i := 0; i <= 32-b; i++ { // starting bit position
654 // Compute the hash we'd get from all the cases,
655 // selecting b bits starting at bit i.
657 for _, c := range cc {
658 h := c.hash >> i & (1<<b - 1)
659 hashes = append(hashes, h)
661 // Order by increasing hash.
662 sort.Slice(hashes, func(j, k int) bool {
663 return hashes[j] < hashes[k]
// Adjacent equal values after sorting reveal a collision.
665 for j := 1; j < len(hashes); j++ {
666 if hashes[j] == hashes[j-1] {
667 // There is a duplicate hash; try a different b/i pair.
672 // All hashes are distinct. Use these values of b and i.
// Build the index expression: (hashname >> i) & (1<<b - 1).
675 h = ir.NewBinaryExpr(base.Pos, ir.ORSH, h, ir.NewInt(base.Pos, int64(i)))
677 h = ir.NewBinaryExpr(base.Pos, ir.OAND, h, ir.NewInt(base.Pos, int64(1<<b-1)))
678 h = typecheck.Expr(h)
681 jt := ir.NewJumpTableStmt(base.Pos, h)
682 jt.Cases = make([]constant.Value, 1<<b)
683 jt.Targets = make([]*types.Sym, 1<<b)
686 // Start with all hashes going to the didn't-match target.
687 noMatch := typecheck.AutoLabel(".s")
688 for j := 0; j < 1<<b; j++ {
689 jt.Cases[j] = constant.MakeInt64(int64(j))
690 jt.Targets[j] = noMatch
692 // This statement is not reachable, but it will make it obvious that we don't
693 // fall through to the first case.
694 out.Append(ir.NewBranchStmt(base.Pos, ir.OGOTO, noMatch))
696 // Emit each of the actual cases.
697 for _, c := range cc {
698 h := c.hash >> i & (1<<b - 1)
699 label := typecheck.AutoLabel(".s")
700 jt.Targets[h] = label
701 out.Append(ir.NewLabelStmt(base.Pos, label))
702 out.Append(c.body...)
703 // We reach here if the hash matches but the type equality test fails.
704 out.Append(ir.NewBranchStmt(base.Pos, ir.OGOTO, noMatch))
706 // Emit point to go to if type doesn't match any case.
707 out.Append(ir.NewLabelStmt(base.Pos, noMatch))
711 // Couldn't find a perfect hash. Fall back to binary search.
715 // binarySearch constructs a binary search tree for handling n cases,
716 // and appends it to out. It's used for efficiently implementing
717 // switch statements.
719 // less(i) should return a boolean expression. If it evaluates true,
720 // then cases before i will be tested; otherwise, cases i and later.
722 // leaf(i, nif) should setup nif (an OIF node) to test case i. In
723 // particular, it should set nif.Cond and nif.Body.
724 func binarySearch(n int, out *ir.Nodes, less func(i int) ir.Node, leaf func(i int, nif *ir.IfStmt)) {
725 const binarySearchMin = 4 // minimum number of cases for binary search
// Recursive helper over the half-open case range [lo, hi).
727 var do func(lo, hi int, out *ir.Nodes)
728 do = func(lo, hi int, out *ir.Nodes) {
// Small ranges: emit a linear chain of leaf if-statements instead of a tree.
// NOTE(review): this guard reads `n` (the total), not hi-lo — in the full
// upstream source this condition is on the range size; lines are elided here.
730 if n < binarySearchMin {
731 for i := lo; i < hi; i++ {
732 nif := ir.NewIfStmt(base.Pos, nil, nil, nil)
734 base.Pos = base.Pos.WithNotStmt()
735 nif.Cond = typecheck.Expr(nif.Cond)
736 nif.Cond = typecheck.DefaultLit(nif.Cond, nil)
// Internal node: split at the midpoint; less(half) chooses the subtree.
744 nif := ir.NewIfStmt(base.Pos, nil, nil, nil)
745 nif.Cond = less(half)
746 base.Pos = base.Pos.WithNotStmt()
747 nif.Cond = typecheck.Expr(nif.Cond)
748 nif.Cond = typecheck.DefaultLit(nif.Cond, nil)
749 do(lo, half, &nif.Body)
750 do(half, hi, &nif.Else)
// stringSearch emits dispatch code that selects among the same-length string
// clauses cc by recursively splitting on a single byte comparison, falling
// back to brute-force equality for short lists.
// NOTE(review): interior lines are elided throughout.
757 func stringSearch(expr ir.Node, cc []exprClause, out *ir.Nodes) {
759 // Short list, just do brute force equality checks.
760 for _, c := range cc {
761 nif := ir.NewIfStmt(base.Pos.WithNotStmt(), typecheck.DefaultLit(typecheck.Expr(c.test(expr)), nil), []ir.Node{c.jmp}, nil)
768 // The strategy here is to find a simple test to divide the set of possible strings
769 // that might match expr approximately in half.
770 // The test we're going to use is to do an ordered comparison of a single byte
771 // of expr to a constant. We will pick the index of that byte and the value we're
772 // comparing against to make the split as even as possible.
773 // if expr[3] <= 'd' { ... search strings with expr[3] at 'd' or lower ... }
774 // else { ... search strings with expr[3] at 'e' or higher ... }
776 // To add complication, we will do the ordered comparison in the signed domain.
777 // The reason for this is to prevent CSE from merging the load used for the
778 // ordered comparison with the load used for the later equality check.
779 // if expr[3] <= 'd' { ... if expr[0] == 'f' && expr[1] == 'o' && expr[2] == 'o' && expr[3] == 'd' { ... } }
780 // If we did both expr[3] loads in the unsigned domain, they would be CSEd, and that
781 // would in turn defeat the combining of expr[0]...expr[3] into a single 4-byte load.
783 // By using signed loads for the ordered comparison and unsigned loads for the
784 // equality comparison, they don't get CSEd and the equality comparisons will be
785 // done using wider loads.
787 n := len(ir.StringVal(cc[0].lo)) // Length of the constant strings.
// Exhaustively score every (byte index, signed threshold) pair; the score
// le*(len-le) is maximized by the most even split.
788 bestScore := int64(0) // measure of how good the split is.
789 bestIdx := 0 // split using expr[bestIdx]
790 bestByte := int8(0) // compare expr[bestIdx] against bestByte
791 for idx := 0; idx < n; idx++ {
// b stops before 127: a threshold of 127 can never split (everything <= it).
792 for b := int8(-128); b < 127; b++ {
794 for _, c := range cc {
795 s := ir.StringVal(c.lo)
796 if int8(s[idx]) <= b {
800 score := int64(le) * int64(len(cc)-le)
801 if score > bestScore {
809 // The split must be at least 1:n-1 because we have at least 2 distinct strings; they
810 // have to be different somewhere.
811 // TODO: what if the best split is still pretty bad?
813 base.Fatalf("unable to split string set")
816 // Convert expr to a []int8
// The signed-element slice forces the ordered comparison into the signed
// domain (see strategy comment above).
817 slice := ir.NewConvExpr(base.Pos, ir.OSTR2BYTESTMP, types.NewSlice(types.Types[types.TINT8]), expr)
818 slice.SetTypecheck(1) // legacy typechecker doesn't handle this op
820 // Load the byte we're splitting on.
821 load := ir.NewIndexExpr(base.Pos, slice, ir.NewInt(base.Pos, int64(bestIdx)))
822 // Compare with the value we're splitting on.
823 cmp := ir.Node(ir.NewBinaryExpr(base.Pos, ir.OLE, load, ir.NewInt(base.Pos, int64(bestByte))))
824 cmp = typecheck.DefaultLit(typecheck.Expr(cmp), nil)
825 nif := ir.NewIfStmt(base.Pos, cmp, nil, nil)
// Partition the clauses by the chosen byte test and recurse into each half.
829 for _, c := range cc {
830 s := ir.StringVal(c.lo)
831 if int8(s[bestIdx]) <= bestByte {
837 stringSearch(expr, le, &nif.Body)
838 stringSearch(expr, gt, &nif.Else)
841 // TODO: if expr[bestIdx] has enough different possible values, use a jump table.