continue
}
- frags := bytes.SplitAfterN(line, []byte("ERROR HERE: "), 2)
- if len(frags) == 1 {
+ _, frag, ok := bytes.Cut(line, []byte("ERROR HERE: "))
+ if !ok {
continue
}
- frag := fmt.Sprintf(":%d:.*%s", i+1, frags[1])
- re, err := regexp.Compile(frag)
+ re, err := regexp.Compile(fmt.Sprintf(":%d:.*%s", i+1, frag))
if err != nil {
- t.Errorf("Invalid regexp after `ERROR HERE: `: %#q", frags[1])
+ t.Errorf("Invalid regexp after `ERROR HERE: `: %#q", frag)
continue
}
errors = append(errors, re)
args := append(adbCmd(), "exec-out")
// Propagate LD_LIBRARY_PATH to the adb shell invocation.
for _, e := range env {
- if strings.Index(e, "LD_LIBRARY_PATH=") != -1 {
+ if strings.Contains(e, "LD_LIBRARY_PATH=") {
adbargs = append([]string{e}, adbargs...)
break
}
base, name := filepath.Split(args[0])
args[0] = filepath.Join(base, "llvm-dlltool")
var machine string
- switch strings.SplitN(name, "-", 2)[0] {
+ switch prefix, _, _ := strings.Cut(name, "-"); prefix {
case "i686":
machine = "i386"
case "x86_64":
if os.IsNotExist(err) {
return true, fmt.Errorf("%#q failed to produce executable: %v", strings.Join(cmd.Args, " "), err)
}
- snippet := bytes.SplitN(out, []byte{'\n'}, 2)[0]
+ snippet, _, _ := bytes.Cut(out, []byte("\n"))
return true, fmt.Errorf("%#q generated broken executable: %v\n%s", strings.Join(cmd.Args, " "), err, snippet)
}
// Device IDs as listed with ios-deploy -c.
deviceID = os.Getenv("GOIOS_DEVICE_ID")
- parts := strings.SplitN(appID, ".", 2)
- if len(parts) == 2 {
- bundleID = parts[1]
+ if _, id, ok := strings.Cut(appID, "."); ok {
+ bundleID = id
}
if err := signApp(appdir); err != nil {
var iosVer, buildVer string
lines := bytes.Split(out, []byte("\n"))
for _, line := range lines {
- spl := bytes.SplitN(line, []byte(": "), 2)
- if len(spl) != 2 {
+ key, val, ok := strings.Cut(string(line), ": ")
+ if !ok {
continue
}
- key, val := string(spl[0]), string(spl[1])
switch key {
case "ProductVersion":
iosVer = val
}
mu.Lock()
defer mu.Unlock()
- var frag string
- if i := strings.Index(url, "#"); i >= 0 {
- frag = url[i+1:]
- url = url[:i]
+ if u, frag, ok := strings.Cut(url, "#"); ok {
+ url = u
if frag != "" {
uf := urlFrag{url, frag}
neededFrags[uf] = append(neededFrags[uf], sourceURL)
// hasNUL reports whether the NUL character exists within s.
func hasNUL(s string) bool {
- return strings.IndexByte(s, 0) >= 0
+ return strings.Contains(s, "\x00")
}
// isASCII reports whether the input is an ASCII C-style string.
const maxNanoSecondDigits = 9
// Split string into seconds and sub-seconds parts.
- ss, sn := s, ""
- if pos := strings.IndexByte(s, '.'); pos >= 0 {
- ss, sn = s[:pos], s[pos+1:]
- }
+ ss, sn, _ := strings.Cut(s, ".")
// Parse the seconds.
secs, err := strconv.ParseInt(ss, 10, 64)
// return the remainder as r.
func parsePAXRecord(s string) (k, v, r string, err error) {
// The size field ends at the first space.
- sp := strings.IndexByte(s, ' ')
- if sp == -1 {
+ nStr, rest, ok := strings.Cut(s, " ")
+ if !ok {
return "", "", s, ErrHeader
}
// Parse the first token as a decimal integer.
- n, perr := strconv.ParseInt(s[:sp], 10, 0) // Intentionally parse as native int
- if perr != nil || n < 5 || int64(len(s)) < n {
+ n, perr := strconv.ParseInt(nStr, 10, 0) // Intentionally parse as native int
+ if perr != nil || n < 5 || n > int64(len(s)) {
return "", "", s, ErrHeader
}
-
- afterSpace := int64(sp + 1)
- beforeLastNewLine := n - 1
- // In some cases, "length" was perhaps padded/malformed, and
- // trying to index past where the space supposedly is goes past
- // the end of the actual record.
- // For example:
- // "0000000000000000000000000000000030 mtime=1432668921.098285006\n30 ctime=2147483649.15163319"
- // ^ ^
- // | |
- // | afterSpace=35
- // |
- // beforeLastNewLine=29
- // yet indexOf(firstSpace) MUST BE before endOfRecord.
- //
- // See https://golang.org/issues/40196.
- if afterSpace >= beforeLastNewLine {
+ n -= int64(len(nStr) + 1) // convert from index in s to index in rest
+ if n <= 0 {
return "", "", s, ErrHeader
}
// Extract everything between the space and the final newline.
- rec, nl, rem := s[afterSpace:beforeLastNewLine], s[beforeLastNewLine:n], s[n:]
+ rec, nl, rem := rest[:n-1], rest[n-1:n], rest[n:]
if nl != "\n" {
return "", "", s, ErrHeader
}
// The first equals separates the key from the value.
- eq := strings.IndexByte(rec, '=')
- if eq == -1 {
+ k, v, ok = strings.Cut(rec, "=")
+ if !ok {
return "", "", s, ErrHeader
}
- k, v = rec[:eq], rec[eq+1:]
if !validPAXRecord(k, v) {
return "", "", s, ErrHeader
// for the PAX version of the USTAR string fields.
// The key must not contain an '=' character.
func validPAXRecord(k, v string) bool {
- if k == "" || strings.IndexByte(k, '=') >= 0 {
+ if k == "" || strings.Contains(k, "=") {
return false
}
switch k {
var blk block
copy(blk[:], b.Bytes())
prefix := string(blk.toUSTAR().prefix())
- if i := strings.IndexByte(prefix, 0); i >= 0 {
- prefix = prefix[:i] // Truncate at the NUL terminator
- }
+ prefix, _, _ = strings.Cut(prefix, "\x00") // Truncate at the NUL terminator
if blk.getFormat() == FormatGNU && len(prefix) > 0 && strings.HasPrefix(name, prefix) {
t.Errorf("test %d, found prefix in GNU format: %s", i, prefix)
}
}
binary.LittleEndian.PutUint32(sig[:], uint32(dataDescriptorSignature))
- if bytes.Index(b, sig[:]) != -1 {
+ if bytes.Contains(b, sig[:]) {
t.Error("there should be no data descriptor")
}
}
cmd.Stderr = os.Stderr
out, _ := cmd.Output()
for _, line := range strings.Split(string(out), "\n") {
- i := strings.Index(line, "\t")
- if i < 0 {
- continue
- }
- path, dir := line[:i], line[i+1:]
+ path, dir, _ := strings.Cut(line, "\t")
if dir != "" {
list = append(list, Dir{importPath: path, dir: dir, inModule: true})
}
recv = "(" + recv + ") "
}
fnc := pkg.oneLineNodeDepth(n.Type, depth)
- if strings.Index(fnc, "func") == 0 {
- fnc = fnc[4:]
- }
+ fnc = strings.TrimPrefix(fnc, "func")
return fmt.Sprintf("func %s%s%s", recv, name, fnc)
case *ast.TypeSpec:
if strings.HasPrefix(t, "[") || strings.HasPrefix(t, "map[") {
// Lazy: assume there are no nested [] in the array
// length or map key type.
- if i := strings.Index(t, "]"); i >= 0 {
- typeof[n] = t[i+1:]
+ if _, elem, ok := strings.Cut(t, "]"); ok {
+ typeof[n] = elem
}
}
t := expand(typeof[n])
if strings.HasPrefix(t, "[") { // array or slice
// Lazy: assume there are no nested [] in the array length.
- if i := strings.Index(t, "]"); i >= 0 {
- et := t[i+1:]
+ if _, et, ok := strings.Cut(t, "]"); ok {
for _, e := range n.Elts {
if kv, ok := e.(*ast.KeyValueExpr); ok {
e = kv.Value
}
if strings.HasPrefix(t, "map[") { // map
// Lazy: assume there are no nested [] in the map key type.
- if i := strings.Index(t, "]"); i >= 0 {
- kt, vt := t[4:i], t[i+1:]
+ if kt, vt, ok := strings.Cut(t[len("map["):], "]"); ok {
for _, e := range n.Elts {
if kv, ok := e.(*ast.KeyValueExpr); ok {
if typeof[kv.Key] == "" {
key, value = "int", "rune"
} else if strings.HasPrefix(t, "[") {
key = "int"
- if i := strings.Index(t, "]"); i >= 0 {
- value = t[i+1:]
- }
+ _, value, _ = strings.Cut(t, "]")
} else if strings.HasPrefix(t, "map[") {
- if i := strings.Index(t, "]"); i >= 0 {
- key, value = t[4:i], t[i+1:]
+ if k, v, ok := strings.Cut(t[len("map["):], "]"); ok {
+ key, value = k, v
}
}
changed := false
// Assume errmsg says "file:line: foo".
// Cut leading "file:line: " to avoid accidental matching of file name instead of message.
text := errmsg
- if i := strings.Index(text, " "); i >= 0 {
- text = text[i+1:]
+ if _, suffix, ok := strings.Cut(text, " "); ok {
+ text = suffix
}
if we.re.MatchString(text) {
matched = true
if line[0] == '[' {
line = line[1 : len(line)-1]
- parts := strings.SplitN(line, ",", 2)
+ curve, hash, _ := strings.Cut(line, ",")
- switch parts[0] {
+ switch curve {
case "P-224":
pub.Curve = elliptic.P224()
case "P-256":
pub.Curve = nil
}
- switch parts[1] {
+ switch hash {
case "SHA-1":
h = sha1.New()
case "SHA-224":
o.all = append(o.all, data...)
for {
- i := bytes.IndexByte(o.line, '\n')
- if i < 0 {
+ line, next, ok := bytes.Cut(o.line, []byte("\n"))
+ if !ok {
break
}
- if bytes.Equal([]byte(opensslEndOfHandshake), o.line[:i]) {
+ if bytes.Equal([]byte(opensslEndOfHandshake), line) {
o.handshakeComplete <- struct{}{}
}
- if bytes.Equal([]byte(opensslReadKeyUpdate), o.line[:i]) {
+ if bytes.Equal([]byte(opensslReadKeyUpdate), line) {
o.readKeyUpdate <- struct{}{}
}
- o.line = o.line[i+1:]
+ o.line = next
}
return len(data), nil
// Otherwise the line is a line of hex dump that looks like:
// 00000170 fc f5 06 bf (...) |.....X{&?......!|
// (Some bytes have been omitted from the middle section.)
-
- if i := strings.IndexByte(line, ' '); i >= 0 {
- line = line[i:]
- } else {
+ _, after, ok := strings.Cut(line, " ")
+ if !ok {
return nil, errors.New("invalid test data")
}
+ line = after
- if i := strings.IndexByte(line, '|'); i >= 0 {
- line = line[:i]
- } else {
+ before, _, ok := strings.Cut(line, "|")
+ if !ok {
return nil, errors.New("invalid test data")
}
+ line = before
hexBytes := strings.Fields(line)
for _, hexByte := range hexBytes {
return nil, errors.New("x509: no DEK-Info header in block")
}
- idx := strings.Index(dek, ",")
- if idx == -1 {
+ mode, hexIV, ok := strings.Cut(dek, ",")
+ if !ok {
return nil, errors.New("x509: malformed DEK-Info header")
}
- mode, hexIV := dek[:idx], dek[idx+1:]
ciph := cipherByName(mode)
if ciph == nil {
return nil, errors.New("x509: unknown encryption mode")
func parseFieldParameters(str string) (ret fieldParameters) {
var part string
for len(str) > 0 {
- // This loop uses IndexByte and explicit slicing
- // instead of strings.Split(str, ",") to reduce allocations.
- i := strings.IndexByte(str, ',')
- if i < 0 {
- part, str = str, ""
- } else {
- part, str = str[:i], str[i+1:]
- }
+ part, str, _ = strings.Cut(str, ",")
switch {
case part == "optional":
ret.optional = true
// parseTag splits a struct field's json tag into its name and
// comma-separated options.
func parseTag(tag string) (string, tagOptions) {
- if idx := strings.Index(tag, ","); idx != -1 {
- return tag[:idx], tagOptions(tag[idx+1:])
- }
- return tag, tagOptions("")
+ tag, opt, _ := strings.Cut(tag, ",")
+ return tag, tagOptions(opt)
}
// Contains reports whether a comma-separated list of options
}
s := string(o)
for s != "" {
- var next string
- i := strings.Index(s, ",")
- if i >= 0 {
- s, next = s[:i], s[i+1:]
- }
- if s == optionName {
+ var name string
+ name, s, _ = strings.Cut(s, ",")
+ if name == optionName {
return true
}
- s = next
}
return false
}
var pemStart = []byte("\n-----BEGIN ")
var pemEnd = []byte("\n-----END ")
var pemEndOfLine = []byte("-----")
+var colon = []byte(":")
// Decode will find the next PEM formatted block (certificate, private key
// etc) in the input. It returns that block and the remainder of the input. If
rest = data
if bytes.HasPrefix(data, pemStart[1:]) {
rest = rest[len(pemStart)-1 : len(data)]
- } else if i := bytes.Index(data, pemStart); i >= 0 {
- rest = rest[i+len(pemStart) : len(data)]
+ } else if _, after, ok := bytes.Cut(data, pemStart); ok {
+ rest = after
} else {
return nil, data
}
}
line, next := getLine(rest)
- i := bytes.IndexByte(line, ':')
- if i == -1 {
+ key, val, ok := bytes.Cut(line, colon)
+ if !ok {
break
}
// TODO(agl): need to cope with values that spread across lines.
- key, val := line[:i], line[i+1:]
key = bytes.TrimSpace(key)
val = bytes.TrimSpace(val)
p.Headers[string(key)] = string(val)
// Split the tag from the xml namespace if necessary.
tag := f.Tag.Get("xml")
- if i := strings.Index(tag, " "); i >= 0 {
- finfo.xmlns, tag = tag[:i], tag[i+1:]
+ if ns, t, ok := strings.Cut(tag, " "); ok {
+ finfo.xmlns, tag = ns, t
}
// Parse flags.
}
if strings.Count(s, ":") > 1 {
name.Local = s
- } else if i := strings.Index(s, ":"); i < 1 || i > len(s)-2 {
+ } else if space, local, ok := strings.Cut(s, ":"); !ok || space == "" || local == "" {
name.Local = s
} else {
- name.Space = s[0:i]
- name.Local = s[i+1:]
+ name.Space = space
+ name.Local = local
}
return name, true
}
if _, err := w.Write(cdataStart); err != nil {
return err
}
+
for {
- i := bytes.Index(s, cdataEnd)
- if i >= 0 && i+len(cdataEnd) <= len(s) {
- // Found a nested CDATA directive end.
- if _, err := w.Write(s[:i]); err != nil {
- return err
- }
- if _, err := w.Write(cdataEscape); err != nil {
- return err
- }
- i += len(cdataEnd)
- } else {
- if _, err := w.Write(s); err != nil {
- return err
- }
+ before, after, ok := bytes.Cut(s, cdataEnd)
+ if !ok {
break
}
- s = s[i:]
+ // Found a nested CDATA directive end.
+ if _, err := w.Write(before); err != nil {
+ return err
+ }
+ if _, err := w.Write(cdataEscape); err != nil {
+ return err
+ }
+ s = after
+ }
+
+ if _, err := w.Write(s); err != nil {
+ return err
}
+
_, err := w.Write(cdataEnd)
return err
}
// TODO: this parsing is somewhat lame and not exact.
// It works for all actual cases, though.
param = param + "="
- idx := strings.Index(s, param)
- if idx == -1 {
- return ""
- }
- v := s[idx+len(param):]
+ _, v, _ := strings.Cut(s, param)
if v == "" {
return ""
}
if v[0] != '\'' && v[0] != '"' {
return ""
}
- idx = strings.IndexRune(v[1:], rune(v[0]))
- if idx == -1 {
+ unquote, _, ok := strings.Cut(v[1:], v[:1])
+ if !ok {
return ""
}
- return v[1 : idx+1]
+ return unquote
}
var comment []byte
switch {
case bytes.HasPrefix(data, slashSlash):
- i := bytes.Index(data, newline)
- if i < 0 {
- i = len(data)
- }
- comment = data[2:i]
+ comment, _, _ = bytes.Cut(data[2:], newline)
case bytes.HasPrefix(data, slashStar):
- data = data[2:]
- i := bytes.Index(data, starSlash)
- if i < 0 {
+ var ok bool
+ comment, _, ok = bytes.Cut(data[2:], starSlash)
+ if !ok {
// malformed comment
return "", 0
}
- comment = data[:i]
if bytes.Contains(comment, newline) {
return "", 0
}
}
// Split at colon.
- line = strings.TrimSpace(line[4:])
- i := strings.Index(line, ":")
- if i < 0 {
+ line, argstr, ok := strings.Cut(strings.TrimSpace(line[4:]), ":")
+ if !ok {
return fmt.Errorf("%s: invalid #cgo line: %s", filename, orig)
}
- line, argstr := line[:i], line[i+1:]
// Parse GOOS/GOARCH stuff.
f := strings.Fields(line)
if err != nil {
return fmt.Errorf("%s: invalid #cgo line: %s", filename, orig)
}
- var ok bool
for i, arg := range args {
if arg, ok = expandSrcDir(arg, di.Dir); !ok {
return fmt.Errorf("%s: malformed #cgo argument: %s", filename, arg)
// if GOOS=illumos, then files with GOOS=solaris are also matched.
// if GOOS=ios, then files with GOOS=darwin are also matched.
func (ctxt *Context) goodOSArchFile(name string, allTags map[string]bool) bool {
- if dot := strings.Index(name, "."); dot != -1 {
- name = name[:dot]
- }
+ name, _, _ = strings.Cut(name, ".")
// Before Go 1.4, a file called "linux.go" would be equivalent to having a
// build tag "linux" in that file. For Go 1.4 and beyond, we require this
// Also don't count instances in suggested "go get" or similar commands
// (see https://golang.org/issue/41576). The suggested command typically
// follows a semicolon.
- errStr = strings.SplitN(errStr, ";", 2)[0]
+ errStr, _, _ = strings.Cut(errStr, ";")
if n := strings.Count(errStr, pkgPath); n != 1 {
t.Fatalf("package path %q appears in error %d times; should appear once\nerror: %v", pkgPath, n, err)
trimBytes(i)
case '`':
- i := strings.Index(args[1:], "`")
- if i < 0 {
+ var ok bool
+ path, _, ok = strings.Cut(args[1:], "`")
+ if !ok {
return nil, fmt.Errorf("invalid quoted string in //go:embed: %s", args)
}
- path = args[1 : 1+i]
- trimBytes(1 + i + 1)
+ trimBytes(1 + len(path) + 1)
case '"':
i := 1
func testRead(t *testing.T, tests []readTest, read func(io.Reader) ([]byte, error)) {
for i, tt := range tests {
- var in, testOut string
- j := strings.Index(tt.in, "ℙ")
- if j < 0 {
- in = tt.in
- testOut = tt.in
- } else {
- in = tt.in[:j] + tt.in[j+len("ℙ"):]
- testOut = tt.in[:j]
- }
- d := strings.Index(tt.in, "𝔻")
- if d >= 0 {
- in = in[:d] + in[d+len("𝔻"):]
- testOut = testOut[d+len("𝔻"):]
+ beforeP, afterP, _ := strings.Cut(tt.in, "ℙ")
+ in := beforeP + afterP
+ testOut := beforeP
+
+ if beforeD, afterD, ok := strings.Cut(beforeP, "𝔻"); ok {
+ in = beforeD + afterD + afterP
+ testOut = afterD
}
+
r := strings.NewReader(in)
buf, err := read(r)
if err != nil {
if a[1] == "?" {
y = MakeUnknown()
} else {
- if i := strings.Index(a[1], "/"); i >= 0 && kind == token.FLOAT {
- n := MakeFromLiteral(a[1][:i], token.INT, 0)
- d := MakeFromLiteral(a[1][i+1:], token.INT, 0)
+ if ns, ds, ok := strings.Cut(a[1], "/"); ok && kind == token.FLOAT {
+ n := MakeFromLiteral(ns, token.INT, 0)
+ d := MakeFromLiteral(ds, token.INT, 0)
y = BinaryOp(n, token.QUO, d)
} else {
y = MakeFromLiteral(a[1], kind, 0)
return MakeBool(false)
}
- if i := strings.IndexByte(lit, '/'); i >= 0 {
+ if as, bs, ok := strings.Cut(lit, "/"); ok {
// assume fraction
- a := MakeFromLiteral(lit[:i], token.INT, 0)
- b := MakeFromLiteral(lit[i+1:], token.INT, 0)
+ a := MakeFromLiteral(as, token.INT, 0)
+ b := MakeFromLiteral(bs, token.INT, 0)
return BinaryOp(a, token.QUO, b)
}
// allow "'" for possessive "'s" only
for b := line; ; {
- i := strings.IndexRune(b, '\'')
- if i < 0 {
+ var ok bool
+ if _, b, ok = strings.Cut(b, "'"); !ok {
break
}
- if i+1 >= len(b) || b[i+1] != 's' || (i+2 < len(b) && b[i+2] != ' ') {
- return "" // not followed by "s "
+ if b != "s" && !strings.HasPrefix(b, "s ") {
+ return "" // ' not followed by s and then end-of-word
}
- b = b[i+2:]
}
// allow "." when followed by non-space
for b := line; ; {
- i := strings.IndexRune(b, '.')
- if i < 0 {
+ var ok bool
+ if _, b, ok = strings.Cut(b, "."); !ok {
break
}
- if i+1 >= len(b) || b[i+1] == ' ' {
+ if b == "" || strings.HasPrefix(b, " ") {
return "" // not followed by non-space
}
- b = b[i+1:]
}
return line
"go/doc"
"go/parser"
"go/token"
- "internal/lazyregexp"
"io/fs"
"os"
"path/filepath"
+ "regexp"
"runtime"
"strings"
)
)
// ToHTML in comment.go assigns a (possibly blank) ID to each heading
-var html_h = lazyregexp.New(`<h3 id="[^"]*">`)
+var html_h = regexp.MustCompile(`<h3 id="[^"]*">`)
const html_endh = "</h3>\n"
func appendHeadings(list []string, comment string) []string {
var buf bytes.Buffer
doc.ToHTML(&buf, comment, nil)
- for s := buf.String(); ; {
+ for s := buf.String(); s != ""; {
loc := html_h.FindStringIndex(s)
if len(loc) == 0 {
break
}
- i := loc[1]
- j := strings.Index(s, html_endh)
- if j < 0 {
- list = append(list, s[i:]) // incorrect HTML
- break
- }
- list = append(list, s[i:j])
- s = s[j+len(html_endh):]
+ var inner string
+ inner, s, _ = strings.Cut(s[loc[1]:], html_endh)
+ list = append(list, inner)
}
return list
}
t.Fatalf("go list %s: %v\n%s", thePackage, err, out)
}
target := strings.TrimSpace(string(out))
- i := strings.Index(target, ":")
- compiler, target := target[:i], target[i+1:]
+ compiler, target, _ := strings.Cut(target, ":")
if !strings.HasSuffix(target, ".a") {
t.Fatalf("unexpected package %s target %q (not *.a)", thePackage, target)
}
break
}
// remove leading 0's from integer (but not floating-point) imaginary literals
- if x[len(x)-1] == 'i' && strings.IndexByte(x, '.') < 0 && strings.IndexByte(x, 'e') < 0 {
+ if x[len(x)-1] == 'i' && !strings.ContainsAny(x, ".e") {
x = strings.TrimLeft(x, "0_")
if x == "i" {
x = "0i"
* Check for vertical "line of stars" and correct prefix accordingly.
*/
lineOfStars := false
- if i := strings.Index(prefix, "*"); i >= 0 {
- // Line of stars present.
- if i > 0 && prefix[i-1] == ' ' {
- i-- // remove trailing blank from prefix so stars remain aligned
- }
- prefix = prefix[0:i]
+ if p, _, ok := strings.Cut(prefix, "*"); ok {
+ // remove trailing blank from prefix so stars remain aligned
+ prefix = strings.TrimSuffix(p, " ")
lineOfStars = true
} else {
// No line of stars present.
// lines.
last := lines[len(lines)-1]
closing := "*/"
- i := strings.Index(last, closing) // i >= 0 (closing is always present)
- if isBlank(last[0:i]) {
+ before, _, _ := strings.Cut(last, closing) // closing always present
+ if isBlank(before) {
// last line only contains closing */
if lineOfStars {
closing = " */" // add blank to align final star
}
}
-// split splits string s at the first occurrence of s.
+// split splits string s at the first occurrence of sep, trimming spaces.
func split(s, sep string) (string, string) {
- i := strings.Index(s, sep)
- return strings.TrimSpace(s[:i]), strings.TrimSpace(s[i+len(sep):])
+ before, after, _ := strings.Cut(s, sep)
+ return strings.TrimSpace(before), strings.TrimSpace(after)
}
func TestCheckExpr(t *testing.T) {
// widely applied.
// Treat data-action as URL below.
name = name[5:]
- } else if colon := strings.IndexRune(name, ':'); colon != -1 {
- if name[:colon] == "xmlns" {
+ } else if prefix, short, ok := strings.Cut(name, ":"); ok {
+ if prefix == "xmlns" {
return contentTypeURL
}
// Treat svg:href and xlink:href as href below.
- name = name[colon+1:]
+ name = short
}
if t, ok := attrTypeMap[name]; ok {
return t
// https://tools.ietf.org/html/rfc4329#section-3
// https://www.ietf.org/rfc/rfc4627.txt
// discard parameters
- if i := strings.Index(mimeType, ";"); i >= 0 {
- mimeType = mimeType[:i]
- }
+ mimeType, _, _ = strings.Cut(mimeType, ";")
mimeType = strings.ToLower(mimeType)
mimeType = strings.TrimSpace(mimeType)
switch mimeType {
// isSafeURL is true if s is a relative URL or if URL has a protocol in
// (http, https, mailto).
func isSafeURL(s string) bool {
- if i := strings.IndexRune(s, ':'); i >= 0 && !strings.ContainsRune(s[:i], '/') {
-
- protocol := s[:i]
+ if protocol, _, ok := strings.Cut(s, ":"); ok && !strings.Contains(protocol, "/") {
if !strings.EqualFold(protocol, "http") && !strings.EqualFold(protocol, "https") && !strings.EqualFold(protocol, "mailto") {
return false
}
break
}
- attr := strings.SplitN(l, delimiter, 2)
- if len(attr) != 2 {
+ key, val, ok := strings.Cut(l, delimiter)
+ if !ok {
break
}
- key, val := strings.TrimSpace(attr[0]), strings.TrimSpace(attr[1])
+ key, val = strings.TrimSpace(key), strings.TrimSpace(val)
var err error
switch key {
case "cycles/second":
if err == errUnrecognized {
// Recognize assignments of the form: attr=value, and replace
// $attr with value on subsequent mappings.
- if attr := strings.SplitN(l, delimiter, 2); len(attr) == 2 {
- attrs = append(attrs, "$"+strings.TrimSpace(attr[0]), strings.TrimSpace(attr[1]))
+ if attr, value, ok := strings.Cut(l, delimiter); ok {
+ attrs = append(attrs, "$"+strings.TrimSpace(attr), strings.TrimSpace(value))
r = strings.NewReplacer(attrs...)
}
// Ignore any unrecognized entries
}
word = word[2 : len(word)-2]
- // split delimits the first 2 fields
- split := strings.IndexByte(word, '?')
-
- // split word "UTF-8?q?ascii" into "UTF-8", 'q', and "ascii"
- charset := word[:split]
- if len(charset) == 0 {
- return "", errInvalidWord
- }
- if len(word) < split+3 {
+ // split word "UTF-8?q?text" into "UTF-8", 'q', and "text"
+ charset, text, _ := strings.Cut(word, "?")
+ if charset == "" {
return "", errInvalidWord
}
- encoding := word[split+1]
- // the field after split must only be one byte
- if word[split+2] != '?' {
+ encoding, text, _ := strings.Cut(text, "?")
+ if len(encoding) != 1 {
return "", errInvalidWord
}
- text := word[split+3:]
- content, err := decode(encoding, text)
+ content, err := decode(encoding[0], text)
if err != nil {
return "", err
}
var buf strings.Builder
-
if err := d.convert(&buf, charset, content); err != nil {
return "", err
}
-
return buf.String(), nil
}
// FormatMediaType returns the empty string.
func FormatMediaType(t string, param map[string]string) string {
var b strings.Builder
- if slash := strings.IndexByte(t, '/'); slash == -1 {
+ if major, sub, ok := strings.Cut(t, "/"); !ok {
if !isToken(t) {
return ""
}
b.WriteString(strings.ToLower(t))
} else {
- major, sub := t[:slash], t[slash+1:]
if !isToken(major) || !isToken(sub) {
return ""
}
// The returned map, params, maps from the lowercase
// attribute to the attribute value with its case preserved.
func ParseMediaType(v string) (mediatype string, params map[string]string, err error) {
- i := strings.Index(v, ";")
- if i == -1 {
- i = len(v)
- }
- mediatype = strings.TrimSpace(strings.ToLower(v[0:i]))
+ base, _, _ := strings.Cut(v, ";")
+ mediatype = strings.TrimSpace(strings.ToLower(base))
err = checkMediaTypeDisposition(mediatype)
if err != nil {
// Lazily initialized.
var continuation map[string]map[string]string
- v = v[i:]
+ v = v[len(base):]
for len(v) > 0 {
v = strings.TrimLeftFunc(v, unicode.IsSpace)
if len(v) == 0 {
}
pmap := params
- if idx := strings.Index(key, "*"); idx != -1 {
- baseName := key[:idx]
+ if baseName, _, ok := strings.Cut(key, "*"); ok {
if continuation == nil {
continuation = make(map[string]map[string]string)
}
func envMap(env []string) map[string]string {
m := make(map[string]string)
for _, kv := range env {
- if idx := strings.Index(kv, "="); idx != -1 {
- m[kv[:idx]] = kv[idx+1:]
+ if k, v, ok := strings.Cut(kv, "="); ok {
+ m[k] = v
}
}
return m
break
}
headerLines++
- parts := strings.SplitN(string(line), ":", 2)
- if len(parts) < 2 {
+ header, val, ok := strings.Cut(string(line), ":")
+ if !ok {
h.printf("cgi: bogus header line: %s", string(line))
continue
}
- header, val := parts[0], parts[1]
if !httpguts.ValidHeaderFieldName(header) {
h.printf("cgi: invalid header name: %q", header)
continue
}
linesRead++
trimmedLine := strings.TrimRight(line, "\r\n")
- split := strings.SplitN(trimmedLine, "=", 2)
- if len(split) != 2 {
- t.Fatalf("Unexpected %d parts from invalid line number %v: %q; existing map=%v",
- len(split), linesRead, line, m)
+ k, v, ok := strings.Cut(trimmedLine, "=")
+ if !ok {
+ t.Fatalf("Unexpected response from invalid line number %v: %q; existing map=%v",
+ linesRead, line, m)
}
- m[split[0]] = split[1]
+ m[k] = v
}
for key, expected := range expectedMap {
if v := urlQuery.Get("code"); v != "" {
location := ts.URL
if final := urlQuery.Get("next"); final != "" {
- splits := strings.Split(final, ",")
- first, rest := splits[0], splits[1:]
+ first, rest, _ := strings.Cut(final, ",")
location = fmt.Sprintf("%s?code=%s", location, first)
- if len(rest) > 0 {
- location = fmt.Sprintf("%s&next=%s", location, strings.Join(rest, ","))
+ if rest != "" {
+ location = fmt.Sprintf("%s&next=%s", location, rest)
}
}
code, _ := strconv.Atoi(v)
continue
}
parts[0] = textproto.TrimString(parts[0])
- j := strings.Index(parts[0], "=")
- if j < 0 {
+ name, value, ok := strings.Cut(parts[0], "=")
+ if !ok {
continue
}
- name, value := parts[0][:j], parts[0][j+1:]
if !isCookieNameValid(name) {
continue
}
- value, ok := parseCookieValue(value, true)
+ value, ok = parseCookieValue(value, true)
if !ok {
continue
}
continue
}
- attr, val := parts[i], ""
- if j := strings.Index(attr, "="); j >= 0 {
- attr, val = attr[:j], attr[j+1:]
- }
+ attr, val, _ := strings.Cut(parts[i], "=")
lowerAttr, isASCII := ascii.ToLower(attr)
if !isASCII {
continue
var part string
for len(line) > 0 { // continue since we have rest
- if splitIndex := strings.Index(line, ";"); splitIndex > 0 {
- part, line = line[:splitIndex], line[splitIndex+1:]
- } else {
- part, line = line, ""
- }
+ part, line, _ = strings.Cut(line, ";")
part = textproto.TrimString(part)
- if len(part) == 0 {
+ if part == "" {
continue
}
- name, val := part, ""
- if j := strings.Index(part, "="); j >= 0 {
- name, val = name[:j], name[j+1:]
- }
+ name, val, _ := strings.Cut(part, "=")
if !isCookieNameValid(name) {
continue
}
if len(v) == 0 {
return v
}
- if strings.IndexByte(v, ' ') >= 0 || strings.IndexByte(v, ',') >= 0 {
+ if strings.ContainsAny(v, " ,") {
return `"` + v + `"`
}
return v
if ra == "" {
continue
}
- i := strings.Index(ra, "-")
- if i < 0 {
+ start, end, ok := strings.Cut(ra, "-")
+ if !ok {
return nil, errors.New("invalid range")
}
- start, end := textproto.TrimString(ra[:i]), textproto.TrimString(ra[i+1:])
+ start, end = textproto.TrimString(start), textproto.TrimString(end)
var r httpRange
if start == "" {
// If no start is specified, end specifies the
return b == ' ' || b == '\t' || b == '\n' || b == '\r'
}
+var semi = []byte(";")
+
// removeChunkExtension removes any chunk-extension from p.
// For example,
// "0" => "0"
// "0;token=val" => "0"
// `0;token="quoted string"` => "0"
func removeChunkExtension(p []byte) ([]byte, error) {
- semi := bytes.IndexByte(p, ';')
- if semi == -1 {
- return p, nil
- }
+ p, _, _ = bytes.Cut(p, semi)
// TODO: care about exact syntax of chunk extensions? We're
// ignoring and stripping them anyway. For now just never
// return an error.
- return p[:semi], nil
+ return p, nil
}
// NewChunkedWriter returns a new chunkedWriter that translates writes into HTTP
buf := make([]byte, 2<<20)
buf = buf[:runtime.Stack(buf, true)]
for _, g := range strings.Split(string(buf), "\n\n") {
- sl := strings.SplitN(g, "\n", 2)
- if len(sl) != 2 {
- continue
- }
- stack := strings.TrimSpace(sl[1])
+ _, stack, _ := strings.Cut(g, "\n")
+ stack = strings.TrimSpace(stack)
if stack == "" ||
strings.Contains(stack, "testing.(*M).before.func1") ||
strings.Contains(stack, "os/signal.signal_recv") ||
func (r *Request) BasicAuth() (username, password string, ok bool) {
auth := r.Header.Get("Authorization")
if auth == "" {
- return
+ return "", "", false
}
return parseBasicAuth(auth)
}
const prefix = "Basic "
// Case insensitive prefix match. See Issue 22736.
if len(auth) < len(prefix) || !ascii.EqualFold(auth[:len(prefix)], prefix) {
- return
+ return "", "", false
}
c, err := base64.StdEncoding.DecodeString(auth[len(prefix):])
if err != nil {
- return
+ return "", "", false
}
cs := string(c)
- s := strings.IndexByte(cs, ':')
- if s < 0 {
- return
+ username, password, ok = strings.Cut(cs, ":")
+ if !ok {
+ return "", "", false
}
- return cs[:s], cs[s+1:], true
+ return username, password, true
}
// SetBasicAuth sets the request's Authorization header to use HTTP
// parseRequestLine parses "GET /foo HTTP/1.1" into its three parts.
func parseRequestLine(line string) (method, requestURI, proto string, ok bool) {
- s1 := strings.Index(line, " ")
- s2 := strings.Index(line[s1+1:], " ")
- if s1 < 0 || s2 < 0 {
- return
+ method, rest, ok1 := strings.Cut(line, " ")
+ requestURI, proto, ok2 := strings.Cut(rest, " ")
+ if !ok1 || !ok2 {
+ return "", "", "", false
}
- s2 += s1 + 1
- return line[:s1], line[s1+1 : s2], line[s2+1:], true
+ return method, requestURI, proto, true
}
var textprotoReaderPool sync.Pool
}
return nil, err
}
- if i := strings.IndexByte(line, ' '); i == -1 {
+ proto, status, ok := strings.Cut(line, " ")
+ if !ok {
return nil, badStringError("malformed HTTP response", line)
- } else {
- resp.Proto = line[:i]
- resp.Status = strings.TrimLeft(line[i+1:], " ")
- }
- statusCode := resp.Status
- if i := strings.IndexByte(resp.Status, ' '); i != -1 {
- statusCode = resp.Status[:i]
}
+ resp.Proto = proto
+ resp.Status = strings.TrimLeft(status, " ")
+
+ statusCode, _, _ := strings.Cut(resp.Status, " ")
if len(statusCode) != 3 {
return nil, badStringError("malformed HTTP status code", statusCode)
}
if err != nil || resp.StatusCode < 0 {
return nil, badStringError("malformed HTTP status code", statusCode)
}
- var ok bool
if resp.ProtoMajor, resp.ProtoMinor, ok = ParseHTTPVersion(resp.Proto); !ok {
return nil, badStringError("malformed HTTP version", resp.Proto)
}
// stripHostPort returns h without any trailing ":<port>".
func stripHostPort(h string) string {
// If no port on host, return unchanged
- if strings.IndexByte(h, ':') == -1 {
+ if !strings.Contains(h, ":") {
return h
}
host, _, err := net.SplitHostPort(h)
return nil, err
}
if resp.StatusCode != 200 {
- f := strings.SplitN(resp.Status, " ", 2)
+ _, text, ok := strings.Cut(resp.Status, " ")
conn.Close()
- if len(f) < 2 {
+ if !ok {
return nil, errors.New("unknown status code")
}
- return nil, errors.New(f[1])
+ return nil, errors.New(text)
}
}
dateLayoutsBuildOnce.Do(buildDateLayouts)
// CR and LF must match and are tolerated anywhere in the date field.
date = strings.ReplaceAll(date, "\r\n", "")
- if strings.Index(date, "\r") != -1 {
+ if strings.Contains(date, "\r") {
return time.Time{}, errors.New("mail: header has a CR without LF")
}
// Re-using some addrParser methods which support obsolete text, i.e. non-printable ASCII
}
func disableSocketConnect(network string) {
- ss := strings.Split(network, ":")
+ net, _, _ := strings.Cut(network, ":")
sw.Set(socktest.FilterConnect, func(so *socktest.Status) (socktest.AfterFilter, error) {
- switch ss[0] {
+ switch net {
case "tcp4":
if so.Cookie.Family() == syscall.AF_INET && so.Cookie.Type() == syscall.SOCK_STREAM {
return nil, syscall.EHOSTUNREACH
b := make([]byte, 2<<20)
b = b[:runtime.Stack(b, true)]
for _, s := range strings.Split(string(b), "\n\n") {
- ss := strings.SplitN(s, "\n", 2)
- if len(ss) != 2 {
- continue
- }
- stack := strings.TrimSpace(ss[1])
+ _, stack, _ := strings.Cut(s, "\n")
+ stack = strings.TrimSpace(stack)
if !strings.Contains(stack, "created by net") {
continue
}
// testableNetwork reports whether network is testable on the current
// platform configuration.
func testableNetwork(network string) bool {
- ss := strings.Split(network, ":")
- switch ss[0] {
+ net, _, _ := strings.Cut(network, ":")
+ switch net {
case "ip+nopriv":
case "ip", "ip4", "ip6":
switch runtime.GOOS {
}
}
}
- switch ss[0] {
+ switch net {
case "tcp4", "udp4", "ip4":
if !supportsIPv4() {
return false
// testableAddress reports whether address of network is testable on
// the current platform configuration.
func testableAddress(network, address string) bool {
- switch ss := strings.Split(network, ":"); ss[0] {
+ switch net, _, _ := strings.Cut(network, ":"); net {
case "unix", "unixgram", "unixpacket":
// Abstract unix domain sockets, a Linux-ism.
if address[0] == '@' && runtime.GOOS != "linux" {
var err error
var addr Addr
- switch ss := strings.Split(network, ":"); ss[0] {
+ switch net, _, _ := strings.Cut(network, ":"); net {
case "tcp", "tcp4", "tcp6":
addr, err = ResolveTCPAddr("tcp", address)
case "udp", "udp4", "udp6":
if len(extList) > 1 {
extList = extList[1:]
for _, line := range extList {
- args := strings.SplitN(line, " ", 2)
- if len(args) > 1 {
- ext[args[0]] = args[1]
- } else {
- ext[args[0]] = ""
- }
+ k, v, _ := strings.Cut(line, " ")
+ ext[k] = v
}
}
if mechs, ok := ext["AUTH"]; ok {
return v, err
}
+var colon = []byte(":")
+
// ReadMIMEHeader reads a MIME-style header from r.
// The header is a sequence of possibly continued Key: Value lines
// ending in a blank line.
}
// Key ends at first colon.
- i := bytes.IndexByte(kv, ':')
- if i < 0 {
+ k, v, ok := bytes.Cut(kv, colon)
+ if !ok {
return m, ProtocolError("malformed MIME header line: " + string(kv))
}
- key := canonicalMIMEHeaderKey(kv[:i])
+ key := canonicalMIMEHeaderKey(k)
// As per RFC 7230 field-name is a token, tokens consist of one or more chars.
// We could return a ProtocolError here, but better to be liberal in what we
}
// Skip initial spaces in value.
- i++ // skip colon
- for i < len(kv) && (kv[i] == ' ' || kv[i] == '\t') {
- i++
- }
- value := string(kv[i:])
+ value := strings.TrimLeft(string(v), " \t")
vv := m[key]
if vv == nil && len(strs) > 0 {
return nil
}
+var nl = []byte("\n")
+
// upcomingHeaderNewlines returns an approximation of the number of newlines
// that will be in this header. If it gets confused, it returns 0.
func (r *Reader) upcomingHeaderNewlines() (n int) {
return
}
peek, _ := r.R.Peek(s)
- for len(peek) > 0 {
- i := bytes.IndexByte(peek, '\n')
- if i < 3 {
- // Not present (-1) or found within the next few bytes,
- // implying we're at the end ("\r\n\r\n" or "\n\n")
- return
- }
- n++
- peek = peek[i+1:]
- }
- return
+ return bytes.Count(peek, nl)
}
// CanonicalMIMEHeaderKey returns the canonical format of the
return "", rawURL, nil
}
-// split slices s into two substrings separated by the first occurrence of
-// sep. If cutc is true then sep is excluded from the second substring.
-// If sep does not occur in s then s and the empty string is returned.
-func split(s string, sep byte, cutc bool) (string, string) {
- i := strings.IndexByte(s, sep)
- if i < 0 {
- return s, ""
- }
- if cutc {
- return s[:i], s[i+1:]
- }
- return s[:i], s[i:]
-}
-
// Parse parses a raw url into a URL structure.
//
// The url may be relative (a path, without a host) or absolute
// error, due to parsing ambiguities.
func Parse(rawURL string) (*URL, error) {
// Cut off #frag
- u, frag := split(rawURL, '#', true)
+ u, frag, _ := strings.Cut(rawURL, "#")
url, err := parse(u, false)
if err != nil {
return nil, &Error{"parse", u, err}
url.ForceQuery = true
rest = rest[:len(rest)-1]
} else {
- rest, url.RawQuery = split(rest, '?', true)
+ rest, url.RawQuery, _ = strings.Cut(rest, "?")
}
if !strings.HasPrefix(rest, "/") {
// RFC 3986, §3.3:
// In addition, a URI reference (Section 4.1) may be a relative-path reference,
// in which case the first path segment cannot contain a colon (":") character.
- colon := strings.Index(rest, ":")
- slash := strings.Index(rest, "/")
- if colon >= 0 && (slash < 0 || colon < slash) {
+ if segment, _, _ := strings.Cut(rest, "/"); strings.Contains(segment, ":") {
// First path segment has colon. Not allowed in relative URL.
return nil, errors.New("first path segment in URL cannot contain colon")
}
if (url.Scheme != "" || !viaRequest && !strings.HasPrefix(rest, "///")) && strings.HasPrefix(rest, "//") {
var authority string
- authority, rest = split(rest[2:], '/', false)
+ authority, rest = rest[2:], ""
+ if i := strings.Index(authority, "/"); i >= 0 {
+ authority, rest = authority[:i], authority[i:]
+ }
url.User, url.Host, err = parseAuthority(authority)
if err != nil {
return nil, err
}
user = User(userinfo)
} else {
- username, password := split(userinfo, ':', true)
+ username, password, _ := strings.Cut(userinfo, ":")
if username, err = unescape(username, encodeUserPassword); err != nil {
return nil, "", err
}
// it would be mistaken for a scheme name. Such a segment must be
// preceded by a dot-segment (e.g., "./this:that") to make a relative-
// path reference.
- if i := strings.IndexByte(path, ':'); i > -1 && strings.IndexByte(path[:i], '/') == -1 {
+ if segment, _, _ := strings.Cut(path, "/"); strings.Contains(segment, ":") {
buf.WriteString("./")
}
}
func parseQuery(m Values, query string) (err error) {
for query != "" {
- key := query
- if i := strings.IndexAny(key, "&"); i >= 0 {
- key, query = key[:i], key[i+1:]
- } else {
- query = ""
- }
+ var key string
+ key, query, _ = strings.Cut(query, "&")
if strings.Contains(key, ";") {
err = fmt.Errorf("invalid semicolon separator in query")
continue
if key == "" {
continue
}
- value := ""
- if i := strings.Index(key, "="); i >= 0 {
- key, value = key[:i], key[i+1:]
- }
+ key, value, _ := strings.Cut(key, "=")
key, err1 := QueryUnescape(key)
if err1 != nil {
if err == nil {
}
var (
- last string
elem string
- i int
dst strings.Builder
)
first := true
remaining := full
// We want to return a leading '/', so write it now.
dst.WriteByte('/')
- for i >= 0 {
- i = strings.IndexByte(remaining, '/')
- if i < 0 {
- last, elem, remaining = remaining, remaining, ""
- } else {
- elem, remaining = remaining[:i], remaining[i+1:]
- }
+ found := true
+ for found {
+ elem, remaining, found = strings.Cut(remaining, "/")
if elem == "." {
first = false
// drop
}
}
- if last == "." || last == ".." {
+ if elem == "." || elem == ".." {
dst.WriteByte('/')
}
})
}
}
-
-var sink string
-
-func BenchmarkSplit(b *testing.B) {
- url := "http://www.google.com/?q=go+language#foo%26bar"
- for i := 0; i < b.N; i++ {
- sink, sink = split(url, '#', true)
- }
-}
out := make([]string, 0, len(env))
saw := make(map[string]int, len(env)) // key => index into out
for _, kv := range env {
- eq := strings.Index(kv, "=")
- if eq < 0 {
+ k, _, ok := strings.Cut(kv, "=")
+ if !ok {
out = append(out, kv)
continue
}
- k := kv[:eq]
if caseInsensitive {
k = strings.ToLower(k)
}
return env
}
for _, kv := range env {
- eq := strings.Index(kv, "=")
- if eq < 0 {
+ k, _, ok := strings.Cut(kv, "=")
+ if !ok {
continue
}
- k := kv[:eq]
if strings.EqualFold(k, "SYSTEMROOT") {
// We already have it.
return env
if _, ok := err.(*exec.ExitError); !ok {
t.Errorf("expected *exec.ExitError from cat combined; got %T: %v", err, err)
}
- s := string(bs)
- sp := strings.SplitN(s, "\n", 2)
- if len(sp) != 2 {
- t.Fatalf("expected two lines from cat; got %q", s)
+ errLine, body, ok := strings.Cut(string(bs), "\n")
+ if !ok {
+ t.Fatalf("expected two lines from cat; got %q", bs)
}
- errLine, body := sp[0], sp[1]
if !strings.HasPrefix(errLine, "Error: open /bogus/file.foo") {
t.Errorf("expected stderr to complain about file; got %q", errLine)
}
// and the /bin/hostname only returns the first component
want := runBinHostname(t)
if hostname != want {
- i := strings.Index(hostname, ".")
- if i < 0 || hostname[0:i] != want {
+ host, _, ok := strings.Cut(hostname, ".")
+ if !ok || host != want {
t.Errorf("Hostname() = %q, want %q", hostname, want)
}
}
// say: "It is expected to be a comma separated list of
// personal data where the first item is the full name of the
// user."
- if i := strings.Index(u.Name, ","); i >= 0 {
- u.Name = u.Name[:i]
- }
+ u.Name, _, _ = strings.Cut(u.Name, ",")
return u
}
// say: "It is expected to be a comma separated list of
// personal data where the first item is the full name of the
// user."
- if i := strings.Index(u.Name, ","); i >= 0 {
- u.Name = u.Name[:i]
- }
+ u.Name, _, _ = strings.Cut(u.Name, ",")
return u, nil
}
}
out[n] = -1
out[n+1] = -1
} else {
- k := strings.Index(pair, "-")
- if k < 0 {
- t.Fatalf("%s:%d: invalid pair %s", file, lineno, pair)
- }
- lo, err1 := strconv.Atoi(pair[:k])
- hi, err2 := strconv.Atoi(pair[k+1:])
+ loStr, hiStr, _ := strings.Cut(pair, "-")
+ lo, err1 := strconv.Atoi(loStr)
+ hi, err2 := strconv.Atoi(hiStr)
if err1 != nil || err2 != nil || lo > hi {
t.Fatalf("%s:%d: invalid pair %s", file, lineno, pair)
}
continue Reading
}
case ':':
- i := strings.Index(flag[1:], ":")
- if i < 0 {
+ var ok bool
+ if _, flag, ok = strings.Cut(flag[1:], ":"); !ok {
t.Logf("skip: %s", line)
continue Reading
}
- flag = flag[1+i+1:]
case 'C', 'N', 'T', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
t.Logf("skip: %s", line)
continue Reading
func (re *Regexp) expand(dst []byte, template string, bsrc []byte, src string, match []int) []byte {
for len(template) > 0 {
- i := strings.Index(template, "$")
- if i < 0 {
+ before, after, ok := strings.Cut(template, "$")
+ if !ok {
break
}
- dst = append(dst, template[:i]...)
- template = template[i:]
- if len(template) > 1 && template[1] == '$' {
+ dst = append(dst, before...)
+ template = after
+ if template != "" && template[0] == '$' {
// Treat $$ as $.
dst = append(dst, '$')
- template = template[2:]
+ template = template[1:]
continue
}
name, num, rest, ok := extract(template)
if !ok {
// Malformed; treat $ as raw text.
dst = append(dst, '$')
- template = template[1:]
continue
}
template = rest
return dst
}
-// extract returns the name from a leading "$name" or "${name}" in str.
+// extract returns the name from a leading "name" or "{name}" in str.
+// (The $ has already been removed by the caller.)
// If it is a number, extract returns num set to that number; otherwise num = -1.
func extract(str string) (name string, num int, rest string, ok bool) {
- if len(str) < 2 || str[0] != '$' {
+ if str == "" {
return
}
brace := false
- if str[1] == '{' {
+ if str[0] == '{' {
brace = true
- str = str[2:]
- } else {
str = str[1:]
}
i := 0
case 'Q':
// \Q ... \E: the ... is always literals
var lit string
- if i := strings.Index(t, `\E`); i < 0 {
- lit = t[2:]
- t = ""
- } else {
- lit = t[2:i]
- t = t[i+2:]
- }
+ lit, t, _ = strings.Cut(t[2:], `\E`)
for lit != "" {
c, rest, err := nextRune(lit)
if err != nil {
)
// Reverse of cmd/go/internal/modload.PackageBuildInfo
for len(data) > 0 {
- i := strings.IndexByte(data, '\n')
- if i < 0 {
+ line, data, ok = strings.Cut(data, "\n")
+ if !ok {
break
}
- line, data = data[:i], data[i+1:]
switch {
case strings.HasPrefix(line, pathLine):
elem := line[len(pathLine):]
func containsInOrder(s string, all ...string) bool {
for _, t := range all {
- i := strings.Index(s, t)
- if i < 0 {
+ var ok bool
+ if _, s, ok = strings.Cut(s, t); !ok {
return false
}
- s = s[i+len(t):]
}
return true
}
// stackContainsLabeled takes a spec like funcname;key=value and matches if the stack has that key
// and value and has funcname somewhere in the stack.
func stackContainsLabeled(spec string, count uintptr, stk []*profile.Location, labels map[string][]string) bool {
- semi := strings.Index(spec, ";")
- if semi == -1 {
+ base, kv, ok := strings.Cut(spec, ";")
+ if !ok {
panic("no semicolon in key/value spec")
}
- kv := strings.SplitN(spec[semi+1:], "=", 2)
- if len(kv) != 2 {
+ k, v, ok := strings.Cut(kv, "=")
+ if !ok {
panic("missing = in key/value spec")
}
- if !contains(labels[kv[0]], kv[1]) {
+ if !contains(labels[k], v) {
return false
}
- return stackContains(spec[:semi], count, stk, labels)
+ return stackContains(base, count, stk, labels)
}
func TestCPUProfileLabel(t *testing.T) {
"os"
"runtime"
"strconv"
+ "strings"
"time"
"unsafe"
)
}
}
+var space = []byte(" ")
+var newline = []byte("\n")
+
func parseProcSelfMaps(data []byte, addMapping func(lo, hi, offset uint64, file, buildID string)) {
// $ cat /proc/self/maps
// 00400000-0040b000 r-xp 00000000 fc:01 787766 /bin/cat
// next removes and returns the next field in the line.
// It also removes from line any spaces following the field.
next := func() []byte {
- j := bytes.IndexByte(line, ' ')
- if j < 0 {
- f := line
- line = nil
- return f
- }
- f := line[:j]
- line = line[j+1:]
- for len(line) > 0 && line[0] == ' ' {
- line = line[1:]
- }
+ var f []byte
+ f, line, _ = bytes.Cut(line, space)
+ line = bytes.TrimLeft(line, " ")
return f
}
for len(data) > 0 {
- i := bytes.IndexByte(data, '\n')
- if i < 0 {
- line, data = data, nil
- } else {
- line, data = data[:i], data[i+1:]
- }
+ line, data, _ = bytes.Cut(data, newline)
addr := next()
- i = bytes.IndexByte(addr, '-')
- if i < 0 {
+ loStr, hiStr, ok := strings.Cut(string(addr), "-")
+ if !ok {
continue
}
- lo, err := strconv.ParseUint(string(addr[:i]), 16, 64)
+ lo, err := strconv.ParseUint(loStr, 16, 64)
if err != nil {
continue
}
- hi, err := strconv.ParseUint(string(addr[i+1:]), 16, 64)
+ hi, err := strconv.ParseUint(hiStr, 16, 64)
if err != nil {
continue
}
f := func(t *testing.T, input string) {
for tx, tt := range strings.Split(input, "\n\n") {
- i := strings.Index(tt, "->\n")
- if i < 0 {
+ in, out, ok := strings.Cut(tt, "->\n")
+ if !ok {
t.Fatal("malformed test case")
}
- in, out := tt[:i], tt[i+len("->\n"):]
if len(out) > 0 && out[len(out)-1] != '\n' {
out += "\n"
}
t.Fatalf("gdb exited with error: %v", err)
}
- firstLine := bytes.SplitN(got, []byte("\n"), 2)[0]
+ firstLine, _, _ := bytes.Cut(got, []byte("\n"))
if string(firstLine) != "Loading Go Runtime support." {
// This can happen when using all.bash with
// GOROOT_FINAL set, because the tests are run before
if err != nil {
return nil, fmt.Errorf("fail to execute '%s': %s", cmdline, err)
}
- pos := bytes.IndexRune(output, '\n')
- if pos == -1 {
+ output, _, ok := bytes.Cut(output, []byte("\n"))
+ if !ok {
return nil, fmt.Errorf("invalid output from '%s', '\\n' not found: %s", cmdline, output)
}
- output = output[0:pos]
- pos = bytes.IndexRune(output, ':')
- if pos == -1 {
+ _, cpus, ok := bytes.Cut(output, []byte(":"))
+ if !ok {
return nil, fmt.Errorf("invalid output from '%s', ':' not found: %s", cmdline, output)
}
var list []string
- for _, val := range bytes.Split(output[pos+1:], []byte(",")) {
+ for _, val := range bytes.Split(cpus, []byte(",")) {
index := string(bytes.TrimSpace(val))
if len(index) == 0 {
continue
for {
n := runtime.Stack(buf, true)
if n < len(buf) {
- tb := string(buf[:n])
+ all := string(buf[:n])
+ var saved string
// Delete any ignored goroutines, if present.
- pos := 0
- for pos < len(tb) {
- next := pos + strings.Index(tb[pos:], "\n\n")
- if next < pos {
- next = len(tb)
- } else {
- next += len("\n\n")
- }
+ for all != "" {
+ var g string
+ g, all, _ = strings.Cut(all, "\n\n")
- if strings.HasPrefix(tb[pos:], "goroutine ") {
- id := tb[pos+len("goroutine "):]
- id = id[:strings.IndexByte(id, ' ')]
+ if strings.HasPrefix(g, "goroutine ") {
+ id, _, _ := strings.Cut(strings.TrimPrefix(g, "goroutine "), " ")
if ignoreGoroutines[id] {
- tb = tb[:pos] + tb[next:]
- next = pos
+ continue
}
}
- pos = next
+ if saved != "" {
+ saved += "\n\n"
+ }
+ saved += g
}
- fmt.Print(tb)
+ fmt.Print(saved)
return
}
buf = make([]byte, 2*len(buf))
func goroutineID() string {
buf := make([]byte, 128)
runtime.Stack(buf, false)
- const prefix = "goroutine "
- if !bytes.HasPrefix(buf, []byte(prefix)) {
+ prefix := []byte("goroutine ")
+ if !bytes.HasPrefix(buf, prefix) {
panic(fmt.Sprintf("expected %q at beginning of traceback:\n%s", prefix, buf))
}
- buf = buf[len(prefix):]
- n := bytes.IndexByte(buf, ' ')
- return string(buf[:n])
+ id, _, _ := bytes.Cut(bytes.TrimPrefix(buf, prefix), []byte(" "))
+ return string(id)
}
// Wrapper around strconv.ParseFloat(x, 64). Handles dddddp+ddd (binary exponent)
// itself, passes the rest on to strconv.ParseFloat.
func myatof64(s string) (f float64, ok bool) {
- a := strings.SplitN(s, "p", 2)
- if len(a) == 2 {
- n, err := strconv.ParseInt(a[0], 10, 64)
+ if mant, exp, ok := strings.Cut(s, "p"); ok {
+ n, err := strconv.ParseInt(mant, 10, 64)
if err != nil {
return 0, false
}
- e, err1 := strconv.Atoi(a[1])
+ e, err1 := strconv.Atoi(exp)
if err1 != nil {
- println("bad e", a[1])
+ println("bad e", exp)
return 0, false
}
v := float64(n)
// Wrapper around strconv.ParseFloat(x, 32). Handles dddddp+ddd (binary exponent)
// itself, passes the rest on to strconv.ParseFloat.
func myatof32(s string) (f float32, ok bool) {
- a := strings.SplitN(s, "p", 2)
- if len(a) == 2 {
- n, err := strconv.Atoi(a[0])
+ if mant, exp, ok := strings.Cut(s, "p"); ok {
+ n, err := strconv.Atoi(mant)
if err != nil {
- println("bad n", a[0])
+ println("bad n", mant)
return 0, false
}
- e, err1 := strconv.Atoi(a[1])
+ e, err1 := strconv.Atoi(exp)
if err1 != nil {
- println("bad p", a[1])
+ println("bad p", exp)
return 0, false
}
return float32(float64(n) * pow2(e)), true
buf[i] = byte(b)
}
ver := string(buf[:])
- if i := strings.Index(ver, "\x00"); i != -1 {
- ver = ver[:i]
- }
+ ver, _, _ = strings.Cut(ver, "\x00")
if strings.HasPrefix(ver, "2.") ||
strings.HasPrefix(ver, "3.") ||
strings.HasPrefix(ver, "4.1.") ||
if opt == "" {
panic("empty option string")
}
- elems := strings.Split(opt, "=")
- switch len(elems) {
- case 2:
- // key=value
- switch elems[0] {
+ // key=value
+ if key, value, ok := strings.Cut(opt, "="); ok {
+ switch key {
case "missingkey":
- switch elems[1] {
+ switch value {
case "invalid", "default":
t.option.missingKey = mapInvalid
return
if name == "" {
return false
}
- if i := strings.Index(name, ","); i >= 0 {
+ if first, rest, ok := strings.Cut(name, ","); ok {
// comma-separated list
- return ctxt.match(name[:i]) && ctxt.match(name[i+1:])
+ return ctxt.match(first) && ctxt.match(rest)
}
if strings.HasPrefix(name, "!!") { // bad syntax, reject always
return false
}
// Execution recipe stops at first blank line.
- pos := strings.Index(t.src, "\n\n")
- if pos == -1 {
+ action, _, ok := strings.Cut(t.src, "\n\n")
+ if !ok {
t.err = fmt.Errorf("double newline ending execution recipe not found in %s", t.goFileName())
return
}
- action := t.src[:pos]
- if nl := strings.Index(action, "\n"); nl >= 0 && strings.Contains(action[:nl], "+build") {
+ if firstLine, rest, ok := strings.Cut(action, "\n"); ok && strings.Contains(firstLine, "+build") {
// skip first line
- action = action[nl+1:]
+ action = rest
}
action = strings.TrimPrefix(action, "//")
// Check for build constraints only up to the actual code.
- pkgPos := strings.Index(t.src, "\npackage")
- if pkgPos == -1 {
- pkgPos = pos // some files are intentionally malformed
+ header, _, ok := strings.Cut(t.src, "\npackage")
+ if !ok {
+ header = action // some files are intentionally malformed
}
- if ok, why := shouldTest(t.src[:pkgPos], goos, goarch); !ok {
+ if ok, why := shouldTest(header, goos, goarch); !ok {
if *showSkips {
fmt.Printf("%-20s %-20s: %s\n", "skip", t.goFileName(), why)
}
// Assume errmsg says "file:line: foo".
// Cut leading "file:line: " to avoid accidental matching of file name instead of message.
text := errmsg
- if i := strings.Index(text, " "); i >= 0 {
- text = text[i+1:]
+ if _, suffix, ok := strings.Cut(text, " "); ok {
+ text = suffix
}
if we.re.MatchString(text) {
matched = true
}
lines := strings.Split(string(src), "\n")
// Remove old errors.
- for i, ln := range lines {
- pos := strings.Index(ln, " // ERROR ")
- if pos >= 0 {
- lines[i] = ln[:pos]
- }
+ for i := range lines {
+ lines[i], _, _ = strings.Cut(lines[i], " // ERROR ")
}
// Parse new errors.
errors := make(map[int]map[string]bool)
tmpRe := regexp.MustCompile(`autotmp_[0-9]+`)
for _, errStr := range splitOutput(out, false) {
- colon1 := strings.Index(errStr, ":")
- if colon1 < 0 || errStr[:colon1] != file {
+ errFile, rest, ok := strings.Cut(errStr, ":")
+ if !ok || errFile != file {
continue
}
- colon2 := strings.Index(errStr[colon1+1:], ":")
- if colon2 < 0 {
+ lineStr, msg, ok := strings.Cut(rest, ":")
+ if !ok {
continue
}
- colon2 += colon1 + 1
- line, err := strconv.Atoi(errStr[colon1+1 : colon2])
+ line, err := strconv.Atoi(lineStr)
line--
if err != nil || line < 0 || line >= len(lines) {
continue
}
- msg := errStr[colon2+2:]
msg = strings.Replace(msg, file, base, -1) // normalize file mentions in error itself
msg = strings.TrimLeft(msg, " \t")
for _, r := range []string{`\`, `*`, `+`, `?`, `[`, `]`, `(`, `)`} {
}
fmt.Printf("%s: expected no error; got %q\n", t.name, err)
case t.err != "" && err != "":
- if strings.Index(err, t.err) < 0 {
+ if !strings.Contains(err, t.err) {
if !bad {
bad = true
fmt.Printf("BUG\n")