<meta name="go-import" content="import-prefix vcs repo-root">
-The import-prefix is the import path correponding to the repository
+The import-prefix is the import path corresponding to the repository
root. It must be a prefix or an exact match of the package being
fetched with "go get". If it's not an exact match, another http
request is made at the prefix to verify the <meta> tags match.
}
}
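For illustration only (the import path and repository URL below are invented), a page served in response to "go get example.org/pkg/foo" might contain:

	<meta name="go-import" content="example.org/pkg git https://code.example.org/pkg.git">

Here the import-prefix example.org/pkg is a prefix of example.org/pkg/foo, the vcs is git, and the repo-root is the repository URL to clone.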
-// walkRangeStmt walks a range statment.
+// walkRangeStmt walks a range statement.
func (f *File) walkRangeStmt(n *ast.RangeStmt) {
checkRangeLoop(f, n)
}
j = tokset[i].value
if j >= 0 && j < 256 {
if temp1[j] != 0 {
- fmt.Print("yacc bug -- cant have 2 different Ts with same value\n")
+ fmt.Print("yacc bug -- cannot have 2 different Ts with same value\n")
fmt.Printf(" %s and %s\n", tokset[i].name, tokset[temp1[j]].name)
nerrors++
}
j = tokset[i].value - PRIVATE
if j >= 0 && j < 256 {
if temp1[j] != 0 {
- fmt.Print("yacc bug -- cant have 2 different Ts with same value\n")
+ fmt.Print("yacc bug -- cannot have 2 different Ts with same value\n")
fmt.Printf(" %s and %s\n", tokset[i].name, tokset[temp1[j]].name)
nerrors++
}
logMaxOffsetSize = 15 // Standard DEFLATE
minMatchLength = 3 // The smallest match that the compressor looks for
maxMatchLength = 258 // The longest match for the compressor
- minOffsetSize = 1 // The shortest offset that makes any sence
+ minOffsetSize = 1 // The shortest offset that makes any sense
- // The maximum number of tokens we put into a single flat block, just too
+ // The maximum number of tokens we put into a single flat block, just to
// stop things from getting too large.
// Note that this example is simplistic in that it omits any
- // authentication of the encrypted data. It you were actually to use
+ // authentication of the encrypted data. If you were actually to use
- // StreamReader in this manner, an attacker could flip arbitary bits in
+ // StreamReader in this manner, an attacker could flip arbitrary bits in
// the output.
}
// Note that this example is simplistic in that it omits any
- // authentication of the encrypted data. It you were actually to use
+ // authentication of the encrypted data. If you were actually to use
- // StreamReader in this manner, an attacker could flip arbitary bits in
+ // StreamReader in this manner, an attacker could flip arbitrary bits in
// the decrypted result.
}
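As a rough sketch of the malleability both comments warn about (the key, IV, and message are placeholders, and the CTR stream is used directly rather than through StreamReader, but the effect is the same): an attacker who never sees the key can flip chosen plaintext bits by flipping the corresponding ciphertext bits.

package main

import (
	"crypto/aes"
	"crypto/cipher"
	"fmt"
)

func main() {
	key := []byte("example key 1234") // 16-byte placeholder key
	iv := make([]byte, aes.BlockSize) // zero IV, acceptable only for this one-off demo
	plaintext := []byte("pay $100")

	block, err := aes.NewCipher(key)
	if err != nil {
		panic(err)
	}
	ciphertext := make([]byte, len(plaintext))
	cipher.NewCTR(block, iv).XORKeyStream(ciphertext, plaintext)

	// Without knowing the key, flip the bits that turn the decrypted '1' into a '9'.
	ciphertext[5] ^= '1' ^ '9'

	decrypted := make([]byte, len(ciphertext))
	cipher.NewCTR(block, iv).XORKeyStream(decrypted, ciphertext)
	fmt.Printf("%s\n", decrypted) // prints "pay $900"
}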
t.Errorf("%d: public key algorithm is %v, want ECDSA", i, pka)
}
if err = cert.CheckSignatureFrom(cert); err != nil {
- t.Errorf("%d: certificate verfication failed: %s", i, err)
+ t.Errorf("%d: certificate verification failed: %s", i, err)
}
}
}
}
// test cert is self-signed
if err = cert.CheckSignatureFrom(cert); err != nil {
- t.Fatalf("DSA Certificate verfication failed: %s", err)
+ t.Fatalf("DSA Certificate verification failed: %s", err)
}
}
var putConnHook func(*DB, driver.Conn)
// putConn adds a connection to the db's free pool.
-// err is optionally the last error that occured on this connection.
+// err is optionally the last error that occurred on this connection.
func (db *DB) putConn(c driver.Conn, err error) {
if err == driver.ErrBadConn {
// Don't reuse bad connections.
}
// Flush writes any buffered data to the underlying io.Writer.
-// To check if an error occured during the Flush, call Error.
+// To check if an error occurred during the Flush, call Error.
func (w *Writer) Flush() {
w.w.Flush()
}
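A usage sketch, assuming the Writer here is (or behaves like) encoding/csv's Writer, whose Flush returns nothing and reports any deferred write error through Error:

package main

import (
	"encoding/csv"
	"log"
	"os"
)

func main() {
	w := csv.NewWriter(os.Stdout)
	w.Write([]string{"name", "value"}) // Write errors also surface later via Error
	w.Write([]string{"answer", "42"})
	w.Flush() // flush buffered records; no error is returned here
	if err := w.Error(); err != nil { // check for any error from Write or Flush
		log.Fatal(err)
	}
}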
const (
blockSize = 64
- blockOffset = 2 // Substract 2 blocks to compensate for the 0x80 added to continuation bytes.
+ blockOffset = 2 // Subtract 2 blocks to compensate for the 0x80 added to continuation bytes.
)
type trieHandle struct {
i.rb.src.copySlice(out[outCopyStart:], inCopyStart, i.p)
if !i.rb.insertDecomposed(out[i.outStart:outp]) {
// Start over to prevent decompositions from crossing segment boundaries.
- // This is a rare occurance.
+ // This is a rare occurrence.
i.p = i.inStart
i.info = i.rb.f.info(i.rb.src, i.p)
}
}
runes := []rune(test.buffer)
if rb.nrune != len(runes) {
- t.Errorf("%s:%d: reorder buffer lenght is %d; want %d", name, i, rb.nrune, len(runes))
+ t.Errorf("%s:%d: reorder buffer length is %d; want %d", name, i, rb.nrune, len(runes))
continue
}
for j, want := range runes {
const (
blockSize = 64
- blockOffset = 2 // Substract two blocks to compensate for the 0x80 added to continuation bytes.
+ blockOffset = 2 // Subtract two blocks to compensate for the 0x80 added to continuation bytes.
maxSparseEntries = 16
)
}
// rawExpr typechecks expression e and initializes x with the expression
-// value or type. If an error occured, x.mode is set to invalid.
+// value or type. If an error occurred, x.mode is set to invalid.
// A hint != nil is used as operand type for untyped shifted operands;
// iota >= 0 indicates that the expression is part of a constant declaration.
// cycleOk indicates whether it is ok for a type expression to refer to itself.
x.typ = obj.Type.(Type)
case *ast.Ellipsis:
- // ellipses are handled explictly where they are legal
+ // ellipses are handled explicitly where they are legal
// (array composite literals and parameter lists)
check.errorf(e.Pos(), "invalid use of '...'")
goto Error
}
-// typOrNil is like rawExpr but reports an error if e doesn't represents a type or the predeclared value nil.
+// typOrNil is like rawExpr but reports an error if e doesn't represent a type or the predeclared value nil.
-// It returns e's type, nil, or Typ[Invalid] if an error occured.
+// It returns e's type, nil, or Typ[Invalid] if an error occurred.
//
func (check *checker) typOrNil(e ast.Expr, cycleOk bool) Type {
return check.rawTyp(e, cycleOk, true)
}
-// typ is like rawExpr but reports an error if e doesn't represents a type.
+// typ is like rawExpr but reports an error if e doesn't represent a type.
-// It returns e's type, or Typ[Invalid] if an error occured.
+// It returns e's type, or Typ[Invalid] if an error occurred.
//
func (check *checker) typ(e ast.Expr, cycleOk bool) Type {
return check.rawTyp(e, cycleOk, false)
// Sort imports if necessary.
if file != nil && hasUnsortedImports(file) {
// Make a copy of the AST because ast.SortImports is destructive.
- // TODO(gri) Do this more efficently.
+ // TODO(gri) Do this more efficiently.
var buf bytes.Buffer
err := config.Fprint(&buf, fset, file)
if err != nil {
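A minimal sketch of the copy-by-reprint idea above, not gofmt's actual code: print the file into a buffer and re-parse it so that the destructive ast.SortImports only touches the copy (sortedImportsCopy is an invented name).

package sketch

import (
	"bytes"
	"go/ast"
	"go/parser"
	"go/printer"
	"go/token"
)

// sortedImportsCopy returns a copy of file with its imports sorted,
// leaving the original AST untouched.
func sortedImportsCopy(fset *token.FileSet, file *ast.File) (*token.FileSet, *ast.File, error) {
	var buf bytes.Buffer
	if err := printer.Fprint(&buf, fset, file); err != nil {
		return nil, nil, err
	}
	// Re-parsing the printed source yields an AST that shares no nodes with file.
	copyFset := token.NewFileSet()
	copyFile, err := parser.ParseFile(copyFset, "", buf.Bytes(), parser.ParseComments)
	if err != nil {
		return nil, nil, err
	}
	ast.SortImports(copyFset, copyFile) // destructive, but only on the copy
	return copyFset, copyFile, nil
}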
// don't overwrite any pending comment in the p.comment cache
// (there may be a pending comment when a line comment is
// immediately followed by a lead comment with no other
- // tokens inbetween)
+ // tokens between)
if p.commentOffset == infinity {
p.nextComment() // get comment ready for use
}
// Convert words of q to base b digits in s. If q is large, it is recursively "split in half"
// by nat/nat division using tabulated divisors. Otherwise, it is converted iteratively using
-// repeated nat/Word divison.
+// repeated nat/Word division.
//
// The iterative method processes n Words by n divW() calls, each of which visits every Word in the
// incrementally shortened q for a total of n + (n-1) + (n-2) ... + 2 + 1, or n(n+1)/2 divW()'s.
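To make the quadratic cost concrete, here is a toy version of the iterative method (not math/big's code; the 32-bit limb slice and divWord stand in for nat and divW): each digit costs one pass over all remaining limbs, giving the n + (n-1) + ... + 1 behavior described above.

package main

import "fmt"

// divWord divides the little-endian limb slice x by w in place and
// returns the remainder; it stands in for nat's divW.
func divWord(x []uint32, w uint32) (r uint32) {
	for i := len(x) - 1; i >= 0; i-- {
		cur := uint64(r)<<32 | uint64(x[i])
		x[i] = uint32(cur / uint64(w))
		r = uint32(cur % uint64(w))
	}
	return r
}

func main() {
	x := []uint32{0, 0, 1} // 2^64 as three little-endian 32-bit limbs
	var digits []byte
	for len(x) > 0 {
		// One pass over every remaining limb per decimal digit produced.
		digits = append(digits, byte('0'+divWord(x, 10)))
		for len(x) > 0 && x[len(x)-1] == 0 {
			x = x[:len(x)-1] // drop leading zero limbs so later passes are shorter
		}
	}
	// Digits were produced least significant first; reverse before printing.
	for i, j := 0, len(digits)-1; i < j; i, j = i+1, j-1 {
		digits[i], digits[j] = digits[j], digits[i]
	}
	fmt.Println(string(digits)) // 18446744073709551616
}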
// d = mant << (exp - mantbits)
// Next highest floating point number is mant+1 << exp-mantbits.
- // Our upper bound is halfway inbetween, mant*2+1 << exp-mantbits-1.
+ // Our upper bound is halfway between, mant*2+1 << exp-mantbits-1.
upper := new(decimal)
upper.Assign(mant*2 + 1)
upper.Shift(exp - int(flt.mantbits) - 1)
// unless mant-1 drops the significant bit and exp is not the minimum exp,
// in which case the next lowest is mant*2-1 << exp-mantbits-1.
// Either way, call it mantlo << explo-mantbits.
- // Our lower bound is halfway inbetween, mantlo*2+1 << explo-mantbits-1.
+ // Our lower bound is halfway between, mantlo*2+1 << explo-mantbits-1.
var mantlo uint64
var explo int
if mant > 1<<flt.mantbits || exp == minexp {
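As a toy numeric check of the halfway-point formulas above (the values are made up and much smaller than real float64 fields): with mant = 5 and exp-mantbits = 3, d = 5<<3 = 40 and the next value up is 6<<3 = 48, so the upper bound (2*5+1)<<(3-1) = 11<<2 = 44 sits exactly halfway between them; with mantlo = 4, the lower bound (2*4+1)<<2 = 36 is likewise halfway between 4<<3 = 32 and 40.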
}
}
- // Commited to succeed.
+ // Committed to succeed.
l = &Location{zone: zone, tx: tx}
// Fill in the cache with information about right now,