(BEQZ (NEG x) yes no) => (BEQZ x yes no)
(BNEZ (NEG x) yes no) => (BNEZ x yes no)
-// Negate comparision with FNES/FNED.
+// Negate comparison with FNES/FNED.
(BEQZ (FNES <t> x y) yes no) => (BNEZ (FEQS <t> x y) yes no)
(BNEZ (FNES <t> x y) yes no) => (BEQZ (FEQS <t> x y) yes no)
(BEQZ (FNED <t> x y) yes no) => (BNEZ (FEQD <t> x y) yes no)
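RISC-V's F/D extensions provide FEQ but no "not equal" compare, so a float != is carried as FNES/FNED, the negation of FEQS/FEQD; when that result is only branched on, the rules above fold the negation into the branch by flipping BEQZ/BNEZ. A hedged, source-level illustration (hypothetical function, not taken from the compiler's tests):

package sketch

// isDifferent shows the kind of code these rules target: the != lowers
// to a negated float compare, and since the result is only used by the
// branch, the rewrite branches on the FEQD result directly with the
// opposite sense instead of materializing the negation first.
func isDifferent(x, y float64) int {
	if x != y {
		return 1
	}
	return 0
}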
// HaveInlineBody reports whether we have fn's inline body available
// for inlining.
//
-// It's a function literal so that it can be overriden for
+// It's a function literal so that it can be overridden for
// GOEXPERIMENT=unified.
var HaveInlineBody = func(fn *ir.Func) bool {
if fn.Inl == nil {
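The declaration is a var holding a function literal precisely so another initialization path can swap in its own check. A minimal, self-contained sketch of that pattern, with hypothetical types standing in for the compiler's ir package (this is not the compiler's actual code):

package sketch

// Func and InlineBody are illustrative stand-ins.
type InlineBody struct{}

type Func struct {
	Inl *InlineBody // nil when no inline body was recorded
}

// HaveInlineBody is a variable rather than a func declaration, so an
// alternative frontend can replace it wholesale at startup.
var HaveInlineBody = func(fn *Func) bool {
	return fn.Inl != nil
}

// Elsewhere, e.g. when a different IR loader is in use:
//
//	func init() {
//		HaveInlineBody = func(fn *Func) bool {
//			// consult the alternative representation instead
//			return alternativeLookup(fn) != nil
//		}
//	}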
if r := relocs.At(i); r.Type() == objabi.ElfRelocOffset+objabi.RelocType(elf.R_PPC64_REL24) {
switch ldr.SymType(r.Sym()) {
case sym.SDYNIMPORT:
- // This call goes throught the PLT, generate and call through a PLT stub.
+ // This call goes through the PLT; generate and call through a PLT stub.
if sym, firstUse := genpltstub(ctxt, ldr, r, s); firstUse {
stubs = append(stubs, sym)
}
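The firstUse check keeps one stub per dynamic-import target while still recording each new stub for later emission. A hedged sketch of that generate-once pattern, with hypothetical types standing in for the linker's loader and symbol machinery:

package sketch

type Sym int

type stubGen struct {
	stubs map[Sym]Sym // call target -> its PLT stub
	order []Sym       // stubs in creation order, for later emission
}

// stubFor returns the PLT stub for target, creating and recording it
// only on first use.
func (g *stubGen) stubFor(target Sym, create func(Sym) Sym) Sym {
	if s, ok := g.stubs[target]; ok {
		return s
	}
	if g.stubs == nil {
		g.stubs = make(map[Sym]Sym)
	}
	s := create(target)
	g.stubs[target] = s
	g.order = append(g.order, s)
	return s
}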
// where "numCtrs" is the number of blocks / coverable units within the
// function, "pkgid" is the unique index assigned to this package by
// the runtime, "funcid" is the index of this function within its containing
-// packge, and "counterArray" stores the actual counters.
+// package, and "counterArray" stores the actual counters.
//
// The counter variable itself is created not as a struct but as a flat
// array of uint32's; we then use the offsets below to index into it.
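A hedged sketch of that flat layout (the offset names and values here are illustrative assumptions, not the compiler's actual constants): the per-function counter "variable" is just a []uint32 whose leading slots hold the metadata and whose remaining slots are the per-unit counters.

package sketch

const (
	numCtrsOffset  = 0 // number of coverable units in the function
	pkgIDOffset    = 1 // package index assigned by the runtime
	funcIDOffset   = 2 // function index within its package
	firstCtrOffset = 3 // counters for unit 0, 1, ... follow
)

// newCounters builds the flat counter array for one function.
func newCounters(numCtrs, pkgID, funcID uint32) []uint32 {
	ctrs := make([]uint32, firstCtrOffset+int(numCtrs))
	ctrs[numCtrsOffset] = numCtrs
	ctrs[pkgIDOffset] = pkgID
	ctrs[funcIDOffset] = funcID
	return ctrs
}

// markUnit bumps the counter for coverable unit i.
func markUnit(ctrs []uint32, i int) {
	ctrs[firstCtrOffset+i]++
}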
t.Errorf("got %v, want %v", err, tt.expectedErr)
}
- // skip if error occured.
+ // skip if error occurred.
if err != nil {
continue
}
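A minimal, self-contained version of this table-driven pattern (the table shape and use of strconv.Atoi are illustrative, not the original test): report a mismatched error, and when any error was returned, skip the remaining checks for that case.

package sketch

import (
	"errors"
	"strconv"
	"testing"
)

func TestAtoi(t *testing.T) {
	tests := []struct {
		in          string
		want        int
		expectedErr error
	}{
		{"42", 42, nil},
		{"", 0, strconv.ErrSyntax},
	}
	for _, tt := range tests {
		got, err := strconv.Atoi(tt.in)
		if !errors.Is(err, tt.expectedErr) {
			t.Errorf("got %v, want %v", err, tt.expectedErr)
		}
		// skip if error occurred.
		if err != nil {
			continue
		}
		if got != tt.want {
			t.Errorf("Atoi(%q) = %d, want %d", tt.in, got, tt.want)
		}
	}
}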
err := syscall.Mount("tmpfs", tmp, "tmpfs", 0, "")
if err != nil {
// Usually this means lack of CAP_SYS_ADMIN, but there might be
- // other reasons, expecially in restricted test environments.
+ // other reasons, especially in restricted test environments.
t.Skipf("requires ability to mount tmpfs (%v)", err)
}
t.Cleanup(func() {
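A hedged sketch of the full shape of this setup (the use of t.TempDir and the unmount in the cleanup are assumptions about the surrounding test): attempt the privileged operation, skip when the environment does not allow it, and register the cleanup immediately so the mount never outlives the test.

package sketch

import (
	"syscall"
	"testing"
)

// mountTmpfs mounts a private tmpfs for the test, or skips the test
// when the environment does not permit mounting.
func mountTmpfs(t *testing.T) string {
	t.Helper()
	tmp := t.TempDir()
	if err := syscall.Mount("tmpfs", tmp, "tmpfs", 0, ""); err != nil {
		// Usually this means lack of CAP_SYS_ADMIN, but there might be
		// other reasons, especially in restricted test environments.
		t.Skipf("requires ability to mount tmpfs (%v)", err)
	}
	t.Cleanup(func() {
		if err := syscall.Unmount(tmp, 0); err != nil {
			t.Errorf("unmount %s: %v", tmp, err)
		}
	})
	return tmp
}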
// manipulating the return address so that libfuzzer's integer compare hooks
// work
// libFuzzer's compare hooks obtain the caller's address from the compiler
-// builtin __builtin_return_adress. Since we invoke the hooks always
+// builtin __builtin_return_address. Since we invoke the hooks always
// from the same native function, this builtin would always return the same
// value. Internally, the libFuzzer hooks call through to the always inlined
// HandleCmp and thus can't be mimicked without patching libFuzzer.
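A conceptual Go stand-in for why a single shared call site defeats caller-address keying (this is not the runtime's actual hook, just an illustration using runtime.Caller in place of __builtin_return_address):

package sketch

import "runtime"

// seenCmps is keyed by the PC of whoever called recordCmp, standing in
// for libFuzzer's per-call-site value-profile tables.
var seenCmps = map[uintptr][][2]uint64{}

// recordCmp keys each comparison by its caller's PC. If every
// instrumented comparison is routed through one shared wrapper, that
// wrapper's PC is the only key ever produced and all compare sites
// collapse into a single entry, which is the problem the
// return-address manipulation described above works around.
func recordCmp(a, b uint64) {
	pc, _, _, ok := runtime.Caller(1)
	if !ok {
		return
	}
	seenCmps[pc] = append(seenCmps[pc], [2]uint64{a, b})
}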