import (
"cmd/compile/internal/base"
"cmd/compile/internal/gc"
+ "cmd/compile/internal/ir"
"cmd/internal/obj"
"cmd/internal/obj/x86"
"cmd/internal/objabi"
)
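+ // dzDI and dzOff (package-local helpers) compute the DI pre-adjustment
+ // and the entry offset into duffzero for the given byte count.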
p = pp.Appendpp(p, leaptr, obj.TYPE_MEM, x86.REG_SP, off+dzDI(cnt), obj.TYPE_REG, x86.REG_DI, 0)
p = pp.Appendpp(p, obj.ADUFFZERO, obj.TYPE_NONE, 0, 0, obj.TYPE_ADDR, 0, dzOff(cnt))
- p.To.Sym = gc.Duffzero
+ p.To.Sym = ir.Syms.Duffzero
if cnt%16 != 0 {
p = pp.Appendpp(p, x86.AMOVUPS, obj.TYPE_REG, x86.REG_X0, 0, obj.TYPE_MEM, x86.REG_DI, -int64(8))
"cmd/compile/internal/base"
"cmd/compile/internal/gc"
+ "cmd/compile/internal/ir"
"cmd/compile/internal/logopt"
"cmd/compile/internal/ssa"
"cmd/compile/internal/types"
}
p = s.Prog(obj.ADUFFZERO)
p.To.Type = obj.TYPE_ADDR
- p.To.Sym = gc.Duffzero
+ p.To.Sym = ir.Syms.Duffzero
p.To.Offset = off
case ssa.OpAMD64MOVOconst:
if v.AuxInt != 0 {
case ssa.OpAMD64DUFFCOPY:
p := s.Prog(obj.ADUFFCOPY)
p.To.Type = obj.TYPE_ADDR
- p.To.Sym = gc.Duffcopy
+ p.To.Sym = ir.Syms.Duffcopy
if v.AuxInt%16 != 0 {
v.Fatalf("bad DUFFCOPY AuxInt %v", v.AuxInt)
}
import (
"cmd/compile/internal/gc"
+ "cmd/compile/internal/ir"
"cmd/internal/obj"
"cmd/internal/obj/arm"
)
p.Reg = arm.REGSP
p = pp.Appendpp(p, obj.ADUFFZERO, obj.TYPE_NONE, 0, 0, obj.TYPE_MEM, 0, 0)
p.To.Name = obj.NAME_EXTERN
- p.To.Sym = gc.Duffzero
+ p.To.Sym = ir.Syms.Duffzero
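+ // duffzero on arm is 128 4-byte instructions, each zeroing one
+ // pointer-sized word, so entering 4*(128-cnt/Widthptr) bytes in
+ // zeroes exactly cnt bytes.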
p.To.Offset = 4 * (128 - cnt/int64(gc.Widthptr))
} else {
p = pp.Appendpp(p, arm.AADD, obj.TYPE_CONST, 0, 4+off, obj.TYPE_REG, arm.REG_R1, 0)
p := s.Prog(obj.ACALL)
p.To.Type = obj.TYPE_MEM
p.To.Name = obj.NAME_EXTERN
- p.To.Sym = gc.Udiv
+ p.To.Sym = ir.Syms.Udiv
case ssa.OpARMLoweredWB:
p := s.Prog(obj.ACALL)
p.To.Type = obj.TYPE_MEM
p := s.Prog(obj.ADUFFZERO)
p.To.Type = obj.TYPE_MEM
p.To.Name = obj.NAME_EXTERN
- p.To.Sym = gc.Duffzero
+ p.To.Sym = ir.Syms.Duffzero
p.To.Offset = v.AuxInt
case ssa.OpARMDUFFCOPY:
p := s.Prog(obj.ADUFFCOPY)
p.To.Type = obj.TYPE_MEM
p.To.Name = obj.NAME_EXTERN
- p.To.Sym = gc.Duffcopy
+ p.To.Sym = ir.Syms.Duffcopy
p.To.Offset = v.AuxInt
case ssa.OpARMLoweredNilCheck:
// Issue a load which will fault if arg is nil.
import (
"cmd/compile/internal/gc"
+ "cmd/compile/internal/ir"
"cmd/internal/obj"
"cmd/internal/obj/arm64"
"cmd/internal/objabi"
p.Reg = arm64.REG_R20
p = pp.Appendpp(p, obj.ADUFFZERO, obj.TYPE_NONE, 0, 0, obj.TYPE_MEM, 0, 0)
p.To.Name = obj.NAME_EXTERN
- p.To.Sym = gc.Duffzero
+ p.To.Sym = ir.Syms.Duffzero
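+ // duffzero on arm64 is 64 4-byte STP instructions, each zeroing a
+ // 16-byte pair of words, hence the 4*(64-cnt/16) entry offset.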
p.To.Offset = 4 * (64 - cnt/(2*int64(gc.Widthptr)))
} else {
// Not using REGTMP, so this is async preemptible (async preemption clobbers REGTMP).
p := s.Prog(obj.ADUFFZERO)
p.To.Type = obj.TYPE_MEM
p.To.Name = obj.NAME_EXTERN
- p.To.Sym = gc.Duffzero
+ p.To.Sym = ir.Syms.Duffzero
p.To.Offset = v.AuxInt
case ssa.OpARM64LoweredZero:
// STP.P (ZR,ZR), 16(R16)
p := s.Prog(obj.ADUFFCOPY)
p.To.Type = obj.TYPE_MEM
p.To.Name = obj.NAME_EXTERN
- p.To.Sym = gc.Duffcopy
+ p.To.Sym = ir.Syms.Duffcopy
p.To.Offset = v.AuxInt
case ssa.OpARM64LoweredMove:
// MOVD.P 8(R16), Rtmp
case types.AMEM:
base.Fatalf("hashfor with AMEM type")
case types.AINTER:
- sym = Runtimepkg.Lookup("interhash")
+ sym = ir.Pkgs.Runtime.Lookup("interhash")
case types.ANILINTER:
- sym = Runtimepkg.Lookup("nilinterhash")
+ sym = ir.Pkgs.Runtime.Lookup("nilinterhash")
case types.ASTRING:
- sym = Runtimepkg.Lookup("strhash")
+ sym = ir.Pkgs.Runtime.Lookup("strhash")
case types.AFLOAT32:
- sym = Runtimepkg.Lookup("f32hash")
+ sym = ir.Pkgs.Runtime.Lookup("f32hash")
case types.AFLOAT64:
- sym = Runtimepkg.Lookup("f64hash")
+ sym = ir.Pkgs.Runtime.Lookup("f64hash")
case types.ACPLX64:
- sym = Runtimepkg.Lookup("c64hash")
+ sym = ir.Pkgs.Runtime.Lookup("c64hash")
case types.ACPLX128:
- sym = Runtimepkg.Lookup("c128hash")
+ sym = ir.Pkgs.Runtime.Lookup("c128hash")
default:
// Note: the caller of hashfor ensured that this symbol
// exists and has a body by calling genhash for t.
// Find the package the receiver type appeared in. For
// anonymous receiver types (i.e., anonymous structs with
// embedded fields), use the "go" pseudo-package instead.
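+ // (For example, a method declared on an anonymous struct type has no
+ // receiver type symbol, so rsym is nil and the "go" package is used.)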
- rpkg := gopkg
+ rpkg := ir.Pkgs.Go
if rsym != nil {
rpkg = rsym.Pkg
}
// sysfunc looks up Go function name in package runtime. This function
// must follow the internal calling convention.
func sysfunc(name string) *obj.LSym {
- s := Runtimepkg.Lookup(name)
+ s := ir.Pkgs.Runtime.Lookup(name)
s.SetFunc(true)
return s.Linksym()
}
// runtime. If this is a function, it may have a special calling
// convention.
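+ // For example, ir.Syms.Udiv = sysvar("udiv") binds an assembly routine
+ // with a special ABI, while deferreturn is looked up with sysfunc
+ // because it follows the internal calling convention.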
func sysvar(name string) *obj.LSym {
- return Runtimepkg.Lookup(name).Linksym()
+ return ir.Pkgs.Runtime.Lookup(name).Linksym()
}
// isParamStackCopy reports whether this is the on-stack copy of a
var inimport bool // set during import
-var itabpkg *types.Pkg // fake pkg for itab entries
-
-var itablinkpkg *types.Pkg // fake package for runtime itab entries
-
-var Runtimepkg *types.Pkg // fake package runtime
-
-var racepkg *types.Pkg // package runtime/race
-
-var msanpkg *types.Pkg // package runtime/msan
-
-var unsafepkg *types.Pkg // package unsafe
-
-var trackpkg *types.Pkg // fake package for field tracking
-
-var mappkg *types.Pkg // fake package for map zero value
-
-var gopkg *types.Pkg // pseudo-package for method symbols on anonymous receiver types
-
var zerosize int64
var (
var thearch Arch
var (
- staticuint64s *ir.Name
- zerobase *ir.Name
-
- assertE2I,
- assertE2I2,
- assertI2I,
- assertI2I2,
- deferproc,
- deferprocStack,
- Deferreturn,
- Duffcopy,
- Duffzero,
- gcWriteBarrier,
- goschedguarded,
- growslice,
- msanread,
- msanwrite,
- msanmove,
- newobject,
- newproc,
- panicdivide,
- panicshift,
- panicdottypeE,
- panicdottypeI,
- panicnildottype,
- panicoverflow,
- raceread,
- racereadrange,
- racewrite,
- racewriterange,
- x86HasPOPCNT,
- x86HasSSE41,
- x86HasFMA,
- armHasVFPv4,
- arm64HasATOMICS,
- typedmemclr,
- typedmemmove,
- Udiv,
- writeBarrier,
- zerobaseSym *obj.LSym
-
BoundsCheckFunc [ssa.BoundsKindCount]*obj.LSym
ExtendCheckFunc [ssa.BoundsKindCount]*obj.LSym
-
- // Wasm
- WasmMove,
- WasmZero,
- WasmDiv,
- WasmTruncS,
- WasmTruncU,
- SigPanic *obj.LSym
)
// GCWriteBarrierReg maps from registers to gcWriteBarrier implementation LSyms.
}
// Don't export predeclared declarations.
- if n.Sym().Pkg == types.BuiltinPkg || n.Sym().Pkg == unsafepkg {
+ if n.Sym().Pkg == types.BuiltinPkg || n.Sym().Pkg == ir.Pkgs.Unsafe {
return
}
func (w *exportWriter) doTyp(t *types.Type) {
if t.Sym() != nil {
- if t.Sym().Pkg == types.BuiltinPkg || t.Sym().Pkg == unsafepkg {
+ if t.Sym().Pkg == types.BuiltinPkg || t.Sym().Pkg == ir.Pkgs.Unsafe {
base.Fatalf("builtin type missing from typIndex: %v", t)
}
types.BuiltinPkg.Prefix = "go.builtin" // not go%2ebuiltin
// pseudo-package, accessed by import "unsafe"
- unsafepkg = types.NewPkg("unsafe", "unsafe")
+ ir.Pkgs.Unsafe = types.NewPkg("unsafe", "unsafe")
// Pseudo-package that contains the compiler's builtin
// declarations for package runtime. These are declared in a
// separate package to avoid conflicts with package runtime's
// actual declarations, which may differ intentionally but
// insignificantly.
- Runtimepkg = types.NewPkg("go.runtime", "runtime")
- Runtimepkg.Prefix = "runtime"
+ ir.Pkgs.Runtime = types.NewPkg("go.runtime", "runtime")
+ ir.Pkgs.Runtime.Prefix = "runtime"
// pseudo-packages used in symbol tables
- itabpkg = types.NewPkg("go.itab", "go.itab")
- itabpkg.Prefix = "go.itab" // not go%2eitab
+ ir.Pkgs.Itab = types.NewPkg("go.itab", "go.itab")
+ ir.Pkgs.Itab.Prefix = "go.itab" // not go%2eitab
- itablinkpkg = types.NewPkg("go.itablink", "go.itablink")
- itablinkpkg.Prefix = "go.itablink" // not go%2eitablink
+ ir.Pkgs.Itablink = types.NewPkg("go.itablink", "go.itablink")
+ ir.Pkgs.Itablink.Prefix = "go.itablink" // not go%2eitablink
- trackpkg = types.NewPkg("go.track", "go.track")
- trackpkg.Prefix = "go.track" // not go%2etrack
+ ir.Pkgs.Track = types.NewPkg("go.track", "go.track")
+ ir.Pkgs.Track.Prefix = "go.track" // not go%2etrack
// pseudo-package used for map zero values
- mappkg = types.NewPkg("go.map", "go.map")
- mappkg.Prefix = "go.map"
+ ir.Pkgs.Map = types.NewPkg("go.map", "go.map")
+ ir.Pkgs.Map.Prefix = "go.map"
// pseudo-package used for methods with anonymous receivers
- gopkg = types.NewPkg("go", "")
+ ir.Pkgs.Go = types.NewPkg("go", "")
base.DebugSSA = ssa.PhaseOption
base.ParseFlags()
thearch.LinkArch.Init(base.Ctxt)
startProfile()
if base.Flag.Race {
- racepkg = types.NewPkg("runtime/race", "")
+ ir.Pkgs.Race = types.NewPkg("runtime/race", "")
}
if base.Flag.MSan {
- msanpkg = types.NewPkg("runtime/msan", "")
+ ir.Pkgs.Msan = types.NewPkg("runtime/msan", "")
}
if base.Flag.Race || base.Flag.MSan {
base.Flag.Cfg.Instrumenting = true
typs := runtimeTypes()
for _, d := range &runtimeDecls {
- sym := Runtimepkg.Lookup(d.name)
+ sym := ir.Pkgs.Runtime.Lookup(d.name)
typ := typs[d.typ]
switch d.tag {
case funcTag:
- importfunc(Runtimepkg, src.NoXPos, sym, typ)
+ importfunc(ir.Pkgs.Runtime, src.NoXPos, sym, typ)
case varTag:
- importvar(Runtimepkg, src.NoXPos, sym, typ)
+ importvar(ir.Pkgs.Runtime, src.NoXPos, sym, typ)
default:
base.Fatalf("unhandled declaration tag %v", d.tag)
}
}
if path_ == "unsafe" {
- return unsafepkg
+ return ir.Pkgs.Unsafe
}
if islocalname(path_) {
return
}
- if ipkg == unsafepkg {
+ if ipkg == ir.Pkgs.Unsafe {
p.importedUnsafe = true
}
if ipkg.Path == "embed" {
dumpglobls(Target.Externs[numExterns:])
if zerosize > 0 {
- zero := mappkg.Lookup("zero")
+ zero := ir.Pkgs.Map.Lookup("zero")
ggloblsym(zero.Linksym(), int32(zerosize), obj.DUPOK|obj.RODATA)
}
var load *ssa.Value
v := wbBlock.Controls[0]
for {
- if sym, ok := v.Aux.(*obj.LSym); ok && sym == writeBarrier {
+ if sym, ok := v.Aux.(*obj.LSym); ok && sym == ir.Syms.WriteBarrier {
load = v
break
}
// typedmemclr and typedmemmove are write barriers and
// deeply non-preemptible. They are unsafe points and
// hence should not have liveness maps.
- if sym, ok := v.Aux.(*ssa.AuxCall); ok && (sym.Fn == typedmemclr || sym.Fn == typedmemmove) {
+ if sym, ok := v.Aux.(*ssa.AuxCall); ok && (sym.Fn == ir.Syms.Typedmemclr || sym.Fn == ir.Syms.Typedmemmove) {
return false
}
return true
// If we are compiling the runtime package, there are two runtime packages around
- // -- localpkg and Runtimepkg. We don't want to produce import path symbols for
+ // -- localpkg and ir.Pkgs.Runtime. We don't want to produce import path symbols for
// both of them, so just produce one for localpkg.
- if base.Ctxt.Pkgpath == "runtime" && p == Runtimepkg {
+ if base.Ctxt.Pkgpath == "runtime" && p == ir.Pkgs.Runtime {
return
}
// tracksym returns the symbol for tracking use of field/method f, assumed
// to be a member of struct/interface type t.
func tracksym(t *types.Type, f *types.Field) *types.Sym {
- return trackpkg.Lookup(t.ShortString() + "." + f.Sym.Name)
+ return ir.Pkgs.Track.Lookup(t.ShortString() + "." + f.Sym.Name)
}
func typesymprefix(prefix string, t *types.Type) *types.Sym {
if t == nil || (t.IsPtr() && t.Elem() == nil) || t.IsUntyped() || !itype.IsInterface() || itype.IsEmptyInterface() {
base.Fatalf("itabname(%v, %v)", t, itype)
}
- s := itabpkg.Lookup(t.ShortString() + "," + itype.ShortString())
+ s := ir.Pkgs.Itab.Lookup(t.ShortString() + "," + itype.ShortString())
if s.Def == nil {
n := NewName(s)
n.SetType(types.Types[types.TUINT8])
dtypesym(functype(nil, []*ir.Field{anonfield(types.ErrorType)}, []*ir.Field{anonfield(types.Types[types.TSTRING])}))
// add paths for runtime and main, which 6l imports implicitly.
- dimportpath(Runtimepkg)
+ dimportpath(ir.Pkgs.Runtime)
if base.Flag.Race {
- dimportpath(racepkg)
+ dimportpath(ir.Pkgs.Race)
}
if base.Flag.MSan {
- dimportpath(msanpkg)
+ dimportpath(ir.Pkgs.Msan)
}
dimportpath(types.NewPkg("main", ""))
}
fillptrmask(t, ptrmask)
p := fmt.Sprintf("gcbits.%x", ptrmask)
- sym := Runtimepkg.Lookup(p)
+ sym := ir.Pkgs.Runtime.Lookup(p)
lsym := sym.Linksym()
if !sym.Uniq() {
sym.SetUniq(true)
if zerosize < size {
zerosize = size
}
- s := mappkg.Lookup("zero")
+ s := ir.Pkgs.Map.Lookup("zero")
if s.Def == nil {
x := NewName(s)
x.SetType(types.Types[types.TUINT8])
ssaCaches = make([]ssa.Cache, base.Flag.LowerC)
// Set up some runtime functions we'll need to call.
- assertE2I = sysfunc("assertE2I")
- assertE2I2 = sysfunc("assertE2I2")
- assertI2I = sysfunc("assertI2I")
- assertI2I2 = sysfunc("assertI2I2")
- deferproc = sysfunc("deferproc")
- deferprocStack = sysfunc("deferprocStack")
- Deferreturn = sysfunc("deferreturn")
- Duffcopy = sysfunc("duffcopy")
- Duffzero = sysfunc("duffzero")
- gcWriteBarrier = sysfunc("gcWriteBarrier")
- goschedguarded = sysfunc("goschedguarded")
- growslice = sysfunc("growslice")
- msanread = sysfunc("msanread")
- msanwrite = sysfunc("msanwrite")
- msanmove = sysfunc("msanmove")
- newobject = sysfunc("newobject")
- newproc = sysfunc("newproc")
- panicdivide = sysfunc("panicdivide")
- panicdottypeE = sysfunc("panicdottypeE")
- panicdottypeI = sysfunc("panicdottypeI")
- panicnildottype = sysfunc("panicnildottype")
- panicoverflow = sysfunc("panicoverflow")
- panicshift = sysfunc("panicshift")
- raceread = sysfunc("raceread")
- racereadrange = sysfunc("racereadrange")
- racewrite = sysfunc("racewrite")
- racewriterange = sysfunc("racewriterange")
- x86HasPOPCNT = sysvar("x86HasPOPCNT") // bool
- x86HasSSE41 = sysvar("x86HasSSE41") // bool
- x86HasFMA = sysvar("x86HasFMA") // bool
- armHasVFPv4 = sysvar("armHasVFPv4") // bool
- arm64HasATOMICS = sysvar("arm64HasATOMICS") // bool
- typedmemclr = sysfunc("typedmemclr")
- typedmemmove = sysfunc("typedmemmove")
- Udiv = sysvar("udiv") // asm func with special ABI
- writeBarrier = sysvar("writeBarrier") // struct { bool; ... }
- zerobaseSym = sysvar("zerobase")
+ ir.Syms.AssertE2I = sysfunc("assertE2I")
+ ir.Syms.AssertE2I2 = sysfunc("assertE2I2")
+ ir.Syms.AssertI2I = sysfunc("assertI2I")
+ ir.Syms.AssertI2I2 = sysfunc("assertI2I2")
+ ir.Syms.Deferproc = sysfunc("deferproc")
+ ir.Syms.DeferprocStack = sysfunc("deferprocStack")
+ ir.Syms.Deferreturn = sysfunc("deferreturn")
+ ir.Syms.Duffcopy = sysfunc("duffcopy")
+ ir.Syms.Duffzero = sysfunc("duffzero")
+ ir.Syms.GCWriteBarrier = sysfunc("gcWriteBarrier")
+ ir.Syms.Goschedguarded = sysfunc("goschedguarded")
+ ir.Syms.Growslice = sysfunc("growslice")
+ ir.Syms.Msanread = sysfunc("msanread")
+ ir.Syms.Msanwrite = sysfunc("msanwrite")
+ ir.Syms.Msanmove = sysfunc("msanmove")
+ ir.Syms.Newobject = sysfunc("newobject")
+ ir.Syms.Newproc = sysfunc("newproc")
+ ir.Syms.Panicdivide = sysfunc("panicdivide")
+ ir.Syms.PanicdottypeE = sysfunc("panicdottypeE")
+ ir.Syms.PanicdottypeI = sysfunc("panicdottypeI")
+ ir.Syms.Panicnildottype = sysfunc("panicnildottype")
+ ir.Syms.Panicoverflow = sysfunc("panicoverflow")
+ ir.Syms.Panicshift = sysfunc("panicshift")
+ ir.Syms.Raceread = sysfunc("raceread")
+ ir.Syms.Racereadrange = sysfunc("racereadrange")
+ ir.Syms.Racewrite = sysfunc("racewrite")
+ ir.Syms.Racewriterange = sysfunc("racewriterange")
+ ir.Syms.X86HasPOPCNT = sysvar("x86HasPOPCNT") // bool
+ ir.Syms.X86HasSSE41 = sysvar("x86HasSSE41") // bool
+ ir.Syms.X86HasFMA = sysvar("x86HasFMA") // bool
+ ir.Syms.ARMHasVFPv4 = sysvar("armHasVFPv4") // bool
+ ir.Syms.ARM64HasATOMICS = sysvar("arm64HasATOMICS") // bool
+ ir.Syms.Typedmemclr = sysfunc("typedmemclr")
+ ir.Syms.Typedmemmove = sysfunc("typedmemmove")
+ ir.Syms.Udiv = sysvar("udiv") // asm func with special ABI
+ ir.Syms.WriteBarrier = sysvar("writeBarrier") // struct { bool; ... }
+ ir.Syms.Zerobase = sysvar("zerobase")
// asm funcs with special ABI
if thearch.LinkArch.Name == "amd64" {
}
// Wasm (all asm funcs with special ABIs)
- WasmMove = sysvar("wasmMove")
- WasmZero = sysvar("wasmZero")
- WasmDiv = sysvar("wasmDiv")
- WasmTruncS = sysvar("wasmTruncS")
- WasmTruncU = sysvar("wasmTruncU")
- SigPanic = sysfunc("sigpanic")
+ ir.Syms.WasmMove = sysvar("wasmMove")
+ ir.Syms.WasmZero = sysvar("wasmZero")
+ ir.Syms.WasmDiv = sysvar("wasmDiv")
+ ir.Syms.WasmTruncS = sysvar("wasmTruncS")
+ ir.Syms.WasmTruncU = sysvar("wasmTruncU")
+ ir.Syms.SigPanic = sysfunc("sigpanic")
}
// getParam returns the Field of ith param of node n (which is a
if base.Flag.MSan {
switch kind {
case instrumentRead:
- fn = msanread
+ fn = ir.Syms.Msanread
case instrumentWrite:
- fn = msanwrite
+ fn = ir.Syms.Msanwrite
case instrumentMove:
- fn = msanmove
+ fn = ir.Syms.Msanmove
default:
panic("unreachable")
}
// composites with only one element don't have subobjects, though.
switch kind {
case instrumentRead:
- fn = racereadrange
+ fn = ir.Syms.Racereadrange
case instrumentWrite:
- fn = racewriterange
+ fn = ir.Syms.Racewriterange
default:
panic("unreachable")
}
// address, as any write must write the first byte.
switch kind {
case instrumentRead:
- fn = raceread
+ fn = ir.Syms.Raceread
case instrumentWrite:
- fn = racewrite
+ fn = ir.Syms.Racewrite
default:
panic("unreachable")
}
s.callResult(n, callNormal)
if n.Op() == ir.OCALLFUNC && n.X.Op() == ir.ONAME && n.X.(*ir.Name).Class_ == ir.PFUNC {
if fn := n.X.Sym().Name; base.Flag.CompilingRuntime && fn == "throw" ||
- n.X.Sym().Pkg == Runtimepkg && (fn == "throwinit" || fn == "gopanic" || fn == "panicwrap" || fn == "block" || fn == "panicmakeslicelen" || fn == "panicmakeslicecap") {
+ n.X.Sym().Pkg == ir.Pkgs.Runtime && (fn == "throwinit" || fn == "gopanic" || fn == "panicwrap" || fn == "block" || fn == "panicmakeslicelen" || fn == "panicmakeslicecap") {
m := s.mem()
b := s.endBlock()
b.Kind = ssa.BlockExit
}
s.openDeferExit()
} else {
- s.rtcall(Deferreturn, true, nil)
+ s.rtcall(ir.Syms.Deferreturn, true, nil)
}
}
bt := b.Type
if bt.IsSigned() {
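+ // The spec requires a run-time panic for negative shift counts, so
+ // signed counts are first checked against zero.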
cmp := s.newValue2(s.ssaOp(ir.OLE, bt), types.Types[types.TBOOL], s.zeroVal(bt), b)
- s.check(cmp, panicshift)
+ s.check(cmp, ir.Syms.Panicshift)
bt = bt.ToUnsigned()
}
return s.newValue2(s.ssaShiftOp(n.Op(), n.Type(), bt), a.Type, a, b)
case ir.ONEWOBJ:
n := n.(*ir.UnaryExpr)
if n.Type().Elem().Size() == 0 {
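+ // All zero-sized allocations share the address of runtime.zerobase,
+ // so no allocation call is needed.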
- return s.newValue1A(ssa.OpAddr, n.Type(), zerobaseSym, s.sb)
+ return s.newValue1A(ssa.OpAddr, n.Type(), ir.Syms.Zerobase, s.sb)
}
typ := s.expr(n.X)
- vv := s.rtcall(newobject, true, []*types.Type{n.Type()}, typ)
+ vv := s.rtcall(ir.Syms.Newobject, true, []*types.Type{n.Type()}, typ)
return vv[0]
default:
// Call growslice
s.startBlock(grow)
taddr := s.expr(n.X)
- r := s.rtcall(growslice, true, []*types.Type{pt, types.Types[types.TINT], types.Types[types.TINT]}, taddr, p, l, c, nl)
+ r := s.rtcall(ir.Syms.Growslice, true, []*types.Type{pt, types.Types[types.TINT], types.Types[types.TINT]}, taddr, p, l, c, nl)
if inplace {
if sn.Op() == ir.ONAME {
return func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
// The target's atomics support is detected dynamically at run time.
- addr := s.entryNewValue1A(ssa.OpAddr, types.Types[types.TBOOL].PtrTo(), arm64HasATOMICS, s.sb)
+ addr := s.entryNewValue1A(ssa.OpAddr, types.Types[types.TBOOL].PtrTo(), ir.Syms.ARM64HasATOMICS, s.sb)
v := s.load(types.Types[types.TBOOL], addr)
b := s.endBlock()
b.Kind = ssa.BlockIf
s.vars[n] = s.callResult(n, callNormal) // types.Types[TFLOAT64]
return s.variable(n, types.Types[types.TFLOAT64])
}
- v := s.entryNewValue0A(ssa.OpHasCPUFeature, types.Types[types.TBOOL], x86HasFMA)
+ v := s.entryNewValue0A(ssa.OpHasCPUFeature, types.Types[types.TBOOL], ir.Syms.X86HasFMA)
b := s.endBlock()
b.Kind = ssa.BlockIf
b.SetControl(v)
s.vars[n] = s.callResult(n, callNormal) // types.Types[TFLOAT64]
return s.variable(n, types.Types[types.TFLOAT64])
}
- addr := s.entryNewValue1A(ssa.OpAddr, types.Types[types.TBOOL].PtrTo(), armHasVFPv4, s.sb)
+ addr := s.entryNewValue1A(ssa.OpAddr, types.Types[types.TBOOL].PtrTo(), ir.Syms.ARMHasVFPv4, s.sb)
v := s.load(types.Types[types.TBOOL], addr)
b := s.endBlock()
b.Kind = ssa.BlockIf
makeRoundAMD64 := func(op ssa.Op) func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
return func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
- v := s.entryNewValue0A(ssa.OpHasCPUFeature, types.Types[types.TBOOL], x86HasSSE41)
+ v := s.entryNewValue0A(ssa.OpHasCPUFeature, types.Types[types.TBOOL], ir.Syms.X86HasSSE41)
b := s.endBlock()
b.Kind = ssa.BlockIf
b.SetControl(v)
makeOnesCountAMD64 := func(op64 ssa.Op, op32 ssa.Op) func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
return func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
- v := s.entryNewValue0A(ssa.OpHasCPUFeature, types.Types[types.TBOOL], x86HasPOPCNT)
+ v := s.entryNewValue0A(ssa.OpHasCPUFeature, types.Types[types.TBOOL], ir.Syms.X86HasPOPCNT)
b := s.endBlock()
b.Kind = ssa.BlockIf
b.SetControl(v)
func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
// check for divide-by-zero/overflow and panic with appropriate message
cmpZero := s.newValue2(s.ssaOp(ir.ONE, types.Types[types.TUINT64]), types.Types[types.TBOOL], args[2], s.zeroVal(types.Types[types.TUINT64]))
- s.check(cmpZero, panicdivide)
+ s.check(cmpZero, ir.Syms.Panicdivide)
cmpOverflow := s.newValue2(s.ssaOp(ir.OLT, types.Types[types.TUINT64]), types.Types[types.TBOOL], args[0], args[2])
- s.check(cmpOverflow, panicoverflow)
+ s.check(cmpOverflow, ir.Syms.Panicoverflow)
return s.newValue3(ssa.OpDiv128u, types.NewTuple(types.Types[types.TUINT64], types.Types[types.TUINT64]), args[0], args[1], args[2])
},
sys.AMD64)
// Call runtime.deferprocStack with pointer to _defer record.
ACArgs = append(ACArgs, ssa.Param{Type: types.Types[types.TUINTPTR], Offset: int32(base.Ctxt.FixedFrameSize())})
- aux := ssa.StaticAuxCall(deferprocStack, ACArgs, ACResults)
+ aux := ssa.StaticAuxCall(ir.Syms.DeferprocStack, ACArgs, ACResults)
if testLateExpansion {
callArgs = append(callArgs, addr, s.mem())
call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
// call target
switch {
case k == callDefer:
- aux := ssa.StaticAuxCall(deferproc, ACArgs, ACResults)
+ aux := ssa.StaticAuxCall(ir.Syms.Deferproc, ACArgs, ACResults)
if testLateExpansion {
call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
call.AddArgs(callArgs...)
call = s.newValue1A(ssa.OpStaticCall, types.TypeMem, aux, s.mem())
}
case k == callGo:
- aux := ssa.StaticAuxCall(newproc, ACArgs, ACResults)
+ aux := ssa.StaticAuxCall(ir.Syms.Newproc, ACArgs, ACResults)
if testLateExpansion {
call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
call.AddArgs(callArgs...)
if needcheck {
// do a size-appropriate check for zero
cmp := s.newValue2(s.ssaOp(ir.ONE, n.Type()), types.Types[types.TBOOL], b, s.zeroVal(n.Type()))
- s.check(cmp, panicdivide)
+ s.check(cmp, ir.Syms.Panicdivide)
}
return s.newValue2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
}
if !commaok {
// On failure, panic by calling panicnildottype.
s.startBlock(bFail)
- s.rtcall(panicnildottype, false, nil, target)
+ s.rtcall(ir.Syms.Panicnildottype, false, nil, target)
// On success, return (perhaps modified) input interface.
s.startBlock(bOk)
}
if n.X.Type().IsEmptyInterface() {
if commaok {
- call := s.rtcall(assertE2I2, true, []*types.Type{n.Type(), types.Types[types.TBOOL]}, target, iface)
+ call := s.rtcall(ir.Syms.AssertE2I2, true, []*types.Type{n.Type(), types.Types[types.TBOOL]}, target, iface)
return call[0], call[1]
}
- return s.rtcall(assertE2I, true, []*types.Type{n.Type()}, target, iface)[0], nil
+ return s.rtcall(ir.Syms.AssertE2I, true, []*types.Type{n.Type()}, target, iface)[0], nil
}
if commaok {
- call := s.rtcall(assertI2I2, true, []*types.Type{n.Type(), types.Types[types.TBOOL]}, target, iface)
+ call := s.rtcall(ir.Syms.AssertI2I2, true, []*types.Type{n.Type(), types.Types[types.TBOOL]}, target, iface)
return call[0], call[1]
}
- return s.rtcall(assertI2I, true, []*types.Type{n.Type()}, target, iface)[0], nil
+ return s.rtcall(ir.Syms.AssertI2I, true, []*types.Type{n.Type()}, target, iface)[0], nil
}
if base.Debug.TypeAssert > 0 {
s.startBlock(bFail)
taddr := s.expr(n.Ntype.(*ir.AddrExpr).Alloc)
if n.X.Type().IsEmptyInterface() {
- s.rtcall(panicdottypeE, false, nil, itab, target, taddr)
+ s.rtcall(ir.Syms.PanicdottypeE, false, nil, itab, target, taddr)
} else {
- s.rtcall(panicdottypeI, false, nil, itab, target, taddr)
+ s.rtcall(ir.Syms.PanicdottypeI, false, nil, itab, target, taddr)
}
// on success, return data from interface
// deferreturn and a return. This will be used during panic
// recovery to unwind the stack and return back to the runtime.
s.pp.nextLive = s.livenessMap.deferreturn
- gencallret(pp, Deferreturn)
+ gencallret(pp, ir.Syms.Deferreturn)
}
if inlMarks != nil {
idx := s.livenessMap.Get(v)
if !idx.StackMapValid() {
// See Liveness.hasStackMap.
- if sym, ok := v.Aux.(*ssa.AuxCall); !ok || !(sym.Fn == typedmemclr || sym.Fn == typedmemmove) {
+ if sym, ok := v.Aux.(*ssa.AuxCall); !ok || !(sym.Fn == ir.Syms.Typedmemclr || sym.Fn == ir.Syms.Typedmemmove) {
base.Fatalf("missing stack map index for %v", v.LongString())
}
}
call, ok := v.Aux.(*ssa.AuxCall)
- if ok && call.Fn == Deferreturn {
+ if ok && call.Fn == ir.Syms.Deferreturn {
// Deferred calls will appear to be returning to
// the CALL deferreturn(SB) that we are about to emit.
// However, the stack trace code will show the line
func (e *ssafn) Syslook(name string) *obj.LSym {
switch name {
case "goschedguarded":
- return goschedguarded
+ return ir.Syms.Goschedguarded
case "writeBarrier":
- return writeBarrier
+ return ir.Syms.WriteBarrier
case "gcWriteBarrier":
- return gcWriteBarrier
+ return ir.Syms.GCWriteBarrier
case "typedmemmove":
- return typedmemmove
+ return ir.Syms.Typedmemmove
case "typedmemclr":
- return typedmemclr
+ return ir.Syms.Typedmemclr
}
e.Fatalf(src.NoXPos, "unknown Syslook func %v", name)
return nil
}
func syslook(name string) *ir.Name {
- s := Runtimepkg.Lookup(name)
+ s := ir.Pkgs.Runtime.Lookup(name)
if s == nil || s.Def == nil {
base.Fatalf("syslook: can't find runtime.%s", name)
}
}
func hashmem(t *types.Type) ir.Node {
- sym := Runtimepkg.Lookup("memhash")
+ sym := ir.Pkgs.Runtime.Lookup("memhash")
n := NewName(sym)
setNodeNameFunc(n)
s.Def = n
dowidth(types.ErrorType)
- types.Types[types.TUNSAFEPTR] = defBasic(types.TUNSAFEPTR, unsafepkg, "Pointer")
+ types.Types[types.TUNSAFEPTR] = defBasic(types.TUNSAFEPTR, ir.Pkgs.Unsafe, "Pointer")
// simple aliases
types.SimType[types.TMAP] = types.TPTR
}
for _, s := range &unsafeFuncs {
- s2 := unsafepkg.Lookup(s.name)
+ s2 := ir.Pkgs.Unsafe.Lookup(s.name)
def := NewName(s2)
def.BuiltinOp = s.op
s2.Def = def
return l
}
- if staticuint64s == nil {
- staticuint64s = NewName(Runtimepkg.Lookup("staticuint64s"))
- staticuint64s.Class_ = ir.PEXTERN
+ if ir.Names.Staticuint64s == nil {
+ ir.Names.Staticuint64s = NewName(ir.Pkgs.Runtime.Lookup("staticuint64s"))
+ ir.Names.Staticuint64s.Class_ = ir.PEXTERN
// The actual type is [256]uint64, but we use [256*8]uint8 so we can address
// individual bytes.
- staticuint64s.SetType(types.NewArray(types.Types[types.TUINT8], 256*8))
- zerobase = NewName(Runtimepkg.Lookup("zerobase"))
- zerobase.Class_ = ir.PEXTERN
- zerobase.SetType(types.Types[types.TUINTPTR])
+ ir.Names.Staticuint64s.SetType(types.NewArray(types.Types[types.TUINT8], 256*8))
+ ir.Names.Zerobase = NewName(ir.Pkgs.Runtime.Lookup("zerobase"))
+ ir.Names.Zerobase.Class_ = ir.PEXTERN
+ ir.Names.Zerobase.SetType(types.Types[types.TUINTPTR])
}
// Optimize convT2{E,I} for many cases in which T is not pointer-shaped,
case fromType.Size() == 0:
// n.Left is zero-sized. Use zerobase.
cheapexpr(n.X, init) // Evaluate n.Left for side-effects. See issue 19246.
- value = zerobase
+ value = ir.Names.Zerobase
case fromType.IsBoolean() || (fromType.Size() == 1 && fromType.IsInteger()):
// n.Left is a bool/byte. Use staticuint64s[n.Left * 8] on little-endian
// and staticuint64s[n.Left * 8 + 7] on big-endian.
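+ // staticuint64s is typed as [256*8]uint8 (see above), so the byte
+ // holding value v is at offset v*8 (little-endian) or v*8+7
+ // (big-endian) within its uint64 entry.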
if thearch.LinkArch.ByteOrder == binary.BigEndian {
index = ir.NewBinaryExpr(base.Pos, ir.OADD, index, nodintconst(7))
}
- xe := ir.NewIndexExpr(base.Pos, staticuint64s, index)
+ xe := ir.NewIndexExpr(base.Pos, ir.Names.Staticuint64s, index)
xe.SetBounded(true)
value = xe
case n.X.Op() == ir.ONAME && n.X.(*ir.Name).Class_ == ir.PEXTERN && n.X.(*ir.Name).Readonly():
--- /dev/null
+// Copyright 2020 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ir
+
+import (
+ "cmd/compile/internal/types"
+ "cmd/internal/obj"
+)
+
+// Names holds known names.
+var Names struct {
+ Staticuint64s *Name
+ Zerobase *Name
+}
+
+// Syms holds known symbols.
+var Syms struct {
+ AssertE2I *obj.LSym
+ AssertE2I2 *obj.LSym
+ AssertI2I *obj.LSym
+ AssertI2I2 *obj.LSym
+ Deferproc *obj.LSym
+ DeferprocStack *obj.LSym
+ Deferreturn *obj.LSym
+ Duffcopy *obj.LSym
+ Duffzero *obj.LSym
+ GCWriteBarrier *obj.LSym
+ Goschedguarded *obj.LSym
+ Growslice *obj.LSym
+ Msanread *obj.LSym
+ Msanwrite *obj.LSym
+ Msanmove *obj.LSym
+ Newobject *obj.LSym
+ Newproc *obj.LSym
+ Panicdivide *obj.LSym
+ Panicshift *obj.LSym
+ PanicdottypeE *obj.LSym
+ PanicdottypeI *obj.LSym
+ Panicnildottype *obj.LSym
+ Panicoverflow *obj.LSym
+ Raceread *obj.LSym
+ Racereadrange *obj.LSym
+ Racewrite *obj.LSym
+ Racewriterange *obj.LSym
+ SigPanic *obj.LSym // used by wasm
+ Typedmemclr *obj.LSym
+ Typedmemmove *obj.LSym
+ Udiv *obj.LSym
+ WriteBarrier *obj.LSym
+ Zerobase *obj.LSym
+ ARM64HasATOMICS *obj.LSym
+ ARMHasVFPv4 *obj.LSym
+ X86HasFMA *obj.LSym
+ X86HasPOPCNT *obj.LSym
+ X86HasSSE41 *obj.LSym
+ // Wasm (asm funcs with special ABIs)
+ WasmDiv *obj.LSym
+ WasmMove *obj.LSym
+ WasmZero *obj.LSym
+ WasmTruncS *obj.LSym
+ WasmTruncU *obj.LSym
+}
+
+// Pkgs holds known packages.
+var Pkgs struct {
+ Go *types.Pkg
+ Itab *types.Pkg
+ Itablink *types.Pkg
+ Map *types.Pkg
+ Msan *types.Pkg
+ Race *types.Pkg
+ Runtime *types.Pkg
+ Track *types.Pkg
+ Unsafe *types.Pkg
+}
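+
+// These tables are populated during compiler setup; a representative
+// sketch (see the gc changes in this CL):
+//
+//	ir.Pkgs.Runtime = types.NewPkg("go.runtime", "runtime")
+//	ir.Syms.Deferreturn = sysfunc("deferreturn")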
import (
"cmd/compile/internal/gc"
+ "cmd/compile/internal/ir"
"cmd/internal/obj"
"cmd/internal/obj/mips"
)
p.Reg = mips.REGSP
p = pp.Appendpp(p, obj.ADUFFZERO, obj.TYPE_NONE, 0, 0, obj.TYPE_MEM, 0, 0)
p.To.Name = obj.NAME_EXTERN
- p.To.Sym = gc.Duffzero
+ p.To.Sym = ir.Syms.Duffzero
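+ // duffzero on mips64 is 128 8-byte MOVV+ADDV blocks, each zeroing one
+ // 8-byte word, hence the 8*(128-cnt/Widthptr) entry offset.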
p.To.Offset = 8 * (128 - cnt/int64(gc.Widthptr))
} else {
// ADDV $(8+frame+lo-8), SP, r1
p = s.Prog(obj.ADUFFZERO)
p.To.Type = obj.TYPE_MEM
p.To.Name = obj.NAME_EXTERN
- p.To.Sym = gc.Duffzero
+ p.To.Sym = ir.Syms.Duffzero
p.To.Offset = v.AuxInt
case ssa.OpMIPS64LoweredZero:
// SUBV $8, R1
p := s.Prog(obj.ADUFFCOPY)
p.To.Type = obj.TYPE_MEM
p.To.Name = obj.NAME_EXTERN
- p.To.Sym = gc.Duffcopy
+ p.To.Sym = ir.Syms.Duffcopy
p.To.Offset = v.AuxInt
case ssa.OpMIPS64LoweredMove:
// SUBV $8, R1
import (
"cmd/compile/internal/base"
"cmd/compile/internal/gc"
+ "cmd/compile/internal/ir"
"cmd/internal/obj"
"cmd/internal/obj/ppc64"
)
p.Reg = ppc64.REGSP
p = pp.Appendpp(p, obj.ADUFFZERO, obj.TYPE_NONE, 0, 0, obj.TYPE_MEM, 0, 0)
p.To.Name = obj.NAME_EXTERN
- p.To.Sym = gc.Duffzero
+ p.To.Sym = ir.Syms.Duffzero
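+ // duffzero on ppc64 is 128 4-byte MOVDU instructions, each zeroing one
+ // 8-byte word, hence the 4*(128-cnt/Widthptr) entry offset.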
p.To.Offset = 4 * (128 - cnt/int64(gc.Widthptr))
} else {
p = pp.Appendpp(p, ppc64.AMOVD, obj.TYPE_CONST, 0, base.Ctxt.FixedFrameSize()+off-8, obj.TYPE_REG, ppc64.REGTMP, 0)
import (
"cmd/compile/internal/base"
"cmd/compile/internal/gc"
+ "cmd/compile/internal/ir"
"cmd/internal/obj"
"cmd/internal/obj/riscv"
)
p.Reg = riscv.REG_SP
p = pp.Appendpp(p, obj.ADUFFZERO, obj.TYPE_NONE, 0, 0, obj.TYPE_MEM, 0, 0)
p.To.Name = obj.NAME_EXTERN
- p.To.Sym = gc.Duffzero
+ p.To.Sym = ir.Syms.Duffzero
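+ // duffzero on riscv64 is 128 8-byte MOV+ADD blocks, each zeroing one
+ // 8-byte word, hence the 8*(128-cnt/Widthptr) entry offset.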
p.To.Offset = 8 * (128 - cnt/int64(gc.Widthptr))
return p
}
p := s.Prog(obj.ADUFFZERO)
p.To.Type = obj.TYPE_MEM
p.To.Name = obj.NAME_EXTERN
- p.To.Sym = gc.Duffzero
+ p.To.Sym = ir.Syms.Duffzero
p.To.Offset = v.AuxInt
case ssa.OpRISCV64DUFFCOPY:
p := s.Prog(obj.ADUFFCOPY)
p.To.Type = obj.TYPE_MEM
p.To.Name = obj.NAME_EXTERN
- p.To.Sym = gc.Duffcopy
+ p.To.Sym = ir.Syms.Duffcopy
p.To.Offset = v.AuxInt
default:
switch v.Op {
case ssa.OpWasmLoweredStaticCall, ssa.OpWasmLoweredClosureCall, ssa.OpWasmLoweredInterCall:
s.PrepareCall(v)
- if call, ok := v.Aux.(*ssa.AuxCall); ok && call.Fn == gc.Deferreturn {
+ if call, ok := v.Aux.(*ssa.AuxCall); ok && call.Fn == ir.Syms.Deferreturn {
// add a resume point before call to deferreturn so it can be called again via jmpdefer
s.Prog(wasm.ARESUMEPOINT)
}
getValue32(s, v.Args[1])
i32Const(s, int32(v.AuxInt))
p := s.Prog(wasm.ACall)
- p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: gc.WasmMove}
+ p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: ir.Syms.WasmMove}
case ssa.OpWasmLoweredZero:
getValue32(s, v.Args[0])
i32Const(s, int32(v.AuxInt))
p := s.Prog(wasm.ACall)
- p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: gc.WasmZero}
+ p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: ir.Syms.WasmZero}
case ssa.OpWasmLoweredNilCheck:
getValue64(s, v.Args[0])
s.Prog(wasm.AI64Eqz)
s.Prog(wasm.AIf)
p := s.Prog(wasm.ACALLNORESUME)
- p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: gc.SigPanic}
+ p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: ir.Syms.SigPanic}
s.Prog(wasm.AEnd)
if logopt.Enabled() {
logopt.LogOpt(v.Pos, "nilcheck", "genssa", v.Block.Func.Name)
if v.Type.Size() == 8 {
// Division of int64 needs helper function wasmDiv to handle the MinInt64 / -1 case.
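+ // (Wasm's native i64.div_s instruction traps on that overflow.)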
p := s.Prog(wasm.ACall)
- p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: gc.WasmDiv}
+ p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: ir.Syms.WasmDiv}
break
}
s.Prog(wasm.AI64DivS)
s.Prog(wasm.AF64PromoteF32)
}
p := s.Prog(wasm.ACall)
- p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: gc.WasmTruncS}
+ p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: ir.Syms.WasmTruncS}
}
case ssa.OpWasmI64TruncSatF32U, ssa.OpWasmI64TruncSatF64U:
s.Prog(wasm.AF64PromoteF32)
}
p := s.Prog(wasm.ACall)
- p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: gc.WasmTruncU}
+ p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: ir.Syms.WasmTruncU}
}
case ssa.OpWasmF32DemoteF64:
import (
"cmd/compile/internal/gc"
+ "cmd/compile/internal/ir"
"cmd/internal/obj"
"cmd/internal/obj/x86"
)
} else if cnt <= int64(128*gc.Widthreg) {
p = pp.Appendpp(p, x86.ALEAL, obj.TYPE_MEM, x86.REG_SP, off, obj.TYPE_REG, x86.REG_DI, 0)
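+ // duffzero on 386 is 128 single-byte STOSL instructions, each zeroing
+ // one 4-byte word, hence the 1-byte stride in the entry offset.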
p = pp.Appendpp(p, obj.ADUFFZERO, obj.TYPE_NONE, 0, 0, obj.TYPE_ADDR, 0, 1*(128-cnt/int64(gc.Widthreg)))
- p.To.Sym = gc.Duffzero
+ p.To.Sym = ir.Syms.Duffzero
} else {
p = pp.Appendpp(p, x86.AMOVL, obj.TYPE_CONST, 0, cnt/int64(gc.Widthreg), obj.TYPE_REG, x86.REG_CX, 0)
p = pp.Appendpp(p, x86.ALEAL, obj.TYPE_MEM, x86.REG_SP, off, obj.TYPE_REG, x86.REG_DI, 0)
"cmd/compile/internal/base"
"cmd/compile/internal/gc"
+ "cmd/compile/internal/ir"
"cmd/compile/internal/logopt"
"cmd/compile/internal/ssa"
"cmd/compile/internal/types"
case ssa.Op386DUFFZERO:
p := s.Prog(obj.ADUFFZERO)
p.To.Type = obj.TYPE_ADDR
- p.To.Sym = gc.Duffzero
+ p.To.Sym = ir.Syms.Duffzero
p.To.Offset = v.AuxInt
case ssa.Op386DUFFCOPY:
p := s.Prog(obj.ADUFFCOPY)
p.To.Type = obj.TYPE_ADDR
- p.To.Sym = gc.Duffcopy
+ p.To.Sym = ir.Syms.Duffcopy
p.To.Offset = v.AuxInt
case ssa.OpCopy: // TODO: use MOVLreg for reg->reg copies instead of OpCopy?