}
ctxt.LineHist.TrimPathPrefix = *flags.TrimPath
ctxt.Flag_dynlink = *flags.Dynlink
- if *flags.Shared || *flags.Dynlink {
- ctxt.Flag_shared = 1
- }
+ ctxt.Flag_shared = *flags.Shared || *flags.Dynlink
ctxt.Bso = bufio.NewWriter(os.Stdout)
defer ctxt.Bso.Flush()
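The whole change follows this one mechanical pattern: an int32 flag that was set to 1 inside a conditional becomes a bool assigned straight from the condition, and every later "!= 0" or "== 0" test collapses to the bare value or its negation. A minimal, standalone sketch of the pattern, using hypothetical names (config, setFlags) rather than anything from this change:

	package main

	import "fmt"

	type config struct {
		FlagShared bool // previously an int32 holding 0 or 1
	}

	// setFlags mirrors the simplification above: -dynlink implies
	// position-independent code, so the flag is the OR of both options.
	func setFlags(cfg *config, shared, dynlink bool) {
		cfg.FlagShared = shared || dynlink
	}

	func main() {
		var cfg config
		setFlags(&cfg, false, true)
		if cfg.FlagShared { // was: if cfg.FlagShared != 0
			fmt.Println("generating position-independent code")
		}
	}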
// If the MOVD is not needed, insert a hardware NOP
// so that the same number of instructions are used
// on ppc64 in both shared and non-shared modes.
- if Ctxt.Flag_shared != 0 {
+ if Ctxt.Flag_shared {
p := Thearch.Gins(ppc64.AMOVD, nil, nil)
p.From.Type = obj.TYPE_MEM
p.From.Offset = 24
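The offset 24 in this hunk is the TOC-pointer save slot of the ppc64 ELFv2 calling convention: a call through a function pointer may land in another module with a different TOC, so the caller keeps r2 at 24(SP) and reloads it after the call; per the comment above, non-shared builds insert a hardware NOP instead so both modes emit the same number of instructions. A hedged sketch of the reload being built here; the fields past the offset are assumptions based on that convention, not lines from this diff:

	// Reload the TOC pointer after an indirect call: MOVD 24(R1), R2.
	p := Thearch.Gins(ppc64.AMOVD, nil, nil)
	p.From.Type = obj.TYPE_MEM
	p.From.Reg = ppc64.REGSP // assumption: base register is SP (R1)
	p.From.Offset = 24       // ELFv2 TOC save slot
	p.To.Type = obj.TYPE_REG
	p.To.Reg = ppc64.REG_R2 // assumption: destination is the TOC register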
flag.BoolVar(&ssaEnabled, "ssa", true, "use SSA backend to generate code")
obj.Flagparse(usage)
if flag_dynlink {
flag_shared = true
}
- if flag_shared {
- // TODO(mdempsky): Change Flag_shared to bool.
- Ctxt.Flag_shared = 1
- }
+ Ctxt.Flag_shared = flag_shared
Ctxt.Flag_dynlink = flag_dynlink
Ctxt.Flag_optimize = Debug['N'] == 0
)
func betypeinit() {
- if gc.Ctxt.Flag_shared != 0 {
+ if gc.Ctxt.Flag_shared {
gc.Thearch.ReservedRegs = append(gc.Thearch.ReservedRegs, ppc64.REG_R2)
gc.Thearch.ReservedRegs = append(gc.Thearch.ReservedRegs, ppc64.REG_R12)
}
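Reserving R2 and R12 here lines up with the excludedregs hunk below: under -shared, R2 must hold the current TOC pointer at all times, and R12 must hold the target address at every call through a function pointer (the ELFv2 global entry point of the callee derives its own TOC from R12), so neither register can be handed to the allocator.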
case obj.ACALL:
if p.To.Type == obj.TYPE_REG && p.To.Reg != ppc64.REG_CTR {
// Allow front end to emit CALL REG, and rewrite into MOV REG, CTR; CALL CTR.
- if gc.Ctxt.Flag_shared != 0 {
+ if gc.Ctxt.Flag_shared {
// Make sure function pointer is in R12 as well when
// compiling Go into PIC.
// TODO(mwhudson): it would obviously be better to
p.To.Type = obj.TYPE_REG
p.To.Reg = ppc64.REG_CTR
- if gc.Ctxt.Flag_shared != 0 {
+ if gc.Ctxt.Flag_shared {
// When compiling Go into PIC, the function we just
// called via pointer might have been implemented in
// a separate module and so overwritten the TOC
// Exclude registers with fixed functions
regbits := 1<<0 | RtoB(ppc64.REGSP) | RtoB(ppc64.REGG) | RtoB(ppc64.REGTLS) | RtoB(ppc64.REGTMP)
- if gc.Ctxt.Flag_shared != 0 {
+ if gc.Ctxt.Flag_shared {
// When compiling Go into PIC, R2 is reserved to be the TOC pointer
// and R12 so that calls via function pointer can stomp on it.
regbits |= RtoB(ppc64.REG_R2)
}
func excludedregs() uint64 {
- if gc.Ctxt.Flag_shared != 0 {
+ if gc.Ctxt.Flag_shared {
return RtoB(x86.REG_SP) | RtoB(x86.REG_CX)
} else {
return RtoB(x86.REG_SP)
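The 386 case excludes CX for the same reason: as the obj6.go hunks at the end of this change show, 32-bit position-independent rewriting (rewriteToPcrel and the GOT-based addressing paths) claims CX as its base register, so it cannot be available for allocation when -shared is set.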
t.To.Type = a.Type
t.To.Name = a.Name
- if ctxt.Flag_shared != 0 && t.To.Sym != nil {
+ if ctxt.Flag_shared && t.To.Sym != nil {
t.Rel = p
}
ctxt.Instoffset = 0 // s.b. unused but just in case
if a.Sym.Type == obj.STLSBSS {
- if ctxt.Flag_shared != 0 {
+ if ctxt.Flag_shared {
return C_TLS_IE
} else {
return C_TLS_LE
}
for n = 0; optab[n].as != obj.AXXX; n++ {
if optab[n].flag&LPCREL != 0 {
- if ctxt.Flag_shared != 0 {
+ if ctxt.Flag_shared {
optab[n].size += int8(optab[n].pcrelsiz)
} else {
optab[n].flag &^= LPCREL
rel.Sym = p.To.Sym
rel.Add = p.To.Offset
- if ctxt.Flag_shared != 0 {
+ if ctxt.Flag_shared {
if p.To.Name == obj.NAME_GOTREF {
rel.Type = obj.R_GOTPCREL
} else {
ctxt.Instoffset = a.Offset
if a.Sym != nil { // use relocation
if a.Sym.Type == obj.STLSBSS {
- if ctxt.Flag_shared != 0 {
+ if ctxt.Flag_shared {
return C_TLS_IE
} else {
return C_TLS_LE
Debugvlog int32
Debugdivmod int32
Debugpcln int32
- Flag_shared int32
+ Flag_shared bool
Flag_dynlink bool
Flag_optimize bool
Bso *bufio.Writer
ctxt.Instoffset = a.Offset
if a.Sym != nil { // use relocation
if a.Sym.Type == obj.STLSBSS {
- if ctxt.Flag_shared != 0 {
+ if ctxt.Flag_shared {
return C_TLS_IE
} else {
return C_TLS_LE
func symbolAccess(ctxt *obj.Link, s *obj.LSym, d int64, reg int16, op uint32) (o1, o2 uint32) {
var base uint32
form := opform(ctxt, op)
- if ctxt.Flag_shared != 0 {
+ if ctxt.Flag_shared {
base = REG_R2
} else {
base = REG_R0
rel.Siz = 8
rel.Sym = s
rel.Add = d
- if ctxt.Flag_shared != 0 {
+ if ctxt.Flag_shared {
switch form {
case D_FORM:
rel.Type = obj.R_ADDRPOWER_TOCREL
if v != 0 {
ctxt.Diag("illegal indexed instruction\n%v", p)
}
- if ctxt.Flag_shared != 0 && r == REG_R13 {
+ if ctxt.Flag_shared && r == REG_R13 {
rel := obj.Addrel(ctxt.Cursym)
rel.Off = int32(ctxt.Pc)
rel.Siz = 4
if v != 0 {
ctxt.Diag("illegal indexed instruction\n%v", p)
}
- if ctxt.Flag_shared != 0 && r == REG_R13 {
+ if ctxt.Flag_shared && r == REG_R13 {
rel := obj.Addrel(ctxt.Cursym)
rel.Off = int32(ctxt.Pc)
rel.Siz = 4
q = p
- if ctxt.Flag_shared != 0 && cursym.Name != "runtime.duffzero" && cursym.Name != "runtime.duffcopy" && cursym.Name != "runtime.stackBarrier" {
+ if ctxt.Flag_shared && cursym.Name != "runtime.duffzero" && cursym.Name != "runtime.duffcopy" && cursym.Name != "runtime.stackBarrier" {
// When compiling Go into PIC, all functions must start
// with instructions to load the TOC pointer into r2:
//
q.Spadj = int32(-aoffset)
}
- if ctxt.Flag_shared != 0 {
+ if ctxt.Flag_shared {
q = obj.Appendp(ctxt, q)
q.As = AMOVD
q.Lineno = p.Lineno
}
ctxt.Instoffset = a.Offset
if a.Sym.Type == obj.STLSBSS {
- if ctxt.Flag_shared != 0 {
+ if ctxt.Flag_shared {
return C_TLS_IE // initial exec model
}
return C_TLS_LE // local exec model
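C_TLS_IE and C_TLS_LE name the two thread-local access models these assembler back ends choose between: initial exec loads the variable's TLS offset from the GOT at run time, which position-independent code in a shared object requires, while local exec bakes the offset in as a link-time constant and is only usable when the linker knows the final offset, i.e. in non-shared builds.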
return 0x64 // FS
}
- if ctxt.Flag_shared != 0 {
+ if ctxt.Flag_shared {
log.Fatalf("unknown TLS base register for linux with -shared")
} else {
return 0x64 // FS
}
if p.Mode == 32 {
- if a.Index == REG_TLS && ctxt.Flag_shared != 0 {
+ if a.Index == REG_TLS && ctxt.Flag_shared {
// When building for inclusion into a shared library, an instruction of the form
// MOVL 0(CX)(TLS*1), AX
// becomes
return 0x26
case REG_TLS:
- if ctxt.Flag_shared != 0 {
+ if ctxt.Flag_shared {
// When building for inclusion into a shared library, an instruction of the form
// MOV 0(CX)(TLS*1), AX
// becomes
case obj.NAME_EXTERN,
obj.NAME_STATIC:
- if a.Sym != nil && isextern(a.Sym) || (p.Mode == 32 && ctxt.Flag_shared == 0) {
+ if a.Sym != nil && isextern(a.Sym) || (p.Mode == 32 && !ctxt.Flag_shared) {
return Yi32
}
return Yiauto // use pc-relative addressing
if a.Name == obj.NAME_GOTREF {
r.Siz = 4
r.Type = obj.R_GOTPCREL
- } else if isextern(s) || (p.Mode != 64 && ctxt.Flag_shared == 0) {
+ } else if isextern(s) || (p.Mode != 64 && !ctxt.Flag_shared) {
r.Siz = 4
r.Type = obj.R_ADDR
} else {
log.Fatalf("reloc")
}
- if ctxt.Flag_shared == 0 || isAndroid {
+ if !ctxt.Flag_shared || isAndroid {
r.Type = obj.R_TLS_LE
r.Siz = 4
r.Off = -1 // caller must fill in
if !isextern(a.Sym) && p.Mode == 64 {
goto bad
}
- if p.Mode == 32 && ctxt.Flag_shared != 0 {
+ if p.Mode == 32 && ctxt.Flag_shared {
base = REG_CX
} else {
base = REG_NONE
if a.Sym == nil {
ctxt.Diag("bad addr: %v", p)
}
- if p.Mode == 32 && ctxt.Flag_shared != 0 {
+ if p.Mode == 32 && ctxt.Flag_shared {
base = REG_CX
} else {
base = REG_NONE
}
if REG_AX <= base && base <= REG_R15 {
- if a.Index == REG_TLS && ctxt.Flag_shared == 0 {
+ if a.Index == REG_TLS && !ctxt.Flag_shared {
rel = obj.Reloc{}
rel.Type = obj.R_TLS_LE
rel.Siz = 4
case obj.Hlinux,
obj.Hnacl:
- if ctxt.Flag_shared != 0 {
+ if ctxt.Flag_shared {
// Note that this is not generating the same insns as the other cases.
// MOV TLS, R_to
// becomes
log.Fatalf("unknown TLS base location for %s", obj.Headstr(ctxt.Headtype))
case obj.Hlinux:
- if ctxt.Flag_shared == 0 {
+ if !ctxt.Flag_shared {
log.Fatalf("unknown TLS base location for linux without -shared")
}
// Note that this is not generating the same insn as the other cases.
obj.Hwindows:
return false
case obj.Hlinux:
- return ctxt.Flag_shared == 0
+ return !ctxt.Flag_shared
}
return true
rewriteToUseGot(ctxt, p)
}
- if ctxt.Flag_shared != 0 && p.Mode == 32 {
+ if ctxt.Flag_shared && p.Mode == 32 {
rewriteToPcrel(ctxt, p)
}
}