rt := r.Type
siz := r.Size
- if ldr.SymType(rs) == sym.SHOSTOBJ || rt == objabi.R_CALLARM64 || rt == objabi.R_ADDRARM64 {
+ if ldr.SymType(rs) == sym.SHOSTOBJ || rt == objabi.R_CALLARM64 || rt == objabi.R_ADDRARM64 || rt == objabi.R_ARM64_GOTPCREL {
if ldr.SymDynid(rs) < 0 {
ldr.Errorf(s, "reloc %d (%s) to non-macho symbol %s type=%d (%s)", rt, sym.RelocName(arch, rt), ldr.SymName(rs), ldr.SymType(rs), ldr.SymType(rs))
return false
}
v |= 1 << 24 // pc-relative bit
v |= ld.MACHO_ARM64_RELOC_PAGE21 << 28
+ case objabi.R_ARM64_GOTPCREL:
+ siz = 4
+ // Two relocation entries: MACHO_ARM64_RELOC_GOT_LOAD_PAGEOFF12 + MACHO_ARM64_RELOC_GOT_LOAD_PAGE21.
+ // If r.Xadd is non-zero, also emit a MACHO_ARM64_RELOC_ADDEND entry before each of them.
+ if r.Xadd != 0 {
+ out.Write32(uint32(sectoff + 4))
+ out.Write32((ld.MACHO_ARM64_RELOC_ADDEND << 28) | (2 << 25) | uint32(r.Xadd&0xffffff))
+ }
+ out.Write32(uint32(sectoff + 4))
+ out.Write32(v | (ld.MACHO_ARM64_RELOC_GOT_LOAD_PAGEOFF12 << 28) | (2 << 25))
+ if r.Xadd != 0 {
+ out.Write32(uint32(sectoff))
+ out.Write32((ld.MACHO_ARM64_RELOC_ADDEND << 28) | (2 << 25) | uint32(r.Xadd&0xffffff))
+ }
+ v |= 1 << 24 // pc-relative bit
+ v |= ld.MACHO_ARM64_RELOC_GOT_LOAD_PAGE21 << 28
}
switch siz {
}
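
For reference, each entry machoreloc1 writes above is a Mach-O relocation_info record: a 32-bit section offset followed by a 32-bit word packing the symbol index (bits 0-23, or the addend for MACHO_ARM64_RELOC_ADDEND), the pc-relative bit (bit 24), a two-bit length field (bits 25-26, log2 of the relocated width, so (2 << 25) means a 4-byte field), the external bit (bit 27), and the relocation type (bits 28-31). A minimal sketch of that packing, assuming this layout is exactly what the shifts above encode; packRelocWord is a made-up name, not a linker function:

package main

import "fmt"

// packRelocWord illustrates the second word of a Mach-O relocation_info
// entry as emitted above: symbolnum | pcrel<<24 | length<<25 | extern<<27 |
// type<<28. Illustrative sketch only; not part of cmd/link.
func packRelocWord(symbolnum uint32, pcrel, extern bool, size uint8, typ uint32) uint32 {
	v := symbolnum & 0xffffff // 24-bit symbol index (or addend for MACHO_ARM64_RELOC_ADDEND)
	if pcrel {
		v |= 1 << 24 // pc-relative bit
	}
	switch size {
	case 2:
		v |= 1 << 25
	case 4:
		v |= 2 << 25 // the (2 << 25) seen above: 4-byte field
	case 8:
		v |= 3 << 25
	}
	if extern {
		v |= 1 << 27 // external relocation: symbolnum is a symbol-table index
	}
	return v | typ<<28 // relocation type, e.g. GOT_LOAD_PAGE21
}

func main() {
	// A 4-byte, pc-relative, external GOT_LOAD_PAGE21 against symbol index 7.
	fmt.Printf("%#08x\n", packRelocWord(7, true, true, 4, 5))
}
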
nExtReloc = 2 // need two ELF/Mach-O relocations. see elfreloc1/machoreloc1
- if target.IsDarwin() && rt == objabi.R_ADDRARM64 && xadd != 0 {
+ if target.IsDarwin() && xadd != 0 {
nExtReloc = 4 // need another two relocations for non-zero addend
}
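
In other words, with the Darwin check above a single R_ADDRARM64 or R_ARM64_GOTPCREL costs two external relocation entries (the page and pageoff halves), plus two MACHO_ARM64_RELOC_ADDEND entries when the addend is non-zero, because Mach-O carries the addend in a separate entry placed before the relocation it applies to. A standalone restatement of that counting rule, with an illustrative function name rather than the linker's own:

// arm64ExtRelocCount restates the logic above for illustration only. Two
// entries are always needed; on Darwin a non-zero addend adds one
// MACHO_ARM64_RELOC_ADDEND in front of each of them.
func arm64ExtRelocCount(isDarwin bool, xadd int64) int {
	n := 2 // PAGE21/GOT_LOAD_PAGE21 + PAGEOFF12/GOT_LOAD_PAGEOFF12
	if isDarwin && xadd != 0 {
		n += 2 // one ADDEND entry per page relocation
	}
	return n
}
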
// -headerpad is incompatible with -fembed-bitcode.
argv = append(argv, "-Wl,-headerpad,1144")
}
- if ctxt.DynlinkingGo() && !ctxt.Arch.InFamily(sys.ARM, sys.ARM64) {
+ if ctxt.DynlinkingGo() && objabi.GOOS != "ios" {
+ // -flat_namespace is deprecated on iOS.
+ // It is useful for supporting plugins. We don't support plugins on iOS.
argv = append(argv, "-Wl,-flat_namespace")
}
if !combineDwarf {
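
For context on the -flat_namespace comment: the flag is kept whenever DynlinkingGo applies (e.g. -buildmode=plugin or -linkshared) outside iOS, since plugins need the dynamic linker to resolve symbols across the host and plugin images. A sketch of the host side of that use case, using the standard plugin package; the file name greeter.so and the symbol Greet are made-up placeholders:

package main

import (
	"fmt"
	"plugin"
)

func main() {
	// Hypothetical plugin, e.g. built with: go build -buildmode=plugin -o greeter.so
	p, err := plugin.Open("greeter.so")
	if err != nil {
		panic(err)
	}
	sym, err := p.Lookup("Greet") // assumes the plugin exports: func Greet() string
	if err != nil {
		panic(err)
	}
	greet, ok := sym.(func() string)
	if !ok {
		panic("Greet has unexpected type")
	}
	fmt.Println(greet())
}
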
)
const (
- MACHO_CPU_AMD64 = 1<<24 | 7
- MACHO_CPU_386 = 7
- MACHO_SUBCPU_X86 = 3
- MACHO_CPU_ARM = 12
- MACHO_SUBCPU_ARM = 0
- MACHO_SUBCPU_ARMV7 = 9
- MACHO_CPU_ARM64 = 1<<24 | 12
- MACHO_SUBCPU_ARM64_ALL = 0
- MACHO32SYMSIZE = 12
- MACHO64SYMSIZE = 16
- MACHO_X86_64_RELOC_UNSIGNED = 0
- MACHO_X86_64_RELOC_SIGNED = 1
- MACHO_X86_64_RELOC_BRANCH = 2
- MACHO_X86_64_RELOC_GOT_LOAD = 3
- MACHO_X86_64_RELOC_GOT = 4
- MACHO_X86_64_RELOC_SUBTRACTOR = 5
- MACHO_X86_64_RELOC_SIGNED_1 = 6
- MACHO_X86_64_RELOC_SIGNED_2 = 7
- MACHO_X86_64_RELOC_SIGNED_4 = 8
- MACHO_ARM_RELOC_VANILLA = 0
- MACHO_ARM_RELOC_PAIR = 1
- MACHO_ARM_RELOC_SECTDIFF = 2
- MACHO_ARM_RELOC_BR24 = 5
- MACHO_ARM64_RELOC_UNSIGNED = 0
- MACHO_ARM64_RELOC_BRANCH26 = 2
- MACHO_ARM64_RELOC_PAGE21 = 3
- MACHO_ARM64_RELOC_PAGEOFF12 = 4
- MACHO_ARM64_RELOC_ADDEND = 10
- MACHO_GENERIC_RELOC_VANILLA = 0
- MACHO_FAKE_GOTPCREL = 100
+ MACHO_CPU_AMD64 = 1<<24 | 7
+ MACHO_CPU_386 = 7
+ MACHO_SUBCPU_X86 = 3
+ MACHO_CPU_ARM = 12
+ MACHO_SUBCPU_ARM = 0
+ MACHO_SUBCPU_ARMV7 = 9
+ MACHO_CPU_ARM64 = 1<<24 | 12
+ MACHO_SUBCPU_ARM64_ALL = 0
+ MACHO32SYMSIZE = 12
+ MACHO64SYMSIZE = 16
+ MACHO_X86_64_RELOC_UNSIGNED = 0
+ MACHO_X86_64_RELOC_SIGNED = 1
+ MACHO_X86_64_RELOC_BRANCH = 2
+ MACHO_X86_64_RELOC_GOT_LOAD = 3
+ MACHO_X86_64_RELOC_GOT = 4
+ MACHO_X86_64_RELOC_SUBTRACTOR = 5
+ MACHO_X86_64_RELOC_SIGNED_1 = 6
+ MACHO_X86_64_RELOC_SIGNED_2 = 7
+ MACHO_X86_64_RELOC_SIGNED_4 = 8
+ MACHO_ARM_RELOC_VANILLA = 0
+ MACHO_ARM_RELOC_PAIR = 1
+ MACHO_ARM_RELOC_SECTDIFF = 2
+ MACHO_ARM_RELOC_BR24 = 5
+ MACHO_ARM64_RELOC_UNSIGNED = 0
+ MACHO_ARM64_RELOC_BRANCH26 = 2
+ MACHO_ARM64_RELOC_PAGE21 = 3
+ MACHO_ARM64_RELOC_PAGEOFF12 = 4
+ MACHO_ARM64_RELOC_GOT_LOAD_PAGE21 = 5
+ MACHO_ARM64_RELOC_GOT_LOAD_PAGEOFF12 = 6
+ MACHO_ARM64_RELOC_ADDEND = 10
+ MACHO_GENERIC_RELOC_VANILLA = 0
+ MACHO_FAKE_GOTPCREL = 100
)
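
The new GOT_LOAD values (5 and 6) follow the ARM64 relocation-type numbering of the Mach-O format, which the standard library also exposes; a quick cross-check is sketched below, assuming debug/macho's ARM64_RELOC_* constants (present since Go 1.10):

package main

import (
	"debug/macho"
	"fmt"
)

func main() {
	// Print the numeric values debug/macho assigns to the ARM64 relocation
	// types used above; they should mirror the MACHO_ARM64_RELOC_* constants.
	fmt.Printf("PAGE21=%d PAGEOFF12=%d GOT_LOAD_PAGE21=%d GOT_LOAD_PAGEOFF12=%d ADDEND=%d\n",
		macho.ARM64_RELOC_PAGE21,
		macho.ARM64_RELOC_PAGEOFF12,
		macho.ARM64_RELOC_GOT_LOAD_PAGE21,
		macho.ARM64_RELOC_GOT_LOAD_PAGEOFF12,
		macho.ARM64_RELOC_ADDEND,
	)
	// Expected: PAGE21=3 PAGEOFF12=4 GOT_LOAD_PAGE21=5 GOT_LOAD_PAGEOFF12=6 ADDEND=10
}
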
const (