#define BCTR 0x4e800420 /* bctr */
#define ADDI_R11_R11 0x396b0000 /* addi %r11,%r11,off@l */
+#define ADDI_R12_R11 0x398b0000 /* addi %r12,%r11,off@l */
+#define ADDI_R12_R12 0x398c0000 /* addi %r12,%r12,off@l */
#define ADDIS_R2_R2 0x3c420000 /* addis %r2,%r2,off@ha */
#define ADDI_R2_R2 0x38420000 /* addi %r2,%r2,off@l */
#define ADD_R2_R2_R12 0x7c426214 /* add %r2,%r2,%r12 */
#define LIS_R2 0x3c400000 /* lis %r2,xxx@ha */
+#define LIS_R12 0x3d800000 /* lis %r12,xxx@ha */
#define ADDIS_R2_R12 0x3c4c0000 /* addis %r2,%r12,xxx@ha */
#define ADDIS_R12_R2 0x3d820000 /* addis %r12,%r2,xxx@ha */
+#define ADDIS_R12_R11 0x3d8b0000 /* addis %r12,%r11,xxx@ha */
#define ADDIS_R12_R12 0x3d8c0000 /* addis %r12,%r12,xxx@ha */
+#define ORIS_R12_R12_0 0x658c0000 /* oris %r12,%r12,xxx@hi */
+#define ORI_R12_R12_0 0x618c0000 /* ori %r12,%r12,xxx@l */
#define LD_R12_0R12 0xe98c0000 /* ld %r12,xxx@l(%r12) */
+#define SLDI_R12_R12_32 0x798c07c6 /* sldi %r12,%r12,32 */
+#define LDX_R12_R11_R12 0x7d8b602a /* ldx %r12,%r11,%r12 */
+#define ADD_R12_R11_R12 0x7d8b6214 /* add %r12,%r11,%r12 */
/* __glink_PLTresolve stub instructions. We enter with the index in R0. */
#define GLINK_PLTRESOLVE_SIZE(htab) \
0x03fffffc, /* dst_mask */
TRUE), /* pcrel_offset */
+ /* A variant of R_PPC64_REL24, used when r2 is not the toc pointer. */
+ HOWTO (R_PPC64_REL24_NOTOC, /* type */
+ 0, /* rightshift */
+ 2, /* size (0 = byte, 1 = short, 2 = long) */
+ 26, /* bitsize */
+ TRUE, /* pc_relative */
+ 0, /* bitpos */
+ complain_overflow_signed, /* complain_on_overflow */
+ ppc64_elf_branch_reloc, /* special_function */
+ "R_PPC64_REL24_NOTOC", /* name */
+ FALSE, /* partial_inplace */
+ 0, /* src_mask */
+ 0x03fffffc, /* dst_mask */
+ TRUE), /* pcrel_offset */
+
/* A relative 16 bit branch; the lower two bits must be zero. */
HOWTO (R_PPC64_REL14, /* type */
0, /* rightshift */
break;
case BFD_RELOC_PPC_B26: r = R_PPC64_REL24;
break;
+ case BFD_RELOC_PPC64_REL24_NOTOC: r = R_PPC64_REL24_NOTOC;
+ break;
case BFD_RELOC_PPC_B16: r = R_PPC64_REL14;
break;
case BFD_RELOC_PPC_B16_BRTAKEN: r = R_PPC64_REL14_BRTAKEN;
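
As background for the new entries above, a minimal standalone sketch (not part of the patch; patch_rel24 is a hypothetical helper) of how a 24-bit PC-relative branch relocation such as R_PPC64_REL24_NOTOC is applied: the signed, word-aligned displacement is masked into the instruction using the HOWTO's dst_mask of 0x03fffffc.

#include <assert.h>
#include <stdint.h>

/* Hypothetical helper: patch the signed 26-bit, word-aligned branch
   displacement of a PowerPC B-form instruction.  Field layout follows
   the HOWTO above (bitsize 26, dst_mask 0x03fffffc, pc-relative).  */
static uint32_t
patch_rel24 (uint32_t insn, uint64_t place, uint64_t target)
{
  int64_t disp = (int64_t) (target - place);
  assert ((disp & 3) == 0);
  assert (disp >= -(1 << 25) && disp < (1 << 25));
  return (insn & ~(uint32_t) 0x03fffffc) | ((uint32_t) disp & 0x03fffffc);
}

int
main (void)
{
  /* "bl" with a zero displacement is 0x48000001; branching 0x20 bytes
     forward sets the displacement field to 0x20.  */
  assert (patch_rel24 (0x48000001, 0x10000000, 0x10000020) == 0x48000021);
  return 0;
}
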
Used to call a function in a shared library. If it so happens that
the plt entry referenced crosses a 64k boundary, then an extra
"addi %r11,%r11,xxx@toc@l" will be inserted before the "mtctr".
- . std %r2,40(%r1)
+ ppc_stub_plt_call_r2save starts with "std %r2,40(%r1)".
. addis %r11,%r2,xxx@toc@ha
. ld %r12,xxx+0@toc@l(%r11)
. mtctr %r12
. ld %r2,xxx+8@toc@l(%r11)
. ld %r11,xxx+16@toc@l(%r11)
. bctr
- In cases where the "addis" instruction would add zero, the "addis" is
- omitted and following instructions modified slightly in some cases.
-*/
+ All of the above stubs are shown as their ELFv1 variants. ELFv2
+ variants exist too, simpler for plt calls since a new toc pointer
+ and static chain are not loaded by the stub. In addition, ELFv2
+ has some more complex stubs to handle calls marked with NOTOC
+ relocs from functions where r2 is not a valid toc pointer. These
+ come in two flavours, the ones shown below, and _both variants that
+ start with "std %r2,24(%r1)" to save r2 in the unlikely event that
+ one call is from a function where r2 is used as the toc pointer but
+ needs a toc adjusting stub for small-model multi-toc, and another
+ call is from a function where r2 is not valid.
+ ppc_stub_long_branch_notoc:
+ . mflr %r12
+ . bcl 20,31,1f
+ . 1:
+ . mflr %r11
+ . mtlr %r12
+ . lis %r12,xxx-1b@highest
+ . ori %r12,%r12,xxx-1b@higher
+ . sldi %r12,%r12,32
+ . oris %r12,%r12,xxx-1b@hi
+ . ori %r12,%r12,xxx-1b@l
+ . add %r12,%r11,%r12
+ . b dest
+
+ ppc_stub_plt_branch_notoc:
+ . mflr %r12
+ . bcl 20,31,1f
+ . 1:
+ . mflr %r11
+ . mtlr %r12
+ . lis %r12,xxx-1b@highest
+ . ori %r12,%r12,xxx-1b@higher
+ . sldi %r12,%r12,32
+ . oris %r12,%r12,xxx-1b@hi
+ . ori %r12,%r12,xxx-1b@l
+ . add %r12,%r11,%r12
+ . mtctr %r12
+ . bctr
+
+ ppc_stub_plt_call_notoc:
+ . mflr %r12
+ . bcl 20,31,1f
+ . 1:
+ . mflr %r11
+ . mtlr %r12
+ . lis %r12,xxx-1b@highest
+ . ori %r12,%r12,xxx-1b@higher
+ . sldi %r12,%r12,32
+ . oris %r12,%r12,xxx-1b@hi
+ . ori %r12,%r12,xxx-1b@l
+ . ldx %r12,%r11,%r12
+ . mtctr %r12
+ . bctr
+
+ In cases where the high instructions would add zero, they are
+ omitted and following instructions modified in some cases.
+
+ For a given stub group (a set of sections all using the same toc
+ pointer value) there will be just one stub type used for any
+ particular function symbol. For example, if printf is called from
+ code with the tocsave optimization (ie. r2 saved in function
+ prologue) and therefore calls use a ppc_stub_plt_call linkage stub,
+ and from other code without the tocsave optimization requiring a
+ ppc_stub_plt_call_r2save linkage stub, a single stub of the latter
+ type will be created. Calls with the tocsave optimization will
+ enter this stub after the instruction saving r2. A similar
+ situation exists when calls are marked with R_PPC64_REL24_NOTOC
+ relocations. These require a ppc_stub_plt_call_notoc linkage stub
+ to call an external function like printf. If other calls to printf
+ require a ppc_stub_plt_call linkage stub then a single
+ ppc_stub_plt_call_notoc linkage stub will be used for both types of
+ call. If other calls to printf require a ppc_stub_plt_call_r2save
+ linkage stub then a single ppc_stub_plt_call_both linkage stub will
+ be created and calls not requiring r2 to be saved will enter the
+ stub after the r2 save instruction. There is an analogous
+ hierarchy of long branch and plt branch stubs for local call
+ linkage. */
enum ppc_stub_type {
ppc_stub_none,
ppc_stub_long_branch,
ppc_stub_long_branch_r2off,
+ ppc_stub_long_branch_notoc,
+ ppc_stub_long_branch_both, /* r2off and notoc variants both needed. */
ppc_stub_plt_branch,
ppc_stub_plt_branch_r2off,
+ ppc_stub_plt_branch_notoc,
+ ppc_stub_plt_branch_both,
ppc_stub_plt_call,
ppc_stub_plt_call_r2save,
+ ppc_stub_plt_call_notoc,
+ ppc_stub_plt_call_both,
ppc_stub_global_entry,
ppc_stub_save_res
};
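
The merging rules described in the comment above, and implemented later in this patch when a stub hash lookup finds an existing entry of a different type, can be pictured with a small standalone sketch (illustration only; merge_call_stub is hypothetical, not linker code). The same lattice applies to the long_branch and plt_branch families.

#include <assert.h>

/* Illustrative mirror of the plt_call stub variants; the ordering
   matches the enum above (plain < r2save < notoc < both).  */
enum call_stub { PLAIN, R2SAVE, NOTOC, BOTH };

/* One stub serves all callers of a symbol within a stub group.  The
   plain variant is subsumed by any other; r2save and notoc together
   require the _both form, which saves r2 and then continues with the
   notoc sequence.  */
static enum call_stub
merge_call_stub (enum call_stub a, enum call_stub b)
{
  if ((a == R2SAVE && b == NOTOC) || (a == NOTOC && b == R2SAVE))
    return BOTH;
  return a > b ? a : b;
}

int
main (void)
{
  assert (merge_call_stub (PLAIN, NOTOC) == NOTOC);
  assert (merge_call_stub (PLAIN, R2SAVE) == R2SAVE);
  assert (merge_call_stub (R2SAVE, NOTOC) == BOTH);
  assert (merge_call_stub (BOTH, PLAIN) == BOTH);
  return 0;
}
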
is_branch_reloc (enum elf_ppc64_reloc_type r_type)
{
return (r_type == R_PPC64_REL24
+ || r_type == R_PPC64_REL24_NOTOC
|| r_type == R_PPC64_REL14
|| r_type == R_PPC64_REL14_BRTAKEN
|| r_type == R_PPC64_REL14_BRNTAKEN
/* Fall through. */
case R_PPC64_REL24:
+ case R_PPC64_REL24_NOTOC:
rel24:
plt_list = ifunc;
if (h != NULL)
return ppc_stub_none;
}
+/* Builds a 64-bit offset in r12 then adds it to r11 (LOAD false) or
+ loads r12 from r11+r12 (LOAD true).
+ . lis %r12,xxx-1b@highest
+ . ori %r12,%r12,xxx-1b@higher
+ . sldi %r12,%r12,32
+ . oris %r12,%r12,xxx-1b@hi
+ . ori %r12,%r12,xxx-1b@l
+ . add %r12,%r11,%r12 */
+
+static bfd_byte *
+build_offset (bfd *abfd, bfd_byte *p, bfd_vma off, bfd_boolean load)
+{
+ if (off + 0x8000 < 0x10000)
+ {
+ if (load)
+ bfd_put_32 (abfd, LD_R12_0R11 + PPC_LO (off), p);
+ else
+ bfd_put_32 (abfd, ADDI_R12_R11 + PPC_LO (off), p);
+ p += 4;
+ }
+ else if (off + 0x80008000ULL < 0x100000000ULL)
+ {
+ bfd_put_32 (abfd, ADDIS_R12_R11 + PPC_HA (off), p);
+ p += 4;
+ if (load)
+ bfd_put_32 (abfd, LD_R12_0R12 + PPC_LO (off), p);
+ else
+ bfd_put_32 (abfd, ADDI_R12_R12 + PPC_LO (off), p);
+ p += 4;
+ }
+ else
+ {
+ if (off + 0x800000000000ULL < 0x1000000000000ULL)
+ {
+ bfd_put_32 (abfd, LI_R12_0 + ((off >> 32) & 0xffff), p);
+ p += 4;
+ }
+ else
+ {
+ bfd_put_32 (abfd, LIS_R12 + ((off >> 48) & 0xffff), p);
+ p += 4;
+ if (((off >> 32) & 0xffff) != 0)
+ {
+ bfd_put_32 (abfd, ORI_R12_R12_0 + ((off >> 32) & 0xffff), p);
+ p += 4;
+ }
+ }
+ if (((off >> 32) & 0xffffffffULL) != 0)
+ {
+ bfd_put_32 (abfd, SLDI_R12_R12_32, p);
+ p += 4;
+ }
+ if (PPC_HI (off) != 0)
+ {
+ bfd_put_32 (abfd, ORIS_R12_R12_0 + PPC_HI (off), p);
+ p += 4;
+ }
+ if (PPC_LO (off) != 0)
+ {
+ bfd_put_32 (abfd, ORI_R12_R12_0 + PPC_LO (off), p);
+ p += 4;
+ }
+ if (load)
+ bfd_put_32 (abfd, LDX_R12_R11_R12, p);
+ else
+ bfd_put_32 (abfd, ADD_R12_R11_R12, p);
+ p += 4;
+ }
+ return p;
+}
+
+static unsigned int
+size_offset (bfd_vma off)
+{
+ unsigned int size;
+ if (off + 0x8000 < 0x10000)
+ size = 4;
+ else if (off + 0x80008000ULL < 0x100000000ULL)
+ size = 8;
+ else
+ {
+ if (off + 0x800000000000ULL < 0x1000000000000ULL)
+ size = 4;
+ else
+ {
+ size = 4;
+ if (((off >> 32) & 0xffff) != 0)
+ size += 4;
+ }
+ if (((off >> 32) & 0xffffffffULL) != 0)
+ size += 4;
+ if (PPC_HI (off) != 0)
+ size += 4;
+ if (PPC_LO (off) != 0)
+ size += 4;
+ size += 4;
+ }
+ return size;
+}
+
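
As a quick sanity check on the cases above, a standalone sketch (illustration only; sketch_size_offset and the HI16/LO16 macros are local stand-ins for size_offset and PPC_HI/PPC_LO) that reproduces the byte counts for a few representative offsets and notes which instructions of the sequence survive:

#include <assert.h>
#include <stdint.h>

#define HI16(v) (((v) >> 16) & 0xffff)
#define LO16(v) ((v) & 0xffff)

/* Local copy of the size logic above: bytes needed to materialize OFF
   in r12 and combine it with r11 via add or ldx.  */
static unsigned int
sketch_size_offset (uint64_t off)
{
  unsigned int size;
  if (off + 0x8000 < 0x10000)
    size = 4;                          /* single addi or ld */
  else if (off + 0x80008000ULL < 0x100000000ULL)
    size = 8;                          /* addis, then addi or ld */
  else
    {
      if (off + 0x800000000000ULL < 0x1000000000000ULL)
        size = 4;                      /* li of bits 32..47 */
      else
        {
          size = 4;                    /* lis of bits 48..63 */
          if (((off >> 32) & 0xffff) != 0)
            size += 4;                 /* ori of bits 32..47 */
        }
      if (((off >> 32) & 0xffffffffULL) != 0)
        size += 4;                     /* sldi */
      if (HI16 (off) != 0)
        size += 4;                     /* oris */
      if (LO16 (off) != 0)
        size += 4;                     /* ori */
      size += 4;                       /* final add or ldx */
    }
  return size;
}

int
main (void)
{
  assert (sketch_size_offset (0x1234) == 4);            /* addi/ld only */
  assert (sketch_size_offset (0x12345678) == 8);        /* addis + addi/ld */
  assert (sketch_size_offset (0x100000000ULL) == 12);   /* li, sldi, add/ldx */
  assert (sketch_size_offset (0x123456789ULL) == 20);   /* li, sldi, oris, ori, add/ldx */
  return 0;
}
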
/* With power7 weakly ordered memory model, it is possible for ld.so
to update a plt entry in one thread and have another thread see a
stale zero toc entry. To avoid this we need some sort of acquire
struct ppc_stub_hash_entry *stub_entry,
bfd_vma off)
{
- unsigned size = 12;
+ unsigned size;
+ if (stub_entry->stub_type >= ppc_stub_plt_call_notoc)
+ {
+ size = 24 + size_offset (off);
+ if (stub_entry->stub_type > ppc_stub_plt_call_notoc)
+ size += 4;
+ return size;
+ }
+
+ size = 12;
if (ALWAYS_EMIT_R2SAVE
|| stub_entry->stub_type == ppc_stub_plt_call_r2save)
size += 4;
p += 4;
break;
+ case ppc_stub_long_branch_notoc:
+ case ppc_stub_long_branch_both:
+ case ppc_stub_plt_branch_notoc:
+ case ppc_stub_plt_branch_both:
+ case ppc_stub_plt_call_notoc:
+ case ppc_stub_plt_call_both:
+ p = loc;
+ off = (8 + stub_entry->stub_offset
+ + stub_entry->group->stub_sec->output_offset
+ + stub_entry->group->stub_sec->output_section->vma);
+ if (stub_entry->stub_type == ppc_stub_long_branch_both
+ || stub_entry->stub_type == ppc_stub_plt_branch_both
+ || stub_entry->stub_type == ppc_stub_plt_call_both)
+ {
+ off += 4;
+ bfd_put_32 (htab->params->stub_bfd, STD_R2_0R1 + STK_TOC (htab), p);
+ p += 4;
+ }
+ if (stub_entry->stub_type >= ppc_stub_plt_call_notoc)
+ {
+ targ = stub_entry->plt_ent->plt.offset & ~1;
+ if (targ >= (bfd_vma) -2)
+ abort ();
+
+ plt = htab->elf.splt;
+ if (!htab->elf.dynamic_sections_created
+ || stub_entry->h == NULL
+ || stub_entry->h->elf.dynindx == -1)
+ {
+ if (stub_entry->symtype == STT_GNU_IFUNC)
+ plt = htab->elf.iplt;
+ else
+ plt = htab->pltlocal;
+ }
+ targ += plt->output_offset + plt->output_section->vma;
+ }
+ else
+ targ = (stub_entry->target_value
+ + stub_entry->target_section->output_offset
+ + stub_entry->target_section->output_section->vma);
+ off = targ - off;
+ bfd_put_32 (htab->params->stub_bfd, MFLR_R12, p);
+ p += 4;
+ bfd_put_32 (htab->params->stub_bfd, BCL_20_31, p);
+ p += 4;
+ bfd_put_32 (htab->params->stub_bfd, MFLR_R11, p);
+ p += 4;
+ bfd_put_32 (htab->params->stub_bfd, MTLR_R12, p);
+ p += 4;
+ p = build_offset (htab->params->stub_bfd, p, off,
+ stub_entry->stub_type >= ppc_stub_plt_call_notoc);
+ if (stub_entry->stub_type == ppc_stub_long_branch_notoc)
+ {
+ off += 8;
+ bfd_put_32 (htab->params->stub_bfd,
+ B_DOT | ((off - (p - loc)) & 0x3fffffc), p);
+ }
+ else if (stub_entry->stub_type == ppc_stub_long_branch_both)
+ {
+ off += 12;
+ bfd_put_32 (htab->params->stub_bfd,
+ B_DOT | ((off - (p - loc)) & 0x3fffffc), p);
+ }
+ else
+ {
+ bfd_put_32 (htab->params->stub_bfd, MTCTR_R12, p);
+ p += 4;
+ bfd_put_32 (htab->params->stub_bfd, BCTR, p);
+ }
+ p += 4;
+ break;
+
case ppc_stub_plt_call:
case ppc_stub_plt_call_r2save:
if (stub_entry->h != NULL
size_t len1, len2;
char *name;
const char *const stub_str[] = { "long_branch",
+ "long_branch",
+ "long_branch",
"long_branch",
"plt_branch",
"plt_branch",
+ "plt_branch",
+ "plt_branch",
+ "plt_call",
+ "plt_call",
"plt_call",
"plt_call" };
return TRUE;
}
- if (stub_entry->stub_type == ppc_stub_plt_call
- || stub_entry->stub_type == ppc_stub_plt_call_r2save)
+ if (stub_entry->stub_type >= ppc_stub_plt_call
+ && stub_entry->stub_type <= ppc_stub_plt_call_both)
{
asection *plt;
targ = stub_entry->plt_ent->plt.offset & ~(bfd_vma) 1;
}
targ += plt->output_offset + plt->output_section->vma;
- off = (elf_gp (info->output_bfd)
- + htab->sec_info[stub_entry->group->link_sec->id].toc_off);
- off = targ - off;
+ if (stub_entry->stub_type >= ppc_stub_plt_call_notoc)
+ {
+ off = (8 + stub_entry->stub_offset
+ + stub_entry->group->stub_sec->output_offset
+ + stub_entry->group->stub_sec->output_section->vma);
+ if (stub_entry->stub_type > ppc_stub_plt_call_notoc)
+ off += 4;
+ }
+ else
+ off = (elf_gp (info->output_bfd)
+ + htab->sec_info[stub_entry->group->link_sec->id].toc_off);
if (htab->params->plt_stub_align != 0)
{
- unsigned pad = plt_stub_pad (htab, stub_entry, off);
+ unsigned pad = plt_stub_pad (htab, stub_entry, targ - off);
stub_entry->group->stub_sec->size += pad;
stub_entry->stub_offset = stub_entry->group->stub_sec->size;
+ if (stub_entry->stub_type >= ppc_stub_plt_call_notoc)
+ off += pad;
}
+ off = targ - off;
size = plt_stub_size (htab, stub_entry, off);
- if (stub_entry->h != NULL
- && (stub_entry->h == htab->tls_get_addr_fd
- || stub_entry->h == htab->tls_get_addr)
- && htab->params->tls_get_addr_opt
- && (ALWAYS_EMIT_R2SAVE
- || stub_entry->stub_type == ppc_stub_plt_call_r2save))
- stub_entry->group->tls_get_addr_opt_bctrl
- = stub_entry->stub_offset + size - 5 * 4;
-
- if (info->emitrelocations)
+ if (stub_entry->stub_type < ppc_stub_plt_call_notoc)
{
- stub_entry->group->stub_sec->reloc_count
- += ((PPC_HA (off) != 0)
- + (htab->opd_abi
- ? 2 + (htab->params->plt_static_chain
- && PPC_HA (off + 16) == PPC_HA (off))
- : 1));
- stub_entry->group->stub_sec->flags |= SEC_RELOC;
+ if (stub_entry->h != NULL
+ && (stub_entry->h == htab->tls_get_addr_fd
+ || stub_entry->h == htab->tls_get_addr)
+ && htab->params->tls_get_addr_opt
+ && (ALWAYS_EMIT_R2SAVE
+ || stub_entry->stub_type == ppc_stub_plt_call_r2save))
+ stub_entry->group->tls_get_addr_opt_bctrl
+ = stub_entry->stub_offset + size - 5 * 4;
+
+ if (info->emitrelocations)
+ {
+ stub_entry->group->stub_sec->reloc_count
+ += ((PPC_HA (off) != 0)
+ + (htab->opd_abi
+ ? 2 + (htab->params->plt_static_chain
+ && PPC_HA (off + 16) == PPC_HA (off))
+ : 1));
+ stub_entry->group->stub_sec->flags |= SEC_RELOC;
+ }
}
}
else
size += 4;
off += size - 4;
}
+ else if (stub_entry->stub_type >= ppc_stub_long_branch_notoc)
+ {
+ size = 20 + size_offset (targ - (off + 8));
+ if (stub_entry->stub_type > ppc_stub_long_branch_notoc)
+ size += 4;
+ off += size - 4;
+ }
off = targ - off;
- local_off = PPC64_LOCAL_ENTRY_OFFSET (stub_entry->other);
-
- /* If the branch offset is too big, use a ppc_stub_plt_branch.
- Do the same for -R objects without function descriptors. */
- if (off + (1 << 25) >= (bfd_vma) (1 << 26) - local_off
- || (stub_entry->stub_type == ppc_stub_long_branch_r2off
- && r2off == 0
- && htab->sec_info[stub_entry->target_section->id].toc_off == 0))
+ if (stub_entry->stub_type >= ppc_stub_long_branch_notoc)
{
- struct ppc_branch_hash_entry *br_entry;
-
- br_entry = ppc_branch_hash_lookup (&htab->branch_hash_table,
- stub_entry->root.string + 9,
- TRUE, FALSE);
- if (br_entry == NULL)
+ if (off + (1 << 25) >= (bfd_vma) (1 << 26))
{
- _bfd_error_handler (_("can't build branch stub `%s'"),
- stub_entry->root.string);
- htab->stub_error = TRUE;
- return FALSE;
+ stub_entry->stub_type += (ppc_stub_plt_branch_notoc
+ - ppc_stub_long_branch_notoc);
+ size += 4;
}
-
- if (br_entry->iter != htab->stub_iteration)
+ }
+ else
+ {
+ local_off = PPC64_LOCAL_ENTRY_OFFSET (stub_entry->other);
+
+ /* If the branch offset is too big, use a ppc_stub_plt_branch.
+ Do the same for -R objects without function descriptors. */
+ if ((stub_entry->stub_type == ppc_stub_long_branch_r2off
+ && r2off == 0
+ && htab->sec_info[stub_entry->target_section->id].toc_off == 0)
+ || off + (1 << 25) >= (bfd_vma) (1 << 26) - local_off)
{
- br_entry->iter = htab->stub_iteration;
- br_entry->offset = htab->brlt->size;
- htab->brlt->size += 8;
+ struct ppc_branch_hash_entry *br_entry;
- if (htab->relbrlt != NULL)
- htab->relbrlt->size += sizeof (Elf64_External_Rela);
- else if (info->emitrelocations)
+ br_entry = ppc_branch_hash_lookup (&htab->branch_hash_table,
+ stub_entry->root.string + 9,
+ TRUE, FALSE);
+ if (br_entry == NULL)
{
- htab->brlt->reloc_count += 1;
- htab->brlt->flags |= SEC_RELOC;
+ _bfd_error_handler (_("can't build branch stub `%s'"),
+ stub_entry->root.string);
+ htab->stub_error = TRUE;
+ return FALSE;
}
- }
- stub_entry->stub_type += ppc_stub_plt_branch - ppc_stub_long_branch;
- targ = (br_entry->offset
- + htab->brlt->output_offset
- + htab->brlt->output_section->vma);
- off = (elf_gp (info->output_bfd)
- + htab->sec_info[stub_entry->group->link_sec->id].toc_off);
- off = targ - off;
+ if (br_entry->iter != htab->stub_iteration)
+ {
+ br_entry->iter = htab->stub_iteration;
+ br_entry->offset = htab->brlt->size;
+ htab->brlt->size += 8;
- if (info->emitrelocations)
- {
- stub_entry->group->stub_sec->reloc_count
- += 1 + (PPC_HA (off) != 0);
- stub_entry->group->stub_sec->flags |= SEC_RELOC;
- }
+ if (htab->relbrlt != NULL)
+ htab->relbrlt->size += sizeof (Elf64_External_Rela);
+ else if (info->emitrelocations)
+ {
+ htab->brlt->reloc_count += 1;
+ htab->brlt->flags |= SEC_RELOC;
+ }
+ }
- if (stub_entry->stub_type != ppc_stub_plt_branch_r2off)
- {
- size = 12;
- if (PPC_HA (off) != 0)
- size = 16;
+ targ = (br_entry->offset
+ + htab->brlt->output_offset
+ + htab->brlt->output_section->vma);
+ off = (elf_gp (info->output_bfd)
+ + htab->sec_info[stub_entry->group->link_sec->id].toc_off);
+ off = targ - off;
+
+ if (info->emitrelocations)
+ {
+ stub_entry->group->stub_sec->reloc_count
+ += 1 + (PPC_HA (off) != 0);
+ stub_entry->group->stub_sec->flags |= SEC_RELOC;
+ }
+
+ stub_entry->stub_type
+ += ppc_stub_plt_branch - ppc_stub_long_branch;
+ if (stub_entry->stub_type != ppc_stub_plt_branch_r2off)
+ {
+ size = 12;
+ if (PPC_HA (off) != 0)
+ size = 16;
+ }
+ else
+ {
+ size = 16;
+ if (PPC_HA (off) != 0)
+ size += 4;
+
+ if (PPC_HA (r2off) != 0)
+ size += 4;
+ if (PPC_LO (r2off) != 0)
+ size += 4;
+ }
}
- else
+ else if (info->emitrelocations)
{
- size = 16;
- if (PPC_HA (off) != 0)
- size += 4;
-
- if (PPC_HA (r2off) != 0)
- size += 4;
- if (PPC_LO (r2off) != 0)
- size += 4;
+ stub_entry->group->stub_sec->reloc_count += 1;
+ stub_entry->group->stub_sec->flags |= SEC_RELOC;
}
}
- else if (info->emitrelocations)
- {
- stub_entry->group->stub_sec->reloc_count += 1;
- stub_entry->group->stub_sec->flags |= SEC_RELOC;
- }
}
stub_entry->group->stub_sec->size += size;
r_type = ELF64_R_TYPE (rel->r_info);
if (r_type != R_PPC64_REL24
+ && r_type != R_PPC64_REL24_NOTOC
&& r_type != R_PPC64_REL14
&& r_type != R_PPC64_REL14_BRTAKEN
&& r_type != R_PPC64_REL14_BRNTAKEN
/* Only look for stubs on branch instructions. */
if (r_type != R_PPC64_REL24
+ && r_type != R_PPC64_REL24_NOTOC
&& r_type != R_PPC64_REL14
&& r_type != R_PPC64_REL14_BRTAKEN
&& r_type != R_PPC64_REL14_BRNTAKEN)
&plt_ent, destination,
local_off);
- if (stub_type != ppc_stub_plt_call)
+ if (r_type == R_PPC64_REL24_NOTOC)
+ {
+ if (stub_type == ppc_stub_plt_call)
+ stub_type = ppc_stub_plt_call_notoc;
+ else if (stub_type == ppc_stub_long_branch
+ || (code_sec != NULL
+ && code_sec->output_section != NULL
+ && (((hash ? hash->elf.other : sym->st_other)
+ & STO_PPC64_LOCAL_MASK)
+ != 1 << STO_PPC64_LOCAL_BIT)))
+ stub_type = ppc_stub_long_branch_notoc;
+ }
+ else if (stub_type != ppc_stub_plt_call)
{
/* Check whether we need a TOC adjusting stub.
Since the linker pastes together pieces from
/* __tls_get_addr calls might be eliminated. */
if (stub_type != ppc_stub_plt_call
+ && stub_type != ppc_stub_plt_call_notoc
&& hash != NULL
&& (hash == htab->tls_get_addr
|| hash == htab->tls_get_addr_fd)
stub_name, FALSE, FALSE);
if (stub_entry != NULL)
{
- /* The proper stub has already been created. */
+ enum ppc_stub_type old_type;
+ /* A stub has already been created, but it may
+ not be the required type. We shouldn't be
+ transitioning from plt_call to long_branch
+ stubs or vice versa, but we might be
+ upgrading from plt_call to plt_call_r2save or
+ from long_branch to long_branch_r2off. */
free (stub_name);
- if (stub_type == ppc_stub_plt_call_r2save)
+ old_type = stub_entry->stub_type;
+ switch (old_type)
+ {
+ default:
+ abort ();
+
+ case ppc_stub_save_res:
+ continue;
+
+ case ppc_stub_plt_call:
+ case ppc_stub_plt_call_r2save:
+ case ppc_stub_plt_call_notoc:
+ case ppc_stub_plt_call_both:
+ if (stub_type == ppc_stub_plt_call)
+ continue;
+ else if (stub_type == ppc_stub_plt_call_r2save)
+ {
+ if (old_type == ppc_stub_plt_call_notoc)
+ stub_type = ppc_stub_plt_call_both;
+ }
+ else if (stub_type == ppc_stub_plt_call_notoc)
+ {
+ if (old_type == ppc_stub_plt_call_r2save)
+ stub_type = ppc_stub_plt_call_both;
+ }
+ else
+ abort ();
+ break;
+
+ case ppc_stub_plt_branch:
+ case ppc_stub_plt_branch_r2off:
+ case ppc_stub_plt_branch_notoc:
+ case ppc_stub_plt_branch_both:
+ old_type += (ppc_stub_long_branch
+ - ppc_stub_plt_branch);
+ /* Fall through. */
+ case ppc_stub_long_branch:
+ case ppc_stub_long_branch_r2off:
+ case ppc_stub_long_branch_notoc:
+ case ppc_stub_long_branch_both:
+ if (stub_type == ppc_stub_long_branch)
+ continue;
+ else if (stub_type == ppc_stub_long_branch_r2off)
+ {
+ if (old_type == ppc_stub_long_branch_notoc)
+ stub_type = ppc_stub_long_branch_both;
+ }
+ else if (stub_type == ppc_stub_long_branch_notoc)
+ {
+ if (old_type == ppc_stub_long_branch_r2off)
+ stub_type = ppc_stub_long_branch_both;
+ }
+ else
+ abort ();
+ break;
+ }
+ if (old_type < stub_type)
stub_entry->stub_type = stub_type;
continue;
}
}
stub_entry->stub_type = stub_type;
- if (stub_type != ppc_stub_plt_call
- && stub_type != ppc_stub_plt_call_r2save)
+ if (stub_type >= ppc_stub_plt_call
+ && stub_type <= ppc_stub_plt_call_both)
{
- stub_entry->target_value = code_value;
- stub_entry->target_section = code_sec;
+ stub_entry->target_value = sym_value;
+ stub_entry->target_section = sym_sec;
}
else
{
- stub_entry->target_value = sym_value;
- stub_entry->target_section = sym_sec;
+ stub_entry->target_value = code_value;
+ stub_entry->target_section = code_sec;
}
stub_entry->h = hash;
stub_entry->plt_ent = plt_ent;
"linker stubs in %u groups\n",
stub_sec_count),
stub_sec_count);
- sprintf (*stats + len, _(" branch %lu\n"
- " toc adjust %lu\n"
- " long branch %lu\n"
- " long toc adj %lu\n"
- " plt call %lu\n"
- " plt call toc %lu\n"
- " global entry %lu"),
+ sprintf (*stats + len, _(" branch %lu\n"
+ " branch toc adj %lu\n"
+ " branch notoc %lu\n"
+ " branch both %lu\n"
+ " long branch %lu\n"
+ " long toc adj %lu\n"
+ " long notoc %lu\n"
+ " long both %lu\n"
+ " plt call %lu\n"
+ " plt call save %lu\n"
+ " plt call notoc %lu\n"
+ " plt call both %lu\n"
+ " global entry %lu"),
htab->stub_count[ppc_stub_long_branch - 1],
htab->stub_count[ppc_stub_long_branch_r2off - 1],
+ htab->stub_count[ppc_stub_long_branch_notoc - 1],
+ htab->stub_count[ppc_stub_long_branch_both - 1],
htab->stub_count[ppc_stub_plt_branch - 1],
htab->stub_count[ppc_stub_plt_branch_r2off - 1],
+ htab->stub_count[ppc_stub_plt_branch_notoc - 1],
+ htab->stub_count[ppc_stub_plt_branch_both - 1],
htab->stub_count[ppc_stub_plt_call - 1],
htab->stub_count[ppc_stub_plt_call_r2save - 1],
+ htab->stub_count[ppc_stub_plt_call_notoc - 1],
+ htab->stub_count[ppc_stub_plt_call_both - 1],
htab->stub_count[ppc_stub_global_entry - 1]);
}
return TRUE;
/* Fall through. */
case R_PPC64_REL24:
+ case R_PPC64_REL24_NOTOC:
case R_PPC64_PLTCALL:
/* Calls to functions with a different TOC, such as calls to
shared objects, need to alter the TOC pointer. This is
htab);
if (r_type == R_PPC64_PLTCALL
&& stub_entry != NULL
- && (stub_entry->stub_type == ppc_stub_plt_call
- || stub_entry->stub_type == ppc_stub_plt_call_r2save))
+ && stub_entry->stub_type >= ppc_stub_plt_call
+ && stub_entry->stub_type <= ppc_stub_plt_call_both)
stub_entry = NULL;
if (stub_entry != NULL
&& (stub_entry->stub_type == ppc_stub_plt_call
|| stub_entry->stub_type == ppc_stub_plt_call_r2save
+ || stub_entry->stub_type == ppc_stub_plt_call_both
|| stub_entry->stub_type == ppc_stub_plt_branch_r2off
- || stub_entry->stub_type == ppc_stub_long_branch_r2off))
+ || stub_entry->stub_type == ppc_stub_plt_branch_both
+ || stub_entry->stub_type == ppc_stub_long_branch_r2off
+ || stub_entry->stub_type == ppc_stub_long_branch_both))
{
bfd_boolean can_plt_call = FALSE;
/* The function doesn't use or change r2. */
can_plt_call = TRUE;
}
+ else if (r_type == R_PPC64_REL24_NOTOC)
+ {
+ /* NOTOC calls don't need to restore r2. */
+ can_plt_call = TRUE;
+ }
/* All of these stubs may modify r2, so there must be a
branch and link followed by a nop. The nop is
if (!can_plt_call)
{
- if (stub_entry->stub_type == ppc_stub_plt_call
- || stub_entry->stub_type == ppc_stub_plt_call_r2save)
+ if (stub_entry->stub_type >= ppc_stub_plt_call
+ && stub_entry->stub_type <= ppc_stub_plt_call_both)
info->callbacks->einfo
/* xgettext:c-format */
(_("%H: call to `%pT' lacks nop, can't restore toc; "
}
if (can_plt_call
- && (stub_entry->stub_type == ppc_stub_plt_call
- || stub_entry->stub_type == ppc_stub_plt_call_r2save))
+ && stub_entry->stub_type >= ppc_stub_plt_call
+ && stub_entry->stub_type <= ppc_stub_plt_call_both)
unresolved_reloc = FALSE;
}
/* Don't use the stub if this branch is in range. */
stub_entry = NULL;
+ if (stub_entry != NULL
+ && (stub_entry->stub_type == ppc_stub_long_branch_notoc
+ || stub_entry->stub_type == ppc_stub_long_branch_both
+ || stub_entry->stub_type == ppc_stub_plt_branch_notoc
+ || stub_entry->stub_type == ppc_stub_plt_branch_both)
+ && (r_type != R_PPC64_REL24_NOTOC
+ || ((fdh ? fdh->elf.other : sym->st_other)
+ & STO_PPC64_LOCAL_MASK) == 1 << STO_PPC64_LOCAL_BIT)
+ && (relocation + addend - from + max_br_offset
+ < 2 * max_br_offset))
+ stub_entry = NULL;
+
+ if (stub_entry != NULL
+ && (stub_entry->stub_type == ppc_stub_long_branch_r2off
+ || stub_entry->stub_type == ppc_stub_long_branch_both
+ || stub_entry->stub_type == ppc_stub_plt_branch_r2off
+ || stub_entry->stub_type == ppc_stub_plt_branch_both)
+ && r_type == R_PPC64_REL24_NOTOC
+ && (relocation + addend - from + max_br_offset
+ < 2 * max_br_offset))
+ stub_entry = NULL;
+
if (stub_entry != NULL)
{
/* Munge up the value and addend so that we call the stub
addend = 0;
reloc_dest = DEST_STUB;
- if ((stub_entry->stub_type == ppc_stub_plt_call
- || stub_entry->stub_type == ppc_stub_plt_call_r2save)
- && (ALWAYS_EMIT_R2SAVE
- || stub_entry->stub_type == ppc_stub_plt_call_r2save)
+ if (((stub_entry->stub_type == ppc_stub_plt_call
+ && ALWAYS_EMIT_R2SAVE)
+ || stub_entry->stub_type == ppc_stub_plt_call_r2save
+ || stub_entry->stub_type == ppc_stub_plt_call_both)
&& rel + 1 < relend
&& rel[1].r_offset == rel->r_offset + 4
&& ELF64_R_TYPE (rel[1].r_info) == R_PPC64_TOCSAVE)
relocation += 4;
+ else if ((stub_entry->stub_type == ppc_stub_long_branch_both
+ || stub_entry->stub_type == ppc_stub_plt_branch_both
+ || stub_entry->stub_type == ppc_stub_plt_call_both)
+ && r_type == R_PPC64_REL24_NOTOC)
+ relocation += 4;
}
if (insn != 0)
else if (h != NULL
&& h->elf.root.type == bfd_link_hash_undefweak
&& h->elf.dynindx == -1
- && r_type == R_PPC64_REL24
+ && (r_type == R_PPC64_REL24
+ || r_type == R_PPC64_REL24_NOTOC)
&& relocation == 0
&& addend == 0)
{
case R_PPC64_REL14_BRNTAKEN:
case R_PPC64_REL14_BRTAKEN:
case R_PPC64_REL24:
+ case R_PPC64_REL24_NOTOC:
break;
case R_PPC64_TPREL16: