+2017-02-23 Alan Modra <amodra@gmail.com>
+
+ PR 20744
+ * elf32-ppc.c (ppc_elf_howto_raw): Correct dst_mask on all VLE
+ 16D relocations.
+ (ppc_elf_vle_split16): Correct field mask and shift for 16D relocs.
+ (ppc_elf_relocate_section): Correct calculation for VLE SDAREL
+ relocs.
+
2017-02-22 Maciej W. Rozycki <macro@imgtec.com>
PR ld/20828
"R_PPC_VLE_LO16D", /* name */
FALSE, /* partial_inplace */
0, /* src_mask */
- 0x1f007ff, /* dst_mask */
+ 0x3e007ff, /* dst_mask */
FALSE), /* pcrel_offset */
/* Bits 16-31 split16a format. */
"R_PPC_VLE_HI16D", /* name */
FALSE, /* partial_inplace */
0, /* src_mask */
- 0x1f007ff, /* dst_mask */
+ 0x3e007ff, /* dst_mask */
FALSE), /* pcrel_offset */
/* Bits 16-31 (High Adjusted) in split16a format. */
"R_PPC_VLE_HA16D", /* name */
FALSE, /* partial_inplace */
0, /* src_mask */
- 0x1f007ff, /* dst_mask */
+ 0x3e007ff, /* dst_mask */
FALSE), /* pcrel_offset */
/* This reloc is like R_PPC_EMB_SDA21 but only applies to e_add16i
"R_PPC_VLE_SDAREL_LO16D", /* name */
FALSE, /* partial_inplace */
0, /* src_mask */
- 0x1f007ff, /* dst_mask */
+ 0x3e007ff, /* dst_mask */
FALSE), /* pcrel_offset */
/* Bits 16-31 relative to _SDA_BASE_ in split16a format. */
"R_PPC_VLE_SDAREL_HI16D", /* name */
FALSE, /* partial_inplace */
0, /* src_mask */
- 0x1f007ff, /* dst_mask */
+ 0x3e007ff, /* dst_mask */
FALSE), /* pcrel_offset */
/* Bits 16-31 (HA) relative to _SDA_BASE split16a format. */
"R_PPC_VLE_SDAREL_HA16D", /* name */
FALSE, /* partial_inplace */
0, /* src_mask */
- 0x1f007ff, /* dst_mask */
+ 0x3e007ff, /* dst_mask */
FALSE), /* pcrel_offset */
HOWTO (R_PPC_IRELATIVE, /* type */
}
}
top5 = value & 0xf800;
- top5 = top5 << (split16_format == split16a_type ? 5 : 9);
- insn &= (split16_format == split16a_type ? ~0x1f07ff : ~0x1f007ff);
+ top5 = top5 << (split16_format == split16a_type ? 5 : 10);
+ insn &= (split16_format == split16a_type ? ~0x1f07ff : ~0x3e007ff);
insn |= top5;
insn |= value & 0x7ff;
bfd_put_32 (input_bfd, insn, loc);
{
bfd_vma value;
const char *name;
- //int reg;
struct elf_link_hash_entry *sda = NULL;
if (sec == NULL || sec->output_section == NULL)
name = bfd_get_section_name (output_bfd, sec->output_section);
if (strcmp (name, ".sdata") == 0
|| strcmp (name, ".sbss") == 0)
- {
- //reg = 13;
- sda = htab->sdata[0].sym;
- }
+ sda = htab->sdata[0].sym;
else if (strcmp (name, ".sdata2") == 0
|| strcmp (name, ".sbss2") == 0)
- {
- //reg = 2;
- sda = htab->sdata[1].sym;
- }
+ sda = htab->sdata[1].sym;
else
{
_bfd_error_handler
goto copy_reloc;
}
- if (sda != NULL)
+ if (sda == NULL || !is_static_defined (sda))
{
- if (!is_static_defined (sda))
- {
- unresolved_reloc = TRUE;
- break;
- }
+ unresolved_reloc = TRUE;
+ break;
}
-
- value = (sda->root.u.def.section->output_section->vma
- + sda->root.u.def.section->output_offset
- + addend);
+ value = relocation + addend - SYM_VAL (sda);
if (r_type == R_PPC_VLE_SDAREL_LO16A)
ppc_elf_vle_split16 (input_bfd, input_section, rel->r_offset,
+2017-02-23 Alan Modra <amodra@gmail.com>
+
+ PR 20744
+ * testsuite/ld-powerpc/vle-reloc-2.s: Use r6 for last insn of
+ each group.
+ * testsuite/ld-powerpc/vle-reloc-2.d: Update for above change
+ and sdarel reloc fix.
+
2017-02-22 Maciej W. Rozycki <macro@imgtec.com>
PR ld/20828
.*: 70 20 c1 a2 e_or2i r1,418
.*: 70 40 c1 81 e_or2i r2,385
.*: 70 60 c1 81 e_or2i r3,385
-.*: 70 80 c1 ae e_or2i r4,430
-.*: 70 a0 c1 80 e_or2i r5,384
-.*: 70 40 c1 81 e_or2i r2,385
+.*: 70 90 c0 00 e_or2i r4,32768
+.*: 70 bf c7 ff e_or2i r5,65535
+.*: 70 c0 c0 00 e_or2i r6,0
.*: 70 20 c9 a2 e_and2i\. r1,418
.*: 70 40 c9 81 e_and2i\. r2,385
.*: 70 60 c9 81 e_and2i\. r3,385
-.*: 70 80 c9 ae e_and2i\. r4,430
-.*: 70 a0 c9 80 e_and2i\. r5,384
-.*: 70 40 c9 81 e_and2i\. r2,385
+.*: 70 90 c8 00 e_and2i\. r4,32768
+.*: 70 bf cf ff e_and2i\. r5,65535
+.*: 70 c0 c8 00 e_and2i\. r6,0
.*: 70 20 d1 a2 e_or2is r1,418
.*: 70 40 d1 81 e_or2is r2,385
.*: 70 60 d1 81 e_or2is r3,385
-.*: 70 80 d1 ae e_or2is r4,430
-.*: 70 a0 d1 80 e_or2is r5,384
-.*: 70 40 d1 81 e_or2is r2,385
+.*: 70 90 d0 00 e_or2is r4,32768
+.*: 70 bf d7 ff e_or2is r5,65535
+.*: 70 c0 d0 00 e_or2is r6,0
.*: 70 20 e1 a2 e_lis r1,418
.*: 70 40 e1 81 e_lis r2,385
.*: 70 60 e1 81 e_lis r3,385
-.*: 70 80 e1 ae e_lis r4,430
-.*: 70 a0 e1 80 e_lis r5,384
-.*: 70 40 e1 81 e_lis r2,385
+.*: 70 90 e0 00 e_lis r4,32768
+.*: 70 bf e7 ff e_lis r5,65535
+.*: 70 c0 e0 00 e_lis r6,0
.*: 70 20 e9 a2 e_and2is\. r1,418
.*: 70 40 e9 81 e_and2is\. r2,385
.*: 70 60 e9 81 e_and2is\. r3,385
-.*: 70 80 e9 ae e_and2is\. r4,430
-.*: 70 a0 e9 80 e_and2is\. r5,384
-.*: 70 40 e9 81 e_and2is\. r2,385
+.*: 70 90 e8 00 e_and2is\. r4,32768
+.*: 70 bf ef ff e_and2is\. r5,65535
+.*: 70 c0 e8 00 e_and2is\. r6,0
.*: 70 01 99 a2 e_cmp16i r1,418
.*: 70 02 99 81 e_cmp16i r2,385
.*: 70 03 99 81 e_cmp16i r3,385
-.*: 70 04 99 ae e_cmp16i r4,430
-.*: 70 05 99 80 e_cmp16i r5,384
-.*: 70 02 99 81 e_cmp16i r2,385
+.*: 72 04 98 00 e_cmp16i r4,-32768
+.*: 73 e5 9f ff e_cmp16i r5,-1
+.*: 70 06 98 00 e_cmp16i r6,0
.*: 70 01 a9 a2 e_cmpl16i r1,418
.*: 70 02 a9 81 e_cmpl16i r2,385
.*: 70 03 a9 81 e_cmpl16i r3,385
-.*: 70 04 a9 ae e_cmpl16i r4,430
-.*: 70 05 a9 80 e_cmpl16i r5,384
-.*: 70 02 a9 81 e_cmpl16i r2,385
+.*: 72 04 a8 00 e_cmpl16i r4,32768
+.*: 73 e5 af ff e_cmpl16i r5,65535
+.*: 70 06 a8 00 e_cmpl16i r6,0
.*: 70 01 b1 a2 e_cmph16i r1,418
.*: 70 02 b1 81 e_cmph16i r2,385
.*: 70 03 b1 81 e_cmph16i r3,385
-.*: 70 04 b1 ae e_cmph16i r4,430
-.*: 70 05 b1 80 e_cmph16i r5,384
-.*: 70 02 b1 81 e_cmph16i r2,385
+.*: 72 04 b0 00 e_cmph16i r4,-32768
+.*: 73 e5 b7 ff e_cmph16i r5,-1
+.*: 70 06 b0 00 e_cmph16i r6,0
.*: 70 01 b9 a2 e_cmphl16i r1,418
.*: 70 02 b9 81 e_cmphl16i r2,385
.*: 70 03 b9 81 e_cmphl16i r3,385
-.*: 70 04 b9 ae e_cmphl16i r4,430
-.*: 70 05 b9 80 e_cmphl16i r5,384
-.*: 70 02 b9 81 e_cmphl16i r2,385
+.*: 72 04 b8 00 e_cmphl16i r4,32768
+.*: 73 e5 bf ff e_cmphl16i r5,65535
+.*: 70 06 b8 00 e_cmphl16i r6,0
.*: 70 01 89 a2 e_add2i\. r1,418
.*: 70 02 89 81 e_add2i\. r2,385
.*: 70 03 89 81 e_add2i\. r3,385
-.*: 70 04 89 ae e_add2i\. r4,430
-.*: 70 05 89 80 e_add2i\. r5,384
-.*: 70 02 89 81 e_add2i\. r2,385
+.*: 72 04 88 00 e_add2i\. r4,-32768
+.*: 73 e5 8f ff e_add2i\. r5,-1
+.*: 70 06 88 00 e_add2i\. r6,0
.*: 70 01 91 a2 e_add2is r1,418
.*: 70 02 91 81 e_add2is r2,385
.*: 70 03 91 81 e_add2is r3,385
-.*: 70 04 91 ae e_add2is r4,430
-.*: 70 05 91 80 e_add2is r5,384
-.*: 70 02 91 81 e_add2is r2,385
+.*: 72 04 90 00 e_add2is r4,-32768
+.*: 73 e5 97 ff e_add2is r5,-1
+.*: 70 06 90 00 e_add2is r6,0
.*: 70 01 a1 a2 e_mull2i r1,418
.*: 70 02 a1 81 e_mull2i r2,385
.*: 70 03 a1 81 e_mull2i r3,385
-.*: 70 04 a1 ae e_mull2i r4,430
-.*: 70 05 a1 80 e_mull2i r5,384
-.*: 70 02 a1 81 e_mull2i r2,385
+.*: 72 04 a0 00 e_mull2i r4,-32768
+.*: 73 e5 a7 ff e_mull2i r5,-1
+.*: 70 06 a0 00 e_mull2i r6,0
.* <sub3>:
.*: 00 04 se_blr
.* <sub4>:
e_or2i 3, high_adjust@ha
e_or2i 4, low_sdarel@sdarel@l
e_or2i 5, high_sdarel@sdarel@h
- e_or2i 2, high_adjust_sdarel@sdarel@ha
+ e_or2i 6, high_adjust_sdarel@sdarel@ha
e_and2i. 1, low@l
e_and2i. 2, high@h
e_and2i. 3, high_adjust@ha
e_and2i. 4, low_sdarel@sdarel@l
e_and2i. 5, high_sdarel@sdarel@h
- e_and2i. 2, high_adjust_sdarel@sdarel@ha
+ e_and2i. 6, high_adjust_sdarel@sdarel@ha
e_or2is 1, low@l
e_or2is 2, high@h
e_or2is 3, high_adjust@ha
e_or2is 4, low_sdarel@sdarel@l
e_or2is 5, high_sdarel@sdarel@h
- e_or2is 2, high_adjust_sdarel@sdarel@ha
+ e_or2is 6, high_adjust_sdarel@sdarel@ha
e_lis 1, low@l
e_lis 2, high@h
e_lis 3, high_adjust@ha
e_lis 4, low_sdarel@sdarel@l
e_lis 5, high_sdarel@sdarel@h
- e_lis 2, high_adjust_sdarel@sdarel@ha
+ e_lis 6, high_adjust_sdarel@sdarel@ha
e_and2is. 1, low@l
e_and2is. 2, high@h
e_and2is. 3, high_adjust@ha
e_and2is. 4, low_sdarel@sdarel@l
e_and2is. 5, high_sdarel@sdarel@h
- e_and2is. 2, high_adjust_sdarel@sdarel@ha
+ e_and2is. 6, high_adjust_sdarel@sdarel@ha
e_cmp16i 1, low@l
e_cmp16i 2, high@h
e_cmp16i 3, high_adjust@ha
e_cmp16i 4, low_sdarel@sdarel@l
e_cmp16i 5, high_sdarel@sdarel@h
- e_cmp16i 2, high_adjust_sdarel@sdarel@ha
+ e_cmp16i 6, high_adjust_sdarel@sdarel@ha
e_cmpl16i 1, low@l
e_cmpl16i 2, high@h
e_cmpl16i 3, high_adjust@ha
e_cmpl16i 4, low_sdarel@sdarel@l
e_cmpl16i 5, high_sdarel@sdarel@h
- e_cmpl16i 2, high_adjust_sdarel@sdarel@ha
+ e_cmpl16i 6, high_adjust_sdarel@sdarel@ha
e_cmph16i 1, low@l
e_cmph16i 2, high@h
e_cmph16i 3, high_adjust@ha
e_cmph16i 4, low_sdarel@sdarel@l
e_cmph16i 5, high_sdarel@sdarel@h
- e_cmph16i 2, high_adjust_sdarel@sdarel@ha
+ e_cmph16i 6, high_adjust_sdarel@sdarel@ha
e_cmphl16i 1, low@l
e_cmphl16i 2, high@h
e_cmphl16i 3, high_adjust@ha
e_cmphl16i 4, low_sdarel@sdarel@l
e_cmphl16i 5, high_sdarel@sdarel@h
- e_cmphl16i 2, high_adjust_sdarel@sdarel@ha
+ e_cmphl16i 6, high_adjust_sdarel@sdarel@ha
e_add2i. 1, low@l
e_add2i. 2, high@h
e_add2i. 3, high_adjust@ha
e_add2i. 4, low_sdarel@sdarel@l
e_add2i. 5, high_sdarel@sdarel@h
- e_add2i. 2, high_adjust_sdarel@sdarel@ha
+ e_add2i. 6, high_adjust_sdarel@sdarel@ha
e_add2is 1, low@l
e_add2is 2, high@h
e_add2is 3, high_adjust@ha
e_add2is 4, low_sdarel@sdarel@l
e_add2is 5, high_sdarel@sdarel@h
- e_add2is 2, high_adjust_sdarel@sdarel@ha
+ e_add2is 6, high_adjust_sdarel@sdarel@ha
e_mull2i 1, low@l
e_mull2i 2, high@h
e_mull2i 3, high_adjust@ha
e_mull2i 4, low_sdarel@sdarel@l
e_mull2i 5, high_sdarel@sdarel@h
- e_mull2i 2, high_adjust_sdarel@sdarel@ha
+ e_mull2i 6, high_adjust_sdarel@sdarel@ha