Split the ARM Crypto ISA extensions for AES and SHA1+2 into their own options (+aes...
author     Tamar Christina <tamar.christina@arm.com>  Thu, 9 Nov 2017 11:21:31 +0000
committer  Nick Clifton <nickc@redhat.com>            Thu, 9 Nov 2017 11:21:31 +0000
gas * config/tc-aarch64.c (aarch64_arch_option_table): Add armv8.4-a.
(aarch64_features): Add SM4 and SHA3.

include * opcode/aarch64.h:
(AARCH64_FEATURE_V8_4, AARCH64_FEATURE_SM4): New.
(AARCH64_ARCH_V8_4, AARCH64_FEATURE_SHA3): New.

opcodes * aarch64-tbl.h
(aarch64_feature_v8_4, aarch64_feature_crypto_v8_2): New.
(aarch64_feature_sm4, aarch64_feature_sha3): New.
(aarch64_feature_fp_16_v8_2): New.
(ARMV8_4, SHA3, SM4, CRYPTO_V8_2, FP_F16_V8_2): New.
(V8_4_INSN, CRYPTO_V8_2_INSN): New.
(SHA3_INSN, SM4_INSN, FP16_V8_2_INSN): New.

gas/ChangeLog
gas/config/tc-aarch64.c
include/ChangeLog
include/opcode/aarch64.h
opcodes/ChangeLog
opcodes/aarch64-tbl.h

diff --git a/gas/ChangeLog b/gas/ChangeLog
index ad1c10c518f1219ff4047431e57fc164472c44a5..6589654f82f7713916061973e7599759f2d6ec5c 100644
--- a/gas/ChangeLog
+++ b/gas/ChangeLog
@@ -1,3 +1,8 @@
+2017-11-09  Tamar Christina  <tamar.christina@arm.com>
+
+       * config/tc-aarch64.c (aarch64_arch_option_table): Add armv8.4-a.
+       (aarch64_features): Add SM4 and SHA3.
+
 2017-11-08  Tamar Christina  <tamar.christina@arm.com>
 
        * config/tc-aarch64.c
diff --git a/gas/config/tc-aarch64.c b/gas/config/tc-aarch64.c
index ee3c586fac52d4f3b84b068d7295cce8f68e1cdc..054ee92c8f2efad655340f2505255d227176d14c 100644
--- a/gas/config/tc-aarch64.c
+++ b/gas/config/tc-aarch64.c
@@ -8455,6 +8455,7 @@ static const struct aarch64_arch_option_table aarch64_archs[] = {
   {"armv8.1-a", AARCH64_ARCH_V8_1},
   {"armv8.2-a", AARCH64_ARCH_V8_2},
   {"armv8.3-a", AARCH64_ARCH_V8_3},
+  {"armv8.4-a", AARCH64_ARCH_V8_4},
   {NULL, AARCH64_ARCH_NONE}
 };
 
@@ -8506,6 +8507,11 @@ static const struct aarch64_option_cpu_value_table aarch64_features[] = {
                        AARCH64_ARCH_NONE},
   {"aes",              AARCH64_FEATURE (AARCH64_FEATURE_AES, 0),
                        AARCH64_ARCH_NONE},
+  {"sm4",              AARCH64_FEATURE (AARCH64_FEATURE_SM4, 0),
+                       AARCH64_ARCH_NONE},
+  {"sha3",             AARCH64_FEATURE (AARCH64_FEATURE_SHA2
+                                        | AARCH64_FEATURE_SHA3, 0),
+                       AARCH64_ARCH_NONE},
   {NULL,               AARCH64_ARCH_NONE, AARCH64_ARCH_NONE},
 };
 
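With the table entries above in place, the new names become selectable from the assembler. A minimal usage sketch follows, assuming the usual AArch64 gas -march= syntax and the .arch/.arch_extension directives; the file name and cross-assembler prefix are placeholders, and the SM4/SHA3 instructions themselves are wired up by the opcode-table changes, not by this hunk.

    // ext.s - hypothetical input; selects the new extensions but encodes
    // no SM4/SHA3 instructions.
            .arch armv8.4-a          // architecture name added above
            .arch_extension sm4      // feature option added above
            .arch_extension sha3     // note: also turns on SHA2

    $ aarch64-elf-as -march=armv8.2-a+sm4+sha3 -o ext.o ext.s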
diff --git a/include/ChangeLog b/include/ChangeLog
index c27ec9c2df4650f1ce62f67a1819f44b20624264..4e3002f328ccaa53514370f929c8e18a858089c0 100644
--- a/include/ChangeLog
+++ b/include/ChangeLog
@@ -1,3 +1,9 @@
+2017-11-09  Tamar Christina  <tamar.christina@arm.com>
+
+       * opcode/aarch64.h:
+       (AARCH64_FEATURE_V8_4, AARCH64_FEATURE_SM4): New.
+       (AARCH64_ARCH_V8_4, AARCH64_FEATURE_SHA3): New.
+
 2017-11-09  Nick Clifton  <nickc@redhat.com>
 
        * opcode/aarch64.h (aarch64_feature_set): Change type to unsigned
diff --git a/include/opcode/aarch64.h b/include/opcode/aarch64.h
index a597ebe0cbdb78b5abeb3268b53024d85de6b601..20381648e8bd6c7eabfd5d016adc5e11ff3c9956 100644
--- a/include/opcode/aarch64.h
+++ b/include/opcode/aarch64.h
@@ -39,6 +39,9 @@ typedef uint32_t aarch64_insn;
 /* The following bitmasks control CPU features.  */
 #define AARCH64_FEATURE_SHA2   0x200000000ULL  /* SHA2 instructions.  */
 #define AARCH64_FEATURE_AES    0x800000000ULL  /* AES instructions.  */
+#define AARCH64_FEATURE_V8_4   0x000000800ULL  /* ARMv8.4 processors.  */
+#define AARCH64_FEATURE_SM4    0x100000000ULL  /* SM3 & SM4 instructions.  */
+#define AARCH64_FEATURE_SHA3   0x400000000ULL  /* SHA3 instructions.  */
 #define AARCH64_FEATURE_V8     0x00000001      /* All processors.  */
 #define AARCH64_FEATURE_V8_2   0x00000020      /* ARMv8.2 processors.  */
 #define AARCH64_FEATURE_V8_3   0x00000040      /* ARMv8.3 processors.  */
@@ -77,6 +80,8 @@ typedef uint32_t aarch64_insn;
                                                 AARCH64_FEATURE_V8_3   \
                                                 | AARCH64_FEATURE_RCPC \
                                                 | AARCH64_FEATURE_COMPNUM)
+#define AARCH64_ARCH_V8_4      AARCH64_FEATURE (AARCH64_ARCH_V8_3,     \
+                                                AARCH64_FEATURE_V8_4)
 
 #define AARCH64_ARCH_NONE      AARCH64_FEATURE (0, 0)
 #define AARCH64_ANY            AARCH64_FEATURE (-1, 0) /* Any basic core.  */
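As an aside, these feature macros are plain bitmask manipulation. The standalone C sketch below (bit values copied from the hunk above; AARCH64_FEATURE and AARCH64_CPU_HAS_FEATURE re-defined locally under the assumption that they OR and AND their operands, as elsewhere in this header) illustrates why the gas "+sha3" option, which sets SHA2 | SHA3, also satisfies any SHA2 requirement:

    /* sketch.c - illustration only, not part of the patch.  */
    #include <stdio.h>

    typedef unsigned long long aarch64_feature_set;

    /* Bit values copied from the hunk above.  */
    #define AARCH64_FEATURE_SM4    0x100000000ULL
    #define AARCH64_FEATURE_SHA2   0x200000000ULL
    #define AARCH64_FEATURE_SHA3   0x400000000ULL

    /* Assumed local stand-ins for the real header macros.  */
    #define AARCH64_FEATURE(core, coproc)      ((core) | (coproc))
    #define AARCH64_CPU_HAS_FEATURE(cpu, feat) (((cpu) & (feat)) != 0)

    int
    main (void)
    {
      /* The "sha3" entry added to aarch64_features enables SHA2 and SHA3.  */
      aarch64_feature_set sha3_opt
        = AARCH64_FEATURE (AARCH64_FEATURE_SHA2 | AARCH64_FEATURE_SHA3, 0);

      printf ("+sha3 implies SHA2: %s\n",
              AARCH64_CPU_HAS_FEATURE (sha3_opt, AARCH64_FEATURE_SHA2) ? "yes" : "no");
      printf ("+sha3 implies SM4:  %s\n",
              AARCH64_CPU_HAS_FEATURE (sha3_opt, AARCH64_FEATURE_SM4) ? "yes" : "no");
      return 0;
    }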
diff --git a/opcodes/ChangeLog b/opcodes/ChangeLog
index ffa56bbfcffd2fa8f16c554246de45facdad7597..410df9c6781817c9581f8c383e6d48097bb274cf 100644
--- a/opcodes/ChangeLog
+++ b/opcodes/ChangeLog
@@ -1,3 +1,13 @@
+2017-11-09 Tamar Christina  <tamar.christina@arm.com>
+
+       * aarch64-tbl.h
+       (aarch64_feature_v8_4, aarch64_feature_crypto_v8_2): New.
+       (aarch64_feature_sm4, aarch64_feature_sha3): New.
+       (aarch64_feature_fp_16_v8_2): New.
+       (ARMV8_4, SHA3, SM4, CRYPTO_V8_2, FP_F16_V8_2): New.
+       (V8_4_INSN, CRYPTO_V8_2_INSN): New.
+       (SHA3_INSN, SM4_INSN, FP16_V8_2_INSN): New.
+
 2017-11-08  Tamar Christina  <tamar.christina@arm.com>
 
        * aarch64-tbl.h (aarch64_feature_crypto): Add AES and SHA2.
diff --git a/opcodes/aarch64-tbl.h b/opcodes/aarch64-tbl.h
index 9d4f3a426ab7799135d305b2bd2c6bc0868fd8ff..a99f5f5e6f0e915017a3a131d4a0d86af7b3c8c5 100644
--- a/opcodes/aarch64-tbl.h
+++ b/opcodes/aarch64-tbl.h
@@ -2041,6 +2041,18 @@ static const aarch64_feature_set aarch64_feature_sha2 =
   AARCH64_FEATURE (AARCH64_FEATURE_V8 | AARCH64_FEATURE_SHA2, 0);
 static const aarch64_feature_set aarch64_feature_aes =
   AARCH64_FEATURE (AARCH64_FEATURE_V8 | AARCH64_FEATURE_AES, 0);
+static const aarch64_feature_set aarch64_feature_v8_4 =
+  AARCH64_FEATURE (AARCH64_FEATURE_V8_4, 0);
+static const aarch64_feature_set aarch64_feature_crypto_v8_2 =
+  AARCH64_FEATURE (AARCH64_FEATURE_V8_2 | AARCH64_FEATURE_CRYPTO, 0);
+static const aarch64_feature_set aarch64_feature_sm4 =
+  AARCH64_FEATURE (AARCH64_FEATURE_V8_2 | AARCH64_FEATURE_SM4, 0);
+static const aarch64_feature_set aarch64_feature_sha3 =
+  AARCH64_FEATURE (AARCH64_FEATURE_V8_2 | AARCH64_FEATURE_SHA2
+                  | AARCH64_FEATURE_SHA3, 0);
+static const aarch64_feature_set aarch64_feature_fp_16_v8_2 =
+  AARCH64_FEATURE (AARCH64_FEATURE_V8_2 | AARCH64_FEATURE_F16
+                  | AARCH64_FEATURE_FP, 0);
 
 #define CORE           &aarch64_feature_v8
 #define FP             &aarch64_feature_fp
@@ -2062,6 +2074,11 @@ static const aarch64_feature_set aarch64_feature_aes =
 #define RCPC           &aarch64_feature_rcpc
 #define SHA2           &aarch64_feature_sha2
 #define AES            &aarch64_feature_aes
+#define ARMV8_4                &aarch64_feature_v8_4
+#define SHA3           &aarch64_feature_sha3
+#define SM4            &aarch64_feature_sm4
+#define CRYPTO_V8_2    &aarch64_feature_crypto_v8_2
+#define FP_F16_V8_2    &aarch64_feature_fp_16_v8_2
 #define DOTPROD                &aarch64_feature_dotprod
 
 #define CORE_INSN(NAME,OPCODE,MASK,CLASS,OP,OPS,QUALS,FLAGS) \
@@ -2099,6 +2116,16 @@ static const aarch64_feature_set aarch64_feature_aes =
   { NAME, OPCODE, MASK, CLASS, 0, SHA2, OPS, QUALS, FLAGS, 0, NULL }
 #define AES_INSN(NAME,OPCODE,MASK,CLASS,OPS,QUALS,FLAGS) \
   { NAME, OPCODE, MASK, CLASS, 0, AES, OPS, QUALS, FLAGS, 0, NULL }
+#define V8_4_INSN(NAME,OPCODE,MASK,CLASS,OPS,QUALS,FLAGS) \
+  { NAME, OPCODE, MASK, CLASS, 0, ARMV8_4, OPS, QUALS, FLAGS, 0, NULL }
+#define CRYPTO_V8_2_INSN(NAME,OPCODE,MASK,CLASS,OPS,QUALS,FLAGS) \
+  { NAME, OPCODE, MASK, CLASS, 0, CRYPTO_V8_2, OPS, QUALS, FLAGS, 0, NULL }
+#define SHA3_INSN(NAME,OPCODE,MASK,CLASS,OPS,QUALS,FLAGS) \
+  { NAME, OPCODE, MASK, CLASS, 0, SHA3, OPS, QUALS, FLAGS, 0, NULL }
+#define SM4_INSN(NAME,OPCODE,MASK,CLASS,OPS,QUALS,FLAGS) \
+  { NAME, OPCODE, MASK, CLASS, 0, SM4, OPS, QUALS, FLAGS, 0, NULL }
+#define FP16_V8_2_INSN(NAME,OPCODE,MASK,CLASS,OPS,QUALS,FLAGS) \
+  { NAME, OPCODE, MASK, CLASS, 0, FP_F16_V8_2, OPS, QUALS, FLAGS, 0, NULL }
 #define DOT_INSN(NAME,OPCODE,MASK,CLASS,OPS,QUALS,FLAGS) \
   { NAME, OPCODE, MASK, CLASS, 0, DOTPROD, OPS, QUALS, FLAGS, 0, NULL }
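Finally, purely as an illustration of what these wrappers do: each one just fills in the feature-set field of an opcode-table row. With hypothetical placeholder arguments (the name "example", zero opcode and mask, and symbolic class/operand/qualifier stand-ins rather than real definitions), SM4_INSN would expand like this:

    /* Hypothetical invocation - every argument is a placeholder.  */
    SM4_INSN ("example", 0x0, 0x0, some_class, SOME_OPS, SOME_QUALS, 0)

    /* ...expands to a row whose sixth field, SM4, is the feature set that
       makes the entry available only when the SM4 feature is enabled
       (e.g. via the new +sm4 option):  */
    { "example", 0x0, 0x0, some_class, 0, SM4, SOME_OPS, SOME_QUALS, 0, 0, NULL }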