From 52bab3f6eb87bca3b3b79e28f516da9e79445d07 Mon Sep 17 00:00:00 2001 From: Giacomo Travaglini Date: Tue, 1 May 2018 10:14:35 +0100 Subject: [PATCH] arch-arm: AArch64 Crypto SHA This patch implements the AArch64 secure hashing instructions from the Crypto extension. Change-Id: I2cdfa81b994637c880f2523fe37cdc6596d05cb1 Signed-off-by: Giacomo Travaglini Reviewed-by: Andreas Sandberg Reviewed-on: https://gem5-review.googlesource.com/c/13249 Maintainer: Andreas Sandberg --- src/arch/arm/isa/formats/aarch64.isa | 11 +- src/arch/arm/isa/formats/crypto64.isa | 100 ++++++++++++++++ src/arch/arm/isa/formats/formats.isa | 3 + src/arch/arm/isa/insts/crypto64.isa | 159 ++++++++++++++++++++++++++ src/arch/arm/isa/insts/insts.isa | 3 + 5 files changed, 273 insertions(+), 3 deletions(-) create mode 100644 src/arch/arm/isa/formats/crypto64.isa create mode 100644 src/arch/arm/isa/insts/crypto64.isa diff --git a/src/arch/arm/isa/formats/aarch64.isa b/src/arch/arm/isa/formats/aarch64.isa index 77e598c7f..241f9637f 100644 --- a/src/arch/arm/isa/formats/aarch64.isa +++ b/src/arch/arm/isa/formats/aarch64.isa @@ -2111,15 +2111,20 @@ namespace Aarch64 return decodeNeonSc3Diff(machInst); } else if (bits(machInst, 20, 17) == 0x0) { return decodeNeonSc2RegMisc(machInst); + } else if (bits(machInst, 20, 17) == 0x4) { + return decodeCryptoTwoRegSHA(machInst); } else if (bits(machInst, 20, 17) == 0x8) { return decodeNeonScPwise(machInst); } else { return new Unknown64(machInst); } } else if (bits(machInst, 23, 22) == 0 && - bits(machInst, 15) == 0 && - bits(machInst, 10) == 1) { - return decodeNeonScCopy(machInst); + bits(machInst, 15) == 0) { + if (bits(machInst, 10) == 1) { + return decodeNeonScCopy(machInst); + } else { + return decodeCryptoThreeRegSHA(machInst); + } } else { return new Unknown64(machInst); } diff --git a/src/arch/arm/isa/formats/crypto64.isa b/src/arch/arm/isa/formats/crypto64.isa new file mode 100644 index 000000000..8975c2d93 --- /dev/null +++ b/src/arch/arm/isa/formats/crypto64.isa @@ -0,0 +1,100 @@ +// -*- mode:c++ -*- + +// Copyright (c) 2018 ARM Limited +// All rights reserved +// +// The license below extends only to copyright in the software and shall +// not be construed as granting a license to any other intellectual +// property including but not limited to intellectual property relating +// to a hardware implementation of the functionality of the software +// licensed hereunder. You may use the software subject to the license +// terms below provided that you ensure that this notice is replicated +// unmodified and in its entirety in all distributions of the software, +// modified or unmodified, in source code or in binary form. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer; +// redistributions in binary form must reproduce the above copyright +// notice, this list of conditions and the following disclaimer in the +// documentation and/or other materials provided with the distribution; +// neither the name of the copyright holders nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// Authors: Giacomo Travaglini + +let {{ + header_output = ''' + StaticInstPtr + decodeCryptoThreeRegSHA(ExtMachInst machInst); + + StaticInstPtr + decodeCryptoTwoRegSHA(ExtMachInst machInst); + ''' + + decoder_output = ''' + + StaticInstPtr + decodeCryptoTwoRegSHA(ExtMachInst machInst) + { + const auto opcode = bits(machInst, 16, 12); + const auto size = bits(machInst, 23, 22); + + IntRegIndex rd = (IntRegIndex) (uint8_t) bits(machInst, 4, 0); + IntRegIndex rn = (IntRegIndex) (uint8_t) bits(machInst, 9, 5); + + if (size) { + // UNALLOCATED + return new Unknown64(machInst); + } else { + switch (opcode) { + case 0x0: return new SHA1H64(machInst, rd, rn); + case 0x1: return new SHA1SU164(machInst, rd, rn); + case 0x2: return new SHA256SU064(machInst, rd, rn); + default: return new Unknown64(machInst); + } + } + } + + StaticInstPtr + decodeCryptoThreeRegSHA(ExtMachInst machInst) + { + const auto opcode = bits(machInst, 14, 12); + const auto size = bits(machInst, 23, 22); + + IntRegIndex rd = (IntRegIndex) (uint8_t) bits(machInst, 4, 0); + IntRegIndex rn = (IntRegIndex) (uint8_t) bits(machInst, 9, 5); + IntRegIndex rm = (IntRegIndex) (uint8_t) bits(machInst, 20, 16); + + if (size) { + // UNALLOCATED + return new Unknown64(machInst); + } else { + switch (opcode) { + case 0x0: return new SHA1C64(machInst, rd, rn, rm); + case 0x1: return new SHA1P64(machInst, rd, rn, rm); + case 0x2: return new SHA1M64(machInst, rd, rn, rm); + case 0x3: return new SHA1SU064(machInst, rd, rn, rm); + case 0x4: return new SHA256H64(machInst, rd, rn, rm); + case 0x5: return new SHA256H264(machInst, rd, rn, rm); + case 0x6: return new SHA256SU164(machInst, rd, rn, rm); + default: return new Unknown64(machInst); + } + } + } + ''' +}}; diff --git a/src/arch/arm/isa/formats/formats.isa b/src/arch/arm/isa/formats/formats.isa index 37ac9eee2..25ace4be3 100644 --- a/src/arch/arm/isa/formats/formats.isa +++ b/src/arch/arm/isa/formats/formats.isa @@ -83,5 +83,8 @@ //M5 Psuedo-ops ##include "m5ops.isa" +//Crypto Ops +##include "crypto64.isa" + //gem5-internal pseudo instructions ##include "pseudo.isa" diff --git a/src/arch/arm/isa/insts/crypto64.isa b/src/arch/arm/isa/insts/crypto64.isa new file mode 100644 index 000000000..1af9263fd --- /dev/null +++ b/src/arch/arm/isa/insts/crypto64.isa @@ -0,0 +1,159 @@ +// -*- mode:c++ -*- +// +// Copyright (c) 2018 ARM Limited +// All rights reserved +// +// The license below extends only to copyright in the software and shall +// not be construed as granting a license to any other intellectual +// property including but not limited to intellectual property relating +// to a hardware implementation of the functionality of the software +// licensed hereunder. 
You may use the software subject to the license +// terms below provided that you ensure that this notice is replicated +// unmodified and in its entirety in all distributions of the software, +// modified or unmodified, in source code or in binary form. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer; +// redistributions in binary form must reproduce the above copyright +// notice, this list of conditions and the following disclaimer in the +// documentation and/or other materials provided with the distribution; +// neither the name of the copyright holders nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// Authors: Matt Horsnell +// Prakash Ramrakhyani +// Giacomo Travaglini + +let {{ + header_output = "" + decoder_output = "" + exec_output = "" + + cryptoEnabledCheckCode = ''' + auto crypto_reg = xc->tcBase()->readMiscReg(MISCREG_ID_AA64ISAR0_EL1); + if (!(crypto_reg & %(mask)d)) { + return std::make_shared(machInst, true); + } + ''' + cryptoRegRegRegPrefix = ''' + Crypto crypto; + RegVect srcReg1, srcReg2, destReg; + // Read source and destination registers. + ''' + for reg in range(4): + cryptoRegRegRegPrefix += ''' + srcReg1.regs[%(reg)d] = htog(AA64FpOp1P%(reg)d_uw); + srcReg2.regs[%(reg)d] = htog(AA64FpOp2P%(reg)d_uw); + destReg.regs[%(reg)d] = htog(AA64FpDestP%(reg)d_uw); + ''' % { "reg" : reg } + cryptoRegRegRegPrefix += ''' + unsigned char *output = (unsigned char *)(&destReg.regs[0]); + unsigned char *input = (unsigned char *)(&srcReg1.regs[0]); + unsigned char *input2 = (unsigned char *)(&srcReg2.regs[0]); + ''' + + cryptoSuffix = "" + for reg in range(4): + cryptoSuffix += ''' + AA64FpDestP%(reg)d_uw = gtoh(destReg.regs[%(reg)d]); + ''' % { "reg" : reg } + + cryptoRegRegPrefix = ''' + Crypto crypto; + RegVect srcReg1, destReg; + // Read source and destination registers. + ''' + for reg in range(4): + cryptoRegRegPrefix += ''' + srcReg1.regs[%(reg)d] = htog(AA64FpOp1P%(reg)d_uw); + destReg.regs[%(reg)d] = htog(AA64FpDestP%(reg)d_uw); + ''' % { "reg" : reg } + + cryptoRegRegPrefix += ''' + // cast into format passed to aes encrypt method. 
+ unsigned char *output = (unsigned char *)(&destReg.regs[0]); + unsigned char *input = (unsigned char *)(&srcReg1.regs[0]); + ''' + + def cryptoRegRegRegInst(name, Name, opClass, enable_check, crypto_func): + global header_output, decoder_output, exec_output + + crypto_prefix = enable_check + cryptoRegRegRegPrefix + cryptocode = crypto_prefix + crypto_func + cryptoSuffix + + cryptoiop = InstObjParams(name, Name, "RegRegRegOp", + { "code": cryptocode, + "r_count": 4, + "predicate_test": predicateTest, + "op_class": opClass}, []) + header_output += RegRegRegOpDeclare.subst(cryptoiop) + decoder_output += RegRegRegOpConstructor.subst(cryptoiop) + exec_output += CryptoPredOpExecute.subst(cryptoiop) + + def cryptoRegRegInst(name, Name, opClass, enable_check, crypto_func): + global header_output, decoder_output, exec_output + + crypto_prefix = enable_check + cryptoRegRegPrefix + cryptocode = crypto_prefix + crypto_func + cryptoSuffix + + cryptoiop = InstObjParams(name, Name, "RegRegOp", + { "code": cryptocode, + "r_count": 4, + "predicate_test": predicateTest, + "op_class": opClass}, []) + header_output += RegRegOpDeclare.subst(cryptoiop) + decoder_output += RegRegOpConstructor.subst(cryptoiop) + exec_output += CryptoPredOpExecute.subst(cryptoiop) + + sha1_cCode = "crypto.sha1C(output, input, input2);" + sha1_pCode = "crypto.sha1P(output, input, input2);" + sha1_mCode = "crypto.sha1M(output, input, input2);" + sha1_hCode = "crypto.sha1H(output, input);" + sha1_su0Code = "crypto.sha1Su0(output, input, input2);" + sha1_su1Code = "crypto.sha1Su1(output, input);" + + sha256_hCode = "crypto.sha256H(output, input, input2);" + sha256_h2Code = "crypto.sha256H2(output, input, input2);" + sha256_su0Code = "crypto.sha256Su0(output, input);" + sha256_su1Code = "crypto.sha256Su1(output, input, input2);" + + sha1_enabled = cryptoEnabledCheckCode % { "mask" : 0xF00 } + cryptoRegRegRegInst("sha1c", "SHA1C64", "SimdSha1HashOp", + sha1_enabled, sha1_cCode) + cryptoRegRegRegInst("sha1p", "SHA1P64", "SimdSha1HashOp", + sha1_enabled, sha1_pCode) + cryptoRegRegRegInst("sha1m", "SHA1M64", "SimdSha1HashOp", + sha1_enabled, sha1_mCode) + cryptoRegRegInst("sha1h", "SHA1H64", "SimdSha1Hash2Op", + sha1_enabled, sha1_hCode) + cryptoRegRegRegInst("sha1su0", "SHA1SU064", "SimdShaSigma3Op", + sha1_enabled, sha1_su0Code) + cryptoRegRegInst("sha1su1", "SHA1SU164", "SimdShaSigma2Op", + sha1_enabled, sha1_su1Code) + + sha2_enabled = cryptoEnabledCheckCode % { "mask" : 0xF000 } + cryptoRegRegRegInst("sha256h", "SHA256H64", "SimdSha256HashOp", + sha2_enabled, sha256_hCode) + cryptoRegRegRegInst("sha256h2", "SHA256H264", "SimdSha256Hash2Op", + sha2_enabled, sha256_h2Code) + cryptoRegRegInst("sha256su0", "SHA256SU064", "SimdShaSigma2Op", + sha2_enabled, sha256_su0Code) + cryptoRegRegRegInst("sha256su1", "SHA256SU164", "SimdShaSigma3Op", + sha2_enabled, sha256_su1Code) +}}; + diff --git a/src/arch/arm/isa/insts/insts.isa b/src/arch/arm/isa/insts/insts.isa index b95356b70..faca2f697 100644 --- a/src/arch/arm/isa/insts/insts.isa +++ b/src/arch/arm/isa/insts/insts.isa @@ -105,3 +105,6 @@ split decoder; //Crypto ##include "crypto.isa" + +//Crypto +##include "crypto64.isa" -- 2.30.2
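Editor's note on the decode change: the hunk in aarch64.isa adds two branches to the scalar AdvSIMD decode path — bits[20:17] == 0x4 routes to decodeCryptoTwoRegSHA, and when bits[23:22] == 0 with bit 15 clear, a clear bit 10 routes to decodeCryptoThreeRegSHA. Below is a minimal standalone sketch of those bit tests applied in isolation; bits(), bit(), ShaGroup, and classify() are illustrative stand-ins rather than gem5 code, and the real decoder performs these checks inside a much larger decode tree where other fields have already been matched.

// Illustrative sketch (not gem5 code) of the new SHA dispatch bit tests.
#include <cstdint>
#include <cstdio>

static uint32_t bits(uint32_t val, int first, int last)
{
    // Extract bits [first:last], inclusive, mirroring gem5's bitfield helper.
    return (val >> last) & ((1u << (first - last + 1)) - 1);
}

static uint32_t bit(uint32_t val, int pos) { return (val >> pos) & 1u; }

enum class ShaGroup { TwoRegSHA, ThreeRegSHA, NotSha };

// Applies the same tests the patch adds to the scalar AdvSIMD path:
//  - two-register SHA:   bits[20:17] == 0x4
//  - three-register SHA: bits[23:22] == 0, bit 15 == 0, bit 10 == 0
ShaGroup classify(uint32_t machInst)
{
    if (bits(machInst, 20, 17) == 0x4)
        return ShaGroup::TwoRegSHA;
    if (bits(machInst, 23, 22) == 0 && bit(machInst, 15) == 0 &&
        bit(machInst, 10) == 0)
        return ShaGroup::ThreeRegSHA;
    return ShaGroup::NotSha;
}

int main()
{
    // Under this field layout, sha256h q0, q0, v2.4s encodes as 0x5e024000
    // and should land in the three-register SHA group (prints "1").
    std::printf("%d\n", static_cast<int>(classify(0x5e024000u)));
    return 0;
}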
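Editor's note on the feature gating: every generated execute body starts with cryptoEnabledCheckCode, which reads ID_AA64ISAR0_EL1 and returns an Undefined Instruction fault when the relevant field is zero — mask 0xF00 covers the SHA1 field (bits [11:8]) and 0xF000 the SHA2 field (bits [15:12]). A hedged sketch of that gating in plain C++ follows; shaGroupEnabled() and the constants are illustrative, and the register value is passed in as a parameter rather than read from a gem5 thread context.

// Sketch of the ID_AA64ISAR0_EL1 gating used by cryptoEnabledCheckCode.
// The field positions follow the Arm ARM; everything else is illustrative.
#include <cstdint>
#include <stdexcept>

constexpr uint64_t kSha1Mask = 0xF00;   // ID_AA64ISAR0_EL1.SHA1, bits [11:8]
constexpr uint64_t kSha2Mask = 0xF000;  // ID_AA64ISAR0_EL1.SHA2, bits [15:12]

// True if the field selected by mask is non-zero, i.e. the instruction
// group is implemented; the real code returns an UndefinedInstruction
// fault instead of throwing.
bool shaGroupEnabled(uint64_t id_aa64isar0, uint64_t mask)
{
    return (id_aa64isar0 & mask) != 0;
}

int main()
{
    // SHA1 = 1 and SHA2 = 1 indicate the basic SHA1/SHA256 instructions
    // are implemented.
    const uint64_t isar0 = (0x1ULL << 8) | (0x1ULL << 12);
    if (!shaGroupEnabled(isar0, kSha1Mask))
        throw std::runtime_error("SHA1 instructions disabled");
    if (!shaGroupEnabled(isar0, kSha2Mask))
        throw std::runtime_error("SHA256 instructions disabled");
    return 0;
}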
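Editor's note on the generated execute code: for a three-register instruction the template expansion amounts to gathering the four 32-bit lanes of each 128-bit operand into a RegVect, passing raw byte pointers to the Crypto helper, and copying the result lanes back to the destination. The hand-expanded sketch below shows roughly what the generated SHA256H body does; the Crypto struct is a stub standing in for gem5's helper in src/arch/arm/insts/crypto.hh, sha256hExecute() is a hypothetical wrapper, and the htog/gtoh endianness conversions from the real templates are omitted for brevity.

// Hand-expanded sketch of one generated three-register execute body
// (SHA256H as the example).  Not gem5 code.
#include <cstdint>

struct RegVect { uint32_t regs[4]; };   // one 128-bit SIMD operand

struct Crypto {                          // stub for gem5's Crypto class
    void sha256H(unsigned char *out, unsigned char *in, unsigned char *in2)
    {
        // The real implementation performs one SHA-256 hash update step.
        (void)out; (void)in; (void)in2;
    }
};

// dest/op1/op2 model the four 32-bit views of the Q registers exposed by
// the AA64FpDestPx_uw / AA64FpOp1Px_uw / AA64FpOp2Px_uw operands.
void sha256hExecute(uint32_t dest[4], const uint32_t op1[4],
                    const uint32_t op2[4])
{
    Crypto crypto;
    RegVect destReg, srcReg1, srcReg2;

    // Prefix: gather the operand lanes (cryptoRegRegRegPrefix).
    for (int i = 0; i < 4; ++i) {
        destReg.regs[i] = dest[i];
        srcReg1.regs[i] = op1[i];
        srcReg2.regs[i] = op2[i];
    }

    // Body: the per-instruction one-liner (sha256_hCode).
    crypto.sha256H(reinterpret_cast<unsigned char *>(destReg.regs),
                   reinterpret_cast<unsigned char *>(srcReg1.regs),
                   reinterpret_cast<unsigned char *>(srcReg2.regs));

    // Suffix: write the result lanes back (cryptoSuffix).
    for (int i = 0; i < 4; ++i)
        dest[i] = destReg.regs[i];
}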