// Copyright (c) 2017-2019 ARM Limited
// All rights reserved
//
// The license below extends only to copyright in the software and shall
// not be construed as granting a license to any other intellectual
// property including but not limited to intellectual property relating
// to a hardware implementation of the functionality of the software
// licensed hereunder. You may use the software subject to the license
// terms below provided that you ensure that this notice is replicated
// unmodified and in its entirety in all distributions of the software,
// modified or unmodified, in source code or in binary form.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met: redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer;
// redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution;
// neither the name of the copyright holders nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

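// Declaration template for the SVE fill/spill forms (LDR/STR of a whole
// vector or predicate register). These transfer raw bytes, so all element
// types are fixed to uint8_t.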
def template SveMemFillSpillOpDeclare {{
    class %(class_name)s : public %(base_class)s
    {
      private:
        %(reg_idx_arr_decl)s;

      protected:
        typedef uint8_t TPElem;
        typedef uint8_t RegElemType;
        typedef uint8_t MemElemType;

      public:
        %(class_name)s(ExtMachInst machInst,
                RegIndex _dest, RegIndex _base, uint64_t _imm) :
            %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                _dest, _base, _imm)
        {
            %(set_reg_idx_arr)s;
            %(constructor)s;
        }

        Fault execute(ExecContext *, trace::InstRecord *) const override;
        Fault initiateAcc(ExecContext *, trace::InstRecord *) const override;
        Fault completeAcc(PacketPtr, ExecContext *,
                trace::InstRecord *) const override;

        void
        annotateFault(ArmISA::ArmFault *fault) override
        {
            %(fa_code)s
        }
    };
}};

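// Declaration template for contiguous SVE loads/stores using
// scalar-plus-scalar (base register plus index register) addressing.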
def template SveContigMemSSOpDeclare {{
    %(tpl_header)s
    class %(class_name)s : public %(base_class)s
    {
      private:
        %(reg_idx_arr_decl)s;

      protected:
        typedef RegElemType TPElem;

      public:
        %(class_name)s(const char* mnem, ExtMachInst machInst,
                RegIndex _dest, RegIndex _gp, RegIndex _base,
                RegIndex _offset) :
            %(base_class)s(mnem, machInst, %(op_class)s,
                _dest, _gp, _base, _offset)
        {
            %(set_reg_idx_arr)s;
            %(constructor)s;
        }

        Fault execute(ExecContext *, trace::InstRecord *) const override;
        Fault initiateAcc(ExecContext *, trace::InstRecord *) const override;
        Fault completeAcc(PacketPtr, ExecContext *,
                trace::InstRecord *) const override;

        void
        annotateFault(ArmISA::ArmFault *fault) override
        {
            %(fa_code)s
        }
    };
}};

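// Declaration template for contiguous SVE loads/stores using
// scalar-plus-immediate (base register plus immediate offset) addressing.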
def template SveContigMemSIOpDeclare {{
    %(tpl_header)s
    class %(class_name)s : public %(base_class)s
    {
      private:
        %(reg_idx_arr_decl)s;

      protected:
        typedef RegElemType TPElem;

      public:
        %(class_name)s(const char* mnem, ExtMachInst machInst,
                RegIndex _dest, RegIndex _gp, RegIndex _base,
                uint64_t _imm) :
            %(base_class)s(mnem, machInst, %(op_class)s,
                _dest, _gp, _base, _imm)
        {
            %(set_reg_idx_arr)s;
            %(constructor)s;
        }

        Fault execute(ExecContext *, trace::InstRecord *) const override;
        Fault initiateAcc(ExecContext *, trace::InstRecord *) const override;
        Fault completeAcc(PacketPtr, ExecContext *,
                trace::InstRecord *) const override;

        void
        annotateFault(ArmISA::ArmFault *fault) override
        {
            %(fa_code)s
        }
    };
}};

def template SveContigMemExecDeclare {{
    template
    Fault %(class_name)s%(tpl_args)s::execute(ExecContext *,
        trace::InstRecord *) const;

    template
    Fault %(class_name)s%(tpl_args)s::initiateAcc(ExecContext *,
        trace::InstRecord *) const;

    template
    Fault %(class_name)s%(tpl_args)s::completeAcc(PacketPtr,
        ExecContext *, trace::InstRecord *) const;
}};

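// Execute/initiateAcc/completeAcc templates for contiguous loads.
// execute() performs the whole access in one step (atomic-mode CPUs),
// while initiateAcc()/completeAcc() split it for timing-mode CPUs.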
def template SveContigLoadExecute {{
    %(tpl_header)s
    Fault
    %(class_name)s%(tpl_args)s::execute(ExecContext *xc,
        trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        [[maybe_unused]] bool aarch64 = true;
        unsigned eCount =
            ArmStaticInst::getCurSveVecLen<RegElemType>(xc->tcBase());

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        ArmISA::VecRegContainer memData;
        auto memDataView = memData.as<MemElemType>();

        %(rden_code)s;

        fault = readMemAtomic(xc, EA, memData.as<uint8_t>(),
            memAccessSize, this->memAccessFlags, rdEn);

        %(fault_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
            %(op_wb)s;
        }

        return fault;
    }
}};

def template SveContigLoadInitiateAcc {{
    %(tpl_header)s
    Fault
    %(class_name)s%(tpl_args)s::initiateAcc(ExecContext *xc,
        trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        [[maybe_unused]] bool aarch64 = true;
        unsigned eCount =
            ArmStaticInst::getCurSveVecLen<RegElemType>(xc->tcBase());

        %(op_src_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        %(rden_code)s;

        fault = initiateMemRead(xc, EA, memAccessSize,
            this->memAccessFlags, rdEn);

        %(fault_code)s;

        return fault;
    }
}};

def template SveContigLoadCompleteAcc {{
    %(tpl_header)s
    Fault
    %(class_name)s%(tpl_args)s::completeAcc(PacketPtr pkt,
        ExecContext *xc, trace::InstRecord *traceData) const
    {
        [[maybe_unused]] bool aarch64 = true;
        unsigned eCount =
            ArmStaticInst::getCurSveVecLen<RegElemType>(xc->tcBase());

        %(op_decl)s;
        %(op_rd)s;

        ArmISA::VecRegContainer memData;
        auto memDataView = memData.as<MemElemType>();

        if (xc->readMemAccPredicate()) {
            memcpy(memData.as<uint8_t>(), pkt->getPtr<uint8_t>(),
                pkt->getSize());
        }

        %(memacc_code)s;
        %(op_wb)s;

        return NoFault;
    }
}};

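// Corresponding execute/initiateAcc/completeAcc templates for contiguous
// stores; completeAcc() has nothing left to do once the write is issued.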
def template SveContigStoreExecute {{
    %(tpl_header)s
    Fault
    %(class_name)s%(tpl_args)s::execute(ExecContext *xc,
        trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        [[maybe_unused]] bool aarch64 = true;
        unsigned eCount =
            ArmStaticInst::getCurSveVecLen<RegElemType>(xc->tcBase());

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        ArmISA::VecRegContainer memData;
        auto memDataView = memData.as<MemElemType>();

        %(wren_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = writeMemAtomic(xc, memData.as<uint8_t>(),
                EA, memAccessSize, this->memAccessFlags, nullptr, wrEn);
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template SveContigStoreInitiateAcc {{
    %(tpl_header)s
    Fault
    %(class_name)s%(tpl_args)s::initiateAcc(ExecContext *xc,
        trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        [[maybe_unused]] bool aarch64 = true;
        unsigned eCount =
            ArmStaticInst::getCurSveVecLen<RegElemType>(xc->tcBase());

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        ArmISA::VecRegContainer memData;
        auto memDataView = memData.as<MemElemType>();

        %(wren_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = writeMemTiming(xc, memData.as<uint8_t>(),
                EA, memAccessSize, this->memAccessFlags, nullptr, wrEn);
        }

        return fault;
    }
}};

def template SveContigStoreCompleteAcc {{
    %(tpl_header)s
    Fault
    %(class_name)s%(tpl_args)s::completeAcc(PacketPtr pkt,
        ExecContext *xc, trace::InstRecord *traceData) const
    {
        return NoFault;
    }
}};

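// Templates for load-and-replicate instructions (e.g. LD1R), which read a
// single element and broadcast it to every element of the destination.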
def template SveLoadAndReplExecute {{
    %(tpl_header)s
    Fault
    %(class_name)s%(tpl_args)s::execute(ExecContext *xc,
        trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        [[maybe_unused]] bool aarch64 = true;
        unsigned eCount =
            ArmStaticInst::getCurSveVecLen<RegElemType>(xc->tcBase());

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        MemElemType memData = 0;

        if (fault == NoFault) {
            fault = readMemAtomicLE(xc, traceData, EA, memData,
                this->memAccessFlags);
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template SveLoadAndReplInitiateAcc {{
    %(tpl_header)s
    Fault
    %(class_name)s%(tpl_args)s::initiateAcc(ExecContext *xc,
        trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        [[maybe_unused]] bool aarch64 = true;

        %(op_src_decl)s;
        %(op_rd)s;

        %(ea_code)s;

        MemElemType memData = 0;

        if (fault == NoFault) {
            fault = initiateMemRead(xc, traceData, EA, memData,
                this->memAccessFlags);
        }

        return fault;
    }
}};

def template SveLoadAndReplCompleteAcc {{
    %(tpl_header)s
    Fault
    %(class_name)s%(tpl_args)s::completeAcc(PacketPtr pkt,
        ExecContext *xc, trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;
        [[maybe_unused]] bool aarch64 = true;
        unsigned eCount =
            ArmStaticInst::getCurSveVecLen<RegElemType>(xc->tcBase());

        %(op_decl)s;
        %(op_rd)s;

        MemElemType memData = 0;
        getMemLE(pkt, memData, traceData);

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

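// Per-element micro-op declarations for gather/scatter accesses:
// vector-plus-immediate (VI) and scalar-plus-vector (SV) addressing.
// The first load micro-op also pins the destination and fault status
// registers for the remaining element micro-ops.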
def template SveIndexedMemVIMicroopDeclare {{
    %(tpl_header)s
    class %(class_name)s : public %(base_class)s
    {
      private:
        %(reg_idx_arr_decl)s;

      protected:
        typedef RegElemType TPElem;

        RegIndex dest;
        RegIndex gp;
        RegIndex base;
        uint64_t imm;

        int elemIndex;
        int numElems;
        bool firstFault;

        unsigned memAccessFlags;

      public:
        %(class_name)s(const char* mnem, ExtMachInst machInst,
                OpClass __opClass, RegIndex _dest, RegIndex _gp,
                RegIndex _base, uint64_t _imm, int _elemIndex,
                int _numElems, bool _firstFault) :
            %(base_class)s(mnem, machInst, %(op_class)s),
            dest(_dest), gp(_gp), base(_base), imm(_imm),
            elemIndex(_elemIndex), numElems(_numElems),
            firstFault(_firstFault),
            memAccessFlags(ArmISA::MMU::AllowUnaligned)
        {
            %(set_reg_idx_arr)s;
            %(constructor)s;
            if (_opClass == MemReadOp && elemIndex == 0) {
                // The first micro-op is responsible for pinning the
                // destination and the fault status registers
                assert(_numDestRegs == 2);
                for (int i = 0; i < _numDestRegs; i++) {
                    auto dr = destRegIdx(i);
                    dr.setNumPinnedWrites(numElems - 1);
                    setDestRegIdx(i, dr);
                }
            }
        }

        Fault execute(ExecContext *, trace::InstRecord *) const override;
        Fault initiateAcc(ExecContext *, trace::InstRecord *) const override;
        Fault completeAcc(PacketPtr, ExecContext *,
                trace::InstRecord *) const override;

        void
        annotateFault(ArmISA::ArmFault *fault) override
        {
            %(fa_code)s
        }

        std::string
        generateDisassembly(Addr pc,
                const loader::SymbolTable *symtab) const override
        {
            // TODO: add suffix to transfer register
            std::stringstream ss;
            printMnemonic(ss, "", false);
            ccprintf(ss, "{");
            printVecReg(ss, dest, true);
            ccprintf(ss, "}, ");
            printVecPredReg(ss, gp);
            if (_opClass == MemReadOp) {
                ccprintf(ss, "/z");
            }
            ccprintf(ss, ", [");
            printVecReg(ss, base, true);
            if (imm != 0) {
                ccprintf(ss, ", #%d", imm * sizeof(MemElemType));
            }
            ccprintf(ss, "] (uop elem %d tfer)", elemIndex);
            return ss.str();
        }
    };
}};

def template SveIndexedMemSVMicroopDeclare {{
    %(tpl_header)s
    class %(class_name)s : public %(base_class)s
    {
      private:
        %(reg_idx_arr_decl)s;

      protected:
        typedef RegElemType TPElem;

        RegIndex dest;
        RegIndex gp;
        RegIndex base;
        RegIndex offset;

        bool offsetIs32;
        bool offsetIsSigned;
        bool offsetIsScaled;

        int elemIndex;
        int numElems;
        bool firstFault;

        unsigned memAccessFlags;

      public:
        %(class_name)s(const char* mnem, ExtMachInst machInst,
                OpClass __opClass, RegIndex _dest, RegIndex _gp,
                RegIndex _base, RegIndex _offset, bool _offsetIs32,
                bool _offsetIsSigned, bool _offsetIsScaled, int _elemIndex,
                int _numElems, bool _firstFault) :
            %(base_class)s(mnem, machInst, %(op_class)s),
            dest(_dest), gp(_gp), base(_base), offset(_offset),
            offsetIs32(_offsetIs32), offsetIsSigned(_offsetIsSigned),
            offsetIsScaled(_offsetIsScaled), elemIndex(_elemIndex),
            numElems(_numElems), firstFault(_firstFault),
            memAccessFlags(ArmISA::MMU::AllowUnaligned)
        {
            %(set_reg_idx_arr)s;
            %(constructor)s;
            if (_opClass == MemReadOp && elemIndex == 0) {
                // The first micro-op is responsible for pinning the
                // destination and the fault status registers
                assert(_numDestRegs == 2);
                for (int i = 0; i < _numDestRegs; i++) {
                    auto dr = destRegIdx(i);
                    dr.setNumPinnedWrites(numElems - 1);
                    setDestRegIdx(i, dr);
                }
            }
        }

        Fault execute(ExecContext *, trace::InstRecord *) const override;
        Fault initiateAcc(ExecContext *, trace::InstRecord *) const override;
        Fault completeAcc(PacketPtr, ExecContext *,
                trace::InstRecord *) const override;

        void
        annotateFault(ArmISA::ArmFault *fault) override
        {
            %(fa_code)s
        }

        std::string
        generateDisassembly(Addr pc,
                const loader::SymbolTable *symtab) const override
        {
            // TODO: add suffix to transfer and base registers
            std::stringstream ss;
            printMnemonic(ss, "", false);
            ccprintf(ss, "{");
            printVecReg(ss, dest, true);
            ccprintf(ss, "}, ");
            printVecPredReg(ss, gp);
            if (_opClass == MemReadOp) {
                ccprintf(ss, "/z");
            }
            ccprintf(ss, ", [");
            printIntReg(ss, base);
            ccprintf(ss, ", ");
            printVecReg(ss, offset, true);
            ccprintf(ss, "] (uop elem %d tfer)", elemIndex);
            return ss.str();
        }
    };
}};

def template SveGatherLoadMicroopExecute {{
    %(tpl_header)s
    Fault
    %(class_name)s%(tpl_args)s::execute(ExecContext *xc,
        trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        [[maybe_unused]] bool aarch64 = true;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        MemElemType memData = 0;

        int index = elemIndex;
        if (%(pred_check_code)s) {
            fault = readMemAtomicLE(xc, traceData, EA, memData,
                this->memAccessFlags);
        }

        if (fault == NoFault) {
            %(fault_status_reset_code)s;
            %(memacc_code)s;
            if (traceData) {
                traceData->setData(memData);
            }
        } else {
            %(fault_status_set_code)s;
            if (firstFault) {
                for (index = 0;
                     index < numElems && !(%(pred_check_code)s);
                     index++);

                if (index < elemIndex) {
                    fault = NoFault;
                    memData = 0;
                    %(memacc_code)s;
                    if (traceData) {
                        traceData->setData(memData);
                    }
                }
            }
        }
        return fault;
    }
}};

def template SveGatherLoadMicroopInitiateAcc {{
    %(tpl_header)s
    Fault
    %(class_name)s%(tpl_args)s::initiateAcc(ExecContext *xc,
        trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        [[maybe_unused]] bool aarch64 = true;

        %(op_src_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        MemElemType memData = 0;

        int index = elemIndex;
        if (%(pred_check_code)s) {
            fault = initiateMemRead(xc, traceData, EA, memData,
                this->memAccessFlags);
            if (fault != NoFault) {
                %(fault_status_set_code)s;
                if (firstFault) {
                    for (index = 0;
                         index < numElems && !(%(pred_check_code)s);
                         index++) {}
                    if (index < elemIndex) {
                        fault = NoFault;
                        xc->setMemAccPredicate(false);
                    }
                }
            } else {
                %(fault_status_reset_code)s;
            }
        } else {
            xc->setMemAccPredicate(false);
            %(fault_status_reset_code)s;
        }

        return fault;
    }
}};

def template SveGatherLoadMicroopCompleteAcc {{
    %(tpl_header)s
    Fault
    %(class_name)s%(tpl_args)s::completeAcc(PacketPtr pkt,
        ExecContext *xc, trace::InstRecord *traceData) const
    {
        [[maybe_unused]] bool aarch64 = true;

        %(op_decl)s;
        %(op_rd)s;

        MemElemType memData = 0;
        if (xc->readMemAccPredicate()) {
            getMemLE(pkt, memData, traceData);
        }

        %(memacc_code)s;
        if (traceData) {
            traceData->setData(memData);
        }

        return NoFault;
    }
}};

def template SveScatterStoreMicroopExecute {{
    %(tpl_header)s
    Fault
    %(class_name)s%(tpl_args)s::execute(ExecContext *xc,
        trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        [[maybe_unused]] bool aarch64 = true;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        MemElemType memData = 0;
        %(memacc_code)s;

        int index = elemIndex;
        if (%(pred_check_code)s) {
            fault = writeMemAtomicLE(xc, traceData, memData, EA,
                this->memAccessFlags, nullptr);
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template SveScatterStoreMicroopInitiateAcc {{
    %(tpl_header)s
    Fault
    %(class_name)s%(tpl_args)s::initiateAcc(ExecContext *xc,
        trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        [[maybe_unused]] bool aarch64 = true;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        MemElemType memData = 0;
        %(memacc_code)s;

        int index = elemIndex;
        if (%(pred_check_code)s) {
            fault = writeMemTimingLE(xc, traceData, memData, EA,
                this->memAccessFlags, nullptr);
        } else {
            xc->setPredicate(false);
        }

        return fault;
    }
}};

def template SveScatterStoreMicroopCompleteAcc {{
    %(tpl_header)s
    Fault
    %(class_name)s%(tpl_args)s::completeAcc(PacketPtr pkt,
        ExecContext *xc, trace::InstRecord *traceData) const
    {
        return NoFault;
    }
}};

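// Writeback micro-op for first-fault/non-fault gather loads: it inspects
// the per-element fault status and updates the first-fault register so
// that only elements before the first faulting one remain active.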
def template SveFirstFaultWritebackMicroopDeclare {{
    %(tpl_header)s
    class SveFirstFaultWritebackMicroop : public MicroOp
    {
      protected:
        typedef RegElemType TPElem;

        int numElems;
        StaticInst *macroOp;

      public:
        %(reg_idx_arr_decl)s;

        SveFirstFaultWritebackMicroop(const char* mnem, ExtMachInst machInst,
                OpClass __opClass, int _numElems, StaticInst *_macroOp) :
            MicroOp(mnem, machInst, __opClass),
            numElems(_numElems), macroOp(_macroOp)
        {
            %(set_reg_idx_arr)s;
            %(constructor)s;
        }

        Fault execute(ExecContext *, trace::InstRecord *) const override;

        std::string
        generateDisassembly(Addr pc,
                const loader::SymbolTable *symtab) const override
        {
            std::stringstream ss;
            ccprintf(ss, "%s", macroOp->disassemble(pc, symtab));
            ccprintf(ss, " (uop%d)", numElems);
            return ss.str();
        }
    };
}};

def template SveFirstFaultWritebackMicroopExecute {{
    %(tpl_header)s
    Fault
    %(class_name)s%(tpl_args)s::execute(ExecContext *xc,
        trace::InstRecord *traceData) const
    {
        [[maybe_unused]] bool aarch64 = true;

        %(op_decl)s;
        %(op_rd)s;

        int index, firstFaultIndex;
        for (index = 0;
             index < numElems && !%(fault_status_check_code)s;
             index++) {}
        firstFaultIndex = index;
        for (index = 0; index < numElems; index++) {
            if (index < firstFaultIndex) {
                %(first_fault_forward_code)s;
            } else {
                %(first_fault_reset_code)s;
            }
        }
        return NoFault;
    }
}};

def template SveGatherLoadCpySrcVecMicroopDeclare {{
    class SveGatherLoadCpySrcVecMicroop : public MicroOp
    {
      private:
        %(reg_idx_arr_decl)s;

      protected:
        RegIndex op1;

        StaticInst *macroOp;

      public:
        SveGatherLoadCpySrcVecMicroop(const char* mnem, ExtMachInst machInst,
                RegIndex _op1, StaticInst *_macroOp) :
            MicroOp(mnem, machInst, SimdAluOp), op1(_op1), macroOp(_macroOp)
        {
            %(set_reg_idx_arr)s;
            %(constructor)s;
        }

        Fault execute(ExecContext *, trace::InstRecord *) const override;

        std::string
        generateDisassembly(Addr pc,
                const loader::SymbolTable *symtab) const override
        {
            std::stringstream ss;
            ccprintf(ss, "%s", macroOp->disassemble(pc, symtab));
            ccprintf(ss, " (uop src vec cpy)");
            return ss.str();
        }
    };
}};

def template SveGatherLoadCpySrcVecMicroopExecute {{
    Fault
    SveGatherLoadCpySrcVecMicroop::execute(ExecContext *xc,
        trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;
        %(op_decl)s;
        %(op_rd)s;

        %(code)s;
        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

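// Micro-op declarations for structure loads/stores (LD2/ST2 and related
// multi-register forms) with scalar-plus-immediate addressing; data is
// staged through the interleave (INTRLV) scratch registers.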
def template SveStructMemSIMicroopDeclare {{
    template <class _Element>
    class %(class_name)s : public %(base_class)s
    {
      public:
        %(reg_idx_arr_decl)s;

      protected:
        typedef _Element Element;
        typedef _Element TPElem;

        RegIndex dest;
        RegIndex gp;
        RegIndex base;
        int64_t imm;

        uint8_t numRegs;
        int regIndex;

        unsigned memAccessFlags;

        bool baseIsSP;

      public:
        %(class_name)s(const char* mnem, ExtMachInst machInst,
                RegIndex _dest, RegIndex _gp, RegIndex _base,
                int64_t _imm, uint8_t _numRegs, int _regIndex) :
            %(base_class)s(mnem, machInst, %(op_class)s),
            dest(_dest), gp(_gp), base(_base), imm(_imm),
            numRegs(_numRegs), regIndex(_regIndex),
            memAccessFlags(ArmISA::MMU::AllowUnaligned)
        {
            %(set_reg_idx_arr)s;
            %(constructor)s;
            baseIsSP = isSP(_base);
        }

        Fault execute(ExecContext *, trace::InstRecord *) const override;
        Fault initiateAcc(ExecContext *, trace::InstRecord *) const override;
        Fault completeAcc(PacketPtr, ExecContext *,
                trace::InstRecord *) const override;

        void
        annotateFault(ArmISA::ArmFault *fault) override
        {
            %(fa_code)s
        }

        std::string
        generateDisassembly(Addr pc,
                const loader::SymbolTable *symtab) const override
        {
            std::stringstream ss;
            printMnemonic(ss, "", false);
            ccprintf(ss, "{");
            switch (dest) {
              case INTRLVREG0:
                ccprintf(ss, "INTRLV0");
                break;
              case INTRLVREG1:
                ccprintf(ss, "INTRLV1");
                break;
              case INTRLVREG2:
                ccprintf(ss, "INTRLV2");
                break;
              case INTRLVREG3:
                ccprintf(ss, "INTRLV3");
                break;
              default:
                printVecReg(ss, dest, true);
                break;
            }
            ccprintf(ss, "}, ");
            printVecPredReg(ss, gp);
            if (_opClass == MemReadOp) {
                ccprintf(ss, "/z");
            }
            ccprintf(ss, ", [");
            // The base is a scalar (possibly SP) register for the
            // scalar-plus-immediate form
            printIntReg(ss, base);
            if (imm != 0) {
                ccprintf(ss, ", #%d", imm * sizeof(Element));
            }
            ccprintf(ss, "] (uop reg %d tfer)", regIndex);
            return ss.str();
        }
    };
}};

def template SveStructMemExecDeclare {{
    template
    Fault %(class_name)s<%(targs)s>::execute(ExecContext *,
        trace::InstRecord *) const;

    template
    Fault %(class_name)s<%(targs)s>::initiateAcc(ExecContext *,
        trace::InstRecord *) const;

    template
    Fault %(class_name)s<%(targs)s>::completeAcc(PacketPtr,
        ExecContext *, trace::InstRecord *) const;
}};

def template SveStructLoadExecute {{
    template <class Element>
    Fault
    %(class_name)s<Element>::execute(ExecContext *xc,
        trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        [[maybe_unused]] bool aarch64 = true;
        unsigned eCount =
            ArmStaticInst::getCurSveVecLen<Element>(xc->tcBase());

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        ArmISA::VecRegContainer memData;
        auto memDataView = memData.as<Element>();

        if (fault == NoFault) {
            fault = readMemAtomic(xc, EA, memData.as<uint8_t>(),
                memAccessSize, this->memAccessFlags,
                std::vector<bool>(memAccessSize, true));
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template SveStructLoadInitiateAcc {{
    template <class Element>
    Fault
    %(class_name)s<Element>::initiateAcc(ExecContext *xc,
        trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        [[maybe_unused]] bool aarch64 = true;
        unsigned eCount =
            ArmStaticInst::getCurSveVecLen<Element>(xc->tcBase());

        %(op_src_decl)s;
        %(op_rd)s;

        %(ea_code)s;

        if (fault == NoFault) {
            fault = initiateMemRead(xc, EA,
                memAccessSize, this->memAccessFlags,
                std::vector<bool>(memAccessSize, true));
        }

        return fault;
    }
}};

def template SveStructLoadCompleteAcc {{
    template <class Element>
    Fault
    %(class_name)s<Element>::completeAcc(PacketPtr pkt,
        ExecContext *xc, trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;
        [[maybe_unused]] bool aarch64 = true;
        unsigned eCount =
            ArmStaticInst::getCurSveVecLen<Element>(xc->tcBase());

        %(op_decl)s;
        %(op_rd)s;

        ArmISA::VecRegContainer memData;
        auto memDataView = memData.as<Element>();

        memcpy(memData.as<uint8_t>(), pkt->getPtr<uint8_t>(),
            pkt->getSize());

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template SveStructStoreExecute {{
    template <class Element>
    Fault
    %(class_name)s<Element>::execute(ExecContext *xc,
        trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        [[maybe_unused]] bool aarch64 = true;
        unsigned eCount =
            ArmStaticInst::getCurSveVecLen<Element>(xc->tcBase());

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        ArmISA::VecRegContainer memData;
        auto memDataView = memData.as<Element>();

        %(wren_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = writeMemAtomic(xc, memData.as<uint8_t>(),
                EA, memAccessSize, this->memAccessFlags, nullptr, wrEn);
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template SveStructStoreInitiateAcc {{
    template <class Element>
    Fault
    %(class_name)s<Element>::initiateAcc(ExecContext *xc,
        trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        [[maybe_unused]] bool aarch64 = true;
        unsigned eCount =
            ArmStaticInst::getCurSveVecLen<Element>(xc->tcBase());

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        ArmISA::VecRegContainer memData;
        auto memDataView = memData.as<Element>();

        %(wren_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = writeMemTiming(xc, memData.as<uint8_t>(),
                EA, memAccessSize, this->memAccessFlags, nullptr, wrEn);
        }

        return fault;
    }
}};

def template SveStructStoreCompleteAcc {{
    template <class Element>
    Fault
    %(class_name)s<Element>::completeAcc(PacketPtr pkt,
        ExecContext *xc, trace::InstRecord *traceData) const
    {
        return NoFault;
    }
}};

def template SveStructMemSSMicroopDeclare {{
    template <class _Element>
    class %(class_name)s : public %(base_class)s
    {
      private:
        %(reg_idx_arr_decl)s;

      protected:
        typedef _Element Element;
        typedef _Element TPElem;

        RegIndex dest;
        RegIndex gp;
        RegIndex base;
        RegIndex offset;

        uint8_t numRegs;
        int regIndex;

        unsigned memAccessFlags;

        bool baseIsSP;

      public:
        %(class_name)s(const char* mnem, ExtMachInst machInst,
                RegIndex _dest, RegIndex _gp, RegIndex _base,
                RegIndex _offset, uint8_t _numRegs, int _regIndex) :
            %(base_class)s(mnem, machInst, %(op_class)s),
            dest(_dest), gp(_gp), base(_base), offset(_offset),
            numRegs(_numRegs), regIndex(_regIndex),
            memAccessFlags(ArmISA::MMU::AllowUnaligned)
        {
            %(set_reg_idx_arr)s;
            %(constructor)s;
            baseIsSP = isSP(_base);
        }

        Fault execute(ExecContext *, trace::InstRecord *) const override;
        Fault initiateAcc(ExecContext *, trace::InstRecord *) const override;
        Fault completeAcc(PacketPtr, ExecContext *,
                trace::InstRecord *) const override;

        void
        annotateFault(ArmISA::ArmFault *fault) override
        {
            %(fa_code)s
        }

        std::string
        generateDisassembly(Addr pc,
                const loader::SymbolTable *symtab) const override
        {
            std::stringstream ss;
            printMnemonic(ss, "", false);
            ccprintf(ss, "{");
            switch (dest) {
              case INTRLVREG0:
                ccprintf(ss, "INTRLV0");
                break;
              case INTRLVREG1:
                ccprintf(ss, "INTRLV1");
                break;
              case INTRLVREG2:
                ccprintf(ss, "INTRLV2");
                break;
              case INTRLVREG3:
                ccprintf(ss, "INTRLV3");
                break;
              default:
                printVecReg(ss, dest, true);
                break;
            }
            ccprintf(ss, "}, ");
            printVecPredReg(ss, gp);
            if (_opClass == MemReadOp) {
                ccprintf(ss, "/z");
            }
            ccprintf(ss, ", [");
            printIntReg(ss, base);
            ccprintf(ss, ", ");
            printVecReg(ss, offset, true);
            ccprintf(ss, "] (uop reg %d tfer)", regIndex);
            return ss.str();
        }
    };
}};

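// Helper micro-ops that interleave/de-interleave data between the
// architectural vector registers and the INTRLV scratch registers used by
// the structure load/store micro-ops above.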
def template SveIntrlvMicroopDeclare {{
    template <class _Element>
    class %(class_name)s : public %(base_class)s
    {
      private:
        %(reg_idx_arr_decl)s;

      protected:
        typedef _Element Element;
        typedef _Element TPElem;
        RegIndex dest;
        RegIndex op1;
        uint8_t numRegs;
        int regIndex;

        StaticInst *macroOp;

      public:
        %(class_name)s(const char* mnem, ExtMachInst machInst,
                RegIndex _dest, RegIndex _op1,
                uint8_t _numRegs, int _regIndex, StaticInst *_macroOp) :
            MicroOp(mnem, machInst, SimdAluOp), dest(_dest), op1(_op1),
            numRegs(_numRegs), regIndex(_regIndex), macroOp(_macroOp)
        {
            %(set_reg_idx_arr)s;
            %(constructor)s;
        }

        Fault execute(ExecContext *, trace::InstRecord *) const override;

        std::string
        generateDisassembly(Addr pc,
                const loader::SymbolTable *symtab) const override
        {
            std::stringstream ss;
            ccprintf(ss, "%s", macroOp->disassemble(pc, symtab));
            ccprintf(ss, " (uop interleave)");
            return ss.str();
        }
    };
}};

def template SveDeIntrlvMicroopDeclare {{
    template <class _Element>
    class %(class_name)s : public %(base_class)s
    {
      public:
        %(reg_idx_arr_decl)s;

      protected:
        typedef _Element Element;
        typedef _Element TPElem;
        RegIndex dest;
        uint8_t numRegs;
        int regIndex;

        StaticInst *macroOp;

      public:
        %(class_name)s(const char* mnem, ExtMachInst machInst,
                RegIndex _dest, uint8_t _numRegs, int _regIndex,
                StaticInst *_macroOp) :
            MicroOp(mnem, machInst, SimdAluOp), dest(_dest),
            numRegs(_numRegs), regIndex(_regIndex), macroOp(_macroOp)
        {
            %(set_reg_idx_arr)s;
            %(constructor)s;
        }

        Fault execute(ExecContext *, trace::InstRecord *) const override;

        std::string
        generateDisassembly(Addr pc,
                const loader::SymbolTable *symtab) const override
        {
            std::stringstream ss;
            ccprintf(ss, "%s", macroOp->disassemble(pc, symtab));
            ccprintf(ss, " (uop deinterleave)");
            return ss.str();
        }
    };
}};

def template SveIntrlvMicroopExecDeclare {{
    template
    Fault %(class_name)s<%(targs)s>::execute(
        ExecContext *, trace::InstRecord *) const;
}};

def template SveIntrlvMicroopExecute {{
    template <class Element>
    Fault
    %(class_name)s<Element>::execute(ExecContext *xc,
        trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;
        %(op_decl)s;
        %(op_rd)s;

        %(code)s;
        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};