// -*- mode: c++ -*-
// Copyright (c) 2012-2013 ARM Limited
// All rights reserved
//
// The license below extends only to copyright in the software and shall
// not be construed as granting a license to any other intellectual
// property including but not limited to intellectual property relating
// to a hardware implementation of the functionality of the software
// licensed hereunder. You may use the software subject to the license
// terms below provided that you ensure that this notice is replicated
// unmodified and in its entirety in all distributions of the software,
// modified or unmodified, in source code or in binary form.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met: redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer;
// redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution;
// neither the name of the copyright holders nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Authors: Mbou Eyole
// Giacomo Gabrielli

let {{
    simd64EnabledCheckCode = vfp64EnabledCheckCode
}};
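
// The 'def template' blocks below are expanded by the gem5 ISA parser:
// each %(...)s placeholder (class name, base class, operand
// declarations/reads/writebacks, and the per-instruction semantic code)
// is substituted when the decoder generator emits the final C++ for every
// AArch64 NEON instruction that instantiates the template.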
def template NeonX2RegOpDeclare {{
template <class _Element>
class %(class_name)s : public %(base_class)s
{
  protected:
    typedef _Element Element;
  public:
    // Constructor
    %(class_name)s(ExtMachInst machInst,
                   IntRegIndex _dest, IntRegIndex _op1, IntRegIndex _op2)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _dest, _op1, _op2)
    {
        %(constructor)s;
    }

    Fault execute(ExecContext *, Trace::InstRecord *) const override;
};
}};

def template NeonX2RegImmOpDeclare {{
template <class _Element>
class %(class_name)s : public %(base_class)s
{
  protected:
    typedef _Element Element;
  public:
    // Constructor
    %(class_name)s(ExtMachInst machInst,
                   IntRegIndex _dest, IntRegIndex _op1, IntRegIndex _op2,
                   uint64_t _imm)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _dest, _op1, _op2, _imm)
    {
        %(constructor)s;
    }

    Fault execute(ExecContext *, Trace::InstRecord *) const override;
};
}};

def template NeonX1RegOpDeclare {{
template <class _Element>
class %(class_name)s : public %(base_class)s
{
  protected:
    typedef _Element Element;
  public:
    // Constructor
    %(class_name)s(ExtMachInst machInst,
                   IntRegIndex _dest, IntRegIndex _op1)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _dest, _op1)
    {
        %(constructor)s;
    }

    Fault execute(ExecContext *, Trace::InstRecord *) const override;
};
}};

def template NeonX1RegImmOpDeclare {{
template <class _Element>
class %(class_name)s : public %(base_class)s
{
  protected:
    typedef _Element Element;
  public:
    // Constructor
    %(class_name)s(ExtMachInst machInst,
                   IntRegIndex _dest, IntRegIndex _op1, uint64_t _imm)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _dest, _op1, _imm)
    {
        %(constructor)s;
    }

    Fault execute(ExecContext *, Trace::InstRecord *) const override;
};
}};

def template NeonX1Reg2ImmOpDeclare {{
template <class _Element>
class %(class_name)s : public %(base_class)s
{
  protected:
    typedef _Element Element;
  public:
    // Constructor
    %(class_name)s(ExtMachInst machInst,
                   IntRegIndex _dest, IntRegIndex _op1, uint64_t _imm1,
                   uint64_t _imm2)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _dest, _op1, _imm1, _imm2)
    {
        %(constructor)s;
    }

    Fault execute(ExecContext *, Trace::InstRecord *) const override;
};
}};

def template NeonX1RegImmOnlyOpDeclare {{
template <class _Element>
class %(class_name)s : public %(base_class)s
{
  protected:
    typedef _Element Element;
  public:
    // Constructor
    %(class_name)s(ExtMachInst machInst,
                   IntRegIndex _dest, uint64_t _imm)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _dest, _imm)
    {
        %(constructor)s;
    }

    Fault execute(ExecContext *, Trace::InstRecord *) const override;
};
}};

def template NeonXExecDeclare {{
    template
    Fault %(class_name)s<%(targs)s>::execute(
            ExecContext *, Trace::InstRecord *) const;
}};
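
// In the execute templates below, the RegVect union overlays the rCount
// FloatRegBits chunks used by the operation with an array of Element
// values, so the substituted semantic code can index vector lanes
// directly. FullRegVect always spans a full 128-bit quadword register
// (four FloatRegBits chunks), regardless of the operation's width.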
def template NeonXEqualRegOpExecute {{
    template <class Element>
    Fault %(class_name)s<Element>::execute(ExecContext *xc,
            Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;
        %(op_decl)s;
        %(op_rd)s;

        const unsigned rCount = %(r_count)d;
        const unsigned eCount = rCount * sizeof(FloatRegBits) / sizeof(Element);
        const unsigned eCountFull = 4 * sizeof(FloatRegBits) / sizeof(Element);

        union RegVect {
            FloatRegBits regs[rCount];
            Element elements[eCount];
        };

        union FullRegVect {
            FloatRegBits regs[4];
            Element elements[eCountFull];
        };

        %(code)s;
        if (fault == NoFault)
        {
            %(op_wb)s;
        }

        return fault;
    }
}};
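
// The "unequal" variant additionally defines BigElement, an element type
// twice the width of Element (via bigger_type_t), and a BigRegVect that
// covers 2 * rCount register chunks. Widening, narrowing, long, and wide
// NEON operations use these views for the double-width side of the
// computation.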
def template NeonXUnequalRegOpExecute {{
    template <class Element>
    Fault %(class_name)s<Element>::execute(ExecContext *xc,
            Trace::InstRecord *traceData) const
    {
        typedef typename bigger_type_t<Element>::type BigElement;
        Fault fault = NoFault;
        %(op_decl)s;
        %(op_rd)s;

        const unsigned rCount = %(r_count)d;
        const unsigned eCount = rCount * sizeof(FloatRegBits) / sizeof(Element);
        const unsigned eCountFull = 4 * sizeof(FloatRegBits) / sizeof(Element);

        union RegVect {
            FloatRegBits regs[rCount];
            Element elements[eCount];
            BigElement bigElements[eCount / 2];
        };

        union BigRegVect {
            FloatRegBits regs[2 * rCount];
            BigElement elements[eCount];
        };

        union FullRegVect {
            FloatRegBits regs[4];
            Element elements[eCountFull];
        };

        %(code)s;
        if (fault == NoFault)
        {
            %(op_wb)s;
        }

        return fault;
    }
}};
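
// AArch64 NEON loads and stores are broken into memory microops. Each
// microop transfers accSize bytes; baseIsSP flags a stack-pointer base so
// alignment can be checked, and eSize encodes the vector element size as
// described in the class below.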
def template MicroNeonMemDeclare64 {{
    class %(class_name)s : public %(base_class)s
    {
      protected:
        // True if the base register is SP (used for SP alignment checking)
        bool baseIsSP;
        // Access size in bytes
        uint8_t accSize;
        // Vector element size (0 -> 8-bit, 1 -> 16-bit, 2 -> 32-bit,
        // 3 -> 64-bit)
        uint8_t eSize;

      public:
        %(class_name)s(ExtMachInst machInst, RegIndex _dest, RegIndex _ura,
                       uint32_t _imm, unsigned extraMemFlags, bool _baseIsSP,
                       uint8_t _accSize, uint8_t _eSize)
            : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s, _dest,
                             _ura, _imm),
              baseIsSP(_baseIsSP), accSize(_accSize), eSize(_eSize)
        {
            memAccessFlags |= extraMemFlags;
            %(constructor)s;
        }

        Fault execute(ExecContext *, Trace::InstRecord *) const override;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
        Fault completeAcc(PacketPtr, ExecContext *,
                          Trace::InstRecord *) const override;
    };
}};
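
// execute() performs the whole access in one step (atomic mode), while
// initiateAcc()/completeAcc() split it for timing mode: initiateAcc()
// issues the memory request and completeAcc() consumes the returned
// packet. For stores, completeAcc() has nothing left to do and simply
// returns NoFault.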
def template NeonLoadExecute64 {{
    Fault %(class_name)s::execute(
        ExecContext *xc, Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(mem_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        MemUnion memUnion;
        uint8_t *dataPtr = memUnion.bytes;

        if (fault == NoFault) {
            fault = xc->readMem(EA, dataPtr, accSize, memAccessFlags);
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template NeonLoadInitiateAcc64 {{
    Fault %(class_name)s::initiateAcc(
        ExecContext *xc, Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(mem_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            fault = xc->initiateMemRead(EA, accSize, memAccessFlags);
        }

        return fault;
    }
}};

def template NeonLoadCompleteAcc64 {{
    Fault %(class_name)s::completeAcc(
        PacketPtr pkt, ExecContext *xc, Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;

        %(mem_decl)s;
        %(op_decl)s;
        %(op_rd)s;

        MemUnion memUnion { { } };
        memcpy(&memUnion, pkt->getPtr<uint8_t>(), pkt->getSize());

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template NeonStoreExecute64 {{
    Fault %(class_name)s::execute(
        ExecContext *xc, Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(mem_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        MemUnion memUnion;
        uint8_t *dataPtr = memUnion.bytes;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = xc->writeMem(dataPtr, accSize, EA, memAccessFlags,
                                 NULL);
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template NeonStoreInitiateAcc64 {{
    Fault %(class_name)s::initiateAcc(
        ExecContext *xc, Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(mem_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        MemUnion memUnion;
        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = xc->writeMem(memUnion.bytes, accSize, EA, memAccessFlags,
                                 NULL);
        }

        return fault;
    }
}};

def template NeonStoreCompleteAcc64 {{
    Fault %(class_name)s::completeAcc(
        PacketPtr pkt, ExecContext *xc, Trace::InstRecord *traceData) const
    {
        return NoFault;
    }
}};
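
// The VMem* templates declare and construct the macroops for the AArch64
// NEON structure load/store instructions: the "Mult" forms transfer whole
// structures of numStructElems elements across numRegs registers (e.g.
// LD1/ST1 .. LD4/ST4), while the "Single" forms access one lane, or
// replicate a loaded element across the vector when 'replicate' is set.
// Both take the element size, total data size, and an optional writeback.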
def template VMemMultDeclare64 {{
    class %(class_name)s : public %(base_class)s
    {
      public:
        // Constructor
        %(class_name)s(ExtMachInst machInst, RegIndex rn, RegIndex vd,
                       RegIndex rm, uint8_t eSize, uint8_t dataSize,
                       uint8_t numStructElems, uint8_t numRegs, bool wb);
    };
}};

def template VMemSingleDeclare64 {{
    class %(class_name)s : public %(base_class)s
    {
      public:
        // Constructor
        %(class_name)s(ExtMachInst machInst, RegIndex rn, RegIndex vd,
                       RegIndex rm, uint8_t eSize, uint8_t dataSize,
                       uint8_t numStructElems, uint8_t index, bool wb,
                       bool replicate = false);
    };
}};

def template VMemMultConstructor64 {{
    %(class_name)s::%(class_name)s(
        ExtMachInst machInst, RegIndex rn, RegIndex vd, RegIndex rm,
        uint8_t _eSize, uint8_t _dataSize, uint8_t _numStructElems,
        uint8_t _numRegs, bool _wb) :
        %(base_class)s(
            "%(mnemonic)s", machInst, %(op_class)s, rn, vd, rm,
            _eSize, _dataSize, _numStructElems, _numRegs, _wb)
    {
        %(constructor)s;
    }
}};

def template VMemSingleConstructor64 {{
    %(class_name)s::%(class_name)s(
        ExtMachInst machInst, RegIndex rn, RegIndex vd, RegIndex rm,
        uint8_t _eSize, uint8_t _dataSize, uint8_t _numStructElems,
        uint8_t _index, bool _wb, bool _replicate) :
        %(base_class)s(
            "%(mnemonic)s", machInst, %(op_class)s, rn, vd, rm,
            _eSize, _dataSize, _numStructElems, _index, _wb,
            _replicate)
    {
        %(constructor)s;
    }
}};
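
// The "mix" microops shuffle data between the interleaved structure
// layout used in memory and the per-register layout of the vector
// register file; the 'step' argument selects which part of the overall
// transfer a given microop handles. The lane variant covers single-lane
// accesses and, when 'replicate' is set, load-and-replicate forms.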
def template MicroNeonMixDeclare64 {{
    class %(class_name)s : public %(base_class)s
    {
      public:
        %(class_name)s(ExtMachInst machInst, RegIndex _dest, RegIndex _op1,
                       uint8_t _eSize, uint8_t _dataSize,
                       uint8_t _numStructElems, uint8_t _numRegs,
                       uint8_t _step) :
            %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                           _dest, _op1, _eSize, _dataSize, _numStructElems,
                           _numRegs, _step)
        {
            %(constructor)s;
        }

        Fault execute(ExecContext *, Trace::InstRecord *) const override;
    };
}};

def template MicroNeonMixLaneDeclare64 {{
    class %(class_name)s : public %(base_class)s
    {
      public:
        %(class_name)s(ExtMachInst machInst, RegIndex _dest, RegIndex _op1,
                       uint8_t _eSize, uint8_t _dataSize,
                       uint8_t _numStructElems, uint8_t _lane, uint8_t _step,
                       bool _replicate = false) :
            %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                           _dest, _op1, _eSize, _dataSize, _numStructElems,
                           _lane, _step, _replicate)
        {
            %(constructor)s;
        }

        Fault execute(ExecContext *, Trace::InstRecord *) const override;
    };
}};

def template MicroNeonMixExecute64 {{
    Fault %(class_name)s::execute(ExecContext *xc,
            Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;
        uint64_t resTemp = 0;
        // Self-assignment keeps the compiler from warning about an unused
        // variable when the substituted code never references resTemp.
        resTemp = resTemp;
        %(op_decl)s;
        %(op_rd)s;

        %(code)s;
        if (fault == NoFault)
        {
            %(op_wb)s;
        }

        return fault;
    }
}};