// -*- mode:c++ -*- // Copyright (c) 2015 Riscv Developers // Copyright (c) 2016 The University of Virginia // All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer; // redistributions in binary form must reproduce the above copyright // notice, this list of conditions and the following disclaimer in the // documentation and/or other materials provided with the distribution; // neither the name of the copyright holders nor the names of its // contributors may be used to endorse or promote products derived from // this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
// Authors: Alec Roelke

////////////////////////////////////////////////////////////////////
//
// Atomic memory operation instructions
//

// Declaration for an AMO macroop: the macroop class itself plus its two
// microops (a load that reads the old memory value and a store that writes
// the combined result back).
def template AtomicMemOpDeclare {{
    /**
     * Static instruction class for an AtomicMemOp operation
     */
    class %(class_name)s : public %(base_class)s
    {
      public:
        // Constructor
        %(class_name)s(ExtMachInst machInst);

      protected:
        // First microop: loads the original memory value.
        class %(class_name)sLoad : public %(base_class)sMicro
        {
          public:
            // Constructor
            %(class_name)sLoad(ExtMachInst machInst, %(class_name)s *_p);

            Fault execute(ExecContext *, Trace::InstRecord *) const override;
            Fault initiateAcc(ExecContext *,
                Trace::InstRecord *) const override;
            Fault completeAcc(PacketPtr, ExecContext *,
                Trace::InstRecord *) const override;
        };

        // Second microop: stores the computed value back to memory.
        class %(class_name)sStore : public %(base_class)sMicro
        {
          public:
            // Constructor
            %(class_name)sStore(ExtMachInst machInst, %(class_name)s *_p);

            Fault execute(ExecContext *, Trace::InstRecord *) const override;
            Fault initiateAcc(ExecContext *,
                Trace::InstRecord *) const override;
            Fault completeAcc(PacketPtr, ExecContext *,
                Trace::InstRecord *) const override;
        };
    };
}};

// Constructor for LR/SC: translates the encoding's aq/rl bits into the
// corresponding gem5 memory request flags.
def template LRSCConstructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst):
        %(base_class)s("%(mnemonic)s", machInst, %(op_class)s)
    {
        %(constructor)s;
        if (AQ)
            memAccessFlags = memAccessFlags | Request::ACQUIRE;
        if (RL)
            memAccessFlags = memAccessFlags | Request::RELEASE;
    }
}};

// Constructor for the AMO macroop: builds its load and store microops.
def template AtomicMemOpMacroConstructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s)
    {
        %(constructor)s;
        microops = {new %(class_name)sLoad(machInst, this),
            new %(class_name)sStore(machInst, this)};
    }
}};

// Constructor for the load microop of an AMO. The load is the first
// microop and must not commit until the store succeeds.
def template AtomicMemOpLoadConstructor {{
    %(class_name)s::%(class_name)sLoad::%(class_name)sLoad(
        ExtMachInst machInst, %(class_name)s *_p)
        : %(base_class)s("%(mnemonic)s[l]", machInst, %(op_class)s)
    {
        %(constructor)s;
        flags[IsFirstMicroop] = true;
        flags[IsDelayedCommit] = true;
        // OR rather than assign so flags set by %(constructor)s are kept,
        // matching the convention used in LRSCConstructor above.
        if (AQ)
            memAccessFlags = memAccessFlags | Request::ACQUIRE;
    }
}};

// Constructor for the store microop of an AMO; it is the last microop
// and is executed non-speculatively.
def template AtomicMemOpStoreConstructor {{
    %(class_name)s::%(class_name)sStore::%(class_name)sStore(
        ExtMachInst machInst, %(class_name)s *_p)
        : %(base_class)s("%(mnemonic)s[s]", machInst, %(op_class)s)
    {
        %(constructor)s;
        flags[IsLastMicroop] = true;
        flags[IsNonSpeculative] = true;
        // OR rather than assign so flags set by %(constructor)s are kept,
        // matching the convention used in LRSCConstructor above.
        if (RL)
            memAccessFlags = memAccessFlags | Request::RELEASE;
    }
}};

// Atomic-mode execute for SC: performs the conditional write and converts
// gem5's success flag to the RISC-V convention (0 = success).
def template StoreCondExecute {{
    Fault
    %(class_name)s::execute(ExecContext *xc, Trace::InstRecord *traceData)
        const
    {
        Addr EA;
        Fault fault = NoFault;
        uint64_t result;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = writeMemAtomic(xc, traceData, Mem, EA, memAccessFlags,
                &result);
            // RISC-V has the opposite convention gem5 has for success flags,
            // so we invert the result here.
            result = !result;
        }

        if (fault == NoFault) {
            %(postacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

// Atomic-mode execute for the AMO load microop: read memory, then run the
// per-instruction code on the loaded value.
def template AtomicMemOpLoadExecute {{
    Fault
    %(class_name)s::%(class_name)sLoad::execute(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            fault = readMemAtomic(xc, traceData, EA, Mem, memAccessFlags);
        }

        if (fault == NoFault) {
            %(code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

// Atomic-mode execute for the AMO store microop: compute the value to
// store, then write it back.
def template AtomicMemOpStoreExecute {{
    Fault
    %(class_name)s::%(class_name)sStore::execute(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(code)s;
        }

        if (fault == NoFault) {
            fault = writeMemAtomic(xc, traceData, Mem, EA, memAccessFlags,
                nullptr);
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

// Timing-mode initiation of the AMO load microop; completion happens in
// AtomicMemOpLoadCompleteAcc once the packet returns.
def template AtomicMemOpLoadInitiateAcc {{
    Fault
    %(class_name)s::%(class_name)sLoad::initiateAcc(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_src_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            fault = initiateMemRead(xc, traceData, EA, Mem, memAccessFlags);
        }

        return fault;
    }
}};

// Timing-mode initiation of the AMO store microop: computes the value and
// issues the timing write in one step.
def template AtomicMemOpStoreInitiateAcc {{
    Fault
    %(class_name)s::%(class_name)sStore::initiateAcc(
        ExecContext *xc, Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(code)s;
        }

        if (fault == NoFault) {
            fault = writeMemTiming(xc, traceData, Mem, EA, memAccessFlags,
                nullptr);
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

// Timing-mode completion for SC: extracts the success flag from the
// returned packet and converts it to the RISC-V convention.
def template StoreCondCompleteAcc {{
    Fault
    %(class_name)s::completeAcc(Packet *pkt, ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;

        %(op_dest_decl)s;

        // RISC-V has the opposite convention gem5 has for success flags,
        // so we invert the result here.
        uint64_t result = !pkt->req->getExtraData();

        if (fault == NoFault) {
            %(postacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

// Timing-mode completion for the AMO load microop: pulls the loaded value
// out of the packet and runs the per-instruction code on it.
def template AtomicMemOpLoadCompleteAcc {{
    Fault
    %(class_name)s::%(class_name)sLoad::completeAcc(PacketPtr pkt,
        ExecContext *xc, Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;

        getMem(pkt, Mem, traceData);

        if (fault == NoFault) {
            %(code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

// Timing-mode completion for the AMO store microop: the write was fully
// handled in initiateAcc, so there is nothing left to do here.
def template AtomicMemOpStoreCompleteAcc {{
    Fault
    %(class_name)s::%(class_name)sStore::completeAcc(PacketPtr pkt,
        ExecContext *xc, Trace::InstRecord *traceData) const
    {
        return NoFault;
    }
}};

// Decode format for LR: generates the declaration, constructor, decode
// entry, and the atomic/timing execute methods.
def format LoadReserved(memacc_code, postacc_code={{ }}, ea_code={{EA = Rs1;}},
        mem_flags=[], inst_flags=[]) {{
    mem_flags = makeList(mem_flags)
    inst_flags = makeList(inst_flags)
    iop = InstObjParams(name, Name, 'LoadReserved',
        {'ea_code': ea_code, 'memacc_code': memacc_code,
         'postacc_code': postacc_code}, inst_flags)
    # Only emit the flag-OR statement when there are flags to OR in;
    # an empty list would generate invalid C++ ("... | ;").
    if mem_flags:
        iop.constructor += '\n\tmemAccessFlags = memAccessFlags | ' + \
            '|'.join(['Request::%s' % flag for flag in mem_flags]) + ';'

    header_output = LoadStoreDeclare.subst(iop)
    decoder_output = LRSCConstructor.subst(iop)
    decode_block = BasicDecode.subst(iop)
    exec_output = LoadExecute.subst(iop) \
        + LoadInitiateAcc.subst(iop) \
        + LoadCompleteAcc.subst(iop)
}};

// Decode format for SC: like LoadReserved, but uses the store-conditional
// execute/completeAcc templates that handle the success flag.
def format StoreCond(memacc_code, postacc_code={{ }}, ea_code={{EA = Rs1;}},
        mem_flags=[], inst_flags=[]) {{
    mem_flags = makeList(mem_flags)
    inst_flags = makeList(inst_flags)
    iop = InstObjParams(name, Name, 'StoreCond',
        {'ea_code': ea_code, 'memacc_code': memacc_code,
         'postacc_code': postacc_code}, inst_flags)
    # Only emit the flag-OR statement when there are flags to OR in;
    # an empty list would generate invalid C++ ("... | ;").
    if mem_flags:
        iop.constructor += '\n\tmemAccessFlags = memAccessFlags | ' + \
            '|'.join(['Request::%s' % flag for flag in mem_flags]) + ';'

    header_output = LoadStoreDeclare.subst(iop)
    decoder_output = LRSCConstructor.subst(iop)
    decode_block = BasicDecode.subst(iop)
    exec_output = StoreCondExecute.subst(iop) \
        + StoreInitiateAcc.subst(iop) \
        + StoreCondCompleteAcc.subst(iop)
}};

// Decode format for AMOs: generates the macroop plus its load and store
// microops, each with its own execute/initiateAcc/completeAcc.
def format AtomicMemOp(load_code, store_code, ea_code, load_flags=[],
        store_flags=[], inst_flags=[]) {{
    macro_iop = InstObjParams(name, Name, 'AtomicMemOp', ea_code, inst_flags)
    header_output = AtomicMemOpDeclare.subst(macro_iop)
    decoder_output = AtomicMemOpMacroConstructor.subst(macro_iop)
    decode_block = BasicDecode.subst(macro_iop)
    exec_output = ''

    load_inst_flags = makeList(inst_flags) + ["IsMemRef", "IsLoad"]
    load_iop = InstObjParams(name, Name, 'AtomicMemOpMicro',
        {'ea_code': ea_code, 'code': load_code, 'op_name': 'Load'},
        load_inst_flags)
    decoder_output += AtomicMemOpLoadConstructor.subst(load_iop)
    exec_output += AtomicMemOpLoadExecute.subst(load_iop) \
        + AtomicMemOpLoadInitiateAcc.subst(load_iop) \
        + AtomicMemOpLoadCompleteAcc.subst(load_iop)

    store_inst_flags = makeList(inst_flags) + ["IsMemRef", "IsStore"]
    store_iop = InstObjParams(name, Name, 'AtomicMemOpMicro',
        {'ea_code': ea_code, 'code': store_code, 'op_name': 'Store'},
        store_inst_flags)
    decoder_output += AtomicMemOpStoreConstructor.subst(store_iop)
    exec_output += AtomicMemOpStoreExecute.subst(store_iop) \
        + AtomicMemOpStoreInitiateAcc.subst(store_iop) \
        + AtomicMemOpStoreCompleteAcc.subst(store_iop)
}};