package xiangshan.mem

import chisel3._
import chisel3.util._
import utils._
import xiangshan._
import xiangshan.cache.{DCacheWordIO, TlbRequestIO, TlbCmd, MemoryOpConstants}
import xiangshan.backend.LSUOpType

class AtomicsUnit extends XSModule with MemoryOpConstants {
  val io = IO(new Bundle() {
    val in            = Flipped(Decoupled(new ExuInput))
    val out           = Decoupled(new ExuOutput)
    val dcache        = new DCacheWordIO
    val dtlb          = new TlbRequestIO
    val rsIdx         = Input(UInt(log2Up(IssQueSize).W))
    val flush_sbuffer = new SbufferFlushBundle
    val tlbFeedback   = ValidIO(new TlbFeedback)
    val redirect      = Flipped(ValidIO(new Redirect))
    val flush         = Input(Bool())
    val exceptionAddr = ValidIO(UInt(VAddrBits.W))
  })

  //-------------------------------------------------------
  // Atomics Memory Access FSM
  //-------------------------------------------------------
  // s_invalid -> s_tlb -> s_flush_sbuffer_req -> s_flush_sbuffer_resp
  //   -> s_cache_req -> s_cache_resp -> s_finish -> s_invalid
  // (on a tlb exception, s_tlb jumps straight to s_finish)
  val s_invalid :: s_tlb :: s_flush_sbuffer_req :: s_flush_sbuffer_resp :: s_cache_req :: s_cache_resp :: s_finish :: Nil = Enum(7)
  val state = RegInit(s_invalid)
  val in = Reg(new ExuInput())
  val exceptionVec = RegInit(0.U.asTypeOf(ExceptionVec()))
  val atom_override_xtval = RegInit(false.B)
  // paddr after translation
  val paddr = Reg(UInt())
  val is_mmio = Reg(Bool())
  // dcache response data
  val resp_data = Reg(UInt())
  // lr/sc status returned by the dcache (carried in resp.bits.id)
  val is_lrsc_valid = Reg(Bool())

  io.exceptionAddr.valid := atom_override_xtval
  io.exceptionAddr.bits  := in.src1

  // assign default values to output signals
  io.in.ready  := false.B
  io.out.valid := false.B
  io.out.bits  := DontCare

  io.dcache.req.valid  := false.B
  io.dcache.req.bits   := DontCare
  io.dcache.resp.ready := false.B

  io.dtlb.req.valid  := false.B
  io.dtlb.req.bits   := DontCare
  io.dtlb.resp.ready := false.B

  io.flush_sbuffer.valid := false.B

  XSDebug("state: %d\n", state)

  when (state === s_invalid) {
    io.in.ready := true.B
    when (io.in.fire()) {
      in := io.in.bits
      state := s_tlb
    }
  }

  // Send TLB feedback to the store issue queue.
  // We send feedback right after we receive the request,
  // and we always treat an amo as a tlb hit,
  // since we will keep polling the tlb by ourselves.
  io.tlbFeedback.valid      := RegNext(RegNext(io.in.valid))
  io.tlbFeedback.bits.hit   := true.B
  io.tlbFeedback.bits.rsIdx := RegEnable(io.rsIdx, io.in.valid)

  // tlb translation: send requests, receive responses and deal with exceptions
  when (state === s_tlb) {
    // send req to dtlb
    // keep firing until tlb hit
    io.dtlb.req.valid       := true.B
    io.dtlb.req.bits.vaddr  := in.src1
    io.dtlb.req.bits.roqIdx := in.uop.roqIdx
    io.dtlb.resp.ready      := true.B
    val is_lr = in.uop.ctrl.fuOpType === LSUOpType.lr_w || in.uop.ctrl.fuOpType === LSUOpType.lr_d
    io.dtlb.req.bits.cmd      := Mux(is_lr, TlbCmd.atom_read, TlbCmd.atom_write)
    io.dtlb.req.bits.debug.pc := in.uop.cf.pc

    when (io.dtlb.resp.fire() && !io.dtlb.resp.bits.miss) {
      // exception handling
      val addrAligned = LookupTree(in.uop.ctrl.fuOpType(1, 0), List(
        "b00".U -> true.B,                  // b
        "b01".U -> (in.src1(0) === 0.U),    // h
        "b10".U -> (in.src1(1, 0) === 0.U), // w
        "b11".U -> (in.src1(2, 0) === 0.U)  // d
      ))
      exceptionVec(storeAddrMisaligned) := !addrAligned
      exceptionVec(storePageFault)      := io.dtlb.resp.bits.excp.pf.st
      exceptionVec(loadPageFault)       := io.dtlb.resp.bits.excp.pf.ld
      exceptionVec(storeAccessFault)    := io.dtlb.resp.bits.excp.af.st
      exceptionVec(loadAccessFault)     := io.dtlb.resp.bits.excp.af.ld
      val exception = !addrAligned ||
        io.dtlb.resp.bits.excp.pf.st ||
        io.dtlb.resp.bits.excp.pf.ld ||
        io.dtlb.resp.bits.excp.af.st ||
        io.dtlb.resp.bits.excp.af.ld
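      // remember whether the translated address is mmio;
      // the flag is reported through io.out.bits.debug.isMMIO at writeback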
      is_mmio := io.dtlb.resp.bits.mmio
      when (exception) {
        // if there is an exception, there is no need to execute the access:
        // go straight to writeback and override xtval
        state := s_finish
        atom_override_xtval := true.B
      } .otherwise {
        paddr := io.dtlb.resp.bits.paddr
        state := s_flush_sbuffer_req
      }
    }
  }

  when (state === s_flush_sbuffer_req) {
    io.flush_sbuffer.valid := true.B
    state := s_flush_sbuffer_resp
  }

  when (state === s_flush_sbuffer_resp) {
    when (io.flush_sbuffer.empty) {
      state := s_cache_req
    }
  }

  when (state === s_cache_req) {
    io.dcache.req.valid := true.B
    io.dcache.req.bits.cmd := LookupTree(in.uop.ctrl.fuOpType, List(
      LSUOpType.lr_w      -> M_XLR,
      LSUOpType.sc_w      -> M_XSC,
      LSUOpType.amoswap_w -> M_XA_SWAP,
      LSUOpType.amoadd_w  -> M_XA_ADD,
      LSUOpType.amoxor_w  -> M_XA_XOR,
      LSUOpType.amoand_w  -> M_XA_AND,
      LSUOpType.amoor_w   -> M_XA_OR,
      LSUOpType.amomin_w  -> M_XA_MIN,
      LSUOpType.amomax_w  -> M_XA_MAX,
      LSUOpType.amominu_w -> M_XA_MINU,
      LSUOpType.amomaxu_w -> M_XA_MAXU,

      LSUOpType.lr_d      -> M_XLR,
      LSUOpType.sc_d      -> M_XSC,
      LSUOpType.amoswap_d -> M_XA_SWAP,
      LSUOpType.amoadd_d  -> M_XA_ADD,
      LSUOpType.amoxor_d  -> M_XA_XOR,
      LSUOpType.amoand_d  -> M_XA_AND,
      LSUOpType.amoor_d   -> M_XA_OR,
      LSUOpType.amomin_d  -> M_XA_MIN,
      LSUOpType.amomax_d  -> M_XA_MAX,
      LSUOpType.amominu_d -> M_XA_MINU,
      LSUOpType.amomaxu_d -> M_XA_MAXU
    ))

    io.dcache.req.bits.addr := paddr
    io.dcache.req.bits.data := genWdata(in.src2, in.uop.ctrl.fuOpType(1, 0))
    // TODO: atomics do need a mask: fix mask
    io.dcache.req.bits.mask := genWmask(paddr, in.uop.ctrl.fuOpType(1, 0))
    io.dcache.req.bits.id   := DontCare

    when (io.dcache.req.fire()) {
      state := s_cache_resp
    }
  }

  when (state === s_cache_resp) {
    io.dcache.resp.ready := true.B
    when (io.dcache.resp.fire()) {
      is_lrsc_valid := io.dcache.resp.bits.id
      // shift the addressed bytes of the 64-bit dcache word down to the lsb
      val rdata = io.dcache.resp.bits.data
      val rdataSel = LookupTree(paddr(2, 0), List(
        "b000".U -> rdata(63, 0),
        "b001".U -> rdata(63, 8),
        "b010".U -> rdata(63, 16),
        "b011".U -> rdata(63, 24),
        "b100".U -> rdata(63, 32),
        "b101".U -> rdata(63, 40),
        "b110".U -> rdata(63, 48),
        "b111".U -> rdata(63, 56)
      ))

      // word ops sign-extend bits (31, 0); sc returns the raw success code
      resp_data := LookupTree(in.uop.ctrl.fuOpType, List(
        LSUOpType.lr_w      -> SignExt(rdataSel(31, 0), XLEN),
        LSUOpType.sc_w      -> rdata,
        LSUOpType.amoswap_w -> SignExt(rdataSel(31, 0), XLEN),
        LSUOpType.amoadd_w  -> SignExt(rdataSel(31, 0), XLEN),
        LSUOpType.amoxor_w  -> SignExt(rdataSel(31, 0), XLEN),
        LSUOpType.amoand_w  -> SignExt(rdataSel(31, 0), XLEN),
        LSUOpType.amoor_w   -> SignExt(rdataSel(31, 0), XLEN),
        LSUOpType.amomin_w  -> SignExt(rdataSel(31, 0), XLEN),
        LSUOpType.amomax_w  -> SignExt(rdataSel(31, 0), XLEN),
        LSUOpType.amominu_w -> SignExt(rdataSel(31, 0), XLEN),
        LSUOpType.amomaxu_w -> SignExt(rdataSel(31, 0), XLEN),

        LSUOpType.lr_d      -> SignExt(rdataSel(63, 0), XLEN),
        LSUOpType.sc_d      -> rdata,
        LSUOpType.amoswap_d -> SignExt(rdataSel(63, 0), XLEN),
        LSUOpType.amoadd_d  -> SignExt(rdataSel(63, 0), XLEN),
        LSUOpType.amoxor_d  -> SignExt(rdataSel(63, 0), XLEN),
        LSUOpType.amoand_d  -> SignExt(rdataSel(63, 0), XLEN),
        LSUOpType.amoor_d   -> SignExt(rdataSel(63, 0), XLEN),
        LSUOpType.amomin_d  -> SignExt(rdataSel(63, 0), XLEN),
        LSUOpType.amomax_d  -> SignExt(rdataSel(63, 0), XLEN),
        LSUOpType.amominu_d -> SignExt(rdataSel(63, 0), XLEN),
        LSUOpType.amomaxu_d -> SignExt(rdataSel(63, 0), XLEN)
      ))

      state := s_finish
    }
  }

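  // writeback: present the result (or the recorded exceptions) to the exu,
  // then return to idle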
  when (state === s_finish) {
    io.out.valid := true.B
    io.out.bits.uop := in.uop
    io.out.bits.uop.cf.exceptionVec := exceptionVec
    io.out.bits.uop.diffTestDebugLrScValid := is_lrsc_valid
    io.out.bits.data := resp_data
    io.out.bits.redirectValid := false.B
    io.out.bits.redirect := DontCare
    io.out.bits.debug.isMMIO := is_mmio
    when (io.out.fire()) {
      XSDebug("atomics writeback: pc %x data %x\n", io.out.bits.uop.cf.pc, io.out.bits.data)
      state := s_invalid
    }
  }

  // a redirect or flush cancels any pending xtval override
  when (io.redirect.valid || io.flush) {
    atom_override_xtval := false.B
  }
}