package xiangshan.mem

import chisel3._
import chisel3.util._
import utils._
import xiangshan._
import xiangshan.backend.decode.ImmUnion
import xiangshan.cache._
// import xiangshan.cache.{DCacheWordIO, TlbRequestIO, TlbCmd, MemoryOpConstants, TlbReq, DCacheLoadReq, DCacheWordResp}
import xiangshan.backend.LSUOpType

class LoadToLsqIO extends XSBundle {
  val loadIn = ValidIO(new LsPipelineBundle)
  val ldout = Flipped(DecoupledIO(new ExuOutput))
  val loadDataForwarded = Output(Bool())
  val forward = new LoadForwardQueryIO
}

// Load Pipeline Stage 0
// Generate addr, use addr to query DCache and DTLB
class LoadUnit_S0 extends XSModule {
  val io = IO(new Bundle() {
    val in = Flipped(Decoupled(new ExuInput))
    val out = Decoupled(new LsPipelineBundle)
    val dtlbReq = DecoupledIO(new TlbReq)
    val dcacheReq = DecoupledIO(new DCacheWordReq)
    val rsIdx = Input(UInt(log2Up(IssQueSize).W))
  })

  val s0_uop = io.in.bits.uop
  val s0_vaddr = io.in.bits.src1 + SignExt(s0_uop.ctrl.imm(11, 0), VAddrBits)
  // Alternative split-add address generation, kept for timing experiments:
  // val s0_vaddr_old = io.in.bits.src1 + SignExt(ImmUnion.I.toImm32(s0_uop.ctrl.imm), XLEN)
  // val imm12 = WireInit(s0_uop.ctrl.imm(11, 0))
  // val s0_vaddr_lo = io.in.bits.src1(11, 0) + Cat(0.U(1.W), imm12)
  // val s0_vaddr_hi = Mux(imm12(11),
  //   Mux(s0_vaddr_lo(12), io.in.bits.src1(VAddrBits-1, 12), io.in.bits.src1(VAddrBits-1, 12) + SignExt(1.U, VAddrBits-12)),
  //   Mux(s0_vaddr_lo(12), io.in.bits.src1(VAddrBits-1, 12) + 1.U, io.in.bits.src1(VAddrBits-1, 12))
  // )
  // val s0_vaddr = Cat(s0_vaddr_hi, s0_vaddr_lo(11, 0))
  // when(io.in.fire() && s0_vaddr(VAddrBits-1, 0) =/= (io.in.bits.src1 + SignExt(ImmUnion.I.toImm32(s0_uop.ctrl.imm), XLEN))(VAddrBits-1, 0)) {
  //   printf("s0_vaddr %x s0_vaddr_old %x\n", s0_vaddr, s0_vaddr_old(VAddrBits-1, 0))
  // }
  // val s0_mask = genWmask(s0_vaddr_lo, s0_uop.ctrl.fuOpType(1, 0))
  val s0_mask = genWmask(s0_vaddr, s0_uop.ctrl.fuOpType(1, 0))

  // query DTLB
  io.dtlbReq.valid := io.in.valid
  io.dtlbReq.bits.vaddr := s0_vaddr
  io.dtlbReq.bits.cmd := TlbCmd.read
  io.dtlbReq.bits.roqIdx := s0_uop.roqIdx
  io.dtlbReq.bits.debug.pc := s0_uop.cf.pc

  // query DCache
  io.dcacheReq.valid := io.in.valid
  io.dcacheReq.bits.cmd := MemoryOpConstants.M_XRD
  io.dcacheReq.bits.addr := s0_vaddr
  io.dcacheReq.bits.mask := s0_mask
  io.dcacheReq.bits.data := DontCare

  // TODO: update cache meta
  io.dcacheReq.bits.id := DontCare

  val addrAligned = LookupTree(s0_uop.ctrl.fuOpType(1, 0), List(
    "b00".U -> true.B,                   // b
    "b01".U -> (s0_vaddr(0) === 0.U),    // h
    "b10".U -> (s0_vaddr(1, 0) === 0.U), // w
    "b11".U -> (s0_vaddr(2, 0) === 0.U)  // d
  ))

  io.out.valid := io.in.valid && io.dcacheReq.ready

  io.out.bits := DontCare
  io.out.bits.vaddr := s0_vaddr
  io.out.bits.mask := s0_mask
  io.out.bits.uop := s0_uop
  io.out.bits.uop.cf.exceptionVec(loadAddrMisaligned) := !addrAligned
  io.out.bits.rsIdx := io.rsIdx

  io.in.ready := !io.in.valid || (io.out.ready && io.dcacheReq.ready)

  XSDebug(io.dcacheReq.fire(),
    p"[DCACHE LOAD REQ] pc ${Hexadecimal(s0_uop.cf.pc)}, vaddr ${Hexadecimal(s0_vaddr)}\n"
  )
}
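
// For reference: a minimal sketch of the write-mask semantics assumed by the
// genWmask call above (the real helper lives in the shared mem utilities).
// Assuming fuOpType(1, 0) encodes the access size (00: byte, 01: half,
// 10: word, 11: double), the mask is a per-byte strobe within the 64-bit word:
//
//   def genWmaskSketch(addr: UInt, sizeEncode: UInt): UInt = {
//     val baseMask = LookupTree(sizeEncode, List(
//       "b00".U -> "b00000001".U, // 1 byte
//       "b01".U -> "b00000011".U, // 2 bytes
//       "b10".U -> "b00001111".U, // 4 bytes
//       "b11".U -> "b11111111".U  // 8 bytes
//     ))
//     (baseMask << addr(2, 0))(7, 0) // shift into the addressed byte lane
//   }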

// Load Pipeline Stage 1
// TLB resp (send paddr to dcache)
class LoadUnit_S1 extends XSModule {
  val io = IO(new Bundle() {
    val in = Flipped(Decoupled(new LsPipelineBundle))
    val out = Decoupled(new LsPipelineBundle)
    val dtlbResp = Flipped(DecoupledIO(new TlbResp))
    val dcachePAddr = Output(UInt(PAddrBits.W))
    val dcacheKill = Output(Bool())
    val sbuffer = new LoadForwardQueryIO
    val lsq = new LoadForwardQueryIO
  })

  val s1_uop = io.in.bits.uop
  val s1_paddr = io.dtlbResp.bits.paddr
  val s1_exception = selectLoad(io.out.bits.uop.cf.exceptionVec, false).asUInt.orR
  val s1_tlb_miss = io.dtlbResp.bits.miss
  val s1_mmio = !s1_tlb_miss && io.dtlbResp.bits.mmio
  val s1_mask = io.in.bits.mask

  io.out.bits := io.in.bits // forwardXX fields will be updated in s1

  io.dtlbResp.ready := true.B

  // TODO: PMA check
  io.dcachePAddr := s1_paddr
  io.dcacheKill := s1_tlb_miss || s1_exception || s1_mmio

  // load forward query datapath
  io.sbuffer.valid := io.in.valid
  io.sbuffer.paddr := s1_paddr
  io.sbuffer.uop := s1_uop
  io.sbuffer.sqIdx := s1_uop.sqIdx
  io.sbuffer.mask := s1_mask
  io.sbuffer.pc := s1_uop.cf.pc // FIXME: remove it

  io.lsq.valid := io.in.valid
  io.lsq.paddr := s1_paddr
  io.lsq.uop := s1_uop
  io.lsq.sqIdx := s1_uop.sqIdx
  io.lsq.mask := s1_mask
  io.lsq.pc := s1_uop.cf.pc // FIXME: remove it

  io.out.valid := io.in.valid // && !s1_tlb_miss
  io.out.bits.paddr := s1_paddr
  io.out.bits.mmio := s1_mmio && !s1_exception
  io.out.bits.tlbMiss := s1_tlb_miss
  io.out.bits.uop.cf.exceptionVec(loadPageFault) := io.dtlbResp.bits.excp.pf.ld
  io.out.bits.uop.cf.exceptionVec(loadAccessFault) := io.dtlbResp.bits.excp.af.ld
  io.out.bits.rsIdx := io.in.bits.rsIdx

  io.in.ready := !io.in.valid || io.out.ready
}
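
// Note on the forward-query timing (as wired in LoadUnit below): s1 drives the
// request side of the sbuffer / lsq forward ports (valid, paddr, uop, mask),
// and the responses (forwardMask / forwardData) are consumed one cycle later
// by s2, which merges them byte by byte with the dcache response:
//
//   byte j of rdata = Mux(forwardMask(j), forwardData(j), byte j of dcache data)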

// Load Pipeline Stage 2
// DCache resp
class LoadUnit_S2 extends XSModule with HasLoadHelper {
  val io = IO(new Bundle() {
    val in = Flipped(Decoupled(new LsPipelineBundle))
    val out = Decoupled(new LsPipelineBundle)
    val tlbFeedback = ValidIO(new TlbFeedback)
    val dcacheResp = Flipped(DecoupledIO(new DCacheWordResp))
    val lsq = new LoadForwardQueryIO
    val sbuffer = new LoadForwardQueryIO
    val dataForwarded = Output(Bool())
  })

  val s2_uop = io.in.bits.uop
  val s2_mask = io.in.bits.mask
  val s2_paddr = io.in.bits.paddr
  val s2_tlb_miss = io.in.bits.tlbMiss
  val s2_mmio = io.in.bits.mmio
  val s2_exception = selectLoad(io.in.bits.uop.cf.exceptionVec, false).asUInt.orR
  val s2_cache_miss = io.dcacheResp.bits.miss
  val s2_cache_replay = io.dcacheResp.bits.replay

  io.dcacheResp.ready := true.B
  val dcacheShouldResp = !(s2_tlb_miss || s2_exception || s2_mmio)
  assert(!(io.in.valid && dcacheShouldResp && !io.dcacheResp.valid), "DCache response got lost")

  // feed the tlb result back to the RS
  io.tlbFeedback.valid := io.in.valid
  io.tlbFeedback.bits.hit := !s2_tlb_miss && (!s2_cache_replay || s2_mmio)
  io.tlbFeedback.bits.rsIdx := io.in.bits.rsIdx

  val forwardMask = io.out.bits.forwardMask
  val forwardData = io.out.bits.forwardData
  val fullForward = (~forwardMask.asUInt & s2_mask) === 0.U

  XSDebug(io.out.fire(), "[FWD LOAD RESP] pc %x fwd %x(%b) + %x(%b)\n",
    s2_uop.cf.pc,
    io.lsq.forwardData.asUInt, io.lsq.forwardMask.asUInt,
    io.in.bits.forwardData.asUInt, io.in.bits.forwardMask.asUInt
  )

  // data merge
  val rdata = VecInit((0 until XLEN / 8).map(j =>
    Mux(forwardMask(j), forwardData(j), io.dcacheResp.bits.data(8*(j+1)-1, 8*j)))).asUInt
  val rdataSel = LookupTree(s2_paddr(2, 0), List(
    "b000".U -> rdata(63, 0),
    "b001".U -> rdata(63, 8),
    "b010".U -> rdata(63, 16),
    "b011".U -> rdata(63, 24),
    "b100".U -> rdata(63, 32),
    "b101".U -> rdata(63, 40),
    "b110".U -> rdata(63, 48),
    "b111".U -> rdata(63, 56)
  ))
  val rdataPartialLoad = rdataHelper(s2_uop, rdataSel)

  // TODO: ECC check

  io.out.valid := io.in.valid && !s2_tlb_miss && (!s2_cache_replay || s2_mmio || s2_exception)
  // Instructions will be canceled in the store queue / LSQ, so we do not need
  // to handle flush in the load / store unit's out.valid
  io.out.bits := io.in.bits
  io.out.bits.data := rdataPartialLoad
  // when an exception occurs, mark it as not-miss and let it write back to the ROQ (via the int port)
  io.out.bits.miss := s2_cache_miss && !s2_exception
  io.out.bits.uop.ctrl.fpWen := io.in.bits.uop.ctrl.fpWen && !s2_exception
  io.out.bits.mmio := s2_mmio

  // For timing reasons, we can not let
  //   io.out.bits.miss := s2_cache_miss && !s2_exception && !fullForward
  // We use io.dataForwarded instead: it means the forward logic has prepared
  // all the data needed, so the dcache query is no longer necessary.
  // Such instructions will be written back from the load queue.
  io.dataForwarded := s2_cache_miss && fullForward && !s2_exception

  io.in.ready := io.out.ready || !io.in.valid

  // merge the forward results
  // lsq has higher priority than the sbuffer
  io.lsq := DontCare
  io.sbuffer := DontCare
  // generate XLEN/8 muxes
  for (i <- 0 until XLEN / 8) {
    when (io.sbuffer.forwardMask(i)) {
      io.out.bits.forwardMask(i) := true.B
      io.out.bits.forwardData(i) := io.sbuffer.forwardData(i)
    }
    when (io.lsq.forwardMask(i)) {
      io.out.bits.forwardMask(i) := true.B
      io.out.bits.forwardData(i) := io.lsq.forwardData(i)
    }
  }

  XSDebug(io.out.fire(), "[DCACHE LOAD RESP] pc %x rdata %x <- D$ %x + fwd %x(%b)\n",
    s2_uop.cf.pc, rdataPartialLoad, io.dcacheResp.bits.data,
    io.out.bits.forwardData.asUInt, io.out.bits.forwardMask.asUInt
  )
}
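
// Worked example of the byte select above (illustrative): an lh whose paddr
// ends in b010 selects rdata(63, 16), moving the halfword at byte offset 2
// down to bits (15, 0); rdataHelper then extends it according to fuOpType, e.g.
//
//   lh  -> SignExt(rdataSel(15, 0), XLEN)
//   lhu -> ZeroExt(rdataSel(15, 0), XLEN)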

class LoadUnit extends XSModule with HasLoadHelper {
  val io = IO(new Bundle() {
    val ldin = Flipped(Decoupled(new ExuInput))
    val ldout = Decoupled(new ExuOutput)
    val fpout = Decoupled(new ExuOutput)
    val redirect = Flipped(ValidIO(new Redirect))
    val flush = Input(Bool())
    val tlbFeedback = ValidIO(new TlbFeedback)
    val rsIdx = Input(UInt(log2Up(IssQueSize).W))
    val dcache = new DCacheLoadIO
    val dtlb = new TlbRequestIO()
    val sbuffer = new LoadForwardQueryIO
    val lsq = new LoadToLsqIO
  })

  val load_s0 = Module(new LoadUnit_S0)
  val load_s1 = Module(new LoadUnit_S1)
  val load_s2 = Module(new LoadUnit_S2)

  load_s0.io.in <> io.ldin
  load_s0.io.dtlbReq <> io.dtlb.req
  load_s0.io.dcacheReq <> io.dcache.req
  load_s0.io.rsIdx := io.rsIdx

  PipelineConnect(load_s0.io.out, load_s1.io.in, true.B, load_s0.io.out.bits.uop.roqIdx.needFlush(io.redirect, io.flush))

  load_s1.io.dtlbResp <> io.dtlb.resp
  io.dcache.s1_paddr <> load_s1.io.dcachePAddr
  io.dcache.s1_kill <> load_s1.io.dcacheKill
  load_s1.io.sbuffer <> io.sbuffer
  load_s1.io.lsq <> io.lsq.forward

  PipelineConnect(load_s1.io.out, load_s2.io.in, true.B, load_s1.io.out.bits.uop.roqIdx.needFlush(io.redirect, io.flush))

  load_s2.io.tlbFeedback <> io.tlbFeedback
  load_s2.io.dcacheResp <> io.dcache.resp
  load_s2.io.lsq.forwardData <> io.lsq.forward.forwardData
  load_s2.io.lsq.forwardMask <> io.lsq.forward.forwardMask
  load_s2.io.sbuffer.forwardData <> io.sbuffer.forwardData
  load_s2.io.sbuffer.forwardMask <> io.sbuffer.forwardMask
  load_s2.io.dataForwarded <> io.lsq.loadDataForwarded

  // use s2_hit_way to select the data received in s1
  load_s2.io.dcacheResp.bits.data := Mux1H(io.dcache.s2_hit_way, RegNext(io.dcache.s1_data))
  assert(load_s2.io.dcacheResp.bits.data === io.dcache.resp.bits.data)

  XSDebug(load_s0.io.out.valid,
    p"S0: pc ${Hexadecimal(load_s0.io.out.bits.uop.cf.pc)}, lId ${Hexadecimal(load_s0.io.out.bits.uop.lqIdx.asUInt)}, " +
    p"vaddr ${Hexadecimal(load_s0.io.out.bits.vaddr)}, mask ${Hexadecimal(load_s0.io.out.bits.mask)}\n")
  XSDebug(load_s1.io.out.valid,
    p"S1: pc ${Hexadecimal(load_s1.io.out.bits.uop.cf.pc)}, lId ${Hexadecimal(load_s1.io.out.bits.uop.lqIdx.asUInt)}, tlb_miss ${io.dtlb.resp.bits.miss}, " +
    p"paddr ${Hexadecimal(load_s1.io.out.bits.paddr)}, mmio ${load_s1.io.out.bits.mmio}\n")

  // writeback to LSQ
  // The current dcache uses MSHRs.
  // The load queue is updated at s2 for both hit and miss, int and fp loads.
  io.lsq.loadIn.valid := load_s2.io.out.valid
  io.lsq.loadIn.bits := load_s2.io.out.bits

  // write to the ROB and the writeback bus
  val s2_wb_valid = load_s2.io.out.valid && !load_s2.io.out.bits.miss
  val refillFpLoad = io.lsq.ldout.bits.uop.ctrl.fpWen

  // An int load that hits is written back at s2
  val intHitLoadOut = Wire(Valid(new ExuOutput))
  intHitLoadOut.valid := s2_wb_valid && !load_s2.io.out.bits.uop.ctrl.fpWen
  intHitLoadOut.bits.uop := load_s2.io.out.bits.uop
  intHitLoadOut.bits.data := load_s2.io.out.bits.data
  intHitLoadOut.bits.redirectValid := false.B
  intHitLoadOut.bits.redirect := DontCare
  intHitLoadOut.bits.debug.isMMIO := load_s2.io.out.bits.mmio
  intHitLoadOut.bits.debug.isPerfCnt := false.B
  intHitLoadOut.bits.fflags := DontCare

  load_s2.io.out.ready := true.B

  io.ldout.bits := Mux(intHitLoadOut.valid, intHitLoadOut.bits, io.lsq.ldout.bits)
  io.ldout.valid := intHitLoadOut.valid || io.lsq.ldout.valid && !refillFpLoad

  // An fp load that hits is latched into a register at s2, recoded at s3, and written back at s4
  val fpHitLoadOut = Wire(Valid(new ExuOutput))
  fpHitLoadOut.valid := s2_wb_valid && load_s2.io.out.bits.uop.ctrl.fpWen
  fpHitLoadOut.bits := intHitLoadOut.bits

  val fpLoadUnRecodedReg = Reg(Valid(new ExuOutput))
  fpLoadUnRecodedReg.valid := fpHitLoadOut.valid || io.lsq.ldout.valid && refillFpLoad
  when(fpHitLoadOut.valid || io.lsq.ldout.valid && refillFpLoad) {
    fpLoadUnRecodedReg.bits := Mux(fpHitLoadOut.valid, fpHitLoadOut.bits, io.lsq.ldout.bits)
  }

  val fpLoadRecodedReg = Reg(Valid(new ExuOutput))
  when(fpLoadUnRecodedReg.valid) {
    fpLoadRecodedReg := fpLoadUnRecodedReg
    fpLoadRecodedReg.bits.data := fpRdataHelper(fpLoadUnRecodedReg.bits.uop, fpLoadUnRecodedReg.bits.data) // recode
  }
  fpLoadRecodedReg.valid := fpLoadUnRecodedReg.valid

  io.fpout.bits := fpLoadRecodedReg.bits
  io.fpout.valid := fpLoadRecodedReg.valid

  // a refill writeback from the LSQ may use the port only when no hit load of the same class is using it
  io.lsq.ldout.ready := Mux(refillFpLoad, !fpHitLoadOut.valid, !intHitLoadOut.valid)

  when(io.ldout.fire()) {
    XSDebug("ldout %x\n", io.ldout.bits.uop.cf.pc)
  }

  when(io.fpout.fire()) {
    XSDebug("fpout %x\n", io.fpout.bits.uop.cf.pc)
  }
}
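
// Writeback path summary (as implemented above):
// - int hit load:    written back through io.ldout directly at s2
// - int refill load: returned from the LSQ via io.lsq.ldout when no int hit
//                    load occupies the port that cycle
// - fp load:         captured in fpLoadUnRecodedReg at s2 (or on LSQ refill),
//                    recoded by fpRdataHelper at s3, and presented on io.fpout
//                    at s4; note that io.fpout.ready is not consulted, so the
//                    consumer is assumed to always accept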