/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.mem

import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.util._
import utils._
import xiangshan._
import xiangshan.backend.decode.ImmUnion
import xiangshan.cache._
import xiangshan.cache.mmu.{TlbPtwIO, TlbReq, TlbResp, TlbCmd, TlbRequestIO, TLB}

class LoadToLsqIO(implicit p: Parameters) extends XSBundle {
  val loadIn = ValidIO(new LsPipelineBundle)
  val ldout = Flipped(DecoupledIO(new ExuOutput))
  val loadDataForwarded = Output(Bool())
  val needReplayFromRS = Output(Bool())
  val forward = new PipeLoadForwardQueryIO
}

class LoadToLoadIO(implicit p: Parameters) extends XSBundle {
  // load to load fast path is limited to ld (64 bit) used as vaddr src1 only
  val data = UInt(XLEN.W)
  val valid = Bool()
}

// Load Pipeline Stage 0
// Generate addr, use addr to query DCache and DTLB
class LoadUnit_S0(implicit p: Parameters) extends XSModule {
  val io = IO(new Bundle() {
    val in = Flipped(Decoupled(new ExuInput))
    val out = Decoupled(new LsPipelineBundle)
    val fastpath = Input(Vec(LoadPipelineWidth, new LoadToLoadIO))
    val dtlbReq = DecoupledIO(new TlbReq)
    val dcacheReq = DecoupledIO(new DCacheWordReq)
    val rsIdx = Input(UInt(log2Up(IssQueSize).W))
    val isFirstIssue = Input(Bool())
    val loadFastMatch = Input(UInt(exuParameters.LduCnt.W))
  })
  require(LoadPipelineWidth == exuParameters.LduCnt)

  val s0_uop = io.in.bits.uop
  val imm12 = WireInit(s0_uop.ctrl.imm(11, 0))

  // slow vaddr from non-load insts
  val slowpath_vaddr = io.in.bits.src(0) + SignExt(s0_uop.ctrl.imm(11, 0), VAddrBits)
  val slowpath_mask = genWmask(slowpath_vaddr, s0_uop.ctrl.fuOpType(1, 0))

  // fast vaddr from load insts
  val fastpath_vaddrs = WireInit(VecInit(List.tabulate(LoadPipelineWidth)(i => {
    io.fastpath(i).data + SignExt(s0_uop.ctrl.imm(11, 0), VAddrBits)
  })))
  val fastpath_masks = WireInit(VecInit(List.tabulate(LoadPipelineWidth)(i => {
    genWmask(fastpath_vaddrs(i), s0_uop.ctrl.fuOpType(1, 0))
  })))
  val fastpath_vaddr = Mux1H(io.loadFastMatch, fastpath_vaddrs)
  val fastpath_mask = Mux1H(io.loadFastMatch, fastpath_masks)

  // select vaddr from 2 alus
  val s0_vaddr = Mux(io.loadFastMatch.orR, fastpath_vaddr, slowpath_vaddr)
  val s0_mask = Mux(io.loadFastMatch.orR, fastpath_mask, slowpath_mask)
  XSPerfAccumulate("load_to_load_forward", io.loadFastMatch.orR && io.in.fire())

  // query DTLB
  io.dtlbReq.valid := io.in.valid
  io.dtlbReq.bits.vaddr := s0_vaddr
  io.dtlbReq.bits.cmd := TlbCmd.read
  io.dtlbReq.bits.roqIdx := s0_uop.roqIdx
  io.dtlbReq.bits.debug.pc := s0_uop.cf.pc
  io.dtlbReq.bits.debug.isFirstIssue := io.isFirstIssue
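
  // Note: the DTLB and the DCache are probed in parallel in s0 with the (still
  // untranslated) virtual address; the translated physical address only reaches
  // the DCache in the next stage, via LoadUnit_S1.io.dcachePAddr.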
  // query DCache
  io.dcacheReq.valid := io.in.valid
  io.dcacheReq.bits.cmd := MemoryOpConstants.M_XRD
  io.dcacheReq.bits.addr := s0_vaddr
  io.dcacheReq.bits.mask := s0_mask
  io.dcacheReq.bits.data := DontCare

  // TODO: update cache meta
  io.dcacheReq.bits.id := DontCare

  val addrAligned = LookupTree(s0_uop.ctrl.fuOpType(1, 0), List(
    "b00".U -> true.B,                   // b
    "b01".U -> (s0_vaddr(0) === 0.U),    // h
    "b10".U -> (s0_vaddr(1, 0) === 0.U), // w
    "b11".U -> (s0_vaddr(2, 0) === 0.U)  // d
  ))

  io.out.valid := io.in.valid && io.dcacheReq.ready

  io.out.bits := DontCare
  io.out.bits.vaddr := s0_vaddr
  io.out.bits.mask := s0_mask
  io.out.bits.uop := s0_uop
  io.out.bits.uop.cf.exceptionVec(loadAddrMisaligned) := !addrAligned
  io.out.bits.rsIdx := io.rsIdx

  io.in.ready := !io.in.valid || (io.out.ready && io.dcacheReq.ready)

  XSDebug(io.dcacheReq.fire(),
    p"[DCACHE LOAD REQ] pc ${Hexadecimal(s0_uop.cf.pc)}, vaddr ${Hexadecimal(s0_vaddr)}\n"
  )
  XSPerfAccumulate("in", io.in.valid)
  XSPerfAccumulate("stall_out", io.out.valid && !io.out.ready && io.dcacheReq.ready)
  XSPerfAccumulate("stall_dcache", io.out.valid && io.out.ready && !io.dcacheReq.ready)
  XSPerfAccumulate("addr_spec_success", io.out.fire() && s0_vaddr(VAddrBits-1, 12) === io.in.bits.src(0)(VAddrBits-1, 12))
  XSPerfAccumulate("addr_spec_failed", io.out.fire() && s0_vaddr(VAddrBits-1, 12) =/= io.in.bits.src(0)(VAddrBits-1, 12))
  XSPerfAccumulate("addr_spec_success_once", io.out.fire() && s0_vaddr(VAddrBits-1, 12) === io.in.bits.src(0)(VAddrBits-1, 12) && io.isFirstIssue)
  XSPerfAccumulate("addr_spec_failed_once", io.out.fire() && s0_vaddr(VAddrBits-1, 12) =/= io.in.bits.src(0)(VAddrBits-1, 12) && io.isFirstIssue)
}


// Load Pipeline Stage 1
// TLB resp (send paddr to dcache)
class LoadUnit_S1(implicit p: Parameters) extends XSModule {
  val io = IO(new Bundle() {
    val in = Flipped(Decoupled(new LsPipelineBundle))
    val out = Decoupled(new LsPipelineBundle)
    val dtlbResp = Flipped(DecoupledIO(new TlbResp))
    val dcachePAddr = Output(UInt(PAddrBits.W))
    val dcacheKill = Output(Bool())
    val fullForwardFast = Output(Bool())
    val sbuffer = new LoadForwardQueryIO
    val lsq = new PipeLoadForwardQueryIO
  })

  val s1_uop = io.in.bits.uop
  val s1_paddr = io.dtlbResp.bits.paddr
  val s1_exception = selectLoad(io.out.bits.uop.cf.exceptionVec, false).asUInt.orR
  val s1_tlb_miss = io.dtlbResp.bits.miss
  val s1_mmio = !s1_tlb_miss && io.dtlbResp.bits.mmio
  val s1_mask = io.in.bits.mask

  io.out.bits := io.in.bits // forwardXX field will be updated in s1

  io.dtlbResp.ready := true.B

  // TODO: PMA check
  io.dcachePAddr := s1_paddr
  io.dcacheKill := s1_tlb_miss || s1_exception || s1_mmio

  // load forward query datapath
  io.sbuffer.valid := io.in.valid && !(s1_exception || s1_tlb_miss)
  io.sbuffer.vaddr := io.in.bits.vaddr
  io.sbuffer.paddr := s1_paddr
  io.sbuffer.uop := s1_uop
  io.sbuffer.sqIdx := s1_uop.sqIdx
  io.sbuffer.mask := s1_mask
  io.sbuffer.pc := s1_uop.cf.pc // FIXME: remove it

  io.lsq.valid := io.in.valid && !(s1_exception || s1_tlb_miss)
  io.lsq.vaddr := io.in.bits.vaddr
  io.lsq.paddr := s1_paddr
  io.lsq.uop := s1_uop
  io.lsq.sqIdx := s1_uop.sqIdx
  io.lsq.sqIdxMask := DontCare // will be overwritten by sqIdxMask pre-generated in s0
  io.lsq.mask := s1_mask
  io.lsq.pc := s1_uop.cf.pc // FIXME: remove it
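
  // The forward query above is answered in two steps: an early per-byte hit
  // indication (forwardMaskFast) comes back within s1, while the forwarded data
  // itself (forwardMask/forwardData) is consumed in s2, where it is merged with
  // the dcache response.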
  // Generate forwardMaskFast to wake up insts earlier
  val forwardMaskFast = io.lsq.forwardMaskFast.asUInt | io.sbuffer.forwardMaskFast.asUInt
  io.fullForwardFast := (~forwardMaskFast & s1_mask) === 0.U

  io.out.valid := io.in.valid // && !s1_tlb_miss
  io.out.bits.paddr := s1_paddr
  io.out.bits.mmio := s1_mmio && !s1_exception
  io.out.bits.tlbMiss := s1_tlb_miss
  io.out.bits.uop.cf.exceptionVec(loadPageFault) := io.dtlbResp.bits.excp.pf.ld
  io.out.bits.uop.cf.exceptionVec(loadAccessFault) := io.dtlbResp.bits.excp.af.ld
  io.out.bits.ptwBack := io.dtlbResp.bits.ptwBack
  io.out.bits.rsIdx := io.in.bits.rsIdx

  io.in.ready := !io.in.valid || io.out.ready

  XSPerfAccumulate("in", io.in.valid)
  XSPerfAccumulate("tlb_miss", io.in.valid && s1_tlb_miss)
  XSPerfAccumulate("stall_out", io.out.valid && !io.out.ready)
}

// Load Pipeline Stage 2
// DCache resp
class LoadUnit_S2(implicit p: Parameters) extends XSModule with HasLoadHelper {
  val io = IO(new Bundle() {
    val in = Flipped(Decoupled(new LsPipelineBundle))
    val out = Decoupled(new LsPipelineBundle)
    val rsFeedback = ValidIO(new RSFeedback)
    val dcacheResp = Flipped(DecoupledIO(new DCacheWordResp))
    val lsq = new LoadForwardQueryIO
    val sbuffer = new LoadForwardQueryIO
    val dataForwarded = Output(Bool())
    val needReplayFromRS = Output(Bool())
    val fastpath = Output(new LoadToLoadIO)
  })

  val s2_uop = io.in.bits.uop
  val s2_mask = io.in.bits.mask
  val s2_paddr = io.in.bits.paddr
  val s2_tlb_miss = io.in.bits.tlbMiss
  val s2_data_invalid = io.lsq.dataInvalid
  val s2_exception = selectLoad(io.in.bits.uop.cf.exceptionVec, false).asUInt.orR
  val s2_mmio = io.in.bits.mmio && !s2_exception
  val s2_cache_miss = io.dcacheResp.bits.miss
  val s2_cache_replay = io.dcacheResp.bits.replay

  // val cnt = RegInit(127.U)
  // cnt := cnt + io.in.valid.asUInt
  // val s2_forward_fail = io.lsq.matchInvalid || io.sbuffer.matchInvalid || cnt === 0.U

  val s2_forward_fail = io.lsq.matchInvalid || io.sbuffer.matchInvalid

  // assert(!s2_forward_fail)

  io.dcacheResp.ready := true.B
  val dcacheShouldResp = !(s2_tlb_miss || s2_exception || s2_mmio)
  assert(!(io.in.valid && dcacheShouldResp && !io.dcacheResp.valid), "DCache response got lost")

  // merge forward result
  // lsq has higher priority than sbuffer
  val forwardMask = Wire(Vec(8, Bool()))
  val forwardData = Wire(Vec(8, UInt(8.W)))

  val fullForward = (~forwardMask.asUInt & s2_mask) === 0.U && !io.lsq.dataInvalid
  io.lsq := DontCare
  io.sbuffer := DontCare

  // generate XLEN/8 Muxes
  for (i <- 0 until XLEN / 8) {
    forwardMask(i) := io.lsq.forwardMask(i) || io.sbuffer.forwardMask(i)
    forwardData(i) := Mux(io.lsq.forwardMask(i), io.lsq.forwardData(i), io.sbuffer.forwardData(i))
  }

  XSDebug(io.out.fire(), "[FWD LOAD RESP] pc %x fwd %x(%b) + %x(%b)\n",
    s2_uop.cf.pc,
    io.lsq.forwardData.asUInt, io.lsq.forwardMask.asUInt,
    io.in.bits.forwardData.asUInt, io.in.bits.forwardMask.asUInt
  )
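
  // Per-byte merge: any byte covered by the store-forward mask takes the
  // forwarded value, all other bytes take the dcache response. rdataSel then
  // shifts the merged 64-bit word right by the byte offset paddr(2,0) before it
  // is handed to rdataHelper for the final per-type extraction.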
  // data merge
  val rdataVec = VecInit((0 until XLEN / 8).map(j =>
    Mux(forwardMask(j), forwardData(j), io.dcacheResp.bits.data(8*(j+1)-1, 8*j))))
  val rdata = rdataVec.asUInt
  val rdataSel = LookupTree(s2_paddr(2, 0), List(
    "b000".U -> rdata(63, 0),
    "b001".U -> rdata(63, 8),
    "b010".U -> rdata(63, 16),
    "b011".U -> rdata(63, 24),
    "b100".U -> rdata(63, 32),
    "b101".U -> rdata(63, 40),
    "b110".U -> rdata(63, 48),
    "b111".U -> rdata(63, 56)
  ))
  val rdataPartialLoad = rdataHelper(s2_uop, rdataSel)

  io.out.valid := io.in.valid && !s2_tlb_miss && !s2_data_invalid
  // Inst will be canceled in store queue / lsq,
  // so we do not need to care about flush in load / store unit's out.valid
  io.out.bits := io.in.bits
  io.out.bits.data := rdataPartialLoad
  // when an exception occurs, set it to not miss and let it write back to roq (via int port)
  if (EnableFastForward) {
    io.out.bits.miss := s2_cache_miss && !s2_exception && !s2_forward_fail && !fullForward
  } else {
    io.out.bits.miss := s2_cache_miss && !s2_exception && !s2_forward_fail
  }
  io.out.bits.uop.ctrl.fpWen := io.in.bits.uop.ctrl.fpWen && !s2_exception
  io.out.bits.uop.cf.replayInst := s2_forward_fail && !s2_mmio // if forward fails, replay this inst
  io.out.bits.mmio := s2_mmio

  // For timing reasons, sometimes we can not let
  //   io.out.bits.miss := s2_cache_miss && !s2_exception && !fullForward
  // We use io.dataForwarded instead. It means the forward logic has prepared all the data
  // needed, and the dcache query is no longer necessary.
  // Such an inst will be written back from the load queue.
  io.dataForwarded := s2_cache_miss && fullForward && !s2_exception && !s2_forward_fail
  // io.out.bits.forwardX will be sent to lq
  io.out.bits.forwardMask := forwardMask
  // data retrieved from the dcache is also included in io.out.bits.forwardData
  io.out.bits.forwardData := rdataVec

  io.in.ready := io.out.ready || !io.in.valid

  // feedback tlb result to RS
  io.rsFeedback.valid := io.in.valid
  io.rsFeedback.bits.hit := !s2_tlb_miss && (!s2_cache_replay || s2_mmio || s2_exception || fullForward) && !s2_data_invalid
  io.rsFeedback.bits.rsIdx := io.in.bits.rsIdx
  io.rsFeedback.bits.flushState := io.in.bits.ptwBack
  io.rsFeedback.bits.sourceType := Mux(s2_tlb_miss, RSFeedbackType.tlbMiss,
    Mux(io.lsq.dataInvalid,
      RSFeedbackType.dataInvalid,
      RSFeedbackType.mshrFull
    )
  )

  // s2_cache_replay is quite slow to generate, send it separately to LQ
  io.needReplayFromRS := s2_cache_replay && !fullForward

  // fast load to load forward
  io.fastpath.valid := io.in.valid // for debug only
  io.fastpath.data := rdata // raw data

  XSDebug(io.out.fire(), "[DCACHE LOAD RESP] pc %x rdata %x <- D$ %x + fwd %x(%b)\n",
    s2_uop.cf.pc, rdataPartialLoad, io.dcacheResp.bits.data,
    forwardData.asUInt, forwardMask.asUInt
  )

  XSPerfAccumulate("in", io.in.valid)
  XSPerfAccumulate("dcache_miss", io.in.valid && s2_cache_miss)
  XSPerfAccumulate("full_forward", io.in.valid && fullForward)
  XSPerfAccumulate("dcache_miss_full_forward", io.in.valid && s2_cache_miss && fullForward)
  XSPerfAccumulate("replay", io.rsFeedback.valid && !io.rsFeedback.bits.hit)
  XSPerfAccumulate("replay_tlb_miss", io.rsFeedback.valid && !io.rsFeedback.bits.hit && s2_tlb_miss)
  XSPerfAccumulate("replay_cache", io.rsFeedback.valid && !io.rsFeedback.bits.hit && !s2_tlb_miss && s2_cache_replay)
  XSPerfAccumulate("stall_out", io.out.valid && !io.out.ready)
}
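
// Top-level load unit: chains the three stages above with PipelineConnect
// (flushing on redirect), and hooks them up to the TLB, DCache, sbuffer and LSQ.
// Loads that complete in s2 are written back directly; missed or MMIO loads are
// written back later from the load queue through io.lsq.ldout.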
class LoadUnit(implicit p: Parameters) extends XSModule with HasLoadHelper {
  val io = IO(new Bundle() {
    val ldin = Flipped(Decoupled(new ExuInput))
    val ldout = Decoupled(new ExuOutput)
    val redirect = Flipped(ValidIO(new Redirect))
    val flush = Input(Bool())
    val rsFeedback = ValidIO(new RSFeedback)
    val rsIdx = Input(UInt(log2Up(IssQueSize).W))
    val isFirstIssue = Input(Bool())
    val dcache = new DCacheLoadIO
    val sbuffer = new LoadForwardQueryIO
    val lsq = new LoadToLsqIO
    val fastUop = ValidIO(new MicroOp) // early wakeup signal generated in load_s1

    val tlb = new TlbRequestIO
    val fastpathOut = Output(new LoadToLoadIO)
    val fastpathIn = Input(Vec(LoadPipelineWidth, new LoadToLoadIO))
    val loadFastMatch = Input(UInt(exuParameters.LduCnt.W))
  })

  val load_s0 = Module(new LoadUnit_S0)
  val load_s1 = Module(new LoadUnit_S1)
  val load_s2 = Module(new LoadUnit_S2)

  load_s0.io.in <> io.ldin
  load_s0.io.dtlbReq <> io.tlb.req
  load_s0.io.dcacheReq <> io.dcache.req
  load_s0.io.rsIdx := io.rsIdx
  load_s0.io.isFirstIssue := io.isFirstIssue
  load_s0.io.fastpath := io.fastpathIn
  load_s0.io.loadFastMatch := io.loadFastMatch

  PipelineConnect(load_s0.io.out, load_s1.io.in, true.B, load_s0.io.out.bits.uop.roqIdx.needFlush(io.redirect, io.flush))

  load_s1.io.dtlbResp <> io.tlb.resp
  io.dcache.s1_paddr <> load_s1.io.dcachePAddr
  io.dcache.s1_kill <> load_s1.io.dcacheKill
  load_s1.io.sbuffer <> io.sbuffer
  load_s1.io.lsq <> io.lsq.forward

  PipelineConnect(load_s1.io.out, load_s2.io.in, true.B, load_s1.io.out.bits.uop.roqIdx.needFlush(io.redirect, io.flush))

  load_s2.io.dcacheResp <> io.dcache.resp
  load_s2.io.lsq.forwardData <> io.lsq.forward.forwardData
  load_s2.io.lsq.forwardMask <> io.lsq.forward.forwardMask
  load_s2.io.lsq.forwardMaskFast <> io.lsq.forward.forwardMaskFast // should not be used in load_s2
  load_s2.io.lsq.dataInvalid <> io.lsq.forward.dataInvalid
  load_s2.io.lsq.matchInvalid <> io.lsq.forward.matchInvalid
  load_s2.io.sbuffer.forwardData <> io.sbuffer.forwardData
  load_s2.io.sbuffer.forwardMask <> io.sbuffer.forwardMask
  load_s2.io.sbuffer.forwardMaskFast <> io.sbuffer.forwardMaskFast // should not be used in load_s2
  load_s2.io.sbuffer.dataInvalid <> io.sbuffer.dataInvalid // always false
  load_s2.io.sbuffer.matchInvalid <> io.sbuffer.matchInvalid
  load_s2.io.dataForwarded <> io.lsq.loadDataForwarded
  load_s2.io.fastpath <> io.fastpathOut
  io.rsFeedback.bits := RegNext(load_s2.io.rsFeedback.bits)
  io.rsFeedback.valid := RegNext(load_s2.io.rsFeedback.valid && !load_s2.io.out.bits.uop.roqIdx.needFlush(io.redirect, io.flush))
  io.lsq.needReplayFromRS := load_s2.io.needReplayFromRS

  // pre-calculate sqIdx mask in s0, then send it to lsq in s1 for forwarding
  val sqIdxMaskReg = RegNext(UIntToMask(load_s0.io.in.bits.uop.sqIdx.value, StoreQueueSize))
  io.lsq.forward.sqIdxMask := sqIdxMaskReg

  // // use s2_hit_way to select data received in s1
  // load_s2.io.dcacheResp.bits.data := Mux1H(RegNext(io.dcache.s1_hit_way), RegNext(io.dcache.s1_data))
  // assert(load_s2.io.dcacheResp.bits.data === io.dcache.resp.bits.data)
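
  // Speculative early wakeup: io.fastUop broadcasts the load's uop from s1, one
  // cycle before the data is written back, so dependent instructions can issue
  // back-to-back. It is only asserted when all of the conditions below hold.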
  io.fastUop.valid := io.dcache.s1_hit_way.orR && // dcache hit
    !io.dcache.s1_disable_fast_wakeup && // load fast wakeup should be disabled when dcache data read is not ready
    load_s1.io.in.valid && // valid load request
    !load_s1.io.dcacheKill && // not mmio or tlb miss
    !io.lsq.forward.dataInvalidFast // forward failed
  io.fastUop.bits := load_s1.io.out.bits.uop

  XSDebug(load_s0.io.out.valid,
    p"S0: pc ${Hexadecimal(load_s0.io.out.bits.uop.cf.pc)}, lId ${Hexadecimal(load_s0.io.out.bits.uop.lqIdx.asUInt)}, " +
    p"vaddr ${Hexadecimal(load_s0.io.out.bits.vaddr)}, mask ${Hexadecimal(load_s0.io.out.bits.mask)}\n")
  XSDebug(load_s1.io.out.valid,
    p"S1: pc ${Hexadecimal(load_s1.io.out.bits.uop.cf.pc)}, lId ${Hexadecimal(load_s1.io.out.bits.uop.lqIdx.asUInt)}, tlb_miss ${io.tlb.resp.bits.miss}, " +
    p"paddr ${Hexadecimal(load_s1.io.out.bits.paddr)}, mmio ${load_s1.io.out.bits.mmio}\n")

  // writeback to LSQ
  // Current dcache uses MSHR
  // Load queue will be updated at s2 for both hit/miss int/fp load
  io.lsq.loadIn.valid := load_s2.io.out.valid
  io.lsq.loadIn.bits := load_s2.io.out.bits

  // write to rob and writeback bus
  val s2_wb_valid = load_s2.io.out.valid && !load_s2.io.out.bits.miss && !load_s2.io.out.bits.mmio

  // Int load, if hit, will be written back at s2
  val hitLoadOut = Wire(Valid(new ExuOutput))
  hitLoadOut.valid := s2_wb_valid
  hitLoadOut.bits.uop := load_s2.io.out.bits.uop
  hitLoadOut.bits.data := load_s2.io.out.bits.data
  hitLoadOut.bits.redirectValid := false.B
  hitLoadOut.bits.redirect := DontCare
  hitLoadOut.bits.debug.isMMIO := load_s2.io.out.bits.mmio
  hitLoadOut.bits.debug.isPerfCnt := false.B
  hitLoadOut.bits.debug.paddr := load_s2.io.out.bits.paddr
  hitLoadOut.bits.fflags := DontCare

  load_s2.io.out.ready := true.B

  io.ldout.bits := Mux(hitLoadOut.valid, hitLoadOut.bits, io.lsq.ldout.bits)
  io.ldout.valid := hitLoadOut.valid || io.lsq.ldout.valid

  io.lsq.ldout.ready := !hitLoadOut.valid

  when(io.ldout.fire()) {
    XSDebug("ldout %x\n", io.ldout.bits.uop.cf.pc)
  }
}