xref: /XiangShan/src/main/scala/xiangshan/mem/pipeline/LoadUnit.scala (revision 5668a921eb594c3ea72da43594b3fb54e05959a3)
/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.mem

import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.util._
import utils._
import xiangshan._
import xiangshan.backend.decode.ImmUnion
import xiangshan.backend.fu.PMPRespBundle
import xiangshan.cache._
import xiangshan.cache.mmu.{TLB, TlbCmd, TlbPtwIO, TlbReq, TlbRequestIO, TlbResp}

class LoadToLsqIO(implicit p: Parameters) extends XSBundle {
  val loadIn = ValidIO(new LsPipelineBundle)
  val ldout = Flipped(DecoupledIO(new ExuOutput))
  val loadDataForwarded = Output(Bool())
  val needReplayFromRS = Output(Bool())
  val forward = new PipeLoadForwardQueryIO
  val loadViolationQuery = new LoadViolationQueryIO
}

class LoadToLoadIO(implicit p: Parameters) extends XSBundle {
  // load to load fast path is limited to ld (64 bit) used as vaddr src1 only
  val data = UInt(XLEN.W)
  val valid = Bool()
}
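// Note on the fast path: LoadToLoadIO carries the raw 64-bit result of an
// in-flight load straight into another load's address generation (load_s0),
// so dependent (pointer-chasing) loads can start without waiting for the
// regular writeback path.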

// Load Pipeline Stage 0
// Generate addr, use addr to query DCache and DTLB
class LoadUnit_S0(implicit p: Parameters) extends XSModule with HasDCacheParameters{
  val io = IO(new Bundle() {
    val in = Flipped(Decoupled(new ExuInput))
    val out = Decoupled(new LsPipelineBundle)
    val fastpath = Input(Vec(LoadPipelineWidth, new LoadToLoadIO))
    val dtlbReq = DecoupledIO(new TlbReq)
    val dcacheReq = DecoupledIO(new DCacheWordReq)
    val rsIdx = Input(UInt(log2Up(IssQueSize).W))
    val isFirstIssue = Input(Bool())
    val loadFastMatch = Input(UInt(exuParameters.LduCnt.W))
  })
  require(LoadPipelineWidth == exuParameters.LduCnt)

  val s0_uop = io.in.bits.uop
  val imm12 = WireInit(s0_uop.ctrl.imm(11,0))

  // slow vaddr from non-load insts
  val slowpath_vaddr = io.in.bits.src(0) + SignExt(s0_uop.ctrl.imm(11,0), VAddrBits)
  val slowpath_mask = genWmask(slowpath_vaddr, s0_uop.ctrl.fuOpType(1,0))
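  // This is the standard RISC-V base+offset calculation: rs1 plus the
  // sign-extended 12-bit I-type immediate. E.g. for `ld x1, -8(x2)` the
  // immediate field is 0xff8, so slowpath_vaddr = src(0) - 8.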

  // fast vaddr from load insts
  val fastpath_vaddrs = WireInit(VecInit(List.tabulate(LoadPipelineWidth)(i => {
     io.fastpath(i).data + SignExt(s0_uop.ctrl.imm(11,0), VAddrBits)
  })))
  val fastpath_masks = WireInit(VecInit(List.tabulate(LoadPipelineWidth)(i => {
     genWmask(fastpath_vaddrs(i), s0_uop.ctrl.fuOpType(1,0))
  })))
  val fastpath_vaddr = Mux1H(io.loadFastMatch, fastpath_vaddrs)
  val fastpath_mask  = Mux1H(io.loadFastMatch, fastpath_masks)
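  // Mux1H is a one-hot select: loadFastMatch is expected to have at most one
  // bit set, and e.g. loadFastMatch = "b10".U picks fastpath_vaddrs(1). The
  // result is undefined when more than one bit is set, which the match logic
  // upstream must guarantee never happens.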

  // select vaddr from 2 alus
  val s0_vaddr = Mux(io.loadFastMatch.orR, fastpath_vaddr, slowpath_vaddr)
  val s0_mask  = Mux(io.loadFastMatch.orR, fastpath_mask, slowpath_mask)
  XSPerfAccumulate("load_to_load_forward", io.loadFastMatch.orR && io.in.fire())

  val isSoftPrefetch = LSUOpType.isPrefetch(s0_uop.ctrl.fuOpType)
  val isSoftPrefetchRead = s0_uop.ctrl.fuOpType === LSUOpType.prefetch_r
  val isSoftPrefetchWrite = s0_uop.ctrl.fuOpType === LSUOpType.prefetch_w

  // query DTLB
  io.dtlbReq.valid := io.in.valid
  io.dtlbReq.bits.vaddr := s0_vaddr
  io.dtlbReq.bits.cmd := TlbCmd.read
  io.dtlbReq.bits.size := LSUOpType.size(io.in.bits.uop.ctrl.fuOpType)
  io.dtlbReq.bits.robIdx := s0_uop.robIdx
  io.dtlbReq.bits.debug.pc := s0_uop.cf.pc
  io.dtlbReq.bits.debug.isFirstIssue := io.isFirstIssue

  // query DCache
  io.dcacheReq.valid := io.in.valid
  when (isSoftPrefetchRead) {
    io.dcacheReq.bits.cmd  := MemoryOpConstants.M_PFR
  }.elsewhen (isSoftPrefetchWrite) {
    io.dcacheReq.bits.cmd  := MemoryOpConstants.M_PFW
  }.otherwise {
    io.dcacheReq.bits.cmd  := MemoryOpConstants.M_XRD
  }
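  // M_PFR / M_PFW are the rocket-chip memory-op encodings for prefetch with
  // intent to read / write; M_XRD is a plain read. A software prefetch probes
  // the cache like a load but produces no architectural result.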
  io.dcacheReq.bits.addr := s0_vaddr
  io.dcacheReq.bits.mask := s0_mask
  io.dcacheReq.bits.data := DontCare
  when(isSoftPrefetch) {
    io.dcacheReq.bits.instrtype := SOFT_PREFETCH.U
  }.otherwise {
    io.dcacheReq.bits.instrtype := LOAD_SOURCE.U
  }

  // TODO: update cache meta
  io.dcacheReq.bits.id   := DontCare

  val addrAligned = LookupTree(s0_uop.ctrl.fuOpType(1, 0), List(
    "b00".U   -> true.B,                   //b
    "b01".U   -> (s0_vaddr(0)    === 0.U), //h
    "b10".U   -> (s0_vaddr(1, 0) === 0.U), //w
    "b11".U   -> (s0_vaddr(2, 0) === 0.U)  //d
  ))
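  // fuOpType(1,0) encodes log2 of the access size (0 = byte, 1 = half,
  // 2 = word, 3 = double), so alignment only needs the low log2(size) address
  // bits to be zero. E.g. an lw to a vaddr ending in 0x6 has
  // vaddr(1,0) = 2 =/= 0 and raises loadAddrMisaligned below.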

  io.out.valid := io.in.valid && io.dcacheReq.ready

  io.out.bits := DontCare
  io.out.bits.vaddr := s0_vaddr
  io.out.bits.mask := s0_mask
  io.out.bits.uop := s0_uop
  io.out.bits.uop.cf.exceptionVec(loadAddrMisaligned) := !addrAligned
  io.out.bits.rsIdx := io.rsIdx
  io.out.bits.isFirstIssue := io.isFirstIssue
  io.out.bits.isSoftPrefetch := isSoftPrefetch

  io.in.ready := !io.in.valid || (io.out.ready && io.dcacheReq.ready)

  XSDebug(io.dcacheReq.fire(),
    p"[DCACHE LOAD REQ] pc ${Hexadecimal(s0_uop.cf.pc)}, vaddr ${Hexadecimal(s0_vaddr)}\n"
  )
  XSPerfAccumulate("in_valid", io.in.valid)
  XSPerfAccumulate("in_fire", io.in.fire)
  XSPerfAccumulate("in_fire_first_issue", io.in.valid && io.isFirstIssue)
  XSPerfAccumulate("stall_out", io.out.valid && !io.out.ready && io.dcacheReq.ready)
  XSPerfAccumulate("stall_dcache", io.out.valid && io.out.ready && !io.dcacheReq.ready)
  XSPerfAccumulate("addr_spec_success", io.out.fire() && s0_vaddr(VAddrBits-1, 12) === io.in.bits.src(0)(VAddrBits-1, 12))
  XSPerfAccumulate("addr_spec_failed", io.out.fire() && s0_vaddr(VAddrBits-1, 12) =/= io.in.bits.src(0)(VAddrBits-1, 12))
  XSPerfAccumulate("addr_spec_success_once", io.out.fire() && s0_vaddr(VAddrBits-1, 12) === io.in.bits.src(0)(VAddrBits-1, 12) && io.isFirstIssue)
  XSPerfAccumulate("addr_spec_failed_once", io.out.fire() && s0_vaddr(VAddrBits-1, 12) =/= io.in.bits.src(0)(VAddrBits-1, 12) && io.isFirstIssue)
}


// Load Pipeline Stage 1
// TLB resp (send paddr to dcache)
class LoadUnit_S1(implicit p: Parameters) extends XSModule {
  val io = IO(new Bundle() {
    val in = Flipped(Decoupled(new LsPipelineBundle))
    val out = Decoupled(new LsPipelineBundle)
    val dtlbResp = Flipped(DecoupledIO(new TlbResp))
    val dcachePAddr = Output(UInt(PAddrBits.W))
    val dcacheKill = Output(Bool())
    val dcacheBankConflict = Input(Bool())
    val fullForwardFast = Output(Bool())
    val sbuffer = new LoadForwardQueryIO
    val lsq = new PipeLoadForwardQueryIO
    val loadViolationQueryReq = Decoupled(new LoadViolationQueryReq)
    val rsFeedback = ValidIO(new RSFeedback)
    val csrCtrl = Flipped(new CustomCSRCtrlIO)
    val needLdVioCheckRedo = Output(Bool())
  })

  val s1_uop = io.in.bits.uop
  val s1_paddr = io.dtlbResp.bits.paddr
  val s1_exception = selectLoad(io.out.bits.uop.cf.exceptionVec, false).asUInt.orR // af & pf exceptions are modified below
  val s1_tlb_miss = io.dtlbResp.bits.miss
  val s1_mask = io.in.bits.mask
  val s1_bank_conflict = io.dcacheBankConflict

  io.out.bits := io.in.bits // forwardXX fields will be updated in s1

  io.dtlbResp.ready := true.B

  // TODO: PMA check
  io.dcachePAddr := s1_paddr
  //io.dcacheKill := s1_tlb_miss || s1_exception || s1_mmio
  io.dcacheKill := s1_tlb_miss || s1_exception

  // load forward query datapath
  io.sbuffer.valid := io.in.valid && !(s1_exception || s1_tlb_miss)
  io.sbuffer.vaddr := io.in.bits.vaddr
  io.sbuffer.paddr := s1_paddr
  io.sbuffer.uop := s1_uop
  io.sbuffer.sqIdx := s1_uop.sqIdx
  io.sbuffer.mask := s1_mask
  io.sbuffer.pc := s1_uop.cf.pc // FIXME: remove it

  io.lsq.valid := io.in.valid && !(s1_exception || s1_tlb_miss)
  io.lsq.vaddr := io.in.bits.vaddr
  io.lsq.paddr := s1_paddr
  io.lsq.uop := s1_uop
  io.lsq.sqIdx := s1_uop.sqIdx
  io.lsq.sqIdxMask := DontCare // will be overwritten by sqIdxMask pre-generated in s0
  io.lsq.mask := s1_mask
  io.lsq.pc := s1_uop.cf.pc // FIXME: remove it

  // ld-ld violation query
  io.loadViolationQueryReq.valid := io.in.valid && !(s1_exception || s1_tlb_miss)
  io.loadViolationQueryReq.bits.paddr := s1_paddr
  io.loadViolationQueryReq.bits.uop := s1_uop

  // Generate forwardMaskFast to wake up insts earlier
  val forwardMaskFast = io.lsq.forwardMaskFast.asUInt | io.sbuffer.forwardMaskFast.asUInt
  io.fullForwardFast := (~forwardMaskFast & s1_mask) === 0.U
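  // A load is fully forwarded when every byte it reads can be supplied by an
  // older store. E.g. with s1_mask = "b00001111".U (a word load at offset 0)
  // and forwardMaskFast = "b00111111".U, (~forwardMaskFast & s1_mask) = 0, so
  // nothing is needed from the dcache.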

  // Generate feedback signal caused by:
  // * dcache bank conflict
  // * need redo ld-ld violation check
  val needLdVioCheckRedo = io.loadViolationQueryReq.valid &&
    !io.loadViolationQueryReq.ready &&
    RegNext(io.csrCtrl.ldld_vio_check)
  io.needLdVioCheckRedo := needLdVioCheckRedo
  io.rsFeedback.valid := io.in.valid && (s1_bank_conflict || needLdVioCheckRedo)
  io.rsFeedback.bits.hit := false.B // a bank conflict was found or the ld-ld violation check must be redone
  io.rsFeedback.bits.rsIdx := io.in.bits.rsIdx
  io.rsFeedback.bits.flushState := io.in.bits.ptwBack
  io.rsFeedback.bits.sourceType := Mux(s1_bank_conflict, RSFeedbackType.bankConflict, RSFeedbackType.ldVioCheckRedo)
  io.rsFeedback.bits.dataInvalidSqIdx := DontCare

  // if replay is detected in load_s1,
  // load inst will be canceled immediately
  io.out.valid := io.in.valid && !io.rsFeedback.valid
  io.out.bits.paddr := s1_paddr
  io.out.bits.tlbMiss := s1_tlb_miss

  // the current ori test triggers the ldest == 0 case; this will be modified in the future
  // af & pf exceptions are overwritten here with the dtlb response
  io.out.bits.uop.cf.exceptionVec(loadPageFault) := io.dtlbResp.bits.excp.pf.ld
  io.out.bits.uop.cf.exceptionVec(loadAccessFault) := io.dtlbResp.bits.excp.af.ld

  io.out.bits.ptwBack := io.dtlbResp.bits.ptwBack
  io.out.bits.rsIdx := io.in.bits.rsIdx

  io.out.bits.isSoftPrefetch := io.in.bits.isSoftPrefetch

  io.in.ready := !io.in.valid || io.out.ready

  XSPerfAccumulate("in_valid", io.in.valid)
  XSPerfAccumulate("in_fire", io.in.fire)
  XSPerfAccumulate("in_fire_first_issue", io.in.fire && io.in.bits.isFirstIssue)
  XSPerfAccumulate("tlb_miss", io.in.fire && s1_tlb_miss)
  XSPerfAccumulate("tlb_miss_first_issue", io.in.fire && s1_tlb_miss && io.in.bits.isFirstIssue)
  XSPerfAccumulate("stall_out", io.out.valid && !io.out.ready)
}

// Load Pipeline Stage 2
// DCache resp
class LoadUnit_S2(implicit p: Parameters) extends XSModule with HasLoadHelper {
  val io = IO(new Bundle() {
    val in = Flipped(Decoupled(new LsPipelineBundle))
    val out = Decoupled(new LsPipelineBundle)
    val rsFeedback = ValidIO(new RSFeedback)
    val dcacheResp = Flipped(DecoupledIO(new DCacheWordResp))
    val pmpResp = Flipped(new PMPRespBundle())
    val lsq = new LoadForwardQueryIO
    val dataInvalidSqIdx = Input(UInt())
    val sbuffer = new LoadForwardQueryIO
    val dataForwarded = Output(Bool())
    val needReplayFromRS = Output(Bool())
    val fullForward = Output(Bool())
    val fastpath = Output(new LoadToLoadIO)
    val dcache_kill = Output(Bool())
    val loadViolationQueryResp = Flipped(Valid(new LoadViolationQueryResp))
    val csrCtrl = Flipped(new CustomCSRCtrlIO)
    val sentFastUop = Input(Bool())
  })
  val isSoftPrefetch = io.in.bits.isSoftPrefetch
  val excep = WireInit(io.in.bits.uop.cf.exceptionVec)
  excep(loadAccessFault) := io.in.bits.uop.cf.exceptionVec(loadAccessFault) || io.pmpResp.ld
  when (isSoftPrefetch) {
    excep := 0.U.asTypeOf(excep.cloneType)
  }
  val s2_exception = selectLoad(excep, false).asUInt.orR

  val actually_mmio = io.pmpResp.mmio
  val s2_uop = io.in.bits.uop
  val s2_mask = io.in.bits.mask
  val s2_paddr = io.in.bits.paddr
  val s2_tlb_miss = io.in.bits.tlbMiss
  val s2_data_invalid = io.lsq.dataInvalid
  val s2_mmio = !isSoftPrefetch && actually_mmio && !s2_exception
  val s2_cache_miss = io.dcacheResp.bits.miss
  val s2_cache_replay = io.dcacheResp.bits.replay
  val s2_is_prefetch = io.in.bits.isSoftPrefetch

  // val cnt = RegInit(127.U)
  // cnt := cnt + io.in.valid.asUInt
  // val s2_forward_fail = io.lsq.matchInvalid || io.sbuffer.matchInvalid || cnt === 0.U

  val s2_forward_fail = io.lsq.matchInvalid || io.sbuffer.matchInvalid
  // assert(!s2_forward_fail)
  io.dcache_kill := false.B // move pmp resp kill to outside
  io.dcacheResp.ready := true.B
  val dcacheShouldResp = !(s2_tlb_miss || s2_exception || s2_mmio || s2_is_prefetch)
  assert(!(io.in.valid && (dcacheShouldResp && !io.dcacheResp.valid)), "DCache response got lost")

  // merge forward result
  // lsq has higher priority than sbuffer
  val forwardMask = Wire(Vec(8, Bool()))
  val forwardData = Wire(Vec(8, UInt(8.W)))

  val fullForward = (~forwardMask.asUInt & s2_mask) === 0.U && !io.lsq.dataInvalid
  io.lsq := DontCare
  io.sbuffer := DontCare
  io.fullForward := fullForward

  // generate XLEN/8 Muxes
  for (i <- 0 until XLEN / 8) {
    forwardMask(i) := io.lsq.forwardMask(i) || io.sbuffer.forwardMask(i)
    forwardData(i) := Mux(io.lsq.forwardMask(i), io.lsq.forwardData(i), io.sbuffer.forwardData(i))
  }
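  // The store queue (lsq) must win over the sbuffer byte by byte: the sbuffer
  // only holds stores that have already committed, while the store queue holds
  // younger in-flight stores, whose data is architecturally newer for this
  // load.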

  XSDebug(io.out.fire(), "[FWD LOAD RESP] pc %x fwd %x(%b) + %x(%b)\n",
    s2_uop.cf.pc,
    io.lsq.forwardData.asUInt, io.lsq.forwardMask.asUInt,
    io.in.bits.forwardData.asUInt, io.in.bits.forwardMask.asUInt
  )

  // data merge
  val rdataVec = VecInit((0 until XLEN / 8).map(j =>
    Mux(forwardMask(j), forwardData(j), io.dcacheResp.bits.data(8*(j+1)-1, 8*j))))
  val rdata = rdataVec.asUInt
  val rdataSel = LookupTree(s2_paddr(2, 0), List(
    "b000".U -> rdata(63, 0),
    "b001".U -> rdata(63, 8),
    "b010".U -> rdata(63, 16),
    "b011".U -> rdata(63, 24),
    "b100".U -> rdata(63, 32),
    "b101".U -> rdata(63, 40),
    "b110".U -> rdata(63, 48),
    "b111".U -> rdata(63, 56)
  ))
  val rdataPartialLoad = rdataHelper(s2_uop, rdataSel)
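  // rdataSel right-shifts the merged doubleword by the byte offset paddr(2,0)
  // so the accessed bytes land at bit 0. E.g. an lh at byte offset 2 selects
  // rdata(63, 16); rdataHelper (from HasLoadHelper) then sign- or zero-extends
  // the low bits according to fuOpType (lh vs lhu, etc.).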

  io.out.valid := io.in.valid && !s2_tlb_miss && !s2_data_invalid
  // Inst will be canceled in store queue / lsq,
  // so we do not need to care about flush in load / store unit's out.valid
  io.out.bits := io.in.bits
  io.out.bits.data := rdataPartialLoad
  // when exception occurs, set it to not miss and let it write back to rob (via int port)
  if (EnableFastForward) {
    io.out.bits.miss := s2_cache_miss &&
      !s2_exception &&
      !s2_forward_fail &&
      !fullForward &&
      !s2_is_prefetch
  } else {
    io.out.bits.miss := s2_cache_miss &&
      !s2_exception &&
      !s2_forward_fail &&
      !s2_is_prefetch
  }
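  // Note that EnableFastForward is an elaboration-time (Scala) parameter, so
  // this if/else chooses which miss equation gets generated; it is not a
  // hardware mux. With fast forward enabled, a fully forwarded load is not
  // reported as a miss even when the dcache missed.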
  io.out.bits.uop.ctrl.fpWen := io.in.bits.uop.ctrl.fpWen && !s2_exception
  // if forward fails, replay this inst from fetch
  val forwardFailReplay = s2_forward_fail && !s2_mmio
  // if a ld-ld violation is detected, replay this inst from fetch
  val ldldVioReplay = io.loadViolationQueryResp.valid &&
    io.loadViolationQueryResp.bits.have_violation &&
    RegNext(io.csrCtrl.ldld_vio_check)
  io.out.bits.uop.ctrl.replayInst := forwardFailReplay || ldldVioReplay
  io.out.bits.mmio := s2_mmio
  io.out.bits.uop.ctrl.flushPipe := io.in.bits.uop.ctrl.flushPipe || (s2_mmio && io.sentFastUop)
  io.out.bits.uop.cf.exceptionVec := excep

  // For timing reasons, sometimes we can not let
  // io.out.bits.miss := s2_cache_miss && !s2_exception && !fullForward
  // We use io.dataForwarded instead. It means the forward logic has prepared all the data needed,
  // and the dcache query is no longer needed.
  // Such insts will be written back from the load queue.
  io.dataForwarded := s2_cache_miss && fullForward && !s2_exception && !s2_forward_fail
  // io.out.bits.forwardX will be sent to lq
  io.out.bits.forwardMask := forwardMask
  // data retrieved from the dcache is also included in io.out.bits.forwardData
  io.out.bits.forwardData := rdataVec

  io.in.ready := io.out.ready || !io.in.valid

  // feedback tlb result to RS
  io.rsFeedback.valid := io.in.valid
  when (io.in.bits.isSoftPrefetch) {
    io.rsFeedback.bits.hit := (!s2_tlb_miss && (!s2_cache_replay || s2_mmio || s2_exception))
  }.otherwise {
    io.rsFeedback.bits.hit := !s2_tlb_miss && (!s2_cache_replay || s2_mmio || s2_exception || fullForward) && !s2_data_invalid
  }
  io.rsFeedback.bits.rsIdx := io.in.bits.rsIdx
  io.rsFeedback.bits.flushState := io.in.bits.ptwBack
  io.rsFeedback.bits.sourceType := Mux(s2_tlb_miss, RSFeedbackType.tlbMiss,
    Mux(s2_cache_replay,
      RSFeedbackType.mshrFull,
      RSFeedbackType.dataInvalid
    )
  )
  io.rsFeedback.bits.dataInvalidSqIdx.value := io.dataInvalidSqIdx
  io.rsFeedback.bits.dataInvalidSqIdx.flag := DontCare

  // s2_cache_replay is quite slow to generate, send it separately to LQ
  io.needReplayFromRS := s2_cache_replay && !fullForward

  // fast load to load forward
  io.fastpath.valid := io.in.valid // for debug only
  io.fastpath.data := rdata // raw data


  XSDebug(io.out.fire(), "[DCACHE LOAD RESP] pc %x rdata %x <- D$ %x + fwd %x(%b)\n",
    s2_uop.cf.pc, rdataPartialLoad, io.dcacheResp.bits.data,
    forwardData.asUInt, forwardMask.asUInt
  )

  XSPerfAccumulate("in_valid", io.in.valid)
  XSPerfAccumulate("in_fire", io.in.fire)
  XSPerfAccumulate("in_fire_first_issue", io.in.fire && io.in.bits.isFirstIssue)
  XSPerfAccumulate("dcache_miss", io.in.fire && s2_cache_miss)
  XSPerfAccumulate("dcache_miss_first_issue", io.in.fire && s2_cache_miss && io.in.bits.isFirstIssue)
  XSPerfAccumulate("full_forward", io.in.valid && fullForward)
  XSPerfAccumulate("dcache_miss_full_forward", io.in.valid && s2_cache_miss && fullForward)
  XSPerfAccumulate("replay",  io.rsFeedback.valid && !io.rsFeedback.bits.hit)
  XSPerfAccumulate("replay_tlb_miss", io.rsFeedback.valid && !io.rsFeedback.bits.hit && s2_tlb_miss)
  XSPerfAccumulate("replay_cache", io.rsFeedback.valid && !io.rsFeedback.bits.hit && !s2_tlb_miss && s2_cache_replay)
  XSPerfAccumulate("stall_out", io.out.valid && !io.out.ready)
  XSPerfAccumulate("replay_from_fetch_forward", io.out.valid && forwardFailReplay)
  XSPerfAccumulate("replay_from_fetch_load_vio", io.out.valid && ldldVioReplay)
}

class LoadUnit(implicit p: Parameters) extends XSModule with HasLoadHelper {
  val io = IO(new Bundle() {
    val ldin = Flipped(Decoupled(new ExuInput))
    val ldout = Decoupled(new ExuOutput)
    val redirect = Flipped(ValidIO(new Redirect))
    val feedbackSlow = ValidIO(new RSFeedback)
    val feedbackFast = ValidIO(new RSFeedback)
    val rsIdx = Input(UInt(log2Up(IssQueSize).W))
    val isFirstIssue = Input(Bool())
    val dcache = new DCacheLoadIO
    val sbuffer = new LoadForwardQueryIO
    val lsq = new LoadToLsqIO
    val fastUop = ValidIO(new MicroOp) // early wakeup signal generated in load_s1

    val tlb = new TlbRequestIO
    val pmp = Flipped(new PMPRespBundle()) // arrives at the same time as the tlb resp now

    val fastpathOut = Output(new LoadToLoadIO)
    val fastpathIn = Input(Vec(LoadPipelineWidth, new LoadToLoadIO))
    val loadFastMatch = Input(UInt(exuParameters.LduCnt.W))

    val csrCtrl = Flipped(new CustomCSRCtrlIO)
  })

  val load_s0 = Module(new LoadUnit_S0)
  val load_s1 = Module(new LoadUnit_S1)
  val load_s2 = Module(new LoadUnit_S2)

  load_s0.io.in <> io.ldin
  load_s0.io.dtlbReq <> io.tlb.req
  load_s0.io.dcacheReq <> io.dcache.req
  load_s0.io.rsIdx := io.rsIdx
  load_s0.io.isFirstIssue := io.isFirstIssue
  load_s0.io.fastpath := io.fastpathIn
  load_s0.io.loadFastMatch := io.loadFastMatch

  PipelineConnect(load_s0.io.out, load_s1.io.in, true.B, load_s0.io.out.bits.uop.robIdx.needFlush(io.redirect))
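  // PipelineConnect (from utils) inserts the s0/s1 pipeline register: it
  // latches load_s0.io.out into load_s1.io.in when the stage fires and drops
  // the in-flight uop whenever robIdx.needFlush says the redirect flushes it.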

  load_s1.io.dtlbResp <> io.tlb.resp
  io.dcache.s1_paddr <> load_s1.io.dcachePAddr
  io.dcache.s1_kill <> load_s1.io.dcacheKill
  load_s1.io.sbuffer <> io.sbuffer
  load_s1.io.lsq <> io.lsq.forward
  load_s1.io.loadViolationQueryReq <> io.lsq.loadViolationQuery.req
  load_s1.io.dcacheBankConflict <> io.dcache.s1_bank_conflict
  load_s1.io.csrCtrl <> io.csrCtrl

  PipelineConnect(load_s1.io.out, load_s2.io.in, true.B, load_s1.io.out.bits.uop.robIdx.needFlush(io.redirect))

  io.dcache.s2_kill := load_s2.io.dcache_kill || (io.pmp.ld || io.pmp.mmio) // to kill mmio resp which are redirected
  load_s2.io.dcacheResp <> io.dcache.resp
  load_s2.io.pmpResp <> io.pmp
  load_s2.io.lsq.forwardData <> io.lsq.forward.forwardData
  load_s2.io.lsq.forwardMask <> io.lsq.forward.forwardMask
  load_s2.io.lsq.forwardMaskFast <> io.lsq.forward.forwardMaskFast // should not be used in load_s2
  load_s2.io.lsq.dataInvalid <> io.lsq.forward.dataInvalid
  load_s2.io.lsq.matchInvalid <> io.lsq.forward.matchInvalid
  load_s2.io.sbuffer.forwardData <> io.sbuffer.forwardData
  load_s2.io.sbuffer.forwardMask <> io.sbuffer.forwardMask
  load_s2.io.sbuffer.forwardMaskFast <> io.sbuffer.forwardMaskFast // should not be used in load_s2
  load_s2.io.sbuffer.dataInvalid <> io.sbuffer.dataInvalid // always false
  load_s2.io.sbuffer.matchInvalid <> io.sbuffer.matchInvalid
  load_s2.io.dataForwarded <> io.lsq.loadDataForwarded
  load_s2.io.fastpath <> io.fastpathOut
  load_s2.io.dataInvalidSqIdx := io.lsq.forward.dataInvalidSqIdx // provide dataInvalidSqIdx to make wakeup faster
  load_s2.io.loadViolationQueryResp <> io.lsq.loadViolationQuery.resp
  load_s2.io.csrCtrl <> io.csrCtrl
  load_s2.io.sentFastUop := RegEnable(io.fastUop.valid, load_s1.io.out.fire()) // RegNext is also ok
  io.lsq.needReplayFromRS := load_s2.io.needReplayFromRS

  // feedback tlb miss / dcache miss queue full
  io.feedbackSlow.bits := RegNext(load_s2.io.rsFeedback.bits)
  io.feedbackSlow.valid := RegNext(load_s2.io.rsFeedback.valid && !load_s2.io.out.bits.uop.robIdx.needFlush(io.redirect))

  // feedback bank conflict to rs
  io.feedbackFast.bits := load_s1.io.rsFeedback.bits
  io.feedbackFast.valid := load_s1.io.rsFeedback.valid
  // If replay is reported at load_s1, inst will be canceled (will not enter load_s2),
  // in that case:
  // * replay should not be reported twice
  assert(!(RegNext(RegNext(io.feedbackFast.valid)) && io.feedbackSlow.valid))
  // * io.fastUop.valid should not be reported
  assert(!RegNext(io.feedbackFast.valid && io.fastUop.valid))

  // pre-calculate sqIdx mask in s0, then send it to lsq in s1 for forwarding
  val sqIdxMaskReg = RegNext(UIntToMask(load_s0.io.in.bits.uop.sqIdx.value, StoreQueueSize))
  io.lsq.forward.sqIdxMask := sqIdxMaskReg
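  // UIntToMask(v, n) (from utils) builds an n-bit mask with the low v bits
  // set, e.g. UIntToMask(3.U, 8) gives "b00000111".U. It is registered here so
  // that in s1 the store queue can use it (together with the sqIdx flag bit)
  // to select only stores older than this load when searching for forwarding.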

  // // use s2_hit_way to select data received in s1
  // load_s2.io.dcacheResp.bits.data := Mux1H(RegNext(io.dcache.s1_hit_way), RegNext(io.dcache.s1_data))
  // assert(load_s2.io.dcacheResp.bits.data === io.dcache.resp.bits.data)

  io.fastUop.valid := io.dcache.s1_hit_way.orR && // dcache hit
    !io.dcache.s1_disable_fast_wakeup &&  // load fast wakeup should be disabled when dcache data read is not ready
    load_s1.io.in.valid && // valid load request
    !load_s1.io.dcacheKill && // not mmio or tlb miss
    !io.lsq.forward.dataInvalidFast && // forward data is not marked invalid (no pending store data)
    !load_s1.io.needLdVioCheckRedo // load-load violation check: load paddr cam struct hazard
  io.fastUop.bits := load_s1.io.out.bits.uop
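  // fastUop is the speculative early wakeup: it fires in s1, before the load
  // result is produced in s2, and is only raised when the conditions above
  // are expected to guarantee completion (dcache way hit, no kill, no forward
  // hazard), so consumers woken by it should never need a replay.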

  XSDebug(load_s0.io.out.valid,
    p"S0: pc ${Hexadecimal(load_s0.io.out.bits.uop.cf.pc)}, lId ${Hexadecimal(load_s0.io.out.bits.uop.lqIdx.asUInt)}, " +
    p"vaddr ${Hexadecimal(load_s0.io.out.bits.vaddr)}, mask ${Hexadecimal(load_s0.io.out.bits.mask)}\n")
  XSDebug(load_s1.io.out.valid,
    p"S1: pc ${Hexadecimal(load_s1.io.out.bits.uop.cf.pc)}, lId ${Hexadecimal(load_s1.io.out.bits.uop.lqIdx.asUInt)}, tlb_miss ${io.tlb.resp.bits.miss}, " +
    p"paddr ${Hexadecimal(load_s1.io.out.bits.paddr)}, mmio ${load_s1.io.out.bits.mmio}\n")

  // writeback to LSQ
  // The current dcache uses MSHRs
  // Load queue will be updated at s2 for both hit/miss int/fp load
  io.lsq.loadIn.valid := load_s2.io.out.valid
  io.lsq.loadIn.bits := load_s2.io.out.bits

  // write to rob and writeback bus
  val s2_wb_valid = load_s2.io.out.valid && !load_s2.io.out.bits.miss && !load_s2.io.out.bits.mmio

  // Int load, if hit, will be written back at s2
  val hitLoadOut = Wire(Valid(new ExuOutput))
  hitLoadOut.valid := s2_wb_valid
  hitLoadOut.bits.uop := load_s2.io.out.bits.uop
  hitLoadOut.bits.data := load_s2.io.out.bits.data
  hitLoadOut.bits.redirectValid := false.B
  hitLoadOut.bits.redirect := DontCare
  hitLoadOut.bits.debug.isMMIO := load_s2.io.out.bits.mmio
  hitLoadOut.bits.debug.isPerfCnt := false.B
  hitLoadOut.bits.debug.paddr := load_s2.io.out.bits.paddr
  hitLoadOut.bits.debug.vaddr := load_s2.io.out.bits.vaddr
  hitLoadOut.bits.fflags := DontCare

  load_s2.io.out.ready := true.B

  io.ldout.bits := Mux(hitLoadOut.valid, hitLoadOut.bits, io.lsq.ldout.bits)
  io.ldout.valid := hitLoadOut.valid || io.lsq.ldout.valid

  io.lsq.ldout.ready := !hitLoadOut.valid
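  // Writeback arbitration: a load completing in the pipeline (hitLoadOut)
  // always owns the write port; loads finishing later out of the load queue
  // (e.g. after a miss refill or an mmio access) only write back in cycles
  // when no pipeline load is completing, via io.lsq.ldout.ready.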

  val perfinfo = IO(new Bundle(){
    val perfEvents = Output(new PerfEventsBundle(12))
  })

  val perfEvents = Seq(
    ("load_s0_in_fire         ", load_s0.io.in.fire()                                                                                                            ),
    ("load_to_load_forward    ", load_s0.io.loadFastMatch.orR && load_s0.io.in.fire()                                                                            ),
    ("stall_dcache            ", load_s0.io.out.valid && load_s0.io.out.ready && !load_s0.io.dcacheReq.ready                                                     ),
    ("addr_spec_success       ", load_s0.io.out.fire() && load_s0.io.dtlbReq.bits.vaddr(VAddrBits-1, 12) === load_s0.io.in.bits.src(0)(VAddrBits-1, 12)          ),
    ("addr_spec_failed        ", load_s0.io.out.fire() && load_s0.io.dtlbReq.bits.vaddr(VAddrBits-1, 12) =/= load_s0.io.in.bits.src(0)(VAddrBits-1, 12)          ),
    ("load_s1_in_fire         ", load_s1.io.in.fire                                                                                                              ),
    ("load_s1_tlb_miss        ", load_s1.io.in.fire && load_s1.io.dtlbResp.bits.miss                                                                             ),
    ("load_s2_in_fire         ", load_s2.io.in.fire                                                                                                              ),
    ("load_s2_dcache_miss     ", load_s2.io.in.fire && load_s2.io.dcacheResp.bits.miss                                                                           ),
    ("load_s2_replay          ", load_s2.io.rsFeedback.valid && !load_s2.io.rsFeedback.bits.hit                                                                  ),
    ("load_s2_replay_tlb_miss ", load_s2.io.rsFeedback.valid && !load_s2.io.rsFeedback.bits.hit && load_s2.io.in.bits.tlbMiss                                    ),
    ("load_s2_replay_cache    ", load_s2.io.rsFeedback.valid && !load_s2.io.rsFeedback.bits.hit && !load_s2.io.in.bits.tlbMiss && load_s2.io.dcacheResp.bits.miss),
  )

  for (((perf_out,(perf_name,perf)),i) <- perfinfo.perfEvents.perf_events.zip(perfEvents).zipWithIndex) {
    perf_out.incr_step := RegNext(perf)
  }

  when(io.ldout.fire()){
    XSDebug("ldout %x\n", io.ldout.bits.uop.cf.pc)
  }
}