xref: /XiangShan/src/main/scala/xiangshan/mem/pipeline/LoadUnit.scala (revision 3802dba502b91d813c1e563035b876c4e6288166)
package xiangshan.mem

import chisel3._
import chisel3.util._
import utils._
import xiangshan._
import xiangshan.backend.decode.ImmUnion
import xiangshan.cache._
import xiangshan.backend.LSUOpType

class LoadToLsqIO extends XSBundle {
  val loadIn = ValidIO(new LsPipelineBundle)
  val ldout = Flipped(DecoupledIO(new ExuOutput))
  val loadDataForwarded = Output(Bool())
  val forward = new LoadForwardQueryIO
}
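
// The load pipeline is split into three stages, wired together by the
// top-level LoadUnit at the bottom of this file:
//   S0: compute the virtual address and issue requests to the DTLB and DCache
//   S1: take the TLB response, hand the physical address to the DCache, and
//       query the sbuffer and LSQ for store-to-load forwarding
//   S2: take the DCache response, merge in forwarded bytes, and select the
//       final load data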

// Load Pipeline Stage 0
// Generate the virtual address and use it to query the DCache and DTLB
class LoadUnit_S0 extends XSModule {
  val io = IO(new Bundle() {
    val in = Flipped(Decoupled(new ExuInput))
    val out = Decoupled(new LsPipelineBundle)
    val dtlbReq = DecoupledIO(new TlbReq)
    val dcacheReq = DecoupledIO(new DCacheWordReq)
  })

  val s0_uop = io.in.bits.uop
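  // Effective address: rs1 plus the sign-extended 12-bit I-type immediate,
  // truncated to VAddrBits.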
  val s0_vaddr = io.in.bits.src1 + SignExt(s0_uop.ctrl.imm(11,0), VAddrBits)
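  // genWmask (a helper in the mem package) derives the byte-enable mask for
  // the access from the size bits in fuOpType(1,0) and the low address bits.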
  val s0_mask = genWmask(s0_vaddr, s0_uop.ctrl.fuOpType(1,0))

  // query DTLB
  io.dtlbReq.valid := io.in.valid
  io.dtlbReq.bits.vaddr := s0_vaddr
  io.dtlbReq.bits.cmd := TlbCmd.read
  io.dtlbReq.bits.roqIdx := s0_uop.roqIdx
  io.dtlbReq.bits.debug.pc := s0_uop.cf.pc
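  // The DTLB and DCache are probed in parallel with the virtual address;
  // S1 supplies the translated physical address to the DCache (or kills the
  // access).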

  // query DCache
  io.dcacheReq.valid := io.in.valid
  io.dcacheReq.bits.cmd  := MemoryOpConstants.M_XRD
  io.dcacheReq.bits.addr := s0_vaddr
  io.dcacheReq.bits.mask := s0_mask
  io.dcacheReq.bits.data := DontCare

  // TODO: update cache meta
  io.dcacheReq.bits.meta.id       := DontCare
  io.dcacheReq.bits.meta.vaddr    := s0_vaddr
  io.dcacheReq.bits.meta.paddr    := DontCare
  io.dcacheReq.bits.meta.uop      := s0_uop
  io.dcacheReq.bits.meta.mmio     := false.B
  io.dcacheReq.bits.meta.tlb_miss := false.B
  io.dcacheReq.bits.meta.mask     := s0_mask
  io.dcacheReq.bits.meta.replay   := false.B
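
  // Alignment check: the low address bits must be zero for the access size
  // (byte/half/word/double). A misaligned load still issues, but
  // loadAddrMisaligned is marked in the uop's exception vector below.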
  val addrAligned = LookupTree(s0_uop.ctrl.fuOpType(1, 0), List(
    "b00".U   -> true.B,                   //b
    "b01".U   -> (s0_vaddr(0)    === 0.U), //h
    "b10".U   -> (s0_vaddr(1, 0) === 0.U), //w
    "b11".U   -> (s0_vaddr(2, 0) === 0.U)  //d
  ))
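
  // S0 advances only when the DCache has accepted the request, so a uop
  // never moves to S1 without its cache access in flight.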
  io.out.valid := io.in.valid && io.dcacheReq.ready

  io.out.bits := DontCare
  io.out.bits.vaddr := s0_vaddr
  io.out.bits.mask := s0_mask
  io.out.bits.uop := s0_uop
  io.out.bits.uop.cf.exceptionVec(loadAddrMisaligned) := !addrAligned

  io.in.ready := !io.in.valid || (io.out.ready && io.dcacheReq.ready)

  XSDebug(io.dcacheReq.fire(),
    p"[DCACHE LOAD REQ] pc ${Hexadecimal(s0_uop.cf.pc)}, vaddr ${Hexadecimal(s0_vaddr)}\n"
  )
}

// Load Pipeline Stage 1
// Take the TLB response and send the physical address to the DCache
class LoadUnit_S1 extends XSModule {
  val io = IO(new Bundle() {
    val in = Flipped(Decoupled(new LsPipelineBundle))
    val out = Decoupled(new LsPipelineBundle)
    val dtlbResp = Flipped(DecoupledIO(new TlbResp))
    val dcachePAddr = Output(UInt(PAddrBits.W))
    val dcacheKill = Output(Bool())
    val sbuffer = new LoadForwardQueryIO
    val lsq = new LoadForwardQueryIO
  })

  val s1_uop = io.in.bits.uop
  val s1_paddr = io.dtlbResp.bits.paddr
  val s1_exception = selectLoad(io.out.bits.uop.cf.exceptionVec, false).asUInt.orR
  val s1_tlb_miss = io.dtlbResp.bits.miss
  val s1_mmio = !s1_tlb_miss && io.dtlbResp.bits.mmio
  val s1_mask = io.in.bits.mask

  io.out.bits := io.in.bits // forwardMask/forwardData are filled in later by the forwarding logic

  io.dtlbResp.ready := true.B

  // TODO: PMA check
  io.dcachePAddr := s1_paddr
  io.dcacheKill := s1_tlb_miss || s1_exception || s1_mmio
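  // The S0 cache access is killed when it cannot produce useful data:
  // TLB miss, pending exception, or an MMIO address.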

  // load forward query datapath
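  // The sbuffer and the LSQ are probed with the same (paddr, mask); their
  // byte-granular answers are merged in S2, with LSQ data taking priority.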
  io.sbuffer.valid := io.in.valid
  io.sbuffer.paddr := s1_paddr
  io.sbuffer.uop := s1_uop
  io.sbuffer.sqIdx := s1_uop.sqIdx
  io.sbuffer.mask := s1_mask
  io.sbuffer.pc := s1_uop.cf.pc // FIXME: remove it

  io.lsq.valid := io.in.valid
  io.lsq.paddr := s1_paddr
  io.lsq.uop := s1_uop
  io.lsq.sqIdx := s1_uop.sqIdx
  io.lsq.mask := s1_mask
  io.lsq.pc := s1_uop.cf.pc // FIXME: remove it

  io.out.valid := io.in.valid // && !s1_tlb_miss
  io.out.bits.paddr := s1_paddr
  io.out.bits.mmio := s1_mmio && !s1_exception
  io.out.bits.tlbMiss := s1_tlb_miss
  io.out.bits.uop.cf.exceptionVec(loadPageFault) := io.dtlbResp.bits.excp.pf.ld
  io.out.bits.uop.cf.exceptionVec(loadAccessFault) := io.dtlbResp.bits.excp.af.ld

  io.in.ready := !io.in.valid || io.out.ready
}

// Load Pipeline Stage 2
// Take the DCache response, merge forwarded data, and select the load result
class LoadUnit_S2 extends XSModule with HasLoadHelper {
  val io = IO(new Bundle() {
    val in = Flipped(Decoupled(new LsPipelineBundle))
    val out = Decoupled(new LsPipelineBundle)
    val tlbFeedback = ValidIO(new TlbFeedback)
    val dcacheResp = Flipped(DecoupledIO(new DCacheWordResp))
    val lsq = new LoadForwardQueryIO
    val sbuffer = new LoadForwardQueryIO
    val dataForwarded = Output(Bool())
  })

  val s2_uop = io.in.bits.uop
  val s2_mask = io.in.bits.mask
  val s2_paddr = io.in.bits.paddr
  val s2_tlb_miss = io.in.bits.tlbMiss
  val s2_mmio = io.in.bits.mmio
  val s2_exception = selectLoad(io.in.bits.uop.cf.exceptionVec, false).asUInt.orR
  val s2_cache_miss = io.dcacheResp.bits.miss
  val s2_cache_replay = io.dcacheResp.bits.replay

  io.dcacheResp.ready := true.B
  val dcacheShouldResp = !(s2_tlb_miss || s2_exception || s2_mmio)
  assert(!(io.in.valid && dcacheShouldResp && !io.dcacheResp.valid), "DCache response got lost")

  // feedback tlb result to RS
  io.tlbFeedback.valid := io.in.valid
  io.tlbFeedback.bits.hit := !s2_tlb_miss && (!s2_cache_replay || s2_mmio)
  io.tlbFeedback.bits.roqIdx := s2_uop.roqIdx
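  // hit === false asks the reservation station to reschedule the uop:
  // either the TLB missed, or a non-MMIO access got a DCache replay request
  // (the cache could not handle it this time).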

  val forwardMask = io.out.bits.forwardMask
  val forwardData = io.out.bits.forwardData
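  // fullForward: every byte requested by s2_mask was supplied by forwarded
  // store data, so the load can complete without the DCache result.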
  val fullForward = (~forwardMask.asUInt & s2_mask) === 0.U

  XSDebug(io.out.fire(), "[FWD LOAD RESP] pc %x fwd %x(%b) + %x(%b)\n",
    s2_uop.cf.pc,
    io.lsq.forwardData.asUInt, io.lsq.forwardMask.asUInt,
    io.in.bits.forwardData.asUInt, io.in.bits.forwardMask.asUInt
  )

  // data merge
  val rdata = VecInit((0 until XLEN / 8).map(j =>
    Mux(forwardMask(j), forwardData(j), io.dcacheResp.bits.data(8*(j+1)-1, 8*j)))).asUInt
  val rdataSel = LookupTree(s2_paddr(2, 0), List(
    "b000".U -> rdata(63, 0),
    "b001".U -> rdata(63, 8),
    "b010".U -> rdata(63, 16),
    "b011".U -> rdata(63, 24),
    "b100".U -> rdata(63, 32),
    "b101".U -> rdata(63, 40),
    "b110".U -> rdata(63, 48),
    "b111".U -> rdata(63, 56)
  ))
  val rdataPartialLoad = rdataHelper(s2_uop, rdataSel)
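  // rdata merges per byte: forwarded store bytes override DCache bytes.
  // rdataSel then shifts the addressed byte down to bit 0, e.g. a load from
  // a paddr ending in 0b101 selects rdata(63, 40); rdataHelper (from
  // HasLoadHelper) extends the result per the uop's sign/size encoding.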

  // TODO: ECC check

  io.out.valid := io.in.valid && !s2_tlb_miss && (!s2_cache_replay || s2_mmio)
  // The instruction will be canceled in the store queue / LSQ, so out.valid
  // here does not need to account for redirect flushes.
  io.out.bits := io.in.bits
  io.out.bits.data := rdataPartialLoad
  // when an exception occurs, mark the load as not-miss and let it write
  // back to the roq (via the int port)
  io.out.bits.miss := s2_cache_miss && !s2_exception
  io.out.bits.uop.ctrl.fpWen := io.in.bits.uop.ctrl.fpWen && !s2_exception
  io.out.bits.mmio := s2_mmio

  // For timing reasons we cannot set
  //   io.out.bits.miss := s2_cache_miss && !s2_exception && !fullForward
  // here. Instead we raise io.dataForwarded: the forwarding logic has
  // already supplied every byte the load needs, so the dcache result is no
  // longer required. Such a load is written back from the load queue.
  io.dataForwarded := s2_cache_miss && fullForward && !s2_exception

  io.in.ready := io.out.ready || !io.in.valid

  // merge forward results
  // lsq has higher priority than sbuffer
  io.lsq := DontCare
  io.sbuffer := DontCare
  // generate XLEN/8 Muxes
  for (i <- 0 until XLEN / 8) {
    when (io.sbuffer.forwardMask(i)) {
      io.out.bits.forwardMask(i) := true.B
      io.out.bits.forwardData(i) := io.sbuffer.forwardData(i)
    }
    when (io.lsq.forwardMask(i)) {
      io.out.bits.forwardMask(i) := true.B
      io.out.bits.forwardData(i) := io.lsq.forwardData(i)
    }
  }
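  // The LSQ block is written last, so by Chisel's last-connect semantics it
  // overrides the sbuffer for any byte both can forward.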

  XSDebug(io.out.fire(), "[DCACHE LOAD RESP] pc %x rdata %x <- D$ %x + fwd %x(%b)\n",
    s2_uop.cf.pc, rdataPartialLoad, io.dcacheResp.bits.data,
    io.out.bits.forwardData.asUInt, io.out.bits.forwardMask.asUInt
  )
}

class LoadUnit extends XSModule with HasLoadHelper {
  val io = IO(new Bundle() {
    val ldin = Flipped(Decoupled(new ExuInput))
    val ldout = Decoupled(new ExuOutput)
    val fpout = Decoupled(new ExuOutput)
    val redirect = Flipped(ValidIO(new Redirect))
    val tlbFeedback = ValidIO(new TlbFeedback)
    val dcache = new DCacheLoadIO
    val dtlb = new TlbRequestIO()
    val sbuffer = new LoadForwardQueryIO
    val lsq = new LoadToLsqIO
  })

  val load_s0 = Module(new LoadUnit_S0)
  val load_s1 = Module(new LoadUnit_S1)
  val load_s2 = Module(new LoadUnit_S2)

  load_s0.io.in <> io.ldin
  load_s0.io.dtlbReq <> io.dtlb.req
  load_s0.io.dcacheReq <> io.dcache.req
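
  // PipelineConnect (from utils) places the pipeline register between two
  // stages and flushes any in-flight uop whose roqIdx needFlush under the
  // incoming redirect.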
  PipelineConnect(load_s0.io.out, load_s1.io.in, true.B, load_s0.io.out.bits.uop.roqIdx.needFlush(io.redirect))

  load_s1.io.dtlbResp <> io.dtlb.resp
  io.dcache.s1_paddr <> load_s1.io.dcachePAddr
  io.dcache.s1_kill <> load_s1.io.dcacheKill
  load_s1.io.sbuffer <> io.sbuffer
  load_s1.io.lsq <> io.lsq.forward

  PipelineConnect(load_s1.io.out, load_s2.io.in, true.B, load_s1.io.out.bits.uop.roqIdx.needFlush(io.redirect))

  load_s2.io.tlbFeedback <> io.tlbFeedback
  load_s2.io.dcacheResp <> io.dcache.resp
  load_s2.io.lsq.forwardData <> io.lsq.forward.forwardData
  load_s2.io.lsq.forwardMask <> io.lsq.forward.forwardMask
  load_s2.io.sbuffer.forwardData <> io.sbuffer.forwardData
  load_s2.io.sbuffer.forwardMask <> io.sbuffer.forwardMask
  load_s2.io.dataForwarded <> io.lsq.loadDataForwarded

  XSDebug(load_s0.io.out.valid,
    p"S0: pc ${Hexadecimal(load_s0.io.out.bits.uop.cf.pc)}, lId ${Hexadecimal(load_s0.io.out.bits.uop.lqIdx.asUInt)}, " +
    p"vaddr ${Hexadecimal(load_s0.io.out.bits.vaddr)}, mask ${Hexadecimal(load_s0.io.out.bits.mask)}\n")
  XSDebug(load_s1.io.out.valid,
    p"S1: pc ${Hexadecimal(load_s1.io.out.bits.uop.cf.pc)}, lId ${Hexadecimal(load_s1.io.out.bits.uop.lqIdx.asUInt)}, tlb_miss ${io.dtlb.resp.bits.miss}, " +
    p"paddr ${Hexadecimal(load_s1.io.out.bits.paddr)}, mmio ${load_s1.io.out.bits.mmio}\n")

  // writeback to LSQ
  // The current dcache uses MSHRs; the load queue is updated at s2 for both
  // hit and miss, int and fp loads
  io.lsq.loadIn.valid := load_s2.io.out.valid
  io.lsq.loadIn.bits := load_s2.io.out.bits

  // write to the roq and the writeback bus
  val s2_wb_valid = load_s2.io.out.valid && !load_s2.io.out.bits.miss
  val refillFpLoad = io.lsq.ldout.bits.uop.ctrl.fpWen
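  // refillFpLoad: the writeback currently offered by the LSQ (typically a
  // missed load completed by a refill) targets the fp register file, so it
  // must take the fpout path below.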

  // An int load that hits is written back at s2
  val intHitLoadOut = Wire(Valid(new ExuOutput))
  intHitLoadOut.valid := s2_wb_valid && !load_s2.io.out.bits.uop.ctrl.fpWen
  intHitLoadOut.bits.uop := load_s2.io.out.bits.uop
  intHitLoadOut.bits.data := load_s2.io.out.bits.data
  intHitLoadOut.bits.redirectValid := false.B
  intHitLoadOut.bits.redirect := DontCare
  intHitLoadOut.bits.brUpdate := DontCare
  intHitLoadOut.bits.debug.isMMIO := load_s2.io.out.bits.mmio
  intHitLoadOut.bits.debug.isPerfCnt := false.B
  intHitLoadOut.bits.fflags := DontCare

  load_s2.io.out.ready := true.B

  io.ldout.bits := Mux(intHitLoadOut.valid, intHitLoadOut.bits, io.lsq.ldout.bits)
  io.ldout.valid := intHitLoadOut.valid || io.lsq.ldout.valid && !refillFpLoad
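  // Hit loads writing back from s2 have priority; the LSQ's writeback is
  // accepted only when the corresponding hit path is idle, which
  // io.lsq.ldout.ready enforces below.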

  // An fp load that hits is captured in a register at s2, recoded at s3,
  // and written back at s4
  val fpHitLoadOut = Wire(Valid(new ExuOutput))
  fpHitLoadOut.valid := s2_wb_valid && load_s2.io.out.bits.uop.ctrl.fpWen
  fpHitLoadOut.bits := intHitLoadOut.bits

  val fpLoadUnRecodedReg = Reg(Valid(new ExuOutput))
  fpLoadUnRecodedReg.valid := fpHitLoadOut.valid || io.lsq.ldout.valid && refillFpLoad
  when(fpHitLoadOut.valid || io.lsq.ldout.valid && refillFpLoad){
    fpLoadUnRecodedReg.bits := Mux(fpHitLoadOut.valid, fpHitLoadOut.bits, io.lsq.ldout.bits)
  }

  val fpLoadRecodedReg = Reg(Valid(new ExuOutput))
  fpLoadRecodedReg.valid := fpLoadUnRecodedReg.valid
  when(fpLoadUnRecodedReg.valid){
    fpLoadRecodedReg.bits := fpLoadUnRecodedReg.bits
    fpLoadRecodedReg.bits.data := fpRdataHelper(fpLoadUnRecodedReg.bits.uop, fpLoadUnRecodedReg.bits.data) // recode
  }
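  // fpRdataHelper (from HasLoadHelper) recodes the raw loaded bits into the
  // internal floating-point format used downstream; which conversion applies
  // is presumably selected by the uop's load width.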

  io.fpout.bits := fpLoadRecodedReg.bits
  io.fpout.valid := fpLoadRecodedReg.valid

  io.lsq.ldout.ready := Mux(refillFpLoad, !fpHitLoadOut.valid, !intHitLoadOut.valid)

  when(io.ldout.fire()){
    XSDebug("ldout %x\n", io.ldout.bits.uop.cf.pc)
  }

  when(io.fpout.fire()){
    XSDebug("fpout %x\n", io.fpout.bits.uop.cf.pc)
  }
}