xref: /XiangShan/src/main/scala/xiangshan/mem/lsqueue/LSQWrapper.scala (revision 4b3d9f67355a9945cd5eca46929b89c130c43c26)
package xiangshan.mem

import chisel3._
import chisel3.util._
import utils._
import xiangshan._
import xiangshan.cache._
import xiangshan.cache.{DCacheWordIO, DCacheLineIO, TlbRequestIO, MemoryOpConstants}
import xiangshan.backend.LSUOpType
import xiangshan.mem._
import xiangshan.backend.roq.RoqPtr

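// Exception address query port: the requester provides the lsIdx of the excepting
// memory instruction and whether it is a store; the load or store queue returns
// the virtual address recorded for that entry.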
class ExceptionAddrIO extends XSBundle {
  val lsIdx = Input(new LSIdx)
  val isStore = Input(Bool())
  val vaddr = Output(UInt(VAddrBits.W))
}


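// One load/store queue data entry: virtual and physical address, byte mask and
// data, exception and mmio flags, plus per-byte forwarding state (fwdMask/fwdData)
// that is merged back in when a missed load is refilled.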
class LsqEntry extends XSBundle {
  val vaddr = UInt(VAddrBits.W) // TODO: need opt
  val paddr = UInt(PAddrBits.W)
  val mask = UInt(8.W)
  val data = UInt(XLEN.W)
  val exception = UInt(16.W) // TODO: opt size
  val mmio = Bool()
  val fwdMask = Vec(8, Bool())
  val fwdData = Vec(8, UInt(8.W))
}


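// Data array shared by the load/store queues: `size` entries, `nchannel`
// writeback write ports, one uncache data write port, per-entry refill writes
// driven by a dcache line response, and `nchannel` forward query ports.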
class LSQueueData(size: Int, nchannel: Int) extends XSModule with HasDCacheParameters with HasCircularQueuePtrHelper {
  val io = IO(new Bundle() {
    val wb = Vec(nchannel, new Bundle() {
      val wen = Input(Bool())
      val index = Input(UInt(log2Up(size).W))
      val wdata = Input(new LsqEntry)
    })
    val uncache = new Bundle() {
      val wen = Input(Bool())
      val index = Input(UInt(log2Up(size).W))
      val wdata = Input(UInt(XLEN.W))
    }
    val refill = new Bundle() {
      val wen = Input(Vec(size, Bool()))
      val dcache = Input(new DCacheLineResp)
    }
    val needForward = Input(Vec(nchannel, Vec(2, UInt(size.W))))
    val forward = Vec(nchannel, Flipped(new LoadForwardQueryIO))
    val rdata = Output(Vec(size, new LsqEntry))

    // val debug = new Bundle() {
    //   val debug_data = Vec(LoadQueueSize, new LsqEntry)
    // }

    def wbWrite(channel: Int, index: UInt, wdata: LsqEntry): Unit = {
      require(channel < nchannel && channel >= 0)
      // need extra "this.wb(channel).wen := true.B"
      this.wb(channel).index := index
      this.wb(channel).wdata := wdata
    }
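    // Typical use from the enclosing queue (illustrative sketch, names are not
    // from this file):
    //   dataModule.io.wbWrite(channel, wbIndex, wbEntry)
    //   dataModule.io.wb(channel).wen := true.B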

    def uncacheWrite(index: UInt, wdata: UInt): Unit = {
      // need extra "this.uncache.wen := true.B"
      this.uncache.index := index
      this.uncache.wdata := wdata
    }

    def forwardQuery(channel: Int, paddr: UInt, needForward1: Data, needForward2: Data): Unit = {
      this.needForward(channel)(0) := needForward1
      this.needForward(channel)(1) := needForward2
      this.forward(channel).paddr := paddr
    }

    // def refillWrite(ldIdx: Int): Unit = {
    // }
    // use "this.refill.wen(ldIdx) := true.B" instead
  })

  io := DontCare

  val data = Reg(Vec(size, new LsqEntry))

  // writeback to lq/sq
  (0 until nchannel).map(i => {
    when(io.wb(i).wen){
      data(io.wb(i).index) := io.wb(i).wdata
    }
  })

  when(io.uncache.wen){
    data(io.uncache.index).data := io.uncache.wdata
  }

  // refill missed load
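  // mergeRefillData rebuilds the 64-bit entry data byte by byte: bytes already
  // obtained through forwarding (fwdMask(i) set) keep the forwarded value, the
  // remaining bytes are taken from the refilled line.
  // e.g. fwdMask = 0b00001111: bytes 0-3 come from fwd, bytes 4-7 from refill.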
  def mergeRefillData(refill: UInt, fwd: UInt, fwdMask: UInt): UInt = {
    val res = Wire(Vec(8, UInt(8.W)))
    (0 until 8).foreach(i => {
      res(i) := Mux(fwdMask(i), fwd(8 * (i + 1) - 1, 8 * i), refill(8 * (i + 1) - 1, 8 * i))
    })
    res.asUInt
  }

  // split dcache result into words
  val words = VecInit((0 until blockWords) map { i =>
    io.refill.dcache.data(DataBits * (i + 1) - 1, DataBits * i)
  })


  (0 until size).map(i => {
    when(io.refill.wen(i)){
      val refillData = words(get_word(data(i).paddr))
      data(i).data := mergeRefillData(refillData, data(i).fwdData.asUInt, data(i).fwdMask.asUInt)
      XSDebug("miss resp: pos %d addr %x data %x + %x(%b)\n", i.U, data(i).paddr, refillData, data(i).fwdData.asUInt, data(i).fwdMask.asUInt)
    }
  })

  // forwarding
  // Comparing ringBufferTail (deqPtr) with forward.sqIdx, there are two cases:
  // (1) if they have the same flag, we need to check range(tail, sqIdx)
  // (2) if they have different flags, we need to check range(tail, size) and range(0, sqIdx)
  // Forward1: Mux(same_flag, range(tail, sqIdx), range(tail, size))
  // Forward2: Mux(same_flag, 0.U,                range(0, sqIdx))
  // i.e. forward1 covers the entries that share deqPtr's flag bit and forward2 covers the rest
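  // e.g. with size = 8, tail = 6, sqIdx = 2 and different flag bits:
  //   forward1 covers entries 6..7 (range(tail, size)),
  //   forward2 covers entries 0..1 (range(0, sqIdx))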

  // an entry with a larger index should have higher priority since its data is younger
  (0 until nchannel).map(i => {

    val forwardMask1 = WireInit(VecInit(Seq.fill(8)(false.B)))
    val forwardData1 = WireInit(VecInit(Seq.fill(8)(0.U(8.W))))
    val forwardMask2 = WireInit(VecInit(Seq.fill(8)(false.B)))
    val forwardData2 = WireInit(VecInit(Seq.fill(8)(0.U(8.W))))

    for (j <- 0 until size) {
      val needCheck = io.forward(i).paddr(PAddrBits - 1, 3) === data(j).paddr(PAddrBits - 1, 3)
      (0 until XLEN / 8).foreach(k => {
        when (needCheck && data(j).mask(k)) {
          when (io.needForward(i)(0)(j)) {
            forwardMask1(k) := true.B
            forwardData1(k) := data(j).data(8 * (k + 1) - 1, 8 * k)
          }
          when (io.needForward(i)(1)(j)) {
            forwardMask2(k) := true.B
            forwardData2(k) := data(j).data(8 * (k + 1) - 1, 8 * k)
          }
          XSDebug(io.needForward(i)(0)(j) || io.needForward(i)(1)(j),
            p"forwarding $k-th byte ${Hexadecimal(data(j).data(8 * (k + 1) - 1, 8 * k))} " +
            p"from ptr $j\n")
        }
      })
    }

    // merge forward lookup results
    // forward2 is younger than forward1 and should have higher priority
    (0 until XLEN / 8).map(k => {
      io.forward(i).forwardMask(k) := forwardMask1(k) || forwardMask2(k)
      io.forward(i).forwardData(k) := Mux(forwardMask2(k), forwardData2(k), forwardData1(k))
    })
  })

  // data read
  io.rdata := data
  // io.debug.debug_data := data
}

// inflight miss block reqs
class InflightBlockInfo extends XSBundle {
  val block_addr = UInt(PAddrBits.W)
  val valid = Bool()
}

// Load / Store Queue Wrapper for XiangShan Out of Order LSU
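// It dispatches enqueue requests to the load/store queues, merges their forward
// responses, selects the exception vaddr from the proper queue, and arbitrates
// the single uncache (MMIO) port between them.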
class LsqWrappper extends XSModule with HasDCacheParameters {
  val io = IO(new Bundle() {
    val enq = new Bundle() {
      val canAccept = Output(Bool())
      val req = Vec(RenameWidth, Flipped(ValidIO(new MicroOp)))
      val resp = Vec(RenameWidth, Output(new LSIdx))
    }
    val brqRedirect = Input(Valid(new Redirect))
    val loadIn = Vec(LoadPipelineWidth, Flipped(Valid(new LsPipelineBundle)))
    val storeIn = Vec(StorePipelineWidth, Flipped(Valid(new LsPipelineBundle)))
    val sbuffer = Vec(StorePipelineWidth, Decoupled(new DCacheWordReq))
    val ldout = Vec(2, DecoupledIO(new ExuOutput)) // writeback load
    val stout = Vec(2, DecoupledIO(new ExuOutput)) // writeback store
    val forward = Vec(LoadPipelineWidth, Flipped(new LoadForwardQueryIO))
    val commits = Flipped(Vec(CommitWidth, Valid(new RoqCommit)))
    val rollback = Output(Valid(new Redirect))
    val dcache = new DCacheLineIO
    val uncache = new DCacheWordIO
    val roqDeqPtr = Input(new RoqPtr)
    val oldestStore = Output(Valid(new RoqPtr))
    val exceptionAddr = new ExceptionAddrIO
  })

  val loadQueue = Module(new LoadQueue)
  val storeQueue = Module(new StoreQueue)

  // io.enq logic
  // LSQ: send out canAccept when both load queue and store queue are ready
  // Dispatch: send instructions to LSQ only when they are ready
  io.enq.canAccept := loadQueue.io.enq.canAccept && storeQueue.io.enq.canAccept
  for (i <- 0 until RenameWidth) {
    val isStore = CommitType.lsInstIsStore(io.enq.req(i).bits.ctrl.commitType)
    loadQueue.io.enq.req(i).valid  := !isStore && io.enq.req(i).valid
    storeQueue.io.enq.req(i).valid :=  isStore && io.enq.req(i).valid
    loadQueue.io.enq.req(i).bits  := io.enq.req(i).bits
    storeQueue.io.enq.req(i).bits := io.enq.req(i).bits
    io.enq.resp(i).lqIdx := loadQueue.io.enq.resp(i)
    io.enq.resp(i).sqIdx := storeQueue.io.enq.resp(i)

    XSError(!io.enq.canAccept && io.enq.req(i).valid, "should not enqueue LSQ when canAccept is not asserted")
  }

  // load queue wiring
  loadQueue.io.brqRedirect <> io.brqRedirect
  loadQueue.io.loadIn <> io.loadIn
  loadQueue.io.storeIn <> io.storeIn
  loadQueue.io.ldout <> io.ldout
  loadQueue.io.commits <> io.commits
  loadQueue.io.rollback <> io.rollback
  loadQueue.io.dcache <> io.dcache
  loadQueue.io.roqDeqPtr <> io.roqDeqPtr
  loadQueue.io.exceptionAddr.lsIdx := io.exceptionAddr.lsIdx
  loadQueue.io.exceptionAddr.isStore := DontCare

  // store queue wiring
  // storeQueue.io <> DontCare
  storeQueue.io.brqRedirect <> io.brqRedirect
  storeQueue.io.storeIn <> io.storeIn
  storeQueue.io.sbuffer <> io.sbuffer
  storeQueue.io.stout <> io.stout
  storeQueue.io.commits <> io.commits
  storeQueue.io.roqDeqPtr <> io.roqDeqPtr
  storeQueue.io.oldestStore <> io.oldestStore
  storeQueue.io.exceptionAddr.lsIdx := io.exceptionAddr.lsIdx
  storeQueue.io.exceptionAddr.isStore := DontCare

  loadQueue.io.forward <> io.forward
  storeQueue.io.forward <> io.forward // overlap forwardMask & forwardData, DO NOT CHANGE SEQUENCE
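  // Both queues are bulk-connected to the same forward port; which connection
  // provides the final forwardMask/forwardData depends on Chisel's last-connect
  // semantics, hence the store queue connection must stay after the load queue's.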

  io.exceptionAddr.vaddr := Mux(io.exceptionAddr.isStore, storeQueue.io.exceptionAddr.vaddr, loadQueue.io.exceptionAddr.vaddr)

  // naive uncache arbiter
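  // At most one of the two queues issues an uncache request at a time (see the
  // asserts below); uncacheState remembers which queue owns the in-flight request
  // so that the response can be routed back to it.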
  val s_idle :: s_load :: s_store :: Nil = Enum(3)
  val uncacheState = RegInit(s_idle)

  switch(uncacheState){
    is(s_idle){
      when(io.uncache.req.fire()){
        uncacheState := Mux(loadQueue.io.uncache.req.valid, s_load, s_store)
      }
    }
    is(s_load){
      when(io.uncache.resp.fire()){
        uncacheState := s_idle
      }
    }
    is(s_store){
      when(io.uncache.resp.fire()){
        uncacheState := s_idle
      }
    }
  }

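  // Route the request combinationally to whichever queue currently asserts
  // req.valid, and route the response according to the registered uncacheState.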
  loadQueue.io.uncache := DontCare
  storeQueue.io.uncache := DontCare
  loadQueue.io.uncache.resp.valid := false.B
  storeQueue.io.uncache.resp.valid := false.B
  when(loadQueue.io.uncache.req.valid){
    io.uncache.req <> loadQueue.io.uncache.req
  }.otherwise{
    io.uncache.req <> storeQueue.io.uncache.req
  }
  when(uncacheState === s_load){
    io.uncache.resp <> loadQueue.io.uncache.resp
  }.otherwise{
    io.uncache.resp <> storeQueue.io.uncache.resp
  }

  assert(!(loadQueue.io.uncache.req.valid && storeQueue.io.uncache.req.valid))
  assert(!(loadQueue.io.uncache.resp.valid && storeQueue.io.uncache.resp.valid))
  assert(!((loadQueue.io.uncache.resp.valid || storeQueue.io.uncache.resp.valid) && uncacheState === s_idle))

}