/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.backend

import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.util._
import utils._
import xiangshan._
import xiangshan.backend.decode.{DecodeStage, ImmUnion}
import xiangshan.backend.dispatch.{Dispatch, DispatchQueue}
import xiangshan.backend.rename.{Rename, RenameTableWrapper}
import xiangshan.backend.rob.{Rob, RobCSRIO, RobLsqIO}
import xiangshan.backend.fu.PFEvent
import xiangshan.frontend.{FtqPtr, FtqRead}
import xiangshan.mem.LsqEnqIO
import difftest._

class CtrlToFtqIO(implicit p: Parameters) extends XSBundle {
  val rob_commits = Vec(CommitWidth, Valid(new RobCommitInfo))
  val stage2Redirect = Valid(new Redirect)
  val stage3Redirect = ValidIO(new Redirect)
  val robFlush = ValidIO(new Redirect)
}

class RedirectGenerator(implicit p: Parameters) extends XSModule
  with HasCircularQueuePtrHelper {
  val numRedirect = exuParameters.JmpCnt + exuParameters.AluCnt
  val io = IO(new Bundle() {
    val hartId = Input(UInt(8.W))
    val exuMispredict = Vec(numRedirect, Flipped(ValidIO(new ExuOutput)))
    val loadReplay = Flipped(ValidIO(new Redirect))
    val flush = Input(Bool())
    val stage1PcRead = Vec(numRedirect + 1, new FtqRead(UInt(VAddrBits.W)))
    val stage2Redirect = ValidIO(new Redirect)
    val stage3Redirect = ValidIO(new Redirect)
    val memPredUpdate = Output(new MemPredUpdateReq)
    val memPredPcRead = new FtqRead(UInt(VAddrBits.W)) // read request sent from stage 2
  })
  /*
        LoadQueue  Jump  ALU0  ALU1  ALU2  ALU3   exception    Stage1
          |         |      |    |     |     |         |
          |============= reg & compare =====|         |       ========
                            |                         |
                            |                         |
                            |                         |        Stage2
                            |                         |
                    redirect (flush backend)          |
                    |                                 |
               === reg ===                            |       ========
                    |                                 |
                    |----- mux (exception first) -----|        Stage3
                            |
                redirect (send to frontend)
   */
  private class Wrapper(val n: Int) extends Bundle {
    val redirect = new Redirect
    val valid = Bool()
    val idx = UInt(log2Up(n).W)
  }
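  // Returns a one-hot vector marking the oldest valid redirect in ROB order;
  // ties are broken in favour of the lower index.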
  def selectOldestRedirect(xs: Seq[Valid[Redirect]]): Vec[Bool] = {
    val compareVec = (0 until xs.length).map(i => (0 until i).map(j => isAfter(xs(j).bits.robIdx, xs(i).bits.robIdx)))
    val resultOnehot = VecInit((0 until xs.length).map(i => Cat((0 until xs.length).map(j =>
      (if (j < i) !xs(j).valid || compareVec(i)(j)
      else if (j == i) xs(i).valid
      else !xs(j).valid || !compareVec(j)(i))
    )).andR))
    resultOnehot
  }

  val redirects = io.exuMispredict.map(_.bits.redirect) :+ io.loadReplay.bits
  val stage1FtqReadPcs =
    (io.stage1PcRead zip redirects).map{ case (r, redirect) =>
      r(redirect.ftqIdx, redirect.ftqOffset)
    }

  def getRedirect(exuOut: Valid[ExuOutput]): ValidIO[Redirect] = {
    val redirect = Wire(Valid(new Redirect))
    redirect.valid := exuOut.valid && exuOut.bits.redirect.cfiUpdate.isMisPred
    redirect.bits := exuOut.bits.redirect
    redirect
  }

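  // Stage 1: collect every candidate redirect (jump/ALU mispredicts plus the load
  // replay request), mask out those already squashed by the in-flight stage-2
  // redirect or a ROB flush, and keep only the oldest survivor.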
  val jumpOut = io.exuMispredict.head
  val allRedirect = VecInit(io.exuMispredict.map(x => getRedirect(x)) :+ io.loadReplay)
  val oldestOneHot = selectOldestRedirect(allRedirect)
  val needFlushVec = VecInit(allRedirect.map(_.bits.robIdx.needFlush(io.stage2Redirect) || io.flush))
  val oldestValid = VecInit(oldestOneHot.zip(needFlushVec).map{ case (v, f) => v && !f }).asUInt.orR
  val oldestExuOutput = Mux1H(io.exuMispredict.indices.map(oldestOneHot), io.exuMispredict)
  val oldestRedirect = Mux1H(oldestOneHot, allRedirect)

  val s1_jumpTarget = RegEnable(jumpOut.bits.redirect.cfiUpdate.target, jumpOut.valid)
  val s1_imm12_reg = RegNext(oldestExuOutput.bits.uop.ctrl.imm(11, 0))
  val s1_pd = RegNext(oldestExuOutput.bits.uop.cf.pd)
  val s1_redirect_bits_reg = RegNext(oldestRedirect.bits)
  val s1_redirect_valid_reg = RegNext(oldestValid)
  val s1_redirect_onehot = RegNext(oldestOneHot)

  // stage1 -> stage2
  io.stage2Redirect.valid := s1_redirect_valid_reg && !io.flush
  io.stage2Redirect.bits := s1_redirect_bits_reg
  io.stage2Redirect.bits.cfiUpdate := DontCare

  val s1_isReplay = s1_redirect_onehot.last
  val s1_isJump = s1_redirect_onehot.head
  val real_pc = Mux1H(s1_redirect_onehot, stage1FtqReadPcs)
  val brTarget = real_pc + SignExt(ImmUnion.B.toImm32(s1_imm12_reg), XLEN)
  val snpc = real_pc + Mux(s1_pd.isRVC, 2.U, 4.U)
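  // Redirect target: a load replay re-executes from its own PC; a taken jump uses the
  // target recorded by the jump unit, a taken branch the target computed from the B-type
  // immediate; everything else falls through to the next sequential PC (snpc).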
  val target = Mux(s1_isReplay,
    real_pc, // replay from itself
    Mux(s1_redirect_bits_reg.cfiUpdate.taken,
      Mux(s1_isJump, s1_jumpTarget, brTarget),
      snpc
    )
  )

  // get pc from ftq
  // valid only if redirect is caused by load violation
  // store_pc is used to update store set
  val store_pc = io.memPredPcRead(s1_redirect_bits_reg.stFtqIdx, s1_redirect_bits_reg.stFtqOffset)

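  // Train the memory dependence predictor on a load replay: the wait-table write
  // address and the store-set load index are both the load PC XOR-folded down to
  // MemPredPCWidth bits, and the store index is derived from store_pc the same way.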
  // update the load violation predictor when a load-violation (replay) redirect fires
  io.memPredUpdate.valid := RegNext(s1_isReplay && s1_redirect_valid_reg, init = false.B)
  // update wait table
  io.memPredUpdate.waddr := RegNext(XORFold(real_pc(VAddrBits-1, 1), MemPredPCWidth))
  io.memPredUpdate.wdata := true.B
  // update store set
  io.memPredUpdate.ldpc := RegNext(XORFold(real_pc(VAddrBits-1, 1), MemPredPCWidth))
  // store_pc is returned by the FTQ one cycle after s1_isReplay is resolved, so stpc needs no extra register here
  io.memPredUpdate.stpc := XORFold(store_pc(VAddrBits-1, 1), MemPredPCWidth)

  val s2_target = RegEnable(target, enable = s1_redirect_valid_reg)
  val s2_pd = RegEnable(s1_pd, enable = s1_redirect_valid_reg)
  val s2_pc = RegEnable(real_pc, enable = s1_redirect_valid_reg)
  val s2_redirect_bits_reg = RegEnable(s1_redirect_bits_reg, enable = s1_redirect_valid_reg)
  val s2_redirect_valid_reg = RegNext(s1_redirect_valid_reg && !io.flush, init = false.B)

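  // stage2 -> stage3: the registered redirect, now carrying its full CfiUpdate payload
  // (pc, pre-decode info, target and taken/mispredict flags), is driven onto
  // io.stage3Redirect for the frontend (CtrlBlock may still override it with a ROB flush).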
  io.stage3Redirect.valid := s2_redirect_valid_reg
  io.stage3Redirect.bits := s2_redirect_bits_reg
  val stage3CfiUpdate = io.stage3Redirect.bits.cfiUpdate
  stage3CfiUpdate.pc := s2_pc
  stage3CfiUpdate.pd := s2_pd
  stage3CfiUpdate.predTaken := s2_redirect_bits_reg.cfiUpdate.predTaken
  stage3CfiUpdate.target := s2_target
  stage3CfiUpdate.taken := s2_redirect_bits_reg.cfiUpdate.taken
  stage3CfiUpdate.isMisPred := s2_redirect_bits_reg.cfiUpdate.isMisPred

  // recover runahead checkpoint if redirect
  if (!env.FPGAPlatform) {
    val runahead_redirect = Module(new DifftestRunaheadRedirectEvent)
    runahead_redirect.io.clock := clock
    runahead_redirect.io.coreid := io.hartId
    runahead_redirect.io.valid := io.stage3Redirect.valid
    runahead_redirect.io.pc := s2_pc // for debug only
    runahead_redirect.io.target_pc := s2_target // for debug only
    runahead_redirect.io.checkpoint_id := io.stage3Redirect.bits.debug_runahead_checkpoint_id // make sure it is right
  }
}

class CtrlBlock(implicit p: Parameters) extends XSModule
  with HasCircularQueuePtrHelper {
  val io = IO(new Bundle {
    val hartId = Input(UInt(8.W))
    val frontend = Flipped(new FrontendToCtrlIO)
    val allocPregs = Vec(RenameWidth, Output(new ResetPregStateReq))
    val dispatch = Vec(3*dpParams.IntDqDeqWidth, DecoupledIO(new MicroOp))
    // from int block
    val exuRedirect = Vec(exuParameters.AluCnt + exuParameters.JmpCnt, Flipped(ValidIO(new ExuOutput)))
    val stIn = Vec(exuParameters.StuCnt, Flipped(ValidIO(new ExuInput)))
    val stOut = Vec(exuParameters.StuCnt, Flipped(ValidIO(new ExuOutput)))
    val memoryViolation = Flipped(ValidIO(new Redirect))
    val jumpPc = Output(UInt(VAddrBits.W))
    val jalr_target = Output(UInt(VAddrBits.W))
    val robio = new Bundle {
      // to int block
      val toCSR = new RobCSRIO
      val exception = ValidIO(new ExceptionInfo)
      // to mem block
      val lsq = new RobLsqIO
    }
    val csrCtrl = Input(new CustomCSRCtrlIO)
    val perfInfo = Output(new Bundle{
      val ctrlInfo = new Bundle {
        val robFull   = Input(Bool())
        val intdqFull = Input(Bool())
        val fpdqFull  = Input(Bool())
        val lsdqFull  = Input(Bool())
      }
    })
    val writeback = Vec(NRIntWritePorts + NRFpWritePorts, Flipped(ValidIO(new ExuOutput)))
    // redirect out
    val redirect = ValidIO(new Redirect)
    val debug_int_rat = Vec(32, Output(UInt(PhyRegIdxWidth.W)))
    val debug_fp_rat = Vec(32, Output(UInt(PhyRegIdxWidth.W)))
  })

  val decode = Module(new DecodeStage)
  val rat = Module(new RenameTableWrapper)
  val rename = Module(new Rename)
  val dispatch = Module(new Dispatch)
  val intDq = Module(new DispatchQueue(dpParams.IntDqSize, RenameWidth, dpParams.IntDqDeqWidth, "int"))
  val fpDq = Module(new DispatchQueue(dpParams.FpDqSize, RenameWidth, dpParams.FpDqDeqWidth, "fp"))
  val lsDq = Module(new DispatchQueue(dpParams.LsDqSize, RenameWidth, dpParams.LsDqDeqWidth, "ls"))
  val redirectGen = Module(new RedirectGenerator)

  val robWbSize = NRIntWritePorts + NRFpWritePorts + exuParameters.StuCnt
  val rob = Module(new Rob(robWbSize))

  val robPcRead = io.frontend.fromFtq.getRobFlushPcRead
  val flushPC = robPcRead(rob.io.flushOut.bits.ftqIdx, rob.io.flushOut.bits.ftqOffset)

  val flushRedirect = Wire(Valid(new Redirect))
  flushRedirect.valid := RegNext(rob.io.flushOut.valid)
  flushRedirect.bits := RegEnable(rob.io.flushOut.bits, rob.io.flushOut.valid)
  flushRedirect.bits.cfiUpdate.target := Mux(io.robio.toCSR.isXRet || rob.io.exception.valid,
    io.robio.toCSR.trapTarget,
    Mux(flushRedirect.bits.flushItself(),
      flushPC, // replay inst
      flushPC + 4.U // flush pipe
    )
  )

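  // A ROB flush (exception, xret, etc.) always wins: flushRedirect overrides the
  // RedirectGenerator's stage2Redirect in the same cycle, and its registered copy
  // overrides stage3Redirect one cycle later.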
  val flushRedirectReg = Wire(Valid(new Redirect))
  flushRedirectReg.valid := RegNext(flushRedirect.valid, init = false.B)
  flushRedirectReg.bits := RegEnable(flushRedirect.bits, enable = flushRedirect.valid)

  val stage2Redirect = Mux(flushRedirect.valid, flushRedirect, redirectGen.io.stage2Redirect)
  val stage3Redirect = Mux(flushRedirectReg.valid, flushRedirectReg, redirectGen.io.stage3Redirect)

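  // Register jump/ALU redirects and the load-replay request for one cycle before
  // handing them to the RedirectGenerator; anything already squashed by the current
  // stage-2 redirect is dropped here.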
  val exuRedirect = io.exuRedirect.map(x => {
    val valid = x.valid && x.bits.redirectValid
    val killedByOlder = x.bits.uop.robIdx.needFlush(stage2Redirect)
    val delayed = Wire(Valid(new ExuOutput))
    delayed.valid := RegNext(valid && !killedByOlder, init = false.B)
    delayed.bits := RegEnable(x.bits, x.valid)
    delayed
  })
  val loadReplay = Wire(Valid(new Redirect))
  loadReplay.valid := RegNext(io.memoryViolation.valid &&
    !io.memoryViolation.bits.robIdx.needFlush(stage2Redirect),
    init = false.B
  )
  loadReplay.bits := RegEnable(io.memoryViolation.bits, io.memoryViolation.valid)
  io.frontend.fromFtq.getRedirectPcRead <> redirectGen.io.stage1PcRead
  io.frontend.fromFtq.getMemPredPcRead <> redirectGen.io.memPredPcRead
  redirectGen.io.hartId := io.hartId
  redirectGen.io.exuMispredict <> exuRedirect
  redirectGen.io.loadReplay <> loadReplay
  redirectGen.io.flush := RegNext(rob.io.flushOut.valid)

  for (i <- 0 until CommitWidth) {
    io.frontend.toFtq.rob_commits(i).valid := rob.io.commits.valid(i) && !rob.io.commits.isWalk
    io.frontend.toFtq.rob_commits(i).bits := rob.io.commits.info(i)
  }
  io.frontend.toFtq.stage2Redirect <> stage2Redirect
  io.frontend.toFtq.robFlush <> RegNext(rob.io.flushOut)
  io.frontend.toFtq.stage3Redirect := stage3Redirect

  decode.io.in <> io.frontend.cfVec
  // currently, we only update wait table when isReplay
  decode.io.memPredUpdate(0) <> RegNext(redirectGen.io.memPredUpdate)
  decode.io.memPredUpdate(1) := DontCare
  decode.io.memPredUpdate(1).valid := false.B
  decode.io.csrCtrl := RegNext(io.csrCtrl)

  rat.io.robCommits := rob.io.commits
  for ((r, i) <- rat.io.intReadPorts.zipWithIndex) {
    val raddr = decode.io.out(i).bits.ctrl.lsrc.take(2) :+ decode.io.out(i).bits.ctrl.ldest
    r.map(_.addr).zip(raddr).foreach(x => x._1 := x._2)
    rename.io.intReadPorts(i) := r.map(_.data)
    r.foreach(_.hold := !rename.io.in(i).ready)
  }
  rat.io.intRenamePorts := rename.io.intRenamePorts
  for ((r, i) <- rat.io.fpReadPorts.zipWithIndex) {
    val raddr = decode.io.out(i).bits.ctrl.lsrc.take(3) :+ decode.io.out(i).bits.ctrl.ldest
    r.map(_.addr).zip(raddr).foreach(x => x._1 := x._2)
    rename.io.fpReadPorts(i) := r.map(_.data)
    r.foreach(_.hold := !rename.io.in(i).ready)
  }
  rat.io.fpRenamePorts := rename.io.fpRenamePorts
  rat.io.debug_int_rat <> io.debug_int_rat
  rat.io.debug_fp_rat <> io.debug_fp_rat

  // pipeline between decode and rename
  for (i <- 0 until RenameWidth) {
    PipelineConnect(decode.io.out(i), rename.io.in(i), rename.io.in(i).ready,
      stage2Redirect.valid || stage3Redirect.valid)
  }

  rename.io.redirect <> stage2Redirect
  rename.io.robCommits <> rob.io.commits

  // pipeline between rename and dispatch
  for (i <- 0 until RenameWidth) {
    PipelineConnect(rename.io.out(i), dispatch.io.fromRename(i), dispatch.io.recv(i), stage2Redirect.valid)
  }

  dispatch.io.hartId := io.hartId
  dispatch.io.redirect <> stage2Redirect
  dispatch.io.enqRob <> rob.io.enq
  dispatch.io.toIntDq <> intDq.io.enq
  dispatch.io.toFpDq <> fpDq.io.enq
  dispatch.io.toLsDq <> lsDq.io.enq
  dispatch.io.allocPregs <> io.allocPregs
  dispatch.io.csrCtrl <> io.csrCtrl
  dispatch.io.storeIssue <> io.stIn
  dispatch.io.singleStep := false.B

  intDq.io.redirect <> stage2Redirect
  fpDq.io.redirect <> stage2Redirect
  lsDq.io.redirect <> stage2Redirect

  io.dispatch <> intDq.io.deq ++ lsDq.io.deq ++ fpDq.io.deq

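  // The FTQ provides a single jump-PC read port, so it is time-multiplexed: on
  // alternating cycles it serves the potential jump in dispatch slot 0 or slot 2
  // (the latter only when more than two ALU ports exist).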
  val pingpong = RegInit(false.B)
  pingpong := !pingpong
  val jumpInst = Mux(pingpong && (exuParameters.AluCnt > 2).B, io.dispatch(2).bits, io.dispatch(0).bits)
  val jumpPcRead = io.frontend.fromFtq.getJumpPcRead
  io.jumpPc := jumpPcRead(jumpInst.cf.ftqPtr, jumpInst.cf.ftqOffset)
  val jumpTargetRead = io.frontend.fromFtq.target_read
  io.jalr_target := jumpTargetRead(jumpInst.cf.ftqPtr, jumpInst.cf.ftqOffset)

  rob.io.hartId := io.hartId
  rob.io.redirect <> stage2Redirect
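  // Write-back results (int/fp function units plus the store units) are registered
  // for one cycle before entering the ROB; entries squashed by the stage-2 redirect
  // are dropped, and each surviving entry is stamped with its write-back time for debug.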
  val exeWbResults = VecInit(io.writeback ++ io.stOut)
  val timer = GTimer()
  for ((rob_wb, wb) <- rob.io.exeWbResults.zip(exeWbResults)) {
    rob_wb.valid := RegNext(wb.valid && !wb.bits.uop.robIdx.needFlush(stage2Redirect))
    rob_wb.bits := RegNext(wb.bits)
    rob_wb.bits.uop.debugInfo.writebackTime := timer
  }

  io.redirect <> stage2Redirect

  // rob to int block
  io.robio.toCSR <> rob.io.csr
  io.robio.toCSR.perfinfo.retiredInstr <> RegNext(rob.io.csr.perfinfo.retiredInstr)
  io.robio.exception := rob.io.exception
  io.robio.exception.bits.uop.cf.pc := flushPC

  // rob to mem block
  io.robio.lsq <> rob.io.lsq

  io.perfInfo.ctrlInfo.robFull := RegNext(rob.io.robFull)
  io.perfInfo.ctrlInfo.intdqFull := RegNext(intDq.io.dqFull)
  io.perfInfo.ctrlInfo.fpdqFull := RegNext(fpDq.io.dqFull)
  io.perfInfo.ctrlInfo.lsdqFull := RegNext(lsDq.io.dqFull)

  val pfevent = Module(new PFEvent)
  val csrevents = pfevent.io.hpmevent.slice(8, 16)
  val perfinfo = IO(new Bundle() {
    val perfEvents        = Output(new PerfEventsBundle(csrevents.length))
    val perfEventsRs      = Input(new PerfEventsBundle(NumRs))
    val perfEventsEu0     = Input(new PerfEventsBundle(10))
    val perfEventsEu1     = Input(new PerfEventsBundle(10))
  })

  if (print_perfcounter) {
    val decode_perf     = decode.perfEvents.map(_._1).zip(decode.perfinfo.perfEvents.perf_events)
    val rename_perf     = rename.perfEvents.map(_._1).zip(rename.perfinfo.perfEvents.perf_events)
    val dispat_perf     = dispatch.perfEvents.map(_._1).zip(dispatch.perfinfo.perfEvents.perf_events)
    val intdq_perf      = intDq.perfEvents.map(_._1).zip(intDq.perfinfo.perfEvents.perf_events)
    val fpdq_perf       = fpDq.perfEvents.map(_._1).zip(fpDq.perfinfo.perfEvents.perf_events)
    val lsdq_perf       = lsDq.perfEvents.map(_._1).zip(lsDq.perfinfo.perfEvents.perf_events)
    val rob_perf        = rob.perfEvents.map(_._1).zip(rob.perfinfo.perfEvents.perf_events)
    val perfEvents = decode_perf ++ rename_perf ++ dispat_perf ++ intdq_perf ++ fpdq_perf ++ lsdq_perf ++ rob_perf

    for (((perf_name, perf), i) <- perfEvents.zipWithIndex) {
      println(s"ctrl perf $i: $perf_name")
    }
  }

  val hpmEvents = decode.perfinfo.perfEvents.perf_events ++ rename.perfinfo.perfEvents.perf_events ++
                  dispatch.perfinfo.perfEvents.perf_events ++
                  intDq.perfinfo.perfEvents.perf_events ++ fpDq.perfinfo.perfEvents.perf_events ++
                  lsDq.perfinfo.perfEvents.perf_events ++ rob.perfinfo.perfEvents.perf_events ++
                  perfinfo.perfEventsEu0.perf_events ++ perfinfo.perfEventsEu1.perf_events ++
                  perfinfo.perfEventsRs.perf_events

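  // HPerfmonitor picks, out of the hpmEvents pool gathered above, the events selected
  // by the hpmevent CSRs (csrevents) and reports them on perfinfo.perfEvents one cycle later.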
  val perf_length = hpmEvents.length
  val hpm_ctrl = Module(new HPerfmonitor(perf_length, csrevents.length))
  hpm_ctrl.io.hpm_event := csrevents
  hpm_ctrl.io.events_sets.perf_events := hpmEvents
  perfinfo.perfEvents := RegNext(hpm_ctrl.io.events_selected)
  pfevent.io.distribute_csr := RegNext(io.csrCtrl.distribute_csr)
}