xref: /XiangShan/src/main/scala/xiangshan/frontend/BPU.scala (revision dcbc69cb2a7ea07707ede3d8f7c74421ef450202)
/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.frontend

import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.experimental.chiselName
import chisel3.util._
import xiangshan._
import utils._

import scala.math.min

trait HasBPUConst extends HasXSParameter {
  val MaxMetaLength = 1024 // TODO: Reduce meta length
  val MaxBasicBlockSize = 32
  val LHistoryLength = 32
  // val numBr = 2
  val useBPD = true
  val useLHist = true
  val shareTailSlot = true
  val numBrSlot = if (shareTailSlot) numBr-1 else numBr
  val totalSlot = numBrSlot + 1

  def BP_STAGES = (0 until 3).map(_.U(2.W))
  def BP_S1 = BP_STAGES(0)
  def BP_S2 = BP_STAGES(1)
  def BP_S3 = BP_STAGES(2)
  val numBpStages = BP_STAGES.length

  val debug = true
  val resetVector = 0x10000000L // TODO: set reset vec
  // TODO: Replace log2Up by log2Ceil
}

trait HasBPUParameter extends HasXSParameter with HasBPUConst {
  val BPUDebug = true && !env.FPGAPlatform && env.EnablePerfDebug
  val EnableCFICommitLog = true
  val EnbaleCFIPredLog = true
  val EnableBPUTimeRecord = (EnableCFICommitLog || EnbaleCFIPredLog) && !env.FPGAPlatform
  val EnableCommit = false
}

class BPUCtrl(implicit p: Parameters) extends XSBundle {
  val ubtb_enable = Bool()
  val btb_enable  = Bool()
  val bim_enable  = Bool()
  val tage_enable = Bool()
  val sc_enable   = Bool()
  val ras_enable  = Bool()
  val loop_enable = Bool()
}

trait BPUUtils extends HasXSParameter {
  // circular shifting
  def circularShiftLeft(source: UInt, len: Int, shamt: UInt): UInt = {
    val res = Wire(UInt(len.W))
    val higher = source << shamt
    val lower = source >> (len.U - shamt)
    res := higher | lower
    res
  }

  def circularShiftRight(source: UInt, len: Int, shamt: UInt): UInt = {
    val res = Wire(UInt(len.W))
    val higher = source << (len.U - shamt)
    val lower = source >> shamt
    res := higher | lower
    res
  }
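  // Worked example: with len = 8, source = 0b0000_0111 and shamt = 2, circularShiftLeft
  // gives 0b0001_1100 and circularShiftRight gives 0b1100_0001: bits shifted out of one
  // end re-enter at the other, and the result is truncated to len bits.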

  // To be verified
  def satUpdate(old: UInt, len: Int, taken: Bool): UInt = {
    val oldSatTaken = old === ((1 << len)-1).U
    val oldSatNotTaken = old === 0.U
    Mux(oldSatTaken && taken, ((1 << len)-1).U,
      Mux(oldSatNotTaken && !taken, 0.U,
        Mux(taken, old + 1.U, old - 1.U)))
  }

  def signedSatUpdate(old: SInt, len: Int, taken: Bool): SInt = {
    val oldSatTaken = old === ((1 << (len-1))-1).S
    val oldSatNotTaken = old === (-(1 << (len-1))).S
    Mux(oldSatTaken && taken, ((1 << (len-1))-1).S,
      Mux(oldSatNotTaken && !taken, (-(1 << (len-1))).S,
        Mux(taken, old + 1.S, old - 1.S)))
  }
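  // Worked example for a 2-bit saturating counter (len = 2): satUpdate moves through
  // 0 <-> 1 <-> 2 <-> 3 and saturates at both ends, e.g. satUpdate(3.U, 2, true.B) stays at 3,
  // satUpdate(0.U, 2, false.B) stays at 0, and satUpdate(1.U, 2, true.B) gives 2.
  // signedSatUpdate behaves the same over the signed range [-(1 << (len-1)), (1 << (len-1)) - 1].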

  def getFallThroughAddr(start: UInt, carry: Bool, pft: UInt) = {
    val higher = start.head(VAddrBits-log2Ceil(PredictWidth)-instOffsetBits-1)
    Cat(Mux(carry, higher+1.U, higher), pft, 0.U(instOffsetBits.W))
  }
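  // The fall-through address is reassembled from the upper bits of the start PC
  // (incremented when carry indicates that the partial fall-through bits wrapped around),
  // the stored partial fall-through bits pft, and instOffsetBits of zero padding, so only
  // the short pft field and a carry bit need to be stored instead of a full address.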

  def foldTag(tag: UInt, l: Int): UInt = {
    val nChunks = (tag.getWidth + l - 1) / l
    val chunks = (0 until nChunks).map { i =>
      tag(min((i+1)*l, tag.getWidth)-1, i*l)
    }
    ParallelXOR(chunks)
  }
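  // Worked example: foldTag(tag, 4) on a 10-bit tag XORs tag(3,0), tag(7,4) and tag(9,8)
  // together, compressing the tag into a hash of at most l bits.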
}

// class BranchPredictionUpdate(implicit p: Parameters) extends XSBundle with HasBPUConst {
//   val pc = UInt(VAddrBits.W)
//   val br_offset = Vec(num_br, UInt(log2Up(MaxBasicBlockSize).W))
//   val br_mask = Vec(MaxBasicBlockSize, Bool())
//
//   val jmp_valid = Bool()
//   val jmp_type = UInt(3.W)
//
//   val is_NextMask = Vec(FetchWidth*2, Bool())
//
//   val cfi_idx = Valid(UInt(log2Ceil(MaxBasicBlockSize).W))
//   val cfi_mispredict = Bool()
//   val cfi_is_br = Bool()
//   val cfi_is_jal = Bool()
//   val cfi_is_jalr = Bool()
//
//   val ghist = new ShiftingGlobalHistory()
//
//   val target = UInt(VAddrBits.W)
//
//   val meta = UInt(MaxMetaLength.W)
//   val spec_meta = UInt(MaxMetaLength.W)
//
//   def taken = cfi_idx.valid
// }

class AllFoldedHistories(val gen: Seq[Tuple2[Int, Int]])(implicit p: Parameters) extends XSBundle with HasBPUConst {
  val hist = MixedVec(gen.map{case (l, cl) => new FoldedHistory(l, cl, numBr)})
  // println(gen.mkString)
  require(gen.toSet.toList.equals(gen))
  def getHistWithInfo(info: Tuple2[Int, Int]) = {
    val selected = hist.filter(_.info.equals(info))
    require(selected.length == 1)
    selected(0)
  }
  def autoConnectFrom(that: AllFoldedHistories) = {
    require(this.hist.length <= that.hist.length)
    for (h <- this.hist) {
      h := that.getHistWithInfo(h.info)
    }
  }
  def update(ghr: Vec[Bool], ptr: CGHPtr, shift: Int, taken: Bool): AllFoldedHistories = {
    val res = WireInit(this)
    for (i <- 0 until this.hist.length) {
      res.hist(i) := this.hist(i).update(ghr, ptr, shift, taken)
    }
    res
  }
  // def update(ghr: Vec[Bool], ptr: CGHPtr, br_valids: Vec[Bool], br_takens: Vec[Bool]): AllFoldedHistories = {
  //   val last_valid_idx = PriorityMux(
  //     br_valids.reverse :+ true.B,
  //     (numBr to 0 by -1).map(_.U(log2Ceil(numBr+1).W))
  //   )
  //   val first_taken_idx = PriorityEncoder(false.B +: br_takens)
  //   val smaller = Mux(last_valid_idx < first_taken_idx,
  //     last_valid_idx,
  //     first_taken_idx
  //   )
  //   val shift = smaller
  //   val taken = br_takens.reduce(_||_)
  //   update(ghr, ptr, shift, taken)
  // }
  // def update(ghr: Vec[Bool], ptr: CGHPtr, resp: BranchPredictionBundle): AllFoldedHistories = {
  //   update(ghr, ptr, resp.preds.br_valids, resp.real_br_taken_mask)
  // }
  def display(cond: Bool) = {
    for (h <- hist) {
      XSDebug(cond, p"hist len ${h.len}, folded len ${h.compLen}, value ${Binary(h.folded_hist)}\n")
    }
  }
}

class BasePredictorInput (implicit p: Parameters) extends XSBundle with HasBPUConst {
  def nInputs = 1

  val s0_pc = UInt(VAddrBits.W)

  val folded_hist = new AllFoldedHistories(foldedGHistInfos)
  val phist = UInt(PathHistoryLength.W)

  val resp_in = Vec(nInputs, new BranchPredictionResp)

  // val final_preds = Vec(numBpStages, new)
  // val toFtq_fire = Bool()

  // val s0_all_ready = Bool()
}

class BasePredictorOutput (implicit p: Parameters) extends XSBundle with HasBPUConst {
  val s3_meta = UInt(MaxMetaLength.W) // This is used by the composer
  val resp = new BranchPredictionResp

  // These are stored in meta and extracted in the composer
  // val rasSp = UInt(log2Ceil(RasSize).W)
  // val rasTop = new RASEntry
  // val specCnt = Vec(PredictWidth, UInt(10.W))
}

class BasePredictorIO (implicit p: Parameters) extends XSBundle with HasBPUConst {
  val in  = Flipped(DecoupledIO(new BasePredictorInput)) // TODO: Remove DecoupledIO
  // val out = DecoupledIO(new BasePredictorOutput)
  val out = Output(new BasePredictorOutput)
  // val flush_out = Valid(UInt(VAddrBits.W))

  // val ctrl = Input(new BPUCtrl())

  val s0_fire = Input(Bool())
  val s1_fire = Input(Bool())
  val s2_fire = Input(Bool())
  val s3_fire = Input(Bool())

  val s1_ready = Output(Bool())
  val s2_ready = Output(Bool())
  val s3_ready = Output(Bool())

  val update = Flipped(Valid(new BranchPredictionUpdate))
  val redirect = Flipped(Valid(new BranchPredictionRedirect))
}

abstract class BasePredictor(implicit p: Parameters) extends XSModule with HasBPUConst with BPUUtils {
  val meta_size = 0
  val spec_meta_size = 0
  val io = IO(new BasePredictorIO())

  io.out.resp := io.in.bits.resp_in(0)

  io.out.s3_meta := 0.U

  io.in.ready := !io.redirect.valid

  io.s1_ready := true.B
  io.s2_ready := true.B
  io.s3_ready := true.B

  val s0_pc       = WireInit(io.in.bits.s0_pc) // fetchIdx(io.f0_pc)
  val s1_pc       = RegEnable(s0_pc, resetVector.U, io.s0_fire)
  val s2_pc       = RegEnable(s1_pc, io.s1_fire)
  val s3_pc       = RegEnable(s2_pc, io.s2_fire)


  def getFoldedHistoryInfo: Option[Set[FoldedHistoryInfo]] = None
}

class FakePredictor(implicit p: Parameters) extends BasePredictor {
  io.in.ready                 := true.B
  io.out.s3_meta              := 0.U
  io.out.resp := io.in.bits.resp_in(0)
}

class BpuToFtqIO(implicit p: Parameters) extends XSBundle {
  val resp = DecoupledIO(new BpuToFtqBundle())
}

class PredictorIO(implicit p: Parameters) extends XSBundle {
  val bpu_to_ftq = new BpuToFtqIO()
  val ftq_to_bpu = Flipped(new FtqToBpuIO())
}

class FakeBPU(implicit p: Parameters) extends XSModule with HasBPUConst {
  val io = IO(new PredictorIO)

  val toFtq_fire = io.bpu_to_ftq.resp.valid && io.bpu_to_ftq.resp.ready

  val s0_pc = RegInit(resetVector.U)

  when(toFtq_fire) {
    s0_pc := s0_pc + (FetchWidth*4).U
  }

  when (io.ftq_to_bpu.redirect.valid) {
    s0_pc := io.ftq_to_bpu.redirect.bits.cfiUpdate.target
  }

  io.bpu_to_ftq.resp.valid := !reset.asBool() && !io.ftq_to_bpu.redirect.valid

  io.bpu_to_ftq.resp.bits := 0.U.asTypeOf(new BranchPredictionBundle)
  io.bpu_to_ftq.resp.bits.s1.pc := s0_pc
  io.bpu_to_ftq.resp.bits.s1.ftb_entry.pftAddr := s0_pc + (FetchWidth*4).U
}

@chiselName
class Predictor(implicit p: Parameters) extends XSModule with HasBPUConst {
  val io = IO(new PredictorIO)

  val predictors = Module(if (useBPD) new Composer else new FakePredictor)

  val folded_hist_infos = predictors.getFoldedHistoryInfo.getOrElse(Set()).toList
  for ((len, compLen) <- folded_hist_infos) {
    println(f"folded hist info: len $len, compLen $compLen")
  }

  val s0_fire, s1_fire, s2_fire, s3_fire = Wire(Bool())
  val s1_valid, s2_valid, s3_valid = RegInit(false.B)
  val s1_ready, s2_ready, s3_ready = Wire(Bool())
  val s1_components_ready, s2_components_ready, s3_components_ready = Wire(Bool())

  val s0_pc = WireInit(resetVector.U)
  val s0_pc_reg = RegNext(s0_pc, init=resetVector.U)
  val s1_pc = RegEnable(s0_pc, s0_fire)
  val s2_pc = RegEnable(s1_pc, s1_fire)
  val s3_pc = RegEnable(s2_pc, s2_fire)

  val s0_folded_gh = Wire(new AllFoldedHistories(foldedGHistInfos))
  val s0_folded_gh_reg = RegNext(s0_folded_gh, init=0.U.asTypeOf(s0_folded_gh))
  val s1_folded_gh = RegEnable(s0_folded_gh, 0.U.asTypeOf(s0_folded_gh), s0_fire)
  val s2_folded_gh = RegEnable(s1_folded_gh, 0.U.asTypeOf(s0_folded_gh), s1_fire)
  val s3_folded_gh = RegEnable(s2_folded_gh, 0.U.asTypeOf(s0_folded_gh), s2_fire)

  val npcGen   = new PhyPriorityMuxGenerator[UInt]
  val foldedGhGen = new PhyPriorityMuxGenerator[AllFoldedHistories]
  val ghistPtrGen = new PhyPriorityMuxGenerator[CGHPtr]
  val phistGen = new PhyPriorityMuxGenerator[UInt]
  val lastPredGen = new PhyPriorityMuxGenerator[BranchPredictionBundle]
  val ghrBitWriteGens = Seq.tabulate(HistoryLength)(n => new PhyPriorityMuxGenerator[Bool])

  val ghr = RegInit(0.U.asTypeOf(Vec(HistoryLength, Bool())))
  val ghr_wire = WireInit(ghr)

  val ghr_write_datas = Wire(Vec(HistoryLength, Bool()))
  val ghr_wens = Wire(Vec(HistoryLength, Bool()))

  val s0_ghist_ptr = Wire(new CGHPtr)
  val s0_ghist_ptr_reg = RegNext(s0_ghist_ptr, init=0.U.asTypeOf(new CGHPtr))
  val s1_ghist_ptr = RegEnable(s0_ghist_ptr, 0.U.asTypeOf(new CGHPtr), s0_fire)
  val s2_ghist_ptr = RegEnable(s1_ghist_ptr, 0.U.asTypeOf(new CGHPtr), s1_fire)
  val s3_ghist_ptr = RegEnable(s2_ghist_ptr, 0.U.asTypeOf(new CGHPtr), s2_fire)

  val s0_last_pred = Wire(new BranchPredictionBundle)
  val s0_last_pred_reg = RegNext(s0_last_pred, init=0.U.asTypeOf(new BranchPredictionBundle))
  val s1_last_pred = RegEnable(s0_last_pred, 0.U.asTypeOf(new BranchPredictionBundle), s0_fire)
  val s2_last_pred = RegEnable(s1_last_pred, 0.U.asTypeOf(new BranchPredictionBundle), s1_fire)
  val s3_last_pred = RegEnable(s2_last_pred, 0.U.asTypeOf(new BranchPredictionBundle), s2_fire)

  val s0_phist = WireInit(0.U(PathHistoryLength.W))
  val s0_phist_reg = RegNext(s0_phist, init=0.U(PathHistoryLength.W))
  val s1_phist = RegEnable(s0_phist, 0.U, s0_fire)
  val s2_phist = RegEnable(s1_phist, 0.U, s1_fire)
  val s3_phist = RegEnable(s2_phist, 0.U, s2_fire)

  val resp = predictors.io.out.resp


  val toFtq_fire = io.bpu_to_ftq.resp.valid && io.bpu_to_ftq.resp.ready

  val s1_flush, s2_flush, s3_flush = Wire(Bool())
  val s2_redirect, s3_redirect = Wire(Bool())

  // predictors.io := DontCare
  predictors.io.in.valid := s0_fire
  predictors.io.in.bits.s0_pc := s0_pc
  predictors.io.in.bits.folded_hist := s0_folded_gh
  predictors.io.in.bits.phist := s0_phist
  predictors.io.in.bits.resp_in(0) := (0.U).asTypeOf(new BranchPredictionResp)
  // predictors.io.in.bits.resp_in(0).s1.pc := s0_pc
  // predictors.io.in.bits.toFtq_fire := toFtq_fire

  // predictors.io.out.ready := io.bpu_to_ftq.resp.ready

  // Pipeline logic
  s2_redirect := false.B
  s3_redirect := false.B

  s3_flush := io.ftq_to_bpu.redirect.valid
  s2_flush := s3_flush || s3_redirect
  s1_flush := s2_flush || s2_redirect

  s1_components_ready := predictors.io.s1_ready
  s1_ready := s1_fire || !s1_valid
  s0_fire := !reset.asBool && s1_components_ready && s1_ready
  predictors.io.s0_fire := s0_fire

  s2_components_ready := predictors.io.s2_ready
  s2_ready := s2_fire || !s2_valid
  s1_fire := s1_valid && s2_components_ready && s2_ready && io.bpu_to_ftq.resp.ready

  when(s0_fire)         { s1_valid := true.B  }
    .elsewhen(s1_flush) { s1_valid := false.B }
    .elsewhen(s1_fire)  { s1_valid := false.B }

  predictors.io.s1_fire := s1_fire

  s3_components_ready := predictors.io.s3_ready
  s3_ready := s3_fire || !s3_valid
  s2_fire := s2_valid && s3_components_ready && s3_ready

  when(s2_flush)                    { s2_valid := false.B }
    .elsewhen(s1_fire && !s1_flush) { s2_valid := true.B  }
    .elsewhen(s2_fire)              { s2_valid := false.B }

  predictors.io.s2_fire := s2_fire

  // s3_fire := s3_valid && io.bpu_to_ftq.resp.ready
  s3_fire := s3_valid

  when(s3_flush)                    { s3_valid := false.B }
    .elsewhen(s2_fire && !s2_flush) { s3_valid := true.B  }
    .elsewhen(s3_fire)              { s3_valid := false.B }

  predictors.io.s3_fire := s3_fire
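  // Handshake summary: s1 fires when it holds a valid prediction, the s2 components and the s2
  // stage can accept it, and the FTQ is ready; s2 fires when it is valid and s3 can accept; s3
  // currently fires whenever it is valid. A stage's valid bit is set when the previous stage
  // fires and cleared by its own fire or by a flush caused by a later-stage redirect or an FTQ
  // redirect.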

  io.bpu_to_ftq.resp.valid :=
    s1_valid && s2_components_ready && s2_ready ||
    s2_fire && s2_redirect ||
    s3_fire && s3_redirect
  io.bpu_to_ftq.resp.bits  := BpuToFtqBundle(predictors.io.out.resp)
  io.bpu_to_ftq.resp.bits.meta  := predictors.io.out.s3_meta
  // io.bpu_to_ftq.resp.bits.s3.ghist  := s3_ghist
  io.bpu_to_ftq.resp.bits.s3.folded_hist := s3_folded_gh
  io.bpu_to_ftq.resp.bits.s3.histPtr := s3_ghist_ptr
  io.bpu_to_ftq.resp.bits.s3.phist  := s3_phist

  npcGen.register(true.B, s0_pc_reg, Some("stallPC"), 0)
  foldedGhGen.register(true.B, s0_folded_gh_reg, Some("stallFGH"), 0)
  ghistPtrGen.register(true.B, s0_ghist_ptr_reg, Some("stallGHPtr"), 0)
  phistGen.register(true.B, s0_phist_reg, Some("stallPhist"), 0)
  lastPredGen.register(true.B, s0_last_pred_reg, Some("stallLastPred"), 0)

  // History management
  // s1
  val s1_possible_predicted_ghist_ptrs = (0 to numBr).map(s1_ghist_ptr - _.U)
  val s1_predicted_ghist_ptr = Mux1H(resp.s1.lastBrPosOH, s1_possible_predicted_ghist_ptrs)
  val s1_possible_predicted_fhs = (0 to numBr).map(i =>
    s1_folded_gh.update(ghr, s1_ghist_ptr, i, if (i > 0) resp.s1.preds.br_taken_mask(i-1) else false.B))
  val s1_predicted_fh = Mux1H(resp.s1.lastBrPosOH, s1_possible_predicted_fhs)

  require(isPow2(HistoryLength))
  val s1_ghr_wens = (0 until HistoryLength).map(n =>
    (0 until numBr).map(b => (s1_ghist_ptr).value === n.U(log2Ceil(HistoryLength).W) + b.U && resp.s1.shouldShiftVec(b) && s1_valid))
  val s1_ghr_wdatas = (0 until HistoryLength).map(n =>
    Mux1H(
      (0 until numBr).map(b => (s1_ghist_ptr).value === n.U(log2Ceil(HistoryLength).W) + b.U && resp.s1.shouldShiftVec(b)),
      resp.s1.real_br_taken_mask()
    )
  )


  npcGen.register(s1_valid, resp.s1.target, Some("s1_target"), 5)
  foldedGhGen.register(s1_valid, s1_predicted_fh, Some("s1_FGH"), 5)
  ghistPtrGen.register(s1_valid, s1_predicted_ghist_ptr, Some("s1_GHPtr"), 5)
  phistGen.register(s1_valid, (s1_phist << 1) | s1_pc(instOffsetBits), Some("s1_Phist"), 5)
  lastPredGen.register(s1_valid, resp.s1, Some("s1_lastPred"), 5)
  ghrBitWriteGens.zip(s1_ghr_wens).zipWithIndex.map{case ((b, w), i) =>
    b.register(w.reduce(_||_), s1_ghr_wdatas(i), Some(s"s1_new_bit_$i"), 5)
  }

  def preds_needs_redirect(x: BranchPredictionBundle, y: BranchPredictionBundle) = {
    x.real_slot_taken_mask().asUInt.orR =/= y.real_slot_taken_mask().asUInt().orR ||
    x.preds.br_valids.asUInt =/= y.preds.br_valids.asUInt ||
    PriorityEncoder(x.real_br_taken_mask()) =/= PriorityEncoder(y.real_br_taken_mask)
  }
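  // A later stage overrides an earlier one when their predictions disagree in a way that changes
  // the fetch stream: a different overall taken outcome, a different set of valid branch slots,
  // or a different first-taken branch (checked above), or simply a different predicted target
  // (checked at the call sites below).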
  // s2
  val s2_possible_predicted_ghist_ptrs = (0 to numBr).map(s2_ghist_ptr - _.U)
  val s2_predicted_ghist_ptr = Mux1H(resp.s2.lastBrPosOH, s2_possible_predicted_ghist_ptrs)
  val s2_possible_predicted_fhs = (0 to numBr).map(i =>
    s2_folded_gh.update(ghr, s2_ghist_ptr, i, if (i > 0) resp.s2.preds.br_taken_mask(i-1) else false.B))
  val s2_predicted_fh = Mux1H(resp.s2.lastBrPosOH, s2_possible_predicted_fhs)
  val s2_ghr_wens = (0 until HistoryLength).map(n =>
    (0 until numBr).map(b => (s2_ghist_ptr).value === n.U(log2Ceil(HistoryLength).W) + b.U && resp.s2.shouldShiftVec(b) && s2_redirect))
  val s2_ghr_wdatas = (0 until HistoryLength).map(n =>
    Mux1H(
      (0 until numBr).map(b => (s2_ghist_ptr).value === n.U(log2Ceil(HistoryLength).W) + b.U && resp.s2.shouldShiftVec(b)),
      resp.s2.real_br_taken_mask()
    )
  )

  val previous_s1_pred = RegEnable(resp.s1, init=0.U.asTypeOf(resp.s1), s1_fire)

  val s2_redirect_s1_last_pred = preds_needs_redirect(s1_last_pred, resp.s2)
  val s2_redirect_s0_last_pred = preds_needs_redirect(s0_last_pred_reg, resp.s2)

  s2_redirect := s2_fire && ((s1_valid && (s1_pc =/= resp.s2.target || s2_redirect_s1_last_pred)) ||
      !s1_valid && (s0_pc_reg =/= resp.s2.target || s2_redirect_s0_last_pred))

  // when(s2_redirect) { ghist_update(s2_ghist_ptr, resp.s2) }
  npcGen.register(s2_redirect, resp.s2.target, Some("s2_target"), 4)
  foldedGhGen.register(s2_redirect, s2_predicted_fh, Some("s2_FGH"), 4)
  ghistPtrGen.register(s2_redirect, s2_predicted_ghist_ptr, Some("s2_GHPtr"), 4)
  phistGen.register(s2_redirect, (s2_phist << 1) | s2_pc(instOffsetBits), Some("s2_Phist"), 4)
  lastPredGen.register(s2_redirect, resp.s2, Some("s2_lastPred"), 4)
  ghrBitWriteGens.zip(s2_ghr_wens).zipWithIndex.map{case ((b, w), i) =>
    b.register(w.reduce(_||_), s2_ghr_wdatas(i), Some(s"s2_new_bit_$i"), 4)
  }

  val s2_redirect_target = s2_fire && s1_valid && s1_pc =/= resp.s2.target
  val s2_saw_s1_hit = RegEnable(resp.s1.preds.hit, s1_fire)
  val s2_redirect_target_both_hit = s2_redirect_target &&  s2_saw_s1_hit &&  resp.s2.preds.hit

  XSPerfAccumulate("s2_redirect_because_s1_not_valid", s2_fire && !s1_valid)
  XSPerfAccumulate("s2_redirect_because_target_diff", s2_fire && s1_valid && s1_pc =/= resp.s2.target)
  XSPerfAccumulate("s2_redirect_target_diff_s1_nhit_s2_hit", s2_redirect_target && !s2_saw_s1_hit &&  resp.s2.preds.hit)
  XSPerfAccumulate("s2_redirect_target_diff_s1_hit_s2_nhit", s2_redirect_target &&  s2_saw_s1_hit && !resp.s2.preds.hit)
  XSPerfAccumulate("s2_redirect_target_diff_both_hit",  s2_redirect_target &&  s2_saw_s1_hit &&  resp.s2.preds.hit)
  XSPerfAccumulate("s2_redirect_br_direction_diff",
    s2_redirect_target_both_hit &&
    RegEnable(PriorityEncoder(resp.s1.preds.br_taken_mask), s1_fire) =/= PriorityEncoder(resp.s2.preds.br_taken_mask))
  // XSPerfAccumulate("s2_redirect_because_ghist_diff", s2_fire && s1_valid && s2_correct_s1_ghist)

  // s3
  val s3_possible_predicted_ghist_ptrs = (0 to numBr).map(s3_ghist_ptr - _.U)
  val s3_predicted_ghist_ptr = Mux1H(resp.s3.lastBrPosOH, s3_possible_predicted_ghist_ptrs)
  val s3_possible_predicted_fhs = (0 to numBr).map(i =>
    s3_folded_gh.update(ghr, s3_ghist_ptr, i, if (i > 0) resp.s3.preds.br_taken_mask(i-1) else false.B))
  val s3_predicted_fh = Mux1H(resp.s3.lastBrPosOH, s3_possible_predicted_fhs)
  val s3_ghr_wens = (0 until HistoryLength).map(n =>
    (0 until numBr).map(b => (s3_ghist_ptr).value === n.U(log2Ceil(HistoryLength).W) + b.U && resp.s3.shouldShiftVec(b) && s3_redirect))
  val s3_ghr_wdatas = (0 until HistoryLength).map(n =>
    Mux1H(
      (0 until numBr).map(b => (s3_ghist_ptr).value === n.U(log2Ceil(HistoryLength).W) + b.U && resp.s3.shouldShiftVec(b)),
      resp.s3.real_br_taken_mask()
    )
  )

  val s3_redirect_s2_last_pred = preds_needs_redirect(s2_last_pred, resp.s3)
  val s3_redirect_s1_last_pred = preds_needs_redirect(s1_last_pred, resp.s3)
  val s3_redirect_s0_last_pred = preds_needs_redirect(s0_last_pred_reg, resp.s3)

  s3_redirect := s3_fire && ((s2_valid && (s2_pc =/= resp.s3.target || s3_redirect_s2_last_pred)) ||
      (!s2_valid && s1_valid && (s1_pc =/= resp.s3.target || s3_redirect_s1_last_pred)) ||
      (!s2_valid && !s1_valid && (s0_pc_reg =/= resp.s3.target || s3_redirect_s0_last_pred)))

  // when(s3_redirect) { ghist_update(s3_ghist_ptr, resp.s3) }
  npcGen.register(s3_redirect, resp.s3.target, Some("s3_target"), 3)
  foldedGhGen.register(s3_redirect, s3_predicted_fh, Some("s3_FGH"), 3)
  ghistPtrGen.register(s3_redirect, s3_predicted_ghist_ptr, Some("s3_GHPtr"), 3)
  phistGen.register(s3_redirect, (s3_phist << 1) | s3_pc(instOffsetBits), Some("s3_Phist"), 3)
  lastPredGen.register(s3_redirect, resp.s3, Some("s3_lastPred"), 3)
  ghrBitWriteGens.zip(s3_ghr_wens).zipWithIndex.map{case ((b, w), i) =>
    b.register(w.reduce(_||_), s3_ghr_wdatas(i), Some(s"s3_new_bit_$i"), 3)
  }
  // Send signals to tell the FTQ about stage overrides
  val s2_ftq_idx = RegEnable(io.ftq_to_bpu.enq_ptr, s1_fire)
  val s3_ftq_idx = RegEnable(s2_ftq_idx, s2_fire)

  io.bpu_to_ftq.resp.bits.s1.valid := s1_fire && !s1_flush
  io.bpu_to_ftq.resp.bits.s1.hasRedirect := false.B
  io.bpu_to_ftq.resp.bits.s1.ftq_idx := DontCare
  io.bpu_to_ftq.resp.bits.s2.valid := s2_fire && !s2_flush
  io.bpu_to_ftq.resp.bits.s2.hasRedirect := s2_redirect
  io.bpu_to_ftq.resp.bits.s2.ftq_idx := s2_ftq_idx
  io.bpu_to_ftq.resp.bits.s3.valid := s3_fire && !s3_flush
  io.bpu_to_ftq.resp.bits.s3.hasRedirect := s3_redirect
  io.bpu_to_ftq.resp.bits.s3.ftq_idx := s3_ftq_idx

  val redirect = io.ftq_to_bpu.redirect.bits

  predictors.io.update := io.ftq_to_bpu.update
  predictors.io.redirect := io.ftq_to_bpu.redirect

  // Redirect logic
  val shift = redirect.cfiUpdate.shift
  val addIntoHist = redirect.cfiUpdate.addIntoHist
  // TODO: remove these below
  val shouldShiftVec = Mux(shift === 0.U, VecInit(0.U((1 << (log2Ceil(numBr) + 1)).W).asBools), VecInit((LowerMask(1.U << (shift-1.U))).asBools()))
  // TODO end

  val isBr = redirect.cfiUpdate.pd.isBr
  val taken = redirect.cfiUpdate.taken
  val real_br_taken_mask = (0 until numBr).map(i => shift === (i+1).U && taken && addIntoHist )

  val oldPtr = redirect.cfiUpdate.histPtr
  val oldFh = redirect.cfiUpdate.folded_hist
  val updated_ptr = oldPtr - shift
  val updated_fh = VecInit((0 to numBr).map(i => oldFh.update(ghr, oldPtr, i, taken && addIntoHist)))(shift)
  val redirect_ghr_wens = (0 until HistoryLength).map(n =>
    (0 until numBr).map(b => oldPtr.value === (n.U(log2Ceil(HistoryLength).W) + b.U) && shouldShiftVec(b) && io.ftq_to_bpu.redirect.valid))
  val redirect_ghr_wdatas = (0 until HistoryLength).map(n =>
    Mux1H(
      (0 until numBr).map(b => oldPtr.value === (n.U(log2Ceil(HistoryLength).W) + b.U) && shouldShiftVec(b)),
      real_br_taken_mask
    )
  )
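  // On an FTQ redirect, the speculative history is rolled back to the snapshot carried in
  // cfiUpdate (histPtr / folded_hist) and re-updated with the resolved outcome: the pointer
  // moves by `shift` slots, the folded histories are rebuilt from that snapshot, and the
  // corresponding bits of the circular ghr buffer are rewritten with the actual taken mask.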

  // val updatedGh = oldGh.update(shift, taken && addIntoHist)
  val oldPh = redirect.cfiUpdate.phist
  val phNewBit = redirect.cfiUpdate.phNewBit

  // when(io.ftq_to_bpu.redirect.valid) { ghist_update(oldPtr, shift, taken && addIntoHist) }
  npcGen.register(io.ftq_to_bpu.redirect.valid, redirect.cfiUpdate.target, Some("redirect_target"), 2)
  foldedGhGen.register(io.ftq_to_bpu.redirect.valid, updated_fh, Some("redirect_FGHT"), 2)
  ghistPtrGen.register(io.ftq_to_bpu.redirect.valid, updated_ptr, Some("redirect_GHPtr"), 2)
  phistGen.register(io.ftq_to_bpu.redirect.valid, (oldPh << 1) | phNewBit, Some("redirect_phist"), 2)
  ghrBitWriteGens.zip(redirect_ghr_wens).zipWithIndex.map{case ((b, w), i) =>
    b.register(w.reduce(_||_), redirect_ghr_wdatas(i), Some(s"redirect_new_bit_$i"), 2)
  }
  // no need to assign s0_last_pred

  val need_reset = RegNext(reset.asBool) && !reset.asBool

  // Reset
  npcGen.register(need_reset, resetVector.U, Some("reset_pc"), 1)
  foldedGhGen.register(need_reset, 0.U.asTypeOf(s0_folded_gh), Some("reset_FGH"), 1)
  ghistPtrGen.register(need_reset, 0.U.asTypeOf(new CGHPtr), Some("reset_GHPtr"), 1)
  phistGen.register(need_reset, 0.U, Some("reset_phist"), 1)
  lastPredGen.register(need_reset, 0.U.asTypeOf(new BranchPredictionBundle), Some("reset_lastPred"), 1)

  s0_pc         := npcGen()
  s0_pc_reg     := s0_pc
  s0_folded_gh  := foldedGhGen()
  s0_ghist_ptr  := ghistPtrGen()
  s0_phist      := phistGen()
  s0_last_pred  := lastPredGen()
  (ghr_write_datas zip ghrBitWriteGens).map{case (wd, d) => wd := d()}
  for (i <- 0 until HistoryLength) {
    ghr_wens(i) := Seq(s1_ghr_wens, s2_ghr_wens, s3_ghr_wens, redirect_ghr_wens).map(_(i).reduce(_||_)).reduce(_||_)
    when (ghr_wens(i)) {
      ghr(i) := ghr_write_datas(i)
    }
  }
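  // Each bit of the circular global history buffer has its own priority mux (ghrBitWriteGens),
  // arbitrating among the s1/s2/s3 speculative updates and redirect recovery; a bit is only
  // written in a cycle when at least one of those sources asserts its write enable for that
  // position.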

  XSDebug(RegNext(reset.asBool) && !reset.asBool, "Resetting...\n")
  XSDebug(io.ftq_to_bpu.update.valid, p"Update from ftq\n")
  XSDebug(io.ftq_to_bpu.redirect.valid, p"Redirect from ftq\n")

  XSDebug("[BP0]                 fire=%d                      pc=%x\n", s0_fire, s0_pc)
  XSDebug("[BP1] v=%d r=%d cr=%d fire=%d             flush=%d pc=%x\n",
    s1_valid, s1_ready, s1_components_ready, s1_fire, s1_flush, s1_pc)
  XSDebug("[BP2] v=%d r=%d cr=%d fire=%d redirect=%d flush=%d pc=%x\n",
    s2_valid, s2_ready, s2_components_ready, s2_fire, s2_redirect, s2_flush, s2_pc)
  XSDebug("[BP3] v=%d r=%d cr=%d fire=%d redirect=%d flush=%d pc=%x\n",
    s3_valid, s3_ready, s3_components_ready, s3_fire, s3_redirect, s3_flush, s3_pc)
  XSDebug("[FTQ] ready=%d\n", io.bpu_to_ftq.resp.ready)
  XSDebug("resp.s1.target=%x\n", resp.s1.target)
  XSDebug("resp.s2.target=%x\n", resp.s2.target)
  // XSDebug("s0_ghist: %b\n", s0_ghist.predHist)
  // XSDebug("s1_ghist: %b\n", s1_ghist.predHist)
  // XSDebug("s2_ghist: %b\n", s2_ghist.predHist)
  // XSDebug("s3_ghist: %b\n", s3_ghist.predHist)
  // XSDebug("s2_predicted_ghist: %b\n", s2_predicted_ghist.predHist)
  // XSDebug("s3_predicted_ghist: %b\n", s3_predicted_ghist.predHist)
  // XSDebug("s3_correct_s2_ghist: %b, s3_correct_s1_ghist: %b, s2_correct_s1_ghist: %b\n",
  //   s3_correct_s2_ghist,  s3_correct_s1_ghist,  s2_correct_s1_ghist)
  XSDebug(p"s0_ghist_ptr: $s0_ghist_ptr\n")
  XSDebug(p"s1_ghist_ptr: $s1_ghist_ptr\n")
  XSDebug(p"s2_ghist_ptr: $s2_ghist_ptr\n")
  XSDebug(p"s3_ghist_ptr: $s3_ghist_ptr\n")

  io.ftq_to_bpu.update.bits.display(io.ftq_to_bpu.update.valid)
  io.ftq_to_bpu.redirect.bits.display(io.ftq_to_bpu.redirect.valid)


  XSPerfAccumulate("s2_redirect", s2_redirect)
  XSPerfAccumulate("s3_redirect", s3_redirect)

  val perfEvents = predictors.asInstanceOf[Composer].perfEvents.map(_._1).zip(predictors.asInstanceOf[Composer].perfinfo.perfEvents.perf_events)
  val perfinfo = IO(new Bundle(){
    val perfEvents = Output(new PerfEventsBundle(predictors.asInstanceOf[Composer].perfinfo.perfEvents.perf_events.length))
  })
  perfinfo.perfEvents := predictors.asInstanceOf[Composer].perfinfo.perfEvents

}