// XiangShan/src/main/scala/xiangshan/frontend/IFU.scala (revision 2199a01c65d5a7bf503c4b40771336a50a6f1122)
package xiangshan.frontend

import chisel3._
import chisel3.util._
import device.RAMHelper
import xiangshan._
import utils._
import xiangshan.cache._
import chisel3.experimental.chiselName
import freechips.rocketchip.tile.HasLazyRoCC
import chisel3.ExcitingUtils._

trait HasInstrMMIOConst extends HasXSParameter with HasIFUConst {
  def mmioBusWidth = 64
  def mmioBusBytes = mmioBusWidth / 8
  def mmioBeats = FetchWidth * 4 * 8 / mmioBusWidth
  def mmioMask  = VecInit(List.fill(PredictWidth)(true.B)).asUInt
  def mmioBusAligned(pc: UInt): UInt = align(pc, mmioBusBytes)
}

trait HasIFUConst extends HasXSParameter {
  val resetVector = 0x10000000L // TODO: set reset vector
  def align(pc: UInt, bytes: Int): UInt = Cat(pc(VAddrBits-1, log2Ceil(bytes)), 0.U(log2Ceil(bytes).W))
  val instBytes = if (HasCExtension) 2 else 4
  val instOffsetBits = log2Ceil(instBytes)
  val groupBytes = 64 // corresponds to the cache line size
  val groupOffsetBits = log2Ceil(groupBytes)
  val groupWidth = groupBytes / instBytes
  val packetBytes = PredictWidth * instBytes
  val packetOffsetBits = log2Ceil(packetBytes)
  def offsetInPacket(pc: UInt) = pc(packetOffsetBits-1, instOffsetBits)
  def packetIdx(pc: UInt) = pc(VAddrBits-1, log2Ceil(packetBytes))
  def groupAligned(pc: UInt)  = align(pc, groupBytes)
  def packetAligned(pc: UInt) = align(pc, packetBytes)
  def mask(pc: UInt): UInt = ((~(0.U(PredictWidth.W))) << offsetInPacket(pc))(PredictWidth-1,0)
  def snpc(pc: UInt): UInt = packetAligned(pc) + packetBytes.U

  val enableGhistRepair = true
  val IFUDebug = true
}
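// Worked example of the fetch-packet helpers above (a sketch, assuming
// PredictWidth = 8 and HasCExtension = true, i.e. instBytes = 2, packetBytes = 16):
//   pc = 0x80000006   ->  offsetInPacket(pc) = 3 (the 4th 2-byte slot)
//   mask(pc)          =  (~0.U(8.W) << 3)(7,0) = b11111000
//                        (slots before the entry point are masked off)
//   packetAligned(pc) = 0x80000000, snpc(pc) = 0x80000010 (start of the next packet)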

class GlobalHistory extends XSBundle {
  val predHist = UInt(HistoryLength.W)
  def update(sawNTBr: Bool, takenOnBr: Bool, hist: UInt = predHist): GlobalHistory = {
    val g = Wire(new GlobalHistory)
    val shifted = takenOnBr || sawNTBr
    g.predHist := Mux(shifted, (hist << 1) | takenOnBr.asUInt, hist)
    g
  }

  final def === (that: GlobalHistory): Bool = {
    predHist === that.predHist
  }

  final def =/= (that: GlobalHistory): Bool = !(this === that)

  implicit val name = "IFU"
  def debug(where: String) = XSDebug(p"[${where}_GlobalHistory] hist=${Binary(predHist)}\n")
  // override def toString(): String = "histPtr=%d, sawNTBr=%d, takenOnBr=%d, saveHalfRVI=%d".format(histPtr, sawNTBr, takenOnBr, saveHalfRVI)
}
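// update() semantics (a sketch): the history only shifts when the packet
// contains at least one branch -- either a taken branch (takenOnBr) or some
// not-taken branches (sawNTBr). E.g., with hist = b...0101:
//   update(sawNTBr = 1, takenOnBr = 0)  ->  b...1010   (shift in a 0)
//   update(sawNTBr = 0, takenOnBr = 1)  ->  b...1011   (shift in a 1)
//   update(sawNTBr = 0, takenOnBr = 0)  ->  b...0101   (unchanged)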


class IFUIO extends XSBundle {
  // to ibuffer
  val fetchPacket = DecoupledIO(new FetchPacket)
  // from backend
  val redirect = Flipped(ValidIO(UInt(VAddrBits.W)))
  val cfiUpdateInfo = Flipped(ValidIO(new CfiUpdateInfo))
  // to icache
  val icacheMemGrant = Flipped(DecoupledIO(new L1plusCacheResp))
  val fencei = Input(Bool())
  // from icache
  val icacheMemAcq = DecoupledIO(new L1plusCacheReq)
  val l1plusFlush = Output(Bool())
  val prefetchTrainReq = ValidIO(new IcacheMissReq)
  // to tlb
  val sfence = Input(new SfenceBundle)
  val tlbCsr = Input(new TlbCsrBundle)
  // from tlb
  val ptw = new TlbPtwIO
  // icache uncache
  val mmio_acquire = DecoupledIO(new InsUncacheReq)
  val mmio_grant  = Flipped(DecoupledIO(new InsUncacheResp))
  val mmio_flush = Output(Bool())
}

class PrevHalfInstr extends XSBundle {
  val taken = Bool()
  val ghInfo = new GlobalHistory()
  val fetchpc = UInt(VAddrBits.W) // only for debug
  val idx = UInt(VAddrBits.W) // only for debug
  val pc = UInt(VAddrBits.W)
  val npc = UInt(VAddrBits.W)
  val target = UInt(VAddrBits.W)
  val instr = UInt(16.W)
  val ipf = Bool()
  val meta = new BpuMeta
}
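// PrevHalfInstr records the lower 16 bits of an RVI instruction whose upper
// half falls into the next fetch packet: pc/npc locate the two halves, instr
// holds the half already fetched, and taken/target/meta carry the prediction
// made for it so it can be replayed once the second half arrives.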

@chiselName
class IFU extends XSModule with HasIFUConst {
  val io = IO(new IFUIO)
  val bpu = BPU(EnableBPU)
  val icache = Module(new ICache)

  io.ptw <> TLB(
    in = Seq(icache.io.tlb),
    sfence = io.sfence,
    csr = io.tlbCsr,
    width = 1,
    isDtlb = false,
    shouldBlock = true
  )

  val if2_redirect, if3_redirect, if4_redirect = WireInit(false.B)
  val if1_flush, if2_flush, if3_flush, if4_flush = WireInit(false.B)

  val icacheResp = icache.io.resp.bits

  if4_flush := io.redirect.valid
  if3_flush := if4_flush || if4_redirect
  if2_flush := if3_flush || if3_redirect
  if1_flush := if2_flush || if2_redirect
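  // Flush propagates backwards: a backend redirect flushes IF4, and each
  // stage's flush (or its own redirect) flushes every younger stage, so a
  // redirect raised at stage n restarts fetch in stages 1..n-1.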

  //********************** IF1 ****************************//
  val if1_valid = !reset.asBool && GTimer() > 500.U
  val if1_npc = WireInit(0.U(VAddrBits.W))
  val if2_ready = WireInit(false.B)
  val if2_valid = RegInit(init = false.B)
  val if2_allReady = WireInit(if2_ready && icache.io.req.ready)
  val if1_fire = (if1_valid && if2_allReady) && (icache.io.tlb.resp.valid || !if2_valid)
  val if1_can_go = if1_fire || if3_flush

  val if1_gh, if2_gh, if3_gh, if4_gh = Wire(new GlobalHistory)
  val if2_predicted_gh, if3_predicted_gh, if4_predicted_gh = Wire(new GlobalHistory)
  val final_gh = RegInit(0.U.asTypeOf(new GlobalHistory))
  val final_gh_bypass = WireInit(0.U.asTypeOf(new GlobalHistory))
  val flush_final_gh = WireInit(false.B)

  //********************** IF2 ****************************//
  val if2_allValid = if2_valid && icache.io.tlb.resp.valid
  val if3_ready = WireInit(false.B)
  val if2_fire = (if2_valid && if3_ready) && icache.io.tlb.resp.valid
  val if2_pc = RegEnable(next = if1_npc, init = resetVector.U, enable = if1_can_go)
  val if2_snpc = snpc(if2_pc)
  val if2_predHist = RegEnable(if1_gh.predHist, enable = if1_can_go)
  if2_ready := if3_ready || !if2_valid
  when (if1_can_go)     { if2_valid := true.B }
  .elsewhen (if2_flush) { if2_valid := false.B }
  .elsewhen (if2_fire)  { if2_valid := false.B }

  val npcGen = new PriorityMuxGenerator[UInt]
  npcGen.register(true.B, RegNext(if1_npc), Some("stallPC"))
  val if2_bp = bpu.io.out(0)

  // if taken, bp_redirect should be true
  // when taken on a half RVI, we suppress this redirect signal

  npcGen.register(if2_valid, Mux(if2_bp.taken, if2_bp.target, if2_snpc), Some("if2_target"))

  if2_predicted_gh := if2_gh.update(if2_bp.hasNotTakenBrs, if2_bp.takenOnBr)

  //********************** IF3 ****************************//
  // if3 should wait for the instruction resp to arrive
  val if3_valid = RegInit(init = false.B)
  val if4_ready = WireInit(false.B)
  val if3_allValid = if3_valid && icache.io.resp.valid
  val if3_fire = if3_allValid && if4_ready
  val if3_pc = RegEnable(if2_pc, if2_fire)
  val if3_snpc = RegEnable(if2_snpc, if2_fire)
  val if3_predHist = RegEnable(if2_predHist, enable = if2_fire)
  if3_ready := if4_ready && icache.io.resp.valid || !if3_valid
  when (if3_flush) {
    if3_valid := false.B
  }.elsewhen (if2_fire && !if2_flush) {
    if3_valid := true.B
  }.elsewhen (if3_fire) {
    if3_valid := false.B
  }

  val if3_bp = bpu.io.out(1)
  if3_predicted_gh := if3_gh.update(if3_bp.hasNotTakenBrs, if3_bp.takenOnBr)


  val prevHalfInstrReq = WireInit(0.U.asTypeOf(ValidUndirectioned(new PrevHalfInstr)))
  // only valid when if4_fire
  val hasPrevHalfInstrReq = prevHalfInstrReq.valid && HasCExtension.B

  val if3_prevHalfInstr = RegInit(0.U.asTypeOf(ValidUndirectioned(new PrevHalfInstr)))

  // a 32-bit instr crosses 2 pages, and the upper 16 bits trigger a page fault
  val crossPageIPF = WireInit(false.B)

  val if3_pendingPrevHalfInstr = if3_prevHalfInstr.valid && HasCExtension.B

  // the previous half of an RVI instruction waits until it meets its last half
  val if3_prevHalfInstrMet = if3_pendingPrevHalfInstr && if3_prevHalfInstr.bits.npc === if3_pc && if3_valid
  // set to invalid once consumed or on a redirect from the backend
  val if3_prevHalfConsumed = if3_prevHalfInstrMet && if3_fire
  val if3_prevHalfFlush = if4_flush
  when (if3_prevHalfFlush) {
    if3_prevHalfInstr.valid := false.B
  }.elsewhen (hasPrevHalfInstrReq) {
    if3_prevHalfInstr.valid := true.B
  }.elsewhen (if3_prevHalfConsumed) {
    if3_prevHalfInstr.valid := false.B
  }
  when (hasPrevHalfInstrReq) {
    if3_prevHalfInstr.bits := prevHalfInstrReq.bits
  }
  // when bp signals a redirect, we distinguish between taken and not taken
  // if taken and saveHalfRVI is true, we do not redirect to the target

  class IF3_PC_COMP extends XSModule {
    val io = IO(new Bundle {
      val if2_pc = Input(UInt(VAddrBits.W))
      val pc     = Input(UInt(VAddrBits.W))
      val if2_valid = Input(Bool())
      val res = Output(Bool())
    })
    io.res := !io.if2_valid || io.if2_valid && io.if2_pc =/= io.pc
  }
  def if3_nextValidPCNotEquals(pc: UInt) = {
    val comp = Module(new IF3_PC_COMP)
    comp.io.if2_pc := if2_pc
    comp.io.pc     := pc
    comp.io.if2_valid := if2_valid
    comp.io.res
  }
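  // if3_nextValidPCNotEquals asks: "is the packet currently in IF2 NOT the
  // one IF3 expects next?" -- true when IF2 is empty or holds a different pc.
  // Wrapping the comparator in a module instead of inlining the expression
  // appears to be a deliberate choice, presumably so the PredictWidth copies
  // instantiated below are easy to identify in the generated netlist.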

  val if3_predTakenRedirectVec = VecInit((0 until PredictWidth).map(i => !if3_pendingPrevHalfInstr && if3_bp.realTakens(i) && if3_nextValidPCNotEquals(if3_bp.targets(i))))
  val if3_prevHalfMetRedirect    = if3_pendingPrevHalfInstr && if3_prevHalfInstrMet && if3_prevHalfInstr.bits.taken && if3_nextValidPCNotEquals(if3_prevHalfInstr.bits.target)
  val if3_prevHalfNotMetRedirect = if3_pendingPrevHalfInstr && !if3_prevHalfInstrMet && if3_nextValidPCNotEquals(if3_prevHalfInstr.bits.npc)
  val if3_predTakenRedirect    = ParallelOR(if3_predTakenRedirectVec)
  val if3_predNotTakenRedirect = !if3_pendingPrevHalfInstr && !if3_bp.taken && if3_nextValidPCNotEquals(if3_snpc)
  // when pendingPrevHalfInstr, if3_GHInfo is set to the info of the last prev half instr
  // val if3_ghInfoNotIdenticalRedirect = !if3_pendingPrevHalfInstr && if3_GHInfo =/= if3_lastGHInfo && enableGhistRepair.B

  if3_redirect := if3_valid && (
                    // prevHalf is consumed but the next packet is not where it was meant to be
                    // we do not handle this condition because of the burden of building a correct GHInfo
                    // prevHalfMetRedirect ||
                    // prevHalf does not match if3_pc and the next fetch packet is not snpc
                    if3_prevHalfNotMetRedirect && HasCExtension.B ||
                    // pred taken and the next fetch packet is not the predicted target
                    if3_predTakenRedirect ||
                    // pred not taken and the next fetch packet is not snpc
                    if3_predNotTakenRedirect
                    // GHInfo from the last pred does not correspond to this packet
                    // if3_ghInfoNotIdenticalRedirect
                  )

  val if3_target = WireInit(if3_snpc)

  if3_target := Mux1H(Seq((if3_prevHalfNotMetRedirect -> if3_prevHalfInstr.bits.npc),
                          (if3_predTakenRedirect      -> if3_bp.target),
                          (if3_predNotTakenRedirect   -> if3_snpc)))
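  // Note the Mux1H: the three redirect causes are mutually exclusive by
  // construction (each term is gated on if3_pendingPrevHalfInstr and/or
  // if3_bp.taken), so a one-hot mux is safe here; when none is set,
  // if3_redirect stays low and if3_target is simply ignored.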

  npcGen.register(if3_redirect, if3_target, Some("if3_target"))


  //********************** IF4 ****************************//
  val if4_pd = RegEnable(icache.io.pd_out, if3_fire)
  val if4_ipf = RegEnable(icacheResp.ipf || if3_prevHalfInstrMet && if3_prevHalfInstr.bits.ipf, if3_fire)
  val if4_acf = RegEnable(icacheResp.acf, if3_fire)
  val if4_crossPageIPF = RegEnable(crossPageIPF, if3_fire)
  val if4_valid = RegInit(false.B)
  val if4_fire = if4_valid && io.fetchPacket.ready
  val if4_pc = RegEnable(if3_pc, if3_fire)
  val if4_snpc = RegEnable(if3_snpc, if3_fire)
  // this is the real mask given by the icache
  val if4_mask = RegEnable(icacheResp.mask, if3_fire)


  val if4_predHist = RegEnable(if3_predHist, enable = if3_fire)
  // wait until prevHalfInstr is written into the reg
  if4_ready := (io.fetchPacket.ready && !hasPrevHalfInstrReq || !if4_valid) && GTimer() > 500.U
  when (if4_flush) {
    if4_valid := false.B
  }.elsewhen (if3_fire && !if3_flush) {
    if4_valid := Mux(if3_pendingPrevHalfInstr, if3_prevHalfInstrMet, true.B)
  }.elsewhen (if4_fire) {
    if4_valid := false.B
  }

  val if4_bp = Wire(new BranchPrediction)
  if4_bp := bpu.io.out(2)

  if4_predicted_gh := if4_gh.update(if4_bp.hasNotTakenBrs, if4_bp.takenOnBr)

  def jal_offset(inst: UInt, rvc: Bool): SInt = {
    Mux(rvc,
      Cat(inst(12), inst(8), inst(10, 9), inst(6), inst(7), inst(2), inst(11), inst(5, 3), 0.U(1.W)).asSInt(),
      Cat(inst(31), inst(19, 12), inst(20), inst(30, 21), 0.U(1.W)).asSInt()
    )
  }
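  // jal_offset undoes the immediate-bit scrambling of the two jump encodings:
  //   C.J/C.JAL: offset[11|10|9:8|7|6|5|4|3:1] <- inst[12|8|10:9|6|7|2|11|5:3]
  //   JAL:       offset[20|19:12|11|10:1]      <- inst[31|19:12|20|30:21]
  // The Cat appends a 0 (targets are 2-byte aligned) and asSInt sign-extends
  // from the MSB, which is the immediate's sign bit in both encodings.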
  val if4_instrs = if4_pd.instrs
  val if4_jals = if4_bp.jalMask
  val if4_jal_tgts = VecInit((0 until PredictWidth).map(i => (if4_pd.pc(i).asSInt + jal_offset(if4_instrs(i), if4_pd.pd(i).isRVC)).asUInt))

  (0 until PredictWidth).foreach { i =>
    when (if4_jals(i)) {
      if4_bp.targets(i) := if4_jal_tgts(i)
    }
  }
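  // For direct jumps the target computed here from the instruction bytes is
  // exact, so it overrides whatever target the BPU (e.g. its BTB) predicted.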

  // we need this to tell the BPU the prediction of the prev half,
  // because the prediction is associated with the start of each inst
  val if4_prevHalfInstr = RegInit(0.U.asTypeOf(ValidUndirectioned(new PrevHalfInstr)))
  val if4_pendingPrevHalfInstr = if4_prevHalfInstr.valid && HasCExtension.B
  val if4_prevHalfInstrMet = if4_pendingPrevHalfInstr && if4_valid
  val if4_prevHalfConsumed = if4_prevHalfInstrMet && if4_fire
  val if4_prevHalfFlush = if4_flush

  val if4_takenPrevHalf = WireInit(if4_prevHalfInstrMet && if4_prevHalfInstr.bits.taken)
  when (if4_prevHalfFlush) {
    if4_prevHalfInstr.valid := false.B
  }.elsewhen (if3_prevHalfConsumed) {
    if4_prevHalfInstr.valid := if3_prevHalfInstr.valid
  }.elsewhen (if4_prevHalfConsumed) {
    if4_prevHalfInstr.valid := false.B
  }

  when (if3_prevHalfConsumed) {
    if4_prevHalfInstr.bits := if3_prevHalfInstr.bits
  }

  prevHalfInstrReq.valid := if4_fire && if4_bp.saveHalfRVI && HasCExtension.B
  val idx = if4_bp.lastHalfRVIIdx

  // this is the result of the last half RVI
  prevHalfInstrReq.bits.taken := if4_bp.lastHalfRVITaken
  prevHalfInstrReq.bits.ghInfo := if4_gh
  prevHalfInstrReq.bits.fetchpc := if4_pc
  prevHalfInstrReq.bits.idx := idx
  prevHalfInstrReq.bits.pc := if4_pd.pc(idx)
  prevHalfInstrReq.bits.npc := if4_pd.pc(idx) + 2.U
  prevHalfInstrReq.bits.target := if4_bp.lastHalfRVITarget
  prevHalfInstrReq.bits.instr := if4_pd.instrs(idx)(15, 0)
  prevHalfInstrReq.bits.ipf := if4_ipf
  prevHalfInstrReq.bits.meta := bpu.io.bpuMeta(idx)

  class IF4_PC_COMP extends XSModule {
    val io = IO(new Bundle {
      val if2_pc = Input(UInt(VAddrBits.W))
      val if3_pc = Input(UInt(VAddrBits.W))
      val pc     = Input(UInt(VAddrBits.W))
      val if2_valid = Input(Bool())
      val if3_valid = Input(Bool())
      val res = Output(Bool())
    })
    io.res := io.if3_valid  && io.if3_pc =/= io.pc ||
              !io.if3_valid && (io.if2_valid && io.if2_pc =/= io.pc) ||
              !io.if3_valid && !io.if2_valid
  }
  def if4_nextValidPCNotEquals(pc: UInt) = {
    val comp = Module(new IF4_PC_COMP)
    comp.io.if2_pc := if2_pc
    comp.io.if3_pc := if3_pc
    comp.io.pc     := pc
    comp.io.if2_valid := if2_valid
    comp.io.if3_valid := if3_valid
    comp.io.res
  }

  val if4_predTakenRedirectVec = VecInit((0 until PredictWidth).map(i => if4_bp.realTakens(i) && if4_nextValidPCNotEquals(if4_bp.targets(i))))

  val if4_prevHalfNextNotMet = hasPrevHalfInstrReq && if4_nextValidPCNotEquals(prevHalfInstrReq.bits.pc + 2.U)
  val if4_predTakenRedirect = ParallelORR(if4_predTakenRedirectVec)
  val if4_predNotTakenRedirect = !if4_bp.taken && if4_nextValidPCNotEquals(if4_snpc)
  // val if4_ghInfoNotIdenticalRedirect = if4_GHInfo =/= if4_lastGHInfo && enableGhistRepair.B

  if4_redirect := if4_valid && (
                    // when if4 has a lastHalfRVI, but the next fetch packet is not snpc
                    // if4_prevHalfNextNotMet ||
                    // when if4 preds taken, but the pc of the next fetch packet is not the target
                    if4_predTakenRedirect ||
                    // when if4 preds not taken, but the pc of the next fetch packet is not snpc
                    if4_predNotTakenRedirect
                    // GHInfo from the last pred does not correspond to this packet
                    // if4_ghInfoNotIdenticalRedirect
                  )

  val if4_target = WireInit(if4_snpc)

  if4_target := Mux(if4_bp.taken, if4_bp.target, if4_snpc)

  npcGen.register(if4_redirect, if4_target, Some("if4_target"))

  when (if4_fire) {
    final_gh := if4_predicted_gh
  }
  if4_gh := Mux(flush_final_gh, final_gh_bypass, final_gh)
  if3_gh := Mux(if4_valid && !if4_flush, if4_predicted_gh, if4_gh)
  if2_gh := Mux(if3_valid && !if3_flush, if3_predicted_gh, if3_gh)
  if1_gh := Mux(if2_valid && !if2_flush, if2_predicted_gh, if2_gh)
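  // Global-history forwarding network: each stage sees the newest speculative
  // history available. IF4 uses final_gh (bypassed with the recovered value on
  // a backend flush), and each younger stage takes the next older stage's
  // post-prediction history whenever that stage holds a valid, unflushed packet.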

  val cfiUpdate = io.cfiUpdateInfo
  when (cfiUpdate.valid && (cfiUpdate.bits.isMisPred || cfiUpdate.bits.isReplay)) {
    val b = cfiUpdate.bits
    val oldGh = b.bpuMeta.hist
    val sawNTBr = b.bpuMeta.sawNotTakenBranch
    val isBr = b.pd.isBr
    val taken = Mux(cfiUpdate.bits.isReplay, b.bpuMeta.predTaken, b.taken)
    val updatedGh = oldGh.update(sawNTBr, isBr && taken)
    final_gh := updatedGh
    final_gh_bypass := updatedGh
    flush_final_gh := true.B
  }
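  // On a misprediction or replay, the global history is rebuilt from the
  // snapshot carried in bpuMeta: re-apply the update for the offending
  // instruction on top of its old history, then force every stage to resume
  // from that value via flush_final_gh / final_gh_bypass.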

  npcGen.register(io.redirect.valid, io.redirect.bits, Some("backend_redirect"))
  npcGen.register(RegNext(reset.asBool) && !reset.asBool, resetVector.U(VAddrBits.W), Some("reset_vector"))

  if1_npc := npcGen()
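  // npcGen collects next-pc candidates; if1_npc resolves to one of them each
  // cycle. Judging by the registration order (stallPC first, reset_vector
  // last), later sources win: reset_vector > backend_redirect > if4_target >
  // if3_target > if2_target > stallPC, with the always-valid stallPC acting
  // as the fallback that holds if1_npc when nothing fires.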


  icache.io.req.valid := if1_can_go
  icache.io.resp.ready := if4_ready
  icache.io.req.bits.addr := if1_npc
  icache.io.req.bits.mask := mask(if1_npc)
  icache.io.flush := Cat(if3_flush, if2_flush)
  icache.io.mem_grant <> io.icacheMemGrant
  icache.io.fencei := io.fencei
  icache.io.prev.valid := if3_prevHalfInstrMet
  icache.io.prev.bits := if3_prevHalfInstr.bits.instr
  icache.io.prev_ipf := if3_prevHalfInstr.bits.ipf
  icache.io.prev_pc := if3_prevHalfInstr.bits.pc
  icache.io.mmio_acquire <> io.mmio_acquire
  icache.io.mmio_grant <> io.mmio_grant
  icache.io.mmio_flush <> io.mmio_flush
  io.icacheMemAcq <> icache.io.mem_acquire
  io.l1plusFlush := icache.io.l1plusflush
  io.prefetchTrainReq := icache.io.prefetchTrainReq

  bpu.io.cfiUpdateInfo <> io.cfiUpdateInfo

  bpu.io.inFire(0) := if1_can_go
  bpu.io.inFire(1) := if2_fire
  bpu.io.inFire(2) := if3_fire
  bpu.io.inFire(3) := if4_fire
  bpu.io.in.pc := if1_npc
  bpu.io.in.hist := if1_gh.asUInt
  bpu.io.in.inMask := mask(if1_npc)
  bpu.io.predecode.mask := if4_pd.mask
  bpu.io.predecode.lastHalf := if4_pd.lastHalf
  bpu.io.predecode.pd := if4_pd.pd
  bpu.io.predecode.hasLastHalfRVI := if4_prevHalfInstrMet
  bpu.io.realMask := if4_mask
  bpu.io.prevHalf := if4_prevHalfInstr


  when (if3_prevHalfInstrMet && icacheResp.ipf && !if3_prevHalfInstr.bits.ipf) {
    crossPageIPF := true.B // higher 16 bits page fault
  }

  // RVC expand
  val expandedInstrs = Wire(Vec(PredictWidth, UInt(32.W)))
  for (i <- 0 until PredictWidth) {
    val expander = Module(new RVCExpander)
    expander.io.in := if4_pd.instrs(i)
    expandedInstrs(i) := expander.io.out.bits
  }
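  // Each 16-bit slot gets its own RVCExpander, which decompresses an RVC
  // instruction into its 32-bit equivalent; for slots already holding full
  // RVI words the expander seemingly passes the bits through unchanged, so
  // the ibuffer always receives 32-bit instructions.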

  val fetchPacketValid = if4_valid && !io.redirect.valid
  val fetchPacketWire = Wire(new FetchPacket)

  fetchPacketWire.instrs := expandedInstrs
  fetchPacketWire.mask := if4_pd.mask & (Fill(PredictWidth, !if4_bp.taken) | (Fill(PredictWidth, 1.U(1.W)) >> (~if4_bp.jmpIdx)))
  fetchPacketWire.pdmask := if4_pd.mask

  fetchPacketWire.pc := if4_pd.pc
  (0 until PredictWidth).foreach(i => fetchPacketWire.pnpc(i) := if4_pd.pc(i) + Mux(if4_pd.pd(i).isRVC, 2.U, 4.U))
  when (if4_bp.taken) {
    fetchPacketWire.pnpc(if4_bp.jmpIdx) := if4_bp.target
  }
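  // The mask expression above clears every slot after the predicted-taken
  // instruction: when if4_bp.taken, `Fill(PredictWidth, 1.U) >> (~jmpIdx)`
  // yields jmpIdx+1 low ones (since ~jmpIdx = PredictWidth-1-jmpIdx for a
  // log2(PredictWidth)-bit index), keeping slots 0..jmpIdx; otherwise the
  // Fill(!taken) term keeps the whole packet. The pnpc of the taken slot is
  // likewise patched to the predicted target.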
  fetchPacketWire.bpuMeta := bpu.io.bpuMeta
  // save it for update
  when (if4_pendingPrevHalfInstr) {
    fetchPacketWire.bpuMeta(0) := if4_prevHalfInstr.bits.meta
  }
  (0 until PredictWidth).foreach(i => {
    val meta = fetchPacketWire.bpuMeta(i)
    meta.hist := final_gh
    meta.predHist := if4_predHist.asTypeOf(new GlobalHistory)
    meta.predTaken := if4_bp.takens(i)
  })
  fetchPacketWire.pd := if4_pd.pd
  fetchPacketWire.ipf := if4_ipf
  fetchPacketWire.acf := if4_acf
  fetchPacketWire.crossPageIPFFix := if4_crossPageIPF

  // predTaken Vec
  fetchPacketWire.predTaken := if4_bp.taken

  io.fetchPacket.bits := fetchPacketWire
  io.fetchPacket.valid := fetchPacketValid

//  if(IFUDebug) {
    val predictor_s3 = RegEnable(Mux(if3_redirect, 1.U(log2Up(4).W), 0.U(log2Up(4).W)), if3_fire)
    val predictor_s4 = Mux(if4_redirect, 2.U, predictor_s3)
    val predictor = predictor_s4

    fetchPacketWire.bpuMeta.map(_.predictor := predictor)
//  }

  // val predRight = cfiUpdate.valid && !cfiUpdate.bits.isMisPred && !cfiUpdate.bits.isReplay
  // val predWrong = cfiUpdate.valid && cfiUpdate.bits.isMisPred && !cfiUpdate.bits.isReplay

  // val ubtbRight = predRight && cfiUpdate.bits.bpuMeta.predictor === 0.U
  // val ubtbWrong = predWrong && cfiUpdate.bits.bpuMeta.predictor === 0.U
  // val btbRight  = predRight && cfiUpdate.bits.bpuMeta.predictor === 1.U
  // val btbWrong  = predWrong && cfiUpdate.bits.bpuMeta.predictor === 1.U
  // val tageRight = predRight && cfiUpdate.bits.bpuMeta.predictor === 2.U
  // val tageWrong = predWrong && cfiUpdate.bits.bpuMeta.predictor === 2.U
  // val loopRight = predRight && cfiUpdate.bits.bpuMeta.predictor === 3.U
  // val loopWrong = predWrong && cfiUpdate.bits.bpuMeta.predictor === 3.U

  // ExcitingUtils.addSource(ubtbRight, "perfCntubtbRight", Perf)
  // ExcitingUtils.addSource(ubtbWrong, "perfCntubtbWrong", Perf)
  // ExcitingUtils.addSource(btbRight, "perfCntbtbRight", Perf)
  // ExcitingUtils.addSource(btbWrong, "perfCntbtbWrong", Perf)
  // ExcitingUtils.addSource(tageRight, "perfCnttageRight", Perf)
  // ExcitingUtils.addSource(tageWrong, "perfCnttageWrong", Perf)
  // ExcitingUtils.addSource(loopRight, "perfCntloopRight", Perf)
  // ExcitingUtils.addSource(loopWrong, "perfCntloopWrong", Perf)

  // debug info
  if (IFUDebug) {
    XSDebug(RegNext(reset.asBool) && !reset.asBool, "Resetting...\n")
    XSDebug(icache.io.flush(0).asBool, "Flush icache stage2...\n")
    XSDebug(icache.io.flush(1).asBool, "Flush icache stage3...\n")
    XSDebug(io.redirect.valid, p"Redirect from backend! target=${Hexadecimal(io.redirect.bits)}\n")

    XSDebug("[IF1] v=%d     fire=%d  cango=%d          flush=%d pc=%x mask=%b\n", if1_valid, if1_fire, if1_can_go, if1_flush, if1_npc, mask(if1_npc))
    XSDebug("[IF2] v=%d r=%d fire=%d redirect=%d flush=%d pc=%x snpc=%x\n", if2_valid, if2_ready, if2_fire, if2_redirect, if2_flush, if2_pc, if2_snpc)
    XSDebug("[IF3] v=%d r=%d fire=%d redirect=%d flush=%d pc=%x crossPageIPF=%d sawNTBrs=%d\n", if3_valid, if3_ready, if3_fire, if3_redirect, if3_flush, if3_pc, crossPageIPF, if3_bp.hasNotTakenBrs)
    XSDebug("[IF4] v=%d r=%d fire=%d redirect=%d flush=%d pc=%x crossPageIPF=%d sawNTBrs=%d\n", if4_valid, if4_ready, if4_fire, if4_redirect, if4_flush, if4_pc, if4_crossPageIPF, if4_bp.hasNotTakenBrs)
    XSDebug("[predictor] predictor_s3=%d, predictor_s4=%d, predictor=%d\n", predictor_s3, predictor_s4, predictor)
    XSDebug("[IF1][icacheReq] v=%d r=%d addr=%x\n", icache.io.req.valid, icache.io.req.ready, icache.io.req.bits.addr)
    XSDebug("[IF1][ghr] hist=%b\n", if1_gh.asUInt)
    XSDebug("[IF1][ghr] extHist=%b\n\n", if1_gh.asUInt)

    XSDebug("[IF2][bp] taken=%d jmpIdx=%d hasNTBrs=%d target=%x saveHalfRVI=%d\n\n", if2_bp.taken, if2_bp.jmpIdx, if2_bp.hasNotTakenBrs, if2_bp.target, if2_bp.saveHalfRVI)
    if2_gh.debug("if2")

    XSDebug("[IF3][icacheResp] v=%d r=%d pc=%x mask=%b\n", icache.io.resp.valid, icache.io.resp.ready, icache.io.resp.bits.pc, icache.io.resp.bits.mask)
    XSDebug("[IF3][bp] taken=%d jmpIdx=%d hasNTBrs=%d target=%x saveHalfRVI=%d\n", if3_bp.taken, if3_bp.jmpIdx, if3_bp.hasNotTakenBrs, if3_bp.target, if3_bp.saveHalfRVI)
    XSDebug("[IF3][redirect]: v=%d, prevMet=%d, prevNMet=%d, predT=%d, predNT=%d\n", if3_redirect, if3_prevHalfMetRedirect, if3_prevHalfNotMetRedirect, if3_predTakenRedirect, if3_predNotTakenRedirect)
    // XSDebug("[IF3][prevHalfInstr] v=%d redirect=%d fetchpc=%x idx=%d tgt=%x taken=%d instr=%x\n\n",
    //   prev_half_valid, prev_half_redirect, prev_half_fetchpc, prev_half_idx, prev_half_tgt, prev_half_taken, prev_half_instr)
    XSDebug("[IF3][if3_prevHalfInstr] v=%d taken=%d fetchpc=%x idx=%d pc=%x npc=%x tgt=%x instr=%x ipf=%d\n\n",
      if3_prevHalfInstr.valid, if3_prevHalfInstr.bits.taken, if3_prevHalfInstr.bits.fetchpc, if3_prevHalfInstr.bits.idx, if3_prevHalfInstr.bits.pc, if3_prevHalfInstr.bits.npc, if3_prevHalfInstr.bits.target, if3_prevHalfInstr.bits.instr, if3_prevHalfInstr.bits.ipf)
    if3_gh.debug("if3")

    XSDebug("[IF4][predecode] mask=%b\n", if4_pd.mask)
    XSDebug("[IF4][snpc]: %x, realMask=%b\n", if4_snpc, if4_mask)
    XSDebug("[IF4][bp] taken=%d jmpIdx=%d hasNTBrs=%d target=%x saveHalfRVI=%d\n", if4_bp.taken, if4_bp.jmpIdx, if4_bp.hasNotTakenBrs, if4_bp.target, if4_bp.saveHalfRVI)
    XSDebug("[IF4][redirect]: v=%d, prevNotMet=%d, predT=%d, predNT=%d\n", if4_redirect, if4_prevHalfNextNotMet, if4_predTakenRedirect, if4_predNotTakenRedirect)
    XSDebug(if4_pd.pd(if4_bp.jmpIdx).isJal && if4_bp.taken, "[IF4] cfi is jal!  instr=%x target=%x\n", if4_instrs(if4_bp.jmpIdx), if4_jal_tgts(if4_bp.jmpIdx))
    XSDebug("[IF4][ prevHalfInstrReq] v=%d taken=%d fetchpc=%x idx=%d pc=%x npc=%x tgt=%x instr=%x ipf=%d\n",
      prevHalfInstrReq.valid, prevHalfInstrReq.bits.taken, prevHalfInstrReq.bits.fetchpc, prevHalfInstrReq.bits.idx, prevHalfInstrReq.bits.pc, prevHalfInstrReq.bits.npc, prevHalfInstrReq.bits.target, prevHalfInstrReq.bits.instr, prevHalfInstrReq.bits.ipf)
    XSDebug("[IF4][if4_prevHalfInstr] v=%d taken=%d fetchpc=%x idx=%d pc=%x npc=%x tgt=%x instr=%x ipf=%d\n",
      if4_prevHalfInstr.valid, if4_prevHalfInstr.bits.taken, if4_prevHalfInstr.bits.fetchpc, if4_prevHalfInstr.bits.idx, if4_prevHalfInstr.bits.pc, if4_prevHalfInstr.bits.npc, if4_prevHalfInstr.bits.target, if4_prevHalfInstr.bits.instr, if4_prevHalfInstr.bits.ipf)
    if4_gh.debug("if4")
    XSDebug(io.fetchPacket.fire(), "[IF4][fetchPacket] v=%d r=%d mask=%b ipf=%d acf=%d crossPageIPF=%d\n",
      io.fetchPacket.valid, io.fetchPacket.ready, io.fetchPacket.bits.mask, io.fetchPacket.bits.ipf, io.fetchPacket.bits.acf, io.fetchPacket.bits.crossPageIPFFix)
    for (i <- 0 until PredictWidth) {
      XSDebug(io.fetchPacket.fire(), "[IF4][fetchPacket] %b %x pc=%x pnpc=%x pd: rvc=%d brType=%b call=%d ret=%d\n",
        io.fetchPacket.bits.mask(i),
        io.fetchPacket.bits.instrs(i),
        io.fetchPacket.bits.pc(i),
        io.fetchPacket.bits.pnpc(i),
        io.fetchPacket.bits.pd(i).isRVC,
        io.fetchPacket.bits.pd(i).brType,
        io.fetchPacket.bits.pd(i).isCall,
        io.fetchPacket.bits.pd(i).isRet
      )
    }
  }
}