// File: /XiangShan/src/main/scala/xiangshan/frontend/IFU.scala (revision dcbc69cb2a7ea07707ede3d8f7c74421ef450202)
/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.frontend

import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.util._
import xiangshan._
import xiangshan.cache.mmu._
import xiangshan.frontend.icache._
import utils._
import xiangshan.backend.fu.{PMPReqBundle, PMPRespBundle}

trait HasInstrMMIOConst extends HasXSParameter with HasIFUConst {
  def mmioBusWidth = 64
  def mmioBusBytes = mmioBusWidth / 8
  def maxInstrLen = 32
}

trait HasIFUConst extends HasXSParameter {
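  // addrAlign clears the low log2Ceil(bytes) bits of `addr`, keeping bits
  // [highest-1 : log2Ceil(bytes)]. Worked example with illustrative values:
  // for bytes = 64, addrAlign(0x80001234.U, 64, VAddrBits) yields 0x80001200,
  // i.e. the address rounded down to its 64-byte cacheline boundary.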
  def addrAlign(addr: UInt, bytes: Int, highest: Int): UInt = Cat(addr(highest - 1, log2Ceil(bytes)), 0.U(log2Ceil(bytes).W))
  def fetchQueueSize = 2
}

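// IfuPtr is a two-entry circular-queue pointer: `value` indexes the entry and
// `flag` flips on each wrap-around, so equal values with different flags mean
// "same slot, one lap apart". A minimal usage sketch (hypothetical values,
// assuming CircularQueuePtr's `+` arithmetic and the isBefore helper from
// HasCircularQueuePtrHelper):
//   val p = IfuPtr(f = false.B, v = 0.U)
//   val q = p + 2.U    // wraps around: q.value === 0.U, q.flag === true.B
//   isBefore(p, q)     // true.B: p denotes the older entry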
class IfuPtr(implicit p: Parameters) extends CircularQueuePtr[IfuPtr](entries = 2) {
  override def cloneType = (new IfuPtr).asInstanceOf[this.type]
}

object IfuPtr {
  def apply(f: Bool, v: UInt)(implicit p: Parameters): IfuPtr = {
    val ptr = Wire(new IfuPtr)
    ptr.flag := f
    ptr.value := v
    ptr
  }
  def inverse(ptr: IfuPtr)(implicit p: Parameters): IfuPtr = {
    apply(!ptr.flag, ptr.value)
  }
}

class IfuToFtqIO(implicit p: Parameters) extends XSBundle {
  val pdWb = Valid(new PredecodeWritebackBundle)
}

class FtqInterface(implicit p: Parameters) extends XSBundle {
  val fromFtq = Flipped(new FtqToIfuIO)
  val toFtq   = new IfuToFtqIO
}

class UncacheInterface(implicit p: Parameters) extends XSBundle {
  val fromUncache = Flipped(DecoupledIO(new InsUncacheResp))
  val toUncache   = DecoupledIO(new InsUncacheReq)
}

class NewIFUIO(implicit p: Parameters) extends XSBundle {
  val ftqInter         = new FtqInterface
  val icacheInter      = Vec(2, Flipped(new ICacheMainPipeBundle))
  val icacheStop       = Output(Bool())
  val icachePerfInfo   = Input(new ICachePerfInfo)
  val toIbuffer        = Decoupled(new FetchToIBuffer)
  val uncacheInter     = new UncacheInterface
  val frontendTrigger  = Flipped(new FrontendTdataDistributeIO)
  val csrTriggerEnable = Input(Vec(4, Bool()))
  val rob_commits      = Flipped(Vec(CommitWidth, Valid(new RobCommitInfo)))
}

// Records the situation in which fallThruAddr falls into
// the middle of an RVI instruction.
class LastHalfInfo(implicit p: Parameters) extends XSBundle {
  val valid = Bool()
  val middlePC = UInt(VAddrBits.W)
  def matchThisBlock(startAddr: UInt) = valid && middlePC === startAddr
}
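
// Example of the case LastHalfInfo records (hypothetical addresses): an RVI
// instruction starting at 0x3e occupies the last two bytes of its fetch block
// plus the first two bytes of the next one, so the IFU sets valid := true.B
// and middlePC := 0x40.U; matchThisBlock(0x40.U) then flags the follow-on
// fetch packet as starting mid-instruction.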

class IfuToPreDecode(implicit p: Parameters) extends XSBundle {
  val data          = if (HasCExtension) Vec(PredictWidth + 1, UInt(16.W)) else Vec(PredictWidth, UInt(32.W))
  val startAddr     = UInt(VAddrBits.W)
  val fallThruAddr  = UInt(VAddrBits.W)
  val fallThruError = Bool()
  val isDoubleLine  = Bool()
  val ftqOffset     = Valid(UInt(log2Ceil(PredictWidth).W))
  val target        = UInt(VAddrBits.W)
  val pageFault     = Vec(2, Bool())
  val accessFault   = Vec(2, Bool())
  val instValid     = Bool()
  val lastHalfMatch = Bool()
  val oversize      = Bool()
  val mmio          = Bool()
  val frontendTrigger  = new FrontendTdataDistributeIO
  val csrTriggerEnable = Vec(4, Bool())
}

class NewIFU(implicit p: Parameters) extends XSModule
  with HasICacheParameters
  with HasIFUConst
  with HasCircularQueuePtrHelper
{
  println(s"icache ways: ${nWays} sets: ${nSets}")
  val io = IO(new NewIFUIO)
  val (toFtq, fromFtq)         = (io.ftqInter.toFtq, io.ftqInter.fromFtq)
  val (toICache, fromICache)   = (VecInit(io.icacheInter.map(_.req)), VecInit(io.icacheInter.map(_.resp)))
  val (toUncache, fromUncache) = (io.uncacheInter.toUncache, io.uncacheInter.fromUncache)

  def isCrossLineReq(start: UInt, end: UInt): Bool = start(blockOffBits) ^ end(blockOffBits)

  def isLastInCacheline(fallThruAddr: UInt): Bool = fallThruAddr(blockOffBits - 1, 1) === 0.U
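
  // What these helpers detect, assuming hypothetical 64-byte lines
  // (blockOffBits = 6):
  //   isCrossLineReq(0x1038.U, 0x1048.U)  // true.B: bit 6 differs, the fetch
  //                                       // block straddles two cachelines
  //   isLastInCacheline(0x1040.U)         // true.B: offset bits [5:1] are 0,
  //                                       // fall-through is a line boundary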

  class TlbExcept(implicit p: Parameters) extends XSBundle {
    val pageFault   = Bool()
    val accessFault = Bool()
    val mmio        = Bool()
  }

  //---------------------------------------------
  //  Fetch Stage 0 :
  //  * Send the request to the ICache meta/data arrays
  //  * Check whether a double-line (cross-cacheline) fetch is needed
  //---------------------------------------------

  val f0_valid      = fromFtq.req.valid
  val f0_ftq_req    = fromFtq.req.bits
  val f0_situation  = VecInit(Seq(isCrossLineReq(f0_ftq_req.startAddr, f0_ftq_req.fallThruAddr), isLastInCacheline(f0_ftq_req.fallThruAddr)))
  val f0_doubleLine = f0_situation(0) || f0_situation(1)
  val f0_vSetIdx    = VecInit(get_idx(f0_ftq_req.startAddr), get_idx(f0_ftq_req.fallThruAddr))
  val f0_fire       = fromFtq.req.fire()

  val f0_flush, f1_flush, f2_flush, f3_flush = WireInit(false.B)
  val from_bpu_f0_flush, from_bpu_f1_flush, from_bpu_f2_flush, from_bpu_f3_flush = WireInit(false.B)

  from_bpu_f0_flush := fromFtq.flushFromBpu.shouldFlushByStage2(f0_ftq_req.ftqIdx) ||
                       fromFtq.flushFromBpu.shouldFlushByStage3(f0_ftq_req.ftqIdx)

  val f3_redirect = WireInit(false.B)
  f3_flush := fromFtq.redirect.valid
  f2_flush := f3_flush || f3_redirect
  f1_flush := f2_flush || from_bpu_f1_flush
  f0_flush := f1_flush || from_bpu_f0_flush

  val f1_ready, f2_ready, f3_ready = WireInit(false.B)

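  // The GTimer() > 500.U term below holds fetch off for the first 500 cycles
  // after reset, presumably a warm-up guard so that downstream structures
  // settle before the first fetch (an assumption; the original carries no
  // comment here).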
  fromFtq.req.ready := toICache(0).ready && toICache(1).ready && f2_ready && GTimer() > 500.U

  toICache(0).valid      := fromFtq.req.fire() && !f0_flush
  toICache(0).bits.vaddr := fromFtq.req.bits.startAddr
  toICache(1).valid      := fromFtq.req.fire() && f0_doubleLine && !f0_flush
  toICache(1).bits.vaddr := fromFtq.req.bits.fallThruAddr

  //---------------------------------------------
  //  Fetch Stage 1 :
  //  * Hold the request while the ICache lookup (meta/data, and the ITLB
  //    inside the ICache main pipeline) is in flight
  //---------------------------------------------

  val f1_valid      = RegInit(false.B)
  val f1_ftq_req    = RegEnable(next = f0_ftq_req,    enable = f0_fire)
  val f1_situation  = RegEnable(next = f0_situation,  enable = f0_fire)
  val f1_doubleLine = RegEnable(next = f0_doubleLine, enable = f0_fire)
  val f1_vSetIdx    = RegEnable(next = f0_vSetIdx,    enable = f0_fire)
  val f1_fire       = f1_valid && f1_ready

  f1_ready := f2_ready || !f1_valid

  from_bpu_f1_flush := fromFtq.flushFromBpu.shouldFlushByStage3(f1_ftq_req.ftqIdx)

  when(f1_flush)                  {f1_valid := false.B}
  .elsewhen(f0_fire && !f0_flush) {f1_valid := true.B}
  .elsewhen(f1_fire)              {f1_valid := false.B}

  //---------------------------------------------
  //  Fetch Stage 2 :
  //  * Take the ICache response (paddr, data, TLB exceptions)
  //  * Hold the response until Stage 3 is ready to accept it
  //---------------------------------------------
  val icacheRespAllValid = WireInit(false.B)

  val f2_valid      = RegInit(false.B)
  val f2_ftq_req    = RegEnable(next = f1_ftq_req,    enable = f1_fire)
  val f2_situation  = RegEnable(next = f1_situation,  enable = f1_fire)
  val f2_doubleLine = RegEnable(next = f1_doubleLine, enable = f1_fire)
  val f2_vSetIdx    = RegEnable(next = f1_vSetIdx,    enable = f1_fire)
  val f2_fire       = f2_valid && f2_ready
  def ResultHoldBypass[T <: Data](data: T, valid: Bool): T = {
    Mux(valid, data, RegEnable(data, valid))
  }

  f2_ready := f3_ready && icacheRespAllValid || !f2_valid
  // TODO: addr compare may be timing critical
  val f2_icache_all_resp_wire = fromICache(0).valid &&
    (fromICache(0).bits.vaddr === f2_ftq_req.startAddr) &&
    ((fromICache(1).valid && (fromICache(1).bits.vaddr === f2_ftq_req.fallThruAddr)) || !f2_doubleLine)
  val f2_icache_all_resp_reg = RegInit(false.B)

  icacheRespAllValid := f2_icache_all_resp_reg || f2_icache_all_resp_wire

  io.icacheStop := !f3_ready

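  // Response-hold handshake: when both ICache ports have responded
  // (f2_icache_all_resp_wire) but F3 is stalled, remember that fact in
  // f2_icache_all_resp_reg so the single-cycle response is not lost; the
  // flag clears once F2 fires (or the stage is flushed).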
  when(f2_flush)                                              {f2_icache_all_resp_reg := false.B}
  .elsewhen(f2_valid && f2_icache_all_resp_wire && !f3_ready) {f2_icache_all_resp_reg := true.B}
  .elsewhen(f2_fire && f2_icache_all_resp_reg)                {f2_icache_all_resp_reg := false.B}

  when(f2_flush)                  {f2_valid := false.B}
  .elsewhen(f1_fire && !f1_flush) {f2_valid := true.B}
  .elsewhen(f2_fire)              {f2_valid := false.B}

  val f2_cache_response_data = ResultHoldBypass(valid = f2_icache_all_resp_wire, data = VecInit(fromICache.map(_.bits.readData)))

  val f2_datas     = VecInit((0 until PortNumber).map(i => f2_cache_response_data(i)))
  val f2_except_pf = VecInit((0 until PortNumber).map(i => fromICache(i).bits.tlbExcp.pageFault))
  val f2_except_af = VecInit((0 until PortNumber).map(i => fromICache(i).bits.tlbExcp.accessFault))
  val f2_mmio      = fromICache(0).bits.tlbExcp.mmio && !fromICache(0).bits.tlbExcp.accessFault

  val f2_paddrs    = VecInit((0 until PortNumber).map(i => fromICache(i).bits.paddr))
  val f2_perf_info = io.icachePerfInfo

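  // cut() slices the (possibly two concatenated) cachelines into PredictWidth
  // 16-bit parcels (plus one extra parcel for an RVI straddling the block
  // end), starting at startAddr's in-line offset. Worked example with
  // hypothetical values (64-byte lines, start offset 0x08): startPtr = 4, so
  // result(0) is halfword 4 of the block, result(1) is halfword 5, and so on.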
  def cut(cacheline: UInt, start: UInt): Vec[UInt] = {
    if (HasCExtension) {
      val result   = Wire(Vec(PredictWidth + 1, UInt(16.W)))
      val dataVec  = cacheline.asTypeOf(Vec(blockBytes * 2 / 2, UInt(16.W)))
      val startPtr = Cat(0.U(1.W), start(blockOffBits - 1, 1))
      (0 until PredictWidth + 1).foreach(i =>
        result(i) := dataVec(startPtr + i.U)
      )
      result
    } else {
      val result   = Wire(Vec(PredictWidth, UInt(32.W)))
      val dataVec  = cacheline.asTypeOf(Vec(blockBytes * 2 / 4, UInt(32.W)))
      val startPtr = Cat(0.U(1.W), start(blockOffBits - 1, 2))
      (0 until PredictWidth).foreach(i =>
        result(i) := dataVec(startPtr + i.U)
      )
      result
    }
  }

  val preDecoder = Module(new PreDecode)
  val (preDecoderIn, preDecoderOut) = (preDecoder.io.in, preDecoder.io.out)
  val predecodeOutValid = WireInit(false.B)

  val f2_cut_data = cut(Cat(f2_datas.map(cacheline => cacheline.asUInt).reverse).asUInt, f2_ftq_req.startAddr)

  //---------------------------------------------
  //  Fetch Stage 3 :
  //  * get data from the last stage (hit data from f2 / miss data from the missQueue response)
  //  * if at least one needed cacheline misses, wait for the miss queue response (a wait_state machine) THIS IS TOO UGLY!!!
  //  * cut the cacheline(s) and send them to PreDecode
  //  * check whether the prediction is right (branch target and type, jump direction and type, jal target)
  //---------------------------------------------
  val f3_valid      = RegInit(false.B)
  val f3_ftq_req    = RegEnable(next = f2_ftq_req,    enable = f2_fire)
  val f3_situation  = RegEnable(next = f2_situation,  enable = f2_fire)
  val f3_doubleLine = RegEnable(next = f2_doubleLine, enable = f2_fire)
  val f3_fire       = io.toIbuffer.fire()

  // f3_ready is driven below, after the MMIO request signals it depends on
  // are defined.

  val f3_cut_data = RegEnable(next = f2_cut_data, enable = f2_fire)

  val f3_except_pf = RegEnable(next = f2_except_pf, enable = f2_fire)
  val f3_except_af = RegEnable(next = f2_except_af, enable = f2_fire)
  val f3_mmio      = RegEnable(next = f2_mmio,      enable = f2_fire)

  val f3_lastHalf      = RegInit(0.U.asTypeOf(new LastHalfInfo))
  val f3_lastHalfMatch = f3_lastHalf.matchThisBlock(f3_ftq_req.startAddr)
  val f3_except        = VecInit((0 until 2).map{i => f3_except_pf(i) || f3_except_af(i)})
  val f3_has_except    = f3_valid && (f3_except_af.reduce(_||_) || f3_except_pf.reduce(_||_))
  val f3_pAddrs        = RegEnable(next = f2_paddrs, enable = f2_fire)

  val f3_mmio_data = Reg(UInt(maxInstrLen.W))

  val f3_data = if (HasCExtension) Wire(Vec(PredictWidth + 1, UInt(16.W))) else Wire(Vec(PredictWidth, UInt(32.W)))
  f3_data := f3_cut_data

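  // MMIO fetch state machine (one outstanding MMIO fetch at a time):
  //   mmio_idle -> mmio_send_req -> mmio_w_resp
  //   mmio_w_resp: if the returned parcel looks compressed (data(1,0) =/= 3.U)
  //     -> mmio_resend (re-request at paddr + 2), else -> mmio_w_commit
  //   mmio_resend -> mmio_resend_w_resp -> mmio_w_commit
  //   mmio_w_commit -> mmio_idle once the ROB commits this instruction
  //     (mmio_has_committed below).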
  val mmio_idle :: mmio_send_req :: mmio_w_resp :: mmio_resend :: mmio_resend_w_resp :: mmio_w_commit :: Nil = Enum(6)
  val mmio_state = RegInit(mmio_idle)

  val f3_req_is_mmio = f3_mmio && f3_valid
  val mmio_has_committed = VecInit(io.rob_commits.map{commit =>
    commit.valid && commit.bits.ftqIdx === f3_ftq_req.ftqIdx && commit.bits.ftqOffset === 0.U
  }).asUInt.orR
  val f3_mmio_req_commit = f3_req_is_mmio && mmio_state === mmio_w_commit && mmio_has_committed

  val f3_mmio_to_commit      = f3_req_is_mmio && mmio_state === mmio_w_commit
  val f3_mmio_to_commit_next = RegNext(f3_mmio_to_commit)
  val f3_mmio_can_go         = f3_mmio_to_commit && !f3_mmio_to_commit_next

  val f3_ftq_flush_self     = fromFtq.redirect.valid && RedirectLevel.flushItself(fromFtq.redirect.bits.level)
  val f3_ftq_flush_by_older = fromFtq.redirect.valid && isBefore(fromFtq.redirect.bits.ftqIdx, f3_ftq_req.ftqIdx)

  val f3_need_not_flush = f3_req_is_mmio && fromFtq.redirect.valid && !f3_ftq_flush_self && !f3_ftq_flush_by_older

  when(f3_flush && !f3_need_not_flush)              {f3_valid := false.B}
  .elsewhen(f2_fire && !f2_flush)                   {f3_valid := true.B}
  .elsewhen(io.toIbuffer.fire() && !f3_req_is_mmio) {f3_valid := false.B}
  .elsewhen(f3_req_is_mmio && f3_mmio_req_commit)   {f3_valid := false.B}

  val f3_mmio_use_seq_pc = RegInit(false.B)

  val (redirect_ftqIdx, redirect_ftqOffset) = (fromFtq.redirect.bits.ftqIdx, fromFtq.redirect.bits.ftqOffset)
  val redirect_mmio_req = fromFtq.redirect.valid && redirect_ftqIdx === f3_ftq_req.ftqIdx && redirect_ftqOffset === 0.U

  when(RegNext(f2_fire && !f2_flush) && f3_req_is_mmio) { f3_mmio_use_seq_pc := true.B  }
  .elsewhen(redirect_mmio_req)                          { f3_mmio_use_seq_pc := false.B }

  f3_ready := Mux(f3_req_is_mmio, io.toIbuffer.ready && f3_mmio_req_commit || !f3_valid, io.toIbuffer.ready || !f3_valid)

  when(f3_req_is_mmio){
    f3_data(0) := f3_mmio_data(15, 0)
    f3_data(1) := f3_mmio_data(31, 16)
  }

  when(fromUncache.fire()) {f3_mmio_data := fromUncache.bits.data}

  switch(mmio_state){
    is(mmio_idle){
      when(f3_req_is_mmio){
        mmio_state := mmio_send_req
      }
    }

    is(mmio_send_req){
      mmio_state := Mux(toUncache.fire(), mmio_w_resp, mmio_send_req)
    }

    is(mmio_w_resp){
      when(fromUncache.fire()){
        val isRVC = fromUncache.bits.data(1, 0) =/= 3.U
        mmio_state := Mux(isRVC, mmio_resend, mmio_w_commit)
      }
    }

    is(mmio_resend){
      mmio_state := Mux(toUncache.fire(), mmio_resend_w_resp, mmio_resend)
    }

    is(mmio_resend_w_resp){
      when(fromUncache.fire()){
        mmio_state := mmio_w_commit
      }
    }

    is(mmio_w_commit){
      when(mmio_has_committed){
        mmio_state := mmio_idle
      }
    }
  }

  when(f3_ftq_flush_self || f3_ftq_flush_by_older) {
    mmio_state   := mmio_idle
    f3_mmio_data := 0.U
  }

  toUncache.valid     := ((mmio_state === mmio_send_req) || (mmio_state === mmio_resend)) && f3_req_is_mmio
  toUncache.bits.addr := Mux(mmio_state === mmio_resend, f3_pAddrs(0) + 2.U, f3_pAddrs(0))
  fromUncache.ready   := true.B

  preDecoderIn.instValid        := f3_valid && !f3_has_except
  preDecoderIn.data             := f3_data
  preDecoderIn.startAddr        := f3_ftq_req.startAddr
  preDecoderIn.fallThruAddr     := f3_ftq_req.fallThruAddr
  preDecoderIn.fallThruError    := f3_ftq_req.fallThruError
  preDecoderIn.isDoubleLine     := f3_doubleLine
  preDecoderIn.ftqOffset        := f3_ftq_req.ftqOffset
  preDecoderIn.target           := f3_ftq_req.target
  preDecoderIn.oversize         := f3_ftq_req.oversize
  preDecoderIn.lastHalfMatch    := f3_lastHalfMatch
  preDecoderIn.pageFault        := f3_except_pf
  preDecoderIn.accessFault      := f3_except_af
  preDecoderIn.mmio             := f3_mmio
  preDecoderIn.frontendTrigger  := io.frontendTrigger
  preDecoderIn.csrTriggerEnable := io.csrTriggerEnable


  // TODO: What if the next packet does not match?
  when (f3_flush) {
    f3_lastHalf.valid := false.B
  }.elsewhen (io.toIbuffer.fire()) {
    f3_lastHalf.valid    := preDecoderOut.hasLastHalf
    f3_lastHalf.middlePC := preDecoderOut.realEndPC
  }

  val f3_predecode_range = VecInit(preDecoderOut.pd.map(inst => inst.valid)).asUInt
  val f3_mmio_range      = VecInit((0 until PredictWidth).map(i => if (i == 0) true.B else false.B))

  io.toIbuffer.valid       := f3_valid && (!f3_req_is_mmio || f3_mmio_can_go)
  io.toIbuffer.bits.instrs := preDecoderOut.instrs
  io.toIbuffer.bits.valid  := Mux(f3_req_is_mmio, f3_mmio_range.asUInt, f3_predecode_range & preDecoderOut.instrRange.asUInt)
  io.toIbuffer.bits.pd     := preDecoderOut.pd
  io.toIbuffer.bits.ftqPtr := f3_ftq_req.ftqIdx
  io.toIbuffer.bits.pc     := preDecoderOut.pc
  io.toIbuffer.bits.ftqOffset.zipWithIndex.foreach{case (a, i) => a.bits := i.U; a.valid := preDecoderOut.takens(i) && !f3_req_is_mmio}
  io.toIbuffer.bits.foldpc := preDecoderOut.pc.map(i => XORFold(i(VAddrBits - 1, 1), MemPredPCWidth))
  io.toIbuffer.bits.ipf    := preDecoderOut.pageFault
  io.toIbuffer.bits.acf    := preDecoderOut.accessFault
  io.toIbuffer.bits.crossPageIPFFix := preDecoderOut.crossPageIPF
  io.toIbuffer.bits.triggered       := preDecoderOut.triggered

  // Write back to FTQ
  val f3_cache_fetch     = f3_valid && !(f2_fire && !f2_flush)
  val finishFetchMaskReg = RegNext(f3_cache_fetch)


  val f3_mmio_missOffset = Wire(ValidUndirectioned(UInt(log2Ceil(PredictWidth).W)))
  f3_mmio_missOffset.valid := f3_req_is_mmio
  f3_mmio_missOffset.bits  := 0.U

  toFtq.pdWb.valid          := (!finishFetchMaskReg && f3_valid && !f3_req_is_mmio) || (f3_mmio_req_commit && f3_mmio_use_seq_pc)
  toFtq.pdWb.bits.pc        := preDecoderOut.pc
  toFtq.pdWb.bits.pd        := preDecoderOut.pd
  toFtq.pdWb.bits.pd.zipWithIndex.foreach{case (instr, i) => instr.valid := Mux(f3_req_is_mmio, f3_mmio_range(i), f3_predecode_range(i))}
  toFtq.pdWb.bits.ftqIdx    := f3_ftq_req.ftqIdx
  toFtq.pdWb.bits.ftqOffset := f3_ftq_req.ftqOffset.bits
  toFtq.pdWb.bits.misOffset := Mux(f3_req_is_mmio, f3_mmio_missOffset, preDecoderOut.misOffset)
  toFtq.pdWb.bits.cfiOffset := preDecoderOut.cfiOffset
  toFtq.pdWb.bits.target    := Mux(f3_req_is_mmio, Mux(f3_mmio_data(1, 0) =/= 3.U, f3_ftq_req.startAddr + 2.U, f3_ftq_req.startAddr + 4.U), preDecoderOut.target)
  toFtq.pdWb.bits.jalTarget := preDecoderOut.jalTarget
  toFtq.pdWb.bits.instrRange := Mux(f3_req_is_mmio, f3_mmio_range, preDecoderOut.instrRange)

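  // predecodeFlushReg turns the level signal predecodeFlush into a one-shot:
  // f3_redirect (driven below) pulses only on the first cycle a mispredecode
  // is observed for a packet, so FTQ sees a single redirect per fetch packet.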
  val predecodeFlush    = preDecoderOut.misOffset.valid && f3_valid
  val predecodeFlushReg = RegNext(predecodeFlush && !(f2_fire && !f2_flush))


  /** performance counters */
  val f3_perf_info = RegEnable(next = f2_perf_info, enable = f2_fire)
  val f3_req_0 = io.toIbuffer.fire()
  val f3_req_1 = io.toIbuffer.fire() && f3_doubleLine
  val f3_hit_0 = io.toIbuffer.fire() && f3_perf_info.bank_hit(0)
  val f3_hit_1 = io.toIbuffer.fire() && f3_doubleLine && f3_perf_info.bank_hit(1)
  val f3_hit   = f3_perf_info.hit

  val perfinfo = IO(new Bundle() {
    val perfEvents = Output(new PerfEventsBundle(15))
  })

  val perfEvents = Seq(
    ("frontendFlush                ", f3_redirect                                       ),
    ("ifu_req                      ", io.toIbuffer.fire()                               ),
    ("ifu_miss                     ", io.toIbuffer.fire() && !f3_perf_info.hit          ),
    ("ifu_req_cacheline_0          ", f3_req_0                                          ),
    ("ifu_req_cacheline_1          ", f3_req_1                                          ),
    ("ifu_req_cacheline_0_hit      ", f3_hit_0                                          ),
    ("ifu_req_cacheline_1_hit      ", f3_hit_1                                          ),
    ("only_0_hit                   ", f3_perf_info.only_0_hit    && io.toIbuffer.fire() ),
    ("only_0_miss                  ", f3_perf_info.only_0_miss   && io.toIbuffer.fire() ),
    ("hit_0_hit_1                  ", f3_perf_info.hit_0_hit_1   && io.toIbuffer.fire() ),
    ("hit_0_miss_1                 ", f3_perf_info.hit_0_miss_1  && io.toIbuffer.fire() ),
    ("miss_0_hit_1                 ", f3_perf_info.miss_0_hit_1  && io.toIbuffer.fire() ),
    ("miss_0_miss_1                ", f3_perf_info.miss_0_miss_1 && io.toIbuffer.fire() ),
    ("cross_line_block             ", io.toIbuffer.fire() && f3_situation(0)            ),
    ("fall_through_is_cacheline_end", io.toIbuffer.fire() && f3_situation(1)            ),
  )

  for (((perf_out, (perf_name, perf)), i) <- perfinfo.perfEvents.perf_events.zip(perfEvents).zipWithIndex) {
    perf_out.incr_step := RegNext(perf)
  }


  f3_redirect := (!predecodeFlushReg && predecodeFlush && !f3_req_is_mmio) || (f3_mmio_req_commit && f3_mmio_use_seq_pc)

  XSPerfAccumulate("ifu_req",                 io.toIbuffer.fire())
  XSPerfAccumulate("ifu_miss",                io.toIbuffer.fire() && !f3_hit)
  XSPerfAccumulate("ifu_req_cacheline_0",     f3_req_0)
  XSPerfAccumulate("ifu_req_cacheline_1",     f3_req_1)
  XSPerfAccumulate("ifu_req_cacheline_0_hit", f3_hit_0)
  XSPerfAccumulate("ifu_req_cacheline_1_hit", f3_hit_1)
  XSPerfAccumulate("frontendFlush",           f3_redirect)
  XSPerfAccumulate("only_0_hit",    f3_perf_info.only_0_hit    && io.toIbuffer.fire())
  XSPerfAccumulate("only_0_miss",   f3_perf_info.only_0_miss   && io.toIbuffer.fire())
  XSPerfAccumulate("hit_0_hit_1",   f3_perf_info.hit_0_hit_1   && io.toIbuffer.fire())
  XSPerfAccumulate("hit_0_miss_1",  f3_perf_info.hit_0_miss_1  && io.toIbuffer.fire())
  XSPerfAccumulate("miss_0_hit_1",  f3_perf_info.miss_0_hit_1  && io.toIbuffer.fire())
  XSPerfAccumulate("miss_0_miss_1", f3_perf_info.miss_0_miss_1 && io.toIbuffer.fire())
  XSPerfAccumulate("cross_line_block",              io.toIbuffer.fire() && f3_situation(0))
  XSPerfAccumulate("fall_through_is_cacheline_end", io.toIbuffer.fire() && f3_situation(1))
}