// xref: /XiangShan/src/main/scala/xiangshan/frontend/icache/ICacheMainPipe.scala (revision dcbc69cb2a7ea07707ede3d8f7c74421ef450202)
/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/
16
17package xiangshan.frontend.icache
18
19import chipsalliance.rocketchip.config.Parameters
20import chisel3._
21import chisel3.util._
22import freechips.rocketchip.tilelink.ClientStates
23import xiangshan._
24import xiangshan.cache.mmu._
25import utils._
26import xiangshan.backend.fu.{PMPReqBundle, PMPRespBundle}
27
/** Fetch request from the IFU into the ICache main pipeline.
  * Only the virtual address is carried; the physical address is produced by
  * the ITLB inside the pipeline itself.
  */
class ICacheMainPipeReq(implicit p: Parameters) extends ICacheBundle
{
  val vaddr  = UInt(VAddrBits.W)
  // Virtual set index derived from vaddr — usable before translation, so the
  // meta/data SRAMs can be read in parallel with the TLB lookup.
  def vsetIdx = get_idx(vaddr)
}
33
/** Response of the main pipeline back to the IFU: the fetched cacheline plus
  * the translated physical address and any exception raised on the way.
  */
class ICacheMainPipeResp(implicit p: Parameters) extends ICacheBundle
{
  val vaddr    = UInt(VAddrBits.W)
  // One full cacheline of fetched instruction data.
  val readData = UInt(blockBits.W)
  val paddr    = UInt(PAddrBits.W)
  // Exception/attribute info gathered from ITLB and PMP for this line.
  val tlbExcp  = new Bundle{
    val pageFault = Bool()
    val accessFault = Bool()
    val mmio = Bool()
  }
}
45
/** Per-port IFU<->ICache channel: decoupled request in, valid response out. */
class ICacheMainPipeBundle(implicit p: Parameters) extends ICacheBundle
{
  val req  = Flipped(DecoupledIO(new ICacheMainPipeReq))
  val resp = ValidIO(new ICacheMainPipeResp)
}
51
/** Read channel to the meta SRAM array (request out, same-array response in). */
class ICacheMetaReqBundle(implicit p: Parameters) extends ICacheBundle{
  val toIMeta       = Decoupled(new ICacheReadBundle)
  val fromIMeta     = Input(new ICacheMetaRespBundle)
}
56
/** Read channel to the data SRAM array (request out, same-array response in). */
class ICacheDataReqBundle(implicit p: Parameters) extends ICacheBundle{
  val toIData       = Decoupled(new ICacheReadBundle)
  val fromIData     = Input(new ICacheDataRespBundle)
}
61
/** Channel to one miss-queue (MSHR) entry: miss request out, refill data in. */
class ICacheMSHRBundle(implicit p: Parameters) extends ICacheBundle{
  val toMSHR        = Decoupled(new ICacheMissReq)
  val fromMSHR      = Flipped(ValidIO(new ICacheMissResp))
}
66
/** Channel to the PMP checker; resp is combinational (Input, not Decoupled). */
class ICachePMPBundle(implicit p: Parameters) extends ICacheBundle{
  val req  = Valid(new PMPReqBundle())
  val resp = Input(new PMPRespBundle())
}
71
/** Performance-counter signals exported by the main pipeline.
  * "0"/"1" refer to the two fetch ports (first/second cacheline of a fetch).
  */
class ICachePerfInfo(implicit p: Parameters) extends ICacheBundle{
  val only_0_hit     = Bool()  // single-line fetch, port 0 hit
  val only_0_miss    = Bool()  // single-line fetch, port 0 miss
  val hit_0_hit_1    = Bool()  // double-line fetch, both hit
  val hit_0_miss_1   = Bool()  // double-line fetch, only port 1 missed
  val miss_0_hit_1   = Bool()  // double-line fetch, only port 0 missed
  val miss_0_miss_1  = Bool()  // double-line fetch, both missed
  val bank_hit       = Vec(2,Bool())  // per-port hit indication
  val hit            = Bool()  // whole fetch hit
}
82
/** Top-level IO of the ICache main pipeline.
  * Internal side talks to the SRAM arrays and miss queue; external side talks
  * to the IFU, PMP, ITLB and the release unit.
  */
class ICacheMainPipeInterface(implicit p: Parameters) extends ICacheBundle {
  /* internal interface */
  val metaArray   = new ICacheMetaReqBundle
  val dataArray   = new ICacheDataReqBundle
  val mshr        = Vec(PortNumber, new ICacheMSHRBundle)
  /* outside interface */
  val fetch       = Vec(PortNumber, new ICacheMainPipeBundle)
  val pmp         = Vec(PortNumber, new ICachePMPBundle)
  val itlb        = Vec(PortNumber, new BlockTlbRequestIO)
  // IFU back-pressure: when high, a finished s2 result must be held.
  val respStall   = Input(Bool())
  // Victim-line release requests (one per port) toward the release unit.
  val toReleaseUnit = Vec(2, Decoupled(new ReleaseReq))
  // Victim (to-be-replaced) line info per stage/port, for probe filtering.
  val victimInfor = new Bundle() {
    val s1 = Vec(2, Output(new ICacheVictimInfor()))
    val s2 = Vec(2, Output(new ICacheVictimInfor()))
  }
  // Set index of in-flight misses per stage/port.
  val setInfor    = new Bundle(){
    val s1 = Vec(2, Output(new ICacheSetInfor()))
    val s2 = Vec(2, Output(new ICacheSetInfor()))
  }
  val perfInfo = Output(new ICachePerfInfo)
}
104
/** Three-stage ICache fetch pipeline.
  *
  * s0: accept (up to two) cacheline requests from IFU, read meta/data SRAMs.
  * s1: get ITLB translation, check tags for hit, pick a victim way.
  * s2: on miss, run the miss-queue FSM; on replacement, release the victim;
  *     finally cut the line(s) back to IFU together with exception info.
  */
class ICacheMainPipe(implicit p: Parameters) extends ICacheModule
{
  val io = IO(new ICacheMainPipeInterface)

  /* Short aliases for the interface's port groups. */
  val (fromIFU, toIFU)    = (io.fetch.map(_.req), io.fetch.map(_.resp))
  val (toMeta, toData, metaResp, dataResp) =  (io.metaArray.toIMeta, io.dataArray.toIData, io.metaArray.fromIMeta, io.dataArray.fromIData)
  val (toMSHR, fromMSHR)  = (io.mshr.map(_.toMSHR), io.mshr.map(_.fromMSHR))
  val (toITLB, fromITLB)  = (io.itlb.map(_.req), io.itlb.map(_.resp))
  val (toPMP,  fromPMP)   = (io.pmp.map(_.req), io.pmp.map(_.resp))

  /** Valid register for a pipeline stage: set when the previous stage fires
    * (and is not flushed), cleared when this stage fires or is flushed. */
  def generatePipeControl(lastFire: Bool, thisFire: Bool, thisFlush: Bool, lastFlush: Bool): Bool = {
    val valid  = RegInit(false.B)
    when(thisFlush)                    {valid  := false.B}
    .elsewhen(lastFire && !lastFlush)  {valid  := true.B}
    .elsewhen(thisFire)                 {valid  := false.B}
    valid
  }

  /** Bypass `data` in the cycle `valid` is high, then hold the last valid
    * value — latches single-cycle SRAM responses across a stage stall. */
  def ResultHoldBypass[T<:Data](data: T, valid: Bool): T = {
    Mux(valid, data, RegEnable(data, valid))
  }

  val s0_ready, s1_ready, s2_ready = WireInit(false.B)
  val s0_fire,  s1_fire , s2_fire  = WireInit(false.B)

  //---------------------------------------------
  //  Fetch Stage 0 :
  //  * accept up to two cacheline requests from IFU
  //  * read meta/data SRAMs with the virtual set index
  //---------------------------------------------
  val s0_valid       = fromIFU.map(_.valid).reduce(_||_)
  val s0_req_vaddr   = VecInit(fromIFU.map(_.bits.vaddr))
  val s0_req_vsetIdx = VecInit(fromIFU.map(_.bits.vsetIdx))
  // BUGFIX: was `fromIFU(0).valid && !fromIFU(0).valid`, which is constantly
  // false. "Only first" means port 0 carries a request while port 1 does not.
  val s0_only_first  = fromIFU(0).valid && !fromIFU(1).valid
  val s0_double_line = fromIFU(0).valid && fromIFU(1).valid

  s0_fire        := s0_valid && s1_ready

  //fetch: send addr to Meta/TLB and Data simultaneously
  val fetch_req = List(toMeta, toData)
  for(i <- 0 until 2) {
    fetch_req(i).valid             := s0_fire
    fetch_req(i).bits.isDoubleLine := s0_double_line
    fetch_req(i).bits.vSetIdx      := s0_req_vsetIdx
  }
  //TODO: fix GTimer() condition (reset-settling workaround; blocks fetch for the first 500 cycles)
  fromIFU.map(_.ready := fetch_req(0).ready && fetch_req(1).ready && s1_ready && GTimer() > 500.U)


//  XSPerfAccumulate("ifu_bubble_ftq_not_valid",   !f0_valid )
//  XSPerfAccumulate("ifu_bubble_pipe_stall",    f0_valid && fetch_req(0).ready && fetch_req(1).ready && !s1_ready )
//  XSPerfAccumulate("ifu_bubble_sram_0_busy",   f0_valid && !fetch_req(0).ready  )
//  XSPerfAccumulate("ifu_bubble_sram_1_busy",   f0_valid && !fetch_req(1).ready  )

  //---------------------------------------------
  //  Fetch Stage 1 :
  //  * Send req to ITLB and TLB Response (Get Paddr)
  //  * ICache Response (Get Meta and Data)
  //  * Hit Check (Generate hit signal and hit vector)
  //  * Get victim way
  //---------------------------------------------

  //TODO: handle fetch exceptions

  val tlbRespAllValid = WireInit(false.B)

  val s1_valid = generatePipeControl(lastFire = s0_fire, thisFire = s1_fire, thisFlush = false.B, lastFlush = false.B)

  val s1_req_vaddr   = RegEnable(next = s0_req_vaddr,    enable = s0_fire)
  val s1_req_vsetIdx = RegEnable(next = s0_req_vsetIdx, enable = s0_fire)
  val s1_only_first  = RegEnable(next = s0_only_first, enable = s0_fire)
  val s1_double_line = RegEnable(next = s0_double_line, enable = s0_fire)

  // s1 can advance only when every needed TLB response has arrived.
  s1_ready := s2_ready && tlbRespAllValid  || !s1_valid
  s1_fire  := s1_valid && tlbRespAllValid && s2_ready

  // Port 0 always translates; port 1 only for a cross-line (double) fetch.
  toITLB(0).valid         := s1_valid
  toITLB(0).bits.size     := 3.U // TODO: fix the size
  toITLB(0).bits.vaddr    := s1_req_vaddr(0)
  toITLB(0).bits.debug.pc := s1_req_vaddr(0)

  toITLB(1).valid         := s1_valid && s1_double_line
  toITLB(1).bits.size     := 3.U // TODO: fix the size
  toITLB(1).bits.vaddr    := s1_req_vaddr(1)
  toITLB(1).bits.debug.pc := s1_req_vaddr(1)

  toITLB.map{port =>
    port.bits.cmd                 := TlbCmd.exec
    port.bits.robIdx              := DontCare
    port.bits.debug.isFirstIssue  := DontCare
  }

  fromITLB.map(_.ready := true.B)

  val (tlbRespValid, tlbRespPAddr) = (fromITLB.map(_.valid), VecInit(fromITLB.map(_.bits.paddr)))
  val (tlbRespMiss)  = fromITLB.map(port => port.bits.miss && port.valid) // currently unused
  val (tlbExcpPF,    tlbExcpAF)    = (fromITLB.map(port => port.bits.excp.pf.instr && port.valid),
                                        fromITLB.map(port => (port.bits.excp.af.instr) && port.valid))

  tlbRespAllValid := tlbRespValid(0)  && (tlbRespValid(1) || !s1_double_line)

  val s1_req_paddr              = tlbRespPAddr
  val s1_req_ptags              = VecInit(s1_req_paddr.map(get_phy_tag(_)))

  // SRAM results arrive the cycle after the read fires; hold them if stalled.
  val s1_meta_ptags              = ResultHoldBypass(data = metaResp.tags, valid = RegNext(toMeta.fire()))
  val s1_meta_cohs               = ResultHoldBypass(data = metaResp.cohs, valid = RegNext(toMeta.fire()))
  val s1_data_cacheline          = ResultHoldBypass(data = dataResp.datas, valid = RegNext(toData.fire()))

  // Per-port, per-way tag compare qualified by coherence validity.
  val s1_tag_eq_vec        = VecInit((0 until PortNumber).map( p => VecInit((0 until nWays).map( w =>  s1_meta_ptags(p)(w) ===  s1_req_ptags(p) ))))
  val s1_tag_match_vec     = VecInit((0 until PortNumber).map( k => VecInit(s1_tag_eq_vec(k).zipWithIndex.map{ case(way_tag_eq, w) => way_tag_eq && s1_meta_cohs(k)(w).isValid()})))
  val s1_tag_match         = VecInit(s1_tag_match_vec.map(vector => ParallelOR(vector)))

  // A port only "hits"/"misses" when it actually carries a request and has no
  // translation exception (excepting requests never access the arrays).
  val s1_port_hit          = VecInit(Seq(s1_tag_match(0) && s1_valid  && !tlbExcpPF(0) && !tlbExcpAF(0),  s1_tag_match(1) && s1_valid && s1_double_line && !tlbExcpPF(1) && !tlbExcpAF(1) ))
  val s1_bank_miss         = VecInit(Seq(!s1_tag_match(0) && s1_valid && !tlbExcpPF(0) && !tlbExcpAF(0), !s1_tag_match(1) && s1_valid && s1_double_line && !tlbExcpPF(1) && !tlbExcpAF(1) ))
  val s1_hit               = (s1_port_hit(0) && s1_port_hit(1)) || (!s1_double_line && s1_port_hit(0))

  /** choose victim cacheline */
  val replacers       = Seq.fill(PortNumber)(ReplacementPolicy.fromString(cacheParams.replacer,nWays,nSets/PortNumber))
  val s1_victim_oh    = ResultHoldBypass(data = VecInit(replacers.zipWithIndex.map{case (replacer, i) => UIntToOH(replacer.way(s1_req_vsetIdx(i)))}), valid = RegNext(toMeta.fire()))

  val s1_victim_coh   = VecInit(s1_victim_oh.zipWithIndex.map {case(oh, port) => Mux1H(oh, s1_meta_cohs(port))})
  val s1_victim_tag   = VecInit(s1_victim_oh.zipWithIndex.map {case(oh, port) => Mux1H(oh, s1_meta_ptags(port))})
  val s1_victim_data  = VecInit(s1_victim_oh.zipWithIndex.map {case(oh, port) => Mux1H(oh, s1_data_cacheline(port))})
  // Replacement (and thus a release) is needed only when the victim still
  // holds a valid line and this port missed.
  val s1_need_replace = VecInit(s1_victim_coh.zipWithIndex.map{case(coh, port) => coh.isValid() && s1_bank_miss(port)})

  (0 until PortNumber).map{ i =>
    io.victimInfor.s1(i).valid := s1_valid && s1_need_replace(i)
    io.victimInfor.s1(i).ptag  := s1_victim_tag(i)
    io.victimInfor.s1(i).vidx  := get_idx(s1_req_vaddr(i))
  }

  (0 until PortNumber).map{ i =>
    io.setInfor.s1(i).valid := s1_bank_miss(i)
    io.setInfor.s1(i).vidx  := s1_req_vsetIdx(i)
  }

  assert(PopCount(s1_tag_match_vec(0)) <= 1.U && PopCount(s1_tag_match_vec(1)) <= 1.U, "Multiple hit in main pipe")

  // Replacer training ports: slot 0 trained in s1 (hits), slot 1 in s2 (refills).
  val touch_sets = Seq.fill(2)(Wire(Vec(2, UInt(log2Ceil(nSets/2).W))))
  val touch_ways = Seq.fill(2)(Wire(Vec(2, Valid(UInt(log2Ceil(nWays).W)))) )

  ((replacers zip touch_sets) zip touch_ways).map{case ((r, s),w) => r.access(s,w)}

  val s1_hit_data      =  VecInit(s1_data_cacheline.zipWithIndex.map { case(bank, i) =>
    val port_hit_data = Mux1H(s1_tag_match_vec(i).asUInt, bank)
    port_hit_data
  })

  (0 until nWays).map{ w =>
    XSPerfAccumulate("line_0_hit_way_" + Integer.toString(w, 10),  s1_fire && s1_port_hit(0) && OHToUInt(s1_tag_match_vec(0))  === w.U)
  }

  (0 until nWays).map{ w =>
    XSPerfAccumulate("line_0_victim_way_" + Integer.toString(w, 10),  s1_fire && !s1_port_hit(0) && OHToUInt(s1_victim_oh(0))  === w.U)
  }

  (0 until nWays).map{ w =>
    XSPerfAccumulate("line_1_hit_way_" + Integer.toString(w, 10),  s1_fire && s1_double_line && s1_port_hit(1) && OHToUInt(s1_tag_match_vec(1))  === w.U)
  }

  (0 until nWays).map{ w =>
    XSPerfAccumulate("line_1_victim_way_" + Integer.toString(w, 10),  s1_fire && s1_double_line && !s1_port_hit(1) && OHToUInt(s1_victim_oh(1))  === w.U)
  }

  XSPerfAccumulate("ifu_bubble_s1_tlb_miss",    s1_valid && !tlbRespAllValid )

  //---------------------------------------------
  //  Fetch Stage 2 :
  //  * get data from last stage (hit from s1_hit_data/miss from missQueue response)
  //  * if at least one needed cacheline miss, wait for miss queue response (a wait_state machine) THIS IS TOO UGLY!!!
  //  * cut cacheline(s) and send to PreDecode
  //  * check if prediction is right (branch target and type, jump direction and type , jal target )
  //---------------------------------------------
  val s2_fetch_finish = Wire(Bool())

  val s2_valid          = generatePipeControl(lastFire = s1_fire, thisFire = s2_fire, thisFlush = false.B, lastFlush = false.B)
  val s2_miss_available = Wire(Bool())

  // An empty s2 may still be blocked if its miss slots are busy.
  s2_ready      := (s2_valid && s2_fetch_finish && !io.respStall) || (!s2_valid && s2_miss_available)
  s2_fire       := s2_valid && s2_fetch_finish && !io.respStall

  val pmpExcpAF = fromPMP.map(port => port.instr)
  val mmio = fromPMP.map(port => port.mmio) // TODO: handle it

  val (s2_req_paddr , s2_req_vaddr)   = (RegEnable(next = s1_req_paddr, enable = s1_fire), RegEnable(next = s1_req_vaddr, enable = s1_fire))
  val s2_req_vsetIdx  = RegEnable(next = s1_req_vsetIdx, enable = s1_fire)
  val s2_req_ptags    = RegEnable(next = s1_req_ptags, enable = s1_fire)
  val s2_only_first   = RegEnable(next = s1_only_first, enable = s1_fire)
  val s2_double_line  = RegEnable(next = s1_double_line, enable = s1_fire)
  val s2_hit          = RegEnable(next = s1_hit   , enable = s1_fire)
  val s2_port_hit     = RegEnable(next = s1_port_hit, enable = s1_fire)
  val s2_bank_miss    = RegEnable(next = s1_bank_miss, enable = s1_fire)

  // "Fixed" hit = original hit OR a secondary miss satisfied by a miss slot.
  val sec_meet_vec = Wire(Vec(2, Bool()))
  val s2_fixed_hit_vec = VecInit((0 until 2).map(i => s2_port_hit(i) || sec_meet_vec(i)))
  val s2_fixed_hit = (s2_valid && s2_fixed_hit_vec(0) && s2_fixed_hit_vec(1) && s2_double_line) || (s2_valid && s2_fixed_hit_vec(0) && !s2_double_line)

  //replacement
  val s2_waymask      = RegEnable(next = s1_victim_oh, enable = s1_fire)
  val s2_victim_coh   = RegEnable(next = s1_victim_coh, enable = s1_fire)
  val s2_victim_tag   = RegEnable(next = s1_victim_tag, enable = s1_fire)
  val s2_victim_data  = RegEnable(next = s1_victim_data,  enable = s1_fire)
  val s2_need_replace = RegEnable(next = s1_need_replace,  enable = s1_fire)
  val s2_has_replace  = s2_need_replace.asUInt.orR

  /*** exception and pmp logic ***/
  //exception information: access fault can come from either the TLB or the
  //PMP check (the PMP response arrives one cycle into s2, hence the hold).
  val s2_except_pf = RegEnable(next = VecInit(tlbExcpPF), enable = s1_fire)
  val s2_except_af = VecInit(RegEnable(next = VecInit(tlbExcpAF), enable = s1_fire).zip(pmpExcpAF).map(a => a._1 || DataHoldBypass(a._2, RegNext(s1_fire)).asBool))
  val s2_except    = VecInit((0 until 2).map{i => s2_except_pf(i) || s2_except_af(i)})
  val s2_has_except = s2_valid && (s2_except_af.reduce(_||_) || s2_except_pf.reduce(_||_))
  //MMIO
  val s2_mmio      = DataHoldBypass(io.pmp(0).resp.mmio && !s2_except_af(0) && !s2_except_pf(0), RegNext(s1_fire)).asBool()

  io.pmp.zipWithIndex.map { case (p, i) =>
    p.req.valid := s2_fire
    p.req.bits.addr := s2_req_paddr(i)
    p.req.bits.size := 3.U // TODO
    p.req.bits.cmd := TlbCmd.exec
  }

  /*** cacheline miss logic ***/
  val wait_idle :: wait_queue_ready :: wait_send_req  :: wait_two_resp :: wait_0_resp :: wait_1_resp :: wait_one_resp ::wait_finish :: Nil = Enum(8)
  val wait_state = RegInit(wait_idle)

  val port_miss_fix  = VecInit(Seq(fromMSHR(0).fire() && !s2_port_hit(0),   fromMSHR(1).fire() && s2_double_line && !s2_port_hit(1) ))

  /** Bookkeeping for one outstanding miss: which line it refilled and the
    * refill data, kept so a secondary miss to the same line can reuse it. */
  class MissSlot(implicit p: Parameters) extends  XSBundle with HasICacheParameters {
    val m_vSetIdx   = UInt(idxBits.W)
    val m_pTag      = UInt(tagBits.W)
    val m_data      = UInt(blockBits.W)
  }

  val missSlot    = Seq.fill(2)(RegInit(0.U.asTypeOf(new MissSlot)))
  val m_invalid :: m_valid :: m_refilled :: m_flushed :: m_wait_sec_miss :: m_check_final ::Nil = Enum(6)
  val missStateQueue = RegInit(VecInit(Seq.fill(2)(m_invalid)) )
  val reservedRefillData = Wire(Vec(2, UInt(blockBits.W)))

  s2_miss_available :=  VecInit(missStateQueue.map(entry => entry === m_invalid  || entry === m_wait_sec_miss)).reduce(_&&_)

  // fix_sec_miss(slot*2 + port): slot's held refill satisfies this port's miss.
  val fix_sec_miss     = Wire(Vec(4, Bool()))
  val sec_meet_0_miss = fix_sec_miss(0) || fix_sec_miss(2)
  val sec_meet_1_miss = fix_sec_miss(1) || fix_sec_miss(3)
  sec_meet_vec := VecInit(Seq(sec_meet_0_miss,sec_meet_1_miss ))

  //only raise at the first cycle of s2_valid
  val  only_0_miss      = RegNext(s1_fire) && !s2_hit && !s2_double_line && !s2_has_except && !sec_meet_0_miss && !s2_mmio
  val  only_0_hit       = RegNext(s1_fire) && s2_hit && !s2_double_line && !s2_mmio
  val  hit_0_hit_1      = RegNext(s1_fire) && s2_hit && s2_double_line && !s2_mmio
  val  hit_0_miss_1     = RegNext(s1_fire) && !s2_port_hit(1) && !sec_meet_1_miss && (s2_port_hit(0) || sec_meet_0_miss) && s2_double_line  && !s2_has_except && !s2_mmio
  val  miss_0_hit_1     = RegNext(s1_fire) && !s2_port_hit(0) && !sec_meet_0_miss && (s2_port_hit(1) || sec_meet_1_miss) && s2_double_line  && !s2_has_except && !s2_mmio
  val  miss_0_miss_1    = RegNext(s1_fire) && !s2_port_hit(0) && !s2_port_hit(1) && !sec_meet_0_miss && !sec_meet_1_miss && s2_double_line  && !s2_has_except && !s2_mmio

  val  hit_0_except_1   = RegNext(s1_fire) && s2_double_line &&  !s2_except(0) && s2_except(1)  &&  s2_port_hit(0)
  val  miss_0_except_1  = RegNext(s1_fire) && s2_double_line &&  !s2_except(0) && s2_except(1)  && !s2_port_hit(0)
  val  except_0         = RegNext(s1_fire) && s2_except(0)

  /** Stretch a single-cycle `valid` pulse until `release`, with async-style
    * `flush` clearing; the output is high from `valid` through `release`. */
  def holdReleaseLatch(valid: Bool, release: Bool, flush: Bool): Bool ={
    val bit = RegInit(false.B)
    when(flush)                   { bit := false.B  }
      .elsewhen(valid && !release)  { bit := true.B  }
      .elsewhen(release)            { bit := false.B}
    bit || valid
  }

  val  miss_0_hit_1_latch     =   holdReleaseLatch(valid = miss_0_hit_1,    release = s2_fire,      flush = false.B)
  val  miss_0_miss_1_latch    =   holdReleaseLatch(valid = miss_0_miss_1,   release = s2_fire,      flush = false.B)
  val  only_0_miss_latch      =   holdReleaseLatch(valid = only_0_miss,     release = s2_fire,      flush = false.B)
  val  hit_0_miss_1_latch     =   holdReleaseLatch(valid = hit_0_miss_1,    release = s2_fire,      flush = false.B)

  val  miss_0_except_1_latch  =   holdReleaseLatch(valid = miss_0_except_1, release = s2_fire,      flush = false.B)
  val  except_0_latch          =   holdReleaseLatch(valid = except_0,    release = s2_fire,      flush = false.B)
  val  hit_0_except_1_latch         =    holdReleaseLatch(valid = hit_0_except_1,    release = s2_fire,      flush = false.B)

  val only_0_hit_latch        = holdReleaseLatch(valid = only_0_hit,   release = s2_fire,      flush = false.B)
  val hit_0_hit_1_latch        = holdReleaseLatch(valid = hit_0_hit_1,   release = s2_fire,      flush = false.B)


  def waitSecondComeIn(missState: UInt): Bool = (missState === m_wait_sec_miss)

  // deal with secondary miss when s1 enter f2: the slot already holds exactly
  // the line this port missed on, so no new MSHR request is needed.
  def getMissSituat(slotNum : Int, missNum : Int ) :Bool =  {
    RegNext(s1_fire) && (missSlot(slotNum).m_vSetIdx === s2_req_vsetIdx(missNum)) && (missSlot(slotNum).m_pTag  === s2_req_ptags(missNum)) && !s2_port_hit(missNum)  && waitSecondComeIn(missStateQueue(slotNum)) && !s2_mmio
  }

  val miss_0_s2_0 =   getMissSituat(slotNum = 0, missNum = 0)
  val miss_0_s2_1 =   getMissSituat(slotNum = 0, missNum = 1)
  val miss_1_s2_0 =   getMissSituat(slotNum = 1, missNum = 0)
  val miss_1_s2_1 =   getMissSituat(slotNum = 1, missNum = 1)

  val miss_0_s2_0_latch =   holdReleaseLatch(valid = miss_0_s2_0,    release = s2_fire,      flush = false.B)
  val miss_0_s2_1_latch =   holdReleaseLatch(valid = miss_0_s2_1,    release = s2_fire,      flush = false.B)
  val miss_1_s2_0_latch =   holdReleaseLatch(valid = miss_1_s2_0,    release = s2_fire,      flush = false.B)
  val miss_1_s2_1_latch =   holdReleaseLatch(valid = miss_1_s2_1,    release = s2_fire,      flush = false.B)


  val slot_0_solve = fix_sec_miss(0) || fix_sec_miss(1)
  val slot_1_solve = fix_sec_miss(2) || fix_sec_miss(3)
  val slot_solve   = VecInit(Seq(slot_0_solve, slot_1_solve))

  fix_sec_miss   := VecInit(Seq(miss_0_s2_0_latch, miss_0_s2_1_latch, miss_1_s2_0_latch, miss_1_s2_1_latch))

  // Hold refill data of a slot that satisfies a secondary miss, since the
  // slot itself may be reallocated before s2 fires.
  reservedRefillData(0) := DataHoldBypass(data = missSlot(0).m_data, valid = miss_0_s2_0 || miss_0_s2_1)
  reservedRefillData(1) := DataHoldBypass(data = missSlot(1).m_data, valid = miss_1_s2_0 || miss_1_s2_1)

  // Miss wait FSM: request the needed MSHR(s), then wait for one or two
  // refill responses, then signal wait_finish until s2 fires.
  switch(wait_state){
    is(wait_idle){
      when(miss_0_except_1_latch){
        wait_state :=  Mux(toMSHR(0).ready, wait_queue_ready ,wait_idle )
      }.elsewhen( only_0_miss_latch  || miss_0_hit_1_latch){
        wait_state :=  Mux(toMSHR(0).ready, wait_queue_ready ,wait_idle )
      }.elsewhen(hit_0_miss_1_latch){
        wait_state :=  Mux(toMSHR(1).ready, wait_queue_ready ,wait_idle )
      }.elsewhen( miss_0_miss_1_latch ){
        wait_state := Mux(toMSHR(0).ready && toMSHR(1).ready, wait_queue_ready ,wait_idle)
      }
    }

    is(wait_queue_ready){
      wait_state := wait_send_req
    }

    is(wait_send_req) {
      when(miss_0_except_1_latch || only_0_miss_latch || hit_0_miss_1_latch || miss_0_hit_1_latch){
        wait_state :=  wait_one_resp
      }.elsewhen( miss_0_miss_1_latch ){
        wait_state := wait_two_resp
      }
    }

    is(wait_one_resp) {
      when( (miss_0_except_1_latch ||only_0_miss_latch || miss_0_hit_1_latch) && fromMSHR(0).fire()){
        wait_state := wait_finish
      }.elsewhen( hit_0_miss_1_latch && fromMSHR(1).fire()){
        wait_state := wait_finish
      }
    }

    is(wait_two_resp) {
      when(fromMSHR(0).fire() && fromMSHR(1).fire()){
        wait_state := wait_finish
      }.elsewhen( !fromMSHR(0).fire() && fromMSHR(1).fire() ){
        wait_state := wait_0_resp
      }.elsewhen(fromMSHR(0).fire() && !fromMSHR(1).fire()){
        wait_state := wait_1_resp
      }
    }

    is(wait_0_resp) {
      when(fromMSHR(0).fire()){
        wait_state := wait_finish
      }
    }

    is(wait_1_resp) {
      when(fromMSHR(1).fire()){
        wait_state := wait_finish
      }
    }

    is(wait_finish) {
      when(s2_fire) {wait_state := wait_idle }
    }
  }


  // Per-port MSHR request plus the slot's own life-cycle state machine.
  (0 until 2).map { i =>
    if(i == 1) toMSHR(i).valid   := (hit_0_miss_1_latch || miss_0_miss_1_latch) && wait_state === wait_queue_ready && !s2_mmio
        else     toMSHR(i).valid := (only_0_miss_latch || miss_0_hit_1_latch || miss_0_miss_1_latch || miss_0_except_1_latch) && wait_state === wait_queue_ready && !s2_mmio
    toMSHR(i).bits.paddr    := s2_req_paddr(i)
    toMSHR(i).bits.vaddr    := s2_req_vaddr(i)
    toMSHR(i).bits.waymask  := s2_waymask(i)
    toMSHR(i).bits.coh      := s2_victim_coh(i)


    when(toMSHR(i).fire() && missStateQueue(i) === m_invalid){
      missStateQueue(i)     := m_valid
      missSlot(i).m_vSetIdx := s2_req_vsetIdx(i)
      missSlot(i).m_pTag    := get_phy_tag(s2_req_paddr(i))
    }

    when(fromMSHR(i).fire() && missStateQueue(i) === m_valid ){
      missStateQueue(i)     := m_refilled
      missSlot(i).m_data    := fromMSHR(i).bits.data
    }


    when(s2_fire && missStateQueue(i) === m_refilled){
      missStateQueue(i)     := m_wait_sec_miss
    }

    //only the first cycle to check whether meet the secondary miss
    when(missStateQueue(i) === m_wait_sec_miss){
      //the seondary req has been fix by this slot and another also hit || the secondary req for other cacheline and hit
      when((slot_solve(i) && s2_fire) || (!slot_solve(i) && s2_fire) ) {
        missStateQueue(i)     := m_invalid
      }
      //the seondary req has been fix by this slot but another miss/f3 not ready || the seondary req for other cacheline and miss
      .elsewhen((slot_solve(i) && !s2_fire && s2_valid) ||  (s2_valid && !slot_solve(i) && !s2_fire) ){
        missStateQueue(i)     := m_check_final
      }
    }

    when(missStateQueue(i) === m_check_final && toMSHR(i).fire()){
      missStateQueue(i)     :=  m_valid
      missSlot(i).m_vSetIdx := s2_req_vsetIdx(i)
      missSlot(i).m_pTag    := get_phy_tag(s2_req_paddr(i))
    }.elsewhen(missStateQueue(i) === m_check_final) {
      missStateQueue(i)     :=  m_invalid
    }
  }


   // Release of the (valid) victim line: one request per port, held until s2
   // fires so a request is issued at most once per fetch.
   val release_idle  :: release_wait_fire ::Nil = Enum(2)
   val release_state = RegInit(VecInit(Seq.fill(2)(release_idle)) )
   val s2_need_release =  VecInit((0 until PortNumber).map(i =>s2_valid && s2_need_replace(i) && !s2_mmio && !s2_except_af(i) && !s2_except_pf(i)))

   val toReleaseUnit = io.toReleaseUnit


  (0 until 2).map{ i =>
    switch(release_state(i)){
     is(release_idle){
       when(s2_need_release(i)){
         release_state(i) := Mux(toReleaseUnit(i).fire() , release_wait_fire ,release_idle )
       }
     }

     is(release_wait_fire){
       when(s2_fire){ release_state(i) := release_idle}
     }
   }

    toReleaseUnit(i).valid          := s2_valid && s2_need_release(i) && (release_state(i) === release_idle)
    toReleaseUnit(i).bits.addr      := get_block_addr(Cat(s2_victim_tag(i), get_untag(s2_req_vaddr(i))) )
    toReleaseUnit(i).bits.param     := s2_victim_coh(i).onCacheControl(M_FLUSH)._2
    toReleaseUnit(i).bits.voluntary := true.B
    toReleaseUnit(i).bits.hasData   := s2_victim_coh(i) === ClientStates.Dirty
    toReleaseUnit(i).bits.dirty     := s2_victim_coh(i) === ClientStates.Dirty
    toReleaseUnit(i).bits.data      := s2_victim_data(i)
    toReleaseUnit(i).bits.waymask   := s2_waymask(i)
    toReleaseUnit(i).bits.vidx      := s2_req_vsetIdx(i)
  }

  (0 until PortNumber).map{ i =>
    io.victimInfor.s2(i).valid := s2_valid && s2_need_release(i)
    io.victimInfor.s2(i).ptag  := s2_victim_tag(i)
    io.victimInfor.s2(i).vidx  := get_idx(s2_req_vaddr(i))
  }

  (0 until PortNumber).map{ i =>
    io.setInfor.s2(i).valid := s2_bank_miss(i) && s2_valid
    // BUGFIX: s2 set info must use the s2-stage index register; it previously
    // sampled s1_req_vsetIdx, which already belongs to the next request.
    io.setInfor.s2(i).vidx  := s2_req_vsetIdx(i)
  }

  // s2 finishes when data is available (hit / refill / exception / mmio) and
  // every required victim release has been accepted.
  val miss_all_fix       =  wait_state === wait_finish
  val release_all_fix    =  VecInit((0 until PortNumber).map(i => !s2_need_release(i) || release_state(i) === release_wait_fire))
  s2_fetch_finish        := ((s2_valid && s2_fixed_hit) || miss_all_fix || hit_0_except_1_latch || except_0_latch || s2_mmio) && release_all_fix.reduce(_&&_)

  XSPerfAccumulate("ifu_bubble_s2_miss",    s2_valid && !s2_fetch_finish )

  (touch_ways zip touch_sets).zipWithIndex.map{ case((t_w,t_s), i) =>
    t_s(0)         := s1_req_vsetIdx(i)
    t_w(0).valid   := s1_port_hit(i)
    t_w(0).bits    := OHToUInt(s1_tag_match_vec(i))

    t_s(1)         := s2_req_vsetIdx(i)
    t_w(1).valid   := s2_valid && !s2_port_hit(i)
    t_w(1).bits    := OHToUInt(s2_waymask(i))
  }

  // Data selection: hit data, reserved secondary-miss data, or slot refill.
  val s2_hit_datas    = RegEnable(next = s1_hit_data, enable = s1_fire)
  val s2_datas        = Wire(Vec(2, UInt(blockBits.W)))

  s2_datas.zipWithIndex.map{case(bank,i) =>
    if(i == 0) bank := Mux(s2_port_hit(i), s2_hit_datas(i),Mux(miss_0_s2_0_latch,reservedRefillData(0), Mux(miss_1_s2_0_latch,reservedRefillData(1), missSlot(0).m_data)))
    else    bank := Mux(s2_port_hit(i), s2_hit_datas(i),Mux(miss_0_s2_1_latch,reservedRefillData(0), Mux(miss_1_s2_1_latch,reservedRefillData(1), missSlot(1).m_data)))
  }


  (0 until PortNumber).map{ i =>
    if(i ==0) toIFU(i).valid          := s2_fire
       else   toIFU(i).valid          := s2_fire && s2_double_line
    toIFU(i).bits.readData  := s2_datas(i)
    toIFU(i).bits.paddr     := s2_req_paddr(i)
    toIFU(i).bits.vaddr     := s2_req_vaddr(i)
    toIFU(i).bits.tlbExcp.pageFault     := s2_except_pf(i)
    toIFU(i).bits.tlbExcp.accessFault   := s2_except_af(i)
    toIFU(i).bits.tlbExcp.mmio          := s2_mmio
  }

  // BUGFIX: only_0_hit was wired to only_0_miss_latch; bank_hit(0) counted
  // only_0_miss as a hit; `hit` ignored the single-line-hit case.
  io.perfInfo.only_0_hit    := only_0_hit_latch
  io.perfInfo.only_0_miss   := only_0_miss_latch
  io.perfInfo.hit_0_hit_1   := hit_0_hit_1_latch
  io.perfInfo.hit_0_miss_1  := hit_0_miss_1_latch
  io.perfInfo.miss_0_hit_1  := miss_0_hit_1_latch
  io.perfInfo.miss_0_miss_1 := miss_0_miss_1_latch
  io.perfInfo.bank_hit(0)   := only_0_hit_latch  || hit_0_hit_1_latch || hit_0_miss_1_latch || hit_0_except_1_latch
  io.perfInfo.bank_hit(1)   := miss_0_hit_1_latch || hit_0_hit_1_latch
  io.perfInfo.hit           := only_0_hit_latch || hit_0_hit_1_latch
}
602