xref: /XiangShan/src/main/scala/xiangshan/cache/mmu/PageTableCache.scala (revision 3ea4388c307775f866cbebd6405f8201d60f1e53)
/***************************************************************************************
* Copyright (c) 2024 Beijing Institute of Open Source Chip (BOSC)
* Copyright (c) 2020-2024 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.cache.mmu

import org.chipsalliance.cde.config.Parameters
import chisel3._
import chisel3.util._
import xiangshan._
import xiangshan.cache.{HasDCacheParameters, MemoryOpConstants}
import utils._
import utility._
import freechips.rocketchip.diplomacy.{LazyModule, LazyModuleImp}
import freechips.rocketchip.tilelink._
/* The PTW cache caches page table entries of all levels.
 * It responds in the next cycle after a request.
 * The cache itself must not block:
 * when the miss queue is full, requests are blocked outside instead.
 */
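
/* Storage overview (a summary of the declarations below):
 * - l3 (Sv48 only) and l2 hold non-leaf PTEs in fully-associative register files;
 * - l1 and l0 are set-associative SRAMs for non-leaf and 4KB-leaf PTEs;
 * - sp is a fully-associative register file for super-page leaf PTEs.
 * Lookup is pipelined: stageReq issues the SRAM reads, stageDelay captures
 * the read data, stageCheck resolves hit and ECC, and stageResp drives io.resp.
 */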

class PageCachePerPespBundle(implicit p: Parameters) extends PtwBundle {
  val hit = Bool()
  val pre = Bool()
  val ppn = UInt(gvpnLen.W)
  val perm = new PtePermBundle()
  val ecc = Bool()
  val level = UInt(2.W)
  val v = Bool()

  def apply(hit: Bool, pre: Bool, ppn: UInt, perm: PtePermBundle = 0.U.asTypeOf(new PtePermBundle()),
            ecc: Bool = false.B, level: UInt = 0.U, valid: Bool = true.B): Unit = {
    this.hit := hit && !ecc
    this.pre := pre
    this.ppn := ppn
    this.perm := perm
    this.ecc := ecc && hit
    this.level := level
    this.v := valid
  }
}

class PageCacheMergePespBundle(implicit p: Parameters) extends PtwBundle {
  assert(tlbcontiguous == 8, "Only support tlbcontiguous = 8!")
  val hit = Bool()
  val pre = Bool()
  val ppn = Vec(tlbcontiguous, UInt(gvpnLen.W))
  val perm = Vec(tlbcontiguous, new PtePermBundle())
  val ecc = Bool()
  val level = UInt(2.W)
  val v = Vec(tlbcontiguous, Bool())
  val af = Vec(tlbcontiguous, Bool())

  // NOTE: default values are built with VecInit, since Vec(n, gen) expects an
  // unbound Chisel type rather than hardware literals
  def apply(hit: Bool, pre: Bool, ppn: Vec[UInt], perm: Vec[PtePermBundle] = VecInit(Seq.fill(tlbcontiguous)(0.U.asTypeOf(new PtePermBundle()))),
            ecc: Bool = false.B, level: UInt = 0.U, valid: Vec[Bool] = VecInit(Seq.fill(tlbcontiguous)(true.B)), accessFault: Vec[Bool] = VecInit(Seq.fill(tlbcontiguous)(true.B))): Unit = {
    this.hit := hit && !ecc
    this.pre := pre
    this.ppn := ppn
    this.perm := perm
    this.ecc := ecc && hit
    this.level := level
    this.v := valid
    this.af := accessFault
  }
}

class PageCacheRespBundle(implicit p: Parameters) extends PtwBundle {
  val l3 = if (EnableSv48) Some(new PageCachePerPespBundle) else None
  val l2 = new PageCachePerPespBundle
  val l1 = new PageCachePerPespBundle
  val l0 = new PageCacheMergePespBundle
  val sp = new PageCachePerPespBundle
}

class PtwCacheReq(implicit p: Parameters) extends PtwBundle {
  val req_info = new L2TlbInnerBundle()
  val isFirst = Bool()
  val bypassed = if (EnableSv48) Vec(4, Bool()) else Vec(3, Bool())
  val isHptwReq = Bool()
  val hptwId = UInt(log2Up(l2tlbParams.llptwsize).W)
}

class PtwCacheIO()(implicit p: Parameters) extends MMUIOBaseBundle with HasPtwConst {
  val req = Flipped(DecoupledIO(new PtwCacheReq()))
  val resp = DecoupledIO(new Bundle {
    val req_info = new L2TlbInnerBundle()
    val isFirst = Bool()
    val hit = Bool()
    val prefetch = Bool() // is the entry fetched by prefetch
    val bypassed = Bool()
    val toFsm = new Bundle {
      val l3Hit = if (EnableSv48) Some(Bool()) else None
      val l2Hit = Bool()
      val l1Hit = Bool()
      val ppn = UInt(gvpnLen.W)
      val stage1Hit = Bool() // find stage 1 pte in cache, but need to search stage 2 pte in cache at PTW
    }
    val stage1 = new PtwMergeResp()
    val isHptwReq = Bool()
    val toHptw = new Bundle {
      val l3Hit = if (EnableSv48) Some(Bool()) else None
      val l2Hit = Bool()
      val l1Hit = Bool()
      val ppn = UInt(ppnLen.W)
      val id = UInt(log2Up(l2tlbParams.llptwsize).W)
      val resp = new HptwResp() // used if hit
      val bypassed = Bool()
    }
  })
  val refill = Flipped(ValidIO(new Bundle {
    val ptes = UInt(blockBits.W)
    val levelOH = new Bundle {
      // NOTE: levelOH has (Level+1) bits; each bit selects one class of page cache entries
      val sp = Bool()
      val l0 = Bool()
      val l1 = Bool()
      val l2 = Bool()
      val l3 = if (EnableSv48) Some(Bool()) else None
      def apply(levelUInt: UInt, valid: Bool) = {
        sp := GatedValidRegNext((levelUInt === 1.U || levelUInt === 2.U || levelUInt === 3.U) && valid, false.B)
        l0 := GatedValidRegNext((levelUInt === 0.U) & valid, false.B)
        l1 := GatedValidRegNext((levelUInt === 1.U) & valid, false.B)
        l2 := GatedValidRegNext((levelUInt === 2.U) & valid, false.B)
        l3.map(_ := GatedValidRegNext((levelUInt === 3.U) & valid, false.B))
      }
    }
    // duplicate level and sel_pte for each page cache, for better fanout
    val req_info_dup = Vec(3, new L2TlbInnerBundle())
    val level_dup = Vec(3, UInt(log2Up(Level + 1).W))
    val sel_pte_dup = Vec(3, UInt(XLEN.W))
  }))
  val sfence_dup = Vec(4, Input(new SfenceBundle()))
  val csr_dup = Vec(3, Input(new TlbCsrBundle()))
}

class PtwCache()(implicit p: Parameters) extends XSModule with HasPtwConst with HasPerfEvents {
  val io = IO(new PtwCacheIO)
  val ecc = Code.fromString(l2tlbParams.ecc)
  val l1EntryType = new PTWEntriesWithEcc(ecc, num = PtwL1SectorSize, tagLen = PtwL1TagLen, level = 1, hasPerm = false)
  val l0EntryType = new PTWEntriesWithEcc(ecc, num = PtwL0SectorSize, tagLen = PtwL0TagLen, level = 0, hasPerm = true)

  // TODO: four caches make the code messy; think about how to clean this up

  val sfence_dup = io.sfence_dup
  val refill = io.refill.bits
  val refill_prefetch_dup = io.refill.bits.req_info_dup.map(a => from_pre(a.source))
  val refill_h = io.refill.bits.req_info_dup.map(a => Mux(a.s2xlate === allStage, onlyStage1, a.s2xlate))
  val flush_dup = sfence_dup.zip(io.csr_dup).map(f => f._1.valid || f._2.satp.changed || f._2.vsatp.changed || f._2.hgatp.changed)
  val flush = flush_dup(0)

  // when refilling, refuse to accept new requests
  val rwHazard = if (sramSinglePort) io.refill.valid else false.B

  // handle handshake signals and req_info
  // TODO: replace with FlushableQueue
  val stageReq = Wire(Decoupled(new PtwCacheReq()))           // enq stage & read page cache valid
  val stageDelay = Wire(Vec(2, Decoupled(new PtwCacheReq()))) // page cache resp
  val stageCheck = Wire(Vec(2, Decoupled(new PtwCacheReq()))) // check hit & check ecc
  val stageResp = Wire(Decoupled(new PtwCacheReq()))          // deq stage

  val stageDelay_valid_1cycle = OneCycleValid(stageReq.fire, flush)      // catch ram data
  val stageCheck_valid_1cycle = OneCycleValid(stageDelay(1).fire, flush) // replace & perf counter
  val stageResp_valid_1cycle_dup = Wire(Vec(2, Bool()))
  stageResp_valid_1cycle_dup.map(_ := OneCycleValid(stageCheck(1).fire, flush))  // ecc flush

  stageReq <> io.req
  PipelineConnect(stageReq, stageDelay(0), stageDelay(1).ready, flush, rwHazard)
  InsideStageConnect(stageDelay(0), stageDelay(1), stageDelay_valid_1cycle)
  PipelineConnect(stageDelay(1), stageCheck(0), stageCheck(1).ready, flush)
  InsideStageConnect(stageCheck(0), stageCheck(1), stageCheck_valid_1cycle)
  PipelineConnect(stageCheck(1), stageResp, io.resp.ready, flush)
  stageResp.ready := !stageResp.valid || io.resp.ready

  // l3: level 3 non-leaf pte
  val l3 = if (EnableSv48) Some(Reg(Vec(l2tlbParams.l3Size, new PtwEntry(tagLen = PtwL3TagLen)))) else None
  val l3v = if (EnableSv48) Some(RegInit(0.U(l2tlbParams.l3Size.W))) else None
  val l3g = if (EnableSv48) Some(Reg(UInt(l2tlbParams.l3Size.W))) else None
  val l3asids = if (EnableSv48) Some(l3.get.map(_.asid)) else None
  val l3vmids = if (EnableSv48) Some(l3.get.map(_.vmid)) else None
  val l3h = if (EnableSv48) Some(Reg(Vec(l2tlbParams.l3Size, UInt(2.W)))) else None

  // l2: level 2 non-leaf pte
  val l2 = Reg(Vec(l2tlbParams.l2Size, new PtwEntry(tagLen = PtwL2TagLen)))
  val l2v = RegInit(0.U(l2tlbParams.l2Size.W))
  val l2g = Reg(UInt(l2tlbParams.l2Size.W))
  val l2asids = l2.map(_.asid)
  val l2vmids = l2.map(_.vmid)
  val l2h = Reg(Vec(l2tlbParams.l2Size, UInt(2.W)))

  // l1: level 1 non-leaf pte
  val l1 = Module(new SRAMTemplate(
    l1EntryType,
    set = l2tlbParams.l1nSets,
    way = l2tlbParams.l1nWays,
    singlePort = sramSinglePort
  ))
  val l1v = RegInit(0.U((l2tlbParams.l1nSets * l2tlbParams.l1nWays).W))
  val l1g = Reg(UInt((l2tlbParams.l1nSets * l2tlbParams.l1nWays).W))
  val l1h = Reg(Vec(l2tlbParams.l1nSets, Vec(l2tlbParams.l1nWays, UInt(2.W))))
  def getl1vSet(vpn: UInt) = {
    require(log2Up(l2tlbParams.l1nWays) == log2Down(l2tlbParams.l1nWays))
    val set = genPtwL1SetIdx(vpn)
    require(set.getWidth == log2Up(l2tlbParams.l1nSets))
    val l1vVec = l1v.asTypeOf(Vec(l2tlbParams.l1nSets, UInt(l2tlbParams.l1nWays.W)))
    l1vVec(set)
  }
  def getl1hSet(vpn: UInt) = {
    require(log2Up(l2tlbParams.l1nWays) == log2Down(l2tlbParams.l1nWays))
    val set = genPtwL1SetIdx(vpn)
    require(set.getWidth == log2Up(l2tlbParams.l1nSets))
    l1h(set)
  }

  // l0: level 0 leaf pte of 4KB pages
  val l0 = Module(new SRAMTemplate(
    l0EntryType,
    set = l2tlbParams.l0nSets,
    way = l2tlbParams.l0nWays,
    singlePort = sramSinglePort
  ))
  val l0v = RegInit(0.U((l2tlbParams.l0nSets * l2tlbParams.l0nWays).W))
  val l0g = Reg(UInt((l2tlbParams.l0nSets * l2tlbParams.l0nWays).W))
  val l0h = Reg(Vec(l2tlbParams.l0nSets, Vec(l2tlbParams.l0nWays, UInt(2.W))))
  def getl0vSet(vpn: UInt) = {
    require(log2Up(l2tlbParams.l0nWays) == log2Down(l2tlbParams.l0nWays))
    val set = genPtwL0SetIdx(vpn)
    require(set.getWidth == log2Up(l2tlbParams.l0nSets))
    val l0vVec = l0v.asTypeOf(Vec(l2tlbParams.l0nSets, UInt(l2tlbParams.l0nWays.W)))
    l0vVec(set)
  }
  def getl0hSet(vpn: UInt) = {
    require(log2Up(l2tlbParams.l0nWays) == log2Down(l2tlbParams.l0nWays))
    val set = genPtwL0SetIdx(vpn)
    require(set.getWidth == log2Up(l2tlbParams.l0nSets))
    l0h(set)
  }

  // sp: leaf pte of level 1/2/3, i.e. 2MB/1GB/512GB super pages
  val sp = Reg(Vec(l2tlbParams.spSize, new PtwEntry(tagLen = SPTagLen, hasPerm = true, hasLevel = true)))
  val spv = RegInit(0.U(l2tlbParams.spSize.W))
  val spg = Reg(UInt(l2tlbParams.spSize.W))
  val spasids = sp.map(_.asid)
  val spvmids = sp.map(_.vmid)
  val sph = Reg(Vec(l2tlbParams.spSize, UInt(2.W)))

  // Access Perf
  val l3AccessPerf = if (EnableSv48) Some(Wire(Vec(l2tlbParams.l3Size, Bool()))) else None
  val l2AccessPerf = Wire(Vec(l2tlbParams.l2Size, Bool()))
  val l1AccessPerf = Wire(Vec(l2tlbParams.l1nWays, Bool()))
  val l0AccessPerf = Wire(Vec(l2tlbParams.l0nWays, Bool()))
  val spAccessPerf = Wire(Vec(l2tlbParams.spSize, Bool()))
  if (EnableSv48) l3AccessPerf.map(_.map(_ := false.B))
  l2AccessPerf.map(_ := false.B)
  l1AccessPerf.map(_ := false.B)
  l0AccessPerf.map(_ := false.B)
  spAccessPerf.map(_ := false.B)

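  // vpn_match compares two VPNs above the bits translated at `level`.
  // The "+3" skips the low log2(tlbcontiguous) = 3 sector bits, so all eight
  // PTEs of one cached line share a single tag. For level = 0 this compares
  // vpn(vpnLen-1, 3), i.e. everything except the sector index.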
  def vpn_match(vpn1: UInt, vpn2: UInt, level: Int) = {
    (vpn1(vpnLen-1, vpnnLen*level+3) === vpn2(vpnLen-1, vpnnLen*level+3))
  }
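  // Both the in-flight request's s2xlate and the refill's s2xlate are
  // normalized below (allStage is folded into onlyStage1) before comparison,
  // so an all-stage request can match a stage-1 refill.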
  // NOTE: not actually bypassed; this only checks whether the refill hits,
  //       so the requester can re-access the page cache
  def refill_bypass(vpn: UInt, level: Int, h_search: UInt) = {
    val change_h = MuxLookup(h_search, noS2xlate)(Seq(
      allStage -> onlyStage1,
      onlyStage1 -> onlyStage1,
      onlyStage2 -> onlyStage2
    ))
    val change_refill_h = MuxLookup(io.refill.bits.req_info_dup(0).s2xlate, noS2xlate)(Seq(
      allStage -> onlyStage1,
      onlyStage1 -> onlyStage1,
      onlyStage2 -> onlyStage2
    ))
    val refill_vpn = io.refill.bits.req_info_dup(0).vpn
    io.refill.valid && (level.U === io.refill.bits.level_dup(0)) && vpn_match(refill_vpn, vpn, level) && change_h === change_refill_h
  }

  val vpn_search = stageReq.bits.req_info.vpn
  val h_search = MuxLookup(stageReq.bits.req_info.s2xlate, noS2xlate)(Seq(
    allStage -> onlyStage1,
    onlyStage1 -> onlyStage1,
    onlyStage2 -> onlyStage2
  ))

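  /* The fully-associative lookups below (l3/l2/sp) share one pattern: tags
   * are compared at stageReq, the hit vector is registered into stageDelay,
   * the selected data is held with DataHoldBypass, and the result is
   * registered again at stageDelay(1).fire so that all levels arrive at
   * stageCheck in the same cycle. */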
  // l3
  val l3Hit = if (EnableSv48) Some(Wire(Bool())) else None
  val l3HitPPN = if (EnableSv48) Some(Wire(UInt(ppnLen.W))) else None
  val l3Pre = if (EnableSv48) Some(Wire(Bool())) else None
  val ptwl3replace = if (EnableSv48) Some(ReplacementPolicy.fromString(l2tlbParams.l3Replacer, l2tlbParams.l3Size)) else None
  if (EnableSv48) {
    val hitVecT = l3.get.zipWithIndex.map {
        case (e, i) => (e.hit(vpn_search, io.csr_dup(2).satp.asid, io.csr_dup(2).vsatp.asid, io.csr_dup(2).hgatp.asid, s2xlate = h_search =/= noS2xlate)
          && l3v.get(i) && h_search === l3h.get(i))
    }
    val hitVec = hitVecT.map(RegEnable(_, stageReq.fire))

    // at stageDelay: check hit for l3
    val hitPPN = DataHoldBypass(ParallelPriorityMux(hitVec zip l3.get.map(_.ppn)), stageDelay_valid_1cycle)
    val hitPre = DataHoldBypass(ParallelPriorityMux(hitVec zip l3.get.map(_.prefetch)), stageDelay_valid_1cycle)
    val hit = DataHoldBypass(ParallelOR(hitVec), stageDelay_valid_1cycle)

    when (hit && stageDelay_valid_1cycle) { ptwl3replace.get.access(OHToUInt(hitVec)) }

    l3AccessPerf.get.zip(hitVec).map{ case (l, h) => l := h && stageDelay_valid_1cycle}
    for (i <- 0 until l2tlbParams.l3Size) {
      XSDebug(stageReq.fire, p"[l3] l3(${i.U}) ${l3.get(i)} hit:${l3.get(i).hit(vpn_search, io.csr_dup(2).satp.asid, io.csr_dup(2).vsatp.asid, io.csr_dup(2).hgatp.asid, s2xlate = h_search =/= noS2xlate)}\n")
    }
    XSDebug(stageReq.fire, p"[l3] l3v:${Binary(l3v.get)} hitVecT:${Binary(VecInit(hitVecT).asUInt)}\n")
    XSDebug(stageDelay(0).valid, p"[l3] l3Hit:${hit} l3HitPPN:0x${Hexadecimal(hitPPN)} hitVec:${VecInit(hitVec).asUInt}\n")

    VecInit(hitVecT).suggestName(s"l3_hitVecT")
    VecInit(hitVec).suggestName(s"l3_hitVec")

    // synchronize with the other levels using RegEnable
    l3Hit.map(_ := RegEnable(hit, stageDelay(1).fire))
    l3HitPPN.map(_ := RegEnable(hitPPN, stageDelay(1).fire))
    l3Pre.map(_ := RegEnable(hitPre, stageDelay(1).fire))
  }

  // l2
  val ptwl2replace = ReplacementPolicy.fromString(l2tlbParams.l2Replacer, l2tlbParams.l2Size)
  val (l2Hit, l2HitPPN, l2Pre) = {
    val hitVecT = l2.zipWithIndex.map {
      case (e, i) => (e.hit(vpn_search, io.csr_dup(2).satp.asid, io.csr_dup(2).vsatp.asid, io.csr_dup(2).hgatp.asid, s2xlate = h_search =/= noS2xlate)
        && l2v(i) && h_search === l2h(i))
    }
    val hitVec = hitVecT.map(RegEnable(_, stageReq.fire))

    // at stageDelay: check hit for l2
    val hitPPN = DataHoldBypass(ParallelPriorityMux(hitVec zip l2.map(_.ppn)), stageDelay_valid_1cycle)
    val hitPre = DataHoldBypass(ParallelPriorityMux(hitVec zip l2.map(_.prefetch)), stageDelay_valid_1cycle)
    val hit = DataHoldBypass(ParallelOR(hitVec), stageDelay_valid_1cycle)

    when (hit && stageDelay_valid_1cycle) { ptwl2replace.access(OHToUInt(hitVec)) }

    l2AccessPerf.zip(hitVec).map{ case (l, h) => l := h && stageDelay_valid_1cycle}
    for (i <- 0 until l2tlbParams.l2Size) {
      XSDebug(stageReq.fire, p"[l2] l2(${i.U}) ${l2(i)} hit:${l2(i).hit(vpn_search, io.csr_dup(2).satp.asid, io.csr_dup(2).vsatp.asid, io.csr_dup(2).hgatp.asid, s2xlate = h_search =/= noS2xlate)}\n")
    }
    XSDebug(stageReq.fire, p"[l2] l2v:${Binary(l2v)} hitVecT:${Binary(VecInit(hitVecT).asUInt)}\n")
    XSDebug(stageDelay(0).valid, p"[l2] l2Hit:${hit} l2HitPPN:0x${Hexadecimal(hitPPN)} hitVec:${VecInit(hitVec).asUInt}\n")

    VecInit(hitVecT).suggestName(s"l2_hitVecT")
    VecInit(hitVec).suggestName(s"l2_hitVec")

    // synchronize with the other levels using RegEnable
    (RegEnable(hit, stageDelay(1).fire),
     RegEnable(hitPPN, stageDelay(1).fire),
     RegEnable(hitPre, stageDelay(1).fire))
  }

  // l1
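  // l1/l0 are SRAM-backed: the read is issued at stageReq, the data returns
  // one cycle later and is captured at stageDelay, and hit/ECC are resolved
  // at stageCheck.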
  val ptwl1replace = ReplacementPolicy.fromString(l2tlbParams.l1Replacer, l2tlbParams.l1nWays, l2tlbParams.l1nSets)
  val (l1Hit, l1HitPPN, l1Pre, l1eccError) = {
    val ridx = genPtwL1SetIdx(vpn_search)
    l1.io.r.req.valid := stageReq.fire
    l1.io.r.req.bits.apply(setIdx = ridx)
    val vVec_req = getl1vSet(vpn_search)
    val hVec_req = getl1hSet(vpn_search)

    // delay one cycle after sram read
    val delay_vpn = stageDelay(0).bits.req_info.vpn
    val delay_h = MuxLookup(stageDelay(0).bits.req_info.s2xlate, noS2xlate)(Seq(
      allStage -> onlyStage1,
      onlyStage1 -> onlyStage1,
      onlyStage2 -> onlyStage2
    ))
    val data_resp = DataHoldBypass(l1.io.r.resp.data, stageDelay_valid_1cycle)
    val vVec_delay = RegEnable(vVec_req, stageReq.fire)
    val hVec_delay = RegEnable(hVec_req, stageReq.fire)
    val hitVec_delay = VecInit(data_resp.zip(vVec_delay.asBools).zip(hVec_delay).map { case ((wayData, v), h) =>
      wayData.entries.hit(delay_vpn, io.csr_dup(1).satp.asid, io.csr_dup(1).vsatp.asid, io.csr_dup(1).hgatp.asid, s2xlate = delay_h =/= noS2xlate) && v && (delay_h === h)})

    // check hit and ecc
    val check_vpn = stageCheck(0).bits.req_info.vpn
    val ramDatas = RegEnable(data_resp, stageDelay(1).fire)
    val vVec = RegEnable(vVec_delay, stageDelay(1).fire).asBools

    val hitVec = RegEnable(hitVec_delay, stageDelay(1).fire)
    val hitWayEntry = ParallelPriorityMux(hitVec zip ramDatas)
    val hitWayData = hitWayEntry.entries
    val hit = ParallelOR(hitVec)
    val hitWay = ParallelPriorityMux(hitVec zip (0 until l2tlbParams.l1nWays).map(_.U(log2Up(l2tlbParams.l1nWays).W)))
    val eccError = WireInit(false.B)
    if (l2tlbParams.enablePTWECC) {
      eccError := hitWayEntry.decode()
    } else {
      eccError := false.B
    }

    ridx.suggestName(s"l1_ridx")
    ramDatas.suggestName(s"l1_ramDatas")
    hitVec.suggestName(s"l1_hitVec")
    hitWayData.suggestName(s"l1_hitWayData")
    hitWay.suggestName(s"l1_hitWay")

    when (hit && stageCheck_valid_1cycle) { ptwl1replace.access(genPtwL1SetIdx(check_vpn), hitWay) }

    l1AccessPerf.zip(hitVec).map{ case (l, h) => l := h && stageCheck_valid_1cycle }
    XSDebug(stageDelay_valid_1cycle, p"[l1] ridx:0x${Hexadecimal(ridx)}\n")
    for (i <- 0 until l2tlbParams.l1nWays) {
      XSDebug(stageCheck_valid_1cycle, p"[l1] ramDatas(${i.U}) ${ramDatas(i)}  l1v:${vVec(i)}  hit:${hit}\n")
    }
    XSDebug(stageCheck_valid_1cycle, p"[l1] l1Hit:${hit} l1HitPPN:0x${Hexadecimal(hitWayData.ppns(genPtwL1SectorIdx(check_vpn)))} hitVec:${Binary(hitVec.asUInt)} hitWay:${hitWay} vidx:${vVec}\n")

    (hit, hitWayData.ppns(genPtwL1SectorIdx(check_vpn)), hitWayData.prefetch, eccError)
  }

  // l0
  val ptwl0replace = ReplacementPolicy.fromString(l2tlbParams.l0Replacer, l2tlbParams.l0nWays, l2tlbParams.l0nSets)
  val (l0Hit, l0HitData, l0Pre, l0eccError) = {
    val ridx = genPtwL0SetIdx(vpn_search)
    l0.io.r.req.valid := stageReq.fire
    l0.io.r.req.bits.apply(setIdx = ridx)
    val vVec_req = getl0vSet(vpn_search)
    val hVec_req = getl0hSet(vpn_search)

    // delay one cycle after sram read
    val delay_vpn = stageDelay(0).bits.req_info.vpn
    val delay_h = MuxLookup(stageDelay(0).bits.req_info.s2xlate, noS2xlate)(Seq(
      allStage -> onlyStage1,
      onlyStage1 -> onlyStage1,
      onlyStage2 -> onlyStage2
    ))
    val data_resp = DataHoldBypass(l0.io.r.resp.data, stageDelay_valid_1cycle)
    val vVec_delay = RegEnable(vVec_req, stageReq.fire)
    val hVec_delay = RegEnable(hVec_req, stageReq.fire)
    val hitVec_delay = VecInit(data_resp.zip(vVec_delay.asBools).zip(hVec_delay).map { case ((wayData, v), h) =>
      wayData.entries.hit(delay_vpn, io.csr_dup(0).satp.asid, io.csr_dup(0).vsatp.asid, io.csr_dup(0).hgatp.asid, s2xlate = delay_h =/= noS2xlate) && v && (delay_h === h)})

    // check hit and ecc
    val check_vpn = stageCheck(0).bits.req_info.vpn
    val ramDatas = RegEnable(data_resp, stageDelay(1).fire)
    val vVec = RegEnable(vVec_delay, stageDelay(1).fire).asBools

    val hitVec = RegEnable(hitVec_delay, stageDelay(1).fire)
    val hitWayEntry = ParallelPriorityMux(hitVec zip ramDatas)
    val hitWayData = hitWayEntry.entries
    val hitWayEcc = hitWayEntry.ecc
    val hit = ParallelOR(hitVec)
    val hitWay = ParallelPriorityMux(hitVec zip (0 until l2tlbParams.l0nWays).map(_.U(log2Up(l2tlbParams.l0nWays).W)))
    val eccError = WireInit(false.B)
    if (l2tlbParams.enablePTWECC) {
      eccError := hitWayEntry.decode()
    } else {
      eccError := false.B
    }

    when (hit && stageCheck_valid_1cycle) { ptwl0replace.access(genPtwL0SetIdx(check_vpn), hitWay) }

    l0AccessPerf.zip(hitVec).map{ case (l, h) => l := h && stageCheck_valid_1cycle }
    XSDebug(stageReq.fire, p"[l0] ridx:0x${Hexadecimal(ridx)}\n")
    for (i <- 0 until l2tlbParams.l0nWays) {
      XSDebug(stageCheck_valid_1cycle, p"[l0] ramDatas(${i.U}) ${ramDatas(i)}  l0v:${vVec(i)}  hit:${hitVec(i)}\n")
    }
    XSDebug(stageCheck_valid_1cycle, p"[l0] l0Hit:${hit} l0HitData:${hitWayData} hitVec:${Binary(hitVec.asUInt)} hitWay:${hitWay} v:${vVec}\n")

    ridx.suggestName(s"l0_ridx")
    ramDatas.suggestName(s"l0_ramDatas")
    hitVec.suggestName(s"l0_hitVec")
    hitWay.suggestName(s"l0_hitWay")

    (hit, hitWayData, hitWayData.prefetch, eccError)
  }
  val l0HitPPN = l0HitData.ppns
  val l0HitPerm = l0HitData.perms.getOrElse(0.U.asTypeOf(Vec(PtwL0SectorSize, new PtePermBundle)))
  val l0HitValid = l0HitData.vs
  val l0HitAf = l0HitData.af

  // super page
  val spreplace = ReplacementPolicy.fromString(l2tlbParams.spReplacer, l2tlbParams.spSize)
  val (spHit, spHitData, spPre, spValid) = {
    val hitVecT = sp.zipWithIndex.map { case (e, i) => e.hit(vpn_search, io.csr_dup(0).satp.asid, io.csr_dup(0).vsatp.asid, io.csr_dup(0).hgatp.asid, s2xlate = h_search =/= noS2xlate) && spv(i) && (sph(i) === h_search) }
    val hitVec = hitVecT.map(RegEnable(_, stageReq.fire))
    val hitData = ParallelPriorityMux(hitVec zip sp)
    val hit = ParallelOR(hitVec)

    when (hit && stageDelay_valid_1cycle) { spreplace.access(OHToUInt(hitVec)) }

    spAccessPerf.zip(hitVec).map{ case (s, h) => s := h && stageDelay_valid_1cycle }
    for (i <- 0 until l2tlbParams.spSize) {
      XSDebug(stageReq.fire, p"[sp] sp(${i.U}) ${sp(i)} hit:${sp(i).hit(vpn_search, io.csr_dup(0).satp.asid, io.csr_dup(0).vsatp.asid, io.csr_dup(0).hgatp.asid, s2xlate = h_search =/= noS2xlate)} spv:${spv(i)}\n")
    }
    XSDebug(stageDelay_valid_1cycle, p"[sp] spHit:${hit} spHitData:${hitData} hitVec:${Binary(VecInit(hitVec).asUInt)}\n")

    VecInit(hitVecT).suggestName(s"sp_hitVecT")
    VecInit(hitVec).suggestName(s"sp_hitVec")

    (RegEnable(hit, stageDelay(1).fire),
     RegEnable(hitData, stageDelay(1).fire),
     RegEnable(hitData.prefetch, stageDelay(1).fire),
     RegEnable(hitData.v, stageDelay(1).fire))
  }
  val spHitPerm = spHitData.perm.getOrElse(0.U.asTypeOf(new PtePermBundle))
  val spHitLevel = spHitData.level.getOrElse(0.U)

  val check_res = Wire(new PageCacheRespBundle)
  check_res.l3.map(_.apply(l3Hit.get, l3Pre.get, l3HitPPN.get))
  check_res.l2.apply(l2Hit, l2Pre, l2HitPPN)
  check_res.l1.apply(l1Hit, l1Pre, l1HitPPN, ecc = l1eccError)
  check_res.l0.apply(l0Hit, l0Pre, l0HitPPN, l0HitPerm, l0eccError, valid = l0HitValid, accessFault = l0HitAf)
  check_res.sp.apply(spHit, spPre, spHitData.ppn, spHitPerm, false.B, spHitLevel, spValid)

  val resp_res = Reg(new PageCacheRespBundle)
  when (stageCheck(1).fire) { resp_res := check_res }

  // stageResp bypass
  val bypassed = if (EnableSv48) Wire(Vec(4, Bool())) else Wire(Vec(3, Bool()))
  bypassed.indices.foreach(i =>
    bypassed(i) := stageResp.bits.bypassed(i) ||
      ValidHoldBypass(refill_bypass(stageResp.bits.req_info.vpn, i, stageResp.bits.req_info.s2xlate),
        OneCycleValid(stageCheck(1).fire, false.B) || io.refill.valid)
  )
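
  // A request counts as "bypassed" when a matching refill raced past it while
  // it sat in the pipeline: the needed entry may now be present, so the
  // requester should re-access the page cache instead of walking memory.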

  // stageResp bypass to hptw
  val hptw_bypassed = if (EnableSv48) Wire(Vec(4, Bool())) else Wire(Vec(3, Bool()))
  hptw_bypassed.indices.foreach(i =>
    hptw_bypassed(i) := stageResp.bits.bypassed(i) ||
      ValidHoldBypass(refill_bypass(stageResp.bits.req_info.vpn, i, stageResp.bits.req_info.s2xlate),
        io.resp.fire)
  )

  val isAllStage = stageResp.bits.req_info.s2xlate === allStage
  val isOnlyStage2 = stageResp.bits.req_info.s2xlate === onlyStage2
  val stage1Hit = (resp_res.l0.hit || resp_res.sp.hit) && isAllStage
  val idx = stageResp.bits.req_info.vpn(2, 0)
  val stage1Pf = !Mux(resp_res.l0.hit, resp_res.l0.v(idx), resp_res.sp.v)
  io.resp.bits.req_info := stageResp.bits.req_info
  io.resp.bits.isFirst := stageResp.bits.isFirst
  io.resp.bits.hit := (resp_res.l0.hit || resp_res.sp.hit) && (!isAllStage || isAllStage && stage1Pf)
  if (EnableSv48) {
    io.resp.bits.bypassed := (bypassed(0) || (bypassed(1) && !resp_res.l1.hit) || (bypassed(2) && !resp_res.l2.hit) || (bypassed(3) && !resp_res.l3.get.hit)) && !isAllStage
  } else {
    io.resp.bits.bypassed := (bypassed(0) || (bypassed(1) && !resp_res.l1.hit) || (bypassed(2) && !resp_res.l2.hit)) && !isAllStage
  }
  io.resp.bits.prefetch := resp_res.l0.pre && resp_res.l0.hit || resp_res.sp.pre && resp_res.sp.hit
  io.resp.bits.toFsm.l3Hit.map(_ := resp_res.l3.get.hit && !stage1Hit && !isOnlyStage2 && !stageResp.bits.isHptwReq)
  io.resp.bits.toFsm.l2Hit := resp_res.l2.hit && !stage1Hit && !isOnlyStage2 && !stageResp.bits.isHptwReq
  io.resp.bits.toFsm.l1Hit := resp_res.l1.hit && !stage1Hit && !isOnlyStage2 && !stageResp.bits.isHptwReq
  io.resp.bits.toFsm.ppn := Mux(resp_res.l1.hit, resp_res.l1.ppn, Mux(resp_res.l2.hit, resp_res.l2.ppn, resp_res.l3.getOrElse(0.U.asTypeOf(new PageCachePerPespBundle)).ppn))
  io.resp.bits.toFsm.stage1Hit := stage1Hit

  io.resp.bits.isHptwReq := stageResp.bits.isHptwReq
  if (EnableSv48) {
    io.resp.bits.toHptw.bypassed := (hptw_bypassed(0) || (hptw_bypassed(1) && !resp_res.l1.hit) || (hptw_bypassed(2) && !resp_res.l2.hit) || (hptw_bypassed(3) && !resp_res.l3.get.hit)) && stageResp.bits.isHptwReq
  } else {
    io.resp.bits.toHptw.bypassed := (hptw_bypassed(0) || (hptw_bypassed(1) && !resp_res.l1.hit) || (hptw_bypassed(2) && !resp_res.l2.hit)) && stageResp.bits.isHptwReq
  }
  io.resp.bits.toHptw.id := stageResp.bits.hptwId
  io.resp.bits.toHptw.l3Hit.map(_ := resp_res.l3.get.hit && stageResp.bits.isHptwReq)
  io.resp.bits.toHptw.l2Hit := resp_res.l2.hit && stageResp.bits.isHptwReq
  io.resp.bits.toHptw.l1Hit := resp_res.l1.hit && stageResp.bits.isHptwReq
  io.resp.bits.toHptw.ppn := Mux(resp_res.l1.hit, resp_res.l1.ppn, Mux(resp_res.l2.hit, resp_res.l2.ppn, resp_res.l3.getOrElse(0.U.asTypeOf(new PageCachePerPespBundle)).ppn))(ppnLen - 1, 0)
  io.resp.bits.toHptw.resp.entry.tag := stageResp.bits.req_info.vpn
  io.resp.bits.toHptw.resp.entry.asid := DontCare
  io.resp.bits.toHptw.resp.entry.vmid.map(_ := io.csr_dup(0).hgatp.asid)
  io.resp.bits.toHptw.resp.entry.level.map(_ := Mux(resp_res.l0.hit, 0.U, resp_res.sp.level))
  io.resp.bits.toHptw.resp.entry.prefetch := from_pre(stageResp.bits.req_info.source)
  io.resp.bits.toHptw.resp.entry.ppn := Mux(resp_res.l0.hit, resp_res.l0.ppn(idx), resp_res.sp.ppn)(ppnLen - 1, 0)
  io.resp.bits.toHptw.resp.entry.perm.map(_ := Mux(resp_res.l0.hit, resp_res.l0.perm(idx), resp_res.sp.perm))
  io.resp.bits.toHptw.resp.entry.v := Mux(resp_res.l0.hit, resp_res.l0.v(idx), resp_res.sp.v)
  io.resp.bits.toHptw.resp.gpf := !io.resp.bits.toHptw.resp.entry.v
  io.resp.bits.toHptw.resp.gaf := Mux(resp_res.l0.hit, resp_res.l0.af(idx), false.B)

  io.resp.bits.stage1.entry.map(_.tag := stageResp.bits.req_info.vpn(vpnLen - 1, 3))
  io.resp.bits.stage1.entry.map(_.asid := Mux(stageResp.bits.req_info.hasS2xlate(), io.csr_dup(0).vsatp.asid, io.csr_dup(0).satp.asid)) // DontCare
  io.resp.bits.stage1.entry.map(_.vmid.map(_ := io.csr_dup(0).hgatp.asid))
  if (EnableSv48) {
    io.resp.bits.stage1.entry.map(_.level.map(_ := Mux(resp_res.l0.hit, 0.U,
      Mux(resp_res.sp.hit, resp_res.sp.level,
        Mux(resp_res.l1.hit, 1.U,
          Mux(resp_res.l2.hit, 2.U, 3.U))))))
  } else {
    io.resp.bits.stage1.entry.map(_.level.map(_ := Mux(resp_res.l0.hit, 0.U,
      Mux(resp_res.sp.hit, resp_res.sp.level,
        Mux(resp_res.l1.hit, 1.U, 2.U)))))
  }
  io.resp.bits.stage1.entry.map(_.prefetch := from_pre(stageResp.bits.req_info.source))
  for (i <- 0 until tlbcontiguous) {
    if (EnableSv48) {
      io.resp.bits.stage1.entry(i).ppn := Mux(resp_res.l0.hit, resp_res.l0.ppn(i)(gvpnLen - 1, sectortlbwidth),
        Mux(resp_res.sp.hit, resp_res.sp.ppn(gvpnLen - 1, sectortlbwidth),
          Mux(resp_res.l1.hit, resp_res.l1.ppn(gvpnLen - 1, sectortlbwidth),
            Mux(resp_res.l2.hit, resp_res.l2.ppn(gvpnLen - 1, sectortlbwidth),
              resp_res.l3.get.ppn(gvpnLen - 1, sectortlbwidth)))))
      io.resp.bits.stage1.entry(i).ppn_low := Mux(resp_res.l0.hit, resp_res.l0.ppn(i)(sectortlbwidth - 1, 0),
        Mux(resp_res.sp.hit, resp_res.sp.ppn(sectortlbwidth - 1, 0),
          Mux(resp_res.l1.hit, resp_res.l1.ppn(sectortlbwidth - 1, 0),
            Mux(resp_res.l2.hit, resp_res.l2.ppn(sectortlbwidth - 1, 0),
              resp_res.l3.get.ppn(sectortlbwidth - 1, 0)))))
      io.resp.bits.stage1.entry(i).v := Mux(resp_res.l0.hit, resp_res.l0.v(i),
        Mux(resp_res.sp.hit, resp_res.sp.v,
          Mux(resp_res.l1.hit, resp_res.l1.v,
            Mux(resp_res.l2.hit, resp_res.l2.v,
              resp_res.l3.get.v))))
    } else {
      io.resp.bits.stage1.entry(i).ppn := Mux(resp_res.l0.hit, resp_res.l0.ppn(i)(gvpnLen - 1, sectortlbwidth),
        Mux(resp_res.sp.hit, resp_res.sp.ppn(gvpnLen - 1, sectortlbwidth),
          Mux(resp_res.l1.hit, resp_res.l1.ppn(gvpnLen - 1, sectortlbwidth),
            resp_res.l2.ppn(gvpnLen - 1, sectortlbwidth))))
      io.resp.bits.stage1.entry(i).ppn_low := Mux(resp_res.l0.hit, resp_res.l0.ppn(i)(sectortlbwidth - 1, 0),
        Mux(resp_res.sp.hit, resp_res.sp.ppn(sectortlbwidth - 1, 0),
          Mux(resp_res.l1.hit, resp_res.l1.ppn(sectortlbwidth - 1, 0),
            resp_res.l2.ppn(sectortlbwidth - 1, 0))))
      io.resp.bits.stage1.entry(i).v := Mux(resp_res.l0.hit, resp_res.l0.v(i),
        Mux(resp_res.sp.hit, resp_res.sp.v,
          Mux(resp_res.l1.hit, resp_res.l1.v,
            resp_res.l2.v)))
    }
    io.resp.bits.stage1.entry(i).perm.map(_ := Mux(resp_res.l0.hit, resp_res.l0.perm(i), resp_res.sp.perm))
    io.resp.bits.stage1.entry(i).pf := !io.resp.bits.stage1.entry(i).v
    io.resp.bits.stage1.entry(i).af := Mux(resp_res.l0.hit, resp_res.l0.af(i), false.B)
  }
  io.resp.bits.stage1.pteidx := UIntToOH(idx).asBools
  io.resp.bits.stage1.not_super := resp_res.l0.hit
  io.resp.valid := stageResp.valid
  XSError(stageResp.valid && resp_res.l0.hit && resp_res.sp.hit, "normal page and super page both hit")
  XSError(stageResp.valid && io.resp.bits.hit && bypassed(0), "page cache, bypassed but hit")

  // refill Perf
  val l3RefillPerf = if (EnableSv48) Some(Wire(Vec(l2tlbParams.l3Size, Bool()))) else None
  val l2RefillPerf = Wire(Vec(l2tlbParams.l2Size, Bool()))
  val l1RefillPerf = Wire(Vec(l2tlbParams.l1nWays, Bool()))
  val l0RefillPerf = Wire(Vec(l2tlbParams.l0nWays, Bool()))
  val spRefillPerf = Wire(Vec(l2tlbParams.spSize, Bool()))
  l3RefillPerf.map(_.map(_ := false.B))
  l2RefillPerf.map(_ := false.B)
  l1RefillPerf.map(_ := false.B)
  l0RefillPerf.map(_ := false.B)
  spRefillPerf.map(_ := false.B)

  // refill
  l1.io.w.req <> DontCare
  l0.io.w.req <> DontCare
  l1.io.w.req.valid := false.B
  l0.io.w.req.valid := false.B

  val memRdata = refill.ptes
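  // split the refilled memory block into XLEN-bit PTEs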
  val memPtes = (0 until (l2tlbParams.blockBytes/(XLEN/8))).map(i => memRdata((i+1)*XLEN-1, i*XLEN).asTypeOf(new PteBundle))
  val memSelData = io.refill.bits.sel_pte_dup
  val memPte = memSelData.map(a => a.asTypeOf(new PteBundle))
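
  /* Refill policy, as implemented by the when-blocks below: non-leaf PTEs go
   * to l3 (Sv48 only), l2 or l1 according to their level; level-0 blocks go
   * to l0; super-page leaves and page-faulting entries are kept in sp. */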

  // TODO: handle sfenceLatch outside
  if (EnableSv48) {
    when (!flush_dup(2) && refill.levelOH.l3.get && !memPte(2).isLeaf() && !memPte(2).isPf(refill.level_dup(2)) && Mux(refill.req_info_dup(2).s2xlate === allStage, true.B, !memPte(2).isAf())) {
      val refillIdx = replaceWrapper(l3v.get, ptwl3replace.get.way)
      refillIdx.suggestName(s"Ptwl3RefillIdx")
      val rfOH = UIntToOH(refillIdx)
      l3.get(refillIdx).refill(
        refill.req_info_dup(2).vpn,
        Mux(refill.req_info_dup(2).s2xlate =/= noS2xlate, io.csr_dup(2).vsatp.asid, io.csr_dup(2).satp.asid),
        io.csr_dup(2).hgatp.asid,
        memSelData(2),
        3.U,
        refill_prefetch_dup(2)
      )
      ptwl3replace.get.access(refillIdx) // update the l3 replacer here (not l2's)
      l3v.get := l3v.get | rfOH
      l3g.get := (l3g.get & ~rfOH) | Mux(memPte(2).perm.g, rfOH, 0.U)
      l3h.get(refillIdx) := refill_h(2)

      for (i <- 0 until l2tlbParams.l3Size) {
        l3RefillPerf.get(i) := i.U === refillIdx
      }

      XSDebug(p"[l3 refill] refillIdx:${refillIdx} refillEntry:${l3.get(refillIdx).genPtwEntry(refill.req_info_dup(2).vpn, Mux(refill.req_info_dup(2).s2xlate =/= noS2xlate, io.csr_dup(2).vsatp.asid, io.csr_dup(2).satp.asid), memSelData(2), 0.U, prefetch = refill_prefetch_dup(2))}\n")
      XSDebug(p"[l3 refill] l3v:${Binary(l3v.get)}->${Binary(l3v.get | rfOH)} l3g:${Binary(l3g.get)}->${Binary((l3g.get & ~rfOH) | Mux(memPte(2).perm.g, rfOH, 0.U))}\n")

      refillIdx.suggestName(s"l3_refillIdx")
      rfOH.suggestName(s"l3_rfOH")
    }
  }

  when (!flush_dup(2) && refill.levelOH.l2 && !memPte(2).isLeaf() && !memPte(2).isPf(refill.level_dup(2)) && Mux(refill.req_info_dup(2).s2xlate === allStage, true.B, !memPte(2).isAf())) {
    val refillIdx = replaceWrapper(l2v, ptwl2replace.way)
    refillIdx.suggestName(s"Ptwl2RefillIdx")
    val rfOH = UIntToOH(refillIdx)
    l2(refillIdx).refill(
      refill.req_info_dup(2).vpn,
      Mux(refill.req_info_dup(2).s2xlate =/= noS2xlate, io.csr_dup(2).vsatp.asid, io.csr_dup(2).satp.asid),
      io.csr_dup(2).hgatp.asid,
      memSelData(2),
      2.U,
      refill_prefetch_dup(2)
    )
    ptwl2replace.access(refillIdx)
    l2v := l2v | rfOH
    l2g := (l2g & ~rfOH) | Mux(memPte(2).perm.g, rfOH, 0.U)
    l2h(refillIdx) := refill_h(2)

    for (i <- 0 until l2tlbParams.l2Size) {
      l2RefillPerf(i) := i.U === refillIdx
    }

    XSDebug(p"[l2 refill] refillIdx:${refillIdx} refillEntry:${l2(refillIdx).genPtwEntry(refill.req_info_dup(2).vpn, Mux(refill.req_info_dup(2).s2xlate =/= noS2xlate, io.csr_dup(2).vsatp.asid, io.csr_dup(2).satp.asid), memSelData(2), 0.U, prefetch = refill_prefetch_dup(2))}\n")
    XSDebug(p"[l2 refill] l2v:${Binary(l2v)}->${Binary(l2v | rfOH)} l2g:${Binary(l2g)}->${Binary((l2g & ~rfOH) | Mux(memPte(2).perm.g, rfOH, 0.U))}\n")

    refillIdx.suggestName(s"l2_refillIdx")
    rfOH.suggestName(s"l2_rfOH")
  }

  when (!flush_dup(1) && refill.levelOH.l1 && !memPte(1).isLeaf() && !memPte(1).isPf(refill.level_dup(1)) && Mux(refill.req_info_dup(1).s2xlate === allStage, true.B, !memPte(1).isAf())) {
    val refillIdx = genPtwL1SetIdx(refill.req_info_dup(1).vpn)
    val victimWay = replaceWrapper(getl1vSet(refill.req_info_dup(1).vpn), ptwl1replace.way(refillIdx))
    val victimWayOH = UIntToOH(victimWay)
    val rfvOH = UIntToOH(Cat(refillIdx, victimWay))
    val wdata = Wire(l1EntryType)
    wdata.gen(
      vpn = refill.req_info_dup(1).vpn,
      asid = Mux(refill.req_info_dup(1).s2xlate =/= noS2xlate, io.csr_dup(1).vsatp.asid, io.csr_dup(1).satp.asid),
      vmid = io.csr_dup(1).hgatp.asid,
      data = memRdata,
      levelUInt = 1.U,
      refill_prefetch_dup(1),
      refill.req_info_dup(1).s2xlate
    )
    l1.io.w.apply(
      valid = true.B,
      setIdx = refillIdx,
      data = wdata,
      waymask = victimWayOH
    )
    ptwl1replace.access(refillIdx, victimWay)
    l1v := l1v | rfvOH
    l1g := l1g & ~rfvOH | Mux(Cat(memPtes.map(_.perm.g)).andR, rfvOH, 0.U)
    l1h(refillIdx)(victimWay) := refill_h(1)

    for (i <- 0 until l2tlbParams.l1nWays) {
      l1RefillPerf(i) := i.U === victimWay
    }

    XSDebug(p"[l1 refill] refillIdx:0x${Hexadecimal(refillIdx)} victimWay:${victimWay} victimWayOH:${Binary(victimWayOH)} rfvOH(in UInt):${Cat(refillIdx, victimWay)}\n")
    XSDebug(p"[l1 refill] refilldata:0x${wdata}\n")
    XSDebug(p"[l1 refill] l1v:${Binary(l1v)} -> ${Binary(l1v | rfvOH)}\n")
    XSDebug(p"[l1 refill] l1g:${Binary(l1g)} -> ${Binary(l1g & ~rfvOH | Mux(Cat(memPtes.map(_.perm.g)).andR, rfvOH, 0.U))}\n")

    refillIdx.suggestName(s"l1_refillIdx")
    victimWay.suggestName(s"l1_victimWay")
    victimWayOH.suggestName(s"l1_victimWayOH")
    rfvOH.suggestName(s"l1_rfvOH")
  }

  when (!flush_dup(0) && refill.levelOH.l0) {
    val refillIdx = genPtwL0SetIdx(refill.req_info_dup(0).vpn)
    val victimWay = replaceWrapper(getl0vSet(refill.req_info_dup(0).vpn), ptwl0replace.way(refillIdx))
    val victimWayOH = UIntToOH(victimWay)
    val rfvOH = UIntToOH(Cat(refillIdx, victimWay))
    val wdata = Wire(l0EntryType)
    wdata.gen(
      vpn = refill.req_info_dup(0).vpn,
      asid = Mux(refill.req_info_dup(0).s2xlate =/= noS2xlate, io.csr_dup(0).vsatp.asid, io.csr_dup(0).satp.asid),
      vmid = io.csr_dup(0).hgatp.asid,
      data = memRdata,
      levelUInt = 0.U,
      refill_prefetch_dup(0),
      refill.req_info_dup(0).s2xlate
    )
    l0.io.w.apply(
      valid = true.B,
      setIdx = refillIdx,
      data = wdata,
      waymask = victimWayOH
    )
    ptwl0replace.access(refillIdx, victimWay)
    l0v := l0v | rfvOH
    l0g := l0g & ~rfvOH | Mux(Cat(memPtes.map(_.perm.g)).andR, rfvOH, 0.U)
    l0h(refillIdx)(victimWay) := refill_h(0)

    for (i <- 0 until l2tlbParams.l0nWays) {
      l0RefillPerf(i) := i.U === victimWay
    }

    XSDebug(p"[l0 refill] refillIdx:0x${Hexadecimal(refillIdx)} victimWay:${victimWay} victimWayOH:${Binary(victimWayOH)} rfvOH(in UInt):${Cat(refillIdx, victimWay)}\n")
    XSDebug(p"[l0 refill] refilldata:0x${wdata}\n")
    XSDebug(p"[l0 refill] l0v:${Binary(l0v)} -> ${Binary(l0v | rfvOH)}\n")
    XSDebug(p"[l0 refill] l0g:${Binary(l0g)} -> ${Binary(l0g & ~rfvOH | Mux(Cat(memPtes.map(_.perm.g)).andR, rfvOH, 0.U))}\n")

    refillIdx.suggestName(s"l0_refillIdx")
    victimWay.suggestName(s"l0_victimWay")
    victimWayOH.suggestName(s"l0_victimWayOH")
    rfvOH.suggestName(s"l0_rfvOH")
  }

  // misc entries: super pages & invalid (page-fault) entries
  when (!flush_dup(0) && refill.levelOH.sp && (memPte(0).isLeaf() || memPte(0).isPf(refill.level_dup(0))) && Mux(refill.req_info_dup(0).s2xlate === allStage, true.B, !memPte(0).isAf())) {
    val refillIdx = spreplace.way // LFSR64()(log2Up(l2tlbParams.spSize)-1,0) // TODO: may be LRU
    val rfOH = UIntToOH(refillIdx)
    sp(refillIdx).refill(
      refill.req_info_dup(0).vpn,
      Mux(refill.req_info_dup(0).s2xlate =/= noS2xlate, io.csr_dup(0).vsatp.asid, io.csr_dup(0).satp.asid),
      io.csr_dup(0).hgatp.asid,
      memSelData(0),
      refill.level_dup(0),
      refill_prefetch_dup(0),
      !memPte(0).isPf(refill.level_dup(0)),
    )
    spreplace.access(refillIdx)
    spv := spv | rfOH
    spg := spg & ~rfOH | Mux(memPte(0).perm.g, rfOH, 0.U)
    sph(refillIdx) := refill_h(0)

    for (i <- 0 until l2tlbParams.spSize) {
      spRefillPerf(i) := i.U === refillIdx
    }

    XSDebug(p"[sp refill] refillIdx:${refillIdx} refillEntry:${sp(refillIdx).genPtwEntry(refill.req_info_dup(0).vpn, Mux(refill.req_info_dup(0).s2xlate =/= noS2xlate, io.csr_dup(0).vsatp.asid, io.csr_dup(0).satp.asid), memSelData(0), refill.level_dup(0), refill_prefetch_dup(0))}\n")
    XSDebug(p"[sp refill] spv:${Binary(spv)}->${Binary(spv | rfOH)} spg:${Binary(spg)}->${Binary(spg & ~rfOH | Mux(memPte(0).perm.g, rfOH, 0.U))}\n")

    refillIdx.suggestName(s"sp_refillIdx")
    rfOH.suggestName(s"sp_rfOH")
  }

  val l1eccFlush = resp_res.l1.ecc && stageResp_valid_1cycle_dup(0) // RegNext(l1eccError, init = false.B)
  val l0eccFlush = resp_res.l0.ecc && stageResp_valid_1cycle_dup(1) // RegNext(l0eccError, init = false.B)
  val eccVpn = stageResp.bits.req_info.vpn
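
  // On an ECC error the whole set containing the faulty entry is invalidated
  // (flushMask covers every way of the indexed set); the request is then
  // re-walked instead of using the corrupted data.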

  XSError(l1eccFlush, "l2tlb.cache.l1 ecc error. Should not happen at sim stage")
  XSError(l0eccFlush, "l2tlb.cache.l0 ecc error. Should not happen at sim stage")
  when (l1eccFlush) {
    val flushSetIdxOH = UIntToOH(genPtwL1SetIdx(eccVpn))
    val flushMask = VecInit(flushSetIdxOH.asBools.map { a => Fill(l2tlbParams.l1nWays, a.asUInt) }).asUInt
    l1v := l1v & ~flushMask
    l1g := l1g & ~flushMask
  }

  when (l0eccFlush) {
    val flushSetIdxOH = UIntToOH(genPtwL0SetIdx(eccVpn))
    val flushMask = VecInit(flushSetIdxOH.asBools.map { a => Fill(l2tlbParams.l0nWays, a.asUInt) }).asUInt
    l0v := l0v & ~flushMask
    l0g := l0g & ~flushMask
  }

  // sfence for l0
  val sfence_valid_l0 = sfence_dup(0).valid && !sfence_dup(0).bits.hg && !sfence_dup(0).bits.hv
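  // As the case comments below indicate, bits.rs1 set means no specific
  // virtual address was given (flush all addresses) and bits.rs2 set means
  // no specific ASID was given (flush all address spaces).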
  when (sfence_valid_l0) {
    val l0hhit = VecInit(l0h.flatMap(_.map{a => io.csr_dup(0).priv.virt && a === onlyStage1 || !io.csr_dup(0).priv.virt && a === noS2xlate})).asUInt
    val sfence_vpn = sfence_dup(0).bits.addr(sfence_dup(0).bits.addr.getWidth-1, offLen)
    when (sfence_dup(0).bits.rs1/*va*/) {
      when (sfence_dup(0).bits.rs2) {
        // all va && all asid
        l0v := l0v & ~l0hhit
      } .otherwise {
        // all va && specific asid except global
        l0v := l0v & (l0g | ~l0hhit)
      }
    } .otherwise {
      // val flushMask = UIntToOH(genTlbl1Idx(sfence.bits.addr(sfence.bits.addr.getWidth-1, offLen)))
      val flushSetIdxOH = UIntToOH(genPtwL0SetIdx(sfence_vpn))
      // val flushMask = VecInit(flushSetIdxOH.asBools.map(Fill(l2tlbParams.l0nWays, _.asUInt))).asUInt
      val flushMask = VecInit(flushSetIdxOH.asBools.map { a => Fill(l2tlbParams.l0nWays, a.asUInt) }).asUInt
      flushSetIdxOH.suggestName(s"sfence_nrs1_flushSetIdxOH")
      flushMask.suggestName(s"sfence_nrs1_flushMask")

      when (sfence_dup(0).bits.rs2) {
        // specific leaf of addr && all asid
        l0v := l0v & ~flushMask & ~l0hhit
      } .otherwise {
        // specific leaf of addr && specific asid
        l0v := l0v & (~flushMask | l0g | ~l0hhit)
      }
    }
  }

  // hfencev, simple implementation for l0
  val hfencev_valid_l0 = sfence_dup(0).valid && sfence_dup(0).bits.hv
  when (hfencev_valid_l0) {
    val flushMask = VecInit(l0h.flatMap(_.map(_ === onlyStage1))).asUInt
    l0v := l0v & ~flushMask // all VS-stage l0 pte
  }

  // hfenceg, simple implementation for l0
  val hfenceg_valid_l0 = sfence_dup(0).valid && sfence_dup(0).bits.hg
  when (hfenceg_valid_l0) {
    val flushMask = VecInit(l0h.flatMap(_.map(_ === onlyStage2))).asUInt
    l0v := l0v & ~flushMask // all G-stage l0 pte
  }

  val l2asidhit = VecInit(l2asids.map(_ === sfence_dup(2).bits.id)).asUInt
  val spasidhit = VecInit(spasids.map(_ === sfence_dup(0).bits.id)).asUInt
  val sfence_valid = sfence_dup(0).valid && !sfence_dup(0).bits.hg && !sfence_dup(0).bits.hv
  when (sfence_valid) {
    val l2vmidhit = VecInit(l2vmids.map(_.getOrElse(0.U) === io.csr_dup(2).hgatp.asid)).asUInt
    val spvmidhit = VecInit(spvmids.map(_.getOrElse(0.U) === io.csr_dup(0).hgatp.asid)).asUInt
    val l2hhit = VecInit(l2h.map{a => io.csr_dup(2).priv.virt && a === onlyStage1 || !io.csr_dup(2).priv.virt && a === noS2xlate}).asUInt
    val sphhit = VecInit(sph.map{a => io.csr_dup(0).priv.virt && a === onlyStage1 || !io.csr_dup(0).priv.virt && a === noS2xlate}).asUInt
    val l1hhit = VecInit(l1h.flatMap(_.map{a => io.csr_dup(1).priv.virt && a === onlyStage1 || !io.csr_dup(1).priv.virt && a === noS2xlate})).asUInt
    val sfence_vpn = sfence_dup(0).bits.addr(sfence_dup(0).bits.addr.getWidth-1, offLen)

    when (sfence_dup(0).bits.rs1/*va*/) {
      when (sfence_dup(0).bits.rs2) {
        // all va && all asid
        l1v := l1v & ~l1hhit
        l2v := l2v & ~(l2hhit & VecInit(l2vmidhit.asBools.map{a => io.csr_dup(2).priv.virt && a || !io.csr_dup(2).priv.virt}).asUInt)
        spv := spv & ~(sphhit & VecInit(spvmidhit.asBools.map{a => io.csr_dup(0).priv.virt && a || !io.csr_dup(0).priv.virt}).asUInt)
      } .otherwise {
        // all va && specific asid except global
        l1v := l1v & (l1g | ~l1hhit)
        l2v := l2v & ~(~l2g & l2hhit & l2asidhit & VecInit(l2vmidhit.asBools.map{a => io.csr_dup(2).priv.virt && a || !io.csr_dup(2).priv.virt}).asUInt)
        spv := spv & ~(~spg & sphhit & spasidhit & VecInit(spvmidhit.asBools.map{a => io.csr_dup(0).priv.virt && a || !io.csr_dup(0).priv.virt}).asUInt)
      }
    } .otherwise {
      when (sfence_dup(0).bits.rs2) {
        // specific leaf of addr && all asid
        spv := spv & ~(sphhit & VecInit(sp.map(_.hit(sfence_vpn, sfence_dup(0).bits.id, sfence_dup(0).bits.id, io.csr_dup(0).hgatp.asid, ignoreAsid = true, s2xlate = io.csr_dup(0).priv.virt))).asUInt)
      } .otherwise {
        // specific leaf of addr && specific asid
        spv := spv & ~(~spg & sphhit & VecInit(sp.map(_.hit(sfence_vpn, sfence_dup(0).bits.id, sfence_dup(0).bits.id, io.csr_dup(0).hgatp.asid, s2xlate = io.csr_dup(0).priv.virt))).asUInt)
      }
    }
  }

  val hfencev_valid = sfence_dup(0).valid && sfence_dup(0).bits.hv
  when (hfencev_valid) {
    val l2vmidhit = VecInit(l2vmids.map(_.getOrElse(0.U) === io.csr_dup(2).hgatp.asid)).asUInt
    val spvmidhit = VecInit(spvmids.map(_.getOrElse(0.U) === io.csr_dup(0).hgatp.asid)).asUInt
    val l2hhit = VecInit(l2h.map(_ === onlyStage1)).asUInt
    val sphhit = VecInit(sph.map(_ === onlyStage1)).asUInt
    val l1hhit = VecInit(l1h.flatMap(_.map(_ === onlyStage1))).asUInt
    val hfencev_vpn = sfence_dup(0).bits.addr(sfence_dup(0).bits.addr.getWidth-1, offLen)
    when (sfence_dup(0).bits.rs1) {
      when (sfence_dup(0).bits.rs2) {
        l1v := l1v & ~l1hhit
        l2v := l2v & ~(l2hhit & l2vmidhit)
        spv := spv & ~(sphhit & spvmidhit)
      } .otherwise {
        l1v := l1v & (l1g | ~l1hhit)
        l2v := l2v & ~(~l2g & l2hhit & l2asidhit & l2vmidhit)
        spv := spv & ~(~spg & sphhit & spasidhit & spvmidhit)
      }
    } .otherwise {
      when (sfence_dup(0).bits.rs2) {
        spv := spv & ~(sphhit & VecInit(sp.map(_.hit(hfencev_vpn, sfence_dup(0).bits.id, sfence_dup(0).bits.id, io.csr_dup(0).hgatp.asid, ignoreAsid = true, s2xlate = true.B))).asUInt)
      } .otherwise {
        spv := spv & ~(~spg & sphhit & VecInit(sp.map(_.hit(hfencev_vpn, sfence_dup(0).bits.id, sfence_dup(0).bits.id, io.csr_dup(0).hgatp.asid, s2xlate = true.B))).asUInt)
      }
    }
  }

  val hfenceg_valid = sfence_dup(0).valid && sfence_dup(0).bits.hg
  when (hfenceg_valid) {
    val l2vmidhit = VecInit(l2vmids.map(_.getOrElse(0.U) === sfence_dup(2).bits.id)).asUInt
    val spvmidhit = VecInit(spvmids.map(_.getOrElse(0.U) === sfence_dup(0).bits.id)).asUInt
    val l2hhit = VecInit(l2h.map(_ === onlyStage2)).asUInt
    val sphhit = VecInit(sph.map(_ === onlyStage2)).asUInt
    val l1hhit = VecInit(l1h.flatMap(_.map(_ === onlyStage2))).asUInt
    val hfenceg_gvpn = (sfence_dup(0).bits.addr << 2)(sfence_dup(0).bits.addr.getWidth - 1, offLen)
    when (sfence_dup(0).bits.rs1) {
      when (sfence_dup(0).bits.rs2) {
        l1v := l1v & ~l1hhit
        l2v := l2v & ~l2hhit
        spv := spv & ~sphhit
      } .otherwise {
        l1v := l1v & ~l1hhit
        l2v := l2v & ~(l2hhit & l2vmidhit)
        spv := spv & ~(sphhit & spvmidhit)
      }
    } .otherwise {
      when (sfence_dup(0).bits.rs2) {
        spv := spv & ~(sphhit & VecInit(sp.map(_.hit(hfenceg_gvpn, 0.U, 0.U, sfence_dup(0).bits.id, ignoreAsid = true, s2xlate = false.B))).asUInt)
      } .otherwise {
        spv := spv & ~(~spg & sphhit & VecInit(sp.map(_.hit(hfenceg_gvpn, 0.U, 0.U, sfence_dup(0).bits.id, ignoreAsid = true, s2xlate = true.B))).asUInt)
      }
    }
  }

  if (EnableSv48) {
    val l3asidhit = VecInit(l3asids.get.map(_ === sfence_dup(2).bits.id)).asUInt
    val l3vmidhit = VecInit(l3vmids.get.map(_.getOrElse(0.U) === io.csr_dup(2).hgatp.asid)).asUInt
    val l3hhit = VecInit(l3h.get.map{a => io.csr_dup(2).priv.virt && a === onlyStage1 || !io.csr_dup(2).priv.virt && a === noS2xlate}).asUInt

    when (sfence_valid) {
      val l3vmidhit = VecInit(l3vmids.get.map(_.getOrElse(0.U) === io.csr_dup(2).hgatp.asid)).asUInt
      val l3hhit = VecInit(l3h.get.map{a => io.csr_dup(2).priv.virt && a === onlyStage1 || !io.csr_dup(2).priv.virt && a === noS2xlate}).asUInt
      val sfence_vpn = sfence_dup(2).bits.addr(sfence_dup(2).bits.addr.getWidth-1, offLen)

      when (sfence_dup(2).bits.rs1/*va*/) {
        when (sfence_dup(2).bits.rs2) {
          // all va && all asid
          l3v.map(_ := l3v.get & ~(l3hhit & VecInit(l3vmidhit.asBools.map{a => io.csr_dup(2).priv.virt && a || !io.csr_dup(2).priv.virt}).asUInt))
        } .otherwise {
          // all va && specific asid except global
          l3v.map(_ := l3v.get & ~(~l3g.get & l3hhit & l3asidhit & VecInit(l3vmidhit.asBools.map{a => io.csr_dup(2).priv.virt && a || !io.csr_dup(2).priv.virt}).asUInt))
        }
      }
    }

    when (hfencev_valid) {
      val l3vmidhit = VecInit(l3vmids.get.map(_.getOrElse(0.U) === io.csr_dup(2).hgatp.asid)).asUInt
      val l3hhit = VecInit(l3h.get.map(_ === onlyStage1)).asUInt
      val hfencev_vpn = sfence_dup(2).bits.addr(sfence_dup(2).bits.addr.getWidth-1, offLen)
      when (sfence_dup(2).bits.rs1) {
        when (sfence_dup(2).bits.rs2) {
          l3v.map(_ := l3v.get & ~(l3hhit & l3vmidhit))
        } .otherwise {
          l3v.map(_ := l3v.get & ~(~l3g.get & l3hhit & l3asidhit & l3vmidhit))
        }
      }
    }

    when (hfenceg_valid) {
      val l3vmidhit = VecInit(l3vmids.get.map(_.getOrElse(0.U) === sfence_dup(2).bits.id)).asUInt
      val l3hhit = VecInit(l3h.get.map(_ === onlyStage2)).asUInt
      val hfenceg_gvpn = (sfence_dup(2).bits.addr << 2)(sfence_dup(2).bits.addr.getWidth - 1, offLen)
      when (sfence_dup(2).bits.rs1) {
        when (sfence_dup(2).bits.rs2) {
          l3v.map(_ := l3v.get & ~l3hhit)
        } .otherwise {
          l3v.map(_ := l3v.get & ~(l3hhit & l3vmidhit))
        }
      }
    }
  }
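
  // InsideStageConnect links the two halves of one pipeline stage and keeps
  // the per-level bypassed flags sticky: once a refill matching the in-flight
  // request is observed, the flag stays set until the stage moves on.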
  def InsideStageConnect(in: DecoupledIO[PtwCacheReq], out: DecoupledIO[PtwCacheReq], inFire: Bool): Unit = {
    in.ready := !in.valid || out.ready
    out.valid := in.valid
    out.bits := in.bits
    out.bits.bypassed.zip(in.bits.bypassed).zipWithIndex.map{ case (b, i) =>
      val bypassed_reg = Reg(Bool())
      val bypassed_wire = refill_bypass(in.bits.req_info.vpn, i, in.bits.req_info.s2xlate) && io.refill.valid
      when (inFire) { bypassed_reg := bypassed_wire }
      .elsewhen (io.refill.valid) { bypassed_reg := bypassed_reg || bypassed_wire }

      b._1 := b._2 || (bypassed_wire || (bypassed_reg && !inFire))
    }
  }

  // Perf Count
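  // Counters are grouped into four request classes: demand (base_valid_access_0)
  // vs. prefetch (base_valid_access_1), plus first-access-only variants of each
  // (base_valid_access_2 and _3). Each class counts hits per cache level.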
  val resp_l0 = resp_res.l0.hit
  val resp_sp = resp_res.sp.hit
  val resp_l3_pre = if (EnableSv48) Some(resp_res.l3.get.pre) else None
  val resp_l2_pre = resp_res.l2.pre
  val resp_l1_pre = resp_res.l1.pre
  val resp_l0_pre = resp_res.l0.pre
  val resp_sp_pre = resp_res.sp.pre
  val base_valid_access_0 = !from_pre(io.resp.bits.req_info.source) && io.resp.fire
  XSPerfAccumulate("access", base_valid_access_0)
  if (EnableSv48) {
    XSPerfAccumulate("l3_hit", base_valid_access_0 && io.resp.bits.toFsm.l3Hit.get && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
  }
  XSPerfAccumulate("l2_hit", base_valid_access_0 && io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
  XSPerfAccumulate("l1_hit", base_valid_access_0 && io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
  XSPerfAccumulate("l0_hit", base_valid_access_0 && resp_l0)
  XSPerfAccumulate("sp_hit", base_valid_access_0 && resp_sp)
  XSPerfAccumulate("pte_hit", base_valid_access_0 && io.resp.bits.hit)

  if (EnableSv48) {
    XSPerfAccumulate("l3_hit_pre", base_valid_access_0 && resp_l3_pre.get && io.resp.bits.toFsm.l3Hit.get && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
  }
  XSPerfAccumulate("l2_hit_pre", base_valid_access_0 && resp_l2_pre && io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit) // l2Hit asserted (not negated) to mirror "l2_hit"
1068  XSPerfAccumulate("l1_hit_pre", base_valid_access_0 && resp_l1_pre && io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1069  XSPerfAccumulate("l0_hit_pre", base_valid_access_0 && resp_l0_pre && resp_l0)
1070  XSPerfAccumulate("sp_hit_pre", base_valid_access_0 && resp_sp_pre && resp_sp)
1071  XSPerfAccumulate("pte_hit_pre",base_valid_access_0 && (resp_l0_pre && resp_l0 || resp_sp_pre && resp_sp) && io.resp.bits.hit)
1072
1073  val base_valid_access_1 = from_pre(io.resp.bits.req_info.source) && io.resp.fire
1074  XSPerfAccumulate("pre_access", base_valid_access_1)
1075  if (EnableSv48) {
1076    XSPerfAccumulate("pre_l3_hit", base_valid_access_1 && io.resp.bits.toFsm.l3Hit.get && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1077  }
1078  XSPerfAccumulate("pre_l2_hit", base_valid_access_1 && io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1079  XSPerfAccumulate("pre_l1_hit", base_valid_access_1 && io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1080  XSPerfAccumulate("pre_l0_hit", base_valid_access_1 && resp_l0)
1081  XSPerfAccumulate("pre_sp_hit", base_valid_access_1 && resp_sp)
1082  XSPerfAccumulate("pre_pte_hit",base_valid_access_1 && io.resp.bits.hit)
1083
1084  if (EnableSv48) {
1085    XSPerfAccumulate("pre_l3_hit_pre", base_valid_access_1 && resp_l3_pre.get && io.resp.bits.toFsm.l3Hit.get && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1086  }
1087  XSPerfAccumulate("pre_l2_hit_pre", base_valid_access_1 && resp_l2_pre && io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1088  XSPerfAccumulate("pre_l1_hit_pre", base_valid_access_1 && resp_l1_pre && io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1089  XSPerfAccumulate("pre_l0_hit_pre", base_valid_access_1 && resp_l0_pre && resp_l0)
1090  XSPerfAccumulate("pre_sp_hit_pre", base_valid_access_1 && resp_sp_pre && resp_sp)
1091  XSPerfAccumulate("pre_pte_hit_pre", base_valid_access_1 && (resp_l0_pre && resp_l0 || resp_sp_pre && resp_sp) && io.resp.bits.hit)
1092
1093  val base_valid_access_2 = stageResp.bits.isFirst && !from_pre(io.resp.bits.req_info.source) && io.resp.fire
1094  XSPerfAccumulate("access_first", base_valid_access_2)
1095  if (EnableSv48) {
1096    XSPerfAccumulate("l3_hit_first", base_valid_access_2 && io.resp.bits.toFsm.l3Hit.get && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1097  }
1098  XSPerfAccumulate("l2_hit_first", base_valid_access_2 && io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1099  XSPerfAccumulate("l1_hit_first", base_valid_access_2 && io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1100  XSPerfAccumulate("l0_hit_first", base_valid_access_2 && resp_l0)
1101  XSPerfAccumulate("sp_hit_first", base_valid_access_2 && resp_sp)
1102  XSPerfAccumulate("pte_hit_first", base_valid_access_2 && io.resp.bits.hit)
1103
1104  if (EnableSv48) {
1105    XSPerfAccumulate("l3_hit_pre_first", base_valid_access_2 && resp_l3_pre.get && io.resp.bits.toFsm.l3Hit.get && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1106  }
1107  XSPerfAccumulate("l2_hit_pre_first", base_valid_access_2 && resp_l2_pre && io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1108  XSPerfAccumulate("l1_hit_pre_first", base_valid_access_2 && resp_l1_pre && io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1109  XSPerfAccumulate("l0_hit_pre_first", base_valid_access_2 && resp_l0_pre && resp_l0)
1110  XSPerfAccumulate("sp_hit_pre_first", base_valid_access_2 && resp_sp_pre && resp_sp)
1111  XSPerfAccumulate("pte_hit_pre_first", base_valid_access_2 && (resp_l0_pre && resp_l0 || resp_sp_pre && resp_sp) && io.resp.bits.hit)
1112
1113  val base_valid_access_3 = stageResp.bits.isFirst && from_pre(io.resp.bits.req_info.source) && io.resp.fire
1114  XSPerfAccumulate("pre_access_first", base_valid_access_3)
1115  if (EnableSv48) {
1116    XSPerfAccumulate("pre_l3_hit_first", base_valid_access_3 && io.resp.bits.toFsm.l3Hit.get && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1117  }
1118  XSPerfAccumulate("pre_l2_hit_first", base_valid_access_3 && io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1119  XSPerfAccumulate("pre_l1_hit_first", base_valid_access_3 && io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1120  XSPerfAccumulate("pre_l0_hit_first", base_valid_access_3 && resp_l0)
1121  XSPerfAccumulate("pre_sp_hit_first", base_valid_access_3 && resp_sp)
1122  XSPerfAccumulate("pre_pte_hit_first", base_valid_access_3 && io.resp.bits.hit)
1123
1124  if (EnableSv48) {
1125    XSPerfAccumulate("pre_l3_hit_pre_first", base_valid_access_3 && resp_l3_pre.get && io.resp.bits.toFsm.l3Hit.get && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1126  }
1127  XSPerfAccumulate("pre_l2_hit_pre_first", base_valid_access_3 && resp_l2_pre && io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1128  XSPerfAccumulate("pre_l1_hit_pre_first", base_valid_access_3 && resp_l1_pre && io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1129  XSPerfAccumulate("pre_l0_hit_pre_first", base_valid_access_3 && resp_l0_pre && resp_l0)
1130  XSPerfAccumulate("pre_sp_hit_pre_first", base_valid_access_3 && resp_sp_pre && resp_sp)
1131  XSPerfAccumulate("pre_pte_hit_pre_first", base_valid_access_3 && (resp_l0_pre && resp_l0 || resp_sp_pre && resp_sp) && io.resp.bits.hit)
1132
1133  XSPerfAccumulate("rwHazard", io.req.valid && !io.req.ready)
1134  XSPerfAccumulate("out_blocked", io.resp.valid && !io.resp.ready)
1135  if (EnableSv48) {
1136    l3AccessPerf.get.zipWithIndex.foreach{ case (l, i) => XSPerfAccumulate(s"l3AccessIndex${i}", l) }
1137  }
1138  l2AccessPerf.zipWithIndex.foreach{ case (l, i) => XSPerfAccumulate(s"l2AccessIndex${i}", l) }
1139  l1AccessPerf.zipWithIndex.foreach{ case (l, i) => XSPerfAccumulate(s"l1AccessIndex${i}", l) }
1140  l0AccessPerf.zipWithIndex.foreach{ case (l, i) => XSPerfAccumulate(s"l0AccessIndex${i}", l) }
1141  spAccessPerf.zipWithIndex.foreach{ case (l, i) => XSPerfAccumulate(s"SPAccessIndex${i}", l) }
1142  if (EnableSv48) {
1143    l3RefillPerf.get.zipWithIndex.foreach{ case (l, i) => XSPerfAccumulate(s"l3RefillIndex${i}", l) }
1144  }
1145  l2RefillPerf.zipWithIndex.foreach{ case (l, i) => XSPerfAccumulate(s"l2RefillIndex${i}", l) }
1146  l1RefillPerf.zipWithIndex.foreach{ case (l, i) => XSPerfAccumulate(s"l1RefillIndex${i}", l) }
1147  l0RefillPerf.zipWithIndex.foreach{ case (l, i) => XSPerfAccumulate(s"l0RefillIndex${i}", l) }
1148  spRefillPerf.zipWithIndex.foreach{ case (l, i) => XSPerfAccumulate(s"SPRefillIndex${i}", l) }
1149
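  // Note: Cat(perfVec).orR counts cycles in which at least one entry of a
  // level refilled, not the total number of refilled entries.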
1150  if (EnableSv48) {
1151    XSPerfAccumulate("l3Refill", Cat(l3RefillPerf.get).orR)
1152  }
1153  XSPerfAccumulate("l2Refill", Cat(l2RefillPerf).orR)
1154  XSPerfAccumulate("l1Refill", Cat(l1RefillPerf).orR)
1155  XSPerfAccumulate("l0Refill", Cat(l0RefillPerf).orR)
1156  XSPerfAccumulate("spRefill", Cat(spRefillPerf).orR)
1157  if (EnableSv48) {
1158    XSPerfAccumulate("l3Refill_pre", Cat(l3RefillPerf.get).orR && refill_prefetch_dup(0))
1159  }
1160  XSPerfAccumulate("l2Refill_pre", Cat(l2RefillPerf).orR && refill_prefetch_dup(0))
1161  XSPerfAccumulate("l1Refill_pre", Cat(l1RefillPerf).orR && refill_prefetch_dup(0))
1162  XSPerfAccumulate("l0Refill_pre", Cat(l0RefillPerf).orR && refill_prefetch_dup(0))
1163  XSPerfAccumulate("spRefill_pre", Cat(spRefillPerf).orR && refill_prefetch_dup(0))
1164
1165  // debug
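  // The first group dumps the valid/global bitmaps on the cycle the sfence
  // arrives; the RegNext-qualified group dumps them one cycle later, after
  // the flush has taken effect.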
1166  XSDebug(sfence_dup(0).valid, p"[sfence] original v and g vector:\n")
1167  if (EnableSv48) {
1168    XSDebug(sfence_dup(0).valid, p"[sfence] l3v:${Binary(l3v.get)}\n")
1169  }
1170  XSDebug(sfence_dup(0).valid, p"[sfence] l2v:${Binary(l2v)}\n")
1171  XSDebug(sfence_dup(0).valid, p"[sfence] l1v:${Binary(l1v)}\n")
1172  XSDebug(sfence_dup(0).valid, p"[sfence] l0v:${Binary(l0v)}\n")
1173  XSDebug(sfence_dup(0).valid, p"[sfence] l0g:${Binary(l0g)}\n")
1174  XSDebug(sfence_dup(0).valid, p"[sfence] spv:${Binary(spv)}\n")
1175  XSDebug(RegNext(sfence_dup(0).valid), p"[sfence] new v and g vector:\n")
1176  if (EnableSv48) {
1177    XSDebug(RegNext(sfence_dup(0).valid), p"[sfence] l3v:${Binary(l3v.get)}\n")
1178  }
1179  XSDebug(RegNext(sfence_dup(0).valid), p"[sfence] l2v:${Binary(l2v)}\n")
1180  XSDebug(RegNext(sfence_dup(0).valid), p"[sfence] l1v:${Binary(l1v)}\n")
1181  XSDebug(RegNext(sfence_dup(0).valid), p"[sfence] l0v:${Binary(l0v)}\n")
1182  XSDebug(RegNext(sfence_dup(0).valid), p"[sfence] l0g:${Binary(l0g)}\n")
1183  XSDebug(RegNext(sfence_dup(0).valid), p"[sfence] spv:${Binary(spv)}\n")
1184
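  // Export a subset of the counters to the core-level HPM infrastructure;
  // generatePerfEvent() (presumably the HasPerfEvents helper from the
  // utility package) registers them under the names given here.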
1185  val perfEvents = Seq(
1186    ("access           ", base_valid_access_0             ),
1187    ("l2_hit           ", l2Hit                           ),
1188    ("l1_hit           ", l1Hit                           ),
1189    ("l0_hit           ", l0Hit                           ),
1190    ("sp_hit           ", spHit                           ),
1191    ("pte_hit          ", l0Hit || spHit                  ),
1192    ("rwHarzad         ",  io.req.valid && !io.req.ready  ),
1193    ("out_blocked      ",  io.resp.valid && !io.resp.ready),
1194  )
1195  generatePerfEvent()
1196}
1197
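// ------------------------------------------------------------------------
// Illustration only (not part of the original design): a minimal sketch of
// the accumulate-counter idiom that the XSPerfAccumulate calls above rely
// on. The real helper lives in the `utility` package and is gated by the
// global perf-enable infrastructure; this hypothetical toy module merely
// shows the basic "count cycles on which a condition holds" pattern.
class PerfAccumulateSketch extends Module {
  val io = IO(new Bundle {
    val cond = Input(Bool())       // condition to count, e.g. "l0 hit this cycle"
    val cnt  = Output(UInt(64.W))  // free-running event count
  })
  private val counter = RegInit(0.U(64.W))
  when (io.cond) { counter := counter + 1.U } // wrap-around ignored in this sketch
  io.cnt := counter
}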