xref: /XiangShan/src/main/scala/xiangshan/cache/mmu/PageTableCache.scala (revision 800ac0f1d01fac5d118955113cd5a0cc7844aff4)
/***************************************************************************************
* Copyright (c) 2021-2025 Beijing Institute of Open Source Chip (BOSC)
* Copyright (c) 2020-2024 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
* Copyright (c) 2024-2025 Institute of Information Engineering, Chinese Academy of Sciences
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.cache.mmu

import org.chipsalliance.cde.config.Parameters
import chisel3._
import chisel3.util._
import xiangshan._
import xiangshan.cache.{HasDCacheParameters, MemoryOpConstants}
import utils._
import utility._
import coupledL2.utils.SplittedSRAM
import freechips.rocketchip.diplomacy.{LazyModule, LazyModuleImp}
import freechips.rocketchip.tilelink._

/* The PTW cache caches page table entries of all levels.
 * It responds at the next cycle.
 * The cache itself should not block;
 * when the miss queue is full, the request is blocked outside.
 */

class PageCachePerPespBundle(implicit p: Parameters) extends PtwBundle {
  val hit = Bool()
  val pre = Bool()
  val ppn = UInt(gvpnLen.W)
  val pbmt = UInt(ptePbmtLen.W)
  val perm = new PtePermBundle()
  val n = UInt(pteNLen.W)
  val ecc = Bool()
  val level = UInt(2.W)
  val v = Bool()
  val bitmapCheck = Option.when(HasBitmapCheck)(new Bundle {
    val jmp_bitmap_check = Bool()
    val pte = UInt(XLEN.W) // Page Table Entry
  })

  def apply(hit: Bool, pre: Bool, ppn: UInt, pbmt: UInt = 0.U, n: UInt = 0.U,
            perm: PtePermBundle = 0.U.asTypeOf(new PtePermBundle()),
            ecc: Bool = false.B, level: UInt = 0.U, valid: Bool = true.B, jmp_bitmap_check: Bool = false.B,
            pte: UInt = 0.U): Unit = {
    this.hit := hit && !ecc
    this.pre := pre
    this.ppn := ppn
    this.n := n
    this.pbmt := pbmt
    this.perm := perm
    this.ecc := ecc && hit
    this.level := level
    this.v := valid
    if (HasBitmapCheck) {
      this.bitmapCheck.get.jmp_bitmap_check := jmp_bitmap_check
      this.bitmapCheck.get.pte := pte
    }
  }
}

class PageCacheMergePespBundle(implicit p: Parameters) extends PtwBundle {
  assert(tlbcontiguous == 8, "Only support tlbcontiguous = 8!")
  val hit = Bool()
  val pre = Bool()
  val ppn = Vec(tlbcontiguous, UInt(gvpnLen.W))
  val pbmt = Vec(tlbcontiguous, UInt(ptePbmtLen.W))
  val perm = Vec(tlbcontiguous, new PtePermBundle())
  val ecc = Bool()
  val level = UInt(2.W)
  val v = Vec(tlbcontiguous, Bool())
  val bitmapCheck = Option.when(HasBitmapCheck)(new Bundle {
    val jmp_bitmap_check = Bool()
    val hitway = UInt(l2tlbParams.l0nWays.W)
    val ptes = Vec(tlbcontiguous, UInt(XLEN.W)) // Page Table Entry Vector
    val cfs = Vec(tlbcontiguous, Bool()) // Bitmap Check Failed Vector
  })

  def apply(hit: Bool, pre: Bool, ppn: Vec[UInt], pbmt: Vec[UInt] = Vec(tlbcontiguous, 0.U),
            perm: Vec[PtePermBundle] = Vec(tlbcontiguous, 0.U.asTypeOf(new PtePermBundle())),
            ecc: Bool = false.B, level: UInt = 0.U, valid: Vec[Bool] = Vec(tlbcontiguous, true.B),
            jmp_bitmap_check: Bool = false.B,
            hitway: UInt = 0.U, ptes: Vec[UInt], cfs: Vec[Bool]): Unit = {
    this.hit := hit && !ecc
    this.pre := pre
    this.ppn := ppn
    this.pbmt := pbmt
    this.perm := perm
    this.ecc := ecc && hit
    this.level := level
    this.v := valid
    if (HasBitmapCheck) {
      this.bitmapCheck.get.jmp_bitmap_check := jmp_bitmap_check
      this.bitmapCheck.get.hitway := hitway
      this.bitmapCheck.get.ptes := ptes
      this.bitmapCheck.get.cfs := cfs
    }
  }
}

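// Per-level response record: l3 (Sv48 only), l2 and l1 are non-leaf levels,
// l0 holds a merged sector of eight contiguous 4KB leaf entries, and sp records
// super-page (large leaf) hits.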
class PageCacheRespBundle(implicit p: Parameters) extends PtwBundle {
  val l3 = if (EnableSv48) Some(new PageCachePerPespBundle) else None
  val l2 = new PageCachePerPespBundle
  val l1 = new PageCachePerPespBundle
  val l0 = new PageCacheMergePespBundle
  val sp = new PageCachePerPespBundle
}

class PtwCacheReq(implicit p: Parameters) extends PtwBundle {
  val req_info = new L2TlbInnerBundle()
  val isFirst = Bool()
  val bypassed = if (EnableSv48) Vec(4, Bool()) else Vec(3, Bool())
  val isHptwReq = Bool()
  val hptwId = UInt(log2Up(l2tlbParams.llptwsize).W)
}

class PtwCacheIO()(implicit p: Parameters) extends MMUIOBaseBundle with HasPtwConst {
  val req = Flipped(DecoupledIO(new PtwCacheReq()))
  val resp = DecoupledIO(new Bundle {
    val req_info = new L2TlbInnerBundle()
    val isFirst = Bool()
    val hit = Bool()
    val prefetch = Bool() // is the entry fetched by prefetch
    val bypassed = Bool()
    val toFsm = new Bundle {
      val l3Hit = if (EnableSv48) Some(Bool()) else None
      val l2Hit = Bool()
      val l1Hit = Bool()
      val ppn = UInt(gvpnLen.W)
      val stage1Hit = Bool() // found the stage 1 pte in the cache, but the stage 2 pte still has to be searched at PTW
      val bitmapCheck = Option.when(HasBitmapCheck)(new Bundle {
        val jmp_bitmap_check = Bool() // found the pte in l0 or sp, but it still needs a bitmap check
        val toLLPTW = Bool()
        val hitway = UInt(l2tlbParams.l0nWays.W)
        val pte = UInt(XLEN.W) // Page Table Entry
        val ptes = Vec(tlbcontiguous, UInt(XLEN.W)) // Page Table Entry Vector
        val cfs = Vec(tlbcontiguous, Bool()) // Bitmap Check Failed Vector
        val SPlevel = UInt(log2Up(Level).W)
      })
    }
    val stage1 = new PtwMergeResp()
    val isHptwReq = Bool()
    val toHptw = new Bundle {
      val l3Hit = if (EnableSv48) Some(Bool()) else None
      val l2Hit = Bool()
      val l1Hit = Bool()
      val ppn = UInt(ppnLen.W)
      val id = UInt(log2Up(l2tlbParams.llptwsize).W)
      val resp = new HptwResp() // used if hit
      val bypassed = Bool()
      val bitmapCheck = Option.when(HasBitmapCheck)(new Bundle {
        val jmp_bitmap_check = Bool() // found the pte in l0 or sp, but it still needs a bitmap check
        val hitway = UInt(l2tlbParams.l0nWays.W)
        val pte = UInt(XLEN.W) // Page Table Entry
        val ptes = Vec(tlbcontiguous, UInt(XLEN.W)) // Page Table Entry Vector
        val cfs = Vec(tlbcontiguous, Bool()) // Bitmap Check Failed Vector
        val fromSP = Bool()
        val SPlevel = UInt(log2Up(Level).W)
      })
    }
  })
  val refill = Flipped(ValidIO(new Bundle {
    val ptes = UInt(blockBits.W)
    val levelOH = new Bundle {
      // NOTE: levelOH has (Level+1) bits, each bit selecting one class of page cache entries
      val sp = Bool()
      val l0 = Bool()
      val l1 = Bool()
      val l2 = Bool()
      val l3 = if (EnableSv48) Some(Bool()) else None
      def apply(levelUInt: UInt, valid: Bool) = {
        sp := GatedValidRegNext((levelUInt === 1.U || levelUInt === 2.U || levelUInt === 3.U) && valid, false.B)
        l0 := GatedValidRegNext((levelUInt === 0.U) & valid, false.B)
        l1 := GatedValidRegNext((levelUInt === 1.U) & valid, false.B)
        l2 := GatedValidRegNext((levelUInt === 2.U) & valid, false.B)
        l3.map(_ := GatedValidRegNext((levelUInt === 3.U) & valid, false.B))
      }
    }
    // duplicate level and sel_pte for each page cache, for better fanout
    val req_info_dup = Vec(3, new L2TlbInnerBundle())
    val level_dup = Vec(3, UInt(log2Up(Level + 1).W))
    val sel_pte_dup = Vec(3, UInt(XLEN.W))
  }))
  // when refilling l0, save the way info for the convenience of the late bitmap wakeup
  // valid in the same cycle as refill.levelOH.l0
  val l0_way_info = Option.when(HasBitmapCheck)(Output(UInt(l2tlbParams.l0nWays.W)))
  val sfence_dup = Vec(4, Input(new SfenceBundle()))
  val csr_dup = Vec(3, Input(new TlbCsrBundle()))
  val bitmap_wakeup = Option.when(HasBitmapCheck)(Flipped(ValidIO(new Bundle {
    val setIndex = Input(UInt(PtwL0SetIdxLen.W))
    val tag = Input(UInt(SPTagLen.W))
    val isSp = Input(Bool())
    val way_info = UInt(l2tlbParams.l0nWays.W)
    val pte_index = UInt(sectortlbwidth.W)
    val check_success = Bool()
  })))
}

class PtwCache()(implicit p: Parameters) extends XSModule with HasPtwConst with HasPerfEvents {
  val io = IO(new PtwCacheIO)
  val ecc = Code.fromString(l2tlbParams.ecc)
  val l1EntryType = new PTWEntriesWithEcc(ecc, num = PtwL1SectorSize, tagLen = PtwL1TagLen, level = 1, hasPerm = false, ReservedBits = l2tlbParams.l1ReservedBits)
  val l0EntryType = new PTWEntriesWithEcc(ecc, num = PtwL0SectorSize, tagLen = PtwL0TagLen, level = 0, hasPerm = true, ReservedBits = l2tlbParams.l0ReservedBits)

  // two additional regs record, per cache entry, whether it has passed bitmap check
  // 32 (l0nSets) * 8 (l0nWays) * 8 (tlbcontiguous)
  val l0BitmapReg = RegInit(VecInit(Seq.fill(l2tlbParams.l0nSets)(VecInit(Seq.fill(l2tlbParams.l0nWays)(VecInit(Seq.fill(tlbcontiguous)(0.U(1.W))))))))
  val spBitmapReg = RegInit(VecInit(Seq.fill(l2tlbParams.spSize)(0.U(1.W))))

  val bitmapEnable = io.csr_dup(0).mbmc.BME === 1.U && io.csr_dup(0).mbmc.CMODE === 0.U
  // TODO: four caches make the code dirty; think about how to deal with it

  val sfence_dup = io.sfence_dup
  val refill = io.refill.bits
  val refill_prefetch_dup = io.refill.bits.req_info_dup.map(a => from_pre(a.source))
  val refill_h = io.refill.bits.req_info_dup.map(a => Mux(a.s2xlate === allStage, onlyStage1, a.s2xlate))
  val flush_dup = sfence_dup.zip(io.csr_dup).map(f => f._1.valid || f._2.satp.changed || f._2.vsatp.changed || f._2.hgatp.changed)
  val flush = flush_dup(0)

  // when refilling, refuse to accept a new req
  val rwHarzad = if (sramSinglePort) io.refill.valid else false.B

  // handle handshake signals and req_info
  // TODO: replace with FlushableQueue
  val stageReq = Wire(Decoupled(new PtwCacheReq()))         // enq stage & read page cache valid
  val stageDelay = Wire(Vec(2, Decoupled(new PtwCacheReq()))) // page cache resp
  val stageCheck = Wire(Vec(2, Decoupled(new PtwCacheReq()))) // check hit & check ecc
  val stageResp = Wire(Decoupled(new PtwCacheReq()))         // deq stage

  val stageDelay_valid_1cycle = OneCycleValid(stageReq.fire, flush)      // catch ram data
  val stageCheck_valid_1cycle = OneCycleValid(stageDelay(1).fire, flush) // replace & perf counter
  val stageResp_valid_1cycle_dup = Wire(Vec(2, Bool()))
  stageResp_valid_1cycle_dup.map(_ := OneCycleValid(stageCheck(1).fire, flush))  // ecc flush

  stageReq <> io.req
  PipelineConnect(stageReq, stageDelay(0), stageDelay(1).ready, flush, rwHarzad)
  InsideStageConnect(stageDelay(0), stageDelay(1), stageDelay_valid_1cycle)
  PipelineConnect(stageDelay(1), stageCheck(0), stageCheck(1).ready, flush)
  InsideStageConnect(stageCheck(0), stageCheck(1), stageCheck_valid_1cycle)
  PipelineConnect(stageCheck(1), stageResp, io.resp.ready, flush)
  stageResp.ready := !stageResp.valid || io.resp.ready
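
  // Pipeline overview (a sketch of the intended timing, one request per stage):
  //   stageReq:   accept a request and issue the l1/l0 SRAM reads
  //   stageDelay: SRAM data returns; the register-based l3/l2/sp hit vectors are evaluated
  //   stageCheck: select the hit way and perform the ECC check
  //   stageResp:  drive io.resp with the merged result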

  // l3: level 3 non-leaf pte
  val l3 = if (EnableSv48) Some(Reg(Vec(l2tlbParams.l3Size, new PtwEntry(tagLen = PtwL3TagLen)))) else None
  val l3v = if (EnableSv48) Some(RegInit(0.U(l2tlbParams.l3Size.W))) else None
  val l3g = if (EnableSv48) Some(Reg(UInt(l2tlbParams.l3Size.W))) else None
  val l3asids = if (EnableSv48) Some(l3.get.map(_.asid)) else None
  val l3vmids = if (EnableSv48) Some(l3.get.map(_.vmid)) else None
  val l3h = if (EnableSv48) Some(Reg(Vec(l2tlbParams.l3Size, UInt(2.W)))) else None

  // l2: level 2 non-leaf pte
  val l2 = Reg(Vec(l2tlbParams.l2Size, new PtwEntry(tagLen = PtwL2TagLen)))
  val l2v = RegInit(0.U(l2tlbParams.l2Size.W))
  val l2g = Reg(UInt(l2tlbParams.l2Size.W))
  val l2asids = l2.map(_.asid)
  val l2vmids = l2.map(_.vmid)
  val l2h = Reg(Vec(l2tlbParams.l2Size, UInt(2.W)))

  // l1: level 1 non-leaf pte
  val l1 = Module(new SplittedSRAM(
    l1EntryType,
    set = l2tlbParams.l1nSets,
    way = l2tlbParams.l1nWays,
    waySplit = 1,
    dataSplit = 4,
    singlePort = sramSinglePort,
    readMCP2 = false
  ))
  val l1v = RegInit(0.U((l2tlbParams.l1nSets * l2tlbParams.l1nWays).W))
  val l1g = Reg(UInt((l2tlbParams.l1nSets * l2tlbParams.l1nWays).W))
  val l1h = Reg(Vec(l2tlbParams.l1nSets, Vec(l2tlbParams.l1nWays, UInt(2.W))))
  def getl1vSet(vpn: UInt) = {
    require(log2Up(l2tlbParams.l1nWays) == log2Down(l2tlbParams.l1nWays))
    val set = genPtwL1SetIdx(vpn)
    require(set.getWidth == log2Up(l2tlbParams.l1nSets))
    val l1vVec = l1v.asTypeOf(Vec(l2tlbParams.l1nSets, UInt(l2tlbParams.l1nWays.W)))
    l1vVec(set)
  }
  def getl1hSet(vpn: UInt) = {
    require(log2Up(l2tlbParams.l1nWays) == log2Down(l2tlbParams.l1nWays))
    val set = genPtwL1SetIdx(vpn)
    require(set.getWidth == log2Up(l2tlbParams.l1nSets))
    l1h(set)
  }
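  // l1v/l1g (and l0v/l0g below) are flat (nSets * nWays)-bit vectors; the helpers
  // above slice out one set's way vector so the hit logic can index by set.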

  // l0: level 0 leaf pte of 4KB pages
  val l0 = Module(new SplittedSRAM(
    l0EntryType,
    set = l2tlbParams.l0nSets,
    way = l2tlbParams.l0nWays,
    waySplit = 2,
    dataSplit = 4,
    singlePort = sramSinglePort,
    readMCP2 = false
  ))
  val l0v = RegInit(0.U((l2tlbParams.l0nSets * l2tlbParams.l0nWays).W))
  val l0g = Reg(UInt((l2tlbParams.l0nSets * l2tlbParams.l0nWays).W))
  val l0h = Reg(Vec(l2tlbParams.l0nSets, Vec(l2tlbParams.l0nWays, UInt(2.W))))
  def getl0vSet(vpn: UInt) = {
    require(log2Up(l2tlbParams.l0nWays) == log2Down(l2tlbParams.l0nWays))
    val set = genPtwL0SetIdx(vpn)
    require(set.getWidth == log2Up(l2tlbParams.l0nSets))
    val l0vVec = l0v.asTypeOf(Vec(l2tlbParams.l0nSets, UInt(l2tlbParams.l0nWays.W)))
    l0vVec(set)
  }
  def getl0hSet(vpn: UInt) = {
    require(log2Up(l2tlbParams.l0nWays) == log2Down(l2tlbParams.l0nWays))
    val set = genPtwL0SetIdx(vpn)
    require(set.getWidth == log2Up(l2tlbParams.l0nSets))
    l0h(set)
  }

  // sp: level 1/2/3 leaf pte of 2MB/1GB/512GB super pages
  val sp = Reg(Vec(l2tlbParams.spSize, new PtwEntry(tagLen = SPTagLen, hasPerm = true, hasLevel = true, hasNapot = true)))
  val spv = RegInit(0.U(l2tlbParams.spSize.W))
  val spg = Reg(UInt(l2tlbParams.spSize.W))
  val spasids = sp.map(_.asid)
  val spvmids = sp.map(_.vmid)
  val sph = Reg(Vec(l2tlbParams.spSize, UInt(2.W)))

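  // Bitmap wakeup: when the external bitmap checker finishes for an entry that was
  // refilled earlier, record the pass/fail result here so that later lookups can
  // either hit directly or be redirected to bitmap check again (jmp_bitmap_check).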
  if (HasBitmapCheck) {
    // wakeup corresponding entry
    when (io.bitmap_wakeup.get.valid) {
      when (io.bitmap_wakeup.get.bits.isSp) {
        for (i <- 0 until l2tlbParams.spSize) {
          when (sp(i).tag === io.bitmap_wakeup.get.bits.tag && spv(i) === 1.U) {
            spBitmapReg(i) := io.bitmap_wakeup.get.bits.check_success
          }
        }
      } .otherwise {
        val wakeup_setindex = io.bitmap_wakeup.get.bits.setIndex
        l0BitmapReg(wakeup_setindex)(OHToUInt(io.bitmap_wakeup.get.bits.way_info))(io.bitmap_wakeup.get.bits.pte_index) := io.bitmap_wakeup.get.bits.check_success
        assert(l0v(wakeup_setindex * l2tlbParams.l0nWays.U + OHToUInt(io.bitmap_wakeup.get.bits.way_info)) === 1.U,
          "Woken-up entry must be valid!")
      }
    }
  }

  // Access Perf
  val l3AccessPerf = if(EnableSv48) Some(Wire(Vec(l2tlbParams.l3Size, Bool()))) else None
  val l2AccessPerf = Wire(Vec(l2tlbParams.l2Size, Bool()))
  val l1AccessPerf = Wire(Vec(l2tlbParams.l1nWays, Bool()))
  val l0AccessPerf = Wire(Vec(l2tlbParams.l0nWays, Bool()))
  val spAccessPerf = Wire(Vec(l2tlbParams.spSize, Bool()))
  if (EnableSv48) l3AccessPerf.map(_.map(_ := false.B))
  l2AccessPerf.map(_ := false.B)
  l1AccessPerf.map(_ := false.B)
  l0AccessPerf.map(_ := false.B)
  spAccessPerf.map(_ := false.B)

  def vpn_match(vpn1: UInt, vpn2: UInt, level: Int) = {
    (vpn1(vpnLen-1, vpnnLen*level+3) === vpn2(vpnLen-1, vpnnLen*level+3))
  }
  // NOTE: not actually bypassed, just check if hit; if so, re-access the page cache
  def refill_bypass(vpn: UInt, level: Int, h_search: UInt) = {
    val change_h = MuxLookup(h_search, noS2xlate)(Seq(
      allStage -> onlyStage1,
      onlyStage1 -> onlyStage1,
      onlyStage2 -> onlyStage2
    ))
    val change_refill_h = MuxLookup(io.refill.bits.req_info_dup(0).s2xlate, noS2xlate)(Seq(
      allStage -> onlyStage1,
      onlyStage1 -> onlyStage1,
      onlyStage2 -> onlyStage2
    ))
    val refill_vpn = io.refill.bits.req_info_dup(0).vpn
    io.refill.valid && (level.U === io.refill.bits.level_dup(0)) && vpn_match(refill_vpn, vpn, level) && change_h === change_refill_h
  }
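  // The helper above flags a request whose in-flight lookup may race with a refill
  // of the same level, VPN region and (collapsed) translation stage: such a request
  // is marked "bypassed" and must re-access the page cache rather than trust a
  // stale miss.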

  val vpn_search = stageReq.bits.req_info.vpn
  val h_search = MuxLookup(stageReq.bits.req_info.s2xlate, noS2xlate)(Seq(
    allStage -> onlyStage1,
    onlyStage1 -> onlyStage1,
    onlyStage2 -> onlyStage2
  ))
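  // For allStage requests the cache is first searched for the stage-1 translation,
  // so allStage is folded into onlyStage1 when matching the per-entry h tags.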

  // l3
  val l3Hit = if(EnableSv48) Some(Wire(Bool())) else None
  val l3HitPPN = if(EnableSv48) Some(Wire(UInt(ppnLen.W))) else None
  val l3HitPbmt = if(EnableSv48) Some(Wire(UInt(ptePbmtLen.W))) else None
  val l3Pre = if(EnableSv48) Some(Wire(Bool())) else None
  val ptwl3replace = if(EnableSv48) Some(ReplacementPolicy.fromString(l2tlbParams.l3Replacer, l2tlbParams.l3Size)) else None
  if (EnableSv48) {
    val hitVecT = l3.get.zipWithIndex.map {
        case (e, i) => (e.hit(vpn_search, io.csr_dup(2).satp.asid, io.csr_dup(2).vsatp.asid, io.csr_dup(2).hgatp.vmid, s2xlate = h_search =/= noS2xlate)
          && l3v.get(i) && h_search === l3h.get(i))
    }
    val hitVec = hitVecT.map(RegEnable(_, stageReq.fire))

    // stageDelay, but check for l3
    val hitPPN = DataHoldBypass(ParallelPriorityMux(hitVec zip l3.get.map(_.ppn)), stageDelay_valid_1cycle)
    val hitPbmt = DataHoldBypass(ParallelPriorityMux(hitVec zip l3.get.map(_.pbmt)), stageDelay_valid_1cycle)
    val hitPre = DataHoldBypass(ParallelPriorityMux(hitVec zip l3.get.map(_.prefetch)), stageDelay_valid_1cycle)
    val hit = DataHoldBypass(ParallelOR(hitVec), stageDelay_valid_1cycle)

    when (hit && stageDelay_valid_1cycle) { ptwl3replace.get.access(OHToUInt(hitVec)) }

    l3AccessPerf.get.zip(hitVec).map{ case (l, h) => l := h && stageDelay_valid_1cycle}
    for (i <- 0 until l2tlbParams.l3Size) {
      XSDebug(stageReq.fire, p"[l3] l3(${i.U}) ${l3.get(i)} hit:${l3.get(i).hit(vpn_search, io.csr_dup(2).satp.asid, io.csr_dup(2).vsatp.asid, io.csr_dup(2).hgatp.vmid, s2xlate = h_search =/= noS2xlate)}\n")
    }
    XSDebug(stageReq.fire, p"[l3] l3v:${Binary(l3v.get)} hitVecT:${Binary(VecInit(hitVecT).asUInt)}\n")
    XSDebug(stageDelay(0).valid, p"[l3] l3Hit:${hit} l3HitPPN:0x${Hexadecimal(hitPPN)} hitVec:${VecInit(hitVec).asUInt}\n")

    VecInit(hitVecT).suggestName(s"l3_hitVecT")
    VecInit(hitVec).suggestName(s"l3_hitVec")

    // synchronize with other entries with RegEnable
    l3Hit.map(_ := RegEnable(hit, stageDelay(1).fire))
    l3HitPPN.map(_ := RegEnable(hitPPN, stageDelay(1).fire))
    l3HitPbmt.map(_ := RegEnable(hitPbmt, stageDelay(1).fire))
    l3Pre.map(_ := RegEnable(hitPre, stageDelay(1).fire))
  }

  // l2
  val ptwl2replace = ReplacementPolicy.fromString(l2tlbParams.l2Replacer, l2tlbParams.l2Size)
  val (l2Hit, l2HitPPN, l2HitPbmt, l2Pre) = {
    val hitVecT = l2.zipWithIndex.map {
      case (e, i) => (e.hit(vpn_search, io.csr_dup(2).satp.asid, io.csr_dup(2).vsatp.asid, io.csr_dup(2).hgatp.vmid, s2xlate = h_search =/= noS2xlate)
        && l2v(i) && h_search === l2h(i))
    }
    val hitVec = hitVecT.map(RegEnable(_, stageReq.fire))

    // stageDelay, but check for l2
    val hitPPN = DataHoldBypass(ParallelPriorityMux(hitVec zip l2.map(_.ppn)), stageDelay_valid_1cycle)
    val hitPbmt = DataHoldBypass(ParallelPriorityMux(hitVec zip l2.map(_.pbmt)), stageDelay_valid_1cycle)
    val hitPre = DataHoldBypass(ParallelPriorityMux(hitVec zip l2.map(_.prefetch)), stageDelay_valid_1cycle)
    val hit = DataHoldBypass(ParallelOR(hitVec), stageDelay_valid_1cycle)

    when (hit && stageDelay_valid_1cycle) { ptwl2replace.access(OHToUInt(hitVec)) }

    l2AccessPerf.zip(hitVec).map{ case (l, h) => l := h && stageDelay_valid_1cycle}
    for (i <- 0 until l2tlbParams.l2Size) {
      XSDebug(stageReq.fire, p"[l2] l2(${i.U}) ${l2(i)} hit:${l2(i).hit(vpn_search, io.csr_dup(2).satp.asid, io.csr_dup(2).vsatp.asid, io.csr_dup(2).hgatp.vmid, s2xlate = h_search =/= noS2xlate)}\n")
    }
    XSDebug(stageReq.fire, p"[l2] l2v:${Binary(l2v)} hitVecT:${Binary(VecInit(hitVecT).asUInt)}\n")
    XSDebug(stageDelay(0).valid, p"[l2] l2Hit:${hit} l2HitPPN:0x${Hexadecimal(hitPPN)} hitVec:${VecInit(hitVec).asUInt}\n")

    VecInit(hitVecT).suggestName(s"l2_hitVecT")
    VecInit(hitVec).suggestName(s"l2_hitVec")

    // synchronize with other entries with RegEnable
    (RegEnable(hit, stageDelay(1).fire),
     RegEnable(hitPPN, stageDelay(1).fire),
     RegEnable(hitPbmt, stageDelay(1).fire),
     RegEnable(hitPre, stageDelay(1).fire))
  }

  // l1
  val ptwl1replace = ReplacementPolicy.fromString(l2tlbParams.l1Replacer, l2tlbParams.l1nWays, l2tlbParams.l1nSets)
  val (l1Hit, l1HitPPN, l1HitPbmt, l1Pre, l1eccError) = {
    val ridx = genPtwL1SetIdx(vpn_search)
    l1.io.r.req.valid := stageReq.fire
    l1.io.r.req.bits.apply(setIdx = ridx)
    val vVec_req = getl1vSet(vpn_search)
    val hVec_req = getl1hSet(vpn_search)

    // delay one cycle after sram read
    val delay_vpn = stageDelay(0).bits.req_info.vpn
    val delay_h = MuxLookup(stageDelay(0).bits.req_info.s2xlate, noS2xlate)(Seq(
      allStage -> onlyStage1,
      onlyStage1 -> onlyStage1,
      onlyStage2 -> onlyStage2
    ))
    val data_resp = DataHoldBypass(l1.io.r.resp.data, stageDelay_valid_1cycle)
    val vVec_delay = RegEnable(vVec_req, stageReq.fire)
    val hVec_delay = RegEnable(hVec_req, stageReq.fire)
    val hitVec_delay = VecInit(data_resp.zip(vVec_delay.asBools).zip(hVec_delay).map { case ((wayData, v), h) =>
      wayData.entries.hit(delay_vpn, io.csr_dup(1).satp.asid, io.csr_dup(1).vsatp.asid, io.csr_dup(1).hgatp.vmid, s2xlate = delay_h =/= noS2xlate) && v && (delay_h === h)})

    // check hit and ecc
    val check_vpn = stageCheck(0).bits.req_info.vpn
    val ramDatas = RegEnable(data_resp, stageDelay(1).fire)
    val vVec = RegEnable(vVec_delay, stageDelay(1).fire).asBools

    val hitVec = RegEnable(hitVec_delay, stageDelay(1).fire)
    val hitWayEntry = ParallelPriorityMux(hitVec zip ramDatas)
    val hitWayData = hitWayEntry.entries
    val hit = ParallelOR(hitVec)
    val hitWay = ParallelPriorityMux(hitVec zip (0 until l2tlbParams.l1nWays).map(_.U(log2Up(l2tlbParams.l1nWays).W)))
    val eccError = WireInit(false.B)
    if (l2tlbParams.enablePTWECC) {
      eccError := hitWayEntry.decode()
    } else {
      eccError := false.B
    }

    ridx.suggestName(s"l1_ridx")
    ramDatas.suggestName(s"l1_ramDatas")
    hitVec.suggestName(s"l1_hitVec")
    hitWayData.suggestName(s"l1_hitWayData")
    hitWay.suggestName(s"l1_hitWay")

    when (hit && stageCheck_valid_1cycle) { ptwl1replace.access(genPtwL1SetIdx(check_vpn), hitWay) }

    l1AccessPerf.zip(hitVec).map{ case (l, h) => l := h && stageCheck_valid_1cycle }
    XSDebug(stageDelay_valid_1cycle, p"[l1] ridx:0x${Hexadecimal(ridx)}\n")
    for (i <- 0 until l2tlbParams.l1nWays) {
      XSDebug(stageCheck_valid_1cycle, p"[l1] ramDatas(${i.U}) ${ramDatas(i)}  l1v:${vVec(i)}  hit:${hit}\n")
    }
    XSDebug(stageCheck_valid_1cycle, p"[l1] l1Hit:${hit} l1HitPPN:0x${Hexadecimal(hitWayData.ppns(genPtwL1SectorIdx(check_vpn)))} hitVec:${Binary(hitVec.asUInt)} hitWay:${hitWay} vidx:${vVec}\n")

    (hit, hitWayData.ppns(genPtwL1SectorIdx(check_vpn)), hitWayData.pbmts(genPtwL1SectorIdx(check_vpn)), hitWayData.prefetch, eccError)
  }

  val l0_masked_clock = ClockGate(false.B, stageReq.fire | (!flush_dup(0) && refill.levelOH.l0), clock)
  val l1_masked_clock = ClockGate(false.B, stageReq.fire | (!flush_dup(1) && refill.levelOH.l1), clock)
  l0.clock := l0_masked_clock
  l1.clock := l1_masked_clock
  // l0
  val ptwl0replace = ReplacementPolicy.fromString(l2tlbParams.l0Replacer, l2tlbParams.l0nWays, l2tlbParams.l0nSets)
  val (l0Hit, l0HitData, l0Pre, l0eccError, l0HitWay, l0BitmapCheckResult, l0JmpBitmapCheck) = {
    val ridx = genPtwL0SetIdx(vpn_search)
    l0.io.r.req.valid := stageReq.fire
    l0.io.r.req.bits.apply(setIdx = ridx)
    val vVec_req = getl0vSet(vpn_search)
    val hVec_req = getl0hSet(vpn_search)

    // delay one cycle after sram read
    val delay_vpn = stageDelay(0).bits.req_info.vpn
    val delay_h = MuxLookup(stageDelay(0).bits.req_info.s2xlate, noS2xlate)(Seq(
      allStage -> onlyStage1,
      onlyStage1 -> onlyStage1,
      onlyStage2 -> onlyStage2
    ))
    val data_resp = DataHoldBypass(l0.io.r.resp.data, stageDelay_valid_1cycle)
    val vVec_delay = RegEnable(vVec_req, stageReq.fire)
    val hVec_delay = RegEnable(hVec_req, stageReq.fire)
    val hitVec_delay = VecInit(data_resp.zip(vVec_delay.asBools).zip(hVec_delay).map { case ((wayData, v), h) =>
      wayData.entries.hit(delay_vpn, io.csr_dup(0).satp.asid, io.csr_dup(0).vsatp.asid, io.csr_dup(0).hgatp.vmid, s2xlate = delay_h =/= noS2xlate) && v && (delay_h === h)})

    // check hit and ecc
    val check_vpn = stageCheck(0).bits.req_info.vpn
    val ramDatas = RegEnable(data_resp, stageDelay(1).fire)
    val vVec = RegEnable(vVec_delay, stageDelay(1).fire).asBools

    val hitVec = RegEnable(hitVec_delay, stageDelay(1).fire)
    val hitWayEntry = ParallelPriorityMux(hitVec zip ramDatas)
    val hitWayData = hitWayEntry.entries
    val hitWayEcc = hitWayEntry.ecc
    val hitWay = ParallelPriorityMux(hitVec zip (0 until l2tlbParams.l0nWays).map(_.U(log2Up(l2tlbParams.l0nWays).W)))

    val ishptw = RegEnable(stageDelay(0).bits.isHptwReq, stageDelay(1).fire)
    val s2x_info = RegEnable(stageDelay(0).bits.req_info.s2xlate, stageDelay(1).fire)
    val pte_index = RegEnable(stageDelay(0).bits.req_info.vpn(sectortlbwidth - 1, 0), stageDelay(1).fire)
    val jmp_bitmap_check = WireInit(false.B)
    val hit = WireInit(false.B)
    val l0bitmapreg = WireInit(VecInit(Seq.fill(l2tlbParams.l0nWays)(VecInit(Seq.fill(tlbcontiguous)(0.U(1.W))))))
    if (HasBitmapCheck) {
      l0bitmapreg := RegEnable(RegNext(l0BitmapReg(ridx)), stageDelay(1).fire)
      // LLPTW triggers the bitmap check itself for allStage requests, so the
      // bitmap-status qualification below is gated by (s2x_info =/= allStage || ishptw)
      hit := Mux(bitmapEnable && (s2x_info =/= allStage || ishptw), ParallelOR(hitVec) && l0bitmapreg(hitWay)(pte_index) === 1.U, ParallelOR(hitVec))
      when (bitmapEnable && (s2x_info =/= allStage || ishptw) && ParallelOR(hitVec) && l0bitmapreg(hitWay)(pte_index) === 0.U) {
        jmp_bitmap_check := true.B
      }
    } else {
      hit := ParallelOR(hitVec)
    }
    val eccError = WireInit(false.B)
    if (l2tlbParams.enablePTWECC) {
      eccError := hitWayEntry.decode()
    } else {
      eccError := false.B
    }

    when (hit && stageCheck_valid_1cycle) { ptwl0replace.access(genPtwL0SetIdx(check_vpn), hitWay) }

    l0AccessPerf.zip(hitVec).map{ case (l, h) => l := h && stageCheck_valid_1cycle }
    XSDebug(stageReq.fire, p"[l0] ridx:0x${Hexadecimal(ridx)}\n")
    for (i <- 0 until l2tlbParams.l0nWays) {
      XSDebug(stageCheck_valid_1cycle, p"[l0] ramDatas(${i.U}) ${ramDatas(i)}  l0v:${vVec(i)}  hit:${hitVec(i)}\n")
    }
    XSDebug(stageCheck_valid_1cycle, p"[l0] l0Hit:${hit} l0HitData:${hitWayData} hitVec:${Binary(hitVec.asUInt)} hitWay:${hitWay} v:${vVec}\n")

    ridx.suggestName(s"l0_ridx")
    ramDatas.suggestName(s"l0_ramDatas")
    hitVec.suggestName(s"l0_hitVec")
    hitWay.suggestName(s"l0_hitWay")

    (hit, hitWayData, hitWayData.prefetch, eccError, UIntToOH(hitWay), l0bitmapreg(hitWay), jmp_bitmap_check)
  }
  val l0HitPPN = l0HitData.ppns
  val l0HitPbmt = l0HitData.pbmts
  val l0HitPerm = l0HitData.perms.getOrElse(0.U.asTypeOf(Vec(PtwL0SectorSize, new PtePermBundle)))
  val l0HitValid = VecInit(l0HitData.onlypf.map(!_))
  val l0Ptes = WireInit(VecInit(Seq.fill(tlbcontiguous)(0.U(XLEN.W)))) // L0 level Page Table Entry Vector
  val l0cfs = WireInit(VecInit(Seq.fill(tlbcontiguous)(false.B))) // L0 level Bitmap Check Failed Vector
  if (HasBitmapCheck) {
    for (i <- 0 until tlbcontiguous) {
      l0Ptes(i) := Cat(l0HitData.pbmts(i).asUInt, l0HitPPN(i), 0.U(2.W), l0HitPerm(i).asUInt, l0HitValid(i).asUInt)
      l0cfs(i) := !l0BitmapCheckResult(i)
    }
  }
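
  // The loop above reassembles an architectural PTE image from the cached fields
  // for the bitmap checker: { pbmt, ppn, rsw(2'b0), perm, v } from high to low,
  // following the Svpbmt-extended PTE layout.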

  // super page
  val spreplace = ReplacementPolicy.fromString(l2tlbParams.spReplacer, l2tlbParams.spSize)
  val (spHit, spHitData, spPre, spValid, spJmpBitmapCheck) = {
    val hitVecT = sp.zipWithIndex.map { case (e, i) => e.hit(vpn_search, io.csr_dup(0).satp.asid, io.csr_dup(0).vsatp.asid, io.csr_dup(0).hgatp.vmid, allType = true, s2xlate = h_search =/= noS2xlate) && spv(i) && (sph(i) === h_search) }
    val hitVec = hitVecT.map(RegEnable(_, stageReq.fire))
    val hitData = ParallelPriorityMux(hitVec zip sp)
    val ishptw = RegEnable(stageReq.bits.isHptwReq, stageReq.fire)
    val s2x_info = RegEnable(stageReq.bits.req_info.s2xlate, stageReq.fire)
    val jmp_bitmap_check = WireInit(false.B)
    val hit = WireInit(false.B)
    if (HasBitmapCheck) {
      hit := Mux(bitmapEnable && (s2x_info =/= allStage || ishptw), ParallelOR(hitVec) && spBitmapReg(OHToUInt(hitVec)) === 1.U, ParallelOR(hitVec))
      when (bitmapEnable && (s2x_info =/= allStage || ishptw) && ParallelOR(hitVec) && spBitmapReg(OHToUInt(hitVec)) === 0.U) {
        jmp_bitmap_check := true.B
      }
    } else {
      hit := ParallelOR(hitVec)
    }

    when (hit && stageDelay_valid_1cycle) { spreplace.access(OHToUInt(hitVec)) }

    spAccessPerf.zip(hitVec).map{ case (s, h) => s := h && stageDelay_valid_1cycle }
    for (i <- 0 until l2tlbParams.spSize) {
      XSDebug(stageReq.fire, p"[sp] sp(${i.U}) ${sp(i)} hit:${sp(i).hit(vpn_search, io.csr_dup(0).satp.asid, io.csr_dup(0).vsatp.asid, io.csr_dup(0).hgatp.vmid, s2xlate = h_search =/= noS2xlate)} spv:${spv(i)}\n")
    }
    XSDebug(stageDelay_valid_1cycle, p"[sp] spHit:${hit} spHitData:${hitData} hitVec:${Binary(VecInit(hitVec).asUInt)}\n")

    VecInit(hitVecT).suggestName(s"sp_hitVecT")
    VecInit(hitVec).suggestName(s"sp_hitVec")

    (RegEnable(hit, stageDelay(1).fire),
     RegEnable(hitData, stageDelay(1).fire),
     RegEnable(hitData.prefetch, stageDelay(1).fire),
     RegEnable(hitData.v, stageDelay(1).fire),
     RegEnable(jmp_bitmap_check, stageDelay(1).fire))
  }
  val spHitPerm = spHitData.perm.getOrElse(0.U.asTypeOf(new PtePermBundle))
  val spHitLevel = spHitData.level.getOrElse(0.U)
  val spPte = Cat(spHitData.pbmt.asUInt, spHitData.ppn, 0.U(2.W), spHitPerm.asUInt, spHitData.v.asUInt) // Super-page Page Table Entry

  val check_res = Wire(new PageCacheRespBundle)
  check_res.l3.map(_.apply(l3Hit.get, l3Pre.get, l3HitPPN.get, l3HitPbmt.get))
  check_res.l2.apply(l2Hit, l2Pre, l2HitPPN, l2HitPbmt)
  check_res.l1.apply(l1Hit, l1Pre, l1HitPPN, l1HitPbmt, ecc = l1eccError)
  check_res.l0.apply(l0Hit, l0Pre, l0HitPPN, l0HitPbmt, l0HitPerm, l0eccError, valid = l0HitValid, jmp_bitmap_check = l0JmpBitmapCheck, hitway = l0HitWay, ptes = l0Ptes, cfs = l0cfs)
  check_res.sp.apply(spHit, spPre, spHitData.ppn, spHitData.pbmt, spHitData.n.getOrElse(0.U), spHitPerm, false.B, spHitLevel, spValid, spJmpBitmapCheck, spPte)

  val resp_res = Reg(new PageCacheRespBundle)
  when (stageCheck(1).fire) { resp_res := check_res }

  // stageResp bypass
  val bypassed = if (EnableSv48) Wire(Vec(4, Bool())) else Wire(Vec(3, Bool()))
  bypassed.indices.foreach(i =>
    bypassed(i) := stageResp.bits.bypassed(i) ||
      ValidHoldBypass(refill_bypass(stageResp.bits.req_info.vpn, i, stageResp.bits.req_info.s2xlate),
        OneCycleValid(stageCheck(1).fire, false.B) || io.refill.valid)
  )

  // stageResp bypass to hptw
  val hptw_bypassed = if (EnableSv48) Wire(Vec(4, Bool())) else Wire(Vec(3, Bool()))
  hptw_bypassed.indices.foreach(i =>
    hptw_bypassed(i) := stageResp.bits.bypassed(i) ||
      ValidHoldBypass(refill_bypass(stageResp.bits.req_info.vpn, i, stageResp.bits.req_info.s2xlate),
        io.resp.fire)
  )
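
  // Same detection as the normal bypass above; only the hold window differs
  // (released on io.resp.fire), so the HPTW side keeps the flag until its
  // response is actually consumed.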

  val isAllStage = stageResp.bits.req_info.s2xlate === allStage
  val isOnlyStage2 = stageResp.bits.req_info.s2xlate === onlyStage2
  val stage1Hit = (resp_res.l0.hit || resp_res.sp.hit) && isAllStage
  val idx = stageResp.bits.req_info.vpn(2, 0)
  val stage1Pf = !Mux(resp_res.l0.hit, resp_res.l0.v(idx), resp_res.sp.v)
  io.resp.bits.req_info   := stageResp.bits.req_info
  io.resp.bits.isFirst  := stageResp.bits.isFirst
  io.resp.bits.hit      := (resp_res.l0.hit || resp_res.sp.hit) && (!isAllStage || isAllStage && stage1Pf)
  if (EnableSv48) {
    io.resp.bits.bypassed := ((bypassed(0) && !resp_res.l0.hit) || (bypassed(1) && !resp_res.l1.hit) || (bypassed(2) && !resp_res.l2.hit) || (bypassed(3) && !resp_res.l3.get.hit)) && !isAllStage
  } else {
    io.resp.bits.bypassed := ((bypassed(0) && !resp_res.l0.hit) || (bypassed(1) && !resp_res.l1.hit) || (bypassed(2) && !resp_res.l2.hit)) && !isAllStage
  }
  io.resp.bits.prefetch := resp_res.l0.pre && resp_res.l0.hit || resp_res.sp.pre && resp_res.sp.hit
  io.resp.bits.toFsm.l3Hit.map(_ := resp_res.l3.get.hit && !stage1Hit && !isOnlyStage2 && !stageResp.bits.isHptwReq)
  io.resp.bits.toFsm.l2Hit := resp_res.l2.hit && !stage1Hit && !isOnlyStage2 && !stageResp.bits.isHptwReq
  io.resp.bits.toFsm.l1Hit := resp_res.l1.hit && !stage1Hit && !isOnlyStage2 && !stageResp.bits.isHptwReq
  io.resp.bits.toFsm.ppn   := Mux(resp_res.l1.hit, resp_res.l1.ppn, Mux(resp_res.l2.hit, resp_res.l2.ppn, resp_res.l3.getOrElse(0.U.asTypeOf(new PageCachePerPespBundle)).ppn))
  io.resp.bits.toFsm.stage1Hit := stage1Hit
  if (HasBitmapCheck) {
    io.resp.bits.toFsm.bitmapCheck.get.jmp_bitmap_check := resp_res.l0.bitmapCheck.get.jmp_bitmap_check || resp_res.sp.bitmapCheck.get.jmp_bitmap_check
    io.resp.bits.toFsm.bitmapCheck.get.toLLPTW := resp_res.l0.bitmapCheck.get.jmp_bitmap_check && (stageResp.bits.req_info.s2xlate === noS2xlate || stageResp.bits.req_info.s2xlate === onlyStage1)
    io.resp.bits.toFsm.bitmapCheck.get.hitway := resp_res.l0.bitmapCheck.get.hitway
    io.resp.bits.toFsm.bitmapCheck.get.pte := resp_res.sp.bitmapCheck.get.pte
    io.resp.bits.toFsm.bitmapCheck.get.ptes := resp_res.l0.bitmapCheck.get.ptes
    io.resp.bits.toFsm.bitmapCheck.get.cfs := resp_res.l0.bitmapCheck.get.cfs
    io.resp.bits.toFsm.bitmapCheck.get.SPlevel := resp_res.sp.level
  }

  io.resp.bits.isHptwReq := stageResp.bits.isHptwReq
  if (EnableSv48) {
    io.resp.bits.toHptw.bypassed := ((hptw_bypassed(0) && !resp_res.l0.hit) || (hptw_bypassed(1) && !resp_res.l1.hit) || (hptw_bypassed(2) && !resp_res.l2.hit) || (hptw_bypassed(3) && !resp_res.l3.get.hit)) && stageResp.bits.isHptwReq
  } else {
    io.resp.bits.toHptw.bypassed := ((hptw_bypassed(0) && !resp_res.l0.hit) || (hptw_bypassed(1) && !resp_res.l1.hit) || (hptw_bypassed(2) && !resp_res.l2.hit)) && stageResp.bits.isHptwReq
  }
  io.resp.bits.toHptw.id := stageResp.bits.hptwId
  io.resp.bits.toHptw.l3Hit.map(_ := resp_res.l3.get.hit && stageResp.bits.isHptwReq)
  io.resp.bits.toHptw.l2Hit := resp_res.l2.hit && stageResp.bits.isHptwReq
  io.resp.bits.toHptw.l1Hit := resp_res.l1.hit && stageResp.bits.isHptwReq
  io.resp.bits.toHptw.ppn := Mux(resp_res.l1.hit, resp_res.l1.ppn, Mux(resp_res.l2.hit, resp_res.l2.ppn, resp_res.l3.getOrElse(0.U.asTypeOf(new PageCachePerPespBundle)).ppn))(ppnLen - 1, 0)
  io.resp.bits.toHptw.resp.entry.tag := stageResp.bits.req_info.vpn
  io.resp.bits.toHptw.resp.entry.asid := DontCare
  io.resp.bits.toHptw.resp.entry.vmid.map(_ := io.csr_dup(0).hgatp.vmid)
  io.resp.bits.toHptw.resp.entry.level.map(_ := Mux(resp_res.l0.hit, 0.U, resp_res.sp.level))
  io.resp.bits.toHptw.resp.entry.prefetch := from_pre(stageResp.bits.req_info.source)
  io.resp.bits.toHptw.resp.entry.ppn := Mux(resp_res.l0.hit, resp_res.l0.ppn(idx), resp_res.sp.ppn)(ppnLen - 1, 0)
  io.resp.bits.toHptw.resp.entry.pbmt := Mux(resp_res.l0.hit, resp_res.l0.pbmt(idx), resp_res.sp.pbmt)
  io.resp.bits.toHptw.resp.entry.n.map(_ := Mux(resp_res.sp.hit, resp_res.sp.n, 0.U))
  io.resp.bits.toHptw.resp.entry.perm.map(_ := Mux(resp_res.l0.hit, resp_res.l0.perm(idx), resp_res.sp.perm))
  io.resp.bits.toHptw.resp.entry.v := Mux(resp_res.l0.hit, resp_res.l0.v(idx), resp_res.sp.v)
  io.resp.bits.toHptw.resp.gpf := !io.resp.bits.toHptw.resp.entry.v
  io.resp.bits.toHptw.resp.gaf := false.B
  if (HasBitmapCheck) {
    io.resp.bits.toHptw.bitmapCheck.get.jmp_bitmap_check := resp_res.l0.bitmapCheck.get.jmp_bitmap_check || resp_res.sp.bitmapCheck.get.jmp_bitmap_check
    io.resp.bits.toHptw.bitmapCheck.get.hitway := resp_res.l0.bitmapCheck.get.hitway
    io.resp.bits.toHptw.bitmapCheck.get.pte := resp_res.sp.bitmapCheck.get.pte
    io.resp.bits.toHptw.bitmapCheck.get.ptes := resp_res.l0.bitmapCheck.get.ptes
    io.resp.bits.toHptw.bitmapCheck.get.cfs := resp_res.l0.bitmapCheck.get.cfs
    io.resp.bits.toHptw.bitmapCheck.get.fromSP := resp_res.sp.bitmapCheck.get.jmp_bitmap_check
    io.resp.bits.toHptw.bitmapCheck.get.SPlevel := resp_res.sp.level
  }

  io.resp.bits.stage1.entry.map(_.tag := stageResp.bits.req_info.vpn(vpnLen - 1, 3))
  io.resp.bits.stage1.entry.map(_.asid := Mux(stageResp.bits.req_info.hasS2xlate(), io.csr_dup(0).vsatp.asid, io.csr_dup(0).satp.asid)) // DontCare
  io.resp.bits.stage1.entry.map(_.vmid.map(_ := io.csr_dup(0).hgatp.vmid))
  if (EnableSv48) {
    io.resp.bits.stage1.entry.map(_.level.map(_ := Mux(resp_res.l0.hit, 0.U,
      Mux(resp_res.sp.hit, resp_res.sp.level,
        Mux(resp_res.l1.hit, 1.U,
          Mux(resp_res.l2.hit, 2.U, 3.U))))))
  } else {
    io.resp.bits.stage1.entry.map(_.level.map(_ := Mux(resp_res.l0.hit, 0.U,
      Mux(resp_res.sp.hit, resp_res.sp.level,
        Mux(resp_res.l1.hit, 1.U, 2.U)))))
  }
  io.resp.bits.stage1.entry.map(_.prefetch := from_pre(stageResp.bits.req_info.source))
  for (i <- 0 until tlbcontiguous) {
    if (EnableSv48) {
      io.resp.bits.stage1.entry(i).ppn := Mux(resp_res.l0.hit, resp_res.l0.ppn(i)(gvpnLen - 1, sectortlbwidth),
        Mux(resp_res.sp.hit, resp_res.sp.ppn(gvpnLen - 1, sectortlbwidth),
          Mux(resp_res.l1.hit, resp_res.l1.ppn(gvpnLen - 1, sectortlbwidth),
            Mux(resp_res.l2.hit, resp_res.l2.ppn(gvpnLen - 1, sectortlbwidth),
              resp_res.l3.get.ppn(gvpnLen - 1, sectortlbwidth)))))
      io.resp.bits.stage1.entry(i).ppn_low := Mux(resp_res.l0.hit, resp_res.l0.ppn(i)(sectortlbwidth - 1, 0),
        Mux(resp_res.sp.hit, resp_res.sp.ppn(sectortlbwidth - 1, 0),
          Mux(resp_res.l1.hit, resp_res.l1.ppn(sectortlbwidth - 1, 0),
            Mux(resp_res.l2.hit, resp_res.l2.ppn(sectortlbwidth - 1, 0),
              resp_res.l3.get.ppn(sectortlbwidth - 1, 0)))))
      io.resp.bits.stage1.entry(i).v := Mux(resp_res.l0.hit, resp_res.l0.v(i),
        Mux(resp_res.sp.hit, resp_res.sp.v,
          Mux(resp_res.l1.hit, resp_res.l1.v,
            Mux(resp_res.l2.hit, resp_res.l2.v,
              resp_res.l3.get.v))))
    } else {
      io.resp.bits.stage1.entry(i).ppn := Mux(resp_res.l0.hit, resp_res.l0.ppn(i)(gvpnLen - 1, sectortlbwidth),
        Mux(resp_res.sp.hit, resp_res.sp.ppn(gvpnLen - 1, sectortlbwidth),
          Mux(resp_res.l1.hit, resp_res.l1.ppn(gvpnLen - 1, sectortlbwidth),
            resp_res.l2.ppn(gvpnLen - 1, sectortlbwidth))))
      io.resp.bits.stage1.entry(i).ppn_low := Mux(resp_res.l0.hit, resp_res.l0.ppn(i)(sectortlbwidth - 1, 0),
        Mux(resp_res.sp.hit, resp_res.sp.ppn(sectortlbwidth - 1, 0),
          Mux(resp_res.l1.hit, resp_res.l1.ppn(sectortlbwidth - 1, 0),
            resp_res.l2.ppn(sectortlbwidth - 1, 0))))
      io.resp.bits.stage1.entry(i).v := Mux(resp_res.l0.hit, resp_res.l0.v(i),
        Mux(resp_res.sp.hit, resp_res.sp.v,
          Mux(resp_res.l1.hit, resp_res.l1.v,
            resp_res.l2.v)))
    }
    io.resp.bits.stage1.entry(i).pbmt := Mux(resp_res.l0.hit, resp_res.l0.pbmt(i),
      Mux(resp_res.sp.hit, resp_res.sp.pbmt,
        Mux(resp_res.l1.hit, resp_res.l1.pbmt,
          resp_res.l2.pbmt)))
    io.resp.bits.stage1.entry(i).n.map(_ := Mux(resp_res.sp.hit, resp_res.sp.n, 0.U))
    io.resp.bits.stage1.entry(i).perm.map(_ := Mux(resp_res.l0.hit, resp_res.l0.perm(i), Mux(resp_res.sp.hit, resp_res.sp.perm, 0.U.asTypeOf(new PtePermBundle))))
    io.resp.bits.stage1.entry(i).pf := !io.resp.bits.stage1.entry(i).v
    io.resp.bits.stage1.entry(i).af := false.B
    io.resp.bits.stage1.entry(i).cf := l0cfs(i) // L0 level Bitmap Check Failed Vector
  }
  io.resp.bits.stage1.pteidx := UIntToOH(idx).asBools
  io.resp.bits.stage1.not_super := resp_res.l0.hit
  io.resp.bits.stage1.not_merge := false.B
  io.resp.valid := stageResp.valid
  XSError(stageResp.valid && resp_res.l0.hit && resp_res.sp.hit, "normal page and super page both hit")

  // refill Perf
  val l3RefillPerf = if (EnableSv48) Some(Wire(Vec(l2tlbParams.l3Size, Bool()))) else None
  val l2RefillPerf = Wire(Vec(l2tlbParams.l2Size, Bool()))
  val l1RefillPerf = Wire(Vec(l2tlbParams.l1nWays, Bool()))
  val l0RefillPerf = Wire(Vec(l2tlbParams.l0nWays, Bool()))
  val spRefillPerf = Wire(Vec(l2tlbParams.spSize, Bool()))
  l3RefillPerf.map(_.map(_ := false.B))
  l2RefillPerf.map(_ := false.B)
  l1RefillPerf.map(_ := false.B)
  l0RefillPerf.map(_ := false.B)
  spRefillPerf.map(_ := false.B)

  // refill
  l1.io.w.req <> DontCare
  l0.io.w.req <> DontCare
  l1.io.w.req.valid := false.B
  l0.io.w.req.valid := false.B

  val memRdata = refill.ptes
  val memPtes = (0 until (l2tlbParams.blockBytes/(XLEN/8))).map(i => memRdata((i+1)*XLEN-1, i*XLEN).asTypeOf(new PteBundle))
  val memSelData = io.refill.bits.sel_pte_dup
  val memPte = memSelData.map(a => a.asTypeOf(new PteBundle))
  val mPBMTE = io.csr.mPBMTE
  val hPBMTE = io.csr.hPBMTE
  val pbmte = Mux(refill.req_info_dup(0).s2xlate === onlyStage1 || refill.req_info_dup(0).s2xlate === allStage, hPBMTE, mPBMTE)
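  // Select the applicable Svpbmt enable: stage-1 (guest) page tables use hPBMTE,
  // host/bare page tables use mPBMTE.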

  def Tran2D(flushMask: UInt): Vec[UInt] = {
    val tran2D = Wire(Vec(l2tlbParams.l0nSets, UInt(l2tlbParams.l0nWays.W)))
    for (i <- 0 until l2tlbParams.l0nSets) {
      tran2D(i) := flushMask((i + 1) * l2tlbParams.l0nWays - 1, i * l2tlbParams.l0nWays)
    }
    tran2D
  }
  def updateL0BitmapReg(l0BitmapReg: Vec[Vec[Vec[UInt]]], tran2D: Vec[UInt]) = {
    for (i <- 0 until l2tlbParams.l0nSets) {
      for (j <- 0 until l2tlbParams.l0nWays) {
        when (tran2D(i)(j) === 0.U) {
          for (k <- 0 until tlbcontiguous) {
            l0BitmapReg(i)(j)(k) := 0.U
          }
        }
      }
    }
  }
  def TranVec(flushMask: UInt): Vec[UInt] = {
    val vec = Wire(Vec(l2tlbParams.spSize, UInt(1.W)))
    for (i <- 0 until l2tlbParams.spSize) {
      vec(i) := flushMask(i)
    }
    vec
  }
  def updateSpBitmapReg(spBitmapReg: Vec[UInt], vec: Vec[UInt]) = {
    for (i <- 0 until l2tlbParams.spSize) {
      spBitmapReg(i) := spBitmapReg(i) & vec(i)
    }
  }
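
  // Tran2D/TranVec reshape the flat masks into per-set/per-entry form; the update
  // helpers clear the bitmap-check status of any entry whose way is being
  // overwritten (they are called with the inverted refill one-hot), so a fresh
  // entry must pass bitmap check again before it can hit.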

  // TODO: handle sfenceLatch outside
  if (EnableSv48) {
    val l3Refill =
      !flush_dup(2) &&
      refill.levelOH.l3.get &&
      !memPte(2).isLeaf() &&
      memPte(2).canRefill(refill.level_dup(2), refill.req_info_dup(2).s2xlate, pbmte, io.csr_dup(2).vsatp.mode)
    val l3RefillIdx = replaceWrapper(l3v.get, ptwl3replace.get.way).suggestName(s"l3_refillIdx")
    val l3RfOH = UIntToOH(l3RefillIdx).asUInt.suggestName(s"l3_rfOH")
    when (l3Refill) {
      l3.get(l3RefillIdx).refill(
        refill.req_info_dup(2).vpn,
        Mux(refill.req_info_dup(2).s2xlate =/= noS2xlate, io.csr_dup(2).vsatp.asid, io.csr_dup(2).satp.asid),
        io.csr_dup(2).hgatp.vmid,
        memSelData(2),
        3.U,
        refill_prefetch_dup(2)
      )
      ptwl3replace.get.access(l3RefillIdx)
      l3v.get := l3v.get | l3RfOH
      l3g.get := (l3g.get & ~l3RfOH) | Mux(memPte(2).perm.g, l3RfOH, 0.U)
      l3h.get(l3RefillIdx) := refill_h(2)

      for (i <- 0 until l2tlbParams.l3Size) {
        l3RefillPerf.get(i) := i.U === l3RefillIdx
      }
    }
    XSDebug(l3Refill, p"[l3 refill] refillIdx:${l3RefillIdx} refillEntry:${l3.get(l3RefillIdx).genPtwEntry(refill.req_info_dup(2).vpn, Mux(refill.req_info_dup(2).s2xlate =/= noS2xlate, io.csr_dup(2).vsatp.asid, io.csr_dup(2).satp.asid), memSelData(2), 0.U, prefetch = refill_prefetch_dup(2))}\n")
    XSDebug(l3Refill, p"[l3 refill] l3v:${Binary(l3v.get)}->${Binary(l3v.get | l3RfOH)} l3g:${Binary(l3g.get)}->${Binary((l3g.get & ~l3RfOH) | Mux(memPte(2).perm.g, l3RfOH, 0.U))}\n")
  }

  // L2 refill
  val l2Refill =
    !flush_dup(2) &&
    refill.levelOH.l2 &&
    !memPte(2).isLeaf() &&
    memPte(2).canRefill(refill.level_dup(2), refill.req_info_dup(2).s2xlate, pbmte, io.csr_dup(2).vsatp.mode)
  val l2RefillIdx = replaceWrapper(l2v, ptwl2replace.way).suggestName(s"l2_refillIdx")
  val l2RfOH = UIntToOH(l2RefillIdx).asUInt.suggestName(s"l2_rfOH")
  when (l2Refill) {
    l2(l2RefillIdx).refill(
      refill.req_info_dup(2).vpn,
      Mux(refill.req_info_dup(2).s2xlate =/= noS2xlate, io.csr_dup(2).vsatp.asid, io.csr_dup(2).satp.asid),
      io.csr_dup(2).hgatp.vmid,
      memSelData(2),
      2.U,
      refill_prefetch_dup(2)
    )
    ptwl2replace.access(l2RefillIdx)
    l2v := l2v | l2RfOH
    l2g := (l2g & ~l2RfOH) | Mux(memPte(2).perm.g, l2RfOH, 0.U)
    l2h(l2RefillIdx) := refill_h(2)

    for (i <- 0 until l2tlbParams.l2Size) {
      l2RefillPerf(i) := i.U === l2RefillIdx
    }
  }
  XSDebug(l2Refill, p"[l2 refill] refillIdx:${l2RefillIdx} refillEntry:${l2(l2RefillIdx).genPtwEntry(refill.req_info_dup(2).vpn, Mux(refill.req_info_dup(2).s2xlate =/= noS2xlate, io.csr_dup(2).vsatp.asid, io.csr_dup(2).satp.asid), memSelData(2), 0.U, prefetch = refill_prefetch_dup(2))}\n")
  XSDebug(l2Refill, p"[l2 refill] l2v:${Binary(l2v)}->${Binary(l2v | l2RfOH)} l2g:${Binary(l2g)}->${Binary((l2g & ~l2RfOH) | Mux(memPte(2).perm.g, l2RfOH, 0.U))}\n")

  // L1 refill
  val l1Refill = !flush_dup(1) && refill.levelOH.l1
  val l1RefillIdx = genPtwL1SetIdx(refill.req_info_dup(1).vpn).suggestName(s"l1_refillIdx")
  val l1VictimWay = replaceWrapper(getl1vSet(refill.req_info_dup(1).vpn), ptwl1replace.way(l1RefillIdx)).suggestName(s"l1_victimWay")
  val l1VictimWayOH = UIntToOH(l1VictimWay).suggestName(s"l1_victimWayOH")
  val l1RfvOH = UIntToOH(Cat(l1RefillIdx, l1VictimWay)).asUInt.suggestName(s"l1_rfvOH")
  val l1Wdata = Wire(l1EntryType)
  l1Wdata.gen(
    vpn = refill.req_info_dup(1).vpn,
    asid = Mux(refill.req_info_dup(1).s2xlate =/= noS2xlate, io.csr_dup(1).vsatp.asid, io.csr_dup(1).satp.asid),
    vmid = io.csr_dup(1).hgatp.vmid,
    data = memRdata,
    levelUInt = 1.U,
    refill_prefetch_dup(1),
    refill.req_info_dup(1).s2xlate,
    pbmte,
    io.csr_dup(1).vsatp.mode
  )
  when (l1Refill) {
    l1.io.w.apply(
      valid = true.B,
      setIdx = l1RefillIdx,
      data = l1Wdata,
      waymask = l1VictimWayOH
    )
    ptwl1replace.access(l1RefillIdx, l1VictimWay)
    l1v := l1v | l1RfvOH
    l1g := l1g & ~l1RfvOH | Mux(Cat(memPtes.map(_.perm.g)).andR, l1RfvOH, 0.U)
    l1h(l1RefillIdx)(l1VictimWay) := refill_h(1)

    for (i <- 0 until l2tlbParams.l1nWays) {
      l1RefillPerf(i) := i.U === l1VictimWay
    }
  }
  XSDebug(l1Refill, p"[l1 refill] refillIdx:0x${Hexadecimal(l1RefillIdx)} victimWay:${l1VictimWay} victimWayOH:${Binary(l1VictimWayOH)} rfvOH(in UInt):${Cat(l1RefillIdx, l1VictimWay)}\n")
  XSDebug(l1Refill, p"[l1 refill] refilldata:0x${l1Wdata}\n")
  XSDebug(l1Refill, p"[l1 refill] l1v:${Binary(l1v)} -> ${Binary(l1v | l1RfvOH)}\n")
  XSDebug(l1Refill, p"[l1 refill] l1g:${Binary(l1g)} -> ${Binary(l1g & ~l1RfvOH | Mux(Cat(memPtes.map(_.perm.g)).andR, l1RfvOH, 0.U))}\n")

  // L0 refill
  val l0Refill = !flush_dup(0) && refill.levelOH.l0 && !memPte(0).isNapot(refill.level_dup(0))
  val l0RefillIdx = genPtwL0SetIdx(refill.req_info_dup(0).vpn).suggestName(s"l0_refillIdx")
  val l0VictimWay = replaceWrapper(getl0vSet(refill.req_info_dup(0).vpn), ptwl0replace.way(l0RefillIdx)).suggestName(s"l0_victimWay")
  val l0VictimWayOH = UIntToOH(l0VictimWay).asUInt.suggestName(s"l0_victimWayOH")
  val l0RfvOH = UIntToOH(Cat(l0RefillIdx, l0VictimWay)).suggestName(s"l0_rfvOH")
  val l0Wdata = Wire(l0EntryType)
  // pass the l0 way info out, for the late bitmap wakeup logic
  if (HasBitmapCheck) {
    io.l0_way_info.get := l0VictimWayOH
  }
  l0Wdata.gen(
    vpn = refill.req_info_dup(0).vpn,
    asid = Mux(refill.req_info_dup(0).s2xlate =/= noS2xlate, io.csr_dup(0).vsatp.asid, io.csr_dup(0).satp.asid),
    vmid = io.csr_dup(0).hgatp.vmid,
    data = memRdata,
    levelUInt = 0.U,
    refill_prefetch_dup(0),
    refill.req_info_dup(0).s2xlate,
    pbmte,
    io.csr_dup(0).vsatp.mode
  )
  when (l0Refill) {
    l0.io.w.apply(
      valid = true.B,
      setIdx = l0RefillIdx,
      data = l0Wdata,
      waymask = l0VictimWayOH
    )
    ptwl0replace.access(l0RefillIdx, l0VictimWay)
    l0v := l0v | l0RfvOH
    l0g := l0g & ~l0RfvOH | Mux(Cat(memPtes.map(_.perm.g)).andR, l0RfvOH, 0.U)
    l0h(l0RefillIdx)(l0VictimWay) := refill_h(0)
    if (HasBitmapCheck) { updateL0BitmapReg(l0BitmapReg, Tran2D(~l0RfvOH)) }

    for (i <- 0 until l2tlbParams.l0nWays) {
      l0RefillPerf(i) := i.U === l0VictimWay
    }
  }
  XSDebug(l0Refill, p"[l0 refill] refillIdx:0x${Hexadecimal(l0RefillIdx)} victimWay:${l0VictimWay} victimWayOH:${Binary(l0VictimWayOH)} rfvOH(in UInt):${Cat(l0RefillIdx, l0VictimWay)}\n")
  XSDebug(l0Refill, p"[l0 refill] refilldata:0x${l0Wdata}\n")
  XSDebug(l0Refill, p"[l0 refill] l0v:${Binary(l0v)} -> ${Binary(l0v | l0RfvOH)}\n")
  XSDebug(l0Refill, p"[l0 refill] l0g:${Binary(l0g)} -> ${Binary(l0g & ~l0RfvOH | Mux(Cat(memPtes.map(_.perm.g)).andR, l0RfvOH, 0.U))}\n")


  // misc entries: super & invalid
  val spRefill =
    !flush_dup(0) &&
    (refill.levelOH.sp || (refill.levelOH.l0 && memPte(0).isNapot(refill.level_dup(0)))) &&
    ((memPte(0).isLeaf() && memPte(0).canRefill(refill.level_dup(0), refill.req_info_dup(0).s2xlate, pbmte, io.csr_dup(0).vsatp.mode)) ||
    memPte(0).onlyPf(refill.level_dup(0), refill.req_info_dup(0).s2xlate, pbmte))
  val spRefillIdx = spreplace.way.suggestName(s"sp_refillIdx") // LFSR64()(log2Up(l2tlbParams.spSize)-1,0) // TODO: may be LRU
  val spRfOH = UIntToOH(spRefillIdx).asUInt.suggestName(s"sp_rfOH")
  when (spRefill) {
    sp(spRefillIdx).refill(
      refill.req_info_dup(0).vpn,
      Mux(refill.req_info_dup(0).s2xlate =/= noS2xlate, io.csr_dup(0).vsatp.asid, io.csr_dup(0).satp.asid),
      io.csr_dup(0).hgatp.vmid,
      memSelData(0),
      refill.level_dup(0),
      refill_prefetch_dup(0),
      !memPte(0).onlyPf(refill.level_dup(0), refill.req_info_dup(0).s2xlate, pbmte)
    )
    spreplace.access(spRefillIdx)
    spv := spv | spRfOH
    spg := spg & ~spRfOH | Mux(memPte(0).perm.g, spRfOH, 0.U)
    sph(spRefillIdx) := refill_h(0)
    if (HasBitmapCheck) { updateSpBitmapReg(spBitmapReg, TranVec(~spRfOH)) }

    for (i <- 0 until l2tlbParams.spSize) {
      spRefillPerf(i) := i.U === spRefillIdx
    }
  }
  XSDebug(spRefill, p"[sp refill] refillIdx:${spRefillIdx} refillEntry:${sp(spRefillIdx).genPtwEntry(refill.req_info_dup(0).vpn, Mux(refill.req_info_dup(0).s2xlate =/= noS2xlate, io.csr_dup(0).vsatp.asid, io.csr_dup(0).satp.asid), memSelData(0), refill.level_dup(0), refill_prefetch_dup(0))}\n")
  XSDebug(spRefill, p"[sp refill] spv:${Binary(spv)}->${Binary(spv | spRfOH)} spg:${Binary(spg)}->${Binary(spg & ~spRfOH | Mux(memPte(0).perm.g, spRfOH, 0.U))}\n")

  val l1eccFlush = resp_res.l1.ecc && stageResp_valid_1cycle_dup(0) // RegNext(l1eccError, init = false.B)
  val l0eccFlush = resp_res.l0.ecc && stageResp_valid_1cycle_dup(1) // RegNext(l0eccError, init = false.B)
  val eccVpn = stageResp.bits.req_info.vpn

  XSError(l1eccFlush, "l2tlb.cache.l1 ecc error. Should not happen at sim stage")
  XSError(l0eccFlush, "l2tlb.cache.l0 ecc error. Should not happen at sim stage")
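  // An ECC error conservatively invalidates the whole indexed set (valid and
  // global bits alike); since apply() reports hit && !ecc, the erroneous access
  // is treated as a miss and the walker refetches the entry.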
  when (l1eccFlush) {
    val flushSetIdxOH = UIntToOH(genPtwL1SetIdx(eccVpn))
    val flushMask = VecInit(flushSetIdxOH.asBools.map { a => Fill(l2tlbParams.l1nWays, a.asUInt) }).asUInt
    l1v := l1v & ~flushMask
    l1g := l1g & ~flushMask
  }

  when (l0eccFlush) {
    val flushSetIdxOH = UIntToOH(genPtwL0SetIdx(eccVpn))
    val flushMask = VecInit(flushSetIdxOH.asBools.map { a => Fill(l2tlbParams.l0nWays, a.asUInt) }).asUInt
    l0v := l0v & ~flushMask
    l0g := l0g & ~flushMask
  }

  // sfence for l0
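  // In the branches below, rs1/rs2 appear to indicate that the corresponding
  // source register of the SFENCE.VMA was x0 (i.e. flush all addresses / all
  // ASIDs respectively), matching the inline "all va && all asid" comments.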
1054  val sfence_valid_l0 = sfence_dup(0).valid && !sfence_dup(0).bits.hg && !sfence_dup(0).bits.hv
1055  when (sfence_valid_l0) {
1056    val l0hhit = VecInit(l0h.flatMap(_.map{a => io.csr_dup(0).priv.virt && a === onlyStage1 || !io.csr_dup(0).priv.virt && a === noS2xlate})).asUInt
1057    val sfence_vpn = sfence_dup(0).bits.addr(sfence_dup(0).bits.addr.getWidth-1, offLen)
1058    when (sfence_dup(0).bits.rs1/*va*/) {
1059      when (sfence_dup(0).bits.rs2) {
1060        // all va && all asid
1061        l0v := l0v & ~l0hhit
1062      } .otherwise {
1063        // all va && specific asid except global
1064        l0v := l0v & (l0g | ~l0hhit)
1065      }
1066    } .otherwise {
1067      // val flushMask = UIntToOH(genTlbl1Idx(sfence.bits.addr(sfence.bits.addr.getWidth-1, offLen)))
1068      val flushSetIdxOH = UIntToOH(genPtwL0SetIdx(sfence_vpn))
1069      // val flushMask = VecInit(flushSetIdxOH.asBools.map(Fill(l2tlbParams.l0nWays, _.asUInt))).asUInt
1070      val flushMask = VecInit(flushSetIdxOH.asBools.map { a => Fill(l2tlbParams.l0nWays, a.asUInt) }).asUInt
1071      flushSetIdxOH.suggestName(s"sfence_nrs1_flushSetIdxOH")
1072      flushMask.suggestName(s"sfence_nrs1_flushMask")
1073
1074      when (sfence_dup(0).bits.rs2) {
1075        // specific leaf of addr && all asid
1076        l0v := l0v & ~flushMask & ~l0hhit
1077      } .otherwise {
1078        // specific leaf of addr && specific asid
1079        l0v := l0v & (~flushMask | l0g | ~l0hhit)
1080      }
1081    }
1082  }
1083
1084  // hfencev, simple implementation for l0
1085  val hfencev_valid_l0 = sfence_dup(0).valid && sfence_dup(0).bits.hv
1086  when(hfencev_valid_l0) {
1087    val flushMask = VecInit(l0h.flatMap(_.map(_  === onlyStage1))).asUInt
1088    l0v := l0v & ~flushMask // all VS-stage l0 pte
1089  }
1090
1091  // hfenceg, simple implementation for l0
1092  val hfenceg_valid_l0 = sfence_dup(0).valid && sfence_dup(0).bits.hg
1093  when(hfenceg_valid_l0) {
1094    val flushMask = VecInit(l0h.flatMap(_.map(_ === onlyStage2))).asUInt
1095    l0v := l0v & ~flushMask // all G-stage l0 pte
1096  }
1097
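  // sfence.vma for l1/l2/sp. Only the leaf-holding caches (sp here, l0 above)
  // honor the address-specific forms: per the privileged spec, SFENCE.VMA with
  // rs1 != x0 orders only stores to the leaf PTE of that address, so the
  // non-leaf l1/l2 entries may be kept. Under V=1 a flush additionally requires
  // the entry's VMID to match the current hgatp.vmid.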
1098  val l2asidhit = VecInit(l2asids.map(_ === sfence_dup(2).bits.id)).asUInt
1099  val spasidhit = VecInit(spasids.map(_ === sfence_dup(0).bits.id)).asUInt
1100  val sfence_valid = sfence_dup(0).valid && !sfence_dup(0).bits.hg && !sfence_dup(0).bits.hv
1101  when (sfence_valid) {
1102    val l2vmidhit = VecInit(l2vmids.map(_.getOrElse(0.U) === io.csr_dup(2).hgatp.vmid)).asUInt
1103    val spvmidhit = VecInit(spvmids.map(_.getOrElse(0.U) === io.csr_dup(0).hgatp.vmid)).asUInt
1104    val l2hhit = VecInit(l2h.map{a => io.csr_dup(2).priv.virt && a === onlyStage1 || !io.csr_dup(2).priv.virt && a === noS2xlate}).asUInt
1105    val sphhit = VecInit(sph.map{a => io.csr_dup(0).priv.virt && a === onlyStage1 || !io.csr_dup(0).priv.virt && a === noS2xlate}).asUInt
1106    val l1hhit = VecInit(l1h.flatMap(_.map{a => io.csr_dup(1).priv.virt && a === onlyStage1 || !io.csr_dup(1).priv.virt && a === noS2xlate})).asUInt
1107    val sfence_vpn = sfence_dup(0).bits.addr(sfence_dup(0).bits.addr.getWidth-1, offLen)
1108
1109    when (sfence_dup(0).bits.rs1/*va*/) {
1110      when (sfence_dup(0).bits.rs2) {
1111        // all va && all asid
1112        l1v := l1v & ~l1hhit
1113        l2v := l2v & ~(l2hhit & VecInit(l2vmidhit.asBools.map{a => io.csr_dup(2).priv.virt && a || !io.csr_dup(2).priv.virt}).asUInt)
1114        spv := spv & ~(sphhit & VecInit(spvmidhit.asBools.map{a => io.csr_dup(0).priv.virt && a || !io.csr_dup(0).priv.virt}).asUInt)
1115      } .otherwise {
1116        // all va && specific asid except global
1117        l1v := l1v & (l1g | ~l1hhit)
1118        l2v := l2v & ~(~l2g & l2hhit & l2asidhit & VecInit(l2vmidhit.asBools.map{a => io.csr_dup(2).priv.virt && a || !io.csr_dup(2).priv.virt}).asUInt)
1119        spv := spv & ~(~spg & sphhit & spasidhit & VecInit(spvmidhit.asBools.map{a => io.csr_dup(0).priv.virt && a || !io.csr_dup(0).priv.virt}).asUInt)
1120      }
1121    } .otherwise {
1122      when (sfence_dup(0).bits.rs2) {
1123        // specific leaf of addr && all asid
1124        spv := spv & ~(sphhit & VecInit(sp.map(_.hit(sfence_vpn, sfence_dup(0).bits.id, sfence_dup(0).bits.id, io.csr_dup(0).hgatp.vmid, ignoreAsid = true, s2xlate = io.csr_dup(0).priv.virt))).asUInt)
1125      } .otherwise {
1126        // specific leaf of addr && specific asid
1127        spv := spv & ~(~spg & sphhit & VecInit(sp.map(_.hit(sfence_vpn, sfence_dup(0).bits.id, sfence_dup(0).bits.id, io.csr_dup(0).hgatp.vmid, s2xlate = io.csr_dup(0).priv.virt))).asUInt)
1128      }
1129    }
1130  }
1131
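  // hfence.vvma: flush VS-stage (onlyStage1) entries of the VM identified by
  // the current hgatp.vmid, with the same rs1/rs2 case split as sfence above;
  // again only sp is matched by address.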
1132  val hfencev_valid = sfence_dup(0).valid && sfence_dup(0).bits.hv
1133  when (hfencev_valid) {
1134    val l2vmidhit = VecInit(l2vmids.map(_.getOrElse(0.U) === io.csr_dup(2).hgatp.vmid)).asUInt
1135    val spvmidhit = VecInit(spvmids.map(_.getOrElse(0.U) === io.csr_dup(0).hgatp.vmid)).asUInt
1136    val l2hhit = VecInit(l2h.map(_ === onlyStage1)).asUInt
1137    val sphhit = VecInit(sph.map(_ === onlyStage1)).asUInt
1138    val l1hhit = VecInit(l1h.flatMap(_.map(_ === onlyStage1))).asUInt
1139    val hfencev_vpn = sfence_dup(0).bits.addr(sfence_dup(0).bits.addr.getWidth-1, offLen)
1140    when(sfence_dup(0).bits.rs1) {
1141      when(sfence_dup(0).bits.rs2) {
1142        l1v := l1v & ~l1hhit
1143        l2v := l2v & ~(l2hhit & l2vmidhit)
1144        spv := spv & ~(sphhit & spvmidhit)
1145      }.otherwise {
1146        l1v := l1v & (l1g | ~l1hhit)
1147        l2v := l2v & ~(~l2g & l2hhit & l2asidhit & l2vmidhit)
1148        spv := spv & ~(~spg & sphhit & spasidhit & spvmidhit)
1149      }
1150    }.otherwise {
1151      when(sfence_dup(0).bits.rs2) {
1152        spv := spv & ~(sphhit & VecInit(sp.map(_.hit(hfencev_vpn, sfence_dup(0).bits.id, sfence_dup(0).bits.id, io.csr_dup(0).hgatp.vmid, ignoreAsid = true, s2xlate = true.B))).asUInt)
1153      }.otherwise {
1154        spv := spv & ~(~spg & sphhit & VecInit(sp.map(_.hit(hfencev_vpn, sfence_dup(0).bits.id, sfence_dup(0).bits.id, io.csr_dup(0).hgatp.vmid, s2xlate = true.B))).asUInt)
1155      }
1156    }
1157  }
1158
1159
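  // hfence.gvma: flush G-stage (onlyStage2) entries. rs1 of HFENCE.GVMA carries
  // a guest-physical address shifted right by 2, hence the << 2 below before
  // extracting the gvpn; the VMID to match is taken from rs2 (bits.id) rather
  // than from hgatp.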
1160  val hfenceg_valid = sfence_dup(0).valid && sfence_dup(0).bits.hg
1161  when(hfenceg_valid) {
1162    val l2vmidhit = VecInit(l2vmids.map(_.getOrElse(0.U) === sfence_dup(2).bits.id)).asUInt
1163    val spvmidhit = VecInit(spvmids.map(_.getOrElse(0.U) === sfence_dup(0).bits.id)).asUInt
1164    val l2hhit = VecInit(l2h.map(_ === onlyStage2)).asUInt
1165    val sphhit = VecInit(sph.map(_ === onlyStage2)).asUInt
1166    val l1hhit = VecInit(l1h.flatMap(_.map(_ === onlyStage2))).asUInt
1167    val hfenceg_gvpn = (sfence_dup(0).bits.addr << 2)(sfence_dup(0).bits.addr.getWidth - 1, offLen)
1168    when(sfence_dup(0).bits.rs1) {
1169      when(sfence_dup(0).bits.rs2) {
1170        l1v := l1v & ~l1hhit
1171        l2v := l2v & ~l2hhit
1172        spv := spv & ~sphhit
1173      }.otherwise {
1174        l1v := l1v & ~l1hhit
1175        l2v := l2v & ~(l2hhit & l2vmidhit)
1176        spv := spv & ~(sphhit & spvmidhit)
1177      }
1178    }.otherwise {
1179      when(sfence_dup(0).bits.rs2) {
1180        spv := spv & ~(sphhit & VecInit(sp.map(_.hit(hfenceg_gvpn, 0.U, 0.U, sfence_dup(0).bits.id, ignoreAsid = true, s2xlate = false.B))).asUInt)
1181      }.otherwise {
1182        spv := spv & ~(~spg & sphhit & VecInit(sp.map(_.hit(hfenceg_gvpn, 0.U, 0.U, sfence_dup(0).bits.id, ignoreAsid = true, s2xlate = true.B))).asUInt)
1183      }
1184    }
1185  }
1186
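  // With Sv48 a fourth, non-leaf level (l3) exists; the three fence flavors are
  // replayed on l3v. Being non-leaf, l3 only reacts to the all-address
  // (rs1 = x0) forms, mirroring l1/l2 above.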
1187  if (EnableSv48) {
1188    val l3asidhit = VecInit(l3asids.get.map(_ === sfence_dup(2).bits.id)).asUInt
    // l3vmidhit and l3hhit are computed inside each fence block below, since
    // their definitions differ between the sfence, hfencev and hfenceg cases.
1191
1192    when (sfence_valid) {
1193      val l3vmidhit = VecInit(l3vmids.get.map(_.getOrElse(0.U) === io.csr_dup(2).hgatp.vmid)).asUInt
1194      val l3hhit = VecInit(l3h.get.map{a => io.csr_dup(2).priv.virt && a === onlyStage1 || !io.csr_dup(2).priv.virt && a === noS2xlate}).asUInt
1195      val sfence_vpn = sfence_dup(2).bits.addr(sfence_dup(2).bits.addr.getWidth-1, offLen)
1196
1197      when (sfence_dup(2).bits.rs1/*va*/) {
1198        when (sfence_dup(2).bits.rs2) {
1199          // all va && all asid
1200          l3v.map(_ := l3v.get & ~(l3hhit & VecInit(l3vmidhit.asBools.map{a => io.csr_dup(2).priv.virt && a || !io.csr_dup(2).priv.virt}).asUInt))
1201        } .otherwise {
1202          // all va && specific asid except global
1203          l3v.map(_ := l3v.get & ~(~l3g.get & l3hhit & l3asidhit & VecInit(l3vmidhit.asBools.map{a => io.csr_dup(2).priv.virt && a || !io.csr_dup(2).priv.virt}).asUInt))
1204        }
1205      }
1206    }
1207
1208    when (hfencev_valid) {
1209      val l3vmidhit = VecInit(l3vmids.get.map(_.getOrElse(0.U) === io.csr_dup(2).hgatp.vmid)).asUInt
1210      val l3hhit = VecInit(l3h.get.map(_ === onlyStage1)).asUInt
1211      val hfencev_vpn = sfence_dup(2).bits.addr(sfence_dup(2).bits.addr.getWidth-1, offLen)
1212      when(sfence_dup(2).bits.rs1) {
1213        when(sfence_dup(2).bits.rs2) {
1214          l3v.map(_ := l3v.get & ~(l3hhit & l3vmidhit))
1215        }.otherwise {
1216          l3v.map(_ := l3v.get & ~(~l3g.get & l3hhit & l3asidhit & l3vmidhit))
1217        }
1218      }
1219    }
1220
1221    when (hfenceg_valid) {
1222      val l3vmidhit = VecInit(l3vmids.get.map(_.getOrElse(0.U) === sfence_dup(2).bits.id)).asUInt
1223      val l3hhit = VecInit(l3h.get.map(_ === onlyStage2)).asUInt
1224      val hfenceg_gvpn = (sfence_dup(2).bits.addr << 2)(sfence_dup(2).bits.addr.getWidth - 1, offLen)
1225      when(sfence_dup(2).bits.rs1) {
1226        when(sfence_dup(2).bits.rs2) {
1227          l3v.map(_ := l3v.get & ~l3hhit)
1228        }.otherwise {
1229          l3v.map(_ := l3v.get & ~(l3hhit & l3vmidhit))
1230        }
1231      }
1232    }
1233  }
1234
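  // Glue between two internal pipeline stages: a combinational ready/valid
  // pass-through plus tracking of the per-level "bypassed" flags. If a refill
  // matching the request's vpn arrives while the request is waiting in this
  // stage, bypassed_reg latches the collision so that the outgoing request
  // still reports it after the refill cycle has passed.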
1235  def InsideStageConnect(in: DecoupledIO[PtwCacheReq], out: DecoupledIO[PtwCacheReq], inFire: Bool): Unit = {
1236    in.ready := !in.valid || out.ready
1237    out.valid := in.valid
1238    out.bits := in.bits
1239    out.bits.bypassed.zip(in.bits.bypassed).zipWithIndex.map{ case (b, i) =>
1240      val bypassed_reg = Reg(Bool())
1241      val bypassed_wire = refill_bypass(in.bits.req_info.vpn, i, in.bits.req_info.s2xlate) && io.refill.valid
1242      when (inFire) { bypassed_reg := bypassed_wire }
1243      .elsewhen (io.refill.valid) { bypassed_reg := bypassed_reg || bypassed_wire }
1244
1245      b._1 := b._2 || (bypassed_wire || (bypassed_reg && !inFire))
1246    }
1247  }
1248
1249  // Perf Count
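  // Counters come in four groups, split by request source and replay status:
  //   access* / pre_access*: demand vs. prefetch requests (from_pre);
  //   *_first variants:      first-time requests only (stageResp.bits.isFirst);
  //   *_pre variants:        hits on entries originally refilled by a prefetch.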
1250  val resp_l0 = resp_res.l0.hit
1251  val resp_sp = resp_res.sp.hit
1252  val resp_l3_pre = if (EnableSv48) Some(resp_res.l3.get.pre) else None
1253  val resp_l2_pre = resp_res.l2.pre
1254  val resp_l1_pre = resp_res.l1.pre
1255  val resp_l0_pre = resp_res.l0.pre
1256  val resp_sp_pre = resp_res.sp.pre
1257  val base_valid_access_0 = !from_pre(io.resp.bits.req_info.source) && io.resp.fire
1258  XSPerfAccumulate("access", base_valid_access_0)
1259  if (EnableSv48) {
1260    XSPerfAccumulate("l3_hit", base_valid_access_0 && io.resp.bits.toFsm.l3Hit.get && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1261  }
1262  XSPerfAccumulate("l2_hit", base_valid_access_0 && io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1263  XSPerfAccumulate("l1_hit", base_valid_access_0 && io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1264  XSPerfAccumulate("l0_hit", base_valid_access_0 && resp_l0)
1265  XSPerfAccumulate("sp_hit", base_valid_access_0 && resp_sp)
1266  XSPerfAccumulate("pte_hit",base_valid_access_0 && io.resp.bits.hit)
1267
1268  if (EnableSv48) {
1269    XSPerfAccumulate("l3_hit_pre", base_valid_access_0 && resp_l3_pre.get && io.resp.bits.toFsm.l3Hit.get && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1270  }
1271  XSPerfAccumulate("l2_hit_pre", base_valid_access_0 && resp_l2_pre && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1272  XSPerfAccumulate("l1_hit_pre", base_valid_access_0 && resp_l1_pre && io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1273  XSPerfAccumulate("l0_hit_pre", base_valid_access_0 && resp_l0_pre && resp_l0)
1274  XSPerfAccumulate("sp_hit_pre", base_valid_access_0 && resp_sp_pre && resp_sp)
1275  XSPerfAccumulate("pte_hit_pre",base_valid_access_0 && (resp_l0_pre && resp_l0 || resp_sp_pre && resp_sp) && io.resp.bits.hit)
1276
1277  val base_valid_access_1 = from_pre(io.resp.bits.req_info.source) && io.resp.fire
1278  XSPerfAccumulate("pre_access", base_valid_access_1)
1279  if (EnableSv48) {
1280    XSPerfAccumulate("pre_l3_hit", base_valid_access_1 && io.resp.bits.toFsm.l3Hit.get && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1281  }
1282  XSPerfAccumulate("pre_l2_hit", base_valid_access_1 && io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1283  XSPerfAccumulate("pre_l1_hit", base_valid_access_1 && io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1284  XSPerfAccumulate("pre_l0_hit", base_valid_access_1 && resp_l0)
1285  XSPerfAccumulate("pre_sp_hit", base_valid_access_1 && resp_sp)
1286  XSPerfAccumulate("pre_pte_hit",base_valid_access_1 && io.resp.bits.hit)
1287
1288  if (EnableSv48) {
1289    XSPerfAccumulate("pre_l3_hit_pre", base_valid_access_1 && resp_l3_pre.get && io.resp.bits.toFsm.l3Hit.get && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1290  }
1291  XSPerfAccumulate("pre_l2_hit_pre", base_valid_access_1 && resp_l2_pre && io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1292  XSPerfAccumulate("pre_l1_hit_pre", base_valid_access_1 && resp_l1_pre && io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1293  XSPerfAccumulate("pre_l0_hit_pre", base_valid_access_1 && resp_l0_pre && resp_l0)
1294  XSPerfAccumulate("pre_sp_hit_pre", base_valid_access_1 && resp_sp_pre && resp_sp)
1295  XSPerfAccumulate("pre_pte_hit_pre",base_valid_access_1 && (resp_l0_pre && resp_l0 || resp_sp_pre && resp_sp) && io.resp.bits.hit)
1296
1297  val base_valid_access_2 = stageResp.bits.isFirst && !from_pre(io.resp.bits.req_info.source) && io.resp.fire
1298  XSPerfAccumulate("access_first", base_valid_access_2)
1299  if (EnableSv48) {
1300    XSPerfAccumulate("l3_hit_first", base_valid_access_2 && io.resp.bits.toFsm.l3Hit.get && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1301  }
1302  XSPerfAccumulate("l2_hit_first", base_valid_access_2 && io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1303  XSPerfAccumulate("l1_hit_first", base_valid_access_2 && io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1304  XSPerfAccumulate("l0_hit_first", base_valid_access_2 && resp_l0)
1305  XSPerfAccumulate("sp_hit_first", base_valid_access_2 && resp_sp)
1306  XSPerfAccumulate("pte_hit_first",base_valid_access_2 && io.resp.bits.hit)
1307
1308  if (EnableSv48) {
1309    XSPerfAccumulate("l3_hit_pre_first", base_valid_access_2 && resp_l3_pre.get && io.resp.bits.toFsm.l3Hit.get && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1310  }
1311  XSPerfAccumulate("l2_hit_pre_first", base_valid_access_2 && resp_l2_pre && io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1312  XSPerfAccumulate("l1_hit_pre_first", base_valid_access_2 && resp_l1_pre && io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1313  XSPerfAccumulate("l0_hit_pre_first", base_valid_access_2 && resp_l0_pre && resp_l0)
1314  XSPerfAccumulate("sp_hit_pre_first", base_valid_access_2 && resp_sp_pre && resp_sp)
1315  XSPerfAccumulate("pte_hit_pre_first",base_valid_access_2 && (resp_l0_pre && resp_l0 || resp_sp_pre && resp_sp) && io.resp.bits.hit)
1316
1317  val base_valid_access_3 = stageResp.bits.isFirst && from_pre(io.resp.bits.req_info.source) && io.resp.fire
1318  XSPerfAccumulate("pre_access_first", base_valid_access_3)
1319  if (EnableSv48) {
1320    XSPerfAccumulate("pre_l3_hit_first", base_valid_access_3 && io.resp.bits.toFsm.l3Hit.get && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1321  }
1322  XSPerfAccumulate("pre_l2_hit_first", base_valid_access_3 && io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1323  XSPerfAccumulate("pre_l1_hit_first", base_valid_access_3 && io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1324  XSPerfAccumulate("pre_l0_hit_first", base_valid_access_3 && resp_l0)
1325  XSPerfAccumulate("pre_sp_hit_first", base_valid_access_3 && resp_sp)
1326  XSPerfAccumulate("pre_pte_hit_first", base_valid_access_3 && io.resp.bits.hit)
1327
1328  if (EnableSv48) {
1329    XSPerfAccumulate("pre_l3_hit_pre_first", base_valid_access_3 && resp_l3_pre.get && io.resp.bits.toFsm.l3Hit.get && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1330  }
1331  XSPerfAccumulate("pre_l2_hit_pre_first", base_valid_access_3 && resp_l2_pre && io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1332  XSPerfAccumulate("pre_l1_hit_pre_first", base_valid_access_3 && resp_l1_pre && io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1333  XSPerfAccumulate("pre_l0_hit_pre_first", base_valid_access_3 && resp_l0_pre && resp_l0)
1334  XSPerfAccumulate("pre_sp_hit_pre_first", base_valid_access_3 && resp_sp_pre && resp_sp)
1335  XSPerfAccumulate("pre_pte_hit_pre_first",base_valid_access_3 && (resp_l0_pre && resp_l0 || resp_sp_pre && resp_sp) && io.resp.bits.hit)
1336
1337  XSPerfAccumulate("rwHarzad", io.req.valid && !io.req.ready)
1338  XSPerfAccumulate("out_blocked", io.resp.valid && !io.resp.ready)
1339  if (EnableSv48) {
1340    l3AccessPerf.get.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"l3AccessIndex${i}", l) }
1341  }
1342  l2AccessPerf.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"l2AccessIndex${i}", l) }
1343  l1AccessPerf.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"l1AccessIndex${i}", l) }
1344  l0AccessPerf.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"l0AccessIndex${i}", l) }
1345  spAccessPerf.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"SPAccessIndex${i}", l) }
1346  if (EnableSv48) {
1347    l3RefillPerf.get.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"l3RefillIndex${i}", l) }
1348  }
1349  l2RefillPerf.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"l2RefillIndex${i}", l) }
1350  l1RefillPerf.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"l1RefillIndex${i}", l) }
1351  l0RefillPerf.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"l0RefillIndex${i}", l) }
1352  spRefillPerf.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"SPRefillIndex${i}", l) }
1353
1354  if (EnableSv48) {
1355    XSPerfAccumulate("l3Refill", Cat(l3RefillPerf.get).orR)
1356  }
1357  XSPerfAccumulate("l2Refill", Cat(l2RefillPerf).orR)
1358  XSPerfAccumulate("l1Refill", Cat(l1RefillPerf).orR)
1359  XSPerfAccumulate("l0Refill", Cat(l0RefillPerf).orR)
1360  XSPerfAccumulate("spRefill", Cat(spRefillPerf).orR)
1361  if (EnableSv48) {
1362    XSPerfAccumulate("l3Refill_pre", Cat(l3RefillPerf.get).orR && refill_prefetch_dup(0))
1363  }
1364  XSPerfAccumulate("l2Refill_pre", Cat(l2RefillPerf).orR && refill_prefetch_dup(0))
1365  XSPerfAccumulate("l1Refill_pre", Cat(l1RefillPerf).orR && refill_prefetch_dup(0))
1366  XSPerfAccumulate("l0Refill_pre", Cat(l0RefillPerf).orR && refill_prefetch_dup(0))
1367  XSPerfAccumulate("spRefill_pre", Cat(spRefillPerf).orR && refill_prefetch_dup(0))
1368
1369  // debug
1370  XSDebug(sfence_dup(0).valid, p"[sfence] original v and g vector:\n")
1371  if (EnableSv48) {
1372    XSDebug(sfence_dup(0).valid, p"[sfence] l3v:${Binary(l3v.get)}\n")
1373  }
1374  XSDebug(sfence_dup(0).valid, p"[sfence] l2v:${Binary(l2v)}\n")
1375  XSDebug(sfence_dup(0).valid, p"[sfence] l1v:${Binary(l1v)}\n")
1376  XSDebug(sfence_dup(0).valid, p"[sfence] l0v:${Binary(l0v)}\n")
1377  XSDebug(sfence_dup(0).valid, p"[sfence] l0g:${Binary(l0g)}\n")
1378  XSDebug(sfence_dup(0).valid, p"[sfence] spv:${Binary(spv)}\n")
1379  XSDebug(RegNext(sfence_dup(0).valid), p"[sfence] new v and g vector:\n")
1380  if (EnableSv48) {
1381    XSDebug(RegNext(sfence_dup(0).valid), p"[sfence] l3v:${Binary(l3v.get)}\n")
1382  }
1383  XSDebug(RegNext(sfence_dup(0).valid), p"[sfence] l2v:${Binary(l2v)}\n")
1384  XSDebug(RegNext(sfence_dup(0).valid), p"[sfence] l1v:${Binary(l1v)}\n")
1385  XSDebug(RegNext(sfence_dup(0).valid), p"[sfence] l0v:${Binary(l0v)}\n")
1386  XSDebug(RegNext(sfence_dup(0).valid), p"[sfence] l0g:${Binary(l0g)}\n")
1387  XSDebug(RegNext(sfence_dup(0).valid), p"[sfence] spv:${Binary(spv)}\n")
1388
1389  val perfEvents = Seq(
1390    ("access           ", base_valid_access_0             ),
1391    ("l2_hit           ", l2Hit                           ),
1392    ("l1_hit           ", l1Hit                           ),
1393    ("l0_hit           ", l0Hit                           ),
1394    ("sp_hit           ", spHit                           ),
1395    ("pte_hit          ", l0Hit || spHit                  ),
1396    ("rwHarzad         ", io.req.valid && !io.req.ready   ),
1397    ("out_blocked      ", io.resp.valid && !io.resp.ready ),
1398  )
1399  generatePerfEvent()
1400}
1401