/***************************************************************************************
* Copyright (c) 2024 Beijing Institute of Open Source Chip (BOSC)
* Copyright (c) 2020-2024 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.cache.mmu

import org.chipsalliance.cde.config.Parameters
import chisel3._
import chisel3.util._
import xiangshan._
import xiangshan.cache.{HasDCacheParameters, MemoryOpConstants}
import utils._
import utility._
import coupledL2.utils.SplittedSRAM
import freechips.rocketchip.diplomacy.{LazyModule, LazyModuleImp}
import freechips.rocketchip.tilelink._

/* The ptw cache caches page table entries of all levels.
 * It responds after a fixed pipeline latency (stageReq -> stageDelay -> stageCheck -> stageResp).
 * The cache itself should not be blocked;
 * when the miss queue is full, just block the req outside.
 */

class PageCachePerPespBundle(implicit p: Parameters) extends PtwBundle {
  val hit = Bool()
  val pre = Bool()
  val ppn = UInt(gvpnLen.W)
  val pbmt = UInt(ptePbmtLen.W)
  val perm = new PtePermBundle()
  val ecc = Bool()
  val level = UInt(2.W)
  val v = Bool()

  def apply(hit: Bool, pre: Bool, ppn: UInt, pbmt: UInt = 0.U,
            perm: PtePermBundle = 0.U.asTypeOf(new PtePermBundle()),
            ecc: Bool = false.B, level: UInt = 0.U, valid: Bool = true.B): Unit = {
    this.hit := hit && !ecc
    this.pre := pre
    this.ppn := ppn
    this.pbmt := pbmt
    this.perm := perm
    this.ecc := ecc && hit
    this.level := level
    this.v := valid
  }
}

class PageCacheMergePespBundle(implicit p: Parameters) extends PtwBundle {
  assert(tlbcontiguous == 8, "Only support tlbcontiguous = 8!")
  val hit = Bool()
  val pre = Bool()
  val ppn = Vec(tlbcontiguous, UInt(gvpnLen.W))
  val pbmt = Vec(tlbcontiguous, UInt(ptePbmtLen.W))
  val perm = Vec(tlbcontiguous, new PtePermBundle())
  val ecc = Bool()
  val level = UInt(2.W)
  val v = Vec(tlbcontiguous, Bool())
  val af = Vec(tlbcontiguous, Bool())

  def apply(hit: Bool, pre: Bool, ppn: Vec[UInt], pbmt: Vec[UInt] = Vec(tlbcontiguous, 0.U),
            perm: Vec[PtePermBundle] = Vec(tlbcontiguous, 0.U.asTypeOf(new PtePermBundle())),
            ecc: Bool = false.B, level: UInt = 0.U, valid: Vec[Bool] = Vec(tlbcontiguous, true.B),
            accessFault: Vec[Bool] = Vec(tlbcontiguous, true.B)): Unit = {
    this.hit := hit && !ecc
    this.pre := pre
    this.ppn := ppn
    this.pbmt := pbmt
    this.perm := perm
    this.ecc := ecc && hit
    this.level := level
    this.v := valid
    this.af := accessFault
  }
}

class PageCacheRespBundle(implicit p: Parameters) extends PtwBundle {
  val l3 = if (EnableSv48) Some(new PageCachePerPespBundle) else None
  val l2 = new PageCachePerPespBundle
  val l1 = new PageCachePerPespBundle
  val l0 = new PageCacheMergePespBundle
  val sp = new PageCachePerPespBundle
}

class PtwCacheReq(implicit p: Parameters) extends PtwBundle {
  val req_info = new L2TlbInnerBundle()
  val isFirst = Bool()
  val bypassed = if (EnableSv48) Vec(4, Bool()) else Vec(3, Bool())
  val isHptwReq = Bool()
  val hptwId = UInt(log2Up(l2tlbParams.llptwsize).W)
}

class PtwCacheIO()(implicit p: Parameters) extends MMUIOBaseBundle with HasPtwConst {
  val req = Flipped(DecoupledIO(new PtwCacheReq()))
  val resp = DecoupledIO(new Bundle {
    val req_info = new L2TlbInnerBundle()
    val isFirst = Bool()
    val hit = Bool()
    val prefetch = Bool() // is the entry fetched by prefetch
    val bypassed = Bool()
    val toFsm = new Bundle {
      val l3Hit = if (EnableSv48) Some(Bool()) else None
      val l2Hit = Bool()
      val l1Hit = Bool()
      val ppn = UInt(gvpnLen.W)
      val stage1Hit = Bool() // find stage 1 pte in cache, but need to search stage 2 pte in cache at PTW
    }
    val stage1 = new PtwMergeResp()
    val isHptwReq = Bool()
    val toHptw = new Bundle {
      val l3Hit = if (EnableSv48) Some(Bool()) else None
      val l2Hit = Bool()
      val l1Hit = Bool()
      val ppn = UInt(ppnLen.W)
      val id = UInt(log2Up(l2tlbParams.llptwsize).W)
      val resp = new HptwResp() // used if hit
      val bypassed = Bool()
    }
  })
  val refill = Flipped(ValidIO(new Bundle {
    val ptes = UInt(blockBits.W)
    val levelOH = new Bundle {
      // NOTE: levelOH has (Level+1) bits, each bit standing for one class of page cache entries
      val sp = Bool()
      val l0 = Bool()
      val l1 = Bool()
      val l2 = Bool()
      val l3 = if (EnableSv48) Some(Bool()) else None
      def apply(levelUInt: UInt, valid: Bool) = {
        sp := GatedValidRegNext((levelUInt === 1.U || levelUInt === 2.U || levelUInt === 3.U) && valid, false.B)
        l0 := GatedValidRegNext((levelUInt === 0.U) & valid, false.B)
        l1 := GatedValidRegNext((levelUInt === 1.U) & valid, false.B)
        l2 := GatedValidRegNext((levelUInt === 2.U) & valid, false.B)
        l3.map(_ := GatedValidRegNext((levelUInt === 3.U) & valid, false.B))
      }
    }
    // duplicate level and sel_pte for each page cache, for better fanout
    val req_info_dup = Vec(3, new L2TlbInnerBundle())
    val level_dup = Vec(3, UInt(log2Up(Level + 1).W))
    val sel_pte_dup = Vec(3, UInt(XLEN.W))
  }))
  val sfence_dup = Vec(4, Input(new SfenceBundle()))
  val csr_dup = Vec(3, Input(new TlbCsrBundle()))
}
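
// A rough usage sketch of this IO from the requester's side (the enclosing
// L2 TLB); the names `cache` and `missQueue` below are illustrative, not
// definitions from this file:
//
//   cache.io.req.valid := arb.out.valid
//   cache.io.req.bits.req_info := arb.out.bits
//   // a miss that was not bypassed is handed over to the PTW FSM / LLPTW,
//   // e.g. via a miss queue, since the cache itself never blocks
//   missQueue.io.in.valid := cache.io.resp.valid && !cache.io.resp.bits.hit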

class PtwCache()(implicit p: Parameters) extends XSModule with HasPtwConst with HasPerfEvents {
  val io = IO(new PtwCacheIO)
  val ecc = Code.fromString(l2tlbParams.ecc)
  val l1EntryType = new PTWEntriesWithEcc(ecc, num = PtwL1SectorSize, tagLen = PtwL1TagLen, level = 1, hasPerm = false, ReservedBits = l2tlbParams.l1ReservedBits)
  val l0EntryType = new PTWEntriesWithEcc(ecc, num = PtwL0SectorSize, tagLen = PtwL0TagLen, level = 0, hasPerm = true, ReservedBits = l2tlbParams.l0ReservedBits)

  // TODO: four caches make the code dirty, think about how to deal with it

  val sfence_dup = io.sfence_dup
  val refill = io.refill.bits
  val refill_prefetch_dup = io.refill.bits.req_info_dup.map(a => from_pre(a.source))
  val refill_h = io.refill.bits.req_info_dup.map(a => Mux(a.s2xlate === allStage, onlyStage1, a.s2xlate))
  val flush_dup = sfence_dup.zip(io.csr_dup).map(f => f._1.valid || f._2.satp.changed || f._2.vsatp.changed || f._2.hgatp.changed)
  val flush = flush_dup(0)

  // when refilling, refuse to accept new req
  val rwHazard = if (sramSinglePort) io.refill.valid else false.B

  // handle handshake signals and req_info
  // TODO: replace with FlushableQueue
  val stageReq = Wire(Decoupled(new PtwCacheReq()))         // enq stage & read page cache valid
  val stageDelay = Wire(Vec(2, Decoupled(new PtwCacheReq()))) // page cache resp
  val stageCheck = Wire(Vec(2, Decoupled(new PtwCacheReq()))) // check hit & check ecc
  val stageResp = Wire(Decoupled(new PtwCacheReq()))         // deq stage

  val stageDelay_valid_1cycle = OneCycleValid(stageReq.fire, flush)      // catch ram data
  val stageCheck_valid_1cycle = OneCycleValid(stageDelay(1).fire, flush) // replace & perf counter
  val stageResp_valid_1cycle_dup = Wire(Vec(2, Bool()))
  stageResp_valid_1cycle_dup.map(_ := OneCycleValid(stageCheck(1).fire, flush))  // ecc flush

  stageReq <> io.req
  PipelineConnect(stageReq, stageDelay(0), stageDelay(1).ready, flush, rwHazard)
  InsideStageConnect(stageDelay(0), stageDelay(1), stageDelay_valid_1cycle)
  PipelineConnect(stageDelay(1), stageCheck(0), stageCheck(1).ready, flush)
  InsideStageConnect(stageCheck(0), stageCheck(1), stageCheck_valid_1cycle)
  PipelineConnect(stageCheck(1), stageResp, io.resp.ready, flush)
  stageResp.ready := !stageResp.valid || io.resp.ready
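
  // Pipeline overview (one request flows through four stages):
  //   stageReq   -> issue SRAM reads (l1/l0) and probe the register arrays (l3/l2/sp)
  //   stageDelay -> SRAM data comes back; hit vectors are latched
  //   stageCheck -> check hit and ECC, update replacement state
  //   stageResp  -> drive io.resp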

  // l3: level 3 non-leaf pte
  val l3 = if (EnableSv48) Some(Reg(Vec(l2tlbParams.l3Size, new PtwEntry(tagLen = PtwL3TagLen)))) else None
  val l3v = if (EnableSv48) Some(RegInit(0.U(l2tlbParams.l3Size.W))) else None
  val l3g = if (EnableSv48) Some(Reg(UInt(l2tlbParams.l3Size.W))) else None
  val l3asids = if (EnableSv48) Some(l3.get.map(_.asid)) else None
  val l3vmids = if (EnableSv48) Some(l3.get.map(_.vmid)) else None
  val l3h = if (EnableSv48) Some(Reg(Vec(l2tlbParams.l3Size, UInt(2.W)))) else None

  // l2: level 2 non-leaf pte
  val l2 = Reg(Vec(l2tlbParams.l2Size, new PtwEntry(tagLen = PtwL2TagLen)))
  val l2v = RegInit(0.U(l2tlbParams.l2Size.W))
  val l2g = Reg(UInt(l2tlbParams.l2Size.W))
  val l2asids = l2.map(_.asid)
  val l2vmids = l2.map(_.vmid)
  val l2h = Reg(Vec(l2tlbParams.l2Size, UInt(2.W)))

  // l1: level 1 non-leaf pte
  val l1 = Module(new SplittedSRAM(
    l1EntryType,
    set = l2tlbParams.l1nSets,
    way = l2tlbParams.l1nWays,
    waySplit = 2,
    dataSplit = 4,
    singlePort = sramSinglePort,
    readMCP2 = false
  ))
  val l1v = RegInit(0.U((l2tlbParams.l1nSets * l2tlbParams.l1nWays).W))
  val l1g = Reg(UInt((l2tlbParams.l1nSets * l2tlbParams.l1nWays).W))
  val l1h = Reg(Vec(l2tlbParams.l1nSets, Vec(l2tlbParams.l1nWays, UInt(2.W))))
  def getl1vSet(vpn: UInt) = {
    require(log2Up(l2tlbParams.l1nWays) == log2Down(l2tlbParams.l1nWays))
    val set = genPtwL1SetIdx(vpn)
    require(set.getWidth == log2Up(l2tlbParams.l1nSets))
    val l1vVec = l1v.asTypeOf(Vec(l2tlbParams.l1nSets, UInt(l2tlbParams.l1nWays.W)))
    l1vVec(set)
  }
  def getl1hSet(vpn: UInt) = {
    require(log2Up(l2tlbParams.l1nWays) == log2Down(l2tlbParams.l1nWays))
    val set = genPtwL1SetIdx(vpn)
    require(set.getWidth == log2Up(l2tlbParams.l1nSets))
    l1h(set)
  }
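
  // l1v/l1g are flat (nSets * nWays)-bit registers; the helpers above view them
  // as Vec(nSets, UInt(nWays.W)) and pick one set. For example, with
  // hypothetical parameters of 32 sets x 8 ways, set 3 maps to l1v(31, 24).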

  // l0: level 0 leaf pte of 4KB pages
  val l0 = Module(new SplittedSRAM(
    l0EntryType,
    set = l2tlbParams.l0nSets,
    way = l2tlbParams.l0nWays,
    waySplit = 4,
    dataSplit = 4,
    singlePort = sramSinglePort,
    readMCP2 = false
  ))
  val l0v = RegInit(0.U((l2tlbParams.l0nSets * l2tlbParams.l0nWays).W))
  val l0g = Reg(UInt((l2tlbParams.l0nSets * l2tlbParams.l0nWays).W))
  val l0h = Reg(Vec(l2tlbParams.l0nSets, Vec(l2tlbParams.l0nWays, UInt(2.W))))
  def getl0vSet(vpn: UInt) = {
    require(log2Up(l2tlbParams.l0nWays) == log2Down(l2tlbParams.l0nWays))
    val set = genPtwL0SetIdx(vpn)
    require(set.getWidth == log2Up(l2tlbParams.l0nSets))
    val l0vVec = l0v.asTypeOf(Vec(l2tlbParams.l0nSets, UInt(l2tlbParams.l0nWays.W)))
    l0vVec(set)
  }
  def getl0hSet(vpn: UInt) = {
    require(log2Up(l2tlbParams.l0nWays) == log2Down(l2tlbParams.l0nWays))
    val set = genPtwL0SetIdx(vpn)
    require(set.getWidth == log2Up(l2tlbParams.l0nSets))
    l0h(set)
  }

  // sp: level 3/2/1 leaf pte of 512GB/1GB/2MB super pages
  val sp = Reg(Vec(l2tlbParams.spSize, new PtwEntry(tagLen = SPTagLen, hasPerm = true, hasLevel = true)))
  val spv = RegInit(0.U(l2tlbParams.spSize.W))
  val spg = Reg(UInt(l2tlbParams.spSize.W))
  val spasids = sp.map(_.asid)
  val spvmids = sp.map(_.vmid)
  val sph = Reg(Vec(l2tlbParams.spSize, UInt(2.W)))

  // Access Perf
  val l3AccessPerf = if(EnableSv48) Some(Wire(Vec(l2tlbParams.l3Size, Bool()))) else None
  val l2AccessPerf = Wire(Vec(l2tlbParams.l2Size, Bool()))
  val l1AccessPerf = Wire(Vec(l2tlbParams.l1nWays, Bool()))
  val l0AccessPerf = Wire(Vec(l2tlbParams.l0nWays, Bool()))
  val spAccessPerf = Wire(Vec(l2tlbParams.spSize, Bool()))
  if (EnableSv48) l3AccessPerf.map(_.map(_ := false.B))
  l2AccessPerf.map(_ := false.B)
  l1AccessPerf.map(_ := false.B)
  l0AccessPerf.map(_ := false.B)
  spAccessPerf.map(_ := false.B)

  def vpn_match(vpn1: UInt, vpn2: UInt, level: Int) = {
    (vpn1(vpnLen-1, vpnnLen*level+3) === vpn2(vpnLen-1, vpnnLen*level+3))
  }
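  // e.g. for Sv39 (vpnnLen = 9), vpn_match(a, b, 1) compares a(vpnLen-1, 12)
  // with b(vpnLen-1, 12): the low vpnnLen*1 + 3 = 12 bits of the vpn (the
  // level-0 VPN slice plus the 3-bit sector index) are ignored at level 1.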
  // NOTE: not actually a bypass: just check whether an in-flight refill hits this req; if so, the req re-accesses the page cache
  def refill_bypass(vpn: UInt, level: Int, h_search: UInt) = {
    val change_h = MuxLookup(h_search, noS2xlate)(Seq(
      allStage -> onlyStage1,
      onlyStage1 -> onlyStage1,
      onlyStage2 -> onlyStage2
    ))
    val change_refill_h = MuxLookup(io.refill.bits.req_info_dup(0).s2xlate, noS2xlate)(Seq(
      allStage -> onlyStage1,
      onlyStage1 -> onlyStage1,
      onlyStage2 -> onlyStage2
    ))
    val refill_vpn = io.refill.bits.req_info_dup(0).vpn
    io.refill.valid && (level.U === io.refill.bits.level_dup(0)) && vpn_match(refill_vpn, vpn, level) && change_h === change_refill_h
  }
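  // Example: a level-1 req whose upper VPN slice matches the VPN being refilled
  // this cycle (after normalizing allStage to onlyStage1 on both sides) is
  // flagged, so it re-accesses the page cache instead of missing on stale data.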

  val vpn_search = stageReq.bits.req_info.vpn
  val h_search = MuxLookup(stageReq.bits.req_info.s2xlate, noS2xlate)(Seq(
    allStage -> onlyStage1,
    onlyStage1 -> onlyStage1,
    onlyStage2 -> onlyStage2
  ))

  // l3
  val l3Hit = if(EnableSv48) Some(Wire(Bool())) else None
  val l3HitPPN = if(EnableSv48) Some(Wire(UInt(ppnLen.W))) else None
  val l3HitPbmt = if(EnableSv48) Some(Wire(UInt(ptePbmtLen.W))) else None
  val l3Pre = if(EnableSv48) Some(Wire(Bool())) else None
  val ptwl3replace = if(EnableSv48) Some(ReplacementPolicy.fromString(l2tlbParams.l3Replacer, l2tlbParams.l3Size)) else None
  if (EnableSv48) {
    val hitVecT = l3.get.zipWithIndex.map {
        case (e, i) => (e.hit(vpn_search, io.csr_dup(2).satp.asid, io.csr_dup(2).vsatp.asid, io.csr_dup(2).hgatp.vmid, s2xlate = h_search =/= noS2xlate)
          && l3v.get(i) && h_search === l3h.get(i))
    }
    val hitVec = hitVecT.map(RegEnable(_, stageReq.fire))

    // stageDelay, but check for l3
    val hitPPN = DataHoldBypass(ParallelPriorityMux(hitVec zip l3.get.map(_.ppn)), stageDelay_valid_1cycle)
    val hitPbmt = DataHoldBypass(ParallelPriorityMux(hitVec zip l3.get.map(_.pbmt)), stageDelay_valid_1cycle)
    val hitPre = DataHoldBypass(ParallelPriorityMux(hitVec zip l3.get.map(_.prefetch)), stageDelay_valid_1cycle)
    val hit = DataHoldBypass(ParallelOR(hitVec), stageDelay_valid_1cycle)

    when (hit && stageDelay_valid_1cycle) { ptwl3replace.get.access(OHToUInt(hitVec)) }

    l3AccessPerf.get.zip(hitVec).map{ case (l, h) => l := h && stageDelay_valid_1cycle}
    for (i <- 0 until l2tlbParams.l3Size) {
      XSDebug(stageReq.fire, p"[l3] l3(${i.U}) ${l3.get(i)} hit:${l3.get(i).hit(vpn_search, io.csr_dup(2).satp.asid, io.csr_dup(2).vsatp.asid, io.csr_dup(2).hgatp.vmid, s2xlate = h_search =/= noS2xlate)}\n")
    }
    XSDebug(stageReq.fire, p"[l3] l3v:${Binary(l3v.get)} hitVecT:${Binary(VecInit(hitVecT).asUInt)}\n")
    XSDebug(stageDelay(0).valid, p"[l3] l3Hit:${hit} l3HitPPN:0x${Hexadecimal(hitPPN)} hitVec:${VecInit(hitVec).asUInt}\n")

    VecInit(hitVecT).suggestName(s"l3_hitVecT")
    VecInit(hitVec).suggestName(s"l3_hitVec")

    // synchronize with other entries with RegEnable
    l3Hit.map(_ := RegEnable(hit, stageDelay(1).fire))
    l3HitPPN.map(_ := RegEnable(hitPPN, stageDelay(1).fire))
    l3HitPbmt.map(_ := RegEnable(hitPbmt, stageDelay(1).fire))
    l3Pre.map(_ := RegEnable(hitPre, stageDelay(1).fire))
  }

  // l2
  val ptwl2replace = ReplacementPolicy.fromString(l2tlbParams.l2Replacer, l2tlbParams.l2Size)
  val (l2Hit, l2HitPPN, l2HitPbmt, l2Pre) = {
    val hitVecT = l2.zipWithIndex.map {
      case (e, i) => (e.hit(vpn_search, io.csr_dup(2).satp.asid, io.csr_dup(2).vsatp.asid, io.csr_dup(2).hgatp.vmid, s2xlate = h_search =/= noS2xlate)
        && l2v(i) && h_search === l2h(i))
    }
    val hitVec = hitVecT.map(RegEnable(_, stageReq.fire))

    // stageDelay, but check for l2
    val hitPPN = DataHoldBypass(ParallelPriorityMux(hitVec zip l2.map(_.ppn)), stageDelay_valid_1cycle)
    val hitPbmt = DataHoldBypass(ParallelPriorityMux(hitVec zip l2.map(_.pbmt)), stageDelay_valid_1cycle)
    val hitPre = DataHoldBypass(ParallelPriorityMux(hitVec zip l2.map(_.prefetch)), stageDelay_valid_1cycle)
    val hit = DataHoldBypass(ParallelOR(hitVec), stageDelay_valid_1cycle)

    when (hit && stageDelay_valid_1cycle) { ptwl2replace.access(OHToUInt(hitVec)) }

    l2AccessPerf.zip(hitVec).map{ case (l, h) => l := h && stageDelay_valid_1cycle}
    for (i <- 0 until l2tlbParams.l2Size) {
      XSDebug(stageReq.fire, p"[l2] l2(${i.U}) ${l2(i)} hit:${l2(i).hit(vpn_search, io.csr_dup(2).satp.asid, io.csr_dup(2).vsatp.asid, io.csr_dup(2).hgatp.vmid, s2xlate = h_search =/= noS2xlate)}\n")
    }
    XSDebug(stageReq.fire, p"[l2] l2v:${Binary(l2v)} hitVecT:${Binary(VecInit(hitVecT).asUInt)}\n")
    XSDebug(stageDelay(0).valid, p"[l2] l2Hit:${hit} l2HitPPN:0x${Hexadecimal(hitPPN)} hitVec:${VecInit(hitVec).asUInt}\n")

    VecInit(hitVecT).suggestName(s"l2_hitVecT")
    VecInit(hitVec).suggestName(s"l2_hitVec")

    // synchronize with other entries with RegEnable
    (RegEnable(hit, stageDelay(1).fire),
     RegEnable(hitPPN, stageDelay(1).fire),
     RegEnable(hitPbmt, stageDelay(1).fire),
     RegEnable(hitPre, stageDelay(1).fire))
  }

  // l1
  val ptwl1replace = ReplacementPolicy.fromString(l2tlbParams.l1Replacer,l2tlbParams.l1nWays,l2tlbParams.l1nSets)
  val (l1Hit, l1HitPPN, l1HitPbmt, l1Pre, l1eccError) = {
    val ridx = genPtwL1SetIdx(vpn_search)
    l1.io.r.req.valid := stageReq.fire
    l1.io.r.req.bits.apply(setIdx = ridx)
    val vVec_req = getl1vSet(vpn_search)
    val hVec_req = getl1hSet(vpn_search)

    // delay one cycle after sram read
    val delay_vpn = stageDelay(0).bits.req_info.vpn
    val delay_h = MuxLookup(stageDelay(0).bits.req_info.s2xlate, noS2xlate)(Seq(
      allStage -> onlyStage1,
      onlyStage1 -> onlyStage1,
      onlyStage2 -> onlyStage2
    ))
    val data_resp = DataHoldBypass(l1.io.r.resp.data, stageDelay_valid_1cycle)
    val vVec_delay = RegEnable(vVec_req, stageReq.fire)
    val hVec_delay = RegEnable(hVec_req, stageReq.fire)
    val hitVec_delay = VecInit(data_resp.zip(vVec_delay.asBools).zip(hVec_delay).map { case ((wayData, v), h) =>
      wayData.entries.hit(delay_vpn, io.csr_dup(1).satp.asid, io.csr_dup(1).vsatp.asid, io.csr_dup(1).hgatp.vmid, s2xlate = delay_h =/= noS2xlate) && v && (delay_h === h)})

    // check hit and ecc
    val check_vpn = stageCheck(0).bits.req_info.vpn
    val ramDatas = RegEnable(data_resp, stageDelay(1).fire)
    val vVec = RegEnable(vVec_delay, stageDelay(1).fire).asBools

    val hitVec = RegEnable(hitVec_delay, stageDelay(1).fire)
    val hitWayEntry = ParallelPriorityMux(hitVec zip ramDatas)
    val hitWayData = hitWayEntry.entries
    val hit = ParallelOR(hitVec)
    val hitWay = ParallelPriorityMux(hitVec zip (0 until l2tlbParams.l1nWays).map(_.U(log2Up(l2tlbParams.l1nWays).W)))
    val eccError = WireInit(false.B)
    if (l2tlbParams.enablePTWECC) {
      eccError := hitWayEntry.decode()
    } else {
      eccError := false.B
    }
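
    // decode() reports an ECC error on the hit way (see PTWEntriesWithEcc); an
    // erroneous hit is demoted to a miss (PageCachePerPespBundle.apply sets
    // hit := hit && !ecc) and the offending set is flushed below (l1eccFlush).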

    ridx.suggestName(s"l1_ridx")
    ramDatas.suggestName(s"l1_ramDatas")
    hitVec.suggestName(s"l1_hitVec")
    hitWayData.suggestName(s"l1_hitWayData")
    hitWay.suggestName(s"l1_hitWay")

    when (hit && stageCheck_valid_1cycle) { ptwl1replace.access(genPtwL1SetIdx(check_vpn), hitWay) }

    l1AccessPerf.zip(hitVec).map{ case (l, h) => l := h && stageCheck_valid_1cycle }
    XSDebug(stageDelay_valid_1cycle, p"[l1] ridx:0x${Hexadecimal(ridx)}\n")
    for (i <- 0 until l2tlbParams.l1nWays) {
      XSDebug(stageCheck_valid_1cycle, p"[l1] ramDatas(${i.U}) ${ramDatas(i)}  l1v:${vVec(i)}  hit:${hit}\n")
    }
    XSDebug(stageCheck_valid_1cycle, p"[l1] l1Hit:${hit} l1HitPPN:0x${Hexadecimal(hitWayData.ppns(genPtwL1SectorIdx(check_vpn)))} hitVec:${Binary(hitVec.asUInt)} hitWay:${hitWay} vidx:${vVec}\n")

    (hit, hitWayData.ppns(genPtwL1SectorIdx(check_vpn)), hitWayData.pbmts(genPtwL1SectorIdx(check_vpn)), hitWayData.prefetch, eccError)
  }

  // l0
  val ptwl0replace = ReplacementPolicy.fromString(l2tlbParams.l0Replacer,l2tlbParams.l0nWays,l2tlbParams.l0nSets)
  val (l0Hit, l0HitData, l0Pre, l0eccError) = {
    val ridx = genPtwL0SetIdx(vpn_search)
    l0.io.r.req.valid := stageReq.fire
    l0.io.r.req.bits.apply(setIdx = ridx)
    val vVec_req = getl0vSet(vpn_search)
    val hVec_req = getl0hSet(vpn_search)

    // delay one cycle after sram read
    val delay_vpn = stageDelay(0).bits.req_info.vpn
    val delay_h = MuxLookup(stageDelay(0).bits.req_info.s2xlate, noS2xlate)(Seq(
      allStage -> onlyStage1,
      onlyStage1 -> onlyStage1,
      onlyStage2 -> onlyStage2
    ))
    val data_resp = DataHoldBypass(l0.io.r.resp.data, stageDelay_valid_1cycle)
    val vVec_delay = RegEnable(vVec_req, stageReq.fire)
    val hVec_delay = RegEnable(hVec_req, stageReq.fire)
    val hitVec_delay = VecInit(data_resp.zip(vVec_delay.asBools).zip(hVec_delay).map { case ((wayData, v), h) =>
      wayData.entries.hit(delay_vpn, io.csr_dup(0).satp.asid, io.csr_dup(0).vsatp.asid, io.csr_dup(0).hgatp.vmid, s2xlate = delay_h =/= noS2xlate) && v && (delay_h === h)})

    // check hit and ecc
    val check_vpn = stageCheck(0).bits.req_info.vpn
    val ramDatas = RegEnable(data_resp, stageDelay(1).fire)
    val vVec = RegEnable(vVec_delay, stageDelay(1).fire).asBools

    val hitVec = RegEnable(hitVec_delay, stageDelay(1).fire)
    val hitWayEntry = ParallelPriorityMux(hitVec zip ramDatas)
    val hitWayData = hitWayEntry.entries
    val hitWayEcc = hitWayEntry.ecc
    val hit = ParallelOR(hitVec)
    val hitWay = ParallelPriorityMux(hitVec zip (0 until l2tlbParams.l0nWays).map(_.U(log2Up(l2tlbParams.l0nWays).W)))
    val eccError = WireInit(false.B)
    if (l2tlbParams.enablePTWECC) {
      eccError := hitWayEntry.decode()
    } else {
      eccError := false.B
    }

    when (hit && stageCheck_valid_1cycle) { ptwl0replace.access(genPtwL0SetIdx(check_vpn), hitWay) }

    l0AccessPerf.zip(hitVec).map{ case (l, h) => l := h && stageCheck_valid_1cycle }
    XSDebug(stageReq.fire, p"[l0] ridx:0x${Hexadecimal(ridx)}\n")
    for (i <- 0 until l2tlbParams.l0nWays) {
      XSDebug(stageCheck_valid_1cycle, p"[l0] ramDatas(${i.U}) ${ramDatas(i)}  l0v:${vVec(i)}  hit:${hitVec(i)}\n")
    }
    XSDebug(stageCheck_valid_1cycle, p"[l0] l0Hit:${hit} l0HitData:${hitWayData} hitVec:${Binary(hitVec.asUInt)} hitWay:${hitWay} v:${vVec}\n")

    ridx.suggestName(s"l0_ridx")
    ramDatas.suggestName(s"l0_ramDatas")
    hitVec.suggestName(s"l0_hitVec")
    hitWay.suggestName(s"l0_hitWay")

    (hit, hitWayData, hitWayData.prefetch, eccError)
  }
  val l0HitPPN = l0HitData.ppns
  val l0HitPbmt = l0HitData.pbmts
  val l0HitPerm = l0HitData.perms.getOrElse(0.U.asTypeOf(Vec(PtwL0SectorSize, new PtePermBundle)))
  val l0HitValid = l0HitData.vs
  val l0HitAf = l0HitData.af

  // super page
  val spreplace = ReplacementPolicy.fromString(l2tlbParams.spReplacer, l2tlbParams.spSize)
  val (spHit, spHitData, spPre, spValid) = {
    val hitVecT = sp.zipWithIndex.map { case (e, i) => e.hit(vpn_search, io.csr_dup(0).satp.asid, io.csr_dup(0).vsatp.asid, io.csr_dup(0).hgatp.vmid, s2xlate = h_search =/= noS2xlate) && spv(i) && (sph(i) === h_search) }
    val hitVec = hitVecT.map(RegEnable(_, stageReq.fire))
    val hitData = ParallelPriorityMux(hitVec zip sp)
    val hit = ParallelOR(hitVec)

    when (hit && stageDelay_valid_1cycle) { spreplace.access(OHToUInt(hitVec)) }

    spAccessPerf.zip(hitVec).map{ case (s, h) => s := h && stageDelay_valid_1cycle }
    for (i <- 0 until l2tlbParams.spSize) {
      XSDebug(stageReq.fire, p"[sp] sp(${i.U}) ${sp(i)} hit:${sp(i).hit(vpn_search, io.csr_dup(0).satp.asid, io.csr_dup(0).vsatp.asid, io.csr_dup(0).hgatp.vmid, s2xlate = h_search =/= noS2xlate)} spv:${spv(i)}\n")
    }
    XSDebug(stageDelay_valid_1cycle, p"[sp] spHit:${hit} spHitData:${hitData} hitVec:${Binary(VecInit(hitVec).asUInt)}\n")

    VecInit(hitVecT).suggestName(s"sp_hitVecT")
    VecInit(hitVec).suggestName(s"sp_hitVec")

    (RegEnable(hit, stageDelay(1).fire),
     RegEnable(hitData, stageDelay(1).fire),
     RegEnable(hitData.prefetch, stageDelay(1).fire),
     RegEnable(hitData.v, stageDelay(1).fire))
  }
  val spHitPerm = spHitData.perm.getOrElse(0.U.asTypeOf(new PtePermBundle))
  val spHitLevel = spHitData.level.getOrElse(0.U)

  val check_res = Wire(new PageCacheRespBundle)
  check_res.l3.map(_.apply(l3Hit.get, l3Pre.get, l3HitPPN.get))
  check_res.l2.apply(l2Hit, l2Pre, l2HitPPN, l2HitPbmt)
  check_res.l1.apply(l1Hit, l1Pre, l1HitPPN, l1HitPbmt, ecc = l1eccError)
  check_res.l0.apply(l0Hit, l0Pre, l0HitPPN, l0HitPbmt, l0HitPerm, l0eccError, valid = l0HitValid, accessFault = l0HitAf)
  check_res.sp.apply(spHit, spPre, spHitData.ppn, spHitData.pbmt, spHitPerm, false.B, spHitLevel, spValid)
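
  // check_res gathers the per-level lookup results; when several levels hit at
  // once, the Mux chains below give priority l0 > sp > l1 > l2 (> l3 with Sv48).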

  val resp_res = Reg(new PageCacheRespBundle)
  when (stageCheck(1).fire) { resp_res := check_res }

  // stageResp bypass
  val bypassed = if (EnableSv48) Wire(Vec(4, Bool())) else Wire(Vec(3, Bool()))
  bypassed.indices.foreach(i =>
    bypassed(i) := stageResp.bits.bypassed(i) ||
      ValidHoldBypass(refill_bypass(stageResp.bits.req_info.vpn, i, stageResp.bits.req_info.s2xlate),
        OneCycleValid(stageCheck(1).fire, false.B) || io.refill.valid)
  )

  // stageResp bypass to hptw
  val hptw_bypassed = if (EnableSv48) Wire(Vec(4, Bool())) else Wire(Vec(3, Bool()))
  hptw_bypassed.indices.foreach(i =>
    hptw_bypassed(i) := stageResp.bits.bypassed(i) ||
      ValidHoldBypass(refill_bypass(stageResp.bits.req_info.vpn, i, stageResp.bits.req_info.s2xlate),
        io.resp.fire)
  )
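
  // Both vectors flag "a refill touched this req's vpn at level i while it
  // waited here"; the first is held until the cycle after stageCheck(1) fires
  // (or another refill arrives), the hptw one only until io.resp fires.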

  val isAllStage = stageResp.bits.req_info.s2xlate === allStage
  val isOnlyStage2 = stageResp.bits.req_info.s2xlate === onlyStage2
  val stage1Hit = (resp_res.l0.hit || resp_res.sp.hit) && isAllStage
  val idx = stageResp.bits.req_info.vpn(2, 0)
  val stage1Pf = !Mux(resp_res.l0.hit, resp_res.l0.v(idx), resp_res.sp.v)
  io.resp.bits.req_info   := stageResp.bits.req_info
  io.resp.bits.isFirst  := stageResp.bits.isFirst
  io.resp.bits.hit      := (resp_res.l0.hit || resp_res.sp.hit) && (!isAllStage || isAllStage && stage1Pf)
  if (EnableSv48) {
    io.resp.bits.bypassed := (bypassed(0) || (bypassed(1) && !resp_res.l1.hit) || (bypassed(2) && !resp_res.l2.hit) || (bypassed(3) && !resp_res.l3.get.hit)) && !isAllStage
  } else {
    io.resp.bits.bypassed := (bypassed(0) || (bypassed(1) && !resp_res.l1.hit) || (bypassed(2) && !resp_res.l2.hit)) && !isAllStage
  }
  io.resp.bits.prefetch := resp_res.l0.pre && resp_res.l0.hit || resp_res.sp.pre && resp_res.sp.hit
  io.resp.bits.toFsm.l3Hit.map(_ := resp_res.l3.get.hit && !stage1Hit && !isOnlyStage2 && !stageResp.bits.isHptwReq)
  io.resp.bits.toFsm.l2Hit := resp_res.l2.hit && !stage1Hit && !isOnlyStage2 && !stageResp.bits.isHptwReq
  io.resp.bits.toFsm.l1Hit := resp_res.l1.hit && !stage1Hit && !isOnlyStage2 && !stageResp.bits.isHptwReq
  io.resp.bits.toFsm.ppn   := Mux(resp_res.l1.hit, resp_res.l1.ppn, Mux(resp_res.l2.hit, resp_res.l2.ppn, resp_res.l3.getOrElse(0.U.asTypeOf(new PageCachePerPespBundle)).ppn))
  io.resp.bits.toFsm.stage1Hit := stage1Hit

  io.resp.bits.isHptwReq := stageResp.bits.isHptwReq
  if (EnableSv48) {
    io.resp.bits.toHptw.bypassed := (hptw_bypassed(0) || (hptw_bypassed(1) && !resp_res.l1.hit) || (hptw_bypassed(2) && !resp_res.l2.hit) || (hptw_bypassed(3) && !resp_res.l3.get.hit)) && stageResp.bits.isHptwReq
  } else {
    io.resp.bits.toHptw.bypassed := (hptw_bypassed(0) || (hptw_bypassed(1) && !resp_res.l1.hit) || (hptw_bypassed(2) && !resp_res.l2.hit)) && stageResp.bits.isHptwReq
  }
  io.resp.bits.toHptw.id := stageResp.bits.hptwId
  io.resp.bits.toHptw.l3Hit.map(_ := resp_res.l3.get.hit && stageResp.bits.isHptwReq)
  io.resp.bits.toHptw.l2Hit := resp_res.l2.hit && stageResp.bits.isHptwReq
  io.resp.bits.toHptw.l1Hit := resp_res.l1.hit && stageResp.bits.isHptwReq
  io.resp.bits.toHptw.ppn := Mux(resp_res.l1.hit, resp_res.l1.ppn, Mux(resp_res.l2.hit, resp_res.l2.ppn, resp_res.l3.getOrElse(0.U.asTypeOf(new PageCachePerPespBundle)).ppn))(ppnLen - 1, 0)
  io.resp.bits.toHptw.resp.entry.tag := stageResp.bits.req_info.vpn
  io.resp.bits.toHptw.resp.entry.asid := DontCare
  io.resp.bits.toHptw.resp.entry.vmid.map(_ := io.csr_dup(0).hgatp.vmid)
  io.resp.bits.toHptw.resp.entry.level.map(_ := Mux(resp_res.l0.hit, 0.U, resp_res.sp.level))
  io.resp.bits.toHptw.resp.entry.prefetch := from_pre(stageResp.bits.req_info.source)
  io.resp.bits.toHptw.resp.entry.ppn := Mux(resp_res.l0.hit, resp_res.l0.ppn(idx), resp_res.sp.ppn)(ppnLen - 1, 0)
  io.resp.bits.toHptw.resp.entry.pbmt := Mux(resp_res.l0.hit, resp_res.l0.pbmt(idx), resp_res.sp.pbmt)
  io.resp.bits.toHptw.resp.entry.perm.map(_ := Mux(resp_res.l0.hit, resp_res.l0.perm(idx), resp_res.sp.perm))
  io.resp.bits.toHptw.resp.entry.v := Mux(resp_res.l0.hit, resp_res.l0.v(idx), resp_res.sp.v)
  io.resp.bits.toHptw.resp.gpf := !io.resp.bits.toHptw.resp.entry.v
  io.resp.bits.toHptw.resp.gaf := Mux(resp_res.l0.hit, resp_res.l0.af(idx), false.B)

  io.resp.bits.stage1.entry.map(_.tag := stageResp.bits.req_info.vpn(vpnLen - 1, 3))
  io.resp.bits.stage1.entry.map(_.asid := Mux(stageResp.bits.req_info.hasS2xlate(), io.csr_dup(0).vsatp.asid, io.csr_dup(0).satp.asid)) // DontCare
  io.resp.bits.stage1.entry.map(_.vmid.map(_ := io.csr_dup(0).hgatp.vmid))
  if (EnableSv48) {
    io.resp.bits.stage1.entry.map(_.level.map(_ := Mux(resp_res.l0.hit, 0.U,
      Mux(resp_res.sp.hit, resp_res.sp.level,
        Mux(resp_res.l1.hit, 1.U,
          Mux(resp_res.l2.hit, 2.U, 3.U))))))
  } else {
    io.resp.bits.stage1.entry.map(_.level.map(_ := Mux(resp_res.l0.hit, 0.U,
      Mux(resp_res.sp.hit, resp_res.sp.level,
        Mux(resp_res.l1.hit, 1.U, 2.U)))))
  }
  io.resp.bits.stage1.entry.map(_.prefetch := from_pre(stageResp.bits.req_info.source))
  for (i <- 0 until tlbcontiguous) {
    if (EnableSv48) {
      io.resp.bits.stage1.entry(i).ppn := Mux(resp_res.l0.hit, resp_res.l0.ppn(i)(gvpnLen - 1, sectortlbwidth),
        Mux(resp_res.sp.hit, resp_res.sp.ppn(gvpnLen - 1, sectortlbwidth),
          Mux(resp_res.l1.hit, resp_res.l1.ppn(gvpnLen - 1, sectortlbwidth),
            Mux(resp_res.l2.hit, resp_res.l2.ppn(gvpnLen - 1, sectortlbwidth),
              resp_res.l3.get.ppn(gvpnLen - 1, sectortlbwidth)))))
      io.resp.bits.stage1.entry(i).ppn_low := Mux(resp_res.l0.hit, resp_res.l0.ppn(i)(sectortlbwidth - 1, 0),
        Mux(resp_res.sp.hit, resp_res.sp.ppn(sectortlbwidth - 1, 0),
          Mux(resp_res.l1.hit, resp_res.l1.ppn(sectortlbwidth - 1, 0),
            Mux(resp_res.l2.hit, resp_res.l2.ppn(sectortlbwidth - 1, 0),
              resp_res.l3.get.ppn(sectortlbwidth - 1, 0)))))
      io.resp.bits.stage1.entry(i).v := Mux(resp_res.l0.hit, resp_res.l0.v(i),
        Mux(resp_res.sp.hit, resp_res.sp.v,
          Mux(resp_res.l1.hit, resp_res.l1.v,
            Mux(resp_res.l2.hit, resp_res.l2.v,
              resp_res.l3.get.v))))
    } else {
      io.resp.bits.stage1.entry(i).ppn := Mux(resp_res.l0.hit, resp_res.l0.ppn(i)(gvpnLen - 1, sectortlbwidth),
        Mux(resp_res.sp.hit, resp_res.sp.ppn(gvpnLen - 1, sectortlbwidth),
          Mux(resp_res.l1.hit, resp_res.l1.ppn(gvpnLen - 1, sectortlbwidth),
            resp_res.l2.ppn(gvpnLen - 1, sectortlbwidth))))
      io.resp.bits.stage1.entry(i).ppn_low := Mux(resp_res.l0.hit, resp_res.l0.ppn(i)(sectortlbwidth - 1, 0),
        Mux(resp_res.sp.hit, resp_res.sp.ppn(sectortlbwidth - 1, 0),
          Mux(resp_res.l1.hit, resp_res.l1.ppn(sectortlbwidth - 1, 0),
            resp_res.l2.ppn(sectortlbwidth - 1, 0))))
      io.resp.bits.stage1.entry(i).v := Mux(resp_res.l0.hit, resp_res.l0.v(i),
        Mux(resp_res.sp.hit, resp_res.sp.v,
          Mux(resp_res.l1.hit, resp_res.l1.v,
            resp_res.l2.v)))
    }
    io.resp.bits.stage1.entry(i).pbmt := Mux(resp_res.l0.hit, resp_res.l0.pbmt(i),
      Mux(resp_res.sp.hit, resp_res.sp.pbmt,
        Mux(resp_res.l1.hit, resp_res.l1.pbmt,
          resp_res.l2.pbmt)))
    io.resp.bits.stage1.entry(i).perm.map(_ := Mux(resp_res.l0.hit, resp_res.l0.perm(i), Mux(resp_res.sp.hit, resp_res.sp.perm, 0.U.asTypeOf(new PtePermBundle))))
    io.resp.bits.stage1.entry(i).pf := !io.resp.bits.stage1.entry(i).v
    io.resp.bits.stage1.entry(i).af := Mux(resp_res.l0.hit, resp_res.l0.af(i), false.B)
  }
  io.resp.bits.stage1.pteidx := UIntToOH(idx).asBools
  io.resp.bits.stage1.not_super := Mux(resp_res.l0.hit, true.B, false.B)
  io.resp.valid := stageResp.valid
  XSError(stageResp.valid && resp_res.l0.hit && resp_res.sp.hit, "normal page and super page both hit")
  XSError(stageResp.valid && io.resp.bits.hit && bypassed(0), "page cache, bypassed but hit")

  // refill Perf
  val l3RefillPerf = if (EnableSv48) Some(Wire(Vec(l2tlbParams.l3Size, Bool()))) else None
  val l2RefillPerf = Wire(Vec(l2tlbParams.l2Size, Bool()))
  val l1RefillPerf = Wire(Vec(l2tlbParams.l1nWays, Bool()))
  val l0RefillPerf = Wire(Vec(l2tlbParams.l0nWays, Bool()))
  val spRefillPerf = Wire(Vec(l2tlbParams.spSize, Bool()))
  l3RefillPerf.map(_.map(_ := false.B))
  l2RefillPerf.map(_ := false.B)
  l1RefillPerf.map(_ := false.B)
  l0RefillPerf.map(_ := false.B)
  spRefillPerf.map(_ := false.B)

  // refill
  l1.io.w.req <> DontCare
  l0.io.w.req <> DontCare
  l1.io.w.req.valid := false.B
  l0.io.w.req.valid := false.B

  val memRdata = refill.ptes
  val memPtes = (0 until (l2tlbParams.blockBytes/(XLEN/8))).map(i => memRdata((i+1)*XLEN-1, i*XLEN).asTypeOf(new PteBundle))
  val memSelData = io.refill.bits.sel_pte_dup
  val memPte = memSelData.map(a => a.asTypeOf(new PteBundle))
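
  // e.g. with a 64-byte refill block and XLEN = 64 (hypothetical parameters),
  // memRdata splits into 8 PteBundles; memSelData is the single PTE the walker
  // selected, duplicated three times (sel_pte_dup) purely to reduce fanout.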

  // TODO: handle sfenceLatch outside
  if (EnableSv48) {
    when (!flush_dup(2) && refill.levelOH.l3.get && !memPte(2).isLeaf() && !memPte(2).isPf(refill.level_dup(2))
    && Mux(refill.req_info_dup(2).s2xlate === allStage, !memPte(2).isStage1Gpf(io.csr_dup(2).vsatp.mode), Mux(refill.req_info_dup(2).s2xlate === onlyStage1, !(memPte(2).isAf() || memPte(2).isStage1Gpf(io.csr_dup(2).vsatp.mode)), Mux(refill.req_info_dup(2).s2xlate === onlyStage2, !memPte(2).isGpf(refill.level_dup(2)), !memPte(2).isAf())))) {
      val refillIdx = replaceWrapper(l3v.get, ptwl3replace.get.way)
      refillIdx.suggestName(s"Ptwl3RefillIdx")
      val rfOH = UIntToOH(refillIdx)
      l3.get(refillIdx).refill(
        refill.req_info_dup(2).vpn,
        Mux(refill.req_info_dup(2).s2xlate =/= noS2xlate, io.csr_dup(2).vsatp.asid, io.csr_dup(2).satp.asid),
        io.csr_dup(2).hgatp.vmid,
        memSelData(2),
        3.U,
        refill_prefetch_dup(2)
      )
      ptwl3replace.get.access(refillIdx)
      l3v.get := l3v.get | rfOH
      l3g.get := (l3g.get & ~rfOH) | Mux(memPte(2).perm.g, rfOH, 0.U)
      l3h.get(refillIdx) := refill_h(2)

      for (i <- 0 until l2tlbParams.l3Size) {
        l3RefillPerf.get(i) := i.U === refillIdx
      }

      XSDebug(p"[l3 refill] refillIdx:${refillIdx} refillEntry:${l3.get(refillIdx).genPtwEntry(refill.req_info_dup(2).vpn, Mux(refill.req_info_dup(2).s2xlate =/= noS2xlate, io.csr_dup(2).vsatp.asid, io.csr_dup(2).satp.asid), memSelData(2), 0.U, prefetch = refill_prefetch_dup(2))}\n")
      XSDebug(p"[l3 refill] l3v:${Binary(l3v.get)}->${Binary(l3v.get | rfOH)} l3g:${Binary(l3g.get)}->${Binary((l3g.get & ~rfOH) | Mux(memPte(2).perm.g, rfOH, 0.U))}\n")

      refillIdx.suggestName(s"l3_refillIdx")
      rfOH.suggestName(s"l3_rfOH")
    }
  }

  when (!flush_dup(2) && refill.levelOH.l2 && !memPte(2).isLeaf() && !memPte(2).isPf(refill.level_dup(2))
    && Mux(refill.req_info_dup(2).s2xlate === allStage, !memPte(2).isStage1Gpf(io.csr_dup(2).vsatp.mode), Mux(refill.req_info_dup(2).s2xlate === onlyStage1, !(memPte(2).isAf() || memPte(2).isStage1Gpf(io.csr_dup(2).vsatp.mode)), Mux(refill.req_info_dup(2).s2xlate === onlyStage2, !memPte(2).isGpf(refill.level_dup(2)), !memPte(2).isAf())))) {
    val refillIdx = replaceWrapper(l2v, ptwl2replace.way)
    refillIdx.suggestName(s"Ptwl2RefillIdx")
    val rfOH = UIntToOH(refillIdx)
    l2(refillIdx).refill(
      refill.req_info_dup(2).vpn,
      Mux(refill.req_info_dup(2).s2xlate =/= noS2xlate, io.csr_dup(2).vsatp.asid, io.csr_dup(2).satp.asid),
      io.csr_dup(2).hgatp.vmid,
      memSelData(2),
      2.U,
      refill_prefetch_dup(2)
    )
    ptwl2replace.access(refillIdx)
    l2v := l2v | rfOH
    l2g := (l2g & ~rfOH) | Mux(memPte(2).perm.g, rfOH, 0.U)
    l2h(refillIdx) := refill_h(2)

    for (i <- 0 until l2tlbParams.l2Size) {
      l2RefillPerf(i) := i.U === refillIdx
    }

    XSDebug(p"[l2 refill] refillIdx:${refillIdx} refillEntry:${l2(refillIdx).genPtwEntry(refill.req_info_dup(2).vpn, Mux(refill.req_info_dup(2).s2xlate =/= noS2xlate, io.csr_dup(2).vsatp.asid, io.csr_dup(2).satp.asid), memSelData(2), 0.U, prefetch = refill_prefetch_dup(2))}\n")
    XSDebug(p"[l2 refill] l2v:${Binary(l2v)}->${Binary(l2v | rfOH)} l2g:${Binary(l2g)}->${Binary((l2g & ~rfOH) | Mux(memPte(2).perm.g, rfOH, 0.U))}\n")

    refillIdx.suggestName(s"l2_refillIdx")
    rfOH.suggestName(s"l2_rfOH")
  }

  when (!flush_dup(1) && refill.levelOH.l1 && !memPte(1).isLeaf() && !memPte(1).isPf(refill.level_dup(1))
  && Mux(refill.req_info_dup(1).s2xlate === allStage, !memPte(1).isStage1Gpf(io.csr_dup(1).vsatp.mode), Mux(refill.req_info_dup(1).s2xlate === onlyStage1, !(memPte(1).isAf() || memPte(1).isStage1Gpf(io.csr_dup(1).vsatp.mode)), Mux(refill.req_info_dup(1).s2xlate === onlyStage2, !memPte(1).isGpf(refill.level_dup(1)), !memPte(1).isAf())))) {
    val refillIdx = genPtwL1SetIdx(refill.req_info_dup(1).vpn)
    val victimWay = replaceWrapper(getl1vSet(refill.req_info_dup(1).vpn), ptwl1replace.way(refillIdx))
    val victimWayOH = UIntToOH(victimWay)
    val rfvOH = UIntToOH(Cat(refillIdx, victimWay))
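    // rfvOH indexes the flat l1v/l1g registers at bit (set * nWays + way);
    // e.g. set 3, way 2 with 8 ways (hypothetical) sets bit 26.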
    val wdata = Wire(l1EntryType)
    wdata.gen(
      vpn = refill.req_info_dup(1).vpn,
      asid = Mux(refill.req_info_dup(1).s2xlate =/= noS2xlate, io.csr_dup(1).vsatp.asid, io.csr_dup(1).satp.asid),
      vmid = io.csr_dup(1).hgatp.vmid,
      data = memRdata,
      levelUInt = 1.U,
      refill_prefetch_dup(1),
      refill.req_info_dup(1).s2xlate
    )
    l1.io.w.apply(
      valid = true.B,
      setIdx = refillIdx,
      data = wdata,
      waymask = victimWayOH
    )
    ptwl1replace.access(refillIdx, victimWay)
    l1v := l1v | rfvOH
    l1g := l1g & ~rfvOH | Mux(Cat(memPtes.map(_.perm.g)).andR, rfvOH, 0.U)
    l1h(refillIdx)(victimWay) := refill_h(1)

    for (i <- 0 until l2tlbParams.l1nWays) {
      l1RefillPerf(i) := i.U === victimWay
    }

    XSDebug(p"[l1 refill] refillIdx:0x${Hexadecimal(refillIdx)} victimWay:${victimWay} victimWayOH:${Binary(victimWayOH)} rfvOH(in UInt):${Cat(refillIdx, victimWay)}\n")
    XSDebug(p"[l1 refill] refilldata:0x${wdata}\n")
    XSDebug(p"[l1 refill] l1v:${Binary(l1v)} -> ${Binary(l1v | rfvOH)}\n")
    XSDebug(p"[l1 refill] l1g:${Binary(l1g)} -> ${Binary(l1g & ~rfvOH | Mux(Cat(memPtes.map(_.perm.g)).andR, rfvOH, 0.U))}\n")

    refillIdx.suggestName(s"l1_refillIdx")
    victimWay.suggestName(s"l1_victimWay")
    victimWayOH.suggestName(s"l1_victimWayOH")
    rfvOH.suggestName(s"l1_rfvOH")
  }

  when (!flush_dup(0) && refill.levelOH.l0
  && Mux(refill.req_info_dup(0).s2xlate === allStage, !memPte(0).isStage1Gpf(io.csr_dup(0).vsatp.mode), Mux(refill.req_info_dup(0).s2xlate === onlyStage1, !(memPte(0).isAf() || memPte(0).isStage1Gpf(io.csr_dup(0).vsatp.mode)), Mux(refill.req_info_dup(0).s2xlate === onlyStage2, !memPte(0).isGpf(refill.level_dup(0)), !memPte(0).isAf())))) {
    val refillIdx = genPtwL0SetIdx(refill.req_info_dup(0).vpn)
    val victimWay = replaceWrapper(getl0vSet(refill.req_info_dup(0).vpn), ptwl0replace.way(refillIdx))
    val victimWayOH = UIntToOH(victimWay)
    val rfvOH = UIntToOH(Cat(refillIdx, victimWay))
    val wdata = Wire(l0EntryType)
    wdata.gen(
      vpn =  refill.req_info_dup(0).vpn,
      asid = Mux(refill.req_info_dup(0).s2xlate =/= noS2xlate, io.csr_dup(0).vsatp.asid, io.csr_dup(0).satp.asid),
      vmid = io.csr_dup(0).hgatp.vmid,
      data = memRdata,
      levelUInt = 0.U,
      refill_prefetch_dup(0),
      refill.req_info_dup(0).s2xlate
    )
    l0.io.w.apply(
      valid = true.B,
      setIdx = refillIdx,
      data = wdata,
      waymask = victimWayOH
    )
    ptwl0replace.access(refillIdx, victimWay)
    l0v := l0v | rfvOH
    l0g := l0g & ~rfvOH | Mux(Cat(memPtes.map(_.perm.g)).andR, rfvOH, 0.U)
    l0h(refillIdx)(victimWay) := refill_h(0)

    for (i <- 0 until l2tlbParams.l0nWays) {
      l0RefillPerf(i) := i.U === victimWay
    }

    XSDebug(p"[l0 refill] refillIdx:0x${Hexadecimal(refillIdx)} victimWay:${victimWay} victimWayOH:${Binary(victimWayOH)} rfvOH(in UInt):${Cat(refillIdx, victimWay)}\n")
    XSDebug(p"[l0 refill] refilldata:0x${wdata}\n")
    XSDebug(p"[l0 refill] l0v:${Binary(l0v)} -> ${Binary(l0v | rfvOH)}\n")
    XSDebug(p"[l0 refill] l0g:${Binary(l0g)} -> ${Binary(l0g & ~rfvOH | Mux(Cat(memPtes.map(_.perm.g)).andR, rfvOH, 0.U))}\n")

    refillIdx.suggestName(s"l0_refillIdx")
    victimWay.suggestName(s"l0_victimWay")
    victimWayOH.suggestName(s"l0_victimWayOH")
    rfvOH.suggestName(s"l0_rfvOH")
  }

  // misc entries: super & invalid
  when (!flush_dup(0) && refill.levelOH.sp && (memPte(0).isLeaf() || memPte(0).isPf(refill.level_dup(0)))
  && Mux(refill.req_info_dup(0).s2xlate === allStage, !memPte(0).isStage1Gpf(io.csr_dup(0).vsatp.mode), Mux(refill.req_info_dup(0).s2xlate === onlyStage1, !(memPte(0).isAf() || memPte(0).isStage1Gpf(io.csr_dup(0).vsatp.mode)), Mux(refill.req_info_dup(0).s2xlate === onlyStage2, !memPte(0).isGpf(refill.level_dup(0)), !memPte(0).isAf())))) {
    val refillIdx = spreplace.way // LFSR64()(log2Up(l2tlbParams.spSize)-1,0) // TODO: may be LRU
    val rfOH = UIntToOH(refillIdx)
    sp(refillIdx).refill(
      refill.req_info_dup(0).vpn,
      Mux(refill.req_info_dup(0).s2xlate =/= noS2xlate, io.csr_dup(0).vsatp.asid, io.csr_dup(0).satp.asid),
      io.csr_dup(0).hgatp.vmid,
      memSelData(0),
      refill.level_dup(0),
      refill_prefetch_dup(0),
      !memPte(0).isPf(refill.level_dup(0)),
    )
    spreplace.access(refillIdx)
    spv := spv | rfOH
    spg := spg & ~rfOH | Mux(memPte(0).perm.g, rfOH, 0.U)
    sph(refillIdx) := refill_h(0)

    for (i <- 0 until l2tlbParams.spSize) {
      spRefillPerf(i) := i.U === refillIdx
    }

    XSDebug(p"[sp refill] refillIdx:${refillIdx} refillEntry:${sp(refillIdx).genPtwEntry(refill.req_info_dup(0).vpn, Mux(refill.req_info_dup(0).s2xlate =/= noS2xlate, io.csr_dup(0).vsatp.asid, io.csr_dup(0).satp.asid), memSelData(0), refill.level_dup(0), refill_prefetch_dup(0))}\n")
    XSDebug(p"[sp refill] spv:${Binary(spv)}->${Binary(spv | rfOH)} spg:${Binary(spg)}->${Binary(spg & ~rfOH | Mux(memPte(0).perm.g, rfOH, 0.U))}\n")

    refillIdx.suggestName(s"sp_refillIdx")
    rfOH.suggestName(s"sp_rfOH")
  }

  val l1eccFlush = resp_res.l1.ecc && stageResp_valid_1cycle_dup(0) // RegNext(l1eccError, init = false.B)
  val l0eccFlush = resp_res.l0.ecc && stageResp_valid_1cycle_dup(1) // RegNext(l0eccError, init = false.B)
  val eccVpn = stageResp.bits.req_info.vpn

  XSError(l1eccFlush, "l2tlb.cache.l1 ecc error. Should not happen at sim stage")
  XSError(l0eccFlush, "l2tlb.cache.l0 ecc error. Should not happen at sim stage")
  when (l1eccFlush) {
    val flushSetIdxOH = UIntToOH(genPtwL1SetIdx(eccVpn))
    val flushMask = VecInit(flushSetIdxOH.asBools.map { a => Fill(l2tlbParams.l1nWays, a.asUInt) }).asUInt
    l1v := l1v & ~flushMask
    l1g := l1g & ~flushMask
  }

  when (l0eccFlush) {
    val flushSetIdxOH = UIntToOH(genPtwL0SetIdx(eccVpn))
    val flushMask = VecInit(flushSetIdxOH.asBools.map { a => Fill(l2tlbParams.l0nWays, a.asUInt) }).asUInt
    l0v := l0v & ~flushMask
    l0g := l0g & ~flushMask
  }

  // sfence for l0
  val sfence_valid_l0 = sfence_dup(0).valid && !sfence_dup(0).bits.hg && !sfence_dup(0).bits.hv
  when (sfence_valid_l0) {
    val l0hhit = VecInit(l0h.flatMap(_.map{a => io.csr_dup(0).priv.virt && a === onlyStage1 || !io.csr_dup(0).priv.virt && a === noS2xlate})).asUInt
    val sfence_vpn = sfence_dup(0).bits.addr(sfence_dup(0).bits.addr.getWidth-1, offLen)
    when (sfence_dup(0).bits.rs1/*va*/) {
      when (sfence_dup(0).bits.rs2) {
        // all va && all asid
        l0v := l0v & ~l0hhit
      } .otherwise {
        // all va && specific asid except global
        l0v := l0v & (l0g | ~l0hhit)
      }
    } .otherwise {
      // val flushMask = UIntToOH(genTlbl1Idx(sfence.bits.addr(sfence.bits.addr.getWidth-1, offLen)))
      val flushSetIdxOH = UIntToOH(genPtwL0SetIdx(sfence_vpn))
      // val flushMask = VecInit(flushSetIdxOH.asBools.map(Fill(l2tlbParams.l0nWays, _.asUInt))).asUInt
      val flushMask = VecInit(flushSetIdxOH.asBools.map { a => Fill(l2tlbParams.l0nWays, a.asUInt) }).asUInt
      flushSetIdxOH.suggestName(s"sfence_nrs1_flushSetIdxOH")
      flushMask.suggestName(s"sfence_nrs1_flushMask")

      when (sfence_dup(0).bits.rs2) {
        // specific leaf of addr && all asid
        l0v := l0v & ~flushMask & ~l0hhit
      } .otherwise {
        // specific leaf of addr && specific asid
        l0v := l0v & (~flushMask | l0g | ~l0hhit)
      }
    }
  }
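
  // Note: bits.rs1/bits.rs2 encode "rs1 is x0" / "rs2 is x0". E.g. sfence.vma
  // x0, a0 (all addresses, one ASID) takes the "all va && specific asid" arm;
  // l0 then keeps only global entries of the matching stage, a conservative
  // flush since the per-entry ASIDs live in SRAM and cannot be probed here.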

  // hfencev, simple implementation for l0
  val hfencev_valid_l0 = sfence_dup(0).valid && sfence_dup(0).bits.hv
  when(hfencev_valid_l0) {
    val flushMask = VecInit(l0h.flatMap(_.map(_ === onlyStage1))).asUInt
    l0v := l0v & ~flushMask // all VS-stage l0 pte
  }

  // hfenceg, simple implementation for l0
  val hfenceg_valid_l0 = sfence_dup(0).valid && sfence_dup(0).bits.hg
  when(hfenceg_valid_l0) {
    val flushMask = VecInit(l0h.flatMap(_.map(_ === onlyStage2))).asUInt
    l0v := l0v & ~flushMask // all G-stage l0 pte
  }

  val l2asidhit = VecInit(l2asids.map(_ === sfence_dup(2).bits.id)).asUInt
  val spasidhit = VecInit(spasids.map(_ === sfence_dup(0).bits.id)).asUInt
  val sfence_valid = sfence_dup(0).valid && !sfence_dup(0).bits.hg && !sfence_dup(0).bits.hv
  when (sfence_valid) {
    val l2vmidhit = VecInit(l2vmids.map(_.getOrElse(0.U) === io.csr_dup(2).hgatp.vmid)).asUInt
    val spvmidhit = VecInit(spvmids.map(_.getOrElse(0.U) === io.csr_dup(0).hgatp.vmid)).asUInt
    val l2hhit = VecInit(l2h.map{a => io.csr_dup(2).priv.virt && a === onlyStage1 || !io.csr_dup(2).priv.virt && a === noS2xlate}).asUInt
    val sphhit = VecInit(sph.map{a => io.csr_dup(0).priv.virt && a === onlyStage1 || !io.csr_dup(0).priv.virt && a === noS2xlate}).asUInt
    val l1hhit = VecInit(l1h.flatMap(_.map{a => io.csr_dup(1).priv.virt && a === onlyStage1 || !io.csr_dup(1).priv.virt && a === noS2xlate})).asUInt
    val sfence_vpn = sfence_dup(0).bits.addr(sfence_dup(0).bits.addr.getWidth-1, offLen)

    when (sfence_dup(0).bits.rs1/*va*/) {
      when (sfence_dup(0).bits.rs2) {
        // all va && all asid
        l1v := l1v & ~l1hhit
        l2v := l2v & ~(l2hhit & VecInit(l2vmidhit.asBools.map{a => io.csr_dup(2).priv.virt && a || !io.csr_dup(2).priv.virt}).asUInt)
        spv := spv & ~(sphhit & VecInit(spvmidhit.asBools.map{a => io.csr_dup(0).priv.virt && a || !io.csr_dup(0).priv.virt}).asUInt)
      } .otherwise {
        // all va && specific asid except global
        l1v := l1v & (l1g | ~l1hhit)
        l2v := l2v & ~(~l2g & l2hhit & l2asidhit & VecInit(l2vmidhit.asBools.map{a => io.csr_dup(2).priv.virt && a || !io.csr_dup(2).priv.virt}).asUInt)
        spv := spv & ~(~spg & sphhit & spasidhit & VecInit(spvmidhit.asBools.map{a => io.csr_dup(0).priv.virt && a || !io.csr_dup(0).priv.virt}).asUInt)
      }
    } .otherwise {
      when (sfence_dup(0).bits.rs2) {
        // specific leaf of addr && all asid
        spv := spv & ~(sphhit & VecInit(sp.map(_.hit(sfence_vpn, sfence_dup(0).bits.id, sfence_dup(0).bits.id, io.csr_dup(0).hgatp.vmid, ignoreAsid = true, s2xlate = io.csr_dup(0).priv.virt))).asUInt)
      } .otherwise {
        // specific leaf of addr && specific asid
        spv := spv & ~(~spg & sphhit & VecInit(sp.map(_.hit(sfence_vpn, sfence_dup(0).bits.id, sfence_dup(0).bits.id, io.csr_dup(0).hgatp.vmid, s2xlate = io.csr_dup(0).priv.virt))).asUInt)
      }
    }
  }

  val hfencev_valid = sfence_dup(0).valid && sfence_dup(0).bits.hv
  when (hfencev_valid) {
    val l2vmidhit = VecInit(l2vmids.map(_.getOrElse(0.U) === io.csr_dup(2).hgatp.vmid)).asUInt
    val spvmidhit = VecInit(spvmids.map(_.getOrElse(0.U) === io.csr_dup(0).hgatp.vmid)).asUInt
    val l2hhit = VecInit(l2h.map(_ === onlyStage1)).asUInt
    val sphhit = VecInit(sph.map(_ === onlyStage1)).asUInt
    val l1hhit = VecInit(l1h.flatMap(_.map(_ === onlyStage1))).asUInt
    val hfencev_vpn = sfence_dup(0).bits.addr(sfence_dup(0).bits.addr.getWidth-1, offLen)
    when(sfence_dup(0).bits.rs1) {
      when(sfence_dup(0).bits.rs2) {
        l1v := l1v & ~l1hhit
        l2v := l2v & ~(l2hhit & l2vmidhit)
        spv := spv & ~(sphhit & spvmidhit)
      }.otherwise {
        l1v := l1v & (l1g | ~l1hhit)
        l2v := l2v & ~(~l2g & l2hhit & l2asidhit & l2vmidhit)
        spv := spv & ~(~spg & sphhit & spasidhit & spvmidhit)
      }
    }.otherwise {
      when(sfence_dup(0).bits.rs2) {
        spv := spv & ~(sphhit & VecInit(sp.map(_.hit(hfencev_vpn, sfence_dup(0).bits.id, sfence_dup(0).bits.id, io.csr_dup(0).hgatp.vmid, ignoreAsid = true, s2xlate = true.B))).asUInt)
      }.otherwise {
        spv := spv & ~(~spg & sphhit & VecInit(sp.map(_.hit(hfencev_vpn, sfence_dup(0).bits.id, sfence_dup(0).bits.id, io.csr_dup(0).hgatp.vmid, s2xlate = true.B))).asUInt)
      }
    }
  }

  val hfenceg_valid = sfence_dup(0).valid && sfence_dup(0).bits.hg
  when(hfenceg_valid) {
    val l2vmidhit = VecInit(l2vmids.map(_.getOrElse(0.U) === sfence_dup(2).bits.id)).asUInt
    val spvmidhit = VecInit(spvmids.map(_.getOrElse(0.U) === sfence_dup(0).bits.id)).asUInt
    val l2hhit = VecInit(l2h.map(_ === onlyStage2)).asUInt
    val sphhit = VecInit(sph.map(_ === onlyStage2)).asUInt
    val l1hhit = VecInit(l1h.flatMap(_.map(_ === onlyStage2))).asUInt
    val hfenceg_gvpn = (sfence_dup(0).bits.addr << 2)(sfence_dup(0).bits.addr.getWidth - 1, offLen)
    when(sfence_dup(0).bits.rs1) {
      when(sfence_dup(0).bits.rs2) {
        l1v := l1v & ~l1hhit
        l2v := l2v & ~l2hhit
        spv := spv & ~sphhit
      }.otherwise {
        l1v := l1v & ~l1hhit
        l2v := l2v & ~(l2hhit & l2vmidhit)
        spv := spv & ~(sphhit & spvmidhit)
      }
    }.otherwise {
      when(sfence_dup(0).bits.rs2) {
        spv := spv & ~(sphhit & VecInit(sp.map(_.hit(hfenceg_gvpn, 0.U, 0.U, sfence_dup(0).bits.id, ignoreAsid = true, s2xlate = false.B))).asUInt)
      }.otherwise {
        spv := spv & ~(~spg & sphhit & VecInit(sp.map(_.hit(hfenceg_gvpn, 0.U, 0.U, sfence_dup(0).bits.id, ignoreAsid = true, s2xlate = true.B))).asUInt)
      }
    }
  }

  if (EnableSv48) {
    val l3asidhit = VecInit(l3asids.get.map(_ === sfence_dup(2).bits.id)).asUInt
    val l3vmidhit = VecInit(l3vmids.get.map(_.getOrElse(0.U) === io.csr_dup(2).hgatp.vmid)).asUInt
    val l3hhit = VecInit(l3h.get.map{a => io.csr_dup(2).priv.virt && a === onlyStage1 || !io.csr_dup(2).priv.virt && a === noS2xlate}).asUInt

    when (sfence_valid) {
      val l3vmidhit = VecInit(l3vmids.get.map(_.getOrElse(0.U) === io.csr_dup(2).hgatp.vmid)).asUInt
      val l3hhit = VecInit(l3h.get.map{a => io.csr_dup(2).priv.virt && a === onlyStage1 || !io.csr_dup(2).priv.virt && a === noS2xlate}).asUInt
      val sfence_vpn = sfence_dup(2).bits.addr(sfence_dup(2).bits.addr.getWidth-1, offLen)

      when (sfence_dup(2).bits.rs1/*va*/) {
        when (sfence_dup(2).bits.rs2) {
          // all va && all asid
          l3v.map(_ := l3v.get & ~(l3hhit & VecInit(l3vmidhit.asBools.map{a => io.csr_dup(2).priv.virt && a || !io.csr_dup(2).priv.virt}).asUInt))
        } .otherwise {
          // all va && specific asid except global
          l3v.map(_ := l3v.get & ~(~l3g.get & l3hhit & l3asidhit & VecInit(l3vmidhit.asBools.map{a => io.csr_dup(2).priv.virt && a || !io.csr_dup(2).priv.virt}).asUInt))
        }
      }
    }

    when (hfencev_valid) {
      val l3vmidhit = VecInit(l3vmids.get.map(_.getOrElse(0.U) === io.csr_dup(2).hgatp.vmid)).asUInt
      val l3hhit = VecInit(l3h.get.map(_ === onlyStage1)).asUInt
      val hfencev_vpn = sfence_dup(2).bits.addr(sfence_dup(2).bits.addr.getWidth-1, offLen)
      when(sfence_dup(2).bits.rs1) {
        when(sfence_dup(2).bits.rs2) {
          l3v.map(_ := l3v.get & ~(l3hhit & l3vmidhit))
        }.otherwise {
          l3v.map(_ := l3v.get & ~(~l3g.get & l3hhit & l3asidhit & l3vmidhit))
        }
      }
    }

    when (hfenceg_valid) {
      val l3vmidhit = VecInit(l3vmids.get.map(_.getOrElse(0.U) === sfence_dup(2).bits.id)).asUInt
      val l3hhit = VecInit(l3h.get.map(_ === onlyStage2)).asUInt
      val hfenceg_gvpn = (sfence_dup(2).bits.addr << 2)(sfence_dup(2).bits.addr.getWidth - 1, offLen)
      when(sfence_dup(2).bits.rs1) {
        when(sfence_dup(2).bits.rs2) {
          l3v.map(_ := l3v.get & ~l3hhit)
        }.otherwise {
          l3v.map(_ := l3v.get & ~(l3hhit & l3vmidhit))
        }
      }
    }
  }

  def InsideStageConnect(in: DecoupledIO[PtwCacheReq], out: DecoupledIO[PtwCacheReq], inFire: Bool): Unit = {
    in.ready := !in.valid || out.ready
    out.valid := in.valid
    out.bits := in.bits
    out.bits.bypassed.zip(in.bits.bypassed).zipWithIndex.map{ case (b, i) =>
      val bypassed_reg = Reg(Bool())
      val bypassed_wire = refill_bypass(in.bits.req_info.vpn, i, in.bits.req_info.s2xlate) && io.refill.valid
      when (inFire) { bypassed_reg := bypassed_wire }
      .elsewhen (io.refill.valid) { bypassed_reg := bypassed_reg || bypassed_wire }

      b._1 := b._2 || (bypassed_wire || (bypassed_reg && !inFire))
    }
  }
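
  // The bypassed register above accumulates refill hits that arrive while a
  // request sits in a stage: reloaded on inFire, OR-ed on every refill, so a
  // bypass observed in an earlier cycle is not lost before the stage advances.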

  // Perf Count
  val resp_l0 = resp_res.l0.hit
  val resp_sp = resp_res.sp.hit
  val resp_l3_pre = if (EnableSv48) Some(resp_res.l3.get.pre) else None
  val resp_l2_pre = resp_res.l2.pre
  val resp_l1_pre = resp_res.l1.pre
  val resp_l0_pre = resp_res.l0.pre
  val resp_sp_pre = resp_res.sp.pre
  val base_valid_access_0 = !from_pre(io.resp.bits.req_info.source) && io.resp.fire
  XSPerfAccumulate("access", base_valid_access_0)
  if (EnableSv48) {
    XSPerfAccumulate("l3_hit", base_valid_access_0 && io.resp.bits.toFsm.l3Hit.get && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
  }
  XSPerfAccumulate("l2_hit", base_valid_access_0 && io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
  XSPerfAccumulate("l1_hit", base_valid_access_0 && io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
  XSPerfAccumulate("l0_hit", base_valid_access_0 && resp_l0)
  XSPerfAccumulate("sp_hit", base_valid_access_0 && resp_sp)
  XSPerfAccumulate("pte_hit", base_valid_access_0 && io.resp.bits.hit)

  if (EnableSv48) {
    XSPerfAccumulate("l3_hit_pre", base_valid_access_0 && resp_l3_pre.get && io.resp.bits.toFsm.l3Hit.get && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
  }
1097  XSPerfAccumulate("l2_hit_pre", base_valid_access_0 && resp_l2_pre && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1098  XSPerfAccumulate("l1_hit_pre", base_valid_access_0 && resp_l1_pre && io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
1099  XSPerfAccumulate("l0_hit_pre", base_valid_access_0 && resp_l0_pre && resp_l0)
1100  XSPerfAccumulate("sp_hit_pre", base_valid_access_0 && resp_sp_pre && resp_sp)
1101  XSPerfAccumulate("pte_hit_pre",base_valid_access_0 && (resp_l0_pre && resp_l0 || resp_sp_pre && resp_sp) && io.resp.bits.hit)
1102
  val base_valid_access_1 = from_pre(io.resp.bits.req_info.source) && io.resp.fire
  XSPerfAccumulate("pre_access", base_valid_access_1)
  if (EnableSv48) {
    XSPerfAccumulate("pre_l3_hit", base_valid_access_1 && io.resp.bits.toFsm.l3Hit.get && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
  }
  XSPerfAccumulate("pre_l2_hit", base_valid_access_1 && io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
  XSPerfAccumulate("pre_l1_hit", base_valid_access_1 && io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
  XSPerfAccumulate("pre_l0_hit", base_valid_access_1 && resp_l0)
  XSPerfAccumulate("pre_sp_hit", base_valid_access_1 && resp_sp)
  XSPerfAccumulate("pre_pte_hit", base_valid_access_1 && io.resp.bits.hit)

  if (EnableSv48) {
    XSPerfAccumulate("pre_l3_hit_pre", base_valid_access_1 && resp_l3_pre.get && io.resp.bits.toFsm.l3Hit.get && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
  }
  XSPerfAccumulate("pre_l2_hit_pre", base_valid_access_1 && resp_l2_pre && io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
  XSPerfAccumulate("pre_l1_hit_pre", base_valid_access_1 && resp_l1_pre && io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
  XSPerfAccumulate("pre_l0_hit_pre", base_valid_access_1 && resp_l0_pre && resp_l0)
  XSPerfAccumulate("pre_sp_hit_pre", base_valid_access_1 && resp_sp_pre && resp_sp)
  XSPerfAccumulate("pre_pte_hit_pre", base_valid_access_1 && (resp_l0_pre && resp_l0 || resp_sp_pre && resp_sp) && io.resp.bits.hit)

  val base_valid_access_2 = stageResp.bits.isFirst && !from_pre(io.resp.bits.req_info.source) && io.resp.fire
  XSPerfAccumulate("access_first", base_valid_access_2)
  if (EnableSv48) {
    XSPerfAccumulate("l3_hit_first", base_valid_access_2 && io.resp.bits.toFsm.l3Hit.get && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
  }
  XSPerfAccumulate("l2_hit_first", base_valid_access_2 && io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
  XSPerfAccumulate("l1_hit_first", base_valid_access_2 && io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
  XSPerfAccumulate("l0_hit_first", base_valid_access_2 && resp_l0)
  XSPerfAccumulate("sp_hit_first", base_valid_access_2 && resp_sp)
  XSPerfAccumulate("pte_hit_first", base_valid_access_2 && io.resp.bits.hit)

  if (EnableSv48) {
    XSPerfAccumulate("l3_hit_pre_first", base_valid_access_2 && resp_l3_pre.get && io.resp.bits.toFsm.l3Hit.get && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
  }
  XSPerfAccumulate("l2_hit_pre_first", base_valid_access_2 && resp_l2_pre && io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
  XSPerfAccumulate("l1_hit_pre_first", base_valid_access_2 && resp_l1_pre && io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
  XSPerfAccumulate("l0_hit_pre_first", base_valid_access_2 && resp_l0_pre && resp_l0)
  XSPerfAccumulate("sp_hit_pre_first", base_valid_access_2 && resp_sp_pre && resp_sp)
  XSPerfAccumulate("pte_hit_pre_first", base_valid_access_2 && (resp_l0_pre && resp_l0 || resp_sp_pre && resp_sp) && io.resp.bits.hit)

  val base_valid_access_3 = stageResp.bits.isFirst && from_pre(io.resp.bits.req_info.source) && io.resp.fire
  XSPerfAccumulate("pre_access_first", base_valid_access_3)
  if (EnableSv48) {
    XSPerfAccumulate("pre_l3_hit_first", base_valid_access_3 && io.resp.bits.toFsm.l3Hit.get && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
  }
  XSPerfAccumulate("pre_l2_hit_first", base_valid_access_3 && io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
  XSPerfAccumulate("pre_l1_hit_first", base_valid_access_3 && io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
  XSPerfAccumulate("pre_l0_hit_first", base_valid_access_3 && resp_l0)
  XSPerfAccumulate("pre_sp_hit_first", base_valid_access_3 && resp_sp)
  XSPerfAccumulate("pre_pte_hit_first", base_valid_access_3 && io.resp.bits.hit)

  if (EnableSv48) {
    XSPerfAccumulate("pre_l3_hit_pre_first", base_valid_access_3 && resp_l3_pre.get && io.resp.bits.toFsm.l3Hit.get && !io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
  }
  XSPerfAccumulate("pre_l2_hit_pre_first", base_valid_access_3 && resp_l2_pre && io.resp.bits.toFsm.l2Hit && !io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
  XSPerfAccumulate("pre_l1_hit_pre_first", base_valid_access_3 && resp_l1_pre && io.resp.bits.toFsm.l1Hit && !io.resp.bits.hit)
  XSPerfAccumulate("pre_l0_hit_pre_first", base_valid_access_3 && resp_l0_pre && resp_l0)
  XSPerfAccumulate("pre_sp_hit_pre_first", base_valid_access_3 && resp_sp_pre && resp_sp)
  XSPerfAccumulate("pre_pte_hit_pre_first", base_valid_access_3 && (resp_l0_pre && resp_l0 || resp_sp_pre && resp_sp) && io.resp.bits.hit)

1163  XSPerfAccumulate("rwHarzad", io.req.valid && !io.req.ready)
1164  XSPerfAccumulate("out_blocked", io.resp.valid && !io.resp.ready)
  if (EnableSv48) {
    l3AccessPerf.get.zipWithIndex.foreach{ case (l, i) => XSPerfAccumulate(s"l3AccessIndex${i}", l) }
  }
  l2AccessPerf.zipWithIndex.foreach{ case (l, i) => XSPerfAccumulate(s"l2AccessIndex${i}", l) }
  l1AccessPerf.zipWithIndex.foreach{ case (l, i) => XSPerfAccumulate(s"l1AccessIndex${i}", l) }
  l0AccessPerf.zipWithIndex.foreach{ case (l, i) => XSPerfAccumulate(s"l0AccessIndex${i}", l) }
  spAccessPerf.zipWithIndex.foreach{ case (l, i) => XSPerfAccumulate(s"SPAccessIndex${i}", l) }
  if (EnableSv48) {
    l3RefillPerf.get.zipWithIndex.foreach{ case (l, i) => XSPerfAccumulate(s"l3RefillIndex${i}", l) }
  }
  l2RefillPerf.zipWithIndex.foreach{ case (l, i) => XSPerfAccumulate(s"l2RefillIndex${i}", l) }
  l1RefillPerf.zipWithIndex.foreach{ case (l, i) => XSPerfAccumulate(s"l1RefillIndex${i}", l) }
  l0RefillPerf.zipWithIndex.foreach{ case (l, i) => XSPerfAccumulate(s"l0RefillIndex${i}", l) }
  spRefillPerf.zipWithIndex.foreach{ case (l, i) => XSPerfAccumulate(s"SPRefillIndex${i}", l) }

  if (EnableSv48) {
    XSPerfAccumulate("l3Refill", Cat(l3RefillPerf.get).orR)
  }
  XSPerfAccumulate("l2Refill", Cat(l2RefillPerf).orR)
  XSPerfAccumulate("l1Refill", Cat(l1RefillPerf).orR)
  XSPerfAccumulate("l0Refill", Cat(l0RefillPerf).orR)
  XSPerfAccumulate("spRefill", Cat(spRefillPerf).orR)
  if (EnableSv48) {
    XSPerfAccumulate("l3Refill_pre", Cat(l3RefillPerf.get).orR && refill_prefetch_dup(0))
  }
  XSPerfAccumulate("l2Refill_pre", Cat(l2RefillPerf).orR && refill_prefetch_dup(0))
  XSPerfAccumulate("l1Refill_pre", Cat(l1RefillPerf).orR && refill_prefetch_dup(0))
  XSPerfAccumulate("l0Refill_pre", Cat(l0RefillPerf).orR && refill_prefetch_dup(0))
  XSPerfAccumulate("spRefill_pre", Cat(spRefillPerf).orR && refill_prefetch_dup(0))

  // debug
  XSDebug(sfence_dup(0).valid, p"[sfence] original v and g vector:\n")
  if (EnableSv48) {
    XSDebug(sfence_dup(0).valid, p"[sfence] l3v:${Binary(l3v.get)}\n")
  }
  XSDebug(sfence_dup(0).valid, p"[sfence] l2v:${Binary(l2v)}\n")
  XSDebug(sfence_dup(0).valid, p"[sfence] l1v:${Binary(l1v)}\n")
  XSDebug(sfence_dup(0).valid, p"[sfence] l0v:${Binary(l0v)}\n")
  XSDebug(sfence_dup(0).valid, p"[sfence] l0g:${Binary(l0g)}\n")
  XSDebug(sfence_dup(0).valid, p"[sfence] spv:${Binary(spv)}\n")
  XSDebug(RegNext(sfence_dup(0).valid), p"[sfence] new v and g vector:\n")
  if (EnableSv48) {
    XSDebug(RegNext(sfence_dup(0).valid), p"[sfence] l3v:${Binary(l3v.get)}\n")
  }
  XSDebug(RegNext(sfence_dup(0).valid), p"[sfence] l2v:${Binary(l2v)}\n")
  XSDebug(RegNext(sfence_dup(0).valid), p"[sfence] l1v:${Binary(l1v)}\n")
  XSDebug(RegNext(sfence_dup(0).valid), p"[sfence] l0v:${Binary(l0v)}\n")
  XSDebug(RegNext(sfence_dup(0).valid), p"[sfence] l0g:${Binary(l0g)}\n")
  XSDebug(RegNext(sfence_dup(0).valid), p"[sfence] spv:${Binary(spv)}\n")

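  // Events exported to the core-level hardware performance monitor through
  // generatePerfEvent() below.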
  val perfEvents = Seq(
    ("access           ", base_valid_access_0             ),
    ("l2_hit           ", l2Hit                           ),
    ("l1_hit           ", l1Hit                           ),
    ("l0_hit           ", l0Hit                           ),
    ("sp_hit           ", spHit                           ),
    ("pte_hit          ", l0Hit || spHit                  ),
    ("rwHazard         ", io.req.valid && !io.req.ready   ),
    ("out_blocked      ", io.resp.valid && !io.resp.ready ),
  )
  generatePerfEvent()
}
