xref: /XiangShan/src/main/scala/xiangshan/cache/mmu/PageTableCache.scala (revision dc597826530cb6803c2396d6ab0e5eb176b732e0)
/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.cache.mmu

import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.util._
import xiangshan._
import xiangshan.cache.{HasDCacheParameters, MemoryOpConstants}
import utils._
import freechips.rocketchip.diplomacy.{LazyModule, LazyModuleImp}
import freechips.rocketchip.tilelink._

/* The PTW cache caches page table entries of all three levels.
 * It responds at the next cycle after a request.
 * The cache itself must not be blocked;
 * when the miss queue is full, just block the req outside.
 */
class PtwCacheIO()(implicit p: Parameters) extends PtwBundle {
  val req = Flipped(DecoupledIO(new Bundle {
    val vpn = UInt(vpnLen.W)
    val source = UInt(bPtwWidth.W)
    val isReplay = Bool()
  }))
  val resp = DecoupledIO(new Bundle {
    val source = UInt(bPtwWidth.W)
    val vpn = UInt(vpnLen.W)
    val isReplay = Bool()
    val hit = Bool()
    val toFsm = new Bundle {
      val l1Hit = Bool()
      val l2Hit = Bool()
      val ppn = UInt(ppnLen.W)
    }
    val toTlb = new PtwEntry(tagLen = vpnLen, hasPerm = true, hasLevel = true)
  })
  val refill = Flipped(ValidIO(new Bundle {
    val ptes = UInt(blockBits.W)
    val vpn = UInt(vpnLen.W)
    val level = UInt(log2Up(Level).W)
    val memAddr = Input(UInt(PAddrBits.W))
  }))
  val sfence = Input(new SfenceBundle)
  val refuseRefill = Input(Bool())
}

class PtwCache()(implicit p: Parameters) extends XSModule with HasPtwConst {
  val io = IO(new PtwCacheIO)

  // TODO: four caches make the code dirty; think about how to deal with it
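  // NOTE: the four structures below are a fully-associative L1 (level-0
  //       non-leaf PTEs), set-associative L2 and L3 SRAMs (level-1 non-leaf
  //       and level-2 leaf PTEs), and a fully-associative super-page buffer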

  val sfence = io.sfence
  val refuseRefill = io.refuseRefill
  val refill = io.refill.bits

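  // Two-cycle access: the "first" stage accepts the req and launches the SRAM
  // reads; the "second" stage compares tags and drives the response.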
  val first_valid = io.req.valid
  val first_fire = first_valid && io.req.ready
  val first_req = io.req.bits
  val second_ready = Wire(Bool())
  val second_valid = ValidHold(first_fire, io.resp.fire(), sfence.valid)
  val second_req = RegEnable(first_req, first_fire)
  // NOTE: if the ptw cache resp could be blocked, refill would be hard to handle,
  //       so when the miss queue is full, block the itlb and dtlb inputs instead

  // when refilling, refuse to accept a new req
  val rwHazard = if (sramSinglePort) io.refill.valid else false.B
  io.req.ready := !rwHazard && (second_ready || io.req.bits.isReplay)
  // NOTE: when the SRAM is being written, don't assert ready;
  //       a replayed req just comes in, and the outside makes sure resp.fire()

  // l1: level 0 non-leaf pte
  val l1 = Reg(Vec(l2tlbParams.l1Size, new PtwEntry(tagLen = PtwL1TagLen)))
  val l1v = RegInit(0.U(l2tlbParams.l1Size.W))
  val l1g = Reg(UInt(l2tlbParams.l1Size.W))
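  // the *v vectors are per-entry valid bits; the *g vectors mark global pages,
  // which survive an ASID-specific sfence (see the sfence logic below)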

  // l2: level 1 non-leaf pte
  val l2 = Module(new SRAMTemplate(
    new PtwEntries(num = PtwL2SectorSize, tagLen = PtwL2TagLen, level = 1, hasPerm = false),
    set = l2tlbParams.l2nSets,
    way = l2tlbParams.l2nWays,
    singlePort = sramSinglePort
  ))
  val l2v = RegInit(0.U((l2tlbParams.l2nSets * l2tlbParams.l2nWays).W))
  val l2g = Reg(UInt((l2tlbParams.l2nSets * l2tlbParams.l2nWays).W))
  def getl2vSet(vpn: UInt) = {
    require(log2Up(l2tlbParams.l2nWays) == log2Down(l2tlbParams.l2nWays))
    val set = genPtwL2SetIdx(vpn)
    require(set.getWidth == log2Up(l2tlbParams.l2nSets))
    val l2vVec = l2v.asTypeOf(Vec(l2tlbParams.l2nSets, UInt(l2tlbParams.l2nWays.W)))
    l2vVec(set)
  }
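  // e.g. with l2nSets = 32 and l2nWays = 8 (hypothetical parameters), the flat
  // 256-bit l2v register is viewed as 32 sets of 8 way-valid bits each, and the
  // set selected by genPtwL2SetIdx(vpn) is returned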

  // l3: level 2 leaf pte of 4KB pages
  val l3 = Module(new SRAMTemplate(
    new PtwEntries(num = PtwL3SectorSize, tagLen = PtwL3TagLen, level = 2, hasPerm = true),
    set = l2tlbParams.l3nSets,
    way = l2tlbParams.l3nWays,
    singlePort = sramSinglePort
  ))
  val l3v = RegInit(0.U((l2tlbParams.l3nSets * l2tlbParams.l3nWays).W))
  val l3g = Reg(UInt((l2tlbParams.l3nSets * l2tlbParams.l3nWays).W))
  def getl3vSet(vpn: UInt) = {
    require(log2Up(l2tlbParams.l3nWays) == log2Down(l2tlbParams.l3nWays))
    val set = genPtwL3SetIdx(vpn)
    require(set.getWidth == log2Up(l2tlbParams.l3nSets))
    val l3vVec = l3v.asTypeOf(Vec(l2tlbParams.l3nSets, UInt(l2tlbParams.l3nWays.W)))
    l3vVec(set)
  }

  // sp: level 0/1 leaf pte of 1GB/2MB super pages
  val sp = Reg(Vec(l2tlbParams.spSize, new PtwEntry(tagLen = SPTagLen, hasPerm = true, hasLevel = true)))
  val spv = RegInit(0.U(l2tlbParams.spSize.W))
  val spg = Reg(UInt(l2tlbParams.spSize.W))
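  // each super-page entry records its level (0 for 1GB, 1 for 2MB), so one
  // buffer serves both page sizes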

  // Access Perf
  val l1AccessPerf = Wire(Vec(l2tlbParams.l1Size, Bool()))
  val l2AccessPerf = Wire(Vec(l2tlbParams.l2nWays, Bool()))
  val l3AccessPerf = Wire(Vec(l2tlbParams.l3nWays, Bool()))
  val spAccessPerf = Wire(Vec(l2tlbParams.spSize, Bool()))
  l1AccessPerf.map(_ := false.B)
  l2AccessPerf.map(_ := false.B)
  l3AccessPerf.map(_ := false.B)
  spAccessPerf.map(_ := false.B)

  // l1
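  // tags are compared against the stage-1 vpn; the hit vector is registered
  // into stage 2, where the PPN is selected and replacement state is updated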
  val ptwl1replace = ReplacementPolicy.fromString(l2tlbParams.l1Replacer, l2tlbParams.l1Size)
  val (l1Hit, l1HitPPN) = {
    val hitVecT = l1.zipWithIndex.map { case (e, i) => e.hit(first_req.vpn) && l1v(i) }
    val hitVec = hitVecT.map(RegEnable(_, first_fire))
    val hitPPN = ParallelPriorityMux(hitVec zip l1.map(_.ppn))
    val hit = ParallelOR(hitVec) && second_valid

    when (hit) { ptwl1replace.access(OHToUInt(hitVec)) }

    l1AccessPerf.zip(hitVec).map{ case (l, h) => l := h && RegNext(first_fire)}
    for (i <- 0 until l2tlbParams.l1Size) {
      XSDebug(first_fire, p"[l1] l1(${i.U}) ${l1(i)} hit:${l1(i).hit(first_req.vpn)}\n")
    }
    XSDebug(first_fire, p"[l1] l1v:${Binary(l1v)} hitVecT:${Binary(VecInit(hitVecT).asUInt)}\n")
    XSDebug(second_valid, p"[l1] l1Hit:${hit} l1HitPPN:0x${Hexadecimal(hitPPN)} hitVec:${VecInit(hitVec).asUInt}\n")

    VecInit(hitVecT).suggestName(s"l1_hitVecT")
    VecInit(hitVec).suggestName(s"l1_hitVec")

    (hit, hitPPN)
  }

  // l2
  val ptwl2replace = ReplacementPolicy.fromString(l2tlbParams.l2Replacer, l2tlbParams.l2nWays, l2tlbParams.l2nSets)
  val (l2Hit, l2HitPPN) = {
    val ridx = genPtwL2SetIdx(first_req.vpn)
    val vidx = RegEnable(VecInit(getl2vSet(first_req.vpn).asBools), first_fire)
    l2.io.r.req.valid := first_fire
    l2.io.r.req.bits.apply(setIdx = ridx)
    val ramDatas = l2.io.r.resp.data
    // val hitVec = VecInit(ramDatas.map{wayData => wayData.hit(first_req.vpn) })
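    // the read was issued in stage 1 with the stage-1 set index, so the data
    // returning here lines up with second_req; compare tags against second_req.vpn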
    val hitVec = VecInit(ramDatas.zip(vidx).map { case (wayData, v) => wayData.hit(second_req.vpn) && v })
    val hitWayData = ParallelPriorityMux(hitVec zip ramDatas)
    val hit = ParallelOR(hitVec) && second_valid
    val hitWay = ParallelPriorityMux(hitVec zip (0 until l2tlbParams.l2nWays).map(_.U))

    ridx.suggestName(s"l2_ridx")
    vidx.suggestName(s"l2_vidx")
    ramDatas.suggestName(s"l2_ramDatas")
    hitVec.suggestName(s"l2_hitVec")
    hitWayData.suggestName(s"l2_hitWayData")
    hitWay.suggestName(s"l2_hitWay")

    when (hit) { ptwl2replace.access(genPtwL2SetIdx(second_req.vpn), hitWay) }

    l2AccessPerf.zip(hitVec).map{ case (l, h) => l := h && RegNext(first_fire) }
    XSDebug(first_fire, p"[l2] ridx:0x${Hexadecimal(ridx)}\n")
    for (i <- 0 until l2tlbParams.l2nWays) {
      XSDebug(RegNext(first_fire), p"[l2] ramDatas(${i.U}) ${ramDatas(i)}  l2v:${vidx(i)}  hit:${ramDatas(i).hit(second_req.vpn)}\n")
    }
    XSDebug(second_valid, p"[l2] l2Hit:${hit} l2HitPPN:0x${Hexadecimal(hitWayData.ppns(genPtwL2SectorIdx(second_req.vpn)))} hitVec:${Binary(hitVec.asUInt)} hitWay:${hitWay} vidx:${Binary(vidx.asUInt)}\n")

    (hit, hitWayData.ppns(genPtwL2SectorIdx(second_req.vpn)))
  }

  // l3
  val ptwl3replace = ReplacementPolicy.fromString(l2tlbParams.l3Replacer, l2tlbParams.l3nWays, l2tlbParams.l3nSets)
  val (l3Hit, l3HitData) = {
    val ridx = genPtwL3SetIdx(first_req.vpn)
    val vidx = RegEnable(VecInit(getl3vSet(first_req.vpn).asBools), first_fire)
    l3.io.r.req.valid := first_fire
    l3.io.r.req.bits.apply(setIdx = ridx)
    val ramDatas = l3.io.r.resp.data
    val hitVec = VecInit(ramDatas.zip(vidx).map{ case (wayData, v) => wayData.hit(second_req.vpn) && v })
    val hitWayData = ParallelPriorityMux(hitVec zip ramDatas)
    val hit = ParallelOR(hitVec) && second_valid
    val hitWay = ParallelPriorityMux(hitVec zip (0 until l2tlbParams.l3nWays).map(_.U))

    when (hit) { ptwl3replace.access(genPtwL3SetIdx(second_req.vpn), hitWay) }

    l3AccessPerf.zip(hitVec).map{ case (l, h) => l := h && RegNext(first_fire) }
    XSDebug(first_fire, p"[l3] ridx:0x${Hexadecimal(ridx)}\n")
    for (i <- 0 until l2tlbParams.l3nWays) {
      XSDebug(RegNext(first_fire), p"[l3] ramDatas(${i.U}) ${ramDatas(i)}  l3v:${vidx(i)}  hit:${ramDatas(i).hit(second_req.vpn)}\n")
    }
    XSDebug(second_valid, p"[l3] l3Hit:${hit} l3HitData:${hitWayData} hitVec:${Binary(hitVec.asUInt)} hitWay:${hitWay} vidx:${Binary(vidx.asUInt)}\n")

    ridx.suggestName(s"l3_ridx")
    vidx.suggestName(s"l3_vidx")
    ramDatas.suggestName(s"l3_ramDatas")
    hitVec.suggestName(s"l3_hitVec")
    hitWay.suggestName(s"l3_hitWay")

    (hit, hitWayData)
  }
  val l3HitPPN = l3HitData.ppns(genPtwL3SectorIdx(second_req.vpn))
  val l3HitPerm = l3HitData.perms.getOrElse(0.U.asTypeOf(Vec(PtwL3SectorSize, new PtePermBundle)))(genPtwL3SectorIdx(second_req.vpn))
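  // a PtwEntries line holds PtwL3SectorSize PTEs sharing one tag; the low vpn
  // bits (genPtwL3SectorIdx) pick the ppn/perm of the requested page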

  // super page
  val spreplace = ReplacementPolicy.fromString(l2tlbParams.spReplacer, l2tlbParams.spSize)
  val (spHit, spHitData) = {
    val hitVecT = sp.zipWithIndex.map { case (e, i) => e.hit(first_req.vpn) && spv(i) }
    val hitVec = hitVecT.map(RegEnable(_, first_fire))
    val hitData = ParallelPriorityMux(hitVec zip sp)
    val hit = ParallelOR(hitVec) && second_valid

    when (hit) { spreplace.access(OHToUInt(hitVec)) }

    spAccessPerf.zip(hitVec).map{ case (s, h) => s := h && RegNext(first_fire) }
    for (i <- 0 until l2tlbParams.spSize) {
      XSDebug(first_fire, p"[sp] sp(${i.U}) ${sp(i)} hit:${sp(i).hit(first_req.vpn)} spv:${spv(i)}\n")
    }
    XSDebug(second_valid, p"[sp] spHit:${hit} spHitData:${hitData} hitVec:${Binary(VecInit(hitVec).asUInt)}\n")

    VecInit(hitVecT).suggestName(s"sp_hitVecT")
    VecInit(hitVec).suggestName(s"sp_hitVec")

    (hit, hitData)
  }
  val spHitPerm = spHitData.perm.getOrElse(0.U.asTypeOf(new PtePermBundle))
  val spHitLevel = spHitData.level.getOrElse(0.U)

  val resp = Wire(io.resp.bits.cloneType)
  val resp_latch = RegEnable(resp, io.resp.valid && !io.resp.ready)
  val resp_latch_valid = ValidHold(io.resp.valid && !io.resp.ready, io.resp.ready, sfence.valid)
  second_ready := !(second_valid || resp_latch_valid) || io.resp.fire()
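  // if the consumer stalls, the combinational resp is latched so the hit
  // result survives after the SRAM read data goes away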
  resp.source   := second_req.source
  resp.vpn      := second_req.vpn
  resp.isReplay := second_req.isReplay
  resp.hit      := l3Hit || spHit
  resp.toFsm.l1Hit := l1Hit
  resp.toFsm.l2Hit := l2Hit
  resp.toFsm.ppn   := Mux(l2Hit, l2HitPPN, l1HitPPN)
  resp.toTlb.tag   := second_req.vpn
  resp.toTlb.ppn   := Mux(l3Hit, l3HitPPN, spHitData.ppn)
  resp.toTlb.perm.map(_ := Mux(l3Hit, l3HitPerm, spHitPerm))
  resp.toTlb.level.map(_ := Mux(l3Hit, 2.U, spHitLevel))
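  // resp.hit means a leaf PTE was found; an l1/l2-only hit is reported via
  // toFsm so the walker can resume from the deepest cached level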

  io.resp.valid := second_valid
  io.resp.bits := Mux(resp_latch_valid, resp_latch, resp)
  assert(!(l3Hit && spHit), "normal page and super page both hit")

  // refill Perf
  val l1RefillPerf = Wire(Vec(l2tlbParams.l1Size, Bool()))
  val l2RefillPerf = Wire(Vec(l2tlbParams.l2nWays, Bool()))
  val l3RefillPerf = Wire(Vec(l2tlbParams.l3nWays, Bool()))
  val spRefillPerf = Wire(Vec(l2tlbParams.spSize, Bool()))
  l1RefillPerf.map(_ := false.B)
  l2RefillPerf.map(_ := false.B)
  l3RefillPerf.map(_ := false.B)
  spRefillPerf.map(_ := false.B)

  // refill
  l2.io.w.req <> DontCare
  l3.io.w.req <> DontCare
  l2.io.w.req.valid := false.B
  l3.io.w.req.valid := false.B

  def get_part(data: UInt, index: UInt): UInt = {
    val inner_data = data.asTypeOf(Vec(data.getWidth / XLEN, UInt(XLEN.W)))
    inner_data(index)
  }
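  // e.g. assuming XLEN = 64 and a 512-bit refill block, get_part views the
  // block as 8 PTE-sized words and returns the one at `index`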

  val memRdata = refill.ptes
  val memSelData = get_part(memRdata, refill.memAddr(log2Up(l2tlbParams.blockBytes)-1, log2Up(XLEN/8)))
  val memPtes = (0 until (l2tlbParams.blockBytes/(XLEN/8))).map(i => memRdata((i+1)*XLEN-1, i*XLEN).asTypeOf(new PteBundle))
  val memPte = memSelData.asTypeOf(new PteBundle)
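  // memPte is the single PTE the walk actually addressed; memPtes are all PTEs
  // in the refill block, used to fill a whole L2/L3 sector at once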

  // TODO: handle sfenceLatch outside
  when (io.refill.valid && !memPte.isPf(refill.level) && !(sfence.valid || refuseRefill)) {
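    // a refill is dropped if the fetched PTE faults, an sfence is in flight,
    // or the outside explicitly refuses it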
    when (refill.level === 0.U && !memPte.isLeaf()) {
      // val refillIdx = LFSR64()(log2Up(l2tlbParams.l1Size)-1,0) // TODO: may be LRU
      val refillIdx = replaceWrapper(l1v, ptwl1replace.way)
      refillIdx.suggestName(s"PtwL1RefillIdx")
      val rfOH = UIntToOH(refillIdx)
      l1(refillIdx).refill(refill.vpn, memSelData)
      ptwl1replace.access(refillIdx)
      l1v := l1v | rfOH
      l1g := (l1g & ~rfOH) | Mux(memPte.perm.g, rfOH, 0.U)

      for (i <- 0 until l2tlbParams.l1Size) {
        l1RefillPerf(i) := i.U === refillIdx
      }

      XSDebug(p"[l1 refill] refillIdx:${refillIdx} refillEntry:${l1(refillIdx).genPtwEntry(refill.vpn, memSelData)}\n")
      XSDebug(p"[l1 refill] l1v:${Binary(l1v)}->${Binary(l1v | rfOH)} l1g:${Binary(l1g)}->${Binary((l1g & ~rfOH) | Mux(memPte.perm.g, rfOH, 0.U))}\n")

      refillIdx.suggestName(s"l1_refillIdx")
      rfOH.suggestName(s"l1_rfOH")
    }

    when (refill.level === 1.U && !memPte.isLeaf()) {
      val refillIdx = genPtwL2SetIdx(refill.vpn)
      val victimWay = replaceWrapper(RegEnable(VecInit(getl2vSet(refill.vpn).asBools).asUInt, first_fire), ptwl2replace.way(refillIdx))
      val victimWayOH = UIntToOH(victimWay)
      val rfvOH = UIntToOH(Cat(refillIdx, victimWay))
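      // rfvOH addresses the flat l2v/l2g vectors, which are laid out as
      // {set, way}, matching getl2vSet above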
      l2.io.w.apply(
        valid = true.B,
        setIdx = refillIdx,
        data = (new PtwEntries(num = PtwL2SectorSize, tagLen = PtwL2TagLen, level = 1, hasPerm = false)).genEntries(
          vpn = refill.vpn, data = memRdata, levelUInt = 1.U
        ),
        waymask = victimWayOH
      )
      ptwl2replace.access(refillIdx, victimWay)
      l2v := l2v | rfvOH
      l2g := l2g & ~rfvOH | Mux(Cat(memPtes.map(_.perm.g)).andR, rfvOH, 0.U)

      for (i <- 0 until l2tlbParams.l2nWays) {
        l2RefillPerf(i) := i.U === victimWay
      }

      XSDebug(p"[l2 refill] refillIdx:0x${Hexadecimal(refillIdx)} victimWay:${victimWay} victimWayOH:${Binary(victimWayOH)} rfvOH(in UInt):${Cat(refillIdx, victimWay)}\n")
      XSDebug(p"[l2 refill] refilldata:0x${
        (new PtwEntries(num = PtwL2SectorSize, tagLen = PtwL2TagLen, level = 1, hasPerm = false)).genEntries(
          vpn = refill.vpn, data = memRdata, levelUInt = 1.U)
      }\n")
      XSDebug(p"[l2 refill] l2v:${Binary(l2v)} -> ${Binary(l2v | rfvOH)}\n")
      XSDebug(p"[l2 refill] l2g:${Binary(l2g)} -> ${Binary(l2g & ~rfvOH | Mux(Cat(memPtes.map(_.perm.g)).andR, rfvOH, 0.U))}\n")

      refillIdx.suggestName(s"l2_refillIdx")
      victimWay.suggestName(s"l2_victimWay")
      victimWayOH.suggestName(s"l2_victimWayOH")
      rfvOH.suggestName(s"l2_rfvOH")
    }

    when (refill.level === 2.U && memPte.isLeaf()) {
      val refillIdx = genPtwL3SetIdx(refill.vpn)
      val victimWay = replaceWrapper(RegEnable(VecInit(getl3vSet(refill.vpn).asBools).asUInt, first_fire), ptwl3replace.way(refillIdx))
      val victimWayOH = UIntToOH(victimWay)
      val rfvOH = UIntToOH(Cat(refillIdx, victimWay))
      l3.io.w.apply(
        valid = true.B,
        setIdx = refillIdx,
        data = (new PtwEntries(num = PtwL3SectorSize, tagLen = PtwL3TagLen, level = 2, hasPerm = true)).genEntries(
          vpn = refill.vpn, data = memRdata, levelUInt = 2.U
        ),
        waymask = victimWayOH
      )
      ptwl3replace.access(refillIdx, victimWay)
      l3v := l3v | rfvOH
      l3g := l3g & ~rfvOH | Mux(Cat(memPtes.map(_.perm.g)).andR, rfvOH, 0.U)

      for (i <- 0 until l2tlbParams.l3nWays) {
        l3RefillPerf(i) := i.U === victimWay
      }

      XSDebug(p"[l3 refill] refillIdx:0x${Hexadecimal(refillIdx)} victimWay:${victimWay} victimWayOH:${Binary(victimWayOH)} rfvOH(in UInt):${Cat(refillIdx, victimWay)}\n")
      XSDebug(p"[l3 refill] refilldata:0x${
        (new PtwEntries(num = PtwL3SectorSize, tagLen = PtwL3TagLen, level = 2, hasPerm = true)).genEntries(
          vpn = refill.vpn, data = memRdata, levelUInt = 2.U)
      }\n")
      XSDebug(p"[l3 refill] l3v:${Binary(l3v)} -> ${Binary(l3v | rfvOH)}\n")
      XSDebug(p"[l3 refill] l3g:${Binary(l3g)} -> ${Binary(l3g & ~rfvOH | Mux(Cat(memPtes.map(_.perm.g)).andR, rfvOH, 0.U))}\n")

      refillIdx.suggestName(s"l3_refillIdx")
      victimWay.suggestName(s"l3_victimWay")
      victimWayOH.suggestName(s"l3_victimWayOH")
      rfvOH.suggestName(s"l3_rfvOH")
    }

    when ((refill.level === 0.U || refill.level === 1.U) && memPte.isLeaf()) {
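      // a leaf PTE at level 0 or 1 maps a 1GB or 2MB super page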
      val refillIdx = spreplace.way // LFSR64()(log2Up(l2tlbParams.spSize)-1,0) // TODO: may be LRU
      val rfOH = UIntToOH(refillIdx)
      sp(refillIdx).refill(refill.vpn, memSelData, refill.level)
      spreplace.access(refillIdx)
      spv := spv | rfOH
      spg := spg & ~rfOH | Mux(memPte.perm.g, rfOH, 0.U)

      for (i <- 0 until l2tlbParams.spSize) {
        spRefillPerf(i) := i.U === refillIdx
      }

      XSDebug(p"[sp refill] refillIdx:${refillIdx} refillEntry:${sp(refillIdx).genPtwEntry(refill.vpn, memSelData, refill.level)}\n")
      XSDebug(p"[sp refill] spv:${Binary(spv)}->${Binary(spv | rfOH)} spg:${Binary(spg)}->${Binary(spg & ~rfOH | Mux(memPte.perm.g, rfOH, 0.U))}\n")

      refillIdx.suggestName(s"sp_refillIdx")
      rfOH.suggestName(s"sp_rfOH")
    }
  }

  // sfence
  when (sfence.valid) {
    when (sfence.bits.rs1/*va*/) {
      when (sfence.bits.rs2) {
        // all va && all asid
        l1v := 0.U
        l2v := 0.U
        l3v := 0.U
        spv := 0.U
      } .otherwise {
        // all va && specific asid except global
        l1v := l1v & l1g
        l2v := l2v & l2g
        l3v := l3v & l3g
        spv := spv & spg
      }
    } .otherwise {
      // val flushMask = UIntToOH(genTlbL2Idx(sfence.bits.addr(sfence.bits.addr.getWidth-1, offLen)))
      val flushSetIdxOH = UIntToOH(genPtwL3SetIdx(sfence.bits.addr(sfence.bits.addr.getWidth-1, offLen)))
      // val flushMask = VecInit(flushSetIdxOH.asBools.map(Fill(l2tlbParams.l3nWays, _.asUInt))).asUInt
      val flushMask = VecInit(flushSetIdxOH.asBools.map { a => Fill(l2tlbParams.l3nWays, a.asUInt) }).asUInt
      flushSetIdxOH.suggestName(s"sfence_nrs1_flushSetIdxOH")
      flushMask.suggestName(s"sfence_nrs1_flushMask")
      when (sfence.bits.rs2) {
        // specific leaf of addr && all asid
        l3v := l3v & ~flushMask
        l3g := l3g & ~flushMask
      } .otherwise {
        // specific leaf of addr && specific asid
        l3v := l3v & (~flushMask | l3g)
      }
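      // the super-page buffer is not set-indexed by addr, so a targeted flush
      // conservatively clears it entirely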
      spv := 0.U
    }
  }

  // Perf Count
  XSPerfAccumulate("access", second_valid)
  XSPerfAccumulate("l1_hit", l1Hit)
  XSPerfAccumulate("l2_hit", l2Hit)
  XSPerfAccumulate("l3_hit", l3Hit)
  XSPerfAccumulate("sp_hit", spHit)
  XSPerfAccumulate("pte_hit", l3Hit || spHit)
  XSPerfAccumulate("rwHazard", io.req.valid && !io.req.ready)
  XSPerfAccumulate("out_blocked", io.resp.valid && !io.resp.ready)
  l1AccessPerf.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"L1AccessIndex${i}", l) }
  l2AccessPerf.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"L2AccessIndex${i}", l) }
  l3AccessPerf.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"L3AccessIndex${i}", l) }
  spAccessPerf.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"SPAccessIndex${i}", l) }
  l1RefillPerf.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"L1RefillIndex${i}", l) }
  l2RefillPerf.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"L2RefillIndex${i}", l) }
  l3RefillPerf.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"L3RefillIndex${i}", l) }
  spRefillPerf.zipWithIndex.map{ case (l, i) => XSPerfAccumulate(s"SPRefillIndex${i}", l) }

  // debug
  XSDebug(sfence.valid, p"[sfence] original v and g vector:\n")
  XSDebug(sfence.valid, p"[sfence] l1v:${Binary(l1v)}\n")
  XSDebug(sfence.valid, p"[sfence] l2v:${Binary(l2v)}\n")
  XSDebug(sfence.valid, p"[sfence] l3v:${Binary(l3v)}\n")
  XSDebug(sfence.valid, p"[sfence] l3g:${Binary(l3g)}\n")
  XSDebug(sfence.valid, p"[sfence] spv:${Binary(spv)}\n")
  XSDebug(RegNext(sfence.valid), p"[sfence] new v and g vector:\n")
  XSDebug(RegNext(sfence.valid), p"[sfence] l1v:${Binary(l1v)}\n")
  XSDebug(RegNext(sfence.valid), p"[sfence] l2v:${Binary(l2v)}\n")
  XSDebug(RegNext(sfence.valid), p"[sfence] l3v:${Binary(l3v)}\n")
  XSDebug(RegNext(sfence.valid), p"[sfence] l3g:${Binary(l3g)}\n")
  XSDebug(RegNext(sfence.valid), p"[sfence] spv:${Binary(spv)}\n")
}