/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.cache.mmu

import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.internal.naming.chiselName
import chisel3.util._
import freechips.rocketchip.util.SRAMAnnotation
import xiangshan._
import utils._
import xiangshan.backend.fu.{PMPChecker, PMPReqBundle}
import xiangshan.backend.rob.RobPtr
import xiangshan.backend.fu.util.HasCSRConst


@chiselName
class TLB(Width: Int, q: TLBParameters)(implicit p: Parameters) extends TlbModule with HasCSRConst {
  val io = IO(new TlbIO(Width, q))

  require(q.superAssociative == "fa")
  if (q.sameCycle) {
    require(q.normalAssociative == "fa")
  }

  val req = io.requestor.map(_.req)
  val resp = io.requestor.map(_.resp)
  val ptw = io.ptw
  val pmp = io.pmp

  val sfence = io.sfence
  val csr = io.csr
  val satp = csr.satp
  val priv = csr.priv
  val ifetch = q.fetchi.B
  val mode = if (q.useDmode) priv.dmode else priv.imode
  // val vmEnable = satp.mode === 8.U // && (mode < ModeM) // FIXME: fix me when boot xv6/linux...
  val vmEnable = if (EnbaleTlbDebug) (satp.mode === 8.U)
                 else (satp.mode === 8.U && (mode < ModeM))
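  // NOTE: translation is active only when satp selects Sv39 (satp.mode === 8.U) and
  // the effective privilege mode is below M. With EnbaleTlbDebug set, translation is
  // forced on regardless of privilege mode, which is useful for bare-metal bring-up.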

  val reqAddr = req.map(_.bits.vaddr.asTypeOf(new VaBundle))
  val vpn = reqAddr.map(_.vpn)
  val cmd = req.map(_.bits.cmd)
  val valid = req.map(_.valid)

  def widthMapSeq[T <: Seq[Data]](f: Int => T) = (0 until Width).map(f)

  def widthMap[T <: Data](f: Int => T) = (0 until Width).map(f)

  // Normal page && Super page
  val normalPage = TlbStorage(
    name = "normal",
    associative = q.normalAssociative,
    sameCycle = q.sameCycle,
    ports = Width,
    nSets = q.normalNSets,
    nWays = q.normalNWays,
    sramSinglePort = sramSinglePort,
    normalPage = true,
    superPage = false
  )
  val superPage = TlbStorage(
    name = "super",
    associative = q.superAssociative,
    sameCycle = q.sameCycle,
    ports = Width,
    nSets = q.superNSets,
    nWays = q.superNWays,
    sramSinglePort = sramSinglePort,
    normalPage = q.normalAsVictim,
    superPage = true
  )
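  // Two storages are looked up in parallel: "normal" holds 4KB-page entries and
  // "super" holds large-page entries. With q.normalAsVictim the super storage also
  // accepts normal pages, and the normal storage is then filled from its victims
  // (see the victim wiring and w_apply below).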


  for (i <- 0 until Width) {
    normalPage.r_req_apply(
      valid = io.requestor(i).req.valid,
      vpn = vpn(i),
      asid = csr.satp.asid,
      i = i
    )
    superPage.r_req_apply(
      valid = io.requestor(i).req.valid,
      vpn = vpn(i),
      asid = csr.satp.asid,
      i = i
    )
  }

  normalPage.victim.in <> superPage.victim.out
  normalPage.victim.out <> superPage.victim.in
  normalPage.sfence <> io.sfence
  superPage.sfence <> io.sfence
  normalPage.csr <> io.csr
  superPage.csr <> io.csr
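  // Cross-connecting the victim ports lets an entry evicted from one storage be
  // re-inserted into the other; sfence and CSR state are broadcast to both storages.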

  def TLBNormalRead(i: Int) = {
    val (normal_hit, normal_ppn, normal_perm) = normalPage.r_resp_apply(i)
    val (super_hit, super_ppn, super_perm) = superPage.r_resp_apply(i)
    assert(!(normal_hit && super_hit && vmEnable && RegNext(req(i).valid, init = false.B)))

    val hit = normal_hit || super_hit
    val ppn = Mux(normal_hit, normal_ppn, super_ppn)
    val perm = Mux(normal_hit, normal_perm, super_perm)

    val pf = perm.pf && hit
    val af = perm.af && hit
    val cmdReg = if (!q.sameCycle) RegNext(cmd(i)) else cmd(i)
    val validReg = if (!q.sameCycle) RegNext(valid(i)) else valid(i)
    val offReg = if (!q.sameCycle) RegNext(reqAddr(i).off) else reqAddr(i).off
    val sizeReg = if (!q.sameCycle) RegNext(req(i).bits.size) else req(i).bits.size
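    // When q.sameCycle is false the storage read has one cycle of latency, so the
    // request-side signals are delayed with RegNext to stay aligned with the hit
    // result; otherwise they are used combinationally.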

    /* The signals below belong to the next cycle when q.sameCycle is false */
    val miss = !hit && vmEnable
    hit.suggestName(s"hit_${i}")
    miss.suggestName(s"miss_${i}")

    XSDebug(validReg, p"(${i.U}) hit:${hit} miss:${miss} ppn:${Hexadecimal(ppn)} perm:${perm}\n")

    val paddr = Cat(ppn, offReg)
    val vaddr = SignExt(req(i).bits.vaddr, PAddrBits)

    req(i).ready := resp(i).ready
    resp(i).valid := validReg
    resp(i).bits.paddr := Mux(vmEnable, paddr, if (!q.sameCycle) RegNext(vaddr) else vaddr)
    resp(i).bits.miss := miss
    resp(i).bits.ptwBack := io.ptw.resp.fire()

    pmp(i).valid := resp(i).valid
    pmp(i).bits.addr := resp(i).bits.paddr
    pmp(i).bits.size := sizeReg
    pmp(i).bits.cmd := cmdReg
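    // The translated address is checked by the PMP in parallel with the response,
    // using the same (possibly one-cycle-delayed) size and command as the lookup.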

    val ldUpdate = hit && !perm.a && TlbCmd.isRead(cmdReg) && !TlbCmd.isAmo(cmdReg) // update A/D through exception
    val stUpdate = hit && (!perm.a || !perm.d) && (TlbCmd.isWrite(cmdReg) || TlbCmd.isAmo(cmdReg)) // update A/D through exception
    val instrUpdate = hit && !perm.a && TlbCmd.isExec(cmdReg) // update A/D through exception
    val modeCheck = !((mode === ModeU && !perm.u) || (mode === ModeS && perm.u && (!priv.sum || ifetch)))
    val ldPermFail = !(modeCheck && (perm.r || priv.mxr && perm.x))
    val stPermFail = !(modeCheck && perm.w)
    val instrPermFail = !(modeCheck && perm.x)
    val ldPf = (ldPermFail || pf) && (TlbCmd.isRead(cmdReg) && !TlbCmd.isAmo(cmdReg))
    val stPf = (stPermFail || pf) && (TlbCmd.isWrite(cmdReg) || TlbCmd.isAmo(cmdReg))
    val instrPf = (instrPermFail || pf) && TlbCmd.isExec(cmdReg)
    val fault_valid = vmEnable && hit
    resp(i).bits.excp.pf.ld := (ldPf || ldUpdate) && fault_valid && !af
    resp(i).bits.excp.pf.st := (stPf || stUpdate) && fault_valid && !af
    resp(i).bits.excp.pf.instr := (instrPf || instrUpdate) && fault_valid && !af
    // NOTE: pf needs the && with !af. A page fault normally has higher priority than
    // an access fault, but the PTW itself may take an access fault, in which case the
    // translation is wrong and af must win over pf.
    resp(i).bits.excp.af.ld := af && TlbCmd.isRead(cmdReg) && fault_valid
    resp(i).bits.excp.af.st := af && TlbCmd.isWrite(cmdReg) && fault_valid
    resp(i).bits.excp.af.instr := af && TlbCmd.isExec(cmdReg) && fault_valid

    (hit, miss, validReg)
  }

  val readResult = (0 until Width).map(TLBNormalRead(_))
  val hitVec = readResult.map(_._1)
  val missVec = readResult.map(_._2)
  val validRegVec = readResult.map(_._3)

  // replacement
  def get_access(one_hot: UInt, valid: Bool): Valid[UInt] = {
    val res = Wire(Valid(UInt(log2Up(one_hot.getWidth).W)))
    res.valid := Cat(one_hot).orR && valid
    res.bits := OHToUInt(one_hot)
    res
  }
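  // Helper that turns a one-hot touched-way vector into a Valid way index, the form
  // the replacement policies below consume as an access record.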

  val normal_refill_idx = if (q.outReplace) {
    io.replace.normalPage.access <> normalPage.access
    io.replace.normalPage.chosen_set := get_set_idx(io.ptw.resp.bits.entry.tag, q.normalNSets)
    io.replace.normalPage.refillIdx
  } else if (q.normalAssociative == "fa") {
    val re = ReplacementPolicy.fromString(q.normalReplacer, q.normalNWays)
    re.access(normalPage.access.map(_.touch_ways))
    re.way
  } else { // set-associative && plru
    val re = ReplacementPolicy.fromString(q.normalReplacer, q.normalNSets, q.normalNWays)
    re.access(normalPage.access.map(_.sets), normalPage.access.map(_.touch_ways))
    re.way(get_set_idx(io.ptw.resp.bits.entry.tag, q.normalNSets))
  }

  val super_refill_idx = if (q.outReplace) {
    io.replace.superPage.access <> superPage.access
    io.replace.superPage.chosen_set := DontCare
    io.replace.superPage.refillIdx
  } else {
    val re = ReplacementPolicy.fromString(q.superReplacer, q.superNWays)
    re.access(superPage.access.map(_.touch_ways))
    re.way
  }
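  // With q.outReplace the replacement state lives outside this module (see TlbReplace
  // below), so multiple TLBs can share one policy; otherwise a local policy is built
  // from the configured replacer string.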

  val refill = ptw.resp.fire() && !sfence.valid && !satp.changed
  normalPage.w_apply(
    valid = { if (q.normalAsVictim) false.B
              else refill && ptw.resp.bits.entry.level.get === 2.U },
    wayIdx = normal_refill_idx,
    data = ptw.resp.bits
  )
  superPage.w_apply(
    valid = { if (q.normalAsVictim) refill
              else refill && ptw.resp.bits.entry.level.get =/= 2.U },
    wayIdx = super_refill_idx,
    data = ptw.resp.bits
  )
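  // Refills are dropped while an sfence is in flight or satp just changed, so a stale
  // PTW response can never install an outdated translation. Level-2 (leaf 4KB) entries
  // go to the normal storage and higher-level super pages to the super storage, unless
  // q.normalAsVictim steers every refill into the super storage first.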

  for (i <- 0 until Width) {
    io.ptw.req(i).valid := validRegVec(i) && missVec(i) && !RegNext(refill)
    io.ptw.req(i).bits.vpn := RegNext(reqAddr(i).vpn)
  }
  io.ptw.resp.ready := true.B
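  // A registered miss is sent to the PTW unless a refill landed last cycle (the
  // replayed lookup may now hit); PTW responses are always accepted immediately.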

  if (!q.shouldBlock) {
    for (i <- 0 until Width) {
      XSPerfAccumulate(s"first_access${i}", validRegVec(i) && vmEnable && RegNext(req(i).bits.debug.isFirstIssue))
      XSPerfAccumulate(s"access${i}", validRegVec(i) && vmEnable)
    }
    for (i <- 0 until Width) {
      XSPerfAccumulate(s"first_miss${i}", validRegVec(i) && vmEnable && missVec(i) && RegNext(req(i).bits.debug.isFirstIssue))
      XSPerfAccumulate(s"miss${i}", validRegVec(i) && vmEnable && missVec(i))
    }
  } else {
    // NOTE: the TLB is blocked here (ITLB), so a resp is valid only on hit and a req
    // is ready only on hit; fired requests count accesses and PTW requests count misses.
    for (i <- 0 until Width) {
      XSPerfAccumulate(s"access${i}", io.requestor(i).req.fire() && vmEnable)
      XSPerfAccumulate(s"miss${i}", ptw.req(i).fire())
    }
  }
  // val reqCycleCnt = Reg(UInt(16.W))
  // reqCycleCnt := reqCycleCnt + BoolStopWatch(ptw.req(0).fire(), ptw.resp.fire() || sfence.valid)
  // XSPerfAccumulate("ptw_req_count", ptw.req.fire())
  // XSPerfAccumulate("ptw_req_cycle", Mux(ptw.resp.fire(), reqCycleCnt, 0.U))
  XSPerfAccumulate("ptw_resp_count", ptw.resp.fire())
  XSPerfAccumulate("ptw_resp_pf_count", ptw.resp.fire() && ptw.resp.bits.pf)

  // Log
  for (i <- 0 until Width) {
    XSDebug(req(i).valid, p"req(${i.U}): (${req(i).valid} ${req(i).ready}) ${req(i).bits}\n")
    XSDebug(resp(i).valid, p"resp(${i.U}): (${resp(i).valid} ${resp(i).ready}) ${resp(i).bits}\n")
  }

  XSDebug(sfence.valid, p"Sfence: ${sfence}\n")
  XSDebug(ParallelOR(valid) || ptw.resp.valid, p"CSR: ${csr}\n")
  XSDebug(ParallelOR(valid) || ptw.resp.valid, p"vmEnable:${vmEnable} hit:${Binary(VecInit(hitVec).asUInt)} miss:${Binary(VecInit(missVec).asUInt)}\n")
  for (i <- ptw.req.indices) {
    XSDebug(ptw.req(i).fire(), p"PTW req:${ptw.req(i).bits}\n")
  }
  XSDebug(ptw.resp.valid, p"PTW resp:${ptw.resp.bits} (v:${ptw.resp.valid} r:${ptw.resp.ready})\n")

  println(s"${q.name}: normal page: ${q.normalNWays} ${q.normalAssociative} ${q.normalReplacer.get} super page: ${q.superNWays} ${q.superAssociative} ${q.superReplacer.get}")

  // NOTE: only for simple tlb debug; uncomment the assert below while debugging and
  // re-comment it afterwards.
  // assert(!io.ptw.resp.valid || io.ptw.resp.bits.entry.tag === io.ptw.resp.bits.entry.ppn, "Simple tlb debug requires vpn === ppn")

  val perfinfo = IO(new Bundle() {
    val perfEvents = Output(new PerfEventsBundle(2))
  })
  if (!q.shouldBlock) {
    val perfEvents = Seq(
      ("access         ", PopCount((0 until Width).map(i => vmEnable && validRegVec(i)))),
      ("miss           ", PopCount((0 until Width).map(i => vmEnable && validRegVec(i) && missVec(i))))
    )
    for ((perf_out, (perf_name, perf)) <- perfinfo.perfEvents.perf_events.zip(perfEvents)) {
      perf_out.incr_step := RegNext(perf)
    }
  } else {
    val perfEvents = Seq(
      ("access         ", PopCount((0 until Width).map(i => io.requestor(i).req.fire()))),
      ("miss           ", PopCount((0 until Width).map(i => ptw.req(i).fire())))
    )
    for ((perf_out, (perf_name, perf)) <- perfinfo.perfEvents.perf_events.zip(perfEvents)) {
      perf_out.incr_step := RegNext(perf)
    }
  }
}

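// TlbReplace hosts the replacement policies for TLBs built with q.outReplace set,
// allowing several TLB instances to feed their access records into shared state.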
class TlbReplace(Width: Int, q: TLBParameters)(implicit p: Parameters) extends TlbModule {
  val io = IO(new TlbReplaceIO(Width, q))

  if (q.normalAssociative == "fa") {
    val re = ReplacementPolicy.fromString(q.normalReplacer, q.normalNWays)
    re.access(io.normalPage.access.map(_.touch_ways))
    io.normalPage.refillIdx := re.way
  } else { // set-associative && plru
    val re = ReplacementPolicy.fromString(q.normalReplacer, q.normalNSets, q.normalNWays)
    re.access(io.normalPage.access.map(_.sets), io.normalPage.access.map(_.touch_ways))
    io.normalPage.refillIdx := { if (q.normalNWays == 1) 0.U else re.way(io.normalPage.chosen_set) }
  }

  if (q.superAssociative == "fa") {
    val re = ReplacementPolicy.fromString(q.superReplacer, q.superNWays)
    re.access(io.superPage.access.map(_.touch_ways))
    io.superPage.refillIdx := re.way
  } else { // set-associative && plru
    val re = ReplacementPolicy.fromString(q.superReplacer, q.superNSets, q.superNWays)
    re.access(io.superPage.access.map(_.sets), io.superPage.access.map(_.touch_ways))
    io.superPage.refillIdx := { if (q.superNWays == 1) 0.U else re.way(io.superPage.chosen_set) }
  }
}

object TLB {
  def apply(
    in: Seq[BlockTlbRequestIO],
    sfence: SfenceBundle,
    csr: TlbCsrBundle,
    width: Int,
    shouldBlock: Boolean,
    q: TLBParameters
  )(implicit p: Parameters) = {
    require(in.length == width)

    val tlb = Module(new TLB(width, q))

    tlb.io.sfence <> sfence
    tlb.io.csr <> csr
    tlb.suggestName(s"tlb_${q.name}")

    if (!shouldBlock) { // dtlb
      for (i <- 0 until width) {
        tlb.io.requestor(i) <> in(i)
      }
    } else { // itlb
      // require(width == 1)
      (0 until width).foreach { i =>
        tlb.io.requestor(i).req.valid := in(i).req.valid
        tlb.io.requestor(i).req.bits := in(i).req.bits
        in(i).req.ready := !tlb.io.requestor(i).resp.bits.miss && in(i).resp.ready && tlb.io.requestor(i).req.ready

        in(i).resp.valid := tlb.io.requestor(i).resp.valid && !tlb.io.requestor(i).resp.bits.miss
        in(i).resp.bits := tlb.io.requestor(i).resp.bits
        tlb.io.requestor(i).resp.ready := in(i).resp.ready
      }
    }
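    // In the blocked (itlb) case above, a response is only exposed on hit and the
    // requestor is back-pressured on miss, so the frontend replays the access until
    // the refill completes. The PTW port is returned for the caller to connect.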
    tlb.io.ptw
  }
}
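
// A minimal usage sketch (hypothetical caller-side names, for illustration only):
// assuming `fetchReq: Seq[BlockTlbRequestIO]`, `fenceIO: SfenceBundle`,
// `csrIO: TlbCsrBundle` and an ITLB parameter set `itlbParams: TLBParameters` exist
// in the caller, an ITLB would be built and its PTW port hooked up roughly like this:
//
//   val itlbPtwIO = TLB(
//     in = fetchReq,
//     sfence = fenceIO,
//     csr = csrIO,
//     width = fetchReq.length,
//     shouldBlock = true, // itlb: block and replay on miss
//     q = itlbParams
//   )
//   ptw.io.tlb(0) <> itlbPtwIO // exact PTW-side port name depends on the SoC wiring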