// XiangShan/src/main/scala/xiangshan/cache/mmu/TLB.scala (revision a58e33519795596dc4f85fe66907cbc7dde2d66a)
/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.cache.mmu

import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.internal.naming.chiselName
import chisel3.util._
import freechips.rocketchip.util.SRAMAnnotation
import xiangshan._
import utils._
import xiangshan.backend.roq.RoqPtr
import xiangshan.backend.fu.util.HasCSRConst


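// NOTE: the TLB keeps two storages: "normal" for 4 KiB pages and "super" for
// super pages. Each of the `Width` requestors gets its own lookup port; misses
// go out on the shared page-table walker (PTW) port, and PTW responses refill
// one of the two storages. `q` selects associativity, replacement policy,
// same-cycle lookup, and whether the TLB blocks on a miss (ITLB) or reports
// the miss and lets the requestor retry (DTLB).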
@chiselName
class TLB(Width: Int, q: TLBParameters)(implicit p: Parameters) extends TlbModule with HasCSRConst {
  val io = IO(new TlbIO(Width, q))

  require(q.superAssociative == "fa", "super page storage must be fully associative")
  if (q.sameCycle) {
    require(q.normalAssociative == "fa", "same-cycle lookup requires a fully associative normal storage")
  }
  val req = io.requestor.map(_.req)
  val resp = io.requestor.map(_.resp)
  val ptw = io.ptw

  val sfence = io.sfence
  val csr = io.csr
  val satp = csr.satp
  val priv = csr.priv
  val ifetch = if (q.fetchi) true.B else false.B
  val mode = if (q.useDmode) priv.dmode else priv.imode
  // val vmEnable = satp.mode === 8.U // && (mode < ModeM) // FIXME: fix this when booting xv6/linux...
  val vmEnable = if (EnbaleTlbDebug) satp.mode === 8.U
                 else satp.mode === 8.U && (mode < ModeM)
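  // Translation is active only under Sv39 (satp.mode === 8) and below M-mode;
  // with EnbaleTlbDebug set, the privilege check is skipped so translation can
  // be exercised even in M-mode.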

  val reqAddr = req.map(_.bits.vaddr.asTypeOf(new VaBundle))
  val vpn = reqAddr.map(_.vpn)
  val cmd = req.map(_.bits.cmd)
  val valid = req.map(_.valid)

  def widthMapSeq[T <: Seq[Data]](f: Int => T) = (0 until Width).map(f)

  def widthMap[T <: Data](f: Int => T) = (0 until Width).map(f)

  // Normal page && Super page
  val normalPage = TlbStorage(
    name = "normal",
    associative = q.normalAssociative,
    sameCycle = q.sameCycle,
    ports = Width,
    nSets = q.normalNSets,
    nWays = q.normalNWays,
    sramSinglePort = sramSinglePort,
    normalPage = true,
    superPage = false
  )
  val superPage = TlbStorage(
    name = "super",
    associative = q.superAssociative,
    sameCycle = q.sameCycle,
    ports = Width,
    nSets = q.superNSets,
    nWays = q.superNWays,
    sramSinglePort = sramSinglePort,
    normalPage = q.normalAsVictim,
    superPage = true
  )


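  // The normal storage holds 4 KiB leaf pages; the super storage is fully
  // associative (required above) and holds super pages. With q.normalAsVictim
  // the super storage also stores normal pages, effectively serving as a
  // victim/overflow store for the normal storage.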
  for (i <- 0 until Width) {
    normalPage.r_req_apply(
      valid = io.requestor(i).req.valid,
      vpn = vpn(i),
      i = i
    )
    superPage.r_req_apply(
      valid = io.requestor(i).req.valid,
      vpn = vpn(i),
      i = i
    )
  }


  normalPage.victim.in <> superPage.victim.out
  normalPage.victim.out <> superPage.victim.in
  normalPage.sfence <> io.sfence
  superPage.sfence <> io.sfence
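  // The two storages exchange victim entries in both directions, and both are
  // flushed by sfence.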

  def TLBNormalRead(i: Int) = {
    val (normal_hit, normal_ppn, normal_perm, normal_hitVec) = normalPage.r_resp_apply(i)
    val (super_hit, super_ppn, super_perm, super_hitVec) = superPage.r_resp_apply(i)
    assert(!(normal_hit && super_hit && vmEnable && RegNext(req(i).valid, init = false.B)),
      "a request must not hit in both the normal and the super storage")

    val hit = normal_hit || super_hit
    val ppn = Mux(normal_hit, normal_ppn, super_ppn)
    val perm = Mux(normal_hit, normal_perm, super_perm)

    val pf = perm.pf && hit
    val cmdReg = if (!q.sameCycle) RegNext(cmd(i)) else cmd(i)
    val validReg = if (!q.sameCycle) RegNext(valid(i)) else valid(i)
    val offReg = if (!q.sameCycle) RegNext(reqAddr(i).off) else reqAddr(i).off

    // NOTE: when q.sameCycle is false the lookup takes an extra cycle, so the
    // signals below belong to the cycle after the request; cmd/valid/offset
    // are registered above to stay aligned with the hit result.
    val miss = !hit && vmEnable
    hit.suggestName(s"hit_${i}")
    miss.suggestName(s"miss_${i}")

    XSDebug(validReg, p"(${i.U}) hit:${hit} miss:${miss} ppn:${Hexadecimal(ppn)} perm:${perm}\n")

    val paddr = Cat(ppn, offReg)
    val vaddr = SignExt(req(i).bits.vaddr, PAddrBits)

    req(i).ready := resp(i).ready
    resp(i).valid := validReg
    resp(i).bits.paddr := Mux(vmEnable, paddr, if (!q.sameCycle) RegNext(vaddr) else vaddr)
    resp(i).bits.miss := miss
    resp(i).bits.ptwBack := io.ptw.resp.fire()

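    // Page-permission checks, following the RISC-V privileged spec:
    //  - update: the entry is valid but A is clear, or D is clear on a store;
    //    a page fault is raised so software can set the A/D bits.
    //  - modeCheck: U-mode may only touch pages with U set; S-mode may touch
    //    U pages only for data accesses with SUM set, never for fetches.
    //  - priv.mxr makes executable pages readable for loads.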
    val update = hit && (!perm.a || !perm.d && TlbCmd.isWrite(cmdReg)) // take a page fault to update A/D
    val modeCheck = !(mode === ModeU && !perm.u || mode === ModeS && perm.u && (!priv.sum || ifetch))
    val ldPf = !(modeCheck && (perm.r || priv.mxr && perm.x)) && (TlbCmd.isRead(cmdReg) && true.B /* TODO: !isAMO */)
    val stPf = !(modeCheck && perm.w) && (TlbCmd.isWrite(cmdReg) || false.B /* TODO: isAMO */)
    val instrPf = !(modeCheck && perm.x) && TlbCmd.isExec(cmdReg)
    resp(i).bits.excp.pf.ld := (ldPf || update || pf) && vmEnable && hit
    resp(i).bits.excp.pf.st := (stPf || update || pf) && vmEnable && hit
    resp(i).bits.excp.pf.instr := (instrPf || update || pf) && vmEnable && hit

    // if vmEnable, use the pre-calculated PMA check result stored in the entry
    resp(i).bits.mmio := Mux(TlbCmd.isExec(cmdReg), !perm.pi, !perm.pd) && vmEnable && hit
    resp(i).bits.excp.af.ld := Mux(TlbCmd.isAtom(cmdReg), !perm.pa, !perm.pr) && TlbCmd.isRead(cmdReg) && vmEnable && hit
    resp(i).bits.excp.af.st := Mux(TlbCmd.isAtom(cmdReg), !perm.pa, !perm.pw) && TlbCmd.isWrite(cmdReg) && vmEnable && hit
    resp(i).bits.excp.af.instr := Mux(TlbCmd.isAtom(cmdReg), false.B, !perm.pe) && vmEnable && hit
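    // perm.pi/pd/pr/pw/pe/pa mirror the PMAMode icache/dcache/read/write/
    // execute/atomic checks below, presumably evaluated once at refill time.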

    // if !vmEnable, check the PMA on the untranslated address directly
    val (pmaMode, accessWidth) = AddressSpace.memmapAddrMatch(resp(i).bits.paddr)
    when(!vmEnable) {
      resp(i).bits.mmio := Mux(TlbCmd.isExec(cmdReg), !PMAMode.icache(pmaMode), !PMAMode.dcache(pmaMode))
      resp(i).bits.excp.af.ld := Mux(TlbCmd.isAtom(cmdReg), !PMAMode.atomic(pmaMode), !PMAMode.read(pmaMode)) && TlbCmd.isRead(cmdReg)
      resp(i).bits.excp.af.st := Mux(TlbCmd.isAtom(cmdReg), !PMAMode.atomic(pmaMode), !PMAMode.write(pmaMode)) && TlbCmd.isWrite(cmdReg)
      resp(i).bits.excp.af.instr := Mux(TlbCmd.isAtom(cmdReg), false.B, !PMAMode.execute(pmaMode))
    }

    (hit, miss, normal_hitVec, super_hitVec, validReg)
  }

  val readResult = (0 until Width).map(TLBNormalRead(_))
  val hitVec = readResult.map(_._1)
  val missVec = readResult.map(_._2)
  val normalhitVecVec = readResult.map(_._3)
  val superhitVecVec = readResult.map(_._4)
  val validRegVec = readResult.map(_._5)

  // replacement
  def get_access(one_hot: UInt, valid: Bool): Valid[UInt] = {
    val res = Wire(Valid(UInt(log2Up(one_hot.getWidth).W)))
    res.valid := one_hot.orR && valid
    res.bits := OHToUInt(one_hot)
    res
  }
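  // get_access converts a one-hot hit vector into a Valid way index so the
  // replacement policy can record which way was touched.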

  val normal_refill_idx = if (q.outReplace) {
    io.replace.normalPage.access.sets := vpn.map(get_idx(_, q.normalNSets))
    io.replace.normalPage.access.touch_ways := normalhitVecVec.zipWithIndex.map{ case (hv, i) =>
      get_access(hv, validRegVec(i))}
    io.replace.normalPage.chosen_set := get_idx(io.ptw.resp.bits.entry.tag, q.normalNSets)
    io.replace.normalPage.refillIdx
  } else if (q.normalAssociative == "fa") {
    val re = ReplacementPolicy.fromString(q.normalReplacer, q.normalNWays)
    re.access(normalhitVecVec.zipWithIndex.map{ case (hv, i) => get_access(hv, validRegVec(i))})
    re.way
  } else { // set-associative && plru
    val re = ReplacementPolicy.fromString(q.normalReplacer, q.normalNSets, q.normalNWays)
    re.access(vpn.map(get_idx(_, q.normalNSets)), normalhitVecVec.zipWithIndex.map{ case (hv, i) =>
      get_access(hv, validRegVec(i))})
    re.way(get_idx(io.ptw.resp.bits.entry.tag, q.normalNSets))
  }

  val super_refill_idx = if (q.outReplace) {
    io.replace.superPage.access.sets := vpn.map(get_idx(_, q.superNSets))
    io.replace.superPage.access.touch_ways := superhitVecVec.zipWithIndex.map{ case (hv, i) =>
      get_access(hv, validRegVec(i))}
    io.replace.superPage.chosen_set := DontCare
    io.replace.superPage.refillIdx
  } else {
    val re = ReplacementPolicy.fromString(q.superReplacer, q.superNWays)
    re.access(superhitVecVec.zipWithIndex.map{ case (hv, i) => get_access(hv, validRegVec(i))})
    re.way
  }
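  // Refill way selection either comes from the local replacement policy or,
  // with q.outReplace, from an external TlbReplace module via io.replace
  // (e.g. so several TLBs can share one set of replacement state).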

  val refill = ptw.resp.fire() && !sfence.valid
  normalPage.w_apply(
    valid = { if (q.normalAsVictim) false.B
              else refill && ptw.resp.bits.entry.level.get === 2.U },
    wayIdx = normal_refill_idx,
    data = ptw.resp.bits
  )
  superPage.w_apply(
    valid = { if (q.normalAsVictim) refill
              else refill && ptw.resp.bits.entry.level.get =/= 2.U },
    wayIdx = super_refill_idx,
    data = ptw.resp.bits
  )
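  // Sv39 level-2 entries are 4 KiB leaf pages and refill the normal storage;
  // level 0/1 entries (super pages) go to the super storage. With
  // q.normalAsVictim every refill goes to the super storage, and normal
  // entries presumably migrate to the normal storage via the victim path.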

  for (i <- 0 until Width) {
    io.ptw.req(i).valid := validRegVec(i) && missVec(i) && !RegNext(refill)
    io.ptw.req(i).bits.vpn := RegNext(reqAddr(i).vpn)
  }
  io.ptw.resp.ready := true.B
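  // PTW responses are always accepted. Requests are suppressed in the cycle
  // right after a refill, giving the missing access a chance to hit the
  // freshly written entry instead of triggering another walk.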

  if (!q.shouldBlock) {
    for (i <- 0 until Width) {
      XSPerfAccumulate(s"first_access${i}", validRegVec(i) && vmEnable && RegNext(req(i).bits.debug.isFirstIssue))
      XSPerfAccumulate(s"access${i}", validRegVec(i) && vmEnable)
    }
    for (i <- 0 until Width) {
      XSPerfAccumulate(s"first_miss${i}", validRegVec(i) && vmEnable && missVec(i) && RegNext(req(i).bits.debug.isFirstIssue))
      XSPerfAccumulate(s"miss${i}", validRegVec(i) && vmEnable && missVec(i))
    }
  } else {
    // NOTE: the ITLB is blocking, so a resp is valid only on a hit and
    // a req is ready only on a hit
    for (i <- 0 until Width) {
      XSPerfAccumulate(s"access${i}", io.requestor(i).req.fire() && vmEnable)
      XSPerfAccumulate(s"miss${i}", ptw.req(i).fire())
    }
  }
  //val reqCycleCnt = Reg(UInt(16.W))
  //reqCycleCnt := reqCycleCnt + BoolStopWatch(ptw.req(0).fire(), ptw.resp.fire() || sfence.valid)
  //XSPerfAccumulate("ptw_req_count", ptw.req.fire())
  //XSPerfAccumulate("ptw_req_cycle", Mux(ptw.resp.fire(), reqCycleCnt, 0.U))
  XSPerfAccumulate("ptw_resp_count", ptw.resp.fire())
  XSPerfAccumulate("ptw_resp_pf_count", ptw.resp.fire() && ptw.resp.bits.pf)

  // Log
  for (i <- 0 until Width) {
    XSDebug(req(i).valid, p"req(${i.U}): (${req(i).valid} ${req(i).ready}) ${req(i).bits}\n")
    XSDebug(resp(i).valid, p"resp(${i.U}): (${resp(i).valid} ${resp(i).ready}) ${resp(i).bits}\n")
  }

  XSDebug(sfence.valid, p"Sfence: ${sfence}\n")
  XSDebug(ParallelOR(valid) || ptw.resp.valid, p"CSR: ${csr}\n")
  XSDebug(ParallelOR(valid) || ptw.resp.valid, p"vmEnable:${vmEnable} hit:${Binary(VecInit(hitVec).asUInt)} miss:${Binary(VecInit(missVec).asUInt)}\n")
  for (i <- ptw.req.indices) {
    XSDebug(ptw.req(i).fire(), p"PTW req:${ptw.req(i).bits}\n")
  }
  XSDebug(ptw.resp.valid, p"PTW resp:${ptw.resp.bits} (v:${ptw.resp.valid} r:${ptw.resp.ready})\n")

  println(s"${q.name}: normal page: ${q.normalNWays} ${q.normalAssociative} ${q.normalReplacer.get} super page: ${q.superNWays} ${q.superAssociative} ${q.superReplacer.get}")

  // NOTE: only for simple TLB debugging (requires vpn === ppn); keep it commented out otherwise
  // assert(!io.ptw.resp.valid || io.ptw.resp.bits.entry.tag === io.ptw.resp.bits.entry.ppn, "Simple tlb debug requires vpn === ppn")
}

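// Stand-alone replacement state for TLBs configured with q.outReplace: it
// receives set/touched-way information through TlbReplaceIO and returns the
// way index to refill.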
class TlbReplace(Width: Int, q: TLBParameters)(implicit p: Parameters) extends TlbModule {
  val io = IO(new TlbReplaceIO(Width, q))

  if (q.normalAssociative == "fa") {
    val re = ReplacementPolicy.fromString(q.normalReplacer, q.normalNWays)
    re.access(io.normalPage.access.touch_ways)
    io.normalPage.refillIdx := re.way
  } else { // set-associative && plru
    val re = ReplacementPolicy.fromString(q.normalReplacer, q.normalNSets, q.normalNWays)
    re.access(io.normalPage.access.sets, io.normalPage.access.touch_ways)
    io.normalPage.refillIdx := { if (q.normalNWays == 1) 0.U else re.way(io.normalPage.chosen_set) }
  }

  if (q.superAssociative == "fa") {
    val re = ReplacementPolicy.fromString(q.superReplacer, q.superNWays)
    re.access(io.superPage.access.touch_ways)
    io.superPage.refillIdx := re.way
  } else { // set-associative && plru
    val re = ReplacementPolicy.fromString(q.superReplacer, q.superNSets, q.superNWays)
    re.access(io.superPage.access.sets, io.superPage.access.touch_ways)
    io.superPage.refillIdx := { if (q.superNWays == 1) 0.U else re.way(io.superPage.chosen_set) }
  }
}

object TLB {
  def apply
  (
    in: Seq[BlockTlbRequestIO],
    sfence: SfenceBundle,
    csr: TlbCsrBundle,
    width: Int,
    shouldBlock: Boolean,
    q: TLBParameters
  )(implicit p: Parameters) = {
    require(in.length == width)

    val tlb = Module(new TLB(width, q))

    tlb.io.sfence <> sfence
    tlb.io.csr <> csr
    tlb.suggestName(s"tlb_${q.name}")

    if (!shouldBlock) { // dtlb
      for (i <- 0 until width) {
        tlb.io.requestor(i) <> in(i)
      }
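      // dtlb is non-blocking: a miss is reported in resp.bits.miss and the
      // requestor is expected to replay the access after the PTW refill.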
    } else { // itlb
      //require(width == 1)
      (0 until width).foreach { i =>
        tlb.io.requestor(i).req.valid := in(i).req.valid
        tlb.io.requestor(i).req.bits := in(i).req.bits
        in(i).req.ready := !tlb.io.requestor(i).resp.bits.miss && in(i).resp.ready && tlb.io.requestor(i).req.ready

        in(i).resp.valid := tlb.io.requestor(i).resp.valid && !tlb.io.requestor(i).resp.bits.miss
        in(i).resp.bits := tlb.io.requestor(i).resp.bits
        tlb.io.requestor(i).resp.ready := in(i).resp.ready
      }
    }
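    // itlb is blocking: req.ready and resp.valid are gated on !miss, so a
    // missing request is held until the PTW has refilled the entry.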
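    // The PTW port is returned to the caller, which connects it to the page
    // table walker (e.g. through an arbiter when several TLBs share one PTW).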
    tlb.io.ptw
  }
}