/***************************************************************************************
 * Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
 * Copyright (c) 2020-2021 Peng Cheng Laboratory
 *
 * XiangShan is licensed under Mulan PSL v2.
 * You can use this software according to the terms and conditions of the Mulan PSL v2.
 * You may obtain a copy of Mulan PSL v2 at:
 *          http://license.coscl.org.cn/MulanPSL2
 *
 * THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
 * EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
 * MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
 *
 * See the Mulan PSL v2 for more details.
 ***************************************************************************************/

package xiangshan.cache.mmu

import org.chipsalliance.cde.config.Parameters
import chisel3._
import chisel3.util._
import utils._
import utility._
import freechips.rocketchip.formal.PropertyClass
import xiangshan.backend.fu.util.HasCSRConst

import scala.math.min

// Banked data array with duplicated read ports, intended for direct-mapped TLBs.
// It is currently unused.
class BankedAsyncDataModuleTemplateWithDup[T <: Data](
  gen: T,
  numEntries: Int,
  numRead: Int,
  numDup: Int,
  numBanks: Int
) extends Module {
  val io = IO(new Bundle {
    val raddr = Vec(numRead, Input(UInt(log2Ceil(numEntries).W)))
    val rdata = Vec(numRead, Vec(numDup, Output(gen)))
    val wen   = Input(Bool())
    val waddr = Input(UInt(log2Ceil(numEntries).W))
    val wdata = Input(gen)
  })
  require(numBanks > 1)
  require(numEntries > numBanks)

  val numBankEntries = numEntries / numBanks
  def bankOffset(address: UInt): UInt = {
    address(log2Ceil(numBankEntries) - 1, 0)
  }

  def bankIndex(address: UInt): UInt = {
    address(log2Ceil(numEntries) - 1, log2Ceil(numBankEntries))
  }

  // The last bank absorbs the remainder when numEntries is not a multiple of numBanks.
  val dataBanks = Seq.tabulate(numBanks)(i => {
    val bankEntries = if (i < numBanks - 1) numBankEntries else (numEntries - (i * numBankEntries))
    Mem(bankEntries, gen)
  })

  // Asynchronous read, registered once: read data is valid one cycle after the request.
  for (i <- 0 until numRead) {
    val data_read = Reg(Vec(numDup, Vec(numBanks, gen)))
    val bank_index = Reg(Vec(numDup, UInt(numBanks.W)))
    for (j <- 0 until numDup) {
      bank_index(j) := UIntToOH(bankIndex(io.raddr(i)))
      for (k <- 0 until numBanks) {
        // Bypass a same-address write so the read observes the newest data.
        data_read(j)(k) := Mux(io.wen && (io.waddr === io.raddr(i)),
          io.wdata, dataBanks(k)(bankOffset(io.raddr(i))))
      }
    }
    // Next cycle: select the addressed bank using the registered one-hot bank index.
    for (j <- 0 until numDup) {
      io.rdata(i)(j) := Mux1H(bank_index(j), data_read(j))
    }
  }

  // write
  for (i <- 0 until numBanks) {
    when (io.wen && (bankIndex(io.waddr) === i.U)) {
      dataBanks(i)(bankOffset(io.waddr)) := io.wdata
    }
  }
}
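// A minimal elaboration sketch of the template above, with hypothetical parameters
// (not taken from any XiangShan configuration), assuming a Chisel 3.x toolchain where
// chisel3.stage.ChiselStage.emitVerilog is available. With numEntries = 64 and
// numBanks = 4, each bank holds 16 entries, so raddr(3, 0) is the in-bank offset and
// raddr(5, 4) is the bank index (one-hot encoded for the final Mux1H).
object BankedAsyncDataModuleTemplateWithDupSketch extends App {
  chisel3.stage.ChiselStage.emitVerilog(new BankedAsyncDataModuleTemplateWithDup(
    gen = UInt(8.W), numEntries = 64, numRead = 2, numDup = 1, numBanks = 4))
}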
class TLBFA(
  parentName: String,
  ports: Int,
  nDups: Int,
  nSets: Int,
  nWays: Int,
  saveLevel: Boolean = false,
  normalPage: Boolean,
  superPage: Boolean
)(implicit p: Parameters) extends TlbModule with HasPerfEvents {

  val io = IO(new TlbStorageIO(nSets, nWays, ports, nDups))
  io.r.req.foreach(_.ready := true.B)

  val v = RegInit(VecInit(Seq.fill(nWays)(false.B)))
  val entries = Reg(Vec(nWays, new TlbSectorEntry(normalPage, superPage)))
  val g = entries.map(_.perm.g)

  for (i <- 0 until ports) {
    val req = io.r.req(i)
    val resp = io.r.resp(i)
    val access = io.access(i)

    val vpn = req.bits.vpn
    val vpn_reg = RegEnable(vpn, req.fire)
    val hasS2xlate = req.bits.s2xlate =/= noS2xlate
    val OnlyS2 = req.bits.s2xlate === onlyStage2
    val OnlyS1 = req.bits.s2xlate === onlyStage1
    // Ways being refilled this cycle are masked out of the hit check.
    val refill_mask = Mux(io.w.valid, UIntToOH(io.w.bits.wayIdx), 0.U(nWays.W))
    val hitVec = VecInit(entries.zip(v.zip(refill_mask.asBools)).map { case (e, (vi, refilling)) =>
      val s2xlate_hit = e.s2xlate === req.bits.s2xlate
      val hit = e.hit(vpn, Mux(hasS2xlate, io.csr.vsatp.asid, io.csr.satp.asid), vmid = io.csr.hgatp.asid, hasS2xlate = hasS2xlate, onlyS2 = OnlyS2, onlyS1 = OnlyS1)
      s2xlate_hit && hit && vi && !refilling
    })

    hitVec.suggestName("hitVec")

    val hitVecReg = RegEnable(hitVec, req.fire)
    // A sector TLB may report multiple hits; see TlbSectorEntry.wbhit.
    XSPerfAccumulate(s"port${i}_multi_hit", !(!resp.valid || (PopCount(hitVecReg) === 0.U || PopCount(hitVecReg) === 1.U)))

    resp.valid := RegNext(req.valid)
    resp.bits.hit := Cat(hitVecReg).orR
    if (nWays == 1) {
      for (d <- 0 until nDups) {
        resp.bits.ppn(d) := RegEnable(entries(0).genPPN(saveLevel, req.valid)(vpn), req.fire)
        resp.bits.perm(d) := RegEnable(entries(0).perm, req.fire)
        resp.bits.g_perm(d) := RegEnable(entries(0).g_perm, req.fire)
        resp.bits.s2xlate(d) := RegEnable(entries(0).s2xlate, req.fire)
      }
    } else {
      for (d <- 0 until nDups) {
        resp.bits.ppn(d) := RegEnable(ParallelMux(hitVec zip entries.map(_.genPPN(saveLevel, req.valid)(vpn))), req.fire)
        resp.bits.perm(d) := RegEnable(ParallelMux(hitVec zip entries.map(_.perm)), req.fire)
        resp.bits.g_perm(d) := RegEnable(ParallelMux(hitVec zip entries.map(_.g_perm)), req.fire)
        resp.bits.s2xlate(d) := RegEnable(ParallelMux(hitVec zip entries.map(_.s2xlate)), req.fire)
      }
    }

    access.sets := get_set_idx(vpn_reg(vpn_reg.getWidth - 1, sectortlbwidth), nSets) // unused for fully-associative storage
    access.touch_ways.valid := resp.valid && Cat(hitVecReg).orR
    access.touch_ways.bits := OHToUInt(hitVecReg)

    resp.bits.hit.suggestName("hit")
    resp.bits.ppn.suggestName("ppn")
    resp.bits.perm.suggestName("perm")
    resp.bits.g_perm.suggestName("g_perm")
  }

  when (io.w.valid) {
    v(io.w.bits.wayIdx) := true.B
    entries(io.w.bits.wayIdx).apply(io.w.bits.data)
  }
  // Write assertion: a refilled entry must not duplicate an existing valid entry.
  val w_hit_vec = VecInit(entries.zip(v).map{ case (e, vi) => e.wbhit(io.w.bits.data, Mux(io.w.bits.data.s2xlate =/= noS2xlate, io.csr.vsatp.asid, io.csr.satp.asid), s2xlate = io.w.bits.data.s2xlate) && vi })
  XSError(io.w.valid && Cat(w_hit_vec).orR, s"${parentName} refill, duplicate with existing entries")

  // Report the refilled way to the replacer one cycle after the write.
  val refill_vpn_reg = RegNext(io.w.bits.data.s1.entry.tag)
  val refill_wayIdx_reg = RegNext(io.w.bits.wayIdx)
  when (RegNext(io.w.valid)) {
    io.access.foreach { access =>
      access.sets := get_set_idx(refill_vpn_reg, nSets)
      access.touch_ways.valid := true.B
      access.touch_ways.bits := refill_wayIdx_reg
    }
  }

  val sfence = io.sfence
  val sfence_valid = sfence.valid && !sfence.bits.hg && !sfence.bits.hv
  val sfence_vpn = sfence.bits.addr(VAddrBits - 1, offLen)
  val sfenceHit = entries.map(_.hit(sfence_vpn, sfence.bits.id, vmid = io.csr.hgatp.asid, hasS2xlate = io.csr.priv.virt))
  val sfenceHit_noasid = entries.map(_.hit(sfence_vpn, sfence.bits.id, ignoreAsid = true, vmid = io.csr.hgatp.asid, hasS2xlate = io.csr.priv.virt))
  // An sfence flushes all sectors of an entry when it hits.
  when (sfence_valid) {
    when (sfence.bits.rs1) { // rs1 === x0: no virtual-address filter
      when (sfence.bits.rs2) { // rs2 === x0: no ASID filter
        // all addresses and all ASIDs (within the current translation regime)
        v.zipWithIndex.foreach { case (a, i) => a := a && !((!io.csr.priv.virt && entries(i).s2xlate === noS2xlate) ||
          (io.csr.priv.virt && entries(i).s2xlate =/= noS2xlate && entries(i).vmid === io.csr.hgatp.asid)) }
      }.otherwise {
        // all addresses but a specific ASID; global entries are kept
        v.zipWithIndex.foreach { case (a, i) => a := a && !(!g(i) && ((!io.csr.priv.virt && entries(i).s2xlate === noS2xlate && entries(i).asid === sfence.bits.id) ||
          (io.csr.priv.virt && entries(i).s2xlate =/= noS2xlate && entries(i).asid === sfence.bits.id && entries(i).vmid === io.csr.hgatp.asid))) }
      }
    }.otherwise {
      when (sfence.bits.rs2) {
        // a specific address but all ASIDs
        v.zipWithIndex.foreach { case (a, i) => a := a && !sfenceHit_noasid(i) }
      }.otherwise {
        // a specific address and a specific ASID; global entries are kept
        v.zipWithIndex.foreach { case (a, i) => a := a && !(sfenceHit(i) && !g(i)) }
      }
    }
  }

  val hfencev_valid = sfence.valid && sfence.bits.hv
  val hfenceg_valid = sfence.valid && sfence.bits.hg
  val hfencev = io.sfence
  val hfencev_vpn = sfence_vpn
  val hfencevHit = entries.map(_.hit(hfencev_vpn, hfencev.bits.id, vmid = io.csr.hgatp.asid, hasS2xlate = true.B))
  val hfencevHit_noasid = entries.map(_.hit(hfencev_vpn, 0.U, ignoreAsid = true, vmid = io.csr.hgatp.asid, hasS2xlate = true.B))
  when (hfencev_valid) {
    when (hfencev.bits.rs1) {
      when (hfencev.bits.rs2) {
        v.zipWithIndex.foreach { case (a, i) => a := a && !(entries(i).s2xlate =/= noS2xlate && entries(i).vmid === io.csr.hgatp.asid) }
      }.otherwise {
        v.zipWithIndex.foreach { case (a, i) => a := a && !(!g(i) && (entries(i).s2xlate =/= noS2xlate && entries(i).asid === sfence.bits.id && entries(i).vmid === io.csr.hgatp.asid)) }
      }
    }.otherwise {
      when (hfencev.bits.rs2) {
        v.zipWithIndex.foreach { case (a, i) => a := a && !hfencevHit_noasid(i) }
      }.otherwise {
        v.zipWithIndex.foreach { case (a, i) => a := a && !(hfencevHit(i) && !g(i)) }
      }
    }
  }

  val hfenceg = io.sfence
  val hfenceg_gvpn = (sfence.bits.addr << 2)(VAddrBits - 1, offLen)
  when (hfenceg_valid) {
    when (hfenceg.bits.rs2) {
      v.zipWithIndex.foreach { case (a, i) => a := a && !(entries(i).s2xlate =/= noS2xlate) }
    }.otherwise {
      v.zipWithIndex.foreach { case (a, i) => a := a && !(entries(i).s2xlate =/= noS2xlate && entries(i).vmid === sfence.bits.id) }
    }
  }

  XSPerfAccumulate("access", io.r.resp.map(_.valid.asUInt).fold(0.U)(_ + _))
  XSPerfAccumulate("hit", io.r.resp.map(a => a.valid && a.bits.hit).fold(0.U)(_.asUInt + _.asUInt))

  for (i <- 0 until nWays) {
    XSPerfAccumulate(s"access${i}", io.r.resp.zip(io.access.map(acc => UIntToOH(acc.touch_ways.bits))).map{ case (a, b) =>
      a.valid && a.bits.hit && b(i) }.fold(0.U)(_.asUInt + _.asUInt))
  }
  for (i <- 0 until nWays) {
    XSPerfAccumulate(s"refill${i}", io.w.valid && io.w.bits.wayIdx === i.U)
  }

  val perfEvents = Seq(
    ("tlbstore_access", io.r.resp.map(_.valid.asUInt).fold(0.U)(_ + _)),
    ("tlbstore_hit   ", io.r.resp.map(a => a.valid && a.bits.hit).fold(0.U)(_.asUInt + _.asUInt)),
  )
  generatePerfEvent()

  println(s"${parentName} tlb_fa: nSets:${nSets} nWays:${nWays}")
}
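// A pure-Scala sketch of the four sfence filter combinations above, ignoring the
// virtualization (s2xlate/vmid) qualifiers for clarity. The helper and its signal
// names are hypothetical and exist only to restate the when-tree; they are not part
// of the design. rs1/rs2 being x0 means "no address filter" / "no ASID filter".
object SfenceFilterSketch {
  def mustFlush(rs1IsX0: Boolean, rs2IsX0: Boolean,
                addrHit: Boolean, asidHit: Boolean, global: Boolean): Boolean =
    (rs1IsX0, rs2IsX0) match {
      case (true,  true)  => true                          // flush everything
      case (true,  false) => asidHit && !global            // all addresses, one ASID
      case (false, true)  => addrHit                       // one address, all ASIDs
      case (false, false) => addrHit && asidHit && !global // one address, one ASID
    }
}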
// Functional TLB used when coreParams.softTLB is set: instead of real storage it
// translates every request through the simulation-only PTEHelper blackbox.
class TLBFakeFA(
  ports: Int,
  nDups: Int,
  nSets: Int,
  nWays: Int,
  useDmode: Boolean = false
)(implicit p: Parameters) extends TlbModule with HasCSRConst {

  val io = IO(new TlbStorageIO(nSets, nWays, ports, nDups))
  io.r.req.foreach(_.ready := true.B)
  val mode = if (useDmode) io.csr.priv.dmode else io.csr.priv.imode
  val vmEnable = if (EnbaleTlbDebug) (io.csr.satp.mode === 8.U) // satp.mode === 8: Sv39
                 else (io.csr.satp.mode === 8.U && (mode < ModeM))

  for (i <- 0 until ports) {
    val req = io.r.req(i)
    val resp = io.r.resp(i)

    val helper = Module(new PTEHelper())
    helper.clock := clock
    helper.satp := io.csr.satp.ppn
    helper.enable := req.fire && vmEnable
    helper.vpn := req.bits.vpn

    val pte = helper.pte.asTypeOf(new PteBundle)
    val ppn = pte.ppn
    val vpn_reg = RegNext(req.bits.vpn)
    val pf = helper.pf
    val level = helper.level

    resp.valid := RegNext(req.valid)
    resp.bits.hit := true.B
    for (d <- 0 until nDups) {
      resp.bits.perm(d).pf := pf
      resp.bits.perm(d).af := false.B
      resp.bits.perm(d).d := pte.perm.d
      resp.bits.perm(d).a := pte.perm.a
      resp.bits.perm(d).g := pte.perm.g
      resp.bits.perm(d).u := pte.perm.u
      resp.bits.perm(d).x := pte.perm.x
      resp.bits.perm(d).w := pte.perm.w
      resp.bits.perm(d).r := pte.perm.r

      // Compose the PPN according to the page-table level: superpages take their
      // low PPN bits from the VPN (level 0: 1 GiB page, level 1: 2 MiB, level 2: 4 KiB).
      resp.bits.ppn(d) := MuxLookup(level, 0.U)(Seq(
        0.U -> Cat(ppn(ppn.getWidth - 1, vpnnLen * 2), vpn_reg(vpnnLen * 2 - 1, 0)),
        1.U -> Cat(ppn(ppn.getWidth - 1, vpnnLen), vpn_reg(vpnnLen - 1, 0)),
        2.U -> ppn)
      )
    }
  }

  io.access := DontCare
}
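// A worked pure-Scala sketch of the level-dependent PPN splice above, assuming
// Sv39-style 9-bit VPN segments (vpnnLen = 9). The helper and its example values are
// illustrative only: level 0 (1 GiB page) keeps 18 VPN bits, level 1 (2 MiB) keeps 9,
// level 2 (4 KiB) keeps none.
object PpnSpliceSketch extends App {
  val vpnnLen = 9
  def splice(level: Int, ppn: BigInt, vpn: BigInt): BigInt = {
    val lowBits = (2 - level) * vpnnLen // VPN bits passed through into the PPN
    ((ppn >> lowBits) << lowBits) | (vpn & ((BigInt(1) << lowBits) - 1))
  }
  // A hypothetical gigapage: high PPN bits from the PTE, low 18 bits from the VPN.
  println(f"0x${splice(level = 0, ppn = 0x80000L, vpn = 0x12345L)}%x") // prints 0x92345
}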
object TlbStorage {
  // Returns the IO bundle of the chosen storage implementation. `associative` is kept
  // for interface compatibility; only fully-associative storage is instantiated here.
  def apply
  (
    parentName: String,
    associative: String,
    ports: Int,
    nDups: Int = 1,
    nSets: Int,
    nWays: Int,
    saveLevel: Boolean = false,
    normalPage: Boolean,
    superPage: Boolean,
    useDmode: Boolean,
    SoftTLB: Boolean
  )(implicit p: Parameters) = {
    if (SoftTLB) {
      val storage = Module(new TLBFakeFA(ports, nDups, nSets, nWays, useDmode))
      storage.suggestName(s"${parentName}_fake_fa")
      storage.io
    } else {
      val storage = Module(new TLBFA(parentName, ports, nDups, nSets, nWays, saveLevel, normalPage, superPage))
      storage.suggestName(s"${parentName}_fa")
      storage.io
    }
  }
}
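// A minimal usage sketch with hypothetical parameter values; the real call site is
// TlbStorageWrapper below. Note that apply returns the selected module's IO bundle,
// not the module itself, so callers wire against the bundle directly.
class TlbStorageUsageSketch(implicit p: Parameters) extends TlbModule {
  val storageIO = TlbStorage(
    parentName = "sketch", associative = "fa", ports = 1, nDups = 1, nSets = 1,
    nWays = 8, normalPage = true, superPage = true, useDmode = false, SoftTLB = false)
  storageIO <> DontCare // placeholder wiring; a real user drives r/w/sfence/csr
}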
class TlbStorageWrapper(ports: Int, q: TLBParameters, nDups: Int = 1)(implicit p: Parameters) extends TlbModule {
  val io = IO(new TlbStorageWrapperIO(ports, q, nDups))

  val page = TlbStorage(
    parentName = q.name + "_storage",
    associative = q.Associative,
    ports = ports,
    nDups = nDups,
    nSets = q.NSets,
    nWays = q.NWays,
    normalPage = true,
    superPage = true,
    useDmode = q.useDmode,
    SoftTLB = coreParams.softTLB
  )

  for (i <- 0 until ports) {
    page.r_req_apply(
      valid = io.r.req(i).valid,
      vpn = io.r.req(i).bits.vpn,
      i = i,
      s2xlate = io.r.req(i).bits.s2xlate
    )
  }

  for (i <- 0 until ports) {
    // pq/pp are named to avoid shadowing the class parameter q and the implicit p.
    val pq = page.r.req(i)
    val pp = page.r.resp(i)
    val rq = io.r.req(i)
    val rp = io.r.resp(i)
    rq.ready := pq.ready // not actually used
    rp.valid := pp.valid // not actually used
    rp.bits.hit := pp.bits.hit
    for (d <- 0 until nDups) {
      rp.bits.ppn(d) := pp.bits.ppn(d)
      rp.bits.perm(d).pf := pp.bits.perm(d).pf
      rp.bits.perm(d).af := pp.bits.perm(d).af
      rp.bits.perm(d).d := pp.bits.perm(d).d
      rp.bits.perm(d).a := pp.bits.perm(d).a
      rp.bits.perm(d).g := pp.bits.perm(d).g
      rp.bits.perm(d).u := pp.bits.perm(d).u
      rp.bits.perm(d).x := pp.bits.perm(d).x
      rp.bits.perm(d).w := pp.bits.perm(d).w
      rp.bits.perm(d).r := pp.bits.perm(d).r
      rp.bits.s2xlate(d) := pp.bits.s2xlate(d)
      rp.bits.g_perm(d) := pp.bits.g_perm(d)
    }
  }

  page.sfence <> io.sfence
  page.csr <> io.csr

  // Pick the refill way: either let an external replacer decide (outReplace), or run
  // a local replacement policy fed by the storage's touch information.
  val refill_idx = if (q.outReplace) {
    io.replace.page.access <> page.access
    io.replace.page.chosen_set := DontCare
    io.replace.page.refillIdx
  } else {
    val re = ReplacementPolicy.fromString(q.Replacer, q.NWays)
    re.access(page.access.map(_.touch_ways))
    re.way
  }

  page.w_apply(
    valid = io.w.valid,
    wayIdx = refill_idx,
    data = io.w.bits.data
  )

  // replacement: package a one-hot touch vector into the Valid way index consumed
  // by the replacement policy.
  def get_access(one_hot: UInt, valid: Bool): Valid[UInt] = {
    val res = Wire(Valid(UInt(log2Up(one_hot.getWidth).W)))
    res.valid := Cat(one_hot).orR && valid
    res.bits := OHToUInt(one_hot)
    res
  }
}
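// A minimal standalone restatement of the get_access helper above: given a one-hot
// hit vector, it produces the Valid way index a replacement policy consumes. The
// module and its port names are hypothetical, for illustration only.
class GetAccessSketch extends Module {
  val io = IO(new Bundle {
    val hitOH = Input(UInt(4.W)) // one-hot hit vector over 4 ways
    val fire  = Input(Bool())    // qualifies the touch with a valid access
    val touch = Valid(UInt(2.W))
  })
  io.touch.valid := io.hitOH.orR && io.fire
  io.touch.bits := OHToUInt(io.hitOH)
}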