/***************************************************************************************
* Copyright (c) 2021-2025 Beijing Institute of Open Source Chip (BOSC)
* Copyright (c) 2020-2024 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
* Copyright (c) 2024-2025 Institute of Information Engineering, Chinese Academy of Sciences
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
* http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.cache.mmu

import org.chipsalliance.cde.config.Parameters
import chisel3._
import chisel3.util._
import xiangshan._
import xiangshan.cache.{HasDCacheParameters, MemoryOpConstants}
import utils._
import utility._
import freechips.rocketchip.diplomacy.{LazyModule, LazyModuleImp}
import freechips.rocketchip.tilelink._
import xiangshan.backend.fu.{PMPReqBundle, PMPRespBundle}

/** Page Table Walk is divided into two parts:
 * One,  PTW:   page walk for PDEs, i.e. everything except the leaf entries, one by one
 * Two,  LLPTW: page walk for PTEs, i.e. only the leaf entries (4KB), in parallel
 */

/** PTW : page table walker
 * a finite state machine
 * only takes 1GB and 2MB page walks,
 * or in other words, handles everything except the last (leaf) level
 **/
class PTWIO()(implicit p: Parameters) extends MMUIOBaseBundle with HasPtwConst {
  val req = Flipped(DecoupledIO(new Bundle {
    val req_info = new L2TlbInnerBundle()
    val l3Hit = if (EnableSv48) Some(new Bool()) else None
    val l2Hit = Bool()
    val ppn = UInt(ptePPNLen.W)
    val stage1Hit = Bool()
    val stage1 = new PtwMergeResp
    val bitmapCheck = Option.when(HasBitmapCheck)(new Bundle {
      val jmp_bitmap_check = Bool() // super page in PtwCache ptw hit, but need bitmap check
      val pte = UInt(XLEN.W) // Page Table Entry
      val cfs = Vec(tlbcontiguous, Bool()) // Bitmap Check Failed Vector
      val SPlevel = UInt(log2Up(Level).W)
    })
  }))
  val resp = DecoupledIO(new Bundle {
    val source = UInt(bSourceWidth.W)
    val s2xlate = UInt(2.W)
    val resp = new PtwMergeResp
    val h_resp = new HptwResp
  })

  val llptw = DecoupledIO(new LLPTWInBundle())
  // NOTE: llptw changed from "connect to llptw" to "connect to page cache"
  // to avoid a corner case that caused duplicate entries

  val hptw = new Bundle {
    val req = DecoupledIO(new Bundle {
      val source = UInt(bSourceWidth.W)
      val id = UInt(log2Up(l2tlbParams.llptwsize).W)
      val gvpn = UInt(ptePPNLen.W)
    })
    val resp = Flipped(Valid(new Bundle {
      val h_resp = Output(new HptwResp)
    }))
  }
  val mem = new Bundle {
    val req = DecoupledIO(new L2TlbMemReqBundle())
    val resp = Flipped(ValidIO(UInt(XLEN.W)))
    val mask = Input(Bool())
  }
  val pmp = new Bundle {
    val req = ValidIO(new PMPReqBundle())
    val resp = Flipped(new PMPRespBundle())
  }

  val refill = Output(new Bundle {
    val req_info = new L2TlbInnerBundle()
    val level = UInt(log2Up(Level + 1).W)
  })
  val bitmap = Option.when(HasBitmapCheck)(new Bundle {
    val req = DecoupledIO(new bitmapReqBundle())
    val resp = Flipped(DecoupledIO(new bitmapRespBundle()))
  })
}
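
// A minimal software sketch of the per-level PTE address calculation that the FSM
// below performs with MakeAddr/getVpnn (see l3addr/l2addr/l1addr): each Sv39/Sv48
// level indexes the current table with one 9-bit slice of the vpn, and PTEs are
// 8 bytes inside a 4KB page. This object is illustrative only and is not referenced
// by the design.
private object PtwWalkSketch {
  // extract the 9-bit vpn segment that indexes the table at `level`
  def vpnn(vpn: BigInt, level: Int): BigInt = (vpn >> (level * 9)) & 0x1ff
  // table base (a ppn, i.e. a 4KB-aligned page) plus 8-byte PTE offset
  def pteAddr(tablePpn: BigInt, vpn: BigInt, level: Int): BigInt =
    (tablePpn << 12) | (vpnn(vpn, level) << 3)
}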
class PTW()(implicit p: Parameters) extends XSModule with HasPtwConst with HasPerfEvents {
  val io = IO(new PTWIO)
  val sfence = io.sfence
  val mem = io.mem
  val req_s2xlate = Reg(UInt(2.W))
  val enableS2xlate = req_s2xlate =/= noS2xlate
  val onlyS1xlate = req_s2xlate === onlyStage1
  val onlyS2xlate = req_s2xlate === onlyStage2

  // mbmc: bitmap csr
  val mbmc = io.csr.mbmc
  val bitmap_enable = (if (HasBitmapCheck) true.B else false.B) && mbmc.BME === 1.U && mbmc.CMODE === 0.U

  val satp = Wire(new TlbSatpBundle())
  when (io.req.fire) {
    satp := Mux(io.req.bits.req_info.s2xlate =/= noS2xlate, io.csr.vsatp, io.csr.satp)
  } .otherwise {
    satp := Mux(enableS2xlate, io.csr.vsatp, io.csr.satp)
  }
  val s1Pbmte = Mux(req_s2xlate =/= noS2xlate, io.csr.hPBMTE, io.csr.mPBMTE)

  val mode = satp.mode
  val hgatp = io.csr.hgatp
  val flush = io.sfence.valid || io.csr.satp.changed || io.csr.vsatp.changed || io.csr.hgatp.changed
  val s2xlate = enableS2xlate && !onlyS1xlate
  val level = RegInit(3.U(log2Up(Level + 1).W))
  val af_level = RegInit(3.U(log2Up(Level + 1).W)) // access fault returns this level
  val gpf_level = RegInit(3.U(log2Up(Level + 1).W))
  val ppn = Reg(UInt(ptePPNLen.W))
  val vpn = Reg(UInt(vpnLen.W)) // vpn or gvpn (onlyS2xlate)
  val levelNext = level - 1.U
  val l3Hit = Reg(Bool())
  val l2Hit = Reg(Bool())
  val jmp_bitmap_check_w = if (HasBitmapCheck) { io.req.bits.bitmapCheck.get.jmp_bitmap_check && io.req.bits.req_info.s2xlate =/= onlyStage2 } else { false.B }
  val jmp_bitmap_check_r = if (HasBitmapCheck) { RegEnable(jmp_bitmap_check_w, io.req.fire) } else { false.B }
  val cache_pte = Option.when(HasBitmapCheck)(RegEnable(io.req.bits.bitmapCheck.get.pte.asTypeOf(new PteBundle().cloneType), io.req.fire))
  val pte = if (HasBitmapCheck) { Mux(jmp_bitmap_check_r, cache_pte.get, io.mem.resp.bits.asTypeOf(new PteBundle().cloneType)) } else { mem.resp.bits.asTypeOf(new PteBundle()) }

  // s/w register
  val s_pmp_check = RegInit(true.B)
  val s_mem_req = RegInit(true.B)
  val s_llptw_req = RegInit(true.B)
  val w_mem_resp = RegInit(true.B)
  val s_hptw_req = RegInit(true.B)
  val w_hptw_resp = RegInit(true.B)
  val s_last_hptw_req = RegInit(true.B)
  val w_last_hptw_resp = RegInit(true.B)
  // for updating "level"
  val mem_addr_update = RegInit(false.B)

  val s_bitmap_check = RegInit(true.B)
  val w_bitmap_resp = RegInit(true.B)
  val whether_need_bitmap_check = RegInit(false.B)
  val bitmap_checkfailed = RegInit(false.B)
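
  // The s_*/w_* registers above implement the wait-scoreboard idiom used throughout
  // this FSM: s_x === false.B means "request x still has to be sent"; w_x === false.B
  // means "response x is still outstanding". A walk step starts by clearing s_x; the
  // fire of the request sets s_x back and clears w_x; the fire of the response sets
  // w_x again. A hedged summary of the handshakes coded below, not extra design:
  //   when (start_step)  { s_x := false.B }
  //   when (x.req.fire)  { s_x := true.B; w_x := false.B }
  //   when (x.resp.fire) { w_x := true.B }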
  val idle = RegInit(true.B)
  val finish = WireInit(false.B)
  dontTouch(finish)
  val vs_finish = WireInit(false.B) // need to wait for G-stage translation; should not do the PMP check yet
  dontTouch(vs_finish)

  val hptw_pageFault = RegInit(false.B)
  val hptw_accessFault = RegInit(false.B)
  val need_last_s2xlate = RegInit(false.B)
  val stage1Hit = RegEnable(io.req.bits.stage1Hit, io.req.fire)
  val stage1 = RegEnable(io.req.bits.stage1, io.req.fire)
  val hptw_resp_stage2 = Reg(Bool())
  val first_gvpn_check_fail = RegInit(false.B)

  // use accessFault to represent a bitmap check failure
  val pte_isAf = Mux(bitmap_enable, pte.isAf() || bitmap_checkfailed, pte.isAf())
  val ppn_af = if (HasBitmapCheck) {
    Mux(enableS2xlate, Mux(onlyS1xlate, pte_isAf, false.B), pte_isAf) // In two-stage address translation, the stage 1 ppn is a vpn for the host, so there is no need to check ppn_high
  } else {
    Mux(enableS2xlate, Mux(onlyS1xlate, pte.isAf(), false.B), pte.isAf()) // In two-stage address translation, the stage 1 ppn is a vpn for the host, so there is no need to check ppn_high
  }
  val pte_valid = RegInit(false.B) // avoid l1tlb pf from stage1 when gpf happens in the first s2xlate in PTW

  val pageFault = pte.isPf(level, s1Pbmte)
  val find_pte = pte.isLeaf() || ppn_af || pageFault
  val to_find_pte = level === 1.U && find_pte === false.B
  val source = RegEnable(io.req.bits.req_info.source, io.req.fire)

  val sent_to_pmp = idle === false.B && (s_pmp_check === false.B || mem_addr_update) && !finish && !vs_finish && !first_gvpn_check_fail && !(find_pte && pte_valid)
  val accessFault = RegEnable(io.pmp.resp.ld || io.pmp.resp.mmio, false.B, sent_to_pmp)

  val l3addr = Wire(UInt(ptePaddrLen.W))
  val l2addr = Wire(UInt(ptePaddrLen.W))
  val l1addr = Wire(UInt(ptePaddrLen.W))
  val hptw_addr = Wire(UInt(ptePaddrLen.W))
  val mem_addr = Wire(UInt(PAddrBits.W))

  l3addr := MakeAddr(satp.ppn, getVpnn(vpn, 3))
  if (EnableSv48) {
    when (mode === Sv48) {
      l2addr := MakeAddr(Mux(l3Hit, ppn, pte.getPPN()), getVpnn(vpn, 2))
    } .otherwise {
      l2addr := MakeAddr(satp.ppn, getVpnn(vpn, 2))
    }
  } else {
    l2addr := MakeAddr(satp.ppn, getVpnn(vpn, 2))
  }
  l1addr := MakeAddr(Mux(l2Hit, ppn, pte.getPPN()), getVpnn(vpn, 1))
  hptw_addr := Mux(af_level === 3.U, l3addr, Mux(af_level === 2.U, l2addr, l1addr))
  mem_addr := hptw_addr(PAddrBits - 1, 0)

  val hptw_resp = Reg(new HptwResp)

  val update_full_gvpn_mem_resp = RegInit(false.B)
  val full_gvpn_reg = Reg(UInt(ptePPNLen.W))
  val full_gvpn_wire = pte.getPPN()
  val full_gvpn = Mux(update_full_gvpn_mem_resp, full_gvpn_wire, full_gvpn_reg)

  val gpaddr = MuxCase(hptw_addr, Seq(
    (stage1Hit || onlyS2xlate) -> Cat(full_gvpn, 0.U(offLen.W)),
    !s_last_hptw_req -> Cat(MuxLookup(level, pte.getPPN())(Seq(
      3.U -> Cat(pte.getPPN()(ptePPNLen - 1, vpnnLen * 3), vpn(vpnnLen * 3 - 1, 0)),
      2.U -> Cat(pte.getPPN()(ptePPNLen - 1, vpnnLen * 2), vpn(vpnnLen * 2 - 1, 0)),
      1.U -> Cat(pte.getPPN()(ptePPNLen - 1, vpnnLen), vpn(vpnnLen - 1, 0))
    )),
    0.U(offLen.W))
  ))
  val gvpn_gpf =
    (!(hptw_pageFault || hptw_accessFault || ((pageFault || ppn_af) && pte_valid)) &&
    Mux(
      s2xlate && io.csr.hgatp.mode === Sv39x4,
      full_gvpn(ptePPNLen - 1, GPAddrBitsSv39x4 - offLen) =/= 0.U,
      Mux(
        s2xlate && io.csr.hgatp.mode === Sv48x4,
        full_gvpn(ptePPNLen - 1, GPAddrBitsSv48x4 - offLen) =/= 0.U,
        false.B
      )
    )) || first_gvpn_check_fail

  val guestFault = hptw_pageFault || hptw_accessFault || gvpn_gpf
  val hpaddr = Cat(hptw_resp.genPPNS2(get_pn(gpaddr)), get_off(gpaddr))
  val fake_h_resp = WireInit(0.U.asTypeOf(new HptwResp))
  fake_h_resp.entry.tag := get_pn(gpaddr)
  fake_h_resp.entry.vmid.map(_ := io.csr.hgatp.vmid)
  fake_h_resp.gpf := true.B

  val fake_pte = WireInit(0.U.asTypeOf(new PteBundle()))
  fake_pte.perm.v := false.B // tell L1TLB this is a fake pte
  fake_pte.ppn := ppn(ppnLen - 1, 0)
  fake_pte.ppn_high := ppn(ptePPNLen - 1, ppnLen)

  io.req.ready := idle
  val ptw_resp = Wire(new PtwMergeResp)
  // pageFault is always valid when pte_valid
  val resp_pf = pte_valid && pageFault
  // when (pte_valid && pageFault) || guestFault, should not report accessFault or ppn_af
  val resp_af = (accessFault || ppn_af) && !((pte_valid && pageFault) || guestFault)
  // should use af_level when accessFault && !((pte_valid && pageFault) || guestFault)
  val resp_level = Mux(accessFault && resp_af, af_level, Mux(guestFault, gpf_level, level))
  // when ptw does not actually send a memory request, fake_pte should be used
  val resp_pte = Mux(pte_valid, pte, fake_pte)
  ptw_resp.apply(resp_pf, resp_af, resp_level, resp_pte, vpn, satp.asid, hgatp.vmid, vpn(sectortlbwidth - 1, 0), not_super = false, not_merge = false, bitmap_checkfailed.asBool)
  val normal_resp = idle === false.B && mem_addr_update && !need_last_s2xlate && (guestFault || (w_mem_resp && find_pte) || (s_pmp_check && accessFault) || onlyS2xlate)
  val stageHit_resp = idle === false.B && hptw_resp_stage2
  io.resp.valid := Mux(stage1Hit, stageHit_resp, normal_resp)
  io.resp.bits.source := source
  io.resp.bits.resp := Mux(stage1Hit || (l3Hit || l2Hit) && guestFault && !pte_valid, stage1, ptw_resp)
  io.resp.bits.h_resp := Mux(gvpn_gpf, fake_h_resp, hptw_resp)
  io.resp.bits.s2xlate := req_s2xlate

  io.llptw.valid := s_llptw_req === false.B && to_find_pte && !accessFault && !guestFault
  io.llptw.bits.req_info.source := source
  io.llptw.bits.req_info.vpn := vpn
  io.llptw.bits.req_info.s2xlate := req_s2xlate
  io.llptw.bits.ppn := DontCare
  if (HasBitmapCheck) {
    io.llptw.bits.bitmapCheck.get.jmp_bitmap_check := DontCare
    io.llptw.bits.bitmapCheck.get.ptes := DontCare
    io.llptw.bits.bitmapCheck.get.cfs := DontCare
    io.llptw.bits.bitmapCheck.get.hitway := DontCare
  }

  io.pmp.req.valid := DontCare // same cycle, do not use valid
  io.pmp.req.bits.addr := Mux(s2xlate, hpaddr, mem_addr)
  io.pmp.req.bits.size := 3.U // TODO: fix it
  io.pmp.req.bits.cmd := TlbCmd.read

  if (HasBitmapCheck) {
    val cache_level = RegEnable(io.req.bits.bitmapCheck.get.SPlevel, io.req.fire)
    io.bitmap.get.req.valid := !s_bitmap_check
    io.bitmap.get.req.bits.bmppn := pte.ppn
    io.bitmap.get.req.bits.id := FsmReqID.U(bMemID.W)
    io.bitmap.get.req.bits.vpn := vpn
    io.bitmap.get.req.bits.level := Mux(jmp_bitmap_check_r, cache_level, level)
    io.bitmap.get.req.bits.way_info := DontCare
    io.bitmap.get.req.bits.hptw_bypassed := false.B
    io.bitmap.get.resp.ready := !w_bitmap_resp
  }
  mem.req.valid := s_mem_req === false.B && !mem.mask && !accessFault && s_pmp_check
  mem.req.bits.addr := Mux(s2xlate, hpaddr, mem_addr)
  mem.req.bits.id := FsmReqID.U(bMemID.W)
  mem.req.bits.hptw_bypassed := false.B

  io.refill.req_info.s2xlate := req_s2xlate
  io.refill.req_info.vpn := vpn
  io.refill.level := level
  io.refill.req_info.source := source

  io.hptw.req.valid := !s_hptw_req || !s_last_hptw_req
  io.hptw.req.bits.id := FsmReqID.U(bMemID.W)
  io.hptw.req.bits.gvpn := get_pn(gpaddr)
  io.hptw.req.bits.source := source

  if (HasBitmapCheck) {
    when (io.req.fire && jmp_bitmap_check_w) {
      idle := false.B
      req_s2xlate := io.req.bits.req_info.s2xlate
      vpn := io.req.bits.req_info.vpn
      s_bitmap_check := false.B
      need_last_s2xlate := false.B
      hptw_pageFault := false.B
      hptw_accessFault := false.B
      level := io.req.bits.bitmapCheck.get.SPlevel
      pte_valid := true.B
      accessFault := false.B
    }
  }

  when (io.req.fire && io.req.bits.stage1Hit && (if (HasBitmapCheck) !jmp_bitmap_check_w else true.B)) {
    idle := false.B
    req_s2xlate := io.req.bits.req_info.s2xlate
    s_last_hptw_req := false.B
    hptw_resp_stage2 := false.B
    need_last_s2xlate := false.B
    hptw_pageFault := false.B
    hptw_accessFault := false.B
    full_gvpn_reg := io.req.bits.stage1.genPPN()
  }

  when (io.resp.fire && stage1Hit) {
    idle := true.B
  }

  when (io.req.fire && !io.req.bits.stage1Hit && (if (HasBitmapCheck) !jmp_bitmap_check_w else true.B)) {
    val req = io.req.bits
    val gvpn_wire = Wire(UInt(ptePPNLen.W))
    if (EnableSv48) {
      when (mode === Sv48) {
        level := Mux(req.l2Hit, 1.U, Mux(req.l3Hit.get, 2.U, 3.U))
        af_level := Mux(req.l2Hit, 1.U, Mux(req.l3Hit.get, 2.U, 3.U))
        gpf_level := Mux(req.l2Hit, 2.U, Mux(req.l3Hit.get, 3.U, 0.U))
        ppn := Mux(req.l2Hit || req.l3Hit.get, io.req.bits.ppn, satp.ppn)
        l3Hit := req.l3Hit.get
        gvpn_wire := Mux(req.l2Hit || req.l3Hit.get, io.req.bits.ppn, satp.ppn)
      } .otherwise {
        level := Mux(req.l2Hit, 1.U, 2.U)
        af_level := Mux(req.l2Hit, 1.U, 2.U)
        gpf_level := Mux(req.l2Hit, 2.U, 0.U)
        ppn := Mux(req.l2Hit, io.req.bits.ppn, satp.ppn)
        l3Hit := false.B
        gvpn_wire := Mux(req.l2Hit, io.req.bits.ppn, satp.ppn)
      }
    } else {
      level := Mux(req.l2Hit, 1.U, 2.U)
      af_level := Mux(req.l2Hit, 1.U, 2.U)
      gpf_level := Mux(req.l2Hit, 2.U, 0.U)
      ppn := Mux(req.l2Hit, io.req.bits.ppn, satp.ppn)
      l3Hit := false.B
      gvpn_wire := Mux(req.l2Hit, io.req.bits.ppn, satp.ppn)
    }
    vpn := io.req.bits.req_info.vpn
    l2Hit := req.l2Hit
    accessFault := false.B
    idle := false.B
    hptw_pageFault := false.B
    hptw_accessFault := false.B
    pte_valid := false.B
    req_s2xlate := io.req.bits.req_info.s2xlate
    when (io.req.bits.req_info.s2xlate === onlyStage2) {
      full_gvpn_reg := io.req.bits.req_info.vpn
      val onlys2_gpaddr = Cat(io.req.bits.req_info.vpn, 0.U(offLen.W)) // is 50 bits; no need to check the high bits when Sv48x4 is enabled
      val check_gpa_high_fail = Mux(io.req.bits.req_info.s2xlate === onlyStage2 && io.csr.hgatp.mode === Sv39x4, onlys2_gpaddr(onlys2_gpaddr.getWidth - 1, GPAddrBitsSv39x4) =/= 0.U, false.B)
      need_last_s2xlate := false.B
      when (check_gpa_high_fail) {
        mem_addr_update := true.B
        first_gvpn_check_fail := true.B
      }.otherwise {
        s_last_hptw_req := false.B
      }
    }.elsewhen (io.req.bits.req_info.s2xlate === allStage) {
      full_gvpn_reg := 0.U
      val allstage_gpaddr = Cat(gvpn_wire, 0.U(offLen.W))
      val check_gpa_high_fail = Mux(io.csr.hgatp.mode === Sv39x4, allstage_gpaddr(allstage_gpaddr.getWidth - 1, GPAddrBitsSv39x4) =/= 0.U, Mux(io.csr.hgatp.mode === Sv48x4, allstage_gpaddr(allstage_gpaddr.getWidth - 1, GPAddrBitsSv48x4) =/= 0.U, false.B))
      when (check_gpa_high_fail) {
        mem_addr_update := true.B
        first_gvpn_check_fail := true.B
      }.otherwise {
        need_last_s2xlate := true.B
        s_hptw_req := false.B
      }
    }.otherwise {
      full_gvpn_reg := 0.U
      need_last_s2xlate := false.B
      s_pmp_check := false.B
    }
  }

  when (io.hptw.req.fire && s_hptw_req === false.B) {
    s_hptw_req := true.B
    w_hptw_resp := false.B
  }

  when (io.hptw.resp.fire && w_hptw_resp === false.B) {
    w_hptw_resp := true.B
    val g_perm_fail = !io.hptw.resp.bits.h_resp.gaf && (!io.hptw.resp.bits.h_resp.entry.perm.get.r && !(io.csr.priv.mxr && io.hptw.resp.bits.h_resp.entry.perm.get.x))
    hptw_pageFault := io.hptw.resp.bits.h_resp.gpf || g_perm_fail
    hptw_accessFault := io.hptw.resp.bits.h_resp.gaf
    hptw_resp := io.hptw.resp.bits.h_resp
    hptw_resp.gpf := io.hptw.resp.bits.h_resp.gpf || g_perm_fail
    when (!(g_perm_fail || io.hptw.resp.bits.h_resp.gpf || io.hptw.resp.bits.h_resp.gaf)) {
      s_pmp_check := false.B
    }.otherwise {
      mem_addr_update := true.B
      need_last_s2xlate := false.B
    }
  }

  when (io.hptw.req.fire && s_last_hptw_req === false.B) {
    w_last_hptw_resp := false.B
    s_last_hptw_req := true.B
  }

  when (io.hptw.resp.fire && w_last_hptw_resp === false.B && stage1Hit) {
    w_last_hptw_resp := true.B
    hptw_resp_stage2 := true.B
    hptw_resp := io.hptw.resp.bits.h_resp
  }

  when (io.hptw.resp.fire && w_last_hptw_resp === false.B && !stage1Hit) {
    hptw_pageFault := io.hptw.resp.bits.h_resp.gpf
    hptw_accessFault := io.hptw.resp.bits.h_resp.gaf
    hptw_resp := io.hptw.resp.bits.h_resp
    w_last_hptw_resp := true.B
    mem_addr_update := true.B
  }

  when (sent_to_pmp && mem_addr_update === false.B) {
    s_mem_req := false.B
    s_pmp_check := true.B
  }

  when (accessFault && idle === false.B) {
    s_pmp_check := true.B
    s_mem_req := true.B
    w_mem_resp := true.B
    s_llptw_req := true.B
    s_hptw_req := true.B
    w_hptw_resp := true.B
    s_last_hptw_req := true.B
    w_last_hptw_resp := true.B
    mem_addr_update := true.B
    need_last_s2xlate := false.B
    if (HasBitmapCheck) {
      s_bitmap_check := true.B
      w_bitmap_resp := true.B
      whether_need_bitmap_check := false.B
      bitmap_checkfailed := false.B
    }
  }

  when (guestFault && idle === false.B) {
    s_pmp_check := true.B
    s_mem_req := true.B
    w_mem_resp := true.B
    s_llptw_req := true.B
    s_hptw_req := true.B
    w_hptw_resp := true.B
    s_last_hptw_req := true.B
    w_last_hptw_resp := true.B
    mem_addr_update := true.B
    need_last_s2xlate := false.B
    if (HasBitmapCheck) {
      s_bitmap_check := true.B
      w_bitmap_resp := true.B
      whether_need_bitmap_check := false.B
      bitmap_checkfailed := false.B
    }
  }

  when (mem.req.fire) {
    s_mem_req := true.B
    w_mem_resp := false.B
  }

  when (mem.resp.fire && w_mem_resp === false.B) {
    w_mem_resp := true.B
    af_level := af_level - 1.U
    gpf_level := Mux(mode === Sv39 && !pte_valid && !l2Hit, gpf_level - 2.U, gpf_level - 1.U)
    pte_valid := true.B
    update_full_gvpn_mem_resp := true.B
    if (HasBitmapCheck) {
      when (bitmap_enable) {
        whether_need_bitmap_check := true.B
      } .otherwise {
        s_llptw_req := false.B
        mem_addr_update := true.B
        whether_need_bitmap_check := false.B
      }
    } else {
      s_llptw_req := false.B
      mem_addr_update := true.B
    }
  }

  when (update_full_gvpn_mem_resp) {
    update_full_gvpn_mem_resp := false.B
    full_gvpn_reg := pte.getPPN()
  }

  if (HasBitmapCheck) {
    when (whether_need_bitmap_check) {
      when (bitmap_enable && (!enableS2xlate || onlyS1xlate) && pte.isLeaf()) {
        s_bitmap_check := false.B
        whether_need_bitmap_check := false.B
      } .otherwise {
        mem_addr_update := true.B
        s_llptw_req := false.B
        whether_need_bitmap_check := false.B
      }
    }
    // bitmap check
    when (io.bitmap.get.req.fire) {
      s_bitmap_check := true.B
      w_bitmap_resp := false.B
    }
    when (io.bitmap.get.resp.fire) {
      w_bitmap_resp := true.B
      mem_addr_update := true.B
      bitmap_checkfailed := io.bitmap.get.resp.bits.cf
    }
  }
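
  // mem_addr_update marks the decision point after each PTE fetch (a descriptive
  // summary of the when-block below, not extra behavior): either descend one level
  // (level := levelNext) and issue the next fetch (through HPTW first when the PTE
  // address itself needs stage-2 translation), hand the leaf level over to LLPTW,
  // launch the final stage-2 translation, or respond to L1TLB.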
  when (mem_addr_update) {
    when (level >= 2.U && !onlyS2xlate && !(guestFault || find_pte || accessFault)) {
      level := levelNext
      when (s2xlate) {
        s_hptw_req := false.B
        vs_finish := true.B
      }.otherwise {
        s_mem_req := false.B
      }
      s_llptw_req := true.B
      mem_addr_update := false.B
    }.elsewhen (io.llptw.valid) {
      when (io.llptw.fire) {
        idle := true.B
        s_llptw_req := true.B
        mem_addr_update := false.B
        need_last_s2xlate := false.B
      }
      finish := true.B
    }.elsewhen (s2xlate && need_last_s2xlate === true.B) {
      need_last_s2xlate := false.B
      when (!(guestFault || accessFault || pageFault || ppn_af)) {
        s_last_hptw_req := false.B
        mem_addr_update := false.B
      }
    }.elsewhen (io.resp.valid) {
      when (io.resp.fire) {
        idle := true.B
        s_llptw_req := true.B
        mem_addr_update := false.B
        accessFault := false.B
        first_gvpn_check_fail := false.B
      }
      finish := true.B
    }
  }

  when (flush) {
    idle := true.B
    s_pmp_check := true.B
    s_mem_req := true.B
    s_llptw_req := true.B
    w_mem_resp := true.B
    accessFault := false.B
    mem_addr_update := false.B
    first_gvpn_check_fail := false.B
    s_hptw_req := true.B
    w_hptw_resp := true.B
    s_last_hptw_req := true.B
    w_last_hptw_resp := true.B
    if (HasBitmapCheck) {
      s_bitmap_check := true.B
      w_bitmap_resp := true.B
      whether_need_bitmap_check := false.B
      bitmap_checkfailed := false.B
    }
  }

  XSDebug(p"[ptw] level:${level} notFound:${pageFault}\n")

  // perf
  XSPerfAccumulate("fsm_count", io.req.fire)
  for (i <- 0 until PtwWidth) {
    XSPerfAccumulate(s"fsm_count_source${i}", io.req.fire && io.req.bits.req_info.source === i.U)
  }
  XSPerfAccumulate("fsm_busy", !idle)
  XSPerfAccumulate("fsm_idle", idle)
  XSPerfAccumulate("resp_blocked", io.resp.valid && !io.resp.ready)
  XSPerfAccumulate("ptw_ppn_af", io.resp.fire && ppn_af)
  XSPerfAccumulate("mem_count", mem.req.fire)
  XSPerfAccumulate("mem_cycle", BoolStopWatch(mem.req.fire, mem.resp.fire, true))
  XSPerfAccumulate("mem_blocked", mem.req.valid && !mem.req.ready)

  val perfEvents = Seq(
    ("fsm_count         ", io.req.fire                                     ),
    ("fsm_busy          ", !idle                                           ),
    ("fsm_idle          ", idle                                            ),
    ("resp_blocked      ", io.resp.valid && !io.resp.ready                 ),
    ("mem_count         ", mem.req.fire                                    ),
    ("mem_cycle         ", BoolStopWatch(mem.req.fire, mem.resp.fire, true)),
    ("mem_blocked       ", mem.req.valid && !mem.req.ready                 ),
  )
  generatePerfEvent()
}
/*========================= LLPTW ==============================*/

/** LLPTW : Last Level Page Table Walker
 * the page walker that only takes the 4KB (last level) page walk.
 **/
class LLPTWInBundle(implicit p: Parameters) extends XSBundle with HasPtwConst {
  val req_info = Output(new L2TlbInnerBundle())
  val ppn = Output(UInt(ptePPNLen.W))
  val bitmapCheck = Option.when(HasBitmapCheck)(new Bundle {
    val jmp_bitmap_check = Bool() // find pte in l0 or sp, but need bitmap check
    val ptes = Vec(tlbcontiguous, UInt(XLEN.W)) // Page Table Entry Vector
    val cfs = Vec(tlbcontiguous, Bool()) // Bitmap Check Failed Vector
    val hitway = UInt(l2tlbParams.l0nWays.W)
  })
}

class LLPTWIO(implicit p: Parameters) extends MMUIOBaseBundle with HasPtwConst {
  val in = Flipped(DecoupledIO(new LLPTWInBundle()))
  val out = DecoupledIO(new Bundle {
    val req_info = Output(new L2TlbInnerBundle())
    val id = Output(UInt(bMemID.W))
    val h_resp = Output(new HptwResp)
    val first_s2xlate_fault = Output(Bool()) // whether a pf/af occurs in the first stage-2 translation
    val af = Output(Bool())
    val bitmapCheck = Option.when(HasBitmapCheck)(new Bundle {
      val jmp_bitmap_check = Bool() // find pte in l0 or sp, but need bitmap check
      val ptes = Vec(tlbcontiguous, UInt(XLEN.W)) // Page Table Entry Vector
      val cfs = Vec(tlbcontiguous, Bool()) // Bitmap Check Failed Vector
    })
  })
  val mem = new Bundle {
    val req = DecoupledIO(new L2TlbMemReqBundle())
    val resp = Flipped(Valid(new Bundle {
      val id = Output(UInt(log2Up(l2tlbParams.llptwsize).W))
      val value = Output(UInt(blockBits.W))
    }))
    val enq_ptr = Output(UInt(log2Ceil(l2tlbParams.llptwsize).W))
    val buffer_it = Output(Vec(l2tlbParams.llptwsize, Bool()))
    val refill = Output(new L2TlbInnerBundle())
    val req_mask = Input(Vec(l2tlbParams.llptwsize, Bool()))
    val flush_latch = Input(Vec(l2tlbParams.llptwsize, Bool()))
  }
  val cache = DecoupledIO(new L2TlbInnerBundle())
  val pmp = new Bundle {
    val req = Valid(new PMPReqBundle())
    val resp = Flipped(new PMPRespBundle())
  }
  val hptw = new Bundle {
    val req = DecoupledIO(new Bundle {
      val source = UInt(bSourceWidth.W)
      val id = UInt(log2Up(l2tlbParams.llptwsize).W)
      val gvpn = UInt(ptePPNLen.W)
    })
    val resp = Flipped(Valid(new Bundle {
      val id = Output(UInt(log2Up(l2tlbParams.llptwsize).W))
      val h_resp = Output(new HptwResp)
    }))
  }
  val bitmap = Option.when(HasBitmapCheck)(new Bundle {
    val req = DecoupledIO(new bitmapReqBundle())
    val resp = Flipped(DecoupledIO(new bitmapRespBundle()))
  })

  val l0_way_info = Option.when(HasBitmapCheck)(Input(UInt(l2tlbParams.l0nWays.W)))
}

class LLPTWEntry(implicit p: Parameters) extends XSBundle with HasPtwConst {
  val req_info = new L2TlbInnerBundle()
  val ppn = UInt(ptePPNLen.W)
  val wait_id = UInt(log2Up(l2tlbParams.llptwsize).W)
  val af = Bool()
  val hptw_resp = new HptwResp()
  val first_s2xlate_fault = Output(Bool())
  val cf = Bool()
  val from_l0 = Bool()
  val way_info = UInt(l2tlbParams.l0nWays.W)
  val jmp_bitmap_check = Bool()
  val ptes = Vec(tlbcontiguous, UInt(XLEN.W))
  val cfs = Vec(tlbcontiguous, Bool())
}
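
// A small software model of the request-merging policy in the LLPTW state machine
// below: a new request that duplicates an in-flight entry is parked as a waiter
// (state_mem_waiting) or bounced back to the page cache (state_cache) instead of
// issuing another memory access. The hardware predicate is dup() from HasPtwConst;
// the exact bits compared here are an assumption (same 64B block of 8 PTEs), and
// this object is illustrative only, not referenced by the design.
private object LlptwDedupSketch {
  // eight 8-byte PTEs per 64B mem response => ignore the low 3 vpn bits
  def sameBlock(vpnA: BigInt, vpnB: BigInt, sectorBits: Int = 3): Boolean =
    (vpnA >> sectorBits) == (vpnB >> sectorBits)
}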

class LLPTW(implicit p: Parameters) extends XSModule with HasPtwConst with HasPerfEvents {
  val io = IO(new LLPTWIO())

  // mbmc: bitmap csr
  val mbmc = io.csr.mbmc
  val bitmap_enable = (if (HasBitmapCheck) true.B else false.B) && mbmc.BME === 1.U && mbmc.CMODE === 0.U

  val flush = io.sfence.valid || io.csr.satp.changed || io.csr.vsatp.changed || io.csr.hgatp.changed
  val entries = RegInit(VecInit(Seq.fill(l2tlbParams.llptwsize)(0.U.asTypeOf(new LLPTWEntry()))))
  val state_idle :: state_hptw_req :: state_hptw_resp :: state_addr_check :: state_mem_req :: state_mem_waiting :: state_mem_out :: state_last_hptw_req :: state_last_hptw_resp :: state_cache :: state_bitmap_check :: state_bitmap_resp :: Nil = Enum(12)
  val state = RegInit(VecInit(Seq.fill(l2tlbParams.llptwsize)(state_idle)))

  val is_emptys = state.map(_ === state_idle)
  val is_mems = state.map(_ === state_mem_req)
  val is_waiting = state.map(_ === state_mem_waiting)
  val is_having = state.map(_ === state_mem_out)
  val is_cache = state.map(_ === state_cache)
  val is_hptw_req = state.map(_ === state_hptw_req)
  val is_last_hptw_req = state.map(_ === state_last_hptw_req)
  val is_hptw_resp = state.map(_ === state_hptw_resp)
  val is_last_hptw_resp = state.map(_ === state_last_hptw_resp)
  val is_bitmap_req = state.map(_ === state_bitmap_check)
  val is_bitmap_resp = state.map(_ === state_bitmap_resp)

  val full = !ParallelOR(is_emptys).asBool
  val enq_ptr = ParallelPriorityEncoder(is_emptys)

  val mem_ptr = ParallelPriorityEncoder(is_having) // TODO: optimize timing, bad: entries -> ptr -> entry
  val mem_arb = Module(new RRArbiterInit(new LLPTWEntry(), l2tlbParams.llptwsize))
  for (i <- 0 until l2tlbParams.llptwsize) {
    mem_arb.io.in(i).bits := entries(i)
    mem_arb.io.in(i).valid := is_mems(i) && !io.mem.req_mask(i)
  }

  // process hptw requests serially
  val hyper_arb1 = Module(new RRArbiterInit(new LLPTWEntry(), l2tlbParams.llptwsize))
  for (i <- 0 until l2tlbParams.llptwsize) {
    hyper_arb1.io.in(i).bits := entries(i)
    hyper_arb1.io.in(i).valid := is_hptw_req(i) && !(Cat(is_hptw_resp).orR) && !(Cat(is_last_hptw_resp).orR)
  }
  val hyper_arb2 = Module(new RRArbiterInit(new LLPTWEntry(), l2tlbParams.llptwsize))
  for (i <- 0 until l2tlbParams.llptwsize) {
    hyper_arb2.io.in(i).bits := entries(i)
    hyper_arb2.io.in(i).valid := is_last_hptw_req(i) && !(Cat(is_hptw_resp).orR) && !(Cat(is_last_hptw_resp).orR)
  }

  val bitmap_arb = Option.when(HasBitmapCheck)(Module(new RRArbiter(new bitmapReqBundle(), l2tlbParams.llptwsize)))
  val way_info = Option.when(HasBitmapCheck)(Wire(Vec(l2tlbParams.llptwsize, UInt(l2tlbParams.l0nWays.W))))
  if (HasBitmapCheck) {
    for (i <- 0 until l2tlbParams.llptwsize) {
      bitmap_arb.get.io.in(i).valid := is_bitmap_req(i)
      bitmap_arb.get.io.in(i).bits.bmppn := entries(i).ppn
      bitmap_arb.get.io.in(i).bits.vpn := entries(i).req_info.vpn
      bitmap_arb.get.io.in(i).bits.id := i.U
      bitmap_arb.get.io.in(i).bits.level := 0.U // last level
      bitmap_arb.get.io.in(i).bits.way_info := Mux(entries(i).from_l0, entries(i).way_info, way_info.get(i))
      bitmap_arb.get.io.in(i).bits.hptw_bypassed := false.B
    }
  }

  val cache_ptr = ParallelMux(is_cache, (0 until l2tlbParams.llptwsize).map(_.U(log2Up(l2tlbParams.llptwsize).W)))

  // duplicate req
  // to_wait: a duplicate entry is still accessing mem, so wait for it (state_mem_waiting)
  // to_cache: the duplicate's data came back just now, so go to state_cache
  val dup_vec = state.indices.map(i =>
    dup(io.in.bits.req_info.vpn, entries(i).req_info.vpn) && io.in.bits.req_info.s2xlate === entries(i).req_info.s2xlate
  )
  val dup_req_fire = mem_arb.io.out.fire && dup(io.in.bits.req_info.vpn, mem_arb.io.out.bits.req_info.vpn) && io.in.bits.req_info.s2xlate === mem_arb.io.out.bits.req_info.s2xlate // dup with the req fire entry
  val dup_vec_wait = dup_vec.zip(is_waiting).map{case (d, w) => d && w} // dup with "mem_waiting" entries that have already sent the mem req
  val dup_vec_having = dup_vec.zipWithIndex.map{case (d, i) => d && is_having(i)} // dup with the "mem_out" entry that received the data just now
  val dup_vec_bitmap = dup_vec.zipWithIndex.map{case (d, i) => d && (is_bitmap_req(i) || is_bitmap_resp(i))}
  val dup_vec_last_hptw = dup_vec.zipWithIndex.map{case (d, i) => d && (is_last_hptw_req(i) || is_last_hptw_resp(i))}
  val wait_id = Mux(dup_req_fire, mem_arb.io.chosen, ParallelMux(dup_vec_wait zip entries.map(_.wait_id)))
  val dup_wait_resp = io.mem.resp.fire && VecInit(dup_vec_wait)(io.mem.resp.bits.id) && !io.mem.flush_latch(io.mem.resp.bits.id) // dup with the entry whose data comes next cycle
  val to_wait = Cat(dup_vec_wait).orR || dup_req_fire
  val to_mem_out = dup_wait_resp && ((entries(io.mem.resp.bits.id).req_info.s2xlate === noS2xlate) || (entries(io.mem.resp.bits.id).req_info.s2xlate === onlyStage1)) && !bitmap_enable
  val to_bitmap_req = (if (HasBitmapCheck) true.B else false.B) && dup_wait_resp && ((entries(io.mem.resp.bits.id).req_info.s2xlate === noS2xlate) || (entries(io.mem.resp.bits.id).req_info.s2xlate === onlyStage1)) && bitmap_enable
  val to_cache = if (HasBitmapCheck) Cat(dup_vec_bitmap).orR || Cat(dup_vec_having).orR || Cat(dup_vec_last_hptw).orR
                 else Cat(dup_vec_having).orR || Cat(dup_vec_last_hptw).orR
  val to_hptw_req = io.in.bits.req_info.s2xlate === allStage
  val to_last_hptw_req = dup_wait_resp && entries(io.mem.resp.bits.id).req_info.s2xlate === allStage
  val last_hptw_req_id = io.mem.resp.bits.id
  val req_paddr = MakeAddr(io.in.bits.ppn(ppnLen-1, 0), getVpnn(io.in.bits.req_info.vpn, 0))
  val req_hpaddr = MakeAddr(entries(last_hptw_req_id).hptw_resp.genPPNS2(get_pn(req_paddr)), getVpnn(io.in.bits.req_info.vpn, 0))
  val index = Mux(entries(last_hptw_req_id).req_info.s2xlate === allStage, req_hpaddr, req_paddr)(log2Up(l2tlbParams.blockBytes)-1, log2Up(XLEN/8))
  val last_hptw_req_ppn = io.mem.resp.bits.value.asTypeOf(Vec(blockBits / XLEN, new PteBundle()))(index).getPPN()
  XSError(RegNext(dup_req_fire && Cat(dup_vec_wait).orR, init = false.B), "mem req but some entries already waiting, should not happen")

  XSError(io.in.fire && ((to_mem_out && to_cache) || (to_wait && to_cache)), "llptw enq, to cache conflict with to mem")
  val mem_resp_hit = RegInit(VecInit(Seq.fill(l2tlbParams.llptwsize)(false.B)))
  val enq_state_normal = MuxCase(state_addr_check, Seq(
    to_mem_out -> state_mem_out, // same as below, but the mem resp arrives now
    to_bitmap_req -> state_bitmap_check,
    to_last_hptw_req -> state_last_hptw_req,
    to_wait -> state_mem_waiting,
    to_cache -> state_cache,
    to_hptw_req -> state_hptw_req
  ))
  val enq_state = Mux(from_pre(io.in.bits.req_info.source) && enq_state_normal =/= state_addr_check, state_idle, enq_state_normal)
  when (io.in.fire && (if (HasBitmapCheck) !io.in.bits.bitmapCheck.get.jmp_bitmap_check else true.B)) {
    // if a prefetch req does not need mem access, just give it up
    // so there will be at most 1 + FilterSize entries that need to re-access the page cache
    // so 2 + FilterSize is enough to avoid dead-lock
    state(enq_ptr) := enq_state
    entries(enq_ptr).req_info := io.in.bits.req_info
    entries(enq_ptr).ppn := Mux(to_last_hptw_req, last_hptw_req_ppn, io.in.bits.ppn)
    entries(enq_ptr).wait_id := Mux(to_wait, wait_id, enq_ptr)
    entries(enq_ptr).af := false.B
    if (HasBitmapCheck) {
      entries(enq_ptr).cf := false.B
      entries(enq_ptr).from_l0 := false.B
      entries(enq_ptr).way_info := 0.U
      entries(enq_ptr).jmp_bitmap_check := false.B
      for (i <- 0 until tlbcontiguous) {
        entries(enq_ptr).ptes(i) := 0.U
      }
      entries(enq_ptr).cfs := io.in.bits.bitmapCheck.get.cfs
    }
    entries(enq_ptr).hptw_resp := Mux(to_last_hptw_req, entries(last_hptw_req_id).hptw_resp, Mux(to_wait, entries(wait_id).hptw_resp, entries(enq_ptr).hptw_resp))
    entries(enq_ptr).first_s2xlate_fault := false.B
    mem_resp_hit(enq_ptr) := to_bitmap_req || to_mem_out || to_last_hptw_req
  }

  if (HasBitmapCheck) {
    when (io.in.bits.bitmapCheck.get.jmp_bitmap_check && io.in.fire) {
      state(enq_ptr) := state_bitmap_check
      entries(enq_ptr).req_info := io.in.bits.req_info
      entries(enq_ptr).ppn := io.in.bits.bitmapCheck.get.ptes(io.in.bits.req_info.vpn(sectortlbwidth - 1, 0)).asTypeOf(new PteBundle().cloneType).ppn
      entries(enq_ptr).wait_id := enq_ptr
      entries(enq_ptr).af := false.B
      entries(enq_ptr).cf := false.B
      entries(enq_ptr).from_l0 := true.B
      entries(enq_ptr).way_info := io.in.bits.bitmapCheck.get.hitway
      entries(enq_ptr).jmp_bitmap_check := io.in.bits.bitmapCheck.get.jmp_bitmap_check
      entries(enq_ptr).ptes := io.in.bits.bitmapCheck.get.ptes
      entries(enq_ptr).cfs := io.in.bits.bitmapCheck.get.cfs
      mem_resp_hit(enq_ptr) := false.B
    }
  }

  val enq_ptr_reg = RegNext(enq_ptr)
  val need_addr_check = GatedValidRegNext(enq_state === state_addr_check && io.in.fire && !flush && (if (HasBitmapCheck) !io.in.bits.bitmapCheck.get.jmp_bitmap_check else true.B))

  val hasHptwResp = ParallelOR(state.map(_ === state_hptw_resp)).asBool
  val hptw_resp_ptr_reg = RegNext(io.hptw.resp.bits.id)
  val hptw_need_addr_check = RegNext(hasHptwResp && io.hptw.resp.fire && !flush) && state(hptw_resp_ptr_reg) === state_addr_check

  val ptes = io.mem.resp.bits.value.asTypeOf(Vec(blockBits / XLEN, new PteBundle()))
  val gpaddr = MakeGPAddr(entries(hptw_resp_ptr_reg).ppn, getVpnn(entries(hptw_resp_ptr_reg).req_info.vpn, 0))
  val hptw_resp = entries(hptw_resp_ptr_reg).hptw_resp
  val hpaddr = Cat(hptw_resp.genPPNS2(get_pn(gpaddr)), get_off(gpaddr))
  val addr = RegEnable(MakeAddr(io.in.bits.ppn(ppnLen - 1, 0), getVpnn(io.in.bits.req_info.vpn, 0)), io.in.fire)
  io.pmp.req.valid := need_addr_check || hptw_need_addr_check
  io.pmp.req.bits.addr := Mux(hptw_need_addr_check, hpaddr, addr)
  io.pmp.req.bits.cmd := TlbCmd.read
  io.pmp.req.bits.size := 3.U // TODO: fix it
  val pmp_resp_valid = io.pmp.req.valid // same cycle
  when (pmp_resp_valid) {
    // NOTE: when the pmp resp arrives but the state is not addr check, the entry is a dup of another entry whose state was changed before
    // when dup with the req-ing entry, it was set to mem_waiting (above), and ld must be false, so dontcare
    val ptr = Mux(hptw_need_addr_check, hptw_resp_ptr_reg, enq_ptr_reg)
    val accessFault = io.pmp.resp.ld || io.pmp.resp.mmio
    entries(ptr).af := accessFault
    state(ptr) := Mux(accessFault, state_mem_out, state_mem_req)
  }

  when (mem_arb.io.out.fire) {
    for (i <- state.indices) {
      when (state(i) =/= state_idle && state(i) =/= state_mem_out && state(i) =/= state_last_hptw_req && state(i) =/= state_last_hptw_resp
        && (if (HasBitmapCheck) state(i) =/= state_bitmap_check && state(i) =/= state_bitmap_resp else true.B)
        && entries(i).req_info.s2xlate === mem_arb.io.out.bits.req_info.s2xlate
        && dup(entries(i).req_info.vpn, mem_arb.io.out.bits.req_info.vpn)) {
        // NOTE: "dup enq sets state to mem_wait" -> "sending the req sets other dup entries to mem_wait"
        state(i) := state_mem_waiting
        entries(i).hptw_resp := entries(mem_arb.io.chosen).hptw_resp
        entries(i).wait_id := mem_arb.io.chosen
      }
    }
  }
  when (io.mem.resp.fire) {
    state.indices.map{i =>
      when (state(i) === state_mem_waiting && io.mem.resp.bits.id === entries(i).wait_id) {
        val req_paddr = MakeAddr(entries(i).ppn, getVpnn(entries(i).req_info.vpn, 0))
        val req_hpaddr = MakeAddr(entries(i).hptw_resp.genPPNS2(get_pn(req_paddr)), getVpnn(entries(i).req_info.vpn, 0))
        val index = Mux(entries(i).req_info.s2xlate === allStage, req_hpaddr, req_paddr)(log2Up(l2tlbParams.blockBytes)-1, log2Up(XLEN/8))
        val enableS2xlate = entries(i).req_info.s2xlate =/= noS2xlate
        val s1Pbmte = Mux(enableS2xlate, io.csr.hPBMTE, io.csr.mPBMTE)
        val vsStagePf = ptes(index).isPf(0.U, s1Pbmte) || !ptes(index).isLeaf() // page fault in VS-stage
        // page fault in G-stage; when vsStagePf is valid, gStagePf should not be checked
        val gStagePf = ptes(index).isStage1Gpf(io.csr.hgatp.mode) && !vsStagePf
        state(i) := Mux(entries(i).req_info.s2xlate === allStage && !(vsStagePf || gStagePf),
          state_last_hptw_req,
          Mux(bitmap_enable, state_bitmap_check, state_mem_out))
        mem_resp_hit(i) := true.B
        entries(i).ppn := Mux(ptes(index).n === 0.U, ptes(index).getPPN(), Cat(ptes(index).getPPN()(ptePPNLen - 1, pteNapotBits), entries(i).req_info.vpn(pteNapotBits - 1, 0))) // for the last stage 2 translation
        // af will be judged in L2 TLB `contiguous_pte_to_merge_ptwResp`
        entries(i).hptw_resp.gpf := Mux(entries(i).req_info.s2xlate === allStage, gStagePf, false.B)
      }
    }
  }

  if (HasBitmapCheck) {
    for (i <- 0 until l2tlbParams.llptwsize) {
      way_info.get(i) := DataHoldBypass(io.l0_way_info.get, mem_resp_hit(i))
    }
  }

  when (hyper_arb1.io.out.fire) {
    for (i <- state.indices) {
      when (state(i) === state_hptw_req && entries(i).ppn === hyper_arb1.io.out.bits.ppn && entries(i).req_info.s2xlate === allStage && hyper_arb1.io.chosen === i.U) {
        state(i) := state_hptw_resp
        entries(i).wait_id := hyper_arb1.io.chosen
      }
    }
  }

  when (hyper_arb2.io.out.fire) {
    for (i <- state.indices) {
      when (state(i) === state_last_hptw_req && entries(i).ppn === hyper_arb2.io.out.bits.ppn && entries(i).req_info.s2xlate === allStage && hyper_arb2.io.chosen === i.U) {
        state(i) := state_last_hptw_resp
        entries(i).wait_id := hyper_arb2.io.chosen
      }
    }
  }

  if (HasBitmapCheck) {
    when (bitmap_arb.get.io.out.fire) {
      for (i <- state.indices) {
        when (is_bitmap_req(i) && bitmap_arb.get.io.out.bits.bmppn === entries(i).ppn(ppnLen - 1, 0)) {
          state(i) := state_bitmap_resp
          entries(i).wait_id := bitmap_arb.get.io.chosen
        }
      }
    }

    when (io.bitmap.get.resp.fire) {
      for (i <- state.indices) {
        when (is_bitmap_resp(i) && io.bitmap.get.resp.bits.id === entries(i).wait_id) {
          entries(i).cfs := io.bitmap.get.resp.bits.cfs
          entries(i).cf := io.bitmap.get.resp.bits.cf
          state(i) := state_mem_out
        }
      }
    }
  }

  when (io.hptw.resp.fire) {
    for (i <- state.indices) {
      when (state(i) === state_hptw_resp && io.hptw.resp.bits.id === entries(i).wait_id && io.hptw.resp.bits.h_resp.entry.tag === entries(i).ppn) {
        val check_g_perm_fail = !io.hptw.resp.bits.h_resp.gaf && (!io.hptw.resp.bits.h_resp.entry.perm.get.r && !(io.csr.priv.mxr && io.hptw.resp.bits.h_resp.entry.perm.get.x))
        when (check_g_perm_fail || io.hptw.resp.bits.h_resp.gaf || io.hptw.resp.bits.h_resp.gpf) {
          state(i) := state_mem_out
          entries(i).hptw_resp := io.hptw.resp.bits.h_resp
          entries(i).hptw_resp.gpf := io.hptw.resp.bits.h_resp.gpf || check_g_perm_fail
          entries(i).first_s2xlate_fault := io.hptw.resp.bits.h_resp.gaf || io.hptw.resp.bits.h_resp.gpf
        }.otherwise { // change the entry that is waiting for the hptw resp
          val need_to_waiting_vec = state.indices.map(i => state(i) === state_mem_waiting && dup(entries(i).req_info.vpn, entries(io.hptw.resp.bits.id).req_info.vpn))
          val waiting_index = ParallelMux(need_to_waiting_vec zip entries.map(_.wait_id))
          state(i) := Mux(Cat(need_to_waiting_vec).orR, state_mem_waiting, state_addr_check)
          entries(i).hptw_resp := io.hptw.resp.bits.h_resp
          entries(i).wait_id := Mux(Cat(need_to_waiting_vec).orR, waiting_index, entries(i).wait_id)
          // TODO: change the entry that has the same hptw req
        }
      }
      when (state(i) === state_last_hptw_resp && io.hptw.resp.bits.id === entries(i).wait_id && io.hptw.resp.bits.h_resp.entry.tag === entries(i).ppn) {
        state(i) := state_mem_out
        entries(i).hptw_resp := io.hptw.resp.bits.h_resp
        // TODO: change the entry that has the same hptw req
      }
    }
  }
  when (io.out.fire) {
    assert(state(mem_ptr) === state_mem_out)
    state(mem_ptr) := state_idle
  }
  mem_resp_hit.map(a => when (a) { a := false.B } )

  when (io.cache.fire) {
    state(cache_ptr) := state_idle
  }
  XSError(io.out.fire && io.cache.fire && (mem_ptr === cache_ptr), "mem resp and cache fire at the same time at the same entry")

  when (flush) {
    state.map(_ := state_idle)
  }

  io.in.ready := !full

  io.out.valid := ParallelOR(is_having).asBool
  io.out.bits.req_info := entries(mem_ptr).req_info
  io.out.bits.id := mem_ptr
  if (HasBitmapCheck) {
    io.out.bits.af := Mux(bitmap_enable, entries(mem_ptr).af || entries(mem_ptr).cf, entries(mem_ptr).af)
    io.out.bits.bitmapCheck.get.jmp_bitmap_check := entries(mem_ptr).jmp_bitmap_check
    io.out.bits.bitmapCheck.get.ptes := entries(mem_ptr).ptes
    io.out.bits.bitmapCheck.get.cfs := entries(mem_ptr).cfs
  } else {
    io.out.bits.af := entries(mem_ptr).af
  }

  io.out.bits.h_resp := entries(mem_ptr).hptw_resp
  io.out.bits.first_s2xlate_fault := entries(mem_ptr).first_s2xlate_fault

  val hptw_req_arb = Module(new Arbiter(new Bundle {
    val source = UInt(bSourceWidth.W)
    val id = UInt(log2Up(l2tlbParams.llptwsize).W)
    val ppn = UInt(ptePPNLen.W)
  }, 2))
  // first stage 2 translation
  hptw_req_arb.io.in(0).valid := hyper_arb1.io.out.valid
  hptw_req_arb.io.in(0).bits.source := hyper_arb1.io.out.bits.req_info.source
  hptw_req_arb.io.in(0).bits.ppn := hyper_arb1.io.out.bits.ppn
  hptw_req_arb.io.in(0).bits.id := hyper_arb1.io.chosen
  hyper_arb1.io.out.ready := hptw_req_arb.io.in(0).ready
  // last stage 2 translation
  hptw_req_arb.io.in(1).valid := hyper_arb2.io.out.valid
  hptw_req_arb.io.in(1).bits.source := hyper_arb2.io.out.bits.req_info.source
  hptw_req_arb.io.in(1).bits.ppn := hyper_arb2.io.out.bits.ppn
  hptw_req_arb.io.in(1).bits.id := hyper_arb2.io.chosen
  hyper_arb2.io.out.ready := hptw_req_arb.io.in(1).ready
  hptw_req_arb.io.out.ready := io.hptw.req.ready
  io.hptw.req.valid := hptw_req_arb.io.out.fire && !flush
  io.hptw.req.bits.gvpn := hptw_req_arb.io.out.bits.ppn
  io.hptw.req.bits.id := hptw_req_arb.io.out.bits.id
  io.hptw.req.bits.source := hptw_req_arb.io.out.bits.source

  io.mem.req.valid := mem_arb.io.out.valid && !flush
  val mem_paddr = MakeAddr(mem_arb.io.out.bits.ppn, getVpnn(mem_arb.io.out.bits.req_info.vpn, 0))
  val mem_hpaddr = MakeAddr(mem_arb.io.out.bits.hptw_resp.genPPNS2(get_pn(mem_paddr)), getVpnn(mem_arb.io.out.bits.req_info.vpn, 0))
  io.mem.req.bits.addr := Mux(mem_arb.io.out.bits.req_info.s2xlate === allStage, mem_hpaddr, mem_paddr)
  io.mem.req.bits.id := mem_arb.io.chosen
  io.mem.req.bits.hptw_bypassed := false.B
  mem_arb.io.out.ready := io.mem.req.ready
  val mem_refill_id = RegNext(io.mem.resp.bits.id(log2Up(l2tlbParams.llptwsize)-1, 0))
  io.mem.refill := entries(mem_refill_id).req_info
  io.mem.refill.s2xlate := entries(mem_refill_id).req_info.s2xlate
  io.mem.buffer_it := mem_resp_hit
  io.mem.enq_ptr := enq_ptr

  io.cache.valid := Cat(is_cache).orR
  io.cache.bits := ParallelMux(is_cache, entries.map(_.req_info))

  val has_bitmap_resp = ParallelOR(is_bitmap_resp).asBool
  if (HasBitmapCheck) {
    io.bitmap.get.req.valid := bitmap_arb.get.io.out.valid && !flush
    io.bitmap.get.req.bits.bmppn := bitmap_arb.get.io.out.bits.bmppn
    io.bitmap.get.req.bits.id := bitmap_arb.get.io.chosen
    io.bitmap.get.req.bits.vpn := bitmap_arb.get.io.out.bits.vpn
    io.bitmap.get.req.bits.level := 0.U
    io.bitmap.get.req.bits.way_info := bitmap_arb.get.io.out.bits.way_info
    io.bitmap.get.req.bits.hptw_bypassed := bitmap_arb.get.io.out.bits.hptw_bypassed
    bitmap_arb.get.io.out.ready := io.bitmap.get.req.ready
    io.bitmap.get.resp.ready := has_bitmap_resp
  }

  XSPerfAccumulate("llptw_in_count", io.in.fire)
  XSPerfAccumulate("llptw_in_block", io.in.valid && !io.in.ready)
  for (i <- 0 until 7) {
    XSPerfAccumulate(s"enq_state${i}", io.in.fire && enq_state === i.U)
  }
  for (i <- 0 until (l2tlbParams.llptwsize + 1)) {
    XSPerfAccumulate(s"util${i}", PopCount(is_emptys.map(!_)) === i.U)
    XSPerfAccumulate(s"mem_util${i}", PopCount(is_mems) === i.U)
    XSPerfAccumulate(s"waiting_util${i}", PopCount(is_waiting) === i.U)
  }
  XSPerfAccumulate("mem_count", io.mem.req.fire)
  XSPerfAccumulate("mem_cycle", PopCount(is_waiting) =/= 0.U)
  XSPerfAccumulate("blocked_in", io.in.valid && !io.in.ready)

  val perfEvents = Seq(
    ("tlbllptw_incount  ", io.in.fire                 ),
    ("tlbllptw_inblock  ", io.in.valid && !io.in.ready),
    ("tlbllptw_memcount ", io.mem.req.fire            ),
    ("tlbllptw_memcycle ", PopCount(is_waiting)       ),
  )
  generatePerfEvent()
}
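
// A minimal software sketch of the stage-2 address composition used with HPTW
// results in this file (hpaddr = Cat(hptw_resp.genPPNS2(get_pn(gpaddr)),
// get_off(gpaddr))): for a 4KB mapping the 12-bit page offset of the guest-physical
// address is kept and only the page number is swapped for the host PPN; superpages
// keep more low bits, which genPPNS2 handles in hardware. Illustrative only, not
// referenced by the design.
private object S2AddrSketch {
  private val offLen = 12
  def hpaddr(gpaddr: BigInt, hostPpn: BigInt): BigInt =
    (hostPpn << offLen) | (gpaddr & ((BigInt(1) << offLen) - 1))
}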

/*========================= HPTW ==============================*/

/** HPTW : Hypervisor Page Table Walker
 * the page walker that takes the virtual machine's page walk,
 * i.e. guest physical address translation: guest physical address -> host physical address
 **/
class HPTWIO()(implicit p: Parameters) extends MMUIOBaseBundle with HasPtwConst {
  val req = Flipped(DecoupledIO(new Bundle {
    val source = UInt(bSourceWidth.W)
    val id = UInt(log2Up(l2tlbParams.llptwsize).W)
    val gvpn = UInt(gvpnLen.W)
    val ppn = UInt(ppnLen.W)
    val l3Hit = if (EnableSv48) Some(new Bool()) else None
    val l2Hit = Bool()
    val l1Hit = Bool()
    val bypassed = Bool() // if bypassed, don't refill
    val bitmapCheck = Option.when(HasBitmapCheck)(new Bundle {
      val jmp_bitmap_check = Bool() // find pte in l0 or sp, but need bitmap check
      val pte = UInt(XLEN.W) // Page Table Entry
      val ptes = Vec(tlbcontiguous, UInt(XLEN.W)) // Page Table Entry Vector
      val cfs = Vec(tlbcontiguous, Bool()) // Bitmap Check Failed Vector
      val hitway = UInt(l2tlbParams.l0nWays.W)
      val fromSP = Bool()
      val SPlevel = UInt(log2Up(Level).W)
    })
  }))
  val resp = DecoupledIO(new Bundle {
    val source = UInt(bSourceWidth.W)
    val resp = Output(new HptwResp())
    val id = Output(UInt(bMemID.W))
  })

  val mem = new Bundle {
    val req = DecoupledIO(new L2TlbMemReqBundle())
    val resp = Flipped(ValidIO(UInt(XLEN.W)))
    val mask = Input(Bool())
  }
  val refill = Output(new Bundle {
    val req_info = new L2TlbInnerBundle()
    val level = UInt(log2Up(Level + 1).W)
  })
  val pmp = new Bundle {
    val req = ValidIO(new PMPReqBundle())
    val resp = Flipped(new PMPRespBundle())
  }
  val bitmap = Option.when(HasBitmapCheck)(new Bundle {
    val req = DecoupledIO(new bitmapReqBundle())
    val resp = Flipped(DecoupledIO(new bitmapRespBundle()))
  })

  val l0_way_info = Option.when(HasBitmapCheck)(Input(UInt(l2tlbParams.l0nWays.W)))
}

class HPTW()(implicit p: Parameters) extends XSModule with HasPtwConst {
  val io = IO(new HPTWIO)
  val hgatp = io.csr.hgatp
  val mpbmte = io.csr.mPBMTE
  val sfence = io.sfence
  val flush = sfence.valid || hgatp.changed || io.csr.satp.changed || io.csr.vsatp.changed
  val mode = hgatp.mode

  // mbmc: bitmap csr
  val mbmc = io.csr.mbmc
  val bitmap_enable = (if (HasBitmapCheck) true.B else false.B) && mbmc.BME === 1.U && mbmc.CMODE === 0.U

  val level = RegInit(3.U(log2Up(Level + 1).W))
  val af_level = RegInit(3.U(log2Up(Level + 1).W)) // access fault returns this level
  val gpaddr = Reg(UInt(GPAddrBits.W))
  val req_ppn = Reg(UInt(ppnLen.W))
  val vpn = gpaddr(GPAddrBits-1, offLen)
  val levelNext = level - 1.U
  val l3Hit = Reg(Bool())
  val l2Hit = Reg(Bool())
  val l1Hit = Reg(Bool())
  val bypassed = Reg(Bool())
//  val pte = io.mem.resp.bits.MergeRespToPte()
  val jmp_bitmap_check = if (HasBitmapCheck) RegEnable(io.req.bits.bitmapCheck.get.jmp_bitmap_check, io.req.fire) else false.B
  val fromSP = if (HasBitmapCheck) RegEnable(io.req.bits.bitmapCheck.get.fromSP, io.req.fire) else false.B
  val cache_pte = Option.when(HasBitmapCheck)(RegEnable(Mux(io.req.bits.bitmapCheck.get.fromSP, io.req.bits.bitmapCheck.get.pte.asTypeOf(new PteBundle().cloneType), io.req.bits.bitmapCheck.get.ptes(io.req.bits.gvpn(sectortlbwidth - 1, 0)).asTypeOf(new PteBundle().cloneType)), io.req.fire))
  val pte = if (HasBitmapCheck) Mux(jmp_bitmap_check, cache_pte.get, io.mem.resp.bits.asTypeOf(new PteBundle().cloneType)) else io.mem.resp.bits.asTypeOf(new PteBundle().cloneType)
  val ppn_l3 = Mux(l3Hit, req_ppn, pte.ppn)
  val ppn_l2 = Mux(l2Hit, req_ppn, pte.ppn)
  val ppn_l1 = Mux(l1Hit, req_ppn, pte.ppn)
  val ppn = Wire(UInt(PAddrBits.W))
  val p_pte = MakeAddr(ppn, getVpnn(vpn, level))
  val pg_base = Wire(UInt(PAddrBits.W))
  val mem_addr = Wire(UInt(PAddrBits.W))
  if (EnableSv48) {
    when (mode === Sv48) {
      ppn := Mux(af_level === 2.U, ppn_l3, Mux(af_level === 1.U, ppn_l2, ppn_l1)) // for l2, l1 and l3
      pg_base := MakeGPAddr(hgatp.ppn, getGVpnn(vpn, 3.U, mode = Sv48)) // for l3
      mem_addr := Mux(af_level === 3.U, pg_base, p_pte)
    } .otherwise {
      ppn := Mux(af_level === 1.U, ppn_l2, ppn_l1) // for l1 and l2
      pg_base := MakeGPAddr(hgatp.ppn, getGVpnn(vpn, 2.U, mode = Sv39))
      mem_addr := Mux(af_level === 2.U, pg_base, p_pte)
    }
  } else {
    ppn := Mux(af_level === 1.U, ppn_l2, ppn_l1) // for l1 and l2
    pg_base := MakeGPAddr(hgatp.ppn, getGVpnn(vpn, 2.U, mode = Sv39))
    mem_addr := Mux(af_level === 2.U, pg_base, p_pte)
  }

  // s/w register
  val s_pmp_check = RegInit(true.B)
  val s_mem_req = RegInit(true.B)
  val w_mem_resp = RegInit(true.B)
  val idle = RegInit(true.B)
  val mem_addr_update = RegInit(false.B)
  val finish = WireInit(false.B)
  val s_bitmap_check = RegInit(true.B)
  val w_bitmap_resp = RegInit(true.B)
  val whether_need_bitmap_check = RegInit(false.B)
  val bitmap_checkfailed = RegInit(false.B)

  val sent_to_pmp = !idle && (!s_pmp_check || mem_addr_update) && !finish
  val pageFault = pte.isGpf(level, mpbmte) || (!pte.isLeaf() && level === 0.U)
  val accessFault = RegEnable(io.pmp.resp.ld || io.pmp.resp.mmio, sent_to_pmp)

  // use access fault when bitmap check failed
  val ppn_af = if (HasBitmapCheck) {
    Mux(bitmap_enable, pte.isAf() || bitmap_checkfailed, pte.isAf())
  } else {
    pte.isAf()
  }
  val find_pte = pte.isLeaf() || ppn_af || pageFault

  val resp_valid = !idle && mem_addr_update && ((w_mem_resp && find_pte) || (s_pmp_check && accessFault))
  val id = Reg(UInt(log2Up(l2tlbParams.llptwsize).W))
  val source = RegEnable(io.req.bits.source, io.req.fire)

  io.req.ready := idle
  val resp = Wire(new HptwResp())
  // accessFault > pageFault > ppn_af
  resp.apply(
    gpf = pageFault && !accessFault,
    gaf = accessFault || (ppn_af && !pageFault),
    level = Mux(accessFault, af_level, level),
    pte = pte,
    vpn = vpn,
    vmid = hgatp.vmid
  )
  io.resp.valid := resp_valid
  io.resp.bits.id := id
  io.resp.bits.resp := resp
  io.resp.bits.source := source

  io.pmp.req.valid := DontCare
  io.pmp.req.bits.addr := mem_addr
  io.pmp.req.bits.size := 3.U
  io.pmp.req.bits.cmd := TlbCmd.read

  if (HasBitmapCheck) {
    val way_info = DataHoldBypass(io.l0_way_info.get, RegNext(io.mem.resp.fire, init = false.B))
    val cache_hitway = RegEnable(io.req.bits.bitmapCheck.get.hitway, io.req.fire)
    val cache_level = RegEnable(io.req.bits.bitmapCheck.get.SPlevel, io.req.fire)
    io.bitmap.get.req.valid := !s_bitmap_check
    io.bitmap.get.req.bits.bmppn := pte.ppn
    io.bitmap.get.req.bits.id := HptwReqId.U(bMemID.W)
    io.bitmap.get.req.bits.vpn := vpn
    io.bitmap.get.req.bits.level := Mux(jmp_bitmap_check, Mux(fromSP, cache_level, 0.U), level)
    io.bitmap.get.req.bits.way_info := Mux(jmp_bitmap_check, cache_hitway, way_info)
    io.bitmap.get.req.bits.hptw_bypassed := bypassed
    io.bitmap.get.resp.ready := !w_bitmap_resp
  }

  io.mem.req.valid := !s_mem_req && !io.mem.mask && !accessFault && s_pmp_check
  io.mem.req.bits.addr := mem_addr
  io.mem.req.bits.id := HptwReqId.U(bMemID.W)
  io.mem.req.bits.hptw_bypassed := bypassed

  io.refill.req_info.vpn := vpn
  io.refill.level := level
  io.refill.req_info.source := source
  io.refill.req_info.s2xlate := onlyStage2

  when (idle) {
    if (HasBitmapCheck) {
      when (io.req.bits.bitmapCheck.get.jmp_bitmap_check && io.req.fire) {
        idle := false.B
        gpaddr := Cat(io.req.bits.gvpn, 0.U(offLen.W))
        s_bitmap_check := false.B
        id := io.req.bits.id
        level := Mux(io.req.bits.bitmapCheck.get.fromSP, io.req.bits.bitmapCheck.get.SPlevel, 0.U)
      }
    }
    when (io.req.fire && (if (HasBitmapCheck) !io.req.bits.bitmapCheck.get.jmp_bitmap_check else true.B)) {
      bypassed := io.req.bits.bypassed
      idle := false.B
      gpaddr := Cat(io.req.bits.gvpn, 0.U(offLen.W))
      accessFault := false.B
      s_pmp_check := false.B
      id := io.req.bits.id
      req_ppn := io.req.bits.ppn
      if (EnableSv48) {
        when (mode === Sv48) {
          level := Mux(io.req.bits.l1Hit, 0.U, Mux(io.req.bits.l2Hit, 1.U, Mux(io.req.bits.l3Hit.get, 2.U, 3.U)))
          af_level := Mux(io.req.bits.l1Hit, 0.U, Mux(io.req.bits.l2Hit, 1.U, Mux(io.req.bits.l3Hit.get, 2.U, 3.U)))
          l3Hit := io.req.bits.l3Hit.get
        } .otherwise {
          level := Mux(io.req.bits.l1Hit, 0.U, Mux(io.req.bits.l2Hit, 1.U, 2.U))
          af_level := Mux(io.req.bits.l1Hit, 0.U, Mux(io.req.bits.l2Hit, 1.U, 2.U))
          l3Hit := false.B
        }
      } else {
        level := Mux(io.req.bits.l1Hit, 0.U, Mux(io.req.bits.l2Hit, 1.U, 2.U))
        af_level := Mux(io.req.bits.l1Hit, 0.U, Mux(io.req.bits.l2Hit, 1.U, 2.U))
        l3Hit := false.B
      }
      l2Hit := io.req.bits.l2Hit
      l1Hit := io.req.bits.l1Hit
    }
  }

  when (sent_to_pmp && !mem_addr_update) {
    s_mem_req := false.B
    s_pmp_check := true.B
  }

  when (accessFault && !idle) {
    s_pmp_check := true.B
    s_mem_req := true.B
    w_mem_resp := true.B
    mem_addr_update := true.B
    if (HasBitmapCheck) {
      s_bitmap_check := true.B
      w_bitmap_resp := true.B
      whether_need_bitmap_check := false.B
      bitmap_checkfailed := false.B
    }
  }

  when (io.mem.req.fire) {
    s_mem_req := true.B
    w_mem_resp := false.B
  }

  when (io.mem.resp.fire && !w_mem_resp) {
    w_mem_resp := true.B
    af_level := af_level - 1.U
    if (HasBitmapCheck) {
      when (bitmap_enable) {
        whether_need_bitmap_check := true.B
      } .otherwise {
        mem_addr_update := true.B
        whether_need_bitmap_check := false.B
      }
    } else {
      mem_addr_update := true.B
    }
  }

  if (HasBitmapCheck) {
    when (whether_need_bitmap_check) {
      when (bitmap_enable && pte.isLeaf()) {
        s_bitmap_check := false.B
        whether_need_bitmap_check := false.B
      } .otherwise {
        mem_addr_update := true.B
        whether_need_bitmap_check := false.B
      }
    }
    // bitmap check
    when (io.bitmap.get.req.fire) {
      s_bitmap_check := true.B
      w_bitmap_resp := false.B
    }
    when (io.bitmap.get.resp.fire) {
      w_bitmap_resp := true.B
      mem_addr_update := true.B
      bitmap_checkfailed := io.bitmap.get.resp.bits.cf
    }
  }

  when (mem_addr_update) {
    when (!(find_pte || accessFault)) {
      level := levelNext
      s_mem_req := false.B
      mem_addr_update := false.B
    }.elsewhen (resp_valid) {
      when (io.resp.fire) {
        idle := true.B
        mem_addr_update := false.B
        accessFault := false.B
      }
      finish := true.B
    }
  }
  when (flush) {
    idle := true.B
    s_pmp_check := true.B
    s_mem_req := true.B
    w_mem_resp := true.B
    accessFault := false.B
    mem_addr_update := false.B
    if (HasBitmapCheck) {
      s_bitmap_check := true.B
      w_bitmap_resp := true.B
      whether_need_bitmap_check := false.B
      bitmap_checkfailed := false.B
    }
  }
}
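
// A software sketch of the guest-physical address range check done in PTW
// (gvpn_gpf / check_gpa_high_fail): Sv39x4 allows at most 41-bit and Sv48x4 at most
// 50-bit guest-physical addresses, and anything wider must raise a guest page fault.
// The widths here are the standard RISC-V values; the design reads them from
// GPAddrBitsSv39x4/GPAddrBitsSv48x4. Illustrative only, not referenced by the design.
private object GpaRangeSketch {
  def gpaTooWide(gpa: BigInt, sv39x4: Boolean): Boolean = {
    val maxWidth = if (sv39x4) 41 else 50 // Sv39x4 vs Sv48x4
    (gpa >> maxWidth) != 0
  }
}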