/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.cache.mmu

import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.util._
import xiangshan._
import xiangshan.cache.{HasDCacheParameters, MemoryOpConstants}
import utils._
import freechips.rocketchip.diplomacy.{LazyModule, LazyModuleImp}
import freechips.rocketchip.tilelink._

/* PTW finite state machine, the actual page table walker.
 * It accepts a walk request (possibly with upper levels already resolved by the PTW cache),
 * reads page-table entries from memory level by level, and returns the final PTE or a page fault.
 */
class PtwFsmIO()(implicit p: Parameters) extends PtwBundle {
  val req = Flipped(DecoupledIO(new Bundle {
    val source = UInt(bPtwWidth.W)
    val l1Hit = Bool()
    val l2Hit = Bool()
    val vpn = UInt(vpnLen.W)
    val ppn = UInt(ppnLen.W)
  }))
  val resp = DecoupledIO(new Bundle {
    val source = UInt(bPtwWidth.W)
    val resp = new PtwResp
  })

  val mem = new Bundle {
    val req = DecoupledIO(new Bundle {
      val addr = UInt(PAddrBits.W)
    })
    val resp = Flipped(ValidIO(UInt(XLEN.W)))
  }

  val csr = Input(new TlbCsrBundle)
  val sfence = Input(new SfenceBundle)
  val sfenceLatch = Output(Bool())
  val refill = Output(new Bundle {
    val vpn = UInt(vpnLen.W)
    val level = UInt(log2Up(Level).W)
    val memAddr = UInt(PAddrBits.W)
  })
}

class PtwFsm()(implicit p: Parameters) extends XSModule with HasPtwConst {
  val io = IO(new PtwFsmIO)

  val sfence = io.sfence
  val mem = io.mem
  val satp = io.csr.satp

  // s_idle: wait for a request; s_mem_req/s_mem_resp: issue a PTE read and wait for its data;
  // s_resp: hold the result until the requester takes it
  val s_idle :: s_mem_req :: s_mem_resp :: s_resp :: Nil = Enum(4)
  val state = RegInit(s_idle)
  val level = RegInit(0.U(log2Up(Level).W))
  val ppn = Reg(UInt(ppnLen.W))
  val vpn = Reg(UInt(vpnLen.W))
  val levelNext = level + 1.U

  val sfenceLatch = RegEnable(false.B, init = false.B, mem.resp.valid) // NOTE: store sfence to disable mem.resp.fire(), but not stall other ptw req
  val memAddrReg = RegEnable(mem.req.bits.addr, mem.req.fire())
  val l1Hit = Reg(Bool())
  val l2Hit = Reg(Bool())

  val memPte = mem.resp.bits.asTypeOf(new PteBundle().cloneType)
  val memPteReg = RegEnable(memPte, mem.resp.fire())

  val notFound = WireInit(false.B) // set when the current memory response ends the walk with a page fault
  switch (state) {
    is (s_idle) {
      when (io.req.fire()) {
        val req = io.req.bits
        state := s_mem_req
        level := Mux(req.l2Hit, 2.U, Mux(req.l1Hit, 1.U, 0.U)) // skip levels already resolved by the PTW cache
        ppn := Mux(req.l2Hit || req.l1Hit, io.req.bits.ppn, satp.ppn)
        vpn := io.req.bits.vpn
        l1Hit := req.l1Hit
        l2Hit := req.l2Hit
      }
    }

    is (s_mem_req) {
      when (mem.req.fire()) {
        state := s_mem_resp
      }
    }

    is (s_mem_resp) {
      when (mem.resp.fire()) {
        when (memPte.isLeaf() || memPte.isPf(level)) {
          // leaf PTE or faulty PTE: the walk ends here
          state := s_resp
          notFound := memPte.isPf(level)
        }.otherwise {
          when (level =/= 2.U) {
            // valid non-leaf PTE: descend to the next level
            level := levelNext
            state := s_mem_req
          }.otherwise {
            // non-leaf PTE at the last level: page fault
            state := s_resp
            notFound := true.B
          }
        }
      }
    }

    is (s_resp) {
      when (io.resp.fire()) {
        state := s_idle
      }
    }
  }

  // sfence.vma aborts the walk; if a memory transaction is still in flight,
  // latch the flush so that its response is dropped rather than refilled
  when (sfence.valid) {
    state := s_idle
    when (state === s_mem_resp && !mem.resp.fire() || state === s_mem_req && mem.req.fire()) {
      sfenceLatch := true.B
    }
  }

  // the walk finishes when the response is a leaf, a fault, or the last level has been read
  val finish = mem.resp.fire() && (memPte.isLeaf() || memPte.isPf(level) || level === 2.U)
  val resp_pf = Reg(Bool())
  val resp_level = Reg(UInt(2.W))
  val resp_pte = Reg(new PteBundle())
  when (finish && !sfenceLatch) {
    resp_pf := level === 3.U || notFound
    resp_level := level
    resp_pte := memPte
  }
  io.resp.valid := state === s_resp
  io.resp.bits.source := RegEnable(io.req.bits.source, io.req.fire())
  io.resp.bits.resp.apply(resp_pf, resp_level, resp_pte, vpn)
  io.req.ready := state === s_idle

  // next PTE address for each level: table base (from satp or the previously fetched PTE)
  // indexed by the corresponding VPN slice
  val l1addr = MakeAddr(satp.ppn, getVpnn(vpn, 2))
  val l2addr = MakeAddr(Mux(l1Hit, ppn, memPteReg.ppn), getVpnn(vpn, 1))
  val l3addr = MakeAddr(Mux(l2Hit, ppn, memPteReg.ppn), getVpnn(vpn, 0))
  mem.req.valid := state === s_mem_req && !sfenceLatch
  mem.req.bits.addr := Mux(level === 0.U, l1addr, Mux(level === 1.U, l2addr, l3addr))

  io.refill.vpn := vpn
  io.refill.level := level
  io.refill.memAddr := memAddrReg
  io.sfenceLatch := sfenceLatch

  XSDebug(p"[fsm] state:${state} level:${level} sfenceLatch:${sfenceLatch} notFound:${notFound}\n")

  // perf
  XSPerfAccumulate("fsm_count", io.req.fire())
  for (i <- 0 until PtwWidth) {
    XSPerfAccumulate(s"fsm_count_source${i}", io.req.fire() && io.req.bits.source === i.U)
  }
  XSPerfAccumulate("fsm_busy", state =/= s_idle)
  XSPerfAccumulate("fsm_idle", state === s_idle)
  XSPerfAccumulate("resp_blocked", io.resp.valid && !io.resp.ready)
  XSPerfAccumulate("mem_count", mem.req.fire())
  XSPerfAccumulate("mem_cycle", BoolStopWatch(mem.req.fire, mem.resp.fire(), true))
  XSPerfAccumulate("mem_blocked", mem.req.valid && !mem.req.ready)
}
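
// A minimal software-only sketch (not used by the RTL above), assuming Sv39: each walk level
// indexes a 4 KiB table of 8-byte PTEs with one 9-bit slice of the 27-bit VPN, which is what
// MakeAddr/getVpnn are presumed to compute in hardware. Object and method names here are
// invented for illustration only; l1addr/l2addr/l3addr above correspond to idx = 2, 1, 0.
object Sv39WalkSketch {
  // idx-th 9-bit VPN slice, idx = 2 being the most significant
  def vpnSlice(vpn: BigInt, idx: Int): BigInt = (vpn >> (9 * idx)) & 0x1ff
  // PTE address = table base (ppn * 4096) + slice * 8 bytes per PTE
  def pteAddr(tablePpn: BigInt, vpn: BigInt, idx: Int): BigInt =
    (tablePpn << 12) + (vpnSlice(vpn, idx) << 3)
}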