xref: /XiangShan/src/main/scala/xiangshan/cache/mmu/PageTableWalker.scala (revision f320e0f01bd645f0a3045a8a740e60dd770734a9)
/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.cache.mmu

import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.util._
import xiangshan._
import xiangshan.cache.{HasDCacheParameters, MemoryOpConstants}
import utils._
import freechips.rocketchip.diplomacy.{LazyModule, LazyModuleImp}
import freechips.rocketchip.tilelink._

/* PTW finite state machine: the actual page table walker.
 * It walks the page table in memory level by level and returns the final PTE
 * (or reports a page fault) for the requested VPN.
 */
class PtwFsmIO()(implicit p: Parameters) extends PtwBundle {
  val req = Flipped(DecoupledIO(new Bundle {
    val source = UInt(bPtwWidth.W)
    val l1Hit = Bool()
    val l2Hit = Bool()
    val vpn = UInt(vpnLen.W)
    val ppn = UInt(ppnLen.W)
  }))
  val resp = DecoupledIO(new Bundle {
    val source = UInt(bPtwWidth.W)
    val resp = new PtwResp
  })

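  // memory port: one read request per page-table level; the response carries a
  // MemBandWidth-wide data beat from which the PTE is selected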
  val mem = new Bundle {
    val req = DecoupledIO(new Bundle {
      val addr = UInt(PAddrBits.W)
    })
    val resp = Flipped(ValidIO(new Bundle {
      val data = UInt(MemBandWidth.W)
    }))
  }

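  // csr/sfence control inputs; refill reports the vpn, level and memory address
  // of the walk in progress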
  val csr = Input(new TlbCsrBundle)
  val sfence = Input(new SfenceBundle)
  val sfenceLatch = Output(Bool())
  val refill = Output(new Bundle {
    val vpn = UInt(vpnLen.W)
    val level = UInt(log2Up(Level).W)
    val memAddr = UInt(PAddrBits.W)
  })
}

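/* The walk is a simple four-state loop:
 *   s_idle -> s_mem_req -> s_mem_resp -> (back to s_mem_req for the next level,
 *   or s_resp once a leaf / page fault / the last level is reached) -> s_idle.
 * A request may skip levels when the caller already hit in the page caches
 * (l1Hit/l2Hit); in that case the walk continues from the supplied ppn instead
 * of satp.
 */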
class PtwFsm()(implicit p: Parameters) extends XSModule with HasPtwConst {
  val io = IO(new PtwFsmIO)

  val sfence = io.sfence
  val mem = io.mem
  val satp = io.csr.satp

  val s_idle :: s_mem_req :: s_mem_resp :: s_resp :: Nil = Enum(4)
  val state = RegInit(s_idle)
  val level = RegInit(0.U(log2Up(Level).W))
  val ppn = Reg(UInt(ppnLen.W))
  val vpn = Reg(UInt(vpnLen.W))
  val levelNext = level + 1.U

  val sfenceLatch = RegEnable(false.B, init = false.B, mem.resp.valid) // NOTE: latch sfence to drop the in-flight mem response instead of stalling other ptw requests
  val memAddrReg = RegEnable(mem.req.bits.addr, mem.req.fire())
  val l1Hit = Reg(Bool())
  val l2Hit = Reg(Bool())

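  // the memory response is a full bus beat; pick the XLEN-bit PTE addressed by
  // the low bits of memAddrReg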
  val memRdata = mem.resp.bits.data
  val memSelData = memRdata.asTypeOf(Vec(MemBandWidth/XLEN, UInt(XLEN.W)))(memAddrReg(log2Up(l1BusDataWidth/8) - 1, log2Up(XLEN/8)))
  val memPtes = (0 until PtwL3SectorSize).map(i => memRdata((i+1)*XLEN-1, i*XLEN).asTypeOf(new PteBundle))
  val memPte = memSelData.asTypeOf(new PteBundle)
  val memPteReg = RegEnable(memPte, mem.resp.fire())

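  // notFound: the walk ends in a page fault, either because the PTE itself
  // faults or because no leaf was found at the last level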
  val notFound = WireInit(false.B)
  switch (state) {
    is (s_idle) {
      when (io.req.fire()) {
        val req = io.req.bits
        state := s_mem_req
        level := Mux(req.l2Hit, 2.U, Mux(req.l1Hit, 1.U, 0.U))
        ppn := Mux(req.l2Hit || req.l1Hit, io.req.bits.ppn, satp.ppn)
        vpn := io.req.bits.vpn
        l1Hit := req.l1Hit
        l2Hit := req.l2Hit
      }
    }

    is (s_mem_req) {
      when (mem.req.fire()) {
        state := s_mem_resp
      }
    }

    is (s_mem_resp) {
      when (mem.resp.fire()) {
        when (memPte.isLeaf() || memPte.isPf(level)) {
          state := s_resp
          notFound := memPte.isPf(level)
        }.otherwise {
          when (level =/= 2.U) {
            level := levelNext
            state := s_mem_req
          }.otherwise {
            state := s_resp
            notFound := true.B
          }
        }
      }
    }

    is (s_resp) {
      when (io.resp.fire()) {
        state := s_idle
      }
    }
  }

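  // sfence aborts the walk; if a memory transaction is still outstanding, latch
  // it so the stale response is discarded (see sfenceLatch above)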
  when (sfence.valid) {
    state := s_idle
    when (state === s_mem_resp && !mem.resp.fire() || state === s_mem_req && mem.req.fire()) {
      sfenceLatch := true.B
    }
  }

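  // capture the final result in a register once the walk finishes (leaf PTE,
  // page fault, or last level reached), unless the walk was cancelled by sfence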
  val finish = mem.resp.fire()  && (memPte.isLeaf() || memPte.isPf(level) || level === 2.U)
  val resp = Reg(io.resp.bits.cloneType)
  when (finish && !sfenceLatch) {
    resp.source := RegEnable(io.req.bits.source, io.req.fire())
    resp.resp.pf := level === 3.U || notFound
    resp.resp.entry.tag := vpn
    resp.resp.entry.ppn := memPte.ppn
    resp.resp.entry.perm.map(_ := memPte.getPerm())
    resp.resp.entry.level.map(_ := level)
  }
  io.resp.valid := state === s_resp
  io.resp.bits := resp
  io.req.ready := state === s_idle

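  // physical address of the next PTE to fetch: l1addr is derived from satp,
  // l2addr/l3addr from the cached ppn (on l1Hit/l2Hit) or the PTE just read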
  val l1addr = MakeAddr(satp.ppn, getVpnn(vpn, 2))
  val l2addr = MakeAddr(Mux(l1Hit, ppn, memPteReg.ppn), getVpnn(vpn, 1))
  val l3addr = MakeAddr(Mux(l2Hit, ppn, memPteReg.ppn), getVpnn(vpn, 0))
  mem.req.valid := state === s_mem_req && !sfenceLatch
  mem.req.bits.addr := Mux(level === 0.U, l1addr, Mux(level === 1.U, l2addr, l3addr))

  io.refill.vpn := vpn
  io.refill.level := level
  io.refill.memAddr := memAddrReg
  io.sfenceLatch := sfenceLatch

  XSDebug(p"[fsm] state:${state} level:${level} sfenceLatch:${sfenceLatch} notFound:${notFound}\n")

  // perf
  XSPerfAccumulate("fsm_count", io.req.fire())
  for (i <- 0 until PtwWidth) {
    XSPerfAccumulate(s"fsm_count_source${i}", io.req.fire() && io.req.bits.source === i.U)
  }
  XSPerfAccumulate("fsm_busy", state =/= s_idle)
  XSPerfAccumulate("fsm_idle", state === s_idle)
  XSPerfAccumulate("resp_blocked", io.resp.valid && !io.resp.ready)
  XSPerfAccumulate("mem_count", mem.req.fire())
  XSPerfAccumulate("mem_cycle", BoolStopWatch(mem.req.fire, mem.resp.fire(), true))
  XSPerfAccumulate("mem_blocked", mem.req.valid && !mem.req.ready)
}