xref: /XiangShan/src/main/scala/xiangshan/cache/mmu/PageTableWalker.scala (revision a58e33519795596dc4f85fe66907cbc7dde2d66a)
/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.cache.mmu

import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.util._
import chisel3.internal.naming.chiselName
import xiangshan._
import xiangshan.cache.{HasDCacheParameters, MemoryOpConstants}
import utils._
import freechips.rocketchip.diplomacy.{LazyModule, LazyModuleImp}
import freechips.rocketchip.tilelink._

/* PTW finite state machine: the actual page table walker.
 * It performs at most the first two page table accesses itself; when the level-1 PTE
 * points to a last-level page table, the remaining access is handed to the L2 TLB
 * miss queue.
 */
class PtwFsmIO()(implicit p: Parameters) extends PtwBundle {
  // walk request: requestor id, the vpn to translate, and (when l1Hit) the ppn of the
  // already-known first-level PTE so the first memory access can be skipped
  val req = Flipped(DecoupledIO(new Bundle {
    val source = UInt(bPtwWidth.W)
    val l1Hit = Bool()
    val vpn = UInt(vpnLen.W)
    val ppn = UInt(ppnLen.W)
  }))
  // walk response back to the requestor
  val resp = DecoupledIO(new Bundle {
    val source = UInt(bPtwWidth.W)
    val resp = new PtwResp
  })

  // hand-off of the last-level access to the L2 TLB miss queue
  val mq = DecoupledIO(new L2TlbMQInBundle())

  // memory port used to fetch page table entries
  val mem = new Bundle {
    val req = DecoupledIO(new L2TlbMemReqBundle())
    val resp = Flipped(ValidIO(UInt(XLEN.W)))
    val mask = Input(Bool())
  }

  val csr = Input(new TlbCsrBundle)
  val sfence = Input(new SfenceBundle)
  // vpn and level of the walk in flight, exported for refill bookkeeping
  val refill = Output(new Bundle {
    val vpn = UInt(vpnLen.W)
    val level = UInt(log2Up(Level).W)
  })
}

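/* Protocol: io.req is accepted only in s_idle. For every accepted request exactly one
 * of io.resp (walk finished here, possibly with a page fault) or io.mq (last-level
 * access delegated to the miss queue) eventually fires, unless the walk is flushed
 * by an sfence.
 */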
@chiselName
class PtwFsm()(implicit p: Parameters) extends XSModule with HasPtwConst {
  val io = IO(new PtwFsmIO)

  val sfence = io.sfence
  val mem = io.mem
  val satp = io.csr.satp

  val s_idle :: s_mem_req :: s_mem_resp :: s_check_pte :: Nil = Enum(4)
  val state = RegInit(s_idle)
  val level = RegInit(0.U(log2Up(Level).W))
  val ppn = Reg(UInt(ppnLen.W))
  val vpn = Reg(UInt(vpnLen.W))
  val levelNext = level + 1.U
  val l1Hit = Reg(Bool())
  // view the raw memory response data as a page table entry
  val memPte = mem.resp.bits.asTypeOf(new PteBundle())
  // only one walk is in flight at a time: new requests are accepted only in s_idle
  io.req.ready := state === s_idle

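  // walk state machine:
  //   s_idle      : wait for a request; when req.l1Hit the first (root) access is
  //                 skipped and the walk starts from the supplied ppn
  //   s_mem_req   : issue the PTE read for the current level
  //   s_mem_resp  : wait for the memory response
  //   s_check_pte : finish on a leaf PTE or page fault, walk one level deeper,
  //                 or hand the last-level access to the miss queue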
  val pageFault = WireInit(false.B)
  switch (state) {
    is (s_idle) {
      when (io.req.fire()) {
        val req = io.req.bits
        state := s_mem_req
        level := Mux(req.l1Hit, 1.U, 0.U)
        ppn := Mux(req.l1Hit, io.req.bits.ppn, satp.ppn)
        vpn := io.req.bits.vpn
        l1Hit := req.l1Hit
      }
    }

    is (s_mem_req) {
      when (mem.req.fire()) {
        state := s_mem_resp
      }
    }

    is (s_mem_resp) {
      when (mem.resp.fire()) {
        state := s_check_pte
      }
    }

    is (s_check_pte) {
      when (memPte.isLeaf() || memPte.isPf(level)) {
        // the walk ends here: either a valid leaf PTE or a page fault
        when (io.resp.fire()) {
          state := s_idle
        }
        pageFault := memPte.isPf(level)
      }.otherwise {
        when (level =/= (Level-2).U) { // (Level-2).U is 1: not at the last non-leaf level yet, keep walking
          level := levelNext
          state := s_mem_req
        }.otherwise { // level is 1: delegate the last-level access to the miss queue
          when (io.mq.fire()) {
            state := s_idle
          }
        }
      }
    }
  }

  // sfence flushes any walk in progress
  when (sfence.valid) {
    state := s_idle
  }

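  // classify the result seen in s_check_pte: find_pte means the walk finishes here
  // (leaf PTE or page fault), to_find_pte means the level-1 PTE points to a last-level
  // page table and the remaining access is delegated to the miss queue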
  val is_pte = memPte.isLeaf() || memPte.isPf(level)
  val find_pte = is_pte
  val to_find_pte = level === 1.U && !is_pte
  val source = RegEnable(io.req.bits.source, io.req.fire())
  io.resp.valid := state === s_check_pte && find_pte
  io.resp.bits.source := source
  io.resp.bits.resp.apply(pageFault, level, memPte, vpn)

  io.mq.valid := state === s_check_pte && to_find_pte
  io.mq.bits.source := source
  io.mq.bits.vpn := vpn
  io.mq.bits.l3.valid := true.B
  io.mq.bits.l3.bits := memPte.ppn
  // level only ever takes the values 0 and 1 in this FSM
  assert(level =/= 2.U && level =/= 3.U)

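  // addresses of the two PTEs this FSM may fetch: the first access is rooted at
  // satp.ppn, the second at the ppn returned by the first access (or at the ppn
  // supplied with the request when that access was skipped)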
  val l1addr = MakeAddr(satp.ppn, getVpnn(vpn, 2))
  val l2addr = MakeAddr(Mux(l1Hit, ppn, memPte.ppn), getVpnn(vpn, 1))
  mem.req.valid := state === s_mem_req && !io.mem.mask
  mem.req.bits.addr := Mux(level === 0.U, l1addr, l2addr)
  mem.req.bits.id := MSHRSize.U(bMemID.W) // fixed memory-request id used by the FSM

  io.refill.vpn := vpn
  io.refill.level := level

  XSDebug(p"[fsm] state:${state} level:${level} notFound:${pageFault}\n")

  // perf
  XSPerfAccumulate("fsm_count", io.req.fire())
  for (i <- 0 until PtwWidth) {
    XSPerfAccumulate(s"fsm_count_source${i}", io.req.fire() && io.req.bits.source === i.U)
  }
  XSPerfAccumulate("fsm_busy", state =/= s_idle)
  XSPerfAccumulate("fsm_idle", state === s_idle)
  XSPerfAccumulate("resp_blocked", io.resp.valid && !io.resp.ready)
  XSPerfAccumulate("mem_count", mem.req.fire())
  XSPerfAccumulate("mem_cycle", BoolStopWatch(mem.req.fire(), mem.resp.fire(), true))
  XSPerfAccumulate("mem_blocked", mem.req.valid && !mem.req.ready)

  TimeOutAssert(state =/= s_idle, timeOutThreshold, "page table walker time out")
}