xref: /XiangShan/src/main/scala/xiangshan/cache/mmu/MMUConst.scala (revision 4daa5bf3c3f27e7fd090866d52405b21e107eb8d)
1/***************************************************************************************
2* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
3* Copyright (c) 2020-2021 Peng Cheng Laboratory
4*
5* XiangShan is licensed under Mulan PSL v2.
6* You can use this software according to the terms and conditions of the Mulan PSL v2.
7* You may obtain a copy of Mulan PSL v2 at:
8*          http://license.coscl.org.cn/MulanPSL2
9*
10* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
11* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
12* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
13*
14* See the Mulan PSL v2 for more details.
15***************************************************************************************/
16
17package xiangshan.cache.mmu
18
19import org.chipsalliance.cde.config.Parameters
20import chisel3._
21import chisel3.util._
22import xiangshan._
23import xiangshan.cache.{HasDCacheParameters, MemoryOpConstants}
24import utils._
25import utility._
26import freechips.rocketchip.diplomacy.{LazyModule, LazyModuleImp}
27import freechips.rocketchip.tilelink._
28
29
/** Configuration for one L1 TLB instance (itlb / ldtlb / sttlb, ...).
 *
 *  All fields are elaboration-time parameters; defaults describe a minimal
 *  fully-associative TLB.
 */
case class TLBParameters
(
  name: String = "none", // instance name (used for logging/debug — presumably; confirm against users)
  fetchi: Boolean = false, // TODO: remove it
  fenceDelay: Int = 2, // cycles of delay applied to sfence/flush — NOTE(review): exact semantics not visible here, confirm at use site
  useDmode: Boolean = true, // whether permission checks honor the data-access privilege mode — TODO confirm
  NSets: Int = 1, // number of sets; 1 because the TLB is fully associative
  NWays: Int = 2, // number of entries per set
  Replacer: Option[String] = Some("plru"), // replacement policy name; None disables internal replacement
  Associative: String = "fa", // must be fa
  outReplace: Boolean = false, // if true, replacement state is maintained outside this TLB
  partialStaticPMP: Boolean = false, // partial static pmp result stored in entries
  outsideRecvFlush: Boolean = false, // if outside module waiting for tlb recv flush pipe
  saveLevel: Boolean = false // keep the page-table level in entries — TODO confirm purpose
)
45
/** Configuration for the shared L2 TLB / page-table walker.
 *
 *  The three cache levels (l1/l2/l3) cache page-table entries at the three
 *  Sv39 levels; "sp" caches super-page (1 GiB / 2 MiB) translations.
 */
case class L2TLBParameters
(
  name: String = "l2tlb",
  // l1: fully-associative PTE cache for the top page-table level
  l1Size: Int = 16,
  l1Associative: String = "fa",
  l1Replacer: Option[String] = Some("plru"),
  // l2: set-associative cache for the middle page-table level
  l2nSets: Int = 8,
  l2nWays: Int = 4,
  l2Replacer: Option[String] = Some("setplru"),
  // l3: set-associative cache for the leaf page-table level
  l3nSets: Int = 32,
  l3nWays: Int = 8,
  l3Replacer: Option[String] = Some("setplru"),
  // sp: fully-associative super-page cache
  spSize: Int = 16,
  spReplacer: Option[String] = Some("plru"),
  // request filters (deduplicate in-flight requests from itlb/dtlb)
  ifilterSize: Int = 8,
  dfilterSize: Int = 32,
  // miss queue: extra entries beyond the strict minimum.
  // 0 makes bugs easier to trigger in verification; set as large as
  // affordable (e.g. 8) otherwise.
  missqueueExtendSize: Int = 0,
  // llptw: number of last-level PTW entries
  llptwsize: Int = 6,
  // memory access granularity of the walker (one cache line)
  blockBytes: Int = 64,
  // prefetch
  enablePrefetch: Boolean = true,
  // ecc scheme for the PTE caches; None disables ECC
  ecc: Option[String] = Some("secded"),
  // enable ecc checking in the walker datapath
  enablePTWECC: Boolean = false
)
81
/** Width constants and small combinational helpers shared by the L1 TLBs.
 *
 *  All widths derive from the global parameters supplied by HasXSParameter
 *  (PAddrBits, VAddrBits, XLEN, HasHExtension, StorePipelineWidth).
 */
trait HasTlbConst extends HasXSParameter {
  val Level = 3 // Sv39: three page-table levels

  val offLen  = 12 // page-offset width (4 KiB pages)
  val ppnLen  = PAddrBits - offLen // implemented physical page number width
  val vpnnLen = 9 // one VPN segment per level (Sv39: 9 bits each)
  // The H extension (two-stage translation, Sv39x4) widens the VPN by 2 bits.
  val extendVpnnBits = if (HasHExtension) 2 else 0
  val vpnLen  = VAddrBits - offLen // with H extension enabled, VAddrBits already carries the 2 extra bits
  val flagLen = 8 // PTE flag bits: V/R/W/X/U/G/A/D
  // 44 is the architectural PPN field width of an Sv39 PTE; 2 is the RSW field.
  val pteResLen = XLEN - 44 - 2 - flagLen
  // bits of the 44-bit architectural PPN above the implemented ppnLen
  // ("Hign" is a typo for "High"; kept because it is a public member)
  val ppnHignLen = 44 - ppnLen

  // A "sector" TLB entry covers 8 contiguous pages under one shared tag.
  val tlbcontiguous = 8
  val sectortlbwidth = log2Up(tlbcontiguous)
  val sectorppnLen = ppnLen - sectortlbwidth
  val sectorvpnLen = vpnLen - sectortlbwidth

  // request-filter queue depths
  val loadfiltersize = 16 // 4*3(LduCnt:2 + HyuCnt:1) + 4(prefetch:1)
  val storefiltersize = if (StorePipelineWidth >= 3) 16 else 8
  val prefetchfiltersize = 8

  val sramSinglePort = true

  // cycles before an outstanding translation is considered hung (debug timeout)
  val timeOutThreshold = 10000

  // encodings of the two-stage-translation mode (s2xlate) field
  def noS2xlate = "b00".U
  def allStage = "b11".U
  def onlyStage1 = "b01".U
  def onlyStage2 = "b10".U

  /** Page-number part of an address: all bits above the 12-bit offset. */
  def get_pn(addr: UInt) = {
    require(addr.getWidth > offLen)
    addr(addr.getWidth-1, offLen)
  }

  /** Page-offset part of an address: the low 12 bits. */
  def get_off(addr: UInt) = {
    require(addr.getWidth > offLen)
    addr(offLen-1, 0)
  }

  /** Set index: the low log2(nSets) bits of the vpn. */
  def get_set_idx(vpn: UInt, nSets: Int): UInt = {
    require(nSets >= 1)
    vpn(log2Up(nSets)-1, 0)
  }

  /** Tag part of the vpn: everything above the set-index bits. */
  def drop_set_idx(vpn: UInt, nSets: Int): UInt = {
    require(nSets >= 1)
    require(vpn.getWidth > log2Ceil(nSets))
    vpn(vpn.getWidth-1, log2Ceil(nSets))
  }

  /** Tag equality after dropping the set index. Trivially true when the
   *  whole vpn fits inside the set-index field. */
  def drop_set_equal(vpn1: UInt, vpn2: UInt, nSets: Int): Bool = {
    require(nSets >= 1)
    require(vpn1.getWidth == vpn2.getWidth)
    if (vpn1.getWidth <= log2Ceil(nSets)) {
      true.B
    } else {
      drop_set_idx(vpn1, nSets) === drop_set_idx(vpn2, nSets)
    }
  }

  /** Victim selection: the lowest-numbered invalid way if any way is free,
   *  otherwise the way chosen by the replacement policy.
   *  @param v      per-way valid bits packed into a UInt (bit i = way i valid)
   *  @param lruIdx way index proposed by the replacer
   */
  def replaceWrapper(v: UInt, lruIdx: UInt): UInt = {
    val width = v.getWidth
    val emptyIdx = ParallelPriorityMux((0 until width).map( i => (!v(i), i.U(log2Up(width).W))))
    val full = Cat(v).andR
    Mux(full, lruIdx, emptyIdx)
  }

  /** Seq[Bool] convenience overload of [[replaceWrapper]]. */
  def replaceWrapper(v: Seq[Bool], lruIdx: UInt): UInt = {
    replaceWrapper(VecInit(v).asUInt, lruIdx)
  }

  /** Convert a hypervisor (stage-2) PTW response to TLB permission bits.
   *  Guest page/access faults map onto the pf/af fields. */
  implicit def hptwresp_to_tlbperm(hptwResp: HptwResp): TlbPermBundle = {
    val tp = Wire(new TlbPermBundle)
    val ptePerm = hptwResp.entry.perm.get.asTypeOf(new PtePermBundle().cloneType)
    tp.pf := hptwResp.gpf
    tp.af := hptwResp.gaf
    tp.d := ptePerm.d
    tp.a := ptePerm.a
    tp.g := ptePerm.g
    tp.u := ptePerm.u
    tp.x := ptePerm.x
    tp.w := ptePerm.w
    tp.r := ptePerm.r
    tp
  }

  /** Convert a stage-1 (sector) PTW response to TLB permission bits. */
  implicit def ptwresp_to_tlbperm(ptwResp: PtwSectorResp): TlbPermBundle = {
    val tp = Wire(new TlbPermBundle)
    val ptePerm = ptwResp.entry.perm.get.asTypeOf(new PtePermBundle().cloneType)
    tp.pf := ptwResp.pf
    tp.af := ptwResp.af
    tp.d := ptePerm.d
    tp.a := ptePerm.a
    tp.g := ptePerm.g
    tp.u := ptePerm.u
    tp.x := ptePerm.x
    tp.w := ptePerm.w
    tp.r := ptePerm.r
    tp
  }
}
183
/** Width constants and address/index helpers for the L2 TLB and the
 *  page-table walker (PTW). Geometry derives from l2tlbParams.
 */
trait HasPtwConst extends HasTlbConst with MemoryOpConstants{
  // number of in-pipeline requestors (itlb + dtlb)
  val PtwWidth = 2
  // the prefetcher, when enabled, acts as one extra request source
  val sourceWidth = { if (l2tlbParams.enablePrefetch) PtwWidth + 1 else PtwWidth}
  val prefetchID = PtwWidth // source id reserved for prefetch requests

  // walker memory-access granularity in bits (one cache line per access)
  val blockBits = l2tlbParams.blockBytes * 8

  val bPtwWidth = log2Up(PtwWidth)
  val bSourceWidth = log2Up(sourceWidth)
  // ptwl1: fully-associated
  val PtwL1TagLen = vpnnLen + extendVpnnBits

  /* +-------+----------+-------------+
   * |  Tag  |  SetIdx  |  SectorIdx  |
   * +-------+----------+-------------+
   */
  // ptwl2: set-associative, l2tlbParams.l2nSets sets x l2nWays ways
  // (an earlier comment said "8-way"; the way count actually comes from l2nWays)
  val PtwL2SetNum = l2tlbParams.l2nSets
  val PtwL2SectorSize = blockBits / XLEN // PTEs held in one cache line
  val PtwL2IdxLen = log2Up(PtwL2SetNum * PtwL2SectorSize)
  val PtwL2SectorIdxLen = log2Up(PtwL2SectorSize)
  val PtwL2SetIdxLen = log2Up(PtwL2SetNum)
  val PtwL2TagLen = vpnnLen * 2 - PtwL2IdxLen + extendVpnnBits

  // ptwl3: set-associative, l2tlbParams.l3nSets sets x l3nWays ways
  // (an earlier comment said "16-way"; the way count actually comes from l3nWays)
  val PtwL3SetNum = l2tlbParams.l3nSets
  val PtwL3SectorSize = blockBits / XLEN
  val PtwL3IdxLen = log2Up(PtwL3SetNum * PtwL3SectorSize)
  val PtwL3SectorIdxLen = log2Up(PtwL3SectorSize)
  val PtwL3SetIdxLen = log2Up(PtwL3SetNum)
  val PtwL3TagLen = vpnnLen * 3 - PtwL3IdxLen + extendVpnnBits

  // super page, including 1GB and 2MB page
  val SPTagLen = vpnnLen * 2 + extendVpnnBits

  // miss queue sized to absorb both filters going full at once
  val MissQueueSize = l2tlbParams.ifilterSize + l2tlbParams.dfilterSize
  // memory request ids: llptw entries, plus one for the FSM walker and one for HPTW
  val MemReqWidth = l2tlbParams.llptwsize + 1 + 1
  val HptwReqId = l2tlbParams.llptwsize + 1
  val FsmReqID = l2tlbParams.llptwsize
  val bMemID = log2Up(MemReqWidth)

  /** Combined (set ++ sector) index into PtwL2, taken from the vpn with the
   *  level-0 segment dropped. */
  def genPtwL2Idx(vpn: UInt) = {
    (vpn(vpnLen - 1, vpnnLen))(PtwL2IdxLen - 1, 0)
  }

  /** Position of the PTE inside a PtwL2 cache line. */
  def genPtwL2SectorIdx(vpn: UInt) = {
    genPtwL2Idx(vpn)(PtwL2SectorIdxLen - 1, 0)
  }

  /** Set selector for PtwL2. */
  def genPtwL2SetIdx(vpn: UInt) = {
    genPtwL2Idx(vpn)(PtwL2SetIdxLen + PtwL2SectorIdxLen - 1, PtwL2SectorIdxLen)
  }

  /** Combined (set ++ sector) index into PtwL3, taken from the vpn's low bits. */
  def genPtwL3Idx(vpn: UInt) = {
    vpn(PtwL3IdxLen - 1, 0)
  }

  /** Position of the PTE inside a PtwL3 cache line. */
  def genPtwL3SectorIdx(vpn: UInt) = {
    genPtwL3Idx(vpn)(PtwL3SectorIdxLen - 1, 0)
  }

  /** vpn with the in-line sector bits removed (cache-line-aligned vpn). */
  def dropL3SectorBits(vpn: UInt) = {
    vpn(vpn.getWidth-1, PtwL3SectorIdxLen)
  }

  /** Set selector for PtwL3. */
  def genPtwL3SetIdx(vpn: UInt) = {
    genPtwL3Idx(vpn)(PtwL3SetIdxLen + PtwL3SectorIdxLen - 1, PtwL3SectorIdxLen)
  }

  /** Physical address of a PTE: (ppn ++ off) scaled by the 8-byte PTE size. */
  def MakeAddr(ppn: UInt, off: UInt) = {
    require(off.getWidth == 9)
    Cat(ppn, off, 0.U(log2Up(XLEN/8).W))(PAddrBits-1, 0)
  }

  /** Guest-physical address of a PTE; 11-bit offsets occur at the Sv39x4
   *  root level, so the offset is added rather than concatenated. */
  def MakeGPAddr(ppn: UInt, off: UInt) = {
    require(off.getWidth == 9 || off.getWidth == 11)
    (Cat(ppn, 0.U(offLen.W)) + Cat(off, 0.U(log2Up(XLEN / 8).W)))(GPAddrBits - 1, 0)
  }

  /** vpn segment for page-table level idx (0 = leaf level segment). */
  def getVpnn(vpn: UInt, idx: Int): UInt = {
    vpn(vpnnLen*(idx+1)-1, vpnnLen*idx)
  }

  /** Hardware-selected vpn segment; idx >= 2 selects the top segment. */
  def getVpnn(vpn: UInt, idx: UInt): UInt = {
    Mux(idx === 0.U, vpn(vpnnLen - 1, 0), Mux(idx === 1.U, vpn(vpnnLen * 2 - 1, vpnnLen), vpn(vpnnLen * 3 - 1, vpnnLen * 2)))
  }

  /** Guest vpn segment: like getVpnn, but the top segment carries the two
   *  extra Sv39x4 bits. */
  def getGVpnn(vpn: UInt, idx: UInt): UInt = {
    Mux(idx === 0.U, vpn(vpnnLen - 1, 0), Mux(idx === 1.U, vpn(vpnnLen * 2 - 1, vpnnLen), vpn(vpnnLen * 3 + 1, vpnnLen * 2)))
  }

  /** vpn truncated to the segments meaningful at the given walk level. */
  def getVpnClip(vpn: UInt, level: Int) = {
    // level 0  /* vpnn2 */
    // level 1  /* vpnn2 * vpnn1 */
    // level 2  /* vpnn2 * vpnn1 * vpnn0*/
    vpn(vpnLen - 1, (2 - level) * vpnnLen)
  }

  /** First vpn of the next cache line (sector bits cleared, line+1). */
  def get_next_line(vpn: UInt) = {
    Cat(dropL3SectorBits(vpn) + 1.U, 0.U(PtwL3SectorIdxLen.W))
  }

  /** True when both vpns share the same PtwL2 entry (same upper segments). */
  def same_l2entry(vpn1: UInt, vpn2: UInt) = {
    vpn1(vpnLen-1, vpnnLen) === vpn2(vpnLen-1, vpnnLen)
  }

  /** True when the request source is the prefetcher. */
  def from_pre(source: UInt) = {
    (source === prefetchID.U)
  }

  /** Select the index-th XLEN-wide word out of a wide data beat. */
  def sel_data(data: UInt, index: UInt): UInt = {
    val inner_data = data.asTypeOf(Vec(data.getWidth / XLEN, UInt(XLEN.W)))
    inner_data(index)
  }

  // vpn1 and vpn2 is at same cacheline
  def dup(vpn1: UInt, vpn2: UInt): Bool = {
    dropL3SectorBits(vpn1) === dropL3SectorBits(vpn2)
  }

  /** Debug pretty-printer: renders a Seq as "(idx)value " pairs.
   *  Guards the empty case — Seq.reduce is partial and throws
   *  UnsupportedOperationException on an empty sequence. */
  def printVec[T <: Data](x: Seq[T]): Printable = {
    if (x.isEmpty) p""
    else x.zipWithIndex.map { case (d, i) => p"(${i.U})$d " }.reduce(_ + _)
  }
}
310