xref: /XiangShan/src/main/scala/xiangshan/cache/mmu/Repeater.scala (revision dc597826530cb6803c2396d6ab0e5eb176b732e0)
1/***************************************************************************************
2* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
3* Copyright (c) 2020-2021 Peng Cheng Laboratory
4*
5* XiangShan is licensed under Mulan PSL v2.
6* You can use this software according to the terms and conditions of the Mulan PSL v2.
7* You may obtain a copy of Mulan PSL v2 at:
8*          http://license.coscl.org.cn/MulanPSL2
9*
10* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
11* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
12* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
13*
14* See the Mulan PSL v2 for more details.
15***************************************************************************************/
16
17package xiangshan.cache.mmu
18
19import chipsalliance.rocketchip.config.Parameters
20import chisel3._
21import chisel3.util._
22import xiangshan._
23import xiangshan.cache.{HasDCacheParameters, MemoryOpConstants}
24import utils._
25import freechips.rocketchip.diplomacy.{LazyModule, LazyModuleImp}
26import freechips.rocketchip.tilelink._
27
/** One-deep repeater sitting between a TLB and the page table walker.
  *
  * Holds at most a single in-flight request: latch it from the (arbitrated)
  * TLB side, forward it once to the PTW, then latch the PTW answer and
  * present it to the TLB until accepted.  A one-cycle-delayed sfence clears
  * all state.
  */
class PTWRepeater(Width: Int = 1)(implicit p: Parameters) extends XSModule with HasPtwConst {
  val io = IO(new Bundle {
    val tlb = Flipped(new TlbPtwIO(Width))
    val ptw = new TlbPtwIO
    val sfence = Input(new SfenceBundle)
  })

  // Collapse the TLB request ports onto one channel; round-robin when Width > 1.
  val req_in =
    if (Width == 1) io.tlb.req(0)
    else {
      val rrArb = Module(new RRArbiter(io.tlb.req(0).bits.cloneType, Width))
      rrArb.io.in <> io.tlb.req
      rrArb.io.out
    }

  val tlb = io.tlb
  val ptw = io.ptw
  val sfence = RegNext(io.sfence.valid) // flush indication, applied one cycle late

  // Latched request / response payloads.
  val req  = RegEnable(req_in.bits, req_in.fire())
  val resp = RegEnable(ptw.resp.bits, ptw.resp.fire())
  // Progress flags of the single buffered request:
  // buffered here / forwarded to the ptw / answer received from the ptw.
  val haveOne = BoolStopWatch(req_in.fire(), tlb.resp.fire() || sfence)
  val sent    = BoolStopWatch(ptw.req(0).fire(), req_in.fire() || sfence)
  val recv    = BoolStopWatch(ptw.resp.fire(), req_in.fire() || sfence)

  // Accept a new request only while the buffer is free; send it exactly once.
  req_in.ready     := !haveOne
  ptw.req(0).valid := haveOne && !sent
  ptw.req(0).bits  := req

  // Hold the answer for the TLB; stop accepting from the ptw once received.
  tlb.resp.valid := haveOne && recv
  tlb.resp.bits  := resp
  ptw.resp.ready := !recv

  XSPerfAccumulate("req_count", ptw.req(0).fire())
  XSPerfAccumulate("tlb_req_cycle", BoolStopWatch(req_in.fire(), tlb.resp.fire() || sfence))
  XSPerfAccumulate("ptw_req_cycle", BoolStopWatch(ptw.req(0).fire(), ptw.resp.fire() || sfence))

  XSDebug(haveOne, p"haveOne:${haveOne} sent:${sent} recv:${recv} sfence:${sfence} req:${req} resp:${resp}")
  XSDebug(req_in.valid || io.tlb.resp.valid, p"tlb: ${tlb}\n")
  XSDebug(io.ptw.req(0).valid || io.ptw.resp.valid, p"ptw: ${ptw}\n")
  assert(!RegNext(recv && io.ptw.resp.valid, init = false.B), "re-receive ptw.resp")
}
65
/* PTWFilter: request merge/buffer stage between the dtlb and the page
 * table walker (deduplicates vpns before issuing them to the ptw).
 */
/** Filters and buffers TLB-to-PTW requests.
  *
  * Incoming requests are deduplicated against the pending buffer, against
  * lower-indexed ports of the same cycle, and against the response being
  * broadcast this cycle; survivors are stored in a circular buffer with three
  * pointers:
  *   - enqPtr: next free slot for accepted requests
  *   - issPtr: next entry to issue to the PTW
  *   - deqPtr: oldest entry, retired once its valid bit has been cleared
  * A returning PTW response clears every buffered entry it covers and is
  * forwarded to the TLB for one cycle.  A (one-cycle-delayed) sfence resets
  * the whole structure.
  */
class PTWFilter(Width: Int, Size: Int)(implicit p: Parameters) extends XSModule with HasPtwConst {
  val io = IO(new Bundle {
    val tlb = Flipped(new TlbPtwIO(Width))
    val ptw = new TlbPtwIO
    val sfence = Input(new SfenceBundle)
  })

  require(Size >= Width)

  val v = RegInit(VecInit(Seq.fill(Size)(false.B))) // per-slot valid bit
  val vpn = Reg(Vec(Size, UInt(vpnLen.W)))          // per-slot request vpn
  val enqPtr = RegInit(0.U(log2Up(Size).W)) // Enq
  val issPtr = RegInit(0.U(log2Up(Size).W)) // Iss to Ptw
  val deqPtr = RegInit(0.U(log2Up(Size).W)) // Deq
  val mayFullDeq = RegInit(false.B) // disambiguates enqPtr === deqPtr (full vs. empty)
  val mayFullIss = RegInit(false.B) // disambiguates enqPtr === issPtr
  val counter = RegInit(0.U(log2Up(Size+1).W)) // number of occupied slots

  val sfence = RegNext(io.sfence)
  val ptwResp = RegEnable(io.ptw.resp.bits, io.ptw.resp.fire())
  val ptwResp_valid = RegNext(io.ptw.resp.valid, init = false.B)
  val reqs = filter(io.tlb.req) // per-port requests with duplicates masked off

  // NOTE: removed dead locals `enqPtr_next` (an unused var) and `isFull`.
  val isEmptyDeq = enqPtr === deqPtr && !mayFullDeq
  val isEmptyIss = enqPtr === issPtr && !mayFullIss
  // accumEnqNum(i): number of lower-indexed ports enqueuing this cycle,
  // i.e. the offset of port i's slot relative to enqPtr.
  val accumEnqNum = (0 until Width).map(i => PopCount(reqs.take(i).map(_.valid)))
  val enqPtrVec = VecInit((0 until Width).map(i => enqPtr + accumEnqNum(i)))
  val enqNum = PopCount(reqs.map(_.valid))
  val canEnqueue = counter +& enqNum <= Size.U // +& widens, so the compare cannot overflow

  io.tlb.req.map(_.ready := true.B) // NOTE: just drop un-fire reqs
  io.tlb.resp.valid := ptwResp_valid
  io.tlb.resp.bits := ptwResp
  // Do not issue an entry when this cycle's broadcast response already covers it.
  io.ptw.req(0).valid := v(issPtr) && !isEmptyIss && !(ptwResp_valid && ptwResp.entry.hit(io.ptw.req(0).bits.vpn))
  io.ptw.req(0).bits.vpn := vpn(issPtr)
  io.ptw.resp.ready := true.B

  // Allocate slots for the accepted (filtered) requests of this cycle.
  reqs.zipWithIndex.map{
    case (req, i) =>
      when (req.valid && canEnqueue) {
        v(enqPtrVec(i)) := true.B
        vpn(enqPtrVec(i)) := req.bits.vpn
      }
  }

  val do_enq = canEnqueue && Cat(reqs.map(_.valid)).orR
  val do_deq = (!v(deqPtr) && !isEmptyDeq) // retire entries whose valid bit was cleared
  val do_iss = io.ptw.req(0).fire() || (!v(issPtr) && !isEmptyIss) // also skip dead entries
  when (do_enq) {
    enqPtr := enqPtr + enqNum
  }
  when (do_deq) {
    deqPtr := deqPtr + 1.U
  }
  when (do_iss) {
    issPtr := issPtr + 1.U
  }
  // Track wrap state of each pointer pair to tell full from empty.
  when (do_enq =/= do_deq) {
    mayFullDeq := do_enq
  }
  when (do_enq =/= do_iss) {
    mayFullIss := do_enq
  }

  // A response clears every pending entry it covers (allType: superpages too).
  when (ptwResp_valid) {
    vpn.zip(v).map{case (pi, vi) =>
      when (vi && ptwResp.entry.hit(pi, allType = true)) { vi := false.B }
    }
  }

  counter := counter - do_deq + Mux(do_enq, enqNum, 0.U)
  assert(counter <= Size.U, "counter should be less than Size")
  when (counter === 0.U) {
    assert(!io.ptw.req(0).fire(), "when counter is 0, should not req")
    assert(isEmptyDeq && isEmptyIss, "when counter is 0, should be empty")
  }
  when (counter === Size.U) {
    assert(mayFullDeq, "when counter is Size, should be full")
  }

  when (sfence.valid) {
    v.map(_ := false.B)
    deqPtr := 0.U
    enqPtr := 0.U
    issPtr := 0.U
    ptwResp_valid := false.B
    mayFullDeq := false.B
    mayFullIss := false.B
    counter := 0.U
  }

  /** True when vpnReq duplicates a buffered entry, a lower-indexed valid port
    * of the same cycle, or the response being broadcast this cycle.
    */
  def canMerge(vpnReq: UInt, reqs: Seq[DecoupledIO[PtwReq]], index: Int) : Bool = {
    Cat((vpn ++ reqs.take(index).map(_.bits.vpn))
      .zip(v ++ reqs.take(index).map(_.valid))
      .map{case (pi, vi) => vi && pi === vpnReq}
    ).orR || (ptwResp_valid && ptwResp.entry.hit(vpnReq))
  }

  /** Convert the tlb request ports into ValidIO wires, masking off any
    * request that can be merged with an existing or earlier one.
    */
  def filter(tlbReq: Vec[DecoupledIO[PtwReq]]) = {
    val reqs =  tlbReq.indices.map{ i =>
      val req = Wire(ValidIO(new PtwReq()))
      req.bits := tlbReq(i).bits
      req.valid := !canMerge(tlbReq(i).bits.vpn, tlbReq, i) && tlbReq(i).valid
      req
    }
    reqs
  }

  // perf counters
  val inflight_counter = RegInit(0.U(log2Up(Size + 1).W))
  when (io.ptw.req(0).fire() =/= io.ptw.resp.fire()) {
    inflight_counter := Mux(io.ptw.req(0).fire(), inflight_counter + 1.U, inflight_counter - 1.U)
  }
  when (sfence.valid) {
    inflight_counter := 0.U
  }
  XSPerfAccumulate("tlb_req_count", PopCount(Cat(io.tlb.req.map(_.valid))))
  // BUGFIX: was accumEnqNum(Width - 1), i.e. PopCount(reqs.take(Width - 1)),
  // which excludes the last port and undercounts.  enqNum covers all ports.
  XSPerfAccumulate("tlb_req_count_filtered", Mux(do_enq, enqNum, 0.U))
  XSPerfAccumulate("ptw_req_count", io.ptw.req(0).fire())
  XSPerfAccumulate("ptw_req_cycle", inflight_counter)
  XSPerfAccumulate("tlb_resp_count", io.tlb.resp.fire())
  XSPerfAccumulate("ptw_resp_count", io.ptw.resp.fire())
  XSPerfAccumulate("inflight_cycle", !isEmptyDeq)
  for (i <- 0 until Size + 1) {
    XSPerfAccumulate(s"counter${i}", counter === i.U)
  }
}
197}