// /XiangShan/src/main/scala/xiangshan/frontend/icache/ICacheMissUnit.scala (revision dcbc69cb2a7ea07707ede3d8f7c74421ef450202)
/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.frontend.icache

import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.util._
import freechips.rocketchip.diplomacy.IdRange
import freechips.rocketchip.tilelink.ClientStates._
import freechips.rocketchip.tilelink.TLPermissions._
import freechips.rocketchip.tilelink._
import xiangshan._
import huancun.{AliasKey, DirtyKey}
import xiangshan.cache._
import utils._


abstract class ICacheMissUnitModule(implicit p: Parameters) extends XSModule
  with HasICacheParameters

abstract class ICacheMissUnitBundle(implicit p: Parameters) extends XSBundle
  with HasICacheParameters

class ICacheMissReq(implicit p: Parameters) extends ICacheBundle
{
    val paddr      = UInt(PAddrBits.W)
    val vaddr      = UInt(VAddrBits.W)
    val waymask    = UInt(nWays.W)
    val coh        = new ClientMetadata
    //val release   = ValidUndirectioned(new ReleaseReq)

    def getVirSetIdx = get_idx(vaddr)
    def getPhyTag    = get_phy_tag(paddr)
}


class ICacheMissResp(implicit p: Parameters) extends ICacheBundle
{
    val data     = UInt(blockBits.W)
}

class ICacheMissBundle(implicit p: Parameters) extends ICacheBundle{
    val req       =   Vec(2, Flipped(DecoupledIO(new ICacheMissReq)))
    val resp      =   Vec(2, ValidIO(new ICacheMissResp))
    val flush     =   Input(Bool())
}


class ICacheMissEntry(edge: TLEdgeOut, id: Int)(implicit p: Parameters) extends ICacheMissUnitModule
  with MemoryOpConstants
{
  val io = IO(new Bundle {
    val id = Input(UInt(log2Ceil(nMissEntries).W))

    val req = Flipped(DecoupledIO(new ICacheMissReq))
    val resp = ValidIO(new ICacheMissResp)

    //tilelink channel
    val mem_acquire = DecoupledIO(new TLBundleA(edge.bundle))
    val mem_grant = Flipped(DecoupledIO(new TLBundleD(edge.bundle)))
    val mem_finish = DecoupledIO(new TLBundleE(edge.bundle))

    val meta_write = DecoupledIO(new ICacheMetaWriteBundle)
    val data_write = DecoupledIO(new ICacheDataWriteBundle)

//    val release    =  DecoupledIO(new ReleaseReq)
//    val victimInfor = Output(new ICacheVictimInfor())
//    val probeMerge  = Input(new ICacheVictimInfor)
//    val probeMergeFix = Output(Bool())

  })

  /** default value for control signals */
  io.resp := DontCare
  io.mem_acquire.bits := DontCare
  io.mem_grant.ready := true.B
  io.meta_write.bits := DontCare
  io.data_write.bits := DontCare

  val s_idle :: s_send_release :: s_send_mem_acquire :: s_wait_mem_grant :: s_write_back :: s_send_grant_ack :: s_wait_resp :: Nil = Enum(7)
  val state = RegInit(s_idle)
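  // Miss FSM: s_idle -> s_send_mem_acquire -> s_wait_mem_grant -> s_write_back
  //   -> (s_send_grant_ack, if a GrantAck is owed) -> s_wait_resp -> s_idle.
  // s_send_release is declared but never entered in this revision; the release /
  // probe-merge path is commented out below.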

  /** control logic transformation */
  //request register
  val req = Reg(new ICacheMissReq)
  val req_idx = req.getVirSetIdx //virtual index
  val req_tag = req.getPhyTag //physical tag
  val req_waymask = req.waymask

//  val victim_tag = get_phy_tag(req.release.bits.addr)
//  val victim_idx = req_idx

  val (_, _, refill_done, refill_address_inc) = edge.addr_inc(io.mem_grant)
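  // edge.addr_inc tracks the in-flight GrantData burst: refill_done is asserted when the
  // final beat fires, and refill_address_inc gives the byte offset of the current beat
  // within the refilled block.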

//  val needMergeProbe = (io.probeMerge.valid && state =/= s_idle && io.probeMerge.ptag === victim_tag && io.probeMerge.vidx === victim_idx)
//
//  //change release into a ProbeAck
//  //WARNING: no change to param, default TtoN
//  when(needMergeProbe && state === s_send_release){
//    io.release.bits.voluntary := false.B
//    io.release.bits.hasData   := true.B
//  }
//
//  io.probeMergeFix := needMergeProbe && state === s_send_release

  //cacheline register
  //refillCycles: 8 beats for a 64-bit bus, 2 beats for a 256-bit bus
  val readBeatCnt = Reg(UInt(log2Up(refillCycles).W))
  val respDataReg = Reg(Vec(refillCycles, UInt(beatBits.W)))
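  // respDataReg buffers one TileLink beat per entry; once all refillCycles beats have
  // arrived it is flattened with asUInt to form the full cache line for data_write and
  // for the response to IFU.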

  //initial
  io.resp.bits := DontCare
  io.mem_acquire.bits := DontCare
  io.mem_grant.ready := true.B
  io.meta_write.bits := DontCare
  io.data_write.bits := DontCare


  io.req.ready := (state === s_idle)
  io.mem_acquire.valid := (state === s_send_mem_acquire) //&& !io.flush

  val grantack = RegEnable(edge.GrantAck(io.mem_grant.bits), io.mem_grant.fire())
  val grant_param = Reg(UInt(TLPermissions.bdWidth.W))
  val is_dirty = RegInit(false.B)
  val is_grant = RegEnable(edge.isRequest(io.mem_grant.bits), io.mem_grant.fire())

  val needSendAck = RegInit(false.B)
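  // grantack captures the E-channel GrantAck derived from the incoming Grant, and
  // needSendAck records whether this refill must acknowledge that Grant before the
  // entry may respond to IFU and return to idle.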

  //state change
  switch(state) {
    is(s_idle) {
      when(io.req.fire()) {
        readBeatCnt := 0.U
        state := s_send_mem_acquire
        req := io.req.bits
      }
    }

    // memory request
    is(s_send_mem_acquire) {
      when(io.mem_acquire.fire()) {
        state := s_wait_mem_grant
      }
    }

    is(s_wait_mem_grant) {
      when(edge.hasData(io.mem_grant.bits)) {
        when(io.mem_grant.fire()) {
          readBeatCnt := readBeatCnt + 1.U
          respDataReg(readBeatCnt) := io.mem_grant.bits.data
          grant_param := io.mem_grant.bits.param
          is_dirty    := io.mem_grant.bits.echo.lift(DirtyKey).getOrElse(false.B)
          when(readBeatCnt === (refillCycles - 1).U) {
            assert(refill_done, "refill not done!")
            state := s_write_back
            needSendAck := edge.isResponse(io.mem_grant.bits)
          }
        }
      }
    }

    is(s_write_back) {
      state := Mux(io.meta_write.fire() && io.data_write.fire(), Mux(needSendAck, s_send_grant_ack, s_wait_resp), s_write_back)
    }

    is(s_send_grant_ack) {
      when(io.mem_finish.fire()) {
        state := s_wait_resp
      }
    }

    is(s_wait_resp) {
      io.resp.bits.data := respDataReg.asUInt
      when(io.resp.fire()) {
        state := s_idle
      }
    }
  }

  /** refill write and meta write */

  /** update coh meta */
  def missCohGen(param: UInt, dirty: Bool): UInt = {
    MuxLookup(Cat(param, dirty), Nothing, Seq(
      Cat(toB, false.B) -> Branch,
      Cat(toB, true.B)  -> Branch,
      Cat(toT, false.B) -> Trunk,
      Cat(toT, true.B)  -> Dirty))
  }
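  // The refilled line's coherence state follows from the Grant permission and the
  // DirtyKey echo: toB always yields Branch (read-only); toT yields Trunk, or Dirty when
  // the lower level reports the line dirty; anything else falls back to Nothing.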

  val miss_new_coh = ClientMetadata(missCohGen(grant_param, is_dirty))

  io.meta_write.valid := (state === s_write_back)
  io.meta_write.bits.generate(tag = req_tag, coh = miss_new_coh, idx = req_idx, waymask = req_waymask, bankIdx = req_idx(0))

  io.data_write.valid := (state === s_write_back)
  io.data_write.bits.generate(data = respDataReg.asUInt, idx = req_idx, waymask = req_waymask, bankIdx = req_idx(0))

//  io.release.valid := req.release.valid && (state === s_send_release)
//  io.release.bits  := req.release.bits
//
//  io.victimInfor.valid   := state =/= s_idle
//  io.victimInfor.ptag    := req_tag
//  io.victimInfor.vidx    := req_idx

  /** Tilelink request for next level cache/memory */
  val missCoh    = ClientMetadata(Nothing)
  val grow_param = missCoh.onAccess(M_XRD)._2
  val acquireBlock = edge.AcquireBlock(
    fromSource = io.id,
    toAddress = addrAlign(req.paddr, blockBytes, PAddrBits),
    lgSize = (log2Up(cacheParams.blockBytes)).U,
    growPermissions = grow_param
  )._2
  io.mem_acquire.bits := acquireBlock
  // resolve cache alias by L2
  io.mem_acquire.bits.user.lift(AliasKey).foreach(_ := req.vaddr(13, 12))
  require(nSets <= 256) // icache size should not be more than 128KB
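  // Assuming 64-byte cache lines, nSets <= 256 keeps the set index within address bits
  // [13:6]. Bits [13:12] lie above the 4KB page offset and are therefore virtual; they
  // are forwarded to L2 through AliasKey so that L2 can resolve cache aliases.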

  /** Grant ACK */
  io.mem_finish.valid := (state === s_send_grant_ack) && is_grant
  io.mem_finish.bits := grantack
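  // TileLink requires the master to close an Acquire transaction with a GrantAck on the
  // E channel; mem_finish carries the ack built from the received Grant above.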

  //resp to ifu
  io.resp.valid := state === s_wait_resp

  XSPerfAccumulate(
    "entryPenalty" + Integer.toString(id, 10),
    BoolStopWatch(
      start = io.req.fire(),
      stop = io.resp.valid,
      startHighPriority = true)
  )
  XSPerfAccumulate("entryReq" + Integer.toString(id, 10), io.req.fire())

}


class ICacheMissUnit(edge: TLEdgeOut)(implicit p: Parameters) extends ICacheMissUnitModule
{
  val io = IO(new Bundle{
    val req         = Vec(2, Flipped(DecoupledIO(new ICacheMissReq)))
    val resp        = Vec(2, ValidIO(new ICacheMissResp))

    val mem_acquire = DecoupledIO(new TLBundleA(edge.bundle))
    val mem_grant   = Flipped(DecoupledIO(new TLBundleD(edge.bundle)))
    val mem_finish  = DecoupledIO(new TLBundleE(edge.bundle))

    val meta_write  = DecoupledIO(new ICacheMetaWriteBundle)
    val data_write  = DecoupledIO(new ICacheDataWriteBundle)

//    val release     = DecoupledIO(new ReleaseReq)

//    val victimInfor = Vec(2, Output(new ICacheVictimInfor()))
//    val probeMerge  = Flipped(ValidIO(new ICacheVictimInfor))
  })
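  // The miss unit instantiates one ICacheMissEntry per fetch port (two in total). The
  // entries share the A/E channels through TileLink arbiters and the meta/data write
  // ports through Chisel arbiters; D-channel grants are routed back by source id.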
  // assign default values to output signals
  io.mem_grant.ready := false.B

  val meta_write_arb = Module(new Arbiter(new ICacheMetaWriteBundle,  PortNumber))
  val refill_arb     = Module(new Arbiter(new ICacheDataWriteBundle,  PortNumber))
  //val release_arb    = Module(new Arbiter(new ReleaseReq,  PortNumber))

  io.mem_grant.ready := true.B

//  val probeMergeFix = VecInit(Seq.fill(2)(WireInit(false.B)))
//
//  val probeMerge = RegInit(0.U.asTypeOf(new ICacheVictimInfor))
//  when(io.probeMerge.valid){
//    probeMerge.ptag   := io.probeMerge.bits.ptag
//    probeMerge.vidx   := io.probeMerge.bits.vidx
//    probeMerge.valid  := true.B
//  }
//
//  when(probeMergeFix.reduce(_||_)){
//    probeMerge.valid := false.B
//  }

  val entries = (0 until 2) map { i =>
    val entry = Module(new ICacheMissEntry(edge, i))

    entry.io.id := i.U

    // entry req
    entry.io.req.valid := io.req(i).valid
    entry.io.req.bits  := io.req(i).bits
    io.req(i).ready    := entry.io.req.ready

//    io.victimInfor(i)  := entry.io.victimInfor
//    entry.io.probeMerge := probeMerge
//
//    probeMergeFix(i) := entry.io.probeMergeFix

    // entry resp
    meta_write_arb.io.in(i)     <>  entry.io.meta_write
    refill_arb.io.in(i)         <>  entry.io.data_write
    //release_arb.io.in(i)        <>  entry.io.release

    entry.io.mem_grant.valid := false.B
    entry.io.mem_grant.bits  := DontCare
    when (io.mem_grant.bits.source === i.U) {
      entry.io.mem_grant <> io.mem_grant
    }
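    // Each entry issues its Acquire with source id == i, so the returning Grant is
    // demultiplexed to the matching entry by comparing mem_grant.bits.source with i.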

    io.resp(i) <> entry.io.resp

    XSPerfAccumulate(
      "entryPenalty" + Integer.toString(i, 10),
      BoolStopWatch(
        start = entry.io.req.fire(),
        stop = entry.io.resp.fire(),
        startHighPriority = true)
    )
    XSPerfAccumulate("entryReq" + Integer.toString(i, 10), entry.io.req.fire())

    entry
  }

  TLArbiter.lowest(edge, io.mem_acquire, entries.map(_.io.mem_acquire):_*)
  TLArbiter.lowest(edge, io.mem_finish,  entries.map(_.io.mem_finish):_*)
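  // TLArbiter.lowest serializes the two entries' A-channel Acquires and E-channel
  // GrantAcks onto the shared ports, favoring the lowest-index entry when both request.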

  io.meta_write     <> meta_write_arb.io.out
  io.data_write     <> refill_arb.io.out
  //io.release        <> release_arb.io.out

  (0 until nWays).map{ w =>
    XSPerfAccumulate("line_0_refill_way_" + Integer.toString(w, 10),  entries(0).io.meta_write.valid && OHToUInt(entries(0).io.meta_write.bits.waymask)  === w.U)
    XSPerfAccumulate("line_1_refill_way_" + Integer.toString(w, 10),  entries(1).io.meta_write.valid && OHToUInt(entries(1).io.meta_write.bits.waymask)  === w.U)
  }

}