xref: /XiangShan/src/main/scala/xiangshan/frontend/icache/IPrefetch.scala (revision 9473e04d5cab97eaf63add958b2392eec3d876a2)
1/***************************************************************************************
2  * Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
3  * Copyright (c) 2020-2021 Peng Cheng Laboratory
4  *
5  * XiangShan is licensed under Mulan PSL v2.
6  * You can use this software according to the terms and conditions of the Mulan PSL v2.
7  * You may obtain a copy of Mulan PSL v2 at:
8  *          http://license.coscl.org.cn/MulanPSL2
9  *
10  * THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
11  * EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
12  * MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
13  *
14  * See the Mulan PSL v2 for more details.
15  ***************************************************************************************/
16
17package xiangshan.frontend.icache
18
19import chipsalliance.rocketchip.config.Parameters
20import chisel3._
21import chisel3.util._
22import freechips.rocketchip.tilelink._
23import utils._
24import utility._
25import xiangshan.cache.mmu._
26import xiangshan.frontend._
27import xiangshan.backend.fu.{PMPReqBundle, PMPRespBundle}
28import huancun.{PreferCacheKey}
29
30
/** Base class for all bundles of the instruction prefetcher; inherits the ICache parameter set (PAddrBits, blockBytes, nPrefetchEntries, ...). */
abstract class IPrefetchBundle(implicit p: Parameters) extends ICacheBundle
/** Base class for all modules of the instruction prefetcher. */
abstract class IPrefetchModule(implicit p: Parameters) extends ICacheModule
33
/** Prefetch-queue (PIQ) enqueue request: carries the physical address of one
  * cache block to be prefetched. */
class PIQReq(implicit p: Parameters) extends IPrefetchBundle {
  // physical address of the block to prefetch (block-aligned by the producer)
  val paddr      = UInt(PAddrBits.W)
}
37
38
/** Enqueue channel from the prefetch pipeline into the miss unit's prefetch
  * queue; ready/valid handshake via DecoupledIO. */
class IPrefetchToMissUnit(implicit  p: Parameters) extends IPrefetchBundle{
  val enqReq  = DecoupledIO(new PIQReq)
}
42
/** Top-level IO of the instruction-prefetch pipeline.
  * NOTE(review): "IPredfetch" is a long-standing typo of "IPrefetch"; the name
  * is kept unchanged because external code instantiates this bundle by name. */
class IPredfetchIO(implicit p: Parameters) extends IPrefetchBundle {
  val fromFtq         = Flipped(new FtqPrefechBundle)       // prefetch targets pushed by the FTQ
  val iTLBInter       = new TlbRequestIO                    // ITLB translation request/response
  val pmp             =   new ICachePMPBundle               // PMP permission check for the translated paddr
  val toIMeta         = DecoupledIO(new ICacheReadBundle)   // probe request to the ICache meta array
  val fromIMeta       = Input(new ICacheMetaRespBundle)     // meta-array probe result (tags + coherence)
  val toMissUnit      = new IPrefetchToMissUnit             // enqueue accepted prefetches into the PIQ
  val fromMSHR        = Flipped(Vec(PortNumber,ValidIO(UInt(PAddrBits.W))))  // paddrs of in-flight demand misses, used to filter duplicates

  // level-sensitive enable/disable hints from outside; latched in IPrefetchPipe
  val prefetchEnable = Input(Bool())
  val prefetchDisable = Input(Bool())
}
55
/** Four-stage instruction-prefetch pipeline:
  *   p0: take a target from FTQ, probe ITLB and the meta array in parallel;
  *   p1: collect the ITLB/meta results, drop hits and faulting requests;
  *   p2: drop requests that alias an in-flight demand miss or carry a TLB fault;
  *   p3: PMP check, duplicate-filter against the local prefetch directory,
  *       then enqueue into the miss unit's prefetch queue.
  * Each stage uses fire/discard/ready wires plus generatePipeControl for its
  * valid bit; a "discard" retires a request without passing it on. */
class IPrefetchPipe(implicit p: Parameters) extends  IPrefetchModule
{
  val io = IO(new IPredfetchIO)

  // latched global enable; cleared again by prefetchDisable or by filling the directory
  val enableBit = RegInit(false.B)
  // number of prefetches issued since the directory was last cleared
  // NOTE(review): "Coutner" is a typo of "Counter"; kept to avoid a rename-only change
  val maxPrefetchCoutner = RegInit(0.U(log2Ceil(nPrefetchEntries + 1).W))

  val reachMaxSize = maxPrefetchCoutner === nPrefetchEntries.U

  when(io.prefetchEnable){
    enableBit := true.B
  }.elsewhen((enableBit && io.prefetchDisable) || (enableBit && reachMaxSize)){
    enableBit := false.B
  }

  /** One directory entry: a block paddr that has already been prefetched,
    * used in p3 to suppress re-prefetching the same block. */
  class PrefetchDir(implicit  p: Parameters) extends IPrefetchBundle
  {
    val valid = Bool()
    val paddr = UInt(PAddrBits.W)
  }

  val prefetch_dir = RegInit(VecInit(Seq.fill(nPrefetchEntries)(0.U.asTypeOf(new PrefetchDir))))

  val fromFtq = io.fromFtq
  val (toITLB,  fromITLB) = (io.iTLBInter.req, io.iTLBInter.resp)
  io.iTLBInter.req_kill := false.B
  // only meta-array port 0 is used: prefetch requests are single-line
  val (toIMeta, fromIMeta) = (io.toIMeta, io.fromIMeta.metaData(0))
  val (toPMP,  fromPMP)   = (io.pmp.req, io.pmp.resp)
  val toMissUnit = io.toMissUnit

  // per-stage handshake wires; defaults of false.B are overridden below
  val p0_fire, p1_fire, p2_fire, p3_fire =  WireInit(false.B)
  val p1_discard, p2_discard, p3_discard = WireInit(false.B)
  val p0_ready, p1_ready, p2_ready, p3_ready = WireInit(false.B)

  /** Prefetch Stage 0: req from Ftq */
  val p0_valid  =   fromFtq.req.valid
  // prefetch whole blocks: align the FTQ target down to a cache-block boundary
  val p0_vaddr  =   addrAlign(fromFtq.req.bits.target, blockBytes, VAddrBits)
  // advance only when the ITLB answers without a miss in the same cycle and
  // the meta array can take the probe; otherwise the request is simply dropped
  p0_fire   :=   p0_valid && p1_ready && toITLB.fire() && !fromITLB.bits.miss && toIMeta.ready && enableBit
  //discard req when source not ready
  // p0_discard := p0_valid && ((toITLB.fire() && fromITLB.bits.miss) || !toIMeta.ready || !enableBit)

  toIMeta.valid     := p0_valid
  toIMeta.bits.vSetIdx(0) := get_idx(p0_vaddr)
  toIMeta.bits.vSetIdx(1) := DontCare
  toIMeta.bits.isDoubleLine := false.B

  toITLB.valid         := p0_valid
  toITLB.bits.size     := 3.U // TODO: fix the size
  toITLB.bits.vaddr    := p0_vaddr
  toITLB.bits.debug.pc := p0_vaddr

  toITLB.bits.kill                := DontCare
  toITLB.bits.cmd                 := TlbCmd.exec
  toITLB.bits.memidx              := DontCare
  toITLB.bits.debug.robIdx        := DontCare
  toITLB.bits.no_translate        := false.B
  toITLB.bits.debug.isFirstIssue  := DontCare


  fromITLB.ready := true.B

  // FTQ requests are never back-pressured: a request that cannot fire is lost
  fromFtq.req.ready :=  true.B

  /** Prefetch Stage 1: cache probe filter */
  val p1_valid =  generatePipeControl(lastFire = p0_fire, thisFire = p1_fire || p1_discard, thisFlush = false.B, lastFlush = false.B)

  val p1_vaddr   =  RegEnable(p0_vaddr,    p0_fire)

  //tlb resp
  // tracks whether the ITLB response captured below is still the live one
  val tlb_resp_valid = RegInit(false.B)
  when(p0_fire) {tlb_resp_valid := true.B}
  .elsewhen(tlb_resp_valid && (p1_fire || p1_discard)) {tlb_resp_valid := false.B}

  // capture the ITLB response in the cycle after p0 fires, hold it afterwards
  val tlb_resp_paddr = ResultHoldBypass(valid = RegNext(p0_fire), data = fromITLB.bits.paddr(0))
  val tlb_resp_pf    = ResultHoldBypass(valid = RegNext(p0_fire), data = fromITLB.bits.excp(0).pf.instr && tlb_resp_valid)
  val tlb_resp_af    = ResultHoldBypass(valid = RegNext(p0_fire), data = fromITLB.bits.excp(0).af.instr && tlb_resp_valid)

  val p1_exception  = VecInit(Seq(tlb_resp_pf, tlb_resp_af))
  val p1_has_except =  p1_exception.reduce(_ || _)

  val p1_ptag = get_phy_tag(tlb_resp_paddr)

  // meta-array tags/coherence arrive one cycle after the probe, same timing as the ITLB
  val p1_meta_ptags       = ResultHoldBypass(data = VecInit(fromIMeta.map(way => way.tag)),valid = RegNext(p0_fire))
  val p1_meta_cohs        = ResultHoldBypass(data = VecInit(fromIMeta.map(way => way.coh)),valid = RegNext(p0_fire))

  // way-wise tag compare, qualified by a valid coherence state
  val p1_tag_eq_vec       =  VecInit(p1_meta_ptags.map(_  ===  p1_ptag ))
  val p1_tag_match_vec    =  VecInit(p1_tag_eq_vec.zipWithIndex.map{ case(way_tag_eq, w) => way_tag_eq && p1_meta_cohs(w).isValid()})
  val p1_tag_match        =  ParallelOR(p1_tag_match_vec)
  val (p1_hit, p1_miss)   =  (p1_valid && p1_tag_match && !p1_has_except, p1_valid && !p1_tag_match && !p1_has_except)

  //overriding the invalid req
  // cancel on a cache hit or a translation fault; only misses proceed
  // NOTE(review): "cancle" is a typo of "cancel"; kept to avoid a rename-only change
  val p1_req_cancle = (p1_hit || (tlb_resp_valid && p1_exception.reduce(_ || _))) && p1_valid
  val p1_req_accept   = p1_valid && tlb_resp_valid && p1_miss

  p1_ready    :=   p1_fire || p1_req_cancle || !p1_valid
  p1_fire     :=   p1_valid && p1_req_accept && p2_ready && enableBit
  p1_discard  :=   p1_valid && p1_req_cancle

  /** Prefetch Stage 2: filtered req PIQ enqueue */
  val p2_valid =  generatePipeControl(lastFire = p1_fire, thisFire = p2_fire || p2_discard, thisFlush = false.B, lastFlush = false.B)

  val p2_paddr     = RegEnable(next = tlb_resp_paddr,  enable = p1_fire)
  val p2_except_pf = RegEnable(next =tlb_resp_pf, enable = p1_fire)
  val p2_except_tlb_af = RegEnable(next = tlb_resp_af, enable = p1_fire)

  /*when a prefetch req meet with a miss req in MSHR cancle the prefetch req */
  val p2_check_in_mshr = VecInit(io.fromMSHR.map(mshr => mshr.valid && mshr.bits === addrAlign(p2_paddr, blockBytes, PAddrBits))).reduce(_||_)

  //TODO wait PMP logic
  val p2_exception  = VecInit(Seq(p2_except_tlb_af, p2_except_pf)).reduce(_||_)

  p2_ready :=   p2_fire || p2_discard || !p2_valid
  p2_fire  :=   p2_valid && !p2_exception && p3_ready
  p2_discard := p2_valid && p2_exception

  /** Prefetch Stage 3: PMP check, directory/MSHR duplicate filter, PIQ enqueue */
  val p3_valid =  generatePipeControl(lastFire = p2_fire, thisFire = p3_fire || p3_discard, thisFlush = false.B, lastFlush = false.B)

  val p3_pmp_fire = p3_valid
  val pmpExcpAF = fromPMP.instr   // PMP instruction access fault
  val p3_paddr = RegEnable(next = p2_paddr,  enable = p2_fire)

  io.pmp.req.valid      := p3_pmp_fire
  io.pmp.req.bits.addr  := p3_paddr
  io.pmp.req.bits.size  := 3.U
  io.pmp.req.bits.cmd   := TlbCmd.exec

  val p3_except_pmp_af = DataHoldBypass(pmpExcpAF, p3_pmp_fire)
  val p3_check_in_mshr = RegEnable(next = p2_check_in_mshr,  enable = p2_fire)
  // never prefetch MMIO space
  val p3_mmio      = DataHoldBypass(io.pmp.resp.mmio && !p3_except_pmp_af, p3_pmp_fire)

  val p3_exception  = VecInit(Seq(p3_except_pmp_af, p3_mmio)).reduce(_||_)

  // already prefetched this block since the last directory reset?
  val p3_hit_dir = VecInit((0 until nPrefetchEntries).map(i => prefetch_dir(i).valid && prefetch_dir(i).paddr === p3_paddr )).reduce(_||_)

  // NOTE(review): the first three terms are not gated by p3_valid — when the
  // stage is empty they can pulse p3_discard spuriously; generatePipeControl
  // appears to tolerate this (thisFire with p3_valid low), but confirm.
  p3_discard := p3_exception || p3_hit_dir || p3_check_in_mshr || (p3_valid && enableBit && !toMissUnit.enqReq.ready)

  toMissUnit.enqReq.valid             := p3_valid && enableBit && !p3_discard
  toMissUnit.enqReq.bits.paddr        := p3_paddr

  // directory lifecycle: on overflow clear everything (and enableBit drops via
  // reachMaxSize above); otherwise record each enqueued prefetch
  when(reachMaxSize){
    maxPrefetchCoutner := 0.U

    prefetch_dir.foreach(_.valid := false.B)
  }.elsewhen(toMissUnit.enqReq.fire()){
    maxPrefetchCoutner := maxPrefetchCoutner + 1.U

    prefetch_dir(maxPrefetchCoutner).valid := true.B
    prefetch_dir(maxPrefetchCoutner).paddr := p3_paddr
  }

  p3_ready := toMissUnit.enqReq.ready || !enableBit
  p3_fire  := toMissUnit.enqReq.fire()

}
211
/** One prefetch-queue entry: accepts a PIQReq and issues a TileLink Hint for
  * the NEXT cache block (next-line prefetch). Two-state FSM: idle -> send_hint
  * -> idle once the A channel accepts the hint.
  *
  * @param edge TileLink edge used to build the Hint message
  * @param id   static entry index, used only for the perf-counter name
  */
class IPrefetchEntry(edge: TLEdgeOut, id: Int)(implicit p: Parameters) extends ICacheMissUnitModule
{
  val io = IO(new Bundle {
    // source id for the Hint; prefetch entries sit after the PortNumber MSHR ids
    val id = Input(UInt(log2Ceil(PortNumber + nPrefetchEntries).W))

    val req = Flipped(DecoupledIO(new PIQReq))

    //tilelink channel
    val mem_hint = DecoupledIO(new TLBundleA(edge.bundle))
    val mem_hint_ack = Flipped(DecoupledIO(new TLBundleD(edge.bundle)))

  })

  /** default value for control signals
    * (fix: these two defaults were assigned twice in the original; under
    * Chisel last-connect semantics the first pair was dead code — removed) */
  io.mem_hint.bits := DontCare
  // D-channel responses are always accepted; the entry does not wait for the ack
  io.mem_hint_ack.ready := true.B


  // NOTE(review): s_wait_hint_ack is never entered (send_hint returns straight
  // to idle); it is kept so the Enum width and state encoding stay unchanged.
  val s_idle  :: s_send_hint :: s_wait_hint_ack :: Nil = Enum(3)
  val state = RegInit(s_idle)
  /** control logic transformation */
  //request register: holds the accepted request while the hint is in flight
  val req = Reg(new PIQReq)

  io.req.ready := (state === s_idle)
  io.mem_hint.valid := (state === s_send_hint)

  //state change
  switch(state) {
    is(s_idle) {
      when(io.req.fire()) {
        state := s_send_hint
        req := io.req.bits
      }
    }

    // memory request: back to idle as soon as the A channel takes the hint
    is(s_send_hint) {
      when(io.mem_hint.fire()) {
        state := s_idle
      }
    }
  }

  /** Build the Hint: target the block AFTER the requested one, block-aligned. */
  val hint = edge.Hint(
    fromSource = io.id,
    toAddress = addrAlign(req.paddr, blockBytes, PAddrBytes) + blockBytes.U,
    lgSize = (log2Up(cacheParams.blockBytes)).U,
    param = TLHints.PREFETCH_READ
  )._2
  io.mem_hint.bits := hint
  // ask the downstream huancun cache to retain the prefetched block
  io.mem_hint.bits.user.lift(PreferCacheKey).foreach(_ := true.B)


  XSPerfAccumulate("PrefetchEntryReq" + Integer.toString(id, 10), io.req.fire())

}
273