/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.frontend.icache

import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.util._
import freechips.rocketchip.diplomacy.IdRange
import freechips.rocketchip.tilelink.ClientStates._
import freechips.rocketchip.tilelink.TLPermissions._
import freechips.rocketchip.tilelink._
import xiangshan._
import huancun.{AliasKey, DirtyKey}
import xiangshan.cache._
import utils._
import difftest._


abstract class ICacheMissUnitModule(implicit p: Parameters) extends XSModule
  with HasICacheParameters

abstract class ICacheMissUnitBundle(implicit p: Parameters) extends XSBundle
  with HasICacheParameters

class ICacheMissReq(implicit p: Parameters) extends ICacheBundle
{
  val paddr   = UInt(PAddrBits.W)
  val vaddr   = UInt(VAddrBits.W)
  val waymask = UInt(nWays.W)
  val coh     = new ClientMetadata

  def getVirSetIdx = get_idx(vaddr)
  def getPhyTag    = get_phy_tag(paddr)
}


class ICacheMissResp(implicit p: Parameters) extends ICacheBundle
{
  val data    = UInt(blockBits.W)
  val corrupt = Bool()
}

class ICacheMissBundle(implicit p: Parameters) extends ICacheBundle {
  val req   = Vec(2, Flipped(DecoupledIO(new ICacheMissReq)))
  val resp  = Vec(2, ValidIO(new ICacheMissResp))
  val flush = Input(Bool())
}


class ICacheMissEntry(edge: TLEdgeOut, id: Int)(implicit p: Parameters) extends ICacheMissUnitModule
  with MemoryOpConstants
{
  val io = IO(new Bundle {
    val id = Input(UInt(log2Ceil(PortNumber).W))

    val req  = Flipped(DecoupledIO(new ICacheMissReq))
    val resp = ValidIO(new ICacheMissResp)

    // TileLink channels
    val mem_acquire = DecoupledIO(new TLBundleA(edge.bundle))
    val mem_grant   = Flipped(DecoupledIO(new TLBundleD(edge.bundle)))
    val mem_finish  = DecoupledIO(new TLBundleE(edge.bundle))

    val meta_write = DecoupledIO(new ICacheMetaWriteBundle)
    val data_write = DecoupledIO(new ICacheDataWriteBundle)

    val release_req  = DecoupledIO(new ReplacePipeReq)
    val release_resp = Flipped(ValidIO(UInt(ReplaceIdWid.W)))
    val victimInfor  = Output(new ICacheVictimInfor())

    val toPrefetch = ValidIO(UInt(PAddrBits.W))

  })

  /** default values for control signals */
  io.resp             := DontCare
  io.mem_acquire.bits := DontCare
  io.mem_grant.ready  := true.B
  io.meta_write.bits  := DontCare
  io.data_write.bits  := DontCare

  /** miss-entry FSM (transition order): idle -> send acquire -> wait grant ->
    * send grant ack -> send replace -> wait replace -> write back -> wait resp -> idle
    */
  val s_idle :: s_send_mem_acquire :: s_wait_mem_grant :: s_write_back :: s_send_grant_ack :: s_send_replace :: s_wait_replace :: s_wait_resp :: Nil = Enum(8)
  val state = RegInit(s_idle)

  /** control logic */
  // request register
  val req     = Reg(new ICacheMissReq)
  val req_idx = req.getVirSetIdx // virtual set index
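  // VIPT note: the set index is derived from the virtual address (available before
  // address translation), while the tag stored and compared below comes from the
  // physical address after translation.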
  val req_tag     = req.getPhyTag // physical tag
  val req_waymask = req.waymask
  val release_id  = Cat(MainPipeKey.U, id.U)
  val req_corrupt = RegInit(false.B)

  io.victimInfor.valid := state === s_send_replace || state === s_wait_replace || state === s_wait_resp
  io.victimInfor.vidx  := req_idx

  val (_, _, refill_done, refill_address_inc) = edge.addr_inc(io.mem_grant)

  // cacheline registers: collect the refill beats returned on the TileLink D channel
  val readBeatCnt = Reg(UInt(log2Up(refillCycles).W))
  val respDataReg = Reg(Vec(refillCycles, UInt(beatBits.W)))

  // initial values
  io.resp.bits        := DontCare
  io.mem_acquire.bits := DontCare
  io.mem_grant.ready  := true.B
  io.meta_write.bits  := DontCare
  io.data_write.bits  := DontCare

  io.release_req.bits.paddr     := req.paddr
  io.release_req.bits.vaddr     := req.vaddr
  io.release_req.bits.voluntary := true.B
  io.release_req.bits.waymask   := req.waymask
  io.release_req.bits.needData  := false.B
  io.release_req.bits.id        := release_id
  io.release_req.bits.param     := DontCare // a voluntary release does not care about the TileLink param

  io.req.ready         := (state === s_idle)
  io.mem_acquire.valid := (state === s_send_mem_acquire)
  io.release_req.valid := (state === s_send_replace)

  io.toPrefetch.valid := (state =/= s_idle)
  io.toPrefetch.bits  := addrAlign(req.paddr, blockBytes, PAddrBits)

  // GrantAck to be sent on the E channel, captured when the grant arrives
  val grantack    = RegEnable(edge.GrantAck(io.mem_grant.bits), io.mem_grant.fire())
  val grant_param = Reg(UInt(TLPermissions.bdWidth.W))
  val is_dirty    = RegInit(false.B)
  // whether the D-channel message is a Grant/GrantData that expects a GrantAck
  val is_grant    = RegEnable(edge.isRequest(io.mem_grant.bits), io.mem_grant.fire())

  // state transitions
  switch(state) {
    is(s_idle) {
      when(io.req.fire()) {
        readBeatCnt := 0.U
        state       := s_send_mem_acquire
        req         := io.req.bits
      }
    }

    // memory request
    is(s_send_mem_acquire) {
      when(io.mem_acquire.fire()) {
        state := s_wait_mem_grant
      }
    }

    is(s_wait_mem_grant) {
      when(edge.hasData(io.mem_grant.bits)) {
        when(io.mem_grant.fire()) {
          readBeatCnt := readBeatCnt + 1.U
          respDataReg(readBeatCnt) := io.mem_grant.bits.data
          req_corrupt := io.mem_grant.bits.corrupt
          grant_param := io.mem_grant.bits.param
          is_dirty    := io.mem_grant.bits.echo.lift(DirtyKey).getOrElse(false.B)
          when(readBeatCnt === (refillCycles - 1).U) {
            assert(refill_done, "refill not done!")
            state := s_send_grant_ack
          }
        }
      }
    }

    is(s_send_grant_ack) {
      when(io.mem_finish.fire()) {
        state := s_send_replace
      }
    }

    is(s_send_replace) {
      when(io.release_req.fire()) {
        state := s_wait_replace
      }
    }

    is(s_wait_replace) {
      when(io.release_resp.valid && io.release_resp.bits === release_id) {
        state := s_write_back
      }
    }

    is(s_write_back) {
      state := Mux(io.meta_write.fire() && io.data_write.fire(), s_wait_resp, s_write_back)
    }

    is(s_wait_resp) {
      io.resp.bits.data    := respDataReg.asUInt
      io.resp.bits.corrupt := req_corrupt
      when(io.resp.fire()) {
        state := s_idle
      }
    }
  }

  /** acquire request, refill data write and meta write */
  val missCoh    = ClientMetadata(Nothing)
  val grow_param = missCoh.onAccess(M_XRD)._2
  val acquireBlock = edge.AcquireBlock(
    fromSource      = io.id,
    toAddress       = addrAlign(req.paddr, blockBytes, PAddrBits),
    lgSize          = (log2Up(cacheParams.blockBytes)).U,
    growPermissions = grow_param
  )._2
  io.mem_acquire.bits := acquireBlock
  // resolve cache alias by L2
  io.mem_acquire.bits.user.lift(AliasKey).foreach(_ := req.vaddr(13, 12))
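  // With 4 KiB pages only vaddr(11, 0) is untranslated; assuming 64-byte blocks and at
  // most 256 sets (the default configuration), the set index occupies vaddr(13, 6), so
  // vaddr(13, 12) may differ between virtual aliases of the same physical line and is
  // therefore forwarded to L2 through AliasKey.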
  require(nSets <= 256) // icache size should not be more than 128KB

  /** Grant ACK */
  io.mem_finish.valid := (state === s_send_grant_ack) && is_grant
  io.mem_finish.bits  := grantack

  // resp to ifu
  io.resp.valid := state === s_wait_resp

  /** update coh meta */
  def missCohGen(param: UInt, dirty: Bool): UInt = {
    MuxLookup(Cat(param, dirty), Nothing, Seq(
      Cat(toB, false.B) -> Branch,
      Cat(toB, true.B)  -> Branch,
      Cat(toT, false.B) -> Trunk,
      Cat(toT, true.B)  -> Dirty))
  }

  val miss_new_coh = ClientMetadata(missCohGen(grant_param, is_dirty))

  io.meta_write.valid := (state === s_write_back)
  io.meta_write.bits.generate(tag = req_tag, coh = miss_new_coh, idx = req_idx, waymask = req_waymask, bankIdx = req_idx(0))

  io.data_write.valid := (state === s_write_back)
  io.data_write.bits.generate(data = respDataReg.asUInt,
                              idx = req_idx,
                              waymask = req_waymask,
                              bankIdx = req_idx(0),
                              paddr = req.paddr)

  XSPerfAccumulate(
    "entryPenalty" + Integer.toString(id, 10),
    BoolStopWatch(
      start = io.req.fire(),
      stop = io.resp.valid,
      startHighPriority = true)
  )
  XSPerfAccumulate("entryReq" + Integer.toString(id, 10), io.req.fire())

}


class ICacheMissUnit(edge: TLEdgeOut)(implicit p: Parameters) extends ICacheMissUnitModule
{
  val io = IO(new Bundle {
    val hartId = Input(UInt(8.W))
    val req    = Vec(2, Flipped(DecoupledIO(new ICacheMissReq)))
    val resp   = Vec(2, ValidIO(new ICacheMissResp))

    val mem_acquire = DecoupledIO(new TLBundleA(edge.bundle))
    val mem_grant   = Flipped(DecoupledIO(new TLBundleD(edge.bundle)))
    val mem_finish  = DecoupledIO(new TLBundleE(edge.bundle))

    val meta_write = DecoupledIO(new ICacheMetaWriteBundle)
    val data_write = DecoupledIO(new ICacheDataWriteBundle)

    val release_req  = DecoupledIO(new ReplacePipeReq)
    val release_resp = Flipped(ValidIO(UInt(ReplaceIdWid.W)))

    val victimInfor = Vec(PortNumber, Output(new ICacheVictimInfor()))

    val prefetch_req   = Flipped(DecoupledIO(new PIQReq))
    val prefetch_check = Vec(PortNumber, ValidIO(UInt(PAddrBits.W)))

  })
  // assign default values to output signals
  io.mem_grant.ready := false.B

  val meta_write_arb = Module(new Arbiter(new ICacheMetaWriteBundle, PortNumber))
  val refill_arb     = Module(new Arbiter(new ICacheDataWriteBundle, PortNumber))
  val release_arb    = Module(new Arbiter(new ReplacePipeReq, PortNumber))

  io.mem_grant.ready := true.B

  val entries = (0 until PortNumber) map { i =>
    val entry = Module(new ICacheMissEntry(edge, i))

    entry.io.id := i.U

    // entry req
    entry.io.req.valid := io.req(i).valid
    entry.io.req.bits  := io.req(i).bits
    io.req(i).ready    := entry.io.req.ready

    // entry resp
    meta_write_arb.io.in(i) <> entry.io.meta_write
    refill_arb.io.in(i)     <> entry.io.data_write
    release_arb.io.in(i)    <> entry.io.release_req

    entry.io.mem_grant.valid := false.B
    entry.io.mem_grant.bits  := DontCare
    when (io.mem_grant.bits.source === i.U) {
      entry.io.mem_grant <> io.mem_grant
    }

    io.resp(i) <> entry.io.resp

    io.victimInfor(i)    := entry.io.victimInfor
    io.prefetch_check(i) <> entry.io.toPrefetch

    entry.io.release_resp <> io.release_resp

    XSPerfAccumulate(
      "entryPenalty" + Integer.toString(i, 10),
      BoolStopWatch(
        start = entry.io.req.fire(),
        stop = entry.io.resp.fire(),
        startHighPriority = true)
    )
    XSPerfAccumulate("entryReq" + Integer.toString(i, 10), entry.io.req.fire())

    entry
  }
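
  // Prefetch (hint) entries: `alloc` picks the lowest-indexed free IPrefetchEntry via
  // the PriorityEncoder below, and io.prefetch_req is accepted whenever any prefetch
  // entry is free. For example, with nPrefetchEntries = 2 and only entry 1 free,
  // alloc = 1.U and io.prefetch_req.ready is high.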
  val alloc = Wire(UInt(log2Ceil(nPrefetchEntries).W))

  val prefEntries = (PortNumber until PortNumber + nPrefetchEntries) map { i =>
    val prefetchEntry = Module(new IPrefetchEntry(edge, PortNumber))

    prefetchEntry.io.mem_hint_ack.valid := false.B
    prefetchEntry.io.mem_hint_ack.bits  := DontCare

    // the prefetch entry uses TileLink source id PortNumber
    when(io.mem_grant.bits.source === PortNumber.U) {
      prefetchEntry.io.mem_hint_ack <> io.mem_grant
    }

    prefetchEntry.io.req.valid := io.prefetch_req.valid && ((i - PortNumber).U === alloc)
    prefetchEntry.io.req.bits  := io.prefetch_req.bits

    prefetchEntry.io.id := i.U

    prefetchEntry
  }

  alloc := PriorityEncoder(prefEntries.map(_.io.req.ready))
  io.prefetch_req.ready := ParallelOR(prefEntries.map(_.io.req.ready))

  val tl_a_channel = entries.map(_.io.mem_acquire) ++ prefEntries.map(_.io.mem_hint)
  TLArbiter.lowest(edge, io.mem_acquire, tl_a_channel:_*)

  TLArbiter.lowest(edge, io.mem_finish, entries.map(_.io.mem_finish):_*)

  io.meta_write  <> meta_write_arb.io.out
  io.data_write  <> refill_arb.io.out
  io.release_req <> release_arb.io.out

  if (env.EnableDifftest) {
    val difftest = Module(new DifftestRefillEvent)
    difftest.io.clock   := clock
    difftest.io.coreid  := io.hartId
    difftest.io.cacheid := 0.U
    difftest.io.valid   := refill_arb.io.out.valid
    difftest.io.addr    := refill_arb.io.out.bits.paddr
    difftest.io.data    := refill_arb.io.out.bits.data.asTypeOf(difftest.io.data)
  }

  (0 until nWays).map{ w =>
    XSPerfAccumulate("line_0_refill_way_" + Integer.toString(w, 10), entries(0).io.meta_write.valid && OHToUInt(entries(0).io.meta_write.bits.waymask) === w.U)
    XSPerfAccumulate("line_1_refill_way_" + Integer.toString(w, 10), entries(1).io.meta_write.valid && OHToUInt(entries(1).io.meta_write.bits.waymask) === w.U)
  }

}
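
// The sketch below is illustrative only and is not referenced anywhere in the design.
// It shows, in isolation, the beat-accumulation idiom used in s_wait_mem_grant above:
// `refillCycles` beats of `beatBits` bits each are collected into a Vec register and the
// assembled cache line is presented once all beats have been captured. The module name
// and its parameters are assumptions made for this example, not part of XiangShan.
class RefillBeatCollector(refillCycles: Int, beatBits: Int) extends Module {
  require(isPow2(refillCycles) && refillCycles > 1)

  val io = IO(new Bundle {
    val beat = Flipped(DecoupledIO(UInt(beatBits.W)))     // one refill beat per fire
    val line = ValidIO(UInt((refillCycles * beatBits).W)) // full line, valid after the last beat
  })

  val beatCnt   = RegInit(0.U(log2Ceil(refillCycles).W))
  val buffer    = Reg(Vec(refillCycles, UInt(beatBits.W)))
  val lineValid = RegInit(false.B)

  io.beat.ready := true.B
  io.line.valid := lineValid
  io.line.bits  := buffer.asUInt

  when(io.beat.fire()) {
    when(beatCnt === 0.U) { lineValid := false.B } // a new line starts, drop the old result
    buffer(beatCnt) := io.beat.bits
    beatCnt := beatCnt + 1.U // wraps back to zero after the last beat (power-of-two count)
    when(beatCnt === (refillCycles - 1).U) {
      lineValid := true.B // the full line is visible from the cycle after the last beat
    }
  }
}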