/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.frontend.icache

import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.util._
import freechips.rocketchip.diplomacy.IdRange
import freechips.rocketchip.tilelink.ClientStates._
import freechips.rocketchip.tilelink.TLPermissions._
import freechips.rocketchip.tilelink._
import xiangshan._
import huancun.{AliasKey, DirtyKey}
import xiangshan.cache._
import utils._
import utility._
import difftest._


/** Base module class for ICache miss-handling logic; mixes in the ICache parameters. */
abstract class ICacheMissUnitModule(implicit p: Parameters) extends XSModule
  with HasICacheParameters

/** Base bundle class for ICache miss-handling logic; mixes in the ICache parameters. */
abstract class ICacheMissUnitBundle(implicit p: Parameters) extends XSBundle
  with HasICacheParameters

/**
  * Miss request sent to the miss unit.
  * Carries both the physical address (for the memory request / tag) and the
  * virtual address (for the virtually-indexed set), plus the victim way
  * selected by the replacer and the victim's current coherence metadata.
  */
class ICacheMissReq(implicit p: Parameters) extends ICacheBundle
{
    val paddr   = UInt(PAddrBits.W)   // physical address of the missing access
    val vaddr   = UInt(VAddrBits.W)   // virtual address (used for set indexing)
    val waymask = UInt(nWays.W)       // one-hot victim way to refill into
    val coh     = new ClientMetadata  // coherence state of the victim line

    def getVirSetIdx = get_idx(vaddr)     // set index derived from the virtual address
    def getPhyTag    = get_phy_tag(paddr) // tag derived from the physical address
}


/** Refill response: one full cache block of data plus a corrupt flag from TileLink D. */
class ICacheMissResp(implicit p: Parameters) extends ICacheBundle
{
    val data    = UInt(blockBits.W)
    val corrupt = Bool()
}

/** Aggregated request/response interface for both fetch ports of the miss unit. */
class ICacheMissBundle(implicit p: Parameters) extends ICacheBundle{
    val req   = Vec(2, Flipped(DecoupledIO(new ICacheMissReq)))
    val resp  = Vec(2, ValidIO(new ICacheMissResp))
    val flush = Input(Bool())
}


/**
  * A single miss entry (MSHR-like). Handles one outstanding ICache miss:
  *
  *   s_idle            -> accept a request, latch it
  *   s_send_mem_aquire -> issue a TileLink AcquireBlock on channel A
  *   s_wait_mem_grant  -> collect all Grant beats on channel D into respDataReg
  *   s_send_grant_ack  -> send GrantAck on channel E
  *   s_send_replace    -> ask the replace pipe to release the victim line
  *   s_wait_replace    -> wait until the release with our id is acknowledged
  *   s_write_back      -> write refilled meta + data into the ICache arrays
  *   s_wait_resp       -> present the refilled block to IFU, then back to idle
  *
  * @param edge TileLink edge used to construct channel messages
  * @param id   static index of this entry (used as the TileLink source id)
  */
class ICacheMissEntry(edge: TLEdgeOut, id: Int)(implicit p: Parameters) extends ICacheMissUnitModule
  with MemoryOpConstants
{
  val io = IO(new Bundle {
    val id = Input(UInt(log2Ceil(PortNumber).W))

    val req  = Flipped(DecoupledIO(new ICacheMissReq))
    val resp = ValidIO(new ICacheMissResp)

    //tilelink channel
    val mem_acquire = DecoupledIO(new TLBundleA(edge.bundle))
    val mem_grant   = Flipped(DecoupledIO(new TLBundleD(edge.bundle)))
    val mem_finish  = DecoupledIO(new TLBundleE(edge.bundle))

    // write ports into the ICache meta/data arrays
    val meta_write = DecoupledIO(new ICacheMetaWriteBundle)
    val data_write = DecoupledIO(new ICacheDataWriteBundle)

    // victim release handshake with the replace pipeline
    val release_req  = DecoupledIO(new ReplacePipeReq)
    val release_resp = Flipped(ValidIO(UInt(ReplaceIdWid.W)))
    val victimInfor  = Output(new ICacheVictimInfor())

    // exposes the in-flight miss address so the prefetcher can filter duplicates
    val toPrefetch = ValidIO(UInt(PAddrBits.W))

  })

  /** default value for control signals */
  // NOTE(review): these defaults are repeated again below ("//initial" section);
  // with Chisel last-connect semantics the duplication is harmless but redundant.
  io.resp := DontCare
  io.mem_acquire.bits := DontCare
  io.mem_grant.ready := true.B
  io.meta_write.bits := DontCare
  io.data_write.bits := DontCare

  val s_idle :: s_send_mem_aquire :: s_wait_mem_grant :: s_write_back :: s_send_grant_ack :: s_send_replace :: s_wait_replace :: s_wait_resp :: Nil = Enum(8)
  val state = RegInit(s_idle)
  /** control logic transformation */
  //request register: latched on accept so fields stay stable for the whole miss
  val req = Reg(new ICacheMissReq)
  val req_idx = req.getVirSetIdx //virtual index
  val req_tag = req.getPhyTag //physical tag
  val req_waymask = req.waymask
  // release id tags our victim-release so we can match release_resp below
  val release_id = Cat(MainPipeKey.U, id.U)
  val req_corrupt = RegInit(false.B)

  // victim info is valid while the victim line is being released / written back,
  // so other pipelines can detect a conflict on this set
  io.victimInfor.valid := state === s_send_replace || state === s_wait_replace || state === s_wait_resp
  io.victimInfor.vidx := req_idx

  // refill_done pulses on the last Grant beat of the burst
  val (_, _, refill_done, refill_address_inc) = edge.addr_inc(io.mem_grant)

  //cacheline register: accumulates the Grant beats into one full block
  val readBeatCnt = Reg(UInt(log2Up(refillCycles).W))
  val respDataReg = Reg(Vec(refillCycles, UInt(beatBits.W)))

  //initial
  io.resp.bits := DontCare
  io.mem_acquire.bits := DontCare
  io.mem_grant.ready := true.B
  io.meta_write.bits := DontCare
  io.data_write.bits := DontCare

  io.release_req.bits.paddr := req.paddr
  io.release_req.bits.vaddr := req.vaddr
  io.release_req.bits.voluntary := true.B
  io.release_req.bits.waymask := req.waymask
  io.release_req.bits.needData := false.B
  io.release_req.bits.id := release_id
  io.release_req.bits.param := DontCare //release will not care tilelink param

  io.req.ready := (state === s_idle)
  io.mem_acquire.valid := (state === s_send_mem_aquire)
  io.release_req.valid := (state === s_send_replace)

  // any non-idle state means this block address is already being fetched
  io.toPrefetch.valid := (state =/= s_idle)
  io.toPrefetch.bits := addrAlign(req.paddr, blockBytes, PAddrBits)

  // capture the GrantAck message (and whether an ack is required) when the grant fires
  val grantack = RegEnable(edge.GrantAck(io.mem_grant.bits), io.mem_grant.fire())
  val grant_param = Reg(UInt(TLPermissions.bdWidth.W))
  val is_dirty = RegInit(false.B)
  val is_grant = RegEnable(edge.isRequest(io.mem_grant.bits), io.mem_grant.fire())

  //state change
  switch(state) {
    is(s_idle) {
      when(io.req.fire()) {
        readBeatCnt := 0.U
        state := s_send_mem_aquire
        req := io.req.bits
      }
    }

    // memory request
    is(s_send_mem_aquire) {
      when(io.mem_acquire.fire()) {
        state := s_wait_mem_grant
      }
    }

    is(s_wait_mem_grant) {
      when(edge.hasData(io.mem_grant.bits)) {
        when(io.mem_grant.fire()) {
          readBeatCnt := readBeatCnt + 1.U
          respDataReg(readBeatCnt) := io.mem_grant.bits.data
          req_corrupt := io.mem_grant.bits.corrupt
          grant_param := io.mem_grant.bits.param
          // DirtyKey echo tells us whether L2 handed us a dirty copy
          is_dirty := io.mem_grant.bits.echo.lift(DirtyKey).getOrElse(false.B)
          when(readBeatCnt === (refillCycles - 1).U) {
            assert(refill_done, "refill not done!")
            state := s_send_grant_ack
          }
        }
      }
    }

    is(s_send_grant_ack) {
      when(io.mem_finish.fire()) {
        state := s_send_replace
      }
    }

    is(s_send_replace){
      when(io.release_req.fire()){
        state := s_wait_replace
      }
    }

    is(s_wait_replace){
      // only advance when the ack carries our own release id
      when(io.release_resp.valid && io.release_resp.bits === release_id){
        state := s_write_back
      }
    }

    is(s_write_back) {
      // meta and data writes must be accepted in the same cycle to advance
      state := Mux(io.meta_write.fire() && io.data_write.fire(), s_wait_resp, s_write_back)
    }

    is(s_wait_resp) {
      io.resp.bits.data := respDataReg.asUInt
      io.resp.bits.corrupt := req_corrupt
      when(io.resp.fire()) {
        state := s_idle
      }
    }
  }

  /** refill write and meta write */
  // acquire with the permissions a read access from Nothing would request
  val missCoh = ClientMetadata(Nothing)
  val grow_param = missCoh.onAccess(M_XRD)._2
  val acquireBlock = edge.AcquireBlock(
    fromSource = io.id,
    toAddress = addrAlign(req.paddr, blockBytes, PAddrBits),
    lgSize = (log2Up(cacheParams.blockBytes)).U,
    growPermissions = grow_param
  )._2
  io.mem_acquire.bits := acquireBlock
  // resolve cache alias by L2
  // NOTE(review): hard-codes alias bits vaddr[13:12]; valid only while the
  // index+offset fits in 14 bits — guarded by the nSets require below.
  io.mem_acquire.bits.user.lift(AliasKey).foreach(_ := req.vaddr(13, 12))
  require(nSets <= 256) // icache size should not be more than 128KB

  /** Grant ACK */
  io.mem_finish.valid := (state === s_send_grant_ack) && is_grant
  io.mem_finish.bits := grantack

  //resp to ifu
  io.resp.valid := state === s_wait_resp
  /** update coh meta */
  // map the Grant permission (toB/toT) plus the dirty echo to the new client state
  def missCohGen(param: UInt, dirty: Bool): UInt = {
    MuxLookup(Cat(param, dirty), Nothing, Seq(
      Cat(toB, false.B) -> Branch,
      Cat(toB, true.B) -> Branch,
      Cat(toT, false.B) -> Trunk,
      Cat(toT, true.B) -> Dirty))
  }

  val miss_new_coh = ClientMetadata(missCohGen(grant_param, is_dirty))

  io.meta_write.valid := (state === s_write_back)
  io.meta_write.bits.generate(tag = req_tag, coh = miss_new_coh, idx = req_idx, waymask = req_waymask, bankIdx = req_idx(0))

  io.data_write.valid := (state === s_write_back)
  io.data_write.bits.generate(data = respDataReg.asUInt,
    idx = req_idx,
    waymask = req_waymask,
    bankIdx = req_idx(0),
    paddr = req.paddr)

  // cycles from request accept to refill response (miss penalty)
  XSPerfAccumulate(
    "entryPenalty" + Integer.toString(id, 10),
    BoolStopWatch(
      start = io.req.fire(),
      stop = io.resp.valid,
      startHighPriority = true)
  )
  XSPerfAccumulate("entryReq" + Integer.toString(id, 10), io.req.fire())

}


/**
  * Top-level ICache miss unit: instantiates one ICacheMissEntry per fetch port
  * plus nPrefetchEntries prefetch (hint) entries, and arbitrates their access
  * to the shared meta/data write ports, the replace pipe, and the TileLink
  * A/E channels. Grant beats on channel D are steered to the matching entry
  * by the TileLink source id.
  */
class ICacheMissUnit(edge: TLEdgeOut)(implicit p: Parameters) extends ICacheMissUnitModule
{
  val io = IO(new Bundle{
    val hartId = Input(UInt(8.W))
    val req = Vec(2, Flipped(DecoupledIO(new ICacheMissReq)))
    val resp = Vec(2, ValidIO(new ICacheMissResp))

    val mem_acquire = DecoupledIO(new TLBundleA(edge.bundle))
    val mem_grant = Flipped(DecoupledIO(new TLBundleD(edge.bundle)))
    val mem_finish = DecoupledIO(new TLBundleE(edge.bundle))

    val meta_write = DecoupledIO(new ICacheMetaWriteBundle)
    val data_write = DecoupledIO(new ICacheDataWriteBundle)

    val release_req = DecoupledIO(new ReplacePipeReq)
    val release_resp = Flipped(ValidIO(UInt(ReplaceIdWid.W)))

    val victimInfor = Vec(PortNumber, Output(new ICacheVictimInfor()))

    val prefetch_req = Flipped(DecoupledIO(new PIQReq))
    val prefetch_check = Vec(PortNumber, ValidIO(UInt(PAddrBits.W)))


  })
  // assign default values to output signals
  // NOTE(review): immediately overridden to true.B below (last-connect wins);
  // the false.B default is dead but kept byte-identical here.
  io.mem_grant.ready := false.B

  // per-port arbiters for the single meta/data/release ports
  val meta_write_arb = Module(new Arbiter(new ICacheMetaWriteBundle, PortNumber))
  val refill_arb = Module(new Arbiter(new ICacheDataWriteBundle, PortNumber))
  val release_arb = Module(new Arbiter(new ReplacePipeReq, PortNumber))

  io.mem_grant.ready := true.B

  val entries = (0 until PortNumber) map { i =>
    val entry = Module(new ICacheMissEntry(edge, i))

    entry.io.id := i.U

    // entry req
    entry.io.req.valid := io.req(i).valid
    entry.io.req.bits := io.req(i).bits
    io.req(i).ready := entry.io.req.ready

    // entry resp
    meta_write_arb.io.in(i) <> entry.io.meta_write
    refill_arb.io.in(i) <> entry.io.data_write
    release_arb.io.in(i) <> entry.io.release_req

    // steer Grant beats to the entry whose index matches the source id
    entry.io.mem_grant.valid := false.B
    entry.io.mem_grant.bits := DontCare
    when (io.mem_grant.bits.source === i.U) {
      entry.io.mem_grant <> io.mem_grant
    }

    io.resp(i) <> entry.io.resp

    io.victimInfor(i) := entry.io.victimInfor
    io.prefetch_check(i) <> entry.io.toPrefetch

    entry.io.release_resp <> io.release_resp

    XSPerfAccumulate(
      "entryPenalty" + Integer.toString(i, 10),
      BoolStopWatch(
        start = entry.io.req.fire(),
        stop = entry.io.resp.fire(),
        startHighPriority = true)
    )
    XSPerfAccumulate("entryReq" + Integer.toString(i, 10), entry.io.req.fire())

    entry
  }

  // index of the prefetch entry the next prefetch request is allocated to
  val alloc = Wire(UInt(log2Ceil(nPrefetchEntries).W))

  val prefEntries = (PortNumber until PortNumber + nPrefetchEntries) map { i =>
    val prefetchEntry = Module(new IPrefetchEntry(edge, PortNumber))

    prefetchEntry.io.mem_hint_ack.valid := false.B
    prefetchEntry.io.mem_hint_ack.bits := DontCare

    // NOTE(review): all prefetch entries share source id PortNumber, so every
    // prefetch entry sees the hint ack when source === PortNumber — confirm
    // this matches IPrefetchEntry's internal filtering.
    when(io.mem_grant.bits.source === PortNumber.U) {
      prefetchEntry.io.mem_hint_ack <> io.mem_grant
    }

    // only the entry selected by `alloc` accepts the incoming prefetch request
    prefetchEntry.io.req.valid := io.prefetch_req.valid && ((i-PortNumber).U === alloc)
    prefetchEntry.io.req.bits := io.prefetch_req.bits

    prefetchEntry.io.id := i.U

    prefetchEntry
  }

  // allocate into the lowest-indexed free prefetch entry; ready when any is free
  alloc := PriorityEncoder(prefEntries.map(_.io.req.ready))
  io.prefetch_req.ready := ParallelOR(prefEntries.map(_.io.req.ready))
  // miss acquires and prefetch hints share the TileLink A channel
  val tl_a_chanel = entries.map(_.io.mem_acquire) ++ prefEntries.map(_.io.mem_hint)
  TLArbiter.lowest(edge, io.mem_acquire, tl_a_chanel:_*)

  TLArbiter.lowest(edge, io.mem_finish, entries.map(_.io.mem_finish):_*)

  io.meta_write <> meta_write_arb.io.out
  io.data_write <> refill_arb.io.out
  io.release_req <> release_arb.io.out

  // record refill events for co-simulation checking
  if (env.EnableDifftest) {
    val difftest = Module(new DifftestRefillEvent)
    difftest.io.clock := clock
    difftest.io.coreid := io.hartId
    difftest.io.cacheid := 0.U
    difftest.io.valid := refill_arb.io.out.valid
    difftest.io.addr := refill_arb.io.out.bits.paddr
    difftest.io.data := refill_arb.io.out.bits.data.asTypeOf(difftest.io.data)
  }

  // per-way refill distribution counters for each fetch port
  (0 until nWays).map{ w =>
    XSPerfAccumulate("line_0_refill_way_" + Integer.toString(w, 10), entries(0).io.meta_write.valid && OHToUInt(entries(0).io.meta_write.bits.waymask) === w.U)
    XSPerfAccumulate("line_1_refill_way_" + Integer.toString(w, 10), entries(1).io.meta_write.valid && OHToUInt(entries(1).io.meta_write.bits.waymask) === w.U)
  }

}