/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.frontend.icache

import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.util._
import freechips.rocketchip.tilelink.ClientStates
import xiangshan._
import xiangshan.cache.mmu._
import utils._
import xiangshan.backend.fu.{PMPReqBundle, PMPRespBundle}

class ICacheMainPipeReq(implicit p: Parameters) extends ICacheBundle
{
  val vaddr = UInt(VAddrBits.W)
  def vsetIdx = get_idx(vaddr)
}

class ICacheMainPipeResp(implicit p: Parameters) extends ICacheBundle
{
  val vaddr    = UInt(VAddrBits.W)
  val readData = UInt(blockBits.W)
  val paddr    = UInt(PAddrBits.W)
  val tlbExcp  = new Bundle{
    val pageFault   = Bool()
    val accessFault = Bool()
    val mmio        = Bool()
  }
}

class ICacheMainPipeBundle(implicit p: Parameters) extends ICacheBundle
{
  val req  = Flipped(DecoupledIO(new ICacheMainPipeReq))
  val resp = ValidIO(new ICacheMainPipeResp)
}

class ICacheMetaReqBundle(implicit p: Parameters) extends ICacheBundle{
  val toIMeta   = Decoupled(new ICacheReadBundle)
  val fromIMeta = Input(new ICacheMetaRespBundle)
}

class ICacheDataReqBundle(implicit p: Parameters) extends ICacheBundle{
  val toIData   = Decoupled(new ICacheReadBundle)
  val fromIData = Input(new ICacheDataRespBundle)
}

class ICacheMSHRBundle(implicit p: Parameters) extends ICacheBundle{
  val toMSHR   = Decoupled(new ICacheMissReq)
  val fromMSHR = Flipped(ValidIO(new ICacheMissResp))
}

class ICachePMPBundle(implicit p: Parameters) extends ICacheBundle{
  val req  = Valid(new PMPReqBundle())
  val resp = Input(new PMPRespBundle())
}

class ICachePerfInfo(implicit p: Parameters) extends ICacheBundle{
  val only_0_hit      = Bool()
  val only_0_miss     = Bool()
  val hit_0_hit_1     = Bool()
  val hit_0_miss_1    = Bool()
  val miss_0_hit_1    = Bool()
  val miss_0_miss_1   = Bool()
  val hit_0_except_1  = Bool()
  val miss_0_except_1 = Bool()
  val except_0        = Bool()
  val bank_hit        = Vec(2, Bool())
  val hit             = Bool()
}

class ICacheMainPipeInterface(implicit p: Parameters) extends ICacheBundle {
  /*** internal interface ***/
  val metaArray = new ICacheMetaReqBundle
  val dataArray = new ICacheDataReqBundle
  val mshr      = Vec(PortNumber, new ICacheMSHRBundle)
  val errors    = Output(Vec(PortNumber, new L1CacheErrorInfo))
  /*** outside interface ***/
  val fetch     = Vec(PortNumber, new ICacheMainPipeBundle)
  val pmp       = Vec(PortNumber, new ICachePMPBundle)
  val itlb      = Vec(PortNumber * 2, new BlockTlbRequestIO)
  val respStall = Input(Bool())
  val perfInfo  = Output(new ICachePerfInfo)

  val prefetchEnable    = Output(Bool())
  val prefetchDisable   = Output(Bool())
  val csr_parity_enable = Input(Bool())
}
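
/** Main pipeline of the L1 instruction cache.
  * Judging from the port names (this is not stated explicitly in this file), the enclosing
  * ICache top presumably connects `fetch` to the IFU, `metaArray`/`dataArray` to the tag and
  * data SRAM banks, `mshr` to the miss unit, and `itlb`/`pmp` to the frontend TLB and the
  * PMP checker.
  */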
class ICacheMainPipe(implicit p: Parameters) extends ICacheModule
{
  val io = IO(new ICacheMainPipeInterface)

  /** Input/Output port */
  val (fromIFU, toIFU)   = (io.fetch.map(_.req), io.fetch.map(_.resp))
  val (toMeta, metaResp) = (io.metaArray.toIMeta, io.metaArray.fromIMeta)
  val (toData, dataResp) = (io.dataArray.toIData, io.dataArray.fromIData)
  val (toMSHR, fromMSHR) = (io.mshr.map(_.toMSHR), io.mshr.map(_.fromMSHR))
  val (toITLB, fromITLB) = (io.itlb.map(_.req), io.itlb.map(_.resp))
  val (toPMP,  fromPMP)  = (io.pmp.map(_.req), io.pmp.map(_.resp))

  /** pipeline control signal */
  val s0_ready, s1_ready, s2_ready = WireInit(false.B)
  val s0_fire,  s1_fire,  s2_fire  = WireInit(false.B)

  val missSwitchBit = RegInit(false.B)

  io.prefetchEnable  := false.B
  io.prefetchDisable := false.B
  /** replacement status register */
  val touch_sets = Seq.fill(2)(Wire(Vec(2, UInt(log2Ceil(nSets/2).W))))
  val touch_ways = Seq.fill(2)(Wire(Vec(2, Valid(UInt(log2Ceil(nWays).W)))))

  /**
    ******************************************************************************
    * ICache Stage 0
    * - send req to ITLB and wait for tlb miss fixing
    * - send req to Meta/Data SRAM
    ******************************************************************************
    */

  /** s0 control */
  val s0_valid       = fromIFU.map(_.valid).reduce(_||_)
  val s0_req_vaddr   = VecInit(fromIFU.map(_.bits.vaddr))
  val s0_req_vsetIdx = VecInit(fromIFU.map(_.bits.vsetIdx))
  val s0_only_first  = fromIFU(0).valid && !fromIFU(1).valid
  val s0_double_line = fromIFU(0).valid && fromIFU(1).valid

  val s0_slot_fire  = WireInit(false.B)
  val s0_fetch_fire = WireInit(false.B)

  val s0_can_go = WireInit(false.B)

  /** s0 tlb */
  class tlbMissSlot(implicit p: Parameters) extends ICacheBundle{
    val valid       = Bool()
    val only_first  = Bool()
    val double_line = Bool()
    val req_vaddr   = Vec(PortNumber, UInt(VAddrBits.W))
    val req_vsetIdx = Vec(PortNumber, UInt(idxBits.W))
  }

  val tlb_slot = RegInit(0.U.asTypeOf(new tlbMissSlot))

  val s0_final_vaddr       = Mux(tlb_slot.valid, tlb_slot.req_vaddr,   s0_req_vaddr)
  val s0_final_vsetIdx     = Mux(tlb_slot.valid, tlb_slot.req_vsetIdx, s0_req_vsetIdx)
  val s0_final_only_first  = Mux(tlb_slot.valid, tlb_slot.only_first,  s0_only_first)
  val s0_final_double_line = Mux(tlb_slot.valid, tlb_slot.double_line, s0_double_line)

  /** SRAM request */
  val fetch_req = List(toMeta, toData)
  for(i <- 0 until 2) {
    fetch_req(i).valid             := (s0_valid || tlb_slot.valid) && !missSwitchBit
    fetch_req(i).bits.isDoubleLine := s0_final_double_line
    fetch_req(i).bits.vSetIdx      := s0_final_vsetIdx
  }
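
  /** ITLB request: ports 0/1 carry the incoming fetch request from the IFU, while ports 2/3
    * replay the request that was parked in tlb_slot after an earlier ITLB miss.
    */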
  toITLB(0).valid         := s0_valid
  toITLB(0).bits.size     := 3.U // TODO: fix the size
  toITLB(0).bits.vaddr    := s0_req_vaddr(0)
  toITLB(0).bits.debug.pc := s0_req_vaddr(0)

  toITLB(1).valid         := s0_valid && s0_double_line
  toITLB(1).bits.size     := 3.U // TODO: fix the size
  toITLB(1).bits.vaddr    := s0_req_vaddr(1)
  toITLB(1).bits.debug.pc := s0_req_vaddr(1)

  toITLB(2).valid         := tlb_slot.valid
  toITLB(2).bits.size     := 3.U // TODO: fix the size
  toITLB(2).bits.vaddr    := tlb_slot.req_vaddr(0)
  toITLB(2).bits.debug.pc := tlb_slot.req_vaddr(0)

  toITLB(3).valid         := tlb_slot.valid && tlb_slot.double_line
  toITLB(3).bits.size     := 3.U // TODO: fix the size
  toITLB(3).bits.vaddr    := tlb_slot.req_vaddr(1)
  toITLB(3).bits.debug.pc := tlb_slot.req_vaddr(1)

  toITLB.map{port =>
    port.bits.cmd                := TlbCmd.exec
    port.bits.robIdx             := DontCare
    port.bits.debug.isFirstIssue := DontCare
  }

  /** ITLB miss wait logic */

  //** tlb 0/1 port result **//
  val tlb_miss_vec   = VecInit((0 until PortNumber).map( i => toITLB(i).valid && fromITLB(i).bits.miss ))
  val tlb_has_miss   = tlb_miss_vec.reduce(_||_)
  val tlb_miss_flush = RegNext(tlb_has_miss) && RegNext(s0_fetch_fire)

  //** tlb 2/3 port result **//
  val tlb_resp = Wire(Vec(2, Bool()))
  tlb_resp(0) := !fromITLB(2).bits.miss && toITLB(2).valid
  tlb_resp(1) := (!fromITLB(3).bits.miss && toITLB(3).valid) || !tlb_slot.double_line
  val tlb_all_resp = RegNext(tlb_resp.reduce(_&&_))

  XSPerfAccumulate("icache_bubble_s0_tlb_miss", s0_valid && tlb_has_miss)

  when(tlb_has_miss && !tlb_slot.valid){
    tlb_slot.valid       := s0_valid
    tlb_slot.only_first  := s0_only_first
    tlb_slot.double_line := s0_double_line
    tlb_slot.req_vaddr   := s0_req_vaddr
    tlb_slot.req_vsetIdx := s0_req_vsetIdx
  }

  when(tlb_slot.valid && tlb_all_resp && s0_can_go){
    tlb_slot.valid := false.B
  }

  s0_can_go     := !missSwitchBit && s1_ready && fetch_req(0).ready && fetch_req(1).ready
  s0_slot_fire  := tlb_slot.valid && tlb_all_resp && s0_can_go
  s0_fetch_fire := s0_valid && !tlb_slot.valid && s0_can_go
  s0_fire       := s0_slot_fire || s0_fetch_fire

  //TODO: fix GTimer() condition
  fromIFU.map(_.ready := fetch_req(0).ready && fetch_req(1).ready && !missSwitchBit &&
                         !tlb_slot.valid &&
                         s1_ready && GTimer() > 500.U )

  /**
    ******************************************************************************
    * ICache Stage 1
    * - get tlb resp data (exception info and physical addresses)
    * - get Meta/Data SRAM read responses (latched for pipeline stop)
    * - tag compare/hit check
    ******************************************************************************
    */

  /** s1 control */
  val tlbRespAllValid = WireInit(false.B)

  val s1_valid = generatePipeControl(lastFire = s0_fire, thisFire = s1_fire, thisFlush = tlb_miss_flush, lastFlush = false.B)

  val s1_req_vaddr   = RegEnable(next = s0_final_vaddr, enable = s0_fire)
  val s1_req_vsetIdx = RegEnable(next = s0_final_vsetIdx, enable = s0_fire)
  val s1_only_first  = RegEnable(next = s0_final_only_first, enable = s0_fire)
  val s1_double_line = RegEnable(next = s0_final_double_line, enable = s0_fire)
  val s1_tlb_miss    = RegEnable(next = tlb_slot.valid, enable = s0_fire)

  s1_ready := s2_ready && tlbRespAllValid || !s1_valid
  s1_fire  := s1_valid && tlbRespAllValid && s2_ready && !tlb_miss_flush

  fromITLB.map(_.ready := true.B)

  /** tlb response latch for pipeline stop */
  val s1_tlb_all_resp_wire = RegNext(s0_fire)
  val s1_tlb_all_resp_reg  = RegInit(false.B)

  when(s1_valid && s1_tlb_all_resp_wire && !tlb_miss_flush && !s2_ready) {s1_tlb_all_resp_reg := true.B}
  .elsewhen(s1_fire && s1_tlb_all_resp_reg)                              {s1_tlb_all_resp_reg := false.B}

  tlbRespAllValid := s1_tlb_all_resp_wire || s1_tlb_all_resp_reg
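
  /** The TLB/SRAM responses below are captured with ResultHoldBypass (from utils). The exact
    * helper is not defined in this file; presumably it bypasses the response in the cycle it
    * arrives (RegNext(s0_fire)) and holds it in a register afterwards, so the value stays
    * stable while s1 waits for s2_ready.
    */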
  val hit_tlbRespPAddr = ResultHoldBypass(valid = RegNext(s0_fire), data = VecInit((0 until PortNumber).map( i => fromITLB(i).bits.paddr)))
  val hit_tlbExcpPF    = ResultHoldBypass(valid = RegNext(s0_fire), data = VecInit((0 until PortNumber).map( i => fromITLB(i).bits.excp.pf.instr && fromITLB(i).valid)))
  val hit_tlbExcpAF    = ResultHoldBypass(valid = RegNext(s0_fire), data = VecInit((0 until PortNumber).map( i => fromITLB(i).bits.excp.af.instr && fromITLB(i).valid)))

  val miss_tlbRespPAddr = ResultHoldBypass(valid = RegNext(s0_fire), data = VecInit((PortNumber until PortNumber * 2).map( i => fromITLB(i).bits.paddr)))
  val miss_tlbExcpPF    = ResultHoldBypass(valid = RegNext(s0_fire), data = VecInit((PortNumber until PortNumber * 2).map( i => fromITLB(i).bits.excp.pf.instr && fromITLB(i).valid)))
  val miss_tlbExcpAF    = ResultHoldBypass(valid = RegNext(s0_fire), data = VecInit((PortNumber until PortNumber * 2).map( i => fromITLB(i).bits.excp.af.instr && fromITLB(i).valid)))

  val tlbRespPAddr = Mux(s1_tlb_miss, miss_tlbRespPAddr, hit_tlbRespPAddr)
  val tlbExcpPF    = Mux(s1_tlb_miss, miss_tlbExcpPF,    hit_tlbExcpPF)
  val tlbExcpAF    = Mux(s1_tlb_miss, miss_tlbExcpAF,    hit_tlbExcpAF)

  /** s1 hit check/tag compare */
  val s1_req_paddr = tlbRespPAddr
  val s1_req_ptags = VecInit(s1_req_paddr.map(get_phy_tag(_)))

  val s1_meta_ptags  = ResultHoldBypass(data = metaResp.tags,   valid = RegNext(s0_fire))
  val s1_meta_cohs   = ResultHoldBypass(data = metaResp.cohs,   valid = RegNext(s0_fire))
  val s1_meta_errors = ResultHoldBypass(data = metaResp.errors, valid = RegNext(s0_fire))

  val s1_data_cacheline = ResultHoldBypass(data = dataResp.datas, valid = RegNext(s0_fire))
  val s1_data_errorBits = ResultHoldBypass(data = dataResp.codes, valid = RegNext(s0_fire))

  val s1_tag_eq_vec    = VecInit((0 until PortNumber).map( p => VecInit((0 until nWays).map( w => s1_meta_ptags(p)(w) === s1_req_ptags(p) ))))
  val s1_tag_match_vec = VecInit((0 until PortNumber).map( k => VecInit(s1_tag_eq_vec(k).zipWithIndex.map{ case(way_tag_eq, w) => way_tag_eq && s1_meta_cohs(k)(w).isValid()})))
  val s1_tag_match     = VecInit(s1_tag_match_vec.map(vector => ParallelOR(vector)))

  val s1_port_hit  = VecInit(Seq( s1_tag_match(0) && s1_valid && !tlbExcpPF(0) && !tlbExcpAF(0),  s1_tag_match(1) && s1_valid && s1_double_line && !tlbExcpPF(1) && !tlbExcpAF(1) ))
  val s1_bank_miss = VecInit(Seq(!s1_tag_match(0) && s1_valid && !tlbExcpPF(0) && !tlbExcpAF(0), !s1_tag_match(1) && s1_valid && s1_double_line && !tlbExcpPF(1) && !tlbExcpAF(1) ))
  val s1_hit       = (s1_port_hit(0) && s1_port_hit(1)) || (!s1_double_line && s1_port_hit(0))

  /** choose victim cacheline */
  val replacers    = Seq.fill(PortNumber)(ReplacementPolicy.fromString(cacheParams.replacer, nWays, nSets/PortNumber))
  val s1_victim_oh = ResultHoldBypass(data = VecInit(replacers.zipWithIndex.map{case (replacer, i) => UIntToOH(replacer.way(s1_req_vsetIdx(i)))}), valid = RegNext(s0_fire))

  val s1_victim_coh = VecInit(s1_victim_oh.zipWithIndex.map {case(oh, port) => Mux1H(oh, s1_meta_cohs(port))})

  assert(PopCount(s1_tag_match_vec(0)) <= 1.U && PopCount(s1_tag_match_vec(1)) <= 1.U, "Multiple hit in main pipe")

  ((replacers zip touch_sets) zip touch_ways).map{case ((r, s), w) => r.access(s, w)}

  val s1_hit_data = VecInit(s1_data_cacheline.zipWithIndex.map { case(bank, i) =>
    val port_hit_data = Mux1H(s1_tag_match_vec(i).asUInt, bank)
    port_hit_data
  })
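
  /** Replacement note: there is one replacer per port, each covering nSets/PortNumber sets.
    * The victim way is picked here in s1 (and held across stalls by ResultHoldBypass); the
    * replacer state itself is only touched in s2 through touch_sets/touch_ways, using the
    * hit way on a hit and the victim way on a miss.
    */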
  /** <PERF> replace victim way number */

  (0 until nWays).map{ w =>
    XSPerfAccumulate("line_0_hit_way_" + Integer.toString(w, 10), s1_fire && s1_port_hit(0) && OHToUInt(s1_tag_match_vec(0)) === w.U)
  }

  (0 until nWays).map{ w =>
    XSPerfAccumulate("line_0_victim_way_" + Integer.toString(w, 10), s1_fire && !s1_port_hit(0) && OHToUInt(s1_victim_oh(0)) === w.U)
  }

  (0 until nWays).map{ w =>
    XSPerfAccumulate("line_1_hit_way_" + Integer.toString(w, 10), s1_fire && s1_double_line && s1_port_hit(1) && OHToUInt(s1_tag_match_vec(1)) === w.U)
  }

  (0 until nWays).map{ w =>
    XSPerfAccumulate("line_1_victim_way_" + Integer.toString(w, 10), s1_fire && s1_double_line && !s1_port_hit(1) && OHToUInt(s1_victim_oh(1)) === w.U)
  }

  /**
    ******************************************************************************
    * ICache Stage 2
    * - send request to MSHR if ICache miss
    * - generate secondary miss status/data registers
    * - response to IFU
    ******************************************************************************
    */

  /** s2 control */
  val s2_fetch_finish = Wire(Bool())

  val s2_valid          = generatePipeControl(lastFire = s1_fire, thisFire = s2_fire, thisFlush = false.B, lastFlush = tlb_miss_flush)
  val s2_miss_available = Wire(Bool())

  s2_ready := (s2_valid && s2_fetch_finish && !io.respStall) || (!s2_valid && s2_miss_available)
  s2_fire  := s2_valid && s2_fetch_finish && !io.respStall

  /** s2 data */
  val mmio = fromPMP.map(port => port.mmio) // TODO: handle it

  val (s2_req_paddr, s2_req_vaddr) = (RegEnable(next = s1_req_paddr, enable = s1_fire), RegEnable(next = s1_req_vaddr, enable = s1_fire))
  val s2_req_vsetIdx   = RegEnable(next = s1_req_vsetIdx, enable = s1_fire)
  val s2_req_ptags     = RegEnable(next = s1_req_ptags, enable = s1_fire)
  val s2_only_first    = RegEnable(next = s1_only_first, enable = s1_fire)
  val s2_double_line   = RegEnable(next = s1_double_line, enable = s1_fire)
  val s2_hit           = RegEnable(next = s1_hit, enable = s1_fire)
  val s2_port_hit      = RegEnable(next = s1_port_hit, enable = s1_fire)
  val s2_bank_miss     = RegEnable(next = s1_bank_miss, enable = s1_fire)
  val s2_waymask       = RegEnable(next = s1_victim_oh, enable = s1_fire)
  val s2_victim_coh    = RegEnable(next = s1_victim_coh, enable = s1_fire)
  val s2_tag_match_vec = RegEnable(next = s1_tag_match_vec, enable = s1_fire)

  /** status that implies s2 is a secondary miss (no need to resend the miss request) */
  val sec_meet_vec     = Wire(Vec(2, Bool()))
  val s2_fixed_hit_vec = VecInit((0 until 2).map(i => s2_port_hit(i) || sec_meet_vec(i)))
  val s2_fixed_hit     = (s2_valid && s2_fixed_hit_vec(0) && s2_fixed_hit_vec(1) && s2_double_line) || (s2_valid && s2_fixed_hit_vec(0) && !s2_double_line)

  val s2_meta_errors    = RegEnable(next = s1_meta_errors, enable = s1_fire)
  val s2_data_errorBits = RegEnable(next = s1_data_errorBits, enable = s1_fire)
  val s2_data_cacheline = RegEnable(next = s1_data_cacheline, enable = s1_fire)

  val s2_data_errors = Wire(Vec(PortNumber, Vec(nWays, Bool())))
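
  /** Data parity check: each way's data is split into dataCodeUnitNum units, re-joined with
    * its stored check bits and re-decoded with cacheParams.dataCode. The per-way error bits
    * are staged through RegNext (valid two cycles after s1_fire) before being reported, and
    * the final error signals are further qualified by io.csr_parity_enable below.
    */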
  (0 until PortNumber).map{ i =>
    val read_datas = s2_data_cacheline(i).asTypeOf(Vec(nWays, Vec(dataCodeUnitNum, UInt(dataCodeUnit.W))))
    val read_codes = s2_data_errorBits(i).asTypeOf(Vec(nWays, Vec(dataCodeUnitNum, UInt(dataCodeBits.W))))
    val data_full_wayBits = VecInit((0 until nWays).map( w =>
                              VecInit((0 until dataCodeUnitNum).map(u =>
                                Cat(read_codes(w)(u), read_datas(w)(u))))))
    val data_error_wayBits = VecInit((0 until nWays).map( w =>
                               VecInit((0 until dataCodeUnitNum).map(u =>
                                 cacheParams.dataCode.decode(data_full_wayBits(w)(u)).error ))))
    if(i == 0){
      (0 until nWays).map{ w =>
        s2_data_errors(i)(w) := RegNext(RegNext(s1_fire)) && RegNext(data_error_wayBits(w)).reduce(_||_)
      }
    } else {
      (0 until nWays).map{ w =>
        s2_data_errors(i)(w) := RegNext(RegNext(s1_fire)) && RegNext(RegNext(s1_double_line)) && RegNext(data_error_wayBits(w)).reduce(_||_)
      }
    }
  }

  val s2_parity_meta_error = VecInit((0 until PortNumber).map(i => s2_meta_errors(i).reduce(_||_) && io.csr_parity_enable))
  val s2_parity_data_error = VecInit((0 until PortNumber).map(i => s2_data_errors(i).reduce(_||_) && io.csr_parity_enable))
  val s2_parity_error      = VecInit((0 until PortNumber).map(i => RegNext(s2_parity_meta_error(i)) || s2_parity_data_error(i)))

  for(i <- 0 until PortNumber){
    io.errors(i).valid         := RegNext(s2_parity_error(i) && RegNext(RegNext(s1_fire)))
    io.errors(i).report_to_beu := RegNext(s2_parity_error(i) && RegNext(RegNext(s1_fire)))
    io.errors(i).paddr         := RegNext(RegNext(s2_req_paddr(i)))
    io.errors(i).source        := DontCare
    io.errors(i).source.tag    := RegNext(RegNext(s2_parity_meta_error(i)))
    io.errors(i).source.data   := RegNext(s2_parity_data_error(i))
    io.errors(i).source.l2     := false.B
    io.errors(i).opType        := DontCare
    io.errors(i).opType.fetch  := true.B
  }
  XSError(s2_parity_error.reduce(_||_) && RegNext(RegNext(s1_fire)), "ICache has parity error in MainPipe!")

  /** exception and pmp logic **/
  //PMP Result
  val pmpExcpAF = Wire(Vec(PortNumber, Bool()))
  pmpExcpAF(0) := fromPMP(0).instr
  pmpExcpAF(1) := fromPMP(1).instr && s2_double_line
  //exception information
  val s2_except_pf = RegEnable(next = tlbExcpPF, enable = s1_fire)
  val s2_except_af = VecInit(RegEnable(next = tlbExcpAF, enable = s1_fire).zip(pmpExcpAF).map{
    case(tlbAf, pmpAf) => tlbAf || DataHoldBypass(pmpAf, RegNext(s1_fire)).asBool})
  val s2_except     = VecInit((0 until 2).map{i => s2_except_pf(i) || s2_except_af(i)})
  val s2_has_except = s2_valid && (s2_except_af.reduce(_||_) || s2_except_pf.reduce(_||_))
  //MMIO
  val s2_mmio = DataHoldBypass(io.pmp(0).resp.mmio && !s2_except_af(0) && !s2_except_pf(0), RegNext(s1_fire)).asBool()

  //send physical address to PMP
  io.pmp.zipWithIndex.map { case (p, i) =>
    p.req.valid     := s2_valid && !missSwitchBit
    p.req.bits.addr := s2_req_paddr(i)
    p.req.bits.size := 3.U // TODO
    p.req.bits.cmd  := TlbCmd.exec
  }

  /*** cacheline miss logic ***/
  val wait_idle :: wait_queue_ready :: wait_send_req :: wait_two_resp :: wait_0_resp :: wait_1_resp :: wait_one_resp :: wait_finish :: Nil = Enum(8)
  val wait_state = RegInit(wait_idle)

  val port_miss_fix = VecInit(Seq(fromMSHR(0).fire() && !s2_port_hit(0), fromMSHR(1).fire() && s2_double_line && !s2_port_hit(1) ))

  // secondary miss record registers
  class MissSlot(implicit p: Parameters) extends ICacheBundle {
    val m_vSetIdx = UInt(idxBits.W)
    val m_pTag    = UInt(tagBits.W)
    val m_data    = UInt(blockBits.W)
    val m_corrupt = Bool()
  }

  val missSlot = Seq.fill(2)(RegInit(0.U.asTypeOf(new MissSlot)))
  val m_invalid :: m_valid :: m_refilled :: m_flushed :: m_wait_sec_miss :: m_check_final :: Nil = Enum(6)
  val missStateQueue     = RegInit(VecInit(Seq.fill(2)(m_invalid)))
  val reservedRefillData = Wire(Vec(2, UInt(blockBits.W)))

  s2_miss_available := VecInit(missStateQueue.map(entry => entry === m_invalid || entry === m_wait_sec_miss)).reduce(_&&_)
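
  /** Secondary-miss bookkeeping: each MissSlot remembers the vSetIdx/pTag of an outstanding
    * miss and, once refilled, its data. If the request currently in s2 misses on a line that
    * a slot is still holding (state m_wait_sec_miss), the refilled data can be reused instead
    * of sending a new request to the MSHR. fix_sec_miss(2*slot + port) below means
    * "miss slot `slot` matches s2 port `port`".
    */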
  val fix_sec_miss    = Wire(Vec(4, Bool()))
  val sec_meet_0_miss = fix_sec_miss(0) || fix_sec_miss(2)
  val sec_meet_1_miss = fix_sec_miss(1) || fix_sec_miss(3)
  sec_meet_vec := VecInit(Seq(sec_meet_0_miss, sec_meet_1_miss))

  /*** miss/hit pattern: <Control Signal> only raised in the first cycle of s2_valid ***/
  val cacheline_0_hit  = (s2_port_hit(0) || sec_meet_0_miss)
  val cacheline_0_miss = !s2_port_hit(0) && !sec_meet_0_miss

  val cacheline_1_hit  = (s2_port_hit(1) || sec_meet_1_miss)
  val cacheline_1_miss = !s2_port_hit(1) && !sec_meet_1_miss

  val only_0_miss   = RegNext(s1_fire) && cacheline_0_miss && !s2_double_line && !s2_has_except && !s2_mmio
  val only_0_hit    = RegNext(s1_fire) && cacheline_0_hit  && !s2_double_line && !s2_mmio
  val hit_0_hit_1   = RegNext(s1_fire) && cacheline_0_hit  && cacheline_1_hit  && s2_double_line && !s2_mmio
  val hit_0_miss_1  = RegNext(s1_fire) && cacheline_0_hit  && cacheline_1_miss && s2_double_line && !s2_has_except && !s2_mmio
  val miss_0_hit_1  = RegNext(s1_fire) && cacheline_0_miss && cacheline_1_hit  && s2_double_line && !s2_has_except && !s2_mmio
  val miss_0_miss_1 = RegNext(s1_fire) && cacheline_0_miss && cacheline_1_miss && s2_double_line && !s2_has_except && !s2_mmio

  val hit_0_except_1  = RegNext(s1_fire) && s2_double_line && !s2_except(0) && s2_except(1) && cacheline_0_hit
  val miss_0_except_1 = RegNext(s1_fire) && s2_double_line && !s2_except(0) && s2_except(1) && cacheline_0_miss
  val except_0        = RegNext(s1_fire) && s2_except(0)

  def holdReleaseLatch(valid: Bool, release: Bool, flush: Bool): Bool = {
    val bit = RegInit(false.B)
    when(flush)                  { bit := false.B }
    .elsewhen(valid && !release) { bit := true.B  }
    .elsewhen(release)           { bit := false.B }
    bit || valid
  }

  /*** miss/hit pattern latch: <Control Signal> latch the miss/hit pattern if the pipeline stops ***/
  val miss_0_hit_1_latch  = holdReleaseLatch(valid = miss_0_hit_1,  release = s2_fire, flush = false.B)
  val miss_0_miss_1_latch = holdReleaseLatch(valid = miss_0_miss_1, release = s2_fire, flush = false.B)
  val only_0_miss_latch   = holdReleaseLatch(valid = only_0_miss,   release = s2_fire, flush = false.B)
  val hit_0_miss_1_latch  = holdReleaseLatch(valid = hit_0_miss_1,  release = s2_fire, flush = false.B)

  val miss_0_except_1_latch = holdReleaseLatch(valid = miss_0_except_1, release = s2_fire, flush = false.B)
  val except_0_latch        = holdReleaseLatch(valid = except_0,        release = s2_fire, flush = false.B)
  val hit_0_except_1_latch  = holdReleaseLatch(valid = hit_0_except_1,  release = s2_fire, flush = false.B)

  val only_0_hit_latch  = holdReleaseLatch(valid = only_0_hit,  release = s2_fire, flush = false.B)
  val hit_0_hit_1_latch = holdReleaseLatch(valid = hit_0_hit_1, release = s2_fire, flush = false.B)

  /*** secondary miss judgement ***/

  def waitSecondComeIn(missState: UInt): Bool = (missState === m_wait_sec_miss)

  def getMissSituat(slotNum: Int, missNum: Int): Bool = {
    RegNext(s1_fire) && (missSlot(slotNum).m_vSetIdx === s2_req_vsetIdx(missNum)) && (missSlot(slotNum).m_pTag === s2_req_ptags(missNum)) && !s2_port_hit(missNum) && waitSecondComeIn(missStateQueue(slotNum)) //&& !s2_mmio
  }

  val miss_0_s2_0 = getMissSituat(slotNum = 0, missNum = 0)
  val miss_0_s2_1 = getMissSituat(slotNum = 0, missNum = 1)
  val miss_1_s2_0 = getMissSituat(slotNum = 1, missNum = 0)
  val miss_1_s2_1 = getMissSituat(slotNum = 1, missNum = 1)
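
  /** miss_<slot>_s2_<port> is only asserted in the first s2 cycle (RegNext(s1_fire)); the
    * holdReleaseLatch versions below keep the result stable until s2_fire when the pipeline
    * is stalled.
    */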
  val miss_0_s2_0_latch = holdReleaseLatch(valid = miss_0_s2_0, release = s2_fire, flush = false.B)
  val miss_0_s2_1_latch = holdReleaseLatch(valid = miss_0_s2_1, release = s2_fire, flush = false.B)
  val miss_1_s2_0_latch = holdReleaseLatch(valid = miss_1_s2_0, release = s2_fire, flush = false.B)
  val miss_1_s2_1_latch = holdReleaseLatch(valid = miss_1_s2_1, release = s2_fire, flush = false.B)

  val slot_0_solve = fix_sec_miss(0) || fix_sec_miss(1)
  val slot_1_solve = fix_sec_miss(2) || fix_sec_miss(3)
  val slot_solve   = VecInit(Seq(slot_0_solve, slot_1_solve))

  fix_sec_miss := VecInit(Seq(miss_0_s2_0_latch, miss_0_s2_1_latch, miss_1_s2_0_latch, miss_1_s2_1_latch))

  /*** reserved data for secondary miss ***/

  reservedRefillData(0) := DataHoldBypass(data = missSlot(0).m_data, valid = miss_0_s2_0 || miss_0_s2_1)
  reservedRefillData(1) := DataHoldBypass(data = missSlot(1).m_data, valid = miss_1_s2_0 || miss_1_s2_1)

  /*** miss state machine ***/

  switch(wait_state){
    is(wait_idle){
      when(miss_0_except_1_latch){
        wait_state := Mux(toMSHR(0).ready, wait_queue_ready, wait_idle)
      }.elsewhen(only_0_miss_latch || miss_0_hit_1_latch){
        wait_state := Mux(toMSHR(0).ready, wait_queue_ready, wait_idle)
      }.elsewhen(hit_0_miss_1_latch){
        wait_state := Mux(toMSHR(1).ready, wait_queue_ready, wait_idle)
      }.elsewhen(miss_0_miss_1_latch){
        wait_state := Mux(toMSHR(0).ready && toMSHR(1).ready, wait_queue_ready, wait_idle)
      }
    }

    is(wait_queue_ready){
      wait_state := wait_send_req
    }

    is(wait_send_req) {
      when(miss_0_except_1_latch || only_0_miss_latch || hit_0_miss_1_latch || miss_0_hit_1_latch){
        wait_state := wait_one_resp
      }.elsewhen(miss_0_miss_1_latch){
        wait_state := wait_two_resp
      }
    }

    is(wait_one_resp) {
      when((miss_0_except_1_latch || only_0_miss_latch || miss_0_hit_1_latch) && fromMSHR(0).fire()){
        wait_state := wait_finish
      }.elsewhen(hit_0_miss_1_latch && fromMSHR(1).fire()){
        wait_state := wait_finish
      }
    }

    is(wait_two_resp) {
      when(fromMSHR(0).fire() && fromMSHR(1).fire()){
        wait_state := wait_finish
      }.elsewhen(!fromMSHR(0).fire() && fromMSHR(1).fire()){
        wait_state := wait_0_resp
      }.elsewhen(fromMSHR(0).fire() && !fromMSHR(1).fire()){
        wait_state := wait_1_resp
      }
    }

    is(wait_0_resp) {
      when(fromMSHR(0).fire()){
        wait_state := wait_finish
      }
    }

    is(wait_1_resp) {
      when(fromMSHR(1).fire()){
        wait_state := wait_finish
      }
    }

    is(wait_finish) {
      when(s2_fire) { wait_state := wait_idle }
    }
  }
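
  /** Per-port miss lifecycle (missStateQueue, driven in the loop below): m_invalid -> m_valid
    * (request accepted by the MSHR) -> m_refilled (data returned) -> m_wait_sec_miss (kept
    * around for a possible secondary miss) -> m_check_final or back to m_invalid. The global
    * wait_state FSM above only tracks how many MSHR responses the request currently in s2
    * still needs before it can finish.
    */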
  /*** send request to MissUnit ***/

  (0 until 2).map { i =>
    if(i == 1) toMSHR(i).valid := (hit_0_miss_1_latch || miss_0_miss_1_latch) && wait_state === wait_queue_ready && !s2_mmio
    else       toMSHR(i).valid := (only_0_miss_latch || miss_0_hit_1_latch || miss_0_miss_1_latch || miss_0_except_1_latch) && wait_state === wait_queue_ready && !s2_mmio
    toMSHR(i).bits.paddr   := s2_req_paddr(i)
    toMSHR(i).bits.vaddr   := s2_req_vaddr(i)
    toMSHR(i).bits.waymask := s2_waymask(i)
    toMSHR(i).bits.coh     := s2_victim_coh(i)

    when(toMSHR(i).fire() && missStateQueue(i) === m_invalid){
      missStateQueue(i)     := m_valid
      missSlot(i).m_vSetIdx := s2_req_vsetIdx(i)
      missSlot(i).m_pTag    := get_phy_tag(s2_req_paddr(i))
    }

    when(fromMSHR(i).fire() && missStateQueue(i) === m_valid){
      missStateQueue(i)     := m_refilled
      missSlot(i).m_data    := fromMSHR(i).bits.data
      missSlot(i).m_corrupt := fromMSHR(i).bits.corrupt
    }

    when(s2_fire && missStateQueue(i) === m_refilled){
      missStateQueue(i) := m_wait_sec_miss
    }

    /*** Only the first cycle checks whether the secondary miss is met ***/
    when(missStateQueue(i) === m_wait_sec_miss){
      /*** The secondary req has been fixed by this slot and the other port also hit || the secondary req is for another cacheline and hit ***/
      when((slot_solve(i) && s2_fire) || (!slot_solve(i) && s2_fire)) {
        missStateQueue(i) := m_invalid
      }
      /*** The secondary req has been fixed by this slot but the other port misses / f3 is not ready || the secondary req is for another cacheline and misses ***/
      .elsewhen((slot_solve(i) && !s2_fire && s2_valid) || (s2_valid && !slot_solve(i) && !s2_fire)){
        missStateQueue(i) := m_check_final
      }
    }

    when(missStateQueue(i) === m_check_final && toMSHR(i).fire()){
      missStateQueue(i)     := m_valid
      missSlot(i).m_vSetIdx := s2_req_vsetIdx(i)
      missSlot(i).m_pTag    := get_phy_tag(s2_req_paddr(i))
    }.elsewhen(missStateQueue(i) === m_check_final) {
      missStateQueue(i) := m_invalid
    }
  }

  when(toMSHR.map(_.valid).reduce(_||_)){
    missSwitchBit     := true.B
    io.prefetchEnable := true.B
  }.elsewhen(missSwitchBit && s2_fetch_finish){
    missSwitchBit      := false.B
    io.prefetchDisable := true.B
  }

  val miss_all_fix = wait_state === wait_finish
  s2_fetch_finish := ((s2_valid && s2_fixed_hit) || miss_all_fix || hit_0_except_1_latch || except_0_latch || s2_mmio)

  /** update replacement status register: index 0 is the hit access, index 1 is the miss access */
  (touch_ways zip touch_sets).zipWithIndex.map{ case((t_w, t_s), i) =>
    t_s(0)       := s2_req_vsetIdx(i)
    t_w(0).valid := s2_valid && s2_port_hit(i)
    t_w(0).bits  := OHToUInt(s2_tag_match_vec(i))

    t_s(1)       := s2_req_vsetIdx(i)
    t_w(1).valid := s2_valid && !s2_port_hit(i)
    t_w(1).bits  := OHToUInt(s2_waymask(i))
  }

  val s2_hit_datas = RegEnable(next = s1_hit_data, enable = s1_fire)
  val s2_datas     = Wire(Vec(2, UInt(blockBits.W)))

  s2_datas.zipWithIndex.map{case(bank, i) =>
    if(i == 0) bank := Mux(s2_port_hit(i), s2_hit_datas(i), Mux(miss_0_s2_0_latch, reservedRefillData(0), Mux(miss_1_s2_0_latch, reservedRefillData(1), missSlot(0).m_data)))
    else       bank := Mux(s2_port_hit(i), s2_hit_datas(i), Mux(miss_0_s2_1_latch, reservedRefillData(0), Mux(miss_1_s2_1_latch, reservedRefillData(1), missSlot(1).m_data)))
  }

  /** response to IFU */

  (0 until PortNumber).map{ i =>
    if(i == 0) toIFU(i).valid := s2_fire
    else       toIFU(i).valid := s2_fire && s2_double_line
    toIFU(i).bits.readData            := s2_datas(i)
    toIFU(i).bits.paddr               := s2_req_paddr(i)
    toIFU(i).bits.vaddr               := s2_req_vaddr(i)
    toIFU(i).bits.tlbExcp.pageFault   := s2_except_pf(i)
    toIFU(i).bits.tlbExcp.accessFault := s2_except_af(i) || missSlot(i).m_corrupt
    toIFU(i).bits.tlbExcp.mmio        := s2_mmio

    when(RegNext(s2_fire && missSlot(i).m_corrupt)){
      io.errors(i).valid         := true.B
      io.errors(i).report_to_beu := false.B // L2 should have reported that to the bus error unit, no need to do it again
      io.errors(i).paddr         := RegNext(s2_req_paddr(i))
      io.errors(i).source.tag    := false.B
      io.errors(i).source.data   := false.B
      io.errors(i).source.l2     := true.B
    }
  }
  io.perfInfo.only_0_hit      := only_0_hit_latch
  io.perfInfo.only_0_miss     := only_0_miss_latch
  io.perfInfo.hit_0_hit_1     := hit_0_hit_1_latch
  io.perfInfo.hit_0_miss_1    := hit_0_miss_1_latch
  io.perfInfo.miss_0_hit_1    := miss_0_hit_1_latch
  io.perfInfo.miss_0_miss_1   := miss_0_miss_1_latch
  io.perfInfo.hit_0_except_1  := hit_0_except_1_latch
  io.perfInfo.miss_0_except_1 := miss_0_except_1_latch
  io.perfInfo.except_0        := except_0_latch
  io.perfInfo.bank_hit(0)     := only_0_miss_latch || hit_0_hit_1_latch || hit_0_miss_1_latch || hit_0_except_1_latch
  io.perfInfo.bank_hit(1)     := miss_0_hit_1_latch || hit_0_hit_1_latch
  io.perfInfo.hit             := hit_0_hit_1_latch || only_0_hit_latch || hit_0_except_1_latch || except_0_latch

  /** <PERF> fetch bubble generated by icache miss */

  XSPerfAccumulate("icache_bubble_s2_miss", s2_valid && !s2_fetch_finish)

}
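
// Note: `generatePipeControl`, `ResultHoldBypass` and `DataHoldBypass` are helpers provided
// by `utils`/the ICache base classes and are not defined in this file. As a reference only,
// a minimal sketch of what `generatePipeControl` is presumed to do (valid-bit bookkeeping
// for one pipeline stage); the real helper may differ in detail:
//
//   def generatePipeControl(lastFire: Bool, thisFire: Bool,
//                           thisFlush: Bool, lastFlush: Bool): Bool = {
//     val valid = RegInit(false.B)
//     when(thisFlush)                     { valid := false.B }
//       .elsewhen(lastFire && !lastFlush) { valid := true.B  }
//       .elsewhen(thisFire)               { valid := false.B }
//     valid
//   }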