1/*************************************************************************************** 2* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences 3* Copyright (c) 2020-2021 Peng Cheng Laboratory 4* 5* XiangShan is licensed under Mulan PSL v2. 6* You can use this software according to the terms and conditions of the Mulan PSL v2. 7* You may obtain a copy of Mulan PSL v2 at: 8* http://license.coscl.org.cn/MulanPSL2 9* 10* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND, 11* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, 12* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE. 13* 14* See the Mulan PSL v2 for more details. 15***************************************************************************************/ 16 17package xiangshan.frontend 18 19import chipsalliance.rocketchip.config.Parameters 20import chisel3._ 21import chisel3.stage.{ChiselGeneratorAnnotation, ChiselStage} 22import chisel3.util._ 23import xiangshan._ 24import utils._ 25import chisel3.experimental.chiselName 26 27import scala.math.min 28import os.copy 29 30 31trait FTBParams extends HasXSParameter with HasBPUConst { 32 val numEntries = 4096 33 val numWays = 4 34 val numSets = numEntries/numWays // 512 35 val tagSize = 20 36 37 38 39 val TAR_STAT_SZ = 2 40 def TAR_FIT = 0.U(TAR_STAT_SZ.W) 41 def TAR_OVF = 1.U(TAR_STAT_SZ.W) 42 def TAR_UDF = 2.U(TAR_STAT_SZ.W) 43 44 def BR_OFFSET_LEN = 12 45 def JMP_OFFSET_LEN = 20 46} 47 48class FtbSlot(val offsetLen: Int, val subOffsetLen: Int = 0)(implicit p: Parameters) extends XSBundle with FTBParams { 49 require(subOffsetLen <= offsetLen) 50 val offset = UInt(log2Ceil(PredictWidth).W) 51 val lower = UInt(offsetLen.W) 52 val tarStat = UInt(TAR_STAT_SZ.W) 53 val sharing = Bool() 54 val valid = Bool() 55 56 def setLowerStatByTarget(pc: UInt, target: UInt, isShare: Boolean) = { 57 def getTargetStatByHigher(pc_higher: UInt, target_higher: UInt) = 58 Mux(target_higher > pc_higher, TAR_OVF, 59 Mux(target_higher < pc_higher, TAR_UDF, TAR_FIT)) 60 def getLowerByTarget(target: UInt, offsetLen: Int) = target(offsetLen, 1) 61 val offLen = if (isShare) this.subOffsetLen else this.offsetLen 62 val pc_higher = pc(VAddrBits-1, offLen+1) 63 val target_higher = target(VAddrBits-1, offLen+1) 64 val stat = getTargetStatByHigher(pc_higher, target_higher) 65 val lower = ZeroExt(getLowerByTarget(target, offLen), this.offsetLen) 66 this.lower := lower 67 this.tarStat := stat 68 this.sharing := isShare.B 69 } 70 71 def getTarget(pc: UInt) = { 72 def getTarget(offLen: Int)(pc: UInt, lower: UInt, stat: UInt) = { 73 val higher = pc(VAddrBits-1, offLen+1) 74 val target = 75 Cat( 76 Mux(stat === TAR_OVF, higher+1.U, 77 Mux(stat === TAR_UDF, higher-1.U, higher)), 78 lower(offLen-1, 0), 0.U(1.W) 79 ) 80 require(target.getWidth == VAddrBits) 81 require(offLen != 0) 82 target 83 } 84 if (subOffsetLen != 0) 85 Mux(sharing, 86 getTarget(subOffsetLen)(pc, lower, tarStat), 87 getTarget(offsetLen)(pc, lower, tarStat) 88 ) 89 else 90 getTarget(offsetLen)(pc, lower, tarStat) 91 } 92 def fromAnotherSlot(that: FtbSlot) = { 93 require( 94 this.offsetLen > that.offsetLen && that.offsetLen == this.subOffsetLen || 95 this.offsetLen == that.offsetLen 96 ) 97 this.offset := that.offset 98 this.tarStat := that.tarStat 99 this.sharing := (this.offsetLen > that.offsetLen && that.offsetLen == this.subOffsetLen).B 100 this.valid := that.valid 101 this.lower := ZeroExt(that.lower, this.offsetLen) 102 } 103 104} 105 106class FTBEntry(implicit p: Parameters) extends XSBundle with FTBParams with BPUUtils { 107 108 109 val valid = Bool() 110 111 val brSlots = Vec(numBrSlot, new FtbSlot(BR_OFFSET_LEN)) 112 113 // if shareTailSlot is set, this slot can hold a branch or a jal/jalr 114 // else this slot holds only jal/jalr 115 val tailSlot = new FtbSlot(JMP_OFFSET_LEN, BR_OFFSET_LEN) 116 117 // Partial Fall-Through Address 118 val pftAddr = UInt((log2Up(PredictWidth)+1).W) 119 val carry = Bool() 120 121 val isCall = Bool() 122 val isRet = Bool() 123 val isJalr = Bool() 124 125 // 126 val oversize = Bool() 127 128 val last_is_rvc = Bool() 129 130 val always_taken = Vec(numBr, Bool()) 131 132 def getSlotForBr(idx: Int): FtbSlot = { 133 require( 134 idx < numBr-1 || idx == numBr-1 && !shareTailSlot || 135 idx == numBr-1 && shareTailSlot 136 ) 137 (idx, numBr, shareTailSlot) match { 138 case (i, n, true) if i == n-1 => this.tailSlot 139 case _ => this.brSlots(idx) 140 } 141 } 142 def allSlotsForBr = { 143 (0 until numBr).map(getSlotForBr(_)) 144 } 145 def setByBrTarget(brIdx: Int, pc: UInt, target: UInt) = { 146 val slot = getSlotForBr(brIdx) 147 slot.setLowerStatByTarget(pc, target, shareTailSlot && brIdx == numBr-1) 148 } 149 def setByJmpTarget(pc: UInt, target: UInt) = { 150 this.tailSlot.setLowerStatByTarget(pc, target, false) 151 } 152 153 def getTargetVec(pc: UInt) = { 154 VecInit((brSlots :+ tailSlot).map(_.getTarget(pc))) 155 } 156 157 def getOffsetVec = VecInit(brSlots.map(_.offset) :+ tailSlot.offset) 158 def isJal = !isJalr 159 def getFallThrough(pc: UInt) = getFallThroughAddr(pc, carry, pftAddr) 160 def hasBr(offset: UInt) = 161 brSlots.map{ s => s.valid && s.offset <= offset}.reduce(_||_) || 162 (shareTailSlot.B && tailSlot.valid && tailSlot.offset <= offset && tailSlot.sharing) 163 164 def getBrMaskByOffset(offset: UInt) = 165 brSlots.map{ s => s.valid && s.offset <= offset } ++ 166 (if (shareTailSlot) Seq(tailSlot.valid && tailSlot.offset <= offset && tailSlot.sharing) else Nil) 167 168 def getBrRecordedVec(offset: UInt) = { 169 VecInit( 170 brSlots.map(s => s.valid && s.offset === offset) ++ 171 (if (shareTailSlot) Seq(tailSlot.valid && tailSlot.offset === offset && tailSlot.sharing) else Nil) 172 ) 173 } 174 175 def brIsSaved(offset: UInt) = getBrRecordedVec(offset).reduce(_||_) 176 177 def onNotHit(pc: UInt) = { 178 pftAddr := pc(instOffsetBits + log2Ceil(PredictWidth), instOffsetBits) ^ (1 << log2Ceil(PredictWidth)).U 179 carry := pc(instOffsetBits + log2Ceil(PredictWidth)).asBool 180 oversize := false.B 181 } 182 183 def brValids = { 184 VecInit( 185 brSlots.map(_.valid) ++ 186 (if (shareTailSlot) Seq(tailSlot.valid && tailSlot.sharing) else Nil) 187 ) 188 } 189 190 def noEmptySlotForNewBr = { 191 VecInit( 192 brSlots.map(_.valid) ++ 193 (if (shareTailSlot) Seq(tailSlot.valid) else Nil) 194 ).reduce(_&&_) 195 } 196 197 def newBrCanNotInsert(offset: UInt) = { 198 val lastSlotForBr = if (shareTailSlot) tailSlot else brSlots.last 199 lastSlotForBr.valid && lastSlotForBr.offset < offset 200 } 201 202 def jmpValid = { 203 tailSlot.valid && (!shareTailSlot.B || !tailSlot.sharing) 204 } 205 206 def brOffset = { 207 VecInit( 208 brSlots.map(_.offset) ++ 209 (if (shareTailSlot) Seq(tailSlot.offset) else Nil) 210 ) 211 } 212 213 214 def display(cond: Bool): Unit = { 215 XSDebug(cond, p"-----------FTB entry----------- \n") 216 XSDebug(cond, p"v=${valid}\n") 217 for(i <- 0 until numBr) { 218 XSDebug(cond, p"[br$i]: v=${allSlotsForBr(i).valid}, offset=${allSlotsForBr(i).offset}," + 219 p"lower=${Hexadecimal(allSlotsForBr(i).lower)}\n") 220 } 221 XSDebug(cond, p"[tailSlot]: v=${tailSlot.valid}, offset=${tailSlot.offset}," + 222 p"lower=${Hexadecimal(tailSlot.lower)}, sharing=${tailSlot.sharing}}\n") 223 XSDebug(cond, p"pftAddr=${Hexadecimal(pftAddr)}, carry=$carry\n") 224 XSDebug(cond, p"isCall=$isCall, isRet=$isRet, isjalr=$isJalr\n") 225 XSDebug(cond, p"oversize=$oversize, last_is_rvc=$last_is_rvc\n") 226 XSDebug(cond, p"------------------------------- \n") 227 } 228 229} 230 231class FTBEntryWithTag(implicit p: Parameters) extends XSBundle with FTBParams with BPUUtils { 232 val entry = new FTBEntry 233 val tag = UInt(tagSize.W) 234 def display(cond: Bool): Unit = { 235 entry.display(cond) 236 XSDebug(cond, p"tag is ${Hexadecimal(tag)}\n------------------------------- \n") 237 } 238} 239 240class FTBMeta(implicit p: Parameters) extends XSBundle with FTBParams { 241 val writeWay = UInt(log2Ceil(numWays).W) 242 val hit = Bool() 243 val pred_cycle = UInt(64.W) // TODO: Use Option 244} 245 246object FTBMeta { 247 def apply(writeWay: UInt, hit: Bool, pred_cycle: UInt)(implicit p: Parameters): FTBMeta = { 248 val e = Wire(new FTBMeta) 249 e.writeWay := writeWay 250 e.hit := hit 251 e.pred_cycle := pred_cycle 252 e 253 } 254} 255 256// class UpdateQueueEntry(implicit p: Parameters) extends XSBundle with FTBParams { 257// val pc = UInt(VAddrBits.W) 258// val ftb_entry = new FTBEntry 259// val hit = Bool() 260// val hit_way = UInt(log2Ceil(numWays).W) 261// } 262// 263// object UpdateQueueEntry { 264// def apply(pc: UInt, fe: FTBEntry, hit: Bool, hit_way: UInt)(implicit p: Parameters): UpdateQueueEntry = { 265// val e = Wire(new UpdateQueueEntry) 266// e.pc := pc 267// e.ftb_entry := fe 268// e.hit := hit 269// e.hit_way := hit_way 270// e 271// } 272// } 273 274class FTB(implicit p: Parameters) extends BasePredictor with FTBParams with BPUUtils with HasCircularQueuePtrHelper { 275 override val meta_size = WireInit(0.U.asTypeOf(new FTBMeta)).getWidth 276 277 val ftbAddr = new TableAddr(log2Up(numSets), 1) 278 279 class FTBBank(val numSets: Int, val nWays: Int) extends XSModule with BPUUtils { 280 val io = IO(new Bundle { 281 val s1_fire = Input(Bool()) 282 283 // when ftb hit, read_hits.valid is true, and read_hits.bits is OH of hit way 284 // when ftb not hit, read_hits.valid is false, and read_hits is OH of allocWay 285 // val read_hits = Valid(Vec(numWays, Bool())) 286 val req_pc = Flipped(DecoupledIO(UInt(VAddrBits.W))) 287 val read_resp = Output(new FTBEntry) 288 val read_hits = Valid(UInt(log2Ceil(numWays).W)) 289 290 val u_req_pc = Flipped(DecoupledIO(UInt(VAddrBits.W))) 291 val update_hits = Valid(UInt(log2Ceil(numWays).W)) 292 val update_access = Input(Bool()) 293 294 val update_pc = Input(UInt(VAddrBits.W)) 295 val update_write_data = Flipped(Valid(new FTBEntryWithTag)) 296 val update_write_way = Input(UInt(log2Ceil(numWays).W)) 297 val update_write_alloc = Input(Bool()) 298 }) 299 300 // Extract holdRead logic to fix bug that update read override predict read result 301 val ftb = Module(new SRAMTemplate(new FTBEntryWithTag, set = numSets, way = numWays, shouldReset = true, holdRead = false, singlePort = true)) 302 303 val pred_rdata = HoldUnless(ftb.io.r.resp.data, RegNext(io.req_pc.valid && !io.update_access)) 304 ftb.io.r.req.valid := io.req_pc.valid || io.u_req_pc.valid // io.s0_fire 305 ftb.io.r.req.bits.setIdx := Mux(io.u_req_pc.valid, ftbAddr.getIdx(io.u_req_pc.bits), ftbAddr.getIdx(io.req_pc.bits)) // s0_idx 306 307 assert(!(io.req_pc.valid && io.u_req_pc.valid)) 308 309 io.req_pc.ready := ftb.io.r.req.ready 310 io.u_req_pc.ready := ftb.io.r.req.ready 311 312 val req_tag = RegEnable(ftbAddr.getTag(io.req_pc.bits)(tagSize-1, 0), io.req_pc.valid) 313 val req_idx = RegEnable(ftbAddr.getIdx(io.req_pc.bits), io.req_pc.valid) 314 315 val u_req_tag = RegEnable(ftbAddr.getTag(io.u_req_pc.bits)(tagSize-1, 0), io.u_req_pc.valid) 316 317 val read_entries = pred_rdata.map(_.entry) 318 val read_tags = pred_rdata.map(_.tag) 319 320 val total_hits = VecInit((0 until numWays).map(b => read_tags(b) === req_tag && read_entries(b).valid && io.s1_fire)) 321 val hit = total_hits.reduce(_||_) 322 // val hit_way_1h = VecInit(PriorityEncoderOH(total_hits)) 323 val hit_way = PriorityEncoder(total_hits) 324 325 val u_total_hits = VecInit((0 until numWays).map(b => 326 ftb.io.r.resp.data(b).tag === u_req_tag && ftb.io.r.resp.data(b).entry.valid && RegNext(io.update_access))) 327 val u_hit = u_total_hits.reduce(_||_) 328 // val hit_way_1h = VecInit(PriorityEncoderOH(total_hits)) 329 val u_hit_way = PriorityEncoder(u_total_hits) 330 331 assert(PopCount(total_hits) === 1.U || PopCount(total_hits) === 0.U) 332 assert(PopCount(u_total_hits) === 1.U || PopCount(u_total_hits) === 0.U) 333 334 val replacer = ReplacementPolicy.fromString(Some("setplru"), numWays, numSets) 335 // val allocWriteWay = replacer.way(req_idx) 336 337 val touch_set = Seq.fill(1)(Wire(UInt(log2Ceil(numSets).W))) 338 val touch_way = Seq.fill(1)(Wire(Valid(UInt(log2Ceil(numWays).W)))) 339 340 touch_set(0) := req_idx 341 342 touch_way(0).valid := hit 343 touch_way(0).bits := hit_way 344 345 replacer.access(touch_set, touch_way) 346 347 // def allocWay(valids: UInt, meta_tags: UInt, req_tag: UInt) = { 348 // val randomAlloc = false 349 // if (numWays > 1) { 350 // val w = Wire(UInt(log2Up(numWays).W)) 351 // val valid = WireInit(valids.andR) 352 // val tags = Cat(meta_tags, req_tag) 353 // val l = log2Up(numWays) 354 // val nChunks = (tags.getWidth + l - 1) / l 355 // val chunks = (0 until nChunks).map( i => 356 // tags(min((i+1)*l, tags.getWidth)-1, i*l) 357 // ) 358 // w := Mux(valid, if (randomAlloc) {LFSR64()(log2Up(numWays)-1,0)} else {chunks.reduce(_^_)}, PriorityEncoder(~valids)) 359 // w 360 // } else { 361 // val w = WireInit(0.U) 362 // w 363 // } 364 // } 365 366 // val allocWriteWay = allocWay( 367 // VecInit(read_entries.map(_.valid)).asUInt, 368 // VecInit(read_tags).asUInt, 369 // req_tag 370 // ) 371 372 def allocWay(valids: UInt, idx: UInt) = { 373 if (numWays > 1) { 374 val w = Wire(UInt(log2Up(numWays).W)) 375 val valid = WireInit(valids.andR) 376 w := Mux(valid, replacer.way(idx), PriorityEncoder(~valids)) 377 w 378 }else { 379 val w = WireInit(0.U) 380 w 381 } 382 } 383 384 io.read_resp := PriorityMux(total_hits, read_entries) // Mux1H 385 io.read_hits.valid := hit 386 // io.read_hits.bits := Mux(hit, hit_way_1h, VecInit(UIntToOH(allocWriteWay).asBools())) 387 io.read_hits.bits := hit_way 388 389 io.update_hits.valid := u_hit 390 io.update_hits.bits := u_hit_way 391 392 // XSDebug(!hit, "FTB not hit, alloc a way: %d\n", allocWriteWay) 393 394 // Update logic 395 val u_valid = io.update_write_data.valid 396 val u_data = io.update_write_data.bits 397 val u_idx = ftbAddr.getIdx(io.update_pc) 398 val allocWriteWay = allocWay(VecInit(read_entries.map(_.valid)).asUInt, u_idx) 399 val u_mask = UIntToOH(Mux(io.update_write_alloc, allocWriteWay, io.update_write_way)) 400 401 for (i <- 0 until numWays) { 402 XSPerfAccumulate(f"ftb_replace_way$i", u_valid && io.update_write_alloc && OHToUInt(u_mask) === i.U) 403 XSPerfAccumulate(f"ftb_replace_way${i}_has_empty", u_valid && io.update_write_alloc && !read_entries.map(_.valid).reduce(_&&_) && OHToUInt(u_mask) === i.U) 404 XSPerfAccumulate(f"ftb_hit_way$i", hit && !io.update_access && hit_way === i.U) 405 } 406 407 ftb.io.w.apply(u_valid, u_data, u_idx, u_mask) 408 409 // print hit entry info 410 PriorityMux(total_hits, ftb.io.r.resp.data).display(true.B) 411 } // FTBBank 412 413 val ftbBank = Module(new FTBBank(numSets, numWays)) 414 415 ftbBank.io.req_pc.valid := io.s0_fire 416 ftbBank.io.req_pc.bits := s0_pc 417 418 val ftb_entry = RegEnable(ftbBank.io.read_resp, io.s1_fire) 419 val s1_hit = ftbBank.io.read_hits.valid 420 val s2_hit = RegEnable(s1_hit, io.s1_fire) 421 val writeWay = ftbBank.io.read_hits.bits 422 423 val fallThruAddr = getFallThroughAddr(s2_pc, ftb_entry.carry, ftb_entry.pftAddr) 424 425 // io.out.bits.resp := RegEnable(io.in.bits.resp_in(0), 0.U.asTypeOf(new BranchPredictionResp), io.s1_fire) 426 io.out.resp := io.in.bits.resp_in(0) 427 428 val s1_latch_call_is_rvc = DontCare // TODO: modify when add RAS 429 430 io.out.resp.s2.preds.hit := s2_hit 431 io.out.resp.s2.pc := s2_pc 432 io.out.resp.s2.ftb_entry := ftb_entry 433 io.out.resp.s2.preds.fromFtbEntry(ftb_entry, s2_pc) 434 435 io.out.s3_meta := RegEnable(RegEnable(FTBMeta(writeWay.asUInt(), s1_hit, GTimer()).asUInt(), io.s1_fire), io.s2_fire) 436 437 when(!s2_hit) { 438 io.out.resp.s2.ftb_entry.onNotHit(s2_pc) 439 } 440 441 // always taken logic 442 when (s2_hit) { 443 for (i <- 0 until numBr) { 444 when (ftb_entry.always_taken(i)) { 445 io.out.resp.s2.preds.br_taken_mask(i) := true.B 446 } 447 } 448 } 449 450 // Update logic 451 val update = RegNext(io.update.bits) 452 453 // val update_queue = Mem(64, new UpdateQueueEntry) 454 // val head, tail = RegInit(UpdateQueuePtr(false.B, 0.U)) 455 // val u_queue = Module(new Queue(new UpdateQueueEntry, entries = 64, flow = true)) 456 // assert(u_queue.io.count < 64.U) 457 458 val u_meta = update.meta.asTypeOf(new FTBMeta) 459 val u_valid = RegNext(io.update.valid && !io.update.bits.old_entry) 460 461 // io.s1_ready := ftbBank.io.req_pc.ready && u_queue.io.count === 0.U && !u_valid 462 io.s1_ready := ftbBank.io.req_pc.ready && !(u_valid && !u_meta.hit) 463 464 // val update_now = u_queue.io.deq.fire && u_queue.io.deq.bits.hit 465 val update_now = u_valid && u_meta.hit 466 467 ftbBank.io.u_req_pc.valid := u_valid && !u_meta.hit 468 ftbBank.io.u_req_pc.bits := update.pc 469 470 // assert(!(u_valid && RegNext(u_valid) && update.pc === RegNext(update.pc))) 471 // assert(!(u_valid && RegNext(u_valid))) 472 473 // val u_way = u_queue.io.deq.bits.hit_way 474 475 val ftb_write = Wire(new FTBEntryWithTag) 476 // ftb_write.entry := Mux(update_now, u_queue.io.deq.bits.ftb_entry, RegNext(u_queue.io.deq.bits.ftb_entry)) 477 // ftb_write.tag := ftbAddr.getTag(Mux(update_now, u_queue.io.deq.bits.pc, RegNext(u_queue.io.deq.bits.pc)))(tagSize-1, 0) 478 ftb_write.entry := Mux(update_now, update.ftb_entry, RegNext(update.ftb_entry)) 479 ftb_write.tag := ftbAddr.getTag(Mux(update_now, update.pc, RegNext(update.pc)))(tagSize-1, 0) 480 481 // val write_valid = update_now || RegNext(u_queue.io.deq.fire && !u_queue.io.deq.bits.hit) 482 val write_valid = update_now || RegNext(u_valid && !u_meta.hit) 483 484 // u_queue.io.enq.valid := u_valid 485 // u_queue.io.enq.bits := UpdateQueueEntry(update.pc, update.ftb_entry, u_meta.hit, u_meta.writeWay) 486 // u_queue.io.deq.ready := RegNext(!u_queue.io.deq.fire || update_now) 487 488 ftbBank.io.update_write_data.valid := write_valid 489 ftbBank.io.update_write_data.bits := ftb_write 490 // ftbBank.io.update_pc := Mux(update_now, u_queue.io.deq.bits.pc, RegNext(u_queue.io.deq.bits.pc)) 491 ftbBank.io.update_pc := Mux(update_now, update.pc, RegNext(update.pc)) 492 ftbBank.io.update_write_way := Mux(update_now, u_meta.writeWay, ftbBank.io.update_hits.bits) 493 // ftbBank.io.update_write_alloc := Mux(update_now, !u_queue.io.deq.bits.hit, !ftbBank.io.update_hits.valid) 494 ftbBank.io.update_write_alloc := Mux(update_now, false.B, !ftbBank.io.update_hits.valid) 495 ftbBank.io.update_access := u_valid && !u_meta.hit 496 ftbBank.io.s1_fire := io.s1_fire 497 498 XSDebug("req_v=%b, req_pc=%x, ready=%b (resp at next cycle)\n", io.s0_fire, s0_pc, ftbBank.io.req_pc.ready) 499 XSDebug("s2_hit=%b, hit_way=%b\n", s2_hit, writeWay.asUInt) 500 XSDebug("s2_br_taken_mask=%b, s2_real_taken_mask=%b\n", 501 io.in.bits.resp_in(0).s2.preds.br_taken_mask.asUInt, io.out.resp.s2.real_slot_taken_mask().asUInt) 502 XSDebug("s2_target=%x\n", io.out.resp.s2.target) 503 504 ftb_entry.display(true.B) 505 506 XSPerfAccumulate("ftb_read_hits", RegNext(io.s0_fire) && s1_hit) 507 XSPerfAccumulate("ftb_read_misses", RegNext(io.s0_fire) && !s1_hit) 508 509 XSPerfAccumulate("ftb_commit_hits", io.update.valid && io.update.bits.preds.hit) 510 XSPerfAccumulate("ftb_commit_misses", io.update.valid && !io.update.bits.preds.hit) 511 512 XSPerfAccumulate("ftb_update_req", io.update.valid) 513 XSPerfAccumulate("ftb_update_ignored", io.update.valid && io.update.bits.old_entry) 514 XSPerfAccumulate("ftb_updated", u_valid) 515 516 val perfinfo = IO(new Bundle(){ 517 val perfEvents = Output(new PerfEventsBundle(2)) 518 }) 519 val perfEvents = Seq( 520 ("ftb_commit_hits ", u_valid && update.preds.hit), 521 ("ftb_commit_misses ", u_valid && !update.preds.hit), 522 ) 523 524 for (((perf_out,(perf_name,perf)),i) <- perfinfo.perfEvents.perf_events.zip(perfEvents).zipWithIndex) { 525 perf_out.incr_step := RegNext(perf) 526 } 527} 528