/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/
package xiangshan.frontend

import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.util._
import chisel3.experimental.chiselName
import xiangshan._
import xiangshan.frontend.icache._
import utils._
import utility._
import scala.math._

// Fetch request sent from the FTQ to the IFU: start addresses of the fetch block
// plus the FTQ bookkeeping needed to write pre-decode results back.
@chiselName
class FetchRequestBundle(implicit p: Parameters) extends XSBundle with HasICacheParameters {

  //fast path: Timing critical
  val startAddr       = UInt(VAddrBits.W)  // first instruction address of the fetch block
  val nextlineStart   = UInt(VAddrBits.W)  // start address of the next cache line (used on cross-line fetches)
  val nextStartAddr   = UInt(VAddrBits.W)  // predicted start address of the next fetch block
  //slow path
  val ftqIdx          = new FtqPtr         // FTQ entry this request belongs to
  val ftqOffset       = ValidUndirectioned(UInt(log2Ceil(PredictWidth).W)) // offset of the predicted taken CFI, if any

  // The fetch block crosses a cache-line boundary iff its start lies in the upper
  // half of a line (checked via bit blockOffBits-1 of the start address).
  def crossCacheline = startAddr(blockOffBits - 1) === 1.U

  // Fill this bundle from an FTQ PC-memory entry. When the stored prediction had a
  // fall-through error, recompute nextStartAddr as startAddr plus one fetch block:
  // the low bits are reused, the block-index bit is flipped, and the carry into the
  // upper bits is handled by selecting between startAddr's and nextLineAddr's high part.
  def fromFtqPcBundle(b: Ftq_RF_Components) = {
    this.startAddr := b.startAddr
    this.nextlineStart := b.nextLineAddr
    when (b.fallThruError) {
      val nextBlockHigherTemp = Mux(startAddr(log2Ceil(PredictWidth)+instOffsetBits), b.startAddr, b.nextLineAddr)
      val nextBlockHigher = nextBlockHigherTemp(VAddrBits-1, log2Ceil(PredictWidth)+instOffsetBits+1)
      this.nextStartAddr :=
        Cat(nextBlockHigher,
          startAddr(log2Ceil(PredictWidth)+instOffsetBits) ^ 1.U(1.W),
          startAddr(log2Ceil(PredictWidth)+instOffsetBits-1, instOffsetBits),
          0.U(instOffsetBits.W)
        )
    }
    this
  }
  override def toPrintable: Printable = {
    p"[start] ${Hexadecimal(startAddr)} [next] ${Hexadecimal(nextlineStart)}" +
      p"[tgt] ${Hexadecimal(nextStartAddr)} [ftqIdx] $ftqIdx [jmp] v:${ftqOffset.valid}" +
      p" offset: ${ftqOffset.bits}\n"
  }
}

// Minimal per-request information the ICache needs from the FTQ: the two line addresses.
class FtqICacheInfo(implicit p: Parameters)extends XSBundle with HasICacheParameters{
  val startAddr       = UInt(VAddrBits.W)
  val nextlineStart   = UInt(VAddrBits.W)
  // same cross-line condition as FetchRequestBundle.crossCacheline
  def crossCacheline = startAddr(blockOffBits - 1) === 1.U
  def fromFtqPcBundle(b: Ftq_RF_Components) = {
    this.startAddr := b.startAddr
    this.nextlineStart := b.nextLineAddr
    this
  }
}

// ICache -> IFU interface: a ready flag plus one response per ICache port.
class IFUICacheIO(implicit p: Parameters)extends XSBundle with HasICacheParameters{
  val icacheReady = Output(Bool())
  val resp        = Vec(PortNumber, ValidIO(new ICacheMainPipeResp))
}

// FTQ -> ICache request: five PC-memory read candidates with their valid bits.
// NOTE(review): the constant 5 presumably matches the FTQ PC-mem read-port count — confirm at the FTQ side.
class FtqToICacheRequestBundle(implicit p: Parameters)extends XSBundle with HasICacheParameters{
  val pcMemRead = Vec(5, new FtqICacheInfo)
  val readValid = Vec(5, Bool())
}


// Pre-decode results written back from the IFU to the FTQ; used to detect and
// repair prediction errors discovered at pre-decode time.
class PredecodeWritebackBundle(implicit p:Parameters) extends XSBundle {
  val pc         = Vec(PredictWidth, UInt(VAddrBits.W))
  val pd         = Vec(PredictWidth, new PreDecodeInfo) // TODO: redefine Predecode
  val ftqIdx     = new FtqPtr
  val ftqOffset  = UInt(log2Ceil(PredictWidth).W)
  val misOffset  = ValidUndirectioned(UInt(log2Ceil(PredictWidth).W)) // first mispredicted slot, if any
  val cfiOffset  = ValidUndirectioned(UInt(log2Ceil(PredictWidth).W)) // actual taken CFI slot, if any
  val target     = UInt(VAddrBits.W) // redirect target
  val jalTarget  = UInt(VAddrBits.W) // decoded JAL target
  val instrRange = Vec(PredictWidth, Bool()) // which slots fall inside the fetch block's valid range
}

// Ftq send req to Prefetch
class PrefetchRequest(implicit p:Parameters) extends XSBundle {
  val target          = UInt(VAddrBits.W)
}

class FtqPrefechBundle(implicit p:Parameters) extends XSBundle {
  val req = DecoupledIO(new PrefetchRequest)
}

// Handshake for MMIO instruction commit: the IFU asks whether the instruction at
// mmioFtqPtr has become the last committed one before continuing past an MMIO fetch.
class mmioCommitRead(implicit p: Parameters) extends
XSBundle { 107 val mmioFtqPtr = Output(new FtqPtr) 108 val mmioLastCommit = Input(Bool()) 109} 110 111class FetchToIBuffer(implicit p: Parameters) extends XSBundle { 112 val instrs = Vec(PredictWidth, UInt(32.W)) 113 val valid = UInt(PredictWidth.W) 114 val enqEnable = UInt(PredictWidth.W) 115 val pd = Vec(PredictWidth, new PreDecodeInfo) 116 val pc = Vec(PredictWidth, UInt(VAddrBits.W)) 117 val foldpc = Vec(PredictWidth, UInt(MemPredPCWidth.W)) 118 val ftqPtr = new FtqPtr 119 val ftqOffset = Vec(PredictWidth, ValidUndirectioned(UInt(log2Ceil(PredictWidth).W))) 120 val ipf = Vec(PredictWidth, Bool()) 121 val acf = Vec(PredictWidth, Bool()) 122 val crossPageIPFFix = Vec(PredictWidth, Bool()) 123 val triggered = Vec(PredictWidth, new TriggerCf) 124} 125 126// class BitWiseUInt(val width: Int, val init: UInt) extends Module { 127// val io = IO(new Bundle { 128// val set 129// }) 130// } 131// Move from BPU 132abstract class GlobalHistory(implicit p: Parameters) extends XSBundle with HasBPUConst { 133 def update(br_valids: Vec[Bool], real_taken_mask: Vec[Bool]): GlobalHistory 134} 135 136class ShiftingGlobalHistory(implicit p: Parameters) extends GlobalHistory { 137 val predHist = UInt(HistoryLength.W) 138 139 def update(shift: UInt, taken: Bool, hist: UInt = this.predHist): ShiftingGlobalHistory = { 140 val g = Wire(new ShiftingGlobalHistory) 141 g.predHist := (hist << shift) | taken 142 g 143 } 144 145 def update(br_valids: Vec[Bool], real_taken_mask: Vec[Bool]): ShiftingGlobalHistory = { 146 require(br_valids.length == numBr) 147 require(real_taken_mask.length == numBr) 148 val last_valid_idx = PriorityMux( 149 br_valids.reverse :+ true.B, 150 (numBr to 0 by -1).map(_.U(log2Ceil(numBr+1).W)) 151 ) 152 val first_taken_idx = PriorityEncoder(false.B +: real_taken_mask) 153 val smaller = Mux(last_valid_idx < first_taken_idx, 154 last_valid_idx, 155 first_taken_idx 156 ) 157 val shift = smaller 158 val taken = real_taken_mask.reduce(_||_) 159 update(shift, taken, 
this.predHist) 160 } 161 162 // static read 163 def read(n: Int): Bool = predHist.asBools()(n) 164 165 final def === (that: ShiftingGlobalHistory): Bool = { 166 predHist === that.predHist 167 } 168 169 final def =/= (that: ShiftingGlobalHistory): Bool = !(this === that) 170} 171 172// circular global history pointer 173class CGHPtr(implicit p: Parameters) extends CircularQueuePtr[CGHPtr]( 174 p => p(XSCoreParamsKey).HistoryLength 175){ 176} 177 178object CGHPtr { 179 def apply(f: Bool, v: UInt)(implicit p: Parameters): CGHPtr = { 180 val ptr = Wire(new CGHPtr) 181 ptr.flag := f 182 ptr.value := v 183 ptr 184 } 185 def inverse(ptr: CGHPtr)(implicit p: Parameters): CGHPtr = { 186 apply(!ptr.flag, ptr.value) 187 } 188} 189 190class CircularGlobalHistory(implicit p: Parameters) extends GlobalHistory { 191 val buffer = Vec(HistoryLength, Bool()) 192 type HistPtr = UInt 193 def update(br_valids: Vec[Bool], real_taken_mask: Vec[Bool]): CircularGlobalHistory = { 194 this 195 } 196} 197 198class FoldedHistory(val len: Int, val compLen: Int, val max_update_num: Int)(implicit p: Parameters) 199 extends XSBundle with HasBPUConst { 200 require(compLen >= 1) 201 require(len > 0) 202 // require(folded_len <= len) 203 require(compLen >= max_update_num) 204 val folded_hist = UInt(compLen.W) 205 206 def need_oldest_bits = len > compLen 207 def info = (len, compLen) 208 def oldest_bit_to_get_from_ghr = (0 until max_update_num).map(len - _ - 1) 209 def oldest_bit_pos_in_folded = oldest_bit_to_get_from_ghr map (_ % compLen) 210 def oldest_bit_wrap_around = oldest_bit_to_get_from_ghr map (_ / compLen > 0) 211 def oldest_bit_start = oldest_bit_pos_in_folded.head 212 213 def get_oldest_bits_from_ghr(ghr: Vec[Bool], histPtr: CGHPtr) = { 214 // TODO: wrap inc for histPtr value 215 oldest_bit_to_get_from_ghr.map(i => ghr((histPtr + (i+1).U).value)) 216 } 217 218 def circular_shift_left(src: UInt, shamt: Int) = { 219 val srcLen = src.getWidth 220 val src_doubled = Cat(src, src) 221 val 
shifted = src_doubled(srcLen*2-1-shamt, srcLen-shamt)
    shifted
  }

  // slow path, read bits from ghr
  def update(ghr: Vec[Bool], histPtr: CGHPtr, num: Int, taken: Bool): FoldedHistory = {
    val oldest_bits = VecInit(get_oldest_bits_from_ghr(ghr, histPtr))
    update(oldest_bits, num, taken)
  }


  // fast path, use pre-read oldest bits
  // Incremental folded-history update: XOR out the exiting oldest bits, XOR in the
  // newly inserted bits, then rotate by `num`. Falls back to a simple shift when
  // the raw history fits inside compLen.
  def update(ob: Vec[Bool], num: Int, taken: Bool): FoldedHistory = {
    // do xors for several bitsets at specified bits
    // Each bitset is a list of (bit position, value); all values landing on the
    // same position are XORed together (elaboration-time wiring, not hardware loops).
    def bitsets_xor(len: Int, bitsets: Seq[Seq[Tuple2[Int, Bool]]]) = {
      val res = Wire(Vec(len, Bool()))
      // println(f"num bitsets: ${bitsets.length}")
      // println(f"bitsets $bitsets")
      val resArr = Array.fill(len)(List[Bool]())
      for (bs <- bitsets) {
        for ((n, b) <- bs) {
          resArr(n) = b :: resArr(n)
        }
      }
      // println(f"${resArr.mkString}")
      // println(f"histLen: ${this.len}, foldedLen: $folded_len")
      for (i <- 0 until len) {
        // println(f"bit[$i], ${resArr(i).mkString}")
        // elaboration-time sanity diagnostics only; they do not affect the hardware
        if (resArr(i).length > 2) {
          println(f"[warning] update logic of foldest history has two or more levels of xor gates! " +
            f"histlen:${this.len}, compLen:$compLen, at bit $i")
        }
        if (resArr(i).length == 0) {
          println(f"[error] bits $i is not assigned in folded hist update logic! histlen:${this.len}, compLen:$compLen")
        }
        res(i) := resArr(i).foldLeft(false.B)(_^_)
      }
      res.asUInt
    }

    val new_folded_hist = if (need_oldest_bits) {
      val oldest_bits = ob
      require(oldest_bits.length == max_update_num)
      // mask off bits that do not update
      val oldest_bits_masked = oldest_bits.zipWithIndex.map{
        case (ob, i) => ob && (i < num).B
      }
      // if a bit does not wrap around, it should not be xored when it exits
      // (note: `filter(oldest_bit_wrap_around)` uses the Seq[Boolean] as an Int => Boolean)
      val oldest_bits_set = (0 until max_update_num).filter(oldest_bit_wrap_around).map(i => (oldest_bit_pos_in_folded(i), oldest_bits_masked(i)))

      // println(f"old bits pos ${oldest_bits_set.map(_._1)}")

      // only the last bit could be 1, as we have at most one taken branch at a time
      val newest_bits_masked = VecInit((0 until max_update_num).map(i => taken && ((i+1) == num).B)).asUInt
      // if a bit does not wrap around, newest bits should not be xored onto it either
      val newest_bits_set = (0 until max_update_num).map(i => (compLen-1-i, newest_bits_masked(i)))

      // println(f"new bits set ${newest_bits_set.map(_._1)}")
      //
      // keep existing folded bits except those whose raw bit just aged out entirely
      val original_bits_masked = VecInit(folded_hist.asBools.zipWithIndex.map{
        case (fb, i) => fb && !(num >= (len-i)).B
      })
      val original_bits_set = (0 until compLen).map(i => (i, original_bits_masked(i)))

      // do xor then shift
      val xored = bitsets_xor(compLen, Seq(original_bits_set, oldest_bits_set, newest_bits_set))
      circular_shift_left(xored, num)
    } else {
      // histLen too short to wrap around
      ((folded_hist << num) | taken)(compLen-1,0)
    }

    val fh = WireInit(this)
    fh.folded_hist := new_folded_hist
    fh
  }
}

// "Oldest" history bits pre-read one cycle ahead of time, so the folded-history
// fast-path update does not need to index the global history vector itself.
class AheadFoldedHistoryOldestBits(val len: Int, val max_update_num: Int)(implicit p: Parameters) extends XSBundle {
  val bits = Vec(max_update_num*2, Bool())
  // def info = (len, compLen)
  // Select this cycle's real oldest bits from the ahead-read window, based on how
  // many branches (one-hot encoded) were shifted in last cycle.
  def getRealOb(brNumOH: UInt): Vec[Bool] = {
    val ob = Wire(Vec(max_update_num, Bool()))
    for
(i <- 0 until max_update_num) { 305 ob(i) := Mux1H(brNumOH, bits.drop(i).take(numBr+1)) 306 } 307 ob 308 } 309} 310 311class AllAheadFoldedHistoryOldestBits(val gen: Seq[Tuple2[Int, Int]])(implicit p: Parameters) extends XSBundle with HasBPUConst { 312 val afhob = MixedVec(gen.filter(t => t._1 > t._2).map{_._1} 313 .toSet.toList.map(l => new AheadFoldedHistoryOldestBits(l, numBr))) // remove duplicates 314 require(gen.toSet.toList.equals(gen)) 315 def getObWithInfo(info: Tuple2[Int, Int]) = { 316 val selected = afhob.filter(_.len == info._1) 317 require(selected.length == 1) 318 selected(0) 319 } 320 def read(ghv: Vec[Bool], ptr: CGHPtr) = { 321 val hisLens = afhob.map(_.len) 322 val bitsToRead = hisLens.flatMap(l => (0 until numBr*2).map(i => l-i-1)).toSet // remove duplicates 323 val bitsWithInfo = bitsToRead.map(pos => (pos, ghv((ptr+(pos+1).U).value))) 324 for (ob <- afhob) { 325 for (i <- 0 until numBr*2) { 326 val pos = ob.len - i - 1 327 val bit_found = bitsWithInfo.filter(_._1 == pos).toList 328 require(bit_found.length == 1) 329 ob.bits(i) := bit_found(0)._2 330 } 331 } 332 } 333} 334 335class AllFoldedHistories(val gen: Seq[Tuple2[Int, Int]])(implicit p: Parameters) extends XSBundle with HasBPUConst { 336 val hist = MixedVec(gen.map{case (l, cl) => new FoldedHistory(l, cl, numBr)}) 337 // println(gen.mkString) 338 require(gen.toSet.toList.equals(gen)) 339 def getHistWithInfo(info: Tuple2[Int, Int]) = { 340 val selected = hist.filter(_.info.equals(info)) 341 require(selected.length == 1) 342 selected(0) 343 } 344 def autoConnectFrom(that: AllFoldedHistories) = { 345 require(this.hist.length <= that.hist.length) 346 for (h <- this.hist) { 347 h := that.getHistWithInfo(h.info) 348 } 349 } 350 def update(ghv: Vec[Bool], ptr: CGHPtr, shift: Int, taken: Bool): AllFoldedHistories = { 351 val res = WireInit(this) 352 for (i <- 0 until this.hist.length) { 353 res.hist(i) := this.hist(i).update(ghv, ptr, shift, taken) 354 } 355 res 356 } 357 def update(afhob: 
AllAheadFoldedHistoryOldestBits, lastBrNumOH: UInt, shift: Int, taken: Bool): AllFoldedHistories = { 358 val res = WireInit(this) 359 for (i <- 0 until this.hist.length) { 360 val fh = this.hist(i) 361 if (fh.need_oldest_bits) { 362 val info = fh.info 363 val selectedAfhob = afhob.getObWithInfo(info) 364 val ob = selectedAfhob.getRealOb(lastBrNumOH) 365 res.hist(i) := this.hist(i).update(ob, shift, taken) 366 } else { 367 val dumb = Wire(Vec(numBr, Bool())) // not needed 368 dumb := DontCare 369 res.hist(i) := this.hist(i).update(dumb, shift, taken) 370 } 371 } 372 res 373 } 374 375 def display(cond: Bool) = { 376 for (h <- hist) { 377 XSDebug(cond, p"hist len ${h.len}, folded len ${h.compLen}, value ${Binary(h.folded_hist)}\n") 378 } 379 } 380} 381 382class TableAddr(val idxBits: Int, val banks: Int)(implicit p: Parameters) extends XSBundle{ 383 def tagBits = VAddrBits - idxBits - instOffsetBits 384 385 val tag = UInt(tagBits.W) 386 val idx = UInt(idxBits.W) 387 val offset = UInt(instOffsetBits.W) 388 389 def fromUInt(x: UInt) = x.asTypeOf(UInt(VAddrBits.W)).asTypeOf(this) 390 def getTag(x: UInt) = fromUInt(x).tag 391 def getIdx(x: UInt) = fromUInt(x).idx 392 def getBank(x: UInt) = if (banks > 1) getIdx(x)(log2Up(banks) - 1, 0) else 0.U 393 def getBankIdx(x: UInt) = if (banks > 1) getIdx(x)(idxBits - 1, log2Up(banks)) else getIdx(x) 394} 395 396trait BasicPrediction extends HasXSParameter { 397 def cfiIndex: ValidUndirectioned[UInt] 398 def target(pc: UInt): UInt 399 def lastBrPosOH: Vec[Bool] 400 def brTaken: Bool 401 def shouldShiftVec: Vec[Bool] 402 def fallThruError: Bool 403} 404@chiselName 405class FullBranchPrediction(implicit p: Parameters) extends XSBundle with HasBPUConst with BasicPrediction { 406 val br_taken_mask = Vec(numBr, Bool()) 407 408 val slot_valids = Vec(totalSlot, Bool()) 409 410 val targets = Vec(totalSlot, UInt(VAddrBits.W)) 411 val jalr_target = UInt(VAddrBits.W) // special path for indirect predictors 412 val offsets = Vec(totalSlot, 
UInt(log2Ceil(PredictWidth).W))
  val fallThroughAddr = UInt(VAddrBits.W)
  val fallThroughErr = Bool()

  val is_jal = Bool()
  val is_jalr = Bool()
  val is_call = Bool()
  val is_ret = Bool()
  val last_may_be_rvi_call = Bool()
  val is_br_sharing = Bool() // the tail slot holds a conditional branch rather than a jump

  // val call_is_rvc = Bool()
  val hit = Bool()

  // all slots but the last hold conditional branches; the tail slot may hold
  // either a jump or (when sharing) another conditional branch
  def br_slot_valids = slot_valids.init
  def tail_slot_valid = slot_valids.last

  def br_valids = {
    VecInit(br_slot_valids :+ (tail_slot_valid && is_br_sharing))
  }

  // per-slot taken: a branch slot is taken iff valid && predicted taken; a
  // non-sharing tail slot (unconditional jump) is always taken when valid
  def taken_mask_on_slot = {
    VecInit(
      (br_slot_valids zip br_taken_mask.init).map{ case (t, v) => t && v } :+ (
        tail_slot_valid && (
          is_br_sharing && br_taken_mask.last || !is_br_sharing
        )
      )
    )
  }

  def real_slot_taken_mask(): Vec[Bool] = {
    VecInit(taken_mask_on_slot.map(_ && hit))
  }

  // len numBr
  def real_br_taken_mask(): Vec[Bool] = {
    VecInit(
      taken_mask_on_slot.map(_ && hit).init :+
      (br_taken_mask.last && tail_slot_valid && is_br_sharing && hit)
    )
  }

  // the vec indicating if ghr should shift on each branch
  def shouldShiftVec =
    VecInit(br_valids.zipWithIndex.map{ case (v, i) =>
      v && !real_br_taken_mask.take(i).reduceOption(_||_).getOrElse(false.B)})

  def lastBrPosOH =
    VecInit((!hit || !br_valids.reduce(_||_)) +: // not hit or no brs in entry
      (0 until numBr).map(i =>
        br_valids(i) &&
        !real_br_taken_mask.take(i).reduceOption(_||_).getOrElse(false.B) && // no brs taken in front it
        (real_br_taken_mask()(i) || !br_valids.drop(i+1).reduceOption(_||_).getOrElse(false.B)) && // no brs behind it
        hit
      )
    )

  def brTaken = (br_valids zip br_taken_mask).map{ case (a, b) => a && b && hit}.reduce(_||_)

  // Next-block target: the target of the first taken slot, else the fall-through
  // address on a hit, else pc plus one full fetch block on a miss.
  def target(pc: UInt): UInt = {
    val targetVec = targets :+ fallThroughAddr :+ (pc + (FetchWidth * 4).U)
    val tm = taken_mask_on_slot
    val selVecOH =
      tm.zipWithIndex.map{ case (t, i) => !tm.take(i).fold(false.B)(_||_) && t && hit} :+
      (!tm.asUInt.orR && hit) :+ !hit
    Mux1H(selVecOH, targetVec)
  }

  def fallThruError: Bool = hit && fallThroughErr

  // taken on the tail slot as a jump (not a shared branch), with no earlier taken slot
  def hit_taken_on_jmp =
    !real_slot_taken_mask().init.reduce(_||_) &&
    real_slot_taken_mask().last && !is_br_sharing
  def hit_taken_on_call = hit_taken_on_jmp && is_call
  def hit_taken_on_ret  = hit_taken_on_jmp && is_ret
  def hit_taken_on_jalr = hit_taken_on_jmp && is_jalr

  def cfiIndex = {
    val cfiIndex = Wire(ValidUndirectioned(UInt(log2Ceil(PredictWidth).W)))
    cfiIndex.valid := real_slot_taken_mask().asUInt.orR
    // when no takens, set cfiIndex to PredictWidth-1
    cfiIndex.bits :=
      ParallelPriorityMux(real_slot_taken_mask(), offsets) |
      Fill(log2Ceil(PredictWidth), (!real_slot_taken_mask().asUInt.orR).asUInt)
    cfiIndex
  }

  def taken = br_taken_mask.reduce(_||_) || slot_valids.last // || (is_jal || is_jalr)

  // Populate this prediction from an FTB entry read at `pc`.
  def fromFtbEntry(entry: FTBEntry, pc: UInt, last_stage: Option[Tuple2[UInt, Bool]] = None) = {
    slot_valids := entry.brSlots.map(_.valid) :+ entry.tailSlot.valid
    targets := entry.getTargetVec(pc)
    jalr_target := targets.last
    offsets := entry.getOffsetVec
    is_jal := entry.tailSlot.valid && entry.isJal
    is_jalr := entry.tailSlot.valid && entry.isJalr
    is_call := entry.tailSlot.valid && entry.isCall
    is_ret := entry.tailSlot.valid && entry.isRet
    last_may_be_rvi_call := entry.last_may_be_rvi_call
    is_br_sharing := entry.tailSlot.valid && entry.tailSlot.sharing

    // fall-through sanity check: the entry's partial fall-through address (with its
    // carry bit) must lie strictly after the fetch start, otherwise the entry is bad
    val startLower        = Cat(0.U(1.W), pc(instOffsetBits+log2Ceil(PredictWidth)-1, instOffsetBits))
    val endLowerwithCarry = Cat(entry.carry, entry.pftAddr)
    fallThroughErr := startLower >= endLowerwithCarry
    fallThroughAddr := Mux(fallThroughErr, pc + (FetchWidth * 4).U, entry.getFallThrough(pc))
  }

  def display(cond: Bool): Unit = {
    XSDebug(cond, p"[taken_mask] ${Binary(br_taken_mask.asUInt)} [hit] $hit\n")
  }
}

// Speculative predictor state snapshotted per prediction; used to restore the
// predictor (histories and RAS) on a redirect.
class SpeculativeInfo(implicit p: Parameters) extends XSBundle
  with HasBPUConst with BPUUtils {
  val folded_hist = new AllFoldedHistories(foldedGHistInfos)
  val afhob = new AllAheadFoldedHistoryOldestBits(foldedGHistInfos)
  val lastBrNumOH = UInt((numBr+1).W)
  val histPtr = new CGHPtr
  val rasSp = UInt(log2Ceil(RasSize).W)
  val rasTop = new RASEntry
}

// One BPU pipeline stage's prediction output; mostly forwards FullBranchPrediction.
@chiselName
class BranchPredictionBundle(implicit p: Parameters) extends XSBundle
  with HasBPUConst with BPUUtils {
  val pc = UInt(VAddrBits.W)
  val valid = Bool()
  val hasRedirect = Bool() // this stage overrides an earlier stage's prediction
  val ftq_idx = new FtqPtr
  val full_pred = new FullBranchPrediction


  def target(pc: UInt) = full_pred.target(pc)
  def cfiIndex = full_pred.cfiIndex
  def lastBrPosOH = full_pred.lastBrPosOH
  def brTaken = full_pred.brTaken
  def shouldShiftVec = full_pred.shouldShiftVec
  def fallThruError = full_pred.fallThruError

  def getTarget = target(pc)
  def taken = cfiIndex.valid

  def display(cond: Bool): Unit = {
    XSDebug(cond, p"[pc] ${Hexadecimal(pc)}\n")
    full_pred.display(cond)
  }
}

// Predictions from all three BPU stages plus last-stage metadata, sent to the FTQ.
@chiselName
class BranchPredictionResp(implicit p: Parameters) extends XSBundle with HasBPUConst {
  // val valids = Vec(3, Bool())
  val s1 = new BranchPredictionBundle
  val s2 = new BranchPredictionBundle
  val s3 = new BranchPredictionBundle

  val last_stage_meta = UInt(MaxMetaLength.W)
  val last_stage_spec_info = new SpeculativeInfo
  val last_stage_ftb_entry = new FTBEntry

  // latest overriding stage wins: s3 redirect > s2 redirect > s1
  def selectedResp ={
    val res =
      PriorityMux(Seq(
        ((s3.valid && s3.hasRedirect) -> s3),
        ((s2.valid && s2.hasRedirect) -> s2),
        (s1.valid -> s1)
      ))
    res
  }
  def selectedRespIdx =
    PriorityMux(Seq(
      ((s3.valid && s3.hasRedirect) -> BP_S3),
      ((s2.valid && s2.hasRedirect) -> BP_S2),
      (s1.valid -> BP_S1)
    ))
  def lastStage = s3
}

// BPU -> FTQ payload; identical in shape to BranchPredictionResp.
class BpuToFtqBundle(implicit p: Parameters) extends
BranchPredictionResp {}

// Training bundle sent from the FTQ back to the BPU when an FTQ entry commits.
class BranchPredictionUpdate(implicit p: Parameters) extends XSBundle with HasBPUConst {
  val pc = UInt(VAddrBits.W)
  val spec_info = new SpeculativeInfo
  val ftb_entry = new FTBEntry()

  val cfi_idx = ValidUndirectioned(UInt(log2Ceil(PredictWidth).W)) // actual taken CFI position, if any
  val br_taken_mask = Vec(numBr, Bool())       // actual outcome of each branch slot
  val jmp_taken = Bool()
  val mispred_mask = Vec(numBr+1, Bool())      // per-slot misprediction flags (branches + tail)
  val pred_hit = Bool()
  val false_hit = Bool()
  val new_br_insert_pos = Vec(numBr, Bool())
  val old_entry = Bool()                       // update reuses the existing FTB entry instead of allocating
  val meta = UInt(MaxMetaLength.W)             // opaque predictor metadata captured at prediction time
  val full_target = UInt(VAddrBits.W)
  val from_stage = UInt(2.W)
  val ghist = UInt(HistoryLength.W)

  def is_jal = ftb_entry.tailSlot.valid && ftb_entry.isJal
  def is_jalr = ftb_entry.tailSlot.valid && ftb_entry.isJalr
  def is_call = ftb_entry.tailSlot.valid && ftb_entry.isCall
  def is_ret = ftb_entry.tailSlot.valid && ftb_entry.isRet

  def display(cond: Bool) = {
    XSDebug(cond, p"-----------BranchPredictionUpdate-----------\n")
    XSDebug(cond, p"[mispred_mask] ${Binary(mispred_mask.asUInt)} [false_hit] $false_hit\n")
    XSDebug(cond, p"[new_br_insert_pos] ${Binary(new_br_insert_pos.asUInt)}\n")
    XSDebug(cond, p"--------------------------------------------\n")
  }
}

// Redirect raised on a detected misprediction; inherits the backend Redirect fields.
class BranchPredictionRedirect(implicit p: Parameters) extends Redirect with HasBPUConst {
  // override def toPrintable: Printable = {
  //   p"-----------BranchPredictionRedirect----------- " +
  //     p"-----------cfiUpdate----------- " +
  //     p"[pc] ${Hexadecimal(cfiUpdate.pc)} " +
  //     p"[predTaken] ${cfiUpdate.predTaken}, [taken] ${cfiUpdate.taken}, [isMisPred] ${cfiUpdate.isMisPred} " +
  //     p"[target] ${Hexadecimal(cfiUpdate.target)} " +
  //     p"------------------------------- " +
  //     p"[robPtr] f=${robIdx.flag} v=${robIdx.value} " +
  //     p"[ftqPtr] f=${ftqIdx.flag} v=${ftqIdx.value} " +
  //     p"[ftqOffset] ${ftqOffset} " +
  //     p"[level] ${level}, [interrupt] ${interrupt} " +
  //     p"[stFtqIdx] f=${stFtqIdx.flag} v=${stFtqIdx.value} " +
  //     p"[stFtqOffset] ${stFtqOffset} " +
  //     p"\n"

  // }

  def display(cond: Bool): Unit = {
    XSDebug(cond, p"-----------BranchPredictionRedirect----------- \n")
    XSDebug(cond, p"-----------cfiUpdate----------- \n")
    XSDebug(cond, p"[pc] ${Hexadecimal(cfiUpdate.pc)}\n")
    // XSDebug(cond, p"[hist] ${Binary(cfiUpdate.hist.predHist)}\n")
    XSDebug(cond, p"[br_hit] ${cfiUpdate.br_hit} [isMisPred] ${cfiUpdate.isMisPred}\n")
    XSDebug(cond, p"[pred_taken] ${cfiUpdate.predTaken} [taken] ${cfiUpdate.taken} [isMisPred] ${cfiUpdate.isMisPred}\n")
    XSDebug(cond, p"[target] ${Hexadecimal(cfiUpdate.target)} \n")
    XSDebug(cond, p"[shift] ${cfiUpdate.shift}\n")
    XSDebug(cond, p"------------------------------- \n")
    XSDebug(cond, p"[robPtr] f=${robIdx.flag} v=${robIdx.value}\n")
    XSDebug(cond, p"[ftqPtr] f=${ftqIdx.flag} v=${ftqIdx.value} \n")
    XSDebug(cond, p"[ftqOffset] ${ftqOffset} \n")
    XSDebug(cond, p"[stFtqIdx] f=${stFtqIdx.flag} v=${stFtqIdx.value}\n")
    XSDebug(cond, p"[stFtqOffset] ${stFtqOffset}\n")
    XSDebug(cond, p"---------------------------------------------- \n")
  }
}