xref: /XiangShan/src/main/scala/xiangshan/XSCore.scala (revision 0d32f7132f120ac0b32ab552fe0da4934208dd01)
/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan

import chipsalliance.rocketchip.config
import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.util._
import freechips.rocketchip.diplomacy.{BundleBridgeSource, LazyModule, LazyModuleImp}
import freechips.rocketchip.interrupts.{IntSinkNode, IntSinkPortSimple}
import freechips.rocketchip.tile.HasFPUParameters
import system.HasSoCParameter
import utils._
import utility._
import xiangshan.backend._
import xiangshan.backend.exu.{ExuConfig, Wb2Ctrl, WbArbiterWrapper}
import xiangshan.frontend._
import xiangshan.mem.L1PrefetchFuzzer

import scala.collection.mutable.ListBuffer

abstract class XSModule(implicit val p: Parameters) extends Module
  with HasXSParameter
  with HasFPUParameters

// remove this trait after the module logic is implemented
trait NeedImpl {
  this: RawModule =>
  override protected def IO[T <: Data](iodef: T): T = {
    println(s"[Warn]: (${this.name}) please remove 'NeedImpl' after implementing this module")
    val io = chisel3.experimental.IO(iodef)
    io <> DontCare
    io
  }
}
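// Illustrative sketch (editorial note, not part of the original source): a module that is
// not yet implemented could mix in NeedImpl so that every IO it declares is tied off with
// DontCare and a warning is printed at elaboration time, e.g.
//   class FooStub(implicit p: Parameters) extends XSModule with NeedImpl {
//     val io = IO(new Bundle { val out = Output(UInt(8.W)) })  // driven by DontCare
//   }
// The FooStub class above is hypothetical and only shows the intended use of the trait.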

class WritebackSourceParams(
  var exuConfigs: Seq[Seq[ExuConfig]] = Seq()
 ) {
  def length: Int = exuConfigs.length
  def ++(that: WritebackSourceParams): WritebackSourceParams = {
    new WritebackSourceParams(exuConfigs ++ that.exuConfigs)
  }
}

trait HasWritebackSource {
  val writebackSourceParams: Seq[WritebackSourceParams]
  final def writebackSource(sourceMod: HasWritebackSourceImp): Seq[Seq[Valid[ExuOutput]]] = {
    require(sourceMod.writebackSource.isDefined, "should not use Valid[ExuOutput]")
    val source = sourceMod.writebackSource.get
    require(source.length == writebackSourceParams.length, "length mismatch between sources")
    for ((s, p) <- source.zip(writebackSourceParams)) {
      require(s.length == p.length, "params do not match with the exuOutput")
    }
    source
  }
  final def writebackSource1(sourceMod: HasWritebackSourceImp): Seq[Seq[DecoupledIO[ExuOutput]]] = {
    require(sourceMod.writebackSource1.isDefined, "should not use DecoupledIO[ExuOutput]")
    val source = sourceMod.writebackSource1.get
    require(source.length == writebackSourceParams.length, "length mismatch between sources")
    for ((s, p) <- source.zip(writebackSourceParams)) {
      require(s.length == p.length, "params do not match with the exuOutput")
    }
    source
  }
  val writebackSourceImp: HasWritebackSourceImp
}

trait HasWritebackSourceImp {
  def writebackSource: Option[Seq[Seq[Valid[ExuOutput]]]] = None
  def writebackSource1: Option[Seq[Seq[DecoupledIO[ExuOutput]]]] = None
}

trait HasWritebackSink {
  // Caches all sources. The selected source will be the one with the smallest length.
  var writebackSinks = ListBuffer.empty[(Seq[HasWritebackSource], Seq[Int])]
  def addWritebackSink(source: Seq[HasWritebackSource], index: Option[Seq[Int]] = None): HasWritebackSink = {
    val realIndex = if (index.isDefined) index.get else Seq.fill(source.length)(0)
    writebackSinks += ((source, realIndex))
    this
  }

  def writebackSinksParams: Seq[WritebackSourceParams] = {
    writebackSinks.map{ case (s, i) => s.zip(i).map(x => x._1.writebackSourceParams(x._2)).reduce(_ ++ _) }
  }
  final def writebackSinksMod(
     thisMod: Option[HasWritebackSource] = None,
     thisModImp: Option[HasWritebackSourceImp] = None
   ): Seq[Seq[HasWritebackSourceImp]] = {
    require(thisMod.isDefined == thisModImp.isDefined)
    writebackSinks.map(_._1.map(source =>
      if (thisMod.isDefined && source == thisMod.get) thisModImp.get else source.writebackSourceImp)
    )
  }
  final def writebackSinksImp(
    thisMod: Option[HasWritebackSource] = None,
    thisModImp: Option[HasWritebackSourceImp] = None
  ): Seq[Seq[ValidIO[ExuOutput]]] = {
    val sourceMod = writebackSinksMod(thisMod, thisModImp)
    writebackSinks.zip(sourceMod).map{ case ((s, i), m) =>
      s.zip(i).zip(m).flatMap(x => x._1._1.writebackSource(x._2)(x._1._2))
    }
  }
  def selWritebackSinks(func: WritebackSourceParams => Int): Int = {
    writebackSinksParams.zipWithIndex.minBy(params => func(params._1))._2
  }
  def generateWritebackIO(
    thisMod: Option[HasWritebackSource] = None,
    thisModImp: Option[HasWritebackSourceImp] = None
   ): Unit
}
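// Summary of the writeback plumbing above (editorial note, not in the original source):
// a HasWritebackSource declares, per group, which ExuConfigs it writes back, and its
// lazy-module implementation (HasWritebackSourceImp) exposes the matching Valid/Decoupled
// ExuOutput signals. A HasWritebackSink registers one or more sources via addWritebackSink(),
// can pick among the registered sources with selWritebackSinks(), and pulls the actual
// hardware signals with writebackSinksImp() once the implementation modules exist.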

abstract class XSBundle(implicit val p: Parameters) extends Bundle
  with HasXSParameter

abstract class XSCoreBase()(implicit p: config.Parameters) extends LazyModule
  with HasXSParameter with HasExuWbHelper
{
  // interrupt sinks
  val clint_int_sink = IntSinkNode(IntSinkPortSimple(1, 2))
  val debug_int_sink = IntSinkNode(IntSinkPortSimple(1, 1))
  val plic_int_sink = IntSinkNode(IntSinkPortSimple(2, 1))
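  // Editorial note (not in the original source): clint_int_sink carries two interrupt bits
  // (msip and mtip), debug_int_sink carries the debug-module interrupt, and plic_int_sink
  // has two ports of one bit each (meip and seip); they are unpacked into the CSR's
  // externalInterrupt fields in XSCoreImp below.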
  // outer facing nodes
  val frontend = LazyModule(new Frontend())
  val csrOut = BundleBridgeSource(Some(() => new DistributedCSRIO()))

  val wbArbiter = LazyModule(new WbArbiterWrapper(exuConfigs, NRIntWritePorts, NRFpWritePorts))
  val intWbPorts = wbArbiter.intWbPorts
  val fpWbPorts = wbArbiter.fpWbPorts

  // TODO: better RS organization
  // generate rs according to number of function units
  require(exuParameters.JmpCnt == 1)
  require(exuParameters.MduCnt <= exuParameters.AluCnt && exuParameters.MduCnt > 0)
  require(exuParameters.FmiscCnt <= exuParameters.FmacCnt && exuParameters.FmiscCnt > 0)
  require(exuParameters.LduCnt == exuParameters.StuCnt) // TODO: remove this limitation

  // one RS every 2 MDUs
  val schedulePorts = Seq(
    // exuCfg, numDeq, intFastWakeupTarget, fpFastWakeupTarget
    Seq(
      (AluExeUnitCfg, exuParameters.AluCnt, Seq(AluExeUnitCfg, LdExeUnitCfg, StaExeUnitCfg), Seq()),
      (MulDivExeUnitCfg, exuParameters.MduCnt, Seq(AluExeUnitCfg, MulDivExeUnitCfg), Seq()),
      (JumpCSRExeUnitCfg, 1, Seq(), Seq()),
      (LdExeUnitCfg, exuParameters.LduCnt, Seq(AluExeUnitCfg, LdExeUnitCfg), Seq()),
      (StaExeUnitCfg, exuParameters.StuCnt, Seq(), Seq()),
      (StdExeUnitCfg, exuParameters.StuCnt, Seq(), Seq())
    ),
    Seq(
      (FmacExeUnitCfg, exuParameters.FmacCnt, Seq(), Seq(FmacExeUnitCfg, FmiscExeUnitCfg)),
      (FmiscExeUnitCfg, exuParameters.FmiscCnt, Seq(), Seq())
    )
  )
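  // Reading one entry of schedulePorts (editorial example, not in the original source):
  // (AluExeUnitCfg, exuParameters.AluCnt, Seq(AluExeUnitCfg, LdExeUnitCfg, StaExeUnitCfg), Seq())
  // means an ALU issue queue with AluCnt dequeue ports whose results fast-wake the ALU, load
  // and store-address queues on the integer side and nothing on the FP side. The first inner
  // Seq describes the integer scheduler, the second the floating-point scheduler.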

  // build the inter-scheduler (outer) fast wakeup ports here
  val otherFastPorts = schedulePorts.zipWithIndex.map { case (sche, i) =>
    val otherCfg = schedulePorts.zipWithIndex.filter(_._2 != i).map(_._1).reduce(_ ++ _)
    val outerPorts = sche.map(cfg => {
      // exe units from this scheduler need fastUops from the exe units
      val outerWakeupInSche = sche.filter(_._1.wakeupFromExu)
      val intraIntScheOuter = outerWakeupInSche.filter(_._3.contains(cfg._1)).map(_._1)
      val intraFpScheOuter = outerWakeupInSche.filter(_._4.contains(cfg._1)).map(_._1)
      // exe units from other schedulers need fastUops from outside
      val otherIntSource = otherCfg.filter(_._3.contains(cfg._1)).map(_._1)
      val otherFpSource = otherCfg.filter(_._4.contains(cfg._1)).map(_._1)
      val intSource = findInWbPorts(intWbPorts, intraIntScheOuter ++ otherIntSource)
      val fpSource = findInWbPorts(fpWbPorts, intraFpScheOuter ++ otherFpSource)
      getFastWakeupIndex(cfg._1, intSource, fpSource, intWbPorts.length).sorted
    })
    println(s"inter-scheduler wakeup sources for $i: $outerPorts")
    outerPorts
  }
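  // Editorial note (not in the original source): the printed indices appear to be positions
  // in the concatenated (intWbPorts ++ fpWbPorts) write-back port list, i.e. each issue queue
  // learns which arbitrated write ports can supply it with fast wakeup signals.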

  // allow mdu and fmisc to have 2*numDeq enqueue ports
  val intDpPorts = (0 until exuParameters.AluCnt).map(i => {
    if (i < exuParameters.JmpCnt) Seq((0, i), (1, i), (2, i))
    else if (i < 2 * exuParameters.MduCnt) Seq((0, i), (1, i))
    else Seq((0, i))
  })
  val lsDpPorts = (0 until exuParameters.LduCnt).map(i => Seq((3, i))) ++
                  (0 until exuParameters.StuCnt).map(i => Seq((4, i))) ++
                  (0 until exuParameters.StuCnt).map(i => Seq((5, i)))
  val fpDpPorts = (0 until exuParameters.FmacCnt).map(i => {
    if (i < 2 * exuParameters.FmiscCnt) Seq((0, i), (1, i))
    else Seq((0, i))
  })

  val dispatchPorts = Seq(intDpPorts ++ lsDpPorts, fpDpPorts)
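  // Editorial note (not in the original source): each (q, p) pair above appears to map a
  // dispatch port onto enqueue port p of issue queue q, where q indexes the entries of the
  // corresponding schedulePorts sequence (0 = ALU, 1 = MUL/DIV, 2 = Jump/CSR, 3 = Load,
  // 4 = STA, 5 = STD on the integer side; 0 = FMA, 1 = FMISC on the FP side).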

  val outIntRfReadPorts = Seq(0, 0)
  val outFpRfReadPorts = Seq(0, StorePipelineWidth)
  val hasIntRf = Seq(true, false)
  val hasFpRf = Seq(false, true)
  val exuBlocks = schedulePorts.zip(dispatchPorts).zip(otherFastPorts).zipWithIndex.map {
    case (((sche, disp), other), i) =>
      LazyModule(new ExuBlock(sche, disp, intWbPorts, fpWbPorts, other, outIntRfReadPorts(i), outFpRfReadPorts(i), hasIntRf(i), hasFpRf(i)))
  }

  val memBlock = LazyModule(new MemBlock()(p.alter((site, here, up) => {
    case XSCoreParamsKey => up(XSCoreParamsKey).copy(
      IssQueSize = exuBlocks.head.scheduler.getMemRsEntries
    )
  })))

  val wb2Ctrl = LazyModule(new Wb2Ctrl(exuConfigs))
  wb2Ctrl.addWritebackSink(exuBlocks :+ memBlock)
  val dpExuConfigs = exuBlocks.flatMap(_.scheduler.dispatch2.map(_.configs))
  val ctrlBlock = LazyModule(new CtrlBlock(dpExuConfigs))
  val writebackSources = Seq(Seq(wb2Ctrl), Seq(wbArbiter))
  writebackSources.foreach(s => ctrlBlock.addWritebackSink(s))
}

class XSCore()(implicit p: config.Parameters) extends XSCoreBase
  with HasXSDts
{
  lazy val module = new XSCoreImp(this)
}
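// Illustrative sketch (editorial note, not in the original source): XSCore is a diplomacy
// LazyModule, so a hypothetical SoC wrapper would instantiate it and connect its interrupt
// sinks before touching `core.module`, e.g.
//   val core = LazyModule(new XSCore())
//   core.clint_int_sink := clintNode   // msip/mtip
//   core.plic_int_sink  :*= plicNode   // meip/seip
//   core.debug_int_sink := debugNode
// where clintNode/plicNode/debugNode stand for interrupt source nodes provided by the SoC.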

class XSCoreImp(outer: XSCoreBase) extends LazyModuleImp(outer)
  with HasXSParameter
  with HasSoCParameter {
  val io = IO(new Bundle {
    val hartId = Input(UInt(64.W))
    val reset_vector = Input(UInt(PAddrBits.W))
    val cpu_halt = Output(Bool())
    val l2_pf_enable = Output(Bool())
    val perfEvents = Input(Vec(numPCntHc * coreParams.L2NBanks, new PerfEvent))
    val beu_errors = Output(new XSL1BusErrors())
    val l2_hint = Input(Valid(new L2ToL1Hint()))
    val l2PfqBusy = Input(Bool())
  })

  println(s"FPGAPlatform:${env.FPGAPlatform} EnableDebug:${env.EnableDebug}")

  val frontend = outer.frontend.module
  val ctrlBlock = outer.ctrlBlock.module
  val wb2Ctrl = outer.wb2Ctrl.module
  val memBlock = outer.memBlock.module
  val exuBlocks = outer.exuBlocks.map(_.module)

  frontend.io.hartId  := io.hartId
  ctrlBlock.io.hartId := io.hartId
  exuBlocks.foreach(_.io.hartId := io.hartId)
  memBlock.io.hartId := io.hartId
  outer.wbArbiter.module.io.hartId := io.hartId
  frontend.io.reset_vector := io.reset_vector

  io.cpu_halt := ctrlBlock.io.cpu_halt

  outer.wbArbiter.module.io.redirect <> ctrlBlock.io.redirect
  val allWriteback = exuBlocks.flatMap(_.io.fuWriteback) ++ memBlock.io.mem_to_ooo.writeback
  require(exuConfigs.length == allWriteback.length, s"${exuConfigs.length} != ${allWriteback.length}")
  outer.wbArbiter.module.io.in <> allWriteback
  val rfWriteback = outer.wbArbiter.module.io.out

  // memblock error exception writeback, 1 cycle after normal writeback
  wb2Ctrl.io.s3_delayed_load_error <> memBlock.io.s3_delayed_load_error

  wb2Ctrl.io.redirect <> ctrlBlock.io.redirect
  outer.wb2Ctrl.generateWritebackIO()

  io.beu_errors.icache <> frontend.io.error.toL1BusErrorUnitInfo()
  io.beu_errors.dcache <> memBlock.io.error.toL1BusErrorUnitInfo()

  require(exuBlocks.count(_.fuConfigs.map(_._1).contains(JumpCSRExeUnitCfg)) == 1)
  val csrFenceMod = exuBlocks.filter(_.fuConfigs.map(_._1).contains(JumpCSRExeUnitCfg)).head
  val csrioIn = csrFenceMod.io.fuExtra.csrio.get
  val fenceio = csrFenceMod.io.fuExtra.fenceio.get

  frontend.io.backend <> ctrlBlock.io.frontend
  frontend.io.sfence <> fenceio.sfence
  frontend.io.tlbCsr <> csrioIn.tlb
  frontend.io.csrCtrl <> csrioIn.customCtrl
  frontend.io.fencei := fenceio.fencei

  ctrlBlock.io.csrCtrl <> csrioIn.customCtrl
  val redirectBlocks = exuBlocks.reverse.filter(_.fuConfigs.map(_._1).map(_.hasRedirect).reduce(_ || _))
  ctrlBlock.io.exuRedirect <> redirectBlocks.flatMap(_.io.fuExtra.exuRedirect)
  ctrlBlock.io.stIn <> memBlock.io.mem_to_ooo.stIn
  ctrlBlock.io.memoryViolation <> memBlock.io.mem_to_ooo.memoryViolation
  exuBlocks.head.io.scheExtra.enqLsq.get <> memBlock.io.ooo_to_mem.enqLsq
  exuBlocks.foreach(b => {
    b.io.scheExtra.lcommit := memBlock.io.mem_to_ooo.lqDeq
    b.io.scheExtra.scommit := memBlock.io.mem_to_ooo.sqDeq
    b.io.scheExtra.lqCancelCnt := memBlock.io.mem_to_ooo.lqCancelCnt
    b.io.scheExtra.sqCancelCnt := memBlock.io.mem_to_ooo.sqCancelCnt
  })
  val sourceModules = outer.writebackSources.map(_.map(_.module.asInstanceOf[HasWritebackSourceImp]))
  outer.ctrlBlock.generateWritebackIO()

  val allFastUop = exuBlocks.flatMap(b => b.io.fastUopOut.dropRight(b.numOutFu)) ++ memBlock.io.mem_to_ooo.otherFastWakeup
  require(allFastUop.length == exuConfigs.length, s"${allFastUop.length} != ${exuConfigs.length}")
  val intFastUop = allFastUop.zip(exuConfigs).filter(_._2.writeIntRf).map(_._1)
  val fpFastUop = allFastUop.zip(exuConfigs).filter(_._2.writeFpRf).map(_._1)
  val intFastUop1 = outer.wbArbiter.intConnections.map(c => intFastUop(c.head))
  val fpFastUop1 = outer.wbArbiter.fpConnections.map(c => fpFastUop(c.head))
  val allFastUop1 = intFastUop1 ++ fpFastUop1
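  // Editorial note (not in the original source): allFastUop is ordered per execution unit,
  // while allFastUop1 is re-indexed per arbitrated register-file write port. Taking c.head
  // presumably uses the first exu sharing each write port as the representative fast-wakeup
  // source, matching how rfWriteback is laid out after the WbArbiter.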

  ctrlBlock.io.dispatch <> exuBlocks.flatMap(_.io.in)
  ctrlBlock.io.rsReady := exuBlocks.flatMap(_.io.scheExtra.rsReady)
  ctrlBlock.io.enqLsq <> memBlock.io.ooo_to_mem.enqLsq
  ctrlBlock.io.lqDeq := memBlock.io.mem_to_ooo.lqDeq
  ctrlBlock.io.sqDeq := memBlock.io.mem_to_ooo.sqDeq
  ctrlBlock.io.lqCanAccept := memBlock.io.mem_to_ooo.lsqio.lqCanAccept
  ctrlBlock.io.sqCanAccept := memBlock.io.mem_to_ooo.lsqio.sqCanAccept
  ctrlBlock.io.lqCancelCnt := memBlock.io.mem_to_ooo.lqCancelCnt
  ctrlBlock.io.sqCancelCnt := memBlock.io.mem_to_ooo.sqCancelCnt
  ctrlBlock.io.robHeadLsIssue := exuBlocks.map(_.io.scheExtra.robHeadLsIssue).reduce(_ || _)

  exuBlocks(0).io.scheExtra.fpRfReadIn.get <> exuBlocks(1).io.scheExtra.fpRfReadOut.get
  exuBlocks(0).io.scheExtra.fpStateReadIn.get <> exuBlocks(1).io.scheExtra.fpStateReadOut.get

  for((c, e) <- ctrlBlock.io.ld_pc_read.zip(exuBlocks(0).io.issue.get)){
    // read load pc at load s0
    c.ptr := e.bits.uop.cf.ftqPtr
    c.offset := e.bits.uop.cf.ftqOffset
  }
  // return load pc at load s2
  memBlock.io.ooo_to_mem.loadPc <> VecInit(ctrlBlock.io.ld_pc_read.map(_.data))

  for((c, e) <- ctrlBlock.io.st_pc_read.zip(exuBlocks(0).io.issue.get.drop(exuParameters.LduCnt))){
    // read store pc at store s0
    c.ptr := e.bits.uop.cf.ftqPtr
    c.offset := e.bits.uop.cf.ftqOffset
  }
  // return store pc at store s2
  memBlock.io.ooo_to_mem.storePc <> VecInit(ctrlBlock.io.st_pc_read.map(_.data))

  memBlock.io.ooo_to_mem.issue <> exuBlocks(0).io.issue.get
  // By default, instructions do not have exceptions when they enter the function units.
  memBlock.io.ooo_to_mem.issue.foreach(_.bits.uop.clearExceptions())
  exuBlocks(0).io.scheExtra.loadFastMatch.get <> memBlock.io.ooo_to_mem.loadFastMatch
  exuBlocks(0).io.scheExtra.loadFastFuOpType.get <> memBlock.io.ooo_to_mem.loadFastFuOpType
  exuBlocks(0).io.scheExtra.loadFastImm.get <> memBlock.io.ooo_to_mem.loadFastImm

  val stdIssue = exuBlocks(0).io.issue.get.takeRight(exuParameters.StuCnt)
  exuBlocks.map(_.io).foreach { exu =>
    exu.redirect <> ctrlBlock.io.redirect
    exu.allocPregs <> ctrlBlock.io.allocPregs
    exu.rfWriteback <> rfWriteback
    exu.fastUopIn <> allFastUop1
    exu.scheExtra.jumpPc <> ctrlBlock.io.jumpPc
    exu.scheExtra.jalr_target <> ctrlBlock.io.jalr_target
    exu.scheExtra.stIssuePtr <> memBlock.io.mem_to_ooo.stIssuePtr
    exu.scheExtra.debug_fp_rat <> ctrlBlock.io.debug_fp_rat
    exu.scheExtra.debug_int_rat <> ctrlBlock.io.debug_int_rat
    exu.scheExtra.robDeqPtr := ctrlBlock.io.robDeqPtr
    exu.scheExtra.memWaitUpdateReq.staIssue.zip(memBlock.io.mem_to_ooo.stIn).foreach{case (sink, src) => {
      sink.bits := src.bits
      sink.valid := src.valid
    }}
    exu.scheExtra.memWaitUpdateReq.stdIssue.zip(stdIssue).foreach{case (sink, src) => {
      sink.valid := src.valid
      sink.bits := src.bits
    }}
  }
  XSPerfHistogram("fastIn_count", PopCount(allFastUop1.map(_.valid)), true.B, 0, allFastUop1.length, 1)
  XSPerfHistogram("wakeup_count", PopCount(rfWriteback.map(_.valid)), true.B, 0, rfWriteback.length, 1)

  ctrlBlock.perfinfo.perfEventsEu0 := exuBlocks(0).getPerf.dropRight(outer.exuBlocks(0).scheduler.numRs)
  ctrlBlock.perfinfo.perfEventsEu1 := exuBlocks(1).getPerf.dropRight(outer.exuBlocks(1).scheduler.numRs)
  ctrlBlock.perfinfo.perfEventsRs  := outer.exuBlocks.flatMap(b => b.module.getPerf.takeRight(b.scheduler.numRs))

  csrioIn.hartId <> io.hartId
  csrioIn.perf <> DontCare
  csrioIn.perf.retiredInstr <> ctrlBlock.io.robio.toCSR.perfinfo.retiredInstr
  csrioIn.perf.ctrlInfo <> ctrlBlock.io.perfInfo.ctrlInfo
  csrioIn.perf.memInfo <> memBlock.io.memInfo
  csrioIn.perf.frontendInfo <> frontend.io.frontendInfo

  csrioIn.perf.perfEventsFrontend <> frontend.getPerf
  csrioIn.perf.perfEventsCtrl     <> ctrlBlock.getPerf
  csrioIn.perf.perfEventsLsu      <> memBlock.getPerf
  csrioIn.perf.perfEventsHc       <> io.perfEvents

  csrioIn.fpu.fflags <> ctrlBlock.io.robio.toCSR.fflags
  csrioIn.fpu.isIllegal := false.B
  csrioIn.fpu.dirty_fs <> ctrlBlock.io.robio.toCSR.dirty_fs
  csrioIn.fpu.frm <> exuBlocks(1).io.fuExtra.frm.get
  csrioIn.exception <> ctrlBlock.io.robio.exception
  csrioIn.isXRet <> ctrlBlock.io.robio.toCSR.isXRet
  csrioIn.trapTarget <> ctrlBlock.io.robio.toCSR.trapTarget
  csrioIn.interrupt <> ctrlBlock.io.robio.toCSR.intrBitSet
  csrioIn.wfi_event <> ctrlBlock.io.robio.toCSR.wfiEvent
  csrioIn.memExceptionVAddr <> memBlock.io.mem_to_ooo.lsqio.vaddr

  csrioIn.externalInterrupt.msip := outer.clint_int_sink.in.head._1(0)
  csrioIn.externalInterrupt.mtip := outer.clint_int_sink.in.head._1(1)
  csrioIn.externalInterrupt.meip := outer.plic_int_sink.in.head._1(0)
  csrioIn.externalInterrupt.seip := outer.plic_int_sink.in.last._1(0)
  csrioIn.externalInterrupt.debug := outer.debug_int_sink.in.head._1(0)

  csrioIn.distributedUpdate(0).w.valid := memBlock.io.mem_to_ooo.csrUpdate.w.valid
  csrioIn.distributedUpdate(0).w.bits := memBlock.io.mem_to_ooo.csrUpdate.w.bits
  csrioIn.distributedUpdate(1).w.valid := frontend.io.csrUpdate.w.valid
  csrioIn.distributedUpdate(1).w.bits := frontend.io.csrUpdate.w.bits

  fenceio.sfence <> memBlock.io.ooo_to_mem.sfence
  memBlock.io.fetch_to_mem.itlb <> frontend.io.ptw
  memBlock.io.ooo_to_mem.flushSb := fenceio.sbuffer.flushSb
  fenceio.sbuffer.sbIsEmpty := memBlock.io.mem_to_ooo.sbIsEmpty

  memBlock.io.redirect <> ctrlBlock.io.redirect
  memBlock.io.rsfeedback <> exuBlocks(0).io.scheExtra.feedback.get

  memBlock.io.ooo_to_mem.csrCtrl <> csrioIn.customCtrl
  memBlock.io.ooo_to_mem.tlbCsr <> csrioIn.tlb

  memBlock.io.ooo_to_mem.lsqio.lcommit    := ctrlBlock.io.robio.lsq.lcommit
  memBlock.io.ooo_to_mem.lsqio.scommit    := ctrlBlock.io.robio.lsq.scommit
  memBlock.io.ooo_to_mem.lsqio.pendingld  := ctrlBlock.io.robio.lsq.pendingld
  memBlock.io.ooo_to_mem.lsqio.pendingst  := ctrlBlock.io.robio.lsq.pendingst
  memBlock.io.ooo_to_mem.lsqio.commit     := ctrlBlock.io.robio.lsq.commit
  memBlock.io.ooo_to_mem.lsqio.pendingPtr := ctrlBlock.io.robio.lsq.pendingPtr
  ctrlBlock.io.robio.lsq.mmio             := memBlock.io.mem_to_ooo.lsqio.mmio
  ctrlBlock.io.robio.lsq.uop              := memBlock.io.mem_to_ooo.lsqio.uop
//  memBlock.io.lsqio.rob <> ctrlBlock.io.robio.lsq
  memBlock.io.ooo_to_mem.isStore := CommitType.lsInstIsStore(ctrlBlock.io.robio.exception.bits.uop.ctrl.commitType)
  memBlock.io.debug_ls <> ctrlBlock.io.robio.debug_ls
  memBlock.io.mem_to_ooo.lsTopdownInfo <> ctrlBlock.io.robio.lsTopdownInfo
  memBlock.io.l2_hint.valid := io.l2_hint.valid
  memBlock.io.l2_hint.bits.sourceId := io.l2_hint.bits.sourceId
  memBlock.io.l2PfqBusy := io.l2PfqBusy

  // if the L2 prefetcher uses stream prefetching, it should be placed in XSCore
  io.l2_pf_enable := csrioIn.customCtrl.l2_pf_enable

  // Modules are reset one by one
  val resetTree = ResetGenNode(
    Seq(
      ModuleNode(memBlock),
      ResetGenNode(Seq(
        ModuleNode(exuBlocks.head),
        ResetGenNode(
          exuBlocks.tail.map(m => ModuleNode(m)) :+ ModuleNode(outer.wbArbiter.module)
        ),
        ResetGenNode(Seq(
          ModuleNode(ctrlBlock),
          ResetGenNode(Seq(
            ModuleNode(frontend)
          ))
        ))
      ))
    )
  )
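  // Editorial note (not in the original source): the nesting above defines the reset ordering,
  // with memBlock at the root of the tree and the frontend at the deepest level; ResetGen below
  // elaborates this tree into the per-module reset signals, presumably only when not targeting
  // FPGA, given the !debugOpts.FPGAPlatform argument.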

  ResetGen(resetTree, reset, !debugOpts.FPGAPlatform)

}