/***************************************************************************************
* Copyright (c) 2024 Beijing Institute of Open Source Chip (BOSC)
* Copyright (c) 2024 Institute of Computing Technology, Chinese Academy of Sciences
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package top

import chisel3._
import chisel3.util._
import chisel3.experimental.dataview._
import xiangshan._
import utils._
import utility._
import utility.sram.SramBroadcastBundle
import system._
import device._
import org.chipsalliance.cde.config._
import freechips.rocketchip.amba.axi4._
import freechips.rocketchip.devices.debug.DebugModuleKey
import freechips.rocketchip.diplomacy._
import freechips.rocketchip.interrupts._
import freechips.rocketchip.tilelink._
import coupledL2.tl2chi.{CHIAsyncBridgeSink, PortIO}
import freechips.rocketchip.tile.MaxHartIdBits
import freechips.rocketchip.util.{AsyncQueueParams, AsyncQueueSource}
import chisel3.experimental.{ChiselAnnotation, annotate}
import sifive.enterprise.firrtl.NestedPrefixModulesAnnotation

import difftest.common.DifftestWiring
import difftest.util.Profile

class XSNoCTop()(implicit p: Parameters) extends BaseXSSoc with HasSoCParameter
{
  override lazy val desiredName: String = "XSTop"

  ResourceBinding {
    val width = ResourceInt(2)
    val model = "freechips,rocketchip-unknown"
    Resource(ResourceAnchors.root, "model").bind(ResourceString(model))
    Resource(ResourceAnchors.root, "compat").bind(ResourceString(model + "-dev"))
    Resource(ResourceAnchors.soc, "compat").bind(ResourceString(model + "-soc"))
    Resource(ResourceAnchors.root, "width").bind(width)
    Resource(ResourceAnchors.soc, "width").bind(width)
    Resource(ResourceAnchors.cpus, "width").bind(ResourceInt(1))
    def bindManagers(xbar: TLNexusNode) = {
      ManagerUnification(xbar.edges.in.head.manager.managers).foreach{ manager =>
        manager.resources.foreach(r => r.bind(manager.toResource))
      }
    }
  }

  require(enableCHI)

  // xstile
  val core_with_l2 = LazyModule(new XSTileWrap()(p.alter((site, here, up) => {
    case XSCoreParamsKey => tiles.head
    case PerfCounterOptionsKey => up(PerfCounterOptionsKey).copy(perfDBHartID = tiles.head.HartId)
  })))

  // imsic bus top
  val u_imsic_bus_top = LazyModule(new imsic_bus_top)

  // interrupts
  val clintIntNode = IntSourceNode(IntSourcePortSimple(1, 1, 2))
  val debugIntNode = IntSourceNode(IntSourcePortSimple(1, 1, 1))
  val plicIntNode = IntSourceNode(IntSourcePortSimple(1, 2, 1))
  val nmiIntNode = IntSourceNode(IntSourcePortSimple(1, 1, (new NonmaskableInterruptIO).elements.size))
  val beuIntNode = IntSinkNode(IntSinkPortSimple(1, 1))
  core_with_l2.clintIntNode := clintIntNode
  core_with_l2.debugIntNode := debugIntNode
  core_with_l2.plicIntNode :*= plicIntNode
  core_with_l2.nmiIntNode := nmiIntNode
  beuIntNode := core_with_l2.beuIntNode
  val clint = InModuleBody(clintIntNode.makeIOs())
  val debug = InModuleBody(debugIntNode.makeIOs())
  val plic = InModuleBody(plicIntNode.makeIOs())
  val nmi = InModuleBody(nmiIntNode.makeIOs())
  val beu = InModuleBody(beuIntNode.makeIOs())
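  // Note: the CLINT/debug/PLIC/NMI sources above feed the tile's diplomatic interrupt sinks,
  // while the bus-error (beu) interrupt flows the other way, from the tile out to the SoC;
  // InModuleBody(...makeIOs()) punches each of them out as top-level IOs of this wrapper.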

  // asynchronous bridge sink node
  val tlAsyncSinkOpt = Option.when(SeperateTLBus && EnableSeperateTLAsync)(
    LazyModule(new TLAsyncCrossingSink(SeperateTLAsyncBridge.get))
  )
  tlAsyncSinkOpt.foreach(_.node := core_with_l2.tlAsyncSourceOpt.get.node)
  // synchronous sink node
  val tlSyncSinkOpt = Option.when(SeperateTLBus && !EnableSeperateTLAsync)(TLTempNode())
  tlSyncSinkOpt.foreach(_ := core_with_l2.tlSyncSourceOpt.get)

  // The manager node is only used to make IOs
  val tl = Option.when(SeperateTLBus)(TLManagerNode(Seq(
    TLSlavePortParameters.v1(
      managers = SeperateTLBusRanges map { address =>
        TLSlaveParameters.v1(
          address = Seq(address),
          regionType = RegionType.UNCACHED,
          executable = true,
          supportsGet = TransferSizes(1, p(SoCParamsKey).L3BlockSize),
          supportsPutPartial = TransferSizes(1, p(SoCParamsKey).L3BlockSize),
          supportsPutFull = TransferSizes(1, p(SoCParamsKey).L3BlockSize),
          fifoId = Some(0)
        )
      },
      beatBytes = 8
    )
  )))
  val tlXbar = Option.when(SeperateTLBus)(TLXbar())
  tlAsyncSinkOpt.foreach(sink => tlXbar.get := sink.node)
  tlSyncSinkOpt.foreach(sink => tlXbar.get := sink)
  tl.foreach(_ := tlXbar.get)
  // separate TL IO
  val io_tl = tl.map(x => InModuleBody(x.makeIOs()))

  // reset nodes
  val core_rst_node = BundleBridgeSource(() => Reset())
  core_with_l2.tile.core_reset_sink := core_rst_node

  class XSNoCTopImp(wrapper: XSNoCTop) extends LazyRawModuleImp(wrapper) {
    soc.XSTopPrefix.foreach { prefix =>
      val mod = this.toNamed
      annotate(new ChiselAnnotation {
        def toFirrtl = NestedPrefixModulesAnnotation(mod, prefix, true)
      })
    }
    FileRegisters.add("dts", dts)
    FileRegisters.add("graphml", graphML)
    FileRegisters.add("json", json)
    FileRegisters.add("plusArgs", freechips.rocketchip.util.PlusArgArtefacts.serialize_cHeader())

    val clock = IO(Input(Clock()))
    val reset = IO(Input(AsyncReset()))
    val noc_clock = EnableCHIAsyncBridge.map(_ => IO(Input(Clock())))
    val noc_reset = EnableCHIAsyncBridge.map(_ => IO(Input(AsyncReset())))
    val soc_clock = IO(Input(Clock()))
    val soc_reset = IO(Input(AsyncReset()))
    private val hasMbist = tiles.head.hasMbist
    private val hasSramCtl = tiles.head.hasSramCtl
    private val hasDFT = hasMbist || hasSramCtl
    val io = IO(new Bundle {
      val hartId = Input(UInt(p(MaxHartIdBits).W))
      val riscv_halt = Output(Bool())
      val riscv_critical_error = Output(Bool())
      val hartResetReq = Input(Bool())
      val hartIsInReset = Output(Bool())
      val riscv_rst_vec = Input(UInt(soc.PAddrBits.W))
      val chi = new PortIO
      val nodeID = Input(UInt(soc.NodeIDWidthList(issue).W))
      val clintTime = Input(ValidIO(UInt(64.W)))
      val traceCoreInterface = new Bundle {
        val fromEncoder = Input(new Bundle {
          val enable = Bool()
          val stall = Bool()
        })
        val toEncoder = Output(new Bundle {
          val cause = UInt(TraceCauseWidth.W)
          val tval = UInt(TraceTvalWidth.W)
          val priv = UInt(TracePrivWidth.W)
          val iaddr = UInt((TraceTraceGroupNum * TraceIaddrWidth).W)
          val itype = UInt((TraceTraceGroupNum * TraceItypeWidth).W)
          val iretire = UInt((TraceTraceGroupNum * TraceIretireWidthCompressed).W)
          val ilastsize = UInt((TraceTraceGroupNum * TraceIlastsizeWidth).W)
        })
      }
      val dft = Option.when(hasDFT)(Input(new SramBroadcastBundle))
      val dft_reset = Option.when(hasMbist)(Input(new DFTResetSignals()))
      val lp = Option.when(EnablePowerDown)(new LowPowerIO)
    })
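    // Note: which of the IMSIC bus flavours below (AXI4, TileLink or bare-wire MSI) is
    // actually materialized depends on the imsic_bus_top configuration; unconfigured
    // Options stay None and generate no ports.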
    // imsic axi4 io
    val imsic_axi4 = wrapper.u_imsic_bus_top.axi4.map(x => IO(Flipped(new VerilogAXI4Record(x.elts.head.params.copy(addrBits = 32)))))
    // imsic tl io
    val imsic_m_tl = wrapper.u_imsic_bus_top.tl_m.map(x => IO(chiselTypeOf(x.getWrappedValue)))
    val imsic_s_tl = wrapper.u_imsic_bus_top.tl_s.map(x => IO(chiselTypeOf(x.getWrappedValue)))
    // imsic bare io
    val imsic = wrapper.u_imsic_bus_top.module.msi.map(x => IO(chiselTypeOf(x)))

    val noc_reset_sync = EnableCHIAsyncBridge.map(_ => withClockAndReset(noc_clock, noc_reset) { ResetGen(2, io.dft_reset) })
    val soc_reset_sync = withClockAndReset(soc_clock, soc_reset) { ResetGen(2, io.dft_reset) }
    wrapper.core_with_l2.module.io.dft.zip(io.dft).foreach { case (a, b) => a := b }
    wrapper.core_with_l2.module.io.dft_reset.zip(io.dft_reset).foreach { case (a, b) => a := b }
    // device clock and reset
    wrapper.u_imsic_bus_top.module.clock := soc_clock
    wrapper.u_imsic_bus_top.module.reset := soc_reset_sync

    // imsic axi4 io connection
    imsic_axi4.foreach(_.viewAs[AXI4Bundle] <> wrapper.u_imsic_bus_top.axi4.get.elements.head._2)
    // imsic tl io connection
    wrapper.u_imsic_bus_top.tl_m.foreach(_ <> imsic_m_tl.get)
    wrapper.u_imsic_bus_top.tl_s.foreach(_ <> imsic_s_tl.get)
    // imsic bare io connection
    wrapper.u_imsic_bus_top.module.msi.foreach(_ <> imsic.get)

    // input
    dontTouch(io)

    /*
     * The SoC controls the power on/off sequence together with isolation/reset/clock.
     */
    val soc_rst_n = io.lp.map(_.i_cpu_sw_rst_n).getOrElse(true.B)
    val soc_iso_en = io.lp.map(_.i_cpu_iso_en).getOrElse(false.B)

    /* Core+L2 is reset when:
       1. a normal reset comes from the SoC
       2. the SoC issues an initialization reset during the power on/off flow
     */
    val cpuReset = reset.asBool || !soc_rst_n

    // Collect interrupt sources
    val msip = clint.head(0)
    val mtip = clint.head(1)
    val meip = plic.head(0)
    val seip = plic.last(0)
    val nmi_31 = nmi.head(0)
    val nmi_43 = nmi.head(1)
    val msi_info_vld = core_with_l2.module.io.msiInfo.valid
    val intSrc = Cat(msip, mtip, meip, seip, nmi_31, nmi_43, msi_info_vld)

    /*
     * CPU low-power state:
     * 1. the core+L2 low-power transition is triggered by an L2 flush request from the core CSR
     * 2. wait for the L2 flush to finish
     * 3. wait for the core to enter WFI -> send out <io.o_cpu_no_op>
     */
    val sIDLE :: sL2FLUSH :: sWAITWFI :: sEXITCO :: sPOFFREQ :: Nil = Enum(5)
    val lpState = withClockAndReset(clock, cpuReset.asAsyncReset) { RegInit(sIDLE) }
    val l2_flush_en = core_with_l2.module.io.l2_flush_en.getOrElse(false.B)
    val l2_flush_done = core_with_l2.module.io.l2_flush_done.getOrElse(false.B)
    val isWFI = core_with_l2.module.io.cpu_halt
    val exitco = !io.chi.syscoreq & !io.chi.syscoack
    lpState := lpStateNext(lpState, l2_flush_en, l2_flush_done, isWFI, exitco)
    io.lp.foreach { lp => lp.o_cpu_no_op := lpState === sPOFFREQ } // inform the SoC that core+L2 wants to power off
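    // Reading aid (the authoritative transitions live in lpStateNext, defined elsewhere):
    // the walk roughly follows the steps above, sIDLE -> sL2FLUSH on l2_flush_en,
    // -> sWAITWFI once l2_flush_done, -> sEXITCO while leaving coherency (syscoreq and
    // syscoack both low), -> sPOFFREQ where o_cpu_no_op is raised towards the SoC.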

    /* WFI clock gating state:
       1. only active when lpState is sIDLE, i.e. Core+L2 is in the normal state
       2. when the core is in WFI, the core+L2 clock is gated
       3. only reset/interrupt/snoop can recover the core+L2 clock
     */
    val sNORMAL :: sGCLOCK :: sAWAKE :: sFLITWAKE :: Nil = Enum(4)
    val wfiState = withClockAndReset(clock, cpuReset.asAsyncReset) { RegInit(sNORMAL) }
    val isNormal = lpState === sIDLE
    val wfiGateClock = withClockAndReset(clock, cpuReset.asAsyncReset) { RegInit(false.B) }
    val flitpend = io.chi.rx.snp.flitpend | io.chi.rx.rsp.flitpend | io.chi.rx.dat.flitpend
    wfiState := WfiStateNext(wfiState, isWFI, isNormal, flitpend, intSrc)

    if (WFIClockGate) {
      wfiGateClock := (wfiState === sGCLOCK)
    } else {
      wfiGateClock := false.B
    }

    /* during the power-down sequence, SoC reset gates the clock */
    val pwrdownGateClock = withClockAndReset(clock, cpuReset.asAsyncReset) { RegInit(false.B) }
    pwrdownGateClock := !soc_rst_n && lpState === sPOFFREQ
    /*
     * physical power-off handshake:
     *   i_cpu_pwrdown_req_n
     *   o_cpu_pwrdown_ack_n means all power is safely on
     */
    val soc_pwrdown_n = io.lp.map(_.i_cpu_pwrdown_req_n).getOrElse(true.B)
    io.lp.foreach { lp => lp.o_cpu_pwrdown_ack_n := core_with_l2.module.io.pwrdown_ack_n.getOrElse(true.B) }

    /* Core+L2 hardware initial clock gating:
       1. gate the clock when the SoC resets the CPU via <io.i_cpu_sw_rst_n>
       2. gate the clock when the SoC enables the clock (Core+L2 in the normal state) and the core is in WFI
       3. disable clock gating in the cycle where flitpend is valid on the rx.snp channel
     */
    val cpuClockEn = !wfiGateClock && !pwrdownGateClock | io.chi.rx.snp.flitpend

    dontTouch(wfiGateClock)
    dontTouch(pwrdownGateClock)
    dontTouch(cpuClockEn)

    core_with_l2.module.clock := ClockGate(false.B, cpuClockEn, clock)
    core_with_l2.module.reset := cpuReset.asAsyncReset
    core_with_l2.module.noc_reset.foreach(_ := noc_reset.get)
    core_with_l2.module.soc_reset := soc_reset
    core_with_l2.module.io.hartId := io.hartId
    core_with_l2.module.io.nodeID.get := io.nodeID
    io.riscv_halt := core_with_l2.module.io.cpu_halt
    io.riscv_critical_error := core_with_l2.module.io.cpu_crtical_error
    core_with_l2.module.io.hartResetReq := io.hartResetReq
    io.hartIsInReset := core_with_l2.module.io.hartIsInReset
    core_with_l2.module.io.reset_vector := io.riscv_rst_vec
    core_with_l2.module.io.iso_en.foreach { _ := false.B }
    core_with_l2.module.io.pwrdown_req_n.foreach { _ := true.B }
    // trace interface
    val traceInterface = core_with_l2.module.io.traceCoreInterface
    traceInterface.fromEncoder := io.traceCoreInterface.fromEncoder
    io.traceCoreInterface.toEncoder.priv := traceInterface.toEncoder.priv
    io.traceCoreInterface.toEncoder.cause := traceInterface.toEncoder.trap.cause
    io.traceCoreInterface.toEncoder.tval := traceInterface.toEncoder.trap.tval
    io.traceCoreInterface.toEncoder.iaddr := VecInit(traceInterface.toEncoder.groups.map(_.bits.iaddr)).asUInt
    io.traceCoreInterface.toEncoder.itype := VecInit(traceInterface.toEncoder.groups.map(_.bits.itype)).asUInt
    io.traceCoreInterface.toEncoder.iretire := VecInit(traceInterface.toEncoder.groups.map(_.bits.iretire)).asUInt
    io.traceCoreInterface.toEncoder.ilastsize := VecInit(traceInterface.toEncoder.groups.map(_.bits.ilastsize)).asUInt
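    // Clock-domain crossings: the 64-bit CLINT time value is pushed through an
    // AsyncQueueSource in the soc_clock domain (the matching sink side is expected inside
    // core_with_l2), and the CHI port goes through a CHIAsyncBridgeSink in the noc_clock
    // domain; when the corresponding Enable*AsyncBridge option is None, the signals are
    // wired straight through below.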

    EnableClintAsyncBridge match {
      case Some(param) =>
        withClockAndReset(soc_clock, soc_reset_sync) {
          val source = Module(new AsyncQueueSource(UInt(64.W), param))
          source.io.enq.valid := io.clintTime.valid
          source.io.enq.bits := io.clintTime.bits
          core_with_l2.module.io.clintTime <> source.io.async
        }
      case None =>
        core_with_l2.module.io.clintTime <> io.clintTime
    }

    EnableCHIAsyncBridge match {
      case Some(param) =>
        withClockAndReset(noc_clock.get, noc_reset_sync.get) {
          val sink = Module(new CHIAsyncBridgeSink(param))
          sink.io.async <> core_with_l2.module.io.chi
          io.chi <> sink.io.deq
        }
      case None =>
        io.chi <> core_with_l2.module.io.chi
    }

    // Separate DebugModule TL async queue sink
    if (SeperateTLBus && EnableSeperateTLAsync) {
      tlAsyncSinkOpt.get.module.clock := soc_clock
      tlAsyncSinkOpt.get.module.reset := soc_reset_sync
    }

    core_with_l2.module.io.msiInfo.valid := wrapper.u_imsic_bus_top.module.msiio.vld_req
    core_with_l2.module.io.msiInfo.bits := wrapper.u_imsic_bus_top.module.msiio.data
    wrapper.u_imsic_bus_top.module.msiio.vld_ack := core_with_l2.module.io.msiAck
    // tie off the core soft reset
    core_rst_node.out.head._1 := false.B.asAsyncReset

    core_with_l2.module.io.debugTopDown.l3MissMatch := false.B
    core_with_l2.module.io.l3Miss := false.B
  }

  lazy val module = new XSNoCTopImp(this)
}

class XSNoCDiffTop(implicit p: Parameters) extends Module {
  override val desiredName: String = "XSDiffTop"
  val l_soc = LazyModule(new XSNoCTop())
  val soc = Module(l_soc.module)

  // Expose XSTop IOs outside, i.e. io
  def exposeIO(data: Data, name: String): Unit = {
    val dummy = IO(chiselTypeOf(data)).suggestName(name)
    dummy <> data
  }
  def exposeOptionIO(data: Option[Data], name: String): Unit = {
    if (data.isDefined) {
      val dummy = IO(chiselTypeOf(data.get)).suggestName(name)
      dummy <> data.get
    }
  }
  exposeIO(l_soc.clint, "clint")
  exposeIO(l_soc.debug, "debug")
  exposeIO(l_soc.plic, "plic")
  exposeIO(l_soc.beu, "beu")
  exposeIO(l_soc.nmi, "nmi")
  soc.clock := clock
  soc.reset := reset.asAsyncReset
  exposeIO(soc.soc_clock, "soc_clock")
  exposeIO(soc.soc_reset, "soc_reset")
  exposeIO(soc.io, "io")
  exposeOptionIO(soc.noc_clock, "noc_clock")
  exposeOptionIO(soc.noc_reset, "noc_reset")
  exposeOptionIO(soc.imsic_axi4, "imsic_axi4")
  exposeOptionIO(soc.imsic_m_tl, "imsic_m_tl")
  exposeOptionIO(soc.imsic_s_tl, "imsic_s_tl")
  exposeOptionIO(soc.imsic, "imsic")
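  // exposeIO/exposeOptionIO clone each SoC-level port with chiselTypeOf, rename it via
  // suggestName and bulk-connect it, so Difftest and the checker below can bind to the
  // XSDiffTop boundary purely by port name.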

  // TODO:
  // XSDiffTop is only part of the DUT, so we cannot instantiate Difftest here.
  // For now we collect performance counters per DiffTop; the control signals need to be passed in from Difftest.
  val timer = IO(Input(UInt(64.W)))
  val logEnable = IO(Input(Bool()))
  val clean = IO(Input(Bool()))
  val dump = IO(Input(Bool()))
  XSLog.collect(timer, logEnable, clean, dump)
  DifftestWiring.createAndConnectExtraIOs()
  Profile.generateJson("XiangShan")
  XSNoCDiffTopChecker()
}

// TODO:
// Currently we use a two-step XiangShan-Difftest flow, generating XS (with the Diff interface only) and Difftest separately.
// To avoid potential interface problems between XS and Diff, we add a Checker and CI (dual-core).
// We will try one-step XS-Diff later.
object XSNoCDiffTopChecker {
  def apply(): Unit = {
    val verilog =
      """
        |`define CONFIG_XSCORE_NR 2
        |`include "gateway_interface.svh"
        |module XSDiffTopChecker(
        |    input cpu_clk,
        |    input cpu_rstn,
        |    input sys_clk,
        |    input sys_rstn
        |);
        |wire [63:0] timer;
        |wire logEnable;
        |wire clean;
        |wire dump;
        |// FIXME: use signals from Difftest rather than default values
        |assign timer = 64'b0;
        |assign logEnable = 1'b0;
        |assign clean = 1'b0;
        |assign dump = 1'b0;
        |gateway_if gateway_if_i();
        |core_if core_if_o[`CONFIG_XSCORE_NR]();
        |generate
        |    genvar i;
        |    for (i = 0; i < `CONFIG_XSCORE_NR; i = i+1)
        |    begin: u_CPU_TOP
        |    // FIXME: add missing ports
        |    XSDiffTop u_XSTop (
        |        .clock (cpu_clk),
        |        .noc_clock (sys_clk),
        |        .soc_clock (sys_clk),
        |        .io_hartId (6'h0 + i),
        |        .timer (timer),
        |        .logEnable (logEnable),
        |        .clean (clean),
        |        .dump (dump),
        |        .gateway_out (core_if_o[i])
        |    );
        |    end
        |endgenerate
        |    CoreToGateway u_CoreToGateway(
        |        .gateway_out (gateway_if_i.out),
        |        .core_in (core_if_o)
        |    );
        |    GatewayEndpoint u_GatewayEndpoint(
        |        .clock (sys_clk),
        |        .reset (sys_rstn),
        |        .gateway_in (gateway_if_i.in),
        |        .step ()
        |    );
        |
        |endmodule
      """.stripMargin
    FileRegisters.writeOutputFile("./build", "XSDiffTopChecker.sv", verilog)
  }
}
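// Note: the SystemVerilog above instantiates `CONFIG_XSCORE_NR` XSDiffTop cores on a shared
// sys_clk, ties the timer/log/clean/dump controls to constants (see the FIXMEs), and routes
// each core's interface through CoreToGateway into the Difftest GatewayEndpoint;
// FileRegisters.writeOutputFile emits it as build/XSDiffTopChecker.sv.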