/***************************************************************************************
* Copyright (c) 2024 Beijing Institute of Open Source Chip (BOSC)
* Copyright (c) 2024 Institute of Computing Technology, Chinese Academy of Sciences
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
* http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package top

import chisel3._
import chisel3.util._
import chisel3.experimental.dataview._
import xiangshan._
import utils._
import utility._
import system._
import device._
import org.chipsalliance.cde.config._
import freechips.rocketchip.amba.axi4._
import freechips.rocketchip.devices.debug.DebugModuleKey
import freechips.rocketchip.diplomacy._
import freechips.rocketchip.interrupts._
import freechips.rocketchip.tilelink._
import coupledL2.tl2chi.{CHIAsyncBridgeSink, PortIO}
import freechips.rocketchip.tile.MaxHartIdBits
import freechips.rocketchip.util.{AsyncQueueParams, AsyncQueueSource}
import chisel3.experimental.{ChiselAnnotation, annotate}
import sifive.enterprise.firrtl.NestedPrefixModulesAnnotation
import utility.sram.SramBroadcastBundle

import difftest.common.DifftestWiring
import difftest.util.Profile

/** Top-level SoC wrapper for a single XiangShan tile attached to a NoC via CHI.
  *
  * Instantiates one XSTileWrap plus the IMSIC bus frontend, exposes the tile's
  * interrupt sources/sinks, an optional separate DebugModule TileLink port, and
  * (in the module implementation) the CHI port, trace-encoder interface, and
  * low-power/clock-gating control. Requires `enableCHI` (see `require` below);
  * the generated Verilog module is still named "XSTop" via `desiredName`.
  */
class XSNoCTop()(implicit p: Parameters) extends BaseXSSoc with HasSoCParameter
{
  // Keep the emitted module name identical to the non-NoC top.
  override lazy val desiredName: String = "XSTop"

  // Device-tree resource bindings for the generated DTS.
  ResourceBinding {
    val width = ResourceInt(2)
    val model = "freechips,rocketchip-unknown"
    Resource(ResourceAnchors.root, "model").bind(ResourceString(model))
    Resource(ResourceAnchors.root, "compat").bind(ResourceString(model + "-dev"))
    Resource(ResourceAnchors.soc, "compat").bind(ResourceString(model + "-soc"))
    Resource(ResourceAnchors.root, "width").bind(width)
    Resource(ResourceAnchors.soc, "width").bind(width)
    Resource(ResourceAnchors.cpus, "width").bind(ResourceInt(1))
    // Binds every unified TL manager on the xbar to its own DT resource.
    // NOTE(review): defined but not invoked anywhere in this file — confirm it
    // is intentionally unused here (callers may exist in other configs).
    def bindManagers(xbar: TLNexusNode) = {
      ManagerUnification(xbar.edges.in.head.manager.managers).foreach{ manager =>
        manager.resources.foreach(r => r.bind(manager.toResource))
      }
    }
  }

  // This top only supports the CHI-based interconnect.
  require(enableCHI)

  // xstile: single core + private L2, parameterized from the first tile entry.
  val core_with_l2 = LazyModule(new XSTileWrap()(p.alter((site, here, up) => {
    case XSCoreParamsKey => tiles.head
    case PerfCounterOptionsKey => up(PerfCounterOptionsKey).copy(perfDBHartID = tiles.head.HartId)
  })))

  // imsic bus top
  val u_imsic_bus_top = LazyModule(new imsic_bus_top)

  // Interrupt source/sink nodes wired between external IOs and the tile.
  // clint: 2 interrupts (msip, mtip); plic: 2 ports (M/S external); nmi width
  // follows the fields of NonmaskableInterruptIO.
  val clintIntNode = IntSourceNode(IntSourcePortSimple(1, 1, 2))
  val debugIntNode = IntSourceNode(IntSourcePortSimple(1, 1, 1))
  val plicIntNode = IntSourceNode(IntSourcePortSimple(1, 2, 1))
  val nmiIntNode = IntSourceNode(IntSourcePortSimple(1, 1, (new NonmaskableInterruptIO).elements.size))
  val beuIntNode = IntSinkNode(IntSinkPortSimple(1, 1))
  core_with_l2.clintIntNode := clintIntNode
  core_with_l2.debugIntNode := debugIntNode
  core_with_l2.plicIntNode :*= plicIntNode
  core_with_l2.nmiIntNode := nmiIntNode
  beuIntNode := core_with_l2.beuIntNode // bus-error unit interrupt flows outward
  val clint = InModuleBody(clintIntNode.makeIOs())
  val debug = InModuleBody(debugIntNode.makeIOs())
  val plic = InModuleBody(plicIntNode.makeIOs())
  val nmi = InModuleBody(nmiIntNode.makeIOs())
  val beu = InModuleBody(beuIntNode.makeIOs())

  // separate DebugModule bus
  val EnableDMAsync = EnableDMAsyncBridge.isDefined
  // asynchronous bridge sink node (used when the DM bus crosses clock domains)
  val dmAsyncSinkOpt = Option.when(SeperateDMBus && EnableDMAsync)(
    LazyModule(new TLAsyncCrossingSink(EnableDMAsyncBridge.get))
  )
  dmAsyncSinkOpt.foreach(_.node := core_with_l2.dmAsyncSourceOpt.get.node)
  // synchronous sink node (same clock domain; plain pass-through node)
  val dmSyncSinkOpt = Option.when(SeperateDMBus && !EnableDMAsync)(TLTempNode())
  dmSyncSinkOpt.foreach(_ := core_with_l2.dmSyncSourceOpt.get)

  // The Manager Node is only used to make IO. Standalone DM should be used for XSNoCTopConfig
  val dm = Option.when(SeperateDMBus)(TLManagerNode(Seq(
    TLSlavePortParameters.v1(
      managers = Seq(
        TLSlaveParameters.v1(
          address = Seq(p(DebugModuleKey).get.address),
          regionType = RegionType.UNCACHED,
          supportsGet = TransferSizes(1, p(SoCParamsKey).L3BlockSize),
          supportsPutPartial = TransferSizes(1, p(SoCParamsKey).L3BlockSize),
          supportsPutFull = TransferSizes(1, p(SoCParamsKey).L3BlockSize),
          fifoId = Some(0)
        )
      ),
      beatBytes = 8
    )
  )))
  // Crossbar funnels whichever sink (async or sync) exists into the manager.
  val dmXbar = Option.when(SeperateDMBus)(TLXbar())
  dmAsyncSinkOpt.foreach(sink => dmXbar.get := sink.node)
  dmSyncSinkOpt.foreach(sink => dmXbar.get := sink)
  dm.foreach(_ := dmXbar.get)
  // separate debug module io
  val io_dm = dm.map(x => InModuleBody(x.makeIOs()))

  // reset nodes: core soft-reset source, tied off inside the module imp below
  val core_rst_node = BundleBridgeSource(() => Reset())
  core_with_l2.tile.core_reset_sink := core_rst_node

  /** Module implementation: raw clocks/resets, CHI/IMSIC/trace IOs, low-power
    * and WFI clock-gating state machines, and all the glue between external
    * pins and the tile. Uses LazyRawModuleImp so no implicit clock/reset exist;
    * every domain (cpu, noc, soc) is explicit.
    */
  class XSNoCTopImp(wrapper: XSNoCTop) extends LazyRawModuleImp(wrapper) {
    // Optionally prefix all generated module names (for multi-die integration).
    soc.XSTopPrefix.foreach { prefix =>
      val mod = this.toNamed
      annotate(new ChiselAnnotation {
        def toFirrtl = NestedPrefixModulesAnnotation(mod, prefix, true)
      })
    }
    // Emit collateral files alongside the RTL.
    FileRegisters.add("dts", dts)
    FileRegisters.add("graphml", graphML)
    FileRegisters.add("json", json)
    FileRegisters.add("plusArgs", freechips.rocketchip.util.PlusArgArtefacts.serialize_cHeader())

    // Three clock/reset domains: cpu (clock/reset), optional NoC-side domain
    // (only when the CHI async bridge is enabled), and SoC-side peripherals.
    val clock = IO(Input(Clock()))
    val reset = IO(Input(AsyncReset()))
    val noc_clock = EnableCHIAsyncBridge.map(_ => IO(Input(Clock())))
    val noc_reset = EnableCHIAsyncBridge.map(_ => IO(Input(AsyncReset())))
    val soc_clock = IO(Input(Clock()))
    val soc_reset = IO(Input(AsyncReset()))
    private val hasMbist = tiles.head.hasMbist
    val io = IO(new Bundle {
      val hartId = Input(UInt(p(MaxHartIdBits).W))
      val riscv_halt = Output(Bool())
      val riscv_critical_error = Output(Bool())
      val hartResetReq = Input(Bool())
      val hartIsInReset = Output(Bool())
      val riscv_rst_vec = Input(UInt(soc.PAddrBits.W))
      val chi = new PortIO
      val nodeID = Input(UInt(soc.NodeIDWidthList(issue).W))
      val clintTime = Input(ValidIO(UInt(64.W)))
      // RISC-V trace encoder interface; group fields are flattened to UInts.
      val traceCoreInterface = new Bundle {
        val fromEncoder = Input(new Bundle {
          val enable = Bool()
          val stall = Bool()
        })
        val toEncoder = Output(new Bundle {
          val cause = UInt(TraceCauseWidth.W)
          val tval = UInt(TraceTvalWidth.W)
          val priv = UInt(TracePrivWidth.W)
          val iaddr = UInt((TraceTraceGroupNum * TraceIaddrWidth).W)
          val itype = UInt((TraceTraceGroupNum * TraceItypeWidth).W)
          val iretire = UInt((TraceTraceGroupNum * TraceIretireWidthCompressed).W)
          val ilastsize = UInt((TraceTraceGroupNum * TraceIlastsizeWidth).W)
        })
      }
      // DFT/MBIST broadcast and reset controls, present only with MBIST.
      val dft = if(hasMbist) Some(Input(new SramBroadcastBundle)) else None
      val dft_reset = if(hasMbist) Some(Input(new DFTResetSignals())) else None
      // Low-power handshake with the SoC power controller.
      val lp = Option.when(EnablePowerDown) (new LowPowerIO)
    })
    // imsic axi4 io (address width forced to 32 bits)
    val imsic_axi4 = wrapper.u_imsic_bus_top.axi4.map(x => IO(Flipped(new VerilogAXI4Record(x.elts.head.params.copy(addrBits = 32)))))
    // imsic tl io
    val imsic_m_tl = wrapper.u_imsic_bus_top.tl_m.map(x => IO(chiselTypeOf(x.getWrappedValue)))
    val imsic_s_tl = wrapper.u_imsic_bus_top.tl_s.map(x => IO(chiselTypeOf(x.getWrappedValue)))
    // imsic bare io
    val imsic = wrapper.u_imsic_bus_top.module.msi.map(x => IO(chiselTypeOf(x)))

    // Two-stage reset synchronizers per domain (DFT-controllable).
    val noc_reset_sync = EnableCHIAsyncBridge.map(_ => withClockAndReset(noc_clock, noc_reset) { ResetGen(2, io.dft_reset) })
    val soc_reset_sync = withClockAndReset(soc_clock, soc_reset) { ResetGen(2, io.dft_reset) }
    wrapper.core_with_l2.module.io.dft.zip(io.dft).foreach({case(a, b) => a := b})
    wrapper.core_with_l2.module.io.dft_reset.zip(io.dft_reset).foreach({case(a, b) => a := b})
    // device clock and reset: IMSIC bus lives in the SoC domain
    wrapper.u_imsic_bus_top.module.clock := soc_clock
    wrapper.u_imsic_bus_top.module.reset := soc_reset_sync

    // imsic axi4 io connection
    imsic_axi4.foreach(_.viewAs[AXI4Bundle] <> wrapper.u_imsic_bus_top.axi4.get.elements.head._2)
    // imsic tl io connection
    wrapper.u_imsic_bus_top.tl_m.foreach(_ <> imsic_m_tl.get)
    wrapper.u_imsic_bus_top.tl_s.foreach(_ <> imsic_s_tl.get)
    // imsic bare io connection
    wrapper.u_imsic_bus_top.module.msi.foreach(_ <> imsic.get)

    // keep all top-level IOs through optimization
    dontTouch(io)

    /*
     SoC control the sequence of power on/off with isolation/reset/clock
     */
    // Defaults keep the core out of reset / isolation when no LP interface exists.
    val soc_rst_n = io.lp.map(_.i_cpu_sw_rst_n).getOrElse(true.B)
    val soc_iso_en = io.lp.map(_.i_cpu_iso_en).getOrElse(false.B)

    /* Core+L2 reset when:
     1. normal reset from SoC
     2. SoC initialize reset during Power on/off flow
     */
    val cpuReset = reset.asBool || !soc_rst_n

    // Interrupt sources collected into one vector for the WFI wake-up logic.
    val msip = clint.head(0)
    val mtip = clint.head(1)
    val meip = plic.head(0)
    val seip = plic.last(0)
    val nmi_31 = nmi.head(0)
    val nmi_43 = nmi.head(1)
    val msi_info_vld = core_with_l2.module.io.msiInfo.valid
    val intSrc = Cat(msip, mtip, meip, seip, nmi_31, nmi_43, msi_info_vld)

    /*
     * CPU Low Power State:
     * 1. core+L2 Low power state transactions is triggered by l2 flush request from core CSR
     * 2. wait L2 flush done
     * 3. wait Core to wfi -> send out < io.o_cpu_no_op >
     */
    val sIDLE :: sL2FLUSH :: sWAITWFI :: sEXITCO :: sPOFFREQ :: Nil = Enum(5)
    // State register is async-reset by the CPU-domain reset so it recovers
    // even while the core clock is gated.
    val lpState = withClockAndReset(clock, cpuReset.asAsyncReset) {RegInit(sIDLE)}
    val l2_flush_en = core_with_l2.module.io.l2_flush_en.getOrElse(false.B)
    val l2_flush_done = core_with_l2.module.io.l2_flush_done.getOrElse(false.B)
    val isWFI = core_with_l2.module.io.cpu_halt
    // CHI coherency disconnect complete when both request and ack are low.
    val exitco = !io.chi.syscoreq & !io.chi.syscoack
    lpState := lpStateNext(lpState, l2_flush_en, l2_flush_done, isWFI, exitco)
    io.lp.foreach { lp => lp.o_cpu_no_op := lpState === sPOFFREQ } // inform SoC core+l2 want to power off

    /* WFI clock Gating state
     1. works only when lpState is IDLE means Core+L2 works in normal state
     2. when Core is in wfi state, core+l2 clock is gated
     3. only reset/interrupt/snoop could recover core+l2 clock
     */
    val sNORMAL :: sGCLOCK :: sAWAKE :: Nil = Enum(3)
    val wfiState = withClockAndReset(clock, cpuReset.asAsyncReset) {RegInit(sNORMAL)}
    val isNormal = lpState === sIDLE
    val wfiGateClock = withClockAndReset(clock, cpuReset.asAsyncReset) {RegInit(false.B)}
    wfiState := WfiStateNext(wfiState, isWFI, isNormal, io.chi.rx.snp.flitpend, intSrc)

    // WFI-based clock gating is a compile-time option.
    if (WFIClockGate) {
      wfiGateClock := (wfiState === sGCLOCK)
    }else {
      wfiGateClock := false.B
    }



    /* during power down sequence, SoC reset will gate clock */
    val pwrdownGateClock = withClockAndReset(clock, cpuReset.asAsyncReset) {RegInit(false.B)}
    pwrdownGateClock := !soc_rst_n && lpState === sPOFFREQ
    /*
     physical power off handshake:
     i_cpu_pwrdown_req_n
     o_cpu_pwrdown_ack_n means all power is safely on
     */
    val soc_pwrdown_n = io.lp.map(_.i_cpu_pwrdown_req_n).getOrElse(true.B)
    io.lp.foreach { lp => lp.o_cpu_pwrdown_ack_n := core_with_l2.module.io.pwrdown_ack_n.getOrElse(true.B) }


    /* Core+L2 hardware initial clock gating as:
     1. Gate clock when SoC reset CPU with < io.i_cpu_sw_rst_n > valid
     2. Gate clock when SoC is enable clock (Core+L2 in normal state) and core is in wfi state
     3. Disable clock gate at the cycle of Flitpend valid in rx.snp channel
     */
    // NOTE(review): mixes && and | — by Scala precedence this parses as
    // (!wfiGateClock && !pwrdownGateClock) | flitpend, i.e. an incoming snoop
    // flit unconditionally re-enables the clock; confirm that is the intent.
    val cpuClockEn = !wfiGateClock && !pwrdownGateClock | io.chi.rx.snp.flitpend

    dontTouch(wfiGateClock)
    dontTouch(pwrdownGateClock)
    dontTouch(cpuClockEn)

    // Core clock is gated by the combined WFI/power-down enables.
    core_with_l2.module.clock := ClockGate(false.B, cpuClockEn, clock)
    core_with_l2.module.reset := cpuReset.asAsyncReset
    core_with_l2.module.noc_reset.foreach(_ := noc_reset.get)
    core_with_l2.module.soc_reset := soc_reset
    core_with_l2.module.io.hartId := io.hartId
    core_with_l2.module.io.nodeID.get := io.nodeID
    io.riscv_halt := core_with_l2.module.io.cpu_halt
    // (port name "cpu_crtical_error" is spelled this way in the tile interface)
    io.riscv_critical_error := core_with_l2.module.io.cpu_crtical_error
    core_with_l2.module.io.hartResetReq := io.hartResetReq
    io.hartIsInReset := core_with_l2.module.io.hartIsInReset
    core_with_l2.module.io.reset_vector := io.riscv_rst_vec
    // Isolation / power-down requests are tied off at this top level.
    core_with_l2.module.io.iso_en.foreach { _ := false.B }
    core_with_l2.module.io.pwrdown_req_n.foreach { _ := true.B }
    // trace Interface: flatten per-group trace records into wide UInt buses
    val traceInterface = core_with_l2.module.io.traceCoreInterface
    traceInterface.fromEncoder := io.traceCoreInterface.fromEncoder
    io.traceCoreInterface.toEncoder.priv := traceInterface.toEncoder.priv
    io.traceCoreInterface.toEncoder.cause := traceInterface.toEncoder.trap.cause
    io.traceCoreInterface.toEncoder.tval := traceInterface.toEncoder.trap.tval
    io.traceCoreInterface.toEncoder.iaddr := VecInit(traceInterface.toEncoder.groups.map(_.bits.iaddr)).asUInt
    io.traceCoreInterface.toEncoder.itype := VecInit(traceInterface.toEncoder.groups.map(_.bits.itype)).asUInt
    io.traceCoreInterface.toEncoder.iretire := VecInit(traceInterface.toEncoder.groups.map(_.bits.iretire)).asUInt
    io.traceCoreInterface.toEncoder.ilastsize := VecInit(traceInterface.toEncoder.groups.map(_.bits.ilastsize)).asUInt

    // CLINT time crosses from the SoC domain to the CPU domain through an
    // async queue when the bridge is configured, otherwise connects directly.
    EnableClintAsyncBridge match {
      case Some(param) =>
        withClockAndReset(soc_clock, soc_reset_sync) {
          val source = Module(new AsyncQueueSource(UInt(64.W), param))
          source.io.enq.valid := io.clintTime.valid
          source.io.enq.bits := io.clintTime.bits
          core_with_l2.module.io.clintTime <> source.io.async
        }
      case None =>
        core_with_l2.module.io.clintTime <> io.clintTime
    }

    // CHI port crosses to the NoC domain through the CHI async bridge sink
    // when configured, otherwise connects directly.
    EnableCHIAsyncBridge match {
      case Some(param) =>
        withClockAndReset(noc_clock.get, noc_reset_sync.get) {
          val sink = Module(new CHIAsyncBridgeSink(param))
          sink.io.async <> core_with_l2.module.io.chi
          io.chi <> sink.io.deq
        }
      case None =>
        io.chi <> core_with_l2.module.io.chi
    }

    // Separate DebugModule TL Async Queue Sink runs in the SoC domain.
    if (SeperateDMBus && EnableDMAsync) {
      dmAsyncSinkOpt.get.module.clock := soc_clock
      dmAsyncSinkOpt.get.module.reset := soc_reset_sync
    }

    // MSI info valid/ack handshake between the IMSIC bus and the tile.
    core_with_l2.module.io.msiInfo.valid := wrapper.u_imsic_bus_top.module.msiio.vld_req
    core_with_l2.module.io.msiInfo.bits := wrapper.u_imsic_bus_top.module.msiio.data
    wrapper.u_imsic_bus_top.module.msiio.vld_ack := core_with_l2.module.io.msiAck
    // tie off core soft reset
    core_rst_node.out.head._1 := false.B.asAsyncReset

    // No L3 in this top: debug top-down signals are tied off.
    core_with_l2.module.io.debugTopDown.l3MissMatch := false.B
    core_with_l2.module.io.l3Miss := false.B
  }

  lazy val module = new XSNoCTopImp(this)
}

/** Difftest-oriented wrapper around [[XSNoCTop]] that re-exposes the lazy
  * module's IOs at this Module's boundary (emitted as "XSDiffTop").
  */
class XSNoCDiffTop(implicit p: Parameters) extends Module {
  override val desiredName: String = "XSDiffTop"
  val l_soc = LazyModule(new XSNoCTop())
  val soc = Module(l_soc.module)

  // Expose XSTop IOs outside, i.e. io
  // Mirrors an inner IO as a new top-level port with the given name.
  def exposeIO(data: Data, name: String): Unit = {
    val dummy = IO(chiselTypeOf(data)).suggestName(name)
    dummy <> data
  }
  // Same as exposeIO, but only creates the port when the optional IO exists.
  def exposeOptionIO(data: Option[Data], name: String): Unit = {
    if (data.isDefined) {
      val dummy = IO(chiselTypeOf(data.get)).suggestName(name)
      dummy <> data.get
    }
  }
  exposeIO(l_soc.clint, "clint")
  exposeIO(l_soc.debug, "debug")
  exposeIO(l_soc.plic, "plic")
  exposeIO(l_soc.beu, "beu")
  exposeIO(l_soc.nmi, "nmi")
  // Drive the inner raw-module clock/reset from this Module's implicit ones.
  soc.clock := clock
  soc.reset := reset.asAsyncReset
  exposeIO(soc.soc_clock, "soc_clock")
  exposeIO(soc.soc_reset, "soc_reset")
  exposeIO(soc.io, "io")
  exposeOptionIO(soc.noc_clock, "noc_clock")
  exposeOptionIO(soc.noc_reset, "noc_reset")
  exposeOptionIO(soc.imsic_axi4, "imsic_axi4")
  exposeOptionIO(soc.imsic_m_tl, "imsic_m_tl")
  exposeOptionIO(soc.imsic_s_tl, "imsic_s_tl")
  exposeOptionIO(soc.imsic, "imsic")

  // TODO:
  // XSDiffTop is only part of DUT, we can not instantiate difftest here.
  // Temporarily we collect Performance counters for each DiffTop, need control signals passed from Difftest
  val timer = IO(Input(UInt(64.W)))
  val logEnable = IO(Input(Bool()))
  val clean = IO(Input(Bool()))
  val dump = IO(Input(Bool()))
  XSLog.collect(timer, logEnable, clean, dump)
  DifftestWiring.createAndConnectExtraIOs()
  Profile.generateJson("XiangShan")
  XSNoCDiffTopChecker()
}

// TODO:
// Currently we use two-step XiangShan-Difftest, generating XS(with Diff Interface only) and Difftest separately
// To avoid potential interface problem between XS and Diff, we add Checker and CI(dual-core)
// We will try one-step XS-Diff later
/** Writes a fixed SystemVerilog checker testbench (dual-core XSDiffTop +
  * Difftest gateway) to build/XSDiffTopChecker.sv. The template is emitted
  * verbatim — keep the string byte-exact.
  */
object XSNoCDiffTopChecker {
  def apply(): Unit = {
    val verilog =
      """
      |`define CONFIG_XSCORE_NR 2
      |`include "gateway_interface.svh"
      |module XSDiffTopChecker(
      | input cpu_clk,
      | input cpu_rstn,
      | input sys_clk,
      | input sys_rstn
      |);
      |wire [63:0] timer;
      |wire logEnable;
      |wire clean;
      |wire dump;
      |// FIXME: use siganls from Difftest rather than default value
      |assign timer = 64'b0;
      |assign logEnable = 1'b0;
      |assign clean = 1'b0;
      |assign dump = 1'b0;
      |gateway_if gateway_if_i();
      |core_if core_if_o[`CONFIG_XSCORE_NR]();
      |generate
      | genvar i;
      | for (i = 0; i < `CONFIG_XSCORE_NR; i = i+1)
      | begin: u_CPU_TOP
      | // FIXME: add missing ports
      | XSDiffTop u_XSTop (
      | .clock (cpu_clk),
      | .noc_clock (sys_clk),
      | .soc_clock (sys_clk),
      | .io_hartId (6'h0 + i),
      | .timer (timer),
      | .logEnable (logEnable),
      | .clean (clean),
      | .dump (dump),
      | .gateway_out (core_if_o[i])
      | );
      | end
      |endgenerate
      | CoreToGateway u_CoreToGateway(
      | .gateway_out (gateway_if_i.out),
      | .core_in (core_if_o)
      | );
      | GatewayEndpoint u_GatewayEndpoint(
      | .clock (sys_clk),
      | .reset (sys_rstn),
      | .gateway_in (gateway_if_i.in),
      | .step ()
      | );
      |
      |endmodule
      """.stripMargin
    FileRegisters.writeOutputFile("./build", "XSDiffTopChecker.sv", verilog)
  }
}