/***************************************************************************************
* Copyright (c) 2024 Beijing Institute of Open Source Chip (BOSC)
* Copyright (c) 2024 Institute of Computing Technology, Chinese Academy of Sciences
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package top

import chisel3._
import chisel3.util._
import xiangshan._
import utils._
import utility._
import utility.sram.SramMbistBundle
import system._
import device._
import org.chipsalliance.cde.config._
import freechips.rocketchip.amba.axi4._
import freechips.rocketchip.devices.debug.DebugModuleKey
import freechips.rocketchip.diplomacy._
import freechips.rocketchip.interrupts._
import freechips.rocketchip.tilelink._
import coupledL2.tl2chi.{CHIAsyncBridgeSink, PortIO}
import freechips.rocketchip.tile.MaxHartIdBits
import freechips.rocketchip.util.{AsyncQueueParams, AsyncQueueSource}
import chisel3.experimental.{ChiselAnnotation, annotate}
import sifive.enterprise.firrtl.NestedPrefixModulesAnnotation

import difftest.common.DifftestWiring
import difftest.util.Profile

class XSNoCTop()(implicit p: Parameters) extends BaseXSSoc with HasSoCParameter
{
  override lazy val desiredName: String = "XSTop"

  ResourceBinding {
    val width = ResourceInt(2)
    val model = "freechips,rocketchip-unknown"
    Resource(ResourceAnchors.root, "model").bind(ResourceString(model))
    Resource(ResourceAnchors.root, "compat").bind(ResourceString(model + "-dev"))
    Resource(ResourceAnchors.soc, "compat").bind(ResourceString(model + "-soc"))
    Resource(ResourceAnchors.root, "width").bind(width)
    Resource(ResourceAnchors.soc, "width").bind(width)
    Resource(ResourceAnchors.cpus, "width").bind(ResourceInt(1))
    def bindManagers(xbar: TLNexusNode) = {
      ManagerUnification(xbar.edges.in.head.manager.managers).foreach{ manager =>
        manager.resources.foreach(r => r.bind(manager.toResource))
      }
    }
  }

  require(enableCHI)

  // xstile
  val core_with_l2 = LazyModule(new XSTileWrap()(p.alter((site, here, up) => {
    case XSCoreParamsKey => tiles.head
    case PerfCounterOptionsKey => up(PerfCounterOptionsKey).copy(perfDBHartID = tiles.head.HartId)
  })))

  // imsic bus top
  val u_imsic_bus_top = LazyModule(new imsic_bus_top(
    useTL = soc.IMSICUseTL,
    baseAddress = (0x3A800000, 0x3B000000)
  ))

  // interrupts
  val clintIntNode = IntSourceNode(IntSourcePortSimple(1, 1, 2))
  val debugIntNode = IntSourceNode(IntSourcePortSimple(1, 1, 1))
  val plicIntNode = IntSourceNode(IntSourcePortSimple(1, 2, 1))
  val nmiIntNode = IntSourceNode(IntSourcePortSimple(1, 1, (new NonmaskableInterruptIO).elements.size))
  val beuIntNode = IntSinkNode(IntSinkPortSimple(1, 1))
  core_with_l2.clintIntNode := clintIntNode
  core_with_l2.debugIntNode := debugIntNode
  core_with_l2.plicIntNode :*= plicIntNode
  core_with_l2.nmiIntNode := nmiIntNode
  beuIntNode := core_with_l2.beuIntNode
  val clint = InModuleBody(clintIntNode.makeIOs())
  val debug = InModuleBody(debugIntNode.makeIOs())
  val plic = InModuleBody(plicIntNode.makeIOs())
  val nmi = InModuleBody(nmiIntNode.makeIOs())
  val beu = InModuleBody(beuIntNode.makeIOs())

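  // Optional separate TL bus (used by the DebugModule): the core-side TL source crosses into
  // this top either asynchronously or synchronously, is merged by a crossbar, and terminates
  // in a manager node that exists only to expose IOs.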
  // asynchronous bridge sink node
  val tlAsyncSinkOpt = Option.when(SeperateTLBus && EnableSeperateTLAsync)(
    LazyModule(new TLAsyncCrossingSink(SeperateTLAsyncBridge.get))
  )
  tlAsyncSinkOpt.foreach(_.node := core_with_l2.tlAsyncSourceOpt.get.node)
  // synchronous sink node
  val tlSyncSinkOpt = Option.when(SeperateTLBus && !EnableSeperateTLAsync)(TLTempNode())
  tlSyncSinkOpt.foreach(_ := core_with_l2.tlSyncSourceOpt.get)

  // The manager node is only used to make IOs
  val tl = Option.when(SeperateTLBus)(TLManagerNode(Seq(
    TLSlavePortParameters.v1(
      managers = SeperateTLBusRanges map { address =>
        TLSlaveParameters.v1(
          address = Seq(address),
          regionType = RegionType.UNCACHED,
          executable = true,
          supportsGet = TransferSizes(1, p(SoCParamsKey).L3BlockSize),
          supportsPutPartial = TransferSizes(1, p(SoCParamsKey).L3BlockSize),
          supportsPutFull = TransferSizes(1, p(SoCParamsKey).L3BlockSize),
          fifoId = Some(0)
        )
      },
      beatBytes = 8
    )
  )))
  val tlXbar = Option.when(SeperateTLBus)(TLXbar())
  tlAsyncSinkOpt.foreach(sink => tlXbar.get := sink.node)
  tlSyncSinkOpt.foreach(sink => tlXbar.get := sink)
  tl.foreach(_ := tlXbar.get)
  // separate TL IO
  val io_tl = tl.map(x => InModuleBody(x.makeIOs()))

  // reset nodes
  val core_rst_node = BundleBridgeSource(() => Reset())
  core_with_l2.tile.core_reset_sink := core_rst_node

  class XSNoCTopImp(wrapper: XSNoCTop) extends LazyRawModuleImp(wrapper) {
    soc.XSTopPrefix.foreach { prefix =>
      val mod = this.toNamed
      annotate(new ChiselAnnotation {
        def toFirrtl = NestedPrefixModulesAnnotation(mod, prefix, true)
      })
    }
    FileRegisters.add("dts", dts)
    FileRegisters.add("graphml", graphML)
    FileRegisters.add("json", json)
    FileRegisters.add("plusArgs", freechips.rocketchip.util.PlusArgArtefacts.serialize_cHeader())

    val clock = IO(Input(Clock()))
    val reset = IO(Input(AsyncReset()))
    val noc_clock = EnableCHIAsyncBridge.map(_ => IO(Input(Clock())))
    val noc_reset = EnableCHIAsyncBridge.map(_ => IO(Input(AsyncReset())))
    val soc_clock = IO(Input(Clock()))
    val soc_reset = IO(Input(AsyncReset()))
    private val hasMbist = tiles.head.hasMbist
    private val hasSramCtl = tiles.head.hasSramCtl
    val io = IO(new Bundle {
      val hartId = Input(UInt(p(MaxHartIdBits).W))
      val riscv_halt = Output(Bool())
      val riscv_critical_error = Output(Bool())
      val hartResetReq = Input(Bool())
      val hartIsInReset = Output(Bool())
      val riscv_rst_vec = Input(UInt(soc.PAddrBits.W))
      val chi = new PortIO
      val nodeID = Input(UInt(soc.NodeIDWidthList(issue).W))
      val clintTime = Input(ValidIO(UInt(64.W)))
      val traceCoreInterface = new Bundle {
        val fromEncoder = Input(new Bundle {
          val enable = Bool()
          val stall = Bool()
        })
        val toEncoder = Output(new Bundle {
          val cause = UInt(TraceCauseWidth.W)
          val tval = UInt(TraceTvalWidth.W)
          val priv = UInt(TracePrivWidth.W)
          val iaddr = UInt((TraceTraceGroupNum * TraceIaddrWidth).W)
          val itype = UInt((TraceTraceGroupNum * TraceItypeWidth).W)
          val iretire = UInt((TraceTraceGroupNum * TraceIretireWidthCompressed).W)
          val ilastsize = UInt((TraceTraceGroupNum * TraceIlastsizeWidth).W)
        })
      }
      val sramTest = new Bundle() {
        val mbist = Option.when(hasMbist)(Input(new SramMbistBundle))
        val mbistReset = Option.when(hasMbist)(Input(new DFTResetSignals()))
        val sramCtl = Option.when(hasSramCtl)(Input(UInt(64.W)))
      }
      val lp = Option.when(EnablePowerDown)(new LowPowerIO)
    })
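    // IMSIC bus IOs are optional: each is created only when u_imsic_bus_top provides the
    // corresponding interface (AXI4-Lite or TileLink); an absent interface elaborates to None.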
    // imsic axi4lite io
    val imsic_axi4lite = wrapper.u_imsic_bus_top.module.axi4lite.map(x => IO(chiselTypeOf(x)))
    // imsic tl io
    val imsic_m_tl = wrapper.u_imsic_bus_top.tl_m.map(x => IO(chiselTypeOf(x.getWrappedValue)))
    val imsic_s_tl = wrapper.u_imsic_bus_top.tl_s.map(x => IO(chiselTypeOf(x.getWrappedValue)))

    val noc_reset_sync = EnableCHIAsyncBridge.map(_ => withClockAndReset(noc_clock.get, noc_reset.get) { ResetGen(2, io.sramTest.mbistReset) })
    val soc_reset_sync = withClockAndReset(soc_clock, soc_reset) { ResetGen(2, io.sramTest.mbistReset) }
    wrapper.core_with_l2.module.io.sramTest.mbist.zip(io.sramTest.mbist).foreach({ case (a, b) => a := b })
    wrapper.core_with_l2.module.io.sramTest.mbistReset.zip(io.sramTest.mbistReset).foreach({ case (a, b) => a := b })
    wrapper.core_with_l2.module.io.sramTest.sramCtl.zip(io.sramTest.sramCtl).foreach({ case (a, b) => a := b })
    // device clock and reset
    wrapper.u_imsic_bus_top.module.clock := soc_clock
    wrapper.u_imsic_bus_top.module.reset := soc_reset_sync

    // imsic axi4lite io connection
    wrapper.u_imsic_bus_top.module.axi4lite.foreach(_ <> imsic_axi4lite.get)

    // imsic tl io connection
    wrapper.u_imsic_bus_top.tl_m.foreach(_ <> imsic_m_tl.get)
    wrapper.u_imsic_bus_top.tl_s.foreach(_ <> imsic_s_tl.get)

    // input
    dontTouch(io)

    /*
     * The SoC controls the power on/off sequence through isolation, reset and clock.
     */
    val soc_rst_n = io.lp.map(_.i_cpu_sw_rst_n).getOrElse(true.B)
    val soc_iso_en = io.lp.map(_.i_cpu_iso_en).getOrElse(false.B)

    /* Core+L2 is reset when:
       1. a normal reset comes from the SoC
       2. the SoC initiates a reset during the power on/off flow
     */
    val cpuReset = reset.asBool || !soc_rst_n

    // Collect interrupt sources
    val msip = clint.head(0)
    val mtip = clint.head(1)
    val meip = plic.head(0)
    val seip = plic.last(0)
    val nmi_31 = nmi.head(0)
    val nmi_43 = nmi.head(1)
    val msi_info_vld = core_with_l2.module.io.msiInfo.valid
    val intSrc = Cat(msip, mtip, meip, seip, nmi_31, nmi_43, msi_info_vld)

    /*
     * CPU low power state:
     * 1. the core+L2 low power transition is triggered by an L2 flush request from the core CSR
     * 2. wait until the L2 flush is done
     * 3. wait for the core to enter WFI, then send out <io.o_cpu_no_op>
     */
    val sIDLE :: sL2FLUSH :: sWAITWFI :: sEXITCO :: sPOFFREQ :: Nil = Enum(5)
    val lpState = withClockAndReset(clock, cpuReset.asAsyncReset) { RegInit(sIDLE) }
    val l2_flush_en = core_with_l2.module.io.l2_flush_en.getOrElse(false.B)
    val l2_flush_done = core_with_l2.module.io.l2_flush_done.getOrElse(false.B)
    val isWFI = core_with_l2.module.io.cpu_halt
    val exitco = !io.chi.syscoreq & !io.chi.syscoack
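    // Intended sequence (lpStateNext is defined elsewhere; the conditions below are inferred from
    // the signal and state names): sIDLE -> sL2FLUSH on l2_flush_en, -> sWAITWFI once l2_flush_done,
    // -> sEXITCO when the core reports WFI, -> sPOFFREQ after coherency has been exited (exitco),
    // at which point o_cpu_no_op is asserted towards the SoC.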
    lpState := lpStateNext(lpState, l2_flush_en, l2_flush_done, isWFI, exitco)
    io.lp.foreach { lp => lp.o_cpu_no_op := lpState === sPOFFREQ } // inform the SoC that core+L2 wants to power off

    /* WFI clock gating state:
       1. active only when lpState is sIDLE, i.e. Core+L2 is in the normal state
       2. when the core is in WFI, the core+L2 clock is gated
       3. only reset/interrupt/snoop can recover the core+L2 clock
     */
    val sNORMAL :: sGCLOCK :: sAWAKE :: Nil = Enum(3)
    val wfiState = withClockAndReset(clock, cpuReset.asAsyncReset) { RegInit(sNORMAL) }
    val isNormal = lpState === sIDLE
    val wfiGateClock = withClockAndReset(clock, cpuReset.asAsyncReset) { RegInit(false.B) }
    wfiState := WfiStateNext(wfiState, isWFI, isNormal, io.chi.rx.snp.flitpend, intSrc)

    if (WFIClockGate) {
      wfiGateClock := (wfiState === sGCLOCK)
    } else {
      wfiGateClock := false.B
    }

    /* During the power-down sequence, SoC reset will gate the clock */
    val pwrdownGateClock = withClockAndReset(clock, cpuReset.asAsyncReset) { RegInit(false.B) }
    pwrdownGateClock := !soc_rst_n && lpState === sPOFFREQ
    /*
      Physical power-off handshake:
        i_cpu_pwrdown_req_n
        o_cpu_pwrdown_ack_n means all power is safely on
     */
    val soc_pwrdown_n = io.lp.map(_.i_cpu_pwrdown_req_n).getOrElse(true.B)
    io.lp.foreach { lp => lp.o_cpu_pwrdown_ack_n := core_with_l2.module.io.pwrdown_ack_n.getOrElse(true.B) }

    /* Core+L2 hardware initial clock gating:
       1. gate the clock when the SoC resets the CPU with <io.i_cpu_sw_rst_n> valid
       2. gate the clock when the SoC enables the clock (Core+L2 in the normal state) and the core is in WFI
       3. disable clock gating in the cycle where flitpend is valid on the rx.snp channel
     */
    val cpuClockEn = !wfiGateClock && !pwrdownGateClock | io.chi.rx.snp.flitpend

    dontTouch(wfiGateClock)
    dontTouch(pwrdownGateClock)
    dontTouch(cpuClockEn)

    core_with_l2.module.clock := ClockGate(false.B, cpuClockEn, clock)
    core_with_l2.module.reset := cpuReset.asAsyncReset
    core_with_l2.module.noc_reset.foreach(_ := noc_reset.get)
    core_with_l2.module.soc_reset := soc_reset
    core_with_l2.module.io.hartId := io.hartId
    core_with_l2.module.io.nodeID.get := io.nodeID
    io.riscv_halt := core_with_l2.module.io.cpu_halt
    io.riscv_critical_error := core_with_l2.module.io.cpu_crtical_error
    core_with_l2.module.io.hartResetReq := io.hartResetReq
    io.hartIsInReset := core_with_l2.module.io.hartIsInReset
    core_with_l2.module.io.reset_vector := io.riscv_rst_vec
    core_with_l2.module.io.iso_en.foreach { _ := false.B }
    core_with_l2.module.io.pwrdown_req_n.foreach { _ := true.B }
    // trace interface
    val traceInterface = core_with_l2.module.io.traceCoreInterface
    traceInterface.fromEncoder := io.traceCoreInterface.fromEncoder
    io.traceCoreInterface.toEncoder.priv := traceInterface.toEncoder.priv
    io.traceCoreInterface.toEncoder.cause := traceInterface.toEncoder.trap.cause
    io.traceCoreInterface.toEncoder.tval := traceInterface.toEncoder.trap.tval
    io.traceCoreInterface.toEncoder.iaddr := VecInit(traceInterface.toEncoder.groups.map(_.bits.iaddr)).asUInt
    io.traceCoreInterface.toEncoder.itype := VecInit(traceInterface.toEncoder.groups.map(_.bits.itype)).asUInt
    io.traceCoreInterface.toEncoder.iretire := VecInit(traceInterface.toEncoder.groups.map(_.bits.iretire)).asUInt
    io.traceCoreInterface.toEncoder.ilastsize := VecInit(traceInterface.toEncoder.groups.map(_.bits.ilastsize)).asUInt

    EnableClintAsyncBridge match {
      case Some(param) =>
        withClockAndReset(soc_clock, soc_reset_sync) {
          val source = Module(new AsyncQueueSource(UInt(64.W), param))
          source.io.enq.valid := io.clintTime.valid
          source.io.enq.bits := io.clintTime.bits
          core_with_l2.module.io.clintTime <> source.io.async
        }
      case None =>
        core_with_l2.module.io.clintTime <> io.clintTime
    }

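    // CHI port crossing: with EnableCHIAsyncBridge configured, core/L2 CHI traffic is taken into
    // the NoC clock domain through a CHIAsyncBridgeSink; otherwise it is wired straight through.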
    EnableCHIAsyncBridge match {
      case Some(param) =>
        withClockAndReset(noc_clock.get, noc_reset_sync.get) {
          val sink = Module(new CHIAsyncBridgeSink(param))
          sink.io.async <> core_with_l2.module.io.chi
          io.chi <> sink.io.deq
        }
      case None =>
        io.chi <> core_with_l2.module.io.chi
    }

    // Separate DebugModule TL async queue sink
    if (SeperateTLBus && EnableSeperateTLAsync) {
      tlAsyncSinkOpt.get.module.clock := soc_clock
      tlAsyncSinkOpt.get.module.reset := soc_reset_sync
    }

    core_with_l2.module.io.msiInfo.valid := wrapper.u_imsic_bus_top.module.o_msi_info_vld
    core_with_l2.module.io.msiInfo.bits.info := wrapper.u_imsic_bus_top.module.o_msi_info
    // tie off core soft reset
    core_rst_node.out.head._1 := false.B.asAsyncReset

    core_with_l2.module.io.debugTopDown.l3MissMatch := false.B
    core_with_l2.module.io.l3Miss := false.B
  }

  lazy val module = new XSNoCTopImp(this)
}

class XSNoCDiffTop(implicit p: Parameters) extends Module {
  override val desiredName: String = "XSDiffTop"
  val l_soc = LazyModule(new XSNoCTop())
  val soc = Module(l_soc.module)

  // Expose XSTop IOs outside, i.e. io
  def exposeIO(data: Data, name: String): Unit = {
    val dummy = IO(chiselTypeOf(data)).suggestName(name)
    dummy <> data
  }
  def exposeOptionIO(data: Option[Data], name: String): Unit = {
    if (data.isDefined) {
      val dummy = IO(chiselTypeOf(data.get)).suggestName(name)
      dummy <> data.get
    }
  }
  exposeIO(l_soc.clint, "clint")
  exposeIO(l_soc.debug, "debug")
  exposeIO(l_soc.plic, "plic")
  exposeIO(l_soc.beu, "beu")
  exposeIO(l_soc.nmi, "nmi")
  soc.clock := clock
  soc.reset := reset.asAsyncReset
  exposeIO(soc.soc_clock, "soc_clock")
  exposeIO(soc.soc_reset, "soc_reset")
  exposeIO(soc.io, "io")
  exposeOptionIO(soc.noc_clock, "noc_clock")
  exposeOptionIO(soc.noc_reset, "noc_reset")
  exposeOptionIO(soc.imsic_axi4lite, "imsic_axi4lite")

  // TODO:
  // XSDiffTop is only part of the DUT, so we cannot instantiate Difftest here.
  // For now, performance counters are collected per DiffTop; the control signals need to be passed in from Difftest.
  val timer = IO(Input(UInt(64.W)))
  val logEnable = IO(Input(Bool()))
  val clean = IO(Input(Bool()))
  val dump = IO(Input(Bool()))
  XSLog.collect(timer, logEnable, clean, dump)
  DifftestWiring.createAndConnectExtraIOs()
  Profile.generateJson("XiangShan")
  XSNoCDiffTopChecker()
}

// TODO:
// Currently we use a two-step XiangShan-Difftest flow, generating XS (with the Diff interface only) and Difftest separately.
// To avoid potential interface problems between XS and Diff, we add a Checker and CI (dual-core).
// We will try a one-step XS-Diff flow later.
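// The checker emitted below is plain SystemVerilog: it instantiates CONFIG_XSCORE_NR (2) copies of
// XSDiffTop and, for now, ties their perf-control inputs (timer/logEnable/clean/dump) to constants;
// see the FIXME notes inside the template.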
object XSNoCDiffTopChecker {
  def apply(): Unit = {
    val verilog =
      """
        |`define CONFIG_XSCORE_NR 2
        |`include "gateway_interface.svh"
        |module XSDiffTopChecker(
        | input cpu_clk,
        | input cpu_rstn,
        | input sys_clk,
        | input sys_rstn
        |);
        |wire [63:0] timer;
        |wire logEnable;
        |wire clean;
        |wire dump;
        |// FIXME: use signals from Difftest rather than default values
        |assign timer = 64'b0;
        |assign logEnable = 1'b0;
        |assign clean = 1'b0;
        |assign dump = 1'b0;
        |gateway_if gateway_if_i();
        |core_if core_if_o[`CONFIG_XSCORE_NR]();
        |generate
        |  genvar i;
        |  for (i = 0; i < `CONFIG_XSCORE_NR; i = i+1)
        |  begin: u_CPU_TOP
        |    // FIXME: add missing ports
        |    XSDiffTop u_XSTop (
        |      .clock (cpu_clk),
        |      .noc_clock (sys_clk),
        |      .soc_clock (sys_clk),
        |      .io_hartId (6'h0 + i),
        |      .timer (timer),
        |      .logEnable (logEnable),
        |      .clean (clean),
        |      .dump (dump),
        |      .gateway_out (core_if_o[i])
        |    );
        |  end
        |endgenerate
        |CoreToGateway u_CoreToGateway(
        |  .gateway_out (gateway_if_i.out),
        |  .core_in (core_if_o)
        |);
        |GatewayEndpoint u_GatewayEndpoint(
        |  .clock (sys_clk),
        |  .reset (sys_rstn),
        |  .gateway_in (gateway_if_i.in),
        |  .step ()
        |);
        |
        |endmodule
      """.stripMargin
    FileRegisters.writeOutputFile("./build", "XSDiffTopChecker.sv", verilog)
  }
}