2018-05-23 16:17:30 +00:00
|
|
|
// SPDX-License-Identifier: GPL-2.0
|
|
|
|
/*
|
|
|
|
* Copyright (C) 2016-2018 Intel Corporation <www.intel.com>
|
|
|
|
*
|
|
|
|
*/
|
|
|
|
|
|
|
|
#include <common.h>
|
2019-11-14 19:57:37 +00:00
|
|
|
#include <cpu_func.h>
|
2019-05-06 01:56:01 +00:00
|
|
|
#include <dm.h>
|
2018-05-23 16:17:30 +00:00
|
|
|
#include <errno.h>
|
|
|
|
#include <div64.h>
|
2019-03-21 17:24:01 +00:00
|
|
|
#include <fdtdec.h>
|
2019-12-28 17:45:07 +00:00
|
|
|
#include <hang.h>
|
2019-12-28 17:45:05 +00:00
|
|
|
#include <init.h>
|
2020-05-10 17:40:05 +00:00
|
|
|
#include <log.h>
|
2019-05-06 01:56:01 +00:00
|
|
|
#include <ram.h>
|
|
|
|
#include <reset.h>
|
|
|
|
#include "sdram_s10.h"
|
2018-05-23 16:17:30 +00:00
|
|
|
#include <wait_bit.h>
|
2019-11-27 07:55:15 +00:00
|
|
|
#include <asm/arch/firewall.h>
|
2018-05-23 16:17:30 +00:00
|
|
|
#include <asm/arch/reset_manager.h>
|
2019-05-06 01:56:01 +00:00
|
|
|
#include <asm/io.h>
|
2019-03-21 17:24:01 +00:00
|
|
|
#include <linux/sizes.h>
|
2018-05-23 16:17:30 +00:00
|
|
|
|
|
|
|
DECLARE_GLOBAL_DATA_PTR;
|
|
|
|
|
|
|
|
/*
 * Pack a DDR geometry descriptor into a single u32:
 * A = address order, B = bank (+ bank group) width, C = column width,
 * R = row width. The same packing is produced from the HMC DRAMADDRW /
 * CTRLCFG1 registers in sdram_mmr_init_full() and matched against the
 * table below.
 */
#define DDR_CONFIG(A, B, C, R)	(((A) << 24) | ((B) << 16) | ((C) << 8) | (R))

/* The following are the supported configurations */
u32 ddr_config[] = {
	/* DDR_CONFIG(Address order,Bank,Column,Row) */
	/* List for DDR3 or LPDDR3 (pinout order > chip, row, bank, column) */
	DDR_CONFIG(0, 3, 10, 12),
	DDR_CONFIG(0, 3, 9, 13),
	DDR_CONFIG(0, 3, 10, 13),
	DDR_CONFIG(0, 3, 9, 14),
	DDR_CONFIG(0, 3, 10, 14),
	DDR_CONFIG(0, 3, 10, 15),
	DDR_CONFIG(0, 3, 11, 14),
	DDR_CONFIG(0, 3, 11, 15),
	DDR_CONFIG(0, 3, 10, 16),
	DDR_CONFIG(0, 3, 11, 16),
	DDR_CONFIG(0, 3, 12, 15), /* 0xa */
	/* List for DDR4 only (pinout order > chip, bank, row, column) */
	DDR_CONFIG(1, 3, 10, 14),
	DDR_CONFIG(1, 4, 10, 14),
	DDR_CONFIG(1, 3, 10, 15),
	DDR_CONFIG(1, 4, 10, 15),
	DDR_CONFIG(1, 3, 10, 16),
	DDR_CONFIG(1, 4, 10, 16),
	DDR_CONFIG(1, 3, 10, 17),
	DDR_CONFIG(1, 4, 10, 17),
};
|
|
|
|
|
|
|
|
/**
 * match_ddr_conf() - Find a packed DDR configuration in the supported list
 * @ddr_conf: DDR_CONFIG() encoded value (address order, bank, column, row)
 *
 * Return: index of the matching ddr_config[] entry, or 0 when no entry
 * matches. Note that index 0 doubles as the "no match" value; the caller
 * treats a zero result as "leave the scheduler DDRCONF at its default".
 */
int match_ddr_conf(u32 ddr_conf)
{
	int idx = 0;

	while (idx < ARRAY_SIZE(ddr_config)) {
		if (ddr_config[idx] == ddr_conf)
			return idx;
		idx++;
	}

	return 0;
}
|
|
|
|
|
|
|
|
/**
 * sdram_mmr_init_full() - Function to initialize SDRAM MMR
 * @dev: SDRAM controller udevice
 *
 * Initialize the SDRAM MMR: open the CCU/firewall windows to DDR, wait
 * for HMC calibration (retrying with an EMIF reset up to 3 times),
 * program the DDR scheduler geometry and timing registers from the
 * calibrated HMC values, validate the size against the devicetree, and
 * enable or disable SDRAM ECC.
 *
 * The register writes below are strictly ordered; do not reorder them.
 *
 * Return: 0 on success, -1 on HMC clock failure, calibration failure,
 * or a bad devicetree memory node.
 */
int sdram_mmr_init_full(struct udevice *dev)
{
	struct altera_sdram_platdata *plat = dev->plat;
	struct altera_sdram_priv *priv = dev_get_priv(dev);
	u32 update_value, io48_value, ddrioctl;
	u32 i;
	int ret;
	phys_size_t hw_size;
	struct bd_info bd = {0};

	/* Enable access to DDR from CPU master */
	clrbits_le32(CCU_REG_ADDR(CCU_CPU0_MPRT_ADBASE_DDRREG),
		     CCU_ADBASE_DI_MASK);
	clrbits_le32(CCU_REG_ADDR(CCU_CPU0_MPRT_ADBASE_MEMSPACE0),
		     CCU_ADBASE_DI_MASK);
	clrbits_le32(CCU_REG_ADDR(CCU_CPU0_MPRT_ADBASE_MEMSPACE1A),
		     CCU_ADBASE_DI_MASK);
	clrbits_le32(CCU_REG_ADDR(CCU_CPU0_MPRT_ADBASE_MEMSPACE1B),
		     CCU_ADBASE_DI_MASK);
	clrbits_le32(CCU_REG_ADDR(CCU_CPU0_MPRT_ADBASE_MEMSPACE1C),
		     CCU_ADBASE_DI_MASK);
	clrbits_le32(CCU_REG_ADDR(CCU_CPU0_MPRT_ADBASE_MEMSPACE1D),
		     CCU_ADBASE_DI_MASK);
	clrbits_le32(CCU_REG_ADDR(CCU_CPU0_MPRT_ADBASE_MEMSPACE1E),
		     CCU_ADBASE_DI_MASK);

	/* Enable access to DDR from IO master */
	clrbits_le32(CCU_REG_ADDR(CCU_IOM_MPRT_ADBASE_MEMSPACE0),
		     CCU_ADBASE_DI_MASK);
	clrbits_le32(CCU_REG_ADDR(CCU_IOM_MPRT_ADBASE_MEMSPACE1A),
		     CCU_ADBASE_DI_MASK);
	clrbits_le32(CCU_REG_ADDR(CCU_IOM_MPRT_ADBASE_MEMSPACE1B),
		     CCU_ADBASE_DI_MASK);
	clrbits_le32(CCU_REG_ADDR(CCU_IOM_MPRT_ADBASE_MEMSPACE1C),
		     CCU_ADBASE_DI_MASK);
	clrbits_le32(CCU_REG_ADDR(CCU_IOM_MPRT_ADBASE_MEMSPACE1D),
		     CCU_ADBASE_DI_MASK);
	clrbits_le32(CCU_REG_ADDR(CCU_IOM_MPRT_ADBASE_MEMSPACE1E),
		     CCU_ADBASE_DI_MASK);

	/* Enable access to DDR from TCU */
	clrbits_le32(CCU_REG_ADDR(CCU_TCU_MPRT_ADBASE_MEMSPACE0),
		     CCU_ADBASE_DI_MASK);
	clrbits_le32(CCU_REG_ADDR(CCU_TCU_MPRT_ADBASE_MEMSPACE1A),
		     CCU_ADBASE_DI_MASK);
	clrbits_le32(CCU_REG_ADDR(CCU_TCU_MPRT_ADBASE_MEMSPACE1B),
		     CCU_ADBASE_DI_MASK);
	clrbits_le32(CCU_REG_ADDR(CCU_TCU_MPRT_ADBASE_MEMSPACE1C),
		     CCU_ADBASE_DI_MASK);
	clrbits_le32(CCU_REG_ADDR(CCU_TCU_MPRT_ADBASE_MEMSPACE1D),
		     CCU_ADBASE_DI_MASK);
	clrbits_le32(CCU_REG_ADDR(CCU_TCU_MPRT_ADBASE_MEMSPACE1E),
		     CCU_ADBASE_DI_MASK);

	/* this enables nonsecure access to DDR */
	/* mpuregion0addr_limit */
	FW_MPU_DDR_SCR_WRITEL(0xFFFF0000, FW_MPU_DDR_SCR_MPUREGION0ADDR_LIMIT);
	FW_MPU_DDR_SCR_WRITEL(0x1F, FW_MPU_DDR_SCR_MPUREGION0ADDR_LIMITEXT);

	/* nonmpuregion0addr_limit */
	FW_MPU_DDR_SCR_WRITEL(0xFFFF0000,
			      FW_MPU_DDR_SCR_NONMPUREGION0ADDR_LIMIT);
	FW_MPU_DDR_SCR_WRITEL(0x1F, FW_MPU_DDR_SCR_NONMPUREGION0ADDR_LIMITEXT);

	/* Enable mpuregion0enable and nonmpuregion0enable */
	FW_MPU_DDR_SCR_WRITEL(MPUREGION0_ENABLE | NONMPUREGION0_ENABLE,
			      FW_MPU_DDR_SCR_EN_SET);

	/* Ensure HMC clock is running */
	if (poll_hmc_clock_status()) {
		puts("DDR: Error as HMC clock not running\n");
		return -1;
	}

	/*
	 * Try 3 times to do a calibration. The loop always runs at least
	 * once, so 'ret' is guaranteed to be set before the check below.
	 * On a failed wait, reset the EMIF and retry.
	 */
	for (i = 0; i < 3; i++) {
		ret = wait_for_bit_le32((const void *)(plat->hmc +
					DDRCALSTAT),
					DDR_HMC_DDRCALSTAT_CAL_MSK, true, 1000,
					false);
		if (!ret)
			break;

		emif_reset(plat);
	}

	if (ret) {
		puts("DDR: Error as SDRAM calibration failed\n");
		return -1;
	}
	debug("DDR: Calibration success\n");

	/* Snapshot the calibrated HMC configuration/timing registers */
	u32 ctrlcfg0 = hmc_readl(plat, CTRLCFG0);
	u32 ctrlcfg1 = hmc_readl(plat, CTRLCFG1);
	u32 dramaddrw = hmc_readl(plat, DRAMADDRW);
	u32 dramtim0 = hmc_readl(plat, DRAMTIMING0);
	u32 caltim0 = hmc_readl(plat, CALTIMING0);
	u32 caltim1 = hmc_readl(plat, CALTIMING1);
	u32 caltim2 = hmc_readl(plat, CALTIMING2);
	u32 caltim3 = hmc_readl(plat, CALTIMING3);
	u32 caltim4 = hmc_readl(plat, CALTIMING4);
	u32 caltim9 = hmc_readl(plat, CALTIMING9);

	/*
	 * Configure the DDR IO size [0xFFCFB008]
	 * niosreserve0: Used to indicate DDR width &
	 * bit[7:0] = Number of data bits (bit[6:5] 0x01=32bit, 0x10=64bit)
	 * bit[8] = 1 if user-mode OCT is present
	 * bit[9] = 1 if warm reset compiled into EMIF Cal Code
	 * bit[10] = 1 if warm reset is on during generation in EMIF Cal
	 * niosreserve1: IP ADCDS version encoded as 16 bit value
	 * bit[2:0] = Variant (0=not special,1=FAE beta, 2=Customer beta,
	 * 3=EAP, 4-6 are reserved)
	 * bit[5:3] = Service Pack # (e.g. 1)
	 * bit[9:6] = Minor Release #
	 * bit[14:10] = Major Release #
	 */
	update_value = hmc_readl(plat, NIOSRESERVED0);
	/* Extract the width field (bits [6:5]) into DDRIOCTRL */
	hmc_ecc_writel(plat, ((update_value & 0xFF) >> 5), DDRIOCTRL);
	ddrioctl = hmc_ecc_readl(plat, DDRIOCTRL);

	/* enable HPS interface to HMC */
	hmc_ecc_writel(plat, DDR_HMC_HPSINTFCSEL_ENABLE_MASK, HPSINTFCSEL);

	/* Set the DDR Configuration */
	io48_value = DDR_CONFIG(CTRLCFG1_CFG_ADDR_ORDER(ctrlcfg1),
				(DRAMADDRW_CFG_BANK_ADDR_WIDTH(dramaddrw) +
				 DRAMADDRW_CFG_BANK_GRP_ADDR_WIDTH(dramaddrw)),
				DRAMADDRW_CFG_COL_ADDR_WIDTH(dramaddrw),
				DRAMADDRW_CFG_ROW_ADDR_WIDTH(dramaddrw));

	/* 0 means "not found"; scheduler DDRCONF is left at default then */
	update_value = match_ddr_conf(io48_value);
	if (update_value)
		ddr_sch_writel(plat, update_value, DDR_SCH_DDRCONF);

	/* Configure HMC dramaddrw */
	hmc_ecc_writel(plat, hmc_readl(plat, DRAMADDRW), DRAMADDRWIDTH);

	/*
	 * Configure DDR timing
	 * RDTOMISS = tRTP + tRP + tRCD - BL/2
	 * WRTOMISS = WL + tWR + tRP + tRCD and
	 * WL = RL + BL/2 + 2 - rd-to-wr ; tWR = 15ns so...
	 * First part of equation is in memory clock units so divide by 2
	 * for HMC clock units. 1066MHz is close to 1ns so use 15 directly.
	 * WRTOMISS = ((RL + BL/2 + 2 + tWR) >> 1)- rd-to-wr + tRP + tRCD
	 */
	u32 burst_len = CTRLCFG0_CFG_CTRL_BURST_LEN(ctrlcfg0);

	/* RDTOMISS (scheduler clock units) */
	update_value = CALTIMING2_CFG_RD_TO_WR_PCH(caltim2) +
		       CALTIMING4_CFG_PCH_TO_VALID(caltim4) +
		       CALTIMING0_CFG_ACT_TO_RDWR(caltim0) -
		       (burst_len >> 2);
	/* WRTOMISS per the formula above */
	io48_value = (((DRAMTIMING0_CFG_TCL(dramtim0) + 2 + DDR_TWR +
		       (burst_len >> 1)) >> 1) -
		      /* Up to here was in memory cycles so divide by 2 */
		      CALTIMING1_CFG_RD_TO_WR(caltim1) +
		      CALTIMING0_CFG_ACT_TO_RDWR(caltim0) +
		      CALTIMING4_CFG_PCH_TO_VALID(caltim4));

	ddr_sch_writel(plat, ((CALTIMING0_CFG_ACT_TO_ACT(caltim0) <<
			       DDR_SCH_DDRTIMING_ACTTOACT_OFF) |
			      (update_value << DDR_SCH_DDRTIMING_RDTOMISS_OFF) |
			      (io48_value << DDR_SCH_DDRTIMING_WRTOMISS_OFF) |
			      ((burst_len >> 2) << DDR_SCH_DDRTIMING_BURSTLEN_OFF) |
			      (CALTIMING1_CFG_RD_TO_WR(caltim1) <<
			       DDR_SCH_DDRTIMING_RDTOWR_OFF) |
			      (CALTIMING3_CFG_WR_TO_RD(caltim3) <<
			       DDR_SCH_DDRTIMING_WRTORD_OFF) |
			      (((ddrioctl == 1) ? 1 : 0) <<
			       DDR_SCH_DDRTIMING_BWRATIO_OFF)),
			      DDR_SCH_DDRTIMING);

	/* Configure DDR mode [precharge = 0] */
	ddr_sch_writel(plat, ((ddrioctl ? 0 : 1) <<
			      DDR_SCH_DDRMOD_BWRATIOEXTENDED_OFF),
			      DDR_SCH_DDRMODE);

	/* Configure the read latency */
	ddr_sch_writel(plat, (DRAMTIMING0_CFG_TCL(dramtim0) >> 1) +
		       DDR_READ_LATENCY_DELAY,
		       DDR_SCH_READ_LATENCY);

	/*
	 * Configuring timing values concerning activate commands
	 * [FAWBANK alway 1 because always 4 bank DDR]
	 */
	ddr_sch_writel(plat, ((CALTIMING0_CFG_ACT_TO_ACT_DB(caltim0) <<
			      DDR_SCH_ACTIVATE_RRD_OFF) |
			      (CALTIMING9_CFG_4_ACT_TO_ACT(caltim9) <<
			      DDR_SCH_ACTIVATE_FAW_OFF) |
			      (DDR_ACTIVATE_FAWBANK <<
			      DDR_SCH_ACTIVATE_FAWBANK_OFF)),
			      DDR_SCH_ACTIVATE);

	/*
	 * Configuring timing values concerning device to device data bus
	 * ownership change
	 */
	ddr_sch_writel(plat, ((CALTIMING1_CFG_RD_TO_RD_DC(caltim1) <<
			      DDR_SCH_DEVTODEV_BUSRDTORD_OFF) |
			      (CALTIMING1_CFG_RD_TO_WR_DC(caltim1) <<
			      DDR_SCH_DEVTODEV_BUSRDTOWR_OFF) |
			      (CALTIMING3_CFG_WR_TO_RD_DC(caltim3) <<
			      DDR_SCH_DEVTODEV_BUSWRTORD_OFF)),
			      DDR_SCH_DEVTODEV);

	/* assigning the SDRAM size */
	unsigned long long size = sdram_calculate_size(plat);
	/*
	 * If the size is invalid, use default Config size.
	 * NOTE(review): 'size' is unsigned, so "<= 0" only catches
	 * size == 0 — presumably sdram_calculate_size() reports failure
	 * as 0; confirm against its implementation.
	 */
	if (size <= 0)
		hw_size = PHYS_SDRAM_1_SIZE;
	else
		hw_size = size;

	/* Get bank configuration from devicetree */
	ret = fdtdec_decode_ram_size(gd->fdt_blob, NULL, 0, NULL,
				     (phys_size_t *)&gd->ram_size, &bd);
	if (ret) {
		puts("DDR: Failed to decode memory node\n");
		return -1;
	}

	/* Devicetree size wins; mismatch with hardware is only warned */
	if (gd->ram_size != hw_size)
		printf("DDR: Warning: DRAM size from device tree mismatch with hardware.\n");

	printf("DDR: %lld MiB\n", gd->ram_size >> 20);

	/* Enable or disable the SDRAM ECC */
	if (CTRLCFG1_CFG_CTRL_EN_ECC(ctrlcfg1)) {
		/* Enable ECC with the counter-reset bits pulsed set... */
		setbits_le32(plat->hmc + ECCCTRL1,
			     (DDR_HMC_ECCCTL_AWB_CNT_RST_SET_MSK |
			      DDR_HMC_ECCCTL_CNT_RST_SET_MSK |
			      DDR_HMC_ECCCTL_ECC_EN_SET_MSK));
		/* ...then cleared, leaving only ECC_EN set */
		clrbits_le32(plat->hmc + ECCCTRL1,
			     (DDR_HMC_ECCCTL_AWB_CNT_RST_SET_MSK |
			      DDR_HMC_ECCCTL_CNT_RST_SET_MSK));
		setbits_le32(plat->hmc + ECCCTRL2,
			     (DDR_HMC_ECCCTL2_RMW_EN_SET_MSK |
			      DDR_HMC_ECCCTL2_AWB_EN_SET_MSK));
		hmc_ecc_writel(plat, DDR_HMC_ERRINTEN_INTMASK, ERRINTENS);

		/* Initialize memory content if not from warm reset */
		if (!cpu_has_been_warmreset())
			sdram_init_ecc_bits(&bd);
	} else {
		clrbits_le32(plat->hmc + ECCCTRL1,
			     (DDR_HMC_ECCCTL_AWB_CNT_RST_SET_MSK |
			      DDR_HMC_ECCCTL_CNT_RST_SET_MSK |
			      DDR_HMC_ECCCTL_ECC_EN_SET_MSK));
		clrbits_le32(plat->hmc + ECCCTRL2,
			     (DDR_HMC_ECCCTL2_RMW_EN_SET_MSK |
			      DDR_HMC_ECCCTL2_AWB_EN_SET_MSK));
	}

	/* Enable non-secure reads/writes to HMC Adapter for SDRAM ECC */
	writel(FW_HMC_ADAPTOR_MPU_MASK, FW_HMC_ADAPTOR_REG_ADDR);

	sdram_size_check(&bd);

	/* Publish the result for the RAM uclass (ram_get_info) */
	priv->info.base = bd.bi_dram[0].start;
	priv->info.size = gd->ram_size;

	debug("DDR: HMC init success\n");
	return 0;
}
|
|
|
|
|