Mirror of https://github.com/AsahiLinux/u-boot (synced 2024-11-26 22:52:18 +00:00)

Commit c595199194:

The load() methods have inconsistent behaviour on error. Some of them load an
empty default environment. Some load an environment containing an error
message. Others do nothing. As a step in the right direction, have the method
return an error code. Then the caller could handle this itself in a consistent
way.

Signed-off-by: Simon Glass <sjg@chromium.org>
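To make the new contract concrete, here is a minimal sketch (not part of this commit) of how a caller inside U-Boot's environment code might react to a failed load() in one consistent way, for instance by falling back to the default environment. The struct env_driver layout, its name member, and the env_set_default() helper are assumptions about the surrounding environment API, not code from this file:

	/*
	 * Hypothetical caller-side handling of a failed load(), assuming the
	 * env_driver struct and env_set_default() helper from U-Boot's env code.
	 */
	static int env_try_load(struct env_driver *drv)
	{
		int ret = drv->load();

		if (ret) {
			/* One uniform policy: report the error, fall back to defaults */
			printf("Loading environment from %s failed (err=%d)\n",
			       drv->name, ret);
			env_set_default("load failed", 0);
		}

		return ret;
	}

With every driver reporting failure the same way, the policy (default environment, error message, or nothing) lives in one place instead of being re-decided per driver.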
64 lines · 1.2 KiB · C
/*
 * (C) Copyright 2011-2012 Freescale Semiconductor, Inc.
 *
 * SPDX-License-Identifier:	GPL-2.0+
 */

/* #define DEBUG */

#include <common.h>
#include <command.h>
#include <environment.h>
#include <linux/stddef.h>

/* The environment is either embedded in the image or sits at CONFIG_ENV_ADDR */
#ifdef ENV_IS_EMBEDDED
env_t *env_ptr = &environment;
#else /* ! ENV_IS_EMBEDDED */
env_t *env_ptr = (env_t *)CONFIG_ENV_ADDR;
#endif /* ENV_IS_EMBEDDED */

DECLARE_GLOBAL_DATA_PTR;

#if !defined(CONFIG_ENV_OFFSET)
#define CONFIG_ENV_OFFSET 0
#endif

static int env_remote_init(void)
{
	/* Accept the in-memory environment only if its CRC checks out */
	if (crc32(0, env_ptr->data, ENV_SIZE) == env_ptr->crc) {
		gd->env_addr = (ulong)&(env_ptr->data);
		gd->env_valid = ENV_VALID;
		return 0;
	}

	return -ENOENT;
}

#ifdef CONFIG_CMD_SAVEENV
static int env_remote_save(void)
{
#ifdef CONFIG_SRIO_PCIE_BOOT_SLAVE
	printf("Can not support the 'saveenv' when boot from SRIO or PCIE!\n");
	return 1;
#else
	return 0;
#endif
}
#endif /* CONFIG_CMD_SAVEENV */

static int env_remote_load(void)
{
#ifndef ENV_IS_EMBEDDED
	/* The environment is already in memory; import it into the hash table */
	env_import((char *)env_ptr, 1);
#endif

	return 0;
}

U_BOOT_ENV_LOCATION(remote) = {
	.location	= ENVL_REMOTE,
	ENV_NAME("Remote")
	.load		= env_remote_load,
	.save		= env_save_ptr(env_remote_save),
	.init		= env_remote_init,
};
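The U_BOOT_ENV_LOCATION() entry above only registers the driver in a linker-generated list; the environment core then asks env_get_location() which location to use for each operation. As a rough illustration (assuming the weak env_get_location() hook and the ENVL_UNKNOWN sentinel from U-Boot's env core, neither of which is defined in this file), a board could force the remote driver like this:

	#include <common.h>
	#include <environment.h>

	/*
	 * Illustrative board-side override of the weak location hook
	 * (assumption: env_get_location() as declared by the env core),
	 * making the remote driver the only environment location tried.
	 */
	enum env_location env_get_location(enum env_operation op, int prio)
	{
		if (prio == 0)
			return ENVL_REMOTE;	/* the driver registered above */

		return ENVL_UNKNOWN;		/* no further fallbacks */
	}

In a normal build this selection usually comes from the environment-location config options rather than a hand-written override; the sketch is only meant to show where ENVL_REMOTE plugs in.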