mirror of
https://github.com/AsahiLinux/u-boot
synced 2024-11-25 06:00:43 +00:00
core: ofnode: Have ofnode_read_u32_default return a u32
It was returning an int, which doesn't work if the u32 it is reading, or the default value, will overflow a signed int. While it could be made to work, when using a C standard/compiler where casting negative signed values to unsigned has a defined behavior, combined with careful casting, it seems obvious one is meant to use ofnode_read_s32_default() with signed values. Cc: Simon Glass <sjg@chromium.org> Signed-off-by: Trent Piepho <tpiepho@impinj.com>
This commit is contained in:
parent
347ea0b63e
commit
b061ef39c3
2 changed files with 2 additions and 2 deletions
@@ -39,7 +39,7 @@ int ofnode_read_u32(ofnode node, const char *propname, u32 *outp)
 	return 0;
 }
 
-int ofnode_read_u32_default(ofnode node, const char *propname, u32 def)
+u32 ofnode_read_u32_default(ofnode node, const char *propname, u32 def)
 {
 	assert(ofnode_valid(node));
 	ofnode_read_u32(node, propname, &def);
@@ -224,7 +224,7 @@ static inline int ofnode_read_s32(ofnode node, const char *propname,
  * @def: default value to return if the property has no value
  * @return property value, or @def if not found
  */
-int ofnode_read_u32_default(ofnode ref, const char *propname, u32 def);
+u32 ofnode_read_u32_default(ofnode ref, const char *propname, u32 def);
 
 /**
  * ofnode_read_s32_default() - Read a 32-bit integer from a property
Loading…
Reference in a new issue