Merge branch 'ofw_dev' into dev

Commit 2958e5cef3 by MX, 2023-10-23 21:40:52 +03:00
No known key found for this signature in database (GPG key ID: 7CCC66B7DBDD1C83)
8 changed files with 94 additions and 20 deletions

fbt
View file

@@ -5,7 +5,8 @@
set -eu;
# private variables
N_GIT_THREADS="$(getconf _NPROCESSORS_ONLN)";
N_CORES="$(getconf _NPROCESSORS_ONLN)";
N_GIT_THREADS="$(($N_CORES * 2))";
SCRIPT_PATH="$(cd "$(dirname "$0")" && pwd -P)";
SCONS_DEFAULT_FLAGS="--warn=target-not-built";
SCONS_EP="python3 -m SCons";
@@ -15,6 +16,7 @@ FBT_NOENV="${FBT_NOENV:-""}";
FBT_NO_SYNC="${FBT_NO_SYNC:-""}";
FBT_TOOLCHAIN_PATH="${FBT_TOOLCHAIN_PATH:-$SCRIPT_PATH}";
FBT_VERBOSE="${FBT_VERBOSE:-""}";
FBT_GIT_SUBMODULE_SHALLOW="${FBT_GIT_SUBMODULE_SHALLOW:-""}";
if [ -z "$FBT_NOENV" ]; then
FBT_VERBOSE="$FBT_VERBOSE" . "$SCRIPT_PATH/scripts/toolchain/fbtenv.sh";
@@ -29,7 +31,12 @@ if [ -z "$FBT_NO_SYNC" ]; then
echo "\".git\" directory not found, please clone repo via \"git clone\"";
exit 1;
fi
git submodule update --init --jobs "$N_GIT_THREADS";
_FBT_CLONE_FLAGS="--jobs $N_GIT_THREADS";
if [ ! -z "$FBT_GIT_SUBMODULE_SHALLOW" ]; then
_FBT_CLONE_FLAGS="$_FBT_CLONE_FLAGS --depth 1";
fi
git submodule update --init --recursive $_FBT_CLONE_FLAGS;
fi
$SCONS_EP $SCONS_DEFAULT_FLAGS "$@"
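
Going by the logic above, setting FBT_GIT_SUBMODULE_SHALLOW to any non-empty value adds "--depth 1" to the submodule sync. A minimal usage sketch (the value "1" is only an example):

    # request a shallow submodule clone for this run, then build as usual
    FBT_GIT_SUBMODULE_SHALLOW=1 ./fbt
    # effectively runs: git submodule update --init --recursive --jobs <N_CORES*2> --depth 1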

fbt.cmd
View file

@@ -4,10 +4,18 @@ call "%~dp0scripts\toolchain\fbtenv.cmd" env
set SCONS_EP=python -m SCons
if [%FBT_NO_SYNC%] == [] (
set _FBT_CLONE_FLAGS=--jobs %NUMBER_OF_PROCESSORS%
if not [%FBT_GIT_SUBMODULE_SHALLOW%] == [] (
set _FBT_CLONE_FLAGS=%_FBT_CLONE_FLAGS% --depth 1
)
if exist ".git" (
git submodule update --init --depth 1 --jobs %NUMBER_OF_PROCESSORS%
git submodule update --init --recursive %_FBT_CLONE_FLAGS%
if %ERRORLEVEL% neq 0 (
echo Failed to update submodules, set FBT_NO_SYNC to skip
exit /b 1
)
) else (
echo Not in a git repo, please clone with "git clone"
echo .git not found, please clone repo with "git clone"
exit /b 1
)
)
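
The Windows entry point follows the same convention, so a shallow clone can be requested the same way before invoking the script (illustrative):

    :: request a shallow submodule clone, then build as usual
    set FBT_GIT_SUBMODULE_SHALLOW=1
    fbt.cmd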

View file

@@ -613,10 +613,31 @@ static Elf32_Addr elf_address_of_by_hash(ELFFile* elf, uint32_t hash) {
return ELF_INVALID_ADDRESS;
}
static bool elf_file_find_string_by_hash(ELFFile* elf, uint32_t hash, FuriString* out) {
bool result = false;
FuriString* symbol_name = furi_string_alloc();
Elf32_Sym sym;
for(size_t i = 0; i < elf->symbol_count; i++) {
furi_string_reset(symbol_name);
if(elf_read_symbol(elf, i, &sym, symbol_name)) {
if(elf_symbolname_hash(furi_string_get_cstr(symbol_name)) == hash) {
furi_string_set(out, symbol_name);
result = true;
break;
}
}
}
furi_string_free(symbol_name);
return result;
}
static bool elf_relocate_fast(ELFFile* elf, ELFSection* s) {
UNUSED(elf);
const uint8_t* start = s->fast_rel->data;
const uint8_t version = *start;
bool no_errors = true;
if(version != FAST_RELOCATION_VERSION) {
FURI_LOG_E(TAG, "Unsupported fast relocation version %d", version);
@@ -664,24 +685,38 @@ static bool elf_relocate_fast(ELFFile* elf, ELFSection* s) {
}
if(address == ELF_INVALID_ADDRESS) {
FURI_LOG_E(TAG, "Failed to resolve address for hash %lX", hash_or_section_index);
return false;
FuriString* symbol_name = furi_string_alloc();
if(elf_file_find_string_by_hash(elf, hash_or_section_index, symbol_name)) {
FURI_LOG_E(
TAG,
"Failed to resolve address for symbol %s (hash %lX)",
furi_string_get_cstr(symbol_name),
hash_or_section_index);
} else {
FURI_LOG_E(
TAG,
"Failed to resolve address for hash %lX (string not found)",
hash_or_section_index);
}
furi_string_free(symbol_name);
no_errors = false;
start += 3 * offsets_count;
} else {
for(uint32_t j = 0; j < offsets_count; j++) {
uint32_t offset = *((uint32_t*)start) & 0x00FFFFFF;
start += 3;
// FURI_LOG_I(TAG, " Fast relocation offset %ld: %ld", j, offset);
Elf32_Addr relAddr = ((Elf32_Addr)s->data) + offset;
elf_relocate_symbol(elf, relAddr, type, address);
}
}
}
aligned_free(s->fast_rel->data);
free(s->fast_rel);
s->fast_rel = NULL;
return true;
return no_errors;
}
static bool elf_relocate_section(ELFFile* elf, ELFSection* section) {
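
Two behavioral notes on the hunks above: relocation no longer aborts on the first unresolved hash (every failure is logged, with the symbol name recovered by the linear scan in elf_file_find_string_by_hash when possible, and no_errors is returned at the end), and each fast-relocation offset entry is packed into 3 bytes, decoded with a 4-byte load masked to its low 24 bits. A minimal restatement of that decoding, assuming the little-endian layout the mask implies:

    #include <stdint.h>

    // Sketch only: mirrors the offset decoding in elf_relocate_fast above.
    // Each table entry occupies 3 bytes; the cursor advances by 3 per entry.
    static inline uint32_t fast_rel_read_offset(const uint8_t** cursor) {
        uint32_t offset = (*(const uint32_t*)*cursor) & 0x00FFFFFF;
        *cursor += 3;
        return offset;
    }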

View file

@@ -100,6 +100,10 @@ class FlipperApplication:
def is_default_deployable(self):
return self.apptype != FlipperAppType.DEBUG and self.fap_category != "Examples"
@property
def do_strict_import_checks(self):
return self.apptype != FlipperAppType.PLUGIN
def __post_init__(self):
if self.apptype == FlipperAppType.PLUGIN:
self.stack_size = 0

View file

@@ -42,6 +42,7 @@ class AppBuilder:
self.ext_apps_work_dir = env["EXT_APPS_WORK_DIR"]
self.app_work_dir = self.get_app_work_dir(env, app)
self.app_alias = f"fap_{self.app.appid}"
self.icons_src = None
self.externally_built_files = []
self.private_libs = []
@@ -93,6 +94,7 @@
)
self.app_env.Alias("_fap_icons", fap_icons)
self.fw_env.Append(_APP_ICONS=[fap_icons])
self.icons_src = next(filter(lambda n: n.path.endswith(".c"), fap_icons))
def _build_private_libs(self):
for lib_def in self.app.fap_private_libs:
@@ -160,6 +162,10 @@ class AppBuilder:
if not app_sources:
raise UserError(f"No source files found for {self.app.appid}")
# Ensure that icons are included in the build, regardless of user-configured sources
if self.icons_src and not self.icons_src in app_sources:
app_sources.append(self.icons_src)
## Uncomment for debug
# print(f"App sources for {self.app.appid}: {list(f.path for f in app_sources)}")
@@ -180,7 +186,9 @@ class AppBuilder:
self.app._assets_dirs.append(self.app_work_dir.Dir("assets"))
app_artifacts.validator = self.app_env.ValidateAppImports(
app_artifacts.compact
app_artifacts.compact,
_CHECK_APP=self.app.do_strict_import_checks
and self.app_env.get("STRICT_FAP_IMPORT_CHECK"),
)[0]
if self.app.apptype == FlipperAppType.PLUGIN:
@@ -300,6 +308,9 @@ def validate_app_imports(target, source, env):
+ fg.brightmagenta(f"{disabled_api_syms}")
+ fg.brightyellow(")")
)
if env.get("_CHECK_APP"):
raise UserError(warning_msg)
else:
SCons.Warnings.warn(SCons.Warnings.LinkWarning, warning_msg),
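
The icons_src bookkeeping above targets manifests that narrow the source list: previously the generated icons .c file could be filtered out of app_sources and fail to link. A hypothetical application.fam showing the situation (field names are the standard manifest ones; the appid, entry point and file names are made up):

    App(
        appid="my_app",
        name="My App",
        apptype=FlipperAppType.EXTERNAL,
        entry_point="my_app_entry",
        # a narrowed source list like this used to drop the generated icons file
        sources=["my_app.c"],
        # icon assets still trigger generation of an icons .c file,
        # which AppBuilder now appends to app_sources automatically
        fap_icon_assets="images",
    )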

View file

@@ -20,10 +20,9 @@ def GlobRecursive(env, pattern, node=".", exclude=[]):
source=True,
exclude=exclude,
)
# Otherwise, just check if that's an existing file path
# NB: still creates "virtual" nodes as part of existence check
elif (file_node := node.File(pattern)).exists() or file_node.rexists():
results.append(file_node)
# Otherwise, just assume that file at path exists
else:
results.append(node.File(pattern))
# print(f"Glob result for {pattern} from {node}: {results}")
return results
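
With this change, a non-glob pattern that does not resolve to an existing (or repository) file now yields no node at all instead of a phantom one, which presumably lets callers such as the "No source files found" check in AppBuilder fail early. A minimal sketch of the difference, using the same SCons node API:

    # pattern contains no glob metacharacters here
    file_node = node.File("missing.c")   # old behavior: appended unconditionally
    if file_node.exists() or file_node.rexists():
        results.append(file_node)        # new behavior: only returned when it exists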

View file

@@ -88,6 +88,11 @@ vars.AddVariables(
"CDC Port of Flipper to use, if multiple are connected",
"auto",
),
BoolVariable(
"STRICT_FAP_IMPORT_CHECK",
help="Enable strict import check for .faps",
default=True,
),
)
Return("vars")

View file

@@ -274,6 +274,11 @@ vars.AddVariables(
"clangd",
],
),
BoolVariable(
"STRICT_FAP_IMPORT_CHECK",
help="Enable strict import check for .faps",
default=True,
),
)
Return("vars")