Mirror of https://github.com/DarkFlippers/unleashed-firmware, synced 2024-11-10 06:54:19 +00:00
[FL-3627, FL-3628, FL-3631] fbt: glob & git improvements (#3151)
* fbt: optional shallow submodule checkout
* fbt: more git threads by default
* fbt: git condition fix
* fbt: renamed FBT_SHALLOW to FBT_GIT_SUBMODULE_SHALLOW
* github: enabled FBT_GIT_SUBMODULE_SHALLOW in flows
* fbt: always compile icons' .c, even if user does not specify a proper source glob; changed glob to require files at user-specified paths to exist
* fbt: fail build for missing imports in .faps
* fbt: moved STRICT_FAP_IMPORT_CHECK to commandline options; enabled by default
* ufbt: enabled STRICT_FAP_IMPORT_CHECK

Co-authored-by: あく <alleteam@gmail.com>
parent 1b6295b2bf
commit 35c903494c
12 changed files with 54 additions and 10 deletions
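For context on the icon and glob changes described above, here is a hypothetical application.fam sketch (the appid, entry point, file names and icon directory are made up; only the stricter source handling is the point):

App(
    appid="example_app",  # hypothetical application id
    apptype=FlipperAppType.EXTERNAL,
    entry_point="example_app_main",
    # Non-wildcard entries must now point at files that actually exist;
    # a typo here fails source collection instead of surfacing later in the build.
    sources=["example_app.c"],
    # The .c file generated from these icon assets is now always compiled,
    # even if "sources" above does not happen to cover it.
    fap_icon_assets="images",
    fap_category="Examples",
)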
.github/workflows/build.yml (1 changed line)
@@ -11,6 +11,7 @@ on:
 env:
   DEFAULT_TARGET: f7
   FBT_TOOLCHAIN_PATH: /runner/_work
+  FBT_GIT_SUBMODULE_SHALLOW: 1
 
 jobs:
   main:
.github/workflows/build_compact.yml (1 changed line)
@@ -5,6 +5,7 @@ on:
 
 env:
   FBT_TOOLCHAIN_PATH: /runner/_work
+  FBT_GIT_SUBMODULE_SHALLOW: 1
 
 jobs:
   compact:
.github/workflows/pvs_studio.yml (1 changed line)
@@ -10,6 +10,7 @@ env:
   TARGETS: f7
   DEFAULT_TARGET: f7
   FBT_TOOLCHAIN_PATH: /runner/_work
+  FBT_GIT_SUBMODULE_SHALLOW: 1
 
 jobs:
   analyse_c_cpp:
.github/workflows/unit_tests.yml (1 changed line)
@@ -7,6 +7,7 @@ env:
   TARGETS: f7
   DEFAULT_TARGET: f7
   FBT_TOOLCHAIN_PATH: /opt
+  FBT_GIT_SUBMODULE_SHALLOW: 1
 
 jobs:
   run_units_on_bench:
.github/workflows/updater_test.yml (1 changed line)
@@ -7,6 +7,7 @@ env:
   TARGETS: f7
   DEFAULT_TARGET: f7
   FBT_TOOLCHAIN_PATH: /opt
+  FBT_GIT_SUBMODULE_SHALLOW: 1
 
 jobs:
   test_updater_on_bench:
fbt (11 changed lines)
@@ -5,7 +5,8 @@
 set -eu;
 
 # private variables
-N_GIT_THREADS="$(getconf _NPROCESSORS_ONLN)";
+N_CORES="$(getconf _NPROCESSORS_ONLN)";
+N_GIT_THREADS="$(($N_CORES * 2))";
 SCRIPT_PATH="$(cd "$(dirname "$0")" && pwd -P)";
 SCONS_DEFAULT_FLAGS="--warn=target-not-built";
 SCONS_EP="python3 -m SCons";
@@ -15,6 +16,7 @@ FBT_NOENV="${FBT_NOENV:-""}";
 FBT_NO_SYNC="${FBT_NO_SYNC:-""}";
 FBT_TOOLCHAIN_PATH="${FBT_TOOLCHAIN_PATH:-$SCRIPT_PATH}";
 FBT_VERBOSE="${FBT_VERBOSE:-""}";
+FBT_GIT_SUBMODULE_SHALLOW="${FBT_GIT_SUBMODULE_SHALLOW:-""}";
 
 if [ -z "$FBT_NOENV" ]; then
     FBT_VERBOSE="$FBT_VERBOSE" . "$SCRIPT_PATH/scripts/toolchain/fbtenv.sh";
@@ -29,7 +31,12 @@ if [ -z "$FBT_NO_SYNC" ]; then
         echo "\".git\" directory not found, please clone repo via \"git clone\"";
         exit 1;
     fi
-    git submodule update --init --jobs "$N_GIT_THREADS";
+    _FBT_CLONE_FLAGS="--jobs $N_GIT_THREADS";
+    if [ ! -z "$FBT_GIT_SUBMODULE_SHALLOW" ]; then
+        _FBT_CLONE_FLAGS="$_FBT_CLONE_FLAGS --depth 1";
+    fi
+
+    git submodule update --init --recursive $_FBT_CLONE_FLAGS;
 fi
 
 $SCONS_EP $SCONS_DEFAULT_FLAGS "$@"
fbt.cmd (12 changed lines)
@@ -4,10 +4,18 @@ call "%~dp0scripts\toolchain\fbtenv.cmd" env
 set SCONS_EP=python -m SCons
 
 if [%FBT_NO_SYNC%] == [] (
+    set _FBT_CLONE_FLAGS=--jobs %NUMBER_OF_PROCESSORS%
+    if not [%FBT_GIT_SUBMODULE_SHALLOW%] == [] (
+        set _FBT_CLONE_FLAGS=%_FBT_CLONE_FLAGS% --depth 1
+    )
     if exist ".git" (
-        git submodule update --init --depth 1 --jobs %NUMBER_OF_PROCESSORS%
+        git submodule update --init --recursive %_FBT_CLONE_FLAGS%
+        if %ERRORLEVEL% neq 0 (
+            echo Failed to update submodules, set FBT_NO_SYNC to skip
+            exit /b 1
+        )
     ) else (
-        echo Not in a git repo, please clone with "git clone"
+        echo .git not found, please clone repo with "git clone"
         exit /b 1
     )
 )
@@ -100,6 +100,10 @@ class FlipperApplication:
     def is_default_deployable(self):
         return self.apptype != FlipperAppType.DEBUG and self.fap_category != "Examples"
 
+    @property
+    def do_strict_import_checks(self):
+        return self.apptype != FlipperAppType.PLUGIN
+
     def __post_init__(self):
         if self.apptype == FlipperAppType.PLUGIN:
             self.stack_size = 0
@@ -42,6 +42,7 @@ class AppBuilder:
         self.ext_apps_work_dir = env["EXT_APPS_WORK_DIR"]
         self.app_work_dir = self.get_app_work_dir(env, app)
         self.app_alias = f"fap_{self.app.appid}"
+        self.icons_src = None
         self.externally_built_files = []
         self.private_libs = []
 
@@ -93,6 +94,7 @@ class AppBuilder:
             )
         self.app_env.Alias("_fap_icons", fap_icons)
         self.fw_env.Append(_APP_ICONS=[fap_icons])
+        self.icons_src = next(filter(lambda n: n.path.endswith(".c"), fap_icons))
 
     def _build_private_libs(self):
         for lib_def in self.app.fap_private_libs:
@@ -160,6 +162,10 @@ class AppBuilder:
         if not app_sources:
            raise UserError(f"No source files found for {self.app.appid}")
 
+        # Ensure that icons are included in the build, regardless of user-configured sources
+        if self.icons_src and not self.icons_src in app_sources:
+            app_sources.append(self.icons_src)
+
         ## Uncomment for debug
         # print(f"App sources for {self.app.appid}: {list(f.path for f in app_sources)}")
 
@@ -180,7 +186,9 @@ class AppBuilder:
             self.app._assets_dirs.append(self.app_work_dir.Dir("assets"))
 
         app_artifacts.validator = self.app_env.ValidateAppImports(
-            app_artifacts.compact
+            app_artifacts.compact,
+            _CHECK_APP=self.app.do_strict_import_checks
+            and self.app_env.get("STRICT_FAP_IMPORT_CHECK"),
         )[0]
 
         if self.app.apptype == FlipperAppType.PLUGIN:
@@ -300,7 +308,10 @@ def validate_app_imports(target, source, env):
             + fg.brightmagenta(f"{disabled_api_syms}")
             + fg.brightyellow(")")
         )
-        SCons.Warnings.warn(SCons.Warnings.LinkWarning, warning_msg),
+        if env.get("_CHECK_APP"):
+            raise UserError(warning_msg)
+        else:
+            SCons.Warnings.warn(SCons.Warnings.LinkWarning, warning_msg),
 
 
 def GetExtAppByIdOrPath(env, app_dir):
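A minimal pure-Python model of the gating introduced above (not fbt code; the symbol name is illustrative): plugins never fail hard because do_strict_import_checks is False for FlipperAppType.PLUGIN, while other apps fail whenever STRICT_FAP_IMPORT_CHECK is enabled, which is now the default.

import warnings

def check_imports(unresolved_syms, is_plugin, strict_fap_import_check=True):
    # Models _CHECK_APP = do_strict_import_checks and STRICT_FAP_IMPORT_CHECK
    if not unresolved_syms:
        return
    msg = f"Unresolved API symbols: {sorted(unresolved_syms)}"
    if (not is_plugin) and strict_fap_import_check:
        raise RuntimeError(msg)  # strict: the .fap build fails
    warnings.warn(msg)           # lenient: warning only, as before

check_imports({"some_api_symbol"}, is_plugin=True)   # plugin: warns only
check_imports(set(), is_plugin=False)                # clean app: no-op
try:
    check_imports({"some_api_symbol"}, is_plugin=False)  # app + strict: raises
except RuntimeError as e:
    print("build would fail:", e)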
@@ -20,10 +20,9 @@ def GlobRecursive(env, pattern, node=".", exclude=[]):
             source=True,
             exclude=exclude,
         )
-    # Otherwise, just assume that file at path exists
-    else:
-        results.append(node.File(pattern))
+    # Otherwise, just check if that's an existing file path
+    # NB: still creates "virtual" nodes as part of existence check
+    elif (file_node := node.File(pattern)).exists() or file_node.rexists():
+        results.append(file_node)
     # print(f"Glob result for {pattern} from {node}: {results}")
     return results
 
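The glob change above, modeled in plain Python (a pathlib sketch, not the SCons node API): wildcard patterns still glob recursively, but a literal path is only returned if it exists, so misspelled paths now surface as an empty source list and hit the "No source files found" error shown earlier instead of failing later in the build.

from pathlib import Path

def collect_sources(root, pattern):
    # Stand-in for SCons' glob-magic detection
    if any(ch in pattern for ch in "*?["):
        return sorted(Path(root).rglob(pattern))
    candidate = Path(root) / pattern
    # New behavior: a literal path must exist to be accepted at all;
    # previously it was assumed to exist.
    return [candidate] if candidate.exists() else []

print(collect_sources(".", "*.c"))        # wildcard: whatever actually matches
print(collect_sources(".", "missing.c"))  # literal, nonexistent: []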
@@ -88,6 +88,11 @@ vars.AddVariables(
         "CDC Port of Flipper to use, if multiple are connected",
         "auto",
     ),
+    BoolVariable(
+        "STRICT_FAP_IMPORT_CHECK",
+        help="Enable strict import check for .faps",
+        default=True,
+    ),
 )
 
 Return("vars")
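Because STRICT_FAP_IMPORT_CHECK is now an ordinary SCons BoolVariable, it can be overridden per invocation, e.g. ./fbt STRICT_FAP_IMPORT_CHECK=0 fap_myapp (fap_myapp being a placeholder target). A standalone sketch of how such an override is parsed, assuming SCons is installed:

from SCons.Environment import Environment
from SCons.Variables import BoolVariable, Variables

# Simulate "STRICT_FAP_IMPORT_CHECK=0" arriving from the command line;
# in a real SConstruct the second argument would be ARGUMENTS.
vars = Variables(None, {"STRICT_FAP_IMPORT_CHECK": "0"})
vars.AddVariables(
    BoolVariable(
        "STRICT_FAP_IMPORT_CHECK",
        help="Enable strict import check for .faps",
        default=True,
    ),
)
env = Environment(variables=vars, tools=[])
print(env["STRICT_FAP_IMPORT_CHECK"])  # -> False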
@@ -269,6 +269,11 @@ vars.AddVariables(
             "clangd",
         ],
     ),
+    BoolVariable(
+        "STRICT_FAP_IMPORT_CHECK",
+        help="Enable strict import check for .faps",
+        default=True,
+    ),
 )
 
 Return("vars")