mirror of
https://github.com/DarkFlippers/unleashed-firmware
synced 2024-11-23 04:53:08 +00:00
fbt: removed assets rebuild on git commit id change; added explicit dependency for SDK source on compiled assets parts; removed unneeded sdk regeneration runs
This commit is contained in:
parent
181533df1b
commit
51369d6219
4 changed files with 24 additions and 31 deletions
|
@ -1,16 +1,5 @@
|
|||
Import("env")
|
||||
|
||||
from fbt.version import get_fast_git_version_id
|
||||
|
||||
# HACK:
|
||||
# Currently injected to CPPPATH by libs - since they are built earlier and depend on assets
|
||||
# env.Append(
|
||||
# CPPPATH=[
|
||||
# Dir("./compiled"),
|
||||
# ]
|
||||
# )
|
||||
version_value = Value(get_fast_git_version_id())
|
||||
|
||||
assetsenv = env.Clone(
|
||||
tools=["fbt_assets"],
|
||||
FW_LIB_NAME="assets",
|
||||
|
@ -77,7 +66,6 @@ assetsenv.Alias("proto_ver", proto_ver)
|
|||
|
||||
# Gather everything into a static lib
|
||||
assets_parts = (icons, proto, dolphin_blocking, dolphin_internal, proto_ver)
|
||||
assetsenv.Depends(assets_parts, version_value)
|
||||
|
||||
assetslib = assetsenv.Library("${FW_LIB_NAME}", assets_parts)
|
||||
assetsenv.Install("${LIB_DIST_DIR}", assetslib)
|
||||
|
@ -113,6 +101,7 @@ if assetsenv["IS_BASE_FIRMWARE"]:
|
|||
)
|
||||
|
||||
# Exporting resources node to external environment
|
||||
env["FW_ASSETS_HEADERS"] = assets_parts
|
||||
env["FW_RESOURCES"] = resources
|
||||
assetsenv.Alias("resources", resources)
|
||||
|
||||
|
|
|
@ -302,7 +302,7 @@ if fwenv["IS_BASE_FIRMWARE"]:
|
|||
"-D__inline__=inline",
|
||||
],
|
||||
)
|
||||
Depends(sdk_source, fwenv["SDK_HEADERS"])
|
||||
Depends(sdk_source, (fwenv["SDK_HEADERS"], fwenv["FW_ASSETS_HEADERS"]))
|
||||
|
||||
sdk_tree = fwenv.SDKTree("sdk/sdk.opts", "sdk_origin")
|
||||
AlwaysBuild(sdk_tree)
|
||||
|
|
|
@ -329,7 +329,6 @@ class SdkCache:
|
|||
self.sdk = ApiEntries()
|
||||
self.disabled_entries = set()
|
||||
self.new_entries = set()
|
||||
self.loaded_dirty = False
|
||||
self.loaded_dirty_version = False
|
||||
|
||||
self.version_action = VersionBump.NONE
|
||||
|
@ -340,8 +339,7 @@ class SdkCache:
|
|||
return (
|
||||
self.version != SdkVersion(0, 0)
|
||||
and self.version_action == VersionBump.NONE
|
||||
and not self.loaded_dirty
|
||||
and not self.new_entries
|
||||
and not self._have_pending_entries()
|
||||
)
|
||||
|
||||
def _filter_enabled(self, sdk_entries):
|
||||
|
@ -388,21 +386,12 @@ class SdkCache:
|
|||
if self._load_version_only:
|
||||
raise Exception("Only SDK version was loaded, cannot save")
|
||||
|
||||
version_is_clean = True
|
||||
if self.loaded_dirty:
|
||||
# There are still new entries and version was already updated
|
||||
version_is_clean = False
|
||||
|
||||
if self.version_action == VersionBump.MINOR:
|
||||
self.version = SdkVersion(self.version.major, self.version.minor + 1)
|
||||
version_is_clean = False
|
||||
elif self.version_action == VersionBump.MAJOR:
|
||||
self.version = SdkVersion(self.version.major + 1, 0)
|
||||
version_is_clean = False
|
||||
|
||||
if version_is_clean:
|
||||
print(f"API version {self.version} is up to date")
|
||||
else:
|
||||
if self._have_pending_entries():
|
||||
self.new_entries.add(self.version)
|
||||
print(
|
||||
f"API version is still WIP: {self.version}. Review the changes and re-run command."
|
||||
|
@ -418,16 +407,23 @@ class SdkCache:
|
|||
)
|
||||
)
|
||||
)
|
||||
else:
|
||||
print(f"API version {self.version} is up to date")
|
||||
|
||||
if not version_is_clean or self.loaded_dirty_version:
|
||||
# Regenerate cache file
|
||||
regenerate_csv = (
|
||||
self.loaded_dirty_version
|
||||
or self._have_pending_entries()
|
||||
or self.version_action != VersionBump.NONE
|
||||
)
|
||||
|
||||
if regenerate_csv:
|
||||
str_cache_entries = [self.version]
|
||||
name_getter = operator.attrgetter("name")
|
||||
str_cache_entries.extend(sorted(self.sdk.headers, key=name_getter))
|
||||
str_cache_entries.extend(sorted(self.sdk.functions, key=name_getter))
|
||||
str_cache_entries.extend(sorted(self.sdk.variables, key=name_getter))
|
||||
|
||||
with open(self.cache_file_name, "w", newline="") as f:
|
||||
with open(self.cache_file_name, "wt", newline="") as f:
|
||||
writer = csv.DictWriter(f, fieldnames=SdkCache.CSV_FIELD_NAMES)
|
||||
writer.writeheader()
|
||||
|
||||
|
@ -476,13 +472,20 @@ class SdkCache:
|
|||
f"Cannot load symbol cache '{self.cache_file_name}'! File does not exist"
|
||||
)
|
||||
|
||||
with open(self.cache_file_name, "r") as f:
|
||||
with open(self.cache_file_name, "rt") as f:
|
||||
reader = csv.DictReader(f)
|
||||
for row in reader:
|
||||
self._process_entry(row)
|
||||
if self._load_version_only and row.get("entry") == SdkVersion.csv_type:
|
||||
break
|
||||
self.loaded_dirty = bool(self.new_entries)
|
||||
|
||||
def _have_pending_entries(self) -> bool:
    """Return True if there are pending non-version entries in new_entries.

    SdkVersion objects are also added to ``new_entries`` as bookkeeping
    (see ``save``), so they are excluded here: only non-version entries
    represent real pending API changes.
    """
    # Generator expression is the idiomatic form of any(filter(lambda ...)).
    return any(not isinstance(e, SdkVersion) for e in self.new_entries)
|
||||
|
||||
def sync_sets(
|
||||
self, known_set: Set[Any], new_set: Set[Any], update_version: bool = True
|
||||
|
|
|
@ -15,6 +15,7 @@ from fbt.sdk import SdkCollector, SdkCache
|
|||
|
||||
def prebuild_sdk_emitter(target, source, env):
    """SCons emitter: register the ``.d`` depfile and ``.i.c`` preprocessed
    source as additional side-targets derived from the first target."""
    primary = target[0]
    for new_ext in (".d", ".i.c"):
        target.append(env.ChangeFileExtension(primary, new_ext))
    return target, source
|
||||
|
||||
|
||||
|
|
Loading…
Reference in a new issue