mirror of
https://github.com/DarkFlippers/unleashed-firmware
synced 2024-11-10 06:54:19 +00:00
Merge remote-tracking branch 'OFW/dev' into dev
This commit is contained in:
commit
0812cd30b2
39 changed files with 2029 additions and 287 deletions
Binary file not shown.
Binary file not shown.
|
@ -1,6 +1,9 @@
|
|||
#include "../test.h" // IWYU pragma: keep
|
||||
|
||||
#include <toolbox/compress.h>
|
||||
#include <toolbox/md5_calc.h>
|
||||
#include <toolbox/tar/tar_archive.h>
|
||||
#include <toolbox/dir_walk.h>
|
||||
|
||||
#include <furi.h>
|
||||
#include <furi_hal.h>
|
||||
|
@ -56,7 +59,7 @@ static void compress_test_reference_comp_decomp() {
|
|||
furi_record_close(RECORD_STORAGE);
|
||||
|
||||
uint8_t* temp_buffer = malloc(1024);
|
||||
Compress* comp = compress_alloc(1024);
|
||||
Compress* comp = compress_alloc(CompressTypeHeatshrink, &compress_config_heatshrink_default);
|
||||
|
||||
size_t encoded_size = 0;
|
||||
mu_assert(
|
||||
|
@ -98,7 +101,7 @@ static void compress_test_random_comp_decomp() {
|
|||
// We only fill half of the buffer with random data, so if anything goes wrong, there's no overflow
|
||||
static const size_t src_data_size = src_buffer_size / 2;
|
||||
|
||||
Compress* comp = compress_alloc(src_buffer_size);
|
||||
Compress* comp = compress_alloc(CompressTypeHeatshrink, &compress_config_heatshrink_default);
|
||||
uint8_t* src_buff = malloc(src_buffer_size);
|
||||
uint8_t* encoded_buff = malloc(encoded_buffer_size);
|
||||
uint8_t* decoded_buff = malloc(src_buffer_size);
|
||||
|
@ -146,9 +149,200 @@ static void compress_test_random_comp_decomp() {
|
|||
compress_free(comp);
|
||||
}
|
||||
|
||||
static int32_t hs_unpacker_file_read(void* context, uint8_t* buffer, size_t size) {
|
||||
File* file = (File*)context;
|
||||
return storage_file_read(file, buffer, size);
|
||||
}
|
||||
|
||||
static int32_t hs_unpacker_file_write(void* context, uint8_t* buffer, size_t size) {
|
||||
File* file = (File*)context;
|
||||
return storage_file_write(file, buffer, size);
|
||||
}
|
||||
/*
|
||||
Source file was generated with:
|
||||
```python3
|
||||
import random, string
|
||||
random.seed(1337)
|
||||
with open("hsstream.out.bin", "wb") as f:
|
||||
for c in random.choices(string.printable, k=1024):
|
||||
for _ in range(random.randint(1, 10)):
|
||||
f.write(c.encode())
|
||||
```
|
||||
|
||||
It was compressed with heatshrink using the following command:
|
||||
`python3 -m heatshrink2 compress -w 9 -l 4 hsstream.out.bin hsstream.in.bin`
|
||||
*/
|
||||
|
||||
#define HSSTREAM_IN COMPRESS_UNIT_TESTS_PATH("hsstream.in.bin")
|
||||
#define HSSTREAM_OUT COMPRESS_UNIT_TESTS_PATH("hsstream.out.bin")
|
||||
|
||||
static void compress_test_heatshrink_stream() {
|
||||
Storage* api = furi_record_open(RECORD_STORAGE);
|
||||
File* comp_file = storage_file_alloc(api);
|
||||
File* dest_file = storage_file_alloc(api);
|
||||
|
||||
CompressConfigHeatshrink config = {
|
||||
.window_sz2 = 9,
|
||||
.lookahead_sz2 = 4,
|
||||
.input_buffer_sz = 128,
|
||||
};
|
||||
Compress* compress = compress_alloc(CompressTypeHeatshrink, &config);
|
||||
|
||||
do {
|
||||
storage_simply_remove(api, HSSTREAM_OUT);
|
||||
|
||||
mu_assert(
|
||||
storage_file_open(comp_file, HSSTREAM_IN, FSAM_READ, FSOM_OPEN_EXISTING),
|
||||
"Failed to open compressed file");
|
||||
|
||||
mu_assert(
|
||||
storage_file_open(dest_file, HSSTREAM_OUT, FSAM_WRITE, FSOM_OPEN_ALWAYS),
|
||||
"Failed to open decompressed file");
|
||||
|
||||
mu_assert(
|
||||
compress_decode_streamed(
|
||||
compress, hs_unpacker_file_read, comp_file, hs_unpacker_file_write, dest_file),
|
||||
"Decompression failed");
|
||||
|
||||
storage_file_close(dest_file);
|
||||
|
||||
unsigned char md5[16];
|
||||
FS_Error file_error;
|
||||
mu_assert(
|
||||
md5_calc_file(dest_file, HSSTREAM_OUT, md5, &file_error), "Failed to calculate md5");
|
||||
|
||||
const unsigned char expected_md5[16] = {
|
||||
0xa3,
|
||||
0x70,
|
||||
0xe8,
|
||||
0x8b,
|
||||
0xa9,
|
||||
0x42,
|
||||
0x74,
|
||||
0xf4,
|
||||
0xaa,
|
||||
0x12,
|
||||
0x8d,
|
||||
0x41,
|
||||
0xd2,
|
||||
0xb6,
|
||||
0x71,
|
||||
0xc9};
|
||||
mu_assert(memcmp(md5, expected_md5, sizeof(md5)) == 0, "MD5 mismatch after decompression");
|
||||
|
||||
storage_simply_remove(api, HSSTREAM_OUT);
|
||||
} while(false);
|
||||
|
||||
compress_free(compress);
|
||||
storage_file_free(comp_file);
|
||||
storage_file_free(dest_file);
|
||||
furi_record_close(RECORD_STORAGE);
|
||||
}
|
||||
|
||||
#define HS_TAR_PATH COMPRESS_UNIT_TESTS_PATH("test.ths")
|
||||
#define HS_TAR_EXTRACT_PATH COMPRESS_UNIT_TESTS_PATH("tar_out")
|
||||
|
||||
static bool file_counter(const char* name, bool is_dir, void* context) {
|
||||
UNUSED(name);
|
||||
UNUSED(is_dir);
|
||||
int32_t* n_entries = (int32_t*)context;
|
||||
(*n_entries)++;
|
||||
return true;
|
||||
}
|
||||
|
||||
/*
|
||||
Heatshrink tar file contents and MD5 sums:
|
||||
file1.txt: 64295676ceed5cce2d0dcac402e4bda4
|
||||
file2.txt: 188f67f297eedd7bf3d6a4d3c2fc31c4
|
||||
dir/file3.txt: 34d98ad8135ffe502dba374690136d16
|
||||
dir/big_file.txt: ee169c1e1791a4d319dbfaefaa850e98
|
||||
dir/nested_dir/file4.txt: e099fcb2aaa0672375eaedc549247ee6
|
||||
dir/nested_dir/empty_file.txt: d41d8cd98f00b204e9800998ecf8427e
|
||||
|
||||
XOR of all MD5 sums: 92ed5729786d0e1176d047e35f52d376
|
||||
*/
|
||||
|
||||
static void compress_test_heatshrink_tar() {
|
||||
Storage* api = furi_record_open(RECORD_STORAGE);
|
||||
|
||||
TarArchive* archive = tar_archive_alloc(api);
|
||||
FuriString* path = furi_string_alloc();
|
||||
FileInfo fileinfo;
|
||||
File* file = storage_file_alloc(api);
|
||||
|
||||
do {
|
||||
storage_simply_remove_recursive(api, HS_TAR_EXTRACT_PATH);
|
||||
|
||||
mu_assert(storage_simply_mkdir(api, HS_TAR_EXTRACT_PATH), "Failed to create extract dir");
|
||||
|
||||
mu_assert(
|
||||
tar_archive_get_mode_for_path(HS_TAR_PATH) == TarOpenModeReadHeatshrink,
|
||||
"Invalid mode for heatshrink tar");
|
||||
|
||||
mu_assert(
|
||||
tar_archive_open(archive, HS_TAR_PATH, TarOpenModeReadHeatshrink),
|
||||
"Failed to open heatshrink tar");
|
||||
|
||||
int32_t n_entries = 0;
|
||||
tar_archive_set_file_callback(archive, file_counter, &n_entries);
|
||||
|
||||
mu_assert(
|
||||
tar_archive_unpack_to(archive, HS_TAR_EXTRACT_PATH, NULL),
|
||||
"Failed to unpack heatshrink tar");
|
||||
|
||||
mu_assert(n_entries == 9, "Invalid number of entries in heatshrink tar");
|
||||
|
||||
uint8_t md5_total[16] = {0}, md5_file[16];
|
||||
|
||||
DirWalk* dir_walk = dir_walk_alloc(api);
|
||||
mu_assert(dir_walk_open(dir_walk, HS_TAR_EXTRACT_PATH), "Failed to open dirwalk");
|
||||
while(dir_walk_read(dir_walk, path, &fileinfo) == DirWalkOK) {
|
||||
if(file_info_is_dir(&fileinfo)) {
|
||||
continue;
|
||||
}
|
||||
mu_assert(
|
||||
md5_calc_file(file, furi_string_get_cstr(path), md5_file, NULL),
|
||||
"Failed to calc md5");
|
||||
|
||||
for(size_t i = 0; i < 16; i++) {
|
||||
md5_total[i] ^= md5_file[i];
|
||||
}
|
||||
}
|
||||
dir_walk_free(dir_walk);
|
||||
|
||||
static const unsigned char expected_md5[16] = {
|
||||
0x92,
|
||||
0xed,
|
||||
0x57,
|
||||
0x29,
|
||||
0x78,
|
||||
0x6d,
|
||||
0x0e,
|
||||
0x11,
|
||||
0x76,
|
||||
0xd0,
|
||||
0x47,
|
||||
0xe3,
|
||||
0x5f,
|
||||
0x52,
|
||||
0xd3,
|
||||
0x76};
|
||||
mu_assert(memcmp(md5_total, expected_md5, sizeof(md5_total)) == 0, "MD5 mismatch");
|
||||
|
||||
storage_simply_remove_recursive(api, HS_TAR_EXTRACT_PATH);
|
||||
} while(false);
|
||||
|
||||
storage_file_free(file);
|
||||
furi_string_free(path);
|
||||
tar_archive_free(archive);
|
||||
furi_record_close(RECORD_STORAGE);
|
||||
}
|
||||
|
||||
MU_TEST_SUITE(test_compress) {
|
||||
MU_RUN_TEST(compress_test_random_comp_decomp);
|
||||
MU_RUN_TEST(compress_test_reference_comp_decomp);
|
||||
MU_RUN_TEST(compress_test_heatshrink_stream);
|
||||
MU_RUN_TEST(compress_test_heatshrink_tar);
|
||||
}
|
||||
|
||||
int run_minunit_test_compress(void) {
|
||||
|
|
|
@ -36,7 +36,22 @@ bool ibutton_scene_add_value_on_event(void* context, SceneManagerEvent event) {
|
|||
if(event.type == SceneManagerEventTypeCustom) {
|
||||
consumed = true;
|
||||
if(event.event == iButtonCustomEventByteEditResult) {
|
||||
scene_manager_next_scene(scene_manager, iButtonSceneSaveName);
|
||||
furi_string_printf(
|
||||
ibutton->file_path,
|
||||
"%s/%s%s",
|
||||
IBUTTON_APP_FOLDER,
|
||||
ibutton->key_name,
|
||||
IBUTTON_APP_FILENAME_EXTENSION);
|
||||
|
||||
if(ibutton_save_key(ibutton)) {
|
||||
scene_manager_next_scene(ibutton->scene_manager, iButtonSceneSaveSuccess);
|
||||
|
||||
} else {
|
||||
const uint32_t possible_scenes[] = {
|
||||
iButtonSceneReadKeyMenu, iButtonSceneSavedKeyMenu, iButtonSceneAddType};
|
||||
scene_manager_search_and_switch_to_previous_scene_one_of(
|
||||
ibutton->scene_manager, possible_scenes, COUNT_OF(possible_scenes));
|
||||
}
|
||||
} else if(event.event == iButtonCustomEventByteEditChanged) {
|
||||
ibutton_protocols_apply_edits(ibutton->protocols, ibutton->key);
|
||||
}
|
||||
|
|
|
@ -41,9 +41,17 @@ bool ibutton_scene_save_name_on_event(void* context, SceneManagerEvent event) {
|
|||
iButton* ibutton = context;
|
||||
bool consumed = false;
|
||||
|
||||
const bool is_new_file = furi_string_empty(ibutton->file_path);
|
||||
|
||||
if(event.type == SceneManagerEventTypeCustom) {
|
||||
consumed = true;
|
||||
if(event.event == iButtonCustomEventTextEditResult) {
|
||||
if(!is_new_file) {
|
||||
Storage* storage = furi_record_open(RECORD_STORAGE);
|
||||
storage_simply_remove(storage, furi_string_get_cstr(ibutton->file_path));
|
||||
furi_record_close(RECORD_STORAGE);
|
||||
}
|
||||
|
||||
furi_string_printf(
|
||||
ibutton->file_path,
|
||||
"%s/%s%s",
|
||||
|
|
|
@ -6,6 +6,7 @@ enum SubmenuIndex {
|
|||
SubmenuIndexWriteBlank,
|
||||
SubmenuIndexWriteCopy,
|
||||
SubmenuIndexEdit,
|
||||
SubmenuIndexRename,
|
||||
SubmenuIndexDelete,
|
||||
SubmenuIndexInfo,
|
||||
};
|
||||
|
@ -34,6 +35,7 @@ void ibutton_scene_saved_key_menu_on_enter(void* context) {
|
|||
}
|
||||
|
||||
submenu_add_item(submenu, "Edit", SubmenuIndexEdit, ibutton_submenu_callback, ibutton);
|
||||
submenu_add_item(submenu, "Rename", SubmenuIndexRename, ibutton_submenu_callback, ibutton);
|
||||
submenu_add_item(submenu, "Delete", SubmenuIndexDelete, ibutton_submenu_callback, ibutton);
|
||||
submenu_add_item(submenu, "Info", SubmenuIndexInfo, ibutton_submenu_callback, ibutton);
|
||||
|
||||
|
@ -61,6 +63,8 @@ bool ibutton_scene_saved_key_menu_on_event(void* context, SceneManagerEvent even
|
|||
scene_manager_next_scene(scene_manager, iButtonSceneWrite);
|
||||
} else if(event.event == SubmenuIndexEdit) {
|
||||
scene_manager_next_scene(scene_manager, iButtonSceneAddValue);
|
||||
} else if(event.event == SubmenuIndexRename) {
|
||||
scene_manager_next_scene(scene_manager, iButtonSceneSaveName);
|
||||
} else if(event.event == SubmenuIndexDelete) {
|
||||
scene_manager_next_scene(scene_manager, iButtonSceneDeleteConfirm);
|
||||
} else if(event.event == SubmenuIndexInfo) {
|
||||
|
|
|
@ -31,8 +31,17 @@ bool lfrfid_scene_save_data_on_event(void* context, SceneManagerEvent event) {
|
|||
consumed = true;
|
||||
size_t size = protocol_dict_get_data_size(app->dict, app->protocol_id);
|
||||
protocol_dict_set_data(app->dict, app->protocol_id, app->new_key_data, size);
|
||||
scene_manager_next_scene(scene_manager, LfRfidSceneSaveName);
|
||||
scene_manager_set_scene_state(scene_manager, LfRfidSceneSaveData, 1);
|
||||
|
||||
if(!furi_string_empty(app->file_name)) {
|
||||
lfrfid_delete_key(app);
|
||||
}
|
||||
|
||||
if(lfrfid_save_key(app)) {
|
||||
scene_manager_next_scene(scene_manager, LfRfidSceneSaveSuccess);
|
||||
} else {
|
||||
scene_manager_search_and_switch_to_previous_scene(
|
||||
scene_manager, LfRfidSceneSavedKeyMenu);
|
||||
}
|
||||
}
|
||||
} else if(event.type == SceneManagerEventTypeBack) {
|
||||
scene_manager_set_scene_state(scene_manager, LfRfidSceneSaveData, 0);
|
||||
|
|
|
@ -6,6 +6,7 @@ typedef enum {
|
|||
SubmenuIndexWrite,
|
||||
SubmenuIndexWriteAndSetPass,
|
||||
SubmenuIndexEdit,
|
||||
SubmenuIndexRename,
|
||||
SubmenuIndexDelete,
|
||||
SubmenuIndexInfo,
|
||||
} SubmenuIndex;
|
||||
|
@ -32,6 +33,8 @@ void lfrfid_scene_saved_key_menu_on_enter(void* context) {
|
|||
app);
|
||||
submenu_add_item(
|
||||
submenu, "Edit", SubmenuIndexEdit, lfrfid_scene_saved_key_menu_submenu_callback, app);
|
||||
submenu_add_item(
|
||||
submenu, "Rename", SubmenuIndexRename, lfrfid_scene_saved_key_menu_submenu_callback, app);
|
||||
submenu_add_item(
|
||||
submenu, "Delete", SubmenuIndexDelete, lfrfid_scene_saved_key_menu_submenu_callback, app);
|
||||
submenu_add_item(
|
||||
|
@ -63,6 +66,9 @@ bool lfrfid_scene_saved_key_menu_on_event(void* context, SceneManagerEvent event
|
|||
} else if(event.event == SubmenuIndexEdit) {
|
||||
scene_manager_next_scene(app->scene_manager, LfRfidSceneSaveData);
|
||||
consumed = true;
|
||||
} else if(event.event == SubmenuIndexRename) {
|
||||
scene_manager_next_scene(app->scene_manager, LfRfidSceneSaveName);
|
||||
consumed = true;
|
||||
} else if(event.event == SubmenuIndexDelete) {
|
||||
scene_manager_next_scene(app->scene_manager, LfRfidSceneDeleteConfirm);
|
||||
consumed = true;
|
||||
|
|
|
@ -22,7 +22,7 @@ void nfc_scene_mf_classic_mfkey_complete_on_enter(void* context) {
|
|||
AlignCenter,
|
||||
AlignTop,
|
||||
FontSecondary,
|
||||
"Now use Mfkey32 to extract \nkeys: lab.flipper.net/nfc-tools");
|
||||
"Now use Mfkey32 to extract \nkeys: r.flipper.net/nfc-tools");
|
||||
widget_add_icon_element(instance->widget, 50, 39, &I_MFKey_qr_25x25);
|
||||
widget_add_button_element(
|
||||
instance->widget,
|
||||
|
|
|
@ -700,21 +700,21 @@ static void rpc_system_storage_tar_extract_process(const PB_Main* request, void*
|
|||
TarArchive* archive = tar_archive_alloc(rpc_storage->api);
|
||||
|
||||
do {
|
||||
if(!path_contains_only_ascii(request->content.storage_tar_extract_request.out_path)) {
|
||||
const char *tar_path = request->content.storage_tar_extract_request.tar_path,
|
||||
*out_path = request->content.storage_tar_extract_request.out_path;
|
||||
if(!path_contains_only_ascii(out_path)) {
|
||||
status = PB_CommandStatus_ERROR_STORAGE_INVALID_NAME;
|
||||
break;
|
||||
}
|
||||
|
||||
if(!tar_archive_open(
|
||||
archive,
|
||||
request->content.storage_tar_extract_request.tar_path,
|
||||
TAR_OPEN_MODE_READ)) {
|
||||
TarOpenMode tar_mode = tar_archive_get_mode_for_path(tar_path);
|
||||
|
||||
if(!tar_archive_open(archive, tar_path, tar_mode)) {
|
||||
status = PB_CommandStatus_ERROR_STORAGE_INVALID_PARAMETER;
|
||||
break;
|
||||
}
|
||||
|
||||
if(!tar_archive_unpack_to(
|
||||
archive, request->content.storage_tar_extract_request.out_path, NULL)) {
|
||||
if(!tar_archive_unpack_to(archive, out_path, NULL)) {
|
||||
status = PB_CommandStatus_ERROR_STORAGE_INTERNAL;
|
||||
break;
|
||||
}
|
||||
|
|
|
@ -5,42 +5,22 @@
|
|||
#include <lib/toolbox/args.h>
|
||||
#include <lib/toolbox/md5_calc.h>
|
||||
#include <lib/toolbox/dir_walk.h>
|
||||
#include <lib/toolbox/tar/tar_archive.h>
|
||||
#include <storage/storage.h>
|
||||
#include <storage/storage_sd_api.h>
|
||||
#include <power/power_service/power.h>
|
||||
|
||||
#define MAX_NAME_LENGTH 255
|
||||
|
||||
static void storage_cli_print_usage(void) {
|
||||
printf("Usage:\r\n");
|
||||
printf("storage <cmd> <path> <args>\r\n");
|
||||
printf("The path must start with /int or /ext\r\n");
|
||||
printf("Cmd list:\r\n");
|
||||
printf("\tinfo\t - get FS info\r\n");
|
||||
printf("\tformat\t - format filesystem\r\n");
|
||||
printf("\tlist\t - list files and dirs\r\n");
|
||||
printf("\ttree\t - list files and dirs, recursive\r\n");
|
||||
printf("\tremove\t - delete the file or directory\r\n");
|
||||
printf("\tread\t - read text from file and print file size and content to cli\r\n");
|
||||
printf(
|
||||
"\tread_chunks\t - read data from file and print file size and content to cli, <args> should contain how many bytes you want to read in block\r\n");
|
||||
printf("\twrite\t - read text from cli and append it to file, stops by ctrl+c\r\n");
|
||||
printf(
|
||||
"\twrite_chunk\t - read data from cli and append it to file, <args> should contain how many bytes you want to write\r\n");
|
||||
printf("\tcopy\t - copy file to new file, <args> must contain new path\r\n");
|
||||
printf("\trename\t - move file to new file, <args> must contain new path\r\n");
|
||||
printf("\tmkdir\t - creates a new directory\r\n");
|
||||
printf("\tmd5\t - md5 hash of the file\r\n");
|
||||
printf("\tstat\t - info about file or dir\r\n");
|
||||
printf("\ttimestamp\t - last modification timestamp\r\n");
|
||||
};
|
||||
static void storage_cli_print_usage(void);
|
||||
|
||||
static void storage_cli_print_error(FS_Error error) {
|
||||
printf("Storage error: %s\r\n", storage_error_get_desc(error));
|
||||
}
|
||||
|
||||
static void storage_cli_info(Cli* cli, FuriString* path) {
|
||||
static void storage_cli_info(Cli* cli, FuriString* path, FuriString* args) {
|
||||
UNUSED(cli);
|
||||
UNUSED(args);
|
||||
Storage* api = furi_record_open(RECORD_STORAGE);
|
||||
|
||||
if(furi_string_cmp_str(path, STORAGE_INT_PATH_PREFIX) == 0) {
|
||||
|
@ -88,7 +68,8 @@ static void storage_cli_info(Cli* cli, FuriString* path) {
|
|||
furi_record_close(RECORD_STORAGE);
|
||||
};
|
||||
|
||||
static void storage_cli_format(Cli* cli, FuriString* path) {
|
||||
static void storage_cli_format(Cli* cli, FuriString* path, FuriString* args) {
|
||||
UNUSED(args);
|
||||
if(furi_string_cmp_str(path, STORAGE_INT_PATH_PREFIX) == 0) {
|
||||
storage_cli_print_error(FSE_NOT_IMPLEMENTED);
|
||||
} else if(furi_string_cmp_str(path, STORAGE_EXT_PATH_PREFIX) == 0) {
|
||||
|
@ -114,8 +95,9 @@ static void storage_cli_format(Cli* cli, FuriString* path) {
|
|||
}
|
||||
};
|
||||
|
||||
static void storage_cli_list(Cli* cli, FuriString* path) {
|
||||
static void storage_cli_list(Cli* cli, FuriString* path, FuriString* args) {
|
||||
UNUSED(cli);
|
||||
UNUSED(args);
|
||||
if(furi_string_cmp_str(path, "/") == 0) {
|
||||
printf("\t[D] int\r\n");
|
||||
printf("\t[D] ext\r\n");
|
||||
|
@ -151,12 +133,13 @@ static void storage_cli_list(Cli* cli, FuriString* path) {
|
|||
}
|
||||
}
|
||||
|
||||
static void storage_cli_tree(Cli* cli, FuriString* path) {
|
||||
static void storage_cli_tree(Cli* cli, FuriString* path, FuriString* args) {
|
||||
UNUSED(args);
|
||||
if(furi_string_cmp_str(path, "/") == 0) {
|
||||
furi_string_set(path, STORAGE_INT_PATH_PREFIX);
|
||||
storage_cli_tree(cli, path);
|
||||
storage_cli_tree(cli, path, NULL);
|
||||
furi_string_set(path, STORAGE_EXT_PATH_PREFIX);
|
||||
storage_cli_tree(cli, path);
|
||||
storage_cli_tree(cli, path, NULL);
|
||||
} else {
|
||||
Storage* api = furi_record_open(RECORD_STORAGE);
|
||||
DirWalk* dir_walk = dir_walk_alloc(api);
|
||||
|
@ -192,8 +175,9 @@ static void storage_cli_tree(Cli* cli, FuriString* path) {
|
|||
}
|
||||
}
|
||||
|
||||
static void storage_cli_read(Cli* cli, FuriString* path) {
|
||||
static void storage_cli_read(Cli* cli, FuriString* path, FuriString* args) {
|
||||
UNUSED(cli);
|
||||
UNUSED(args);
|
||||
Storage* api = furi_record_open(RECORD_STORAGE);
|
||||
File* file = storage_file_alloc(api);
|
||||
|
||||
|
@ -223,7 +207,8 @@ static void storage_cli_read(Cli* cli, FuriString* path) {
|
|||
furi_record_close(RECORD_STORAGE);
|
||||
}
|
||||
|
||||
static void storage_cli_write(Cli* cli, FuriString* path) {
|
||||
static void storage_cli_write(Cli* cli, FuriString* path, FuriString* args) {
|
||||
UNUSED(args);
|
||||
Storage* api = furi_record_open(RECORD_STORAGE);
|
||||
File* file = storage_file_alloc(api);
|
||||
|
||||
|
@ -353,8 +338,9 @@ static void storage_cli_write_chunk(Cli* cli, FuriString* path, FuriString* args
|
|||
furi_record_close(RECORD_STORAGE);
|
||||
}
|
||||
|
||||
static void storage_cli_stat(Cli* cli, FuriString* path) {
|
||||
static void storage_cli_stat(Cli* cli, FuriString* path, FuriString* args) {
|
||||
UNUSED(cli);
|
||||
UNUSED(args);
|
||||
Storage* api = furi_record_open(RECORD_STORAGE);
|
||||
|
||||
if(furi_string_cmp_str(path, "/") == 0) {
|
||||
|
@ -394,8 +380,9 @@ static void storage_cli_stat(Cli* cli, FuriString* path) {
|
|||
furi_record_close(RECORD_STORAGE);
|
||||
}
|
||||
|
||||
static void storage_cli_timestamp(Cli* cli, FuriString* path) {
|
||||
static void storage_cli_timestamp(Cli* cli, FuriString* path, FuriString* args) {
|
||||
UNUSED(cli);
|
||||
UNUSED(args);
|
||||
Storage* api = furi_record_open(RECORD_STORAGE);
|
||||
|
||||
uint32_t timestamp = 0;
|
||||
|
@ -431,8 +418,9 @@ static void storage_cli_copy(Cli* cli, FuriString* old_path, FuriString* args) {
|
|||
furi_record_close(RECORD_STORAGE);
|
||||
}
|
||||
|
||||
static void storage_cli_remove(Cli* cli, FuriString* path) {
|
||||
static void storage_cli_remove(Cli* cli, FuriString* path, FuriString* args) {
|
||||
UNUSED(cli);
|
||||
UNUSED(args);
|
||||
Storage* api = furi_record_open(RECORD_STORAGE);
|
||||
FS_Error error = storage_common_remove(api, furi_string_get_cstr(path));
|
||||
|
||||
|
@ -464,8 +452,9 @@ static void storage_cli_rename(Cli* cli, FuriString* old_path, FuriString* args)
|
|||
furi_record_close(RECORD_STORAGE);
|
||||
}
|
||||
|
||||
static void storage_cli_mkdir(Cli* cli, FuriString* path) {
|
||||
static void storage_cli_mkdir(Cli* cli, FuriString* path, FuriString* args) {
|
||||
UNUSED(cli);
|
||||
UNUSED(args);
|
||||
Storage* api = furi_record_open(RECORD_STORAGE);
|
||||
FS_Error error = storage_common_mkdir(api, furi_string_get_cstr(path));
|
||||
|
||||
|
@ -476,8 +465,9 @@ static void storage_cli_mkdir(Cli* cli, FuriString* path) {
|
|||
furi_record_close(RECORD_STORAGE);
|
||||
}
|
||||
|
||||
static void storage_cli_md5(Cli* cli, FuriString* path) {
|
||||
static void storage_cli_md5(Cli* cli, FuriString* path, FuriString* args) {
|
||||
UNUSED(cli);
|
||||
UNUSED(args);
|
||||
Storage* api = furi_record_open(RECORD_STORAGE);
|
||||
File* file = storage_file_alloc(api);
|
||||
FuriString* md5 = furi_string_alloc();
|
||||
|
@ -496,6 +486,152 @@ static void storage_cli_md5(Cli* cli, FuriString* path) {
|
|||
furi_record_close(RECORD_STORAGE);
|
||||
}
|
||||
|
||||
static bool tar_extract_file_callback(const char* name, bool is_directory, void* context) {
|
||||
UNUSED(context);
|
||||
printf("\t%s %s\r\n", is_directory ? "D" : "F", name);
|
||||
return true;
|
||||
}
|
||||
|
||||
static void storage_cli_extract(Cli* cli, FuriString* old_path, FuriString* args) {
|
||||
UNUSED(cli);
|
||||
FuriString* new_path = furi_string_alloc();
|
||||
|
||||
if(!args_read_probably_quoted_string_and_trim(args, new_path)) {
|
||||
storage_cli_print_usage();
|
||||
furi_string_free(new_path);
|
||||
return;
|
||||
}
|
||||
|
||||
Storage* api = furi_record_open(RECORD_STORAGE);
|
||||
|
||||
TarArchive* archive = tar_archive_alloc(api);
|
||||
TarOpenMode tar_mode = tar_archive_get_mode_for_path(furi_string_get_cstr(old_path));
|
||||
do {
|
||||
if(!tar_archive_open(archive, furi_string_get_cstr(old_path), tar_mode)) {
|
||||
printf("Failed to open archive\r\n");
|
||||
break;
|
||||
}
|
||||
uint32_t start_tick = furi_get_tick();
|
||||
tar_archive_set_file_callback(archive, tar_extract_file_callback, NULL);
|
||||
printf("Unpacking to %s\r\n", furi_string_get_cstr(new_path));
|
||||
bool success = tar_archive_unpack_to(archive, furi_string_get_cstr(new_path), NULL);
|
||||
uint32_t end_tick = furi_get_tick();
|
||||
printf(
|
||||
"Decompression %s in %lu ticks \r\n",
|
||||
success ? "success" : "failed",
|
||||
end_tick - start_tick);
|
||||
} while(false);
|
||||
|
||||
tar_archive_free(archive);
|
||||
furi_string_free(new_path);
|
||||
furi_record_close(RECORD_STORAGE);
|
||||
}
|
||||
|
||||
typedef void (*StorageCliCommandCallback)(Cli* cli, FuriString* path, FuriString* args);
|
||||
|
||||
typedef struct {
|
||||
const char* command;
|
||||
const char* help;
|
||||
const StorageCliCommandCallback impl;
|
||||
} StorageCliCommand;
|
||||
|
||||
static const StorageCliCommand storage_cli_commands[] = {
|
||||
{
|
||||
"write_chunk",
|
||||
"read data from cli and append it to file, <args> should contain how many bytes you want to write",
|
||||
&storage_cli_write_chunk,
|
||||
},
|
||||
{
|
||||
"read_chunks",
|
||||
"read data from file and print file size and content to cli, <args> should contain how many bytes you want to read in block",
|
||||
&storage_cli_read_chunks,
|
||||
},
|
||||
{
|
||||
"list",
|
||||
"list files and dirs",
|
||||
&storage_cli_list,
|
||||
},
|
||||
{
|
||||
"md5",
|
||||
"md5 hash of the file",
|
||||
&storage_cli_md5,
|
||||
},
|
||||
{
|
||||
"stat",
|
||||
"info about file or dir",
|
||||
&storage_cli_stat,
|
||||
},
|
||||
{
|
||||
"info",
|
||||
"get FS info",
|
||||
&storage_cli_info,
|
||||
},
|
||||
{
|
||||
"tree",
|
||||
"list files and dirs, recursive",
|
||||
&storage_cli_tree,
|
||||
},
|
||||
{
|
||||
"read",
|
||||
"read text from file and print file size and content to cli",
|
||||
&storage_cli_read,
|
||||
},
|
||||
{
|
||||
"write",
|
||||
"read text from cli and append it to file, stops by ctrl+c",
|
||||
&storage_cli_write,
|
||||
},
|
||||
{
|
||||
"copy",
|
||||
"copy file to new file, <args> must contain new path",
|
||||
&storage_cli_copy,
|
||||
},
|
||||
{
|
||||
"remove",
|
||||
"delete the file or directory",
|
||||
&storage_cli_remove,
|
||||
},
|
||||
{
|
||||
"rename",
|
||||
"move file to new file, <args> must contain new path",
|
||||
&storage_cli_rename,
|
||||
},
|
||||
{
|
||||
"mkdir",
|
||||
"creates a new directory",
|
||||
&storage_cli_mkdir,
|
||||
},
|
||||
{
|
||||
"timestamp",
|
||||
"last modification timestamp",
|
||||
&storage_cli_timestamp,
|
||||
},
|
||||
{
|
||||
"extract",
|
||||
"extract tar archive to destination",
|
||||
&storage_cli_extract,
|
||||
},
|
||||
{
|
||||
"format",
|
||||
"format filesystem",
|
||||
&storage_cli_format,
|
||||
},
|
||||
};
|
||||
|
||||
static void storage_cli_print_usage(void) {
|
||||
printf("Usage:\r\n");
|
||||
printf("storage <cmd> <path> <args>\r\n");
|
||||
printf("The path must start with /int or /ext\r\n");
|
||||
printf("Cmd list:\r\n");
|
||||
|
||||
for(size_t i = 0; i < COUNT_OF(storage_cli_commands); ++i) {
|
||||
const StorageCliCommand* command_descr = &storage_cli_commands[i];
|
||||
const char* cli_cmd = command_descr->command;
|
||||
printf(
|
||||
"\t%s%s - %s\r\n", cli_cmd, strlen(cli_cmd) > 8 ? "\t" : "\t\t", command_descr->help);
|
||||
}
|
||||
};
|
||||
|
||||
void storage_cli(Cli* cli, FuriString* args, void* context) {
|
||||
UNUSED(context);
|
||||
FuriString* cmd;
|
||||
|
@ -514,82 +650,18 @@ void storage_cli(Cli* cli, FuriString* args, void* context) {
|
|||
break;
|
||||
}
|
||||
|
||||
if(furi_string_cmp_str(cmd, "info") == 0) {
|
||||
storage_cli_info(cli, path);
|
||||
break;
|
||||
size_t i = 0;
|
||||
for(; i < COUNT_OF(storage_cli_commands); ++i) {
|
||||
const StorageCliCommand* command_descr = &storage_cli_commands[i];
|
||||
if(furi_string_cmp_str(cmd, command_descr->command) == 0) {
|
||||
command_descr->impl(cli, path, args);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if(furi_string_cmp_str(cmd, "format") == 0) {
|
||||
storage_cli_format(cli, path);
|
||||
break;
|
||||
if(i == COUNT_OF(storage_cli_commands)) {
|
||||
storage_cli_print_usage();
|
||||
}
|
||||
|
||||
if(furi_string_cmp_str(cmd, "list") == 0) {
|
||||
storage_cli_list(cli, path);
|
||||
break;
|
||||
}
|
||||
|
||||
if(furi_string_cmp_str(cmd, "tree") == 0) {
|
||||
storage_cli_tree(cli, path);
|
||||
break;
|
||||
}
|
||||
|
||||
if(furi_string_cmp_str(cmd, "read") == 0) {
|
||||
storage_cli_read(cli, path);
|
||||
break;
|
||||
}
|
||||
|
||||
if(furi_string_cmp_str(cmd, "read_chunks") == 0) {
|
||||
storage_cli_read_chunks(cli, path, args);
|
||||
break;
|
||||
}
|
||||
|
||||
if(furi_string_cmp_str(cmd, "write") == 0) {
|
||||
storage_cli_write(cli, path);
|
||||
break;
|
||||
}
|
||||
|
||||
if(furi_string_cmp_str(cmd, "write_chunk") == 0) {
|
||||
storage_cli_write_chunk(cli, path, args);
|
||||
break;
|
||||
}
|
||||
|
||||
if(furi_string_cmp_str(cmd, "copy") == 0) {
|
||||
storage_cli_copy(cli, path, args);
|
||||
break;
|
||||
}
|
||||
|
||||
if(furi_string_cmp_str(cmd, "remove") == 0) {
|
||||
storage_cli_remove(cli, path);
|
||||
break;
|
||||
}
|
||||
|
||||
if(furi_string_cmp_str(cmd, "rename") == 0) {
|
||||
storage_cli_rename(cli, path, args);
|
||||
break;
|
||||
}
|
||||
|
||||
if(furi_string_cmp_str(cmd, "mkdir") == 0) {
|
||||
storage_cli_mkdir(cli, path);
|
||||
break;
|
||||
}
|
||||
|
||||
if(furi_string_cmp_str(cmd, "md5") == 0) {
|
||||
storage_cli_md5(cli, path);
|
||||
break;
|
||||
}
|
||||
|
||||
if(furi_string_cmp_str(cmd, "stat") == 0) {
|
||||
storage_cli_stat(cli, path);
|
||||
break;
|
||||
}
|
||||
|
||||
if(furi_string_cmp_str(cmd, "timestamp") == 0) {
|
||||
storage_cli_timestamp(cli, path);
|
||||
break;
|
||||
}
|
||||
|
||||
storage_cli_print_usage();
|
||||
} while(false);
|
||||
|
||||
furi_string_free(path);
|
||||
|
|
|
@ -6,7 +6,7 @@ FS_Error storage_int_backup(Storage* storage, const char* dstname) {
|
|||
furi_check(storage);
|
||||
|
||||
TarArchive* archive = tar_archive_alloc(storage);
|
||||
bool success = tar_archive_open(archive, dstname, TAR_OPEN_MODE_WRITE) &&
|
||||
bool success = tar_archive_open(archive, dstname, TarOpenModeWrite) &&
|
||||
tar_archive_add_dir(archive, STORAGE_INT_PATH_PREFIX, "") &&
|
||||
tar_archive_finalize(archive);
|
||||
tar_archive_free(archive);
|
||||
|
@ -18,7 +18,7 @@ FS_Error
|
|||
furi_check(storage);
|
||||
|
||||
TarArchive* archive = tar_archive_alloc(storage);
|
||||
bool success = tar_archive_open(archive, srcname, TAR_OPEN_MODE_READ) &&
|
||||
bool success = tar_archive_open(archive, srcname, TarOpenModeRead) &&
|
||||
tar_archive_unpack_to(archive, STORAGE_INT_PATH_PREFIX, converter);
|
||||
tar_archive_free(archive);
|
||||
return success ? FSE_OK : FSE_INTERNAL;
|
||||
|
|
|
@ -9,6 +9,8 @@
|
|||
#include <update_util/lfs_backup.h>
|
||||
#include <update_util/update_operation.h>
|
||||
|
||||
#define TAG "UpdWorker"
|
||||
|
||||
static const char* update_task_stage_descr[] = {
|
||||
[UpdateTaskStageProgress] = "...",
|
||||
[UpdateTaskStageReadManifest] = "Loading update manifest",
|
||||
|
@ -23,7 +25,9 @@ static const char* update_task_stage_descr[] = {
|
|||
[UpdateTaskStageOBValidation] = "Validating opt. bytes",
|
||||
[UpdateTaskStageLfsBackup] = "Backing up LFS",
|
||||
[UpdateTaskStageLfsRestore] = "Restoring LFS",
|
||||
[UpdateTaskStageResourcesUpdate] = "Updating resources",
|
||||
[UpdateTaskStageResourcesFileCleanup] = "Cleaning up files",
|
||||
[UpdateTaskStageResourcesDirCleanup] = "Cleaning up directories",
|
||||
[UpdateTaskStageResourcesFileUnpack] = "Extracting resources",
|
||||
[UpdateTaskStageSplashscreenInstall] = "Installing splashscreen",
|
||||
[UpdateTaskStageCompleted] = "Restarting...",
|
||||
[UpdateTaskStageError] = "Error",
|
||||
|
@ -196,7 +200,19 @@ static const struct {
|
|||
.descr = "LFS I/O error",
|
||||
},
|
||||
{
|
||||
.stage = UpdateTaskStageResourcesUpdate,
|
||||
.stage = UpdateTaskStageResourcesFileCleanup,
|
||||
.percent_min = 0,
|
||||
.percent_max = 100,
|
||||
.descr = "SD card I/O error",
|
||||
},
|
||||
{
|
||||
.stage = UpdateTaskStageResourcesDirCleanup,
|
||||
.percent_min = 0,
|
||||
.percent_max = 100,
|
||||
.descr = "SD card I/O error",
|
||||
},
|
||||
{
|
||||
.stage = UpdateTaskStageResourcesFileUnpack,
|
||||
.percent_min = 0,
|
||||
.percent_max = 100,
|
||||
.descr = "SD card I/O error",
|
||||
|
@ -230,20 +246,22 @@ static const UpdateTaskStageGroupMap update_task_stage_progress[] = {
|
|||
[UpdateTaskStageLfsBackup] = STAGE_DEF(UpdateTaskStageGroupPreUpdate, 5),
|
||||
|
||||
[UpdateTaskStageRadioImageValidate] = STAGE_DEF(UpdateTaskStageGroupRadio, 15),
|
||||
[UpdateTaskStageRadioErase] = STAGE_DEF(UpdateTaskStageGroupRadio, 35),
|
||||
[UpdateTaskStageRadioWrite] = STAGE_DEF(UpdateTaskStageGroupRadio, 60),
|
||||
[UpdateTaskStageRadioErase] = STAGE_DEF(UpdateTaskStageGroupRadio, 25),
|
||||
[UpdateTaskStageRadioWrite] = STAGE_DEF(UpdateTaskStageGroupRadio, 40),
|
||||
[UpdateTaskStageRadioInstall] = STAGE_DEF(UpdateTaskStageGroupRadio, 30),
|
||||
[UpdateTaskStageRadioBusy] = STAGE_DEF(UpdateTaskStageGroupRadio, 5),
|
||||
|
||||
[UpdateTaskStageOBValidation] = STAGE_DEF(UpdateTaskStageGroupOptionBytes, 2),
|
||||
|
||||
[UpdateTaskStageValidateDFUImage] = STAGE_DEF(UpdateTaskStageGroupFirmware, 30),
|
||||
[UpdateTaskStageFlashWrite] = STAGE_DEF(UpdateTaskStageGroupFirmware, 150),
|
||||
[UpdateTaskStageFlashValidate] = STAGE_DEF(UpdateTaskStageGroupFirmware, 15),
|
||||
[UpdateTaskStageValidateDFUImage] = STAGE_DEF(UpdateTaskStageGroupFirmware, 33),
|
||||
[UpdateTaskStageFlashWrite] = STAGE_DEF(UpdateTaskStageGroupFirmware, 100),
|
||||
[UpdateTaskStageFlashValidate] = STAGE_DEF(UpdateTaskStageGroupFirmware, 20),
|
||||
|
||||
[UpdateTaskStageLfsRestore] = STAGE_DEF(UpdateTaskStageGroupPostUpdate, 5),
|
||||
|
||||
[UpdateTaskStageResourcesUpdate] = STAGE_DEF(UpdateTaskStageGroupResources, 255),
|
||||
[UpdateTaskStageResourcesFileCleanup] = STAGE_DEF(UpdateTaskStageGroupResources, 100),
|
||||
[UpdateTaskStageResourcesDirCleanup] = STAGE_DEF(UpdateTaskStageGroupResources, 50),
|
||||
[UpdateTaskStageResourcesFileUnpack] = STAGE_DEF(UpdateTaskStageGroupResources, 255),
|
||||
[UpdateTaskStageSplashscreenInstall] = STAGE_DEF(UpdateTaskStageGroupSplashscreen, 5),
|
||||
|
||||
[UpdateTaskStageCompleted] = STAGE_DEF(UpdateTaskStageGroupMisc, 1),
|
||||
|
@ -288,6 +306,7 @@ static void update_task_calc_completed_stages(UpdateTask* update_task) {
|
|||
|
||||
void update_task_set_progress(UpdateTask* update_task, UpdateTaskStage stage, uint8_t progress) {
|
||||
if(stage != UpdateTaskStageProgress) {
|
||||
FURI_LOG_I(TAG, "Stage %d, progress %d", stage, progress);
|
||||
/* do not override more specific error states */
|
||||
if((stage >= UpdateTaskStageError) && (update_task->state.stage >= UpdateTaskStageError)) {
|
||||
return;
|
||||
|
|
|
@ -31,7 +31,9 @@ typedef enum {
|
|||
UpdateTaskStageFlashValidate,
|
||||
|
||||
UpdateTaskStageLfsRestore,
|
||||
UpdateTaskStageResourcesUpdate,
|
||||
UpdateTaskStageResourcesFileCleanup,
|
||||
UpdateTaskStageResourcesDirCleanup,
|
||||
UpdateTaskStageResourcesFileUnpack,
|
||||
UpdateTaskStageSplashscreenInstall,
|
||||
|
||||
UpdateTaskStageCompleted,
|
||||
|
|
|
@ -35,36 +35,23 @@ static bool update_task_pre_update(UpdateTask* update_task) {
|
|||
furi_string_free(backup_file_path);
|
||||
return success;
|
||||
}
|
||||
|
||||
typedef enum {
|
||||
UpdateTaskResourcesWeightsFileCleanup = 20,
|
||||
UpdateTaskResourcesWeightsDirCleanup = 20,
|
||||
UpdateTaskResourcesWeightsFileUnpack = 60,
|
||||
} UpdateTaskResourcesWeights;
|
||||
|
||||
#define UPDATE_TASK_RESOURCES_FILE_TO_TOTAL_PERCENT 90
|
||||
|
||||
typedef struct {
|
||||
UpdateTask* update_task;
|
||||
int32_t total_files, processed_files;
|
||||
TarArchive* archive;
|
||||
} TarUnpackProgress;
|
||||
|
||||
static bool update_task_resource_unpack_cb(const char* name, bool is_directory, void* context) {
|
||||
UNUSED(name);
|
||||
UNUSED(is_directory);
|
||||
TarUnpackProgress* unpack_progress = context;
|
||||
unpack_progress->processed_files++;
|
||||
int32_t progress = 0, total = 0;
|
||||
tar_archive_get_read_progress(unpack_progress->archive, &progress, &total);
|
||||
update_task_set_progress(
|
||||
unpack_progress->update_task,
|
||||
UpdateTaskStageProgress,
|
||||
/* For this stage, last progress segment = extraction */
|
||||
(UpdateTaskResourcesWeightsFileCleanup + UpdateTaskResourcesWeightsDirCleanup) +
|
||||
(unpack_progress->processed_files * UpdateTaskResourcesWeightsFileUnpack) /
|
||||
(unpack_progress->total_files + 1));
|
||||
unpack_progress->update_task, UpdateTaskStageProgress, (progress * 100) / (total + 1));
|
||||
return true;
|
||||
}
|
||||
|
||||
static void update_task_cleanup_resources(UpdateTask* update_task, const uint32_t n_tar_entries) {
|
||||
static void update_task_cleanup_resources(UpdateTask* update_task) {
|
||||
ResourceManifestReader* manifest_reader = resource_manifest_reader_alloc(update_task->storage);
|
||||
do {
|
||||
FURI_LOG_D(TAG, "Cleaning up old manifest");
|
||||
|
@ -73,20 +60,26 @@ static void update_task_cleanup_resources(UpdateTask* update_task, const uint32_
|
|||
break;
|
||||
}
|
||||
|
||||
const uint32_t n_approx_file_entries =
|
||||
n_tar_entries * UPDATE_TASK_RESOURCES_FILE_TO_TOTAL_PERCENT / 100 + 1;
|
||||
uint32_t n_dir_entries = 1;
|
||||
|
||||
ResourceManifestEntry* entry_ptr = NULL;
|
||||
uint32_t n_processed_entries = 0;
|
||||
/* Iterate over manifest and calculate entries count */
|
||||
uint32_t n_file_entries = 1, n_dir_entries = 1;
|
||||
while((entry_ptr = resource_manifest_reader_next(manifest_reader))) {
|
||||
if(entry_ptr->type == ResourceManifestEntryTypeFile) {
|
||||
n_file_entries++;
|
||||
} else if(entry_ptr->type == ResourceManifestEntryTypeDirectory) {
|
||||
n_dir_entries++;
|
||||
}
|
||||
}
|
||||
resource_manifest_rewind(manifest_reader);
|
||||
|
||||
update_task_set_progress(update_task, UpdateTaskStageResourcesFileCleanup, 0);
|
||||
uint32_t n_processed_file_entries = 0;
|
||||
while((entry_ptr = resource_manifest_reader_next(manifest_reader))) {
|
||||
if(entry_ptr->type == ResourceManifestEntryTypeFile) {
|
||||
update_task_set_progress(
|
||||
update_task,
|
||||
UpdateTaskStageProgress,
|
||||
/* For this stage, first pass = old manifest's file cleanup */
|
||||
(n_processed_entries++ * UpdateTaskResourcesWeightsFileCleanup) /
|
||||
n_approx_file_entries);
|
||||
(n_processed_file_entries++ * 100) / n_file_entries);
|
||||
|
||||
FuriString* file_path = furi_string_alloc();
|
||||
path_concat(
|
||||
|
@ -108,16 +101,14 @@ static void update_task_cleanup_resources(UpdateTask* update_task, const uint32_
|
|||
}
|
||||
}
|
||||
|
||||
n_processed_entries = 0;
|
||||
update_task_set_progress(update_task, UpdateTaskStageResourcesDirCleanup, 0);
|
||||
uint32_t n_processed_dir_entries = 0;
|
||||
while((entry_ptr = resource_manifest_reader_previous(manifest_reader))) {
|
||||
if(entry_ptr->type == ResourceManifestEntryTypeDirectory) {
|
||||
update_task_set_progress(
|
||||
update_task,
|
||||
UpdateTaskStageProgress,
|
||||
/* For this stage, second 10% of progress = cleanup directories */
|
||||
UpdateTaskResourcesWeightsFileCleanup +
|
||||
(n_processed_entries++ * UpdateTaskResourcesWeightsDirCleanup) /
|
||||
n_dir_entries);
|
||||
(n_processed_dir_entries++ * 100) / n_dir_entries);
|
||||
|
||||
FuriString* folder_path = furi_string_alloc();
|
||||
|
||||
|
@ -166,26 +157,22 @@ static bool update_task_post_update(UpdateTask* update_task) {
|
|||
if(update_task->state.groups & UpdateTaskStageGroupResources) {
|
||||
TarUnpackProgress progress = {
|
||||
.update_task = update_task,
|
||||
.total_files = 0,
|
||||
.processed_files = 0,
|
||||
.archive = archive,
|
||||
};
|
||||
update_task_set_progress(update_task, UpdateTaskStageResourcesUpdate, 0);
|
||||
|
||||
path_concat(
|
||||
furi_string_get_cstr(update_task->update_path),
|
||||
furi_string_get_cstr(update_task->manifest->resource_bundle),
|
||||
file_path);
|
||||
|
||||
CHECK_RESULT(tar_archive_open(
|
||||
archive, furi_string_get_cstr(file_path), TarOpenModeReadHeatshrink));
|
||||
|
||||
update_task_cleanup_resources(update_task);
|
||||
|
||||
update_task_set_progress(update_task, UpdateTaskStageResourcesFileUnpack, 0);
|
||||
tar_archive_set_file_callback(archive, update_task_resource_unpack_cb, &progress);
|
||||
CHECK_RESULT(
|
||||
tar_archive_open(archive, furi_string_get_cstr(file_path), TAR_OPEN_MODE_READ));
|
||||
|
||||
progress.total_files = tar_archive_get_entries_count(archive);
|
||||
if(progress.total_files > 0) {
|
||||
update_task_cleanup_resources(update_task, progress.total_files);
|
||||
|
||||
CHECK_RESULT(tar_archive_unpack_to(archive, STORAGE_EXT_PATH_PREFIX, NULL));
|
||||
}
|
||||
CHECK_RESULT(tar_archive_unpack_to(archive, STORAGE_EXT_PATH_PREFIX, NULL));
|
||||
}
|
||||
|
||||
if(update_task->state.groups & UpdateTaskStageGroupSplashscreen) {
|
||||
|
|
|
@ -102,7 +102,7 @@ Even if something goes wrong, updater allows you to retry failed operations and
|
|||
| Writing flash | **10** | **0-100** | Block read/write error |
|
||||
| Validating flash | **11** | **0-100** | Block read/write error |
|
||||
| Restoring LFS | **12** | **0-100** | FS read/write error |
|
||||
| Updating resources | **13** | **0-100** | SD card read/write error |
|
||||
| Updating resources | **13-15** | **0-100** | SD card read/write error |
|
||||
|
||||
## Building update packages
|
||||
|
||||
|
|
19
documentation/file_formats/TarHeatshrinkFormat.md
Normal file
19
documentation/file_formats/TarHeatshrinkFormat.md
Normal file
|
@ -0,0 +1,19 @@
|
|||
# Heatshrink-compressed Tarball Format
|
||||
|
||||
Flipper supports the use of Heatshrink compression library for .tar archives. This allows for smaller file sizes and faster OTA updates.
|
||||
|
||||
Heatshrink specification does not define a container format for storing compression parameters. This document describes the format used by Flipper to store Heatshrink-compressed data streams.
|
||||
|
||||
## Header
|
||||
|
||||
Header begins with a magic value, followed by a version number and compression parameters - window size and lookahead size.
|
||||
|
||||
Magic value consists of 4 bytes: `0x48 0x53 0x44 0x53` (ASCII "HSDS", HeatShrink DataStream).
|
||||
|
||||
Version number is a single byte, currently set to `0x01`.
|
||||
|
||||
Window size is a single byte, representing the size of the sliding window used by the compressor. It corresponds to `-w` parameter in Heatshrink CLI.
|
||||
|
||||
Lookahead size is a single byte, representing the size of the lookahead buffer used by the compressor. It corresponds to `-l` parameter in Heatshrink CLI.
|
||||
|
||||
Total header size is 7 bytes. Header is followed by compressed data.
|
|
@ -111,10 +111,10 @@ badusb.println("Hello, world!"); // print "Hello, world!" and press "ENTER"
|
|||
|
||||
| Name |
|
||||
| ------------- |
|
||||
| CTRL |
|
||||
| SHIFT |
|
||||
| CTRL |
|
||||
| SHIFT |
|
||||
| ALT |
|
||||
| GUI |
|
||||
| GUI |
|
||||
|
||||
## Special keys
|
||||
|
||||
|
|
|
@ -13,7 +13,7 @@ Show a simple message dialog with header, text and "OK" button.
|
|||
- Dialog header text
|
||||
- Dialog text
|
||||
|
||||
### Retuns
|
||||
### Returns
|
||||
true if central button was pressed, false if the dialog was closed by back key press
|
||||
|
||||
### Examples:
|
||||
|
@ -25,14 +25,14 @@ dialog.message("Dialog demo", "Press OK to start");
|
|||
More complex dialog with configurable buttons
|
||||
|
||||
### Parameters
|
||||
Configuration object with the following fileds:
|
||||
Configuration object with the following fields:
|
||||
- header: Dialog header text
|
||||
- text: Dialog text
|
||||
- button_left: (optional) left button name
|
||||
- button_right: (optional) right button name
|
||||
- button_center: (optional) central button name
|
||||
|
||||
### Retuns
|
||||
### Returns
|
||||
Name of pressed button or empty string if the dialog was closed by back key press
|
||||
|
||||
### Examples:
|
||||
|
|
362
documentation/js/js_math.md
Normal file
362
documentation/js/js_math.md
Normal file
|
@ -0,0 +1,362 @@
|
|||
# js_math {#js_math}
|
||||
|
||||
# Math module
|
||||
```js
|
||||
let math = require("math");
|
||||
```
|
||||
# Constants
|
||||
|
||||
## PI
|
||||
The number π = 3.14159265358979323846264338327950288.
|
||||
|
||||
## E
|
||||
The number e (Euler's number) = 2.71828182845904523536028747135266250.
|
||||
|
||||
## EPSILON
|
||||
The smallest number that satisfies the condition: 1.0 + EPSILON != 1.0.
|
||||
EPSILON = 2.2204460492503131e-16.
|
||||
|
||||
# Methods
|
||||
|
||||
## abs
|
||||
Return the absolute value of a number.
|
||||
|
||||
### Parameters
|
||||
- x: A number
|
||||
|
||||
### Returns
|
||||
The absolute value of `x`. If `x` is negative (including -0), returns `-x`. Otherwise, returns `x`. The result is therefore always a positive number or 0.
|
||||
|
||||
### Example
|
||||
```js
|
||||
math.abs(-5); // 5
|
||||
```
|
||||
|
||||
## acos
|
||||
Return the inverse cosine (in radians) of a number.
|
||||
|
||||
### Parameters
|
||||
- x: A number between -1 and 1, inclusive, representing the angle's cosine value
|
||||
|
||||
### Returns
|
||||
The inverse cosine (angle in radians between 0 and π, inclusive) of `x`. If `x` is less than -1 or greater than 1, returns `NaN`.
|
||||
|
||||
### Example
|
||||
```js
|
||||
math.acos(-1); // 3.141592653589793
|
||||
```
|
||||
|
||||
## acosh
|
||||
Return the inverse hyperbolic cosine of a number.
|
||||
|
||||
### Parameters
|
||||
- x: A number greater than or equal to 1
|
||||
|
||||
### Returns
|
||||
The inverse hyperbolic cosine of `x`.
|
||||
|
||||
### Example
|
||||
```js
|
||||
math.acosh(1); // 0
|
||||
```
|
||||
|
||||
## asin
|
||||
Return the inverse sine (in radians) of a number.
|
||||
|
||||
### Parameters
|
||||
- x: A number between -1 and 1, inclusive, representing the angle's sine value
|
||||
|
||||
### Returns
|
||||
The inverse sine (angle in radians between -𝜋/2 and 𝜋/2, inclusive) of `x`.
|
||||
|
||||
### Example
|
||||
```js
|
||||
math.asin(0.5); // 0.5235987755982989
|
||||
```
|
||||
|
||||
## asinh
|
||||
Return the inverse hyperbolic sine of a number.
|
||||
|
||||
### Parameters
|
||||
- x: A number
|
||||
|
||||
### Returns
|
||||
The inverse hyperbolic sine of `x`.
|
||||
|
||||
### Example
|
||||
```js
|
||||
math.asinh(1); // 0.881373587019543
|
||||
```
|
||||
|
||||
## atan
|
||||
Return the inverse tangent (in radians) of a number.
|
||||
|
||||
### Parameters
|
||||
- x: A number
|
||||
|
||||
### Returns
|
||||
The inverse tangent (angle in radians between -𝜋/2 and 𝜋/2, inclusive) of `x`.
|
||||
|
||||
### Example
|
||||
```js
|
||||
math.atan(1); // 0.7853981633974483
|
||||
```
|
||||
|
||||
## atan2
|
||||
Return the angle in the plane (in radians) between the positive x-axis and the ray from (0, 0) to the point (x, y), for math.atan2(y, x).
|
||||
|
||||
### Parameters
|
||||
- y: The y coordinate of the point
|
||||
- x: The x coordinate of the point
|
||||
|
||||
### Returns
|
||||
The angle in radians (between -π and π, inclusive) between the positive x-axis and the ray from (0, 0) to the point (x, y).
|
||||
|
||||
### Example
|
||||
```js
|
||||
math.atan2(90, 15); // 1.4056476493802699
|
||||
```
|
||||
|
||||
## atanh
|
||||
The method returns the inverse hyperbolic tangent of a number.
|
||||
|
||||
### Parameters
|
||||
- x: A number between -1 and 1, inclusive
|
||||
|
||||
### Returns
|
||||
The inverse hyperbolic tangent of `x`.
|
||||
|
||||
### Example
|
||||
```js
|
||||
math.atanh(0.5); // 0.5493061443340548
|
||||
```
|
||||
|
||||
## cbrt
|
||||
Return the cube root of a number.
|
||||
|
||||
### Parameters
|
||||
- x: A number
|
||||
|
||||
### Returns
|
||||
The cube root of `x`.
|
||||
|
||||
### Example
|
||||
```js
|
||||
math.cbrt(2); // 1.2599210498948732
|
||||
```
|
||||
|
||||
## ceil
|
||||
Round up and return the smallest integer greater than or equal to a given number.
|
||||
|
||||
### Parameters
|
||||
- x: A number
|
||||
|
||||
### Returns
|
||||
The smallest integer greater than or equal to `x`. It's the same value as `-math.floor(-x)`.
|
||||
|
||||
### Example
|
||||
```js
|
||||
math.ceil(-7.004); // -7
|
||||
math.ceil(7.004); // 8
|
||||
```
|
||||
|
||||
## clz32
|
||||
Return the number of leading zero bits in the 32-bit binary representation of a number.
|
||||
|
||||
### Parameters
|
||||
- x: A number
|
||||
|
||||
### Returns
|
||||
The number of leading zero bits in the 32-bit binary representation of `x`.
|
||||
|
||||
### Example
|
||||
```js
|
||||
math.clz32(1); // 31
|
||||
math.clz32(1000); // 22
|
||||
```
|
||||
|
||||
## cos
|
||||
Return the cosine of a number in radians.
|
||||
|
||||
### Parameters
|
||||
- x: A number representing an angle in radians
|
||||
|
||||
### Returns
|
||||
The cosine of `x`, between -1 and 1, inclusive.
|
||||
|
||||
### Example
|
||||
```js
|
||||
math.cos(math.PI); // -1
|
||||
```
|
||||
|
||||
## exp
|
||||
Return e raised to the power of a number.
|
||||
|
||||
### Parameters
|
||||
- x: A number
|
||||
|
||||
### Returns
|
||||
A nonnegative number representing `e^x`, where `e` is the base of the natural logarithm.
|
||||
|
||||
### Example
|
||||
```js
|
||||
math.exp(0); // 1
|
||||
math.exp(1); // 2.718281828459045
|
||||
```
|
||||
|
||||
## floor
|
||||
Round down and return the largest integer less than or equal to a given number.
|
||||
|
||||
### Parameters
|
||||
- x: A number
|
||||
|
||||
### Returns
|
||||
The largest integer smaller than or equal to `x`. It's the same value as `-math.ceil(-x)`.
|
||||
|
||||
### Example
|
||||
```js
|
||||
math.floor(-45.95); // -46
|
||||
math.floor(-45.05); // -46
|
||||
math.floor(-0); // -0
|
||||
math.floor(0); // 0
|
||||
math.floor(45.05); // 45
|
||||
math.floor(45.95); // 45
|
||||
```
|
||||
|
||||
## is_equal
|
||||
Return true if the difference between numbers `a` and `b` is less than the specified parameter `e`.
|
||||
|
||||
### Parameters
|
||||
- a: A number a
|
||||
- b: A number b
|
||||
- e: An epsilon parameter
|
||||
|
||||
### Returns
|
||||
True if the difference between numbers `a` and `b` is less than the specified parameter `e`. Otherwise, false.
|
||||
|
||||
### Example
|
||||
```js
|
||||
math.is_equal(1.4, 1.6, 0.2); // false
|
||||
math.is_equal(3.556, 3.555, 0.01); // true
|
||||
```
|
||||
|
||||
## max
|
||||
Return the largest of two numbers given as input parameters.
|
||||
|
||||
### Parameters
|
||||
- a: A number a
|
||||
- b: A number b
|
||||
|
||||
### Returns
|
||||
The largest of the given numbers.
|
||||
|
||||
### Example
|
||||
```js
|
||||
math.max(10, 20); // 20
|
||||
math.max(-10, -20); // -10
|
||||
```
|
||||
|
||||
## min
|
||||
Return the smallest of two numbers given as input parameters.
|
||||
|
||||
### Parameters
|
||||
- a: A number a
|
||||
- b: A number b
|
||||
|
||||
### Returns
|
||||
The smallest of the given numbers.
|
||||
|
||||
### Example
|
||||
```js
|
||||
math.min(10, 20); // 10
|
||||
math.min(-10, -20); // -20
|
||||
```
|
||||
|
||||
## pow
|
||||
Return the value of a base raised to a power.
|
||||
|
||||
### Parameters
|
||||
- base: The base number
|
||||
- exponent: The exponent number
|
||||
|
||||
### Returns
|
||||
A number representing base taken to the power of exponent.
|
||||
|
||||
### Example
|
||||
```js
|
||||
math.pow(7, 2); // 49
|
||||
math.pow(7, 3); // 343
|
||||
math.pow(2, 10); // 1024
|
||||
```
|
||||
|
||||
## random
|
||||
Return a floating-point, pseudo-random number that's greater than or equal to 0 and less than 1, with approximately uniform distribution over that range - which you can then scale to your desired range.
|
||||
|
||||
### Returns
|
||||
A floating-point, pseudo-random number between 0 (inclusive) and 1 (exclusive).
|
||||
|
||||
### Example
|
||||
```js
|
||||
let num = math.random();
|
||||
```
|
||||
|
||||
## sign
|
||||
Return 1 or -1, indicating the sign of the number passed as argument.
|
||||
|
||||
### Parameters
|
||||
- x: A number
|
||||
|
||||
### Returns
|
||||
-1 if the number is less than 0, and 1 otherwise.
|
||||
|
||||
### Example
|
||||
```js
|
||||
math.sign(3); // 1
|
||||
math.sign(0); // 1
|
||||
math.sign(-3); // -1
|
||||
```
|
||||
|
||||
## sin
|
||||
Return the sine of a number in radians.
|
||||
|
||||
### Parameters
|
||||
- x: A number representing an angle in radians
|
||||
|
||||
### Returns
|
||||
The sine of `x`, between -1 and 1, inclusive.
|
||||
|
||||
### Example
|
||||
```js
|
||||
math.sin(math.PI / 2); // 1
|
||||
```
|
||||
|
||||
## sqrt
|
||||
Return the square root of a number.
|
||||
|
||||
### Parameters
|
||||
- x: A number greater than or equal to 0
|
||||
|
||||
### Returns
|
||||
The square root of `x`, a nonnegative number. If `x` < 0, script will fail with an error.
|
||||
|
||||
### Example
|
||||
```js
|
||||
math.sqrt(25); // 5
|
||||
```
|
||||
|
||||
## trunc
|
||||
Return the integer part of a number by removing any fractional digits.
|
||||
|
||||
### Parameters
|
||||
- x: A number
|
||||
|
||||
### Returns
|
||||
The integer part of `x`.
|
||||
|
||||
### Example
|
||||
```js
|
||||
math.trunc(-1.123); // -1
|
||||
math.trunc(0.123); // 0
|
||||
math.trunc(13.37); // 13
|
||||
math.trunc(42.84); // 42
|
||||
```
|
|
@ -53,7 +53,7 @@ serial.read(10, 5000); // Read 10 bytes, with 5s timeout
|
|||
```
|
||||
|
||||
## readln
|
||||
Read from serial port untill line break character
|
||||
Read from serial port until line break character
|
||||
|
||||
### Parameters
|
||||
(optional) Timeout value in ms
|
||||
|
@ -68,7 +68,7 @@ serial.readln(5000); // Read with 5s timeout
|
|||
```
|
||||
|
||||
## readBytes
|
||||
Read from serial port untill line break character
|
||||
Read from serial port until line break character
|
||||
|
||||
### Parameters
|
||||
- Number of bytes to read
|
||||
|
@ -81,7 +81,7 @@ ArrayBuffer with received data or undefined if nothing was received before timeo
|
|||
```js
|
||||
serial.readBytes(4); // Read 4 bytes, without timeout
|
||||
|
||||
// Read one byte from receive buffer with zero timeout, returns UNDEFINED if Rx bufer is empty
|
||||
// Read one byte from receive buffer with zero timeout, returns UNDEFINED if Rx buffer is empty
|
||||
serial.readBytes(1, 0);
|
||||
```
|
||||
|
||||
|
|
48
documentation/js/js_submenu.md
Normal file
48
documentation/js/js_submenu.md
Normal file
|
@ -0,0 +1,48 @@
|
|||
# js_submenu {#js_submenu}
|
||||
|
||||
# Submenu module
|
||||
```js
|
||||
let submenu = require("submenu");
|
||||
```
|
||||
# Methods
|
||||
|
||||
## setHeader
|
||||
Set the submenu header text.
|
||||
|
||||
### Parameters
|
||||
- header (string): The submenu header text
|
||||
|
||||
### Example
|
||||
```js
|
||||
submenu.setHeader("Select an option:");
|
||||
```
|
||||
|
||||
## addItem
|
||||
Add a new submenu item.
|
||||
|
||||
### Parameters
|
||||
- label (string): The submenu item label text
|
||||
- id (number): The submenu item ID, must be a Uint32 number
|
||||
|
||||
### Example
|
||||
```js
|
||||
submenu.addItem("Option 1", 1);
|
||||
submenu.addItem("Option 2", 2);
|
||||
submenu.addItem("Option 3", 3);
|
||||
```
|
||||
|
||||
## show
|
||||
Show a submenu that was previously configured using `setHeader()` and `addItem()` methods.
|
||||
|
||||
### Returns
|
||||
The ID of the submenu item that was selected, or `undefined` if the BACK button was pressed.
|
||||
|
||||
### Example
|
||||
```js
|
||||
let selected = submenu.show();
|
||||
if (selected === undefined) {
|
||||
// if BACK button was pressed
|
||||
} else if (selected === 1) {
|
||||
// if item with ID 1 was selected
|
||||
}
|
||||
```
|
69
documentation/js/js_textbox.md
Normal file
69
documentation/js/js_textbox.md
Normal file
|
@ -0,0 +1,69 @@
|
|||
# js_textbox {#js_textbox}
|
||||
|
||||
# Textbox module
|
||||
```js
|
||||
let textbox = require("textbox");
|
||||
```
|
||||
# Methods
|
||||
|
||||
## setConfig
|
||||
Set focus and font for the textbox.
|
||||
|
||||
### Parameters
|
||||
- focus: "start" to focus on the beginning of the text, or "end" to focus on the end of the text
|
||||
- font: "text" to use the default proportional font, or "hex" to use a monospaced font, which is convenient for aligned array output in HEX
|
||||
|
||||
### Example
|
||||
```js
|
||||
textbox.setConfig("start", "text");
|
||||
textbox.addText("Hello world");
|
||||
textbox.show();
|
||||
```
|
||||
|
||||
## addText
|
||||
Add text to the end of the textbox.
|
||||
|
||||
### Parameters
|
||||
- text (string): The text to add to the end of the textbox
|
||||
|
||||
### Example
|
||||
```js
|
||||
textbox.addText("New text 1\nNew text 2");
|
||||
```
|
||||
|
||||
## clearText
|
||||
Clear the textbox.
|
||||
|
||||
### Example
|
||||
```js
|
||||
textbox.clearText();
|
||||
```
|
||||
|
||||
## isOpen
|
||||
Return true if the textbox is open.
|
||||
|
||||
### Returns
|
||||
True if the textbox is open, false otherwise.
|
||||
|
||||
### Example
|
||||
```js
|
||||
let isOpen = textbox.isOpen();
|
||||
```
|
||||
|
||||
## show
|
||||
Show the textbox. You can add text to it using the `addText()` method before or after calling the `show()` method.
|
||||
|
||||
### Example
|
||||
```js
|
||||
textbox.show();
|
||||
```
|
||||
|
||||
## close
|
||||
Close the textbox.
|
||||
|
||||
### Example
|
||||
```js
|
||||
if (textbox.isOpen()) {
|
||||
textbox.close();
|
||||
}
|
||||
```
|
|
@ -432,6 +432,23 @@ bool mf_classic_is_sector_trailer(uint8_t block) {
|
|||
return block == mf_classic_get_sector_trailer_num_by_block(block);
|
||||
}
|
||||
|
||||
void mf_classic_set_sector_trailer_read(
|
||||
MfClassicData* data,
|
||||
uint8_t block_num,
|
||||
MfClassicSectorTrailer* sec_tr) {
|
||||
furi_check(data);
|
||||
furi_check(sec_tr);
|
||||
furi_check(mf_classic_is_sector_trailer(block_num));
|
||||
|
||||
uint8_t sector_num = mf_classic_get_sector_by_block(block_num);
|
||||
MfClassicSectorTrailer* sec_trailer =
|
||||
mf_classic_get_sector_trailer_by_sector(data, sector_num);
|
||||
memcpy(sec_trailer, sec_tr, sizeof(MfClassicSectorTrailer));
|
||||
FURI_BIT_SET(data->block_read_mask[block_num / 32], block_num % 32);
|
||||
FURI_BIT_SET(data->key_a_mask, sector_num);
|
||||
FURI_BIT_SET(data->key_b_mask, sector_num);
|
||||
}
|
||||
|
||||
uint8_t mf_classic_get_sector_by_block(uint8_t block) {
|
||||
uint8_t sector = 0;
|
||||
|
||||
|
|
|
@ -184,6 +184,11 @@ MfClassicSectorTrailer*
|
|||
|
||||
bool mf_classic_is_sector_trailer(uint8_t block);
|
||||
|
||||
void mf_classic_set_sector_trailer_read(
|
||||
MfClassicData* data,
|
||||
uint8_t block_num,
|
||||
MfClassicSectorTrailer* sec_tr);
|
||||
|
||||
uint8_t mf_classic_get_sector_by_block(uint8_t block);
|
||||
|
||||
bool mf_classic_block_to_value(const MfClassicBlock* block, int32_t* value, uint8_t* addr);
|
||||
|
|
|
@ -13,9 +13,15 @@
|
|||
/** Defines encoder and decoder lookahead buffer size */
|
||||
#define COMPRESS_LOOKAHEAD_BUFF_SIZE_LOG (4u)
|
||||
|
||||
/** Buffer size for input data */
|
||||
#define COMPRESS_ICON_ENCODED_BUFF_SIZE (256u)
|
||||
|
||||
const CompressConfigHeatshrink compress_config_heatshrink_default = {
|
||||
.window_sz2 = COMPRESS_EXP_BUFF_SIZE_LOG,
|
||||
.lookahead_sz2 = COMPRESS_LOOKAHEAD_BUFF_SIZE_LOG,
|
||||
.input_buffer_sz = COMPRESS_ICON_ENCODED_BUFF_SIZE,
|
||||
};
|
||||
|
||||
/** Buffer size for input data */
|
||||
static bool compress_decode_internal(
|
||||
heatshrink_decoder* decoder,
|
||||
const uint8_t* data_in,
|
||||
|
@ -83,16 +89,19 @@ void compress_icon_decode(CompressIcon* instance, const uint8_t* icon_data, uint
|
|||
}
|
||||
|
||||
struct Compress {
|
||||
const void* config;
|
||||
heatshrink_encoder* encoder;
|
||||
heatshrink_decoder* decoder;
|
||||
};
|
||||
|
||||
Compress* compress_alloc(uint16_t compress_buff_size) {
|
||||
Compress* compress_alloc(CompressType type, const void* config) {
|
||||
furi_check(type == CompressTypeHeatshrink);
|
||||
furi_check(config);
|
||||
|
||||
Compress* compress = malloc(sizeof(Compress));
|
||||
compress->encoder =
|
||||
heatshrink_encoder_alloc(COMPRESS_EXP_BUFF_SIZE_LOG, COMPRESS_LOOKAHEAD_BUFF_SIZE_LOG);
|
||||
compress->decoder = heatshrink_decoder_alloc(
|
||||
compress_buff_size, COMPRESS_EXP_BUFF_SIZE_LOG, COMPRESS_LOOKAHEAD_BUFF_SIZE_LOG);
|
||||
compress->config = config;
|
||||
compress->encoder = NULL;
|
||||
compress->decoder = NULL;
|
||||
|
||||
return compress;
|
||||
}
|
||||
|
@ -100,8 +109,12 @@ Compress* compress_alloc(uint16_t compress_buff_size) {
|
|||
void compress_free(Compress* compress) {
|
||||
furi_check(compress);
|
||||
|
||||
heatshrink_encoder_free(compress->encoder);
|
||||
heatshrink_decoder_free(compress->decoder);
|
||||
if(compress->encoder) {
|
||||
heatshrink_encoder_free(compress->encoder);
|
||||
}
|
||||
if(compress->decoder) {
|
||||
heatshrink_decoder_free(compress->decoder);
|
||||
}
|
||||
free(compress);
|
||||
}
|
||||
|
||||
|
@ -125,6 +138,7 @@ static bool compress_encode_internal(
|
|||
size_t sunk = 0;
|
||||
size_t res_buff_size = sizeof(CompressHeader);
|
||||
|
||||
heatshrink_encoder_reset(encoder);
|
||||
/* Sink data to encoding buffer */
|
||||
while((sunk < data_in_size) && !encode_failed) {
|
||||
sink_res =
|
||||
|
@ -179,10 +193,116 @@ static bool compress_encode_internal(
|
|||
*data_res_size = 0;
|
||||
result = false;
|
||||
}
|
||||
heatshrink_encoder_reset(encoder);
|
||||
return result;
|
||||
}
|
||||
|
||||
static inline bool compress_decoder_poll(
|
||||
heatshrink_decoder* decoder,
|
||||
uint8_t* decompressed_chunk,
|
||||
size_t decomp_buffer_size,
|
||||
CompressIoCallback write_cb,
|
||||
void* write_context) {
|
||||
HSD_poll_res poll_res;
|
||||
size_t poll_size;
|
||||
|
||||
do {
|
||||
poll_res =
|
||||
heatshrink_decoder_poll(decoder, decompressed_chunk, decomp_buffer_size, &poll_size);
|
||||
if(poll_res < 0) {
|
||||
return false;
|
||||
}
|
||||
|
||||
size_t write_size = write_cb(write_context, decompressed_chunk, poll_size);
|
||||
if(write_size != poll_size) {
|
||||
return false;
|
||||
}
|
||||
} while(poll_res == HSDR_POLL_MORE);
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
static bool compress_decode_stream_internal(
|
||||
heatshrink_decoder* decoder,
|
||||
const size_t work_buffer_size,
|
||||
CompressIoCallback read_cb,
|
||||
void* read_context,
|
||||
CompressIoCallback write_cb,
|
||||
void* write_context) {
|
||||
bool decode_failed = false;
|
||||
HSD_sink_res sink_res;
|
||||
HSD_finish_res finish_res;
|
||||
size_t read_size = 0;
|
||||
size_t sink_size = 0;
|
||||
|
||||
uint8_t* compressed_chunk = malloc(work_buffer_size);
|
||||
uint8_t* decompressed_chunk = malloc(work_buffer_size);
|
||||
|
||||
/* Sink data to decoding buffer */
|
||||
do {
|
||||
read_size = read_cb(read_context, compressed_chunk, work_buffer_size);
|
||||
|
||||
size_t sunk = 0;
|
||||
while(sunk < read_size && !decode_failed) {
|
||||
sink_res = heatshrink_decoder_sink(
|
||||
decoder, &compressed_chunk[sunk], read_size - sunk, &sink_size);
|
||||
if(sink_res < 0) {
|
||||
decode_failed = true;
|
||||
break;
|
||||
}
|
||||
sunk += sink_size;
|
||||
|
||||
if(!compress_decoder_poll(
|
||||
decoder, decompressed_chunk, work_buffer_size, write_cb, write_context)) {
|
||||
decode_failed = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
} while(!decode_failed && read_size);
|
||||
|
||||
/* Notify sinking complete and poll decoded data */
|
||||
if(!decode_failed) {
|
||||
while((finish_res = heatshrink_decoder_finish(decoder)) != HSDR_FINISH_DONE) {
|
||||
if(finish_res < 0) {
|
||||
decode_failed = true;
|
||||
break;
|
||||
}
|
||||
|
||||
if(!compress_decoder_poll(
|
||||
decoder, decompressed_chunk, work_buffer_size, write_cb, write_context)) {
|
||||
decode_failed = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
free(compressed_chunk);
|
||||
free(decompressed_chunk);
|
||||
|
||||
return !decode_failed;
|
||||
}
|
||||
|
||||
/* In-memory stream adapter used to feed/collect buffer data through the
 * CompressIoCallback interface. */
typedef struct {
    uint8_t* data_ptr; /* current cursor into the backing buffer */
    size_t data_size; /* bytes remaining after the cursor */
    bool is_source; /* true: buffer supplies data; false: buffer receives data */
} MemoryStreamState;

/* CompressIoCallback over a MemoryStreamState. Copies at most `size` bytes
 * (clamped to what remains), advances the cursor and returns the number of
 * bytes transferred; 0 signals end of stream. */
static int32_t memory_stream_io_callback(void* context, uint8_t* ptr, size_t size) {
    MemoryStreamState* state = context;

    /* Never transfer past the end of the backing buffer */
    const size_t chunk = (size > state->data_size) ? state->data_size : size;

    if(state->is_source) {
        memcpy(ptr, state->data_ptr, chunk);
    } else {
        memcpy(state->data_ptr, ptr, chunk);
    }

    state->data_ptr += chunk;
    state->data_size -= chunk;
    return (int32_t)chunk;
}
|
||||
|
||||
static bool compress_decode_internal(
|
||||
heatshrink_decoder* decoder,
|
||||
const uint8_t* data_in,
|
||||
|
@ -196,59 +316,29 @@ static bool compress_decode_internal(
|
|||
furi_check(data_res_size);
|
||||
|
||||
bool result = false;
|
||||
bool decode_failed = false;
|
||||
HSD_sink_res sink_res;
|
||||
HSD_poll_res poll_res;
|
||||
HSD_finish_res finish_res;
|
||||
size_t sink_size = 0;
|
||||
size_t res_buff_size = 0;
|
||||
size_t poll_size = 0;
|
||||
|
||||
CompressHeader* header = (CompressHeader*)data_in;
|
||||
if(header->is_compressed) {
|
||||
/* Sink data to decoding buffer */
|
||||
size_t compressed_size = header->compressed_buff_size;
|
||||
size_t sunk = 0;
|
||||
while(sunk < compressed_size && !decode_failed) {
|
||||
sink_res = heatshrink_decoder_sink(
|
||||
MemoryStreamState compressed_context = {
|
||||
.data_ptr = (uint8_t*)&data_in[sizeof(CompressHeader)],
|
||||
.data_size = header->compressed_buff_size,
|
||||
.is_source = true,
|
||||
};
|
||||
MemoryStreamState decompressed_context = {
|
||||
.data_ptr = data_out,
|
||||
.data_size = data_out_size,
|
||||
.is_source = false,
|
||||
};
|
||||
heatshrink_decoder_reset(decoder);
|
||||
if((result = compress_decode_stream_internal(
|
||||
decoder,
|
||||
(uint8_t*)&data_in[sizeof(CompressHeader) + sunk],
|
||||
compressed_size - sunk,
|
||||
&sink_size);
|
||||
if(sink_res < 0) {
|
||||
decode_failed = true;
|
||||
break;
|
||||
}
|
||||
sunk += sink_size;
|
||||
do {
|
||||
poll_res = heatshrink_decoder_poll(
|
||||
decoder, &data_out[res_buff_size], data_out_size - res_buff_size, &poll_size);
|
||||
if((poll_res < 0) || ((data_out_size - res_buff_size) == 0)) {
|
||||
decode_failed = true;
|
||||
break;
|
||||
}
|
||||
res_buff_size += poll_size;
|
||||
} while(poll_res == HSDR_POLL_MORE);
|
||||
COMPRESS_ICON_ENCODED_BUFF_SIZE,
|
||||
memory_stream_io_callback,
|
||||
&compressed_context,
|
||||
memory_stream_io_callback,
|
||||
&decompressed_context))) {
|
||||
*data_res_size = data_out_size - decompressed_context.data_size;
|
||||
}
|
||||
/* Notify sinking complete and poll decoded data */
|
||||
if(!decode_failed) {
|
||||
finish_res = heatshrink_decoder_finish(decoder);
|
||||
if(finish_res < 0) {
|
||||
decode_failed = true;
|
||||
} else {
|
||||
do {
|
||||
poll_res = heatshrink_decoder_poll(
|
||||
decoder,
|
||||
&data_out[res_buff_size],
|
||||
data_out_size - res_buff_size,
|
||||
&poll_size);
|
||||
res_buff_size += poll_size;
|
||||
finish_res = heatshrink_decoder_finish(decoder);
|
||||
} while(finish_res != HSDR_FINISH_DONE);
|
||||
}
|
||||
}
|
||||
*data_res_size = res_buff_size;
|
||||
result = !decode_failed;
|
||||
} else if(data_out_size >= data_in_size - 1) {
|
||||
memcpy(data_out, &data_in[1], data_in_size);
|
||||
*data_res_size = data_in_size - 1;
|
||||
|
@ -257,7 +347,6 @@ static bool compress_decode_internal(
|
|||
/* Not enough space in output buffer */
|
||||
result = false;
|
||||
}
|
||||
heatshrink_decoder_reset(decoder);
|
||||
return result;
|
||||
}
|
||||
|
||||
|
@ -268,6 +357,11 @@ bool compress_encode(
|
|||
uint8_t* data_out,
|
||||
size_t data_out_size,
|
||||
size_t* data_res_size) {
|
||||
if(!compress->encoder) {
|
||||
CompressConfigHeatshrink* hs_config = (CompressConfigHeatshrink*)compress->config;
|
||||
compress->encoder =
|
||||
heatshrink_encoder_alloc(hs_config->window_sz2, hs_config->lookahead_sz2);
|
||||
}
|
||||
return compress_encode_internal(
|
||||
compress->encoder, data_in, data_in_size, data_out, data_out_size, data_res_size);
|
||||
}
|
||||
|
@ -279,6 +373,201 @@ bool compress_decode(
|
|||
uint8_t* data_out,
|
||||
size_t data_out_size,
|
||||
size_t* data_res_size) {
|
||||
if(!compress->decoder) {
|
||||
CompressConfigHeatshrink* hs_config = (CompressConfigHeatshrink*)compress->config;
|
||||
compress->decoder = heatshrink_decoder_alloc(
|
||||
hs_config->input_buffer_sz, hs_config->window_sz2, hs_config->lookahead_sz2);
|
||||
}
|
||||
return compress_decode_internal(
|
||||
compress->decoder, data_in, data_in_size, data_out, data_out_size, data_res_size);
|
||||
}
|
||||
|
||||
/* Decode a raw (headerless) heatshrink stream through I/O callbacks.
 *
 * Lazily allocates the decoder on first use from the instance's heatshrink
 * config, resets it, and delegates to compress_decode_stream_internal using
 * input_buffer_sz as the work buffer size.
 *
 * Returns true on success. */
bool compress_decode_streamed(
    Compress* compress,
    CompressIoCallback read_cb,
    void* read_context,
    CompressIoCallback write_cb,
    void* write_context) {
    CompressConfigHeatshrink* hs_config = (CompressConfigHeatshrink*)compress->config;
    if(!compress->decoder) {
        compress->decoder = heatshrink_decoder_alloc(
            hs_config->input_buffer_sz, hs_config->window_sz2, hs_config->lookahead_sz2);
    }

    /* Decoder may be reused across calls; always start from a clean state */
    heatshrink_decoder_reset(compress->decoder);
    return compress_decode_stream_internal(
        compress->decoder,
        hs_config->input_buffer_sz,
        read_cb,
        read_context,
        write_cb,
        write_context);
}
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
/* Streaming decompressor with forward seek support over a pull-based
 * compressed input. */
struct CompressStreamDecoder {
    heatshrink_decoder* decoder; /* underlying heatshrink decoder state */
    size_t stream_position; /* current offset in the decompressed stream */
    size_t decode_buffer_size; /* capacity of decode_buffer (input chunk size) */
    size_t decode_buffer_position; /* buffered compressed bytes not yet sunk */
    uint8_t* decode_buffer; /* staging buffer for compressed input */
    CompressIoCallback read_cb; /* supplies compressed bytes */
    void* read_context; /* user context passed to read_cb */
};
|
||||
|
||||
CompressStreamDecoder* compress_stream_decoder_alloc(
|
||||
CompressType type,
|
||||
const void* config,
|
||||
CompressIoCallback read_cb,
|
||||
void* read_context) {
|
||||
furi_check(type == CompressTypeHeatshrink);
|
||||
furi_check(config);
|
||||
|
||||
const CompressConfigHeatshrink* hs_config = (const CompressConfigHeatshrink*)config;
|
||||
CompressStreamDecoder* instance = malloc(sizeof(CompressStreamDecoder));
|
||||
instance->decoder = heatshrink_decoder_alloc(
|
||||
hs_config->input_buffer_sz, hs_config->window_sz2, hs_config->lookahead_sz2);
|
||||
instance->stream_position = 0;
|
||||
instance->decode_buffer_size = hs_config->input_buffer_sz;
|
||||
instance->decode_buffer_position = 0;
|
||||
instance->decode_buffer = malloc(hs_config->input_buffer_sz);
|
||||
instance->read_cb = read_cb;
|
||||
instance->read_context = read_context;
|
||||
|
||||
return instance;
|
||||
}
|
||||
|
||||
/* Release a stream decoder and all resources it owns. The read callback's
 * context is not touched; it belongs to the caller. */
void compress_stream_decoder_free(CompressStreamDecoder* instance) {
    furi_check(instance);

    free(instance->decode_buffer);
    heatshrink_decoder_free(instance->decoder);
    free(instance);
}
|
||||
|
||||
/* Fill `decompressed_chunk` with exactly `decomp_chunk_size` decoded bytes.
 *
 * Uses the decoder and compressed-input staging buffer held in `sd`;
 * compressed bytes are pulled via read_cb/read_context as needed.
 *
 * Returns true only if the full requested amount was produced. */
static bool compress_decode_stream_chunk(
    CompressStreamDecoder* sd,
    CompressIoCallback read_cb,
    void* read_context,
    uint8_t* decompressed_chunk,
    size_t decomp_chunk_size) {
    HSD_sink_res sink_res;
    HSD_poll_res poll_res;

    /*
    First, try to output data from decoder to the output buffer.
    If we could fill the output buffer, return.
    If the output buffer is not full, keep polling the decoder
    until it has no more data to output.
    Then, read more data from the input and sink it to the decoder.
    Repeat until the input is exhausted or output buffer is full.
    */

    bool failed = false;
    bool can_sink_more = true;
    bool can_read_more = true;

    /* NOTE(review): if the input ends prematurely (can_read_more becomes
     * false) while the decoder can produce no more output, this loop appears
     * to have no exit path — confirm truncated-input behavior upstream. */
    do {
        /* Drain decoder output into the caller's buffer */
        do {
            size_t poll_size = 0;
            poll_res = heatshrink_decoder_poll(
                sd->decoder, decompressed_chunk, decomp_chunk_size, &poll_size);
            if(poll_res < 0) {
                return false;
            }

            decomp_chunk_size -= poll_size;
            decompressed_chunk += poll_size;
        } while((poll_res == HSDR_POLL_MORE) && decomp_chunk_size);

        /* Requested amount fully produced */
        if(!decomp_chunk_size) {
            break;
        }

        /* Top up the compressed staging buffer from the input callback */
        if(can_read_more && (sd->decode_buffer_position < sd->decode_buffer_size)) {
            size_t read_size = read_cb(
                read_context,
                &sd->decode_buffer[sd->decode_buffer_position],
                sd->decode_buffer_size - sd->decode_buffer_position);
            sd->decode_buffer_position += read_size;
            can_read_more = read_size > 0;
        }

        /* Feed buffered compressed bytes to the decoder */
        while(sd->decode_buffer_position && can_sink_more) {
            size_t sink_size = 0;
            sink_res = heatshrink_decoder_sink(
                sd->decoder, sd->decode_buffer, sd->decode_buffer_position, &sink_size);
            can_sink_more = sink_res == HSDR_SINK_OK;
            if(sink_res < 0) {
                failed = true;
                break;
            }
            sd->decode_buffer_position -= sink_size;

            /* If some data was left in the buffer, move it to the beginning */
            if(sink_size && sd->decode_buffer_position) {
                memmove(
                    sd->decode_buffer, &sd->decode_buffer[sink_size], sd->decode_buffer_position);
            }
        }
    } while(!failed);

    return decomp_chunk_size == 0;
}
|
||||
|
||||
bool compress_stream_decoder_read(
|
||||
CompressStreamDecoder* instance,
|
||||
uint8_t* data_out,
|
||||
size_t data_out_size) {
|
||||
furi_check(instance);
|
||||
furi_check(data_out);
|
||||
|
||||
if(compress_decode_stream_chunk(
|
||||
instance, instance->read_cb, instance->read_context, data_out, data_out_size)) {
|
||||
instance->stream_position += data_out_size;
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
/* Seek forward in the decompressed stream by decoding and discarding data.
 * Backward seeks are not possible (the input callback cannot rewind) and
 * trigger a furi_check. Returns true if the target position was reached. */
bool compress_stream_decoder_seek(CompressStreamDecoder* instance, size_t position) {
    furi_check(instance);

    /* Check if requested position is ahead of current position
       we can't rewind the input stream */
    furi_check(position >= instance->stream_position);

    /* Read and discard data up to requested position */
    uint8_t* scratch = malloc(instance->decode_buffer_size);
    bool success = true;

    while(instance->stream_position < position) {
        size_t chunk = position - instance->stream_position;
        if(chunk > instance->decode_buffer_size) {
            chunk = instance->decode_buffer_size;
        }
        if(!compress_stream_decoder_read(instance, scratch, chunk)) {
            success = false;
            break;
        }
    }

    free(scratch);
    return success;
}
|
||||
|
||||
/* Current offset in the decompressed stream (bytes successfully read or
 * skipped via seek since the last rewind). */
size_t compress_stream_decoder_tell(CompressStreamDecoder* instance) {
    furi_check(instance);
    return instance->stream_position;
}
|
||||
|
||||
/* Reset the decoder to the beginning of the stream.
 * The read callback's underlying source must be repositioned by the caller
 * separately (see header docs); this only resets decoder-side state. */
bool compress_stream_decoder_rewind(CompressStreamDecoder* instance) {
    furi_check(instance);

    /* Reset decoder and read buffer */
    heatshrink_decoder_reset(instance->decoder);
    instance->stream_position = 0;
    instance->decode_buffer_position = 0;

    return true;
}
|
||||
|
|
|
@ -44,17 +44,34 @@ void compress_icon_free(CompressIcon* instance);
|
|||
*/
|
||||
void compress_icon_decode(CompressIcon* instance, const uint8_t* icon_data, uint8_t** output);
|
||||
|
||||
//////////////////////////////////////////////////////////////////////////
|
||||
|
||||
/** Compress control structure */
|
||||
typedef struct Compress Compress;
|
||||
|
||||
/** Supported compression types */
|
||||
typedef enum {
|
||||
CompressTypeHeatshrink = 0,
|
||||
} CompressType;
|
||||
|
||||
/** Configuration for heatshrink compression */
|
||||
typedef struct {
|
||||
uint16_t window_sz2;
|
||||
uint16_t lookahead_sz2;
|
||||
uint16_t input_buffer_sz;
|
||||
} CompressConfigHeatshrink;
|
||||
|
||||
/** Default configuration for heatshrink compression. Used for image assets. */
|
||||
extern const CompressConfigHeatshrink compress_config_heatshrink_default;
|
||||
|
||||
/** Allocate encoder and decoder
|
||||
*
|
||||
* @param compress_buff_size size of decoder and encoder buffer to
|
||||
* allocate
|
||||
* @param type Compression type
|
||||
* @param[in] config Configuration for compression, specific to type
|
||||
*
|
||||
* @return Compress instance
|
||||
*/
|
||||
Compress* compress_alloc(uint16_t compress_buff_size);
|
||||
Compress* compress_alloc(CompressType type, const void* config);
|
||||
|
||||
/** Free encoder and decoder
|
||||
*
|
||||
|
@ -71,6 +88,8 @@ void compress_free(Compress* compress);
|
|||
* @param[in] data_out_size The data out size
|
||||
* @param data_res_size pointer to result output data size
|
||||
*
|
||||
* @note Prepends compressed stream with a header. If data is not compressible,
|
||||
* it will be stored as is after the header.
|
||||
* @return true on success
|
||||
*/
|
||||
bool compress_encode(
|
||||
|
@ -90,6 +109,7 @@ bool compress_encode(
|
|||
* @param[in] data_out_size The data out size
|
||||
* @param data_res_size pointer to result output data size
|
||||
*
|
||||
* @note Expects compressed stream with a header, as produced by `compress_encode`.
|
||||
* @return true on success
|
||||
*/
|
||||
bool compress_decode(
|
||||
|
@ -100,6 +120,100 @@ bool compress_decode(
|
|||
size_t data_out_size,
|
||||
size_t* data_res_size);
|
||||
|
||||
/** I/O callback for streamed compression/decompression
|
||||
*
|
||||
* @param context user context
|
||||
* @param buffer buffer to read/write
|
||||
* @param size size of buffer
|
||||
*
|
||||
* @return number of bytes read/written, 0 on end of stream, negative on error
|
||||
*/
|
||||
typedef int32_t (*CompressIoCallback)(void* context, uint8_t* buffer, size_t size);
|
||||
|
||||
/** Decompress streamed data
|
||||
*
|
||||
* @param compress Compress instance
|
||||
* @param read_cb read callback
|
||||
* @param read_context read callback context
|
||||
* @param write_cb write callback
|
||||
* @param write_context write callback context
|
||||
*
|
||||
* @note Does not expect a header, just compressed data stream.
|
||||
* @return true on success
|
||||
*/
|
||||
bool compress_decode_streamed(
|
||||
Compress* compress,
|
||||
CompressIoCallback read_cb,
|
||||
void* read_context,
|
||||
CompressIoCallback write_cb,
|
||||
void* write_context);
|
||||
|
||||
//////////////////////////////////////////////////////////////////////////
|
||||
|
||||
/** CompressStreamDecoder control structure */
|
||||
typedef struct CompressStreamDecoder CompressStreamDecoder;
|
||||
|
||||
/** Allocate stream decoder
|
||||
*
|
||||
* @param type Compression type
|
||||
* @param[in] config Configuration for compression, specific to type
|
||||
* @param read_cb The read callback for input (compressed) data
|
||||
* @param read_context The read context
|
||||
*
|
||||
* @return CompressStreamDecoder instance
|
||||
*/
|
||||
CompressStreamDecoder* compress_stream_decoder_alloc(
|
||||
CompressType type,
|
||||
const void* config,
|
||||
CompressIoCallback read_cb,
|
||||
void* read_context);
|
||||
|
||||
/** Free stream decoder
|
||||
*
|
||||
* @param instance The CompressStreamDecoder instance
|
||||
*/
|
||||
void compress_stream_decoder_free(CompressStreamDecoder* instance);
|
||||
|
||||
/** Read uncompressed data chunk from stream decoder
|
||||
*
|
||||
* @param instance The CompressStreamDecoder instance
|
||||
* @param data_out The data out
|
||||
* @param[in] data_out_size The data out size
|
||||
*
|
||||
* @return true on success
|
||||
*/
|
||||
bool compress_stream_decoder_read(
|
||||
CompressStreamDecoder* instance,
|
||||
uint8_t* data_out,
|
||||
size_t data_out_size);
|
||||
|
||||
/** Seek to position in uncompressed data stream
|
||||
*
|
||||
* @param instance The CompressStreamDecoder instance
|
||||
* @param[in] position The position
|
||||
*
|
||||
* @return true on success
|
||||
* @warning Backward seeking is not supported
|
||||
*/
|
||||
bool compress_stream_decoder_seek(CompressStreamDecoder* instance, size_t position);
|
||||
|
||||
/** Get current position in uncompressed data stream
|
||||
*
|
||||
* @param instance The CompressStreamDecoder instance
|
||||
*
|
||||
* @return current position
|
||||
*/
|
||||
size_t compress_stream_decoder_tell(CompressStreamDecoder* instance);
|
||||
|
||||
/** Reset stream decoder to the beginning
|
||||
* @warning Read callback must be repositioned by caller separately
|
||||
*
|
||||
* @param instance The CompressStreamDecoder instance
|
||||
*
|
||||
* @return true on success
|
||||
*/
|
||||
bool compress_stream_decoder_rewind(CompressStreamDecoder* instance);
|
||||
|
||||
#ifdef __cplusplus
|
||||
}
|
||||
#endif
|
||||
|
|
|
@ -43,6 +43,7 @@ void path_extract_filename(FuriString* path, FuriString* name, bool trim_ext) {
|
|||
void path_extract_extension(FuriString* path, char* ext, size_t ext_len_max) {
|
||||
furi_check(path);
|
||||
furi_check(ext);
|
||||
furi_check(ext_len_max > 0);
|
||||
|
||||
size_t dot = furi_string_search_rchar(path, '.');
|
||||
size_t filename_start = furi_string_search_rchar(path, '/');
|
||||
|
|
|
@ -4,6 +4,7 @@
|
|||
#include <storage/storage.h>
|
||||
#include <furi.h>
|
||||
#include <toolbox/path.h>
|
||||
#include <toolbox/compress.h>
|
||||
|
||||
#define TAG "TarArch"
|
||||
#define MAX_NAME_LEN 255
|
||||
|
@ -12,14 +13,29 @@
|
|||
#define FILE_OPEN_NTRIES 10
|
||||
#define FILE_OPEN_RETRY_DELAY 25
|
||||
|
||||
TarOpenMode tar_archive_get_mode_for_path(const char* path) {
|
||||
char ext[8];
|
||||
|
||||
FuriString* path_str = furi_string_alloc_set_str(path);
|
||||
path_extract_extension(path_str, ext, sizeof(ext));
|
||||
furi_string_free(path_str);
|
||||
|
||||
if(strcmp(ext, ".ths") == 0) {
|
||||
return TarOpenModeReadHeatshrink;
|
||||
} else {
|
||||
return TarOpenModeRead;
|
||||
}
|
||||
}
|
||||
|
||||
typedef struct TarArchive {
|
||||
Storage* storage;
|
||||
File* stream;
|
||||
mtar_t tar;
|
||||
tar_unpack_file_cb unpack_cb;
|
||||
void* unpack_cb_context;
|
||||
} TarArchive;
|
||||
|
||||
/* API WRAPPER */
|
||||
/* Plain file backend - uncompressed, supports read and write */
|
||||
static int mtar_storage_file_write(void* stream, const void* data, unsigned size) {
|
||||
uint16_t bytes_written = storage_file_write(stream, data, size);
|
||||
return (bytes_written == size) ? bytes_written : MTAR_EWRITEFAIL;
|
||||
|
@ -38,7 +54,6 @@ static int mtar_storage_file_seek(void* stream, unsigned offset) {
|
|||
static int mtar_storage_file_close(void* stream) {
|
||||
if(stream) {
|
||||
storage_file_close(stream);
|
||||
storage_file_free(stream);
|
||||
}
|
||||
return MTAR_ESUCCESS;
|
||||
}
|
||||
|
@ -50,41 +65,133 @@ const struct mtar_ops filesystem_ops = {
|
|||
.close = mtar_storage_file_close,
|
||||
};
|
||||
|
||||
/* Heatshrink stream backend - compressed, read-only */
|
||||
|
||||
/* Per-archive state for the heatshrink-compressed mtar backend (read-only) */
typedef struct {
    CompressConfigHeatshrink heatshrink_config; /* decoder params taken from the stream header */
    File* stream; /* underlying storage file */
    CompressStreamDecoder* decoder; /* streaming decompressor over `stream` */
} HeatshrinkStream;

/* HSDS 'heatshrink data stream' header magic ("HSDS" when stored little-endian) */
static const uint32_t HEATSHRINK_MAGIC = 0x53445348;

/* On-disk header preceding the compressed payload; must stay 7 bytes and
 * match scripts/flipper/assets/heatshrink_stream.py */
typedef struct {
    uint32_t magic; /* must equal HEATSHRINK_MAGIC */
    uint8_t version; /* stream format version */
    uint8_t window_sz2; /* heatshrink window size, log2 */
    uint8_t lookahead_sz2; /* heatshrink lookahead size, log2 */
} FURI_PACKED HeatshrinkStreamHeader;
_Static_assert(sizeof(HeatshrinkStreamHeader) == 7, "Invalid HeatshrinkStreamHeader size");
|
||||
|
||||
static int mtar_heatshrink_file_close(void* stream) {
|
||||
HeatshrinkStream* hs_stream = stream;
|
||||
if(hs_stream) {
|
||||
if(hs_stream->decoder) {
|
||||
compress_stream_decoder_free(hs_stream->decoder);
|
||||
}
|
||||
storage_file_close(hs_stream->stream);
|
||||
storage_file_free(hs_stream->stream);
|
||||
free(hs_stream);
|
||||
}
|
||||
return MTAR_ESUCCESS;
|
||||
}
|
||||
|
||||
static int mtar_heatshrink_file_read(void* stream, void* data, unsigned size) {
|
||||
HeatshrinkStream* hs_stream = stream;
|
||||
bool read_success = compress_stream_decoder_read(hs_stream->decoder, data, size);
|
||||
return read_success ? (int)size : MTAR_EREADFAIL;
|
||||
}
|
||||
|
||||
static int mtar_heatshrink_file_seek(void* stream, unsigned offset) {
|
||||
HeatshrinkStream* hs_stream = stream;
|
||||
bool success = false;
|
||||
if(offset == 0) {
|
||||
success = storage_file_seek(hs_stream->stream, sizeof(HeatshrinkStreamHeader), true) &&
|
||||
compress_stream_decoder_rewind(hs_stream->decoder);
|
||||
} else {
|
||||
success = compress_stream_decoder_seek(hs_stream->decoder, offset);
|
||||
}
|
||||
return success ? MTAR_ESUCCESS : MTAR_ESEEKFAIL;
|
||||
}
|
||||
|
||||
const struct mtar_ops heatshrink_ops = {
|
||||
.read = mtar_heatshrink_file_read,
|
||||
.write = NULL, // not supported
|
||||
.seek = mtar_heatshrink_file_seek,
|
||||
.close = mtar_heatshrink_file_close,
|
||||
};
|
||||
|
||||
//////////////////////////////////////////////////////////////////////////
|
||||
|
||||
/* Tar archive constructor: binds the storage API and pre-allocates the
 * backing file handle (owned by the archive, freed in tar_archive_free). */
TarArchive* tar_archive_alloc(Storage* storage) {
    furi_check(storage);
    TarArchive* archive = malloc(sizeof(TarArchive));
    archive->storage = storage;
    archive->stream = storage_file_alloc(archive->storage);
    archive->unpack_cb = NULL;
    /* Fix: initialize the callback context alongside the callback instead of
     * relying on the allocator zeroing memory */
    archive->unpack_cb_context = NULL;
    return archive;
}
|
||||
|
||||
/* CompressIoCallback adapter over a storage File: reads up to buffer_size
 * bytes, returning the count actually read. */
static int32_t file_read_cb(void* context, uint8_t* buffer, size_t buffer_size) {
    return storage_file_read((File*)context, buffer, buffer_size);
}
|
||||
|
||||
/* Open a tar archive for reading or writing.
 *
 * For TarOpenModeReadHeatshrink the HSDS stream header is read and validated,
 * and a streaming decompressor is layered between the file and the tar
 * reader. Returns false on unknown mode, open failure or bad header.
 *
 * This span contained stale pre-merge diff lines (duplicate old-enum case
 * labels, a second `File* stream` declaration, a duplicate mtar_init and a
 * storage_file_free of the archive-owned stream); reconstructed coherent
 * post-merge version. `archive->stream` stays owned by the archive and is
 * freed only in tar_archive_free(); on failure it is merely closed. */
bool tar_archive_open(TarArchive* archive, const char* path, TarOpenMode mode) {
    furi_check(archive);
    FS_AccessMode access_mode;
    FS_OpenMode open_mode;
    bool compressed = false;
    int mtar_access = 0;

    switch(mode) {
    case TarOpenModeRead:
        mtar_access = MTAR_READ;
        access_mode = FSAM_READ;
        open_mode = FSOM_OPEN_EXISTING;
        break;
    case TarOpenModeWrite:
        mtar_access = MTAR_WRITE;
        access_mode = FSAM_WRITE;
        open_mode = FSOM_CREATE_ALWAYS;
        break;
    case TarOpenModeReadHeatshrink:
        /* Read-only: heatshrink_ops has no write support */
        mtar_access = MTAR_READ;
        access_mode = FSAM_READ;
        open_mode = FSOM_OPEN_EXISTING;
        compressed = true;
        break;
    default:
        return false;
    }

    File* stream = archive->stream;
    if(!storage_file_open(stream, path, access_mode, open_mode)) {
        return false;
    }

    if(compressed) {
        /* Read and validate stream header.
         * NOTE(review): header.version is not validated here — confirm this
         * is intentional. */
        HeatshrinkStreamHeader header;
        if(storage_file_read(stream, &header, sizeof(HeatshrinkStreamHeader)) !=
               sizeof(HeatshrinkStreamHeader) ||
           header.magic != HEATSHRINK_MAGIC) {
            storage_file_close(stream);
            return false;
        }

        /* Decoder parameters come from the stream header; the input chunk
         * size is our own choice */
        HeatshrinkStream* hs_stream = malloc(sizeof(HeatshrinkStream));
        hs_stream->stream = stream;
        hs_stream->heatshrink_config.window_sz2 = header.window_sz2;
        hs_stream->heatshrink_config.lookahead_sz2 = header.lookahead_sz2;
        hs_stream->heatshrink_config.input_buffer_sz = FILE_BLOCK_SIZE;
        hs_stream->decoder = compress_stream_decoder_alloc(
            CompressTypeHeatshrink, &hs_stream->heatshrink_config, file_read_cb, stream);
        mtar_init(&archive->tar, mtar_access, &heatshrink_ops, hs_stream);
    } else {
        mtar_init(&archive->tar, mtar_access, &filesystem_ops, stream);
    }

    return true;
}
|
||||
|
@ -94,6 +201,7 @@ void tar_archive_free(TarArchive* archive) {
|
|||
if(mtar_is_open(&archive->tar)) {
|
||||
mtar_close(&archive->tar);
|
||||
}
|
||||
storage_file_free(archive->stream);
|
||||
free(archive);
|
||||
}
|
||||
|
||||
|
@ -121,6 +229,21 @@ int32_t tar_archive_get_entries_count(TarArchive* archive) {
|
|||
return counter;
|
||||
}
|
||||
|
||||
/* Report read progress as raw byte offsets within the archive file.
 * Either output pointer may be NULL. Only valid for archives opened in a
 * read mode; returns false otherwise. */
bool tar_archive_get_read_progress(TarArchive* archive, int32_t* processed, int32_t* total) {
    furi_check(archive);

    if(mtar_access_mode(&archive->tar) != MTAR_READ) {
        return false;
    }

    if(processed != NULL) {
        *processed = storage_file_tell(archive->stream);
    }
    if(total != NULL) {
        *total = storage_file_size(archive->stream);
    }

    return true;
}
|
||||
|
||||
bool tar_archive_dir_add_element(TarArchive* archive, const char* dirpath) {
|
||||
furi_check(archive);
|
||||
return (mtar_write_dir_header(&archive->tar, dirpath) == MTAR_ESUCCESS);
|
||||
|
@ -258,7 +381,7 @@ static int archive_extract_foreach_cb(mtar_t* tar, const mtar_header_t* header,
|
|||
|
||||
furi_string_free(converted_fname);
|
||||
furi_string_free(full_extracted_fname);
|
||||
return success ? 0 : -1;
|
||||
return success ? 0 : MTAR_EFAILURE;
|
||||
}
|
||||
|
||||
bool tar_archive_unpack_to(
|
||||
|
|
|
@ -12,62 +12,197 @@ typedef struct TarArchive TarArchive;
|
|||
|
||||
typedef struct Storage Storage;
|
||||
|
||||
/** Tar archive open mode
|
||||
*/
|
||||
typedef enum {
|
||||
TAR_OPEN_MODE_READ = 'r',
|
||||
TAR_OPEN_MODE_WRITE = 'w',
|
||||
TAR_OPEN_MODE_STDOUT = 's' /* to be implemented */
|
||||
TarOpenModeRead = 'r',
|
||||
TarOpenModeWrite = 'w',
|
||||
/* read-only heatshrink compressed tar */
|
||||
TarOpenModeReadHeatshrink = 'h',
|
||||
} TarOpenMode;
|
||||
|
||||
/** Get expected open mode for archive at the path.
|
||||
* Used for automatic mode detection based on the file extension.
|
||||
*
|
||||
* @param[in] path Path to the archive
|
||||
*
|
||||
* @return open mode from TarOpenMode enum
|
||||
*/
|
||||
TarOpenMode tar_archive_get_mode_for_path(const char* path);
|
||||
|
||||
/** Tar archive constructor
|
||||
*
|
||||
* @param storage Storage API pointer
|
||||
*
|
||||
* @return allocated object
|
||||
*/
|
||||
TarArchive* tar_archive_alloc(Storage* storage);
|
||||
|
||||
/** Open tar archive
|
||||
*
|
||||
* @param archive Tar archive object
|
||||
* @param[in] path Path to the tar archive
|
||||
* @param mode Open mode
|
||||
*
|
||||
* @return true if successful
|
||||
*/
|
||||
bool tar_archive_open(TarArchive* archive, const char* path, TarOpenMode mode);
|
||||
|
||||
/** Tar archive destructor
|
||||
*
|
||||
* @param archive Tar archive object
|
||||
*/
|
||||
void tar_archive_free(TarArchive* archive);
|
||||
|
||||
/* High-level API - assumes archive is open */
|
||||
|
||||
/** Unpack tar archive to destination
|
||||
*
|
||||
* @param archive Tar archive object. Must be opened in read mode
|
||||
* @param[in] destination Destination path
|
||||
* @param converter Storage name converter
|
||||
*
|
||||
* @return true if successful
|
||||
*/
|
||||
bool tar_archive_unpack_to(
|
||||
TarArchive* archive,
|
||||
const char* destination,
|
||||
Storage_name_converter converter);
|
||||
|
||||
/** Add file to tar archive
|
||||
*
|
||||
* @param archive Tar archive object. Must be opened in write mode
|
||||
* @param[in] fs_file_path Path to the file on the filesystem
|
||||
* @param[in] archive_fname Name of the file in the archive
|
||||
* @param file_size Size of the file
|
||||
*
|
||||
* @return true if successful
|
||||
*/
|
||||
bool tar_archive_add_file(
|
||||
TarArchive* archive,
|
||||
const char* fs_file_path,
|
||||
const char* archive_fname,
|
||||
const int32_t file_size);
|
||||
|
||||
/** Add directory to tar archive
|
||||
*
|
||||
* @param archive Tar archive object. Must be opened in write mode
|
||||
* @param fs_full_path Path to the directory on the filesystem
|
||||
* @param path_prefix Prefix to add to the directory name in the archive
|
||||
*
|
||||
* @return true if successful
|
||||
*/
|
||||
bool tar_archive_add_dir(TarArchive* archive, const char* fs_full_path, const char* path_prefix);
|
||||
|
||||
/** Get number of entries in the archive
|
||||
*
|
||||
* @param archive Tar archive object
|
||||
*
|
||||
* @return number of entries. -1 on error
|
||||
*/
|
||||
int32_t tar_archive_get_entries_count(TarArchive* archive);
|
||||
|
||||
/** Get read progress
|
||||
*
|
||||
* @param archive Tar archive object. Must be opened in read mode
|
||||
* @param[in] processed Number of processed entries
|
||||
* @param[in] total Total number of entries
|
||||
*
|
||||
* @return true if successful
|
||||
*/
|
||||
bool tar_archive_get_read_progress(TarArchive* archive, int32_t* processed, int32_t* total);
|
||||
|
||||
/** Unpack single file from tar archive
|
||||
*
|
||||
* @param archive Tar archive object. Must be opened in read mode
|
||||
* @param[in] archive_fname Name of the file in the archive
|
||||
* @param[in] destination Destination path
|
||||
*
|
||||
* @return true if successful
|
||||
*/
|
||||
bool tar_archive_unpack_file(
|
||||
TarArchive* archive,
|
||||
const char* archive_fname,
|
||||
const char* destination);
|
||||
|
||||
/* Optional per-entry callback on unpacking - return false to skip entry */
|
||||
/** Optional per-entry callback on unpacking
|
||||
* @param name Name of the file or directory
|
||||
* @param is_directory True if the entry is a directory
|
||||
* @param[in] context User context
|
||||
* @return true to process the entry, false to skip
|
||||
*/
|
||||
typedef bool (*tar_unpack_file_cb)(const char* name, bool is_directory, void* context);
|
||||
|
||||
/** Set per-entry callback on unpacking
|
||||
* @param archive Tar archive object
|
||||
* @param callback Callback function
|
||||
* @param[in] context User context
|
||||
*/
|
||||
void tar_archive_set_file_callback(TarArchive* archive, tar_unpack_file_cb callback, void* context);
|
||||
|
||||
/* Low-level API */
|
||||
|
||||
/** Add tar archive directory header
|
||||
*
|
||||
* @param archive Tar archive object. Must be opened in write mode
|
||||
* @param[in] dirpath Path to the directory
|
||||
*
|
||||
* @return true if successful
|
||||
*/
|
||||
bool tar_archive_dir_add_element(TarArchive* archive, const char* dirpath);
|
||||
|
||||
/** Add tar archive file header
|
||||
*
|
||||
* @param archive Tar archive object. Must be opened in write mode
|
||||
* @param[in] path Path to the file
|
||||
* @param data_len Size of the file
|
||||
*
|
||||
* @return true if successful
|
||||
*/
|
||||
bool tar_archive_file_add_header(TarArchive* archive, const char* path, const int32_t data_len);
|
||||
|
||||
/** Add tar archive file data block
|
||||
*
|
||||
* @param archive Tar archive object. Must be opened in write mode
|
||||
* @param[in] data_block Data block
|
||||
* @param block_len Size of the data block
|
||||
*
|
||||
* @return true if successful
|
||||
*/
|
||||
bool tar_archive_file_add_data_block(
|
||||
TarArchive* archive,
|
||||
const uint8_t* data_block,
|
||||
const int32_t block_len);
|
||||
|
||||
/** Finalize tar archive file
|
||||
*
|
||||
* @param archive Tar archive object. Must be opened in write mode
|
||||
*
|
||||
* @return true if successful
|
||||
*/
|
||||
bool tar_archive_file_finalize(TarArchive* archive);
|
||||
|
||||
/** Store data in tar archive
|
||||
*
|
||||
* @param archive Tar archive object. Must be opened in write mode
|
||||
* @param[in] path Path to the file
|
||||
* @param[in] data Data to store
|
||||
* @param data_len Size of the data
|
||||
*
|
||||
* @return true if successful
|
||||
*/
|
||||
bool tar_archive_store_data(
|
||||
TarArchive* archive,
|
||||
const char* path,
|
||||
const uint8_t* data,
|
||||
const int32_t data_len);
|
||||
|
||||
/** Finalize tar archive
|
||||
*
|
||||
* @param archive Tar archive object. Must be opened in write mode
|
||||
*
|
||||
* @return true if successful
|
||||
*/
|
||||
bool tar_archive_finalize(TarArchive* archive);
|
||||
|
||||
#ifdef __cplusplus
|
||||
|
|
|
@ -161,3 +161,9 @@ ResourceManifestEntry*
|
|||
return NULL;
|
||||
}
|
||||
}
|
||||
|
||||
/* Rewind the manifest stream to its start so entries can be re-read.
 * Returns true if the seek succeeded. */
bool resource_manifest_rewind(ResourceManifestReader* resource_manifest) {
    furi_assert(resource_manifest);

    return stream_seek(resource_manifest->stream, 0, StreamOffsetFromStart);
}
|
|
|
@ -47,6 +47,13 @@ void resource_manifest_reader_free(ResourceManifestReader* resource_manifest);
|
|||
*/
|
||||
bool resource_manifest_reader_open(ResourceManifestReader* resource_manifest, const char* filename);
|
||||
|
||||
/**
|
||||
* @brief Rewind manifest to the beginning
|
||||
* @param resource_manifest allocated object
|
||||
* @return true if successful
|
||||
*/
|
||||
bool resource_manifest_rewind(ResourceManifestReader* resource_manifest);
|
||||
|
||||
/**
|
||||
* @brief Read next file/dir entry from manifest
|
||||
* @param resource_manifest allocated object
|
||||
|
|
26
scripts/flipper/assets/heatshrink_stream.py
Normal file
26
scripts/flipper/assets/heatshrink_stream.py
Normal file
|
@ -0,0 +1,26 @@
|
|||
import struct
|
||||
|
||||
|
||||
class HeatshrinkDataStreamHeader:
|
||||
MAGIC = 0x53445348
|
||||
VERSION = 1
|
||||
|
||||
def __init__(self, window_size, lookahead_size):
|
||||
self.window_size = window_size
|
||||
self.lookahead_size = lookahead_size
|
||||
|
||||
def pack(self):
|
||||
return struct.pack(
|
||||
"<IBBB", self.MAGIC, self.VERSION, self.window_size, self.lookahead_size
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def unpack(data):
|
||||
if len(data) != 7:
|
||||
raise ValueError("Invalid header length")
|
||||
magic, version, window_size, lookahead_size = struct.unpack("<IBBB", data)
|
||||
if magic != HeatshrinkDataStreamHeader.MAGIC:
|
||||
raise ValueError("Invalid magic number")
|
||||
if version != HeatshrinkDataStreamHeader.VERSION:
|
||||
raise ValueError("Invalid version")
|
||||
return HeatshrinkDataStreamHeader(window_size, lookahead_size)
|
41
scripts/flipper/assets/tarball.py
Normal file
41
scripts/flipper/assets/tarball.py
Normal file
|
@ -0,0 +1,41 @@
|
|||
import io
|
||||
import tarfile
|
||||
|
||||
import heatshrink2
|
||||
|
||||
from .heatshrink_stream import HeatshrinkDataStreamHeader
|
||||
|
||||
FLIPPER_TAR_FORMAT = tarfile.USTAR_FORMAT
|
||||
TAR_HEATSRINK_EXTENSION = ".ths"
|
||||
|
||||
|
||||
def tar_sanitizer_filter(tarinfo: tarfile.TarInfo):
|
||||
tarinfo.gid = tarinfo.uid = 0
|
||||
tarinfo.mtime = 0
|
||||
tarinfo.uname = tarinfo.gname = "furippa"
|
||||
return tarinfo
|
||||
|
||||
|
||||
def compress_tree_tarball(
|
||||
src_dir, output_name, filter=tar_sanitizer_filter, hs_window=13, hs_lookahead=6
|
||||
):
|
||||
plain_tar = io.BytesIO()
|
||||
with tarfile.open(
|
||||
fileobj=plain_tar,
|
||||
mode="w:",
|
||||
format=FLIPPER_TAR_FORMAT,
|
||||
) as tarball:
|
||||
tarball.add(src_dir, arcname="", filter=filter)
|
||||
plain_tar.seek(0)
|
||||
|
||||
src_data = plain_tar.read()
|
||||
compressed = heatshrink2.compress(
|
||||
src_data, window_sz2=hs_window, lookahead_sz2=hs_lookahead
|
||||
)
|
||||
|
||||
header = HeatshrinkDataStreamHeader(hs_window, hs_lookahead)
|
||||
with open(output_name, "wb") as f:
|
||||
f.write(header.pack())
|
||||
f.write(compressed)
|
||||
|
||||
return len(src_data), len(compressed)
|
145
scripts/hs.py
Executable file
145
scripts/hs.py
Executable file
|
@ -0,0 +1,145 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
import heatshrink2 as hs
|
||||
from flipper.app import App
|
||||
from flipper.assets.heatshrink_stream import HeatshrinkDataStreamHeader
|
||||
from flipper.assets.tarball import compress_tree_tarball
|
||||
|
||||
|
||||
class HSWrapper(App):
|
||||
DEFAULT_WINDOW = 13
|
||||
DEFAULT_LOOKAHEAD = 6
|
||||
|
||||
def init(self):
|
||||
self.subparsers = self.parser.add_subparsers(
|
||||
title="subcommands", dest="subcommand"
|
||||
)
|
||||
|
||||
self.parser_compress = self.subparsers.add_parser(
|
||||
"compress", help="compress file using heatshrink"
|
||||
)
|
||||
self.parser_compress.add_argument(
|
||||
"-w", "--window", help="window size", type=int, default=self.DEFAULT_WINDOW
|
||||
)
|
||||
self.parser_compress.add_argument(
|
||||
"-l",
|
||||
"--lookahead",
|
||||
help="lookahead size",
|
||||
type=int,
|
||||
default=self.DEFAULT_LOOKAHEAD,
|
||||
)
|
||||
self.parser_compress.add_argument("file", help="file to compress")
|
||||
self.parser_compress.add_argument(
|
||||
"-o", "--output", help="output file", required=True
|
||||
)
|
||||
self.parser_compress.set_defaults(func=self.compress)
|
||||
|
||||
self.parser_decompress = self.subparsers.add_parser(
|
||||
"decompress", help="decompress file using heatshrink"
|
||||
)
|
||||
self.parser_decompress.add_argument("file", help="file to decompress")
|
||||
self.parser_decompress.add_argument(
|
||||
"-o", "--output", help="output file", required=True
|
||||
)
|
||||
self.parser_decompress.set_defaults(func=self.decompress)
|
||||
|
||||
self.parser_info = self.subparsers.add_parser("info", help="show file info")
|
||||
self.parser_info.add_argument("file", help="file to show info for")
|
||||
self.parser_info.set_defaults(func=self.info)
|
||||
|
||||
self.parser_tar = self.subparsers.add_parser(
|
||||
"tar", help="create a tarball and compress it"
|
||||
)
|
||||
self.parser_tar.add_argument("dir", help="directory to tar")
|
||||
self.parser_tar.add_argument(
|
||||
"-o", "--output", help="output file", required=True
|
||||
)
|
||||
self.parser_tar.add_argument(
|
||||
"-w", "--window", help="window size", type=int, default=self.DEFAULT_WINDOW
|
||||
)
|
||||
self.parser_tar.add_argument(
|
||||
"-l",
|
||||
"--lookahead",
|
||||
help="lookahead size",
|
||||
type=int,
|
||||
default=self.DEFAULT_LOOKAHEAD,
|
||||
)
|
||||
self.parser_tar.set_defaults(func=self.tar)
|
||||
|
||||
def compress(self):
|
||||
args = self.args
|
||||
|
||||
with open(args.file, "rb") as f:
|
||||
data = f.read()
|
||||
|
||||
compressed = hs.compress(
|
||||
data, window_sz2=args.window, lookahead_sz2=args.lookahead
|
||||
)
|
||||
|
||||
with open(args.output, "wb") as f:
|
||||
header = HeatshrinkDataStreamHeader(args.window, args.lookahead)
|
||||
f.write(header.pack())
|
||||
f.write(compressed)
|
||||
|
||||
self.logger.info(
|
||||
f"Compressed {len(data)} bytes to {len(compressed)} bytes, "
|
||||
f"compression ratio: {len(compressed) * 100 / len(data):.2f}%"
|
||||
)
|
||||
return 0
|
||||
|
||||
def decompress(self):
|
||||
args = self.args
|
||||
|
||||
with open(args.file, "rb") as f:
|
||||
header = HeatshrinkDataStreamHeader.unpack(f.read(7))
|
||||
compressed = f.read()
|
||||
|
||||
self.logger.info(
|
||||
f"Decompressing with window size {header.window_size} and lookahead size {header.lookahead_size}"
|
||||
)
|
||||
|
||||
data = hs.decompress(
|
||||
compressed,
|
||||
window_sz2=header.window_size,
|
||||
lookahead_sz2=header.lookahead_size,
|
||||
)
|
||||
|
||||
with open(args.output, "wb") as f:
|
||||
f.write(data)
|
||||
|
||||
self.logger.info(f"Decompressed {len(compressed)} bytes to {len(data)} bytes")
|
||||
return 0
|
||||
|
||||
def info(self):
|
||||
args = self.args
|
||||
|
||||
try:
|
||||
with open(args.file, "rb") as f:
|
||||
header = HeatshrinkDataStreamHeader.unpack(f.read(7))
|
||||
except Exception as e:
|
||||
self.logger.error(f"Error: {e}")
|
||||
return 1
|
||||
|
||||
self.logger.info(
|
||||
f"Window size: {header.window_size}, lookahead size: {header.lookahead_size}"
|
||||
)
|
||||
|
||||
return 0
|
||||
|
||||
def tar(self):
|
||||
args = self.args
|
||||
|
||||
orig_size, compressed_size = compress_tree_tarball(
|
||||
args.dir, args.output, hs_window=args.window, hs_lookahead=args.lookahead
|
||||
)
|
||||
|
||||
self.logger.info(
|
||||
f"Tarred and compressed {orig_size} bytes to {compressed_size} bytes, "
|
||||
f"compression ratio: {compressed_size * 100 / orig_size:.2f}%"
|
||||
)
|
||||
|
||||
return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
HSWrapper()()
|
|
@ -9,6 +9,7 @@ from os.path import exists, join, relpath, basename, split
|
|||
|
||||
from ansi.color import fg
|
||||
from flipper.app import App
|
||||
from flipper.assets.tarball import FLIPPER_TAR_FORMAT, tar_sanitizer_filter
|
||||
from update import Main as UpdateMain
|
||||
|
||||
|
||||
|
@ -266,20 +267,15 @@ class Main(App):
|
|||
),
|
||||
"w:gz",
|
||||
compresslevel=9,
|
||||
format=tarfile.USTAR_FORMAT,
|
||||
format=FLIPPER_TAR_FORMAT,
|
||||
) as tar:
|
||||
self.note_dist_component(
|
||||
"update", "tgz", self.get_dist_path(bundle_tgz)
|
||||
)
|
||||
|
||||
# Strip uid and gid in case of overflow
|
||||
def tar_filter(tarinfo):
|
||||
tarinfo.uid = tarinfo.gid = 0
|
||||
tarinfo.mtime = 0
|
||||
tarinfo.uname = tarinfo.gname = "furippa"
|
||||
return tarinfo
|
||||
|
||||
tar.add(bundle_dir, arcname=bundle_dir_name, filter=tar_filter)
|
||||
tar.add(
|
||||
bundle_dir, arcname=bundle_dir_name, filter=tar_sanitizer_filter
|
||||
)
|
||||
return bundle_result
|
||||
|
||||
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
import io
|
||||
import math
|
||||
import os
|
||||
import shutil
|
||||
|
@ -7,9 +8,12 @@ import tarfile
|
|||
import zlib
|
||||
from os.path import exists, join
|
||||
|
||||
import heatshrink2
|
||||
from flipper.app import App
|
||||
from flipper.assets.coprobin import CoproBinary, get_stack_type
|
||||
from flipper.assets.heatshrink_stream import HeatshrinkDataStreamHeader
|
||||
from flipper.assets.obdata import ObReferenceValues, OptionBytesData
|
||||
from flipper.assets.tarball import compress_tree_tarball, tar_sanitizer_filter
|
||||
from flipper.utils.fff import FlipperFormatFile
|
||||
from slideshow import Main as SlideshowMain
|
||||
|
||||
|
@ -20,8 +24,7 @@ class Main(App):
|
|||
|
||||
# No compression, plain tar
|
||||
RESOURCE_TAR_MODE = "w:"
|
||||
RESOURCE_TAR_FORMAT = tarfile.USTAR_FORMAT
|
||||
RESOURCE_FILE_NAME = "resources.tar"
|
||||
RESOURCE_FILE_NAME = "resources.ths" # .Tar.HeatShrink
|
||||
RESOURCE_ENTRY_NAME_MAX_LENGTH = 100
|
||||
|
||||
WHITELISTED_STACK_TYPES = set(
|
||||
|
@ -34,6 +37,9 @@ class Main(App):
|
|||
FLASH_BASE = 0x8000000
|
||||
MIN_LFS_PAGES = 6
|
||||
|
||||
HEATSHRINK_WINDOW_SIZE = 13
|
||||
HEATSHRINK_LOOKAHEAD_SIZE = 6
|
||||
|
||||
# Post-update slideshow
|
||||
SPLASH_BIN_NAME = "splash.bin"
|
||||
|
||||
|
@ -221,23 +227,19 @@ class Main(App):
|
|||
f"Cannot package resource: name '{tarinfo.name}' too long"
|
||||
)
|
||||
raise ValueError("Resource name too long")
|
||||
tarinfo.gid = tarinfo.uid = 0
|
||||
tarinfo.mtime = 0
|
||||
tarinfo.uname = tarinfo.gname = "furippa"
|
||||
return tarinfo
|
||||
return tar_sanitizer_filter(tarinfo)
|
||||
|
||||
def package_resources(self, srcdir: str, dst_name: str):
|
||||
try:
|
||||
with tarfile.open(
|
||||
dst_name, self.RESOURCE_TAR_MODE, format=self.RESOURCE_TAR_FORMAT
|
||||
) as tarball:
|
||||
tarball.add(
|
||||
srcdir,
|
||||
arcname="",
|
||||
filter=self._tar_filter,
|
||||
)
|
||||
src_size, compressed_size = compress_tree_tarball(
|
||||
srcdir, dst_name, filter=self._tar_filter
|
||||
)
|
||||
|
||||
self.logger.info(
|
||||
f"Resources compression ratio: {compressed_size * 100 / src_size:.2f}%"
|
||||
)
|
||||
return True
|
||||
except ValueError as e:
|
||||
except Exception as e:
|
||||
self.logger.error(f"Cannot package resources: {e}")
|
||||
return False
|
||||
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
entry,status,name,type,params
|
||||
Version,+,66.2,,
|
||||
Version,+,67.1,,
|
||||
Header,+,applications/services/bt/bt_service/bt.h,,
|
||||
Header,+,applications/services/bt/bt_service/bt_keys_storage.h,,
|
||||
Header,+,applications/services/cli/cli.h,,
|
||||
|
@ -780,13 +780,20 @@ Function,+,composite_api_resolver_add,void,"CompositeApiResolver*, const ElfApiI
|
|||
Function,+,composite_api_resolver_alloc,CompositeApiResolver*,
|
||||
Function,+,composite_api_resolver_free,void,CompositeApiResolver*
|
||||
Function,+,composite_api_resolver_get,const ElfApiInterface*,CompositeApiResolver*
|
||||
Function,+,compress_alloc,Compress*,uint16_t
|
||||
Function,+,compress_alloc,Compress*,"CompressType, const void*"
|
||||
Function,+,compress_decode,_Bool,"Compress*, uint8_t*, size_t, uint8_t*, size_t, size_t*"
|
||||
Function,+,compress_decode_streamed,_Bool,"Compress*, CompressIoCallback, void*, CompressIoCallback, void*"
|
||||
Function,+,compress_encode,_Bool,"Compress*, uint8_t*, size_t, uint8_t*, size_t, size_t*"
|
||||
Function,+,compress_free,void,Compress*
|
||||
Function,+,compress_icon_alloc,CompressIcon*,size_t
|
||||
Function,+,compress_icon_decode,void,"CompressIcon*, const uint8_t*, uint8_t**"
|
||||
Function,+,compress_icon_free,void,CompressIcon*
|
||||
Function,+,compress_stream_decoder_alloc,CompressStreamDecoder*,"CompressType, const void*, CompressIoCallback, void*"
|
||||
Function,+,compress_stream_decoder_free,void,CompressStreamDecoder*
|
||||
Function,+,compress_stream_decoder_read,_Bool,"CompressStreamDecoder*, uint8_t*, size_t"
|
||||
Function,+,compress_stream_decoder_rewind,_Bool,CompressStreamDecoder*
|
||||
Function,+,compress_stream_decoder_seek,_Bool,"CompressStreamDecoder*, size_t"
|
||||
Function,+,compress_stream_decoder_tell,size_t,CompressStreamDecoder*
|
||||
Function,-,copysign,double,"double, double"
|
||||
Function,-,copysignf,float,"float, float"
|
||||
Function,-,copysignl,long double,"long double, long double"
|
||||
|
@ -2617,6 +2624,8 @@ Function,+,tar_archive_file_finalize,_Bool,TarArchive*
|
|||
Function,+,tar_archive_finalize,_Bool,TarArchive*
|
||||
Function,+,tar_archive_free,void,TarArchive*
|
||||
Function,+,tar_archive_get_entries_count,int32_t,TarArchive*
|
||||
Function,+,tar_archive_get_mode_for_path,TarOpenMode,const char*
|
||||
Function,+,tar_archive_get_read_progress,_Bool,"TarArchive*, int32_t*, int32_t*"
|
||||
Function,+,tar_archive_open,_Bool,"TarArchive*, const char*, TarOpenMode"
|
||||
Function,+,tar_archive_set_file_callback,void,"TarArchive*, tar_unpack_file_cb, void*"
|
||||
Function,+,tar_archive_store_data,_Bool,"TarArchive*, const char*, const uint8_t*, const int32_t"
|
||||
|
@ -2816,6 +2825,7 @@ Variable,-,_sys_nerr,int,
|
|||
Variable,-,ble_profile_hid,const FuriHalBleProfileTemplate*,
|
||||
Variable,-,ble_profile_serial,const FuriHalBleProfileTemplate*,
|
||||
Variable,+,cli_vcp,CliSession,
|
||||
Variable,+,compress_config_heatshrink_default,const CompressConfigHeatshrink,
|
||||
Variable,+,firmware_api_interface,const ElfApiInterface*,
|
||||
Variable,+,furi_hal_i2c_bus_external,FuriHalI2cBus,
|
||||
Variable,+,furi_hal_i2c_bus_power,FuriHalI2cBus,
|
||||
|
|
|
|
@ -1,5 +1,5 @@
|
|||
entry,status,name,type,params
|
||||
Version,+,66.2,,
|
||||
Version,+,67.1,,
|
||||
Header,+,applications/drivers/subghz/cc1101_ext/cc1101_ext_interconnect.h,,
|
||||
Header,+,applications/services/bt/bt_service/bt.h,,
|
||||
Header,+,applications/services/bt/bt_service/bt_keys_storage.h,,
|
||||
|
@ -866,13 +866,20 @@ Function,+,composite_api_resolver_add,void,"CompositeApiResolver*, const ElfApiI
|
|||
Function,+,composite_api_resolver_alloc,CompositeApiResolver*,
|
||||
Function,+,composite_api_resolver_free,void,CompositeApiResolver*
|
||||
Function,+,composite_api_resolver_get,const ElfApiInterface*,CompositeApiResolver*
|
||||
Function,+,compress_alloc,Compress*,uint16_t
|
||||
Function,+,compress_alloc,Compress*,"CompressType, const void*"
|
||||
Function,+,compress_decode,_Bool,"Compress*, uint8_t*, size_t, uint8_t*, size_t, size_t*"
|
||||
Function,+,compress_decode_streamed,_Bool,"Compress*, CompressIoCallback, void*, CompressIoCallback, void*"
|
||||
Function,+,compress_encode,_Bool,"Compress*, uint8_t*, size_t, uint8_t*, size_t, size_t*"
|
||||
Function,+,compress_free,void,Compress*
|
||||
Function,+,compress_icon_alloc,CompressIcon*,size_t
|
||||
Function,+,compress_icon_decode,void,"CompressIcon*, const uint8_t*, uint8_t**"
|
||||
Function,+,compress_icon_free,void,CompressIcon*
|
||||
Function,+,compress_stream_decoder_alloc,CompressStreamDecoder*,"CompressType, const void*, CompressIoCallback, void*"
|
||||
Function,+,compress_stream_decoder_free,void,CompressStreamDecoder*
|
||||
Function,+,compress_stream_decoder_read,_Bool,"CompressStreamDecoder*, uint8_t*, size_t"
|
||||
Function,+,compress_stream_decoder_rewind,_Bool,CompressStreamDecoder*
|
||||
Function,+,compress_stream_decoder_seek,_Bool,"CompressStreamDecoder*, size_t"
|
||||
Function,+,compress_stream_decoder_tell,size_t,CompressStreamDecoder*
|
||||
Function,-,copysign,double,"double, double"
|
||||
Function,-,copysignf,float,"float, float"
|
||||
Function,-,copysignl,long double,"long double, long double"
|
||||
|
@ -2552,6 +2559,7 @@ Function,+,mf_classic_save,_Bool,"const MfClassicData*, FlipperFormat*"
|
|||
Function,+,mf_classic_set_block_read,void,"MfClassicData*, uint8_t, MfClassicBlock*"
|
||||
Function,+,mf_classic_set_key_found,void,"MfClassicData*, uint8_t, MfClassicKeyType, uint64_t"
|
||||
Function,+,mf_classic_set_key_not_found,void,"MfClassicData*, uint8_t, MfClassicKeyType"
|
||||
Function,+,mf_classic_set_sector_trailer_read,void,"MfClassicData*, uint8_t, MfClassicSectorTrailer*"
|
||||
Function,+,mf_classic_set_uid,_Bool,"MfClassicData*, const uint8_t*, size_t"
|
||||
Function,+,mf_classic_value_to_block,void,"int32_t, uint8_t, MfClassicBlock*"
|
||||
Function,+,mf_classic_verify,_Bool,"MfClassicData*, const FuriString*"
|
||||
|
@ -3519,6 +3527,8 @@ Function,+,tar_archive_file_finalize,_Bool,TarArchive*
|
|||
Function,+,tar_archive_finalize,_Bool,TarArchive*
|
||||
Function,+,tar_archive_free,void,TarArchive*
|
||||
Function,+,tar_archive_get_entries_count,int32_t,TarArchive*
|
||||
Function,+,tar_archive_get_mode_for_path,TarOpenMode,const char*
|
||||
Function,+,tar_archive_get_read_progress,_Bool,"TarArchive*, int32_t*, int32_t*"
|
||||
Function,+,tar_archive_open,_Bool,"TarArchive*, const char*, TarOpenMode"
|
||||
Function,+,tar_archive_set_file_callback,void,"TarArchive*, tar_unpack_file_cb, void*"
|
||||
Function,+,tar_archive_store_data,_Bool,"TarArchive*, const char*, const uint8_t*, const int32_t"
|
||||
|
@ -3723,6 +3733,7 @@ Variable,-,_sys_nerr,int,
|
|||
Variable,-,ble_profile_hid,const FuriHalBleProfileTemplate*,
|
||||
Variable,-,ble_profile_serial,const FuriHalBleProfileTemplate*,
|
||||
Variable,+,cli_vcp,CliSession,
|
||||
Variable,+,compress_config_heatshrink_default,const CompressConfigHeatshrink,
|
||||
Variable,+,firmware_api_interface,const ElfApiInterface*,
|
||||
Variable,+,furi_hal_i2c_bus_external,FuriHalI2cBus,
|
||||
Variable,+,furi_hal_i2c_bus_power,FuriHalI2cBus,
|
||||
|
|
|
Loading…
Reference in a new issue