Removed old code

Tommy Mikkelsen 2022-12-06 23:16:50 +01:00
parent 2c9323a15e
commit f962812fa1


@@ -1859,98 +1859,6 @@ const etHelper = new class ETHELPER {
catch (error){
log.error(`[etHelper] (createOutFile) Exception happened when creating xlsx stream as: ${error}`);
}
/*
var sectionData, x;
{
sectionData, x
// await etHelper.getAndSaveItemsToFile({stream: stream});
// Get all the items in small chunks
sectionData = await et.getSectionData();
log.verbose(`Amount of chunks in sectionData are: ${sectionData.length}`);
let item;
let counter = 1;
const totalSize = JSONPath({path: '$..totalSize', json: sectionData[0]});
let jPath, sectionChunk;
// We need to load fields and defs into def var
switch(libType) {
case et.ETmediaType.Libraries:
jPath = "$.MediaContainer.Directory[*]";
break;
default:
jPath = "$.MediaContainer.Metadata[*]";
}
const bExportPosters = wtconfig.get(`ET.CustomLevels.${et.expSettings.libTypeSec}.Posters.${et.expSettings.levelName}`, false);
const bExportArt = wtconfig.get(`ET.CustomLevels.${et.expSettings.libTypeSec}.Art.${et.expSettings.levelName}`, false);
for (x=0; x<sectionData.length; x++)
{
et.updateStatusMsg(et.rawMsgType.Chuncks, i18n.t('Modules.ET.Status.Processing-Chunk', {current: x, total: sectionData.length -1}));
sectionChunk = await JSONPath({path: jPath, json: sectionData[x]});
const fields = et.getFields( libType, level);
if ( call == 1 )
{
for (item of sectionChunk){
et.updateStatusMsg(et.rawMsgType.Items, i18n.t('Modules.ET.Status.ProcessItem', {count: counter, total: totalSize}));
await excel2.addRowToTmp( { libType: libType, level: level, data: item, stream: stream, fields: fields } );
if (bExportPosters)
{
await this.exportPics( { type: 'posters', data: item, baseURL: baseURL, accessToken: accessToken } )
}
if (bExportArt)
{
await this.exportPics( { type: 'arts', data: item, baseURL: baseURL, accessToken: accessToken } )
}
counter += 1;
await new Promise(resolve => setTimeout(resolve, 1));
}
}
else
{
// Get ratingKeys in the chunk
const urls = await JSONPath({path: '$..ratingKey', json: sectionChunk});
let urlStr = urls.join(',');
log.verbose(`Items to lookup are: ${urlStr}`);
et.updateStatusMsg(et.rawMsgType.Chuncks, i18n.t('Modules.ET.Status.Processing-Chunk', {current: x, total: sectionData.length -1}));
const urlWithPath = '/library/metadata/' + urlStr + '?' + this.uriParams;
log.verbose(`Items retrieved`);
const contents = await et.getItemData({baseURL: baseURL, accessToken: accessToken, element: urlWithPath});
const contentsItems = await JSONPath({path: '$.MediaContainer.Metadata[*]', json: contents});
for (item of contentsItems){
et.updateStatusMsg(et.rawMsgType.Items, i18n.t('Modules.ET.Status.ProcessItem', {count: counter, total: totalSize}));
if (bExportPosters)
{
await this.exportPics( { type: 'posters', data: item, baseURL: baseURL, accessToken: accessToken } )
}
if (bExportArt)
{
await this.exportPics( { type: 'arts', data: item, baseURL: baseURL, accessToken: accessToken } )
}
await excel2.addRowToTmp( { libType: libType, level: level, data: item, stream: stream, fields: fields } );
counter += 1;
await new Promise(resolve => setTimeout(resolve, 1));
}
}
}
}
*/
/*
// Need to export to xlsx as well?
if (wtconfig.get('ET.ExpXLSX')){
log.info('We need to create an xlsx file as well');
et.updateStatusMsg( et.rawMsgType.Info, i18n.t('Modules.ET.Status.CreateExlsFile'));
await excel2.createXLSXFile( {csvFile: newFile, level: level, libType: libType, libName: libName, exType: exType, pListType: pListType});
}
*/
}
// Generate the filename for an export
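For reference, the block removed above batched its metadata lookups by joining each chunk's ratingKey values into a single /library/metadata/ request. Below is a minimal standalone sketch of that pattern only; axios and the fetchChunkMetadata name are assumptions for illustration and are not part of the etHelper code.

// Sketch only: one Plex metadata request per chunk of items.
// axios, baseURL, accessToken and fetchChunkMetadata are illustrative
// stand-ins, not the actual WebTools-NG export path.
const axios = require('axios');

async function fetchChunkMetadata({ baseURL, accessToken, chunk }) {
    // Join every ratingKey in the chunk so one request covers all items
    const keys = chunk.map(item => item.ratingKey).join(',');
    const url = `${baseURL}/library/metadata/${keys}`;
    const response = await axios.get(url, {
        params: { 'X-Plex-Token': accessToken },
        headers: { Accept: 'application/json' }
    });
    // Plex returns the looked-up items under MediaContainer.Metadata
    return response.data.MediaContainer.Metadata || [];
}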