Merge pull request #235 from WebTools-NG/#226-Exporting-episodes
#226 exporting episodes
This commit is contained in: commit 2eba458142
4 changed files with 35 additions and 19 deletions
@@ -7,6 +7,7 @@
 * [#229 Bump electron from 7.3.3 to 9.4.0](https://github.com/WebTools-NG/WebTools-NG/pull/229)
 * [#227 ET Export filename](https://github.com/WebTools-NG/WebTools-NG/pull/227)
 * [#231 Open LogDir folder failes](https://github.com/WebTools-NG/WebTools-NG/pull/231)
+* [#226 Exporting episodes](https://github.com/WebTools-NG/WebTools-NG/pull/226)

 ## V0.1.13
@@ -233,7 +233,9 @@
     "Processing-Chunk": "Processing chunk {current} of {total}",
     "Processing-Chunk-Detailed": "Processing chunk {current} of {total}.\nitems to export: {urlStr}",
     "StartExport": "Starting to Export",
-    "GetSectionItems": "Fetching items {idx} in chunks of {chunck}"
+    "CreateExlsFile": "Creating Excel file",
+    "ProcessItem": "Processing item {count} of {total}",
+    "GetSectionItems": "Fetching items {idx} in chunks of {chunck} with a total of {totalSize}"
   },
   "ErrorNoOutDirTitle": "No output directory defined",
   "ErrorNoOutDirMsg": "You need to define an output directory in the settings page first",
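The new and changed keys above are plain vue-i18n messages with named interpolation; the exporter resolves them with `i18n.t(...)` and pushes the result into the store, as the et.js hunks further down show. A minimal, self-contained sketch of that resolution, assuming a Vue 2 / vue-i18n setup and using a stand-in `messages` object rather than the real locale file:

```js
// Minimal sketch: vue-i18n named interpolation feeding the export status.
// `messages` is a stand-in for the real locale file; the mutation name
// UPDATE_EXPORTSTATUS matches the calls seen in et.js below.
import Vue from 'vue';
import VueI18n from 'vue-i18n';

Vue.use(VueI18n);

const i18n = new VueI18n({
  locale: 'en',
  messages: {
    en: {
      Modules: {
        ET: {
          Status: {
            ProcessItem: 'Processing item {count} of {total}'
          }
        }
      }
    }
  }
});

// Resolves to "Processing item 12 of 250"
const msg = i18n.t('Modules.ET.Status.ProcessItem', { count: 12, total: 250 });
// In the exporter the resolved string is committed to the store:
// store.commit('UPDATE_EXPORTSTATUS', msg);
```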
@@ -73,7 +73,7 @@
           <b-form-textarea
             id="status"
             v-bind:placeholder="$t('Modules.ET.Status.Status')"
-            v-model="count"
+            v-model="statusMsg"
             :disabled=true
             rows="1"
             max-rows="8"
@@ -89,7 +89,6 @@
 <script>
   import { et } from "./scripts/et";
   import i18n from '../../../i18n';
-  import store from '../../../store';
   import { wtconfig } from '../General/wtutils';

   const log = require("electron-log");
@@ -124,6 +123,10 @@
     },
     selectedServerAddressUpdateInProgress: async function(){
       this.selLibraryWait = false;
     },
+    statusMsg: async function(){
+      console.log('Ged Watch for statusMsg: ' + this.statusMsg())
+      this.statusMsg();
+    }
   },
   created() {
@@ -134,10 +137,13 @@
   },
   computed: {
     selectedServerAddress: function(){
       return this.$store.getters.getSelectedServerAddress
     },
     selectedServerAddressUpdateInProgress(){
       return this.$store.getters.getSelectedServerAddressUpdateInProgress
     },
+    statusMsg: function(){
+      return this.$store.getters.getExportStatus
+    },
     exportLevels: function() {
       et.getLevelDisplayName('My Level', this.selMediaType);
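On the component side, the textarea's `v-model` now points at the new `statusMsg` computed rather than the removed `count` computed, so every status commit reaches the UI through `getExportStatus`. The store module behind that getter is not part of this commit; the sketch below is an assumed shape, with only the getter and mutation names (`getExportStatus`, `UPDATE_EXPORTSTATUS`) taken from the diff:

```js
// Hypothetical sketch of the Vuex state backing statusMsg; only the getter
// and mutation names come from the diff, the rest is assumed.
const exportModule = {
  state: {
    exportStatus: ''
  },
  getters: {
    getExportStatus: state => state.exportStatus
  },
  mutations: {
    UPDATE_EXPORTSTATUS(state, message) {
      state.exportStatus = message;
    }
  }
};

// In ET.vue the computed then simply proxies the getter:
// statusMsg: function () { return this.$store.getters.getExportStatus }
export default exportModule;
```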
@@ -189,9 +195,6 @@
       });
       item['options']=options;
       return options;
     },
-    count () {
-      return store.getters.getExportStatus
-    }
   },
   methods: {
@@ -52,12 +52,13 @@ const et = new class ET {
         {
             postURI += '&type=4'
             postURI +='&checkFiles=1&includeAllConcerts=1&includeBandwidths=1&includeChapters=1&includeChildren=1&includeConcerts=1&includeExtras=1&includeFields=1&includeGeolocation=1&includeLoudnessRamps=1&includeMarkers=1&includeOnDeck=1&includePopularLeaves=1&includePreferences=1&includeRelated=1&includeRelatedCount=1&includeReviews=1&includeStations=1'
-            log.verbose(`Calling url ${baseURL + element + postURI}`)
+            log.info(`Calling url ${baseURL + element + postURI}`)
         }
         chuncks = await et.getItemData({baseURL: baseURL, accessToken: accessToken, element: element, postURI: postURI});
         size = JSONPath({path: '$.MediaContainer.size', json: chuncks});
-        log.verbose(`getSectionData chunck size is ${size} and idx is ${idx}`)
-        store.commit("UPDATE_EXPORTSTATUS", i18n.t('Modules.ET.Status.GetSectionItems', {idx: idx, chunck: size}))
+        const totalSize = JSONPath({path: '$.MediaContainer.totalSize', json: chuncks});
+        log.info(`getSectionData chunck size is ${size} and idx is ${idx} and totalsize is ${totalSize}`)
+        store.commit("UPDATE_EXPORTSTATUS", i18n.t('Modules.ET.Status.GetSectionItems', {idx: idx, chunck: size, totalSize: totalSize}))
         sectionData.push(chuncks)
         log.debug(`Pushed chunk as ${JSON.stringify(chuncks)}`)
         idx = idx + step;
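`getSectionData` walks a library in pages: each `et.getItemData` call returns one `MediaContainer`, `JSONPath` (jsonpath-plus style, matching the calls in this file) extracts `size` and `totalSize`, and `idx` advances by `step` until everything is fetched. A reduced sketch of that paging loop, with a hypothetical `fetchChunk(idx, step)` standing in for the real Plex request:

```js
// Reduced sketch of the chunked paging pattern used in getSectionData.
// fetchChunk() is a hypothetical stand-in for et.getItemData(); the JSONPath
// calls mirror the ones in the diff (jsonpath-plus object signature).
const { JSONPath } = require('jsonpath-plus');

async function getAllChunks(fetchChunk, step = 20) {
  const sectionData = [];
  let idx = 0;
  let totalSize = Infinity;
  while (idx < totalSize) {
    const chunk = await fetchChunk(idx, step);  // one MediaContainer per call
    const size = JSONPath({ path: '$.MediaContainer.size', json: chunk })[0];
    totalSize = JSONPath({ path: '$.MediaContainer.totalSize', json: chunk })[0];
    if (!size) break;                           // nothing returned, stop paging
    sectionData.push(chunk);
    idx += step;
  }
  return sectionData;
}
```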
@@ -173,7 +174,7 @@ const et = new class ET {
             // We are dealing with a custom level here
             realName = level
         }
-        log.debug(`RealName is ${realName}`)
+        // log.debug(`RealName is ${realName}`)
         // We need to load fields and defs into def var
         switch(libType) {
             case 'movie':
@@ -498,7 +499,7 @@ const excel2 = new class Excel {
     async SaveWorkbook(Workbook, Library, Level, Type) {
         const fs = require('fs')
         const name = await this.getFileName( { Library: Library, Level: Level, Type: Type })
-        log.debug('Saving output file as: ' + name)
+        log.info('Saving output file as: ' + name)
         // Save Excel on Hard Disk
         Workbook.xlsx.writeBuffer()
             .then(buffer => fs.writeFileSync(name, buffer))
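`SaveWorkbook` uses ExcelJS's buffer API: `Workbook.xlsx.writeBuffer()` resolves to a Buffer, which is then written to disk in one go. A small standalone example of the same pattern, with a hypothetical sheet and file name:

```js
// Standalone example of the ExcelJS save pattern used in SaveWorkbook.
// The sheet contents and file name here are hypothetical.
const ExcelJS = require('exceljs');
const fs = require('fs');

async function saveExample() {
  const workbook = new ExcelJS.Workbook();
  const sheet = workbook.addWorksheet('Export');
  sheet.addRow(['title', 'year']);
  sheet.addRow(['Some Movie', 1999]);

  const buffer = await workbook.xlsx.writeBuffer(); // resolves to a Buffer
  fs.writeFileSync('example-export.xlsx', buffer);
}

saveExample();
```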
@@ -610,7 +611,7 @@ const excel2 = new class Excel {
     }

     async addRowToTmp( { libType, level, data, stream }) {
-        log.debug(`Start addRowToTmp. libType: ${libType} - level: ${level}`)
+        // log.debug(`Start addRowToTmp. libType: ${libType} - level: ${level}`)
         let date, year, month, day, hours, minutes, seconds
         const fields = et.getFields( libType, level)
         let lookup, val, array, i, valArray, valArrayVal, subType, subKey
@@ -818,6 +819,8 @@ const excel2 = new class Excel {
         var sectionData = await et.getSectionData({sectionName: libName, baseURL: baseURL, accessToken: accessToken, libType: libType})
         log.verbose(`Amount of chunks in sectionData are: ${sectionData.length}`)
         let item
+        let counter = 1
+        const totalSize = JSONPath({path: '$..totalSize', json: sectionData[0]});
         for (var x=0; x<sectionData.length; x++)
         {
             store.commit("UPDATE_EXPORTSTATUS", i18n.t('Modules.ET.Status.Processing-Chunk', {current: x, total: sectionData.length}))
@@ -825,7 +828,10 @@ const excel2 = new class Excel {
             if ( call == 1 )
             {
                 for (item of sectionChunk){
+                    store.commit("UPDATE_EXPORTSTATUS", i18n.t('Modules.ET.Status.ProcessItem', {count: counter, total: totalSize}));
                     await excel2.addRowToTmp( { libType: libType, level: level, data: item, stream: stream } );
+                    counter += 1;
+                    await new Promise(resolve => setTimeout(resolve, 1));
                 }
             }
             else
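The per-item status commit is followed by `await new Promise(resolve => setTimeout(resolve, 1))`, which yields control to the event loop for a tick so the renderer can repaint the status textarea between items instead of freezing until the whole chunk is processed. The same idea as a small helper, assuming nothing beyond standard timers:

```js
// Yield to the event loop for ~1 ms so pending UI updates can render.
// Equivalent to the inline `await new Promise(resolve => setTimeout(resolve, 1))`.
function yieldToUI(ms = 1) {
  return new Promise(resolve => setTimeout(resolve, ms));
}

async function processItems(items, handleItem) {
  let counter = 1;
  for (const item of items) {
    await handleItem(item, counter); // e.g. add the row and commit a status update
    counter += 1;
    await yieldToUI();               // let the status textarea repaint between items
  }
}
```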
@@ -840,7 +846,10 @@ const excel2 = new class Excel {
                     const contents = await et.getItemData({baseURL: baseURL, accessToken: accessToken, element: urlWIthPath});
                     const contentsItems = await JSONPath({path: '$.MediaContainer.Metadata[*]', json: contents});
                     for (item of contentsItems){
+                        store.commit("UPDATE_EXPORTSTATUS", i18n.t('Modules.ET.Status.ProcessItem', {count: counter, total: totalSize}));
                         await excel2.addRowToTmp( { libType: libType, level: level, data: item, stream: stream } );
+                        counter += 1;
+                        await new Promise(resolve => setTimeout(resolve, 1));
                     }
                 }
             }
@@ -852,6 +861,7 @@ const excel2 = new class Excel {
         // Need to export to xlsx as well?
         if (wtconfig.get('ET.ExpExcel')){
             log.info('We need to create an xlsx file as well')
+            store.commit("UPDATE_EXPORTSTATUS", i18n.t('Modules.ET.Status.CreateExlsFile'))
             await excel2.createXLSXFile( {csvFile: newFile, level: level, libType: libType, libName: libName})
         }
         store.commit("UPDATE_EXPORTSTATUS", `Export finished. File:"${newFile}" created`);
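The Excel conversion at the end only runs when the `ET.ExpExcel` setting is enabled, and it now reports the new `CreateExlsFile` status before the final "Export finished" commit. A condensed, self-contained sketch of that tail end of the flow; the real `wtconfig`, `store`, `i18n`, `log` and `excel2` objects come from et.js's module-level imports and are stubbed here only to make the shape visible:

```js
// Condensed sketch of the end of the export flow; the stubs below stand in
// for the objects et.js imports, and csvFile is a hypothetical path.
const wtconfig = { get: key => key === 'ET.ExpExcel' };            // stub: xlsx export enabled
const store    = { commit: (type, payload) => console.log(type, payload) };
const i18n     = { t: key => key };                                 // stub translator
const log      = { info: console.log };
const excel2   = { createXLSXFile: async opts => log.info(`would convert ${opts.csvFile}`) };

async function finishExport({ csvFile, level, libType, libName }) {
  if (wtconfig.get('ET.ExpExcel')) {
    log.info('We need to create an xlsx file as well');
    store.commit('UPDATE_EXPORTSTATUS', i18n.t('Modules.ET.Status.CreateExlsFile'));
    await excel2.createXLSXFile({ csvFile, level, libType, libName });
  }
  store.commit('UPDATE_EXPORTSTATUS', `Export finished. File:"${csvFile}" created`);
}

finishExport({ csvFile: 'export.csv', level: 'all', libType: 'show', libName: 'TV Shows' });
```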