diff --git a/src/components/DownloadLogsFilePopover.vue b/src/components/DownloadLogsFilePopover.vue
index 27f2f66c..88026a13 100644
--- a/src/components/DownloadLogsFilePopover.vue
+++ b/src/components/DownloadLogsFilePopover.vue
@@ -1,16 +1,16 @@
@@ -23,11 +23,10 @@ import {
   IonListHeader,
   popoverController
 } from "@ionic/vue";
-import { mapGetters } from 'vuex';
 import { defineComponent } from "vue";
 import { translate } from "@hotwax/dxp-components";
 import { JobService } from "@/services/JobService";
-import { responseFileType, showToast } from '@/utils';
+import { saveDataFile, showToast } from '@/utils';
 import logger from "@/logger";
 
 export default defineComponent({
@@ -38,36 +37,28 @@ export default defineComponent({
     IonList,
     IonListHeader
   },
-  computed: {
-    ...mapGetters({
-      getDataResourceId: 'job/getDataResourceIds',
-    }),
-  },
-  props: ["log"],
+  props: ["dataManagerLog"],
   methods: {
     async downloadLogFile(type: any) {
-      let contentIdType;
       let dataResource = {} as any;
 
       if (type === 'logFile') {
-        contentIdType = 'logFileContentId';
+        dataResource.dataResourceId = this.dataManagerLog.logFileDataResourceId
+        dataResource.name = this.dataManagerLog.logFileContentName
+      } else if (type === 'uploadedFile') {
+        dataResource.name = this.dataManagerLog.contentName
+        dataResource.dataResourceId = this.dataManagerLog.dataResourceId
      } else if (type === 'failedRecords') {
-        contentIdType = 'errorRecordContentId';
-      } else if (type === 'uploadedFle') {
-        dataResource.dataResourceId = this.log.dataResourceId;
-        dataResource.name = this.log.contentName
-      }
-
-      if (contentIdType) {
-        dataResource = this.getDataResourceId(this.log[contentIdType]);
+        dataResource.dataResourceId = this.dataManagerLog.errorRecordDataResourceId
+        dataResource.name = this.dataManagerLog.errorRecordContentName
       }
 
       if (dataResource) {
         try {
-          const response = await JobService.downloadCsv({
+          const response = await JobService.fetchFileData({
             dataResourceId: dataResource.dataResourceId
           });
-          responseFileType(response.data, dataResource.name);
+          saveDataFile(response.data, dataResource.name);
         } catch (error) {
           showToast(translate('Error downloading file'))
           logger.error(error)
diff --git a/src/components/ImportLogsDetail.vue b/src/components/ImportLogsDetail.vue
index fd7fd609..f8518bca 100644
--- a/src/components/ImportLogsDetail.vue
+++ b/src/components/ImportLogsDetail.vue
@@ -23,7 +23,7 @@
         {{ translate('Files received') }}
-        {{ getDataLogs.length }}
+        {{ getDataManagerLogs.length }}
@@ -64,17 +64,17 @@
-
-          {{ chip.label }}
-
+
+          {{ filter.label }}
+
-
-
+
+
@@ -93,12 +93,10 @@
             {{ getDateTime(log.finishDateTime) }}

{{ translate('Finished') }}

+
+            {{ translate(getStatusDesc(log.statusId)) }}
-            {{ translate('Finished') }}
-            {{ translate('Failed') }}
-            {{ translate('Running') }}
-
-
+

{{ translate('Failed records') }}

@@ -123,7 +121,7 @@ import { checkmarkOutline, codeWorkingOutline, cloudDownloadOutline, documentTex
 import { IonBackButton, IonBadge, IonButton, IonChip, IonContent, IonHeader, IonIcon, IonItem, IonLabel, IonList, IonPage, IonSpinner, IonTitle, IonToolbar, popoverController } from "@ionic/vue";
 import { defineComponent } from 'vue'
 import { mapGetters, useStore } from 'vuex'
-import { responseFileType } from '@/utils';
+import { saveDataFile, hasError } from '@/utils';
 import { translate } from '@hotwax/dxp-components'
 import { DateTime } from 'luxon'
 import DownloadLogsFilePopover from "@/components/DownloadLogsFilePopover.vue";
@@ -151,59 +149,60 @@ export default defineComponent ({
   data() {
     return {
       configDetails: {},
-      selectedChip: 'All',
-      chips: [
-        { label: 'All' },
-        { label: 'Failed logs' },
-        { label: 'Failed records' }
+      selectedFilter: 'All',
+      dataManagerLogFilters: [
+        { id: 'ALL', label: 'All' },
+        { id: 'FAILED_LOGS', label: 'Failed logs' },
+        { id: 'FAILED_RECORDS', label: 'Failed records' }
       ],
-      filteredLogs: [],
+      dataManagerLogList: [],
       isLoading: true,
     }
   },
   computed: {
     ...mapGetters({
       currentJob: 'job/getCurrentJob',
-      getDataLogs: 'job/getDataLogs',
+      getDataManagerLogs: 'job/getDataManagerLogs',
       getJob: 'job/getJob',
-      getDataResourceId: 'job/getDataResourceIds'
+      getStatusDesc: 'util/getStatusDesc',
     }),
   },
   async mounted() {
     await this.fetchJobs();
     const job = await this.getJob(this.$route.params.jobId)
     await this.store.dispatch('job/updateCurrentJob', { job });
-    this.filteredLogs = await this.store.dispatch('job/fetchDataManagerLogs', job.runtimeData?.configId)
-    this.configDetails = await this.store.dispatch('job/fetchDataManagerConfig', job.runtimeData?.configId)
-    this.updateLogs('All');
+    await this.fetchDataManagerConfig(job.runtimeData?.configId)
+    await this.store.dispatch('job/fetchDataManagerLogs', job.runtimeData?.configId)
+    await this.store.dispatch('job/fetchDataResource', this.getDataManagerLogs)
+    this.filterDataManagerLogs('ALL');
     this.isLoading = false;
   },
   methods : {
-    updateLogs(label) {
-      this.selectedChip = label
-      if (label === 'All') {
-        this.filteredLogs = this.getDataLogs
-      } else if (label === 'Failed logs') {
-        this.filteredLogs = this.getDataLogs.filter(log => log.statusId === 'SERVICE_FAILED')
-      } else if (label === 'Failed records') {
-        this.filteredLogs = this.getDataLogs.filter(log => log.errorRecordContentId !== null)
+    filterDataManagerLogs(id) {
+      this.selectedFilter = id
+      if (id === 'ALL') {
+        this.dataManagerLogList = this.getDataManagerLogs
+      } else if (id === 'FAILED_LOGS') {
+        this.dataManagerLogList = this.getDataManagerLogs.filter(log => log.statusId === 'SERVICE_FAILED')
+      } else if (id === 'FAILED_RECORDS') {
+        this.dataManagerLogList = this.getDataManagerLogs.filter(log => log.errorRecordContentId !== null)
       }
     },
     getDateTime(time) {
       return DateTime.fromMillis(time).toFormat("dd/MM/yyyy H:mm a")
     },
     getProcessedFileCount() {
-      return this.getDataLogs.filter((log) => log.statusId === "SERVICE_FINISHED").length
+      return this.getDataManagerLogs.filter((log) => log.statusId === "SERVICE_FINISHED").length
     },
     getErrorFileCount() {
-      return this.getDataLogs.filter((log) => log.errorRecordContentId !== null).length
+      return this.getDataManagerLogs.filter((log) => log.errorRecordContentId !== null).length
     },
-    async openDownloadLogsFilePopover(log, event) {
+    async openDownloadLogsFilePopover(dataManagerLog, event) {
       const popover = await popoverController.create({
         component: DownloadLogsFilePopover,
         showBackdrop: false,
         event: event,
-        componentProps: { log }
+        componentProps: { dataManagerLog }
       });
       return popover.present()
     },
@@ -212,14 +211,36 @@ export default defineComponent ({
         "inputFields": this.$route.params.jobId
       });
     },
-    async downloadCsv(id) {
+    async fetchDataManagerConfig(configId) {
+      let resp = {}
+      const payload = {
+        "inputFields": {
+          "configId": configId
+        },
+        "fieldList": ["importPath", "multiThreading", "description", "executionModeId"],
+        "noConditionFind": "Y",
+        "viewSize": 1,
+        "entityName": "DataManagerConfig",
+      }
+
+      try {
+        resp = await JobService.fetchDataManagerConfig(payload);
+        if (resp.status === 200 && resp.data.docs?.length > 0 && !hasError(resp)) {
+          this.configDetails = resp.data.docs[0];
+        } else {
+          throw resp.data
+        }
+      } catch (err) {
+        logger.error(err);
+      }
+    },
+    async downloadErrorRecordFile(dataManagerLog) {
       try {
-        const dataResource = this.getDataResourceId(id);
-        if (dataResource) {
-          const response = await JobService.downloadCsv({
-            dataResourceId: dataResource.dataResourceId
+        if (dataManagerLog?.errorRecordDataResourceId) {
+          const response = await JobService.fetchFileData({
+            dataResourceId: dataManagerLog.errorRecordDataResourceId
          });
-          responseFileType(response.data, dataResource.name);
+          saveDataFile(response.data, dataManagerLog?.errorRecordContentName);
        }
      } catch (error) {
        logger.error(error);
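For orientation only (not part of the change set): a rough TypeScript sketch of the shape a data manager log takes once job/fetchDataManagerLogs and job/fetchDataResource have run. The field names come from the fieldList and the decoration logic in this diff; the interface name and the optional markers are assumptions for illustration.

// Hypothetical interface, assembled from the fields used in this diff.
interface DataManagerLog {
  logId: string;
  statusId: string;                  // e.g. SERVICE_FINISHED, SERVICE_FAILED, SERVICE_RUNNING
  createdDate?: number;
  startDateTime?: number;
  finishDateTime?: number;
  contentName?: string;              // name of the uploaded file
  dataResourceId?: string;           // data resource of the uploaded file
  logFileContentId?: string;
  errorRecordContentId?: string;
  // fields added in place by the job/fetchDataResource action
  logFileDataResourceId?: string;
  logFileContentName?: string;
  errorRecordDataResourceId?: string;
  errorRecordContentName?: string;
}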
diff --git a/src/components/JobConfiguration.vue b/src/components/JobConfiguration.vue
index 292d4b9a..5e39c2fd 100644
--- a/src/components/JobConfiguration.vue
+++ b/src/components/JobConfiguration.vue
@@ -113,17 +113,17 @@
-
+

{{ translate('Import logs') }}

{{ translate('View details') }}
-
+
           {{ translate('Files received') }}
-          {{ getDataLogs.length }}
+          {{ getDataManagerLogs.length }}
@@ -244,7 +244,7 @@ export default defineComponent({
       currentEComStore: 'user/getCurrentEComStore',
       currentJob: 'job/getCurrentJob',
       pendingJobs: 'job/getPendingJobs',
-      getDataLogs: 'job/getDataLogs',
+      getDataManagerLogs: 'job/getDataManagerLogs',
     }),
     isRequiredParametersMissing() {
       return this.customRequiredParameters.some((parameter: any) => !parameter.value?.trim())
@@ -256,10 +256,10 @@
   },
   methods: {
     getProcessedFileCount() {
-      return this.getDataLogs?.filter((log: any) => log.statusId === "SERVICE_FINISHED").length
+      return this.getDataManagerLogs?.filter((log: any) => log.statusId === "SERVICE_FINISHED").length
     },
     getErrorFileCount() {
-      return this.getDataLogs?.filter((log: any) => log.errorRecordContentId !== null).length
+      return this.getDataManagerLogs?.filter((log: any) => log.errorRecordContentId !== null).length
     },
     openImportLogsDetails() {
       this.router.replace({ path: `/import-logs-detail/${this.currentJob.systemJobEnumId}` })
diff --git a/src/locales/en.json b/src/locales/en.json
index f3fbb218..a2975e89 100644
--- a/src/locales/en.json
+++ b/src/locales/en.json
@@ -65,6 +65,7 @@
   "Custom": "Custom",
   "Custom frequency": "Custom frequency",
   "Custom Parameters": "Custom Parameters",
+  "Crashed": "Crashed",
   "Create batches and schedule brokering for different orders.": "Create batches and schedule brokering for different orders.",
   "Create or update order fulfillment history records from FTP.": "Create or update order fulfillment history records from FTP.",
   "Create new brokering job": "Create new brokering job",
@@ -78,6 +79,7 @@
   "Dismiss": "Dismiss",
   "Don't cancel": "Don't cancel",
   "Don't skip": "Don't skip",
+  "Draft": "Draft",
   "eCom Store": "eCom Store",
   "eCommerce": "eCommerce",
   "eCommerce stores are directly connected to one Shop Config. If your OMS is connected to multiple eCommerce stores selling the same catalog operating as one Company, you may have multiple Shop Configs for the selected Product Store.": "eCommerce stores are directly connected to one Shop Config. If your OMS is connected to multiple eCommerce stores selling the same catalog operating as one Company, you may have multiple Shop Configs for the selected Product Store.",
@@ -198,6 +200,7 @@
   "Promise date change": "Promise date change",
   "Promise date changes": "Promise date changes",
   "Provide a future date and time": "Provide a future date and time",
+  "Queued": "Queued",
   "Ready to create an app?": "Ready to create an app?",
   "Realtime adjustments": "Realtime adjustments",
   "Realtime POS sales": "Realtime POS sales",
diff --git a/src/services/JobService.ts b/src/services/JobService.ts
index 2fdd4821..4eb37197 100644
--- a/src/services/JobService.ts
+++ b/src/services/JobService.ts
@@ -89,7 +89,7 @@ const fetchDataManagerConfig = async (payload: any): Promise<any> => {
   })
 }
 
-const downloadCsv = async (payload: any): Promise<any> => {
+const fetchFileData = async (payload: any): Promise<any> => {
   return api ({
     url: "DownloadCsvFile",
     method: "get",
@@ -155,5 +155,5 @@ export const JobService = {
   fetchDataManagerLogs,
   fetchDataResource,
   fetchDataManagerConfig,
-  downloadCsv
+  fetchFileData
 }
\ No newline at end of file
diff --git a/src/store/modules/job/JobState.ts b/src/store/modules/job/JobState.ts
index 5ebae996..92e34579 100644
--- a/src/store/modules/job/JobState.ts
+++ b/src/store/modules/job/JobState.ts
@@ -33,8 +33,5 @@ export default interface JobState {
     runtime: any,
     frequency: any,
   },
-  logs: {
-    list: any,
-    contentDataResource: any
-  }
+  dataManagerLogs: any
 }
\ No newline at end of file
diff --git a/src/store/modules/job/actions.ts b/src/store/modules/job/actions.ts
index 10aaf7e9..e222156d 100644
--- a/src/store/modules/job/actions.ts
+++ b/src/store/modules/job/actions.ts
@@ -261,15 +261,14 @@ const actions: ActionTree = {
     })
   },
 
-  async fetchDataManagerLogs ({ commit, dispatch }, params) {
+  async fetchDataManagerLogs({ commit, dispatch }, configId) {
     commit(types.JOB_DATA_MANAGER_LOGS_UPDATED, []);
     let logs = [] as any
     const payload = {
       "inputFields": {
         "statusId": ["SERVICE_CANCELLED", "SERVICE_CRASHED", "SERVICE_FAILED", "SERVICE_FINISHED", "SERVICE_PENDING", "SERVICE_RUNNING", "SERVICE_QUEUED"],
-        "statusId_op": "in",
         "systemJobEnumId_op": "not-empty",
-        "configId": params
+        "configId": configId
       },
       "fieldList": ["statusId", "logId", "createdDate", "startDateTime", "finishDateTime", "logFileContentId", "errorRecordContentId", "contentName", "dataResourceId"],
       "noConditionFind": "Y",
@@ -280,7 +279,6 @@
     await JobService.fetchDataManagerLogs(payload).then((resp: any) => {
       if (resp.status === 200 && resp.data.docs?.length > 0 && !hasError(resp)) {
         logs = resp.data.docs
-        dispatch('fetchDataResource', logs)
       } else {
         throw resp.data
       }
@@ -291,8 +289,7 @@
     return logs;
   },
 
-  async fetchDataResource({ commit }, logs) {
-    let dataResourceIds = {};
+  async fetchDataResource({ commit, state }, logs) {
     // Extract logFileContentId and errorRecordContentId from logs
     const contentIds = [].concat(...logs.map((log: any) => [log.logFileContentId, log.errorRecordContentId].filter(id => id)));
 
@@ -308,48 +305,25 @@
     await JobService.fetchDataResource(payload).then((resp: any) => {
       if (resp.status === 200 && resp.data.docs?.length > 0 && !hasError(resp)) {
-        dataResourceIds = contentIds.reduce((acc: any, id: string) => {
-          const contentId = resp.data.docs.find((doc: any) => doc.coContentId === id);
-          if (contentId) {
-            acc[id] = {
-              dataResourceId: contentId.coDataResourceId,
-              name: contentId.coContentName
-            };
+        logs.forEach((log: any) => {
+          const logFileDataResource = resp.data.docs.find((doc: any) => doc.coContentId === log.logFileContentId);
+          if (logFileDataResource) {
+            log.logFileDataResourceId = logFileDataResource.coDataResourceId;
+            log.logFileContentName = logFileDataResource.coContentName;
+          }
+
+          const errorRecordDataResource = resp.data.docs.find((doc: any) => doc.coContentId === log.errorRecordContentId);
+          if (errorRecordDataResource) {
+            log.errorRecordDataResourceId = errorRecordDataResource.coDataResourceId;
+            log.errorRecordContentName = errorRecordDataResource.coContentName;
           }
-          return acc;
-        }, {});
+        });
       }
     }).catch((err: any) => {
       logger.error(err);
     })
 
-    commit(types.JOB_DATA_RESOURCE_IDS_UPDATED, dataResourceIds);
-  },
-
-  async fetchDataManagerConfig({ commit }, params) {
-    let resp = {} as any
-    const payload = {
-      "inputFields": {
-        "configId": params
-      },
-      "fieldList": ["importPath", "multiThreading", "description", "executionModeId"],
-      "noConditionFind": "Y",
-      "viewSize": 1,
-      "entityName": "DataManagerConfig",
-    }
-
-    try {
-      resp = await JobService.fetchDataManagerConfig(payload);
-      if (resp.status === 200 && resp.data.docs?.length > 0 && !hasError(resp)) {
-        return resp.data.docs[0];
-      } else {
-        throw resp.data
-      }
-    } catch (err) {
-      logger.error(err);
-    }
-    return resp;
+    commit(types.JOB_DATA_MANAGER_LOGS_UPDATED, logs);
   },
-
   async fetchMiscellaneousJobs({ commit, dispatch, state }, payload){
     const fetchJobRequests = [];
     const params = {
diff --git a/src/store/modules/job/getters.ts b/src/store/modules/job/getters.ts
index 0115466d..9ff94838 100644
--- a/src/store/modules/job/getters.ts
+++ b/src/store/modules/job/getters.ts
@@ -85,11 +85,8 @@ const getters: GetterTree = {
   getGlobalFreq: (state) => {
     return state.bulk.frequency;
   },
-  getDataLogs: (state) => {
-    return state.logs.list
-  },
-  getDataResourceIds: (state) => (contentId: string) => {
-    return state.logs.contentDataResource[contentId];
+  getDataManagerLogs: (state) => {
+    return state.dataManagerLogs
   }
 }
diff --git a/src/store/modules/job/index.ts b/src/store/modules/job/index.ts
index 690e891a..6849eed6 100644
--- a/src/store/modules/job/index.ts
+++ b/src/store/modules/job/index.ts
@@ -42,10 +42,7 @@ const jobModule: Module = {
       runtime: '',
       frequency: '',
     },
-    logs: {
-      list: [],
-      contentDataResource: []
-    }
+    dataManagerLogs: []
   },
   getters,
   actions,
diff --git a/src/store/modules/job/mutation-types.ts b/src/store/modules/job/mutation-types.ts
index f6d7120e..87498883 100644
--- a/src/store/modules/job/mutation-types.ts
+++ b/src/store/modules/job/mutation-types.ts
@@ -14,5 +14,4 @@ export const JOB_BULK_UPDATED = SN_JOB + '/BULK_UPDATED'
 export const JOB_BULK_FREQUENCY_UPDATED = SN_JOB + '/BULK_FREQUENCY_UPDATED'
 export const JOB_BULK_RUNTIME_UPDATED = SN_JOB + '/BULK_DATA_UPDATED'
 export const JOB_BULK_CLEARED = SN_JOB + '/BULK_CLEARED'
-export const JOB_DATA_MANAGER_LOGS_UPDATED = SN_JOB + '/DATA_MANAGER_LOGS_UPDATED'
-export const JOB_DATA_RESOURCE_IDS_UPDATED = SN_JOB + '/DATA_RESOURCE_IDS_UPDATED'
\ No newline at end of file
+export const JOB_DATA_MANAGER_LOGS_UPDATED = SN_JOB + '/DATA_MANAGER_LOGS_UPDATED'
\ No newline at end of file
diff --git a/src/store/modules/job/mutations.ts b/src/store/modules/job/mutations.ts
index fceda164..77963a05 100644
--- a/src/store/modules/job/mutations.ts
+++ b/src/store/modules/job/mutations.ts
@@ -66,10 +66,7 @@ const mutations: MutationTree = {
     }
   },
   [types.JOB_DATA_MANAGER_LOGS_UPDATED] (state, payload) {
-    state.logs.list = payload
+    state.dataManagerLogs = payload
   },
-  [types.JOB_DATA_RESOURCE_IDS_UPDATED] (state, payload) {
-    state.logs.contentDataResource = payload
-  }
 }
 export default mutations;
\ No newline at end of file
diff --git a/src/utils/index.ts b/src/utils/index.ts
index 0f9b7965..313e99ca 100644
--- a/src/utils/index.ts
+++ b/src/utils/index.ts
@@ -341,18 +341,15 @@ const copyToClipboard = async (value: string, text?: string) => {
   });
 }
 
-const responseFileType = (response: any, fileName: string) => {
+const saveDataFile = async (response: any, fileName: string) => {
   let data;
-
+
   if (typeof response === 'object') {
     data = JSON.stringify(response)
   } else {
     data = response
   }
-  saveFile(data, fileName);
-}
 
-const saveFile = async (data: any, fileName: string) => {
   const blob = new Blob([data], {type: "text/plain;charset=utf-8"})
   saveAs(blob, fileName);
 }
@@ -374,5 +371,5 @@ export {
   JsonToCsvOption,
   isFutureDate,
   prepareRuntime,
-  responseFileType
+  saveDataFile
 }
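A minimal usage sketch of the renamed helpers, for illustration only: JobService.fetchFileData issues a GET against the DownloadCsvFile endpoint for a given dataResourceId, and saveDataFile serializes the response body (JSON.stringify for objects, as-is for strings) into a text Blob and hands it to file-saver's saveAs. The wrapper function below is hypothetical and not part of the diff.

import { JobService } from "@/services/JobService";
import { saveDataFile } from "@/utils";

// Hypothetical helper: download one attachment of a data manager log, e.g. using
// log.errorRecordDataResourceId / log.errorRecordContentName populated by the
// job/fetchDataResource action above.
async function downloadLogAttachment(dataResourceId: string, fileName: string) {
  const response = await JobService.fetchFileData({ dataResourceId });
  await saveDataFile(response.data, fileName);
}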