diff --git a/actions/graybox/copy-sched.js b/actions/graybox/copy-sched.js new file mode 100644 index 0000000..97f5ee6 --- /dev/null +++ b/actions/graybox/copy-sched.js @@ -0,0 +1,132 @@ +/* ************************************************************************ +* ADOBE CONFIDENTIAL +* ___________________ +* +* Copyright 2024 Adobe +* All Rights Reserved. +* +* NOTICE: All information contained herein is, and remains +* the property of Adobe and its suppliers, if any. The intellectual +* and technical concepts contained herein are proprietary to Adobe +* and its suppliers and are protected by all applicable intellectual +* property laws, including trade secret and copyright laws. +* Dissemination of this information or reproduction of this material +* is strictly forbidden unless prior written permission is obtained +* from Adobe. +************************************************************************* */ + +// eslint-disable-next-line import/no-extraneous-dependencies +const openwhisk = require('openwhisk'); +const { getAioLogger } = require('../utils'); +const initFilesWrapper = require('./filesWrapper'); + +async function main(params) { + const logger = getAioLogger(); + const ow = openwhisk(); + let responsePayload = 'Graybox Copy Scheduler invoked'; + logger.info(responsePayload); + + const filesWrapper = await initFilesWrapper(logger); + + try { + let projectQueue = await filesWrapper.readFileIntoObject('graybox_promote/project_queue.json'); + logger.info(`From Copy-sched Project Queue Json: ${JSON.stringify(projectQueue)}`); + + // Sorting the Promote Projects based on the 'createdTime' property, pick the oldest project + projectQueue = projectQueue.sort((a, b) => a.createdTime - b.createdTime); + + // Find the First Project where status is 'processed' + const projectEntry = projectQueue.find((project) => project.status === 'processed'); + if (projectEntry && projectEntry.projectPath) { + const project = projectEntry.projectPath; + const 
projectStatusJson = await filesWrapper.readFileIntoObject(`graybox_promote${project}/status.json`); + logger.info(`In Copy-sched Project Status Json: ${JSON.stringify(projectStatusJson)}`); + + // Read the Batch Status in the current project's "batch_status.json" file + const batchStatusJson = await filesWrapper.readFileIntoObject(`graybox_promote${project}/batch_status.json`); + logger.info(`In Copy Sched, batchStatusJson: ${JSON.stringify(batchStatusJson)}`); + + const copyBatchesJson = await filesWrapper.readFileIntoObject(`graybox_promote${project}/copy_batches.json`); + logger.info(`In Copy-sched Copy Batches Json: ${JSON.stringify(copyBatchesJson)}`); + + // Find if any batch is in 'copy_in_progress' status, if yes then don't trigger another copy action for another "processed" batch + const copyOrPromoteInProgressBatch = Object.entries(batchStatusJson) + .find(([batchName, copyBatchJson]) => (copyBatchJson.status === 'copy_in_progress' || copyBatchJson.status === 'promote_in_progress')); + + if (copyOrPromoteInProgressBatch && Array.isArray(copyOrPromoteInProgressBatch) && copyOrPromoteInProgressBatch.length > 0) { + responsePayload = `Promote or Copy Action already in progress for Batch: ${copyOrPromoteInProgressBatch[0]}, not triggering another action until it completes`; + return { + code: 200, + payload: responsePayload + }; + } + + // Find the First Batch where status is 'processed', to promote one batch at a time + const processedBatchName = Object.keys(copyBatchesJson) + .find((batchName) => copyBatchesJson[batchName].status === 'processed'); + // If no batch is found with status 'processed then nothing to promote', return + if (!processedBatchName) { + responsePayload = 'No Copy Batches found with status "processed"'; + return { + code: 200, + payload: responsePayload + }; + } + + if (copyBatchesJson[processedBatchName].status === 'processed') { + // copy all params from json into the params object + const inputParams = projectStatusJson?.params; + 
Object.keys(inputParams).forEach((key) => { + params[key] = inputParams[key]; + }); + // Set the Project & Batch Name in params for the Copy Content Worker Action to read and process + params.project = project; + params.batchName = processedBatchName; + + logger.info(`In Copy-sched, Invoking Copy Content Worker for Batch: ${processedBatchName} of Project: ${project}`); + try { + return ow.actions.invoke({ + name: 'graybox/copy-worker', + blocking: false, + result: false, + params + }).then(async (result) => { + logger.info(result); + return { + code: 200, + payload: responsePayload + }; + }).catch(async (err) => { + responsePayload = 'Failed to invoke graybox copy action'; + logger.error(`${responsePayload}: ${err}`); + return { + code: 500, + payload: responsePayload + }; + }); + } catch (err) { + responsePayload = 'Unknown error occurred while invoking Copy Content Worker Action'; + logger.error(`${responsePayload}: ${err}`); + responsePayload = err; + } + } + responsePayload = 'Triggered multiple Copy Content Worker Actions'; + return { + code: 200, + payload: responsePayload, + }; + } + } catch (err) { + responsePayload = 'Unknown error occurred while processing the projects for Copy'; + logger.error(`${responsePayload}: ${err}`); + responsePayload = err; + } + + // No errors while initiating all the Copy Content Worker Action for all the projects + return { + code: 200, + payload: responsePayload + }; +} + +exports.main = main; diff --git a/actions/graybox/copy-worker.js b/actions/graybox/copy-worker.js new file mode 100644 index 0000000..a3915fc --- /dev/null +++ b/actions/graybox/copy-worker.js @@ -0,0 +1,176 @@ +/* ************************************************************************ +* ADOBE CONFIDENTIAL +* ___________________ +* +* Copyright 2024 Adobe +* All Rights Reserved. +* +* NOTICE: All information contained herein is, and remains +* the property of Adobe and its suppliers, if any. 
The intellectual +* and technical concepts contained herein are proprietary to Adobe +* and its suppliers and are protected by all applicable intellectual +* property laws, including trade secret and copyright laws. +* Dissemination of this information or reproduction of this material +* is strictly forbidden unless prior written permission is obtained +* from Adobe. +************************************************************************* */ + +const { getAioLogger, toUTCStr } = require('../utils'); +const AppConfig = require('../appConfig'); +const Sharepoint = require('../sharepoint'); +const initFilesWrapper = require('./filesWrapper'); + +const logger = getAioLogger(); + +async function main(params) { + logger.info('Graybox Copy Content Action triggered'); + + const appConfig = new AppConfig(params); + const { gbRootFolder, experienceName, projectExcelPath } = appConfig.getPayload(); + + const sharepoint = new Sharepoint(appConfig); + + // process data in batches + const filesWrapper = await initFilesWrapper(logger); + let responsePayload; + let promotes = []; + const failedPromotes = []; + + logger.info('In Copy Worker, Processing Copy Content'); + + const project = params.project || ''; + const batchName = params.batchName || ''; + + // Read the Batch Status in the current project's "batch_status.json" file + let batchStatusJson = await filesWrapper.readFileIntoObject(`graybox_promote${project}/batch_status.json`); + + const promoteErrorsJson = await filesWrapper.readFileIntoObject(`graybox_promote${project}/promote_errors.json`); + + let copyBatchesJson = await filesWrapper.readFileIntoObject(`graybox_promote${project}/copy_batches.json`); + + const copyBatchJson = copyBatchesJson[batchName] || {}; + + logger.info(`In Copy Worker, Copy File Paths for batchname ${batchName}: ${JSON.stringify(copyBatchJson)}`); + + // Update & Write the Batch Status to in progress "batch_status.json" file + // So that the scheduler doesn't pick the same batch again + 
batchStatusJson[batchName] = 'copy_in_progress'; + await filesWrapper.writeFile(`graybox_promote${gbRootFolder}/${experienceName}/batch_status.json`, batchStatusJson); + // Write the copy batches JSON file + copyBatchesJson[batchName].status = 'promote_in_progress'; + await filesWrapper.writeFile(`graybox_promote${gbRootFolder}/${experienceName}/copy_batches.json`, copyBatchesJson); + + // Process the Copy Content + const copyFilePathsJson = copyBatchJson.files || []; + for (let i = 0; i < copyFilePathsJson.length; i += 1) { + const copyPathsEntry = copyFilePathsJson[i]; + // Download the grayboxed file and save it to default content location + // eslint-disable-next-line no-await-in-loop + const { fileDownloadUrl } = await sharepoint.getFileData(copyPathsEntry.copySourceFilePath, true); + // eslint-disable-next-line no-await-in-loop + const file = await sharepoint.getFileUsingDownloadUrl(fileDownloadUrl); + // eslint-disable-next-line no-await-in-loop + const saveStatus = await sharepoint.saveFileSimple(file, copyPathsEntry.copyDestFilePath); + + if (saveStatus?.success) { + promotes.push(copyPathsEntry.copyDestFilePath); + } else if (saveStatus?.errorMsg?.includes('File is locked')) { + failedPromotes.push(`${copyPathsEntry.copyDestFilePath} (locked file)`); + } else { + failedPromotes.push(copyPathsEntry.copyDestFilePath); + } + } + + logger.info(`In Copy Worker, Promotes for batchname ${batchName} no.of files ${promotes.length}, files list: ${JSON.stringify(promotes)}`); + // Update the Promoted Paths in the current project's "promoted_paths.json" file + if (promotes.length > 0) { + const promotedPathsJson = await filesWrapper.readFileIntoObject(`graybox_promote${gbRootFolder}/${experienceName}/promoted_paths.json`) || {}; + // Combined existing If any promotes already exist in promoted_paths.json for the current batch either from Copy action or Promote Action + if (promotedPathsJson[batchName]) { + promotes = promotes.concat(promotedPathsJson[batchName]); + } 
+ promotedPathsJson[batchName] = promotes; + await filesWrapper.writeFile(`graybox_promote${gbRootFolder}/${experienceName}/promoted_paths.json`, promotedPathsJson); + } + + if (failedPromotes.length > 0) { + await filesWrapper.writeFile(`graybox_promote${gbRootFolder}/${experienceName}/promote_errors.json`, promoteErrorsJson.concat(failedPromotes)); + } + + // Update the Copy Batch Status in the current project's "copy_batches.json" file + copyBatchesJson = await filesWrapper.readFileIntoObject(`graybox_promote${project}/copy_batches.json`); + copyBatchesJson[batchName].status = 'promoted'; + // Write the copy batches JSON file + await filesWrapper.writeFile(`graybox_promote${gbRootFolder}/${experienceName}/copy_batches.json`, copyBatchesJson); + + // Check in parallel if the Same Batch Name Exists & is Promoted in the Promote Batches JSON + const promoteBatchesJson = await filesWrapper.readFileIntoObject(`graybox_promote${project}/promote_batches.json`); + const promoteBatchJson = promoteBatchesJson[batchName]; + let markBatchAsPromoted = true; + if (promoteBatchJson) { + markBatchAsPromoted = promoteBatchJson.status === 'promoted'; + } + + batchStatusJson = await filesWrapper.readFileIntoObject(`graybox_promote${project}/batch_status.json`); + if (markBatchAsPromoted) { + // Update the Batch Status in the current project's "batch_status.json" file + if (batchStatusJson && batchStatusJson[batchName] && (promotes.length > 0 || failedPromotes.length > 0)) { + batchStatusJson[batchName] = 'promoted'; + // Write the updated batch_status.json file + await filesWrapper.writeFile(`graybox_promote${gbRootFolder}/${experienceName}/batch_status.json`, batchStatusJson); + } + + // If all batches are promoted, then mark the project as 'promoted' + const allBatchesPromoted = Object.keys(batchStatusJson).every((key) => batchStatusJson[key] === 'promoted'); + if (allBatchesPromoted) { + // Update the Project Status in JSON files + updateProjectStatus(gbRootFolder, 
experienceName, filesWrapper); + } + } + + // Update the Project Excel with the Promote Status + try { + const sFailedPromoteStatuses = failedPromotes.length > 0 ? `Failed Promotes: \n${failedPromotes.join('\n')}` : ''; + const promoteExcelValues = [[`Step 4 of 5: Promote Copy completed for Batch ${batchName}`, toUTCStr(new Date()), sFailedPromoteStatuses]]; + await sharepoint.updateExcelTable(projectExcelPath, 'PROMOTE_STATUS', promoteExcelValues); + } catch (err) { + logger.error(`Error Occured while updating Excel during Graybox Promote Copy: ${err}`); + } + + responsePayload = `Copy Worker finished promoting content for batch ${batchName}`; + logger.info(responsePayload); + return exitAction({ + body: responsePayload, + statusCode: 200 + }); +} + +/** + * Update the Project Status in the current project's "status.json" file & the parent "project_queue.json" file + * @param {*} gbRootFolder graybox root folder + * @param {*} experienceName graybox experience name + * @param {*} filesWrapper filesWrapper object + * @returns updated project status + */ +async function updateProjectStatus(gbRootFolder, experienceName, filesWrapper) { + const projectStatusJson = await filesWrapper.readFileIntoObject(`graybox_promote${gbRootFolder}/${experienceName}/status.json`); + + // Update the Project Status in the current project's "status.json" file + projectStatusJson.status = 'promoted'; + await filesWrapper.writeFile(`graybox_promote${gbRootFolder}/${experienceName}/status.json`, projectStatusJson); + + // Update the Project Status in the parent "project_queue.json" file + const projectQueue = await filesWrapper.readFileIntoObject('graybox_promote/project_queue.json'); + const index = projectQueue.findIndex((obj) => obj.projectPath === `${gbRootFolder}/${experienceName}`); + if (index !== -1) { + // Replace the object at the found index + projectQueue[index].status = 'promoted'; + await filesWrapper.writeFile('graybox_promote/project_queue.json', projectQueue); + } +} + 
// Identity pass-through kept as a single exit point for the action's response.
function exitAction(resp) {
    return resp;
}

exports.main = main;

// ---- actions/graybox/filesWrapper.js ----
const Files = require('@adobe/aio-lib-files');
const streamLib = require('stream');

/**
 * Thin wrapper around @adobe/aio-lib-files that logs errors and returns safe
 * fallbacks ({} / [] / null / empty Buffer) instead of throwing, so callers
 * can treat missing files as empty state.
 *
 * @param {*} logger AIO logger used for error reporting
 * @returns {Promise<object>} the wrapper API
 */
const initFilesWrapper = async (logger) => {
    const files = await Files.init();

    // Shared low-level read; returns null (optionally logging) on any failure.
    const readFileInternal = async (filePath, logFileNotFound = true, options = {}) => {
        try {
            return await files.read(filePath, options);
        } catch (err) {
            if (logFileNotFound) {
                logger.error(`Error while reading file ${filePath}: ${err.message}`);
            }
            return null;
        }
    };

    // Read a file and JSON-parse it; {} on missing file or parse failure.
    const readFileIntoObject = async (filePath, logFileNotFound = true, options = {}) => {
        const data = await readFileInternal(filePath, logFileNotFound, options);
        try {
            // BUGFIX: the original tested `typeof input` — an undefined
            // identifier — so the string branch was dead code; check `data`.
            if (typeof data === 'string') {
                return JSON.parse(data);
            }
            return data ? JSON.parse(data.toString()) : {};
        } catch (err) {
            if (logFileNotFound) {
                logger.error(`Error while parsing file content of ${filePath}: ${err.message}`);
            }
            return {};
        }
    };

    // File metadata, or null when the lookup fails.
    const readProperties = async (filePath) => {
        try {
            return await files.getProperties(filePath);
        } catch (err) {
            logger.error(`Error while reading metadata of ${filePath}: ${err.message}`);
            return null;
        }
    };

    /**
     * Return the file as Buffer or an empty Buffer, when reading the file errored out.
     *
     * @param filePath {string} path to the file to read
     * @param logFileNotFound {boolean} whether a failure to read the file should be logged - defaults to true
     * @param options {object} aio-lib-files "remoteReadOptions" - default to an empty object
     * @returns {Buffer} the buffer with the file's content
     */
    const readFileIntoBuffer = async (filePath, logFileNotFound = true, options = {}) => {
        const data = await readFileInternal(filePath, logFileNotFound, options);
        return data ?? Buffer.alloc(0);
    };

    // Write Buffers/strings as-is; everything else is JSON-stringified first.
    const writeFile = async (filePath, content) => {
        let finalData = content;
        if (!Buffer.isBuffer(content) && typeof content !== 'string' && !(content instanceof String)) {
            finalData = JSON.stringify(content);
        }
        try {
            await files.write(filePath, finalData);
        } catch (err) {
            logger.error(`Error while writing file ${filePath}: ${err.message}`);
        }
    };

    const createReadStream = async (filePath, options = {}) => files.createReadStream(filePath, options);

    // Drain a Readable into the file; returns an error string or null on success.
    const writeFileFromStream = async (filePath, stream) => {
        try {
            if (stream instanceof streamLib.Readable) {
                const chunks = [];
                // eslint-disable-next-line no-restricted-syntax
                for await (const chunk of stream) {
                    chunks.push(chunk);
                }
                await files.write(filePath, Buffer.concat(chunks));
                // Sanity-check the stored size, since files.write doesn't report it
                const fileProps = await files.getProperties(filePath);
                if (!fileProps || !fileProps?.contentLength) {
                    return 'Error: Failed to determine the file size of the stored document.';
                }
                return null;
            }
            return 'Error: Unexpected stream.';
        } catch (err) {
            return `Error while writing file ${filePath}: ${err.message}`;
        }
    };

    const deleteObject = async (filePath) => {
        try {
            await files.delete(filePath);
        } catch (err) {
            logger.error(`Error while deleting ${filePath}: ${err.message}`);
        }
    };

    // List files under a path; [] on failure.
    const listFiles = async (filePath) => {
        try {
            // BUGFIX: await so a rejected promise is caught here and [] is
            // returned; previously the pending promise escaped the try/catch
            // and rejections surfaced at the caller instead.
            return await files.list(filePath);
        } catch (err) {
            logger.error(`Error while listing files: ${err.message}`);
            return [];
        }
    };

    // A path "exists" when listing it yields a non-empty (or non-array) result.
    const fileExists = async (filePath) => {
        const fileList = await listFiles(filePath);
        return !Array.isArray(fileList) || fileList.length !== 0;
    };

    return {
        writeFileFromStream,
        readFileIntoObject,
        readProperties,
        createReadStream,
        listFiles,
        fileExists,
        writeFile,
        deleteObject,
        readFileIntoBuffer,
    };
};

module.exports = initFilesWrapper;

// ---- actions/graybox/initiate-promote-worker.js ----
100644 index 0000000..98384ad --- /dev/null +++ b/actions/graybox/initiate-promote-worker.js @@ -0,0 +1,240 @@ +/* ************************************************************************ +* ADOBE CONFIDENTIAL +* ___________________ +* +* Copyright 2024 Adobe +* All Rights Reserved. +* +* NOTICE: All information contained herein is, and remains +* the property of Adobe and its suppliers, if any. The intellectual +* and technical concepts contained herein are proprietary to Adobe +* and its suppliers and are protected by all applicable intellectual +* property laws, including trade secret and copyright laws. +* Dissemination of this information or reproduction of this material +* is strictly forbidden unless prior written permission is obtained +* from Adobe. +************************************************************************* */ + +const initFilesWrapper = require('./filesWrapper'); +const { + getAioLogger, isFilePatternMatched, toUTCStr +} = require('../utils'); +const AppConfig = require('../appConfig'); +const Sharepoint = require('../sharepoint'); + +const logger = getAioLogger(); +const MAX_CHILDREN = 1000; +const BATCH_REQUEST_PREVIEW = 200; +// const BATCH_REQUEST_PREVIEW = 1; // TODO remove this line and uncomment the above line after testing + +/** + * - Bulk Preview Graybox files + * - GET markdown files using preview-url.md + * - Process markdown - process MDAST by cleaning it up + * - Generate updated Docx file using md2docx lib + * - copy updated docx file to the default content tree + * - run the bulk preview action on the list of files that were copied to default content tree + * - update the project excel file as and when necessary to update the status of the promote action + */ +async function main(params) { + logger.info('Graybox Initiate Promote Worker invoked'); + + const appConfig = new AppConfig(params); + const { + adminPageUri, rootFolder, gbRootFolder, promoteIgnorePaths, experienceName, projectExcelPath, draftsOnly + } = 
appConfig.getPayload(); + + const filesWrapper = await initFilesWrapper(logger); + const sharepoint = new Sharepoint(appConfig); + + // Update Promote Status + const promoteTriggeredExcelValues = [['Promote triggered', toUTCStr(new Date()), '']]; + await sharepoint.updateExcelTable(projectExcelPath, 'PROMOTE_STATUS', promoteTriggeredExcelValues); + + logger.info(`GB ROOT FOLDER ::: ${gbRootFolder}`); + logger.info(`GB EXP NAME ::: ${experienceName}`); + + // Get all files in the graybox folder for the specific experience name + // NOTE: This does not capture content inside the locale/expName folders yet + const gbFiles = await findAllFiles(experienceName, appConfig, sharepoint); + + // Create Batch Status JSON + const batchStatusJson = {}; + + // Create Project Preview Status JSON + const previewStatusJson = []; + + // Create GBFiles Batches JSON + const gbFileBatchesJson = {}; + + // Preview Errors JSON + const projectPreviewErrorsJson = []; + + // Promoted Paths JSON + const promotedPathsJson = {}; + + // Promote Errors JSON + const promoteErrorsJson = []; + + // Copy Batches JSON + const copyBatchesJson = {}; + + // Promote Batches JSON + const promoteBatchesJson = {}; + + // create batches to process the data + const gbFilesBatchArray = []; + const writeBatchJsonPromises = []; + for (let i = 0, batchCounter = 1; i < gbFiles.length; i += BATCH_REQUEST_PREVIEW, batchCounter += 1) { + const arrayChunk = gbFiles.slice(i, i + BATCH_REQUEST_PREVIEW); + gbFilesBatchArray.push(arrayChunk); + const batchName = `batch_${batchCounter}`; + batchStatusJson[`${batchName}`] = 'initiated'; + + // Each Files Batch is written to a batch_n.json file + writeBatchJsonPromises.push(filesWrapper.writeFile(`graybox_promote${gbRootFolder}/${experienceName}/batches/${batchName}.json`, arrayChunk)); + + // Write the GBFile Batches to the gbfile_batches.json file + gbFileBatchesJson[batchName] = arrayChunk; + } + + await Promise.all(writeBatchJsonPromises); + + const inputParams = {}; + 
inputParams.rootFolder = rootFolder; + inputParams.gbRootFolder = gbRootFolder; + inputParams.projectExcelPath = projectExcelPath; + inputParams.experienceName = experienceName; + inputParams.adminPageUri = adminPageUri; + inputParams.draftsOnly = draftsOnly; + inputParams.promoteIgnorePaths = promoteIgnorePaths; + + // convert the ignoreUserCheck boolean to string, so the string processing in the appConfig -> ignoreUserCheck works + inputParams.ignoreUserCheck = `${appConfig.ignoreUserCheck()}`; + + // Create Project Queue Json + let projectQueue = []; + // Read the existing Project Queue Json & then merge the current project to it + if (await filesWrapper.fileExists('graybox_promote/project_queue.json')) { + projectQueue = await filesWrapper.readFileIntoObject('graybox_promote/project_queue.json'); + if (!projectQueue) { + projectQueue = []; + } + } + + const newProject = { projectPath: `${gbRootFolder}/${experienceName}`, status: 'initiated', createdTime: Date.now() }; + + // TODO - check if replacing existing project is needed, if not remove this logic and just add the project to the queue + // Find the index of the same experience Project exists, replace it with this one + const index = projectQueue.findIndex((obj) => obj.projectPath === `${gbRootFolder}/${experienceName}`); + if (index !== -1) { + // Replace the object at the found index + projectQueue[index] = newProject; + } else { + // Add the current project to the Project Queue Json & make it the current project + projectQueue.push(newProject); + } + + logger.info(`In Initiate Preview Worker, Project Queue Json: ${JSON.stringify(projectQueue)}`); + + // Create Project Status JSON + const projectStatusJson = { status: 'initiated', params: inputParams }; + + logger.info(`In Initiate Preview Worker, projectStatusJson: ${JSON.stringify(projectStatusJson)}`); + + // write to JSONs to AIO Files for Projects Queue and Project Status + await filesWrapper.writeFile('graybox_promote/project_queue.json', 
projectQueue); + await filesWrapper.writeFile(`graybox_promote${gbRootFolder}/${experienceName}/status.json`, projectStatusJson); + await filesWrapper.writeFile(`graybox_promote${gbRootFolder}/${experienceName}/gbfile_batches.json`, gbFileBatchesJson); + await filesWrapper.writeFile(`graybox_promote${gbRootFolder}/${experienceName}/batch_status.json`, batchStatusJson); + await filesWrapper.writeFile(`graybox_promote${gbRootFolder}/${experienceName}/preview_status.json`, previewStatusJson); + await filesWrapper.writeFile(`graybox_promote${gbRootFolder}/${experienceName}/preview_errors.json`, projectPreviewErrorsJson); + await filesWrapper.writeFile(`graybox_promote${gbRootFolder}/${experienceName}/promoted_paths.json`, promotedPathsJson); + await filesWrapper.writeFile(`graybox_promote${gbRootFolder}/${experienceName}/promote_errors.json`, promoteErrorsJson); + await filesWrapper.writeFile(`graybox_promote${gbRootFolder}/${experienceName}/promote_batches.json`, promoteBatchesJson); + await filesWrapper.writeFile(`graybox_promote${gbRootFolder}/${experienceName}/copy_batches.json`, copyBatchesJson); + + // read Graybox Project Json from AIO Files + const projectQueueJson = await filesWrapper.readFileIntoObject('graybox_promote/project_queue.json'); + logger.info(`Project Queue Json: ${JSON.stringify(projectQueueJson)}`); + const statusJson = await filesWrapper.readFileIntoObject(`graybox_promote${gbRootFolder}/${experienceName}/status.json`); + logger.info(`Project Status Json: ${JSON.stringify(statusJson)}`); + const projectBatchStatusJson = await filesWrapper.readFileIntoObject(`graybox_promote${gbRootFolder}/${experienceName}/batch_status.json`); + logger.info(`Project Batch Status Json: ${JSON.stringify(projectBatchStatusJson)}`); + + // process data in batches + let responsePayload; + responsePayload = 'Graybox Initiate Promote Worker action completed.'; + logger.info(responsePayload); + return { + body: responsePayload, + }; +} + +/** + * Find all files in the 
Graybox tree to promote. + */ +async function findAllFiles(experienceName, appConfig, sharepoint) { + const sp = await appConfig.getSpConfig(); + const options = await sharepoint.getAuthorizedRequestOption({ method: 'GET' }); + const promoteIgnoreList = appConfig.getPromoteIgnorePaths(); + logger.info(`Promote ignore list: ${promoteIgnoreList}`); + + return findAllGrayboxFiles({ + baseURI: sp.api.file.get.gbBaseURI, + options, + gbFolders: appConfig.isDraftOnly() ? [`/${experienceName}/drafts`] : [''], + promoteIgnoreList, + downloadBaseURI: sp.api.file.download.baseURI, + experienceName, + sharepoint + }); +} + +/** + * Iteratively finds all files under a specified root folder. + */ +async function findAllGrayboxFiles({ + baseURI, options, gbFolders, promoteIgnoreList, downloadBaseURI, experienceName, sharepoint +}) { + const gbRoot = baseURI.split(':').pop(); + // Regular expression to select the gbRoot and anything before it + // Eg: the regex selects "https://:/-graybox" + const pPathRegExp = new RegExp(`.*:${gbRoot}`); + // Regular expression to select paths that has the experienceName at first or second level + const pathsToSelectRegExp = new RegExp(`^/([^/]+/)?${experienceName}(/.*)?$`); + const gbFiles = []; + while (gbFolders.length !== 0) { + const uri = `${baseURI}${gbFolders.shift()}:/children?$top=${MAX_CHILDREN}`; + // eslint-disable-next-line no-await-in-loop + const res = await sharepoint.fetchWithRetry(uri, options); + logger.info(`Find all Graybox files URI: ${uri} \nResponse: ${res.ok}`); + if (res.ok) { + // eslint-disable-next-line no-await-in-loop + const json = await res.json(); + // eslint-disable-next-line no-await-in-loop + const driveItems = json.value; + for (let di = 0; di < driveItems?.length; di += 1) { + const item = driveItems[di]; + const itemPath = `${item.parentReference.path.replace(pPathRegExp, '')}/${item.name}`; + logger.info(`${itemPath} ::: ${pathsToSelectRegExp.test(itemPath)}`); + if (!isFilePatternMatched(itemPath, 
promoteIgnoreList)) { + if (item.folder) { + // it is a folder + gbFolders.push(itemPath); + } else if (pathsToSelectRegExp.test(itemPath)) { + // const downloadUrl = `${downloadBaseURI}/${item.id}/content`; + // eslint-disable-next-line no-await-in-loop + // gbFiles.push({ fileDownloadUrl: downloadUrl, filePath: itemPath }); + gbFiles.push(itemPath); + } + } else { + logger.info(`Ignored from promote: ${itemPath}`); + } + } + } + } + return gbFiles; +} + +exports.main = main; diff --git a/actions/graybox/preview-sched.js b/actions/graybox/preview-sched.js new file mode 100644 index 0000000..474f64f --- /dev/null +++ b/actions/graybox/preview-sched.js @@ -0,0 +1,112 @@ +/* ************************************************************************ +* ADOBE CONFIDENTIAL +* ___________________ +* +* Copyright 2024 Adobe +* All Rights Reserved. +* +* NOTICE: All information contained herein is, and remains +* the property of Adobe and its suppliers, if any. The intellectual +* and technical concepts contained herein are proprietary to Adobe +* and its suppliers and are protected by all applicable intellectual +* property laws, including trade secret and copyright laws. +* Dissemination of this information or reproduction of this material +* is strictly forbidden unless prior written permission is obtained +* from Adobe. 
+************************************************************************* */ + +// eslint-disable-next-line import/no-extraneous-dependencies +const openwhisk = require('openwhisk'); +const { getAioLogger } = require('../utils'); +const initFilesWrapper = require('./filesWrapper'); + +async function main(params) { + const logger = getAioLogger(); + const ow = openwhisk(); + let responsePayload = 'Graybox Preview Scheduler invoked'; + logger.info(responsePayload); + + const filesWrapper = await initFilesWrapper(logger); + + try { + const projectQueue = await filesWrapper.readFileIntoObject('graybox_promote/project_queue.json'); + logger.info(`From Preview-sched Project Queue Json: ${JSON.stringify(projectQueue)}`); + if (!projectQueue) { + responsePayload = 'No projects in the queue'; + logger.info(responsePayload); + return { + code: 200, + payload: responsePayload + }; + } + + // iterate the JSON array projects and extract the project_path where status is 'initiated' + const toBePreviewedProjectPaths = []; + projectQueue.forEach((project) => { + if (project.status === 'initiated' || project.status === 'promoted') { + toBePreviewedProjectPaths.push(project.projectPath); + } + }); + + if (!toBePreviewedProjectPaths || toBePreviewedProjectPaths.length === 0) { + responsePayload = 'No projects in the queue with status initiated'; + logger.info(responsePayload); + return { + code: 200, + payload: responsePayload + }; + } + + toBePreviewedProjectPaths.forEach(async (project) => { + const projectStatusJson = await filesWrapper.readFileIntoObject(`graybox_promote${project}/status.json`); + + // copy all params from json into the params object + const inputParams = projectStatusJson?.params; + Object.keys(inputParams).forEach((key) => { + params[key] = inputParams[key]; + }); + + try { + return ow.actions.invoke({ + name: 'graybox/preview-worker', + blocking: false, + result: false, + params + }).then(async (result) => { + logger.info(result); + return { + code: 200, + 
payload: responsePayload + }; + }).catch(async (err) => { + responsePayload = 'Failed to invoke graybox preview action'; + logger.error(`${responsePayload}: ${err}`); + return { + code: 500, + payload: responsePayload + }; + }); + } catch (err) { + responsePayload = 'Unknown error occurred'; + logger.error(`${responsePayload}: ${err}`); + responsePayload = err; + } + + return { + code: 500, + payload: responsePayload, + }; + }); + } catch (err) { + responsePayload = 'Unknown error occurred'; + logger.error(`${responsePayload}: ${err}`); + responsePayload = err; + } + + return { + code: 500, + payload: responsePayload, + }; +} + +exports.main = main; diff --git a/actions/graybox/preview-worker.js b/actions/graybox/preview-worker.js new file mode 100644 index 0000000..0e85304 --- /dev/null +++ b/actions/graybox/preview-worker.js @@ -0,0 +1,245 @@ +/* ************************************************************************ +* ADOBE CONFIDENTIAL +* ___________________ +* +* Copyright 2024 Adobe +* All Rights Reserved. +* +* NOTICE: All information contained herein is, and remains +* the property of Adobe and its suppliers, if any. The intellectual +* and technical concepts contained herein are proprietary to Adobe +* and its suppliers and are protected by all applicable intellectual +* property laws, including trade secret and copyright laws. +* Dissemination of this information or reproduction of this material +* is strictly forbidden unless prior written permission is obtained +* from Adobe. 
+************************************************************************* */ + +const { + getAioLogger, handleExtension, toUTCStr +} = require('../utils'); +const AppConfig = require('../appConfig'); +const HelixUtils = require('../helixUtils'); +const Sharepoint = require('../sharepoint'); +const initFilesWrapper = require('./filesWrapper'); + +const logger = getAioLogger(); + +async function main(params) { + logger.info('Graybox Preview Action triggered'); + + const appConfig = new AppConfig(params); + const { gbRootFolder, experienceName, projectExcelPath } = appConfig.getPayload(); + + const sharepoint = new Sharepoint(appConfig); + + // process data in batches + const helixUtils = new HelixUtils(appConfig); + // Batch Name to Array of Batch Preview Statuses mapping + const previewStatuses = {}; + const filesWrapper = await initFilesWrapper(logger); + let responsePayload; + + // Read the Project Status in the current project's "status.json" file + const projectStatusJson = await filesWrapper.readFileIntoObject(`graybox_promote${gbRootFolder}/${experienceName}/status.json`); + + if (helixUtils.canBulkPreview(true)) { + logger.info('In Preview Worker, Bulk Previewing Graybox files'); + if ((projectStatusJson.status === 'initiated' || projectStatusJson.status === 'promoted')) { + const batchStatusJson = await filesWrapper.readFileIntoObject(`graybox_promote${gbRootFolder}/${experienceName}/batch_status.json`); + + logger.info(`In Preview-Worker, batchStatusJson: ${JSON.stringify(batchStatusJson)}`); + + const noofbatches = batchStatusJson !== undefined ? 
Object.keys(batchStatusJson).length : 0; + // iterate over batch_status.json file and process each batch + if (projectStatusJson.status === 'initiated') { + const toBeStatus = 'initial_preview_in_progress'; + // Update the In Progress Status in the current project's "status.json" file + projectStatusJson.status = toBeStatus; + await filesWrapper.writeFile(`graybox_promote${gbRootFolder}/${experienceName}/status.json`, projectStatusJson); + + // Update the Project Status in the parent "project_queue.json" file + await changeProjectStatusInQueue(filesWrapper, gbRootFolder, experienceName, toBeStatus); + + // Perform Initial Preview + const batchResults = {}; + // Read the Batch JSON file into an array + const i = 0; // Start with counter as 0 + await iterateAndPreviewBatchJson(i, batchResults, noofbatches, batchStatusJson, true); + } else if (projectStatusJson.status === 'promoted') { + // Update the In Progress Status in the current project's "status.json" file + projectStatusJson.status = 'final_preview_in_progress'; + await filesWrapper.writeFile(`graybox_promote${gbRootFolder}/${experienceName}/status.json`, projectStatusJson); + + // Perform Final Preview + const promotedPathsJson = await filesWrapper.readFileIntoObject(`graybox_promote${gbRootFolder}/${experienceName}/promoted_paths.json`); + const i = 0; // Start with counter as 0 + await iterateAndPreviewBatchJson(i, promotedPathsJson, noofbatches, batchStatusJson, false); + } + + // Write the updated batch_status.json file + await filesWrapper.writeFile(`graybox_promote${gbRootFolder}/${experienceName}/batch_status.json`, batchStatusJson); + logger.info(`In Preview Worker, Updated Batch Status Json: ${JSON.stringify(batchStatusJson)}`); + logger.info(`In Preview Worker, Preview Statuses: ${JSON.stringify(previewStatuses)}`); + + // PreviewStatuses is an object with keys(batchNames) mapping to arrays(previewStauses) + const failedPreviews = Object.keys(previewStatuses).reduce((acc, key) => { + const 
filteredStatuses = previewStatuses[key] + .filter((status) => !status.success) // Filter out failed statuses + .map((status) => status.path); // Map to get the path of the failed status + return acc.concat(filteredStatuses); // Concatenate to the accumulator + }, []); + // Now failedPreviews contains all the paths from the filtered and mapped arrays + + const previewStatusJson = await filesWrapper.readFileIntoObject(`graybox_promote${gbRootFolder}/${experienceName}/preview_status.json`); + const previewErrorsJson = await filesWrapper.readFileIntoObject(`graybox_promote${gbRootFolder}/${experienceName}/preview_errors.json`); + + // Combine the Preview Statuses for each batch read from AIO Json with the Preview Statuses + if (previewStatusJson) { + Object.entries(previewStatusJson).forEach(([batchName, batchPreviewStatuses]) => { + if (previewStatuses[batchName]) { + previewStatuses[batchName] = previewStatuses[batchName].concat(batchPreviewStatuses); + } else { + previewStatuses[batchName] = batchPreviewStatuses; + } + }); + } + + // Write the updated preview_errors.json file + await filesWrapper.writeFile(`graybox_promote${gbRootFolder}/${experienceName}/preview_status.json`, previewStatuses); + + // Write the updated preview_errors.json file + await filesWrapper.writeFile(`graybox_promote${gbRootFolder}/${experienceName}/preview_errors.json`, previewErrorsJson.concat(failedPreviews)); + + // Update the Project Status in the current project's "status.json" file & the parent "project_queue.json" file + await updateProjectStatus(gbRootFolder, experienceName, filesWrapper); + + try { + logger.info('Updating project excel file with status'); + let excelValues = ''; + if (projectStatusJson.status === 'initial_preview_in_progress') { + const sFailedPreviews = failedPreviews.length > 0 ? 
`Failed Previews(Promote won't happen for these): \n${failedPreviews.join('\n')}` : ''; + excelValues = [['Step 1 of 5: Initial Preview of Graybox completed', toUTCStr(new Date()), sFailedPreviews]]; + } else if (projectStatusJson.status === 'final_preview_in_progress') { + const sFailedPreviews = failedPreviews.length > 0 ? `Failed Previews: \n${failedPreviews.join('\n')}` : ''; + excelValues = [['Step 5 of 5: Final Preview of Promoted Content completed', toUTCStr(new Date()), sFailedPreviews]]; + } + // Update Preview Status + await sharepoint.updateExcelTable(projectExcelPath, 'PROMOTE_STATUS', excelValues); + } catch (err) { + logger.error(`Error Occured while updating Excel during Graybox Initial Preview: ${err}`); + } + } + + responsePayload = 'Graybox Preview Worker action completed.'; + } else { + responsePayload = 'Bulk Preview not enabled for Graybox Content Tree'; + logger.error(responsePayload); + } + logger.info(responsePayload); + return exitAction({ + body: responsePayload, + statusCode: 200 + }); + + /** + * Iterate over the Batch JSON files, read those into an array and perform Bulk Preview + * @param {*} i counter + * @param {*} batchResults batchResults array + * @param {*} noofbatches total no of batches + * @param {*} filesWrapper filesWrapper object + * @param {*} gbRootFolder graybox root folder + * @param {*} experienceName graybox experience name + */ + async function iterateAndPreviewBatchJson(i, batchResults, noofbatches, batchStatusJson, isGraybox) { + const batchName = `batch_${i + 1}`; + if (i < noofbatches) { + if (batchStatusJson[batchName] === 'initiated' || batchStatusJson[batchName] === 'promoted') { + // Only for initial preview read the files from /batches/ folder, + // Otherwise for final preview use the list passed as-is from copy-worker or promote-worker + if (batchStatusJson[batchName] === 'initiated') { + // Read the Batch JSON file into an batchResults JSON object + const batchJson = await 
filesWrapper.readFileIntoObject(`graybox_promote${gbRootFolder}/${experienceName}/batches/${batchName}.json`); + batchResults[`${batchName}`] = batchJson; + } + // Perform Bulk Preview of a Batch of Graybox files + await previewBatch(batchName, batchResults, batchStatusJson, isGraybox); + } + + // Recursively call the function to process the next batch + await iterateAndPreviewBatchJson(i + 1, batchResults, noofbatches, batchStatusJson, isGraybox); + } + } + + /** + * Perform a Bulk Preview on a Batch of Graybox files + * @param {*} batchName batchName + * @param {*} previewStatuses returned preview statuses + * @param {*} helixUtils helixUtils object + * @param {*} experienceName graybox experience name + */ + async function previewBatch(batchName, batchResults, batchStatusJson, isGraybox = true) { + const batchJson = batchResults[batchName]; + logger.info(`In Preview-worker, in previewBatch for Batch: ${batchName} Batch JSON: ${JSON.stringify(batchJson)}`); + const paths = []; + if (batchJson) { + batchJson.forEach((gbFile) => paths.push(handleExtension(gbFile))); + + // Perform Bulk Preview of a Batch of Graybox files + if (isGraybox) { + previewStatuses[batchName] = await helixUtils.bulkPreview(paths, helixUtils.getOperations().PREVIEW, experienceName, isGraybox); + batchStatusJson[batchName] = 'initial_preview_done'; + } else { + // Don't pass experienceName for final preview + previewStatuses[batchName] = await helixUtils.bulkPreview(paths, helixUtils.getOperations().PREVIEW); + batchStatusJson[batchName] = 'final_preview_done'; + } + } + } +} + +/** + * Update the Project Status in the current project's "status.json" file & the parent "project_queue.json" file + * @param {*} gbRootFolder graybox root folder + * @param {*} experienceName graybox experience name + * @param {*} filesWrapper filesWrapper object + * @returns updated project status + */ +async function updateProjectStatus(gbRootFolder, experienceName, filesWrapper) { + const projectStatusJson = 
await filesWrapper.readFileIntoObject(`graybox_promote${gbRootFolder}/${experienceName}/status.json`); + + // Update the Project Status in the current project's "status.json" file + // If the project status is 'initiated', set it to 'initial_preview_done', else if project status is 'promoted' set it to 'final_preview_done' + let toBeStatus; + if (projectStatusJson.status === 'initiated' || projectStatusJson.status === 'initial_preview_in_progress') { + toBeStatus = 'initial_preview_done'; + } else if (projectStatusJson.status === 'promoted' || projectStatusJson.status === 'final_preview_in_progress') { + toBeStatus = 'final_preview_done'; + } + + if (toBeStatus) { + projectStatusJson.status = toBeStatus; + logger.info(`In Preview-sched After Processing Preview, Project Status Json: ${JSON.stringify(projectStatusJson)}`); + await filesWrapper.writeFile(`graybox_promote${gbRootFolder}/${experienceName}/status.json`, projectStatusJson); + + // Update the Project Status in the parent "project_queue.json" file + await changeProjectStatusInQueue(filesWrapper, gbRootFolder, experienceName, toBeStatus); + } +} + +async function changeProjectStatusInQueue(filesWrapper, gbRootFolder, experienceName, toBeStatus) { + const projectQueue = await filesWrapper.readFileIntoObject('graybox_promote/project_queue.json'); + const index = projectQueue.findIndex((obj) => obj.projectPath === `${gbRootFolder}/${experienceName}`); + if (index !== -1) { + // Replace the object at the found index + projectQueue[index].status = toBeStatus; + await filesWrapper.writeFile('graybox_promote/project_queue.json', projectQueue); + logger.info(`In Preview-sched After Processing Preview, Project Queue Json: ${JSON.stringify(projectQueue)}`); + } +} + +function exitAction(resp) { + return resp; +} + +exports.main = main; diff --git a/actions/graybox/process-docx-sched.js b/actions/graybox/process-docx-sched.js new file mode 100644 index 0000000..f5cfc4b --- /dev/null +++ 
b/actions/graybox/process-docx-sched.js @@ -0,0 +1,110 @@ +/* ************************************************************************ +* ADOBE CONFIDENTIAL +* ___________________ +* +* Copyright 2024 Adobe +* All Rights Reserved. +* +* NOTICE: All information contained herein is, and remains +* the property of Adobe and its suppliers, if any. The intellectual +* and technical concepts contained herein are proprietary to Adobe +* and its suppliers and are protected by all applicable intellectual +* property laws, including trade secret and copyright laws. +* Dissemination of this information or reproduction of this material +* is strictly forbidden unless prior written permission is obtained +* from Adobe. +************************************************************************* */ + +// eslint-disable-next-line import/no-extraneous-dependencies +const openwhisk = require('openwhisk'); +const { getAioLogger } = require('../utils'); +const initFilesWrapper = require('./filesWrapper'); + +async function main(params) { + const logger = getAioLogger(); + const ow = openwhisk(); + let responsePayload = 'Graybox Process Content Scheduler invoked'; + logger.info(responsePayload); + + const filesWrapper = await initFilesWrapper(logger); + + try { + const projectQueue = await filesWrapper.readFileIntoObject('graybox_promote/project_queue.json'); + logger.info(`In Process-content-sched Project Queue Json: ${JSON.stringify(projectQueue)}`); + + if (!projectQueue) { + responsePayload = 'No projects in the queue'; + logger.info(responsePayload); + return { + code: 200, + payload: responsePayload + }; + } + + // iterate the JSON array projects and extract the project_path where status is 'initial_preview_done' + const toBeProcessedProjects = projectQueue + .filter((project) => project.status === 'initial_preview_done') + .map((project) => project.projectPath); + + if (!toBeProcessedProjects || toBeProcessedProjects.length === 0) { + responsePayload = 'No projects in the queue 
with status initial_preview_done'; + logger.info(responsePayload); + return { + code: 200, + payload: responsePayload + }; + } + + toBeProcessedProjects.forEach(async (project) => { + const projectStatusJson = await filesWrapper.readFileIntoObject(`graybox_promote${project}/status.json`); + + // copy all params from json into the params object + const inputParams = projectStatusJson?.params; + Object.keys(inputParams).forEach((key) => { + params[key] = inputParams[key]; + }); + + try { + return ow.actions.invoke({ + name: 'graybox/process-docx-worker', + blocking: false, + result: false, + params + }).then(async (result) => { + logger.info(result); + return { + code: 200, + payload: responsePayload + }; + }).catch(async (err) => { + responsePayload = 'Failed to invoke graybox process content action'; + logger.error(`${responsePayload}: ${err}`); + return { + code: 500, + payload: responsePayload + }; + }); + } catch (err) { + responsePayload = 'Unknown error occurred'; + logger.error(`${responsePayload}: ${err}`); + responsePayload = err; + } + + return { + code: 500, + payload: responsePayload, + }; + }); + } catch (err) { + responsePayload = 'Unknown error occurred'; + logger.error(`${responsePayload}: ${err}`); + responsePayload = err; + } + + return { + code: 500, + payload: responsePayload, + }; +} + +exports.main = main; diff --git a/actions/graybox/process-docx-worker.js b/actions/graybox/process-docx-worker.js new file mode 100644 index 0000000..d251287 --- /dev/null +++ b/actions/graybox/process-docx-worker.js @@ -0,0 +1,286 @@ +/* ************************************************************************ +* ADOBE CONFIDENTIAL +* ___________________ +* +* Copyright 2024 Adobe +* All Rights Reserved. +* +* NOTICE: All information contained herein is, and remains +* the property of Adobe and its suppliers, if any. 
The intellectual +* and technical concepts contained herein are proprietary to Adobe +* and its suppliers and are protected by all applicable intellectual +* property laws, including trade secret and copyright laws. +* Dissemination of this information or reproduction of this material +* is strictly forbidden unless prior written permission is obtained +* from Adobe. +************************************************************************* */ + +const fetch = require('node-fetch'); +const { Readable } = require('stream'); +const { + getAioLogger, toUTCStr +} = require('../utils'); +const AppConfig = require('../appConfig'); +const HelixUtils = require('../helixUtils'); +const Sharepoint = require('../sharepoint'); +const updateDocument = require('../docxUpdater'); +const initFilesWrapper = require('./filesWrapper'); + +const gbStyleExpression = 'gb-'; // graybox style expression. need to revisit if there are any more styles to be considered. +const gbDomainSuffix = '-graybox'; + +const BATCH_REQUEST_PROMOTE = 200; + +const logger = getAioLogger(); + +async function main(params) { + logger.info('Graybox Process Content Action triggered'); + + const appConfig = new AppConfig(params); + const { gbRootFolder, experienceName, projectExcelPath } = appConfig.getPayload(); + + const sharepoint = new Sharepoint(appConfig); + // process data in batches + const helixUtils = new HelixUtils(appConfig); + const filesWrapper = await initFilesWrapper(logger); + let responsePayload; + + // Get the Helix Admin API Key for the Graybox content tree, needed for accessing (with auth) Images in graybox tree + const helixAdminApiKey = helixUtils.getAdminApiKey(true); + + const previewStatuses = await filesWrapper.readFileIntoObject(`graybox_promote${gbRootFolder}/${experienceName}/preview_status.json`); + + if (!previewStatuses) { + responsePayload = 'No preview statuses found'; + logger.info(responsePayload); + return exitAction({ + body: responsePayload, + statusCode: 200 + }); + } + 
logger.info(`In Process-doc-worker, previewStatuses: ${JSON.stringify(previewStatuses)}`); + const processFilesParams = { + previewStatuses, + experienceName, + helixAdminApiKey, + sharepoint, + helixUtils, + appConfig, + filesWrapper, + gbRootFolder, + projectExcelPath + }; + // Promote Graybox files to the default content tree + await processFiles(processFilesParams); + + responsePayload = 'Processing of Graybox Content Tree completed'; + logger.info(responsePayload); + return exitAction({ + body: responsePayload, + statusCode: 200 + }); +} + +/** +* Process files to clean up GB Styles and Link +* @returns +*/ +async function processFiles({ + previewStatuses, experienceName, helixAdminApiKey, sharepoint, helixUtils, filesWrapper, gbRootFolder, projectExcelPath +}) { + const options = {}; + // Passing isGraybox param true to fetch graybox Hlx Admin API Key + const grayboxHlxAdminApiKey = helixUtils.getAdminApiKey(true); + if (grayboxHlxAdminApiKey) { + options.headers = new fetch.Headers(); + options.headers.append('Authorization', `token ${grayboxHlxAdminApiKey}`); + } + + // Read the Project Status in the current project's "status.json" file + const projectStatusJson = await filesWrapper.readFileIntoObject(`graybox_promote${gbRootFolder}/${experienceName}/status.json`); + + const toBeStatus = 'process_content_in_progress'; + // Update the In Progress Status in the current project's "status.json" file + projectStatusJson.status = toBeStatus; + await filesWrapper.writeFile(`graybox_promote${gbRootFolder}/${experienceName}/status.json`, projectStatusJson); + + // Update the Project Status in the parent "project_queue.json" file + await changeProjectStatusInQueue(filesWrapper, gbRootFolder, experienceName, toBeStatus); + + // Read the Batch Status in the current project's "batch_status.json" file + const batchStatusJson = await filesWrapper.readFileIntoObject(`graybox_promote${gbRootFolder}/${experienceName}/batch_status.json`); + + logger.info(`In 
Process-doc-worker, batchStatusJson: ${JSON.stringify(batchStatusJson)}`); + const promoteBatchesJson = {}; + const copyBatchesJson = {}; + let promoteBatchCount = 0; + let copyBatchCount = 0; + const processDocxErrors = []; + + // iterate through preview statuses, generate docx files and create promote & copy batches + const batchNames = Object.keys(previewStatuses).flat(); + const allProcessingPromises = batchNames.map(async (batchName, index, array) => { + const batchPreviewStatuses = previewStatuses[batchName]; + + // Check if Step 2 finished, do the Step 3, if the batch status is 'initial_preview_done' then process the batch + if (batchStatusJson[batchName] === 'initial_preview_done') { + for (let prevIndex = 0; prevIndex < batchPreviewStatuses.length; prevIndex += 1) { + const status = batchPreviewStatuses[prevIndex]; + if (status.success && status.mdPath) { // If the file is successfully initial previewed and has a mdPath then process the file + // eslint-disable-next-line no-await-in-loop + const response = await sharepoint.fetchWithRetry(`${status.mdPath}`, options); + // eslint-disable-next-line no-await-in-loop + let content = await response.text(); + let docx; + + // Sample Image URL [image0]: https://main--bacom-graybox--adobecom.hlx.page/media_115d4450fd3ef2f1559f63e25d7e299eaba9b79ee.jpeg#width=2560&height=1600 + const imageRegex = /\[image.*\]: https:\/\/.*\/media_.*\.(?:jpg|jpeg|png|gif|bmp|webp)#width=\d+&height=\d+/g; + const imageMatches = content.match(imageRegex); + + // Delete all the images from the content, these get added only in .md file and don't exist in the docx file + if (imageMatches) { + imageMatches.forEach((match) => { + // Remove the image matches from content + content = content.replace(match, ''); + }); + } + + if (content.includes(experienceName) || content.includes(gbStyleExpression) || content.includes(gbDomainSuffix)) { + // Process the Graybox Styles and Links with Mdast to Docx conversion + // eslint-disable-next-line 
no-await-in-loop + docx = await updateDocument(content, experienceName, helixAdminApiKey); + if (docx) { + const destinationFilePath = `${status.path.substring(0, status.path.lastIndexOf('/') + 1).replace('/'.concat(experienceName), '')}${status.fileName}`; + const docxFileStream = Readable.from(docx); + + // Write the processed documents to the AIO folder for docx files + // eslint-disable-next-line no-await-in-loop + await filesWrapper.writeFileFromStream(`graybox_promote${gbRootFolder}/${experienceName}/docx${destinationFilePath}`, docxFileStream); + + let promoteBatchJson = promoteBatchesJson[batchName]; + if (!promoteBatchJson) { + promoteBatchJson = { status: 'processed', files: [destinationFilePath] }; + } else if (promoteBatchJson.files) { + promoteBatchJson.files.push(destinationFilePath); + } else { + promoteBatchJson.files = [destinationFilePath]; + } + promoteBatchesJson[batchName] = promoteBatchJson; + + logger.info(`In Process-doc-worker Promote Batch JSON after push: ${JSON.stringify(promoteBatchesJson)}`); + + // If the promote batch count reaches the limit, increment the promote batch count + if (promoteBatchCount === BATCH_REQUEST_PROMOTE) { // TODO remove this code if promoteBatchCount is not needed, and instead initial preview batch count is used + promoteBatchCount += 1; + } + + // Write the promote batches JSON file + // eslint-disable-next-line no-await-in-loop + await filesWrapper.writeFile(`graybox_promote${gbRootFolder}/${experienceName}/promote_batches.json`, promoteBatchesJson); + } else { + processDocxErrors.push(`Error processing docx for ${status.fileName}`); + } + + // Update each Batch Status in the current project's "batch_status.json" file + batchStatusJson[batchName] = 'processed'; + + // Update the Project Status & Batch Status in the current project's "status.json" & updated batch_status.json file respectively + // eslint-disable-next-line no-await-in-loop + await 
filesWrapper.writeFile(`graybox_promote${gbRootFolder}/${experienceName}/batch_status.json`, batchStatusJson); + } else { + // Copy Source full path with file name and extension + const copySourceFilePath = `${status.path.substring(0, status.path.lastIndexOf('/') + 1)}${status.fileName}`; + // Copy Destination folder path, no file name + const copyDestinationFolder = `${status.path.substring(0, status.path.lastIndexOf('/')).replace('/'.concat(experienceName), '')}`; + const copyDestFilePath = `${copyDestinationFolder}/${status.fileName}`; + + // Don't create new batch names, use the same batch names created in the start before initial preview + let copyBatchJson = copyBatchesJson[batchName]; + if (!copyBatchJson) { + copyBatchJson = { status: 'processed', files: [{ copySourceFilePath, copyDestFilePath }] }; + } else if (!copyBatchJson.files) { + copyBatchJson.files = []; + } + copyBatchJson.files.push({ copySourceFilePath, copyDestFilePath }); + copyBatchesJson[batchName] = copyBatchJson; + + // If the copy batch count reaches the limit, increment the copy batch count + if (copyBatchCount === BATCH_REQUEST_PROMOTE) { // TODO remove this code if copyBatchCount is not needed, and instead initial preview batch count is used + copyBatchCount += 1; // Increment the copy batch count + } + logger.info(`In Process-doc-worker Copy Batch JSON after push: ${JSON.stringify(copyBatchesJson)}`); + // Write the copy batches JSON file + // eslint-disable-next-line no-await-in-loop + await filesWrapper.writeFile(`graybox_promote${gbRootFolder}/${experienceName}/copy_batches.json`, copyBatchesJson); + + // Update each Batch Status in the current project's "batch_status.json" file + batchStatusJson[batchName] = 'processed'; + // Update the Project Status & Batch Status in the current project's "status.json" & updated batch_status.json file respectively + // eslint-disable-next-line no-await-in-loop + await 
filesWrapper.writeFile(`graybox_promote${gbRootFolder}/${experienceName}/batch_status.json`, batchStatusJson); + } + } + } + } + }); + + await Promise.all(allProcessingPromises); // await all async functions in the array are executed + await updateStatuses(promoteBatchesJson, copyBatchesJson, gbRootFolder, experienceName, filesWrapper, processDocxErrors, sharepoint, projectExcelPath); +} + +async function updateStatuses(promoteBatchesJson, copyBatchesJson, gbRootFolder, experienceName, filesWrapper, processContentErrors, sharepoint, projectExcelPath) { + // Write the copy batches JSON file + await filesWrapper.writeFile(`graybox_promote${gbRootFolder}/${experienceName}/copy_batches.json`, copyBatchesJson); + await filesWrapper.writeFile(`graybox_promote${gbRootFolder}/${experienceName}/promote_batches.json`, promoteBatchesJson); + // Update the Project Status in JSON files + updateProjectStatus(gbRootFolder, experienceName, filesWrapper); + + // Write the processDocxErrors to the AIO Files + if (processContentErrors.length > 0) { + await filesWrapper.writeFile(`graybox_promote${gbRootFolder}/${experienceName}/process_content_errors.json`, processContentErrors); + } + + // Update the Project Excel with the Promote Status + try { + const promoteExcelValues = [['Step 2 of 5: Processing files for Graybox blocks, styles and links completed', toUTCStr(new Date()), '']]; + await sharepoint.updateExcelTable(projectExcelPath, 'PROMOTE_STATUS', promoteExcelValues); + } catch (err) { + logger.error(`Error Occured while updating Excel during Graybox Process Content Step: ${err}`); + } +} + +/** + * Update the Project Status in the current project's "status.json" file & the parent "project_queue.json" file + * @param {*} gbRootFolder graybox root folder + * @param {*} experienceName graybox experience name + * @param {*} filesWrapper filesWrapper object + * @returns updated project status + */ +async function updateProjectStatus(gbRootFolder, experienceName, filesWrapper) { + 
// Update the Project Status in the current project's "status.json" file + const projectStatusJson = await filesWrapper.readFileIntoObject(`graybox_promote${gbRootFolder}/${experienceName}/status.json`); + const toBeStatus = 'processed'; + projectStatusJson.status = toBeStatus; + await filesWrapper.writeFile(`graybox_promote${gbRootFolder}/${experienceName}/status.json`, projectStatusJson); + + // Update the Project Status in the parent "project_queue.json" file + const projectQueue = await changeProjectStatusInQueue(filesWrapper, gbRootFolder, experienceName, toBeStatus); + logger.info(`In process-content-worker After Processing Docx, Project Queue Json: ${JSON.stringify(projectQueue)}`); +} + +async function changeProjectStatusInQueue(filesWrapper, gbRootFolder, experienceName, toBeStatus) { + const projectQueue = await filesWrapper.readFileIntoObject('graybox_promote/project_queue.json'); + const index = projectQueue.findIndex((obj) => obj.projectPath === `${gbRootFolder}/${experienceName}`); + if (index !== -1) { + // Replace the object at the found index + projectQueue[index].status = toBeStatus; + await filesWrapper.writeFile('graybox_promote/project_queue.json', projectQueue); + } + return projectQueue; +} + +function exitAction(resp) { + return resp; +} + +exports.main = main; diff --git a/actions/graybox/promote-sched.js b/actions/graybox/promote-sched.js new file mode 100644 index 0000000..fc06c19 --- /dev/null +++ b/actions/graybox/promote-sched.js @@ -0,0 +1,131 @@ +/* ************************************************************************ +* ADOBE CONFIDENTIAL +* ___________________ +* +* Copyright 2024 Adobe +* All Rights Reserved. +* +* NOTICE: All information contained herein is, and remains +* the property of Adobe and its suppliers, if any. 
The intellectual +* and technical concepts contained herein are proprietary to Adobe +* and its suppliers and are protected by all applicable intellectual +* property laws, including trade secret and copyright laws. +* Dissemination of this information or reproduction of this material +* is strictly forbidden unless prior written permission is obtained +* from Adobe. +************************************************************************* */ + +// eslint-disable-next-line import/no-extraneous-dependencies +const openwhisk = require('openwhisk'); +const { getAioLogger } = require('../utils'); +const initFilesWrapper = require('./filesWrapper'); + +async function main(params) { + const logger = getAioLogger(); + const ow = openwhisk(); + let responsePayload = 'Graybox Promote Scheduler invoked'; + logger.info(responsePayload); + + const filesWrapper = await initFilesWrapper(logger); + + try { + let projectQueue = await filesWrapper.readFileIntoObject('graybox_promote/project_queue.json'); + logger.info(`From Promote-sched Project Queue Json: ${JSON.stringify(projectQueue)}`); + + // Sorting the Promote Projects based on the 'createdTime' property, pick the oldest project + projectQueue = projectQueue.sort((a, b) => a.createdTime - b.createdTime); + + // Find the First Project where status is 'processed' + const projectEntry = projectQueue.find((project) => project.status === 'processed'); + + if (projectEntry && projectEntry.projectPath) { + const project = projectEntry.projectPath; + const projectStatusJson = await filesWrapper.readFileIntoObject(`graybox_promote${project}/status.json`); + + // Read the Batch Status in the current project's "batch_status.json" file + const batchStatusJson = await filesWrapper.readFileIntoObject(`graybox_promote${project}/batch_status.json`); + logger.info(`In Promote Sched, batchStatusJson: ${JSON.stringify(batchStatusJson)}`); + + // Find if any batch is in 'copy_in_progress' status, if yes then don't trigger another copy action 
for another "processed" batch + const copyOrPromoteInProgressBatch = Object.entries(batchStatusJson) + .find(([batchName, copyBatchJson]) => (copyBatchJson.status === 'copy_in_progress' || copyBatchJson.status === 'promote_in_progress')); + + if (copyOrPromoteInProgressBatch && Array.isArray(copyOrPromoteInProgressBatch) && copyOrPromoteInProgressBatch.length > 0) { + responsePayload = `Promote or Copy Action already in progress for Batch: ${copyOrPromoteInProgressBatch[0]}, not triggering another action until it completes`; + return { + code: 200, + payload: responsePayload + }; + } + + const promoteBatchesJson = await filesWrapper.readFileIntoObject(`graybox_promote${project}/promote_batches.json`); + + // Find the First Batch where status is 'processed', to promote one batch at a time + const processedBatchName = Object.keys(promoteBatchesJson) + .find((batchName) => promoteBatchesJson[batchName].status === 'processed'); + // If no batch is found with status 'processed', then there is nothing to promote; return + if (!processedBatchName) { + responsePayload = 'No Promote Batches found with status "processed"'; + return { + code: 200, + payload: responsePayload + }; + } + + if (promoteBatchesJson[processedBatchName].status === 'processed') { + // copy all params from json into the params object + const inputParams = projectStatusJson?.params; + Object.keys(inputParams).forEach((key) => { + params[key] = inputParams[key]; + }); + // Set the Project & Batch Name in params for the Promote Content Worker Action to read and process + params.project = project; + params.batchName = processedBatchName; + + logger.info(`In Promote Sched, Invoking Promote Content Worker for Batch: ${processedBatchName} of Project: ${project}`); + try { + return ow.actions.invoke({ + name: 'graybox/promote-worker', + blocking: false, + result: false, + params + }).then(async (result) => { + logger.info(result); + return { + code: 200, + payload: responsePayload + }; + }).catch(async (err) => {
responsePayload = 'Failed to invoke graybox promote action'; + logger.error(`${responsePayload}: ${err}`); + return { + code: 500, + payload: responsePayload + }; + }); + } catch (err) { + responsePayload = 'Unknown error occurred while invoking Promote Content Worker Action'; + logger.error(`${responsePayload}: ${err}`); + responsePayload = err; + } + } + responsePayload = 'Triggered Promote Content Worker Action'; + return { + code: 200, + payload: responsePayload, + }; + } + } catch (err) { + responsePayload = 'Unknown error occurred while processing the projects for Promote'; + logger.error(`${responsePayload}: ${err}`); + responsePayload = err; + } + + // No errors while initiating all the Promote Content Worker Action for all the projects + return { + code: 200, + payload: responsePayload + }; +} + +exports.main = main; diff --git a/actions/graybox/promote-worker.js b/actions/graybox/promote-worker.js index 0cfe838..dc2798e 100644 --- a/actions/graybox/promote-worker.js +++ b/actions/graybox/promote-worker.js @@ -15,258 +15,184 @@ * from Adobe. ************************************************************************* */ -const fetch = require('node-fetch'); -const { - getAioLogger, handleExtension, isFilePatternMatched, toUTCStr -} = require('../utils'); +const { getAioLogger, toUTCStr } = require('../utils'); const AppConfig = require('../appConfig'); -const HelixUtils = require('../helixUtils'); -const updateDocument = require('../docxUpdater'); const Sharepoint = require('../sharepoint'); +const initFilesWrapper = require('./filesWrapper'); const logger = getAioLogger(); -const MAX_CHILDREN = 1000; -const BATCH_REQUEST_PREVIEW = 200; -const gbStyleExpression = 'gb-'; // graybox style expression. need to revisit if there are any more styles to be considered. 
-const gbDomainSuffix = '-graybox'; - -/** - * - Bulk Preview docx files - * - GET markdown files using preview-url.md - * - Process markdown - process MDAST by cleaning it up - * - Generate updated Docx file using md2docx lib - * - copy updated docx file to the default content tree - * - run the bulk preview action on the list of files that were copied to default content tree - * - update the project excel file as and when necessary to update the status of the promote action - */ async function main(params) { - logger.info('Graybox Promote Worker invoked'); + logger.info('Graybox Promote Content Action triggered'); const appConfig = new AppConfig(params); - const { gbRootFolder, experienceName } = appConfig.getPayload(); - const { projectExcelPath } = appConfig.getPayload(); + const { gbRootFolder, experienceName, projectExcelPath } = appConfig.getPayload(); + const sharepoint = new Sharepoint(appConfig); - // Update Promote Status - const promoteTriggeredExcelValues = [['Promote triggered', toUTCStr(new Date()), '']]; - await sharepoint.updateExcelTable(projectExcelPath, 'PROMOTE_STATUS', promoteTriggeredExcelValues); + // process data in batches + const filesWrapper = await initFilesWrapper(logger); + let responsePayload; + let promotes = []; + const failedPromotes = []; - logger.info(`GB ROOT FOLDER ::: ${gbRootFolder}`); - logger.info(`GB EXP NAME ::: ${experienceName}`); + const project = params.project || ''; + const batchName = params.batchName || ''; - // Get all files in the graybox folder for the specific experience name - // NOTE: This does not capture content inside the locale/expName folders yet - const gbFiles = await findAllFiles(experienceName, appConfig, sharepoint); + logger.info(`In Promote Content Worker, Processing Promote Content for batch: ${batchName}`); - // create batches to process the data - const batchArray = []; - for (let i = 0; i < gbFiles.length; i += BATCH_REQUEST_PREVIEW) { - const arrayChunk = gbFiles.slice(i, i + 
BATCH_REQUEST_PREVIEW); - batchArray.push(arrayChunk); - } + // Read the Batch Status in the current project's "batch_status.json" file + let batchStatusJson = await filesWrapper.readFileIntoObject(`graybox_promote${project}/batch_status.json`); - // process data in batches - const helixUtils = new HelixUtils(appConfig); - const previewStatuses = []; - let failedPreviews = []; - const promotedPreviewStatuses = []; - let promotedFailedPreviews = []; - let responsePayload; - if (helixUtils.canBulkPreview(true)) { - logger.info('Bulk Previewing Graybox files'); - const paths = []; - batchArray.forEach((batch) => { - batch.forEach((gbFile) => paths.push(handleExtension(gbFile.filePath))); - }); - previewStatuses.push(await helixUtils.bulkPreview(paths, helixUtils.getOperations().PREVIEW, experienceName, true)); + const promoteErrorsJson = await filesWrapper.readFileIntoObject(`graybox_promote${project}/promote_errors.json`); - failedPreviews = previewStatuses.flatMap((statusArray) => statusArray.filter((status) => !status.success)).map((status) => status.path); + let promoteBatchesJson = await filesWrapper.readFileIntoObject(`graybox_promote${project}/promote_batches.json`); + logger.info(`In Promote-worker Promote Batches Json: ${JSON.stringify(promoteBatchesJson)}`); - logger.info('Updating project excel file with status'); - const sFailedPreviews = failedPreviews.length > 0 ? 
`Failed Previews(Promote won't happen for these): \n${failedPreviews.join('\n')}` : ''; - const excelValues = [['Preview completed', toUTCStr(new Date()), sFailedPreviews]]; - // Update Preview Status - await sharepoint.updateExcelTable(projectExcelPath, 'PROMOTE_STATUS', excelValues); + const toBeStatus = 'promote_in_progress'; + // Update & Write the Batch Status to in progress "batch_status.json" file + // So that the scheduler doesn't pick the same batch again + batchStatusJson[batchName] = toBeStatus; + await filesWrapper.writeFile(`graybox_promote${gbRootFolder}/${experienceName}/batch_status.json`, batchStatusJson); - // Get the Helix Admin API Key for the Graybox content tree, needed for accessing (with auth) Images in graybox tree - const helixAdminApiKey = helixUtils.getAdminApiKey(true); + await changeProjectStatusInQueue(filesWrapper, gbRootFolder, experienceName, toBeStatus); - // Promote Graybox files to the default content tree - const { promotes, failedPromotes } = await promoteFiles(previewStatuses, experienceName, helixAdminApiKey, sharepoint, helixUtils, appConfig); + if (!promoteBatchesJson || !promoteBatchesJson[batchName]) { + responsePayload = `No batch found to promote in Promote Batches JSON for Batch Name: ${batchName} for project`; + logger.info(responsePayload); + return exitAction({ + body: responsePayload, + statusCode: 200 + }); + } - // Update Promote Status - const sFailedPromoteStatuses = failedPromotes.length > 0 ? 
`Failed Promotes: \n${failedPromotes.join('\n')}` : ''; - const promoteExcelValues = [['Promote completed', toUTCStr(new Date()), sFailedPromoteStatuses]]; - await sharepoint.updateExcelTable(projectExcelPath, 'PROMOTE_STATUS', promoteExcelValues); + promoteBatchesJson[batchName].status = 'promote_in_progress'; + await filesWrapper.writeFile(`graybox_promote${gbRootFolder}/${experienceName}/promote_batches.json`, promoteBatchesJson); - // Handle the extensions of promoted files - const promotedPaths = promotes.map((promote) => handleExtension(promote)); + const promoteFilePaths = promoteBatchesJson[batchName].files || []; - // Perform Preview of all Promoted files in the Default Content Tree - if (helixUtils.canBulkPreview(false)) { - promotedPaths.forEach((promote) => logger.info(`Promoted file in Default folder: ${promote}`)); - // Don't pass the experienceName & isGraybox params for the default content tree - promotedPreviewStatuses.push(await helixUtils.bulkPreview(promotedPaths, helixUtils.getOperations().PREVIEW)); + logger.info(`In Promote Content Worker, for Batch Name ${batchName} promoteFilePaths: ${JSON.stringify(promoteFilePaths)}`); + // Process the Promote Content + // Collect all promises from the forEach loop + // eslint-disable-next-line no-restricted-syntax + for (const promoteFilePath of promoteFilePaths) { + // eslint-disable-next-line no-await-in-loop + const promoteDocx = await filesWrapper.readFileIntoBuffer(`graybox_promote${gbRootFolder}/${experienceName}/docx${promoteFilePath}`); + if (promoteDocx) { + // eslint-disable-next-line no-await-in-loop + const saveStatus = await sharepoint.saveFileSimple(promoteDocx, promoteFilePath); + + if (saveStatus?.success) { + promotes.push(promoteFilePath); + } else if (saveStatus?.errorMsg?.includes('File is locked')) { + failedPromotes.push(`${promoteFilePath} (locked file)`); + } else { + failedPromotes.push(promoteFilePath); + } } + } - promotedFailedPreviews = 
promotedPreviewStatuses.flatMap((statusArray) => statusArray.filter((status) => !status.success)).map((status) => status.path); - const sFailedPromotedPreviews = promotedFailedPreviews.length > 0 ? `Failed Promoted Previews: \n${promotedFailedPreviews.join('\n')}` : ''; + // Wait for all the promises to resolve - const promotedExcelValues = [['Promoted Files Preview completed', toUTCStr(new Date()), sFailedPromotedPreviews]]; - // Update Promoted Preview Status - await sharepoint.updateExcelTable(projectExcelPath, 'PROMOTE_STATUS', promotedExcelValues); - responsePayload = 'Graybox Promote Worker action completed.'; - } else { - responsePayload = 'Bulk Preview not enabled for Graybox Content Tree'; + // Update the Promoted Paths in the current project's "promoted_paths.json" file + if (promotes.length > 0) { + const promotedPathsJson = await filesWrapper.readFileIntoObject(`graybox_promote${gbRootFolder}/${experienceName}/promoted_paths.json`) || {}; + // Combine with existing promotes, if any already exist in promoted_paths.json for the current batch, either from the Copy action or the Promote action + if (promotedPathsJson[batchName]) { + promotes = promotes.concat(promotedPathsJson[batchName]); + } + promotedPathsJson[batchName] = promotes; + await filesWrapper.writeFile(`graybox_promote${gbRootFolder}/${experienceName}/promoted_paths.json`, promotedPathsJson); + } - logger.info(responsePayload); - return { - body: responsePayload, - }; -} -/** -* Promote Graybox files to the default content tree - * @param {*} previewStatuses file preview statuses - * @param {*} experienceName graybox experience name - * @param {*} helixAdminApiKey helix admin api key for performing Mdast to Docx conversion - * @param {*} sharepoint sharepoint instance - * @param {*} helixUtils helix utils instance - * @param {*} appConfig app config instance - * @returns JSON array of successful & failed promotes - */ -async function promoteFiles(previewStatuses, experienceName, helixAdminApiKey, sharepoint,
helixUtils, appConfig) { - const promotes = []; - const failedPromotes = []; - const options = {}; - // Passing isGraybox param true to fetch graybox Hlx Admin API Key - const grayboxHlxAdminApiKey = helixUtils.getAdminApiKey(true); - if (grayboxHlxAdminApiKey) { - options.headers = new fetch.Headers(); - options.headers.append('Authorization', `token ${grayboxHlxAdminApiKey}`); + if (failedPromotes.length > 0) { + await filesWrapper.writeFile(`graybox_promote${gbRootFolder}/${experienceName}/promote_errors.json`, promoteErrorsJson.concat(failedPromotes)); } - // iterate through preview statuses, generate docx files and promote them - const allPromises = previewStatuses.map(async (status) => { - // check if status is an array and iterate through the array - if (Array.isArray(status)) { - const promises = status.map(async (stat) => { - if (stat.success && stat.mdPath) { - const response = await sharepoint.fetchWithRetry(`${stat.mdPath}`, options); - const content = await response.text(); - let docx; - const sp = await appConfig.getSpConfig(); - - if (content.includes(experienceName) || content.includes(gbStyleExpression) || content.includes(gbDomainSuffix)) { - // Process the Graybox Styles and Links with Mdast to Docx conversion - docx = await updateDocument(content, experienceName, helixAdminApiKey); - if (docx) { - logger.info(`Docx file generated for ${stat.path}`); - // Save file Destination full path with file name and extension - const destinationFilePath = `${stat.path.substring(0, stat.path.lastIndexOf('/') + 1).replace('/'.concat(experienceName), '')}${stat.fileName}`; - const saveStatus = await sharepoint.saveFileSimple(docx, destinationFilePath); - - if (saveStatus?.success) { - promotes.push(destinationFilePath); - } else if (saveStatus?.errorMsg?.includes('File is locked')) { - failedPromotes.push(`${destinationFilePath} (locked file)`); - } else { - failedPromotes.push(destinationFilePath); - } - } else { - logger.error(`Error generating docx file for 
${stat.path}`); - } - } else { - const copySourceFilePath = `${stat.path.substring(0, stat.path.lastIndexOf('/') + 1)}${stat.fileName}`; // Copy Source full path with file name and extension - const copyDestinationFolder = `${stat.path.substring(0, stat.path.lastIndexOf('/')).replace('/'.concat(experienceName), '')}`; // Copy Destination folder path, no file name - const destFilePath = `${copyDestinationFolder}/${stat.fileName}`; - - // Download the grayboxed file and save it to default content location - const { fileDownloadUrl } = await sharepoint.getFileData(copySourceFilePath, true); - const file = await sharepoint.getFileUsingDownloadUrl(fileDownloadUrl); - const saveStatus = await sharepoint.saveFileSimple(file, destFilePath); - - if (saveStatus?.success) { - promotes.push(destFilePath); - } else if (saveStatus?.errorMsg?.includes('File is locked')) { - failedPromotes.push(`${destFilePath} (locked file)`); - } else { - failedPromotes.push(destFilePath); - } - } - } - }); - await Promise.all(promises); // await all async functions in the array are executed, before updating the status in the graybox project excel + // Update the Promote Batch Status in the current project's "promote_batches.json" file + promoteBatchesJson = await filesWrapper.readFileIntoObject(`graybox_promote${project}/promote_batches.json`); + promoteBatchesJson[batchName].status = 'promoted'; + // Write the promote batches JSON file + await filesWrapper.writeFile(`graybox_promote${gbRootFolder}/${experienceName}/promote_batches.json`, promoteBatchesJson); + + // Check in parallel if the Same Batch Name Exists & is Promoted in the Copy Batches JSON + const copyBatchesJson = await filesWrapper.readFileIntoObject(`graybox_promote${project}/copy_batches.json`); + const copyBatchJson = copyBatchesJson[batchName]; + let markBatchAsPromoted = true; + if (copyBatchJson) { + markBatchAsPromoted = copyBatchJson.status === 'promoted'; + } + batchStatusJson = await 
filesWrapper.readFileIntoObject(`graybox_promote${project}/batch_status.json`); + if (markBatchAsPromoted) { + // Update the Batch Status in the current project's "batch_status.json" file + if (batchStatusJson && batchStatusJson[batchName] && (promotes.length > 0 || failedPromotes.length > 0)) { + batchStatusJson[batchName] = 'promoted'; + // Write the updated batch_status.json file + await filesWrapper.writeFile(`graybox_promote${gbRootFolder}/${experienceName}/batch_status.json`, batchStatusJson); } + + // Find if the current batch running is the Last Copy Batch Name, and then mark the project as 'promoted' + const allBatchesPromoted = Object.keys(batchStatusJson).every((key) => batchStatusJson[key] === 'promoted'); + if (allBatchesPromoted) { + // Update the Project Status in JSON files + updateProjectStatus(gbRootFolder, experienceName, filesWrapper); + } + } + + // Update the Project Excel with the Promote Status + try { + const sFailedPromoteStatuses = failedPromotes.length > 0 ? `Failed Promotes: \n${failedPromotes.join('\n')}` : ''; + const promoteExcelValues = [[`Step 3 of 5: Promote completed for Batch ${batchName}`, toUTCStr(new Date()), sFailedPromoteStatuses]]; + await sharepoint.updateExcelTable(projectExcelPath, 'PROMOTE_STATUS', promoteExcelValues); + } catch (err) { + logger.error(`Error Occured while updating Excel during Graybox Promote: ${err}`); + } + + logger.info(`In Promote Content Worker, Promotes: ${JSON.stringify(promotes)}`); + logger.info(`In Promote Content Worker, Failed Promotes: ${JSON.stringify(failedPromotes)}`); + + responsePayload = `Promote Content Worker finished promoting content for batch ${batchName}`; + logger.info(responsePayload); + return exitAction({ + body: responsePayload, + statusCode: 200 }); - await Promise.all(allPromises); // await all async functions in the array are executed, before updating the status in the graybox project excel - return { promotes, failedPromotes }; } /** - * Find all files in the Graybox 
tree to promote. + * Update the Project Status in the current project's "status.json" file & the parent "project_queue.json" file + * @param {*} gbRootFolder graybox root folder + * @param {*} experienceName graybox experience name + * @param {*} filesWrapper filesWrapper object + * @returns updated project status */ -async function findAllFiles(experienceName, appConfig, sharepoint) { - const sp = await appConfig.getSpConfig(); - const options = await sharepoint.getAuthorizedRequestOption({ method: 'GET' }); - const promoteIgnoreList = appConfig.getPromoteIgnorePaths(); - logger.info(`Promote ignore list: ${promoteIgnoreList}`); - - return findAllGrayboxFiles({ - baseURI: sp.api.file.get.gbBaseURI, - options, - gbFolders: appConfig.isDraftOnly() ? [`/${experienceName}/drafts`] : [''], - promoteIgnoreList, - downloadBaseURI: sp.api.file.download.baseURI, - experienceName, - sharepoint - }); +async function updateProjectStatus(gbRootFolder, experienceName, filesWrapper) { + const projectStatusJson = await filesWrapper.readFileIntoObject(`graybox_promote${gbRootFolder}/${experienceName}/status.json`); + + const toBeStatus = 'promoted'; + // Update the Project Status in the current project's "status.json" file + projectStatusJson.status = toBeStatus; + logger.info(`In Promote-content-worker After Processing Promote, Project Status Json: ${JSON.stringify(projectStatusJson)}`); + await filesWrapper.writeFile(`graybox_promote${gbRootFolder}/${experienceName}/status.json`, projectStatusJson); + + // Update the Project Status in the parent "project_queue.json" file + const projectQueue = await changeProjectStatusInQueue(filesWrapper, gbRootFolder, experienceName, toBeStatus); + logger.info(`In Promote-content-worker After Processing Promote, Project Queue Json: ${JSON.stringify(projectQueue)}`); + await filesWrapper.writeFile('graybox_promote/project_queue.json', projectQueue); } -/** - * Iteratively finds all files under a specified root folder. 
- */ -async function findAllGrayboxFiles({ - baseURI, options, gbFolders, promoteIgnoreList, downloadBaseURI, experienceName, sharepoint -}) { - const gbRoot = baseURI.split(':').pop(); - // Regular expression to select the gbRoot and anything before it - // Eg: the regex selects "https://:/-graybox" - const pPathRegExp = new RegExp(`.*:${gbRoot}`); - // Regular expression to select paths that has the experienceName at first or second level - const pathsToSelectRegExp = new RegExp(`^/([^/]+/)?${experienceName}(/.*)?$`); - const gbFiles = []; - while (gbFolders.length !== 0) { - const uri = `${baseURI}${gbFolders.shift()}:/children?$top=${MAX_CHILDREN}`; - // eslint-disable-next-line no-await-in-loop - const res = await sharepoint.fetchWithRetry(uri, options); - logger.info(`Find all Graybox files URI: ${uri} \nResponse: ${res.ok}`); - if (res.ok) { - // eslint-disable-next-line no-await-in-loop - const json = await res.json(); - // eslint-disable-next-line no-await-in-loop - const driveItems = json.value; - for (let di = 0; di < driveItems?.length; di += 1) { - const item = driveItems[di]; - const itemPath = `${item.parentReference.path.replace(pPathRegExp, '')}/${item.name}`; - logger.info(`${itemPath} ::: ${pathsToSelectRegExp.test(itemPath)}`); - if (!isFilePatternMatched(itemPath, promoteIgnoreList)) { - if (item.folder) { - // it is a folder - gbFolders.push(itemPath); - } else if (pathsToSelectRegExp.test(itemPath)) { - const downloadUrl = `${downloadBaseURI}/${item.id}/content`; - // eslint-disable-next-line no-await-in-loop - gbFiles.push({ fileDownloadUrl: downloadUrl, filePath: itemPath }); - } - } else { - logger.info(`Ignored from promote: ${itemPath}`); - } - } - } +async function changeProjectStatusInQueue(filesWrapper, gbRootFolder, experienceName, toBeStatus) { + const projectQueue = await filesWrapper.readFileIntoObject('graybox_promote/project_queue.json'); + const index = projectQueue.findIndex((obj) => obj.projectPath === 
`${gbRootFolder}/${experienceName}`); + if (index !== -1) { + // Replace the object at the found index + projectQueue[index].status = toBeStatus; } - return gbFiles; + return projectQueue; +} + +function exitAction(resp) { + return resp; } exports.main = main; diff --git a/actions/graybox/promote.js b/actions/graybox/promote.js index 7a43641..10b8b09 100644 --- a/actions/graybox/promote.js +++ b/actions/graybox/promote.js @@ -36,7 +36,7 @@ async function main(params) { } return ow.actions.invoke({ - name: 'graybox/promote-worker', + name: 'graybox/initiate-promote-worker', blocking: false, result: false, params diff --git a/actions/graybox/validateAction.js b/actions/graybox/validateAction.js index 5e2ab6e..a7abdd1 100644 --- a/actions/graybox/validateAction.js +++ b/actions/graybox/validateAction.js @@ -17,6 +17,7 @@ const AppConfig = require('../appConfig'); const GrayboxUser = require('../grayboxUser'); +const { getAioLogger } = require('../utils'); function isGrayboxParamsValid(params) { const { @@ -41,6 +42,7 @@ async function isUserAuthorized(params, grpIds) { const appConfig = new AppConfig(params); const grayboxUser = new GrayboxUser({ appConfig }); const found = await grayboxUser.isInGroups(grpIds); + getAioLogger().info(`User is authorized: ${found}`); return found; } diff --git a/actions/sharepoint.js b/actions/sharepoint.js index 8635740..1c4ee69 100644 --- a/actions/sharepoint.js +++ b/actions/sharepoint.js @@ -29,6 +29,7 @@ const TOO_MANY_REQUESTS = '429'; const LOG_RESP_HEADER = false; let nextCallAfter = 0; const itemIdMap = {}; +const logger = getAioLogger(); class Sharepoint { constructor(appConfig) { @@ -194,8 +195,6 @@ class Sharepoint { } async saveFileSimple(file, dest, isGraybox) { - const logger = getAioLogger(); - try { const folder = this.getFolderFromPath(dest); const filename = this.getFileNameFromPath(dest); @@ -286,7 +285,6 @@ class Sharepoint { logHeaders(response) { if (!this.getLogRespHeader()) return; - const logger = 
getAioLogger(); const hdrStr = this.getHeadersStr(response); const logStr = `Status is ${response.status} with headers ${hdrStr}`; diff --git a/app.config.yaml b/app.config.yaml index f1cb549..a669588 100644 --- a/app.config.yaml +++ b/app.config.yaml @@ -25,7 +25,34 @@ application: web: 'yes' runtime: nodejs:18 inputs: - LOG_LEVEL: debug + LOG_LEVEL: debug + initiate-promote-worker: + function: actions/graybox/initiate-promote-worker.js + web: 'no' + runtime: nodejs:18 + inputs: + LOG_LEVEL: debug + limits: + timeout: 3600000 + memorySize: 2048 + preview-worker: + function: actions/graybox/preview-worker.js + web: 'no' + runtime: nodejs:18 + inputs: + LOG_LEVEL: debug + limits: + timeout: 3600000 + memorySize: 2048 + process-docx-worker: + function: actions/graybox/process-docx-worker.js + web: 'no' + runtime: nodejs:18 + inputs: + LOG_LEVEL: debug + limits: + timeout: 3600000 + memorySize: 2048 promote-worker: function: actions/graybox/promote-worker.js web: 'no' @@ -35,3 +62,78 @@ application: limits: timeout: 3600000 memorySize: 2048 + copy-worker: + function: actions/graybox/copy-worker.js + web: 'no' + runtime: nodejs:18 + inputs: + LOG_LEVEL: debug + limits: + timeout: 3600000 + memorySize: 2048 + preview-sched: + function: actions/graybox/preview-sched.js + web: 'no' + runtime: 'nodejs:18' + inputs: + LOG_LEVEL: debug + limits: + timeout: 900000 + memorySize: 2048 + annotations: + require-adobe-auth: false + final: true + process-docx-sched: + function: actions/graybox/process-docx-sched.js + web: 'no' + runtime: 'nodejs:18' + inputs: + LOG_LEVEL: debug + limits: + timeout: 900000 + memorySize: 2048 + annotations: + require-adobe-auth: false + final: true + promote-sched: + function: actions/graybox/promote-sched.js + web: 'no' + runtime: 'nodejs:18' + inputs: + LOG_LEVEL: debug + limits: + timeout: 900000 + memorySize: 2048 + annotations: + require-adobe-auth: false + final: true + copy-sched: + function: actions/graybox/copy-sched.js + web: 'no' + 
runtime: 'nodejs:18' + inputs: + LOG_LEVEL: debug + limits: + timeout: 900000 + memorySize: 2048 + annotations: + require-adobe-auth: false + final: true + triggers: + everyMin: + feed: /whisk.system/alarms/interval + inputs: + minutes: 1 + rules: + everyMinRule: + trigger: everyMin + action: preview-sched + everyMinProcessDocxRule: + trigger: everyMin + action: process-docx-sched + everyMinPromoteDocxRule: + trigger: everyMin + action: promote-sched + everyMinCopyDocxRule: + trigger: everyMin + action: copy-sched diff --git a/e2e/graybox/promote.e2e.test.js b/e2e/graybox/promote.e2e.test.js index 1d6990d..a00a313 100644 --- a/e2e/graybox/promote.e2e.test.js +++ b/e2e/graybox/promote.e2e.test.js @@ -24,7 +24,7 @@ const hostname = Config.get('cna.hostname') || 'adobeioruntime.net'; const runtimePackage = 'graybox'; const actionUrl = `https://${namespace}.${hostname}/api/v1/web/${runtimePackage}/promote`; -test('returns a 204 as promote calls async promote-worker action', async () => { +test('returns a 204 as promote calls async initiate-promote-worker action', async () => { const res = await fetch(actionUrl); expect(res).toEqual(expect.objectContaining({ status: 204 diff --git a/package-lock.json b/package-lock.json index 55de4d3..9867da9 100644 --- a/package-lock.json +++ b/package-lock.json @@ -9,6 +9,7 @@ "version": "0.0.1", "dependencies": { "@adobe/aio-lib-core-logging": "^3.0.1", + "@adobe/aio-lib-files": "^4.0.1", "@adobe/aio-sdk": "^5", "@azure/msal-node": "^2.6.5", "milo-md2docx": "^1.8.0", @@ -660,9 +661,9 @@ } }, "node_modules/@adobe/aio-lib-core-networking": { - "version": "5.0.2", - "resolved": "https://registry.npmjs.org/@adobe/aio-lib-core-networking/-/aio-lib-core-networking-5.0.2.tgz", - "integrity": "sha512-Jj5/feIQQm2O59Aprp+tXUYQeJ5zKJrZGQhhD4Du9ApZ5Ulz1Qq360thQg9nWMdy8yAWWbA/bY2WmOm9/uqudg==", + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/@adobe/aio-lib-core-networking/-/aio-lib-core-networking-5.0.1.tgz", + "integrity": 
"sha512-F/RHorqDXTGuSgOrxsAI6dNEvTWWT86vArfWQPh/UJ3YuERgkgdHt9FYFcHmLARBLDqGVkRaFYbjMdL6YHetew==", "dependencies": { "@adobe/aio-lib-core-config": "^5.0.0", "@adobe/aio-lib-core-errors": "^4.0.0", @@ -677,11 +678,70 @@ "node": ">=18" } }, + "node_modules/@adobe/aio-lib-core-networking/node_modules/@adobe/aio-lib-core-config": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/@adobe/aio-lib-core-config/-/aio-lib-core-config-5.0.1.tgz", + "integrity": "sha512-OQmQublmy/uXM1HC6qXfxSAXEl85nExh/yiajlEfJheKuJ9iPWwVWXR5vBHVVDlOXgWEVMWRUQPMIUu1lmR5lA==", + "dependencies": { + "debug": "^4.1.1", + "deepmerge": "^4.0.0", + "dotenv": "16.3.1", + "hjson": "^3.1.2", + "js-yaml": "^4.1.0" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@adobe/aio-lib-core-networking/node_modules/@adobe/aio-lib-core-errors": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/@adobe/aio-lib-core-errors/-/aio-lib-core-errors-4.0.1.tgz", + "integrity": "sha512-zrQm9TJh13wEHH5O2TQAUQvYGGe01R9DHzKy+b6B0URbl2lcuqXyNiUx896lpcgXD2bzUoH7ARRH97aCW2tlfw==" + }, + "node_modules/@adobe/aio-lib-core-networking/node_modules/@adobe/aio-lib-core-logging": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@adobe/aio-lib-core-logging/-/aio-lib-core-logging-3.0.1.tgz", + "integrity": "sha512-WvhFXy5sCIBHwGNP6QS2LiGVWeFb6vxKiZ62W0ahwN5SqHqaBoimBDvTysdH9gANGLShELPPT2gb4255sElf5w==", + "dependencies": { + "debug": "^4.1.1", + "winston": "^3.2.1" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@adobe/aio-lib-core-networking/node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==" + }, + "node_modules/@adobe/aio-lib-core-networking/node_modules/dotenv": { + "version": "16.3.1", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.3.1.tgz", + "integrity": 
"sha512-IPzF4w4/Rd94bA9imS68tZBaYyBWSCE47V1RGuMrB94iyTOIEwRmVL2x/4An+6mETpLrKJ5hQkB8W4kFAadeIQ==", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/motdotla/dotenv?sponsor=1" + } + }, "node_modules/@adobe/aio-lib-core-networking/node_modules/fetch-retry": { "version": "5.0.6", "resolved": "https://registry.npmjs.org/fetch-retry/-/fetch-retry-5.0.6.tgz", "integrity": "sha512-3yurQZ2hD9VISAhJJP9bpYFNQrHHBXE2JxxjY5aLEcDi46RmAzJE2OC9FAde0yis5ElW0jTTzs0zfg/Cca4XqQ==" }, + "node_modules/@adobe/aio-lib-core-networking/node_modules/js-yaml": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "dependencies": { + "argparse": "^2.0.1" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, "node_modules/@adobe/aio-lib-core-tvm": { "version": "4.0.2", "resolved": "https://registry.npmjs.org/@adobe/aio-lib-core-tvm/-/aio-lib-core-tvm-4.0.2.tgz", @@ -700,6 +760,23 @@ "node": ">=18" } }, + "node_modules/@adobe/aio-lib-core-tvm/node_modules/@adobe/aio-lib-core-errors": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/@adobe/aio-lib-core-errors/-/aio-lib-core-errors-4.0.1.tgz", + "integrity": "sha512-zrQm9TJh13wEHH5O2TQAUQvYGGe01R9DHzKy+b6B0URbl2lcuqXyNiUx896lpcgXD2bzUoH7ARRH97aCW2tlfw==" + }, + "node_modules/@adobe/aio-lib-core-tvm/node_modules/@adobe/aio-lib-core-logging": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@adobe/aio-lib-core-logging/-/aio-lib-core-logging-3.0.1.tgz", + "integrity": "sha512-WvhFXy5sCIBHwGNP6QS2LiGVWeFb6vxKiZ62W0ahwN5SqHqaBoimBDvTysdH9gANGLShELPPT2gb4255sElf5w==", + "dependencies": { + "debug": "^4.1.1", + "winston": "^3.2.1" + }, + "engines": { + "node": ">=18" + } + }, "node_modules/@adobe/aio-lib-core-tvm/node_modules/upath": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/upath/-/upath-2.0.1.tgz", @@ -748,6 
+825,60 @@ "node": ">=18" } }, + "node_modules/@adobe/aio-lib-env/node_modules/@adobe/aio-lib-core-config": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/@adobe/aio-lib-core-config/-/aio-lib-core-config-5.0.1.tgz", + "integrity": "sha512-OQmQublmy/uXM1HC6qXfxSAXEl85nExh/yiajlEfJheKuJ9iPWwVWXR5vBHVVDlOXgWEVMWRUQPMIUu1lmR5lA==", + "dependencies": { + "debug": "^4.1.1", + "deepmerge": "^4.0.0", + "dotenv": "16.3.1", + "hjson": "^3.1.2", + "js-yaml": "^4.1.0" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@adobe/aio-lib-env/node_modules/@adobe/aio-lib-core-logging": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@adobe/aio-lib-core-logging/-/aio-lib-core-logging-3.0.1.tgz", + "integrity": "sha512-WvhFXy5sCIBHwGNP6QS2LiGVWeFb6vxKiZ62W0ahwN5SqHqaBoimBDvTysdH9gANGLShELPPT2gb4255sElf5w==", + "dependencies": { + "debug": "^4.1.1", + "winston": "^3.2.1" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@adobe/aio-lib-env/node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==" + }, + "node_modules/@adobe/aio-lib-env/node_modules/dotenv": { + "version": "16.3.1", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.3.1.tgz", + "integrity": "sha512-IPzF4w4/Rd94bA9imS68tZBaYyBWSCE47V1RGuMrB94iyTOIEwRmVL2x/4An+6mETpLrKJ5hQkB8W4kFAadeIQ==", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/motdotla/dotenv?sponsor=1" + } + }, + "node_modules/@adobe/aio-lib-env/node_modules/js-yaml": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "dependencies": { + "argparse": "^2.0.1" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, 
"node_modules/@adobe/aio-lib-events": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/@adobe/aio-lib-events/-/aio-lib-events-4.0.1.tgz", @@ -792,6 +923,11 @@ "node": ">=18" } }, + "node_modules/@adobe/aio-lib-files/node_modules/@adobe/aio-lib-core-errors": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/@adobe/aio-lib-core-errors/-/aio-lib-core-errors-4.0.1.tgz", + "integrity": "sha512-zrQm9TJh13wEHH5O2TQAUQvYGGe01R9DHzKy+b6B0URbl2lcuqXyNiUx896lpcgXD2bzUoH7ARRH97aCW2tlfw==" + }, "node_modules/@adobe/aio-lib-files/node_modules/uuid": { "version": "9.0.1", "resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.1.tgz", @@ -886,6 +1022,25 @@ "yauzl": "2.10.0" } }, + "node_modules/@adobe/helix-docx2md/node_modules/@adobe/mammoth": { + "version": "1.5.1-bleeding.2", + "resolved": "https://registry.npmjs.org/@adobe/mammoth/-/mammoth-1.5.1-bleeding.2.tgz", + "integrity": "sha512-quhwkeOckKfPv3ubpi+OZImtJeJ9gyHWD//QfDWaY4USsL059Y1uB/Kbzw7RGAWYaNGQiq2vI2jCx6DH3LKeiQ==", + "dependencies": { + "@xmldom/xmldom": "^0.8.6", + "argparse": "~1.0.3", + "bluebird": "~3.4.0", + "dingbat-to-unicode": "^1.0.1", + "jszip": "^3.7.1", + "lop": "^0.4.1", + "path-is-absolute": "^1.0.0", + "underscore": "^1.13.1", + "xmlbuilder": "^10.0.0" + }, + "bin": { + "mammoth": "bin/mammoth" + } + }, "node_modules/@adobe/helix-markdown-support": { "version": "7.0.0", "resolved": "https://registry.npmjs.org/@adobe/helix-markdown-support/-/helix-markdown-support-7.0.0.tgz", @@ -992,17 +1147,17 @@ } }, "node_modules/@adobe/mammoth": { - "version": "1.5.1-bleeding.2", - "resolved": "https://registry.npmjs.org/@adobe/mammoth/-/mammoth-1.5.1-bleeding.2.tgz", - "integrity": "sha512-quhwkeOckKfPv3ubpi+OZImtJeJ9gyHWD//QfDWaY4USsL059Y1uB/Kbzw7RGAWYaNGQiq2vI2jCx6DH3LKeiQ==", + "version": "1.5.1-bleeding.1", + "resolved": "https://registry.npmjs.org/@adobe/mammoth/-/mammoth-1.5.1-bleeding.1.tgz", + "integrity": 
"sha512-wI9kMxh1ZvgVg/eQuFCuPUYevNRFPShNh5Iq2oDglfI4dJ44NZ7TzTzAfLMADdqd3YgRzAj37QRi+2aOSXwcUw==", "dependencies": { - "@xmldom/xmldom": "^0.8.6", "argparse": "~1.0.3", "bluebird": "~3.4.0", "dingbat-to-unicode": "^1.0.1", "jszip": "^3.7.1", "lop": "^0.4.1", "path-is-absolute": "^1.0.0", + "sax": "~1.1.1", "underscore": "^1.13.1", "xmlbuilder": "^10.0.0" }, @@ -1010,13 +1165,10 @@ "mammoth": "bin/mammoth" } }, - "node_modules/@adobe/mammoth/node_modules/xmlbuilder": { - "version": "10.1.1", - "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-10.1.1.tgz", - "integrity": "sha512-OyzrcFLL/nb6fMGHbiRDuPup9ljBycsdCypwuyg5AAHvyWzGfChJpCXMG88AGTIMFhGZ9RccFN1e6lhg3hkwKg==", - "engines": { - "node": ">=4.0" - } + "node_modules/@adobe/mammoth/node_modules/sax": { + "version": "1.1.6", + "resolved": "https://registry.npmjs.org/sax/-/sax-1.1.6.tgz", + "integrity": "sha512-8zci48uUQyfqynGDSkUMD7FCJB96hwLnlZOXlgs1l3TX+LW27t3psSWKUxC0fxVgA86i8tL4NwGcY1h/6t3ESg==" }, "node_modules/@adobe/mdast-util-gridtables": { "version": "3.0.1", @@ -14235,6 +14387,14 @@ "xml-js": "bin/cli.js" } }, + "node_modules/xmlbuilder": { + "version": "10.1.1", + "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-10.1.1.tgz", + "integrity": "sha512-OyzrcFLL/nb6fMGHbiRDuPup9ljBycsdCypwuyg5AAHvyWzGfChJpCXMG88AGTIMFhGZ9RccFN1e6lhg3hkwKg==", + "engines": { + "node": ">=4.0" + } + }, "node_modules/y18n": { "version": "5.0.8", "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", diff --git a/package.json b/package.json index 840decb..bf8778e 100644 --- a/package.json +++ b/package.json @@ -4,6 +4,7 @@ "private": true, "dependencies": { "@adobe/aio-lib-core-logging": "^3.0.1", + "@adobe/aio-lib-files": "^4.0.1", "@adobe/aio-sdk": "^5", "@azure/msal-node": "^2.6.5", "milo-md2docx": "^1.8.0", diff --git a/test/graybox/promote.test.js b/test/graybox/promote.test.js index 6a4656b..4b396e5 100644 --- a/test/graybox/promote.test.js +++ b/test/graybox/promote.test.js @@ -66,7 
+66,7 @@ describe('main function', () => { expect(loggerMock.info).toHaveBeenCalledWith('Graybox Promote action invoked'); expect(validateAction).toHaveBeenCalledWith(params, ['group1'], false); expect(owMock.actions.invoke).toHaveBeenCalledWith({ - name: 'graybox/promote-worker', + name: 'graybox/initiate-promote-worker', blocking: false, result: false, params