Skip to content

Commit

Permalink
Promote API Batching Implementation
Browse files Browse the repository at this point in the history
  • Loading branch information
arshadparwaiz committed Aug 7, 2024
1 parent b311d16 commit e768a6d
Show file tree
Hide file tree
Showing 14 changed files with 1,559 additions and 150 deletions.
103 changes: 103 additions & 0 deletions actions/graybox/copy-content-worker.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,103 @@
/* ************************************************************************
* ADOBE CONFIDENTIAL
* ___________________
*
* Copyright 2024 Adobe
* All Rights Reserved.
*
* NOTICE: All information contained herein is, and remains
* the property of Adobe and its suppliers, if any. The intellectual
* and technical concepts contained herein are proprietary to Adobe
* and its suppliers and are protected by all applicable intellectual
* property laws, including trade secret and copyright laws.
* Dissemination of this information or reproduction of this material
* is strictly forbidden unless prior written permission is obtained
* from Adobe.
************************************************************************* */

const {
getAioLogger, handleExtension, toUTCStr
} = require('../utils');
const AppConfig = require('../appConfig');
const Sharepoint = require('../sharepoint');
const initFilesWrapper = require('./filesWrapper');

const logger = getAioLogger();

/**
 * Graybox Copy Content Worker.
 * Copies every file of one batch (params.batchName) of one graybox project
 * (params.project) from its graybox source path to its default-content
 * destination path, as recorded in the project's copy_batches.json.
 *
 * @param {Object} params action params; reads `project` and `batchName`
 * @returns {Object} { body, statusCode } action response
 */
async function main(params) {
    logger.info('Graybox Promote Content Action triggered');

    const appConfig = new AppConfig(params);
    const {
        spToken, adminPageUri, rootFolder, gbRootFolder, promoteIgnorePaths, experienceName, projectExcelPath, draftsOnly
    } = appConfig.getPayload();

    const sharepoint = new Sharepoint(appConfig);

    // process data in batches
    const filesWrapper = await initFilesWrapper(logger);
    let responsePayload;
    const promotes = []; // destination paths copied successfully
    const failedPromotes = []; // destination paths that failed to copy

    logger.info('In Copy Content Worker, Processing Copy Content');

    const project = params.project || '';
    const batchName = params.batchName || '';

    const copyBatchesJson = await filesWrapper.readFileIntoObject(`graybox_promote${project}/copy_batches.json`);

    const copyFilePathsJson = copyBatchesJson[batchName] || {};

    logger.info(`In Copy Content Worker, copyFilePaths: ${JSON.stringify(copyFilePathsJson)}`);

    // Process the Copy Content sequentially.
    // Fix: the original used forEach(async ...), which fires the async callbacks
    // without awaiting them, so the action reported success before any copy
    // finished (and unhandled rejections were possible).
    // eslint-disable-next-line no-restricted-syntax
    for (const [copySourceFilePath, copyDestFilePath] of Object.entries(copyFilePathsJson)) {
        try {
            // Download the grayboxed file and save it to default content location
            // eslint-disable-next-line no-await-in-loop
            const { fileDownloadUrl } = await sharepoint.getFileData(copySourceFilePath, true);
            // eslint-disable-next-line no-await-in-loop
            const file = await sharepoint.getFileUsingDownloadUrl(fileDownloadUrl);
            // eslint-disable-next-line no-await-in-loop
            const saveStatus = await sharepoint.saveFileSimple(file, copyDestFilePath);

            if (saveStatus?.success) {
                promotes.push(copyDestFilePath);
            } else if (saveStatus?.errorMsg?.includes('File is locked')) {
                failedPromotes.push(`${copyDestFilePath} (locked file)`);
            } else {
                failedPromotes.push(copyDestFilePath);
            }
        } catch (err) {
            // A single failing file must not abort the rest of the batch
            logger.error(`Error copying ${copySourceFilePath}: ${err}`);
            failedPromotes.push(copyDestFilePath);
        }
    }

    logger.info(`In Copy Content Worker, promoted: ${promotes.length}, failed: ${failedPromotes.length}`);

    responsePayload = 'Copy Content Worker finished promoting content';
    logger.info(responsePayload);
    return exitAction({
        body: responsePayload,
        statusCode: 200
    });
}

/**
* Update the Project Status in the current project's "status.json" file & the parent "ongoing_projects.json" file
* @param {*} gbRootFolder graybox root folder
* @param {*} experienceName graybox experience name
* @param {*} filesWrapper filesWrapper object
* @returns updated project status
*/
/**
 * Update the Project Status in the current project's "status.json" file & the parent "ongoing_projects.json" file
 * @param {*} gbRootFolder graybox root folder
 * @param {*} experienceName graybox experience name
 * @param {*} filesWrapper filesWrapper object
 * @returns {Object} the updated project status JSON (matches the documented contract;
 *          the original returned undefined)
 */
async function updateProjectStatus(gbRootFolder, experienceName, filesWrapper) {
    const projects = await filesWrapper.readFileIntoObject('graybox_promote/ongoing_projects.json');
    const projectStatusJson = await filesWrapper.readFileIntoObject(`graybox_promote${gbRootFolder}/${experienceName}/status.json`);

    // Update the Project Status in the current project's "status.json" file
    projectStatusJson.status = 'initial_preview_done';
    logger.info(`In Promote-content-worker After Processing Promote, Project Status Json: ${JSON.stringify(projectStatusJson)}`);
    await filesWrapper.writeFile(`graybox_promote${gbRootFolder}/${experienceName}/status.json`, projectStatusJson);

    // Update the Project Status in the parent "ongoing_projects.json" file.
    // Fix: guard against the project entry being absent so a stale
    // ongoing_projects.json does not crash the worker with a TypeError.
    const projectEntry = projects.find((p) => p.project_path === `${gbRootFolder}/${experienceName}`);
    if (projectEntry) {
        projectEntry.status = 'initial_preview_done';
    } else {
        logger.error(`Project ${gbRootFolder}/${experienceName} not found in ongoing_projects.json`);
    }
    logger.info(`In Promote-content-worker After Processing Promote, OnProjects Json: ${JSON.stringify(projects)}`);
    await filesWrapper.writeFile('graybox_promote/ongoing_projects.json', projects);

    return projectStatusJson;
}

/**
 * Final hook before the action returns its response.
 * Currently a pure passthrough; kept as a seam for future cleanup logic.
 *
 * @param {Object} response the action response object
 * @returns {Object} the same response object, unchanged
 */
function exitAction(response) {
    return response;
}

exports.main = main;
115 changes: 115 additions & 0 deletions actions/graybox/copy-sched.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,115 @@
/* ************************************************************************
* ADOBE CONFIDENTIAL
* ___________________
*
* Copyright 2024 Adobe
* All Rights Reserved.
*
* NOTICE: All information contained herein is, and remains
* the property of Adobe and its suppliers, if any. The intellectual
* and technical concepts contained herein are proprietary to Adobe
* and its suppliers and are protected by all applicable intellectual
* property laws, including trade secret and copyright laws.
* Dissemination of this information or reproduction of this material
* is strictly forbidden unless prior written permission is obtained
* from Adobe.
************************************************************************* */

// eslint-disable-next-line import/no-extraneous-dependencies
const openwhisk = require('openwhisk');
const { getAioLogger } = require('../utils');
const { validateAction } = require('./validateAction');
const AppConfig = require('../appConfig');
const initFilesWrapper = require('./filesWrapper');

/**
 * Graybox Copy Scheduler.
 * Reads the ongoing projects list and, for every project whose status is
 * 'processed', invokes the graybox/copy-content-worker action once per copy
 * batch recorded in that project's copy_batches.json.
 *
 * @param {Object} params action input params; augmented in place with the
 *        project's stored params plus `project` and `batchName` per batch
 * @returns {Object} { code, payload } summary of the scheduling run
 */
async function main(params) {
    const logger = getAioLogger();
    const ow = openwhisk();
    let responsePayload = 'Graybox Copy Scheduler invoked';
    logger.info(responsePayload);

    const filesWrapper = await initFilesWrapper(logger);

    try {
        const projects = await filesWrapper.readFileIntoObject('graybox_promote/ongoing_projects.json');
        logger.info(`From Copy-sched Ongoing Projects Json: ${JSON.stringify(projects)}`);

        // Extract the project_path of every project whose status is 'processed'
        const ongoingProcessedProjects = projects
            .filter((project) => project.status === 'processed')
            .map((project) => project.project_path);

        // Fix: the original used forEach(async ...) here and for the batches,
        // which fired the callbacks without awaiting them; every `return`
        // inside those callbacks was discarded and the action always fell
        // through to the final 500 response. Sequential for...of loops await
        // each step and let the success path return 200.
        // eslint-disable-next-line no-restricted-syntax
        for (const project of ongoingProcessedProjects) {
            // eslint-disable-next-line no-await-in-loop
            const projectStatusJson = await filesWrapper.readFileIntoObject(`graybox_promote${project}/status.json`);
            logger.info(`In Copy-sched Projects Json: ${JSON.stringify(projectStatusJson)}`);

            // eslint-disable-next-line no-await-in-loop
            const copyBatchesJson = await filesWrapper.readFileIntoObject(`graybox_promote${project}/copy_batches.json`);

            // copy all params from json into the params object
            const inputParams = projectStatusJson?.params;
            Object.keys(inputParams).forEach((key) => {
                params[key] = inputParams[key];
            });

            // eslint-disable-next-line no-restricted-syntax
            for (const batchName of Object.keys(copyBatchesJson)) {
                // Set the Project & Batch Name in params for the Copy Content Worker Action to read and process
                params.project = project;
                params.batchName = batchName;

                try {
                    const appConfig = new AppConfig(params);
                    const grpIds = appConfig.getConfig().grayboxUserGroups;
                    // eslint-disable-next-line no-await-in-loop
                    const vActData = await validateAction(params, grpIds, params.ignoreUserCheck);
                    if (vActData && vActData.code !== 200) {
                        logger.info(`Validation failed: ${JSON.stringify(vActData)}`);
                        // Skip this batch but keep scheduling the remaining ones
                        // eslint-disable-next-line no-continue
                        continue;
                    }

                    // eslint-disable-next-line no-await-in-loop
                    await ow.actions.invoke({
                        name: 'graybox/copy-content-worker',
                        blocking: false,
                        result: false,
                        params
                    }).then((result) => {
                        logger.info(result);
                    }).catch((err) => {
                        responsePayload = 'Failed to invoke graybox copy action';
                        logger.error(`${responsePayload}: ${err}`);
                    });
                } catch (err) {
                    responsePayload = 'Unknown error occurred';
                    logger.error(`${responsePayload}: ${err}`);
                }
            }
        }

        logger.info(`Params length after: ${Object.keys(params).length}`);
        return {
            code: 200,
            payload: responsePayload,
        };
    } catch (err) {
        responsePayload = 'Unknown error occurred';
        logger.error(`${responsePayload}: ${err}`);
        responsePayload = err;
    }

    return {
        code: 500,
        payload: responsePayload,
    };
}

exports.main = main;
125 changes: 125 additions & 0 deletions actions/graybox/filesWrapper.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,125 @@
const Files = require('@adobe/aio-lib-files');
const streamLib = require('stream');

/**
 * Initialise a thin wrapper around @adobe/aio-lib-files that adds logging
 * and defensive error handling to the common file operations used by the
 * graybox actions.
 *
 * @param {Object} logger aio logger used for all error reporting
 * @returns {Object} an object exposing the wrapped file operations
 */
const initFilesWrapper = async (logger) => {
    const files = await Files.init();

    // Internal helper: read raw file content, returning null on any failure
    // (optionally logging the error, since a missing file may be expected).
    const readFileInternal = async (filePath, logFileNotFound = true, options = {}) => {
        try {
            return await files.read(filePath, options);
        } catch (err) {
            if (logFileNotFound) {
                logger.error(`Error while reading file ${filePath}: ${err.message}`);
            }
            return null;
        }
    };

    /**
     * Read a file and parse its content as JSON, returning {} when the file
     * is missing or unparseable.
     */
    const readFileIntoObject = async (filePath, logFileNotFound = true, options = {}) => {
        const data = await readFileInternal(filePath, logFileNotFound, options);
        try {
            // Fix: the original tested the undefined identifier `input`
            // (typeof input === "string"), so this branch could never run;
            // check the actual `data` value instead.
            if (typeof data === 'string') {
                return JSON.parse(data);
            }
            return data ? JSON.parse(data.toString()) : {};
        } catch (err) {
            if (logFileNotFound) {
                logger.error(`Error while parsing file content of ${filePath}: ${err.message}`);
            }
            return {};
        }
    };

    // Read the stored metadata/properties of a file; null on failure.
    const readProperties = async (filePath) => {
        try {
            return await files.getProperties(filePath);
        } catch (err) {
            logger.error(`Error while reading metadata of ${filePath}: ${err.message}`);
            return null;
        }
    };

    /**
     * Return the file as Buffer or an empty Buffer, when reading the file errored out.
     *
     * @param filePath {string} path to the file to read
     * @param logFileNotFound {boolean} whether a failure to read the file should be logged - defaults to true
     * @param options {object} aio-lib-files "remoteReadOptions" - default to an empty object
     * @returns {Buffer} the buffer with the file's content
     */
    const readFileIntoBuffer = async (filePath, logFileNotFound = true, options = {}) => {
        const data = await readFileInternal(filePath, logFileNotFound, options);
        return data ?? Buffer.alloc(0);
    };

    // Write string/Buffer content as-is; anything else is JSON-stringified.
    const writeFile = async (filePath, content) => {
        let finalData = content;
        if (!Buffer.isBuffer(content) && typeof content !== 'string' && !(content instanceof String)) {
            finalData = JSON.stringify(content);
        }
        try {
            await files.write(filePath, finalData);
        } catch (err) {
            logger.error(`Error while writing file ${filePath}: ${err.message}`);
        }
    };

    const createReadStream = async (filePath, options = {}) => files.createReadStream(filePath, options);

    /**
     * Buffer a readable stream fully, then persist it.
     * Returns null on success, or an error message string on failure.
     */
    const writeFileFromStream = async (filePath, stream) => {
        try {
            if (stream instanceof streamLib.Readable) {
                const chunks = [];
                // eslint-disable-next-line no-restricted-syntax
                for await (const chunk of stream) {
                    chunks.push(chunk);
                }
                await files.write(filePath, Buffer.concat(chunks));
                const fileProps = await files.getProperties(filePath);
                if (!fileProps || !fileProps?.contentLength) {
                    return 'Error: Failed to determine the file size of the stored document.';
                }
                return null;
            }
            return 'Error: Unexpected stream.';
        } catch (err) {
            return `Error while writing file ${filePath}: ${err.message}`;
        }
    };

    // Best-effort delete; failures are logged and swallowed.
    const deleteObject = async (filePath) => {
        try {
            await files.delete(filePath);
        } catch (err) {
            logger.error(`Error while deleting ${filePath}: ${err.message}`);
        }
    };

    // List files under a path; [] on failure.
    const listFiles = async (filePath) => {
        try {
            // Fix: the original returned the un-awaited promise from inside
            // try, so rejections bypassed this catch and surfaced to callers.
            // eslint-disable-next-line no-return-await
            return await files.list(filePath);
        } catch (err) {
            logger.error(`Error while listing files: ${err.message}`);
            return [];
        }
    };

    // True when the listing is non-empty (or not a list at all).
    const fileExists = async (filePath) => {
        const fileList = await listFiles(filePath);
        return !Array.isArray(fileList) || fileList.length !== 0;
    };

    return {
        writeFileFromStream,
        readFileIntoObject,
        readProperties,
        createReadStream,
        listFiles,
        fileExists,
        writeFile,
        deleteObject,
        readFileIntoBuffer,
    };
};

module.exports = initFilesWrapper;
Loading

0 comments on commit e768a6d

Please sign in to comment.