diff --git a/package.json b/package.json index cab98d40..ecc2029a 100644 --- a/package.json +++ b/package.json @@ -25,7 +25,6 @@ "test:integration:github": "jest int --runInBand --globalSetup='./scripts/spin-up-lambdas.ts' --globalTeardown='./scripts/destroy-lambdas.ts' --testTimeout=20000", "test:integration": "jest int --testTimeout=20000 --runInBand", "test:hotfix:vrm": "jest --testTimeout 20000 hotfix/cb2-10791", - "test:hotfix:plates": "jest --testTimeout 20000 hotfix/cb2-11175", "package": "npm run build:prod", "start:ci": "npm run dynamo:seed && sam local start-api --docker-network $(docker network ls | grep github_network | awk '{print $2}') --warm-containers EAGER", "swagger:open": "docker run --name swagger -d -p 80:8080 -v $(pwd)/docs:/tmp -e SWAGGER_FILE=/tmp/spec.yml swaggerapi/swagger-editor", diff --git a/src/handler/batchPlateCreation.ts b/src/handler/batchPlateCreation.ts new file mode 100644 index 00000000..932e8e67 --- /dev/null +++ b/src/handler/batchPlateCreation.ts @@ -0,0 +1,86 @@ +import { + TechRecordType as TechRecordTypeByVehicle, +} from '@dvsa/cvs-type-definitions/types/v3/tech-record/tech-record-vehicle-type'; +import { TechRecordType } from '@dvsa/cvs-type-definitions/types/v3/tech-record/tech-record-verb'; +import 'dotenv/config'; +import { v4 as uuidv4 } from 'uuid'; +import { SQSEvent } from 'aws-lambda'; +import { PlateReasonForIssue, Plates } from '../models/plate'; +import { DocumentName, SQSRequestBody } from '../models/sqsPayload'; +import { getBySystemNumberAndCreatedTimestamp, inPlaceRecordUpdate } from '../services/database'; +import { addToSqs } from '../services/sqs'; +import { StatusCode } from '../util/enum'; +import { flattenArrays, formatTechRecord } from '../util/formatTechRecord'; +import logger, { logError } from '../util/logger'; +import { BatchPlateData } from '../models/batchPlate'; + +export const handler = async (event: SQSEvent): Promise<void> => { + const batchIssuerName = 'CVS Batch Plate Generation'; + let numberOfRecordsUpdated = 0; + let numberOfSqsAdded = 0; + + try { + const processPromises = event.Records.map(async ({ body }) => { + const data: BatchPlateData = JSON.parse(body) as BatchPlateData; + const { systemNumber, createdTimestamp } = data; + + logger.info(`Processing record: sysNum ${systemNumber}, timestamp ${createdTimestamp}`); + + const dbRecord = await getBySystemNumberAndCreatedTimestamp(systemNumber, createdTimestamp); + + if (!dbRecord || !Object.keys(dbRecord).length) { + throw new Error(`Missing record: sysNum ${systemNumber}, timestamp ${createdTimestamp}`); + } + + if (dbRecord.techRecord_statusCode !== StatusCode.CURRENT) { + throw new Error(`Non current record: statusCode ${dbRecord.techRecord_statusCode}`); + } + if (dbRecord.techRecord_vehicleType !== 'trl' && dbRecord.techRecord_vehicleType !== 'hgv') { + throw new Error(`Invalid vehicle type: ${dbRecord.techRecord_vehicleType}`); + } + + const newPlate: Plates = { + plateSerialNumber: uuidv4(), + plateIssueDate: new Date().toISOString(), + plateReasonForIssue: PlateReasonForIssue.REPLACEMENT, + plateIssuer: batchIssuerName, + }; + + const formattedTechRecord = formatTechRecord<TechRecordTypeByVehicle<'hgv'> | TechRecordTypeByVehicle<'trl'>>(dbRecord); + + if (formattedTechRecord.techRecord_plates?.some((plate) => plate.plateIssuer === batchIssuerName) ?? 
false) { + logger.info(`Plate already issued for: sysNum ${systemNumber}, timestamp ${createdTimestamp}`); + return; + } + + if (formattedTechRecord.techRecord_plates) { + formattedTechRecord.techRecord_plates.push(newPlate); + } else { + formattedTechRecord.techRecord_plates = [newPlate]; + } + const flattenedTechRecord = flattenArrays(formattedTechRecord) as TechRecordType<'get'>; + await inPlaceRecordUpdate(flattenedTechRecord); + numberOfRecordsUpdated++; + + const plateSqsPayload: SQSRequestBody = { + techRecord: formattedTechRecord, + plate: newPlate, + documentName: DocumentName.MINISTRY, + recipientEmailAddress: '', + }; + logger.debug('Sending to Doc Gen Queue', JSON.stringify(plateSqsPayload)); + await addToSqs(plateSqsPayload, process.env.DOC_GEN_SQS_QUEUE ?? ''); + + numberOfSqsAdded++; + + logger.info(`Successfully processed: sysNum ${systemNumber}, timestamp ${createdTimestamp}`); + }); + + await Promise.all(processPromises); + + logger.info(`Batch Plate: Updated ${numberOfRecordsUpdated} tech records and added ${numberOfSqsAdded} to SQS`); + } catch (err: unknown) { + logError('Error in batch processing', err); + throw (err); + } +}; diff --git a/src/handler/loadBatchPlate.ts b/src/handler/loadBatchPlate.ts new file mode 100644 index 00000000..ae030032 --- /dev/null +++ b/src/handler/loadBatchPlate.ts @@ -0,0 +1,96 @@ +import { + S3Client, GetObjectCommand, CopyObjectCommand, DeleteObjectCommand, +} from '@aws-sdk/client-s3'; +import { SQSClient, SendMessageCommand } from '@aws-sdk/client-sqs'; +import { S3Event } from 'aws-lambda'; +import { BatchPlateData } from '../models/batchPlate'; +import logger, { logError } from '../util/logger'; + +const s3Client = new S3Client({ region: process.env.DYNAMO_AWS_REGION }); +const sqsClient = new SQSClient({ region: process.env.DYNAMO_AWS_REGION }); + +export const handler = async (event: S3Event): Promise<void> => { + logger.info('Load batch plate called'); + + try { + await Promise.all(event.Records.map(processRecord)); + logger.info(`Successfully processed ${event.Records.length} files.`); + } catch (error) { + logError('Failed to process one or more files', error); + throw error; + } +}; +async function processRecord(record: S3Event['Records'][0]): Promise<void> { + const bucket = record.s3.bucket.name; + const key = decodeURIComponent(record.s3.object.key.replace(/\+/g, ' ')); + + logger.info(`Processing file: ${key} from ${bucket}`); + + try { + const data = await retrieveJSON(bucket, key); + await Promise.all(data.map((item) => sendToQueue(item))); + await moveProcessedFile(bucket, key); + logger.info(`Successfully processed and moved file: ${key}`); + } catch (error) { + logError(`Error processing file ${key}`, error); + throw error; + } +} + +/** + * This function will retrieve the json file from the provided s3 bucket + * Then, extract and validate the json file content + * @param bucket + * @param key + */ +async function retrieveJSON(bucket: string, key: string): Promise<BatchPlateData[]> { + const command = new GetObjectCommand({ Bucket: bucket, Key: key }); + const response = await s3Client.send(command); + const bodyContents = await response.Body?.transformToString(); + + if (!bodyContents) { + throw new Error('Empty JSON file'); + } + + try { + return JSON.parse(bodyContents) as BatchPlateData[]; + } catch (error) { + throw new Error(`Invalid JSON in file: ${error instanceof Error ? error.message : (error as string)}`); + } +} + +/** + * This function will send the systemNumber and createdTimestamp to the batch plate creation queue. 
+ * @param item + */ +async function sendToQueue(item: BatchPlateData): Promise<void> { + const command = new SendMessageCommand({ + QueueUrl: process.env.SQS_QUEUE_URL, + MessageBody: JSON.stringify(item), + }); + + await sqsClient.send(command); +} + +/** + * This function will copy the file that has been processed and move it to the processed folder + * Then, it will delete the original. + * @param bucket + * @param key + */ +async function moveProcessedFile(bucket: string, key: string): Promise<void> { + const newKey = `processed/${key}`; + + const copyCommand = new CopyObjectCommand({ + Bucket: bucket, + CopySource: `${bucket}/${key}`, + Key: newKey, + }); + await s3Client.send(copyCommand); + + const deleteCommand = new DeleteObjectCommand({ + Bucket: bucket, + Key: key, + }); + await s3Client.send(deleteCommand); +} \ No newline at end of file diff --git a/src/hotfix/cb2-11175/README.md b/src/hotfix/cb2-11175/README.md deleted file mode 100644 index 2e77d633..00000000 --- a/src/hotfix/cb2-11175/README.md +++ /dev/null @@ -1,10 +0,0 @@ -# Hotfix for CB2-11175: Add a plate to around 8000 current HGV and TRL records. - -We need to make plates avaliable for historical purposes. This requires adding a new plate to a lot of vehicles. -A new lambda makes the most sense as we need to remove the validation and also throttle the number of requests. - -## Considerations - -Data remediation app was not setup to update tech records. - -The side effect of the plate being generated will work with no modifications. \ No newline at end of file diff --git a/src/hotfix/cb2-11175/batchPlateCreation.ts b/src/hotfix/cb2-11175/batchPlateCreation.ts deleted file mode 100644 index 63e285b6..00000000 --- a/src/hotfix/cb2-11175/batchPlateCreation.ts +++ /dev/null @@ -1,103 +0,0 @@ -/* eslint-disable no-continue */ -import { TechRecordType as TechRecordTypeByVehicle } from '@dvsa/cvs-type-definitions/types/v3/tech-record/tech-record-vehicle-type'; -import { TechRecordType } from '@dvsa/cvs-type-definitions/types/v3/tech-record/tech-record-verb'; -import 'dotenv/config'; -import { v4 as uuidv4 } from 'uuid'; -import { PlateReasonForIssue, Plates } from '../../models/plate'; -import { DocumentName, SQSRequestBody } from '../../models/sqsPayload'; -import { getBySystemNumberAndCreatedTimestamp, inPlaceRecordUpdate } from '../../services/database'; -import { addToSqs } from '../../services/sqs'; -import { ERRORS, StatusCode } from '../../util/enum'; -import { formatErrorMessage } from '../../util/errorMessage'; -import { flattenArrays, formatTechRecord } from '../../util/formatTechRecord'; -import { addHttpHeaders } from '../../util/httpHeaders'; -import logger from '../../util/logger'; -import batchPlatesData from './resources/batchPlatesData.json'; - -export type BatchPlateData = { - systemNumber: string, - createdTimestamp: string -}; - -export const handler = async (batchPlateRecords: BatchPlateData[]) => { - try { - logger.info('Batch plate generation called'); - - const batchData = batchPlateRecords ?? 
batchPlatesData; - - const batchIssuerName = 'CVS Batch Plate Generation'; - let numberOfRecordsUpdated = 0; - let numberOfSqsAdded = 0; - - // eslint-disable-next-line no-restricted-syntax - for await (const data of batchData) { - const { systemNumber, createdTimestamp } = data; - logger.info(`Get from database with sysNum ${systemNumber} and timestamp ${createdTimestamp}`); - - try { - const record = await getBySystemNumberAndCreatedTimestamp(systemNumber, createdTimestamp); - logger.debug(`result is: ${JSON.stringify(record)}`); - - if (!record || !Object.keys(record).length) { - logger.error(`Missing record with sysNum ${systemNumber} and timestamp ${createdTimestamp}`); - continue; - } - if (record.techRecord_statusCode !== StatusCode.CURRENT) { - logger.error(`Non current record with sysNum ${systemNumber} and timestamp ${createdTimestamp}`); - continue; - } - if (record.techRecord_vehicleType !== 'trl' && record.techRecord_vehicleType !== 'hgv') { - logger.error(`Non trl or hgv record with sysNum ${systemNumber} and timestamp ${createdTimestamp}`); - continue; - } - - const newPlate: Plates = { - plateSerialNumber: uuidv4(), - plateIssueDate: new Date().toISOString(), - plateReasonForIssue: PlateReasonForIssue.REPLACEMENT, - plateIssuer: batchIssuerName, - }; - - const arrayifiedRecord = formatTechRecord<TechRecordTypeByVehicle<'hgv'> | TechRecordTypeByVehicle<'trl'>>(record); - - if (arrayifiedRecord.techRecord_plates) { - if (arrayifiedRecord.techRecord_plates.some((plate) => plate.plateIssuer === batchIssuerName)) continue; - arrayifiedRecord.techRecord_plates.push(newPlate); - } else { - arrayifiedRecord.techRecord_plates = [newPlate]; - } - - const normalisedRecord = flattenArrays(arrayifiedRecord) as TechRecordType<'get'>; - await inPlaceRecordUpdate(normalisedRecord); - - numberOfRecordsUpdated += 1; - - const plateSqsPayload: SQSRequestBody = { - techRecord: arrayifiedRecord, - plate: newPlate, - documentName: DocumentName.MINISTRY, - recipientEmailAddress: '', - }; - - logger.debug(JSON.stringify(plateSqsPayload)); - - await addToSqs(plateSqsPayload, process.env.DOC_GEN_SQS_QUEUE ?? 
''); - - numberOfSqsAdded += 1; - } catch (err) { - logger.error(`${systemNumber}, ${createdTimestamp}, ${JSON.stringify(err)}`); - } - } - - return addHttpHeaders({ - statusCode: 200, - body: `Batch Plate: Updated ${numberOfRecordsUpdated} tech records and added ${numberOfSqsAdded} to sqs`, - }); - } catch (e) { - logger.error(e); - return addHttpHeaders({ - statusCode: 500, - body: formatErrorMessage(ERRORS.FAILED_UPDATE_MESSAGE), - }); - } -}; diff --git a/src/hotfix/cb2-11175/tests/integration/batchPlateCreation.int.test.ts b/src/hotfix/cb2-11175/tests/integration/batchPlateCreation.int.test.ts deleted file mode 100644 index 968cfe65..00000000 --- a/src/hotfix/cb2-11175/tests/integration/batchPlateCreation.int.test.ts +++ /dev/null @@ -1,150 +0,0 @@ -import { chunk } from 'lodash'; -import { seedTables } from '../../../../../scripts/setup-local-tables'; -import { tableName } from '../../../../config'; -import { getBySystemNumberAndCreatedTimestamp } from '../../../../services/database'; -import logger from '../../../../util/logger'; -import { handler } from '../../batchPlateCreation'; -import event from '../resources/event.json'; -import techRecordData from '../resources/technical-records-v3-no-plates.json'; - -const batchIssuerName = 'CVS Batch Plate Generation'; - -describe('batch plate creation', () => { - beforeAll(async () => { - jest.useFakeTimers(); - jest.setSystemTime(new Date()); - - const techRecordChunks = chunk(techRecordData, 25); - /* eslint-disable-next-line no-restricted-syntax */ - for (const techRecordChunk of techRecordChunks) { - /* eslint-disable-next-line no-await-in-loop */ - await seedTables([{ - table: tableName, - data: techRecordChunk, - }]); - } - }); - - describe('happy path', () => { - it('should work when I give it a payload of plates to fix', async () => { - process.env.AWS_SAM_LOCAL = 'true'; - const res = await handler(event); - - expect(res.statusCode).toBe(200); - expect(res.body).toBe(`Batch Plate: Updated ${event.length} tech records and added ${event.length} to sqs`); - - const inspectPlateOne = await getBySystemNumberAndCreatedTimestamp( - event[0].systemNumber, - event[0].createdTimestamp, - ); - const inspectPlateThree = await getBySystemNumberAndCreatedTimestamp( - event[2].systemNumber, - event[2].createdTimestamp, - ); - - expect(inspectPlateOne).toEqual(expect.objectContaining({ techRecord_plates_0_plateIssuer: batchIssuerName })); - expect(inspectPlateThree).toEqual(expect.objectContaining({ techRecord_plates_0_plateIssuer: batchIssuerName })); - }); - }); - - describe('sad path', () => { - beforeEach(() => { - jest.resetAllMocks(); - }); - - it('should only complete 1 of two updates if it cant find a vehicle', async () => { - process.env.AWS_SAM_LOCAL = 'true'; - - const inputEvent = [ - { systemNumber: 'bad', createdTimestamp: 'data' }, - { systemNumber: '12345690', createdTimestamp: '2024-01-30T09:10:32.594Z' }, - ]; - - const spy = jest.spyOn(logger, 'error'); - - const res = await handler(inputEvent); - - expect(spy).toHaveBeenCalledWith( - `Missing record with sysNum ${inputEvent[0].systemNumber} and timestamp ${inputEvent[0].createdTimestamp}`, - ); - - expect(res.statusCode).toBe(200); - expect(res.body).toBe(`Batch Plate: Updated ${inputEvent.length - 1} tech records and added ${inputEvent.length - 1} to sqs`); - - const inspectPlateOne = await getBySystemNumberAndCreatedTimestamp( - inputEvent[0].systemNumber, - inputEvent[0].createdTimestamp, - ); - const inspectPlateTwo = await getBySystemNumberAndCreatedTimestamp( - 
inputEvent[1].systemNumber, - inputEvent[1].createdTimestamp, - ); - - expect(inspectPlateOne).not.toHaveProperty('techRecord_plates_0_plateIssuer'); - expect(inspectPlateTwo).toEqual(expect.objectContaining({ techRecord_plates_0_plateIssuer: batchIssuerName })); - }); - - it('should only complete 1 of two updates if the record isnt current', async () => { - process.env.AWS_SAM_LOCAL = 'true'; - - const inputEvent = [ - { systemNumber: '12345688', createdTimestamp: '2024-01-31T15:18:53.501Z' }, - { systemNumber: '12345691', createdTimestamp: '2024-01-30T09:01:10.851Z' }, - ]; - - const spy = jest.spyOn(logger, 'error'); - - const res = await handler(inputEvent); - - expect(spy).toHaveBeenCalledWith( - `Non current record with sysNum ${inputEvent[0].systemNumber} and timestamp ${inputEvent[0].createdTimestamp}`, - ); - - expect(res.statusCode).toBe(200); - expect(res.body).toBe(`Batch Plate: Updated ${inputEvent.length - 1} tech records and added ${inputEvent.length - 1} to sqs`); - - const inspectPlateOne = await getBySystemNumberAndCreatedTimestamp( - inputEvent[0].systemNumber, - inputEvent[0].createdTimestamp, - ); - const inspectPlateTwo = await getBySystemNumberAndCreatedTimestamp( - inputEvent[1].systemNumber, - inputEvent[1].createdTimestamp, - ); - - expect(inspectPlateOne).not.toHaveProperty('techRecord_plates_0_plateIssuer'); - expect(inspectPlateTwo).toEqual(expect.objectContaining({ techRecord_plates_0_plateIssuer: batchIssuerName })); - }); - - it('should only complete 1 of two updates if the record isnt a trl or hgv', async () => { - process.env.AWS_SAM_LOCAL = 'true'; - - const inputEvent = [ - { systemNumber: 'SNINVALIDCLASS', createdTimestamp: '2024-01-08T09:14:36.351Z' }, - { systemNumber: '12345692', createdTimestamp: '2024-01-29T14:57:30.871Z' }, - ]; - - const spy = jest.spyOn(logger, 'error'); - - const res = await handler(inputEvent); - - expect(spy).toHaveBeenCalledWith( - `Non trl or hgv record with sysNum ${inputEvent[0].systemNumber} and timestamp ${inputEvent[0].createdTimestamp}`, - ); - expect(res.statusCode).toBe(200); - expect(res.body).toBe(`Batch Plate: Updated ${inputEvent.length - 1} tech records and added ${inputEvent.length - 1} to sqs`); - - const inspectPlateOne = await getBySystemNumberAndCreatedTimestamp( - inputEvent[0].systemNumber, - inputEvent[0].createdTimestamp, - ); - const inspectPlateTwo = await getBySystemNumberAndCreatedTimestamp( - inputEvent[1].systemNumber, - inputEvent[1].createdTimestamp, - ); - - expect(inspectPlateOne).not.toHaveProperty('techRecord_plates_0_plateIssuer'); - expect(inspectPlateTwo).toEqual(expect.objectContaining({ techRecord_plates_0_plateIssuer: batchIssuerName })); - }); - }); -}); diff --git a/src/hotfix/cb2-11175/tests/resources/event.json b/src/hotfix/cb2-11175/tests/resources/event.json deleted file mode 100644 index 86067b7e..00000000 --- a/src/hotfix/cb2-11175/tests/resources/event.json +++ /dev/null @@ -1,7 +0,0 @@ -[ - { "systemNumber": "12345678", "createdTimestamp": "2024-02-05T14:35:00.777Z"}, - { "systemNumber": "12345680", "createdTimestamp": "2024-02-05T11:14:29.738Z"}, - { "systemNumber": "12345681", "createdTimestamp": "2024-02-05T10:02:48.766Z"}, - { "systemNumber": "12345682", "createdTimestamp": "2024-02-05T08:27:22.596Z"}, - { "systemNumber": "12345683", "createdTimestamp": "2024-02-02T12:00:08.761Z"} -] \ No newline at end of file diff --git a/src/models/batchPlate.ts b/src/models/batchPlate.ts new file mode 100644 index 00000000..d357ac64 --- /dev/null +++ b/src/models/batchPlate.ts @@ 
-0,0 +1,4 @@ +export type BatchPlateData = { + systemNumber: string, + createdTimestamp: string +}; diff --git a/src/util/logger.ts b/src/util/logger.ts index 5876c674..29fa1586 100644 --- a/src/util/logger.ts +++ b/src/util/logger.ts @@ -15,4 +15,12 @@ if (process.env.NODE_ENV !== 'production') { })); } +export const logError = (message: string, error: unknown): void => { + const errorInfo = error instanceof Error + ? { errorMessage: error.message } + : { error: String(error) }; + + logger.error(message, { ...errorInfo }); +}; + export default logger; diff --git a/template.yml b/template.yml index 03333889..865f0ccd 100644 --- a/template.yml +++ b/template.yml @@ -187,7 +187,7 @@ Resources: BatchPlateCreation: Type: 'AWS::Serverless::Function' Properties: - CodeUri: src/hotfix/cb2-11175/ + CodeUri: src/handler/ Handler: batchPlateCreation.handler Runtime: nodejs18.x Timeout: 20 @@ -207,6 +207,23 @@ Resources: Path: /v3/technical-records/uploadPlateSeed Method: get + LoadBatchPlate: + Type: 'AWS::Serverless::Function' + Properties: + CodeUri: src/handler/ + Handler: loadBatchPlate.handler + Runtime: nodejs18.x + Timeout: 20 + Events: + S3Event: + Type: S3 + Events: s3:ObjectCreated:* + Filter: + S3Key: + Rules: + - Name: prefix + Value: value + LocalQueue: Type: AWS::SQS::Queue diff --git a/src/hotfix/cb2-11175/resources/batchPlatesData.json b/tests/resources/batchPlatesData.json similarity index 99% rename from src/hotfix/cb2-11175/resources/batchPlatesData.json rename to tests/resources/batchPlatesData.json index dfc72622..c99ffe95 100644 --- a/src/hotfix/cb2-11175/resources/batchPlatesData.json +++ b/tests/resources/batchPlatesData.json @@ -51,4 +51,4 @@ { "systemNumber": "12345728", "createdTimestamp": "2024-01-08 15:39:04.808" }, { "systemNumber": "12345729", "createdTimestamp": "2024-01-08 15:00:04.361" }, { "systemNumber": "12345730", "createdTimestamp": "2024-01-08 09:14:36.351" } - ] \ No newline at end of file + ] diff --git a/src/hotfix/cb2-11175/tests/resources/technical-records-v3-no-plates.json b/tests/resources/technical-records-v3-no-plates.json similarity index 100% rename from src/hotfix/cb2-11175/tests/resources/technical-records-v3-no-plates.json rename to tests/resources/technical-records-v3-no-plates.json diff --git a/tests/unit/handler/batchPlateCreation.unit.test.ts b/tests/unit/handler/batchPlateCreation.unit.test.ts new file mode 100644 index 00000000..34eb0b77 --- /dev/null +++ b/tests/unit/handler/batchPlateCreation.unit.test.ts @@ -0,0 +1,254 @@ +import { SQSEvent } from 'aws-lambda'; +import { v4 as uuidv4 } from 'uuid'; +import { TechRecordGETHGV, TechRecordGETTRL } from '@dvsa/cvs-type-definitions/types/v3/tech-record/tech-record-verb-vehicle-type'; +import { TechRecordComplete } from '@dvsa/cvs-type-definitions/types/v3/tech-record/tech-record-status'; +import { getBySystemNumberAndCreatedTimestamp, inPlaceRecordUpdate } from '../../../src/services/database'; +import { formatTechRecord } from '../../../src/util/formatTechRecord'; +import { addToSqs } from '../../../src/services/sqs'; +import { handler } from '../../../src/handler/batchPlateCreation'; +import logger, { logError } from '../../../src/util/logger'; +import { StatusCode } from '../../../src/util/enum'; + +jest.mock('uuid'); +jest.mock('../../../src/services/database'); +jest.mock('../../../src/util/formatTechRecord'); +jest.mock('../../../src/services/sqs'); +jest.mock('../../../src/util/logger'); +jest.mock('../../../src/util/errorMessage'); +jest.mock('@aws-sdk/client-sqs'); + +describe('Batch 
Plate Handler', () => { + beforeEach(() => { + jest.clearAllMocks(); + process.env.DOC_GEN_SQS_QUEUE = 'test-queue-url'; + }); + + it('should process valid records successfully', async () => { + const mockEvent = createMockSQSEvent([ + { systemNumber: '12345690', createdTimestamp: '2024-01-30T09:10:32.594Z' }, + { systemNumber: '12345691', createdTimestamp: '2024-01-30T09:01:10.851Z' }, + ]); + + const mockDbRecord1 = { + systemNumber: '12345690', + createdTimestamp: '2024-01-30T09:10:32.594Z', + techRecord_statusCode: StatusCode.CURRENT, + techRecord_vehicleType: 'hgv', + } as TechRecordGETHGV; + const mockDbRecord2 = { + systemNumber: '12345691', + createdTimestamp: '2024-01-30T09:01:10.851Z', + techRecord_statusCode: StatusCode.CURRENT, + techRecord_vehicleType: 'trl', + } as TechRecordGETTRL; + + (getBySystemNumberAndCreatedTimestamp as jest.Mock) + .mockResolvedValueOnce(mockDbRecord1) + .mockResolvedValueOnce(mockDbRecord2); + + (formatTechRecord as jest.Mock).mockImplementation((record) => ({ + ...record, + techRecord_plates: [], + } as TechRecordComplete)); + + (uuidv4 as jest.Mock).mockReturnValue('mock-uuid'); + + await handler(mockEvent); + + expect(getBySystemNumberAndCreatedTimestamp).toHaveBeenCalledTimes(2); + expect(inPlaceRecordUpdate).toHaveBeenCalledTimes(2); + expect(addToSqs).toHaveBeenCalledTimes(2); + expect(logger.info).toHaveBeenCalledWith('Batch Plate: Updated 2 tech records and added 2 to SQS'); + expect(logError).not.toHaveBeenCalled(); + }); + + it('should handle missing records', async () => { + const mockEvent = createMockSQSEvent([ + { systemNumber: '1234234', createdTimestamp: '2023-01-01T00:00:00.000Z' }, + ]); + + (getBySystemNumberAndCreatedTimestamp as jest.Mock).mockResolvedValue(undefined); + + await expect(handler(mockEvent)).rejects.toThrow('Missing record: sysNum 1234234, timestamp 2023-01-01T00:00:00.000Z'); + expect(inPlaceRecordUpdate).not.toHaveBeenCalled(); + expect(addToSqs).not.toHaveBeenCalled(); + expect(logError).toHaveBeenCalledWith('Error in batch processing', new Error( + 'Missing record: sysNum 1234234, timestamp 2023-01-01T00:00:00.000Z', + )); + }); + + it('should handle non current records', async () => { + const mockEvent = createMockSQSEvent([ + { systemNumber: '12345679', createdTimestamp: '2024-02-05T11:40:52.073Z' }, + ]); + + const mockDbRecord = { + systemNumber: '12345679', + createdTimestamp: '2024-02-05T11:40:52.073Z', + techRecord_statusCode: 'archived' as StatusCode, + techRecord_vehicleType: 'hgv', + } as TechRecordGETHGV; + (getBySystemNumberAndCreatedTimestamp as jest.Mock).mockResolvedValue(mockDbRecord); + + await expect(handler(mockEvent)).rejects.toThrow('Non current record: statusCode archived'); + expect(inPlaceRecordUpdate).not.toHaveBeenCalled(); + expect(addToSqs).not.toHaveBeenCalled(); + expect(logError).toHaveBeenCalledWith( + 'Error in batch processing', + new Error('Non current record: statusCode archived'), + ); + }); + + it('should handle non TRL and non HGV records', async () => { + const mockEvent = createMockSQSEvent([ + { systemNumber: '43124234', createdTimestamp: '2024-01-08T09:14:36.351Z' }, + ]); + const mockDbRecord = { + systemNumber: '43124234', + createdTimestamp: '2024-01-08T09:14:36.351Z', + techRecord_statusCode: StatusCode.CURRENT, + techRecord_vehicleType: 'lgv', + }; + (getBySystemNumberAndCreatedTimestamp as jest.Mock).mockResolvedValue(mockDbRecord); + + await expect(handler(mockEvent)).rejects.toThrow('Invalid vehicle type: lgv'); + 
expect(inPlaceRecordUpdate).not.toHaveBeenCalled(); + expect(addToSqs).not.toHaveBeenCalled(); + expect(logError).toHaveBeenCalledWith('Error in batch processing', new Error('Invalid vehicle type: lgv')); + }); + + it('should not add a new plate when a batch issuer plate already exists', async () => { + const mockEvent = createMockSQSEvent([ + { systemNumber: '5345635', createdTimestamp: '2024-03-17T10:00:00.000Z' }, + ]); + const mockDbRecord = { + systemNumber: '5345635', + createdTimestamp: '2024-03-17T10:00:00.000Z', + techRecord_statusCode: StatusCode.CURRENT, + techRecord_vehicleType: 'hgv', + techRecord_plates: [ + { + plateSerialNumber: 'existing-batch-plate', + plateIssueDate: '2024-01-01T00:00:00.000Z', + plateReasonForIssue: 'Replacement', + plateIssuer: 'CVS Batch Plate Generation', + }, + ], + } as TechRecordGETHGV; + + (getBySystemNumberAndCreatedTimestamp as jest.Mock).mockResolvedValue(mockDbRecord); + (formatTechRecord as jest.Mock).mockImplementation((record) => ({ ...record } as TechRecordGETHGV)); + + await handler(mockEvent); + + expect(logger.info).toHaveBeenCalledWith('Batch Plate: Updated 0 tech records and added 0 to SQS'); + expect(inPlaceRecordUpdate).not.toHaveBeenCalled(); + expect(addToSqs).not.toHaveBeenCalled(); + expect(logError).not.toHaveBeenCalled(); + }); + + it('should add a new plate when the record has no existing plates', async () => { + const mockEvent = createMockSQSEvent([ + { systemNumber: '9876543', createdTimestamp: '2024-03-20T10:00:00.000Z' }, + ]); + + const mockDbRecord = { + systemNumber: '9876543', + createdTimestamp: '2024-03-20T10:00:00.000Z', + techRecord_statusCode: StatusCode.CURRENT, + techRecord_vehicleType: 'hgv', + }; + + (getBySystemNumberAndCreatedTimestamp as jest.Mock).mockResolvedValue(mockDbRecord); + (formatTechRecord as jest.Mock).mockImplementation((record) => ({ ...record } as TechRecordGETHGV)); + (uuidv4 as jest.Mock).mockReturnValue('new-plate-uuid'); + + const mockDate = new Date('2024-03-20T12:00:00.000Z'); + jest.spyOn(global, 'Date').mockImplementation(() => mockDate); + + await handler(mockEvent); + + expect(inPlaceRecordUpdate).toHaveBeenCalledTimes(1); + expect(addToSqs).toHaveBeenCalledTimes(1); + expect(logger.info).toHaveBeenCalledWith('Batch Plate: Updated 1 tech records and added 1 to SQS'); + expect(logError).not.toHaveBeenCalled(); + }); + + it('should handle errors during individual record processing', async () => { + const mockEvent = createMockSQSEvent([ + { systemNumber: '6574567', createdTimestamp: '2023-01-01T00:00:00.000Z' }, + { systemNumber: '6574568', createdTimestamp: '2023-01-02T00:00:00.000Z' }, + ]); + const mockError = new Error('DynamoDB error'); + (getBySystemNumberAndCreatedTimestamp as jest.Mock) + .mockRejectedValueOnce(mockError) + .mockResolvedValueOnce({ + systemNumber: '6574568', + createdTimestamp: '2023-01-02T00:00:00.000Z', + techRecord_statusCode: StatusCode.CURRENT, + techRecord_vehicleType: 'hgv', + }); + + (formatTechRecord as jest.Mock).mockImplementation((record) => ({ + ...record, + techRecord_plates: [], + } as TechRecordGETHGV)); + + await expect(handler(mockEvent)).rejects.toThrow('DynamoDB error'); + expect(inPlaceRecordUpdate).toHaveBeenCalledTimes(1); + expect(addToSqs).toHaveBeenCalledTimes(1); + expect(logError).toHaveBeenCalledWith('Error in batch processing', new Error('DynamoDB error')); + }); + + it('should use an empty string for SQS queue URL when DOC_GEN_SQS_QUEUE is undefined', async () => { + const mockEvent = createMockSQSEvent([ + { systemNumber: 
'1122334', createdTimestamp: '2024-03-21T10:00:00.000Z' }, + ]); + + const mockDbRecord = { + systemNumber: '1122334', + createdTimestamp: '2024-03-21T10:00:00.000Z', + techRecord_statusCode: StatusCode.CURRENT, + techRecord_vehicleType: 'hgv', + techRecord_plates: [], + }; + + (getBySystemNumberAndCreatedTimestamp as jest.Mock).mockResolvedValue(mockDbRecord); + (formatTechRecord as jest.Mock).mockImplementation((record) => ({ ...record } as TechRecordGETHGV)); + (uuidv4 as jest.Mock).mockReturnValue('new-uuid'); + + const mockDate = new Date('2024-03-21T12:00:00.000Z'); + jest.spyOn(global, 'Date').mockImplementation(() => mockDate); + const originalEnv = process.env.DOC_GEN_SQS_QUEUE; + delete process.env.DOC_GEN_SQS_QUEUE; + + await handler(mockEvent); + + process.env.DOC_GEN_SQS_QUEUE = originalEnv; + + expect(addToSqs).toHaveBeenCalledTimes(1); + expect(logger.info).toHaveBeenCalledWith('Batch Plate: Updated 1 tech records and added 1 to SQS'); + expect(logError).not.toHaveBeenCalled(); + }); +}); + +function createMockSQSEvent(records: { systemNumber: string; createdTimestamp: string }[]): SQSEvent { + return { + Records: records.map((record) => ({ + messageId: 'messageId', + receiptHandle: 'receiptHandle', + body: JSON.stringify(record), + attributes: { + ApproximateReceiveCount: '1', + SentTimestamp: '1523232000000', + SenderId: '123456789012', + ApproximateFirstReceiveTimestamp: '1523232000001', + }, + messageAttributes: {}, + md5OfBody: 'test-md5', + eventSource: 'aws:sqs', + eventSourceARN: 'arn:aws:sqs:us-east-1:14342343:MyQueue', + awsRegion: 'us-east-1', + })), + }; +} diff --git a/tests/unit/handler/loadBatchPlate.unit.test.ts b/tests/unit/handler/loadBatchPlate.unit.test.ts new file mode 100644 index 00000000..ac2e983d --- /dev/null +++ b/tests/unit/handler/loadBatchPlate.unit.test.ts @@ -0,0 +1,144 @@ +import { S3Event } from 'aws-lambda'; +import { + S3Client, CopyObjectCommand, +} from '@aws-sdk/client-s3'; +import { SQSClient } from '@aws-sdk/client-sqs'; +import { handler } from '../../../src/handler/loadBatchPlate'; +import logger from '../../../src/util/logger'; + +jest.mock('@aws-sdk/client-s3'); +jest.mock('@aws-sdk/client-sqs'); +jest.mock('../../../src/util/logger'); + +const mockS3Send = jest.fn(); +const mockSQSSend = jest.fn(); + +(S3Client.prototype.send as jest.Mock) = mockS3Send; +(SQSClient.prototype.send as jest.Mock) = mockSQSSend; + +describe('S3 Event Handler', () => { + beforeEach(() => { + jest.clearAllMocks(); + process.env.DYNAMO_AWS_REGION = 'eu-west-1'; + process.env.SQS_QUEUE_URL = 'https://sqs.eu-west-1.amazonaws.com/123456789012/test-queue'; + }); + + it('should process valid records successfully', async () => { + const mockEvent = createMockS3Event(['test-file1.json', 'test-file2.json']); + const mockData = [{ systemNumber: '12345', createdTimestamp: '2023-01-01T00:00:00.000Z' }]; + + mockS3Send.mockImplementation(() => ({ + Body: { transformToString: () => Promise.resolve(JSON.stringify(mockData)) }, + })); + + mockSQSSend.mockResolvedValue({}); + + await handler(mockEvent); + + expect(mockS3Send).toHaveBeenCalledTimes(6); + expect(mockSQSSend).toHaveBeenCalledTimes(2); + expect(logger.info).toHaveBeenCalledWith('Successfully processed 2 files.'); + }); + + it('should handle empty JSON files', async () => { + const mockEvent = createMockS3Event(['emptyFile.json']); + + mockS3Send.mockResolvedValue({ + Body: { transformToString: () => Promise.resolve('') }, + }); + + await expect(handler(mockEvent)).rejects.toThrow('Empty JSON file'); + 
expect(mockS3Send).toHaveBeenCalledTimes(1); + expect(mockSQSSend).not.toHaveBeenCalled(); + }); + + it('should handle invalid JSON files', async () => { + const mockEvent = createMockS3Event(['invalidFile.json']); + + mockS3Send.mockResolvedValue({ + Body: { transformToString: () => Promise.resolve('{ invalid json }') }, + }); + + await expect(handler(mockEvent)).rejects.toThrow('Invalid JSON in file'); + expect(mockS3Send).toHaveBeenCalledTimes(1); + expect(mockSQSSend).not.toHaveBeenCalled(); + }); + + it('should handle S3 errors', async () => { + const mockEvent = createMockS3Event(['errorFile.json']); + + mockS3Send.mockRejectedValue(new Error('S3 Error')); + + await expect(handler(mockEvent)).rejects.toThrow('S3 Error'); + expect(mockS3Send).toHaveBeenCalledTimes(1); + expect(mockSQSSend).not.toHaveBeenCalled(); + }); + + it('should handle SQS errors', async () => { + const mockEvent = createMockS3Event(['testFile.json']); + const mockData = [{ systemNumber: '12345', createdTimestamp: '2023-01-01T00:00:00.000Z' }]; + + mockS3Send.mockResolvedValue({ + Body: { transformToString: () => Promise.resolve(JSON.stringify(mockData)) }, + }); + + mockSQSSend.mockRejectedValue(new Error('SQS Error')); + + await expect(handler(mockEvent)).rejects.toThrow('SQS Error'); + expect(mockS3Send).toHaveBeenCalledTimes(1); + expect(mockSQSSend).toHaveBeenCalledTimes(1); + }); + + it('should handle errors when moving processed files', async () => { + const mockEvent = createMockS3Event(['testFile.json']); + const mockData = [{ systemNumber: '12345', createdTimestamp: '2023-01-01T00:00:00.000Z' }]; + + mockS3Send.mockImplementation((command) => { + if (command instanceof CopyObjectCommand) { + return Promise.reject(new Error('Copy Error')); + } + return Promise.resolve({ + Body: { transformToString: () => Promise.resolve(JSON.stringify(mockData)) }, + }); + }); + + mockSQSSend.mockResolvedValue({}); + + await expect(handler(mockEvent)).rejects.toThrow('Copy Error'); + expect(mockS3Send).toHaveBeenCalledTimes(2); + expect(mockSQSSend).toHaveBeenCalledTimes(1); + }); +}); + +function createMockS3Event(keys: string[]): S3Event { + return { + Records: keys.map((key) => ({ + eventVersion: '2.0', + eventSource: 'aws:s3', + awsRegion: 'eu-west-1', + eventTime: '2024-01-01T02:00:00.000Z', + eventName: 'ObjectCreated:Put', + userIdentity: { principalId: 'EXAMPLE' }, + requestParameters: { sourceIPAddress: '0.0.0.0' }, + responseElements: { + 'x-amz-request-id': '123-123123123-123123', + 'x-amz-id-2': '12344-1231243123-123123', + }, + s3: { + s3SchemaVersion: '1.0', + configurationId: 'testConfiguration', + bucket: { + name: 'example-bucket', + ownerIdentity: { principalId: 'EXAMPLE' }, + arn: 'arn:aws:s3:::example-bucket', + }, + object: { + key, + size: 1024, + eTag: '435235435gsdfgdfsbsfdbsfgdbs', + sequencer: '65363456gbdfbgfbdfbf', + }, + }, + })), + }; +} diff --git a/webpack/webpack.production.js b/webpack/webpack.production.js index 92a850ec..16c2cffc 100644 --- a/webpack/webpack.production.js +++ b/webpack/webpack.production.js @@ -10,7 +10,8 @@ const AwsSamPlugin = require("aws-sam-webpack-plugin"); const LAMBDA_NAMES = ['SearchLambdaFunction', 'GetLambdaFunction', 'PostLambdaFunction', 'PatchLambdaFunction', 'ArchiveLambdaFunction', 'UnarchiveLambdaFunction', 'PromoteLambdaFunction', 'UpdateVrmFunction', 'UpdateVinFunction', 'GeneratePlateFunction', 'GenerateLetterFunction', 'SyncTestResultInfoFunction', - 'GenerateAdrCertificateFunction', 'RemoveInvalidPrimaryVrms', 'BatchPlateCreation', 'MotUpdateVrm', 
'UploadPlateSeed']; + 'GenerateAdrCertificateFunction', 'RemoveInvalidPrimaryVrms', 'BatchPlateCreation', 'MotUpdateVrm', 'LoadBatchPlate', + 'UploadPlateSeed']; const OUTPUT_FOLDER = './' const REPO_NAME = 'cvs-svc-technical-records-v3'; const BRANCH_NAME = branchName().replace(/\//g, "-");
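
For reviewers, the new flow can be exercised end to end by dropping a seed file into the bucket wired up to LoadBatchPlate: the handler parses the object as a BatchPlateData[] array, sends one SQS message per entry, moves the file to processed/, and BatchPlateCreation then issues a plate per current HGV/TRL record. A minimal sketch of such an upload is below; the bucket name and object key are illustrative placeholders, not values from this change.

```ts
// Sketch only: seed upload that would trigger LoadBatchPlate via s3:ObjectCreated.
// 'example-batch-plate-bucket' and 'batchPlatesData.json' are illustrative names.
import { S3Client, PutObjectCommand } from '@aws-sdk/client-s3';
import { BatchPlateData } from './src/models/batchPlate';

const seed: BatchPlateData[] = [
  { systemNumber: '12345678', createdTimestamp: '2024-02-05T14:35:00.777Z' },
  { systemNumber: '12345680', createdTimestamp: '2024-02-05T11:14:29.738Z' },
];

const s3 = new S3Client({ region: process.env.DYNAMO_AWS_REGION });

// LoadBatchPlate reads this object, sends one SQS message per entry, and
// BatchPlateCreation adds a plate to each record and queues a doc-gen request.
async function uploadSeed(): Promise<void> {
  await s3.send(new PutObjectCommand({
    Bucket: 'example-batch-plate-bucket',
    Key: 'batchPlatesData.json',
    Body: JSON.stringify(seed),
    ContentType: 'application/json',
  }));
}

uploadSeed().catch((err) => { console.error(err); process.exit(1); });
```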