Skip to content

Commit

Permalink
feat: onboard algolia destination to cdk 2.0 (#1474)
Browse files Browse the repository at this point in the history
* feat: onboard algolia to cdk 2.0

* fix: use the dependent modules from v0 destination

* feat: add router workflow

* feat: error handling added to router transform handler

* refactor: remove redundant code

* refactor: router transform workflow

* Fix jsonata single element array issue

* Fix versionedRouter.js

* Fix versionedRouter to handle when RT dest is supported

* Add live compare test versionedRouter test files

* Fix PR comments

* Upgrade rudder-workflow-engine

* Refactor handleCDKV2 to processCdkV2Workflow

Co-authored-by: Dilip Kumar Kola <dilipkola@rudderstack.com>
Co-authored-by: Dilip Kola <33080863+koladilip@users.noreply.github.com>
  • Loading branch information
3 people committed Oct 28, 2022
1 parent 00d16a3 commit e716d84
Show file tree
Hide file tree
Showing 21 changed files with 1,695 additions and 153 deletions.
64 changes: 64 additions & 0 deletions __tests__/algolia-cdk.test.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,64 @@
const fs = require("fs");
const path = require("path");
const { TRANSFORMER_METRIC } = require("../v0/util/constant");
const { processCdkV2Workflow } = require("../cdk/v2/handler");

const integration = "algolia";
const name = "Algolia";

/**
 * Loads and parses one JSON fixture for this integration.
 * @param {string} suffix - fixture file suffix, e.g. "_input" or "_router_output"
 * @returns {Array<Object>} parsed fixture contents
 */
const loadFixture = suffix =>
  JSON.parse(
    fs.readFileSync(
      path.resolve(__dirname, `./data/${integration}${suffix}.json`)
    )
  );

const inputData = loadFixture("_input");
const expectedData = loadFixture("_output");
const inputRouterData = loadFixture("_router_input");
const expectedRouterData = loadFixture("_router_output");

/**
 * Runs one table-driven case against the CDK v2 workflow.
 *
 * Error fixtures are recognized by the presence of a `message` key. For those
 * we catch only around the workflow call and assert afterwards; wrapping
 * `expect` itself in try/catch (as the previous version did) catches Jest's
 * own assertion error and re-compares its diff text, obscuring real failures.
 *
 * @param {Object} input - single event payload from the input fixture
 * @param {Object|Array} expected - matching entry from the output fixture
 * @param {string} feature - TRANSFORMER_METRIC.ERROR_AT stage (PROC or RT)
 */
const runTestCase = async (input, expected, feature) => {
  if (expected && expected.message !== undefined) {
    // Error case: the workflow is expected to throw with this exact message.
    let thrownMessage;
    try {
      await processCdkV2Workflow(integration, input, feature);
    } catch (error) {
      thrownMessage = error.message;
    }
    expect(thrownMessage).toEqual(expected.message);
  } else {
    // Success case: an unexpected throw propagates and fails the test clearly.
    const output = await processCdkV2Workflow(integration, input, feature);
    expect(output).toEqual(expected);
  }
};

describe(`${name} Tests`, () => {
  describe("Processor Tests", () => {
    inputData.forEach((input, index) => {
      it(`${name} - payload: ${index}`, async () => {
        await runTestCase(
          input,
          expectedData[index],
          TRANSFORMER_METRIC.ERROR_AT.PROC
        );
      });
    });
  });

  describe("Router Tests", () => {
    inputRouterData.forEach((input, index) => {
      it(`${name} - payload: ${index}`, async () => {
        await runTestCase(
          input,
          expectedRouterData[index],
          TRANSFORMER_METRIC.ERROR_AT.RT
        );
      });
    });
  });
});
47 changes: 21 additions & 26 deletions __tests__/algolia.test.js
Original file line number Diff line number Diff line change
Expand Up @@ -30,33 +30,28 @@ inputData.forEach((input, index) => {
});
});

const batchInputDataFile = fs.readFileSync(
path.resolve(__dirname, `./data/${integration}_batch_input.json`)
const routerInputDataFile = fs.readFileSync(
path.resolve(__dirname, `./data/${integration}_router_input.json`)
);
const batchOutputDataFile = fs.readFileSync(
path.resolve(__dirname, `./data/${integration}_batch_output.json`)
const routerInputData = JSON.parse(routerInputDataFile);
const routerOutputDataFile = fs.readFileSync(
path.resolve(__dirname, `./data/${integration}_router_output.json`)
);
const routerOutputData = JSON.parse(routerOutputDataFile);

// const batchInputData = JSON.parse(batchInputDataFile);
// const batchExpectedData = JSON.parse(batchOutputDataFile);

// batchInputData.forEach((input, index) => {
// test(`${name} Batching ${index}`, () => {
// const output = transformer.batch(input);
// //console.log(output);
// expect(Array.isArray(output)).toEqual(true);
// expect(output.length).toEqual(batchExpectedData[index].length);
// output.forEach((input, indexInner) => {
// expect(output[indexInner]).toEqual(batchExpectedData[index][indexInner]);
// })
// });
// });

// Batching using routerTransform
test('Batching', async () => {
const batchInputData = JSON.parse(batchInputDataFile);
const batchExpectedData = JSON.parse(batchOutputDataFile);
const output = await transformer.processRouterDest(batchInputData);
expect(Array.isArray(output)).toEqual(true);
expect(output).toEqual(batchExpectedData);
describe('Router Tests', () => {
routerInputData.forEach((input, index) => {
it(`${name} Tests: payload - ${index}`, async () => {
let output, expected;
try {
output = await transformer.processRouterDest(input);
expected = routerOutputData[index]
} catch (error) {
output = error.message;
// console.log(output);
expected = routerOutputData[index].message;
}
expect(output).toEqual(expected);
});
});
});
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
[
[
{
"message": {
"type": "track",
Expand Down Expand Up @@ -251,5 +252,8 @@
"IsProcessorEnabled": true
}
}
]
],
[],
{}
]

Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
[
[
{
"batchedRequest": {
"version": "1",
Expand Down Expand Up @@ -97,5 +98,14 @@
"IsProcessorEnabled": true
}
}
]
],
{
"message": "Invalid event array",
"statusCode": 400
},
{
"message": "Invalid event array",
"statusCode": 400
}
]

Loading

0 comments on commit e716d84

Please sign in to comment.