From f8495e676e4a3ea950a1eb92d31458bed3df3301 Mon Sep 17 00:00:00 2001
From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com>
Date: Sun, 12 Apr 2020 19:51:47 +0530
Subject: [PATCH 01/76] [skip ci]

[skip ci]
---
 src/producer.js | 8 ++++++++
 1 file changed, 8 insertions(+)

diff --git a/src/producer.js b/src/producer.js
index 983de78..75e11a5 100644
--- a/src/producer.js
+++ b/src/producer.js
@@ -26,7 +26,15 @@ try {
   pgClient.on('notification', async (message) => {
     try {
       payloadcopy = ""
+      logger.debug('Entering producer 1')
+      logger.debug(message.toString())
+      logger.debug('Entering producer 2')
+      logger.debug(message)
+      logger.debug('Entering producer 3')
+      logger.debug(JSON.stringify(message.payload))
+
       const payload = JSON.parse(message.payload)
+      payloadcopy = message
       const validTopicAndOriginator = (pgOptions.triggerTopics.includes(payload.topic)) && (pgOptions.triggerOriginators.includes(payload.originator)) // Check if valid topic and originator
       if (validTopicAndOriginator) {

From 4f801ed90450f27d2afe47bdde9126c4b683258f Mon Sep 17 00:00:00 2001
From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com>
Date: Sun, 12 Apr 2020 19:54:13 +0530
Subject: [PATCH 02/76] Update config.yml

---
 .circleci/config.yml | 16 ++++++++--------
 1 file changed, 8 insertions(+), 8 deletions(-)

diff --git a/.circleci/config.yml b/.circleci/config.yml
index 35be655..a1acf0c 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -42,9 +42,9 @@ build_steps: &build_steps
         ./awsconfiguration.sh ${DEPLOY_ENV}
         source awsenvconf
-        ./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar
-        source buildenvvar
-        ./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer
+        # ./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar
+        #source buildenvvar
+        #./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer
 
         echo "Running Masterscript - deploy postgres-ifx-processer producer"
         if [ -e ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar.json ]; then sudo rm -vf ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar.json; fi
@@ -58,11 +58,11 @@ build_steps: &build_steps
         source buildenvvar
         ./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer
 
-        echo "Running Masterscript - deploy postgres-ifx-processer reconsiler1"
-        if [ -e ${LOGICAL_ENV}-${APP_NAME}-reconsiler1-deployvar.json ]; then sudo rm -vf ${LOGICAL_ENV}-${APP_NAME}-reconsiler1-deployvar.json; fi
-        ./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-reconsiler1-deployvar
-        source buildenvvar
-        ./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer
+        #echo "Running Masterscript - deploy postgres-ifx-processer reconsiler1"
+        #if [ -e ${LOGICAL_ENV}-${APP_NAME}-reconsiler1-deployvar.json ]; then sudo rm -vf ${LOGICAL_ENV}-${APP_NAME}-reconsiler1-deployvar.json; fi
+        #./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-reconsiler1-deployvar
+        #source buildenvvar
+        #./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer
 
 jobs:
   # Build & Deploy against development backend #
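Note on PATCH 01: the debug lines sit inside the Postgres NOTIFY handler. For context, this is how `pg` (the node-postgres client in this repo's package.json) delivers such notifications; `message.payload` arrives as a plain string and only becomes an object after `JSON.parse`, which is why the patch logs both forms. A minimal, self-contained sketch (connection settings and channel name are illustrative, not taken from the repo's config):

    const { Client } = require('pg')

    // Hypothetical connection settings; the real ones come from config/default.js (POSTGRES section).
    const pgClient = new Client({ host: 'localhost', database: 'postgres' })

    async function listen () {
      await pgClient.connect()
      // In this setup the trigger function name doubles as the LISTEN channel.
      await pgClient.query('LISTEN test_db_notifications')
      pgClient.on('notification', (message) => {
        // message.payload is a string; parse it to reach topic/originator/data.
        const payload = JSON.parse(message.payload)
        console.log(message.channel, payload.topic, payload.originator)
      })
    }

    listen().catch(console.error)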
From 33aa805d40445879823a809627209c165138053c Mon Sep 17 00:00:00 2001
From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com>
Date: Mon, 11 May 2020 17:06:53 +0530
Subject: [PATCH 03/76] [skip ci]

[skip ci]
---
 src/services/updateInformix.js | 39 ++++++++++++++++++++++++----------
 1 file changed, 28 insertions(+), 11 deletions(-)

diff --git a/src/services/updateInformix.js b/src/services/updateInformix.js
index 70c868d..32d985e 100644
--- a/src/services/updateInformix.js
+++ b/src/services/updateInformix.js
@@ -2,43 +2,60 @@
 const informix = require('../common/informixWrapper')
 const logger = require('../common/logger')
 
-/**
- * Updates informix database with insert/update/delete operation
- * @param {Object} payload The DML trigger data
- */
 async function updateInformix (payload) {
+  logger.debug(`Received payload at updateinformix : ${payload}`)
   logger.debug('=====Starting to update informix with data:====')
-  //const operation = payload.operation.toLowerCase()
   const operation = payload.payload.operation.toLowerCase()
-  console.log("level 1 informix ",operation)
+  console.log("Informix DML Operation :",operation)
   let sql = null
+  let t0 = []
+  let paramvalue = null
 
   const columns = payload.payload.data
+  logger.debug(`Columns details at updateinformix : ${columns}`)
   const primaryKey = payload.payload.Uniquecolumn
   // Build SQL query
   switch (operation) {
     case 'insert':
       {
         const columnNames = Object.keys(columns)
-        sql = `insert into ${payload.payload.schema}:${payload.payload.table} (${columnNames.join(', ')}) values (${columnNames.map((k) => `'${columns[k]}'`).join(', ')});` // "insert into <schema>:<table> (col_1, col_2, ...) values (val_1, val_2, ...)"
+        //sql = `insert into ${payload.payload.schema}:${payload.payload.table} (${columnNames.join(', ')}) values (${columnNames.map((k) => `'${columns[k]}'`).join(', ')});`
+        sql = `insert into ${payload.payload.schema}:${payload.payload.table} (${columnNames.join(', ')}) values (${columnNames.map((k) => `?`).join(', ')});`
+        t0 = Object.keys(columns).map((key) => `{"value":"${columns[key]}"}`)
+        paramvalue = "[" + `${t0}` + "]"
       }
       break
     case 'update':
       {
-        sql = `update ${payload.payload.schema}:${payload.payload.table} set ${Object.keys(columns).map((key) => `${key}='${columns[key]}'`).join(', ')} where ${primaryKey}=${columns[primaryKey]};` // "update <schema>:<table> set col_1=val_1, col_2=val_2, ... where primary_key_col=primary_key_val"
+        //sql = `update ${payload.payload.schema}:${payload.payload.table} set ${Object.keys(columns).map((key) => `${key}='${columns[key]}'`).join(', ')} where ${primaryKey}=${columns[primaryKey]};`
+        sql = `update ${payload.payload.schema}:${payload.payload.table} set ${Object.keys(columns).map((key) => `${key}= ?`).join(', ')} where ${primaryKey}= ?;`
+        t0 = Object.keys(columns).map((key) => `{"value":"${columns[key]}"}`)
+        t0.push(`{"value":"${columns[primaryKey]}"}`) //param value for appended for where clause
+        paramvalue = "[" + `${t0}` + "]"
       }
       break
     case 'delete':
       {
-        sql = `delete from ${payload.payload.schema}:${payload.payload.table} where ${primaryKey}=${columns[primaryKey]};` // "delete from <schema>:<table> where primary_key_col=primary_key_val"
+        //sql = `delete from ${payload.payload.schema}:${payload.payload.table} where ${primaryKey}=${columns[primaryKey]};`
+        sql = `delete from ${payload.payload.schema}:${payload.payload.table} where ${primaryKey}= ?};`
+        t0.push(`{"value":"${columns[primaryKey]}"}`)
+        paramvalue = "[" + `${t0}` + "]"
       }
       break
     default:
       throw new Error(`Operation ${operation} is not supported`)
   }
 
-  const result = await informix.executeQuery(payload.payload.schema, sql, null)
-  return result
+  //const result = await informix.executeQuery(payload.payload.schema, sql, null)
+  //return result
+
+  //Preparedstatement for informix
+  var finalparam = JSON.parse(paramvalue)
+  console.log(`Typeof finalparam : ${typeof(finalparam)}`)
+  if (finalparam.constructor === Array ) console.log('isarray')
+  else console.log('finalparam not an array')
+
+  const result = await informix.executeQuery(payload.payload.schema, sql, finalparam)
+  return result.then((res)=>{logger.debug("Preparedstmt Result status : ",res)}).catch((e) => {logger.debug("Preparedstmt Result status error", e)})
 }
 
 module.exports = updateInformix

From 4c18ed53fbc2bb658debd3ee346c40fadebedb93 Mon Sep 17 00:00:00 2001
From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com>
Date: Mon, 11 May 2020 17:46:26 +0530
Subject: [PATCH 04/76] [skip ci]

[skip ci]
---
 src/consumer.js | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/consumer.js b/src/consumer.js
index 9531776..6e0ea1f 100644
--- a/src/consumer.js
+++ b/src/consumer.js
@@ -49,7 +49,7 @@ let cs_payloadseqid
     let message
     try {
       message = JSON.parse(m.message.value)
-      logger.debug('Received message from kafka:')
+      logger.debug(`Received from kafka :${message}`)
       if (message.payload.payloadseqid) cs_payloadseqid = message.payload.payloadseqid;
       logger.debug(`consumer : ${message.payload.payloadseqid} ${message.payload.table} ${message.payload.Uniquecolumn} ${message.payload.operation} ${message.timestamp} `);
       await updateInformix(message)
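Note on PATCH 03: the conversion switches the generated SQL from inlined `'${columns[k]}'` values to `?` placeholders, but then builds the parameter list by concatenating JSON fragments (`paramvalue = "[" + t0 + "]"`) and parsing the result back. A quote-safe alternative sketch, assuming the wrapper accepts an array of `{ value }` objects as the patch implies (the sample data and helper name are illustrative, not from the repo):

    // Build the parameter objects directly instead of gluing JSON strings together,
    // so values containing quotes or newlines survive intact.
    function buildUpdateParams (columns, primaryKey) {
      const params = Object.keys(columns).map((key) => ({ value: String(columns[key]) }))
      params.push({ value: String(columns[primaryKey]) }) // trailing param for the WHERE clause
      return params
    }

    const columns = { id: 1, name: "O'Reilly" }
    const sets = Object.keys(columns).map((k) => `${k}= ?`).join(', ')
    console.log(`update myschema:mytable set ${sets} where id= ?;`)
    console.log(buildUpdateParams(columns, 'id')) // [ { value: '1' }, { value: "O'Reilly" }, { value: '1' } ]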
From a9366a4335a7001e466a64ee28d5eafa5a9a68d7 Mon Sep 17 00:00:00 2001
From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com>
Date: Mon, 11 May 2020 18:26:13 +0530
Subject: [PATCH 05/76] [skip ci]

[skip ci]
---
 src/services/updateInformix.js | 10 ++++++----
 1 file changed, 6 insertions(+), 4 deletions(-)

diff --git a/src/services/updateInformix.js b/src/services/updateInformix.js
index 32d985e..f2043fc 100644
--- a/src/services/updateInformix.js
+++ b/src/services/updateInformix.js
@@ -3,7 +3,7 @@ const informix = require('../common/informixWrapper')
 const logger = require('../common/logger')
 
 async function updateInformix (payload) {
-  logger.debug(`Received payload at updateinformix : ${payload}`)
+  logger.debug(`Received payload at updateinformix stringify : ${JSON.stringify(payload)}`)
   logger.debug('=====Starting to update informix with data:====')
   const operation = payload.payload.operation.toLowerCase()
   console.log("Informix DML Operation :",operation)
@@ -37,7 +37,7 @@ async function updateInformix (payload) {
     case 'delete':
       {
         //sql = `delete from ${payload.payload.schema}:${payload.payload.table} where ${primaryKey}=${columns[primaryKey]};`
-        sql = `delete from ${payload.payload.schema}:${payload.payload.table} where ${primaryKey}= ?};`
+        sql = `delete from ${payload.payload.schema}:${payload.payload.table} where ${primaryKey}= ?;`
         t0.push(`{"value":"${columns[primaryKey]}"}`)
         paramvalue = "[" + `${t0}` + "]"
       }
@@ -48,14 +48,16 @@ async function updateInformix (payload) {
 
   //const result = await informix.executeQuery(payload.payload.schema, sql, null)
   //return result
-  //Preparedstatement for informix
+
+  //Preparedstatement for informix
+  logger.debug(`Before JSON conversion Parameter values are : ${paramvalue}`);
   var finalparam = JSON.parse(paramvalue)
   console.log(`Typeof finalparam : ${typeof(finalparam)}`)
   if (finalparam.constructor === Array ) console.log('isarray')
   else console.log('finalparam not an array')
 
   const result = await informix.executeQuery(payload.payload.schema, sql, finalparam)
-  return result.then((res)=>{logger.debug("Preparedstmt Result status : ",res)}).catch((e) => {logger.debug("Preparedstmt Result status error", e)})
+  return result.then((res)=>{logger.debug(`Preparedstmt Result status : ${res}`)}).catch((e) => {logger.debug(`Preparedstmt Result error ${e}`)})
 }
 
 module.exports = updateInformix

From 7ae6d171c1ac741cd9fd35d3761d19e8765fe5d2 Mon Sep 17 00:00:00 2001
From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com>
Date: Mon, 11 May 2020 18:29:30 +0530
Subject: [PATCH 06/76] [skip ci]

[skip ci]
---
 src/consumer.js | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/consumer.js b/src/consumer.js
index 6e0ea1f..55031ac 100644
--- a/src/consumer.js
+++ b/src/consumer.js
@@ -49,7 +49,7 @@ let cs_payloadseqid
     let message
     try {
       message = JSON.parse(m.message.value)
-      logger.debug(`Received from kafka :${message}`)
+      logger.debug(`Received from kafka :${JSON.stringify(message)}`)
       if (message.payload.payloadseqid) cs_payloadseqid = message.payload.payloadseqid;
       logger.debug(`consumer : ${message.payload.payloadseqid} ${message.payload.table} ${message.payload.Uniquecolumn} ${message.payload.operation} ${message.timestamp} `);
       await updateInformix(message)

From 0b3329e88af06998b84679e997a7186ff7b72b69 Mon Sep 17 00:00:00 2001
From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com>
Date: Mon, 11 May 2020 18:33:01 +0530
Subject: [PATCH 07/76] [skip ci]

---
 config/default.js | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/config/default.js b/config/default.js
index 0fc39a8..c64292d 100644
--- a/config/default.js
+++ b/config/default.js
@@ -22,8 +22,8 @@ module.exports = {
     database: process.env.PG_DATABASE || 'postgres', // database must exist before running the tool
     password: process.env.PG_PASSWORD || 'password',
     port: parseInt(process.env.PG_PORT, 10) || 5432,
-    triggerFunctions: process.env.TRIGGER_FUNCTIONS || ['dev_db_notifications'], // List of trigger functions to listen to
-    triggerTopics: process.env.TRIGGER_TOPICS || ['dev.db.postgres.sync'], // Names of the topic in the trigger payload
+    triggerFunctions: process.env.TRIGGER_FUNCTIONS || ['test_db_notifications'], // List of trigger functions to listen to
+    triggerTopics: process.env.TRIGGER_TOPICS || ['test.db.postgres.sync'], // Names of the topic in the trigger payload
     triggerOriginators: process.env.TRIGGER_ORIGINATORS || ['tc-postgres-delta-processor'] // Names of the originator in the trigger payload
   },
   KAFKA: { // Kafka connection options
@@ -50,7 +50,7 @@ module.exports = {
   },
   DYNAMODB:
   {
-    DYNAMODB_TABLE: process.env.DYNAMODB_TABLE || 'dev_pg_ifx_payload_sync',
+    DYNAMODB_TABLE: process.env.DYNAMODB_TABLE || 'test_pg_ifx_payload_sync',
     DD_ElapsedTime: process.env.DD_ElapsedTime || 600000
   },

From 740f95cf2de6a47709649ec9bae1c35bbe153d1b Mon Sep 17 00:00:00 2001
From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com>
Date: Mon, 11 May 2020 18:34:31 +0530
Subject: [PATCH 08/76] Update config.yml

---
 .circleci/config.yml | 16 ++++++++--------
 1 file changed, 8 insertions(+), 8 deletions(-)

diff --git a/.circleci/config.yml b/.circleci/config.yml
index a1acf0c..35be655 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -42,9 +42,9 @@ build_steps: &build_steps
         ./awsconfiguration.sh ${DEPLOY_ENV}
         source awsenvconf
-        # ./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar
-        #source buildenvvar
-        #./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer
+        ./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar
+        source buildenvvar
+        ./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer
 
         echo "Running Masterscript - deploy postgres-ifx-processer producer"
         if [ -e ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar.json ]; then sudo rm -vf ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar.json; fi
@@ -58,11 +58,11 @@ build_steps: &build_steps
         source buildenvvar
         ./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer
 
-        #echo "Running Masterscript - deploy postgres-ifx-processer reconsiler1"
-        #if [ -e ${LOGICAL_ENV}-${APP_NAME}-reconsiler1-deployvar.json ]; then sudo rm -vf ${LOGICAL_ENV}-${APP_NAME}-reconsiler1-deployvar.json; fi
-        #./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-reconsiler1-deployvar
-        #source buildenvvar
-        #./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer
+        echo "Running Masterscript - deploy postgres-ifx-processer reconsiler1"
+        if [ -e ${LOGICAL_ENV}-${APP_NAME}-reconsiler1-deployvar.json ]; then sudo rm -vf ${LOGICAL_ENV}-${APP_NAME}-reconsiler1-deployvar.json; fi
+        ./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-reconsiler1-deployvar
+        source buildenvvar
+        ./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer
 
 jobs:
   # Build & Deploy against development backend #

From 6370ba29b09cd5b1dc50cbf677bfd3b6a3a520a6 Mon Sep 17 00:00:00 2001
From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com>
Date: Mon, 11 May 2020 18:46:20 +0530
Subject: [PATCH 09/76] Update config.yml

---
 .circleci/config.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.circleci/config.yml b/.circleci/config.yml
index 35be655..046a22a 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -9,7 +9,7 @@ install_dependency: &install_dependency
     sudo apt install jq
     sudo pip install awscli --upgrade
     sudo pip install docker-compose
-    sudo apt-get install default-jdk
+    sudo apt-get install default-jdk --fix-missing
 
 install_deploysuite: &install_deploysuite
   name: Installation of install_deploysuite.
   #Git Clone -change back to v1.3 or latest once counter var is generalized.
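Note on PATCH 07: it flips defaults such as `triggerTopics: process.env.TRIGGER_TOPICS || ['test.db.postgres.sync']`. One caveat with this pattern: `process.env` values are always strings, so when the variable is set the code receives `'a,b'` rather than an array, while the fallback is a real array. A small normalizer would make both cases uniform (the `toList` name is illustrative, not from the repo):

    // Normalize an env var that may be unset (use the fallback array) or set as a comma-separated string.
    function toList (envValue, fallback) {
      if (!envValue) return fallback
      return envValue.split(',').map((s) => s.trim()).filter(Boolean)
    }

    console.log(toList(undefined, ['test.db.postgres.sync']))    // [ 'test.db.postgres.sync' ]
    console.log(toList('dev.db.postgres.sync,other.topic', []))  // [ 'dev.db.postgres.sync', 'other.topic' ]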
From cd70a6a59593d1b33a90e2164b7b37d707a8cbb2 Mon Sep 17 00:00:00 2001
From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com>
Date: Mon, 11 May 2020 18:48:39 +0530
Subject: [PATCH 10/76] Update config.yml

---
 .circleci/config.yml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.circleci/config.yml b/.circleci/config.yml
index 046a22a..20f2dc1 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -6,6 +6,7 @@ defaults: &defaults
 install_dependency: &install_dependency
   name: Installation of build and deployment dependencies.
   command: |
+    sudo apt-get update
     sudo apt install jq
     sudo pip install awscli --upgrade
     sudo pip install docker-compose

From dbd886694319514f97f5ae69a514d55ff7d8b8ff Mon Sep 17 00:00:00 2001
From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com>
Date: Tue, 12 May 2020 07:32:47 +0530
Subject: [PATCH 11/76] [skip ci]

[skip ci]
---
 package.json | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/package.json b/package.json
index 5f68789..2f6f563 100644
--- a/package.json
+++ b/package.json
@@ -17,7 +17,7 @@
   "dependencies": {
     "aws-sdk": "*",
     "config": "^3.2.2",
-    "informix-wrapper": "git+https://github.com/appirio-tech/informix-wrapper.git",
+    "informix-wrapper": "git+https://github.com/appirio-tech/informix-wrapper.git#fix-issue",
     "no-kafka": "^3.4.3",
     "pg": "^7.12.1",
     "sleep": "^6.1.0",

From 042130cec902ff9049144d637ddb41c42e306c6e Mon Sep 17 00:00:00 2001
From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com>
Date: Tue, 12 May 2020 07:33:44 +0530
Subject: [PATCH 12/76] [skip ci]

[skip ci]
---
 src/consumer.js | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/consumer.js b/src/consumer.js
index 55031ac..f1de4bb 100644
--- a/src/consumer.js
+++ b/src/consumer.js
@@ -49,7 +49,7 @@ let cs_payloadseqid
     let message
     try {
       message = JSON.parse(m.message.value)
-      logger.debug(`Received from kafka :${JSON.stringify(message)}`)
+      logger.debug(`Consumer Received from kafka :${JSON.stringify(message)}`)
       if (message.payload.payloadseqid) cs_payloadseqid = message.payload.payloadseqid;
       logger.debug(`consumer : ${message.payload.payloadseqid} ${message.payload.table} ${message.payload.Uniquecolumn} ${message.payload.operation} ${message.timestamp} `);
       await updateInformix(message)

From 0f24ce1f797c859f45833f26099a7561bdc0ec0d Mon Sep 17 00:00:00 2001
From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com>
Date: Tue, 12 May 2020 07:35:33 +0530
Subject: [PATCH 13/76] Update updateInformix.js

---
 src/services/updateInformix.js | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/services/updateInformix.js b/src/services/updateInformix.js
index f2043fc..9daea0a 100644
--- a/src/services/updateInformix.js
+++ b/src/services/updateInformix.js
@@ -3,7 +3,7 @@ const informix = require('../common/informixWrapper')
 const logger = require('../common/logger')
 
 async function updateInformix (payload) {
-  logger.debug(`Received payload at updateinformix stringify : ${JSON.stringify(payload)}`)
+  logger.debug(`updateinformix received payload -stringify : ${JSON.stringify(payload)}`)
   logger.debug('=====Starting to update informix with data:====')
   const operation = payload.payload.operation.toLowerCase()
   console.log("Informix DML Operation :",operation)
@@ -12,7 +12,7 @@ async function updateInformix (payload) {
   let paramvalue = null
 
   const columns = payload.payload.data
-  logger.debug(`Columns details at updateinformix : ${columns}`)
+  logger.debug(`updateinformix columns details : ${JSON.stringify(columns)}`)
   const primaryKey = payload.payload.Uniquecolumn
   // Build SQL query
   switch (operation) {

From 1a3ddca53bff35f1db983421a6df9ad3a6bc16e3 Mon Sep 17 00:00:00 2001
From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com>
Date: Wed, 13 May 2020 20:59:49 +0530
Subject: [PATCH 14/76] [skip ci]

[skip ci]
---
 src/consumer.js | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/src/consumer.js b/src/consumer.js
index f1de4bb..a8b2c42 100644
--- a/src/consumer.js
+++ b/src/consumer.js
@@ -48,11 +48,13 @@ let cs_payloadseqid
   for (const m of messageSet) { // Process messages sequentially
     let message
     try {
+      let ifxstatus = 0
       message = JSON.parse(m.message.value)
       logger.debug(`Consumer Received from kafka :${JSON.stringify(message)}`)
       if (message.payload.payloadseqid) cs_payloadseqid = message.payload.payloadseqid;
       logger.debug(`consumer : ${message.payload.payloadseqid} ${message.payload.table} ${message.payload.Uniquecolumn} ${message.payload.operation} ${message.timestamp} `);
-      await updateInformix(message)
+      ifxstatus = await updateInformix(message)
+      logger.debug(`Consumer : Informix return status : ${ifxstatus}`)
       await consumer.commitOffset({ topic, partition, offset: m.offset }) // Commit offset only on success
       if (message.payload['retryCount']) retryvar = message.payload.retryCount;
       auditTrail([cs_payloadseqid,cs_processId,message.payload.table,message.payload.Uniquecolumn,

From 00fa3d08eecfeb8ea0baca82731498ba2488fd26 Mon Sep 17 00:00:00 2001
From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com>
Date: Wed, 13 May 2020 21:01:23 +0530
Subject: [PATCH 15/76] [skip ci]

[skip ci]
---
 src/services/updateInformix.js | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/src/services/updateInformix.js b/src/services/updateInformix.js
index 9daea0a..b963a69 100644
--- a/src/services/updateInformix.js
+++ b/src/services/updateInformix.js
@@ -57,7 +57,8 @@ async function updateInformix (payload) {
   else console.log('finalparam not an array')
 
   const result = await informix.executeQuery(payload.payload.schema, sql, finalparam)
-  return result.then((res)=>{logger.debug(`Preparedstmt Result status : ${res}`)}).catch((e) => {logger.debug(`Preparedstmt Result error ${e}`)})
+  //return result.then((res)=>{logger.debug(`Preparedstmt Result status : ${res}`)}).catch((e) => {logger.debug(`Preparedstmt Result error ${e}`)})
+  return result
 }
 
 module.exports = updateInformix
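Note on PATCH 15: it reverts the `result.then(...)` chaining introduced in PATCH 03. The reason: after `const result = await informix.executeQuery(...)`, `result` is already the resolved value, so calling `.then` on it throws a TypeError (and the earlier code also returned the logging promise, hiding the query result from the consumer). A minimal illustration:

    async function demo () {
      const result = await Promise.resolve(42) // result is 42, not a promise
      // result.then(...)                      // would throw: result.then is not a function
      return result                            // return the value and let the caller log it
    }

    demo().then((r) => console.log('status:', r)) // prints: status: 42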
From d9cd73a222089a4dc8a4f5e92927af256c6d057e Mon Sep 17 00:00:00 2001
From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com>
Date: Wed, 13 May 2020 21:02:53 +0530
Subject: [PATCH 16/76] Update config.yml

---
 .circleci/config.yml | 30 +++++++++++++++---------------
 1 file changed, 15 insertions(+), 15 deletions(-)

diff --git a/.circleci/config.yml b/.circleci/config.yml
index 20f2dc1..617aea9 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -47,23 +47,23 @@ build_steps: &build_steps
         source buildenvvar
         ./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer
 
-        echo "Running Masterscript - deploy postgres-ifx-processer producer"
-        if [ -e ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar.json ]; then sudo rm -vf ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar.json; fi
-        ./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-producer-deployvar
-        source buildenvvar
-        ./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer
+        #echo "Running Masterscript - deploy postgres-ifx-processer producer"
+        #if [ -e ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar.json ]; then sudo rm -vf ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar.json; fi
+        #./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-producer-deployvar
+        #source buildenvvar
+        #./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer
 
-        echo "Running Masterscript - deploy postgres-ifx-processer producer_dd"
-        if [ -e ${LOGICAL_ENV}-${APP_NAME}-producer-deployvar.json ]; then sudo rm -vf ${LOGICAL_ENV}-${APP_NAME}-producer-deployvar.json; fi
-        ./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-producer_dd-deployvar
-        source buildenvvar
-        ./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer
+        #echo "Running Masterscript - deploy postgres-ifx-processer producer_dd"
+        #if [ -e ${LOGICAL_ENV}-${APP_NAME}-producer-deployvar.json ]; then sudo rm -vf ${LOGICAL_ENV}-${APP_NAME}-producer-deployvar.json; fi
+        #./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-producer_dd-deployvar
+        #source buildenvvar
+        #./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer
 
-        echo "Running Masterscript - deploy postgres-ifx-processer reconsiler1"
-        if [ -e ${LOGICAL_ENV}-${APP_NAME}-reconsiler1-deployvar.json ]; then sudo rm -vf ${LOGICAL_ENV}-${APP_NAME}-reconsiler1-deployvar.json; fi
-        ./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-reconsiler1-deployvar
-        source buildenvvar
-        ./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer
+        #echo "Running Masterscript - deploy postgres-ifx-processer reconsiler1"
+        #if [ -e ${LOGICAL_ENV}-${APP_NAME}-reconsiler1-deployvar.json ]; then sudo rm -vf ${LOGICAL_ENV}-${APP_NAME}-reconsiler1-deployvar.json; fi
+        #./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-reconsiler1-deployvar
+        #source buildenvvar
+        #./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer
 
 jobs:
   # Build & Deploy against development backend #

From 8e815f66f919916790ec655e00db7254d38907ba Mon Sep 17 00:00:00 2001
From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com>
Date: Wed, 13 May 2020 21:32:58 +0530
Subject: [PATCH 17/76] Update consumer.js

---
 src/consumer.js | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/src/consumer.js b/src/consumer.js
index a8b2c42..6fc4084 100644
--- a/src/consumer.js
+++ b/src/consumer.js
@@ -48,13 +48,14 @@ let cs_payloadseqid
   for (const m of messageSet) { // Process messages sequentially
     let message
     try {
-      let ifxstatus = 0
+      // let ifxstatus = 0
       message = JSON.parse(m.message.value)
       logger.debug(`Consumer Received from kafka :${JSON.stringify(message)}`)
       if (message.payload.payloadseqid) cs_payloadseqid = message.payload.payloadseqid;
       logger.debug(`consumer : ${message.payload.payloadseqid} ${message.payload.table} ${message.payload.Uniquecolumn} ${message.payload.operation} ${message.timestamp} `);
-      ifxstatus = await updateInformix(message)
-      logger.debug(`Consumer : Informix return status : ${ifxstatus}`)
+      await updateInformix(message)
+      //ifxstatus = await updateInformix(message)
+      //logger.debug(`Consumer : Informix return status : ${ifxstatus}`)
       await consumer.commitOffset({ topic, partition, offset: m.offset }) // Commit offset only on success
       if (message.payload['retryCount']) retryvar = message.payload.retryCount;
       auditTrail([cs_payloadseqid,cs_processId,message.payload.table,message.payload.Uniquecolumn,

From 58714b30bcd043db6d92cbf1a5a4a73bafac6f23 Mon Sep 17 00:00:00 2001
From: Sachin Maheshwari
Date: Fri, 15 May 2020 12:28:04 +0530
Subject: [PATCH 18/76] group consumer changes

---
 config/default.js | 34 +++++++++++++++++++---------------
 src/consumer.js   | 17 ++++++++++++++---
 2 files changed, 33 insertions(+), 18 deletions(-)

diff --git a/config/default.js b/config/default.js
index c64292d..65d48a8 100644
--- a/config/default.js
+++ b/config/default.js
@@ -36,30 +36,34 @@ module.exports = {
     partition: process.env.partition || [0], // Kafka partitions to use
     maxRetry: process.env.MAX_RETRY || 3,
     errorTopic: process.env.ERROR_TOPIC || 'db.scorecardtable.error',
-    recipients: ['admin@abc.com'] // Kafka partitions to use
+    recipients: ['admin@abc.com'], // Kafka partitions to use,
+    KAFKA_URL: process.env.KAFKA_URL,
+    KAFKA_GROUP_ID: process.env.KAFKA_GROUP_ID || 'postgres-ifx-consumer',
+    KAFKA_CLIENT_CERT: process.env.KAFKA_CLIENT_CERT ? process.env.KAFKA_CLIENT_CERT.replace('\\n', '\n') : null,
+    KAFKA_CLIENT_CERT_KEY: process.env.KAFKA_CLIENT_CERT_KEY ? process.env.KAFKA_CLIENT_CERT_KEY.replace('\\n', '\n') : null,
   },
   SLACK: {
     URL: process.env.SLACKURL || 'us-east-1',
     SLACKCHANNEL: process.env.SLACKCHANNEL || 'ifxpg-migrator',
-    SLACKNOTIFY: process.env.SLACKNOTIFY || 'false'
+    SLACKNOTIFY: process.env.SLACKNOTIFY || 'false'
   },
-  RECONSILER:{
+  RECONSILER: {
     RECONSILER_START: process.env.RECONSILER_START || 5,
     RECONSILER_END: process.env.RECONSILER_END || 1,
     RECONSILER_DURATION_TYPE: process.env.RECONSILER_DURATION_TYPE || 'm'
   },
   DYNAMODB:
-  {
-    DYNAMODB_TABLE: process.env.DYNAMODB_TABLE || 'test_pg_ifx_payload_sync',
-    DD_ElapsedTime: process.env.DD_ElapsedTime || 600000
-  },
+  {
+    DYNAMODB_TABLE: process.env.DYNAMODB_TABLE || 'test_pg_ifx_payload_sync',
+    DD_ElapsedTime: process.env.DD_ElapsedTime || 600000
+  },
 
-  AUTH0_URL: process.env.AUTH0_URL ,
-  AUTH0_AUDIENCE: process.env.AUTH0_AUDIENCE ,
-  TOKEN_CACHE_TIME: process.env.TOKEN_CACHE_TIME ,
-  AUTH0_CLIENT_ID: process.env.AUTH0_CLIENT_ID ,
-  AUTH0_CLIENT_SECRET: process.env.AUTH0_CLIENT_SECRET ,
-  BUSAPI_URL : process.env.BUSAPI_URL ,
-  KAFKA_ERROR_TOPIC : process.env.KAFKA_ERROR_TOPIC ,
-  AUTH0_PROXY_SERVER_URL: process.env.AUTH0_PROXY_SERVER_URL
+  AUTH0_URL: process.env.AUTH0_URL,
+  AUTH0_AUDIENCE: process.env.AUTH0_AUDIENCE,
+  TOKEN_CACHE_TIME: process.env.TOKEN_CACHE_TIME,
+  AUTH0_CLIENT_ID: process.env.AUTH0_CLIENT_ID,
+  AUTH0_CLIENT_SECRET: process.env.AUTH0_CLIENT_SECRET,
+  BUSAPI_URL: process.env.BUSAPI_URL,
+  KAFKA_ERROR_TOPIC: process.env.KAFKA_ERROR_TOPIC,
+  AUTH0_PROXY_SERVER_URL: process.env.AUTH0_PROXY_SERVER_URL
 }

diff --git a/src/consumer.js b/src/consumer.js
index 6fc4084..abddff2 100644
--- a/src/consumer.js
+++ b/src/consumer.js
@@ -10,8 +10,19 @@ const healthcheck = require('topcoder-healthcheck-dropin');
 const auditTrail = require('./services/auditTrail');
 const kafkaOptions = config.get('KAFKA')
 const postMessage = require('./services/posttoslack')
-const isSslEnabled = kafkaOptions.SSL && kafkaOptions.SSL.cert && kafkaOptions.SSL.key
-const consumer = new Kafka.SimpleConsumer({
+//const isSslEnabled = kafkaOptions.SSL && kafkaOptions.SSL.cert && kafkaOptions.SSL.key
+
+const options = {
+  groupId: kafkaOptions.KAFKA_GROUP_ID,
+  connectionString: kafkaOptions.KAFKA_URL,
+  ssl: {
+    cert: kafkaOptions.KAFKA_CLIENT_CERT,
+    key: kafkaOptions.KAFKA_CLIENT_CERT_KEY
+  }
+};
+const consumer = new Kafka.GroupConsumer(options);
+
+/*const consumer = new Kafka.SimpleConsumer({
   connectionString: kafkaOptions.brokers_url,
   ...(isSslEnabled && { // Include ssl options if present
     ssl: {
@@ -19,7 +30,7 @@ const consumer = new Kafka.SimpleConsumer({
       key: kafkaOptions.SSL.key
     }
   })
-})
+})*/
 
 const check = function () {
   if (!consumer.client.initialBrokers && !consumer.client.initialBrokers.length) {

From 4bc2794633625eda033f76eb5b544bdd80fbe053 Mon Sep 17 00:00:00 2001
From: sachin-maheshwari
Date: Fri, 15 May 2020 17:07:44 +0530
Subject: [PATCH 19/76] Update consumer.js

[skip ci]
---
 src/consumer.js | 8 ++++++--
 1 file changed, 6 insertions(+), 2 deletions(-)

diff --git a/src/consumer.js b/src/consumer.js
index abddff2..0a2a876 100644
--- a/src/consumer.js
+++ b/src/consumer.js
@@ -151,9 +151,13 @@ if(config.SLACK.SLACKNOTIFY === 'true') {
  */
 async function setupKafkaConsumer() {
   try {
-    await consumer.init()
+    const strategies = [{
+      subscriptions: [kafkaOptions.topic],
+      handler: dataHandler
+    }];
+    await consumer.init(strategies)
     //await consumer.subscribe(kafkaOptions.topic, kafkaOptions.partition, { time: Kafka.LATEST_OFFSET }, dataHandler)
-    await consumer.subscribe(kafkaOptions.topic, dataHandler)
+    //await consumer.subscribe(kafkaOptions.topic, dataHandler)
 
     logger.info('Initialized kafka consumer')
     healthcheck.init([check])
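Note on PATCHES 18-19: the switch from no-kafka's SimpleConsumer to a GroupConsumer is what drives both edits. A GroupConsumer takes its subscriptions as init-time strategies rather than a separate `subscribe` call, and group membership means offsets are tracked per consumer group. A condensed sketch of the pattern (env names mirror the KAFKA_* options PATCH 18 adds; topic and group values are illustrative):

    const Kafka = require('no-kafka')

    const consumer = new Kafka.GroupConsumer({
      groupId: process.env.KAFKA_GROUP_ID || 'postgres-ifx-consumer',
      connectionString: process.env.KAFKA_URL
    })

    async function handler (messageSet, topic, partition) {
      for (const m of messageSet) {
        console.log(topic, partition, m.offset, m.message.value.toString())
        await consumer.commitOffset({ topic, partition, offset: m.offset })
      }
    }

    // Subscriptions are declared up front; GroupConsumer has no consumer.subscribe(),
    // which is why PATCH 19 comments that call out.
    consumer.init([{ subscriptions: [process.env.KAFKA_TOPIC || 'db.topic.sync'], handler }])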
From 81e41e324e989aad4a62ffdd89e71acf27c8799a Mon Sep 17 00:00:00 2001
From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com>
Date: Sun, 17 May 2020 13:36:15 +0530
Subject: [PATCH 20/76] Update consumer.js

---
 src/consumer.js | 113 ++++++++++++++++++++++++++++--------------------
 1 file changed, 65 insertions(+), 48 deletions(-)

diff --git a/src/consumer.js b/src/consumer.js
index 0a2a876..f6735ab 100644
--- a/src/consumer.js
+++ b/src/consumer.js
@@ -55,63 +55,80 @@ const terminate = () => process.exit()
 var retryvar="";
 //let cs_payloadseqid;
 async function dataHandler(messageSet, topic, partition) {
-let cs_payloadseqid
-  for (const m of messageSet) { // Process messages sequentially
+  for (const m of messageSet) { // Process messages sequentially
     let message
     try {
-      let ifxstatus = 0
+      // let ifxstatus = 0
+      let cs_payloadseqid;
       message = JSON.parse(m.message.value)
-      logger.debug(`Consumer Received from kafka :${JSON.stringify(message)}`)
+      //logger.debug(`Consumer Received from kafka :${JSON.stringify(message)}`)
       if (message.payload.payloadseqid) cs_payloadseqid = message.payload.payloadseqid;
       logger.debug(`consumer : ${message.payload.payloadseqid} ${message.payload.table} ${message.payload.Uniquecolumn} ${message.payload.operation} ${message.timestamp} `);
-      await updateInformix(message)
-      //ifxstatus = await updateInformix(message)
-      //logger.debug(`Consumer : Informix return status : ${ifxstatus}`)
-      await consumer.commitOffset({ topic, partition, offset: m.offset }) // Commit offset only on success
-      if (message.payload['retryCount']) retryvar = message.payload.retryCount;
+      //await updateInformix(message)
+      ifxstatus = await updateInformix(message)
+      if (ifxstatus === 0 && `${message.payload.operation}` === 'INSERT') {
+        logger.debug(`Consumer :informixt status for ${message.payload.table} ${message.payload.payloadseqid} : ${ifxstatus} - Retrying`)
+
+        auditTrail([cs_payloadseqid,cs_processId,message.payload.table,message.payload.Uniquecolumn,
+        message.payload.operation,"push-to-kafka",retryvar,"","",JSON.stringify(message), new Date(),message.topic],'consumer')
+        await retrypushtokakfa(message,topic,m,partition)
+      }
+      else {
+        if (message.payload['retryCount']) retryvar = message.payload.retryCount;
         auditTrail([cs_payloadseqid,cs_processId,message.payload.table,message.payload.Uniquecolumn,
-        message.payload.operation,"Informix-updated",retryvar,"","",JSON.stringify(message), message.timestamp,message.topic],'consumer')
-    } catch (err) {
-      const errmsg2 = `error-sync: Could not process kafka message or informix DB error: "${err.message}"`
+        message.payload.operation,"Informix-updated",retryvar,"","",JSON.stringify(message), new Date(),message.topic],'consumer')
+        logger.debug(`Consumer :informix status for ${message.payload.table} ${message.payload.payloadseqid} : ${ifxstatus}`)
+        await consumer.commitOffset({ topic, partition, offset: m.offset }) // Commit offset only on success
+      } }catch (err) {
+      const errmsg2 = `error-sync: Could not process kafka message or informix DB error: "${err.message}"`
       logger.error(errmsg2)
       logger.debug(`error-sync: consumer "${err.message}"`)
+      await retrypushtokakfa(message,topic,m,partition)
+    }
+  }
+}
+
+async function retrypushtokakfa(message,topic,m,partition)
+{
+let cs_payloadseqid
+logger.debug(`Consumer : At retry function`)
   if (!cs_payloadseqid){
-    cs_payloadseqid= 'err-'+(new Date()).getTime().toString(36) + Math.random().toString(36).slice(2);}
-/* await auditTrail([cs_payloadseqid,3333,'message.payload.table','message.payload.Uniquecolumn',
-  'message.payload.operation',"Error-Consumer","",err.message,"",'message.payload.data',new Date(),'message.topic'],'consumer')
-  }else{
-    auditTrail([cs_payloadseqid,4444,message.payload.table,message.payload.Uniquecolumn,
-    message.payload.operation,"Informix-updated",retryvar,"consumer2","",JSON.stringify(message), message.timestamp,message.topic],'consumer')
-  }*/
-
-  try {
-    if (message.payload['retryCount']) retryvar = message.payload.retryCount;
-    await consumer.commitOffset({ topic, partition, offset: m.offset }) // Commit success as will re-publish
-    logger.debug(`Trying to push same message after adding retryCounter`)
-    if (!message.payload.retryCount) {
-      message.payload.retryCount = 0
-      logger.debug('setting retry counter to 0 and max try count is : ', config.KAFKA.maxRetry);
-    }
-    if (message.payload.retryCount >= config.KAFKA.maxRetry) {
-      logger.debug('Recached at max retry counter, sending it to error queue: ', config.KAFKA.errorTopic);
-      logger.debug(`error-sync: consumer max-retry-limit reached`)
-      // push to slack - alertIt("slack message"
-      await callposttoslack(`error-sync: postgres-ifx-processor : consumer max-retry-limit reached: "${message.payload.table}": payloadseqid : "${cs_payloadseqid}"`)
-      let notifiyMessage = Object.assign({}, message, { topic: config.KAFKA.errorTopic })
-      notifiyMessage.payload['recipients'] = config.KAFKA.recipients
-      logger.debug('pushing following message on kafka error alert queue:')
-      //logger.debug(notifiyMessage)
-      await pushToKafka(notifiyMessage)
-      return
-    }
-    message.payload['retryCount'] = message.payload.retryCount + 1;
-    await pushToKafka(message)
-    var errmsg9 = `error-sync: Retry for Kafka push : retrycount : "${message.payload.retryCount}" : "${cs_payloadseqid}"`
-    logger.debug(errmsg9)
-    //await callposttoslack(errmsg9)
-  } catch (err) {
+    cs_payloadseqid= 'err-'+(new Date()).getTime().toString(36) + Math.random().toString(36).slice(2);}
+  try {
+    if (message.payload['retryCount']) retryvar = message.payload.retryCount;
+    await consumer.commitOffset({ topic, partition, offset: m.offset }) // Commit success as will re-publish
+    logger.debug(`Trying to push same message after adding retryCounter`)
+    if (!message.payload.retryCount) {
+      message.payload.retryCount = 0
+      logger.debug('setting retry counter to 0 and max try count is : ', config.KAFKA.maxRetry);
+    }
+    if (message.payload.retryCount >= config.KAFKA.maxRetry) {
+      logger.debug('Reached at max retry counter, sending it to error queue: ', config.KAFKA.errorTopic);
+      logger.debug(`error-sync: consumer max-retry-limit reached`)
+      await callposttoslack(`error-sync: postgres-ifx-processor : consumer max-retry-limit reached: "${message.payload.table}": payloadseqid : "${cs_payloadseqid}"`)
+      let notifiyMessage = Object.assign({}, message, { topic: config.KAFKA.errorTopic })
+      notifiyMessage.payload['recipients'] = config.KAFKA.recipients
+      logger.debug('pushing following message on kafka error alert queue:')
+      //retry push to error topic kafka again
+      await pushToKafka(notifiyMessage)
+      return
+    }
+    message.payload['retryCount'] = message.payload.retryCount + 1;
+    await pushToKafka(message)
+    var errmsg9 = `error-sync: Retry for Kafka push : retrycount : "${message.payload.retryCount}" : "${cs_payloadseqid}"`
+    logger.debug(errmsg9)
+  }
+  catch (err) {
+    await auditTrail([cs_payloadseqid, cs_processId, message.payload.table, message.payload.Uniquecolumn,
+      message.payload.operation, "Error-republishing", message.payload['retryCount'], err.message, "", message.payload.data, new Date(), message.topic], 'consumer')
+    const errmsg1 = `error-sync: postgres-ifx-processor: consumer : Error-republishing: "${err.message}"`
+    logger.error(errmsg1)
+    logger.debug(`error-sync: consumer re-publishing "${err.message}"`)
+    await callposttoslack(errmsg1)
+  }
+}
 
 await auditTrail([cs_payloadseqid,cs_processId,message.payload.table,message.payload.Uniquecolumn,
     message.payload.operation,"Error-republishing",message.payload['retryCount'],err.message,"",message.payload.data, message.timestamp,message.topic],'consumer')
     const errmsg1 = `error-sync: postgres-ifx-processor: consumer : Error-republishing: "${err.message}"`
     logger.error(errmsg1)
     logger.debug(`error-sync: consumer re-publishing "${err.message}"`)
     // push to slack - alertIt("slack message"
     await callposttoslack(errmsg1)
     }
    }
   }
 }

From 9d3d0374e0e1dcf6e2ad16f7e977e2cba034d016 Mon Sep 17 00:00:00 2001
From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com>
Date: Sun, 17 May 2020 13:56:02 +0530
Subject: [PATCH 21/76] Update consumer.js

---
 src/consumer.js | 14 +-------------
 1 file changed, 1 insertion(+), 13 deletions(-)

diff --git a/src/consumer.js b/src/consumer.js
index f6735ab..9035e4f 100644
--- a/src/consumer.js
+++ b/src/consumer.js
@@ -105,7 +105,7 @@ logger.debug(`Consumer : At retry function`)
     if (message.payload.retryCount >= config.KAFKA.maxRetry) {
       logger.debug('Reached at max retry counter, sending it to error queue: ', config.KAFKA.errorTopic);
       logger.debug(`error-sync: consumer max-retry-limit reached`)
-      await callposttoslack(`error-sync: postgres-ifx-processor : consumer max-retry-limit reached: "${message.payload.table}": payloadseqid : "${cs_payloadseqid}"`)
+      awuditait callposttoslack(`error-sync: postgres-ifx-processor : consumer max-retry-limit reached: "${message.payload.table}": payloadseqid : "${cs_payloadseqid}"`)
       let notifiyMessage = Object.assign({}, message, { topic: config.KAFKA.errorTopic })
       notifiyMessage.payload['recipients'] = config.KAFKA.recipients
       logger.debug('pushing following message on kafka error alert queue:')
@@ -128,18 +128,6 @@ logger.debug(`Consumer : At retry function`)
   }
 }
 
-await auditTrail([cs_payloadseqid,cs_processId,message.payload.table,message.payload.Uniquecolumn,
-    message.payload.operation,"Error-republishing",message.payload['retryCount'],err.message,"",message.payload.data, message.timestamp,message.topic],'consumer')
-    const errmsg1 = `error-sync: postgres-ifx-processor: consumer : Error-republishing: "${err.message}"`
-    logger.error(errmsg1)
-    logger.debug(`error-sync: consumer re-publishing "${err.message}"`)
-    // push to slack - alertIt("slack message"
-    await callposttoslack(errmsg1)
-    }
-   }
-  }
-}
-
 async function callposttoslack(slackmessage) {
   if(config.SLACK.SLACKNOTIFY === 'true') {
     return new Promise(function (resolve, reject) {

From 46f8759aa89123431fd5c104a63e7789b3b720b9 Mon Sep 17 00:00:00 2001
From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com>
Date: Sun, 17 May 2020 14:19:31 +0530
Subject: [PATCH 22/76] Update consumer.js

---
 src/consumer.js | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/src/consumer.js b/src/consumer.js
index 9035e4f..0a148d6 100644
--- a/src/consumer.js
+++ b/src/consumer.js
@@ -58,7 +58,7 @@ async function dataHandler(messageSet, topic, partition) {
   for (const m of messageSet) { // Process messages sequentially
     let message
     try {
-      // let ifxstatus = 0
+      let ifxstatus = 0
       let cs_payloadseqid;
       message = JSON.parse(m.message.value)
       //logger.debug(`Consumer Received from kafka :${JSON.stringify(message)}`)
@@ -105,7 +105,7 @@ logger.debug(`Consumer : At retry function`)
     if (message.payload.retryCount >= config.KAFKA.maxRetry) {
       logger.debug('Reached at max retry counter, sending it to error queue: ', config.KAFKA.errorTopic);
       logger.debug(`error-sync: consumer max-retry-limit reached`)
-      awuditait callposttoslack(`error-sync: postgres-ifx-processor : consumer max-retry-limit reached: "${message.payload.table}": payloadseqid : "${cs_payloadseqid}"`)
+      //await callposttoslack(`error-sync: postgres-ifx-processor : consumer max-retry-limit reached: "${message.payload.table}": payloadseqid : "${cs_payloadseqid}"`)
       let notifiyMessage = Object.assign({}, message, { topic: config.KAFKA.errorTopic })
       notifiyMessage.payload['recipients'] = config.KAFKA.recipients
       logger.debug('pushing following message on kafka error alert queue:')
@@ -115,7 +115,7 @@ logger.debug(`Consumer : At retry function`)
     }
     message.payload['retryCount'] = message.payload.retryCount + 1;
     await pushToKafka(message)
-    var errmsg9 = `error-sync: Retry for Kafka push : retrycount : "${message.payload.retryCount}" : "${cs_payloadseqid}"`
+    var errmsg9 = `consumer : Retry for Kafka push : retrycount : "${message.payload.retryCount}" : "${cs_payloadseqid}"`
     logger.debug(errmsg9)
   }
   catch (err) {
@@ -124,7 +124,7 @@ logger.debug(`Consumer : At retry function`)
     const errmsg1 = `error-sync: postgres-ifx-processor: consumer : Error-republishing: "${err.message}"`
     logger.error(errmsg1)
     logger.debug(`error-sync: consumer re-publishing "${err.message}"`)
-    await callposttoslack(errmsg1)
+    // await callposttoslack(errmsg1)
   }
 }
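Note on PATCHES 20-22: they converge on one retry scheme: stamp a `retryCount` on the payload, re-publish to the same topic until `maxRetry`, then divert the message to the error topic with a recipients list. A condensed sketch of that control flow (`pushToKafka` stands in for the repo's helper; the recipients value is illustrative):

    // Sketch of the retry/park decision, assuming pushToKafka(message) publishes to message.topic.
    async function retryOrPark (message, pushToKafka, maxRetry, errorTopic) {
      message.payload.retryCount = (message.payload.retryCount || 0) + 1
      if (message.payload.retryCount > maxRetry) {
        // Too many attempts: park on the error topic so an operator or alert can pick it up.
        const notify = Object.assign({}, message, { topic: errorTopic })
        notify.payload.recipients = ['admin@abc.com']
        return pushToKafka(notify)
      }
      return pushToKafka(message) // otherwise re-publish for another attempt
    }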
From dcfd1d4459c88590243baab7ae718b643cb180e8 Mon Sep 17 00:00:00 2001
From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com>
Date: Wed, 20 May 2020 18:03:49 +0530
Subject: [PATCH 23/76] [skip ci]

---
 .circleci/config.yml | 19 ++++++++++++++++---
 1 file changed, 16 insertions(+), 3 deletions(-)

diff --git a/.circleci/config.yml b/.circleci/config.yml
index 617aea9..28e2dcd 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -40,12 +40,25 @@ build_steps: &build_steps
   - deploy:
       name: Running MasterScript.
       command: |
+
         ./awsconfiguration.sh ${DEPLOY_ENV}
         source awsenvconf
-
-        ./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar
+        #scorecard test consumer remove later
+        ./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-consumer_scorecard-deployvar
+        source buildenvvar
+        ./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer
+
+        #scorecard test producer remove later
+        echo "Running Masterscript - deploy postgres-ifx-processer producer"
+        if [ -e ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar.json ]; then sudo rm -vf ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar.json; fi
+        ./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-producer_scorecard-deployvar
         source buildenvvar
         ./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer
+
+
+        # ./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar
+        #source buildenvvar
+        #./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer
 
         #echo "Running Masterscript - deploy postgres-ifx-processer producer"
         #if [ -e ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar.json ]; then sudo rm -vf ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar.json; fi
@@ -104,7 +117,7 @@ workflows:
       branches:
         only:
           - dev
-          - dev-retryfeature
+          - dev-test-pg
   - "build-test":
       context : org-global
       filters:

From 5901e726fd0dd170a1f4eb3d97e27215fe0beaec Mon Sep 17 00:00:00 2001
From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com>
Date: Wed, 20 May 2020 18:06:52 +0530
Subject: [PATCH 24/76] Update default.js

---
 config/default.js | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/config/default.js b/config/default.js
index 65d48a8..c30ec3e 100644
--- a/config/default.js
+++ b/config/default.js
@@ -22,8 +22,8 @@ module.exports = {
     database: process.env.PG_DATABASE || 'postgres', // database must exist before running the tool
     password: process.env.PG_PASSWORD || 'password',
     port: parseInt(process.env.PG_PORT, 10) || 5432,
-    triggerFunctions: process.env.TRIGGER_FUNCTIONS || ['test_db_notifications'], // List of trigger functions to listen to
-    triggerTopics: process.env.TRIGGER_TOPICS || ['test.db.postgres.sync'], // Names of the topic in the trigger payload
+    triggerFunctions: process.env.TRIGGER_FUNCTIONS || ['dev_db_notifications_2'], // List of trigger functions to listen to
+    triggerTopics: process.env.TRIGGER_TOPICS || ['dev.db.postgres.sync'], // Names of the topic in the trigger payload
     triggerOriginators: process.env.TRIGGER_ORIGINATORS || ['tc-postgres-delta-processor'] // Names of the originator in the trigger payload
   },
   KAFKA: { // Kafka connection options
@@ -54,7 +54,7 @@ module.exports = {
   },
   DYNAMODB:
   {
-    DYNAMODB_TABLE: process.env.DYNAMODB_TABLE || 'test_pg_ifx_payload_sync',
+    DYNAMODB_TABLE: process.env.DYNAMODB_TABLE || 'dev_pg_ifx_payload_sync',
     DD_ElapsedTime: process.env.DD_ElapsedTime || 600000
   },

From 8d61d455aa353468e5b047811c9e999293017e03 Mon Sep 17 00:00:00 2001
From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com>
Date: Wed, 20 May 2020 21:37:11 +0530
Subject: [PATCH 25/76] [skip ci]

[skip ci]
---
 .circleci/config.yml | 1 -
 1 file changed, 1 deletion(-)

diff --git a/.circleci/config.yml b/.circleci/config.yml
index 28e2dcd..3e7f388 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -117,7 +117,6 @@ workflows:
       branches:
         only:
           - dev
-          - dev-test-pg
   - "build-test":
       context : org-global
       filters:

From 1611bfc46c4308f5d7357450aff5e5477b3cf1b1 Mon Sep 17 00:00:00 2001
From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com>
Date: Fri, 22 May 2020 13:25:53 +0530
Subject: [PATCH 26/76] [skip ci]

[skip ci]
---
 .circleci/config.yml | 22 +++++++++++-----------
 1 file changed, 11 insertions(+), 11 deletions(-)

diff --git a/.circleci/config.yml b/.circleci/config.yml
index 3e7f388..6ebcc0c 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -44,21 +44,21 @@ build_steps: &build_steps
         ./awsconfiguration.sh ${DEPLOY_ENV}
         source awsenvconf
         #scorecard test consumer remove later
-        ./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-consumer_scorecard-deployvar
-        source buildenvvar
-        ./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer
+        #./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-consumer_scorecard-deployvar
+        #source buildenvvar
+        #./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer
 
         #scorecard test producer remove later
-        echo "Running Masterscript - deploy postgres-ifx-processer producer"
-        if [ -e ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar.json ]; then sudo rm -vf ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar.json; fi
-        ./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-producer_scorecard-deployvar
-        source buildenvvar
-        ./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer
-
-
-        # ./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar
-        #source buildenvvar
-        #./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer
+        #echo "Running Masterscript - deploy postgres-ifx-processer producer"
+        #if [ -e ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar.json ]; then sudo rm -vf ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar.json; fi
+        #./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-producer_scorecard-deployvar
+        #source buildenvvar
+        #./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer
+
+
+        ./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar
+        source buildenvvar
+        ./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer
 
         #echo "Running Masterscript - deploy postgres-ifx-processer producer"
         #if [ -e ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar.json ]; then sudo rm -vf ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar.json; fi
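Note on PATCHES 24 and 26 (and 28 below): they repeatedly flip environment-specific values by editing defaults and CI files. Since the repo already reads everything through the `config` package, the same switch can be made per deployment with env vars alone, leaving default.js untouched; a sketch of how the lookups behave (comments show the PATCH 24 defaults):

    const config = require('config') // loads config/default.js, then environment overrides

    // Each key falls back to the default.js value unless the corresponding env var is set.
    console.log(config.get('POSTGRES.triggerFunctions')) // ['dev_db_notifications_2'] unless TRIGGER_FUNCTIONS is set
    console.log(config.get('DYNAMODB.DYNAMODB_TABLE'))   // 'dev_pg_ifx_payload_sync' unless DYNAMODB_TABLE is set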
From 69adfde74df8a6dd1a7d3d3fc3b70d110634eb15 Mon Sep 17 00:00:00 2001
From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com>
Date: Fri, 22 May 2020 13:35:40 +0530
Subject: [PATCH 27/76] [skip ci]

[skip ci]
---
 src/services/updateInformix.js | 32 +++++++++++++-------------------
 1 file changed, 13 insertions(+), 19 deletions(-)

diff --git a/src/services/updateInformix.js b/src/services/updateInformix.js
index b963a69..1ee880a 100644
--- a/src/services/updateInformix.js
+++ b/src/services/updateInformix.js
@@ -2,62 +2,56 @@
 const informix = require('../common/informixWrapper')
 const logger = require('../common/logger')
 
+String.prototype.escapeSpecialChars = function() {
+  return this.replace(/\n/g, "\\n");
+};
+
 async function updateInformix (payload) {
-  logger.debug(`updateinformix received payload -stringify : ${JSON.stringify(payload)}`)
-  logger.debug('=====Starting to update informix with data:====')
+  logger.debug(`Informix Received from consumer-kafka :${JSON.stringify(payload)}`)
   const operation = payload.payload.operation.toLowerCase()
-  console.log("Informix DML Operation :",operation)
+  console.log("=====Informix DML Operation :==========",operation)
   let sql = null
   let t0 = []
-  let paramvalue = null
-
   const columns = payload.payload.data
-  logger.debug(`updateinformix columns details : ${JSON.stringify(columns)}`)
   const primaryKey = payload.payload.Uniquecolumn
   // Build SQL query
   switch (operation) {
     case 'insert':
       {
         const columnNames = Object.keys(columns)
-        //sql = `insert into ${payload.payload.schema}:${payload.payload.table} (${columnNames.join(', ')}) values (${columnNames.map((k) => `'${columns[k]}'`).join(', ')});`
         sql = `insert into ${payload.payload.schema}:${payload.payload.table} (${columnNames.join(', ')}) values (${columnNames.map((k) => `?`).join(', ')});`
         t0 = Object.keys(columns).map((key) => `{"value":"${columns[key]}"}`)
-        paramvalue = "[" + `${t0}` + "]"
       }
       break
     case 'update':
       {
-        //sql = `update ${payload.payload.schema}:${payload.payload.table} set ${Object.keys(columns).map((key) => `${key}='${columns[key]}'`).join(', ')} where ${primaryKey}=${columns[primaryKey]};`
         sql = `update ${payload.payload.schema}:${payload.payload.table} set ${Object.keys(columns).map((key) => `${key}= ?`).join(', ')} where ${primaryKey}= ?;`
         t0 = Object.keys(columns).map((key) => `{"value":"${columns[key]}"}`)
         t0.push(`{"value":"${columns[primaryKey]}"}`) //param value for appended for where clause
-        paramvalue = "[" + `${t0}` + "]"
       }
       break
     case 'delete':
       {
-        //sql = `delete from ${payload.payload.schema}:${payload.payload.table} where ${primaryKey}=${columns[primaryKey]};`
         sql = `delete from ${payload.payload.schema}:${payload.payload.table} where ${primaryKey}= ?;`
         t0.push(`{"value":"${columns[primaryKey]}"}`)
-        paramvalue = "[" + `${t0}` + "]"
       }
       break
     default:
       throw new Error(`Operation ${operation} is not supported`)
   }
 
-  //const result = await informix.executeQuery(payload.payload.schema, sql, null)
-  //return result
-
   //Preparedstatement for informix
-  logger.debug(`Before JSON conversion Parameter values are : ${paramvalue}`);
-  var finalparam = JSON.parse(paramvalue)
-  console.log(`Typeof finalparam : ${typeof(finalparam)}`)
+  t0.forEach((name, index) => t0[index] = `${name.escapeSpecialChars()}`);
+  logger.debug(`Param values : {t0}`);
+  var temp1 = "[" + `${t0}` + "]"
+  var finalparam = JSON.parse(temp1)
+
+  /*console.log(`Typeof finalparam : ${typeof(finalparam)}`)
   if (finalparam.constructor === Array ) console.log('isarray')
-  else console.log('finalparam not an array')
+  else console.log('finalparam not an array')*/
 
   const result = await informix.executeQuery(payload.payload.schema, sql, finalparam)
   return result
 }

From 84d2584da46b76c1a84e953c0669aa1121e08c1a Mon Sep 17 00:00:00 2001
From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com>
Date: Fri, 22 May 2020 13:36:34 +0530
Subject: [PATCH 28/76] Update default.js

---
 config/default.js | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/config/default.js b/config/default.js
index c30ec3e..65d48a8 100644
--- a/config/default.js
+++ b/config/default.js
@@ -22,8 +22,8 @@ module.exports = {
     database: process.env.PG_DATABASE || 'postgres', // database must exist before running the tool
     password: process.env.PG_PASSWORD || 'password',
     port: parseInt(process.env.PG_PORT, 10) || 5432,
-    triggerFunctions: process.env.TRIGGER_FUNCTIONS || ['dev_db_notifications_2'], // List of trigger functions to listen to
-    triggerTopics: process.env.TRIGGER_TOPICS || ['dev.db.postgres.sync'], // Names of the topic in the trigger payload
+    triggerFunctions: process.env.TRIGGER_FUNCTIONS || ['test_db_notifications'], // List of trigger functions to listen to
+    triggerTopics: process.env.TRIGGER_TOPICS || ['test.db.postgres.sync'], // Names of the topic in the trigger payload
     triggerOriginators: process.env.TRIGGER_ORIGINATORS || ['tc-postgres-delta-processor'] // Names of the originator in the trigger payload
   },
   KAFKA: { // Kafka connection options
@@ -54,7 +54,7 @@ module.exports = {
   },
   DYNAMODB:
   {
-    DYNAMODB_TABLE: process.env.DYNAMODB_TABLE || 'dev_pg_ifx_payload_sync',
+    DYNAMODB_TABLE: process.env.DYNAMODB_TABLE || 'test_pg_ifx_payload_sync',
     DD_ElapsedTime: process.env.DD_ElapsedTime || 600000
   },

From 1f5b9ba13e114855631e7b6369f9ccb5862dc3e0 Mon Sep 17 00:00:00 2001
From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com>
Date: Fri, 22 May 2020 13:49:14 +0530
Subject: [PATCH 29/76] [skip ci]

[skip ci]
---
 src/services/updateInformix.js | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/services/updateInformix.js b/src/services/updateInformix.js
index 1ee880a..1bbfed4 100644
--- a/src/services/updateInformix.js
+++ b/src/services/updateInformix.js
@@ -43,7 +43,7 @@ async function updateInformix (payload) {
 
   //Preparedstatement for informix
   t0.forEach((name, index) => t0[index] = `${name.escapeSpecialChars()}`);
-  logger.debug(`Param values : {t0}`);
+  logger.debug(`Param values : ${t0}`);
   var temp1 = "[" + `${t0}` + "]"
   var finalparam = JSON.parse(temp1)

From ec29f73ae1d8b3bb5795b5cdb8263e786e22d3fb Mon Sep 17 00:00:00 2001
From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com>
Date: Fri, 22 May 2020 15:29:43 +0530
Subject: [PATCH 30/76] Update default.js

---
 config/default.js | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/config/default.js b/config/default.js
index 65d48a8..2bae733 100644
--- a/config/default.js
+++ b/config/default.js
@@ -34,7 +34,7 @@ module.exports = {
   },
   topic: process.env.KAFKA_TOPIC || 'db.topic.sync', // Kafka topic to push and receive messages
   partition: process.env.partition || [0], // Kafka partitions to use
-  maxRetry: process.env.MAX_RETRY || 3,
+  maxRetry: process.env.MAX_RETRY || 10,
   errorTopic: process.env.ERROR_TOPIC || 'db.scorecardtable.error',
   recipients: ['admin@abc.com'], // Kafka partitions to use,
  KAFKA_URL: process.env.KAFKA_URL,
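Note on PATCH 27: its `escapeSpecialChars` exists because the parameter list is still assembled as JSON text, and a literal newline inside a column value is an invalid control character in a JSON string, so it breaks `JSON.parse`. A small demonstration of the failure and of the escape the patch applies:

    const raw = 'line1\nline2' // a column value containing a real newline

    const broken = `[{"value":"${raw}"}]`
    try { JSON.parse(broken) } catch (e) { console.log('parse fails:', e.message) }

    // PATCH 27's approach: escape the newline before embedding the value in JSON text.
    const escaped = `[{"value":"${raw.replace(/\n/g, '\\n')}"}]`
    console.log(JSON.parse(escaped)) // [ { value: 'line1\nline2' } ]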
0a148d6..5d537ed 100644 --- a/src/consumer.js +++ b/src/consumer.js @@ -22,16 +22,6 @@ const options = { }; const consumer = new Kafka.GroupConsumer(options); -/*const consumer = new Kafka.SimpleConsumer({ - connectionString: kafkaOptions.brokers_url, - ...(isSslEnabled && { // Include ssl options if present - ssl: { - cert: kafkaOptions.SSL.cert, - key: kafkaOptions.SSL.key - } - }) -})*/ - const check = function () { if (!consumer.client.initialBrokers && !consumer.client.initialBrokers.length) { return false; @@ -46,16 +36,17 @@ const check = function () { let cs_processId; const terminate = () => process.exit() + /** * * @param {Array} messageSet List of messages from kafka * @param {String} topic The name of the message topic * @param {Number} partition The kafka partition to which messages are written */ -var retryvar=""; +var retryvar = ""; //let cs_payloadseqid; async function dataHandler(messageSet, topic, partition) { - for (const m of messageSet) { // Process messages sequentially + for (const m of messageSet) { // Process messages sequentially let message try { let ifxstatus = 0 @@ -64,39 +55,39 @@ async function dataHandler(messageSet, topic, partition) { //logger.debug(`Consumer Received from kafka :${JSON.stringify(message)}`) if (message.payload.payloadseqid) cs_payloadseqid = message.payload.payloadseqid; logger.debug(`consumer : ${message.payload.payloadseqid} ${message.payload.table} ${message.payload.Uniquecolumn} ${message.payload.operation} ${message.timestamp} `); - //await updateInformix(message) - ifxstatus = await updateInformix(message) - if (ifxstatus === 0 && `${message.payload.operation}` === 'INSERT') { + //await updateInformix(message) + ifxstatus = await updateInformix(message) + if (ifxstatus === 0 && `${message.payload.operation}` === 'INSERT') { logger.debug(`Consumer :informixt status for ${message.payload.table} ${message.payload.payloadseqid} : ${ifxstatus} - Retrying`) - auditTrail([cs_payloadseqid,cs_processId,message.payload.table,message.payload.Uniquecolumn, - message.payload.operation,"push-to-kafka",retryvar,"","",JSON.stringify(message), new Date(),message.topic],'consumer') - await retrypushtokakfa(message,topic,m,partition) - } - else { + auditTrail([cs_payloadseqid, cs_processId, message.payload.table, message.payload.Uniquecolumn, + message.payload.operation, "push-to-kafka", retryvar, "", "", JSON.stringify(message), new Date(), message.topic], 'consumer') + await retrypushtokakfa(message, topic, m, partition) + } else { if (message.payload['retryCount']) retryvar = message.payload.retryCount; - auditTrail([cs_payloadseqid,cs_processId,message.payload.table,message.payload.Uniquecolumn, - message.payload.operation,"Informix-updated",retryvar,"","",JSON.stringify(message), new Date(),message.topic],'consumer') - logger.debug(`Consumer :informix status for ${message.payload.table} ${message.payload.payloadseqid} : ${ifxstatus}`) - await consumer.commitOffset({ topic, partition, offset: m.offset }) // Commit offset only on success - } }catch (err) { + auditTrail([cs_payloadseqid, cs_processId, message.payload.table, message.payload.Uniquecolumn, + message.payload.operation, "Informix-updated", retryvar, "", "", JSON.stringify(message), new Date(), message.topic], 'consumer') + logger.debug(`Consumer :informix status for ${message.payload.table} ${message.payload.payloadseqid} : ${ifxstatus}`) + } + } catch (err) { const errmsg2 = `error-sync: Could not process kafka message or informix DB error: "${err.message}"` logger.error(errmsg2) 
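// The surrounding patch moves consumer.commitOffset into finally blocks so the
// offset advances exactly once per message, whether the Informix write worked,
// failed, or was republished. A minimal sketch of that pattern with
// hypothetical stand-ins (processOne, republish and commitOffset are
// illustrative names, not this repo's API):
async function handleOne (message, maxRetry, { processOne, republish, commitOffset }) {
  try {
    await processOne(message)              // e.g. updateInformix(message)
  } catch (err) {
    const count = (message.payload.retryCount || 0) + 1
    if (count <= maxRetry) {
      message.payload.retryCount = count   // bounded retry via republish
      await republish(message)
    }                                      // past maxRetry: route to error topic / audit trail
  } finally {
    await commitOffset(message)            // always advance; one bad row cannot wedge the partition
  }
}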
logger.debug(`error-sync: consumer "${err.message}"`) - await retrypushtokakfa(message,topic,m,partition) + await retrypushtokakfa(message, topic, m, partition) + } finally { + await consumer.commitOffset({ topic, partition, offset: m.offset }) // Commit offset only on success } } } -async function retrypushtokakfa(message,topic,m,partition) -{ -let cs_payloadseqid -logger.debug(`Consumer : At retry function`) - if (!cs_payloadseqid){ - cs_payloadseqid= 'err-'+(new Date()).getTime().toString(36) + Math.random().toString(36).slice(2);} +async function retrypushtokakfa(message, topic, m, partition) { + let cs_payloadseqid + logger.debug(`Consumer : At retry function`) + if (!cs_payloadseqid) { + cs_payloadseqid = 'err-' + (new Date()).getTime().toString(36) + Math.random().toString(36).slice(2); + } try { if (message.payload['retryCount']) retryvar = message.payload.retryCount; - await consumer.commitOffset({ topic, partition, offset: m.offset }) // Commit success as will re-publish logger.debug(`Trying to push same message after adding retryCounter`) if (!message.payload.retryCount) { message.payload.retryCount = 0 @@ -120,34 +111,36 @@ logger.debug(`Consumer : At retry function`) } catch (err) { await auditTrail([cs_payloadseqid, cs_processId, message.payload.table, message.payload.Uniquecolumn, - message.payload.operation, "Error-republishing", message.payload['retryCount'], err.message, "", message.payload.data, new Date(), message.topic], 'consumer') + message.payload.operation, "Error-republishing", message.payload['retryCount'], err.message, "", message.payload.data, new Date(), message.topic], 'consumer') const errmsg1 = `error-sync: postgres-ifx-processor: consumer : Error-republishing: "${err.message}"` logger.error(errmsg1) logger.debug(`error-sync: consumer re-publishing "${err.message}"`) - // await callposttoslack(errmsg1) + // await callposttoslack(errmsg1) + } finally { + await consumer.commitOffset({ topic, partition, offset: m.offset }) // Commit success as will re-publish } } async function callposttoslack(slackmessage) { -if(config.SLACK.SLACKNOTIFY === 'true') { + if (config.SLACK.SLACKNOTIFY === 'true') { return new Promise(function (resolve, reject) { - postMessage(slackmessage, (response) => { - console.log(`respnse : ${response}`) - if (response.statusCode < 400) { - logger.debug('Message posted successfully'); - //callback(null); - } else if (response.statusCode < 500) { - const errmsg1 =`Slack Error: posting message to Slack API: ${response.statusCode} - ${response.statusMessage}` - logger.debug(`error-sync: ${errmsg1}`) - } - else { - logger.debug(`Server error when processing message: ${response.statusCode} - ${response.statusMessage}`); - //callback(`Server error when processing message: ${response.statusCode} - ${response.statusMessage}`); - } - resolve("done") - }); + postMessage(slackmessage, (response) => { + console.log(`respnse : ${response}`) + if (response.statusCode < 400) { + logger.debug('Message posted successfully'); + //callback(null); + } else if (response.statusCode < 500) { + const errmsg1 = `Slack Error: posting message to Slack API: ${response.statusCode} - ${response.statusMessage}` + logger.debug(`error-sync: ${errmsg1}`) + } + else { + logger.debug(`Server error when processing message: ${response.statusCode} - ${response.statusMessage}`); + //callback(`Server error when processing message: ${response.statusCode} - ${response.statusMessage}`); + } + resolve("done") + }); }) //end -} + } } @@ -156,20 +149,17 @@ if(config.SLACK.SLACKNOTIFY 
=== 'true') { */ async function setupKafkaConsumer() { try { - const strategies = [{ + const strategies = [{ subscriptions: [kafkaOptions.topic], handler: dataHandler }]; await consumer.init(strategies) - //await consumer.subscribe(kafkaOptions.topic, kafkaOptions.partition, { time: Kafka.LATEST_OFFSET }, dataHandler) - //await consumer.subscribe(kafkaOptions.topic, dataHandler) - logger.info('Initialized kafka consumer') healthcheck.init([check]) } catch (err) { logger.error('Could not setup kafka consumer') logger.logFullError(err) - logger.debug(`error-sync: consumer kafka-setup "${err.message}"`) + logger.debug(`error-sync: consumer kafka-setup "${err.message}"`) terminate() } } diff --git a/src/services/updateInformix.js b/src/services/updateInformix.js index 1bbfed4..dd08aaf 100644 --- a/src/services/updateInformix.js +++ b/src/services/updateInformix.js @@ -2,19 +2,18 @@ const informix = require('../common/informixWrapper') const logger = require('../common/logger') -String.prototype.escapeSpecialChars = function() { - return this.replace(/\n/g, "\\n"); +String.prototype.escapeSpecialChars = function () { + return this.replace(/\n/g, "\\n"); }; -async function updateInformix (payload) { +async function updateInformix(payload) { logger.debug(`Informix Received from consumer-kafka :${JSON.stringify(payload)}`) const operation = payload.payload.operation.toLowerCase() - console.log("=====Informix DML Operation :==========",operation) - let sql = null - let t0 = [] - let paramvalue = null - const columns = payload.payload.data - const primaryKey = payload.payload.Uniquecolumn + console.log("=====Informix DML Operation :==========", operation) + let sql = null + let t0 = [] + const columns = payload.payload.data + const primaryKey = payload.payload.Uniquecolumn // Build SQL query switch (operation) { case 'insert': @@ -26,15 +25,15 @@ async function updateInformix (payload) { break case 'update': { - sql = `update ${payload.payload.schema}:${payload.payload.table} set ${Object.keys(columns).map((key) => `${key}= ?`).join(', ')} where ${primaryKey}= ?;` - t0 = Object.keys(columns).map((key) => `{"value":"${columns[key]}"}`) - t0.push(`{"value":"${columns[primaryKey]}"}`) //param value for appended for where clause + sql = `update ${payload.payload.schema}:${payload.payload.table} set ${Object.keys(columns).map((key) => `${key}= ?`).join(', ')} where ${primaryKey}= ?;` + t0 = Object.keys(columns).map((key) => `{"value":"${columns[key]}"}`) + t0.push(`{"value":"${columns[primaryKey]}"}`) //param value for appended for where clause } break case 'delete': { - sql = `delete from ${payload.payload.schema}:${payload.payload.table} where ${primaryKey}= ?;` - t0.push(`{"value":"${columns[primaryKey]}"}`) + sql = `delete from ${payload.payload.schema}:${payload.payload.table} where ${primaryKey}= ?;` + t0.push(`{"value":"${columns[primaryKey]}"}`) } break default: @@ -44,13 +43,13 @@ async function updateInformix (payload) { //Preparedstatement for informix t0.forEach((name, index) => t0[index] = `${name.escapeSpecialChars()}`); logger.debug(`Param values : ${t0}`); - var temp1 = "[" + `${t0}` + "]" - var finalparam = JSON.parse(temp1) - + let temp1 = "[" + `${t0}` + "]" + let finalparam = JSON.parse(temp1) + /*console.log(`Typeof finalparam : ${typeof(finalparam)}`) if (finalparam.constructor === Array ) console.log('isarray') else console.log('finalparam not an array')*/ - + logger.debug(`Operation ${operation} : Final sql and param values are - ${sql} ${finalparam}`); const result = await 
informix.executeQuery(payload.payload.schema, sql, finalparam) return result } From 67bcea701a62555d9688c1a5174145bf0ebd1432 Mon Sep 17 00:00:00 2001 From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com> Date: Sun, 24 May 2020 07:54:22 +0530 Subject: [PATCH 32/76] [skip ci] [skip ci] --- src/consumer.js | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/src/consumer.js b/src/consumer.js index 5d537ed..4750854 100644 --- a/src/consumer.js +++ b/src/consumer.js @@ -58,10 +58,10 @@ async function dataHandler(messageSet, topic, partition) { //await updateInformix(message) ifxstatus = await updateInformix(message) if (ifxstatus === 0 && `${message.payload.operation}` === 'INSERT') { + logger.debug(`operation : ${message.payload.operation}`) logger.debug(`Consumer :informixt status for ${message.payload.table} ${message.payload.payloadseqid} : ${ifxstatus} - Retrying`) - - auditTrail([cs_payloadseqid, cs_processId, message.payload.table, message.payload.Uniquecolumn, - message.payload.operation, "push-to-kafka", retryvar, "", "", JSON.stringify(message), new Date(), message.topic], 'consumer') + // auditTrail([cs_payloadseqid, cs_processId, message.payload.table, message.payload.Uniquecolumn, + // message.payload.operation, "push-to-kafka", retryvar, "", "", JSON.stringify(message), new Date(), message.topic], 'consumer') await retrypushtokakfa(message, topic, m, partition) } else { if (message.payload['retryCount']) retryvar = message.payload.retryCount; @@ -82,6 +82,7 @@ async function dataHandler(messageSet, topic, partition) { async function retrypushtokakfa(message, topic, m, partition) { let cs_payloadseqid + if (message.payload.payloadseqid) cs_payloadseqid = message.payload.payloadseqid; logger.debug(`Consumer : At retry function`) if (!cs_payloadseqid) { cs_payloadseqid = 'err-' + (new Date()).getTime().toString(36) + Math.random().toString(36).slice(2); From f417122ce28155b0cbb2a879251cb2c31cc3cbd2 Mon Sep 17 00:00:00 2001 From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com> Date: Sun, 24 May 2020 07:59:03 +0530 Subject: [PATCH 33/76] Update updateInformix.js --- src/services/updateInformix.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/services/updateInformix.js b/src/services/updateInformix.js index dd08aaf..385156f 100644 --- a/src/services/updateInformix.js +++ b/src/services/updateInformix.js @@ -42,14 +42,14 @@ async function updateInformix(payload) { //Preparedstatement for informix t0.forEach((name, index) => t0[index] = `${name.escapeSpecialChars()}`); - logger.debug(`Param values : ${t0}`); + //logger.debug(`Param values : ${t0}`); let temp1 = "[" + `${t0}` + "]" let finalparam = JSON.parse(temp1) /*console.log(`Typeof finalparam : ${typeof(finalparam)}`) if (finalparam.constructor === Array ) console.log('isarray') else console.log('finalparam not an array')*/ - logger.debug(`Operation ${operation} : Final sql and param values are - ${sql} ${finalparam}`); + logger.debug(`Final sql and param values are -- ${sql} ${JSON.stringify(finalparam)}`); const result = await informix.executeQuery(payload.payload.schema, sql, finalparam) return result } From ae345bbf412ad46fd7dc048aa555074013644d38 Mon Sep 17 00:00:00 2001 From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com> Date: Wed, 27 May 2020 17:43:19 +0530 Subject: [PATCH 34/76] [skip ci] [skip ci] --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 
2f6f563..179b315 100644 --- a/package.json +++ b/package.json @@ -17,7 +17,7 @@ "dependencies": { "aws-sdk": "*", "config": "^3.2.2", - "informix-wrapper": "git+https://github.com/appirio-tech/informix-wrapper.git#fix-issue", + "informix-wrapper": "git+https://github.com/appirio-tech/informix-wrapper.git#prepare_stmt_fix", "no-kafka": "^3.4.3", "pg": "^7.12.1", "sleep": "^6.1.0", From 6df7af9d7458b9ca35c94523b7dcf80b2b7a56f0 Mon Sep 17 00:00:00 2001 From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com> Date: Wed, 27 May 2020 17:48:54 +0530 Subject: [PATCH 35/76] [skip ci] [skip ci] --- src/consumer.js | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/src/consumer.js b/src/consumer.js index 4750854..a624de4 100644 --- a/src/consumer.js +++ b/src/consumer.js @@ -57,18 +57,18 @@ async function dataHandler(messageSet, topic, partition) { logger.debug(`consumer : ${message.payload.payloadseqid} ${message.payload.table} ${message.payload.Uniquecolumn} ${message.payload.operation} ${message.timestamp} `); //await updateInformix(message) ifxstatus = await updateInformix(message) - if (ifxstatus === 0 && `${message.payload.operation}` === 'INSERT') { - logger.debug(`operation : ${message.payload.operation}`) - logger.debug(`Consumer :informixt status for ${message.payload.table} ${message.payload.payloadseqid} : ${ifxstatus} - Retrying`) + // if (ifxstatus === 0 && `${message.payload.operation}` === 'INSERT') { + // logger.debug(`operation : ${message.payload.operation}`) + // logger.debug(`Consumer :informixt status for ${message.payload.table} ${message.payload.payloadseqid} : ${ifxstatus} - Retrying`) // auditTrail([cs_payloadseqid, cs_processId, message.payload.table, message.payload.Uniquecolumn, // message.payload.operation, "push-to-kafka", retryvar, "", "", JSON.stringify(message), new Date(), message.topic], 'consumer') - await retrypushtokakfa(message, topic, m, partition) - } else { + // await retrypushtokakfa(message, topic, m, partition) + //} else { + logger.debug(`Consumer :informix status for ${message.payload.table} ${message.payload.payloadseqid} : ${ifxstatus}`) if (message.payload['retryCount']) retryvar = message.payload.retryCount; auditTrail([cs_payloadseqid, cs_processId, message.payload.table, message.payload.Uniquecolumn, - message.payload.operation, "Informix-updated", retryvar, "", "", JSON.stringify(message), new Date(), message.topic], 'consumer') - logger.debug(`Consumer :informix status for ${message.payload.table} ${message.payload.payloadseqid} : ${ifxstatus}`) - } + message.payload.operation, "Informix-updated", retryvar, "", "", JSON.stringify(message), new Date(), message.topic], 'consumer') + //} } catch (err) { const errmsg2 = `error-sync: Could not process kafka message or informix DB error: "${err.message}"` logger.error(errmsg2) From 7261158ea9fbcf1c5336f7ac751c6ab0a72d8740 Mon Sep 17 00:00:00 2001 From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com> Date: Wed, 27 May 2020 17:50:44 +0530 Subject: [PATCH 36/76] Update default.js --- config/default.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/config/default.js b/config/default.js index 2bae733..859d680 100644 --- a/config/default.js +++ b/config/default.js @@ -38,7 +38,7 @@ module.exports = { errorTopic: process.env.ERROR_TOPIC || 'db.scorecardtable.error', recipients: ['admin@abc.com'], // Kafka partitions to use, KAFKA_URL: process.env.KAFKA_URL, - KAFKA_GROUP_ID: process.env.KAFKA_GROUP_ID 
|| 'postgres-ifx-consumer', + KAFKA_GROUP_ID: process.env.KAFKA_GROUP_ID || 'test-postgres-ifx-consumer', KAFKA_CLIENT_CERT: process.env.KAFKA_CLIENT_CERT ? process.env.KAFKA_CLIENT_CERT.replace('\\n', '\n') : null, KAFKA_CLIENT_CERT_KEY: process.env.KAFKA_CLIENT_CERT_KEY ? process.env.KAFKA_CLIENT_CERT_KEY.replace('\\n', '\n') : null, }, From 3ba01973633e70d779a880919b7ab52764400871 Mon Sep 17 00:00:00 2001 From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com> Date: Wed, 27 May 2020 18:41:51 +0530 Subject: [PATCH 37/76] Update consumer.js --- src/consumer.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/consumer.js b/src/consumer.js index a624de4..a68aece 100644 --- a/src/consumer.js +++ b/src/consumer.js @@ -66,7 +66,7 @@ async function dataHandler(messageSet, topic, partition) { //} else { logger.debug(`Consumer :informix status for ${message.payload.table} ${message.payload.payloadseqid} : ${ifxstatus}`) if (message.payload['retryCount']) retryvar = message.payload.retryCount; - auditTrail([cs_payloadseqid, cs_processId, message.payload.table, message.payload.Uniquecolumn, + await auditTrail([cs_payloadseqid, cs_processId, message.payload.table, message.payload.Uniquecolumn, message.payload.operation, "Informix-updated", retryvar, "", "", JSON.stringify(message), new Date(), message.topic], 'consumer') //} } catch (err) { From 8ca94701b698de9acecb2a0af10b4c0cebbdcbb3 Mon Sep 17 00:00:00 2001 From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com> Date: Thu, 28 May 2020 15:23:29 +0530 Subject: [PATCH 38/76] [skip ci] [skip ci] --- .circleci/config.yml | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 6ebcc0c..115758a 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -44,21 +44,21 @@ build_steps: &build_steps ./awsconfiguration.sh ${DEPLOY_ENV} source awsenvconf #scorecard test consumer remove later - #./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-consumer_scorecard-deployvar - #source buildenvvar - #./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer + ./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-consumer_scorecard-deployvar + source buildenvvar + ./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer #scorecard test producer remove later - #echo "Running Masterscript - deploy postgres-ifx-processer producer" - #if [ -e ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar.json ]; then sudo rm -vf ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar.json; fi - #./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-producer_scorecard-deployvar - #source buildenvvar - #./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer - - - ./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar + echo "Running Masterscript - deploy postgres-ifx-processer producer" + if [ -e ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar.json ]; then sudo rm -vf ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar.json; fi + ./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-producer_scorecard-deployvar source buildenvvar ./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s 
${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer + + + # ./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar + #source buildenvvar + #./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer #echo "Running Masterscript - deploy postgres-ifx-processer producer" #if [ -e ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar.json ]; then sudo rm -vf ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar.json; fi @@ -117,12 +117,12 @@ workflows: branches: only: - dev + - dev-test-pg - "build-test": context : org-global filters: branches: - only: - - dev-test-pg + only: - dev-test-pg-rf - "build-prod": context : org-global From c81266a8f0a0f8d880e83868001252f801817900 Mon Sep 17 00:00:00 2001 From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com> Date: Thu, 28 May 2020 15:35:31 +0530 Subject: [PATCH 39/76] Update default.js --- config/default.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/config/default.js b/config/default.js index 859d680..6f9bc83 100644 --- a/config/default.js +++ b/config/default.js @@ -22,8 +22,8 @@ module.exports = { database: process.env.PG_DATABASE || 'postgres', // database must exist before running the tool password: process.env.PG_PASSWORD || 'password', port: parseInt(process.env.PG_PORT, 10) || 5432, - triggerFunctions: process.env.TRIGGER_FUNCTIONS || ['test_db_notifications'], // List of trigger functions to listen to - triggerTopics: process.env.TRIGGER_TOPICS || ['test.db.postgres.sync'], // Names of the topic in the trigger payload + triggerFunctions: process.env.TRIGGER_FUNCTIONS || ['dev_db_notifications_2'], // List of trigger functions to listen to + triggerTopics: process.env.TRIGGER_TOPICS || ['dev.db.postgres.sync'], // Names of the topic in the trigger payload triggerOriginators: process.env.TRIGGER_ORIGINATORS || ['tc-postgres-delta-processor'] // Names of the originator in the trigger payload }, KAFKA: { // Kafka connection options From 495e3c41e69c5c0c36f6e55e2fdf835c1f9b247c Mon Sep 17 00:00:00 2001 From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com> Date: Mon, 1 Jun 2020 19:32:57 +0530 Subject: [PATCH 40/76] [skip ci] --- .circleci/config.yml | 38 +++++++++++++++++++------------------- 1 file changed, 19 insertions(+), 19 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 115758a..8bf5a7b 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -44,33 +44,33 @@ build_steps: &build_steps ./awsconfiguration.sh ${DEPLOY_ENV} source awsenvconf #scorecard test consumer remove later - ./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-consumer_scorecard-deployvar - source buildenvvar - ./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer - - #scorecard test producer remove later - echo "Running Masterscript - deploy postgres-ifx-processer producer" - if [ -e ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar.json ]; then sudo rm -vf ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar.json; fi - ./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-producer_scorecard-deployvar - source buildenvvar - ./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer - - - # ./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar + 
#./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-consumer_scorecard-deployvar #source buildenvvar #./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer + #scorecard test producer remove later #echo "Running Masterscript - deploy postgres-ifx-processer producer" #if [ -e ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar.json ]; then sudo rm -vf ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar.json; fi - #./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-producer-deployvar + #./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-producer_scorecard-deployvar #source buildenvvar #./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer + + + ./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar + source buildenvvar + ./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer + + echo "Running Masterscript - deploy postgres-ifx-processer producer" + if [ -e ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar.json ]; then sudo rm -vf ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar.json; fi + ./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-producer-deployvar + source buildenvvar + ./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer - #echo "Running Masterscript - deploy postgres-ifx-processer producer_dd" - #if [ -e ${LOGICAL_ENV}-${APP_NAME}-producer-deployvar.json ]; then sudo rm -vf ${LOGICAL_ENV}-${APP_NAME}-producer-deployvar.json; fi - #./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-producer_dd-deployvar - #source buildenvvar - #./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer + echo "Running Masterscript - deploy postgres-ifx-processer producer_dd" + if [ -e ${LOGICAL_ENV}-${APP_NAME}-producer-deployvar.json ]; then sudo rm -vf ${LOGICAL_ENV}-${APP_NAME}-producer-deployvar.json; fi + ./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-producer_dd-deployvar + source buildenvvar + ./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer #echo "Running Masterscript - deploy postgres-ifx-processer reconsiler1" #if [ -e ${LOGICAL_ENV}-${APP_NAME}-reconsiler1-deployvar.json ]; then sudo rm -vf ${LOGICAL_ENV}-${APP_NAME}-reconsiler1-deployvar.json; fi From 131bbda7e563b1256927e36de5cd2be9709839d5 Mon Sep 17 00:00:00 2001 From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com> Date: Mon, 1 Jun 2020 19:36:52 +0530 Subject: [PATCH 41/76] [skip ci] --- src/producer.js | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/src/producer.js b/src/producer.js index 75e11a5..b5f9790 100644 --- a/src/producer.js +++ b/src/producer.js @@ -20,18 +20,19 @@ async function setupPgClient() { var payloadcopy try { await pgClient.connect() - for (const triggerFunction of pgOptions.triggerFunctions) { + //for (const triggerFunction of pgOptions.triggerFunctions) { + for (const triggerFunction of pgOptions.triggerFunctions.split(',')) { await pgClient.query(`LISTEN ${triggerFunction}`) } pgClient.on('notification', async (message) => { try { payloadcopy = "" - 
logger.debug('Entering producer 1') - logger.debug(message.toString()) - logger.debug('Entering producer 2') + //logger.debug('Entering producer 1') + // logger.debug(message.toString()) + logger.debug('Entering producer 2') logger.debug(message) - logger.debug('Entering producer 3') - logger.debug(JSON.stringify(message.payload)) + //logger.debug('Entering producer 3') + //logger.debug(JSON.stringify(message.payload)) const payload = JSON.parse(message.payload) From cd444e4e7b257f08952e171b2d9424a81a59794b Mon Sep 17 00:00:00 2001 From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com> Date: Mon, 1 Jun 2020 19:50:29 +0530 Subject: [PATCH 42/76] [skip ci] --- config/default.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/config/default.js b/config/default.js index 6f9bc83..859d680 100644 --- a/config/default.js +++ b/config/default.js @@ -22,8 +22,8 @@ module.exports = { database: process.env.PG_DATABASE || 'postgres', // database must exist before running the tool password: process.env.PG_PASSWORD || 'password', port: parseInt(process.env.PG_PORT, 10) || 5432, - triggerFunctions: process.env.TRIGGER_FUNCTIONS || ['dev_db_notifications_2'], // List of trigger functions to listen to - triggerTopics: process.env.TRIGGER_TOPICS || ['dev.db.postgres.sync'], // Names of the topic in the trigger payload + triggerFunctions: process.env.TRIGGER_FUNCTIONS || ['test_db_notifications'], // List of trigger functions to listen to + triggerTopics: process.env.TRIGGER_TOPICS || ['test.db.postgres.sync'], // Names of the topic in the trigger payload triggerOriginators: process.env.TRIGGER_ORIGINATORS || ['tc-postgres-delta-processor'] // Names of the originator in the trigger payload }, KAFKA: { // Kafka connection options From 07cbe0026fbbab8e848dc77a45dd0d330a2c21b5 Mon Sep 17 00:00:00 2001 From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com> Date: Mon, 1 Jun 2020 19:51:57 +0530 Subject: [PATCH 43/76] Update config.yml --- .circleci/config.yml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 8bf5a7b..4c51212 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -56,9 +56,9 @@ build_steps: &build_steps #./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer - ./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar - source buildenvvar - ./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer + # ./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar + #source buildenvvar + #./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer echo "Running Masterscript - deploy postgres-ifx-processer producer" if [ -e ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar.json ]; then sudo rm -vf ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar.json; fi @@ -117,12 +117,12 @@ workflows: branches: only: - dev - - dev-test-pg - "build-test": context : org-global filters: branches: - only: + only: + - dev-test-pg - dev-test-pg-rf - "build-prod": context : org-global From 4e94214833741da990571c0ef1a5ee214285e461 Mon Sep 17 00:00:00 2001 From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com> Date: Thu, 4 Jun 2020 18:34:07 +0530 Subject: [PATCH 44/76] [skip ci] 
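Replaces the ad-hoc pg.Client in src/services/auditTrail.js with the shared
connection pool exported from src/services/db.js. A minimal sketch of the
pooled query pattern this commit moves to, assuming node-postgres (pg)
semantics; writeAudit and its arguments are illustrative names, not this
repo's API:

    const pool = require('./db.js')  // new pg.Pool(pgOptions), see db.js below

    async function writeAudit (sql, params) {
      // pool.connect() checks one client out of the pool; release() must run
      // on every path or the pool leaks connections under load.
      const client = await pool.connect()
      try {
        const res = await client.query(sql, params)
        return res.rowCount
      } finally {
        client.release()
      }
    }

A pool amortizes connection setup across audit writes and survives individual
client errors, where the previous hand-rolled client reacted by exiting the
process.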
--- src/services/auditTrail.js | 46 +++++++++++++++++++++++++------------- 1 file changed, 31 insertions(+), 15 deletions(-) diff --git a/src/services/auditTrail.js b/src/services/auditTrail.js index 7e0d645..ed78523 100644 --- a/src/services/auditTrail.js +++ b/src/services/auditTrail.js @@ -1,12 +1,12 @@ const config = require('config') -const pg = require('pg') +const pgpool = require('./db.js'); const logger = require('../common/logger') - -const pgOptions = config.get('POSTGRES') -const pgConnectionString = `postgresql://${pgOptions.user}:${pgOptions.password}@${pgOptions.host}:${pgOptions.port}/${pgOptions.database}` -let pgClient2 +//const pg = require('pg') +//const pgOptions = config.get('POSTGRES') +//const pgConnectionString = `postgresql://${pgOptions.user}:${pgOptions.password}@${pgOptions.host}:${pgOptions.port}/${pgOptions.database}` +//let pgClient2 //console.log(`"${pgConnectionString}"`); -async function setupPgClient2 () { +/*async function setupPgClient2 () { pgClient2 = new pg.Client(pgConnectionString) try { await pgClient2.connect() @@ -17,12 +17,12 @@ async function setupPgClient2 () { logger.logFullError(err) process.exit() } -} +}*/ async function auditTrail (data,sourcetype) { -if (!pgClient2) { +/*if (!pgClient2) { await setupPgClient2() -} +}*/ if (sourcetype === 'producer'){ sql0 = 'INSERT INTO common_oltp.pgifx_sync_audit(payloadseqid,processId,tablename,uniquecolumn,dboperation,syncstatus,retrycount,consumer_err,producer_err,payload,auditdatetime,topicname) VALUES ($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12)' sql1= ' on conflict (payloadseqid) DO UPDATE SET (syncstatus,producer_err) = ($6,$9) where pgifx_sync_audit.payloadseqid = $1'; @@ -31,13 +31,9 @@ if (sourcetype === 'producer'){ } else { sql0 = 'INSERT INTO common_oltp.pgifx_sync_audit(payloadseqid,processId,tablename,uniquecolumn,dboperation,syncstatus,retrycount,consumer_err,producer_err,payload,auditdatetime,topicname) VALUES ($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12)' sql1= ' on conflict (payloadseqid) DO UPDATE SET (syncstatus,consumer_err,retrycount) = ($6,$8,$7)'; - // where pgifx_sync_audit.payloadseqid = $1'; - //and pgifx_sync_audit.processId = $2'; sql = sql0 + sql1 - logger.debug(`--${1} ${3} 1 Audit Trail update consumer--`) - //logger.debug(`sql values "${sql}"`); } - return pgClient2.query(sql, data, (err, res) => { + /*return pgClient2.query(sql, data, (err, res) => { if (err) { logger.debug(`-- Audit Trail update error-- ${err.stack}`) //pgClient2.end() @@ -45,7 +41,27 @@ if (sourcetype === 'producer'){ // logger.debug(`--Audit Trail update success-- `) } }) -pgClient2.end() +pgClient2.end() */ + +pgpool.on('error', (err, client) => { + logger.debug(`Unexpected error on idle client : ${err}`) + process.exit(-1) + }) + +await pgpool.connect(async (err, client, release) => { + if (err) { + return logger.debug(`Error acquiring client : ${err.stack}`) + } + await client.query(sql, data, (err, res) => { + release() + if (err) { + return logger.debug(`Error executing Query : ${err.stack}`) + } + logger.debug(`Audit Trail update : ${res.rowCount}`) + }) + }) + + } From c37fe28a4fc429b71e5ca991428835d6f2445067 Mon Sep 17 00:00:00 2001 From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com> Date: Thu, 4 Jun 2020 18:34:45 +0530 Subject: [PATCH 45/76] [skip ci] --- src/services/db.js | 5 +++++ 1 file changed, 5 insertions(+) create mode 100644 src/services/db.js diff --git a/src/services/db.js b/src/services/db.js new file mode 100644 index 0000000..40f7dec --- /dev/null +++ 
b/src/services/db.js @@ -0,0 +1,5 @@ +const config = require('config') +const pg = require('pg') + const pgOptions = config.get('POSTGRES') + var pool = new pg.Pool(pgOptions); +module.exports = pool; From 61e990d6406ad6db61dfefc433a385b5abbfa6da Mon Sep 17 00:00:00 2001 From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com> Date: Thu, 4 Jun 2020 18:38:31 +0530 Subject: [PATCH 46/76] pg pool verify --- src/producer.js | 10 +++------- 1 file changed, 3 insertions(+), 7 deletions(-) diff --git a/src/producer.js b/src/producer.js index b5f9790..e931264 100644 --- a/src/producer.js +++ b/src/producer.js @@ -27,13 +27,8 @@ try { pgClient.on('notification', async (message) => { try { payloadcopy = "" - //logger.debug('Entering producer 1') - // logger.debug(message.toString()) logger.debug('Entering producer 2') logger.debug(message) - //logger.debug('Entering producer 3') - //logger.debug(JSON.stringify(message.payload)) - const payload = JSON.parse(message.payload) payloadcopy = message @@ -43,11 +38,12 @@ try { //logger.info('trying to push on kafka topic') await pushToKafka(payload) logger.info('Push to kafka and added for audit trail') + audit(message) } else { logger.info('Push to dynamodb for reconciliation') await pushToDynamoDb(payload) } - audit(message) + } else { logger.debug('Ignoring message with incorrect topic or originator') // push to slack - alertIt("slack message") @@ -122,7 +118,7 @@ async function audit(message) { } else { logger.debug(`Producer DynamoDb : ${logMessage}`); } - auditTrail([pl_seqid, pl_processid, pl_table, pl_uniquecolumn, pl_operation, "push-to-kafka", "", "", "", JSON.stringify(message), pl_timestamp, pl_topic], 'producer') + await auditTrail([pl_seqid, pl_processid, pl_table, pl_uniquecolumn, pl_operation, "push-to-kafka", "", "", "", JSON.stringify(message), pl_timestamp, pl_topic], 'producer') } else { const pl_randonseq = 'err-' + (new Date()).getTime().toString(36) + Math.random().toString(36).slice(2) if (!isFailover) { From 3995bc45d310e249e049952c6bee07e8ddb38509 Mon Sep 17 00:00:00 2001 From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com> Date: Thu, 4 Jun 2020 18:41:17 +0530 Subject: [PATCH 47/76] Update config.yml --- .circleci/config.yml | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 4c51212..a6fc450 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -55,10 +55,9 @@ build_steps: &build_steps #source buildenvvar #./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer - - # ./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar - #source buildenvvar - #./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer + ./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar + source buildenvvar + ./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer echo "Running Masterscript - deploy postgres-ifx-processer producer" if [ -e ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar.json ]; then sudo rm -vf ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar.json; fi @@ -66,11 +65,11 @@ build_steps: &build_steps source buildenvvar ./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s 
${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer - echo "Running Masterscript - deploy postgres-ifx-processer producer_dd" - if [ -e ${LOGICAL_ENV}-${APP_NAME}-producer-deployvar.json ]; then sudo rm -vf ${LOGICAL_ENV}-${APP_NAME}-producer-deployvar.json; fi - ./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-producer_dd-deployvar - source buildenvvar - ./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer + #echo "Running Masterscript - deploy postgres-ifx-processer producer_dd" + #if [ -e ${LOGICAL_ENV}-${APP_NAME}-producer-deployvar.json ]; then sudo rm -vf ${LOGICAL_ENV}-${APP_NAME}-producer-deployvar.json; fi + #./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-producer_dd-deployvar + #source buildenvvar + #./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer #echo "Running Masterscript - deploy postgres-ifx-processer reconsiler1" #if [ -e ${LOGICAL_ENV}-${APP_NAME}-reconsiler1-deployvar.json ]; then sudo rm -vf ${LOGICAL_ENV}-${APP_NAME}-reconsiler1-deployvar.json; fi From 9e160e8d39ddd1ef7c1e9d1f0b559107961955ca Mon Sep 17 00:00:00 2001 From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com> Date: Sat, 6 Jun 2020 17:55:42 +0530 Subject: [PATCH 48/76] [skip ci] [skip ci] --- informix-identity-trigger-proc.sql | 131 ++++++++++++++++++++++++++--- 1 file changed, 121 insertions(+), 10 deletions(-) diff --git a/informix-identity-trigger-proc.sql b/informix-identity-trigger-proc.sql index 21ed378..bb41b66 100644 --- a/informix-identity-trigger-proc.sql +++ b/informix-identity-trigger-proc.sql @@ -1,13 +1,15 @@ database common_oltp + DROP PROCEDURE proc_user_update(varchar,decimal); -DROP PROCEDURE proc_user_update; CREATE PROCEDURE informix.proc_user_update( new_handle varchar(50), user_id decimal(10,0)) if (USER != 'ifxsyncuser') then UPDATE user SET handle_lower = lower(new_handle), modify_date = current WHERE user.user_id = user_id; End if; -end procedure; +end procedure; + +DROP PROCEDURE proc_user_update; create procedure "informix".proc_user_update( user_id DECIMAL(10,0), old_first_name VARCHAR(64), @@ -25,7 +27,7 @@ new_middle_name VARCHAR(64), old_timezone_id decimal(5,0), new_timezone_id decimal(5,0) ) - + if (USER != 'ifxsyncuser') then if ((old_first_name != new_first_name) or (old_last_name != new_last_name ) or (old_middle_name != new_middle_name )) then insert into audit_user (column_name, old_value, new_value, user_id) @@ -57,7 +59,7 @@ user_id) user_id) values ('TIMEZONE_ID', old_timezone_id, new_timezone_id, user_id); End If; - if (USER != 'ifxsyncuser') then + UPDATE user SET handle_lower = lower(new_handle), modify_date = current WHERE user.user_id = user_id; End if; end procedure; @@ -75,7 +77,7 @@ new_primary_ind DECIMAL(1,0), old_status_id DECIMAL(3,0), new_status_id DECIMAL(3,0) ) - + if (USER != 'ifxsyncuser') then if (old_email_type_id != new_email_type_id) then insert into audit_user (column_name, old_value, new_value, user_id) values ('EMAIL_TYPE', old_email_type_id, new_email_type_id, user_id); @@ -95,12 +97,122 @@ new_status_id DECIMAL(3,0) insert into audit_user (column_name, old_value, new_value, user_id) values ('EMAIL_PRIMARY_IND', old_primary_ind, new_primary_ind, user_id); End If; - if (USER != 'ifxsyncuser') then + update email set modify_date = current where email.email_id = email_id; End if; end 
procedure; +DROP PROCEDURE informix.proc_user_last_login; +CREATE PROCEDURE informix.proc_user_last_login (user_id DECIMAL(10,0), o_last_login DATETIME YEAR TO FRACTION, +n_last_login DATETIME YEAR TO FRACTION) + + if (o_last_login != n_last_login) then + if (USER != 'ifxsyncuser') then + insert into corona_event (corona_event_type_id,user_id, corona_event_timestamp) values (1, user_id, n_last_login); + end if; + End if; +end procedure; + +DROP PROCEDURE informix.proc_phone_update; +CREATE PROCEDURE informix.proc_phone_update( +phone_id decimal(10,0), +user_id DECIMAL(10,0), +old_phone_type_id DECIMAL(5,0), +new_phone_type_id DECIMAL(5,0), +old_number VARCHAR(64), +new_number VARCHAR(64), +old_primary_ind DECIMAL(1,0), +new_primary_ind DECIMAL(1,0) +) + if (USER != 'ifxsyncuser') then + if (old_phone_type_id != new_phone_type_id) then + insert into audit_user (column_name, old_value, new_value, user_id) + values ('PHONE_TYPE', old_phone_type_id, new_phone_type_id, user_id); + End If; + + if (old_number != new_number) then + insert into audit_user (column_name, old_value, new_value, user_id) + values ('PHONE_NUMBER', old_number, new_number, user_id); + End If; + + if (old_primary_ind != new_primary_ind) then + insert into audit_user (column_name, old_value, new_value, user_id) + values ('PHONE_PRIMARY_IND', old_primary_ind, new_primary_ind, user_id); + End If; +update phone set modify_date = current where phone.phone_id = phone_id; +End if; +end procedure; + +DROP PROCEDURE informix.proc_address_update; +CREATE PROCEDURE informix.proc_address_update( + address_id DECIMAL(10,0), + old_address_type_id DECIMAL(5,0), + new_address_type_id DECIMAL(5,0), + old_address1 VARCHAR(254), + new_address1 VARCHAR(254), + old_address2 VARCHAR(254), + new_address2 VARCHAR(254), + old_address3 VARCHAR(254), + new_address3 VARCHAR(254), + old_city VARCHAR(64), + new_city VARCHAR(64), + old_state_code CHAR(2), + new_state_code CHAR(2), + old_province VARCHAR(64), + new_province VARCHAR(64), + old_zip VARCHAR(15), + new_zip VARCHAR(15), + old_country_code CHAR(3), + new_country_code CHAR(3) +) + define user_id DECIMAL(10,0); + let user_id = NVL((select min(x.user_id) from user_address_xref x where x.address_id = address_id), -1); + if (USER != 'ifxsyncuser') then + if (user_id > 0 and old_address1 != new_address1) then + insert into audit_user (column_name, old_value, new_value, +user_id) + values ('ADDRESS1', old_address1, new_address1, user_id); + End If; + if (user_id > 0 and old_address2 != new_address2) then + insert into audit_user (column_name, old_value, new_value, +user_id) + values ('ADDRESS2', old_address2, new_address2, user_id); + End If; + if (user_id > 0 and old_address3 != new_address3) then + insert into audit_user (column_name, old_value, new_value, +user_id) + values ('ADDRESS3', old_address3, new_address3, user_id); + End If; + if (user_id > 0 and old_city != new_city) then + insert into audit_user (column_name, old_value, new_value, +user_id) + values ('ADDRESS_CITY', old_city, new_city, user_id); + End If; + if (user_id > 0 and old_state_code != new_state_code) then + insert into audit_user (column_name, old_value, new_value, +user_id) + values ('ADDRESS_STATE', old_state_code, new_state_code, user_id); + End If; + if (user_id > 0 and old_province != new_province) then + insert into audit_user (column_name, old_value, new_value, +user_id) + values ('ADDRESS_PROVINCE', old_province, new_province, user_id); + End If; + if (user_id > 0 and old_zip != new_zip) then + insert into audit_user 
(column_name, old_value, new_value, +user_id) + values ('ADDRESS_ZIP', old_zip, new_zip, user_id); + End If; + if (user_id > 0 and old_country_code != new_country_code) then + insert into audit_user (column_name, old_value, new_value, +user_id) + values ('ADDRESS_COUNTRY', old_country_code, new_country_code, user_id); + End If; + update address set modify_date = current where address.address_id = address_id; + End if; +end procedure; + database informixoltp DROP PROCEDURE informix.proc_coder_update; CREATE PROCEDURE informix.proc_coder_update( @@ -110,7 +222,8 @@ v_oldlanguage_id decimal(3,0), v_newlanguage_id decimal(3,0), v_oldcoder_type_id decimal(3,0), v_newcoder_type_id decimal(3,0), v_oldcomp_country_code varchar(3), v_newcomp_country_code varchar(3) ) - + if (USER != 'ifxsyncuser') then + if (v_oldquote != v_newquote) then insert into audit_coder (column_name, old_value, new_value, user_id) values ('QUOTE', v_oldquote , v_newquote, v_oldcoder_id); @@ -131,10 +244,8 @@ v_oldcomp_country_code varchar(3), v_newcomp_country_code varchar(3) values ('COMP_COUNTRY', v_oldcomp_country_code , v_newcomp_country_code, v_oldcoder_id); End if; - if (USER != 'ifxsyncuser') then update coder set modify_date = current where coder_id = v_oldcoder_id; End if; - end procedure; database tcs_catalog; @@ -155,7 +266,7 @@ new_rating decimal(5,4) update user_reliability set modify_date = current where user_id = p_user_id and phase_id = p_phase_id; End if; end procedure; - + DROP PROCEDURE proc_rating_update; CREATE PROCEDURE informix.proc_rating_update( p_user_id DECIMAL(10,0), From 85e47f19940d5f79468bd8a365cde18543f75f5a Mon Sep 17 00:00:00 2001 From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com> Date: Sat, 6 Jun 2020 18:54:00 +0530 Subject: [PATCH 49/76] [skip ci] [skip ci] --- pg-identity-func-trig-seq2.sql | 732 +++++++++++++++++++++++++++++++++ 1 file changed, 732 insertions(+) create mode 100644 pg-identity-func-trig-seq2.sql diff --git a/pg-identity-func-trig-seq2.sql b/pg-identity-func-trig-seq2.sql new file mode 100644 index 0000000..60d61f0 --- /dev/null +++ b/pg-identity-func-trig-seq2.sql @@ -0,0 +1,732 @@ +SET search_path TO common_oltp; + +CREATE TABLE sync_test_id +( + uniqid INTEGER NOT NULL, + description varchar(200), + created_at TIMESTAMP(6) WITH TIME ZONE DEFAULT now(), + PRIMARY KEY (uniqid) + ); +CREATE TRIGGER "pg_sync_test_id_trigger" + AFTER INSERT OR DELETE OR UPDATE ON sync_test_id + FOR EACH ROW +EXECUTE PROCEDURE common_oltp.notify_trigger_common_oltp('uniqid', 'description', 'created_at'); + +ALTER TABLE "common_oltp"."sync_test_id" disable TRIGGER "pg_sync_test_id_trigger" + +CREATE OR REPLACE FUNCTION "common_oltp"."notify_trigger_common_oltp" () RETURNS trigger + VOLATILE +AS $body$ +DECLARE + rec RECORD; + payload TEXT; + column_name TEXT; + column_value TEXT; + pguserval TEXT; + --payload_items TEXT[]; + payload_items JSONB; + uniquecolumn TEXT; + logtime TEXT; + payloadseqid INTEGER; +BEGIN + +pguserval := (SELECT current_user); + if pguserval = 'pgsyncuser' then + RAISE notice 'pgsyncuser name : %', pguserval; + + CASE TG_OP + WHEN 'INSERT', 'UPDATE' THEN + rec := NEW; + WHEN 'DELETE' THEN + rec := OLD; + ELSE + RAISE EXCEPTION 'Unknown TG_OP: "%". Should not occur!', TG_OP; + END CASE; + return rec; + -- else + end if; + + + CASE TG_OP + WHEN 'INSERT', 'UPDATE' THEN + rec := NEW; + WHEN 'DELETE' THEN + rec := OLD; + ELSE + RAISE EXCEPTION 'Unknown TG_OP: "%". 
Should not occur!', TG_OP; + END CASE; + raise notice 'table name : %', TG_TABLE_NAME; + RAISE info 'hello world'; + -- Get required fields + FOREACH column_name IN ARRAY TG_ARGV LOOP + EXECUTE format('SELECT $1.%I::TEXT', column_name) + INTO column_value + USING rec; + case + when + column_name = 'upload_document' then + -- RAISE NOTICE 'upload_document boolean'; + if column_value = 'false' then + column_value = '0'; + else + column_value = '1'; + end if; + when + column_name = 'upload_document_required' then + -- RAISE NOTICE 'upload_document_required boolean'; + if column_value = 'false' then + column_value = '0'; + else + column_value = '1'; + end if; + when + column_name = 'identify_email_enabled' then + if column_value = 'false' then + column_value = '0'; + else + column_value = '1'; + end if; + when + column_name = 'identify_handle_enabled' then + if column_value = 'false' then + column_value = '0'; + else + column_value = '1'; + end if; + when + column_name = 'social_email_verified' then + if column_value = 'false' then + column_value = 'f'; + else + column_value = 't'; + end if; + when + column_name = 'create_date' then + column_value := (select to_char (column_value::timestamp, 'YYYY-MM-DD HH24:MI:SS.MS')); + when + column_name = 'modify_date' then + column_value := (select to_char (column_value::timestamp, 'YYYY-MM-DD HH24:MI:SS.MS')); + when + column_name = 'last_login' then + column_value := (select to_char (column_value::timestamp, 'YYYY-MM-DD HH24:MI:SS.MS')); + when + column_name = 'last_site_hit_date' then + column_value := (select to_char (column_value::timestamp, 'YYYY-MM-DD HH24:MI:SS.MS')); + when + column_name = 'corona_event_timestamp' then + column_value := (select to_char (column_value::timestamp, 'YYYY-MM-DD HH24:MI:SS.MS')); + when + column_name = 'created_at' then + column_value := (select to_char (column_value::timestamp, 'YYYY-MM-DD HH24:MI:SS.MS')); + else + -- RAISE NOTICE ' not boolean'; + end case; + --payload_items := coalesce(payload_items,'{}')::jsonb || json_build_object(column_name,column_value)::jsonb; + payload_items := coalesce(payload_items,'{}')::jsonb || json_build_object(column_name,replace(column_value,'"','\"'))::jsonb; + -- payload_items := array_append(payload_items, '"' || replace(column_name, '"', '\"') || '":"' || replace(column_value, '"', '\"') || '"'); + END LOOP; + --logtime := (select date_display_tz()); + logtime := (SELECT to_char (now()::timestamptz at time zone 'UTC', 'YYYY-MM-DD"T"HH24:MI:SS"Z"')); + payloadseqid := (select nextval('common_oltp.payloadsequence'::regclass)); + + uniquecolumn := (SELECT c.column_name + FROM information_schema.key_column_usage AS c + LEFT JOIN information_schema.table_constraints AS t + ON t.constraint_name = c.constraint_name + WHERE t.table_name = TG_TABLE_NAME AND t.constraint_type = 'PRIMARY KEY' LIMIT 1); + + if (uniquecolumn = '') IS NOT FALSE then + uniquecolumn := 'Not-Available'; + end if; + + -- exclude any null value columns. 
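+    -- Note: jsonb_strip_nulls only drops object fields whose value is JSON
+    -- null, e.g. jsonb_strip_nulls('{"a":1,"b":null}'::jsonb) -> {"a": 1}.
+    -- NULL column values arrive here as JSON nulls because json_build_object
+    -- stores them that way, so NULL columns are omitted from the notification.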
+ payload_items := jsonb_strip_nulls(payload_items); + + -- Build the payload + payload := '' + || '{' + || '"topic":"' || 'dev.db.postgres.sync' || '",' + || '"originator":"' || 'tc-postgres-delta-processor' || '",' + || '"timestamp":"' || logtime || '",' + || '"mime-type":"' || 'application/json' || '",' + || '"payload": {' + || '"payloadseqid":"' || payloadseqid || '",' + || '"Uniquecolumn":"' || uniquecolumn || '",' + || '"operation":"' || TG_OP || '",' + || '"schema":"' || TG_TABLE_SCHEMA || '",' + || '"table":"' || TG_TABLE_NAME || '",' + || '"data": ' || payload_items + || '}}'; + -- Notify the channel + PERFORM pg_notify('dev_db_notifications', payload); + RETURN rec; +END; +$body$ LANGUAGE plpgsql + +CREATE OR REPLACE FUNCTION "common_oltp"."proc_email_update" () RETURNS trigger + VOLATILE +AS $body$ +DECLARE +pguserval TEXT; +BEGIN + pguserval := (SELECT current_user); + if pguserval != 'pgsyncuser' then + if (OLD.email_type_id != NEW.email_type_id) then + insert into common_oltp.audit_user (column_name, old_value, new_value, user_id) + values ('EMAIL_TYPE', OLD.email_type_id, NEW.email_type_id, OLD.user_id); + End If; + + if (OLD.status_id != NEW.status_id) then + insert into common_oltp.audit_user (column_name, old_value, new_value, user_id) + values ('EMAIL_STATUS', OLD.status_id, NEW.status_id, OLD.user_id); + End If; + + if (OLD.address != NEW.address) then + insert into common_oltp.audit_user (column_name, old_value, new_value, user_id) + values ('EMAIL_ADDRESS', OLD.address, NEW.address, OLD.user_id); + End If; + + if (OLD.primary_ind != NEW.primary_ind) then + insert into common_oltp.audit_user (column_name, old_value, new_value, user_id) + values ('EMAIL_PRIMARY_IND', OLD.primary_ind, NEW.primary_ind, OLD.user_id); + End If; + + -- if pguserval != 'pgsyncuser' then + NEW.modify_date = current_timestamp; + end if; + + + RETURN NEW; +END; +$body$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION "common_oltp"."proc_phone_update" () RETURNS trigger + VOLATILE +AS $body$ +DECLARE +pguserval TEXT; +BEGIN +pguserval := (SELECT current_user); +if pguserval != 'pgsyncuser' then + if (OLD.phone_type_id != NEW.phone_type_id) then + insert into audit_user (column_name, old_value, new_value, user_id) + values ('PHONE_TYPE', OLD.phone_type_id, NEW.phone_type_id, OLD.user_id); + End If; + + if (OLD.phone_number != NEW.phone_number) then + insert into audit_user (column_name, old_value, new_value, user_id) + values ('PHONE_NUMBER', OLD.phone_number, NEW.phone_number, OLD.user_id); + End If; + + if (OLD.primary_ind != NEW.primary_ind) then + insert into audit_user (column_name, old_value, new_value, user_id) + values ('PHONE_PRIMARY_IND', OLD.primary_ind, NEW.primary_ind, OLD.user_id); + End If; + + NEW.modify_date = current_timestamp; + end if; + RETURN NEW; +END; +$body$ LANGUAGE plpgsql + +CREATE OR REPLACE FUNCTION "common_oltp"."proc_user_update" () RETURNS trigger + VOLATILE +AS $body$ +DECLARE +pguserval TEXT; +BEGIN +pguserval := (SELECT current_user); +if pguserval != 'pgsyncuser' then + IF (TG_OP = 'UPDATE') THEN + if ((OLD.first_name != NEW.first_name) or (OLD.last_name != NEW.last_name ) or (OLD.middle_name != NEW.middle_name )) then + insert into common_oltp.audit_user (column_name, old_value, new_value, user_id) + values ('NAME', NULLIF(OLD.first_name, '') || ' ' || NULLIF(OLD.middle_name, '') || ' ' || NULLIF(OLD.last_name, ''), + NULLIF(NEW.first_name, '') || ' ' || NULLIF(NEW.middle_name, '') || ' ' || NULLIF(NEW.last_name, ''), OLD.user_id); + End if; + + if (OLD.handle 
!= NEW.handle) then + insert into common_oltp.audit_user (column_name, old_value, new_value, user_id) + values ('HANDLE', OLD.handle, NEW.handle, OLD.user_id); + End If; + + if (OLD.status != NEW.status) then + insert into common_oltp.audit_user (column_name, old_value, new_value, user_id) + values ('STATUS', OLD.status, NEW.status, OLD.user_id); + End If; + + if (OLD.activation_code != NEW.activation_code) then + insert into common_oltp.audit_user (column_name, old_value, new_value, user_id) + values ('ACTIVATION_CODE', OLD.activation_code, NEW.activation_code, OLD.user_id); + End If; + + if (OLD.timezone_id != NEW.timezone_id) then + insert into common_oltp.audit_user (column_name, old_value, new_value, user_id) + values ('TIMEZONE_ID', OLD.timezone_id, NEW.timezone_id, OLD.user_id); + End If; + + + NEW.modify_date = current_timestamp; + end if; + + + END IF; + + NEW.handle_lower = lower(NEW.handle); + + RETURN NEW; +END; +$body$ LANGUAGE plpgsql + +CREATE OR REPLACE FUNCTION "common_oltp"."proc_address_update" () RETURNS trigger + VOLATILE +AS $body$ +DECLARE +pguserval TEXT; + user_id DECIMAL(10,0); +BEGIN + user_id := NULLIF((select min(x.user_id) from user_address_xref x where x.address_id = OLD.address_id), -1); + pguserval := (SELECT current_user); + if pguserval != 'pgsyncuser' then + if (user_id > 0 and OLD.address1 != NEW.address1) then + insert into audit_user (column_name, old_value, new_value, user_id) + values ('ADDRESS1', OLD.address1, NEW.address1, user_id); + End If; + + if (user_id > 0 and OLD.address2 != NEW.address2) then + insert into audit_user (column_name, old_value, new_value, user_id) + values ('ADDRESS2', OLD.address2, NEW.address2, user_id); + End If; + + if (user_id > 0 and OLD.address3 != NEW.address3) then + insert into audit_user (column_name, old_value, new_value, user_id) + values ('ADDRESS3', OLD.address3, NEW.address3, user_id); + End If; + + if (user_id > 0 and OLD.city != NEW.city) then + insert into audit_user (column_name, old_value, new_value, user_id) + values ('ADDRESS_CITY', OLD.city, NEW.city, user_id); + End If; + + if (user_id > 0 and OLD.state_code != NEW.state_code) then + insert into audit_user (column_name, old_value, new_value, user_id) + values ('ADDRESS_STATE', OLD.state_code, NEW.state_code, user_id); + End If; + + if (user_id > 0 and OLD.province != NEW.province) then + insert into audit_user (column_name, old_value, new_value, user_id) + values ('ADDRESS_PROVINCE', OLD.province, NEW.province, user_id); + End If; + + if (user_id > 0 and OLD.zip != NEW.zip) then + insert into audit_user (column_name, old_value, new_value, user_id) + values ('ADDRESS_ZIP', OLD.zip, NEW.zip, user_id); + End If; + + if (user_id > 0 and OLD.country_code != NEW.country_code) then + insert into audit_user (column_name, old_value, new_value, user_id) + values ('ADDRESS_COUNTRY', OLD.country_code, NEW.country_code, user_id); + End If; + + NEW.modify_date = current_timestamp; + end if; + RETURN NEW; +END; +$body$ LANGUAGE plpgsql + + +CREATE OR REPLACE FUNCTION "common_oltp"."proc_user_last_login" () RETURNS trigger + VOLATILE +AS $body$ +DECLARE +pguserval TEXT; +BEGIN +pguserval := (SELECT current_user); + if pguserval != 'pgsyncuser' then + if (OLD.last_login != NEW.last_login) then + insert into common_oltp.corona_event(corona_event_type_id, user_id, corona_event_timestamp) + values (1, OLD.user_id, NEW.last_login); + end if; + end if; + + RETURN NULL; +END; +$body$ LANGUAGE plpgsql + + +CREATE TRIGGER "pg_security_groups_trigger" + AFTER INSERT OR DELETE 
OR UPDATE ON security_groups
+ FOR EACH ROW
+EXECUTE PROCEDURE notify_trigger_common_oltp('group_id', 'description', 'challenge_group_ind', 'create_user_id');
+
+ CREATE TRIGGER "pg_social_login_provider_trigger"
+AFTER INSERT OR DELETE OR UPDATE ON social_login_provider
+FOR EACH ROW
+EXECUTE PROCEDURE notify_trigger_common_oltp('social_login_provider_id', 'name');
+
+
+CREATE TRIGGER "pg_sso_login_provider_trigger"
+AFTER INSERT OR DELETE OR UPDATE ON sso_login_provider
+FOR EACH ROW
+EXECUTE PROCEDURE notify_trigger_common_oltp('sso_login_provider_id', 'name','type','identify_email_enabled','identify_handle_enabled');
+
+CREATE TRIGGER "pg_Country_trigger"
+AFTER INSERT OR DELETE OR UPDATE ON Country
+FOR EACH ROW
+EXECUTE PROCEDURE notify_trigger_common_oltp('country_code', 'country_name','modify_date','participating','default_taxform_id','longitude','latitude','region','iso_name','iso_alpha2_code','iso_alpha3_code');
+
+CREATE TRIGGER "pg_invalid_handles_trigger"
+AFTER INSERT OR DELETE OR UPDATE ON invalid_handles
+FOR EACH ROW
+EXECUTE PROCEDURE notify_trigger_common_oltp('invalid_handle_id', 'invalid_handle');
+
+CREATE TRIGGER "pg_achievement_type_lu_trigger"
+AFTER INSERT OR DELETE OR UPDATE ON achievement_type_lu
+FOR EACH ROW
+EXECUTE PROCEDURE notify_trigger_common_oltp('achievement_type_id','achievement_type_desc');
+
+
+
+ALTER TABLE "user" DISABLE TRIGGER pg_user_trigger;
+ALTER TABLE email DISABLE TRIGGER pg_email_trigger;
+ALTER TABLE security_user DISABLE TRIGGER pg_security_user_trigger;
+ALTER TABLE user_sso_login DISABLE TRIGGER pg_user_sso_login_trigger;
+ALTER TABLE user_achievement DISABLE TRIGGER pg_user_achievement_trigger;
+ALTER TABLE user_group_xref DISABLE TRIGGER pg_user_group_xref_trigger;
+ALTER TABLE security_groups DISABLE TRIGGER pg_security_groups_trigger;
+ALTER TABLE user_social_login DISABLE TRIGGER pg_user_social_login_trigger;
+ALTER TABLE social_login_provider DISABLE TRIGGER pg_social_login_provider_trigger;
+ALTER TABLE sso_login_provider DISABLE TRIGGER pg_sso_login_provider_trigger;
+ALTER TABLE country DISABLE TRIGGER pg_country_trigger;
+ALTER TABLE invalid_handles DISABLE TRIGGER pg_invalid_handles_trigger;
+ALTER TABLE achievement_type_lu DISABLE TRIGGER pg_achievement_type_lu_trigger;
+ALTER TABLE corona_event DISABLE TRIGGER pg_corona_event_trigger;
+ALTER TABLE audit_user DISABLE TRIGGER pg_audit_user_trigger;
+
+DROP sequence "common_oltp"."sequence_user_group_seq";
+CREATE SEQUENCE sequence_user_group_seq INCREMENT BY 1 MINVALUE 1 MAXVALUE 9223372036854775807
+START WITH 951000000 NO CYCLE;
+
+DROP sequence "common_oltp"."sequence_user_seq";
+CREATE SEQUENCE sequence_user_seq INCREMENT BY 1 MINVALUE 1 MAXVALUE 9223372036854775807 START WITH
+488770000 NO CYCLE;
+
+ALTER SEQUENCE corona_event_corona_event_id_seq RESTART WITH 577770000;
+
+SET search_path TO informixoltp;
+
+CREATE OR REPLACE FUNCTION "informixoltp"."notify_trigger_informixoltp" () RETURNS trigger
+ VOLATILE
+AS $body$
+DECLARE
+ rec RECORD;
+ payload TEXT;
+ column_name TEXT;
+ column_value TEXT;
+ -- payload_items TEXT[];
+ payload_items JSONB;
+ pguserval TEXT;
+ uniquecolumn TEXT;
+ logtime TEXT;
+ payloadseqid INTEGER;
+BEGIN
+
+pguserval := (SELECT current_user);
+ if pguserval = 'pgsyncuser' then
+ RAISE notice 'pgsyncuser name : %', pguserval;
+
+ CASE TG_OP
+ WHEN 'INSERT', 'UPDATE' THEN
+ rec := NEW;
+ WHEN 'DELETE' THEN
+ rec := OLD;
+ ELSE
+ RAISE EXCEPTION 'Unknown TG_OP: "%". 
Should not occur!', TG_OP; + END CASE; + return rec; + -- else + end if; + + CASE TG_OP + WHEN 'INSERT', 'UPDATE' THEN + rec := NEW; + WHEN 'DELETE' THEN + rec := OLD; + ELSE + RAISE EXCEPTION 'Unknown TG_OP: "%". Should not occur!', TG_OP; + END CASE; + raise notice 'table name : %', TG_TABLE_NAME; + -- RAISE info 'hello world'; + -- Get required fields + FOREACH column_name IN ARRAY TG_ARGV LOOP + EXECUTE format('SELECT $1.%I::TEXT', column_name) + INTO column_value + USING rec; + case + when + column_name = 'upload_document' then + -- RAISE NOTICE 'upload_document boolean'; + if column_value = 'false' then + column_value = '0'; + else + column_value = '1'; + end if; + when + column_name = 'upload_document_required' then + -- RAISE NOTICE 'upload_document_required boolean'; + if column_value = 'false' then + column_value = '0'; + else + column_value = '1'; + end if; + when + column_name = 'identify_email_enabled' then + if column_value = 'false' then + column_value = '0'; + else + column_value = '1'; + end if; + when + column_name = 'identify_handle_enabled' then + if column_value = 'false' then + column_value = '0'; + else + column_value = '1'; + end if; + when + column_name = 'create_date' then + column_value := (select to_char (column_value::timestamp, 'YYYY-MM-DD HH24:MI:SS.MS')); + when + column_name = 'modify_date' then + column_value := (select to_char (column_value::timestamp, 'YYYY-MM-DD HH24:MI:SS.MS')); + when + column_name = 'member_since' then + column_value := (select to_char (column_value::timestamp, 'YYYY-MM-DD HH24:MI:SS.MS')); + else + -- RAISE NOTICE ' not boolean'; + end case; + -- payload_items := array_append(payload_items, '"' || replace(column_name, '"', '\"') || '":"' || replace(column_value, '"', '\"') || '"'); + --payload_items := coalesce(payload_items,'{}')::jsonb || json_build_object(column_name,column_value)::jsonb; + payload_items := coalesce(payload_items,'{}')::jsonb || json_build_object(column_name,replace(column_value,'"','\"'))::jsonb; + END LOOP; + logtime := (SELECT to_char (now()::timestamptz at time zone 'UTC', 'YYYY-MM-DD"T"HH24:MI:SS"Z"')); + payloadseqid := (select nextval('common_oltp.payloadsequence'::regclass)); + + uniquecolumn := (SELECT c.column_name + FROM information_schema.key_column_usage AS c + LEFT JOIN information_schema.table_constraints AS t + ON t.constraint_name = c.constraint_name + WHERE t.table_name = TG_TABLE_NAME AND t.constraint_type = 'PRIMARY KEY' LIMIT 1); + + if (uniquecolumn = '') IS NOT FALSE then + uniquecolumn := 'Not-Available'; + end if; + + -- exclude any null value columns. 
+ payload_items := jsonb_strip_nulls(payload_items); + + -- Build the payload + payload := '' + || '{' + || '"topic":"' || 'dev.db.postgres.sync' || '",' + || '"originator":"' || 'tc-postgres-delta-processor' || '",' + || '"timestamp":"' || logtime || '",' + || '"mime-type":"' || 'application/json' || '",' + || '"payload": {' + || '"payloadseqid":"' || payloadseqid || '",' + || '"Uniquecolumn":"' || uniquecolumn || '",' + || '"operation":"' || TG_OP || '",' + || '"schema":"' || TG_TABLE_SCHEMA || '",' + || '"table":"' || TG_TABLE_NAME || '",' + || '"data": ' || payload_items + || '}}'; + + -- Notify the channel + PERFORM pg_notify('dev_db_notifications', payload); + + RETURN rec; +END; +$body$ LANGUAGE plpgsql + +CREATE OR REPLACE FUNCTION "informixoltp"."proc_coder_update" () RETURNS trigger + VOLATILE +AS $body$ +DECLARE + pguserval TEXT; +begin + if (OLD.quote != NEW.quote) then + insert into audit_coder (column_name, old_value, new_value, user_id) + values ('QUOTE', OLD.quote , NEW.quote, OLD.coder_id); + end if; + + if (OLD.coder_type_id != NEW.coder_type_id) then + insert into audit_coder (column_name, old_value, new_value, user_id) + values ('CODER_TYPE', OLD.coder_type_id , NEW.coder_type_id, OLD.coder_id); + end if; + if (OLD.language_id != NEW.language_id) then + insert into audit_coder (column_name, old_value, new_value, user_id) + values ('LANGUAGE', OLD.language_id , NEW.language_id, OLD.coder_id); + end if; + if (OLD.comp_country_code != NEW.comp_country_code) then + insert into audit_coder (column_name, old_value, new_value, user_id) + values ('COMP_COUNTRY', OLD.comp_country_code , NEW.comp_country_code, OLD.coder_id); + end if; + pguserval := (SELECT current_user); + if pguserval != 'pgsyncuser' then + -- RAISE info 'current_user'; + -- raise notice 'inside current_user : %', current_user; + --update coder set modify_date = current_timestamp where coder_id = OLD.coder_id; + NEW.modify_date = current_timestamp; + end if; + + return NEW; +end ; +$body$ LANGUAGE plpgsql + + +CREATE TRIGGER "pg_coder_referral_trigger" +AFTER INSERT OR DELETE OR UPDATE ON coder_referral +FOR EACH ROW +EXECUTE PROCEDURE notify_trigger_informixoltp('coder_id', 'referral_id','reference_id','other'); + +ALTER TABLE coder DISABLE TRIGGER pg_coder; +ALTER TABLE algo_rating DISABLE TRIGGER pg_algo_rating; +ALTER TABLE coder_referral DISABLE TRIGGER pg_coder_referral_trigger; + +SET search_path TO tcs_catalog; + +CREATE OR REPLACE FUNCTION "tcs_catalog"."notify_trigger" () RETURNS trigger + VOLATILE +AS $body$ +DECLARE + rec RECORD; + payload TEXT; + column_name TEXT; + column_value TEXT; + --payload_items TEXT[]; + payload_items JSONB; + pguserval TEXT; + uniquecolumn TEXT; + logtime TEXT; + payloadseqid INTEGER; +BEGIN + +pguserval := (SELECT current_user); + if pguserval = 'pgsyncuser' then + RAISE notice 'pgsyncuser name : %', pguserval; + + CASE TG_OP + WHEN 'INSERT', 'UPDATE' THEN + rec := NEW; + WHEN 'DELETE' THEN + rec := OLD; + ELSE + RAISE EXCEPTION 'Unknown TG_OP: "%". Should not occur!', TG_OP; + END CASE; + return rec; + -- else + end if; + + -- Set record row depending on operation + CASE TG_OP + WHEN 'INSERT', 'UPDATE' THEN + rec := NEW; + WHEN 'DELETE' THEN + rec := OLD; + ELSE + RAISE EXCEPTION 'Unknown TG_OP: "%". 
Should not occur!', TG_OP; + END CASE; + -- Get required fields + FOREACH column_name IN ARRAY TG_ARGV LOOP + EXECUTE format('SELECT $1.%I::TEXT', column_name) + INTO column_value + USING rec; + case + when + column_name = 'upload_document' then + if column_value = 'false' then + column_value = '0'; + else + column_value = '1'; + end if; + when + column_name = 'upload_document_required' then + if column_value = 'false' then + column_value = '0'; + else + column_value = '1'; + end if; + else + -- RAISE NOTICE ' not boolean'; + end case; + --payload_items := array_append(payload_items, '"' || replace(column_name, '"', '\"') || '":"' || replace(column_value, '"', '\"') || '"'); + --payload_items := coalesce(payload_items,'{}')::jsonb || json_build_object(column_name,column_value)::jsonb; + payload_items := coalesce(payload_items,'{}')::jsonb || json_build_object(column_name,replace(column_value,'"','\"'))::jsonb; + + END LOOP; + logtime := (SELECT to_char (now()::timestamptz at time zone 'UTC', 'YYYY-MM-DD"T"HH24:MI:SS"Z"')); + payloadseqid := (select nextval('payloadsequence'::regclass)); + + uniquecolumn := (SELECT c.column_name + FROM information_schema.key_column_usage AS c + LEFT JOIN information_schema.table_constraints AS t + ON t.constraint_name = c.constraint_name + WHERE t.table_name = TG_TABLE_NAME AND t.constraint_type = 'PRIMARY KEY' limit 1); + + if (uniquecolumn = '') IS NOT FALSE then + uniquecolumn := 'Not-Available'; + end if; + -- exclude any null value columns. + payload_items := jsonb_strip_nulls(payload_items); + + RAISE Notice ' payload val: "%"', payload; + -- Build the payload + --payload := '' + -- || '{' + -- || '"topic":"' || 'dev.db.postgres.sync' || '",' + -- || '"originator":"' || 'tc-postgres-delta-processor' || '",' + -- || '"timestamp":"' || logtime || '",' + -- || '"mime-type":"' || 'application/json' || '",' + -- || '"payload": {' + -- || '"payloadseqid":"' || payloadseqid || '",' + -- || '"Uniquecolumn":"' || uniquecolumn || '",' + -- || '"operation":"' || TG_OP || '",' + -- || '"schema":"' || TG_TABLE_SCHEMA || '",' + -- || '"table":"' || TG_TABLE_NAME || '",' + -- || '"data": {' || array_to_string(payload_items, ',') || '}' + -- || '}}'; + + payload := '' + || '{' + || '"topic":"' || 'dev.db.postgres.sync' || '",' + || '"originator":"' || 'tc-postgres-delta-processor' || '",' + || '"timestamp":"' || logtime || '",' + || '"mime-type":"' || 'application/json' || '",' + || '"payload": {' + || '"payloadseqid":"' || payloadseqid || '",' + || '"Uniquecolumn":"' || uniquecolumn || '",' + || '"operation":"' || TG_OP || '",' + || '"schema":"' || TG_TABLE_SCHEMA || '",' + || '"table":"' || TG_TABLE_NAME || '",' + || '"data":' || payload_items + || '}}'; + + -- Notify the channel + PERFORM pg_notify('dev_db_notifications', payload); + + RETURN rec; +END; +$body$ LANGUAGE plpgsql; + + +GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA common_oltp,informixoltp,tcs_catalog TO pgsyncuser; +GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA common_oltp,informixoltp,tcs_catalog TO pgsyncuser; + +GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA common_oltp,informixoltp,tcs_catalog TO coder; +GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA common_oltp,informixoltp,tcs_catalog TO coder; + +GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA common_oltp,informixoltp,tcs_catalog TO postgres; +GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA common_oltp,informixoltp,tcs_catalog TO postgres; + +grant USAGE ON SCHEMA common_oltp,informixoltp,tcs_catalog To pgsyncuser; +grant USAGE ON SCHEMA 
common_oltp,informixoltp,tcs_catalog To coder; +grant USAGE ON SCHEMA common_oltp,informixoltp,tcs_catalog To postgres; From fa505be0514d0ba28b2afa46f83fa3c171e28a48 Mon Sep 17 00:00:00 2001 From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com> Date: Mon, 8 Jun 2020 06:28:19 +0530 Subject: [PATCH 50/76] [skip ci] [skip ci] --- src/reconsiler-audit.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/reconsiler-audit.js b/src/reconsiler-audit.js index 4491fd1..a77e3d2 100644 --- a/src/reconsiler-audit.js +++ b/src/reconsiler-audit.js @@ -24,7 +24,7 @@ async function setupPgClient() { rec_d_type = config.RECONSILER.RECONSILER_DURATION_TYPE var paramvalues = ['push-to-kafka',rec_d_start,rec_d_end]; sql1 = "select pgifx_sync_audit.seq_id, pgifx_sync_audit.payloadseqid,pgifx_sync_audit.auditdatetime ,pgifx_sync_audit.syncstatus, pgifx_sync_audit.payload from common_oltp.pgifx_sync_audit where pgifx_sync_audit.syncstatus =($1)" - sql2 = " and pgifx_sync_audit.producer_err <> 'Reconsiler1' and pgifx_sync_audit.auditdatetime between (timezone('utc',now())) - interval '1"+ rec_d_type + "' * ($2)" + sql2 = " and pgifx_sync_audit.tablename != 'sync_test_id' and pgifx_sync_audit.producer_err <> 'Reconsiler1' and pgifx_sync_audit.auditdatetime between (timezone('utc',now())) - interval '1"+ rec_d_type + "' * ($2)" sql3 = " and (timezone('utc',now())) - interval '1"+ rec_d_type + "' * ($3)" sql = sql1 + sql2 + sql3 await pgClient.query(sql,paramvalues, async (err,result) => { From 2e0d6941ea33bcb4c2c9002a97502f3b6b1eaa11 Mon Sep 17 00:00:00 2001 From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com> Date: Mon, 8 Jun 2020 06:29:51 +0530 Subject: [PATCH 51/76] [skip ci] [skip ci] --- src/consumer.js | 1 + 1 file changed, 1 insertion(+) diff --git a/src/consumer.js b/src/consumer.js index a68aece..e57257d 100644 --- a/src/consumer.js +++ b/src/consumer.js @@ -70,6 +70,7 @@ async function dataHandler(messageSet, topic, partition) { message.payload.operation, "Informix-updated", retryvar, "", "", JSON.stringify(message), new Date(), message.topic], 'consumer') //} } catch (err) { + logger.debug(`Consumer :inx return status error for ${message.payload.table} ${message.payload.payloadseqid} : ${ifxstatus}`) const errmsg2 = `error-sync: Could not process kafka message or informix DB error: "${err.message}"` logger.error(errmsg2) logger.debug(`error-sync: consumer "${err.message}"`) From 3ac7fee59d643c62dd1c4e4603949d08d8881f59 Mon Sep 17 00:00:00 2001 From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com> Date: Mon, 8 Jun 2020 06:30:23 +0530 Subject: [PATCH 52/76] [skip ci] [skip ci] --- src/consumer.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/consumer.js b/src/consumer.js index e57257d..af4cc3f 100644 --- a/src/consumer.js +++ b/src/consumer.js @@ -70,7 +70,7 @@ async function dataHandler(messageSet, topic, partition) { message.payload.operation, "Informix-updated", retryvar, "", "", JSON.stringify(message), new Date(), message.topic], 'consumer') //} } catch (err) { - logger.debug(`Consumer :inx return status error for ${message.payload.table} ${message.payload.payloadseqid} : ${ifxstatus}`) + logger.debug(`Consumer :ifx return status error for ${message.payload.table} ${message.payload.payloadseqid} : ${ifxstatus}`) const errmsg2 = `error-sync: Could not process kafka message or informix DB error: "${err.message}"` logger.error(errmsg2) logger.debug(`error-sync: consumer "${err.message}"`) From 
3801c9fcdd23307221927c9c0af5c7637620e306 Mon Sep 17 00:00:00 2001 From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com> Date: Mon, 8 Jun 2020 06:33:59 +0530 Subject: [PATCH 53/76] Update updateInformix.js --- src/services/updateInformix.js | 1 + 1 file changed, 1 insertion(+) diff --git a/src/services/updateInformix.js b/src/services/updateInformix.js index 385156f..8a798a8 100644 --- a/src/services/updateInformix.js +++ b/src/services/updateInformix.js @@ -51,6 +51,7 @@ async function updateInformix(payload) { else console.log('finalparam not an array')*/ logger.debug(`Final sql and param values are -- ${sql} ${JSON.stringify(finalparam)}`); const result = await informix.executeQuery(payload.payload.schema, sql, finalparam) + logger.debug(`ifx execute query result : ${result}`) return result } From 7f419c8acb32687bbbaa54bcc0c880a0132b04f8 Mon Sep 17 00:00:00 2001 From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com> Date: Mon, 8 Jun 2020 06:35:20 +0530 Subject: [PATCH 54/76] Update config.yml --- .circleci/config.yml | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index a6fc450..9df60cb 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -59,11 +59,11 @@ build_steps: &build_steps source buildenvvar ./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer - echo "Running Masterscript - deploy postgres-ifx-processer producer" - if [ -e ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar.json ]; then sudo rm -vf ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar.json; fi - ./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-producer-deployvar - source buildenvvar - ./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer + #echo "Running Masterscript - deploy postgres-ifx-processer producer" + #if [ -e ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar.json ]; then sudo rm -vf ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar.json; fi + #./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-producer-deployvar + #source buildenvvar + #./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer #echo "Running Masterscript - deploy postgres-ifx-processer producer_dd" #if [ -e ${LOGICAL_ENV}-${APP_NAME}-producer-deployvar.json ]; then sudo rm -vf ${LOGICAL_ENV}-${APP_NAME}-producer-deployvar.json; fi @@ -71,11 +71,11 @@ build_steps: &build_steps #source buildenvvar #./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer - #echo "Running Masterscript - deploy postgres-ifx-processer reconsiler1" - #if [ -e ${LOGICAL_ENV}-${APP_NAME}-reconsiler1-deployvar.json ]; then sudo rm -vf ${LOGICAL_ENV}-${APP_NAME}-reconsiler1-deployvar.json; fi - #./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-reconsiler1-deployvar - #source buildenvvar - #./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer + echo "Running Masterscript - deploy postgres-ifx-processer reconsiler1" + if [ -e ${LOGICAL_ENV}-${APP_NAME}-reconsiler1-deployvar.json ]; then sudo rm -vf ${LOGICAL_ENV}-${APP_NAME}-reconsiler1-deployvar.json; fi + ./buildenv.sh -e 
${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-reconsiler1-deployvar + source buildenvvar + ./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer jobs: # Build & Deploy against development backend # From b8600d2ff2ebbef00e533c067118426cc5263303 Mon Sep 17 00:00:00 2001 From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com> Date: Mon, 8 Jun 2020 07:19:17 +0530 Subject: [PATCH 55/76] Update consumer.js --- src/consumer.js | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/consumer.js b/src/consumer.js index af4cc3f..fd74484 100644 --- a/src/consumer.js +++ b/src/consumer.js @@ -48,8 +48,9 @@ var retryvar = ""; async function dataHandler(messageSet, topic, partition) { for (const m of messageSet) { // Process messages sequentially let message + let ifxstatus = 0 try { - let ifxstatus = 0 + // let ifxstatus = 0 let cs_payloadseqid; message = JSON.parse(m.message.value) //logger.debug(`Consumer Received from kafka :${JSON.stringify(message)}`) @@ -70,7 +71,7 @@ async function dataHandler(messageSet, topic, partition) { message.payload.operation, "Informix-updated", retryvar, "", "", JSON.stringify(message), new Date(), message.topic], 'consumer') //} } catch (err) { - logger.debug(`Consumer :ifx return status error for ${message.payload.table} ${message.payload.payloadseqid} : ${ifxstatus}`) + logger.debug(`Consumer:ifx return status error for ${message.payload.table} ${message.payload.payloadseqid} : ${ifxstatus}`) const errmsg2 = `error-sync: Could not process kafka message or informix DB error: "${err.message}"` logger.error(errmsg2) logger.debug(`error-sync: consumer "${err.message}"`) From dabecccd6189a2c4273cefa6017757af783e483b Mon Sep 17 00:00:00 2001 From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com> Date: Mon, 8 Jun 2020 12:40:51 +0530 Subject: [PATCH 56/76] Update config.yml --- .circleci/config.yml | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 9df60cb..5bdb77d 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -59,17 +59,17 @@ build_steps: &build_steps source buildenvvar ./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer - #echo "Running Masterscript - deploy postgres-ifx-processer producer" - #if [ -e ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar.json ]; then sudo rm -vf ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar.json; fi - #./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-producer-deployvar - #source buildenvvar - #./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer + echo "Running Masterscript - deploy postgres-ifx-processer producer" + if [ -e ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar.json ]; then sudo rm -vf ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar.json; fi + ./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-producer-deployvar + source buildenvvar + ./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer - #echo "Running Masterscript - deploy postgres-ifx-processer producer_dd" - #if [ -e ${LOGICAL_ENV}-${APP_NAME}-producer-deployvar.json ]; then sudo rm -vf ${LOGICAL_ENV}-${APP_NAME}-producer-deployvar.json; fi - 
#./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-producer_dd-deployvar - #source buildenvvar - #./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer + echo "Running Masterscript - deploy postgres-ifx-processer producer_dd" + if [ -e ${LOGICAL_ENV}-${APP_NAME}-producer-deployvar.json ]; then sudo rm -vf ${LOGICAL_ENV}-${APP_NAME}-producer-deployvar.json; fi + ./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-producer_dd-deployvar + source buildenvvar + ./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer echo "Running Masterscript - deploy postgres-ifx-processer reconsiler1" if [ -e ${LOGICAL_ENV}-${APP_NAME}-reconsiler1-deployvar.json ]; then sudo rm -vf ${LOGICAL_ENV}-${APP_NAME}-reconsiler1-deployvar.json; fi From 970ea9ad8d4ffe9f063dcedfc63f16da25aa8296 Mon Sep 17 00:00:00 2001 From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com> Date: Mon, 8 Jun 2020 13:16:41 +0530 Subject: [PATCH 57/76] Update default.js --- config/default.js | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/config/default.js b/config/default.js index 859d680..7cc09b2 100644 --- a/config/default.js +++ b/config/default.js @@ -22,8 +22,8 @@ module.exports = { database: process.env.PG_DATABASE || 'postgres', // database must exist before running the tool password: process.env.PG_PASSWORD || 'password', port: parseInt(process.env.PG_PORT, 10) || 5432, - triggerFunctions: process.env.TRIGGER_FUNCTIONS || ['test_db_notifications'], // List of trigger functions to listen to - triggerTopics: process.env.TRIGGER_TOPICS || ['test.db.postgres.sync'], // Names of the topic in the trigger payload + triggerFunctions: process.env.TRIGGER_FUNCTIONS || 'dev_db_notifications', // List of trigger functions to listen to + triggerTopics: process.env.TRIGGER_TOPICS || ['dev.db.postgres.sync'], // Names of the topic in the trigger payload triggerOriginators: process.env.TRIGGER_ORIGINATORS || ['tc-postgres-delta-processor'] // Names of the originator in the trigger payload }, KAFKA: { // Kafka connection options @@ -38,7 +38,7 @@ module.exports = { errorTopic: process.env.ERROR_TOPIC || 'db.scorecardtable.error', recipients: ['admin@abc.com'], // Kafka partitions to use, KAFKA_URL: process.env.KAFKA_URL, - KAFKA_GROUP_ID: process.env.KAFKA_GROUP_ID || 'test-postgres-ifx-consumer', + KAFKA_GROUP_ID: process.env.KAFKA_GROUP_ID || 'dev-postgres-ifx-consumer', KAFKA_CLIENT_CERT: process.env.KAFKA_CLIENT_CERT ? process.env.KAFKA_CLIENT_CERT.replace('\\n', '\n') : null, KAFKA_CLIENT_CERT_KEY: process.env.KAFKA_CLIENT_CERT_KEY ? 
process.env.KAFKA_CLIENT_CERT_KEY.replace('\\n', '\n') : null, }, @@ -54,7 +54,7 @@ module.exports = { }, DYNAMODB: { - DYNAMODB_TABLE: process.env.DYNAMODB_TABLE || 'test_pg_ifx_payload_sync', + DYNAMODB_TABLE: process.env.DYNAMODB_TABLE || 'dev_pg_ifx_payload_sync', DD_ElapsedTime: process.env.DD_ElapsedTime || 600000 }, From 5359f57df27e4a6cf4da71b014c62544c6d2c7b8 Mon Sep 17 00:00:00 2001 From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com> Date: Mon, 8 Jun 2020 13:39:46 +0530 Subject: [PATCH 58/76] [skip ci] [skip ci] --- pg-identity-func-trig-seq2.sql | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/pg-identity-func-trig-seq2.sql b/pg-identity-func-trig-seq2.sql index 60d61f0..31c6e89 100644 --- a/pg-identity-func-trig-seq2.sql +++ b/pg-identity-func-trig-seq2.sql @@ -1,5 +1,26 @@ SET search_path TO common_oltp; +CREATE INDEX IF NOT EXISTS email_address_idx ON common_oltp.email + ( + address + ); + +CREATE INDEX IF NOT EXISTS user_activ_code_idx ON common_oltp.user + ( + activation_code + ); + +CREATE INDEX IF NOT EXISTS user_open_id_idx ON common_oltp.user + ( + open_id + ); + +CREATE INDEX IF NOT EXISTS user_status_idx ON common_oltp.user + ( + status + ); + + CREATE TABLE sync_test_id ( uniqid INTEGER NOT NULL, From 8b32567919aacbd4d57d52cce65833681a2fac46 Mon Sep 17 00:00:00 2001 From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com> Date: Mon, 8 Jun 2020 15:39:21 +0530 Subject: [PATCH 59/76] Update reconsiler-audit.js --- src/reconsiler-audit.js | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/src/reconsiler-audit.js b/src/reconsiler-audit.js index a77e3d2..20c27eb 100644 --- a/src/reconsiler-audit.js +++ b/src/reconsiler-audit.js @@ -24,7 +24,7 @@ async function setupPgClient() { rec_d_type = config.RECONSILER.RECONSILER_DURATION_TYPE var paramvalues = ['push-to-kafka',rec_d_start,rec_d_end]; sql1 = "select pgifx_sync_audit.seq_id, pgifx_sync_audit.payloadseqid,pgifx_sync_audit.auditdatetime ,pgifx_sync_audit.syncstatus, pgifx_sync_audit.payload from common_oltp.pgifx_sync_audit where pgifx_sync_audit.syncstatus =($1)" - sql2 = " and pgifx_sync_audit.tablename != 'sync_test_id' and pgifx_sync_audit.producer_err <> 'Reconsiler1' and pgifx_sync_audit.auditdatetime between (timezone('utc',now())) - interval '1"+ rec_d_type + "' * ($2)" + sql2 = " and pgifx_sync_audit.tablename not in ('sync_test_id') and pgifx_sync_audit.producer_err <> 'Reconsiler1' and pgifx_sync_audit.auditdatetime between (timezone('utc',now())) - interval '1"+ rec_d_type + "' * ($2)" sql3 = " and (timezone('utc',now())) - interval '1"+ rec_d_type + "' * ($3)" sql = sql1 + sql2 + sql3 await pgClient.query(sql,paramvalues, async (err,result) => { @@ -45,13 +45,17 @@ async function setupPgClient() { }//column for loop try { //console.log("reconsiler_payload====",reconsiler_payload); - if (reconsiler_payload != ""){ + if (reconsiler_payload != ""){ var s_payload = reconsiler_payload payload = JSON.parse(s_payload) + logger.debug(`payload.payload.table : "${payload1.payload.table}"`); payload1 = payload.payload + //exclude sync_test_id table from pushing + if (`"${payload1.payload.table}"` !== "sync_test_id"){ await pushToKafka(payload1) logger.info('Reconsiler1 Push to kafka and added for audit trail') await audit(s_payload,0) //0 flag means reconsiler 1. 
1 flag reconsiler 2 i,e dynamodb + } } }catch (error) { logger.error('Reconsiler1 : Could not parse message payload') logger.debug(`error-sync: Reconsiler1 parse message : "${error.message}"`) From 5b8bdc5492a78dabfd644bc9ccd3acedbceb7516 Mon Sep 17 00:00:00 2001 From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com> Date: Mon, 8 Jun 2020 15:41:19 +0530 Subject: [PATCH 60/76] reconsiler update --- .circleci/config.yml | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 5bdb77d..149dbfd 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -55,21 +55,21 @@ build_steps: &build_steps #source buildenvvar #./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer - ./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar - source buildenvvar - ./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer + # ./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar + #source buildenvvar + #./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer - echo "Running Masterscript - deploy postgres-ifx-processer producer" - if [ -e ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar.json ]; then sudo rm -vf ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar.json; fi - ./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-producer-deployvar - source buildenvvar - ./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer + #echo "Running Masterscript - deploy postgres-ifx-processer producer" + #if [ -e ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar.json ]; then sudo rm -vf ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar.json; fi + #./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-producer-deployvar + #source buildenvvar + #./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer - echo "Running Masterscript - deploy postgres-ifx-processer producer_dd" - if [ -e ${LOGICAL_ENV}-${APP_NAME}-producer-deployvar.json ]; then sudo rm -vf ${LOGICAL_ENV}-${APP_NAME}-producer-deployvar.json; fi - ./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-producer_dd-deployvar - source buildenvvar - ./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer + #echo "Running Masterscript - deploy postgres-ifx-processer producer_dd" + #if [ -e ${LOGICAL_ENV}-${APP_NAME}-producer-deployvar.json ]; then sudo rm -vf ${LOGICAL_ENV}-${APP_NAME}-producer-deployvar.json; fi + #./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-producer_dd-deployvar + #source buildenvvar + #./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer echo "Running Masterscript - deploy postgres-ifx-processer reconsiler1" if [ -e ${LOGICAL_ENV}-${APP_NAME}-reconsiler1-deployvar.json ]; then sudo rm -vf ${LOGICAL_ENV}-${APP_NAME}-reconsiler1-deployvar.json; fi From d917b348163491b32a2c937ec4da41e0aae20c31 Mon Sep 17 00:00:00 2001 From: nkumar-topcoder 
<33625707+nkumar-topcoder@users.noreply.github.com> Date: Mon, 8 Jun 2020 15:56:03 +0530 Subject: [PATCH 61/76] Update reconsiler-audit.js --- src/reconsiler-audit.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/reconsiler-audit.js b/src/reconsiler-audit.js index 20c27eb..78d9c2f 100644 --- a/src/reconsiler-audit.js +++ b/src/reconsiler-audit.js @@ -47,9 +47,9 @@ async function setupPgClient() { //console.log("reconsiler_payload====",reconsiler_payload); if (reconsiler_payload != ""){ var s_payload = reconsiler_payload - payload = JSON.parse(s_payload) + var payload = JSON.parse(s_payload) logger.debug(`payload.payload.table : "${payload1.payload.table}"`); - payload1 = payload.payload + var payload1 = payload.payload //exclude sync_test_id table from pushing if (`"${payload1.payload.table}"` !== "sync_test_id"){ await pushToKafka(payload1) From 06be6351e0f8c71929b7805dab77447e78402169 Mon Sep 17 00:00:00 2001 From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com> Date: Mon, 8 Jun 2020 16:08:37 +0530 Subject: [PATCH 62/76] Update reconsiler-audit.js --- src/reconsiler-audit.js | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/src/reconsiler-audit.js b/src/reconsiler-audit.js index 78d9c2f..5f6ca81 100644 --- a/src/reconsiler-audit.js +++ b/src/reconsiler-audit.js @@ -27,6 +27,7 @@ async function setupPgClient() { sql2 = " and pgifx_sync_audit.tablename not in ('sync_test_id') and pgifx_sync_audit.producer_err <> 'Reconsiler1' and pgifx_sync_audit.auditdatetime between (timezone('utc',now())) - interval '1"+ rec_d_type + "' * ($2)" sql3 = " and (timezone('utc',now())) - interval '1"+ rec_d_type + "' * ($3)" sql = sql1 + sql2 + sql3 + logger.info(`${sql}`) await pgClient.query(sql,paramvalues, async (err,result) => { if (err) { var errmsg0 = `error-sync: Audit Reconsiler1 query "${err.message}"` @@ -48,14 +49,14 @@ async function setupPgClient() { if (reconsiler_payload != ""){ var s_payload = reconsiler_payload var payload = JSON.parse(s_payload) - logger.debug(`payload.payload.table : "${payload1.payload.table}"`); var payload1 = payload.payload + logger.debug(`payload.payload.table : "${payload1.payload.table}"`); //exclude sync_test_id table from pushing - if (`"${payload1.payload.table}"` !== "sync_test_id"){ + // if (`"${payload1.payload.table}"` !== "sync_test_id"){ await pushToKafka(payload1) logger.info('Reconsiler1 Push to kafka and added for audit trail') await audit(s_payload,0) //0 flag means reconsiler 1. 
1 flag reconsiler 2 i,e dynamodb - } + // } } }catch (error) { logger.error('Reconsiler1 : Could not parse message payload') logger.debug(`error-sync: Reconsiler1 parse message : "${error.message}"`) From 75a762064e0a0ddd8eab29440e54fe28d66a92c9 Mon Sep 17 00:00:00 2001 From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com> Date: Mon, 8 Jun 2020 16:19:31 +0530 Subject: [PATCH 63/76] Update reconsiler-audit.js --- src/reconsiler-audit.js | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/reconsiler-audit.js b/src/reconsiler-audit.js index 5f6ca81..ac2343f 100644 --- a/src/reconsiler-audit.js +++ b/src/reconsiler-audit.js @@ -47,9 +47,9 @@ async function setupPgClient() { try { //console.log("reconsiler_payload====",reconsiler_payload); if (reconsiler_payload != ""){ - var s_payload = reconsiler_payload - var payload = JSON.parse(s_payload) - var payload1 = payload.payload + s_payload = reconsiler_payload + payload = JSON.parse(s_payload) + payload1 = payload.payload logger.debug(`payload.payload.table : "${payload1.payload.table}"`); //exclude sync_test_id table from pushing // if (`"${payload1.payload.table}"` !== "sync_test_id"){ From 4d38dc400cf02b1a717c4b44d1e455e47359fdc7 Mon Sep 17 00:00:00 2001 From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com> Date: Mon, 8 Jun 2020 16:30:53 +0530 Subject: [PATCH 64/76] Update reconsiler-audit.js --- src/reconsiler-audit.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/reconsiler-audit.js b/src/reconsiler-audit.js index ac2343f..4f1bd38 100644 --- a/src/reconsiler-audit.js +++ b/src/reconsiler-audit.js @@ -50,7 +50,7 @@ async function setupPgClient() { s_payload = reconsiler_payload payload = JSON.parse(s_payload) payload1 = payload.payload - logger.debug(`payload.payload.table : "${payload1.payload.table}"`); + //logger.debug(`payload.payload.table : "${payload1.payload.table}"`); //exclude sync_test_id table from pushing // if (`"${payload1.payload.table}"` !== "sync_test_id"){ await pushToKafka(payload1) From e23546fbcc38b64324022e82c83b1bec4f1fbf81 Mon Sep 17 00:00:00 2001 From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com> Date: Tue, 9 Jun 2020 14:02:53 +0530 Subject: [PATCH 65/76] [skip ci] [skip ci] --- informix-identity-trigger-proc.sql | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/informix-identity-trigger-proc.sql b/informix-identity-trigger-proc.sql index bb41b66..2cbf092 100644 --- a/informix-identity-trigger-proc.sql +++ b/informix-identity-trigger-proc.sql @@ -1,6 +1,7 @@ database common_oltp DROP PROCEDURE proc_user_update(varchar,decimal); +DROP PROCEDURE proc_user_update; CREATE PROCEDURE informix.proc_user_update( new_handle varchar(50), user_id decimal(10,0)) @@ -9,7 +10,6 @@ UPDATE user SET handle_lower = lower(new_handle), modify_date = current WHERE us End if; end procedure; -DROP PROCEDURE proc_user_update; create procedure "informix".proc_user_update( user_id DECIMAL(10,0), old_first_name VARCHAR(64), From 57bb3152cc4e4523e4fa6c63bbfb0377b191fe0b Mon Sep 17 00:00:00 2001 From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com> Date: Wed, 10 Jun 2020 10:06:53 +0530 Subject: [PATCH 66/76] [skip ci] --- src/reconsiler-audit.js | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/reconsiler-audit.js b/src/reconsiler-audit.js index 4f1bd38..ee17d24 100644 --- a/src/reconsiler-audit.js +++ b/src/reconsiler-audit.js @@ -38,6 +38,7 @@ async function setupPgClient() { 
console.log("Reconsiler1 : Rowcount = ", result.rows.length) for (var i = 0; i < result.rows.length; i++) { for(var columnName in result.rows[i]) { + logger.debug(`reconsiler records : ${result.rows[i][columnName]}`) // console.log('column "%s" has a value of "%j"', columnName, result.rows[i][columnName]); //if ((columnName === 'seq_id') || (columnName === 'payload')){ if ((columnName === 'payload')){ @@ -47,6 +48,7 @@ async function setupPgClient() { try { //console.log("reconsiler_payload====",reconsiler_payload); if (reconsiler_payload != ""){ + logger.debug(`reconsiler payload : ${reconsiler_payload}`) s_payload = reconsiler_payload payload = JSON.parse(s_payload) payload1 = payload.payload From 9ce7aa807c2d82f0bd925b3fc2b5eed6dfc6dc2e Mon Sep 17 00:00:00 2001 From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com> Date: Wed, 10 Jun 2020 10:10:54 +0530 Subject: [PATCH 67/76] Update reconsiler-audit.js --- src/reconsiler-audit.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/reconsiler-audit.js b/src/reconsiler-audit.js index ee17d24..fb220ab 100644 --- a/src/reconsiler-audit.js +++ b/src/reconsiler-audit.js @@ -38,7 +38,7 @@ async function setupPgClient() { console.log("Reconsiler1 : Rowcount = ", result.rows.length) for (var i = 0; i < result.rows.length; i++) { for(var columnName in result.rows[i]) { - logger.debug(`reconsiler records : ${result.rows[i][columnName]}`) + logger.debug(`reconsiler record details : ${result.rows[i][columnName]}`) // console.log('column "%s" has a value of "%j"', columnName, result.rows[i][columnName]); //if ((columnName === 'seq_id') || (columnName === 'payload')){ if ((columnName === 'payload')){ From 6244f2cf536a8d53640d20612daa447b13a2c4ba Mon Sep 17 00:00:00 2001 From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com> Date: Wed, 10 Jun 2020 18:13:54 +0530 Subject: [PATCH 68/76] [skip ci] --- src/reconsiler-audit.js | 26 ++++++++++++++------------ 1 file changed, 14 insertions(+), 12 deletions(-) diff --git a/src/reconsiler-audit.js b/src/reconsiler-audit.js index fb220ab..114844c 100644 --- a/src/reconsiler-audit.js +++ b/src/reconsiler-audit.js @@ -39,23 +39,22 @@ async function setupPgClient() { for (var i = 0; i < result.rows.length; i++) { for(var columnName in result.rows[i]) { logger.debug(`reconsiler record details : ${result.rows[i][columnName]}`) - // console.log('column "%s" has a value of "%j"', columnName, result.rows[i][columnName]); - //if ((columnName === 'seq_id') || (columnName === 'payload')){ if ((columnName === 'payload')){ var reconsiler_payload = result.rows[i][columnName] } }//column for loop try { - //console.log("reconsiler_payload====",reconsiler_payload); if (reconsiler_payload != ""){ - logger.debug(`reconsiler payload : ${reconsiler_payload}`) - s_payload = reconsiler_payload + /* s_payload = reconsiler_payload. //original code payload = JSON.parse(s_payload) payload1 = payload.payload - //logger.debug(`payload.payload.table : "${payload1.payload.table}"`); - //exclude sync_test_id table from pushing - // if (`"${payload1.payload.table}"` !== "sync_test_id"){ - await pushToKafka(payload1) + await pushToKafka(payload1) */ + let s_payload = reconsiler_payload + s_payload = JSON.stringify(s_payload) + let payload = JSON.parse(s_payload) + //payload1 = payload.payload + await pushToKafka(payload) + logger.info('Reconsiler1 Push to kafka and added for audit trail') await audit(s_payload,0) //0 flag means reconsiler 1. 
1 flag reconsiler 2 i,e dynamodb // } @@ -172,13 +171,16 @@ function onScan(err, data) { //console.log(item.payloadseqid); var retval = await verify_pg_record_exists(item.payloadseqid) //console.log("retval", retval); - if (retval === false){ - var s_payload = (item.pl_document) + var s_payload = (item.pl_document) payload = s_payload payload1 = (payload.payload) + if (retval === false && `${payload1.table}` !== 'sync_test_id'){ + /* var s_payload = (item.pl_document) + payload = s_payload + payload1 = (payload.payload)*/ await pushToKafka(item.pl_document) await audit(s_payload,1) //0 flag means reconsiler 1. 1 flag reconsiler 2 i,e dynamodb - logger.info(`Reconsiler2 : ${item.payloadseqid} posted to kafka: Total Kafka Count : ${total_pushtokafka}`) + logger.info(`Reconsiler2 : ${payload1.table} ${item.payloadseqid} posted to kafka: Total Kafka Count : ${total_pushtokafka}`) total_pushtokafka += 1 } total_dd_records += 1 From 4ab2a4524c1a8520787d613b922c329c40385d7e Mon Sep 17 00:00:00 2001 From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com> Date: Wed, 10 Jun 2020 18:16:36 +0530 Subject: [PATCH 69/76] [skip ci] --- src/reconsiler-dd.js | 128 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 128 insertions(+) create mode 100644 src/reconsiler-dd.js diff --git a/src/reconsiler-dd.js b/src/reconsiler-dd.js new file mode 100644 index 0000000..1f60fbb --- /dev/null +++ b/src/reconsiler-dd.js @@ -0,0 +1,128 @@ + + +const config = require('config') +const pg = require('pg') +var AWS = require("aws-sdk"); +const logger = require('./common/logger') +const pushToKafka = require('./services/pushToKafka') +const postMessage = require('./services/posttoslack') +const auditTrail = require('./services/auditTrail'); +const port = 3000 +//===============RECONSILER2 DYNAMODB CODE STARTS HERE ========================== + +async function callReconsiler2() +{console.log("inside 2"); + docClient.scan(params, onScan); +} + +var docClient = new AWS.DynamoDB.DocumentClient({ + region: 'us-east-1', + convertEmptyValues: true + }); +//ElapsedTime = 094600000 +ElapsedTime = config.DYNAMODB.DD_ElapsedTime + var params = { + TableName: config.DYNAMODB.DYNAMODB_TABLE, + FilterExpression: "#timestamp between :time_1 and :time_2", + ExpressionAttributeNames: { + "#timestamp": "timestamp", + }, + ExpressionAttributeValues: { + ":time_1": Date.now() - ElapsedTime, + ":time_2": Date.now() + } + } + +function onScan(err, data) { + if (err) { + logger.error("Unable to scan the table. Error JSON:", JSON.stringify(err, null, 2)); + terminate() + } else { + try + { + console.log("Scan succeeded."); + let total_dd_records = 0; + let total_pushtokafka = 0; + data.Items.forEach(async function(item) { + //console.log(item.payloadseqid); + var retval = await verify_pg_record_exists(item.payloadseqid) + //console.log("retval", retval); + var s_payload = (item.pl_document) + payload = s_payload + payload1 = (payload.payload) + if (retval === false && `${payload1.table}` !== 'sync_test_id'){ + /* var s_payload = (item.pl_document) + payload = s_payload + payload1 = (payload.payload)*/ + await pushToKafka(item.pl_document) + await audit(s_payload,1) //0 flag means reconsiler 1. 
1 flag reconsiler 2 i,e dynamodb + logger.info(`Reconsiler2 : ${payload1.table} ${item.payloadseqid} posted to kafka: Total Kafka Count : ${total_pushtokafka}`) + total_pushtokafka += 1 + } + total_dd_records += 1 + }); + logger.info(`Reconsiler2 : count of total_dd_records ${total_dd_records}`); + if (typeof data.LastEvaluatedKey != "undefined") { + console.log("Scanning for more..."); + params.ExclusiveStartKey = data.LastEvaluatedKey; + docClient.scan(params, onScan); + } + } + catch (err) { + const errmsg = `error-sync: Reconsiler2 : Error during dynamodb scan/kafka push: "${err.message}"` + logger.error(errmsg) + logger.logFullError(err) + callposttoslack(errmsg) + //terminate() + } + } + //terminate() +} + +async function verify_pg_record_exists(seqid) +{ + try { + const pgClient = new pg.Client(pgConnectionString) + if (!pgClient.connect()) {await pgClient.connect()} + var paramvalues = [seqid] + sql = 'select * from common_oltp.pgifx_sync_audit where pgifx_sync_audit.payloadseqid = ($1)' + return new Promise(function (resolve, reject) { + pgClient.query(sql, paramvalues, async (err, result) => { + if (err) { + var errmsg0 = `error-sync: Audit reconsiler2 query "${err.message}"` + console.log(errmsg0) + } + else { + if (result.rows.length > 0) { + //console.log("row length > 0 ") + resolve(true); + } + else { + //console.log("0") + resolve(false); + } + } + pgClient.end() + }) + })} + catch (err) { + const errmsg = `error-sync: Reconsiler2 : Error in setting up postgres client: "${err.message}"` + logger.error(errmsg) + logger.logFullError(err) + await callposttoslack(errmsg) + terminate() + } +} + +//=================BEGIN HERE ======================= +const terminate = () => process.exit() + +async function run() { + logger.debug("Initialising Reconsiler1 setup...") + await setupPgClient() + //logger.debug("Initialising Reconsiler2 setup...") + //callReconsiler2() + // terminate() +} +//execute +run() From f47285b215e018bae41aa361637da0a3a1dd98da Mon Sep 17 00:00:00 2001 From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com> Date: Wed, 10 Jun 2020 18:18:53 +0530 Subject: [PATCH 70/76] Update package.json --- package.json | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/package.json b/package.json index 179b315..772e77e 100644 --- a/package.json +++ b/package.json @@ -10,7 +10,8 @@ "consumer": "node ./src/consumer.js", "producer_dd": "node ./src/producer.js failover", "reconsiler1": "node ./src/reconsiler-audit.js", - "start": "npm run producer & npm run producer_dd & npm run consumer & npm run reconsiler1" + "reconsiler2": "node ./src/reconsiler-dd.js", + "start": "npm run producer & npm run producer_dd & npm run consumer & npm run reconsiler1 & npm run reconsiler2" }, "author": "Topcoder", "license": "ISC", From 415c84d9a7ac1f73137138069ce82cd9dd11182d Mon Sep 17 00:00:00 2001 From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com> Date: Wed, 10 Jun 2020 18:36:01 +0530 Subject: [PATCH 71/76] Update reconsiler-audit.js --- src/reconsiler-audit.js | 20 +++++++++++++++++++- 1 file changed, 19 insertions(+), 1 deletion(-) diff --git a/src/reconsiler-audit.js b/src/reconsiler-audit.js index 114844c..8e85225 100644 --- a/src/reconsiler-audit.js +++ b/src/reconsiler-audit.js @@ -49,7 +49,25 @@ async function setupPgClient() { payload = JSON.parse(s_payload) payload1 = payload.payload await pushToKafka(payload1) */ - let s_payload = reconsiler_payload + + let s_payload = reconsiler_payload + let payload1 + let payload + s_payload = 
JSON.stringify(s_payload) + if (s_payload.includes("processId")) + { + console.log("here1") + payload = JSON.parse(s_payload) + payload1 = JSON.parse(payload.payload) + console.log(payload1) + } else + {console.log("here2") + payload = JSON.parse(s_payload) + payload1 = payload + console.log(payload1) + } + + s_payload = JSON.stringify(s_payload) let payload = JSON.parse(s_payload) //payload1 = payload.payload From bdfb9516e98c1b910793308e28254c47ff53b826 Mon Sep 17 00:00:00 2001 From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com> Date: Wed, 10 Jun 2020 18:40:04 +0530 Subject: [PATCH 72/76] Update reconsiler-audit.js --- src/reconsiler-audit.js | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/src/reconsiler-audit.js b/src/reconsiler-audit.js index 8e85225..34ff11a 100644 --- a/src/reconsiler-audit.js +++ b/src/reconsiler-audit.js @@ -66,12 +66,10 @@ async function setupPgClient() { payload1 = payload console.log(payload1) } - - - s_payload = JSON.stringify(s_payload) - let payload = JSON.parse(s_payload) + //s_payload = JSON.stringify(s_payload) + //let payload = JSON.parse(s_payload) //payload1 = payload.payload - await pushToKafka(payload) + await pushToKafka(payload1) logger.info('Reconsiler1 Push to kafka and added for audit trail') await audit(s_payload,0) //0 flag means reconsiler 1. 1 flag reconsiler 2 i,e dynamodb From 3c5ac6dc8149146ede47656fe32465521c191181 Mon Sep 17 00:00:00 2001 From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com> Date: Wed, 10 Jun 2020 18:53:36 +0530 Subject: [PATCH 73/76] Update reconsiler-audit.js --- src/reconsiler-audit.js | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/src/reconsiler-audit.js b/src/reconsiler-audit.js index 34ff11a..c75b3dd 100644 --- a/src/reconsiler-audit.js +++ b/src/reconsiler-audit.js @@ -51,18 +51,19 @@ async function setupPgClient() { await pushToKafka(payload1) */ let s_payload = reconsiler_payload + let s_payload1 = JSON.stringify(s_payload) let payload1 let payload - s_payload = JSON.stringify(s_payload) - if (s_payload.includes("processId")) + if (s_payload1.includes("processId")) { console.log("here1") payload = JSON.parse(s_payload) - payload1 = JSON.parse(payload.payload) + //payload1 = JSON.parse(payload.payload) + payload1 = payload.payload console.log(payload1) } else {console.log("here2") - payload = JSON.parse(s_payload) + payload = JSON.parse(s_payload1) payload1 = payload console.log(payload1) } From 886b4961b3872321e532305213b07c68ab03ed6d Mon Sep 17 00:00:00 2001 From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com> Date: Wed, 10 Jun 2020 19:56:19 +0530 Subject: [PATCH 74/76] [skip ci] --- src/reconsiler-dd.js | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/reconsiler-dd.js b/src/reconsiler-dd.js index 1f60fbb..2d403d8 100644 --- a/src/reconsiler-dd.js +++ b/src/reconsiler-dd.js @@ -118,10 +118,10 @@ async function verify_pg_record_exists(seqid) const terminate = () => process.exit() async function run() { - logger.debug("Initialising Reconsiler1 setup...") - await setupPgClient() - //logger.debug("Initialising Reconsiler2 setup...") - //callReconsiler2() + //logger.debug("Initialising Reconsiler1 setup...") + //await setupPgClient() + logger.debug("Initialising Reconsiler2 setup...") + callReconsiler2() // terminate() } //execute From 3b86cff61fc6f06658c5639553c97e9525f89672 Mon Sep 17 00:00:00 2001 From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com> 
<33625707+nkumar-topcoder@users.noreply.github.com>
Date: Wed, 10 Jun 2020 20:11:12 +0530 Subject: [PATCH 75/76] for reconsiler 2 update --- .circleci/config.yml | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 149dbfd..768438b 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -71,12 +71,18 @@ build_steps: &build_steps #source buildenvvar #./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer - echo "Running Masterscript - deploy postgres-ifx-processer reconsiler1" - if [ -e ${LOGICAL_ENV}-${APP_NAME}-reconsiler1-deployvar.json ]; then sudo rm -vf ${LOGICAL_ENV}-${APP_NAME}-reconsiler1-deployvar.json; fi - ./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-reconsiler1-deployvar + #echo "Running Masterscript - deploy postgres-ifx-processer reconsiler1" + #if [ -e ${LOGICAL_ENV}-${APP_NAME}-reconsiler1-deployvar.json ]; then sudo rm -vf ${LOGICAL_ENV}-${APP_NAME}-reconsiler1-deployvar.json; fi + #./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-reconsiler1-deployvar + #source buildenvvar + #./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer + + echo "Running Masterscript - deploy postgres-ifx-processer reconsiler2" + if [ -e ${LOGICAL_ENV}-${APP_NAME}-reconsiler2-deployvar.json ]; then sudo rm -vf ${LOGICAL_ENV}-${APP_NAME}-reconsiler1-deployvar.json; fi + ./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-reconsiler2-deployvar source buildenvvar ./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer - + jobs: # Build & Deploy against development backend # "build-dev": From be514a7ebdfadf0c9cb11d008cabfb788324ccc5 Mon Sep 17 00:00:00 2001 From: nkumar-topcoder <33625707+nkumar-topcoder@users.noreply.github.com> Date: Thu, 11 Jun 2020 11:59:28 +0530 Subject: [PATCH 76/76] Update config.yml --- .circleci/config.yml | 32 +++++++++++++++----------------- 1 file changed, 15 insertions(+), 17 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index e6d51dd..30adcee 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -60,19 +60,17 @@ build_steps: &build_steps #./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer - #echo "Running Masterscript - deploy postgres-ifx-processer producer" - #if [ -e ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar.json ]; then sudo rm -vf ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar.json; fi - #./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-producer-deployvar - #source buildenvvar - #./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer - - + echo "Running Masterscript - deploy postgres-ifx-processer producer" + if [ -e ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar.json ]; then sudo rm -vf ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar.json; fi + ./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-producer-deployvar + source buildenvvar + ./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer - #echo "Running Masterscript - deploy postgres-ifx-processer producer_dd" - #if [ -e 
${LOGICAL_ENV}-${APP_NAME}-producer-deployvar.json ]; then sudo rm -vf ${LOGICAL_ENV}-${APP_NAME}-producer-deployvar.json; fi - #./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-producer_dd-deployvar - #source buildenvvar - #./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer + echo "Running Masterscript - deploy postgres-ifx-processer producer_dd" + if [ -e ${LOGICAL_ENV}-${APP_NAME}-producer-deployvar.json ]; then sudo rm -vf ${LOGICAL_ENV}-${APP_NAME}-producer-deployvar.json; fi + ./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-producer_dd-deployvar + source buildenvvar + ./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer #echo "Running Masterscript - deploy postgres-ifx-processer reconsiler1" @@ -81,11 +79,11 @@ build_steps: &build_steps #source buildenvvar #./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer - echo "Running Masterscript - deploy postgres-ifx-processer reconsiler2" - if [ -e ${LOGICAL_ENV}-${APP_NAME}-reconsiler2-deployvar.json ]; then sudo rm -vf ${LOGICAL_ENV}-${APP_NAME}-reconsiler1-deployvar.json; fi - ./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-reconsiler2-deployvar - source buildenvvar - ./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer + #echo "Running Masterscript - deploy postgres-ifx-processer reconsiler2" + #if [ -e ${LOGICAL_ENV}-${APP_NAME}-reconsiler2-deployvar.json ]; then sudo rm -vf ${LOGICAL_ENV}-${APP_NAME}-reconsiler1-deployvar.json; fi + #./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-reconsiler2-deployvar + #source buildenvvar + #./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer jobs: # Build & Deploy against development backend #
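For reference, the notify_trigger, notify_trigger_informixoltp, and notify_trigger_common_oltp functions in the SQL patches above all publish each row change as a JSON string on the 'dev_db_notifications' channel via PERFORM pg_notify(...). Below is a minimal Node listener sketch for that channel. It is illustrative only, not part of the patch series: the database, password, and port mirror the defaults shown in config/default.js, while the host and user values are assumptions, and the parsed fields mirror the payload shape the triggers assemble.

const pg = require('pg')

// Assumed connection settings; database/password/port mirror the defaults in
// config/default.js, host and user are placeholders for illustration.
const client = new pg.Client({
  host: 'localhost',
  port: 5432,
  database: 'postgres',
  user: 'postgres',
  password: 'password'
})

async function listen () {
  await client.connect()
  // Subscribe to the channel the trigger functions notify on.
  await client.query('LISTEN dev_db_notifications')
  client.on('notification', (msg) => {
    // msg.payload is the JSON string the trigger builds, of the form:
    // {"topic":"dev.db.postgres.sync","originator":"tc-postgres-delta-processor",
    //  "timestamp":"...","mime-type":"application/json",
    //  "payload":{"payloadseqid":"...","Uniquecolumn":"...","operation":"INSERT",
    //             "schema":"...","table":"...","data":{...}}}
    const event = JSON.parse(msg.payload)
    const p = event.payload
    console.log(`${p.operation} on ${p.schema}.${p.table} seq=${p.payloadseqid} pk=${p.Uniquecolumn}`)
  })
}

listen().catch((err) => { console.error(err); process.exit(1) })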