Skip to content

Dev #2

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 37 commits into from
Sep 26, 2019
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
37 commits
Select commit Hold shift + click to select a range
0912d85
initial commit
Aug 23, 2019
5c5bca8
Update package.json
nkumar-topcoder Aug 23, 2019
b068baa
Update producer.js
nkumar-topcoder Aug 29, 2019
3e8f23f
Update consumer.js
nkumar-topcoder Aug 29, 2019
8a7a75f
env changes
Sep 2, 2019
3ec7a6d
add Dockerfile, circleci
Sep 3, 2019
2a9e066
Update config.yml
nkumar-topcoder Sep 3, 2019
ef3d267
Update config.yml
nkumar-topcoder Sep 3, 2019
b3f6f97
update docker file
Sep 3, 2019
c11cac2
update circleci file
Sep 3, 2019
34dabbb
changes to config.yml
Sep 3, 2019
cf437ca
fix javac issue
Sep 3, 2019
8f32aa0
update docker file
Sep 3, 2019
11e97f6
Update Dockerfile
nkumar-topcoder Sep 4, 2019
1bef1e6
Update Dockerfile
nkumar-topcoder Sep 4, 2019
df92242
Update config.yml
nkumar-topcoder Sep 5, 2019
90ecbeb
Update config.yml
nkumar-topcoder Sep 5, 2019
e6cf5bb
Create scorecard_trigger_function.sql
nkumar-topcoder Sep 9, 2019
2dd4555
Update default.js
nkumar-topcoder Sep 9, 2019
9925ee7
Update scorecard_trigger_function.sql
nkumar-topcoder Sep 9, 2019
0559d10
Update scorecard_trigger_function.sql
nkumar-topcoder Sep 11, 2019
1e301fe
Update scorecard_trigger_function.sql
nkumar-topcoder Sep 11, 2019
ef72f26
Update scorecard_trigger_function.sql
nkumar-topcoder Sep 11, 2019
d7845f4
Update consumer.js
nkumar-topcoder Sep 11, 2019
3598497
[skip ci]
nkumar-topcoder Sep 15, 2019
af042f3
Update package.json
nkumar-topcoder Sep 15, 2019
ef21e77
[skip ci]
nkumar-topcoder Sep 19, 2019
9106279
kafka retry feature
Sep 19, 2019
9125a2b
Merge branch 'dev-retryfeature' of https://github.com/topcoder-platfo…
Sep 19, 2019
43fd410
Update producer.js
nkumar-topcoder Sep 19, 2019
c5b829b
kafka audit update
Sep 22, 2019
1584398
Update scorecard_trigger_function.sql
nkumar-topcoder Sep 23, 2019
4ec6c60
Update consumer.js
nkumar-topcoder Sep 23, 2019
eba433f
Update scorecard_trigger_function.sql
nkumar-topcoder Sep 23, 2019
fd14132
Merge pull request #1 from topcoder-platform/dev-retryfeature
nkumar-topcoder Sep 25, 2019
6bcdba5
Update updateInformix.js
nkumar-topcoder Sep 25, 2019
8f546fe
Update consumer.js
nkumar-topcoder Sep 26, 2019
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
91 changes: 91 additions & 0 deletions .circleci/config.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,91 @@
version: 2

# Shared executor config for all jobs (merged in via the <<: *defaults alias).
defaults: &defaults
  docker:
    # - image: docker:17.06.1-ce-git
    - image: circleci/python:2.7-stretch-browsers

# Installs build/deploy tooling: jq, awscli, docker-compose and a JDK
# (the JDK is needed by the informix client used in the Docker build).
install_dependency: &install_dependency
  name: Installation of build and deployment dependencies.
  command: |
    sudo apt install jq
    sudo pip install awscli --upgrade
    sudo pip install docker-compose
    sudo apt-get install default-jdk

# Clones the shared Topcoder deploy scripts and copies the entry points
# into the workspace.
# NOTE(review): pinned to "master" — change back to v1.3 or latest once the
# counter var is generalized (per the original author's comment).
install_deploysuite: &install_deploysuite
  name: Installation of install_deploysuite.
  command: |
    git clone --branch master https://github.com/topcoder-platform/tc-deploy-scripts ../buildscript
    cp ./../buildscript/master_deploy.sh .
    cp ./../buildscript/buildenv.sh .
    cp ./../buildscript/awsconfiguration.sh .

restore_cache_settings_for_build: &restore_cache_settings_for_build
  key: docker-pg-ifx-notify-{{ checksum "package-lock.json" }}

save_cache_settings: &save_cache_settings
  key: docker-pg-ifx-notify-{{ checksum "package-lock.json" }}
  paths:
    - node_modules

# Common build/deploy pipeline: build the image once, then run the master
# deploy script twice — first for the consumer service, then for the producer.
build_steps: &build_steps
  - checkout
  - setup_remote_docker
  - run: *install_dependency
  - run: *install_deploysuite
  - restore_cache: *restore_cache_settings_for_build
  - run: docker build -t postgres-ifx-processer:latest .
  - save_cache: *save_cache_settings
  - deploy:
      name: Running MasterScript.
      command: |
        ./awsconfiguration.sh ${DEPLOY_ENV}
        source awsenvconf
        ./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar
        source buildenvvar
        ./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer

        echo "Running Masterscript - deploy postgres-ifx-processer producer"
        # Remove the stale consumer deployvar file so the producer deploy
        # does not pick it up by accident.
        if [ -e ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar.json ]; then sudo rm -vf ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar.json; fi
        ./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-producer-deployvar
        source buildenvvar
        ./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer

jobs:
  # Build & Deploy against development backend
  "build-dev":
    <<: *defaults
    environment:
      DEPLOY_ENV: "DEV"
      LOGICAL_ENV: "DEV"
      GLOBAL_ENV: "dev"
      APP_NAME: "postgres-ifx-processer"
    steps: *build_steps
  # Build & Deploy against production backend
  "build-prod":
    <<: *defaults
    environment:
      DEPLOY_ENV: "PROD"
      LOGICAL_ENV: "PROD"
      GLOBAL_ENV: "prod"
      APP_NAME: "postgres-ifx-processer"
    steps: *build_steps

workflows:
  version: 2
  build:
    jobs:
      # Development builds are executed on the "dev" branches only.
      - "build-dev":
          context: org-global
          filters:
            branches:
              only:
                - dev
                - dev-retryfeature
      # Production builds are executed on "master" only.
      - "build-prod":
          context: org-global
          filters:
            branches:
              only:
                - master

8 changes: 8 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
# OS artifacts
.DS_Store
# Installed dependencies
node_modules
# Runtime logs
*.log
# Local environment scripts (contain credentials — never commit)
# NOTE(review): the two explicit entries below are already covered by *.sh,
# kept for clarity.
env_producer.sh
env_consumer.sh
*.env
*.sh
*.list
10 changes: 10 additions & 0 deletions Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
# JDK base image: the informix/JDBC tooling used by this processor needs Java;
# Node.js is layered on top via the NodeSource setup script.
FROM openjdk:11.0.3-jdk-stretch

# Install Node.js 8 plus native build deps (libpq, g++, make) for npm modules.
# NOTE(review): Node 8 is end-of-life — confirm whether the app can move to a
# supported release before changing this.
# Clean the apt lists in the same layer so they don't bloat the image.
RUN apt-get update && wget -qO- https://deb.nodesource.com/setup_8.x | bash - \
    && apt-get install -y nodejs libpq-dev g++ make \
    && rm -rf /var/lib/apt/lists/*

WORKDIR /opt/app
COPY . .

RUN npm install
#RUN npm install dotenv --save

# The npm script to run (e.g. "producer" or "consumer") is supplied as the
# container command argument.
ENTRYPOINT ["npm","run"]
50 changes: 50 additions & 0 deletions config/default.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
const path = require('path')

module.exports = {
LOG_LEVEL: process.env.LOG_LEVEL || 'debug', // Winston log level
LOG_FILE: path.join(__dirname, '../app.log'), // File to write logs to
INFORMIX: { // Informix connection options
host: process.env.INFORMIX_HOST || 'localhost',
port: parseInt(process.env.INFORMIX_PORT, 10) || 2021,
user: process.env.INFORMIX_USER || 'informix',
password: process.env.INFORMIX_PASSWORD || 'password',
database: process.env.INFORMIX_DATABASE || 'db',
server: process.env.INFORMIX_SERVER || 'informixserver',
minpool: parseInt(process.env.MINPOOL, 10) || 1,
maxpool: parseInt(process.env.MAXPOOL, 10) || 60,
maxsize: parseInt(process.env.MAXSIZE, 10) || 0,
idleTimeout: parseInt(process.env.IDLETIMEOUT, 10) || 3600,
timeout: parseInt(process.env.TIMEOUT, 10) || 30000
},
POSTGRES: { // Postgres connection options
user: process.env.PG_USER || 'pg_user',
host: process.env.PG_HOST || 'localhost',
database: process.env.PG_DATABASE || 'postgres', // database must exist before running the tool
password: process.env.PG_PASSWORD || 'password',
port: parseInt(process.env.PG_PORT, 10) || 5432,
triggerFunctions: process.env.TRIGGER_FUNCTIONS || ['db_notifications'], // List of trigger functions to listen to
triggerTopics: process.env.TRIGGER_TOPICS || ['db.postgres.sync'], // Names of the topic in the trigger payload
triggerOriginators: process.env.TRIGGER_ORIGINATORS || ['tc-postgres-delta-processor'] // Names of the originator in the trigger payload
},
KAFKA: { // Kafka connection options
brokers_url: process.env.KAFKA_URL || 'localhost:9092', // comma delimited list of initial brokers list
SSL: {
cert: process.env.KAFKA_CLIENT_CERT || null, // SSL client certificate file path
key: process.env.KAFKA_CLIENT_CERT_KEY || null // SSL client key file path
},
topic: process.env.KAFKA_TOPIC || 'db.topic.sync', // Kafka topic to push and receive messages
partition: process.env.partition || [0], // Kafka partitions to use
maxRetry: process.env.MAX_RETRY || 3,
errorTopic: process.env.ERROR_TOPIC || 'db.scorecardtable.error',
recipients: ['[email protected]'] // Kafka partitions to use
},

AUTH0_URL: process.env.AUTH0_URL ,
AUTH0_AUDIENCE: process.env.AUTH0_AUDIENCE ,
TOKEN_CACHE_TIME: process.env.TOKEN_CACHE_TIME ,
AUTH0_CLIENT_ID: process.env.AUTH0_CLIENT_ID ,
AUTH0_CLIENT_SECRET: process.env.AUTH0_CLIENT_SECRET ,
BUSAPI_URL : process.env.BUSAPI_URL ,
KAFKA_ERROR_TOPIC : process.env.KAFKA_ERROR_TOPIC ,
AUTH0_PROXY_SERVER_URL: process.env.AUTH0_PROXY_SERVER_URL
}
41 changes: 41 additions & 0 deletions config/test.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
const path = require('path')

module.exports = {
LOG_LEVEL: process.env.LOG_LEVEL || 'debug', // Winston log level
LOG_FILE: path.join(__dirname, '../app.log'), // File to write logs to
INFORMIX: { // Informix connection options
host: process.env.INFORMIX_HOST || 'localhost',
port: parseInt(process.env.INFORMIX_PORT, 10) || 2021,
user: process.env.INFORMIX_USER || 'informix',
password: process.env.INFORMIX_PASSWORD || '1nf0rm1x',
database: process.env.INFORMIX_DATABASE || 'tcs_catalog',
server: process.env.INFORMIX_SERVER || 'informixoltp_tcp',
minpool: parseInt(process.env.MINPOOL, 10) || 1,
maxpool: parseInt(process.env.MAXPOOL, 10) || 60,
maxsize: parseInt(process.env.MAXSIZE, 10) || 0,
idleTimeout: parseInt(process.env.IDLETIMEOUT, 10) || 3600,
timeout: parseInt(process.env.TIMEOUT, 10) || 30000
},
POSTGRES: { // Postgres connection options
user: process.env.PG_USER || 'mayur',
host: process.env.PG_HOST || 'localhost',
database: process.env.PG_DATABASE || 'postgres', // database must exist before running the tool
password: process.env.PG_PASSWORD || 'password',
port: parseInt(process.env.PG_PORT, 10) || 5432,
triggerFunctions: process.env.triggerFunctions || ['db_notifications'], // List of trigger functions to listen to
triggerTopics: process.env.TRIGGER_TOPICS || ['db.postgres.sync'], // Names of the topic in the trigger payload
triggerOriginators: process.env.TRIGGER_ORIGINATORS || ['tc-postgres-delta-processor'] // Names of the originator in the trigger payload
},
KAFKA: { // Kafka connection options
brokers_url: process.env.KAFKA_URL || 'localhost:9092', // comma delimited list of initial brokers list
SSL: {
cert: process.env.KAFKA_SSL_CERT || null, // SSL client certificate file path
key: process.env.KAFKA_SSL_KEY || null // SSL client key file path
},
topic: process.env.KAFKA_TOPIC || 'db.postgres.sync', // Kafka topic to push and receive messages
partition: process.env.partition || [0] // Kafka partitions to use
},
TEST_TABLE: 'scorecard', // Name of test table to use. Triggers for this table must exist
TEST_SCHEMA: 'tcs_catalog', // Name of schema "TEST_TABLE" belongs to
TEST_INTERVAL: 5000 // 5s interval to wait for postgres DML updates to be propagated to informix
}
Loading