
Commit 02b70ca

Merge pull request #2 from topcoder-platform/dev
merge to master from dev
2 parents 3755023 + 8f546fe commit 02b70ca

15 files changed: +4881 -0 lines changed

.circleci/config.yml

Lines changed: 91 additions & 0 deletions
@@ -0,0 +1,91 @@
version: 2
defaults: &defaults
  docker:
    #- image: docker:17.06.1-ce-git
    - image: circleci/python:2.7-stretch-browsers
install_dependency: &install_dependency
  name: Installation of build and deployment dependencies.
  command: |
    sudo apt install jq
    sudo pip install awscli --upgrade
    sudo pip install docker-compose
    sudo apt-get install default-jdk
install_deploysuite: &install_deploysuite
  name: Installation of install_deploysuite.
  #Git Clone -change back to v1.3 or latest once counter var is generalized.
  command: |
    git clone --branch master https://github.com/topcoder-platform/tc-deploy-scripts ../buildscript
    cp ./../buildscript/master_deploy.sh .
    cp ./../buildscript/buildenv.sh .
    cp ./../buildscript/awsconfiguration.sh .

restore_cache_settings_for_build: &restore_cache_settings_for_build
  key: docker-pg-ifx-notify-{{ checksum "package-lock.json" }}

save_cache_settings: &save_cache_settings
  key: docker-pg-ifx-notify-{{ checksum "package-lock.json" }}
  paths:
    - node_modules


build_steps: &build_steps
  - checkout
  - setup_remote_docker
  - run: *install_dependency
  - run: *install_deploysuite
  - restore_cache: *restore_cache_settings_for_build
  - run: docker build -t postgres-ifx-processer:latest .
  - save_cache: *save_cache_settings
  - deploy:
      name: Running MasterScript.
      command: |
        ./awsconfiguration.sh ${DEPLOY_ENV}
        source awsenvconf
        ./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar
        source buildenvvar
        ./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer

        echo "Running Masterscript - deploy postgres-ifx-processer producer"
        if [ -e ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar.json ]; then sudo rm -vf ${LOGICAL_ENV}-${APP_NAME}-consumer-deployvar.json; fi
        ./buildenv.sh -e ${DEPLOY_ENV} -b ${LOGICAL_ENV}-${APP_NAME}-producer-deployvar
        source buildenvvar
        ./master_deploy.sh -d ECS -e ${DEPLOY_ENV} -t latest -s ${GLOBAL_ENV}-global-appvar,${LOGICAL_ENV}-${APP_NAME}-appvar -i postgres-ifx-processer

jobs:
  # Build & Deploy against development backend #
  "build-dev":
    <<: *defaults
    environment:
      DEPLOY_ENV: "DEV"
      LOGICAL_ENV: "DEV"
      GLOBAL_ENV: "dev"
      APP_NAME: "postgres-ifx-processer"
    steps: *build_steps
  # Build & Deploy against production backend
  "build-prod":
    <<: *defaults
    environment:
      DEPLOY_ENV: "PROD"
      LOGICAL_ENV: "PROD"
      GLOBAL_ENV: "prod"
      APP_NAME: "postgres-ifx-processer"
    steps: *build_steps

workflows:
  version: 2
  build:
    jobs:
      # Development builds are executed on "develop" branch only.
      - "build-dev":
          context : org-global
          filters:
            branches:
              only:
                - dev
                - dev-retryfeature
      - "build-prod":
          context : org-global
          filters:
            branches:
              only:
                - master

.gitignore

Lines changed: 8 additions & 0 deletions
@@ -0,0 +1,8 @@
.DS_Store
node_modules
*.log
env_producer.sh
env_consumer.sh
*.env
*.sh
*.list

Dockerfile

Lines changed: 10 additions & 0 deletions
@@ -0,0 +1,10 @@
FROM openjdk:11.0.3-jdk-stretch

RUN apt-get update && wget -qO- https://deb.nodesource.com/setup_8.x | bash - && apt-get install -y nodejs libpq-dev g++ make

WORKDIR /opt/app
COPY . .

RUN npm install
#RUN npm install dotenv --save
ENTRYPOINT ["npm","run"]

config/default.js

Lines changed: 50 additions & 0 deletions
@@ -0,0 +1,50 @@
1+
const path = require('path')
2+
3+
module.exports = {
4+
LOG_LEVEL: process.env.LOG_LEVEL || 'debug', // Winston log level
5+
LOG_FILE: path.join(__dirname, '../app.log'), // File to write logs to
6+
INFORMIX: { // Informix connection options
7+
host: process.env.INFORMIX_HOST || 'localhost',
8+
port: parseInt(process.env.INFORMIX_PORT, 10) || 2021,
9+
user: process.env.INFORMIX_USER || 'informix',
10+
password: process.env.INFORMIX_PASSWORD || 'password',
11+
database: process.env.INFORMIX_DATABASE || 'db',
12+
server: process.env.INFORMIX_SERVER || 'informixserver',
13+
minpool: parseInt(process.env.MINPOOL, 10) || 1,
14+
maxpool: parseInt(process.env.MAXPOOL, 10) || 60,
15+
maxsize: parseInt(process.env.MAXSIZE, 10) || 0,
16+
idleTimeout: parseInt(process.env.IDLETIMEOUT, 10) || 3600,
17+
timeout: parseInt(process.env.TIMEOUT, 10) || 30000
18+
},
19+
POSTGRES: { // Postgres connection options
20+
user: process.env.PG_USER || 'pg_user',
21+
host: process.env.PG_HOST || 'localhost',
22+
database: process.env.PG_DATABASE || 'postgres', // database must exist before running the tool
23+
password: process.env.PG_PASSWORD || 'password',
24+
port: parseInt(process.env.PG_PORT, 10) || 5432,
25+
triggerFunctions: process.env.TRIGGER_FUNCTIONS || ['db_notifications'], // List of trigger functions to listen to
26+
triggerTopics: process.env.TRIGGER_TOPICS || ['db.postgres.sync'], // Names of the topic in the trigger payload
27+
triggerOriginators: process.env.TRIGGER_ORIGINATORS || ['tc-postgres-delta-processor'] // Names of the originator in the trigger payload
28+
},
29+
KAFKA: { // Kafka connection options
30+
brokers_url: process.env.KAFKA_URL || 'localhost:9092', // comma delimited list of initial brokers list
31+
SSL: {
32+
cert: process.env.KAFKA_CLIENT_CERT || null, // SSL client certificate file path
33+
key: process.env.KAFKA_CLIENT_CERT_KEY || null // SSL client key file path
34+
},
35+
topic: process.env.KAFKA_TOPIC || 'db.topic.sync', // Kafka topic to push and receive messages
36+
partition: process.env.partition || [0], // Kafka partitions to use
37+
maxRetry: process.env.MAX_RETRY || 3,
38+
errorTopic: process.env.ERROR_TOPIC || 'db.scorecardtable.error',
39+
recipients: ['[email protected]'] // Kafka partitions to use
40+
},
41+
42+
AUTH0_URL: process.env.AUTH0_URL ,
43+
AUTH0_AUDIENCE: process.env.AUTH0_AUDIENCE ,
44+
TOKEN_CACHE_TIME: process.env.TOKEN_CACHE_TIME ,
45+
AUTH0_CLIENT_ID: process.env.AUTH0_CLIENT_ID ,
46+
AUTH0_CLIENT_SECRET: process.env.AUTH0_CLIENT_SECRET ,
47+
BUSAPI_URL : process.env.BUSAPI_URL ,
48+
KAFKA_ERROR_TOPIC : process.env.KAFKA_ERROR_TOPIC ,
49+
AUTH0_PROXY_SERVER_URL: process.env.AUTH0_PROXY_SERVER_URL
50+
}
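For orientation (not part of this diff): since these options live in config/default.js, they are presumably loaded through the node `config` package at runtime. The sketch below shows one plausible way the POSTGRES block could be consumed, opening a `pg` client and LISTENing on each configured trigger function; the handler and the assumed JSON payload shape are illustrative only, not code from this repository.

// Minimal sketch (assumption, not part of this commit): read the POSTGRES block via
// the "config" package and listen for the configured trigger notifications with "pg".
const config = require('config')
const { Client } = require('pg')

async function listen () {
  const client = new Client({
    user: config.POSTGRES.user,
    host: config.POSTGRES.host,
    database: config.POSTGRES.database,
    password: config.POSTGRES.password,
    port: config.POSTGRES.port
  })
  await client.connect()
  // LISTEN on each configured trigger function / notification channel
  for (const channel of config.POSTGRES.triggerFunctions) {
    await client.query(`LISTEN ${channel}`)
  }
  client.on('notification', (msg) => {
    const payload = JSON.parse(msg.payload) // assumed: the trigger sends a JSON payload
    console.log('received change on channel %s: %j', msg.channel, payload)
  })
}

listen().catch((err) => console.error(err))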

config/test.js

Lines changed: 41 additions & 0 deletions
@@ -0,0 +1,41 @@
1+
const path = require('path')
2+
3+
module.exports = {
4+
LOG_LEVEL: process.env.LOG_LEVEL || 'debug', // Winston log level
5+
LOG_FILE: path.join(__dirname, '../app.log'), // File to write logs to
6+
INFORMIX: { // Informix connection options
7+
host: process.env.INFORMIX_HOST || 'localhost',
8+
port: parseInt(process.env.INFORMIX_PORT, 10) || 2021,
9+
user: process.env.INFORMIX_USER || 'informix',
10+
password: process.env.INFORMIX_PASSWORD || '1nf0rm1x',
11+
database: process.env.INFORMIX_DATABASE || 'tcs_catalog',
12+
server: process.env.INFORMIX_SERVER || 'informixoltp_tcp',
13+
minpool: parseInt(process.env.MINPOOL, 10) || 1,
14+
maxpool: parseInt(process.env.MAXPOOL, 10) || 60,
15+
maxsize: parseInt(process.env.MAXSIZE, 10) || 0,
16+
idleTimeout: parseInt(process.env.IDLETIMEOUT, 10) || 3600,
17+
timeout: parseInt(process.env.TIMEOUT, 10) || 30000
18+
},
19+
POSTGRES: { // Postgres connection options
20+
user: process.env.PG_USER || 'mayur',
21+
host: process.env.PG_HOST || 'localhost',
22+
database: process.env.PG_DATABASE || 'postgres', // database must exist before running the tool
23+
password: process.env.PG_PASSWORD || 'password',
24+
port: parseInt(process.env.PG_PORT, 10) || 5432,
25+
triggerFunctions: process.env.triggerFunctions || ['db_notifications'], // List of trigger functions to listen to
26+
triggerTopics: process.env.TRIGGER_TOPICS || ['db.postgres.sync'], // Names of the topic in the trigger payload
27+
triggerOriginators: process.env.TRIGGER_ORIGINATORS || ['tc-postgres-delta-processor'] // Names of the originator in the trigger payload
28+
},
29+
KAFKA: { // Kafka connection options
30+
brokers_url: process.env.KAFKA_URL || 'localhost:9092', // comma delimited list of initial brokers list
31+
SSL: {
32+
cert: process.env.KAFKA_SSL_CERT || null, // SSL client certificate file path
33+
key: process.env.KAFKA_SSL_KEY || null // SSL client key file path
34+
},
35+
topic: process.env.KAFKA_TOPIC || 'db.postgres.sync', // Kafka topic to push and receive messages
36+
partition: process.env.partition || [0] // Kafka partitions to use
37+
},
38+
TEST_TABLE: 'scorecard', // Name of test table to use. Triggers for this table must exist
39+
TEST_SCHEMA: 'tcs_catalog', // Name of schema "TEST_TABLE" belongs to
40+
TEST_INTERVAL: 5000 // 5s interval to wait for postgres DML updates to be propagated to informix
41+
}
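For orientation (not part of this diff): with NODE_ENV=test the `config` package would pick up this file in place of config/default.js, making TEST_TABLE, TEST_SCHEMA and TEST_INTERVAL available to the test suite. A minimal sketch of how a propagation check might use these values follows; `checkRowInInformix` is a hypothetical helper standing in for whatever Informix access layer the actual tests use.

// Minimal sketch (assumption, not part of this commit): wait TEST_INTERVAL for a
// Postgres change to reach Informix, then assert it arrived in TEST_SCHEMA.TEST_TABLE.
const config = require('config')

const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms))

async function verifyPropagation (checkRowInInformix, rowId) {
  // Give the processor TEST_INTERVAL milliseconds to push the Postgres change
  // through Kafka and into Informix before asserting.
  await sleep(config.TEST_INTERVAL)
  const found = await checkRowInInformix(config.TEST_SCHEMA, config.TEST_TABLE, rowId)
  if (!found) {
    throw new Error(`row ${rowId} was not propagated to ${config.TEST_SCHEMA}.${config.TEST_TABLE}`)
  }
}

module.exports = { verifyPropagation }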
