Skip to content

Commit dcdc554

Browse files
committed
fix: isExternalMember
1 parent 41a9706 commit dcdc554

File tree

4 files changed

+47
-111
lines changed

4 files changed

+47
-111
lines changed

app-routes.js

+30
Original file line numberDiff line numberDiff line change
@@ -59,6 +59,36 @@ module.exports = (app) => {
5959
next()
6060
}
6161
})
62+
} else {
63+
// public API — still attempt to authenticate a token if one is provided, but allow requests with a missing or invalid token to proceed
64+
actions.push((req, res, next) => {
65+
const interceptRes = {}
66+
interceptRes.status = () => interceptRes
67+
interceptRes.json = () => interceptRes
68+
interceptRes.send = () => next()
69+
authenticator(_.pick(config, ['AUTH_SECRET', 'VALID_ISSUERS']))(req, interceptRes, next)
70+
})
71+
72+
actions.push((req, res, next) => {
73+
if (!req.authUser) {
74+
next()
75+
} else if (req.authUser.isMachine) {
76+
if (!def.scopes || !req.authUser.scopes || !helper.checkIfExists(def.scopes, req.authUser.scopes)) {
77+
req.authUser = undefined
78+
}
79+
next()
80+
} else {
81+
req.authUser.jwtToken = req.headers.authorization
82+
// check if user has full manage permission
83+
if (_.intersection(req.authUser.roles, constants.FullManagePermissionRoles).length) {
84+
req.authUser.hasManagePermission = true
85+
}
86+
if (_.includes(req.authUser.roles, constants.UserRoles.ConnectManager)) {
87+
req.authUser.isConnectManager = true
88+
}
89+
next()
90+
}
91+
})
6292
}
6393

6494
actions.push(method)

docs/Topcoder-bookings-api.postman_collection.json

+10-104
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
{
22
"info": {
3-
"_postman_id": "6f274c86-24a5-412e-95e6-fafa34e2a936",
3+
"_postman_id": "15f10b58-dda5-4aaf-96e5-061a5c901717",
44
"name": "Topcoder-bookings-api",
55
"schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json"
66
},
@@ -18153,16 +18153,14 @@
1815318153
"response": []
1815418154
},
1815518155
{
18156-
"name": "send request with invalid token",
18156+
"name": "send request with public",
1815718157
"event": [
1815818158
{
1815918159
"listen": "test",
1816018160
"script": {
1816118161
"exec": [
18162-
"pm.test('Status code is 401', function () {\r",
18163-
" pm.response.to.have.status(401);\r",
18164-
" const response = pm.response.json()\r",
18165-
" pm.expect(response.message).to.eq(\"Invalid Token.\")\r",
18162+
"pm.test('Status code is 200', function () {\r",
18163+
" pm.response.to.have.status(200);\r",
1816618164
"});"
1816718165
],
1816818166
"type": "text/javascript"
@@ -18171,16 +18169,10 @@
1817118169
],
1817218170
"request": {
1817318171
"method": "POST",
18174-
"header": [
18175-
{
18176-
"key": "Authorization",
18177-
"value": "Bearer invalid_token",
18178-
"type": "text"
18179-
}
18180-
],
18172+
"header": [],
1818118173
"body": {
1818218174
"mode": "raw",
18183-
"raw": "{\r\n \"jobDescription\": \"Should have these skills: Machine Learning, Dropwizard, NGINX, appcelerator\"\r\n}",
18175+
"raw": "{\r\n \"jobDescription\": \"Should have these skills: Machine Learning, Dropwizard, NGINX, appcelerator, C#\"\r\n}",
1818418176
"options": {
1818518177
"raw": {
1818618178
"language": "json"
@@ -18210,7 +18202,7 @@
1821018202
"pm.test('Status code is 400', function () {\r",
1821118203
" pm.response.to.have.status(400);\r",
1821218204
" const response = pm.response.json()\r",
18213-
" pm.expect(response.message).to.eq(\"\\\"data\\\" must have at least 1 key\")\r",
18205+
" pm.expect(response.message).to.eq(\"\\\"data\\\" must contain at least one of [roleId, jobDescription, skills]\")\r",
1821418206
"});"
1821518207
],
1821618208
"type": "text/javascript"
@@ -19211,71 +19203,7 @@
1921119203
],
1921219204
"request": {
1921319205
"method": "POST",
19214-
"header": [
19215-
{
19216-
"key": "Authorization",
19217-
"type": "text",
19218-
"value": "Bearer {{token_administrator}}"
19219-
},
19220-
{
19221-
"key": "Content-Type",
19222-
"type": "text",
19223-
"value": "application/json"
19224-
}
19225-
],
19226-
"body": {
19227-
"mode": "raw",
19228-
"raw": "{ \"description\": \"Description A global leading healthcare company is seeking a strong Databricks Engineer to join their development team as they build their new Databricks workspace. Development efforts will contribute to the migration of data from Hadoop to Databricks to prepare data for visualization. Candidate must be well-versed in Databricks components and best practices, be an excellent problem solver and be comfortable working in a fast-moving, rapidly changing, and dynamic environment via Agile, SCRUM, and DevOps. PREFERRED QUALIFICATIONS: 2+ years of Azure Data Stack experience: Azure Data Services using ADF, ADLS, Databricks with PySpark, Azure DevOps & Azure Key Vault. Strong knowledge of various data warehousing methodologies and data modeling concepts. Hands-on experience using Azure, Azure data lake, Azure functions & Databricks Minimum 2-3+ years of Python experience (PySpark) Design & Develop Azure native solutions for Data Platform Minimum 3+ years of experience using Big Data ecosystem (Cloudera/Hortonworks) using Oozie, Hive, Impala, and Spark Expert in SQL and performance tuning\" }",
19229-
"options": {
19230-
"raw": {
19231-
"language": "json"
19232-
}
19233-
}
19234-
},
19235-
"url": {
19236-
"raw": "{{URL}}/taas-teams/getSkillsByJobDescription",
19237-
"host": [
19238-
"{{URL}}"
19239-
],
19240-
"path": [
19241-
"taas-teams",
19242-
"getSkillsByJobDescription"
19243-
]
19244-
}
19245-
},
19246-
"response": []
19247-
},
19248-
{
19249-
"name": "get skills by invalid token",
19250-
"event": [
19251-
{
19252-
"listen": "test",
19253-
"script": {
19254-
"exec": [
19255-
"pm.test('Status code is 401', function () {\r",
19256-
" pm.response.to.have.status(401);\r",
19257-
" const response = pm.response.json()\r",
19258-
" pm.expect(response.message).to.eq(\"Invalid Token.\")\r",
19259-
"});"
19260-
],
19261-
"type": "text/javascript"
19262-
}
19263-
}
19264-
],
19265-
"request": {
19266-
"method": "POST",
19267-
"header": [
19268-
{
19269-
"key": "Authorization",
19270-
"type": "text",
19271-
"value": "Bearer invalid_token"
19272-
},
19273-
{
19274-
"key": "Content-Type",
19275-
"type": "text",
19276-
"value": "application/json"
19277-
}
19278-
],
19206+
"header": [],
1927919207
"body": {
1928019208
"mode": "raw",
1928119209
"raw": "{ \"description\": \"Description A global leading healthcare company is seeking a strong Databricks Engineer to join their development team as they build their new Databricks workspace. Development efforts will contribute to the migration of data from Hadoop to Databricks to prepare data for visualization. Candidate must be well-versed in Databricks components and best practices, be an excellent problem solver and be comfortable working in a fast-moving, rapidly changing, and dynamic environment via Agile, SCRUM, and DevOps. PREFERRED QUALIFICATIONS: 2+ years of Azure Data Stack experience: Azure Data Services using ADF, ADLS, Databricks with PySpark, Azure DevOps & Azure Key Vault. Strong knowledge of various data warehousing methodologies and data modeling concepts. Hands-on experience using Azure, Azure data lake, Azure functions & Databricks Minimum 2-3+ years of Python experience (PySpark) Design & Develop Azure native solutions for Data Platform Minimum 3+ years of experience using Big Data ecosystem (Cloudera/Hortonworks) using Oozie, Hive, Impala, and Spark Expert in SQL and performance tuning\" }",
@@ -19317,18 +19245,7 @@
1931719245
],
1931819246
"request": {
1931919247
"method": "POST",
19320-
"header": [
19321-
{
19322-
"key": "Authorization",
19323-
"type": "text",
19324-
"value": "Bearer {{token_administrator}}"
19325-
},
19326-
{
19327-
"key": "Content-Type",
19328-
"type": "text",
19329-
"value": "application/json"
19330-
}
19331-
],
19248+
"header": [],
1933219249
"body": {
1933319250
"mode": "raw",
1933419251
"raw": "{ \"description\": \"\" }",
@@ -19370,18 +19287,7 @@
1937019287
],
1937119288
"request": {
1937219289
"method": "POST",
19373-
"header": [
19374-
{
19375-
"key": "Authorization",
19376-
"type": "text",
19377-
"value": "Bearer {{token_administrator}}"
19378-
},
19379-
{
19380-
"key": "Content-Type",
19381-
"type": "text",
19382-
"value": "application/json"
19383-
}
19384-
],
19290+
"header": [],
1938519291
"body": {
1938619292
"mode": "raw",
1938719293
"raw": "{}",

src/routes/TeamRoutes.js

+3-3
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,7 @@ module.exports = {
2323
'/taas-teams/skills': {
2424
get: {
2525
controller: 'TeamController',
26-
method: 'searchSkills',
26+
method: 'searchSkills'
2727
}
2828
},
2929
'/taas-teams/me': {
@@ -37,7 +37,7 @@ module.exports = {
3737
'/taas-teams/getSkillsByJobDescription': {
3838
post: {
3939
controller: 'TeamController',
40-
method: 'getSkillsByJobDescription',
40+
method: 'getSkillsByJobDescription'
4141
}
4242
},
4343
'/taas-teams/:id': {
@@ -89,7 +89,7 @@ module.exports = {
8989
'/taas-teams/sendRoleSearchRequest': {
9090
post: {
9191
controller: 'TeamController',
92-
method: 'roleSearchRequest',
92+
method: 'roleSearchRequest'
9393
}
9494
},
9595
'/taas-teams/submitTeamRequest': {

src/services/TeamService.js

+4-4
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,7 @@ const errors = require('../common/errors')
1313
const JobService = require('./JobService')
1414
const ResourceBookingService = require('./ResourceBookingService')
1515
const HttpStatus = require('http-status-codes')
16-
const { Op, where, fn, col } = require('sequelize')
16+
const { Op } = require('sequelize')
1717
const models = require('../models')
1818
const stopWords = require('../../data/stopWords.json')
1919
const { getAuditM2Muser } = require('../common/helper')
@@ -769,15 +769,15 @@ async function roleSearchRequest (currentUser, data) {
769769
role = await getRoleBySkills(skills)
770770
} else {
771771
// if only job description is provided, collect skill names from description
772-
const tags = await getSkillsByJobDescription(currentUser, { description: data.jobDescription })
772+
const tags = await getSkillsByJobDescription({ description: data.jobDescription })
773773
const skills = _.map(tags, 'tag')
774774
// find the best matching role
775775
role = await getRoleBySkills(skills)
776776
}
777777
data.roleId = role.id
778778
// create roleSearchRequest entity with found roleId
779779
const { id: roleSearchRequestId, jobTitle } = await createRoleSearchRequest(currentUser, data)
780-
const entity = jobTitle ? { jobTitle, roleSearchRequestId } : { roleSearchRequestId };
780+
const entity = jobTitle ? { jobTitle, roleSearchRequestId } : { roleSearchRequestId }
781781
// clean Role
782782
role = await _cleanRoleDTO(currentUser, role)
783783
// return Role
@@ -985,7 +985,7 @@ createRoleSearchRequest.schema = Joi.object()
985985
*/
986986
async function _cleanRoleDTO (currentUser, role) {
987987
// if current user is machine, it means user is not logged in
988-
if (currentUser.isMachine || await isExternalMember(currentUser.userId)) {
988+
if (_.isNil(currentUser) || currentUser.isMachine || await isExternalMember(currentUser.userId)) {
989989
role.isExternalMember = true
990990
if (role.rates) {
991991
role.rates = _.map(role.rates, rate =>

0 commit comments

Comments
 (0)