
Commit 49acb15

Merge pull request #385 from topcoder-platform/dev
TaaS Intake - v2.02
2 parents 7c87a61 + 051608b

11 files changed: +104 −139 lines

app-routes.js

Lines changed: 30 additions & 0 deletions
@@ -59,6 +59,36 @@ module.exports = (app) => {
             next()
           }
         })
+      } else {
+        // public API, but still try to authenticate token if provided, but allow missing/invalid token
+        actions.push((req, res, next) => {
+          const interceptRes = {}
+          interceptRes.status = () => interceptRes
+          interceptRes.json = () => interceptRes
+          interceptRes.send = () => next()
+          authenticator(_.pick(config, ['AUTH_SECRET', 'VALID_ISSUERS']))(req, interceptRes, next)
+        })
+
+        actions.push((req, res, next) => {
+          if (!req.authUser) {
+            next()
+          } else if (req.authUser.isMachine) {
+            if (!def.scopes || !req.authUser.scopes || !helper.checkIfExists(def.scopes, req.authUser.scopes)) {
+              req.authUser = undefined
+            }
+            next()
+          } else {
+            req.authUser.jwtToken = req.headers.authorization
+            // check if user has full manage permission
+            if (_.intersection(req.authUser.roles, constants.FullManagePermissionRoles).length) {
+              req.authUser.hasManagePermission = true
+            }
+            if (_.includes(req.authUser.roles, constants.UserRoles.ConnectManager)) {
+              req.authUser.isConnectManager = true
+            }
+            next()
+          }
+        })
       }
 
       actions.push(method)
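
Note on the new branch: for route definitions that declare no `auth`, the first middleware still runs the token authenticator, but hands it a stub response object whose `status`/`json` calls are swallowed and whose `send` simply calls `next()`, so a missing or invalid token no longer ends the request. A valid token still populates `req.authUser`, and the second middleware then clears it for M2M tokens without a matching scope or sets the `hasManagePermission` / `isConnectManager` flags. A minimal sketch of the interception idea, assuming an Express-style strict middleware named `verifyToken` (a hypothetical stand-in for the tc-core authenticator):

// Minimal sketch of "optional auth": run a strict auth middleware, but trap its
// error responses so the request continues as anonymous instead of failing.
// `verifyToken` is a hypothetical strict middleware with the usual (req, res, next) shape.
function optionalAuth (verifyToken) {
  return (req, res, next) => {
    const interceptRes = {
      status () { return interceptRes }, // swallow res.status(401)
      json () { return interceptRes },   // swallow res.json({ message: ... })
      send () { next() }                 // treat "rejected" as "continue anonymously"
    }
    // on success verifyToken calls next() itself (and sets req.authUser);
    // on failure it writes to interceptRes, which simply continues the chain
    verifyToken(req, interceptRes, next)
  }
}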

data/demo-data.json

Lines changed: 11 additions & 11 deletions
@@ -7646,10 +7646,10 @@
       "name": "Angular Developer",
       "description": "* Writes tested and documented JavaScript, HTML and CSS\n* Makes design and technical decisions for AngularJS projects\n* Develops application code and unit test in the AngularJS, Rest Web Services and Java technologies",
       "listOfSkills": [
-        "database",
-        "winforms",
-        "user interface (ui)",
-        "photoshop"
+        "Database",
+        "Winforms",
+        "User Interface (Ui)",
+        "Photoshop"
       ],
       "rates": [
         {
@@ -7678,10 +7678,10 @@
       "name": "Dev Ops Engineer",
       "description": "* Introduces processes, tools, and methodologies\n* Balances needs throughout the software development life cycle\n* Configures server images, optimizes task performance in correspondence with engineers",
       "listOfSkills": [
-        "dropwizard",
-        "nginx",
-        "machine learning",
-        "force.com"
+        "Dropwizard",
+        "NGINX",
+        "Machine Learning",
+        "Force.com"
      ],
       "rates": [
         {
@@ -7722,10 +7722,10 @@
       "name": "Salesforce Developer",
       "description": "* Meets with project managers to determine CRM needs\n* Develops customized solutions within the Salesforce platform\n* Designs, codes, and implements Salesforce applications\n* Creates timelines and development goals\n* Tests the stability and functionality of the application\n* Troubleshoots and fixes bugs\n* Writes documents and provides technical training for Salesforce Staff\n* Maintains the security and integrity of the application software",
       "listOfSkills": [
-        "docker",
-        ".net",
+        "Docker",
+        ".NET",
         "appcelerator",
-        "flux"
+        "Flux"
       ],
       "rates": [
         {

docs/Topcoder-bookings-api.postman_collection.json

Lines changed: 10 additions & 104 deletions
@@ -1,6 +1,6 @@
 {
   "info": {
-    "_postman_id": "6f274c86-24a5-412e-95e6-fafa34e2a936",
+    "_postman_id": "15f10b58-dda5-4aaf-96e5-061a5c901717",
     "name": "Topcoder-bookings-api",
     "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json"
   },
@@ -18153,16 +18153,14 @@
       "response": []
     },
     {
-      "name": "send request with invalid token",
+      "name": "send request with public",
       "event": [
         {
           "listen": "test",
           "script": {
             "exec": [
-              "pm.test('Status code is 401', function () {\r",
-              " pm.response.to.have.status(401);\r",
-              " const response = pm.response.json()\r",
-              " pm.expect(response.message).to.eq(\"Invalid Token.\")\r",
+              "pm.test('Status code is 200', function () {\r",
+              " pm.response.to.have.status(200);\r",
              "});"
             ],
             "type": "text/javascript"
@@ -18171,16 +18169,10 @@
       ],
       "request": {
         "method": "POST",
-        "header": [
-          {
-            "key": "Authorization",
-            "value": "Bearer invalid_token",
-            "type": "text"
-          }
-        ],
+        "header": [],
         "body": {
           "mode": "raw",
-          "raw": "{\r\n \"jobDescription\": \"Should have these skills: Machine Learning, Dropwizard, NGINX, appcelerator\"\r\n}",
+          "raw": "{\r\n \"jobDescription\": \"Should have these skills: Machine Learning, Dropwizard, NGINX, appcelerator, C#\"\r\n}",
          "options": {
            "raw": {
              "language": "json"
@@ -18210,7 +18202,7 @@
              "pm.test('Status code is 400', function () {\r",
              " pm.response.to.have.status(400);\r",
              " const response = pm.response.json()\r",
-             " pm.expect(response.message).to.eq(\"\\\"data\\\" must have at least 1 key\")\r",
+             " pm.expect(response.message).to.eq(\"\\\"data\\\" must contain at least one of [roleId, jobDescription, skills]\")\r",
              "});"
            ],
            "type": "text/javascript"
@@ -19211,71 +19203,7 @@
       ],
       "request": {
         "method": "POST",
-        "header": [
-          {
-            "key": "Authorization",
-            "type": "text",
-            "value": "Bearer {{token_administrator}}"
-          },
-          {
-            "key": "Content-Type",
-            "type": "text",
-            "value": "application/json"
-          }
-        ],
-        "body": {
-          "mode": "raw",
-          "raw": "{ \"description\": \"Description A global leading healthcare company is seeking a strong Databricks Engineer to join their development team as they build their new Databricks workspace. Development efforts will contribute to the migration of data from Hadoop to Databricks to prepare data for visualization. Candidate must be well-versed in Databricks components and best practices, be an excellent problem solver and be comfortable working in a fast-moving, rapidly changing, and dynamic environment via Agile, SCRUM, and DevOps. PREFERRED QUALIFICATIONS: 2+ years of Azure Data Stack experience: Azure Data Services using ADF, ADLS, Databricks with PySpark, Azure DevOps & Azure Key Vault. Strong knowledge of various data warehousing methodologies and data modeling concepts. Hands-on experience using Azure, Azure data lake, Azure functions & Databricks Minimum 2-3+ years of Python experience (PySpark) Design & Develop Azure native solutions for Data Platform Minimum 3+ years of experience using Big Data ecosystem (Cloudera/Hortonworks) using Oozie, Hive, Impala, and Spark Expert in SQL and performance tuning\" }",
-          "options": {
-            "raw": {
-              "language": "json"
-            }
-          }
-        },
-        "url": {
-          "raw": "{{URL}}/taas-teams/getSkillsByJobDescription",
-          "host": [
-            "{{URL}}"
-          ],
-          "path": [
-            "taas-teams",
-            "getSkillsByJobDescription"
-          ]
-        }
-      },
-      "response": []
-    },
-    {
-      "name": "get skills by invalid token",
-      "event": [
-        {
-          "listen": "test",
-          "script": {
-            "exec": [
-              "pm.test('Status code is 401', function () {\r",
-              " pm.response.to.have.status(401);\r",
-              " const response = pm.response.json()\r",
-              " pm.expect(response.message).to.eq(\"Invalid Token.\")\r",
-              "});"
-            ],
-            "type": "text/javascript"
-          }
-        }
-      ],
-      "request": {
-        "method": "POST",
-        "header": [
-          {
-            "key": "Authorization",
-            "type": "text",
-            "value": "Bearer invalid_token"
-          },
-          {
-            "key": "Content-Type",
-            "type": "text",
-            "value": "application/json"
-          }
-        ],
+        "header": [],
         "body": {
           "mode": "raw",
           "raw": "{ \"description\": \"Description A global leading healthcare company is seeking a strong Databricks Engineer to join their development team as they build their new Databricks workspace. Development efforts will contribute to the migration of data from Hadoop to Databricks to prepare data for visualization. Candidate must be well-versed in Databricks components and best practices, be an excellent problem solver and be comfortable working in a fast-moving, rapidly changing, and dynamic environment via Agile, SCRUM, and DevOps. PREFERRED QUALIFICATIONS: 2+ years of Azure Data Stack experience: Azure Data Services using ADF, ADLS, Databricks with PySpark, Azure DevOps & Azure Key Vault. Strong knowledge of various data warehousing methodologies and data modeling concepts. Hands-on experience using Azure, Azure data lake, Azure functions & Databricks Minimum 2-3+ years of Python experience (PySpark) Design & Develop Azure native solutions for Data Platform Minimum 3+ years of experience using Big Data ecosystem (Cloudera/Hortonworks) using Oozie, Hive, Impala, and Spark Expert in SQL and performance tuning\" }",
@@ -19317,18 +19245,7 @@
       ],
       "request": {
         "method": "POST",
-        "header": [
-          {
-            "key": "Authorization",
-            "type": "text",
-            "value": "Bearer {{token_administrator}}"
-          },
-          {
-            "key": "Content-Type",
-            "type": "text",
-            "value": "application/json"
-          }
-        ],
+        "header": [],
         "body": {
           "mode": "raw",
           "raw": "{}",

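The removed Authorization headers and the renamed request reflect that POST {{URL}}/taas-teams/getSkillsByJobDescription is now expected to succeed without a token (the test asserts 200 instead of 401). A hedged sketch of calling it directly, assuming node-fetch is available and using BASE_URL as a stand-in for the collection's {{URL}} variable:

// Hedged sketch of calling the now-public endpoint outside Postman.
// BASE_URL stands in for the collection's {{URL}} variable; node-fetch is assumed to be installed.
const fetch = require('node-fetch')
const BASE_URL = process.env.TAAS_API_URL

async function getSkillsFromJd () {
  const res = await fetch(`${BASE_URL}/taas-teams/getSkillsByJobDescription`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' }, // no Authorization header required any more
    body: JSON.stringify({ jobDescription: 'Should have these skills: Machine Learning, Dropwizard, NGINX, appcelerator, C#' })
  })
  console.log(res.status, await res.json()) // the updated Postman test expects 200
}
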
docs/swagger.yaml

Lines changed: 7 additions & 0 deletions
@@ -5249,6 +5249,9 @@ components:
           jobDescription:
             type: string
             description: "The description of the job."
+          jobTitle:
+            type: string
+            description: "An optional job title."
       - type: object
         required:
           - skills
@@ -5281,6 +5284,10 @@ components:
         format: float
         description: "Rate at which searched skills match the given role"
         example: 0.75
+      jobTitle:
+        type: string
+        description: "Optional job title."
+        example: "Lead Application Developer"
     SubmitTeamRequestBody:
       properties:
         teamName:
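
With these schema additions a role search request may carry an optional jobTitle, and the matched-role response can echo one back. A hedged example request body; only jobDescription and jobTitle are taken from the hunks above, anything further would follow the rest of swagger.yaml:

// Hedged example body for the role search request, using only fields shown in this diff.
const roleSearchRequestBody = {
  jobDescription: 'Should have these skills: Machine Learning, Dropwizard, NGINX, appcelerator, C#',
  jobTitle: 'Lead Application Developer' // optional; limited to 100 characters by the new job_title column
}
console.log(JSON.stringify(roleSearchRequestBody, null, 2))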
(new file: Sequelize migration adding job_title; file path not shown)

Lines changed: 18 additions & 0 deletions
@@ -0,0 +1,18 @@
+const config = require('config')
+
+/**
+ * Add jobTitle field to the RoleSearchRequest model.
+ */
+
+module.exports = {
+  up: async (queryInterface, Sequelize) => {
+    await queryInterface.addColumn({ tableName: 'role_search_requests', schema: config.DB_SCHEMA_NAME }, 'job_title',
+      {
+        type: Sequelize.STRING(100),
+        allowNull: true
+      })
+  },
+  down: async (queryInterface, Sequelize) => {
+    await queryInterface.removeColumn({ tableName: 'role_search_requests', schema: config.DB_SCHEMA_NAME}, 'job_title')
+  }
+}

src/common/helper.js

Lines changed: 2 additions & 2 deletions
@@ -1960,8 +1960,8 @@ function removeTextFormatting (text) {
   // Remove footnotes
   text = _.replace(text, /\[\^.+?\](: .*?$)?/g, ' ')
   text = _.replace(text, /\s{0,2}\[.*?\]: .*?$/g, ' ')
-  // Remove images
-  text = _.replace(text, /!\[(.*?)\][[(].*?[\])]/g, ' $1 ')
+  // Remove images and keep description unless it is default description "image"
+  text = _.replace(text, /!(\[((?!image).*?)\]|\[.*?\])[[(].*?[\])]/g, ' $2 ')
   // Remove inline links
   text = _.replace(text, /\[(.*?)\][[(].*?[\])]/g, ' $1 ')
   // Remove blockquotes
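
The new pattern has two alternatives: the first captures the alt text only when it does not start with "image" (the default alt many editors insert), otherwise the second alternative matches without a capture and the replacement ' $2 ' collapses to whitespace. A quick check of the behaviour (the sample strings are made up):

// Quick check of the new image-stripping regex from removeTextFormatting (sample strings are made up).
const _ = require('lodash')
const re = /!(\[((?!image).*?)\]|\[.*?\])[[(].*?[\])]/g

console.log(_.replace('See ![Chart of Q3 results](chart.png) for details.', re, ' $2 '))
// -> 'See  Chart of Q3 results  for details.'   (meaningful alt text is kept)

console.log(_.replace('See ![image](screenshot.png) for details.', re, ' $2 '))
// -> 'See    for details.'                      (default "image" alt is dropped)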

src/controllers/TeamController.js

Lines changed: 1 addition & 1 deletion
@@ -114,7 +114,7 @@ async function getMe (req, res) {
  * @param res the response
  */
 async function getSkillsByJobDescription (req, res) {
-  res.send(await service.getSkillsByJobDescription(req.authUser, req.body))
+  res.send(await service.getSkillsByJobDescription(req.body))
 }
 
 /**

src/models/RoleSearchRequest.js

Lines changed: 5 additions & 0 deletions
@@ -62,6 +62,11 @@ module.exports = (sequelize) => {
           type: Sequelize.UUID
         })
       },
+      jobTitle: {
+        field: 'job_title',
+        type: Sequelize.STRING(100),
+        allowNull: true
+      },
       createdBy: {
         field: 'created_by',
         type: Sequelize.UUID,
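
With the `field: 'job_title'` mapping, the Sequelize attribute `jobTitle` reads and writes the snake_case column added by the migration above. A hedged usage sketch; the import path and the omitted attributes are assumptions, only jobTitle comes from this diff:

// Hedged sketch (not from the PR): the camelCase attribute maps to the snake_case column.
const models = require('../models') // import path is an assumption about the project layout

async function example () {
  const rsr = await models.RoleSearchRequest.create({
    // ...other required attributes omitted in this sketch...
    jobTitle: 'Lead Application Developer' // persisted to role_search_requests.job_title (VARCHAR(100), nullable)
  })
  console.log(rsr.jobTitle)
}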

src/routes/TeamRoutes.js

Lines changed: 3 additions & 5 deletions
@@ -23,7 +23,7 @@ module.exports = {
   '/taas-teams/skills': {
     get: {
       controller: 'TeamController',
-      method: 'searchSkills',
+      method: 'searchSkills'
     }
   },
   '/taas-teams/me': {
@@ -37,9 +37,7 @@ module.exports = {
   '/taas-teams/getSkillsByJobDescription': {
     post: {
       controller: 'TeamController',
-      method: 'getSkillsByJobDescription',
-      auth: 'jwt',
-      scopes: [constants.Scopes.READ_TAAS_TEAM]
+      method: 'getSkillsByJobDescription'
     }
   },
   '/taas-teams/:id': {
@@ -91,7 +89,7 @@ module.exports = {
   '/taas-teams/sendRoleSearchRequest': {
     post: {
       controller: 'TeamController',
-      method: 'roleSearchRequest',
+      method: 'roleSearchRequest'
     }
   },
   '/taas-teams/submitTeamRequest': {
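
Removing `auth: 'jwt'` and the scopes from getSkillsByJobDescription and sendRoleSearchRequest is what routes them through the new public branch in app-routes.js: without an `auth` key the hard check is skipped, but a valid token still populates `req.authUser`. A hedged side-by-side of the two route shapes (the '/example/...' paths and ExampleController are placeholders, not routes from this PR):

// Hedged illustration of the two route shapes that app-routes.js now distinguishes.
// The require path for constants is an assumption about the project layout.
const constants = require('../../app-constants')

module.exports = {
  '/example/public-endpoint': {
    post: {
      controller: 'ExampleController',
      method: 'publicMethod' // no `auth` key: token optional, missing/invalid tokens tolerated
    }
  },
  '/example/protected-endpoint': {
    post: {
      controller: 'ExampleController',
      method: 'protectedMethod',
      auth: 'jwt', // token required
      scopes: [constants.Scopes.READ_TAAS_TEAM] // and a matching scope for M2M tokens
    }
  }
}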

src/services/PaymentSchedulerService.js

Lines changed: 3 additions & 3 deletions
@@ -90,7 +90,7 @@ async function processPayment (workPeriodPayment) {
     const oldValue = workPeriodPayment.toJSON()
     const updated = await workPeriodPayment.update({ status: 'in-progress' })
     // Update the modified status to es
-    await postEvent(config.TAAS_WORK_PERIOD_PAYMENT_UPDATE_TOPIC, updated.toJSON(), { oldValue })
+    await postEvent(config.TAAS_WORK_PERIOD_PAYMENT_UPDATE_TOPIC, updated.toJSON(), { oldValue, key: `workPeriodPayment.billingAccountId:${updated.billingAccountId}` })
   }
   // Check whether the number of processed records per minute exceeds the specified number, if it exceeds, wait for the next minute before processing
   await checkWait(PaymentSchedulerStatus.START_PROCESS)
@@ -115,7 +115,7 @@ async function processPayment (workPeriodPayment) {
     // 5. update wp and save it should only update already existent Work Period Payment record with created "challengeId" and "status=completed".
     const updated = await workPeriodPayment.update({ challengeId: paymentScheduler.challengeId, status: 'completed' })
     // Update the modified status to es
-    await postEvent(config.TAAS_WORK_PERIOD_PAYMENT_UPDATE_TOPIC, updated.toJSON(), { oldValue })
+    await postEvent(config.TAAS_WORK_PERIOD_PAYMENT_UPDATE_TOPIC, updated.toJSON(), { oldValue, key: `workPeriodPayment.billingAccountId:${updated.billingAccountId}` })
 
     await paymentScheduler.update({ step: PaymentSchedulerStatus.CLOSE_CHALLENGE, userId: paymentScheduler.userId, status: 'completed' })
 
@@ -128,7 +128,7 @@ async function processPayment (workPeriodPayment) {
     // If payment processing failed Work Periods Payment "status" should be changed to "failed" and populate "statusDetails" field with error details in JSON format.
     const updated = await workPeriodPayment.update({ statusDetails, status: 'failed' })
     // Update the modified status to es
-    await postEvent(config.TAAS_WORK_PERIOD_PAYMENT_UPDATE_TOPIC, updated.toJSON(), { oldValue })
+    await postEvent(config.TAAS_WORK_PERIOD_PAYMENT_UPDATE_TOPIC, updated.toJSON(), { oldValue, key: `workPeriodPayment.billingAccountId:${updated.billingAccountId}` })
 
     if (paymentScheduler) {
       await paymentScheduler.update({ step: _.get(err, 'step'), userId: paymentScheduler.userId, status: 'failed' })
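
All three postEvent calls now pass the same `key` option, so every update event for a given work period payment's billing account carries one message key; assuming the bus partitions messages by key (typical Kafka behaviour, not shown in this diff), updates for the same billing account stay in order. The key itself is a plain template string:

// The key is just a template string; 80000012 is a made-up billingAccountId.
const updated = { billingAccountId: 80000012 }
const key = `workPeriodPayment.billingAccountId:${updated.billingAccountId}`
console.log(key) // 'workPeriodPayment.billingAccountId:80000012'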
