forked from Midburn/Volunteers
-
Notifications
You must be signed in to change notification settings - Fork 0
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
shay
committed
Nov 24, 2017
1 parent
66cbadf
commit 50cf644
Showing 5 changed files with 75 additions and 291 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,85 +1,28 @@ | ||
const express = require('express'); | ||
const router = express.Router(); | ||
const co = require('co'); | ||
const _ = require('lodash'); | ||
const SparkFacade = require('../spark/spark'); | ||
|
||
const devMode = (process.env.ENVIRONMENT === 'debug'); | ||
const SPARK_HOST = (process.env.LOCAL_SPARK != 'true') ? process.env.SPARK_HOST : 'http://localhost:3000'; | ||
const SPARK_HOST = (process.env.LOCAL_SPARK !== 'true') ? process.env.SPARK_HOST : 'http://localhost:3000'; | ||
|
||
const sparkFacade = new SparkFacade(SPARK_HOST); | ||
|
||
const handleSparkProxy = handler => (req, res) => { | ||
console.log(`${req.method.toUpperCase()} ${req.path}`); | ||
return handler(req, res).then(data => res.status(200).send(data)).catch(e => { | ||
console.log(e); | ||
res.status(500).send(devMode ? e.toString() : 'Internal server error'); | ||
}) | ||
console.log(`${req.method.toUpperCase()} ${req.path}`); | ||
return handler(req, res) | ||
.then(data => res.status(200).send(data)) | ||
.catch(e => { | ||
console.log(e); | ||
res.status(500).send(devMode ? e.toString() : 'Internal server error'); | ||
}) | ||
}; | ||
|
||
// READ VOLUNTEER ROLES | ||
router.get('/volunteers/roles/me', handleSparkProxy(req => | ||
sparkFacade.rolesByUser(req.token, req.userDetails.email))); | ||
|
||
//READ DEPARTMENTS | ||
router.get('/departments', co.wrap(function*(req, res) { | ||
|
||
const departmentsToExclude = [16, 10]; | ||
let departments; | ||
|
||
try { | ||
const roles = yield sparkFacade.rolesByUser(req.token, req.userDetails.email); | ||
departments = yield sparkFacade.departments(req.token); | ||
|
||
const isAdmin = roles.find(role => role.permission === 1); | ||
|
||
if (isAdmin) { | ||
return res.json(departments.filter(department => departmentsToExclude.indexOf(department.id) === -1)); | ||
} | ||
|
||
res.json(departments.filter(department => roles.find(role => role.department_id === department.id))); | ||
} | ||
catch (e) { | ||
console.log(e); | ||
return res.status(500).json({error: "Internal server error"}); | ||
} | ||
})); | ||
|
||
//READ ROLES | ||
router.get('/roles', | ||
handleSparkProxy(req => | ||
sparkFacade.allRoles(req.token))); | ||
|
||
//READ ALL VOLUNTEERINGS - READ | ||
router.get('/volunteers', handleSparkProxy(req => sparkFacade.volunteers(req.token))); | ||
|
||
//READ ALL VOLUNTEERS IN SPECIFIC DEPARTMENT | ||
router.get('/departments/:dId/volunteers', | ||
handleSparkProxy(req => | ||
sparkFacade.volunteersByDepartment(req.token, req.params.dId))); | ||
|
||
//POST MULTIPLE VOLUNTEERINGS - CREATE | ||
router.post('/departments/:dId/volunteers/', handleSparkProxy(req => | ||
sparkFacade.addVolunteers(req.token, req.params.dId, req.body.emails.map(email => ({ | ||
email, | ||
role_id: req.body.role, | ||
is_production: req.body.is_production | ||
}))))); | ||
|
||
//PUT SINGLE VOLUNTEERING - UPDATE | ||
router.put('/departments/:dId/volunteers/:uid', | ||
handleSparkProxy(req => | ||
sparkFacade.updateVolunteer(req.token, req.params.dId, req.params.uid, _.pick(req.body, ['role_id', 'is_production'])))); | ||
|
||
//DELETE SINGLE VOLUNTEERING - REMOVE | ||
router.delete('/departments/:dId/volunteers/:uid', | ||
handleSparkProxy(req => | ||
sparkFacade.deleteVolunteer(req.token, req.params.dId, req.params.uid))); | ||
|
||
//DELETE VOLUNTEERS CACHE | ||
router.delete('/cache/volunteers', | ||
handleSparkProxy(req => | ||
sparkFacade.deleteVolunteersCache())); | ||
// GET VOLUNTEER | ||
router.get('/users', handleSparkProxy(req => | ||
sparkFacade.getUserDetailByMails(req.token, req.body.emails))); | ||
|
||
// GET ALL EVENTS | ||
router.get('/events', handleSparkProxy(req => | ||
sparkFacade.getAllEvents(req.token))); | ||
|
||
module.exports = router; |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,125 +1,39 @@ | ||
const path = require('path'); | ||
const fs = require('fs'); | ||
const http = require('http'); | ||
const axios = require('axios'); | ||
const NodeCache = require('node-cache'); | ||
const _ = require('lodash'); | ||
// const NodeCache = require('node-cache'); | ||
|
||
const SessionCookieName = process.env.JWT_KEY; | ||
const sparkCache = new NodeCache(); | ||
// const sparkCache = new NodeCache(); | ||
|
||
class SparkFacade { | ||
|
||
constructor(baseUrl = 'http://localhost:3000') { | ||
this.baseUrl = baseUrl; | ||
} | ||
|
||
departments(token) { | ||
return this.fetchSpark('/volunteers/departments', {headers: this.authHeader(token)}) | ||
.then(depts => depts.map(n => _.assign({name: n.name_en}, n))); | ||
} | ||
|
||
allRoles(token) { | ||
return this.fetchSpark('/volunteers/roles', {headers: this.authHeader(token)}); | ||
} | ||
|
||
rolesByUser(token, userEmail) { | ||
return this.fetchSpark(`/volunteers/user_roles?email=${userEmail}`, this.authHeader(token)); | ||
} | ||
|
||
// rolesByUser(token, userId) { | ||
// return this.fetchSpark(`/volunteers/${userId}/roles`, this.authHeader(token)); | ||
// } | ||
|
||
volunteers(token) { | ||
let volunteers; | ||
|
||
volunteers = sparkCache.get('volunteers'); | ||
|
||
if (volunteers) { | ||
return new Promise((resolve, reject) => { | ||
resolve(volunteers); | ||
}); | ||
static authHeader(token) { | ||
return {'Authorization': `${SessionCookieName}=${token}`}; | ||
} | ||
|
||
return this.fetchSpark('/volunteers/volunteers', {headers: this.authHeader(token)}).then(data => { | ||
volunteers = data.map(item => _.assign({profile_id: item.user_id, phone: item.phone_number}, | ||
_.pick(item, ['department_id', 'email', 'first_name', 'last_name', 'got_ticket', 'is_production', 'role_id']))); | ||
|
||
sparkCache.set('volunteers', volunteers); | ||
|
||
return volunteers; | ||
} | ||
); | ||
} | ||
|
||
volunteersByDepartment(token, departmentId) { | ||
|
||
let volunteers; | ||
|
||
volunteers = sparkCache.get(`volunteers-${departmentId}`); | ||
|
||
if (volunteers) { | ||
return new Promise((resolve, reject) => { | ||
resolve(volunteers); | ||
}); | ||
constructor(baseUrl = 'http://localhost:3000') { | ||
this.baseUrl = baseUrl; | ||
} | ||
|
||
return this.fetchSpark(`/volunteers/departments/${departmentId}/volunteers/`, | ||
{headers: this.authHeader(token)}).then(data => { | ||
volunteers = data.map(item => _.assign({profile_id: item.user_id, phone: item.phone_number}, | ||
_.pick(item, ['department_id', 'email', 'first_name', 'last_name', 'got_ticket', 'is_production', 'role_id'])) | ||
); | ||
|
||
sparkCache.set(`volunteers-${departmentId}`, volunteers); | ||
|
||
return volunteers; | ||
}); | ||
} | ||
|
||
addVolunteers(token, departmentId, volunteers) { | ||
sparkCache.del('volunteers'); | ||
sparkCache.del(`volunteers-${departmentId}`); | ||
|
||
return this.fetchSpark(`/volunteers/departments/${departmentId}/volunteers`, | ||
{headers: this.authHeader(token), method: 'post', data: volunteers}); | ||
} | ||
|
||
updateVolunteer(token, departmentId, volunteerId, volunteer) { | ||
sparkCache.del('volunteers'); | ||
sparkCache.del(`volunteers-${departmentId}`); | ||
|
||
return this.fetchSpark(`/volunteers/departments/${departmentId}/volunteers/${volunteerId}`, | ||
{ | ||
headers: this.authHeader(token), | ||
method: 'put', | ||
data: volunteer | ||
}).then(data => _.pick(data, ['status'])) | ||
} | ||
|
||
deleteVolunteer(token, departmentId, volunteerId) { | ||
sparkCache.del('volunteers'); | ||
sparkCache.del(`volunteers-${departmentId}`); | ||
|
||
return this.fetchSpark(`/volunteers/departments/${departmentId}/volunteers/${volunteerId}/`, { | ||
headers: this.authHeader(token), method: 'delete' | ||
}).then(data => _.pick(data, ['status'])) | ||
} | ||
|
||
deleteVolunteersCache() { | ||
sparkCache.flushAll(); | ||
|
||
return new Promise((resolve, reject) => resolve({result: true})); | ||
} | ||
getUserDetailByMails(token, emails) { | ||
return this.fetchSpark( | ||
'/users', | ||
token, | ||
{emails: emails}); | ||
} | ||
|
||
// private | ||
fetchSpark(path, options) { | ||
return axios(`${this.baseUrl}${path}`, options).then(r => r.data); | ||
} | ||
getAllEvents(token) { | ||
return this.fetchSpark('/events', token); | ||
} | ||
|
||
authHeader(token) { | ||
return {'Cookie': `${SessionCookieName}=${token}`}; | ||
} | ||
// private | ||
fetchSpark(path, token, options) { | ||
return axios( | ||
`${this.baseUrl}${path}`, | ||
{ | ||
headers: SparkFacade.authHeader(token), | ||
...options | ||
}) | ||
.then(response => response.data); | ||
} | ||
} | ||
|
||
module.exports = SparkFacade; |
Oops, something went wrong.