Skip to content

Commit

Permalink
chore(spark) - create mocks
Browse files Browse the repository at this point in the history
  • Loading branch information
shay committed Nov 24, 2017
1 parent 66cbadf commit 50cf644
Show file tree
Hide file tree
Showing 5 changed files with 75 additions and 291 deletions.
2 changes: 1 addition & 1 deletion .env
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,6 @@
ENVIRONMENT=debug
PORT=8080
DB_URL=mongodb://localhost/volunteers
SPARK_HOST=http://sparkstaging.midburn.org
SPARK_HOST=http://spark.midburn.org
SECRET=secret
JWT_KEY=authToken
85 changes: 14 additions & 71 deletions server/routes/spark.js
Original file line number Diff line number Diff line change
@@ -1,85 +1,28 @@
const express = require('express');
const router = express.Router();
const co = require('co');
const _ = require('lodash');
const SparkFacade = require('../spark/spark');

// True when ENVIRONMENT=debug: error details are then exposed to API clients.
const devMode = (process.env.ENVIRONMENT === 'debug');
// Target a locally-running Spark instance when LOCAL_SPARK=true; otherwise use
// the host configured in the environment (see .env SPARK_HOST).
const SPARK_HOST = (process.env.LOCAL_SPARK !== 'true') ? process.env.SPARK_HOST : 'http://localhost:3000';

// Single facade instance shared by every route in this router.
const sparkFacade = new SparkFacade(SPARK_HOST);

// Wraps a Spark-facade call as an Express handler: logs the request, sends the
// resolved data with HTTP 200, and maps any rejection to HTTP 500. In debug
// mode (devMode) the error text is returned to the client; otherwise a generic
// message is sent so internals are not leaked.
// (The diff view had duplicated the old one-line chain above the reformatted
// one, leaving unreachable dead statements — only the single chain is kept.)
const handleSparkProxy = handler => (req, res) => {
    console.log(`${req.method.toUpperCase()} ${req.path}`);
    return handler(req, res)
        .then(data => res.status(200).send(data))
        .catch(e => {
            console.log(e);
            res.status(500).send(devMode ? e.toString() : 'Internal server error');
        });
};

// Routes proxying the volunteers API to the Spark backend. All handlers
// delegate to sparkFacade; handleSparkProxy maps the returned promise onto the
// HTTP response.

// READ VOLUNTEER ROLES — roles of the authenticated user (identified by email).
router.get('/volunteers/roles/me', handleSparkProxy(req =>
sparkFacade.rolesByUser(req.token, req.userDetails.email)));

// READ DEPARTMENTS — admins (any role with permission === 1) see every
// department except the hard-coded exclusions; everyone else sees only the
// departments they hold a role in.
router.get('/departments', co.wrap(function*(req, res) {

// Department ids hidden from the admin listing — presumably internal or
// retired departments; confirm against the Spark data. NOTE(review)
const departmentsToExclude = [16, 10];
let departments;

try {
const roles = yield sparkFacade.rolesByUser(req.token, req.userDetails.email);
departments = yield sparkFacade.departments(req.token);

// A single role with permission === 1 is enough to count as admin.
const isAdmin = roles.find(role => role.permission === 1);

if (isAdmin) {
return res.json(departments.filter(department => departmentsToExclude.indexOf(department.id) === -1));
}

// Non-admins: only departments in which the user holds some role.
res.json(departments.filter(department => roles.find(role => role.department_id === department.id)));
}
catch (e) {
console.log(e);
return res.status(500).json({error: "Internal server error"});
}
}));

// READ ROLES — full role catalog.
router.get('/roles',
handleSparkProxy(req =>
sparkFacade.allRoles(req.token)));

// READ ALL VOLUNTEERINGS
router.get('/volunteers', handleSparkProxy(req => sparkFacade.volunteers(req.token)));

// READ ALL VOLUNTEERS IN A SPECIFIC DEPARTMENT
router.get('/departments/:dId/volunteers',
handleSparkProxy(req =>
sparkFacade.volunteersByDepartment(req.token, req.params.dId)));

// CREATE MULTIPLE VOLUNTEERINGS — one per email in req.body.emails, all
// sharing the same role and is_production flag.
router.post('/departments/:dId/volunteers/', handleSparkProxy(req =>
sparkFacade.addVolunteers(req.token, req.params.dId, req.body.emails.map(email => ({
email,
role_id: req.body.role,
is_production: req.body.is_production
})))));

// UPDATE A SINGLE VOLUNTEERING — only role_id / is_production may change.
router.put('/departments/:dId/volunteers/:uid',
handleSparkProxy(req =>
sparkFacade.updateVolunteer(req.token, req.params.dId, req.params.uid, _.pick(req.body, ['role_id', 'is_production']))));

// DELETE A SINGLE VOLUNTEERING
router.delete('/departments/:dId/volunteers/:uid',
handleSparkProxy(req =>
sparkFacade.deleteVolunteer(req.token, req.params.dId, req.params.uid)));

// DELETE VOLUNTEERS CACHE
router.delete('/cache/volunteers',
handleSparkProxy(req =>
sparkFacade.deleteVolunteersCache()));
// GET VOLUNTEER DETAILS BY EMAIL LIST.
// NOTE(review): this reads req.body on a GET request — many clients and
// proxies drop GET bodies; consider POST or a query parameter. Confirm with
// the callers before changing.
router.get('/users', handleSparkProxy(req =>
sparkFacade.getUserDetailByMails(req.token, req.body.emails)));

// GET ALL EVENTS
router.get('/events', handleSparkProxy(req =>
sparkFacade.getAllEvents(req.token)));

module.exports = router;
6 changes: 2 additions & 4 deletions server/server.js
Original file line number Diff line number Diff line change
Expand Up @@ -5,8 +5,6 @@ const React = require('react');
const webpack = require('webpack');
const webpackDevServer = require('webpack-dev-server');
const webpackConfig = require("../webpack.config.js");
const http = require('http');
const axios = require('axios');
const mongoose = require('mongoose');
const cookieParser = require('cookie-parser');
const jwt = require('jsonwebtoken');
Expand Down Expand Up @@ -68,7 +66,6 @@ app.use('/api/v1', require('./routes/spark'));
app.use('/api/v1', require('./routes/shifts'));

app.use('/login', (req, res) => {

let token = req.query.token;
if (!token && devMode && process.env.LOCAL_SPARK === 'true') {
token = jwt.sign({
Expand Down Expand Up @@ -144,5 +141,6 @@ mongoose.Promise = Promise;
// Start the HTTP server on the configured port and log where to reach it.
// (The diff view had duplicated the "Listening at" line — old version without
// the trailing semicolon, new version with it; only one is kept.)
const server = app.listen(process.env.PORT, function () {
    const host = server.address().address;
    const port = server.address().port;
    console.log("Listening at http://%s:%s", host, port);
    console.log(`Go to http://${host}:${port}/login`);
});
136 changes: 25 additions & 111 deletions server/spark/spark.js
Original file line number Diff line number Diff line change
@@ -1,125 +1,39 @@
const path = require('path');
const fs = require('fs');
const http = require('http');
const axios = require('axios');
const NodeCache = require('node-cache');
const _ = require('lodash');
// const NodeCache = require('node-cache');

const SessionCookieName = process.env.JWT_KEY;
const sparkCache = new NodeCache();
// const sparkCache = new NodeCache();

// Facade over the Spark HTTP API (volunteers, departments, roles, events).
// NOTE(review): this span is residue of a diff view — the old implementation
// (NodeCache-backed caching, Cookie auth, lodash shaping) and the new one
// (static Authorization header, token-aware fetchSpark, mocks commit) are
// interleaved without +/- markers, and two method bodies are cut mid-way by
// hunk boundaries, so the braces below do NOT balance as written. Annotated
// as-is; resolve against the actual committed file before editing further.
class SparkFacade {

constructor(baseUrl = 'http://localhost:3000') {
this.baseUrl = baseUrl;
}

// Departments list; copies the English name onto a generic `name` field.
departments(token) {
return this.fetchSpark('/volunteers/departments', {headers: this.authHeader(token)})
.then(depts => depts.map(n => _.assign({name: n.name_en}, n)));
}

// Full role catalog.
allRoles(token) {
return this.fetchSpark('/volunteers/roles', {headers: this.authHeader(token)});
}

// Roles held by the user identified by email.
rolesByUser(token, userEmail) {
return this.fetchSpark(`/volunteers/user_roles?email=${userEmail}`, this.authHeader(token));
}

// rolesByUser(token, userId) {
// return this.fetchSpark(`/volunteers/${userId}/roles`, this.authHeader(token));
// }

// Old pre-commit version: volunteers listing with a NodeCache read-through.
volunteers(token) {
let volunteers;

volunteers = sparkCache.get('volunteers');

if (volunteers) {
return new Promise((resolve, reject) => {
resolve(volunteers);
});
// NOTE(review): diff splice — the NEW static authHeader (Authorization header,
// replacing the old Cookie-based one further down) landed inside the old
// volunteers() body at this hunk boundary.
static authHeader(token) {
return {'Authorization': `${SessionCookieName}=${token}`};
}

return this.fetchSpark('/volunteers/volunteers', {headers: this.authHeader(token)}).then(data => {
volunteers = data.map(item => _.assign({profile_id: item.user_id, phone: item.phone_number},
_.pick(item, ['department_id', 'email', 'first_name', 'last_name', 'got_ticket', 'is_production', 'role_id'])));

sparkCache.set('volunteers', volunteers);

return volunteers;
}
);
}

// Old pre-commit version: per-department volunteers with a NodeCache
// read-through keyed by department id.
volunteersByDepartment(token, departmentId) {

let volunteers;

volunteers = sparkCache.get(`volunteers-${departmentId}`);

if (volunteers) {
return new Promise((resolve, reject) => {
resolve(volunteers);
});
// NOTE(review): diff splice — the NEW constructor landed inside the old
// volunteersByDepartment() body at this hunk boundary.
constructor(baseUrl = 'http://localhost:3000') {
this.baseUrl = baseUrl;
}

return this.fetchSpark(`/volunteers/departments/${departmentId}/volunteers/`,
{headers: this.authHeader(token)}).then(data => {
volunteers = data.map(item => _.assign({profile_id: item.user_id, phone: item.phone_number},
_.pick(item, ['department_id', 'email', 'first_name', 'last_name', 'got_ticket', 'is_production', 'role_id']))
);

sparkCache.set(`volunteers-${departmentId}`, volunteers);

return volunteers;
});
}

// Bulk-create volunteerings in a department; invalidates both cache entries.
addVolunteers(token, departmentId, volunteers) {
sparkCache.del('volunteers');
sparkCache.del(`volunteers-${departmentId}`);

return this.fetchSpark(`/volunteers/departments/${departmentId}/volunteers`,
{headers: this.authHeader(token), method: 'post', data: volunteers});
}

// Update a single volunteering; returns only the upstream `status` field.
updateVolunteer(token, departmentId, volunteerId, volunteer) {
sparkCache.del('volunteers');
sparkCache.del(`volunteers-${departmentId}`);

return this.fetchSpark(`/volunteers/departments/${departmentId}/volunteers/${volunteerId}`,
{
headers: this.authHeader(token),
method: 'put',
data: volunteer
}).then(data => _.pick(data, ['status']))
}

// Remove a single volunteering; returns only the upstream `status` field.
deleteVolunteer(token, departmentId, volunteerId) {
sparkCache.del('volunteers');
sparkCache.del(`volunteers-${departmentId}`);

return this.fetchSpark(`/volunteers/departments/${departmentId}/volunteers/${volunteerId}/`, {
headers: this.authHeader(token), method: 'delete'
}).then(data => _.pick(data, ['status']))
}

// Flush every cached volunteers listing; always resolves {result: true}.
deleteVolunteersCache() {
sparkCache.flushAll();

return new Promise((resolve, reject) => resolve({result: true}));
}
// New in this commit: user details looked up by a list of email addresses
// (uses the new token-aware fetchSpark signature below).
getUserDetailByMails(token, emails) {
return this.fetchSpark(
'/users',
token,
{emails: emails});
}

// private — OLD fetchSpark(path, options): raw axios call, auth supplied by
// the caller through `options.headers`.
fetchSpark(path, options) {
return axios(`${this.baseUrl}${path}`, options).then(r => r.data);
}
// New in this commit: list all events.
getAllEvents(token) {
return this.fetchSpark('/events', token);
}

// OLD Cookie-based auth header (replaced by the static Authorization-based
// version spliced above).
authHeader(token) {
return {'Cookie': `${SessionCookieName}=${token}`};
}
// private — NEW fetchSpark(path, token, options): builds the auth header
// itself and lets `options` override/extend the axios config.
fetchSpark(path, token, options) {
return axios(
`${this.baseUrl}${path}`,
{
headers: SparkFacade.authHeader(token),
...options
})
.then(response => response.data);
}
}

module.exports = SparkFacade;
Loading

0 comments on commit 50cf644

Please sign in to comment.