diff --git a/.eslintrc.json b/.eslintrc.json
index 9ee6c64d..8893390f 100644
--- a/.eslintrc.json
+++ b/.eslintrc.json
@@ -13,7 +13,6 @@
     "afterEach": true
   },
   "rules": {
-    "arrow-parens": 0,
     "react/prop-types": 0,
     "no-var": ["error"],
     "arrow-parens": ["error", "always"],
@@ -22,6 +21,7 @@
       "anonymous": "always",
       "named": "never"
     }],
+    "object-curly-spacing": ["error", "never"],
     "prefer-const": ["error"]
   }
 }
diff --git a/config/strings.yaml b/config/strings.yaml
index 9b28e397..488c5d55 100644
--- a/config/strings.yaml
+++ b/config/strings.yaml
@@ -67,11 +67,6 @@
 playlist:
   previous: <
   next: '>'
 
-# pages/move-file
-moveFile:
-  title: !!js/function (title) => `Move ${title}`
-  prompt: !!js/function (title) => `Choose a folder to move '${title}' to`
-
 # errors
 error:
   403:
diff --git a/layouts/pages/move-file.ejs b/layouts/pages/move-file.ejs
deleted file mode 100644
index 176a2927..00000000
--- a/layouts/pages/move-file.ejs
+++ /dev/null
@@ -1,18 +0,0 @@
-
-
-  <%- include('../partials/head', {title: template('moveFile.title', locals.prettyName)}) %>
-
-    <%- include('../partials/header', {parentLinks: [], title: template('moveFile.title', locals.prettyName) }) %>
-
-      <%- include('../partials/nav') %>
-
-
-
-        <%- template('moveFile.prompt', locals.prettyName) %>
-
-        <%- include('../partials/folderList', {folders, id, parents}) %>
-
-    <%- include('../partials/footer', { pageType: 'search' }) %>
-
-
-
diff --git a/layouts/partials/footer.ejs b/layouts/partials/footer.ejs
index 5741dc42..1ccff568 100644
--- a/layouts/partials/footer.ejs
+++ b/layouts/partials/footer.ejs
@@ -7,7 +7,6 @@
 <% } %>
 <% if (locals.editLink) { %>
   <%- template('footer.buttons.edit') %>
-  <%- template('footer.buttons.move') %>
 <% } %>
 <% if (locals.parentId) { %>
   target="_blank"><%- template('footer.buttons.create') %>
diff --git a/server/cache.js b/server/cache.js
index ada518f3..2808468f 100644
--- a/server/cache.js
+++ b/server/cache.js
@@ -5,6 +5,7 @@
 const middlewareRouter = require('express-promise-router')()
 
 const log = require('./logger')
 const {requireWithFallback} = require('./utils')
+const {parseUrl} = require('./urlParser')
 
 const cache = requireWithFallback('cache/store')
 
@@ -13,15 +14,18 @@
 const noCacheDelay = parseInt(process.env.EDIT_CACHE_DELAY, 10) || 60 * 60
 
 exports.get = cache.get // expose the ability to retrieve cache data internally
 
-// detects purge requests and serves cached responses when available
+// detects purge requests in the query string
 middlewareRouter.use(async (req, res) => {
   // handle the purge request if purge or edit params are present
   const {purge, edit, ignore} = req.query
+
   if (purge || edit) {
     const {email} = edit ? req.userInfo : {}
     const overrides = ignore ? ignore.split(',') : null
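+    // resolve the path to its document metadata so the purge can be keyed by doc id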
+    const {meta} = await parseUrl(req.path)
+
     return purgeCache({
-      url: req.path,
+      id: meta.id,
       editEmail: email,
       ignore: overrides
     }).then(() => res.end('OK')).catch((err) => {
@@ -30,117 +34,63 @@
     })
   }
 
-  // otherwise consult cache for stored html
-  const data = await cache.get(req.path)
-
-  const {html, redirectUrl, id} = data || {}
-  if (redirectUrl) return res.redirect(redirectUrl)
-
-  // if no html was returned proceed to next middleware
-  if (!html) return 'next'
-
-  // attach doc id to the request for reading history tracking
-  res.locals.docId = id
-  log.info(`CACHE HIT ${req.path}.`)
-  res.end(html)
+  return 'next'
 })
 
 exports.middleware = middlewareRouter
 
-exports.add = async (id, newModified, path, html) => {
+exports.add = async (id, newModified, content) => {
   if (!newModified) throw new Error('Refusing to store new item without modified time.')
 
-  const data = await cache.get(path)
-  const {modified, noCache, html: oldHtml} = data || {}
+  const data = await cache.get(id)
+  const {modified, noCache, content: oldContent} = data || {}
 
   // don't store any items over noCache entries
   if (noCache) return // refuse to cache any items that are being edited
 
   // if there was previous data and it is not older than the new data, don't do anything
-  if (oldHtml && modified && !isNewer(modified, newModified)) return // nothing to do if data is current
+  if (oldContent && modified && !isNewer(modified, newModified)) return // nothing to do if data is current
 
   // store new data in the cache
-  return cache.set(path, {html, modified: newModified, id})
-}
-
-// redirects when a url changes
-// should we expose a cb here for testing?
-exports.redirect = async (path, newPath, modified) => {
-  const data = await cache.get(path)
-  const {noCache, redirectUrl} = data || {}
-
-  // since we run multiple pods, we don't need to set the redirect more than once
-  if (redirectUrl === newPath) throw new Error('Already configured that redirect')
-
-  log.info(`ADDING REDIRECT: ${path} => ${newPath}`)
-
-  await cache.set(path, {redirectUrl: newPath}).catch((err) => {
-    if (err) log.warn(`Failed setting redirect for ${path} => ${newPath}`, err)
-    return err
-  })
-
-  const preventCacheReason = noCache ? 'redirect_detected' : null
-  return purgeCache({
-    url: newPath,
-    modified,
-    editEmail: preventCacheReason,
-    ignore: ['redirect', 'missing', 'modified']
-  }).catch((err) => {
-    if (err && err.message !== 'Not found') log.warn(`Failed purging redirect destination ${newPath}`, err)
-    throw err
-  })
+  return cache.set(id, {content, modified: newModified, id})
 }
 
 // expose the purgeCache method externally so that list can call while building tree
 exports.purge = purgeCache
 
-async function purgeCache({url, modified, editEmail, ignore}) {
+async function purgeCache({id, modified, editEmail, ignore}) {
   modified = modified || moment().subtract(1, 'hour').utc().format('YYYY-MM-DDTHH:mm:ss.SSS[Z]')
 
   const overrides = ignore && Array.isArray(ignore) ? new Set(ignore) : new Set().add(ignore)
   const shouldIgnore = (type) => overrides.has(type) || overrides.has('all') || overrides.has('1')
 
-  if (!url) throw new Error(`Can't purge cache without url! Given url was ${url}`)
+  if (!id) throw new Error(`Can't purge cache without a document id! Given id was ${id}`)
 
-  const data = await cache.get(url)
-  // compare current cache entry data vs this request
-  const {redirectUrl, noCache, html, modified: oldModified, purgeId: lastPurgeId} = data || {}
+  const data = await cache.get(id)
 
-  if (redirectUrl && !shouldIgnore('redirect')) throw new Error('Unauthorized')
-  // edit is considered its own override for everything but redirect
+  if (!data) return // if no currently cached item, don't attempt to purge
+
+  // compare current cache entry data vs this request
+  const {noCache, content, purgeId: lastPurgeId} = data || {}
+  const html = content && content.html
 
+  // FIXME: this should be more robust
   if (editEmail && editEmail.includes('@')) {
-    log.info(`CACHE PURGE PERSIST for ${noCacheDelay}s (${editEmail}): ${url}`)
-    return cache.set(url, {noCache: true}, {ttl: noCacheDelay})
+    log.info(`CACHE PURGE PERSIST for ${noCacheDelay}s (${editEmail}): ${id}`)
+    return cache.set(id, {noCache: true}, {ttl: noCacheDelay})
   }
 
   const purgeId = `${modified}-${editEmail || ''}-${ignore}`
-  // if attempting to purge /trash but nothing has changed, skip.
-  if (purgeId === lastPurgeId && url === '/trash') return
-
-  // const isTrashed = url.split('/')[1] === 'trash'
   // try and dedupe extra requests from multiple pods (tidier logs)
-  if (purgeId === lastPurgeId && !shouldIgnore('all')) throw new Error(`Same purge id as previous request ${purgeId} for ${url}`)
+  if (purgeId === lastPurgeId && !shouldIgnore('all')) throw new Error(`Same purge id as previous request ${purgeId} for docId ${id}`)
 
+  // by default, don't try to purge empty
   if (!html && !shouldIgnore('missing')) throw new Error('Not found')
+  // by default, don't purge a noCache entry
   if (noCache && !shouldIgnore('editing')) throw new Error('Unauthorized')
-  // by default, don't purge when the modification time is not fresher than previous
-  if (!isNewer(oldModified, modified) && !shouldIgnore('modified')) throw new Error(`No purge of fresh content for ${url}`)
-
-  // if we passed all the checks, determine all ancestor links and purge
-  const segments = url.split('/').map((segment, i, segments) => {
-    return segments.slice(0, i).concat([segment]).join('/')
-  }).filter((s) => s.length) // don't allow purging empty string
-
-  // call the callback when all segments have been purged
-  return Promise.all(
-    segments.map((path) => {
-      log.info(`CACHE PURGE ${path} FROM CHANGE AT ${url}`)
-      // there is an edge here where a homepage upstream was being edited and already not in cache.
-      // we need to get the cache entries for all of these in case and not purge them to account for that edge
-      cache.set(path, {modified, purgeId})
-    })
-  )
+
+  // if all checks pass, purge
+  log.info(`CACHE PURGE ${id}`)
+  cache.set(id, {modified, purgeId})
 }
 
 function isNewer(oldModified, newModified) {
diff --git a/server/docs.js b/server/docs.js
index a80cbce5..485fccb1 100644
--- a/server/docs.js
+++ b/server/docs.js
@@ -5,11 +5,10 @@
 const cheerio = require('cheerio')
 const slugify = require('slugify')
 const xlsx = require('xlsx')
 
-const {getAuth} = require('./auth')
-const log = require('./logger')
-const {stringTemplate} = require('./utils')
-
+const cache = require('./cache')
 const formatter = require('./formatter')
+const log = require('./logger')
+const {getAuth} = require('./auth')
 
 const supportedTypes = new Set(['document', 'spreadsheet', 'text/html'])
 
@@ -31,41 +30,23 @@ exports.slugify = (text = '') => {
 }
 
 exports.fetchDoc = async (id, resourceType, req) => {
+  const data = await cache.get(id)
+  if (data && data.content) {
+    log.info(`CACHE HIT ${req.path}`)
+    return data.content
+  }
+
   const auth = await getAuth()
-  const [html, originalRevision] = await fetch({id, resourceType, req}, auth)
-  const processedHtml = formatter.getProcessedHtml(html)
-  const sections = getSections(html)
-  // maybe we should pull out headers here
-  return {html: processedHtml, originalRevision, sections, template: stringTemplate}
-}
+  const driveDoc = await fetch({id, resourceType, req}, auth)
+  const originalRevision = driveDoc[1]
 
-exports.fetchByline = (html, creatorOfDoc) => {
-  let byline = creatorOfDoc
-  const $ = cheerio.load(html)
-
-  // Iterates through all p tags to find byline
-  $('p').each((index, p) => {
-    // don't search any empty p tags
-    if (p.children.length < 1) return
-
-    // regex that checks for byline
-    const r = /^by.+[^.\n]$/mig
-    if (r.test(p.children[0].data)) {
-      byline = p.children[0].data
-      // Removes the word "By"
-      byline = byline.slice(3)
-      $(p).remove()
-    }
-
-    // only check the first p tag
-    return false
-  })
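+  // fetch() resolves to a [html, revision] pair; the revision carries the modifiedTime used for caching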
+  const {html, byline, createdBy, sections} = formatter.getProcessedDocAttributes(driveDoc)
+  const payload = {html, byline, createdBy, sections}
 
-  return {
-    byline,
-    html: $.html()
-  }
+  // cache only information from document body
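+  // the cache write is fire-and-forget; the response does not wait on it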
+  cache.add(id, originalRevision.data.modifiedTime, payload)
+  return payload
 }
 
 async function fetchHTMLForId(id, resourceType, req, drive) {
@@ -94,7 +75,7 @@
   if (!revisionSupported.has(resourceType)) {
     log.info(`Revision data not supported for ${resourceType}:${id}`)
-    return {data: { lastModifyingUser: {} }} // return mock/empty revision object
+    return {data: {lastModifyingUser: {}}} // return mock/empty revision object
   }
   return drive.revisions.get({
     fileId: id,
@@ -102,7 +83,7 @@
     fields: '*'
   }).catch((err) => {
     log.warn(`Failed retrieving revision data for ${resourceType}:${id}. Error was:`, err)
-    return {data: { lastModifyingUser: {} }} // return mock/empty revision object
+    return {data: {lastModifyingUser: {}}} // return mock/empty revision object
   })
 }
 
@@ -169,36 +150,3 @@ async function fetchHTML(drive, id) {
   })
   return data
 }
-
-function getSections(html) {
-  const $ = cheerio.load(html)
-  const headers = ['h1', 'h2']
-    .map((h) => `body ${h}`)
-    .join(', ')
-
-  const ordered = $(headers).map((i, el) => {
-    const tag = el.name
-    const $el = $(el)
-    const name = $el.text()
-    const url = `#${$el.attr('id')}`
-    return {
-      name,
-      url,
-      level: parseInt(tag.slice(-1), 10)
-    }
-  }).toArray()
-
-  // take our ordered sections and turn them into appropriately nested headings
-  const nested = ordered.reduce((memo, heading) => {
-    const tail = memo.slice(-1)[0]
-    const extended = Object.assign({}, heading, {subsections: []})
-    if (!tail || heading.level <= tail.level) {
-      return memo.concat(extended)
-    }
-
-    tail.subsections.push(heading)
-    return memo
-  }, [])
-
-  return nested
-}
diff --git a/server/formatter.js b/server/formatter.js
index 861439e2..0026b3f5 100644
--- a/server/formatter.js
+++ b/server/formatter.js
@@ -4,6 +4,8 @@
 const qs = require('querystring')
 const unescape = require('unescape')
 
 const list = require('./list')
 
+/* Your one stop shop for all your document processing needs. */
+
 const allowInlineCode = (process.env.ALLOW_INLINE_CODE || '').toLowerCase() === 'true'
 
 // this is getting a little long, maybe tweak so that we do subtasks separately
 function normalizeHtml(html) {
@@ -145,6 +147,67 @@ function checkForTableOfContents($, aTags) {
     /(\d+$)/mg.test($(aTags[1]).text()) // the second link should contain only a number
 }
 
+function fetchByline(html, creatorOfDoc) {
+  let byline = creatorOfDoc
+  const $ = cheerio.load(html, {xmlMode: true}) // prevent wrapping html tags, see cheerio/issues/1031
+
+  // Iterates through all p tags to find byline
+  $('p').each((index, p) => {
+    // don't search any empty p tags
+    if (p.children.length < 1) return
+
+    // regex that checks for byline
+    const r = /^by.+[^.\n]$/mig
+    if (r.test(p.children[0].data)) {
+      byline = p.children[0].data
+      // Removes the word "By"
+      byline = byline.slice(3)
+      $(p).remove()
+    }
+
+    // only check the first p tag
+    return false
+  })
+
+  return {
+    html: $.html(),
+    byline
+  }
+}
+
+function fetchSections(html) {
+  const $ = cheerio.load(html)
+  const headers = ['h1', 'h2']
+    .map((h) => `body ${h}`)
+    .join(', ')
+
+  const ordered = $(headers).map((i, el) => {
+    const tag = el.name
+    const $el = $(el)
+    const name = $el.text()
+    const url = `#${$el.attr('id')}`
+    return {
+      name,
+      url,
+      level: parseInt(tag.slice(-1), 10)
+    }
+  }).toArray()
+
+  // take our ordered sections and turn them into appropriately nested headings
+  const nested = ordered.reduce((memo, heading) => {
+    const tail = memo.slice(-1)[0]
+    const extended = Object.assign({}, heading, {subsections: []})
+    if (!tail || heading.level <= tail.level) {
+      return memo.concat(extended)
+    }
+
+    tail.subsections.push(heading)
+    return memo
+  }, [])
+
+  return nested
+}
+
 function convertYoutubeUrl(content) {
   // convert youtube url into embedded
   const youtubeUrl = /(>(?:https?:\/\/)?(?:www\.)?(?:youtube\.com|youtu\.be)\/(?:watch\?v=)?(.+?)<)/g
 
   return content
 }
 
-exports.getProcessedHtml = (src) => {
+function getProcessedHtml(src) {
   let html = normalizeHtml(src)
   html = convertYoutubeUrl(html)
   html = formatCode(html)
   html = pretty(html)
   return html
 }
+
+exports.getProcessedDocAttributes = (driveDoc) => {
+  // document information
+  // TODO: guard against null revision data?
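+  // revisionData may be the mock {lastModifyingUser: {}} when revision data is unsupported or unavailable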
+  const [originalHtml, {data: revisionData}] = driveDoc
+  // clean and prettify the HTML
+  const processedHtml = getProcessedHtml(originalHtml)
+  // crawl processed html for the bylines and sections
+  const sections = fetchSections(processedHtml)
+  const createdBy = revisionData.lastModifyingUser.displayName
+  const {byline, html} = fetchByline(processedHtml, createdBy)
+
+  return {html, byline, createdBy, sections}
+}
diff --git a/server/index.js b/server/index.js
index 7cdf4d32..73992d52 100644
--- a/server/index.js
+++ b/server/index.js
@@ -12,6 +12,7 @@
 const pages = require('./routes/pages')
 const categories = require('./routes/categories')
 const playlists = require('./routes/playlists')
 const readingHistory = require('./routes/readingHistory')
+const redirects = require('./routes/redirects')
 const errorPages = require('./routes/errors')
 
 const userAuth = requireWithFallback('userAuth')
@@ -77,6 +78,9 @@
 app.use(playlists)
 
 postload.forEach((middleware) => app.use(middleware))
 
+// if no page has been served, check for a redirect before erroring
+app.use(redirects)
+
 // error handler for rendering the 404 and 500 pages, must go last
 app.use(errorPages)
 
diff --git a/server/list.js b/server/list.js
index 93c342d0..79bfa2e4 100644
--- a/server/list.js
+++ b/server/list.js
@@ -71,7 +71,7 @@ async function updateTree() {
   const files = await fetchAllFiles({drive, driveType})
   const updatedData = produceTree(files, driveId)
 
-  const { tree, filenames } = updatedData
+  const {tree, filenames} = updatedData
   currentTree = tree
   currentFilenames = filenames
 
@@ -211,12 +211,31 @@ function produceTree(files, firstParent) {
   const oldInfo = docsInfo
   const oldBranches = driveBranches
   tags = tagIds
-  docsInfo = addPaths(byId) // update our outer cache w/ data including path information
+
+  const newDocsInfo = addPaths(byId)
+
+  // if docsInfo exists, asynchronously check if any files have been moved
+  if (Object.keys(docsInfo).length) setRedirects(docsInfo, newDocsInfo)
+
+  docsInfo = newDocsInfo // update our outer cache w/ data including path information
   driveBranches = byParent
   const tree = buildTreeFromData(firstParent, {info: oldInfo, tree: oldBranches})
   return {tree: tree, filenames: fileNames}
 }
 
+async function setRedirects(oldDocsInfo, newDocsInfo) {
+  Object.keys(newDocsInfo).forEach((id) => {
+    const currPath = newDocsInfo[id] && newDocsInfo[id].path
+    const lastPath = oldDocsInfo[id] && oldDocsInfo[id].path
+    // if no currPath, file was removed from the drive
+    // if no lastPath, file is a new addition to the drive
+    if (currPath && lastPath && currPath !== lastPath) {
+      log.info(`Doc ${id} moved, REDIRECT ${lastPath} → ${currPath}`)
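+      // store the redirect under the old path so the redirects route can serve it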
+      cache.add(lastPath, new Date(), {redirect: currPath})
+    }
+  })
+}
+
 function buildTreeFromData(rootParent, previousData, breadcrumb) {
   const {children, home, homePrettyName} = driveBranches[rootParent] || {}
   const parentInfo = docsInfo[rootParent] || {}
 
@@ -241,20 +260,20 @@
   // we have to assemble these paths differently
   return children.reduce((memo, id) => {
     const {slug} = docsInfo[id]
-    const nextCrumb = breadcrumb ? breadcrumb.concat({ id: rootParent, slug: parentInfo.slug }) : []
+    const nextCrumb = breadcrumb ? breadcrumb.concat({id: rootParent, slug: parentInfo.slug}) : []
 
     if (!memo.children[slug]) {
       // recurse building up breadcrumb
       memo.children[slug] = buildTreeFromData(id, previousData, nextCrumb)
     } else {
       log.warn(`Folder ${parentInfo.name} contains duplicate resources with slug ${slug}`)
-      const { name } = docsInfo[id]
+      const {name} = docsInfo[id]
       const previousDupes = memo.children[slug].duplicates || []
       memo.children[slug].duplicates = previousDupes.concat(name)
     }
 
     return memo
-  }, Object.assign({}, parentNode, { children: {} }))
+  }, Object.assign({}, parentNode, {children: {}}))
 }
 
 function addPaths(byId) {
@@ -308,7 +327,6 @@ async function retrievePlaylistData(id) {
 function handleUpdates(id, {info: lastInfo, tree: lastTree}) {
   const currentNode = driveBranches[id] || {}
   const lastNode = lastTree[id] || {}
-  const isFirstRun = !Object.keys(lastTree).length // oldTree is empty on the first check
 
   // combine current and previous children ids uniquely
   const allPages = (currentNode.children || [])
@@ -320,49 +338,28 @@
   // check all the nodes to see if they have changes
   allPages.forEach((id) => {
     // compare old item to new item
-    const newItem = docsInfo[id]
-    const oldItem = lastInfo[id]
-
-    // since we have a "trash" folder we need to account
-    // for both missing items and "trashed" items
-    const isTrashed = (item) => !item || item.path.split('/')[1] === 'trash'
-    if (!isFirstRun && (isTrashed(newItem) || isTrashed(oldItem))) {
-      const item = isTrashed(oldItem) ? newItem : oldItem
-      const {path, modifiedTime} = item
-      const action = isTrashed(oldItem) ? 'Added' : 'Removed'
-      // FIXME: This does not restore deleted documents which are undone to the same location
-      return cache.purge({
-        url: path,
-        modified: modifiedTime,
-        editEmail: `item${action}`,
-        ignore: ['missing', 'modified']
-      }).catch((err) => {
-        log.debug('Error purging trashed item cache', err)
-      })
-    }
+    const newItem = docsInfo[id] || {}
+    const oldItem = lastInfo[id] || {}
 
-    // don't allow direct purges updates for folders with a home file
-    const hasHome = newItem && (driveBranches[newItem.id] || {}).home
-    if (hasHome) return
+    const newModified = new Date(newItem.modifiedTime)
+    const oldModified = new Date(oldItem.modifiedTime)
+    const hasUpdates = newModified > oldModified
 
-    // if this existed before and the path changed, issue redirects
-    if (oldItem && newItem.path !== oldItem.path) {
-      cache.redirect(oldItem.path, newItem.path, newItem.modifiedTime)
-    } else {
-      // basically we are just calling purge because we don't know the last modified
-      cache.purge({url: newItem.path, modified: newItem.modifiedTime}).catch((err) => {
-        if (!err) return
+    // if no updates reported from drive API, don't purge.
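+    // note: a missing modifiedTime yields an invalid date, which never compares greater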
+    if (!hasUpdates) return
 
-        // Duplicate purge errors should be logged at debug level only
-        if (err.message.includes('Same purge id as previous')) return log.debug(`Ignoring duplicate cache purge for ${newItem.path}`, err)
+    cache.purge({id: newItem.id, modified: newItem.modifiedTime}).catch((err) => {
+      if (!err) return
 
-        // Ignore errors if not found or no fresh content, just allow the purge to stop
-        if (err.message.includes('Not found') || err.message.includes('No purge of fresh content')) return
+      // Duplicate purge errors should be logged at debug level only
+      if (err.message.includes('Same purge id as previous')) return log.debug(`Ignoring duplicate cache purge for ${newItem.path}`)
 
-        // Log all other cache purge errors as warnings
-        log.warn(`Cache purging error for ${newItem.path}`, err)
-      })
-    }
+      // Ignore errors if not found or no fresh content, just allow the purge to stop
+      if (err.message.includes('Not found') || err.message.includes('No purge of fresh content')) return
+
+      // Log all other cache purge errors as warnings
+      log.warn(`Cache purging error for ${newItem.path}`, err)
+    })
   })
 }
diff --git a/server/move.js b/server/move.js
deleted file mode 100644
index 19ac8494..00000000
--- a/server/move.js
+++ /dev/null
@@ -1,112 +0,0 @@
-'use strict'
-
-const {google} = require('googleapis')
-
-const log = require('./logger')
-const list = require('./list')
-const cache = require('./cache')
-const {getAuth} = require('./auth')
-const {sortDocs, stringTemplate} = require('./utils')
-
-const driveId = process.env.DRIVE_ID
-
-// return the folder html (or at least json object) that can be templated
-exports.getFolders = async () => {
-  const data = await list.getTree()
-
-  // map to just the data that we need, the ignore the top level drive entry
-  const extended = extendTree(data)
-  const folders = Object.assign({}, selectFolders(extended), {
-    // The drive doesn't have the same props as other folders
-    prettyName: stringTemplate('branding.prettyName'),
-    isTrashCan: false
-  })
-
-  return [folders]
-}
-
-exports.moveFile = async (id, destination, driveType = 'team') => {
-  const {parents, slug} = list.getMeta(id) || {}
-  const {path: basePath} = list.getMeta(destination) || {}
-
-  if (!parents) return Error('Not found')
-
-  const authClient = await getAuth()
-
-  const drive = google.drive({version: 'v3', auth: authClient})
-
-  const baseOptions = {
-    fileId: id,
-    addParents: [destination],
-    removeParents: parents
-  }
-
-  const teamOptions = {
-    teamDriveId: driveId,
-    corpora: 'teamDrive',
-    supportsTeamDrives: true,
-    includeTeamDriveItems: true,
-    ...baseOptions
-  }
-
-  const options = driveType === 'folder' ? baseOptions : teamOptions
-  await drive.files.update(options)
-
-  const oldUrls = parents.map((id) => {
-    const {path} = list.getMeta(id) || {}
-    return path ? `${path}/${slug}` : `/${slug}`
-  })
-
-  if (basePath === '/trash') {
-    oldUrls.forEach((url) => log.info(`TRASHED ${url}`))
-    return '/'
-  }
-
-  const newUrl = basePath ? `${basePath}/${slug}` : `/${slug}`
-
-  // log that we moved the page(s) to the new url
-  oldUrls.forEach((url) => {
-    log.info(`MOVED ${url} => ${newUrl}`)
-  })
-
-  // fake the drive updating immediately by manually copying cache
-  const data = await Promise.all(oldUrls.map((url) => {
-    return cache.get(url)
-  })).catch((err) => { log.warn('Error gettng cached URLs', err) })
-
-  // cache stores urls and page data, make sure to find actual data object for page
-  const hasHtml = data.filter(({html} = {}) => html && html.length)
-  if (!hasHtml.length) return '/'
-
-  const {docId, modified, html} = hasHtml[0]
-
-  return cache.add(docId, modified, newUrl, html).then(() => {
-    return newUrl
-  }).catch((err) => {
-    log.warn(`Failed saving new cache data for ${newUrl}`, err)
-    return '/'
-  })
-}
-
-// converts raw tree data used for routing into sorted lists with resource
-function extendTree({id, children: keys}) {
-  const {prettyName, resourceType, sort, isTrashCan} = list.getMeta(id) || {}
-
-  const children = Object.values(keys || {})
-  const extended = children && children.length && !isTrashCan
-    ? children.map(extendTree).sort(sortDocs)
-    : []
-
-  return Object.assign({}, {id, prettyName, resourceType, sort, isTrashCan}, { children: extended })
-}
-
-function selectFolders({id, prettyName, children, isTrashCan}) {
-  const filtered = children
-    .filter(isFolder)
-    .map(selectFolders)
-
-  return {id, prettyName, children: filtered, isTrashCan}
-}
-
-function isFolder({resourceType}) {
-  return resourceType && resourceType === 'folder'
-}
diff --git a/server/routes/categories.js b/server/routes/categories.js
index 7dfdae77..3a1b9380 100644
--- a/server/routes/categories.js
+++ b/server/routes/categories.js
@@ -2,10 +2,9 @@
 
 const router = require('express-promise-router')()
 
-const cache = require('../cache')
 const log = require('../logger')
 const {getMeta} = require('../list')
-const {fetchDoc, cleanName, fetchByline} = require('../docs')
+const {fetchDoc, cleanName} = require('../docs')
 const {getTemplates, sortDocs, stringTemplate} = require('../utils')
 const {parseUrl} = require('../urlParser')
 
@@ -13,6 +12,7 @@
 router.get('*', handleCategory)
 module.exports = router
 
 const categories = getTemplates('categories')
+
 async function handleCategory(req, res) {
   log.info(`GET ${req.path}`)
   // FIXME: consider putting this in middleware and save on req
@@ -50,25 +50,21 @@
   if (resourceType === 'folder') {
     return res.render(template, baseRenderData, (err, html) => {
       if (err) throw err
-
-      cache.add(id, meta.modifiedTime, req.path, html)
       res.end(html)
     })
   }
 
-  // for docs, fetch the html and then combine with the base data
-  const {html, originalRevision, sections} = await fetchDoc(id, resourceType, req)
   res.locals.docId = data.id // we need this for history later
 
-  const revisionData = originalRevision.data || { lastModifyingUser: {} }
-  const payload = fetchByline(html, revisionData.lastModifyingUser.displayName)
+  // for docs, fetch the html and then combine with the base data
+  const {html, byline, createdBy, sections} = await fetchDoc(id, resourceType, req)
+
   res.render(template, Object.assign({}, baseRenderData, {
-    content: payload.html,
-    byline: payload.byline,
-    createdBy: revisionData.lastModifyingUser.displayName,
+    content: html,
+    byline,
+    createdBy,
     sections
   }), (err, html) => {
     if (err) throw err
-    cache.add(id, meta.modifiedTime, req.path, html)
     res.end(html)
   })
 }
diff --git a/server/routes/pages.js b/server/routes/pages.js
index 7b74dc25..dec840ee 100644
--- a/server/routes/pages.js
+++ b/server/routes/pages.js
@@ -1,12 +1,11 @@
 'use strict'
 
 const search = require('../search')
-const move = require('../move')
 
 const router = require('express-promise-router')()
 
-const { getTree, getFilenames, getMeta, getTagged } = require('../list')
-const { getTemplates, sortDocs, stringTemplate, getConfig } = require('../utils')
+const {getTree, getFilenames, getMeta, getTagged} = require('../list')
+const {getTemplates, sortDocs, stringTemplate, getConfig} = require('../utils')
 
 router.get('/', handlePage)
 router.get('/:page', handlePage)
@@ -29,7 +28,7 @@
   if (!pages.has(page)) return 'next'
 
   const template = `pages/${page}`
-  const { q, id, dest, autocomplete } = req.query
+  const {q, autocomplete} = req.query
   if (page === 'search' && q) {
     return search.run(q, driveType).then((results) => {
       // special rule for the autocomplete case, go directly to the item if we find it.
       if (exactMatches.length === 1) return res.redirect(exactMatches[0].path)
     }
 
-      res.render(template, { q, results, template: stringTemplate })
+      res.render(template, {q, results, template: stringTemplate})
     })
   }
 
-  if (page === 'move-file' && id) {
-    if (!dest) {
-      const folders = await move.getFolders(id)
-      const { prettyName, parents } = getMeta(id)
-      return res.render(template, { prettyName, folders, id, parents, template: stringTemplate })
-    }
-
-    return move.moveFile(id, dest, driveType).then((result) => {
-      res.redirect(result)
-    })
-  }
+  // TODO: repurpose old getFolders/folder view from move-file as tree view for files
 
   if (page === 'categories' || page === 'index') {
     const tree = await getTree()
     const categories = buildDisplayCategories(tree)
-    res.render(template, { ...categories, template: stringTemplate })
+    res.render(template, {...categories, template: stringTemplate})
     return
   }
 
-  res.render(template, { template: stringTemplate })
+  res.render(template, {template: stringTemplate})
 }
 
 function buildDisplayCategories(tree) {
@@ -75,14 +64,14 @@
   // Ignore pages at the root of the site on the category page
   const all = categories
     .map((c) => Object.assign({}, c, getMeta(c.id)))
-    .filter(({ resourceType, tags, isTrashCan }) => resourceType === 'folder' && !tags.includes('hidden') && !isTrashCan)
+    .filter(({resourceType, tags, isTrashCan}) => resourceType === 'folder' && !tags.includes('hidden') && !isTrashCan)
    .sort(sortDocs)
    .map((category) => {
-      category.children = Object.values(category.children || {}).map(({ id }) => {
-        const { prettyName: name, path: url, resourceType, sort, tags } = getMeta(id)
-        return { name, resourceType, url, sort, tags }
+      category.children = Object.values(category.children || {}).map(({id}) => {
+        const {prettyName: name, path: url, resourceType, sort, tags} = getMeta(id)
+        return {name, resourceType, url, sort, tags}
       })
-        .filter(({ tags }) => !tags.includes('hidden'))
+        .filter(({tags}) => !tags.includes('hidden'))
         .sort(sortDocs)
       return category
     })
@@ -94,8 +83,8 @@
     .map(getMeta)
     .sort(sortDocs)
 
-    return { ...module, items }
+    return {...module, items}
   })
 
-  return { all, modules }
+  return {all, modules}
 }
diff --git a/server/routes/playlists.js b/server/routes/playlists.js
index 42f3df3e..833e8937 100644
--- a/server/routes/playlists.js
+++ b/server/routes/playlists.js
@@ -4,7 +4,7 @@
 const router = require('express-promise-router')()
 
 const log = require('../logger')
 const {getMeta, getPlaylist} = require('../list')
-const {fetchDoc, cleanName, fetchByline} = require('../docs')
+const {fetchDoc, cleanName} = require('../docs')
 const {stringTemplate} = require('../utils')
 const {parseUrl} = require('../urlParser')
 
@@ -14,7 +14,7 @@ module.exports = router
 
 async function handlePlaylist(req, res) {
   const {meta, parent, data} = await parseUrl(req.path)
 
-  if (!meta || !data) throw new Error('Not found')
+  if (!meta || !data) return 'next'
 
   const {resourceType, tags, id} = meta
   const {breadcrumb} = data
@@ -39,17 +39,15 @@
   log.info('Getting page in playlist')
 
   // process data
-  const {html, originalRevision, sections} = await fetchDoc(id, resourceType, req)
-  const revisionData = originalRevision.data
-  const payload = fetchByline(html, revisionData.lastModifyingUser.displayName)
+  const {html, byline, createdBy, sections} = await fetchDoc(id, resourceType, req)
 
   const playlistPageData = await preparePlaylistPage(data, req.path, parentMeta)
 
   // render as a playlist
   return res.render(`playlists/leaf`, Object.assign({}, playlistPageData, {
     template: stringTemplate,
-    content: payload.html,
-    byline: payload.byline,
-    createdBy: revisionData.lastModifyingUser.displayName,
+    content: html,
+    byline: byline,
+    createdBy,
     sections,
     title: meta.prettyName
   }), (err, html) => {
diff --git a/server/routes/readingHistory.js b/server/routes/readingHistory.js
index 763c395a..6ae19ce3 100644
--- a/server/routes/readingHistory.js
+++ b/server/routes/readingHistory.js
@@ -18,6 +18,7 @@
 router.use(async (req, res) => {
   if (!datastoreClient) {
     datastoreClient = await getDatastoreClient()
+    if (!datastoreClient) return 'next' // if there is still no client, continue
   }
 
   req.on('end', () => {
@@ -57,11 +58,11 @@ async function fetchHistory(userInfo, historyType, queryLimit) {
   const mostViewedQuery = client.createQuery(['LibraryView' + historyType])
     .filter('userId', '=', userInfo.userId)
-    .order('viewCount', { descending: true })
+    .order('viewCount', {descending: true})
     .limit(datastoreLimit)
 
   const lastViewedQuery = client.createQuery(['LibraryView' + historyType])
     .filter('userId', '=', userInfo.userId)
-    .order('lastViewedAt', { descending: true })
+    .order('lastViewedAt', {descending: true})
     .limit(datastoreLimit)
 
   const results = await Promise.all([
@@ -121,12 +122,12 @@ async function getDatastoreClient() {
 function recordView(docMeta, userInfo, datastoreClient) {
   const docKey = datastoreClient.key(['LibraryViewDoc', [userInfo.userId, docMeta.id].join(':')])
-  updateViewRecord(docKey, { documentId: docMeta.id }, userInfo, datastoreClient)
+  updateViewRecord(docKey, {documentId: docMeta.id}, userInfo, datastoreClient)
 
   if (docMeta.topLevelFolder && docMeta.topLevelFolder.tags.includes('team')) {
     const teamId = docMeta.topLevelFolder.id
     const teamKey = datastoreClient.key(['LibraryViewTeam', [userInfo.userId, teamId].join(':')])
-    updateViewRecord(teamKey, { teamId: teamId }, userInfo, datastoreClient)
+    updateViewRecord(teamKey, {teamId: teamId}, userInfo, datastoreClient)
   }
 }
 
@@ -156,5 +157,9 @@ function updateViewRecord(viewKey, metadata, userInfo, datastoreClient) {
     }).catch((err) => {
       log.error('Failed saving reading history to GCloud datastore:', err)
     })
+  }).catch((err) => {
+    // TODO: don't attempt to store if datastore is not enabled
+    if (err.code === 7) return log.warn('Cloud datastore not enabled. Reading history was not recorded.')
+    log.error(err)
   })
 }
diff --git a/server/routes/redirects.js b/server/routes/redirects.js
new file mode 100644
index 00000000..f8d3539e
--- /dev/null
+++ b/server/routes/redirects.js
@@ -0,0 +1,17 @@
+'use strict'
+
+const router = require('express-promise-router')()
+
+const cache = require('../cache')
+
+router.get('*', handleRedirects)
+module.exports = router
+
+async function handleRedirects(req, res) {
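+  // moved docs are cached under their old path with a {redirect: newPath} payload by setRedirects in server/list.js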
+  const pathCache = await cache.get(req.path)
+  if (!pathCache) throw new Error('Not found')
+
+  const {content} = pathCache
+  if (content.redirect) return res.redirect(content.redirect)
+  throw new Error('Not found')
+}
diff --git a/styles/partials/core/_pages.scss b/styles/partials/core/_pages.scss
index 63f5b48b..1c8c4b1c 100644
--- a/styles/partials/core/_pages.scss
+++ b/styles/partials/core/_pages.scss
@@ -402,29 +402,6 @@
   }
 }
 
-#move-file-page {
-
-  li {
-    text-indent: 0;
-    display: block;
-  }
-
-  .folder {
-
-    i {
-      color: mix(darken($accent-light, 15%), $gray-20, 50%);
-    }
-
-    &.active {
-      > i {
-        color: $black;
-      }
-    }
-
-  }
-
-}
-
 // Playlist Page
 //------------------------------------------------------------------------------
 
diff --git a/test/functional/pages.test.js b/test/functional/pages.test.js
index bf7f3e9c..eefe2ceb 100644
--- a/test/functional/pages.test.js
+++ b/test/functional/pages.test.js
@@ -58,18 +58,6 @@ describe('Server responses', () => {
     })
   })
 
-  it('should render folder list when moving a file', () => {
-    return request(app)
-      .get('/move-file?id=Test3')
-      .expect(200)
-      .then((res) => {
-        expect(res.text).to.include('Choose a folder to move \'Test 3\' to')
-        // check it has folder list and a folder to move it to
-        expect(res.text).to.include('