diff --git a/server/activity/activity.controller.js b/server/activity/activity.controller.js index 0e974e762..c3be0fe7b 100644 --- a/server/activity/activity.controller.js +++ b/server/activity/activity.controller.js @@ -1,10 +1,10 @@ 'use strict'; +const { Activity, Repository } = require('../shared/database'); const { getOutlineLevels, isOutlineActivity } = require('../../config/shared/activities'); -const { Activity } = require('../shared/database'); const { fetchActivityContent } = require('../shared/publishing/helpers'); const find = require('lodash/find'); const get = require('lodash/get'); @@ -66,13 +66,15 @@ function publish({ activity }, res) { .then(data => res.json({ data })); } -function clone({ activity, body, user }, res) { +async function clone({ activity, body, user }, res) { const { repositoryId, parentId, position } = body; + // req.repository can not be used as it is an origin repository + const repository = await Repository.findByPk(repositoryId); const context = { userId: user.id }; - return activity.clone(repositoryId, parentId, position, context).then(mappings => { - const opts = { where: { id: Object.values(mappings) } }; - return Activity.findAll(opts).then(data => res.json({ data })); - }); + const mappings = await activity.clone(repository, parentId, position, context); + const opts = { where: { id: Object.values(mappings) } }; + const activities = await Activity.findAll(opts); + return res.json({ data: activities }); } function getPreviewUrl({ activity }, res) { diff --git a/server/activity/activity.model.js b/server/activity/activity.model.js index 0cfa5fb79..a6d198ffe 100644 --- a/server/activity/activity.model.js +++ b/server/activity/activity.model.js @@ -154,11 +154,11 @@ class Activity extends Model { return Events; } - static async cloneActivities(src, dstRepositoryId, dstParentId, opts) { + static async cloneActivities(src, dstRepository, dstParentId, opts) { if (!opts.idMappings) opts.idMappings = {}; const { idMappings, 
context, transaction } = opts; const dstActivities = await Activity.bulkCreate(map(src, it => ({ - repositoryId: dstRepositoryId, + repositoryId: dstRepository.id, parentId: dstParentId, ...pick(it, ['type', 'position', 'data', 'refs', 'modifiedAt']) })), { returning: true, context, transaction }); @@ -168,18 +168,18 @@ class Activity extends Model { acc[it.id] = parent.id; const where = { activityId: it.id, detached: false }; const elements = await ContentElement.findAll({ where, transaction }); - await ContentElement.cloneElements(elements, parent, { context, transaction }); + await ContentElement.cloneElements(elements, parent, dstRepository, { context, transaction }); const children = await it.getChildren({ where: { detached: false } }); if (!children.length) return acc; - return Activity.cloneActivities(children, dstRepositoryId, parent.id, opts); + return Activity.cloneActivities(children, dstRepository, parent.id, opts); }, idMappings); } - clone(repositoryId, parentId, position, context) { + async clone(repository, parentId, position, context) { return this.sequelize.transaction(transaction => { if (position) this.position = position; return Activity.cloneActivities( - [this], repositoryId, parentId, { context, transaction } + [this], repository, parentId, { context, transaction } ); }); } diff --git a/server/content-element/content-element.model.js b/server/content-element/content-element.model.js index 00d7865e5..4de56d6b4 100644 --- a/server/content-element/content-element.model.js +++ b/server/content-element/content-element.model.js @@ -2,10 +2,15 @@ const { Model, Op } = require('sequelize'); const calculatePosition = require('../shared/util/calculatePosition'); +const cloneContentElement = require('../shared/util/cloneContentElement'); const { ContentElement: Events } = require('../../common/sse'); +const get = require('lodash/get'); +const getFileMetas = require('../shared/util/getFileMetas'); const hooks = require('./hooks'); const isNumber = 
require('lodash/isNumber'); const pick = require('lodash/pick'); +const { SCHEMAS } = require('../../config/shared/activities'); +const storage = require('../repository/storage'); class ContentElement extends Model { static fields(DataTypes) { @@ -115,20 +120,18 @@ class ContentElement extends Model { : ContentElement.findAll(opt).map(hooks.applyFetchHooks); } - static cloneElements(src, container, options) { + static async cloneElements(src, container, repository, options) { const { id: activityId, repositoryId } = container; const { context, transaction } = options; - return this.bulkCreate(src.map(it => { - return Object.assign(pick(it, [ - 'type', - 'position', - 'data', - 'contentId', - 'contentSignature', - 'refs', - 'meta' - ]), { activityId, repositoryId }); - }), { returning: true, context, transaction }); + const repositoryAssetsPath = storage.getPath(repository.id); + const metaBySchemaType = getFileMetas(SCHEMAS); + const metaByElementType = get(metaBySchemaType, [repository.schema, 'element']); + const elements = await Promise.all(src.map(async it => { + const element = pick(it, ['type', 'position', 'data', 'contentId', 'contentSignature', 'refs', 'meta']); + const { data, meta } = await cloneContentElement(element, repositoryAssetsPath, metaByElementType); + return Object.assign(element, { activityId, data, meta, repositoryId }); + })); + return this.bulkCreate(elements, { returning: true, context, transaction }); } /** diff --git a/server/repository/repository.model.js b/server/repository/repository.model.js index 04305fb6c..214207097 100644 --- a/server/repository/repository.model.js +++ b/server/repository/repository.model.js @@ -131,7 +131,7 @@ class Repository extends Model { const src = await Activity.findAll({ where: { repositoryId: this.id, parentId: null }, transaction }); - const idMap = await Activity.cloneActivities(src, dst.id, null, { context, transaction }); + const idMap = await Activity.cloneActivities(src, dst, null, { context, 
transaction }); await dst.mapClonedReferences(idMap, transaction); return dst; }); diff --git a/server/script/migrateAssetsLocation.js b/server/script/migrateAssetsLocation.js index c3b83bd41..dd87cebc9 100644 --- a/server/script/migrateAssetsLocation.js +++ b/server/script/migrateAssetsLocation.js @@ -7,16 +7,15 @@ const { Revision, sequelize } = require('../shared/database'); +const cloneContentElement = require('../shared/util/cloneContentElement'); +const cloneFileMeta = require('../shared/util/cloneFileMeta'); const get = require('lodash/get'); +const getFileMetas = require('../shared/util/getFileMetas'); const Listr = require('listr'); -const path = require('path'); const Promise = require('bluebird'); -const { protocol } = require('../../config/server/storage'); const { SCHEMAS } = require('../../config/shared/activities'); const storage = require('../repository/storage'); -const toPairs = require('lodash/toPairs'); -const ASSET_PATH_REGEX = /(?repository\/assets\/(?[^?]*))/; const CHUNK_SIZE = 2000; const ENTITIES = { @@ -28,7 +27,7 @@ const ENTITIES = { const mapEntityToAction = { [ENTITIES.REPOSITORY]: 'migrateRepository', [ENTITIES.ACTIVITY]: 'migrateActivity', - [ENTITIES.CONTENT_ELEMENT]: 'migrateContentElement' + [ENTITIES.CONTENT_ELEMENT]: 'cloneContentElement' }; migrate() @@ -43,13 +42,13 @@ migrate() async function migrate() { const transaction = await sequelize.transaction(); - const metaBySchemaType = getFileMetas(SCHEMAS); - const tasks = await getTasks(metaBySchemaType, transaction); + const tasks = await getTasks(transaction); return tasks.run().then(() => transaction.commit()); } -async function getTasks(metaBySchemaType, transaction) { +async function getTasks(transaction) { const repositories = await Repository.findAll({ transaction }); + const metaBySchemaType = getFileMetas(SCHEMAS); const tasks = repositories.map(repository => ({ title: `Migrate repository "${repository.name}"`, task: () => { @@ -107,7 +106,7 @@ class 
RepositoryMigration { async migrateRepository() { const { repository, repositoryMeta: metaConfigs } = this; - const data = await this.migrateFileMeta(repository.data, metaConfigs); + const data = await cloneFileMeta(repository.data, metaConfigs, this.repositoryAssetsPath); return { data }; } @@ -129,7 +128,7 @@ class RepositoryMigration { async migrateActivity(activity) { const { type, data: metaInputs } = activity; const metaConfigs = get(this.metaByActivityType, type, []); - const data = await this.migrateFileMeta(metaInputs, metaConfigs); + const data = await cloneFileMeta(metaInputs, metaConfigs, this.repositoryAssetsPath); return { data }; } @@ -141,31 +140,11 @@ class RepositoryMigration { { transaction } ); return Promise.each(contentElements, async it => { - const payload = await this.migrateContentElement(it); + const payload = await cloneContentElement(it, this.repositoryAssetsPath, this.metaByElementType); return it.update(payload, { transaction }); }); } - async migrateContentElement(element) { - const data = await this.migrateContentElementData(element); - const meta = await this.migrateContentElementMeta(element); - return { data, meta }; - } - - async migrateContentElementData(element) { - const { type, data } = element; - if (type === 'IMAGE') return this.imageMigrationHandler(element); - const embeds = data.embeds && (await this.embedsMigrationHandler(element)); - const assets = data.assets && (await this.defaultMigrationHandler(element)); - return { ...data, ...embeds, ...assets }; - } - - async migrateContentElementMeta(element) { - const { type, meta: metaInputs } = element; - const metaConfigs = get(this.metaByElementType, type, []); - return this.migrateFileMeta(metaInputs, metaConfigs); - } - async migrateRevisions(entity) { const { repositoryId, transaction } = this; const options = { @@ -199,97 +178,4 @@ class RepositoryMigration { const payload = await (this[handler] && this[handler](state)); return { state: { ...state, ...payload } }; } - 
- async imageMigrationHandler({ data }) { - const url = get(data, 'url'); - if (!url) return data; - const { key, newKey } = resolveNewURL(url, this.repositoryAssetsPath) || {}; - if (!key || !newKey) return data; - await storage.copyFile(key, newKey); - return { ...data, url: newKey }; - } - - async embedsMigrationHandler(element) { - const { repositoryId, data } = element; - const embeds = await Promise.reduce(Object.entries(data.embeds), async (acc, [id, embed]) => { - const payload = await this.migrateContentElement({ repositoryId, ...embed }); - return { ...acc, [id]: { ...embed, ...payload } }; - }, {}); - return { embeds }; - } - - async defaultMigrationHandler({ data }) { - const updatedAssets = await Promise - .filter(toPairs(data.assets), ([_, value]) => value.startsWith(protocol)) - .reduce(async (acc, [key, value]) => { - const { key: oldKey, newKey } = resolveNewURL(value, this.repositoryAssetsPath) || {}; - if (!oldKey || !newKey) return { ...acc, [key]: value }; - await storage.copyFile(oldKey, newKey); - return { ...acc, [key]: `${protocol}${newKey}` }; - }, {}); - return { assets: { ...data.assets, ...updatedAssets } }; - } - - async migrateFileMeta(metaInputs, metaConfigs) { - const newMeta = await Promise.reduce(metaConfigs, async (acc, metaKey) => { - const meta = get(metaInputs, metaKey); - if (!meta) return acc; - const url = get(meta, 'url'); - if (!url) return acc; - const { key, newKey } = resolveNewURL(url, this.repositoryAssetsPath) || {}; - if (!key || !newKey) return acc; - await storage.copyFile(key, newKey); - return { - ...acc, - [metaKey]: { - ...meta, - key: newKey, - url: `${protocol}${newKey}` - } - }; - }, {}); - return { ...metaInputs, ...newMeta }; - } -} - -function getFileMetas(schemas) { - return schemas.reduce((acc, { id, meta, structure, elementMeta }) => { - return { - ...acc, - [id]: { - repository: getFileMetaKeys(meta), - activity: getMetaByActivityType(structure), - element: getMetaByElementType(elementMeta) - } - }; 
- }, {}); -} - -function getMetaByActivityType(structure = []) { - return structure.reduce((acc, { type, meta }) => { - const fileMetaKeys = getFileMetaKeys(meta); - if (!fileMetaKeys.length) return acc; - return { ...acc, [type]: fileMetaKeys }; - }, {}); -} - -function getMetaByElementType(elementMeta = []) { - return elementMeta.reduce((acc, { type, inputs }) => { - const fileMetaKeys = getFileMetaKeys(inputs); - if (!fileMetaKeys.length) return acc; - return { ...acc, [type]: fileMetaKeys }; - }, {}); -} - -function getFileMetaKeys(meta = []) { - return meta.filter(it => it.type === 'FILE').map(it => it.key); -} - -function resolveNewURL(assetUrl, targetDir) { - if (assetUrl.startsWith(protocol)) assetUrl = assetUrl.substr(protocol.length); - const result = assetUrl.match(ASSET_PATH_REGEX); - if (!result) return; - const { groups: { directory, fileName } } = result; - const newKey = path.join(targetDir, fileName); - return { key: directory, newKey }; } diff --git a/server/shared/transfer/default/processors.js b/server/shared/transfer/default/processors.js index 6b12633f6..777adb3ad 100644 --- a/server/shared/transfer/default/processors.js +++ b/server/shared/transfer/default/processors.js @@ -6,8 +6,11 @@ const { Repository, RepositoryUser } = require('../../database'); +const cloneContentElement = require('../../util/cloneContentElement'); const filter = require('lodash/filter'); const forEach = require('lodash/forEach'); +const get = require('lodash/get'); +const getFileMetas = require('../../util/getFileMetas'); const isEmpty = require('lodash/isEmpty'); const last = require('lodash/last'); const map = require('lodash/map'); @@ -70,10 +73,11 @@ async function processRepository(repository, _enc, { context, transaction }) { Object.assign(repository, { description, name }); const options = { context: { userId }, transaction }; const repositoryRecord = omit(repository, IGNORE_ATTRS); - const { id } = await Repository.create(repositoryRecord, options); + const { 
id, schema } = await Repository.create(repositoryRecord, options); const userRecord = { userId, repositoryId: id, role: ADMIN }; await RepositoryUser.create(userRecord, { transaction }); context.repositoryId = id; + context.repoSchema = schema; } async function processActivities(activities, _enc, options) { @@ -129,15 +133,19 @@ function remapActivityRefs(activity, { context, transaction }) { return activity.save({ transaction }); } -function insertElements(elements, { context, transaction }) { - const { activityIdMap, repositoryId, userId } = context; +async function insertElements(elements, { context, storage, transaction }) { + const { activityIdMap, repositoryId, repoSchema, userId } = context; if (!repositoryId) throw new Error('Invalid repository id'); - const elementRecords = map(elements, it => { + const metaBySchemaType = getFileMetas(SCHEMAS); + const metaByElementType = get(metaBySchemaType, [repoSchema, 'element']); + const repositoryAssetsPath = storage.getPath(repositoryId); + const elementRecords = await Promise.all(map(elements, async it => { const activityId = activityIdMap[it.activityId]; if (!activityId) throw new Error('Invalid activity id'); - Object.assign(it, { activityId, repositoryId }); + const { data, meta } = await cloneContentElement(it, repositoryAssetsPath, metaByElementType); + Object.assign(it, { activityId, data, meta, repositoryId }); return omit(it, IGNORE_ATTRS); - }); + })); const options = { context: { userId }, returning: true, transaction }; return ContentElement.bulkCreate(elementRecords, options); } diff --git a/server/shared/util/cloneContentElement.js b/server/shared/util/cloneContentElement.js new file mode 100644 index 000000000..db47cd40e --- /dev/null +++ b/server/shared/util/cloneContentElement.js @@ -0,0 +1,63 @@ +'use strict'; + +const cloneFileMeta = require('./cloneFileMeta'); +const get = require('lodash/get'); +const Promise = require('bluebird'); +const { protocol } = 
require('../../../config/server/storage'); +const resolveAssetURL = require('./resolveAssetURL'); +const storage = require('../../repository/storage'); +const toPairs = require('lodash/toPairs'); + +const IMAGE_ELEMENT_TYPE = 'IMAGE'; + +async function embedsMigrationHandler(element, repositoryAssetsPath) { + const { repositoryId, data } = element; + const embeds = await Promise.reduce(Object.entries(data.embeds), async (acc, [id, embed]) => { + const payload = await cloneContentElement({ repositoryId, ...embed }, repositoryAssetsPath); + return { ...acc, [id]: { ...embed, ...payload } }; + }, {}); + return { embeds }; +} + +async function defaultMigrationHandler({ data }, repositoryAssetsPath) { + const updatedAssets = await Promise + .filter(toPairs(data.assets), ([_, value]) => value.startsWith(protocol)) + .reduce(async (acc, [key, value]) => { + const { key: oldKey, newKey } = resolveAssetURL(value, repositoryAssetsPath) || {}; + if (!oldKey || !newKey) return { ...acc, [key]: value }; + await storage.copyFile(oldKey, newKey); + return { ...acc, [key]: `${protocol}${newKey}` }; + }, {}); + return { assets: { ...data.assets, ...updatedAssets } }; +} + +async function imageMigrationHandler({ data }, repositoryAssetsPath) { + const url = get(data, 'url'); + if (!url) return data; + const { key, newKey } = resolveAssetURL(url, repositoryAssetsPath) || {}; + if (!key || !newKey) return data; + await storage.copyFile(key, newKey); + return { ...data, url: newKey }; +} + +async function migrateData(element, repositoryAssetsPath) { + const { type, data } = element; + if (type === IMAGE_ELEMENT_TYPE) return imageMigrationHandler(element, repositoryAssetsPath); + const embeds = data.embeds && (await embedsMigrationHandler(element, repositoryAssetsPath)); + const assets = data.assets && (await defaultMigrationHandler(element, repositoryAssetsPath)); + return { ...data, ...embeds, ...assets }; +} + +async function migrateMeta(element, repositoryAssetsPath, 
metaByElementType) { + const { type, meta: metaInputs } = element; + const metaConfigs = get(metaByElementType, type, []); + return cloneFileMeta(metaInputs, metaConfigs, repositoryAssetsPath); +} + +async function cloneContentElement(element, repositoryAssetsPath, metaByElementType) { + const data = await migrateData(element, repositoryAssetsPath); + const meta = await migrateMeta(element, repositoryAssetsPath, metaByElementType); + return { data, meta }; +} + +module.exports = cloneContentElement; diff --git a/server/shared/util/cloneFileMeta.js b/server/shared/util/cloneFileMeta.js new file mode 100644 index 000000000..8605484a6 --- /dev/null +++ b/server/shared/util/cloneFileMeta.js @@ -0,0 +1,28 @@ +'use strict'; + +const get = require('lodash/get'); +const Promise = require('bluebird'); +const { protocol } = require('../../../config/server/storage'); +const resolveAssetURL = require('./resolveAssetURL'); +const storage = require('../../repository/storage'); + +module.exports = async (metaInputs, metaConfigs, repositoryAssetsPath) => { + const newMeta = await Promise.reduce(metaConfigs, async (acc, metaKey) => { + const meta = get(metaInputs, metaKey); + if (!meta) return acc; + const url = get(meta, 'url'); + if (!url) return acc; + const { key, newKey } = resolveAssetURL(url, repositoryAssetsPath) || {}; + if (!key || !newKey) return acc; + await storage.copyFile(key, newKey); + return { + ...acc, + [metaKey]: { + ...meta, + key: newKey, + url: `${protocol}${newKey}` + } + }; + }, {}); + return { ...metaInputs, ...newMeta }; +}; diff --git a/server/shared/util/getFileMetas.js b/server/shared/util/getFileMetas.js new file mode 100644 index 000000000..5685cb5b9 --- /dev/null +++ b/server/shared/util/getFileMetas.js @@ -0,0 +1,36 @@ +'use strict'; + +const FILE_ELEMENT_TYPE = 'FILE'; + +function getMetaByActivityType(structure = []) { + return structure.reduce((acc, { type, meta }) => { + const fileMetaKeys = getFileMetaKeys(meta); + if (!fileMetaKeys.length) 
return acc; + return { ...acc, [type]: fileMetaKeys }; + }, {}); +} + +function getMetaByElementType(elementMeta = []) { + return elementMeta.reduce((acc, { type, inputs }) => { + const fileMetaKeys = getFileMetaKeys(inputs); + if (!fileMetaKeys.length) return acc; + return { ...acc, [type]: fileMetaKeys }; + }, {}); +} + +function getFileMetaKeys(meta = []) { + return meta.filter(it => it.type === FILE_ELEMENT_TYPE).map(it => it.key); +} + +module.exports = schemas => { + return schemas.reduce((acc, { id, meta, structure, elementMeta }) => { + return { + ...acc, + [id]: { + repository: getFileMetaKeys(meta), + activity: getMetaByActivityType(structure), + element: getMetaByElementType(elementMeta) + } + }; + }, {}); +}; diff --git a/server/shared/util/resolveAssetURL.js b/server/shared/util/resolveAssetURL.js new file mode 100644 index 000000000..05b10358d --- /dev/null +++ b/server/shared/util/resolveAssetURL.js @@ -0,0 +1,40 @@ +'use strict'; + +const path = require('path'); +const { protocol } = require('../../../config/server/storage'); + +/** + * The regular expression matching old assets directory structure. + * @type {RegExp} + * @const + * @private + */ +const OLD_ASSET_PATH_REGEX = /(?repository\/assets\/(?[^?]*))/; + +/** + * The regular expression matching new assets directory structure. + * @type {RegExp} + * @const + * @private + */ +const NEW_ASSET_PATH_REGEX = /(?repository\/\d+\/assets\/(?[^?]*))/; + +/** + * Resolves with a new asset URL if `assetUrl` can be matched with regular expression matching old, or new, assets + * directory structure. `undefined` value is returned in case regular expressions can't be matched or if `assetUrl` + * contains `targetDir`, meaning they already have the same path. + * @param {string} assetUrl The current URL of an asset. + * @param {string} targetDir The target directory in which an asset should be stored. 
+ * @return {Object} An object containing old and new directory path, or an `undefined` value if `assetUrl` starts with + * `targetDir` or if `assetUrl` can't be matched with either regular expression. + * @public + */ +module.exports = (assetUrl, targetDir) => { + if (assetUrl.startsWith(protocol)) assetUrl = assetUrl.slice(protocol.length); + if (assetUrl.startsWith(targetDir)) return; + const result = assetUrl.match(OLD_ASSET_PATH_REGEX) || assetUrl.match(NEW_ASSET_PATH_REGEX); + if (!result) return; + const { groups: { directory, fileName } } = result; + const newKey = path.join(targetDir, fileName); + return { key: directory, newKey }; +};