From 8094698bd7c12f3180b7c831903c40d4afc59611 Mon Sep 17 00:00:00 2001 From: John McSwain Date: Wed, 12 Apr 2023 15:11:24 -0500 Subject: [PATCH 1/2] the first of several merge requests to leverage the VM blob URLs --- src/common/storage/backends/udcp/Client.js | 80 ++++++++++++++++++++ src/common/storage/backends/udcp/metadata.js | 26 +++++++ 2 files changed, 106 insertions(+) create mode 100644 src/common/storage/backends/udcp/Client.js create mode 100644 src/common/storage/backends/udcp/metadata.js diff --git a/src/common/storage/backends/udcp/Client.js b/src/common/storage/backends/udcp/Client.js new file mode 100644 index 00000000..4d9ea6fd --- /dev/null +++ b/src/common/storage/backends/udcp/Client.js @@ -0,0 +1,80 @@ +/* globals define */ +define([ + '../StorageClient', + 'blob/BlobClient' +], function( + StorageClient, + BlobClient +) { + const URLPREFIX = "https://wellcomewebgme.centralus.cloudapp.azure.com/rest/blob/download/" + const UDCPStorage = function(id, name, logger, config={}) { + StorageClient.apply(this, arguments); + // const params = this.getBlobClientParams(); + // params.apiToken = config.apiToken; + // this.blobClient = new BlobClient(params); + }; + + UDCPStorage.prototype = Object.create(StorageClient.prototype); + + // UDCPStorage.prototype.getBlobClientParams = function() { + // const params = { + // logger: this.logger.fork('BlobClient') + // }; + // if (!require.isBrowser) { + // const [url, isHttps] = this.getServerURL(); + // const defaultPort = isHttps ? 
'443' : '80'; + const [server, port=defaultPort] = url.split(':'); + params.server = server; + params.serverPort = +port; + params.httpsecure = isHttps; + } + return params; + }; + + UDCPStorage.prototype.getFile = async function(dataInfo) { + const {data} = dataInfo; + //return await this.fetch(`${URLPREFIX}${dataInfo}`) + return await this.blobClient.getObject(`${URLPREFIX}${dataInfo}`); + }; + + UDCPStorage.prototype.getFileStream = async function(dataInfo) { + const url = await this.getDownloadURL(dataInfo); + const response = await this.fetch(`${URLPREFIX}${dataInfo}`, {method: 'GET'}); + return response.body; + }; + + UDCPStorage.prototype.putFile = async function(filename, content) { + const hash = await this.blobClient.putFile(filename, content); + return this.createDataInfo(hash); + }; + + UDCPStorage.prototype.putFileStream = async function(filename, stream) { + this.ensureStreamSupport(); + this.ensureReadableStream(stream); + const hash = await this.blobClient.putFile(filename, stream); + return this.createDataInfo(hash); + }; + + UDCPStorage.prototype.deleteDir = + UDCPStorage.prototype.deleteFile = async function() {}; + + UDCPStorage.prototype.getMetadata = async function(dataInfo) { + const {data} = dataInfo; + return await this.blobClient.getMetadata(data); + }; + + UDCPStorage.prototype.getDownloadURL = async function(dataInfo) { + const {data} = dataInfo; + return this.blobClient.getDownloadURL(`${URLPREFIX}${dataInfo}`); + }; + + UDCPStorage.prototype.getCachePath = async function(dataInfo) { + const metadata = await this.getMetadata(dataInfo); + const hash = metadata.content; + const dir = hash.substring(0, 2); + const filename = hash.substring(2); + return `${this.id}/${dir}/${filename}`; + }; + + return UDCPStorage; +}); diff --git a/src/common/storage/backends/udcp/metadata.js b/src/common/storage/backends/udcp/metadata.js new file mode 100644 index 00000000..bffcfe1c --- /dev/null +++ 
b/src/common/storage/backends/udcp/metadata.js @@ -0,0 +1,26 @@ +/*global define*/ +define([ + 'deepforge/udcpConfig', +], function( + config, +) { + const metadata = { + name: 'UDCP Blob Storage', + configStructure: [], + url: "" + }; + + + if (config.authentication.enable) { + metadata.configStructure.push({ + name: 'apiToken', + displayName: 'Access Token', + value: '', + valueType: 'string', + readOnly: false, + isAuth: true, + isRequiredForBrowser: false, + }); + } + return metadata; +}); From 69eeefa50eeebbf82d44f8428660504daa5bd1d4 Mon Sep 17 00:00:00 2001 From: John McSwain Date: Fri, 14 Apr 2023 10:19:29 -0500 Subject: [PATCH 2/2] uploading changes for PR and awaiting feedback --- src/common/storage/backends/Storage.js | 223 ++++++++++++++++++ .../storage/backends/TaxonomyReference.js | 133 +++++++++++ src/common/storage/backends/Utils.js | 183 ++++++++++++++ src/common/storage/backends/udcp/Client.js | 22 +- src/common/storage/backends/udcp/metadata.js | 2 +- 5 files changed, 551 insertions(+), 12 deletions(-) create mode 100644 src/common/storage/backends/Storage.js create mode 100644 src/common/storage/backends/TaxonomyReference.js create mode 100644 src/common/storage/backends/Utils.js diff --git a/src/common/storage/backends/Storage.js b/src/common/storage/backends/Storage.js new file mode 100644 index 00000000..53df2a31 --- /dev/null +++ b/src/common/storage/backends/Storage.js @@ -0,0 +1,223 @@ +import TaxonomyReference from "./TaxonomyReference" +import { assert, Result } from "./Utils" +import { filterMap } from "./Utils" +import { writable } from "svelte/store" + +class Storage { + constructor() { + const chunks = window.location.href.split("/") // TODO: + chunks.pop() + chunks.pop() + this.baseUrl = chunks.join("/") + "/artifacts/" + } + + async listArtifacts() { + const result = await this._fetchJson(this.baseUrl, null, ListError) + const items = await result.unwrap() + return filterMap(items, item => ArtifactSet.tryFrom(item)) + } + + async 
getDownloadUrl(parentId, ...ids) { + // TODO: add item IDs + const qs = `ids=${encodeURIComponent(JSON.stringify(ids))}` + return this.baseUrl + parentId + `/download?${qs}` + } + + _uploadFile({ method, url, headers }, file) { + const { subscribe, set } = writable(0) + const request = new XMLHttpRequest() + request.upload.addEventListener( + "progress", + ev => { + set(ev.loaded / ev.total) + }, + false + ) + const promise = new Promise(function(resolve, reject) { + request.addEventListener( + "load", + () => { + set(1) + resolve(true) + }, + false + ) + request.addEventListener( + "error", + () => { + const error = new AppendDataError( + request.statusText || "Upload failed" + ) + reject(error) + }, + false + ) + request.addEventListener("abort", () => resolve(false), false) + }) + request.open(method, url) + Object.entries(headers || {}).forEach(([name, value]) => + request.setRequestHeader(name, value) + ) + request.send(file) + return Object.assign(promise, { + file, + subscribe, + abort() { + request.abort() + } + }) + } + + async appendArtifact(artifactSet, metadata, files) { + console.log({ action: "append", metadata, files }) + const url = this.baseUrl + artifactSet.id + "/append" + const filenames = files.map(file => file.name) + + const opts = { + method: "post", + headers: { + "Content-Type": "application/json" + }, + body: JSON.stringify({ + metadata, + filenames + }) + } + + const appendResult = await ( + await this._fetchJson(url, opts, AppendDataError) + ).unwrap() + + return appendResult.files.map(({ name, params }) => { + const targetFile = files.find(a => a.name == name) + assert( + !!targetFile, + new AppendDataError("Could not find upload info for " + name) + ) + return this._uploadFile(params, targetFile) + }) + } + + async updateArtifact(metadata, newContent) { + console.log("Updating artifact:", metadata, newContent) + } + + async createArtifact(metadata, files) { + console.log("Creating artifact:", metadata, files) + metadata.taxonomyTags = 
metadata.taxonomyTags || [] + const opts = { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ + metadata + }) + } + return (await this._fetchJson(this.baseUrl, opts, CreateError)).unwrap() + } + + async _fetch(url, opts = null, ErrorClass = RequestError) { + const response = await fetch(url, opts) + let error = null + if (response.status === 422) { + const data = await response.json() + const context = new ModelContext( + data.context.projectId, + data.context.branch, + data.context.nodeId + ) + error = new ModelError(data.message, context) + } else if (response.status > 399) { + error = new ErrorClass(await response.text()) + } + return new Result(response, error) + } + + async _fetchJson(url, opts = null, ErrorClass = RequestError) { + return (await this._fetch(url, opts, ErrorClass)).map(response => + response.json() + ) + } +} + +export class RequestError extends Error { + constructor(msg) { + super(msg) + } +} + +class ModelContext { + constructor(projectId, branch, nodeId) { + this.projectId = projectId + this.nodeId = nodeId + this.branch = branch + } + + toQueryParams() { + const params = new URLSearchParams({ + project: this.projectId, + branch: this.branch, + node: this.nodeId + }) + return params.toString() + } +} + +export class ModelError extends Error { + constructor(msg, context) { + super(msg) + this.context = context + } +} + +class StorageError extends RequestError { + constructor(actionDisplayName, msg) { + super(`Unable to ${actionDisplayName}: ${msg}`) + } +} + +class ListError extends StorageError { + constructor(msg) { + super("list artifacts", msg) // FIXME: rename "artifact"? 
+ } +} + +class DownloadError extends StorageError { + constructor(msg) { + super("download", msg) + } +} + +class CreateError extends StorageError { + constructor(msg) { + super("create", msg) + } +} + +class AppendDataError extends StorageError { + constructor(msg) { + super("append", msg) + } +} + +class ArtifactSet { + static tryFrom(item) { + if (!item.displayName) { + console.log("Found malformed data. Filtering out. Data:", item) + } else { + const hash = [ + item.id, + ...item.children.map(child => child.id).sort() + ].join("/") + item.hash = hash + item.children = item.children.map(child => { + if (child.taxonomy) { + child.taxonomy = TaxonomyReference.from(child.taxonomy) + } + return child + }) + return item + } + } +} + +export default Storage diff --git a/src/common/storage/backends/TaxonomyReference.js b/src/common/storage/backends/TaxonomyReference.js new file mode 100644 index 00000000..f4ccfa02 --- /dev/null +++ b/src/common/storage/backends/TaxonomyReference.js @@ -0,0 +1,133 @@ +"use strict"; +var __extends = (this && this.__extends) || (function () { + var extendStatics = function (d, b) { + extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + return extendStatics(d, b); + }; + return function (d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? 
Object.create(b) : (__.prototype = b.prototype, new __()); + }; +})(); +Object.defineProperty(exports, "__esModule", { value: true }); +exports.SemanticVersion = exports.ParseError = exports.Branch = exports.Tag = exports.Commit = void 0; +var TaxonomyReference = /** @class */ (function () { + function TaxonomyReference(id, version) { + this.id = id; + this.version = version; + } + TaxonomyReference.prototype.supports = function (otherVersion) { + return this.id === otherVersion.id && + this.version.supports(otherVersion.version); + }; + TaxonomyReference.from = function (taxonomyVersion) { + var version; + if (taxonomyVersion.tag) { + version = new Tag(taxonomyVersion.commit, taxonomyVersion.tag); + } + else if (taxonomyVersion.branch) { + version = new Branch(taxonomyVersion.commit, taxonomyVersion.branch); + } + else if (taxonomyVersion.commit) { + version = new Commit(taxonomyVersion.commit); + } + else { + var taxVersion = JSON.stringify(taxonomyVersion); + throw new Error("Could not find tag, branch, or commit in ".concat(taxVersion)); + } + return new TaxonomyReference(taxonomyVersion.id, version); + }; + return TaxonomyReference; +}()); +exports.default = TaxonomyReference; +var Commit = /** @class */ (function () { + function Commit(hash) { + this.hash = hash; + } + Commit.prototype.supports = function (otherVersion) { + return otherVersion.hash === this.hash; + }; + return Commit; +}()); +exports.Commit = Commit; +var Tag = /** @class */ (function (_super) { + __extends(Tag, _super); + function Tag(hash, versionString) { + var _this = _super.call(this, hash) || this; + _this.version = SemanticVersion.parse(versionString); + return _this; + } + Tag.prototype.supports = function (otherTag) { + if (otherTag instanceof Tag) { + return this.version.major === otherTag.version.major && + this.version.gte(otherTag.version); + } + else { + return _super.prototype.supports.call(this, otherTag); + } + }; + return Tag; +}(Commit)); +exports.Tag = Tag; +var Branch = 
/** @class */ (function (_super) { + __extends(Branch, _super); + function Branch(hash, name) { + var _this = _super.call(this, hash) || this; + _this.name = name; + return _this; + } + Branch.prototype.supports = function (otherVersion) { + if (otherVersion instanceof Branch) { + return otherVersion.name === this.name; + } + else { + return _super.prototype.supports.call(this, otherVersion); + } + }; + return Branch; +}(Commit)); +exports.Branch = Branch; +var ParseError = /** @class */ (function (_super) { + __extends(ParseError, _super); + function ParseError(input) { + return _super.call(this, "Unable to parse: ".concat(input)) || this; + } + return ParseError; +}(Error)); +exports.ParseError = ParseError; +var SemanticVersion = /** @class */ (function () { + function SemanticVersion(major, minor, patch) { + if (minor === void 0) { minor = 0; } + if (patch === void 0) { patch = 0; } + this.major = major; + this.minor = minor; + this.patch = patch; + } + SemanticVersion.prototype.gte = function (other) { + if (this.major < other.major) + return false; + if (this.minor < other.minor) + return false; + if (this.patch < other.patch) + return false; + return true; + }; + SemanticVersion.parse = function (versionString) { + versionString = versionString.replace(/^v?/, ""); + var _a = versionString.split(".") + .map(function (str) { + if (!/\d+/.test(str)) { + throw new ParseError(versionString); + } + return parseInt(str); + }), major = _a[0], _b = _a[1], minor = _b === void 0 ? 0 : _b, _c = _a[2], patch = _c === void 0 ? 
0 : _c; + return new SemanticVersion(major, minor, patch); + }; + return SemanticVersion; +}()); +exports.SemanticVersion = SemanticVersion; diff --git a/src/common/storage/backends/Utils.js b/src/common/storage/backends/Utils.js new file mode 100644 index 00000000..382913d8 --- /dev/null +++ b/src/common/storage/backends/Utils.js @@ -0,0 +1,183 @@ +"use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __generator = (this && this.__generator) || function (thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (g && (g = 0, op[0] && (_ = 0)), _) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? 
y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } +}; +var __spreadArray = (this && this.__spreadArray) || function (to, from, pack) { + if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { + if (ar || !(i in from)) { + if (!ar) ar = Array.prototype.slice.call(from, 0, i); + ar[i] = from[i]; + } + } + return to.concat(ar || Array.prototype.slice.call(from)); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.filterMap = exports.readFile = exports.isDefined = exports.isObject = exports.encodeQueryParams = exports.openUrl = exports.capitalize = exports.getLatestArtifact = exports.assert = exports.arraysEqual = exports.Result = void 0; +/** + * A Result is the result from a request. Errors can be mapped (like + * combinators). Unwrapping the result will either throw an error (if an error + * occurred) or return the parsed result from the request. 
+ */ +var Result = /** @class */ (function () { + function Result(value, error) { + this._value = value; + this._error = error; + } + Result.prototype.map = function (fn) { + if (this._error) { + return new Result(null, this._error); + } + else { + var result = fn(this._value); + return new Result(result, null); + } + }; + Result.prototype.mapError = function (errFn) { + if (this._error) { + var result = errFn(this._error); + return new Result(null, result); + } + else { + return new Result(this._value, null); + } + }; + Result.prototype.unwrap = function () { + if (this._error) { + throw this._error; + } + else { + return this._value; + } + }; + return Result; +}()); +exports.Result = Result; +/** + * Returns whether the two arrays are equal. + * + * @template T The type of the array elements. + * @param array1 The first array to compare. + * @param array2 The second array to compare. + * @param [options] Options for the comparison. If `ignoreOrder` is `true`, then both arrays are sorted before comparison. + * @return `true` if the arrays are equal, `false` otherwise. + */ +function arraysEqual(array1, array2, options) { + var _a; + if (array1 === array2) + return true; + if (array1.length !== array2.length) + return false; + var _b = (options === null || options === void 0 ? void 0 : options.ignoreOrder) + ? [__spreadArray([], array1, true).sort(), __spreadArray([], array2, true).sort()] + : [array1, array2], arr1 = _b[0], arr2 = _b[1]; + var equals = (_a = options === null || options === void 0 ? void 0 : options.equals) !== null && _a !== void 0 ? _a : Object.is; + return !arr1.some(function (elem, index) { return !equals(elem, arr2[index]); }); +} +exports.arraysEqual = arraysEqual; +function assert(cond, err) { + if (!cond) { + throw err; + } +} +exports.assert = assert; +// FIXME: we need to combine Artifact.js (in the router directory) w/ a TS +// definition and share the generated code across the client and server. 
This +// method should be available on the ArtifactSet class instead of here +function getLatestArtifact(artifactSet) { + artifactSet.children.sort(function (i1, i2) { + if (i1.time === i2.time) { + return i1.displayName < i2.displayName ? -1 : 1; + } + return i1.time < i2.time ? -1 : 1; + }); + return artifactSet.children[artifactSet.children.length - 1]; +} +exports.getLatestArtifact = getLatestArtifact; +function capitalize(word) { + return word[0].toUpperCase() + word.substring(1); +} +exports.capitalize = capitalize; +function openUrl(url) { + return window.open(url, "_blank"); +} +exports.openUrl = openUrl; +function encodeQueryParams(dict) { + return Object.entries(dict) + .map(function (_a) { + var key = _a[0], value = _a[1]; + return "".concat(key, "=").concat(encodeURIComponent(value)); + }) + .join("&"); +} +exports.encodeQueryParams = encodeQueryParams; +function isObject(thing) { + return typeof thing === "object" && !Array.isArray(thing); +} +exports.isObject = isObject; +function isDefined(thing) { + return (thing != null) || (thing === null); +} +exports.isDefined = isDefined; +function readFile(file) { + return __awaiter(this, void 0, void 0, function () { + return __generator(this, function (_a) { + return [2 /*return*/, new Promise(function (res, rej) { + var reader = new FileReader(); + reader.onload = function () { + if (reader.error) { + console.log("error:", reader.error); + return rej(reader.error); + } + else { + return res(reader.result); + } + }; + reader.readAsText(file); + })]; + }); + }); +} +exports.readFile = readFile; +function filterMap(list, fn) { + return list.reduce(function (items, input) { + var mapped = fn(input); + if (isDefined(mapped)) { + items.push(mapped); + } + return items; + }, []); +} +exports.filterMap = filterMap; diff --git a/src/common/storage/backends/udcp/Client.js b/src/common/storage/backends/udcp/Client.js index 4d9ea6fd..c3937a30 100644 --- a/src/common/storage/backends/udcp/Client.js +++ 
b/src/common/storage/backends/udcp/Client.js @@ -1,4 +1,6 @@ /* globals define */ +import {Storage} from "../Storage" + define([ '../StorageClient', 'blob/BlobClient' @@ -6,12 +8,11 @@ define([ StorageClient, BlobClient ) { - const URLPREFIX = "https://wellcomewebgme.centralus.cloudapp.azure.com/rest/blob/download/" - const UDCPStorage = function(id, name, logger, config={}) { + //const URLPREFIX = 'https://wellcomewebgme.centralus.cloudapp.azure.com/rest/blob/download/' + const UDCPStorage = function(id, name, logger) { StorageClient.apply(this, arguments); - // const params = this.getBlobClientParams(); - // params.apiToken = config.apiToken; - // this.blobClient = new BlobClient(params); + //this.blobClient = new BlobClient(); + this.Storage = new Storage() }; UDCPStorage.prototype = Object.create(StorageClient.prototype); @@ -33,25 +34,24 @@ define([ UDCPStorage.prototype.getFile = async function(dataInfo) { const {data} = dataInfo; - //return await this.fetch(`${URLPREFIX}${dataInfo}`) - return await this.blobClient.getObject(`${URLPREFIX}${dataInfo}`); + this.Storage.getFile(data) }; UDCPStorage.prototype.getFileStream = async function(dataInfo) { const url = await this.getDownloadURL(dataInfo); - const response = await this.fetch(`${URLPREFIX}${dataInfo}`, {method: 'GET'}); + const response = await this.fetch(url, {method: 'GET'}); return response.body; }; UDCPStorage.prototype.putFile = async function(filename, content) { - const hash = await this.blobClient.putFile(filename, content); + const hash = await this.Storage.appendArtifact(filename, content); return this.createDataInfo(hash); }; UDCPStorage.prototype.putFileStream = async function(filename, stream) { this.ensureStreamSupport(); this.ensureReadableStream(stream); - const hash = await this.blobClient.putFile(filename, stream); + const hash = await this.Storage.appendArtifact(filename, stream); return this.createDataInfo(hash); }; @@ -65,7 +65,7 @@ define([ UDCPStorage.prototype.getDownloadURL = 
async function(dataInfo) { const {data} = dataInfo; - return this.blobClient.getDownloadURL(`${URLPREFIX}${dataInfo}`); + return this.Storage.getDownloadURL(data); }; UDCPStorage.prototype.getCachePath = async function(dataInfo) { diff --git a/src/common/storage/backends/udcp/metadata.js b/src/common/storage/backends/udcp/metadata.js index bffcfe1c..b4b9b633 100644 --- a/src/common/storage/backends/udcp/metadata.js +++ b/src/common/storage/backends/udcp/metadata.js @@ -7,7 +7,7 @@ define([ const metadata = { name: 'UDCP Blob Storage', configStructure: [], - url: "" + url: '' };