diff --git a/examples/super_resolution/SuperResolutionExample.js b/examples/super_resolution/SuperResolutionExample.js
index 46588e7e3..3fdcdcda5 100644
--- a/examples/super_resolution/SuperResolutionExample.js
+++ b/examples/super_resolution/SuperResolutionExample.js
@@ -13,7 +13,13 @@ class SuperResolutionExample extends BaseCameraExample {
};
const drawOutput = (outputTensor,srcElement, height, preOptions) => {
- const width = height;
+ let width;
+ if(height == 1080) {
+ width = 1920;
+ }
+ else {
+ width = height;
+ }
const mean = preOptions.mean;
const offset = preOptions.std;
const bytes = new Uint8ClampedArray(width * height * 4);
@@ -21,9 +27,18 @@ class SuperResolutionExample extends BaseCameraExample {
for (let i = 0; i < height * width; ++i) {
let j = i * 4;
- let r = outputTensor[i * 3] * mean[0] + offset[0];
- let g = outputTensor[i * 3 + 1] * mean[1] + offset[1];
- let b = outputTensor[i * 3 + 2] * mean[2] + offset[2];
+ let r, g, b;
+ if(height == 1080) {
+ r = outputTensor[i * 3] * 255;
+ g = outputTensor[i * 3 + 1] * 255;
+ b = outputTensor[i * 3 + 2] * 255;
+ }
+ else {
+ r = outputTensor[i * 3] * mean[0] + offset[0];
+ g = outputTensor[i * 3 + 1] * mean[1] + offset[1];
+ b = outputTensor[i * 3 + 2] * mean[2] + offset[2];
+ }
+
bytes[j + 0] = Math.round(r);
bytes[j + 1] = Math.round(g);
bytes[j + 2] = Math.round(b);
diff --git a/examples/super_resolution/index.html b/examples/super_resolution/index.html
index 94b76ebab..fcda1886b 100755
--- a/examples/super_resolution/index.html
+++ b/examples/super_resolution/index.html
@@ -335,6 +335,9 @@
Subgraphs Summary
+
+
+
diff --git a/examples/util/modelZoo.js b/examples/util/modelZoo.js
index d929422aa..5c47c47a7 100644
--- a/examples/util/modelZoo.js
+++ b/examples/util/modelZoo.js
@@ -746,6 +746,36 @@ const modelZoo = {
intro: 'Photo-realistic single image Super-Resolution using a generative adversarial network.',
paperUrl: 'https://arxiv.org/abs/1609.04802'
},
+ {
+ modelName: 'single-image-super-resolution-1032',
+ format: 'OpenVINO',
+ modelId: 'image-super-resolution-1032model',
+ modelSize: '120KB',
+ inputSize: [270, 480, 3],
+ outputSize: [1080, 1920, 3],
+ scale: 4,
+ modelFile: '../super_resolution/model/single-image-super-resolution-1032.bin',
+ preOptions: {
+ channelScheme: 'BGR',
+ },
+ intro: 'An Attention-Based Approach for Single Image Super Resolution',
+ paperUrl: 'https://arxiv.org/abs/1807.06779'
+ },
+ {
+ modelName: 'single-image-super-resolution-1033',
+ format: 'OpenVINO',
+ modelId: 'image-super-resolution-1033model',
+ modelSize: '122KB',
+ inputSize: [360, 640, 3],
+ outputSize: [1080, 1920, 3],
+ scale: 3,
+ modelFile: '../super_resolution/model/single-image-super-resolution-1033.bin',
+ preOptions: {
+ channelScheme: 'BGR',
+ },
+ intro: 'An Attention-Based Approach for Single Image Super Resolution',
+ paperUrl: 'https://arxiv.org/abs/1807.06779'
+ },
{
modelName: 'SRGAN 128x4 (TFLite)',
format: 'TFLite',
diff --git a/examples/util/openvino/OpenVINOModelImporter.js b/examples/util/openvino/OpenVINOModelImporter.js
index 140f5a093..0db96ce04 100644
--- a/examples/util/openvino/OpenVINOModelImporter.js
+++ b/examples/util/openvino/OpenVINOModelImporter.js
@@ -403,17 +403,18 @@ class OpenVINOModelImporter {
}
let output = node.outputs[0];
- const nextNode = graph.nodes[i+1];
- if (nextNode && ['Clamp', 'ReLU'].includes(nextNode.operator) &&
- node.outputs[0].graphId() === nextNode.inputs[0].graphId()) {
- // Fuse relu
- inputs.push(this._addScalarInt32(this._getFuseCode(nextNode)));
- i++;
- console.log(` fuse relu: output of ${nextNode.name}->${node.name}`);
- output = nextNode.outputs[0];
- } else {
- inputs.push(this._addScalarInt32(this._nn.FUSED_NONE));
- }
+ // const nextNode = graph.nodes[i+1];
+ // if (nextNode && ['Clamp', 'ReLU'].includes(nextNode.operator) &&
+ // node.outputs[0].graphId() === nextNode.inputs[0].graphId()) {
+ // // Fuse relu
+ // inputs.push(this._addScalarInt32(this._getFuseCode(nextNode)));
+ // i++;
+ // console.log(` fuse relu: output of ${nextNode.name}->${node.name}`);
+ // output = nextNode.outputs[0];
+ // } else {
+ // inputs.push(this._addScalarInt32(this._nn.FUSED_NONE));
+ // }
+ inputs.push(this._addScalarInt32(this._nn.FUSED_NONE));
// Add outputs
const outDims = output.shape();
@@ -693,10 +694,27 @@ class OpenVINOModelImporter {
console.log(` output shape: [${outDims}]`);
this._addOperation(this._nn.TRANSPOSE, inputs, outputs);
- } else {
+ } else {
+ if(order.length === 6) {
+ console.log(` input shape: [${inDims}]`);
+ // No reordering rule is defined for the 6-D tensor layout, so the permute order is passed through as-is.
+ inputs.push(inputId);
+ inputs.push(this._addTensorInt32(order, [6]));
+
+ const outDims = output.shape();
+ const outputType = {
+ type: this._getTypeCode(output.dataType()), dimensions: outDims
+ };
+ const outputId = this._addNamedOperand(outputName, outputType);
+ outputs.push(outputId);
+ console.log(` output shape: [${outDims}]`);
+
+ this._addOperation(this._nn.TRANSPOSE, inputs, outputs);
+ }
+ else {
throw new Error(`Permuting to ${order} is not supported`);
}
- }
+ } }
} break;
case 'Const': {
// initializer is contained in the node
@@ -891,6 +909,53 @@ class OpenVINOModelImporter {
outputs.push(outputId);
console.log(` output shape: [${outDims}]`);
} break;
+ case 'ReLU': {
+ const input = node.inputs[0];
+ inputs.push(this._getTensorId(input));
+ console.log(` inputs shape: ` +
+ `[${node.inputs.map((input) => input.shape()).join('], [')}]`);
+
+ const output = node.outputs[0];
+ const outDims = output.shape();
+ const outputType = {
+ type: this._getTypeCode(output.dataType()), dimensions: outDims
+ };
+ const outputId = this._addNamedOperand(output.graphId(), outputType);
+ outputs.push(outputId);
+ console.log(` output shape: [${outDims}]`);
+ opCode = this._nn.RELU;
+ } break;
+ case 'Power': {
+ const input = node.inputs[0];
+ inputs.push(this._getTensorId(input));
+ console.log(` inputs shape: ` +
+ `[${node.inputs.map((input) => input.shape()).join('], [')}]`);
+
+ const power = node.getInt('power',1);
+ const scale = node.getFloat('scale',1.0);
+ const shift = node.getInt('shift',0);
+
+ if(power === 1 && shift === 0) {
+ const dims = [1, 1, 1, 1];
+
+ inputs.push(this._addTensorFloat32(new Float32Array([scale]), dims));
+ inputs.push(this._addScalarInt32(this._nn.FUSED_NONE));
+
+ const output = node.outputs[0];
+ const outDims = output.shape();
+ const outputType = {
+ type: this._getTypeCode(output.dataType()), dimensions: outDims
+ };
+ const outputId = this._addNamedOperand(output.graphId(), outputType);
+ outputs.push(outputId);
+ console.log(` output shape: [${outDims}]`);
+
+ this._addOperation(this._nn.MUL,inputs,outputs);
+ }
+ else {
+ throw new Error(`Power layer with power=${power}, shift=${shift} is not supported`); // TODO: decompose into supported ops
+ }
+ } break;
default: {
throw new Error(`${node.operator} is not supported.`);
}
diff --git a/examples/util/openvino/OpenVINOModelUtils.js b/examples/util/openvino/OpenVINOModelUtils.js
index 8290e233a..cbd9938a9 100644
--- a/examples/util/openvino/OpenVINOModelUtils.js
+++ b/examples/util/openvino/OpenVINOModelUtils.js
@@ -200,6 +200,26 @@ class OpenVINOModel {
const ctor = this._getConstructorFromType(tensor.type.dataType);
const length = size / ctor.BYTES_PER_ELEMENT;
const data = new ctor(this._weights, offset, length);
+ if (typeof dimHints !== 'undefined' && dimHints.length === 6) {
+ if (OpenVINOUtils.product(dimHints) !== length) {
+ throw new Error(`Product of ${dimHints} doesn't match the length ${length}`);
+ }
+ // Repack 6-D weights: the raw buffer is read in channel-first (N, C, H, W) flat
+ // order with C = C1 * C2 * C3, and rewritten into channel-last (N, H, W, C) order.
+ const nhwc3Data = new ctor(data.length);
+ const [N, H, W, C1, C2, C3] = dimHints;
+ const C = C1 * C2 * C3;
+ for (let n = 0; n < N; ++n) {
+ for (let c = 0; c < C; ++c) {
+ for (let h = 0; h < H; ++h) {
+ for (let w = 0; w < W; ++w) {
+ nhwc3Data[n*H*W*C + h*W*C + w*C + c] = data[n*C*H*W + c*H*W + h*W + w];
+ }
+ }
+ }
+ }
+ return nhwc3Data;
+ }
if (typeof dimHints === 'undefined' || dimHints.length !== 4) {
return data;
}
@@ -233,11 +253,21 @@ class OpenVINOModel {
getTensorShape(arg) {
const dims = this._getTensorType(arg).shape.dimensions;
- if (dims.length !== 4) {
+ if (dims.length !== 4 && dims.length !== 6) {
return dims;
} else {
- const [N, C, H, W] = dims;
- return [N, H, W, C];
+ if(dims.length === 4) {
+ const [N, C, H, W] = dims;
+ return [N, H, W, C];
+ }
+ else { if(dims[5] === 2 || dims[5] === 3) { //used in permute ops
+ const [N, C1, C2, C3, H, W] = dims;
+ return [N, C2, C3, H, W, C1];
+ } else { //used in reshape ops
+ const [N, C1, C2, C3, H, W] = dims;
+ return [N, H, W, C1, C2, C3];
+ }
+ }
}
}
diff --git a/src/nn/tfjs/TfjsModel.js b/src/nn/tfjs/TfjsModel.js
index 1f479870c..33d215562 100644
--- a/src/nn/tfjs/TfjsModel.js
+++ b/src/nn/tfjs/TfjsModel.js
@@ -710,6 +710,11 @@ export default class TfjsModel {
const output = operands[outputs[0]];
output.assign(tf.sigmoid(input1));
} break;
+ case OperationCode.RELU: {
+ const input1 = operands[inputs[0]];
+ const output = operands[outputs[0]];
+ output.assign(tf.relu(input1));
+ } break;
default: {
throw new Error(`Operation ${op} is not supported`);
}