1 | | -import SparkMD5 from "spark-md5"; |
| 1 | +importScripts("https://cdnjs.cloudflare.com/ajax/libs/spark-md5/3.0.2/spark-md5.min.js"); |
2 | 2 |
3 | 3 | // Function to calculate the checksum for multipart files |
4 | 4 | const calculateMultipartChecksum = (file, partSize) => |
@@ -53,19 +53,65 @@ const calculateMultipartChecksum = (file, partSize) => |
53 | 53 | loadNext(); |
54 | 54 | }); |
55 | 55 |
| 56 | +// Incrementally MD5-hash an entire file with SparkMD5, reading it in 2MB chunks |
| 57 | +const incrementalMD5 = file => new Promise((resolve, reject) => { |
| 58 | +  let loaded = 0; |
| 59 | +  const startTime = performance.now(); |
| 60 | +  const totalSize = file.size; |
| 61 | + const fileReader = new FileReader(); |
| 62 | + const spark = new SparkMD5.ArrayBuffer(); |
| 63 | + const chunkSize = 2097152; // Read in chunks of 2MB |
| 64 | + const chunks = Math.ceil(file.size / chunkSize); |
| 65 | + let currentChunk = 0; |
| 66 | + |
| 67 | + fileReader.onload = event => { |
| 68 | + spark.append(event.target.result); // Append array buffer |
| 69 | + ++currentChunk; |
| 70 | + if (currentChunk < chunks) { |
| 71 | + loadNext(); |
| 72 | + } else { |
| 73 | + resolve(spark.end()); // Compute hash |
| 74 | + } |
| 75 | + }; |
| 76 | + |
| 77 | +  fileReader.addEventListener("progress", event => { |
| 78 | +    // event.loaded is cumulative within the current chunk's read |
| 79 | +    loaded = currentChunk * chunkSize + event.loaded; |
| 80 | +    const percentDone = Math.min(100, Math.round((loaded / totalSize) * 100)); |
| 81 | +    // console.log(percentDone + "%") |
| 82 | +  }); |
| 83 | + |
| 84 | +  fileReader.addEventListener("loadend", event => { |
| 85 | +    if (event.total > 0) { |
| 86 | +      const endTime = performance.now(); |
| 87 | +      // console.log(`Took ${endTime - startTime} milliseconds`) |
| 88 | +    } |
| 89 | +  }); |
| 90 | + |
| 91 | + fileReader.onerror = () => reject(fileReader.error); |
| 92 | + |
| 93 | + const loadNext = () => { |
| 94 | +    const start = currentChunk * chunkSize; |
| 95 | +    const end = Math.min(start + chunkSize, file.size); |
| 96 | +    fileReader.readAsArrayBuffer(file.slice(start, end)); |
| 97 | +  }; |
| 98 | + |
| 99 | + loadNext(); |
| 100 | +}); |
| 101 | + |
56 | 102 | // Main worker handler |
57 | 103 | self.onmessage = async function (event) { |
58 | 104 | if (event.data.file && event.data.msg == "begin hash") { |
59 | | - console.log("ello chum!"); |
60 | 105 | const file = event.data.file; |
61 | | - const multipartThreshold = PydioApi.getMultipartPartSize(); // Get the current multipart chunk size |
| 106 | + const multipartThreshold = event.data.multipartThreshold; |
| 107 | + const multipartPartSize = event.data.multipartPartSize; |
62 | 108 |
63 | 109 | if (file.size > multipartThreshold) { |
64 | 110 | // Only run multipart checksum logic for files above the threshold |
65 | 111 | try { |
66 | 112 | const finalChecksum = await calculateMultipartChecksum( |
67 | 113 | file, |
68 | | - multipartThreshold |
| 114 | + multipartPartSize |
69 | 115 | ); |
70 | 116 | postMessage({ status: "complete", hash: finalChecksum }); |
71 | 117 | } catch (error) { |
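A note on the first change above: this file evidently runs as a classic worker, where ES `import` syntax is unavailable, so `importScripts` is the standard way to load SparkMD5 into the worker's global scope. A minimal sketch of the alternative, assuming the build were ever switched to module workers (the filename is a placeholder):

```js
// The main thread would have to create a *module* worker (placeholder filename):
//   const worker = new Worker("hash.worker.js", { type: "module" });
// Only then could the worker keep the original ES import:
import SparkMD5 from "spark-md5";
```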
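`incrementalMD5` is never called in the lines shown here; presumably the handler's `else` branch, below this excerpt, uses it for files under the multipart threshold. A hedged sketch of what that call site might look like, mirroring the message shape of the multipart branch (the `status: "error"` shape is an assumption):

```js
// Hypothetical small-file branch of the worker handler (not part of this diff).
try {
  const hash = await incrementalMD5(file);
  postMessage({ status: "complete", hash });
} catch (error) {
  postMessage({ status: "error", error: error.message });
}
```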
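For completeness: the handler change removes the worker's direct `PydioApi.getMultipartPartSize()` call (a worker script does not share the page's globals) and instead expects both sizes in the message. A minimal sketch of the main-thread caller this implies; the worker filename and the threshold value are illustrative, not from the commit:

```js
const worker = new Worker("hash.worker.js"); // placeholder filename

worker.onmessage = (event) => {
  if (event.data.status === "complete") {
    console.log("MD5:", event.data.hash);
    worker.terminate();
  }
};

worker.postMessage({
  msg: "begin hash",
  file, // a File object, e.g. from an <input type="file">
  multipartThreshold: 100 * 1024 * 1024, // illustrative; the real value comes from app config
  multipartPartSize: PydioApi.getMultipartPartSize(), // as in the removed worker line
});
```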