Commit abd620c1 authored by chenqikuai

Use workerjs to compute the hash

parent c551ad75
......@@ -793,6 +793,11 @@
"resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
"integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA=="
},
"comlink": {
"version": "4.3.1",
"resolved": "https://registry.npmjs.org/comlink/-/comlink-4.3.1.tgz",
"integrity": "sha512-+YbhUdNrpBZggBAHWcgQMLPLH1KDF3wJpeqrCKieWQ8RL7atmgsgTQko1XEBK6PsecfopWNntopJ+ByYG1lRaA=="
},
"constantinople": {
"version": "4.0.1",
"resolved": "https://registry.npmjs.org/constantinople/-/constantinople-4.0.1.tgz",
......
......@@ -9,6 +9,7 @@
},
"dependencies": {
"axios": "^0.26.1",
"comlink": "^4.3.1",
"cqk-sy-ui": "^1.2.28",
"crypto-js": "^4.1.1",
"element-plus": "^2.1.9",
......
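
comlink is added so the hash computation can move off the main thread: expose() publishes a function inside a Web Worker, and wrap() returns a proxy on the main thread whose calls come back as Promises. A minimal, hypothetical pairing, not taken from this repo (the file names and the add function are illustrative), looks like:

// math.worker.ts — illustrative only, not part of this commit
import { expose } from "comlink";
export function add(a: number, b: number) {
  return a + b;
}
expose(add);

// main-thread side — illustrative only
import { wrap } from "comlink";
import type { add } from "./math.worker";
import MathWorker from "./math.worker?worker"; // Vite's worker-import syntax, same pattern as below
const remoteAdd = wrap<typeof add>(new MathWorker());
remoteAdd(1, 2).then((sum) => console.log(sum)); // 3, computed off the main thread
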
......@@ -19,4 +19,4 @@ createApp(App)
.directive("loading", ElLoading.directive)
.mount("#app");
// import "@/Test/index"
\ No newline at end of file
// import "@/Test/index"
......@@ -4,6 +4,9 @@ import { ElMessage } from "element-plus";
import { $ajax } from ".";
import { sha256 } from "js-sha256";
import axios, { Canceler } from "axios";
import type { getFileHash } from "@/worker";
import Worker from "@/worker?worker";
import { wrap } from "comlink";
// Chunked-file check
export function fileCheck(query: any) {
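
The body of fileCheck is collapsed in this diff. Judging from how its result is consumed in handleBigFileUpload below, the response it resolves with has roughly this shape; the interface itself is a sketch inferred from usage, not code from the repo:

// Sketch only: inferred from `res.data.is_done`, `res.data.file_info.name`,
// and `res.data.file_hash` used further down; not part of this commit.
interface FileCheckResponse {
  data: {
    is_done: boolean; // the server already has the complete file
    file_hash: string; // hash the server stores for the file
    file_info?: { name: string } | null; // present when the upload is already done
  };
}
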
......@@ -75,6 +78,11 @@ interface iFileChunkPayload {
total_size: number; // total size
}
async function worker_getFileHash(file: File) {
const workerGetFileHash = wrap<typeof getFileHash>(new Worker());
return workerGetFileHash(file);
}
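
worker_getFileHash creates a fresh Worker on every call and never terminates it, so each large upload leaves a worker thread behind. A hedged variant, using only the imports already shown above plus the standard Worker.terminate(), and assuming no other caller reuses the proxy:

// Sketch, not part of this commit: release the worker once the hash is back.
async function worker_getFileHashAndTerminate(file: File) {
  const worker = new Worker();
  const workerGetFileHash = wrap<typeof getFileHash>(worker);
  try {
    return await workerGetFileHash(file);
  } finally {
    worker.terminate(); // frees the thread; the comlink proxy is unusable afterwards
  }
}
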
function splitFileToFileChunks(file: File, chunkSize = 16) {
const fileChunks: Blob[] = [];
let slicesize = file.size;
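
The loop inside splitFileToFileChunks is collapsed by the diff context. If chunkSize is a size in megabytes (a guess; given slicesize = file.size it might instead be a chunk count), a typical Blob.slice-based splitter would be:

// Sketch of the elided body; the real implementation may differ.
function splitFileToFileChunksSketch(file: File, chunkSize = 16) {
  const fileChunks: Blob[] = [];
  const bytesPerChunk = chunkSize * 1024 * 1024; // assumption: chunkSize is in MB
  let offset = 0;
  while (offset < file.size) {
    fileChunks.push(file.slice(offset, offset + bytesPerChunk));
    offset += bytesPerChunk;
  }
  return fileChunks;
}
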
......@@ -103,139 +111,125 @@ export function handleBigFileUpload(
unzip: (category_id: number) => Promise<any>;
uploadAllFileChunks: Promise<boolean>;
cancelUpload?: () => void;
}>((rootResolve, rootReject) => {
}>(async (rootResolve, rootReject) => {
let isCancel = false;
let __sendingAmount = 0;
let __total = 0;
let __sendingIncreasing = false;
const cancelTokenList: Canceler[] = [];
let progressLoaded = 0;
const FILE_SIZE = file.size;
const fileChunks = splitFileToFileChunks(file);
const fileReader = new FileReader();
let fileHash = sha256.create();
fileReader.onload = function (event: Event) {
const result = (event!.target! as any).result;
fileHash.update(result);
fileHash.update(file.name);
fileCheck({ file_hash: fileHash.hex() }).then((res: any) => {
if (!res) return;
let { is_done, file_info } = res.data;
if (is_done && file_info) {
// upload already complete
ElMessage({
message: "上传成功",
type: "success",
});
rootResolve({
unzip: (category_id: number) =>
fileUnzip({
category_id,
file_hash: res.data.file_hash,
file_name: file_info.name,
}),
uploadAllFileChunks: Promise.resolve(true),
});
return;
}
const fileHash = await worker_getFileHash(file);
fileCheck({ file_hash: fileHash }).then((res: any) => {
if (!res) return;
let { is_done, file_info } = res.data;
if (is_done && file_info) {
// upload already complete
ElMessage({
message: "上传成功",
type: "success",
});
rootResolve({
unzip: (category_id: number) =>
fileUnzip({
category_id,
file_hash: res.data.file_hash,
file_name: file_info.name,
}),
uploadAllFileChunks: Promise.resolve(true),
});
return;
}
const uploadAllFileChunks = new Promise<boolean>(
(resolveAllFileUpload, rejectAllFileUpload) => {
function uploadFileChunk(
fileChunkPayload: iFileChunkPayload,
numOfFileChunks: number
) {
return new Promise((resolveFileChunkUpload) => {
if (isCancel) {
return;
}
let lastLoaded = 0;
const afterFileUpload = fileUpload(
fileChunkPayload,
(progressEvent) => {
progressLoaded -= lastLoaded;
lastLoaded =
(progressEvent.loaded / progressEvent.total) *
fileChunkPayload.current_size;
progressLoaded +=
(progressEvent.loaded / progressEvent.total) *
fileChunkPayload.current_size;
const uploadAllFileChunks = new Promise<boolean>(
(resolveAllFileUpload, rejectAllFileUpload) => {
function uploadFileChunk(
fileChunkPayload: iFileChunkPayload,
numOfFileChunks: number
) {
return new Promise((resolveFileChunkUpload) => {
if (isCancel) {
console.log(progressLoaded / FILE_SIZE);
onPercentCompleteChange &&
onPercentCompleteChange(progressLoaded / FILE_SIZE);
}
);
cancelTokenList.push(afterFileUpload.cancel);
afterFileUpload.then((res) => {
if (!res && !isCancel) {
ElMessage({
message: "上传失败,请重新上传",
type: "warning",
});
rejectAllFileUpload("上传失败,请重新上传");
return;
}
__sendingAmount += 1;
let lastLoaded = 0;
const afterFileUpload = fileUpload(
fileChunkPayload,
(progressEvent) => {
progressLoaded -= lastLoaded;
lastLoaded =
(progressEvent.loaded / progressEvent.total) *
fileChunkPayload.current_size;
progressLoaded +=
(progressEvent.loaded / progressEvent.total) *
fileChunkPayload.current_size;
console.log(progressLoaded / FILE_SIZE);
onPercentCompleteChange &&
onPercentCompleteChange(progressLoaded / FILE_SIZE);
}
);
cancelTokenList.push(afterFileUpload.cancel);
afterFileUpload.then((res) => {
if (!res && !isCancel) {
ElMessage({
message: "上传失败,请重新上传",
type: "warning",
});
rejectAllFileUpload("上传失败,请重新上传");
return;
}
__total += 1;
if (__total === fileChunks.length) {
onPercentCompleteChange && onPercentCompleteChange(1);
onDown();
console.log(1, "上传完成");
}
if (__total === numOfFileChunks) {
resolveAllFileUpload(true);
}
resolveFileChunkUpload(true);
});
__total += 1;
if (__total === fileChunks.length) {
onPercentCompleteChange && onPercentCompleteChange(1);
onDown();
console.log(1, "上传完成");
}
if (__total === numOfFileChunks) {
resolveAllFileUpload(true);
}
resolveFileChunkUpload(true);
});
}
const promiseFnList = fileChunks.map((fileChunk, index) => {
return () =>
blobToBase64(fileChunk).then((base64FileData: string) => {
return uploadFileChunk(
{
current_data: base64FileData, // current chunk data
current_seq: index + 1, // current chunk sequence number
current_size: fileChunk.size, // current chunk size
file_name: file.name, // file name
file_hash: fileHash.hex(), // file hash
total_seq: fileChunks.length, // total number of chunks
total_size: file.size, // total size
},
fileChunks.length
);
});
});
executePromiseArray(promiseFnList);
}
);
rootResolve({
uploadAllFileChunks,
unzip: (category_id: number) => {
return uploadAllFileChunks.then(() => {
return fileUnzip({
category_id,
file_hash: fileHash.hex(),
file_name: file.name,
const promiseFnList = fileChunks.map((fileChunk, index) => {
return () =>
blobToBase64(fileChunk).then((base64FileData: string) => {
return uploadFileChunk(
{
current_data: base64FileData, // current chunk data
current_seq: index + 1, // current chunk sequence number
current_size: fileChunk.size, // current chunk size
file_name: file.name, // file name
file_hash: fileHash, // file hash
total_seq: fileChunks.length, // total number of chunks
total_size: file.size, // total size
},
fileChunks.length
);
});
});
executePromiseArray(promiseFnList);
}
);
rootResolve({
uploadAllFileChunks,
unzip: (category_id: number) => {
return uploadAllFileChunks.then(() => {
return fileUnzip({
category_id,
file_hash: fileHash,
file_name: file.name,
});
},
cancelUpload: () => {
isCancel = true;
cancelTokenList.forEach((cancel) => cancel());
},
});
});
},
cancelUpload: () => {
isCancel = true;
cancelTokenList.forEach((cancel) => cancel());
},
});
};
// read-failure callback
fileReader.onerror = function (err: any) {
rootReject();
};
fileReader.readAsArrayBuffer(file);
});
});
}
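
Neither executePromiseArray nor blobToBase64 appears in this diff (blobToBase64 comes from "@/utils/file", per the worker file's imports below). Based on how they are called above — a list of () => Promise factories, and a Blob resolved to a base64 string — plausible implementations are sketched here; the concurrency limit is an assumption and the real helpers may differ:

// Sketches only — the actual helpers live outside this diff.
// Runs the promise factories with a small, assumed concurrency limit.
async function executePromiseArraySketch(
  promiseFnList: Array<() => Promise<unknown>>,
  concurrency = 3
) {
  let next = 0;
  const runners = Array.from(
    { length: Math.min(concurrency, promiseFnList.length) },
    async () => {
      while (next < promiseFnList.length) {
        const index = next++;
        await promiseFnList[index]();
      }
    }
  );
  await Promise.all(runners);
}

// Reads a Blob as a data URL and strips the "data:*;base64," prefix.
function blobToBase64Sketch(blob: Blob) {
  return new Promise<string>((resolve, reject) => {
    const reader = new FileReader();
    reader.onload = () => resolve(String(reader.result).split(",")[1] ?? "");
    reader.onerror = () => reject(reader.error);
    reader.readAsDataURL(blob);
  });
}

The remaining lines are the new worker module itself — the one imported above as "@/worker" and "@/worker?worker" — which exposes getFileHash through comlink:
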
import { expose } from "comlink";
import { blobToBase64 } from "@/utils/file";
import { sha256 } from "js-sha256";
export function workerBlobToBase64() {}
export function getFileHash(file: File) {
return new Promise<string>((r) => {
const fileReader = new FileReader();
let fileHash = sha256.create();
fileReader.onload = function (event: Event) {
const result = (event!.target! as any).result;
fileHash.update(result);
fileHash.update(file.name);
r(fileHash.hex());
};
fileReader.readAsArrayBuffer(file);
});
}
expose(getFileHash);
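
getFileHash reads the whole file into memory with readAsArrayBuffer before hashing, and it has no onerror handler, so a failed read leaves the returned promise pending forever. Since the js-sha256 hasher is already used incrementally here (create/update/hex), a memory-friendlier variant could stream the file slice by slice and surface read errors; this is a sketch only, and the slice size is an arbitrary assumption:

// Sketch, not part of this commit: hash the file in slices instead of one big buffer.
export async function getFileHashChunked(file: File, sliceBytes = 8 * 1024 * 1024) {
  const hash = sha256.create();
  for (let offset = 0; offset < file.size; offset += sliceBytes) {
    // Blob.arrayBuffer() rejects if the slice cannot be read, so callers see the error.
    const buffer = await file.slice(offset, offset + sliceBytes).arrayBuffer();
    hash.update(buffer);
  }
  hash.update(file.name); // keep the original scheme of mixing the file name into the hash
  return hash.hex();
}

As long as the exposed function keeps the same signature, the main thread's wrap<typeof getFileHash> call continues to work unchanged.
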