client: use web worker to calculate md5

fantasticit 2022-06-04 19:03:39 +08:00
parent a05908d28f
commit 1f94d0e465
4 changed files with 103 additions and 55 deletions

File: client upload service (src/services)

@@ -4,40 +4,22 @@ import SparkMD5 from 'spark-md5';
 import { HttpClient } from './http-client';
 
 const splitBigFile = (file: File): Promise<{ chunks: File[]; md5: string }> => {
-  return new Promise((resolve, reject) => {
-    const spark = new SparkMD5.ArrayBuffer();
-    const fileReader = new FileReader();
+  return new Promise((resolve) => {
     const chunks = [];
     const len = Math.ceil(file.size / FILE_CHUNK_SIZE);
-    let current = 0;
-
-    fileReader.onload = (e) => {
-      current++;
-      const chunk = e.target.result;
-      spark.append(chunk);
-      if (current < len) {
-        loadChunk();
-      } else {
-        resolve({ chunks, md5: spark.end() });
-      }
-    };
-    fileReader.onerror = (err) => {
-      reject(err);
-    };
-    const loadChunk = () => {
-      const start = current * FILE_CHUNK_SIZE;
+    const sparkWorker = new Worker(new URL('./spark-md5.js', import.meta.url));
+
+    sparkWorker.onmessage = (evt) => {
+      resolve({ md5: evt.data.md5, chunks });
+    };
+
+    for (let i = 0; i < len; i++) {
+      const start = i * FILE_CHUNK_SIZE;
       const end = Math.min(start + FILE_CHUNK_SIZE, file.size);
       const chunk = file.slice(start, end);
       chunks.push(chunk);
-      fileReader.readAsArrayBuffer(chunk);
-    };
-    loadChunk();
+    }
+
+    sparkWorker.postMessage({ chunks });
   });
 };
@@ -74,12 +56,21 @@ const uploadFileToServer = (arg: {
   });
 };
 
-export const uploadFile = async (file: File, onUploadProgress?: (progress: number) => void) => {
+export const uploadFile = async (
+  file: File,
+  onUploadProgress?: (progress: number) => void,
+  onTooLarge?: () => void
+) => {
   const wraponUploadProgress = (percent) => {
     return onUploadProgress && onUploadProgress(Math.ceil(percent * 100));
   };
   const filename = file.name;
 
+  if (file.size > FILE_CHUNK_SIZE * 5) {
+    onTooLarge && onTooLarge();
+  }
+
   if (file.size <= FILE_CHUNK_SIZE) {
     const spark = new SparkMD5.ArrayBuffer();
     spark.append(file);
@@ -91,33 +82,57 @@ export const uploadFile = async (file: File, onUploadProgress?: (progress: number) => void) => {
     const unitPercent = 1 / chunks.length;
     const progressMap = {};
 
-    await Promise.all(
-      chunks.map((chunk, index) =>
-        uploadFileToServer({
-          filename,
-          file: chunk,
-          chunkIndex: index + 1,
-          md5,
-          isChunk: true,
-          onUploadProgress: (progress) => {
-            progressMap[index] = progress * unitPercent;
-            wraponUploadProgress(
-              Object.keys(progressMap).reduce((a, c) => {
-                return (a += progressMap[c]);
-              }, 0)
-            );
-          },
-        })
-      )
-    );
-
-    const url = await HttpClient.request({
-      method: FileApiDefinition.mergeChunk.method,
-      url: FileApiDefinition.mergeChunk.client(),
-      params: {
-        filename,
-        md5,
-      },
-    });
+    /**
+     * Upload the first chunk on its own: if the server already has a file
+     * with this md5, it returns the final URL right away and the remaining
+     * chunks (and the merge request) can be skipped.
+     */
+    let url = await uploadFileToServer({
+      filename,
+      file: chunks[0],
+      chunkIndex: 1,
+      md5,
+      isChunk: true,
+      onUploadProgress: (progress) => {
+        progressMap[0] = progress * unitPercent;
+        wraponUploadProgress(
+          Object.keys(progressMap).reduce((a, c) => {
+            return (a += progressMap[c]);
+          }, 0)
+        );
+      },
+    });
+
+    if (!url) {
+      // The server does not have the file yet: upload the remaining chunks
+      // in parallel, then ask the server to merge them.
+      await Promise.all(
+        chunks.slice(1).map((chunk, index) =>
+          uploadFileToServer({
+            filename,
+            file: chunk,
+            chunkIndex: index + 2,
+            md5,
+            isChunk: true,
+            onUploadProgress: (progress) => {
+              progressMap[index + 1] = progress * unitPercent;
+              wraponUploadProgress(
+                Object.keys(progressMap).reduce((a, c) => {
+                  return (a += progressMap[c]);
+                }, 0)
+              );
+            },
+          })
+        )
+      );
+      url = await HttpClient.request({
+        method: FileApiDefinition.mergeChunk.method,
+        url: FileApiDefinition.mergeChunk.client(),
+        params: {
+          filename,
+          md5,
+        },
+      });
+    } else {
+      wraponUploadProgress(1);
+    }
     return url;
   }
 };
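With this change, uploadFile accepts a third, optional onTooLarge callback, fired when the file exceeds FILE_CHUNK_SIZE * 5, and it probes the server with the first chunk before uploading the rest. A minimal sketch of how a caller might drive the new signature (the `file` variable and the logging are illustrative, not part of the commit):

// assuming `file` comes from e.g. an <input type="file"> change event
const url = await uploadFile(
  file,
  (progress) => console.log(`upload: ${progress}%`), // integer 0–100, via wraponUploadProgress
  () => console.log('large file: upload continues in the background')
);
console.log('file available at', url);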

File: src/services/spark-md5.js (new)

@@ -0,0 +1,31 @@
+import SparkMD5 from 'spark-md5';
+
+// Receive the file chunks from the main thread, hash them sequentially
+// with spark-md5, and post the final digest back.
+addEventListener('message', (e) => {
+  const chunks = e.data.chunks || [];
+  if (!chunks.length) return;
+
+  const spark = new SparkMD5.ArrayBuffer();
+  const reader = new FileReader();
+  let index = 0;
+
+  const load = () => {
+    const chunk = chunks[index];
+    reader.readAsArrayBuffer(chunk);
+  };
+
+  reader.onload = (e) => {
+    spark.append(e.target.result);
+    if (index === chunks.length - 1) {
+      // All chunks hashed: report the digest and shut the worker down.
+      const md5 = spark.end();
+      postMessage({ md5 });
+      self.close();
+    } else {
+      index++;
+      load();
+    }
+  };
+
+  load();
+});
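The worker appends the chunks strictly in order because MD5 is a streaming hash: feeding the pieces sequentially yields the same digest as hashing the whole byte sequence at once. A small sketch of that equivalence with spark-md5 (the buffers are hypothetical):

import SparkMD5 from 'spark-md5';

// `bufferA`, `bufferB` and `concatenated` are hypothetical ArrayBuffers,
// where `concatenated` holds the bytes of A followed by B.
const whole = new SparkMD5.ArrayBuffer();
whole.append(concatenated);

const pieces = new SparkMD5.ArrayBuffer();
pieces.append(bufferA);
pieces.append(bufferB);

// whole.end() === pieces.end() — both return the same hex digest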

File: attachment node-view component

@@ -1,5 +1,5 @@
 import { IconClose, IconDownload, IconPlayCircle } from '@douyinfe/semi-icons';
-import { Button, Collapsible, Progress, Space, Spin, Typography } from '@douyinfe/semi-ui';
+import { Button, Collapsible, Progress, Space, Spin, Toast, Typography } from '@douyinfe/semi-ui';
 import { FILE_CHUNK_SIZE } from '@think/domains';
 import { NodeViewWrapper } from '@tiptap/react';
 import cls from 'classnames';

@@ -46,7 +46,9 @@ export const AttachmentWrapper = ({ editor, node, updateAttributes }) => {
     }
     try {
-      const url = await uploadFile(file, setUploadProgress);
+      const url = await uploadFile(file, setUploadProgress, () => {
+        Toast.info('The file is large and will be uploaded in the background; you can continue with other work.');
+      });
       updateAttributes({ ...fileInfo, url });
       toggleLoading(false);
       setUploadProgress(0);

File: tsconfig.json

@@ -31,6 +31,6 @@
       "thirtypart/*": ["thirtypart/*"]
     }
   },
-  "include": ["next-env.d.ts", "global.d.ts", "**/*.ts", "**/*.tsx"],
+  "include": ["next-env.d.ts", "global.d.ts", "**/*.ts", "**/*.tsx", "src/services/spark-md5.js"],
   "exclude": ["node_modules", "next.config.js"]
 }