mirror of https://github.com/fantasticit/think.git
client: use web-worker to calculate md5
parent a05908d28f
commit 1f94d0e465
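In outline: computing a big file's MD5 with SparkMD5 on the main thread blocks the UI, so this commit moves the hash loop into a dedicated Web Worker and has the upload service await the worker's reply. A minimal sketch of that pattern, assuming a worker script that accepts { chunks } and answers { md5 } as in the diff below (the helper name hashFileChunks is illustrative, not from the commit):

// Main thread: offload MD5 hashing of Blob chunks to a worker.
const hashFileChunks = (chunks: Blob[]): Promise<string> =>
  new Promise((resolve, reject) => {
    // Bundlers such as webpack 5 resolve this worker URL pattern at build time.
    const worker = new Worker(new URL('./spark-md5.js', import.meta.url));
    worker.onmessage = (evt) => resolve(evt.data.md5);
    worker.onerror = reject;
    worker.postMessage({ chunks });
  });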
@@ -4,40 +4,22 @@ import SparkMD5 from 'spark-md5';
 import { HttpClient } from './http-client';
 
 const splitBigFile = (file: File): Promise<{ chunks: File[]; md5: string }> => {
-  return new Promise((resolve, reject) => {
-    const spark = new SparkMD5.ArrayBuffer();
-    const fileReader = new FileReader();
+  return new Promise((resolve) => {
     const chunks = [];
     const len = Math.ceil(file.size / FILE_CHUNK_SIZE);
-    let current = 0;
 
-    fileReader.onload = (e) => {
-      current++;
-
-      const chunk = e.target.result;
-      spark.append(chunk);
-
-      if (current < len) {
-        loadChunk();
-      } else {
-        resolve({ chunks, md5: spark.end() });
-      }
+    const sparkWorker = new Worker(new URL('./spark-md5.js', import.meta.url));
+    sparkWorker.onmessage = (evt) => {
+      resolve({ md5: evt.data.md5, chunks });
     };
 
-    fileReader.onerror = (err) => {
-      reject(err);
-    };
-
-    const loadChunk = () => {
-      const start = current * FILE_CHUNK_SIZE;
+    for (let i = 0; i < len; i++) {
+      const start = i * FILE_CHUNK_SIZE;
       const end = Math.min(start + FILE_CHUNK_SIZE, file.size);
       const chunk = file.slice(start, end);
 
       chunks.push(chunk);
-      fileReader.readAsArrayBuffer(chunk);
-    };
+    }
 
-    loadChunk();
+    sparkWorker.postMessage({ chunks });
   });
 };
@@ -74,12 +56,21 @@ const uploadFileToServer = (arg: {
   });
 };
 
-export const uploadFile = async (file: File, onUploadProgress?: (progress: number) => void) => {
+export const uploadFile = async (
+  file: File,
+  onUploadProgress?: (progress: number) => void,
+  onTooLarge?: () => void
+) => {
   const wraponUploadProgress = (percent) => {
     return onUploadProgress && onUploadProgress(Math.ceil(percent * 100));
   };
 
   const filename = file.name;
 
+  if (file.size > FILE_CHUNK_SIZE * 5) {
+    onTooLarge && onTooLarge();
+  }
+
   if (file.size <= FILE_CHUNK_SIZE) {
     const spark = new SparkMD5.ArrayBuffer();
     spark.append(file);
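For reference, a caller-side sketch of the widened uploadFile signature (a hypothetical call site; note that the size check above only fires the onTooLarge notice, the upload itself proceeds regardless):

// Hypothetical call site exercising the new third argument.
const url = await uploadFile(
  file,
  (percent) => console.log(`upload progress: ${percent}%`), // 0-100, see wraponUploadProgress
  () => console.log('large file: it will keep uploading in the background')
);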
@@ -91,16 +82,36 @@ export const uploadFile = async (file: File, onUploadProgress?: (progress: number) => void) => {
   const unitPercent = 1 / chunks.length;
   const progressMap = {};
 
-  await Promise.all(
-    chunks.map((chunk, index) =>
-      uploadFileToServer({
+  /**
+   * Upload the first chunk by itself: if the file has already been uploaded,
+   * there is no need to upload the remaining chunks.
+   */
+  let url = await uploadFileToServer({
     filename,
-    file: chunk,
-    chunkIndex: index + 1,
+    file: chunks[0],
+    chunkIndex: 1,
     md5,
     isChunk: true,
     onUploadProgress: (progress) => {
-      progressMap[index] = progress * unitPercent;
+      progressMap[0] = progress * unitPercent;
       wraponUploadProgress(
         Object.keys(progressMap).reduce((a, c) => {
           return (a += progressMap[c]);
         }, 0)
       );
     },
   });
 
+  if (!url) {
+    await Promise.all(
+      chunks.slice(1).map((chunk, index) =>
+        uploadFileToServer({
+          filename,
+          file: chunk,
+          chunkIndex: index + 1 + 1,
+          md5,
+          isChunk: true,
+          onUploadProgress: (progress) => {
+            progressMap[index + 1] = progress * unitPercent;
+            wraponUploadProgress(
+              Object.keys(progressMap).reduce((a, c) => {
+                return (a += progressMap[c]);
@@ -110,7 +121,7 @@ export const uploadFile = async (file: File, onUploadProgress?: (progress: number) => void) => {
         })
       )
     );
-  const url = await HttpClient.request({
+    url = await HttpClient.request({
     method: FileApiDefinition.mergeChunk.method,
     url: FileApiDefinition.mergeChunk.client(),
     params: {
@@ -118,6 +129,10 @@ export const uploadFile = async (file: File, onUploadProgress?: (progress: number) => void) => {
       md5,
     },
   });
+  } else {
+    wraponUploadProgress(1);
+  }
 
   return url;
 }
 };
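Taken together, the uploadFile hunks implement a hash-first deduplication flow for big files. A condensed sketch of the resulting control flow (not verbatim from the diff; progress bookkeeping is omitted and the params field elided at the hunk boundary is left marked):

// 1. Split the file and hash it in the worker.
const { chunks, md5 } = await splitBigFile(file);

// 2. Probe with chunk 1; the server answers with a URL when it already has the file.
let url = await uploadFileToServer({ filename, file: chunks[0], chunkIndex: 1, md5, isChunk: true });

// 3. Otherwise upload the remaining chunks in parallel, then ask the server to merge them.
if (!url) {
  await Promise.all(
    chunks.slice(1).map((chunk, i) =>
      uploadFileToServer({ filename, file: chunk, chunkIndex: i + 2, md5, isChunk: true })
    )
  );
  url = await HttpClient.request({
    method: FileApiDefinition.mergeChunk.method,
    url: FileApiDefinition.mergeChunk.client(),
    params: { md5 /* plus the field elided at the hunk boundary */ },
  });
}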
@@ -0,0 +1,31 @@
+import SparkMD5 from 'spark-md5';
+
+addEventListener('message', (e) => {
+  const chunks = e.data.chunks || [];
+
+  if (!chunks.length) return;
+
+  const spark = new SparkMD5.ArrayBuffer();
+  const reader = new FileReader();
+  let index = 0;
+
+  const load = () => {
+    const chunk = chunks[index];
+    reader.readAsArrayBuffer(chunk);
+  };
+
+  reader.onload = (e) => {
+    spark.append(e.target.result);
+
+    if (index === chunks.length - 1) {
+      const md5 = spark.end();
+      postMessage({ md5 });
+      self.close();
+    } else {
+      index++;
+      load();
+    }
+  };
+
+  load();
+});
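A note on the worker above: it chains reads through a single FileReader, advancing index from onload, because one FileReader can only service one read at a time. Where Blob.arrayBuffer() is available (modern browsers, including worker contexts), the same sequential hashing can be written without FileReader; an alternative sketch, not part of the commit:

// Alternative spark-md5 worker using Blob.arrayBuffer() instead of FileReader.
import SparkMD5 from 'spark-md5';

addEventListener('message', async (e) => {
  const chunks = e.data.chunks || [];
  if (!chunks.length) return;

  const spark = new SparkMD5.ArrayBuffer();
  for (const chunk of chunks) {
    // Sequential awaits keep the chunks appended in order.
    spark.append(await chunk.arrayBuffer());
  }

  postMessage({ md5: spark.end() });
  self.close(); // one-shot worker: shut down after replying
});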
@@ -1,5 +1,5 @@
 import { IconClose, IconDownload, IconPlayCircle } from '@douyinfe/semi-icons';
-import { Button, Collapsible, Progress, Space, Spin, Typography } from '@douyinfe/semi-ui';
+import { Button, Collapsible, Progress, Space, Spin, Toast, Typography } from '@douyinfe/semi-ui';
 import { FILE_CHUNK_SIZE } from '@think/domains';
 import { NodeViewWrapper } from '@tiptap/react';
 import cls from 'classnames';
@@ -46,7 +46,9 @@ export const AttachmentWrapper = ({ editor, node, updateAttributes }) => {
     }
 
     try {
-      const url = await uploadFile(file, setUploadProgress);
+      const url = await uploadFile(file, setUploadProgress, () => {
+        Toast.info('文件较大,文件将在后台进行上传处理,您可继续其他操作');
+      });
       updateAttributes({ ...fileInfo, url });
       toggleLoading(false);
       setUploadProgress(0);

(The Toast message reads: "This file is large, so it will be uploaded in the background; you can continue with other work.")
@@ -31,6 +31,6 @@
       "thirtypart/*": ["thirtypart/*"]
     }
   },
-  "include": ["next-env.d.ts", "global.d.ts", "**/*.ts", "**/*.tsx"],
+  "include": ["next-env.d.ts", "global.d.ts", "**/*.ts", "**/*.tsx", "src/services/spark-md5.js"],
   "exclude": ["node_modules", "next.config.js"]
 }