mirror of https://github.com/fantasticit/think.git

client: use web-worker to calculate md5

This commit is contained in:
parent a05908d28f
commit 1f94d0e465
@@ -4,40 +4,22 @@ import SparkMD5 from 'spark-md5';
 import { HttpClient } from './http-client';
 
 const splitBigFile = (file: File): Promise<{ chunks: File[]; md5: string }> => {
-  return new Promise((resolve, reject) => {
-    const spark = new SparkMD5.ArrayBuffer();
-    const fileReader = new FileReader();
+  return new Promise((resolve) => {
     const chunks = [];
     const len = Math.ceil(file.size / FILE_CHUNK_SIZE);
-    let current = 0;
-
-    fileReader.onload = (e) => {
-      current++;
-
-      const chunk = e.target.result;
-      spark.append(chunk);
-
-      if (current < len) {
-        loadChunk();
-      } else {
-        resolve({ chunks, md5: spark.end() });
-      }
-    };
-
-    fileReader.onerror = (err) => {
-      reject(err);
-    };
-
-    const loadChunk = () => {
-      const start = current * FILE_CHUNK_SIZE;
+    const sparkWorker = new Worker(new URL('./spark-md5.js', import.meta.url));
+
+    sparkWorker.onmessage = (evt) => {
+      resolve({ md5: evt.data.md5, chunks });
+    };
+
+    for (let i = 0; i < len; i++) {
+      const start = i * FILE_CHUNK_SIZE;
       const end = Math.min(start + FILE_CHUNK_SIZE, file.size);
       const chunk = file.slice(start, end);
 
       chunks.push(chunk);
-      fileReader.readAsArrayBuffer(chunk);
-    };
-
-    loadChunk();
+    }
+
+    sparkWorker.postMessage({ chunks });
   });
 };
 
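For orientation, a minimal sketch (not part of this commit) of how the rewritten splitBigFile might be driven from calling code; the handler name and the logging are illustrative assumptions, and in the commit splitBigFile stays a module-private helper consumed by uploadFile below.

// Illustrative sketch only: exercise splitBigFile with a user-picked file.
// Chunking still happens on the main thread; the MD5 digest is computed in
// the spark-md5.js web worker, so hashing large files no longer blocks the UI.
const handleFilePicked = async (file: File) => {
  const { chunks, md5 } = await splitBigFile(file);
  console.log(`split into ${chunks.length} chunks, md5 = ${md5}`);
};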
@@ -74,12 +56,21 @@ const uploadFileToServer = (arg: {
   });
 };
 
-export const uploadFile = async (file: File, onUploadProgress?: (progress: number) => void) => {
+export const uploadFile = async (
+  file: File,
+  onUploadProgress?: (progress: number) => void,
+  onTooLarge?: () => void
+) => {
   const wraponUploadProgress = (percent) => {
     return onUploadProgress && onUploadProgress(Math.ceil(percent * 100));
   };
 
   const filename = file.name;
 
+  if (file.size > FILE_CHUNK_SIZE * 5) {
+    onTooLarge && onTooLarge();
+  }
+
   if (file.size <= FILE_CHUNK_SIZE) {
     const spark = new SparkMD5.ArrayBuffer();
     spark.append(file);
@@ -91,16 +82,36 @@ export const uploadFile = async (file: File, onUploadProgress?: (progress: number) => void) => {
   const unitPercent = 1 / chunks.length;
   const progressMap = {};
 
-  await Promise.all(
-    chunks.map((chunk, index) =>
-      uploadFileToServer({
-        filename,
-        file: chunk,
-        chunkIndex: index + 1,
-        md5,
-        isChunk: true,
-        onUploadProgress: (progress) => {
-          progressMap[index] = progress * unitPercent;
-          wraponUploadProgress(
-            Object.keys(progressMap).reduce((a, c) => {
-              return (a += progressMap[c]);
+  /**
+   * Upload the first chunk on its own: if the file has already been uploaded, the remaining chunks can be skipped.
+   */
+  let url = await uploadFileToServer({
+    filename,
+    file: chunks[0],
+    chunkIndex: 1,
+    md5,
+    isChunk: true,
+    onUploadProgress: (progress) => {
+      progressMap[0] = progress * unitPercent;
+      wraponUploadProgress(
+        Object.keys(progressMap).reduce((a, c) => {
+          return (a += progressMap[c]);
+        }, 0)
+      );
+    },
+  });
+
+  if (!url) {
+    await Promise.all(
+      chunks.slice(1).map((chunk, index) =>
+        uploadFileToServer({
+          filename,
+          file: chunk,
+          chunkIndex: index + 1 + 1,
+          md5,
+          isChunk: true,
+          onUploadProgress: (progress) => {
+            progressMap[index + 1] = progress * unitPercent;
+            wraponUploadProgress(
+              Object.keys(progressMap).reduce((a, c) => {
+                return (a += progressMap[c]);
@@ -110,7 +121,7 @@ export const uploadFile = async (file: File, onUploadProgress?: (progress: number) => void) => {
         })
       )
     );
-    const url = await HttpClient.request({
+    url = await HttpClient.request({
       method: FileApiDefinition.mergeChunk.method,
       url: FileApiDefinition.mergeChunk.client(),
       params: {
@@ -118,6 +129,10 @@ export const uploadFile = async (file: File, onUploadProgress?: (progress: number) => void) => {
         md5,
       },
     });
+  } else {
+    wraponUploadProgress(1);
+  }
 
     return url;
   }
 };
new file: src/services/spark-md5.js

@@ -0,0 +1,31 @@
+import SparkMD5 from 'spark-md5';
+
+addEventListener('message', (e) => {
+  const chunks = e.data.chunks || [];
+
+  if (!chunks.length) return;
+
+  const spark = new SparkMD5.ArrayBuffer();
+  const reader = new FileReader();
+  let index = 0;
+
+  const load = () => {
+    const chunk = chunks[index];
+    reader.readAsArrayBuffer(chunk);
+  };
+
+  reader.onload = (e) => {
+    spark.append(e.target.result);
+
+    if (index === chunks.length - 1) {
+      const md5 = spark.end();
+      postMessage({ md5 });
+      self.close();
+    } else {
+      index++;
+      load();
+    }
+  };
+
+  load();
+});
+
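A side note, not part of the commit: dedicated workers also expose the synchronous FileReaderSync API, so the same hashing could be written without chaining onload callbacks. A minimal sketch under that assumption, keeping the same { chunks } message shape:

// Alternative sketch (assumption, not the committed code): use the worker-only
// FileReaderSync API to hash each chunk in a simple loop.
import SparkMD5 from 'spark-md5';

addEventListener('message', (e) => {
  const chunks = e.data.chunks || [];
  if (!chunks.length) return;

  const spark = new SparkMD5.ArrayBuffer();
  const reader = new FileReaderSync(); // synchronous reads are allowed off the main thread

  for (const chunk of chunks) {
    spark.append(reader.readAsArrayBuffer(chunk));
  }

  postMessage({ md5: spark.end() });
  self.close();
});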
@@ -1,5 +1,5 @@
 import { IconClose, IconDownload, IconPlayCircle } from '@douyinfe/semi-icons';
-import { Button, Collapsible, Progress, Space, Spin, Typography } from '@douyinfe/semi-ui';
+import { Button, Collapsible, Progress, Space, Spin, Toast, Typography } from '@douyinfe/semi-ui';
 import { FILE_CHUNK_SIZE } from '@think/domains';
 import { NodeViewWrapper } from '@tiptap/react';
 import cls from 'classnames';
@@ -46,7 +46,9 @@ export const AttachmentWrapper = ({ editor, node, updateAttributes }) => {
     }
 
     try {
-      const url = await uploadFile(file, setUploadProgress);
+      const url = await uploadFile(file, setUploadProgress, () => {
+        Toast.info('文件较大,文件将在后台进行上传处理,您可继续其他操作'); // "The file is large; it will be uploaded in the background, and you can continue with other operations."
+      });
       updateAttributes({ ...fileInfo, url });
       toggleLoading(false);
       setUploadProgress(0);
@@ -31,6 +31,6 @@
       "thirtypart/*": ["thirtypart/*"]
     }
   },
-  "include": ["next-env.d.ts", "global.d.ts", "**/*.ts", "**/*.tsx"],
+  "include": ["next-env.d.ts", "global.d.ts", "**/*.ts", "**/*.tsx", "src/services/spark-md5.js"],
   "exclude": ["node_modules", "next.config.js"]
 }