mirror of https://github.com/fantasticit/think.git
Compare commits
No commits in common. "7ebbdabd76f65a804d6a5416bef61615279f15cd" and "d48aa62b5ec5222d72967fe846a8f5d8e7e8f93d" have entirely different histories.
7ebbdabd76 ... d48aa62b5e
(The hunks below read from 7ebbdabd76 to d48aa62b5e: '-' lines exist only in 7ebbdabd76, '+' lines only in d48aa62b5e.)

@@ -48,7 +48,6 @@ db:
   host: '127.0.0.1'
   port: '6379'
   password: 'root'
-  db: 0

 # OSS file storage service
 oss:

@@ -56,21 +55,24 @@ oss:
   enable: true
   # in production, change this to the server address (e.g. https://api.codingit.cn)
   server: 'http://localhost:5002'
-  s3:
-    enable: true
-    config:
-      # storage provider (cloudisp): one of minio, s3, aliyun, tencent
-      cloudisp: 'minio'
-      accessKeyId: ''
-      secretAccessKey: ''
-      bucket: ''
-      region: ''
-      # only set when cloudisp is minio/s3; leave empty for other providers
-      forcePathStyle: false
-      # only set when cloudisp is minio; leave empty for other providers
-      endpoint: ''
+  # per-vendor SDK configuration below; do not change the field names, just fill in the values
+  tencent:
+    enable: false
+    config:
+      SecretId: ''
+      SecretKey: ''
+      Bucket: ''
+      Region: ''
+  aliyun:
+    enable: false
+    config:
+      accessKeyId: ''
+      accessKeySecret: ''
+      bucket: ''
+      https: true
+      region: ''

 # jwt configuration
 jwt:
   secretkey: 'zA_Think+KNOWLEDGE+WIKI+DOCUMENTS@2022'
   expiresIn: '6h'

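Aside: the removed `oss.s3.config` block feeds an AWS SDK v3 client, as the deleted `s3.client.ts` later in this compare shows. A minimal sketch of that mapping only (the `S3Config` type and `buildS3Client` name are illustrative, not part of the repository):

import { S3Client } from '@aws-sdk/client-s3';

// Shape of the removed YAML block, written out as a type for clarity.
type S3Config = {
  cloudisp: 'minio' | 's3' | 'aliyun' | 'tencent';
  accessKeyId: string;
  secretAccessKey: string;
  bucket: string;
  region: string;
  forcePathStyle?: boolean; // only meaningful for minio/s3
  endpoint?: string; // only meaningful for minio
};

// Build an S3Client from the config; for aliyun/tencent the endpoint is derived from the region.
export function buildS3Client(config: S3Config): S3Client {
  const credentials = {
    accessKeyId: config.accessKeyId,
    secretAccessKey: config.secretAccessKey,
  };
  switch (config.cloudisp) {
    case 'minio':
      return new S3Client({ endpoint: config.endpoint, region: config.region, forcePathStyle: config.forcePathStyle, credentials });
    case 's3':
      return new S3Client({ region: config.region, forcePathStyle: config.forcePathStyle, credentials });
    case 'aliyun':
      return new S3Client({ region: config.region, endpoint: 'https://' + config.region + '.aliyuncs.com', forcePathStyle: false, credentials });
    case 'tencent':
      return new S3Client({ region: config.region, endpoint: 'https://cos.' + config.region + '.myqcloud.com', forcePathStyle: false, credentials });
    default:
      throw new Error('unknown cloudisp');
  }
}
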
@@ -23,7 +23,7 @@ const nextConfig = semi({
   env: {
     SERVER_API_URL: config.client.apiUrl,
     COLLABORATION_API_URL: config.client.collaborationUrl,
-    ENABLE_OSS_S3: config.oss.s3.enable,
+    ENABLE_ALIYUN_OSS: !!config.oss.aliyun.accessKeyId,
     DNS_PREFETCH: (config.client.dnsPrefetch || '').split(' '),
     SEO_APPNAME: config.client.seoAppName,
     SEO_DESCRIPTION: config.client.seoDescription,

@@ -1,203 +0,0 @@
-<mxStylesheet>
-  <add as="defaultVertex">
-    <add as="shape" value="label"/>
-    <add as="perimeter" value="rectanglePerimeter"/>
-    <add as="fontSize" value="12"/>
-    <add as="fontFamily" value="Helvetica"/>
-    <add as="align" value="center"/>
-    <add as="verticalAlign" value="middle"/>
-    <add as="fillColor" value="default"/>
-    <add as="strokeColor" value="default"/>
-    <add as="fontColor" value="default"/>
-  </add>
-  <add as="defaultEdge">
-    <add as="shape" value="connector"/>
-    <add as="labelBackgroundColor" value="default"/>
-    <add as="endArrow" value="classic"/>
-    <add as="fontSize" value="11"/>
-    <add as="fontFamily" value="Helvetica"/>
-    <add as="align" value="center"/>
-    <add as="verticalAlign" value="middle"/>
-    <add as="rounded" value="1"/>
-    <add as="strokeColor" value="default"/>
-    <add as="fontColor" value="default"/>
-  </add>
-  <add as="text">
-    <add as="fillColor" value="none"/>
-    <add as="gradientColor" value="none"/>
-    <add as="strokeColor" value="none"/>
-    <add as="align" value="left"/>
-    <add as="verticalAlign" value="top"/>
-  </add>
-  <add as="edgeLabel" extend="text">
-    <add as="labelBackgroundColor" value="default"/>
-    <add as="fontSize" value="11"/>
-  </add>
-  <add as="label">
-    <add as="fontStyle" value="1"/>
-    <add as="align" value="left"/>
-    <add as="verticalAlign" value="middle"/>
-    <add as="spacing" value="2"/>
-    <add as="spacingLeft" value="52"/>
-    <add as="imageWidth" value="42"/>
-    <add as="imageHeight" value="42"/>
-    <add as="rounded" value="1"/>
-  </add>
-  <add as="icon" extend="label">
-    <add as="align" value="center"/>
-    <add as="imageAlign" value="center"/>
-    <add as="verticalLabelPosition" value="bottom"/>
-    <add as="verticalAlign" value="top"/>
-    <add as="spacingTop" value="4"/>
-    <add as="labelBackgroundColor" value="default"/>
-    <add as="spacing" value="0"/>
-    <add as="spacingLeft" value="0"/>
-    <add as="spacingTop" value="6"/>
-    <add as="fontStyle" value="0"/>
-    <add as="imageWidth" value="48"/>
-    <add as="imageHeight" value="48"/>
-  </add>
-  <add as="swimlane">
-    <add as="shape" value="swimlane"/>
-    <add as="fontSize" value="12"/>
-    <add as="fontStyle" value="1"/>
-    <add as="startSize" value="23"/>
-  </add>
-  <add as="group">
-    <add as="verticalAlign" value="top"/>
-    <add as="fillColor" value="none"/>
-    <add as="strokeColor" value="none"/>
-    <add as="gradientColor" value="none"/>
-    <add as="pointerEvents" value="0"/>
-  </add>
-  <add as="ellipse">
-    <add as="shape" value="ellipse"/>
-    <add as="perimeter" value="ellipsePerimeter"/>
-  </add>
-  <add as="rhombus">
-    <add as="shape" value="rhombus"/>
-    <add as="perimeter" value="rhombusPerimeter"/>
-  </add>
-  <add as="triangle">
-    <add as="shape" value="triangle"/>
-    <add as="perimeter" value="trianglePerimeter"/>
-  </add>
-  <add as="line">
-    <add as="shape" value="line"/>
-    <add as="strokeWidth" value="4"/>
-    <add as="labelBackgroundColor" value="default"/>
-    <add as="verticalAlign" value="top"/>
-    <add as="spacingTop" value="8"/>
-  </add>
-  <add as="image">
-    <add as="shape" value="image"/>
-    <add as="labelBackgroundColor" value="default"/>
-    <add as="verticalAlign" value="top"/>
-    <add as="verticalLabelPosition" value="bottom"/>
-  </add>
-  <add as="roundImage" extend="image">
-    <add as="perimeter" value="ellipsePerimeter"/>
-  </add>
-  <add as="rhombusImage" extend="image">
-    <add as="perimeter" value="rhombusPerimeter"/>
-  </add>
-  <add as="arrow">
-    <add as="shape" value="arrow"/>
-    <add as="edgeStyle" value="none"/>
-    <add as="fillColor" value="default"/>
-  </add>
-  <add as="fancy">
-    <add as="shadow" value="1"/>
-    <add as="glass" value="1"/>
-  </add>
-  <add as="gray" extend="fancy">
-    <add as="gradientColor" value="#B3B3B3"/>
-    <add as="fillColor" value="#F5F5F5"/>
-    <add as="strokeColor" value="#666666"/>
-  </add>
-  <add as="blue" extend="fancy">
-    <add as="gradientColor" value="#7EA6E0"/>
-    <add as="fillColor" value="#DAE8FC"/>
-    <add as="strokeColor" value="#6C8EBF"/>
-  </add>
-  <add as="green" extend="fancy">
-    <add as="gradientColor" value="#97D077"/>
-    <add as="fillColor" value="#D5E8D4"/>
-    <add as="strokeColor" value="#82B366"/>
-  </add>
-  <add as="turquoise" extend="fancy">
-    <add as="gradientColor" value="#67AB9F"/>
-    <add as="fillColor" value="#D5E8D4"/>
-    <add as="strokeColor" value="#6A9153"/>
-  </add>
-  <add as="yellow" extend="fancy">
-    <add as="gradientColor" value="#FFD966"/>
-    <add as="fillColor" value="#FFF2CC"/>
-    <add as="strokeColor" value="#D6B656"/>
-  </add>
-  <add as="orange" extend="fancy">
-    <add as="gradientColor" value="#FFA500"/>
-    <add as="fillColor" value="#FFCD28"/>
-    <add as="strokeColor" value="#D79B00"/>
-  </add>
-  <add as="red" extend="fancy">
-    <add as="gradientColor" value="#EA6B66"/>
-    <add as="fillColor" value="#F8CECC"/>
-    <add as="strokeColor" value="#B85450"/>
-  </add>
-  <add as="pink" extend="fancy">
-    <add as="gradientColor" value="#B5739D"/>
-    <add as="fillColor" value="#E6D0DE"/>
-    <add as="strokeColor" value="#996185"/>
-  </add>
-  <add as="purple" extend="fancy">
-    <add as="gradientColor" value="#8C6C9C"/>
-    <add as="fillColor" value="#E1D5E7"/>
-    <add as="strokeColor" value="#9673A6"/>
-  </add>
-  <add as="plain-gray">
-    <add as="gradientColor" value="#B3B3B3"/>
-    <add as="fillColor" value="#F5F5F5"/>
-    <add as="strokeColor" value="#666666"/>
-  </add>
-  <add as="plain-blue">
-    <add as="gradientColor" value="#7EA6E0"/>
-    <add as="fillColor" value="#DAE8FC"/>
-    <add as="strokeColor" value="#6C8EBF"/>
-  </add>
-  <add as="plain-green">
-    <add as="gradientColor" value="#97D077"/>
-    <add as="fillColor" value="#D5E8D4"/>
-    <add as="strokeColor" value="#82B366"/>
-  </add>
-  <add as="plain-turquoise">
-    <add as="gradientColor" value="#67AB9F"/>
-    <add as="fillColor" value="#D5E8D4"/>
-    <add as="strokeColor" value="#6A9153"/>
-  </add>
-  <add as="plain-yellow">
-    <add as="gradientColor" value="#FFD966"/>
-    <add as="fillColor" value="#FFF2CC"/>
-    <add as="strokeColor" value="#D6B656"/>
-  </add>
-  <add as="plain-orange">
-    <add as="gradientColor" value="#FFA500"/>
-    <add as="fillColor" value="#FFCD28"/>
-    <add as="strokeColor" value="#D79B00"/>
-  </add>
-  <add as="plain-red">
-    <add as="gradientColor" value="#EA6B66"/>
-    <add as="fillColor" value="#F8CECC"/>
-    <add as="strokeColor" value="#B85450"/>
-  </add>
-  <add as="plain-pink">
-    <add as="gradientColor" value="#B5739D"/>
-    <add as="fillColor" value="#E6D0DE"/>
-    <add as="strokeColor" value="#996185"/>
-  </add>
-  <add as="plain-purple">
-    <add as="gradientColor" value="#8C6C9C"/>
-    <add as="fillColor" value="#E1D5E7"/>
-    <add as="strokeColor" value="#9673A6"/>
-  </add>
-</mxStylesheet>

Binary file not shown (deleted image; before: 9.9 KiB).

@@ -9,8 +9,8 @@ import styles from './style.module.scss';
 type ISize = { width: number; height: number };

 interface IProps {
-  width: number | string;
-  height: number | string;
+  width: number;
+  height: number;
   maxWidth?: number;
   isEditable?: boolean;
   onChange?: (arg: ISize) => void;

@@ -1,11 +1,6 @@
-import { Toast } from '@douyinfe/semi-ui';
-
 import { FILE_CHUNK_SIZE, FileApiDefinition } from '@think/domains';

-import axios from 'axios';
-import { url } from 'inspector';
 import { string } from 'lib0';
-import { timeout } from 'lib0/eventloop';
 import SparkMD5 from 'spark-md5';

 import { HttpClient } from './http-client';

@@ -40,6 +35,7 @@ const uploadFileToServer = (arg: {
 }) => {
   const { filename, file, md5, isChunk, chunkIndex, onUploadProgress } = arg;
   const api = isChunk ? 'uploadChunk' : 'upload';
+
   const formData = new FormData();
   formData.append('file', file);

@@ -55,7 +51,6 @@ const uploadFileToServer = (arg: {
       md5,
       chunkIndex,
     },
-    timeout: 30 * 1000,
     onUploadProgress: (progress) => {
       const percent = progress.loaded / progress.total;
       onUploadProgress && onUploadProgress(percent);

@@ -72,152 +67,68 @@ export const uploadFile = async (
     return onUploadProgress && onUploadProgress(Math.ceil(percent * 100));
   };

-  // S3 file upload support
-  if (!process.env.ENABLE_OSS_S3) {
-    const filename = file.name;
-    console.debug('OSS object storage is not enabled, using the local upload scheme');
-    if (file.size > FILE_CHUNK_SIZE) {
-      onTooLarge && onTooLarge();
-    }
+  const filename = file.name;

-    if (file.size <= FILE_CHUNK_SIZE) {
-      const spark = new SparkMD5();
-      spark.append(file);
-      spark.append(file.lastModified);
-      spark.append(file.type);
-      const md5 = spark.end();
-      const url = await uploadFileToServer({ filename, file, md5, onUploadProgress: wraponUploadProgress });
-      return url;
-    } else {
-      const { chunks, md5 } = await splitBigFile(file);
-      const unitPercent = 1 / chunks.length;
-      const progressMap = {};
+  if (file.size > FILE_CHUNK_SIZE * 5) {
+    onTooLarge && onTooLarge();
+  }

-      let url = await HttpClient.request<string | undefined>({
-        method: FileApiDefinition.initChunk.method,
-        url: FileApiDefinition.initChunk.client(),
+  if (file.size <= FILE_CHUNK_SIZE) {
+    const spark = new SparkMD5();
+    spark.append(file);
+    spark.append(file.lastModified);
+    spark.append(file.type);
+    const md5 = spark.end();
+    const url = await uploadFileToServer({ filename, file, md5, onUploadProgress: wraponUploadProgress });
+    return url;
+  } else {
+    const { chunks, md5 } = await splitBigFile(file);
+    const unitPercent = 1 / chunks.length;
+    const progressMap = {};

+    let url = await HttpClient.request<string | undefined>({
+      method: FileApiDefinition.initChunk.method,
+      url: FileApiDefinition.initChunk.client(),
+      params: {
+        filename,
+        md5,
+      },
+    });

+    if (!url) {
+      await Promise.all(
+        chunks.map((chunk, index) => {
+          return uploadFileToServer({
+            filename,
+            file: chunk,
+            chunkIndex: index + 1,
+            md5,
+            isChunk: true,
+            onUploadProgress: (progress) => {
+              progressMap[index] = progress * unitPercent;
+              wraponUploadProgress(
+                Math.min(
+                  Object.keys(progressMap).reduce((a, c) => {
+                    return (a += progressMap[c]);
+                  }, 0),
+                  // leave the remaining 5% for the merge step
+                  0.95
+                )
+              );
+            },
+          });
+        })
+      );
+      url = await HttpClient.request({
+        method: FileApiDefinition.mergeChunk.method,
+        url: FileApiDefinition.mergeChunk.client(),
         params: {
           filename,
           md5,
         },
       });

-      if (!url) {
-        await Promise.all(
-          chunks.map((chunk, index) => {
-            return uploadFileToServer({
-              filename,
-              file: chunk,
-              chunkIndex: index + 1,
-              md5,
-              isChunk: true,
-              onUploadProgress: (progress) => {
-                progressMap[index] = progress * unitPercent;
-                wraponUploadProgress(
-                  Math.min(
-                    Object.keys(progressMap).reduce((a, c) => {
-                      return (a += progressMap[c]);
-                    }, 0),
-                    // leave the remaining 5% for the merge step
-                    0.95
-                  )
-                );
-              },
-            });
-          })
-        );
-        url = await HttpClient.request({
-          method: FileApiDefinition.mergeChunk.method,
-          url: FileApiDefinition.mergeChunk.client(),
-          params: {
-            filename,
-            md5,
-          },
-        });
-      }
-      wraponUploadProgress(1);
-      return url;
-    }
-  }
-  // S3 flow: the backend signs, the frontend uploads the file directly
-  else {
-    // compute the file's md5 on the frontend
-    console.log('computing md5 for file {' + file.name + '} ...');
-    const { chunks, md5 } = await splitBigFile(file);
-    console.log('md5 of file {' + file.name + '}: ' + md5);
-    const filename = file.name;
-
-    // ask the backend whether this file already exists
-    const res = await HttpClient.request({
-      method: FileApiDefinition.ossSign.method,
-      url: FileApiDefinition.ossSign.client(),
-      data: { filename, md5, fileSize: file.size },
-    });
-    // the backend reports that the file already exists
-    if (res['isExist']) {
-      Toast.info('Instant upload succeeded: the file already exists!');
-      return res['objectUrl'];
-    } else {
-      //console.log('file does not exist, upload required');
-      // the backend considers the file small: the frontend PUTs it directly
-      if (!res['MultipartUpload']) {
-        console.log('uploading the file with a direct PUT from the frontend');
-        const signUrl = res['signUrl'];
-        await axios.put(signUrl, file, {
-          timeout: 120 * 1000,
-          onUploadProgress: (process) => {
-            const uploadLoaded = process.loaded;
-            const uploadTotal = file.size;
-            const uploadPercent = uploadLoaded / uploadTotal;
-            wraponUploadProgress(uploadPercent);
-          },
-        });
-        const upres = await HttpClient.request({
-          method: FileApiDefinition.ossSign.method,
-          url: FileApiDefinition.ossSign.client(),
-          data: { filename, md5, fileSize: file.size },
-        });
-        return upres['objectUrl'];
-      }
-      // the frontend enters the multipart upload flow
-      else {
-        const upload_id = res['uploadId'];
-        // console.log('multipart upload, upload_id: ' + upload_id);
-        const MultipartUpload = [];
-        for (let index = 0; index < chunks.length; index++) {
-          const chunk = chunks[index];
-          const res = await HttpClient.request({
-            method: FileApiDefinition.ossChunk.method,
-            url: FileApiDefinition.ossChunk.client(),
-            data: { filename, md5, uploadId: upload_id, chunkIndex: index + 1 },
-          });
-          // upload the file chunk to S3
-          // use a raw axios request so it does not go through the interceptors
-          const upload_res = await axios.put(res['signUrl'], chunk, {
-            timeout: 120 * 1000,
-            onUploadProgress: (process) => {
-              const uploadLoaded = process.loaded + FILE_CHUNK_SIZE * index;
-              const uploadTotal = file.size;
-              const uploadPercent = uploadLoaded / uploadTotal;
-              //console.log(uploadLoaded, uploadTotal, uploadPercent);
-              wraponUploadProgress(uploadPercent);
-            },
-          });
-          const upload_etag = upload_res.headers['etag'];
-          const response_part = { PartNumber: index + 1, ETag: upload_etag };
-          MultipartUpload.push(response_part);
-          //console.log('chunk {' + (index + 1) + '} uploaded, etag: ' + upload_etag);
-        }
-        // all chunks have been uploaded
-        // ask the backend to merge (complete) the upload
-        const payload = { filename, md5, uploadId: upload_id, MultipartUpload };
-        const upres = await HttpClient.request({
-          method: FileApiDefinition.ossMerge.method,
-          url: FileApiDefinition.ossMerge.client(),
-          data: payload,
-        });
-        return '' + upres;
-      }
-    }
-      }
     }
+    wraponUploadProgress(1);
+    return url;
   }
 };

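Aside: the removed branch above is a presigned direct-upload loop: ask the backend to sign each part, PUT the part straight to the object store with a raw request, collect the ETag from the response headers, and finally send the {PartNumber, ETag} list back so the backend can complete the multipart upload. A minimal sketch of just that loop, assuming the backend already returned an uploadId and can sign one URL per part (the helper names here are illustrative, not the repo's API):

import axios from 'axios';

type SignPart = (partNumber: number) => Promise<string>; // returns a presigned PUT URL
type CompletedPart = { PartNumber: number; ETag: string };

// Upload each chunk to its presigned URL and collect the ETags
// that the final CompleteMultipartUpload call needs.
export async function uploadParts(chunks: Blob[], signPart: SignPart): Promise<CompletedPart[]> {
  const parts: CompletedPart[] = [];
  for (let index = 0; index < chunks.length; index++) {
    const signUrl = await signPart(index + 1); // ask the backend to sign part N
    const res = await axios.put(signUrl, chunks[index]); // raw PUT, bypassing app interceptors
    parts.push({ PartNumber: index + 1, ETag: res.headers['etag'] }); // the store returns the part's ETag
  }
  return parts; // hand these back to the backend so it can complete the upload
}
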
@@ -39,7 +39,6 @@ HttpClient.interceptors.response.use(
     isBrowser && Toast.error(data.data.message);
     return null;
   }
-  // for a 204 response, just return data.headers directly

   const res = data.data;

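Aside: the removed comment only describes the intent (return the headers when the response is a 204); no implementation is shown in this compare. One way that idea could look in an axios response interceptor, purely as a sketch and not the repo's actual code:

import axios from 'axios';

const client = axios.create();

client.interceptors.response.use((response) => {
  // A 204 has no body, so hand back the headers instead of the (empty) data.
  if (response.status === 204) {
    return response.headers;
  }
  return response.data;
});
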
@@ -88,8 +88,9 @@ export const ImageWrapper = ({ editor, node, updateAttributes }) => {
     <NodeViewWrapper style={{ textAlign, fontSize: 0, maxWidth: '100%' }}>
       <Resizeable
         className={'render-wrapper'}
-        width="100%"
-        height="100%"
+        width={width || maxWidth}
+        height={height}
+        maxWidth={maxWidth}
         isEditable={isEditable}
         onChangeEnd={onResize}
       >

@@ -105,9 +106,11 @@ export const ImageWrapper = ({ editor, node, updateAttributes }) => {
           </Spin>
         </div>
       ) : (
-        <div className={styles.wrap} title="parent_image_div">
+        <div className={styles.wrap}>
           <div
             style={{
+              height: '100%',
+              maxHeight: '100%',
               padding: 24,
               overflow: 'hidden',
               display: 'flex',

@@ -117,7 +120,7 @@ export const ImageWrapper = ({ editor, node, updateAttributes }) => {
              transition: `all ease-in-out .3s`,
            }}
          >
-            <LazyLoadImage src={src} alt={alt} width={'100%'} />
+            <LazyLoadImage src={src} alt={alt} width={'100%'} height={'100%'} />
          </div>

          <div className={styles.handlerWrap}>

@@ -1,6 +1,5 @@
 import FileSaver from 'file-saver';

 export function download(url, name) {
-  if (url.startsWith('http://') || url.startsWith('https://')) window.open(url, 'File download...');
-  else FileSaver.saveAs(url, name);
+  FileSaver.saveAs(url, name);
 }

@@ -1,4 +1,3 @@
-
 export declare const FileApiDefinition: {
   /**
    * upload a file

|
||||||
server: "merge/chunk";
|
server: "merge/chunk";
|
||||||
client: () => string;
|
client: () => string;
|
||||||
};
|
};
|
||||||
|
|
||||||
/**
|
|
||||||
* 后端签名生成需要上传的文件
|
|
||||||
*/
|
|
||||||
ossSign:{
|
|
||||||
method: "post";
|
|
||||||
server: "upload/ossSign";
|
|
||||||
client: () => string;
|
|
||||||
};
|
|
||||||
|
|
||||||
/**
|
|
||||||
* 后端签名上传分片
|
|
||||||
*/
|
|
||||||
ossChunk:{
|
|
||||||
method: "post";
|
|
||||||
server: "upload/ossChunk";
|
|
||||||
client: () => string;
|
|
||||||
};
|
|
||||||
|
|
||||||
/**
|
|
||||||
* 后端签名上传结束
|
|
||||||
*/
|
|
||||||
ossMerge:{
|
|
||||||
method: "post";
|
|
||||||
server: "upload/ossMerge";
|
|
||||||
client: () => string;
|
|
||||||
};
|
|
||||||
|
|
||||||
};
|
};
|
||||||
export declare const FILE_CHUNK_SIZE: number;
|
export declare const FILE_CHUNK_SIZE: number;
|
||||||
|
|
|
@@ -33,23 +33,6 @@ exports.FileApiDefinition = {
         method: 'post',
         server: 'merge/chunk',
         client: function () { return '/file/merge/chunk'; }
-    },
-    ossSign: {
-        method: "post",
-        server: "upload/ossSign",
-        client: function () { return '/file/upload/ossSign'; }
-    },
-    ossChunk: {
-        method: "post",
-        server: "upload/ossChunk",
-        client: function () { return '/file/upload/ossChunk'; }
-    },
-    ossMerge: {
-        method: "post",
-        server: "upload/ossMerge",
-        client: function () { return '/file/upload/ossMerge'; }
-    },
+    }
 };
-// set the file chunk size to 8 MB
-// MinIO and other OSS providers enforce a minimum part size
-exports.FILE_CHUNK_SIZE = 8 * 1024 * 1024;
+exports.FILE_CHUNK_SIZE = 2 * 1024 * 1024;

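Aside on the chunk-size change: S3-compatible stores (including MinIO) reject multipart parts smaller than 5 MiB except for the final part, which is why the removed side raises FILE_CHUNK_SIZE from 2 MiB to 8 MiB. A small illustrative splitter (the repo's own splitBigFile is not shown in this compare; this is only a sketch):

const FILE_CHUNK_SIZE = 8 * 1024 * 1024; // 8 MiB, comfortably above the 5 MiB multipart minimum

// Slice a File/Blob into FILE_CHUNK_SIZE-sized chunks; only the last chunk may be smaller.
export function splitFile(file: Blob, chunkSize: number = FILE_CHUNK_SIZE): Blob[] {
  const chunks: Blob[] = [];
  for (let offset = 0; offset < file.size; offset += chunkSize) {
    chunks.push(file.slice(offset, offset + chunkSize));
  }
  return chunks;
}
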
@@ -18,8 +18,6 @@
     "test:e2e": "jest --config ./test/jest-e2e.json"
   },
   "dependencies": {
-    "@aws-sdk/client-s3": "^3.645.0",
-    "@aws-sdk/s3-request-presigner": "^3.645.0",
     "@hocuspocus/server": "^1.0.0-alpha.91",
     "@hocuspocus/transformer": "^1.0.0-alpha.18",
     "@nestjs/common": "^8.0.0",

@@ -63,7 +61,6 @@
     "reflect-metadata": "^0.1.13",
     "rimraf": "^3.0.2",
     "rxjs": "^7.2.0",
-    "spark-md5": "^3.0.2",
     "typeorm": "^0.2.41",
     "ua-parser-js": "^1.0.2",
     "validator": "^13.7.0",

@@ -1,10 +1,10 @@
-import { Body, Controller, Post, Query, UploadedFile, UseGuards, UseInterceptors } from '@nestjs/common';
+import { Controller, Post, Query, UploadedFile, UseGuards, UseInterceptors } from '@nestjs/common';
 import { FileInterceptor } from '@nestjs/platform-express';

 import { FILE_CHUNK_SIZE, FileApiDefinition } from '@think/domains';

 import { JwtGuard } from '@guard/jwt.guard';
-import { FileMerge, FileQuery } from '@helpers/file.helper/oss.client';
+import { FileQuery } from '@helpers/file.helper/oss.client';
 import { FileService } from '@services/file.service';

 @Controller('file')

@@ -64,31 +64,4 @@ export class FileController {
   mergeChunk(@Query() query: FileQuery) {
     return this.fileService.mergeChunk(query);
   }
-
-  /**
-   * ask the backend to sign a direct upload from the frontend
-   */
-  @Post(FileApiDefinition.ossSign.server)
-  @UseGuards(JwtGuard)
-  ossSign(@Body() data: FileQuery) {
-    return this.fileService.ossSign(data);
-  }
-
-  /**
-   * ask the backend to sign an uploaded chunk
-   */
-  @Post(FileApiDefinition.ossChunk.server)
-  @UseGuards(JwtGuard)
-  ossChunk(@Body() data: FileQuery) {
-    return this.fileService.ossChunk(data);
-  }
-
-  /**
-   * ask the backend to merge the chunked upload
-   */
-  @Post(FileApiDefinition.ossMerge.server)
-  @UseGuards(JwtGuard)
-  ossMerge(@Body() data: FileMerge) {
-    return this.fileService.ossMerge(data);
-  }
 }

@@ -1,14 +1,19 @@
 import { ConfigService } from '@nestjs/config';

+import { AliyunOssClient } from './aliyun.client';
 import { LocalOssClient } from './local.client';
 import { OssClient } from './oss.client';
-import { S3OssClient } from './s3.client';
+import { TencentOssClient } from './tencent.client';

 export { OssClient };

 export const getOssClient = (configService: ConfigService): OssClient => {
-  if (configService.get('oss.s3.enable')) {
-    return new S3OssClient(configService);
+  if (configService.get('oss.tencent.enable')) {
+    return new TencentOssClient(configService);
+  }
+
+  if (configService.get('oss.aliyun.enable')) {
+    return new AliyunOssClient(configService);
   }

   return new LocalOssClient(configService);

@@ -1,7 +1,6 @@
 import { FILE_CHUNK_SIZE } from '@think/domains';

 import * as fs from 'fs-extra';
-import Redis from 'ioredis';
 import * as path from 'path';

 import { BaseOssClient, FileQuery } from './oss.client';

@@ -21,8 +20,6 @@ export const pipeWriteStream = (filepath, writeStream): Promise<void> => {
 };

 export class LocalOssClient extends BaseOssClient {
-  private redis: Redis | null;
-
   /**
    * file storage path
    * @param md5

|
||||||
return { relative: filepath.replace(FILE_ROOT_PATH, FILE_DEST), absolute: filepath };
|
return { relative: filepath.replace(FILE_ROOT_PATH, FILE_DEST), absolute: filepath };
|
||||||
}
|
}
|
||||||
|
|
||||||
async setRedis(redis: Redis) {
|
|
||||||
this.redis = redis;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* 将文件存储的相对路径拼接为可访问 URL
|
* 将文件存储的相对路径拼接为可访问 URL
|
||||||
* @param serverRoot
|
* @param serverRoot
|
||||||
|
|
|
@@ -1,53 +1,16 @@
 import { ConfigService } from '@nestjs/config';

-import exp from 'constants';
-import Redis from 'ioredis';
-
 export type FileQuery = {
   filename: string;
   md5: string;
   chunkIndex?: number;
-  fileSize?: number;
-  uploadId?: string;
-};
-
-export type FileMerge = {
-  filename: string;
-  md5: string;
-  uploadId: string;
-  MultipartUpload: any;
-};
-
-export type chunkUpload = {
-  uploadId: string;
-  chunkIndex: number;
-  etag: string;
-};
-
-export type ossSignReponse = {
-  MultipartUpload: boolean;
-  isExist: boolean;
-  uploadId: string | null;
-  objectKey: string;
-  objectUrl: string | null;
-  signUrl: string | null;
-};
-
-export type ossChunkResponse = {
-  signUrl: string;
-  uploadId: string;
-  chunkIndex: number;
 };

 export abstract class OssClient {
-  [x: string]: any;
   abstract uploadFile(file: Express.Multer.File, query: FileQuery): Promise<string>;
   abstract initChunk(query: FileQuery): Promise<void | string>;
   abstract uploadChunk(file: Express.Multer.File, query: FileQuery): Promise<void>;
   abstract mergeChunk(query: FileQuery): Promise<string>;
-  abstract ossSign(query: FileQuery): Promise<ossSignReponse>;
-  abstract ossChunk(query: FileQuery): Promise<ossChunkResponse>;
-  abstract ossMerge(query: FileMerge): Promise<string>;
 }

 export class BaseOssClient implements OssClient {

@@ -76,24 +39,4 @@ export class BaseOssClient implements OssClient {
   mergeChunk(query: FileQuery): Promise<string> {
     throw new Error('Method not implemented.');
   }
-
-  // eslint-disable-next-line @typescript-eslint/no-unused-vars
-  ossSign(query: FileQuery): Promise<ossSignReponse> {
-    throw new Error('Method not implemented.');
-  }
-
-  // eslint-disable-next-line @typescript-eslint/no-unused-vars
-  ossMerge(query: FileMerge): Promise<string> {
-    throw new Error('Method not implemented.');
-  }
-
-  // eslint-disable-next-line @typescript-eslint/no-unused-vars
-  ossChunk(query: FileQuery): Promise<ossChunkResponse> {
-    throw new Error('Method not implemented.');
-  }
-
-  // eslint-disable-next-line @typescript-eslint/no-unused-vars
-  setRedis(redis: Redis): Promise<void> {
-    throw new Error('Method not implemented.');
-  }
 }

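Aside: the removed ossSignReponse type drives a three-way decision on the client (file already exists, small file via one presigned PUT, large file via multipart). A compact sketch of that branching, with the type copied inline; the UploadPlan shape and planUpload name are illustrative only:

type OssSignResponse = {
  MultipartUpload: boolean;
  isExist: boolean;
  uploadId: string | null;
  objectKey: string;
  objectUrl: string | null;
  signUrl: string | null;
};

type UploadPlan =
  | { kind: 'already-uploaded'; url: string }
  | { kind: 'single-put'; signUrl: string }
  | { kind: 'multipart'; uploadId: string };

// Translate the backend's signing response into the action the client should take.
export function planUpload(res: OssSignResponse): UploadPlan {
  if (res.isExist && res.objectUrl) {
    return { kind: 'already-uploaded', url: res.objectUrl }; // "instant upload": nothing to send
  }
  if (!res.MultipartUpload && res.signUrl) {
    return { kind: 'single-put', signUrl: res.signUrl }; // small file: one presigned PUT
  }
  return { kind: 'multipart', uploadId: res.uploadId as string }; // large file: sign each part
}
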
@@ -1,306 +0,0 @@
-import { FILE_CHUNK_SIZE } from '@think/domains';
-
-import {
-  CompleteMultipartUploadCommand,
-  CreateMultipartUploadCommand,
-  GetObjectCommand,
-  HeadObjectCommand,
-  PutObjectCommand,
-  S3Client,
-  UploadPartCommand,
-} from '@aws-sdk/client-s3';
-import { getSignedUrl } from '@aws-sdk/s3-request-presigner';
-import Redis from 'ioredis';
-
-import { BaseOssClient, FileMerge, FileQuery, ossChunkResponse, ossSignReponse } from './oss.client';
-
-export class S3OssClient extends BaseOssClient {
-  private client: S3Client | null;
-  private bucket: string | null;
-  private redis: Redis | null;
-
-  async setRedis(redis: Redis) {
-    this.redis = redis;
-  }
-
-  /**
-   * build the S3 client
-   * @returns
-   */
-  private ensureS3OssClient(): S3Client {
-    if (this.client) {
-      return this.client;
-    }
-
-    const config = this.configService.get('oss.s3.config');
-    try {
-      this.bucket = config.bucket;
-
-      if (config.cloudisp == 'minio') {
-        this.client = new S3Client({
-          endpoint: config.endpoint,
-          region: config.region,
-          forcePathStyle: config.forcePathStyle,
-          credentials: {
-            accessKeyId: config.accessKeyId,
-            secretAccessKey: config.secretAccessKey,
-          },
-        });
-      }
-
-      if (config.cloudisp == 's3') {
-        this.client = new S3Client({
-          region: config.region,
-          forcePathStyle: config.forcePathStyle,
-          credentials: {
-            accessKeyId: config.accessKeyId,
-            secretAccessKey: config.secretAccessKey,
-          },
-        });
-      }
-
-      if (config.cloudisp == 'aliyun') {
-        this.client = new S3Client({
-          region: config.region,
-          endpoint: 'https://' + config.region + '.aliyuncs.com',
-          // Aliyun does not support virtual path style; this must be false here
-          forcePathStyle: false,
-          credentials: {
-            accessKeyId: config.accessKeyId,
-            secretAccessKey: config.secretAccessKey,
-          },
-        });
-      }
-
-      if (config.cloudisp == 'tencent') {
-        this.client = new S3Client({
-          region: config.region,
-          endpoint: 'https://cos.' + config.region + '.myqcloud.com',
-          // virtual path style is not supported; this must be false here
-          forcePathStyle: false,
-          credentials: {
-            accessKeyId: config.accessKeyId,
-            secretAccessKey: config.secretAccessKey,
-          },
-        });
-      }
-
-      return this.client;
-    } catch (err) {
-      console.log('Unable to start the S3 storage service, please check that the S3 configuration is correct', err.message);
-    }
-  }
-
-  /**
-   * get the object name used in OSS
-   * @param md5
-   * @param filename
-   * @returns
-   */
-  private getInOssFileName(md5, filename) {
-    return `think/${md5}/${filename}`;
-  }
-
-  private async getObjectUrl(bucket, key) {
-    this.ensureS3OssClient();
-    const command = new GetObjectCommand({ Bucket: bucket, Key: key });
-    const signUrl = await getSignedUrl(this.client, command);
-    return signUrl.split('?')[0];
-  }
-
-  /**
-   * check whether the file is already stored in OSS
-   * @param md5
-   * @param filename
-   * @returns
-   */
-  private async checkIfAlreadyInOss(md5, filename) {
-    this.ensureS3OssClient();
-    const inOssFileName = this.getInOssFileName(md5, filename);
-    const command = new HeadObjectCommand({ Bucket: this.bucket, Key: inOssFileName });
-    try {
-      await this.client.send(command);
-      return await this.getObjectUrl(this.bucket, inOssFileName);
-    } catch (err) {
-      return false;
-    }
-  }
-
-  /**
-   * upload a small file
-   * @param file
-   * @param query
-   * @returns
-   */
-  async uploadFile(file: Express.Multer.File, query: FileQuery): Promise<string> {
-    this.ensureS3OssClient();
-    const { filename, md5 } = query;
-    const maybeOssURL = await this.checkIfAlreadyInOss(md5, filename);
-    if (maybeOssURL) {
-      return maybeOssURL;
-    }
-
-    const inOssFileName = this.getInOssFileName(md5, filename);
-    const command = new PutObjectCommand({
-      Bucket: this.bucket,
-      Key: inOssFileName,
-      Body: file.buffer,
-    });
-    await this.client.send(command);
-    return await this.getObjectUrl(this.bucket, inOssFileName);
-  }
-
-  /**
-   * initialize a multipart upload
-   * @param file
-   * @param query
-   * @returns
-   */
-  async initChunk(query: FileQuery): Promise<string | void> {
-    const { md5, filename } = query;
-    this.ensureS3OssClient();
-
-    const inOssFileName = this.getInOssFileName(md5, filename);
-    const maybeOssURL = await this.checkIfAlreadyInOss(md5, filename);
-
-    if (maybeOssURL) {
-      return maybeOssURL as string;
-    }
-    const command = new CreateMultipartUploadCommand({ Bucket: this.bucket, Key: inOssFileName });
-    const response = await this.client.send(command);
-    const upload_id = response['UploadId'];
-    // use redis to store the upload_id
-    await this.redis.del('think:oss:chunk:' + md5);
-    await this.redis.del('think:oss:chunk:' + md5 + '*');
-    this.redis.set('think:oss:chunk:' + md5, upload_id);
-    return '';
-  }
-
-  /**
-   * upload a chunk
-   * @param file
-   * @param query
-   * @returns
-   */
-  async uploadChunk(file: Express.Multer.File, query: FileQuery): Promise<void> {
-    const { md5, filename, chunkIndex } = query;
-
-    if (!('chunkIndex' in query)) {
-      throw new Error('chunkIndex is required');
-    }
-
-    this.ensureS3OssClient();
-    const inOssFileName = this.getInOssFileName(md5, filename);
-    const upload_id = await this.redis.get('think:oss:chunk:' + md5);
-
-    const command = new UploadPartCommand({
-      Body: file.buffer,
-      Bucket: this.bucket,
-      Key: inOssFileName,
-      PartNumber: chunkIndex,
-      UploadId: upload_id,
-    });
-    const response = await this.client.send(command);
-    this.redis.set(
-      'think:oss:chunk:' + md5 + ':' + chunkIndex,
-      JSON.stringify({ PartNumber: chunkIndex, ETag: response['ETag'] })
-    );
-  }
-
-  /**
-   * merge the chunks
-   * @param query
-   * @returns
-   */
-  async mergeChunk(query: FileQuery): Promise<string> {
-    const { filename, md5 } = query;
-    const inOssFileName = this.getInOssFileName(md5, filename);
-    const upload_id = await this.redis.get('think:oss:chunk:' + md5);
-    const etags = await this.redis.keys('think:oss:chunk:' + md5 + ':*');
-    const MultipartUpload = { Parts: [] };
-    for (let i = 1; i <= etags.length; i++) {
-      const obj = JSON.parse(await this.redis.get('think:oss:chunk:' + md5 + ':' + i));
-      MultipartUpload.Parts.push(obj);
-    }
-    const command = new CompleteMultipartUploadCommand({
-      Bucket: this.bucket,
-      Key: inOssFileName,
-      UploadId: upload_id,
-      MultipartUpload: MultipartUpload,
-    });
-
-    await this.client.send(command);
-    await this.redis.del('think:oss:chunk:' + md5);
-    await this.redis.del('think:oss:chunk:' + md5 + '*');
-    return await this.getObjectUrl(this.bucket, inOssFileName);
-  }
-
-  async ossSign(query: FileQuery): Promise<ossSignReponse> {
-    const { filename, md5, fileSize } = query;
-    const inOssFileName = await this.getInOssFileName(md5, filename);
-    this.ensureS3OssClient();
-    const objectUrl = await this.checkIfAlreadyInOss(md5, filename);
-    if (objectUrl) {
-      return {
-        signUrl: null,
-        MultipartUpload: false,
-        uploadId: null,
-        objectKey: inOssFileName,
-        isExist: true,
-        objectUrl: objectUrl,
-      };
-    }
-    if (fileSize <= FILE_CHUNK_SIZE) {
-      const command = new PutObjectCommand({ Bucket: this.bucket, Key: inOssFileName });
-      const signUrl = await getSignedUrl(this.client, command);
-      return {
-        signUrl: signUrl,
-        MultipartUpload: false,
-        uploadId: null,
-        objectKey: inOssFileName,
-        isExist: false,
-        objectUrl: null,
-      };
-    } else {
-      const command = new CreateMultipartUploadCommand({ Bucket: this.bucket, Key: inOssFileName });
-      const response = await this.client.send(command);
-      const upload_id = response['UploadId'];
-      return {
-        signUrl: null,
-        MultipartUpload: true,
-        uploadId: upload_id,
-        objectKey: inOssFileName,
-        isExist: false,
-        objectUrl: null,
-      };
-    }
-  }
-
-  async ossChunk(query: FileQuery): Promise<ossChunkResponse> {
-    this.ensureS3OssClient();
-    const { filename, md5 } = query;
-    const inOssFileName = await this.getInOssFileName(md5, filename);
-    const command = new UploadPartCommand({
-      UploadId: query.uploadId,
-      Bucket: this.bucket,
-      Key: inOssFileName,
-      PartNumber: query.chunkIndex,
-    });
-    const signUrl = await getSignedUrl(this.client, command);
-    return { signUrl: signUrl, uploadId: query.uploadId, chunkIndex: query.chunkIndex };
-  }
-
-  async ossMerge(query: FileMerge): Promise<string> {
-    this.ensureS3OssClient();
-    const { filename, md5 } = query;
-    const inOssFileName = await this.getInOssFileName(md5, filename);
-    const command = new CompleteMultipartUploadCommand({
-      Bucket: this.bucket,
-      Key: inOssFileName,
-      UploadId: query.uploadId,
-      MultipartUpload: { Parts: query.MultipartUpload },
-    });
-    await this.client.send(command);
-    return await this.getObjectUrl(this.bucket, inOssFileName);
-  }
-}

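Aside: the deleted client keeps its multipart state in Redis, with 'think:oss:chunk:<md5>' holding the UploadId and 'think:oss:chunk:<md5>:<partNumber>' holding each part's {PartNumber, ETag}. (Note that Redis DEL takes literal key names, so the del(...md5 + '*') calls above do not actually clear the per-part keys.) A small sketch of reading that state back in ascending part order before completing the upload, assuming the same key scheme; collectParts and partCount are illustrative names:

import Redis from 'ioredis';

type CompletedPart = { PartNumber: number; ETag: string };

// Collect the recorded parts 1..partCount in ascending order, as CompleteMultipartUpload requires.
export async function collectParts(redis: Redis, md5: string, partCount: number): Promise<CompletedPart[]> {
  const parts: CompletedPart[] = [];
  for (let partNumber = 1; partNumber <= partCount; partNumber++) {
    const raw = await redis.get(`think:oss:chunk:${md5}:${partNumber}`);
    if (!raw) throw new Error(`missing ETag record for part ${partNumber}`);
    parts.push(JSON.parse(raw));
  }
  return parts;
}
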
@@ -18,7 +18,7 @@ export const buildRedis = (db: RedisDBEnum): Promise<Redis> => {
     ...redisConfig,
     showFriendlyErrorStack: true,
     lazyConnect: true,
-    db: redisConfig.db,
+    db,
   });
   redis.on('ready', () => {
     resolve(redis);

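Aside: this one-line change makes buildRedis honor its `db` parameter instead of always using redisConfig.db, so different subsystems can land in different logical Redis databases. A standalone ioredis illustration of the same idea (not the repo's helper; the database numbers are examples only):

import Redis from 'ioredis';

// Each caller picks its own logical database; everything else comes from shared config.
function connect(db: number): Redis {
  return new Redis({ host: '127.0.0.1', port: 6379, lazyConnect: true, db });
}

const viewDb = connect(1); // e.g. page-view counters
const fileDb = connect(2); // e.g. upload bookkeeping
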
@@ -1,40 +1,14 @@
-/*
- * @Author: SudemQaQ
- * @Date: 2024-09-09 10:28:02
- * @email: mail@szhcloud.cn
- * @Blog: https://blog.szhcloud.cn
- * @github: https://github.com/sang8052
- * @LastEditors: SudemQaQ
- * @LastEditTime: 2024-09-10 07:46:50
- * @Description:
- */
 import { Injectable } from '@nestjs/common';
 import { ConfigService } from '@nestjs/config';

-import { RedisDBEnum } from '@constants/*';
 import { getOssClient, OssClient } from '@helpers/file.helper';
-import { buildRedis } from '@helpers/redis.helper';
-import Redis from 'ioredis';

 @Injectable()
 export class FileService {
-  [x: string]: any;
   private ossClient: OssClient;
-  private redis: Redis;

   constructor(private readonly configService: ConfigService) {
     this.ossClient = getOssClient(this.configService);
-    this.buildRedis();
-  }
-
-  private async buildRedis() {
-    try {
-      this.redis = await buildRedis(RedisDBEnum.view);
-      console.log('[think] file service started successfully');
-      this.ossClient.setRedis(this.redis);
-    } catch (e) {
-      console.error(`[think] file service startup error: "${e.message}"`);
-    }
   }

   async uploadFile(file, query) {

@@ -52,16 +26,4 @@ export class FileService {
   async mergeChunk(query) {
     return this.ossClient.mergeChunk(query);
   }
-
-  async ossSign(query) {
-    return this.ossClient.ossSign(query);
-  }
-
-  async ossChunk(query) {
-    return this.ossClient.ossChunk(query);
-  }
-
-  async ossMerge(query) {
-    return this.ossClient.ossMerge(query);
-  }
 }

pnpm-lock.yaml (1237 changed lines): file diff suppressed because it is too large.