Merge pull request #280 from sang8052/main

Integrate S3 file upload
fantasticit 2024-09-10 17:13:14 +08:00 committed by GitHub
commit 7ebbdabd76
20 changed files with 2101 additions and 96 deletions

View File

@@ -48,6 +48,7 @@ db:
     host: '127.0.0.1'
     port: '6379'
     password: 'root'
+    db: 0

 # oss file storage service
 oss:
@@ -55,22 +56,19 @@ oss:
   enable: true
   # change to the server address in production, e.g. https://api.codingit.cn
   server: 'http://localhost:5002'
-  # vendor sdk configs below; do not change the field names, only fill in values
-  tencent:
-    enable: false
-    config:
-      SecretId: ''
-      SecretKey: ''
-      Bucket: ''
-      Region: ''
-  aliyun:
-    enable: false
-    config:
-      accessKeyId: ''
-      accessKeySecret: ''
-      bucket: ''
-      https: true
-      region: ''
+  s3:
+    enable: true
+    config:
+      # storage provider, one of: minio, s3, aliyun, tencent
+      cloudisp: 'minio'
+      accessKeyId: ''
+      secretAccessKey: ''
+      bucket: ''
+      region: ''
+      # only used when cloudisp is minio/s3; leave empty for other providers
+      forcePathStyle: false
+      # only used when cloudisp is minio; leave empty for other providers
+      endpoint: ''

 # jwt configuration
 jwt:
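
For reference, a minimal sketch of a filled-in s3 block for a self-hosted MinIO deployment; every value below is a placeholder/assumption, not part of this commit:

    oss:
      enable: true
      server: 'http://localhost:5002'
      s3:
        enable: true
        config:
          # provider: one of minio, s3, aliyun, tencent
          cloudisp: 'minio'
          accessKeyId: 'minio-access-key'      # placeholder credential
          secretAccessKey: 'minio-secret-key'  # placeholder credential
          bucket: 'think'
          region: 'us-east-1'                  # MinIO accepts an arbitrary region string
          forcePathStyle: true                 # most MinIO setups need path-style addressing
          endpoint: 'http://127.0.0.1:9000'    # MinIO API endpoint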

View File

@@ -23,7 +23,7 @@ const nextConfig = semi({
   env: {
     SERVER_API_URL: config.client.apiUrl,
     COLLABORATION_API_URL: config.client.collaborationUrl,
-    ENABLE_ALIYUN_OSS: !!config.oss.aliyun.accessKeyId,
+    ENABLE_OSS_S3: config.oss.s3.enable,
     DNS_PREFETCH: (config.client.dnsPrefetch || '').split(' '),
     SEO_APPNAME: config.client.seoAppName,
     SEO_DESCRIPTION: config.client.seoDescription,

View File

@@ -0,0 +1,203 @@
<mxStylesheet>
<add as="defaultVertex">
<add as="shape" value="label"/>
<add as="perimeter" value="rectanglePerimeter"/>
<add as="fontSize" value="12"/>
<add as="fontFamily" value="Helvetica"/>
<add as="align" value="center"/>
<add as="verticalAlign" value="middle"/>
<add as="fillColor" value="default"/>
<add as="strokeColor" value="default"/>
<add as="fontColor" value="default"/>
</add>
<add as="defaultEdge">
<add as="shape" value="connector"/>
<add as="labelBackgroundColor" value="default"/>
<add as="endArrow" value="classic"/>
<add as="fontSize" value="11"/>
<add as="fontFamily" value="Helvetica"/>
<add as="align" value="center"/>
<add as="verticalAlign" value="middle"/>
<add as="rounded" value="1"/>
<add as="strokeColor" value="default"/>
<add as="fontColor" value="default"/>
</add>
<add as="text">
<add as="fillColor" value="none"/>
<add as="gradientColor" value="none"/>
<add as="strokeColor" value="none"/>
<add as="align" value="left"/>
<add as="verticalAlign" value="top"/>
</add>
<add as="edgeLabel" extend="text">
<add as="labelBackgroundColor" value="default"/>
<add as="fontSize" value="11"/>
</add>
<add as="label">
<add as="fontStyle" value="1"/>
<add as="align" value="left"/>
<add as="verticalAlign" value="middle"/>
<add as="spacing" value="2"/>
<add as="spacingLeft" value="52"/>
<add as="imageWidth" value="42"/>
<add as="imageHeight" value="42"/>
<add as="rounded" value="1"/>
</add>
<add as="icon" extend="label">
<add as="align" value="center"/>
<add as="imageAlign" value="center"/>
<add as="verticalLabelPosition" value="bottom"/>
<add as="verticalAlign" value="top"/>
<add as="spacingTop" value="4"/>
<add as="labelBackgroundColor" value="default"/>
<add as="spacing" value="0"/>
<add as="spacingLeft" value="0"/>
<add as="spacingTop" value="6"/>
<add as="fontStyle" value="0"/>
<add as="imageWidth" value="48"/>
<add as="imageHeight" value="48"/>
</add>
<add as="swimlane">
<add as="shape" value="swimlane"/>
<add as="fontSize" value="12"/>
<add as="fontStyle" value="1"/>
<add as="startSize" value="23"/>
</add>
<add as="group">
<add as="verticalAlign" value="top"/>
<add as="fillColor" value="none"/>
<add as="strokeColor" value="none"/>
<add as="gradientColor" value="none"/>
<add as="pointerEvents" value="0"/>
</add>
<add as="ellipse">
<add as="shape" value="ellipse"/>
<add as="perimeter" value="ellipsePerimeter"/>
</add>
<add as="rhombus">
<add as="shape" value="rhombus"/>
<add as="perimeter" value="rhombusPerimeter"/>
</add>
<add as="triangle">
<add as="shape" value="triangle"/>
<add as="perimeter" value="trianglePerimeter"/>
</add>
<add as="line">
<add as="shape" value="line"/>
<add as="strokeWidth" value="4"/>
<add as="labelBackgroundColor" value="default"/>
<add as="verticalAlign" value="top"/>
<add as="spacingTop" value="8"/>
</add>
<add as="image">
<add as="shape" value="image"/>
<add as="labelBackgroundColor" value="default"/>
<add as="verticalAlign" value="top"/>
<add as="verticalLabelPosition" value="bottom"/>
</add>
<add as="roundImage" extend="image">
<add as="perimeter" value="ellipsePerimeter"/>
</add>
<add as="rhombusImage" extend="image">
<add as="perimeter" value="rhombusPerimeter"/>
</add>
<add as="arrow">
<add as="shape" value="arrow"/>
<add as="edgeStyle" value="none"/>
<add as="fillColor" value="default"/>
</add>
<add as="fancy">
<add as="shadow" value="1"/>
<add as="glass" value="1"/>
</add>
<add as="gray" extend="fancy">
<add as="gradientColor" value="#B3B3B3"/>
<add as="fillColor" value="#F5F5F5"/>
<add as="strokeColor" value="#666666"/>
</add>
<add as="blue" extend="fancy">
<add as="gradientColor" value="#7EA6E0"/>
<add as="fillColor" value="#DAE8FC"/>
<add as="strokeColor" value="#6C8EBF"/>
</add>
<add as="green" extend="fancy">
<add as="gradientColor" value="#97D077"/>
<add as="fillColor" value="#D5E8D4"/>
<add as="strokeColor" value="#82B366"/>
</add>
<add as="turquoise" extend="fancy">
<add as="gradientColor" value="#67AB9F"/>
<add as="fillColor" value="#D5E8D4"/>
<add as="strokeColor" value="#6A9153"/>
</add>
<add as="yellow" extend="fancy">
<add as="gradientColor" value="#FFD966"/>
<add as="fillColor" value="#FFF2CC"/>
<add as="strokeColor" value="#D6B656"/>
</add>
<add as="orange" extend="fancy">
<add as="gradientColor" value="#FFA500"/>
<add as="fillColor" value="#FFCD28"/>
<add as="strokeColor" value="#D79B00"/>
</add>
<add as="red" extend="fancy">
<add as="gradientColor" value="#EA6B66"/>
<add as="fillColor" value="#F8CECC"/>
<add as="strokeColor" value="#B85450"/>
</add>
<add as="pink" extend="fancy">
<add as="gradientColor" value="#B5739D"/>
<add as="fillColor" value="#E6D0DE"/>
<add as="strokeColor" value="#996185"/>
</add>
<add as="purple" extend="fancy">
<add as="gradientColor" value="#8C6C9C"/>
<add as="fillColor" value="#E1D5E7"/>
<add as="strokeColor" value="#9673A6"/>
</add>
<add as="plain-gray">
<add as="gradientColor" value="#B3B3B3"/>
<add as="fillColor" value="#F5F5F5"/>
<add as="strokeColor" value="#666666"/>
</add>
<add as="plain-blue">
<add as="gradientColor" value="#7EA6E0"/>
<add as="fillColor" value="#DAE8FC"/>
<add as="strokeColor" value="#6C8EBF"/>
</add>
<add as="plain-green">
<add as="gradientColor" value="#97D077"/>
<add as="fillColor" value="#D5E8D4"/>
<add as="strokeColor" value="#82B366"/>
</add>
<add as="plain-turquoise">
<add as="gradientColor" value="#67AB9F"/>
<add as="fillColor" value="#D5E8D4"/>
<add as="strokeColor" value="#6A9153"/>
</add>
<add as="plain-yellow">
<add as="gradientColor" value="#FFD966"/>
<add as="fillColor" value="#FFF2CC"/>
<add as="strokeColor" value="#D6B656"/>
</add>
<add as="plain-orange">
<add as="gradientColor" value="#FFA500"/>
<add as="fillColor" value="#FFCD28"/>
<add as="strokeColor" value="#D79B00"/>
</add>
<add as="plain-red">
<add as="gradientColor" value="#EA6B66"/>
<add as="fillColor" value="#F8CECC"/>
<add as="strokeColor" value="#B85450"/>
</add>
<add as="plain-pink">
<add as="gradientColor" value="#B5739D"/>
<add as="fillColor" value="#E6D0DE"/>
<add as="strokeColor" value="#996185"/>
</add>
<add as="plain-purple">
<add as="gradientColor" value="#8C6C9C"/>
<add as="fillColor" value="#E1D5E7"/>
<add as="strokeColor" value="#9673A6"/>
</add>
</mxStylesheet>

Binary file not shown (new image added, 9.9 KiB).

View File

@@ -9,8 +9,8 @@ import styles from './style.module.scss';
 type ISize = { width: number; height: number };

 interface IProps {
-  width: number;
-  height: number;
+  width: number | string;
+  height: number | string;
   maxWidth?: number;
   isEditable?: boolean;
   onChange?: (arg: ISize) => void;

View File

@@ -1,6 +1,11 @@
+import { Toast } from '@douyinfe/semi-ui';
 import { FILE_CHUNK_SIZE, FileApiDefinition } from '@think/domains';
+import axios from 'axios';
+import { url } from 'inspector';
 import { string } from 'lib0';
+import { timeout } from 'lib0/eventloop';
 import SparkMD5 from 'spark-md5';

 import { HttpClient } from './http-client';
@@ -35,7 +40,6 @@ const uploadFileToServer = (arg: {
 }) => {
   const { filename, file, md5, isChunk, chunkIndex, onUploadProgress } = arg;
   const api = isChunk ? 'uploadChunk' : 'upload';
-
   const formData = new FormData();
   formData.append('file', file);
@@ -51,6 +55,7 @@ const uploadFileToServer = (arg: {
       md5,
       chunkIndex,
     },
+    timeout: 30 * 1000,
     onUploadProgress: (progress) => {
       const percent = progress.loaded / progress.total;
       onUploadProgress && onUploadProgress(percent);
@@ -67,68 +72,152 @@ export const uploadFile = async (
     return onUploadProgress && onUploadProgress(Math.ceil(percent * 100));
   };

-  const filename = file.name;
-
-  if (file.size > FILE_CHUNK_SIZE * 5) {
-    onTooLarge && onTooLarge();
-  }
-
-  if (file.size <= FILE_CHUNK_SIZE) {
-    const spark = new SparkMD5();
-    spark.append(file);
-    spark.append(file.lastModified);
-    spark.append(file.type);
-    const md5 = spark.end();
-    const url = await uploadFileToServer({ filename, file, md5, onUploadProgress: wraponUploadProgress });
-    return url;
-  } else {
-    const { chunks, md5 } = await splitBigFile(file);
-    const unitPercent = 1 / chunks.length;
-    const progressMap = {};
-
-    let url = await HttpClient.request<string | undefined>({
-      method: FileApiDefinition.initChunk.method,
-      url: FileApiDefinition.initChunk.client(),
-      params: {
-        filename,
-        md5,
-      },
-    });
-
-    if (!url) {
-      await Promise.all(
-        chunks.map((chunk, index) => {
-          return uploadFileToServer({
-            filename,
-            file: chunk,
-            chunkIndex: index + 1,
-            md5,
-            isChunk: true,
-            onUploadProgress: (progress) => {
-              progressMap[index] = progress * unitPercent;
-              wraponUploadProgress(
-                Math.min(
-                  Object.keys(progressMap).reduce((a, c) => {
-                    return (a += progressMap[c]);
-                  }, 0),
-                  // leave the remaining 5% for the merge step
-                  0.95
-                )
-              );
-            },
-          });
-        })
-      );
-      url = await HttpClient.request({
-        method: FileApiDefinition.mergeChunk.method,
-        url: FileApiDefinition.mergeChunk.client(),
-        params: {
-          filename,
-          md5,
-        },
-      });
-    }
-
-    wraponUploadProgress(1);
-    return url;
-  }
+  // S3 file upload support
+  if (!process.env.ENABLE_OSS_S3) {
+    const filename = file.name;
+    console.debug('当前没有开启oss 对象存储,使用本地上传方案');
+    if (file.size > FILE_CHUNK_SIZE) {
+      onTooLarge && onTooLarge();
+    }
+
+    if (file.size <= FILE_CHUNK_SIZE) {
+      const spark = new SparkMD5();
+      spark.append(file);
+      spark.append(file.lastModified);
+      spark.append(file.type);
+      const md5 = spark.end();
+      const url = await uploadFileToServer({ filename, file, md5, onUploadProgress: wraponUploadProgress });
+      return url;
+    } else {
+      const { chunks, md5 } = await splitBigFile(file);
+      const unitPercent = 1 / chunks.length;
+      const progressMap = {};
+
+      let url = await HttpClient.request<string | undefined>({
+        method: FileApiDefinition.initChunk.method,
+        url: FileApiDefinition.initChunk.client(),
+        params: {
+          filename,
+          md5,
+        },
+      });
+
+      if (!url) {
+        await Promise.all(
+          chunks.map((chunk, index) => {
+            return uploadFileToServer({
+              filename,
+              file: chunk,
+              chunkIndex: index + 1,
+              md5,
+              isChunk: true,
+              onUploadProgress: (progress) => {
+                progressMap[index] = progress * unitPercent;
+                wraponUploadProgress(
+                  Math.min(
+                    Object.keys(progressMap).reduce((a, c) => {
+                      return (a += progressMap[c]);
+                    }, 0),
+                    // leave the remaining 5% for the merge step
+                    0.95
+                  )
+                );
+              },
+            });
+          })
+        );
+        url = await HttpClient.request({
+          method: FileApiDefinition.mergeChunk.method,
+          url: FileApiDefinition.mergeChunk.client(),
+          params: {
+            filename,
+            md5,
+          },
+        });
+      }
+      wraponUploadProgress(1);
+      return url;
+    }
+  }
+  // S3 flow: the backend signs requests, the frontend uploads directly to object storage
+  else {
+    // compute the file md5 on the client
+    console.log('计算待上传的文件{' + file.name + '}的md5...');
+    const { chunks, md5 } = await splitBigFile(file);
+    console.log('文件{' + file.name + '}的md5:' + md5);
+    const filename = file.name;
+    // ask the backend whether this file already exists
+    const res = await HttpClient.request({
+      method: FileApiDefinition.ossSign.method,
+      url: FileApiDefinition.ossSign.client(),
+      data: { filename, md5, fileSize: file.size },
+    });
+    // the backend reports the file already exists
+    if (res['isExist']) {
+      Toast.info('文件秒传成功!');
+      return res['objectUrl'];
+    } else {
+      //console.log('文件不存在,需要上传文件');
+      // the backend considers the file small enough for a single direct PUT
+      if (!res['MultipartUpload']) {
+        console.log('前端直接PUT上传文件');
+        const signUrl = res['signUrl'];
+        await axios.put(signUrl, file, {
+          timeout: 120 * 1000,
+          onUploadProgress: (process) => {
+            const uploadLoaded = process.loaded;
+            const uploadTotal = file.size;
+            const uploadPercent = uploadLoaded / uploadTotal;
+            wraponUploadProgress(uploadPercent);
+          },
+        });
+        const upres = await HttpClient.request({
+          method: FileApiDefinition.ossSign.method,
+          url: FileApiDefinition.ossSign.client(),
+          data: { filename, md5, fileSize: file.size },
+        });
+        return upres['objectUrl'];
+      }
+      // otherwise the frontend enters the multipart upload flow
+      else {
+        const upload_id = res['uploadId'];
+        // console.log('分片文件上传,upload_id:' + upload_id);
+        const MultipartUpload = [];
+        for (let index = 0; index < chunks.length; index++) {
+          const chunk = chunks[index];
+          const res = await HttpClient.request({
+            method: FileApiDefinition.ossChunk.method,
+            url: FileApiDefinition.ossChunk.client(),
+            data: { filename, md5, uploadId: upload_id, chunkIndex: index + 1 },
+          });
+          // upload the chunk to S3
+          // use a raw axios request so it does not go through the HttpClient interceptors
+          const upload_res = await axios.put(res['signUrl'], chunk, {
+            timeout: 120 * 1000,
+            onUploadProgress: (process) => {
+              const uploadLoaded = process.loaded + FILE_CHUNK_SIZE * index;
+              const uploadTotal = file.size;
+              const uploadPercent = uploadLoaded / uploadTotal;
+              //console.log(uploadLoaded, uploadTotal, uploadPercent);
+              wraponUploadProgress(uploadPercent);
+            },
+          });
+          const upload_etag = upload_res.headers['etag'];
+          const response_part = { PartNumber: index + 1, ETag: upload_etag };
+          MultipartUpload.push(response_part);
+          //console.log('文件分片{' + (index + 1) + '上传成功etag:' + upload_etag);
+        }
+        // all chunks uploaded: ask the backend to complete the multipart upload
+        const payload = { filename, md5, uploadId: upload_id, MultipartUpload };
+        const upres = await HttpClient.request({
+          method: FileApiDefinition.ossMerge.method,
+          url: FileApiDefinition.ossMerge.client(),
+          data: payload,
+        });
+        return '' + upres;
+      }
+    }
+  }
 };
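
For reference, a minimal calling sketch for the reworked uploader, assuming the existing (file, onUploadProgress?, onTooLarge?) parameter order; the import path and the handler around it are illustrative, not part of this commit:

    // illustrative wrapper around the uploadFile export shown above
    import { uploadFile } from 'services/file'; // assumed import path

    async function handleFilePicked(file: File) {
      const url = await uploadFile(
        file,
        (percent) => console.log('upload progress: ' + percent + '%'), // already scaled to 0-100
        () => console.warn('file is larger than the configured limit')
      );
      // with ENABLE_OSS_S3 the URL points at the bucket object; otherwise at the server-local path
      console.log('uploaded to', url);
    }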

View File

@@ -39,6 +39,7 @@ HttpClient.interceptors.response.use(
       isBrowser && Toast.error(data.data.message);
       return null;
     }

+    // for a 204 response, return data.headers directly
     const res = data.data;

View File

@@ -88,9 +88,8 @@ export const ImageWrapper = ({ editor, node, updateAttributes }) => {
     <NodeViewWrapper style={{ textAlign, fontSize: 0, maxWidth: '100%' }}>
       <Resizeable
         className={'render-wrapper'}
-        width={width || maxWidth}
-        height={height}
-        maxWidth={maxWidth}
+        width="100%"
+        height="100%"
         isEditable={isEditable}
         onChangeEnd={onResize}
       >
@@ -106,11 +105,9 @@ export const ImageWrapper = ({ editor, node, updateAttributes }) => {
             </Spin>
           </div>
         ) : (
-          <div className={styles.wrap}>
+          <div className={styles.wrap} title="parent_image_div">
             <div
               style={{
-                height: '100%',
-                maxHeight: '100%',
                 padding: 24,
                 overflow: 'hidden',
                 display: 'flex',
@@ -120,7 +117,7 @@ export const ImageWrapper = ({ editor, node, updateAttributes }) => {
                 transition: `all ease-in-out .3s`,
               }}
             >
-              <LazyLoadImage src={src} alt={alt} width={'100%'} height={'100%'} />
+              <LazyLoadImage src={src} alt={alt} width={'100%'} />
             </div>
             <div className={styles.handlerWrap}>

View File

@@ -1,5 +1,6 @@
 import FileSaver from 'file-saver';

 export function download(url, name) {
-  FileSaver.saveAs(url, name);
+  if (url.startsWith('http://') || url.startsWith('https://')) window.open(url, '文件下载...');
+  else FileSaver.saveAs(url, name);
 }

View File

@@ -1,3 +1,4 @@
 export declare const FileApiDefinition: {
     /**
      *
@@ -31,5 +32,33 @@ export declare const FileApiDefinition: {
         server: "merge/chunk";
        client: () => string;
    };
+    /**
+     *
+     */
+    ossSign: {
+        method: "post";
+        server: "upload/ossSign";
+        client: () => string;
+    };
+    /**
+     *
+     */
+    ossChunk: {
+        method: "post";
+        server: "upload/ossChunk";
+        client: () => string;
+    };
+    /**
+     *
+     */
+    ossMerge: {
+        method: "post";
+        server: "upload/ossMerge";
+        client: () => string;
+    };
 };
 export declare const FILE_CHUNK_SIZE: number;

View File

@@ -33,6 +33,23 @@ exports.FileApiDefinition = {
         method: 'post',
         server: 'merge/chunk',
         client: function () { return '/file/merge/chunk'; }
-    }
+    },
+    ossSign: {
+        method: 'post',
+        server: 'upload/ossSign',
+        client: function () { return '/file/upload/ossSign'; }
+    },
+    ossChunk: {
+        method: 'post',
+        server: 'upload/ossChunk',
+        client: function () { return '/file/upload/ossChunk'; }
+    },
+    ossMerge: {
+        method: 'post',
+        server: 'upload/ossMerge',
+        client: function () { return '/file/upload/ossMerge'; }
+    },
 };
-exports.FILE_CHUNK_SIZE = 2 * 1024 * 1024;
+// file chunk size, raised to 8 MB
+// MinIO and other OSS providers enforce a minimum part size
+exports.FILE_CHUNK_SIZE = 8 * 1024 * 1024;
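
Taken together, the three new routes line up with the client flow earlier in this diff; the payload and response shapes below follow FileQuery/FileMerge and ossSignReponse/ossChunkResponse declared in oss.client.ts further down:

    // POST /file/upload/ossSign   body: { filename, md5, fileSize }
    //   -> { isExist, MultipartUpload, uploadId, objectKey, objectUrl, signUrl }
    // POST /file/upload/ossChunk  body: { filename, md5, uploadId, chunkIndex }
    //   -> { signUrl, uploadId, chunkIndex }
    // POST /file/upload/ossMerge  body: { filename, md5, uploadId, MultipartUpload }
    //   -> the final object URL as a string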

View File

@@ -18,6 +18,8 @@
     "test:e2e": "jest --config ./test/jest-e2e.json"
   },
   "dependencies": {
+    "@aws-sdk/client-s3": "^3.645.0",
+    "@aws-sdk/s3-request-presigner": "^3.645.0",
     "@hocuspocus/server": "^1.0.0-alpha.91",
     "@hocuspocus/transformer": "^1.0.0-alpha.18",
     "@nestjs/common": "^8.0.0",
@@ -61,6 +63,7 @@
     "reflect-metadata": "^0.1.13",
     "rimraf": "^3.0.2",
     "rxjs": "^7.2.0",
+    "spark-md5": "^3.0.2",
     "typeorm": "^0.2.41",
     "ua-parser-js": "^1.0.2",
     "validator": "^13.7.0",

View File

@@ -1,10 +1,10 @@
-import { Controller, Post, Query, UploadedFile, UseGuards, UseInterceptors } from '@nestjs/common';
+import { Body, Controller, Post, Query, UploadedFile, UseGuards, UseInterceptors } from '@nestjs/common';
 import { FileInterceptor } from '@nestjs/platform-express';
 import { FILE_CHUNK_SIZE, FileApiDefinition } from '@think/domains';

 import { JwtGuard } from '@guard/jwt.guard';
-import { FileQuery } from '@helpers/file.helper/oss.client';
+import { FileMerge, FileQuery } from '@helpers/file.helper/oss.client';
 import { FileService } from '@services/file.service';

 @Controller('file')
@@ -64,4 +64,31 @@ export class FileController {
   mergeChunk(@Query() query: FileQuery) {
     return this.fileService.mergeChunk(query);
   }
+
+  /**
+   * Get a pre-signed descriptor for S3 direct upload (single PUT or multipart)
+   */
+  @Post(FileApiDefinition.ossSign.server)
+  @UseGuards(JwtGuard)
+  ossSign(@Body() data: FileQuery) {
+    return this.fileService.ossSign(data);
+  }
+
+  /**
+   * Get a pre-signed URL for one multipart chunk
+   */
+  @Post(FileApiDefinition.ossChunk.server)
+  @UseGuards(JwtGuard)
+  ossChunk(@Body() data: FileQuery) {
+    return this.fileService.ossChunk(data);
+  }
+
+  /**
+   * Complete an S3 multipart upload
+   */
+  @Post(FileApiDefinition.ossMerge.server)
+  @UseGuards(JwtGuard)
+  ossMerge(@Body() data: FileMerge) {
+    return this.fileService.ossMerge(data);
+  }
 }
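
For orientation, a hypothetical raw request against the new signing endpoint; the base URL, the absence of a global API prefix, and the bearer-token header are assumptions about the deployment, not something this commit defines:

    import axios from 'axios';

    // illustrative only: request a pre-signed upload descriptor for a small file
    async function requestOssSign(token: string) {
      const response = await axios.post(
        'http://localhost:5002/file/upload/ossSign',
        { filename: 'demo.png', md5: '0123456789abcdef', fileSize: 1024 },
        { headers: { Authorization: 'Bearer ' + token } }
      );
      // the body should follow the ossSignReponse shape declared in oss.client.ts
      return response.data;
    }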

View File

@@ -1,19 +1,14 @@
 import { ConfigService } from '@nestjs/config';

-import { AliyunOssClient } from './aliyun.client';
 import { LocalOssClient } from './local.client';
 import { OssClient } from './oss.client';
-import { TencentOssClient } from './tencent.client';
+import { S3OssClient } from './s3.client';

 export { OssClient };

 export const getOssClient = (configService: ConfigService): OssClient => {
-  if (configService.get('oss.tencent.enable')) {
-    return new TencentOssClient(configService);
-  }
-
-  if (configService.get('oss.aliyun.enable')) {
-    return new AliyunOssClient(configService);
+  if (configService.get('oss.s3.enable')) {
+    return new S3OssClient(configService);
   }

   return new LocalOssClient(configService);

View File

@@ -1,6 +1,7 @@
 import { FILE_CHUNK_SIZE } from '@think/domains';
 import * as fs from 'fs-extra';
+import Redis from 'ioredis';
 import * as path from 'path';

 import { BaseOssClient, FileQuery } from './oss.client';
@@ -20,6 +21,8 @@ export const pipeWriteStream = (filepath, writeStream): Promise<void> => {
 };

 export class LocalOssClient extends BaseOssClient {
+  private redis: Redis | null;
+
   /**
    *
    * @param md5
@@ -34,6 +37,10 @@ export class LocalOssClient extends BaseOssClient {
     return { relative: filepath.replace(FILE_ROOT_PATH, FILE_DEST), absolute: filepath };
   }

+  async setRedis(redis: Redis) {
+    this.redis = redis;
+  }
+
   /**
    * file access URL
    * @param serverRoot

View File

@@ -1,16 +1,53 @@
 import { ConfigService } from '@nestjs/config';
+import exp from 'constants';
+import Redis from 'ioredis';

 export type FileQuery = {
   filename: string;
   md5: string;
   chunkIndex?: number;
+  fileSize?: number;
+  uploadId?: string;
+};
+
+export type FileMerge = {
+  filename: string;
+  md5: string;
+  uploadId: string;
+  MultipartUpload: any;
+};
+
+export type chunkUpload = {
+  uploadId: string;
+  chunkIndex: number;
+  etag: string;
+};
+
+export type ossSignReponse = {
+  MultipartUpload: boolean;
+  isExist: boolean;
+  uploadId: string | null;
+  objectKey: string;
+  objectUrl: string | null;
+  signUrl: string | null;
+};
+
+export type ossChunkResponse = {
+  signUrl: string;
+  uploadId: string;
+  chunkIndex: number;
 };

 export abstract class OssClient {
+  [x: string]: any;
   abstract uploadFile(file: Express.Multer.File, query: FileQuery): Promise<string>;
   abstract initChunk(query: FileQuery): Promise<void | string>;
   abstract uploadChunk(file: Express.Multer.File, query: FileQuery): Promise<void>;
   abstract mergeChunk(query: FileQuery): Promise<string>;
+  abstract ossSign(query: FileQuery): Promise<ossSignReponse>;
+  abstract ossChunk(query: FileQuery): Promise<ossChunkResponse>;
+  abstract ossMerge(query: FileMerge): Promise<string>;
 }

 export class BaseOssClient implements OssClient {
@@ -39,4 +76,24 @@ export class BaseOssClient implements OssClient {
   mergeChunk(query: FileQuery): Promise<string> {
     throw new Error('Method not implemented.');
   }
+
+  // eslint-disable-next-line @typescript-eslint/no-unused-vars
+  ossSign(query: FileQuery): Promise<ossSignReponse> {
+    throw new Error('Method not implemented.');
+  }
+
+  // eslint-disable-next-line @typescript-eslint/no-unused-vars
+  ossMerge(query: FileMerge): Promise<string> {
+    throw new Error('Method not implemented.');
+  }
+
+  // eslint-disable-next-line @typescript-eslint/no-unused-vars
+  ossChunk(query: FileQuery): Promise<ossChunkResponse> {
+    throw new Error('Method not implemented.');
+  }
+
+  // eslint-disable-next-line @typescript-eslint/no-unused-vars
+  setRedis(redis: Redis): Promise<void> {
+    throw new Error('Method not implemented.');
+  }
 }
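
To make the new contract concrete, here is what an ossSignReponse might look like for a large file that is not yet in the bucket (all values are illustrative):

    import type { ossSignReponse } from './oss.client';

    // illustrative payload: the file exceeds FILE_CHUNK_SIZE, so the backend opts for multipart upload
    const example: ossSignReponse = {
      MultipartUpload: true,
      isExist: false,
      uploadId: 'example-upload-id',        // as returned by CreateMultipartUploadCommand
      objectKey: 'think/<md5>/report.pdf',  // see getInOssFileName in s3.client.ts below
      objectUrl: null,                      // only set when the object already exists
      signUrl: null,                        // only set for the single-PUT path
    };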

View File

@@ -0,0 +1,306 @@
import { FILE_CHUNK_SIZE } from '@think/domains';
import {
CompleteMultipartUploadCommand,
CreateMultipartUploadCommand,
GetObjectCommand,
HeadObjectCommand,
PutObjectCommand,
S3Client,
UploadPartCommand,
} from '@aws-sdk/client-s3';
import { getSignedUrl } from '@aws-sdk/s3-request-presigner';
import Redis from 'ioredis';
import { BaseOssClient, FileMerge, FileQuery, ossChunkResponse, ossSignReponse } from './oss.client';
export class S3OssClient extends BaseOssClient {
private client: S3Client | null;
private bucket: string | null;
private redis: Redis | null;
async setRedis(redis: Redis) {
this.redis = redis;
}
/**
* ensure the S3 client is initialized from the oss.s3.config settings
* @returns
*/
private ensureS3OssClient(): S3Client {
if (this.client) {
return this.client;
}
const config = this.configService.get('oss.s3.config');
try {
this.bucket = config.bucket;
if (config.cloudisp == 'minio') {
this.client = new S3Client({
endpoint: config.endpoint,
region: config.region,
forcePathStyle: config.forcePathStyle,
credentials: {
accessKeyId: config.accessKeyId,
secretAccessKey: config.secretAccessKey,
},
});
}
if (config.cloudisp == 's3') {
this.client = new S3Client({
region: config.region,
forcePathStyle: config.forcePathStyle,
credentials: {
accessKeyId: config.accessKeyId,
secretAccessKey: config.secretAccessKey,
},
});
}
if (config.cloudisp == 'aliyun') {
this.client = new S3Client({
region: config.region,
endpoint: 'https://' + config.region + '.aliyuncs.com',
// Aliyun OSS does not support path-style addressing, so this must be false
forcePathStyle: false,
credentials: {
accessKeyId: config.accessKeyId,
secretAccessKey: config.secretAccessKey,
},
});
}
if (config.cloudisp == 'tencent') {
this.client = new S3Client({
region: config.region,
endpoint: 'https://cos.' + config.region + '.myqcloud.com',
// Tencent COS likewise does not support path-style addressing, so this must be false
forcePathStyle: false,
credentials: {
accessKeyId: config.accessKeyId,
secretAccessKey: config.secretAccessKey,
},
});
}
return this.client;
} catch (err) {
console.log('无法启动S3存储服务请检查S3配置是否正确', err.message);
}
}
/**
* build the object key used in the bucket: think/{md5}/{filename}
* @param md5
* @param filename
* @returns
*/
private getInOssFileName(md5, filename) {
return `think/${md5}/${filename}`;
}
private async getObjectUrl(bucket, key) {
this.ensureS3OssClient();
const command = new GetObjectCommand({ Bucket: bucket, Key: key });
const signUrl = await getSignedUrl(this.client, command);
return signUrl.split('?')[0];
}
/**
* check whether the object already exists in OSS and return its URL if it does
* @param md5
* @param filename
* @returns
*/
private async checkIfAlreadyInOss(md5, filename) {
this.ensureS3OssClient();
const inOssFileName = this.getInOssFileName(md5, filename);
const command = new HeadObjectCommand({ Bucket: this.bucket, Key: inOssFileName });
try {
await this.client.send(command);
return await this.getObjectUrl(this.bucket, inOssFileName);
} catch (err) {
return false;
}
}
/**
* upload a small file to the bucket in a single PUT
* @param file
* @param query
* @returns
*/
async uploadFile(file: Express.Multer.File, query: FileQuery): Promise<string> {
this.ensureS3OssClient();
const { filename, md5 } = query;
const maybeOssURL = await this.checkIfAlreadyInOss(md5, filename);
if (maybeOssURL) {
return maybeOssURL;
}
const inOssFileName = this.getInOssFileName(md5, filename);
const command = new PutObjectCommand({
Bucket: this.bucket,
Key: inOssFileName,
Body: file.buffer,
});
await this.client.send(command);
return await this.getObjectUrl(this.bucket, inOssFileName);
}
/**
* initialize a server-relayed multipart upload (chunk upload through the API server)
* @param file
* @param query
* @returns
*/
async initChunk(query: FileQuery): Promise<string | void> {
const { md5, filename } = query;
this.ensureS3OssClient();
const inOssFileName = this.getInOssFileName(md5, filename);
const maybeOssURL = await this.checkIfAlreadyInOss(md5, filename);
if (maybeOssURL) {
return maybeOssURL as string;
}
const command = new CreateMultipartUploadCommand({ Bucket: this.bucket, Key: inOssFileName });
const response = await this.client.send(command);
const upload_id = response['UploadId'];
// store the upload_id in redis
await this.redis.del('think:oss:chunk:' + md5);
await this.redis.del('think:oss:chunk:' + md5 + '*');
this.redis.set('think:oss:chunk:' + md5, upload_id);
return '';
}
/**
* upload a single server-relayed chunk as an S3 part
* @param file
* @param query
* @returns
*/
async uploadChunk(file: Express.Multer.File, query: FileQuery): Promise<void> {
const { md5, filename, chunkIndex } = query;
if (!('chunkIndex' in query)) {
throw new Error('请指定 chunkIndex');
}
this.ensureS3OssClient();
const inOssFileName = this.getInOssFileName(md5, filename);
const upload_id = await this.redis.get('think:oss:chunk:' + md5);
const command = new UploadPartCommand({
Body: file.buffer,
Bucket: this.bucket,
Key: inOssFileName,
PartNumber: chunkIndex,
UploadId: upload_id,
});
const response = await this.client.send(command);
this.redis.set(
'think:oss:chunk:' + md5 + ':' + chunkIndex,
JSON.stringify({ PartNumber: chunkIndex, ETag: response['ETag'] })
);
}
/**
* complete the server-relayed multipart upload and return the object URL
* @param query
* @returns
*/
async mergeChunk(query: FileQuery): Promise<string> {
const { filename, md5 } = query;
const inOssFileName = this.getInOssFileName(md5, filename);
const upload_id = await this.redis.get('think:oss:chunk:' + md5);
const etags = await this.redis.keys('think:oss:chunk:' + md5 + ':*');
const MultipartUpload = { Parts: [] };
for (let i = 1; i <= etags.length; i++) {
const obj = JSON.parse(await this.redis.get('think:oss:chunk:' + md5 + ':' + i));
MultipartUpload.Parts.push(obj);
}
const command = new CompleteMultipartUploadCommand({
Bucket: this.bucket,
Key: inOssFileName,
UploadId: upload_id,
MultipartUpload: MultipartUpload,
});
await this.client.send(command);
await this.redis.del('think:oss:chunk:' + md5);
await this.redis.del('think:oss:chunk:' + md5 + '*');
return await this.getObjectUrl(this.bucket, inOssFileName);
}
async ossSign(query: FileQuery): Promise<ossSignReponse> {
const { filename, md5, fileSize } = query;
const inOssFileName = await this.getInOssFileName(md5, filename);
this.ensureS3OssClient();
const objectUrl = await this.checkIfAlreadyInOss(md5, filename);
if (objectUrl) {
return {
signUrl: null,
MultipartUpload: false,
uploadId: null,
objectKey: inOssFileName,
isExist: true,
objectUrl: objectUrl,
};
}
if (fileSize <= FILE_CHUNK_SIZE) {
const command = new PutObjectCommand({ Bucket: this.bucket, Key: inOssFileName });
const signUrl = await getSignedUrl(this.client, command);
return {
signUrl: signUrl,
MultipartUpload: false,
uploadId: null,
objectKey: inOssFileName,
isExist: false,
objectUrl: null,
};
} else {
const command = new CreateMultipartUploadCommand({ Bucket: this.bucket, Key: inOssFileName });
const response = await this.client.send(command);
const upload_id = response['UploadId'];
return {
signUrl: null,
MultipartUpload: true,
uploadId: upload_id,
objectKey: inOssFileName,
isExist: false,
objectUrl: null,
};
}
}
async ossChunk(query: FileQuery): Promise<ossChunkResponse> {
this.ensureS3OssClient();
const { filename, md5 } = query;
const inOssFileName = await this.getInOssFileName(md5, filename);
const command = new UploadPartCommand({
UploadId: query.uploadId,
Bucket: this.bucket,
Key: inOssFileName,
PartNumber: query.chunkIndex,
});
const signUrl = await getSignedUrl(this.client, command);
return { signUrl: signUrl, uploadId: query.uploadId, chunkIndex: query.chunkIndex };
}
async ossMerge(query: FileMerge): Promise<string> {
this.ensureS3OssClient();
const { filename, md5 } = query;
const inOssFileName = await this.getInOssFileName(md5, filename);
const command = new CompleteMultipartUploadCommand({
Bucket: this.bucket,
Key: inOssFileName,
UploadId: query.uploadId,
MultipartUpload: { Parts: query.MultipartUpload },
});
await this.client.send(command);
return await this.getObjectUrl(this.bucket, inOssFileName);
}
}
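
One practical note: getSignedUrl is called here without options, so the presigned URLs use the AWS SDK v3 presigner default expiry of 900 seconds. If a deployment needs a longer window, the call accepts an explicit expiresIn; a minimal sketch, not part of this commit:

    import { GetObjectCommand, S3Client } from '@aws-sdk/client-s3';
    import { getSignedUrl } from '@aws-sdk/s3-request-presigner';

    // illustrative helper: a one-hour presigned GET for an existing object key
    async function presignDownload(client: S3Client, bucket: string, key: string) {
      const command = new GetObjectCommand({ Bucket: bucket, Key: key });
      return getSignedUrl(client, command, { expiresIn: 3600 }); // seconds
    }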

View File

@@ -18,7 +18,7 @@ export const buildRedis = (db: RedisDBEnum): Promise<Redis> => {
       ...redisConfig,
       showFriendlyErrorStack: true,
       lazyConnect: true,
-      db,
+      db: redisConfig.db,
     });

     redis.on('ready', () => {
       resolve(redis);

View File

@@ -1,14 +1,40 @@
+/*
+ * @Author: SudemQaQ
+ * @Date: 2024-09-09 10:28:02
+ * @email: mail@szhcloud.cn
+ * @Blog: https://blog.szhcloud.cn
+ * @github: https://github.com/sang8052
+ * @LastEditors: SudemQaQ
+ * @LastEditTime: 2024-09-10 07:46:50
+ * @Description:
+ */
 import { Injectable } from '@nestjs/common';
 import { ConfigService } from '@nestjs/config';

+import { RedisDBEnum } from '@constants/*';
+
 import { getOssClient, OssClient } from '@helpers/file.helper';
+import { buildRedis } from '@helpers/redis.helper';
+
+import Redis from 'ioredis';

 @Injectable()
 export class FileService {
+  [x: string]: any;
   private ossClient: OssClient;
+  private redis: Redis;

   constructor(private readonly configService: ConfigService) {
     this.ossClient = getOssClient(this.configService);
+    this.buildRedis();
+  }
+
+  private async buildRedis() {
+    try {
+      this.redis = await buildRedis(RedisDBEnum.view);
+      console.log('[think] 文件服务启动成功');
+      this.ossClient.setRedis(this.redis);
+    } catch (e) {
+      console.error(`[think] 文件服务启动错误: "${e.message}"`);
+    }
   }

   async uploadFile(file, query) {
@@ -26,4 +52,16 @@ export class FileService {
   async mergeChunk(query) {
     return this.ossClient.mergeChunk(query);
   }
+
+  async ossSign(query) {
+    return this.ossClient.ossSign(query);
+  }
+
+  async ossChunk(query) {
+    return this.ossClient.ossChunk(query);
+  }
+
+  async ossMerge(query) {
+    return this.ossClient.ossMerge(query);
+  }
 }

File diff suppressed because it is too large.