mirror of https://github.com/fantasticit/think.git
feat: add local file oss
parent c7eeb104b4
commit a05908d28f
@@ -5,6 +5,7 @@ lib
 **/.next/**
 **/dist/**
+**/static/**
 **/build/**
 **/public/**
 **/diagram.js
@@ -42,7 +42,12 @@ db:
# oss file storage service
oss:
  local:
    enable: true
    server: 'http://localhost:5002'
  aliyun:
    enable: false
    config:
      accessKeyId: ''
      accessKeySecret: ''
      bucket: ''
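A note on the config above: oss.local and oss.aliyun are sibling backends, and the server picks one at boot. A minimal sketch of that selection, assuming NestJS's ConfigService has loaded this YAML (the helper name is illustrative, not from the commit):

import { ConfigService } from '@nestjs/config';

// Illustrative helper: report which storage backend the YAML enables.
function resolveOssBackend(config: ConfigService): 'local' | 'aliyun' {
  if (config.get<boolean>('oss.local.enable')) return 'local';
  if (config.get<boolean>('oss.aliyun.enable')) return 'aliyun';
  throw new Error('no oss backend enabled in config');
}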
@@ -91,6 +91,7 @@
    "requestidlecallback-polyfill": "^1.0.2",
    "resize-observer-polyfill": "^1.5.1",
    "scroll-into-view-if-needed": "^2.2.29",
+   "spark-md5": "^3.0.2",
    "timeago.js": "^4.0.2",
    "tippy.js": "^6.3.7",
    "toggle-selection": "^1.0.6",
@@ -1,30 +1,123 @@
import { FILE_CHUNK_SIZE, FileApiDefinition } from '@think/domains';
import SparkMD5 from 'spark-md5';

import { HttpClient } from './http-client';

-const ONE_MB = 1 * 1024 * 1024;
const splitBigFile = (file: File): Promise<{ chunks: Blob[]; md5: string }> => {
  return new Promise((resolve, reject) => {
    const spark = new SparkMD5.ArrayBuffer();
    const fileReader = new FileReader();
    const chunks: Blob[] = [];
    const len = Math.ceil(file.size / FILE_CHUNK_SIZE);
    let current = 0;

-export const readFileAsDataURL = (file): Promise<string | ArrayBuffer> => {
-  if (file.size > ONE_MB) {
-    return Promise.reject(new Error('File is too large; please upload it to a storage service!'));
    fileReader.onload = (e) => {
      current++;

      const chunk = e.target.result;
      spark.append(chunk);

      if (current < len) {
        loadChunk();
      } else {
        resolve({ chunks, md5: spark.end() });
      }
    };

-  return new Promise((resolve) => {
-    const reader = new FileReader();
-    reader.addEventListener('load', (e) => resolve(e.target.result), { once: true });
-    reader.readAsDataURL(file);
    fileReader.onerror = (err) => {
      reject(err);
    };

    const loadChunk = () => {
      const start = current * FILE_CHUNK_SIZE;
      const end = Math.min(start + FILE_CHUNK_SIZE, file.size);
      const chunk = file.slice(start, end);

      chunks.push(chunk);
      fileReader.readAsArrayBuffer(chunk);
    };

    loadChunk();
  });
};

-export const uploadFile = async (file: Blob): Promise<string | ArrayBuffer> => {
-  if (!process.env.ENABLE_ALIYUN_OSS) {
-    return readFileAsDataURL(file);
-  }
const uploadFileToServer = (arg: {
  filename: string;
  file: File | Blob;
  md5: string;
  isChunk?: boolean;
  chunkIndex?: number;
  onUploadProgress?: (progress: number) => void;
}) => {
  const { filename, file, md5, isChunk, chunkIndex, onUploadProgress } = arg;
  const api = isChunk ? 'uploadChunk' : 'upload';

  const formData = new FormData();
  formData.append('file', file);

-  return HttpClient.post('/file/upload', formData, {
  return HttpClient.request({
    method: FileApiDefinition[api].method,
    url: FileApiDefinition[api].client(),
    data: formData,
    headers: {
      'Content-Type': 'multipart/form-data',
    },
    params: {
      filename,
      md5,
      chunkIndex,
    },
    onUploadProgress: (progress) => {
      const percent = progress.loaded / progress.total;
      onUploadProgress && onUploadProgress(percent);
    },
  });
};

export const uploadFile = async (file: File, onUploadProgress?: (progress: number) => void) => {
  const wraponUploadProgress = (percent) => {
    return onUploadProgress && onUploadProgress(Math.ceil(percent * 100));
  };

  const filename = file.name;
  if (file.size <= FILE_CHUNK_SIZE) {
    const spark = new SparkMD5.ArrayBuffer();
    // SparkMD5.ArrayBuffer hashes raw bytes, so read the File into an ArrayBuffer first
    spark.append(await file.arrayBuffer());
    const md5 = spark.end();
    const url = await uploadFileToServer({ filename, file, md5, onUploadProgress: wraponUploadProgress });
    return url;
  } else {
    const { chunks, md5 } = await splitBigFile(file);
    const unitPercent = 1 / chunks.length;
    const progressMap = {};

    await Promise.all(
      chunks.map((chunk, index) =>
        uploadFileToServer({
          filename,
          file: chunk,
          chunkIndex: index + 1,
          md5,
          isChunk: true,
          onUploadProgress: (progress) => {
            progressMap[index] = progress * unitPercent;
            wraponUploadProgress(
              Object.keys(progressMap).reduce((a, c) => {
                return (a += progressMap[c]);
              }, 0)
            );
          },
        })
      )
    );
    const url = await HttpClient.request({
      method: FileApiDefinition.mergeChunk.method,
      url: FileApiDefinition.mergeChunk.client(),
      params: {
        filename,
        md5,
      },
    });
    return url;
  }
};
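Taken together, the new client module exposes a single entry point: uploadFile hashes the file with SparkMD5, uploads it whole when it fits in one chunk, and otherwise splits it, uploads the chunks in parallel, and asks the server to merge them. A hedged usage sketch (the input wiring is illustrative; only uploadFile itself comes from this diff):

import { uploadFile } from 'services/file';

// Illustrative: wire a file input to the uploader and log progress.
async function onPickFile(e: Event) {
  const input = e.target as HTMLInputElement;
  const file = input.files && input.files[0];
  if (!file) return;

  // The callback receives an integer percentage (0-100),
  // aggregated across chunks for large files.
  const url = await uploadFile(file, (percent) => console.log(`${percent}%`));
  console.log('stored at', url);
}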
@@ -1,10 +1,11 @@
 import { IconClose, IconDownload, IconPlayCircle } from '@douyinfe/semi-icons';
-import { Button, Collapsible, Space, Spin, Typography } from '@douyinfe/semi-ui';
+import { Button, Collapsible, Progress, Space, Spin, Typography } from '@douyinfe/semi-ui';
+import { FILE_CHUNK_SIZE } from '@think/domains';
 import { NodeViewWrapper } from '@tiptap/react';
 import cls from 'classnames';
 import { Tooltip } from 'components/tooltip';
 import { useToggle } from 'hooks/use-toggle';
-import { useCallback, useEffect, useRef } from 'react';
+import { useCallback, useEffect, useRef, useState } from 'react';
 import { uploadFile } from 'services/file';
 import { download, extractFileExtension, extractFilename, normalizeFileSize } from 'tiptap/prose-utils';
@@ -20,6 +21,8 @@ export const AttachmentWrapper = ({ editor, node, updateAttributes }) => {
  const { hasTrigger, fileName, fileSize, fileExt, fileType, url, error } = node.attrs;
  const [loading, toggleLoading] = useToggle(false);
  const [visible, toggleVisible] = useToggle(false);
+  const [showProgress, toggleShowProgress] = useToggle(false);
+  const [uploadProgress, setUploadProgress] = useState(0);

  const selectFile = useCallback(() => {
    if (!isEditable || url) return;
@@ -29,6 +32,7 @@ export const AttachmentWrapper = ({ editor, node, updateAttributes }) => {
  const handleFile = useCallback(
    async (e) => {
      const file = e.target.files && e.target.files[0];
+      if (!file) return;
      const fileInfo = {
        fileName: extractFilename(file.name),
        fileSize: file.size,
@@ -36,16 +40,26 @@ export const AttachmentWrapper = ({ editor, node, updateAttributes }) => {
        fileExt: extractFileExtension(file.name),
      };
      toggleLoading(true);

+      if (file.size > FILE_CHUNK_SIZE) {
+        toggleShowProgress(true);
+      }

      try {
-        const url = await uploadFile(file);
+        const url = await uploadFile(file, setUploadProgress);
        updateAttributes({ ...fileInfo, url });
        toggleLoading(false);
+        setUploadProgress(0);
+        toggleShowProgress(false);
      } catch (error) {
        // parenthesize the fallback: the concatenated string is always truthy, so `|| fallback` must wrap the message only
        updateAttributes({ error: 'File upload failed: ' + ((error && error.message) || 'unknown error') });
        toggleLoading(false);
+        setUploadProgress(0);
+        toggleShowProgress(false);
        $upload.current.value = '';
      }
    },
-    [toggleLoading, updateAttributes]
+    [toggleLoading, toggleShowProgress, updateAttributes]
  );

  useEffect(() => {
@@ -61,7 +75,21 @@ export const AttachmentWrapper = ({ editor, node, updateAttributes }) => {
    <div className={cls(styles.wrap, 'render-wrapper')}>
      <Spin spinning={loading}>
        <Text style={{ cursor: 'pointer' }} onClick={selectFile}>
-          {loading ? 'Uploading…' : 'Please select a file'}
+          {loading ? (
+            showProgress ? (
+              <Progress
+                percent={uploadProgress}
+                showInfo
+                style={{
+                  margin: '10px 0',
+                }}
+              />
+            ) : (
+              'Uploading…'
+            )
+          ) : (
+            'Please select a file'
+          )}
        </Text>
        <input ref={$upload} type="file" hidden onChange={handleFile} />
      </Spin>
@@ -7,4 +7,21 @@ export declare const FileApiDefinition: {
        server: "upload";
        client: () => string;
    };
    /**
     * Upload a file chunk
     */
    uploadChunk: {
        method: "post";
        server: "upload/chunk";
        client: () => string;
    };
    /**
     * Merge file chunks
     */
    mergeChunk: {
        method: "post";
        server: "merge/chunk";
        client: () => string;
    };
};
export declare const FILE_CHUNK_SIZE: number;
@@ -1,6 +1,6 @@
 "use strict";
 exports.__esModule = true;
-exports.FileApiDefinition = void 0;
+exports.FILE_CHUNK_SIZE = exports.FileApiDefinition = void 0;
 exports.FileApiDefinition = {
     /**
      * Upload a file
@@ -9,5 +9,22 @@ exports.FileApiDefinition = {
        method: 'post',
        server: 'upload',
        client: function () { return '/file/upload'; }
    },
    /**
     * Upload a file chunk
     */
    uploadChunk: {
        method: 'post',
        server: 'upload/chunk',
        client: function () { return '/file/upload/chunk'; }
    },
    /**
     * Merge file chunks
     */
    mergeChunk: {
        method: 'post',
        server: 'merge/chunk',
        client: function () { return '/file/merge/chunk'; }
    }
};
exports.FILE_CHUNK_SIZE = 2 * 1024 * 1024;
@@ -7,4 +7,24 @@ export const FileApiDefinition = {
    server: 'upload' as const,
    client: () => '/file/upload',
  },

  /**
   * Upload a file chunk
   */
  uploadChunk: {
    method: 'post' as const,
    server: 'upload/chunk' as const,
    client: () => '/file/upload/chunk',
  },

  /**
   * Merge file chunks
   */
  mergeChunk: {
    method: 'post' as const,
    server: 'merge/chunk' as const,
    client: () => '/file/merge/chunk',
  },
};

export const FILE_CHUNK_SIZE = 2 * 1024 * 1024;
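Since FILE_CHUNK_SIZE is 2 MiB, chunk counts are simple arithmetic; for instance:

import { FILE_CHUNK_SIZE } from '@think/domains';

// A 5 MiB file splits into Math.ceil(5 / 2) = 3 chunks: 2 MiB + 2 MiB + 1 MiB.
const fileSize = 5 * 1024 * 1024;
const chunkCount = Math.ceil(fileSize / FILE_CHUNK_SIZE); // 3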
@@ -33,3 +33,6 @@ lerna-debug.log*
 !.vscode/tasks.json
 !.vscode/launch.json
 !.vscode/extensions.json
+
+# static files
+/static
@@ -31,6 +31,7 @@
    "@think/config": "workspace:^1.0.0",
    "@think/constants": "workspace:^1.0.0",
    "@think/domains": "workspace:^1.0.0",
+   "@types/multer": "^1.4.7",
    "ali-oss": "^6.16.0",
    "bcryptjs": "^2.4.3",
    "class-transformer": "^0.5.1",
@@ -25,6 +25,7 @@ import { Cron, ScheduleModule } from '@nestjs/schedule';
import { TypeOrmModule, TypeOrmModuleOptions } from '@nestjs/typeorm';
import { getConfig } from '@think/config';
import * as fs from 'fs-extra';
import * as lodash from 'lodash';
import { LoggerModule } from 'nestjs-pino';
import * as path from 'path';
import pino from 'pino';
@@ -54,6 +55,8 @@ const MODULES = [
  ViewModule,
];

+// debug output: prints whether local OSS is enabled at boot
+console.log(lodash.get(getConfig(), 'oss.local.enable'));

@Module({
  imports: [
    ConfigModule.forRoot({
@@ -85,6 +88,7 @@ const MODULES = [
        } as TypeOrmModuleOptions;
      },
    }),

    ...MODULES,
  ].filter(Boolean),
  controllers: [],
@@ -1,27 +1,55 @@
 import { JwtGuard } from '@guard/jwt.guard';
+import { FileQuery } from '@helpers/file.helper/oss.client';
-import { Controller, Post, UploadedFile, UseGuards, UseInterceptors } from '@nestjs/common';
+import { Controller, Post, Query, UploadedFile, UseGuards, UseInterceptors } from '@nestjs/common';
 import { FileInterceptor } from '@nestjs/platform-express';
 import { FileService } from '@services/file.service';
-import { FileApiDefinition } from '@think/domains';
+import { FILE_CHUNK_SIZE, FileApiDefinition } from '@think/domains';

@Controller('file')
export class FileController {
  constructor(private readonly fileService: FileService) {}

  /**
-   * Upload a file
+   * Upload a small file
   * @param file
   */
  @Post(FileApiDefinition.upload.server)
  @UseInterceptors(
    FileInterceptor('file', {
      limits: {
-        fieldSize: 50 * 1024 * 1024,
+        // multer caps file uploads via `fileSize`; `fieldSize` only limits non-file fields
+        fileSize: FILE_CHUNK_SIZE,
      },
    })
  )
  @UseGuards(JwtGuard)
-  uploadFile(@UploadedFile() file) {
-    return this.fileService.uploadFile(file);
+  uploadFile(@UploadedFile() file: Express.Multer.File, @Query() query: FileQuery) {
+    return this.fileService.uploadFile(file, query);
  }

  /**
   * Upload a file chunk
   * @param file
   */
  @Post(FileApiDefinition.uploadChunk.server)
  @UseInterceptors(
    FileInterceptor('file', {
      limits: {
        fileSize: FILE_CHUNK_SIZE,
      },
    })
  )
  @UseGuards(JwtGuard)
  uploadChunk(@UploadedFile() file: Express.Multer.File, @Query() query: FileQuery) {
    return this.fileService.uploadChunk(file, query);
  }

  /**
   * Merge file chunks
   * @param query
   */
  @Post(FileApiDefinition.mergeChunk.server)
  @UseGuards(JwtGuard)
  mergeChunk(@Query() query: FileQuery) {
    return this.fileService.mergeChunk(query);
  }
}
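For orientation, a large upload hits these endpoints as: one POST /file/upload/chunk per chunk (same filename and md5, 1-based chunkIndex), then a single POST /file/merge/chunk. A hedged sketch with fetch (assumes the JWT cookie is already present and omits any server prefix; the real client goes through HttpClient instead):

// Illustrative request sequence against the controller above.
async function uploadInChunks(chunks: Blob[], filename: string, md5: string) {
  await Promise.all(
    chunks.map((chunk, index) => {
      const body = new FormData();
      body.append('file', chunk);
      const qs = new URLSearchParams({ filename, md5, chunkIndex: String(index + 1) });
      return fetch(`/file/upload/chunk?${qs}`, { method: 'POST', body });
    })
  );
  const qs = new URLSearchParams({ filename, md5 });
  const res = await fetch(`/file/merge/chunk?${qs}`, { method: 'POST' });
  return res.text(); // URL of the merged file (exact shape depends on the server's response wrapper)
}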
@@ -0,0 +1,14 @@
import { ConfigService } from '@nestjs/config';

import { LocalOssClient } from './local.client';
import { OssClient } from './oss.client';

export { OssClient };

export const getOssClient = (configService: ConfigService): OssClient => {
  if (configService.get('oss.local.enable')) {
    return new LocalOssClient(configService);
  }

  // No other backend is wired up yet, so fall back to the local client.
  return new LocalOssClient(configService);
};
@@ -0,0 +1,124 @@
import { FILE_CHUNK_SIZE } from '@think/domains';
import * as fs from 'fs-extra';
import * as path from 'path';

import { BaseOssClient, FileQuery } from './oss.client';

export const FILE_DEST = '/' + 'static';
export const FILE_ROOT_PATH = path.join(__dirname, '../../../', FILE_DEST);

const pipeWriteStream = (filepath, writeStream): Promise<void> => {
  return new Promise((resolve) => {
    const readStream = fs.createReadStream(filepath);
    readStream.on('end', () => {
      fs.removeSync(filepath);
      resolve();
    });
    readStream.pipe(writeStream);
  });
};

export class LocalOssClient extends BaseOssClient {
  /**
   * File storage path
   * @param md5
   * @returns
   */
  protected storeFilePath(md5: string): {
    relative: string;
    absolute: string;
  } {
    const filepath = path.join(FILE_ROOT_PATH, md5);
    fs.ensureDirSync(filepath);
    return { relative: filepath.replace(FILE_ROOT_PATH, FILE_DEST), absolute: filepath };
  }

  /**
   * Turn a stored file's relative path into an accessible URL
   * @param relativeFilePath
   * @returns
   */
  protected serveFilePath(relativeFilePath: string) {
    const serverRoot = this.configService.get('oss.local.server');

    if (!serverRoot) {
      throw new Error('Local file storage is enabled, but oss.local.server is not configured; please complete the config!');
    }

    return new URL(relativeFilePath, serverRoot).href;
  }

  /**
   * Small file upload
   * @param file
   * @param query
   * @returns
   */
  async uploadFile(file: Express.Multer.File, query: FileQuery): Promise<string> {
    const { filename, md5 } = query;
    const { absolute, relative } = this.storeFilePath(md5);
    const absoluteFilepath = path.join(absolute, filename);
    const relativeFilePath = path.join(relative, filename);

    if (!fs.existsSync(absoluteFilepath)) {
      fs.writeFileSync(absoluteFilepath, file.buffer);
    }

    return this.serveFilePath(relativeFilePath);
  }

  /**
   * Chunked file upload
   * @param file
   * @param query
   */
  async uploadChunk(file: Express.Multer.File, query: FileQuery): Promise<void> {
    const { md5, chunkIndex } = query;

    if (!('chunkIndex' in query)) {
      throw new Error('Please specify chunkIndex');
    }

    const { absolute } = this.storeFilePath(md5);
    const chunksDir = path.join(absolute, 'chunks');
    fs.ensureDirSync(chunksDir);
    fs.writeFileSync(path.join(chunksDir, '' + chunkIndex), file.buffer);
  }

  /**
   * Merge chunks
   * @param query
   * @returns
   */
  async mergeChunk(query: FileQuery): Promise<string> {
    const { filename, md5 } = query;
    const { absolute, relative } = this.storeFilePath(md5);
    const absoluteFilepath = path.join(absolute, filename);
    const relativeFilePath = path.join(relative, filename);

    if (!fs.existsSync(absoluteFilepath)) {
      const chunksDir = path.join(absolute, 'chunks');
      const chunks = fs.readdirSync(chunksDir);
      chunks.sort((a, b) => Number(a) - Number(b));

      // positional writes need an existing file opened with 'r+'; the default 'w' flag would truncate on each open
      fs.ensureFileSync(absoluteFilepath);

      await Promise.all(
        chunks.map((chunk, index) => {
          const writeStream = fs.createWriteStream(absoluteFilepath, {
            flags: 'r+',
            start: index * FILE_CHUNK_SIZE,
          });

          if (index === chunks.length - 1) {
            writeStream.on('finish', () => {
              fs.removeSync(chunksDir);
            });
          }

          // return the promise so Promise.all actually waits for every chunk to be written
          return pipeWriteStream(path.join(chunksDir, chunk), writeStream);
        })
      );
    }

    return this.serveFilePath(relativeFilePath);
  }
}
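The merge relies on every chunk except the last being exactly FILE_CHUNK_SIZE bytes, so after sorting the chunk filenames numerically, the chunk at position i lands at byte offset i * FILE_CHUNK_SIZE. A minimal standalone sketch of that positional-write idea (names are illustrative; the class above uses pipeWriteStream instead):

import * as fs from 'fs-extra';

const CHUNK = 2 * 1024 * 1024; // FILE_CHUNK_SIZE

// Write one chunk file into `target` at its fixed offset. The target must
// already exist, since 'r+' opens for in-place modification without truncating.
function writeChunkAt(target: string, chunkPath: string, index: number): Promise<void> {
  return new Promise((resolve, reject) => {
    const ws = fs.createWriteStream(target, { flags: 'r+', start: index * CHUNK });
    fs.createReadStream(chunkPath).on('error', reject).pipe(ws).on('finish', resolve).on('error', reject);
  });
}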
@@ -0,0 +1,36 @@
import { ConfigService } from '@nestjs/config';

export type FileQuery = {
  filename: string;
  md5: string;
  chunkIndex?: number;
};

export abstract class OssClient {
  abstract uploadFile(file: Express.Multer.File, query: FileQuery): Promise<string>;
  abstract uploadChunk(file: Express.Multer.File, query: FileQuery): Promise<void>;
  abstract mergeChunk(query: FileQuery): Promise<string>;
}

export class BaseOssClient implements OssClient {
  protected configService: ConfigService;

  constructor(configService: ConfigService) {
    this.configService = configService;
  }

  // eslint-disable-next-line @typescript-eslint/no-unused-vars
  uploadFile(file: Express.Multer.File, query: FileQuery): Promise<string> {
    throw new Error('Method not implemented.');
  }

  // eslint-disable-next-line @typescript-eslint/no-unused-vars
  uploadChunk(file: Express.Multer.File, query: FileQuery): Promise<void> {
    throw new Error('Method not implemented.');
  }

  // eslint-disable-next-line @typescript-eslint/no-unused-vars
  mergeChunk(query: FileQuery): Promise<string> {
    throw new Error('Method not implemented.');
  }
}
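BaseOssClient exists so concrete backends only override the operations they support. A hedged skeleton of an additional backend (entirely hypothetical, named ExampleOssClient here; an Aliyun client would follow the same shape):

import { BaseOssClient, FileQuery } from './oss.client';

// Hypothetical backend skeleton: override the three operations and
// delegate to a vendor SDK inside each.
export class ExampleOssClient extends BaseOssClient {
  async uploadFile(file: Express.Multer.File, query: FileQuery): Promise<string> {
    // e.g. put file.buffer at `${query.md5}/${query.filename}` and return its URL
    throw new Error('not implemented');
  }

  async uploadChunk(file: Express.Multer.File, query: FileQuery): Promise<void> {
    throw new Error('not implemented');
  }

  async mergeChunk(query: FileQuery): Promise<string> {
    throw new Error('not implemented');
  }
}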
@@ -1,5 +1,6 @@
 import { HttpResponseExceptionFilter } from '@exceptions/http-response.exception';
 import { IS_PRODUCTION } from '@helpers/env.helper';
+import { FILE_DEST, FILE_ROOT_PATH } from '@helpers/file.helper/local.client';
 import { ConfigService } from '@nestjs/config';
 import { NestFactory } from '@nestjs/core';
 import { ValidationPipe } from '@pipes/validation.pipe';

@@ -31,6 +32,7 @@ async function bootstrap() {
      max: config.get('server.rateLimitMax'),
    })
  );

  app.use(cookieParser());
  app.use(compression());
  app.use(helmet());

@@ -41,7 +43,16 @@ async function bootstrap() {
  app.useGlobalPipes(new ValidationPipe());
  app.setGlobalPrefix(config.get('server.prefix') || '/');

+  if (config.get('oss.local.enable')) {
+    const serverStatic = express.static(FILE_ROOT_PATH);
+    app.use(FILE_DEST, (req, res, next) => {
+      // allow files to be embedded cross-origin (e.g. by the client app on another port)
+      res.header('Cross-Origin-Resource-Policy', 'cross-origin');
+      return serverStatic(req, res, next);
+    });
+  }
+
  await app.listen(port);

  console.log(`[think] main server started successfully on port ${port}`);
}
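A worked example of the resulting URLs, assuming the sample config above (server: 'http://localhost:5002') and the FILE_DEST of '/static' from local.client.ts (the md5 and filename are illustrative):

// relative path produced by LocalOssClient for md5 'abc123' and file 'report.pdf':
//   /static/abc123/report.pdf
new URL('/static/abc123/report.pdf', 'http://localhost:5002').href;
// => 'http://localhost:5002/static/abc123/report.pdf'
// main.ts serves exactly this path from FILE_ROOT_PATH when oss.local.enable is true.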
@@ -1,25 +1,24 @@
-import { AliyunOssClient } from '@helpers/aliyun.helper';
-import { dateFormat } from '@helpers/date.helper';
-import { uniqueid } from '@helpers/uniqueid.helper';
+import { getOssClient, OssClient } from '@helpers/file.helper';
 import { Injectable } from '@nestjs/common';
 import { ConfigService } from '@nestjs/config';

@Injectable()
export class FileService {
-  private ossClient: AliyunOssClient;
+  private ossClient: OssClient;

  constructor(private readonly configService: ConfigService) {
-    this.ossClient = new AliyunOssClient(this.configService);
+    this.ossClient = getOssClient(this.configService);
  }

  /**
   * Upload a file
   * @param file
   */
-  async uploadFile(file) {
-    const { originalname, buffer } = file;
-    const filename = `/${dateFormat(new Date(), 'yyyy-MM-dd')}/${uniqueid()}/${originalname}`;
-    const url = await this.ossClient.putFile(filename, buffer);
-    return url;
+  async uploadFile(file, query) {
+    return this.ossClient.uploadFile(file, query);
  }

+  async uploadChunk(file, query) {
+    return this.ossClient.uploadChunk(file, query);
+  }
+
+  async mergeChunk(query) {
+    return this.ossClient.mergeChunk(query);
+  }
}
@@ -23,5 +23,8 @@
      "@controllers/*": ["src/controllers/*"],
      "@modules/*": ["src/modules/*"]
    }
  },
  "watchOptions": {
    "excludeFiles": ["static"]
  }
}
@@ -136,6 +136,7 @@ importers:
      requestidlecallback-polyfill: ^1.0.2
      resize-observer-polyfill: ^1.5.1
      scroll-into-view-if-needed: ^2.2.29
+     spark-md5: ^3.0.2
      timeago.js: ^4.0.2
      tippy.js: ^6.3.7
      toggle-selection: ^1.0.6

@@ -226,6 +227,7 @@ importers:
      requestidlecallback-polyfill: 1.0.2
      resize-observer-polyfill: 1.5.1
      scroll-into-view-if-needed: 2.2.29
+     spark-md5: 3.0.2
      timeago.js: 4.0.2
      tippy.js: 6.3.7
      toggle-selection: 1.0.6

@@ -289,6 +291,7 @@ importers:
      '@types/express': ^4.17.13
      '@types/jest': 27.0.2
      '@types/lodash': ^4.14.182
+     '@types/multer': ^1.4.7
      '@types/node': ^16.0.0
      '@types/supertest': ^2.0.11
      '@typescript-eslint/eslint-plugin': ^5.21.0

@@ -350,6 +353,7 @@ importers:
      '@think/config': link:../config
      '@think/constants': link:../constants
      '@think/domains': link:../domains
+     '@types/multer': 1.4.7
      ali-oss: 6.16.0
      bcryptjs: 2.4.3
      class-transformer: 0.5.1

@@ -3103,13 +3107,11 @@ packages:
    dependencies:
      '@types/connect': 3.4.35
      '@types/node': 16.11.21
-   dev: true

  /@types/connect/3.4.35:
    resolution: {integrity: sha512-cdeYyv4KWoEgpBISTxWvqYsVy444DOqehiF3fM3ne10AmJ62RSyNkUnxMJXHQWRQQX2eR94m5y1IZyDwBjV9FQ==}
    dependencies:
      '@types/node': 16.11.21
-   dev: true

  /@types/cookie-parser/1.4.3:
    resolution: {integrity: sha512-CqSKwFwefj4PzZ5n/iwad/bow2hTCh0FlNAeWLtQM3JA/NX/iYagIpWG2cf1bQKQ2c9gU2log5VUCrn7LDOs0w==}

@@ -3156,7 +3158,6 @@ packages:
      '@types/node': 16.11.21
      '@types/qs': 6.9.7
      '@types/range-parser': 1.2.4
-   dev: true

  /@types/express/4.17.13:
    resolution: {integrity: sha512-6bSZTPaTIACxn48l50SR+axgrqm6qXFIxrdAKaG6PaJk3+zuUr35hBlgT7vOmJcum+OEaIBLtHV/qloEAFITeA==}

@@ -3165,7 +3166,6 @@ packages:
      '@types/express-serve-static-core': 4.17.28
      '@types/qs': 6.9.7
      '@types/serve-static': 1.13.10
-   dev: true

  /@types/glob/7.2.0:
    resolution: {integrity: sha512-ZUxbzKl0IfJILTS6t7ip5fQQM/J3TJYubDm3nMbgubNNYS62eXeUpoLUC8/7fJNiFYHTrGPQn7hspDUzIHX3UA==}

@@ -3236,7 +3236,6 @@ packages:

  /@types/mime/1.3.2:
    resolution: {integrity: sha512-YATxVxgRqNH6nHEIsvg6k2Boc1JHI9ZbH5iWFFv/MTkchz3b1ieGDa5T0a9RznNdI0KhVbdbWSN+KWWrQZRxTw==}
-   dev: true

  /@types/minimatch/3.0.5:
    resolution: {integrity: sha512-Klz949h02Gz2uZCMGwDUSDS1YBlTdDDgbWHi+81l29tQALUtvz4rAYi5uoVhE5Lagoq6DeqAUlbrHvW/mXDgdQ==}

@@ -3246,6 +3245,12 @@ packages:
    resolution: {integrity: sha512-jhuKLIRrhvCPLqwPcx6INqmKeiA5EWrsCOPhrlFSrbrmU4ZMPjj5Ul/oLCMDO98XRUIwVm78xICz4EPCektzeQ==}
    dev: true

+  /@types/multer/1.4.7:
+    resolution: {integrity: sha512-/SNsDidUFCvqqcWDwxv2feww/yqhNeTRL5CVoL3jU4Goc4kKEL10T7Eye65ZqPNi4HRx8sAEX59pV1aEH7drNA==}
+    dependencies:
+      '@types/express': 4.17.13
+    dev: false
+
  /@types/node/16.11.21:
    resolution: {integrity: sha512-Pf8M1XD9i1ksZEcCP8vuSNwooJ/bZapNmIzpmsMaL+jMI+8mEYU3PKvs+xDNuQcJWF/x24WzY4qxLtB0zNow9A==}

@@ -3349,11 +3354,9 @@ packages:

  /@types/qs/6.9.7:
    resolution: {integrity: sha512-FGa1F62FT09qcrueBA6qYTrJPVDzah9a+493+o2PCXsesWHIn27G98TsSMs3WPNbZIEj4+VJf6saSFpvD+3Zsw==}
-   dev: true

  /@types/range-parser/1.2.4:
    resolution: {integrity: sha512-EEhsLsD6UsDM1yFhAvy0Cjr6VwmpMWqFBCb9w07wVugF7w9nfajxLuVmngTIpgS6svCnm6Vaw+MZhoDCKnOfsw==}
-   dev: true

  /@types/react-window/1.8.5:
    resolution: {integrity: sha512-V9q3CvhC9Jk9bWBOysPGaWy/Z0lxYcTXLtLipkt2cnRj1JOSFNF7wqGpkScSXMgBwC+fnVRg/7shwgddBG5ICw==}

@@ -3382,7 +3385,6 @@ packages:
    dependencies:
      '@types/mime': 1.3.2
      '@types/node': 16.11.21
-   dev: true

  /@types/stack-utils/2.0.1:
    resolution: {integrity: sha512-Hl219/BT5fLAaz6NDkSuhzasy49dwQS/DSdu4MdggFB8zcXv7vflBI3xp7FEmkmdDkBUI2bPUNeMttp2knYdxw==}

@@ -9920,6 +9922,10 @@ packages:
  /sourcemap-codec/1.4.8:
    resolution: {integrity: sha512-9NykojV5Uih4lgo5So5dtw+f0JgJX30KCNI8gwhz2J9A15wD0Ml6tjHKwf6fTSa6fAdVBdZeNOs9eJ71qCk8vA==}

+  /spark-md5/3.0.2:
+    resolution: {integrity: sha512-wcFzz9cDfbuqe0FZzfi2or1sgyIrsDwmPwfZC4hiNidPdPINjeUwNfv5kldczoEAcjl9Y1L3SM7Uz2PUEQzxQw==}
+    dev: false
+
  /spawn-command/0.0.2-1:
    resolution: {integrity: sha1-YvXpRmmBwbeW3Fkpk34RycaSG9A=}
    dev: false