|
|
@@ -0,0 +1,325 @@
|
|
|
+import {
|
|
|
+ BadRequestException,
|
|
|
+ Injectable,
|
|
|
+ InternalServerErrorException,
|
|
|
+} from '@nestjs/common';
|
|
|
+import type { MultipartFile } from '@fastify/multipart';
|
|
|
+import { SysConfigReaderService } from '@box/core/sys-config/sys-config-reader.service';
|
|
|
+import * as path from 'path';
|
|
|
+import { createReadStream, createWriteStream } from 'fs';
|
|
|
+import { mkdir, rm } from 'fs/promises';
|
|
|
+import { Transform, pipeline as pipelineCallback } from 'stream';
|
|
|
+import { promisify } from 'util';
|
|
|
+import { PutObjectCommand, S3Client } from '@aws-sdk/client-s3';
|
|
|
+
|
|
|
/** Broad category of an uploaded file; selects which size limit applies. */
export type UploadFileType = 'video' | 'image' | 'voice' | 'other';

/** Where an uploaded file ultimately ended up being persisted. */
export type ImageStorage = 'LOCAL_ONLY' | 'S3_ONLY' | 'S3_AND_LOCAL';

/** Fully-resolved S3 settings; only built when every required field is present. */
type S3Config = {
  accessKeyId: string;
  secretAccessKey: string;
  bucket: string;
  region: string;
  // Optional custom endpoint for S3-compatible stores (e.g. MinIO) — TODO confirm intended targets.
  endpointUrl?: string;
};

/** Storage key of one upload, expressed once per backend. */
export type KeyPathResult = {
  // Normalized "<folder>/<filename>" with no leading/trailing slashes.
  baseKeyPath: string;
  // Key under the local root: "local/<baseKeyPath>".
  localKeyPath: string;
  // Key used in the S3 bucket (identical to baseKeyPath).
  s3KeyPath: string;
};
|
|
|
+
|
|
|
+const normalizeSegment = (input: string) => {
|
|
|
+ const withForwardSlashes = (input || '').replace(/\\/g, '/');
|
|
|
+ const collapsed = withForwardSlashes.replace(/\/+/g, '/');
|
|
|
+ const trimmed = collapsed.replace(/^\/+/, '').replace(/\/+$/, '');
|
|
|
+ return trimmed;
|
|
|
+};
|
|
|
+
|
|
|
// Internal marker error: lets callers distinguish an S3 transfer failure
// (mapped to an HTTP 500) from unrelated bugs that should propagate as-is.
class S3UploadFailedError extends Error {}
|
|
|
+
|
|
|
+export const buildUploadKeyPaths = (
|
|
|
+ folder: string,
|
|
|
+ filename: string,
|
|
|
+): KeyPathResult => {
|
|
|
+ const normalizedFolder = normalizeSegment(folder);
|
|
|
+ const normalizedFilename = normalizeSegment(filename);
|
|
|
+
|
|
|
+ if (!normalizedFilename) {
|
|
|
+ throw new BadRequestException('Filename is required');
|
|
|
+ }
|
|
|
+
|
|
|
+ const parts = normalizedFolder
|
|
|
+ ? [normalizedFolder, normalizedFilename]
|
|
|
+ : [normalizedFilename];
|
|
|
+ const baseKeyPath = parts.join('/');
|
|
|
+ const segments = baseKeyPath.split('/');
|
|
|
+
|
|
|
+ if (segments.some((segment) => segment === '..')) {
|
|
|
+ throw new BadRequestException('Path traversal is not allowed');
|
|
|
+ }
|
|
|
+
|
|
|
+ return {
|
|
|
+ baseKeyPath,
|
|
|
+ localKeyPath: `local/${baseKeyPath}`,
|
|
|
+ s3KeyPath: baseKeyPath,
|
|
|
+ };
|
|
|
+};
|
|
|
+
|
|
|
+@Injectable()
|
|
|
+export class UploaderService {
|
|
|
+ private s3Client?: S3Client;
|
|
|
+ private s3ClientKey?: string;
|
|
|
+
|
|
|
+ constructor(private readonly sysConfigReader: SysConfigReaderService) {}
|
|
|
+
|
|
|
+ getKeyPaths(folder: string, filename: string): KeyPathResult {
|
|
|
+ return buildUploadKeyPaths(folder, filename);
|
|
|
+ }
|
|
|
+
|
|
|
+ async upload(
|
|
|
+ file: MultipartFile,
|
|
|
+ folder: string,
|
|
|
+ filename: string,
|
|
|
+ fileType: UploadFileType,
|
|
|
+ ) {
|
|
|
+ const keyPaths = buildUploadKeyPaths(folder, filename);
|
|
|
+
|
|
|
+ const imageConfig = await this.sysConfigReader.getImageConfig();
|
|
|
+ const preferredStrategy =
|
|
|
+ (imageConfig.storageStrategy as
|
|
|
+ | 'LOCAL_ONLY'
|
|
|
+ | 'S3_ONLY'
|
|
|
+ | 'S3_AND_LOCAL'
|
|
|
+ | undefined) ?? 'LOCAL_ONLY';
|
|
|
+ const s3Config = this.resolveS3Config(imageConfig);
|
|
|
+
|
|
|
+ const maxBytes = this.resolveMaxBytes(fileType, imageConfig?.limitsMb);
|
|
|
+ const localRootPath = this.resolveLocalRootPath(
|
|
|
+ imageConfig?.local?.rootPath,
|
|
|
+ );
|
|
|
+ const fullLocalPath = await this.writeLocalFile(
|
|
|
+ file,
|
|
|
+ keyPaths.localKeyPath,
|
|
|
+ maxBytes,
|
|
|
+ localRootPath,
|
|
|
+ );
|
|
|
+
|
|
|
+ if (preferredStrategy === 'LOCAL_ONLY') {
|
|
|
+ return {
|
|
|
+ keyPath: keyPaths.localKeyPath,
|
|
|
+ imageStorage: 'LOCAL_ONLY',
|
|
|
+ };
|
|
|
+ }
|
|
|
+
|
|
|
+ if (preferredStrategy === 'S3_AND_LOCAL') {
|
|
|
+ if (!s3Config) {
|
|
|
+ throw new InternalServerErrorException(
|
|
|
+ 'S3 is disabled or misconfigured',
|
|
|
+ );
|
|
|
+ }
|
|
|
+ try {
|
|
|
+ await this.uploadLocalFileToS3(
|
|
|
+ fullLocalPath,
|
|
|
+ keyPaths.baseKeyPath,
|
|
|
+ s3Config,
|
|
|
+ );
|
|
|
+ } catch (err) {
|
|
|
+ if (err instanceof S3UploadFailedError) {
|
|
|
+ throw new InternalServerErrorException('Failed to upload to S3');
|
|
|
+ }
|
|
|
+ throw err;
|
|
|
+ }
|
|
|
+ return {
|
|
|
+ keyPath: keyPaths.localKeyPath,
|
|
|
+ imageStorage: 'S3_AND_LOCAL',
|
|
|
+ };
|
|
|
+ }
|
|
|
+
|
|
|
+ if (preferredStrategy === 'S3_ONLY') {
|
|
|
+ if (!s3Config) {
|
|
|
+ return {
|
|
|
+ keyPath: keyPaths.localKeyPath,
|
|
|
+ imageStorage: 'LOCAL_ONLY',
|
|
|
+ };
|
|
|
+ }
|
|
|
+ try {
|
|
|
+ await this.uploadLocalFileToS3(
|
|
|
+ fullLocalPath,
|
|
|
+ keyPaths.baseKeyPath,
|
|
|
+ s3Config,
|
|
|
+ );
|
|
|
+ await this.deleteLocalFile(fullLocalPath);
|
|
|
+ return {
|
|
|
+ keyPath: keyPaths.baseKeyPath,
|
|
|
+ imageStorage: 'S3_ONLY',
|
|
|
+ };
|
|
|
+ } catch (err) {
|
|
|
+ return {
|
|
|
+ keyPath: keyPaths.localKeyPath,
|
|
|
+ imageStorage: 'LOCAL_ONLY',
|
|
|
+ };
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ return {
|
|
|
+ keyPath: keyPaths.localKeyPath,
|
|
|
+ imageStorage: 'LOCAL_ONLY',
|
|
|
+ };
|
|
|
+ }
|
|
|
+
|
|
|
+ private resolveMaxBytes(
|
|
|
+ fileType: UploadFileType,
|
|
|
+ limitsMb?: { image?: number; video?: number },
|
|
|
+ ): number {
|
|
|
+ const defaults = {
|
|
|
+ image: 10,
|
|
|
+ video: 100,
|
|
|
+ };
|
|
|
+
|
|
|
+ if (fileType === 'video') {
|
|
|
+ return (limitsMb?.video ?? defaults.video) * 1024 * 1024;
|
|
|
+ }
|
|
|
+
|
|
|
+ return (limitsMb?.image ?? defaults.image) * 1024 * 1024;
|
|
|
+ }
|
|
|
+
|
|
|
+ private resolveLocalRootPath(rootPath?: string): string {
|
|
|
+ const trimmed = rootPath?.trim();
|
|
|
+ if (!trimmed) {
|
|
|
+ throw new InternalServerErrorException(
|
|
|
+ 'ImageConfig.local.rootPath is required for uploads',
|
|
|
+ );
|
|
|
+ }
|
|
|
+ return trimmed;
|
|
|
+ }
|
|
|
+
|
|
|
+ private async writeLocalFile(
|
|
|
+ file: MultipartFile,
|
|
|
+ localKeyPath: string,
|
|
|
+ maxBytes: number,
|
|
|
+ rootPath: string,
|
|
|
+ ): Promise<string> {
|
|
|
+ const source = file.file as NodeJS.ReadableStream;
|
|
|
+ if (!source) {
|
|
|
+ throw new BadRequestException('Upload stream is missing');
|
|
|
+ }
|
|
|
+
|
|
|
+ const fullPath = path.join(rootPath, localKeyPath);
|
|
|
+ const dir = path.dirname(fullPath);
|
|
|
+ await mkdir(dir, { recursive: true });
|
|
|
+
|
|
|
+ const pipeline = promisify(pipelineCallback);
|
|
|
+ const destination = createWriteStream(fullPath);
|
|
|
+ let remaining = maxBytes;
|
|
|
+ const countingTransform = new Transform({
|
|
|
+ transform: (chunk, _enc, callback) => {
|
|
|
+ if (!Buffer.isBuffer(chunk)) {
|
|
|
+ callback(null, chunk);
|
|
|
+ return;
|
|
|
+ }
|
|
|
+
|
|
|
+ if (chunk.length > 0) {
|
|
|
+ remaining -= chunk.length;
|
|
|
+ if (remaining < 0) {
|
|
|
+ callback(
|
|
|
+ new BadRequestException('File size exceeds allowed limit'),
|
|
|
+ );
|
|
|
+ return;
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ callback(null, chunk);
|
|
|
+ },
|
|
|
+ });
|
|
|
+
|
|
|
+ try {
|
|
|
+ await pipeline(source, countingTransform, destination);
|
|
|
+ return fullPath;
|
|
|
+ } catch (err) {
|
|
|
+ destination.destroy();
|
|
|
+ await rm(fullPath, { force: true }).catch(() => undefined);
|
|
|
+ if (err instanceof BadRequestException) {
|
|
|
+ throw err;
|
|
|
+ }
|
|
|
+ throw err;
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ private resolveS3Config(imageConfig: any): S3Config | undefined {
|
|
|
+ if (!imageConfig?.s3Enabled) return undefined;
|
|
|
+ const s3 = imageConfig?.s3 ?? {};
|
|
|
+ const accessKeyId = s3.accessKeyId?.trim();
|
|
|
+ const secretAccessKey = s3.secretAccessKey?.trim();
|
|
|
+ const bucket = s3.bucket?.trim();
|
|
|
+ const region = s3.region?.trim();
|
|
|
+ const endpointUrl = s3.endpointUrl?.trim();
|
|
|
+ if (!accessKeyId || !secretAccessKey || !bucket || !region) {
|
|
|
+ return undefined;
|
|
|
+ }
|
|
|
+ return { accessKeyId, secretAccessKey, bucket, region, endpointUrl };
|
|
|
+ }
|
|
|
+
|
|
|
+ private createS3Client(config: S3Config): S3Client {
|
|
|
+ const key = JSON.stringify({
|
|
|
+ accessKeyId: config.accessKeyId,
|
|
|
+ bucket: config.bucket,
|
|
|
+ region: config.region,
|
|
|
+ endpointUrl: config.endpointUrl,
|
|
|
+ });
|
|
|
+ if (this.s3Client && this.s3ClientKey === key) {
|
|
|
+ return this.s3Client;
|
|
|
+ }
|
|
|
+ this.s3Client = new S3Client({
|
|
|
+ region: config.region,
|
|
|
+ credentials: {
|
|
|
+ accessKeyId: config.accessKeyId,
|
|
|
+ secretAccessKey: config.secretAccessKey,
|
|
|
+ },
|
|
|
+ endpoint: config.endpointUrl,
|
|
|
+ });
|
|
|
+ this.s3ClientKey = key;
|
|
|
+ return this.s3Client;
|
|
|
+ }
|
|
|
+
|
|
|
+ private async uploadLocalFileToS3(
|
|
|
+ localPath: string,
|
|
|
+ key: string,
|
|
|
+ config: S3Config,
|
|
|
+ ): Promise<void> {
|
|
|
+ const client = this.createS3Client(config);
|
|
|
+ const bodyStream = createReadStream(localPath);
|
|
|
+ let streamError: unknown;
|
|
|
+ bodyStream.on('error', (err) => {
|
|
|
+ streamError = err;
|
|
|
+ });
|
|
|
+
|
|
|
+ try {
|
|
|
+ await client.send(
|
|
|
+ new PutObjectCommand({
|
|
|
+ Bucket: config.bucket,
|
|
|
+ Key: key,
|
|
|
+ Body: bodyStream,
|
|
|
+ }),
|
|
|
+ );
|
|
|
+ if (streamError) {
|
|
|
+ const error = new S3UploadFailedError('S3 upload failed');
|
|
|
+ (error as any).cause = streamError;
|
|
|
+ throw error;
|
|
|
+ }
|
|
|
+ } catch (err) {
|
|
|
+ bodyStream.destroy();
|
|
|
+ const wrapped =
|
|
|
+ err instanceof S3UploadFailedError
|
|
|
+ ? err
|
|
|
+ : new S3UploadFailedError('S3 upload failed');
|
|
|
+ if (!(wrapped as any).cause) {
|
|
|
+ (wrapped as any).cause = err ?? streamError;
|
|
|
+ }
|
|
|
+ throw wrapped;
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ private async deleteLocalFile(fullPath: string): Promise<void> {
|
|
|
+ await rm(fullPath, { force: true }).catch(() => undefined);
|
|
|
+ }
|
|
|
+}
|