我的主要目标是将用户手机上的大型视频文件上传到 s3。
过去,对于 Web 应用程序,我能够对用户的视频选择进行分块,并将其部分上传到 s3,并带有预先签名的 url。
在 RN Expo 上最快上传大型视频文件的最佳方式是什么?我似乎无法对本地视频文件进行分块。我尝试过但不起作用的事情:
const fileStream = await fetch(uri);
——导致网络请求失败
const fileData = await FileSystem.readAsStringAsync(uri, { encoding: FileSystem.EncodingType.Base64 });
——由于内存不足(OOM)问题而失败
{ S3Client, S3 } from "@aws-sdk/client-s3"
——这些 AWS 库似乎是为 Web 而不是 RN 构建的。由于缺少依赖项(缓冲区、utils、http2、http - 是的,我尝试 npm 安装它们)而失败
IG 和 Tiktok 等应用程序如何处理从手机上传视频?这是 RN Expo 的限制吗?
这是我所做的工作的粗略概述:
// Upload the selected video to S3 via multipart upload with presigned URLs:
// read the file in 5 MiB base64 slices, PUT each slice in parallel, then ask
// the backend to complete the multipart upload.
// NOTE(review): relies on closure state defined outside this fragment
// (videoUri, imageUri, videoData, setLoading, setProgressMessage,
// createVideoPresignedUrl, completeUpload, uploadThumbnail).
const uploadVideos = async () => {
  if (!videoUri && !imageUri) {
    return
  }
  setLoading(true)
  const { uri } = videoData
  // S3 requires every part except the last to be at least 5 MiB.
  const CHUNK_SIZE = 5 * 1024 * 1024
  const fileInfo = await FileSystem.getInfoAsync(uri)
  if (!fileInfo.exists) {
    throw new Error(`Video file not found: ${uri}`)
  }
  const fileSize = fileInfo.size
  const partSize = Math.min(CHUNK_SIZE, fileSize)
  const parts = Math.ceil(fileSize / partSize)
  const { signedUrlsDict, videoId, uploadId } = await createVideoPresignedUrl(parts)
  let position = 0
  const promises = []
  while (position < fileSize) {
    // The final part may be shorter than CHUNK_SIZE.
    const length = Math.min(CHUNK_SIZE, fileSize - position)
    const chunk = await FileSystem.readAsStringAsync(uri, {
      encoding: FileSystem.EncodingType.Base64,
      length,
      position,
    })
    // Buffer.from() may return a view into a larger shared pool, so using
    // `.buffer` directly can include stray bytes from other allocations.
    // Slice to exactly this chunk's bytes to avoid corrupting the part.
    const buf = Buffer.from(chunk, 'base64')
    const binaryData = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength)
    promises.push(
      fetch(signedUrlsDict[position / CHUNK_SIZE], {
        method: 'PUT',
        headers: {
          'Content-Type': 'video/mp4',
        },
        body: binaryData,
      })
    )
    position += CHUNK_SIZE
    // Progress reflects how much of the file has been read and queued,
    // not how much has actually finished uploading.
    const currentPercent = Math.ceil(position / fileSize * 100)
    if (currentPercent < 100) {
      setProgressMessage(`Video is ${currentPercent}% complete`)
    } else {
      setProgressMessage('Finalizing upload..')
    }
  }
  const resParts = await Promise.all(promises)
  // Promise.all preserves push order, so index + 1 is the 1-based part
  // number S3 expects, already in ascending order.
  const eTags = resParts.map((res, index) => {
    return {
      ETag: res.headers.get('etag'),
      PartNumber: index + 1,
    }
  })
  await completeUpload(eTags, uploadId, videoId)
  setProgressMessage('Uploading thumbnail.. ')
  // NOTE(review): setLoading(false) is never called in this fragment —
  // presumably uploadThumbnail() or a caller clears it; confirm.
  uploadThumbnail(videoId)
}
创建预签名网址:
// --- Backend: create an S3 multipart upload and presign one URL per part ---
// NOTE(review): pasted fragment — the enclosing route handler, the `catch`
// for the `try` below, and BUCKET_NAME / expirationTime are not visible here.
const { parts } = req.body
// One S3 object per video, keyed by a fresh UUID.
const videoId = uuidv4()
const params = {
Bucket: BUCKET_NAME,
Key: videoId,
ContentType: 'video/mp4'
}
try {
// Start the multipart upload; the returned UploadId ties all parts together.
const command = new CreateMultipartUploadCommand(params)
const { UploadId } = await s3Client.send(command)
const promises = []
// Presign one UploadPart URL per expected part (PartNumber is 1-based).
for (let index = 0; index < parts; index++) {
const command = new UploadPartCommand({
...params,
UploadId,
PartNumber: index + 1
})
// getSignedUrl returns a promise; collect and resolve them in parallel below.
const signedPromise = getSignedUrl(s3Client, command, {
expiresIn: expirationTime
})
promises.push(signedPromise)
}
const resPromises = await Promise.all(promises)
// Map 0-based part index -> signed URL; the client indexes parts this way.
const signedUrlsDict = {}
resPromises.forEach((url, i) => {
signedUrlsDict[i] = url
})
return res.status(200).send({
signedUrlsDict,
videoId,
uploadId: UploadId
})
所有分片上传完成后,您必须在后端(BE)运行 CompleteMultipartUploadCommand 来完成整个上传。
// --- Backend: finalize the multipart upload ---
// NOTE(review): pasted fragment — the `catch` for the `try` below and the
// success response are not shown here.
const { parts, uploadId, videoId } = req.body
const params = {
Bucket: BUCKET_NAME,
Key: videoId,
// Parts is the client-collected [{ ETag, PartNumber }] list; S3 expects it
// in ascending PartNumber order.
MultipartUpload: {
Parts: parts
},
UploadId: uploadId
}
try {
// Stitches the uploaded parts into the final S3 object.
const command = new CompleteMultipartUploadCommand(params)
await s3Client.send(command)
}
再次强调,请勿直接复制粘贴此内容。它并非与我的应用程序 100% 解耦,但 99% 是可独立使用的。希望这对某人有帮助。
对于来自 expo React-native 和 Express 后端的多部分 + 预签名上传,如下所示:
--前端
import * as FileSystem from 'expo-file-system';
import { Buffer } from 'buffer';
import { v4 as uuidv4 } from 'uuid';
import { lookup } from 'react-native-mime-types';
import axios from 'axios';
// S3 multipart uploads require every part except the last to be at least
// 5 MiB (5 * 1024 * 1024 bytes). The previous 5 * 1000 * 1000 is below that
// minimum, so CompleteMultipartUpload fails with EntityTooSmall for any
// file that needs more than one part.
const CHUNK_SIZE = 5 * 1024 * 1024; // 5 MiB minimum part size
/**
 * Reads the file at `uri` and resolves with its full contents as base64.
 * NOTE(review): loads the whole file into memory at once — large videos
 * should use the chunked reads done in `upload` instead.
 */
const fetchFileBase64 = (uri: string): Promise<string> =>
  FileSystem.readAsStringAsync(uri, {
    encoding: FileSystem.EncodingType.Base64,
  });
/**
 * Picks a file extension for a stored object.
 * Preference order: the suffix after the last '.' in the uri; the subtype of
 * the mime string; finally a guess from the mime's top-level type
 * ('jpg' for images, 'mp4' for videos, 'bin' otherwise).
 */
const getExtension = (uri: string, mimeType: string): string => {
  const dot = uri.lastIndexOf('.');
  const fromUri = dot === -1 ? '' : uri.slice(dot + 1);
  if (fromUri) {
    return fromUri;
  }
  const fromMime = mimeType.slice(mimeType.lastIndexOf('/') + 1);
  if (fromMime) {
    return fromMime;
  }
  if (mimeType.startsWith('image')) {
    return 'jpg';
  }
  return mimeType.startsWith('video') ? 'mp4' : 'bin';
};
/**
 * Builds a collision-free storage key of the form `<uuid>.<ext>`, where the
 * extension is derived from the uri when present, else from the mime type.
 */
const generateFileName = (uri: string, mimeType: string): string =>
  `${uuidv4()}.${getExtension(uri, mimeType)}`;
/**
 * PUTs one part body to its presigned URL, retrying up to `retries` times.
 * Resolves with the ETag header S3 returned plus the part number parsed from
 * the URL's `partNumber` query parameter; throws once all attempts fail.
 */
const uploadPartWithRetry = async (signedUrl: string, binaryData: ArrayBuffer, retries = 3): Promise<{ ETag: string | null; PartNumber: number }> => {
  let attempt = 0;
  while (attempt < retries) {
    attempt += 1;
    try {
      const response = await fetch(signedUrl, {
        method: 'PUT',
        body: binaryData,
      });
      if (!response.ok) {
        const errorText = await response.text();
        console.log(`Failed to upload part: ${errorText}`);
        throw new Error(`Failed to upload part: ${response.statusText} - ${errorText}`);
      }
      // The presigned URL carries the 1-based part number as a query param.
      return {
        ETag: response.headers.get('etag'),
        PartNumber: parseInt(signedUrl.split('partNumber=')[1].split('&')[0], 10),
      };
    } catch (error) {
      console.log(`Attempt ${attempt} failed for partNumber: ${signedUrl.split('partNumber=')[1].split('&')[0]}`, error);
      // Swallow and retry until the final attempt, then surface a summary error.
      if (attempt === retries) {
        throw new Error(`Failed to upload part after ${retries} attempts`);
      }
    }
  }
  throw new Error('Exceeded maximum retry attempts');
};
/** Shape of the backend's `/up/presigned` response. */
interface UploadResponse {
/** Maps 1-based part number -> presigned PUT URL for that part. */
signedUrlsDict: { [key: number]: string };
uploadId: string;
}
/** Body for `/up/complete`; field names mirror S3's CompleteMultipartUpload input. */
interface CompleteMultipartUploadParams {
Key: string;
UploadId: string;
MultipartUpload: {
Parts: { ETag: string | null; PartNumber: number }[];
};
}
/** Minimal media-picker result: local file uri plus an optional mime type. */
interface FileData {
uri: string;
type?: string;
}
/**
 * Uploads a local file to S3 via multipart upload with backend-presigned URLs.
 *
 * Flow: request one presigned URL per CHUNK_SIZE part, read the file in
 * base64 chunks with expo-file-system, PUT each chunk in parallel (with
 * retry), then ask the backend to complete the multipart upload.
 *
 * @param file          Picker result with the local `uri` and optional mime `type`.
 * @param access_token  Bearer token forwarded to both backend endpoints.
 * @returns A status object on success.
 * @throws Error when the file is missing, a part fails after all retries, or
 *         a backend call fails.
 */
export const upload = async (file: FileData, access_token: string): Promise<{ status: string }> => {
  const { uri, type } = file;
  const mimeType = lookup(uri.split('/').pop() as string) || type;
  const fileName = generateFileName(uri, mimeType as string);
  try {
    const fileInfo = await FileSystem.getInfoAsync(uri);
    if (!fileInfo.exists) {
      throw new Error(`File does not exist: ${uri}`);
    }
    const fileSize = fileInfo.size;
    const partSize = Math.min(CHUNK_SIZE, fileSize);
    const parts = Math.ceil(fileSize / partSize);
    // Get presigned URLs from the backend (one per part, keyed 1..parts).
    const { data }: { data: UploadResponse } = await axios.post(`${process.env.EXPO_API_ENDPOINT}/up/presigned`, {
      file_name: fileName,
      file_size: fileSize,
      parts,
    }, {
      headers: {
        Authorization: `Bearer ${access_token}`,
      },
    });
    const { signedUrlsDict, uploadId } = data;
    if (!signedUrlsDict) {
      throw new Error('Failed to retrieve signed URLs');
    }
    let position = 0;
    const uploadPromises: Promise<{ ETag: string | null; PartNumber: number }>[] = [];
    for (let partNumber = 0; partNumber < parts; partNumber++) {
      // Read only this part's bytes, never the whole file at once.
      const chunk = await FileSystem.readAsStringAsync(uri, {
        encoding: FileSystem.EncodingType.Base64,
        length: partSize,
        position,
      });
      // Buffer.from() may hand back a view into a shared memory pool, so
      // `.buffer` alone can include bytes from unrelated allocations. Slice
      // to exactly this chunk's bytes or the uploaded part is corrupted.
      const buf = Buffer.from(chunk, 'base64');
      const binaryData = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);
      uploadPromises.push(uploadPartWithRetry(signedUrlsDict[partNumber + 1], binaryData));
      position += partSize;
    }
    // Collect results via Promise.all, which preserves request order (the
    // previous push-on-resolve collected ETags in completion order, which
    // can violate S3's ascending-PartNumber requirement). Sort defensively
    // anyway before completing.
    const etags = await Promise.all(uploadPromises);
    etags.sort((a, b) => a.PartNumber - b.PartNumber);
    // Complete the multipart upload on the backend.
    const completeMultipartUploadParams: CompleteMultipartUploadParams = {
      Key: fileName,
      UploadId: uploadId,
      MultipartUpload: { Parts: etags },
    };
    await axios.post(`${process.env.EXPO_API_ENDPOINT}/up/complete`, completeMultipartUploadParams, {
      headers: {
        Authorization: `Bearer ${access_token}`,
      },
    });
    return { status: 'Uploaded successfully!' };
  } catch (error) {
    console.log('Upload failed:', error);
    // `catch` variables are `unknown` under strict mode; narrow before use.
    const message = error instanceof Error ? error.message : String(error);
    throw new Error('Upload failed: ' + message);
  }
};
--后端
// controllers/LabelController.ts
import { Response } from 'express';
import { AuthRequest } from '@root/types/AuthRequest';
import httpStatus from 'http-status';
import { pick } from '../utils/pick';
import config from '../config/config';
import AWS from 'aws-sdk';
import logger from '../config/logger';
// Initialize the S3 client (aws-sdk v2); only the region is configured here.
// NOTE(review): credentials are presumably resolved from the environment /
// default provider chain — confirm deployment config.
const s3 = new AWS.S3({
region: config.aws.region,
});
/** Body of the presign request: target file name/size and the number of part URLs to generate. */
interface PreSignedUrlPayload {
file_name: string;
file_size: number;
parts: number;
}
/** Body of the complete request; field names mirror S3's completeMultipartUpload params. */
interface CompleteUploadPayload {
Key: string;
UploadId: string;
MultipartUpload: {
Parts: {
ETag: string;
PartNumber: number;
}[];
};
}
/**
 * Creates an S3 multipart upload and returns one presigned UploadPart URL
 * per requested part, keyed by 1-based part number, plus the UploadId.
 *
 * Responds 200 with `{ uploadId, signedUrlsDict }`, 400 on an invalid part
 * count, 500 on any S3 failure.
 */
export async function getPreSignedUrls(req: AuthRequest, res: Response): Promise<void> {
  try {
    const payload = pick(req.body, ['file_name', 'file_size', 'parts']) as PreSignedUrlPayload;
    const numParts = payload.parts;
    // req.body is untrusted: reject part counts outside S3's allowed range
    // (1..10000) before creating any upload state.
    if (!Number.isInteger(numParts) || numParts < 1 || numParts > 10000) {
      res.status(httpStatus.BAD_REQUEST).send({
        message: 'Invalid parts count',
      });
      return;
    }
    const params = {
      Bucket: config.aws.media_bucket,
      Key: payload.file_name,
    };
    const createMultipartUpload = await s3.createMultipartUpload(params).promise();
    const { UploadId } = createMultipartUpload;
    // Presign all part URLs in parallel — the original awaited each
    // getSignedUrlPromise sequentially inside the loop.
    const urls = await Promise.all(
      Array.from({ length: numParts }, (_, i) =>
        s3.getSignedUrlPromise('uploadPart', {
          Bucket: config.aws.media_bucket,
          Key: payload.file_name,
          PartNumber: i + 1, // PartNumber is 1-based
          UploadId,
        })
      )
    );
    const signedUrlsDict: { [key: number]: string } = {};
    urls.forEach((url, i) => {
      signedUrlsDict[i + 1] = url;
    });
    res.status(httpStatus.OK).send({
      uploadId: UploadId,
      signedUrlsDict,
    });
  } catch (error) {
    logger.error('Error generating pre-signed URLs', error);
    res.status(httpStatus.INTERNAL_SERVER_ERROR).send({
      message: 'Error generating pre-signed URLs',
    });
  }
}
export async function markUploadComplete(req: AuthRequest, res: Response): Promise<void> {
try {
const payload = pick(req.body, ['Key', 'UploadId', 'MultipartUpload']) as CompleteUploadPayload;
if (!payload.Key || !payload.UploadId || !payload.MultipartUpload) {
throw new Error('Missing required fields in payload');
}
const params = {
Bucket: config.aws.media_bucket,
Key: payload.Key,
UploadId: payload.UploadId,
MultipartUpload: payload.MultipartUpload,
};
await s3.completeMultipartUpload(params).promise();
res.status(httpStatus.OK).send({
message: 'Upload marked as completed',
});
} catch (error) {
console.error('Error completing upload', error);
res.status(httpStatus.INTERNAL_SERVER_ERROR).send({
message: 'Error completing upload',
});
}
}