src for uploading files to S3 object storage

Commit 6f8c8d7dfa (parent 7c17aa7843)
2026-01-08 17:37:55 +07:00
9 changed files with 3329 additions and 0 deletions

upload-large-file/.gitignore (vendored, new file, +3)

@@ -0,0 +1,3 @@
node_modules
src/be/uploads
.env
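
The handlers below read their R2 connection settings from environment variables loaded by dotenv; the .env file itself is gitignored, as shown above. A minimal sketch of what it presumably contains (the variable names are taken from the code; the values are placeholders):

R2_ENDPOINT=https://<account-id>.r2.cloudflarestorage.com
R2_ACCESS_KEY_ID=<access-key-id>
R2_SECRET_ACCESS_KEY=<secret-access-key>
R2_BUCKET_NAME=<bucket-name>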

upload-large-file/package-lock.json (generated, new file, +2938)
(Diff suppressed: file too large.)

upload-large-file/package.json (new file, +21)

@@ -0,0 +1,21 @@
{
  "name": "upload-large-file",
  "version": "1.0.0",
  "main": "index.js",
  "scripts": {
    "start:api": "node src/be/index.js",
    "start:client": "http-server src/fe -p 8080"
  },
  "author": "",
  "license": "ISC",
  "description": "",
  "dependencies": {
    "@aws-sdk/client-s3": "3.670.0",
    "@aws-sdk/s3-request-presigner": "3.670.0",
    "aws-sdk": "2.1691.0",
    "cors": "2.8.5",
    "dotenv": "16.4.5",
    "express": "4.21.1",
    "multer": "1.4.5-lts.1"
  }
}
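
With dependencies installed, the two scripts run the API and the static client separately. Note that http-server is used by start:client but is not listed in dependencies, so it presumably needs to be installed globally or invoked via npx:

npm install
npm run start:api      # Express API on http://localhost:3000
npm run start:client   # static frontend on http://localhost:8080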

upload-large-file/src/be/generate-presigned-url-handler.js (new file, +42)

@@ -0,0 +1,42 @@
const {
  S3Client,
  PutObjectCommand,
  PutBucketCorsCommand,
} = require("@aws-sdk/client-s3");
const { getSignedUrl } = require("@aws-sdk/s3-request-presigner");

const s3Client = new S3Client({
  region: "auto",
  endpoint: process.env.R2_ENDPOINT,
  credentials: {
    accessKeyId: process.env.R2_ACCESS_KEY_ID,
    secretAccessKey: process.env.R2_SECRET_ACCESS_KEY,
  },
});

// Returns a presigned PUT URL for a random object key, valid for one hour.
const generatePresignedUrlHandler = async (_req, res) => {
  const targetBucket = "test-large-files";
  const fileKey = `${targetBucket}/${Date.now()}-${Math.random()
    .toString(36)
    .substring(7)}`;
  const expiresIn = 60 * 60; // 1 hour expiration
  try {
    const presignedUrl = await getPresignedUrlForUpload(fileKey, expiresIn);
    res.json({ url: presignedUrl });
  } catch (error) {
    console.error("Error generating pre-signed URL", error);
    res.status(500).json({ error: "Failed to generate pre-signed URL" });
  }
};

async function getPresignedUrlForUpload(key, expiresIn) {
  const command = new PutObjectCommand({
    Bucket: process.env.R2_BUCKET_NAME,
    Key: key,
  });
  return await getSignedUrl(s3Client, command, { expiresIn });
}

// Applies a permissive CORS policy to the bucket so the browser can PUT
// directly to presigned URLs; index.js mounts this at POST /enable-bucket-cors.
const enableBucketCorsHandler = async (_req, res) => {
  try {
    const response = await s3Client.send(
      new PutBucketCorsCommand({
        Bucket: process.env.R2_BUCKET_NAME,
        CORSConfiguration: {
          CORSRules: [
            {
              AllowedHeaders: ["*"],
              AllowedMethods: ["GET", "PUT", "HEAD", "POST", "OPTIONS"],
              AllowedOrigins: ["*"],
              ExposeHeaders: ["ETag"],
              MaxAgeSeconds: 3000,
            },
          ],
        },
      })
    );
    console.log("CORS configuration set successfully:", response);
    res.json({ response });
  } catch (error) {
    console.error("Error setting CORS configuration:", error);
    res.status(500).json({ error: "Failed to set CORS configuration" });
  }
};

module.exports = { generatePresignedUrlHandler, enableBucketCorsHandler };
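
The frontend in this commit only exercises the multipart flow, so for completeness, a hedged sketch of how a browser client might use this simple presigned-URL route (the endpoint and response shape are as defined above; the function name is illustrative):

// Hypothetical client-side usage of POST /generate-presigned-url (sketch).
async function uploadViaPresignedUrl(file) {
  // Ask the API for a one-hour presigned PUT URL (no request body needed).
  const res = await fetch("http://localhost:3000/generate-presigned-url", {
    method: "POST",
  });
  const { url } = await res.json();
  // PUT the file body straight to R2; no credentials needed in the browser.
  const putRes = await fetch(url, { method: "PUT", body: file });
  if (!putRes.ok) throw new Error("Upload failed");
}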

upload-large-file/src/be/index.js (new file, +60)

@@ -0,0 +1,60 @@
require("dotenv").config();
const express = require("express");
const cors = require("cors");
const {
generatePresignedUrlHandler,
enableBucketCorsHandler,
} = require("./generate-presigned-url-handler");
const { uploadFileHandler } = require("./upload-file-handler");
const upload = require("./upload-middleware");
const {
initiateMultiPartsHandler,
getPresignedUrlHandler,
completeMultiPartsHandler,
} = require("./multi-part-handler");
const app = express();
const port = 3000;
app.use(cors());
app.use(express.json());
// NOTE: The presigned URL routes
app.post("/generate-presigned-url", generatePresignedUrlHandler);
// NOTE: The multipart upload routes
app.post("/initiate-multipart-upload", initiateMultiPartsHandler);
app.post("/get-presigned-url", getPresignedUrlHandler);
app.post("/complete-multipart-upload", completeMultiPartsHandler);
app.post("/enable-bucket-cors", async (req, res) => {
try {
const response = await s3Client.send(
new PutBucketCorsCommand({
Bucket: process.env.R2_BUCKET_NAME,
CORSConfiguration: {
CORSRules: [
{
AllowedHeaders: ["*"],
AllowedMethods: ["GET", "PUT", "HEAD", "POST", "OPTIONS"],
AllowedOrigins: ["*"],
ExposeHeaders: ["ETag"],
MaxAgeSeconds: 3000,
},
],
},
})
);
console.log("CORS configuration set successfully:", response);
res.json({ response });
} catch (error) {
console.error("Error setting CORS configuration:", error);
res.status(500).json({ error: "Failed to set CORS configuration" });
}
});
app.post("/upload", upload.single("video"), uploadFileHandler);
app.listen(port, () => {
console.log(`Server running at http://localhost:${port}`);
});
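
The CORS policy only needs to be applied to the bucket once before the first browser upload; for example, with the API running:

curl -X POST http://localhost:3000/enable-bucket-cors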

upload-large-file/src/be/multi-part-handler.js (new file, +72)

@@ -0,0 +1,72 @@
const {
  CreateMultipartUploadCommand,
  UploadPartCommand,
  S3Client,
  CompleteMultipartUploadCommand,
} = require("@aws-sdk/client-s3");
const { getSignedUrl } = require("@aws-sdk/s3-request-presigner");

const s3Client = new S3Client({
  region: "auto",
  endpoint: process.env.R2_ENDPOINT,
  credentials: {
    accessKeyId: process.env.R2_ACCESS_KEY_ID,
    secretAccessKey: process.env.R2_SECRET_ACCESS_KEY,
  },
});

const targetFolder = "test-large-files";

// Step 1: create the multipart upload and hand its UploadId to the client.
const initiateMultiPartsHandler = async (req, res) => {
  const { fileName } = req.body;
  const params = {
    Bucket: process.env.R2_BUCKET_NAME,
    Key: `${targetFolder}/${fileName}`,
  };
  const command = new CreateMultipartUploadCommand(params);
  const { UploadId } = await s3Client.send(command);
  res.json({ uploadId: UploadId });
};

// Step 2: presign an UploadPart request for one part (valid for one hour).
const getPresignedUrlHandler = async (req, res) => {
  const { uploadId, partNumber, fileName } = req.body;
  const params = {
    Bucket: process.env.R2_BUCKET_NAME,
    Key: `${targetFolder}/${fileName}`,
    PartNumber: partNumber,
    UploadId: uploadId,
  };
  const command = new UploadPartCommand(params);
  const presignedUrl = await getSignedUrl(s3Client, command, {
    expiresIn: 3600,
  });
  res.json({ url: presignedUrl });
};

// Step 3: stitch the parts together; `parts` must hold { PartNumber, ETag }
// entries sorted by PartNumber ascending.
const completeMultiPartsHandler = async (req, res) => {
  const { uploadId, parts, fileName } = req.body;
  const params = {
    Bucket: process.env.R2_BUCKET_NAME,
    Key: `${targetFolder}/${fileName}`,
    UploadId: uploadId,
    MultipartUpload: { Parts: parts },
  };
  const command = new CompleteMultipartUploadCommand(params);
  await s3Client.send(command);
  res.json({ message: "Upload completed" });
};

module.exports = {
  initiateMultiPartsHandler,
  getPresignedUrlHandler,
  completeMultiPartsHandler,
};
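
One gap worth noting: if the client never completes an upload (say, a tab closes mid-transfer), the already-uploaded parts keep accruing storage until aborted. This commit does not handle that; a hedged sketch of a companion handler in the same style, using AbortMultipartUploadCommand from @aws-sdk/client-s3 (the handler name and any route it would be mounted on are hypothetical):

// Hypothetical companion handler (not part of this commit): aborts a stalled
// multipart upload so R2 discards its parts. AbortMultipartUploadCommand
// would join the destructured require at the top of this file.
const { AbortMultipartUploadCommand } = require("@aws-sdk/client-s3");

const abortMultiPartsHandler = async (req, res) => {
  const { uploadId, fileName } = req.body;
  await s3Client.send(
    new AbortMultipartUploadCommand({
      Bucket: process.env.R2_BUCKET_NAME,
      Key: `${targetFolder}/${fileName}`,
      UploadId: uploadId,
    })
  );
  res.json({ message: "Upload aborted" });
};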

upload-large-file/src/be/upload-file-handler.js (new file, +45)

@@ -0,0 +1,45 @@
const AWS = require("aws-sdk");
const fs = require("fs");

const s3 = new AWS.S3({
  accessKeyId: process.env.R2_ACCESS_KEY_ID,
  secretAccessKey: process.env.R2_SECRET_ACCESS_KEY,
  endpoint: process.env.R2_ENDPOINT,
  signatureVersion: "v4",
});

const uploadFileHandler = async (req, res) => {
  const file = req.file;
  if (!file) return res.status(400).json({ message: "No file uploaded" });
  const fileStream = fs.createReadStream(file.path);
  // NOTE: s3.upload() is the v2 SDK's managed uploader: the simplest way to
  // upload a file to S3, and it switches to multipart automatically for
  // large bodies.
  const uploadParams = {
    Bucket: process.env.R2_BUCKET_NAME,
    Key: `uploads/test-mootod/${file.filename}`,
    Body: fileStream,
    ContentType: file.mimetype,
  };
  try {
    // Part size and concurrency are uploader options, not request params
    // (PartSize is not a valid key inside uploadParams).
    const data = await s3
      .upload(uploadParams, { partSize: 10 * 1024 * 1024, queueSize: 4 }) // 10 MB per part
      .promise();
    res.json({ message: "File uploaded successfully", url: data.Location });
  } catch (error) {
    console.error("Error uploading file:", error);
    res
      .status(500)
      .json({ message: "File upload failed", error: error.message });
  } finally {
    // Always remove the temp file multer wrote to disk.
    fs.unlinkSync(file.path);
  }
};

module.exports = { uploadFileHandler };
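
Since index.js wires this handler as upload.single("video"), the form field must be named video; a quick smoke test might look like this (file path illustrative):

curl -F "video=@./sample.mp4" http://localhost:3000/upload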

upload-large-file/src/be/upload-middleware.js (new file, +15)

@@ -0,0 +1,15 @@
const multer = require("multer");
const path = require("path");
const fs = require("fs");

// Multer does not create the destination directory when it is supplied via a
// function, and uploads/ is gitignored, so ensure it exists up front.
const uploadDir = path.join(__dirname, "uploads");
fs.mkdirSync(uploadDir, { recursive: true });

const storage = multer.diskStorage({
  destination: (req, file, cb) => {
    cb(null, uploadDir);
  },
  filename: (req, file, cb) => {
    // Timestamp-based name that keeps the original extension, e.g. "1736332675000.mp4"
    cb(null, Date.now() + path.extname(file.originalname));
  },
});

const upload = multer({ storage });
module.exports = upload;
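
Nothing here caps the upload size, so whatever the client sends is written to disk first. If a cap were wanted, multer accepts a limits option; an illustrative variant (the 2 GB value is arbitrary):

// Hypothetical variant with a per-file size cap (limits.fileSize is in bytes).
const upload = multer({
  storage,
  limits: { fileSize: 2 * 1024 * 1024 * 1024 }, // 2 GB
});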

upload-large-file/src/fe/index.html (new file, +133)

@@ -0,0 +1,133 @@
<!DOCTYPE html>
<html lang="en">
  <head>
    <meta charset="UTF-8" />
    <meta name="viewport" content="width=device-width, initial-scale=1.0" />
    <title>Upload Video to R2</title>
  </head>
  <body>
    <h1>Upload a Video File</h1>
    <form id="uploadForm" enctype="multipart/form-data">
      <input type="file" id="fileInput" />
      <button type="submit">Upload</button>
    </form>
    <progress id="progressBar" value="0" max="100"></progress>
    <script>
      document
        .getElementById("uploadForm")
        .addEventListener("submit", async (e) => {
          e.preventDefault();
          await uploadFile();
        });

      async function uploadFile() {
        const fileInput = document.getElementById("fileInput");
        const file = fileInput.files[0];
        if (!file) {
          alert("Please select a file.");
          return;
        }
        const uploadId = await initiateMultipartUpload(file.name);
        /**
         * NOTE: S3 multipart parts may be 5 MB to 5 GB each (only the last
         * part may be smaller). For simplicity, 500 MB parts are used here.
         * See: https://docs.aws.amazon.com/AmazonS3/latest/userguide/upload-objects.html
         */
        const partSize = 500 * 1024 * 1024; // 500 MB per part
        const parts = [];
        const uploadPromises = [];
        let uploadedSize = 0;
        // Launch all part uploads concurrently; the progress bar advances as
        // each part completes.
        for (let i = 0; i < file.size; i += partSize) {
          const partNumber = Math.floor(i / partSize) + 1;
          const chunk = file.slice(i, i + partSize);
          const uploadPromise = (async (partNumber) => {
            const presignedUrl = await getPresignedUrl(
              uploadId,
              partNumber,
              file.name
            );
            const etag = await uploadPartToS3(presignedUrl, chunk);
            parts.push({
              PartNumber: partNumber,
              ETag: etag,
            });
            uploadedSize += chunk.size;
            const progressBarEle = document.getElementById("progressBar");
            progressBarEle.value = (uploadedSize / file.size) * 100;
          })(partNumber);
          uploadPromises.push(uploadPromise);
        }
        await Promise.all(uploadPromises);
        // Parts finish in arbitrary order, but CompleteMultipartUpload
        // requires them sorted by PartNumber ascending.
        parts.sort((a, b) => a.PartNumber - b.PartNumber);
        await completeMultipartUpload(uploadId, parts, file.name);
        alert("File uploaded successfully!");
      }

      async function initiateMultipartUpload(fileName) {
        const response = await fetch(
          "http://localhost:3000/initiate-multipart-upload",
          {
            method: "POST",
            headers: {
              "Content-Type": "application/json",
            },
            body: JSON.stringify({ fileName }),
          }
        );
        const { uploadId } = await response.json();
        return uploadId;
      }

      async function getPresignedUrl(uploadId, partNumber, fileName) {
        const response = await fetch(
          "http://localhost:3000/get-presigned-url",
          {
            method: "POST",
            headers: {
              "Content-Type": "application/json",
            },
            body: JSON.stringify({ uploadId, partNumber, fileName }),
          }
        );
        const { url } = await response.json();
        return url;
      }

      async function uploadPartToS3(url, chunk) {
        const response = await fetch(url, {
          method: "PUT",
          headers: {
            "Content-Type": "application/octet-stream",
          },
          body: chunk,
        });
        if (!response.ok) throw new Error("Failed to upload part");
        // headers.get("ETag") returns null when the bucket's CORS policy
        // does not expose the ETag header.
        const etag = response.headers.get("ETag");
        if (!etag) {
          throw new Error("Failed to retrieve ETag from the response.");
        }
        return etag.replace(/"/g, "");
      }

      async function completeMultipartUpload(uploadId, parts, fileName) {
        await fetch("http://localhost:3000/complete-multipart-upload", {
          method: "POST",
          headers: {
            "Content-Type": "application/json",
          },
          body: JSON.stringify({ uploadId, parts, fileName }),
        });
      }
    </script>
  </body>
</html>