Resumable File Uploads with React and Node.js using resumable.js
Uploading large files comes with risks — unstable connections, browser crashes, or poor bandwidth can cause full uploads to fail.
Instead of starting over every time, resumable uploads allow you to:
- Upload files in small chunks
- Resume if the connection breaks
- Prevent re-uploads of completed files
- Efficiently handle large files (GBs+)
What We'll Build
We'll create a fully working Resumable File Uploader using:
- Frontend: React
- Backend: Node.js (Express + Multer)
- Library: resumable.js
What You'll Learn
- Chunked uploads with resumable.js
- Handling file chunks in Node.js with multer
- Merging chunks into a complete file
- Preventing duplicate uploads using localStorage
- Deleting chunks after merge for clean storage
Backend Setup (Node.js + Express)
Step 1: Initialize
mkdir resumable-uploader-backend
cd resumable-uploader-backend
npm init -y
npm install express multer cors
Step 2: server.js
// server.js — Express app bootstrap for the chunked-upload backend.
const express = require("express");
const cors = require("cors");
const fs = require("fs");
const path = require("path");
const multer = require("multer");

const app = express();

// Allow the React dev server (a different origin) to call this API.
app.use(cors());
app.use(express.json());
app.use(express.urlencoded({ extended: true }));

// All chunk files and merged uploads live under ./uploads.
const UPLOAD_DIR = path.join(__dirname, "uploads");
// { recursive: true } makes this safe even if parent directories are missing.
if (!fs.existsSync(UPLOAD_DIR)) fs.mkdirSync(UPLOAD_DIR, { recursive: true });
Handle Chunk Requests
// Check if chunk exists
// Test-chunk endpoint: with testChunks enabled, resumable.js GET-probes each
// chunk before uploading it, so already-stored chunks are skipped on resume.
// 200 => chunk exists, skip it; 204 => chunk missing, client must upload it.
// (404 would be treated as a *permanent* error by resumable.js, so 204 is used.)
app.get("/upload", (req, res) => {
  const { resumableIdentifier, resumableChunkNumber } = req.query;
  // path.basename strips any "../" so a crafted identifier cannot escape UPLOAD_DIR.
  const safeId = path.basename(String(resumableIdentifier || ""));
  const safeChunk = Number(resumableChunkNumber) || 0;
  const chunkFile = path.join(UPLOAD_DIR, `${safeId}.${safeChunk}`);
  if (fs.existsSync(chunkFile)) {
    res.status(200).send("Found");
  } else {
    // 204 No Content must not carry a body, so end() instead of send("Not Found").
    res.status(204).end();
  }
});
Handle Chunk Upload with Multer
// Multer writes each incoming chunk straight to disk as
// "<identifier>.<chunkNumber>" so the merge step can locate it later.
// NOTE: req.body is only populated inside the filename callback because
// resumable.js appends its metadata fields to the form data *before* the
// file field — confirm this if you swap upload libraries.
const storage = multer.diskStorage({
  destination: (req, file, cb) => cb(null, UPLOAD_DIR),
  filename: (req, file, cb) => {
    const { resumableIdentifier, resumableChunkNumber } = req.body;
    // path.basename guards against "../" in a client-crafted identifier.
    const safeId = path.basename(String(resumableIdentifier || "chunk"));
    const safeChunk = Number(resumableChunkNumber) || 0;
    cb(null, `${safeId}.${safeChunk}`);
  },
});
const upload = multer({ storage });

// Receives one chunk per request; the field name must match the frontend's
// fileParameterName ("file").
app.post("/upload", upload.single("file"), (req, res) => {
  res.status(200).send("Chunk uploaded");
});
Merge Chunks After Upload
// Merge endpoint: streams chunks 1..totalChunks, in order, into the final
// file, deleting each chunk once it has been appended.
app.post("/merge", (req, res) => {
  const { filename, totalChunks, identifier } = req.body;
  // Sanitize client-supplied names so they cannot escape UPLOAD_DIR.
  const safeName = path.basename(String(filename || "upload.bin"));
  const safeId = path.basename(String(identifier || ""));
  const total = Number(totalChunks) || 0;
  if (total < 1) {
    return res.status(400).send("totalChunks must be >= 1");
  }

  const finalPath = path.join(UPLOAD_DIR, safeName);
  const writeStream = fs.createWriteStream(finalPath);
  let index = 1;
  let failed = false; // prevents a second response after a chunk read error

  const appendChunk = () => {
    const chunkPath = path.join(UPLOAD_DIR, `${safeId}.${index}`);
    fs.createReadStream(chunkPath)
      .on("error", () => {
        if (failed) return;
        failed = true;
        writeStream.destroy(); // release the target file handle
        res.status(500).send("Chunk read error");
      })
      .on("end", () => {
        if (failed) return;
        fs.unlink(chunkPath, () => {}); // best-effort cleanup of the chunk
        if (++index <= total) appendChunk();
        else writeStream.end(() => res.send("File merged successfully"));
      })
      // end:false keeps the target stream open for the next chunk.
      .pipe(writeStream, { end: false });
  };

  appendChunk();
});
// Start the API; the React frontend targets this port.
app.listen(5000, () => console.log("Server running on http://localhost:5000"));
Frontend Setup (React + Resumable.js)
Step 1: Install
npm install resumablejs
Step 2: ResumableUploader.js
import React, { useEffect, useRef, useState } from "react";
import Resumable from "resumablejs";
const ResumableUploader = () => {
const browseRef = useRef(null);
const [uploadProgress, setUploadProgress] = useState(0);
const [status, setStatus] = useState("");
const resumableRef = useRef(null);
const handleFileAdded = (file) => {
const uploaded = JSON.parse(localStorage.getItem("uploaded") || "[]");
if (uploaded.includes(file.uniqueIdentifier)) {
setStatus("File already uploaded.");
return;
}
setStatus("Uploadingβ¦");
resumableRef.current.upload();
};
const handleFileSuccess = async (file) => {
const uploaded = JSON.parse(localStorage.getItem("uploaded") || "[]");
uploaded.push(file.uniqueIdentifier);
localStorage.setItem("uploaded", JSON.stringify(uploaded));
await fetch("http://localhost:5000/merge", {
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify({
filename: file.fileName,
totalChunks: file.chunks.length,
identifier: file.uniqueIdentifier,
}),
});
resumableRef.current.removeFile(file);
setStatus("Upload complete and merged.");
};
useEffect(() => {
const r = new Resumable({
target: "http://localhost:5000/upload",
chunkSize: 1 * 1024 * 1024, // 1MB
fileParameterName: "file",
testChunks: true,
throttleProgressCallbacks: 1,
});
resumableRef.current = r;
r.assignBrowse(browseRef.current);
r.on("fileAdded", handleFileAdded);
r.on("fileProgress", file => setUploadProgress(Math.floor(file.progress() * 100)));
r.on("fileSuccess", handleFileSuccess);
r.on("fileError", () => setStatus("Upload failed."));
}, []);
return (
<div style={{ padding: 20 }}>
<h2>Resumable File Uploader</h2>
<button ref={browseRef}>Choose File</button>
{uploadProgress > 0 && (
<>
<progress value={uploadProgress} max="100" />
<p>{uploadProgress}%</p>
</>
)}
{status && <p><strong>Status:</strong> {status}</p>}
</div>
);
};
export default ResumableUploader;
Final Result
With this setup, you can:
- Upload large files chunk-by-chunk
- Resume uploads even after refreshing
- Track real-time progress
- Auto-merge on successful upload
- Clean up disk space by deleting chunks
Final Thoughts
Chunked uploads are essential for large files and unstable connections.
By using resumable.js, React, and Node.js, you gain:
- Full control over uploads
- Flexibility to extend to AWS/GCP
- Cleanup logic for old/expired chunks
- Organized file management by user/project
--
Top comments (0)