Chunked Uploads
Upload large files by splitting them into smaller chunks. This enables:
- Upload of files larger than server limits
- Resumable uploads after network failures
- Better progress tracking
- Reduced memory usage
Basic Chunked Upload
import { useDropup, createChunkedUploader } from '@samithahansaka/dropup';
function ChunkedUploader() {
const { files, actions, state, getDropProps, getInputProps } = useDropup({
upload: createChunkedUploader({
url: '/api/upload/chunk',
chunkSize: 5 * 1024 * 1024, // 5MB chunks
}),
});
return (
<div>
<div {...getDropProps()} style={styles.dropzone}>
<input {...getInputProps()} />
<p>Drop large files here - they'll upload in chunks</p>
</div>
{files.map(file => (
<div key={file.id} style={styles.fileItem}>
<span>{file.name}</span>
<span>{(file.size / 1024 / 1024).toFixed(1)} MB</span>
{file.status === 'uploading' && (
<div style={styles.progressBar}>
<div
style={{
...styles.progress,
width: `${file.progress}%`,
}}
/>
</div>
)}
<span>{file.status}</span>
</div>
))}
<button
onClick={() => actions.upload()}
disabled={state.isUploading}
>
Upload
</button>
</div>
);
}
// Inline styles shared by the chunked-upload example above.
const styles = {
  // Drop target for file selection.
  dropzone: {
    border: '2px dashed #ccc',
    borderRadius: 8,
    padding: 40,
    textAlign: 'center' as const,
    marginBottom: 20,
  },
  // One horizontal row per file (name, size, progress, status).
  fileItem: {
    display: 'flex',
    alignItems: 'center',
    gap: 12,
    padding: 12,
    borderBottom: '1px solid #eee',
  },
  // Grey track for the progress bar; fills remaining row width.
  progressBar: {
    flex: 1,
    height: 8,
    backgroundColor: '#eee',
    borderRadius: 4,
    overflow: 'hidden',
  },
  // Green fill; width is set inline from file.progress.
  progress: {
    height: '100%',
    backgroundColor: '#4caf50',
    transition: 'width 0.2s',
  },
};
Chunked Upload Options
createChunkedUploader({
  // Required: endpoint that receives each chunk as a multipart request.
  url: '/api/upload/chunk',
  // Optional settings
  chunkSize: 5 * 1024 * 1024, // bytes per chunk — 5MB (default)
  concurrency: 3, // Parallel chunks (default: 1)
  retries: 3, // Retry failed chunks (default: 3)
  // Headers sent with every chunk request (e.g. auth).
  headers: {
    'Authorization': 'Bearer token',
  },
  // Custom chunk metadata sent alongside each chunk; the server uses
  // these fields to group chunks by file and reassemble them in order.
  getChunkMeta: (file, chunkIndex, totalChunks) => ({
    fileId: file.id,
    fileName: file.name,
    chunkIndex,
    totalChunks,
  }),
});
Server-Side Handling
Your server needs to handle chunk uploads and reassemble them.
Example: Node.js/Express
const express = require('express');
const multer = require('multer');
const fs = require('fs');
const path = require('path');

const app = express();
const upload = multer({ dest: 'chunks/' });

// In-memory per-file upload state. For production use a shared store
// (e.g. Redis) so state survives restarts and works across instances.
const uploadState = new Map();

app.post('/api/upload/chunk', upload.single('chunk'), (req, res) => {
  const { fileId, fileName, chunkIndex, totalChunks } = req.body;

  if (!req.file || !fileId || !fileName) {
    return res.status(400).json({ error: 'Missing chunk data' });
  }

  // Initialize state on the first chunk for this file.
  if (!uploadState.has(fileId)) {
    uploadState.set(fileId, {
      fileName,
      totalChunks: parseInt(totalChunks, 10),
      // Keyed by chunk index so a retried/duplicate chunk overwrites its
      // slot instead of inflating the received count (an array + length
      // check can report "complete" while chunks are still missing).
      chunks: new Map(),
    });
  }

  const state = uploadState.get(fileId);
  state.chunks.set(parseInt(chunkIndex, 10), req.file.path);

  // All chunks received -> reassemble into the final file.
  if (state.chunks.size === state.totalChunks) {
    // basename() strips any directory components from the client-supplied
    // name, preventing path traversal (e.g. "../../etc/passwd").
    const safeName = path.basename(state.fileName);
    const finalPath = path.join('uploads', safeName);
    const writeStream = fs.createWriteStream(finalPath);

    // Write chunks in index order, deleting each temp file as we go.
    const ordered = [...state.chunks.entries()].sort((a, b) => a[0] - b[0]);
    for (const [, chunkPath] of ordered) {
      writeStream.write(fs.readFileSync(chunkPath));
      fs.unlinkSync(chunkPath); // clean up chunk
    }

    uploadState.delete(fileId);

    // Respond only after the stream has flushed everything to disk;
    // responding earlier lets clients fetch a partially-written file.
    writeStream.end(() => {
      res.json({
        complete: true,
        url: `/uploads/${safeName}`,
      });
    });
    return;
  }

  res.json({
    complete: false,
    received: state.chunks.size,
    total: state.totalChunks,
  });
});
tus Protocol
For robust resumable uploads, use the tus protocol:
import { useDropup } from '@samithahansaka/dropup';
import { createTusUploader } from '@samithahansaka/dropup/tus';
// Resumable uploads via the tus protocol; interrupted uploads can resume
// from the last confirmed byte offset.
function TusUploader() {
  const { files, actions, state, getDropProps, getInputProps } = useDropup({
    upload: createTusUploader({
      // Public tus demo server; point at your own tusd/tus endpoint in production.
      endpoint: 'https://tusd.tusdemo.net/files/',
      // Optional settings
      chunkSize: 5 * 1024 * 1024,
      // Delays (ms) between successive retry attempts after a failure.
      retryDelays: [0, 1000, 3000, 5000],
      // Metadata for the server
      // NOTE(review): these values are the literal strings 'file.type' and
      // 'file.name'. If the intent is to send each file's actual MIME type
      // and name, they likely need to be computed per file — confirm
      // against the createTusUploader API.
      metadata: {
        filetype: 'file.type',
        filename: 'file.name',
      },
      // Persist upload fingerprints in localStorage so uploads can resume
      // across page reloads.
      storeFingerprintForResuming: true,
    }),
    onUploadComplete: (file) => {
      console.log('tus upload complete:', file.uploadedUrl);
    },
  });
  return (
    <div>
      <div {...getDropProps()} style={styles.dropzone}>
        <input {...getInputProps()} />
        <p>Drop files for resumable upload</p>
      </div>
      {files.map(file => (
        <div key={file.id} style={styles.fileItem}>
          <span>{file.name}</span>
          <span>{file.progress}%</span>
          <span>{file.status}</span>
        </div>
      ))}
      <button onClick={() => actions.upload()}>
        Upload
      </button>
    </div>
  );
}
Pause and Resume
With chunked uploads, you can pause and resume:
function PausableUploader() {
const { files, actions, getDropProps, getInputProps } = useDropup({
upload: createChunkedUploader({
url: '/api/upload/chunk',
}),
});
return (
<div>
<div {...getDropProps()}>
<input {...getInputProps()} />
<p>Drop files here</p>
</div>
{files.map(file => (
<div key={file.id}>
<span>{file.name}</span>
<span>{file.progress}%</span>
<span>{file.status}</span>
{file.status === 'uploading' && (
<button onClick={() => actions.cancel(file.id)}>
Pause
</button>
)}
{file.status === 'paused' && (
<button onClick={() => actions.retry([file.id])}>
Resume
</button>
)}
</div>
))}
<button onClick={() => actions.upload()}>
Start All
</button>
</div>
);
}
Progress Tracking with Chunks
import { useState } from 'react';
function DetailedChunkProgress() {
const [chunkProgress, setChunkProgress] = useState<Map<string, number[]>>(
new Map()
);
const { files, actions, getDropProps, getInputProps } = useDropup({
upload: createChunkedUploader({
url: '/api/upload/chunk',
chunkSize: 1024 * 1024, // 1MB chunks for visibility
onChunkProgress: (file, chunkIndex, progress) => {
setChunkProgress(prev => {
const next = new Map(prev);
const fileProgress = next.get(file.id) || [];
fileProgress[chunkIndex] = progress;
next.set(file.id, fileProgress);
return next;
});
},
}),
});
return (
<div>
<div {...getDropProps()} style={styles.dropzone}>
<input {...getInputProps()} />
<p>Drop files here</p>
</div>
{files.map(file => {
const chunks = chunkProgress.get(file.id) || [];
const totalChunks = Math.ceil(file.size / (1024 * 1024));
return (
<div key={file.id} style={styles.fileCard}>
<p>{file.name}</p>
{/* Chunk progress visualization */}
<div style={styles.chunkGrid}>
{Array.from({ length: totalChunks }).map((_, i) => (
<div
key={i}
style={{
...styles.chunkBlock,
backgroundColor: chunks[i] === 100
? '#4caf50'
: chunks[i] > 0
? '#8bc34a'
: '#eee',
}}
title={`Chunk ${i + 1}: ${chunks[i] || 0}%`}
/>
))}
</div>
<p>Overall: {file.progress}%</p>
</div>
);
})}
<button onClick={() => actions.upload()}>
Upload
</button>
</div>
);
}
// Inline styles for the chunk-progress visualization example.
const styles = {
  // Drop target for file selection.
  dropzone: {
    border: '2px dashed #ccc',
    padding: 40,
    textAlign: 'center' as const,
    marginBottom: 20,
  },
  // Card wrapping each file's name, chunk grid, and overall progress.
  fileCard: {
    padding: 16,
    border: '1px solid #eee',
    borderRadius: 8,
    marginBottom: 12,
  },
  // Wrapping row of chunk blocks, one block per chunk.
  chunkGrid: {
    display: 'flex',
    flexWrap: 'wrap' as const,
    gap: 4,
    margin: '12px 0',
  },
  // Individual chunk square; background color reflects chunk progress.
  chunkBlock: {
    width: 20,
    height: 20,
    borderRadius: 4,
    transition: 'background-color 0.2s',
  },
};
Chunk Size Recommendations
| File Size | Recommended Chunk Size |
|---|---|
| < 10 MB | No chunking needed |
| 10-100 MB | 5 MB |
| 100 MB - 1 GB | 10 MB |
| > 1 GB | 20-50 MB |
Performance
Larger chunks mean fewer HTTP requests and less per-request overhead, but more data must be re-sent when a chunk fails. Smaller chunks add overhead, but give finer-grained progress and faster recovery, since only the small failed chunk is retried.