Cloud Storage
Upload files directly to cloud storage providers without proxying through your server.
Amazon S3
Installation
npm install @samithahansaka/dropup
Basic Usage
import { useDropup } from '@samithahansaka/dropup';
import { createS3Uploader } from '@samithahansaka/dropup/cloud/s3';

function S3Uploader() {
  const { files, actions, getDropProps, getInputProps } = useDropup({
    upload: createS3Uploader({
      getPresignedUrl: async (file) => {
        // Call your backend to get a presigned URL
        const response = await fetch('/api/s3/presign', {
          method: 'POST',
          headers: { 'Content-Type': 'application/json' },
          body: JSON.stringify({
            filename: file.name,
            contentType: file.type,
          }),
        });
        return response.json();
      },
    }),
    onUploadComplete: (file) => {
      console.log('Uploaded to S3:', file.uploadedUrl);
    },
  });

  return (
    <div {...getDropProps()}>
      <input {...getInputProps()} />
      <p>Drop files to upload to S3</p>
    </div>
  );
}
Backend: Generate Presigned URL
// Node.js / Express example
import { S3Client, PutObjectCommand } from '@aws-sdk/client-s3';
import { getSignedUrl } from '@aws-sdk/s3-request-presigner';

const s3 = new S3Client({
  region: process.env.AWS_REGION,
  credentials: {
    accessKeyId: process.env.AWS_ACCESS_KEY_ID,
    secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
  },
});

app.post('/api/s3/presign', async (req, res) => {
  const { filename, contentType } = req.body;
  const key = `uploads/${Date.now()}-${filename}`;

  const command = new PutObjectCommand({
    Bucket: process.env.S3_BUCKET,
    Key: key,
    ContentType: contentType,
  });

  const url = await getSignedUrl(s3, command, { expiresIn: 3600 });

  res.json({
    url,
    fields: {}, // For simple PUT, no additional fields needed
  });
});
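Browser uploads to presigned URLs also require a CORS rule on the bucket (see the security notes below). A minimal one-off setup sketch, reusing the s3 client above; the allowed origin is a placeholder, and you can equally configure this in the S3 console or with infrastructure-as-code:

import { PutBucketCorsCommand } from '@aws-sdk/client-s3';

// One-off setup: allow browser PUT/POST uploads from your app's origin.
await s3.send(new PutBucketCorsCommand({
  Bucket: process.env.S3_BUCKET,
  CORSConfiguration: {
    CORSRules: [
      {
        AllowedOrigins: ['https://app.example.com'],
        AllowedMethods: ['PUT', 'POST'],
        AllowedHeaders: ['*'],
        ExposeHeaders: ['ETag'],
        MaxAgeSeconds: 3600,
      },
    ],
  },
}));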
S3 with POST (Multipart Form)
If your backend issues an S3 POST policy (multipart form upload) instead of a presigned PUT URL, return both the upload URL and the policy fields:
createS3Uploader({
  getPresignedUrl: async (file) => {
    const response = await fetch('/api/s3/presign-post', {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({
        filename: file.name,
        contentType: file.type,
      }),
    });
    const { url, fields } = await response.json();

    return {
      url,    // S3 bucket URL
      fields, // Policy fields to include in form
    };
  },
});
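A minimal sketch of the matching /api/s3/presign-post route, assuming the @aws-sdk/s3-presigned-post package and reusing the s3 client from the earlier backend example; the key prefix, size limit, and expiry are illustrative:

import { createPresignedPost } from '@aws-sdk/s3-presigned-post';

app.post('/api/s3/presign-post', async (req, res) => {
  const { filename, contentType } = req.body;

  const { url, fields } = await createPresignedPost(s3, {
    Bucket: process.env.S3_BUCKET,
    Key: `uploads/${Date.now()}-${filename}`,
    Conditions: [['content-length-range', 0, 10 * 1024 * 1024]], // max 10 MB
    Fields: { 'Content-Type': contentType },
    Expires: 600, // seconds
  });

  // { url, fields } matches the shape the client example above expects
  res.json({ url, fields });
});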
Google Cloud Storage
import { useDropup } from '@samithahansaka/dropup';
import { createGCSUploader } from '@samithahansaka/dropup/cloud/gcs';

function GCSUploader() {
  const { files, getDropProps, getInputProps } = useDropup({
    upload: createGCSUploader({
      getSignedUrl: async (file) => {
        const response = await fetch('/api/gcs/sign', {
          method: 'POST',
          headers: { 'Content-Type': 'application/json' },
          body: JSON.stringify({
            filename: file.name,
            contentType: file.type,
          }),
        });
        return response.json();
      },
    }),
  });

  return (
    <div {...getDropProps()}>
      <input {...getInputProps()} />
      <p>Upload to Google Cloud Storage</p>
    </div>
  );
}
Backend: GCS Signed URL
// Node.js example
import { Storage } from '@google-cloud/storage';

const storage = new Storage();
const bucket = storage.bucket(process.env.GCS_BUCKET);

app.post('/api/gcs/sign', async (req, res) => {
  const { filename, contentType } = req.body;
  const blob = bucket.file(`uploads/${Date.now()}-${filename}`);

  const [url] = await blob.getSignedUrl({
    version: 'v4',
    action: 'write',
    expires: Date.now() + 15 * 60 * 1000, // 15 minutes
    contentType,
  });

  res.json({ url });
});
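As with S3, browser PUT requests to the signed URL need a CORS rule on the bucket. A minimal one-off sketch, assuming the bucket object above and an illustrative origin (CORS can also be set with the gcloud CLI or Terraform):

// One-off setup: allow browser PUT uploads from your app's origin.
await bucket.setCorsConfiguration([
  {
    origin: ['https://app.example.com'],
    method: ['PUT'],
    responseHeader: ['Content-Type'],
    maxAgeSeconds: 3600,
  },
]);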
Azure Blob Storage
import { useDropup } from '@samithahansaka/dropup';
import { createAzureUploader } from '@samithahansaka/dropup/cloud/azure';

function AzureUploader() {
  const { files, getDropProps, getInputProps } = useDropup({
    upload: createAzureUploader({
      getSasUrl: async (file) => {
        const response = await fetch('/api/azure/sas', {
          method: 'POST',
          headers: { 'Content-Type': 'application/json' },
          body: JSON.stringify({
            filename: file.name,
            contentType: file.type,
          }),
        });
        return response.json();
      },
    }),
  });

  return (
    <div {...getDropProps()}>
      <input {...getInputProps()} />
      <p>Upload to Azure Blob Storage</p>
    </div>
  );
}
Backend: Azure SAS URL
// Node.js example
import {
  BlobServiceClient,
  generateBlobSASQueryParameters,
  BlobSASPermissions,
} from '@azure/storage-blob';

const blobServiceClient = BlobServiceClient.fromConnectionString(
  process.env.AZURE_STORAGE_CONNECTION_STRING
);

app.post('/api/azure/sas', async (req, res) => {
  const { filename, contentType } = req.body;

  const containerClient = blobServiceClient.getContainerClient('uploads');
  const blobName = `${Date.now()}-${filename}`;
  const blobClient = containerClient.getBlockBlobClient(blobName);

  const sasToken = generateBlobSASQueryParameters(
    {
      containerName: 'uploads',
      blobName,
      permissions: BlobSASPermissions.parse('cw'), // Create, Write
      expiresOn: new Date(Date.now() + 15 * 60 * 1000),
    },
    // Requires a shared key credential, i.e. a connection string that includes the account key
    blobServiceClient.credential
  ).toString();

  res.json({
    url: `${blobClient.url}?${sasToken}`,
    headers: {
      'x-ms-blob-type': 'BlockBlob',
      'Content-Type': contentType,
    },
  });
});
Cloudflare R2
R2 is S3-compatible, so use the S3 uploader:
import { useDropup } from '@samithahansaka/dropup';
import { createS3Uploader } from '@samithahansaka/dropup/cloud/s3';

function R2Uploader() {
  const { files, getDropProps, getInputProps } = useDropup({
    upload: createS3Uploader({
      getPresignedUrl: async (file) => {
        const response = await fetch('/api/r2/presign', {
          method: 'POST',
          headers: { 'Content-Type': 'application/json' },
          body: JSON.stringify({ filename: file.name }),
        });
        return response.json();
      },
    }),
  });

  return (
    <div {...getDropProps()}>
      <input {...getInputProps()} />
      <p>Upload to Cloudflare R2</p>
    </div>
  );
}
Backend: R2 Presigned URL
import { S3Client, PutObjectCommand } from '@aws-sdk/client-s3';
import { getSignedUrl } from '@aws-sdk/s3-request-presigner';

const s3 = new S3Client({
  region: 'auto',
  endpoint: `https://${process.env.CF_ACCOUNT_ID}.r2.cloudflarestorage.com`,
  credentials: {
    accessKeyId: process.env.R2_ACCESS_KEY_ID,
    secretAccessKey: process.env.R2_SECRET_ACCESS_KEY,
  },
});

app.post('/api/r2/presign', async (req, res) => {
  const { filename } = req.body;

  const command = new PutObjectCommand({
    Bucket: process.env.R2_BUCKET,
    Key: `uploads/${Date.now()}-${filename}`,
  });

  const url = await getSignedUrl(s3, command, { expiresIn: 3600 });
  res.json({ url });
});
DigitalOcean Spaces
DigitalOcean Spaces is also S3-compatible: reuse the S3 uploader on the client and point the backend client at your Spaces endpoint:
// Same as S3, just update your backend endpoint config
const s3 = new S3Client({
  region: 'nyc3',
  endpoint: 'https://nyc3.digitaloceanspaces.com',
  credentials: {
    accessKeyId: process.env.DO_SPACES_KEY,
    secretAccessKey: process.env.DO_SPACES_SECRET,
  },
});
Custom Cloud Provider
Create your own uploader for any cloud service:
import { useDropup, type CustomUploader } from '@samithahansaka/dropup';

const customCloudUploader: CustomUploader = async (file, options) => {
  // 1. Get upload URL from your backend
  const { uploadUrl, fileUrl } = await fetch('/api/custom-cloud/init', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ filename: file.name, size: file.size }),
  }).then(r => r.json());

  // 2. Upload file
  const xhr = new XMLHttpRequest();

  return new Promise((resolve, reject) => {
    xhr.upload.onprogress = (e) => {
      if (e.lengthComputable) {
        options.onProgress((e.loaded / e.total) * 100);
      }
    };

    xhr.onload = () => {
      if (xhr.status >= 200 && xhr.status < 300) {
        resolve({ url: fileUrl });
      } else {
        reject(new Error('Upload failed'));
      }
    };

    xhr.onerror = () => reject(new Error('Network error'));

    // Handle cancellation
    options.signal.addEventListener('abort', () => xhr.abort());

    xhr.open('PUT', uploadUrl);
    xhr.send(file.file);
  });
};
function CustomCloudUploader() {
  const { files, getDropProps, getInputProps } = useDropup({
    upload: customCloudUploader,
  });

  return (
    <div {...getDropProps()}>
      <input {...getInputProps()} />
      <p>Upload to custom cloud</p>
    </div>
  );
}
Security Best Practices
- Never expose credentials on the client: always generate signed URLs on your backend.
- Use short expiration times: 5-15 minutes is usually sufficient.
- Validate file types and sizes on the backend: don't rely solely on client-side validation (see the sketch after this list).
- Set appropriate CORS policies on your cloud storage buckets.
- Limit file sizes in your presigned URL policies.
- Use separate buckets for user uploads and application assets.
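As a sketch of the validation and size-limit points above, the presign endpoints can reject unwanted files before issuing a URL. The allowlist, the limit, and the assumption that the client also sends file.size are illustrative; for hard size enforcement on S3, prefer a POST policy with a content-length-range condition, since a plain presigned PUT does not enforce the declared size.

// Illustrative allowlist and limit; adjust for your application.
const ALLOWED_TYPES = ['image/png', 'image/jpeg', 'application/pdf'];
const MAX_SIZE = 10 * 1024 * 1024; // 10 MB

app.post('/api/s3/presign', async (req, res) => {
  // Assumes the client also includes file.size in the request body.
  const { filename, contentType, size } = req.body;

  if (!ALLOWED_TYPES.includes(contentType)) {
    return res.status(400).json({ error: 'Unsupported file type' });
  }
  if (typeof size !== 'number' || size > MAX_SIZE) {
    return res.status(400).json({ error: 'File too large' });
  }

  // ...generate and return the signed URL as shown earlier
});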