Hey there, fellow developer! Ready to dive into the world of Amazon S3 integration? You're in the right place. We'll be using the aws-sdk package to make our lives easier. Let's get cracking!
Before we jump in, make sure you've got: Node.js and npm installed, an AWS account, and an IAM user with programmatic access to S3 (an access key ID and secret access key).
Let's kick things off:
mkdir s3-integration && cd s3-integration
npm init -y
npm install aws-sdk
Easy peasy, right?
There are a couple of ways to do this, but let's keep it simple with environment variables:
export AWS_ACCESS_KEY_ID=your_access_key
export AWS_SECRET_ACCESS_KEY=your_secret_key
export AWS_REGION=your_preferred_region
Pro tip: Add these to your .bashrc
or .zshrc
to make your life easier.
Time to get our hands dirty with some code:
// Load the AWS SDK (v2) and create an S3 service client.
// The client reads credentials and region from the AWS_* environment variables.
const AWS = require('aws-sdk'); const s3 = new AWS.S3();
That's it! You're ready to rock and roll with S3.
Let's run through some common operations:
/**
 * Log the list of S3 buckets owned by the authenticated account.
 * @returns {Promise<void>}
 */
async function listBuckets() {
  const response = await s3.listBuckets().promise();
  console.log(response.Buckets);
}
/**
 * Create a new S3 bucket.
 * @param {string} bucketName - Globally unique bucket name.
 * @returns {Promise<void>}
 */
async function createBucket(bucketName) {
  const params = { Bucket: bucketName };
  await s3.createBucket(params).promise();
  console.log(`Bucket ${bucketName} created`);
}
/**
 * Upload an object to S3.
 * @param {string} bucketName - Target bucket.
 * @param {string} key - Object key to write.
 * @param {string|Buffer} body - Object contents.
 * @returns {Promise<void>}
 */
async function uploadFile(bucketName, key, body) {
  const params = { Bucket: bucketName, Key: key, Body: body };
  await s3.putObject(params).promise();
  console.log(`File uploaded successfully`);
}
/**
 * Download an object from S3, log its contents, and return them.
 * Fix: the original logged the body and then discarded it, so callers had no
 * way to use the downloaded data. Returning the decoded string is
 * backward-compatible (the previous resolved value was undefined).
 * @param {string} bucketName - Source bucket.
 * @param {string} key - Object key to fetch.
 * @returns {Promise<string>} The object body decoded as a UTF-8 string.
 */
async function downloadFile(bucketName, key) {
  const { Body } = await s3.getObject({ Bucket: bucketName, Key: key }).promise();
  const content = Body.toString();
  console.log(content);
  return content;
}
/**
 * Delete a single object from S3.
 * @param {string} bucketName - Bucket containing the object.
 * @param {string} key - Object key to remove.
 * @returns {Promise<void>}
 */
async function deleteFile(bucketName, key) {
  const params = { Bucket: bucketName, Key: key };
  await s3.deleteObject(params).promise();
  console.log(`File deleted successfully`);
}
/**
 * Delete an S3 bucket (the bucket must already be empty).
 * @param {string} bucketName - Bucket to remove.
 * @returns {Promise<void>}
 */
async function deleteBucket(bucketName) {
  const params = { Bucket: bucketName };
  await s3.deleteBucket(params).promise();
  console.log(`Bucket ${bucketName} deleted`);
}
Want to level up? Here are a few more tricks:
/**
 * Attach a bucket policy to an S3 bucket.
 * @param {string} bucketName - Target bucket.
 * @param {object} policy - Policy document (serialized to JSON before sending).
 * @returns {Promise<void>}
 */
async function setBucketPolicy(bucketName, policy) {
  const params = {
    Bucket: bucketName,
    Policy: JSON.stringify(policy),
  };
  await s3.putBucketPolicy(params).promise();
  console.log(`Bucket policy set for ${bucketName}`);
}
/**
 * Generate a pre-signed GET URL for an object.
 * @param {string} bucketName - Bucket containing the object.
 * @param {string} key - Object key.
 * @param {number} [expirationInSeconds=60] - URL lifetime in seconds.
 * @returns {Promise<string>} Resolves to the signed URL.
 */
function getSignedUrl(bucketName, key, expirationInSeconds = 60) {
  const params = {
    Bucket: bucketName,
    Key: key,
    Expires: expirationInSeconds,
  };
  return s3.getSignedUrlPromise('getObject', params);
}
Always wrap your S3 operations in try/catch blocks:
// Example: catch narrowly on the AWS error code and handle everything else
// separately, rather than treating all failures the same.
// NOTE(review): fragment — assumes `params` is defined and that this runs
// inside an async function (top-level `await` otherwise fails in CommonJS).
try { await s3.putObject(params).promise(); } catch (error) { if (error.code === 'NoSuchBucket') { console.error('Bucket does not exist'); } else { console.error('Unexpected error', error); } }
For retry logic, consider using a library like async-retry.
When dealing with large files, streams are your best friend:
const fs = require('fs'); async function uploadLargeFile(bucketName, key, filePath) { const fileStream = fs.createReadStream(filePath); await s3.upload({ Bucket: bucketName, Key: key, Body: fileStream }).promise(); console.log('Large file uploaded successfully'); }
And there you have it! You're now equipped to integrate Amazon S3 into your JavaScript projects like a pro. Remember, practice makes perfect, so don't be afraid to experiment and push the boundaries.
For more advanced S3 usage, check out the AWS SDK documentation. Happy coding!