@aws-sdk/client-s3 is the official AWS SDK library for working with the S3 service in JavaScript. It allows you to upload and download files, list and delete objects, perform multipart uploads, and generate pre-signed URLs.
Install the package using npm or yarn:
npm install @aws-sdk/client-s3
Or:
yarn add @aws-sdk/client-s3
When using @aws-sdk/client-s3, access credentials can be configured in several ways.
When you configure the AWS CLI (for example, by running aws configure), a .aws directory containing configuration files is created in your home directory. If necessary, you can create this directory and its files manually without installing the AWS CLI.
The ~/.aws/credentials file should contain the following:
[default]
aws_access_key_id = <ACCESS_KEY>
aws_secret_access_key = <SECRET_KEY>
And ~/.aws/config:
[default]
output = json
endpoint_url = https://s3.hmstorage.net
region = us-2
Credentials can also be provided via environment variables:
export AWS_ACCESS_KEY_ID=<ACCESS_KEY>
export AWS_SECRET_ACCESS_KEY=<SECRET_KEY>
Full client configuration with all parameters explicitly specified:
const { S3Client } = require("@aws-sdk/client-s3");
const s3 = new S3Client({
  region: "us-2",
  endpoint: "https://s3.hmstorage.net",
  credentials: {
    accessKeyId: "<ACCESS_KEY>",
    secretAccessKey: "<SECRET_KEY>"
  }
});
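To check that the endpoint and credentials are accepted, you can send a simple request with the client created above. A minimal sketch using ListBucketsCommand, which is exported by @aws-sdk/client-s3:
const { ListBucketsCommand } = require("@aws-sdk/client-s3");

// Quick connectivity check: list the buckets visible to these credentials
s3.send(new ListBucketsCommand({}))
  .then(({ Buckets }) => (Buckets || []).forEach((b) => console.log(b.Name)))
  .catch((err) => console.error("Connection check failed:", err));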
If multiple AWS CLI profiles are configured, you can select the required one using an environment variable:
export AWS_PROFILE=myprofile
In this case, the client can be created without explicitly passing access keys:
const s3 = new S3Client({
  region: "us-2",
  endpoint: "https://s3.hmstorage.net"
});
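A profile can also be selected in code rather than through the environment, using the fromIni provider from the separate @aws-sdk/credential-providers package. A minimal sketch, assuming that package is installed (npm install @aws-sdk/credential-providers):
const { S3Client } = require("@aws-sdk/client-s3");
const { fromIni } = require("@aws-sdk/credential-providers");

// Read credentials for a specific profile from ~/.aws/credentials
const s3 = new S3Client({
  region: "us-2",
  endpoint: "https://s3.hmstorage.net",
  credentials: fromIni({ profile: "myprofile" })
});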
The example below demonstrates basic object operations using the SDK. The script performs the following actions:
1. Uploads the local file example.txt to the bucket.
2. Lists the objects in the bucket.
3. Downloads the uploaded object to downloaded_example.txt.
4. Deletes the object from the bucket.
Before running the script, create a file named example.txt in the same directory as the script.
Example code:
const {
S3Client,
ListObjectsV2Command,
PutObjectCommand,
GetObjectCommand,
DeleteObjectCommand
} = require("@aws-sdk/client-s3");
const { createReadStream, createWriteStream, existsSync } = require("fs");
const { pipeline } = require("stream");
const { promisify } = require("util");
const pipe = promisify(pipeline);
// Connection parameters: replace bucket_name with the name of your bucket
const bucketName = "bucket_name";
const region = "us-2";
const endpoint = "https://s3.hmstorage.net";

// Local file to upload, the object key in the bucket, and the download destination
const localUploadPath = "example.txt";
const s3Key = "example.txt";
const localDownloadPath = "downloaded_example.txt";

// Create S3 client (credentials are resolved from ~/.aws or environment variables)
const s3 = new S3Client({
  region,
  endpoint,
});
async function main() {
  // 1. Upload file
  if (existsSync(localUploadPath)) {
    console.log(`Uploading ${localUploadPath} to the bucket...`);
    await s3.send(new PutObjectCommand({
      Bucket: bucketName,
      Key: s3Key,
      Body: createReadStream(localUploadPath),
    }));
    console.log("Upload completed.");
  } else {
    console.log(`File ${localUploadPath} not found. Skipping upload.`);
  }

  // 2. List objects
  console.log("\nBucket contents:");
  const list = await s3.send(new ListObjectsV2Command({ Bucket: bucketName }));
  (list.Contents || []).forEach((obj) => {
    console.log(`- ${obj.Key}`);
  });

  // 3. Download file
  console.log(`\nDownloading ${s3Key} to ${localDownloadPath}...`);
  const { Body } = await s3.send(new GetObjectCommand({
    Bucket: bucketName,
    Key: s3Key,
  }));
  if (Body) {
    await pipe(Body, createWriteStream(localDownloadPath));
    console.log("Download completed.");
  }

  // 4. Delete file
  console.log(`\nDeleting ${s3Key} from the bucket...`);
  await s3.send(new DeleteObjectCommand({ Bucket: bucketName, Key: s3Key }));
  console.log("Deletion completed.");
}

main().catch((err) => {
  console.error("Error while performing operations:", err);
});
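Pre-signed URLs, mentioned at the beginning of this article, are generated with the separate @aws-sdk/s3-request-presigner package rather than with @aws-sdk/client-s3 itself. Below is a minimal sketch that reuses the s3 client, bucketName, and s3Key constants from the script above and assumes the package has been installed (npm install @aws-sdk/s3-request-presigner):
const { getSignedUrl } = require("@aws-sdk/s3-request-presigner");

// Generate a temporary download link for the object, valid for one hour
async function presign() {
  const url = await getSignedUrl(
    s3,
    new GetObjectCommand({ Bucket: bucketName, Key: s3Key }),
    { expiresIn: 3600 } // lifetime in seconds
  );
  console.log("Pre-signed URL:", url);
}

presign().catch((err) => {
  console.error("Error while generating the pre-signed URL:", err);
});
Anyone who has the URL can download the object until the expiresIn period elapses, without needing access keys of their own.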