In some cases, using this library's operations might impact performance and skew your test results.
To ensure accurate results, consider executing these operations in the setup and teardown lifecycle functions. These functions run before and after the test run and have no impact on the test results.
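For example, a minimal sketch of that pattern, using the S3Client described below, might look like this (the bucket name, object key, and payload are placeholders for illustration):

```javascript
import { AWSConfig, S3Client } from 'https://jslib.k6.io/aws/0.12.3/s3.js';

const awsConfig = new AWSConfig({
  region: __ENV.AWS_REGION,
  accessKeyId: __ENV.AWS_ACCESS_KEY_ID,
  secretAccessKey: __ENV.AWS_SECRET_ACCESS_KEY,
});

const s3 = new S3Client(awsConfig);

// Placeholder names used for illustration only.
const bucketName = 'my-test-bucket';
const objectKey = 'test-fixture.json';

export async function setup() {
  // Runs once before the test: upload the fixture here so the
  // operation does not affect the measured iterations.
  await s3.putObject(bucketName, objectKey, JSON.stringify({ seeded: true }));
}

export default async function () {
  // The measured iteration only reads the fixture.
  await s3.getObject(bucketName, objectKey);
}

export async function teardown() {
  // Runs once after the test: clean up the fixture.
  await s3.deleteObject(bucketName, objectKey);
}
```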
S3Client interacts with Amazon Simple Storage Service (S3).
With it, you can perform operations such as listing buckets, listing objects in a bucket, or downloading objects from a bucket. For a full list of supported operations, see Methods.
Both the dedicated s3.js jslib bundle and the all-encompassing aws.js bundle include the S3Client.
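For instance, either of the following imports makes the S3Client and AWSConfig classes available (the aws.js URL below assumes the bundle is published at the same 0.12.3 version path as s3.js):

```javascript
// Dedicated S3 bundle:
import { AWSConfig, S3Client } from 'https://jslib.k6.io/aws/0.12.3/s3.js';

// Or the all-encompassing bundle (assumed to sit at the same version path):
// import { AWSConfig, S3Client } from 'https://jslib.k6.io/aws/0.12.3/aws.js';
```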
S3Client methods throw errors in case of failure.
| Error                 | Condition                                                   |
| --------------------- | ----------------------------------------------------------- |
| InvalidSignatureError | when invalid credentials were provided.                     |
| S3ServiceError        | when AWS replied to the requested operation with an error.  |
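As a rough sketch of handling these failures, an operation can be wrapped in try/catch and the error inspected by name (the non-existent bucket name is a placeholder, and checking error.name assumes the thrown error exposes its class name that way):

```javascript
import { AWSConfig, S3Client } from 'https://jslib.k6.io/aws/0.12.3/s3.js';

const awsConfig = new AWSConfig({
  region: __ENV.AWS_REGION,
  accessKeyId: __ENV.AWS_ACCESS_KEY_ID,
  secretAccessKey: __ENV.AWS_SECRET_ACCESS_KEY,
});

const s3 = new S3Client(awsConfig);

export default async function () {
  try {
    // Placeholder bucket name intended to trigger a failure.
    await s3.listObjects('a-bucket-that-does-not-exist');
  } catch (error) {
    // Assumption: the error object carries its class name and a message.
    if (error.name === 'InvalidSignatureError') {
      console.error(`Invalid credentials: ${error.message}`);
    } else {
      console.error(`S3 operation failed: ${error.message}`);
    }
  }
}
```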
Examples
JavaScript
```javascript
import { check } from 'k6';
import exec from 'k6/execution';
import http from 'k6/http';

import { AWSConfig, S3Client } from 'https://jslib.k6.io/aws/0.12.3/s3.js';

const awsConfig = new AWSConfig({
  region: __ENV.AWS_REGION,
  accessKeyId: __ENV.AWS_ACCESS_KEY_ID,
  secretAccessKey: __ENV.AWS_SECRET_ACCESS_KEY,
});

const s3 = new S3Client(awsConfig);
const testBucketName = 'test-jslib-aws';
const testInputFileKey = 'productIDs.json';
const testOutputFileKey = `results-${Date.now()}.json`;

export async function setup() {
  // If our test bucket does not exist, abort the execution.
  const buckets = await s3.listBuckets();
  if (buckets.filter((b) => b.name === testBucketName).length == 0) {
    exec.test.abort();
  }

  // If our test object does not exist, abort the execution.
  const objects = await s3.listObjects(testBucketName);
  if (objects.filter((o) => o.key === testInputFileKey).length == 0) {
    exec.test.abort();
  }

  // Download the S3 object containing our test data
  const inputObject = await s3.getObject(testBucketName, testInputFileKey);

  // Let's return the downloaded S3 object's data from the
  // setup function to allow the default function to use it.
  return {
    productIDs: JSON.parse(inputObject.data),
  };
}

export default async function (data) {
  // Pick a random product ID from our test data
  const randomProductID = data.productIDs[Math.floor(Math.random() * data.productIDs.length)];

  // Query our ecommerce website's product page using the ID
  const res = await http.asyncRequest('GET', `http://your.website.com/product/${randomProductID}/`);
  check(res, { 'is status 200': res.status === 200 });
}

export async function handleSummary(data) {
  // Once the load test is over, let's upload the results to our
  // S3 bucket. This is executed after teardown.
  await s3.putObject(testBucketName, testOutputFileKey, JSON.stringify(data));
}
```
Multipart uploads
JavaScript
```javascript
import crypto from 'k6/crypto';
import exec from 'k6/execution';

import { AWSConfig, S3Client } from 'https://jslib.k6.io/aws/0.12.3/s3.js';

const awsConfig = new AWSConfig({
  region: __ENV.AWS_REGION,
  accessKeyId: __ENV.AWS_ACCESS_KEY_ID,
  secretAccessKey: __ENV.AWS_SECRET_ACCESS_KEY,
  sessionToken: __ENV.AWS_SESSION_TOKEN,
});

const s3 = new S3Client(awsConfig);
const testBucketName = 'test-jslib-aws';
const testFileKey = 'multipart.txt';

export default async function () {
  // List the buckets the AWS authentication configuration
  // gives us access to.
  const buckets = await s3.listBuckets();

  // If our test bucket does not exist, abort the execution.
  if (buckets.filter((b) => b.name === testBucketName).length == 0) {
    exec.test.abort();
  }

  // Produce random bytes to upload of size ~12MB, that
  // we will upload in two 6MB parts. This is done as the
  // minimum part size supported by S3 is 5MB.
  const bigFile = crypto.randomBytes(12 * 1024 * 1024);

  // Initialize a multipart upload
  const multipartUpload = await s3.createMultipartUpload(testBucketName, testFileKey);

  // Upload the first part
  const firstPartData = bigFile.slice(0, 6 * 1024 * 1024);
  const firstPart = await s3.uploadPart(
    testBucketName,
    testFileKey,
    multipartUpload.uploadId,
    1,
    firstPartData
  );

  // Upload the second part
  const secondPartData = bigFile.slice(6 * 1024 * 1024, 12 * 1024 * 1024);
  const secondPart = await s3.uploadPart(
    testBucketName,
    testFileKey,
    multipartUpload.uploadId,
    2,
    secondPartData
  );

  // Complete the multipart upload
  await s3.completeMultipartUpload(testBucketName, testFileKey, multipartUpload.uploadId, [
    firstPart,
    secondPart,
  ]);

  // Let's redownload it to verify it's correct, and then delete it
  const obj = await s3.getObject(testBucketName, testFileKey);
  await s3.deleteObject(testBucketName, testFileKey);
}
```