
Make Amazon S3 conditional requests using an AWS SDK

The following code examples show how to add preconditions to Amazon S3 requests.
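
Before the full scenarios, the following minimal sketch (shown here with the SDK for Python (Boto3), using placeholder bucket and object names) illustrates the two kinds of preconditions the examples exercise: a conditional read with If-Match and a conditional write with If-None-Match. Treat it as an outline of the idea, not a replacement for the complete examples below.

import boto3
from botocore.exceptions import ClientError

s3 = boto3.client("s3")
bucket = "amzn-s3-demo-bucket"  # Placeholder name; use a bucket you own.
key = "sample-object.txt"       # Placeholder object key.

# Conditional read: return the object only if its ETag still matches.
head = s3.head_object(Bucket=bucket, Key=key)
try:
    response = s3.get_object(Bucket=bucket, Key=key, IfMatch=head["ETag"])
    print(response["Body"].read())
except ClientError as e:
    if e.response["Error"]["Code"] == "PreconditionFailed":
        print("The object changed after its ETag was read.")
    else:
        raise

# Conditional write: create the object only if no object with this key exists.
try:
    s3.put_object(Bucket=bucket, Key=key, Body=b"example data", IfNoneMatch="*")
except ClientError as e:
    if e.response["Error"]["Code"] == "PreconditionFailed":
        print("An object with this key already exists; the write was not applied.")
    else:
        raise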

.NET
AWS SDK for .NET
Note

There's more on GitHub. Find the complete example and learn how to set up and run in the AWS Code Examples Repository.

Run an interactive scenario demonstrating Amazon S3 conditional request features.

using Amazon.S3; using Microsoft.Extensions.Configuration; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.Hosting; using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging.Console; using Microsoft.Extensions.Logging.Debug; namespace S3ConditionalRequestsScenario; public static class S3ConditionalRequestsScenario { /* Before running this .NET code example, set up your development environment, including your credentials. This example demonstrates the use of conditional requests for S3 operations. You can use conditional requests to add preconditions to S3 read requests to return or copy an object based on its Entity tag (ETag), or last modified date. You can use a conditional write requests to prevent overwrites by ensuring there is no existing object with the same key. */ public static S3ActionsWrapper _s3ActionsWrapper = null!; public static IConfiguration _configuration = null!; public static string _resourcePrefix = null!; public static string _sourceBucketName = null!; public static string _destinationBucketName = null!; public static string _sampleObjectKey = null!; public static string _sampleObjectEtag = null!; public static bool _interactive = true; public static async Task Main(string[] args) { // Set up dependency injection for the Amazon service. using var host = Host.CreateDefaultBuilder(args) .ConfigureLogging(logging => logging.AddFilter("System", LogLevel.Debug) .AddFilter<DebugLoggerProvider>("Microsoft", LogLevel.Information) .AddFilter<ConsoleLoggerProvider>("Microsoft", LogLevel.Trace)) .ConfigureServices((_, services) => services.AddAWSService<IAmazonS3>() .AddTransient<S3ActionsWrapper>() ) .Build(); _configuration = new ConfigurationBuilder() .SetBasePath(Directory.GetCurrentDirectory()) .AddJsonFile("settings.json") // Load settings from .json file. .AddJsonFile("settings.local.json", true) // Optionally, load local settings. .Build(); ServicesSetup(host); try { Console.WriteLine(new string('-', 80)); Console.WriteLine("Welcome to the Amazon Simple Storage Service (S3) Conditional Requests Feature Scenario."); Console.WriteLine(new string('-', 80)); ConfigurationSetup(); _sampleObjectEtag = await Setup(_sourceBucketName, _destinationBucketName, _sampleObjectKey); await DisplayDemoChoices(_sourceBucketName, _destinationBucketName, _sampleObjectKey, _sampleObjectEtag, 0); Console.WriteLine(new string('-', 80)); Console.WriteLine("Cleaning up resources."); Console.WriteLine(new string('-', 80)); await Cleanup(true); Console.WriteLine(new string('-', 80)); Console.WriteLine("Amazon S3 Conditional Requests Feature Scenario is complete."); Console.WriteLine(new string('-', 80)); } catch (Exception ex) { Console.WriteLine(new string('-', 80)); Console.WriteLine($"There was a problem: {ex.Message}"); await CleanupScenario(_sourceBucketName, _destinationBucketName); Console.WriteLine(new string('-', 80)); } } /// <summary> /// Populate the services for use within the console application. /// </summary> /// <param name="host">The services host.</param> private static void ServicesSetup(IHost host) { _s3ActionsWrapper = host.Services.GetRequiredService<S3ActionsWrapper>(); } /// <summary> /// Any setup operations needed. /// </summary> public static void ConfigurationSetup() { _resourcePrefix = _configuration["resourcePrefix"] ?? 
"dotnet-example"; _sourceBucketName = _resourcePrefix + "-source"; _destinationBucketName = _resourcePrefix + "-dest"; _sampleObjectKey = _resourcePrefix + "-sample-object.txt"; } /// <summary> /// Sets up the scenario by creating a source and destination bucket, and uploading a test file to the source bucket. /// </summary> /// <param name="sourceBucket">The name of the source bucket.</param> /// <param name="destBucket">The name of the destination bucket.</param> /// <param name="objectKey">The name of the test file to add to the source bucket.</param> /// <returns>The ETag of the uploaded test file.</returns> public static async Task<string> Setup(string sourceBucket, string destBucket, string objectKey) { Console.WriteLine( "\nFor this scenario, we will use the AWS SDK for .NET to create several S3\n" + "buckets and files to demonstrate working with S3 conditional requests.\n" + "This example demonstrates the use of conditional requests for S3 operations.\r\n" + "You can use conditional requests to add preconditions to S3 read requests to return or copy\r\n" + "an object based on its Entity tag (ETag), or last modified date. \r\n" + "You can use a conditional write requests to prevent overwrites by ensuring \r\n" + "there is no existing object with the same key. \r\n\r\n" + "This example will allow you to perform conditional reads\r\n" + "and writes that will succeed or fail based on your selected options.\r\n\r\n" + "Sample buckets and a sample object will be created as part of the example."); Console.WriteLine(new string('-', 80)); Console.WriteLine("Press Enter when you are ready to start."); if (_interactive) Console.ReadLine(); await _s3ActionsWrapper.CreateBucketWithName(sourceBucket); await _s3ActionsWrapper.CreateBucketWithName(destBucket); var eTag = await _s3ActionsWrapper.PutObjectConditional(objectKey, sourceBucket, "Test file content."); return eTag; } /// <summary> /// Cleans up the scenario by deleting the source and destination buckets. /// </summary> /// <param name="sourceBucket">The name of the source bucket.</param> /// <param name="destBucket">The name of the destination bucket.</param> public static async Task CleanupScenario(string sourceBucket, string destBucket) { await _s3ActionsWrapper.CleanupBucketByName(sourceBucket); await _s3ActionsWrapper.CleanupBucketByName(destBucket); } /// <summary> /// Displays a list of the objects in the test buckets. /// </summary> /// <param name="sourceBucket">The name of the source bucket.</param> /// <param name="destBucket">The name of the destination bucket.</param> public static async Task DisplayBuckets(string sourceBucket, string destBucket) { await _s3ActionsWrapper.ListBucketContentsByName(sourceBucket); await _s3ActionsWrapper.ListBucketContentsByName(destBucket); } /// <summary> /// Displays the menu of conditional request options for the user. /// </summary> /// <param name="sourceBucket">The name of the source bucket.</param> /// <param name="destBucket">The name of the destination bucket.</param> /// <param name="objectKey">The key of the test object in the source bucket.</param> /// <param name="etag">The ETag of the test object in the source bucket.</param> public static async Task DisplayDemoChoices(string sourceBucket, string destBucket, string objectKey, string etag, int defaultChoice) { var actions = new[] { "Print a list of bucket items.", "Perform a conditional read.", "Perform a conditional copy.", "Perform a conditional write.", "Clean up and exit." 
}; var conditions = new[] { "If-Match: using the object's ETag. This condition should succeed.", "If-None-Match: using the object's ETag. This condition should fail.", "If-Modified-Since: using yesterday's date. This condition should succeed.", "If-Unmodified-Since: using yesterday's date. This condition should fail." }; var conditionTypes = new[] { S3ConditionType.IfMatch, S3ConditionType.IfNoneMatch, S3ConditionType.IfModifiedSince, S3ConditionType.IfUnmodifiedSince, }; var yesterdayDate = DateTime.UtcNow.AddDays(-1); int choice; while ((choice = GetChoiceResponse("\nExplore the S3 conditional request features by selecting one of the following choices:", actions, defaultChoice)) != 4) { switch (choice) { case 0: Console.WriteLine("Listing the objects and buckets."); await DisplayBuckets(sourceBucket, destBucket); break; case 1: int conditionTypeIndex = GetChoiceResponse("Perform a conditional read:", conditions, 1); if (conditionTypeIndex == 0 || conditionTypeIndex == 1) { await _s3ActionsWrapper.GetObjectConditional(objectKey, sourceBucket, conditionTypes[conditionTypeIndex], null, _sampleObjectEtag); } else if (conditionTypeIndex == 2 || conditionTypeIndex == 3) { await _s3ActionsWrapper.GetObjectConditional(objectKey, sourceBucket, conditionTypes[conditionTypeIndex], yesterdayDate); } break; case 2: int copyConditionTypeIndex = GetChoiceResponse("Perform a conditional copy:", conditions, 1); string destKey = GetStringResponse("Enter an object key:", "sampleObjectKey"); if (copyConditionTypeIndex == 0 || copyConditionTypeIndex == 1) { await _s3ActionsWrapper.CopyObjectConditional(objectKey, destKey, sourceBucket, destBucket, conditionTypes[copyConditionTypeIndex], null, etag); } else if (copyConditionTypeIndex == 2 || copyConditionTypeIndex == 3) { await _s3ActionsWrapper.CopyObjectConditional(objectKey, destKey, sourceBucket, destBucket, conditionTypes[copyConditionTypeIndex], yesterdayDate); } break; case 3: Console.WriteLine("Perform a conditional write using IfNoneMatch condition on the object key."); Console.WriteLine("If the key is a duplicate, the write will fail."); string newObjectKey = GetStringResponse("Enter an object key:", "newObjectKey"); await _s3ActionsWrapper.PutObjectConditional(newObjectKey, sourceBucket, "Conditional write example data."); break; } if (!_interactive) { break; } } Console.WriteLine("Proceeding to cleanup."); } // <summary> /// Clean up the resources from the scenario. /// </summary> /// <param name="interactive">True to run as interactive.</param> /// <returns>True if successful.</returns> public static async Task<bool> Cleanup(bool interactive) { Console.WriteLine(new string('-', 80)); if (!interactive || GetYesNoResponse("Do you want to clean up all files and buckets? (y/n) ")) { await _s3ActionsWrapper.CleanUpBucketByName(_sourceBucketName); await _s3ActionsWrapper.CleanUpBucketByName(_destinationBucketName); } else { Console.WriteLine( "Ok, we'll leave the resources intact.\n" + "Don't forget to delete them when you're done with them or you might incur unexpected charges." ); } Console.WriteLine(new string('-', 80)); return true; } /// <summary> /// Helper method to get a yes or no response from the user. 
/// </summary> /// <param name="question">The question string to print on the console.</param> /// <returns>True if the user responds with a yes.</returns> private static bool GetYesNoResponse(string question) { Console.WriteLine(question); var ynResponse = Console.ReadLine(); var response = ynResponse != null && ynResponse.Equals("y", StringComparison.InvariantCultureIgnoreCase); return response; } /// <summary> /// Helper method to get a choice response from the user. /// </summary> /// <param name="question">The question string to print on the console.</param> /// <param name="choices">The choices to print on the console.</param> /// <returns>The index of the selected choice</returns> private static int GetChoiceResponse(string? question, string[] choices, int defaultChoice) { if (question != null) { Console.WriteLine(question); for (int i = 0; i < choices.Length; i++) { Console.WriteLine($"\t{i + 1}. {choices[i]}"); } } if (!_interactive) return defaultChoice; var choiceNumber = 0; while (choiceNumber < 1 || choiceNumber > choices.Length) { var choice = Console.ReadLine(); Int32.TryParse(choice, out choiceNumber); } return choiceNumber - 1; } /// <summary> /// Get a string response from the user. /// </summary> /// <param name="question">The question to print.</param> /// <param name="defaultAnswer">A default answer to use when not interactive.</param> /// <returns>The string response.</returns> public static string GetStringResponse(string? question, string defaultAnswer) { string? answer = ""; if (_interactive) { do { Console.WriteLine(question); answer = Console.ReadLine(); } while (string.IsNullOrWhiteSpace(answer)); } else { answer = defaultAnswer; } return answer; } }

A wrapper class for S3 functions.

using System.Net; using Amazon.S3; using Amazon.S3.Model; using Microsoft.Extensions.Logging; namespace S3ConditionalRequestsScenario; /// <summary> /// Encapsulate the Amazon S3 operations. /// </summary> public class S3ActionsWrapper { private readonly IAmazonS3 _amazonS3; private readonly ILogger<S3ActionsWrapper> _logger; /// <summary> /// Constructor for the S3ActionsWrapper. /// </summary> /// <param name="amazonS3">The injected S3 client.</param> /// <param name="logger">The class logger.</param> public S3ActionsWrapper(IAmazonS3 amazonS3, ILogger<S3ActionsWrapper> logger) { _amazonS3 = amazonS3; _logger = logger; } /// <summary> /// Retrieves an object from Amazon S3 with a conditional request. /// </summary> /// <param name="objectKey">The key of the object to retrieve.</param> /// <param name="sourceBucket">The source bucket of the object.</param> /// <param name="conditionType">The type of condition: 'IfMatch', 'IfNoneMatch', 'IfModifiedSince', 'IfUnmodifiedSince'.</param> /// <param name="conditionDateValue">The value to use for the condition for dates.</param> /// <param name="etagConditionalValue">The value to use for the condition for etags.</param> /// <returns>True if the conditional read is successful, False otherwise.</returns> public async Task<bool> GetObjectConditional(string objectKey, string sourceBucket, S3ConditionType conditionType, DateTime? conditionDateValue = null, string? etagConditionalValue = null) { try { var getObjectRequest = new GetObjectRequest { BucketName = sourceBucket, Key = objectKey }; switch (conditionType) { case S3ConditionType.IfMatch: getObjectRequest.EtagToMatch = etagConditionalValue; break; case S3ConditionType.IfNoneMatch: getObjectRequest.EtagToNotMatch = etagConditionalValue; break; case S3ConditionType.IfModifiedSince: getObjectRequest.ModifiedSinceDateUtc = conditionDateValue.GetValueOrDefault(); break; case S3ConditionType.IfUnmodifiedSince: getObjectRequest.UnmodifiedSinceDateUtc = conditionDateValue.GetValueOrDefault(); break; default: throw new ArgumentOutOfRangeException(nameof(conditionType), conditionType, null); } var response = await _amazonS3.GetObjectAsync(getObjectRequest); var sampleBytes = new byte[20]; await response.ResponseStream.ReadAsync(sampleBytes, 0, 20); _logger.LogInformation($"Conditional read successful. Here are the first 20 bytes of the object:\n{System.Text.Encoding.UTF8.GetString(sampleBytes)}"); return true; } catch (AmazonS3Exception e) { if (e.ErrorCode == "PreconditionFailed") { _logger.LogError("Conditional read failed: Precondition failed"); } else if (e.ErrorCode == "NotModified") { _logger.LogError("Conditional read failed: Object not modified"); } else { _logger.LogError($"Unexpected error: {e.ErrorCode}"); throw; } return false; } } /// <summary> /// Uploads an object to Amazon S3 with a conditional request. Prevents overwrite using an IfNoneMatch condition for the object key. 
/// </summary> /// <param name="objectKey">The key of the object to upload.</param> /// <param name="bucket">The source bucket of the object.</param> /// <param name="content">The content to upload as a string.</param> /// <returns>The ETag if the conditional write is successful, empty otherwise.</returns> public async Task<string> PutObjectConditional(string objectKey, string bucket, string content) { try { var putObjectRequest = new PutObjectRequest { BucketName = bucket, Key = objectKey, ContentBody = content, IfNoneMatch = "*" }; var putResult = await _amazonS3.PutObjectAsync(putObjectRequest); _logger.LogInformation($"Conditional write successful for key {objectKey} in bucket {bucket}."); return putResult.ETag; } catch (AmazonS3Exception e) { if (e.ErrorCode == "PreconditionFailed") { _logger.LogError("Conditional write failed: Precondition failed"); } else { _logger.LogError($"Unexpected error: {e.ErrorCode}"); throw; } return string.Empty; } } /// <summary> /// Copies an object from one Amazon S3 bucket to another with a conditional request. /// </summary> /// <param name="sourceKey">The key of the source object to copy.</param> /// <param name="destKey">The key of the destination object.</param> /// <param name="sourceBucket">The source bucket of the object.</param> /// <param name="destBucket">The destination bucket of the object.</param> /// <param name="conditionType">The type of condition to apply, e.g. 'CopySourceIfMatch', 'CopySourceIfNoneMatch', 'CopySourceIfModifiedSince', 'CopySourceIfUnmodifiedSince'.</param> /// <param name="conditionDateValue">The value to use for the condition for dates.</param> /// <param name="etagConditionalValue">The value to use for the condition for etags.</param> /// <returns>True if the conditional copy is successful, False otherwise.</returns> public async Task<bool> CopyObjectConditional(string sourceKey, string destKey, string sourceBucket, string destBucket, S3ConditionType conditionType, DateTime? conditionDateValue = null, string? etagConditionalValue = null) { try { var copyObjectRequest = new CopyObjectRequest { DestinationBucket = destBucket, DestinationKey = destKey, SourceBucket = sourceBucket, SourceKey = sourceKey }; switch (conditionType) { case S3ConditionType.IfMatch: copyObjectRequest.ETagToMatch = etagConditionalValue; break; case S3ConditionType.IfNoneMatch: copyObjectRequest.ETagToNotMatch = etagConditionalValue; break; case S3ConditionType.IfModifiedSince: copyObjectRequest.ModifiedSinceDateUtc = conditionDateValue.GetValueOrDefault(); break; case S3ConditionType.IfUnmodifiedSince: copyObjectRequest.UnmodifiedSinceDateUtc = conditionDateValue.GetValueOrDefault(); break; default: throw new ArgumentOutOfRangeException(nameof(conditionType), conditionType, null); } await _amazonS3.CopyObjectAsync(copyObjectRequest); _logger.LogInformation($"Conditional copy successful for key {destKey} in bucket {destBucket}."); return true; } catch (AmazonS3Exception e) { if (e.ErrorCode == "PreconditionFailed") { _logger.LogError("Conditional copy failed: Precondition failed"); } else if (e.ErrorCode == "304") { _logger.LogError("Conditional copy failed: Object not modified"); } else { _logger.LogError($"Unexpected error: {e.ErrorCode}"); throw; } return false; } } /// <summary> /// Create a new Amazon S3 bucket with a specified name and check that the bucket is ready. 
/// </summary> /// <param name="bucketName">The name of the bucket to create.</param> /// <returns>True if successful.</returns> public async Task<bool> CreateBucketWithName(string bucketName) { Console.WriteLine($"\tCreating bucket {bucketName}."); try { var request = new PutBucketRequest { BucketName = bucketName, UseClientRegion = true }; await _amazonS3.PutBucketAsync(request); var bucketReady = false; var retries = 5; while (!bucketReady && retries > 0) { Thread.Sleep(5000); bucketReady = await Amazon.S3.Util.AmazonS3Util.DoesS3BucketExistV2Async(_amazonS3, bucketName); retries--; } return bucketReady; } catch (BucketAlreadyExistsException ex) { Console.WriteLine($"Bucket already exists: '{ex.Message}'"); return true; } catch (AmazonS3Exception ex) { Console.WriteLine($"Error creating bucket: '{ex.Message}'"); return false; } } /// <summary> /// Cleans up objects and deletes the bucket by name. /// </summary> /// <param name="bucketName">The name of the bucket.</param> /// <returns>Async task.</returns> public async Task CleanupBucketByName(string bucketName) { try { var listObjectsResponse = await _amazonS3.ListObjectsV2Async(new ListObjectsV2Request { BucketName = bucketName }); foreach (var obj in listObjectsResponse.S3Objects) { await _amazonS3.DeleteObjectAsync(new DeleteObjectRequest { BucketName = bucketName, Key = obj.Key }); } await _amazonS3.DeleteBucketAsync(new DeleteBucketRequest { BucketName = bucketName }); Console.WriteLine($"Cleaned up bucket: {bucketName}."); } catch (AmazonS3Exception e) { if (e.ErrorCode == "NoSuchBucket") { Console.WriteLine($"Bucket {bucketName} does not exist, skipping cleanup."); } else { Console.WriteLine($"Error deleting bucket: {e.ErrorCode}"); throw; } } } /// <summary> /// List the contents of the bucket with their ETag. /// </summary> /// <param name="bucketName">The name of the bucket.</param> /// <returns>Async task.</returns> public async Task<List<S3Object>> ListBucketContentsByName(string bucketName) { var results = new List<S3Object>(); try { Console.WriteLine($"\t Items in bucket {bucketName}"); var listObjectsResponse = await _amazonS3.ListObjectsV2Async(new ListObjectsV2Request { BucketName = bucketName }); if (listObjectsResponse.S3Objects.Count == 0) { Console.WriteLine("\t\tNo objects found."); } else { foreach (var obj in listObjectsResponse.S3Objects) { Console.WriteLine($"\t\t object: {obj.Key} ETag {obj.ETag}"); } } results = listObjectsResponse.S3Objects; } catch (AmazonS3Exception e) { if (e.ErrorCode == "NoSuchBucket") { _logger.LogError($"Bucket {bucketName} does not exist."); } else { _logger.LogError($"Error listing bucket and objects: {e.ErrorCode}"); throw; } } return results; } /// <summary> /// Delete an object from a specific bucket. /// </summary> /// <param name="bucketName">The Amazon S3 bucket to use.</param> /// <param name="objectKey">The key of the object to delete.</param> /// <returns>True if successful.</returns> public async Task<bool> DeleteObjectFromBucket(string bucketName, string objectKey) { try { var request = new DeleteObjectRequest() { BucketName = bucketName, Key = objectKey }; await _amazonS3.DeleteObjectAsync(request); Console.WriteLine($"Deleted {objectKey} in {bucketName}."); return true; } catch (AmazonS3Exception ex) { Console.WriteLine($"\tUnable to delete object {objectKey} in bucket {bucketName}: " + ex.Message); return false; } } /// <summary> /// Delete a specific bucket by deleting the objects and then the bucket itself. 
/// </summary> /// <param name="bucketName">The Amazon S3 bucket to use.</param> /// <param name="objectKey">The key of the object to delete.</param> /// <param name="versionId">Optional versionId.</param> /// <returns>True if successful.</returns> public async Task<bool> CleanUpBucketByName(string bucketName) { try { var allFiles = await ListBucketContentsByName(bucketName); foreach (var fileInfo in allFiles) { await DeleteObjectFromBucket(fileInfo.BucketName, fileInfo.Key); } var request = new DeleteBucketRequest() { BucketName = bucketName, }; var response = await _amazonS3.DeleteBucketAsync(request); Console.WriteLine($"\tDelete for {bucketName} complete."); return response.HttpStatusCode == HttpStatusCode.OK; } catch (AmazonS3Exception ex) { Console.WriteLine($"\tUnable to delete bucket {bucketName}: " + ex.Message); return false; } } }
Python
SDK for Python (Boto3)
Note

There's more on GitHub. Find the complete example and learn how to set up and run in the AWS Code Examples Repository.

Run an interactive scenario demonstrating Amazon S3 conditional requests.

""" Purpose Shows how to use AWS SDK for Python (Boto3) to get started using conditional requests for Amazon Simple Storage Service (Amazon S3). """ import logging import random import sys import datetime import boto3 from botocore.exceptions import ClientError from s3_conditional_requests import S3ConditionalRequests # Add relative path to include demo_tools in this code example without need for setup. sys.path.append("../../../..") import demo_tools.question as q # noqa # Constants FILE_CONTENT = "This is a test file for S3 conditional requests." RANDOM_SUFFIX = str(random.randint(100, 999)) logger = logging.getLogger(__name__) class ConditionalRequestsScenario: """Runs a scenario that shows how to use S3 Conditional Requests.""" def __init__(self, conditional_requests, s3_client): """ :param conditional_requests: An object that wraps S3 conditional request actions. :param s3_client: A Boto3 S3 client for setup and cleanup operations. """ self.conditional_requests = conditional_requests self.s3_client = s3_client def setup_scenario(self, source_bucket: str, dest_bucket: str, object_key: str): """ Sets up the scenario by creating a source and destination bucket. Prompts the user to provide a bucket name prefix. :param source_bucket: The name of the source bucket. :param dest_bucket: The name of the destination bucket. :param object_key: The name of a test file to add to the source bucket. """ # Create the buckets. try: self.s3_client.create_bucket(Bucket=source_bucket) self.s3_client.create_bucket(Bucket=dest_bucket) print( f"Created source bucket: {source_bucket} and destination bucket: {dest_bucket}" ) except ClientError as e: error_code = e.response["Error"]["Code"] logger.error(f"Error creating buckets: {error_code}") raise # Upload test file into the source bucket. try: print(f"Uploading file {object_key} to bucket {source_bucket}") response = self.s3_client.put_object( Bucket=source_bucket, Key=object_key, Body=FILE_CONTENT ) object_etag = response["ETag"] return object_etag except Exception as e: logger.error( f"Failed to upload file {object_key} to bucket {source_bucket}: {e}" ) def cleanup_scenario(self, source_bucket: str, dest_bucket: str): """ Cleans up the scenario by deleting the source and destination buckets. :param source_bucket: The name of the source bucket. :param dest_bucket: The name of the destination bucket. """ self.cleanup_bucket(source_bucket) self.cleanup_bucket(dest_bucket) def cleanup_bucket(self, bucket_name: str): """ Cleans up the bucket by deleting all objects and then the bucket itself. :param bucket_name: The name of the bucket. """ try: # Get list of all objects in the bucket. list_response = self.s3_client.list_objects_v2(Bucket=bucket_name) objs = list_response.get("Contents", []) for obj in objs: key = obj["Key"] self.s3_client.delete_object(Bucket=bucket_name, Key=key) self.s3_client.delete_bucket(Bucket=bucket_name) print(f"Cleaned up bucket: {bucket_name}.") except ClientError as e: error_code = e.response["Error"]["Code"] if error_code == "NoSuchBucket": logger.info(f"Bucket {bucket_name} does not exist, skipping cleanup.") else: logger.error(f"Error deleting bucket: {error_code}") raise def display_buckets(self, source_bucket: str, dest_bucket: str): """ Display a list of the objects in the test buckets. :param source_bucket: The name of the source bucket. :param dest_bucket: The name of the destination bucket. 
""" self.list_bucket_contents(source_bucket) self.list_bucket_contents(dest_bucket) def list_bucket_contents(self, bucket_name): """ Display a list of the objects in the bucket. :param bucket_name: The name of the bucket. """ try: # Get list of all objects in the bucket. print(f"\t Items in bucket {bucket_name}") list_response = self.s3_client.list_objects_v2(Bucket=bucket_name) objs = list_response.get("Contents", []) if not objs: print("\t\tNo objects found.") for obj in objs: key = obj["Key"] print(f"\t\t object: {key} ETag {obj['ETag']}") return objs except ClientError as e: error_code = e.response["Error"]["Code"] if error_code == "NoSuchBucket": logger.info(f"Bucket {bucket_name} does not exist.") else: logger.error(f"Error listing bucket and objects: {error_code}") raise def display_menu( self, source_bucket: str, dest_bucket: str, object_key: str, etag: str ): """ Displays the menu of conditional request options for the user. :param source_bucket: The name of the source bucket. :param dest_bucket: The name of the destination bucket. :param object_key: The key of the test object in the source bucket. :param etag: The etag of the test object in the source bucket. """ actions = [ "Print list of bucket items.", "Perform a conditional read.", "Perform a conditional copy.", "Perform a conditional write.", "Clean up and exit.", ] conditions = [ "If-Match: using the object's ETag. This condition should succeed.", "If-None-Match: using the object's ETag. This condition should fail.", "If-Modified-Since: using yesterday's date. This condition should succeed.", "If-Unmodified-Since: using yesterday's date. This condition should fail.", ] condition_types = [ "IfMatch", "IfNoneMatch", "IfModifiedSince", "IfUnmodifiedSince", ] copy_condition_types = [ "CopySourceIfMatch", "CopySourceIfNoneMatch", "CopySourceIfModifiedSince", "CopySourceIfUnmodifiedSince", ] yesterday_date = datetime.datetime.utcnow() - datetime.timedelta(days=1) choice = 0 while choice != 4: print("-" * 88) print("Choose an action to explore some example conditional requests.") choice = q.choose("Which action would you like to take? ", actions) if choice == 0: print("Listing the objects and buckets.") self.display_buckets(source_bucket, dest_bucket) elif choice == 1: print("Perform a conditional read.") condition_type = q.choose("Enter the condition type : ", conditions) if condition_type == 0 or condition_type == 1: self.conditional_requests.get_object_conditional( object_key, source_bucket, condition_types[condition_type], etag ) elif condition_type == 2 or condition_type == 3: self.conditional_requests.get_object_conditional( object_key, source_bucket, condition_types[condition_type], yesterday_date, ) elif choice == 2: print("Perform a conditional copy.") condition_type = q.choose("Enter the condition type : ", conditions) dest_key = q.ask("Enter an object key: ", q.non_empty) if condition_type == 0 or condition_type == 1: self.conditional_requests.copy_object_conditional( object_key, dest_key, source_bucket, dest_bucket, copy_condition_types[condition_type], etag, ) elif condition_type == 2 or condition_type == 3: self.conditional_requests.copy_object_conditional( object_key, dest_key, copy_condition_types[condition_type], yesterday_date, ) elif choice == 3: print( "Perform a conditional write using IfNoneMatch condition on the object key." 
) print("If the key is a duplicate, the write will fail.") object_key = q.ask("Enter an object key: ", q.non_empty) self.conditional_requests.put_object_conditional( object_key, source_bucket, b"Conditional write example data." ) elif choice == 4: print("Proceeding to cleanup.") def run_scenario(self): """ Runs the interactive scenario. """ print("-" * 88) print("Welcome to the Amazon S3 conditional requests example.") print("-" * 88) print( f"""\ This example demonstrates the use of conditional requests for S3 operations. You can use conditional requests to add preconditions to S3 read requests to return or copy an object based on its Entity tag (ETag), or last modified date. You can use a conditional write requests to prevent overwrites by ensuring there is no existing object with the same key. This example will allow you to perform conditional reads and writes that will succeed or fail based on your selected options. Sample buckets and a sample object will be created as part of the example. """ ) bucket_prefix = q.ask("Enter a bucket name prefix: ", q.non_empty) source_bucket_name = f"{bucket_prefix}-source-{RANDOM_SUFFIX}" dest_bucket_name = f"{bucket_prefix}-dest-{RANDOM_SUFFIX}" object_key = "test-upload-file.txt" try: etag = self.setup_scenario(source_bucket_name, dest_bucket_name, object_key) self.display_menu(source_bucket_name, dest_bucket_name, object_key, etag) finally: self.cleanup_scenario(source_bucket_name, dest_bucket_name) print("-" * 88) print("Thanks for watching.") print("-" * 88) if __name__ == "__main__": scenario = ConditionalRequestsScenario( S3ConditionalRequests.from_client(), boto3.client("s3") ) scenario.run_scenario()

A wrapper class that defines the conditional request operations.

import boto3
import logging

from botocore.exceptions import ClientError

# Configure logging
logger = logging.getLogger(__name__)


class S3ConditionalRequests:
    """Encapsulates S3 conditional request operations."""

    def __init__(self, s3_client):
        self.s3 = s3_client

    @classmethod
    def from_client(cls):
        """
        Instantiates this class from a Boto3 client.
        """
        s3_client = boto3.client("s3")
        return cls(s3_client)

    def get_object_conditional(
        self,
        object_key: str,
        source_bucket: str,
        condition_type: str,
        condition_value: str,
    ):
        """
        Retrieves an object from Amazon S3 with a conditional request.

        :param object_key: The key of the object to retrieve.
        :param source_bucket: The source bucket of the object.
        :param condition_type: The type of condition: 'IfMatch', 'IfNoneMatch',
            'IfModifiedSince', 'IfUnmodifiedSince'.
        :param condition_value: The value to use for the condition.
        """
        try:
            response = self.s3.get_object(
                Bucket=source_bucket,
                Key=object_key,
                **{condition_type: condition_value},
            )
            sample_bytes = response["Body"].read(20)
            print(
                "\tConditional read successful. Here are the first 20 bytes of the object:\n"
            )
            print(f"\t{sample_bytes}")
        except ClientError as e:
            error_code = e.response["Error"]["Code"]
            if error_code == "PreconditionFailed":
                print("\tConditional read failed: Precondition failed")
            elif error_code == "304":  # Not modified error code.
                print("\tConditional read failed: Object not modified")
            else:
                logger.error(f"Unexpected error: {error_code}")
                raise

    def put_object_conditional(self, object_key: str, source_bucket: str, data: bytes):
        """
        Uploads an object to Amazon S3 with a conditional request. Prevents overwrite
        using an IfNoneMatch condition for the object key.

        :param object_key: The key of the object to upload.
        :param source_bucket: The source bucket of the object.
        :param data: The data to upload.
        """
        try:
            self.s3.put_object(
                Bucket=source_bucket, Key=object_key, Body=data, IfNoneMatch="*"
            )
            print(
                f"\tConditional write successful for key {object_key} in bucket {source_bucket}."
            )
        except ClientError as e:
            error_code = e.response["Error"]["Code"]
            if error_code == "PreconditionFailed":
                print("\tConditional write failed: Precondition failed")
            else:
                logger.error(f"Unexpected error: {error_code}")
                raise

    def copy_object_conditional(
        self,
        source_key: str,
        dest_key: str,
        source_bucket: str,
        dest_bucket: str,
        condition_type: str,
        condition_value: str,
    ):
        """
        Copies an object from one Amazon S3 bucket to another with a conditional request.

        :param source_key: The key of the source object to copy.
        :param dest_key: The key of the destination object.
        :param source_bucket: The source bucket of the object.
        :param dest_bucket: The destination bucket of the object.
        :param condition_type: The type of condition to apply, e.g. 'CopySourceIfMatch',
            'CopySourceIfNoneMatch', 'CopySourceIfModifiedSince',
            'CopySourceIfUnmodifiedSince'.
        :param condition_value: The value to use for the condition.
        """
        try:
            self.s3.copy_object(
                Bucket=dest_bucket,
                Key=dest_key,
                CopySource={"Bucket": source_bucket, "Key": source_key},
                **{condition_type: condition_value},
            )
            print(
                f"\tConditional copy successful for key {dest_key} in bucket {dest_bucket}."
            )
        except ClientError as e:
            error_code = e.response["Error"]["Code"]
            if error_code == "PreconditionFailed":
                print("\tConditional copy failed: Precondition failed")
            elif error_code == "304":  # Not modified error code.
                print("\tConditional copy failed: Object not modified")
            else:
                logger.error(f"Unexpected error: {error_code}")
                raise
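
You can also call the wrapper outside the interactive scenario. The following is a minimal sketch: the bucket name is a placeholder for a bucket you already own, and the import assumes the wrapper is saved as s3_conditional_requests.py, as in the scenario code above.

from s3_conditional_requests import S3ConditionalRequests

wrapper = S3ConditionalRequests.from_client()
bucket = "amzn-s3-demo-bucket"  # Placeholder name; use an existing bucket you own.

# The first write succeeds because no object with this key exists yet.
wrapper.put_object_conditional("notes.txt", bucket, b"first version")

# The second write fails the If-None-Match: * precondition because the key now exists.
wrapper.put_object_conditional("notes.txt", bucket, b"second version")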

For a complete list of AWS SDK developer guides and code examples, see Developing with Amazon S3 using the AWS SDKs. This topic also includes information about getting started and details about previous SDK versions.