18 changes: 18 additions & 0 deletions awscli/customizations/s3/s3handler.py
@@ -348,9 +348,27 @@ class UploadRequestSubmitter(BaseTransferRequestSubmitter):
REQUEST_MAPPER_METHOD = RequestParamsMapper.map_put_object_params
RESULT_SUBSCRIBER_CLASS = UploadResultSubscriber

def __init__(self, transfer_manager, result_queue, cli_params):
super().__init__(transfer_manager, result_queue, cli_params)
self._dryrun_validated_buckets = set()

def can_submit(self, fileinfo):
return fileinfo.operation_name == 'upload'

def _submit_dryrun(self, fileinfo):
bucket, key = find_bucket_key(fileinfo.dest)
# Validate bucket access before reporting dryrun success.
# This ensures dryrun fails if the bucket doesn't exist or the user
# lacks basic access permissions, providing behavior consistent with
        # download operations, which call HeadObject during file discovery.
# Note: head_bucket checks bucket existence and basic access but does
# not specifically validate s3:PutObject permission.
if bucket not in self._dryrun_validated_buckets:
client = self._transfer_manager.client
client.head_bucket(Bucket=bucket)
self._dryrun_validated_buckets.add(bucket)
super()._submit_dryrun(fileinfo)

def _add_additional_subscribers(self, subscribers, fileinfo):
subscribers.append(ProvideSizeSubscriber(fileinfo.size))
if self._should_inject_content_type():
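As an aside for reviewers: a minimal sketch of the two head_bucket failure modes the new tests below exercise. The bucket name is hypothetical and the snippet is illustrative only, not part of this change.

import boto3
from botocore.exceptions import ClientError

s3 = boto3.client('s3')
try:
    # HeadBucket returns no response body; success means the bucket
    # exists and the caller has basic access to it.
    s3.head_bucket(Bucket='example-bucket')  # hypothetical bucket name
except ClientError as e:
    # HEAD responses carry no error body, so botocore surfaces the
    # HTTP status as the error code: '404' for a missing bucket,
    # '403' for denied access.
    error_code = e.response['Error']['Code']
    if error_code == '404':
        print('bucket does not exist')
    elif error_code == '403':
        print('access denied to bucket')
    else:
        raise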
106 changes: 106 additions & 0 deletions tests/unit/customizations/s3/test_s3handler.py
@@ -12,6 +12,7 @@
# language governing permissions and limitations under the License.
import os

from botocore.exceptions import ClientError
from s3transfer.manager import TransferManager

from awscli.testutils import mock
@@ -365,6 +366,7 @@ def test_warn_on_too_large_transfer(self):

def test_dry_run(self):
self.cli_params['dryrun'] = True
self.transfer_manager.client = mock.Mock()
self.transfer_request_submitter = UploadRequestSubmitter(
self.transfer_manager, self.result_queue, self.cli_params)
fileinfo = FileInfo(
@@ -378,6 +380,110 @@ def test_dry_run(self):
self.assertTrue(result.src.endswith(self.filename))
self.assertEqual(result.dest, 's3://' + self.bucket + '/' + self.key)

def test_dry_run_validates_bucket_access(self):
# Regression test for https://github.com/aws/aws-cli/issues/9935
# Verify that dryrun validates bucket access by calling head_bucket
self.cli_params['dryrun'] = True
self.transfer_manager.client = mock.Mock()
self.transfer_request_submitter = UploadRequestSubmitter(
self.transfer_manager, self.result_queue, self.cli_params)
fileinfo = FileInfo(
src=self.filename, src_type='local', operation_name='upload',
dest=self.bucket + '/' + self.key, dest_type='s3')
self.transfer_request_submitter.submit(fileinfo)

# Verify head_bucket was called to validate bucket access
self.transfer_manager.client.head_bucket.assert_called_once_with(
Bucket=self.bucket
)

def test_dry_run_fails_on_access_denied(self):
# Regression test for https://github.com/aws/aws-cli/issues/9935
# Verify that dryrun fails with AccessDenied when user lacks
# bucket access permissions
self.cli_params['dryrun'] = True
self.transfer_manager.client = mock.Mock()
self.transfer_manager.client.head_bucket.side_effect = ClientError(
{'Error': {'Code': '403', 'Message': 'Forbidden'}},
'HeadBucket'
)
self.transfer_request_submitter = UploadRequestSubmitter(
self.transfer_manager, self.result_queue, self.cli_params)
fileinfo = FileInfo(
src=self.filename, src_type='local', operation_name='upload',
dest=self.bucket + '/' + self.key, dest_type='s3')

with self.assertRaises(ClientError) as context:
self.transfer_request_submitter.submit(fileinfo)

self.assertEqual(context.exception.response['Error']['Code'], '403')

def test_dry_run_fails_on_nonexistent_bucket(self):
# Regression test for https://github.com/aws/aws-cli/issues/9935
# Verify that dryrun fails when bucket does not exist
self.cli_params['dryrun'] = True
self.transfer_manager.client = mock.Mock()
self.transfer_manager.client.head_bucket.side_effect = ClientError(
{'Error': {'Code': '404', 'Message': 'Not Found'}},
'HeadBucket'
)
self.transfer_request_submitter = UploadRequestSubmitter(
self.transfer_manager, self.result_queue, self.cli_params)
fileinfo = FileInfo(
src=self.filename, src_type='local', operation_name='upload',
dest=self.bucket + '/' + self.key, dest_type='s3')

with self.assertRaises(ClientError) as context:
self.transfer_request_submitter.submit(fileinfo)

self.assertEqual(context.exception.response['Error']['Code'], '404')

def test_dry_run_caches_bucket_validation(self):
# Regression test for https://github.com/aws/aws-cli/issues/9935
# Verify that bucket access is only validated once per bucket
self.cli_params['dryrun'] = True
self.transfer_manager.client = mock.Mock()
self.transfer_request_submitter = UploadRequestSubmitter(
self.transfer_manager, self.result_queue, self.cli_params)

# Submit multiple files to the same bucket
for i in range(3):
fileinfo = FileInfo(
src=self.filename, src_type='local', operation_name='upload',
dest=self.bucket + '/key' + str(i), dest_type='s3')
self.transfer_request_submitter.submit(fileinfo)

# head_bucket should only be called once per bucket
self.assertEqual(
self.transfer_manager.client.head_bucket.call_count, 1
)

# All three should result in DryRunResult
for i in range(3):
result = self.result_queue.get()
self.assertIsInstance(result, DryRunResult)

def test_dry_run_validates_each_bucket_separately(self):
# Regression test for https://github.com/aws/aws-cli/issues/9935
# Verify that different buckets are validated separately
self.cli_params['dryrun'] = True
self.transfer_manager.client = mock.Mock()
self.transfer_request_submitter = UploadRequestSubmitter(
self.transfer_manager, self.result_queue, self.cli_params)

# Submit files to different buckets
buckets = ['bucket1', 'bucket2', 'bucket3']
for bucket in buckets:
fileinfo = FileInfo(
src=self.filename, src_type='local', operation_name='upload',
dest=bucket + '/' + self.key, dest_type='s3')
self.transfer_request_submitter.submit(fileinfo)

# head_bucket should be called once per bucket
self.assertEqual(
self.transfer_manager.client.head_bucket.call_count, 3
)

def test_submit_move_adds_delete_source_subscriber(self):
fileinfo = FileInfo(
src=self.filename, dest=self.bucket+'/'+self.key)
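With this change, a dryrun upload to a bucket that does not exist or is not accessible should fail instead of printing a dryrun success line. A hypothetical session (bucket name and exact error wording are illustrative):

$ aws s3 cp file.txt s3://some-missing-bucket/ --dryrun
fatal error: An error occurred (404) when calling the HeadBucket operation: Not Found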