Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
31 changes: 30 additions & 1 deletion docs/reference/kombu.transport.SQS.rst
Original file line number Diff line number Diff line change
Expand Up @@ -70,4 +70,33 @@ Message Attributes

SQS supports sending message attributes along with the message body.
To use this feature, you can pass a 'message_attributes' as keyword argument
to `basic_publish` method.
to `basic_publish` method.

Large Message Support
---------------------

SQS has a maximum message size limit of 256KB. To handle larger messages,
the SQS transport automatically supports the Amazon SQS Extended Client Library,
which uses S3 to store message payloads that exceed the SQS size limit.

This feature works out of the box with the SQS transport - no additional
installation is required, as the necessary dependencies are included with
the SQS extras. To enable sending of large messages, set the
``large_payload_bucket`` transport option to the name of the S3 bucket to
use; large messages received from SQS are resolved from S3 automatically.

**How it works:**

- When sending a message larger than 256KB, the transport automatically stores
the message body in S3
- SQS receives a reference pointer to the S3 object instead of the actual message
- When receiving the message, the transport transparently retrieves the payload
from S3

**IAM Permissions:**

To use this feature, your AWS credentials need appropriate S3 permissions in
addition to standard SQS permissions:

- ``s3:GetObject`` - for retrieving large messages
- ``s3:PutObject`` - for storing large messages

The S3 bucket used for storage is managed by the SQS Extended Client Library.
5 changes: 5 additions & 0 deletions kombu/asynchronous/aws/ext.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,11 @@ def get_cert_path() -> str:
"get_cert_path is unavailable because boto3 or botocore is not installed."
)

try:
import sqs_extended_client
except ImportError:
sqs_extended_client = None

__all__ = (
'exceptions', 'AWSRequest', 'get_response', 'get_cert_path',
)
5 changes: 5 additions & 0 deletions kombu/asynchronous/aws/sqs/ext.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,3 +7,8 @@
import boto3
except ImportError:
boto3 = None

try:
import sqs_extended_client
except ImportError:
sqs_extended_client = None
75 changes: 73 additions & 2 deletions kombu/transport/SQS.py
Original file line number Diff line number Diff line change
Expand Up @@ -125,6 +125,26 @@
For a complete list of settings you can adjust using this option see
https://botocore.amazonaws.com/v1/documentation/api/latest/reference/config.html

Large Message Support
---------------------
SQS has a maximum message size limit of 256KB. To handle larger messages,
this transport automatically supports the Amazon SQS Extended Client Library,
which uses S3 to store message payloads that exceed the SQS size limit.

This feature works out of the box with the SQS transport - no additional
installation is required, as the necessary dependencies are included with
the SQS extras. To enable sending of large messages, set the
``large_payload_bucket`` transport option to the name of the S3 bucket to
use; large messages received from SQS are resolved from S3 automatically.

When a large message is sent:
- The message body is automatically stored in S3
- SQS receives a reference pointer to the S3 object
- When the message is received, the transport transparently retrieves
the payload from S3

Note: You need appropriate S3 permissions in addition to SQS permissions
for this feature to work. The IAM policy should include s3:GetObject and
s3:PutObject permissions for the S3 bucket used by the extended client.

Features
========
* Type: Virtual
Expand All @@ -140,6 +160,7 @@

import base64
import binascii
import json
import re
import socket
import string
Expand All @@ -154,7 +175,7 @@
from vine import ensure_promise, promise, transform

from kombu.asynchronous import get_event_loop
from kombu.asynchronous.aws.ext import boto3, exceptions
from kombu.asynchronous.aws.ext import boto3, exceptions, sqs_extended_client
from kombu.asynchronous.aws.sqs.connection import AsyncSQSConnection
from kombu.asynchronous.aws.sqs.message import AsyncMessage
from kombu.log import get_logger
Expand Down Expand Up @@ -507,6 +528,24 @@ def _message_to_python(self, message, queue_name, q_url):
self._delete_message(queue_name, message)
return payload

# Check if this is a large payload stored in S3
if (
sqs_extended_client and
isinstance(payload, list)
Copy link

Copilot AI Jul 21, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The condition lacks proper error handling for cases where payload[0] access might fail if payload is an empty list. Consider adding bounds checking.

Suggested change
isinstance(payload, list)
isinstance(payload, list)
and len(payload) > 0

Copilot uses AI. Check for mistakes.

and payload[0] == sqs_extended_client.client.MESSAGE_POINTER_CLASS
Copy link

Copilot AI Jul 21, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

[nitpick] Consider extracting the sqs_extended_client.client.MESSAGE_POINTER_CLASS into a constant for better readability and maintainability.

Suggested change
and payload[0] == sqs_extended_client.client.MESSAGE_POINTER_CLASS
and payload[0] == MESSAGE_POINTER_CLASS

Copilot uses AI. Check for mistakes.

):
# Used the sqs_extended_client, so we need to fetch the file from S3 and use that as the payload
s3_details = payload[1]
s3_bucket_name, s3_key = s3_details["s3BucketName"], s3_details["s3Key"]

s3_client = self.s3()
response = s3_client.get_object(Bucket=s3_bucket_name, Key=s3_key)

# The message body is under a wrapper class called StreamingBody
streaming_body = response["Body"]
body = self._optional_b64_decode(streaming_body.read())
payload = json.loads(body)

return self._envelope_payload(payload, text, message, q_url)

def _messages_to_python(self, messages, queue):
Expand Down Expand Up @@ -740,6 +779,32 @@ def close(self):
# if "can't set attribute" not in str(exc):
# raise

def new_s3_client(
    self, region, access_key_id, secret_access_key, session_token=None
):
    """Build a boto3 S3 client for large-payload storage.

    Mirrors the credential handling of ``new_sqs_client``: an explicit
    boto3 session is created from the supplied credentials, and the
    transport's ``is_secure`` / ``endpoint_url`` settings are honoured.

    :param region: AWS region name for the session.
    :param access_key_id: AWS access key id.
    :param secret_access_key: AWS secret access key.
    :param session_token: optional temporary-credential session token.
    :return: a configured boto3 S3 client.
    """
    # SSL stays on unless the transport explicitly disabled it
    # (``is_secure is None`` means "not specified" -> secure by default).
    use_ssl = True if self.is_secure is None else self.is_secure
    extra_kwargs = {"use_ssl": use_ssl}
    if self.endpoint_url is not None:
        extra_kwargs["endpoint_url"] = self.endpoint_url

    session = boto3.session.Session(
        region_name=region,
        aws_access_key_id=access_key_id,
        aws_secret_access_key=secret_access_key,
        aws_session_token=session_token,
    )
    return session.client("s3", **extra_kwargs)

def s3(self):
    """Return an S3 client built from this connection's credentials.

    Region comes from the transport's configured ``region``; the access
    key pair is taken from the connection info (``userid``/``password``).
    """
    creds = self.conninfo
    return self.new_s3_client(
        region=self.region,
        access_key_id=creds.userid,
        secret_access_key=creds.password,
    )

def new_sqs_client(self, region, access_key_id,
secret_access_key, session_token=None):
session = boto3.session.Session(
Expand All @@ -756,7 +821,13 @@ def new_sqs_client(self, region, access_key_id,
client_kwargs['endpoint_url'] = self.endpoint_url
client_config = self.transport_options.get('client-config') or {}
config = Config(**client_config)
return session.client('sqs', config=config, **client_kwargs)
client = session.client('sqs', config=config, **client_kwargs)

if self.transport_options.get('large_payload_bucket') and sqs_extended_client:
client.large_payload_support = self.transport_options.get('large_payload_bucket')
client.use_legacy_attribute = False

return client

def sqs(self, queue=None):
if queue is not None and self.predefined_queues:
Expand Down
1 change: 1 addition & 0 deletions requirements/extras/sqs.txt
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
boto3>=1.26.143
pycurl>=7.43.0.5; sys_platform != 'win32' and platform_python_implementation=="CPython"
urllib3>=1.26.16
amazon-sqs-extended-client>=1.0.1
4 changes: 1 addition & 3 deletions requirements/test-ci.txt
Original file line number Diff line number Diff line change
Expand Up @@ -8,9 +8,7 @@ pymongo>=4.1.1; sys_platform != 'win32'
-r extras/msgpack.txt
-r extras/azureservicebus.txt
-r extras/azurestoragequeues.txt
boto3>=1.26.143; sys_platform != 'win32'
pycurl>=7.43.0.5; sys_platform != 'win32' and platform_python_implementation=="CPython"
urllib3>=1.26.16; sys_platform != 'win32'
-r extras/sqs.txt
-r extras/consul.txt
-r extras/zookeeper.txt
-r extras/brotli.txt
Expand Down
Loading
Loading