How to run python based AWS lambda tests using pytest and localstack

Turns out, setting up pytest and localstack is more difficult than expected, but I got it working 🙂

I have two modes:

  1. Local mode: localstack is running in a terminal window using localstack start. This is useful for TDD, as you don’t spin up new containers every time.
  2. CICD mode: localstack is not running and it would be automatically started before all tests start.

  1. Install packages:
    pip install docker boto3 localstack_utils localstack-client

  2. Setup pytest conftest:

conftest.py

import boto3
import docker
import pytest
import localstack_client.session
from localstack_utils.localstack import startup_localstack, stop_localstack
# patch boto3 to automatically use localstack.
@pytest.fixture(autouse=True)
def boto3_localstack_patch(monkeypatch):
    """Re-point boto3's client/resource factories at a localstack-backed session."""
    session_ls = localstack_client.session.Session()
    monkeypatch.setattr(boto3, "client", session_ls.client)
    monkeypatch.setattr(boto3, "resource", session_ls.resource)
# check if localstack running locally using docker lib, if not running, use localstack lib to start it.
def is_localstack_running() -> bool:
    """Return True when a container named "localstack-main" is running locally."""
    try:
        docker_client = docker.DockerClient(base_url='unix://var/run/docker.sock')
        container = docker_client.containers.get("localstack-main")
        return container.attrs['State']['Status'] == "running"
    except Exception:  # narrowed from bare `except:` so Ctrl-C/SystemExit still propagate
        return False
@pytest.fixture(autouse=True, scope="session")
def setup_localstack():
    """Start localstack for the session when not already running, and stop it afterwards."""
    # Remember whether *we* started it: after startup_localstack() the container
    # is running, so re-checking is_localstack_running() at teardown would never
    # stop it.
    started_here = not is_localstack_running()
    if started_here:
        print("Setup localstack")
        startup_localstack()
    yield
    if started_here:
        print("Teardown localstack")
        stop_localstack()
import boto3
import docker
import pytest
import localstack_client.session
from localstack_utils.localstack import startup_localstack, stop_localstack

# Route every boto3 client/resource created during a test through localstack.
@pytest.fixture(autouse=True)
def boto3_localstack_patch(monkeypatch):
    """Patch boto3's factory functions with localstack-backed equivalents."""
    localstack_session = localstack_client.session.Session()
    for factory_name in ("client", "resource"):
        monkeypatch.setattr(boto3, factory_name, getattr(localstack_session, factory_name))

# check if localstack running locally using docker lib, if not running, use localstack lib to start it.
def is_localstack_running() -> bool:
    """Return True when a container named "localstack-main" is running locally.

    Any failure (docker daemon unreachable, container missing) counts as "not
    running" so the caller falls back to starting localstack itself.
    """
    try:
        docker_client = docker.DockerClient(base_url='unix://var/run/docker.sock')
        try:
            container = docker_client.containers.get("localstack-main")
            return container.attrs['State']['Status'] == "running"
        finally:
            docker_client.close()  # don't leak the docker connection across tests
    except Exception:  # narrowed from bare `except:` so Ctrl-C/SystemExit still propagate
        return False

@pytest.fixture(autouse=True, scope="session")
def setup_localstack():
    """Start localstack for the test session if it is not already running.

    Local mode: localstack was started manually, so nothing is started or stopped.
    CICD mode: localstack is started here and torn down after the last test.
    """
    # Remember whether *we* started it: after startup_localstack() the container
    # is running, so re-checking is_localstack_running() at teardown would never
    # stop it (the original `if not is_localstack_running()` check could leave a
    # self-started localstack running forever).
    started_here = not is_localstack_running()
    if started_here:
        print("Setup localstack")
        startup_localstack()
    yield
    if started_here:
        print("Teardown localstack")
        stop_localstack()
import boto3 import docker import pytest import localstack_client.session from localstack_utils.localstack import startup_localstack, stop_localstack # patch boto3 to automatically use localstack. @pytest.fixture(autouse=True) def boto3_localstack_patch(monkeypatch): session_ls = localstack_client.session.Session() monkeypatch.setattr(boto3, "client", session_ls.client) monkeypatch.setattr(boto3, "resource", session_ls.resource) # check if localstack running locally using docker lib, if not running, use localstack lib to start it. def is_localstack_running() -> bool: try: docker_client = docker.DockerClient(base_url='unix://var/run/docker.sock') container = docker_client.containers.get("localstack-main") return container.attrs['State']['Status'] == "running" except: return False @pytest.fixture(autouse=True, scope="session") def setup_localstack(): if not is_localstack_running(): print("Setup localstack") startup_localstack() yield if not is_localstack_running(): print("Teardown localstack") stop_localstack()

Enter fullscreen mode Exit fullscreen mode

Fixture to cleanup after each test:

@pytest.fixture(autouse=True)
def aws_fixture():
    """Delete all S3 buckets and DynamoDB tables after each test."""
    yield

    # Cleanup S3. delete_bucket only succeeds on an empty bucket, so remove
    # all objects first.
    s3 = boto3.client("s3")
    buckets = [item["Name"] for item in s3.list_buckets()["Buckets"]]
    for bucket in buckets:
        for page in s3.get_paginator("list_objects_v2").paginate(Bucket=bucket):
            objects = [{"Key": obj["Key"]} for obj in page.get("Contents", [])]
            if objects:
                s3.delete_objects(Bucket=bucket, Delete={"Objects": objects})
        s3.delete_bucket(Bucket=bucket)

    # Cleanup DynamoDB
    dynamodb = boto3.client("dynamodb")
    tables_names = dynamodb.list_tables()["TableNames"]
    for table_name in tables_names:
        dynamodb.delete_table(TableName=table_name)
@pytest.fixture(autouse=True)
def aws_fixture():
    """Run the test, then delete all S3 buckets and DynamoDB tables so every test starts clean."""
    yield

    # Cleanup S3. delete_bucket fails with BucketNotEmpty on a bucket that
    # still holds objects, so empty each bucket before deleting it.
    s3 = boto3.client("s3")
    buckets = [item["Name"] for item in s3.list_buckets()["Buckets"]]
    for bucket in buckets:
        for page in s3.get_paginator("list_objects_v2").paginate(Bucket=bucket):
            objects = [{"Key": obj["Key"]} for obj in page.get("Contents", [])]
            if objects:
                s3.delete_objects(Bucket=bucket, Delete={"Objects": objects})
        s3.delete_bucket(Bucket=bucket)

    # Cleanup DynamoDB
    dynamodb = boto3.client("dynamodb")
    tables_names = dynamodb.list_tables()["TableNames"]
    for table_name in tables_names:
        dynamodb.delete_table(TableName=table_name)
@pytest.fixture(autouse=True) def aws_fixture(): # print("Setup") yield # print("Cleanup") # Cleanup S3 s3 = boto3.client("s3") buckets = [item["Name"] for item in s3.list_buckets()["Buckets"]] for bucket in buckets: s3.delete_bucket(Bucket=bucket) # Cleanup DynamoDB" dynamodb = boto3.client("dynamodb") tables_names = dynamodb.list_tables()["TableNames"] for table_name in tables_names: dynamodb.delete_table(TableName=table_name)

Enter fullscreen mode Exit fullscreen mode

Then in the handler, create the boto3 client like this. This slightly funky pattern complies with the AWS-recommended way of initialising clients outside the handler, while still letting pytest patch boto3.

handler.py

import functools

# DynamoDB Client
@functools.cache
def dynamodb_client() -> DynamoDBClient:
    """Create the DynamoDB client once, on first use, and reuse it afterwards."""
    return boto3.client("dynamodb")
import functools

# DynamoDB Client
@functools.cache
def dynamodb_client() -> DynamoDBClient:
    """Create the DynamoDB client lazily on first call and memoise it for the process."""
    client = boto3.client("dynamodb")
    return client
import functools # DynamoDB Client @functools.cache def dynamodb_client() -> DynamoDBClient: return boto3.client("dynamodb")

Enter fullscreen mode Exit fullscreen mode

That's it.

Full example:

handler_blog.py

from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSEvent
from aws_lambda_powertools.utilities.typing import LambdaContext
import functools
from mypy_boto3_dynamodb import DynamoDBClient
import boto3
# DynamoDB Client setup
@functools.cache
def dynamodb_client() -> DynamoDBClient:
    """Create the DynamoDB client once, on first use, and reuse it afterwards."""
    return boto3.client("dynamodb")
def handler(event: SQSEvent, context: LambdaContext):
    """Demo SQS handler: upsert a fixed item into the `super-table` DynamoDB table."""
    dynamodb_client().update_item(
        TableName="super-table",
        Key={"objectId": {"N": "123456"}},
        ExpressionAttributeNames={"#name": "name"},
        ExpressionAttributeValues={":name": {"S": "batman"}},
        UpdateExpression="set #name = :name",
        ReturnValues="NONE",
    )
    return []
from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSEvent
from aws_lambda_powertools.utilities.typing import LambdaContext
import functools
from mypy_boto3_dynamodb import DynamoDBClient
import boto3


# DynamoDB Client setup
@functools.cache
def dynamodb_client() -> DynamoDBClient:
    """Create the DynamoDB client lazily and memoise it, so tests can patch boto3 first."""
    dynamodb = boto3.client("dynamodb")
    return dynamodb


def handler(event: SQSEvent, context: LambdaContext):
    """Demo SQS handler: upsert a fixed item into the `super-table` DynamoDB table.

    Returns an empty list (no batch item failures).
    """
    update_kwargs = dict(
        TableName="super-table",
        Key={"objectId": {"N": "123456"}},
        ExpressionAttributeNames={"#name": "name"},
        ExpressionAttributeValues={":name": {"S": "batman"}},
        UpdateExpression="set #name = :name",
        ReturnValues="NONE",
    )
    dynamodb_client().update_item(**update_kwargs)
    return []
from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSEvent from aws_lambda_powertools.utilities.typing import LambdaContext import functools from mypy_boto3_dynamodb import DynamoDBClient import boto3 # DynamoDB Client setup @functools.cache def dynamodb_client() -> DynamoDBClient: return boto3.client("dynamodb") def handler(event: SQSEvent, context: LambdaContext): dynamodb_client().update_item( TableName="super-table", Key={"objectId": {"N": "123456"}}, ExpressionAttributeNames={"#name": "name"}, ExpressionAttributeValues={":name": {"S": "batman"}}, UpdateExpression="set #name = :name", ReturnValues="NONE", ) return []

Enter fullscreen mode Exit fullscreen mode

handler_blog_test.py

import pytest
import boto3
from typing import Literal
from pydantic import BaseModel
from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSEvent
from .handler_blog import handler
@pytest.fixture(autouse=True)
def aws_fixture():
    """Drop every DynamoDB table after each test so tests stay independent."""
    yield

    # Cleanup DynamoDB
    dynamodb = boto3.client("dynamodb")
    tables_names = dynamodb.list_tables()["TableNames"]
    for table_name in tables_names:
        dynamodb.delete_table(TableName=table_name)
class CreateDynamoDB(BaseModel):
    """Parameters for creating a DynamoDB table in a test fixture."""

    table_name: str
    # Partition-key attribute name.
    pk_name: str
    # Partition-key attribute type: "S" = string, "N" = number.
    pk_type: Literal["S", "N"]
@pytest.fixture
def create_dynamodb_table(create_dynamodb_table_config: CreateDynamoDB):
    """Create the DynamoDB table described by `create_dynamodb_table_config`."""
    config = create_dynamodb_table_config
    dynamodb = boto3.client("dynamodb")
    dynamodb.create_table(
        TableName=config.table_name,
        KeySchema=[
            {"AttributeName": config.pk_name, "KeyType": "HASH"},
        ],
        AttributeDefinitions=[
            {
                "AttributeName": config.pk_name,
                "AttributeType": config.pk_type,
            }
        ],
        BillingMode="PAY_PER_REQUEST",
    )
    # create_table is asynchronous; block until the table is usable.
    dynamodb.get_waiter("table_exists").wait(TableName=config.table_name)
class InsertDataSQSMessage(BaseModel):
    """Schema of the JSON payload carried in the stubbed SQS message body."""

    id: int
    name: str
def get_sqs_event_stub(body: InsertDataSQSMessage) -> SQSEvent:
    """Build a realistic single-record SQSEvent whose body is *body* serialised as JSON."""
    return SQSEvent(
        {
            "Records": [
                {
                    "messageId": "4f332f15-3930-4a00-8831-1706016678846",
                    "receiptHandle": "AQEB/123456789012+bjpfjcbH0fslWvMxNSXEJWn/VNCIi0TYmuZakYNQpQhhcl2EoPseeM4ctyfd/OQ5eiMqWhta+L+iZYIuHRQiIIjmMgJrfJsl6aVHI1vYQvTTwhxaBJh2582kvuAaRvQ0gbLzT/Pe+Zp+123456789012/2Luka8cdrsLlSHEHI+21N+tN5dOaxBoGCJk1wZti6UmcrEzz3T+123456789012/O+mbqSPvJEJnbGasJRUFcKIfocbokN4sMSl8eJJKN1QkWPqxinVmk1DkEYzyY+rzSTjE8IBgcGRrxc293eYDJdfzISXo8j97h83ITP4fm1vMDA2w0/cDvvL3m4ACmZjwoZWdfoBTvJwbB8bXEa86Ykew==",
                    "body": body.model_dump_json(),
                    "attributes": {
                        "ApproximateReceiveCount": "1",
                        "SentTimestamp": "1706016678845",
                        "SenderId": "XXXXXXXXXXXXXXXXXXXXX",
                        "ApproximateFirstReceiveTimestamp": "1706016678846",
                    },
                    "messageAttributes": {},
                    "md5OfBody": "28f07e09c08aba530422dd193f991111",
                    "eventSource": "aws:sqs",
                    "eventSourceARN": "arn:aws:sqs:eu-central-1:123456789012:data",
                    "awsRegion": "eu-central-1",
                }
            ]
        }
    )
class TestHandlerBlog:
    """End-to-end test: SQS event in, DynamoDB row out (against localstack)."""

    def setup_method(self, method):
        pass

    def teardown_method(self, method):
        pass

    create_dynamodb_table_config = CreateDynamoDB(table_name="super-table", pk_name="objectId", pk_type="N")

    @pytest.mark.parametrize("create_dynamodb_table_config", [create_dynamodb_table_config])
    def test_insert_some_data_to_dynamodb(self, create_dynamodb_table):
        # Prepare
        # Act
        event = get_sqs_event_stub(InsertDataSQSMessage(id=18393972737, name="company"))
        result = handler(event=event, context={})

        # Assert function
        assert result == []

        # Assert Table content
        response = boto3.client("dynamodb").scan(TableName="super-table")
        table_records = response["Items"]
        assert len(table_records) == 1
        assert table_records == [
            {
                "objectId": {"N": "123456"},
                "name": {"S": "batman"},
            }
        ]
import pytest
import boto3
from typing import Literal
from pydantic import BaseModel
from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSEvent

from .handler_blog import handler


@pytest.fixture(autouse=True)
def aws_fixture():
    """After each test, drop every DynamoDB table so tests stay independent."""
    yield

    # Cleanup DynamoDB
    dynamodb = boto3.client("dynamodb")
    for table_name in dynamodb.list_tables()["TableNames"]:
        dynamodb.delete_table(TableName=table_name)


class CreateDynamoDB(BaseModel):
    """Parameters for creating a DynamoDB table in a test fixture."""

    table_name: str
    # Partition-key attribute name.
    pk_name: str
    # Partition-key attribute type: "S" = string, "N" = number.
    pk_type: Literal["S", "N"]


@pytest.fixture
def create_dynamodb_table(create_dynamodb_table_config: CreateDynamoDB):
    """Create the DynamoDB table described by `create_dynamodb_table_config`.

    The config fixture value is supplied per-test via
    `@pytest.mark.parametrize("create_dynamodb_table_config", [...])`.
    """
    config = create_dynamodb_table_config
    dynamodb = boto3.client("dynamodb")
    dynamodb.create_table(
        TableName=config.table_name,
        KeySchema=[
            {"AttributeName": config.pk_name, "KeyType": "HASH"},
        ],
        AttributeDefinitions=[
            {
                "AttributeName": config.pk_name,
                "AttributeType": config.pk_type,
            }
        ],
        BillingMode="PAY_PER_REQUEST",
    )
    # create_table is asynchronous; block until the table is ACTIVE so the
    # test's first write cannot race table creation.
    dynamodb.get_waiter("table_exists").wait(TableName=config.table_name)


class InsertDataSQSMessage(BaseModel):
    """Schema of the JSON payload carried in the stubbed SQS message body."""

    id: int
    name: str


def get_sqs_event_stub(body: InsertDataSQSMessage) -> SQSEvent:
    """Wrap *body* (serialised as JSON) in a realistic single-record SQSEvent."""
    record = {
        "messageId": "4f332f15-3930-4a00-8831-1706016678846",
        "receiptHandle": "AQEB/123456789012+bjpfjcbH0fslWvMxNSXEJWn/VNCIi0TYmuZakYNQpQhhcl2EoPseeM4ctyfd/OQ5eiMqWhta+L+iZYIuHRQiIIjmMgJrfJsl6aVHI1vYQvTTwhxaBJh2582kvuAaRvQ0gbLzT/Pe+Zp+123456789012/2Luka8cdrsLlSHEHI+21N+tN5dOaxBoGCJk1wZti6UmcrEzz3T+123456789012/O+mbqSPvJEJnbGasJRUFcKIfocbokN4sMSl8eJJKN1QkWPqxinVmk1DkEYzyY+rzSTjE8IBgcGRrxc293eYDJdfzISXo8j97h83ITP4fm1vMDA2w0/cDvvL3m4ACmZjwoZWdfoBTvJwbB8bXEa86Ykew==",
        "body": body.model_dump_json(),
        "attributes": {
            "ApproximateReceiveCount": "1",
            "SentTimestamp": "1706016678845",
            "SenderId": "XXXXXXXXXXXXXXXXXXXXX",
            "ApproximateFirstReceiveTimestamp": "1706016678846",
        },
        "messageAttributes": {},
        "md5OfBody": "28f07e09c08aba530422dd193f991111",
        "eventSource": "aws:sqs",
        "eventSourceARN": "arn:aws:sqs:eu-central-1:123456789012:data",
        "awsRegion": "eu-central-1",
    }
    return SQSEvent({"Records": [record]})


class TestHandlerBlog:
    """End-to-end test: feed the handler an SQS event, assert the DynamoDB row it writes."""

    def setup_method(self, method):
        pass

    def teardown_method(self, method):
        pass

    create_dynamodb_table_config = CreateDynamoDB(table_name="super-table", pk_name="objectId", pk_type="N")

    @pytest.mark.parametrize("create_dynamodb_table_config", [create_dynamodb_table_config])
    def test_insert_some_data_to_dynamodb(self, create_dynamodb_table):
        # Act: run the handler against a stubbed SQS event.
        sqs_event = get_sqs_event_stub(InsertDataSQSMessage(id=18393972737, name="company"))
        handler_result = handler(event=sqs_event, context={})

        # The handler reports success with an empty batch-failure list.
        assert handler_result == []

        # The table now holds exactly the row the handler writes.
        scan_response = boto3.client("dynamodb").scan(TableName="super-table")
        items = scan_response["Items"]
        assert len(items) == 1
        assert items == [
            {
                "objectId": {"N": "123456"},
                "name": {"S": "batman"},
            }
        ]
import pytest
import boto3
from typing import Literal
from pydantic import BaseModel
from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSEvent

from .handler_blog import handler


@pytest.fixture(autouse=True)
def aws_fixture():
    """Drop every DynamoDB table after each test so tests stay independent."""
    yield

    # Cleanup DynamoDB
    dynamodb = boto3.client("dynamodb")
    tables_names = dynamodb.list_tables()["TableNames"]
    for table_name in tables_names:
        dynamodb.delete_table(TableName=table_name)


class CreateDynamoDB(BaseModel):
    """Parameters for creating a DynamoDB table in a test fixture."""

    table_name: str
    pk_name: str
    pk_type: Literal["S", "N"]


@pytest.fixture
def create_dynamodb_table(create_dynamodb_table_config: CreateDynamoDB):
    """Create the DynamoDB table described by `create_dynamodb_table_config`."""
    config = create_dynamodb_table_config
    dynamodb = boto3.client("dynamodb")
    dynamodb.create_table(
        TableName=config.table_name,
        KeySchema=[
            {"AttributeName": config.pk_name, "KeyType": "HASH"},
        ],
        AttributeDefinitions=[
            {
                "AttributeName": config.pk_name,
                "AttributeType": config.pk_type,
            }
        ],
        BillingMode="PAY_PER_REQUEST",
    )


class InsertDataSQSMessage(BaseModel):
    """Schema of the JSON payload carried in the stubbed SQS message body."""

    id: int
    name: str


def get_sqs_event_stub(body: InsertDataSQSMessage) -> SQSEvent:
    """Build a realistic single-record SQSEvent whose body is *body* serialised as JSON."""
    return SQSEvent(
        {
            "Records": [
                {
                    "messageId": "4f332f15-3930-4a00-8831-1706016678846",
                    "receiptHandle": "AQEB/123456789012+bjpfjcbH0fslWvMxNSXEJWn/VNCIi0TYmuZakYNQpQhhcl2EoPseeM4ctyfd/OQ5eiMqWhta+L+iZYIuHRQiIIjmMgJrfJsl6aVHI1vYQvTTwhxaBJh2582kvuAaRvQ0gbLzT/Pe+Zp+123456789012/2Luka8cdrsLlSHEHI+21N+tN5dOaxBoGCJk1wZti6UmcrEzz3T+123456789012/O+mbqSPvJEJnbGasJRUFcKIfocbokN4sMSl8eJJKN1QkWPqxinVmk1DkEYzyY+rzSTjE8IBgcGRrxc293eYDJdfzISXo8j97h83ITP4fm1vMDA2w0/cDvvL3m4ACmZjwoZWdfoBTvJwbB8bXEa86Ykew==",
                    "body": body.model_dump_json(),
                    "attributes": {
                        "ApproximateReceiveCount": "1",
                        "SentTimestamp": "1706016678845",
                        "SenderId": "XXXXXXXXXXXXXXXXXXXXX",
                        "ApproximateFirstReceiveTimestamp": "1706016678846",
                    },
                    "messageAttributes": {},
                    "md5OfBody": "28f07e09c08aba530422dd193f991111",
                    "eventSource": "aws:sqs",
                    "eventSourceARN": "arn:aws:sqs:eu-central-1:123456789012:data",
                    "awsRegion": "eu-central-1",
                }
            ]
        }
    )


class TestHandlerBlog:
    """End-to-end test: SQS event in, DynamoDB row out (against localstack)."""

    def setup_method(self, method):
        pass

    def teardown_method(self, method):
        pass

    create_dynamodb_table_config = CreateDynamoDB(table_name="super-table", pk_name="objectId", pk_type="N")

    @pytest.mark.parametrize("create_dynamodb_table_config", [create_dynamodb_table_config])
    def test_insert_some_data_to_dynamodb(self, create_dynamodb_table):
        # Prepare
        # Act
        event = get_sqs_event_stub(InsertDataSQSMessage(id=18393972737, name="company"))
        result = handler(event=event, context={})

        # Assert function
        assert result == []

        # Assert Table content
        response = boto3.client("dynamodb").scan(TableName="super-table")
        table_records = response["Items"]
        assert len(table_records) == 1
        assert table_records == [
            {
                "objectId": {"N": "123456"},
                "name": {"S": "batman"},
            }
        ]

Enter fullscreen mode Exit fullscreen mode

ps. I also mapped the VS Code key "F1" to "testing.runAtCursor" and "F2" to "testing.runAll".

原文链接:How to run python based AWS lambda tests using pytest and localstack

© 版权声明
THE END
喜欢就支持一下吧
点赞8 分享
Not afraid of people blocking, I'm afraid their surrender.
不怕万人阻挡,只怕自己投降
评论 抢沙发

请登录后发表评论

    暂无评论内容