Reputation: 117
I'm trying to write a test for my lambda handler, which uses boto3 to download a file from a bucket and store it locally:
import boto3

s3_resource = boto3.resource('s3')
TEMP_FILE = '/tmp/file.csv'

def lambda_handler(event, context):
    bucket_name = event['detail']['bucket']['name']
    file_name = event['detail']['object']['key']
    s3_resource.Bucket(bucket_name).download_file(Key=file_name, Filename=TEMP_FILE)
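The rest of the handler is omitted for brevity; after processing the downloaded file it returns a response along the lines of

    return {"statusCode": 200}

which is what the test further down asserts on.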
Because I don't want to actually interact with S3 in my test, I created a dummy s3_upload_event to pass into the handler. I also used moto to create a mocked S3 bucket with some dummy test_data in it, as well as a mocked IAM user with S3 permissions:
import json
import boto3
import pytest
from moto import mock_iam, mock_s3

TEST_BUCKET = "test_bucket_name"
S3_TEST_FILE_KEY = 'path/to/test.csv'

@pytest.fixture
def s3_upload_event():
    return {"detail": {"bucket": {"name": TEST_BUCKET}, "object": {"key": S3_TEST_FILE_KEY}}}

@pytest.fixture
def context():
    return object()

@pytest.fixture
def test_data():
    return b'col_1,col_2\n1,2\n3,4\n'
@pytest.fixture
@mock_iam
def mock_user(user_name="test-user"):
    # create user
    client = boto3.client("iam", region_name="us-west-2")
    client.create_user(UserName=user_name)

    # create and attach policy
    policy_document = {
        "Version": "2012-10-17",
        "Statement": [{
            "Effect": "Allow",
            "Action": ["s3:*", "s3-object-lambda:*"],
            "Resource": "*"
        }]
    }
    policy_arn = client.create_policy(
        PolicyName="test",
        PolicyDocument=json.dumps(policy_document))["Policy"]["Arn"]
    client.attach_user_policy(UserName=user_name, PolicyArn=policy_arn)

    # Return access keys
    yield client.create_access_key(UserName=user_name)["AccessKey"]
@pytest.fixture
@mock_s3
def mock_s3(test_data, mock_user):
    s3 = boto3.client(
        "s3",
        region_name="us-west-2",
        aws_access_key_id=mock_user["AccessKeyId"],
        aws_secret_access_key=mock_user["SecretAccessKey"])
    s3.create_bucket(Bucket=TEST_BUCKET)
    s3.put_object(Bucket=TEST_BUCKET, Key=S3_TEST_FILE_KEY, Body=test_data)
    yield s3
I inject those mocked fixtures into my test as follows:
class TestLambdaHandler:
    def test_lambda_handler(self, mock_user, mock_s3, s3_upload_event, context):
        response = lambda_handler(event=s3_upload_event, context=context)
        assert response["statusCode"] == 200
But when I run the test, botocore throws an exception when it reaches the line s3_resource.Bucket(bucket_name).download_file(Key=file_name, Filename=TEMP_FILE):

botocore.exceptions.ClientError: An error occurred (403) when calling the HeadObject operation: Forbidden
From googling this error, it seems to be related to missing IAM permissions. The PolicyDocument I'm using for my mocked user is the same as the actual policy I'm using, so I don't see why it can download the file in real life but fails in the test. Is there anything I'm missing in my mocked user?
Upvotes: 3
Views: 1531
Reputation: 117
I managed to get the mocks working after re-reading the setup instructions for Moto (http://docs.getmoto.org/en/latest/docs/getting_started.html); there were a few issues with the code above, mainly that the moto mocks weren't kept active while the test itself ran, and that no dummy AWS credentials were set, so boto3 could still end up talking to real AWS.
Updated fixtures:
import os
import boto3
import pytest
from moto import mock_s3

@pytest.fixture(scope='function')
def aws_credentials():
    """Mocked AWS Credentials for moto."""
    os.environ['AWS_ACCESS_KEY_ID'] = 'testing'
    os.environ['AWS_SECRET_ACCESS_KEY'] = 'testing'
    os.environ['AWS_SECURITY_TOKEN'] = 'testing'
    os.environ['AWS_SESSION_TOKEN'] = 'testing'

@pytest.fixture(scope='function')
def s3(aws_credentials):
    with mock_s3():
        yield boto3.client('s3', region_name="us-west-2")
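The s3_upload_valid_file fixture used in the test below isn't shown here; it's presumably just the original s3_upload_event fixture renamed for the valid test file, i.e. something like this (using the TEST_BUCKET and VALID_TEST_FILE_KEY constants defined with the test):

@pytest.fixture
def s3_upload_valid_file():
    return {"detail": {"bucket": {"name": TEST_BUCKET}, "object": {"key": VALID_TEST_FILE_KEY}}}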
I then updated the test to take in the s3 fixture, and to create the test "bucket" and put the test "object" into it as part of the test setup.
VALID_TEST_FILE_KEY = "path/to/test.csv"
VALID_DATA = "tests/data/valid_data.csv"
TEST_BUCKET = "test_bucket_name"

def test_lambda_handler(self, s3_upload_valid_file, context, s3):
    # arrange
    self._create_bucket_and_add_file(s3, VALID_DATA, VALID_TEST_FILE_KEY)

    # act
    response = lambda_handler(event=s3_upload_valid_file, context=context)

    assert response["statusCode"] == 200

### Helper Method for Test ###
def _create_bucket_and_add_file(self, s3, data_file, key):
    with open(data_file, 'r') as f:
        test_data = f.read()
    s3.create_bucket(Bucket=TEST_BUCKET, CreateBucketConfiguration={'LocationConstraint': "us-west-2"})
    s3.put_object(Bucket=TEST_BUCKET, Key=key, Body=test_data)
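If several tests need the same bucket and object, that setup can also live in its own fixture built on top of the s3 fixture; a minimal sketch, assuming the same constants as above:

@pytest.fixture
def s3_with_valid_file(s3):
    # bucket and object only exist inside the active mock_s3 context
    s3.create_bucket(Bucket=TEST_BUCKET,
                     CreateBucketConfiguration={'LocationConstraint': 'us-west-2'})
    with open(VALID_DATA, 'r') as f:
        s3.put_object(Bucket=TEST_BUCKET, Key=VALID_TEST_FILE_KEY, Body=f.read())
    return s3

A test that takes s3_with_valid_file then only needs the act and assert steps.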
Upvotes: 2