weekly misc items: October 12, 2020

diary of a codelovingyogi
1. mocking lambda invoke

to unit test code that invokes a lambda, you can use the moto python library to mock both iam (for the execution role) and lambda:

import boto3
from botocore.exceptions import ClientError
from moto import mock_iam, mock_lambda


def get_role_name():
    # create (or fetch) a fake iam role for the mocked function to assume
    with mock_iam():
        iam = boto3.client("iam", region_name=_lambda_region)
        try:
            return iam.get_role(RoleName="my-role")["Role"]["Arn"]
        except ClientError:
            return iam.create_role(
                RoleName="my-role",
                AssumeRolePolicyDocument="some policy",
                Path="/my-path/",
            )["Role"]["Arn"]


@mock_lambda
def setup_mock_client():
    # _lambda_region and get_test_zip_file1 are helpers from your test setup
    lambda_client = boto3.client("lambda", region_name=_lambda_region)
    lambda_client.create_function(
        FunctionName="testFunction",
        Runtime="python3.7",
        Role=get_role_name(),
        Handler="lambda_function.lambda_handler",
        Code={"ZipFile": get_test_zip_file1()},
        Description="test lambda function",
        Timeout=3,
        MemorySize=128,
        Publish=True,
    )
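for completeness, a minimal sketch of a test that then invokes the mocked function (the payload is hypothetical; note that moto executes the function body in a local docker container, so docker must be available for invoke to work):

import json


@mock_lambda
def test_invoke_function():
    setup_mock_client()
    lambda_client = boto3.client("lambda", region_name=_lambda_region)
    response = lambda_client.invoke(
        FunctionName="testFunction",
        InvocationType="RequestResponse",
        Payload=json.dumps({"key": "value"}),  # hypothetical payload
    )
    assert response["StatusCode"] == 200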

2. mocking s3fs

in some of our code we use the s3fs python library to navigate aws s3 like a filesystem. came across code that uses this and needed to mock the functionality.

the code calls an api and writes to a file in s3. it looks something like this:

import json
import uuid

import s3fs


def fetch_data():
    fs = s3fs.S3FileSystem()
    # bucket, prefix, and page come from the surrounding code
    s3_path = f'{bucket}/{prefix}/{uuid.uuid4()}.json'
    with fs.open(s3_path, 'wb') as f:
        for row in page:
            record = row['fields']
            record['id'] = row['id']
            f.write(f'{json.dumps(record)}\n'.encode())

to mock this, you can use the moto python library to mock the s3 client and create the bucket:

import boto3
import pytest
from moto import mock_s3


@pytest.fixture(autouse=True)
def setup_mocks():
    mock_s3().start()
    s3_client = boto3.client('s3')
    s3_client.create_bucket(Bucket=bucket)  # bucket comes from the surrounding code
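one caveat: as written, the fixture starts the mock but never stops it. a variant that tears the mock down after each test (a sketch using moto's start/stop api):

@pytest.fixture(autouse=True)
def setup_mocks():
    mock = mock_s3()
    mock.start()
    s3_client = boto3.client('s3')
    s3_client.create_bucket(Bucket=bucket)
    yield
    mock.stop()  # undo the patching so state doesn't leak between tests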

to use this, we include the fixture name as a param in our test:

def test_fetch_data(setup_mocks):
    result = fetch_data()
    assert result == <something>
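because the bucket only exists inside the mock, you can also assert that fetch_data actually wrote something to it (a sketch; the expected key count depends on your test data):

def test_fetch_data_writes_to_s3(setup_mocks):
    fetch_data()
    s3_client = boto3.client('s3')
    objects = s3_client.list_objects_v2(Bucket=bucket)
    assert objects['KeyCount'] == 1  # hypothetical: one page -> one json file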

if you have to read from an s3 file, for example:

import csv
import json

import s3fs


def read_from_s3():
    fs = s3fs.S3FileSystem(anon=False)
    # output_location and query_id come from the surrounding code
    query_output = f'{output_location}/{query_id}.csv'
    with fs.open(query_output, 'r') as csv_file:
        csv_reader = csv.DictReader(csv_file)
        for row in csv_reader:
            print(json.dumps(dict(row)))

in your setup fixture you will need to create test files for mock purposes:

import os

import pytest


@pytest.fixture
def setup_mock_client():
    # write a small csv for the mocked filesystem to hand back
    test_output_location_file = f'{TEST_DIR}/{TEST_OUTPUT_LOCATION}.csv'
    with open(test_output_location_file, 'w') as output_location:
        output_location.write('cola,colb')

    yield

    if os.path.exists(test_output_location_file):
        os.remove(test_output_location_file)

in the test method, you need to use a mock s3fs path:

import os

import pytest
import s3fs


def setup_mock_s3fs(test_path):
    # factory for a stand-in s3fs.S3FileSystem that redirects opens to a local file
    class MockS3FileSystem:
        def __init__(self, anon=False):
            self.anon = anon

        def open(self, path, mode='rb', **kwargs):
            # ignore the s3 path and any extra args (e.g. acl); open the test file
            return open(test_path, mode)

    return MockS3FileSystem


@pytest.fixture
def mock_s3fs_path(setup_mock_client, monkeypatch):
    test_path = f'{TEST_DIR}/{TEST_FILE_LOCATION}'
    monkeypatch.setattr(s3fs, 'S3FileSystem', setup_mock_s3fs(test_path))
    yield test_path


def test_write_to_s3(mock_s3fs_path, mocker):
    # stub out the polling helper in the module under test
    mocker.patch.object(lambda_function, 'wait_until_file_available')
    lambda_function.wait_until_file_available.return_value = True
    response = write_to_s3(
        os.path.join(TEST_DIR, TEST_FILE_LOCATION), TEST_DIR, TEST_OUTPUT_LOCATION
    )
    assert response is True

another note: in our setup fixture we also create a file that the codebase reads from. this is usually our configuration file, which normally gets created during the repo’s build process. to do this, the code looks like:

import os
from configparser import ConfigParser

import pytest


@pytest.fixture(autouse=True)
def setup_mocks():
    # write a throwaway config file for the code under test to read
    config = ConfigParser()
    config['<config header name>'] = {
        'key': 'value'
    }
    with open('config.cfg', 'w') as configfile:
        config.write(configfile)
    yield
    if os.path.exists('config.cfg'):
        os.remove('config.cfg')
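the code under test can then read this file with ConfigParser as usual (a minimal sketch, using the placeholder section and key from above):

from configparser import ConfigParser

config = ConfigParser()
config.read('config.cfg')
value = config['<config header name>']['key']  # -> 'value'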

3. s3 -> sns

trying to add an event trigger so that when s3 objects get created, notifications get sent to an aws sns topic. in s3 -> Properties -> Advanced settings you can add Events.
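the same trigger can also be set up programmatically with boto3 (a sketch; the bucket and topic names are placeholders):

import boto3

s3_client = boto3.client('s3')

# subscribe the topic to object-created events on the bucket
s3_client.put_bucket_notification_configuration(
    Bucket='<bucket-name>',
    NotificationConfiguration={
        'TopicConfigurations': [
            {
                'TopicArn': '<sns-arn>',
                'Events': ['s3:ObjectCreated:*'],
            }
        ]
    },
)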

if you get this error when trying to add the s3 event:

Unable to validate the following destination configurations. Permissions on the destination topic do not allow S3 to publish notifications from this bucket

you need to add a statement to the Access policy on the SNS topic that allows the s3 bucket to publish notifications to it. open up your SNS topic and go to Access policy to edit.

the Statement field is an array; add this to it (the SourceAccount and ArnLike conditions restrict publishing to events coming from your bucket in your account):

{
  "Sid": "<for publishing events to topic from <bucket-name>>",
  "Effect": "Allow",
  "Principal": {
    "AWS": "*"
  },
  "Action": "SNS:Publish",
  "Resource": "<sns-arn>",
  "Condition": {
    "StringEquals": {
      "aws:SourceAccount": "<aws-account-id>"
    },
    "ArnLike": {
      "aws:SourceArn": "arn:aws:s3:*:*:<bucket-name>"
    }
  }
}
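if you'd rather script this step too, the topic policy can be updated with boto3 (a sketch; it appends the statement above, held in a statement dict, to the existing policy rather than replacing it):

import json

import boto3

sns_client = boto3.client('sns')

# fetch the current access policy, append the new statement, and write it back
attrs = sns_client.get_topic_attributes(TopicArn='<sns-arn>')
policy = json.loads(attrs['Attributes']['Policy'])
policy['Statement'].append(statement)  # the statement shown above, as a dict

sns_client.set_topic_attributes(
    TopicArn='<sns-arn>',
    AttributeName='Policy',
    AttributeValue=json.dumps(policy),
)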
