## page was renamed from AWS/Localstack

= LocalStack =
https://github.com/localstack/localstack

[[AWS/LocalStack|LocalStack]] is a cloud service emulator that runs in a single container on your laptop or in your CI environment. With [[AWS/LocalStack|LocalStack]], you can run your AWS applications or Lambdas entirely on your local machine without connecting to a remote cloud provider.

== AWS CLI installation ==
{{{#!highlight sh
# https://aws.amazon.com/cli/
cd ~/Downloads
curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip"
unzip awscliv2.zip
sudo ./aws/install
/usr/local/bin/aws --version
# aws-cli/2.33.12 Python/3.13.11 Linux/6.1.0-42-amd64 exe/x86_64.debian.12

export AWS_ACCESS_KEY_ID="test"
export AWS_SECRET_ACCESS_KEY="test"
export AWS_DEFAULT_REGION="us-east-1"
export AWS_ENDPOINT_URL="http://localhost:4566/"
}}}

== LocalStack in Debian ==
{{{#!highlight sh
docker run --rm -it -p 127.0.0.1:4566:4566 -p 127.0.0.1:4510-4559:4510-4559 \
  -v /var/run/docker.sock:/var/run/docker.sock --name localstack localstack/localstack
# LocalStack version: 4.13.1.dev6

sudo apt install jq
curl http://localhost:4566/_localstack/health | jq .

export AWS_ACCESS_KEY_ID="test"
export AWS_SECRET_ACCESS_KEY="test"
export AWS_DEFAULT_REGION="us-east-1"
export AWS_ENDPOINT_URL="http://localhost:4566/"

aws s3 ls
aws s3api create-bucket --bucket my-bucket
# https://docs.aws.amazon.com/cli/latest/reference/s3api/
echo "test" > test.txt
aws s3api put-object --bucket my-bucket --key dir-1/test.txt --body test.txt
aws s3api get-object --bucket my-bucket --key dir-1/test.txt test2.txt
cat test2.txt
}}}

== Check LocalStack services and health ==
{{{#!highlight sh
curl -s http://localhost:4566/_localstack/health | jq .
}}}

{{{#!highlight json
{
  "services": {
    "acm": "available",
    "apigateway": "available",
    "cloudformation": "available",
    "cloudwatch": "running",
    "config": "available",
    "dynamodb": "available",
    "dynamodbstreams": "available",
    "ec2": "running",
    "es": "available",
    "events": "available",
    "firehose": "available",
    "iam": "available",
    "kinesis": "available",
    "kms": "available",
    "lambda": "running",
    "logs": "running",
    "opensearch": "available",
    "redshift": "available",
    "resource-groups": "available",
    "resourcegroupstaggingapi": "available",
    "route53": "available",
    "route53resolver": "available",
    "s3": "running",
    "s3control": "available",
    "scheduler": "available",
    "secretsmanager": "available",
    "ses": "available",
    "sns": "available",
    "sqs": "available",
    "ssm": "available",
    "stepfunctions": "available",
    "sts": "running",
    "support": "available",
    "swf": "available",
    "transcribe": "available"
  },
  "edition": "community",
  "version": "4.13.1.dev6"
}
}}}

== Python lambda and s3 ==
Lambda image https://github.com/aws/aws-lambda-base-images/tree/python3.13

 * public.ecr.aws/lambda/python:3.13

=== run.sh ===
{{{#!highlight sh
export AWS_ACCESS_KEY_ID="test"
export AWS_SECRET_ACCESS_KEY="test"
export AWS_DEFAULT_REGION="us-east-1"
export AWS_ENDPOINT_URL="http://localhost:4566/"

zip py-my-function.zip lambda_function.py
aws lambda delete-function --function-name py-my-function
aws lambda create-function --function-name py-my-function \
  --zip-file fileb://py-my-function.zip --handler lambda_function.lambda_handler \
  --runtime python3.13 --role arn:aws:iam::000000000000:role/lambda-ex \
  --timeout 30

PAYLOAD=$( echo "{ \"first_name\": \"Bob\",\"last_name\":\"Squarepants\" }" | base64 )
aws lambda invoke --function-name py-my-function \
  --payload $PAYLOAD \
  response.json
cat response.json

aws s3api get-object --bucket examplebucket --key examplebucket/response.txt r.txt
}}}
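The `create-function` calls on this page reference the role `arn:aws:iam::000000000000:role/lambda-ex`, which is never created in these steps. LocalStack Community does not enforce IAM, so the calls work anyway; if you want the role to actually exist, a minimal sketch (the trust policy below is an assumption, not taken from this page):

{{{#!highlight sh
# Optional: create the lambda-ex role referenced by create-function.
# LocalStack Community accepts any role ARN, so this step can be skipped.
cat > trust-policy.json <<'EOF'
{
  "Version": "2012-10-17",
  "Statement": [
    { "Effect": "Allow",
      "Principal": { "Service": "lambda.amazonaws.com" },
      "Action": "sts:AssumeRole" }
  ]
}
EOF
aws iam create-role --role-name lambda-ex \
  --assume-role-policy-document file://trust-policy.json
aws iam list-roles
}}}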
=== lambda_function.py ===
{{{#!highlight python
import boto3
import os

def lambda_handler(event, context):
    message = 'Hello {} {}!'.format(event['first_name'], event['last_name'])
    session = boto3.session.Session()
    s3 = session.client(service_name='s3')
    buckets = []
    for bucket in s3.list_buckets()['Buckets']:
        buckets.append(bucket['Name'])
    response = s3.create_bucket(Bucket='examplebucket')
    body = {
        'message': message,
        'buckets': buckets,
        'AWS_ACCESS_KEY_ID': os.environ["AWS_ACCESS_KEY_ID"],
        'AWS_SECRET_ACCESS_KEY': os.environ["AWS_SECRET_ACCESS_KEY"],
        'AWS_DEFAULT_REGION': os.environ["AWS_DEFAULT_REGION"],
        'AWS_ENDPOINT_URL': os.environ['AWS_ENDPOINT_URL']
    }
    s3.put_object(Body=str(body), Bucket='examplebucket', Key='examplebucket/response.txt')
    return body
}}}

== Access LocalStack from a Docker container ==
{{{#!highlight bash
# run the LocalStack container
docker run -d --name localstack --rm -it -p 4566:4566 -p 4571:4571 \
  -v /var/run/docker.sock:/var/run/docker.sock localstack/localstack
# connect to the LocalStack container
docker exec -it localstack bash

cat /etc/os-release | grep -i pretty
# PRETTY_NAME="Debian GNU/Linux 13 (trixie)"

curl http://localhost:4566/_localstack/health

awslocal s3api list-buckets
awslocal s3api create-bucket --bucket my-bucket
echo "test" > test.txt
awslocal s3api put-object --bucket my-bucket --key dir-1/test.txt --body test.txt
awslocal s3api get-object --bucket my-bucket --key dir-1/test.txt test2.txt
cat test2.txt

apt install nano vim yajl-tools -y
# https://hub.docker.com/r/localstack/localstack
# https://github.com/localstack/localstack
node -v
# v22.22.0
python -V
# Python 3.13.11
pip3 freeze
curl http://localhost:4566/_localstack/health | json_reformat

awslocal ec2 run-instances --image-id prod-df2jln3gjtwps --count 1 --instance-type t2.micro
awslocal ec2 describe-instances --filters "Name=instance-type,Values=t2.micro" \
  --query "Reservations[].Instances[].InstanceId"
awslocal ec2 describe-instances
}}}

== Java 21 lambda handler ==
Lambda image https://github.com/aws/aws-lambda-base-images/tree/java21

 * public.ecr.aws/lambda/java:21

=== Steps ===
{{{#!highlight sh
mkdir -p ~/Documents/Java8LambdaHandler
cd ~/Documents/Java8LambdaHandler
mkdir -p src/main/java/com/mooo/bitarus/
}}}

=== build.sh ===
{{{#!highlight sh
FUNCTION_NAME=lambda-function
aws lambda delete-function --function-name $FUNCTION_NAME
sleep 5
mvn clean install
sleep 5
aws lambda create-function --function-name $FUNCTION_NAME \
  --zip-file fileb://target/lambda-function-1.0-SNAPSHOT.jar \
  --handler com.mooo.bitarus.Handler --runtime java21 \
  --role arn:aws:iam::000000000000:role/lambda-ex --timeout 30
#awslocal lambda update-function-configuration --function-name $FUNCTION_NAME \
#  --timeout 15
sleep 15
}}}

=== latest_log.sh ===
{{{#!highlight sh
LOG_GROUP="/aws/lambda/lambda-function"
LOG_STREAM=$(aws logs describe-log-streams \
  --log-group-name $LOG_GROUP \
  --order-by LastEventTime --descending | \
  grep logStreamName | head -1 | awk '//{print $2}' | sed "s/,//g" | sed 's/\"//g' )
echo $LOG_GROUP
echo $LOG_STREAM
aws logs get-log-events --log-group-name $LOG_GROUP \
  --log-stream-name "$LOG_STREAM" \
  | grep message | sed 's/"message"\://g' | sed 's/ //g'
}}}
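The pipeline above works, but AWS CLI v2 also ships `aws logs tail`, which is usually simpler for the same purpose; a sketch using the same log group:

{{{#!highlight sh
# tail recent log events for the function's log group (AWS CLI v2)
aws logs tail /aws/lambda/lambda-function --since 10m
# follow new events as they arrive
aws logs tail /aws/lambda/lambda-function --follow
}}}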
=== pom.xml ===
{{{#!highlight xml
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>

  <groupId>com.mooo.bitarus</groupId>
  <artifactId>lambda-function</artifactId>
  <packaging>jar</packaging>
  <version>1.0-SNAPSHOT</version>
  <name>lambda-function</name>

  <properties>
    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    <maven.compiler.source>21</maven.compiler.source>
    <maven.compiler.target>21</maven.compiler.target>
  </properties>

  <dependencies>
    <dependency>
      <groupId>com.amazonaws</groupId>
      <artifactId>aws-lambda-java-core</artifactId>
      <version>1.3.0</version>
    </dependency>
    <dependency>
      <groupId>com.google.code.gson</groupId>
      <artifactId>gson</artifactId>
      <version>2.10.1</version>
    </dependency>
  </dependencies>

  <build>
    <plugins>
      <plugin>
        <artifactId>maven-surefire-plugin</artifactId>
        <version>2.22.2</version>
      </plugin>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-shade-plugin</artifactId>
        <version>3.2.2</version>
        <configuration>
          <createDependencyReducedPom>false</createDependencyReducedPom>
        </configuration>
        <executions>
          <execution>
            <phase>package</phase>
            <goals>
              <goal>shade</goal>
            </goals>
          </execution>
        </executions>
      </plugin>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-compiler-plugin</artifactId>
        <version>3.8.1</version>
        <configuration>
          <source>21</source>
          <target>21</target>
        </configuration>
      </plugin>
    </plugins>
  </build>
</project>
}}}

=== run.sh ===
{{{#!highlight sh
PAYLOAD=$( echo "{\"first_name\": \"Bob\",\"last_name\":\"Marley\"}" | base64 )
#aws lambda wait function-active-v2 --function-name lambda-function
aws lambda invoke --function-name lambda-function \
  --payload $PAYLOAD response.json
cat response.json
}}}

=== Handler.java ===
{{{#!highlight java
// src/main/java/com/mooo/bitarus/Handler.java
package com.mooo.bitarus;

import com.amazonaws.services.lambda.runtime.Context;
import com.amazonaws.services.lambda.runtime.RequestHandler;
import com.amazonaws.services.lambda.runtime.LambdaLogger;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import java.util.Map;
import java.util.HashMap;

public class Handler implements RequestHandler<Map<String, String>, String> {
    Gson gson = new GsonBuilder().setPrettyPrinting().create();

    @Override
    public String handleRequest(Map<String, String> event, Context context) {
        LambdaLogger logger = context.getLogger();
        System.out.println(">>> sout test");
        logger.log("Stuff logged");
        String response = "Java Lambda invocation response 20260131";
        logger.log(event.get("first_name"));
        logger.log("EVENT TYPE: " + event.getClass());
        Map<String, String> hashReturn = new HashMap<>();
        hashReturn.put("response", response);
        return gson.toJson(hashReturn);
    }
}
}}}

== SPA app + API gateway + lambda function ==
To host a SPA (Single Page Application) in LocalStack that uses API Gateway, we must simulate the AWS environment where Amazon S3 acts as the static file web server and API Gateway acts as the backend that calls Lambda functions.

LocalStack complete flow:

 * The user opens the S3 URL in the browser.
 * The browser downloads index.html and the SPA JavaScript from S3 (LocalStack).
 * The SPA makes a POST call to an API Gateway endpoint (LocalStack).
 * API Gateway triggers a Java Lambda (which uppercases the sent string).
 * The Lambda returns JSON and the SPA updates its screen with the JSON data.
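The page does not include the commands for this setup, so the outline below is only a sketch of the LocalStack side. The bucket name `spa-bucket`, the resource path `uppercase` and a Lambda called `uppercase-function` (created as in the Lambda sections above) are hypothetical names, and the website/invoke URL formats are LocalStack conventions that should be checked against your LocalStack version:

{{{#!highlight sh
# 1) S3 static website hosting for the SPA files (hypothetical bucket/file names)
aws s3api create-bucket --bucket spa-bucket
aws s3 website s3://spa-bucket --index-document index.html
aws s3 cp index.html s3://spa-bucket/index.html
aws s3 cp app.js s3://spa-bucket/app.js
# LocalStack typically serves the site at:
# http://spa-bucket.s3-website.localhost.localstack.cloud:4566/

# 2) REST API with a Lambda proxy integration
API_ID=$(aws apigateway create-rest-api --name spa-api --query 'id' --output text)
ROOT_ID=$(aws apigateway get-resources --rest-api-id $API_ID \
  --query "items[?path=='/'].id | [0]" --output text)
RES_ID=$(aws apigateway create-resource --rest-api-id $API_ID \
  --parent-id $ROOT_ID --path-part uppercase --query 'id' --output text)
aws apigateway put-method --rest-api-id $API_ID --resource-id $RES_ID \
  --http-method POST --authorization-type NONE
aws apigateway put-integration --rest-api-id $API_ID --resource-id $RES_ID \
  --http-method POST --type AWS_PROXY --integration-http-method POST \
  --uri arn:aws:apigateway:us-east-1:lambda:path/2015-03-31/functions/arn:aws:lambda:us-east-1:000000000000:function:uppercase-function/invocations
aws apigateway create-deployment --rest-api-id $API_ID --stage-name dev
# (on real AWS an additional "aws lambda add-permission" call would be needed;
#  LocalStack usually does not require it)

# 3) The SPA then POSTs to the LocalStack invoke URL, e.g.
# http://localhost:4566/restapis/$API_ID/dev/_user_request_/uppercase
}}}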
== DynamoDB ==
[[https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Introduction.html|DynamoDB]] is a key-value and document NoSQL database.

{{{#!highlight sh
# create table
aws dynamodb create-table --table-name Contacts \
  --attribute-definitions AttributeName=Email,AttributeType=S \
  --key-schema AttributeName=Email,KeyType=HASH \
  --provisioned-throughput ReadCapacityUnits=5,WriteCapacityUnits=5

# list tables
aws dynamodb list-tables

# insert an item
aws dynamodb put-item --table-name Contacts \
  --item '{
    "Email": {"S": "john@example.org"},
    "Name": {"S": "John Doe"},
    "Phone": {"S": "912345678"}
  }'

# all elements in the table
aws dynamodb scan --table-name Contacts

# table scan (Name is a reserved word, so it needs an expression attribute name)
aws dynamodb scan --table-name Contacts \
  --filter-expression "contains(#n, :v)" \
  --expression-attribute-names '{"#n": "Name"}' \
  --expression-attribute-values '{":v": {"S": "Doe"}}'

# table scan with a contains filter on Email
aws dynamodb scan --table-name Contacts \
  --filter-expression "contains(Email, :n)" \
  --expression-attribute-values '{":n": {"S": "exam"}}'

# query using exact key match and a contains filter
aws dynamodb query --table-name Contacts \
  --key-condition-expression "Email = :e" \
  --filter-expression "contains(Phone, :t)" \
  --expression-attribute-values '{
    ":e": {"S": "john@example.org"},
    ":t": {"S": "912"}
  }'
}}}
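Besides `scan` and `query`, a single item can be fetched by its exact key with `get-item`; for example, to read back the item inserted above:

{{{#!highlight sh
# read one item by its partition key (Email)
aws dynamodb get-item --table-name Contacts \
  --key '{"Email": {"S": "john@example.org"}}'

# return only selected attributes (Name is a reserved word)
aws dynamodb get-item --table-name Contacts \
  --key '{"Email": {"S": "john@example.org"}}' \
  --projection-expression "#n, Phone" \
  --expression-attribute-names '{"#n": "Name"}'
}}}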
== Fat Zip python lambda ==

=== run-lambda.sh ===
{{{#!highlight sh
export AWS_ACCESS_KEY_ID="test"
export AWS_SECRET_ACCESS_KEY="test"
export AWS_DEFAULT_REGION="us-east-1"
export AWS_ENDPOINT_URL="http://localhost:4566/"

FUNCTION_NAME=lambda-pg-function
aws lambda invoke --function-name $FUNCTION_NAME response.json
echo "Lambda output"
cat response.json
}}}

=== requirements.txt ===
{{{#!highlight sh
pg8000
requests
}}}

=== test_lambda.py ===
{{{#!highlight python
import unittest
import json
from unittest.mock import patch, MagicMock
from lambda_function import lambda_handler

class TestLambda(unittest.TestCase):

    @patch('pg8000.native.Connection')
    @patch('requests.get')
    def test_handler_success(self, mock_get, mock_conn):
        # Mocking the API response
        mock_get.return_value.status_code = 200

        # Mocking the DB response
        mock_instance = MagicMock()
        mock_instance.run.return_value = [["PostgreSQL 15.0"]]
        mock_conn.return_value = mock_instance

        # Execute the lambda handler
        event = {}
        context = None
        response = lambda_handler(event, context)
        body = json.loads(response['body'])

        # Assertions
        self.assertEqual(body['api_status'], 200)
        self.assertIn('PostgreSQL 15.0', response['body'])

if __name__ == '__main__':
    unittest.main()
}}}

=== start-servers.sh ===
{{{#!highlight sh
#!/bin/sh
NETWORK=mynet
echo "Creating network"
docker network create $NETWORK

echo "Launch localstack"
docker run --rm -it -d -p 127.0.0.1:4566:4566 -p 127.0.0.1:4510-4559:4510-4559 \
  -v /var/run/docker.sock:/var/run/docker.sock --network $NETWORK \
  --name localstack localstack/localstack

echo "Launch postgres"
docker run -p 54320:5432 --rm --name postgres-server -e POSTGRES_PASSWORD=postgres \
  --network $NETWORK -d postgres:15.3-alpine
}}}

=== deploy-lambda.sh ===
{{{#!highlight sh
export AWS_ACCESS_KEY_ID="test"
export AWS_SECRET_ACCESS_KEY="test"
export AWS_DEFAULT_REGION="us-east-1"
export AWS_ENDPOINT_URL="http://localhost:4566/"

FUNCTION_NAME=lambda-pg-function
aws lambda delete-function --function-name $FUNCTION_NAME
sleep 5
aws lambda create-function --function-name $FUNCTION_NAME \
  --zip-file fileb://lambda_deployment.zip --handler lambda_function.lambda_handler \
  --runtime python3.14 --role arn:aws:iam::000000000000:role/lambda-ex \
  --timeout 30
}}}

=== build.sh ===
{{{#!highlight sh
#!/bin/bash
PACKAGE_NAME="lambda_deployment.zip"
BUILD_DIR="dist"
HANDLER_FILE="lambda_function.py"
TEST_FILE="test_lambda.py"

echo "Cleaning up old builds"
rm -rf $BUILD_DIR
rm -f $PACKAGE_NAME
mkdir $BUILD_DIR

echo "Installing dependencies in $BUILD_DIR ..."
pip install -r requirements.txt -t $BUILD_DIR/

echo "Running tests..."
export PYTHONPATH=$PYTHONPATH:$(pwd)/$BUILD_DIR
python3 -m unittest $TEST_FILE
if [ $? -eq 0 ]; then
    echo "Tests passed! Proceeding to build..."
else
    echo "Tests failed. Build aborted."
    exit 1
fi

echo "Clean up pycache"
find $BUILD_DIR -type d -name "__pycache__" -exec rm -rf {} +

echo "Copy source code"
cp $HANDLER_FILE $BUILD_DIR/

echo "Creating the ZIP $PACKAGE_NAME"
cd $BUILD_DIR
zip -r ../$PACKAGE_NAME .
cd ..

echo "Deployment package ready: $PACKAGE_NAME"
}}}

=== lambda_function.py ===
{{{#!highlight python
import pg8000.native
import requests
import json

def lambda_handler(event, context):
    con = pg8000.native.Connection(user="postgres",
                                   password="postgres",
                                   host="postgres-server",
                                   database="postgres",
                                   port=5432)
    try:
        rows = con.run("SELECT version();")
        response = requests.get("https://api.github.com")
        return {
            'statusCode': 200,
            'body': json.dumps({
                'db_version': rows[0][0],
                'api_status': response.status_code
            })
        }
    finally:
        con.close()
}}}
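start-servers.sh leaves the two containers and the `mynet` network running; a possible companion cleanup script (hypothetical, matching the names used above):

{{{#!highlight sh
#!/bin/sh
# stop-servers.sh: tear down what start-servers.sh created
# both containers were started with --rm, so stopping also removes them
docker stop localstack postgres-server
docker network rm mynet
}}}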