Closed seoyeong200 closed 8 months ago
import json
# Logged once at module load (i.e., per Lambda container cold start).
print('Loading function')
def lambda_handler(event, context):
    """Echo handler: log the three expected keys and return the first value.

    Expects `event` to carry 'key1', 'key2', and 'key3'; raises KeyError
    if any is missing.
    """
    # print("Received event: " + json.dumps(event, indent=2))
    for key in ('key1', 'key2', 'key3'):
        print(f"value{key[-1]} = " + event[key])
    # Echo back the first key value.
    return event['key1']
    # raise Exception('Something went wrong')
Simple backend (reads from and writes to DynamoDB) plus a RESTful API endpoint (Amazon API Gateway).
import boto3
import json
# Logged once at module load (i.e., per Lambda container cold start).
print('Loading function')
# Module-level DynamoDB client: created once and reused across warm invocations.
dynamo = boto3.client('dynamodb')
def respond(err, res=None):
    """Build an API Gateway proxy integration response.

    Args:
        err: Exception instance on failure, or None on success.
        res: JSON-serializable success payload (ignored when err is set).

    Returns:
        dict with 'statusCode', 'body', and 'headers' in the shape
        API Gateway expects from a Lambda proxy integration.
    """
    return {
        'statusCode': '400' if err else '200',
        # Bug fix: Python 3 exceptions have no .message attribute, so the
        # original `err.message` raised AttributeError; str(err) is portable.
        'body': str(err) if err else json.dumps(res),
        'headers': {
            'Content-Type': 'application/json',
        },
    }
def lambda_handler(event, context):
    """Route an API Gateway HTTP request to the matching DynamoDB call.

    GET scans a table (TableName arrives as a query string parameter);
    POST, PUT, and DELETE forward the JSON request body to put_item,
    update_item, and delete_item respectively. Any other method gets a
    400 response.
    """
    # print("Received event: " + json.dumps(event, indent=2))
    method = event['httpMethod']
    handler = {
        'DELETE': dynamo.delete_item,
        'GET': dynamo.scan,
        'POST': dynamo.put_item,
        'PUT': dynamo.update_item,
    }.get(method)

    if handler is None:
        return respond(ValueError('Unsupported method "{}"'.format(method)))

    if method == 'GET':
        payload = event['queryStringParameters']
    else:
        payload = json.loads(event['body'])
    return respond(None, handler(**payload))
Submits an AWS Batch job and returns the job ID.
import json
import boto3
# Logged once at module load (i.e., per Lambda container cold start).
print('Loading function')
# Module-level Batch client: created once and reused across warm invocations.
batch = boto3.client('batch')
def lambda_handler(event, context):
    """Submit an AWS Batch job and return its job ID.

    Expects `event` keys: 'jobName', 'jobQueue', 'jobDefinition'
    (required); 'containerOverrides' and 'parameters' (optional).
    See http://docs.aws.amazon.com/batch/latest/APIReference/API_SubmitJob.html

    Returns:
        {'jobId': <str>} for the newly submitted job.

    Raises:
        Exception: 'Error submitting Batch Job' on any failure, with the
        original error chained as the cause.
    """
    # Log the received event.
    print("Received event: " + json.dumps(event, indent=2))

    jobName = event['jobName']
    jobQueue = event['jobQueue']
    jobDefinition = event['jobDefinition']
    # Optional inputs default to {}; `or {}` also normalizes an explicit null.
    containerOverrides = event.get('containerOverrides') or {}
    parameters = event.get('parameters') or {}

    try:
        # Submit a Batch Job.
        response = batch.submit_job(jobQueue=jobQueue, jobName=jobName, jobDefinition=jobDefinition,
                                    containerOverrides=containerOverrides, parameters=parameters)
        # Log response from AWS Batch.
        print("Response: " + json.dumps(response, indent=2))
        return {'jobId': response['jobId']}
    except Exception as e:
        print(e)
        message = 'Error submitting Batch Job'
        print(message)
        # Chain the cause explicitly so the traceback keeps the real error.
        raise Exception(message) from e
Returns the current status of an AWS Batch job.
import json
import boto3
# Logged once at module load (i.e., per Lambda container cold start).
print('Loading function')
# Module-level Batch client: created once and reused across warm invocations.
batch = boto3.client('batch')
def lambda_handler(event, context):
    """Look up the current status of an AWS Batch job via DescribeJobs.

    Expects `event` to carry 'jobId'. Returns the job's status string;
    raises Exception('Error getting Batch Job status') on any failure.
    """
    # Log the received event.
    print("Received event: " + json.dumps(event, indent=2))
    job_id = event['jobId']
    try:
        response = batch.describe_jobs(jobs=[job_id])
        # Log response from AWS Batch.
        print("Response: " + json.dumps(response, indent=2))
        # First (and only) entry corresponds to the requested job.
        return response['jobs'][0]['status']
    except Exception as e:
        print(e)
        message = 'Error getting Batch Job status'
        print(message)
        raise Exception(message)
Available options
On-premises
(literally on my own laptop; local environment)
EC2
- kind of on-premises, but in the cloud environment
ECS
- Elastic Container Service
Lambda
What is needed