やりたいこと
- API Gatewayを叩く
- API GatewayからLambdaを起動
- Lambdaからredshiftにqueryを投げる
- 結果をsnippetで通知
Api Gateway+Lambdaのterraform
基本的にpostを受けてそれをそのままlambdaに渡しているだけ
lambda側はそれを解読して返答する
認証にはapi-keyを使用
api_gateway.tf
// REST API container; all other API Gateway resources hang off this.
// The name is prefixed with the Terraform environment (0.11 `terraform.env`).
resource "aws_api_gateway_rest_api" "test_api" {
name = "${terraform.env}_test_api"
}
//APIのパス
// API path segment: exposes /test_root directly under the API root.
resource "aws_api_gateway_resource" "test_resource" {
rest_api_id = "${aws_api_gateway_rest_api.test_api.id}"
parent_id = "${aws_api_gateway_rest_api.test_api.root_resource_id}"
path_part = "test_root"
}
//GET、POST等に対応
// Method on /test_root. Auth is disabled; access control is via API key only.
resource "aws_api_gateway_method" "test_method" {
  rest_api_id = "${aws_api_gateway_rest_api.test_api.id}"
  // Fix: the original interpolated the whole resource object; the
  // required value is its id attribute.
  resource_id = "${aws_api_gateway_resource.test_resource.id}"
  // Fix: http_method is a required argument and was missing entirely.
  // POST matches the text above ("postを受けてlambdaに渡す").
  http_method = "POST"
  authorization = "NONE"
  api_key_required = true
}
// Usage plan binding the stage to API-key access; the key itself is
// attached via aws_api_gateway_usage_plan_key below.
resource "aws_api_gateway_usage_plan" "myusageplan" {
name = "my_usage_plan"
description = "my description"
api_stages {
api_id = "${aws_api_gateway_rest_api.test_api.id}"
stage = "${aws_api_gateway_stage.test_stage.stage_name}"
}
}
// API key used for authentication (api_key_required = true on the method).
// The key value is generated by AWS; read it from the resource's `value` output.
resource "aws_api_gateway_api_key" "test_key" {
name = "${terraform.env}_test_key"
}
// Attaches the API key to the usage plan; without this link the key
// cannot call the stage even though api_key_required is set.
resource "aws_api_gateway_usage_plan_key" "main" {
key_id = "${aws_api_gateway_api_key.test_key.id}"
key_type = "API_KEY"
usage_plan_id = "${aws_api_gateway_usage_plan.myusageplan.id}"
}
// Stage pointing at the deployment.
resource "aws_api_gateway_stage" "test_stage" {
  // Fix: name the stage directly instead of reading it off the deployment.
  // A deployment that sets stage_name creates that stage itself, so also
  // managing it here via aws_api_gateway_stage conflicts on apply.
  stage_name    = "${terraform.env}_test_stage"
  rest_api_id   = "${aws_api_gateway_rest_api.test_api.id}"
  deployment_id = "${aws_api_gateway_deployment.MyDemoDeployment.id}"
}
resource "aws_api_gateway_deployment" "MyDemoDeployment" {
  // Fix: also depend on the integration — deploying after the method
  // exists but before the integration is attached produces a broken stage.
  depends_on = [
    "aws_api_gateway_method.test_method",
    "aws_api_gateway_integration.test_integration",
  ]
  rest_api_id = "${aws_api_gateway_rest_api.test_api.id}"
  // NOTE(review): setting stage_name makes the deployment create the stage
  // itself, which duplicates the aws_api_gateway_stage resource above —
  // consider dropping this attribute and letting that resource own the stage.
  stage_name = "${terraform.env}_test_stage"
}
// Proxy integration: forwards the raw request to the Lambda function.
resource "aws_api_gateway_integration" "test_integration" {
  rest_api_id = "${aws_api_gateway_rest_api.test_api.id}"
  resource_id = "${aws_api_gateway_resource.test_resource.id}"
  http_method = "${aws_api_gateway_method.test_method.http_method}"
  // Lambda integrations are always invoked with POST regardless of the
  // client-facing method.
  integration_http_method = "POST"
  type = "AWS_PROXY"
  // Fix: use the function's invoke_arn instead of hand-building the URI
  // around a hard-coded "REGION" placeholder.
  uri = "${aws_lambda_function.test_function.invoke_arn}"
}
// Lambda
// Allows API Gateway to invoke the Lambda function.
resource "aws_lambda_permission" "apigw_lambda" {
  statement_id  = "AllowExecutionFromAPIGateway"
  action        = "lambda:InvokeFunction"
  function_name = "${aws_lambda_function.test_function.arn}"
  principal     = "apigateway.amazonaws.com"
  // Fix: without source_arn, ANY API Gateway API in the account could
  // invoke this function. Restrict the grant to this API
  // (any stage / any method / any path).
  source_arn = "${aws_api_gateway_rest_api.test_api.execution_arn}/*/*/*"
}
lambda.tf
// Lambda function deployed from a zip in S3 (bucket per environment).
// Runs inside the VPC so it can reach Redshift when the DB is IP-restricted
// to the subnet range (see the note in the text above).
resource "aws_lambda_function" "test_function" {
s3_bucket = "${terraform.env}-test-function"
s3_key = "test_function.zip"
function_name = "${terraform.env}_test_function"
role = "${var.sys_lambdarole_arn}"
handler = "lambda_function.handler"
timeout = 300
runtime = "python3.6"
// NOTE: `vpc_config = {` is Terraform 0.11 syntax; 0.12+ requires a bare block.
vpc_config = {
subnet_ids = [
"${var.subnet_az1_id}",
"${var.subnet_az2_id}"
]
security_group_ids = [
"${var.test_sg_id}"
]
}
}
Lambdaの中身
lambdaに関してモジュールを一緒にいれないといけないのが大変...
今回の例ではDBにqueryを投げてる
kmsも使っているのでhost名やtokenがバレる心配もなし
結果はsnippetとしてslack側に通知
ちなみにDBにIP制限をかけている場合subnet範囲の通信を通しておく
lambda.py
import base64
import json
import re

import boto3
import psycopg2
import requests
# Redshift connection settings (placeholder values).
db_database = "DBNAME"
db_user = "USER_ID" # this should also be encrypted with KMS...
db_password = "USER_PASS" # this should also be encrypted with KMS...
db_port = "DB_PORT"
# KMS-encrypted secrets, stored as base64 of the ciphertext blob;
# decrypted at runtime by kms_plain_text().
db_host_encoded = "base64 encoded ciphertext"
slack_token_encoded = "base64 encoded ciphertext"
# Slack API endpoint used to post the query result as a snippet.
slack_url = "https://slack.com/api/files.upload"
def handler(event, context):
    """API Gateway (AWS_PROXY) entry point: query Redshift, post to Slack.

    ``event["body"]`` is the raw JSON POST body forwarded by the proxy
    integration. Returns an API Gateway proxy response dict
    (``statusCode`` / ``body``); exceptions are logged and re-raised so
    Lambda records the failure.
    """
    event_dict = json.loads(event["body"])
    # The payload layout is caller-defined; "***" is a placeholder key.
    channel_name = event_dict["***"]["channel_name"]
    req_usr = event_dict["***"]["req_usr"]          # requesting user (informational)
    magic_words = event_dict["***"]["magic_words"]  # request keyword (informational)
    # The original had bare `***` placeholders here (not valid Python);
    # read them from the payload instead — adapt to the real contract.
    schema_name = event_dict["***"]["schema_name"]
    table_name = event_dict["***"]["table_name"]
    conn = None
    try:
        conn = psycopg2.connect("dbname=" + db_database
                                + " user=" + db_user
                                + " password=" + db_password
                                + " port=" + db_port
                                + " host=" + kms_plain_text(db_host_encoded))
        conn.autocommit = True
        cur = conn.cursor()
        try:
            cur.execute(test_sql(schema_name, table_name))
            # One line per row, each column stringified and concatenated.
            result = "".join("".join(map(str, row)) + "\n" for row in cur)
        finally:
            cur.close()
        if result:
            upload_file('test', result, channel_name)
            responseObj = {
                "statusCode": 201,
                "body": result
            }
        else:
            # Empty result set. The original guarded `result is None`,
            # which could never be true (result started as ''), so empty
            # results fell through to a 500 — report them here instead.
            responseObj = {
                "statusCode": 201,
                "body": "No such table"
            }
        print(event)
        print(context)
        return responseObj
    except Exception as e:
        print(e)
        raise
    finally:
        # Fix: close the connection on error paths too — the original
        # leaked it whenever execute() or upload_file() raised.
        if conn is not None:
            conn.close()
def test_sql(schema_name, table_name):
sql = "SELECT * FROM " + schema_name+"."+table_name
return sql
def upload_file(filename, content, channel):
    """Post *content* to Slack as a snippet via the files.upload API.

    The Slack token is stored KMS-encrypted and decrypted on each call.
    """
    payload = {
        'title': filename,
        'filetype': u'sql',
        'content': content,
        'link_names': 1,  # enable @mentions in the snippet
        'channels': channel,
        'token': kms_plain_text(slack_token_encoded),
    }
    res = requests.post(slack_url, payload)
    print(res)
def kms_plain_text(ciphertext_blob_encoded):
    """Decrypt a base64-encoded KMS ciphertext blob and return it as text.

    Args:
        ciphertext_blob_encoded: base64 string of the KMS ciphertext.

    Returns:
        The decrypted plaintext, decoded as ASCII.
    """
    ciphertext_blob = base64.b64decode(ciphertext_blob_encoded)
    # Fix: the original comment used `//`, which is Python's floor-division
    # operator and made this line a SyntaxError.
    # Which key to use is embedded in the ciphertext itself, so no KeyId
    # needs to be passed to decrypt().
    kms = boto3.client('kms')
    dec = kms.decrypt(CiphertextBlob=ciphertext_blob)
    return dec['Plaintext'].decode('ascii')  # plaintext
# Local test entry point. handler() reads event["body"] as a JSON string,
# so the original bare-string call handler('', '') crashed immediately on
# ''["body"] — pass a minimal API-Gateway-shaped stub instead.
if __name__ == '__main__':
    sample_event = {"body": json.dumps({})}  # TODO: fill in the real payload shape
    handler(sample_event, None)
終わりに
これでapi keyを持っていればいつでもredshiftにQueryを投げられる状態になりました
slackにpostを投げるbotを常駐させておけばいつでも情報をとってこれるslack botの出来上がりです