背景
エージェントとの対話機能関連pocにおいて、対話性能のみに集中して検証したい場合はStreamlitよりChainlitの方がシンプルかつ高機能のため何とかして使いこなしたい。
一方で、会話履歴をサイドバーっぽく表示するには認証とDatalayerの実装が必要になる。会話履歴はNoSQLに入れがちなので、Chainlit公式が出しているDynamoDBDataLayerを使いたいが、ローカル開発では使いづらいかも。
したがってLocalstackを立ち上げてその中でDynamoDBたててそれを指定することで解決したい。
やってみた
Localstackを立ち上げてDynamoDB、S3を作成する
docker-compose.yml
# LocalStack exposing only DynamoDB and S3, with state persisted in a
# named volume so tables/buckets survive container restarts.
services:
  localstack:
    container_name: "${LOCALSTACK_DOCKER_NAME:-localstack_main}"
    image: localstack/localstack:latest
    ports:
      - "4566:4566"          # single edge port for all AWS APIs
    environment:
      - SERVICES=dynamodb,s3 # start only the services this PoC needs
      - PERSISTENCE=1        # keep data across restarts
      - DEBUG=1
    volumes:
      - localstack_data:/var/lib/localstack
    networks:
      - localstack_network

volumes:
  localstack_data:
    driver: local

networks:
  localstack_network:
    driver: bridge
コンテナの中に入ってtableを作る
# Create the table Chainlit's DynamoDBDataLayer expects.
# PK/SK form the primary key; the UserThread GSI is what lets the data
# layer list a user's threads for the sidebar (only id and name are
# projected into the index).
awslocal dynamodb create-table \
--table-name User \
--attribute-definitions \
AttributeName=PK,AttributeType=S \
AttributeName=SK,AttributeType=S \
AttributeName=UserThreadPK,AttributeType=S \
AttributeName=UserThreadSK,AttributeType=S \
--key-schema \
AttributeName=PK,KeyType=HASH \
AttributeName=SK,KeyType=RANGE \
--global-secondary-indexes \
'IndexName=UserThread,KeySchema=[{AttributeName=UserThreadPK,KeyType=HASH},{AttributeName=UserThreadSK,KeyType=RANGE}],Projection={ProjectionType=INCLUDE,NonKeyAttributes=[id,name]}' \
--billing-mode PAY_PER_REQUEST
Chainlit公式のData Layerは添付ファイルなどの要素をS3バケットに保存する想定になっているため、それに倣ってバケットも作成しておく
# Bucket for Chainlit element/file storage (mirrors the official example).
awslocal s3 mb s3://userdata
Chainlit書いてみる
from operator import itemgetter
import os
import boto3
from botocore.config import Config
from langchain_openai import ChatOpenAI
from langchain.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain.schema.output_parser import StrOutputParser
from langchain.schema.runnable import Runnable, RunnablePassthrough, RunnableLambda
from langchain.schema.runnable.config import RunnableConfig
from langchain.memory import ConversationBufferMemory
from chainlit.types import ThreadDict
import chainlit as cl
import chainlit.data as cl_data
from chainlit.data.dynamodb import DynamoDBDataLayer
from chainlit.data.storage_clients.s3 import S3StorageClient
from dotenv import load_dotenv
import os  # NOTE(review): duplicate of the `import os` at the top of the file — harmless but removable

# Populate os.environ from a local .env file (e.g. OPENAI_API_KEY).
load_dotenv()
# AWS configuration for LocalStack
def configure_aws_for_localstack():
    """Point the AWS SDK at LocalStack via environment variables.

    Uses ``setdefault`` so any real credentials/endpoint already present
    in the environment are left untouched.
    """
    localstack_defaults = {
        'AWS_ACCESS_KEY_ID': 'test',
        'AWS_SECRET_ACCESS_KEY': 'test',
        'AWS_DEFAULT_REGION': 'us-east-1',
        'AWS_ENDPOINT_URL': 'http://localhost:4566',
    }
    for key, value in localstack_defaults.items():
        os.environ.setdefault(key, value)

# Apply the LocalStack settings at import time.
configure_aws_for_localstack()
# S3 storage client configuration for LocalStack (element/file uploads).
storage_client = S3StorageClient(
    bucket=os.getenv("AWS_S3_BUCKET_NAME", "userdata"),
    # LocalStack-specific connection settings.
    # NOTE(review): confirm this Chainlit version's S3StorageClient accepts
    # a `client_kwargs` parameter — some releases forward **kwargs straight
    # to boto3.client instead.
    client_kwargs={
        "endpoint_url": "http://localhost:4566",
        "aws_access_key_id": "test",
        "aws_secret_access_key": "test",
        "region_name": "us-east-1"
    }
)

# DynamoDB client pointed at the LocalStack endpoint with dummy credentials.
dynamodb_client = boto3.client(
    'dynamodb',
    endpoint_url='http://localhost:4566',
    aws_access_key_id='test',
    aws_secret_access_key='test',
    region_name='us-east-1'
)

# Hand the custom client to DynamoDBDataLayer so Chainlit persists
# users/threads in the LocalStack table instead of real AWS.
cl_data._data_layer = DynamoDBDataLayer(
    table_name=os.getenv("AWS_DYNAMODB_TABLE_NAME", "User"),
    storage_provider=storage_client,
    client=dynamodb_client
)
def setup_runnable():
    """Build the LCEL chat chain and stash it in the user session.

    Pulls the per-session ConversationBufferMemory, wires it into a
    history-aware prompt, and stores the resulting runnable under the
    "runnable" session key.
    """
    memory: ConversationBufferMemory = cl.user_session.get("memory")
    llm = ChatOpenAI(streaming=True)

    chat_prompt = ChatPromptTemplate.from_messages(
        [
            ("system", "You are a helpful chatbot"),
            MessagesPlaceholder(variable_name="history"),
            ("human", "{question}"),
        ]
    )

    # Inject the memory's history into the prompt, then: prompt -> model -> text.
    chain = (
        RunnablePassthrough.assign(
            history=RunnableLambda(memory.load_memory_variables) | itemgetter("history")
        )
        | chat_prompt
        | llm
        | StrOutputParser()
    )
    cl.user_session.set("runnable", chain)
@cl.password_auth_callback
def auth(username: str, password: str):
    """Password-auth stub for local PoC use.

    Chainlit invokes this callback with the submitted username and
    password, so the original zero-argument signature would raise a
    TypeError on every login attempt. This stub accepts any credentials
    and returns a fixed test user — do NOT use outside local development.
    """
    return cl.User(identifier="test")
@cl.on_chat_start
async def on_chat_start():
    """Start a new conversation: fresh memory, then build the chain."""
    fresh_memory = ConversationBufferMemory(return_messages=True)
    cl.user_session.set("memory", fresh_memory)
    setup_runnable()
@cl.on_chat_resume
async def on_chat_resume(thread: ThreadDict):
    """Rebuild conversation memory from a persisted thread on resume.

    Only root-level steps (those with no parent) are replayed into
    memory, so intermediate tool/chain child steps are skipped and the
    memory holds just the visible user/assistant exchange.
    """
    memory = ConversationBufferMemory(return_messages=True)
    # PEP 8: compare against None with `is`, not `==`.
    root_messages = [m for m in thread["steps"] if m["parentId"] is None]
    for message in root_messages:
        if message["type"] == "user_message":
            memory.chat_memory.add_user_message(message["output"])
        else:
            memory.chat_memory.add_ai_message(message["output"])

    cl.user_session.set("memory", memory)
    setup_runnable()
@cl.on_message
async def on_message(message: cl.Message):
    """Handle one user turn: stream the answer, then record both sides in memory."""
    memory: ConversationBufferMemory = cl.user_session.get("memory")
    runnable: Runnable = cl.user_session.get("runnable")

    reply = cl.Message(content="")
    token_stream = runnable.astream(
        {"question": message.content},
        config=RunnableConfig(callbacks=[cl.LangchainCallbackHandler()]),
    )
    async for token in token_stream:
        await reply.stream_token(token)
    await reply.send()

    # Persist the exchange so the next turn sees it as history.
    memory.chat_memory.add_user_message(message.content)
    memory.chat_memory.add_ai_message(reply.content)
みんなも楽しいChainlit lifeを!
おまけ:StreamlitにChainlitのChatUIを埋め込む
HTMLで埋め込んでしまう
import streamlit as st
import streamlit.components.v1 as stc

st.write("## With Chainlit UI.")

# HTML for the iframe embedding the locally running Chainlit app (port 8000).
# Fixed `border: 2` -> `border: 0`: the original value is invalid CSS
# (missing unit/style) and was silently ignored by browsers.
html_code = """
<iframe
src="http://localhost:8000"
style="width: 100%; height: 100%; min-height: 700px; border: 0;"
allow="microphone">
</iframe>
"""

# Render the raw HTML inside the Streamlit app.
stc.html(html_code, height=800)