This repository has been archived by the owner on Oct 19, 2023. It is now read-only.
-
Notifications
You must be signed in to change notification settings - Fork 136
/
app.py
51 lines (43 loc) · 1.34 KB
/
app.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
import os
from typing import Callable, List
import langchain
from langchain.agents import AgentExecutor, ConversationalAgent
from langchain.callbacks.manager import CallbackManager
from langchain.memory import ChatMessageHistory
from langchain.prompts import PromptTemplate
from langchain.tools import Tool
from lcserve import get_memory, slackbot
def update_cache(path):
    """Point LangChain's global LLM cache at a SQLite database under *path*.

    Side effect: assigns ``langchain.llm_cache``; every subsequent LLM call in
    this process reuses cached completions from ``<path>/llm_cache.db``.
    """
    # Imported lazily so merely importing this module stays cheap.
    from langchain.cache import SQLiteCache

    db_file = os.path.join(path, "llm_cache.db")
    langchain.llm_cache = SQLiteCache(database_path=db_file)
@slackbot(openai_tracing=True)
def agent(
    message: str,
    prompt: PromptTemplate,
    history: ChatMessageHistory,
    tools: List[Tool],
    reply: Callable,
    tracing_handler,
    workspace: str,
    **kwargs,
):
    """Slack-bot entry point: answer *message* via a conversational agent.

    Builds a ``ConversationalAgent`` backed by ``ChatOpenAI`` (temperature 0,
    wired to *tracing_handler* for OpenAI tracing), runs it through an
    ``AgentExecutor`` with the provided *tools* and the chat *history* as
    memory, and sends the result back with *reply*.
    """
    # Imported lazily, matching the lcserve handler style.
    from langchain import LLMChain
    from langchain.chat_models import ChatOpenAI

    # Cache LLM responses in a SQLite DB inside this workspace's directory.
    update_cache(workspace)

    chat_llm = ChatOpenAI(temperature=0, verbose=True, callbacks=[tracing_handler])
    llm_chain = LLMChain(llm=chat_llm, prompt=prompt)
    tool_names = [tool.name for tool in tools]
    conv_agent = ConversationalAgent(llm_chain=llm_chain, allowed_tools=tool_names)

    executor = AgentExecutor.from_agent_and_tools(
        agent=conv_agent,
        tools=tools,
        memory=get_memory(history),
        verbose=True,
        max_iterations=4,          # cap tool-use loops to avoid runaway agents
        handle_parsing_errors=True,
    )
    reply(executor.run(message))