small updates/fixes #79

Merged
1 commit merged on Mar 20, 2024
2 changes: 1 addition & 1 deletion bigquery/.env.example
@@ -1,7 +1,7 @@
OPENAI_API_KEY=

# Optional
-# CHAINLIT_API_KEY=
+# LITERAL_API_KEY=

# Optional
# OAUTH_GOOGLE_CLIENT_ID=
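For context, a minimal sketch (not part of this PR) of how the keys in this .env might be read at startup. The use of python-dotenv and the variable names below are illustrative assumptions, not code from the bigquery example itself.

```python
# Sketch only: load .env and read the required and optional keys shown above.
import os
from dotenv import load_dotenv

load_dotenv()  # reads the .env file into the process environment

openai_api_key = os.environ["OPENAI_API_KEY"]            # required
literal_api_key = os.getenv("LITERAL_API_KEY")           # optional, may be None
google_client_id = os.getenv("OAUTH_GOOGLE_CLIENT_ID")   # optional OAuth setting
```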
4 changes: 2 additions & 2 deletions langchain-ask-human/app.py
@@ -1,6 +1,6 @@
from langchain.chains import LLMMathChain
from langchain.agents import initialize_agent, Tool, AgentType, AgentExecutor
-from langchain.llms.openai import OpenAI
+from langchain_community.chat_models import ChatOpenAI
from typing import *
from langchain.tools import BaseTool

@@ -40,7 +40,7 @@ async def _arun(

@cl.on_chat_start
def start():
-    llm = OpenAI(temperature=0, streaming=True)
+    llm = ChatOpenAI(temperature=0, streaming=True, model_name="gpt-4-turbo-preview")
    llm_math_chain = LLMMathChain.from_llm(llm=llm, verbose=True)

    tools = [
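For context, a hedged sketch of how the swapped-in ChatOpenAI can drive the rest of this example. The math tool and agent wiring follow the imports shown in the diff, but the concrete tool list and agent type below are illustrative assumptions, not taken from the PR.

```python
# Sketch only: assumes langchain-community is installed; the Calculator tool and
# AgentType choice are illustrative, not copied from the repository.
from langchain.chains import LLMMathChain
from langchain.agents import initialize_agent, Tool, AgentType
from langchain_community.chat_models import ChatOpenAI

llm = ChatOpenAI(temperature=0, streaming=True, model_name="gpt-4-turbo-preview")
llm_math_chain = LLMMathChain.from_llm(llm=llm, verbose=True)

tools = [
    Tool(
        name="Calculator",
        func=llm_math_chain.run,
        description="Useful for answering math questions.",
    ),
]

agent = initialize_agent(tools, llm, agent=AgentType.OPENAI_FUNCTIONS, verbose=True)
```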
4 changes: 2 additions & 2 deletions openai-functions/app.py
@@ -97,7 +97,7 @@ async def call_gpt4(message_history):
    provider="openai-chat",
    messages=[
        cl.GenerationMessage(
-            formatted=m["content"], name=m.get("name"), role=m["role"]
+            content=m["content"], name=m.get("name", "function"), role=m["role"]
        )
        for m in message_history
    ],
@@ -131,7 +131,7 @@ async def call_gpt4(message_history):
@cl.on_message
async def run_conversation(message: cl.Message):
    message_history = cl.user_session.get("message_history")
-    message_history.append({"role": "user", "content": message.content})
+    message_history.append({"name": "user", "role": "user", "content": message.content})

    cur_iter = 0

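For context, a hedged sketch of the message_history shape the updated code expects: entries now carry a name, and the mapping to cl.GenerationMessage uses content with a "function" fallback name, mirroring the two changes above. The sample entry is made up.

```python
# Sketch only: the history entry is invented; the mapping mirrors the diff above.
import chainlit as cl

message_history = [
    {"name": "user", "role": "user", "content": "What is the weather in Paris?"},
]

generation_messages = [
    cl.GenerationMessage(
        content=m["content"], name=m.get("name", "function"), role=m["role"]
    )
    for m in message_history
]
```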
2 changes: 1 addition & 1 deletion resume-chat/.env.example
@@ -1,3 +1,3 @@
-CHAINLIT_API_KEY=
+LITERAL_API_KEY=
CHAINLIT_AUTH_SECRET=
OPENAI_API_KEY=
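For context, a minimal sketch (not part of this PR) that fails fast when any key listed in this .env.example is missing; CHAINLIT_AUTH_SECRET is what Chainlit's authentication relies on, and python-dotenv usage here is an assumption.

```python
# Sketch only: validate the keys from .env.example at startup.
import os
from dotenv import load_dotenv

load_dotenv()

for key in ("LITERAL_API_KEY", "CHAINLIT_AUTH_SECRET", "OPENAI_API_KEY"):
    if not os.getenv(key):
        raise RuntimeError(f"Missing required environment variable: {key}")
```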
4 changes: 2 additions & 2 deletions resume-chat/app.py
@@ -1,6 +1,6 @@
from operator import itemgetter

-from langchain.chat_models import ChatOpenAI
+from langchain_community.chat_models import ChatOpenAI
from langchain.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain.schema.output_parser import StrOutputParser
from langchain.schema.runnable import Runnable, RunnablePassthrough, RunnableLambda
Expand Down Expand Up @@ -49,7 +49,7 @@ async def on_chat_resume(thread: ThreadDict):
    memory = ConversationBufferMemory(return_messages=True)
    root_messages = [m for m in thread["steps"] if m["parentId"] == None]
    for message in root_messages:
-        if message["type"] == "USER_MESSAGE":
+        if message["type"] == "user_message":
            memory.chat_memory.add_user_message(message["output"])
        else:
            memory.chat_memory.add_ai_message(message["output"])
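For context, a hedged sketch showing the effect of the lowercase type check when rebuilding memory from a resumed thread. The thread dictionary below is a made-up stand-in, not real Chainlit data.

```python
# Sketch only: the thread structure is invented to exercise the branch above.
from langchain.memory import ConversationBufferMemory

thread = {
    "steps": [
        {"parentId": None, "type": "user_message", "output": "Hi, can you review my resume?"},
        {"parentId": None, "type": "assistant_message", "output": "Sure, paste it here."},
        {"parentId": "abc123", "type": "llm", "output": "(nested step, skipped)"},
    ]
}

memory = ConversationBufferMemory(return_messages=True)
root_messages = [m for m in thread["steps"] if m["parentId"] is None]
for message in root_messages:
    if message["type"] == "user_message":
        memory.chat_memory.add_user_message(message["output"])
    else:
        memory.chat_memory.add_ai_message(message["output"])
```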