Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

fix llama index example #97

Merged
merged 2 commits on Mar 29, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
8 changes: 4 additions & 4 deletions custom-frontend/frontend/package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 1 addition & 1 deletion custom-frontend/frontend/package.json
Expand Up @@ -10,7 +10,7 @@
"preview": "vite preview"
},
"dependencies": {
"@chainlit/react-client": "0.0.2",
"@chainlit/react-client": "0.0.6",
"@radix-ui/react-slot": "^1.0.2",
"class-variance-authority": "^0.7.0",
"clsx": "^2.0.0",
Expand Down
31 changes: 18 additions & 13 deletions custom-frontend/frontend/src/App.tsx
@@ -1,32 +1,37 @@
import { useEffect } from "react";

import { ChainlitAPI, sessionState, useChatSession } from "@chainlit/react-client";
import {
ChainlitAPI,
sessionState,
useChatSession,
} from "@chainlit/react-client";
import { Playground } from "./components/playground";
import { useRecoilValue } from "recoil";

const CHAINLIT_SERVER = "http://localhost:8000";
const userEnv = {};

const apiClient = new ChainlitAPI(CHAINLIT_SERVER);
const apiClient = new ChainlitAPI(CHAINLIT_SERVER, "app");

function App() {
const { connect } = useChatSession();
const session = useRecoilValue(sessionState);

useEffect(() => {
if (session?.socket.connected) {
return
return;
}
fetch(apiClient
.buildEndpoint("/custom-auth")).then((res) => {
fetch(apiClient.buildEndpoint("/custom-auth"))
.then((res) => {
return res.json();
}
).then((data) => {
connect({ client: apiClient, userEnv, accessToken: `Bearer: ${data.token}` });
}
)
}, [session, connect]);

})
.then((data) => {
connect({
client: apiClient,
userEnv,
accessToken: `Bearer: ${data.token}`,
});
});
}, [connect]);

return (
<>
Expand Down
6 changes: 5 additions & 1 deletion llama-index/app.py
Expand Up @@ -11,6 +11,9 @@
)
from llama_index.llms.openai import OpenAI
from llama_index.embeddings.openai import OpenAIEmbedding
from llama_index.core.query_engine.retriever_query_engine import RetrieverQueryEngine
from llama_index.core.callbacks import CallbackManager
from llama_index.core.service_context import ServiceContext

openai.api_key = os.environ.get("OPENAI_API_KEY")

Expand All @@ -33,7 +36,8 @@ async def start():
Settings.embed_model = OpenAIEmbedding(model="text-embedding-3-small")
Settings.context_window = 4096

query_engine = index.as_query_engine(streaming=True, similarity_top_k=2)
service_context = ServiceContext.from_defaults(callback_manager=CallbackManager([cl.LlamaIndexCallbackHandler()]))
query_engine = index.as_query_engine(streaming=True, similarity_top_k=2, service_context=service_context)
cl.user_session.set("query_engine", query_engine)

await cl.Message(
Expand Down