Wd/update literalsdk (#885)
* update to async literal sdk

* changelog
willydouhard committed Apr 8, 2024
1 parent c18a578 commit 7a05e0f
Showing 4 changed files with 21 additions and 12 deletions.
14 changes: 14 additions & 0 deletions CHANGELOG.md
@@ -6,10 +6,24 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).

## [Unreleased]

Nothing unreleased!

## [1.0.501] - 2024-04-08

### Added

- Messages and steps now accept tags and metadata (useful for the data layer)

### Changed

- The Llama Index callback handler should now show retrieved chunks in the intermediary steps
- Renamed the Literal environment variable to `LITERAL_API_URL` (it used to be `LITERAL_SERVER`)

### Fixed

- Starting a new conversation should close the element sidebar
- Resolved security issues by upgrading the starlette dependency

## [1.0.500] - 2024-04-02

### Added
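The 1.0.501 entry above notes that messages and steps now accept tags and metadata. A minimal sketch of what that could look like in an app, assuming the keyword arguments are simply named `tags` and `metadata` (the parameter names are not shown in this commit):

import chainlit as cl

@cl.on_message
async def main(message: cl.Message):
    # Hypothetical usage: the `tags` and `metadata` parameter names are
    # assumed here, not confirmed by this diff; per the changelog they are
    # intended to be picked up by the data layer.
    answer = cl.Message(
        content="Hello!",
        tags=["greeting"],
        metadata={"source": "example-handler"},
    )
    await answer.send()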
12 changes: 5 additions & 7 deletions backend/chainlit/data/__init__.py
@@ -10,10 +10,8 @@
from chainlit.logger import logger
from chainlit.session import WebsocketSession
from chainlit.types import Feedback, Pagination, ThreadDict, ThreadFilter
-from chainlit.user import PersistedUser, User, UserDict
-from literalai import Attachment, PageInfo, PaginatedResponse
-from literalai import Score as LiteralScore
-from literalai import Step as LiteralStep
+from chainlit.user import PersistedUser, User
+from literalai import Attachment, PageInfo, PaginatedResponse, Score as LiteralScore, Step as LiteralStep
from literalai.filter import threads_filters as LiteralThreadsFilters
from literalai.step import StepDict as LiteralStepDict

@@ -128,9 +126,9 @@ async def delete_user_session(self, id: str) -> bool:

class ChainlitDataLayer(BaseDataLayer):
    def __init__(self, api_key: str, server: Optional[str]):
-        from literalai import LiteralClient
+        from literalai import AsyncLiteralClient

-        self.client = LiteralClient(api_key=api_key, url=server)
+        self.client = AsyncLiteralClient(api_key=api_key, url=server)
        logger.info("Chainlit data layer initialized")

    def attachment_to_element_dict(self, attachment: Attachment) -> "ElementDict":
@@ -457,7 +455,7 @@ async def update_thread(
        tags: Optional[List[str]] = None,
    ):
        await self.client.api.upsert_thread(
-            thread_id=thread_id,
+            id=thread_id,
            name=name,
            participant_id=user_id,
            metadata=metadata,
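The change above swaps the synchronous LiteralClient for AsyncLiteralClient, so every call made through `client.api` becomes a coroutine and must be awaited, as the `upsert_thread` call in this diff already is. A minimal sketch under that assumption, using placeholder credentials and only the arguments visible in the diff:

from literalai import AsyncLiteralClient

# Placeholder values; in Chainlit these come from the data layer configuration.
client = AsyncLiteralClient(api_key="...", url="...")

async def rename_thread(thread_id: str, name: str) -> None:
    # With the async client, API calls must be awaited, mirroring the
    # upsert_thread call in update_thread above.
    await client.api.upsert_thread(id=thread_id, name=name)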
3 changes: 0 additions & 3 deletions backend/chainlit/llama_index/callbacks.py
@@ -173,12 +173,10 @@ def on_event_end(
        token_count = self.total_llm_token_count or None
        raw_response = response.raw if response else None
        model = raw_response.get("model", None) if raw_response else None
-        provider = "openai"

        if messages and isinstance(response, ChatResponse):
            msg: ChatMessage = response.message
            step.generation = ChatGeneration(
-                provider=provider,
                model=model,
                messages=messages,
                message_completion=GenerationMessage(
@@ -189,7 +187,6 @@
            )
        elif formatted_prompt:
            step.generation = CompletionGeneration(
-                provider=provider,
                model=model,
                prompt=formatted_prompt,
                completion=content,
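The two hunks above drop the hard-coded `provider = "openai"` from the Llama Index callback handler, so the recorded generation now carries only the model name reported by the LLM response. A small sketch of the resulting shape, assuming `ChatGeneration` and `GenerationMessage` can be imported from literalai the same way the callback module uses them:

from literalai import ChatGeneration, GenerationMessage

# Illustrative values only; in the handler these come from the Llama Index
# event payload (response.raw, response.message, etc.).
generation = ChatGeneration(
    model="gpt-4",
    messages=[GenerationMessage(role="user", content="Hello")],
    message_completion=GenerationMessage(role="assistant", content="Hi there!"),
)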
4 changes: 2 additions & 2 deletions backend/pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = "chainlit"
-version = "1.0.500"
+version = "1.0.501"
keywords = ['LLM', 'Agents', 'gen ai', 'chat ui', 'chatbot ui', 'openai', 'copilot', 'langchain', 'conversational ai']
description = "Build Conversational AI."
authors = ["Chainlit"]
@@ -23,7 +23,7 @@ chainlit = 'chainlit.cli:cli'
[tool.poetry.dependencies]
python = ">=3.8.1,<4.0.0"
httpx = ">=0.23.0"
-literalai = "0.0.401"
+literalai = "0.0.500"
dataclasses_json = "^0.5.7"
fastapi = "^0.110.1"
starlette = "^0.37.2"
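The pins above move chainlit to 1.0.501 and its literalai dependency to 0.0.500. A quick way to confirm that an environment actually resolved to these versions after upgrading (assuming both packages are installed):

from importlib.metadata import version

# Both expected version strings come from the pyproject.toml hunks above.
assert version("chainlit") == "1.0.501"
assert version("literalai") == "0.0.500"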
