Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
* Add OpenAI integration — simplifies instrumenting OpenAI calls; reuses the literalai OpenAI instrumentation; creates a new step for each OpenAI call, with the call details in the generation property.
* Add a 1 ms delay to the `on_message` callback — ensures any child step starts after the parent message step; replaced `generation.settings.model` with `generation.model`.
* Move the OpenAI version check inside the instrumentation call — avoids raising an error for users who haven't installed openai and aren't using `instrument_openai`.
- Loading branch information
Showing
3 changed files
with
65 additions
and
1 deletion.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,59 @@ | ||
from typing import TYPE_CHECKING, Union | ||
|
||
from chainlit.context import get_context | ||
from chainlit.step import Step | ||
from chainlit.sync import run_sync | ||
from chainlit.utils import check_module_version | ||
from literalai import ChatGeneration, CompletionGeneration | ||
|
||
|
||
def instrument_openai():
    """Instrument the OpenAI client so every OpenAI call is recorded as an
    ``"llm"`` Step in the current Chainlit context.

    Delegates the actual client patching to literalai's OpenAI
    instrumentation and registers a callback that turns each completed
    generation into a Chainlit ``Step``.

    Raises:
        ValueError: if the installed ``openai`` package is older than 1.0.0.
    """
    if not check_module_version("openai", "1.0.0"):
        raise ValueError(
            "Expected OpenAI version >= 1.0.0. Run `pip install openai --upgrade`"
        )

    # Imported lazily so users who never call instrument_openai() do not need
    # the openai package installed. Aliased to avoid shadowing this function.
    from literalai.instrumentation.openai import (
        instrument_openai as literalai_instrument_openai,
    )

    def _to_ms(seconds):
        # None-safe seconds -> milliseconds conversion for timing values.
        return seconds * 1000 if seconds is not None else None

    async def on_new_generation(
        generation: Union["ChatGeneration", "CompletionGeneration"], timing
    ):
        context = get_context()

        # Attach the new step under the currently running step if there is
        # one, otherwise under the session's root message (if any).
        parent_id = None
        if context.current_step:
            parent_id = context.current_step.id
        elif context.session.root_message:
            parent_id = context.session.root_message.id

        step = Step(
            name=generation.model if generation.model else generation.provider,
            type="llm",
            parent_id=parent_id,
        )
        step.generation = generation
        # literalai reports timings in seconds; Step expects milliseconds.
        step.start = _to_ms(timing.get("start"))
        step.end = _to_ms(timing.get("end"))

        # Chat generations carry message lists; completion generations carry
        # a single prompt/completion pair.
        if isinstance(generation, ChatGeneration):
            step.input = generation.messages
            step.output = generation.message_completion  # type: ignore
        else:
            step.input = generation.prompt
            step.output = generation.completion

        await step.send()

    def on_new_generation_sync(
        generation: Union["ChatGeneration", "CompletionGeneration"], timing
    ):
        # literalai invokes its callback synchronously; bridge back into the
        # Chainlit event loop.
        run_sync(on_new_generation(generation, timing))

    literalai_instrument_openai(None, on_new_generation_sync)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters