Skip to content

Commit

Permalink
chore: add openai response type (#199)
Browse files Browse the repository at this point in the history
  • Loading branch information
henomis committed May 20, 2024
1 parent cbc913b commit 3930e2b
Show file tree
Hide file tree
Showing 3 changed files with 54 additions and 7 deletions.
26 changes: 26 additions & 0 deletions examples/llm/openai/response_format/main.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
package main

import (
"context"
"fmt"

"github.com/henomis/lingoose/llm/openai"
"github.com/henomis/lingoose/thread"
)

func main() {
	// Configure an OpenAI LLM that must reply with a JSON object.
	llm := openai.New().
		WithModel(openai.GPT4o).
		WithResponseFormat(openai.ResponseFormatJSONObject).
		WithMaxTokens(1000)

	// Build a thread holding a single user prompt.
	conversation := thread.New().AddMessage(
		thread.NewUserMessage().AddContent(
			thread.NewTextContent("Give me a JSON object that describes a person"),
		),
	)

	// Generate appends the model's response to the thread in place.
	if err := llm.Generate(context.Background(), conversation); err != nil {
		panic(err)
	}

	fmt.Println(conversation)
}
7 changes: 7 additions & 0 deletions llm/openai/common.go
Original file line number Diff line number Diff line change
Expand Up @@ -57,3 +57,10 @@ const (

type UsageCallback func(types.Meta)
type StreamCallback func(string)

// ResponseFormat is an alias of the underlying OpenAI client's response
// format type, re-exported so callers can configure the format without
// importing the client package directly.
type ResponseFormat = openai.ChatCompletionResponseFormatType

// Response formats accepted by WithResponseFormat.
const (
	// ResponseFormatJSONObject forces the model to emit a valid JSON object.
	ResponseFormatJSONObject ResponseFormat = openai.ChatCompletionResponseFormatTypeJSONObject
	// ResponseFormatText requests plain-text output (the default behavior).
	ResponseFormatText ResponseFormat = openai.ChatCompletionResponseFormatTypeText
)
28 changes: 21 additions & 7 deletions llm/openai/openai.go
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,7 @@ type OpenAI struct {
usageCallback UsageCallback
functions map[string]Function
streamCallbackFn StreamCallback
responseFormat *ResponseFormat
toolChoice *string
cache *cache.Cache
Name string
Expand Down Expand Up @@ -99,6 +100,11 @@ func (o *OpenAI) WithCache(cache *cache.Cache) *OpenAI {
return o
}

// WithResponseFormat sets the response format for chat completions and
// returns the receiver so calls can be chained.
func (o *OpenAI) WithResponseFormat(responseFormat ResponseFormat) *OpenAI {
	// Copy the argument to a local before taking its address, so the
	// stored pointer does not alias the caller's variable.
	rf := responseFormat
	o.responseFormat = &rf
	return o
}

// SetStop sets the stop sequences for the completion.
func (o *OpenAI) SetStop(stop []string) {
o.stop = stop
Expand Down Expand Up @@ -347,14 +353,22 @@ func (o *OpenAI) generate(
}

// buildChatCompletionRequest assembles an OpenAI chat completion request
// from the thread's messages and the options configured on the receiver.
//
// NOTE(review): this span was a unified diff with the +/- markers stripped,
// so the struct literal appeared with both the pre- and post-change field
// lists; this is the reconstructed post-change version.
func (o *OpenAI) buildChatCompletionRequest(t *thread.Thread) openai.ChatCompletionRequest {
	// ResponseFormat is optional: only populate it when one was explicitly
	// configured via WithResponseFormat, otherwise leave it nil so the API
	// default applies.
	var responseFormat *openai.ChatCompletionResponseFormat
	if o.responseFormat != nil {
		responseFormat = &openai.ChatCompletionResponseFormat{
			Type: *o.responseFormat,
		}
	}

	return openai.ChatCompletionRequest{
		Model:          string(o.model),
		Messages:       threadToChatCompletionMessages(t),
		MaxTokens:      o.maxTokens,
		Temperature:    o.temperature,
		N:              DefaultOpenAINumResults,
		TopP:           DefaultOpenAITopP,
		Stop:           o.stop,
		ResponseFormat: responseFormat,
	}
}

Expand Down

0 comments on commit 3930e2b

Please sign in to comment.