From 92c46cf76e2473e0e28c581713aa38490a18e95f Mon Sep 17 00:00:00 2001 From: Hiromu OCHIAI Date: Sun, 18 Jun 2023 16:10:03 +0900 Subject: [PATCH 1/3] Fix interface wording: shorthand for chatting --- README.md | 4 ++-- chat.go | 29 ++++++++++++++++------------- completion.go | 10 +++++----- edits.go | 8 ++++---- endpoints.go | 4 ++-- response.go | 2 +- testapp/main.go | 10 +++++----- 7 files changed, 35 insertions(+), 32 deletions(-) diff --git a/README.md b/README.md index 19e1a47..2be4b66 100644 --- a/README.md +++ b/README.md @@ -30,9 +30,9 @@ import ( func main() { client := openaigo.NewClient(os.Getenv("OPENAI_API_KEY")) - request := openaigo.ChatCompletionRequestBody{ + request := openaigo.ChatRequest{ Model: "gpt-3.5-turbo", - Messages: []openaigo.ChatMessage{ + Messages: []openaigo.Message{ {Role: "user", Content: "Hello!"}, }, } diff --git a/chat.go b/chat.go index c07dac3..c489883 100644 --- a/chat.go +++ b/chat.go @@ -15,7 +15,7 @@ type ChatCompletionRequestBody struct { // In the example above, the user’s final question of “Where was it played?” only makes sense in the context of the prior messages about the World Series of 2020. // Because the models have no memory of past requests, all relevant information must be supplied via the conversation. // If a conversation cannot fit within the model’s token limit, it will need to be shortened in some way. - Messages []ChatMessage `json:"messages"` + Messages []Message `json:"messages"` // Temperature: What sampling temperature to use, between 0 and 2. // Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. @@ -80,12 +80,15 @@ type ChatCompletionRequestBody struct { User string `json:"user,omitempty"` } -// ChatMessage: An element of messages parameter. +// ChatRequest is just an alias of ChatCompletionRequestBody. +type ChatRequest ChatCompletionRequestBody + +// Message: An element of messages parameter. // The main input is the messages parameter. Messages must be an array of message objects, // where each object has a role (either “system”, “user”, or “assistant”) // and content (the content of the message). // Conversations can be as short as 1 message or fill many pages. -type ChatMessage struct { +type Message struct { // Role: Either of "system", "user", "assistant". // Typically, a conversation is formatted with a system message first, followed by alternating user and assistant messages. 
@@ -99,16 +102,16 @@ type ChatMessage struct { } type ChatCompletionResponse struct { - ID string `json:"id"` - Object string `json:"object"` - Created int64 `json:"created"` - Choices []ChatChoice `json:"choices"` - Usage Usage `json:"usage"` + ID string `json:"id"` + Object string `json:"object"` + Created int64 `json:"created"` + Choices []Choice `json:"choices"` + Usage Usage `json:"usage"` } -type ChatChoice struct { - Index int `json:"index"` - Message ChatMessage `json:"message"` - FinishReason string `json:"finish_reason"` - Delta ChatMessage `json:"delta"` // Only appears in stream response +type Choice struct { + Index int `json:"index"` + Message Message `json:"message"` + FinishReason string `json:"finish_reason"` + Delta Message `json:"delta"` // Only appears in stream response } diff --git a/completion.go b/completion.go index 7b601c2..cf6dbfc 100644 --- a/completion.go +++ b/completion.go @@ -89,10 +89,10 @@ type CompletionRequestBody struct { } type CompletionResponse struct { - ID string `json:"id"` - Object ObjectType `json:"object"` - Created int64 `json:"created"` - Model string `json:"model"` - Choices []Choice `json:"choices"` + ID string `json:"id"` + Object ObjectType `json:"object"` + Created int64 `json:"created"` + Model string `json:"model"` + Choices []CompletionChoice `json:"choices"` Usage Usage } diff --git a/edits.go b/edits.go index 3f6120d..3c52b42 100644 --- a/edits.go +++ b/edits.go @@ -10,8 +10,8 @@ type EditCreateRequestBody struct { } type EditCreateResponse struct { - Object ObjectType `json:"object"` - Created int64 `json:"created"` - Choices []Choice `json:"choices"` - Usage Usage `json:"usage"` + Object ObjectType `json:"object"` + Created int64 `json:"created"` + Choices []CompletionChoice `json:"choices"` + Usage Usage `json:"usage"` } diff --git a/endpoints.go b/endpoints.go index 9815a7b..99ddfb0 100644 --- a/endpoints.go +++ b/endpoints.go @@ -193,8 +193,8 @@ func (client *Client) DeleteFineTuneModel(ctx context.Context, id string) (resp // Chat, short-hand of ChatCompletion. // Creates a completion for the chat message. 
-func (client *Client) Chat(ctx context.Context, body ChatCompletionRequestBody) (resp ChatCompletionResponse, err error) {
-	return client.ChatCompletion(ctx, body)
+func (client *Client) Chat(ctx context.Context, body ChatRequest) (resp ChatCompletionResponse, err error) {
+	return client.ChatCompletion(ctx, ChatCompletionRequestBody(body))
 }
 
 // ChatCompletion: POST https://api.openai.com/v1/chat/completions
diff --git a/response.go b/response.go
index 9ced8f5..529a442 100644
--- a/response.go
+++ b/response.go
@@ -1,6 +1,6 @@
 package openaigo
 
-type Choice struct {
+type CompletionChoice struct {
 	Text string `json:"text"`
 	Index int `json:"index"`
 	LogProbs int `json:"logprobs"`
diff --git a/testapp/main.go b/testapp/main.go
index f4872fd..52f019f 100644
--- a/testapp/main.go
+++ b/testapp/main.go
@@ -73,9 +73,9 @@ var (
 		Name: "chat_completion",
 		Run: func() (any, error) {
 			client := openaigo.NewClient(OPENAI_API_KEY)
-			request := openaigo.ChatCompletionRequestBody{
+			request := openaigo.ChatRequest{
 				Model: openaigo.GPT3_5Turbo,
-				Messages: []openaigo.ChatMessage{
+				Messages: []openaigo.Message{
 					{Role: "user", Content: "Hello!"},
 				},
 			}
@@ -87,9 +87,9 @@ var (
 		Name: "[SKIP] chat_completion_GPT4",
 		Run: func() (any, error) {
 			client := openaigo.NewClient(OPENAI_API_KEY)
-			request := openaigo.ChatCompletionRequestBody{
+			request := openaigo.ChatRequest{
 				Model: openaigo.GPT4,
-				Messages: []openaigo.ChatMessage{
+				Messages: []openaigo.Message{
 					{Role: "user", Content: "Who are you?"},
 				},
 			}
@@ -114,7 +114,7 @@ var (
 			request := openaigo.ChatCompletionRequestBody{
 				Model: openaigo.GPT3_5Turbo_0613,
 				StreamCallback: calback,
-				Messages: []openaigo.ChatMessage{
+				Messages: []openaigo.Message{
 					{
 						Role: "user", Content: fmt.Sprintf("What are the historical events happend on %s", time.Now().Format("01/02"))},

From 9c1cf23194a74817f0a1e1db5d0feb5469fdae18 Mon Sep 17 00:00:00 2001
From: Hiromu OCHIAI
Date: Mon, 19 Jun 2023 07:06:09 +0900
Subject: [PATCH 2/3] Support function_call

---
 chat.go | 40 ++++++++++++++++++++++++++++++++++++++++
 chat_test.go | 31 +++++++++++++++++++++++++++++++
 testapp/main.go | 39 +++++++++++++++++++++++++++++++++++++++
 3 files changed, 110 insertions(+)

diff --git a/chat.go b/chat.go
index c489883..7211164 100644
--- a/chat.go
+++ b/chat.go
@@ -78,6 +78,24 @@ type ChatCompletionRequestBody struct {
 	// User: A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse. Learn more.
 	// https://platform.openai.com/docs/guides/safety-best-practices/end-user-ids
 	User string `json:"user,omitempty"`
+
+	// Functions: A list of functions which GPT is allowed to request to call.
+	Functions []Function `json:"functions,omitempty"`
+
+	// FunctionCall: You usually do not need to set this; the default is "auto", which lets the model decide whether to call a function.
+	FunctionCall string `json:"function_call,omitempty"`
+}
+
+type Function struct {
+	Name string `json:"name,omitempty"`
+	Description string `json:"description,omitempty"`
+	Parameters Parameters `json:"parameters,omitempty"`
+}
+
+type Parameters struct {
+	Type string `json:"type,omitempty"` // Must be "object"
+	Properties map[string]map[string]any `json:"properties,omitempty"`
+	Required []string `json:"required,omitempty"`
 }
 
 // ChatRequest is just an alias of ChatCompletionRequestBody.
@@ -88,6 +106,7 @@ type ChatRequest ChatCompletionRequestBody
 // Message: An element of messages parameter.
 // The main input is the messages parameter. Messages must be an array of message objects,
 // where each object has a role (either “system”, “user”, or “assistant”)
 // and content (the content of the message).
 // Conversations can be as short as 1 message or fill many pages.
+// See https://platform.openai.com/docs/api-reference/chat/create#chat/create-messages type Message struct { // Role: Either of "system", "user", "assistant". @@ -99,8 +118,29 @@ type Message struct { // Content: A content of the message. Content string `json:"content"` + + // FunctionCall requested by ChatGPT. + // Only appears in a response from ChatGPT in which ChatGPT wants to call a function. + FunctionCall *FunctionCall `json:"function_call,omitempty"` + + // Name of the function called, to tell this message is a result of function_call. + // Only appears in a request from us when the previous message is "function_call" requested by ChatGPT. + Name string `json:"name,omitempty"` } +type FunctionCall struct { + Name string `json:"name,omitempty"` + ArgumentsRaw string `json:"arguments,omitempty"` + // Arguments map[string]any `json:"arguments,omitempty"` +} + +// func Arg[T any](fc FunctionCall, name string) (res T) { +// if fc.Arguments == nil || fc.Arguments[name] == nil { +// return +// } +// return fc.Arguments[name].(T) +// } + type ChatCompletionResponse struct { ID string `json:"id"` Object string `json:"object"` diff --git a/chat_test.go b/chat_test.go index 3117358..a39deb1 100644 --- a/chat_test.go +++ b/chat_test.go @@ -34,3 +34,34 @@ func TestClient_ChatCompletion_Stream(t *testing.T) { Expect(t, res).TypeOf("openaigo.ChatCompletionResponse") wg.Wait() } + +func TestClient_ChatCompletion_FunctionCall(t *testing.T) { + client := NewClient("") + client.BaseURL = mockserver.URL + res, err := client.Chat(nil, ChatRequest{ + Model: GPT3_5Turbo, + Messages: []Message{ + { + Role: "user", Content: "Hello, I'm John.", + }, + }, + Functions: []Function{ + { + Name: "test_method", + Parameters: Parameters{ + Type: "object", + Properties: map[string]map[string]any{ + "arg_0": { + "type": "string", + "description": "This is a test", + }, + }, + Required: []string{"arg_0"}, + }, + }, + }, + FunctionCall: "auto", + }) + Expect(t, err).ToBe(nil) + Expect(t, res).TypeOf("openaigo.ChatCompletionResponse") +} diff --git a/testapp/main.go b/testapp/main.go index 52f019f..9e1a426 100644 --- a/testapp/main.go +++ b/testapp/main.go @@ -135,6 +135,45 @@ var ( } }, }, + + // Test case using "function_call" + { + Name: "function_call", + Run: func() (any, error) { + conversation := []openaigo.Message{ + {Role: "user", Content: "What's the weather in Tokyo today?"}, + } + client := openaigo.NewClient(OPENAI_API_KEY) + request := openaigo.ChatRequest{ + Model: openaigo.GPT3_5Turbo_0613, + Messages: conversation, + Functions: []openaigo.Function{ + { + Name: "get_weather", + Description: "A function to get weather information", + Parameters: openaigo.Parameters{ + Type: "object", + Properties: map[string]map[string]any{ + "location": {"type": "string"}, + "date": {"type": "string", "description": "ISO 8601 date string"}, + }, + Required: []string{"location"}, + }, + }, + }, + } + res0, err := client.Chat(nil, request) + conversation = append(conversation, res0.Choices[0].Message) + conversation = append(conversation, openaigo.Message{ + Role: "function", + Name: "get_weather", + Content: "20%:thunderstorm,70%:sandstorm,10%:snowy", + }) + request.Messages = conversation + res, err := client.Chat(nil, request) + return res, err + }, + }, } list bool From da0673f89d920cba2fc550ab9e6c3c312ee7561d Mon Sep 17 00:00:00 2001 From: Hiromu OCHIAI Date: Mon, 19 Jun 2023 08:06:58 +0900 Subject: [PATCH 3/3] Update README --- README.md | 32 ++++++++++++++++++++++++++++++-- 1 file changed, 30 insertions(+), 2 
deletions(-)

diff --git a/README.md b/README.md
index 2be4b66..e64f08b 100644
--- a/README.md
+++ b/README.md
@@ -51,6 +51,8 @@ cd openaigo
 OPENAI_API_KEY=YourAPIKey go run ./testapp/main.go
 ```
 
+See [test app](https://github.com/otiai10/openaigo/blob/main/testapp/main.go) as a working example.
+
 # API Keys?
 
 Visit https://beta.openai.com/account/api-keys and you can create your own API key to get started [for free](https://openai.com/api/pricing/).
@@ -62,8 +64,9 @@ Visit https://beta.openai.com/account/api-keys and you can create your own API k
 - [x] [Retrieve model](https://beta.openai.com/docs/api-reference/models/retrieve)
 - Text Completions
   - [x] [Create completion](https://beta.openai.com/docs/api-reference/completions/create)
-- **Chat Completions** <- NEW!
+- **Chat Completions**
   - [x] [Create Chat Completions](https://platform.openai.com/docs/api-reference/chat/create)
+    - [x] [with function_call](https://openai.com/blog/function-calling-and-other-api-updates) <- New
 - Edits
   - [x] [Create edits](https://beta.openai.com/docs/api-reference/edits/create)
 - Images
@@ -91,11 +94,36 @@ Visit https://beta.openai.com/account/api-keys and you can create your own API k
 - ~~[List engines](https://beta.openai.com/docs/api-reference/engines/list)~~
 - ~~[Retrieve engine](https://beta.openai.com/docs/api-reference/engines/retrieve)~~
 
+# Need `function_call`?
+
+```go
+request := openaigo.ChatRequest{
+	Messages: []openaigo.Message{
+		{Role: "user", Content: "How's the weather today in Tokyo?"},
+	},
+	Functions: []openaigo.Function{
+		{
+			Name: "get_weather",
+			Parameters: openaigo.Parameters{
+				Type: "object",
+				Properties: map[string]map[string]any{
+					"location": {"type": "string"},
+					"date": {"type": "string", "description": "ISO 8601 date string"},
+				},
+				Required: []string{"location"},
+			},
+		},
+	},
+}
+```
+
+See [test app](https://github.com/otiai10/openaigo/blob/main/testapp/main.go) as a working example.
+
 # Need `stream`?
 
 ```go
 client := openaigo.NewClient(OPENAI_API_KEY)
-request := openaigo.ChatCompletionRequestBody{
+request := openaigo.ChatRequest{
 	Stream: true,
 	StreamCallback: func(res ChatCompletionResponse, done bool, err error) {
 		// Do what you want!
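
Note for readers of this series: the README example above builds a `function_call` request but stops before handling the model's reply. The sketch below is not part of the patches; it mirrors the `function_call` scenario added to `testapp/main.go`, and the local weather lookup (the "sunny in ..." string) is a hypothetical stand-in. It shows how a caller might detect `Message.FunctionCall`, decode `ArgumentsRaw` with `encoding/json`, and send the result back with `Role: "function"`.

```go
// Sketch only: one way to consume Message.FunctionCall as defined in chat.go above.
// The weather result is stubbed; a real caller would run its own get_weather logic.
package main

import (
	"context"
	"encoding/json"
	"fmt"
	"os"

	"github.com/otiai10/openaigo"
)

func main() {
	client := openaigo.NewClient(os.Getenv("OPENAI_API_KEY"))
	conversation := []openaigo.Message{
		{Role: "user", Content: "What's the weather in Tokyo today?"},
	}
	request := openaigo.ChatRequest{
		Model:    openaigo.GPT3_5Turbo_0613,
		Messages: conversation,
		Functions: []openaigo.Function{{
			Name:        "get_weather",
			Description: "A function to get weather information",
			Parameters: openaigo.Parameters{
				Type: "object",
				Properties: map[string]map[string]any{
					"location": {"type": "string"},
				},
				Required: []string{"location"},
			},
		}},
	}
	res, err := client.Chat(context.Background(), request)
	if err != nil {
		panic(err)
	}
	msg := res.Choices[0].Message
	if msg.FunctionCall != nil {
		// ArgumentsRaw is a JSON-encoded string; decode it to read the arguments.
		var args map[string]any
		if err := json.Unmarshal([]byte(msg.FunctionCall.ArgumentsRaw), &args); err != nil {
			panic(err)
		}
		location, _ := args["location"].(string)
		// Run the requested function locally (stubbed here), then send the result
		// back as a message with Role "function" and the called function's Name.
		conversation = append(conversation, msg, openaigo.Message{
			Role:    "function",
			Name:    msg.FunctionCall.Name,
			Content: "sunny in " + location, // stand-in for a real get_weather result
		})
		request.Messages = conversation
		if res, err = client.Chat(context.Background(), request); err != nil {
			panic(err)
		}
	}
	fmt.Println(res.Choices[0].Message.Content)
}
```

Appending both the assistant's `function_call` message and the `Role: "function"` result to the conversation before the second `Chat` call, as `testapp/main.go` does in PATCH 2/3, is what lets the model compose a final natural-language answer.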