Skip to content

Commit

Permalink
Merge pull request #21 from otiai10/develop
Browse files Browse the repository at this point in the history
Support function_call
  • Loading branch information
otiai10 authored Jun 18, 2023
2 parents 4fe5772 + da0673f commit 4d20a5b
Show file tree
Hide file tree
Showing 8 changed files with 175 additions and 34 deletions.
36 changes: 32 additions & 4 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -30,9 +30,9 @@ import (

func main() {
client := openaigo.NewClient(os.Getenv("OPENAI_API_KEY"))
request := openaigo.ChatCompletionRequestBody{
request := openaigo.ChatRequest{
Model: "gpt-3.5-turbo",
Messages: []openaigo.ChatMessage{
Messages: []openaigo.Message{
{Role: "user", Content: "Hello!"},
},
}
Expand All @@ -51,6 +51,8 @@ cd openaigo
OPENAI_API_KEY=YourAPIKey go run ./testapp/main.go
```

See [test app](https://github.com/otiai10/openaigo/blob/main/testapp/main.go) as a working example.

# API Keys?

Visit https://beta.openai.com/account/api-keys and you can create your own API key to get started [for free](https://openai.com/api/pricing/).
Expand All @@ -62,8 +64,9 @@ Visit https://beta.openai.com/account/api-keys and you can create your own API k
- [x] [Retrieve model](https://beta.openai.com/docs/api-reference/models/retrieve)
- Text Completions
- [x] [Create completion](https://beta.openai.com/docs/api-reference/completions/create)
- **Chat Completions** <- NEW!
- **Chat Completions**
- [x] [Create Chat Completions](https://platform.openai.com/docs/api-reference/chat/create)
- [x] [with function_call](https://openai.com/blog/function-calling-and-other-api-updates) <- New
- Edits
- [x] [Create edits](https://beta.openai.com/docs/api-reference/edits/create)
- Images
Expand Down Expand Up @@ -91,11 +94,36 @@ Visit https://beta.openai.com/account/api-keys and you can create your own API k
- ~~[List engines](https://beta.openai.com/docs/api-reference/engines/list)~~
- ~~[Retrieve engine](https://beta.openai.com/docs/api-reference/engines/retrieve)~~

# Need `function_call`?

```go
request := openaigo.ChatRequest{
Messages: []openaigo.Message{
{Role: "user", Content: "How's the weather today in Tokyo?"},
},
Functions: []openaigo.Function{
{
Name: "get_weather",
Parameters: openaigo.Parameters{
Type: "object",
Properties: map[string]map[string]any{
"location": {"type": "string"},
"date": {"type": "string", "description": "ISO 8601 date string"},
},
Required: []string{"location"},
},
},
},
}
```

See [test app](https://github.com/otiai10/openaigo/blob/main/testapp/main.go) as a working example.

# Need `stream`?

```go
client := openaigo.NewClient(OPENAI_API_KEY)
request := openaigo.ChatCompletionRequestBody{
request := openaigo.ChatRequest{
Stream: true,
StreamCallback: func(res ChatCompletionResponse, done bool, err error) {
// Do what you want!
Expand Down
69 changes: 56 additions & 13 deletions chat.go
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ type ChatCompletionRequestBody struct {
// In the example above, the user’s final question of “Where was it played?” only makes sense in the context of the prior messages about the World Series of 2020.
// Because the models have no memory of past requests, all relevant information must be supplied via the conversation.
// If a conversation cannot fit within the model’s token limit, it will need to be shortened in some way.
Messages []ChatMessage `json:"messages"`
Messages []Message `json:"messages"`

// Temperature: What sampling temperature to use, between 0 and 2.
// Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic.
Expand Down Expand Up @@ -78,14 +78,36 @@ type ChatCompletionRequestBody struct {
// User: A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse. Learn more.
// https://platform.openai.com/docs/guides/safety-best-practices/end-user-ids
User string `json:"user,omitempty"`

// Functions: A list of functions which GPT is allowed to request to call.
Functions []Function `json:"functions,omitempty"`

// FunctionCall: Controls if/which function the model may call. When omitted, the API defaults to "auto".
FunctionCall string `json:"function_call,omitempty"`
}

// Function declares a callable function that the model is allowed to
// request via function_call. It is sent in the Functions field of a
// chat completion request.
type Function struct {
	// Name of the function, referenced by the model in its function_call response.
	Name string `json:"name,omitempty"`
	// Description helps the model decide when to call this function.
	Description string `json:"description,omitempty"`
	// Parameters is the JSON-Schema-style argument specification.
	// NOTE(review): `omitempty` has no effect on a non-pointer struct
	// field — Parameters is always serialized. Presumably harmless since
	// a Function without Parameters is not useful; confirm if an absent
	// "parameters" key is ever required.
	Parameters Parameters `json:"parameters,omitempty"`
}

// Parameters describes a Function's argument schema in JSON-Schema
// object form, as required by the OpenAI function-calling API.
type Parameters struct {
	Type string `json:"type,omitempty"` // Must be "object"
	// Properties maps each argument name to a JSON-Schema fragment,
	// e.g. {"type": "string", "description": "..."}.
	Properties map[string]map[string]any `json:"properties,omitempty"`
	// Required lists the names of properties the model must supply.
	Required []string `json:"required,omitempty"`
}

// ChatMessage: An element of messages parameter.
// ChatRequest is shorthand for ChatCompletionRequestBody.
// NOTE(review): despite the original comment, this is a defined type,
// not a type alias (that would be `type ChatRequest = ...`), so an
// explicit conversion — ChatCompletionRequestBody(req) — is required
// when passing it where ChatCompletionRequestBody is expected.
type ChatRequest ChatCompletionRequestBody

// Message: An element of messages parameter.
// The main input is the messages parameter. Messages must be an array of message objects,
// where each object has a role (either “system”, “user”, or “assistant”)
// and content (the content of the message).
// Conversations can be as short as 1 message or fill many pages.
type ChatMessage struct {
// See https://platform.openai.com/docs/api-reference/chat/create#chat/create-messages
type Message struct {

// Role: Either of "system", "user", "assistant".
// Typically, a conversation is formatted with a system message first, followed by alternating user and assistant messages.
Expand All @@ -96,19 +118,40 @@ type ChatMessage struct {

// Content: A content of the message.
Content string `json:"content"`

// FunctionCall requested by ChatGPT.
// Only appears in a response from ChatGPT in which ChatGPT wants to call a function.
FunctionCall *FunctionCall `json:"function_call,omitempty"`

// Name of the function called, to tell this message is a result of function_call.
// Only appears in a request from us when the previous message is "function_call" requested by ChatGPT.
Name string `json:"name,omitempty"`
}

// FunctionCall describes a function invocation requested by the model.
// It appears on a response Message when the model chooses to call one
// of the Functions provided in the request.
type FunctionCall struct {
	// Name of the function the model wants to call.
	Name string `json:"name,omitempty"`
	// ArgumentsRaw holds the arguments exactly as returned by the API:
	// a JSON-encoded string that the caller must unmarshal.
	ArgumentsRaw string `json:"arguments,omitempty"`
	// Arguments map[string]any `json:"arguments,omitempty"`
}

// Arg is a commented-out sketch for typed access to a parsed Arguments
// map; it is unused while ArgumentsRaw (the raw JSON string) is the
// exposed representation.
// func Arg[T any](fc FunctionCall, name string) (res T) {
// 	if fc.Arguments == nil || fc.Arguments[name] == nil {
// 		return
// 	}
// 	return fc.Arguments[name].(T)
// }

type ChatCompletionResponse struct {
ID string `json:"id"`
Object string `json:"object"`
Created int64 `json:"created"`
Choices []ChatChoice `json:"choices"`
Usage Usage `json:"usage"`
ID string `json:"id"`
Object string `json:"object"`
Created int64 `json:"created"`
Choices []Choice `json:"choices"`
Usage Usage `json:"usage"`
}

type ChatChoice struct {
Index int `json:"index"`
Message ChatMessage `json:"message"`
FinishReason string `json:"finish_reason"`
Delta ChatMessage `json:"delta"` // Only appears in stream response
type Choice struct {
Index int `json:"index"`
Message Message `json:"message"`
FinishReason string `json:"finish_reason"`
Delta Message `json:"delta"` // Only appears in stream response
}
31 changes: 31 additions & 0 deletions chat_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -34,3 +34,34 @@ func TestClient_ChatCompletion_Stream(t *testing.T) {
Expect(t, res).TypeOf("openaigo.ChatCompletionResponse")
wg.Wait()
}

// TestClient_ChatCompletion_FunctionCall exercises the function_call
// flow: a ChatRequest carrying a Function definition is sent to the
// mock server and must round-trip without error.
func TestClient_ChatCompletion_FunctionCall(t *testing.T) {
	client := NewClient("")
	client.BaseURL = mockserver.URL

	// Build the request up front so the call site stays readable.
	request := ChatRequest{
		Model: GPT3_5Turbo,
		Messages: []Message{
			{Role: "user", Content: "Hello, I'm John."},
		},
		Functions: []Function{
			{
				Name: "test_method",
				Parameters: Parameters{
					Type: "object",
					Properties: map[string]map[string]any{
						"arg_0": {
							"type":        "string",
							"description": "This is a test",
						},
					},
					Required: []string{"arg_0"},
				},
			},
		},
		FunctionCall: "auto",
	}

	res, err := client.Chat(nil, request)
	Expect(t, err).ToBe(nil)
	Expect(t, res).TypeOf("openaigo.ChatCompletionResponse")
}
10 changes: 5 additions & 5 deletions completion.go
Original file line number Diff line number Diff line change
Expand Up @@ -89,10 +89,10 @@ type CompletionRequestBody struct {
}

type CompletionResponse struct {
ID string `json:"id"`
Object ObjectType `json:"object"`
Created int64 `json:"created"`
Model string `json:"model"`
Choices []Choice `json:"choices"`
ID string `json:"id"`
Object ObjectType `json:"object"`
Created int64 `json:"created"`
Model string `json:"model"`
Choices []CompletionChoice `json:"choices"`
Usage Usage
}
8 changes: 4 additions & 4 deletions edits.go
Original file line number Diff line number Diff line change
Expand Up @@ -10,8 +10,8 @@ type EditCreateRequestBody struct {
}

type EditCreateResponse struct {
Object ObjectType `json:"object"`
Created int64 `json:"created"`
Choices []Choice `json:"choices"`
Usage Usage `json:"usage"`
Object ObjectType `json:"object"`
Created int64 `json:"created"`
Choices []CompletionChoice `json:"choices"`
Usage Usage `json:"usage"`
}
4 changes: 2 additions & 2 deletions endpoints.go
Original file line number Diff line number Diff line change
Expand Up @@ -193,8 +193,8 @@ func (client *Client) DeleteFineTuneModel(ctx context.Context, id string) (resp

// Chat, short-hand of ChatCompletion.
// Creates a completion for the chat message.
func (client *Client) Chat(ctx context.Context, body ChatCompletionRequestBody) (resp ChatCompletionResponse, err error) {
return client.ChatCompletion(ctx, body)
func (client *Client) Chat(ctx context.Context, body ChatRequest) (resp ChatCompletionResponse, err error) {
return client.ChatCompletion(ctx, ChatCompletionRequestBody(body))
}

// ChatCompletion: POST https://api.openai.com/v1/chat/completions
Expand Down
2 changes: 1 addition & 1 deletion response.go
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
package openaigo

type Choice struct {
type CompletionChoice struct {
Text string `json:"text"`
Index int `json:"index"`
LogProbs int `json:"logprobs"`
Expand Down
49 changes: 44 additions & 5 deletions testapp/main.go
Original file line number Diff line number Diff line change
Expand Up @@ -73,9 +73,9 @@ var (
Name: "chat_completion",
Run: func() (any, error) {
client := openaigo.NewClient(OPENAI_API_KEY)
request := openaigo.ChatCompletionRequestBody{
request := openaigo.ChatRequest{
Model: openaigo.GPT3_5Turbo,
Messages: []openaigo.ChatMessage{
Messages: []openaigo.Message{
{Role: "user", Content: "Hello!"},
},
}
Expand All @@ -87,9 +87,9 @@ var (
Name: "[SKIP] chat_completion_GPT4",
Run: func() (any, error) {
client := openaigo.NewClient(OPENAI_API_KEY)
request := openaigo.ChatCompletionRequestBody{
request := openaigo.ChatRequest{
Model: openaigo.GPT4,
Messages: []openaigo.ChatMessage{
Messages: []openaigo.Message{
{Role: "user", Content: "Who are you?"},
},
}
Expand All @@ -114,7 +114,7 @@ var (
request := openaigo.ChatCompletionRequestBody{
Model: openaigo.GPT3_5Turbo_0613,
StreamCallback: calback,
Messages: []openaigo.ChatMessage{
Messages: []openaigo.Message{
{
Role: "user",
Content: fmt.Sprintf("What are the historical events happend on %s", time.Now().Format("01/02"))},
Expand All @@ -135,6 +135,45 @@ var (
}
},
},

// Test case using "function_call"
{
Name: "function_call",
Run: func() (any, error) {
conversation := []openaigo.Message{
{Role: "user", Content: "What's the weather in Tokyo today?"},
}
client := openaigo.NewClient(OPENAI_API_KEY)
request := openaigo.ChatRequest{
Model: openaigo.GPT3_5Turbo_0613,
Messages: conversation,
Functions: []openaigo.Function{
{
Name: "get_weather",
Description: "A function to get weather information",
Parameters: openaigo.Parameters{
Type: "object",
Properties: map[string]map[string]any{
"location": {"type": "string"},
"date": {"type": "string", "description": "ISO 8601 date string"},
},
Required: []string{"location"},
},
},
},
}
res0, err := client.Chat(nil, request)
conversation = append(conversation, res0.Choices[0].Message)
conversation = append(conversation, openaigo.Message{
Role: "function",
Name: "get_weather",
Content: "20%:thunderstorm,70%:sandstorm,10%:snowy",
})
request.Messages = conversation
res, err := client.Chat(nil, request)
return res, err
},
},
}

list bool
Expand Down

0 comments on commit 4d20a5b

Please sign in to comment.