This is a Go SDK for the Azure OpenAI service.
The model code is auto-generated by autorest from the 2023-03-01-preview Swagger specification.
- Authenticate with an API key or Azure Active Directory (AAD).
- Streaming responses are supported for both chat completions and completions.
Here's an example of how to use this SDK to chat with a GPT model.
package main

import (
	"context"
	"fmt"
	"os"

	"github.com/Azure/azure-sdk-for-go/sdk/azcore/to"
	"github.com/Azure/azure-sdk-for-go/sdk/azidentity"
	"github.com/xuxife/azopenai"
)

func main() {
	// Create a new client from an API key.
	client, err := azopenai.NewChatCompletionsClientFromAPIKey("<YOUR_ENDPOINT>.openai.azure.com", "<YOUR_API_KEY>", nil)
	// handle err

	// AAD (azidentity) is also supported.
	cred, err := azidentity.NewDefaultAzureCredential(nil)
	// handle err
	client, err = azopenai.NewChatCompletionsClient("<YOUR_ENDPOINT>.openai.azure.com", cred, nil)
	// handle err

	// Chat with the GPT model.
	resp, err := client.Create(
		context.Background(),
		"<YOUR_DEPLOYMENT>",
		azopenai.ChatCompletionsCreateParameters{
			Messages: []azopenai.ChatCompletionMessage{
				{
					Role:    to.Ptr(azopenai.ChatCompletionMessageRoleSystem),
					Content: to.Ptr("You are an AI assistant that helps people find information."),
				},
				{
					Role:    to.Ptr(azopenai.ChatCompletionMessageRoleUser),
					Content: to.Ptr("Hi GPT, I'm looking for a good restaurant in Seattle."), // this question is from GitHub Copilot ;)
				},
			},
		},
		nil,
	)
	// handle err

	// Print the response.
	fmt.Println(*resp.Choices[0].Message.Content)

	// You can also stream the response.
	streamResp, err := client.CreateStream(
		// same arguments as Create above...
	)
	// handle err
	respChan, err := streamResp.RecvChan(func(err error) {
		// callback invoked for errors that occur while streaming
	})
	// handle err
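	// respChan delivers the response incrementally; the loop below exits once
	// the channel is closed at the end of the stream.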
	for resp := range respChan {
		// handle the response token by token
		if resp.Choices[0].Delta.Content != nil {
			fmt.Fprint(os.Stdout, *resp.Choices[0].Delta.Content)
		}
	}
}
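The plain completions endpoint follows the same pattern as chat completions. Below is a minimal sketch; the identifiers it assumes (NewCompletionsClientFromAPIKey, CompletionsCreateParameters, Prompt, Text) are illustrative and modeled on the chat client above, so check the generated model code for the exact names.

// A minimal sketch for the completions endpoint. The identifiers below
// (NewCompletionsClientFromAPIKey, CompletionsCreateParameters, Prompt, Text)
// are assumptions modeled on the chat client; check the generated model code
// for the exact names.
package main

import (
	"context"
	"fmt"

	"github.com/Azure/azure-sdk-for-go/sdk/azcore/to"
	"github.com/xuxife/azopenai"
)

func main() {
	client, err := azopenai.NewCompletionsClientFromAPIKey("<YOUR_ENDPOINT>.openai.azure.com", "<YOUR_API_KEY>", nil)
	// handle err
	resp, err := client.Create(
		context.Background(),
		"<YOUR_DEPLOYMENT>",
		azopenai.CompletionsCreateParameters{
			Prompt: to.Ptr("Say this is a test."),
		},
		nil,
	)
	// handle err
	fmt.Println(*resp.Choices[0].Text)
}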