Skip to content

Commit

Permalink
Add batch API to integration tests and optimize batch API example
Browse files Browse the repository at this point in the history
  • Loading branch information
eiixy committed Jul 29, 2024
1 parent 4f72284 commit 82d9324
Show file tree
Hide file tree
Showing 2 changed files with 87 additions and 26 deletions.
68 changes: 68 additions & 0 deletions api_integration_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@ package openai_test
import (
"context"
"errors"
"fmt"
"io"
"os"
"testing"
Expand Down Expand Up @@ -144,6 +145,73 @@ func TestCompletionStream(t *testing.T) {
}
}

// newSquareChatCompletion builds the chat completion request shared by the
// batch sub-tests: one user message asking for the square of n.
func newSquareChatCompletion(n int) openai.ChatCompletionRequest {
	return openai.ChatCompletionRequest{
		Model: openai.GPT4oMini,
		Messages: []openai.ChatCompletionMessage{
			{
				Role:    openai.ChatMessageRoleUser,
				Content: fmt.Sprintf("What is the square of %d?", n),
			},
		},
	}
}

// TestBatchAPI exercises the batch endpoints against the production OpenAI
// API: creating a batch via file upload, via chat completions, via
// embeddings, and listing batches. It is skipped unless OPENAI_TOKEN is set.
func TestBatchAPI(t *testing.T) {
	apiToken := os.Getenv("OPENAI_TOKEN")
	if apiToken == "" {
		t.Skip("Skipping testing against production OpenAI API. Set OPENAI_TOKEN environment variable to enable it.")
	}
	ctx := context.Background()
	c := openai.NewClient(apiToken)

	// Batch creation through an uploaded JSONL file.
	req := openai.CreateBatchWithUploadFileRequest{
		Endpoint:         openai.BatchEndpointChatCompletions,
		CompletionWindow: "24h",
	}
	for i := 0; i < 5; i++ {
		req.AddChatCompletion(fmt.Sprintf("req-%d", i), newSquareChatCompletion(i+1))
	}
	_, err := c.CreateBatchWithUploadFile(ctx, req)
	checks.NoError(t, err, "CreateBatchWithUploadFile error")

	// Batch creation directly from chat completion requests.
	chatCompletions := make([]openai.BatchChatCompletion, 5)
	for i := range chatCompletions {
		chatCompletions[i] = openai.BatchChatCompletion{
			CustomID:       fmt.Sprintf("req-%d", i),
			ChatCompletion: newSquareChatCompletion(i + 1),
		}
	}
	_, err = c.CreateBatchWithChatCompletions(ctx, openai.CreateBatchWithChatCompletionsRequest{
		ChatCompletions: chatCompletions,
	})
	checks.NoError(t, err, "CreateBatchWithChatCompletions error")

	// Batch creation from embedding requests.
	embeddings := make([]openai.BatchEmbedding, 3)
	for i := range embeddings {
		embeddings[i] = openai.BatchEmbedding{
			CustomID: fmt.Sprintf("req-%d", i),
			Embedding: openai.EmbeddingRequest{
				Input:          "The food was delicious and the waiter...",
				Model:          openai.AdaEmbeddingV2,
				EncodingFormat: openai.EmbeddingEncodingFormatFloat,
			},
		}
	}
	_, err = c.CreateBatchWithEmbeddings(ctx, openai.CreateBatchWithEmbeddingsRequest{
		Embeddings: embeddings,
	})
	checks.NoError(t, err, "CreateBatchWithEmbeddings error")

	// nil/nil lists batches with default pagination parameters.
	_, err = c.ListBatch(ctx, nil, nil)
	checks.NoError(t, err, "ListBatch error")
}

func TestAPIError(t *testing.T) {
apiToken := os.Getenv("OPENAI_TOKEN")
if apiToken == "" {
Expand Down
45 changes: 19 additions & 26 deletions examples/batch/main.go
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ func main() {
ctx := context.Background()

// create batch
response, err := createBatch(ctx, client)
response, err := createBatchChatCompletion(ctx, client)
if err != nil {
log.Fatal(err)
}
Expand All @@ -25,33 +25,26 @@ func main() {
//retrieveBatch(ctx, client, batchID)
}

func createBatch(ctx context.Context, client *openai.Client) (openai.BatchResponse, error) {
req := openai.CreateBatchWithUploadFileRequest{
Endpoint: openai.BatchEndpointChatCompletions,
}
comments := []string{
"it's a good bike but if you have a problem after the sale they either do not respond to you or the parts are not available",
"I ordered 2 Mars 2.0.A blue and an Orange.Blue came first and had shipping damage to the seat post.It came with a flip seat.The Orange came about 10 days later and didnt have a flip seat.I notified customer service about both issues.They shipped a new seat post but it will not fit the blue bike because it is for a non flip seat.I am still waiting for a fix both both of these problems.\nI do not like the fact that the throttle cannot be used without the peddle assist being on.At time I feel the peddle assist is dangerous.You better not try to make a turn with the peddle assist on.",
"This was my first E-bike. Love it so far, it has plenty power and range. I use it for hunting on our land. Works well for me, I am very satisfied.",
"I would definitely recommend this bike. Easy to use. Great battery life, quick delivery!",
"Slight difficulty setting up bike but it’s perfect and love it’s speed and power",
}
prompt := "Please analyze the following product review and extract the mentioned dimensions and reasons.\n\nReview example:\n```\nThese headphones have excellent sound quality, perfect for music lovers. I wear them every day during my commute, and the noise cancellation is great. The customer service is also very good; they patiently solved my issues. The only downside is that wearing them for long periods makes my ears hurt.\n```\n\nExpected JSON output example:\n```json\n{\n \"dimensions\": [\n {\n \"dimension\": \"Usage Scenario\",\n \"value\": \"during commute\",\n \"reason\": \"user wears them every day during commute\"\n },\n {\n \"dimension\": \"Target Audience\",\n \"value\": \"music lovers\",\n \"reason\": \"user is a music lover\"\n },\n {\n \"dimension\": \"Positive Experience\",\n \"value\": \"excellent sound quality\",\n \"reason\": \"user thinks the headphones have excellent sound quality\"\n },\n {\n \"dimension\": \"Positive Experience\",\n \"value\": \"great noise cancellation\",\n \"reason\": \"user thinks the noise cancellation is great\"\n },\n {\n \"dimension\": \"Negative Experience\",\n \"value\": \"ears hurt after long periods\",\n \"reason\": \"user thinks wearing them for long periods makes ears hurt\"\n }\n ]\n}\n```\nPlease analyze accordingly and return the results in JSON format."

for i, comment := range comments {
req.AddChatCompletion(fmt.Sprintf("req-%d", i), openai.ChatCompletionRequest{
Model: openai.GPT4oMini20240718,
ResponseFormat: &openai.ChatCompletionResponseFormat{
Type: openai.ChatCompletionResponseFormatTypeJSONObject,
func createBatchChatCompletion(ctx context.Context, client *openai.Client) (openai.BatchResponse, error) {
var chatCompletions = make([]openai.BatchChatCompletion, 5)
for i := 0; i < 5; i++ {
chatCompletions[i] = openai.BatchChatCompletion{
CustomID: fmt.Sprintf("req-%d", i),
ChatCompletion: openai.ChatCompletionRequest{
Model: openai.GPT4oMini,
Messages: []openai.ChatCompletionMessage{
{
Role: openai.ChatMessageRoleUser,
Content: fmt.Sprintf("What is the square of %d?", i+1),
},
},
},
Messages: []openai.ChatCompletionMessage{
{Role: openai.ChatMessageRoleSystem, Content: prompt},
{Role: openai.ChatMessageRoleUser, Content: comment},
},
MaxTokens: 2000,
})
}
}
return client.CreateBatchWithUploadFile(ctx, req)

return client.CreateBatchWithChatCompletions(ctx, openai.CreateBatchWithChatCompletionsRequest{
ChatCompletions: chatCompletions,
})
}

func retrieveBatch(ctx context.Context, client *openai.Client, batchID string) {
Expand Down

0 comments on commit 82d9324

Please sign in to comment.