Skip to content

Commit

Permalink
deprecated model GPT3Ada changed to GPT3Babbage002 (#843)
Browse files Browse the repository at this point in the history
* deprecated model GPT3Ada changed to GPT3Babbage002

* Delete test.mp3
  • Loading branch information
Arundas666 authored Sep 4, 2024
1 parent c37cf9a commit 643da8d
Show file tree
Hide file tree
Showing 4 changed files with 6 additions and 6 deletions.
4 changes: 2 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -141,7 +141,7 @@ func main() {
ctx := context.Background()

req := openai.CompletionRequest{
Model: openai.GPT3Ada,
Model: openai.GPT3Babbage002,
MaxTokens: 5,
Prompt: "Lorem ipsum",
}
Expand Down Expand Up @@ -174,7 +174,7 @@ func main() {
ctx := context.Background()

req := openai.CompletionRequest{
Model: openai.GPT3Ada,
Model: openai.GPT3Babbage002,
MaxTokens: 5,
Prompt: "Lorem ipsum",
Stream: true,
Expand Down
4 changes: 2 additions & 2 deletions example_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -82,7 +82,7 @@ func ExampleClient_CreateCompletion() {
resp, err := client.CreateCompletion(
context.Background(),
openai.CompletionRequest{
Model: openai.GPT3Ada,
Model: openai.GPT3Babbage002,
MaxTokens: 5,
Prompt: "Lorem ipsum",
},
Expand All @@ -99,7 +99,7 @@ func ExampleClient_CreateCompletionStream() {
stream, err := client.CreateCompletionStream(
context.Background(),
openai.CompletionRequest{
Model: openai.GPT3Ada,
Model: openai.GPT3Babbage002,
MaxTokens: 5,
Prompt: "Lorem ipsum",
Stream: true,
Expand Down
2 changes: 1 addition & 1 deletion examples/completion/main.go
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ func main() {
resp, err := client.CreateCompletion(
context.Background(),
openai.CompletionRequest{
Model: openai.GPT3Ada,
Model: openai.GPT3Babbage002,
MaxTokens: 5,
Prompt: "Lorem ipsum",
},
Expand Down
2 changes: 1 addition & 1 deletion stream_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -169,7 +169,7 @@ func TestCreateCompletionStreamRateLimitError(t *testing.T) {
var apiErr *openai.APIError
_, err := client.CreateCompletionStream(context.Background(), openai.CompletionRequest{
MaxTokens: 5,
Model: openai.GPT3Ada,
Model: openai.GPT3Babbage002,
Prompt: "Hello!",
Stream: true,
})
Expand Down

0 comments on commit 643da8d

Please sign in to comment.