// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

package openai_test

import (
	"context"
	"errors"
	"os"
	"testing"

	"github.com/openai/openai-go"
	"github.com/openai/openai-go/internal/testutil"
	"github.com/openai/openai-go/option"
	"github.com/openai/openai-go/shared"
)
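
// TestCompletionNewWithOptionalParams exercises Completions.New with every
// optional request parameter populated. It skips (returns early) when no test
// server is reachable at baseURL, so it only runs against a local mock API.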
func TestCompletionNewWithOptionalParams(t *testing.T) {
	baseURL := "http://localhost:4010"
	if envURL, ok := os.LookupEnv("TEST_API_BASE_URL"); ok {
		baseURL = envURL
	}
	if !testutil.CheckTestServer(t, baseURL) {
		return
	}
	client := openai.NewClient(
		option.WithBaseURL(baseURL),
		option.WithAPIKey("My API Key"),
	)
	_, err := client.Completions.New(context.TODO(), openai.CompletionNewParams{
		Model:            openai.F(openai.CompletionNewParamsModelGPT3_5TurboInstruct),
		Prompt:           openai.F[openai.CompletionNewParamsPromptUnion](shared.UnionString("This is a test.")),
		BestOf:           openai.F(int64(0)),
		Echo:             openai.F(true),
		FrequencyPenalty: openai.F(-2.000000),
		LogitBias: openai.F(map[string]int64{
			"foo": int64(0),
		}),
		Logprobs:        openai.F(int64(0)),
		MaxTokens:       openai.F(int64(16)),
		N:               openai.F(int64(1)),
		PresencePenalty: openai.F(-2.000000),
		Seed:            openai.F(int64(-9007199254740991)),
		Stop:            openai.F[openai.CompletionNewParamsStopUnion](shared.UnionString("\n")),
		StreamOptions: openai.F(openai.ChatCompletionStreamOptionsParam{
			IncludeUsage: openai.F(true),
		}),
		Suffix:      openai.F("test."),
		Temperature: openai.F(1.000000),
		TopP:        openai.F(1.000000),
		User:        openai.F("user-1234"),
	})
	if err != nil {
		var apierr *openai.Error
		if errors.As(err, &apierr) {
			t.Log(string(apierr.DumpRequest(true)))
		}
		t.Fatalf("err should be nil: %s", err.Error())
	}
}