// Mirror of https://github.com/openai/openai-go.git (synced 2026-04-01 17:17:14 +09:00).
// Original file: 135 lines, 3.4 KiB, Go.
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
package openai_test
import (
	"context"
	"errors"
	"os"
	"testing"

	"github.com/openai/openai-go"
	"github.com/openai/openai-go/internal/testutil"
	"github.com/openai/openai-go/option"
	"github.com/openai/openai-go/shared"
)
func TestVectorStoreFileBatchNewWithOptionalParams(t *testing.T) {
|
|
baseURL := "http://localhost:4010"
|
|
if envURL, ok := os.LookupEnv("TEST_API_BASE_URL"); ok {
|
|
baseURL = envURL
|
|
}
|
|
if !testutil.CheckTestServer(t, baseURL) {
|
|
return
|
|
}
|
|
client := openai.NewClient(
|
|
option.WithBaseURL(baseURL),
|
|
option.WithAPIKey("My API Key"),
|
|
)
|
|
_, err := client.VectorStores.FileBatches.New(
|
|
context.TODO(),
|
|
"vs_abc123",
|
|
openai.VectorStoreFileBatchNewParams{
|
|
FileIDs: openai.F([]string{"string"}),
|
|
Attributes: openai.F(map[string]openai.VectorStoreFileBatchNewParamsAttributesUnion{
|
|
"foo": shared.UnionString("string"),
|
|
}),
|
|
ChunkingStrategy: openai.F[openai.FileChunkingStrategyParamUnion](openai.AutoFileChunkingStrategyParam{
|
|
Type: openai.F(openai.AutoFileChunkingStrategyParamTypeAuto),
|
|
}),
|
|
},
|
|
)
|
|
if err != nil {
|
|
var apierr *openai.Error
|
|
if errors.As(err, &apierr) {
|
|
t.Log(string(apierr.DumpRequest(true)))
|
|
}
|
|
t.Fatalf("err should be nil: %s", err.Error())
|
|
}
|
|
}
func TestVectorStoreFileBatchGet(t *testing.T) {
|
|
baseURL := "http://localhost:4010"
|
|
if envURL, ok := os.LookupEnv("TEST_API_BASE_URL"); ok {
|
|
baseURL = envURL
|
|
}
|
|
if !testutil.CheckTestServer(t, baseURL) {
|
|
return
|
|
}
|
|
client := openai.NewClient(
|
|
option.WithBaseURL(baseURL),
|
|
option.WithAPIKey("My API Key"),
|
|
)
|
|
_, err := client.VectorStores.FileBatches.Get(
|
|
context.TODO(),
|
|
"vs_abc123",
|
|
"vsfb_abc123",
|
|
)
|
|
if err != nil {
|
|
var apierr *openai.Error
|
|
if errors.As(err, &apierr) {
|
|
t.Log(string(apierr.DumpRequest(true)))
|
|
}
|
|
t.Fatalf("err should be nil: %s", err.Error())
|
|
}
|
|
}
func TestVectorStoreFileBatchCancel(t *testing.T) {
|
|
baseURL := "http://localhost:4010"
|
|
if envURL, ok := os.LookupEnv("TEST_API_BASE_URL"); ok {
|
|
baseURL = envURL
|
|
}
|
|
if !testutil.CheckTestServer(t, baseURL) {
|
|
return
|
|
}
|
|
client := openai.NewClient(
|
|
option.WithBaseURL(baseURL),
|
|
option.WithAPIKey("My API Key"),
|
|
)
|
|
_, err := client.VectorStores.FileBatches.Cancel(
|
|
context.TODO(),
|
|
"vector_store_id",
|
|
"batch_id",
|
|
)
|
|
if err != nil {
|
|
var apierr *openai.Error
|
|
if errors.As(err, &apierr) {
|
|
t.Log(string(apierr.DumpRequest(true)))
|
|
}
|
|
t.Fatalf("err should be nil: %s", err.Error())
|
|
}
|
|
}
func TestVectorStoreFileBatchListFilesWithOptionalParams(t *testing.T) {
|
|
baseURL := "http://localhost:4010"
|
|
if envURL, ok := os.LookupEnv("TEST_API_BASE_URL"); ok {
|
|
baseURL = envURL
|
|
}
|
|
if !testutil.CheckTestServer(t, baseURL) {
|
|
return
|
|
}
|
|
client := openai.NewClient(
|
|
option.WithBaseURL(baseURL),
|
|
option.WithAPIKey("My API Key"),
|
|
)
|
|
_, err := client.VectorStores.FileBatches.ListFiles(
|
|
context.TODO(),
|
|
"vector_store_id",
|
|
"batch_id",
|
|
openai.VectorStoreFileBatchListFilesParams{
|
|
After: openai.F("after"),
|
|
Before: openai.F("before"),
|
|
Filter: openai.F(openai.VectorStoreFileBatchListFilesParamsFilterInProgress),
|
|
Limit: openai.F(int64(0)),
|
|
Order: openai.F(openai.VectorStoreFileBatchListFilesParamsOrderAsc),
|
|
},
|
|
)
|
|
if err != nil {
|
|
var apierr *openai.Error
|
|
if errors.As(err, &apierr) {
|
|
t.Log(string(apierr.DumpRequest(true)))
|
|
}
|
|
t.Fatalf("err should be nil: %s", err.Error())
|
|
}
|
|
}