mirror of
https://github.com/openai/openai-go.git
synced 2026-04-02 01:19:53 +09:00
examples and stuff work
This commit is contained in:
66
README.md
66
README.md
@@ -51,11 +51,14 @@ func main() {
|
||||
option.WithAPIKey("My API Key"), // defaults to os.LookupEnv("OPENAI_API_KEY")
|
||||
)
|
||||
chatCompletion, err := client.Chat.Completions.New(context.TODO(), openai.ChatCompletionNewParams{
|
||||
Messages: openai.F([]openai.ChatCompletionMessageParamUnion{openai.ChatCompletionUserMessageParam{
|
||||
Role: openai.F(openai.ChatCompletionUserMessageParamRoleUser),
|
||||
Content: openai.F([]openai.ChatCompletionContentPartUnionParam{openai.ChatCompletionContentPartTextParam{Text: openai.F("text"), Type: openai.F(openai.ChatCompletionContentPartTextTypeText)}}),
|
||||
}}),
|
||||
Model: openai.F(openai.ChatModelO3Mini),
|
||||
Messages: []openai.ChatCompletionMessageParamUnion{{
|
||||
OfUser: &openai.ChatCompletionUserMessageParam{
|
||||
Content: []openai.ChatCompletionContentPartUnionParam{{
|
||||
OfText: &openai.ChatCompletionContentPartTextParam{Text: openai.String("text")},
|
||||
}},
|
||||
},
|
||||
}},
|
||||
Model: openai.ChatModelO3Mini,
|
||||
})
|
||||
if err != nil {
|
||||
panic(err.Error())
|
||||
@@ -167,7 +170,7 @@ You can use `.ListAutoPaging()` methods to iterate through items across all page
|
||||
|
||||
```go
|
||||
iter := client.FineTuning.Jobs.ListAutoPaging(context.TODO(), openai.FineTuningJobListParams{
|
||||
Limit: openai.F(int64(20)),
|
||||
Limit: openai.Int(20),
|
||||
})
|
||||
// Automatically fetches more pages as needed.
|
||||
for iter.Next() {
|
||||
@@ -184,7 +187,7 @@ with additional helper methods like `.GetNextPage()`, e.g.:
|
||||
|
||||
```go
|
||||
page, err := client.FineTuning.Jobs.List(context.TODO(), openai.FineTuningJobListParams{
|
||||
Limit: openai.F(int64(20)),
|
||||
Limit: openai.Int(20),
|
||||
})
|
||||
for page != nil {
|
||||
for _, job := range page.Data {
|
||||
@@ -208,8 +211,8 @@ To handle errors, we recommend that you use the `errors.As` pattern:
|
||||
|
||||
```go
|
||||
_, err := client.FineTuning.Jobs.New(context.TODO(), openai.FineTuningJobNewParams{
|
||||
Model: openai.F(openai.FineTuningJobNewParamsModelBabbage002),
|
||||
TrainingFile: openai.F("file-abc123"),
|
||||
Model: "babbage-002",
|
||||
TrainingFile: openai.String("file-abc123"),
|
||||
})
|
||||
if err != nil {
|
||||
var apierr *openai.Error
|
||||
@@ -238,11 +241,14 @@ defer cancel()
|
||||
client.Chat.Completions.New(
|
||||
ctx,
|
||||
openai.ChatCompletionNewParams{
|
||||
Messages: openai.F([]openai.ChatCompletionMessageParamUnion{openai.ChatCompletionUserMessageParam{
|
||||
Role: openai.F(openai.ChatCompletionUserMessageParamRoleUser),
|
||||
Content: openai.F([]openai.ChatCompletionContentPartUnionParam{openai.ChatCompletionContentPartTextParam{Text: openai.F("text"), Type: openai.F(openai.ChatCompletionContentPartTextTypeText)}}),
|
||||
}}),
|
||||
Model: openai.F(openai.ChatModelO3Mini),
|
||||
Messages: []openai.ChatCompletionMessageParamUnion{{
|
||||
OfUser: &openai.ChatCompletionUserMessageParam{
|
||||
Content: []openai.ChatCompletionContentPartUnionParam{{
|
||||
OfText: &openai.ChatCompletionContentPartTextParam{Text: openai.String("text")},
|
||||
}},
|
||||
},
|
||||
}},
|
||||
Model: openai.ChatModelO3Mini,
|
||||
},
|
||||
// This sets the per-retry timeout
|
||||
option.WithRequestTimeout(20*time.Second),
|
||||
@@ -267,19 +273,19 @@ which can be used to wrap any `io.Reader` with the appropriate file name and con
|
||||
file, err := os.Open("input.jsonl")
|
||||
openai.FileNewParams{
|
||||
File: openai.F[io.Reader](file),
|
||||
Purpose: openai.F(openai.FilePurposeFineTune),
|
||||
Purpose: openai.FilePurposeFineTune,
|
||||
}
|
||||
|
||||
// A file from a string
|
||||
openai.FileNewParams{
|
||||
File: openai.F[io.Reader](strings.NewReader("my file contents")),
|
||||
Purpose: openai.F(openai.FilePurposeFineTune),
|
||||
Purpose: openai.FilePurposeFineTune,
|
||||
}
|
||||
|
||||
// With a custom filename and contentType
|
||||
openai.FileNewParams{
|
||||
File: openai.FileParam(strings.NewReader(`{"hello": "foo"}`), "file.go", "application/json"),
|
||||
Purpose: openai.F(openai.FilePurposeFineTune),
|
||||
Purpose: openai.FilePurposeFineTune,
|
||||
}
|
||||
```
|
||||
|
||||
@@ -301,11 +307,14 @@ client := openai.NewClient(
|
||||
client.Chat.Completions.New(
|
||||
context.TODO(),
|
||||
openai.ChatCompletionNewParams{
|
||||
Messages: openai.F([]openai.ChatCompletionMessageParamUnion{openai.ChatCompletionUserMessageParam{
|
||||
Role: openai.F(openai.ChatCompletionUserMessageParamRoleUser),
|
||||
Content: openai.F([]openai.ChatCompletionContentPartUnionParam{openai.ChatCompletionContentPartTextParam{Text: openai.F("text"), Type: openai.F(openai.ChatCompletionContentPartTextTypeText)}}),
|
||||
}}),
|
||||
Model: openai.F(openai.ChatModelO3Mini),
|
||||
Messages: []openai.ChatCompletionMessageParamUnion{{
|
||||
OfUser: &openai.ChatCompletionUserMessageParam{
|
||||
Content: []openai.ChatCompletionContentPartUnionParam{{
|
||||
OfText: &openai.ChatCompletionContentPartTextParam{Text: openai.String("text")},
|
||||
}},
|
||||
},
|
||||
}},
|
||||
Model: openai.ChatModelO3Mini,
|
||||
},
|
||||
option.WithMaxRetries(5),
|
||||
)
|
||||
@@ -322,11 +331,14 @@ var response *http.Response
|
||||
chatCompletion, err := client.Chat.Completions.New(
|
||||
context.TODO(),
|
||||
openai.ChatCompletionNewParams{
|
||||
Messages: openai.F([]openai.ChatCompletionMessageParamUnion{openai.ChatCompletionUserMessageParam{
|
||||
Role: openai.F(openai.ChatCompletionUserMessageParamRoleUser),
|
||||
Content: openai.F([]openai.ChatCompletionContentPartUnionParam{openai.ChatCompletionContentPartTextParam{Text: openai.F("text"), Type: openai.F(openai.ChatCompletionContentPartTextTypeText)}}),
|
||||
}}),
|
||||
Model: openai.F(openai.ChatModelO3Mini),
|
||||
Messages: []openai.ChatCompletionMessageParamUnion{{
|
||||
OfUser: &openai.ChatCompletionUserMessageParam{
|
||||
Content: []openai.ChatCompletionContentPartUnionParam{{
|
||||
OfText: &openai.ChatCompletionContentPartTextParam{Text: openai.String("text")},
|
||||
}},
|
||||
},
|
||||
}},
|
||||
Model: openai.ChatModelO3Mini,
|
||||
},
|
||||
option.WithResponseInto(&response),
|
||||
)
|
||||
|
||||
118
aliases.go
118
aliases.go
@@ -3,10 +3,21 @@
|
||||
package openai
|
||||
|
||||
import (
|
||||
"time"
|
||||
|
||||
"github.com/openai/openai-go/internal/apierror"
|
||||
"github.com/openai/openai-go/packages/param"
|
||||
"github.com/openai/openai-go/packages/resp"
|
||||
"github.com/openai/openai-go/shared"
|
||||
"github.com/openai/openai-go/shared/constant"
|
||||
)
|
||||
|
||||
// aliased to make param.APIUnion private when embedding
|
||||
type apiunion = param.APIUnion
|
||||
|
||||
// aliased to make param.APIObject private when embedding
|
||||
type apiobject = param.APIObject
|
||||
|
||||
type Error = apierror.Error
|
||||
|
||||
// This is an alias to an internal type.
|
||||
@@ -52,35 +63,98 @@ type MetadataParam = shared.MetadataParam
|
||||
// This is an alias to an internal type.
|
||||
type ResponseFormatJSONObjectParam = shared.ResponseFormatJSONObjectParam
|
||||
|
||||
// The type of response format being defined: `json_object`
|
||||
//
|
||||
// This is an alias to an internal type.
|
||||
type ResponseFormatJSONObjectType = shared.ResponseFormatJSONObjectType
|
||||
|
||||
// This is an alias to an internal value.
|
||||
const ResponseFormatJSONObjectTypeJSONObject = shared.ResponseFormatJSONObjectTypeJSONObject
|
||||
|
||||
// This is an alias to an internal type.
|
||||
type ResponseFormatJSONSchemaParam = shared.ResponseFormatJSONSchemaParam
|
||||
|
||||
// This is an alias to an internal type.
|
||||
type ResponseFormatJSONSchemaJSONSchemaParam = shared.ResponseFormatJSONSchemaJSONSchemaParam
|
||||
|
||||
// The type of response format being defined: `json_schema`
|
||||
//
|
||||
// This is an alias to an internal type.
|
||||
type ResponseFormatJSONSchemaType = shared.ResponseFormatJSONSchemaType
|
||||
|
||||
// This is an alias to an internal value.
|
||||
const ResponseFormatJSONSchemaTypeJSONSchema = shared.ResponseFormatJSONSchemaTypeJSONSchema
|
||||
|
||||
// This is an alias to an internal type.
|
||||
type ResponseFormatTextParam = shared.ResponseFormatTextParam
|
||||
|
||||
// The type of response format being defined: `text`
|
||||
//
|
||||
// This is an alias to an internal type.
|
||||
type ResponseFormatTextType = shared.ResponseFormatTextType
|
||||
// Internal helpers for converting from response to param types
|
||||
|
||||
// This is an alias to an internal value.
|
||||
const ResponseFormatTextTypeText = shared.ResponseFormatTextTypeText
|
||||
func newString(value string) param.String {
|
||||
res := param.NeverOmitted[param.String]()
|
||||
res.V = value
|
||||
return res
|
||||
}
|
||||
|
||||
func newInt(value int64) param.Int {
|
||||
res := param.NeverOmitted[param.Int]()
|
||||
res.V = value
|
||||
return res
|
||||
}
|
||||
|
||||
func newBool(value bool) param.Bool {
|
||||
res := param.NeverOmitted[param.Bool]()
|
||||
res.V = value
|
||||
return res
|
||||
}
|
||||
|
||||
func newFloat(value float64) param.Float {
|
||||
res := param.NeverOmitted[param.Float]()
|
||||
res.V = value
|
||||
return res
|
||||
}
|
||||
|
||||
func newDatetime(value time.Time) param.Datetime {
|
||||
res := param.NeverOmitted[param.Datetime]()
|
||||
res.V = value
|
||||
return res
|
||||
}
|
||||
|
||||
func newDate(value time.Time) param.Date {
|
||||
res := param.NeverOmitted[param.Date]()
|
||||
res.V = value
|
||||
return res
|
||||
}
|
||||
|
||||
func toParamString(value string, meta resp.Field) param.String {
|
||||
if !meta.IsMissing() {
|
||||
return newString(value)
|
||||
}
|
||||
return param.String{}
|
||||
}
|
||||
|
||||
func toParamInt(value int64, meta resp.Field) param.Int {
|
||||
if !meta.IsMissing() {
|
||||
return newInt(value)
|
||||
}
|
||||
return param.Int{}
|
||||
}
|
||||
|
||||
func toParamBool(value bool, meta resp.Field) param.Bool {
|
||||
if !meta.IsMissing() {
|
||||
return newBool(value)
|
||||
}
|
||||
return param.Bool{}
|
||||
}
|
||||
|
||||
func toParamFloat(value float64, meta resp.Field) param.Float {
|
||||
if !meta.IsMissing() {
|
||||
return newFloat(value)
|
||||
}
|
||||
return param.Float{}
|
||||
}
|
||||
|
||||
func toParamDatetime(value time.Time, meta resp.Field) param.Datetime {
|
||||
if !meta.IsMissing() {
|
||||
return newDatetime(value)
|
||||
}
|
||||
return param.Datetime{}
|
||||
}
|
||||
|
||||
func toParamDate(value time.Time, meta resp.Field) param.Date {
|
||||
if !meta.IsMissing() {
|
||||
return newDate(value)
|
||||
}
|
||||
return param.Date{}
|
||||
}
|
||||
|
||||
func ptrToConstant[T constant.Constant[T]](c T) *T {
|
||||
if param.IsOmitted(c) {
|
||||
c = c.Default()
|
||||
}
|
||||
return &c
|
||||
}
|
||||
|
||||
18
api.md
18
api.md
@@ -245,7 +245,7 @@ Params Types:
|
||||
|
||||
Response Types:
|
||||
|
||||
- <a href="https://pkg.go.dev/github.com/openai/openai-go">openai</a>.<a href="https://pkg.go.dev/github.com/openai/openai-go#FileChunkingStrategy">FileChunkingStrategy</a>
|
||||
- <a href="https://pkg.go.dev/github.com/openai/openai-go">openai</a>.<a href="https://pkg.go.dev/github.com/openai/openai-go#FileChunkingStrategyUnion">FileChunkingStrategyUnion</a>
|
||||
- <a href="https://pkg.go.dev/github.com/openai/openai-go">openai</a>.<a href="https://pkg.go.dev/github.com/openai/openai-go#OtherFileChunkingStrategyObject">OtherFileChunkingStrategyObject</a>
|
||||
- <a href="https://pkg.go.dev/github.com/openai/openai-go">openai</a>.<a href="https://pkg.go.dev/github.com/openai/openai-go#StaticFileChunkingStrategy">StaticFileChunkingStrategy</a>
|
||||
- <a href="https://pkg.go.dev/github.com/openai/openai-go">openai</a>.<a href="https://pkg.go.dev/github.com/openai/openai-go#StaticFileChunkingStrategyObject">StaticFileChunkingStrategyObject</a>
|
||||
@@ -300,8 +300,8 @@ Response Types:
|
||||
|
||||
- <a href="https://pkg.go.dev/github.com/openai/openai-go">openai</a>.<a href="https://pkg.go.dev/github.com/openai/openai-go#Assistant">Assistant</a>
|
||||
- <a href="https://pkg.go.dev/github.com/openai/openai-go">openai</a>.<a href="https://pkg.go.dev/github.com/openai/openai-go#AssistantDeleted">AssistantDeleted</a>
|
||||
- <a href="https://pkg.go.dev/github.com/openai/openai-go">openai</a>.<a href="https://pkg.go.dev/github.com/openai/openai-go#AssistantStreamEvent">AssistantStreamEvent</a>
|
||||
- <a href="https://pkg.go.dev/github.com/openai/openai-go">openai</a>.<a href="https://pkg.go.dev/github.com/openai/openai-go#AssistantTool">AssistantTool</a>
|
||||
- <a href="https://pkg.go.dev/github.com/openai/openai-go">openai</a>.<a href="https://pkg.go.dev/github.com/openai/openai-go#AssistantStreamEventUnion">AssistantStreamEventUnion</a>
|
||||
- <a href="https://pkg.go.dev/github.com/openai/openai-go">openai</a>.<a href="https://pkg.go.dev/github.com/openai/openai-go#AssistantToolUnion">AssistantToolUnion</a>
|
||||
- <a href="https://pkg.go.dev/github.com/openai/openai-go">openai</a>.<a href="https://pkg.go.dev/github.com/openai/openai-go#CodeInterpreterTool">CodeInterpreterTool</a>
|
||||
- <a href="https://pkg.go.dev/github.com/openai/openai-go">openai</a>.<a href="https://pkg.go.dev/github.com/openai/openai-go#FileSearchTool">FileSearchTool</a>
|
||||
- <a href="https://pkg.go.dev/github.com/openai/openai-go">openai</a>.<a href="https://pkg.go.dev/github.com/openai/openai-go#FunctionTool">FunctionTool</a>
|
||||
@@ -376,8 +376,8 @@ Response Types:
|
||||
- <a href="https://pkg.go.dev/github.com/openai/openai-go">openai</a>.<a href="https://pkg.go.dev/github.com/openai/openai-go#RunStepDelta">RunStepDelta</a>
|
||||
- <a href="https://pkg.go.dev/github.com/openai/openai-go">openai</a>.<a href="https://pkg.go.dev/github.com/openai/openai-go#RunStepDeltaEvent">RunStepDeltaEvent</a>
|
||||
- <a href="https://pkg.go.dev/github.com/openai/openai-go">openai</a>.<a href="https://pkg.go.dev/github.com/openai/openai-go#RunStepDeltaMessageDelta">RunStepDeltaMessageDelta</a>
|
||||
- <a href="https://pkg.go.dev/github.com/openai/openai-go">openai</a>.<a href="https://pkg.go.dev/github.com/openai/openai-go#ToolCall">ToolCall</a>
|
||||
- <a href="https://pkg.go.dev/github.com/openai/openai-go">openai</a>.<a href="https://pkg.go.dev/github.com/openai/openai-go#ToolCallDelta">ToolCallDelta</a>
|
||||
- <a href="https://pkg.go.dev/github.com/openai/openai-go">openai</a>.<a href="https://pkg.go.dev/github.com/openai/openai-go#ToolCallUnion">ToolCallUnion</a>
|
||||
- <a href="https://pkg.go.dev/github.com/openai/openai-go">openai</a>.<a href="https://pkg.go.dev/github.com/openai/openai-go#ToolCallDeltaUnion">ToolCallDeltaUnion</a>
|
||||
- <a href="https://pkg.go.dev/github.com/openai/openai-go">openai</a>.<a href="https://pkg.go.dev/github.com/openai/openai-go#ToolCallDeltaObject">ToolCallDeltaObject</a>
|
||||
- <a href="https://pkg.go.dev/github.com/openai/openai-go">openai</a>.<a href="https://pkg.go.dev/github.com/openai/openai-go#ToolCallsStepDetails">ToolCallsStepDetails</a>
|
||||
|
||||
@@ -399,8 +399,8 @@ Params Types:
|
||||
|
||||
Response Types:
|
||||
|
||||
- <a href="https://pkg.go.dev/github.com/openai/openai-go">openai</a>.<a href="https://pkg.go.dev/github.com/openai/openai-go#Annotation">Annotation</a>
|
||||
- <a href="https://pkg.go.dev/github.com/openai/openai-go">openai</a>.<a href="https://pkg.go.dev/github.com/openai/openai-go#AnnotationDelta">AnnotationDelta</a>
|
||||
- <a href="https://pkg.go.dev/github.com/openai/openai-go">openai</a>.<a href="https://pkg.go.dev/github.com/openai/openai-go#AnnotationUnion">AnnotationUnion</a>
|
||||
- <a href="https://pkg.go.dev/github.com/openai/openai-go">openai</a>.<a href="https://pkg.go.dev/github.com/openai/openai-go#AnnotationDeltaUnion">AnnotationDeltaUnion</a>
|
||||
- <a href="https://pkg.go.dev/github.com/openai/openai-go">openai</a>.<a href="https://pkg.go.dev/github.com/openai/openai-go#FileCitationAnnotation">FileCitationAnnotation</a>
|
||||
- <a href="https://pkg.go.dev/github.com/openai/openai-go">openai</a>.<a href="https://pkg.go.dev/github.com/openai/openai-go#FileCitationDeltaAnnotation">FileCitationDeltaAnnotation</a>
|
||||
- <a href="https://pkg.go.dev/github.com/openai/openai-go">openai</a>.<a href="https://pkg.go.dev/github.com/openai/openai-go#FilePathAnnotation">FilePathAnnotation</a>
|
||||
@@ -414,8 +414,8 @@ Response Types:
|
||||
- <a href="https://pkg.go.dev/github.com/openai/openai-go">openai</a>.<a href="https://pkg.go.dev/github.com/openai/openai-go#ImageURLDelta">ImageURLDelta</a>
|
||||
- <a href="https://pkg.go.dev/github.com/openai/openai-go">openai</a>.<a href="https://pkg.go.dev/github.com/openai/openai-go#ImageURLDeltaBlock">ImageURLDeltaBlock</a>
|
||||
- <a href="https://pkg.go.dev/github.com/openai/openai-go">openai</a>.<a href="https://pkg.go.dev/github.com/openai/openai-go#Message">Message</a>
|
||||
- <a href="https://pkg.go.dev/github.com/openai/openai-go">openai</a>.<a href="https://pkg.go.dev/github.com/openai/openai-go#MessageContent">MessageContent</a>
|
||||
- <a href="https://pkg.go.dev/github.com/openai/openai-go">openai</a>.<a href="https://pkg.go.dev/github.com/openai/openai-go#MessageContentDelta">MessageContentDelta</a>
|
||||
- <a href="https://pkg.go.dev/github.com/openai/openai-go">openai</a>.<a href="https://pkg.go.dev/github.com/openai/openai-go#MessageContentUnion">MessageContentUnion</a>
|
||||
- <a href="https://pkg.go.dev/github.com/openai/openai-go">openai</a>.<a href="https://pkg.go.dev/github.com/openai/openai-go#MessageContentDeltaUnion">MessageContentDeltaUnion</a>
|
||||
- <a href="https://pkg.go.dev/github.com/openai/openai-go">openai</a>.<a href="https://pkg.go.dev/github.com/openai/openai-go#MessageDeleted">MessageDeleted</a>
|
||||
- <a href="https://pkg.go.dev/github.com/openai/openai-go">openai</a>.<a href="https://pkg.go.dev/github.com/openai/openai-go#MessageDelta">MessageDelta</a>
|
||||
- <a href="https://pkg.go.dev/github.com/openai/openai-go">openai</a>.<a href="https://pkg.go.dev/github.com/openai/openai-go#MessageDeltaEvent">MessageDeltaEvent</a>
|
||||
|
||||
18
audio.go
18
audio.go
@@ -14,16 +14,16 @@ import (
|
||||
// the [NewAudioService] method instead.
|
||||
type AudioService struct {
|
||||
Options []option.RequestOption
|
||||
Transcriptions *AudioTranscriptionService
|
||||
Translations *AudioTranslationService
|
||||
Speech *AudioSpeechService
|
||||
Transcriptions AudioTranscriptionService
|
||||
Translations AudioTranslationService
|
||||
Speech AudioSpeechService
|
||||
}
|
||||
|
||||
// NewAudioService generates a new service that applies the given options to each
|
||||
// request. These options are applied after the parent client's options (if there
|
||||
// is one), and before any request-specific options.
|
||||
func NewAudioService(opts ...option.RequestOption) (r *AudioService) {
|
||||
r = &AudioService{}
|
||||
func NewAudioService(opts ...option.RequestOption) (r AudioService) {
|
||||
r = AudioService{}
|
||||
r.Options = opts
|
||||
r.Transcriptions = NewAudioTranscriptionService(opts...)
|
||||
r.Translations = NewAudioTranslationService(opts...)
|
||||
@@ -48,11 +48,3 @@ const (
|
||||
AudioResponseFormatVerboseJSON AudioResponseFormat = "verbose_json"
|
||||
AudioResponseFormatVTT AudioResponseFormat = "vtt"
|
||||
)
|
||||
|
||||
func (r AudioResponseFormat) IsKnown() bool {
|
||||
switch r {
|
||||
case AudioResponseFormatJSON, AudioResponseFormatText, AudioResponseFormatSRT, AudioResponseFormatVerboseJSON, AudioResponseFormatVTT:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
@@ -6,10 +6,9 @@ import (
|
||||
"context"
|
||||
"net/http"
|
||||
|
||||
"github.com/openai/openai-go/internal/apijson"
|
||||
"github.com/openai/openai-go/internal/param"
|
||||
"github.com/openai/openai-go/internal/requestconfig"
|
||||
"github.com/openai/openai-go/option"
|
||||
"github.com/openai/openai-go/packages/param"
|
||||
)
|
||||
|
||||
// AudioSpeechService contains methods and other services that help with
|
||||
@@ -25,8 +24,8 @@ type AudioSpeechService struct {
|
||||
// NewAudioSpeechService generates a new service that applies the given options to
|
||||
// each request. These options are applied after the parent client's options (if
|
||||
// there is one), and before any request-specific options.
|
||||
func NewAudioSpeechService(opts ...option.RequestOption) (r *AudioSpeechService) {
|
||||
r = &AudioSpeechService{}
|
||||
func NewAudioSpeechService(opts ...option.RequestOption) (r AudioSpeechService) {
|
||||
r = AudioSpeechService{}
|
||||
r.Options = opts
|
||||
return
|
||||
}
|
||||
@@ -49,25 +48,34 @@ const (
|
||||
|
||||
type AudioSpeechNewParams struct {
|
||||
// The text to generate audio for. The maximum length is 4096 characters.
|
||||
Input param.Field[string] `json:"input,required"`
|
||||
Input param.String `json:"input,omitzero,required"`
|
||||
// One of the available [TTS models](https://platform.openai.com/docs/models#tts):
|
||||
// `tts-1` or `tts-1-hd`
|
||||
Model param.Field[SpeechModel] `json:"model,required"`
|
||||
Model SpeechModel `json:"model,omitzero,required"`
|
||||
// The voice to use when generating the audio. Supported voices are `alloy`, `ash`,
|
||||
// `coral`, `echo`, `fable`, `onyx`, `nova`, `sage` and `shimmer`. Previews of the
|
||||
// voices are available in the
|
||||
// [Text to speech guide](https://platform.openai.com/docs/guides/text-to-speech#voice-options).
|
||||
Voice param.Field[AudioSpeechNewParamsVoice] `json:"voice,required"`
|
||||
//
|
||||
// Any of "alloy", "ash", "coral", "echo", "fable", "onyx", "nova", "sage",
|
||||
// "shimmer"
|
||||
Voice AudioSpeechNewParamsVoice `json:"voice,omitzero,required"`
|
||||
// The format to audio in. Supported formats are `mp3`, `opus`, `aac`, `flac`,
|
||||
// `wav`, and `pcm`.
|
||||
ResponseFormat param.Field[AudioSpeechNewParamsResponseFormat] `json:"response_format"`
|
||||
//
|
||||
// Any of "mp3", "opus", "aac", "flac", "wav", "pcm"
|
||||
ResponseFormat AudioSpeechNewParamsResponseFormat `json:"response_format,omitzero"`
|
||||
// The speed of the generated audio. Select a value from `0.25` to `4.0`. `1.0` is
|
||||
// the default.
|
||||
Speed param.Field[float64] `json:"speed"`
|
||||
Speed param.Float `json:"speed,omitzero"`
|
||||
apiobject
|
||||
}
|
||||
|
||||
func (f AudioSpeechNewParams) IsMissing() bool { return param.IsOmitted(f) || f.IsNull() }
|
||||
|
||||
func (r AudioSpeechNewParams) MarshalJSON() (data []byte, err error) {
|
||||
return apijson.MarshalRoot(r)
|
||||
type shadow AudioSpeechNewParams
|
||||
return param.MarshalObject(r, (*shadow)(&r))
|
||||
}
|
||||
|
||||
// The voice to use when generating the audio. Supported voices are `alloy`, `ash`,
|
||||
@@ -88,14 +96,6 @@ const (
|
||||
AudioSpeechNewParamsVoiceShimmer AudioSpeechNewParamsVoice = "shimmer"
|
||||
)
|
||||
|
||||
func (r AudioSpeechNewParamsVoice) IsKnown() bool {
|
||||
switch r {
|
||||
case AudioSpeechNewParamsVoiceAlloy, AudioSpeechNewParamsVoiceAsh, AudioSpeechNewParamsVoiceCoral, AudioSpeechNewParamsVoiceEcho, AudioSpeechNewParamsVoiceFable, AudioSpeechNewParamsVoiceOnyx, AudioSpeechNewParamsVoiceNova, AudioSpeechNewParamsVoiceSage, AudioSpeechNewParamsVoiceShimmer:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// The format to audio in. Supported formats are `mp3`, `opus`, `aac`, `flac`,
|
||||
// `wav`, and `pcm`.
|
||||
type AudioSpeechNewParamsResponseFormat string
|
||||
@@ -108,11 +108,3 @@ const (
|
||||
AudioSpeechNewParamsResponseFormatWAV AudioSpeechNewParamsResponseFormat = "wav"
|
||||
AudioSpeechNewParamsResponseFormatPCM AudioSpeechNewParamsResponseFormat = "pcm"
|
||||
)
|
||||
|
||||
func (r AudioSpeechNewParamsResponseFormat) IsKnown() bool {
|
||||
switch r {
|
||||
case AudioSpeechNewParamsResponseFormatMP3, AudioSpeechNewParamsResponseFormatOpus, AudioSpeechNewParamsResponseFormatAAC, AudioSpeechNewParamsResponseFormatFLAC, AudioSpeechNewParamsResponseFormatWAV, AudioSpeechNewParamsResponseFormatPCM:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
@@ -27,11 +27,11 @@ func TestAudioSpeechNewWithOptionalParams(t *testing.T) {
|
||||
option.WithAPIKey("My API Key"),
|
||||
)
|
||||
resp, err := client.Audio.Speech.New(context.TODO(), openai.AudioSpeechNewParams{
|
||||
Input: openai.F("input"),
|
||||
Model: openai.F(openai.SpeechModelTTS1),
|
||||
Voice: openai.F(openai.AudioSpeechNewParamsVoiceAlloy),
|
||||
ResponseFormat: openai.F(openai.AudioSpeechNewParamsResponseFormatMP3),
|
||||
Speed: openai.F(0.250000),
|
||||
Input: openai.String("input"),
|
||||
Model: openai.SpeechModelTTS1,
|
||||
Voice: openai.AudioSpeechNewParamsVoiceAlloy,
|
||||
ResponseFormat: openai.AudioSpeechNewParamsResponseFormatMP3,
|
||||
Speed: openai.Float(0.25),
|
||||
})
|
||||
if err != nil {
|
||||
var apierr *openai.Error
|
||||
|
||||
@@ -11,9 +11,10 @@ import (
|
||||
|
||||
"github.com/openai/openai-go/internal/apiform"
|
||||
"github.com/openai/openai-go/internal/apijson"
|
||||
"github.com/openai/openai-go/internal/param"
|
||||
"github.com/openai/openai-go/internal/requestconfig"
|
||||
"github.com/openai/openai-go/option"
|
||||
"github.com/openai/openai-go/packages/param"
|
||||
"github.com/openai/openai-go/packages/resp"
|
||||
)
|
||||
|
||||
// AudioTranscriptionService contains methods and other services that help with
|
||||
@@ -29,17 +30,22 @@ type AudioTranscriptionService struct {
|
||||
// NewAudioTranscriptionService generates a new service that applies the given
|
||||
// options to each request. These options are applied after the parent client's
|
||||
// options (if there is one), and before any request-specific options.
|
||||
func NewAudioTranscriptionService(opts ...option.RequestOption) (r *AudioTranscriptionService) {
|
||||
r = &AudioTranscriptionService{}
|
||||
func NewAudioTranscriptionService(opts ...option.RequestOption) (r AudioTranscriptionService) {
|
||||
r = AudioTranscriptionService{}
|
||||
r.Options = opts
|
||||
return
|
||||
}
|
||||
|
||||
// Transcribes audio into the input language.
|
||||
func (r *AudioTranscriptionService) New(ctx context.Context, body AudioTranscriptionNewParams, opts ...option.RequestOption) (res *Transcription, err error) {
|
||||
var env apijson.UnionUnmarshaler[Transcription]
|
||||
opts = append(r.Options[:], opts...)
|
||||
path := "audio/transcriptions"
|
||||
err = requestconfig.ExecuteNewRequest(ctx, http.MethodPost, path, body, &res, opts...)
|
||||
err = requestconfig.ExecuteNewRequest(ctx, http.MethodPost, path, body, &env, opts...)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
res = &env.Value
|
||||
return
|
||||
}
|
||||
|
||||
@@ -47,58 +53,56 @@ func (r *AudioTranscriptionService) New(ctx context.Context, body AudioTranscrip
|
||||
// input.
|
||||
type Transcription struct {
|
||||
// The transcribed text.
|
||||
Text string `json:"text,required"`
|
||||
JSON transcriptionJSON `json:"-"`
|
||||
Text string `json:"text,omitzero,required"`
|
||||
JSON struct {
|
||||
Text resp.Field
|
||||
raw string
|
||||
} `json:"-"`
|
||||
}
|
||||
|
||||
// transcriptionJSON contains the JSON metadata for the struct [Transcription]
|
||||
type transcriptionJSON struct {
|
||||
Text apijson.Field
|
||||
raw string
|
||||
ExtraFields map[string]apijson.Field
|
||||
}
|
||||
|
||||
func (r *Transcription) UnmarshalJSON(data []byte) (err error) {
|
||||
func (r Transcription) RawJSON() string { return r.JSON.raw }
|
||||
func (r *Transcription) UnmarshalJSON(data []byte) error {
|
||||
return apijson.UnmarshalRoot(data, r)
|
||||
}
|
||||
|
||||
func (r transcriptionJSON) RawJSON() string {
|
||||
return r.raw
|
||||
}
|
||||
|
||||
type AudioTranscriptionNewParams struct {
|
||||
// The audio file object (not file name) to transcribe, in one of these formats:
|
||||
// flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm.
|
||||
File param.Field[io.Reader] `json:"file,required" format:"binary"`
|
||||
File io.Reader `json:"file,omitzero,required" format:"binary"`
|
||||
// ID of the model to use. Only `whisper-1` (which is powered by our open source
|
||||
// Whisper V2 model) is currently available.
|
||||
Model param.Field[AudioModel] `json:"model,required"`
|
||||
Model AudioModel `json:"model,omitzero,required"`
|
||||
// The language of the input audio. Supplying the input language in
|
||||
// [ISO-639-1](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes) (e.g. `en`)
|
||||
// format will improve accuracy and latency.
|
||||
Language param.Field[string] `json:"language"`
|
||||
Language param.String `json:"language,omitzero"`
|
||||
// An optional text to guide the model's style or continue a previous audio
|
||||
// segment. The
|
||||
// [prompt](https://platform.openai.com/docs/guides/speech-to-text#prompting)
|
||||
// should match the audio language.
|
||||
Prompt param.Field[string] `json:"prompt"`
|
||||
Prompt param.String `json:"prompt,omitzero"`
|
||||
// The format of the output, in one of these options: `json`, `text`, `srt`,
|
||||
// `verbose_json`, or `vtt`.
|
||||
ResponseFormat param.Field[AudioResponseFormat] `json:"response_format"`
|
||||
//
|
||||
// Any of "json", "text", "srt", "verbose_json", "vtt"
|
||||
ResponseFormat AudioResponseFormat `json:"response_format,omitzero"`
|
||||
// The sampling temperature, between 0 and 1. Higher values like 0.8 will make the
|
||||
// output more random, while lower values like 0.2 will make it more focused and
|
||||
// deterministic. If set to 0, the model will use
|
||||
// [log probability](https://en.wikipedia.org/wiki/Log_probability) to
|
||||
// automatically increase the temperature until certain thresholds are hit.
|
||||
Temperature param.Field[float64] `json:"temperature"`
|
||||
Temperature param.Float `json:"temperature,omitzero"`
|
||||
// The timestamp granularities to populate for this transcription.
|
||||
// `response_format` must be set `verbose_json` to use timestamp granularities.
|
||||
// Either or both of these options are supported: `word`, or `segment`. Note: There
|
||||
// is no additional latency for segment timestamps, but generating word timestamps
|
||||
// incurs additional latency.
|
||||
TimestampGranularities param.Field[[]AudioTranscriptionNewParamsTimestampGranularity] `json:"timestamp_granularities"`
|
||||
TimestampGranularities []string `json:"timestamp_granularities,omitzero"`
|
||||
apiobject
|
||||
}
|
||||
|
||||
func (f AudioTranscriptionNewParams) IsMissing() bool { return param.IsOmitted(f) || f.IsNull() }
|
||||
|
||||
func (r AudioTranscriptionNewParams) MarshalMultipart() (data []byte, contentType string, err error) {
|
||||
buf := bytes.NewBuffer(nil)
|
||||
writer := multipart.NewWriter(buf)
|
||||
@@ -114,17 +118,9 @@ func (r AudioTranscriptionNewParams) MarshalMultipart() (data []byte, contentTyp
|
||||
return buf.Bytes(), writer.FormDataContentType(), nil
|
||||
}
|
||||
|
||||
type AudioTranscriptionNewParamsTimestampGranularity string
|
||||
type AudioTranscriptionNewParamsTimestampGranularity = string
|
||||
|
||||
const (
|
||||
AudioTranscriptionNewParamsTimestampGranularityWord AudioTranscriptionNewParamsTimestampGranularity = "word"
|
||||
AudioTranscriptionNewParamsTimestampGranularitySegment AudioTranscriptionNewParamsTimestampGranularity = "segment"
|
||||
)
|
||||
|
||||
func (r AudioTranscriptionNewParamsTimestampGranularity) IsKnown() bool {
|
||||
switch r {
|
||||
case AudioTranscriptionNewParamsTimestampGranularityWord, AudioTranscriptionNewParamsTimestampGranularitySegment:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
@@ -28,13 +28,13 @@ func TestAudioTranscriptionNewWithOptionalParams(t *testing.T) {
|
||||
option.WithAPIKey("My API Key"),
|
||||
)
|
||||
_, err := client.Audio.Transcriptions.New(context.TODO(), openai.AudioTranscriptionNewParams{
|
||||
File: openai.F(io.Reader(bytes.NewBuffer([]byte("some file contents")))),
|
||||
Model: openai.F(openai.AudioModelWhisper1),
|
||||
Language: openai.F("language"),
|
||||
Prompt: openai.F("prompt"),
|
||||
ResponseFormat: openai.F(openai.AudioResponseFormatJSON),
|
||||
Temperature: openai.F(0.000000),
|
||||
TimestampGranularities: openai.F([]openai.AudioTranscriptionNewParamsTimestampGranularity{openai.AudioTranscriptionNewParamsTimestampGranularityWord}),
|
||||
File: io.Reader(bytes.NewBuffer([]byte("some file contents"))),
|
||||
Model: openai.AudioModelWhisper1,
|
||||
Language: openai.String("language"),
|
||||
Prompt: openai.String("prompt"),
|
||||
ResponseFormat: openai.AudioResponseFormatJSON,
|
||||
Temperature: openai.Float(0),
|
||||
TimestampGranularities: []string{"word"},
|
||||
})
|
||||
if err != nil {
|
||||
var apierr *openai.Error
|
||||
|
||||
@@ -11,9 +11,10 @@ import (
|
||||
|
||||
"github.com/openai/openai-go/internal/apiform"
|
||||
"github.com/openai/openai-go/internal/apijson"
|
||||
"github.com/openai/openai-go/internal/param"
|
||||
"github.com/openai/openai-go/internal/requestconfig"
|
||||
"github.com/openai/openai-go/option"
|
||||
"github.com/openai/openai-go/packages/param"
|
||||
"github.com/openai/openai-go/packages/resp"
|
||||
)
|
||||
|
||||
// AudioTranslationService contains methods and other services that help with
|
||||
@@ -29,63 +30,66 @@ type AudioTranslationService struct {
|
||||
// NewAudioTranslationService generates a new service that applies the given
|
||||
// options to each request. These options are applied after the parent client's
|
||||
// options (if there is one), and before any request-specific options.
|
||||
func NewAudioTranslationService(opts ...option.RequestOption) (r *AudioTranslationService) {
|
||||
r = &AudioTranslationService{}
|
||||
func NewAudioTranslationService(opts ...option.RequestOption) (r AudioTranslationService) {
|
||||
r = AudioTranslationService{}
|
||||
r.Options = opts
|
||||
return
|
||||
}
|
||||
|
||||
// Translates audio into English.
|
||||
func (r *AudioTranslationService) New(ctx context.Context, body AudioTranslationNewParams, opts ...option.RequestOption) (res *Translation, err error) {
|
||||
var env apijson.UnionUnmarshaler[Translation]
|
||||
opts = append(r.Options[:], opts...)
|
||||
path := "audio/translations"
|
||||
err = requestconfig.ExecuteNewRequest(ctx, http.MethodPost, path, body, &res, opts...)
|
||||
err = requestconfig.ExecuteNewRequest(ctx, http.MethodPost, path, body, &env, opts...)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
res = &env.Value
|
||||
return
|
||||
}
|
||||
|
||||
type Translation struct {
|
||||
Text string `json:"text,required"`
|
||||
JSON translationJSON `json:"-"`
|
||||
Text string `json:"text,omitzero,required"`
|
||||
JSON struct {
|
||||
Text resp.Field
|
||||
raw string
|
||||
} `json:"-"`
|
||||
}
|
||||
|
||||
// translationJSON contains the JSON metadata for the struct [Translation]
|
||||
type translationJSON struct {
|
||||
Text apijson.Field
|
||||
raw string
|
||||
ExtraFields map[string]apijson.Field
|
||||
}
|
||||
|
||||
func (r *Translation) UnmarshalJSON(data []byte) (err error) {
|
||||
func (r Translation) RawJSON() string { return r.JSON.raw }
|
||||
func (r *Translation) UnmarshalJSON(data []byte) error {
|
||||
return apijson.UnmarshalRoot(data, r)
|
||||
}
|
||||
|
||||
func (r translationJSON) RawJSON() string {
|
||||
return r.raw
|
||||
}
|
||||
|
||||
type AudioTranslationNewParams struct {
|
||||
// The audio file object (not file name) translate, in one of these formats: flac,
|
||||
// mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm.
|
||||
File param.Field[io.Reader] `json:"file,required" format:"binary"`
|
||||
File io.Reader `json:"file,omitzero,required" format:"binary"`
|
||||
// ID of the model to use. Only `whisper-1` (which is powered by our open source
|
||||
// Whisper V2 model) is currently available.
|
||||
Model param.Field[AudioModel] `json:"model,required"`
|
||||
Model AudioModel `json:"model,omitzero,required"`
|
||||
// An optional text to guide the model's style or continue a previous audio
|
||||
// segment. The
|
||||
// [prompt](https://platform.openai.com/docs/guides/speech-to-text#prompting)
|
||||
// should be in English.
|
||||
Prompt param.Field[string] `json:"prompt"`
|
||||
Prompt param.String `json:"prompt,omitzero"`
|
||||
// The format of the output, in one of these options: `json`, `text`, `srt`,
|
||||
// `verbose_json`, or `vtt`.
|
||||
ResponseFormat param.Field[AudioResponseFormat] `json:"response_format"`
|
||||
//
|
||||
// Any of "json", "text", "srt", "verbose_json", "vtt"
|
||||
ResponseFormat AudioResponseFormat `json:"response_format,omitzero"`
|
||||
// The sampling temperature, between 0 and 1. Higher values like 0.8 will make the
|
||||
// output more random, while lower values like 0.2 will make it more focused and
|
||||
// deterministic. If set to 0, the model will use
|
||||
// [log probability](https://en.wikipedia.org/wiki/Log_probability) to
|
||||
// automatically increase the temperature until certain thresholds are hit.
|
||||
Temperature param.Field[float64] `json:"temperature"`
|
||||
Temperature param.Float `json:"temperature,omitzero"`
|
||||
apiobject
|
||||
}
|
||||
|
||||
func (f AudioTranslationNewParams) IsMissing() bool { return param.IsOmitted(f) || f.IsNull() }
|
||||
|
||||
func (r AudioTranslationNewParams) MarshalMultipart() (data []byte, contentType string, err error) {
|
||||
buf := bytes.NewBuffer(nil)
|
||||
writer := multipart.NewWriter(buf)
|
||||
|
||||
@@ -28,11 +28,11 @@ func TestAudioTranslationNewWithOptionalParams(t *testing.T) {
|
||||
option.WithAPIKey("My API Key"),
|
||||
)
|
||||
_, err := client.Audio.Translations.New(context.TODO(), openai.AudioTranslationNewParams{
|
||||
File: openai.F(io.Reader(bytes.NewBuffer([]byte("some file contents")))),
|
||||
Model: openai.F(openai.AudioModelWhisper1),
|
||||
Prompt: openai.F("prompt"),
|
||||
ResponseFormat: openai.F(openai.AudioResponseFormatJSON),
|
||||
Temperature: openai.F(0.000000),
|
||||
File: io.Reader(bytes.NewBuffer([]byte("some file contents"))),
|
||||
Model: openai.AudioModelWhisper1,
|
||||
Prompt: openai.String("prompt"),
|
||||
ResponseFormat: openai.AudioResponseFormatJSON,
|
||||
Temperature: openai.Float(0),
|
||||
})
|
||||
if err != nil {
|
||||
var apierr *openai.Error
|
||||
|
||||
264
batch.go
264
batch.go
@@ -11,11 +11,13 @@ import (
|
||||
|
||||
"github.com/openai/openai-go/internal/apijson"
|
||||
"github.com/openai/openai-go/internal/apiquery"
|
||||
"github.com/openai/openai-go/internal/param"
|
||||
"github.com/openai/openai-go/internal/requestconfig"
|
||||
"github.com/openai/openai-go/option"
|
||||
"github.com/openai/openai-go/packages/pagination"
|
||||
"github.com/openai/openai-go/packages/param"
|
||||
"github.com/openai/openai-go/packages/resp"
|
||||
"github.com/openai/openai-go/shared"
|
||||
"github.com/openai/openai-go/shared/constant"
|
||||
)
|
||||
|
||||
// BatchService contains methods and other services that help with interacting with
|
||||
@@ -31,8 +33,8 @@ type BatchService struct {
|
||||
// NewBatchService generates a new service that applies the given options to each
|
||||
// request. These options are applied after the parent client's options (if there
|
||||
// is one), and before any request-specific options.
|
||||
func NewBatchService(opts ...option.RequestOption) (r *BatchService) {
|
||||
r = &BatchService{}
|
||||
func NewBatchService(opts ...option.RequestOption) (r BatchService) {
|
||||
r = BatchService{}
|
||||
r.Options = opts
|
||||
return
|
||||
}
|
||||
@@ -95,103 +97,86 @@ func (r *BatchService) Cancel(ctx context.Context, batchID string, opts ...optio
|
||||
}
|
||||
|
||||
type Batch struct {
|
||||
ID string `json:"id,required"`
|
||||
ID string `json:"id,omitzero,required"`
|
||||
// The time frame within which the batch should be processed.
|
||||
CompletionWindow string `json:"completion_window,required"`
|
||||
CompletionWindow string `json:"completion_window,omitzero,required"`
|
||||
// The Unix timestamp (in seconds) for when the batch was created.
|
||||
CreatedAt int64 `json:"created_at,required"`
|
||||
CreatedAt int64 `json:"created_at,omitzero,required"`
|
||||
// The OpenAI API endpoint used by the batch.
|
||||
Endpoint string `json:"endpoint,required"`
|
||||
Endpoint string `json:"endpoint,omitzero,required"`
|
||||
// The ID of the input file for the batch.
|
||||
InputFileID string `json:"input_file_id,required"`
|
||||
InputFileID string `json:"input_file_id,omitzero,required"`
|
||||
// The object type, which is always `batch`.
|
||||
Object BatchObject `json:"object,required"`
|
||||
//
|
||||
// This field can be elided, and will be automatically set as "batch".
|
||||
Object constant.Batch `json:"object,required"`
|
||||
// The current status of the batch.
|
||||
Status BatchStatus `json:"status,required"`
|
||||
//
|
||||
// Any of "validating", "failed", "in_progress", "finalizing", "completed",
|
||||
// "expired", "cancelling", "cancelled"
|
||||
Status string `json:"status,omitzero,required"`
|
||||
// The Unix timestamp (in seconds) for when the batch was cancelled.
|
||||
CancelledAt int64 `json:"cancelled_at"`
|
||||
CancelledAt int64 `json:"cancelled_at,omitzero"`
|
||||
// The Unix timestamp (in seconds) for when the batch started cancelling.
|
||||
CancellingAt int64 `json:"cancelling_at"`
|
||||
CancellingAt int64 `json:"cancelling_at,omitzero"`
|
||||
// The Unix timestamp (in seconds) for when the batch was completed.
|
||||
CompletedAt int64 `json:"completed_at"`
|
||||
CompletedAt int64 `json:"completed_at,omitzero"`
|
||||
// The ID of the file containing the outputs of requests with errors.
|
||||
ErrorFileID string `json:"error_file_id"`
|
||||
Errors BatchErrors `json:"errors"`
|
||||
ErrorFileID string `json:"error_file_id,omitzero"`
|
||||
Errors BatchErrors `json:"errors,omitzero"`
|
||||
// The Unix timestamp (in seconds) for when the batch expired.
|
||||
ExpiredAt int64 `json:"expired_at"`
|
||||
ExpiredAt int64 `json:"expired_at,omitzero"`
|
||||
// The Unix timestamp (in seconds) for when the batch will expire.
|
||||
ExpiresAt int64 `json:"expires_at"`
|
||||
ExpiresAt int64 `json:"expires_at,omitzero"`
|
||||
// The Unix timestamp (in seconds) for when the batch failed.
|
||||
FailedAt int64 `json:"failed_at"`
|
||||
FailedAt int64 `json:"failed_at,omitzero"`
|
||||
// The Unix timestamp (in seconds) for when the batch started finalizing.
|
||||
FinalizingAt int64 `json:"finalizing_at"`
|
||||
FinalizingAt int64 `json:"finalizing_at,omitzero"`
|
||||
// The Unix timestamp (in seconds) for when the batch started processing.
|
||||
InProgressAt int64 `json:"in_progress_at"`
|
||||
InProgressAt int64 `json:"in_progress_at,omitzero"`
|
||||
// Set of 16 key-value pairs that can be attached to an object. This can be useful
|
||||
// for storing additional information about the object in a structured format, and
|
||||
// querying for objects via API or the dashboard.
|
||||
//
|
||||
// Keys are strings with a maximum length of 64 characters. Values are strings with
|
||||
// a maximum length of 512 characters.
|
||||
Metadata shared.Metadata `json:"metadata,nullable"`
|
||||
Metadata shared.Metadata `json:"metadata,omitzero,nullable"`
|
||||
// The ID of the file containing the outputs of successfully executed requests.
|
||||
OutputFileID string `json:"output_file_id"`
|
||||
OutputFileID string `json:"output_file_id,omitzero"`
|
||||
// The request counts for different statuses within the batch.
|
||||
RequestCounts BatchRequestCounts `json:"request_counts"`
|
||||
JSON batchJSON `json:"-"`
|
||||
RequestCounts BatchRequestCounts `json:"request_counts,omitzero"`
|
||||
JSON struct {
|
||||
ID resp.Field
|
||||
CompletionWindow resp.Field
|
||||
CreatedAt resp.Field
|
||||
Endpoint resp.Field
|
||||
InputFileID resp.Field
|
||||
Object resp.Field
|
||||
Status resp.Field
|
||||
CancelledAt resp.Field
|
||||
CancellingAt resp.Field
|
||||
CompletedAt resp.Field
|
||||
ErrorFileID resp.Field
|
||||
Errors resp.Field
|
||||
ExpiredAt resp.Field
|
||||
ExpiresAt resp.Field
|
||||
FailedAt resp.Field
|
||||
FinalizingAt resp.Field
|
||||
InProgressAt resp.Field
|
||||
Metadata resp.Field
|
||||
OutputFileID resp.Field
|
||||
RequestCounts resp.Field
|
||||
raw string
|
||||
} `json:"-"`
|
||||
}
|
||||
|
||||
// batchJSON contains the JSON metadata for the struct [Batch]
|
||||
type batchJSON struct {
|
||||
ID apijson.Field
|
||||
CompletionWindow apijson.Field
|
||||
CreatedAt apijson.Field
|
||||
Endpoint apijson.Field
|
||||
InputFileID apijson.Field
|
||||
Object apijson.Field
|
||||
Status apijson.Field
|
||||
CancelledAt apijson.Field
|
||||
CancellingAt apijson.Field
|
||||
CompletedAt apijson.Field
|
||||
ErrorFileID apijson.Field
|
||||
Errors apijson.Field
|
||||
ExpiredAt apijson.Field
|
||||
ExpiresAt apijson.Field
|
||||
FailedAt apijson.Field
|
||||
FinalizingAt apijson.Field
|
||||
InProgressAt apijson.Field
|
||||
Metadata apijson.Field
|
||||
OutputFileID apijson.Field
|
||||
RequestCounts apijson.Field
|
||||
raw string
|
||||
ExtraFields map[string]apijson.Field
|
||||
}
|
||||
|
||||
func (r *Batch) UnmarshalJSON(data []byte) (err error) {
|
||||
func (r Batch) RawJSON() string { return r.JSON.raw }
|
||||
func (r *Batch) UnmarshalJSON(data []byte) error {
|
||||
return apijson.UnmarshalRoot(data, r)
|
||||
}
|
||||
|
||||
func (r batchJSON) RawJSON() string {
|
||||
return r.raw
|
||||
}
|
||||
|
||||
// The object type, which is always `batch`.
|
||||
type BatchObject string
|
||||
|
||||
const (
|
||||
BatchObjectBatch BatchObject = "batch"
|
||||
)
|
||||
|
||||
func (r BatchObject) IsKnown() bool {
|
||||
switch r {
|
||||
case BatchObjectBatch:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// The current status of the batch.
|
||||
type BatchStatus string
|
||||
type BatchStatus = string
|
||||
|
||||
const (
|
||||
BatchStatusValidating BatchStatus = "validating"
|
||||
@@ -204,105 +189,79 @@ const (
|
||||
BatchStatusCancelled BatchStatus = "cancelled"
|
||||
)
|
||||
|
||||
func (r BatchStatus) IsKnown() bool {
|
||||
switch r {
|
||||
case BatchStatusValidating, BatchStatusFailed, BatchStatusInProgress, BatchStatusFinalizing, BatchStatusCompleted, BatchStatusExpired, BatchStatusCancelling, BatchStatusCancelled:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
type BatchErrors struct {
|
||||
Data []BatchError `json:"data"`
|
||||
Data []BatchError `json:"data,omitzero"`
|
||||
// The object type, which is always `list`.
|
||||
Object string `json:"object"`
|
||||
JSON batchErrorsJSON `json:"-"`
|
||||
Object string `json:"object,omitzero"`
|
||||
JSON struct {
|
||||
Data resp.Field
|
||||
Object resp.Field
|
||||
raw string
|
||||
} `json:"-"`
|
||||
}
|
||||
|
||||
// batchErrorsJSON contains the JSON metadata for the struct [BatchErrors]
|
||||
type batchErrorsJSON struct {
|
||||
Data apijson.Field
|
||||
Object apijson.Field
|
||||
raw string
|
||||
ExtraFields map[string]apijson.Field
|
||||
}
|
||||
|
||||
func (r *BatchErrors) UnmarshalJSON(data []byte) (err error) {
|
||||
func (r BatchErrors) RawJSON() string { return r.JSON.raw }
|
||||
func (r *BatchErrors) UnmarshalJSON(data []byte) error {
|
||||
return apijson.UnmarshalRoot(data, r)
|
||||
}
|
||||
|
||||
func (r batchErrorsJSON) RawJSON() string {
|
||||
return r.raw
|
||||
}
|
||||
|
||||
type BatchError struct {
|
||||
// An error code identifying the error type.
|
||||
Code string `json:"code"`
|
||||
Code string `json:"code,omitzero"`
|
||||
// The line number of the input file where the error occurred, if applicable.
|
||||
Line int64 `json:"line,nullable"`
|
||||
Line int64 `json:"line,omitzero,nullable"`
|
||||
// A human-readable message providing more details about the error.
|
||||
Message string `json:"message"`
|
||||
Message string `json:"message,omitzero"`
|
||||
// The name of the parameter that caused the error, if applicable.
|
||||
Param string `json:"param,nullable"`
|
||||
JSON batchErrorJSON `json:"-"`
|
||||
Param string `json:"param,omitzero,nullable"`
|
||||
JSON struct {
|
||||
Code resp.Field
|
||||
Line resp.Field
|
||||
Message resp.Field
|
||||
Param resp.Field
|
||||
raw string
|
||||
} `json:"-"`
|
||||
}
|
||||
|
||||
// batchErrorJSON contains the JSON metadata for the struct [BatchError]
|
||||
type batchErrorJSON struct {
|
||||
Code apijson.Field
|
||||
Line apijson.Field
|
||||
Message apijson.Field
|
||||
Param apijson.Field
|
||||
raw string
|
||||
ExtraFields map[string]apijson.Field
|
||||
}
|
||||
|
||||
func (r *BatchError) UnmarshalJSON(data []byte) (err error) {
|
||||
func (r BatchError) RawJSON() string { return r.JSON.raw }
|
||||
func (r *BatchError) UnmarshalJSON(data []byte) error {
|
||||
return apijson.UnmarshalRoot(data, r)
|
||||
}
|
||||
|
||||
func (r batchErrorJSON) RawJSON() string {
|
||||
return r.raw
|
||||
}
|
||||
|
||||
// The request counts for different statuses within the batch.
|
||||
type BatchRequestCounts struct {
|
||||
// Number of requests that have been completed successfully.
|
||||
Completed int64 `json:"completed,required"`
|
||||
Completed int64 `json:"completed,omitzero,required"`
|
||||
// Number of requests that have failed.
|
||||
Failed int64 `json:"failed,required"`
|
||||
Failed int64 `json:"failed,omitzero,required"`
|
||||
// Total number of requests in the batch.
|
||||
Total int64 `json:"total,required"`
|
||||
JSON batchRequestCountsJSON `json:"-"`
|
||||
Total int64 `json:"total,omitzero,required"`
|
||||
JSON struct {
|
||||
Completed resp.Field
|
||||
Failed resp.Field
|
||||
Total resp.Field
|
||||
raw string
|
||||
} `json:"-"`
|
||||
}
|
||||
|
||||
// batchRequestCountsJSON contains the JSON metadata for the struct
|
||||
// [BatchRequestCounts]
|
||||
type batchRequestCountsJSON struct {
|
||||
Completed apijson.Field
|
||||
Failed apijson.Field
|
||||
Total apijson.Field
|
||||
raw string
|
||||
ExtraFields map[string]apijson.Field
|
||||
}
|
||||
|
||||
func (r *BatchRequestCounts) UnmarshalJSON(data []byte) (err error) {
|
||||
func (r BatchRequestCounts) RawJSON() string { return r.JSON.raw }
|
||||
func (r *BatchRequestCounts) UnmarshalJSON(data []byte) error {
|
||||
return apijson.UnmarshalRoot(data, r)
|
||||
}
|
||||
|
||||
func (r batchRequestCountsJSON) RawJSON() string {
|
||||
return r.raw
|
||||
}
|
||||
|
||||
type BatchNewParams struct {
|
||||
// The time frame within which the batch should be processed. Currently only `24h`
|
||||
// is supported.
|
||||
CompletionWindow param.Field[BatchNewParamsCompletionWindow] `json:"completion_window,required"`
|
||||
//
|
||||
// Any of "24h"
|
||||
CompletionWindow BatchNewParamsCompletionWindow `json:"completion_window,required"`
|
||||
// The endpoint to be used for all requests in the batch. Currently
|
||||
// `/v1/chat/completions`, `/v1/embeddings`, and `/v1/completions` are supported.
|
||||
// Note that `/v1/embeddings` batches are also restricted to a maximum of 50,000
|
||||
// embedding inputs across all requests in the batch.
|
||||
Endpoint param.Field[BatchNewParamsEndpoint] `json:"endpoint,required"`
|
||||
//
|
||||
// Any of "/v1/chat/completions", "/v1/embeddings", "/v1/completions"
|
||||
Endpoint BatchNewParamsEndpoint `json:"endpoint,omitzero,required"`
|
||||
// The ID of an uploaded file that contains requests for the new batch.
|
||||
//
|
||||
// See [upload file](https://platform.openai.com/docs/api-reference/files/create)
|
||||
@@ -312,18 +271,22 @@ type BatchNewParams struct {
|
||||
// [JSONL file](https://platform.openai.com/docs/api-reference/batch/request-input),
|
||||
// and must be uploaded with the purpose `batch`. The file can contain up to 50,000
|
||||
// requests, and can be up to 200 MB in size.
|
||||
InputFileID param.Field[string] `json:"input_file_id,required"`
|
||||
InputFileID param.String `json:"input_file_id,omitzero,required"`
|
||||
// Set of 16 key-value pairs that can be attached to an object. This can be useful
|
||||
// for storing additional information about the object in a structured format, and
|
||||
// querying for objects via API or the dashboard.
|
||||
//
|
||||
// Keys are strings with a maximum length of 64 characters. Values are strings with
|
||||
// a maximum length of 512 characters.
|
||||
Metadata param.Field[shared.MetadataParam] `json:"metadata"`
|
||||
Metadata shared.MetadataParam `json:"metadata,omitzero"`
|
||||
apiobject
|
||||
}
|
||||
|
||||
func (f BatchNewParams) IsMissing() bool { return param.IsOmitted(f) || f.IsNull() }
|
||||
|
||||
func (r BatchNewParams) MarshalJSON() (data []byte, err error) {
|
||||
return apijson.MarshalRoot(r)
|
||||
type shadow BatchNewParams
|
||||
return param.MarshalObject(r, (*shadow)(&r))
|
||||
}
|
||||
|
||||
// The time frame within which the batch should be processed. Currently only `24h`
|
||||
@@ -334,14 +297,6 @@ const (
|
||||
BatchNewParamsCompletionWindow24h BatchNewParamsCompletionWindow = "24h"
|
||||
)
|
||||
|
||||
func (r BatchNewParamsCompletionWindow) IsKnown() bool {
|
||||
switch r {
|
||||
case BatchNewParamsCompletionWindow24h:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// The endpoint to be used for all requests in the batch. Currently
|
||||
// `/v1/chat/completions`, `/v1/embeddings`, and `/v1/completions` are supported.
|
||||
// Note that `/v1/embeddings` batches are also restricted to a maximum of 50,000
|
||||
@@ -354,25 +309,20 @@ const (
|
||||
BatchNewParamsEndpointV1Completions BatchNewParamsEndpoint = "/v1/completions"
|
||||
)
|
||||
|
||||
func (r BatchNewParamsEndpoint) IsKnown() bool {
|
||||
switch r {
|
||||
case BatchNewParamsEndpointV1ChatCompletions, BatchNewParamsEndpointV1Embeddings, BatchNewParamsEndpointV1Completions:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
type BatchListParams struct {
|
||||
// A cursor for use in pagination. `after` is an object ID that defines your place
|
||||
// in the list. For instance, if you make a list request and receive 100 objects,
|
||||
// ending with obj_foo, your subsequent call can include after=obj_foo in order to
|
||||
// fetch the next page of the list.
|
||||
After param.Field[string] `query:"after"`
|
||||
After param.String `query:"after,omitzero"`
|
||||
// A limit on the number of objects to be returned. Limit can range between 1 and
|
||||
// 100, and the default is 20.
|
||||
Limit param.Field[int64] `query:"limit"`
|
||||
Limit param.Int `query:"limit,omitzero"`
|
||||
apiobject
|
||||
}
|
||||
|
||||
func (f BatchListParams) IsMissing() bool { return param.IsOmitted(f) || f.IsNull() }
|
||||
|
||||
// URLQuery serializes [BatchListParams]'s query parameters as `url.Values`.
|
||||
func (r BatchListParams) URLQuery() (v url.Values) {
|
||||
return apiquery.MarshalWithSettings(r, apiquery.QuerySettings{
|
||||
|
||||
@@ -27,12 +27,12 @@ func TestBatchNewWithOptionalParams(t *testing.T) {
|
||||
option.WithAPIKey("My API Key"),
|
||||
)
|
||||
_, err := client.Batches.New(context.TODO(), openai.BatchNewParams{
|
||||
CompletionWindow: openai.F(openai.BatchNewParamsCompletionWindow24h),
|
||||
Endpoint: openai.F(openai.BatchNewParamsEndpointV1ChatCompletions),
|
||||
InputFileID: openai.F("input_file_id"),
|
||||
Metadata: openai.F(shared.MetadataParam{
|
||||
CompletionWindow: openai.BatchNewParamsCompletionWindow24h,
|
||||
Endpoint: openai.BatchNewParamsEndpointV1ChatCompletions,
|
||||
InputFileID: openai.String("input_file_id"),
|
||||
Metadata: shared.MetadataParam{
|
||||
"foo": "string",
|
||||
}),
|
||||
},
|
||||
})
|
||||
if err != nil {
|
||||
var apierr *openai.Error
|
||||
@@ -78,8 +78,8 @@ func TestBatchListWithOptionalParams(t *testing.T) {
|
||||
option.WithAPIKey("My API Key"),
|
||||
)
|
||||
_, err := client.Batches.List(context.TODO(), openai.BatchListParams{
|
||||
After: openai.F("after"),
|
||||
Limit: openai.F(int64(0)),
|
||||
After: openai.String("after"),
|
||||
Limit: openai.Int(0),
|
||||
})
|
||||
if err != nil {
|
||||
var apierr *openai.Error
|
||||
|
||||
10
beta.go
10
beta.go
@@ -14,16 +14,16 @@ import (
|
||||
// the [NewBetaService] method instead.
|
||||
type BetaService struct {
|
||||
Options []option.RequestOption
|
||||
VectorStores *BetaVectorStoreService
|
||||
Assistants *BetaAssistantService
|
||||
Threads *BetaThreadService
|
||||
VectorStores BetaVectorStoreService
|
||||
Assistants BetaAssistantService
|
||||
Threads BetaThreadService
|
||||
}
|
||||
|
||||
// NewBetaService generates a new service that applies the given options to each
|
||||
// request. These options are applied after the parent client's options (if there
|
||||
// is one), and before any request-specific options.
|
||||
func NewBetaService(opts ...option.RequestOption) (r *BetaService) {
|
||||
r = &BetaService{}
|
||||
func NewBetaService(opts ...option.RequestOption) (r BetaService) {
|
||||
r = BetaService{}
|
||||
r.Options = opts
|
||||
r.VectorStores = NewBetaVectorStoreService(opts...)
|
||||
r.Assistants = NewBetaAssistantService(opts...)
|
||||
|
||||
2544
betaassistant.go
2544
betaassistant.go
File diff suppressed because it is too large
Load Diff
@@ -27,36 +27,36 @@ func TestBetaAssistantNewWithOptionalParams(t *testing.T) {
|
||||
option.WithAPIKey("My API Key"),
|
||||
)
|
||||
_, err := client.Beta.Assistants.New(context.TODO(), openai.BetaAssistantNewParams{
|
||||
Model: openai.F(openai.ChatModelO3Mini),
|
||||
Description: openai.F("description"),
|
||||
Instructions: openai.F("instructions"),
|
||||
Metadata: openai.F(shared.MetadataParam{
|
||||
Model: openai.ChatModelO3Mini,
|
||||
Description: openai.String("description"),
|
||||
Instructions: openai.String("instructions"),
|
||||
Metadata: shared.MetadataParam{
|
||||
"foo": "string",
|
||||
}),
|
||||
Name: openai.F("name"),
|
||||
ReasoningEffort: openai.F(openai.BetaAssistantNewParamsReasoningEffortLow),
|
||||
Temperature: openai.F(1.000000),
|
||||
ToolResources: openai.F(openai.BetaAssistantNewParamsToolResources{
|
||||
CodeInterpreter: openai.F(openai.BetaAssistantNewParamsToolResourcesCodeInterpreter{
|
||||
FileIDs: openai.F([]string{"string"}),
|
||||
}),
|
||||
FileSearch: openai.F(openai.BetaAssistantNewParamsToolResourcesFileSearch{
|
||||
VectorStoreIDs: openai.F([]string{"string"}),
|
||||
VectorStores: openai.F([]openai.BetaAssistantNewParamsToolResourcesFileSearchVectorStore{{
|
||||
ChunkingStrategy: openai.F[openai.FileChunkingStrategyParamUnion](openai.AutoFileChunkingStrategyParam{
|
||||
Type: openai.F(openai.AutoFileChunkingStrategyParamTypeAuto),
|
||||
}),
|
||||
FileIDs: openai.F([]string{"string"}),
|
||||
Metadata: openai.F(shared.MetadataParam{
|
||||
},
|
||||
Name: openai.String("name"),
|
||||
ReasoningEffort: openai.BetaAssistantNewParamsReasoningEffortLow,
|
||||
Temperature: openai.Float(1),
|
||||
ToolResources: openai.BetaAssistantNewParamsToolResources{
|
||||
CodeInterpreter: openai.BetaAssistantNewParamsToolResourcesCodeInterpreter{
|
||||
FileIDs: []string{"string"},
|
||||
},
|
||||
FileSearch: openai.BetaAssistantNewParamsToolResourcesFileSearch{
|
||||
VectorStoreIDs: []string{"string"},
|
||||
VectorStores: []openai.BetaAssistantNewParamsToolResourcesFileSearchVectorStore{{
|
||||
ChunkingStrategy: openai.FileChunkingStrategyParamUnion{
|
||||
OfAuto: &openai.AutoFileChunkingStrategyParam{},
|
||||
},
|
||||
FileIDs: []string{"string"},
|
||||
Metadata: shared.MetadataParam{
|
||||
"foo": "string",
|
||||
}),
|
||||
}}),
|
||||
}),
|
||||
}),
|
||||
Tools: openai.F([]openai.AssistantToolUnionParam{openai.CodeInterpreterToolParam{
|
||||
Type: openai.F(openai.CodeInterpreterToolTypeCodeInterpreter),
|
||||
}}),
|
||||
TopP: openai.F(1.000000),
|
||||
},
|
||||
}},
|
||||
},
|
||||
},
|
||||
Tools: []openai.AssistantToolUnionParam{{
|
||||
OfCodeInterpreter: &openai.CodeInterpreterToolParam{},
|
||||
}},
|
||||
TopP: openai.Float(1),
|
||||
})
|
||||
if err != nil {
|
||||
var apierr *openai.Error
|
||||
@@ -105,27 +105,27 @@ func TestBetaAssistantUpdateWithOptionalParams(t *testing.T) {
|
||||
context.TODO(),
|
||||
"assistant_id",
|
||||
openai.BetaAssistantUpdateParams{
|
||||
Description: openai.F("description"),
|
||||
Instructions: openai.F("instructions"),
|
||||
Metadata: openai.F(shared.MetadataParam{
|
||||
Description: openai.String("description"),
|
||||
Instructions: openai.String("instructions"),
|
||||
Metadata: shared.MetadataParam{
|
||||
"foo": "string",
|
||||
}),
|
||||
Model: openai.F(openai.BetaAssistantUpdateParamsModelO3Mini),
|
||||
Name: openai.F("name"),
|
||||
ReasoningEffort: openai.F(openai.BetaAssistantUpdateParamsReasoningEffortLow),
|
||||
Temperature: openai.F(1.000000),
|
||||
ToolResources: openai.F(openai.BetaAssistantUpdateParamsToolResources{
|
||||
CodeInterpreter: openai.F(openai.BetaAssistantUpdateParamsToolResourcesCodeInterpreter{
|
||||
FileIDs: openai.F([]string{"string"}),
|
||||
}),
|
||||
FileSearch: openai.F(openai.BetaAssistantUpdateParamsToolResourcesFileSearch{
|
||||
VectorStoreIDs: openai.F([]string{"string"}),
|
||||
}),
|
||||
}),
|
||||
Tools: openai.F([]openai.AssistantToolUnionParam{openai.CodeInterpreterToolParam{
|
||||
Type: openai.F(openai.CodeInterpreterToolTypeCodeInterpreter),
|
||||
}}),
|
||||
TopP: openai.F(1.000000),
|
||||
},
|
||||
Model: "o3-mini",
|
||||
Name: openai.String("name"),
|
||||
ReasoningEffort: openai.BetaAssistantUpdateParamsReasoningEffortLow,
|
||||
Temperature: openai.Float(1),
|
||||
ToolResources: openai.BetaAssistantUpdateParamsToolResources{
|
||||
CodeInterpreter: openai.BetaAssistantUpdateParamsToolResourcesCodeInterpreter{
|
||||
FileIDs: []string{"string"},
|
||||
},
|
||||
FileSearch: openai.BetaAssistantUpdateParamsToolResourcesFileSearch{
|
||||
VectorStoreIDs: []string{"string"},
|
||||
},
|
||||
},
|
||||
Tools: []openai.AssistantToolUnionParam{{
|
||||
OfCodeInterpreter: &openai.CodeInterpreterToolParam{},
|
||||
}},
|
||||
TopP: openai.Float(1),
|
||||
},
|
||||
)
|
||||
if err != nil {
|
||||
@@ -150,10 +150,10 @@ func TestBetaAssistantListWithOptionalParams(t *testing.T) {
|
||||
option.WithAPIKey("My API Key"),
|
||||
)
|
||||
_, err := client.Beta.Assistants.List(context.TODO(), openai.BetaAssistantListParams{
|
||||
After: openai.F("after"),
|
||||
Before: openai.F("before"),
|
||||
Limit: openai.F(int64(0)),
|
||||
Order: openai.F(openai.BetaAssistantListParamsOrderAsc),
|
||||
After: openai.String("after"),
|
||||
Before: openai.String("before"),
|
||||
Limit: openai.Int(0),
|
||||
Order: openai.BetaAssistantListParamsOrderAsc,
|
||||
})
|
||||
if err != nil {
|
||||
var apierr *openai.Error
|
||||
|
||||
928
betathread.go
928
betathread.go
File diff suppressed because it is too large
Load Diff
@@ -27,39 +27,41 @@ func TestBetaThreadNewWithOptionalParams(t *testing.T) {
|
||||
option.WithAPIKey("My API Key"),
|
||||
)
|
||||
_, err := client.Beta.Threads.New(context.TODO(), openai.BetaThreadNewParams{
|
||||
Messages: openai.F([]openai.BetaThreadNewParamsMessage{{
|
||||
Content: openai.F([]openai.MessageContentPartParamUnion{openai.ImageFileContentBlockParam{ImageFile: openai.F(openai.ImageFileParam{FileID: openai.F("file_id"), Detail: openai.F(openai.ImageFileDetailAuto)}), Type: openai.F(openai.ImageFileContentBlockTypeImageFile)}}),
|
||||
Role: openai.F(openai.BetaThreadNewParamsMessagesRoleUser),
|
||||
Attachments: openai.F([]openai.BetaThreadNewParamsMessagesAttachment{{
|
||||
FileID: openai.F("file_id"),
|
||||
Tools: openai.F([]openai.BetaThreadNewParamsMessagesAttachmentsToolUnion{openai.CodeInterpreterToolParam{
|
||||
Type: openai.F(openai.CodeInterpreterToolTypeCodeInterpreter),
|
||||
}}),
|
||||
}}),
|
||||
Metadata: openai.F(shared.MetadataParam{
|
||||
Messages: []openai.BetaThreadNewParamsMessage{{
|
||||
Content: []openai.MessageContentPartParamUnion{{
|
||||
OfImageFile: &openai.ImageFileContentBlockParam{ImageFile: openai.ImageFileParam{FileID: openai.String("file_id"), Detail: "auto"}},
|
||||
}},
|
||||
Role: "user",
|
||||
Attachments: []openai.BetaThreadNewParamsMessagesAttachment{{
|
||||
FileID: openai.String("file_id"),
|
||||
Tools: []openai.BetaThreadNewParamsMessagesAttachmentsToolUnion{{
|
||||
OfCodeInterpreter: &openai.CodeInterpreterToolParam{},
|
||||
}},
|
||||
}},
|
||||
Metadata: shared.MetadataParam{
|
||||
"foo": "string",
|
||||
}),
|
||||
}}),
|
||||
Metadata: openai.F(shared.MetadataParam{
|
||||
},
|
||||
}},
|
||||
Metadata: shared.MetadataParam{
|
||||
"foo": "string",
|
||||
}),
|
||||
ToolResources: openai.F(openai.BetaThreadNewParamsToolResources{
|
||||
CodeInterpreter: openai.F(openai.BetaThreadNewParamsToolResourcesCodeInterpreter{
|
||||
FileIDs: openai.F([]string{"string"}),
|
||||
}),
|
||||
FileSearch: openai.F(openai.BetaThreadNewParamsToolResourcesFileSearch{
|
||||
VectorStoreIDs: openai.F([]string{"string"}),
|
||||
VectorStores: openai.F([]openai.BetaThreadNewParamsToolResourcesFileSearchVectorStore{{
|
||||
ChunkingStrategy: openai.F[openai.FileChunkingStrategyParamUnion](openai.AutoFileChunkingStrategyParam{
|
||||
Type: openai.F(openai.AutoFileChunkingStrategyParamTypeAuto),
|
||||
}),
|
||||
FileIDs: openai.F([]string{"string"}),
|
||||
Metadata: openai.F(shared.MetadataParam{
|
||||
},
|
||||
ToolResources: openai.BetaThreadNewParamsToolResources{
|
||||
CodeInterpreter: openai.BetaThreadNewParamsToolResourcesCodeInterpreter{
|
||||
FileIDs: []string{"string"},
|
||||
},
|
||||
FileSearch: openai.BetaThreadNewParamsToolResourcesFileSearch{
|
||||
VectorStoreIDs: []string{"string"},
|
||||
VectorStores: []openai.BetaThreadNewParamsToolResourcesFileSearchVectorStore{{
|
||||
ChunkingStrategy: openai.FileChunkingStrategyParamUnion{
|
||||
OfAuto: &openai.AutoFileChunkingStrategyParam{},
|
||||
},
|
||||
FileIDs: []string{"string"},
|
||||
Metadata: shared.MetadataParam{
|
||||
"foo": "string",
|
||||
}),
|
||||
}}),
|
||||
}),
|
||||
}),
|
||||
},
|
||||
}},
|
||||
},
|
||||
},
|
||||
})
|
||||
if err != nil {
|
||||
var apierr *openai.Error
|
||||
@@ -108,17 +110,17 @@ func TestBetaThreadUpdateWithOptionalParams(t *testing.T) {
|
||||
context.TODO(),
|
||||
"thread_id",
|
||||
openai.BetaThreadUpdateParams{
|
||||
Metadata: openai.F(shared.MetadataParam{
|
||||
Metadata: shared.MetadataParam{
|
||||
"foo": "string",
|
||||
}),
|
||||
ToolResources: openai.F(openai.BetaThreadUpdateParamsToolResources{
|
||||
CodeInterpreter: openai.F(openai.BetaThreadUpdateParamsToolResourcesCodeInterpreter{
|
||||
FileIDs: openai.F([]string{"string"}),
|
||||
}),
|
||||
FileSearch: openai.F(openai.BetaThreadUpdateParamsToolResourcesFileSearch{
|
||||
VectorStoreIDs: openai.F([]string{"string"}),
|
||||
}),
|
||||
}),
|
||||
},
|
||||
ToolResources: openai.BetaThreadUpdateParamsToolResources{
|
||||
CodeInterpreter: openai.BetaThreadUpdateParamsToolResourcesCodeInterpreter{
|
||||
FileIDs: []string{"string"},
|
||||
},
|
||||
FileSearch: openai.BetaThreadUpdateParamsToolResourcesFileSearch{
|
||||
VectorStoreIDs: []string{"string"},
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
if err != nil {
|
||||
@@ -165,68 +167,72 @@ func TestBetaThreadNewAndRunWithOptionalParams(t *testing.T) {
|
||||
option.WithAPIKey("My API Key"),
|
||||
)
|
||||
_, err := client.Beta.Threads.NewAndRun(context.TODO(), openai.BetaThreadNewAndRunParams{
|
||||
AssistantID: openai.F("assistant_id"),
|
||||
Instructions: openai.F("instructions"),
|
||||
MaxCompletionTokens: openai.F(int64(256)),
|
||||
MaxPromptTokens: openai.F(int64(256)),
|
||||
Metadata: openai.F(shared.MetadataParam{
|
||||
AssistantID: openai.String("assistant_id"),
|
||||
Instructions: openai.String("instructions"),
|
||||
MaxCompletionTokens: openai.Int(256),
|
||||
MaxPromptTokens: openai.Int(256),
|
||||
Metadata: shared.MetadataParam{
|
||||
"foo": "string",
|
||||
}),
|
||||
Model: openai.F(openai.ChatModelO3Mini),
|
||||
ParallelToolCalls: openai.F(true),
|
||||
Temperature: openai.F(1.000000),
|
||||
Thread: openai.F(openai.BetaThreadNewAndRunParamsThread{
|
||||
Messages: openai.F([]openai.BetaThreadNewAndRunParamsThreadMessage{{
|
||||
Content: openai.F([]openai.MessageContentPartParamUnion{openai.ImageFileContentBlockParam{ImageFile: openai.F(openai.ImageFileParam{FileID: openai.F("file_id"), Detail: openai.F(openai.ImageFileDetailAuto)}), Type: openai.F(openai.ImageFileContentBlockTypeImageFile)}}),
|
||||
Role: openai.F(openai.BetaThreadNewAndRunParamsThreadMessagesRoleUser),
|
||||
Attachments: openai.F([]openai.BetaThreadNewAndRunParamsThreadMessagesAttachment{{
|
||||
FileID: openai.F("file_id"),
|
||||
Tools: openai.F([]openai.BetaThreadNewAndRunParamsThreadMessagesAttachmentsToolUnion{openai.CodeInterpreterToolParam{
|
||||
Type: openai.F(openai.CodeInterpreterToolTypeCodeInterpreter),
|
||||
}}),
|
||||
}}),
|
||||
Metadata: openai.F(shared.MetadataParam{
|
||||
},
|
||||
Model: openai.ChatModelO3Mini,
|
||||
ParallelToolCalls: openai.Bool(true),
|
||||
Temperature: openai.Float(1),
|
||||
Thread: openai.BetaThreadNewAndRunParamsThread{
|
||||
Messages: []openai.BetaThreadNewAndRunParamsThreadMessage{{
|
||||
Content: []openai.MessageContentPartParamUnion{{
|
||||
OfImageFile: &openai.ImageFileContentBlockParam{ImageFile: openai.ImageFileParam{FileID: openai.String("file_id"), Detail: "auto"}},
|
||||
}},
|
||||
Role: "user",
|
||||
Attachments: []openai.BetaThreadNewAndRunParamsThreadMessagesAttachment{{
|
||||
FileID: openai.String("file_id"),
|
||||
Tools: []openai.BetaThreadNewAndRunParamsThreadMessagesAttachmentsToolUnion{{
|
||||
OfCodeInterpreter: &openai.CodeInterpreterToolParam{},
|
||||
}},
|
||||
}},
|
||||
Metadata: shared.MetadataParam{
|
||||
"foo": "string",
|
||||
}),
|
||||
}}),
|
||||
Metadata: openai.F(shared.MetadataParam{
|
||||
},
|
||||
}},
|
||||
Metadata: shared.MetadataParam{
|
||||
"foo": "string",
|
||||
}),
|
||||
ToolResources: openai.F(openai.BetaThreadNewAndRunParamsThreadToolResources{
|
||||
CodeInterpreter: openai.F(openai.BetaThreadNewAndRunParamsThreadToolResourcesCodeInterpreter{
|
||||
FileIDs: openai.F([]string{"string"}),
|
||||
}),
|
||||
FileSearch: openai.F(openai.BetaThreadNewAndRunParamsThreadToolResourcesFileSearch{
|
||||
VectorStoreIDs: openai.F([]string{"string"}),
|
||||
VectorStores: openai.F([]openai.BetaThreadNewAndRunParamsThreadToolResourcesFileSearchVectorStore{{
|
||||
ChunkingStrategy: openai.F[openai.FileChunkingStrategyParamUnion](openai.AutoFileChunkingStrategyParam{
|
||||
Type: openai.F(openai.AutoFileChunkingStrategyParamTypeAuto),
|
||||
}),
|
||||
FileIDs: openai.F([]string{"string"}),
|
||||
Metadata: openai.F(shared.MetadataParam{
|
||||
},
|
||||
ToolResources: openai.BetaThreadNewAndRunParamsThreadToolResources{
|
||||
CodeInterpreter: openai.BetaThreadNewAndRunParamsThreadToolResourcesCodeInterpreter{
|
||||
FileIDs: []string{"string"},
|
||||
},
|
||||
FileSearch: openai.BetaThreadNewAndRunParamsThreadToolResourcesFileSearch{
|
||||
VectorStoreIDs: []string{"string"},
|
||||
VectorStores: []openai.BetaThreadNewAndRunParamsThreadToolResourcesFileSearchVectorStore{{
|
||||
ChunkingStrategy: openai.FileChunkingStrategyParamUnion{
|
||||
OfAuto: &openai.AutoFileChunkingStrategyParam{},
|
||||
},
|
||||
FileIDs: []string{"string"},
|
||||
Metadata: shared.MetadataParam{
|
||||
"foo": "string",
|
||||
}),
|
||||
}}),
|
||||
}),
|
||||
}),
|
||||
}),
|
||||
ToolChoice: openai.F[openai.AssistantToolChoiceOptionUnionParam](openai.AssistantToolChoiceOptionAuto(openai.AssistantToolChoiceOptionAutoNone)),
|
||||
ToolResources: openai.F(openai.BetaThreadNewAndRunParamsToolResources{
|
||||
CodeInterpreter: openai.F(openai.BetaThreadNewAndRunParamsToolResourcesCodeInterpreter{
|
||||
FileIDs: openai.F([]string{"string"}),
|
||||
}),
|
||||
FileSearch: openai.F(openai.BetaThreadNewAndRunParamsToolResourcesFileSearch{
|
||||
VectorStoreIDs: openai.F([]string{"string"}),
|
||||
}),
|
||||
}),
|
||||
Tools: openai.F([]openai.BetaThreadNewAndRunParamsToolUnion{openai.CodeInterpreterToolParam{
|
||||
Type: openai.F(openai.CodeInterpreterToolTypeCodeInterpreter),
|
||||
}}),
|
||||
TopP: openai.F(1.000000),
|
||||
TruncationStrategy: openai.F(openai.BetaThreadNewAndRunParamsTruncationStrategy{
|
||||
Type: openai.F(openai.BetaThreadNewAndRunParamsTruncationStrategyTypeAuto),
|
||||
LastMessages: openai.F(int64(1)),
|
||||
}),
|
||||
},
|
||||
}},
|
||||
},
|
||||
},
|
||||
},
|
||||
ToolChoice: openai.AssistantToolChoiceOptionUnionParam{
|
||||
OfAuto: "none",
|
||||
},
|
||||
ToolResources: openai.BetaThreadNewAndRunParamsToolResources{
|
||||
CodeInterpreter: openai.BetaThreadNewAndRunParamsToolResourcesCodeInterpreter{
|
||||
FileIDs: []string{"string"},
|
||||
},
|
||||
FileSearch: openai.BetaThreadNewAndRunParamsToolResourcesFileSearch{
|
||||
VectorStoreIDs: []string{"string"},
|
||||
},
|
||||
},
|
||||
Tools: []openai.BetaThreadNewAndRunParamsToolUnion{{
|
||||
OfCodeInterpreterTool: &openai.CodeInterpreterToolParam{},
|
||||
}},
|
||||
TopP: openai.Float(1),
|
||||
TruncationStrategy: openai.BetaThreadNewAndRunParamsTruncationStrategy{
|
||||
Type: "auto",
|
||||
LastMessages: openai.Int(1),
|
||||
},
|
||||
})
|
||||
if err != nil {
|
||||
var apierr *openai.Error
|
||||
|
||||
2253
betathreadmessage.go
2253
betathreadmessage.go
File diff suppressed because it is too large
Load Diff
@@ -30,17 +30,19 @@ func TestBetaThreadMessageNewWithOptionalParams(t *testing.T) {
|
||||
context.TODO(),
|
||||
"thread_id",
|
||||
openai.BetaThreadMessageNewParams{
|
||||
Content: openai.F([]openai.MessageContentPartParamUnion{openai.ImageFileContentBlockParam{ImageFile: openai.F(openai.ImageFileParam{FileID: openai.F("file_id"), Detail: openai.F(openai.ImageFileDetailAuto)}), Type: openai.F(openai.ImageFileContentBlockTypeImageFile)}}),
|
||||
Role: openai.F(openai.BetaThreadMessageNewParamsRoleUser),
|
||||
Attachments: openai.F([]openai.BetaThreadMessageNewParamsAttachment{{
|
||||
FileID: openai.F("file_id"),
|
||||
Tools: openai.F([]openai.BetaThreadMessageNewParamsAttachmentsToolUnion{openai.CodeInterpreterToolParam{
|
||||
Type: openai.F(openai.CodeInterpreterToolTypeCodeInterpreter),
|
||||
}}),
|
||||
}}),
|
||||
Metadata: openai.F(shared.MetadataParam{
|
||||
Content: []openai.MessageContentPartParamUnion{{
|
||||
OfImageFile: &openai.ImageFileContentBlockParam{ImageFile: openai.ImageFileParam{FileID: openai.String("file_id"), Detail: "auto"}},
|
||||
}},
|
||||
Role: openai.BetaThreadMessageNewParamsRoleUser,
|
||||
Attachments: []openai.BetaThreadMessageNewParamsAttachment{{
|
||||
FileID: openai.String("file_id"),
|
||||
Tools: []openai.BetaThreadMessageNewParamsAttachmentsToolUnion{{
|
||||
OfCodeInterpreter: &openai.CodeInterpreterToolParam{},
|
||||
}},
|
||||
}},
|
||||
Metadata: shared.MetadataParam{
|
||||
"foo": "string",
|
||||
}),
|
||||
},
|
||||
},
|
||||
)
|
||||
if err != nil {
|
||||
@@ -95,9 +97,9 @@ func TestBetaThreadMessageUpdateWithOptionalParams(t *testing.T) {
|
||||
"thread_id",
|
||||
"message_id",
|
||||
openai.BetaThreadMessageUpdateParams{
|
||||
Metadata: openai.F(shared.MetadataParam{
|
||||
Metadata: shared.MetadataParam{
|
||||
"foo": "string",
|
||||
}),
|
||||
},
|
||||
},
|
||||
)
|
||||
if err != nil {
|
||||
@@ -125,11 +127,11 @@ func TestBetaThreadMessageListWithOptionalParams(t *testing.T) {
|
||||
context.TODO(),
|
||||
"thread_id",
|
||||
openai.BetaThreadMessageListParams{
|
||||
After: openai.F("after"),
|
||||
Before: openai.F("before"),
|
||||
Limit: openai.F(int64(0)),
|
||||
Order: openai.F(openai.BetaThreadMessageListParamsOrderAsc),
|
||||
RunID: openai.F("run_id"),
|
||||
After: openai.String("after"),
|
||||
Before: openai.String("before"),
|
||||
Limit: openai.Int(0),
|
||||
Order: openai.BetaThreadMessageListParamsOrderAsc,
|
||||
RunID: openai.String("run_id"),
|
||||
},
|
||||
)
|
||||
if err != nil {
|
||||
|
||||
681
betathreadrun.go
681
betathreadrun.go
File diff suppressed because it is too large
Load Diff
@@ -30,41 +30,45 @@ func TestBetaThreadRunNewWithOptionalParams(t *testing.T) {
|
||||
context.TODO(),
|
||||
"thread_id",
|
||||
openai.BetaThreadRunNewParams{
|
||||
AssistantID: openai.F("assistant_id"),
|
||||
Include: openai.F([]openai.RunStepInclude{openai.RunStepIncludeStepDetailsToolCallsFileSearchResultsContent}),
|
||||
AdditionalInstructions: openai.F("additional_instructions"),
|
||||
AdditionalMessages: openai.F([]openai.BetaThreadRunNewParamsAdditionalMessage{{
|
||||
Content: openai.F([]openai.MessageContentPartParamUnion{openai.ImageFileContentBlockParam{ImageFile: openai.F(openai.ImageFileParam{FileID: openai.F("file_id"), Detail: openai.F(openai.ImageFileDetailAuto)}), Type: openai.F(openai.ImageFileContentBlockTypeImageFile)}}),
|
||||
Role: openai.F(openai.BetaThreadRunNewParamsAdditionalMessagesRoleUser),
|
||||
Attachments: openai.F([]openai.BetaThreadRunNewParamsAdditionalMessagesAttachment{{
|
||||
FileID: openai.F("file_id"),
|
||||
Tools: openai.F([]openai.BetaThreadRunNewParamsAdditionalMessagesAttachmentsToolUnion{openai.CodeInterpreterToolParam{
|
||||
Type: openai.F(openai.CodeInterpreterToolTypeCodeInterpreter),
|
||||
}}),
|
||||
}}),
|
||||
Metadata: openai.F(shared.MetadataParam{
|
||||
AssistantID: openai.String("assistant_id"),
|
||||
Include: []openai.RunStepInclude{openai.RunStepIncludeStepDetailsToolCallsFileSearchResultsContent},
|
||||
AdditionalInstructions: openai.String("additional_instructions"),
|
||||
AdditionalMessages: []openai.BetaThreadRunNewParamsAdditionalMessage{{
|
||||
Content: []openai.MessageContentPartParamUnion{{
|
||||
OfImageFile: &openai.ImageFileContentBlockParam{ImageFile: openai.ImageFileParam{FileID: openai.String("file_id"), Detail: "auto"}},
|
||||
}},
|
||||
Role: "user",
|
||||
Attachments: []openai.BetaThreadRunNewParamsAdditionalMessagesAttachment{{
|
||||
FileID: openai.String("file_id"),
|
||||
Tools: []openai.BetaThreadRunNewParamsAdditionalMessagesAttachmentsToolUnion{{
|
||||
OfCodeInterpreter: &openai.CodeInterpreterToolParam{},
|
||||
}},
|
||||
}},
|
||||
Metadata: shared.MetadataParam{
|
||||
"foo": "string",
|
||||
}),
|
||||
}}),
|
||||
Instructions: openai.F("instructions"),
|
||||
MaxCompletionTokens: openai.F(int64(256)),
|
||||
MaxPromptTokens: openai.F(int64(256)),
|
||||
Metadata: openai.F(shared.MetadataParam{
|
||||
},
|
||||
}},
|
||||
Instructions: openai.String("instructions"),
|
||||
MaxCompletionTokens: openai.Int(256),
|
||||
MaxPromptTokens: openai.Int(256),
|
||||
Metadata: shared.MetadataParam{
|
||||
"foo": "string",
|
||||
}),
|
||||
Model: openai.F(openai.ChatModelO3Mini),
|
||||
ParallelToolCalls: openai.F(true),
|
||||
ReasoningEffort: openai.F(openai.BetaThreadRunNewParamsReasoningEffortLow),
|
||||
Temperature: openai.F(1.000000),
|
||||
ToolChoice: openai.F[openai.AssistantToolChoiceOptionUnionParam](openai.AssistantToolChoiceOptionAuto(openai.AssistantToolChoiceOptionAutoNone)),
|
||||
Tools: openai.F([]openai.AssistantToolUnionParam{openai.CodeInterpreterToolParam{
|
||||
Type: openai.F(openai.CodeInterpreterToolTypeCodeInterpreter),
|
||||
}}),
|
||||
TopP: openai.F(1.000000),
|
||||
TruncationStrategy: openai.F(openai.BetaThreadRunNewParamsTruncationStrategy{
|
||||
Type: openai.F(openai.BetaThreadRunNewParamsTruncationStrategyTypeAuto),
|
||||
LastMessages: openai.F(int64(1)),
|
||||
}),
|
||||
},
|
||||
Model: openai.ChatModelO3Mini,
|
||||
ParallelToolCalls: openai.Bool(true),
|
||||
ReasoningEffort: openai.BetaThreadRunNewParamsReasoningEffortLow,
|
||||
Temperature: openai.Float(1),
|
||||
ToolChoice: openai.AssistantToolChoiceOptionUnionParam{
|
||||
OfAuto: "none",
|
||||
},
|
||||
Tools: []openai.AssistantToolUnionParam{{
|
||||
OfCodeInterpreter: &openai.CodeInterpreterToolParam{},
|
||||
}},
|
||||
TopP: openai.Float(1),
|
||||
TruncationStrategy: openai.BetaThreadRunNewParamsTruncationStrategy{
|
||||
Type: "auto",
|
||||
LastMessages: openai.Int(1),
|
||||
},
|
||||
},
|
||||
)
|
||||
if err != nil {
|
||||
@@ -119,9 +123,9 @@ func TestBetaThreadRunUpdateWithOptionalParams(t *testing.T) {
|
||||
"thread_id",
|
||||
"run_id",
|
||||
openai.BetaThreadRunUpdateParams{
|
||||
Metadata: openai.F(shared.MetadataParam{
|
||||
Metadata: shared.MetadataParam{
|
||||
"foo": "string",
|
||||
}),
|
||||
},
|
||||
},
|
||||
)
|
||||
if err != nil {
|
||||
@@ -149,10 +153,10 @@ func TestBetaThreadRunListWithOptionalParams(t *testing.T) {
|
||||
context.TODO(),
|
||||
"thread_id",
|
||||
openai.BetaThreadRunListParams{
|
||||
After: openai.F("after"),
|
||||
Before: openai.F("before"),
|
||||
Limit: openai.F(int64(0)),
|
||||
Order: openai.F(openai.BetaThreadRunListParamsOrderAsc),
|
||||
After: openai.String("after"),
|
||||
Before: openai.String("before"),
|
||||
Limit: openai.Int(0),
|
||||
Order: openai.BetaThreadRunListParamsOrderAsc,
|
||||
},
|
||||
)
|
||||
if err != nil {
|
||||
@@ -207,10 +211,10 @@ func TestBetaThreadRunSubmitToolOutputsWithOptionalParams(t *testing.T) {
|
||||
"thread_id",
|
||||
"run_id",
|
||||
openai.BetaThreadRunSubmitToolOutputsParams{
|
||||
ToolOutputs: openai.F([]openai.BetaThreadRunSubmitToolOutputsParamsToolOutput{{
|
||||
Output: openai.F("output"),
|
||||
ToolCallID: openai.F("tool_call_id"),
|
||||
}}),
|
||||
ToolOutputs: []openai.BetaThreadRunSubmitToolOutputsParamsToolOutput{{
|
||||
Output: openai.String("output"),
|
||||
ToolCallID: openai.String("tool_call_id"),
|
||||
}},
|
||||
},
|
||||
)
|
||||
if err != nil {
|
||||
|
||||
2044
betathreadrunstep.go
2044
betathreadrunstep.go
File diff suppressed because it is too large
Load Diff
@@ -31,7 +31,7 @@ func TestBetaThreadRunStepGetWithOptionalParams(t *testing.T) {
|
||||
"run_id",
|
||||
"step_id",
|
||||
openai.BetaThreadRunStepGetParams{
|
||||
Include: openai.F([]openai.RunStepInclude{openai.RunStepIncludeStepDetailsToolCallsFileSearchResultsContent}),
|
||||
Include: []openai.RunStepInclude{openai.RunStepIncludeStepDetailsToolCallsFileSearchResultsContent},
|
||||
},
|
||||
)
|
||||
if err != nil {
|
||||
@@ -60,11 +60,11 @@ func TestBetaThreadRunStepListWithOptionalParams(t *testing.T) {
|
||||
"thread_id",
|
||||
"run_id",
|
||||
openai.BetaThreadRunStepListParams{
|
||||
After: openai.F("after"),
|
||||
Before: openai.F("before"),
|
||||
Include: openai.F([]openai.RunStepInclude{openai.RunStepIncludeStepDetailsToolCallsFileSearchResultsContent}),
|
||||
Limit: openai.F(int64(0)),
|
||||
Order: openai.F(openai.BetaThreadRunStepListParamsOrderAsc),
|
||||
After: openai.String("after"),
|
||||
Before: openai.String("before"),
|
||||
Include: []openai.RunStepInclude{openai.RunStepIncludeStepDetailsToolCallsFileSearchResultsContent},
|
||||
Limit: openai.Int(0),
|
||||
Order: openai.BetaThreadRunStepListParamsOrderAsc,
|
||||
},
|
||||
)
|
||||
if err != nil {
|
||||
|
||||
@@ -4,20 +4,21 @@ package openai
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"reflect"
|
||||
|
||||
"github.com/openai/openai-go/internal/apijson"
|
||||
"github.com/openai/openai-go/internal/apiquery"
|
||||
"github.com/openai/openai-go/internal/param"
|
||||
"github.com/openai/openai-go/internal/requestconfig"
|
||||
"github.com/openai/openai-go/option"
|
||||
"github.com/openai/openai-go/packages/pagination"
|
||||
"github.com/openai/openai-go/packages/param"
|
||||
"github.com/openai/openai-go/packages/resp"
|
||||
"github.com/openai/openai-go/shared"
|
||||
"github.com/tidwall/gjson"
|
||||
"github.com/openai/openai-go/shared/constant"
|
||||
)
|
||||
|
||||
// BetaVectorStoreService contains methods and other services that help with
|
||||
@@ -28,15 +29,15 @@ import (
|
||||
// the [NewBetaVectorStoreService] method instead.
|
||||
type BetaVectorStoreService struct {
|
||||
Options []option.RequestOption
|
||||
Files *BetaVectorStoreFileService
|
||||
FileBatches *BetaVectorStoreFileBatchService
|
||||
Files BetaVectorStoreFileService
|
||||
FileBatches BetaVectorStoreFileBatchService
|
||||
}
|
||||
|
||||
// NewBetaVectorStoreService generates a new service that applies the given options
|
||||
// to each request. These options are applied after the parent client's options (if
|
||||
// there is one), and before any request-specific options.
|
||||
func NewBetaVectorStoreService(opts ...option.RequestOption) (r *BetaVectorStoreService) {
|
||||
r = &BetaVectorStoreService{}
|
||||
func NewBetaVectorStoreService(opts ...option.RequestOption) (r BetaVectorStoreService) {
|
||||
r = BetaVectorStoreService{}
|
||||
r.Options = opts
|
||||
r.Files = NewBetaVectorStoreFileService(opts...)
|
||||
r.FileBatches = NewBetaVectorStoreFileBatchService(opts...)
|
||||
@@ -118,148 +119,98 @@ func (r *BetaVectorStoreService) Delete(ctx context.Context, vectorStoreID strin
|
||||
// `800` and `chunk_overlap_tokens` of `400`.
|
||||
type AutoFileChunkingStrategyParam struct {
|
||||
// Always `auto`.
|
||||
Type param.Field[AutoFileChunkingStrategyParamType] `json:"type,required"`
|
||||
//
|
||||
// This field can be elided, and will be automatically set as "auto".
|
||||
Type constant.Auto `json:"type,required"`
|
||||
apiobject
|
||||
}
|
||||
|
||||
func (f AutoFileChunkingStrategyParam) IsMissing() bool { return param.IsOmitted(f) || f.IsNull() }
|
||||
|
||||
func (r AutoFileChunkingStrategyParam) MarshalJSON() (data []byte, err error) {
|
||||
return apijson.MarshalRoot(r)
|
||||
type shadow AutoFileChunkingStrategyParam
|
||||
return param.MarshalObject(r, (*shadow)(&r))
|
||||
}
|
||||
|
||||
func (r AutoFileChunkingStrategyParam) implementsFileChunkingStrategyParamUnion() {}
|
||||
|
||||
// Always `auto`.
|
||||
type AutoFileChunkingStrategyParamType string
|
||||
|
||||
const (
|
||||
AutoFileChunkingStrategyParamTypeAuto AutoFileChunkingStrategyParamType = "auto"
|
||||
)
|
||||
|
||||
func (r AutoFileChunkingStrategyParamType) IsKnown() bool {
|
||||
switch r {
|
||||
case AutoFileChunkingStrategyParamTypeAuto:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// The strategy used to chunk the file.
|
||||
type FileChunkingStrategy struct {
|
||||
// Always `static`.
|
||||
Type FileChunkingStrategyType `json:"type,required"`
|
||||
type FileChunkingStrategyUnion struct {
|
||||
Static StaticFileChunkingStrategy `json:"static"`
|
||||
JSON fileChunkingStrategyJSON `json:"-"`
|
||||
union FileChunkingStrategyUnion
|
||||
Type string `json:"type"`
|
||||
JSON struct {
|
||||
Static resp.Field
|
||||
Type resp.Field
|
||||
raw string
|
||||
} `json:"-"`
|
||||
}
|
||||
|
||||
// fileChunkingStrategyJSON contains the JSON metadata for the struct
|
||||
// [FileChunkingStrategy]
|
||||
type fileChunkingStrategyJSON struct {
|
||||
Type apijson.Field
|
||||
Static apijson.Field
|
||||
raw string
|
||||
ExtraFields map[string]apijson.Field
|
||||
}
|
||||
|
||||
func (r fileChunkingStrategyJSON) RawJSON() string {
|
||||
return r.raw
|
||||
}
|
||||
|
||||
func (r *FileChunkingStrategy) UnmarshalJSON(data []byte) (err error) {
|
||||
*r = FileChunkingStrategy{}
|
||||
err = apijson.UnmarshalRoot(data, &r.union)
|
||||
if err != nil {
|
||||
return err
|
||||
// note: this function is generated only for discriminated unions
|
||||
func (u FileChunkingStrategyUnion) Variant() (res struct {
|
||||
OfStatic *StaticFileChunkingStrategyObject
|
||||
OfOther *OtherFileChunkingStrategyObject
|
||||
}) {
|
||||
switch u.Type {
|
||||
case "static":
|
||||
v := u.AsStatic()
|
||||
res.OfStatic = &v
|
||||
case "other":
|
||||
v := u.AsOther()
|
||||
res.OfOther = &v
|
||||
}
|
||||
return apijson.Port(r.union, &r)
|
||||
return
|
||||
}
|
||||
|
||||
// AsUnion returns a [FileChunkingStrategyUnion] interface which you can cast to
|
||||
// the specific types for more type safety.
|
||||
//
|
||||
// Possible runtime types of the union are [StaticFileChunkingStrategyObject],
|
||||
// [OtherFileChunkingStrategyObject].
|
||||
func (r FileChunkingStrategy) AsUnion() FileChunkingStrategyUnion {
|
||||
return r.union
|
||||
func (u FileChunkingStrategyUnion) WhichKind() string {
|
||||
return u.Type
|
||||
}
|
||||
|
||||
// The strategy used to chunk the file.
|
||||
//
|
||||
// Union satisfied by [StaticFileChunkingStrategyObject] or
|
||||
// [OtherFileChunkingStrategyObject].
|
||||
type FileChunkingStrategyUnion interface {
|
||||
implementsFileChunkingStrategy()
|
||||
func (u FileChunkingStrategyUnion) AsStatic() (v StaticFileChunkingStrategyObject) {
|
||||
apijson.UnmarshalRoot(json.RawMessage(u.JSON.raw), &v)
|
||||
return
|
||||
}
|
||||
|
||||
func init() {
|
||||
apijson.RegisterUnion(
|
||||
reflect.TypeOf((*FileChunkingStrategyUnion)(nil)).Elem(),
|
||||
"type",
|
||||
apijson.UnionVariant{
|
||||
TypeFilter: gjson.JSON,
|
||||
Type: reflect.TypeOf(StaticFileChunkingStrategyObject{}),
|
||||
DiscriminatorValue: "static",
|
||||
},
|
||||
apijson.UnionVariant{
|
||||
TypeFilter: gjson.JSON,
|
||||
Type: reflect.TypeOf(OtherFileChunkingStrategyObject{}),
|
||||
DiscriminatorValue: "other",
|
||||
},
|
||||
)
|
||||
func (u FileChunkingStrategyUnion) AsOther() (v OtherFileChunkingStrategyObject) {
|
||||
apijson.UnmarshalRoot(json.RawMessage(u.JSON.raw), &v)
|
||||
return
|
||||
}
|
||||
|
||||
// Always `static`.
|
||||
type FileChunkingStrategyType string
|
||||
func (u FileChunkingStrategyUnion) RawJSON() string { return u.JSON.raw }
|
||||
|
||||
const (
|
||||
FileChunkingStrategyTypeStatic FileChunkingStrategyType = "static"
|
||||
FileChunkingStrategyTypeOther FileChunkingStrategyType = "other"
|
||||
)
|
||||
func (r *FileChunkingStrategyUnion) UnmarshalJSON(data []byte) error {
|
||||
return apijson.UnmarshalRoot(data, r)
|
||||
}
|
||||
|
||||
func (r FileChunkingStrategyType) IsKnown() bool {
|
||||
switch r {
|
||||
case FileChunkingStrategyTypeStatic, FileChunkingStrategyTypeOther:
|
||||
return true
|
||||
func NewFileChunkingStrategyParamOfStatic(static StaticFileChunkingStrategyParam) FileChunkingStrategyParamUnion {
|
||||
var variant StaticFileChunkingStrategyObjectParam
|
||||
variant.Static = static
|
||||
return FileChunkingStrategyParamUnion{OfStatic: &variant}
|
||||
}
|
||||
|
||||
// Only one field can be non-zero
|
||||
type FileChunkingStrategyParamUnion struct {
|
||||
OfAuto *AutoFileChunkingStrategyParam
|
||||
OfStatic *StaticFileChunkingStrategyObjectParam
|
||||
apiunion
|
||||
}
|
||||
|
||||
func (u FileChunkingStrategyParamUnion) IsMissing() bool { return param.IsOmitted(u) || u.IsNull() }
|
||||
|
||||
func (u FileChunkingStrategyParamUnion) MarshalJSON() ([]byte, error) {
|
||||
return param.MarshalUnion[FileChunkingStrategyParamUnion](u.OfAuto, u.OfStatic)
|
||||
}
|
||||
|
||||
func (u FileChunkingStrategyParamUnion) GetStatic() *StaticFileChunkingStrategyParam {
|
||||
if vt := u.OfStatic; vt != nil {
|
||||
return &vt.Static
|
||||
}
|
||||
return false
|
||||
return nil
|
||||
}
|
||||
|
||||
// The chunking strategy used to chunk the file(s). If not set, will use the `auto`
|
||||
// strategy. Only applicable if `file_ids` is non-empty.
|
||||
type FileChunkingStrategyParam struct {
|
||||
// Always `auto`.
|
||||
Type param.Field[FileChunkingStrategyParamType] `json:"type,required"`
|
||||
Static param.Field[StaticFileChunkingStrategyParam] `json:"static"`
|
||||
}
|
||||
|
||||
func (r FileChunkingStrategyParam) MarshalJSON() (data []byte, err error) {
|
||||
return apijson.MarshalRoot(r)
|
||||
}
|
||||
|
||||
func (r FileChunkingStrategyParam) implementsFileChunkingStrategyParamUnion() {}
|
||||
|
||||
// The chunking strategy used to chunk the file(s). If not set, will use the `auto`
|
||||
// strategy. Only applicable if `file_ids` is non-empty.
|
||||
//
|
||||
// Satisfied by [AutoFileChunkingStrategyParam],
|
||||
// [StaticFileChunkingStrategyObjectParam], [FileChunkingStrategyParam].
|
||||
type FileChunkingStrategyParamUnion interface {
|
||||
implementsFileChunkingStrategyParamUnion()
|
||||
}
|
||||
|
||||
// Always `auto`.
|
||||
type FileChunkingStrategyParamType string
|
||||
|
||||
const (
|
||||
FileChunkingStrategyParamTypeAuto FileChunkingStrategyParamType = "auto"
|
||||
FileChunkingStrategyParamTypeStatic FileChunkingStrategyParamType = "static"
|
||||
)
|
||||
|
||||
func (r FileChunkingStrategyParamType) IsKnown() bool {
|
||||
switch r {
|
||||
case FileChunkingStrategyParamTypeAuto, FileChunkingStrategyParamTypeStatic:
|
||||
return true
|
||||
func (u FileChunkingStrategyParamUnion) GetType() *string {
|
||||
if vt := u.OfAuto; vt != nil {
|
||||
return (*string)(&vt.Type)
|
||||
} else if vt := u.OfStatic; vt != nil {
|
||||
return (*string)(&vt.Type)
|
||||
}
|
||||
return false
|
||||
return nil
|
||||
}
|
||||
|
||||
// This is returned when the chunking strategy is unknown. Typically, this is
|
||||
@@ -267,265 +218,190 @@ func (r FileChunkingStrategyParamType) IsKnown() bool {
|
||||
// introduced in the API.
|
||||
type OtherFileChunkingStrategyObject struct {
|
||||
// Always `other`.
|
||||
Type OtherFileChunkingStrategyObjectType `json:"type,required"`
|
||||
JSON otherFileChunkingStrategyObjectJSON `json:"-"`
|
||||
//
|
||||
// This field can be elided, and will be automatically set as "other".
|
||||
Type constant.Other `json:"type,required"`
|
||||
JSON struct {
|
||||
Type resp.Field
|
||||
raw string
|
||||
} `json:"-"`
|
||||
}
|
||||
|
||||
// otherFileChunkingStrategyObjectJSON contains the JSON metadata for the struct
|
||||
// [OtherFileChunkingStrategyObject]
|
||||
type otherFileChunkingStrategyObjectJSON struct {
|
||||
Type apijson.Field
|
||||
raw string
|
||||
ExtraFields map[string]apijson.Field
|
||||
}
|
||||
|
||||
func (r *OtherFileChunkingStrategyObject) UnmarshalJSON(data []byte) (err error) {
|
||||
func (r OtherFileChunkingStrategyObject) RawJSON() string { return r.JSON.raw }
|
||||
func (r *OtherFileChunkingStrategyObject) UnmarshalJSON(data []byte) error {
|
||||
return apijson.UnmarshalRoot(data, r)
|
||||
}
|
||||
|
||||
func (r otherFileChunkingStrategyObjectJSON) RawJSON() string {
|
||||
return r.raw
|
||||
}
|
||||
|
||||
func (r OtherFileChunkingStrategyObject) implementsFileChunkingStrategy() {}
|
||||
|
||||
// Always `other`.
|
||||
type OtherFileChunkingStrategyObjectType string
|
||||
|
||||
const (
|
||||
OtherFileChunkingStrategyObjectTypeOther OtherFileChunkingStrategyObjectType = "other"
|
||||
)
|
||||
|
||||
func (r OtherFileChunkingStrategyObjectType) IsKnown() bool {
|
||||
switch r {
|
||||
case OtherFileChunkingStrategyObjectTypeOther:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
type StaticFileChunkingStrategy struct {
|
||||
// The number of tokens that overlap between chunks. The default value is `400`.
|
||||
//
|
||||
// Note that the overlap must not exceed half of `max_chunk_size_tokens`.
|
||||
ChunkOverlapTokens int64 `json:"chunk_overlap_tokens,required"`
|
||||
ChunkOverlapTokens int64 `json:"chunk_overlap_tokens,omitzero,required"`
|
||||
// The maximum number of tokens in each chunk. The default value is `800`. The
|
||||
// minimum value is `100` and the maximum value is `4096`.
|
||||
MaxChunkSizeTokens int64 `json:"max_chunk_size_tokens,required"`
|
||||
JSON staticFileChunkingStrategyJSON `json:"-"`
|
||||
MaxChunkSizeTokens int64 `json:"max_chunk_size_tokens,omitzero,required"`
|
||||
JSON struct {
|
||||
ChunkOverlapTokens resp.Field
|
||||
MaxChunkSizeTokens resp.Field
|
||||
raw string
|
||||
} `json:"-"`
|
||||
}
|
||||
|
||||
// staticFileChunkingStrategyJSON contains the JSON metadata for the struct
|
||||
// [StaticFileChunkingStrategy]
|
||||
type staticFileChunkingStrategyJSON struct {
|
||||
ChunkOverlapTokens apijson.Field
|
||||
MaxChunkSizeTokens apijson.Field
|
||||
raw string
|
||||
ExtraFields map[string]apijson.Field
|
||||
}
|
||||
|
||||
func (r *StaticFileChunkingStrategy) UnmarshalJSON(data []byte) (err error) {
|
||||
func (r StaticFileChunkingStrategy) RawJSON() string { return r.JSON.raw }
|
||||
func (r *StaticFileChunkingStrategy) UnmarshalJSON(data []byte) error {
|
||||
return apijson.UnmarshalRoot(data, r)
|
||||
}
|
||||
|
||||
func (r staticFileChunkingStrategyJSON) RawJSON() string {
|
||||
return r.raw
|
||||
// ToParam converts this StaticFileChunkingStrategy to a
|
||||
// StaticFileChunkingStrategyParam.
|
||||
//
|
||||
// Warning: the fields of the param type will not be present. ToParam should only
|
||||
// be used at the last possible moment before sending a request. Test for this with
|
||||
// StaticFileChunkingStrategyParam.IsOverridden()
|
||||
func (r StaticFileChunkingStrategy) ToParam() StaticFileChunkingStrategyParam {
|
||||
return param.Override[StaticFileChunkingStrategyParam](r.RawJSON())
|
||||
}
|
||||
|
||||
type StaticFileChunkingStrategyParam struct {
|
||||
// The number of tokens that overlap between chunks. The default value is `400`.
|
||||
//
|
||||
// Note that the overlap must not exceed half of `max_chunk_size_tokens`.
|
||||
ChunkOverlapTokens param.Field[int64] `json:"chunk_overlap_tokens,required"`
|
||||
ChunkOverlapTokens param.Int `json:"chunk_overlap_tokens,omitzero,required"`
|
||||
// The maximum number of tokens in each chunk. The default value is `800`. The
|
||||
// minimum value is `100` and the maximum value is `4096`.
|
||||
MaxChunkSizeTokens param.Field[int64] `json:"max_chunk_size_tokens,required"`
|
||||
MaxChunkSizeTokens param.Int `json:"max_chunk_size_tokens,omitzero,required"`
|
||||
apiobject
|
||||
}
|
||||
|
||||
func (f StaticFileChunkingStrategyParam) IsMissing() bool { return param.IsOmitted(f) || f.IsNull() }
|
||||
|
||||
func (r StaticFileChunkingStrategyParam) MarshalJSON() (data []byte, err error) {
|
||||
return apijson.MarshalRoot(r)
|
||||
type shadow StaticFileChunkingStrategyParam
|
||||
return param.MarshalObject(r, (*shadow)(&r))
|
||||
}
|
||||
|
||||
type StaticFileChunkingStrategyObject struct {
|
||||
Static StaticFileChunkingStrategy `json:"static,required"`
|
||||
Static StaticFileChunkingStrategy `json:"static,omitzero,required"`
|
||||
// Always `static`.
|
||||
Type StaticFileChunkingStrategyObjectType `json:"type,required"`
|
||||
JSON staticFileChunkingStrategyObjectJSON `json:"-"`
|
||||
//
|
||||
// This field can be elided, and will be automatically set as "static".
|
||||
Type constant.Static `json:"type,required"`
|
||||
JSON struct {
|
||||
Static resp.Field
|
||||
Type resp.Field
|
||||
raw string
|
||||
} `json:"-"`
|
||||
}
|
||||
|
||||
// staticFileChunkingStrategyObjectJSON contains the JSON metadata for the struct
|
||||
// [StaticFileChunkingStrategyObject]
|
||||
type staticFileChunkingStrategyObjectJSON struct {
|
||||
Static apijson.Field
|
||||
Type apijson.Field
|
||||
raw string
|
||||
ExtraFields map[string]apijson.Field
|
||||
}
|
||||
|
||||
func (r *StaticFileChunkingStrategyObject) UnmarshalJSON(data []byte) (err error) {
|
||||
func (r StaticFileChunkingStrategyObject) RawJSON() string { return r.JSON.raw }
|
||||
func (r *StaticFileChunkingStrategyObject) UnmarshalJSON(data []byte) error {
|
||||
return apijson.UnmarshalRoot(data, r)
|
||||
}
|
||||
|
||||
func (r staticFileChunkingStrategyObjectJSON) RawJSON() string {
|
||||
return r.raw
|
||||
}
|
||||
|
||||
func (r StaticFileChunkingStrategyObject) implementsFileChunkingStrategy() {}
|
||||
|
||||
// Always `static`.
|
||||
type StaticFileChunkingStrategyObjectType string
|
||||
|
||||
const (
|
||||
StaticFileChunkingStrategyObjectTypeStatic StaticFileChunkingStrategyObjectType = "static"
|
||||
)
|
||||
|
||||
func (r StaticFileChunkingStrategyObjectType) IsKnown() bool {
|
||||
switch r {
|
||||
case StaticFileChunkingStrategyObjectTypeStatic:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
type StaticFileChunkingStrategyObjectParam struct {
|
||||
Static param.Field[StaticFileChunkingStrategyParam] `json:"static,required"`
|
||||
Static StaticFileChunkingStrategyParam `json:"static,omitzero,required"`
|
||||
// Always `static`.
|
||||
Type param.Field[StaticFileChunkingStrategyObjectParamType] `json:"type,required"`
|
||||
//
|
||||
// This field can be elided, and will be automatically set as "static".
|
||||
Type constant.Static `json:"type,required"`
|
||||
apiobject
|
||||
}
|
||||
|
||||
func (f StaticFileChunkingStrategyObjectParam) IsMissing() bool {
|
||||
return param.IsOmitted(f) || f.IsNull()
|
||||
}
|
||||
|
||||
func (r StaticFileChunkingStrategyObjectParam) MarshalJSON() (data []byte, err error) {
|
||||
return apijson.MarshalRoot(r)
|
||||
}
|
||||
|
||||
func (r StaticFileChunkingStrategyObjectParam) implementsFileChunkingStrategyParamUnion() {}
|
||||
|
||||
// Always `static`.
|
||||
type StaticFileChunkingStrategyObjectParamType string
|
||||
|
||||
const (
|
||||
StaticFileChunkingStrategyObjectParamTypeStatic StaticFileChunkingStrategyObjectParamType = "static"
|
||||
)
|
||||
|
||||
func (r StaticFileChunkingStrategyObjectParamType) IsKnown() bool {
|
||||
switch r {
|
||||
case StaticFileChunkingStrategyObjectParamTypeStatic:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
type shadow StaticFileChunkingStrategyObjectParam
|
||||
return param.MarshalObject(r, (*shadow)(&r))
|
||||
}
|
||||
|
||||
// A vector store is a collection of processed files can be used by the
|
||||
// `file_search` tool.
|
||||
type VectorStore struct {
|
||||
// The identifier, which can be referenced in API endpoints.
|
||||
ID string `json:"id,required"`
|
||||
ID string `json:"id,omitzero,required"`
|
||||
// The Unix timestamp (in seconds) for when the vector store was created.
|
||||
CreatedAt int64 `json:"created_at,required"`
|
||||
FileCounts VectorStoreFileCounts `json:"file_counts,required"`
|
||||
CreatedAt int64 `json:"created_at,omitzero,required"`
|
||||
FileCounts VectorStoreFileCounts `json:"file_counts,omitzero,required"`
|
||||
// The Unix timestamp (in seconds) for when the vector store was last active.
|
||||
LastActiveAt int64 `json:"last_active_at,required,nullable"`
|
||||
LastActiveAt int64 `json:"last_active_at,omitzero,required,nullable"`
|
||||
// Set of 16 key-value pairs that can be attached to an object. This can be useful
|
||||
// for storing additional information about the object in a structured format, and
|
||||
// querying for objects via API or the dashboard.
|
||||
//
|
||||
// Keys are strings with a maximum length of 64 characters. Values are strings with
|
||||
// a maximum length of 512 characters.
|
||||
Metadata shared.Metadata `json:"metadata,required,nullable"`
|
||||
Metadata shared.Metadata `json:"metadata,omitzero,required,nullable"`
|
||||
// The name of the vector store.
|
||||
Name string `json:"name,required"`
|
||||
Name string `json:"name,omitzero,required"`
|
||||
// The object type, which is always `vector_store`.
|
||||
Object VectorStoreObject `json:"object,required"`
|
||||
//
|
||||
// This field can be elided, and will be automatically set as "vector_store".
|
||||
Object constant.VectorStore `json:"object,required"`
|
||||
// The status of the vector store, which can be either `expired`, `in_progress`, or
|
||||
// `completed`. A status of `completed` indicates that the vector store is ready
|
||||
// for use.
|
||||
Status VectorStoreStatus `json:"status,required"`
|
||||
//
|
||||
// Any of "expired", "in_progress", "completed"
|
||||
Status string `json:"status,omitzero,required"`
|
||||
// The total number of bytes used by the files in the vector store.
|
||||
UsageBytes int64 `json:"usage_bytes,required"`
|
||||
UsageBytes int64 `json:"usage_bytes,omitzero,required"`
|
||||
// The expiration policy for a vector store.
|
||||
ExpiresAfter VectorStoreExpiresAfter `json:"expires_after"`
|
||||
ExpiresAfter VectorStoreExpiresAfter `json:"expires_after,omitzero"`
|
||||
// The Unix timestamp (in seconds) for when the vector store will expire.
|
||||
ExpiresAt int64 `json:"expires_at,nullable"`
|
||||
JSON vectorStoreJSON `json:"-"`
|
||||
ExpiresAt int64 `json:"expires_at,omitzero,nullable"`
|
||||
JSON struct {
|
||||
ID resp.Field
|
||||
CreatedAt resp.Field
|
||||
FileCounts resp.Field
|
||||
LastActiveAt resp.Field
|
||||
Metadata resp.Field
|
||||
Name resp.Field
|
||||
Object resp.Field
|
||||
Status resp.Field
|
||||
UsageBytes resp.Field
|
||||
ExpiresAfter resp.Field
|
||||
ExpiresAt resp.Field
|
||||
raw string
|
||||
} `json:"-"`
|
||||
}
|
||||
|
||||
// vectorStoreJSON contains the JSON metadata for the struct [VectorStore]
|
||||
type vectorStoreJSON struct {
|
||||
ID apijson.Field
|
||||
CreatedAt apijson.Field
|
||||
FileCounts apijson.Field
|
||||
LastActiveAt apijson.Field
|
||||
Metadata apijson.Field
|
||||
Name apijson.Field
|
||||
Object apijson.Field
|
||||
Status apijson.Field
|
||||
UsageBytes apijson.Field
|
||||
ExpiresAfter apijson.Field
|
||||
ExpiresAt apijson.Field
|
||||
raw string
|
||||
ExtraFields map[string]apijson.Field
|
||||
}
|
||||
|
||||
func (r *VectorStore) UnmarshalJSON(data []byte) (err error) {
|
||||
func (r VectorStore) RawJSON() string { return r.JSON.raw }
|
||||
func (r *VectorStore) UnmarshalJSON(data []byte) error {
|
||||
return apijson.UnmarshalRoot(data, r)
|
||||
}
|
||||
|
||||
func (r vectorStoreJSON) RawJSON() string {
|
||||
return r.raw
|
||||
}
|
||||
|
||||
type VectorStoreFileCounts struct {
|
||||
// The number of files that were cancelled.
|
||||
Cancelled int64 `json:"cancelled,required"`
|
||||
Cancelled int64 `json:"cancelled,omitzero,required"`
|
||||
// The number of files that have been successfully processed.
|
||||
Completed int64 `json:"completed,required"`
|
||||
Completed int64 `json:"completed,omitzero,required"`
|
||||
// The number of files that have failed to process.
|
||||
Failed int64 `json:"failed,required"`
|
||||
Failed int64 `json:"failed,omitzero,required"`
|
||||
// The number of files that are currently being processed.
|
||||
InProgress int64 `json:"in_progress,required"`
|
||||
InProgress int64 `json:"in_progress,omitzero,required"`
|
||||
// The total number of files.
|
||||
Total int64 `json:"total,required"`
|
||||
JSON vectorStoreFileCountsJSON `json:"-"`
|
||||
Total int64 `json:"total,omitzero,required"`
|
||||
JSON struct {
|
||||
Cancelled resp.Field
|
||||
Completed resp.Field
|
||||
Failed resp.Field
|
||||
InProgress resp.Field
|
||||
Total resp.Field
|
||||
raw string
|
||||
} `json:"-"`
|
||||
}
|
||||
|
||||
// vectorStoreFileCountsJSON contains the JSON metadata for the struct
|
||||
// [VectorStoreFileCounts]
|
||||
type vectorStoreFileCountsJSON struct {
|
||||
Cancelled apijson.Field
|
||||
Completed apijson.Field
|
||||
Failed apijson.Field
|
||||
InProgress apijson.Field
|
||||
Total apijson.Field
|
||||
raw string
|
||||
ExtraFields map[string]apijson.Field
|
||||
}
|
||||
|
||||
func (r *VectorStoreFileCounts) UnmarshalJSON(data []byte) (err error) {
|
||||
func (r VectorStoreFileCounts) RawJSON() string { return r.JSON.raw }
|
||||
func (r *VectorStoreFileCounts) UnmarshalJSON(data []byte) error {
|
||||
return apijson.UnmarshalRoot(data, r)
|
||||
}
|
||||
|
||||
func (r vectorStoreFileCountsJSON) RawJSON() string {
|
||||
return r.raw
|
||||
}
|
||||
|
||||
// The object type, which is always `vector_store`.
|
||||
type VectorStoreObject string
|
||||
|
||||
const (
|
||||
VectorStoreObjectVectorStore VectorStoreObject = "vector_store"
|
||||
)
|
||||
|
||||
func (r VectorStoreObject) IsKnown() bool {
|
||||
switch r {
|
||||
case VectorStoreObjectVectorStore:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// The status of the vector store, which can be either `expired`, `in_progress`, or
|
||||
// `completed`. A status of `completed` indicates that the vector store is ready
|
||||
// for use.
|
||||
type VectorStoreStatus string
|
||||
type VectorStoreStatus = string
|
||||
|
||||
const (
|
||||
VectorStoreStatusExpired VectorStoreStatus = "expired"
|
||||
@@ -533,195 +409,137 @@ const (
|
||||
VectorStoreStatusCompleted VectorStoreStatus = "completed"
|
||||
)
|
||||
|
||||
func (r VectorStoreStatus) IsKnown() bool {
|
||||
switch r {
|
||||
case VectorStoreStatusExpired, VectorStoreStatusInProgress, VectorStoreStatusCompleted:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// The expiration policy for a vector store.
|
||||
type VectorStoreExpiresAfter struct {
|
||||
// Anchor timestamp after which the expiration policy applies. Supported anchors:
|
||||
// `last_active_at`.
|
||||
Anchor VectorStoreExpiresAfterAnchor `json:"anchor,required"`
|
||||
//
|
||||
// This field can be elided, and will be automatically set as "last_active_at".
|
||||
Anchor constant.LastActiveAt `json:"anchor,required"`
|
||||
// The number of days after the anchor time that the vector store will expire.
|
||||
Days int64 `json:"days,required"`
|
||||
JSON vectorStoreExpiresAfterJSON `json:"-"`
|
||||
Days int64 `json:"days,omitzero,required"`
|
||||
JSON struct {
|
||||
Anchor resp.Field
|
||||
Days resp.Field
|
||||
raw string
|
||||
} `json:"-"`
|
||||
}
|
||||
|
||||
// vectorStoreExpiresAfterJSON contains the JSON metadata for the struct
|
||||
// [VectorStoreExpiresAfter]
|
||||
type vectorStoreExpiresAfterJSON struct {
|
||||
Anchor apijson.Field
|
||||
Days apijson.Field
|
||||
raw string
|
||||
ExtraFields map[string]apijson.Field
|
||||
}
|
||||
|
||||
func (r *VectorStoreExpiresAfter) UnmarshalJSON(data []byte) (err error) {
|
||||
func (r VectorStoreExpiresAfter) RawJSON() string { return r.JSON.raw }
|
||||
func (r *VectorStoreExpiresAfter) UnmarshalJSON(data []byte) error {
|
||||
return apijson.UnmarshalRoot(data, r)
|
||||
}
|
||||
|
||||
func (r vectorStoreExpiresAfterJSON) RawJSON() string {
|
||||
return r.raw
|
||||
}
|
||||
|
||||
// Anchor timestamp after which the expiration policy applies. Supported anchors:
|
||||
// `last_active_at`.
|
||||
type VectorStoreExpiresAfterAnchor string
|
||||
|
||||
const (
|
||||
VectorStoreExpiresAfterAnchorLastActiveAt VectorStoreExpiresAfterAnchor = "last_active_at"
|
||||
)
|
||||
|
||||
func (r VectorStoreExpiresAfterAnchor) IsKnown() bool {
|
||||
switch r {
|
||||
case VectorStoreExpiresAfterAnchorLastActiveAt:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
type VectorStoreDeleted struct {
|
||||
ID string `json:"id,required"`
|
||||
Deleted bool `json:"deleted,required"`
|
||||
Object VectorStoreDeletedObject `json:"object,required"`
|
||||
JSON vectorStoreDeletedJSON `json:"-"`
|
||||
ID string `json:"id,omitzero,required"`
|
||||
Deleted bool `json:"deleted,omitzero,required"`
|
||||
// This field can be elided, and will be automatically set as
|
||||
// "vector_store.deleted".
|
||||
Object constant.VectorStoreDeleted `json:"object,required"`
|
||||
JSON struct {
|
||||
ID resp.Field
|
||||
Deleted resp.Field
|
||||
Object resp.Field
|
||||
raw string
|
||||
} `json:"-"`
|
||||
}
|
||||
|
||||
// vectorStoreDeletedJSON contains the JSON metadata for the struct
|
||||
// [VectorStoreDeleted]
|
||||
type vectorStoreDeletedJSON struct {
|
||||
ID apijson.Field
|
||||
Deleted apijson.Field
|
||||
Object apijson.Field
|
||||
raw string
|
||||
ExtraFields map[string]apijson.Field
|
||||
}
|
||||
|
||||
func (r *VectorStoreDeleted) UnmarshalJSON(data []byte) (err error) {
|
||||
func (r VectorStoreDeleted) RawJSON() string { return r.JSON.raw }
|
||||
func (r *VectorStoreDeleted) UnmarshalJSON(data []byte) error {
|
||||
return apijson.UnmarshalRoot(data, r)
|
||||
}
|
||||
|
||||
func (r vectorStoreDeletedJSON) RawJSON() string {
|
||||
return r.raw
|
||||
}
|
||||
|
||||
type VectorStoreDeletedObject string
|
||||
|
||||
const (
|
||||
VectorStoreDeletedObjectVectorStoreDeleted VectorStoreDeletedObject = "vector_store.deleted"
|
||||
)
|
||||
|
||||
func (r VectorStoreDeletedObject) IsKnown() bool {
|
||||
switch r {
|
||||
case VectorStoreDeletedObjectVectorStoreDeleted:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
type BetaVectorStoreNewParams struct {
|
||||
// The chunking strategy used to chunk the file(s). If not set, will use the `auto`
|
||||
// strategy. Only applicable if `file_ids` is non-empty.
|
||||
ChunkingStrategy param.Field[FileChunkingStrategyParamUnion] `json:"chunking_strategy"`
|
||||
ChunkingStrategy FileChunkingStrategyParamUnion `json:"chunking_strategy,omitzero"`
|
||||
// The expiration policy for a vector store.
|
||||
ExpiresAfter param.Field[BetaVectorStoreNewParamsExpiresAfter] `json:"expires_after"`
|
||||
ExpiresAfter BetaVectorStoreNewParamsExpiresAfter `json:"expires_after,omitzero"`
|
||||
// A list of [File](https://platform.openai.com/docs/api-reference/files) IDs that
|
||||
// the vector store should use. Useful for tools like `file_search` that can access
|
||||
// files.
|
||||
FileIDs param.Field[[]string] `json:"file_ids"`
|
||||
FileIDs []string `json:"file_ids,omitzero"`
|
||||
// Set of 16 key-value pairs that can be attached to an object. This can be useful
|
||||
// for storing additional information about the object in a structured format, and
|
||||
// querying for objects via API or the dashboard.
|
||||
//
|
||||
// Keys are strings with a maximum length of 64 characters. Values are strings with
|
||||
// a maximum length of 512 characters.
|
||||
Metadata param.Field[shared.MetadataParam] `json:"metadata"`
|
||||
Metadata shared.MetadataParam `json:"metadata,omitzero"`
|
||||
// The name of the vector store.
|
||||
Name param.Field[string] `json:"name"`
|
||||
Name param.String `json:"name,omitzero"`
|
||||
apiobject
|
||||
}
|
||||
|
||||
func (f BetaVectorStoreNewParams) IsMissing() bool { return param.IsOmitted(f) || f.IsNull() }
|
||||
|
||||
func (r BetaVectorStoreNewParams) MarshalJSON() (data []byte, err error) {
|
||||
return apijson.MarshalRoot(r)
|
||||
type shadow BetaVectorStoreNewParams
|
||||
return param.MarshalObject(r, (*shadow)(&r))
|
||||
}
|
||||
|
||||
// The expiration policy for a vector store.
|
||||
type BetaVectorStoreNewParamsExpiresAfter struct {
|
||||
// Anchor timestamp after which the expiration policy applies. Supported anchors:
|
||||
// `last_active_at`.
|
||||
Anchor param.Field[BetaVectorStoreNewParamsExpiresAfterAnchor] `json:"anchor,required"`
|
||||
//
|
||||
// This field can be elided, and will be automatically set as "last_active_at".
|
||||
Anchor constant.LastActiveAt `json:"anchor,required"`
|
||||
// The number of days after the anchor time that the vector store will expire.
|
||||
Days param.Field[int64] `json:"days,required"`
|
||||
Days param.Int `json:"days,omitzero,required"`
|
||||
apiobject
|
||||
}
|
||||
|
||||
func (f BetaVectorStoreNewParamsExpiresAfter) IsMissing() bool {
|
||||
return param.IsOmitted(f) || f.IsNull()
|
||||
}
|
||||
|
||||
func (r BetaVectorStoreNewParamsExpiresAfter) MarshalJSON() (data []byte, err error) {
|
||||
return apijson.MarshalRoot(r)
|
||||
}
|
||||
|
||||
// Anchor timestamp after which the expiration policy applies. Supported anchors:
|
||||
// `last_active_at`.
|
||||
type BetaVectorStoreNewParamsExpiresAfterAnchor string
|
||||
|
||||
const (
|
||||
BetaVectorStoreNewParamsExpiresAfterAnchorLastActiveAt BetaVectorStoreNewParamsExpiresAfterAnchor = "last_active_at"
|
||||
)
|
||||
|
||||
func (r BetaVectorStoreNewParamsExpiresAfterAnchor) IsKnown() bool {
|
||||
switch r {
|
||||
case BetaVectorStoreNewParamsExpiresAfterAnchorLastActiveAt:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
type shadow BetaVectorStoreNewParamsExpiresAfter
|
||||
return param.MarshalObject(r, (*shadow)(&r))
|
||||
}
|
||||
|
||||
type BetaVectorStoreUpdateParams struct {
|
||||
// The expiration policy for a vector store.
|
||||
ExpiresAfter param.Field[BetaVectorStoreUpdateParamsExpiresAfter] `json:"expires_after"`
|
||||
ExpiresAfter BetaVectorStoreUpdateParamsExpiresAfter `json:"expires_after,omitzero"`
|
||||
// Set of 16 key-value pairs that can be attached to an object. This can be useful
|
||||
// for storing additional information about the object in a structured format, and
|
||||
// querying for objects via API or the dashboard.
|
||||
//
|
||||
// Keys are strings with a maximum length of 64 characters. Values are strings with
|
||||
// a maximum length of 512 characters.
|
||||
Metadata param.Field[shared.MetadataParam] `json:"metadata"`
|
||||
Metadata shared.MetadataParam `json:"metadata,omitzero"`
|
||||
// The name of the vector store.
|
||||
Name param.Field[string] `json:"name"`
|
||||
Name param.String `json:"name,omitzero"`
|
||||
apiobject
|
||||
}
|
||||
|
||||
func (f BetaVectorStoreUpdateParams) IsMissing() bool { return param.IsOmitted(f) || f.IsNull() }
|
||||
|
||||
func (r BetaVectorStoreUpdateParams) MarshalJSON() (data []byte, err error) {
|
||||
return apijson.MarshalRoot(r)
|
||||
type shadow BetaVectorStoreUpdateParams
|
||||
return param.MarshalObject(r, (*shadow)(&r))
|
||||
}
|
||||
|
||||
// The expiration policy for a vector store.
|
||||
type BetaVectorStoreUpdateParamsExpiresAfter struct {
|
||||
// Anchor timestamp after which the expiration policy applies. Supported anchors:
|
||||
// `last_active_at`.
|
||||
Anchor param.Field[BetaVectorStoreUpdateParamsExpiresAfterAnchor] `json:"anchor,required"`
|
||||
//
|
||||
// This field can be elided, and will be automatically set as "last_active_at".
|
||||
Anchor constant.LastActiveAt `json:"anchor,required"`
|
||||
// The number of days after the anchor time that the vector store will expire.
|
||||
Days param.Field[int64] `json:"days,required"`
|
||||
Days param.Int `json:"days,omitzero,required"`
|
||||
apiobject
|
||||
}
|
||||
|
||||
func (f BetaVectorStoreUpdateParamsExpiresAfter) IsMissing() bool {
|
||||
return param.IsOmitted(f) || f.IsNull()
|
||||
}
|
||||
|
||||
func (r BetaVectorStoreUpdateParamsExpiresAfter) MarshalJSON() (data []byte, err error) {
|
||||
return apijson.MarshalRoot(r)
|
||||
}
|
||||
|
||||
// Anchor timestamp after which the expiration policy applies. Supported anchors:
|
||||
// `last_active_at`.
|
||||
type BetaVectorStoreUpdateParamsExpiresAfterAnchor string
|
||||
|
||||
const (
|
||||
BetaVectorStoreUpdateParamsExpiresAfterAnchorLastActiveAt BetaVectorStoreUpdateParamsExpiresAfterAnchor = "last_active_at"
|
||||
)
|
||||
|
||||
func (r BetaVectorStoreUpdateParamsExpiresAfterAnchor) IsKnown() bool {
|
||||
switch r {
|
||||
case BetaVectorStoreUpdateParamsExpiresAfterAnchorLastActiveAt:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
type shadow BetaVectorStoreUpdateParamsExpiresAfter
|
||||
return param.MarshalObject(r, (*shadow)(&r))
|
||||
}
|
||||
|
||||
type BetaVectorStoreListParams struct {
|
||||
@@ -729,20 +547,25 @@ type BetaVectorStoreListParams struct {
|
||||
// in the list. For instance, if you make a list request and receive 100 objects,
|
||||
// ending with obj_foo, your subsequent call can include after=obj_foo in order to
|
||||
// fetch the next page of the list.
|
||||
After param.Field[string] `query:"after"`
|
||||
After param.String `query:"after,omitzero"`
|
||||
// A cursor for use in pagination. `before` is an object ID that defines your place
|
||||
// in the list. For instance, if you make a list request and receive 100 objects,
|
||||
// starting with obj_foo, your subsequent call can include before=obj_foo in order
|
||||
// to fetch the previous page of the list.
|
||||
Before param.Field[string] `query:"before"`
|
||||
Before param.String `query:"before,omitzero"`
|
||||
// A limit on the number of objects to be returned. Limit can range between 1 and
|
||||
// 100, and the default is 20.
|
||||
Limit param.Field[int64] `query:"limit"`
|
||||
Limit param.Int `query:"limit,omitzero"`
|
||||
// Sort order by the `created_at` timestamp of the objects. `asc` for ascending
|
||||
// order and `desc` for descending order.
|
||||
Order param.Field[BetaVectorStoreListParamsOrder] `query:"order"`
|
||||
//
|
||||
// Any of "asc", "desc"
|
||||
Order BetaVectorStoreListParamsOrder `query:"order,omitzero"`
|
||||
apiobject
|
||||
}
|
||||
|
||||
func (f BetaVectorStoreListParams) IsMissing() bool { return param.IsOmitted(f) || f.IsNull() }
|
||||
|
||||
// URLQuery serializes [BetaVectorStoreListParams]'s query parameters as
|
||||
// `url.Values`.
|
||||
func (r BetaVectorStoreListParams) URLQuery() (v url.Values) {
|
||||
@@ -760,11 +583,3 @@ const (
|
||||
BetaVectorStoreListParamsOrderAsc BetaVectorStoreListParamsOrder = "asc"
|
||||
BetaVectorStoreListParamsOrderDesc BetaVectorStoreListParamsOrder = "desc"
|
||||
)
|
||||
|
||||
func (r BetaVectorStoreListParamsOrder) IsKnown() bool {
|
||||
switch r {
|
||||
case BetaVectorStoreListParamsOrderAsc, BetaVectorStoreListParamsOrderDesc:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
@@ -27,18 +27,17 @@ func TestBetaVectorStoreNewWithOptionalParams(t *testing.T) {
|
||||
option.WithAPIKey("My API Key"),
|
||||
)
|
||||
_, err := client.Beta.VectorStores.New(context.TODO(), openai.BetaVectorStoreNewParams{
|
||||
ChunkingStrategy: openai.F[openai.FileChunkingStrategyParamUnion](openai.AutoFileChunkingStrategyParam{
|
||||
Type: openai.F(openai.AutoFileChunkingStrategyParamTypeAuto),
|
||||
}),
|
||||
ExpiresAfter: openai.F(openai.BetaVectorStoreNewParamsExpiresAfter{
|
||||
Anchor: openai.F(openai.BetaVectorStoreNewParamsExpiresAfterAnchorLastActiveAt),
|
||||
Days: openai.F(int64(1)),
|
||||
}),
|
||||
FileIDs: openai.F([]string{"string"}),
|
||||
Metadata: openai.F(shared.MetadataParam{
|
||||
ChunkingStrategy: openai.FileChunkingStrategyParamUnion{
|
||||
OfAuto: &openai.AutoFileChunkingStrategyParam{},
|
||||
},
|
||||
ExpiresAfter: openai.BetaVectorStoreNewParamsExpiresAfter{
|
||||
Days: openai.Int(1),
|
||||
},
|
||||
FileIDs: []string{"string"},
|
||||
Metadata: shared.MetadataParam{
|
||||
"foo": "string",
|
||||
}),
|
||||
Name: openai.F("name"),
|
||||
},
|
||||
Name: openai.String("name"),
|
||||
})
|
||||
if err != nil {
|
||||
var apierr *openai.Error
|
||||
@@ -87,14 +86,13 @@ func TestBetaVectorStoreUpdateWithOptionalParams(t *testing.T) {
|
||||
context.TODO(),
|
||||
"vector_store_id",
|
||||
openai.BetaVectorStoreUpdateParams{
|
||||
ExpiresAfter: openai.F(openai.BetaVectorStoreUpdateParamsExpiresAfter{
|
||||
Anchor: openai.F(openai.BetaVectorStoreUpdateParamsExpiresAfterAnchorLastActiveAt),
|
||||
Days: openai.F(int64(1)),
|
||||
}),
|
||||
Metadata: openai.F(shared.MetadataParam{
|
||||
ExpiresAfter: openai.BetaVectorStoreUpdateParamsExpiresAfter{
|
||||
Days: openai.Int(1),
|
||||
},
|
||||
Metadata: shared.MetadataParam{
|
||||
"foo": "string",
|
||||
}),
|
||||
Name: openai.F("name"),
|
||||
},
|
||||
Name: openai.String("name"),
|
||||
},
|
||||
)
|
||||
if err != nil {
|
||||
@@ -119,10 +117,10 @@ func TestBetaVectorStoreListWithOptionalParams(t *testing.T) {
|
||||
option.WithAPIKey("My API Key"),
|
||||
)
|
||||
_, err := client.Beta.VectorStores.List(context.TODO(), openai.BetaVectorStoreListParams{
|
||||
After: openai.F("after"),
|
||||
Before: openai.F("before"),
|
||||
Limit: openai.F(int64(0)),
|
||||
Order: openai.F(openai.BetaVectorStoreListParamsOrderAsc),
|
||||
After: openai.String("after"),
|
||||
Before: openai.String("before"),
|
||||
Limit: openai.Int(0),
|
||||
Order: openai.BetaVectorStoreListParamsOrderAsc,
|
||||
})
|
||||
if err != nil {
|
||||
var apierr *openai.Error
|
||||
|
||||
@@ -11,10 +11,12 @@ import (
|
||||
|
||||
"github.com/openai/openai-go/internal/apijson"
|
||||
"github.com/openai/openai-go/internal/apiquery"
|
||||
"github.com/openai/openai-go/internal/param"
|
||||
"github.com/openai/openai-go/internal/requestconfig"
|
||||
"github.com/openai/openai-go/option"
|
||||
"github.com/openai/openai-go/packages/pagination"
|
||||
"github.com/openai/openai-go/packages/param"
|
||||
"github.com/openai/openai-go/packages/resp"
|
||||
"github.com/openai/openai-go/shared/constant"
|
||||
)
|
||||
|
||||
// BetaVectorStoreFileService contains methods and other services that help with
|
||||
@@ -30,8 +32,8 @@ type BetaVectorStoreFileService struct {
|
||||
// NewBetaVectorStoreFileService generates a new service that applies the given
|
||||
// options to each request. These options are applied after the parent client's
|
||||
// options (if there is one), and before any request-specific options.
|
||||
func NewBetaVectorStoreFileService(opts ...option.RequestOption) (r *BetaVectorStoreFileService) {
|
||||
r = &BetaVectorStoreFileService{}
|
||||
func NewBetaVectorStoreFileService(opts ...option.RequestOption) (r BetaVectorStoreFileService) {
|
||||
r = BetaVectorStoreFileService{}
|
||||
r.Options = opts
|
||||
return
|
||||
}
|
||||
@@ -118,82 +120,73 @@ func (r *BetaVectorStoreFileService) Delete(ctx context.Context, vectorStoreID s
|
||||
// A list of files attached to a vector store.
|
||||
type VectorStoreFile struct {
|
||||
// The identifier, which can be referenced in API endpoints.
|
||||
ID string `json:"id,required"`
|
||||
ID string `json:"id,omitzero,required"`
|
||||
// The Unix timestamp (in seconds) for when the vector store file was created.
|
||||
CreatedAt int64 `json:"created_at,required"`
|
||||
CreatedAt int64 `json:"created_at,omitzero,required"`
|
||||
// The last error associated with this vector store file. Will be `null` if there
|
||||
// are no errors.
|
||||
LastError VectorStoreFileLastError `json:"last_error,required,nullable"`
|
||||
LastError VectorStoreFileLastError `json:"last_error,omitzero,required,nullable"`
|
||||
// The object type, which is always `vector_store.file`.
|
||||
Object VectorStoreFileObject `json:"object,required"`
|
||||
//
|
||||
// This field can be elided, and will be automatically set as "vector_store.file".
|
||||
Object constant.VectorStoreFile `json:"object,required"`
|
||||
// The status of the vector store file, which can be either `in_progress`,
|
||||
// `completed`, `cancelled`, or `failed`. The status `completed` indicates that the
|
||||
// vector store file is ready for use.
|
||||
Status VectorStoreFileStatus `json:"status,required"`
|
||||
//
|
||||
// Any of "in_progress", "completed", "cancelled", "failed"
|
||||
Status string `json:"status,omitzero,required"`
|
||||
// The total vector store usage in bytes. Note that this may be different from the
|
||||
// original file size.
|
||||
UsageBytes int64 `json:"usage_bytes,required"`
|
||||
UsageBytes int64 `json:"usage_bytes,omitzero,required"`
|
||||
// The ID of the
|
||||
// [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object)
|
||||
// that the [File](https://platform.openai.com/docs/api-reference/files) is
|
||||
// attached to.
|
||||
VectorStoreID string `json:"vector_store_id,required"`
|
||||
VectorStoreID string `json:"vector_store_id,omitzero,required"`
|
||||
// The strategy used to chunk the file.
|
||||
ChunkingStrategy FileChunkingStrategy `json:"chunking_strategy"`
|
||||
JSON vectorStoreFileJSON `json:"-"`
|
||||
ChunkingStrategy FileChunkingStrategyUnion `json:"chunking_strategy,omitzero"`
|
||||
JSON struct {
|
||||
ID resp.Field
|
||||
CreatedAt resp.Field
|
||||
LastError resp.Field
|
||||
Object resp.Field
|
||||
Status resp.Field
|
||||
UsageBytes resp.Field
|
||||
VectorStoreID resp.Field
|
||||
ChunkingStrategy resp.Field
|
||||
raw string
|
||||
} `json:"-"`
|
||||
}
|
||||
|
||||
// vectorStoreFileJSON contains the JSON metadata for the struct [VectorStoreFile]
|
||||
type vectorStoreFileJSON struct {
|
||||
ID apijson.Field
|
||||
CreatedAt apijson.Field
|
||||
LastError apijson.Field
|
||||
Object apijson.Field
|
||||
Status apijson.Field
|
||||
UsageBytes apijson.Field
|
||||
VectorStoreID apijson.Field
|
||||
ChunkingStrategy apijson.Field
|
||||
raw string
|
||||
ExtraFields map[string]apijson.Field
|
||||
}
|
||||
|
||||
func (r *VectorStoreFile) UnmarshalJSON(data []byte) (err error) {
|
||||
func (r VectorStoreFile) RawJSON() string { return r.JSON.raw }
|
||||
func (r *VectorStoreFile) UnmarshalJSON(data []byte) error {
|
||||
return apijson.UnmarshalRoot(data, r)
|
||||
}
|
||||
|
||||
func (r vectorStoreFileJSON) RawJSON() string {
|
||||
return r.raw
|
||||
}
|
||||
|
||||
// The last error associated with this vector store file. Will be `null` if there
|
||||
// are no errors.
|
||||
type VectorStoreFileLastError struct {
|
||||
// One of `server_error` or `rate_limit_exceeded`.
|
||||
Code VectorStoreFileLastErrorCode `json:"code,required"`
|
||||
//
|
||||
// Any of "server_error", "unsupported_file", "invalid_file"
|
||||
Code string `json:"code,omitzero,required"`
|
||||
// A human-readable description of the error.
|
||||
Message string `json:"message,required"`
|
||||
JSON vectorStoreFileLastErrorJSON `json:"-"`
|
||||
Message string `json:"message,omitzero,required"`
|
||||
JSON struct {
|
||||
Code resp.Field
|
||||
Message resp.Field
|
||||
raw string
|
||||
} `json:"-"`
|
||||
}
|
||||
|
||||
// vectorStoreFileLastErrorJSON contains the JSON metadata for the struct
|
||||
// [VectorStoreFileLastError]
|
||||
type vectorStoreFileLastErrorJSON struct {
|
||||
Code apijson.Field
|
||||
Message apijson.Field
|
||||
raw string
|
||||
ExtraFields map[string]apijson.Field
|
||||
}
|
||||
|
||||
func (r *VectorStoreFileLastError) UnmarshalJSON(data []byte) (err error) {
|
||||
func (r VectorStoreFileLastError) RawJSON() string { return r.JSON.raw }
|
||||
func (r *VectorStoreFileLastError) UnmarshalJSON(data []byte) error {
|
||||
return apijson.UnmarshalRoot(data, r)
|
||||
}
|
||||
|
||||
func (r vectorStoreFileLastErrorJSON) RawJSON() string {
|
||||
return r.raw
|
||||
}
|
||||
|
||||
// One of `server_error` or `rate_limit_exceeded`.
|
||||
type VectorStoreFileLastErrorCode string
|
||||
type VectorStoreFileLastErrorCode = string
|
||||
|
||||
const (
|
||||
VectorStoreFileLastErrorCodeServerError VectorStoreFileLastErrorCode = "server_error"
|
||||
@@ -201,33 +194,10 @@ const (
|
||||
VectorStoreFileLastErrorCodeInvalidFile VectorStoreFileLastErrorCode = "invalid_file"
|
||||
)
|
||||
|
||||
func (r VectorStoreFileLastErrorCode) IsKnown() bool {
|
||||
switch r {
|
||||
case VectorStoreFileLastErrorCodeServerError, VectorStoreFileLastErrorCodeUnsupportedFile, VectorStoreFileLastErrorCodeInvalidFile:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// The object type, which is always `vector_store.file`.
|
||||
type VectorStoreFileObject string
|
||||
|
||||
const (
|
||||
VectorStoreFileObjectVectorStoreFile VectorStoreFileObject = "vector_store.file"
|
||||
)
|
||||
|
||||
func (r VectorStoreFileObject) IsKnown() bool {
|
||||
switch r {
|
||||
case VectorStoreFileObjectVectorStoreFile:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// The status of the vector store file, which can be either `in_progress`,
|
||||
// `completed`, `cancelled`, or `failed`. The status `completed` indicates that the
|
||||
// vector store file is ready for use.
|
||||
type VectorStoreFileStatus string
|
||||
type VectorStoreFileStatus = string
|
||||
|
||||
const (
|
||||
VectorStoreFileStatusInProgress VectorStoreFileStatus = "in_progress"
|
||||
@@ -236,65 +206,41 @@ const (
|
||||
VectorStoreFileStatusFailed VectorStoreFileStatus = "failed"
|
||||
)
|
||||
|
||||
func (r VectorStoreFileStatus) IsKnown() bool {
|
||||
switch r {
|
||||
case VectorStoreFileStatusInProgress, VectorStoreFileStatusCompleted, VectorStoreFileStatusCancelled, VectorStoreFileStatusFailed:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
type VectorStoreFileDeleted struct {
|
||||
ID string `json:"id,required"`
|
||||
Deleted bool `json:"deleted,required"`
|
||||
Object VectorStoreFileDeletedObject `json:"object,required"`
|
||||
JSON vectorStoreFileDeletedJSON `json:"-"`
|
||||
ID string `json:"id,omitzero,required"`
|
||||
Deleted bool `json:"deleted,omitzero,required"`
|
||||
// This field can be elided, and will be automatically set as
|
||||
// "vector_store.file.deleted".
|
||||
Object constant.VectorStoreFileDeleted `json:"object,required"`
|
||||
JSON struct {
|
||||
ID resp.Field
|
||||
Deleted resp.Field
|
||||
Object resp.Field
|
||||
raw string
|
||||
} `json:"-"`
|
||||
}
|
||||
|
||||
// vectorStoreFileDeletedJSON contains the JSON metadata for the struct
|
||||
// [VectorStoreFileDeleted]
|
||||
type vectorStoreFileDeletedJSON struct {
|
||||
ID apijson.Field
|
||||
Deleted apijson.Field
|
||||
Object apijson.Field
|
||||
raw string
|
||||
ExtraFields map[string]apijson.Field
|
||||
}
|
||||
|
||||
func (r *VectorStoreFileDeleted) UnmarshalJSON(data []byte) (err error) {
|
||||
func (r VectorStoreFileDeleted) RawJSON() string { return r.JSON.raw }
|
||||
func (r *VectorStoreFileDeleted) UnmarshalJSON(data []byte) error {
|
||||
return apijson.UnmarshalRoot(data, r)
|
||||
}
|
||||
|
||||
func (r vectorStoreFileDeletedJSON) RawJSON() string {
|
||||
return r.raw
|
||||
}
|
||||
|
||||
type VectorStoreFileDeletedObject string
|
||||
|
||||
const (
|
||||
VectorStoreFileDeletedObjectVectorStoreFileDeleted VectorStoreFileDeletedObject = "vector_store.file.deleted"
|
||||
)
|
||||
|
||||
func (r VectorStoreFileDeletedObject) IsKnown() bool {
|
||||
switch r {
|
||||
case VectorStoreFileDeletedObjectVectorStoreFileDeleted:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
type BetaVectorStoreFileNewParams struct {
|
||||
// A [File](https://platform.openai.com/docs/api-reference/files) ID that the
|
||||
// vector store should use. Useful for tools like `file_search` that can access
|
||||
// files.
|
||||
FileID param.Field[string] `json:"file_id,required"`
|
||||
FileID param.String `json:"file_id,omitzero,required"`
|
||||
// The chunking strategy used to chunk the file(s). If not set, will use the `auto`
|
||||
// strategy. Only applicable if `file_ids` is non-empty.
|
||||
ChunkingStrategy param.Field[FileChunkingStrategyParamUnion] `json:"chunking_strategy"`
|
||||
ChunkingStrategy FileChunkingStrategyParamUnion `json:"chunking_strategy,omitzero"`
|
||||
apiobject
|
||||
}
|
||||
|
||||
func (f BetaVectorStoreFileNewParams) IsMissing() bool { return param.IsOmitted(f) || f.IsNull() }
|
||||
|
||||
func (r BetaVectorStoreFileNewParams) MarshalJSON() (data []byte, err error) {
|
||||
return apijson.MarshalRoot(r)
|
||||
type shadow BetaVectorStoreFileNewParams
|
||||
return param.MarshalObject(r, (*shadow)(&r))
|
||||
}
|
||||
|
||||
type BetaVectorStoreFileListParams struct {
|
||||
@@ -302,22 +248,29 @@ type BetaVectorStoreFileListParams struct {
|
||||
// in the list. For instance, if you make a list request and receive 100 objects,
|
||||
// ending with obj_foo, your subsequent call can include after=obj_foo in order to
|
||||
// fetch the next page of the list.
|
||||
After param.Field[string] `query:"after"`
|
||||
After param.String `query:"after,omitzero"`
|
||||
// A cursor for use in pagination. `before` is an object ID that defines your place
|
||||
// in the list. For instance, if you make a list request and receive 100 objects,
|
||||
// starting with obj_foo, your subsequent call can include before=obj_foo in order
|
||||
// to fetch the previous page of the list.
|
||||
Before param.Field[string] `query:"before"`
|
||||
Before param.String `query:"before,omitzero"`
|
||||
// Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`.
|
||||
Filter param.Field[BetaVectorStoreFileListParamsFilter] `query:"filter"`
|
||||
//
|
||||
// Any of "in_progress", "completed", "failed", "cancelled"
|
||||
Filter BetaVectorStoreFileListParamsFilter `query:"filter,omitzero"`
|
||||
// A limit on the number of objects to be returned. Limit can range between 1 and
|
||||
// 100, and the default is 20.
|
||||
Limit param.Field[int64] `query:"limit"`
|
||||
Limit param.Int `query:"limit,omitzero"`
|
||||
// Sort order by the `created_at` timestamp of the objects. `asc` for ascending
|
||||
// order and `desc` for descending order.
|
||||
Order param.Field[BetaVectorStoreFileListParamsOrder] `query:"order"`
|
||||
//
|
||||
// Any of "asc", "desc"
|
||||
Order BetaVectorStoreFileListParamsOrder `query:"order,omitzero"`
|
||||
apiobject
|
||||
}
|
||||
|
||||
func (f BetaVectorStoreFileListParams) IsMissing() bool { return param.IsOmitted(f) || f.IsNull() }
|
||||
|
||||
// URLQuery serializes [BetaVectorStoreFileListParams]'s query parameters as
|
||||
// `url.Values`.
|
||||
func (r BetaVectorStoreFileListParams) URLQuery() (v url.Values) {
|
||||
@@ -337,14 +290,6 @@ const (
|
||||
BetaVectorStoreFileListParamsFilterCancelled BetaVectorStoreFileListParamsFilter = "cancelled"
|
||||
)
|
||||
|
||||
func (r BetaVectorStoreFileListParamsFilter) IsKnown() bool {
|
||||
switch r {
|
||||
case BetaVectorStoreFileListParamsFilterInProgress, BetaVectorStoreFileListParamsFilterCompleted, BetaVectorStoreFileListParamsFilterFailed, BetaVectorStoreFileListParamsFilterCancelled:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// Sort order by the `created_at` timestamp of the objects. `asc` for ascending
|
||||
// order and `desc` for descending order.
|
||||
type BetaVectorStoreFileListParamsOrder string
|
||||
@@ -353,11 +298,3 @@ const (
|
||||
BetaVectorStoreFileListParamsOrderAsc BetaVectorStoreFileListParamsOrder = "asc"
|
||||
BetaVectorStoreFileListParamsOrderDesc BetaVectorStoreFileListParamsOrder = "desc"
|
||||
)
|
||||
|
||||
func (r BetaVectorStoreFileListParamsOrder) IsKnown() bool {
|
||||
switch r {
|
||||
case BetaVectorStoreFileListParamsOrderAsc, BetaVectorStoreFileListParamsOrderDesc:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
@@ -29,10 +29,10 @@ func TestBetaVectorStoreFileNewWithOptionalParams(t *testing.T) {
|
||||
context.TODO(),
|
||||
"vs_abc123",
|
||||
openai.BetaVectorStoreFileNewParams{
|
||||
FileID: openai.F("file_id"),
|
||||
ChunkingStrategy: openai.F[openai.FileChunkingStrategyParamUnion](openai.AutoFileChunkingStrategyParam{
|
||||
Type: openai.F(openai.AutoFileChunkingStrategyParamTypeAuto),
|
||||
}),
|
||||
FileID: openai.String("file_id"),
|
||||
ChunkingStrategy: openai.FileChunkingStrategyParamUnion{
|
||||
OfAuto: &openai.AutoFileChunkingStrategyParam{},
|
||||
},
|
||||
},
|
||||
)
|
||||
if err != nil {
|
||||
@@ -86,11 +86,11 @@ func TestBetaVectorStoreFileListWithOptionalParams(t *testing.T) {
|
||||
context.TODO(),
|
||||
"vector_store_id",
|
||||
openai.BetaVectorStoreFileListParams{
|
||||
After: openai.F("after"),
|
||||
Before: openai.F("before"),
|
||||
Filter: openai.F(openai.BetaVectorStoreFileListParamsFilterInProgress),
|
||||
Limit: openai.F(int64(0)),
|
||||
Order: openai.F(openai.BetaVectorStoreFileListParamsOrderAsc),
|
||||
After: openai.String("after"),
|
||||
Before: openai.String("before"),
|
||||
Filter: openai.BetaVectorStoreFileListParamsFilterInProgress,
|
||||
Limit: openai.Int(0),
|
||||
Order: openai.BetaVectorStoreFileListParamsOrderAsc,
|
||||
},
|
||||
)
|
||||
if err != nil {
|
||||
|
||||
@@ -11,10 +11,12 @@ import (
|
||||
|
||||
"github.com/openai/openai-go/internal/apijson"
|
||||
"github.com/openai/openai-go/internal/apiquery"
|
||||
"github.com/openai/openai-go/internal/param"
|
||||
"github.com/openai/openai-go/internal/requestconfig"
|
||||
"github.com/openai/openai-go/option"
|
||||
"github.com/openai/openai-go/packages/pagination"
|
||||
"github.com/openai/openai-go/packages/param"
|
||||
"github.com/openai/openai-go/packages/resp"
|
||||
"github.com/openai/openai-go/shared/constant"
|
||||
)
|
||||
|
||||
// BetaVectorStoreFileBatchService contains methods and other services that help
|
||||
@@ -30,8 +32,8 @@ type BetaVectorStoreFileBatchService struct {
|
||||
// NewBetaVectorStoreFileBatchService generates a new service that applies the
|
||||
// given options to each request. These options are applied after the parent
|
||||
// client's options (if there is one), and before any request-specific options.
|
||||
func NewBetaVectorStoreFileBatchService(opts ...option.RequestOption) (r *BetaVectorStoreFileBatchService) {
|
||||
r = &BetaVectorStoreFileBatchService{}
|
||||
func NewBetaVectorStoreFileBatchService(opts ...option.RequestOption) (r BetaVectorStoreFileBatchService) {
|
||||
r = BetaVectorStoreFileBatchService{}
|
||||
r.Options = opts
|
||||
return
|
||||
}
|
||||
@@ -118,97 +120,71 @@ func (r *BetaVectorStoreFileBatchService) ListFilesAutoPaging(ctx context.Contex
|
||||
// A batch of files attached to a vector store.
|
||||
type VectorStoreFileBatch struct {
|
||||
// The identifier, which can be referenced in API endpoints.
|
||||
ID string `json:"id,required"`
|
||||
ID string `json:"id,omitzero,required"`
|
||||
// The Unix timestamp (in seconds) for when the vector store files batch was
|
||||
// created.
|
||||
CreatedAt int64 `json:"created_at,required"`
|
||||
FileCounts VectorStoreFileBatchFileCounts `json:"file_counts,required"`
|
||||
CreatedAt int64 `json:"created_at,omitzero,required"`
|
||||
FileCounts VectorStoreFileBatchFileCounts `json:"file_counts,omitzero,required"`
|
||||
// The object type, which is always `vector_store.file_batch`.
|
||||
Object VectorStoreFileBatchObject `json:"object,required"`
|
||||
//
|
||||
// This field can be elided, and will be automatically set as
|
||||
// "vector_store.files_batch".
|
||||
Object constant.VectorStoreFilesBatch `json:"object,required"`
|
||||
// The status of the vector store files batch, which can be either `in_progress`,
|
||||
// `completed`, `cancelled` or `failed`.
|
||||
Status VectorStoreFileBatchStatus `json:"status,required"`
|
||||
//
|
||||
// Any of "in_progress", "completed", "cancelled", "failed"
|
||||
Status string `json:"status,omitzero,required"`
|
||||
// The ID of the
|
||||
// [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object)
|
||||
// that the [File](https://platform.openai.com/docs/api-reference/files) is
|
||||
// attached to.
|
||||
VectorStoreID string `json:"vector_store_id,required"`
|
||||
JSON vectorStoreFileBatchJSON `json:"-"`
|
||||
VectorStoreID string `json:"vector_store_id,omitzero,required"`
|
||||
JSON struct {
|
||||
ID resp.Field
|
||||
CreatedAt resp.Field
|
||||
FileCounts resp.Field
|
||||
Object resp.Field
|
||||
Status resp.Field
|
||||
VectorStoreID resp.Field
|
||||
raw string
|
||||
} `json:"-"`
|
||||
}
|
||||
|
||||
// vectorStoreFileBatchJSON contains the JSON metadata for the struct
|
||||
// [VectorStoreFileBatch]
|
||||
type vectorStoreFileBatchJSON struct {
|
||||
ID apijson.Field
|
||||
CreatedAt apijson.Field
|
||||
FileCounts apijson.Field
|
||||
Object apijson.Field
|
||||
Status apijson.Field
|
||||
VectorStoreID apijson.Field
|
||||
raw string
|
||||
ExtraFields map[string]apijson.Field
|
||||
}
|
||||
|
||||
func (r *VectorStoreFileBatch) UnmarshalJSON(data []byte) (err error) {
|
||||
func (r VectorStoreFileBatch) RawJSON() string { return r.JSON.raw }
|
||||
func (r *VectorStoreFileBatch) UnmarshalJSON(data []byte) error {
|
||||
return apijson.UnmarshalRoot(data, r)
|
||||
}
|
||||
|
||||
func (r vectorStoreFileBatchJSON) RawJSON() string {
|
||||
return r.raw
|
||||
}
|
||||
|
||||
type VectorStoreFileBatchFileCounts struct {
|
||||
// The number of files that where cancelled.
|
||||
Cancelled int64 `json:"cancelled,required"`
|
||||
Cancelled int64 `json:"cancelled,omitzero,required"`
|
||||
// The number of files that have been processed.
|
||||
Completed int64 `json:"completed,required"`
|
||||
Completed int64 `json:"completed,omitzero,required"`
|
||||
// The number of files that have failed to process.
|
||||
Failed int64 `json:"failed,required"`
|
||||
Failed int64 `json:"failed,omitzero,required"`
|
||||
// The number of files that are currently being processed.
|
||||
InProgress int64 `json:"in_progress,required"`
|
||||
InProgress int64 `json:"in_progress,omitzero,required"`
|
||||
// The total number of files.
|
||||
Total int64 `json:"total,required"`
|
||||
JSON vectorStoreFileBatchFileCountsJSON `json:"-"`
|
||||
Total int64 `json:"total,omitzero,required"`
|
||||
JSON struct {
|
||||
Cancelled resp.Field
|
||||
Completed resp.Field
|
||||
Failed resp.Field
|
||||
InProgress resp.Field
|
||||
Total resp.Field
|
||||
raw string
|
||||
} `json:"-"`
|
||||
}
|
||||
|
||||
// vectorStoreFileBatchFileCountsJSON contains the JSON metadata for the struct
|
||||
// [VectorStoreFileBatchFileCounts]
|
||||
type vectorStoreFileBatchFileCountsJSON struct {
|
||||
Cancelled apijson.Field
|
||||
Completed apijson.Field
|
||||
Failed apijson.Field
|
||||
InProgress apijson.Field
|
||||
Total apijson.Field
|
||||
raw string
|
||||
ExtraFields map[string]apijson.Field
|
||||
}
|
||||
|
||||
func (r *VectorStoreFileBatchFileCounts) UnmarshalJSON(data []byte) (err error) {
|
||||
func (r VectorStoreFileBatchFileCounts) RawJSON() string { return r.JSON.raw }
|
||||
func (r *VectorStoreFileBatchFileCounts) UnmarshalJSON(data []byte) error {
|
||||
return apijson.UnmarshalRoot(data, r)
|
||||
}
|
||||
|
||||
func (r vectorStoreFileBatchFileCountsJSON) RawJSON() string {
|
||||
return r.raw
|
||||
}
|
||||
|
||||
// The object type, which is always `vector_store.file_batch`.
|
||||
type VectorStoreFileBatchObject string
|
||||
|
||||
const (
|
||||
VectorStoreFileBatchObjectVectorStoreFilesBatch VectorStoreFileBatchObject = "vector_store.files_batch"
|
||||
)
|
||||
|
||||
func (r VectorStoreFileBatchObject) IsKnown() bool {
|
||||
switch r {
|
||||
case VectorStoreFileBatchObjectVectorStoreFilesBatch:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// The status of the vector store files batch, which can be either `in_progress`,
|
||||
// `completed`, `cancelled` or `failed`.
|
||||
type VectorStoreFileBatchStatus string
|
||||
type VectorStoreFileBatchStatus = string
|
||||
|
||||
const (
|
||||
VectorStoreFileBatchStatusInProgress VectorStoreFileBatchStatus = "in_progress"
|
||||
@@ -217,26 +193,22 @@ const (
|
||||
VectorStoreFileBatchStatusFailed VectorStoreFileBatchStatus = "failed"
|
||||
)
|
||||
|
||||
func (r VectorStoreFileBatchStatus) IsKnown() bool {
|
||||
switch r {
|
||||
case VectorStoreFileBatchStatusInProgress, VectorStoreFileBatchStatusCompleted, VectorStoreFileBatchStatusCancelled, VectorStoreFileBatchStatusFailed:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
type BetaVectorStoreFileBatchNewParams struct {
|
||||
// A list of [File](https://platform.openai.com/docs/api-reference/files) IDs that
|
||||
// the vector store should use. Useful for tools like `file_search` that can access
|
||||
// files.
|
||||
FileIDs param.Field[[]string] `json:"file_ids,required"`
|
||||
FileIDs []string `json:"file_ids,omitzero,required"`
|
||||
// The chunking strategy used to chunk the file(s). If not set, will use the `auto`
|
||||
// strategy. Only applicable if `file_ids` is non-empty.
|
||||
ChunkingStrategy param.Field[FileChunkingStrategyParamUnion] `json:"chunking_strategy"`
|
||||
ChunkingStrategy FileChunkingStrategyParamUnion `json:"chunking_strategy,omitzero"`
|
||||
apiobject
|
||||
}
|
||||
|
||||
func (f BetaVectorStoreFileBatchNewParams) IsMissing() bool { return param.IsOmitted(f) || f.IsNull() }
|
||||
|
||||
func (r BetaVectorStoreFileBatchNewParams) MarshalJSON() (data []byte, err error) {
|
||||
return apijson.MarshalRoot(r)
|
||||
type shadow BetaVectorStoreFileBatchNewParams
|
||||
return param.MarshalObject(r, (*shadow)(&r))
|
||||
}
|
||||
|
||||
type BetaVectorStoreFileBatchListFilesParams struct {
|
||||
@@ -244,20 +216,29 @@ type BetaVectorStoreFileBatchListFilesParams struct {
|
||||
// in the list. For instance, if you make a list request and receive 100 objects,
|
||||
// ending with obj_foo, your subsequent call can include after=obj_foo in order to
|
||||
// fetch the next page of the list.
|
||||
After param.Field[string] `query:"after"`
|
||||
After param.String `query:"after,omitzero"`
|
||||
// A cursor for use in pagination. `before` is an object ID that defines your place
|
||||
// in the list. For instance, if you make a list request and receive 100 objects,
|
||||
// starting with obj_foo, your subsequent call can include before=obj_foo in order
|
||||
// to fetch the previous page of the list.
|
||||
Before param.Field[string] `query:"before"`
|
||||
Before param.String `query:"before,omitzero"`
|
||||
// Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`.
|
||||
Filter param.Field[BetaVectorStoreFileBatchListFilesParamsFilter] `query:"filter"`
|
||||
//
|
||||
// Any of "in_progress", "completed", "failed", "cancelled"
|
||||
Filter BetaVectorStoreFileBatchListFilesParamsFilter `query:"filter,omitzero"`
|
||||
// A limit on the number of objects to be returned. Limit can range between 1 and
|
||||
// 100, and the default is 20.
|
||||
Limit param.Field[int64] `query:"limit"`
|
||||
Limit param.Int `query:"limit,omitzero"`
|
||||
// Sort order by the `created_at` timestamp of the objects. `asc` for ascending
|
||||
// order and `desc` for descending order.
|
||||
Order param.Field[BetaVectorStoreFileBatchListFilesParamsOrder] `query:"order"`
|
||||
//
|
||||
// Any of "asc", "desc"
|
||||
Order BetaVectorStoreFileBatchListFilesParamsOrder `query:"order,omitzero"`
|
||||
apiobject
|
||||
}
|
||||
|
||||
func (f BetaVectorStoreFileBatchListFilesParams) IsMissing() bool {
|
||||
return param.IsOmitted(f) || f.IsNull()
|
||||
}
|
||||
|
||||
// URLQuery serializes [BetaVectorStoreFileBatchListFilesParams]'s query parameters
|
||||
@@ -279,14 +260,6 @@ const (
|
||||
BetaVectorStoreFileBatchListFilesParamsFilterCancelled BetaVectorStoreFileBatchListFilesParamsFilter = "cancelled"
|
||||
)
|
||||
|
||||
func (r BetaVectorStoreFileBatchListFilesParamsFilter) IsKnown() bool {
|
||||
switch r {
|
||||
case BetaVectorStoreFileBatchListFilesParamsFilterInProgress, BetaVectorStoreFileBatchListFilesParamsFilterCompleted, BetaVectorStoreFileBatchListFilesParamsFilterFailed, BetaVectorStoreFileBatchListFilesParamsFilterCancelled:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// Sort order by the `created_at` timestamp of the objects. `asc` for ascending
|
||||
// order and `desc` for descending order.
|
||||
type BetaVectorStoreFileBatchListFilesParamsOrder string
|
||||
@@ -295,11 +268,3 @@ const (
|
||||
BetaVectorStoreFileBatchListFilesParamsOrderAsc BetaVectorStoreFileBatchListFilesParamsOrder = "asc"
|
||||
BetaVectorStoreFileBatchListFilesParamsOrderDesc BetaVectorStoreFileBatchListFilesParamsOrder = "desc"
|
||||
)
|
||||
|
||||
func (r BetaVectorStoreFileBatchListFilesParamsOrder) IsKnown() bool {
|
||||
switch r {
|
||||
case BetaVectorStoreFileBatchListFilesParamsOrderAsc, BetaVectorStoreFileBatchListFilesParamsOrderDesc:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
@@ -29,10 +29,10 @@ func TestBetaVectorStoreFileBatchNewWithOptionalParams(t *testing.T) {
|
||||
context.TODO(),
|
||||
"vs_abc123",
|
||||
openai.BetaVectorStoreFileBatchNewParams{
|
||||
FileIDs: openai.F([]string{"string"}),
|
||||
ChunkingStrategy: openai.F[openai.FileChunkingStrategyParamUnion](openai.AutoFileChunkingStrategyParam{
|
||||
Type: openai.F(openai.AutoFileChunkingStrategyParamTypeAuto),
|
||||
}),
|
||||
FileIDs: []string{"string"},
|
||||
ChunkingStrategy: openai.FileChunkingStrategyParamUnion{
|
||||
OfAuto: &openai.AutoFileChunkingStrategyParam{},
|
||||
},
|
||||
},
|
||||
)
|
||||
if err != nil {
|
||||
@@ -113,11 +113,11 @@ func TestBetaVectorStoreFileBatchListFilesWithOptionalParams(t *testing.T) {
|
||||
"vector_store_id",
|
||||
"batch_id",
|
||||
openai.BetaVectorStoreFileBatchListFilesParams{
|
||||
After: openai.F("after"),
|
||||
Before: openai.F("before"),
|
||||
Filter: openai.F(openai.BetaVectorStoreFileBatchListFilesParamsFilterInProgress),
|
||||
Limit: openai.F(int64(0)),
|
||||
Order: openai.F(openai.BetaVectorStoreFileBatchListFilesParamsOrderAsc),
|
||||
After: openai.String("after"),
|
||||
Before: openai.String("before"),
|
||||
Filter: openai.BetaVectorStoreFileBatchListFilesParamsFilterInProgress,
|
||||
Limit: openai.Int(0),
|
||||
Order: openai.BetaVectorStoreFileBatchListFilesParamsOrderAsc,
|
||||
},
|
||||
)
|
||||
if err != nil {
|
||||
|
||||
6
chat.go
6
chat.go
@@ -14,14 +14,14 @@ import (
|
||||
// the [NewChatService] method instead.
|
||||
type ChatService struct {
|
||||
Options []option.RequestOption
|
||||
Completions *ChatCompletionService
|
||||
Completions ChatCompletionService
|
||||
}
|
||||
|
||||
// NewChatService generates a new service that applies the given options to each
|
||||
// request. These options are applied after the parent client's options (if there
|
||||
// is one), and before any request-specific options.
|
||||
func NewChatService(opts ...option.RequestOption) (r *ChatService) {
|
||||
r = &ChatService{}
|
||||
func NewChatService(opts ...option.RequestOption) (r ChatService) {
|
||||
r = ChatService{}
|
||||
r.Options = opts
|
||||
r.Completions = NewChatCompletionService(opts...)
|
||||
return
|
||||
|
||||
2049
chatcompletion.go
2049
chatcompletion.go
File diff suppressed because it is too large
Load Diff
@@ -27,69 +27,74 @@ func TestChatCompletionNewWithOptionalParams(t *testing.T) {
|
||||
option.WithAPIKey("My API Key"),
|
||||
)
|
||||
_, err := client.Chat.Completions.New(context.TODO(), openai.ChatCompletionNewParams{
|
||||
Messages: openai.F([]openai.ChatCompletionMessageParamUnion{openai.ChatCompletionDeveloperMessageParam{
|
||||
Content: openai.F([]openai.ChatCompletionContentPartTextParam{{Text: openai.F("text"), Type: openai.F(openai.ChatCompletionContentPartTextTypeText)}}),
|
||||
Role: openai.F(openai.ChatCompletionDeveloperMessageParamRoleDeveloper),
|
||||
Name: openai.F("name"),
|
||||
}}),
|
||||
Model: openai.F(openai.ChatModelO3Mini),
|
||||
Audio: openai.F(openai.ChatCompletionAudioParam{
|
||||
Format: openai.F(openai.ChatCompletionAudioParamFormatWAV),
|
||||
Voice: openai.F(openai.ChatCompletionAudioParamVoiceAlloy),
|
||||
}),
|
||||
FrequencyPenalty: openai.F(-2.000000),
|
||||
FunctionCall: openai.F[openai.ChatCompletionNewParamsFunctionCallUnion](openai.ChatCompletionNewParamsFunctionCallAuto(openai.ChatCompletionNewParamsFunctionCallAutoNone)),
|
||||
Functions: openai.F([]openai.ChatCompletionNewParamsFunction{{
|
||||
Name: openai.F("name"),
|
||||
Description: openai.F("description"),
|
||||
Parameters: openai.F(shared.FunctionParameters{
|
||||
Messages: []openai.ChatCompletionMessageParamUnion{{
|
||||
OfDeveloper: &openai.ChatCompletionDeveloperMessageParam{
|
||||
Content: []openai.ChatCompletionContentPartTextParam{{Text: openai.String("text")}},
|
||||
Name: openai.String("name"),
|
||||
},
|
||||
}},
|
||||
Model: openai.ChatModelO3Mini,
|
||||
Audio: openai.ChatCompletionAudioParam{
|
||||
Format: "wav",
|
||||
Voice: "alloy",
|
||||
},
|
||||
FrequencyPenalty: openai.Float(-2),
|
||||
FunctionCall: openai.ChatCompletionNewParamsFunctionCallUnion{
|
||||
OfAuto: "none",
|
||||
},
|
||||
Functions: []openai.ChatCompletionNewParamsFunction{{
|
||||
Name: openai.String("name"),
|
||||
Description: openai.String("description"),
|
||||
Parameters: shared.FunctionParameters{
|
||||
"foo": "bar",
|
||||
}),
|
||||
}}),
|
||||
LogitBias: openai.F(map[string]int64{
|
||||
"foo": int64(0),
|
||||
}),
|
||||
Logprobs: openai.F(true),
|
||||
MaxCompletionTokens: openai.F(int64(0)),
|
||||
MaxTokens: openai.F(int64(0)),
|
||||
Metadata: openai.F(shared.MetadataParam{
|
||||
},
|
||||
}},
|
||||
LogitBias: map[string]int64{
|
||||
"foo": 0,
|
||||
},
|
||||
Logprobs: openai.Bool(true),
|
||||
MaxCompletionTokens: openai.Int(0),
|
||||
MaxTokens: openai.Int(0),
|
||||
Metadata: shared.MetadataParam{
|
||||
"foo": "string",
|
||||
}),
|
||||
Modalities: openai.F([]openai.ChatCompletionModality{openai.ChatCompletionModalityText}),
|
||||
N: openai.F(int64(1)),
|
||||
ParallelToolCalls: openai.F(true),
|
||||
Prediction: openai.F(openai.ChatCompletionPredictionContentParam{
|
||||
Content: openai.F([]openai.ChatCompletionContentPartTextParam{{Text: openai.F("text"), Type: openai.F(openai.ChatCompletionContentPartTextTypeText)}}),
|
||||
Type: openai.F(openai.ChatCompletionPredictionContentTypeContent),
|
||||
}),
|
||||
PresencePenalty: openai.F(-2.000000),
|
||||
ReasoningEffort: openai.F(openai.ChatCompletionReasoningEffortLow),
|
||||
ResponseFormat: openai.F[openai.ChatCompletionNewParamsResponseFormatUnion](shared.ResponseFormatTextParam{
|
||||
Type: openai.F(shared.ResponseFormatTextTypeText),
|
||||
}),
|
||||
Seed: openai.F(int64(0)),
|
||||
ServiceTier: openai.F(openai.ChatCompletionNewParamsServiceTierAuto),
|
||||
Stop: openai.F[openai.ChatCompletionNewParamsStopUnion](shared.UnionString("string")),
|
||||
Store: openai.F(true),
|
||||
StreamOptions: openai.F(openai.ChatCompletionStreamOptionsParam{
|
||||
IncludeUsage: openai.F(true),
|
||||
}),
|
||||
Temperature: openai.F(1.000000),
|
||||
ToolChoice: openai.F[openai.ChatCompletionToolChoiceOptionUnionParam](openai.ChatCompletionToolChoiceOptionAuto(openai.ChatCompletionToolChoiceOptionAutoNone)),
|
||||
Tools: openai.F([]openai.ChatCompletionToolParam{{
|
||||
Function: openai.F(shared.FunctionDefinitionParam{
|
||||
Name: openai.F("name"),
|
||||
Description: openai.F("description"),
|
||||
Parameters: openai.F(shared.FunctionParameters{
|
||||
},
|
||||
Modalities: []openai.ChatCompletionModality{openai.ChatCompletionModalityText},
|
||||
N: openai.Int(1),
|
||||
ParallelToolCalls: openai.Bool(true),
|
||||
Prediction: openai.ChatCompletionPredictionContentParam{
|
||||
Content: []openai.ChatCompletionContentPartTextParam{{Text: openai.String("text")}},
|
||||
},
|
||||
PresencePenalty: openai.Float(-2),
|
||||
ReasoningEffort: openai.ChatCompletionReasoningEffortLow,
|
||||
ResponseFormat: openai.ChatCompletionNewParamsResponseFormatUnion{
|
||||
OfResponseFormatText: &shared.ResponseFormatTextParam{},
|
||||
},
|
||||
Seed: openai.Int(0),
|
||||
ServiceTier: openai.ChatCompletionNewParamsServiceTierAuto,
|
||||
Stop: openai.ChatCompletionNewParamsStopUnion{
|
||||
OfString: openai.String("string"),
|
||||
},
|
||||
Store: openai.Bool(true),
|
||||
StreamOptions: openai.ChatCompletionStreamOptionsParam{
|
||||
IncludeUsage: openai.Bool(true),
|
||||
},
|
||||
Temperature: openai.Float(1),
|
||||
ToolChoice: openai.ChatCompletionToolChoiceOptionUnionParam{
|
||||
OfAuto: "none",
|
||||
},
|
||||
Tools: []openai.ChatCompletionToolParam{{
|
||||
Function: shared.FunctionDefinitionParam{
|
||||
Name: openai.String("name"),
|
||||
Description: openai.String("description"),
|
||||
Parameters: shared.FunctionParameters{
|
||||
"foo": "bar",
|
||||
}),
|
||||
Strict: openai.F(true),
|
||||
}),
|
||||
Type: openai.F(openai.ChatCompletionToolTypeFunction),
|
||||
}}),
|
||||
TopLogprobs: openai.F(int64(0)),
|
||||
TopP: openai.F(1.000000),
|
||||
User: openai.F("user-1234"),
|
||||
},
|
||||
Strict: openai.Bool(true),
|
||||
},
|
||||
}},
|
||||
TopLogprobs: openai.Int(0),
|
||||
TopP: openai.Float(1),
|
||||
User: openai.String("user-1234"),
|
||||
})
|
||||
if err != nil {
|
||||
var apierr *openai.Error
|
||||
@@ -138,9 +143,9 @@ func TestChatCompletionUpdate(t *testing.T) {
|
||||
context.TODO(),
|
||||
"completion_id",
|
||||
openai.ChatCompletionUpdateParams{
|
||||
Metadata: openai.F(shared.MetadataParam{
|
||||
Metadata: shared.MetadataParam{
|
||||
"foo": "string",
|
||||
}),
|
||||
},
|
||||
},
|
||||
)
|
||||
if err != nil {
|
||||
@@ -165,13 +170,13 @@ func TestChatCompletionListWithOptionalParams(t *testing.T) {
|
||||
option.WithAPIKey("My API Key"),
|
||||
)
|
||||
_, err := client.Chat.Completions.List(context.TODO(), openai.ChatCompletionListParams{
|
||||
After: openai.F("after"),
|
||||
Limit: openai.F(int64(0)),
|
||||
Metadata: openai.F(shared.MetadataParam{
|
||||
After: openai.String("after"),
|
||||
Limit: openai.Int(0),
|
||||
Metadata: shared.MetadataParam{
|
||||
"foo": "string",
|
||||
}),
|
||||
Model: openai.F("model"),
|
||||
Order: openai.F(openai.ChatCompletionListParamsOrderAsc),
|
||||
},
|
||||
Model: openai.String("model"),
|
||||
Order: openai.ChatCompletionListParamsOrderAsc,
|
||||
})
|
||||
if err != nil {
|
||||
var apierr *openai.Error
|
||||
|
||||
@@ -10,10 +10,10 @@ import (
|
||||
"net/url"
|
||||
|
||||
"github.com/openai/openai-go/internal/apiquery"
|
||||
"github.com/openai/openai-go/internal/param"
|
||||
"github.com/openai/openai-go/internal/requestconfig"
|
||||
"github.com/openai/openai-go/option"
|
||||
"github.com/openai/openai-go/packages/pagination"
|
||||
"github.com/openai/openai-go/packages/param"
|
||||
)
|
||||
|
||||
// ChatCompletionMessageService contains methods and other services that help with
|
||||
@@ -29,8 +29,8 @@ type ChatCompletionMessageService struct {
|
||||
// NewChatCompletionMessageService generates a new service that applies the given
|
||||
// options to each request. These options are applied after the parent client's
|
||||
// options (if there is one), and before any request-specific options.
|
||||
func NewChatCompletionMessageService(opts ...option.RequestOption) (r *ChatCompletionMessageService) {
|
||||
r = &ChatCompletionMessageService{}
|
||||
func NewChatCompletionMessageService(opts ...option.RequestOption) (r ChatCompletionMessageService) {
|
||||
r = ChatCompletionMessageService{}
|
||||
r.Options = opts
|
||||
return
|
||||
}
|
||||
@@ -66,14 +66,19 @@ func (r *ChatCompletionMessageService) ListAutoPaging(ctx context.Context, compl
|
||||
|
||||
type ChatCompletionMessageListParams struct {
|
||||
// Identifier for the last message from the previous pagination request.
|
||||
After param.Field[string] `query:"after"`
|
||||
After param.String `query:"after,omitzero"`
|
||||
// Number of messages to retrieve.
|
||||
Limit param.Field[int64] `query:"limit"`
|
||||
Limit param.Int `query:"limit,omitzero"`
|
||||
// Sort order for messages by timestamp. Use `asc` for ascending order or `desc`
|
||||
// for descending order. Defaults to `asc`.
|
||||
Order param.Field[ChatCompletionMessageListParamsOrder] `query:"order"`
|
||||
//
|
||||
// Any of "asc", "desc"
|
||||
Order ChatCompletionMessageListParamsOrder `query:"order,omitzero"`
|
||||
apiobject
|
||||
}
|
||||
|
||||
func (f ChatCompletionMessageListParams) IsMissing() bool { return param.IsOmitted(f) || f.IsNull() }
|
||||
|
||||
// URLQuery serializes [ChatCompletionMessageListParams]'s query parameters as
|
||||
// `url.Values`.
|
||||
func (r ChatCompletionMessageListParams) URLQuery() (v url.Values) {
|
||||
@@ -91,11 +96,3 @@ const (
|
||||
ChatCompletionMessageListParamsOrderAsc ChatCompletionMessageListParamsOrder = "asc"
|
||||
ChatCompletionMessageListParamsOrderDesc ChatCompletionMessageListParamsOrder = "desc"
|
||||
)
|
||||
|
||||
func (r ChatCompletionMessageListParamsOrder) IsKnown() bool {
|
||||
switch r {
|
||||
case ChatCompletionMessageListParamsOrderAsc, ChatCompletionMessageListParamsOrderDesc:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
@@ -29,9 +29,9 @@ func TestChatCompletionMessageListWithOptionalParams(t *testing.T) {
|
||||
context.TODO(),
|
||||
"completion_id",
|
||||
openai.ChatCompletionMessageListParams{
|
||||
After: openai.F("after"),
|
||||
Limit: openai.F(int64(0)),
|
||||
Order: openai.F(openai.ChatCompletionMessageListParamsOrderAsc),
|
||||
After: openai.String("after"),
|
||||
Limit: openai.Int(0),
|
||||
Order: openai.ChatCompletionMessageListParamsOrderAsc,
|
||||
},
|
||||
)
|
||||
if err != nil {
|
||||
|
||||
28
client.go
28
client.go
@@ -16,25 +16,25 @@ import (
|
||||
// directly, and instead use the [NewClient] method instead.
|
||||
type Client struct {
|
||||
Options []option.RequestOption
|
||||
Completions *CompletionService
|
||||
Chat *ChatService
|
||||
Embeddings *EmbeddingService
|
||||
Files *FileService
|
||||
Images *ImageService
|
||||
Audio *AudioService
|
||||
Moderations *ModerationService
|
||||
Models *ModelService
|
||||
FineTuning *FineTuningService
|
||||
Beta *BetaService
|
||||
Batches *BatchService
|
||||
Uploads *UploadService
|
||||
Completions CompletionService
|
||||
Chat ChatService
|
||||
Embeddings EmbeddingService
|
||||
Files FileService
|
||||
Images ImageService
|
||||
Audio AudioService
|
||||
Moderations ModerationService
|
||||
Models ModelService
|
||||
FineTuning FineTuningService
|
||||
Beta BetaService
|
||||
Batches BatchService
|
||||
Uploads UploadService
|
||||
}
|
||||
|
||||
// NewClient generates a new client with the default option read from the
|
||||
// environment (OPENAI_API_KEY, OPENAI_ORG_ID, OPENAI_PROJECT_ID). The option
|
||||
// passed in as arguments are applied after these default arguments, and all option
|
||||
// will be passed down to the services and requests that this client makes.
|
||||
func NewClient(opts ...option.RequestOption) (r *Client) {
|
||||
func NewClient(opts ...option.RequestOption) (r Client) {
|
||||
defaults := []option.RequestOption{option.WithEnvironmentProduction()}
|
||||
if o, ok := os.LookupEnv("OPENAI_API_KEY"); ok {
|
||||
defaults = append(defaults, option.WithAPIKey(o))
|
||||
@@ -47,7 +47,7 @@ func NewClient(opts ...option.RequestOption) (r *Client) {
|
||||
}
|
||||
opts = append(defaults, opts...)
|
||||
|
||||
r = &Client{Options: opts}
|
||||
r = Client{Options: opts}
|
||||
|
||||
r.Completions = NewCompletionService(opts...)
|
||||
r.Chat = NewChatService(opts...)
|
||||
|
||||
126
client_test.go
126
client_test.go
@@ -39,11 +39,14 @@ func TestUserAgentHeader(t *testing.T) {
|
||||
}),
|
||||
)
|
||||
client.Chat.Completions.New(context.Background(), openai.ChatCompletionNewParams{
|
||||
Messages: openai.F([]openai.ChatCompletionMessageParamUnion{openai.ChatCompletionUserMessageParam{
|
||||
Role: openai.F(openai.ChatCompletionUserMessageParamRoleUser),
|
||||
Content: openai.F([]openai.ChatCompletionContentPartUnionParam{openai.ChatCompletionContentPartTextParam{Text: openai.F("text"), Type: openai.F(openai.ChatCompletionContentPartTextTypeText)}}),
|
||||
}}),
|
||||
Model: openai.F(openai.ChatModelO3Mini),
|
||||
Messages: []openai.ChatCompletionMessageParamUnion{{
|
||||
OfUser: &openai.ChatCompletionUserMessageParam{
|
||||
Content: []openai.ChatCompletionContentPartUnionParam{{
|
||||
OfText: &openai.ChatCompletionContentPartTextParam{Text: openai.String("text")},
|
||||
}},
|
||||
},
|
||||
}},
|
||||
Model: openai.ChatModelO3Mini,
|
||||
})
|
||||
if userAgent != fmt.Sprintf("OpenAI/Go %s", internal.PackageVersion) {
|
||||
t.Errorf("Expected User-Agent to be correct, but got: %#v", userAgent)
|
||||
@@ -68,11 +71,14 @@ func TestRetryAfter(t *testing.T) {
|
||||
}),
|
||||
)
|
||||
_, err := client.Chat.Completions.New(context.Background(), openai.ChatCompletionNewParams{
|
||||
Messages: openai.F([]openai.ChatCompletionMessageParamUnion{openai.ChatCompletionUserMessageParam{
|
||||
Role: openai.F(openai.ChatCompletionUserMessageParamRoleUser),
|
||||
Content: openai.F([]openai.ChatCompletionContentPartUnionParam{openai.ChatCompletionContentPartTextParam{Text: openai.F("text"), Type: openai.F(openai.ChatCompletionContentPartTextTypeText)}}),
|
||||
}}),
|
||||
Model: openai.F(openai.ChatModelO3Mini),
|
||||
Messages: []openai.ChatCompletionMessageParamUnion{{
|
||||
OfUser: &openai.ChatCompletionUserMessageParam{
|
||||
Content: []openai.ChatCompletionContentPartUnionParam{{
|
||||
OfText: &openai.ChatCompletionContentPartTextParam{Text: openai.String("text")},
|
||||
}},
|
||||
},
|
||||
}},
|
||||
Model: openai.ChatModelO3Mini,
|
||||
})
|
||||
if err == nil {
|
||||
t.Error("Expected there to be a cancel error")
|
||||
@@ -108,11 +114,14 @@ func TestDeleteRetryCountHeader(t *testing.T) {
|
||||
option.WithHeaderDel("X-Stainless-Retry-Count"),
|
||||
)
|
||||
_, err := client.Chat.Completions.New(context.Background(), openai.ChatCompletionNewParams{
|
||||
Messages: openai.F([]openai.ChatCompletionMessageParamUnion{openai.ChatCompletionUserMessageParam{
|
||||
Role: openai.F(openai.ChatCompletionUserMessageParamRoleUser),
|
||||
Content: openai.F([]openai.ChatCompletionContentPartUnionParam{openai.ChatCompletionContentPartTextParam{Text: openai.F("text"), Type: openai.F(openai.ChatCompletionContentPartTextTypeText)}}),
|
||||
}}),
|
||||
Model: openai.F(openai.ChatModelO3Mini),
|
||||
Messages: []openai.ChatCompletionMessageParamUnion{{
|
||||
OfUser: &openai.ChatCompletionUserMessageParam{
|
||||
Content: []openai.ChatCompletionContentPartUnionParam{{
|
||||
OfText: &openai.ChatCompletionContentPartTextParam{Text: openai.String("text")},
|
||||
}},
|
||||
},
|
||||
}},
|
||||
Model: openai.ChatModelO3Mini,
|
||||
})
|
||||
if err == nil {
|
||||
t.Error("Expected there to be a cancel error")
|
||||
@@ -143,11 +152,14 @@ func TestOverwriteRetryCountHeader(t *testing.T) {
|
||||
option.WithHeader("X-Stainless-Retry-Count", "42"),
|
||||
)
|
||||
_, err := client.Chat.Completions.New(context.Background(), openai.ChatCompletionNewParams{
|
||||
Messages: openai.F([]openai.ChatCompletionMessageParamUnion{openai.ChatCompletionUserMessageParam{
|
||||
Role: openai.F(openai.ChatCompletionUserMessageParamRoleUser),
|
||||
Content: openai.F([]openai.ChatCompletionContentPartUnionParam{openai.ChatCompletionContentPartTextParam{Text: openai.F("text"), Type: openai.F(openai.ChatCompletionContentPartTextTypeText)}}),
|
||||
}}),
|
||||
Model: openai.F(openai.ChatModelO3Mini),
|
||||
Messages: []openai.ChatCompletionMessageParamUnion{{
|
||||
OfUser: &openai.ChatCompletionUserMessageParam{
|
||||
Content: []openai.ChatCompletionContentPartUnionParam{{
|
||||
OfText: &openai.ChatCompletionContentPartTextParam{Text: openai.String("text")},
|
||||
}},
|
||||
},
|
||||
}},
|
||||
Model: openai.ChatModelO3Mini,
|
||||
})
|
||||
if err == nil {
|
||||
t.Error("Expected there to be a cancel error")
|
||||
@@ -177,11 +189,14 @@ func TestRetryAfterMs(t *testing.T) {
|
||||
}),
|
||||
)
|
||||
_, err := client.Chat.Completions.New(context.Background(), openai.ChatCompletionNewParams{
|
||||
Messages: openai.F([]openai.ChatCompletionMessageParamUnion{openai.ChatCompletionUserMessageParam{
|
||||
Role: openai.F(openai.ChatCompletionUserMessageParamRoleUser),
|
||||
Content: openai.F([]openai.ChatCompletionContentPartUnionParam{openai.ChatCompletionContentPartTextParam{Text: openai.F("text"), Type: openai.F(openai.ChatCompletionContentPartTextTypeText)}}),
|
||||
}}),
|
||||
Model: openai.F(openai.ChatModelO3Mini),
|
||||
Messages: []openai.ChatCompletionMessageParamUnion{{
|
||||
OfUser: &openai.ChatCompletionUserMessageParam{
|
||||
Content: []openai.ChatCompletionContentPartUnionParam{{
|
||||
OfText: &openai.ChatCompletionContentPartTextParam{Text: openai.String("text")},
|
||||
}},
|
||||
},
|
||||
}},
|
||||
Model: openai.ChatModelO3Mini,
|
||||
})
|
||||
if err == nil {
|
||||
t.Error("Expected there to be a cancel error")
|
||||
@@ -205,11 +220,14 @@ func TestContextCancel(t *testing.T) {
|
||||
cancelCtx, cancel := context.WithCancel(context.Background())
|
||||
cancel()
|
||||
_, err := client.Chat.Completions.New(cancelCtx, openai.ChatCompletionNewParams{
|
||||
Messages: openai.F([]openai.ChatCompletionMessageParamUnion{openai.ChatCompletionUserMessageParam{
|
||||
Role: openai.F(openai.ChatCompletionUserMessageParamRoleUser),
|
||||
Content: openai.F([]openai.ChatCompletionContentPartUnionParam{openai.ChatCompletionContentPartTextParam{Text: openai.F("text"), Type: openai.F(openai.ChatCompletionContentPartTextTypeText)}}),
|
||||
}}),
|
||||
Model: openai.F(openai.ChatModelO3Mini),
|
||||
Messages: []openai.ChatCompletionMessageParamUnion{{
|
||||
OfUser: &openai.ChatCompletionUserMessageParam{
|
||||
Content: []openai.ChatCompletionContentPartUnionParam{{
|
||||
OfText: &openai.ChatCompletionContentPartTextParam{Text: openai.String("text")},
|
||||
}},
|
||||
},
|
||||
}},
|
||||
Model: openai.ChatModelO3Mini,
|
||||
})
|
||||
if err == nil {
|
||||
t.Error("Expected there to be a cancel error")
|
||||
@@ -230,11 +248,14 @@ func TestContextCancelDelay(t *testing.T) {
|
||||
cancelCtx, cancel := context.WithTimeout(context.Background(), 2*time.Millisecond)
|
||||
defer cancel()
|
||||
_, err := client.Chat.Completions.New(cancelCtx, openai.ChatCompletionNewParams{
|
||||
Messages: openai.F([]openai.ChatCompletionMessageParamUnion{openai.ChatCompletionUserMessageParam{
|
||||
Role: openai.F(openai.ChatCompletionUserMessageParamRoleUser),
|
||||
Content: openai.F([]openai.ChatCompletionContentPartUnionParam{openai.ChatCompletionContentPartTextParam{Text: openai.F("text"), Type: openai.F(openai.ChatCompletionContentPartTextTypeText)}}),
|
||||
}}),
|
||||
Model: openai.F(openai.ChatModelO3Mini),
|
||||
Messages: []openai.ChatCompletionMessageParamUnion{{
|
||||
OfUser: &openai.ChatCompletionUserMessageParam{
|
||||
Content: []openai.ChatCompletionContentPartUnionParam{{
|
||||
OfText: &openai.ChatCompletionContentPartTextParam{Text: openai.String("text")},
|
||||
}},
|
||||
},
|
||||
}},
|
||||
Model: openai.ChatModelO3Mini,
|
||||
})
|
||||
if err == nil {
|
||||
t.Error("expected there to be a cancel error")
|
||||
@@ -261,11 +282,14 @@ func TestContextDeadline(t *testing.T) {
|
||||
}),
|
||||
)
|
||||
_, err := client.Chat.Completions.New(deadlineCtx, openai.ChatCompletionNewParams{
|
||||
Messages: openai.F([]openai.ChatCompletionMessageParamUnion{openai.ChatCompletionUserMessageParam{
|
||||
Role: openai.F(openai.ChatCompletionUserMessageParamRoleUser),
|
||||
Content: openai.F([]openai.ChatCompletionContentPartUnionParam{openai.ChatCompletionContentPartTextParam{Text: openai.F("text"), Type: openai.F(openai.ChatCompletionContentPartTextTypeText)}}),
|
||||
}}),
|
||||
Model: openai.F(openai.ChatModelO3Mini),
|
||||
Messages: []openai.ChatCompletionMessageParamUnion{{
|
||||
OfUser: &openai.ChatCompletionUserMessageParam{
|
||||
Content: []openai.ChatCompletionContentPartUnionParam{{
|
||||
OfText: &openai.ChatCompletionContentPartTextParam{Text: openai.String("text")},
|
||||
}},
|
||||
},
|
||||
}},
|
||||
Model: openai.ChatModelO3Mini,
|
||||
})
|
||||
if err == nil {
|
||||
t.Error("expected there to be a deadline error")
|
||||
@@ -311,11 +335,12 @@ func TestContextDeadlineStreaming(t *testing.T) {
|
||||
}),
|
||||
)
|
||||
stream := client.Chat.Completions.NewStreaming(deadlineCtx, openai.ChatCompletionNewParams{
|
||||
Messages: openai.F([]openai.ChatCompletionMessageParamUnion{openai.ChatCompletionDeveloperMessageParam{
|
||||
Content: openai.F([]openai.ChatCompletionContentPartTextParam{{Text: openai.F("text"), Type: openai.F(openai.ChatCompletionContentPartTextTypeText)}}),
|
||||
Role: openai.F(openai.ChatCompletionDeveloperMessageParamRoleDeveloper),
|
||||
}}),
|
||||
Model: openai.F(openai.ChatModelO3Mini),
|
||||
Messages: []openai.ChatCompletionMessageParamUnion{{
|
||||
OfDeveloper: &openai.ChatCompletionDeveloperMessageParam{
|
||||
Content: []openai.ChatCompletionContentPartTextParam{{Text: openai.String("text")}},
|
||||
},
|
||||
}},
|
||||
Model: openai.ChatModelO3Mini,
|
||||
})
|
||||
for stream.Next() {
|
||||
_ = stream.Current()
|
||||
@@ -363,11 +388,12 @@ func TestContextDeadlineStreamingWithRequestTimeout(t *testing.T) {
|
||||
stream := client.Chat.Completions.NewStreaming(
|
||||
context.Background(),
|
||||
openai.ChatCompletionNewParams{
|
||||
Messages: openai.F([]openai.ChatCompletionMessageParamUnion{openai.ChatCompletionDeveloperMessageParam{
|
||||
Content: openai.F([]openai.ChatCompletionContentPartTextParam{{Text: openai.F("text"), Type: openai.F(openai.ChatCompletionContentPartTextTypeText)}}),
|
||||
Role: openai.F(openai.ChatCompletionDeveloperMessageParamRoleDeveloper),
|
||||
}}),
|
||||
Model: openai.F(openai.ChatModelO3Mini),
|
||||
Messages: []openai.ChatCompletionMessageParamUnion{{
|
||||
OfDeveloper: &openai.ChatCompletionDeveloperMessageParam{
|
||||
Content: []openai.ChatCompletionContentPartTextParam{{Text: openai.String("text")}},
|
||||
},
|
||||
}},
|
||||
Model: openai.ChatModelO3Mini,
|
||||
},
|
||||
option.WithRequestTimeout((100 * time.Millisecond)),
|
||||
)
|
||||
|
||||
347
completion.go
347
completion.go
@@ -7,10 +7,12 @@ import (
|
||||
"net/http"
|
||||
|
||||
"github.com/openai/openai-go/internal/apijson"
|
||||
"github.com/openai/openai-go/internal/param"
|
||||
"github.com/openai/openai-go/internal/requestconfig"
|
||||
"github.com/openai/openai-go/option"
|
||||
"github.com/openai/openai-go/packages/param"
|
||||
"github.com/openai/openai-go/packages/resp"
|
||||
"github.com/openai/openai-go/packages/ssestream"
|
||||
"github.com/openai/openai-go/shared/constant"
|
||||
)
|
||||
|
||||
// CompletionService contains methods and other services that help with interacting
|
||||
@@ -26,8 +28,8 @@ type CompletionService struct {
|
||||
// NewCompletionService generates a new service that applies the given options to
|
||||
// each request. These options are applied after the parent client's options (if
|
||||
// there is one), and before any request-specific options.
|
||||
func NewCompletionService(opts ...option.RequestOption) (r *CompletionService) {
|
||||
r = &CompletionService{}
|
||||
func NewCompletionService(opts ...option.RequestOption) (r CompletionService) {
|
||||
r = CompletionService{}
|
||||
r.Options = opts
|
||||
return
|
||||
}
|
||||
@@ -57,97 +59,71 @@ func (r *CompletionService) NewStreaming(ctx context.Context, body CompletionNew
|
||||
// non-streamed response objects share the same shape (unlike the chat endpoint).
|
||||
type Completion struct {
|
||||
// A unique identifier for the completion.
|
||||
ID string `json:"id,required"`
|
||||
ID string `json:"id,omitzero,required"`
|
||||
// The list of completion choices the model generated for the input prompt.
|
||||
Choices []CompletionChoice `json:"choices,required"`
|
||||
Choices []CompletionChoice `json:"choices,omitzero,required"`
|
||||
// The Unix timestamp (in seconds) of when the completion was created.
|
||||
Created int64 `json:"created,required"`
|
||||
Created int64 `json:"created,omitzero,required"`
|
||||
// The model used for completion.
|
||||
Model string `json:"model,required"`
|
||||
Model string `json:"model,omitzero,required"`
|
||||
// The object type, which is always "text_completion"
|
||||
Object CompletionObject `json:"object,required"`
|
||||
//
|
||||
// This field can be elided, and will be automatically set as "text_completion".
|
||||
Object constant.TextCompletion `json:"object,required"`
|
||||
// This fingerprint represents the backend configuration that the model runs with.
|
||||
//
|
||||
// Can be used in conjunction with the `seed` request parameter to understand when
|
||||
// backend changes have been made that might impact determinism.
|
||||
SystemFingerprint string `json:"system_fingerprint"`
|
||||
SystemFingerprint string `json:"system_fingerprint,omitzero"`
|
||||
// Usage statistics for the completion request.
|
||||
Usage CompletionUsage `json:"usage"`
|
||||
JSON completionJSON `json:"-"`
|
||||
Usage CompletionUsage `json:"usage,omitzero"`
|
||||
JSON struct {
|
||||
ID resp.Field
|
||||
Choices resp.Field
|
||||
Created resp.Field
|
||||
Model resp.Field
|
||||
Object resp.Field
|
||||
SystemFingerprint resp.Field
|
||||
Usage resp.Field
|
||||
raw string
|
||||
} `json:"-"`
|
||||
}
|
||||
|
||||
// completionJSON contains the JSON metadata for the struct [Completion]
|
||||
type completionJSON struct {
|
||||
ID apijson.Field
|
||||
Choices apijson.Field
|
||||
Created apijson.Field
|
||||
Model apijson.Field
|
||||
Object apijson.Field
|
||||
SystemFingerprint apijson.Field
|
||||
Usage apijson.Field
|
||||
raw string
|
||||
ExtraFields map[string]apijson.Field
|
||||
}
|
||||
|
||||
func (r *Completion) UnmarshalJSON(data []byte) (err error) {
|
||||
func (r Completion) RawJSON() string { return r.JSON.raw }
|
||||
func (r *Completion) UnmarshalJSON(data []byte) error {
|
||||
return apijson.UnmarshalRoot(data, r)
|
||||
}
|
||||
|
||||
func (r completionJSON) RawJSON() string {
|
||||
return r.raw
|
||||
}
|
||||
|
||||
// The object type, which is always "text_completion"
|
||||
type CompletionObject string
|
||||
|
||||
const (
|
||||
CompletionObjectTextCompletion CompletionObject = "text_completion"
|
||||
)
|
||||
|
||||
func (r CompletionObject) IsKnown() bool {
|
||||
switch r {
|
||||
case CompletionObjectTextCompletion:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
type CompletionChoice struct {
|
||||
// The reason the model stopped generating tokens. This will be `stop` if the model
|
||||
// hit a natural stop point or a provided stop sequence, `length` if the maximum
|
||||
// number of tokens specified in the request was reached, or `content_filter` if
|
||||
// content was omitted due to a flag from our content filters.
|
||||
FinishReason CompletionChoiceFinishReason `json:"finish_reason,required"`
|
||||
Index int64 `json:"index,required"`
|
||||
Logprobs CompletionChoiceLogprobs `json:"logprobs,required,nullable"`
|
||||
Text string `json:"text,required"`
|
||||
JSON completionChoiceJSON `json:"-"`
|
||||
//
|
||||
// Any of "stop", "length", "content_filter"
|
||||
FinishReason string `json:"finish_reason,omitzero,required"`
|
||||
Index int64 `json:"index,omitzero,required"`
|
||||
Logprobs CompletionChoiceLogprobs `json:"logprobs,omitzero,required,nullable"`
|
||||
Text string `json:"text,omitzero,required"`
|
||||
JSON struct {
|
||||
FinishReason resp.Field
|
||||
Index resp.Field
|
||||
Logprobs resp.Field
|
||||
Text resp.Field
|
||||
raw string
|
||||
} `json:"-"`
|
||||
}
|
||||
|
||||
// completionChoiceJSON contains the JSON metadata for the struct
|
||||
// [CompletionChoice]
|
||||
type completionChoiceJSON struct {
|
||||
FinishReason apijson.Field
|
||||
Index apijson.Field
|
||||
Logprobs apijson.Field
|
||||
Text apijson.Field
|
||||
raw string
|
||||
ExtraFields map[string]apijson.Field
|
||||
}
|
||||
|
||||
func (r *CompletionChoice) UnmarshalJSON(data []byte) (err error) {
|
||||
func (r CompletionChoice) RawJSON() string { return r.JSON.raw }
|
||||
func (r *CompletionChoice) UnmarshalJSON(data []byte) error {
|
||||
return apijson.UnmarshalRoot(data, r)
|
||||
}
|
||||
|
||||
func (r completionChoiceJSON) RawJSON() string {
|
||||
return r.raw
|
||||
}
|
||||
|
||||
// The reason the model stopped generating tokens. This will be `stop` if the model
|
||||
// hit a natural stop point or a provided stop sequence, `length` if the maximum
|
||||
// number of tokens specified in the request was reached, or `content_filter` if
|
||||
// content was omitted due to a flag from our content filters.
|
||||
type CompletionChoiceFinishReason string
|
||||
type CompletionChoiceFinishReason = string
|
||||
|
||||
const (
|
||||
CompletionChoiceFinishReasonStop CompletionChoiceFinishReason = "stop"
|
||||
@@ -155,151 +131,112 @@ const (
|
||||
CompletionChoiceFinishReasonContentFilter CompletionChoiceFinishReason = "content_filter"
|
||||
)
|
||||
|
||||
func (r CompletionChoiceFinishReason) IsKnown() bool {
|
||||
switch r {
|
||||
case CompletionChoiceFinishReasonStop, CompletionChoiceFinishReasonLength, CompletionChoiceFinishReasonContentFilter:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
type CompletionChoiceLogprobs struct {
|
||||
TextOffset []int64 `json:"text_offset"`
|
||||
TokenLogprobs []float64 `json:"token_logprobs"`
|
||||
Tokens []string `json:"tokens"`
|
||||
TopLogprobs []map[string]float64 `json:"top_logprobs"`
|
||||
JSON completionChoiceLogprobsJSON `json:"-"`
|
||||
TextOffset []int64 `json:"text_offset,omitzero"`
|
||||
TokenLogprobs []float64 `json:"token_logprobs,omitzero"`
|
||||
Tokens []string `json:"tokens,omitzero"`
|
||||
TopLogprobs []map[string]float64 `json:"top_logprobs,omitzero"`
|
||||
JSON struct {
|
||||
TextOffset resp.Field
|
||||
TokenLogprobs resp.Field
|
||||
Tokens resp.Field
|
||||
TopLogprobs resp.Field
|
||||
raw string
|
||||
} `json:"-"`
|
||||
}
|
||||
|
||||
// completionChoiceLogprobsJSON contains the JSON metadata for the struct
|
||||
// [CompletionChoiceLogprobs]
|
||||
type completionChoiceLogprobsJSON struct {
|
||||
TextOffset apijson.Field
|
||||
TokenLogprobs apijson.Field
|
||||
Tokens apijson.Field
|
||||
TopLogprobs apijson.Field
|
||||
raw string
|
||||
ExtraFields map[string]apijson.Field
|
||||
}
|
||||
|
||||
func (r *CompletionChoiceLogprobs) UnmarshalJSON(data []byte) (err error) {
|
||||
func (r CompletionChoiceLogprobs) RawJSON() string { return r.JSON.raw }
|
||||
func (r *CompletionChoiceLogprobs) UnmarshalJSON(data []byte) error {
|
||||
return apijson.UnmarshalRoot(data, r)
|
||||
}
|
||||
|
||||
func (r completionChoiceLogprobsJSON) RawJSON() string {
|
||||
return r.raw
|
||||
}
|
||||
|
||||
// Usage statistics for the completion request.
|
||||
type CompletionUsage struct {
|
||||
// Number of tokens in the generated completion.
|
||||
CompletionTokens int64 `json:"completion_tokens,required"`
|
||||
CompletionTokens int64 `json:"completion_tokens,omitzero,required"`
|
||||
// Number of tokens in the prompt.
|
||||
PromptTokens int64 `json:"prompt_tokens,required"`
|
||||
PromptTokens int64 `json:"prompt_tokens,omitzero,required"`
|
||||
// Total number of tokens used in the request (prompt + completion).
|
||||
TotalTokens int64 `json:"total_tokens,required"`
|
||||
TotalTokens int64 `json:"total_tokens,omitzero,required"`
|
||||
// Breakdown of tokens used in a completion.
|
||||
CompletionTokensDetails CompletionUsageCompletionTokensDetails `json:"completion_tokens_details"`
|
||||
CompletionTokensDetails CompletionUsageCompletionTokensDetails `json:"completion_tokens_details,omitzero"`
|
||||
// Breakdown of tokens used in the prompt.
|
||||
PromptTokensDetails CompletionUsagePromptTokensDetails `json:"prompt_tokens_details"`
|
||||
JSON completionUsageJSON `json:"-"`
|
||||
PromptTokensDetails CompletionUsagePromptTokensDetails `json:"prompt_tokens_details,omitzero"`
|
||||
JSON struct {
|
||||
CompletionTokens resp.Field
|
||||
PromptTokens resp.Field
|
||||
TotalTokens resp.Field
|
||||
CompletionTokensDetails resp.Field
|
||||
PromptTokensDetails resp.Field
|
||||
raw string
|
||||
} `json:"-"`
|
||||
}
|
||||
|
||||
// completionUsageJSON contains the JSON metadata for the struct [CompletionUsage]
|
||||
type completionUsageJSON struct {
|
||||
CompletionTokens apijson.Field
|
||||
PromptTokens apijson.Field
|
||||
TotalTokens apijson.Field
|
||||
CompletionTokensDetails apijson.Field
|
||||
PromptTokensDetails apijson.Field
|
||||
raw string
|
||||
ExtraFields map[string]apijson.Field
|
||||
}
|
||||
|
||||
func (r *CompletionUsage) UnmarshalJSON(data []byte) (err error) {
|
||||
func (r CompletionUsage) RawJSON() string { return r.JSON.raw }
|
||||
func (r *CompletionUsage) UnmarshalJSON(data []byte) error {
|
||||
return apijson.UnmarshalRoot(data, r)
|
||||
}
|
||||
|
||||
func (r completionUsageJSON) RawJSON() string {
|
||||
return r.raw
|
||||
}
|
||||
|
||||
// Breakdown of tokens used in a completion.
|
||||
type CompletionUsageCompletionTokensDetails struct {
|
||||
// When using Predicted Outputs, the number of tokens in the prediction that
|
||||
// appeared in the completion.
|
||||
AcceptedPredictionTokens int64 `json:"accepted_prediction_tokens"`
|
||||
AcceptedPredictionTokens int64 `json:"accepted_prediction_tokens,omitzero"`
|
||||
// Audio input tokens generated by the model.
|
||||
AudioTokens int64 `json:"audio_tokens"`
|
||||
AudioTokens int64 `json:"audio_tokens,omitzero"`
|
||||
// Tokens generated by the model for reasoning.
|
||||
ReasoningTokens int64 `json:"reasoning_tokens"`
|
||||
ReasoningTokens int64 `json:"reasoning_tokens,omitzero"`
|
||||
// When using Predicted Outputs, the number of tokens in the prediction that did
|
||||
// not appear in the completion. However, like reasoning tokens, these tokens are
|
||||
// still counted in the total completion tokens for purposes of billing, output,
|
||||
// and context window limits.
|
||||
RejectedPredictionTokens int64 `json:"rejected_prediction_tokens"`
|
||||
JSON completionUsageCompletionTokensDetailsJSON `json:"-"`
|
||||
RejectedPredictionTokens int64 `json:"rejected_prediction_tokens,omitzero"`
|
||||
JSON struct {
|
||||
AcceptedPredictionTokens resp.Field
|
||||
AudioTokens resp.Field
|
||||
ReasoningTokens resp.Field
|
||||
RejectedPredictionTokens resp.Field
|
||||
raw string
|
||||
} `json:"-"`
|
||||
}
|
||||
|
||||
// completionUsageCompletionTokensDetailsJSON contains the JSON metadata for the
|
||||
// struct [CompletionUsageCompletionTokensDetails]
|
||||
type completionUsageCompletionTokensDetailsJSON struct {
|
||||
AcceptedPredictionTokens apijson.Field
|
||||
AudioTokens apijson.Field
|
||||
ReasoningTokens apijson.Field
|
||||
RejectedPredictionTokens apijson.Field
|
||||
raw string
|
||||
ExtraFields map[string]apijson.Field
|
||||
}
|
||||
|
||||
func (r *CompletionUsageCompletionTokensDetails) UnmarshalJSON(data []byte) (err error) {
|
||||
func (r CompletionUsageCompletionTokensDetails) RawJSON() string { return r.JSON.raw }
|
||||
func (r *CompletionUsageCompletionTokensDetails) UnmarshalJSON(data []byte) error {
|
||||
return apijson.UnmarshalRoot(data, r)
|
||||
}
|
||||
|
||||
func (r completionUsageCompletionTokensDetailsJSON) RawJSON() string {
|
||||
return r.raw
|
||||
}
|
||||
|
||||
// Breakdown of tokens used in the prompt.
|
||||
type CompletionUsagePromptTokensDetails struct {
|
||||
// Audio input tokens present in the prompt.
|
||||
AudioTokens int64 `json:"audio_tokens"`
|
||||
AudioTokens int64 `json:"audio_tokens,omitzero"`
|
||||
// Cached tokens present in the prompt.
|
||||
CachedTokens int64 `json:"cached_tokens"`
|
||||
JSON completionUsagePromptTokensDetailsJSON `json:"-"`
|
||||
CachedTokens int64 `json:"cached_tokens,omitzero"`
|
||||
JSON struct {
|
||||
AudioTokens resp.Field
|
||||
CachedTokens resp.Field
|
||||
raw string
|
||||
} `json:"-"`
|
||||
}
|
||||
|
||||
// completionUsagePromptTokensDetailsJSON contains the JSON metadata for the struct
|
||||
// [CompletionUsagePromptTokensDetails]
|
||||
type completionUsagePromptTokensDetailsJSON struct {
|
||||
AudioTokens apijson.Field
|
||||
CachedTokens apijson.Field
|
||||
raw string
|
||||
ExtraFields map[string]apijson.Field
|
||||
}
|
||||
|
||||
func (r *CompletionUsagePromptTokensDetails) UnmarshalJSON(data []byte) (err error) {
|
||||
func (r CompletionUsagePromptTokensDetails) RawJSON() string { return r.JSON.raw }
|
||||
func (r *CompletionUsagePromptTokensDetails) UnmarshalJSON(data []byte) error {
|
||||
return apijson.UnmarshalRoot(data, r)
|
||||
}
|
||||
|
||||
func (r completionUsagePromptTokensDetailsJSON) RawJSON() string {
|
||||
return r.raw
|
||||
}
|
||||
|
||||
type CompletionNewParams struct {
|
||||
// ID of the model to use. You can use the
|
||||
// [List models](https://platform.openai.com/docs/api-reference/models/list) API to
|
||||
// see all of your available models, or see our
|
||||
// [Model overview](https://platform.openai.com/docs/models) for descriptions of
|
||||
// them.
|
||||
Model param.Field[CompletionNewParamsModel] `json:"model,required"`
|
||||
Model string `json:"model,omitzero,required"`
|
||||
// The prompt(s) to generate completions for, encoded as a string, array of
|
||||
// strings, array of tokens, or array of token arrays.
|
||||
//
|
||||
// Note that <|endoftext|> is the document separator that the model sees during
|
||||
// training, so if a prompt is not specified the model will generate as if from the
|
||||
// beginning of a new document.
|
||||
Prompt param.Field[CompletionNewParamsPromptUnion] `json:"prompt,required"`
|
||||
Prompt CompletionNewParamsPromptUnion `json:"prompt,omitzero,required"`
|
||||
// Generates `best_of` completions server-side and returns the "best" (the one with
|
||||
// the highest log probability per token). Results cannot be streamed.
|
||||
//
|
||||
@@ -309,15 +246,15 @@ type CompletionNewParams struct {
|
||||
// **Note:** Because this parameter generates many completions, it can quickly
|
||||
// consume your token quota. Use carefully and ensure that you have reasonable
|
||||
// settings for `max_tokens` and `stop`.
|
||||
BestOf param.Field[int64] `json:"best_of"`
|
||||
BestOf param.Int `json:"best_of,omitzero"`
|
||||
// Echo back the prompt in addition to the completion
|
||||
Echo param.Field[bool] `json:"echo"`
|
||||
Echo param.Bool `json:"echo,omitzero"`
|
||||
// Number between -2.0 and 2.0. Positive values penalize new tokens based on their
|
||||
// existing frequency in the text so far, decreasing the model's likelihood to
|
||||
// repeat the same line verbatim.
|
||||
//
|
||||
// [See more information about frequency and presence penalties.](https://platform.openai.com/docs/guides/text-generation)
|
||||
FrequencyPenalty param.Field[float64] `json:"frequency_penalty"`
|
||||
FrequencyPenalty param.Float `json:"frequency_penalty,omitzero"`
|
||||
// Modify the likelihood of specified tokens appearing in the completion.
|
||||
//
|
||||
// Accepts a JSON object that maps tokens (specified by their token ID in the GPT
|
||||
@@ -330,14 +267,14 @@ type CompletionNewParams struct {
|
||||
//
|
||||
// As an example, you can pass `{"50256": -100}` to prevent the <|endoftext|> token
|
||||
// from being generated.
|
||||
LogitBias param.Field[map[string]int64] `json:"logit_bias"`
|
||||
LogitBias map[string]int64 `json:"logit_bias,omitzero"`
|
||||
// Include the log probabilities on the `logprobs` most likely output tokens, as
|
||||
// well the chosen tokens. For example, if `logprobs` is 5, the API will return a
|
||||
// list of the 5 most likely tokens. The API will always return the `logprob` of
|
||||
// the sampled token, so there may be up to `logprobs+1` elements in the response.
|
||||
//
|
||||
// The maximum value for `logprobs` is 5.
|
||||
Logprobs param.Field[int64] `json:"logprobs"`
|
||||
Logprobs param.Int `json:"logprobs,omitzero"`
|
||||
// The maximum number of [tokens](/tokenizer) that can be generated in the
|
||||
// completion.
|
||||
//
|
||||
@@ -345,55 +282,59 @@ type CompletionNewParams struct {
|
||||
// context length.
|
||||
// [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken)
|
||||
// for counting tokens.
|
||||
MaxTokens param.Field[int64] `json:"max_tokens"`
|
||||
MaxTokens param.Int `json:"max_tokens,omitzero"`
|
||||
// How many completions to generate for each prompt.
|
||||
//
|
||||
// **Note:** Because this parameter generates many completions, it can quickly
|
||||
// consume your token quota. Use carefully and ensure that you have reasonable
|
||||
// settings for `max_tokens` and `stop`.
|
||||
N param.Field[int64] `json:"n"`
|
||||
N param.Int `json:"n,omitzero"`
|
||||
// Number between -2.0 and 2.0. Positive values penalize new tokens based on
|
||||
// whether they appear in the text so far, increasing the model's likelihood to
|
||||
// talk about new topics.
|
||||
//
|
||||
// [See more information about frequency and presence penalties.](https://platform.openai.com/docs/guides/text-generation)
|
||||
PresencePenalty param.Field[float64] `json:"presence_penalty"`
|
||||
PresencePenalty param.Float `json:"presence_penalty,omitzero"`
|
||||
// If specified, our system will make a best effort to sample deterministically,
|
||||
// such that repeated requests with the same `seed` and parameters should return
|
||||
// the same result.
|
||||
//
|
||||
// Determinism is not guaranteed, and you should refer to the `system_fingerprint`
|
||||
// response parameter to monitor changes in the backend.
|
||||
Seed param.Field[int64] `json:"seed"`
|
||||
Seed param.Int `json:"seed,omitzero"`
|
||||
// Up to 4 sequences where the API will stop generating further tokens. The
|
||||
// returned text will not contain the stop sequence.
|
||||
Stop param.Field[CompletionNewParamsStopUnion] `json:"stop"`
|
||||
Stop CompletionNewParamsStopUnion `json:"stop,omitzero"`
|
||||
// Options for streaming response. Only set this when you set `stream: true`.
|
||||
StreamOptions param.Field[ChatCompletionStreamOptionsParam] `json:"stream_options"`
|
||||
StreamOptions ChatCompletionStreamOptionsParam `json:"stream_options,omitzero"`
|
||||
// The suffix that comes after a completion of inserted text.
|
||||
//
|
||||
// This parameter is only supported for `gpt-3.5-turbo-instruct`.
|
||||
Suffix param.Field[string] `json:"suffix"`
|
||||
Suffix param.String `json:"suffix,omitzero"`
|
||||
// What sampling temperature to use, between 0 and 2. Higher values like 0.8 will
|
||||
// make the output more random, while lower values like 0.2 will make it more
|
||||
// focused and deterministic.
|
||||
//
|
||||
// We generally recommend altering this or `top_p` but not both.
|
||||
Temperature param.Field[float64] `json:"temperature"`
|
||||
Temperature param.Float `json:"temperature,omitzero"`
|
||||
// An alternative to sampling with temperature, called nucleus sampling, where the
|
||||
// model considers the results of the tokens with top_p probability mass. So 0.1
|
||||
// means only the tokens comprising the top 10% probability mass are considered.
|
||||
//
|
||||
// We generally recommend altering this or `temperature` but not both.
|
||||
TopP param.Field[float64] `json:"top_p"`
|
||||
TopP param.Float `json:"top_p,omitzero"`
|
||||
// A unique identifier representing your end-user, which can help OpenAI to monitor
|
||||
// and detect abuse.
|
||||
// [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids).
|
||||
User param.Field[string] `json:"user"`
|
||||
User param.String `json:"user,omitzero"`
|
||||
apiobject
|
||||
}
|
||||
|
||||
func (f CompletionNewParams) IsMissing() bool { return param.IsOmitted(f) || f.IsNull() }
|
||||
|
||||
func (r CompletionNewParams) MarshalJSON() (data []byte, err error) {
|
||||
return apijson.MarshalRoot(r)
|
||||
type shadow CompletionNewParams
|
||||
return param.MarshalObject(r, (*shadow)(&r))
|
||||
}
|
||||
|
||||
// ID of the model to use. You can use the
|
||||
@@ -401,7 +342,7 @@ func (r CompletionNewParams) MarshalJSON() (data []byte, err error) {
|
||||
// see all of your available models, or see our
|
||||
// [Model overview](https://platform.openai.com/docs/models) for descriptions of
|
||||
// them.
|
||||
type CompletionNewParamsModel string
|
||||
type CompletionNewParamsModel = string
|
||||
|
||||
const (
|
||||
CompletionNewParamsModelGPT3_5TurboInstruct CompletionNewParamsModel = "gpt-3.5-turbo-instruct"
|
||||
@@ -409,48 +350,30 @@ const (
|
||||
CompletionNewParamsModelBabbage002 CompletionNewParamsModel = "babbage-002"
|
||||
)
|
||||
|
||||
func (r CompletionNewParamsModel) IsKnown() bool {
|
||||
switch r {
|
||||
case CompletionNewParamsModelGPT3_5TurboInstruct, CompletionNewParamsModelDavinci002, CompletionNewParamsModelBabbage002:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
// Only one field can be non-zero
|
||||
type CompletionNewParamsPromptUnion struct {
|
||||
OfString param.String
|
||||
OfArrayOfStrings []string
|
||||
OfArrayOfTokens []int64
|
||||
OfArrayOfTokenArrays [][]int64
|
||||
apiunion
|
||||
}
|
||||
|
||||
// The prompt(s) to generate completions for, encoded as a string, array of
|
||||
// strings, array of tokens, or array of token arrays.
|
||||
//
|
||||
// Note that <|endoftext|> is the document separator that the model sees during
|
||||
// training, so if a prompt is not specified the model will generate as if from the
|
||||
// beginning of a new document.
|
||||
//
|
||||
// Satisfied by [shared.UnionString], [CompletionNewParamsPromptArrayOfStrings],
|
||||
// [CompletionNewParamsPromptArrayOfTokens],
|
||||
// [CompletionNewParamsPromptArrayOfTokenArrays].
|
||||
type CompletionNewParamsPromptUnion interface {
|
||||
ImplementsCompletionNewParamsPromptUnion()
|
||||
func (u CompletionNewParamsPromptUnion) IsMissing() bool { return param.IsOmitted(u) || u.IsNull() }
|
||||
|
||||
func (u CompletionNewParamsPromptUnion) MarshalJSON() ([]byte, error) {
|
||||
return param.MarshalUnion[CompletionNewParamsPromptUnion](u.OfString, u.OfArrayOfStrings, u.OfArrayOfTokens, u.OfArrayOfTokenArrays)
|
||||
}
|
||||
|
||||
type CompletionNewParamsPromptArrayOfStrings []string
|
||||
|
||||
func (r CompletionNewParamsPromptArrayOfStrings) ImplementsCompletionNewParamsPromptUnion() {}
|
||||
|
||||
type CompletionNewParamsPromptArrayOfTokens []int64
|
||||
|
||||
func (r CompletionNewParamsPromptArrayOfTokens) ImplementsCompletionNewParamsPromptUnion() {}
|
||||
|
||||
type CompletionNewParamsPromptArrayOfTokenArrays [][]int64
|
||||
|
||||
func (r CompletionNewParamsPromptArrayOfTokenArrays) ImplementsCompletionNewParamsPromptUnion() {}
|
||||
|
||||
// Up to 4 sequences where the API will stop generating further tokens. The
|
||||
// returned text will not contain the stop sequence.
|
||||
//
|
||||
// Satisfied by [shared.UnionString], [CompletionNewParamsStopArray].
|
||||
type CompletionNewParamsStopUnion interface {
|
||||
ImplementsCompletionNewParamsStopUnion()
|
||||
// Only one field can be non-zero
|
||||
type CompletionNewParamsStopUnion struct {
|
||||
OfString param.String
|
||||
OfCompletionNewsStopArray []string
|
||||
apiunion
|
||||
}
|
||||
|
||||
type CompletionNewParamsStopArray []string
|
||||
func (u CompletionNewParamsStopUnion) IsMissing() bool { return param.IsOmitted(u) || u.IsNull() }
|
||||
|
||||
func (r CompletionNewParamsStopArray) ImplementsCompletionNewParamsStopUnion() {}
|
||||
func (u CompletionNewParamsStopUnion) MarshalJSON() ([]byte, error) {
|
||||
return param.MarshalUnion[CompletionNewParamsStopUnion](u.OfString, u.OfCompletionNewsStopArray)
|
||||
}
|
||||
|
||||
@@ -11,7 +11,6 @@ import (
|
||||
"github.com/openai/openai-go"
|
||||
"github.com/openai/openai-go/internal/testutil"
|
||||
"github.com/openai/openai-go/option"
|
||||
"github.com/openai/openai-go/shared"
|
||||
)
|
||||
|
||||
func TestCompletionNewWithOptionalParams(t *testing.T) {
|
||||
@@ -27,27 +26,31 @@ func TestCompletionNewWithOptionalParams(t *testing.T) {
|
||||
option.WithAPIKey("My API Key"),
|
||||
)
|
||||
_, err := client.Completions.New(context.TODO(), openai.CompletionNewParams{
|
||||
Model: openai.F(openai.CompletionNewParamsModelGPT3_5TurboInstruct),
|
||||
Prompt: openai.F[openai.CompletionNewParamsPromptUnion](shared.UnionString("This is a test.")),
|
||||
BestOf: openai.F(int64(0)),
|
||||
Echo: openai.F(true),
|
||||
FrequencyPenalty: openai.F(-2.000000),
|
||||
LogitBias: openai.F(map[string]int64{
|
||||
"foo": int64(0),
|
||||
}),
|
||||
Logprobs: openai.F(int64(0)),
|
||||
MaxTokens: openai.F(int64(16)),
|
||||
N: openai.F(int64(1)),
|
||||
PresencePenalty: openai.F(-2.000000),
|
||||
Seed: openai.F(int64(0)),
|
||||
Stop: openai.F[openai.CompletionNewParamsStopUnion](shared.UnionString("\n")),
|
||||
StreamOptions: openai.F(openai.ChatCompletionStreamOptionsParam{
|
||||
IncludeUsage: openai.F(true),
|
||||
}),
|
||||
Suffix: openai.F("test."),
|
||||
Temperature: openai.F(1.000000),
|
||||
TopP: openai.F(1.000000),
|
||||
User: openai.F("user-1234"),
|
||||
Model: "gpt-3.5-turbo-instruct",
|
||||
Prompt: openai.CompletionNewParamsPromptUnion{
|
||||
OfString: openai.String("This is a test."),
|
||||
},
|
||||
BestOf: openai.Int(0),
|
||||
Echo: openai.Bool(true),
|
||||
FrequencyPenalty: openai.Float(-2),
|
||||
LogitBias: map[string]int64{
|
||||
"foo": 0,
|
||||
},
|
||||
Logprobs: openai.Int(0),
|
||||
MaxTokens: openai.Int(16),
|
||||
N: openai.Int(1),
|
||||
PresencePenalty: openai.Float(-2),
|
||||
Seed: openai.Int(0),
|
||||
Stop: openai.CompletionNewParamsStopUnion{
|
||||
OfString: openai.String("\n"),
|
||||
},
|
||||
StreamOptions: openai.ChatCompletionStreamOptionsParam{
|
||||
IncludeUsage: openai.Bool(true),
|
||||
},
|
||||
Suffix: openai.String("test."),
|
||||
Temperature: openai.Float(1),
|
||||
TopP: openai.Float(1),
|
||||
User: openai.String("user-1234"),
|
||||
})
|
||||
if err != nil {
|
||||
var apierr *openai.Error
|
||||
|
||||
192
embedding.go
192
embedding.go
@@ -7,9 +7,11 @@ import (
|
||||
"net/http"
|
||||
|
||||
"github.com/openai/openai-go/internal/apijson"
|
||||
"github.com/openai/openai-go/internal/param"
|
||||
"github.com/openai/openai-go/internal/requestconfig"
|
||||
"github.com/openai/openai-go/option"
|
||||
"github.com/openai/openai-go/packages/param"
|
||||
"github.com/openai/openai-go/packages/resp"
|
||||
"github.com/openai/openai-go/shared/constant"
|
||||
)
|
||||
|
||||
// EmbeddingService contains methods and other services that help with interacting
|
||||
@@ -25,8 +27,8 @@ type EmbeddingService struct {
|
||||
// NewEmbeddingService generates a new service that applies the given options to
|
||||
// each request. These options are applied after the parent client's options (if
|
||||
// there is one), and before any request-specific options.
|
||||
func NewEmbeddingService(opts ...option.RequestOption) (r *EmbeddingService) {
|
||||
r = &EmbeddingService{}
|
||||
func NewEmbeddingService(opts ...option.RequestOption) (r EmbeddingService) {
|
||||
r = EmbeddingService{}
|
||||
r.Options = opts
|
||||
return
|
||||
}
|
||||
@@ -41,121 +43,72 @@ func (r *EmbeddingService) New(ctx context.Context, body EmbeddingNewParams, opt
|
||||
|
||||
type CreateEmbeddingResponse struct {
|
||||
// The list of embeddings generated by the model.
|
||||
Data []Embedding `json:"data,required"`
|
||||
Data []Embedding `json:"data,omitzero,required"`
|
||||
// The name of the model used to generate the embedding.
|
||||
Model string `json:"model,required"`
|
||||
Model string `json:"model,omitzero,required"`
|
||||
// The object type, which is always "list".
|
||||
Object CreateEmbeddingResponseObject `json:"object,required"`
|
||||
//
|
||||
// This field can be elided, and will be automatically set as "list".
|
||||
Object constant.List `json:"object,required"`
|
||||
// The usage information for the request.
|
||||
Usage CreateEmbeddingResponseUsage `json:"usage,required"`
|
||||
JSON createEmbeddingResponseJSON `json:"-"`
|
||||
Usage CreateEmbeddingResponseUsage `json:"usage,omitzero,required"`
|
||||
JSON struct {
|
||||
Data resp.Field
|
||||
Model resp.Field
|
||||
Object resp.Field
|
||||
Usage resp.Field
|
||||
raw string
|
||||
} `json:"-"`
|
||||
}
|
||||
|
||||
// createEmbeddingResponseJSON contains the JSON metadata for the struct
|
||||
// [CreateEmbeddingResponse]
|
||||
type createEmbeddingResponseJSON struct {
|
||||
Data apijson.Field
|
||||
Model apijson.Field
|
||||
Object apijson.Field
|
||||
Usage apijson.Field
|
||||
raw string
|
||||
ExtraFields map[string]apijson.Field
|
||||
}
|
||||
|
||||
func (r *CreateEmbeddingResponse) UnmarshalJSON(data []byte) (err error) {
|
||||
func (r CreateEmbeddingResponse) RawJSON() string { return r.JSON.raw }
|
||||
func (r *CreateEmbeddingResponse) UnmarshalJSON(data []byte) error {
|
||||
return apijson.UnmarshalRoot(data, r)
|
||||
}
|
||||
|
||||
func (r createEmbeddingResponseJSON) RawJSON() string {
|
||||
return r.raw
|
||||
}
|
||||
|
||||
// The object type, which is always "list".
|
||||
type CreateEmbeddingResponseObject string
|
||||
|
||||
const (
|
||||
CreateEmbeddingResponseObjectList CreateEmbeddingResponseObject = "list"
|
||||
)
|
||||
|
||||
func (r CreateEmbeddingResponseObject) IsKnown() bool {
|
||||
switch r {
|
||||
case CreateEmbeddingResponseObjectList:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// The usage information for the request.
|
||||
type CreateEmbeddingResponseUsage struct {
|
||||
// The number of tokens used by the prompt.
|
||||
PromptTokens int64 `json:"prompt_tokens,required"`
|
||||
PromptTokens int64 `json:"prompt_tokens,omitzero,required"`
|
||||
// The total number of tokens used by the request.
|
||||
TotalTokens int64 `json:"total_tokens,required"`
|
||||
JSON createEmbeddingResponseUsageJSON `json:"-"`
|
||||
TotalTokens int64 `json:"total_tokens,omitzero,required"`
|
||||
JSON struct {
|
||||
PromptTokens resp.Field
|
||||
TotalTokens resp.Field
|
||||
raw string
|
||||
} `json:"-"`
|
||||
}
|
||||
|
||||
// createEmbeddingResponseUsageJSON contains the JSON metadata for the struct
|
||||
// [CreateEmbeddingResponseUsage]
|
||||
type createEmbeddingResponseUsageJSON struct {
|
||||
PromptTokens apijson.Field
|
||||
TotalTokens apijson.Field
|
||||
raw string
|
||||
ExtraFields map[string]apijson.Field
|
||||
}
|
||||
|
||||
func (r *CreateEmbeddingResponseUsage) UnmarshalJSON(data []byte) (err error) {
|
||||
func (r CreateEmbeddingResponseUsage) RawJSON() string { return r.JSON.raw }
|
||||
func (r *CreateEmbeddingResponseUsage) UnmarshalJSON(data []byte) error {
|
||||
return apijson.UnmarshalRoot(data, r)
|
||||
}
|
||||
|
||||
func (r createEmbeddingResponseUsageJSON) RawJSON() string {
|
||||
return r.raw
|
||||
}
|
||||
|
||||
// Represents an embedding vector returned by embedding endpoint.
|
||||
type Embedding struct {
|
||||
// The embedding vector, which is a list of floats. The length of vector depends on
|
||||
// the model as listed in the
|
||||
// [embedding guide](https://platform.openai.com/docs/guides/embeddings).
|
||||
Embedding []float64 `json:"embedding,required"`
|
||||
Embedding []float64 `json:"embedding,omitzero,required"`
|
||||
// The index of the embedding in the list of embeddings.
|
||||
Index int64 `json:"index,required"`
|
||||
Index int64 `json:"index,omitzero,required"`
|
||||
// The object type, which is always "embedding".
|
||||
Object EmbeddingObject `json:"object,required"`
|
||||
JSON embeddingJSON `json:"-"`
|
||||
//
|
||||
// This field can be elided, and will be automatically set as "embedding".
|
||||
Object constant.Embedding `json:"object,required"`
|
||||
JSON struct {
|
||||
Embedding resp.Field
|
||||
Index resp.Field
|
||||
Object resp.Field
|
||||
raw string
|
||||
} `json:"-"`
|
||||
}
|
||||
|
||||
// embeddingJSON contains the JSON metadata for the struct [Embedding]
|
||||
type embeddingJSON struct {
|
||||
Embedding apijson.Field
|
||||
Index apijson.Field
|
||||
Object apijson.Field
|
||||
raw string
|
||||
ExtraFields map[string]apijson.Field
|
||||
}
|
||||
|
||||
func (r *Embedding) UnmarshalJSON(data []byte) (err error) {
|
||||
func (r Embedding) RawJSON() string { return r.JSON.raw }
|
||||
func (r *Embedding) UnmarshalJSON(data []byte) error {
|
||||
return apijson.UnmarshalRoot(data, r)
|
||||
}
|
||||
|
||||
func (r embeddingJSON) RawJSON() string {
|
||||
return r.raw
|
||||
}
|
||||
|
||||
// The object type, which is always "embedding".
|
||||
type EmbeddingObject string
|
||||
|
||||
const (
|
||||
EmbeddingObjectEmbedding EmbeddingObject = "embedding"
|
||||
)
|
||||
|
||||
func (r EmbeddingObject) IsKnown() bool {
|
||||
switch r {
|
||||
case EmbeddingObjectEmbedding:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
type EmbeddingModel = string
|
||||
|
||||
const (
|
||||
@@ -173,56 +126,49 @@ type EmbeddingNewParams struct {
|
||||
// [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken)
|
||||
// for counting tokens. Some models may also impose a limit on total number of
|
||||
// tokens summed across inputs.
|
||||
Input param.Field[EmbeddingNewParamsInputUnion] `json:"input,required"`
|
||||
Input EmbeddingNewParamsInputUnion `json:"input,omitzero,required"`
|
||||
// ID of the model to use. You can use the
|
||||
// [List models](https://platform.openai.com/docs/api-reference/models/list) API to
|
||||
// see all of your available models, or see our
|
||||
// [Model overview](https://platform.openai.com/docs/models) for descriptions of
|
||||
// them.
|
||||
Model param.Field[EmbeddingModel] `json:"model,required"`
|
||||
Model EmbeddingModel `json:"model,omitzero,required"`
|
||||
// The number of dimensions the resulting output embeddings should have. Only
|
||||
// supported in `text-embedding-3` and later models.
|
||||
Dimensions param.Field[int64] `json:"dimensions"`
|
||||
Dimensions param.Int `json:"dimensions,omitzero"`
|
||||
// The format to return the embeddings in. Can be either `float` or
|
||||
// [`base64`](https://pypi.org/project/pybase64/).
|
||||
EncodingFormat param.Field[EmbeddingNewParamsEncodingFormat] `json:"encoding_format"`
|
||||
//
|
||||
// Any of "float", "base64"
|
||||
EncodingFormat EmbeddingNewParamsEncodingFormat `json:"encoding_format,omitzero"`
|
||||
// A unique identifier representing your end-user, which can help OpenAI to monitor
|
||||
// and detect abuse.
|
||||
// [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids).
|
||||
User param.Field[string] `json:"user"`
|
||||
User param.String `json:"user,omitzero"`
|
||||
apiobject
|
||||
}
|
||||
|
||||
func (f EmbeddingNewParams) IsMissing() bool { return param.IsOmitted(f) || f.IsNull() }
|
||||
|
||||
func (r EmbeddingNewParams) MarshalJSON() (data []byte, err error) {
|
||||
return apijson.MarshalRoot(r)
|
||||
type shadow EmbeddingNewParams
|
||||
return param.MarshalObject(r, (*shadow)(&r))
|
||||
}
|
||||
|
||||
// Input text to embed, encoded as a string or array of tokens. To embed multiple
|
||||
// inputs in a single request, pass an array of strings or array of token arrays.
|
||||
// The input must not exceed the max input tokens for the model (8192 tokens for
|
||||
// `text-embedding-ada-002`), cannot be an empty string, and any array must be 2048
|
||||
// dimensions or less.
|
||||
// [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken)
|
||||
// for counting tokens. Some models may also impose a limit on total number of
|
||||
// tokens summed across inputs.
|
||||
//
|
||||
// Satisfied by [shared.UnionString], [EmbeddingNewParamsInputArrayOfStrings],
|
||||
// [EmbeddingNewParamsInputArrayOfTokens],
|
||||
// [EmbeddingNewParamsInputArrayOfTokenArrays].
|
||||
type EmbeddingNewParamsInputUnion interface {
|
||||
ImplementsEmbeddingNewParamsInputUnion()
|
||||
// Only one field can be non-zero
|
||||
type EmbeddingNewParamsInputUnion struct {
|
||||
OfString param.String
|
||||
OfArrayOfStrings []string
|
||||
OfArrayOfTokens []int64
|
||||
OfArrayOfTokenArrays [][]int64
|
||||
apiunion
|
||||
}
|
||||
|
||||
type EmbeddingNewParamsInputArrayOfStrings []string
|
||||
func (u EmbeddingNewParamsInputUnion) IsMissing() bool { return param.IsOmitted(u) || u.IsNull() }
|
||||
|
||||
func (r EmbeddingNewParamsInputArrayOfStrings) ImplementsEmbeddingNewParamsInputUnion() {}
|
||||
|
||||
type EmbeddingNewParamsInputArrayOfTokens []int64
|
||||
|
||||
func (r EmbeddingNewParamsInputArrayOfTokens) ImplementsEmbeddingNewParamsInputUnion() {}
|
||||
|
||||
type EmbeddingNewParamsInputArrayOfTokenArrays [][]int64
|
||||
|
||||
func (r EmbeddingNewParamsInputArrayOfTokenArrays) ImplementsEmbeddingNewParamsInputUnion() {}
|
||||
func (u EmbeddingNewParamsInputUnion) MarshalJSON() ([]byte, error) {
|
||||
return param.MarshalUnion[EmbeddingNewParamsInputUnion](u.OfString, u.OfArrayOfStrings, u.OfArrayOfTokens, u.OfArrayOfTokenArrays)
|
||||
}
|
||||
|
||||
// The format to return the embeddings in. Can be either `float` or
|
||||
// [`base64`](https://pypi.org/project/pybase64/).
|
||||
@@ -232,11 +178,3 @@ const (
|
||||
EmbeddingNewParamsEncodingFormatFloat EmbeddingNewParamsEncodingFormat = "float"
|
||||
EmbeddingNewParamsEncodingFormatBase64 EmbeddingNewParamsEncodingFormat = "base64"
|
||||
)
|
||||
|
||||
func (r EmbeddingNewParamsEncodingFormat) IsKnown() bool {
|
||||
switch r {
|
||||
case EmbeddingNewParamsEncodingFormatFloat, EmbeddingNewParamsEncodingFormatBase64:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
@@ -11,7 +11,6 @@ import (
|
||||
"github.com/openai/openai-go"
|
||||
"github.com/openai/openai-go/internal/testutil"
|
||||
"github.com/openai/openai-go/option"
|
||||
"github.com/openai/openai-go/shared"
|
||||
)
|
||||
|
||||
func TestEmbeddingNewWithOptionalParams(t *testing.T) {
|
||||
@@ -27,11 +26,13 @@ func TestEmbeddingNewWithOptionalParams(t *testing.T) {
|
||||
option.WithAPIKey("My API Key"),
|
||||
)
|
||||
_, err := client.Embeddings.New(context.TODO(), openai.EmbeddingNewParams{
|
||||
Input: openai.F[openai.EmbeddingNewParamsInputUnion](shared.UnionString("The quick brown fox jumped over the lazy dog")),
|
||||
Model: openai.F(openai.EmbeddingModelTextEmbeddingAda002),
|
||||
Dimensions: openai.F(int64(1)),
|
||||
EncodingFormat: openai.F(openai.EmbeddingNewParamsEncodingFormatFloat),
|
||||
User: openai.F("user-1234"),
|
||||
Input: openai.EmbeddingNewParamsInputUnion{
|
||||
OfString: openai.String("The quick brown fox jumped over the lazy dog"),
|
||||
},
|
||||
Model: openai.EmbeddingModelTextEmbeddingAda002,
|
||||
Dimensions: openai.Int(1),
|
||||
EncodingFormat: openai.EmbeddingNewParamsEncodingFormatFloat,
|
||||
User: openai.String("user-1234"),
|
||||
})
|
||||
if err != nil {
|
||||
var apierr *openai.Error
|
||||
|
||||
76
field.go
76
field.go
@@ -1,43 +1,51 @@
|
||||
package openai
|
||||
|
||||
import (
|
||||
"github.com/openai/openai-go/internal/param"
|
||||
"github.com/openai/openai-go/packages/param"
|
||||
"io"
|
||||
"time"
|
||||
)
|
||||
|
||||
// F is a param field helper used to initialize a [param.Field] generic struct.
|
||||
// This helps specify null, zero values, and overrides, as well as normal values.
|
||||
// You can read more about this in our [README].
|
||||
//
|
||||
// [README]: https://pkg.go.dev/github.com/openai/openai-go#readme-request-fields
|
||||
func F[T any](value T) param.Field[T] { return param.Field[T]{Value: value, Present: true} }
|
||||
func String(s string) param.String {
|
||||
fld := param.NeverOmitted[param.String]()
|
||||
fld.V = s
|
||||
return fld
|
||||
}
|
||||
|
||||
// Null is a param field helper which explicitly sends null to the API.
|
||||
func Null[T any]() param.Field[T] { return param.Field[T]{Null: true, Present: true} }
|
||||
func Int(i int64) param.Int {
|
||||
fld := param.NeverOmitted[param.Int]()
|
||||
fld.V = i
|
||||
return fld
|
||||
}
|
||||
|
||||
// Raw is a param field helper for specifying values for fields when the
|
||||
// type you are looking to send is different from the type that is specified in
|
||||
// the SDK. For example, if the type of the field is an integer, but you want
|
||||
// to send a float, you could do that by setting the corresponding field with
|
||||
// Raw[int](0.5).
|
||||
func Raw[T any](value any) param.Field[T] { return param.Field[T]{Raw: value, Present: true} }
|
||||
func Bool(b bool) param.Bool {
|
||||
fld := param.NeverOmitted[param.Bool]()
|
||||
fld.V = b
|
||||
return fld
|
||||
}
|
||||
|
||||
// Int is a param field helper which helps specify integers. This is
|
||||
// particularly helpful when specifying integer constants for fields.
|
||||
func Int(value int64) param.Field[int64] { return F(value) }
|
||||
func Float(f float64) param.Float {
|
||||
fld := param.NeverOmitted[param.Float]()
|
||||
fld.V = f
|
||||
return fld
|
||||
}
|
||||
|
||||
// String is a param field helper which helps specify strings.
|
||||
func String(value string) param.Field[string] { return F(value) }
|
||||
func Datetime(t time.Time) param.Datetime {
|
||||
fld := param.NeverOmitted[param.Datetime]()
|
||||
fld.V = t
|
||||
return fld
|
||||
}
|
||||
|
||||
// Float is a param field helper which helps specify floats.
|
||||
func Float(value float64) param.Field[float64] { return F(value) }
|
||||
func Date(t time.Time) param.Date {
|
||||
fld := param.NeverOmitted[param.Date]()
|
||||
fld.V = t
|
||||
return fld
|
||||
}
|
||||
|
||||
// Bool is a param field helper which helps specify bools.
|
||||
func Bool(value bool) param.Field[bool] { return F(value) }
|
||||
func Ptr[T any](v T) *T { return &v }
|
||||
|
||||
// FileParam is a param field helper which helps files with a mime content-type.
|
||||
func FileParam(reader io.Reader, filename string, contentType string) param.Field[io.Reader] {
|
||||
return F[io.Reader](&file{reader, filename, contentType})
|
||||
func File(rdr io.Reader, filename string, contentType string) file {
|
||||
return file{rdr, filename, contentType}
|
||||
}
|
||||
|
||||
type file struct {
|
||||
@@ -46,5 +54,15 @@ type file struct {
|
||||
contentType string
|
||||
}
|
||||
|
||||
func (f *file) ContentType() string { return f.contentType }
|
||||
func (f *file) Filename() string { return f.name }
|
||||
func (f file) Filename() string {
|
||||
if f.name != "" {
|
||||
return f.name
|
||||
} else if named, ok := f.Reader.(interface{ Name() string }); ok {
|
||||
return named.Name()
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func (f file) ContentType() string {
|
||||
return f.contentType
|
||||
}
|
||||
|
||||
181
file.go
181
file.go
@@ -15,10 +15,12 @@ import (
|
||||
"github.com/openai/openai-go/internal/apiform"
|
||||
"github.com/openai/openai-go/internal/apijson"
|
||||
"github.com/openai/openai-go/internal/apiquery"
|
||||
"github.com/openai/openai-go/internal/param"
|
||||
"github.com/openai/openai-go/internal/requestconfig"
|
||||
"github.com/openai/openai-go/option"
|
||||
"github.com/openai/openai-go/packages/pagination"
|
||||
"github.com/openai/openai-go/packages/param"
|
||||
"github.com/openai/openai-go/packages/resp"
|
||||
"github.com/openai/openai-go/shared/constant"
|
||||
)
|
||||
|
||||
// FileService contains methods and other services that help with interacting with
|
||||
@@ -34,8 +36,8 @@ type FileService struct {
|
||||
// NewFileService generates a new service that applies the given options to each
|
||||
// request. These options are applied after the parent client's options (if there
|
||||
// is one), and before any request-specific options.
|
||||
func NewFileService(opts ...option.RequestOption) (r *FileService) {
|
||||
r = &FileService{}
|
||||
func NewFileService(opts ...option.RequestOption) (r FileService) {
|
||||
r = FileService{}
|
||||
r.Options = opts
|
||||
return
|
||||
}
|
||||
@@ -129,113 +131,78 @@ func (r *FileService) Content(ctx context.Context, fileID string, opts ...option
|
||||
}
|
||||
|
||||
type FileDeleted struct {
|
||||
ID string `json:"id,required"`
|
||||
Deleted bool `json:"deleted,required"`
|
||||
Object FileDeletedObject `json:"object,required"`
|
||||
JSON fileDeletedJSON `json:"-"`
|
||||
ID string `json:"id,omitzero,required"`
|
||||
Deleted bool `json:"deleted,omitzero,required"`
|
||||
// This field can be elided, and will be automatically set as "file".
|
||||
Object constant.File `json:"object,required"`
|
||||
JSON struct {
|
||||
ID resp.Field
|
||||
Deleted resp.Field
|
||||
Object resp.Field
|
||||
raw string
|
||||
} `json:"-"`
|
||||
}
|
||||
|
||||
// fileDeletedJSON contains the JSON metadata for the struct [FileDeleted]
|
||||
type fileDeletedJSON struct {
|
||||
ID apijson.Field
|
||||
Deleted apijson.Field
|
||||
Object apijson.Field
|
||||
raw string
|
||||
ExtraFields map[string]apijson.Field
|
||||
}
|
||||
|
||||
func (r *FileDeleted) UnmarshalJSON(data []byte) (err error) {
|
||||
func (r FileDeleted) RawJSON() string { return r.JSON.raw }
|
||||
func (r *FileDeleted) UnmarshalJSON(data []byte) error {
|
||||
return apijson.UnmarshalRoot(data, r)
|
||||
}
|
||||
|
||||
func (r fileDeletedJSON) RawJSON() string {
|
||||
return r.raw
|
||||
}
|
||||
|
||||
type FileDeletedObject string
|
||||
|
||||
const (
|
||||
FileDeletedObjectFile FileDeletedObject = "file"
|
||||
)
|
||||
|
||||
func (r FileDeletedObject) IsKnown() bool {
|
||||
switch r {
|
||||
case FileDeletedObjectFile:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// The `File` object represents a document that has been uploaded to OpenAI.
|
||||
type FileObject struct {
|
||||
// The file identifier, which can be referenced in the API endpoints.
|
||||
ID string `json:"id,required"`
|
||||
ID string `json:"id,omitzero,required"`
|
||||
// The size of the file, in bytes.
|
||||
Bytes int64 `json:"bytes,required"`
|
||||
Bytes int64 `json:"bytes,omitzero,required"`
|
||||
// The Unix timestamp (in seconds) for when the file was created.
|
||||
CreatedAt int64 `json:"created_at,required"`
|
||||
CreatedAt int64 `json:"created_at,omitzero,required"`
|
||||
// The name of the file.
|
||||
Filename string `json:"filename,required"`
|
||||
Filename string `json:"filename,omitzero,required"`
|
||||
// The object type, which is always `file`.
|
||||
Object FileObjectObject `json:"object,required"`
|
||||
//
|
||||
// This field can be elided, and will be automatically set as "file".
|
||||
Object constant.File `json:"object,required"`
|
||||
// The intended purpose of the file. Supported values are `assistants`,
|
||||
// `assistants_output`, `batch`, `batch_output`, `fine-tune`, `fine-tune-results`
|
||||
// and `vision`.
|
||||
Purpose FileObjectPurpose `json:"purpose,required"`
|
||||
//
|
||||
// Any of "assistants", "assistants_output", "batch", "batch_output", "fine-tune",
|
||||
// "fine-tune-results", "vision"
|
||||
Purpose string `json:"purpose,omitzero,required"`
|
||||
// Deprecated. The current status of the file, which can be either `uploaded`,
|
||||
// `processed`, or `error`.
|
||||
//
|
||||
// Any of "uploaded", "processed", "error"
|
||||
//
|
||||
// Deprecated: deprecated
|
||||
Status FileObjectStatus `json:"status,required"`
|
||||
Status string `json:"status,omitzero,required"`
|
||||
// Deprecated. For details on why a fine-tuning training file failed validation,
|
||||
// see the `error` field on `fine_tuning.job`.
|
||||
//
|
||||
// Deprecated: deprecated
|
||||
StatusDetails string `json:"status_details"`
|
||||
JSON fileObjectJSON `json:"-"`
|
||||
StatusDetails string `json:"status_details,omitzero"`
|
||||
JSON struct {
|
||||
ID resp.Field
|
||||
Bytes resp.Field
|
||||
CreatedAt resp.Field
|
||||
Filename resp.Field
|
||||
Object resp.Field
|
||||
Purpose resp.Field
|
||||
Status resp.Field
|
||||
StatusDetails resp.Field
|
||||
raw string
|
||||
} `json:"-"`
|
||||
}
|
||||
|
||||
// fileObjectJSON contains the JSON metadata for the struct [FileObject]
|
||||
type fileObjectJSON struct {
|
||||
ID apijson.Field
|
||||
Bytes apijson.Field
|
||||
CreatedAt apijson.Field
|
||||
Filename apijson.Field
|
||||
Object apijson.Field
|
||||
Purpose apijson.Field
|
||||
Status apijson.Field
|
||||
StatusDetails apijson.Field
|
||||
raw string
|
||||
ExtraFields map[string]apijson.Field
|
||||
}
|
||||
|
||||
func (r *FileObject) UnmarshalJSON(data []byte) (err error) {
|
||||
func (r FileObject) RawJSON() string { return r.JSON.raw }
|
||||
func (r *FileObject) UnmarshalJSON(data []byte) error {
|
||||
return apijson.UnmarshalRoot(data, r)
|
||||
}
|
||||
|
||||
func (r fileObjectJSON) RawJSON() string {
|
||||
return r.raw
|
||||
}
|
||||
|
||||
// The object type, which is always `file`.
|
||||
type FileObjectObject string
|
||||
|
||||
const (
|
||||
FileObjectObjectFile FileObjectObject = "file"
|
||||
)
|
||||
|
||||
func (r FileObjectObject) IsKnown() bool {
|
||||
switch r {
|
||||
case FileObjectObjectFile:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// The intended purpose of the file. Supported values are `assistants`,
|
||||
// `assistants_output`, `batch`, `batch_output`, `fine-tune`, `fine-tune-results`
|
||||
// and `vision`.
|
||||
type FileObjectPurpose string
|
||||
type FileObjectPurpose = string
|
||||
|
||||
const (
|
||||
FileObjectPurposeAssistants FileObjectPurpose = "assistants"
|
||||
@@ -247,17 +214,9 @@ const (
|
||||
FileObjectPurposeVision FileObjectPurpose = "vision"
|
||||
)
|
||||
|
||||
func (r FileObjectPurpose) IsKnown() bool {
|
||||
switch r {
|
||||
case FileObjectPurposeAssistants, FileObjectPurposeAssistantsOutput, FileObjectPurposeBatch, FileObjectPurposeBatchOutput, FileObjectPurposeFineTune, FileObjectPurposeFineTuneResults, FileObjectPurposeVision:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// Deprecated. The current status of the file, which can be either `uploaded`,
|
||||
// `processed`, or `error`.
|
||||
type FileObjectStatus string
|
||||
type FileObjectStatus = string
|
||||
|
||||
const (
|
||||
FileObjectStatusUploaded FileObjectStatus = "uploaded"
|
||||
@@ -265,14 +224,6 @@ const (
|
||||
FileObjectStatusError FileObjectStatus = "error"
|
||||
)
|
||||
|
||||
func (r FileObjectStatus) IsKnown() bool {
|
||||
switch r {
|
||||
case FileObjectStatusUploaded, FileObjectStatusProcessed, FileObjectStatusError:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// The intended purpose of the uploaded file.
|
||||
//
|
||||
// Use "assistants" for
|
||||
@@ -290,17 +241,9 @@ const (
|
||||
FilePurposeVision FilePurpose = "vision"
|
||||
)
|
||||
|
||||
func (r FilePurpose) IsKnown() bool {
|
||||
switch r {
|
||||
case FilePurposeAssistants, FilePurposeBatch, FilePurposeFineTune, FilePurposeVision:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
type FileNewParams struct {
|
||||
// The File object (not file name) to be uploaded.
|
||||
File param.Field[io.Reader] `json:"file,required" format:"binary"`
|
||||
File io.Reader `json:"file,omitzero,required" format:"binary"`
|
||||
// The intended purpose of the uploaded file.
|
||||
//
|
||||
// Use "assistants" for
|
||||
@@ -309,9 +252,14 @@ type FileNewParams struct {
|
||||
// "vision" for Assistants image file inputs, "batch" for
|
||||
// [Batch API](https://platform.openai.com/docs/guides/batch), and "fine-tune" for
|
||||
// [Fine-tuning](https://platform.openai.com/docs/api-reference/fine-tuning).
|
||||
Purpose param.Field[FilePurpose] `json:"purpose,required"`
|
||||
//
|
||||
// Any of "assistants", "batch", "fine-tune", "vision"
|
||||
Purpose FilePurpose `json:"purpose,omitzero,required"`
|
||||
apiobject
|
||||
}
|
||||
|
||||
func (f FileNewParams) IsMissing() bool { return param.IsOmitted(f) || f.IsNull() }
|
||||
|
||||
func (r FileNewParams) MarshalMultipart() (data []byte, contentType string, err error) {
|
||||
buf := bytes.NewBuffer(nil)
|
||||
writer := multipart.NewWriter(buf)
|
||||
@@ -332,17 +280,22 @@ type FileListParams struct {
|
||||
// in the list. For instance, if you make a list request and receive 100 objects,
|
||||
// ending with obj_foo, your subsequent call can include after=obj_foo in order to
|
||||
// fetch the next page of the list.
|
||||
After param.Field[string] `query:"after"`
|
||||
After param.String `query:"after,omitzero"`
|
||||
// A limit on the number of objects to be returned. Limit can range between 1 and
|
||||
// 10,000, and the default is 10,000.
|
||||
Limit param.Field[int64] `query:"limit"`
|
||||
Limit param.Int `query:"limit,omitzero"`
|
||||
// Sort order by the `created_at` timestamp of the objects. `asc` for ascending
|
||||
// order and `desc` for descending order.
|
||||
Order param.Field[FileListParamsOrder] `query:"order"`
|
||||
//
|
||||
// Any of "asc", "desc"
|
||||
Order FileListParamsOrder `query:"order,omitzero"`
|
||||
// Only return files with the given purpose.
|
||||
Purpose param.Field[string] `query:"purpose"`
|
||||
Purpose param.String `query:"purpose,omitzero"`
|
||||
apiobject
|
||||
}
|
||||
|
||||
func (f FileListParams) IsMissing() bool { return param.IsOmitted(f) || f.IsNull() }
|
||||
|
||||
// URLQuery serializes [FileListParams]'s query parameters as `url.Values`.
|
||||
func (r FileListParams) URLQuery() (v url.Values) {
|
||||
return apiquery.MarshalWithSettings(r, apiquery.QuerySettings{
|
||||
@@ -359,11 +312,3 @@ const (
|
||||
FileListParamsOrderAsc FileListParamsOrder = "asc"
|
||||
FileListParamsOrderDesc FileListParamsOrder = "desc"
|
||||
)
|
||||
|
||||
func (r FileListParamsOrder) IsKnown() bool {
|
||||
switch r {
|
||||
case FileListParamsOrderAsc, FileListParamsOrderDesc:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
12
file_test.go
12
file_test.go
@@ -30,8 +30,8 @@ func TestFileNew(t *testing.T) {
|
||||
option.WithAPIKey("My API Key"),
|
||||
)
|
||||
_, err := client.Files.New(context.TODO(), openai.FileNewParams{
|
||||
File: openai.F(io.Reader(bytes.NewBuffer([]byte("some file contents")))),
|
||||
Purpose: openai.F(openai.FilePurposeAssistants),
|
||||
File: io.Reader(bytes.NewBuffer([]byte("some file contents"))),
|
||||
Purpose: openai.FilePurposeAssistants,
|
||||
})
|
||||
if err != nil {
|
||||
var apierr *openai.Error
|
||||
@@ -77,10 +77,10 @@ func TestFileListWithOptionalParams(t *testing.T) {
|
||||
option.WithAPIKey("My API Key"),
|
||||
)
|
||||
_, err := client.Files.List(context.TODO(), openai.FileListParams{
|
||||
After: openai.F("after"),
|
||||
Limit: openai.F(int64(0)),
|
||||
Order: openai.F(openai.FileListParamsOrderAsc),
|
||||
Purpose: openai.F("purpose"),
|
||||
After: openai.String("after"),
|
||||
Limit: openai.Int(0),
|
||||
Order: openai.FileListParamsOrderAsc,
|
||||
Purpose: openai.String("purpose"),
|
||||
})
|
||||
if err != nil {
|
||||
var apierr *openai.Error
|
||||
|
||||
@@ -14,14 +14,14 @@ import (
|
||||
// the [NewFineTuningService] method instead.
|
||||
type FineTuningService struct {
|
||||
Options []option.RequestOption
|
||||
Jobs *FineTuningJobService
|
||||
Jobs FineTuningJobService
|
||||
}
|
||||
|
||||
// NewFineTuningService generates a new service that applies the given options to
|
||||
// each request. These options are applied after the parent client's options (if
|
||||
// there is one), and before any request-specific options.
|
||||
func NewFineTuningService(opts ...option.RequestOption) (r *FineTuningService) {
|
||||
r = &FineTuningService{}
|
||||
func NewFineTuningService(opts ...option.RequestOption) (r FineTuningService) {
|
||||
r = FineTuningService{}
|
||||
r.Options = opts
|
||||
r.Jobs = NewFineTuningJobService(opts...)
|
||||
return
|
||||
|
||||
1497
finetuningjob.go
1497
finetuningjob.go
File diff suppressed because it is too large
Load Diff
@@ -11,6 +11,7 @@ import (
|
||||
"github.com/openai/openai-go"
|
||||
"github.com/openai/openai-go/internal/testutil"
|
||||
"github.com/openai/openai-go/option"
|
||||
"github.com/openai/openai-go/shared/constant"
|
||||
)
|
||||
|
||||
func TestFineTuningJobNewWithOptionalParams(t *testing.T) {
|
||||
@@ -26,43 +27,62 @@ func TestFineTuningJobNewWithOptionalParams(t *testing.T) {
|
||||
option.WithAPIKey("My API Key"),
|
||||
)
|
||||
_, err := client.FineTuning.Jobs.New(context.TODO(), openai.FineTuningJobNewParams{
|
||||
Model: openai.F(openai.FineTuningJobNewParamsModelBabbage002),
|
||||
TrainingFile: openai.F("file-abc123"),
|
||||
Hyperparameters: openai.F(openai.FineTuningJobNewParamsHyperparameters{
|
||||
BatchSize: openai.F[openai.FineTuningJobNewParamsHyperparametersBatchSizeUnion](openai.FineTuningJobNewParamsHyperparametersBatchSizeAuto(openai.FineTuningJobNewParamsHyperparametersBatchSizeAutoAuto)),
|
||||
LearningRateMultiplier: openai.F[openai.FineTuningJobNewParamsHyperparametersLearningRateMultiplierUnion](openai.FineTuningJobNewParamsHyperparametersLearningRateMultiplierAuto(openai.FineTuningJobNewParamsHyperparametersLearningRateMultiplierAutoAuto)),
|
||||
NEpochs: openai.F[openai.FineTuningJobNewParamsHyperparametersNEpochsUnion](openai.FineTuningJobNewParamsHyperparametersNEpochsAuto(openai.FineTuningJobNewParamsHyperparametersNEpochsAutoAuto)),
|
||||
}),
|
||||
Integrations: openai.F([]openai.FineTuningJobNewParamsIntegration{{
|
||||
Type: openai.F(openai.FineTuningJobNewParamsIntegrationsTypeWandb),
|
||||
Wandb: openai.F(openai.FineTuningJobNewParamsIntegrationsWandb{
|
||||
Project: openai.F("my-wandb-project"),
|
||||
Entity: openai.F("entity"),
|
||||
Name: openai.F("name"),
|
||||
Tags: openai.F([]string{"custom-tag"}),
|
||||
}),
|
||||
}}),
|
||||
Method: openai.F(openai.FineTuningJobNewParamsMethod{
|
||||
Dpo: openai.F(openai.FineTuningJobNewParamsMethodDpo{
|
||||
Hyperparameters: openai.F(openai.FineTuningJobNewParamsMethodDpoHyperparameters{
|
||||
BatchSize: openai.F[openai.FineTuningJobNewParamsMethodDpoHyperparametersBatchSizeUnion](openai.FineTuningJobNewParamsMethodDpoHyperparametersBatchSizeAuto(openai.FineTuningJobNewParamsMethodDpoHyperparametersBatchSizeAutoAuto)),
|
||||
Beta: openai.F[openai.FineTuningJobNewParamsMethodDpoHyperparametersBetaUnion](openai.FineTuningJobNewParamsMethodDpoHyperparametersBetaAuto(openai.FineTuningJobNewParamsMethodDpoHyperparametersBetaAutoAuto)),
|
||||
LearningRateMultiplier: openai.F[openai.FineTuningJobNewParamsMethodDpoHyperparametersLearningRateMultiplierUnion](openai.FineTuningJobNewParamsMethodDpoHyperparametersLearningRateMultiplierAuto(openai.FineTuningJobNewParamsMethodDpoHyperparametersLearningRateMultiplierAutoAuto)),
|
||||
NEpochs: openai.F[openai.FineTuningJobNewParamsMethodDpoHyperparametersNEpochsUnion](openai.FineTuningJobNewParamsMethodDpoHyperparametersNEpochsAuto(openai.FineTuningJobNewParamsMethodDpoHyperparametersNEpochsAutoAuto)),
|
||||
}),
|
||||
}),
|
||||
Supervised: openai.F(openai.FineTuningJobNewParamsMethodSupervised{
|
||||
Hyperparameters: openai.F(openai.FineTuningJobNewParamsMethodSupervisedHyperparameters{
|
||||
BatchSize: openai.F[openai.FineTuningJobNewParamsMethodSupervisedHyperparametersBatchSizeUnion](openai.FineTuningJobNewParamsMethodSupervisedHyperparametersBatchSizeAuto(openai.FineTuningJobNewParamsMethodSupervisedHyperparametersBatchSizeAutoAuto)),
|
||||
LearningRateMultiplier: openai.F[openai.FineTuningJobNewParamsMethodSupervisedHyperparametersLearningRateMultiplierUnion](openai.FineTuningJobNewParamsMethodSupervisedHyperparametersLearningRateMultiplierAuto(openai.FineTuningJobNewParamsMethodSupervisedHyperparametersLearningRateMultiplierAutoAuto)),
|
||||
NEpochs: openai.F[openai.FineTuningJobNewParamsMethodSupervisedHyperparametersNEpochsUnion](openai.FineTuningJobNewParamsMethodSupervisedHyperparametersNEpochsAuto(openai.FineTuningJobNewParamsMethodSupervisedHyperparametersNEpochsAutoAuto)),
|
||||
}),
|
||||
}),
|
||||
Type: openai.F(openai.FineTuningJobNewParamsMethodTypeSupervised),
|
||||
}),
|
||||
Seed: openai.F(int64(42)),
|
||||
Suffix: openai.F("x"),
|
||||
ValidationFile: openai.F("file-abc123"),
|
||||
Model: "babbage-002",
|
||||
TrainingFile: openai.String("file-abc123"),
|
||||
Hyperparameters: openai.FineTuningJobNewParamsHyperparameters{
|
||||
BatchSize: openai.FineTuningJobNewParamsHyperparametersBatchSizeUnion{
|
||||
OfAuto: constant.ValueOf[constant.Auto](),
|
||||
},
|
||||
LearningRateMultiplier: openai.FineTuningJobNewParamsHyperparametersLearningRateMultiplierUnion{
|
||||
OfAuto: constant.ValueOf[constant.Auto](),
|
||||
},
|
||||
NEpochs: openai.FineTuningJobNewParamsHyperparametersNEpochsUnion{
|
||||
OfAuto: constant.ValueOf[constant.Auto](),
|
||||
},
|
||||
},
|
||||
Integrations: []openai.FineTuningJobNewParamsIntegration{{
|
||||
Wandb: openai.FineTuningJobNewParamsIntegrationsWandb{
|
||||
Project: openai.String("my-wandb-project"),
|
||||
Entity: openai.String("entity"),
|
||||
Name: openai.String("name"),
|
||||
Tags: []string{"custom-tag"},
|
||||
},
|
||||
}},
|
||||
Method: openai.FineTuningJobNewParamsMethod{
|
||||
Dpo: openai.FineTuningJobNewParamsMethodDpo{
|
||||
Hyperparameters: openai.FineTuningJobNewParamsMethodDpoHyperparameters{
|
||||
BatchSize: openai.FineTuningJobNewParamsMethodDpoHyperparametersBatchSizeUnion{
|
||||
OfAuto: constant.ValueOf[constant.Auto](),
|
||||
},
|
||||
Beta: openai.FineTuningJobNewParamsMethodDpoHyperparametersBetaUnion{
|
||||
OfAuto: constant.ValueOf[constant.Auto](),
|
||||
},
|
||||
LearningRateMultiplier: openai.FineTuningJobNewParamsMethodDpoHyperparametersLearningRateMultiplierUnion{
|
||||
OfAuto: constant.ValueOf[constant.Auto](),
|
||||
},
|
||||
NEpochs: openai.FineTuningJobNewParamsMethodDpoHyperparametersNEpochsUnion{
|
||||
OfAuto: constant.ValueOf[constant.Auto](),
|
||||
},
|
||||
},
|
||||
},
|
||||
Supervised: openai.FineTuningJobNewParamsMethodSupervised{
|
||||
Hyperparameters: openai.FineTuningJobNewParamsMethodSupervisedHyperparameters{
|
||||
BatchSize: openai.FineTuningJobNewParamsMethodSupervisedHyperparametersBatchSizeUnion{
|
||||
OfAuto: constant.ValueOf[constant.Auto](),
|
||||
},
|
||||
LearningRateMultiplier: openai.FineTuningJobNewParamsMethodSupervisedHyperparametersLearningRateMultiplierUnion{
|
||||
OfAuto: constant.ValueOf[constant.Auto](),
|
||||
},
|
||||
NEpochs: openai.FineTuningJobNewParamsMethodSupervisedHyperparametersNEpochsUnion{
|
||||
OfAuto: constant.ValueOf[constant.Auto](),
|
||||
},
|
||||
},
|
||||
},
|
||||
Type: "supervised",
|
||||
},
|
||||
Seed: openai.Int(42),
|
||||
Suffix: openai.String("x"),
|
||||
ValidationFile: openai.String("file-abc123"),
|
||||
})
|
||||
if err != nil {
|
||||
var apierr *openai.Error
|
||||
@@ -108,8 +128,8 @@ func TestFineTuningJobListWithOptionalParams(t *testing.T) {
|
||||
option.WithAPIKey("My API Key"),
|
||||
)
|
||||
_, err := client.FineTuning.Jobs.List(context.TODO(), openai.FineTuningJobListParams{
|
||||
After: openai.F("after"),
|
||||
Limit: openai.F(int64(0)),
|
||||
After: openai.String("after"),
|
||||
Limit: openai.Int(0),
|
||||
})
|
||||
if err != nil {
|
||||
var apierr *openai.Error
|
||||
@@ -158,8 +178,8 @@ func TestFineTuningJobListEventsWithOptionalParams(t *testing.T) {
|
||||
context.TODO(),
|
||||
"ft-AF1WoRqd3aJAHsqc9NY7iL8F",
|
||||
openai.FineTuningJobListEventsParams{
|
||||
After: openai.F("after"),
|
||||
Limit: openai.F(int64(0)),
|
||||
After: openai.String("after"),
|
||||
Limit: openai.Int(0),
|
||||
},
|
||||
)
|
||||
if err != nil {
|
||||
|
||||
@@ -11,10 +11,12 @@ import (
|
||||
|
||||
"github.com/openai/openai-go/internal/apijson"
|
||||
"github.com/openai/openai-go/internal/apiquery"
|
||||
"github.com/openai/openai-go/internal/param"
|
||||
"github.com/openai/openai-go/internal/requestconfig"
|
||||
"github.com/openai/openai-go/option"
|
||||
"github.com/openai/openai-go/packages/pagination"
|
||||
"github.com/openai/openai-go/packages/param"
|
||||
"github.com/openai/openai-go/packages/resp"
|
||||
"github.com/openai/openai-go/shared/constant"
|
||||
)
|
||||
|
||||
// FineTuningJobCheckpointService contains methods and other services that help
|
||||
@@ -30,8 +32,8 @@ type FineTuningJobCheckpointService struct {
|
||||
// NewFineTuningJobCheckpointService generates a new service that applies the given
|
||||
// options to each request. These options are applied after the parent client's
|
||||
// options (if there is one), and before any request-specific options.
|
||||
func NewFineTuningJobCheckpointService(opts ...option.RequestOption) (r *FineTuningJobCheckpointService) {
|
||||
r = &FineTuningJobCheckpointService{}
|
||||
func NewFineTuningJobCheckpointService(opts ...option.RequestOption) (r FineTuningJobCheckpointService) {
|
||||
r = FineTuningJobCheckpointService{}
|
||||
r.Options = opts
|
||||
return
|
||||
}
|
||||
@@ -67,100 +69,75 @@ func (r *FineTuningJobCheckpointService) ListAutoPaging(ctx context.Context, fin
|
||||
// fine-tuning job that is ready to use.
|
||||
type FineTuningJobCheckpoint struct {
|
||||
// The checkpoint identifier, which can be referenced in the API endpoints.
|
||||
ID string `json:"id,required"`
|
||||
ID string `json:"id,omitzero,required"`
|
||||
// The Unix timestamp (in seconds) for when the checkpoint was created.
|
||||
CreatedAt int64 `json:"created_at,required"`
|
||||
CreatedAt int64 `json:"created_at,omitzero,required"`
|
||||
// The name of the fine-tuned checkpoint model that is created.
|
||||
FineTunedModelCheckpoint string `json:"fine_tuned_model_checkpoint,required"`
|
||||
FineTunedModelCheckpoint string `json:"fine_tuned_model_checkpoint,omitzero,required"`
|
||||
// The name of the fine-tuning job that this checkpoint was created from.
|
||||
FineTuningJobID string `json:"fine_tuning_job_id,required"`
|
||||
FineTuningJobID string `json:"fine_tuning_job_id,omitzero,required"`
|
||||
// Metrics at the step number during the fine-tuning job.
|
||||
Metrics FineTuningJobCheckpointMetrics `json:"metrics,required"`
|
||||
Metrics FineTuningJobCheckpointMetrics `json:"metrics,omitzero,required"`
|
||||
// The object type, which is always "fine_tuning.job.checkpoint".
|
||||
Object FineTuningJobCheckpointObject `json:"object,required"`
|
||||
//
|
||||
// This field can be elided, and will be automatically set as
|
||||
// "fine_tuning.job.checkpoint".
|
||||
Object constant.FineTuningJobCheckpoint `json:"object,required"`
|
||||
// The step number that the checkpoint was created at.
|
||||
StepNumber int64 `json:"step_number,required"`
|
||||
JSON fineTuningJobCheckpointJSON `json:"-"`
|
||||
StepNumber int64 `json:"step_number,omitzero,required"`
|
||||
JSON struct {
|
||||
ID resp.Field
|
||||
CreatedAt resp.Field
|
||||
FineTunedModelCheckpoint resp.Field
|
||||
FineTuningJobID resp.Field
|
||||
Metrics resp.Field
|
||||
Object resp.Field
|
||||
StepNumber resp.Field
|
||||
raw string
|
||||
} `json:"-"`
|
||||
}
|
||||
|
||||
// fineTuningJobCheckpointJSON contains the JSON metadata for the struct
|
||||
// [FineTuningJobCheckpoint]
|
||||
type fineTuningJobCheckpointJSON struct {
|
||||
ID apijson.Field
|
||||
CreatedAt apijson.Field
|
||||
FineTunedModelCheckpoint apijson.Field
|
||||
FineTuningJobID apijson.Field
|
||||
Metrics apijson.Field
|
||||
Object apijson.Field
|
||||
StepNumber apijson.Field
|
||||
raw string
|
||||
ExtraFields map[string]apijson.Field
|
||||
}
|
||||
|
||||
func (r *FineTuningJobCheckpoint) UnmarshalJSON(data []byte) (err error) {
|
||||
func (r FineTuningJobCheckpoint) RawJSON() string { return r.JSON.raw }
|
||||
func (r *FineTuningJobCheckpoint) UnmarshalJSON(data []byte) error {
|
||||
return apijson.UnmarshalRoot(data, r)
|
||||
}
|
||||
|
||||
func (r fineTuningJobCheckpointJSON) RawJSON() string {
|
||||
return r.raw
|
||||
}
|
||||
|
||||
// Metrics at the step number during the fine-tuning job.
|
||||
type FineTuningJobCheckpointMetrics struct {
|
||||
FullValidLoss float64 `json:"full_valid_loss"`
|
||||
FullValidMeanTokenAccuracy float64 `json:"full_valid_mean_token_accuracy"`
|
||||
Step float64 `json:"step"`
|
||||
TrainLoss float64 `json:"train_loss"`
|
||||
TrainMeanTokenAccuracy float64 `json:"train_mean_token_accuracy"`
|
||||
ValidLoss float64 `json:"valid_loss"`
|
||||
ValidMeanTokenAccuracy float64 `json:"valid_mean_token_accuracy"`
|
||||
JSON fineTuningJobCheckpointMetricsJSON `json:"-"`
|
||||
FullValidLoss float64 `json:"full_valid_loss,omitzero"`
|
||||
FullValidMeanTokenAccuracy float64 `json:"full_valid_mean_token_accuracy,omitzero"`
|
||||
Step float64 `json:"step,omitzero"`
|
||||
TrainLoss float64 `json:"train_loss,omitzero"`
|
||||
TrainMeanTokenAccuracy float64 `json:"train_mean_token_accuracy,omitzero"`
|
||||
ValidLoss float64 `json:"valid_loss,omitzero"`
|
||||
ValidMeanTokenAccuracy float64 `json:"valid_mean_token_accuracy,omitzero"`
|
||||
JSON struct {
|
||||
FullValidLoss resp.Field
|
||||
FullValidMeanTokenAccuracy resp.Field
|
||||
Step resp.Field
|
||||
TrainLoss resp.Field
|
||||
TrainMeanTokenAccuracy resp.Field
|
||||
ValidLoss resp.Field
|
||||
ValidMeanTokenAccuracy resp.Field
|
||||
raw string
|
||||
} `json:"-"`
|
||||
}
|
||||
|
||||
// fineTuningJobCheckpointMetricsJSON contains the JSON metadata for the struct
|
||||
// [FineTuningJobCheckpointMetrics]
|
||||
type fineTuningJobCheckpointMetricsJSON struct {
|
||||
FullValidLoss apijson.Field
|
||||
FullValidMeanTokenAccuracy apijson.Field
|
||||
Step apijson.Field
|
||||
TrainLoss apijson.Field
|
||||
TrainMeanTokenAccuracy apijson.Field
|
||||
ValidLoss apijson.Field
|
||||
ValidMeanTokenAccuracy apijson.Field
|
||||
raw string
|
||||
ExtraFields map[string]apijson.Field
|
||||
}
|
||||
|
||||
func (r *FineTuningJobCheckpointMetrics) UnmarshalJSON(data []byte) (err error) {
|
||||
func (r FineTuningJobCheckpointMetrics) RawJSON() string { return r.JSON.raw }
|
||||
func (r *FineTuningJobCheckpointMetrics) UnmarshalJSON(data []byte) error {
|
||||
return apijson.UnmarshalRoot(data, r)
|
||||
}
|
||||
|
||||
func (r fineTuningJobCheckpointMetricsJSON) RawJSON() string {
|
||||
return r.raw
|
||||
}
|
||||
|
||||
// The object type, which is always "fine_tuning.job.checkpoint".
|
||||
type FineTuningJobCheckpointObject string
|
||||
|
||||
const (
|
||||
FineTuningJobCheckpointObjectFineTuningJobCheckpoint FineTuningJobCheckpointObject = "fine_tuning.job.checkpoint"
|
||||
)
|
||||
|
||||
func (r FineTuningJobCheckpointObject) IsKnown() bool {
|
||||
switch r {
|
||||
case FineTuningJobCheckpointObjectFineTuningJobCheckpoint:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
type FineTuningJobCheckpointListParams struct {
|
||||
// Identifier for the last checkpoint ID from the previous pagination request.
|
||||
After param.Field[string] `query:"after"`
|
||||
After param.String `query:"after,omitzero"`
|
||||
// Number of checkpoints to retrieve.
|
||||
Limit param.Field[int64] `query:"limit"`
|
||||
Limit param.Int `query:"limit,omitzero"`
|
||||
apiobject
|
||||
}
|
||||
|
||||
func (f FineTuningJobCheckpointListParams) IsMissing() bool { return param.IsOmitted(f) || f.IsNull() }
|
||||
|
||||
// URLQuery serializes [FineTuningJobCheckpointListParams]'s query parameters as
|
||||
// `url.Values`.
|
||||
func (r FineTuningJobCheckpointListParams) URLQuery() (v url.Values) {
|
||||
|
||||
@@ -29,8 +29,8 @@ func TestFineTuningJobCheckpointListWithOptionalParams(t *testing.T) {
|
||||
context.TODO(),
|
||||
"ft-AF1WoRqd3aJAHsqc9NY7iL8F",
|
||||
openai.FineTuningJobCheckpointListParams{
|
||||
After: openai.F("after"),
|
||||
Limit: openai.F(int64(0)),
|
||||
After: openai.String("after"),
|
||||
Limit: openai.Int(0),
|
||||
},
|
||||
)
|
||||
if err != nil {
|
||||
|
||||
197
image.go
197
image.go
@@ -11,9 +11,10 @@ import (
|
||||
|
||||
"github.com/openai/openai-go/internal/apiform"
|
||||
"github.com/openai/openai-go/internal/apijson"
|
||||
"github.com/openai/openai-go/internal/param"
|
||||
"github.com/openai/openai-go/internal/requestconfig"
|
||||
"github.com/openai/openai-go/option"
|
||||
"github.com/openai/openai-go/packages/param"
|
||||
"github.com/openai/openai-go/packages/resp"
|
||||
)
|
||||
|
||||
// ImageService contains methods and other services that help with interacting with
|
||||
@@ -29,8 +30,8 @@ type ImageService struct {
|
||||
// NewImageService generates a new service that applies the given options to each
|
||||
// request. These options are applied after the parent client's options (if there
|
||||
// is one), and before any request-specific options.
|
||||
func NewImageService(opts ...option.RequestOption) (r *ImageService) {
|
||||
r = &ImageService{}
|
||||
func NewImageService(opts ...option.RequestOption) (r ImageService) {
|
||||
r = ImageService{}
|
||||
r.Options = opts
|
||||
return
|
||||
}
|
||||
@@ -63,32 +64,25 @@ func (r *ImageService) Generate(ctx context.Context, body ImageGenerateParams, o
|
||||
type Image struct {
|
||||
// The base64-encoded JSON of the generated image, if `response_format` is
|
||||
// `b64_json`.
|
||||
B64JSON string `json:"b64_json"`
|
||||
B64JSON string `json:"b64_json,omitzero"`
|
||||
// The prompt that was used to generate the image, if there was any revision to the
|
||||
// prompt.
|
||||
RevisedPrompt string `json:"revised_prompt"`
|
||||
RevisedPrompt string `json:"revised_prompt,omitzero"`
|
||||
// The URL of the generated image, if `response_format` is `url` (default).
|
||||
URL string `json:"url"`
|
||||
JSON imageJSON `json:"-"`
|
||||
URL string `json:"url,omitzero"`
|
||||
JSON struct {
|
||||
B64JSON resp.Field
|
||||
RevisedPrompt resp.Field
|
||||
URL resp.Field
|
||||
raw string
|
||||
} `json:"-"`
|
||||
}
|
||||
|
||||
// imageJSON contains the JSON metadata for the struct [Image]
|
||||
type imageJSON struct {
|
||||
B64JSON apijson.Field
|
||||
RevisedPrompt apijson.Field
|
||||
URL apijson.Field
|
||||
raw string
|
||||
ExtraFields map[string]apijson.Field
|
||||
}
|
||||
|
||||
func (r *Image) UnmarshalJSON(data []byte) (err error) {
|
||||
func (r Image) RawJSON() string { return r.JSON.raw }
|
||||
func (r *Image) UnmarshalJSON(data []byte) error {
|
||||
return apijson.UnmarshalRoot(data, r)
|
||||
}
|
||||
|
||||
func (r imageJSON) RawJSON() string {
|
||||
return r.raw
|
||||
}
|
||||
|
||||
type ImageModel = string
|
||||
|
||||
const (
|
||||
@@ -97,50 +91,50 @@ const (
|
||||
)
|
||||
|
||||
type ImagesResponse struct {
|
||||
Created int64 `json:"created,required"`
|
||||
Data []Image `json:"data,required"`
|
||||
JSON imagesResponseJSON `json:"-"`
|
||||
Created int64 `json:"created,omitzero,required"`
|
||||
Data []Image `json:"data,omitzero,required"`
|
||||
JSON struct {
|
||||
Created resp.Field
|
||||
Data resp.Field
|
||||
raw string
|
||||
} `json:"-"`
|
||||
}
|
||||
|
||||
// imagesResponseJSON contains the JSON metadata for the struct [ImagesResponse]
|
||||
type imagesResponseJSON struct {
|
||||
Created apijson.Field
|
||||
Data apijson.Field
|
||||
raw string
|
||||
ExtraFields map[string]apijson.Field
|
||||
}
|
||||
|
||||
func (r *ImagesResponse) UnmarshalJSON(data []byte) (err error) {
|
||||
func (r ImagesResponse) RawJSON() string { return r.JSON.raw }
|
||||
func (r *ImagesResponse) UnmarshalJSON(data []byte) error {
|
||||
return apijson.UnmarshalRoot(data, r)
|
||||
}
|
||||
|
||||
func (r imagesResponseJSON) RawJSON() string {
|
||||
return r.raw
|
||||
}
|
||||
|
||||
type ImageNewVariationParams struct {
|
||||
// The image to use as the basis for the variation(s). Must be a valid PNG file,
|
||||
// less than 4MB, and square.
|
||||
Image param.Field[io.Reader] `json:"image,required" format:"binary"`
|
||||
Image io.Reader `json:"image,omitzero,required" format:"binary"`
|
||||
// The model to use for image generation. Only `dall-e-2` is supported at this
|
||||
// time.
|
||||
Model param.Field[ImageModel] `json:"model"`
|
||||
Model ImageModel `json:"model,omitzero"`
|
||||
// The number of images to generate. Must be between 1 and 10. For `dall-e-3`, only
|
||||
// `n=1` is supported.
|
||||
N param.Field[int64] `json:"n"`
|
||||
N param.Int `json:"n,omitzero"`
|
||||
// The format in which the generated images are returned. Must be one of `url` or
|
||||
// `b64_json`. URLs are only valid for 60 minutes after the image has been
|
||||
// generated.
|
||||
ResponseFormat param.Field[ImageNewVariationParamsResponseFormat] `json:"response_format"`
|
||||
//
|
||||
// Any of "url", "b64_json"
|
||||
ResponseFormat ImageNewVariationParamsResponseFormat `json:"response_format,omitzero"`
|
||||
// The size of the generated images. Must be one of `256x256`, `512x512`, or
|
||||
// `1024x1024`.
|
||||
Size param.Field[ImageNewVariationParamsSize] `json:"size"`
|
||||
//
|
||||
// Any of "256x256", "512x512", "1024x1024"
|
||||
Size ImageNewVariationParamsSize `json:"size,omitzero"`
|
||||
// A unique identifier representing your end-user, which can help OpenAI to monitor
|
||||
// and detect abuse.
|
||||
// [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids).
|
||||
User param.Field[string] `json:"user"`
|
||||
User param.String `json:"user,omitzero"`
|
||||
apiobject
|
||||
}
|
||||
|
||||
func (f ImageNewVariationParams) IsMissing() bool { return param.IsOmitted(f) || f.IsNull() }
|
||||
|
||||
func (r ImageNewVariationParams) MarshalMultipart() (data []byte, contentType string, err error) {
|
||||
buf := bytes.NewBuffer(nil)
|
||||
writer := multipart.NewWriter(buf)
|
||||
@@ -166,14 +160,6 @@ const (
|
||||
ImageNewVariationParamsResponseFormatB64JSON ImageNewVariationParamsResponseFormat = "b64_json"
|
||||
)
|
||||
|
||||
func (r ImageNewVariationParamsResponseFormat) IsKnown() bool {
|
||||
switch r {
|
||||
case ImageNewVariationParamsResponseFormatURL, ImageNewVariationParamsResponseFormatB64JSON:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// The size of the generated images. Must be one of `256x256`, `512x512`, or
|
||||
// `1024x1024`.
|
||||
type ImageNewVariationParamsSize string
|
||||
@@ -184,43 +170,42 @@ const (
|
||||
ImageNewVariationParamsSize1024x1024 ImageNewVariationParamsSize = "1024x1024"
|
||||
)
|
||||
|
||||
func (r ImageNewVariationParamsSize) IsKnown() bool {
|
||||
switch r {
|
||||
case ImageNewVariationParamsSize256x256, ImageNewVariationParamsSize512x512, ImageNewVariationParamsSize1024x1024:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
type ImageEditParams struct {
|
||||
// The image to edit. Must be a valid PNG file, less than 4MB, and square. If mask
|
||||
// is not provided, image must have transparency, which will be used as the mask.
|
||||
Image param.Field[io.Reader] `json:"image,required" format:"binary"`
|
||||
Image io.Reader `json:"image,omitzero,required" format:"binary"`
|
||||
// A text description of the desired image(s). The maximum length is 1000
|
||||
// characters.
|
||||
Prompt param.Field[string] `json:"prompt,required"`
|
||||
Prompt param.String `json:"prompt,omitzero,required"`
|
||||
// An additional image whose fully transparent areas (e.g. where alpha is zero)
|
||||
// indicate where `image` should be edited. Must be a valid PNG file, less than
|
||||
// 4MB, and have the same dimensions as `image`.
|
||||
Mask param.Field[io.Reader] `json:"mask" format:"binary"`
|
||||
Mask io.Reader `json:"mask,omitzero" format:"binary"`
|
||||
// The model to use for image generation. Only `dall-e-2` is supported at this
|
||||
// time.
|
||||
Model param.Field[ImageModel] `json:"model"`
|
||||
Model ImageModel `json:"model,omitzero"`
|
||||
// The number of images to generate. Must be between 1 and 10.
|
||||
N param.Field[int64] `json:"n"`
|
||||
N param.Int `json:"n,omitzero"`
|
||||
// The format in which the generated images are returned. Must be one of `url` or
|
||||
// `b64_json`. URLs are only valid for 60 minutes after the image has been
|
||||
// generated.
|
||||
ResponseFormat param.Field[ImageEditParamsResponseFormat] `json:"response_format"`
|
||||
//
|
||||
// Any of "url", "b64_json"
|
||||
ResponseFormat ImageEditParamsResponseFormat `json:"response_format,omitzero"`
|
||||
// The size of the generated images. Must be one of `256x256`, `512x512`, or
|
||||
// `1024x1024`.
|
||||
Size param.Field[ImageEditParamsSize] `json:"size"`
|
||||
//
|
||||
// Any of "256x256", "512x512", "1024x1024"
|
||||
Size ImageEditParamsSize `json:"size,omitzero"`
|
||||
// A unique identifier representing your end-user, which can help OpenAI to monitor
|
||||
// and detect abuse.
|
||||
// [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids).
|
||||
User param.Field[string] `json:"user"`
|
||||
User param.String `json:"user,omitzero"`
|
||||
apiobject
|
||||
}
|
||||
|
||||
func (f ImageEditParams) IsMissing() bool { return param.IsOmitted(f) || f.IsNull() }
|
||||
|
||||
func (r ImageEditParams) MarshalMultipart() (data []byte, contentType string, err error) {
|
||||
buf := bytes.NewBuffer(nil)
|
||||
writer := multipart.NewWriter(buf)
|
||||
@@ -246,14 +231,6 @@ const (
|
||||
ImageEditParamsResponseFormatB64JSON ImageEditParamsResponseFormat = "b64_json"
|
||||
)
|
||||
|
||||
func (r ImageEditParamsResponseFormat) IsKnown() bool {
|
||||
switch r {
|
||||
case ImageEditParamsResponseFormatURL, ImageEditParamsResponseFormatB64JSON:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// The size of the generated images. Must be one of `256x256`, `512x512`, or
|
||||
// `1024x1024`.
|
||||
type ImageEditParamsSize string
|
||||
@@ -264,48 +241,52 @@ const (
|
||||
ImageEditParamsSize1024x1024 ImageEditParamsSize = "1024x1024"
|
||||
)
|
||||
|
||||
func (r ImageEditParamsSize) IsKnown() bool {
|
||||
switch r {
|
||||
case ImageEditParamsSize256x256, ImageEditParamsSize512x512, ImageEditParamsSize1024x1024:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
type ImageGenerateParams struct {
|
||||
// A text description of the desired image(s). The maximum length is 1000
|
||||
// characters for `dall-e-2` and 4000 characters for `dall-e-3`.
|
||||
Prompt param.Field[string] `json:"prompt,required"`
|
||||
Prompt param.String `json:"prompt,omitzero,required"`
|
||||
// The model to use for image generation.
|
||||
Model param.Field[ImageModel] `json:"model"`
|
||||
Model ImageModel `json:"model,omitzero"`
|
||||
// The number of images to generate. Must be between 1 and 10. For `dall-e-3`, only
|
||||
// `n=1` is supported.
|
||||
N param.Field[int64] `json:"n"`
|
||||
N param.Int `json:"n,omitzero"`
|
||||
// The quality of the image that will be generated. `hd` creates images with finer
|
||||
// details and greater consistency across the image. This param is only supported
|
||||
// for `dall-e-3`.
|
||||
Quality param.Field[ImageGenerateParamsQuality] `json:"quality"`
|
||||
//
|
||||
// Any of "standard", "hd"
|
||||
Quality ImageGenerateParamsQuality `json:"quality,omitzero"`
|
||||
// The format in which the generated images are returned. Must be one of `url` or
|
||||
// `b64_json`. URLs are only valid for 60 minutes after the image has been
|
||||
// generated.
|
||||
ResponseFormat param.Field[ImageGenerateParamsResponseFormat] `json:"response_format"`
|
||||
//
|
||||
// Any of "url", "b64_json"
|
||||
ResponseFormat ImageGenerateParamsResponseFormat `json:"response_format,omitzero"`
|
||||
// The size of the generated images. Must be one of `256x256`, `512x512`, or
|
||||
// `1024x1024` for `dall-e-2`. Must be one of `1024x1024`, `1792x1024`, or
|
||||
// `1024x1792` for `dall-e-3` models.
|
||||
Size param.Field[ImageGenerateParamsSize] `json:"size"`
|
||||
//
|
||||
// Any of "256x256", "512x512", "1024x1024", "1792x1024", "1024x1792"
|
||||
Size ImageGenerateParamsSize `json:"size,omitzero"`
|
||||
// The style of the generated images. Must be one of `vivid` or `natural`. Vivid
|
||||
// causes the model to lean towards generating hyper-real and dramatic images.
|
||||
// Natural causes the model to produce more natural, less hyper-real looking
|
||||
// images. This param is only supported for `dall-e-3`.
|
||||
Style param.Field[ImageGenerateParamsStyle] `json:"style"`
|
||||
//
|
||||
// Any of "vivid", "natural"
|
||||
Style ImageGenerateParamsStyle `json:"style,omitzero"`
|
||||
// A unique identifier representing your end-user, which can help OpenAI to monitor
|
||||
// and detect abuse.
|
||||
// [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids).
|
||||
User param.Field[string] `json:"user"`
|
||||
User param.String `json:"user,omitzero"`
|
||||
apiobject
|
||||
}
|
||||
|
||||
func (f ImageGenerateParams) IsMissing() bool { return param.IsOmitted(f) || f.IsNull() }
|
||||
|
||||
func (r ImageGenerateParams) MarshalJSON() (data []byte, err error) {
|
||||
return apijson.MarshalRoot(r)
|
||||
type shadow ImageGenerateParams
|
||||
return param.MarshalObject(r, (*shadow)(&r))
|
||||
}
|
||||
|
||||
// The quality of the image that will be generated. `hd` creates images with finer
|
||||
@@ -318,14 +299,6 @@ const (
|
||||
ImageGenerateParamsQualityHD ImageGenerateParamsQuality = "hd"
|
||||
)
|
||||
|
||||
func (r ImageGenerateParamsQuality) IsKnown() bool {
|
||||
switch r {
|
||||
case ImageGenerateParamsQualityStandard, ImageGenerateParamsQualityHD:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// The format in which the generated images are returned. Must be one of `url` or
|
||||
// `b64_json`. URLs are only valid for 60 minutes after the image has been
|
||||
// generated.
|
||||
@@ -336,14 +309,6 @@ const (
|
||||
ImageGenerateParamsResponseFormatB64JSON ImageGenerateParamsResponseFormat = "b64_json"
|
||||
)
|
||||
|
||||
func (r ImageGenerateParamsResponseFormat) IsKnown() bool {
|
||||
switch r {
|
||||
case ImageGenerateParamsResponseFormatURL, ImageGenerateParamsResponseFormatB64JSON:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// The size of the generated images. Must be one of `256x256`, `512x512`, or
|
||||
// `1024x1024` for `dall-e-2`. Must be one of `1024x1024`, `1792x1024`, or
|
||||
// `1024x1792` for `dall-e-3` models.
|
||||
@@ -357,14 +322,6 @@ const (
|
||||
ImageGenerateParamsSize1024x1792 ImageGenerateParamsSize = "1024x1792"
|
||||
)
|
||||
|
||||
func (r ImageGenerateParamsSize) IsKnown() bool {
|
||||
switch r {
|
||||
case ImageGenerateParamsSize256x256, ImageGenerateParamsSize512x512, ImageGenerateParamsSize1024x1024, ImageGenerateParamsSize1792x1024, ImageGenerateParamsSize1024x1792:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// The style of the generated images. Must be one of `vivid` or `natural`. Vivid
|
||||
// causes the model to lean towards generating hyper-real and dramatic images.
|
||||
// Natural causes the model to produce more natural, less hyper-real looking
|
||||
@@ -375,11 +332,3 @@ const (
|
||||
ImageGenerateParamsStyleVivid ImageGenerateParamsStyle = "vivid"
|
||||
ImageGenerateParamsStyleNatural ImageGenerateParamsStyle = "natural"
|
||||
)
|
||||
|
||||
func (r ImageGenerateParamsStyle) IsKnown() bool {
|
||||
switch r {
|
||||
case ImageGenerateParamsStyleVivid, ImageGenerateParamsStyleNatural:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
@@ -28,12 +28,12 @@ func TestImageNewVariationWithOptionalParams(t *testing.T) {
|
||||
option.WithAPIKey("My API Key"),
|
||||
)
|
||||
_, err := client.Images.NewVariation(context.TODO(), openai.ImageNewVariationParams{
|
||||
Image: openai.F(io.Reader(bytes.NewBuffer([]byte("some file contents")))),
|
||||
Model: openai.F(openai.ImageModelDallE2),
|
||||
N: openai.F(int64(1)),
|
||||
ResponseFormat: openai.F(openai.ImageNewVariationParamsResponseFormatURL),
|
||||
Size: openai.F(openai.ImageNewVariationParamsSize256x256),
|
||||
User: openai.F("user-1234"),
|
||||
Image: io.Reader(bytes.NewBuffer([]byte("some file contents"))),
|
||||
Model: openai.ImageModelDallE2,
|
||||
N: openai.Int(1),
|
||||
ResponseFormat: openai.ImageNewVariationParamsResponseFormatURL,
|
||||
Size: openai.ImageNewVariationParamsSize256x256,
|
||||
User: openai.String("user-1234"),
|
||||
})
|
||||
if err != nil {
|
||||
var apierr *openai.Error
|
||||
@@ -57,14 +57,14 @@ func TestImageEditWithOptionalParams(t *testing.T) {
|
||||
option.WithAPIKey("My API Key"),
|
||||
)
|
||||
_, err := client.Images.Edit(context.TODO(), openai.ImageEditParams{
|
||||
Image: openai.F(io.Reader(bytes.NewBuffer([]byte("some file contents")))),
|
||||
Prompt: openai.F("A cute baby sea otter wearing a beret"),
|
||||
Mask: openai.F(io.Reader(bytes.NewBuffer([]byte("some file contents")))),
|
||||
Model: openai.F(openai.ImageModelDallE2),
|
||||
N: openai.F(int64(1)),
|
||||
ResponseFormat: openai.F(openai.ImageEditParamsResponseFormatURL),
|
||||
Size: openai.F(openai.ImageEditParamsSize256x256),
|
||||
User: openai.F("user-1234"),
|
||||
Image: io.Reader(bytes.NewBuffer([]byte("some file contents"))),
|
||||
Prompt: openai.String("A cute baby sea otter wearing a beret"),
|
||||
Mask: io.Reader(bytes.NewBuffer([]byte("some file contents"))),
|
||||
Model: openai.ImageModelDallE2,
|
||||
N: openai.Int(1),
|
||||
ResponseFormat: openai.ImageEditParamsResponseFormatURL,
|
||||
Size: openai.ImageEditParamsSize256x256,
|
||||
User: openai.String("user-1234"),
|
||||
})
|
||||
if err != nil {
|
||||
var apierr *openai.Error
|
||||
@@ -88,14 +88,14 @@ func TestImageGenerateWithOptionalParams(t *testing.T) {
|
||||
option.WithAPIKey("My API Key"),
|
||||
)
|
||||
_, err := client.Images.Generate(context.TODO(), openai.ImageGenerateParams{
|
||||
Prompt: openai.F("A cute baby sea otter"),
|
||||
Model: openai.F(openai.ImageModelDallE2),
|
||||
N: openai.F(int64(1)),
|
||||
Quality: openai.F(openai.ImageGenerateParamsQualityStandard),
|
||||
ResponseFormat: openai.F(openai.ImageGenerateParamsResponseFormatURL),
|
||||
Size: openai.F(openai.ImageGenerateParamsSize256x256),
|
||||
Style: openai.F(openai.ImageGenerateParamsStyleVivid),
|
||||
User: openai.F("user-1234"),
|
||||
Prompt: openai.String("A cute baby sea otter"),
|
||||
Model: openai.ImageModelDallE2,
|
||||
N: openai.Int(1),
|
||||
Quality: openai.ImageGenerateParamsQualityStandard,
|
||||
ResponseFormat: openai.ImageGenerateParamsResponseFormatURL,
|
||||
Size: openai.ImageGenerateParamsSize256x256,
|
||||
Style: openai.ImageGenerateParamsStyleVivid,
|
||||
User: openai.String("user-1234"),
|
||||
})
|
||||
if err != nil {
|
||||
var apierr *openai.Error
|
||||
|
||||
@@ -8,43 +8,44 @@ import (
|
||||
"net/http/httputil"
|
||||
|
||||
"github.com/openai/openai-go/internal/apijson"
|
||||
"github.com/openai/openai-go/packages/param"
|
||||
"github.com/openai/openai-go/packages/resp"
|
||||
)
|
||||
|
||||
// aliased to make param.APIUnion private when embedding
|
||||
type apiunion = param.APIUnion
|
||||
|
||||
// aliased to make param.APIObject private when embedding
|
||||
type apiobject = param.APIObject
|
||||
|
||||
// Error represents an error that originates from the API, i.e. when a request is
|
||||
// made and the API returns a response with a HTTP status code. Other errors are
|
||||
// not wrapped by this SDK.
|
||||
type Error struct {
|
||||
Code string `json:"code,required,nullable"`
|
||||
Message string `json:"message,required"`
|
||||
Param string `json:"param,required,nullable"`
|
||||
Type string `json:"type,required"`
|
||||
JSON errorJSON `json:"-"`
|
||||
Code string `json:"code,omitzero,required,nullable"`
|
||||
Message string `json:"message,omitzero,required"`
|
||||
Param string `json:"param,omitzero,required,nullable"`
|
||||
Type string `json:"type,omitzero,required"`
|
||||
JSON struct {
|
||||
Code resp.Field
|
||||
Message resp.Field
|
||||
Param resp.Field
|
||||
Type resp.Field
|
||||
raw string
|
||||
} `json:"-"`
|
||||
StatusCode int
|
||||
Request *http.Request
|
||||
Response *http.Response
|
||||
}
|
||||
|
||||
// errorJSON contains the JSON metadata for the struct [Error]
|
||||
type errorJSON struct {
|
||||
Code apijson.Field
|
||||
Message apijson.Field
|
||||
Param apijson.Field
|
||||
Type apijson.Field
|
||||
raw string
|
||||
ExtraFields map[string]apijson.Field
|
||||
}
|
||||
|
||||
func (r *Error) UnmarshalJSON(data []byte) (err error) {
|
||||
func (r Error) RawJSON() string { return r.JSON.raw }
|
||||
func (r *Error) UnmarshalJSON(data []byte) error {
|
||||
return apijson.UnmarshalRoot(data, r)
|
||||
}
|
||||
|
||||
func (r errorJSON) RawJSON() string {
|
||||
return r.raw
|
||||
}
|
||||
|
||||
func (r *Error) Error() string {
|
||||
// Attempt to re-populate the response body
|
||||
return fmt.Sprintf("%s \"%s\": %d %s %s", r.Request.Method, r.Request.URL, r.Response.StatusCode, http.StatusText(r.Response.StatusCode), r.JSON.RawJSON())
|
||||
return fmt.Sprintf("%s %q: %d %s %s", r.Request.Method, r.Request.URL, r.Response.StatusCode, http.StatusText(r.Response.StatusCode), r.JSON.raw)
|
||||
}
|
||||
|
||||
func (r *Error) DumpRequest(body bool) []byte {
|
||||
|
||||
27
internal/apifield/metadata.go
Normal file
27
internal/apifield/metadata.go
Normal file
@@ -0,0 +1,27 @@
|
||||
package apifield
|
||||
|
||||
import "encoding/json"
|
||||
|
||||
type ExplicitNull struct{}
|
||||
type NeverOmitted struct{}
|
||||
type CustomValue struct{ Override any }
|
||||
type ResponseData json.RawMessage
|
||||
type ExtraFields map[string]any
|
||||
|
||||
func (ExplicitNull) IsNull() bool { return true }
|
||||
func (NeverOmitted) IsNull() bool { return false }
|
||||
func (v CustomValue) IsNull() bool { return v.Override == nil }
|
||||
func (r ResponseData) IsNull() bool { return string(r) == `null` }
|
||||
func (ExtraFields) IsNull() bool { return false }
|
||||
|
||||
func (ExplicitNull) RawResponse() json.RawMessage { return nil }
|
||||
func (NeverOmitted) RawResponse() json.RawMessage { return nil }
|
||||
func (r ResponseData) RawResponse() json.RawMessage { return json.RawMessage(r) }
|
||||
func (CustomValue) RawResponse() json.RawMessage { return nil }
|
||||
func (ExtraFields) RawResponse() json.RawMessage { return nil }
|
||||
|
||||
func (ExplicitNull) IsOverridden() (any, bool) { return nil, false }
|
||||
func (NeverOmitted) IsOverridden() (any, bool) { return nil, false }
|
||||
func (v CustomValue) IsOverridden() (any, bool) { return v.Override, true }
|
||||
func (ResponseData) IsOverridden() (any, bool) { return nil, false }
|
||||
func (ExtraFields) IsOverridden() (any, bool) { return nil, false }
|
||||
@@ -13,7 +13,8 @@ import (
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/openai/openai-go/internal/param"
|
||||
internalparam "github.com/openai/openai-go/internal/param"
|
||||
"github.com/openai/openai-go/packages/param"
|
||||
)
|
||||
|
||||
var encoders sync.Map // map[encoderEntry]encoderFunc
|
||||
@@ -180,10 +181,14 @@ func (e *encoder) newArrayTypeEncoder(t reflect.Type) encoderFunc {
|
||||
}
|
||||
|
||||
func (e *encoder) newStructTypeEncoder(t reflect.Type) encoderFunc {
|
||||
if t.Implements(reflect.TypeOf((*param.FieldLike)(nil)).Elem()) {
|
||||
if t.Implements(reflect.TypeOf((*internalparam.FieldLike)(nil)).Elem()) {
|
||||
return e.newFieldTypeEncoder(t)
|
||||
}
|
||||
|
||||
if idx, ok := param.RichPrimitiveTypes[t]; ok {
|
||||
return e.newRichFieldTypeEncoder(t, idx)
|
||||
}
|
||||
|
||||
encoderFields := []encoderField{}
|
||||
extraEncoder := (*encoderField)(nil)
|
||||
|
||||
|
||||
27
internal/apiform/richparam.go
Normal file
27
internal/apiform/richparam.go
Normal file
@@ -0,0 +1,27 @@
|
||||
package apiform
|
||||
|
||||
import (
|
||||
"github.com/openai/openai-go/packages/param"
|
||||
"mime/multipart"
|
||||
"reflect"
|
||||
)
|
||||
|
||||
// TODO(v2): verify this is correct, w.r.t. to null, overrides and omit
|
||||
func (e *encoder) newRichFieldTypeEncoder(t reflect.Type, underlyingValueIdx []int) encoderFunc {
|
||||
underlying := t.FieldByIndex(underlyingValueIdx)
|
||||
primitiveEncoder := e.newPrimitiveTypeEncoder(underlying.Type)
|
||||
return func(key string, value reflect.Value, writer *multipart.Writer) error {
|
||||
if fielder, ok := value.Interface().(param.Fielder); ok {
|
||||
if fielder.IsNull() {
|
||||
return writer.WriteField(key, "null")
|
||||
} else if ovr, ok := fielder.IsOverridden(); ok {
|
||||
ovr := reflect.ValueOf(ovr)
|
||||
encode := e.newTypeEncoder(ovr.Type())
|
||||
return encode(key, ovr, writer)
|
||||
} else if !param.IsOmitted(fielder) {
|
||||
return primitiveEncoder(key, value.FieldByName("V"), writer)
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
}
|
||||
@@ -433,9 +433,7 @@ func (d *decoderBuilder) newStructTypeDecoder(t reflect.Type) decoderFunc {
|
||||
status: valid,
|
||||
}
|
||||
}
|
||||
if metadata := getSubField(value, inlineDecoder.idx, inlineDecoder.goname); metadata.IsValid() {
|
||||
metadata.Set(reflect.ValueOf(meta))
|
||||
}
|
||||
setSubField(value, inlineDecoder.idx, inlineDecoder.goname, meta)
|
||||
return err
|
||||
}
|
||||
|
||||
@@ -489,9 +487,7 @@ func (d *decoderBuilder) newStructTypeDecoder(t reflect.Type) decoderFunc {
|
||||
}
|
||||
|
||||
if explicit {
|
||||
if metadata := getSubField(value, df.idx, df.goname); metadata.IsValid() {
|
||||
metadata.Set(reflect.ValueOf(meta))
|
||||
}
|
||||
setSubField(value, df.idx, df.goname, meta)
|
||||
}
|
||||
if !explicit {
|
||||
untypedExtraFields[fieldName] = meta
|
||||
|
||||
@@ -381,7 +381,7 @@ func (e *encoder) encodeMapEntries(json []byte, v reflect.Value) ([]byte, error)
|
||||
return json, nil
|
||||
}
|
||||
|
||||
func (e *encoder) newMapEncoder(t reflect.Type) encoderFunc {
|
||||
func (e *encoder) newMapEncoder(_ reflect.Type) encoderFunc {
|
||||
return func(value reflect.Value) ([]byte, error) {
|
||||
json := []byte("{}")
|
||||
var err error
|
||||
|
||||
@@ -1,7 +1,5 @@
|
||||
package apijson
|
||||
|
||||
import "reflect"
|
||||
|
||||
type status uint8
|
||||
|
||||
const (
|
||||
@@ -23,19 +21,3 @@ func (j Field) IsNull() bool { return j.status <= null }
|
||||
func (j Field) IsMissing() bool { return j.status == missing }
|
||||
func (j Field) IsInvalid() bool { return j.status == invalid }
|
||||
func (j Field) Raw() string { return j.raw }
|
||||
|
||||
func getSubField(root reflect.Value, index []int, name string) reflect.Value {
|
||||
strct := root.FieldByIndex(index[:len(index)-1])
|
||||
if !strct.IsValid() {
|
||||
panic("couldn't find encapsulating struct for field " + name)
|
||||
}
|
||||
meta := strct.FieldByName("JSON")
|
||||
if !meta.IsValid() {
|
||||
return reflect.Value{}
|
||||
}
|
||||
field := meta.FieldByName(name)
|
||||
if !field.IsValid() {
|
||||
return reflect.Value{}
|
||||
}
|
||||
return field
|
||||
}
|
||||
|
||||
@@ -86,8 +86,8 @@ type JSONFieldStruct struct {
|
||||
B int64 `json:"b"`
|
||||
C string `json:"c"`
|
||||
D string `json:"d"`
|
||||
ExtraFields map[string]int64 `json:"-,extras"`
|
||||
JSON JSONFieldStructJSON `json:"-,metadata"`
|
||||
ExtraFields map[string]int64 `json:",extras"`
|
||||
JSON JSONFieldStructJSON `json:",metadata"`
|
||||
}
|
||||
|
||||
type JSONFieldStructJSON struct {
|
||||
@@ -112,13 +112,13 @@ type Union interface {
|
||||
}
|
||||
|
||||
type Inline struct {
|
||||
InlineField Primitives `json:"-,inline"`
|
||||
JSON InlineJSON `json:"-,metadata"`
|
||||
InlineField Primitives `json:",inline"`
|
||||
JSON InlineJSON `json:",metadata"`
|
||||
}
|
||||
|
||||
type InlineArray struct {
|
||||
InlineField []string `json:"-,inline"`
|
||||
JSON InlineJSON `json:"-,metadata"`
|
||||
InlineField []string `json:",inline"`
|
||||
JSON InlineJSON `json:",metadata"`
|
||||
}
|
||||
|
||||
type InlineJSON struct {
|
||||
|
||||
@@ -53,7 +53,7 @@ func Port(from any, to any) error {
|
||||
for i := 0; i < t.NumField(); i++ {
|
||||
field := t.Field(i)
|
||||
ptag, ok := parseJSONStructTag(field)
|
||||
if !ok || ptag.name == "-" {
|
||||
if !ok || ptag.name == "-" || ptag.name == "" {
|
||||
continue
|
||||
}
|
||||
values[ptag.name] = v.Field(i)
|
||||
|
||||
45
internal/apijson/subfield.go
Normal file
45
internal/apijson/subfield.go
Normal file
@@ -0,0 +1,45 @@
|
||||
package apijson
|
||||
|
||||
import (
|
||||
"github.com/openai/openai-go/packages/resp"
|
||||
"reflect"
|
||||
)
|
||||
|
||||
func getSubField(root reflect.Value, index []int, name string) reflect.Value {
|
||||
strct := root.FieldByIndex(index[:len(index)-1])
|
||||
if !strct.IsValid() {
|
||||
panic("couldn't find encapsulating struct for field " + name)
|
||||
}
|
||||
meta := strct.FieldByName("JSON")
|
||||
if !meta.IsValid() {
|
||||
return reflect.Value{}
|
||||
}
|
||||
field := meta.FieldByName(name)
|
||||
if !field.IsValid() {
|
||||
return reflect.Value{}
|
||||
}
|
||||
return field
|
||||
}
|
||||
|
||||
var respFieldType = reflect.TypeOf(resp.Field{})
|
||||
var fieldType = reflect.TypeOf(Field{})
|
||||
|
||||
func setSubField(root reflect.Value, index []int, name string, meta Field) {
|
||||
if metadata := getSubField(root, index, name); metadata.IsValid() {
|
||||
if metadata.Type() == respFieldType {
|
||||
var rf resp.Field
|
||||
if meta.IsNull() {
|
||||
rf = resp.NewNullField()
|
||||
} else if meta.IsMissing() {
|
||||
_ = rf
|
||||
} else if meta.IsInvalid() {
|
||||
rf = resp.NewInvalidField(meta.raw)
|
||||
} else {
|
||||
rf = resp.NewValidField(meta.raw)
|
||||
}
|
||||
metadata.Set(reflect.ValueOf(rf))
|
||||
} else if metadata.Type() == fieldType {
|
||||
metadata.Set(reflect.ValueOf(meta))
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -9,7 +9,8 @@ import (
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/openai/openai-go/internal/param"
|
||||
internalparam "github.com/openai/openai-go/internal/param"
|
||||
"github.com/openai/openai-go/packages/param"
|
||||
)
|
||||
|
||||
var encoders sync.Map // map[reflect.Type]encoderFunc
|
||||
@@ -85,6 +86,7 @@ func (e *encoder) newTypeEncoder(t reflect.Type) encoderFunc {
|
||||
if t.ConvertibleTo(reflect.TypeOf(time.Time{})) {
|
||||
return e.newTimeTypeEncoder(t)
|
||||
}
|
||||
|
||||
if !e.root && t.Implements(reflect.TypeOf((*json.Marshaler)(nil)).Elem()) {
|
||||
return marshalerEncoder
|
||||
}
|
||||
@@ -115,10 +117,14 @@ func (e *encoder) newTypeEncoder(t reflect.Type) encoderFunc {
|
||||
}
|
||||
|
||||
func (e *encoder) newStructTypeEncoder(t reflect.Type) encoderFunc {
|
||||
if t.Implements(reflect.TypeOf((*param.FieldLike)(nil)).Elem()) {
|
||||
if t.Implements(reflect.TypeOf((*internalparam.FieldLike)(nil)).Elem()) {
|
||||
return e.newFieldTypeEncoder(t)
|
||||
}
|
||||
|
||||
if idx, ok := param.RichPrimitiveTypes[t]; ok {
|
||||
return e.newRichFieldTypeEncoder(t, idx)
|
||||
}
|
||||
|
||||
encoderFields := []encoderField{}
|
||||
|
||||
// This helper allows us to recursively collect field encoders into a flat
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
package apiquery
|
||||
|
||||
import (
|
||||
"github.com/openai/openai-go/packages/param"
|
||||
"net/url"
|
||||
"testing"
|
||||
"time"
|
||||
@@ -101,6 +102,10 @@ type DeeplyNested3 struct {
|
||||
D *string `query:"d"`
|
||||
}
|
||||
|
||||
type RichPrimitives struct {
|
||||
A param.String `query:"a"`
|
||||
}
|
||||
|
||||
var tests = map[string]struct {
|
||||
enc string
|
||||
val interface{}
|
||||
@@ -320,6 +325,14 @@ var tests = map[string]struct {
|
||||
},
|
||||
QuerySettings{NestedFormat: NestedQueryFormatDots},
|
||||
},
|
||||
|
||||
"rich_primitives": {
|
||||
`a=hello`,
|
||||
RichPrimitives{
|
||||
A: param.String{V: "hello"},
|
||||
},
|
||||
QuerySettings{},
|
||||
},
|
||||
}
|
||||
|
||||
func TestEncode(t *testing.T) {
|
||||
|
||||
29
internal/apiquery/richparam.go
Normal file
29
internal/apiquery/richparam.go
Normal file
@@ -0,0 +1,29 @@
|
||||
package apiquery
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/openai/openai-go/packages/param"
|
||||
"reflect"
|
||||
)
|
||||
|
||||
// TODO(v2): verify this is correct w.r.t. to null, override and omit handling
|
||||
func (e *encoder) newRichFieldTypeEncoder(t reflect.Type, underlyingValueIdx []int) encoderFunc {
|
||||
underlying := t.FieldByIndex(underlyingValueIdx)
|
||||
primitiveEncoder := e.newPrimitiveTypeEncoder(underlying.Type)
|
||||
return func(key string, value reflect.Value) []Pair {
|
||||
if fielder, ok := value.Interface().(param.Fielder); ok {
|
||||
if fielder.IsNull() {
|
||||
return []Pair{{key, "null"}}
|
||||
} else if ovr, ok := fielder.IsOverridden(); ok {
|
||||
ovr := reflect.ValueOf(ovr)
|
||||
encode := e.newTypeEncoder(ovr.Type())
|
||||
return encode(key, ovr)
|
||||
} else if !param.IsOmitted(fielder) {
|
||||
res := primitiveEncoder(key, value.FieldByName("V"))
|
||||
fmt.Printf("%#v\n", res)
|
||||
return res
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
}
|
||||
1324
internal/encoding/json/decode.go
Normal file
1324
internal/encoding/json/decode.go
Normal file
File diff suppressed because it is too large
Load Diff
1349
internal/encoding/json/encode.go
Normal file
1349
internal/encoding/json/encode.go
Normal file
File diff suppressed because it is too large
Load Diff
48
internal/encoding/json/fold.go
Normal file
48
internal/encoding/json/fold.go
Normal file
@@ -0,0 +1,48 @@
|
||||
// Copyright 2013 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package json
|
||||
|
||||
import (
|
||||
"unicode"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
// foldName returns a folded string such that foldName(x) == foldName(y)
|
||||
// is identical to bytes.EqualFold(x, y).
|
||||
func foldName(in []byte) []byte {
|
||||
// This is inlinable to take advantage of "function outlining".
|
||||
var arr [32]byte // large enough for most JSON names
|
||||
return appendFoldedName(arr[:0], in)
|
||||
}
|
||||
|
||||
func appendFoldedName(out, in []byte) []byte {
|
||||
for i := 0; i < len(in); {
|
||||
// Handle single-byte ASCII.
|
||||
if c := in[i]; c < utf8.RuneSelf {
|
||||
if 'a' <= c && c <= 'z' {
|
||||
c -= 'a' - 'A'
|
||||
}
|
||||
out = append(out, c)
|
||||
i++
|
||||
continue
|
||||
}
|
||||
// Handle multi-byte Unicode.
|
||||
r, n := utf8.DecodeRune(in[i:])
|
||||
out = utf8.AppendRune(out, foldRune(r))
|
||||
i += n
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
// foldRune is returns the smallest rune for all runes in the same fold set.
|
||||
func foldRune(r rune) rune {
|
||||
for {
|
||||
r2 := unicode.SimpleFold(r)
|
||||
if r2 <= r {
|
||||
return r2
|
||||
}
|
||||
r = r2
|
||||
}
|
||||
}
|
||||
182
internal/encoding/json/indent.go
Normal file
182
internal/encoding/json/indent.go
Normal file
@@ -0,0 +1,182 @@
|
||||
// Copyright 2010 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package json
|
||||
|
||||
import "bytes"
|
||||
|
||||
// HTMLEscape appends to dst the JSON-encoded src with <, >, &, U+2028 and U+2029
|
||||
// characters inside string literals changed to \u003c, \u003e, \u0026, \u2028, \u2029
|
||||
// so that the JSON will be safe to embed inside HTML <script> tags.
|
||||
// For historical reasons, web browsers don't honor standard HTML
|
||||
// escaping within <script> tags, so an alternative JSON encoding must be used.
|
||||
func HTMLEscape(dst *bytes.Buffer, src []byte) {
|
||||
dst.Grow(len(src))
|
||||
dst.Write(appendHTMLEscape(dst.AvailableBuffer(), src))
|
||||
}
|
||||
|
||||
func appendHTMLEscape(dst, src []byte) []byte {
|
||||
// The characters can only appear in string literals,
|
||||
// so just scan the string one byte at a time.
|
||||
start := 0
|
||||
for i, c := range src {
|
||||
if c == '<' || c == '>' || c == '&' {
|
||||
dst = append(dst, src[start:i]...)
|
||||
dst = append(dst, '\\', 'u', '0', '0', hex[c>>4], hex[c&0xF])
|
||||
start = i + 1
|
||||
}
|
||||
// Convert U+2028 and U+2029 (E2 80 A8 and E2 80 A9).
|
||||
if c == 0xE2 && i+2 < len(src) && src[i+1] == 0x80 && src[i+2]&^1 == 0xA8 {
|
||||
dst = append(dst, src[start:i]...)
|
||||
dst = append(dst, '\\', 'u', '2', '0', '2', hex[src[i+2]&0xF])
|
||||
start = i + len("\u2029")
|
||||
}
|
||||
}
|
||||
return append(dst, src[start:]...)
|
||||
}
|
||||
|
||||
// Compact appends to dst the JSON-encoded src with
|
||||
// insignificant space characters elided.
|
||||
func Compact(dst *bytes.Buffer, src []byte) error {
|
||||
dst.Grow(len(src))
|
||||
b := dst.AvailableBuffer()
|
||||
b, err := appendCompact(b, src, false)
|
||||
dst.Write(b)
|
||||
return err
|
||||
}
|
||||
|
||||
// appendCompact appends to dst a compacted copy of the JSON-encoded src.
// If escape is set, it additionally rewrites '<', '>', '&', U+2028 and
// U+2029 as \uXXXX escapes. On a syntax error dst is truncated back to its
// original length and the scanner's error is returned.
func appendCompact(dst, src []byte, escape bool) ([]byte, error) {
	origLen := len(dst)
	scan := newScanner()
	defer freeScanner(scan)
	// start marks the beginning of the pending run of bytes to copy verbatim.
	start := 0
	for i, c := range src {
		if escape && (c == '<' || c == '>' || c == '&') {
			if start < i {
				dst = append(dst, src[start:i]...)
			}
			dst = append(dst, '\\', 'u', '0', '0', hex[c>>4], hex[c&0xF])
			start = i + 1
		}
		// Convert U+2028 and U+2029 (E2 80 A8 and E2 80 A9).
		// src[i+2]&^1 == 0xA8 matches both 0xA8 and 0xA9 in one test.
		if escape && c == 0xE2 && i+2 < len(src) && src[i+1] == 0x80 && src[i+2]&^1 == 0xA8 {
			if start < i {
				dst = append(dst, src[start:i]...)
			}
			dst = append(dst, '\\', 'u', '2', '0', '2', hex[src[i+2]&0xF])
			start = i + 3
		}
		v := scan.step(scan, c)
		if v >= scanSkipSpace {
			if v == scanError {
				break
			}
			// This byte is insignificant whitespace: flush the run that
			// precedes it and restart the run after it, eliding the byte.
			if start < i {
				dst = append(dst, src[start:i]...)
			}
			start = i + 1
		}
	}
	if scan.eof() == scanError {
		return dst[:origLen], scan.err
	}
	if start < len(src) {
		dst = append(dst, src[start:]...)
	}
	return dst, nil
}
|
||||
|
||||
// appendNewline appends a newline followed by prefix and depth copies of
// indent, returning the extended buffer.
func appendNewline(dst []byte, prefix, indent string, depth int) []byte {
	dst = append(dst, '\n')
	dst = append(dst, prefix...)
	for n := depth; n > 0; n-- {
		dst = append(dst, indent...)
	}
	return dst
}
|
||||
|
||||
// indentGrowthFactor specifies the growth factor of indenting JSON input.
// Empirically, the growth factor was measured to be between 1.4x to 1.8x
// for some set of compacted JSON with the indent being a single tab.
// Specify a growth factor slightly larger than what is observed
// to reduce probability of allocation in appendIndent.
// A factor no higher than 2 ensures that wasted space never exceeds 50%.
const indentGrowthFactor = 2
|
||||
|
||||
// Indent appends to dst an indented form of the JSON-encoded src.
|
||||
// Each element in a JSON object or array begins on a new,
|
||||
// indented line beginning with prefix followed by one or more
|
||||
// copies of indent according to the indentation nesting.
|
||||
// The data appended to dst does not begin with the prefix nor
|
||||
// any indentation, to make it easier to embed inside other formatted JSON data.
|
||||
// Although leading space characters (space, tab, carriage return, newline)
|
||||
// at the beginning of src are dropped, trailing space characters
|
||||
// at the end of src are preserved and copied to dst.
|
||||
// For example, if src has no trailing spaces, neither will dst;
|
||||
// if src ends in a trailing newline, so will dst.
|
||||
func Indent(dst *bytes.Buffer, src []byte, prefix, indent string) error {
|
||||
dst.Grow(indentGrowthFactor * len(src))
|
||||
b := dst.AvailableBuffer()
|
||||
b, err := appendIndent(b, src, prefix, indent)
|
||||
dst.Write(b)
|
||||
return err
|
||||
}
|
||||
|
||||
// appendIndent appends to dst an indented form of the JSON-encoded src,
// using prefix and indent as described by Indent. On a syntax error dst is
// truncated back to its original length and the scanner's error is returned.
func appendIndent(dst, src []byte, prefix, indent string) ([]byte, error) {
	origLen := len(dst)
	scan := newScanner()
	defer freeScanner(scan)
	// needIndent is set after '{' or '[' and delays the newline+indent until
	// the first element is seen, so that empty composites stay on one line.
	needIndent := false
	depth := 0
	for _, c := range src {
		scan.bytes++
		v := scan.step(scan, c)
		if v == scanSkipSpace {
			// Drop insignificant whitespace from the input.
			continue
		}
		if v == scanError {
			break
		}
		if needIndent && v != scanEndObject && v != scanEndArray {
			needIndent = false
			depth++
			dst = appendNewline(dst, prefix, indent, depth)
		}

		// Emit semantically uninteresting bytes
		// (in particular, punctuation in strings) unmodified.
		if v == scanContinue {
			dst = append(dst, c)
			continue
		}

		// Add spacing around real punctuation.
		switch c {
		case '{', '[':
			// delay indent so that empty object and array are formatted as {} and [].
			needIndent = true
			dst = append(dst, c)
		case ',':
			dst = append(dst, c)
			dst = appendNewline(dst, prefix, indent, depth)
		case ':':
			dst = append(dst, c, ' ')
		case '}', ']':
			if needIndent {
				// suppress indent in empty object/array
				needIndent = false
			} else {
				depth--
				dst = appendNewline(dst, prefix, indent, depth)
			}
			dst = append(dst, c)
		default:
			dst = append(dst, c)
		}
	}
	if scan.eof() == scanError {
		return dst[:origLen], scan.err
	}
	return dst, nil
}
|
||||
610
internal/encoding/json/scanner.go
Normal file
610
internal/encoding/json/scanner.go
Normal file
@@ -0,0 +1,610 @@
|
||||
// Copyright 2010 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package json
|
||||
|
||||
// JSON value parser state machine.
|
||||
// Just about at the limit of what is reasonable to write by hand.
|
||||
// Some parts are a bit tedious, but overall it nicely factors out the
|
||||
// otherwise common code from the multiple scanning functions
|
||||
// in this package (Compact, Indent, checkValid, etc).
|
||||
//
|
||||
// This file starts with two simple examples using the scanner
|
||||
// before diving into the scanner itself.
|
||||
|
||||
import (
|
||||
"strconv"
|
||||
"sync"
|
||||
)
|
||||
|
||||
// Valid reports whether data is a valid JSON encoding.
|
||||
func Valid(data []byte) bool {
|
||||
scan := newScanner()
|
||||
defer freeScanner(scan)
|
||||
return checkValid(data, scan) == nil
|
||||
}
|
||||
|
||||
// checkValid verifies that data is valid JSON-encoded data.
|
||||
// scan is passed in for use by checkValid to avoid an allocation.
|
||||
// checkValid returns nil or a SyntaxError.
|
||||
func checkValid(data []byte, scan *scanner) error {
|
||||
scan.reset()
|
||||
for _, c := range data {
|
||||
scan.bytes++
|
||||
if scan.step(scan, c) == scanError {
|
||||
return scan.err
|
||||
}
|
||||
}
|
||||
if scan.eof() == scanError {
|
||||
return scan.err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// A SyntaxError is a description of a JSON syntax error.
// [Unmarshal] will return a SyntaxError if the JSON can't be parsed.
type SyntaxError struct {
	msg    string // description of error
	Offset int64  // error occurred after reading Offset bytes
}

// Error returns the error description, satisfying the error interface.
func (e *SyntaxError) Error() string { return e.msg }
|
||||
|
||||
// A scanner is a JSON scanning state machine.
// Callers call scan.reset and then pass bytes in one at a time
// by calling scan.step(&scan, c) for each byte.
// The return value, referred to as an opcode, tells the
// caller about significant parsing events like beginning
// and ending literals, objects, and arrays, so that the
// caller can follow along if it wishes.
// The return value scanEnd indicates that a single top-level
// JSON value has been completed, *before* the byte that
// just got passed in. (The indication must be delayed in order
// to recognize the end of numbers: is 123 a whole value or
// the beginning of 12345e+6?).
type scanner struct {
	// The step is a func to be called to execute the next transition.
	// Also tried using an integer constant and a single func
	// with a switch, but using the func directly was 10% faster
	// on a 64-bit Mac Mini, and it's nicer to read.
	step func(*scanner, byte) int

	// Reached end of top-level value.
	endTop bool

	// Stack of what we're in the middle of - array values, object keys, object values.
	parseState []int

	// Error that happened, if any.
	err error

	// total bytes consumed, updated by decoder.Decode (and deliberately
	// not set to zero by scan.reset)
	bytes int64
}
|
||||
|
||||
// scannerPool caches scanners across calls so that the scanner and its
// parseState stack are not reallocated for every value processed.
var scannerPool = sync.Pool{
	New: func() any {
		return &scanner{}
	},
}

// newScanner returns a freshly reset scanner from the pool.
// Callers must release it with freeScanner when done.
func newScanner() *scanner {
	scan := scannerPool.Get().(*scanner)
	// scan.reset by design doesn't set bytes to zero
	scan.bytes = 0
	scan.reset()
	return scan
}

// freeScanner returns scan to the pool for reuse.
func freeScanner(scan *scanner) {
	// Avoid hanging on to too much memory in extreme cases.
	if len(scan.parseState) > 1024 {
		scan.parseState = nil
	}
	scannerPool.Put(scan)
}
|
||||
|
||||
// These values are returned by the state transition functions
// assigned to scanner.state and the method scanner.eof.
// They give details about the current state of the scan that
// callers might be interested to know about.
// It is okay to ignore the return value of any particular
// call to scanner.state: if one call returns scanError,
// every subsequent call will return scanError too.
const (
	// Continue.
	scanContinue     = iota // uninteresting byte
	scanBeginLiteral        // end implied by next result != scanContinue
	scanBeginObject         // begin object
	scanObjectKey           // just finished object key (string)
	scanObjectValue         // just finished non-last object value
	scanEndObject           // end object (implies scanObjectValue if possible)
	scanBeginArray          // begin array
	scanArrayValue          // just finished array value
	scanEndArray            // end array (implies scanArrayValue if possible)
	scanSkipSpace           // space byte; can skip; known to be last "continue" result

	// Stop.
	scanEnd   // top-level value ended *before* this byte; known to be first "stop" result
	scanError // hit an error, scanner.err.
)

// These values are stored in the parseState stack.
// They give the current state of a composite value
// being scanned. If the parser is inside a nested value
// the parseState describes the nested state, outermost at entry 0.
const (
	parseObjectKey   = iota // parsing object key (before colon)
	parseObjectValue        // parsing object value (after colon)
	parseArrayValue         // parsing array value
)

// This limits the max nesting depth to prevent stack overflow.
// This is permitted by https://tools.ietf.org/html/rfc7159#section-9
const maxNestingDepth = 10000
|
||||
|
||||
// reset prepares the scanner for use.
|
||||
// It must be called before calling s.step.
|
||||
func (s *scanner) reset() {
|
||||
s.step = stateBeginValue
|
||||
s.parseState = s.parseState[0:0]
|
||||
s.err = nil
|
||||
s.endTop = false
|
||||
}
|
||||
|
||||
// eof tells the scanner that the end of input has been reached.
// It returns a scan status just as s.step does.
func (s *scanner) eof() int {
	if s.err != nil {
		return scanError
	}
	if s.endTop {
		return scanEnd
	}
	// Feed a synthetic trailing space: a top-level number such as "123"
	// only registers as complete once a byte after it is observed.
	s.step(s, ' ')
	if s.endTop {
		return scanEnd
	}
	if s.err == nil {
		s.err = &SyntaxError{"unexpected end of JSON input", s.bytes}
	}
	return scanError
}
|
||||
|
||||
// pushParseState pushes a new parse state p onto the parse stack.
|
||||
// an error state is returned if maxNestingDepth was exceeded, otherwise successState is returned.
|
||||
func (s *scanner) pushParseState(c byte, newParseState int, successState int) int {
|
||||
s.parseState = append(s.parseState, newParseState)
|
||||
if len(s.parseState) <= maxNestingDepth {
|
||||
return successState
|
||||
}
|
||||
return s.error(c, "exceeded max depth")
|
||||
}
|
||||
|
||||
// popParseState pops a parse state (already obtained) off the stack
|
||||
// and updates s.step accordingly.
|
||||
func (s *scanner) popParseState() {
|
||||
n := len(s.parseState) - 1
|
||||
s.parseState = s.parseState[0:n]
|
||||
if n == 0 {
|
||||
s.step = stateEndTop
|
||||
s.endTop = true
|
||||
} else {
|
||||
s.step = stateEndValue
|
||||
}
|
||||
}
|
||||
|
||||
// isSpace reports whether c is a JSON whitespace byte (RFC 7159 section 2).
func isSpace(c byte) bool {
	switch c {
	case ' ', '\t', '\r', '\n':
		return true
	}
	return false
}
|
||||
|
||||
// stateBeginValueOrEmpty is the state after reading `[`.
|
||||
func stateBeginValueOrEmpty(s *scanner, c byte) int {
|
||||
if isSpace(c) {
|
||||
return scanSkipSpace
|
||||
}
|
||||
if c == ']' {
|
||||
return stateEndValue(s, c)
|
||||
}
|
||||
return stateBeginValue(s, c)
|
||||
}
|
||||
|
||||
// stateBeginValue is the state at the beginning of the input.
// It dispatches on the first byte of a JSON value, installing the
// appropriate next-state function for the value kind it begins.
func stateBeginValue(s *scanner, c byte) int {
	if isSpace(c) {
		return scanSkipSpace
	}
	switch c {
	case '{':
		s.step = stateBeginStringOrEmpty
		return s.pushParseState(c, parseObjectKey, scanBeginObject)
	case '[':
		s.step = stateBeginValueOrEmpty
		return s.pushParseState(c, parseArrayValue, scanBeginArray)
	case '"':
		s.step = stateInString
		return scanBeginLiteral
	case '-':
		s.step = stateNeg
		return scanBeginLiteral
	case '0': // beginning of 0.123
		s.step = state0
		return scanBeginLiteral
	case 't': // beginning of true
		s.step = stateT
		return scanBeginLiteral
	case 'f': // beginning of false
		s.step = stateF
		return scanBeginLiteral
	case 'n': // beginning of null
		s.step = stateN
		return scanBeginLiteral
	}
	if '1' <= c && c <= '9' { // beginning of 1234.5
		s.step = state1
		return scanBeginLiteral
	}
	return s.error(c, "looking for beginning of value")
}
|
||||
|
||||
// stateBeginStringOrEmpty is the state after reading `{`.
|
||||
func stateBeginStringOrEmpty(s *scanner, c byte) int {
|
||||
if isSpace(c) {
|
||||
return scanSkipSpace
|
||||
}
|
||||
if c == '}' {
|
||||
n := len(s.parseState)
|
||||
s.parseState[n-1] = parseObjectValue
|
||||
return stateEndValue(s, c)
|
||||
}
|
||||
return stateBeginString(s, c)
|
||||
}
|
||||
|
||||
// stateBeginString is the state after reading `{"key": value,`.
|
||||
func stateBeginString(s *scanner, c byte) int {
|
||||
if isSpace(c) {
|
||||
return scanSkipSpace
|
||||
}
|
||||
if c == '"' {
|
||||
s.step = stateInString
|
||||
return scanBeginLiteral
|
||||
}
|
||||
return s.error(c, "looking for beginning of object key string")
|
||||
}
|
||||
|
||||
// stateEndValue is the state after completing a value,
// such as after reading `{}` or `true` or `["x"`.
// It consults the top of the parse stack to decide which
// punctuation may legally follow the value just finished.
func stateEndValue(s *scanner, c byte) int {
	n := len(s.parseState)
	if n == 0 {
		// Completed top-level before the current byte.
		s.step = stateEndTop
		s.endTop = true
		return stateEndTop(s, c)
	}
	if isSpace(c) {
		s.step = stateEndValue
		return scanSkipSpace
	}
	ps := s.parseState[n-1]
	switch ps {
	case parseObjectKey:
		if c == ':' {
			s.parseState[n-1] = parseObjectValue
			s.step = stateBeginValue
			return scanObjectKey
		}
		return s.error(c, "after object key")
	case parseObjectValue:
		if c == ',' {
			s.parseState[n-1] = parseObjectKey
			s.step = stateBeginString
			return scanObjectValue
		}
		if c == '}' {
			s.popParseState()
			return scanEndObject
		}
		return s.error(c, "after object key:value pair")
	case parseArrayValue:
		if c == ',' {
			s.step = stateBeginValue
			return scanArrayValue
		}
		if c == ']' {
			s.popParseState()
			return scanEndArray
		}
		return s.error(c, "after array element")
	}
	return s.error(c, "")
}
|
||||
|
||||
// stateEndTop is the state after finishing the top-level value,
|
||||
// such as after reading `{}` or `[1,2,3]`.
|
||||
// Only space characters should be seen now.
|
||||
func stateEndTop(s *scanner, c byte) int {
|
||||
if !isSpace(c) {
|
||||
// Complain about non-space byte on next call.
|
||||
s.error(c, "after top-level value")
|
||||
}
|
||||
return scanEnd
|
||||
}
|
||||
|
||||
// stateInString is the state after reading `"`.
|
||||
func stateInString(s *scanner, c byte) int {
|
||||
if c == '"' {
|
||||
s.step = stateEndValue
|
||||
return scanContinue
|
||||
}
|
||||
if c == '\\' {
|
||||
s.step = stateInStringEsc
|
||||
return scanContinue
|
||||
}
|
||||
if c < 0x20 {
|
||||
return s.error(c, "in string literal")
|
||||
}
|
||||
return scanContinue
|
||||
}
|
||||
|
||||
// stateInStringEsc is the state after reading `"\` during a quoted string.
|
||||
func stateInStringEsc(s *scanner, c byte) int {
|
||||
switch c {
|
||||
case 'b', 'f', 'n', 'r', 't', '\\', '/', '"':
|
||||
s.step = stateInString
|
||||
return scanContinue
|
||||
case 'u':
|
||||
s.step = stateInStringEscU
|
||||
return scanContinue
|
||||
}
|
||||
return s.error(c, "in string escape code")
|
||||
}
|
||||
|
||||
// stateInStringEscU is the state after reading `"\u` during a quoted string.
|
||||
func stateInStringEscU(s *scanner, c byte) int {
|
||||
if '0' <= c && c <= '9' || 'a' <= c && c <= 'f' || 'A' <= c && c <= 'F' {
|
||||
s.step = stateInStringEscU1
|
||||
return scanContinue
|
||||
}
|
||||
// numbers
|
||||
return s.error(c, "in \\u hexadecimal character escape")
|
||||
}
|
||||
|
||||
// stateInStringEscU1 is the state after reading `"\u1` during a quoted string.
|
||||
func stateInStringEscU1(s *scanner, c byte) int {
|
||||
if '0' <= c && c <= '9' || 'a' <= c && c <= 'f' || 'A' <= c && c <= 'F' {
|
||||
s.step = stateInStringEscU12
|
||||
return scanContinue
|
||||
}
|
||||
// numbers
|
||||
return s.error(c, "in \\u hexadecimal character escape")
|
||||
}
|
||||
|
||||
// stateInStringEscU12 is the state after reading `"\u12` during a quoted string.
|
||||
func stateInStringEscU12(s *scanner, c byte) int {
|
||||
if '0' <= c && c <= '9' || 'a' <= c && c <= 'f' || 'A' <= c && c <= 'F' {
|
||||
s.step = stateInStringEscU123
|
||||
return scanContinue
|
||||
}
|
||||
// numbers
|
||||
return s.error(c, "in \\u hexadecimal character escape")
|
||||
}
|
||||
|
||||
// stateInStringEscU123 is the state after reading `"\u123` during a quoted string.
|
||||
func stateInStringEscU123(s *scanner, c byte) int {
|
||||
if '0' <= c && c <= '9' || 'a' <= c && c <= 'f' || 'A' <= c && c <= 'F' {
|
||||
s.step = stateInString
|
||||
return scanContinue
|
||||
}
|
||||
// numbers
|
||||
return s.error(c, "in \\u hexadecimal character escape")
|
||||
}
|
||||
|
||||
// stateNeg is the state after reading `-` during a number.
|
||||
func stateNeg(s *scanner, c byte) int {
|
||||
if c == '0' {
|
||||
s.step = state0
|
||||
return scanContinue
|
||||
}
|
||||
if '1' <= c && c <= '9' {
|
||||
s.step = state1
|
||||
return scanContinue
|
||||
}
|
||||
return s.error(c, "in numeric literal")
|
||||
}
|
||||
|
||||
// state1 is the state after reading a non-zero integer during a number,
|
||||
// such as after reading `1` or `100` but not `0`.
|
||||
func state1(s *scanner, c byte) int {
|
||||
if '0' <= c && c <= '9' {
|
||||
s.step = state1
|
||||
return scanContinue
|
||||
}
|
||||
return state0(s, c)
|
||||
}
|
||||
|
||||
// state0 is the state after reading `0` during a number.
|
||||
func state0(s *scanner, c byte) int {
|
||||
if c == '.' {
|
||||
s.step = stateDot
|
||||
return scanContinue
|
||||
}
|
||||
if c == 'e' || c == 'E' {
|
||||
s.step = stateE
|
||||
return scanContinue
|
||||
}
|
||||
return stateEndValue(s, c)
|
||||
}
|
||||
|
||||
// stateDot is the state after reading the integer and decimal point in a number,
|
||||
// such as after reading `1.`.
|
||||
func stateDot(s *scanner, c byte) int {
|
||||
if '0' <= c && c <= '9' {
|
||||
s.step = stateDot0
|
||||
return scanContinue
|
||||
}
|
||||
return s.error(c, "after decimal point in numeric literal")
|
||||
}
|
||||
|
||||
// stateDot0 is the state after reading the integer, decimal point, and subsequent
|
||||
// digits of a number, such as after reading `3.14`.
|
||||
func stateDot0(s *scanner, c byte) int {
|
||||
if '0' <= c && c <= '9' {
|
||||
return scanContinue
|
||||
}
|
||||
if c == 'e' || c == 'E' {
|
||||
s.step = stateE
|
||||
return scanContinue
|
||||
}
|
||||
return stateEndValue(s, c)
|
||||
}
|
||||
|
||||
// stateE is the state after reading the mantissa and e in a number,
|
||||
// such as after reading `314e` or `0.314e`.
|
||||
func stateE(s *scanner, c byte) int {
|
||||
if c == '+' || c == '-' {
|
||||
s.step = stateESign
|
||||
return scanContinue
|
||||
}
|
||||
return stateESign(s, c)
|
||||
}
|
||||
|
||||
// stateESign is the state after reading the mantissa, e, and sign in a number,
|
||||
// such as after reading `314e-` or `0.314e+`.
|
||||
func stateESign(s *scanner, c byte) int {
|
||||
if '0' <= c && c <= '9' {
|
||||
s.step = stateE0
|
||||
return scanContinue
|
||||
}
|
||||
return s.error(c, "in exponent of numeric literal")
|
||||
}
|
||||
|
||||
// stateE0 is the state after reading the mantissa, e, optional sign,
|
||||
// and at least one digit of the exponent in a number,
|
||||
// such as after reading `314e-2` or `0.314e+1` or `3.14e0`.
|
||||
func stateE0(s *scanner, c byte) int {
|
||||
if '0' <= c && c <= '9' {
|
||||
return scanContinue
|
||||
}
|
||||
return stateEndValue(s, c)
|
||||
}
|
||||
|
||||
// stateT is the state after reading `t`.
|
||||
func stateT(s *scanner, c byte) int {
|
||||
if c == 'r' {
|
||||
s.step = stateTr
|
||||
return scanContinue
|
||||
}
|
||||
return s.error(c, "in literal true (expecting 'r')")
|
||||
}
|
||||
|
||||
// stateTr is the state after reading `tr`.
|
||||
func stateTr(s *scanner, c byte) int {
|
||||
if c == 'u' {
|
||||
s.step = stateTru
|
||||
return scanContinue
|
||||
}
|
||||
return s.error(c, "in literal true (expecting 'u')")
|
||||
}
|
||||
|
||||
// stateTru is the state after reading `tru`.
|
||||
func stateTru(s *scanner, c byte) int {
|
||||
if c == 'e' {
|
||||
s.step = stateEndValue
|
||||
return scanContinue
|
||||
}
|
||||
return s.error(c, "in literal true (expecting 'e')")
|
||||
}
|
||||
|
||||
// stateF is the state after reading `f`.
|
||||
func stateF(s *scanner, c byte) int {
|
||||
if c == 'a' {
|
||||
s.step = stateFa
|
||||
return scanContinue
|
||||
}
|
||||
return s.error(c, "in literal false (expecting 'a')")
|
||||
}
|
||||
|
||||
// stateFa is the state after reading `fa`.
|
||||
func stateFa(s *scanner, c byte) int {
|
||||
if c == 'l' {
|
||||
s.step = stateFal
|
||||
return scanContinue
|
||||
}
|
||||
return s.error(c, "in literal false (expecting 'l')")
|
||||
}
|
||||
|
||||
// stateFal is the state after reading `fal`.
|
||||
func stateFal(s *scanner, c byte) int {
|
||||
if c == 's' {
|
||||
s.step = stateFals
|
||||
return scanContinue
|
||||
}
|
||||
return s.error(c, "in literal false (expecting 's')")
|
||||
}
|
||||
|
||||
// stateFals is the state after reading `fals`.
|
||||
func stateFals(s *scanner, c byte) int {
|
||||
if c == 'e' {
|
||||
s.step = stateEndValue
|
||||
return scanContinue
|
||||
}
|
||||
return s.error(c, "in literal false (expecting 'e')")
|
||||
}
|
||||
|
||||
// stateN is the state after reading `n`.
|
||||
func stateN(s *scanner, c byte) int {
|
||||
if c == 'u' {
|
||||
s.step = stateNu
|
||||
return scanContinue
|
||||
}
|
||||
return s.error(c, "in literal null (expecting 'u')")
|
||||
}
|
||||
|
||||
// stateNu is the state after reading `nu`.
|
||||
func stateNu(s *scanner, c byte) int {
|
||||
if c == 'l' {
|
||||
s.step = stateNul
|
||||
return scanContinue
|
||||
}
|
||||
return s.error(c, "in literal null (expecting 'l')")
|
||||
}
|
||||
|
||||
// stateNul is the state after reading `nul`.
|
||||
func stateNul(s *scanner, c byte) int {
|
||||
if c == 'l' {
|
||||
s.step = stateEndValue
|
||||
return scanContinue
|
||||
}
|
||||
return s.error(c, "in literal null (expecting 'l')")
|
||||
}
|
||||
|
||||
// stateError is the state after reaching a syntax error,
|
||||
// such as after reading `[1}` or `5.1.2`.
|
||||
func stateError(s *scanner, c byte) int {
|
||||
return scanError
|
||||
}
|
||||
|
||||
// error records an error and switches to the error state.
|
||||
func (s *scanner) error(c byte, context string) int {
|
||||
s.step = stateError
|
||||
s.err = &SyntaxError{"invalid character " + quoteChar(c) + " " + context, s.bytes}
|
||||
return scanError
|
||||
}
|
||||
|
||||
// quoteChar formats c as a quoted character literal.
func quoteChar(c byte) string {
	// special cases - different from quoted strings
	switch c {
	case '\'':
		return `'\''`
	case '"':
		return `'"'`
	}

	// use quoted string with different quotation marks
	q := strconv.Quote(string(c))
	return "'" + q[1:len(q)-1] + "'"
}
|
||||
111
internal/encoding/json/shims/shims.go
Normal file
111
internal/encoding/json/shims/shims.go
Normal file
@@ -0,0 +1,111 @@
|
||||
// This package provides shims over Go 1.2{2,3} APIs
|
||||
// which are missing from Go 1.21, and used by the Go 1.24 encoding/json package.
|
||||
//
|
||||
// Inside the vendored package, all shim code has comments that begin look like
|
||||
// // SHIM(...): ...
|
||||
package shims
|
||||
|
||||
import (
|
||||
"encoding/base64"
|
||||
"reflect"
|
||||
"slices"
|
||||
)
|
||||
|
||||
type OverflowableType struct{ reflect.Type }
|
||||
|
||||
func (t OverflowableType) OverflowInt(x int64) bool {
|
||||
k := t.Kind()
|
||||
switch k {
|
||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
||||
bitSize := t.Size() * 8
|
||||
trunc := (x << (64 - bitSize)) >> (64 - bitSize)
|
||||
return x != trunc
|
||||
}
|
||||
panic("reflect: OverflowInt of non-int type " + t.String())
|
||||
}
|
||||
|
||||
func (t OverflowableType) OverflowUint(x uint64) bool {
|
||||
k := t.Kind()
|
||||
switch k {
|
||||
case reflect.Uint, reflect.Uintptr, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
|
||||
bitSize := t.Size() * 8
|
||||
trunc := (x << (64 - bitSize)) >> (64 - bitSize)
|
||||
return x != trunc
|
||||
}
|
||||
panic("reflect: OverflowUint of non-uint type " + t.String())
|
||||
}
|
||||
|
||||
// Original src code from Go 1.23 go/src/reflect/type.go (taken 1/9/25)
|
||||
/*
|
||||
|
||||
func (t *rtype) OverflowInt(x int64) bool {
|
||||
k := t.Kind()
|
||||
switch k {
|
||||
case Int, Int8, Int16, Int32, Int64:
|
||||
bitSize := t.Size() * 8
|
||||
trunc := (x << (64 - bitSize)) >> (64 - bitSize)
|
||||
return x != trunc
|
||||
}
|
||||
panic("reflect: OverflowInt of non-int type " + t.String())
|
||||
}
|
||||
|
||||
func (t *rtype) OverflowUint(x uint64) bool {
|
||||
k := t.Kind()
|
||||
switch k {
|
||||
case Uint, Uintptr, Uint8, Uint16, Uint32, Uint64:
|
||||
bitSize := t.Size() * 8
|
||||
trunc := (x << (64 - bitSize)) >> (64 - bitSize)
|
||||
return x != trunc
|
||||
}
|
||||
panic("reflect: OverflowUint of non-uint type " + t.String())
|
||||
}
|
||||
|
||||
*/
|
||||
|
||||
// TypeFor returns the [Type] that represents the type argument T.
|
||||
func TypeFor[T any]() reflect.Type {
|
||||
var v T
|
||||
if t := reflect.TypeOf(v); t != nil {
|
||||
return t // optimize for T being a non-interface kind
|
||||
}
|
||||
return reflect.TypeOf((*T)(nil)).Elem() // only for an interface kind
|
||||
}
|
||||
|
||||
// Original src code from Go 1.23 go/src/reflect/type.go (taken 1/9/25)
|
||||
/*
|
||||
|
||||
// TypeFor returns the [Type] that represents the type argument T.
|
||||
func TypeFor[T any]() Type {
|
||||
var v T
|
||||
if t := TypeOf(v); t != nil {
|
||||
return t // optimize for T being a non-interface kind
|
||||
}
|
||||
return TypeOf((*T)(nil)).Elem() // only for an interface kind
|
||||
}
|
||||
|
||||
*/
|
||||
|
||||
type AppendableStdEncoding struct{ *base64.Encoding }
|
||||
|
||||
// AppendEncode appends the base64 encoded src to dst
|
||||
// and returns the extended buffer.
|
||||
func (enc AppendableStdEncoding) AppendEncode(dst, src []byte) []byte {
|
||||
n := enc.EncodedLen(len(src))
|
||||
dst = slices.Grow(dst, n)
|
||||
enc.Encode(dst[len(dst):][:n], src)
|
||||
return dst[:len(dst)+n]
|
||||
}
|
||||
|
||||
// Original src code from Go 1.23.4 go/src/encoding/base64/base64.go (taken 1/9/25)
|
||||
/*
|
||||
|
||||
// AppendEncode appends the base64 encoded src to dst
|
||||
// and returns the extended buffer.
|
||||
func (enc *Encoding) AppendEncode(dst, src []byte) []byte {
|
||||
n := enc.EncodedLen(len(src))
|
||||
dst = slices.Grow(dst, n)
|
||||
enc.Encode(dst[len(dst):][:n], src)
|
||||
return dst[:len(dst)+n]
|
||||
}
|
||||
|
||||
*/
|
||||
512
internal/encoding/json/stream.go
Normal file
512
internal/encoding/json/stream.go
Normal file
@@ -0,0 +1,512 @@
|
||||
// Copyright 2010 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package json
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"errors"
|
||||
"io"
|
||||
)
|
||||
|
||||
// A Decoder reads and decodes JSON values from an input stream.
type Decoder struct {
	r       io.Reader // underlying stream; read from only via refill
	buf     []byte    // buffered input, possibly beyond the current value
	d       decodeState
	scanp   int   // start of unread data in buf
	scanned int64 // amount of data already scanned
	scan    scanner
	err     error // sticky I/O or syntax error; stream unusable once set

	// State for the Token streaming API.
	tokenState int
	tokenStack []int
}
|
||||
|
||||
// NewDecoder returns a new decoder that reads from r.
|
||||
//
|
||||
// The decoder introduces its own buffering and may
|
||||
// read data from r beyond the JSON values requested.
|
||||
func NewDecoder(r io.Reader) *Decoder {
|
||||
return &Decoder{r: r}
|
||||
}
|
||||
|
||||
// UseNumber causes the Decoder to unmarshal a number into an
// interface value as a [Number] instead of as a float64.
func (dec *Decoder) UseNumber() { dec.d.useNumber = true }

// DisallowUnknownFields causes the Decoder to return an error when the destination
// is a struct and the input contains object keys which do not match any
// non-ignored, exported fields in the destination.
func (dec *Decoder) DisallowUnknownFields() { dec.d.disallowUnknownFields = true }
|
||||
|
||||
// Decode reads the next JSON-encoded value from its
// input and stores it in the value pointed to by v.
//
// See the documentation for [Unmarshal] for details about
// the conversion of JSON into a Go value.
func (dec *Decoder) Decode(v any) error {
	// A previous I/O or syntax error is sticky: the stream is unusable.
	if dec.err != nil {
		return dec.err
	}

	// If the Token API has been used, consume any pending separator first.
	if err := dec.tokenPrepareForDecode(); err != nil {
		return err
	}

	if !dec.tokenValueAllowed() {
		return &SyntaxError{msg: "not at beginning of value", Offset: dec.InputOffset()}
	}

	// Read whole value into buffer.
	n, err := dec.readValue()
	if err != nil {
		return err
	}
	dec.d.init(dec.buf[dec.scanp : dec.scanp+n])
	dec.scanp += n

	// Don't save err from unmarshal into dec.err:
	// the connection is still usable since we read a complete JSON
	// object from it before the error happened.
	err = dec.d.unmarshal(v)

	// fixup token streaming state
	dec.tokenValueEnd()

	return err
}
|
||||
|
||||
// Buffered returns a reader of the data remaining in the Decoder's
|
||||
// buffer. The reader is valid until the next call to [Decoder.Decode].
|
||||
func (dec *Decoder) Buffered() io.Reader {
|
||||
return bytes.NewReader(dec.buf[dec.scanp:])
|
||||
}
|
||||
|
||||
// readValue reads a JSON value into dec.buf.
// It returns the length of the encoding.
// It repeatedly scans the buffered data and refills from the underlying
// reader until a complete top-level value has been seen.
func (dec *Decoder) readValue() (int, error) {
	dec.scan.reset()

	scanp := dec.scanp
	var err error
Input:
	// help the compiler see that scanp is never negative, so it can remove
	// some bounds checks below.
	for scanp >= 0 {

		// Look in the buffer for a new value.
		for ; scanp < len(dec.buf); scanp++ {
			c := dec.buf[scanp]
			dec.scan.bytes++
			switch dec.scan.step(&dec.scan, c) {
			case scanEnd:
				// scanEnd is delayed one byte so we decrement
				// the scanner bytes count by 1 to ensure that
				// this value is correct in the next call of Decode.
				dec.scan.bytes--
				break Input
			case scanEndObject, scanEndArray:
				// scanEnd is delayed one byte.
				// We might block trying to get that byte from src,
				// so instead invent a space byte.
				if stateEndValue(&dec.scan, ' ') == scanEnd {
					scanp++
					break Input
				}
			case scanError:
				dec.err = dec.scan.err
				return 0, dec.scan.err
			}
		}

		// Did the last read have an error?
		// Delayed until now to allow buffer scan.
		if err != nil {
			if err == io.EOF {
				if dec.scan.step(&dec.scan, ' ') == scanEnd {
					break Input
				}
				if nonSpace(dec.buf) {
					err = io.ErrUnexpectedEOF
				}
			}
			dec.err = err
			return 0, err
		}

		// Refill may slide data down in buf; recompute scanp relative to
		// the (possibly moved) unread region.
		n := scanp - dec.scanp
		err = dec.refill()
		scanp = dec.scanp + n
	}
	return scanp - dec.scanp, nil
}
|
||||
|
||||
// refill reads more data from dec.r into dec.buf, first compacting
// already-consumed bytes and growing the buffer when needed. Any read
// error is returned so the caller can report it only after the
// buffered data has been fully scanned.
func (dec *Decoder) refill() error {
	// Make room to read more into the buffer.
	// First slide down data already consumed.
	if dec.scanp > 0 {
		dec.scanned += int64(dec.scanp)
		n := copy(dec.buf, dec.buf[dec.scanp:])
		dec.buf = dec.buf[:n]
		dec.scanp = 0
	}

	// Grow buffer if not large enough.
	const minRead = 512
	if cap(dec.buf)-len(dec.buf) < minRead {
		newBuf := make([]byte, len(dec.buf), 2*cap(dec.buf)+minRead)
		copy(newBuf, dec.buf)
		dec.buf = newBuf
	}

	// Read. Delay error for next iteration (after scan).
	n, err := dec.r.Read(dec.buf[len(dec.buf):cap(dec.buf)])
	dec.buf = dec.buf[0 : len(dec.buf)+n]

	return err
}
|
||||
|
||||
func nonSpace(b []byte) bool {
|
||||
for _, c := range b {
|
||||
if !isSpace(c) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// An Encoder writes JSON values to an output stream.
type Encoder struct {
	w          io.Writer // destination stream
	err        error     // sticky write error; once set, Encode fails immediately
	escapeHTML bool      // escape <, >, & inside strings (defaults to true in NewEncoder)

	indentBuf    []byte // scratch buffer reused across Encode calls when indenting
	indentPrefix string // per-line prefix set by SetIndent
	indentValue  string // per-level indent set by SetIndent
}
|
||||
|
||||
// NewEncoder returns a new encoder that writes to w.
|
||||
func NewEncoder(w io.Writer) *Encoder {
|
||||
return &Encoder{w: w, escapeHTML: true}
|
||||
}
|
||||
|
||||
// Encode writes the JSON encoding of v to the stream,
// with insignificant space characters elided,
// followed by a newline character.
//
// See the documentation for [Marshal] for details about the
// conversion of Go values to JSON.
func (enc *Encoder) Encode(v any) error {
	// A previous write error is sticky: later calls fail with it.
	if enc.err != nil {
		return enc.err
	}

	// Encode states are pooled to avoid reallocating the scratch buffer.
	e := newEncodeState()
	defer encodeStatePool.Put(e)

	err := e.marshal(v, encOpts{escapeHTML: enc.escapeHTML})
	if err != nil {
		return err
	}

	// Terminate each value with a newline.
	// This makes the output look a little nicer
	// when debugging, and some kind of space
	// is required if the encoded value was a number,
	// so that the reader knows there aren't more
	// digits coming.
	e.WriteByte('\n')

	b := e.Bytes()
	// Re-indent the compact encoding if SetIndent was used; indentBuf is
	// reused across calls to avoid reallocating.
	if enc.indentPrefix != "" || enc.indentValue != "" {
		enc.indentBuf, err = appendIndent(enc.indentBuf[:0], b, enc.indentPrefix, enc.indentValue)
		if err != nil {
			return err
		}
		b = enc.indentBuf
	}
	if _, err = enc.w.Write(b); err != nil {
		enc.err = err
	}
	return err
}
|
||||
|
||||
// SetIndent instructs the encoder to format each subsequent encoded
// value as if indented by the package-level function Indent(dst, src, prefix, indent).
// Calling SetIndent("", "") disables indentation.
//
// The indentation is applied inside Encode after the compact encoding
// has been produced.
func (enc *Encoder) SetIndent(prefix, indent string) {
	enc.indentPrefix = prefix
	enc.indentValue = indent
}
|
||||
|
||||
// SetEscapeHTML specifies whether problematic HTML characters
// should be escaped inside JSON quoted strings.
// The default behavior is to escape &, <, and > to \u0026, \u003c, and \u003e
// to avoid certain safety problems that can arise when embedding JSON in HTML.
//
// In non-HTML settings where the escaping interferes with the readability
// of the output, SetEscapeHTML(false) disables this behavior.
//
// The setting takes effect on subsequent calls to Encode.
func (enc *Encoder) SetEscapeHTML(on bool) {
	enc.escapeHTML = on
}
|
||||
|
||||
// RawMessage is a raw encoded JSON value.
// It implements [Marshaler] and [Unmarshaler] and can
// be used to delay JSON decoding or precompute a JSON encoding.
type RawMessage []byte

// MarshalJSON returns m as the JSON encoding of m.
// A nil message encodes as the JSON literal "null".
func (m RawMessage) MarshalJSON() ([]byte, error) {
	if m != nil {
		return m, nil
	}
	return []byte("null"), nil
}

// UnmarshalJSON sets *m to a copy of data.
func (m *RawMessage) UnmarshalJSON(data []byte) error {
	if m == nil {
		return errors.New("json.RawMessage: UnmarshalJSON on nil pointer")
	}
	*m = append((*m)[:0], data...)
	return nil
}
|
||||
|
||||
// Compile-time checks that *RawMessage satisfies both interfaces.
var _ Marshaler = (*RawMessage)(nil)
var _ Unmarshaler = (*RawMessage)(nil)
|
||||
|
||||
// A Token holds a value of one of these types:
//
//   - [Delim], for the four JSON delimiters [ ] { }
//   - bool, for JSON booleans
//   - float64, for JSON numbers
//   - [Number], for JSON numbers
//   - string, for JSON string literals
//   - nil, for JSON null
type Token any

// Token-stream states used by the Token API (and by Decode's
// tokenPrepareForDecode/tokenValueAllowed/tokenValueEnd helpers) to
// validate the sequence of delimiters, keys, and values.
const (
	tokenTopValue    = iota // at top level, expecting any value
	tokenArrayStart         // just consumed '[', expecting a value or ']'
	tokenArrayValue         // expecting an array element
	tokenArrayComma         // after an element, expecting ',' or ']'
	tokenObjectStart        // just consumed '{', expecting a key or '}'
	tokenObjectKey          // expecting an object key string
	tokenObjectColon        // after a key, expecting ':'
	tokenObjectValue        // expecting an object value
	tokenObjectComma        // after a value, expecting ',' or '}'
)
|
||||
|
||||
// tokenPrepareForDecode advances the token state from a separator
// state to a value state, consuming the pending ',' or ':' so that
// Decode can be called mid-stream by the Token API.
func (dec *Decoder) tokenPrepareForDecode() error {
	// Note: Not calling peek before switch, to avoid
	// putting peek into the standard Decode path.
	// peek is only called when using the Token API.
	switch dec.tokenState {
	case tokenArrayComma:
		c, err := dec.peek()
		if err != nil {
			return err
		}
		if c != ',' {
			return &SyntaxError{"expected comma after array element", dec.InputOffset()}
		}
		dec.scanp++
		dec.tokenState = tokenArrayValue
	case tokenObjectColon:
		c, err := dec.peek()
		if err != nil {
			return err
		}
		if c != ':' {
			return &SyntaxError{"expected colon after object key", dec.InputOffset()}
		}
		dec.scanp++
		dec.tokenState = tokenObjectValue
	}
	return nil
}
|
||||
|
||||
func (dec *Decoder) tokenValueAllowed() bool {
|
||||
switch dec.tokenState {
|
||||
case tokenTopValue, tokenArrayStart, tokenArrayValue, tokenObjectValue:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (dec *Decoder) tokenValueEnd() {
|
||||
switch dec.tokenState {
|
||||
case tokenArrayStart, tokenArrayValue:
|
||||
dec.tokenState = tokenArrayComma
|
||||
case tokenObjectValue:
|
||||
dec.tokenState = tokenObjectComma
|
||||
}
|
||||
}
|
||||
|
||||
// A Delim is a JSON array or object delimiter, one of [ ] { or }.
type Delim rune

// String returns the delimiter as a one-character string.
func (d Delim) String() string {
	return string(rune(d))
}
|
||||
|
||||
// Token returns the next JSON token in the input stream.
// At the end of the input stream, Token returns nil, [io.EOF].
//
// Token guarantees that the delimiters [ ] { } it returns are
// properly nested and matched: if Token encounters an unexpected
// delimiter in the input, it will return an error.
//
// The input stream consists of basic JSON values—bool, string,
// number, and null—along with delimiters [ ] { } of type [Delim]
// to mark the start and end of arrays and objects.
// Commas and colons are elided.
func (dec *Decoder) Token() (Token, error) {
	// Loop because ':' and ',' separators are consumed silently and the
	// next token is read instead of being returned.
	for {
		c, err := dec.peek()
		if err != nil {
			return nil, err
		}
		switch c {
		case '[':
			if !dec.tokenValueAllowed() {
				return dec.tokenError(c)
			}
			dec.scanp++
			// Push current state so ']' can restore it.
			dec.tokenStack = append(dec.tokenStack, dec.tokenState)
			dec.tokenState = tokenArrayStart
			return Delim('['), nil

		case ']':
			if dec.tokenState != tokenArrayStart && dec.tokenState != tokenArrayComma {
				return dec.tokenError(c)
			}
			dec.scanp++
			// Pop the state saved at the matching '['.
			dec.tokenState = dec.tokenStack[len(dec.tokenStack)-1]
			dec.tokenStack = dec.tokenStack[:len(dec.tokenStack)-1]
			dec.tokenValueEnd()
			return Delim(']'), nil

		case '{':
			if !dec.tokenValueAllowed() {
				return dec.tokenError(c)
			}
			dec.scanp++
			dec.tokenStack = append(dec.tokenStack, dec.tokenState)
			dec.tokenState = tokenObjectStart
			return Delim('{'), nil

		case '}':
			if dec.tokenState != tokenObjectStart && dec.tokenState != tokenObjectComma {
				return dec.tokenError(c)
			}
			dec.scanp++
			dec.tokenState = dec.tokenStack[len(dec.tokenStack)-1]
			dec.tokenStack = dec.tokenStack[:len(dec.tokenStack)-1]
			dec.tokenValueEnd()
			return Delim('}'), nil

		case ':':
			if dec.tokenState != tokenObjectColon {
				return dec.tokenError(c)
			}
			dec.scanp++
			dec.tokenState = tokenObjectValue
			continue

		case ',':
			if dec.tokenState == tokenArrayComma {
				dec.scanp++
				dec.tokenState = tokenArrayValue
				continue
			}
			if dec.tokenState == tokenObjectComma {
				dec.scanp++
				dec.tokenState = tokenObjectKey
				continue
			}
			return dec.tokenError(c)

		case '"':
			if dec.tokenState == tokenObjectStart || dec.tokenState == tokenObjectKey {
				// Object key: decode the string via Decode, temporarily
				// claiming to be at top level so the value is allowed.
				var x string
				old := dec.tokenState
				dec.tokenState = tokenTopValue
				err := dec.Decode(&x)
				dec.tokenState = old
				if err != nil {
					return nil, err
				}
				dec.tokenState = tokenObjectColon
				return x, nil
			}
			// A string in value position is handled like any other value.
			fallthrough

		default:
			if !dec.tokenValueAllowed() {
				return dec.tokenError(c)
			}
			var x any
			if err := dec.Decode(&x); err != nil {
				return nil, err
			}
			return x, nil
		}
	}
}
|
||||
|
||||
func (dec *Decoder) tokenError(c byte) (Token, error) {
|
||||
var context string
|
||||
switch dec.tokenState {
|
||||
case tokenTopValue:
|
||||
context = " looking for beginning of value"
|
||||
case tokenArrayStart, tokenArrayValue, tokenObjectValue:
|
||||
context = " looking for beginning of value"
|
||||
case tokenArrayComma:
|
||||
context = " after array element"
|
||||
case tokenObjectKey:
|
||||
context = " looking for beginning of object key string"
|
||||
case tokenObjectColon:
|
||||
context = " after object key"
|
||||
case tokenObjectComma:
|
||||
context = " after object key:value pair"
|
||||
}
|
||||
return nil, &SyntaxError{"invalid character " + quoteChar(c) + context, dec.InputOffset()}
|
||||
}
|
||||
|
||||
// More reports whether there is another element in the
|
||||
// current array or object being parsed.
|
||||
func (dec *Decoder) More() bool {
|
||||
c, err := dec.peek()
|
||||
return err == nil && c != ']' && c != '}'
|
||||
}
|
||||
|
||||
// peek returns the next non-whitespace byte of input without
// consuming it, refilling the buffer from the underlying reader as
// needed. dec.scanp is advanced past any skipped whitespace.
func (dec *Decoder) peek() (byte, error) {
	var err error
	for {
		for i := dec.scanp; i < len(dec.buf); i++ {
			c := dec.buf[i]
			if isSpace(c) {
				continue
			}
			dec.scanp = i
			return c, nil
		}
		// buffer has been scanned, now report any error
		if err != nil {
			return 0, err
		}
		err = dec.refill()
	}
}
|
||||
|
||||
// InputOffset returns the input stream byte offset of the current decoder position.
// The offset gives the location of the end of the most recently returned token
// and the beginning of the next token.
//
// It is the count of bytes already discarded from the buffer (scanned)
// plus the current position within the buffer (scanp).
func (dec *Decoder) InputOffset() int64 {
	return dec.scanned + int64(dec.scanp)
}
|
||||
218
internal/encoding/json/tables.go
Normal file
218
internal/encoding/json/tables.go
Normal file
@@ -0,0 +1,218 @@
|
||||
// Copyright 2016 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package json
|
||||
|
||||
import "unicode/utf8"
|
||||
|
||||
// safeSet holds the value true if the ASCII character with the given array
// position can be represented inside a JSON string without any further
// escaping.
//
// All values are true except for the ASCII control characters (0-31), the
// double quote ("), and the backslash character ("\").
var safeSet = makeSafeSet()

// makeSafeSet builds the table: every byte from ' ' (0x20) through
// 0x7f is safe except '"' and '\'; control characters stay false.
func makeSafeSet() (s [utf8.RuneSelf]bool) {
	for c := ' '; c < utf8.RuneSelf; c++ {
		if c != '"' && c != '\\' {
			s[c] = true
		}
	}
	return s
}
|
||||
|
||||
// htmlSafeSet holds the value true if the ASCII character with the given
// array position can be safely represented inside a JSON string, embedded
// inside of HTML <script> tags, without any additional escaping.
//
// All values are true except for the ASCII control characters (0-31), the
// double quote ("), the backslash character ("\"), HTML opening and closing
// tags ("<" and ">"), and the ampersand ("&").
var htmlSafeSet = makeHTMLSafeSet()

// makeHTMLSafeSet builds the table: every byte from ' ' (0x20) through
// 0x7f is safe except the five HTML/JSON-special bytes below; control
// characters stay false.
func makeHTMLSafeSet() (s [utf8.RuneSelf]bool) {
	for c := ' '; c < utf8.RuneSelf; c++ {
		switch c {
		case '"', '\\', '<', '>', '&':
			// unsafe: needs escaping
		default:
			s[c] = true
		}
	}
	return s
}
|
||||
38
internal/encoding/json/tags.go
Normal file
38
internal/encoding/json/tags.go
Normal file
@@ -0,0 +1,38 @@
|
||||
// Copyright 2011 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package json
|
||||
|
||||
import (
|
||||
"strings"
|
||||
)
|
||||
|
||||
// tagOptions is the string following a comma in a struct field's "json"
// tag, or the empty string. It does not include the leading comma.
type tagOptions string

// parseTag splits a struct field's json tag into its name and
// comma-separated options.
func parseTag(tag string) (string, tagOptions) {
	name, opts, _ := strings.Cut(tag, ",")
	return name, tagOptions(opts)
}

// Contains reports whether the comma-separated list of options
// contains the flag optionName. optionName is matched against whole
// options only, so it must not itself contain a comma.
func (o tagOptions) Contains(optionName string) bool {
	rest := string(o)
	for len(rest) > 0 {
		var current string
		current, rest, _ = strings.Cut(rest, ",")
		if current == optionName {
			return true
		}
	}
	return false
}
|
||||
@@ -1,8 +1,6 @@
|
||||
package param
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
)
|
||||
import "fmt"
|
||||
|
||||
type FieldLike interface{ field() }
|
||||
|
||||
|
||||
85
model.go
85
model.go
@@ -12,6 +12,8 @@ import (
|
||||
"github.com/openai/openai-go/internal/requestconfig"
|
||||
"github.com/openai/openai-go/option"
|
||||
"github.com/openai/openai-go/packages/pagination"
|
||||
"github.com/openai/openai-go/packages/resp"
|
||||
"github.com/openai/openai-go/shared/constant"
|
||||
)
|
||||
|
||||
// ModelService contains methods and other services that help with interacting with
|
||||
@@ -27,8 +29,8 @@ type ModelService struct {
|
||||
// NewModelService generates a new service that applies the given options to each
|
||||
// request. These options are applied after the parent client's options (if there
|
||||
// is one), and before any request-specific options.
|
||||
func NewModelService(opts ...option.RequestOption) (r *ModelService) {
|
||||
r = &ModelService{}
|
||||
func NewModelService(opts ...option.RequestOption) (r ModelService) {
|
||||
r = ModelService{}
|
||||
r.Options = opts
|
||||
return
|
||||
}
|
||||
@@ -87,69 +89,42 @@ func (r *ModelService) Delete(ctx context.Context, model string, opts ...option.
|
||||
// Describes an OpenAI model offering that can be used with the API.
|
||||
type Model struct {
|
||||
// The model identifier, which can be referenced in the API endpoints.
|
||||
ID string `json:"id,required"`
|
||||
ID string `json:"id,omitzero,required"`
|
||||
// The Unix timestamp (in seconds) when the model was created.
|
||||
Created int64 `json:"created,required"`
|
||||
Created int64 `json:"created,omitzero,required"`
|
||||
// The object type, which is always "model".
|
||||
Object ModelObject `json:"object,required"`
|
||||
//
|
||||
// This field can be elided, and will be automatically set as "model".
|
||||
Object constant.Model `json:"object,required"`
|
||||
// The organization that owns the model.
|
||||
OwnedBy string `json:"owned_by,required"`
|
||||
JSON modelJSON `json:"-"`
|
||||
OwnedBy string `json:"owned_by,omitzero,required"`
|
||||
JSON struct {
|
||||
ID resp.Field
|
||||
Created resp.Field
|
||||
Object resp.Field
|
||||
OwnedBy resp.Field
|
||||
raw string
|
||||
} `json:"-"`
|
||||
}
|
||||
|
||||
// modelJSON contains the JSON metadata for the struct [Model]
|
||||
type modelJSON struct {
|
||||
ID apijson.Field
|
||||
Created apijson.Field
|
||||
Object apijson.Field
|
||||
OwnedBy apijson.Field
|
||||
raw string
|
||||
ExtraFields map[string]apijson.Field
|
||||
}
|
||||
|
||||
func (r *Model) UnmarshalJSON(data []byte) (err error) {
|
||||
func (r Model) RawJSON() string { return r.JSON.raw }
|
||||
func (r *Model) UnmarshalJSON(data []byte) error {
|
||||
return apijson.UnmarshalRoot(data, r)
|
||||
}
|
||||
|
||||
func (r modelJSON) RawJSON() string {
|
||||
return r.raw
|
||||
}
|
||||
|
||||
// The object type, which is always "model".
|
||||
type ModelObject string
|
||||
|
||||
const (
|
||||
ModelObjectModel ModelObject = "model"
|
||||
)
|
||||
|
||||
func (r ModelObject) IsKnown() bool {
|
||||
switch r {
|
||||
case ModelObjectModel:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
type ModelDeleted struct {
|
||||
ID string `json:"id,required"`
|
||||
Deleted bool `json:"deleted,required"`
|
||||
Object string `json:"object,required"`
|
||||
JSON modelDeletedJSON `json:"-"`
|
||||
ID string `json:"id,omitzero,required"`
|
||||
Deleted bool `json:"deleted,omitzero,required"`
|
||||
Object string `json:"object,omitzero,required"`
|
||||
JSON struct {
|
||||
ID resp.Field
|
||||
Deleted resp.Field
|
||||
Object resp.Field
|
||||
raw string
|
||||
} `json:"-"`
|
||||
}
|
||||
|
||||
// modelDeletedJSON contains the JSON metadata for the struct [ModelDeleted]
|
||||
type modelDeletedJSON struct {
|
||||
ID apijson.Field
|
||||
Deleted apijson.Field
|
||||
Object apijson.Field
|
||||
raw string
|
||||
ExtraFields map[string]apijson.Field
|
||||
}
|
||||
|
||||
func (r *ModelDeleted) UnmarshalJSON(data []byte) (err error) {
|
||||
func (r ModelDeleted) RawJSON() string { return r.JSON.raw }
|
||||
func (r *ModelDeleted) UnmarshalJSON(data []byte) error {
|
||||
return apijson.UnmarshalRoot(data, r)
|
||||
}
|
||||
|
||||
func (r modelDeletedJSON) RawJSON() string {
|
||||
return r.raw
|
||||
}
|
||||
|
||||
578
moderation.go
578
moderation.go
@@ -7,9 +7,11 @@ import (
|
||||
"net/http"
|
||||
|
||||
"github.com/openai/openai-go/internal/apijson"
|
||||
"github.com/openai/openai-go/internal/param"
|
||||
"github.com/openai/openai-go/internal/requestconfig"
|
||||
"github.com/openai/openai-go/option"
|
||||
"github.com/openai/openai-go/packages/param"
|
||||
"github.com/openai/openai-go/packages/resp"
|
||||
"github.com/openai/openai-go/shared/constant"
|
||||
)
|
||||
|
||||
// ModerationService contains methods and other services that help with interacting
|
||||
@@ -25,8 +27,8 @@ type ModerationService struct {
|
||||
// NewModerationService generates a new service that applies the given options to
|
||||
// each request. These options are applied after the parent client's options (if
|
||||
// there is one), and before any request-specific options.
|
||||
func NewModerationService(opts ...option.RequestOption) (r *ModerationService) {
|
||||
r = &ModerationService{}
|
||||
func NewModerationService(opts ...option.RequestOption) (r ModerationService) {
|
||||
r = ModerationService{}
|
||||
r.Options = opts
|
||||
return
|
||||
}
|
||||
@@ -42,453 +44,313 @@ func (r *ModerationService) New(ctx context.Context, body ModerationNewParams, o
|
||||
|
||||
type Moderation struct {
|
||||
// A list of the categories, and whether they are flagged or not.
|
||||
Categories ModerationCategories `json:"categories,required"`
|
||||
Categories ModerationCategories `json:"categories,omitzero,required"`
|
||||
// A list of the categories along with the input type(s) that the score applies to.
|
||||
CategoryAppliedInputTypes ModerationCategoryAppliedInputTypes `json:"category_applied_input_types,required"`
|
||||
CategoryAppliedInputTypes ModerationCategoryAppliedInputTypes `json:"category_applied_input_types,omitzero,required"`
|
||||
// A list of the categories along with their scores as predicted by model.
|
||||
CategoryScores ModerationCategoryScores `json:"category_scores,required"`
|
||||
CategoryScores ModerationCategoryScores `json:"category_scores,omitzero,required"`
|
||||
// Whether any of the below categories are flagged.
|
||||
Flagged bool `json:"flagged,required"`
|
||||
JSON moderationJSON `json:"-"`
|
||||
Flagged bool `json:"flagged,omitzero,required"`
|
||||
JSON struct {
|
||||
Categories resp.Field
|
||||
CategoryAppliedInputTypes resp.Field
|
||||
CategoryScores resp.Field
|
||||
Flagged resp.Field
|
||||
raw string
|
||||
} `json:"-"`
|
||||
}
|
||||
|
||||
// moderationJSON contains the JSON metadata for the struct [Moderation]
|
||||
type moderationJSON struct {
|
||||
Categories apijson.Field
|
||||
CategoryAppliedInputTypes apijson.Field
|
||||
CategoryScores apijson.Field
|
||||
Flagged apijson.Field
|
||||
raw string
|
||||
ExtraFields map[string]apijson.Field
|
||||
}
|
||||
|
||||
func (r *Moderation) UnmarshalJSON(data []byte) (err error) {
|
||||
func (r Moderation) RawJSON() string { return r.JSON.raw }
|
||||
func (r *Moderation) UnmarshalJSON(data []byte) error {
|
||||
return apijson.UnmarshalRoot(data, r)
|
||||
}
|
||||
|
||||
func (r moderationJSON) RawJSON() string {
|
||||
return r.raw
|
||||
}
|
||||
|
||||
// A list of the categories, and whether they are flagged or not.
|
||||
type ModerationCategories struct {
|
||||
// Content that expresses, incites, or promotes harassing language towards any
|
||||
// target.
|
||||
Harassment bool `json:"harassment,required"`
|
||||
Harassment bool `json:"harassment,omitzero,required"`
|
||||
// Harassment content that also includes violence or serious harm towards any
|
||||
// target.
|
||||
HarassmentThreatening bool `json:"harassment/threatening,required"`
|
||||
HarassmentThreatening bool `json:"harassment/threatening,omitzero,required"`
|
||||
// Content that expresses, incites, or promotes hate based on race, gender,
|
||||
// ethnicity, religion, nationality, sexual orientation, disability status, or
|
||||
// caste. Hateful content aimed at non-protected groups (e.g., chess players) is
|
||||
// harassment.
|
||||
Hate bool `json:"hate,required"`
|
||||
Hate bool `json:"hate,omitzero,required"`
|
||||
// Hateful content that also includes violence or serious harm towards the targeted
|
||||
// group based on race, gender, ethnicity, religion, nationality, sexual
|
||||
// orientation, disability status, or caste.
|
||||
HateThreatening bool `json:"hate/threatening,required"`
|
||||
HateThreatening bool `json:"hate/threatening,omitzero,required"`
|
||||
// Content that includes instructions or advice that facilitate the planning or
|
||||
// execution of wrongdoing, or that gives advice or instruction on how to commit
|
||||
// illicit acts. For example, "how to shoplift" would fit this category.
|
||||
Illicit bool `json:"illicit,required,nullable"`
|
||||
Illicit bool `json:"illicit,omitzero,required,nullable"`
|
||||
// Content that includes instructions or advice that facilitate the planning or
|
||||
// execution of wrongdoing that also includes violence, or that gives advice or
|
||||
// instruction on the procurement of any weapon.
|
||||
IllicitViolent bool `json:"illicit/violent,required,nullable"`
|
||||
IllicitViolent bool `json:"illicit/violent,omitzero,required,nullable"`
|
||||
// Content that promotes, encourages, or depicts acts of self-harm, such as
|
||||
// suicide, cutting, and eating disorders.
|
||||
SelfHarm bool `json:"self-harm,required"`
|
||||
SelfHarm bool `json:"self-harm,omitzero,required"`
|
||||
// Content that encourages performing acts of self-harm, such as suicide, cutting,
|
||||
// and eating disorders, or that gives instructions or advice on how to commit such
|
||||
// acts.
|
||||
SelfHarmInstructions bool `json:"self-harm/instructions,required"`
|
||||
SelfHarmInstructions bool `json:"self-harm/instructions,omitzero,required"`
|
||||
// Content where the speaker expresses that they are engaging or intend to engage
|
||||
// in acts of self-harm, such as suicide, cutting, and eating disorders.
|
||||
SelfHarmIntent bool `json:"self-harm/intent,required"`
|
||||
SelfHarmIntent bool `json:"self-harm/intent,omitzero,required"`
|
||||
// Content meant to arouse sexual excitement, such as the description of sexual
|
||||
// activity, or that promotes sexual services (excluding sex education and
|
||||
// wellness).
|
||||
Sexual bool `json:"sexual,required"`
|
||||
Sexual bool `json:"sexual,omitzero,required"`
|
||||
// Sexual content that includes an individual who is under 18 years old.
|
||||
SexualMinors bool `json:"sexual/minors,required"`
|
||||
SexualMinors bool `json:"sexual/minors,omitzero,required"`
|
||||
// Content that depicts death, violence, or physical injury.
|
||||
Violence bool `json:"violence,required"`
|
||||
Violence bool `json:"violence,omitzero,required"`
|
||||
// Content that depicts death, violence, or physical injury in graphic detail.
|
||||
ViolenceGraphic bool `json:"violence/graphic,required"`
|
||||
JSON moderationCategoriesJSON `json:"-"`
|
||||
ViolenceGraphic bool `json:"violence/graphic,omitzero,required"`
|
||||
JSON struct {
|
||||
Harassment resp.Field
|
||||
HarassmentThreatening resp.Field
|
||||
Hate resp.Field
|
||||
HateThreatening resp.Field
|
||||
Illicit resp.Field
|
||||
IllicitViolent resp.Field
|
||||
SelfHarm resp.Field
|
||||
SelfHarmInstructions resp.Field
|
||||
SelfHarmIntent resp.Field
|
||||
Sexual resp.Field
|
||||
SexualMinors resp.Field
|
||||
Violence resp.Field
|
||||
ViolenceGraphic resp.Field
|
||||
raw string
|
||||
} `json:"-"`
|
||||
}
|
||||
|
||||
// moderationCategoriesJSON contains the JSON metadata for the struct
|
||||
// [ModerationCategories]
|
||||
type moderationCategoriesJSON struct {
|
||||
Harassment apijson.Field
|
||||
HarassmentThreatening apijson.Field
|
||||
Hate apijson.Field
|
||||
HateThreatening apijson.Field
|
||||
Illicit apijson.Field
|
||||
IllicitViolent apijson.Field
|
||||
SelfHarm apijson.Field
|
||||
SelfHarmInstructions apijson.Field
|
||||
SelfHarmIntent apijson.Field
|
||||
Sexual apijson.Field
|
||||
SexualMinors apijson.Field
|
||||
Violence apijson.Field
|
||||
ViolenceGraphic apijson.Field
|
||||
raw string
|
||||
ExtraFields map[string]apijson.Field
|
||||
}
|
||||
|
||||
func (r *ModerationCategories) UnmarshalJSON(data []byte) (err error) {
|
||||
func (r ModerationCategories) RawJSON() string { return r.JSON.raw }
|
||||
func (r *ModerationCategories) UnmarshalJSON(data []byte) error {
|
||||
return apijson.UnmarshalRoot(data, r)
|
||||
}
|
||||
|
||||
func (r moderationCategoriesJSON) RawJSON() string {
|
||||
return r.raw
|
||||
}
|
||||
|
||||
// A list of the categories along with the input type(s) that the score applies to.
|
||||
type ModerationCategoryAppliedInputTypes struct {
|
||||
// The applied input type(s) for the category 'harassment'.
|
||||
Harassment []ModerationCategoryAppliedInputTypesHarassment `json:"harassment,required"`
|
||||
Harassment []string `json:"harassment,omitzero,required"`
|
||||
// The applied input type(s) for the category 'harassment/threatening'.
|
||||
HarassmentThreatening []ModerationCategoryAppliedInputTypesHarassmentThreatening `json:"harassment/threatening,required"`
|
||||
HarassmentThreatening []string `json:"harassment/threatening,omitzero,required"`
|
||||
// The applied input type(s) for the category 'hate'.
|
||||
Hate []ModerationCategoryAppliedInputTypesHate `json:"hate,required"`
|
||||
Hate []string `json:"hate,omitzero,required"`
|
||||
// The applied input type(s) for the category 'hate/threatening'.
|
||||
HateThreatening []ModerationCategoryAppliedInputTypesHateThreatening `json:"hate/threatening,required"`
|
||||
HateThreatening []string `json:"hate/threatening,omitzero,required"`
|
||||
// The applied input type(s) for the category 'illicit'.
|
||||
Illicit []ModerationCategoryAppliedInputTypesIllicit `json:"illicit,required"`
|
||||
Illicit []string `json:"illicit,omitzero,required"`
|
||||
// The applied input type(s) for the category 'illicit/violent'.
|
||||
IllicitViolent []ModerationCategoryAppliedInputTypesIllicitViolent `json:"illicit/violent,required"`
|
||||
IllicitViolent []string `json:"illicit/violent,omitzero,required"`
|
||||
// The applied input type(s) for the category 'self-harm'.
|
||||
SelfHarm []ModerationCategoryAppliedInputTypesSelfHarm `json:"self-harm,required"`
|
||||
SelfHarm []string `json:"self-harm,omitzero,required"`
|
||||
// The applied input type(s) for the category 'self-harm/instructions'.
|
||||
SelfHarmInstructions []ModerationCategoryAppliedInputTypesSelfHarmInstruction `json:"self-harm/instructions,required"`
|
||||
SelfHarmInstructions []string `json:"self-harm/instructions,omitzero,required"`
|
||||
// The applied input type(s) for the category 'self-harm/intent'.
|
||||
SelfHarmIntent []ModerationCategoryAppliedInputTypesSelfHarmIntent `json:"self-harm/intent,required"`
|
||||
SelfHarmIntent []string `json:"self-harm/intent,omitzero,required"`
|
||||
// The applied input type(s) for the category 'sexual'.
|
||||
Sexual []ModerationCategoryAppliedInputTypesSexual `json:"sexual,required"`
|
||||
Sexual []string `json:"sexual,omitzero,required"`
|
||||
// The applied input type(s) for the category 'sexual/minors'.
|
||||
SexualMinors []ModerationCategoryAppliedInputTypesSexualMinor `json:"sexual/minors,required"`
|
||||
SexualMinors []string `json:"sexual/minors,omitzero,required"`
|
||||
// The applied input type(s) for the category 'violence'.
|
||||
Violence []ModerationCategoryAppliedInputTypesViolence `json:"violence,required"`
|
||||
Violence []string `json:"violence,omitzero,required"`
|
||||
// The applied input type(s) for the category 'violence/graphic'.
|
||||
ViolenceGraphic []ModerationCategoryAppliedInputTypesViolenceGraphic `json:"violence/graphic,required"`
|
||||
JSON moderationCategoryAppliedInputTypesJSON `json:"-"`
|
||||
ViolenceGraphic []string `json:"violence/graphic,omitzero,required"`
|
||||
JSON struct {
|
||||
Harassment resp.Field
|
||||
HarassmentThreatening resp.Field
|
||||
Hate resp.Field
|
||||
HateThreatening resp.Field
|
||||
Illicit resp.Field
|
||||
IllicitViolent resp.Field
|
||||
SelfHarm resp.Field
|
||||
SelfHarmInstructions resp.Field
|
||||
SelfHarmIntent resp.Field
|
||||
Sexual resp.Field
|
||||
SexualMinors resp.Field
|
||||
Violence resp.Field
|
||||
ViolenceGraphic resp.Field
|
||||
raw string
|
||||
} `json:"-"`
|
||||
}
|
||||
|
||||
// moderationCategoryAppliedInputTypesJSON contains the JSON metadata for the
|
||||
// struct [ModerationCategoryAppliedInputTypes]
|
||||
type moderationCategoryAppliedInputTypesJSON struct {
|
||||
Harassment apijson.Field
|
||||
HarassmentThreatening apijson.Field
|
||||
Hate apijson.Field
|
||||
HateThreatening apijson.Field
|
||||
Illicit apijson.Field
|
||||
IllicitViolent apijson.Field
|
||||
SelfHarm apijson.Field
|
||||
SelfHarmInstructions apijson.Field
|
||||
SelfHarmIntent apijson.Field
|
||||
Sexual apijson.Field
|
||||
SexualMinors apijson.Field
|
||||
Violence apijson.Field
|
||||
ViolenceGraphic apijson.Field
|
||||
raw string
|
||||
ExtraFields map[string]apijson.Field
|
||||
}
|
||||
|
||||
func (r *ModerationCategoryAppliedInputTypes) UnmarshalJSON(data []byte) (err error) {
|
||||
func (r ModerationCategoryAppliedInputTypes) RawJSON() string { return r.JSON.raw }
|
||||
func (r *ModerationCategoryAppliedInputTypes) UnmarshalJSON(data []byte) error {
|
||||
return apijson.UnmarshalRoot(data, r)
|
||||
}
|
||||
|
||||
func (r moderationCategoryAppliedInputTypesJSON) RawJSON() string {
|
||||
return r.raw
|
||||
}
|
||||
|
||||
type ModerationCategoryAppliedInputTypesHarassment string
|
||||
type ModerationCategoryAppliedInputTypesHarassment = string
|
||||
|
||||
const (
|
||||
ModerationCategoryAppliedInputTypesHarassmentText ModerationCategoryAppliedInputTypesHarassment = "text"
|
||||
)
|
||||
|
||||
func (r ModerationCategoryAppliedInputTypesHarassment) IsKnown() bool {
|
||||
switch r {
|
||||
case ModerationCategoryAppliedInputTypesHarassmentText:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
type ModerationCategoryAppliedInputTypesHarassmentThreatening string
|
||||
type ModerationCategoryAppliedInputTypesHarassmentThreatening = string
|
||||
|
||||
const (
|
||||
ModerationCategoryAppliedInputTypesHarassmentThreateningText ModerationCategoryAppliedInputTypesHarassmentThreatening = "text"
|
||||
)
|
||||
|
||||
func (r ModerationCategoryAppliedInputTypesHarassmentThreatening) IsKnown() bool {
|
||||
switch r {
|
||||
case ModerationCategoryAppliedInputTypesHarassmentThreateningText:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
type ModerationCategoryAppliedInputTypesHate string
|
||||
type ModerationCategoryAppliedInputTypesHate = string
|
||||
|
||||
const (
|
||||
ModerationCategoryAppliedInputTypesHateText ModerationCategoryAppliedInputTypesHate = "text"
|
||||
)
|
||||
|
||||
func (r ModerationCategoryAppliedInputTypesHate) IsKnown() bool {
|
||||
switch r {
|
||||
case ModerationCategoryAppliedInputTypesHateText:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
type ModerationCategoryAppliedInputTypesHateThreatening string
|
||||
type ModerationCategoryAppliedInputTypesHateThreatening = string
|
||||
|
||||
const (
|
||||
ModerationCategoryAppliedInputTypesHateThreateningText ModerationCategoryAppliedInputTypesHateThreatening = "text"
|
||||
)
|
||||
|
||||
func (r ModerationCategoryAppliedInputTypesHateThreatening) IsKnown() bool {
|
||||
switch r {
|
||||
case ModerationCategoryAppliedInputTypesHateThreateningText:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
type ModerationCategoryAppliedInputTypesIllicit string
|
||||
type ModerationCategoryAppliedInputTypesIllicit = string
|
||||
|
||||
const (
|
||||
ModerationCategoryAppliedInputTypesIllicitText ModerationCategoryAppliedInputTypesIllicit = "text"
|
||||
)
|
||||
|
||||
func (r ModerationCategoryAppliedInputTypesIllicit) IsKnown() bool {
|
||||
switch r {
|
||||
case ModerationCategoryAppliedInputTypesIllicitText:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
type ModerationCategoryAppliedInputTypesIllicitViolent string
|
||||
type ModerationCategoryAppliedInputTypesIllicitViolent = string
|
||||
|
||||
const (
|
||||
ModerationCategoryAppliedInputTypesIllicitViolentText ModerationCategoryAppliedInputTypesIllicitViolent = "text"
|
||||
)
|
||||
|
||||
func (r ModerationCategoryAppliedInputTypesIllicitViolent) IsKnown() bool {
|
||||
switch r {
|
||||
case ModerationCategoryAppliedInputTypesIllicitViolentText:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
type ModerationCategoryAppliedInputTypesSelfHarm string
|
||||
type ModerationCategoryAppliedInputTypesSelfHarm = string
|
||||
|
||||
const (
|
||||
ModerationCategoryAppliedInputTypesSelfHarmText ModerationCategoryAppliedInputTypesSelfHarm = "text"
|
||||
ModerationCategoryAppliedInputTypesSelfHarmImage ModerationCategoryAppliedInputTypesSelfHarm = "image"
|
||||
)
|
||||
|
||||
func (r ModerationCategoryAppliedInputTypesSelfHarm) IsKnown() bool {
|
||||
switch r {
|
||||
case ModerationCategoryAppliedInputTypesSelfHarmText, ModerationCategoryAppliedInputTypesSelfHarmImage:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
type ModerationCategoryAppliedInputTypesSelfHarmInstruction string
|
||||
type ModerationCategoryAppliedInputTypesSelfHarmInstruction = string
|
||||
|
||||
const (
|
||||
ModerationCategoryAppliedInputTypesSelfHarmInstructionText ModerationCategoryAppliedInputTypesSelfHarmInstruction = "text"
|
||||
ModerationCategoryAppliedInputTypesSelfHarmInstructionImage ModerationCategoryAppliedInputTypesSelfHarmInstruction = "image"
|
||||
)
|
||||
|
||||
func (r ModerationCategoryAppliedInputTypesSelfHarmInstruction) IsKnown() bool {
|
||||
switch r {
|
||||
case ModerationCategoryAppliedInputTypesSelfHarmInstructionText, ModerationCategoryAppliedInputTypesSelfHarmInstructionImage:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
type ModerationCategoryAppliedInputTypesSelfHarmIntent string
|
||||
type ModerationCategoryAppliedInputTypesSelfHarmIntent = string
|
||||
|
||||
const (
|
||||
ModerationCategoryAppliedInputTypesSelfHarmIntentText ModerationCategoryAppliedInputTypesSelfHarmIntent = "text"
|
||||
ModerationCategoryAppliedInputTypesSelfHarmIntentImage ModerationCategoryAppliedInputTypesSelfHarmIntent = "image"
|
||||
)
|
||||
|
||||
func (r ModerationCategoryAppliedInputTypesSelfHarmIntent) IsKnown() bool {
|
||||
switch r {
|
||||
case ModerationCategoryAppliedInputTypesSelfHarmIntentText, ModerationCategoryAppliedInputTypesSelfHarmIntentImage:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
type ModerationCategoryAppliedInputTypesSexual string
|
||||
type ModerationCategoryAppliedInputTypesSexual = string
|
||||
|
||||
const (
|
||||
ModerationCategoryAppliedInputTypesSexualText ModerationCategoryAppliedInputTypesSexual = "text"
|
||||
ModerationCategoryAppliedInputTypesSexualImage ModerationCategoryAppliedInputTypesSexual = "image"
|
||||
)
|
||||
|
||||
func (r ModerationCategoryAppliedInputTypesSexual) IsKnown() bool {
|
||||
switch r {
|
||||
case ModerationCategoryAppliedInputTypesSexualText, ModerationCategoryAppliedInputTypesSexualImage:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
type ModerationCategoryAppliedInputTypesSexualMinor string
|
||||
type ModerationCategoryAppliedInputTypesSexualMinor = string
|
||||
|
||||
const (
|
||||
ModerationCategoryAppliedInputTypesSexualMinorText ModerationCategoryAppliedInputTypesSexualMinor = "text"
|
||||
)
|
||||
|
||||
func (r ModerationCategoryAppliedInputTypesSexualMinor) IsKnown() bool {
|
||||
switch r {
|
||||
case ModerationCategoryAppliedInputTypesSexualMinorText:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
type ModerationCategoryAppliedInputTypesViolence string
|
||||
type ModerationCategoryAppliedInputTypesViolence = string
|
||||
|
||||
const (
|
||||
ModerationCategoryAppliedInputTypesViolenceText ModerationCategoryAppliedInputTypesViolence = "text"
|
||||
ModerationCategoryAppliedInputTypesViolenceImage ModerationCategoryAppliedInputTypesViolence = "image"
|
||||
)
|
||||
|
||||
func (r ModerationCategoryAppliedInputTypesViolence) IsKnown() bool {
|
||||
switch r {
|
||||
case ModerationCategoryAppliedInputTypesViolenceText, ModerationCategoryAppliedInputTypesViolenceImage:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
type ModerationCategoryAppliedInputTypesViolenceGraphic string
|
||||
type ModerationCategoryAppliedInputTypesViolenceGraphic = string
|
||||
|
||||
const (
|
||||
ModerationCategoryAppliedInputTypesViolenceGraphicText ModerationCategoryAppliedInputTypesViolenceGraphic = "text"
|
||||
ModerationCategoryAppliedInputTypesViolenceGraphicImage ModerationCategoryAppliedInputTypesViolenceGraphic = "image"
|
||||
)
|
||||
|
||||
func (r ModerationCategoryAppliedInputTypesViolenceGraphic) IsKnown() bool {
|
||||
switch r {
|
||||
case ModerationCategoryAppliedInputTypesViolenceGraphicText, ModerationCategoryAppliedInputTypesViolenceGraphicImage:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// A list of the categories along with their scores as predicted by model.
|
||||
type ModerationCategoryScores struct {
|
||||
// The score for the category 'harassment'.
|
||||
Harassment float64 `json:"harassment,required"`
|
||||
Harassment float64 `json:"harassment,omitzero,required"`
|
||||
// The score for the category 'harassment/threatening'.
|
||||
HarassmentThreatening float64 `json:"harassment/threatening,required"`
|
||||
HarassmentThreatening float64 `json:"harassment/threatening,omitzero,required"`
|
||||
// The score for the category 'hate'.
|
||||
Hate float64 `json:"hate,required"`
|
||||
Hate float64 `json:"hate,omitzero,required"`
|
||||
// The score for the category 'hate/threatening'.
|
||||
HateThreatening float64 `json:"hate/threatening,required"`
|
||||
HateThreatening float64 `json:"hate/threatening,omitzero,required"`
|
||||
// The score for the category 'illicit'.
|
||||
Illicit float64 `json:"illicit,required"`
|
||||
Illicit float64 `json:"illicit,omitzero,required"`
|
||||
// The score for the category 'illicit/violent'.
|
||||
IllicitViolent float64 `json:"illicit/violent,required"`
|
||||
IllicitViolent float64 `json:"illicit/violent,omitzero,required"`
|
||||
// The score for the category 'self-harm'.
|
||||
SelfHarm float64 `json:"self-harm,required"`
|
||||
SelfHarm float64 `json:"self-harm,omitzero,required"`
|
||||
// The score for the category 'self-harm/instructions'.
|
||||
SelfHarmInstructions float64 `json:"self-harm/instructions,required"`
|
||||
SelfHarmInstructions float64 `json:"self-harm/instructions,omitzero,required"`
|
||||
// The score for the category 'self-harm/intent'.
|
||||
SelfHarmIntent float64 `json:"self-harm/intent,required"`
|
||||
SelfHarmIntent float64 `json:"self-harm/intent,omitzero,required"`
|
||||
// The score for the category 'sexual'.
|
||||
Sexual float64 `json:"sexual,required"`
|
||||
Sexual float64 `json:"sexual,omitzero,required"`
|
||||
// The score for the category 'sexual/minors'.
|
||||
SexualMinors float64 `json:"sexual/minors,required"`
|
||||
SexualMinors float64 `json:"sexual/minors,omitzero,required"`
|
||||
// The score for the category 'violence'.
|
||||
Violence float64 `json:"violence,required"`
|
||||
Violence float64 `json:"violence,omitzero,required"`
|
||||
// The score for the category 'violence/graphic'.
|
||||
ViolenceGraphic float64 `json:"violence/graphic,required"`
|
||||
JSON moderationCategoryScoresJSON `json:"-"`
|
||||
ViolenceGraphic float64 `json:"violence/graphic,omitzero,required"`
|
||||
JSON struct {
|
||||
Harassment resp.Field
|
||||
HarassmentThreatening resp.Field
|
||||
Hate resp.Field
|
||||
HateThreatening resp.Field
|
||||
Illicit resp.Field
|
||||
IllicitViolent resp.Field
|
||||
SelfHarm resp.Field
|
||||
SelfHarmInstructions resp.Field
|
||||
SelfHarmIntent resp.Field
|
||||
Sexual resp.Field
|
||||
SexualMinors resp.Field
|
||||
Violence resp.Field
|
||||
ViolenceGraphic resp.Field
|
||||
raw string
|
||||
} `json:"-"`
|
||||
}
|
||||
|
||||
// moderationCategoryScoresJSON contains the JSON metadata for the struct
|
||||
// [ModerationCategoryScores]
|
||||
type moderationCategoryScoresJSON struct {
|
||||
Harassment apijson.Field
|
||||
HarassmentThreatening apijson.Field
|
||||
Hate apijson.Field
|
||||
HateThreatening apijson.Field
|
||||
Illicit apijson.Field
|
||||
IllicitViolent apijson.Field
|
||||
SelfHarm apijson.Field
|
||||
SelfHarmInstructions apijson.Field
|
||||
SelfHarmIntent apijson.Field
|
||||
Sexual apijson.Field
|
||||
SexualMinors apijson.Field
|
||||
Violence apijson.Field
|
||||
ViolenceGraphic apijson.Field
|
||||
raw string
|
||||
ExtraFields map[string]apijson.Field
|
||||
}
|
||||
|
||||
func (r *ModerationCategoryScores) UnmarshalJSON(data []byte) (err error) {
|
||||
func (r ModerationCategoryScores) RawJSON() string { return r.JSON.raw }
|
||||
func (r *ModerationCategoryScores) UnmarshalJSON(data []byte) error {
|
||||
return apijson.UnmarshalRoot(data, r)
|
||||
}
|
||||
|
||||
func (r moderationCategoryScoresJSON) RawJSON() string {
|
||||
return r.raw
|
||||
}
|
||||
|
||||
// An object describing an image to classify.
|
||||
type ModerationImageURLInputParam struct {
|
||||
// Contains either an image URL or a data URL for a base64 encoded image.
|
||||
ImageURL param.Field[ModerationImageURLInputImageURLParam] `json:"image_url,required"`
|
||||
ImageURL ModerationImageURLInputImageURLParam `json:"image_url,omitzero,required"`
|
||||
// Always `image_url`.
|
||||
Type param.Field[ModerationImageURLInputType] `json:"type,required"`
|
||||
//
|
||||
// This field can be elided, and will be automatically set as "image_url".
|
||||
Type constant.ImageURL `json:"type,required"`
|
||||
apiobject
|
||||
}
|
||||
|
||||
func (f ModerationImageURLInputParam) IsMissing() bool { return param.IsOmitted(f) || f.IsNull() }
|
||||
|
||||
func (r ModerationImageURLInputParam) MarshalJSON() (data []byte, err error) {
|
||||
return apijson.MarshalRoot(r)
|
||||
type shadow ModerationImageURLInputParam
|
||||
return param.MarshalObject(r, (*shadow)(&r))
|
||||
}
|
||||
|
||||
func (r ModerationImageURLInputParam) implementsModerationMultiModalInputUnionParam() {}
|
||||
|
||||
// Contains either an image URL or a data URL for a base64 encoded image.
|
||||
type ModerationImageURLInputImageURLParam struct {
|
||||
// Either a URL of the image or the base64 encoded image data.
|
||||
URL param.Field[string] `json:"url,required" format:"uri"`
|
||||
URL param.String `json:"url,omitzero,required" format:"uri"`
|
||||
apiobject
|
||||
}
|
||||
|
||||
func (f ModerationImageURLInputImageURLParam) IsMissing() bool {
|
||||
return param.IsOmitted(f) || f.IsNull()
|
||||
}
|
||||
|
||||
func (r ModerationImageURLInputImageURLParam) MarshalJSON() (data []byte, err error) {
|
||||
return apijson.MarshalRoot(r)
|
||||
}
|
||||
|
||||
// Always `image_url`.
|
||||
type ModerationImageURLInputType string
|
||||
|
||||
const (
|
||||
ModerationImageURLInputTypeImageURL ModerationImageURLInputType = "image_url"
|
||||
)
|
||||
|
||||
func (r ModerationImageURLInputType) IsKnown() bool {
|
||||
switch r {
|
||||
case ModerationImageURLInputTypeImageURL:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
type shadow ModerationImageURLInputImageURLParam
|
||||
return param.MarshalObject(r, (*shadow)(&r))
|
||||
}
|
||||
|
||||
type ModerationModel = string
|
||||
@@ -500,132 +362,124 @@ const (
|
||||
ModerationModelTextModerationStable ModerationModel = "text-moderation-stable"
|
||||
)
|
||||
|
||||
// An object describing an image to classify.
|
||||
type ModerationMultiModalInputParam struct {
|
||||
// Always `image_url`.
|
||||
Type param.Field[ModerationMultiModalInputType] `json:"type,required"`
|
||||
ImageURL param.Field[interface{}] `json:"image_url"`
|
||||
// A string of text to classify.
|
||||
Text param.Field[string] `json:"text"`
|
||||
func NewModerationMultiModalInputOfImageURL(imageURL ModerationImageURLInputImageURLParam) ModerationMultiModalInputUnionParam {
|
||||
var image_url ModerationImageURLInputParam
|
||||
image_url.ImageURL = imageURL
|
||||
return ModerationMultiModalInputUnionParam{OfImageURL: &image_url}
|
||||
}
|
||||
|
||||
func (r ModerationMultiModalInputParam) MarshalJSON() (data []byte, err error) {
|
||||
return apijson.MarshalRoot(r)
|
||||
func NewModerationMultiModalInputOfText(text string) ModerationMultiModalInputUnionParam {
|
||||
var variant ModerationTextInputParam
|
||||
variant.Text = newString(text)
|
||||
return ModerationMultiModalInputUnionParam{OfText: &variant}
|
||||
}
|
||||
|
||||
func (r ModerationMultiModalInputParam) implementsModerationMultiModalInputUnionParam() {}
|
||||
|
||||
// An object describing an image to classify.
|
||||
//
|
||||
// Satisfied by [ModerationImageURLInputParam], [ModerationTextInputParam],
|
||||
// [ModerationMultiModalInputParam].
|
||||
type ModerationMultiModalInputUnionParam interface {
|
||||
implementsModerationMultiModalInputUnionParam()
|
||||
// Only one field can be non-zero
|
||||
type ModerationMultiModalInputUnionParam struct {
|
||||
OfImageURL *ModerationImageURLInputParam
|
||||
OfText *ModerationTextInputParam
|
||||
apiunion
|
||||
}
|
||||
|
||||
// Always `image_url`.
|
||||
type ModerationMultiModalInputType string
|
||||
func (u ModerationMultiModalInputUnionParam) IsMissing() bool {
|
||||
return param.IsOmitted(u) || u.IsNull()
|
||||
}
|
||||
|
||||
const (
|
||||
ModerationMultiModalInputTypeImageURL ModerationMultiModalInputType = "image_url"
|
||||
ModerationMultiModalInputTypeText ModerationMultiModalInputType = "text"
|
||||
)
|
||||
func (u ModerationMultiModalInputUnionParam) MarshalJSON() ([]byte, error) {
|
||||
return param.MarshalUnion[ModerationMultiModalInputUnionParam](u.OfImageURL, u.OfText)
|
||||
}
|
||||
|
||||
func (r ModerationMultiModalInputType) IsKnown() bool {
|
||||
switch r {
|
||||
case ModerationMultiModalInputTypeImageURL, ModerationMultiModalInputTypeText:
|
||||
return true
|
||||
func (u ModerationMultiModalInputUnionParam) GetImageURL() *ModerationImageURLInputImageURLParam {
|
||||
if vt := u.OfImageURL; vt != nil {
|
||||
return &vt.ImageURL
|
||||
}
|
||||
return false
|
||||
return nil
|
||||
}
|
||||
|
||||
func (u ModerationMultiModalInputUnionParam) GetText() *string {
|
||||
if vt := u.OfText; vt != nil && !vt.Text.IsOmitted() {
|
||||
return &vt.Text.V
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (u ModerationMultiModalInputUnionParam) GetType() *string {
|
||||
if vt := u.OfImageURL; vt != nil {
|
||||
return (*string)(&vt.Type)
|
||||
} else if vt := u.OfText; vt != nil {
|
||||
return (*string)(&vt.Type)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// An object describing text to classify.
|
||||
type ModerationTextInputParam struct {
|
||||
// A string of text to classify.
|
||||
Text param.Field[string] `json:"text,required"`
|
||||
Text param.String `json:"text,omitzero,required"`
|
||||
// Always `text`.
|
||||
Type param.Field[ModerationTextInputType] `json:"type,required"`
|
||||
//
|
||||
// This field can be elided, and will be automatically set as "text".
|
||||
Type constant.Text `json:"type,required"`
|
||||
apiobject
|
||||
}
|
||||
|
||||
func (f ModerationTextInputParam) IsMissing() bool { return param.IsOmitted(f) || f.IsNull() }
|
||||
|
||||
func (r ModerationTextInputParam) MarshalJSON() (data []byte, err error) {
|
||||
return apijson.MarshalRoot(r)
|
||||
}
|
||||
|
||||
func (r ModerationTextInputParam) implementsModerationMultiModalInputUnionParam() {}
|
||||
|
||||
// Always `text`.
|
||||
type ModerationTextInputType string
|
||||
|
||||
const (
|
||||
ModerationTextInputTypeText ModerationTextInputType = "text"
|
||||
)
|
||||
|
||||
func (r ModerationTextInputType) IsKnown() bool {
|
||||
switch r {
|
||||
case ModerationTextInputTypeText:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
type shadow ModerationTextInputParam
|
||||
return param.MarshalObject(r, (*shadow)(&r))
|
||||
}
|
||||
|
||||
// Represents if a given text input is potentially harmful.
|
||||
type ModerationNewResponse struct {
|
||||
// The unique identifier for the moderation request.
|
||||
ID string `json:"id,required"`
|
||||
ID string `json:"id,omitzero,required"`
|
||||
// The model used to generate the moderation results.
|
||||
Model string `json:"model,required"`
|
||||
Model string `json:"model,omitzero,required"`
|
||||
// A list of moderation objects.
|
||||
Results []Moderation `json:"results,required"`
|
||||
JSON moderationNewResponseJSON `json:"-"`
|
||||
Results []Moderation `json:"results,omitzero,required"`
|
||||
JSON struct {
|
||||
ID resp.Field
|
||||
Model resp.Field
|
||||
Results resp.Field
|
||||
raw string
|
||||
} `json:"-"`
|
||||
}
|
||||
|
||||
// moderationNewResponseJSON contains the JSON metadata for the struct
|
||||
// [ModerationNewResponse]
|
||||
type moderationNewResponseJSON struct {
|
||||
ID apijson.Field
|
||||
Model apijson.Field
|
||||
Results apijson.Field
|
||||
raw string
|
||||
ExtraFields map[string]apijson.Field
|
||||
}
|
||||
|
||||
func (r *ModerationNewResponse) UnmarshalJSON(data []byte) (err error) {
|
||||
func (r ModerationNewResponse) RawJSON() string { return r.JSON.raw }
|
||||
func (r *ModerationNewResponse) UnmarshalJSON(data []byte) error {
|
||||
return apijson.UnmarshalRoot(data, r)
|
||||
}
|
||||
|
||||
func (r moderationNewResponseJSON) RawJSON() string {
|
||||
return r.raw
|
||||
}
|
||||
|
||||
type ModerationNewParams struct {
|
||||
// Input (or inputs) to classify. Can be a single string, an array of strings, or
|
||||
// an array of multi-modal input objects similar to other models.
|
||||
Input param.Field[ModerationNewParamsInputUnion] `json:"input,required"`
|
||||
Input ModerationNewParamsInputUnion `json:"input,omitzero,required"`
|
||||
// The content moderation model you would like to use. Learn more in
|
||||
// [the moderation guide](https://platform.openai.com/docs/guides/moderation), and
|
||||
// learn about available models
|
||||
// [here](https://platform.openai.com/docs/models#moderation).
|
||||
Model param.Field[ModerationModel] `json:"model"`
|
||||
Model ModerationModel `json:"model,omitzero"`
|
||||
apiobject
|
||||
}
|
||||
|
||||
func (f ModerationNewParams) IsMissing() bool { return param.IsOmitted(f) || f.IsNull() }
|
||||
|
||||
func (r ModerationNewParams) MarshalJSON() (data []byte, err error) {
|
||||
return apijson.MarshalRoot(r)
|
||||
type shadow ModerationNewParams
|
||||
return param.MarshalObject(r, (*shadow)(&r))
|
||||
}
|
||||
|
||||
// Input (or inputs) to classify. Can be a single string, an array of strings, or
|
||||
// an array of multi-modal input objects similar to other models.
|
||||
//
|
||||
// Satisfied by [shared.UnionString], [ModerationNewParamsInputArray],
|
||||
// [ModerationNewParamsInputModerationMultiModalArray].
|
||||
type ModerationNewParamsInputUnion interface {
|
||||
ImplementsModerationNewParamsInputUnion()
|
||||
// Only one field can be non-zero
|
||||
type ModerationNewParamsInputUnion struct {
|
||||
OfString param.String
|
||||
OfModerationNewsInputArray []string
|
||||
OfModerationMultiModalArray []ModerationMultiModalInputUnionParam
|
||||
apiunion
|
||||
}
|
||||
|
||||
type ModerationNewParamsInputArray []string
|
||||
func (u ModerationNewParamsInputUnion) IsMissing() bool { return param.IsOmitted(u) || u.IsNull() }
|
||||
|
||||
func (r ModerationNewParamsInputArray) ImplementsModerationNewParamsInputUnion() {}
|
||||
|
||||
type ModerationNewParamsInputModerationMultiModalArray []ModerationMultiModalInputUnionParam
|
||||
|
||||
func (r ModerationNewParamsInputModerationMultiModalArray) ImplementsModerationNewParamsInputUnion() {
|
||||
func (u ModerationNewParamsInputUnion) MarshalJSON() ([]byte, error) {
|
||||
return param.MarshalUnion[ModerationNewParamsInputUnion](u.OfString, u.OfModerationNewsInputArray, u.OfModerationMultiModalArray)
|
||||
}
|
||||
|
||||
@@ -11,7 +11,6 @@ import (
|
||||
"github.com/openai/openai-go"
|
||||
"github.com/openai/openai-go/internal/testutil"
|
||||
"github.com/openai/openai-go/option"
|
||||
"github.com/openai/openai-go/shared"
|
||||
)
|
||||
|
||||
func TestModerationNewWithOptionalParams(t *testing.T) {
|
||||
@@ -27,8 +26,10 @@ func TestModerationNewWithOptionalParams(t *testing.T) {
|
||||
option.WithAPIKey("My API Key"),
|
||||
)
|
||||
_, err := client.Moderations.New(context.TODO(), openai.ModerationNewParams{
|
||||
Input: openai.F[openai.ModerationNewParamsInputUnion](shared.UnionString("I want to kill them.")),
|
||||
Model: openai.F(openai.ModerationModelOmniModerationLatest),
|
||||
Input: openai.ModerationNewParamsInputUnion{
|
||||
OfString: openai.String("I want to kill them."),
|
||||
},
|
||||
Model: openai.ModerationModelOmniModerationLatest,
|
||||
})
|
||||
if err != nil {
|
||||
var apierr *openai.Error
|
||||
|
||||
@@ -9,32 +9,33 @@ import (
|
||||
"github.com/openai/openai-go/internal/apijson"
|
||||
"github.com/openai/openai-go/internal/requestconfig"
|
||||
"github.com/openai/openai-go/option"
|
||||
"github.com/openai/openai-go/packages/param"
|
||||
"github.com/openai/openai-go/packages/resp"
|
||||
)
|
||||
|
||||
// aliased to make param.APIUnion private when embedding
|
||||
type apiunion = param.APIUnion
|
||||
|
||||
// aliased to make param.APIObject private when embedding
|
||||
type apiobject = param.APIObject
|
||||
|
||||
type Page[T any] struct {
|
||||
Data []T `json:"data"`
|
||||
Object string `json:"object,required"`
|
||||
JSON pageJSON `json:"-"`
|
||||
cfg *requestconfig.RequestConfig
|
||||
res *http.Response
|
||||
Data []T `json:"data,omitzero"`
|
||||
Object string `json:"object,omitzero,required"`
|
||||
JSON struct {
|
||||
Data resp.Field
|
||||
Object resp.Field
|
||||
raw string
|
||||
} `json:"-"`
|
||||
cfg *requestconfig.RequestConfig
|
||||
res *http.Response
|
||||
}
|
||||
|
||||
// pageJSON contains the JSON metadata for the struct [Page[T]]
|
||||
type pageJSON struct {
|
||||
Data apijson.Field
|
||||
Object apijson.Field
|
||||
raw string
|
||||
ExtraFields map[string]apijson.Field
|
||||
}
|
||||
|
||||
func (r *Page[T]) UnmarshalJSON(data []byte) (err error) {
|
||||
func (r Page[T]) RawJSON() string { return r.JSON.raw }
|
||||
func (r *Page[T]) UnmarshalJSON(data []byte) error {
|
||||
return apijson.UnmarshalRoot(data, r)
|
||||
}
|
||||
|
||||
func (r pageJSON) RawJSON() string {
|
||||
return r.raw
|
||||
}
|
||||
|
||||
// GetNextPage returns the next page as defined by this pagination style. When
|
||||
// there is no next page, this function will return a 'nil' for the page value, but
|
||||
// will not return an error
|
||||
@@ -70,6 +71,7 @@ type PageAutoPager[T any] struct {
|
||||
idx int
|
||||
run int
|
||||
err error
|
||||
apiobject
|
||||
}
|
||||
|
||||
func NewPageAutoPager[T any](page *Page[T], err error) *PageAutoPager[T] {
|
||||
@@ -109,29 +111,22 @@ func (r *PageAutoPager[T]) Index() int {
|
||||
}
|
||||
|
||||
type CursorPage[T any] struct {
|
||||
Data []T `json:"data"`
|
||||
HasMore bool `json:"has_more"`
|
||||
JSON cursorPageJSON `json:"-"`
|
||||
cfg *requestconfig.RequestConfig
|
||||
res *http.Response
|
||||
Data []T `json:"data,omitzero"`
|
||||
HasMore bool `json:"has_more,omitzero"`
|
||||
JSON struct {
|
||||
Data resp.Field
|
||||
HasMore resp.Field
|
||||
raw string
|
||||
} `json:"-"`
|
||||
cfg *requestconfig.RequestConfig
|
||||
res *http.Response
|
||||
}
|
||||
|
||||
// cursorPageJSON contains the JSON metadata for the struct [CursorPage[T]]
|
||||
type cursorPageJSON struct {
|
||||
Data apijson.Field
|
||||
HasMore apijson.Field
|
||||
raw string
|
||||
ExtraFields map[string]apijson.Field
|
||||
}
|
||||
|
||||
func (r *CursorPage[T]) UnmarshalJSON(data []byte) (err error) {
|
||||
func (r CursorPage[T]) RawJSON() string { return r.JSON.raw }
|
||||
func (r *CursorPage[T]) UnmarshalJSON(data []byte) error {
|
||||
return apijson.UnmarshalRoot(data, r)
|
||||
}
|
||||
|
||||
func (r cursorPageJSON) RawJSON() string {
|
||||
return r.raw
|
||||
}
|
||||
|
||||
// GetNextPage returns the next page as defined by this pagination style. When
|
||||
// there is no next page, this function will return a 'nil' for the page value, but
|
||||
// will not return an error
|
||||
@@ -172,6 +167,7 @@ type CursorPageAutoPager[T any] struct {
|
||||
idx int
|
||||
run int
|
||||
err error
|
||||
apiobject
|
||||
}
|
||||
|
||||
func NewCursorPageAutoPager[T any](page *CursorPage[T], err error) *CursorPageAutoPager[T] {
|
||||
|
||||
115
packages/param/encoder.go
Normal file
115
packages/param/encoder.go
Normal file
@@ -0,0 +1,115 @@
|
||||
package param
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"reflect"
|
||||
|
||||
"github.com/openai/openai-go/internal/apifield"
|
||||
shimjson "github.com/openai/openai-go/internal/encoding/json"
|
||||
)
|
||||
|
||||
// This uses a shimmed 'encoding/json' from Go 1.24, to support the 'omitzero' tag
|
||||
func MarshalObject[T ObjectFielder](f T, underlying any) ([]byte, error) {
|
||||
if f.IsNull() {
|
||||
return []byte("null"), nil
|
||||
} else if ovr, ok := f.IsOverridden(); ok {
|
||||
// TODO(v2): handle if ovr.(ExtraFields)
|
||||
return shimjson.Marshal(ovr)
|
||||
} else {
|
||||
return shimjson.Marshal(underlying)
|
||||
}
|
||||
}
|
||||
|
||||
// This uses a shimmed 'encoding/json' from Go 1.24, to support the 'omitzero' tag
|
||||
func MarshalUnion[T any](variants ...any) ([]byte, error) {
|
||||
nPresent := 0
|
||||
idx := -1
|
||||
for i, variant := range variants {
|
||||
if !IsOmitted(variant) {
|
||||
nPresent++
|
||||
idx = i
|
||||
}
|
||||
}
|
||||
if nPresent == 0 || idx == -1 {
|
||||
return []byte(`{}`), nil
|
||||
} else if nPresent > 1 {
|
||||
return nil, &json.MarshalerError{
|
||||
Type: reflect.TypeOf((*T)(nil)).Elem(),
|
||||
Err: fmt.Errorf("expected union to have one present variant, got %d", nPresent),
|
||||
}
|
||||
}
|
||||
return shimjson.Marshal(variants[idx])
|
||||
}
|
||||
|
||||
// This uses a shimmed stdlib 'encoding/json' from Go 1.24, to support omitzero
|
||||
func marshalField[T interface {
|
||||
Fielder
|
||||
IsOmitted() bool
|
||||
}](f T, happyPath any) ([]byte, error) {
|
||||
if f.IsNull() {
|
||||
return []byte("null"), nil
|
||||
} else if ovr, ok := f.IsOverridden(); ok {
|
||||
return shimjson.Marshal(ovr)
|
||||
} else {
|
||||
return shimjson.Marshal(happyPath)
|
||||
}
|
||||
}
|
||||
|
||||
// This uses a shimmed 'encoding/json' from Go 1.24, to support the 'omitzero' tag
|
||||
func unmarshalField[T any](underlying *T, meta *metadata, data []byte) error {
|
||||
if string(data) == `null` {
|
||||
meta.setMetadata(apifield.ExplicitNull{})
|
||||
return nil
|
||||
}
|
||||
if err := shimjson.Unmarshal(data, &underlying); err != nil {
|
||||
meta.setMetadata(apifield.ResponseData(data))
|
||||
return err
|
||||
}
|
||||
meta.setMetadata(apifield.NeverOmitted{})
|
||||
return nil
|
||||
}
|
||||
|
||||
func stringifyField[T Fielder](f T, fallback any) string {
|
||||
if f.IsNull() {
|
||||
return "null"
|
||||
}
|
||||
if v, ok := f.IsOverridden(); ok {
|
||||
return fmt.Sprintf("%v", v)
|
||||
}
|
||||
return fmt.Sprintf("%v", fallback)
|
||||
}
|
||||
|
||||
// shimmed from Go 1.23 "reflect" package
|
||||
func TypeFor[T any]() reflect.Type {
|
||||
var v T
|
||||
if t := reflect.TypeOf(v); t != nil {
|
||||
return t // optimize for T being a non-interface kind
|
||||
}
|
||||
return reflect.TypeOf((*T)(nil)).Elem() // only for an interface kind
|
||||
}
|
||||
|
||||
var richStringType = TypeFor[String]()
|
||||
var richIntType = TypeFor[Int]()
|
||||
var richFloatType = TypeFor[Float]()
|
||||
var richBoolType = TypeFor[Bool]()
|
||||
var richDateType = TypeFor[Date]()
|
||||
var richDatetimeType = TypeFor[Datetime]()
|
||||
|
||||
// indexOfUnderlyingValueField must only be called at initialization time
|
||||
func indexOfUnderlyingValueField(t reflect.Type) []int {
|
||||
field, ok := t.FieldByName("V")
|
||||
if !ok {
|
||||
panic("unreachable: initialization issue, underlying value field not found")
|
||||
}
|
||||
return field.Index
|
||||
}
|
||||
|
||||
var RichPrimitiveTypes = map[reflect.Type][]int{
|
||||
richStringType: indexOfUnderlyingValueField(richStringType),
|
||||
richIntType: indexOfUnderlyingValueField(richIntType),
|
||||
richFloatType: indexOfUnderlyingValueField(richFloatType),
|
||||
richBoolType: indexOfUnderlyingValueField(richBoolType),
|
||||
richDateType: indexOfUnderlyingValueField(richDateType),
|
||||
richDatetimeType: indexOfUnderlyingValueField(richDatetimeType),
|
||||
}
|
||||
84
packages/param/encoder_test.go
Normal file
84
packages/param/encoder_test.go
Normal file
@@ -0,0 +1,84 @@
|
||||
package param_test
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/openai/openai-go/packages/param"
|
||||
)
|
||||
|
||||
type Struct struct {
|
||||
A string `json:"a,omitzero"`
|
||||
B int64 `json:"b,omitzero"`
|
||||
param.APIObject
|
||||
}
|
||||
|
||||
func (r Struct) MarshalJSON() (data []byte, err error) {
|
||||
type shadow Struct
|
||||
return param.MarshalObject(r, (*shadow)(&r))
|
||||
}
|
||||
|
||||
type FieldStruct struct {
|
||||
A param.String `json:"a,omitzero"`
|
||||
B param.Int `json:"b,omitzero"`
|
||||
C Struct `json:"c,omitzero"`
|
||||
D param.Date `json:"d,omitzero"`
|
||||
E param.Datetime `json:"e,omitzero"`
|
||||
F param.Int `json:"f,omitzero"`
|
||||
param.APIObject
|
||||
}
|
||||
|
||||
func (r FieldStruct) MarshalJSON() (data []byte, err error) {
|
||||
type shadow FieldStruct
|
||||
return param.MarshalObject(r, (*shadow)(&r))
|
||||
}
|
||||
|
||||
func TestFieldMarshal(t *testing.T) {
|
||||
tests := map[string]struct {
|
||||
value interface{}
|
||||
expected string
|
||||
}{
|
||||
"null_string": {param.Null[param.String](), "null"},
|
||||
"null_int": {param.Null[param.Int](), "null"},
|
||||
"null_int64": {param.Null[param.Int](), "null"},
|
||||
"null_struct": {param.Null[Struct](), "null"},
|
||||
|
||||
"string": {param.String{V: "string"}, `"string"`},
|
||||
"int": {param.Int{V: 123}, "123"},
|
||||
"int64": {param.Int{V: int64(123456789123456789)}, "123456789123456789"},
|
||||
"struct": {Struct{A: "yo", B: 123}, `{"a":"yo","b":123}`},
|
||||
"date": {param.Date{V: time.Date(2023, time.March, 18, 14, 47, 38, 0, time.UTC)}, `"2023-03-18"`},
|
||||
"datetime": {
|
||||
param.Datetime{V: time.Date(2023, time.March, 18, 14, 47, 38, 0, time.UTC)},
|
||||
`"2023-03-18T14:47:38Z"`,
|
||||
},
|
||||
|
||||
"string_raw": {param.Override[param.Int]("string"), `"string"`},
|
||||
"int_raw": {param.Override[param.Int](123), "123"},
|
||||
"int64_raw": {param.Override[param.Int](int64(123456789123456789)), "123456789123456789"},
|
||||
"struct_raw": {param.Override[param.Int](Struct{A: "yo", B: 123}), `{"a":"yo","b":123}`},
|
||||
|
||||
"param_struct": {
|
||||
FieldStruct{
|
||||
A: param.String{V: "hello"},
|
||||
B: param.Int{V: int64(12)},
|
||||
D: param.Date{V: time.Date(2023, time.March, 18, 14, 47, 38, 0, time.UTC)},
|
||||
E: param.Datetime{V: time.Date(2023, time.March, 18, 14, 47, 38, 0, time.UTC)},
|
||||
},
|
||||
`{"a":"hello","b":12,"d":"2023-03-18","e":"2023-03-18T14:47:38Z"}`,
|
||||
},
|
||||
}
|
||||
|
||||
for name, test := range tests {
|
||||
t.Run(name, func(t *testing.T) {
|
||||
b, err := json.Marshal(test.value)
|
||||
if err != nil {
|
||||
t.Fatalf("didn't expect error %v, expected %s", err, test.expected)
|
||||
}
|
||||
if string(b) != test.expected {
|
||||
t.Fatalf("expected %s, received %s", test.expected, string(b))
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
9
packages/param/metadata.go
Normal file
9
packages/param/metadata.go
Normal file
@@ -0,0 +1,9 @@
|
||||
package param
|
||||
|
||||
import "encoding/json"
|
||||
|
||||
type MetadataProvider interface {
|
||||
IsNull() bool
|
||||
RawResponse() json.RawMessage
|
||||
IsOverridden() (any, bool)
|
||||
}
|
||||
212
packages/param/param.go
Normal file
212
packages/param/param.go
Normal file
@@ -0,0 +1,212 @@
|
||||
package param
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"github.com/openai/openai-go/internal/apifield"
|
||||
"reflect"
|
||||
"time"
|
||||
)
|
||||
|
||||
func IsOmitted(v any) bool {
|
||||
if i, ok := v.(interface{ IsOmitted() bool }); ok {
|
||||
return i.IsOmitted()
|
||||
}
|
||||
return reflect.ValueOf(v).IsZero()
|
||||
}
|
||||
|
||||
type Fielder interface {
|
||||
IsNull() bool
|
||||
IsOverridden() (any, bool)
|
||||
RawResponse() json.RawMessage
|
||||
}
|
||||
|
||||
type ObjectFielder interface {
|
||||
Fielder
|
||||
IsFieldNull(string) bool
|
||||
IsFieldOverridden(string) (any, bool)
|
||||
IsFieldRawResponse(string) json.RawMessage
|
||||
}
|
||||
|
||||
// This pattern allows mutable generics, no code should require that this type be provided.
|
||||
type SettableFielder[T Fielder] interface {
|
||||
setMetadata(MetadataProvider)
|
||||
*T
|
||||
}
|
||||
|
||||
// Override the field with a custom json value, the v parameter uses
|
||||
// the same semantics as json.Marshal from encoding/json.
|
||||
//
|
||||
// The SettableFielder type parameter should never be provided, it is always inferred.
|
||||
//
|
||||
// var f param.String = param.Override[param.String](12)
|
||||
// json.Marshal(f) == `12`
|
||||
func Override[T Fielder, PT SettableFielder[T]](v any) T {
|
||||
var x T
|
||||
PT(&x).setMetadata(apifield.CustomValue{Override: v})
|
||||
return x
|
||||
}
|
||||
|
||||
// Set the field to null
|
||||
//
|
||||
// The SettableFielder type parameter should never be provided, it is always inferred.
|
||||
//
|
||||
// var f param.String = param.Null[param.String]()
|
||||
// json.Marshal(f) == `null`
|
||||
func Null[T Fielder, PT SettableFielder[T]]() T {
|
||||
var x T
|
||||
PT(&x).setMetadata(apifield.ExplicitNull{})
|
||||
return x
|
||||
}
|
||||
|
||||
// Constructs a field whose zero value is never omitted.
|
||||
// This is useful for internal code, there are other preferred ways to construct non-omitted fields.
|
||||
func NeverOmitted[T Fielder, PT SettableFielder[T]]() T {
|
||||
var x T
|
||||
PT(&x).setMetadata(apifield.NeverOmitted{})
|
||||
return x
|
||||
}
|
||||
|
||||
// MarshalExtraField adds an additional field to be set with custom json. The v parameter
|
||||
// uses the same semantics as json.Marshal from encoding/json.
|
||||
// If any native field with a matching json field name will be zeroed and omitted.
|
||||
// func InsertFields(obj MutableFieldLike, k string, v any) {}
|
||||
|
||||
// UnmarshalExtraField accesses an extra field and unmarshals the result in the value pointed to by v.
|
||||
// UnmarshalExtraField uses similar semantics to json.Unmarshal from encoding/json. However,
|
||||
// if v is nil or not a pointer, or the extra field cannot be unmarshaled into the the value v,
|
||||
// UnmarshalExtraField returns false.
|
||||
// func GetField(obj ObjectFieldLike, k string, v any) (exists bool) {
|
||||
// return apifield.UnmarshalExtraField(obj, k, v)
|
||||
// }
|
||||
|
||||
type String struct {
|
||||
V string
|
||||
metadata
|
||||
}
|
||||
|
||||
type Int struct {
|
||||
V int64
|
||||
metadata
|
||||
}
|
||||
|
||||
type Bool struct {
|
||||
V bool
|
||||
metadata
|
||||
}
|
||||
|
||||
type Float struct {
|
||||
V float64
|
||||
metadata
|
||||
}
|
||||
|
||||
type Datetime struct {
|
||||
V time.Time
|
||||
metadata
|
||||
}
|
||||
|
||||
type Date struct {
|
||||
V time.Time
|
||||
metadata
|
||||
}
|
||||
|
||||
// Either null or omitted
|
||||
func (f String) IsMissing() bool { return IsOmitted(f) || f.IsNull() }
|
||||
func (f String) IsOmitted() bool { return f == String{} }
|
||||
func (f String) MarshalJSON() ([]byte, error) { return marshalField(f, f.V) }
|
||||
func (f String) UnmarshalJSON(data []byte) error { return unmarshalField(&f.V, &f.metadata, data) }
|
||||
func (f String) String() string { return stringifyField(f, f.V) }
|
||||
|
||||
// Either null or omitted
|
||||
func (f Int) IsMissing() bool { return f.IsOmitted() || f.IsNull() }
|
||||
func (f Int) IsOmitted() bool { return f == Int{} }
|
||||
func (f Int) MarshalJSON() ([]byte, error) { return marshalField(f, f.V) }
|
||||
func (f Int) UnmarshalJSON(data []byte) error { return unmarshalField(&f.V, &f.metadata, data) }
|
||||
func (f Int) String() string { return stringifyField(f, f.V) }
|
||||
|
||||
// Either null or omitted
|
||||
func (f Bool) IsMissing() bool { return f.IsOmitted() || f.IsNull() }
|
||||
func (f Bool) IsOmitted() bool { return f == Bool{} }
|
||||
func (f Bool) MarshalJSON() ([]byte, error) { return marshalField(f, f.V) }
|
||||
func (f Bool) UnmarshalJSON(data []byte) error { return unmarshalField(&f.V, &f.metadata, data) }
|
||||
func (f Bool) String() string { return stringifyField(f, f.V) }
|
||||
|
||||
// Either null or omitted
|
||||
func (f Float) IsMissing() bool { return f.IsOmitted() || f.IsNull() }
|
||||
func (f Float) IsOmitted() bool { return f == Float{} }
|
||||
func (f Float) MarshalJSON() ([]byte, error) { return marshalField(f, f.V) }
|
||||
func (f Float) UnmarshalJSON(data []byte) error { return unmarshalField(&f.V, &f.metadata, data) }
|
||||
func (f Float) String() string { return stringifyField(f, f.V) }
|
||||
|
||||
// Either null or omitted
|
||||
func (f Datetime) IsMissing() bool { return f.IsOmitted() || f.IsNull() }
|
||||
func (f Datetime) IsOmitted() bool { return f == Datetime{} }
|
||||
func (f Datetime) MarshalJSON() ([]byte, error) { return marshalField(f, f.V.Format(time.RFC3339)) }
|
||||
func (f Datetime) UnmarshalJSON(data []byte) error { return unmarshalField(&f.V, &f.metadata, data) }
|
||||
func (f Datetime) String() string { return stringifyField(f, f.V.Format(time.RFC3339)) }
|
||||
|
||||
// Either null or omitted
|
||||
func (f Date) IsMissing() bool { return f.IsOmitted() || f.IsNull() }
|
||||
func (f Date) IsOmitted() bool { return f == Date{} }
|
||||
func (f Date) MarshalJSON() ([]byte, error) { return marshalField(f, f.V.Format("2006-01-02")) }
|
||||
func (f Date) UnmarshalJSON(data []byte) error { return unmarshalField(&f.V, &f.metadata, data) }
|
||||
func (f Date) String() string { return stringifyField(f, f.V.Format("2006-01-02")) }
|
||||
|
||||
// APIObject should be embedded in api object fields, preferably using an alias to make private
|
||||
type APIObject struct {
|
||||
metadata
|
||||
}
|
||||
|
||||
// APIUnion should be embedded in all api unions fields, preferably using an alias to make private
|
||||
type APIUnion struct {
|
||||
metadata
|
||||
}
|
||||
|
||||
func (o APIObject) IsFieldNull(string) bool { return false }
|
||||
func (o APIObject) IsFieldOverridden(string) (any, bool) { return nil, false }
|
||||
func (o APIObject) IsFieldRawResponse(string) json.RawMessage { return nil }
|
||||
|
||||
type metadata struct {
|
||||
// provider is an interface used to determine the status of the field.
|
||||
// As an optimization, we expect certain concrete types.
|
||||
//
|
||||
// While there are simpler ways to implement metadata, the primary incentive here is to
|
||||
// minimize the bytes in the struct, since it will be embedded in every field.
|
||||
provider MetadataProvider
|
||||
}
|
||||
|
||||
func (m metadata) IsNull() bool {
|
||||
if m.provider == nil {
|
||||
return false
|
||||
}
|
||||
|
||||
// avoid dynamic dispatch call for the most common cases
|
||||
if _, ok := m.provider.(apifield.NeverOmitted); ok {
|
||||
return false
|
||||
} else if _, ok := m.provider.(apifield.ExplicitNull); ok {
|
||||
return true
|
||||
}
|
||||
|
||||
return m.provider.IsNull()
|
||||
}
|
||||
|
||||
func (m metadata) RawResponse() json.RawMessage {
|
||||
if m.provider == nil {
|
||||
return nil
|
||||
}
|
||||
// avoid dynamic dispatch call for the most common case
|
||||
if r, ok := m.provider.(apifield.ResponseData); ok {
|
||||
return json.RawMessage(r)
|
||||
}
|
||||
return m.provider.RawResponse()
|
||||
}
|
||||
|
||||
func (m metadata) IsOverridden() (any, bool) {
|
||||
if m.provider == nil {
|
||||
return nil, false
|
||||
}
|
||||
return m.provider.IsOverridden()
|
||||
}
|
||||
|
||||
func (m *metadata) setMetadata(mp MetadataProvider) {
|
||||
m.provider = mp
|
||||
}
|
||||
47
packages/resp/resp.go
Normal file
47
packages/resp/resp.go
Normal file
@@ -0,0 +1,47 @@
|
||||
package resp
|
||||
|
||||
type Field struct {
|
||||
// This implementation has more complexity than necessary, but it keeps the Field
|
||||
// object as small as possible, which helps when repeated often.
|
||||
f *field
|
||||
}
|
||||
|
||||
type field struct {
|
||||
status
|
||||
raw string
|
||||
}
|
||||
|
||||
const (
|
||||
valid = iota
|
||||
invalid
|
||||
)
|
||||
|
||||
type status int8
|
||||
|
||||
var fnull = field{raw: "null"}
|
||||
var fmissing = field{}
|
||||
|
||||
// Returns true if the field is explicitly `null` _or_ if it is not present at all (ie, missing).
|
||||
// To check if the field's key is present in the JSON with an explicit null value,
|
||||
// you must check `f.IsNull() && !f.IsMissing()`.
|
||||
func (j Field) IsNull() bool { return j.f == &fnull }
|
||||
func (j Field) IsMissing() bool { return j.f == nil }
|
||||
func (j Field) IsInvalid() bool { return j.f != nil && j.f.status == invalid }
|
||||
func (j Field) Raw() string {
|
||||
if j.f == nil {
|
||||
return ""
|
||||
}
|
||||
return j.f.raw
|
||||
}
|
||||
|
||||
func NewValidField(raw string) Field {
|
||||
return Field{f: &field{raw: string(raw), status: valid}}
|
||||
}
|
||||
|
||||
func NewNullField() Field {
|
||||
return Field{f: &fnull}
|
||||
}
|
||||
|
||||
func NewInvalidField(raw string) Field {
|
||||
return Field{f: &field{status: invalid, raw: string(raw)}}
|
||||
}
|
||||
@@ -25,7 +25,7 @@ func TestAutoPagination(t *testing.T) {
|
||||
option.WithAPIKey("My API Key"),
|
||||
)
|
||||
iter := client.FineTuning.Jobs.ListAutoPaging(context.TODO(), openai.FineTuningJobListParams{
|
||||
Limit: openai.F(int64(20)),
|
||||
Limit: openai.Int(20),
|
||||
})
|
||||
// Prism mock isn't going to give us real pagination
|
||||
for i := 0; i < 3 && iter.Next(); i++ {
|
||||
|
||||
@@ -25,7 +25,7 @@ func TestManualPagination(t *testing.T) {
|
||||
option.WithAPIKey("My API Key"),
|
||||
)
|
||||
page, err := client.FineTuning.Jobs.List(context.TODO(), openai.FineTuningJobListParams{
|
||||
Limit: openai.F(int64(20)),
|
||||
Limit: openai.Int(20),
|
||||
})
|
||||
if err != nil {
|
||||
t.Fatalf("err should be nil: %s", err.Error())
|
||||
|
||||
282
shared/constant/constants.go
Normal file
282
shared/constant/constants.go
Normal file
@@ -0,0 +1,282 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
|
||||
package constant
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
)
|
||||
|
||||
type Constant[T any] interface {
|
||||
Default() T
|
||||
}
|
||||
|
||||
// ValueOf gives the default value of a constant from its type. It's helpful when
|
||||
// constructing constants as variants in a one-of. Note that empty structs are
|
||||
// marshalled by default. Usage: constant.ValueOf[constant.Foo]()
|
||||
func ValueOf[T Constant[T]]() T {
|
||||
var t T
|
||||
return t.Default()
|
||||
}
|
||||
|
||||
type Assistant string // Always "assistant"
|
||||
type AssistantDeleted string // Always "assistant.deleted"
|
||||
type Auto string // Always "auto"
|
||||
type Batch string // Always "batch"
|
||||
type ChatCompletion string // Always "chat.completion"
|
||||
type ChatCompletionChunk string // Always "chat.completion.chunk"
|
||||
type ChatCompletionDeleted string // Always "chat.completion.deleted"
|
||||
type CodeInterpreter string // Always "code_interpreter"
|
||||
type Content string // Always "content"
|
||||
type Default2024_08_21 string // Always "default_2024_08_21"
|
||||
type Developer string // Always "developer"
|
||||
type Embedding string // Always "embedding"
|
||||
type Error string // Always "error"
|
||||
type File string // Always "file"
|
||||
type FileCitation string // Always "file_citation"
|
||||
type FilePath string // Always "file_path"
|
||||
type FileSearch string // Always "file_search"
|
||||
type FineTuningJob string // Always "fine_tuning.job"
|
||||
type FineTuningJobCheckpoint string // Always "fine_tuning.job.checkpoint"
|
||||
type FineTuningJobEvent string // Always "fine_tuning.job.event"
|
||||
type Function string // Always "function"
|
||||
type Image string // Always "image"
|
||||
type ImageFile string // Always "image_file"
|
||||
type ImageURL string // Always "image_url"
|
||||
type InputAudio string // Always "input_audio"
|
||||
type JSONObject string // Always "json_object"
|
||||
type JSONSchema string // Always "json_schema"
|
||||
type LastActiveAt string // Always "last_active_at"
|
||||
type List string // Always "list"
|
||||
type Logs string // Always "logs"
|
||||
type MessageCreation string // Always "message_creation"
|
||||
type Model string // Always "model"
|
||||
type Other string // Always "other"
|
||||
type Refusal string // Always "refusal"
|
||||
type Static string // Always "static"
|
||||
type SubmitToolOutputs string // Always "submit_tool_outputs"
|
||||
type System string // Always "system"
|
||||
type Text string // Always "text"
|
||||
type TextCompletion string // Always "text_completion"
|
||||
type Thread string // Always "thread"
|
||||
type ThreadCreated string // Always "thread.created"
|
||||
type ThreadDeleted string // Always "thread.deleted"
|
||||
type ThreadMessage string // Always "thread.message"
|
||||
type ThreadMessageCompleted string // Always "thread.message.completed"
|
||||
type ThreadMessageCreated string // Always "thread.message.created"
|
||||
type ThreadMessageDeleted string // Always "thread.message.deleted"
|
||||
type ThreadMessageDelta string // Always "thread.message.delta"
|
||||
type ThreadMessageInProgress string // Always "thread.message.in_progress"
|
||||
type ThreadMessageIncomplete string // Always "thread.message.incomplete"
|
||||
type ThreadRun string // Always "thread.run"
|
||||
type ThreadRunCancelled string // Always "thread.run.cancelled"
|
||||
type ThreadRunCancelling string // Always "thread.run.cancelling"
|
||||
type ThreadRunCompleted string // Always "thread.run.completed"
|
||||
type ThreadRunCreated string // Always "thread.run.created"
|
||||
type ThreadRunExpired string // Always "thread.run.expired"
|
||||
type ThreadRunFailed string // Always "thread.run.failed"
|
||||
type ThreadRunInProgress string // Always "thread.run.in_progress"
|
||||
type ThreadRunIncomplete string // Always "thread.run.incomplete"
|
||||
type ThreadRunQueued string // Always "thread.run.queued"
|
||||
type ThreadRunRequiresAction string // Always "thread.run.requires_action"
|
||||
type ThreadRunStep string // Always "thread.run.step"
|
||||
type ThreadRunStepCancelled string // Always "thread.run.step.cancelled"
|
||||
type ThreadRunStepCompleted string // Always "thread.run.step.completed"
|
||||
type ThreadRunStepCreated string // Always "thread.run.step.created"
|
||||
type ThreadRunStepDelta string // Always "thread.run.step.delta"
|
||||
type ThreadRunStepExpired string // Always "thread.run.step.expired"
|
||||
type ThreadRunStepFailed string // Always "thread.run.step.failed"
|
||||
type ThreadRunStepInProgress string // Always "thread.run.step.in_progress"
|
||||
type Tool string // Always "tool"
|
||||
type ToolCalls string // Always "tool_calls"
|
||||
type Upload string // Always "upload"
|
||||
type UploadPart string // Always "upload.part"
|
||||
type User string // Always "user"
|
||||
type VectorStore string // Always "vector_store"
|
||||
type VectorStoreDeleted string // Always "vector_store.deleted"
|
||||
type VectorStoreFile string // Always "vector_store.file"
|
||||
type VectorStoreFileDeleted string // Always "vector_store.file.deleted"
|
||||
type VectorStoreFilesBatch string // Always "vector_store.files_batch"
|
||||
type Wandb string // Always "wandb"
|
||||
|
||||
func (c Assistant) Default() Assistant { return "assistant" }
|
||||
func (c AssistantDeleted) Default() AssistantDeleted { return "assistant.deleted" }
|
||||
func (c Auto) Default() Auto { return "auto" }
|
||||
func (c Batch) Default() Batch { return "batch" }
|
||||
func (c ChatCompletion) Default() ChatCompletion { return "chat.completion" }
|
||||
func (c ChatCompletionChunk) Default() ChatCompletionChunk { return "chat.completion.chunk" }
|
||||
func (c ChatCompletionDeleted) Default() ChatCompletionDeleted { return "chat.completion.deleted" }
|
||||
func (c CodeInterpreter) Default() CodeInterpreter { return "code_interpreter" }
|
||||
func (c Content) Default() Content { return "content" }
|
||||
func (c Default2024_08_21) Default() Default2024_08_21 { return "default_2024_08_21" }
|
||||
func (c Developer) Default() Developer { return "developer" }
|
||||
func (c Embedding) Default() Embedding { return "embedding" }
|
||||
func (c Error) Default() Error { return "error" }
|
||||
func (c File) Default() File { return "file" }
|
||||
func (c FileCitation) Default() FileCitation { return "file_citation" }
|
||||
func (c FilePath) Default() FilePath { return "file_path" }
|
||||
func (c FileSearch) Default() FileSearch { return "file_search" }
|
||||
func (c FineTuningJob) Default() FineTuningJob { return "fine_tuning.job" }
|
||||
func (c FineTuningJobCheckpoint) Default() FineTuningJobCheckpoint {
|
||||
return "fine_tuning.job.checkpoint"
|
||||
}
|
||||
func (c FineTuningJobEvent) Default() FineTuningJobEvent { return "fine_tuning.job.event" }
|
||||
func (c Function) Default() Function { return "function" }
|
||||
func (c Image) Default() Image { return "image" }
|
||||
func (c ImageFile) Default() ImageFile { return "image_file" }
|
||||
func (c ImageURL) Default() ImageURL { return "image_url" }
|
||||
func (c InputAudio) Default() InputAudio { return "input_audio" }
|
||||
func (c JSONObject) Default() JSONObject { return "json_object" }
|
||||
func (c JSONSchema) Default() JSONSchema { return "json_schema" }
|
||||
func (c LastActiveAt) Default() LastActiveAt { return "last_active_at" }
|
||||
func (c List) Default() List { return "list" }
|
||||
func (c Logs) Default() Logs { return "logs" }
|
||||
func (c MessageCreation) Default() MessageCreation { return "message_creation" }
|
||||
func (c Model) Default() Model { return "model" }
|
||||
func (c Other) Default() Other { return "other" }
|
||||
func (c Refusal) Default() Refusal { return "refusal" }
|
||||
func (c Static) Default() Static { return "static" }
|
||||
func (c SubmitToolOutputs) Default() SubmitToolOutputs { return "submit_tool_outputs" }
|
||||
func (c System) Default() System { return "system" }
|
||||
func (c Text) Default() Text { return "text" }
|
||||
func (c TextCompletion) Default() TextCompletion { return "text_completion" }
|
||||
func (c Thread) Default() Thread { return "thread" }
|
||||
func (c ThreadCreated) Default() ThreadCreated { return "thread.created" }
|
||||
func (c ThreadDeleted) Default() ThreadDeleted { return "thread.deleted" }
|
||||
func (c ThreadMessage) Default() ThreadMessage { return "thread.message" }
|
||||
func (c ThreadMessageCompleted) Default() ThreadMessageCompleted { return "thread.message.completed" }
|
||||
func (c ThreadMessageCreated) Default() ThreadMessageCreated { return "thread.message.created" }
|
||||
func (c ThreadMessageDeleted) Default() ThreadMessageDeleted { return "thread.message.deleted" }
|
||||
func (c ThreadMessageDelta) Default() ThreadMessageDelta { return "thread.message.delta" }
|
||||
func (c ThreadMessageInProgress) Default() ThreadMessageInProgress {
|
||||
return "thread.message.in_progress"
|
||||
}
|
||||
func (c ThreadMessageIncomplete) Default() ThreadMessageIncomplete {
|
||||
return "thread.message.incomplete"
|
||||
}
|
||||
func (c ThreadRun) Default() ThreadRun { return "thread.run" }
|
||||
func (c ThreadRunCancelled) Default() ThreadRunCancelled { return "thread.run.cancelled" }
|
||||
func (c ThreadRunCancelling) Default() ThreadRunCancelling { return "thread.run.cancelling" }
|
||||
func (c ThreadRunCompleted) Default() ThreadRunCompleted { return "thread.run.completed" }
|
||||
func (c ThreadRunCreated) Default() ThreadRunCreated { return "thread.run.created" }
|
||||
func (c ThreadRunExpired) Default() ThreadRunExpired { return "thread.run.expired" }
|
||||
func (c ThreadRunFailed) Default() ThreadRunFailed { return "thread.run.failed" }
|
||||
func (c ThreadRunInProgress) Default() ThreadRunInProgress { return "thread.run.in_progress" }
|
||||
func (c ThreadRunIncomplete) Default() ThreadRunIncomplete { return "thread.run.incomplete" }
|
||||
func (c ThreadRunQueued) Default() ThreadRunQueued { return "thread.run.queued" }
|
||||
func (c ThreadRunRequiresAction) Default() ThreadRunRequiresAction {
|
||||
return "thread.run.requires_action"
|
||||
}
|
||||
func (c ThreadRunStep) Default() ThreadRunStep { return "thread.run.step" }
|
||||
func (c ThreadRunStepCancelled) Default() ThreadRunStepCancelled { return "thread.run.step.cancelled" }
|
||||
func (c ThreadRunStepCompleted) Default() ThreadRunStepCompleted { return "thread.run.step.completed" }
|
||||
func (c ThreadRunStepCreated) Default() ThreadRunStepCreated { return "thread.run.step.created" }
|
||||
func (c ThreadRunStepDelta) Default() ThreadRunStepDelta { return "thread.run.step.delta" }
|
||||
func (c ThreadRunStepExpired) Default() ThreadRunStepExpired { return "thread.run.step.expired" }
|
||||
func (c ThreadRunStepFailed) Default() ThreadRunStepFailed { return "thread.run.step.failed" }
|
||||
func (c ThreadRunStepInProgress) Default() ThreadRunStepInProgress {
|
||||
return "thread.run.step.in_progress"
|
||||
}
|
||||
func (c Tool) Default() Tool { return "tool" }
|
||||
func (c ToolCalls) Default() ToolCalls { return "tool_calls" }
|
||||
func (c Upload) Default() Upload { return "upload" }
|
||||
func (c UploadPart) Default() UploadPart { return "upload.part" }
|
||||
func (c User) Default() User { return "user" }
|
||||
func (c VectorStore) Default() VectorStore { return "vector_store" }
|
||||
func (c VectorStoreDeleted) Default() VectorStoreDeleted { return "vector_store.deleted" }
|
||||
func (c VectorStoreFile) Default() VectorStoreFile { return "vector_store.file" }
|
||||
func (c VectorStoreFileDeleted) Default() VectorStoreFileDeleted { return "vector_store.file.deleted" }
|
||||
func (c VectorStoreFilesBatch) Default() VectorStoreFilesBatch { return "vector_store.files_batch" }
|
||||
func (c Wandb) Default() Wandb { return "wandb" }
|
||||
|
||||
func (c Assistant) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c AssistantDeleted) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c Auto) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c Batch) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c ChatCompletion) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c ChatCompletionChunk) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c ChatCompletionDeleted) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c CodeInterpreter) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c Content) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c Default2024_08_21) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c Developer) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c Embedding) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c Error) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c File) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c FileCitation) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c FilePath) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c FileSearch) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c FineTuningJob) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c FineTuningJobCheckpoint) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c FineTuningJobEvent) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c Function) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c Image) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c ImageFile) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c ImageURL) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c InputAudio) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c JSONObject) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c JSONSchema) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c LastActiveAt) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c List) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c Logs) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c MessageCreation) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c Model) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c Other) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c Refusal) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c Static) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c SubmitToolOutputs) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c System) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c Text) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c TextCompletion) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c Thread) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c ThreadCreated) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c ThreadDeleted) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c ThreadMessage) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c ThreadMessageCompleted) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c ThreadMessageCreated) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c ThreadMessageDeleted) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c ThreadMessageDelta) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c ThreadMessageInProgress) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c ThreadMessageIncomplete) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c ThreadRun) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c ThreadRunCancelled) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c ThreadRunCancelling) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c ThreadRunCompleted) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c ThreadRunCreated) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c ThreadRunExpired) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c ThreadRunFailed) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c ThreadRunInProgress) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c ThreadRunIncomplete) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c ThreadRunQueued) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c ThreadRunRequiresAction) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c ThreadRunStep) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c ThreadRunStepCancelled) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c ThreadRunStepCompleted) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c ThreadRunStepCreated) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c ThreadRunStepDelta) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c ThreadRunStepExpired) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c ThreadRunStepFailed) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c ThreadRunStepInProgress) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c Tool) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c ToolCalls) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c Upload) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c UploadPart) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c User) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c VectorStore) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c VectorStoreDeleted) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c VectorStoreFile) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c VectorStoreFileDeleted) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c VectorStoreFilesBatch) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
func (c Wandb) MarshalJSON() ([]byte, error) { return marshalString(c) }
|
||||
|
||||
type constant[T any] interface {
|
||||
Constant[T]
|
||||
*T
|
||||
}
|
||||
|
||||
func marshalString[T ~string, PT constant[T]](v T) ([]byte, error) {
|
||||
var zero T
|
||||
if v == zero {
|
||||
v = PT(&v).Default()
|
||||
}
|
||||
return json.Marshal(string(v))
|
||||
}
|
||||
195
shared/shared.go
195
shared/shared.go
@@ -4,42 +4,43 @@ package shared
|
||||
|
||||
import (
|
||||
"github.com/openai/openai-go/internal/apijson"
|
||||
"github.com/openai/openai-go/internal/param"
|
||||
"github.com/openai/openai-go/packages/param"
|
||||
"github.com/openai/openai-go/packages/resp"
|
||||
"github.com/openai/openai-go/shared/constant"
|
||||
)
|
||||
|
||||
// aliased to make param.APIUnion private when embedding
|
||||
type apiunion = param.APIUnion
|
||||
|
||||
// aliased to make param.APIObject private when embedding
|
||||
type apiobject = param.APIObject
|
||||
|
||||
type ErrorObject struct {
|
||||
Code string `json:"code,required,nullable"`
|
||||
Message string `json:"message,required"`
|
||||
Param string `json:"param,required,nullable"`
|
||||
Type string `json:"type,required"`
|
||||
JSON errorObjectJSON `json:"-"`
|
||||
Code string `json:"code,omitzero,required,nullable"`
|
||||
Message string `json:"message,omitzero,required"`
|
||||
Param string `json:"param,omitzero,required,nullable"`
|
||||
Type string `json:"type,omitzero,required"`
|
||||
JSON struct {
|
||||
Code resp.Field
|
||||
Message resp.Field
|
||||
Param resp.Field
|
||||
Type resp.Field
|
||||
raw string
|
||||
} `json:"-"`
|
||||
}
|
||||
|
||||
// errorObjectJSON contains the JSON metadata for the struct [ErrorObject]
|
||||
type errorObjectJSON struct {
|
||||
Code apijson.Field
|
||||
Message apijson.Field
|
||||
Param apijson.Field
|
||||
Type apijson.Field
|
||||
raw string
|
||||
ExtraFields map[string]apijson.Field
|
||||
}
|
||||
|
||||
func (r *ErrorObject) UnmarshalJSON(data []byte) (err error) {
|
||||
func (r ErrorObject) RawJSON() string { return r.JSON.raw }
|
||||
func (r *ErrorObject) UnmarshalJSON(data []byte) error {
|
||||
return apijson.UnmarshalRoot(data, r)
|
||||
}
|
||||
|
||||
func (r errorObjectJSON) RawJSON() string {
|
||||
return r.raw
|
||||
}
|
||||
|
||||
type FunctionDefinition struct {
|
||||
// The name of the function to be called. Must be a-z, A-Z, 0-9, or contain
|
||||
// underscores and dashes, with a maximum length of 64.
|
||||
Name string `json:"name,required"`
|
||||
Name string `json:"name,omitzero,required"`
|
||||
// A description of what the function does, used by the model to choose when and
|
||||
// how to call the function.
|
||||
Description string `json:"description"`
|
||||
Description string `json:"description,omitzero"`
|
||||
// The parameters the functions accepts, described as a JSON Schema object. See the
|
||||
// [guide](https://platform.openai.com/docs/guides/function-calling) for examples,
|
||||
// and the
|
||||
@@ -47,42 +48,43 @@ type FunctionDefinition struct {
|
||||
// documentation about the format.
|
||||
//
|
||||
// Omitting `parameters` defines a function with an empty parameter list.
|
||||
Parameters FunctionParameters `json:"parameters"`
|
||||
Parameters FunctionParameters `json:"parameters,omitzero"`
|
||||
// Whether to enable strict schema adherence when generating the function call. If
|
||||
// set to true, the model will follow the exact schema defined in the `parameters`
|
||||
// field. Only a subset of JSON Schema is supported when `strict` is `true`. Learn
|
||||
// more about Structured Outputs in the
|
||||
// [function calling guide](docs/guides/function-calling).
|
||||
Strict bool `json:"strict,nullable"`
|
||||
JSON functionDefinitionJSON `json:"-"`
|
||||
Strict bool `json:"strict,omitzero,nullable"`
|
||||
JSON struct {
|
||||
Name resp.Field
|
||||
Description resp.Field
|
||||
Parameters resp.Field
|
||||
Strict resp.Field
|
||||
raw string
|
||||
} `json:"-"`
|
||||
}
|
||||
|
||||
// functionDefinitionJSON contains the JSON metadata for the struct
|
||||
// [FunctionDefinition]
|
||||
type functionDefinitionJSON struct {
|
||||
Name apijson.Field
|
||||
Description apijson.Field
|
||||
Parameters apijson.Field
|
||||
Strict apijson.Field
|
||||
raw string
|
||||
ExtraFields map[string]apijson.Field
|
||||
}
|
||||
|
||||
func (r *FunctionDefinition) UnmarshalJSON(data []byte) (err error) {
|
||||
func (r FunctionDefinition) RawJSON() string { return r.JSON.raw }
|
||||
func (r *FunctionDefinition) UnmarshalJSON(data []byte) error {
|
||||
return apijson.UnmarshalRoot(data, r)
|
||||
}
|
||||
|
||||
func (r functionDefinitionJSON) RawJSON() string {
|
||||
return r.raw
|
||||
// ToParam converts this FunctionDefinition to a FunctionDefinitionParam.
|
||||
//
|
||||
// Warning: the fields of the param type will not be present. ToParam should only
|
||||
// be used at the last possible moment before sending a request. Test for this with
|
||||
// FunctionDefinitionParam.IsOverridden()
|
||||
func (r FunctionDefinition) ToParam() FunctionDefinitionParam {
|
||||
return param.Override[FunctionDefinitionParam](r.RawJSON())
|
||||
}
|
||||
|
||||
type FunctionDefinitionParam struct {
|
||||
// The name of the function to be called. Must be a-z, A-Z, 0-9, or contain
|
||||
// underscores and dashes, with a maximum length of 64.
|
||||
Name param.Field[string] `json:"name,required"`
|
||||
Name param.String `json:"name,omitzero,required"`
|
||||
// A description of what the function does, used by the model to choose when and
|
||||
// how to call the function.
|
||||
Description param.Field[string] `json:"description"`
|
||||
Description param.String `json:"description,omitzero"`
|
||||
// The parameters the functions accepts, described as a JSON Schema object. See the
|
||||
// [guide](https://platform.openai.com/docs/guides/function-calling) for examples,
|
||||
// and the
|
||||
@@ -90,17 +92,21 @@ type FunctionDefinitionParam struct {
|
||||
// documentation about the format.
|
||||
//
|
||||
// Omitting `parameters` defines a function with an empty parameter list.
|
||||
Parameters param.Field[FunctionParameters] `json:"parameters"`
|
||||
Parameters FunctionParameters `json:"parameters,omitzero"`
|
||||
// Whether to enable strict schema adherence when generating the function call. If
|
||||
// set to true, the model will follow the exact schema defined in the `parameters`
|
||||
// field. Only a subset of JSON Schema is supported when `strict` is `true`. Learn
|
||||
// more about Structured Outputs in the
|
||||
// [function calling guide](docs/guides/function-calling).
|
||||
Strict param.Field[bool] `json:"strict"`
|
||||
Strict param.Bool `json:"strict,omitzero"`
|
||||
apiobject
|
||||
}
|
||||
|
||||
func (f FunctionDefinitionParam) IsMissing() bool { return param.IsOmitted(f) || f.IsNull() }
|
||||
|
||||
func (r FunctionDefinitionParam) MarshalJSON() (data []byte, err error) {
|
||||
return apijson.MarshalRoot(r)
|
||||
type shadow FunctionDefinitionParam
|
||||
return param.MarshalObject(r, (*shadow)(&r))
|
||||
}
|
||||
|
||||
type FunctionParameters map[string]interface{}
|
||||
@@ -111,100 +117,73 @@ type MetadataParam map[string]string
|
||||
|
||||
type ResponseFormatJSONObjectParam struct {
|
||||
// The type of response format being defined: `json_object`
|
||||
Type param.Field[ResponseFormatJSONObjectType] `json:"type,required"`
|
||||
//
|
||||
// This field can be elided, and will be automatically set as "json_object".
|
||||
Type constant.JSONObject `json:"type,required"`
|
||||
apiobject
|
||||
}
|
||||
|
||||
func (f ResponseFormatJSONObjectParam) IsMissing() bool { return param.IsOmitted(f) || f.IsNull() }
|
||||
|
||||
func (r ResponseFormatJSONObjectParam) MarshalJSON() (data []byte, err error) {
|
||||
return apijson.MarshalRoot(r)
|
||||
}
|
||||
|
||||
func (r ResponseFormatJSONObjectParam) ImplementsChatCompletionNewParamsResponseFormatUnion() {}
|
||||
|
||||
// The type of response format being defined: `json_object`
|
||||
type ResponseFormatJSONObjectType string
|
||||
|
||||
const (
|
||||
ResponseFormatJSONObjectTypeJSONObject ResponseFormatJSONObjectType = "json_object"
|
||||
)
|
||||
|
||||
func (r ResponseFormatJSONObjectType) IsKnown() bool {
|
||||
switch r {
|
||||
case ResponseFormatJSONObjectTypeJSONObject:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
type shadow ResponseFormatJSONObjectParam
|
||||
return param.MarshalObject(r, (*shadow)(&r))
|
||||
}
|
||||
|
||||
type ResponseFormatJSONSchemaParam struct {
|
||||
JSONSchema param.Field[ResponseFormatJSONSchemaJSONSchemaParam] `json:"json_schema,required"`
|
||||
JSONSchema ResponseFormatJSONSchemaJSONSchemaParam `json:"json_schema,omitzero,required"`
|
||||
// The type of response format being defined: `json_schema`
|
||||
Type param.Field[ResponseFormatJSONSchemaType] `json:"type,required"`
|
||||
//
|
||||
// This field can be elided, and will be automatically set as "json_schema".
|
||||
Type constant.JSONSchema `json:"type,required"`
|
||||
apiobject
|
||||
}
|
||||
|
||||
func (f ResponseFormatJSONSchemaParam) IsMissing() bool { return param.IsOmitted(f) || f.IsNull() }
|
||||
|
||||
func (r ResponseFormatJSONSchemaParam) MarshalJSON() (data []byte, err error) {
|
||||
return apijson.MarshalRoot(r)
|
||||
type shadow ResponseFormatJSONSchemaParam
|
||||
return param.MarshalObject(r, (*shadow)(&r))
|
||||
}
|
||||
|
||||
func (r ResponseFormatJSONSchemaParam) ImplementsChatCompletionNewParamsResponseFormatUnion() {}
|
||||
|
||||
type ResponseFormatJSONSchemaJSONSchemaParam struct {
|
||||
// The name of the response format. Must be a-z, A-Z, 0-9, or contain underscores
|
||||
// and dashes, with a maximum length of 64.
|
||||
Name param.Field[string] `json:"name,required"`
|
||||
Name param.String `json:"name,omitzero,required"`
|
||||
// A description of what the response format is for, used by the model to determine
|
||||
// how to respond in the format.
|
||||
Description param.Field[string] `json:"description"`
|
||||
Description param.String `json:"description,omitzero"`
|
||||
// The schema for the response format, described as a JSON Schema object.
|
||||
Schema param.Field[map[string]interface{}] `json:"schema"`
|
||||
Schema map[string]interface{} `json:"schema,omitzero"`
|
||||
// Whether to enable strict schema adherence when generating the output. If set to
|
||||
// true, the model will always follow the exact schema defined in the `schema`
|
||||
// field. Only a subset of JSON Schema is supported when `strict` is `true`. To
|
||||
// learn more, read the
|
||||
// [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs).
|
||||
Strict param.Field[bool] `json:"strict"`
|
||||
Strict param.Bool `json:"strict,omitzero"`
|
||||
apiobject
|
||||
}
|
||||
|
||||
func (f ResponseFormatJSONSchemaJSONSchemaParam) IsMissing() bool {
|
||||
return param.IsOmitted(f) || f.IsNull()
|
||||
}
|
||||
|
||||
func (r ResponseFormatJSONSchemaJSONSchemaParam) MarshalJSON() (data []byte, err error) {
|
||||
return apijson.MarshalRoot(r)
|
||||
}
|
||||
|
||||
// The type of response format being defined: `json_schema`
|
||||
type ResponseFormatJSONSchemaType string
|
||||
|
||||
const (
|
||||
ResponseFormatJSONSchemaTypeJSONSchema ResponseFormatJSONSchemaType = "json_schema"
|
||||
)
|
||||
|
||||
func (r ResponseFormatJSONSchemaType) IsKnown() bool {
|
||||
switch r {
|
||||
case ResponseFormatJSONSchemaTypeJSONSchema:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
type shadow ResponseFormatJSONSchemaJSONSchemaParam
|
||||
return param.MarshalObject(r, (*shadow)(&r))
|
||||
}
|
||||
|
||||
type ResponseFormatTextParam struct {
|
||||
// The type of response format being defined: `text`
|
||||
Type param.Field[ResponseFormatTextType] `json:"type,required"`
|
||||
//
|
||||
// This field can be elided, and will be automatically set as "text".
|
||||
Type constant.Text `json:"type,required"`
|
||||
apiobject
|
||||
}
|
||||
|
||||
func (f ResponseFormatTextParam) IsMissing() bool { return param.IsOmitted(f) || f.IsNull() }
|
||||
|
||||
func (r ResponseFormatTextParam) MarshalJSON() (data []byte, err error) {
|
||||
return apijson.MarshalRoot(r)
|
||||
}
|
||||
|
||||
func (r ResponseFormatTextParam) ImplementsChatCompletionNewParamsResponseFormatUnion() {}
|
||||
|
||||
// The type of response format being defined: `text`
|
||||
type ResponseFormatTextType string
|
||||
|
||||
const (
|
||||
ResponseFormatTextTypeText ResponseFormatTextType = "text"
|
||||
)
|
||||
|
||||
func (r ResponseFormatTextType) IsKnown() bool {
|
||||
switch r {
|
||||
case ResponseFormatTextTypeText:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
type shadow ResponseFormatTextParam
|
||||
return param.MarshalObject(r, (*shadow)(&r))
|
||||
}
|
||||
|
||||
118
upload.go
118
upload.go
@@ -9,9 +9,11 @@ import (
|
||||
"net/http"
|
||||
|
||||
"github.com/openai/openai-go/internal/apijson"
|
||||
"github.com/openai/openai-go/internal/param"
|
||||
"github.com/openai/openai-go/internal/requestconfig"
|
||||
"github.com/openai/openai-go/option"
|
||||
"github.com/openai/openai-go/packages/param"
|
||||
"github.com/openai/openai-go/packages/resp"
|
||||
"github.com/openai/openai-go/shared/constant"
|
||||
)
|
||||
|
||||
// UploadService contains methods and other services that help with interacting
|
||||
@@ -22,14 +24,14 @@ import (
|
||||
// the [NewUploadService] method instead.
|
||||
type UploadService struct {
|
||||
Options []option.RequestOption
|
||||
Parts *UploadPartService
|
||||
Parts UploadPartService
|
||||
}
|
||||
|
||||
// NewUploadService generates a new service that applies the given options to each
|
||||
// request. These options are applied after the parent client's options (if there
|
||||
// is one), and before any request-specific options.
|
||||
func NewUploadService(opts ...option.RequestOption) (r *UploadService) {
|
||||
r = &UploadService{}
|
||||
func NewUploadService(opts ...option.RequestOption) (r UploadService) {
|
||||
r = UploadService{}
|
||||
r.Options = opts
|
||||
r.Parts = NewUploadPartService(opts...)
|
||||
return
|
||||
@@ -101,68 +103,50 @@ func (r *UploadService) Complete(ctx context.Context, uploadID string, body Uplo
|
||||
// The Upload object can accept byte chunks in the form of Parts.
|
||||
type Upload struct {
|
||||
// The Upload unique identifier, which can be referenced in API endpoints.
|
||||
ID string `json:"id,required"`
|
||||
ID string `json:"id,omitzero,required"`
|
||||
// The intended number of bytes to be uploaded.
|
||||
Bytes int64 `json:"bytes,required"`
|
||||
Bytes int64 `json:"bytes,omitzero,required"`
|
||||
// The Unix timestamp (in seconds) for when the Upload was created.
|
||||
CreatedAt int64 `json:"created_at,required"`
|
||||
CreatedAt int64 `json:"created_at,omitzero,required"`
|
||||
// The Unix timestamp (in seconds) for when the Upload was created.
|
||||
ExpiresAt int64 `json:"expires_at,required"`
|
||||
ExpiresAt int64 `json:"expires_at,omitzero,required"`
|
||||
// The name of the file to be uploaded.
|
||||
Filename string `json:"filename,required"`
|
||||
Filename string `json:"filename,omitzero,required"`
|
||||
// The object type, which is always "upload".
|
||||
Object UploadObject `json:"object,required"`
|
||||
//
|
||||
// This field can be elided, and will be automatically set as "upload".
|
||||
Object constant.Upload `json:"object,required"`
|
||||
// The intended purpose of the file.
|
||||
// [Please refer here](https://platform.openai.com/docs/api-reference/files/object#files/object-purpose)
|
||||
// for acceptable values.
|
||||
Purpose string `json:"purpose,required"`
|
||||
Purpose string `json:"purpose,omitzero,required"`
|
||||
// The status of the Upload.
|
||||
Status UploadStatus `json:"status,required"`
|
||||
//
|
||||
// Any of "pending", "completed", "cancelled", "expired"
|
||||
Status string `json:"status,omitzero,required"`
|
||||
// The `File` object represents a document that has been uploaded to OpenAI.
|
||||
File FileObject `json:"file,nullable"`
|
||||
JSON uploadJSON `json:"-"`
|
||||
File FileObject `json:"file,omitzero,nullable"`
|
||||
JSON struct {
|
||||
ID resp.Field
|
||||
Bytes resp.Field
|
||||
CreatedAt resp.Field
|
||||
ExpiresAt resp.Field
|
||||
Filename resp.Field
|
||||
Object resp.Field
|
||||
Purpose resp.Field
|
||||
Status resp.Field
|
||||
File resp.Field
|
||||
raw string
|
||||
} `json:"-"`
|
||||
}
|
||||
|
||||
// uploadJSON contains the JSON metadata for the struct [Upload]
|
||||
type uploadJSON struct {
|
||||
ID apijson.Field
|
||||
Bytes apijson.Field
|
||||
CreatedAt apijson.Field
|
||||
ExpiresAt apijson.Field
|
||||
Filename apijson.Field
|
||||
Object apijson.Field
|
||||
Purpose apijson.Field
|
||||
Status apijson.Field
|
||||
File apijson.Field
|
||||
raw string
|
||||
ExtraFields map[string]apijson.Field
|
||||
}
|
||||
|
||||
func (r *Upload) UnmarshalJSON(data []byte) (err error) {
|
||||
func (r Upload) RawJSON() string { return r.JSON.raw }
|
||||
func (r *Upload) UnmarshalJSON(data []byte) error {
|
||||
return apijson.UnmarshalRoot(data, r)
|
||||
}
|
||||
|
||||
func (r uploadJSON) RawJSON() string {
|
||||
return r.raw
|
||||
}
|
||||
|
||||
// The object type, which is always "upload".
|
||||
type UploadObject string
|
||||
|
||||
const (
|
||||
UploadObjectUpload UploadObject = "upload"
|
||||
)
|
||||
|
||||
func (r UploadObject) IsKnown() bool {
|
||||
switch r {
|
||||
case UploadObjectUpload:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// The status of the Upload.
|
||||
type UploadStatus string
|
||||
type UploadStatus = string
|
||||
|
||||
const (
|
||||
UploadStatusPending UploadStatus = "pending"
|
||||
@@ -171,43 +155,45 @@ const (
|
||||
UploadStatusExpired UploadStatus = "expired"
|
||||
)
|
||||
|
||||
func (r UploadStatus) IsKnown() bool {
|
||||
switch r {
|
||||
case UploadStatusPending, UploadStatusCompleted, UploadStatusCancelled, UploadStatusExpired:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
type UploadNewParams struct {
|
||||
// The number of bytes in the file you are uploading.
|
||||
Bytes param.Field[int64] `json:"bytes,required"`
|
||||
Bytes param.Int `json:"bytes,omitzero,required"`
|
||||
// The name of the file to upload.
|
||||
Filename param.Field[string] `json:"filename,required"`
|
||||
Filename param.String `json:"filename,omitzero,required"`
|
||||
// The MIME type of the file.
|
||||
//
|
||||
// This must fall within the supported MIME types for your file purpose. See the
|
||||
// supported MIME types for assistants and vision.
|
||||
MimeType param.Field[string] `json:"mime_type,required"`
|
||||
MimeType param.String `json:"mime_type,omitzero,required"`
|
||||
// The intended purpose of the uploaded file.
|
||||
//
|
||||
// See the
|
||||
// [documentation on File purposes](https://platform.openai.com/docs/api-reference/files/create#files-create-purpose).
|
||||
Purpose param.Field[FilePurpose] `json:"purpose,required"`
|
||||
//
|
||||
// Any of "assistants", "batch", "fine-tune", "vision"
|
||||
Purpose FilePurpose `json:"purpose,omitzero,required"`
|
||||
apiobject
|
||||
}
|
||||
|
||||
func (f UploadNewParams) IsMissing() bool { return param.IsOmitted(f) || f.IsNull() }
|
||||
|
||||
func (r UploadNewParams) MarshalJSON() (data []byte, err error) {
|
||||
return apijson.MarshalRoot(r)
|
||||
type shadow UploadNewParams
|
||||
return param.MarshalObject(r, (*shadow)(&r))
|
||||
}
|
||||
|
||||
type UploadCompleteParams struct {
|
||||
// The ordered list of Part IDs.
|
||||
PartIDs param.Field[[]string] `json:"part_ids,required"`
|
||||
PartIDs []string `json:"part_ids,omitzero,required"`
|
||||
// The optional md5 checksum for the file contents to verify if the bytes uploaded
|
||||
// matches what you expect.
|
||||
Md5 param.Field[string] `json:"md5"`
|
||||
Md5 param.String `json:"md5,omitzero"`
|
||||
apiobject
|
||||
}
|
||||
|
||||
func (f UploadCompleteParams) IsMissing() bool { return param.IsOmitted(f) || f.IsNull() }
|
||||
|
||||
func (r UploadCompleteParams) MarshalJSON() (data []byte, err error) {
|
||||
return apijson.MarshalRoot(r)
|
||||
type shadow UploadCompleteParams
|
||||
return param.MarshalObject(r, (*shadow)(&r))
|
||||
}
|
||||
|
||||
@@ -26,10 +26,10 @@ func TestUploadNew(t *testing.T) {
|
||||
option.WithAPIKey("My API Key"),
|
||||
)
|
||||
_, err := client.Uploads.New(context.TODO(), openai.UploadNewParams{
|
||||
Bytes: openai.F(int64(0)),
|
||||
Filename: openai.F("filename"),
|
||||
MimeType: openai.F("mime_type"),
|
||||
Purpose: openai.F(openai.FilePurposeAssistants),
|
||||
Bytes: openai.Int(0),
|
||||
Filename: openai.String("filename"),
|
||||
MimeType: openai.String("mime_type"),
|
||||
Purpose: openai.FilePurposeAssistants,
|
||||
})
|
||||
if err != nil {
|
||||
var apierr *openai.Error
|
||||
@@ -78,8 +78,8 @@ func TestUploadCompleteWithOptionalParams(t *testing.T) {
|
||||
context.TODO(),
|
||||
"upload_abc123",
|
||||
openai.UploadCompleteParams{
|
||||
PartIDs: openai.F([]string{"string"}),
|
||||
Md5: openai.F("md5"),
|
||||
PartIDs: []string{"string"},
|
||||
Md5: openai.String("md5"),
|
||||
},
|
||||
)
|
||||
if err != nil {
|
||||
|
||||
@@ -13,9 +13,11 @@ import (
|
||||
|
||||
"github.com/openai/openai-go/internal/apiform"
|
||||
"github.com/openai/openai-go/internal/apijson"
|
||||
"github.com/openai/openai-go/internal/param"
|
||||
"github.com/openai/openai-go/internal/requestconfig"
|
||||
"github.com/openai/openai-go/option"
|
||||
"github.com/openai/openai-go/packages/param"
|
||||
"github.com/openai/openai-go/packages/resp"
|
||||
"github.com/openai/openai-go/shared/constant"
|
||||
)
|
||||
|
||||
// UploadPartService contains methods and other services that help with interacting
|
||||
@@ -31,8 +33,8 @@ type UploadPartService struct {
|
||||
// NewUploadPartService generates a new service that applies the given options to
|
||||
// each request. These options are applied after the parent client's options (if
|
||||
// there is one), and before any request-specific options.
|
||||
func NewUploadPartService(opts ...option.RequestOption) (r *UploadPartService) {
|
||||
r = &UploadPartService{}
|
||||
func NewUploadPartService(opts ...option.RequestOption) (r UploadPartService) {
|
||||
r = UploadPartService{}
|
||||
r.Options = opts
|
||||
return
|
||||
}
|
||||
@@ -62,54 +64,37 @@ func (r *UploadPartService) New(ctx context.Context, uploadID string, body Uploa
|
||||
// The upload Part represents a chunk of bytes we can add to an Upload object.
|
||||
type UploadPart struct {
|
||||
// The upload Part unique identifier, which can be referenced in API endpoints.
|
||||
ID string `json:"id,required"`
|
||||
ID string `json:"id,omitzero,required"`
|
||||
// The Unix timestamp (in seconds) for when the Part was created.
|
||||
CreatedAt int64 `json:"created_at,required"`
|
||||
CreatedAt int64 `json:"created_at,omitzero,required"`
|
||||
// The object type, which is always `upload.part`.
|
||||
Object UploadPartObject `json:"object,required"`
|
||||
//
|
||||
// This field can be elided, and will be automatically set as "upload.part".
|
||||
Object constant.UploadPart `json:"object,required"`
|
||||
// The ID of the Upload object that this Part was added to.
|
||||
UploadID string `json:"upload_id,required"`
|
||||
JSON uploadPartJSON `json:"-"`
|
||||
UploadID string `json:"upload_id,omitzero,required"`
|
||||
JSON struct {
|
||||
ID resp.Field
|
||||
CreatedAt resp.Field
|
||||
Object resp.Field
|
||||
UploadID resp.Field
|
||||
raw string
|
||||
} `json:"-"`
|
||||
}
|
||||
|
||||
// uploadPartJSON contains the JSON metadata for the struct [UploadPart]
|
||||
type uploadPartJSON struct {
|
||||
ID apijson.Field
|
||||
CreatedAt apijson.Field
|
||||
Object apijson.Field
|
||||
UploadID apijson.Field
|
||||
raw string
|
||||
ExtraFields map[string]apijson.Field
|
||||
}
|
||||
|
||||
func (r *UploadPart) UnmarshalJSON(data []byte) (err error) {
|
||||
func (r UploadPart) RawJSON() string { return r.JSON.raw }
|
||||
func (r *UploadPart) UnmarshalJSON(data []byte) error {
|
||||
return apijson.UnmarshalRoot(data, r)
|
||||
}
|
||||
|
||||
func (r uploadPartJSON) RawJSON() string {
|
||||
return r.raw
|
||||
}
|
||||
|
||||
// The object type, which is always `upload.part`.
|
||||
type UploadPartObject string
|
||||
|
||||
const (
|
||||
UploadPartObjectUploadPart UploadPartObject = "upload.part"
|
||||
)
|
||||
|
||||
func (r UploadPartObject) IsKnown() bool {
|
||||
switch r {
|
||||
case UploadPartObjectUploadPart:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
type UploadPartNewParams struct {
|
||||
// The chunk of bytes for this Part.
|
||||
Data param.Field[io.Reader] `json:"data,required" format:"binary"`
|
||||
Data io.Reader `json:"data,omitzero,required" format:"binary"`
|
||||
apiobject
|
||||
}
|
||||
|
||||
func (f UploadPartNewParams) IsMissing() bool { return param.IsOmitted(f) || f.IsNull() }
|
||||
|
||||
func (r UploadPartNewParams) MarshalMultipart() (data []byte, contentType string, err error) {
|
||||
buf := bytes.NewBuffer(nil)
|
||||
writer := multipart.NewWriter(buf)
|
||||
|
||||
@@ -31,7 +31,7 @@ func TestUploadPartNew(t *testing.T) {
|
||||
context.TODO(),
|
||||
"upload_abc123",
|
||||
openai.UploadPartNewParams{
|
||||
Data: openai.F(io.Reader(bytes.NewBuffer([]byte("some file contents")))),
|
||||
Data: io.Reader(bytes.NewBuffer([]byte("some file contents"))),
|
||||
},
|
||||
)
|
||||
if err != nil {
|
||||
|
||||
@@ -25,14 +25,17 @@ func TestUsage(t *testing.T) {
|
||||
option.WithAPIKey("My API Key"),
|
||||
)
|
||||
chatCompletion, err := client.Chat.Completions.New(context.TODO(), openai.ChatCompletionNewParams{
|
||||
Messages: openai.F([]openai.ChatCompletionMessageParamUnion{openai.ChatCompletionUserMessageParam{
|
||||
Role: openai.F(openai.ChatCompletionUserMessageParamRoleUser),
|
||||
Content: openai.F([]openai.ChatCompletionContentPartUnionParam{openai.ChatCompletionContentPartTextParam{Text: openai.F("text"), Type: openai.F(openai.ChatCompletionContentPartTextTypeText)}}),
|
||||
}}),
|
||||
Model: openai.F(openai.ChatModelO3Mini),
|
||||
Messages: []openai.ChatCompletionMessageParamUnion{{
|
||||
OfUser: &openai.ChatCompletionUserMessageParam{
|
||||
Content: []openai.ChatCompletionContentPartUnionParam{{
|
||||
OfText: &openai.ChatCompletionContentPartTextParam{Text: openai.String("text")},
|
||||
}},
|
||||
},
|
||||
}},
|
||||
Model: openai.ChatModelO3Mini,
|
||||
})
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
t.Fatalf("err should be nil: %s", err.Error())
|
||||
}
|
||||
t.Logf("%+v\n", chatCompletion)
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user