mirror of https://github.com/openai/openai-go.git
synced 2026-04-01 09:07:22 +09:00
243 lines · 8.4 KiB · Go
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
|
|
|
package openai
|
|
|
|
import (
|
|
"context"
|
|
"net/http"
|
|
|
|
"github.com/openai/openai-go/internal/apijson"
|
|
"github.com/openai/openai-go/internal/param"
|
|
"github.com/openai/openai-go/internal/requestconfig"
|
|
"github.com/openai/openai-go/option"
|
|
)
|
|
|
|
// EmbeddingService contains methods and other services that help with interacting
// with the openai API.
//
// Note, unlike clients, this service does not read variables from the environment
// automatically. You should not instantiate this service directly; use the
// [NewEmbeddingService] method instead.
type EmbeddingService struct {
	// Options holds request options applied to every request made by this service.
	Options []option.RequestOption
}
|
|
|
|
// NewEmbeddingService generates a new service that applies the given options to
|
|
// each request. These options are applied after the parent client's options (if
|
|
// there is one), and before any request-specific options.
|
|
func NewEmbeddingService(opts ...option.RequestOption) (r *EmbeddingService) {
|
|
r = &EmbeddingService{}
|
|
r.Options = opts
|
|
return
|
|
}
|
|
|
|
// Creates an embedding vector representing the input text.
|
|
func (r *EmbeddingService) New(ctx context.Context, body EmbeddingNewParams, opts ...option.RequestOption) (res *CreateEmbeddingResponse, err error) {
|
|
opts = append(r.Options[:], opts...)
|
|
path := "embeddings"
|
|
err = requestconfig.ExecuteNewRequest(ctx, http.MethodPost, path, body, &res, opts...)
|
|
return
|
|
}
|
|
|
|
// CreateEmbeddingResponse is the response returned by the embeddings endpoint.
type CreateEmbeddingResponse struct {
	// The list of embeddings generated by the model.
	Data []Embedding `json:"data,required"`
	// The name of the model used to generate the embedding.
	Model string `json:"model,required"`
	// The object type, which is always "list".
	Object CreateEmbeddingResponseObject `json:"object,required"`
	// The usage information for the request.
	Usage CreateEmbeddingResponseUsage `json:"usage,required"`
	// JSON holds per-field raw-JSON metadata; excluded from (un)marshalling via `json:"-"`.
	JSON createEmbeddingResponseJSON `json:"-"`
}
|
|
|
|
// createEmbeddingResponseJSON contains the JSON metadata for the struct
// [CreateEmbeddingResponse].
type createEmbeddingResponseJSON struct {
	Data   apijson.Field
	Model  apijson.Field
	Object apijson.Field
	Usage  apijson.Field
	// raw stores the original JSON string; exposed through RawJSON.
	raw string
	// ExtraFields holds metadata for fields not declared above (managed by apijson).
	ExtraFields map[string]apijson.Field
}
|
|
|
|
func (r *CreateEmbeddingResponse) UnmarshalJSON(data []byte) (err error) {
|
|
return apijson.UnmarshalRoot(data, r)
|
|
}
|
|
|
|
func (r createEmbeddingResponseJSON) RawJSON() string {
|
|
return r.raw
|
|
}
|
|
|
|
// CreateEmbeddingResponseObject is the object type, which is always "list".
type CreateEmbeddingResponseObject string

const (
	CreateEmbeddingResponseObjectList CreateEmbeddingResponseObject = "list"
)

// IsKnown reports whether r is one of the enum's defined values.
func (r CreateEmbeddingResponseObject) IsKnown() bool {
	return r == CreateEmbeddingResponseObjectList
}
|
|
|
|
// CreateEmbeddingResponseUsage is the usage information for the request.
type CreateEmbeddingResponseUsage struct {
	// The number of tokens used by the prompt.
	PromptTokens int64 `json:"prompt_tokens,required"`
	// The total number of tokens used by the request.
	TotalTokens int64 `json:"total_tokens,required"`
	// JSON holds per-field raw-JSON metadata; excluded from (un)marshalling via `json:"-"`.
	JSON createEmbeddingResponseUsageJSON `json:"-"`
}
|
|
|
|
// createEmbeddingResponseUsageJSON contains the JSON metadata for the struct
// [CreateEmbeddingResponseUsage].
type createEmbeddingResponseUsageJSON struct {
	PromptTokens apijson.Field
	TotalTokens  apijson.Field
	// raw stores the original JSON string; exposed through RawJSON.
	raw string
	// ExtraFields holds metadata for fields not declared above (managed by apijson).
	ExtraFields map[string]apijson.Field
}
|
|
|
|
func (r *CreateEmbeddingResponseUsage) UnmarshalJSON(data []byte) (err error) {
|
|
return apijson.UnmarshalRoot(data, r)
|
|
}
|
|
|
|
func (r createEmbeddingResponseUsageJSON) RawJSON() string {
|
|
return r.raw
|
|
}
|
|
|
|
// Embedding represents an embedding vector returned by the embedding endpoint.
type Embedding struct {
	// The embedding vector, which is a list of floats. The length of the vector
	// depends on the model, as listed in the
	// [embedding guide](https://platform.openai.com/docs/guides/embeddings).
	Embedding []float64 `json:"embedding,required"`
	// The index of the embedding in the list of embeddings.
	Index int64 `json:"index,required"`
	// The object type, which is always "embedding".
	Object EmbeddingObject `json:"object,required"`
	// JSON holds per-field raw-JSON metadata; excluded from (un)marshalling via `json:"-"`.
	JSON embeddingJSON `json:"-"`
}
|
|
|
|
// embeddingJSON contains the JSON metadata for the struct [Embedding].
type embeddingJSON struct {
	Embedding apijson.Field
	Index     apijson.Field
	Object    apijson.Field
	// raw stores the original JSON string; exposed through RawJSON.
	raw string
	// ExtraFields holds metadata for fields not declared above (managed by apijson).
	ExtraFields map[string]apijson.Field
}
|
|
|
|
func (r *Embedding) UnmarshalJSON(data []byte) (err error) {
|
|
return apijson.UnmarshalRoot(data, r)
|
|
}
|
|
|
|
func (r embeddingJSON) RawJSON() string {
|
|
return r.raw
|
|
}
|
|
|
|
// EmbeddingObject is the object type, which is always "embedding".
type EmbeddingObject string

const (
	EmbeddingObjectEmbedding EmbeddingObject = "embedding"
)

// IsKnown reports whether r is one of the enum's defined values.
func (r EmbeddingObject) IsKnown() bool {
	return r == EmbeddingObjectEmbedding
}
|
|
|
|
// EmbeddingModel identifies an embedding model by name. It is a string alias,
// so any model name may be supplied, not just the constants below.
type EmbeddingModel = string

const (
	EmbeddingModelTextEmbeddingAda002 EmbeddingModel = "text-embedding-ada-002"
	EmbeddingModelTextEmbedding3Small EmbeddingModel = "text-embedding-3-small"
	EmbeddingModelTextEmbedding3Large EmbeddingModel = "text-embedding-3-large"
)
|
|
|
|
// EmbeddingNewParams are the request parameters for creating an embedding.
type EmbeddingNewParams struct {
	// Input text to embed, encoded as a string or array of tokens. To embed multiple
	// inputs in a single request, pass an array of strings or array of token arrays.
	// The input must not exceed the max input tokens for the model (8192 tokens for
	// `text-embedding-ada-002`), cannot be an empty string, and any array must be 2048
	// dimensions or less.
	// [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken)
	// for counting tokens. Some models may also impose a limit on total number of
	// tokens summed across inputs.
	Input param.Field[EmbeddingNewParamsInputUnion] `json:"input,required"`
	// ID of the model to use. You can use the
	// [List models](https://platform.openai.com/docs/api-reference/models/list) API to
	// see all of your available models, or see our
	// [Model overview](https://platform.openai.com/docs/models) for descriptions of
	// them.
	Model param.Field[EmbeddingModel] `json:"model,required"`
	// The number of dimensions the resulting output embeddings should have. Only
	// supported in `text-embedding-3` and later models.
	Dimensions param.Field[int64] `json:"dimensions"`
	// The format to return the embeddings in. Can be either `float` or
	// [`base64`](https://pypi.org/project/pybase64/).
	EncodingFormat param.Field[EmbeddingNewParamsEncodingFormat] `json:"encoding_format"`
	// A unique identifier representing your end-user, which can help OpenAI to monitor
	// and detect abuse.
	// [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids).
	User param.Field[string] `json:"user"`
}
|
|
|
|
func (r EmbeddingNewParams) MarshalJSON() (data []byte, err error) {
|
|
return apijson.MarshalRoot(r)
|
|
}
|
|
|
|
// EmbeddingNewParamsInputUnion is the input text to embed, encoded as a string or
// array of tokens. To embed multiple inputs in a single request, pass an array of
// strings or array of token arrays. The input must not exceed the max input
// tokens for the model (8192 tokens for `text-embedding-ada-002`), cannot be an
// empty string, and any array must be 2048 dimensions or less.
// [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken)
// for counting tokens. Some models may also impose a limit on total number of
// tokens summed across inputs.
//
// Satisfied by [shared.UnionString], [EmbeddingNewParamsInputArrayOfStrings],
// [EmbeddingNewParamsInputArrayOfTokens],
// [EmbeddingNewParamsInputArrayOfTokenArrays].
type EmbeddingNewParamsInputUnion interface {
	ImplementsEmbeddingNewParamsInputUnion()
}
|
|
|
|
// EmbeddingNewParamsInputArrayOfStrings is a list of text inputs; it satisfies
// [EmbeddingNewParamsInputUnion].
type EmbeddingNewParamsInputArrayOfStrings []string

// ImplementsEmbeddingNewParamsInputUnion marks the type as a union variant.
func (r EmbeddingNewParamsInputArrayOfStrings) ImplementsEmbeddingNewParamsInputUnion() {}
|
|
|
|
// EmbeddingNewParamsInputArrayOfTokens is a single input as a list of tokens; it
// satisfies [EmbeddingNewParamsInputUnion].
type EmbeddingNewParamsInputArrayOfTokens []int64

// ImplementsEmbeddingNewParamsInputUnion marks the type as a union variant.
func (r EmbeddingNewParamsInputArrayOfTokens) ImplementsEmbeddingNewParamsInputUnion() {}
|
|
|
|
// EmbeddingNewParamsInputArrayOfTokenArrays is a list of token-array inputs; it
// satisfies [EmbeddingNewParamsInputUnion].
type EmbeddingNewParamsInputArrayOfTokenArrays [][]int64

// ImplementsEmbeddingNewParamsInputUnion marks the type as a union variant.
func (r EmbeddingNewParamsInputArrayOfTokenArrays) ImplementsEmbeddingNewParamsInputUnion() {}
|
|
|
|
// EmbeddingNewParamsEncodingFormat is the format to return the embeddings in.
// Can be either `float` or [`base64`](https://pypi.org/project/pybase64/).
type EmbeddingNewParamsEncodingFormat string

const (
	EmbeddingNewParamsEncodingFormatFloat  EmbeddingNewParamsEncodingFormat = "float"
	EmbeddingNewParamsEncodingFormatBase64 EmbeddingNewParamsEncodingFormat = "base64"
)

// IsKnown reports whether r is one of the enum's defined values.
func (r EmbeddingNewParamsEncodingFormat) IsKnown() bool {
	return r == EmbeddingNewParamsEncodingFormatFloat ||
		r == EmbeddingNewParamsEncodingFormatBase64
}
|