// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

package openai

import (
	"context"
	"net/http"
	"slices"

	"github.com/openai/openai-go/v3/internal/apijson"
	"github.com/openai/openai-go/v3/internal/requestconfig"
	"github.com/openai/openai-go/v3/option"
	"github.com/openai/openai-go/v3/packages/param"
	"github.com/openai/openai-go/v3/packages/respjson"
	"github.com/openai/openai-go/v3/shared/constant"
)

// EmbeddingService contains methods and other services that help with interacting
// with the openai API.
//
// Note, unlike clients, this service does not read variables from the environment
// automatically. You should not instantiate this service directly, and instead use
// the [NewEmbeddingService] method instead.
type EmbeddingService struct {
	Options []option.RequestOption
}

// NewEmbeddingService generates a new service that applies the given options to
// each request. These options are applied after the parent client's options (if
// there is one), and before any request-specific options.
func NewEmbeddingService(opts ...option.RequestOption) (r EmbeddingService) {
	r = EmbeddingService{}
	r.Options = opts
	return
}

// Creates an embedding vector representing the input text.
//
// Get a vector representation of a given input that can be easily consumed by
// machine learning models and algorithms.
func (r *EmbeddingService) New(ctx context.Context, body EmbeddingNewParams, opts ...option.RequestOption) (res *CreateEmbeddingResponse, err error) {
	// Service-level options come first so request-specific options take precedence.
	opts = slices.Concat(r.Options, opts)
	path := "embeddings"
	err = requestconfig.ExecuteNewRequest(ctx, http.MethodPost, path, body, &res, opts...)
	return res, err
}

// CreateEmbeddingResponse is the response of the embeddings endpoint: the
// generated embeddings together with model and token-usage metadata.
type CreateEmbeddingResponse struct {
	// The list of embeddings generated by the model.
	Data []Embedding `json:"data" api:"required"`
	// The name of the model used to generate the embedding.
	Model string `json:"model" api:"required"`
	// The object type, which is always "list".
	Object constant.List `json:"object" default:"list"`
	// The usage information for the request.
	Usage CreateEmbeddingResponseUsage `json:"usage" api:"required"`
	// JSON contains metadata for fields, check presence with [respjson.Field.Valid].
	JSON struct {
		Data        respjson.Field
		Model       respjson.Field
		Object      respjson.Field
		Usage       respjson.Field
		ExtraFields map[string]respjson.Field
		raw         string
	} `json:"-"`
}

// Returns the unmodified JSON received from the API
func (r CreateEmbeddingResponse) RawJSON() string { return r.JSON.raw }

func (r *CreateEmbeddingResponse) UnmarshalJSON(data []byte) error {
	return apijson.UnmarshalRoot(data, r)
}

// The usage information for the request.
type CreateEmbeddingResponseUsage struct {
	// The number of tokens used by the prompt.
	PromptTokens int64 `json:"prompt_tokens" api:"required"`
	// The total number of tokens used by the request.
	TotalTokens int64 `json:"total_tokens" api:"required"`
	// JSON contains metadata for fields, check presence with [respjson.Field.Valid].
	JSON struct {
		PromptTokens respjson.Field
		TotalTokens  respjson.Field
		ExtraFields  map[string]respjson.Field
		raw          string
	} `json:"-"`
}

// Returns the unmodified JSON received from the API
func (r CreateEmbeddingResponseUsage) RawJSON() string { return r.JSON.raw }

func (r *CreateEmbeddingResponseUsage) UnmarshalJSON(data []byte) error {
	return apijson.UnmarshalRoot(data, r)
}

// Represents an embedding vector returned by embedding endpoint.
type Embedding struct {
	// The embedding vector, which is a list of floats. The length of vector depends on
	// the model as listed in the
	// [embedding guide](https://platform.openai.com/docs/guides/embeddings).
	Embedding []float64 `json:"embedding" api:"required"`
	// The index of the embedding in the list of embeddings.
	Index int64 `json:"index" api:"required"`
	// The object type, which is always "embedding".
	Object constant.Embedding `json:"object" default:"embedding"`
	// JSON contains metadata for fields, check presence with [respjson.Field.Valid].
	JSON struct {
		Embedding   respjson.Field
		Index       respjson.Field
		Object      respjson.Field
		ExtraFields map[string]respjson.Field
		raw         string
	} `json:"-"`
}

// Returns the unmodified JSON received from the API
func (r Embedding) RawJSON() string { return r.JSON.raw }

func (r *Embedding) UnmarshalJSON(data []byte) error {
	return apijson.UnmarshalRoot(data, r)
}

// EmbeddingModel identifies a model that can produce embeddings. It is a plain
// string alias, so arbitrary model IDs are also accepted.
type EmbeddingModel = string

const (
	EmbeddingModelTextEmbeddingAda002 EmbeddingModel = "text-embedding-ada-002"
	EmbeddingModelTextEmbedding3Small EmbeddingModel = "text-embedding-3-small"
	EmbeddingModelTextEmbedding3Large EmbeddingModel = "text-embedding-3-large"
)

// EmbeddingNewParams are the request parameters for [EmbeddingService.New].
type EmbeddingNewParams struct {
	// Input text to embed, encoded as a string or array of tokens. To embed multiple
	// inputs in a single request, pass an array of strings or array of token arrays.
	// The input must not exceed the max input tokens for the model (8192 tokens for
	// all embedding models), cannot be an empty string, and any array must be 2048
	// dimensions or less.
	// [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken)
	// for counting tokens. In addition to the per-input token limit, all embedding
	// models enforce a maximum of 300,000 tokens summed across all inputs in a single
	// request.
	Input EmbeddingNewParamsInputUnion `json:"input,omitzero" api:"required"`
	// ID of the model to use. You can use the
	// [List models](https://platform.openai.com/docs/api-reference/models/list) API to
	// see all of your available models, or see our
	// [Model overview](https://platform.openai.com/docs/models) for descriptions of
	// them.
	Model EmbeddingModel `json:"model,omitzero" api:"required"`
	// The number of dimensions the resulting output embeddings should have. Only
	// supported in `text-embedding-3` and later models.
	Dimensions param.Opt[int64] `json:"dimensions,omitzero"`
	// A unique identifier representing your end-user, which can help OpenAI to monitor
	// and detect abuse.
	// [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids).
	User param.Opt[string] `json:"user,omitzero"`
	// The format to return the embeddings in. Can be either `float` or
	// [`base64`](https://pypi.org/project/pybase64/).
	//
	// Any of "float", "base64".
	EncodingFormat EmbeddingNewParamsEncodingFormat `json:"encoding_format,omitzero"`
	paramObj
}

func (r EmbeddingNewParams) MarshalJSON() (data []byte, err error) {
	// The shadow type has the same fields but not this MarshalJSON method,
	// preventing infinite recursion when param.MarshalObject re-marshals r.
	type shadow EmbeddingNewParams
	return param.MarshalObject(r, (*shadow)(&r))
}
func (r *EmbeddingNewParams) UnmarshalJSON(data []byte) error {
	return apijson.UnmarshalRoot(data, r)
}

// Only one field can be non-zero.
//
// Use [param.IsOmitted] to confirm if a field is set.
type EmbeddingNewParamsInputUnion struct {
	OfString             param.Opt[string] `json:",omitzero,inline"`
	OfArrayOfStrings     []string          `json:",omitzero,inline"`
	OfArrayOfTokens      []int64           `json:",omitzero,inline"`
	OfArrayOfTokenArrays [][]int64         `json:",omitzero,inline"`
	paramUnion
}

func (u EmbeddingNewParamsInputUnion) MarshalJSON() ([]byte, error) {
	return param.MarshalUnion(u, u.OfString, u.OfArrayOfStrings, u.OfArrayOfTokens, u.OfArrayOfTokenArrays)
}
func (u *EmbeddingNewParamsInputUnion) UnmarshalJSON(data []byte) error {
	return apijson.UnmarshalRoot(data, u)
}

// asAny returns a pointer to whichever variant field is set, or nil when no
// variant has been populated.
func (u *EmbeddingNewParamsInputUnion) asAny() any {
	if !param.IsOmitted(u.OfString) {
		return &u.OfString.Value
	} else if !param.IsOmitted(u.OfArrayOfStrings) {
		return &u.OfArrayOfStrings
	} else if !param.IsOmitted(u.OfArrayOfTokens) {
		return &u.OfArrayOfTokens
	} else if !param.IsOmitted(u.OfArrayOfTokenArrays) {
		return &u.OfArrayOfTokenArrays
	}
	return nil
}

// The format to return the embeddings in. Can be either `float` or
// [`base64`](https://pypi.org/project/pybase64/).
type EmbeddingNewParamsEncodingFormat string

const (
	EmbeddingNewParamsEncodingFormatFloat  EmbeddingNewParamsEncodingFormat = "float"
	EmbeddingNewParamsEncodingFormatBase64 EmbeddingNewParamsEncodingFormat = "base64"
)