package ollama

import (
	"bytes"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
)

// GenerateEmbeddingsRequest is the JSON request body sent to the Ollama
// embeddings endpoint (POST {baseUrl}/embed).
type GenerateEmbeddingsRequest struct {
	Model      string                            `json:"model"`
	Input      []string                          `json:"input"`
	Truncate   *bool                             `json:"truncate,omitempty"`
	Dimensions *int                              `json:"dimensions,omitempty"`
	KeepAlive  string                            `json:"keep_alive,omitempty"`
	Options    *GenerateEmbeddingsRequestOptions `json:"options,omitempty"`
}

// GenerateEmbeddingsRequestOptions holds optional model parameters for an
// embeddings request. All fields are pointers/slices so that unset values
// are omitted from the JSON payload.
type GenerateEmbeddingsRequestOptions struct {
	Seed        *int     `json:"seed,omitempty"`
	Temperature *float32 `json:"temperature,omitempty"`
	TopK        *int     `json:"top_k,omitempty"`
	TopP        *float32 `json:"top_p,omitempty"`
	MinP        *float32 `json:"min_p,omitempty"`
	Stop        []string `json:"stop,omitempty"`
	NumCtx      *int     `json:"num_ctx,omitempty"`
	NumPredict  *int     `json:"num_predict,omitempty"`
}

// GenerateEmbeddingsResponse is the decoded JSON response from the
// embeddings endpoint. One embedding vector is returned per input string.
// Duration fields are raw integers as reported by the server
// (presumably nanoseconds — confirm against the Ollama API docs).
type GenerateEmbeddingsResponse struct {
	Model           string      `json:"model"`
	Embeddings      [][]float32 `json:"embeddings"`
	TotalDuration   int         `json:"total_duration"`
	LoadDuration    int         `json:"load_duration"`
	PromptEvalCount int         `json:"prompt_eval_count"`
}

// GenerateEmbeddings POSTs reqBody to {baseUrl}/embed and decodes the JSON
// response. It returns the decoded response, the HTTP status code, and an
// error. The status code is -1 when no HTTP response was obtained (marshal,
// request-build, transport, or decode failure); on a non-200 response it is
// the server's status code and the error includes the response body.
func (o Ollama) GenerateEmbeddings(reqBody GenerateEmbeddingsRequest) (GenerateEmbeddingsResponse, int, error) {
	reqBodyBytes, err := json.Marshal(reqBody)
	if err != nil {
		return GenerateEmbeddingsResponse{}, -1, fmt.Errorf("marshaling embeddings request: %w", err)
	}

	req, err := http.NewRequest(http.MethodPost, fmt.Sprintf("%s/embed", o.baseUrl), bytes.NewReader(reqBodyBytes))
	if err != nil {
		return GenerateEmbeddingsResponse{}, -1, fmt.Errorf("building embeddings request: %w", err)
	}

	// Custom headers first, so the mandatory Content-Type below cannot be
	// overridden by a stray entry in customHeaders.
	for key, val := range o.customHeaders {
		req.Header.Set(key, val)
	}
	req.Header.Set("Content-Type", "application/json")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return GenerateEmbeddingsResponse{}, -1, err
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		// Best-effort read of the error payload: the status code alone
		// ("status code is not 200") was useless for debugging. Reading
		// the body also lets the transport reuse the connection.
		body, _ := io.ReadAll(resp.Body)
		return GenerateEmbeddingsResponse{}, resp.StatusCode,
			fmt.Errorf("unexpected status code %d: %s", resp.StatusCode, bytes.TrimSpace(body))
	}

	var respBody GenerateEmbeddingsResponse
	if err := json.NewDecoder(resp.Body).Decode(&respBody); err != nil {
		// NOTE(review): -1 preserved from the original even though a
		// status code exists here; callers may rely on -1 meaning
		// "no usable response".
		return GenerateEmbeddingsResponse{}, -1, fmt.Errorf("decoding embeddings response: %w", err)
	}
	return respBody, resp.StatusCode, nil
}