go-ollama: first release v0.1.0

This commit is contained in:
2026-02-23 11:40:05 +01:00
parent ad8e4025b2
commit 69172f6d5f
15 changed files with 787 additions and 0 deletions

42
copy_model.go Normal file
View File

@@ -0,0 +1,42 @@
package ollama
import (
"bytes"
"encoding/json"
"errors"
"fmt"
"net/http"
)
// CopyModelRequest is the JSON body for Ollama's model-copy endpoint.
type CopyModelRequest struct {
	// Source is the name of the existing model to copy from.
	Source string `json:"source"`
	// Destination is the name the copied model is stored under.
	Destination string `json:"destination"`
}
// CopyModel duplicates an existing model under a new name using the
// "copy" endpoint. It returns the HTTP status code of the reply (-1
// when the request never completed) and an error when the call fails
// or the server does not answer with 200.
func (o Ollama) CopyModel(reqBody CopyModelRequest) (int, error) {
	reqBodyBytes, err := json.Marshal(reqBody)
	if err != nil {
		return -1, err
	}
	// BUG FIX: this previously posted to "%s/create" (the model
	// creation endpoint); copying a model lives at "%s/copy".
	req, err := http.NewRequest(http.MethodPost, fmt.Sprintf("%s/copy", o.baseUrl), bytes.NewReader(reqBodyBytes))
	if err != nil {
		return -1, err
	}
	// Caller-supplied headers first; the mandatory Content-Type below
	// overrides any caller value.
	for key, val := range o.customHeaders {
		req.Header.Set(key, val)
	}
	req.Header.Set("Content-Type", "application/json")
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return -1, err
	}
	defer resp.Body.Close()
	if resp.StatusCode != 200 {
		return resp.StatusCode, errors.New("status code is not 200")
	}
	return resp.StatusCode, nil
}

75
create_model.go Normal file
View File

@@ -0,0 +1,75 @@
package ollama
import (
"bytes"
"encoding/json"
"errors"
"fmt"
"net/http"
)
// CreateModelRequest is the JSON body for the Ollama "create" endpoint.
type CreateModelRequest struct {
	// Model is the name the new model will be stored under.
	Model string `json:"model"`
	// From names an existing model to base the new one on.
	From     string `json:"from,omitempty"`
	Template string `json:"template,omitempty"`
	License  string `json:"license,omitempty"`
	System   string `json:"system,omitempty"`
	// Parameters holds model options as free-form key/value pairs.
	Parameters map[string]any              `json:"parameters,omitempty"`
	Messages   []CreateModelRequestMessage `json:"messages,omitempty"`
	Quantize   string                      `json:"quantize,omitempty"`
	// Stream is a pointer so the field can be omitted entirely when
	// unset; use PtrOf to fill it from a literal.
	Stream *bool `json:"stream,omitempty"`
}

// CreateModelRequestMessage is one chat message embedded in a
// CreateModelRequest.
type CreateModelRequestMessage struct {
	Role    string `json:"role"`
	Content string `json:"content"`
	// Images attached to the message — presumably base64-encoded;
	// TODO confirm against the server docs.
	Images    []string                            `json:"images,omitempty"`
	ToolCalls []CreateModelRequestMessageToolCall `json:"tool_calls,omitempty"`
}

// CreateModelRequestMessageToolCall wraps a single tool invocation
// recorded on a message.
type CreateModelRequestMessageToolCall struct {
	Function *CreateModelRequestMessageToolCallFunction `json:"function,omitempty"`
}

// CreateModelRequestMessageToolCallFunction describes the function a
// tool call invoked, including its arguments.
type CreateModelRequestMessageToolCallFunction struct {
	Name        string         `json:"name"`
	Description string         `json:"description,omitempty"`
	Arguments   map[string]any `json:"arguments,omitempty"`
}

// CreateModelResponse is the decoded reply from the "create" endpoint.
type CreateModelResponse struct {
	Status string `json:"status"`
}
// CreateModel sends a model-creation request to the "create" endpoint
// and decodes the JSON reply. It returns the decoded body, the HTTP
// status code (-1 when no response was obtained), and an error when
// the call fails or the status is not 200.
func (o Ollama) CreateModel(reqBody CreateModelRequest) (CreateModelResponse, int, error) {
	var zero CreateModelResponse

	payload, err := json.Marshal(reqBody)
	if err != nil {
		return zero, -1, err
	}

	endpoint := fmt.Sprintf("%s/create", o.baseUrl)
	request, err := http.NewRequest(http.MethodPost, endpoint, bytes.NewReader(payload))
	if err != nil {
		return zero, -1, err
	}
	// Caller-supplied headers go first so the mandatory Content-Type
	// below always wins.
	for name, value := range o.customHeaders {
		request.Header.Set(name, value)
	}
	request.Header.Set("Content-Type", "application/json")

	response, err := http.DefaultClient.Do(request)
	if err != nil {
		return zero, -1, err
	}
	defer response.Body.Close()

	if response.StatusCode != http.StatusOK {
		return zero, response.StatusCode, errors.New("status code is not 200")
	}

	var body CreateModelResponse
	if err := json.NewDecoder(response.Body).Decode(&body); err != nil {
		return zero, -1, err
	}
	return body, response.StatusCode, nil
}

41
delete_model.go Normal file
View File

@@ -0,0 +1,41 @@
package ollama
import (
"bytes"
"encoding/json"
"errors"
"fmt"
"net/http"
)
// DeleteModelRequest is the JSON body for the Ollama "delete"
// endpoint; Model names the model to remove.
type DeleteModelRequest struct {
	Model string `json:"model"`
}
// DeleteModel asks the server to remove the model named in reqBody
// via the "delete" endpoint. It returns the HTTP status code (-1 when
// no response was obtained) and an error when the call fails or the
// status is not 200.
func (o Ollama) DeleteModel(reqBody DeleteModelRequest) (int, error) {
	payload, err := json.Marshal(reqBody)
	if err != nil {
		return -1, err
	}

	endpoint := fmt.Sprintf("%s/delete", o.baseUrl)
	request, err := http.NewRequest(http.MethodDelete, endpoint, bytes.NewReader(payload))
	if err != nil {
		return -1, err
	}
	// Caller-supplied headers go first so the mandatory Content-Type
	// below always wins.
	for name, value := range o.customHeaders {
		request.Header.Set(name, value)
	}
	request.Header.Set("Content-Type", "application/json")

	response, err := http.DefaultClient.Do(request)
	if err != nil {
		return -1, err
	}
	defer response.Body.Close()

	if response.StatusCode != http.StatusOK {
		return response.StatusCode, errors.New("status code is not 200")
	}
	return response.StatusCode, nil
}

130
generate_chat_message.go Normal file
View File

@@ -0,0 +1,130 @@
package ollama
import (
"bytes"
"encoding/json"
"errors"
"fmt"
"net/http"
)
// GenerateChatMessageRequest is the JSON body for the Ollama "chat"
// endpoint.
type GenerateChatMessageRequest struct {
	Model    string                              `json:"model"`
	Messages []GenerateChatMessageRequestMessage `json:"messages"`
	// Tools the model may call during the conversation.
	Tools  []GenerateChatMessageRequestTool `json:"tools,omitempty"`
	Format string                           `json:"format,omitempty"`
	// Options overrides model sampling parameters for this request.
	Options *GenerateChatMessageRequestOptions `json:"options,omitempty"`
	// Pointer booleans/ints below are omitted from the JSON when nil;
	// use PtrOf to set them from literals.
	Stream      *bool  `json:"stream,omitempty"`
	Think       *bool  `json:"think,omitempty"`
	KeepAlive   string `json:"keep_alive,omitempty"`
	Logprobs    *bool  `json:"logprobs,omitempty"`
	TopLogprobs *int   `json:"top_logprobs,omitempty"`
}

// GenerateChatMessageRequestMessage is one turn of the conversation
// sent to the model.
type GenerateChatMessageRequestMessage struct {
	Role    string `json:"role"`
	Content string `json:"content"`
	// Images attached to the message — presumably base64-encoded;
	// TODO confirm against the server docs.
	Images    []string                                    `json:"images,omitempty"`
	ToolCalls []GenerateChatMessageRequestMessageToolCall `json:"tool_calls,omitempty"`
}

// GenerateChatMessageRequestMessageToolCall wraps a single tool
// invocation recorded on a message.
type GenerateChatMessageRequestMessageToolCall struct {
	Function *GenerateChatMessageRequestMessageToolCallFunction `json:"function,omitempty"`
}

// GenerateChatMessageRequestMessageToolCallFunction describes the
// function a tool call invoked, including its arguments.
type GenerateChatMessageRequestMessageToolCallFunction struct {
	Name        string         `json:"name"`
	Description string         `json:"description,omitempty"`
	Arguments   map[string]any `json:"arguments,omitempty"`
}

// GenerateChatMessageRequestTool declares one tool the model may call.
type GenerateChatMessageRequestTool struct {
	Type     string                                 `json:"type"`
	Function GenerateChatMessageRequestToolFunction `json:"function"`
}

// GenerateChatMessageRequestToolFunction is the callable signature of
// a declared tool.
type GenerateChatMessageRequestToolFunction struct {
	Name        string         `json:"name"`
	Description string         `json:"description"`
	Parameters  map[string]any `json:"parameters,omitempty"`
}

// GenerateChatMessageRequestOptions holds per-request sampling
// overrides; nil pointer fields are omitted from the JSON.
type GenerateChatMessageRequestOptions struct {
	Seed        *int     `json:"seed,omitempty"`
	Temperature *float32 `json:"temperature,omitempty"`
	TopK        *int     `json:"top_k,omitempty"`
	TopP        *float32 `json:"top_p,omitempty"`
	MinP        *float32 `json:"min_p,omitempty"`
	Stop        []string `json:"stop,omitempty"`
	NumCtx      *int     `json:"num_ctx,omitempty"`
	NumPredict  *int     `json:"num_predict,omitempty"`
}
type GenerateChatMessageResponse struct {
Model string `json:"model"`
CreatedAt string `json:"created_at"`
Message struct {
Role string `json:"role"`
Content string `json:"content"`
Thinking string `json:"thinking"`
ToolCalls []struct {
Function struct {
Name string `json:"name"`
Description string `json:"description"`
Arguments map[string]any `json:"arguments"`
} `json:"function"`
} `json:"tool_calls"`
Images []string `json:"images"`
} `json:"message"`
Done bool `json:"done"`
DoneReason string `json:"done_reason"`
TotalDuration int `json:"total_duration"`
LoadDuration int `json:"load_duration"`
PromptEvalCount int `json:"prompt_eval_count"`
PromptEvalDuration int `json:"prompt_eval_duration"`
EvalCount int `json:"eval_count"`
EvalDuration int `json:"eval_duration"`
Logprobs []struct {
Token string `json:"token"`
Logprob int `json:"logprob"`
Bytes []int `json:"bytes"`
TopLogprobs []struct {
Token string `json:"token"`
Logprob int `json:"logprob"`
Bytes []int `json:"bytes"`
} `json:"top_logprobs"`
} `json:"logprobs"`
}
// GenerateChatMessage posts a conversation to the "chat" endpoint and
// decodes the JSON reply. It returns the decoded body, the HTTP
// status code (-1 when no response was obtained), and an error when
// the call fails or the status is not 200.
func (o Ollama) GenerateChatMessage(reqBody GenerateChatMessageRequest) (GenerateChatMessageResponse, int, error) {
	var zero GenerateChatMessageResponse

	payload, err := json.Marshal(reqBody)
	if err != nil {
		return zero, -1, err
	}

	endpoint := fmt.Sprintf("%s/chat", o.baseUrl)
	request, err := http.NewRequest(http.MethodPost, endpoint, bytes.NewReader(payload))
	if err != nil {
		return zero, -1, err
	}
	// Caller-supplied headers go first so the mandatory Content-Type
	// below always wins.
	for name, value := range o.customHeaders {
		request.Header.Set(name, value)
	}
	request.Header.Set("Content-Type", "application/json")

	response, err := http.DefaultClient.Do(request)
	if err != nil {
		return zero, -1, err
	}
	defer response.Body.Close()

	if response.StatusCode != http.StatusOK {
		return zero, response.StatusCode, errors.New("status code is not 200")
	}

	var body GenerateChatMessageResponse
	if err := json.NewDecoder(response.Body).Decode(&body); err != nil {
		return zero, -1, err
	}
	return body, response.StatusCode, nil
}

70
generate_embeddings.go Normal file
View File

@@ -0,0 +1,70 @@
package ollama
import (
"bytes"
"encoding/json"
"errors"
"fmt"
"net/http"
)
// GenerateEmbeddingsRequest is the JSON body for the Ollama "embed"
// endpoint.
type GenerateEmbeddingsRequest struct {
	Model string `json:"model"`
	// Input is the list of texts to embed; one embedding is returned
	// per entry.
	Input []string `json:"input"`
	// Truncate presumably controls whether over-long input is cut to
	// the context size — TODO confirm against the server docs.
	Truncate   *bool  `json:"truncate,omitempty"`
	Dimensions *int   `json:"dimensions,omitempty"`
	KeepAlive  string `json:"keep_alive,omitempty"`
	// Options overrides model sampling parameters for this request.
	Options *GenerateEmbeddingsRequestOptions `json:"options,omitempty"`
}

// GenerateEmbeddingsRequestOptions holds per-request sampling
// overrides; nil pointer fields are omitted from the JSON.
type GenerateEmbeddingsRequestOptions struct {
	Seed        *int     `json:"seed,omitempty"`
	Temperature *float32 `json:"temperature,omitempty"`
	TopK        *int     `json:"top_k,omitempty"`
	TopP        *float32 `json:"top_p,omitempty"`
	MinP        *float32 `json:"min_p,omitempty"`
	Stop        []string `json:"stop,omitempty"`
	NumCtx      *int     `json:"num_ctx,omitempty"`
	NumPredict  *int     `json:"num_predict,omitempty"`
}

// GenerateEmbeddingsResponse is the decoded reply from the "embed"
// endpoint; Embeddings holds one vector per input string.
type GenerateEmbeddingsResponse struct {
	Model           string      `json:"model"`
	Embeddings      [][]float32 `json:"embeddings"`
	TotalDuration   int         `json:"total_duration"`
	LoadDuration    int         `json:"load_duration"`
	PromptEvalCount int         `json:"prompt_eval_count"`
}
// GenerateEmbeddings posts texts to the "embed" endpoint and decodes
// the JSON reply. It returns the decoded body, the HTTP status code
// (-1 when no response was obtained), and an error when the call
// fails or the status is not 200.
func (o Ollama) GenerateEmbeddings(reqBody GenerateEmbeddingsRequest) (GenerateEmbeddingsResponse, int, error) {
	var zero GenerateEmbeddingsResponse

	payload, err := json.Marshal(reqBody)
	if err != nil {
		return zero, -1, err
	}

	endpoint := fmt.Sprintf("%s/embed", o.baseUrl)
	request, err := http.NewRequest(http.MethodPost, endpoint, bytes.NewReader(payload))
	if err != nil {
		return zero, -1, err
	}
	// Caller-supplied headers go first so the mandatory Content-Type
	// below always wins.
	for name, value := range o.customHeaders {
		request.Header.Set(name, value)
	}
	request.Header.Set("Content-Type", "application/json")

	response, err := http.DefaultClient.Do(request)
	if err != nil {
		return zero, -1, err
	}
	defer response.Body.Close()

	if response.StatusCode != http.StatusOK {
		return zero, response.StatusCode, errors.New("status code is not 200")
	}

	var body GenerateEmbeddingsResponse
	if err := json.NewDecoder(response.Body).Decode(&body); err != nil {
		return zero, -1, err
	}
	return body, response.StatusCode, nil
}

93
generate_response.go Normal file
View File

@@ -0,0 +1,93 @@
package ollama
import (
"bytes"
"encoding/json"
"errors"
"fmt"
"net/http"
)
// GenerateResponseRequest is the JSON body for the Ollama "generate"
// (completion) endpoint.
type GenerateResponseRequest struct {
	Model  string `json:"model"`
	Prompt string `json:"prompt,omitempty"`
	// Suffix is text to place after the generated completion.
	Suffix string `json:"suffix,omitempty"`
	// Images attached to the prompt — presumably base64-encoded;
	// TODO confirm against the server docs.
	Images []string `json:"images,omitempty"`
	Format string   `json:"format,omitempty"`
	System string   `json:"system,omitempty"`
	// Pointer fields below are omitted from the JSON when nil; use
	// PtrOf to set them from literals.
	Stream    *bool  `json:"stream,omitempty"`
	Raw       *bool  `json:"raw,omitempty"`
	KeepAlive string `json:"keep_alive,omitempty"`
	// Options overrides model sampling parameters for this request.
	Options     *GenerateResponseRequestOptions `json:"options,omitempty"`
	Logprobs    *bool                           `json:"logprobs,omitempty"`
	TopLogprobs *int                            `json:"top_logprobs,omitempty"`
}

// GenerateResponseRequestOptions holds per-request sampling
// overrides; nil pointer fields are omitted from the JSON.
type GenerateResponseRequestOptions struct {
	Seed        *int     `json:"seed,omitempty"`
	Temperature *float32 `json:"temperature,omitempty"`
	TopK        *int     `json:"top_k,omitempty"`
	TopP        *float32 `json:"top_p,omitempty"`
	MinP        *float32 `json:"min_p,omitempty"`
	Stop        []string `json:"stop,omitempty"`
	NumCtx      *int     `json:"num_ctx,omitempty"`
	NumPredict  *int     `json:"num_predict,omitempty"`
}
type GenerateResponseResponse struct {
Model string `json:"model"`
CreatedAt string `json:"created_at"`
Response string `json:"response"`
Thinking string `json:"thinking"`
Done bool `json:"done"`
DoneReason string `json:"done_reason"`
TotalDuration int `json:"total_duration"`
LoadDuration int `json:"load_duration"`
PromptEvalCount int `json:"prompt_eval_count"`
PromptEvalDuration int `json:"prompt_eval_duration"`
EvalCount int `json:"eval_count"`
EvalDuration int `json:"eval_duration"`
Logprobs []struct {
Token string `json:"token"`
Logprob int `json:"logprob"`
Bytes []int `json:"bytes"`
TopLogprobs []struct {
Token string `json:"token"`
Logprob int `json:"logprob"`
Bytes []int `json:"bytes"`
} `json:"top_logprobs"`
} `json:"logprobs"`
}
// GenerateResponse posts a completion request to the "generate"
// endpoint and decodes the JSON reply. It returns the decoded body,
// the HTTP status code (-1 when no response was obtained), and an
// error when the call fails or the status is not 200.
func (o Ollama) GenerateResponse(reqBody GenerateResponseRequest) (GenerateResponseResponse, int, error) {
	var zero GenerateResponseResponse

	payload, err := json.Marshal(reqBody)
	if err != nil {
		return zero, -1, err
	}

	endpoint := fmt.Sprintf("%s/generate", o.baseUrl)
	request, err := http.NewRequest(http.MethodPost, endpoint, bytes.NewReader(payload))
	if err != nil {
		return zero, -1, err
	}
	// Caller-supplied headers go first so the mandatory Content-Type
	// below always wins.
	for name, value := range o.customHeaders {
		request.Header.Set(name, value)
	}
	request.Header.Set("Content-Type", "application/json")

	response, err := http.DefaultClient.Do(request)
	if err != nil {
		return zero, -1, err
	}
	defer response.Body.Close()

	if response.StatusCode != http.StatusOK {
		return zero, response.StatusCode, errors.New("status code is not 200")
	}

	var body GenerateResponseResponse
	if err := json.NewDecoder(response.Body).Decode(&body); err != nil {
		return zero, -1, err
	}
	return body, response.StatusCode, nil
}

39
get_version.go Normal file
View File

@@ -0,0 +1,39 @@
package ollama
import (
"encoding/json"
"errors"
"fmt"
"net/http"
)
// GetVersionResponse is the decoded reply from the Ollama "version"
// endpoint.
type GetVersionResponse struct {
	Version string `json:"version"`
}
// GetVersion fetches the server version from the "version" endpoint.
// It returns the decoded body, the HTTP status code (-1 when no
// response was obtained), and an error when the call fails or the
// status is not 200.
func (o Ollama) GetVersion() (GetVersionResponse, int, error) {
	var zero GetVersionResponse

	request, err := http.NewRequest(http.MethodGet, fmt.Sprintf("%s/version", o.baseUrl), nil)
	if err != nil {
		return zero, -1, err
	}
	for name, value := range o.customHeaders {
		request.Header.Set(name, value)
	}

	response, err := http.DefaultClient.Do(request)
	if err != nil {
		return zero, -1, err
	}
	defer response.Body.Close()

	if response.StatusCode != http.StatusOK {
		return zero, response.StatusCode, errors.New("status code is not 200")
	}

	var body GetVersionResponse
	if err := json.NewDecoder(response.Body).Decode(&body); err != nil {
		return zero, -1, err
	}
	return body, response.StatusCode, nil
}

3
go.mod Normal file
View File

@@ -0,0 +1,3 @@
module git.trcreatives.at/trcreatives/go-ollama
go 1.25.6

54
list_models.go Normal file
View File

@@ -0,0 +1,54 @@
package ollama
import (
"encoding/json"
"errors"
"fmt"
"net/http"
)
// ListModelsResponse is the decoded reply from the Ollama "tags"
// endpoint: one entry per locally available model.
type ListModelsResponse struct {
	Models []struct {
		Name        string `json:"name"`
		Model       string `json:"model"`
		RemoteModel string `json:"remote_model"`
		RemoteHost  string `json:"remote_host"`
		ModifiedAt  string `json:"modified_at"`
		// Size — presumably bytes on disk; TODO confirm against the
		// server docs.
		Size   int    `json:"size"`
		Digest string `json:"digest"`
		// Details describes the model file format and quantization.
		Details struct {
			Format            string   `json:"format"`
			Family            string   `json:"family"`
			Families          []string `json:"families"`
			ParameterSize     string   `json:"parameter_size"`
			QuantizationLevel string   `json:"quantization_level"`
		} `json:"details"`
	} `json:"models"`
}
// ListModels fetches the locally available models from the "tags"
// endpoint. It returns the decoded body, the HTTP status code (-1
// when no response was obtained), and an error when the call fails
// or the status is not 200.
func (o Ollama) ListModels() (ListModelsResponse, int, error) {
	var zero ListModelsResponse

	request, err := http.NewRequest(http.MethodGet, fmt.Sprintf("%s/tags", o.baseUrl), nil)
	if err != nil {
		return zero, -1, err
	}
	for name, value := range o.customHeaders {
		request.Header.Set(name, value)
	}

	response, err := http.DefaultClient.Do(request)
	if err != nil {
		return zero, -1, err
	}
	defer response.Body.Close()

	if response.StatusCode != http.StatusOK {
		return zero, response.StatusCode, errors.New("status code is not 200")
	}

	var body ListModelsResponse
	if err := json.NewDecoder(response.Body).Decode(&body); err != nil {
		return zero, -1, err
	}
	return body, response.StatusCode, nil
}

54
list_running_models.go Normal file
View File

@@ -0,0 +1,54 @@
package ollama
import (
"encoding/json"
"errors"
"fmt"
"net/http"
)
type ListRunningModelsResponse struct {
Models []struct {
Name string `json:"name"`
Model string `json:"model"`
Size int `json:"size"`
Digest string `json:"digest"`
Details struct {
Format string `json:"format"`
Family string `json:"family"`
Families []string `json:"families"`
ParameterSize string `json:"parameter_size"`
QuantizationLevel string `json:"quantization_level"`
} `json:"details"`
ExpiresAt string `json:"expires_at"`
SizeVram string `json:"size_vram"`
ContextLength int `json:"context_length"`
} `json:"models"`
}
// ListRunningModels fetches the currently loaded models from the "ps"
// endpoint. It returns the decoded body, the HTTP status code (-1
// when no response was obtained), and an error when the call fails
// or the status is not 200.
func (o Ollama) ListRunningModels() (ListRunningModelsResponse, int, error) {
	var zero ListRunningModelsResponse

	request, err := http.NewRequest(http.MethodGet, fmt.Sprintf("%s/ps", o.baseUrl), nil)
	if err != nil {
		return zero, -1, err
	}
	for name, value := range o.customHeaders {
		request.Header.Set(name, value)
	}

	response, err := http.DefaultClient.Do(request)
	if err != nil {
		return zero, -1, err
	}
	defer response.Body.Close()

	if response.StatusCode != http.StatusOK {
		return zero, response.StatusCode, errors.New("status code is not 200")
	}

	var body ListRunningModelsResponse
	if err := json.NewDecoder(response.Body).Decode(&body); err != nil {
		return zero, -1, err
	}
	return body, response.StatusCode, nil
}

14
ollama.go Normal file
View File

@@ -0,0 +1,14 @@
package ollama
// Ollama is a minimal client for an Ollama HTTP API. baseUrl is the
// API root every request path is appended to; customHeaders are set
// on every outgoing request.
type Ollama struct {
	baseUrl       string
	customHeaders map[string]string
}

// New returns a client that talks to the API rooted at baseUrl, with
// an empty custom-header set.
func New(baseUrl string) *Ollama {
	client := Ollama{
		baseUrl:       baseUrl,
		customHeaders: map[string]string{},
	}
	return &client
}

// SetCustomHeaders replaces the set of headers added to every request.
func (o *Ollama) SetCustomHeaders(customHeaders map[string]string) {
	o.customHeaders = customHeaders
}

52
pull_model.go Normal file
View File

@@ -0,0 +1,52 @@
package ollama
import (
"bytes"
"encoding/json"
"errors"
"fmt"
"net/http"
)
// PullModelRequest is the JSON body for the Ollama "pull" endpoint.
type PullModelRequest struct {
	// Model is serialized as "name" — NOTE(review): current Ollama
	// docs use the "model" key for this field; confirm the target
	// server still accepts the legacy "name" key.
	Model string `json:"name"`
	// Insecure presumably permits pulls over insecure connections —
	// TODO confirm against the server docs.
	Insecure *bool `json:"insecure,omitempty"`
	Stream   *bool `json:"stream,omitempty"`
}

// PullModelResponse is the decoded reply from the "pull" endpoint.
type PullModelResponse struct {
	Status string `json:"status"`
}
// PullModel downloads a model via the "pull" endpoint and decodes the
// JSON reply. It returns the decoded body, the HTTP status code (-1
// when no response was obtained), and an error when the call fails or
// the status is not 200.
func (o Ollama) PullModel(reqBody PullModelRequest) (PullModelResponse, int, error) {
	var zero PullModelResponse

	payload, err := json.Marshal(reqBody)
	if err != nil {
		return zero, -1, err
	}

	endpoint := fmt.Sprintf("%s/pull", o.baseUrl)
	request, err := http.NewRequest(http.MethodPost, endpoint, bytes.NewReader(payload))
	if err != nil {
		return zero, -1, err
	}
	// Caller-supplied headers go first so the mandatory Content-Type
	// below always wins.
	for name, value := range o.customHeaders {
		request.Header.Set(name, value)
	}
	request.Header.Set("Content-Type", "application/json")

	response, err := http.DefaultClient.Do(request)
	if err != nil {
		return zero, -1, err
	}
	defer response.Body.Close()

	if response.StatusCode != http.StatusOK {
		return zero, response.StatusCode, errors.New("status code is not 200")
	}

	var body PullModelResponse
	if err := json.NewDecoder(response.Body).Decode(&body); err != nil {
		return zero, -1, err
	}
	return body, response.StatusCode, nil
}

52
push_model.go Normal file
View File

@@ -0,0 +1,52 @@
package ollama
import (
"bytes"
"encoding/json"
"errors"
"fmt"
"net/http"
)
// PushModelRequest is the JSON body for the Ollama "push" endpoint.
type PushModelRequest struct {
	// Model is serialized as "name" — NOTE(review): current Ollama
	// docs use the "model" key for this field; confirm the target
	// server still accepts the legacy "name" key.
	Model string `json:"name"`
	// Insecure presumably permits pushes over insecure connections —
	// TODO confirm against the server docs.
	Insecure *bool `json:"insecure,omitempty"`
	Stream   *bool `json:"stream,omitempty"`
}

// PushModelResponse is the decoded reply from the "push" endpoint.
type PushModelResponse struct {
	Status string `json:"status"`
}
// PushModel uploads a model to a registry via the "push" endpoint and
// decodes the JSON reply. It returns the decoded body, the HTTP
// status code (-1 when no response was obtained), and an error when
// the call fails or the status is not 200.
func (o Ollama) PushModel(reqBody PushModelRequest) (PushModelResponse, int, error) {
	reqBodyBytes, err := json.Marshal(reqBody)
	if err != nil {
		return PushModelResponse{}, -1, err
	}
	// BUG FIX: this previously posted to "%s/pull" (copy-paste from
	// PullModel); pushing a model lives at "%s/push".
	req, err := http.NewRequest(http.MethodPost, fmt.Sprintf("%s/push", o.baseUrl), bytes.NewReader(reqBodyBytes))
	if err != nil {
		return PushModelResponse{}, -1, err
	}
	// Caller-supplied headers first; the mandatory Content-Type below
	// overrides any caller value.
	for key, val := range o.customHeaders {
		req.Header.Set(key, val)
	}
	req.Header.Set("Content-Type", "application/json")
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return PushModelResponse{}, -1, err
	}
	defer resp.Body.Close()
	if resp.StatusCode != 200 {
		return PushModelResponse{}, resp.StatusCode, errors.New("status code is not 200")
	}
	var respBody PushModelResponse
	if err := json.NewDecoder(resp.Body).Decode(&respBody); err != nil {
		return PushModelResponse{}, -1, err
	}
	return respBody, resp.StatusCode, nil
}

63
show_model_details.go Normal file
View File

@@ -0,0 +1,63 @@
package ollama
import (
"bytes"
"encoding/json"
"errors"
"fmt"
"net/http"
)
// ShowModelDetailsRequest is the JSON body for the Ollama "show"
// endpoint.
type ShowModelDetailsRequest struct {
	Model string `json:"model"`
	// Verbose presumably requests the full, untruncated model info —
	// TODO confirm against the server docs.
	Verbose *bool `json:"verbose,omitempty"`
}

// ShowModelDetailsResponse is the decoded reply from the "show"
// endpoint.
type ShowModelDetailsResponse struct {
	Parameters string `json:"parameters"`
	License    string `json:"license"`
	ModifiedAt string `json:"modified_at"`
	// Details describes the model file format and quantization.
	Details struct {
		Format            string   `json:"format"`
		Family            string   `json:"family"`
		Families          []string `json:"families"`
		ParameterSize     string   `json:"parameter_size"`
		QuantizationLevel string   `json:"quantization_level"`
	} `json:"details"`
	Template     string   `json:"template"`
	Capabilities []string `json:"capabilities"`
	// ModelInfo is kept as a free-form map because its keys vary by
	// model family.
	ModelInfo map[string]any `json:"model_info"`
}
// ShowModelDetails fetches metadata for one model via the "show"
// endpoint and decodes the JSON reply. It returns the decoded body,
// the HTTP status code (-1 when no response was obtained), and an
// error when the call fails or the status is not 200.
func (o Ollama) ShowModelDetails(reqBody ShowModelDetailsRequest) (ShowModelDetailsResponse, int, error) {
	var zero ShowModelDetailsResponse

	payload, err := json.Marshal(reqBody)
	if err != nil {
		return zero, -1, err
	}

	endpoint := fmt.Sprintf("%s/show", o.baseUrl)
	request, err := http.NewRequest(http.MethodPost, endpoint, bytes.NewReader(payload))
	if err != nil {
		return zero, -1, err
	}
	// Caller-supplied headers go first so the mandatory Content-Type
	// below always wins.
	for name, value := range o.customHeaders {
		request.Header.Set(name, value)
	}
	request.Header.Set("Content-Type", "application/json")

	response, err := http.DefaultClient.Do(request)
	if err != nil {
		return zero, -1, err
	}
	defer response.Body.Close()

	if response.StatusCode != http.StatusOK {
		return zero, response.StatusCode, errors.New("status code is not 200")
	}

	var body ShowModelDetailsResponse
	if err := json.NewDecoder(response.Body).Decode(&body); err != nil {
		return zero, -1, err
	}
	return body, response.StatusCode, nil
}

5
utils.go Normal file
View File

@@ -0,0 +1,5 @@
package ollama
// PtrOf returns a pointer to a copy of v. It exists to fill the
// optional pointer fields on the request structs from literals.
func PtrOf[T any](v T) *T {
	copied := v
	return &copied
}