go-ollama: first release v0.1.0
This commit is contained in:
130
generate_chat_message.go
Normal file
130
generate_chat_message.go
Normal file
@@ -0,0 +1,130 @@
|
||||
package ollama
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"net/http"
|
||||
)
|
||||
|
||||
// GenerateChatMessageRequest is the JSON request body sent to the Ollama
// chat endpoint (POST {baseUrl}/chat) by GenerateChatMessage.
type GenerateChatMessageRequest struct {
	// Model is the name of the model to run the chat with.
	Model string `json:"model"`
	// Messages is the conversation history, oldest first.
	Messages []GenerateChatMessageRequestMessage `json:"messages"`
	// Tools the model may call, if any.
	Tools []GenerateChatMessageRequestTool `json:"tools,omitempty"`
	// Format constrains the output format — presumably "json" or a schema
	// per the Ollama API; confirm against the Ollama docs.
	Format string `json:"format,omitempty"`
	// Options holds model sampling/runtime parameters; nil uses server defaults.
	Options *GenerateChatMessageRequestOptions `json:"options,omitempty"`
	// Stream toggles streaming; nil leaves the server default.
	// NOTE(review): GenerateChatMessage decodes a single JSON object, so
	// callers presumably must set this to false — confirm.
	Stream *bool `json:"stream,omitempty"`
	// Think enables model "thinking" output (see the response Thinking field).
	Think *bool `json:"think,omitempty"`
	// KeepAlive controls how long the model stays loaded — presumably a
	// duration string like "5m"; confirm against the Ollama docs.
	KeepAlive string `json:"keep_alive,omitempty"`
	// Logprobs requests per-token log probabilities in the response.
	Logprobs *bool `json:"logprobs,omitempty"`
	// TopLogprobs is how many alternative tokens to return per position.
	TopLogprobs *int `json:"top_logprobs,omitempty"`
}
|
||||
|
||||
// GenerateChatMessageRequestMessage is a single message in the chat
// history of a GenerateChatMessageRequest.
type GenerateChatMessageRequestMessage struct {
	// Role of the message author — presumably "system", "user",
	// "assistant", or "tool"; confirm against the Ollama docs.
	Role string `json:"role"`
	// Content is the message text.
	Content string `json:"content"`
	// Images attached to the message — presumably base64-encoded per the
	// Ollama API; confirm.
	Images []string `json:"images,omitempty"`
	// ToolCalls echoes tool invocations previously made by the assistant.
	ToolCalls []GenerateChatMessageRequestMessageToolCall `json:"tool_calls,omitempty"`
}
|
||||
|
||||
// GenerateChatMessageRequestMessageToolCall is one tool invocation inside
// a chat message (see GenerateChatMessageRequestMessage.ToolCalls).
type GenerateChatMessageRequestMessageToolCall struct {
	// Function describes the function being called; nil when absent.
	Function *GenerateChatMessageRequestMessageToolCallFunction `json:"function,omitempty"`
}
|
||||
|
||||
// GenerateChatMessageRequestMessageToolCallFunction identifies the function
// of a tool call and the arguments it was invoked with.
type GenerateChatMessageRequestMessageToolCallFunction struct {
	// Name of the called function.
	Name string `json:"name"`
	// Description of the function, if any.
	Description string `json:"description,omitempty"`
	// Arguments passed to the function, as free-form JSON key/values.
	Arguments map[string]any `json:"arguments,omitempty"`
}
|
||||
|
||||
// GenerateChatMessageRequestTool declares one tool the model is allowed to
// call (see GenerateChatMessageRequest.Tools).
type GenerateChatMessageRequestTool struct {
	// Type of the tool — presumably always "function" per the Ollama API;
	// confirm.
	Type string `json:"type"`
	// Function is the callable's declaration.
	Function GenerateChatMessageRequestToolFunction `json:"function"`
}
|
||||
|
||||
// GenerateChatMessageRequestToolFunction declares a callable function:
// its name, what it does, and the schema of its parameters.
type GenerateChatMessageRequestToolFunction struct {
	// Name of the function.
	Name string `json:"name"`
	// Description tells the model what the function does.
	Description string `json:"description"`
	// Parameters is the argument schema — presumably JSON Schema shaped;
	// confirm against the Ollama docs.
	Parameters map[string]any `json:"parameters,omitempty"`
}
|
||||
|
||||
// GenerateChatMessageRequestOptions holds optional model runtime/sampling
// parameters. All fields are pointers (or slices) so that an unset field
// is omitted and the server default applies.
type GenerateChatMessageRequestOptions struct {
	// Seed for reproducible sampling.
	Seed *int `json:"seed,omitempty"`
	// Temperature controls sampling randomness.
	Temperature *float32 `json:"temperature,omitempty"`
	// TopK limits sampling to the K most likely tokens.
	TopK *int `json:"top_k,omitempty"`
	// TopP is the nucleus-sampling probability mass cutoff.
	TopP *float32 `json:"top_p,omitempty"`
	// MinP is the minimum token probability relative to the most likely token.
	MinP *float32 `json:"min_p,omitempty"`
	// Stop lists sequences that terminate generation.
	Stop []string `json:"stop,omitempty"`
	// NumCtx sets the context window size in tokens.
	NumCtx *int `json:"num_ctx,omitempty"`
	// NumPredict caps the number of tokens to generate.
	NumPredict *int `json:"num_predict,omitempty"`
}
|
||||
|
||||
// GenerateChatMessageResponse is the decoded JSON response from the Ollama
// chat endpoint (see GenerateChatMessage).
//
// Fix: the Logprob fields were declared as int, but log probabilities are
// fractional JSON numbers (e.g. -0.1234), so json decoding failed on any
// response that actually carried logprobs; they are now float64. The nested
// anonymous structs were also lifted into named types, matching the style
// of the request-side types in this file.
type GenerateChatMessageResponse struct {
	// Model that produced the response.
	Model string `json:"model"`
	// CreatedAt is the server-side creation timestamp, kept as the raw string.
	CreatedAt string `json:"created_at"`
	// Message is the generated chat message.
	Message GenerateChatMessageResponseMessage `json:"message"`
	// Done reports whether generation finished.
	Done bool `json:"done"`
	// DoneReason explains why generation stopped.
	DoneReason string `json:"done_reason"`
	// Timing/accounting fields — presumably durations in nanoseconds and
	// token counts, per the Ollama API; confirm.
	TotalDuration      int `json:"total_duration"`
	LoadDuration       int `json:"load_duration"`
	PromptEvalCount    int `json:"prompt_eval_count"`
	PromptEvalDuration int `json:"prompt_eval_duration"`
	EvalCount          int `json:"eval_count"`
	EvalDuration       int `json:"eval_duration"`
	// Logprobs holds per-token log probabilities when requested.
	Logprobs []GenerateChatMessageResponseLogprob `json:"logprobs"`
}

// GenerateChatMessageResponseMessage is the assistant message returned by
// the chat endpoint.
type GenerateChatMessageResponseMessage struct {
	// Role of the author — the assistant, for generated messages.
	Role string `json:"role"`
	// Content is the generated text.
	Content string `json:"content"`
	// Thinking is the model's reasoning output when thinking is enabled.
	Thinking string `json:"thinking"`
	// ToolCalls lists tool invocations requested by the model.
	ToolCalls []GenerateChatMessageResponseMessageToolCall `json:"tool_calls"`
	// Images attached to the message, if any.
	Images []string `json:"images"`
}

// GenerateChatMessageResponseMessageToolCall is one tool invocation
// requested by the model.
type GenerateChatMessageResponseMessageToolCall struct {
	// Function identifies the function to call and its arguments.
	Function GenerateChatMessageResponseMessageToolCallFunction `json:"function"`
}

// GenerateChatMessageResponseMessageToolCallFunction names the function the
// model wants called and carries its arguments.
type GenerateChatMessageResponseMessageToolCallFunction struct {
	Name        string         `json:"name"`
	Description string         `json:"description"`
	Arguments   map[string]any `json:"arguments"`
}

// GenerateChatMessageResponseLogprob is the log probability of one
// generated token, with optional alternatives.
type GenerateChatMessageResponseLogprob struct {
	// Token is the generated token text.
	Token string `json:"token"`
	// Logprob is the token's log probability (a fractional value <= 0).
	Logprob float64 `json:"logprob"`
	// Bytes is the token's raw byte values.
	Bytes []int `json:"bytes"`
	// TopLogprobs lists the most likely alternative tokens at this position.
	TopLogprobs []GenerateChatMessageResponseTopLogprob `json:"top_logprobs"`
}

// GenerateChatMessageResponseTopLogprob is one alternative token candidate
// and its log probability.
type GenerateChatMessageResponseTopLogprob struct {
	Token   string  `json:"token"`
	Logprob float64 `json:"logprob"`
	Bytes   []int   `json:"bytes"`
}
|
||||
|
||||
func (o Ollama) GenerateChatMessage(reqBody GenerateChatMessageRequest) (GenerateChatMessageResponse, int, error) {
|
||||
reqBodyBytes, err := json.Marshal(reqBody)
|
||||
if err != nil {
|
||||
return GenerateChatMessageResponse{}, -1, err
|
||||
}
|
||||
|
||||
req, err := http.NewRequest(http.MethodPost, fmt.Sprintf("%s/chat", o.baseUrl), bytes.NewReader(reqBodyBytes))
|
||||
if err != nil {
|
||||
return GenerateChatMessageResponse{}, -1, err
|
||||
}
|
||||
|
||||
for key, val := range o.customHeaders {
|
||||
req.Header.Set(key, val)
|
||||
}
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
|
||||
resp, err := http.DefaultClient.Do(req)
|
||||
if err != nil {
|
||||
return GenerateChatMessageResponse{}, -1, err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
if resp.StatusCode != 200 {
|
||||
return GenerateChatMessageResponse{}, resp.StatusCode, errors.New("status code is not 200")
|
||||
}
|
||||
|
||||
var respBody GenerateChatMessageResponse
|
||||
if err := json.NewDecoder(resp.Body).Decode(&respBody); err != nil {
|
||||
return GenerateChatMessageResponse{}, -1, err
|
||||
}
|
||||
return respBody, resp.StatusCode, nil
|
||||
}
|
||||
Reference in New Issue
Block a user