go-ollama: switched the development environment to a dev container and added streaming support to the chat methods (v0.1.1)
This commit is contained in:
@@ -1,6 +1,7 @@
|
||||
package ollama
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
@@ -96,15 +97,34 @@ type GenerateChatMessageResponse struct {
|
||||
} `json:"logprobs"`
|
||||
}
|
||||
|
||||
func (o Ollama) GenerateChatMessage(reqBody GenerateChatMessageRequest) (GenerateChatMessageResponse, int, error) {
|
||||
// GenerateChatMessageResponseStream is a single chunk of a streamed chat
// completion as emitted by the Ollama /chat endpoint.
type GenerateChatMessageResponseStream struct {
	Model     string `json:"model"`
	CreatedAt string `json:"created_at"`
	// Message carries the incremental payload delivered by this chunk.
	Message struct {
		Role     string `json:"role"`
		Content  string `json:"content"`
		Thinking string `json:"thinking"`
		// ToolCalls lists tool invocations requested by the model, if any.
		ToolCalls []struct {
			Function struct {
				Name        string         `json:"name"`
				Description string         `json:"description"`
				Arguments   map[string]any `json:"arguments"`
			} `json:"function"`
		} `json:"tool_calls"`
		Images []string `json:"images"`
	} `json:"message"`
	// Done reports whether this is the final chunk of the stream.
	Done bool `json:"done"`
}
|
||||
|
||||
func (o Ollama) GenerateChatMessage(reqBody GenerateChatMessageRequest) (GenerateChatMessageResponse, error) {
|
||||
reqBodyBytes, err := json.Marshal(reqBody)
|
||||
if err != nil {
|
||||
return GenerateChatMessageResponse{}, -1, err
|
||||
return GenerateChatMessageResponse{}, err
|
||||
}
|
||||
|
||||
req, err := http.NewRequest(http.MethodPost, fmt.Sprintf("%s/chat", o.baseUrl), bytes.NewReader(reqBodyBytes))
|
||||
if err != nil {
|
||||
return GenerateChatMessageResponse{}, -1, err
|
||||
return GenerateChatMessageResponse{}, err
|
||||
}
|
||||
|
||||
for key, val := range o.customHeaders {
|
||||
@@ -114,17 +134,68 @@ func (o Ollama) GenerateChatMessage(reqBody GenerateChatMessageRequest) (Generat
|
||||
|
||||
resp, err := http.DefaultClient.Do(req)
|
||||
if err != nil {
|
||||
return GenerateChatMessageResponse{}, -1, err
|
||||
return GenerateChatMessageResponse{}, err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
if resp.StatusCode != 200 {
|
||||
return GenerateChatMessageResponse{}, resp.StatusCode, errors.New("status code is not 200")
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
return GenerateChatMessageResponse{}, errors.New("status code is not 200")
|
||||
}
|
||||
|
||||
var respBody GenerateChatMessageResponse
|
||||
if err := json.NewDecoder(resp.Body).Decode(&respBody); err != nil {
|
||||
return GenerateChatMessageResponse{}, -1, err
|
||||
return GenerateChatMessageResponse{}, err
|
||||
}
|
||||
return respBody, resp.StatusCode, nil
|
||||
return respBody, nil
|
||||
}
|
||||
|
||||
func (o Ollama) GenerateChatMessageStream(reqBody GenerateChatMessageRequest, onChunk func(chunk GenerateChatMessageResponseStream)) error {
|
||||
reqBody.Stream = PtrOf(true)
|
||||
|
||||
reqBodyBytes, err := json.Marshal(reqBody)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
req, err := http.NewRequest(http.MethodPost, fmt.Sprintf("%s/chat", o.baseUrl), bytes.NewReader(reqBodyBytes))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for key, val := range o.customHeaders {
|
||||
req.Header.Set(key, val)
|
||||
}
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
|
||||
resp, err := http.DefaultClient.Do(req)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
return errors.New("status code is not 200")
|
||||
}
|
||||
|
||||
scanner := bufio.NewScanner(resp.Body)
|
||||
|
||||
for scanner.Scan() {
|
||||
line := bytes.TrimSpace(scanner.Bytes())
|
||||
|
||||
var chunk GenerateChatMessageResponseStream
|
||||
if err := json.Unmarshal(line, &chunk); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
onChunk(chunk)
|
||||
if chunk.Done {
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
if err := scanner.Err(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user