aboutsummaryrefslogtreecommitdiff
path: root/web
diff options
context:
space:
mode:
authorXe Iaso <me@xeiaso.net>2023-12-14 20:29:48 -0500
committerXe Iaso <me@xeiaso.net>2023-12-14 20:29:57 -0500
commit1eb8488ae23e46bb217e2026fdae7b9a905763d3 (patch)
treeae6a4b1fa7c10265f8a91e1048e0bf4451cadc3a /web
parent22e6827eefc3e9497017c629828a2576a5c72a0d (diff)
downloadx-1eb8488ae23e46bb217e2026fdae7b9a905763d3.tar.xz
x-1eb8488ae23e46bb217e2026fdae7b9a905763d3.zip
web: add mistral and ollama clients
Signed-off-by: Xe Iaso <me@xeiaso.net>
Diffstat (limited to 'web')
-rw-r--r--web/mistral/mistral.go98
-rw-r--r--web/ollama/ollama.go80
2 files changed, 178 insertions, 0 deletions
diff --git a/web/mistral/mistral.go b/web/mistral/mistral.go
new file mode 100644
index 0000000..3f2d9e7
--- /dev/null
+++ b/web/mistral/mistral.go
@@ -0,0 +1,98 @@
+package mistral
+
+import (
+ "bytes"
+ "context"
+ "encoding/json"
+ "fmt"
+ "net/http"
+
+ "within.website/x/llm"
+ "within.website/x/web"
+)
+
// Client is an HTTP client for the Mistral AI chat completions API.
// It embeds *http.Client and attaches the API key to every request
// made through Do.
type Client struct {
	*http.Client
	apiKey string // bearer token sent in the Authorization header
}
+
+func NewClient(apiKey string) *Client {
+ return &Client{
+ Client: &http.Client{},
+ apiKey: apiKey,
+ }
+}
+
+func (c *Client) Do(req *http.Request) (*http.Response, error) {
+ req.Header.Set("Authorization", "Bearer "+c.apiKey)
+ return c.Client.Do(req)
+}
+
// CompleteRequest is the request body for the Mistral chat completions
// endpoint. Optional fields are pointers so that unset values are
// omitted from the encoded JSON instead of being sent as zero values.
type CompleteRequest struct {
	Model       string        `json:"model"`
	Messages    []llm.Message `json:"messages"`
	Temperature *float64      `json:"temperature,omitempty"`
	TopP        *float64      `json:"top_p,omitempty"`
	MaxTokens   *int          `json:"max_tokens,omitempty"`
	Stream      *bool         `json:"stream,omitempty"`
	SafeMode    *bool         `json:"safe_mode,omitempty"`
	RandomSeed  *int          `json:"random_seed,omitempty"`
}
+
// CompleteResponse is the response body returned by the Mistral chat
// completions endpoint.
type CompleteResponse struct {
	ID      string             `json:"id"`
	Object  string             `json:"object"`
	Created int64              `json:"created"` // creation time (presumably unix seconds — confirm against API docs)
	Model   string             `json:"model"`
	Choices []CompletionChoice `json:"choices"`
	Usage   UsageInfo          `json:"usage"`
}
+
+type CompletionChoice struct {
+ Index int `json:"index"`
+ Message []Message `json:"message"`
+ FinishReason string `json:"finish_reason"`
+}
+
+type Message struct {
+ Content string `json:"content"`
+ Role string `json:"role"`
+}
+
// UsageInfo reports token accounting for a completion request.
type UsageInfo struct {
	PromptTokens     int `json:"prompt_tokens"`
	CompletionTokens int `json:"completion_tokens"`
	TotalTokens      int `json:"total_tokens"`
}
+
+func (c *Client) Chat(ctx context.Context, req *CompleteRequest) (*CompleteResponse, error) {
+ var data bytes.Buffer
+ if err := json.NewEncoder(&data).Encode(req); err != nil {
+ return nil, fmt.Errorf("mistral: error encoding request: %w", err)
+ }
+
+ r, err := http.NewRequestWithContext(ctx, http.MethodPost, "https://api.mistral.chat/v1/chat/completions", &data)
+ if err != nil {
+ return nil, fmt.Errorf("mistral: error creating request: %w", err)
+ }
+
+ r.Header.Set("Content-Type", "application/json")
+ r.Header.Set("Accept", "application/json")
+
+ resp, err := c.Do(r)
+ if err != nil {
+ return nil, fmt.Errorf("mistral: error sending request: %w", err)
+ }
+ defer resp.Body.Close()
+
+ if resp.StatusCode != http.StatusOK {
+ return nil, web.NewError(http.StatusOK, resp)
+ }
+
+ var res CompleteResponse
+ if err := json.NewDecoder(resp.Body).Decode(&res); err != nil {
+ return nil, fmt.Errorf("mistral: error decoding response: %w", err)
+ }
+
+ return &res, nil
+}
diff --git a/web/ollama/ollama.go b/web/ollama/ollama.go
new file mode 100644
index 0000000..0f479cc
--- /dev/null
+++ b/web/ollama/ollama.go
@@ -0,0 +1,80 @@
+package ollama
+
+import (
+ "bytes"
+ "context"
+ "encoding/json"
+ "fmt"
+ "net/http"
+ "time"
+)
+
// Client talks to an Ollama server over HTTP.
type Client struct {
	baseURL string // server root, e.g. "http://localhost:11434" (no trailing slash)
}
+
+func NewClient(baseURL string) *Client {
+ return &Client{
+ baseURL: baseURL,
+ }
+}
+
+func NewLocalClient() *Client {
+ return NewClient("http://localhost:11434")
+}
+
+type Message struct {
+ Content string `json:"content"`
+ Role string `json:"role"`
+ Images [][]byte `json:"images"`
+}
+
+type CompleteRequest struct {
+ Model string `json:"model"`
+ Messages Message `json:"messages"`
+ Format *string `json:"format,omitempty"`
+ Template *string `json:"template,omitempty"`
+ Stream bool `json:"stream,omitempty"`
+ Options map[string]any `json:"options"`
+}
+
// CompleteResponse is the response body from Ollama's chat endpoint.
type CompleteResponse struct {
	Model     string    `json:"model"`
	CreatedAt time.Time `json:"created_at"`
	Message   Message   `json:"message"`
	Done      bool      `json:"done"`
	// NOTE(review): the duration fields are presumably nanosecond counts —
	// confirm against the Ollama API docs; float64 vs int64 here looks
	// inconsistent but both decode JSON numbers.
	TotalDuration      float64 `json:"total_duration"`
	LoadDuration       float64 `json:"load_duration"`
	PromptEvalCount    int64   `json:"prompt_eval_count"`
	PromptEvalDuration int64   `json:"prompt_eval_duration"`
	EvalCount          int64   `json:"eval_count"`
	EvalDuration       int64   `json:"eval_duration"`
}
+
+func (c *Client) Chat(ctx context.Context, inp *CompleteRequest) (*CompleteResponse, error) {
+ buf := &bytes.Buffer{}
+ if err := json.NewEncoder(buf).Encode(inp); err != nil {
+ return nil, fmt.Errorf("ollama: error encoding request: %w", err)
+ }
+
+ req, err := http.NewRequestWithContext(ctx, http.MethodPost, c.baseURL+"/chat", buf)
+ if err != nil {
+ return nil, fmt.Errorf("ollama: error creating request: %w", err)
+ }
+
+ req.Header.Set("Content-Type", "application/json")
+ req.Header.Set("Accept", "application/json")
+
+ resp, err := http.DefaultClient.Do(req)
+ if err != nil {
+ return nil, fmt.Errorf("ollama: error making request: %w", err)
+ }
+ defer resp.Body.Close()
+
+ var result CompleteResponse
+ if err := json.NewDecoder(resp.Body).Decode(&result); err != nil {
+ return nil, fmt.Errorf("ollama: error decoding response: %w", err)
+ }
+
+ return &result, nil
+}