author    Xe Iaso <me@xeiaso.net>  2023-12-14 22:54:32 -0500
committer Xe Iaso <me@xeiaso.net>  2024-01-30 18:28:29 -0500
commit    15eb817e4ca36a6240b6beacbeff455fc7e78e3c (patch)
tree      83f963487e57658155026ddf9e522a19bda970ca /web
parent    aa123ba1985912ec54211284c4ed4a569e1ae864 (diff)
llm: add multillm package
Signed-off-by: Xe Iaso <me@xeiaso.net>
Diffstat (limited to 'web')
-rw-r--r--  web/mistral/mistral.go         14
-rw-r--r--  web/ollama/ollama.go            2
-rw-r--r--  web/openai/chatgpt/chatgpt.go   8
3 files changed, 20 insertions, 4 deletions
diff --git a/web/mistral/mistral.go b/web/mistral/mistral.go
index 3f2d9e7..75cf9d3 100644
--- a/web/mistral/mistral.go
+++ b/web/mistral/mistral.go
@@ -4,13 +4,27 @@ import (
"bytes"
"context"
"encoding/json"
+ "expvar"
"fmt"
"net/http"
+ "tailscale.com/metrics"
"within.website/x/llm"
"within.website/x/web"
)
+var (
+ promptTokens = metrics.LabelMap{Label: "model"}
+ completionTokens = metrics.LabelMap{Label: "model"}
+ totalTokens = metrics.LabelMap{Label: "model"}
+)
+
+func init() {
+ expvar.Publish("gauge_x_web_mistral_prompt_tokens", &promptTokens)
+ expvar.Publish("gauge_x_web_mistral_completion_tokens", &completionTokens)
+ expvar.Publish("gauge_x_web_mistral_total_tokens", &totalTokens)
+}
+
type Client struct {
*http.Client
apiKey string
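
A minimal sketch of how these per-model counters might be bumped after a completion call. It assumes the Mistral response carries a Usage struct with PromptTokens, CompletionTokens, and TotalTokens fields (hypothetical names, not shown in this hunk) and uses the expvar-style Add that metrics.LabelMap exposes, keyed by model name:

	// hypothetical helper: record token usage from a completion response
	func recordUsage(model string, u Usage) {
		promptTokens.Add(model, int64(u.PromptTokens))         // tokens sent in the prompt
		completionTokens.Add(model, int64(u.CompletionTokens)) // tokens generated by the model
		totalTokens.Add(model, int64(u.TotalTokens))           // prompt + completion
	}
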
diff --git a/web/ollama/ollama.go b/web/ollama/ollama.go
index 0f479cc..eca0439 100644
--- a/web/ollama/ollama.go
+++ b/web/ollama/ollama.go
@@ -31,7 +31,7 @@ type Message struct {
type CompleteRequest struct {
Model string `json:"model"`
- Messages Message `json:"messages"`
+ Messages []Message `json:"messages"`
Format *string `json:"format,omitempty"`
Template *string `json:"template,omitempty"`
Stream bool `json:"stream,omitempty"`
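
With Messages now a slice, one request can carry a whole conversation rather than a single message. A minimal sketch, assuming Message has Role and Content fields (the struct body is not shown in this hunk):

	// hypothetical request with a short conversation history
	req := CompleteRequest{
		Model: "llama2",
		Messages: []Message{
			{Role: "system", Content: "You are a helpful assistant."},
			{Role: "user", Content: "Why is the sky blue?"},
		},
	}
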
diff --git a/web/openai/chatgpt/chatgpt.go b/web/openai/chatgpt/chatgpt.go
index 9f37524..ea375df 100644
--- a/web/openai/chatgpt/chatgpt.go
+++ b/web/openai/chatgpt/chatgpt.go
@@ -13,9 +13,11 @@ import (
)
type Request struct {
- Model string `json:"model"`
- Messages []Message `json:"messages"`
- Functions []Function `json:"functions,omitempty"`
+ Model string `json:"model"`
+ Messages []Message `json:"messages"`
+ Functions []Function `json:"functions,omitempty"`
+ Seed *int `json:"seed,omitempty"`
+ Temperature *float64 `json:"temperature,omitempty"`
}
type Function struct {
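
Seed and Temperature are pointers so that leaving them nil keeps the keys out of the JSON body entirely, letting the API fall back to its defaults. A minimal sketch of setting both, with Message fields assumed as above:

	// hypothetical request pinning a seed and lowering the temperature;
	// nil pointers would omit both fields from the marshaled JSON
	seed, temp := 42, 0.2
	req := Request{
		Model:       "gpt-4",
		Messages:    []Message{{Role: "user", Content: "Hello"}},
		Seed:        &seed,
		Temperature: &temp,
	}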