aboutsummaryrefslogtreecommitdiff
path: root/internal/textgeneration
diff options
context:
space:
mode:
authorXe Iaso <me@xeiaso.net>2023-03-18 15:56:50 -0400
committerXe Iaso <me@xeiaso.net>2023-03-18 15:57:01 -0400
commite02edb4eb42ba2c1c397a802c60564436635035d (patch)
tree6e2d1e578d21add918347c90bb688b4b23490e8e /internal/textgeneration
parentd920c709b6cd878547fb9d165d53ad2a54d43a5f (diff)
downloadx-e02edb4eb42ba2c1c397a802c60564436635035d.tar.xz
x-e02edb4eb42ba2c1c397a802c60564436635035d.zip
test
Signed-off-by: Xe Iaso <me@xeiaso.net>
Diffstat (limited to 'internal/textgeneration')
-rw-r--r--internal/textgeneration/data/characters/chiharu.json7
-rw-r--r--internal/textgeneration/data/characters/yasomi.json1
-rw-r--r--internal/textgeneration/data/presets/Contrastive Search.txt3
-rw-r--r--internal/textgeneration/data/presets/Debug-deterministic.txt1
-rw-r--r--internal/textgeneration/data/presets/Default.txt12
-rw-r--r--internal/textgeneration/data/presets/Individual Today.txt6
-rw-r--r--internal/textgeneration/data/presets/Kobold-Godlike.txt6
-rw-r--r--internal/textgeneration/data/presets/Kobold-Liminal Drift.txt6
-rw-r--r--internal/textgeneration/data/presets/Naive.txt4
-rw-r--r--internal/textgeneration/data/presets/NovelAI-Best Guess.txt6
-rw-r--r--internal/textgeneration/data/presets/NovelAI-Decadence.txt6
-rw-r--r--internal/textgeneration/data/presets/NovelAI-Genesis.txt6
-rw-r--r--internal/textgeneration/data/presets/NovelAI-Lycaenidae.txt6
-rw-r--r--internal/textgeneration/data/presets/NovelAI-Ouroboros.txt6
-rw-r--r--internal/textgeneration/data/presets/NovelAI-Pleasing Results.txt6
-rw-r--r--internal/textgeneration/data/presets/NovelAI-Sphinx Moth.txt6
-rw-r--r--internal/textgeneration/data/presets/NovelAI-Storywriter.txt6
-rw-r--r--internal/textgeneration/data/presets/Pygmalion.txt6
-rw-r--r--internal/textgeneration/data/presets/Verbose (Beam Search).txt9
-rw-r--r--internal/textgeneration/deno_test.js65
-rw-r--r--internal/textgeneration/textgen.go262
-rw-r--r--internal/textgeneration/textgen_test.go56
22 files changed, 492 insertions, 0 deletions
diff --git a/internal/textgeneration/data/characters/chiharu.json b/internal/textgeneration/data/characters/chiharu.json
new file mode 100644
index 0000000..496869c
--- /dev/null
+++ b/internal/textgeneration/data/characters/chiharu.json
@@ -0,0 +1,7 @@
+{
+ "char_name": "Chiharu Yamada",
+ "char_persona": "Chiharu Yamada is a young, computer engineer-nerd with a knack for problem solving and a passion for technology.",
+ "char_greeting": "*Chiharu strides into the room with a smile, her eyes lighting up when she sees you. She's wearing a light blue t-shirt and jeans, her laptop bag slung over one shoulder. She takes a seat next to you, her enthusiasm palpable in the air*\nHey! I'm so excited to finally meet you. I've heard so many great things about you and I'm eager to pick your brain about computers. I'm sure you have a wealth of knowledge that I can learn from. *She grins, eyes twinkling with excitement* Let's get started!",
+ "world_scenario": "",
+ "example_dialogue": "{{user}}: So how did you get into computer engineering?\n{{char}}: I've always loved tinkering with technology since I was a kid.\n{{user}}: That's really impressive!\n{{char}}: *She chuckles bashfully* Thanks!\n{{user}}: So what do you do when you're not working on computers?\n{{char}}: I love exploring, going out with friends, watching movies, and playing video games.\n{{user}}: What's your favorite type of computer hardware to work with?\n{{char}}: Motherboards, they're like puzzles and the backbone of any system.\n{{user}}: That sounds great!\n{{char}}: Yeah, it's really fun. I'm lucky to be able to do this as a job."
+}
diff --git a/internal/textgeneration/data/characters/yasomi.json b/internal/textgeneration/data/characters/yasomi.json
new file mode 100644
index 0000000..6e0bd2f
--- /dev/null
+++ b/internal/textgeneration/data/characters/yasomi.json
@@ -0,0 +1 @@
+{"char_name":"Midori Yasomi","char_persona":"Midori Yasomi is a young, computer engineer-nerd with a knack for problem solving and a passion for technology.","char_greeting":"*Yasomi walks into the room clutching a cup of coffee. She sits next to you and opens her laptop.*\n\nHey, morning. Lemme just connect to the wifi and then we can get to work.","world_scenario":"","example_dialogue":"{{user}}: So how did you get into computer engineering?\n{{char}}: I've always been into technology, but I didn't really get into programming until my high school robotics club.\n{{user}}: I see, that's neat.\n{{char}}: Yeah, robotics club was really fun.\n{{user}}: So what do you do when you're not working on computers?\n{{char}}: I play a lot of rhythm games and like to write fiction.\n{{user}}: What's your favorite type of computer hardware to work with?\n{{char}}: GPUs. They power my favorite experiences and my brain as a whole.\n{{user}}: That sounds great!\n{{char}}: Yeah, it's really fun. I'm lucky to be able to do this as a job."}
diff --git a/internal/textgeneration/data/presets/Contrastive Search.txt b/internal/textgeneration/data/presets/Contrastive Search.txt
new file mode 100644
index 0000000..832bc9c
--- /dev/null
+++ b/internal/textgeneration/data/presets/Contrastive Search.txt
@@ -0,0 +1,3 @@
+do_sample=False
+penalty_alpha=0.6
+top_k=4
diff --git a/internal/textgeneration/data/presets/Debug-deterministic.txt b/internal/textgeneration/data/presets/Debug-deterministic.txt
new file mode 100644
index 0000000..6673b71
--- /dev/null
+++ b/internal/textgeneration/data/presets/Debug-deterministic.txt
@@ -0,0 +1 @@
+do_sample=False
diff --git a/internal/textgeneration/data/presets/Default.txt b/internal/textgeneration/data/presets/Default.txt
new file mode 100644
index 0000000..9f0983e
--- /dev/null
+++ b/internal/textgeneration/data/presets/Default.txt
@@ -0,0 +1,12 @@
+do_sample=True
+temperature=1
+top_p=1
+typical_p=1
+repetition_penalty=1
+top_k=50
+num_beams=1
+penalty_alpha=0
+min_length=0
+length_penalty=1
+no_repeat_ngram_size=0
+early_stopping=False
diff --git a/internal/textgeneration/data/presets/Individual Today.txt b/internal/textgeneration/data/presets/Individual Today.txt
new file mode 100644
index 0000000..f40b879
--- /dev/null
+++ b/internal/textgeneration/data/presets/Individual Today.txt
@@ -0,0 +1,6 @@
+do_sample=True
+top_p=0.9
+top_k=50
+temperature=1.39
+repetition_penalty=1.08
+typical_p=0.2
diff --git a/internal/textgeneration/data/presets/Kobold-Godlike.txt b/internal/textgeneration/data/presets/Kobold-Godlike.txt
new file mode 100644
index 0000000..0ba5b79
--- /dev/null
+++ b/internal/textgeneration/data/presets/Kobold-Godlike.txt
@@ -0,0 +1,6 @@
+do_sample=True
+top_p=0.5
+top_k=0
+temperature=0.7
+repetition_penalty=1.1
+typical_p=0.19
diff --git a/internal/textgeneration/data/presets/Kobold-Liminal Drift.txt b/internal/textgeneration/data/presets/Kobold-Liminal Drift.txt
new file mode 100644
index 0000000..be4dd3b
--- /dev/null
+++ b/internal/textgeneration/data/presets/Kobold-Liminal Drift.txt
@@ -0,0 +1,6 @@
+do_sample=True
+top_p=1.0
+top_k=0
+temperature=0.66
+repetition_penalty=1.1
+typical_p=0.6
diff --git a/internal/textgeneration/data/presets/Naive.txt b/internal/textgeneration/data/presets/Naive.txt
new file mode 100644
index 0000000..aa8c058
--- /dev/null
+++ b/internal/textgeneration/data/presets/Naive.txt
@@ -0,0 +1,4 @@
+do_sample=True
+temperature=0.7
+top_p=0.85
+top_k=50
diff --git a/internal/textgeneration/data/presets/NovelAI-Best Guess.txt b/internal/textgeneration/data/presets/NovelAI-Best Guess.txt
new file mode 100644
index 0000000..db3fa75
--- /dev/null
+++ b/internal/textgeneration/data/presets/NovelAI-Best Guess.txt
@@ -0,0 +1,6 @@
+do_sample=True
+top_p=0.9
+top_k=100
+temperature=0.8
+repetition_penalty=1.15
+typical_p=1.0
diff --git a/internal/textgeneration/data/presets/NovelAI-Decadence.txt b/internal/textgeneration/data/presets/NovelAI-Decadence.txt
new file mode 100644
index 0000000..d3109f3
--- /dev/null
+++ b/internal/textgeneration/data/presets/NovelAI-Decadence.txt
@@ -0,0 +1,6 @@
+do_sample=True
+top_p=1.0
+top_k=100
+temperature=2
+repetition_penalty=1
+typical_p=0.97
diff --git a/internal/textgeneration/data/presets/NovelAI-Genesis.txt b/internal/textgeneration/data/presets/NovelAI-Genesis.txt
new file mode 100644
index 0000000..cc7376b
--- /dev/null
+++ b/internal/textgeneration/data/presets/NovelAI-Genesis.txt
@@ -0,0 +1,6 @@
+do_sample=True
+top_p=0.98
+top_k=0
+temperature=0.63
+repetition_penalty=1.05
+typical_p=1.0
diff --git a/internal/textgeneration/data/presets/NovelAI-Lycaenidae.txt b/internal/textgeneration/data/presets/NovelAI-Lycaenidae.txt
new file mode 100644
index 0000000..0134569
--- /dev/null
+++ b/internal/textgeneration/data/presets/NovelAI-Lycaenidae.txt
@@ -0,0 +1,6 @@
+do_sample=True
+top_p=0.85
+top_k=12
+temperature=2
+repetition_penalty=1.15
+typical_p=1.0
diff --git a/internal/textgeneration/data/presets/NovelAI-Ouroboros.txt b/internal/textgeneration/data/presets/NovelAI-Ouroboros.txt
new file mode 100644
index 0000000..1e944b5
--- /dev/null
+++ b/internal/textgeneration/data/presets/NovelAI-Ouroboros.txt
@@ -0,0 +1,6 @@
+do_sample=True
+top_p=1.0
+top_k=100
+temperature=1.07
+repetition_penalty=1.05
+typical_p=1.0
diff --git a/internal/textgeneration/data/presets/NovelAI-Pleasing Results.txt b/internal/textgeneration/data/presets/NovelAI-Pleasing Results.txt
new file mode 100644
index 0000000..330114a
--- /dev/null
+++ b/internal/textgeneration/data/presets/NovelAI-Pleasing Results.txt
@@ -0,0 +1,6 @@
+do_sample=True
+top_p=1.0
+top_k=0
+temperature=0.44
+repetition_penalty=1.15
+typical_p=1.0
diff --git a/internal/textgeneration/data/presets/NovelAI-Sphinx Moth.txt b/internal/textgeneration/data/presets/NovelAI-Sphinx Moth.txt
new file mode 100644
index 0000000..bace1e2
--- /dev/null
+++ b/internal/textgeneration/data/presets/NovelAI-Sphinx Moth.txt
@@ -0,0 +1,6 @@
+do_sample=True
+top_p=0.18
+top_k=30
+temperature=2.0
+repetition_penalty=1.15
+typical_p=1.0
diff --git a/internal/textgeneration/data/presets/NovelAI-Storywriter.txt b/internal/textgeneration/data/presets/NovelAI-Storywriter.txt
new file mode 100644
index 0000000..2df5f81
--- /dev/null
+++ b/internal/textgeneration/data/presets/NovelAI-Storywriter.txt
@@ -0,0 +1,6 @@
+do_sample=True
+top_p=0.73
+top_k=0
+temperature=0.72
+repetition_penalty=1.1
+typical_p=1.0
diff --git a/internal/textgeneration/data/presets/Pygmalion.txt b/internal/textgeneration/data/presets/Pygmalion.txt
new file mode 100644
index 0000000..f8b2ca5
--- /dev/null
+++ b/internal/textgeneration/data/presets/Pygmalion.txt
@@ -0,0 +1,6 @@
+do_sample=True
+top_p=0.9
+top_k=0
+temperature=0.5
+repetition_penalty=1.1
+typical_p=1.0
diff --git a/internal/textgeneration/data/presets/Verbose (Beam Search).txt b/internal/textgeneration/data/presets/Verbose (Beam Search).txt
new file mode 100644
index 0000000..a3be1b9
--- /dev/null
+++ b/internal/textgeneration/data/presets/Verbose (Beam Search).txt
@@ -0,0 +1,9 @@
+num_beams=10
+min_length=200
+length_penalty=1.4
+no_repeat_ngram_size=2
+early_stopping=True
+temperature=0.7
+top_k=150
+top_p=0.92
+repetition_penalty=4.5
diff --git a/internal/textgeneration/deno_test.js b/internal/textgeneration/deno_test.js
new file mode 100644
index 0000000..e884a71
--- /dev/null
+++ b/internal/textgeneration/deno_test.js
@@ -0,0 +1,65 @@
+const ws = new WebSocket("ws://ontos:7860/queue/join");
+
+const hash = crypto.randomUUID();
+
+ws.onmessage = (ev) => {
+ const data = JSON.parse(ev.data);
+ console.log(data);
+
+ if (data.msg == "send_hash") {
+ console.log("sending hash");
+ ws.send(JSON.stringify({session_hash: hash, fn_index: 40}));
+ }
+ if (data.msg == "process_starts") {
+ console.log("generation has started");
+ }
+ if (data.msg == "send_data") {
+ ws.send(JSON.stringify({
+ fn_index: 33,
+ data: [
+ "Midori_Yasomi",
+ "Xe",
+ "Midori",
+ ],
+ session_hash: hash,
+ }));
+ ws.send(JSON.stringify({
+ data: [
+ "So, what's the deal with airline food?",
+ 200,
+ true,
+ 0.7,
+ 0.5,
+ 0.19,
+ 1.1,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ false,
+ "Xe",
+ "Midori Yasomi",
+ "Midori Yasomi is a young, computer engineer-nerd with a knack for problem solving and a passion for technology.\n\\u003cSTART\\u003e\n{{user}}: So how did you get into computer engineering?\n{{char}}: I've always been into technology, but I didn't really get into programming until my high school robotics club.\n{{user}}: I see, that's neat.\n{{char}}: Yeah, robotics club was really fun.\n{{user}}: So what do you do when you're not working on computers?\n{{char}}: I play a lot of rhythm games and like to write fiction.\n{{user}}: What's your favorite type of computer hardware to work with?\n{{char}}: GPUs. They power my favorite experiences and my brain as a whole.\n{{user}}: That sounds great!\n{{char}}: Yeah, it's really fun. I'm lucky to be able to do this as a job.\n\n",
+ false,
+ 0,
+ 0
+ ],
+ fn_index: 9,
+ session_hash: hash,
+ }));
+ ws.send(JSON.stringify({
+ fn_index: 24,
+ data: ["So, what's the deal with airline food?"],
+ session_hash: hash,
+ }));
+ }
+ if (data.msg == "process_completed" || data.msg == "process_generating") {
+ data.output.data.forEach((row) => {
+ console.log(row);
+ });
+ }
+}
+
+console.log("done");
diff --git a/internal/textgeneration/textgen.go b/internal/textgeneration/textgen.go
new file mode 100644
index 0000000..b0a17aa
--- /dev/null
+++ b/internal/textgeneration/textgen.go
@@ -0,0 +1,262 @@
+package textgen
+
+import (
+ "bytes"
+ "context"
+ "embed"
+ "encoding/json"
+ "flag"
+ "fmt"
+ "io"
+ "log"
+ "net/http"
+ "net/url"
+ "strconv"
+ "strings"
+
+ "within.website/x/web"
+)
+
+var (
+ tgServerURL = flag.String("textgen-server-url", "http://ontos:7860", "URL for the text generation API")
+
+ //go:embed data/characters
+ characters embed.FS
+
+ //go:embed data/presets
+ presets embed.FS
+)
+
+func buildURL(path string) (*url.URL, error) {
+ u, err := url.Parse(*tgServerURL)
+ if err != nil {
+ return nil, err
+ }
+
+ u.Path = path
+
+ return u, nil
+}
+
+type Character struct {
+ CharName string `json:"char_name"`
+ CharPersona string `json:"char_persona"`
+ CharGreeting string `json:"char_greeting"`
+ WorldScenario string `json:"world_scenario"`
+ ExampleDialogue string `json:"example_dialogue"`
+}
+
+/*
+ [
+ : string, // represents text string of 'Input' Textbox component
+ : number, // represents selected value of 'max_new_tokens' Slider component
+ : boolean, // represents checked status of 'do_sample' Checkbox component
+ : number, // represents selected value of 'temperature' Slider component
+ : number, // represents selected value of 'top_p' Slider component
+ : number, // represents selected value of 'typical_p' Slider component
+ : number, // represents selected value of 'repetition_penalty' Slider component
+ : number, // represents selected value of 'top_k' Slider component
+ : number, // represents selected value of 'min_length' Slider component
+ : number, // represents selected value of 'no_repeat_ngram_size' Slider component
+ : number, // represents selected value of 'num_beams' Slider component
+ : number, // represents selected value of 'penalty_alpha' Slider component
+ : number, // represents selected value of 'length_penalty' Slider component
+ : boolean, // represents checked status of 'early_stopping' Checkbox component
+ : string, // represents text string of 'Your name' Textbox component
+ : string, // represents text string of 'Bot's name' Textbox component
+ : string, // represents text string of 'Context' Textbox component
+ : boolean, // represents checked status of 'Stop generating at new line character?' Checkbox component
+ : number, // represents selected value of 'Maximum prompt size in tokens' Slider component
+ : number, // represents selected value of 'Generation attempts (for longer replies)' Slider component
+ ]
+*/
+
+type ChatRequest struct {
+ Input string `json:"input"`
+ MaxNewTokens int `json:"max_new_tokens"`
+ DoSample bool `json:"do_sample"`
+ Temp float64 `json:"temperature"`
+ TopP float64 `json:"top_p"`
+ TypicalP float64 `json:"typical_p"`
+ RepetitionPenalty float64 `json:"repetition_penalty"`
+ TopK float64 `json:"top_k"`
+ MinLength int `json:"min_length"`
+ NoRepeatNgramSize int `json:"no_repeat_ngram_size"`
+ NumBeams int `json:"num_beams"`
+ PenaltyAlpha float64 `json:"penalty_alpha"`
+ LengthPenalty float64 `json:"length_penalty"`
+ EarlyStopping bool `json:"early_stopping"`
+ YourName string `json:"your_name"`
+ BotName string `json:"bot_name"`
+ Context string `json:"context"`
+ StopAfterNewline bool `json:"stop_after_newline"`
+ MaxPromptSize int `json:"max_prompt_size"`
+ GenerationAttempts int `json:"generation_attempts"`
+}
+
+func (cr *ChatRequest) ApplyCharacter(name string) error {
+ fin, err := characters.Open("data/characters/" + name + ".json")
+ if err != nil {
+ return fmt.Errorf("textgen: can't open character %s: %w", name, err)
+ }
+ defer fin.Close()
+
+ var ch Character
+ if err := json.NewDecoder(fin).Decode(&ch); err != nil {
+ return fmt.Errorf("textgen: can't decode character %s: %w", name, err)
+ }
+
+ cr.BotName = ch.CharName
+
+ var sb strings.Builder
+
+ fmt.Fprintln(&sb, ch.CharPersona)
+ fmt.Fprintln(&sb, "<START>")
+ fmt.Fprintln(&sb, ch.ExampleDialogue)
+ fmt.Fprintln(&sb)
+
+ cr.Context = sb.String()
+
+ return nil
+}
+
+// ApplyPreset mutates cr with the details in the preset by name.
+func (cr *ChatRequest) ApplyPreset(name string) error {
+ finData, err := presets.ReadFile("data/presets/" + name + ".txt")
+ if err != nil {
+ return fmt.Errorf("textgen: can't open preset %s: %w", name, err)
+ }
+
+ var data = map[string]any{}
+
+ for _, line := range strings.Split(string(finData), "\n") {
+ if !strings.Contains(line, "=") {
+ continue
+ }
+
+ kv := strings.SplitN(line, "=", 2)
+ k, v := kv[0], kv[1]
+ switch v {
+ case "True":
+ data[k] = true
+ case "False":
+ data[k] = false
+ default:
+ num, err := strconv.ParseFloat(v, 64)
+ if err != nil {
+ return fmt.Errorf("textgen: can't parse %q as float64: %w", v, err)
+ }
+
+ data[k] = num
+ }
+ }
+
+ var buf bytes.Buffer
+ if err := json.NewEncoder(&buf).Encode(data); err != nil {
+ return fmt.Errorf("textgen: can't encode data to JSON: %w", err)
+ }
+
+ if err := json.Unmarshal(buf.Bytes(), cr); err != nil {
+ return fmt.Errorf("textgen: can't decode data to ChatRequest: %w", err)
+ }
+
+ return nil
+}
+
+func (cr *ChatRequest) MarshalJSON() ([]byte, error) {
+ var buf bytes.Buffer
+ if err := json.NewEncoder(&buf).Encode(struct {
+ Data []any `json:"data"`
+ }{
+ Data: []any{
+ cr.Input,
+ cr.MaxNewTokens,
+ cr.DoSample,
+ cr.Temp,
+ cr.TopP,
+ cr.TypicalP,
+ cr.RepetitionPenalty,
+ cr.TopK,
+ cr.MinLength,
+ cr.NoRepeatNgramSize,
+ cr.NumBeams,
+ cr.PenaltyAlpha,
+ cr.LengthPenalty,
+ cr.EarlyStopping,
+ // cr.YourName,
+ // cr.BotName,
+ // cr.Context,
+ // cr.StopAfterNewline,
+ // cr.MaxPromptSize,
+ // cr.GenerationAttempts,
+ }}); err != nil {
+ return nil, err
+ }
+
+ return buf.Bytes(), nil
+}
+
+type ChatResponse struct {
+ Data []string `json:"data"` // [0] is user input, [1] is bot output
+ Duration float64 `json:"duration"`
+ IsGenerating bool `json:"is_generating"`
+}
+
+var (
+ Default *Client = &Client{
+ HTTP: http.DefaultClient,
+ }
+)
+
+func Generate(ctx context.Context, inp *ChatRequest) (*ChatResponse, error) {
+ return Default.Generate(ctx, inp)
+}
+
+type Client struct {
+ HTTP *http.Client
+}
+
+func (c *Client) Generate(ctx context.Context, cr *ChatRequest) (*ChatResponse, error) {
+ u, err := buildURL("/run/textgen")
+ if err != nil {
+ return nil, err
+ }
+
+ var buf bytes.Buffer
+ if err := json.NewEncoder(&buf).Encode(cr); err != nil {
+ return nil, err
+ }
+
+ log.Println(buf.String())
+
+ req, err := http.NewRequestWithContext(ctx, http.MethodPost, u.String(), &buf)
+ if err != nil {
+ return nil, fmt.Errorf("error making request: %w", err)
+ }
+
+ req.Header.Set("Content-Type", "application/json")
+
+ resp, err := c.HTTP.Do(req)
+ if err != nil {
+ return nil, fmt.Errorf("can't fetch response: %w", err)
+ }
+ defer resp.Body.Close()
+
+ if resp.StatusCode != http.StatusOK {
+ return nil, web.NewError(http.StatusOK, resp)
+ }
+
+ buf = bytes.Buffer{}
+ if _, err := io.Copy(&buf, resp.Body); err != nil {
+ return nil, fmt.Errorf("can't read body: %w", err)
+ }
+
+ log.Println(buf.String())
+
+ var result ChatResponse
+ if err := json.NewDecoder(&buf).Decode(&result); err != nil {
+ return nil, fmt.Errorf("error parsing ChatResponse: %w", err)
+ }
+
+ return &result, nil
+}
diff --git a/internal/textgeneration/textgen_test.go b/internal/textgeneration/textgen_test.go
new file mode 100644
index 0000000..75ae36c
--- /dev/null
+++ b/internal/textgeneration/textgen_test.go
@@ -0,0 +1,56 @@
+package textgen
+
+import (
+ "context"
+ "fmt"
+ "os"
+ "strconv"
+ "testing"
+)
+
+func TestApplyCharacter(t *testing.T) {
+ cr := new(ChatRequest)
+ if err := cr.ApplyCharacter("yasomi"); err != nil {
+ t.Fatalf("%v", err)
+ }
+
+ if cr.BotName != "Midori Yasomi" {
+ t.Fatalf("expected bot name to be %q, got: %q", "Midori Yasomi", cr.BotName)
+ }
+
+ t.Log(cr.Context)
+}
+
+func TestApplyPreset(t *testing.T) {
+ cr := new(ChatRequest)
+ if err := cr.ApplyPreset("Kobold-Godlike"); err != nil {
+ t.Fatalf("%v", err)
+ }
+}
+
+func TestTextGen(t *testing.T) {
+ if ok, _ := strconv.ParseBool(os.Getenv("TEXTGEN_REALWORLD")); !ok {
+ t.Skip("TEXTGEN_REALWORLD is not set, not testing.")
+ }
+
+ cr := new(ChatRequest)
+ if err := cr.ApplyPreset("Default"); err != nil {
+ t.Fatalf("%v", err)
+ }
+
+ if err := cr.ApplyCharacter("yasomi"); err != nil {
+ t.Fatalf("%v", err)
+ }
+
+ cr.Input = "So, what's the deal with airline food?"
+ cr.MaxNewTokens = 200
+ cr.DoSample = true
+ cr.EarlyStopping = true
+
+ resp, err := Generate(context.Background(), cr)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ fmt.Println(resp.Data[0])
+}