commit - f9b1f084f28dc386e398399fb58bad7a9543cf60
commit + 3da6127f69f5a9e6a51e9629967075b878a5ebb3
blob - /dev/null
blob + e02ba987eb04c9f18b689ebc3ef7990651f25392 (mode 644)
--- /dev/null
+++ src/llmweb/config.go
+package main
+
+import (
+ "bufio"
+ "fmt"
+ "os"
+ "strings"
+)
+
+// Config holds the settings read from the configuration file by
+// readConfig: the API endpoint, the bearer token, and the model to
+// request by default.
+type Config struct {
+	// BaseURL is the base URL of the OpenAI-compatible API
+	// (configuration key "url").
+	BaseURL string
+	// Token is the API authentication token (key "token").
+	Token string
+	// DefaultModel is the model name to use (key "model").
+	DefaultModel string
+}
+
+// readConfig parses the configuration file at name.
+// Each significant line has the form "key value" where key is one of
+// "token", "url" or "model"; see Config for their meaning. Blank lines
+// and lines whose first non-whitespace character is '#' are skipped.
+// An unknown key or a line without a space after the key is an error.
+func readConfig(name string) (*Config, error) {
+	f, err := os.Open(name)
+	if err != nil {
+		return nil, err
+	}
+	defer f.Close()
+
+	var conf Config
+	sc := bufio.NewScanner(f)
+	for sc.Scan() {
+		// Trim before classifying the line so indented comments
+		// ("  # x") and whitespace-only lines are skipped rather
+		// than falling through to the malformed-line error below.
+		line := strings.TrimSpace(sc.Text())
+		if line == "" || strings.HasPrefix(line, "#") {
+			continue
+		}
+
+		k, v, ok := strings.Cut(line, " ")
+		if !ok {
+			return nil, fmt.Errorf("key %q: expected space after key", k)
+		}
+		v = strings.TrimSpace(v)
+		switch k {
+		case "token":
+			conf.Token = v
+		case "url":
+			conf.BaseURL = v
+		case "model":
+			conf.DefaultModel = v
+		default:
+			return nil, fmt.Errorf("unknown configuration key %q", k)
+		}
+	}
+	// Report any scanner I/O error alongside the parsed config.
+	return &conf, sc.Err()
+}
blob - /dev/null
blob + c69318881b850e0759444877c8215d57824a1775 (mode 644)
--- /dev/null
+++ src/llmweb/index.html
+<!DOCTYPE html>
+<html>
+<head>
+<meta charset="utf-8">
+<meta name="viewport" content="width=device-width, initial-scale=1.0">
+
+<link rel="manifest" href="manifest.json">
+<link rel="stylesheet" href="https://www.olowe.co/style.css">
+<style>
+textarea {
+	width: 80%;
+}
+
+pre {
+	font-family: sans-serif;
+	white-space: pre-wrap;
+}
+
+label {
+	font-style: italic;
+}
+</style>
+
+<title>llmweb</title>
+</head>
+
+<body>
+<header>
+<nav>
+<a href="/">New</a>
+Model: <code>{{.Model}}</code>
+</nav>
+</header>
+
+<main>
+<!-- The whole conversation is round-tripped through this form: the
+     system prompt stays editable, while prior user/assistant turns are
+     replayed via hidden textareas (and shown read-only in <pre>) so the
+     server can rebuild the message history from the POSTed form. -->
+<form method="POST">
+{{range .Messages}}
+	<label for="{{.Role}}">{{.Role}}</label>
+	{{- if eq .Role "system"}}
+	<p><textarea name="{{.Role}}" id="system" placeholder="Answer concisely...">{{.Content}}</textarea></p>
+	{{else}}
+	<textarea name="{{.Role}}" hidden>{{.Content}}</textarea>
+	<pre>{{.Content}}</pre>
+	{{end}}
+{{end}}
+<textarea name="user" rows=4 placeholder="Provide an overview of..."></textarea>
+<br>
+<input type="submit" />
+</form>
+</main>
+
+</body>
+</html>
blob - /dev/null
blob + 4147ce59142154b519aaf863ea22eca8f8d91b5c (mode 644)
--- /dev/null
+++ src/llmweb/llmweb.go
+package main
+
+import (
+ "fmt"
+ "html/template"
+ "log"
+ "net"
+ "net/http"
+ "os"
+ "path"
+
+ "olowe.co/x/openai"
+)
+
+// Chat is an http.Handler providing a minimal web front end to an
+// OpenAI-compatible chat completion API.
+type Chat struct {
+	client *openai.Client
+	// template renders index.html with an openai.Chat as its data.
+	template *template.Template
+}
+
+// ServeHTTP renders an empty chat page on GET. On POST it rebuilds the
+// conversation from the submitted form (one system prompt, alternating
+// user/assistant turns plus the newest user message), requests a
+// completion, and re-renders the page with the reply appended.
+// Other methods get 405 Method Not Allowed.
+func (c *Chat) ServeHTTP(w http.ResponseWriter, req *http.Request) {
+	log.Println(req.RemoteAddr, req.Method, req.URL)
+	chat := openai.Chat{
+		Model: "mistral-small-latest",
+		Messages: []openai.Message{
+			// Empty system prompt placeholder; replaced below
+			// with the form's value on POST.
+			{Role: openai.RoleSystem, Content: ""},
+		},
+	}
+
+	if req.Method == http.MethodGet {
+		if err := c.template.Execute(w, &chat); err != nil {
+			log.Println(err)
+		}
+		return
+	} else if req.Method != http.MethodPost {
+		w.WriteHeader(http.StatusMethodNotAllowed)
+		return
+	}
+
+	if err := req.ParseForm(); err != nil {
+		http.Error(w, err.Error(), http.StatusBadRequest)
+		return
+	}
+	log.Printf("%+v\n", req.PostForm)
+
+	if sys, ok := req.PostForm[openai.RoleSystem]; ok {
+		chat.Messages[0].Content = sys[0]
+	}
+
+	// A well-formed form carries exactly one more user message than
+	// assistant replies: every past exchange plus the new prompt.
+	nuser := len(req.PostForm[openai.RoleUser])
+	nassistant := len(req.PostForm[openai.RoleAssistant])
+	if nuser != nassistant+1 {
+		e := fmt.Sprintf("expected %d user messages for %d assistant messages, got %d", nassistant+1, nassistant, nuser)
+		http.Error(w, e, http.StatusBadRequest)
+		return
+	}
+
+	// Interleave the replayed history in its original order.
+	for i := 0; i < nassistant; i++ {
+		user := openai.Message{Role: openai.RoleUser, Content: req.PostForm[openai.RoleUser][i]}
+		chat.Messages = append(chat.Messages, user)
+		reply := openai.Message{Role: openai.RoleAssistant, Content: req.PostForm[openai.RoleAssistant][i]}
+		chat.Messages = append(chat.Messages, reply)
+	}
+	latest := openai.Message{Role: openai.RoleUser, Content: req.PostForm[openai.RoleUser][nuser-1]}
+	chat.Messages = append(chat.Messages, latest)
+
+	reply, err := c.client.Complete(&chat)
+	if err != nil {
+		http.Error(w, err.Error(), http.StatusInternalServerError)
+		return
+	}
+	chat.Messages = append(chat.Messages, *reply)
+	// Log render errors like the GET path does; the response status
+	// is already committed, so the client cannot be told.
+	if err := c.template.Execute(w, &chat); err != nil {
+		log.Println(err)
+	}
+}
+
+// servePWA serves the web app manifest so browsers can install the
+// page as a progressive web app.
+func servePWA(w http.ResponseWriter, req *http.Request) {
+	log.Println(req.RemoteAddr, req.Method, req.URL)
+	// Resolved relative to the process working directory, like the
+	// templates loaded in main.
+	http.ServeFile(w, req, "manifest.json")
+}
+
+// main reads the API configuration from the user's config directory,
+// loads the HTML templates from the working directory, and serves the
+// chat UI on TCP port 8080.
+func main() {
+	confDir, err := os.UserConfigDir()
+	if err != nil {
+		log.Fatal(err)
+	}
+	// NOTE(review): this is a filesystem path, so filepath.Join would
+	// be more correct than path.Join on systems without '/'-separated
+	// paths — confirm the intended target platforms.
+	config, err := readConfig(path.Join(confDir, "openai"))
+	if err != nil {
+		log.Fatalf("read configuration: %v", err)
+	}
+	client := &openai.Client{http.DefaultClient, config.Token, config.BaseURL}
+
+	// Templates are looked up relative to the working directory.
+	tmpl, err := template.ParseGlob("*.html")
+	if err != nil {
+		log.Fatal(err)
+	}
+
+	ln, err := net.Listen("tcp", ":8080")
+	if err != nil {
+		log.Fatal(err)
+	}
+	http.Handle("/", &Chat{client, tmpl})
+	http.HandleFunc("/manifest.json", servePWA)
+	// NOTE(review): config.DefaultModel is parsed but never used; the
+	// model name is hard-coded in Chat.ServeHTTP. Consider wiring the
+	// configured model through to the handler.
+	log.Fatal(http.Serve(ln, nil))
+}
blob - /dev/null
blob + 4709dabb1fda8efe3b283d6f21d6732d71d19475 (mode 644)
--- /dev/null
+++ src/llmweb/manifest.json
+{
+ "short_name": "llmweb",
+ "name": "LLM Web",
+ "icons": [],
+ "start_url": ".",
+ "display": "standalone",
+ "theme_color": "#000000",
+ "background_color": "#ffffff"
+}