Skip to content

Commit

Permalink
feat: streamline yaml config
Browse files Browse the repository at this point in the history
  • Loading branch information
Toby Padilla authored and toby committed May 25, 2023
1 parent 89f7031 commit 77033f1
Show file tree
Hide file tree
Showing 3 changed files with 61 additions and 56 deletions.
106 changes: 52 additions & 54 deletions config.go
Original file line number Diff line number Diff line change
Expand Up @@ -17,37 +17,34 @@ import (

const configTemplate = `
# {{ index .Help "apis" }}
# LocalAI setup instructions: https://github.com/go-skynet/LocalAI#example-use-gpt4all-j-model
apis:
openai: https://api.openai.com/v1
# LocalAI setup instructions: https://github.com/go-skynet/LocalAI#example-use-gpt4all-j-model
localai: http://localhost:8080
# {{ index .Help "models" }}
models:
gpt-4:
aliases: ["4"]
max-input-chars: 24500
api: openai
fallback: gpt-3.5-turbo
gpt-4-32k:
aliases: ["32k"]
max-input-chars: 98000
api: openai
fallback: gpt-4
gpt-3.5-turbo:
aliases: ["35t"]
max-input-chars: 12250
api: openai
fallback: gpt-3.5
gpt-3.5:
aliases: ["35"]
max-input-chars: 12250
api: openai
fallback:
ggml-gpt4all-j:
aliases: ["local", "4all"]
max-input-chars: 12250
api: localai
fallback:
openai:
base-url: https://api.openai.com/v1
models:
gpt-4:
aliases: ["4"]
max-input-chars: 24500
fallback: gpt-3.5-turbo
gpt-4-32k:
aliases: ["32k"]
max-input-chars: 98000
fallback: gpt-4
gpt-3.5-turbo:
aliases: ["35t"]
max-input-chars: 12250
fallback: gpt-3.5
gpt-3.5:
aliases: ["35"]
max-input-chars: 12250
fallback:
localai:
base-url: http://localhost:8080
models:
ggml-gpt4all-j:
aliases: ["local", "4all"]
max-input-chars: 12250
fallback:
# {{ index .Help "model" }}
default-model: gpt-4
# {{ index .Help "max-input-chars" }}
Expand Down Expand Up @@ -77,22 +74,22 @@ status-text: Generating
`

type config struct {
API string `yaml:"api" env:"API"`
APIs map[string]string `yaml:"apis"`
Model string `yaml:"default-model" env:"MODEL"`
Models map[string]Model `yaml:"models"`
Markdown bool `yaml:"format" env:"FORMAT"`
Quiet bool `yaml:"quiet" env:"QUIET"`
MaxTokens int `yaml:"max-tokens" env:"MAX_TOKENS"`
MaxInputChars int `yaml:"max-input-chars" env:"MAX_INPUT_CHARS"`
Temperature float32 `yaml:"temp" env:"TEMP"`
TopP float32 `yaml:"topp" env:"TOPP"`
NoLimit bool `yaml:"no-limit" env:"NO_LIMIT"`
IncludePromptArgs bool `yaml:"include-prompt-args" env:"INCLUDE_PROMPT_ARGS"`
IncludePrompt int `yaml:"include-prompt" env:"INCLUDE_PROMPT"`
MaxRetries int `yaml:"max-retries" env:"MAX_RETRIES"`
Fanciness uint `yaml:"fanciness" env:"FANCINESS"`
StatusText string `yaml:"status-text" env:"STATUS_TEXT"`
APIs map[string]API `yaml:"apis"`
Model string `yaml:"default-model" env:"MODEL"`
Markdown bool `yaml:"format" env:"FORMAT"`
Quiet bool `yaml:"quiet" env:"QUIET"`
MaxTokens int `yaml:"max-tokens" env:"MAX_TOKENS"`
MaxInputChars int `yaml:"max-input-chars" env:"MAX_INPUT_CHARS"`
Temperature float32 `yaml:"temp" env:"TEMP"`
TopP float32 `yaml:"topp" env:"TOPP"`
NoLimit bool `yaml:"no-limit" env:"NO_LIMIT"`
IncludePromptArgs bool `yaml:"include-prompt-args" env:"INCLUDE_PROMPT_ARGS"`
IncludePrompt int `yaml:"include-prompt" env:"INCLUDE_PROMPT"`
MaxRetries int `yaml:"max-retries" env:"MAX_RETRIES"`
Fanciness uint `yaml:"fanciness" env:"FANCINESS"`
StatusText string `yaml:"status-text" env:"STATUS_TEXT"`
API string
Models map[string]Model
ShowHelp bool
Prefix string
Version bool
Expand All @@ -105,10 +102,9 @@ func newConfig() (config, error) {
var content []byte

help := map[string]string{
"api": "Default OpenAI compatible REST API (openai, localai).",
"api": "OpenAI compatible REST API (openai, localai).",
"apis": "Aliases and endpoints for OpenAI compatible REST API.",
"model": "Default model (gpt-3.5-turbo, gpt-4, ggml-gpt4all-j...).",
"models": "Model details and aliases.",
"max-input-chars": "Default character limit on input to model.",
"format": "Format response as markdown.",
"prompt": "Include the prompt from the arguments and stdin, truncate stdin to specified number of lines.",
Expand Down Expand Up @@ -168,13 +164,15 @@ func newConfig() (config, error) {
return c, err
}

// Set model aliases
ms := make(map[string]Model)
for k, m := range c.Models {
m.Name = k
ms[k] = m
for _, am := range m.Aliases {
ms[am] = m
for ak, av := range c.APIs {
for mk, mv := range av.Models {
mv.Name = mk
mv.API = ak
ms[mk] = mv
for _, a := range mv.Aliases {
ms[a] = mv
}
}
}
c.Models = ms
Expand Down
8 changes: 7 additions & 1 deletion model.go
Original file line number Diff line number Diff line change
Expand Up @@ -3,8 +3,14 @@ package main
// Model represents the LLM model used in the API call.
//
// Name and API are filled in at load time from the config's nested
// apis→models YAML map keys rather than from YAML tags: Name is the
// model's map key and API is the key of the endpoint that serves it.
type Model struct {
	Name     string   // model map key, e.g. "gpt-4" (set when config is loaded)
	API      string   // owning API map key, e.g. "openai" (set when config is loaded)
	MaxChars int      `yaml:"max-input-chars"` // input character limit before truncation/fallback
	Aliases  []string `yaml:"aliases"`         // short names users may pass instead of Name
	Fallback string   `yaml:"fallback"`        // model to retry with when input exceeds MaxChars
}

// API describes one OpenAI-compatible endpoint: the base URL requests
// are sent to, plus the models it serves keyed by model name.
type API struct {
	BaseURL string           `yaml:"base-url"` // endpoint root, e.g. https://api.openai.com/v1
	Models  map[string]Model `yaml:"models"`   // model name → model settings
}
3 changes: 2 additions & 1 deletion mods.go
Original file line number Diff line number Diff line change
Expand Up @@ -226,7 +226,7 @@ func (m *Mods) startCompletionCmd(content string) tea.Cmd {
}
}
ccfg := openai.DefaultConfig(key)
ccfg.BaseURL, ok = cfg.APIs[mod.API]
api, ok := cfg.APIs[mod.API]
if !ok {
eps := make([]string, 0)
for k := range cfg.APIs {
Expand All @@ -237,6 +237,7 @@ func (m *Mods) startCompletionCmd(content string) tea.Cmd {
err: fmt.Errorf("Your configured API endpoints are: %s", eps),
}
}
ccfg.BaseURL = api.BaseURL
client := openai.NewClientWithConfig(ccfg)
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
Expand Down

0 comments on commit 77033f1

Please sign in to comment.