-
Notifications
You must be signed in to change notification settings - Fork 0
/
conversation.go
113 lines (94 loc) · 2.86 KB
/
conversation.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
package conversation
import (
"context"
"encoding/json"
"fmt"
"os"
"github.com/otiai10/openaigo"
)
const (
	// Message roles recognized by the OpenAI chat completions API.
	roleSystem = "system"
	roleUser = "user"
	roleAssistant = "assistant"
	// defaultModel is the model used when no option overrides it.
	defaultModel = "gpt-3.5-turbo"
)
// Conversation is the abstract conversation view.
// It accumulates the exchanged chat messages and carries the client
// configuration used on each API call.
type Conversation struct {
	History []openaigo.ChatMessage // full message transcript, in order of exchange
	apiKey string // OpenAI API key; required by New
	model string // chat model name; defaults to defaultModel
	ctx context.Context // NOTE(review): only set via an Option; may be nil — confirm callers configure it
}
// New creates a new conversation.
// Requires an OpenAI API key, and an optional list of options.
// It returns a new conversation and an error if there is a failure.
func New(apiKey string, opts ...Option) (*Conversation, error) {
	if apiKey == "" {
		return nil, fmt.Errorf("an API key is required")
	}

	conv := &Conversation{
		apiKey: apiKey,
		model:  defaultModel,
	}

	// Apply each option in order; the first failure aborts construction.
	for _, opt := range opts {
		if err := opt(conv); err != nil {
			return nil, err
		}
	}
	return conv, nil
}
// Load loads a conversation from the disk with the given options.
// The file must contain a JSON array of chat messages, as written by Save.
func Load(file, apiKey string, opts ...Option) (*Conversation, error) {
	raw, err := os.ReadFile(file)
	if err != nil {
		return nil, err
	}

	var history []openaigo.ChatMessage
	if err := json.Unmarshal(raw, &history); err != nil {
		return nil, err
	}

	// Prepend the restored history so later options can still override it.
	allOpts := append([]Option{WithHistory(history)}, opts...)
	return New(apiKey, allOpts...)
}
// Save writes the conversation history to disk at the given full path
// as a JSON array of chat messages (the format Load reads back).
func (c *Conversation) Save(dst string) error {
	dat, err := json.Marshal(c.History)
	if err != nil {
		return err
	}
	// 0o644 (rw-r--r--): the previous 0655 mode set execute bits on a
	// plain data file, which was a typo for 0644.
	return os.WriteFile(dst, dat, 0o644)
}
// apiCall sends the given message history to the OpenAI chat completions
// API using the conversation's model and key, and returns the content of
// the first returned choice.
func (c *Conversation) apiCall(ctx context.Context, history []openaigo.ChatMessage) (string, error) {
	client := openaigo.NewClient(c.apiKey)
	request := openaigo.ChatCompletionRequestBody{
		Model:    c.model,
		Messages: history,
	}
	resp, err := client.Chat(ctx, request)
	if err != nil {
		return "", err
	}
	// The request never sets N, so one choice is expected — but only an
	// empty choice list is actually unusable; the old `!= 1` check would
	// reject an otherwise valid multi-choice response.
	if len(resp.Choices) == 0 {
		return "", fmt.Errorf("invalid response: %+v", resp)
	}
	return resp.Choices[0].Message.Content, nil
}
// converse sends input under the given role, records the input and the
// model's reply in History, and returns the reply text.
//
// The input message is committed to History only after a successful API
// call, so a failed request no longer leaves an orphaned, unanswered
// message in the transcript.
func (c *Conversation) converse(ctx context.Context, role, input string) (string, error) {
	if ctx == nil {
		// c.ctx is nil unless an Option set it; never forward a nil context.
		ctx = context.Background()
	}
	msg := openaigo.ChatMessage{Role: role, Content: input}
	resp, err := c.apiCall(ctx, append(c.History, msg))
	if err != nil {
		return "", err
	}
	c.History = append(c.History,
		msg,
		openaigo.ChatMessage{Role: roleAssistant, Content: resp},
	)
	return resp, nil
}
// System writes in the conversation as the "system" role.
// A system-level instruction guides the model's behavior for the rest of
// the conversation.
// see: https://help.openai.com/en/articles/7042661-chatgpt-api-transition-guide
func (c *Conversation) System(input string) (string, error) {
	return c.converse(c.ctx, roleSystem, input)
}
// User writes in the conversation as the "user" role and returns the
// assistant's reply.
func (c *Conversation) User(input string) (string, error) {
	return c.converse(c.ctx, roleUser, input)
}
// Chat is syntactic sugar: it is equivalent to calling User.
func (c *Conversation) Chat(input string) (string, error) {
	return c.User(input)
}