spoc-bot-vr/ai.go

package main

import (
	"context"
	"net/http"
	"time"

	"github.com/tmc/langchaingo/llms"
	"github.com/tmc/langchaingo/llms/ollama"
)
// AI is the contract the bot uses to turn a prompt into a reply.
type AI interface {
	Do(context.Context, string) (string, error)
}

// AINoop is a backend that never calls a model: Do always returns ":shrug:".
type AINoop struct{}

func NewAINoop() AINoop {
	return AINoop{}
}

func (ai AINoop) Do(ctx context.Context, prompt string) (string, error) {
	return ":shrug:", nil
}
// AIOllama generates replies by calling an Ollama server over HTTP via
// langchaingo.
type AIOllama struct {
	model string
	url   string
}

func NewAIOllama(url, model string) AIOllama {
	return AIOllama{url: url, model: model}
}
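
// A construction sketch (not part of the original file): the server address
// below is Ollama's default local URL, and the model name is only an
// illustrative assumption.
//
//	ai := NewAIOllama("http://localhost:11434", "llama3")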
// Do sends the prompt to the configured Ollama server and returns the
// generated text. Every timeout on the HTTP client and its transport is
// raised to one hour instead of the defaults.
func (ai AIOllama) Do(ctx context.Context, prompt string) (string, error) {
	c := &http.Client{
		Timeout: time.Hour,
		Transport: &http.Transport{
			//DisableKeepAlives: true,
			IdleConnTimeout:       time.Hour,
			ResponseHeaderTimeout: time.Hour,
			ExpectContinueTimeout: time.Hour,
		},
	}
	defer c.CloseIdleConnections()

	llm, err := ollama.New(
		ollama.WithModel(ai.model),
		ollama.WithServerURL(ai.url),
		ollama.WithHTTPClient(c),
	)
	if err != nil {
		return "", err
	}
	return llms.GenerateFromSinglePrompt(ctx, llm, prompt)
}
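
// The helpers below are a usage sketch, not part of the original file: they
// show how a caller could pick a backend from configuration and drive it
// through the AI interface. The function names, the empty-URL convention, and
// the one-hour deadline are assumptions for illustration only.

// newAI returns the Ollama-backed implementation when a server URL is
// configured and falls back to the no-op backend otherwise.
func newAI(ollamaURL, model string) AI {
	if ollamaURL == "" {
		return NewAINoop()
	}
	return NewAIOllama(ollamaURL, model)
}

// answer runs a single prompt against whichever backend was chosen, bounding
// the call with a deadline so a stuck model cannot hang the caller forever.
func answer(ctx context.Context, ai AI, prompt string) (string, error) {
	ctx, cancel := context.WithTimeout(ctx, time.Hour)
	defer cancel()
	return ai.Do(ctx, prompt)
}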