// Package main wires a minimal AI abstraction with two implementations:
// a no-op stub and an Ollama-backed client.
package main

import (
	"context"

	"github.com/tmc/langchaingo/llms"
	"github.com/tmc/langchaingo/llms/ollama"
)

// AI is the minimal contract for a prompt-completion backend: given a
// prompt, return the model's textual response.
type AI interface {
	Do(ctx context.Context, prompt string) (string, error)
}

// Compile-time checks that both implementations satisfy AI.
var (
	_ AI = AINoop{}
	_ AI = AIOllama{}
)

// AINoop is a stand-in implementation that never calls a model.
// Useful as a default or in tests where no backend is available.
type AINoop struct{}

// NewAINoop returns a ready-to-use AINoop.
func NewAINoop() AINoop {
	return AINoop{}
}

// Do ignores the prompt and always answers ":shrug:" with no error.
func (ai AINoop) Do(ctx context.Context, prompt string) (string, error) {
	return ":shrug:", nil
}

// AIOllama completes prompts against an Ollama server.
type AIOllama struct {
	model string // model name passed to Ollama (e.g. "llama3")
	url   string // base URL of the Ollama server
}

// NewAIOllama returns an AIOllama targeting the given server URL and model.
// No connection is made until Do is called.
func NewAIOllama(url, model string) AIOllama {
	return AIOllama{url: url, model: model}
}

// Do sends prompt to the configured Ollama server and returns the
// single-shot completion. The context governs cancellation/timeout of
// the request.
//
// NOTE(review): the ollama client is constructed on every call; if this
// is a hot path, consider building it once (constructing it in
// NewAIOllama would require changing that constructor's signature to
// return an error, so it is kept per-call here to preserve the API).
func (ai AIOllama) Do(ctx context.Context, prompt string) (string, error) {
	llm, err := ollama.New(
		ollama.WithModel(ai.model),
		ollama.WithServerURL(ai.url),
	)
	if err != nil {
		return "", err
	}
	return llms.GenerateFromSinglePrompt(ctx, llm, prompt)
}