Finished code cleanup, readme is mostly done.

Date: 2025-04-20 20:22:57 -04:00
parent 9dcd31dd04
commit 1621023958
6 changed files with 171 additions and 25 deletions


@@ -18,12 +18,14 @@ const contextKeyLLM contextKey = "llm"
//go:embed prompts
var prompts embed.FS
// LLM is responsible for abstracting the configuration and implementations of the LLMs used.
type LLM struct {
    code     llms.Model
    chat     llms.Model
    embedder embeddings.Embedder
}
// FromConfig bootstraps the LLM from a passed in configuration.
func FromConfig(cfg *config.Configuration) (*LLM, error) {
    embedLLM, err := cfg.Embedding.GetEmbedding()
    if err != nil {
@@ -52,30 +54,32 @@ func FromConfig(cfg *config.Configuration) (*LLM, error) {
    }, nil
}
// FromContext retrieves an LLM from a passed in context wrapped with WrapContext.
func FromContext(ctx context.Context) *LLM {
    return ctx.Value(contextKeyLLM).(*LLM)
}
// WrapContext embeds an LLM inside a context so it can be retrieved with FromContext.
func WrapContext(ctx context.Context, llmRef *LLM) context.Context {
    return context.WithValue(ctx, contextKeyLLM, llmRef)
}
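// GetEmbedding embeds the passed in texts with the configured embedder and returns one vector per text.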
func (llm *LLM) GetEmbedding(ctx context.Context, texts ...string) ([][]float32, error) {
    return llm.embedder.EmbedDocuments(ctx, texts)
}
// Embedder gets an embedder that can be used to store and retrieve embeddings.
func (llm *LLM) Embedder() embeddings.Embedder {
    return llm.embedder
}
// CodePrompt passes a prompt to the code LLM and returns the response.
func (llm *LLM) CodePrompt(ctx context.Context, prompt string) (string, error) {
    return llm.code.Call(ctx, prompt)
}
// ChatPrompt passes a prompt to the chat LLM and returns the response.
func (llm *LLM) ChatPrompt(ctx context.Context, prompt string) (string, error) {
    return llm.chat.Call(ctx, prompt)
}
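
A minimal usage sketch of the API in this file (not part of the commit): the package name, import paths, and config loader below are assumptions; only FromConfig, WrapContext, FromContext, and ChatPrompt come from the code above.

package main

import (
    "context"
    "fmt"
    "log"

    "example.com/project/config" // hypothetical import path
    "example.com/project/llm"    // hypothetical package name and import path
)

func main() {
    // Hypothetical loader; the diff only shows that FromConfig takes a *config.Configuration.
    cfg, err := config.Load()
    if err != nil {
        log.Fatal(err)
    }

    // Bootstrap the wrapper once and stash it in the context.
    model, err := llm.FromConfig(cfg)
    if err != nil {
        log.Fatal(err)
    }
    ctx := llm.WrapContext(context.Background(), model)

    // Anywhere the context is available, the same LLM can be pulled back out and prompted.
    answer, err := llm.FromContext(ctx).ChatPrompt(ctx, "Summarize this repository.")
    if err != nil {
        log.Fatal(err)
    }
    fmt.Println(answer)
}
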
// GetPrompt loads a LLM prompt template and injects variables into it. Uses the go template format.
func GetPrompt(name string, data any) (string, error) {
    tmplText, err := prompts.ReadFile("prompts/" + name + ".tmpl")
    if err != nil {