Personal AI Assistant
Containerized local LLM assistant built with Go and Ollama. Features streaming responses, conversation memory, and complete data privacy — all running locally.
// StreamResponse sends prompt to the local Ollama model and writes each
// streamed response chunk to stdout as it arrives, without buffering the
// full reply.
func StreamResponse(prompt string) {
	c := ollama.NewClient()
	// Range until the client closes the stream channel.
	for part := range c.Chat(prompt) {
		fmt.Print(part.Content)
	}
}