# Generate completion

## Completion

The simple completion API generates a single response for a given prompt using the specified model.

package main

import (
    "github.com/parakeet-nest/parakeet/completion"
    "github.com/parakeet-nest/parakeet/llm"
    "github.com/parakeet-nest/parakeet/enums/option"

    "fmt"
    "log"
)

func main() {
    // Local Ollama endpoint and the small model used by the examples.
    ollamaUrl := "http://localhost:11434"
    model := "tinydolphin"

    // Build the completion options from a map via the SetOptions helper.
    options := llm.SetOptions(map[string]interface{}{
        option.Temperature: 0.5,
    })

    // Assemble the generation query: model, prompt, and tuning options.
    query := llm.GenQuery{
        Model:   model,
        Prompt:  "Who is James T Kirk?",
        Options: options,
    }

    // Run the completion and print the whole response at once.
    answer, err := completion.Generate(ollamaUrl, query)
    if err != nil {
        log.Fatal("😡:", err)
    }
    fmt.Println(answer.Response)
}

## Completion with stream

package main

import (
    "github.com/parakeet-nest/parakeet/completion"
    "github.com/parakeet-nest/parakeet/llm"
    "fmt"
    "log"
)

func main() {
    // Local Ollama endpoint and the small model used by the examples.
    ollamaUrl := "http://localhost:11434"
    model := "tinydolphin"

    options := llm.Options{
        Temperature: 0.5,
    }

    question := llm.GenQuery{
        Model:   model,
        Prompt:  "Who is James T Kirk?",
        Options: options,
    }

    // Each chunk of the response is delivered to the callback as it streams.
    // The aggregated final answer is not used here, so discard it with `_`:
    // keeping a named variable that is never read would not compile in Go
    // ("answer declared and not used").
    _, err := completion.GenerateStream(ollamaUrl, question,
        func(answer llm.Answer) error {
            fmt.Print(answer.Response)
            return nil
        })

    if err != nil {
        log.Fatal("😡:", err)
    }
}

## Completion with context

see: https://github.com/ollama/ollama/blob/main/docs/api.md#generate-a-completion

The context can be used to keep a short conversational memory for the next completion.

package main

import (
    "github.com/parakeet-nest/parakeet/completion"
    "github.com/parakeet-nest/parakeet/llm"

    "fmt"
    "log"
)

func main() {
    // Local Ollama endpoint and the small model used by the examples.
    ollamaUrl := "http://localhost:11434"
    model := "tinydolphin"

    options := llm.Options{
        Temperature: 0.5,
    }

    // First turn: no prior context is supplied.
    answer, err := completion.Generate(ollamaUrl, llm.GenQuery{
        Model:   model,
        Prompt:  "Who is James T Kirk?",
        Options: options,
    })
    if err != nil {
        log.Fatal("😡:", err)
    }
    fmt.Println(answer.Response)

    fmt.Println()

    // Second turn: feed back the context returned by the first completion
    // so the model can resolve who "his" refers to.
    followUp := llm.GenQuery{
        Model:   model,
        Prompt:  "Who is his best friend?",
        Context: answer.Context,
        Options: options,
    }

    answer, err = completion.Generate(ollamaUrl, followUp)
    if err != nil {
        log.Fatal("😡:", err)
    }
    fmt.Println(answer.Response)
}