diff --git a/.github/scripts/modules/ollama/install-dependencies.sh b/.github/scripts/modules/ollama/install-dependencies.sh
new file mode 100755
index 0000000000..d699158806
--- /dev/null
+++ b/.github/scripts/modules/ollama/install-dependencies.sh
@@ -0,0 +1,6 @@
+#!/usr/bin/env bash
+set -euo pipefail
+
+curl -fsSL https://ollama.com/install.sh | sh
+
+# stop any running Ollama process so that the tests can start from a clean state
+sudo systemctl stop ollama.service
diff --git a/.github/workflows/ci-test-go.yml b/.github/workflows/ci-test-go.yml
index 82be78435f..0d6af15880 100644
--- a/.github/workflows/ci-test-go.yml
+++ b/.github/workflows/ci-test-go.yml
@@ -107,6 +107,16 @@ jobs:
working-directory: ./${{ inputs.project-directory }}
run: go build
+ - name: Install dependencies
+ shell: bash
+ run: |
+ SCRIPT_PATH="./.github/scripts/${{ inputs.project-directory }}/install-dependencies.sh"
+ if [ -f "$SCRIPT_PATH" ]; then
+            "$SCRIPT_PATH"
+ else
+ echo "No dependencies script found at $SCRIPT_PATH - skipping installation"
+ fi
+
- name: go test
# only run tests on linux, there are a number of things that won't allow the tests to run on anything else
# many (maybe, all?) images used can only be built on Linux, they don't have Windows in their manifest, and
diff --git a/docs/modules/ollama.md b/docs/modules/ollama.md
index c16e612142..bffe63648e 100644
--- a/docs/modules/ollama.md
+++ b/docs/modules/ollama.md
@@ -16,10 +16,15 @@ go get github.com/testcontainers/testcontainers-go/modules/ollama
## Usage example
+The module allows you to run the Ollama container or the local Ollama binary.
+
[Creating an Ollama container](../../modules/ollama/examples_test.go) inside_block:runOllamaContainer
+[Running the local Ollama binary](../../modules/ollama/examples_test.go) inside_block:localOllama
+If the local Ollama binary fails to execute, the module will fall back to the container version of Ollama.
+
## Module Reference
### Run function
@@ -48,6 +53,50 @@ When starting the Ollama container, you can pass options in a variadic way to co
If you need to set a different Ollama Docker image, you can set a valid Docker image as the second argument in the `Run` function.
E.g. `Run(context.Background(), "ollama/ollama:0.1.25")`.
+#### Use Local
+
+- Not available until the next release of testcontainers-go :material-tag: main
+
+!!!warning
+    Please make sure the local Ollama binary is not already running when using the local version of the module:
+    Ollama can be started as a system service or as part of the Ollama application,
+    and interacting with the logs of a running Ollama process not managed by the module is not supported.
+
+If you need to run the local Ollama binary, you can set the `UseLocal` option in the `Run` function.
+This option accepts a list of environment variables as strings, in "KEY=VALUE" format, which will be applied to the Ollama binary when executing commands.
+
+E.g. `Run(context.Background(), "ollama/ollama:0.1.25", WithUseLocal("OLLAMA_DEBUG=true"))`.
+
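+A minimal sketch of the flow, assuming the `tcollama` import alias used in the examples (error handling trimmed for brevity):
+
+```go
+ctx := context.Background()
+
+// The image is still passed: it is used as a fallback if the local binary fails to start.
+ollamaContainer, err := tcollama.Run(ctx, "ollama/ollama:0.3.13", tcollama.WithUseLocal("OLLAMA_DEBUG=true"))
+if err != nil {
+	log.Fatalf("failed to start ollama: %s", err)
+}
+defer func() {
+	_ = testcontainers.TerminateContainer(ollamaContainer)
+}()
+
+connectionStr, err := ollamaContainer.ConnectionString(ctx)
+fmt.Println(connectionStr) // e.g. http://127.0.0.1:11434
+```
+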
+All the container methods are available when using the local Ollama binary, but will be executed locally instead of inside the container.
+Please consider the following differences when using the local Ollama binary:
+
+- The local Ollama binary will create a log file in the current working directory, identified by the session ID. E.g. `local-ollama-<session-id>.log`. It's possible to set the log file name using the `OLLAMA_LOGFILE` environment variable, so if you're running Ollama yourself, from the Ollama app or the standalone binary, you can use this variable to point the module at the same log file.
+    - For the Ollama app, the default log file resides at `$HOME/.ollama/logs/server.log`.
+    - For the standalone binary, start it with its logs redirected to a file. E.g. `ollama serve > /tmp/ollama.log 2>&1`.
+- `ConnectionString` returns the connection string to connect to the local Ollama binary started by the module instead of the container, which maps to `http://127.0.0.1:11434`.
+- `ContainerIP` returns `127.0.0.1`.
+- `ContainerIPs` returns `["127.0.0.1"]`.
+- `CopyToContainer`, `CopyDirToContainer`, `CopyFileToContainer` and `CopyFileFromContainer` don't perform any action.
+- `GetLogProductionErrorChannel` returns a nil channel.
+- `Endpoint` returns the endpoint to connect to the local Ollama binary started by the module instead of the container, which maps to `127.0.0.1:11434`.
+- `Exec` passes the command to the local Ollama binary started by the module, instead of executing it inside the container. The first argument must be the `ollama` command; the remaining arguments are passed to it. Otherwise, an error is returned (see the sketch after this list).
+- `GetContainerID` returns the container ID of the local Ollama binary started by the module instead of the container, which maps to `local-ollama-<session-id>`.
+- `Host` returns `127.0.0.1`.
+- `Inspect` returns a ContainerJSON with the state of the local Ollama binary started by the module.
+- `IsRunning` returns true if the local Ollama binary process started by the module is running.
+- `Logs` returns the logs from the local Ollama binary started by the module instead of the container.
+- `MappedPort` returns the port mapping for the local Ollama binary started by the module instead of the container.
+- `Start` starts the local Ollama binary process.
+- `State` returns the current state of the local Ollama binary process, `exited` or `running`.
+- `Stop` stops the local Ollama binary process.
+- `Terminate` calls the `Stop` method and then removes the log file.
+
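+For illustration, a minimal sketch of the `Exec` contract when running locally (the model name is just an example):
+
+```go
+// Allowed: the first element of the command must be "ollama".
+code, reader, err := ollamaContainer.Exec(ctx, []string{"ollama", "pull", "llama3.2:1b"})
+if err != nil || code != 0 {
+	// handle the error, inspecting reader for the command output
+}
+
+// Rejected: any other binary results in an error wrapping errors.ErrUnsupported.
+_, _, err = ollamaContainer.Exec(ctx, []string{"cat", "/etc/passwd"})
+```
+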
+The log file created by the local Ollama binary in the current working directory is exposed through the `Logs` method.
+
+!!!info
+ The local Ollama binary will use the `OLLAMA_HOST` environment variable to set the host and port to listen on.
+ If the environment variable is not set, it will use the default host `127.0.0.1` and port `11434`.
+
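+For example, to start the local binary on a custom port, you could set the variable before calling `Run` (a sketch; the port is arbitrary):
+
+```go
+os.Setenv("OLLAMA_HOST", "127.0.0.1:1234")
+
+ollamaContainer, err := tcollama.Run(ctx, "ollama/ollama:0.3.13", tcollama.WithUseLocal())
+// ConnectionString will now return http://127.0.0.1:1234
+```
+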
{% include "../features/common_functional_options.md" %}
### Container Methods
diff --git a/modules/ollama/examples_test.go b/modules/ollama/examples_test.go
index 741db846be..188be45bbb 100644
--- a/modules/ollama/examples_test.go
+++ b/modules/ollama/examples_test.go
@@ -173,3 +173,73 @@ func ExampleRun_withModel_llama2_langchain() {
// Intentionally not asserting the output, as we don't want to run this example in the tests.
}
+
+func ExampleRun_withLocal() {
+ ctx := context.Background()
+
+ // localOllama {
+ ollamaContainer, err := tcollama.Run(ctx, "ollama/ollama:0.3.13", tcollama.WithUseLocal("OLLAMA_DEBUG=true"))
+ defer func() {
+ if err := testcontainers.TerminateContainer(ollamaContainer); err != nil {
+ log.Printf("failed to terminate container: %s", err)
+ }
+ }()
+ if err != nil {
+ log.Printf("failed to start container: %s", err)
+ return
+ }
+ // }
+
+ model := "llama3.2:1b"
+
+ _, _, err = ollamaContainer.Exec(ctx, []string{"ollama", "pull", model})
+ if err != nil {
+ log.Printf("failed to pull model %s: %s", model, err)
+ return
+ }
+
+ _, _, err = ollamaContainer.Exec(ctx, []string{"ollama", "run", model})
+ if err != nil {
+ log.Printf("failed to run model %s: %s", model, err)
+ return
+ }
+
+ connectionStr, err := ollamaContainer.ConnectionString(ctx)
+ if err != nil {
+ log.Printf("failed to get connection string: %s", err)
+ return
+ }
+
+ var llm *langchainollama.LLM
+ if llm, err = langchainollama.New(
+ langchainollama.WithModel(model),
+ langchainollama.WithServerURL(connectionStr),
+ ); err != nil {
+ log.Printf("failed to create langchain ollama: %s", err)
+ return
+ }
+
+ completion, err := llm.Call(
+ context.Background(),
+ "how can Testcontainers help with testing?",
+		llms.WithSeed(42),         // fixing the seed makes the completion reproducible
+		llms.WithTemperature(0.0), // lower temperature reduces randomness in the completion
+ )
+ if err != nil {
+		log.Printf("failed to generate a completion: %s", err)
+ return
+ }
+
+ words := []string{
+ "easy", "isolation", "consistency",
+ }
+ lwCompletion := strings.ToLower(completion)
+
+ for _, word := range words {
+ if strings.Contains(lwCompletion, word) {
+ fmt.Println(true)
+ }
+ }
+
+ // Intentionally not asserting the output, as we don't want to run this example in the tests.
+}
diff --git a/modules/ollama/go.mod b/modules/ollama/go.mod
index e22b801031..2aab83b978 100644
--- a/modules/ollama/go.mod
+++ b/modules/ollama/go.mod
@@ -4,6 +4,7 @@ go 1.22
require (
github.com/docker/docker v27.1.1+incompatible
+ github.com/docker/go-connections v0.5.0
github.com/google/uuid v1.6.0
github.com/stretchr/testify v1.9.0
github.com/testcontainers/testcontainers-go v0.34.0
@@ -22,7 +23,6 @@ require (
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/distribution/reference v0.6.0 // indirect
github.com/dlclark/regexp2 v1.8.1 // indirect
- github.com/docker/go-connections v0.5.0 // indirect
github.com/docker/go-units v0.5.0 // indirect
github.com/felixge/httpsnoop v1.0.4 // indirect
github.com/go-logr/logr v1.4.1 // indirect
diff --git a/modules/ollama/local.go b/modules/ollama/local.go
new file mode 100644
index 0000000000..72a1787405
--- /dev/null
+++ b/modules/ollama/local.go
@@ -0,0 +1,516 @@
+package ollama
+
+import (
+ "bytes"
+ "context"
+ "errors"
+ "fmt"
+ "io"
+ "io/fs"
+ "net"
+ "os"
+ "os/exec"
+ "strings"
+ "sync"
+ "syscall"
+ "time"
+
+ "github.com/docker/docker/api/types"
+ "github.com/docker/docker/api/types/container"
+ "github.com/docker/docker/api/types/network"
+ "github.com/docker/go-connections/nat"
+
+ "github.com/testcontainers/testcontainers-go"
+ tcexec "github.com/testcontainers/testcontainers-go/exec"
+ "github.com/testcontainers/testcontainers-go/wait"
+)
+
+const (
+ localIP = "127.0.0.1"
+ localPort = "11434"
+)
+
+var (
+ defaultStopTimeout = time.Second * 5
+ errCopyAPIsNotSupported = errors.New("copy APIs are not supported for local Ollama binary")
+)
+
+// localContext is a type holding the context for local Ollama executions.
+type localContext struct {
+ env []string
+ serveCmd *exec.Cmd
+ logFile *os.File
+ mx sync.Mutex
+ host string
+ port string
+}
+
+// runLocal calls the local Ollama binary instead of using a Docker container.
+func runLocal(ctx context.Context, env map[string]string) (*OllamaContainer, error) {
+ // Apply the environment variables to the command.
+ cmdEnv := make([]string, 0, len(env)*2)
+ for k, v := range env {
+ cmdEnv = append(cmdEnv, k+"="+v)
+ }
+
+ localCtx := &localContext{
+ env: cmdEnv,
+ host: localIP,
+ port: localPort,
+ }
+
+ if envHost := os.Getenv("OLLAMA_HOST"); envHost != "" {
+ host, port, err := net.SplitHostPort(envHost)
+ if err != nil {
+ return nil, fmt.Errorf("invalid OLLAMA_HOST: %w", err)
+ }
+
+ localCtx.host = host
+ localCtx.port = port
+ }
+
+ c := &OllamaContainer{
+ localCtx: localCtx,
+ }
+
+ err := c.startLocalOllama(ctx)
+ if err != nil {
+ return nil, fmt.Errorf("start ollama: %w", err)
+ }
+
+ return c, nil
+}
+
+// logFile creates the log file for the local Ollama binary, honoring the
+// OLLAMA_LOGFILE environment variable to override its name.
+func logFile() (*os.File, error) {
+ logName := "local-ollama-" + testcontainers.SessionID() + ".log"
+
+ if envLogName := os.Getenv("OLLAMA_LOGFILE"); envLogName != "" {
+ logName = envLogName
+ }
+
+ file, err := os.Create(logName)
+ if err != nil {
+ return nil, fmt.Errorf("create ollama log file: %w", err)
+ }
+
+ return file, nil
+}
+
+// startLocalOllama starts the Ollama serve command in the background, writing its
+// output to a log file.
+func (c *OllamaContainer) startLocalOllama(ctx context.Context) error {
+	c.localCtx.mx.Lock()
+
+	// check under the lock to guard against concurrent starts
+	if c.localCtx.serveCmd != nil {
+		c.localCtx.mx.Unlock()
+		return nil
+	}
+
+ serveCmd := exec.CommandContext(ctx, "ollama", "serve")
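+	// NOTE: the current environment is appended after the user-provided variables;
+	// for duplicate keys the last value wins, so os.Environ() takes precedence.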
+ serveCmd.Env = append(serveCmd.Env, c.localCtx.env...)
+ serveCmd.Env = append(serveCmd.Env, os.Environ()...)
+
+ logFile, err := logFile()
+ if err != nil {
+ c.localCtx.mx.Unlock()
+ return fmt.Errorf("ollama log file: %w", err)
+ }
+
+ serveCmd.Stdout = logFile
+ serveCmd.Stderr = logFile
+
+ // Run the ollama serve command in background
+ err = serveCmd.Start()
+ if err != nil {
+ c.localCtx.mx.Unlock()
+ return fmt.Errorf("start ollama serve: %w", err)
+ }
+
+ c.localCtx.serveCmd = serveCmd
+ c.localCtx.logFile = logFile
+
+ // unlock before waiting for the process to be ready
+ c.localCtx.mx.Unlock()
+
+ waitCtx, cancel := context.WithTimeout(ctx, 10*time.Second)
+ defer cancel()
+
+ err = c.waitForOllama(waitCtx)
+ if err != nil {
+ return fmt.Errorf("wait for ollama to start: %w", err)
+ }
+
+ return nil
+}
+
+// waitForOllama waits until the Ollama process is ready, checking that the log file
+// contains the "Listening on <host>:<port>" message.
+func (c *OllamaContainer) waitForOllama(ctx context.Context) error {
+ ctx, cancel := context.WithTimeout(ctx, 10*time.Second)
+ defer cancel()
+
+ err := wait.ForLog("Listening on "+c.localCtx.host+":"+c.localCtx.port).WaitUntilReady(ctx, c)
+ if err != nil {
+		logs, logsErr := c.Logs(ctx)
+		if logsErr != nil {
+			return fmt.Errorf("wait for ollama to start: %w (could not read logs: %w)", err, logsErr)
+		}
+
+		// ignore the read error, as we already have an error to return
+		bs, _ := io.ReadAll(logs)
+		return fmt.Errorf("wait for ollama to start: %w. Ollama logs:\n%s", err, string(bs))
+ }
+
+ return nil
+}
+
+// ContainerIP returns the IP address of the local Ollama binary.
+func (c *OllamaContainer) ContainerIP(ctx context.Context) (string, error) {
+ if c.localCtx == nil {
+ return c.Container.ContainerIP(ctx)
+ }
+
+ return localIP, nil
+}
+
+// ContainerIPs returns a slice with the IP address of the local Ollama binary.
+func (c *OllamaContainer) ContainerIPs(ctx context.Context) ([]string, error) {
+ if c.localCtx == nil {
+ return c.Container.ContainerIPs(ctx)
+ }
+
+ return []string{localIP}, nil
+}
+
+// CopyToContainer is a no-op for the local Ollama binary.
+func (c *OllamaContainer) CopyToContainer(ctx context.Context, fileContent []byte, containerFilePath string, fileMode int64) error {
+ if c.localCtx == nil {
+ return c.Container.CopyToContainer(ctx, fileContent, containerFilePath, fileMode)
+ }
+
+ return errCopyAPIsNotSupported
+}
+
+// CopyDirToContainer is a no-op for the local Ollama binary.
+func (c *OllamaContainer) CopyDirToContainer(ctx context.Context, hostDirPath string, containerParentPath string, fileMode int64) error {
+ if c.localCtx == nil {
+ return c.Container.CopyDirToContainer(ctx, hostDirPath, containerParentPath, fileMode)
+ }
+
+ return errCopyAPIsNotSupported
+}
+
+// CopyFileToContainer is a no-op for the local Ollama binary.
+func (c *OllamaContainer) CopyFileToContainer(ctx context.Context, hostFilePath string, containerFilePath string, fileMode int64) error {
+ if c.localCtx == nil {
+ return c.Container.CopyFileToContainer(ctx, hostFilePath, containerFilePath, fileMode)
+ }
+
+ return errCopyAPIsNotSupported
+}
+
+// CopyFileFromContainer is a no-op for the local Ollama binary.
+func (c *OllamaContainer) CopyFileFromContainer(ctx context.Context, filePath string) (io.ReadCloser, error) {
+ if c.localCtx == nil {
+ return c.Container.CopyFileFromContainer(ctx, filePath)
+ }
+
+ return nil, errCopyAPIsNotSupported
+}
+
+// GetLogProductionErrorChannel returns a nil channel.
+func (c *OllamaContainer) GetLogProductionErrorChannel() <-chan error {
+ if c.localCtx == nil {
+ return c.Container.GetLogProductionErrorChannel()
+ }
+
+ return nil
+}
+
+// Endpoint returns the host:port endpoint for the local Ollama binary, ignoring the port argument.
+func (c *OllamaContainer) Endpoint(ctx context.Context, port string) (string, error) {
+ if c.localCtx == nil {
+ return c.Container.Endpoint(ctx, port)
+ }
+
+ return c.localCtx.host + ":" + c.localCtx.port, nil
+}
+
+// Exec executes a command using the local Ollama binary.
+func (c *OllamaContainer) Exec(ctx context.Context, cmd []string, options ...tcexec.ProcessOption) (int, io.Reader, error) {
+ if c.localCtx == nil {
+ return c.Container.Exec(ctx, cmd, options...)
+ }
+
+ c.localCtx.mx.Lock()
+ defer c.localCtx.mx.Unlock()
+
+ if len(cmd) == 0 {
+ err := errors.New("exec: no command provided")
+ return 1, strings.NewReader(err.Error()), err
+ } else if cmd[0] != "ollama" {
+ err := fmt.Errorf("%s: %w", cmd[0], errors.ErrUnsupported)
+ return 1, strings.NewReader(err.Error()), err
+ }
+
+	args := cmd[1:] // empty when only the "ollama" command was provided
+
+ command := prepareExec(ctx, cmd[0], args, c.localCtx.env, c.localCtx.logFile)
+	err := command.Run()
+	if err != nil {
+		// ProcessState can be nil if the command failed to start at all.
+		code := 1
+		if command.ProcessState != nil {
+			code = command.ProcessState.ExitCode()
+		}
+		return code, c.localCtx.logFile, fmt.Errorf("exec %v: %w", cmd, err)
+	}
+
+ return command.ProcessState.ExitCode(), c.localCtx.logFile, nil
+}
+
+func prepareExec(ctx context.Context, bin string, args []string, env []string, output io.Writer) *exec.Cmd {
+ command := exec.CommandContext(ctx, bin, args...)
+ command.Env = append(command.Env, env...)
+ command.Env = append(command.Env, os.Environ()...)
+
+ command.Stdout = output
+ command.Stderr = output
+
+ return command
+}
+
+// GetContainerID returns a placeholder ID for local execution.
+func (c *OllamaContainer) GetContainerID() string {
+ if c.localCtx == nil {
+ return c.Container.GetContainerID()
+ }
+
+ return "local-ollama-" + testcontainers.SessionID()
+}
+
+// Host returns the 127.0.0.1 address for the local Ollama binary.
+func (c *OllamaContainer) Host(ctx context.Context) (string, error) {
+ if c.localCtx == nil {
+ return c.Container.Host(ctx)
+ }
+
+ return localIP, nil
+}
+
+// Inspect returns a ContainerJSON with the state of the local Ollama binary.
+// The version is read from the local Ollama binary (ollama -v), and the port
+// mapping reflects the configured port (11434 by default).
+func (c *OllamaContainer) Inspect(ctx context.Context) (*types.ContainerJSON, error) {
+ if c.localCtx == nil {
+ return c.Container.Inspect(ctx)
+ }
+
+ state, err := c.State(ctx)
+ if err != nil {
+ return nil, fmt.Errorf("get ollama state: %w", err)
+ }
+
+ // read the version from the ollama binary
+ var buf bytes.Buffer
+ command := prepareExec(ctx, "ollama", []string{"-v"}, c.localCtx.env, &buf)
+ if err := command.Run(); err != nil {
+ return nil, fmt.Errorf("read ollama -v output: %w", err)
+ }
+
+
+ return &types.ContainerJSON{
+ ContainerJSONBase: &types.ContainerJSONBase{
+ ID: c.GetContainerID(),
+ Name: "local-ollama-" + testcontainers.SessionID(),
+ State: state,
+ },
+ Config: &container.Config{
+			Image: buf.String(),
+ ExposedPorts: nat.PortSet{
+ nat.Port(c.localCtx.port + "/tcp"): struct{}{},
+ },
+ Hostname: "localhost",
+ Entrypoint: []string{"ollama", "serve"},
+ },
+ NetworkSettings: &types.NetworkSettings{
+ Networks: map[string]*network.EndpointSettings{},
+ NetworkSettingsBase: types.NetworkSettingsBase{
+ Bridge: "bridge",
+ Ports: nat.PortMap{
+ nat.Port(c.localCtx.port + "/tcp"): {
+ {HostIP: c.localCtx.host, HostPort: c.localCtx.port},
+ },
+ },
+ },
+ DefaultNetworkSettings: types.DefaultNetworkSettings{
+ IPAddress: c.localCtx.host,
+ },
+ },
+ }, nil
+}
+
+// IsRunning returns true if the local Ollama process is running.
+func (c *OllamaContainer) IsRunning() bool {
+ if c.localCtx == nil {
+ return c.Container.IsRunning()
+ }
+
+ c.localCtx.mx.Lock()
+ defer c.localCtx.mx.Unlock()
+
+ return c.localCtx.serveCmd != nil
+}
+
+// Logs returns the logs from the local Ollama binary.
+func (c *OllamaContainer) Logs(ctx context.Context) (io.ReadCloser, error) {
+ if c.localCtx == nil {
+ return c.Container.Logs(ctx)
+ }
+
+ c.localCtx.mx.Lock()
+ defer c.localCtx.mx.Unlock()
+
+ // stream the log file
+ return os.Open(c.localCtx.logFile.Name())
+}
+
+// MappedPort returns the configured port for the local Ollama binary.
+func (c *OllamaContainer) MappedPort(ctx context.Context, port nat.Port) (nat.Port, error) {
+ if c.localCtx == nil {
+ return c.Container.MappedPort(ctx, port)
+ }
+
+	// return the configured port, 11434 by default
+ return nat.Port(c.localCtx.port + "/tcp"), nil
+}
+
+// Networks returns the networks for the local Ollama binary, which is a nil slice.
+func (c *OllamaContainer) Networks(ctx context.Context) ([]string, error) {
+ if c.localCtx == nil {
+ return c.Container.Networks(ctx)
+ }
+
+ return nil, nil
+}
+
+// NetworkAliases returns the network aliases for the local Ollama binary, which is a nil map.
+func (c *OllamaContainer) NetworkAliases(ctx context.Context) (map[string][]string, error) {
+ if c.localCtx == nil {
+ return c.Container.NetworkAliases(ctx)
+ }
+
+ return nil, nil
+}
+
+// SessionID returns the session ID for local Ollama binary, which is the session ID
+// of the test execution.
+func (c *OllamaContainer) SessionID() string {
+ if c.localCtx == nil {
+ return c.Container.SessionID()
+ }
+
+ return testcontainers.SessionID()
+}
+
+// Start starts the local Ollama process, not failing if it's already running.
+func (c *OllamaContainer) Start(ctx context.Context) error {
+ if c.localCtx == nil {
+ return c.Container.Start(ctx)
+ }
+
+ err := c.startLocalOllama(ctx)
+ if err != nil {
+ return fmt.Errorf("start ollama: %w", err)
+ }
+
+ return nil
+}
+
+// State returns the current state of the Ollama process, simulating a container state
+// for local execution.
+func (c *OllamaContainer) State(ctx context.Context) (*types.ContainerState, error) {
+ if c.localCtx == nil {
+ return c.Container.State(ctx)
+ }
+
+ c.localCtx.mx.Lock()
+ defer c.localCtx.mx.Unlock()
+
+ if c.localCtx.serveCmd == nil {
+ return &types.ContainerState{Status: "exited"}, nil
+ }
+
+ // Check if process is still running. Signal(0) is a special case in Unix-like systems.
+ // When you send signal 0 to a process:
+ // - It performs all the normal error checking (permissions, process existence, etc.)
+ // - But it doesn't actually send any signal to the process
+ if err := c.localCtx.serveCmd.Process.Signal(syscall.Signal(0)); err != nil {
+ return &types.ContainerState{Status: "created"}, nil
+ }
+
+ // Setting the Running field because it's required by the wait strategy
+ // to check if the given log message is present.
+ return &types.ContainerState{Status: "running", Running: true}, nil
+}
+
+// Stop gracefully stops the local Ollama process.
+func (c *OllamaContainer) Stop(ctx context.Context, d *time.Duration) error {
+ if c.localCtx == nil {
+ return c.Container.Stop(ctx, d)
+ }
+
+ c.localCtx.mx.Lock()
+ defer c.localCtx.mx.Unlock()
+
+ if c.localCtx.serveCmd == nil {
+ return nil
+ }
+
+ if err := c.localCtx.serveCmd.Process.Signal(syscall.SIGTERM); err != nil {
+ return fmt.Errorf("signal ollama: %w", err)
+ }
+
+ c.localCtx.serveCmd = nil
+
+ return nil
+}
+
+// Terminate stops the local Ollama process, removing the log file.
+func (c *OllamaContainer) Terminate(ctx context.Context) error {
+ if c.localCtx == nil {
+ return c.Container.Terminate(ctx)
+ }
+
+ // First try to stop gracefully
+ err := c.Stop(ctx, &defaultStopTimeout)
+ if err != nil {
+ return fmt.Errorf("stop ollama: %w", err)
+ }
+
+ c.localCtx.mx.Lock()
+ defer c.localCtx.mx.Unlock()
+
+ if c.localCtx.logFile == nil {
+ return nil
+ }
+
+ var errs []error
+ if err = c.localCtx.logFile.Close(); err != nil {
+ errs = append(errs, fmt.Errorf("close log: %w", err))
+ }
+
+ if err = os.Remove(c.localCtx.logFile.Name()); err != nil && !errors.Is(err, fs.ErrNotExist) {
+ errs = append(errs, fmt.Errorf("remove log: %w", err))
+ }
+
+ if len(errs) > 0 {
+ return errors.Join(errs...)
+ }
+
+ c.localCtx.logFile = nil
+
+ return nil
+}
diff --git a/modules/ollama/local_test.go b/modules/ollama/local_test.go
new file mode 100644
index 0000000000..7bd073ca5e
--- /dev/null
+++ b/modules/ollama/local_test.go
@@ -0,0 +1,342 @@
+package ollama_test
+
+import (
+ "context"
+ "errors"
+ "io"
+ "os"
+ "os/exec"
+ "path/filepath"
+ "testing"
+ "time"
+
+ "github.com/docker/docker/api/types/strslice"
+ "github.com/stretchr/testify/require"
+
+ "github.com/testcontainers/testcontainers-go"
+ tcexec "github.com/testcontainers/testcontainers-go/exec"
+ "github.com/testcontainers/testcontainers-go/modules/ollama"
+)
+
+func TestRun_local(t *testing.T) {
+ // check if the local ollama binary is available
+ if _, err := exec.LookPath("ollama"); err != nil {
+ t.Skip("local ollama binary not found, skipping")
+ }
+
+ ctx := context.Background()
+
+ ollamaContainer, err := ollama.Run(
+ ctx,
+ "ollama/ollama:0.1.25",
+ ollama.WithUseLocal("FOO=BAR"),
+ )
+ testcontainers.CleanupContainer(t, ollamaContainer)
+ require.NoError(t, err)
+
+ t.Run("connection-string", func(t *testing.T) {
+ connectionStr, err := ollamaContainer.ConnectionString(ctx)
+ require.NoError(t, err)
+ require.Equal(t, "http://127.0.0.1:11434", connectionStr)
+ })
+
+ t.Run("container-id", func(t *testing.T) {
+ id := ollamaContainer.GetContainerID()
+ require.Equal(t, "local-ollama-"+testcontainers.SessionID(), id)
+ })
+
+ t.Run("container-ips", func(t *testing.T) {
+ ip, err := ollamaContainer.ContainerIP(ctx)
+ require.NoError(t, err)
+ require.Equal(t, "127.0.0.1", ip)
+
+ ips, err := ollamaContainer.ContainerIPs(ctx)
+ require.NoError(t, err)
+ require.Equal(t, []string{"127.0.0.1"}, ips)
+ })
+
+ t.Run("copy", func(t *testing.T) {
+ err := ollamaContainer.CopyToContainer(ctx, []byte("test"), "/tmp", 0o755)
+ require.Error(t, err)
+
+ err = ollamaContainer.CopyDirToContainer(ctx, ".", "/tmp", 0o755)
+ require.Error(t, err)
+
+ err = ollamaContainer.CopyFileToContainer(ctx, ".", "/tmp", 0o755)
+ require.Error(t, err)
+
+ reader, err := ollamaContainer.CopyFileFromContainer(ctx, "/tmp")
+ require.Error(t, err)
+ require.Nil(t, reader)
+ })
+
+ t.Run("log-production-error-channel", func(t *testing.T) {
+ ch := ollamaContainer.GetLogProductionErrorChannel()
+ require.Nil(t, ch)
+ })
+
+ t.Run("endpoint", func(t *testing.T) {
+ endpoint, err := ollamaContainer.Endpoint(ctx, "88888/tcp")
+ require.NoError(t, err)
+ require.Equal(t, "127.0.0.1:11434", endpoint)
+ })
+
+ t.Run("exec/pull-and-run-model", func(t *testing.T) {
+ const model = "llama3.2:1b"
+
+ code, r, err := ollamaContainer.Exec(ctx, []string{"ollama", "pull", model})
+ require.NoError(t, err)
+ require.Equal(t, 0, code)
+
+ bs, err := io.ReadAll(r)
+ require.NoError(t, err)
+ require.Empty(t, bs)
+
+ code, _, err = ollamaContainer.Exec(ctx, []string{"ollama", "run", model}, tcexec.Multiplexed())
+ require.NoError(t, err)
+ require.Equal(t, 0, code)
+
+ logs, err := ollamaContainer.Logs(ctx)
+ require.NoError(t, err)
+ defer logs.Close()
+
+ bs, err = io.ReadAll(logs)
+ require.NoError(t, err)
+ require.Contains(t, string(bs), "llama runner started")
+ })
+
+ t.Run("exec/unsupported-command", func(t *testing.T) {
+ code, r, err := ollamaContainer.Exec(ctx, []string{"cat", "/etc/passwd"})
+ require.Equal(t, 1, code)
+ require.Error(t, err)
+ require.ErrorIs(t, err, errors.ErrUnsupported)
+
+ bs, err := io.ReadAll(r)
+ require.NoError(t, err)
+ require.Equal(t, "cat: unsupported operation", string(bs))
+
+ code, r, err = ollamaContainer.Exec(ctx, []string{})
+ require.Equal(t, 1, code)
+ require.Error(t, err)
+
+ bs, err = io.ReadAll(r)
+ require.NoError(t, err)
+ require.Equal(t, "exec: no command provided", string(bs))
+ })
+
+ t.Run("is-running", func(t *testing.T) {
+ require.True(t, ollamaContainer.IsRunning())
+
+ err = ollamaContainer.Stop(ctx, nil)
+ require.NoError(t, err)
+
+ require.False(t, ollamaContainer.IsRunning())
+
+ // return it to the running state
+ err = ollamaContainer.Start(ctx)
+ require.NoError(t, err)
+
+ require.True(t, ollamaContainer.IsRunning())
+ })
+
+ t.Run("host", func(t *testing.T) {
+ host, err := ollamaContainer.Host(ctx)
+ require.NoError(t, err)
+ require.Equal(t, "127.0.0.1", host)
+ })
+
+ t.Run("inspect", func(t *testing.T) {
+ inspect, err := ollamaContainer.Inspect(ctx)
+ require.NoError(t, err)
+
+ require.Equal(t, "local-ollama-"+testcontainers.SessionID(), inspect.ContainerJSONBase.ID)
+ require.Equal(t, "local-ollama-"+testcontainers.SessionID(), inspect.ContainerJSONBase.Name)
+ require.True(t, inspect.ContainerJSONBase.State.Running)
+
+ require.Contains(t, string(inspect.Config.Image), "ollama version is")
+ _, exists := inspect.Config.ExposedPorts["11434/tcp"]
+ require.True(t, exists)
+ require.Equal(t, "localhost", inspect.Config.Hostname)
+ require.Equal(t, strslice.StrSlice(strslice.StrSlice{"ollama", "serve"}), inspect.Config.Entrypoint)
+
+ require.Empty(t, inspect.NetworkSettings.Networks)
+ require.Equal(t, "bridge", inspect.NetworkSettings.NetworkSettingsBase.Bridge)
+
+ ports := inspect.NetworkSettings.NetworkSettingsBase.Ports
+ _, exists = ports["11434/tcp"]
+ require.True(t, exists)
+
+ require.Equal(t, "127.0.0.1", inspect.NetworkSettings.Ports["11434/tcp"][0].HostIP)
+ require.Equal(t, "11434", inspect.NetworkSettings.Ports["11434/tcp"][0].HostPort)
+ })
+
+ t.Run("logfile", func(t *testing.T) {
+ openFile, err := os.Open("local-ollama-" + testcontainers.SessionID() + ".log")
+ require.NoError(t, err)
+ require.NotNil(t, openFile)
+ require.NoError(t, openFile.Close())
+ })
+
+ t.Run("logs", func(t *testing.T) {
+ logs, err := ollamaContainer.Logs(ctx)
+ require.NoError(t, err)
+ defer logs.Close()
+
+ bs, err := io.ReadAll(logs)
+ require.NoError(t, err)
+
+ require.Contains(t, string(bs), "Listening on 127.0.0.1:11434")
+ })
+
+ t.Run("mapped-port", func(t *testing.T) {
+ port, err := ollamaContainer.MappedPort(ctx, "11434/tcp")
+ require.NoError(t, err)
+ require.Equal(t, "11434", port.Port())
+ require.Equal(t, "tcp", port.Proto())
+ })
+
+ t.Run("networks", func(t *testing.T) {
+ networks, err := ollamaContainer.Networks(ctx)
+ require.NoError(t, err)
+ require.Empty(t, networks)
+ })
+
+ t.Run("network-aliases", func(t *testing.T) {
+ aliases, err := ollamaContainer.NetworkAliases(ctx)
+ require.NoError(t, err)
+ require.Empty(t, aliases)
+ })
+
+ t.Run("session-id", func(t *testing.T) {
+ id := ollamaContainer.SessionID()
+ require.Equal(t, testcontainers.SessionID(), id)
+ })
+
+ t.Run("stop-start", func(t *testing.T) {
+ d := time.Second * 5
+
+ err := ollamaContainer.Stop(ctx, &d)
+ require.NoError(t, err)
+
+ state, err := ollamaContainer.State(ctx)
+ require.NoError(t, err)
+ require.Equal(t, "exited", state.Status)
+
+ err = ollamaContainer.Start(ctx)
+ require.NoError(t, err)
+
+ state, err = ollamaContainer.State(ctx)
+ require.NoError(t, err)
+ require.Equal(t, "running", state.Status)
+
+ logs, err := ollamaContainer.Logs(ctx)
+ require.NoError(t, err)
+ defer logs.Close()
+
+ bs, err := io.ReadAll(logs)
+ require.NoError(t, err)
+
+ require.Contains(t, string(bs), "Listening on 127.0.0.1:11434")
+ })
+
+ t.Run("start-start", func(t *testing.T) {
+ state, err := ollamaContainer.State(ctx)
+ require.NoError(t, err)
+ require.Equal(t, "running", state.Status)
+
+ err = ollamaContainer.Start(ctx)
+ require.NoError(t, err)
+ })
+
+ t.Run("terminate", func(t *testing.T) {
+ err := ollamaContainer.Terminate(ctx)
+ require.NoError(t, err)
+
+		_, err = os.Stat("local-ollama-" + testcontainers.SessionID() + ".log")
+ require.True(t, os.IsNotExist(err))
+
+ state, err := ollamaContainer.State(ctx)
+ require.NoError(t, err)
+ require.Equal(t, "exited", state.Status)
+ })
+}
+
+func TestRun_localWithCustomLogFile(t *testing.T) {
+ t.Setenv("OLLAMA_LOGFILE", filepath.Join(t.TempDir(), "server.log"))
+
+ ctx := context.Background()
+
+ ollamaContainer, err := ollama.Run(ctx, "ollama/ollama:0.1.25", ollama.WithUseLocal("FOO=BAR"))
+	testcontainers.CleanupContainer(t, ollamaContainer)
+	require.NoError(t, err)
+
+ logs, err := ollamaContainer.Logs(ctx)
+ require.NoError(t, err)
+ defer logs.Close()
+
+ bs, err := io.ReadAll(logs)
+ require.NoError(t, err)
+
+ require.Contains(t, string(bs), "Listening on 127.0.0.1:11434")
+}
+
+func TestRun_localWithCustomHost(t *testing.T) {
+ t.Setenv("OLLAMA_HOST", "127.0.0.1:1234")
+
+ ctx := context.Background()
+
+ ollamaContainer, err := ollama.Run(ctx, "ollama/ollama:0.1.25", ollama.WithUseLocal())
+	testcontainers.CleanupContainer(t, ollamaContainer)
+	require.NoError(t, err)
+
+ t.Run("connection-string", func(t *testing.T) {
+ connectionStr, err := ollamaContainer.ConnectionString(ctx)
+ require.NoError(t, err)
+ require.Equal(t, "http://127.0.0.1:1234", connectionStr)
+ })
+
+ t.Run("endpoint", func(t *testing.T) {
+ endpoint, err := ollamaContainer.Endpoint(ctx, "1234/tcp")
+ require.NoError(t, err)
+ require.Equal(t, "127.0.0.1:1234", endpoint)
+ })
+
+ t.Run("inspect", func(t *testing.T) {
+ inspect, err := ollamaContainer.Inspect(ctx)
+ require.NoError(t, err)
+
+ require.Contains(t, string(inspect.Config.Image), "ollama version is")
+ _, exists := inspect.Config.ExposedPorts["1234/tcp"]
+ require.True(t, exists)
+ require.Equal(t, "localhost", inspect.Config.Hostname)
+ require.Equal(t, strslice.StrSlice(strslice.StrSlice{"ollama", "serve"}), inspect.Config.Entrypoint)
+
+ require.Empty(t, inspect.NetworkSettings.Networks)
+ require.Equal(t, "bridge", inspect.NetworkSettings.NetworkSettingsBase.Bridge)
+
+ ports := inspect.NetworkSettings.NetworkSettingsBase.Ports
+ _, exists = ports["1234/tcp"]
+ require.True(t, exists)
+
+ require.Equal(t, "127.0.0.1", inspect.NetworkSettings.Ports["1234/tcp"][0].HostIP)
+ require.Equal(t, "1234", inspect.NetworkSettings.Ports["1234/tcp"][0].HostPort)
+ })
+
+ t.Run("logs", func(t *testing.T) {
+ logs, err := ollamaContainer.Logs(ctx)
+ require.NoError(t, err)
+ defer logs.Close()
+
+ bs, err := io.ReadAll(logs)
+ require.NoError(t, err)
+
+ require.Contains(t, string(bs), "Listening on 127.0.0.1:1234")
+ })
+
+ t.Run("mapped-port", func(t *testing.T) {
+ port, err := ollamaContainer.MappedPort(ctx, "1234/tcp")
+ require.NoError(t, err)
+ require.Equal(t, "1234", port.Port())
+ require.Equal(t, "tcp", port.Proto())
+ })
+}
diff --git a/modules/ollama/local_unit_test.go b/modules/ollama/local_unit_test.go
new file mode 100644
index 0000000000..8489bf1aeb
--- /dev/null
+++ b/modules/ollama/local_unit_test.go
@@ -0,0 +1,55 @@
+package ollama
+
+import (
+ "context"
+ "os"
+ "path/filepath"
+ "testing"
+
+ "github.com/stretchr/testify/require"
+)
+
+func TestRun_localWithCustomLogFileError(t *testing.T) {
+ t.Run("terminate/close-log-error", func(t *testing.T) {
+ // Create a temporary file for testing
+ f, err := os.CreateTemp(t.TempDir(), "test-log-*")
+ require.NoError(t, err)
+
+ // Close the file before termination to force a "file already closed" error
+ err = f.Close()
+ require.NoError(t, err)
+
+ c := &OllamaContainer{
+ localCtx: &localContext{
+ logFile: f,
+ },
+ }
+ err = c.Terminate(context.Background())
+ require.Error(t, err)
+ require.ErrorContains(t, err, "close log:")
+ })
+
+ t.Run("terminate/log-file-not-removable", func(t *testing.T) {
+ // Create a temporary file for testing
+ f, err := os.CreateTemp(t.TempDir(), "test-log-*")
+ require.NoError(t, err)
+ defer func() {
+ // Cleanup: restore permissions
+ require.NoError(t, os.Chmod(filepath.Dir(f.Name()), 0o700))
+ }()
+
+ // Make the file read-only and its parent directory read-only
+ // This should cause removal to fail on most systems
+ dir := filepath.Dir(f.Name())
+ require.NoError(t, os.Chmod(dir, 0o500))
+
+ c := &OllamaContainer{
+ localCtx: &localContext{
+ logFile: f,
+ },
+ }
+ err = c.Terminate(context.Background())
+ require.Error(t, err)
+ require.ErrorContains(t, err, "remove log:")
+ })
+}
diff --git a/modules/ollama/ollama.go b/modules/ollama/ollama.go
index 203d80103f..3d0cc6fa4e 100644
--- a/modules/ollama/ollama.go
+++ b/modules/ollama/ollama.go
@@ -20,11 +20,16 @@ const DefaultOllamaImage = "ollama/ollama:0.1.25"
// OllamaContainer represents the Ollama container type used in the module
type OllamaContainer struct {
testcontainers.Container
+ localCtx *localContext
}
// ConnectionString returns the connection string for the Ollama container,
// using the default port 11434.
func (c *OllamaContainer) ConnectionString(ctx context.Context) (string, error) {
+ if c.localCtx != nil {
+ return "http://" + c.localCtx.host + ":" + c.localCtx.port, nil
+ }
+
host, err := c.Host(ctx)
if err != nil {
return "", err
@@ -43,6 +48,10 @@ func (c *OllamaContainer) ConnectionString(ctx context.Context) (string, error)
// of the container into a new image with the given name, so it doesn't override existing images.
// It should be used for creating an image that contains a loaded model.
func (c *OllamaContainer) Commit(ctx context.Context, targetImage string) error {
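+	// Committing is a no-op when using the local Ollama binary: there is no container image to commit to.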
+ if c.localCtx != nil {
+ return nil
+ }
+
cli, err := testcontainers.NewDockerClientWithOpts(context.Background())
if err != nil {
return err
@@ -94,10 +103,23 @@ func Run(ctx context.Context, img string, opts ...testcontainers.ContainerCustom
// always request a GPU if the host supports it
opts = append(opts, withGpu())
+ useLocal := false
for _, opt := range opts {
if err := opt.Customize(&genericContainerReq); err != nil {
return nil, fmt.Errorf("customize: %w", err)
}
+ if _, ok := opt.(UseLocal); ok {
+ useLocal = true
+ }
+ }
+
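+	// When the local option is set, try the local Ollama binary first,
+	// falling back to the container if the binary fails to start.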
+ if useLocal {
+ container, err := runLocal(ctx, req.Env)
+ if err == nil {
+ return container, nil
+ }
+
+ testcontainers.Logger.Printf("failed to run local ollama: %v, switching to docker", err)
}
container, err := testcontainers.GenericContainer(ctx, genericContainerReq)
diff --git a/modules/ollama/options.go b/modules/ollama/options.go
index 605768a379..4761a28530 100644
--- a/modules/ollama/options.go
+++ b/modules/ollama/options.go
@@ -2,6 +2,8 @@ package ollama
import (
"context"
+ "fmt"
+ "strings"
"github.com/docker/docker/api/types/container"
@@ -37,3 +39,34 @@ func withGpu() testcontainers.CustomizeRequestOption {
}
})
}
+
+var _ testcontainers.ContainerCustomizer = (*UseLocal)(nil)
+
+// UseLocal will use the local Ollama instance instead of pulling the Docker image.
+type UseLocal struct {
+ env []string
+}
+
+// WithUseLocal configures the module to use the local Ollama instance instead of
+// pulling the Docker image. Pass the environment variables you need to set for the
+// Ollama binary, in the format "KEY=VALUE". Pairs with the wrong format will cause an error.
+func WithUseLocal(values ...string) UseLocal {
+ return UseLocal{env: values}
+}
+
+// Customize implements the ContainerCustomizer interface, taking the key value pairs
+// and setting them as environment variables for the Ollama binary.
+// In the case of an invalid key value pair, an error is returned.
+func (u UseLocal) Customize(req *testcontainers.GenericContainerRequest) error {
+ env := make(map[string]string)
+ for _, kv := range u.env {
+ parts := strings.SplitN(kv, "=", 2)
+ if len(parts) != 2 {
+ return fmt.Errorf("invalid environment variable: %s", kv)
+ }
+
+ env[parts[0]] = parts[1]
+ }
+
+ return testcontainers.WithEnv(env)(req)
+}
diff --git a/modules/ollama/options_test.go b/modules/ollama/options_test.go
new file mode 100644
index 0000000000..f842d15a17
--- /dev/null
+++ b/modules/ollama/options_test.go
@@ -0,0 +1,49 @@
+package ollama_test
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/require"
+
+ "github.com/testcontainers/testcontainers-go"
+ "github.com/testcontainers/testcontainers-go/modules/ollama"
+)
+
+func TestWithUseLocal(t *testing.T) {
+ req := testcontainers.GenericContainerRequest{}
+
+ t.Run("keyVal/valid", func(t *testing.T) {
+ opt := ollama.WithUseLocal("OLLAMA_MODELS=/path/to/models")
+ err := opt.Customize(&req)
+ require.NoError(t, err)
+ require.Equal(t, "/path/to/models", req.Env["OLLAMA_MODELS"])
+ })
+
+ t.Run("keyVal/invalid", func(t *testing.T) {
+ opt := ollama.WithUseLocal("OLLAMA_MODELS")
+ err := opt.Customize(&req)
+ require.Error(t, err)
+ })
+
+ t.Run("keyVal/valid/multiple", func(t *testing.T) {
+ opt := ollama.WithUseLocal("OLLAMA_MODELS=/path/to/models", "OLLAMA_HOST=localhost")
+ err := opt.Customize(&req)
+ require.NoError(t, err)
+ require.Equal(t, "/path/to/models", req.Env["OLLAMA_MODELS"])
+ require.Equal(t, "localhost", req.Env["OLLAMA_HOST"])
+ })
+
+ t.Run("keyVal/valid/multiple-equals", func(t *testing.T) {
+ opt := ollama.WithUseLocal("OLLAMA_MODELS=/path/to/models", "OLLAMA_HOST=localhost=127.0.0.1")
+ err := opt.Customize(&req)
+ require.NoError(t, err)
+ require.Equal(t, "/path/to/models", req.Env["OLLAMA_MODELS"])
+ require.Equal(t, "localhost=127.0.0.1", req.Env["OLLAMA_HOST"])
+ })
+
+ t.Run("keyVal/invalid/multiple", func(t *testing.T) {
+ opt := ollama.WithUseLocal("OLLAMA_MODELS=/path/to/models", "OLLAMA_HOST")
+ err := opt.Customize(&req)
+ require.Error(t, err)
+ })
+}