diff --git a/cmd/cli/commands/launch.go b/cmd/cli/commands/launch.go
new file mode 100644
index 00000000..7c333231
--- /dev/null
+++ b/cmd/cli/commands/launch.go
@@ -0,0 +1,321 @@
+package commands
+
+import (
+	"errors"
+	"fmt"
+	"net"
+	"os"
+	"os/exec"
+	"sort"
+	"strings"
+
+	"github.com/docker/model-runner/cmd/cli/pkg/types"
+	"github.com/spf13/cobra"
+)
+
+// openaiPathSuffix is the path appended to the base URL for OpenAI-compatible endpoints.
+const openaiPathSuffix = "/engines/v1"
+
+// dummyAPIKey is a placeholder API key for Docker Model Runner (which doesn't require auth).
+const dummyAPIKey = "sk-docker-model-runner" //nolint:gosec // not a real credential
+
+// engineEndpoints holds the resolved base URLs (without path) for both
+// client locations.
+type engineEndpoints struct {
+	// base URL reachable from inside a Docker container
+	// (e.g., http://model-runner.docker.internal).
+	container string
+	// base URL reachable from the host machine
+	// (e.g., http://127.0.0.1:12434).
+	host string
+}
+
+// containerApp describes an app that runs as a Docker container.
+type containerApp struct {
+	defaultImage    string
+	defaultHostPort int
+	containerPort   int
+	envFn           func(baseURL string) []string
+	extraDockerArgs []string // additional docker run args (e.g., volume mounts)
+}
+
+// containerApps are launched via "docker run --rm".
+var containerApps = map[string]containerApp{
+	"anythingllm": {
+		defaultImage:    "mintplexlabs/anythingllm:latest",
+		defaultHostPort: 3001,
+		containerPort:   3001,
+		envFn:           anythingllmEnv,
+		extraDockerArgs: []string{"-v", "anythingllm_storage:/app/server/storage"},
+	},
+	"openwebui": {defaultImage: "ghcr.io/open-webui/open-webui:latest", defaultHostPort: 3000, containerPort: 8080, envFn: openaiEnv(openaiPathSuffix)},
+}
+
+// hostApp describes a native CLI app launched on the host.
+type hostApp struct {
+	envFn              func(baseURL string) []string
+	configInstructions func(baseURL string) []string // for apps that need manual config
+}
+
+// hostApps are launched as native executables on the host.
+var hostApps = map[string]hostApp{
+	"opencode": {envFn: openaiEnv(openaiPathSuffix)},
+	"codex":    {envFn: openaiEnv("/v1")},
+	"claude":   {envFn: anthropicEnv},
+	"openclaw": {configInstructions: openclawConfigInstructions},
+}
+
+// supportedApps is derived from the registries above.
+var supportedApps = func() []string {
+	apps := make([]string, 0, len(containerApps)+len(hostApps))
+	for name := range containerApps {
+		apps = append(apps, name)
+	}
+	for name := range hostApps {
+		apps = append(apps, name)
+	}
+	sort.Strings(apps)
+	return apps
+}()
+
+func newLaunchCmd() *cobra.Command {
+	var (
+		port   int
+		image  string
+		detach bool
+		dryRun bool
+	)
+	c := &cobra.Command{
+		Use:   "launch APP [-- APP_ARGS...]",
+		Short: "Launch an app configured to use Docker Model Runner",
+		Long: fmt.Sprintf(`Launch an app configured to use Docker Model Runner.
+
+Supported apps: %s`, strings.Join(supportedApps, ", ")),
+		Args:      cobra.MinimumNArgs(1),
+		ValidArgs: supportedApps,
+		RunE: func(cmd *cobra.Command, args []string) error {
+			app := strings.ToLower(args[0])
+			appArgs := args[1:]
+
+			runner, err := getStandaloneRunner(cmd.Context())
+			if err != nil {
+				return fmt.Errorf("unable to determine standalone runner endpoint: %w", err)
+			}
+
+			ep, err := resolveBaseEndpoints(runner)
+			if err != nil {
+				return err
+			}
+
+			if ca, ok := containerApps[app]; ok {
+				return launchContainerApp(cmd, ca, ep.container, image, port, detach, appArgs, dryRun)
+			}
+			if cli, ok := hostApps[app]; ok {
+				return launchHostApp(cmd, app, ep.host, cli, appArgs, dryRun)
+			}
+			return fmt.Errorf("unsupported app %q (supported: %s)", app, strings.Join(supportedApps, ", "))
+		},
+	}
+	c.Flags().IntVar(&port, "port", 0, "Host port to expose (web UIs)")
+	c.Flags().StringVar(&image, "image", "", "Override container image for containerized apps")
+	c.Flags().BoolVar(&detach, "detach", false, "Run containerized app in background")
+	c.Flags().BoolVar(&dryRun, "dry-run", false, "Print what would be executed without running it")
+	return c
+}
+
+// resolveBaseEndpoints resolves the base URLs (without path) for both
+// container and host client locations.
+func resolveBaseEndpoints(runner *standaloneRunner) (engineEndpoints, error) {
+	const (
+		localhost          = "127.0.0.1"
+		hostDockerInternal = "host.docker.internal"
+	)
+
+	kind := modelRunner.EngineKind()
+	switch kind {
+	case types.ModelRunnerEngineKindDesktop:
+		return engineEndpoints{
+			container: "http://model-runner.docker.internal",
+			host:      strings.TrimRight(modelRunner.URL(""), "/"),
+		}, nil
+	case types.ModelRunnerEngineKindMobyManual:
+		// The configured URL is host-relative; rewrite loopback host names so
+		// the same endpoint is reachable from inside a container.
+		ep := strings.TrimRight(modelRunner.URL(""), "/")
+		containerEP := strings.NewReplacer(
+			"localhost", hostDockerInternal,
+			localhost, hostDockerInternal,
+		).Replace(ep)
+		return engineEndpoints{container: containerEP, host: ep}, nil
+	case types.ModelRunnerEngineKindCloud, types.ModelRunnerEngineKindMoby:
+		if runner == nil {
+			return engineEndpoints{}, errors.New("unable to determine standalone runner endpoint")
+		}
+		// Prefer the gateway address; fall back to a host-published port.
+		if runner.gatewayIP != "" && runner.gatewayPort != 0 {
+			port := fmt.Sprintf("%d", runner.gatewayPort)
+			return engineEndpoints{
+				container: "http://" + net.JoinHostPort(runner.gatewayIP, port),
+				host:      "http://" + net.JoinHostPort(localhost, port),
+			}, nil
+		}
+		if runner.hostPort != 0 {
+			hostPort := fmt.Sprintf("%d", runner.hostPort)
+			return engineEndpoints{
+				container: "http://" + net.JoinHostPort(hostDockerInternal, hostPort),
+				host:      "http://" + net.JoinHostPort(localhost, hostPort),
+			}, nil
+		}
+		return engineEndpoints{}, errors.New("unable to determine standalone runner endpoint")
+	default:
+		return engineEndpoints{}, fmt.Errorf("unhandled engine kind: %v", kind)
+	}
+}
+
+// launchContainerApp launches a container-based app via "docker run".
+func launchContainerApp(cmd *cobra.Command, ca containerApp, baseURL string, imageOverride string, portOverride int, detach bool, appArgs []string, dryRun bool) error {
+	// Fail fast: a containerized app is only usable if we can point it at the
+	// model runner through environment variables.
+	if ca.envFn == nil {
+		return errors.New("container app requires envFn to be set")
+	}
+
+	// Apply CLI overrides, falling back to the registry defaults.
+	img := imageOverride
+	if img == "" {
+		img = ca.defaultImage
+	}
+	hostPort := portOverride
+	if hostPort == 0 {
+		hostPort = ca.defaultHostPort
+	}
+
+	dockerArgs := []string{"run", "--rm"}
+	if detach {
+		dockerArgs = append(dockerArgs, "-d")
+	}
+	dockerArgs = append(dockerArgs,
+		"-p", fmt.Sprintf("%d:%d", hostPort, ca.containerPort),
+	)
+	dockerArgs = append(dockerArgs, ca.extraDockerArgs...)
+	for _, e := range ca.envFn(baseURL) {
+		dockerArgs = append(dockerArgs, "-e", e)
+	}
+	dockerArgs = append(dockerArgs, img)
+	dockerArgs = append(dockerArgs, appArgs...)
+
+	if dryRun {
+		cmd.Printf("Would run: docker %s\n", strings.Join(dockerArgs, " "))
+		return nil
+	}
+
+	return runExternal(cmd, nil, "docker", dockerArgs...)
+}
+
+// launchHostApp launches a native host app executable.
+func launchHostApp(cmd *cobra.Command, bin string, baseURL string, cli hostApp, appArgs []string, dryRun bool) error {
+	// Outside dry-run mode, verify the binary exists before doing anything,
+	// and print the configuration the user would need either way.
+	if !dryRun {
+		if _, err := exec.LookPath(bin); err != nil {
+			cmd.PrintErrf("%q executable not found in PATH.\n", bin)
+			if cli.envFn != nil {
+				cmd.PrintErrf("Configure your app to use:\n")
+				for _, e := range cli.envFn(baseURL) {
+					cmd.PrintErrf(" %s\n", e)
+				}
+			}
+			return fmt.Errorf("%s not found; please install it and re-run", bin)
+		}
+	}
+
+	// Apps without an env builder need manual configuration instead.
+	if cli.envFn == nil {
+		return launchUnconfigurableHostApp(cmd, bin, baseURL, cli, appArgs, dryRun)
+	}
+
+	env := cli.envFn(baseURL)
+	if dryRun {
+		cmd.Printf("Would run: %s %s\n", bin, strings.Join(appArgs, " "))
+		for _, e := range env {
+			cmd.Printf(" %s\n", e)
+		}
+		return nil
+	}
+	return runExternal(cmd, withEnv(env...), bin, appArgs...)
+}
+
+// launchUnconfigurableHostApp handles host apps that need manual config rather than env vars.
+func launchUnconfigurableHostApp(cmd *cobra.Command, bin string, baseURL string, cli hostApp, appArgs []string, dryRun bool) error {
+	enginesEP := baseURL + openaiPathSuffix
+	cmd.Printf("Configure %s to use Docker Model Runner:\n", bin)
+	cmd.Printf(" Base URL: %s\n", enginesEP)
+	cmd.Printf(" API type: openai-completions\n")
+	cmd.Printf(" API key: %s\n", dummyAPIKey)
+
+	if cli.configInstructions != nil {
+		cmd.Printf("\nExample:\n")
+		for _, line := range cli.configInstructions(baseURL) {
+			cmd.Printf(" %s\n", line)
+		}
+	}
+	if dryRun {
+		cmd.Printf("Would run: %s %s\n", bin, strings.Join(appArgs, " "))
+		return nil
+	}
+	return runExternal(cmd, nil, bin, appArgs...)
+}
+
+// openclawConfigInstructions returns configuration commands for openclaw.
+func openclawConfigInstructions(baseURL string) []string {
+	ep := baseURL + openaiPathSuffix
+	return []string{
+		fmt.Sprintf("openclaw config set models.providers.docker-model-runner.baseUrl %q", ep),
+		"openclaw config set models.providers.docker-model-runner.api openai-completions",
+		fmt.Sprintf("openclaw config set models.providers.docker-model-runner.apiKey %s", dummyAPIKey),
+	}
+}
+
+// openaiEnv returns an env builder that sets OpenAI-compatible
+// environment variables using the given path suffix.
+func openaiEnv(suffix string) func(string) []string {
+	return func(baseURL string) []string {
+		ep := baseURL + suffix
+		return []string{
+			"OPENAI_API_BASE=" + ep,
+			"OPENAI_BASE_URL=" + ep,
+			"OPENAI_API_BASE_URL=" + ep,
+			"OPENAI_API_KEY=" + dummyAPIKey,
+			"OPEN_AI_KEY=" + dummyAPIKey, // AnythingLLM uses this
+		}
+	}
+}
+
+// anythingllmEnv returns environment variables for AnythingLLM with Docker Model Runner provider.
+func anythingllmEnv(baseURL string) []string {
+	return []string{
+		"STORAGE_DIR=/app/server/storage",
+		"LLM_PROVIDER=docker-model-runner",
+		"DOCKER_MODEL_RUNNER_BASE_PATH=" + baseURL,
+	}
+}
+
+// anthropicEnv returns Anthropic-compatible environment variables.
+func anthropicEnv(baseURL string) []string {
+	return []string{
+		"ANTHROPIC_BASE_URL=" + baseURL + "/anthropic",
+		"ANTHROPIC_API_KEY=" + dummyAPIKey,
+	}
+}
+
+// withEnv returns the current process environment extended with extra vars.
+func withEnv(extra ...string) []string {
+	return append(os.Environ(), extra...)
+}
+
+// runExternal executes a program inheriting stdio.
+// Security: prog and progArgs are either hardcoded values or user-provided
+// arguments that the user explicitly intends to pass to the launched app.
+func runExternal(cmd *cobra.Command, env []string, prog string, progArgs ...string) error {
+	c := exec.Command(prog, progArgs...)
+	c.Stdout = cmd.OutOrStdout()
+	c.Stderr = cmd.ErrOrStderr()
+	c.Stdin = os.Stdin
+	if env != nil {
+		c.Env = env
+	}
+	if err := c.Run(); err != nil {
+		return fmt.Errorf("failed to run %s %s: %w", prog, strings.Join(progArgs, " "), err)
+	}
+	return nil
+}
diff --git a/cmd/cli/commands/launch_test.go b/cmd/cli/commands/launch_test.go
new file mode 100644
index 00000000..f765195a
--- /dev/null
+++ b/cmd/cli/commands/launch_test.go
@@ -0,0 +1,417 @@
+package commands
+
+import (
+	"bytes"
+	"fmt"
+	"testing"
+
+	"github.com/docker/model-runner/cmd/cli/desktop"
+	"github.com/docker/model-runner/cmd/cli/pkg/types"
+	"github.com/docker/model-runner/pkg/inference"
+	"github.com/spf13/cobra"
+	"github.com/stretchr/testify/require"
+)
+
+const (
+	testBaseURL       = "http://example.com"
+	testImage         = "test/image:latest"
+	testHostPort      = 3000
+	testContainerPort = 8080
+)
+
+func testContainerApp(envFn func(string) []string) containerApp {
+	return containerApp{
+		defaultImage:    testImage,
+		defaultHostPort: testHostPort,
+		containerPort:   testContainerPort,
+		envFn:           envFn,
+	}
+}
+
+func newTestCmd(buf *bytes.Buffer) *cobra.Command {
+	cmd := &cobra.Command{}
+	cmd.SetOut(buf)
+	return cmd
+}
+
+func TestSupportedAppsContainsAllRegistered(t *testing.T) {
+	for name := range containerApps {
+		require.Contains(t, supportedApps, name, "containerApps entry %q missing from supportedApps", name)
+	}
+	for name := range hostApps {
+		require.Contains(t, supportedApps, name, "hostApps entry %q missing from supportedApps", name)
+	}
+	require.Equal(t, len(containerApps)+len(hostApps), len(supportedApps))
+}
+
+func TestResolveBaseEndpointsDesktop(t *testing.T) {
+	expectedHost := "http://localhost" + inference.ExperimentalEndpointsPrefix
+	ctx, err := desktop.NewContextForTest(
+		expectedHost,
+		nil,
+		types.ModelRunnerEngineKindDesktop,
+	)
+	require.NoError(t, err)
+	modelRunner = ctx
+
+	ep, err := resolveBaseEndpoints(nil)
+	require.NoError(t, err)
+	require.Equal(t, "http://model-runner.docker.internal", ep.container)
+	require.Equal(t, expectedHost, ep.host)
+}
+
+func TestResolveBaseEndpointsMobyManual(t *testing.T) {
+	hostURL := "http://localhost:8080"
+	ctx, err := desktop.NewContextForTest(
+		hostURL,
+		nil,
+		types.ModelRunnerEngineKindMobyManual,
+	)
+	require.NoError(t, err)
+	modelRunner = ctx
+
+	ep, err := resolveBaseEndpoints(nil)
+	require.NoError(t, err)
+	require.Equal(t, "http://host.docker.internal:8080", ep.container)
+	require.Equal(t, hostURL, ep.host)
+}
+
+func TestResolveBaseEndpointsCloud(t *testing.T) {
+	ctx, err := desktop.NewContextForTest(
+		"http://localhost:12435",
+		nil,
+		types.ModelRunnerEngineKindCloud,
+	)
+	require.NoError(t, err)
+	modelRunner = ctx
+
+	runner := &standaloneRunner{
+		gatewayIP:   "172.17.0.1",
+		gatewayPort: 12435,
+	}
+	ep, err := resolveBaseEndpoints(runner)
+	require.NoError(t, err)
+	require.Equal(t, "http://172.17.0.1:12435", ep.container)
+	require.Equal(t, "http://127.0.0.1:12435", ep.host)
+}
+
+func TestResolveBaseEndpointsMoby(t *testing.T) {
+	ctx, err := desktop.NewContextForTest(
+		"http://localhost:12434",
+		nil,
+		types.ModelRunnerEngineKindMoby,
+	)
+	require.NoError(t, err)
+	modelRunner = ctx
+
+	runner := &standaloneRunner{
+		gatewayIP:   "172.17.0.1",
+		gatewayPort: 12434,
+	}
+	ep, err := resolveBaseEndpoints(runner)
+	require.NoError(t, err)
+	require.Equal(t, "http://172.17.0.1:12434", ep.container)
+	require.Equal(t, "http://127.0.0.1:12434", ep.host)
+}
+
+func TestUnableToResolveBaseEndpointsCloud(t *testing.T) {
+	ctx, err := desktop.NewContextForTest(
+		"http://localhost:12435",
+		nil,
+		types.ModelRunnerEngineKindCloud,
+	)
+	require.NoError(t, err)
+	modelRunner = ctx
+
+	for _, tc := range []struct {
+		name   string
+		runner *standaloneRunner
+	}{
+		{"nil runner", nil},
+		{"empty gateway and hostPort", &standaloneRunner{gatewayIP: "", gatewayPort: 0, hostPort: 0}},
+	} {
+		t.Run(tc.name, func(t *testing.T) {
+			_, err := resolveBaseEndpoints(tc.runner)
+			require.Error(t, err)
+			require.Contains(t, err.Error(), "unable to determine standalone runner endpoint")
+		})
+	}
+}
+
+func TestResolveBaseEndpointsHostPortFallback(t *testing.T) {
+	ctx, err := desktop.NewContextForTest(
+		"http://localhost:12434",
+		nil,
+		types.ModelRunnerEngineKindMoby,
+	)
+	require.NoError(t, err)
+	modelRunner = ctx
+
+	runner := &standaloneRunner{hostPort: 12434}
+	ep, err := resolveBaseEndpoints(runner)
+	require.NoError(t, err)
+	require.Equal(t, "http://host.docker.internal:12434", ep.container)
+	require.Equal(t, "http://127.0.0.1:12434", ep.host)
+}
+
+func TestLaunchContainerAppDryRun(t *testing.T) {
+	ca := testContainerApp(openaiEnv(openaiPathSuffix))
+	buf := new(bytes.Buffer)
+	cmd := newTestCmd(buf)
+
+	err := launchContainerApp(cmd, ca, testBaseURL, "", 0, false, nil, true)
+	require.NoError(t, err)
+
+	output := buf.String()
+	require.Contains(t, output, "Would run: docker")
+	require.Contains(t, output, "run --rm")
+	require.Contains(t, output, fmt.Sprintf("-p %d:%d", testHostPort, testContainerPort))
+	require.Contains(t, output, testImage)
+	require.Contains(t, output, "OPENAI_API_BASE="+testBaseURL+"/engines/v1")
+}
+
+func TestLaunchContainerAppOverrides(t *testing.T) {
+	ca := testContainerApp(openaiEnv(openaiPathSuffix))
+	overrideImage := "custom/image:v2"
+	overridePort := 9999
+	buf := new(bytes.Buffer)
+	cmd := newTestCmd(buf)
+
+	err := launchContainerApp(cmd, ca, testBaseURL, overrideImage, overridePort, false, nil, true)
+	require.NoError(t, err)
+
+	output := buf.String()
+	require.Contains(t, output, overrideImage)
+	require.NotContains(t, output, testImage)
+	require.Contains(t, output, fmt.Sprintf("-p %d:%d", overridePort, testContainerPort))
+}
+
+func TestLaunchContainerAppDetach(t *testing.T) {
+	ca := testContainerApp(openaiEnv(openaiPathSuffix))
+	buf := new(bytes.Buffer)
+	cmd := newTestCmd(buf)
+
+	err := launchContainerApp(cmd, ca, testBaseURL, "", 0, true, nil, true)
+	require.NoError(t, err)
+
+	output := buf.String()
+	require.Contains(t, output, "run --rm -d")
+}
+
+func TestLaunchContainerAppUsesEnvFn(t *testing.T) {
+	customEnv := func(baseURL string) []string {
+		return []string{"CUSTOM_URL=" + baseURL + "/custom"}
+	}
+	ca := testContainerApp(customEnv)
+	buf := new(bytes.Buffer)
+	cmd := newTestCmd(buf)
+
+	err := launchContainerApp(cmd, ca, testBaseURL, "", 0, false, nil, true)
+	require.NoError(t, err)
+
+	output := buf.String()
+	require.Contains(t, output, "CUSTOM_URL="+testBaseURL+"/custom")
+	require.NotContains(t, output, "OPENAI_API_BASE")
+}
+
+func TestLaunchContainerAppNilEnvFn(t *testing.T) {
+	ca := testContainerApp(nil)
+	buf := new(bytes.Buffer)
+	cmd := newTestCmd(buf)
+
+	err := launchContainerApp(cmd, ca, testBaseURL, "", 0, false, nil, true)
+	require.Error(t, err)
+	require.Contains(t, err.Error(), "container app requires envFn to be set")
+}
+
+func TestLaunchHostAppDryRunOpenai(t *testing.T) {
+	buf := new(bytes.Buffer)
+	cmd := newTestCmd(buf)
+
+	cli := hostApp{envFn: openaiEnv(openaiPathSuffix)}
+	// Use "ls" as a bin that exists in PATH
+	err := launchHostApp(cmd, "ls", testBaseURL, cli, nil, true)
+	require.NoError(t, err)
+
+	output := buf.String()
+	require.Contains(t, output, "Would run: ls")
+	require.Contains(t, output, "OPENAI_API_BASE="+testBaseURL+"/engines/v1")
+	require.Contains(t, output, "OPENAI_BASE_URL="+testBaseURL+"/engines/v1")
+	require.Contains(t, output, "OPENAI_API_KEY="+dummyAPIKey)
+}
+
+func TestLaunchHostAppDryRunCodex(t *testing.T) {
+	buf := new(bytes.Buffer)
+	cmd := newTestCmd(buf)
+
+	cli := hostApp{envFn: openaiEnv("/v1")}
+	err := launchHostApp(cmd, "ls", testBaseURL, cli, nil, true)
+	require.NoError(t, err)
+
+	output := buf.String()
+	require.Contains(t, output, "Would run: ls")
+	require.Contains(t, output, "OPENAI_BASE_URL="+testBaseURL+"/v1")
+	require.Contains(t, output, "OPENAI_API_KEY="+dummyAPIKey)
+	require.NotContains(t, output, "/engines/v1")
+}
+
+func TestLaunchHostAppDryRunWithArgs(t *testing.T) {
+	buf := new(bytes.Buffer)
+	cmd := newTestCmd(buf)
+
+	cli := hostApp{envFn: openaiEnv(openaiPathSuffix)}
+	err := launchHostApp(cmd, "ls", testBaseURL, cli, []string{"-m", "ai/qwen3"}, true)
+	require.NoError(t, err)
+
+	output := buf.String()
+	require.Contains(t, output, "Would run: ls -m ai/qwen3")
+}
+
+func TestLaunchHostAppDryRunAnthropic(t *testing.T) {
+	buf := new(bytes.Buffer)
+	cmd := newTestCmd(buf)
+
+	cli := hostApp{envFn: anthropicEnv}
+	err := launchHostApp(cmd, "ls", testBaseURL, cli, nil, true)
+	require.NoError(t, err)
+
+	output := buf.String()
+	require.Contains(t, output, "Would run: ls")
+	require.Contains(t, output, "ANTHROPIC_BASE_URL="+testBaseURL+"/anthropic")
+	require.Contains(t, output, "ANTHROPIC_API_KEY="+dummyAPIKey)
+	require.NotContains(t, output, "OPENAI_")
+}
+
+func TestLaunchHostAppNotFound(t *testing.T) {
+	stdout := new(bytes.Buffer)
+	stderr := new(bytes.Buffer)
+	cmd := &cobra.Command{}
+	cmd.SetOut(stdout)
+	cmd.SetErr(stderr)
+
+	cli := hostApp{envFn: openaiEnv(openaiPathSuffix)}
+	err := launchHostApp(cmd, "nonexistent-binary-xyz", testBaseURL, cli, nil, false)
+	require.Error(t, err)
+	require.Contains(t, err.Error(), "not found")
+
+	errOutput := stderr.String()
+	require.Contains(t, errOutput, "not found in PATH")
+	require.Contains(t, errOutput, "Configure your app to use:")
+}
+
+func TestLaunchHostAppNotFoundNilEnvFn(t *testing.T) {
+	stdout := new(bytes.Buffer)
+	stderr := new(bytes.Buffer)
+	cmd := &cobra.Command{}
+	cmd.SetOut(stdout)
+	cmd.SetErr(stderr)
+
+	cli := hostApp{envFn: nil}
+	err := launchHostApp(cmd, "nonexistent-binary-xyz", testBaseURL, cli, nil, false)
+	require.Error(t, err)
+
+	errOutput := stderr.String()
+	require.Contains(t, errOutput, "not found in PATH")
+	require.NotContains(t, errOutput, "Configure your app to use:")
+}
+
+func TestLaunchUnconfigurableHostAppDryRun(t *testing.T) {
+	buf := new(bytes.Buffer)
+	cmd := newTestCmd(buf)
+
+	cli := hostApp{configInstructions: openclawConfigInstructions}
+	err := launchUnconfigurableHostApp(cmd, "openclaw", testBaseURL, cli, nil, true)
+	require.NoError(t, err)
+
+	output := buf.String()
+	require.Contains(t, output, "Configure openclaw to use Docker Model Runner:")
+	require.Contains(t, output, "Base URL: "+testBaseURL+"/engines/v1")
+	require.Contains(t, output, "API type: openai-completions")
+	require.Contains(t, output, "API key: "+dummyAPIKey)
+	require.Contains(t, output, "openclaw config set models.providers.docker-model-runner.baseUrl")
+}
+
+func TestNewLaunchCmdFlags(t *testing.T) {
+	cmd := newLaunchCmd()
+
+	require.NotNil(t, cmd.Flags().Lookup("port"))
+	require.NotNil(t, cmd.Flags().Lookup("image"))
+	require.NotNil(t, cmd.Flags().Lookup("detach"))
+	require.NotNil(t, cmd.Flags().Lookup("dry-run"))
+}
+
+func TestNewLaunchCmdValidArgs(t *testing.T) {
+	cmd := newLaunchCmd()
+	require.Equal(t, supportedApps, cmd.ValidArgs)
+}
+
+func TestNewLaunchCmdRequiresAtLeastOneArg(t *testing.T) {
+	cmd := newLaunchCmd()
+	cmd.SetArgs([]string{})
+	err := cmd.Execute()
+
+	require.Error(t, err)
+	require.Contains(t, err.Error(), "requires at least 1 arg")
+}
+
+func TestNewLaunchCmdDispatchContainerApp(t *testing.T) {
+	ctx, err := desktop.NewContextForTest(
+		"http://localhost"+inference.ExperimentalEndpointsPrefix,
+		nil,
+		types.ModelRunnerEngineKindDesktop,
+	)
+	require.NoError(t, err)
+	modelRunner = ctx
+
+	buf := new(bytes.Buffer)
+	cmd := newLaunchCmd()
+	cmd.SetOut(buf)
+	cmd.SetArgs([]string{"openwebui", "--dry-run"})
+
+	err = cmd.Execute()
+	require.NoError(t, err)
+
+	output := buf.String()
+	require.Contains(t, output, "Would run: docker")
+	require.Contains(t, output, "ghcr.io/open-webui/open-webui:latest")
+}
+
+func TestNewLaunchCmdDispatchHostApp(t *testing.T) {
+	ctx, err := desktop.NewContextForTest(
+		"http://localhost"+inference.ExperimentalEndpointsPrefix,
+		nil,
+		types.ModelRunnerEngineKindDesktop,
+	)
+	require.NoError(t, err)
+	modelRunner = ctx
+
+	buf := new(bytes.Buffer)
+	cmd := newLaunchCmd()
+	cmd.SetOut(buf)
+	cmd.SetArgs([]string{"openclaw", "--dry-run"})
+
+	err = cmd.Execute()
+	require.NoError(t, err)
+
+	output := buf.String()
+	require.Contains(t, output, "Configure openclaw to use Docker Model Runner:")
+}
+
+func TestNewLaunchCmdDispatchUnsupportedApp(t *testing.T) {
+	ctx, err := desktop.NewContextForTest(
+		"http://localhost"+inference.ExperimentalEndpointsPrefix,
+		nil,
+		types.ModelRunnerEngineKindDesktop,
+	)
+	require.NoError(t, err)
+	modelRunner = ctx
+
+	buf := new(bytes.Buffer)
+	cmd := newLaunchCmd()
+	cmd.SetOut(buf)
+	cmd.SetArgs([]string{"bogus"})
+
+	err = cmd.Execute()
+	require.Error(t, err)
+	require.Contains(t, err.Error(), "unsupported app")
+}
diff --git a/cmd/cli/commands/root.go b/cmd/cli/commands/root.go
index 568fe17f..a7d7dce3 100644
--- a/cmd/cli/commands/root.go
+++ b/cmd/cli/commands/root.go
@@ -107,6 +107,7 @@ func NewRootCmd(cli *command.DockerCli) *cobra.Command {
 		newInspectCmd(),
 		newShowCmd(),
 		newComposeCmd(),
+		newLaunchCmd(),
 		newTagCmd(),
 		newConfigureCmd(),
 		newPSCmd(),
diff --git a/cmd/cli/docs/reference/docker_model.yaml b/cmd/cli/docs/reference/docker_model.yaml
index a257839c..13a6c255 100644
--- a/cmd/cli/docs/reference/docker_model.yaml
+++ b/cmd/cli/docs/reference/docker_model.yaml
@@ -10,6 +10,7 @@ cname:
     - docker model df
     - docker model inspect
     - docker model install-runner
+    - docker model launch
     - docker model list
     - docker model logs
     - docker model package
@@ -36,6 +37,7 @@ clink:
     - docker_model_df.yaml
     - docker_model_inspect.yaml
     - docker_model_install-runner.yaml
+    - docker_model_launch.yaml
     - docker_model_list.yaml
     - docker_model_logs.yaml
     - docker_model_package.yaml
diff --git a/cmd/cli/docs/reference/docker_model_launch.yaml b/cmd/cli/docs/reference/docker_model_launch.yaml
new file mode 100644
index 00000000..31ec71e8
--- /dev/null
+++ b/cmd/cli/docs/reference/docker_model_launch.yaml
@@ -0,0 +1,56 @@
+command: docker model launch
+short: Launch an app configured to use Docker Model Runner
+long: |-
+    Launch an app configured to use Docker Model Runner.
+
+    Supported apps: anythingllm, claude, codex, openclaw, opencode, openwebui
+usage: docker model launch APP [-- APP_ARGS...]
+pname: docker model
+plink: docker_model.yaml
+options:
+    - option: detach
+      value_type: bool
+      default_value: "false"
+      description: Run containerized app in background
+      deprecated: false
+      hidden: false
+      experimental: false
+      experimentalcli: false
+      kubernetes: false
+      swarm: false
+    - option: dry-run
+      value_type: bool
+      default_value: "false"
+      description: Print what would be executed without running it
+      deprecated: false
+      hidden: false
+      experimental: false
+      experimentalcli: false
+      kubernetes: false
+      swarm: false
+    - option: image
+      value_type: string
+      description: Override container image for containerized apps
+      deprecated: false
+      hidden: false
+      experimental: false
+      experimentalcli: false
+      kubernetes: false
+      swarm: false
+    - option: port
+      value_type: int
+      default_value: "0"
+      description: Host port to expose (web UIs)
+      deprecated: false
+      hidden: false
+      experimental: false
+      experimentalcli: false
+      kubernetes: false
+      swarm: false
+deprecated: false
+hidden: false
+experimental: false
+experimentalcli: false
+kubernetes: false
+swarm: false
+
diff --git a/cmd/cli/docs/reference/model.md b/cmd/cli/docs/reference/model.md
index 77246756..230d904b 100644
--- a/cmd/cli/docs/reference/model.md
+++ b/cmd/cli/docs/reference/model.md
@@ -11,6 +11,7 @@ Docker Model Runner
 | [`df`](model_df.md) | Show Docker Model Runner disk usage |
 | [`inspect`](model_inspect.md) | Display detailed information on one model |
 | [`install-runner`](model_install-runner.md) | Install Docker Model Runner (Docker Engine only) |
+| [`launch`](model_launch.md) | Launch an app configured to use Docker Model Runner |
 | [`list`](model_list.md) | List the models pulled to your local environment |
 | [`logs`](model_logs.md) | Fetch the Docker Model Runner logs |
 | [`package`](model_package.md) | Package a GGUF file, Safetensors directory, DDUF file, or existing model into a Docker model OCI artifact. |
diff --git a/cmd/cli/docs/reference/model_launch.md b/cmd/cli/docs/reference/model_launch.md
new file mode 100644
index 00000000..fd3c48ae
--- /dev/null
+++ b/cmd/cli/docs/reference/model_launch.md
@@ -0,0 +1,19 @@
+# docker model launch
+
+
+Launch an app configured to use Docker Model Runner.
+
+Supported apps: anythingllm, claude, codex, openclaw, opencode, openwebui
+
+### Options
+
+| Name | Type | Default | Description |
+|:------------|:---------|:--------|:------------------------------------------------|
+| `--detach` | `bool` | | Run containerized app in background |
+| `--dry-run` | `bool` | | Print what would be executed without running it |
+| `--image` | `string` | | Override container image for containerized apps |
+| `--port` | `int` | `0` | Host port to expose (web UIs) |
+
+
+