mirror of https://github.com/ollama/ollama.git
* don't require pulling stubs for cloud models

  This is the first in a series of PRs that will better integrate Ollama's cloud into the API and CLI. Previously there was a layer of indirection where you'd first have to pull a "stub" model that contains a reference to a cloud model. With this change, you don't have to pull first: you can use a cloud model directly in routes like `/api/chat` and `/api/show`. This change respects <https://github.com/ollama/ollama/pull/14221>, so if cloud is disabled, these models won't be accessible.

  There's also a new, simpler pass-through proxy that doesn't convert requests before they reach the cloud models, since the cloud models themselves already support various formats (e.g., `v1/chat/completions`, Open Responses, etc.). This helps prevent issues caused by double conversion (e.g., `v1/chat/completions` converted to `api/chat` on the client, then calling cloud and converting back to a `v1/chat/completions` response, instead of the cloud model handling the original `v1/chat/completions` request directly).

  There's now a notion of "source tags", which can be mixed with existing tags. So instead of having different formats like `gpt-oss:20b-cloud` vs. `kimi-k2.5:cloud` (`-cloud` suffix vs. `:cloud`), you can now specify cloud by simply appending `:cloud`. This PR doesn't change model resolution yet, but sets us up to allow for things like omitting the non-source tag, which would make something like `ollama run gpt-oss:cloud` work the same way that `ollama run gpt-oss` already works today.

  More detailed changes:

  - Added a shared model selector parser in `types/modelselector` (a sketch of these semantics follows the commit notes below):
    - supports `:cloud` and `:local`
    - accepts source tags in any position
    - supports legacy `:<tag>-cloud`
    - rejects conflicting source tags
  - Integrated selector handling across server inference/show routes:
    - `GenerateHandler`, `ChatHandler`, `EmbedHandler`, `EmbeddingsHandler`, `ShowHandler`
  - Added explicit-cloud passthrough proxy for ollama.com:
    - same-endpoint forwarding for `/api/*`, `/v1/*`, and `/v1/messages`
    - normalizes `model` (and `name` for `/api/show`) before forwarding
    - forwards request headers except hop-by-hop/proxy-managed headers
    - uses a bounded response-header timeout
    - handles auth failures in a friendly way
  - Preserved cloud-disable behavior (`OLLAMA_NO_CLOUD`)
  - Updated the create flow to support `FROM ...:cloud` model sources (this flow still uses the legacy proxy, since supporting Modelfile overrides is more complicated with the direct proxy approach)
  - Updated CLI/TUI/config cloud detection to use the shared selector logic
  - Updated CLI preflight behavior so explicit cloud requests do not auto-pull local stubs

  What's next?

  - Cloud discovery/listing and cache-backed `ollama ls` / `/api/tags`
  - Modelfile overlay support for virtual cloud models on OpenAI/Anthropic request families
  - Recommender/default-selection behavior for ambiguous model families
  - Fully remove the legacy flow

  Fixes: https://github.com/ollama/ollama/issues/13801

* consolidate pull logic into confirmAndPull helper

  pullIfNeeded and ShowOrPull shared identical confirm-and-pull logic. Extract confirmAndPull to eliminate the duplication.

* skip local existence checks for cloud models

  ModelExists and the TUI's modelExists both check the local model list, which causes cloud models to appear missing. Return true early for explicit cloud models so the TUI displays them beside the integration name and skips re-prompting the model picker on relaunch.
* support optionally pulling stubs for new-style names

  We now normalize names like `<family>:<size>:cloud` into legacy-style names like `<family>:<size>-cloud` for pulling and deleting (this also supports stripping `:local`; see the second sketch below). Support for pulling cloud models is temporary; once we integrate properly into `/api/tags` we won't need this anymore.

* Fix server alias syncing

* Update cmd/cmd.go

  Co-authored-by: Parth Sareen <parth.sareen@ollama.com>

* address comments

* improve some naming

---------

Co-authored-by: ParthSareen <parth.sareen@ollama.com>
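For illustration, here is a minimal, self-contained sketch of the selector semantics the `types/modelselector` bullet describes (`:cloud`/`:local` accepted in any tag position, legacy `-cloud` suffix, conflicting tags rejected). `parseSelector` and its exact behavior are assumptions for demonstration, not the actual parser API:

```go
package main

import (
	"fmt"
	"strings"
)

// parseSelector splits a model reference into its base name and an optional
// source ("cloud" or "local"). Source tags may appear in any tag position,
// and the legacy ":<tag>-cloud" suffix is treated as cloud. This is a sketch;
// the real parser handles edge cases this ignores.
func parseSelector(name string) (base, source string, err error) {
	var kept []string
	for _, p := range strings.Split(name, ":") {
		switch {
		case p == "cloud" || p == "local":
			if source != "" && source != p {
				return "", "", fmt.Errorf("conflicting source tags in %q", name)
			}
			source = p
		case strings.HasSuffix(p, "-cloud"): // legacy form, e.g. "20b-cloud"
			if source == "local" {
				return "", "", fmt.Errorf("conflicting source tags in %q", name)
			}
			source = "cloud"
			kept = append(kept, strings.TrimSuffix(p, "-cloud"))
		default:
			kept = append(kept, p)
		}
	}
	return strings.Join(kept, ":"), source, nil
}

func main() {
	for _, n := range []string{"gpt-oss:20b-cloud", "kimi-k2.5:cloud", "qwen3:8b:local", "bad:cloud:local"} {
		base, source, err := parseSelector(n)
		fmt.Println(n, "->", base, source, err)
	}
}
```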
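And a minimal sketch of the pull/delete normalization the stub-pulling commit describes, assuming simple string rewriting; `normalizeForPull` is a hypothetical helper, not the real implementation:

```go
package main

import (
	"fmt"
	"strings"
)

// normalizeForPull rewrites a trailing ":cloud" source tag into the legacy
// "-cloud" tag suffix so the existing pull/delete paths keep working, and
// strips an explicit ":local" tag. Hypothetical helper for illustration.
func normalizeForPull(name string) string {
	if base, ok := strings.CutSuffix(name, ":cloud"); ok {
		if strings.Contains(base, ":") {
			return base + "-cloud" // gpt-oss:20b:cloud -> gpt-oss:20b-cloud
		}
		return name // kimi-k2.5:cloud already matches the legacy form
	}
	return strings.TrimSuffix(name, ":local")
}

func main() {
	fmt.Println(normalizeForPull("gpt-oss:20b:cloud")) // gpt-oss:20b-cloud
	fmt.Println(normalizeForPull("kimi-k2.5:cloud"))   // kimi-k2.5:cloud
	fmt.Println(normalizeForPull("qwen3:8b:local"))    // qwen3:8b
}
```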
284 lines
6.8 KiB
Go
package config

import (
	"context"
	"encoding/json"
	"errors"
	"fmt"
	"maps"
	"os"
	"os/exec"
	"path/filepath"
	"slices"
	"strings"

	"github.com/ollama/ollama/envconfig"
	"github.com/ollama/ollama/internal/modelref"
)

// OpenCode implements Runner and Editor for OpenCode integration
type OpenCode struct{}

// cloudModelLimit holds context and output token limits for a cloud model.
type cloudModelLimit struct {
	Context int
	Output  int
}
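
// Note: cloudModelLimits is a package-level lookup table declared elsewhere
// in this package, mapping cloud model names to their cloudModelLimit.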

// lookupCloudModelLimit returns the token limits for a cloud model.
// It tries the exact name first, then strips explicit cloud suffixes.
func lookupCloudModelLimit(name string) (cloudModelLimit, bool) {
	if l, ok := cloudModelLimits[name]; ok {
		return l, true
	}
	base, stripped := modelref.StripCloudSourceTag(name)
	if stripped {
		if l, ok := cloudModelLimits[base]; ok {
			return l, true
		}
	}
	return cloudModelLimit{}, false
}

func (o *OpenCode) String() string { return "OpenCode" }

func (o *OpenCode) Run(model string, args []string) error {
	if _, err := exec.LookPath("opencode"); err != nil {
		return fmt.Errorf("opencode is not installed, install from https://opencode.ai")
	}

	// Call Edit() to ensure config is up-to-date before launch
	models := []string{model}
	if config, err := loadIntegration("opencode"); err == nil && len(config.Models) > 0 {
		models = config.Models
	}
	var err error
	models, err = resolveEditorModels("opencode", models, func() ([]string, error) {
		return selectModels(context.Background(), "opencode", "")
	})
	if errors.Is(err, errCancelled) {
		return nil
	}
	if err != nil {
		return err
	}
	if err := o.Edit(models); err != nil {
		return fmt.Errorf("setup failed: %w", err)
	}

	cmd := exec.Command("opencode", args...)
	cmd.Stdin = os.Stdin
	cmd.Stdout = os.Stdout
	cmd.Stderr = os.Stderr
	return cmd.Run()
}

func (o *OpenCode) Paths() []string {
	home, err := os.UserHomeDir()
	if err != nil {
		return nil
	}

	var paths []string
	p := filepath.Join(home, ".config", "opencode", "opencode.json")
	if _, err := os.Stat(p); err == nil {
		paths = append(paths, p)
	}
	sp := filepath.Join(home, ".local", "state", "opencode", "model.json")
	if _, err := os.Stat(sp); err == nil {
		paths = append(paths, sp)
	}
	return paths
}
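
// Edit merges the selected models into OpenCode's opencode.json under the
// "ollama" provider and refreshes OpenCode's recent-model state file. For
// illustration only (shape, not the exact schema; baseURL shown assuming
// Ollama's default host), the provider entry it maintains looks like:
//
//	"provider": {
//	  "ollama": {
//	    "npm": "@ai-sdk/openai-compatible",
//	    "name": "Ollama",
//	    "options": {"baseURL": "http://127.0.0.1:11434/v1"},
//	    "models": {"<model>": {"name": "<model>", "_launch": true}}
//	  }
//	}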
func (o *OpenCode) Edit(modelList []string) error {
	if len(modelList) == 0 {
		return nil
	}

	home, err := os.UserHomeDir()
	if err != nil {
		return err
	}

	configPath := filepath.Join(home, ".config", "opencode", "opencode.json")
	if err := os.MkdirAll(filepath.Dir(configPath), 0o755); err != nil {
		return err
	}

	config := make(map[string]any)
	if data, err := os.ReadFile(configPath); err == nil {
		_ = json.Unmarshal(data, &config) // Ignore parse errors; treat missing/corrupt files as empty
	}

	config["$schema"] = "https://opencode.ai/config.json"

	provider, ok := config["provider"].(map[string]any)
	if !ok {
		provider = make(map[string]any)
	}

	ollama, ok := provider["ollama"].(map[string]any)
	if !ok {
		ollama = map[string]any{
			"npm":  "@ai-sdk/openai-compatible",
			"name": "Ollama",
			"options": map[string]any{
				"baseURL": envconfig.Host().String() + "/v1",
			},
		}
	}

	// Migrate legacy provider name
	if name, _ := ollama["name"].(string); name == "Ollama (local)" {
		ollama["name"] = "Ollama"
	}

	models, ok := ollama["models"].(map[string]any)
	if !ok {
		models = make(map[string]any)
	}

	selectedSet := make(map[string]bool)
	for _, m := range modelList {
		selectedSet[m] = true
	}

	for name, cfg := range models {
		if cfgMap, ok := cfg.(map[string]any); ok {
			if isOllamaModel(cfgMap) && !selectedSet[name] {
				delete(models, name)
			}
		}
	}

	for _, model := range modelList {
		if existing, ok := models[model].(map[string]any); ok {
			// migrate existing models without _launch marker
			if isOllamaModel(existing) {
				existing["_launch"] = true
				if name, ok := existing["name"].(string); ok {
					existing["name"] = strings.TrimSuffix(name, " [Ollama]")
				}
			}
			if isCloudModelName(model) {
				if l, ok := lookupCloudModelLimit(model); ok {
					existing["limit"] = map[string]any{
						"context": l.Context,
						"output":  l.Output,
					}
				}
			}
			continue
		}
		entry := map[string]any{
			"name":    model,
			"_launch": true,
		}
		if isCloudModelName(model) {
			if l, ok := lookupCloudModelLimit(model); ok {
				entry["limit"] = map[string]any{
					"context": l.Context,
					"output":  l.Output,
				}
			}
		}
		models[model] = entry
	}

	ollama["models"] = models
	provider["ollama"] = ollama
	config["provider"] = provider

	configData, err := json.MarshalIndent(config, "", " ")
	if err != nil {
		return err
	}
	if err := writeWithBackup(configPath, configData); err != nil {
		return err
	}

	statePath := filepath.Join(home, ".local", "state", "opencode", "model.json")
	if err := os.MkdirAll(filepath.Dir(statePath), 0o755); err != nil {
		return err
	}

	state := map[string]any{
		"recent":   []any{},
		"favorite": []any{},
		"variant":  map[string]any{},
	}
	if data, err := os.ReadFile(statePath); err == nil {
		_ = json.Unmarshal(data, &state) // Ignore parse errors; use defaults
	}

	recent, _ := state["recent"].([]any)

	modelSet := make(map[string]bool)
	for _, m := range modelList {
		modelSet[m] = true
	}

	// Filter out existing Ollama models we're about to re-add
	newRecent := slices.DeleteFunc(slices.Clone(recent), func(entry any) bool {
		e, ok := entry.(map[string]any)
		if !ok || e["providerID"] != "ollama" {
			return false
		}
		modelID, _ := e["modelID"].(string)
		return modelSet[modelID]
	})

	// Prepend models in reverse order so first model ends up first
	for _, model := range slices.Backward(modelList) {
		newRecent = slices.Insert(newRecent, 0, any(map[string]any{
			"providerID": "ollama",
			"modelID":    model,
		}))
	}

	const maxRecentModels = 10
	newRecent = newRecent[:min(len(newRecent), maxRecentModels)]

	state["recent"] = newRecent

	stateData, err := json.MarshalIndent(state, "", " ")
	if err != nil {
		return err
	}
	return writeWithBackup(statePath, stateData)
}

func (o *OpenCode) Models() []string {
	home, err := os.UserHomeDir()
	if err != nil {
		return nil
	}
	config, err := readJSONFile(filepath.Join(home, ".config", "opencode", "opencode.json"))
	if err != nil {
		return nil
	}
	provider, _ := config["provider"].(map[string]any)
	ollama, _ := provider["ollama"].(map[string]any)
	models, _ := ollama["models"].(map[string]any)
	if len(models) == 0 {
		return nil
	}
	keys := slices.Collect(maps.Keys(models))
	slices.Sort(keys)
	return keys
}

// isOllamaModel reports whether a model config entry is managed by us
func isOllamaModel(cfg map[string]any) bool {
	if v, ok := cfg["_launch"].(bool); ok && v {
		return true
	}
	// previously used [Ollama] as a suffix for the model managed by ollama launch
	if name, ok := cfg["name"].(string); ok {
		return strings.HasSuffix(name, "[Ollama]")
	}
	return false
}