
Commit ce986ee

Improve error handling for incompatible models messages in chat.
Related to #209
1 parent: 109282e

4 files changed: +71 −28 lines


CHANGELOG.md

Lines changed: 2 additions & 0 deletions

@@ -2,6 +2,8 @@
 
 ## Unreleased
 
+- Improve error handling for incompatible models messages in chat. #209
+
 ## 0.87.2
 
 - Fix openai-chat tool call + support for Mistral API #233

docs/protocol.md

Lines changed: 1 addition & 1 deletion

@@ -505,7 +505,7 @@ interface ChatPromptResponse {
   /**
    * What the server is doing after receiving this prompt
    */
-  status: 'prompting' | 'login';
+  status: 'prompting' | 'login' | 'error';
 }
 ```
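With this protocol change, a chat/prompt response can carry a third status. A rough sketch of the three shapes the server may now return, written as the Clojure maps built in the chat.clj changes below (the chat-id and model values are purely illustrative):

```clojure
;; Normal flow: the server starts prompting the LLM.
{:chat-id "chat-1" :model "anthropic/claude-sonnet-4" :status :prompting}

;; Authentication is required for the chosen provider.
{:chat-id "chat-1" :model "github-copilot/gpt-4.1" :status :login}

;; New in this commit: the prompt failed, e.g. incompatible past messages.
{:chat-id "chat-1" :model "openai/gpt-4o" :status :error}
```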

src/eca/features/chat.clj

Lines changed: 38 additions & 13 deletions

@@ -16,11 +16,11 @@
    [eca.features.tools :as f.tools]
    [eca.features.tools.mcp :as f.mcp]
    [eca.llm-api :as llm-api]
+   [eca.llm-util :as llm-util]
    [eca.logger :as logger]
    [eca.messenger :as messenger]
    [eca.metrics :as metrics]
-   [eca.shared :as shared :refer [assoc-some future*]]
-   [eca.llm-util :as llm-util]))
+   [eca.shared :as shared :refer [assoc-some future*]]))
 
 (set! *warn-on-reflection* true)
 
@@ -1006,6 +1006,22 @@
         nil)))
     {:new-messages (get-in @db* [:chats chat-id :messages])})))))
 
+(defn ^:private assert-compatible-apis-between-models!
+  "Ensure new request is compatible with last api used.
+   E.g. Anthropic is not compatible with openai and vice versa."
+  [db chat-id provider config]
+  (let [current-api (:api (llm-api/provider->api-handler provider config))
+        last-api (get-in db [:chats chat-id :last-api])]
+    (cond
+      (not last-api) nil
+      (not current-api) nil
+
+      (or (and (= :anthropic current-api)
+               (not= :anthropic last-api))
+          (and (not= :anthropic current-api)
+               (= :anthropic last-api)))
+      (throw (ex-info "Incompatible past messages in chat.\nAnthropic models are only compatible with other Anthropic models, switch models or start a new chat." {})))))
+
 (defn ^:private prompt-messages!
   "Send user messages to LLM with hook processing.
    source-type controls hook behavior.
@@ -1075,6 +1091,7 @@
         on-usage-updated (fn [usage]
                            (when-let [usage (shared/usage-msg->usage usage full-model chat-ctx)]
                              (send-content! chat-ctx :system (merge {:type :usage} usage))))]
+    (assert-compatible-apis-between-models! db chat-id provider config)
    (when-not (get-in db [:chats chat-id :title])
      (future* config
        (when-let [{:keys [output-text]} (llm-api/sync-prompt!
@@ -1109,6 +1126,7 @@
    (doseq [message user-messages]
      (add-to-history!
       (assoc message :content-id (:user-content-id chat-ctx))))
+   (swap! db* assoc-in [:chats chat-id :last-api] (:api (llm-api/provider->api-handler provider config)))
    (send-content! chat-ctx :system {:type :progress
                                     :state :running
                                     :text "Generating"}))
@@ -1349,17 +1367,24 @@
    (send-content! chat-ctx :user {:type :text
                                   :content-id (:user-content-id chat-ctx)
                                   :text (str message "\n")})
-   (case (:type decision)
-     :mcp-prompt (send-mcp-prompt! decision chat-ctx)
-     :eca-command (handle-command! decision chat-ctx)
-     :prompt-message (prompt-messages! user-messages :prompt-message chat-ctx))
-   (metrics/count-up! "prompt-received"
-                      {:full-model full-model
-                       :behavior behavior}
-                      metrics)
-   {:chat-id chat-id
-    :model full-model
-    :status :prompting}))
+   (try
+     (case (:type decision)
+       :mcp-prompt (send-mcp-prompt! decision chat-ctx)
+       :eca-command (handle-command! decision chat-ctx)
+       :prompt-message (prompt-messages! user-messages :prompt-message chat-ctx))
+     (metrics/count-up! "prompt-received"
+                        {:full-model full-model
+                         :behavior behavior}
+                        metrics)
+     {:chat-id chat-id
+      :model full-model
+      :status :prompting}
+     (catch Exception e
+       (send-content! chat-ctx :system {:type :text :text (str "Error: " (ex-message e))})
+       (finish-chat-prompt! :idle chat-ctx)
+       {:chat-id chat-id
+        :model full-model
+        :status :error}))))
 
 (defn tool-call-approve [{:keys [chat-id tool-call-id save]} db* messenger metrics]
   (let [chat-ctx {:chat-id chat-id
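Combined with the new :last-api bookkeeping, the assertion turns a cross-API model switch into a regular chat error instead of a provider-level failure. A minimal sketch of the behavior, as if evaluated inside the eca.features.chat namespace (the chat-id, provider, and db snapshot are illustrative):

```clojure
;; Illustrative db snapshot: a previous prompt in "chat-1" went through the
;; Anthropic API, so {:last-api :anthropic} was recorded for that chat.
(def db {:chats {"chat-1" {:last-api :anthropic}}})

;; Prompting the same chat again with an OpenAI-backed provider now throws
;; before any LLM request is made:
(try
  (assert-compatible-apis-between-models! db "chat-1" "openai" {})
  (catch Exception e
    (ex-message e)))
;; => "Incompatible past messages in chat.\nAnthropic models are only compatible
;;     with other Anthropic models, switch models or start a new chat."
```

The new try/catch around the prompt handling then reports that message to the client as a :system text content, finishes the prompt as :idle, and returns :status :error, matching the protocol addition above.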

src/eca/llm_api.clj

Lines changed: 30 additions & 14 deletions

@@ -62,6 +62,27 @@
 (defn ^:private real-model-name [model model-capabilities]
   (or (:model-name model-capabilities) model))
 
+(defn provider->api-handler [provider config]
+  (cond
+    (= "openai" provider) {:api :openai-responses
+                           :handler llm-providers.openai/create-response!}
+    (= "anthropic" provider) {:api :anthropic
+                              :handler llm-providers.anthropic/chat!}
+    (= "github-copilot" provider) {:api :openai-chat
+                                   :handler llm-providers.openai-chat/chat-completion!}
+    (= "google" provider) {:api :openai-chat
+                           :handler llm-providers.openai-chat/chat-completion!}
+    (= "ollama" provider) {:api :ollama
+                           :handler llm-providers.ollama/chat!}
+    :else (case (get-in config [:providers provider :api])
+            ("openai-responses" "openai") {:api :openai-responses
+                                           :handler llm-providers.openai/create-response!}
+            "anthropic" {:api :anthropic
+                         :handler llm-providers.anthropic/chat!}
+            "openai-chat" {:api :openai-chat
+                           :handler llm-providers.openai-chat/chat-completion!}
+            nil)))
+
 (defn ^:private prompt!
   [{:keys [provider model model-capabilities instructions user-messages config
            on-message-received on-error on-prepare-tool-call on-tools-called on-reason on-usage-updated
@@ -78,6 +99,7 @@
        extra-payload (:extraPayload model-config)
        [auth-type api-key] (llm-util/provider-api-key provider provider-auth config)
        api-url (llm-util/provider-api-url provider config)
+       {:keys [handler]} (provider->api-handler provider config)
        callbacks (when-not sync?
                    {:on-message-received on-message-received
                     :on-error on-error
@@ -89,7 +111,7 @@
    (when-not api-url (throw (ex-info (format "API url not found.\nMake sure you have provider '%s' configured properly." provider) {})))
    (cond
      (= "openai" provider)
-     (llm-providers.openai/create-response!
+     (handler
       {:model real-model
        :instructions instructions
        :user-messages user-messages
@@ -107,7 +129,7 @@
       callbacks)
 
      (= "anthropic" provider)
-     (llm-providers.anthropic/chat!
+     (handler
       {:model real-model
        :instructions instructions
        :user-messages user-messages
@@ -124,7 +146,7 @@
       callbacks)
 
      (= "github-copilot" provider)
-     (llm-providers.openai-chat/chat-completion!
+     (handler
       {:model real-model
        :instructions instructions
        :user-messages user-messages
@@ -146,7 +168,7 @@
       callbacks)
 
      (= "google" provider)
-     (llm-providers.openai-chat/chat-completion!
+     (handler
       {:model real-model
        :instructions instructions
        :user-messages user-messages
@@ -166,7 +188,7 @@
       callbacks)
 
      (= "ollama" provider)
-     (llm-providers.ollama/chat!
+     (handler
       {:api-url api-url
        :reason? (:reason? model-capabilities)
       :supports-image? supports-image?
@@ -178,18 +200,12 @@
        :extra-payload extra-payload}
       callbacks)
 
-     model-config
-     (let [provider-fn (case (:api provider-config)
-                         ("openai-responses"
-                          "openai") llm-providers.openai/create-response!
-                         "anthropic" llm-providers.anthropic/chat!
-                         "openai-chat" llm-providers.openai-chat/chat-completion!
-                         (on-error {:message (format "Unknown model %s for provider %s" (:api provider-config) provider)}))
-           url-relative-path (:completionUrlRelativePath provider-config)
+     (and model-config handler)
+     (let [url-relative-path (:completionUrlRelativePath provider-config)
            think-tag-start (:thinkTagStart provider-config)
            think-tag-end (:thinkTagEnd provider-config)
            http-client (:httpClient provider-config)]
-       (provider-fn
+       (handler
        {:model real-model
        :instructions instructions
        :user-messages user-messages
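The new provider->api-handler centralizes the provider-to-API mapping that prompt! previously spelled out per branch, and it is what chat.clj uses to decide both which API family a request targets and which function serves it. A rough usage sketch (the "my-proxy" provider and its config entry are hypothetical; built-in providers ignore the config argument):

```clojure
;; Built-in providers map directly to an API family and handler function:
(provider->api-handler "anthropic" {})
;; => {:api :anthropic :handler llm-providers.anthropic/chat!}

;; Custom providers fall through to the :api declared in their config entry:
(provider->api-handler "my-proxy" {:providers {"my-proxy" {:api "openai-chat"}}})
;; => {:api :openai-chat :handler llm-providers.openai-chat/chat-completion!}

;; Providers with no known or configured API yield nil, which the chat-side
;; compatibility check treats as "no constraint":
(provider->api-handler "something-else" {})
;; => nil
```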
