From dff88ac4e96832515efa7d7f7ebb2a11caa3aa54 Mon Sep 17 00:00:00 2001
From: Sasindu Alahakoon
Date: Thu, 13 Mar 2025 15:41:37 +0530
Subject: [PATCH 1/2] Update the default client backend call

---
 ballerina/llm_client_default.bal | 23 ++++++++++++++++++++---
 1 file changed, 20 insertions(+), 3 deletions(-)

diff --git a/ballerina/llm_client_default.bal b/ballerina/llm_client_default.bal
index 149a6bf..5884abb 100644
--- a/ballerina/llm_client_default.bal
+++ b/ballerina/llm_client_default.bal
@@ -15,6 +15,7 @@
 // under the License.
 
 import ballerina/http;
+import ballerinax/azure.openai.chat;
 
 const UNAUTHORIZED = 401;
 
@@ -26,6 +27,13 @@ public type DefaultBallerinaModelConfig record {|
     string accessToken;
|};
 
+type ChatCompletionChoice record {
+    chat:ChatCompletionResponseMessage message?;
+    chat:ContentFilterChoiceResults content_filter_results?;
+    int index?;
+    string finish_reason?;
+};
+
 # Default Ballerina model chat completion client.
 public isolated distinct client class DefaultBallerinaModel {
     *Model;
@@ -55,9 +63,18 @@ public isolated distinct client class DefaultBallerinaModel {
             return error(string `LLM call failed: ${check chatResponse.getTextPayload()}`);
         }
 
-        string|error resp = check chatResponse.getTextPayload();
-        if resp is error {
-            return error("Failed to retrieve completion message", resp);
+        ChatCompletionChoice[]?|error choices = check (check chatResponse.getJsonPayload()).cloneWithType();
+        if choices is error {
+            return error("Failed to retrieve completion message", choices);
+        }
+
+        if choices is () {
+            return {body: "No completion choices"};
+        }
+
+        string? resp = choices[0].message?.content;
+        if resp is () {
+            return {body: "No completion message"};
         }
         return parseResponseAsJson(resp);
     }

From aea2bc01d649f1e57190a2fbf80f9eb94e3199d8 Mon Sep 17 00:00:00 2001
From: Sasindu Alahakoon
Date: Thu, 13 Mar 2025 17:32:28 +0530
Subject: [PATCH 2/2] Add changes for default np client

---
 ballerina/llm_client_default.bal | 26 +++++++------------------
 1 file changed, 7 insertions(+), 19 deletions(-)

diff --git a/ballerina/llm_client_default.bal b/ballerina/llm_client_default.bal
index 5884abb..941c49b 100644
--- a/ballerina/llm_client_default.bal
+++ b/ballerina/llm_client_default.bal
@@ -15,7 +15,6 @@
 // under the License.
 
 import ballerina/http;
-import ballerinax/azure.openai.chat;
 
 const UNAUTHORIZED = 401;
 
@@ -27,11 +26,8 @@ public type DefaultBallerinaModelConfig record {|
     string accessToken;
|};
 
-type ChatCompletionChoice record {
-    chat:ChatCompletionResponseMessage message?;
-    chat:ContentFilterChoiceResults content_filter_results?;
-    int index?;
-    string finish_reason?;
+type ChatCompletionResponse record {
+    string[] content?;
 };
 
 # Default Ballerina model chat completion client.
@@ -63,19 +59,11 @@ public isolated distinct client class DefaultBallerinaModel {
             return error(string `LLM call failed: ${check chatResponse.getTextPayload()}`);
         }
 
-        ChatCompletionChoice[]?|error choices = check (check chatResponse.getJsonPayload()).cloneWithType();
-        if choices is error {
-            return error("Failed to retrieve completion message", choices);
+        ChatCompletionResponse chatCompleteResponse = check (check chatResponse.getJsonPayload()).cloneWithType();
+        string[]? content = chatCompleteResponse?.content;
+        if content is () {
+            return error("No completion message");
         }
-
-        if choices is () {
-            return {body: "No completion choices"};
-        }
-
-        string? resp = choices[0].message?.content;
-        if resp is () {
-            return {body: "No completion message"};
-        }
-        return parseResponseAsJson(resp);
+        return parseResponseAsJson(content[0]);
     }
 }