diff --git a/ballerina/Ballerina.toml b/ballerina/Ballerina.toml index 36dad2d..72efae8 100644 --- a/ballerina/Ballerina.toml +++ b/ballerina/Ballerina.toml @@ -1,7 +1,7 @@ [package] org = "ballerinax" name = "np" -version = "0.8.0" +version = "0.8.1" authors = ["Ballerina"] keywords = ["natural programming", "ai"] repository = "https://github.com/ballerina-platform/module-ballerinax-np" @@ -17,5 +17,5 @@ observabilityIncluded = true [[platform.java21.dependency]] groupId = "io.ballerina.lib" artifactId = "np-native" -version = "0.8.0" -path = "../native/build/libs/np-native-0.8.0.jar" +version = "0.8.1" +path = "../native/build/libs/np-native-0.8.1-SNAPSHOT.jar" diff --git a/ballerina/CompilerPlugin.toml b/ballerina/CompilerPlugin.toml index ee1b152..2d92368 100644 --- a/ballerina/CompilerPlugin.toml +++ b/ballerina/CompilerPlugin.toml @@ -3,4 +3,4 @@ id = "np-compiler-plugin" class = "io.ballerina.lib.np.compilerplugin.CompilerPlugin" [[dependency]] -path = "../compiler-plugin/build/libs/np-compiler-plugin-0.8.0.jar" +path = "../compiler-plugin/build/libs/np-compiler-plugin-0.8.1-SNAPSHOT.jar" diff --git a/ballerina/Dependencies.toml b/ballerina/Dependencies.toml index cd1074d..461aa35 100644 --- a/ballerina/Dependencies.toml +++ b/ballerina/Dependencies.toml @@ -335,7 +335,7 @@ modules = [ [[package]] org = "ballerinax" name = "np" -version = "0.8.0" +version = "0.8.1" dependencies = [ {org = "ballerina", name = "http"}, {org = "ballerina", name = "jballerina.java"}, diff --git a/ballerina/main.bal b/ballerina/main.bal index 35439d2..4b0a1bd 100644 --- a/ballerina/main.bal +++ b/ballerina/main.bal @@ -16,7 +16,7 @@ const JSON_CONVERSION_ERROR = "FromJsonStringError"; const CONVERSION_ERROR = "ConversionError"; -const ERROR_MESSAGE = "Error occurred while converting the LLM response to the given type. Please refine your prompt to get a better result."; +const ERROR_MESSAGE = "Error occurred while attempting to parse the response from the LLM as the expected type. Retrying and/or validating the prompt could fix the response."; type DefaultModelConfig DefaultAzureOpenAIModelConfig|DefaultOpenAIModelConfig|DefaultBallerinaModelConfig; @@ -119,7 +119,7 @@ isolated function parseResponseAsType(json resp, typedesc targetType) retu isolated function handlepParseResponseError(error chatResponseError) returns error { if chatResponseError.message().includes(JSON_CONVERSION_ERROR) || chatResponseError.message().includes(CONVERSION_ERROR) { -        return error(string `${ERROR_MESSAGE}`); +        return error(string `${ERROR_MESSAGE}`, detail = chatResponseError); } return chatResponseError; } diff --git a/ballerina/tests/tests.bal b/ballerina/tests/tests.bal index bed42a8..9ad8a25 100644 --- a/ballerina/tests/tests.bal +++ b/ballerina/tests/tests.bal @@ -16,7 +16,7 @@ import ballerina/test; -const ERROR_MESSAGE = "Error occurred while converting the LLM response to the given type. Please refine your prompt to get a better result."; +const ERROR_MESSAGE = "Error occurred while attempting to parse the response from the LLM as the expected type. Retrying and/or validating the prompt could fix the response."; @test:Config function testPromptAsCodeFunctionWithSimpleExpectedTypeWithDefaultAzureOpenAIClient() returns error? {