@@ -4,15 +4,13 @@ import { OpenApiV3 } from "./OpenApiV3";
44import { OpenApiV3_1 } from "./OpenApiV3_1" ;
55import { SwaggerV2 } from "./SwaggerV2" ;
66import { HttpLlmComposer } from "./composers/HttpLlmApplicationComposer" ;
7- import { LlmSchemaComposer } from "./composers/LlmSchemaComposer" ;
87import { HttpLlmFunctionFetcher } from "./http/HttpLlmFunctionFetcher" ;
98import { IHttpConnection } from "./structures/IHttpConnection" ;
109import { IHttpLlmApplication } from "./structures/IHttpLlmApplication" ;
1110import { IHttpLlmFunction } from "./structures/IHttpLlmFunction" ;
1211import { IHttpMigrateApplication } from "./structures/IHttpMigrateApplication" ;
1312import { IHttpResponse } from "./structures/IHttpResponse" ;
1413import { ILlmFunction } from "./structures/ILlmFunction" ;
15- import { ILlmSchema } from "./structures/ILlmSchema" ;
1614import { LlmDataMerger } from "./utils/LlmDataMerger" ;
1715
1816/**
@@ -30,36 +28,28 @@ import { LlmDataMerger } from "./utils/LlmDataMerger";
3028 * {@link HttpLlm.propagate HttpLlm.propagate()}.
3129 *
3230 * By the way, if you have configured the
33- * {@link IHttpLlmApplication.IOptions.separate} option to separate the
34- * parameters into human and LLM sides, you can merge these human and LLM sides'
35- * parameters into one through
36- * {@link HttpLlm.mergeParameters HttpLlm.mergeParameters()} before the actual
37- * LLM function call execution.
31+ * {@link IHttpLlmApplication.IConfig.separate} option to separate the parameters
32+ * into human and LLM sides, you can merge these human and LLM sides' parameters
33+ * into one through {@link HttpLlm.mergeParameters HttpLlm.mergeParameters()}
34+ * before the actual LLM function call execution.
3835 *
3936 * @author Jeongho Nam - https://github.com/samchon
4037 */
4138export namespace HttpLlm {
4239 /* -----------------------------------------------------------
4340 COMPOSERS
4441 ----------------------------------------------------------- */
45- /**
46- * Properties for the LLM function calling application composer.
47- *
48- * @template Model Target LLM model
49- */
50- export interface IApplicationProps < Model extends ILlmSchema . Model > {
51- /** Target LLM model. */
52- model : Model ;
53-
42+ /** Properties for the LLM function calling application composer. */
43+ export interface IApplicationProps {
5444 /** OpenAPI document to convert. */
5545 document :
5646 | OpenApi . IDocument
5747 | SwaggerV2 . IDocument
5848 | OpenApiV3 . IDocument
5949 | OpenApiV3_1 . IDocument ;
6050
61- /** Options for the LLM function calling schema conversion. */
62- options ?: Partial < IHttpLlmApplication . IOptions < Model > > ;
51+ /** Configuration for the LLM function calling schema conversion. */
52+ config ?: Partial < IHttpLlmApplication . IConfig > ;
6353 }
6454
6555 /**
@@ -72,57 +62,44 @@ export namespace HttpLlm {
7262 * converted to the {@link IHttpLlmFunction LLM function} type, and they would
7363 * be used for the LLM function calling.
7464 *
75- * If you have configured the {@link IHttpLlmApplication.IOptions .separate}
65+ * If you have configured the {@link IHttpLlmApplication.IConfig .separate}
7666 * option, every parameters in the {@link IHttpLlmFunction} would be separated
7767 * into both human and LLM sides. In that case, you can merge these human and
7868 * LLM sides' parameters into one through {@link HttpLlm.mergeParameters}
7969 * before the actual LLM function call execution.
8070 *
81- * Additionally, if you have configured the
82- * {@link IHttpLlmApplication.IOptions.keyword} as `true`, the number of
83- * {@link IHttpLlmFunction.parameters} are always 1 and the first parameter
84- * type is always {@link ILlmSchemaV3.IObject}. I recommend this option because
85- * LLM can understand the keyword arguments more easily.
86- *
8771 * @param props Properties for composition
8872 * @returns LLM function calling application
8973 */
90- export const application = < Model extends ILlmSchema . Model > (
91- props : IApplicationProps < Model > ,
92- ) : IHttpLlmApplication < Model > => {
74+ export const application = (
75+ props : IApplicationProps ,
76+ ) : IHttpLlmApplication => {
9377 // MIGRATE
9478 const migrate : IHttpMigrateApplication = HttpMigration . application (
9579 props . document ,
9680 ) ;
97- const defaultConfig : ILlmSchema . IConfig < Model > =
98- LlmSchemaComposer . defaultConfig ( props . model ) ;
99- return HttpLlmComposer . application < Model > ( {
81+ return HttpLlmComposer . application ( {
10082 migrate,
101- model : props . model ,
102- options : {
103- ...Object . fromEntries (
104- Object . entries ( defaultConfig ) . map (
105- ( [ key , value ] ) =>
106- [ key , ( props . options as any ) ?. [ key ] ?? value ] as const ,
107- ) ,
108- ) ,
109- separate : props . options ?. separate ?? null ,
110- maxLength : props . options ?. maxLength ?? 64 ,
111- equals : props . options ?. equals ?? false ,
112- } as any as IHttpLlmApplication . IOptions < Model > ,
83+ config : {
84+ reference : props . config ?. reference ?? true ,
85+ strict : props . config ?. strict ?? false ,
86+ separate : props . config ?. separate ?? null ,
87+ maxLength : props . config ?. maxLength ?? 64 ,
88+ equals : props . config ?. equals ?? false ,
89+ } ,
11390 } ) ;
11491 } ;
11592
11693 /* -----------------------------------------------------------
11794 FETCHERS
11895 ----------------------------------------------------------- */
11996 /** Properties for the LLM function call. */
120- export interface IFetchProps < Model extends ILlmSchema . Model > {
97+ export interface IFetchProps {
12198 /** Application of the LLM function calling. */
122- application : IHttpLlmApplication < Model > ;
99+ application : IHttpLlmApplication ;
123100
124101 /** LLM function schema to call. */
125- function : IHttpLlmFunction < ILlmSchema . Model > ;
102+ function : IHttpLlmFunction ;
126103
127104 /** Connection info to the HTTP server. */
128105 connection : IHttpConnection ;
@@ -140,16 +117,12 @@ export namespace HttpLlm {
140117 * sometimes).
141118 *
142119 * By the way, if you've configured the
143- * {@link IHttpLlmApplication.IOptions .separate}, so that the parameters are
144- * separated to human and LLM sides, you have to merge these humand and LLM
120+ * {@link IHttpLlmApplication.IConfig .separate}, so that the parameters are
121+ * separated to human and LLM sides, you have to merge these human and LLM
145122 * sides' parameters into one through {@link HttpLlm.mergeParameters}
146123 * function.
147124 *
148- * About the {@link IHttpLlmApplication.IOptions.keyword} option, don't worry
149- * anything. This `HttmLlm.execute()` function will automatically recognize
150- * the keyword arguments and convert them to the proper sequence.
151- *
152- * For reference, if the target API endpoinnt responds none 200/201 status,
125+ * For reference, if the target API endpoint responds none 200/201 status,
153126 * this would be considered as an error and the {@link HttpError} would be
154127 * thrown. Otherwise, if you don't want such a rule, you can use the
155128 * {@link HttpLlm.propagate} function instead.
@@ -158,9 +131,8 @@ export namespace HttpLlm {
158131 * @returns Return value (response body) from the API endpoint
159132 * @throws HttpError when the API endpoint responds none 200/201 status
160133 */
161- export const execute = < Model extends ILlmSchema . Model > (
162- props : IFetchProps < Model > ,
163- ) : Promise < unknown > => HttpLlmFunctionFetcher . execute < Model > ( props ) ;
134+ export const execute = ( props : IFetchProps ) : Promise < unknown > =>
135+ HttpLlmFunctionFetcher . execute ( props ) ;
164136
165137 /**
166138 * Propagate the LLM function call.
@@ -171,15 +143,11 @@ export namespace HttpLlm {
171143 * sometimes).
172144 *
173145 * By the way, if you've configured the
174- * {@link IHttpLlmApplication.IOptions .separate}, so that the parameters are
146+ * {@link IHttpLlmApplication.IConfig .separate}, so that the parameters are
175147 * separated to human and LLM sides, you have to merge these human and LLM
176148 * sides' parameters into one through {@link HttpLlm.mergeParameters}
177149 * function.
178150 *
179- * About the {@link IHttpLlmApplication.IOptions.keyword} option, don't worry
180- * anything. This `HttmLlm.propagate()` function will automatically recognize
181- * the keyword arguments and convert them to the proper sequence.
182- *
183151 * For reference, the propagation means always returning the response from the
184152 * API endpoint, even if the status is not 200/201. This is useful when you
185153 * want to handle the response by yourself.
@@ -188,17 +156,16 @@ export namespace HttpLlm {
188156 * @returns Response from the API endpoint
189157 * @throws Error only when the connection is failed
190158 */
191- export const propagate = < Model extends ILlmSchema . Model > (
192- props : IFetchProps < Model > ,
193- ) : Promise < IHttpResponse > => HttpLlmFunctionFetcher . propagate < Model > ( props ) ;
159+ export const propagate = ( props : IFetchProps ) : Promise < IHttpResponse > =>
160+ HttpLlmFunctionFetcher . propagate ( props ) ;
194161
195162 /* -----------------------------------------------------------
196163 MERGERS
197164 ----------------------------------------------------------- */
198165 /** Properties for the parameters' merging. */
199- export interface IMergeProps < Model extends ILlmSchema . Model > {
166+ export interface IMergeProps {
200167 /** Metadata of the target function. */
201- function : ILlmFunction < Model > ;
168+ function : ILlmFunction ;
202169
203170 /** Arguments composed by the LLM. */
204171 llm : object | null ;
@@ -210,22 +177,21 @@ export namespace HttpLlm {
210177 /**
211178 * Merge the parameters.
212179 *
213- * If you've configured the {@link IHttpLlmApplication.IOptions .separate}
180+ * If you've configured the {@link IHttpLlmApplication.IConfig .separate}
214181 * option, so that the parameters are separated to human and LLM sides, you
215182 * can merge these human and LLM sides' parameters into one through this
216183 * `HttpLlm.mergeParameters()` function before the actual LLM function call
217- * wexecution .
184+ * execution .
218185 *
219186 * On contrary, if you've not configured the
220- * {@link IHttpLlmApplication.IOptions .separate} option, this function would
187+ * {@link IHttpLlmApplication.IConfig .separate} option, this function would
221188 * throw an error.
222189 *
223190 * @param props Properties for the parameters' merging
224191 * @returns Merged parameter values
225192 */
226- export const mergeParameters = < Model extends ILlmSchema . Model > (
227- props : IMergeProps < Model > ,
228- ) : object => LlmDataMerger . parameters ( props ) ;
193+ export const mergeParameters = ( props : IMergeProps ) : object =>
194+ LlmDataMerger . parameters ( props ) ;
229195
230196 /**
231197 * Merge two values.
0 commit comments