typia 7.0.0-dev.20241122 → 7.0.0-dev.20241124

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42)
  1. package/README.md +2 -2
  2. package/lib/index.mjs +7 -0
  3. package/lib/index.mjs.map +1 -1
  4. package/lib/internal/_llmApplicationFinalize.d.ts +1 -1
  5. package/lib/internal/_llmApplicationFinalize.js +8 -13
  6. package/lib/internal/_llmApplicationFinalize.js.map +1 -1
  7. package/lib/llm.d.ts +58 -3
  8. package/lib/llm.js +7 -0
  9. package/lib/llm.js.map +1 -1
  10. package/lib/programmers/internal/json_schema_native.js +1 -0
  11. package/lib/programmers/internal/json_schema_native.js.map +1 -1
  12. package/lib/programmers/internal/json_schema_object.js +1 -1
  13. package/lib/programmers/internal/json_schema_object.js.map +1 -1
  14. package/lib/programmers/llm/LlmApplicationProgrammer.js +15 -85
  15. package/lib/programmers/llm/LlmApplicationProgrammer.js.map +1 -1
  16. package/lib/programmers/llm/LlmParametersProgrammer.d.ts +10 -0
  17. package/lib/programmers/llm/LlmParametersProgrammer.js +58 -0
  18. package/lib/programmers/llm/LlmParametersProgrammer.js.map +1 -0
  19. package/lib/programmers/llm/LlmSchemaProgrammer.d.ts +2 -2
  20. package/lib/programmers/llm/LlmSchemaProgrammer.js +6 -30
  21. package/lib/programmers/llm/LlmSchemaProgrammer.js.map +1 -1
  22. package/lib/transformers/CallExpressionTransformer.js +2 -0
  23. package/lib/transformers/CallExpressionTransformer.js.map +1 -1
  24. package/lib/transformers/features/llm/LlmApplicationTransformer.js +2 -2
  25. package/lib/transformers/features/llm/LlmApplicationTransformer.js.map +1 -1
  26. package/lib/transformers/features/llm/LlmParametersTransformer.d.ts +5 -0
  27. package/lib/transformers/features/llm/LlmParametersTransformer.js +90 -0
  28. package/lib/transformers/features/llm/LlmParametersTransformer.js.map +1 -0
  29. package/lib/transformers/features/llm/LlmSchemaTransformer.js +2 -2
  30. package/lib/transformers/features/llm/LlmSchemaTransformer.js.map +1 -1
  31. package/package.json +2 -2
  32. package/src/internal/_llmApplicationFinalize.ts +8 -24
  33. package/src/llm.ts +73 -3
  34. package/src/programmers/internal/json_schema_native.ts +1 -0
  35. package/src/programmers/internal/json_schema_object.ts +1 -1
  36. package/src/programmers/llm/LlmApplicationProgrammer.ts +19 -88
  37. package/src/programmers/llm/LlmParametersProgrammer.ts +67 -0
  38. package/src/programmers/llm/LlmSchemaProgrammer.ts +24 -70
  39. package/src/transformers/CallExpressionTransformer.ts +2 -0
  40. package/src/transformers/features/llm/LlmApplicationTransformer.ts +13 -12
  41. package/src/transformers/features/llm/LlmParametersTransformer.ts +112 -0
  42. package/src/transformers/features/llm/LlmSchemaTransformer.ts +13 -12
@@ -0,0 +1,67 @@
1
+ import { ILlmApplication, OpenApi, OpenApiTypeChecker } from "@samchon/openapi";
2
+ import { LlmSchemaConverter } from "@samchon/openapi/lib/converters/LlmSchemaConverter";
3
+
4
+ import { MetadataFactory } from "../../factories/MetadataFactory";
5
+
6
+ import { Metadata } from "../../schemas/metadata/Metadata";
7
+
8
+ import { IJsonSchemaCollection } from "../../module";
9
+ import { JsonSchemasProgrammer } from "../json/JsonSchemasProgrammer";
10
+ import { LlmSchemaProgrammer } from "./LlmSchemaProgrammer";
11
+
12
+ export namespace LlmParametersProgrammer {
13
+ export const write = <Model extends ILlmApplication.Model>(props: {
14
+ model: Model;
15
+ metadata: Metadata;
16
+ }): ILlmApplication.ModelParameters[Model] => {
17
+ const collection: IJsonSchemaCollection<"3.1"> =
18
+ JsonSchemasProgrammer.write({
19
+ version: "3.1",
20
+ metadatas: [props.metadata],
21
+ });
22
+ const schema: OpenApi.IJsonSchema.IObject = (() => {
23
+ const schema: OpenApi.IJsonSchema = collection.schemas[0]!;
24
+ if (OpenApiTypeChecker.isObject(schema)) return schema;
25
+ else if (OpenApiTypeChecker.isReference(schema)) {
26
+ const last =
27
+ collection.components.schemas?.[schema.$ref.split("/").pop()!];
28
+ if (last && OpenApiTypeChecker.isObject(last)) return last;
29
+ }
30
+ throw new Error("Unreachable code. Failed to find the object schema.");
31
+ })();
32
+
33
+ const parameters: ILlmApplication.ModelParameters[Model] | null =
34
+ LlmSchemaConverter.parameters(props.model)({
35
+ config: {
36
+ recursive: 3,
37
+ reference: false,
38
+ constraint: false,
39
+ },
40
+ components: collection.components,
41
+ schema,
42
+ }) as ILlmApplication.ModelParameters[Model] | null;
43
+ if (parameters === null)
44
+ throw new Error("Failed to convert JSON schema to LLM schema.");
45
+ return parameters;
46
+ };
47
+
48
+ export const validate =
49
+ (model: ILlmApplication.Model) =>
50
+ (metadata: Metadata, explore: MetadataFactory.IExplore): string[] => {
51
+ const output: string[] = [];
52
+ if (explore.top === true) {
53
+ if (metadata.objects.length === 0)
54
+ output.push("LLM parameters must be an objec type.");
55
+ else if (metadata.objects.length !== 1 || metadata.size() > 1)
56
+ output.push("LLM parameters must be a single object type.");
57
+ else {
58
+ if (metadata.nullable)
59
+ output.push("LLM parameters must be a non-nullable object type.");
60
+ else if (metadata.isRequired() === false)
61
+ output.push("LLM parameters must be a non-undefined object type.");
62
+ }
63
+ }
64
+ output.push(...LlmSchemaProgrammer.validate(model)(metadata));
65
+ return output;
66
+ };
67
+ }
@@ -1,13 +1,5 @@
1
- import {
2
- IChatGptSchema,
3
- IHttpLlmApplication,
4
- ILlmApplication,
5
- OpenApi,
6
- } from "@samchon/openapi";
7
- import { ChatGptConverter } from "@samchon/openapi/lib/converters/ChatGptConverter";
8
- import { GeminiConverter } from "@samchon/openapi/lib/converters/GeminiConverter";
9
- import { LlmConverterV3 } from "@samchon/openapi/lib/converters/LlmConverterV3";
10
- import { LlmConverterV3_1 } from "@samchon/openapi/lib/converters/LlmConverterV3_1";
1
+ import { ILlmApplication } from "@samchon/openapi";
2
+ import { LlmSchemaConverter } from "@samchon/openapi/lib/converters/LlmSchemaConverter";
11
3
 
12
4
  import { IJsonSchemaCollection } from "../../schemas/json/IJsonSchemaCollection";
13
5
  import { Metadata } from "../../schemas/metadata/Metadata";
@@ -24,8 +16,9 @@ export namespace LlmSchemaProgrammer {
24
16
  export interface IOutput<Model extends ILlmApplication.Model> {
25
17
  model: Model;
26
18
  schema: ILlmApplication.ModelSchema[Model];
27
- $defs: Record<string, IChatGptSchema>;
19
+ $defs: Record<string, ILlmApplication.ModelSchema[Model]>;
28
20
  }
21
+
29
22
  export const write = <Model extends ILlmApplication.Model>(props: {
30
23
  model: Model;
31
24
  metadata: Metadata;
@@ -36,20 +29,18 @@ export namespace LlmSchemaProgrammer {
36
29
  metadatas: [props.metadata],
37
30
  });
38
31
 
39
- const $defs: Record<string, IChatGptSchema> = {};
40
- const schema: ILlmApplication.ModelSchema[Model] | null = CASTERS[
41
- props.model
42
- ]({
43
- options: {
44
- recursive: 3,
45
- reference: false,
46
- constraint: false,
47
- } satisfies Omit<ILlmApplication.IChatGptOptions, "separate"> &
48
- Omit<ILlmApplication.ICommonOptions<any>, "separate"> as any,
49
- components: collection.components,
50
- schema: collection.schemas[0]!,
51
- $defs,
52
- }) as ILlmApplication.ModelSchema[Model] | null;
32
+ const $defs: Record<string, ILlmApplication.ModelSchema[Model]> = {};
33
+ const schema: ILlmApplication.ModelSchema[Model] | null =
34
+ LlmSchemaConverter.schema(props.model)({
35
+ config: {
36
+ recursive: 3,
37
+ reference: false,
38
+ constraint: false,
39
+ },
40
+ components: collection.components,
41
+ schema: collection.schemas[0]!,
42
+ $defs: $defs as any,
43
+ }) as ILlmApplication.ModelSchema[Model] | null;
53
44
  if (schema === null)
54
45
  throw new Error("Failed to convert JSON schema to LLM schema.");
55
46
  return {
@@ -68,6 +59,14 @@ export namespace LlmSchemaProgrammer {
68
59
  metadata.constants.some((c) => c.type === "bigint")
69
60
  )
70
61
  output.push("LLM schema does not support bigint type.");
62
+ if (
63
+ metadata.objects.some((o) =>
64
+ o.type.properties.some(
65
+ (p) => p.key.isSoleLiteral() === false && p.value.size() !== 0,
66
+ ),
67
+ )
68
+ )
69
+ output.push("LLM schema does not support dynamic property in object.");
71
70
  if (
72
71
  metadata.tuples.some((t) =>
73
72
  t.type.elements.some((e) => e.isRequired() === false),
@@ -130,48 +129,3 @@ const size = (metadata: Metadata): number =>
130
129
  }).length,
131
130
  )
132
131
  .reduce((a, b) => a + b, 0);
133
-
134
- const CASTERS = {
135
- "3.0": (props: {
136
- components: OpenApi.IComponents;
137
- schema: OpenApi.IJsonSchema;
138
- options: IHttpLlmApplication.IOptions<"3.0">;
139
- }) =>
140
- LlmConverterV3.schema({
141
- components: props.components,
142
- schema: props.schema,
143
- recursive: props.options.recursive,
144
- }),
145
- "3.1": (props: {
146
- components: OpenApi.IComponents;
147
- schema: OpenApi.IJsonSchema;
148
- options: IHttpLlmApplication.IOptions<"3.1">;
149
- }) =>
150
- LlmConverterV3_1.schema({
151
- components: props.components,
152
- schema: props.schema,
153
- recursive: props.options.recursive,
154
- }),
155
- chatgpt: (props: {
156
- components: OpenApi.IComponents;
157
- schema: OpenApi.IJsonSchema;
158
- $defs: Record<string, IChatGptSchema>;
159
- options: Omit<IHttpLlmApplication.IChatGptOptions, "separate">;
160
- }) =>
161
- ChatGptConverter.schema({
162
- components: props.components,
163
- schema: props.schema,
164
- $defs: props.$defs,
165
- options: props.options,
166
- }),
167
- gemini: (props: {
168
- components: OpenApi.IComponents;
169
- schema: OpenApi.IJsonSchema;
170
- options: IHttpLlmApplication.IOptions<"gemini">;
171
- }) =>
172
- GeminiConverter.schema({
173
- components: props.components,
174
- schema: props.schema,
175
- recursive: props.options.recursive,
176
- }),
177
- };
@@ -67,6 +67,7 @@ import { JsonStringifyTransformer } from "./features/json/JsonStringifyTransform
67
67
  import { JsonValidateParseTransformer } from "./features/json/JsonValidateParseTransformer";
68
68
  import { JsonValidateStringifyTransformer } from "./features/json/JsonValidateStringifyTransformer";
69
69
  import { LlmApplicationTransformer } from "./features/llm/LlmApplicationTransformer";
70
+ import { LlmParametersTransformer } from "./features/llm/LlmParametersTransformer";
70
71
  import { LlmSchemaTransformer } from "./features/llm/LlmSchemaTransformer";
71
72
  import { MiscAssertCloneTransformer } from "./features/misc/MiscAssertCloneTransformer";
72
73
  import { MiscAssertPruneTransformer } from "./features/misc/MiscAssertPruneTransformer";
@@ -394,6 +395,7 @@ const FUNCTORS: Record<string, Record<string, () => Task>> = {
394
395
  },
395
396
  llm: {
396
397
  application: () => LlmApplicationTransformer.transform,
398
+ parameters: () => LlmParametersTransformer.transform,
397
399
  schema: () => LlmSchemaTransformer.transform,
398
400
  },
399
401
  json: {
@@ -29,17 +29,18 @@ export namespace LlmApplicationTransformer {
29
29
  if (ts.isTypeNode(top) === false) return props.expression;
30
30
 
31
31
  // GET MODEL
32
- const model: ILlmApplication.Model = get_parameter<ILlmApplication.Model>({
33
- checker: props.context.checker,
34
- name: "Model",
35
- is: (value) =>
36
- value === "3.1" ||
37
- value === "3.0" ||
38
- value === "chatgpt" ||
39
- value === "gemini",
40
- cast: (value) => value as ILlmApplication.Model,
41
- default: () => "3.1",
42
- })(props.expression.typeArguments[1]);
32
+ const model: ILlmApplication.Model =
33
+ getTemplateArgument<ILlmApplication.Model>({
34
+ checker: props.context.checker,
35
+ name: "Model",
36
+ is: (value) =>
37
+ value === "3.1" ||
38
+ value === "3.0" ||
39
+ value === "chatgpt" ||
40
+ value === "gemini",
41
+ cast: (value) => value as ILlmApplication.Model,
42
+ default: () => "3.1",
43
+ })(props.expression.typeArguments[1]);
43
44
 
44
45
  // GET TYPE
45
46
  const type: ts.Type = props.context.checker.getTypeFromTypeNode(top);
@@ -101,7 +102,7 @@ export namespace LlmApplicationTransformer {
101
102
  );
102
103
  };
103
104
 
104
- const get_parameter =
105
+ const getTemplateArgument =
105
106
  <Value>(props: {
106
107
  checker: ts.TypeChecker;
107
108
  name: string;
@@ -0,0 +1,112 @@
1
+ import { ILlmApplication } from "@samchon/openapi";
2
+ import { ILlmFunction } from "@samchon/openapi/lib/structures/ILlmFunction";
3
+ import ts from "typescript";
4
+
5
+ import { LiteralFactory } from "../../../factories/LiteralFactory";
6
+ import { MetadataCollection } from "../../../factories/MetadataCollection";
7
+ import { MetadataFactory } from "../../../factories/MetadataFactory";
8
+
9
+ import { Metadata } from "../../../schemas/metadata/Metadata";
10
+
11
+ import { LlmParametersProgrammer } from "../../../programmers/llm/LlmParametersProgrammer";
12
+ import { LlmSchemaProgrammer } from "../../../programmers/llm/LlmSchemaProgrammer";
13
+
14
+ import { ValidationPipe } from "../../../typings/ValidationPipe";
15
+
16
+ import { ITransformProps } from "../../ITransformProps";
17
+ import { TransformerError } from "../../TransformerError";
18
+
19
+ export namespace LlmParametersTransformer {
20
+ export const transform = (
21
+ props: Omit<ITransformProps, "modulo">,
22
+ ): ts.Expression => {
23
+ // GET GENERIC ARGUMENT
24
+ if (!props.expression.typeArguments?.length)
25
+ throw new TransformerError({
26
+ code: "typia.llm.parameters",
27
+ message: "no generic argument.",
28
+ });
29
+
30
+ const top: ts.Node = props.expression.typeArguments[0]!;
31
+ if (ts.isTypeNode(top) === false) return props.expression;
32
+
33
+ // GET MODEL
34
+ const model: ILlmApplication.Model =
35
+ getTemplateArgument<ILlmApplication.Model>({
36
+ checker: props.context.checker,
37
+ name: "Model",
38
+ is: (value) =>
39
+ value === "3.1" ||
40
+ value === "3.0" ||
41
+ value === "chatgpt" ||
42
+ value === "gemini",
43
+ cast: (value) => value as ILlmApplication.Model,
44
+ default: () => "3.1",
45
+ })(props.expression.typeArguments[1]);
46
+
47
+ // GET TYPE
48
+ const type: ts.Type = props.context.checker.getTypeFromTypeNode(top);
49
+ const collection: MetadataCollection = new MetadataCollection({
50
+ replace: MetadataCollection.replace,
51
+ });
52
+ const result: ValidationPipe<Metadata, MetadataFactory.IError> =
53
+ MetadataFactory.analyze({
54
+ checker: props.context.checker,
55
+ transformer: props.context.transformer,
56
+ options: {
57
+ escape: true,
58
+ constant: true,
59
+ absorb: false,
60
+ validate: LlmSchemaProgrammer.validate(model),
61
+ },
62
+ collection,
63
+ type,
64
+ });
65
+ if (result.success === false)
66
+ throw TransformerError.from({
67
+ code: "typia.llm.parameters",
68
+ errors: result.errors,
69
+ });
70
+
71
+ // GENERATE LLM SCHEMA
72
+ const out: ILlmFunction<any>["parameters"] = LlmParametersProgrammer.write({
73
+ model,
74
+ metadata: result.data,
75
+ });
76
+ return LiteralFactory.write(out);
77
+ };
78
+
79
+ const getTemplateArgument =
80
+ <Value>(props: {
81
+ checker: ts.TypeChecker;
82
+ name: string;
83
+ is: (value: string) => boolean;
84
+ cast: (value: string) => Value;
85
+ default: () => Value;
86
+ }) =>
87
+ (node: ts.TypeNode | undefined): Value => {
88
+ if (!node) return props.default();
89
+
90
+ // CHECK LITERAL TYPE
91
+ const type: ts.Type = props.checker.getTypeFromTypeNode(node);
92
+ if (
93
+ !type.isLiteral() &&
94
+ (type.getFlags() & ts.TypeFlags.BooleanLiteral) === 0
95
+ )
96
+ throw new TransformerError({
97
+ code: "typia.llm.parameters",
98
+ message: `generic argument "${props.name}" must be constant.`,
99
+ });
100
+
101
+ // GET VALUE AND VALIDATE IT
102
+ const value = type.isLiteral()
103
+ ? type.value
104
+ : props.checker.typeToString(type);
105
+ if (typeof value !== "string" || props.is(value) === false)
106
+ throw new TransformerError({
107
+ code: "typia.llm.parameters",
108
+ message: `invalid value on generic argument "${props.name}".`,
109
+ });
110
+ return props.cast(value);
111
+ };
112
+ }
@@ -30,17 +30,18 @@ export namespace LlmSchemaTransformer {
30
30
  if (ts.isTypeNode(top) === false) return props.expression;
31
31
 
32
32
  // GET MODEL
33
- const model: ILlmApplication.Model = get_parameter<ILlmApplication.Model>({
34
- checker: props.context.checker,
35
- name: "Model",
36
- is: (value) =>
37
- value === "3.1" ||
38
- value === "3.0" ||
39
- value === "chatgpt" ||
40
- value === "gemini",
41
- cast: (value) => value as ILlmApplication.Model,
42
- default: () => "3.1",
43
- })(props.expression.typeArguments[1]);
33
+ const model: ILlmApplication.Model =
34
+ getTemplateArgument<ILlmApplication.Model>({
35
+ checker: props.context.checker,
36
+ name: "Model",
37
+ is: (value) =>
38
+ value === "3.1" ||
39
+ value === "3.0" ||
40
+ value === "chatgpt" ||
41
+ value === "gemini",
42
+ cast: (value) => value as ILlmApplication.Model,
43
+ default: () => "3.1",
44
+ })(props.expression.typeArguments[1]);
44
45
 
45
46
  // GET TYPE
46
47
  const type: ts.Type = props.context.checker.getTypeFromTypeNode(top);
@@ -108,7 +109,7 @@ export namespace LlmSchemaTransformer {
108
109
  );
109
110
  };
110
111
 
111
- const get_parameter =
112
+ const getTemplateArgument =
112
113
  <Value>(props: {
113
114
  checker: ts.TypeChecker;
114
115
  name: string;