@effect/ai-openai 0.17.0 → 0.18.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (70)
  1. package/OpenAiEmbeddingModel/package.json +6 -0
  2. package/OpenAiLanguageModel/package.json +6 -0
  3. package/dist/cjs/OpenAiClient.js +111 -109
  4. package/dist/cjs/OpenAiClient.js.map +1 -1
  5. package/dist/cjs/OpenAiConfig.js +2 -2
  6. package/dist/cjs/OpenAiConfig.js.map +1 -1
  7. package/dist/cjs/{OpenAiEmbeddings.js → OpenAiEmbeddingModel.js} +22 -51
  8. package/dist/cjs/OpenAiEmbeddingModel.js.map +1 -0
  9. package/dist/cjs/OpenAiLanguageModel.js +490 -0
  10. package/dist/cjs/OpenAiLanguageModel.js.map +1 -0
  11. package/dist/cjs/OpenAiTelemetry.js +1 -1
  12. package/dist/cjs/OpenAiTelemetry.js.map +1 -1
  13. package/dist/cjs/OpenAiTokenizer.js +11 -13
  14. package/dist/cjs/OpenAiTokenizer.js.map +1 -1
  15. package/dist/cjs/index.js +5 -5
  16. package/dist/cjs/internal/utilities.js +25 -0
  17. package/dist/cjs/internal/utilities.js.map +1 -0
  18. package/dist/dts/OpenAiClient.d.ts +32 -75
  19. package/dist/dts/OpenAiClient.d.ts.map +1 -1
  20. package/dist/dts/OpenAiConfig.d.ts +5 -6
  21. package/dist/dts/OpenAiConfig.d.ts.map +1 -1
  22. package/dist/dts/{OpenAiEmbeddings.d.ts → OpenAiEmbeddingModel.d.ts} +16 -37
  23. package/dist/dts/OpenAiEmbeddingModel.d.ts.map +1 -0
  24. package/dist/dts/OpenAiLanguageModel.d.ts +108 -0
  25. package/dist/dts/OpenAiLanguageModel.d.ts.map +1 -0
  26. package/dist/dts/OpenAiTelemetry.d.ts +59 -64
  27. package/dist/dts/OpenAiTelemetry.d.ts.map +1 -1
  28. package/dist/dts/OpenAiTokenizer.d.ts +2 -2
  29. package/dist/dts/OpenAiTokenizer.d.ts.map +1 -1
  30. package/dist/dts/index.d.ts +3 -3
  31. package/dist/dts/index.d.ts.map +1 -1
  32. package/dist/dts/internal/utilities.d.ts +2 -0
  33. package/dist/dts/internal/utilities.d.ts.map +1 -0
  34. package/dist/esm/OpenAiClient.js +110 -106
  35. package/dist/esm/OpenAiClient.js.map +1 -1
  36. package/dist/esm/OpenAiConfig.js +2 -2
  37. package/dist/esm/OpenAiConfig.js.map +1 -1
  38. package/dist/esm/{OpenAiEmbeddings.js → OpenAiEmbeddingModel.js} +21 -48
  39. package/dist/esm/OpenAiEmbeddingModel.js.map +1 -0
  40. package/dist/esm/OpenAiLanguageModel.js +478 -0
  41. package/dist/esm/OpenAiLanguageModel.js.map +1 -0
  42. package/dist/esm/OpenAiTelemetry.js +1 -1
  43. package/dist/esm/OpenAiTelemetry.js.map +1 -1
  44. package/dist/esm/OpenAiTokenizer.js +11 -13
  45. package/dist/esm/OpenAiTokenizer.js.map +1 -1
  46. package/dist/esm/index.js +3 -3
  47. package/dist/esm/index.js.map +1 -1
  48. package/dist/esm/internal/utilities.js +16 -0
  49. package/dist/esm/internal/utilities.js.map +1 -0
  50. package/package.json +19 -19
  51. package/src/OpenAiClient.ts +184 -191
  52. package/src/OpenAiConfig.ts +7 -8
  53. package/src/{OpenAiEmbeddings.ts → OpenAiEmbeddingModel.ts} +39 -85
  54. package/src/OpenAiLanguageModel.ts +601 -0
  55. package/src/OpenAiTelemetry.ts +66 -71
  56. package/src/OpenAiTokenizer.ts +35 -26
  57. package/src/index.ts +3 -3
  58. package/src/internal/utilities.ts +19 -0
  59. package/OpenAiCompletions/package.json +0 -6
  60. package/OpenAiEmbeddings/package.json +0 -6
  61. package/dist/cjs/OpenAiCompletions.js +0 -358
  62. package/dist/cjs/OpenAiCompletions.js.map +0 -1
  63. package/dist/cjs/OpenAiEmbeddings.js.map +0 -1
  64. package/dist/dts/OpenAiCompletions.d.ts +0 -75
  65. package/dist/dts/OpenAiCompletions.d.ts.map +0 -1
  66. package/dist/dts/OpenAiEmbeddings.d.ts.map +0 -1
  67. package/dist/esm/OpenAiCompletions.js +0 -345
  68. package/dist/esm/OpenAiCompletions.js.map +0 -1
  69. package/dist/esm/OpenAiEmbeddings.js.map +0 -1
  70. package/src/OpenAiCompletions.ts +0 -500
@@ -15,105 +15,100 @@ import type { Simplify } from "effect/Types"
15
15
  * {@see https://opentelemetry.io/docs/specs/semconv/attributes-registry/gen-ai/}
16
16
  *
17
17
  * @since 1.0.0
18
- * @category models
18
+ * @category Models
19
19
  */
20
20
  export type OpenAiTelemetryAttributes = Simplify<
21
21
  & AiTelemetry.GenAITelemetryAttributes
22
- & AiTelemetry.GenAI.AttributesWithPrefix<OpenAiTelemetry.RequestAttributes, "gen_ai.openai.request">
23
- & AiTelemetry.GenAI.AttributesWithPrefix<OpenAiTelemetry.ResponseAttributes, "gen_ai.openai.request">
22
+ & AiTelemetry.AttributesWithPrefix<RequestAttributes, "gen_ai.openai.request">
23
+ & AiTelemetry.AttributesWithPrefix<ResponseAttributes, "gen_ai.openai.request">
24
24
  >
25
25
 
26
26
  /**
27
+ * All telemetry attributes which are part of the GenAI specification,
28
+ * including the OpenAi-specific attributes.
29
+ *
27
30
  * @since 1.0.0
31
+ * @category Models
28
32
  */
29
- export declare namespace OpenAiTelemetry {
30
- /**
31
- * All telemetry attributes which are part of the GenAI specification,
32
- * including the OpenAi-specific attributes.
33
- *
34
- * @since 1.0.0
35
- * @category models
36
- */
37
- export type AllAttributes = AiTelemetry.GenAI.AllAttributes & RequestAttributes & ResponseAttributes
33
+ export type AllAttributes = AiTelemetry.AllAttributes & RequestAttributes & ResponseAttributes
38
34
 
35
+ /**
36
+ * Telemetry attributes which are part of the GenAI specification and are
37
+ * namespaced by `gen_ai.openai.request`.
38
+ *
39
+ * @since 1.0.0
40
+ * @category Models
41
+ */
42
+ export interface RequestAttributes {
39
43
  /**
40
- * Telemetry attributes which are part of the GenAI specification and are
41
- * namespaced by `gen_ai.openai.request`.
42
- *
43
- * @since 1.0.0
44
- * @category models
44
+ * The response format that is requested.
45
45
  */
46
- export interface RequestAttributes {
47
- /**
48
- * The response format that is requested.
49
- */
50
- readonly responseFormat?: (string & {}) | WellKnownResponseFormat | null | undefined
51
- /**
52
- * The service tier requested. May be a specific tier, `default`, or `auto`.
53
- */
54
- readonly serviceTier?: (string & {}) | WellKnownServiceTier | null | undefined
55
- }
56
-
46
+ readonly responseFormat?: (string & {}) | WellKnownResponseFormat | null | undefined
57
47
  /**
58
- * Telemetry attributes which are part of the GenAI specification and are
59
- * namespaced by `gen_ai.openai.response`.
60
- *
61
- * @since 1.0.0
62
- * @category models
48
+ * The service tier requested. May be a specific tier, `default`, or `auto`.
63
49
  */
64
- export interface ResponseAttributes {
65
- /**
66
- * The service tier used for the response.
67
- */
68
- readonly serviceTier?: string | null | undefined
69
- /**
70
- * A fingerprint to track any eventual change in the Generative AI
71
- * environment.
72
- */
73
- readonly systemFingerprint?: string | null | undefined
74
- }
50
+ readonly serviceTier?: (string & {}) | WellKnownServiceTier | null | undefined
51
+ }
75
52
 
53
+ /**
54
+ * Telemetry attributes which are part of the GenAI specification and are
55
+ * namespaced by `gen_ai.openai.response`.
56
+ *
57
+ * @since 1.0.0
58
+ * @category Models
59
+ */
60
+ export interface ResponseAttributes {
76
61
  /**
77
- * The `gen_ai.openai.request.response_format` attribute has the following
78
- * list of well-known values.
79
- *
80
- * If one of them applies, then the respective value **MUST** be used;
81
- * otherwise, a custom value **MAY** be used.
82
- *
83
- * @since 1.0.0
84
- * @category models
62
+ * The service tier used for the response.
85
63
  */
86
- export type WellKnownResponseFormat = "json_object" | "json_schema" | "text"
87
-
64
+ readonly serviceTier?: string | null | undefined
88
65
  /**
89
- * The `gen_ai.openai.request.service_tier` attribute has the following
90
- * list of well-known values.
91
- *
92
- * If one of them applies, then the respective value **MUST** be used;
93
- * otherwise, a custom value **MAY** be used.
94
- *
95
- * @since 1.0.0
96
- * @category models
66
+ * A fingerprint to track any eventual change in the Generative AI
67
+ * environment.
97
68
  */
98
- export type WellKnownServiceTier = "auto" | "default"
69
+ readonly systemFingerprint?: string | null | undefined
99
70
  }
100
71
 
72
+ /**
73
+ * The `gen_ai.openai.request.response_format` attribute has the following
74
+ * list of well-known values.
75
+ *
76
+ * If one of them applies, then the respective value **MUST** be used;
77
+ * otherwise, a custom value **MAY** be used.
78
+ *
79
+ * @since 1.0.0
80
+ * @category Models
81
+ */
82
+ export type WellKnownResponseFormat = "json_object" | "json_schema" | "text"
83
+
84
+ /**
85
+ * The `gen_ai.openai.request.service_tier` attribute has the following
86
+ * list of well-known values.
87
+ *
88
+ * If one of them applies, then the respective value **MUST** be used;
89
+ * otherwise, a custom value **MAY** be used.
90
+ *
91
+ * @since 1.0.0
92
+ * @category Models
93
+ */
94
+ export type WellKnownServiceTier = "auto" | "default"
95
+
101
96
  /**
102
97
  * @since 1.0.0
103
- * @since models
98
+ * @since Models
104
99
  */
105
100
  export type OpenAiTelemetryAttributeOptions = AiTelemetry.GenAITelemetryAttributeOptions & {
106
101
  openai?: {
107
- request?: OpenAiTelemetry.RequestAttributes | undefined
108
- response?: OpenAiTelemetry.ResponseAttributes | undefined
102
+ request?: RequestAttributes | undefined
103
+ response?: ResponseAttributes | undefined
109
104
  } | undefined
110
105
  }
111
106
 
112
107
  const addOpenAiRequestAttributes = AiTelemetry.addSpanAttributes("gen_ai.openai.request", String.camelToSnake)<
113
- OpenAiTelemetry.RequestAttributes
108
+ RequestAttributes
114
109
  >
115
110
  const addOpenAiResponseAttributes = AiTelemetry.addSpanAttributes("gen_ai.openai.response", String.camelToSnake)<
116
- OpenAiTelemetry.ResponseAttributes
111
+ ResponseAttributes
117
112
  >
118
113
 
119
114
  /**
@@ -123,7 +118,7 @@ const addOpenAiResponseAttributes = AiTelemetry.addSpanAttributes("gen_ai.openai
123
118
  * **NOTE**: This method will mutate the `Span` **in-place**.
124
119
  *
125
120
  * @since 1.0.0
126
- * @since utilities
121
+ * @since Utilities
127
122
  */
128
123
  export const addGenAIAnnotations = dual<
129
124
  /**
@@ -133,7 +128,7 @@ export const addGenAIAnnotations = dual<
133
128
  * **NOTE**: This method will mutate the `Span` **in-place**.
134
129
  *
135
130
  * @since 1.0.0
136
- * @since utilities
131
+ * @since Utilities
137
132
  */
138
133
  (options: OpenAiTelemetryAttributeOptions) => (span: Span) => void,
139
134
  /**
@@ -143,7 +138,7 @@ export const addGenAIAnnotations = dual<
143
138
  * **NOTE**: This method will mutate the `Span` **in-place**.
144
139
  *
145
140
  * @since 1.0.0
146
- * @since utilities
141
+ * @since Utilities
147
142
  */
148
143
  (span: Span, options: OpenAiTelemetryAttributeOptions) => void
149
144
  >(2, (span, options) => {
@@ -2,50 +2,59 @@
2
2
  * @since 1.0.0
3
3
  */
4
4
  import { AiError } from "@effect/ai/AiError"
5
+ import type * as AiInput from "@effect/ai/AiInput"
5
6
  import * as Tokenizer from "@effect/ai/Tokenizer"
6
7
  import * as Arr from "effect/Array"
7
- import * as Chunk from "effect/Chunk"
8
8
  import * as Effect from "effect/Effect"
9
9
  import * as Layer from "effect/Layer"
10
10
  import * as Option from "effect/Option"
11
+ import * as Predicate from "effect/Predicate"
11
12
  import * as GptTokenizer from "gpt-tokenizer"
12
13
 
13
14
  /**
14
15
  * @since 1.0.0
15
- * @category constructors
16
+ * @category Constructors
16
17
  */
17
18
  export const make = (options: { readonly model: string }) =>
18
19
  Tokenizer.make({
19
- tokenize(content) {
20
+ tokenize(input) {
20
21
  return Effect.try({
21
22
  try: () =>
22
23
  GptTokenizer.encodeChat(
23
- content.pipe(
24
- Chunk.toReadonlyArray,
25
- Arr.flatMap((message) =>
26
- message.parts.pipe(
27
- Arr.filterMap((part) => {
28
- if (part._tag === "Image" || part._tag === "ImageUrl") {
29
- return Option.none()
30
- }
31
- return Option.some(
32
- {
33
- role: message.role.kind === "user" ? "user" : "assistant",
34
- name: message.role._tag === "UserWithName" ? message.role.name : undefined,
35
- content: part._tag === "Text"
36
- ? part.content
37
- : JSON.stringify(part._tag === "ToolCall" ? part.params : part.value)
38
- } as const
39
- )
40
- })
41
- )
42
- )
43
- ),
24
+ Arr.flatMap(input.messages, (message) =>
25
+ Arr.filterMap(
26
+ message.parts as Array<
27
+ | AiInput.AssistantMessagePart
28
+ | AiInput.ToolMessagePart
29
+ | AiInput.UserMessagePart
30
+ >,
31
+ (part) => {
32
+ if (
33
+ part._tag === "FilePart" ||
34
+ part._tag === "FileUrlPart" ||
35
+ part._tag === "ImagePart" ||
36
+ part._tag === "ImageUrlPart" ||
37
+ part._tag === "ReasoningPart" ||
38
+ part._tag === "RedactedReasoningPart"
39
+ ) return Option.none()
40
+ return Option.some(
41
+ {
42
+ role: message._tag === "UserMessage" ? "user" : "assistant",
43
+ name: message._tag === "UserMessage" && Predicate.isNotUndefined(message.userName)
44
+ ? message.userName
45
+ : undefined,
46
+ content: part._tag === "TextPart"
47
+ ? part.text
48
+ : JSON.stringify(part._tag === "ToolCallPart" ? part.params : part.result)
49
+ } as const
50
+ )
51
+ }
52
+ )),
44
53
  options.model as any
45
54
  ),
46
55
  catch: (cause) =>
47
56
  new AiError({
48
- module: "OpenAiCompletions",
57
+ module: "OpenAiTokenizer",
49
58
  method: "tokenize",
50
59
  description: "Could not tokenize",
51
60
  cause
@@ -56,7 +65,7 @@ export const make = (options: { readonly model: string }) =>
56
65
 
57
66
  /**
58
67
  * @since 1.0.0
59
- * @category layers
68
+ * @category Layers
60
69
  */
61
70
  export const layer = (options: { readonly model: string }): Layer.Layer<Tokenizer.Tokenizer> =>
62
71
  Layer.succeed(Tokenizer.Tokenizer, make(options))
package/src/index.ts CHANGED
@@ -11,17 +11,17 @@ export * as OpenAiClient from "./OpenAiClient.js"
11
11
  /**
12
12
  * @since 1.0.0
13
13
  */
14
- export * as OpenAiCompletions from "./OpenAiCompletions.js"
14
+ export * as OpenAiConfig from "./OpenAiConfig.js"
15
15
 
16
16
  /**
17
17
  * @since 1.0.0
18
18
  */
19
- export * as OpenAiConfig from "./OpenAiConfig.js"
19
+ export * as OpenAiEmbeddingModel from "./OpenAiEmbeddingModel.js"
20
20
 
21
21
  /**
22
22
  * @since 1.0.0
23
23
  */
24
- export * as OpenAiEmbeddings from "./OpenAiEmbeddings.js"
24
+ export * as OpenAiLanguageModel from "./OpenAiLanguageModel.js"
25
25
 
26
26
  /**
27
27
  * @since 1.0.0
@@ -0,0 +1,19 @@
1
+ import type * as AiResponse from "@effect/ai/AiResponse"
2
+ import * as Predicate from "effect/Predicate"
3
+
4
+ /** @internal */
5
+ export const ProviderMetadataKey = "@effect/ai-openai/OpenAiLanguageModel/ProviderMetadata"
6
+
7
+ const finishReasonMap: Record<string, AiResponse.FinishReason> = {
8
+ content_filter: "content-filter",
9
+ function_call: "tool-calls",
10
+ length: "length",
11
+ stop: "stop",
12
+ tool_calls: "tool-calls"
13
+ }
14
+
15
+ /** @internal */
16
+ export const resolveFinishReason = (finishReason: string): AiResponse.FinishReason => {
17
+ const reason = finishReasonMap[finishReason]
18
+ return Predicate.isUndefined(reason) ? "unknown" : reason
19
+ }
@@ -1,6 +0,0 @@
1
- {
2
- "sideEffects": [],
3
- "main": "../dist/cjs/OpenAiCompletions.js",
4
- "module": "../dist/esm/OpenAiCompletions.js",
5
- "types": "../dist/dts/OpenAiCompletions.d.ts"
6
- }
@@ -1,6 +0,0 @@
1
- {
2
- "sideEffects": [],
3
- "main": "../dist/cjs/OpenAiEmbeddings.js",
4
- "module": "../dist/esm/OpenAiEmbeddings.js",
5
- "types": "../dist/dts/OpenAiEmbeddings.d.ts"
6
- }