promptlayer 1.0.39 → 1.0.40

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,198 @@
1
+ const _buildToolCall = (id: string, name: string, input: any) => {
2
+ return {
3
+ id,
4
+ function: {
5
+ name,
6
+ input,
7
+ },
8
+ };
9
+ };
10
+
11
+ const _buildContentBlock = ({
12
+ type,
13
+ ...rest
14
+ }: {
15
+ type: string;
16
+ [key: string]: any;
17
+ }) => {
18
+ return {
19
+ type,
20
+ ...rest,
21
+ };
22
+ };
23
+
24
+ const _buildAssistantMessage = (content: any[], tool_calls: any[]) => {
25
+ return {
26
+ input_variables: [],
27
+ template_format: "f-string" as const,
28
+ content: content,
29
+ role: "assistant" as const,
30
+ function_call: null,
31
+ name: null,
32
+ tool_calls: tool_calls,
33
+ };
34
+ };
35
+
36
+ const _buildPromptTemplate = (assistantMessage: any, metadata: any) => {
37
+ const promptTemplate = {
38
+ messages: [assistantMessage],
39
+ type: "chat" as const,
40
+ input_variables: [],
41
+ };
42
+
43
+ return {
44
+ prompt_template: promptTemplate,
45
+ metadata: metadata,
46
+ };
47
+ };
48
+
49
+ export const buildPromptBlueprintFromAnthropicEvent = (
50
+ event: any,
51
+ metadata: any
52
+ ) => {
53
+ const assistantContent: any[] = [];
54
+ const tool_calls: any[] = [];
55
+
56
+ if (event.type === "content_block_start") {
57
+ if (event.content_block.type === "thinking") {
58
+ assistantContent.push(
59
+ _buildContentBlock({
60
+ type: "thinking",
61
+ thinking: "",
62
+ signature: "",
63
+ })
64
+ );
65
+ } else if (event.content_block.type === "text") {
66
+ assistantContent.push(
67
+ _buildContentBlock({
68
+ type: "text",
69
+ text: "",
70
+ })
71
+ );
72
+ } else if (event.content_block.type === "tool_use") {
73
+ tool_calls.push(
74
+ _buildToolCall(
75
+ event.content_block.id || "",
76
+ event.content_block.name || "",
77
+ {}
78
+ )
79
+ );
80
+ }
81
+ } else if (event.type === "content_block_delta") {
82
+ if (event.delta.type === "thinking_delta") {
83
+ assistantContent.push(
84
+ _buildContentBlock({
85
+ type: "thinking",
86
+ thinking: event.delta.thinking || "",
87
+ signature: "",
88
+ })
89
+ );
90
+ } else if (event.delta.type === "text_delta") {
91
+ assistantContent.push(
92
+ _buildContentBlock({
93
+ type: "text",
94
+ text: event.delta.text || "",
95
+ })
96
+ );
97
+ } else if (event.delta.type === "signature_delta") {
98
+ assistantContent.push(
99
+ _buildContentBlock({
100
+ type: "thinking",
101
+ thinking: "",
102
+ signature: event.delta.signature || "",
103
+ })
104
+ );
105
+ } else if (event.delta.type === "input_json_delta") {
106
+ tool_calls.push(_buildToolCall("", "", event.delta.partial_json));
107
+ }
108
+ }
109
+
110
+ const assistantMessage = _buildAssistantMessage(assistantContent, tool_calls);
111
+ return _buildPromptTemplate(assistantMessage, metadata);
112
+ };
113
+
114
+ export const buildPromptBlueprintFromGoogleEvent = (
115
+ event: any,
116
+ metadata: any
117
+ ) => {
118
+ const assistantContent: any[] = [];
119
+ const tool_calls: any[] = [];
120
+
121
+ for (const candidate of event.candidates) {
122
+ if (
123
+ candidate.content &&
124
+ candidate.content.parts &&
125
+ Array.isArray(candidate.content.parts)
126
+ ) {
127
+ for (const part of candidate.content.parts) {
128
+ if (part.text) {
129
+ if (part.thought === true) {
130
+ assistantContent.push(
131
+ _buildContentBlock({
132
+ type: "thinking",
133
+ thinking: part.text,
134
+ signature: part.thoughtSignature || "",
135
+ })
136
+ );
137
+ } else {
138
+ assistantContent.push(
139
+ _buildContentBlock({
140
+ type: "text",
141
+ text: part.text,
142
+ })
143
+ );
144
+ }
145
+ } else if (part.functionCall) {
146
+ tool_calls.push(
147
+ _buildToolCall(
148
+ part.functionCall.id || "",
149
+ part.functionCall.name || "",
150
+ part.functionCall.args || {}
151
+ )
152
+ );
153
+ }
154
+ }
155
+ }
156
+ }
157
+
158
+ const assistantMessage = _buildAssistantMessage(assistantContent, tool_calls);
159
+ return _buildPromptTemplate(assistantMessage, metadata);
160
+ };
161
+
162
+ export const buildPromptBlueprintFromOpenAIEvent = (
163
+ event: any,
164
+ metadata: any
165
+ ) => {
166
+ const assistantContent: any[] = [];
167
+ const tool_calls: any[] = [];
168
+
169
+ for (const choice of event.choices) {
170
+ if (choice.delta) {
171
+ if (choice.delta.content) {
172
+ assistantContent.push(
173
+ _buildContentBlock({
174
+ type: "text",
175
+ text: choice.delta.content,
176
+ })
177
+ );
178
+ }
179
+
180
+ if (choice.delta.tool_calls && Array.isArray(choice.delta.tool_calls)) {
181
+ for (const toolCall of choice.delta.tool_calls) {
182
+ if (toolCall.function) {
183
+ tool_calls.push(
184
+ _buildToolCall(
185
+ toolCall.id || "",
186
+ toolCall.function.name || "",
187
+ toolCall.function.arguments || ""
188
+ )
189
+ );
190
+ }
191
+ }
192
+ }
193
+ }
194
+ }
195
+
196
+ const assistantMessage = _buildAssistantMessage(assistantContent, tool_calls);
197
+ return _buildPromptTemplate(assistantMessage, metadata);
198
+ };
@@ -15,6 +15,11 @@ import {
15
15
  TrackScore,
16
16
  WorkflowResponse,
17
17
  } from "@/types";
18
+ import {
19
+ buildPromptBlueprintFromAnthropicEvent,
20
+ buildPromptBlueprintFromGoogleEvent,
21
+ buildPromptBlueprintFromOpenAIEvent,
22
+ } from "./blueprint-builder";
18
23
  import type TypeAnthropic from "@anthropic-ai/sdk";
19
24
  import {
20
25
  Completion as AnthropicCompletion,
@@ -883,7 +888,8 @@ const anthropicStreamCompletion = (results: AnthropicCompletion[]) => {
883
888
  async function* streamResponse<Item>(
884
889
  generator: AsyncIterable<Item>,
885
890
  afterStream: (body: object) => any,
886
- mapResults: any
891
+ mapResults: any,
892
+ metadata: any
887
893
  ) {
888
894
  const data: {
889
895
  request_id: number | null;
@@ -898,6 +904,31 @@ async function* streamResponse<Item>(
898
904
  for await (const result of generator) {
899
905
  results.push(result);
900
906
  data.raw_response = result;
907
+
908
+ // Build prompt blueprint for Anthropic streaming events
909
+ if (result && typeof result === "object" && "type" in result) {
910
+ data.prompt_blueprint = buildPromptBlueprintFromAnthropicEvent(
911
+ result as MessageStreamEvent,
912
+ metadata
913
+ );
914
+ }
915
+
916
+ // Build prompt blueprint for Google streaming events
917
+ if (result && typeof result === "object" && "candidates" in result) {
918
+ data.prompt_blueprint = buildPromptBlueprintFromGoogleEvent(
919
+ result,
920
+ metadata
921
+ );
922
+ }
923
+
924
+ // Build prompt blueprint for OpenAI streaming events
925
+ if (result && typeof result === "object" && "choices" in result) {
926
+ data.prompt_blueprint = buildPromptBlueprintFromOpenAIEvent(
927
+ result,
928
+ metadata
929
+ );
930
+ }
931
+
901
932
  yield data;
902
933
  }
903
934
  const request_response = mapResults(results);
@@ -1157,24 +1188,32 @@ const googleRequest = async (
1157
1188
  const requestToMake =
1158
1189
  MAP_TYPE_TO_GOOGLE_FUNCTION[promptBlueprint.prompt_template.type];
1159
1190
 
1160
- const kwargsCamelCased = convertKeysToCamelCase(kwargs);
1161
- if (kwargsCamelCased.generationConfig)
1162
- kwargsCamelCased.generationConfig = convertKeysToCamelCase(
1163
- kwargsCamelCased.generationConfig
1164
- );
1165
-
1166
- return await requestToMake(genAI, kwargsCamelCased);
1191
+ return await requestToMake(genAI, kwargs);
1167
1192
  };
1168
1193
 
1169
1194
/**
 * Convert a snake_case identifier to camelCase.
 * Each "_" immediately followed by a lowercase ASCII letter is dropped and
 * the letter uppercased; any other "_" (trailing, doubled, before digits or
 * uppercase) is kept — matching /_([a-z])/g replacement semantics.
 */
const snakeToCamel = (str: string): string => {
  let result = "";
  for (let i = 0; i < str.length; i++) {
    const next = str[i + 1];
    if (str[i] === "_" && next !== undefined && next >= "a" && next <= "z") {
      result += next.toUpperCase();
      i += 1; // consume the letter we just uppercased
    } else {
      result += str[i];
    }
  }
  return result;
};
1171
1196
 
1172
- const convertKeysToCamelCase = <T>(obj: T): T => {
1197
+ const convertKeysToCamelCase = <T>(
1198
+ obj: T,
1199
+ ignoreValuesWithKeys: Set<string> = new Set()
1200
+ ): T => {
1173
1201
  if (!obj || typeof obj !== "object") return obj;
1174
- if (Array.isArray(obj)) return obj.map(convertKeysToCamelCase) as T;
1202
+ if (Array.isArray(obj))
1203
+ return obj.map((item) =>
1204
+ convertKeysToCamelCase(item, ignoreValuesWithKeys)
1205
+ ) as T;
1175
1206
 
1176
1207
  return Object.fromEntries(
1177
- Object.entries(obj).map(([key, value]) => [snakeToCamel(key), value])
1208
+ Object.entries(obj).map(([key, value]) => {
1209
+ if (ignoreValuesWithKeys.has(key)) {
1210
+ return [snakeToCamel(key), value];
1211
+ }
1212
+ return [
1213
+ snakeToCamel(key),
1214
+ convertKeysToCamelCase(value, ignoreValuesWithKeys),
1215
+ ];
1216
+ })
1178
1217
  ) as T;
1179
1218
  };
1180
1219
 
@@ -1238,12 +1277,18 @@ const configureProviderSettings = (
1238
1277
  );
1239
1278
  }
1240
1279
 
1241
- const kwargs = {
1280
+ let kwargs = {
1242
1281
  ...(promptBlueprint.llm_kwargs || {}),
1243
1282
  ...modelParameterOverrides,
1244
1283
  stream,
1245
1284
  };
1246
1285
 
1286
+ if (
1287
+ ["google", "vertexai"].includes(provider_type) &&
1288
+ promptBlueprint.metadata?.model?.name.startsWith("gemini")
1289
+ )
1290
+ kwargs = convertKeysToCamelCase(kwargs, new Set(["function_declarations"]));
1291
+
1247
1292
  const providerConfig = {
1248
1293
  baseURL: customProvider?.base_url ?? promptBlueprint.provider_base_url?.url,
1249
1294
  apiKey: customProvider?.api_key,