@omnikit-ai/sdk 2.0.7 → 2.0.9

This diff shows the changes between publicly released versions of the package as they appear in its public registry. It is provided for informational purposes only.
package/dist/index.d.mts CHANGED
@@ -229,11 +229,12 @@ interface LLMMessage {
  }>;
  }
  /**
- * Available LLM models for InvokeLLM
- * - 'gemini-flash': Fast and cost-effective (default)
- * - 'gemini-pro': Smarter with extended thinking (128 token thinking budget)
+ * Available LLM models:
+ * - Gemini 2.5: 'gemini-2.5-flash-lite' (fastest), 'gemini-2.5-flash', 'gemini-2.5-pro'
+ * - Gemini 3: 'gemini-3-flash' (best multimodal), 'gemini-3-pro' (advanced reasoning)
+ * - Legacy aliases: 'gemini-flash', 'gemini-pro', 'gemini-pro-3' (for backward compatibility)
  */
- type LLMModel = 'gemini-flash' | 'gemini-pro';
+ type LLMModel = 'gemini-2.5-flash-lite' | 'gemini-2.5-flash' | 'gemini-2.5-pro' | 'gemini-3-flash' | 'gemini-3-pro' | 'gemini-flash' | 'gemini-pro' | 'gemini-pro-3';
  interface LLMParams {
  /** Message-based format for advanced use */
  messages?: LLMMessage[];
@@ -253,9 +254,11 @@ interface LLMParams {
  type: 'json_object';
  } | Record<string, any>;
  /**
- * Model to use for LLM processing
- * - 'gemini-flash': Fast and cost-effective (default)
- * - 'gemini-pro': Smarter with extended thinking for complex reasoning
+ * Model to use for LLM processing.
+ * Defaults: 'gemini-2.5-flash-lite' for text, 'gemini-3-flash' for files/images.
+ * - 'gemini-2.5-flash-lite': Fastest, best for simple text tasks
+ * - 'gemini-3-flash': Best multimodal (images, PDFs, videos)
+ * - 'gemini-2.5-pro' / 'gemini-3-pro': Advanced reasoning
  */
  model?: LLMModel | string;
  /**
@@ -842,6 +845,21 @@ interface OmnikitClient {
  metadata: CachedMetadata;
  /** Collection operations */
  collections: Record<string, CollectionClass>;
+ /**
+ * Get a collection by name (function-style accessor)
+ * Alternative to omnikit.collections.name for more intuitive API
+ *
+ * @param name - Collection name (case-insensitive)
+ * @returns CollectionClass with CRUD methods
+ *
+ * @example
+ * ```typescript
+ * // Both of these work:
+ * await omnikit.collection('user').list()
+ * await omnikit.collections.user.list()
+ * ```
+ */
+ collection(name: string): CollectionClass;
  /** @deprecated Use collections instead */
  entities: Record<string, CollectionClass>;
  /** Integration operations */
@@ -1111,6 +1129,19 @@ interface LiveVoiceServerMessage {
  session_id?: string;
  duration_seconds?: number;
  }
+ /**
+ * Global window extensions used by the SDK
+ */
+ declare global {
+ interface Window {
+ /**
+ * Opens the login modal (registered by LoginModalProvider in main.jsx)
+ * Used by auth.login() to show inline login for public apps instead of redirecting
+ * @param returnUrl - URL to return to after successful login
+ */
+ __omnikit_openLoginModal?: (returnUrl?: string) => void;
+ }
+ }
 
  /**
  * Omnikit SDK v2.0 - Main Client
@@ -1196,6 +1227,24 @@ declare class APIClient implements OmnikitClient {
  * Lazy getter for collections - auto-initializes on first access
  */
  get collections(): Record<string, CollectionClass>;
+ /**
+ * Helper method to access a collection by name.
+ * Provides a more intuitive API: omnikit.collection('user').list()
+ * This is an alternative to: omnikit.collections.user.list()
+ *
+ * @param name - Collection name (case-insensitive, supports PascalCase, camelCase, snake_case)
+ * @returns CollectionClass with CRUD methods
+ *
+ * @example
+ * ```typescript
+ * // All of these work:
+ * await omnikit.collection('user').list()
+ * await omnikit.collection('User').list()
+ * await omnikit.collection('chatbot').list()
+ * await omnikit.collection('Chatbot').list()
+ * ```
+ */
+ collection(name: string): CollectionClass;
  /**
  * @deprecated Use collections instead. This alias exists for backward compatibility.
  */
@@ -1271,11 +1320,6 @@ declare class APIClient implements OmnikitClient {
  * Fetch app schema from backend
  */
  private fetchAppSchema;
- /**
- * Fetch integration schema from backend
- * Returns ServicesSchema (new flat format) or IntegrationSchema (legacy)
- */
- private fetchIntegrationSchema;
  /**
  * Create collection classes from schema
  */
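
The widened model union above can be exercised directly against these declarations. Below is a minimal sketch of what now type-checks in 2.0.9; it assumes `LLMModel` and `LLMParams` are re-exported from the package root (the declarations are confirmed by this diff, the import path is not):

```typescript
// Hedged sketch: assumes the types are importable from '@omnikit-ai/sdk'.
import type { LLMModel, LLMParams } from '@omnikit-ai/sdk';

const fast: LLMModel = 'gemini-2.5-flash-lite'; // documented default for plain-text calls
const multimodal: LLMModel = 'gemini-3-flash';  // documented default when files/images are involved
const legacy: LLMModel = 'gemini-pro';          // legacy alias, still part of the union

// `model` is typed as `LLMModel | string`, so unlisted model IDs also compile.
const params: LLMParams = { model: multimodal };
const custom: LLMParams = { model: 'vertex_ai/some-future-model' }; // hypothetical ID
```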
package/dist/index.d.ts CHANGED
@@ -229,11 +229,12 @@ interface LLMMessage {
  }>;
  }
  /**
- * Available LLM models for InvokeLLM
- * - 'gemini-flash': Fast and cost-effective (default)
- * - 'gemini-pro': Smarter with extended thinking (128 token thinking budget)
+ * Available LLM models:
+ * - Gemini 2.5: 'gemini-2.5-flash-lite' (fastest), 'gemini-2.5-flash', 'gemini-2.5-pro'
+ * - Gemini 3: 'gemini-3-flash' (best multimodal), 'gemini-3-pro' (advanced reasoning)
+ * - Legacy aliases: 'gemini-flash', 'gemini-pro', 'gemini-pro-3' (for backward compatibility)
  */
- type LLMModel = 'gemini-flash' | 'gemini-pro';
+ type LLMModel = 'gemini-2.5-flash-lite' | 'gemini-2.5-flash' | 'gemini-2.5-pro' | 'gemini-3-flash' | 'gemini-3-pro' | 'gemini-flash' | 'gemini-pro' | 'gemini-pro-3';
  interface LLMParams {
  /** Message-based format for advanced use */
  messages?: LLMMessage[];
@@ -253,9 +254,11 @@ interface LLMParams {
  type: 'json_object';
  } | Record<string, any>;
  /**
- * Model to use for LLM processing
- * - 'gemini-flash': Fast and cost-effective (default)
- * - 'gemini-pro': Smarter with extended thinking for complex reasoning
+ * Model to use for LLM processing.
+ * Defaults: 'gemini-2.5-flash-lite' for text, 'gemini-3-flash' for files/images.
+ * - 'gemini-2.5-flash-lite': Fastest, best for simple text tasks
+ * - 'gemini-3-flash': Best multimodal (images, PDFs, videos)
+ * - 'gemini-2.5-pro' / 'gemini-3-pro': Advanced reasoning
  */
  model?: LLMModel | string;
  /**
@@ -842,6 +845,21 @@ interface OmnikitClient {
  metadata: CachedMetadata;
  /** Collection operations */
  collections: Record<string, CollectionClass>;
+ /**
+ * Get a collection by name (function-style accessor)
+ * Alternative to omnikit.collections.name for more intuitive API
+ *
+ * @param name - Collection name (case-insensitive)
+ * @returns CollectionClass with CRUD methods
+ *
+ * @example
+ * ```typescript
+ * // Both of these work:
+ * await omnikit.collection('user').list()
+ * await omnikit.collections.user.list()
+ * ```
+ */
+ collection(name: string): CollectionClass;
  /** @deprecated Use collections instead */
  entities: Record<string, CollectionClass>;
  /** Integration operations */
@@ -1111,6 +1129,19 @@ interface LiveVoiceServerMessage {
  session_id?: string;
  duration_seconds?: number;
  }
+ /**
+ * Global window extensions used by the SDK
+ */
+ declare global {
+ interface Window {
+ /**
+ * Opens the login modal (registered by LoginModalProvider in main.jsx)
+ * Used by auth.login() to show inline login for public apps instead of redirecting
+ * @param returnUrl - URL to return to after successful login
+ */
+ __omnikit_openLoginModal?: (returnUrl?: string) => void;
+ }
+ }
 
  /**
  * Omnikit SDK v2.0 - Main Client
@@ -1196,6 +1227,24 @@ declare class APIClient implements OmnikitClient {
  * Lazy getter for collections - auto-initializes on first access
  */
  get collections(): Record<string, CollectionClass>;
+ /**
+ * Helper method to access a collection by name.
+ * Provides a more intuitive API: omnikit.collection('user').list()
+ * This is an alternative to: omnikit.collections.user.list()
+ *
+ * @param name - Collection name (case-insensitive, supports PascalCase, camelCase, snake_case)
+ * @returns CollectionClass with CRUD methods
+ *
+ * @example
+ * ```typescript
+ * // All of these work:
+ * await omnikit.collection('user').list()
+ * await omnikit.collection('User').list()
+ * await omnikit.collection('chatbot').list()
+ * await omnikit.collection('Chatbot').list()
+ * ```
+ */
+ collection(name: string): CollectionClass;
  /**
  * @deprecated Use collections instead. This alias exists for backward compatibility.
  */
@@ -1271,11 +1320,6 @@ declare class APIClient implements OmnikitClient {
  * Fetch app schema from backend
  */
  private fetchAppSchema;
- /**
- * Fetch integration schema from backend
- * Returns ServicesSchema (new flat format) or IntegrationSchema (legacy)
- */
- private fetchIntegrationSchema;
  /**
  * Create collection classes from schema
  */
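
The new `collection(name)` accessor mirrors the existing `collections` proxy; the JSDoc above shows both spellings. A short usage sketch, assuming an already-initialized client instance named `omnikit` (client construction is outside this diff):

```typescript
// Sketch only: `omnikit` is assumed to be an already-initialized APIClient;
// list() is the method shown in the SDK's own JSDoc examples.
async function demo(omnikit: any) {
  const users = await omnikit.collection('user').list();   // function-style accessor (new in this range)
  const same = await omnikit.collections.user.list();      // property-style proxy, unchanged
  const pascal = await omnikit.collection('User').list();  // case-insensitive per the JSDoc

  // `entities` still works but is marked @deprecated in favor of `collections`.
  const legacy = await omnikit.entities.user.list();
  return { users, same, pascal, legacy };
}
```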
package/dist/index.js CHANGED
@@ -437,8 +437,17 @@ var LiveVoiceSessionImpl = class {
 
  // src/client.ts
  var LLM_MODEL_MAP = {
+ // Gemini 2.5 models
+ "gemini-2.5-flash-lite": "vertex_ai/gemini-2.5-flash-lite",
+ "gemini-2.5-flash": "vertex_ai/gemini-2.5-flash",
+ "gemini-2.5-pro": "vertex_ai/gemini-2.5-pro",
+ // Gemini 3 models
+ "gemini-3-flash": "vertex_ai/gemini-3-flash-preview",
+ "gemini-3-pro": "vertex_ai/gemini-3-pro-preview",
+ // Legacy aliases (for backward compatibility)
  "gemini-flash": "vertex_ai/gemini-2.5-flash",
- "gemini-pro": "vertex_ai/gemini-2.5-pro"
+ "gemini-pro": "vertex_ai/gemini-2.5-pro",
+ "gemini-pro-3": "vertex_ai/gemini-3-pro-preview"
  };
  function mapLLMModel(model) {
  if (!model) return void 0;
@@ -616,6 +625,26 @@ var APIClient = class {
  get collections() {
  return this.createCollectionsProxy(false);
  }
+ /**
+ * Helper method to access a collection by name.
+ * Provides a more intuitive API: omnikit.collection('user').list()
+ * This is an alternative to: omnikit.collections.user.list()
+ *
+ * @param name - Collection name (case-insensitive, supports PascalCase, camelCase, snake_case)
+ * @returns CollectionClass with CRUD methods
+ *
+ * @example
+ * ```typescript
+ * // All of these work:
+ * await omnikit.collection('user').list()
+ * await omnikit.collection('User').list()
+ * await omnikit.collection('chatbot').list()
+ * await omnikit.collection('Chatbot').list()
+ * ```
+ */
+ collection(name) {
+ return this.collections[name];
+ }
  /**
  * @deprecated Use collections instead. This alias exists for backward compatibility.
  */
@@ -697,8 +726,12 @@ var APIClient = class {
  },
  login(returnUrl) {
  const fullReturnUrl = returnUrl || (typeof window !== "undefined" ? window.location.href : "/");
- const encodedReturnUrl = encodeURIComponent(fullReturnUrl);
  if (typeof window !== "undefined") {
+ if (window.__omnikit_openLoginModal) {
+ window.__omnikit_openLoginModal(fullReturnUrl);
+ return;
+ }
+ const encodedReturnUrl = encodeURIComponent(fullReturnUrl);
  const currentPath = window.location.pathname;
  const apiSitesMatch = currentPath.match(/^\/api\/sites\/([^\/]+)/);
  if (apiSitesMatch) {
@@ -1097,10 +1130,15 @@ Example: await ${collectionName}.list({ limit: 100, sort: '-created_at' })`,
  */
  createServicesProxy(useServiceToken) {
  const client = this;
+ function normalizeServiceName(name) {
+ if (!name) return name;
+ return name.charAt(0).toUpperCase() + name.slice(1);
+ }
  return new Proxy({}, {
  get(target, serviceName) {
  if (typeof serviceName === "string" && !serviceName.startsWith("_")) {
- if (serviceName === "CheckJobStatus") {
+ const normalizedName = normalizeServiceName(serviceName);
+ if (normalizedName === "CheckJobStatus") {
  return async function(params) {
  await client.ensureInitialized();
  if (!params?.job_id) {
@@ -1116,7 +1154,7 @@ Example: await ${collectionName}.list({ limit: 100, sort: '-created_at' })`,
  );
  };
  }
- if (serviceName === "DownloadPrivateFile") {
+ if (normalizedName === "DownloadPrivateFile") {
  return async function(params) {
  await client.ensureInitialized();
  if (!params?.file_uri) {
@@ -1140,7 +1178,7 @@ Example: await ${collectionName}.list({ limit: 100, sort: '-created_at' })`,
  }
  return async function(params, asyncOptions) {
  await client.ensureInitialized();
- if (serviceName === "InvokeLLM") {
+ if (normalizedName === "InvokeLLM") {
  if (params?.model) {
  params = { ...params, model: mapLLMModel(params.model) };
  }
@@ -1149,7 +1187,7 @@ Example: await ${collectionName}.list({ limit: 100, sort: '-created_at' })`,
  }
  }
  let response;
- const method = client._services[serviceName];
+ const method = client._services[normalizedName];
  if (method) {
  response = await method(params, useServiceToken);
  } else {
@@ -1165,10 +1203,10 @@ Example: await ${collectionName}.list({ limit: 100, sort: '-created_at' })`,
  "UploadPrivateFile": "services/files/private/init",
  "CreateFileSignedUrl": "services/files/signed-url"
  };
- const servicePath = servicePathMap[serviceName];
+ const servicePath = servicePathMap[normalizedName];
  if (!servicePath) {
  throw new OmnikitError(
- `Service '${serviceName}' not found. Known services: ${Object.keys(servicePathMap).join(", ")}`,
+ `Service '${serviceName}' not found. Known services: ${Object.keys(servicePathMap).join(", ")} (camelCase also supported)`,
  404,
  "SERVICE_NOT_FOUND"
  );
@@ -1181,6 +1219,12 @@ Example: await ${collectionName}.list({ limit: 100, sort: '-created_at' })`,
  } else if (client.userToken) {
  headers["Authorization"] = `Bearer ${client.userToken}`;
  }
+ if ((normalizedName === "UploadFile" || normalizedName === "uploadFile") && params?.file instanceof File) {
+ return client.handleFileUpload(params.file, servicePath, useServiceToken);
+ }
+ if ((normalizedName === "UploadPrivateFile" || normalizedName === "uploadPrivateFile") && params?.file instanceof File) {
+ return client.handleFileUpload(params.file, servicePath, useServiceToken);
+ }
  const fetchResponse = await fetch(
  `${client.baseUrl}/apps/${client.appId}/${servicePath}`,
  {
@@ -1336,23 +1380,6 @@ Example: await ${collectionName}.list({ limit: 100, sort: '-created_at' })`,
  }
  return await response.json();
  }
- /**
- * Fetch integration schema from backend
- * Returns ServicesSchema (new flat format) or IntegrationSchema (legacy)
- */
- async fetchIntegrationSchema() {
- try {
- const response = await fetch(
- `${this.baseUrl}/apps/${this.appId}/services`
- );
- if (!response.ok) {
- return null;
- }
- return await response.json();
- } catch (error) {
- return null;
- }
- }
  /**
  * Create collection classes from schema
  */
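
Two behavioral changes in `index.js` are easy to miss in the hunks above: service names are now normalized by upper-casing the first character (so camelCase lookups resolve), and the LLM alias map gained Gemini 2.5/3 entries. A standalone sketch of both lookups, copied and trimmed from the diff rather than imported from the package; the unknown-ID pass-through in `mapLLMModel` is an assumption, since only its null check is visible here:

```typescript
// Trimmed copy of the 2.0.9 logic, for illustration only (not an SDK import).
const LLM_MODEL_MAP: Record<string, string> = {
  'gemini-2.5-flash-lite': 'vertex_ai/gemini-2.5-flash-lite',
  'gemini-3-flash': 'vertex_ai/gemini-3-flash-preview',
  'gemini-pro-3': 'vertex_ai/gemini-3-pro-preview',
  // ...remaining entries as in the LLM_MODEL_MAP hunk above
};

function mapLLMModel(model?: string): string | undefined {
  if (!model) return undefined;
  return LLM_MODEL_MAP[model] ?? model; // assumption: unknown IDs pass through unchanged
}

function normalizeServiceName(name: string): string {
  if (!name) return name;
  return name.charAt(0).toUpperCase() + name.slice(1);
}

console.log(normalizeServiceName('invokeLLM'));    // "InvokeLLM" -> camelCase access now resolves
console.log(normalizeServiceName('uploadFile'));   // "UploadFile"
console.log(mapLLMModel('gemini-pro-3'));          // "vertex_ai/gemini-3-pro-preview"
console.log(mapLLMModel('gemini-2.5-flash-lite')); // "vertex_ai/gemini-2.5-flash-lite"
```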