notdiamond 0.3.9 → 0.3.10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.cjs CHANGED
@@ -583,7 +583,14 @@ class NotDiamond {
    * @returns The preference id.
    */
   async createPreferenceId() {
-    return this.postRequest(this.createUrl, {});
+    const response = await this.postRequest(
+      this.createUrl,
+      {}
+    );
+    if ("preference_id" in response) {
+      return response.preference_id;
+    }
+    throw new Error("Invalid response: preference_id not found");
   }
   /**
    *
@@ -618,10 +625,17 @@ class NotDiamond {
   async astream(options) {
     const selectedModel = await this.modelSelect(options);
     const { providers } = selectedModel;
+    console.log("providers received from modelSelect", providers);
     const stream = await Promise.resolve(
-      callLLMStream(providers?.[0] || "openai", options, this.llmKeys)
+      callLLMStream(providers?.[0] || {
+        provider: "openai",
+        model: "gpt-3.5-turbo"
+      }, options, this.llmKeys)
     );
-    return { provider: providers?.[0] || "openai", stream };
+    return { provider: providers?.[0] || {
+      provider: "openai",
+      model: "gpt-3.5-turbo"
+    }, stream };
   }
   /**
    * Streams the results of the model.
@@ -633,6 +647,7 @@ class NotDiamond {
     if (!options.llmProviders || options.llmProviders.length === 0) {
       throw new Error("No LLM providers specified");
     }
+    console.log("options received from stream", options);
     const promise = this.astream(options);
     if (callback) {
       promise.then(async ({ provider, stream }) => {
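For callers, the visible change in this file is that createPreferenceId() no longer resolves with the raw POST response: it unwraps preference_id and throws if the field is absent. A minimal TypeScript sketch of the caller-side effect, assuming the NotDiamond export name and an { apiKey } constructor option (neither appears in this diff):

// Sketch only: the `NotDiamond` import name and the { apiKey } constructor
// option are assumptions; the unwrap/throw behaviour is what 0.3.10 adds.
import { NotDiamond } from "notdiamond";

const client = new NotDiamond({ apiKey: process.env.NOTDIAMOND_API_KEY ?? "" });

async function createPreference(): Promise<void> {
  try {
    // 0.3.9 resolved with the raw response object; 0.3.10 resolves with the
    // unwrapped preference_id value instead.
    const preferenceId = await client.createPreferenceId();
    console.log("preference id:", preferenceId);
  } catch (err) {
    // New failure mode in 0.3.10 when the response lacks preference_id:
    // Error: Invalid response: preference_id not found
    console.error(err);
  }
}

createPreference();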
package/dist/index.mjs CHANGED
@@ -564,7 +564,14 @@ class NotDiamond {
    * @returns The preference id.
    */
   async createPreferenceId() {
-    return this.postRequest(this.createUrl, {});
+    const response = await this.postRequest(
+      this.createUrl,
+      {}
+    );
+    if ("preference_id" in response) {
+      return response.preference_id;
+    }
+    throw new Error("Invalid response: preference_id not found");
   }
   /**
    *
@@ -599,10 +606,17 @@ class NotDiamond {
   async astream(options) {
     const selectedModel = await this.modelSelect(options);
     const { providers } = selectedModel;
+    console.log("providers received from modelSelect", providers);
     const stream = await Promise.resolve(
-      callLLMStream(providers?.[0] || "openai", options, this.llmKeys)
+      callLLMStream(providers?.[0] || {
+        provider: "openai",
+        model: "gpt-3.5-turbo"
+      }, options, this.llmKeys)
     );
-    return { provider: providers?.[0] || "openai", stream };
+    return { provider: providers?.[0] || {
+      provider: "openai",
+      model: "gpt-3.5-turbo"
+    }, stream };
   }
   /**
    * Streams the results of the model.
@@ -614,6 +628,7 @@ class NotDiamond {
     if (!options.llmProviders || options.llmProviders.length === 0) {
       throw new Error("No LLM providers specified");
     }
+    console.log("options received from stream", options);
     const promise = this.astream(options);
     if (callback) {
       promise.then(async ({ provider, stream }) => {
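The ESM build carries the same changes, including two new console.log debug statements in the streaming path. The behavioural difference in astream() is that the fallback, and therefore the returned provider, is now a provider object rather than the bare string "openai". A minimal sketch, assuming the messages option and the constructor option below (only llmProviders and the { provider, stream } return shape are confirmed by this diff; model names are placeholders):

// Sketch only: `messages` and the { apiKey } option are assumptions;
// `llmProviders` and the { provider, stream } return shape appear in the diff.
import { NotDiamond } from "notdiamond";

const client = new NotDiamond({ apiKey: process.env.NOTDIAMOND_API_KEY ?? "" });

async function streamCompletion(): Promise<void> {
  const { provider, stream } = await client.astream({
    messages: [{ role: "user", content: "Hello!" }],
    llmProviders: [{ provider: "openai", model: "gpt-4o" }],
  });

  // 0.3.9 could report the bare string "openai" when no provider was selected;
  // 0.3.10 reports an object, e.g. { provider: "openai", model: "gpt-3.5-turbo" }.
  console.log("routed to:", provider);

  // Consuming `stream` is unchanged by this release; hand it to your existing
  // stream handler here.
  void stream;
}

streamCompletion();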
package/package.json CHANGED
@@ -1,7 +1,7 @@
 {
   "name": "notdiamond",
   "type": "module",
-  "version": "0.3.9",
+  "version": "0.3.10",
   "author": "not-diamond",
   "license": "MIT",
   "description": "TS/JS client for the NotDiamond API",