arize-phoenix 10.13.2__py3-none-any.whl → 10.15.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of arize-phoenix might be problematic. Click here for more details.

Files changed (25) hide show
  1. {arize_phoenix-10.13.2.dist-info → arize_phoenix-10.15.0.dist-info}/METADATA +3 -2
  2. {arize_phoenix-10.13.2.dist-info → arize_phoenix-10.15.0.dist-info}/RECORD +25 -24
  3. phoenix/config.py +3 -1
  4. phoenix/db/types/model_provider.py +1 -0
  5. phoenix/server/api/helpers/playground_clients.py +459 -0
  6. phoenix/server/api/helpers/prompts/conversions/aws.py +83 -0
  7. phoenix/server/api/helpers/prompts/models.py +67 -0
  8. phoenix/server/api/input_types/GenerativeModelInput.py +2 -0
  9. phoenix/server/api/types/GenerativeProvider.py +33 -20
  10. phoenix/server/email/sender.py +2 -2
  11. phoenix/server/main.py +9 -6
  12. phoenix/server/static/.vite/manifest.json +36 -36
  13. phoenix/server/static/assets/{components-D-yJsfVa.js → components-SpUMF1qV.js} +257 -257
  14. phoenix/server/static/assets/{index-BXRcSHM6.js → index-DIlhmbjB.js} +3 -3
  15. phoenix/server/static/assets/{pages-Dn4XFHMU.js → pages-YX47cEoQ.js} +369 -386
  16. phoenix/server/static/assets/{vendor-BKYy4SMr.js → vendor-DCZoBorz.js} +2 -2
  17. phoenix/server/static/assets/{vendor-arizeai-CaqmrQdQ.js → vendor-arizeai-Ckci3irT.js} +1 -1
  18. phoenix/server/static/assets/{vendor-codemirror-BlmFw5CA.js → vendor-codemirror-BODM513D.js} +1 -1
  19. phoenix/server/static/assets/{vendor-recharts-Bz7zqjbW.js → vendor-recharts-C9O2a-N3.js} +1 -1
  20. phoenix/server/static/assets/{vendor-shiki-BitvudxD.js → vendor-shiki-Dq54rRC7.js} +1 -1
  21. phoenix/version.py +1 -1
  22. {arize_phoenix-10.13.2.dist-info → arize_phoenix-10.15.0.dist-info}/WHEEL +0 -0
  23. {arize_phoenix-10.13.2.dist-info → arize_phoenix-10.15.0.dist-info}/entry_points.txt +0 -0
  24. {arize_phoenix-10.13.2.dist-info → arize_phoenix-10.15.0.dist-info}/licenses/IP_NOTICE +0 -0
  25. {arize_phoenix-10.13.2.dist-info → arize_phoenix-10.15.0.dist-info}/licenses/LICENSE +0 -0
@@ -0,0 +1,83 @@
1
+ from __future__ import annotations
2
+
3
+ from typing import TYPE_CHECKING, Optional, Union
4
+
5
+ from typing_extensions import assert_never
6
+
7
+ if TYPE_CHECKING:
8
+ from anthropic.types import (
9
+ ToolChoiceAnyParam,
10
+ ToolChoiceAutoParam,
11
+ ToolChoiceParam,
12
+ ToolChoiceToolParam,
13
+ )
14
+
15
+ from phoenix.server.api.helpers.prompts.models import (
16
+ PromptToolChoiceNone,
17
+ PromptToolChoiceOneOrMore,
18
+ PromptToolChoiceSpecificFunctionTool,
19
+ PromptToolChoiceZeroOrMore,
20
+ )
21
+
22
+
23
+ class AwsToolChoiceConversion:
24
+ @staticmethod
25
+ def to_aws(
26
+ obj: Union[
27
+ PromptToolChoiceNone,
28
+ PromptToolChoiceZeroOrMore,
29
+ PromptToolChoiceOneOrMore,
30
+ PromptToolChoiceSpecificFunctionTool,
31
+ ],
32
+ disable_parallel_tool_use: Optional[bool] = None,
33
+ ) -> ToolChoiceParam:
34
+ if obj.type == "zero_or_more":
35
+ choice_auto: ToolChoiceAutoParam = {"type": "auto"}
36
+ if disable_parallel_tool_use is not None:
37
+ choice_auto["disable_parallel_tool_use"] = disable_parallel_tool_use
38
+ return choice_auto
39
+ if obj.type == "one_or_more":
40
+ choice_any: ToolChoiceAnyParam = {"type": "any"}
41
+ if disable_parallel_tool_use is not None:
42
+ choice_any["disable_parallel_tool_use"] = disable_parallel_tool_use
43
+ return choice_any
44
+ if obj.type == "specific_function":
45
+ choice_tool: ToolChoiceToolParam = {"type": "tool", "name": obj.function_name}
46
+ if disable_parallel_tool_use is not None:
47
+ choice_tool["disable_parallel_tool_use"] = disable_parallel_tool_use
48
+ return choice_tool
49
+ if obj.type == "none":
50
+ return {"type": "none"}
51
+ assert_never(obj.type)
52
+
53
+ @staticmethod
54
+ def from_aws(
55
+ obj: ToolChoiceParam,
56
+ ) -> Union[
57
+ PromptToolChoiceNone,
58
+ PromptToolChoiceZeroOrMore,
59
+ PromptToolChoiceOneOrMore,
60
+ PromptToolChoiceSpecificFunctionTool,
61
+ ]:
62
+ from phoenix.server.api.helpers.prompts.models import (
63
+ PromptToolChoiceNone,
64
+ PromptToolChoiceOneOrMore,
65
+ PromptToolChoiceSpecificFunctionTool,
66
+ PromptToolChoiceZeroOrMore,
67
+ )
68
+
69
+ if obj["type"] == "auto":
70
+ choice_zero_or_more = PromptToolChoiceZeroOrMore(type="zero_or_more")
71
+ return choice_zero_or_more
72
+ if obj["type"] == "any":
73
+ choice_one_or_more = PromptToolChoiceOneOrMore(type="one_or_more")
74
+ return choice_one_or_more
75
+ if obj["type"] == "tool":
76
+ choice_function_tool = PromptToolChoiceSpecificFunctionTool(
77
+ type="specific_function",
78
+ function_name=obj["name"],
79
+ )
80
+ return choice_function_tool
81
+ if obj["type"] == "none":
82
+ return PromptToolChoiceNone(type="none")
83
+ assert_never(obj)
@@ -9,6 +9,7 @@ from typing_extensions import Annotated, Self, TypeAlias, TypeGuard, assert_neve
9
9
  from phoenix.db.types.db_models import UNDEFINED, DBBaseModel
10
10
  from phoenix.db.types.model_provider import ModelProvider
11
11
  from phoenix.server.api.helpers.prompts.conversions.anthropic import AnthropicToolChoiceConversion
12
+ from phoenix.server.api.helpers.prompts.conversions.aws import AwsToolChoiceConversion
12
13
  from phoenix.server.api.helpers.prompts.conversions.openai import OpenAIToolChoiceConversion
13
14
 
14
15
  JSONSerializable = Union[None, bool, int, float, str, dict[str, Any], list[Any]]
@@ -312,6 +313,14 @@ class AnthropicToolDefinition(DBBaseModel):
312
313
  description: str = UNDEFINED
313
314
 
314
315
 
316
+ class BedrockToolDefinition(DBBaseModel):
317
+ """
318
+ Based on https://github.com/aws/amazon-bedrock-sdk-python/blob/main/src/bedrock/types/tool_param.py#L12
319
+ """
320
+
321
+ toolSpec: dict[str, Any]
322
+
323
+
315
324
  class PromptOpenAIInvocationParametersContent(DBBaseModel):
316
325
  temperature: float = UNDEFINED
317
326
  max_tokens: int = UNDEFINED
@@ -397,6 +406,17 @@ class PromptAnthropicInvocationParameters(DBBaseModel):
397
406
  anthropic: PromptAnthropicInvocationParametersContent
398
407
 
399
408
 
409
+ class PromptAwsInvocationParametersContent(DBBaseModel):
410
+ max_tokens: int = UNDEFINED
411
+ temperature: float = UNDEFINED
412
+ top_p: float = UNDEFINED
413
+
414
+
415
+ class PromptAwsInvocationParameters(DBBaseModel):
416
+ type: Literal["aws"]
417
+ aws: PromptAwsInvocationParametersContent
418
+
419
+
400
420
  class PromptGoogleInvocationParametersContent(DBBaseModel):
401
421
  temperature: float = UNDEFINED
402
422
  max_output_tokens: int = UNDEFINED
@@ -421,6 +441,7 @@ PromptInvocationParameters: TypeAlias = Annotated[
421
441
  PromptDeepSeekInvocationParameters,
422
442
  PromptXAIInvocationParameters,
423
443
  PromptOllamaInvocationParameters,
444
+ PromptAwsInvocationParameters,
424
445
  ],
425
446
  Field(..., discriminator="type"),
426
447
  ]
@@ -443,6 +464,8 @@ def get_raw_invocation_parameters(
443
464
  return invocation_parameters.xai.model_dump()
444
465
  if isinstance(invocation_parameters, PromptOllamaInvocationParameters):
445
466
  return invocation_parameters.ollama.model_dump()
467
+ if isinstance(invocation_parameters, PromptAwsInvocationParameters):
468
+ return invocation_parameters.aws.model_dump()
446
469
  assert_never(invocation_parameters)
447
470
 
448
471
 
@@ -459,6 +482,7 @@ def is_prompt_invocation_parameters(
459
482
  PromptDeepSeekInvocationParameters,
460
483
  PromptXAIInvocationParameters,
461
484
  PromptOllamaInvocationParameters,
485
+ PromptAwsInvocationParameters,
462
486
  ),
463
487
  )
464
488
 
@@ -512,6 +536,11 @@ def validate_invocation_parameters(
512
536
  type="ollama",
513
537
  ollama=PromptOllamaInvocationParametersContent.model_validate(invocation_parameters),
514
538
  )
539
+ elif model_provider is ModelProvider.AWS:
540
+ return PromptAwsInvocationParameters(
541
+ type="aws",
542
+ aws=PromptAwsInvocationParametersContent.model_validate(invocation_parameters),
543
+ )
515
544
  assert_never(model_provider)
516
545
 
517
546
 
@@ -530,12 +559,16 @@ def normalize_tools(
530
559
  ):
531
560
  openai_tools = [OpenAIToolDefinition.model_validate(schema) for schema in schemas]
532
561
  tools = [_openai_to_prompt_tool(openai_tool) for openai_tool in openai_tools]
562
+ elif model_provider is ModelProvider.AWS:
563
+ bedrock_tools = [BedrockToolDefinition.model_validate(schema) for schema in schemas]
564
+ tools = [_bedrock_to_prompt_tool(bedrock_tool) for bedrock_tool in bedrock_tools]
533
565
  elif model_provider is ModelProvider.ANTHROPIC:
534
566
  anthropic_tools = [AnthropicToolDefinition.model_validate(schema) for schema in schemas]
535
567
  tools = [_anthropic_to_prompt_tool(anthropic_tool) for anthropic_tool in anthropic_tools]
536
568
  else:
537
569
  raise ValueError(f"Unsupported model provider: {model_provider}")
538
570
  ans = PromptTools(type="tools", tools=tools)
571
+
539
572
  if tool_choice is not None:
540
573
  if (
541
574
  model_provider is ModelProvider.OPENAI
@@ -545,6 +578,8 @@ def normalize_tools(
545
578
  or model_provider is ModelProvider.OLLAMA
546
579
  ):
547
580
  ans.tool_choice = OpenAIToolChoiceConversion.from_openai(tool_choice) # type: ignore[arg-type]
581
+ elif model_provider is ModelProvider.AWS:
582
+ ans.tool_choice = AwsToolChoiceConversion.from_aws(tool_choice) # type: ignore[arg-type]
548
583
  elif model_provider is ModelProvider.ANTHROPIC:
549
584
  choice, disable_parallel_tool_calls = AnthropicToolChoiceConversion.from_anthropic(
550
585
  tool_choice # type: ignore[arg-type]
@@ -571,6 +606,10 @@ def denormalize_tools(
571
606
  denormalized_tools = [_prompt_to_openai_tool(tool) for tool in tools.tools]
572
607
  if tools.tool_choice:
573
608
  tool_choice = OpenAIToolChoiceConversion.to_openai(tools.tool_choice)
609
+ elif model_provider is ModelProvider.AWS:
610
+ denormalized_tools = [_prompt_to_bedrock_tool(tool) for tool in tools.tools]
611
+ if tools.tool_choice:
612
+ tool_choice = OpenAIToolChoiceConversion.to_openai(tools.tool_choice)
574
613
  elif model_provider is ModelProvider.ANTHROPIC:
575
614
  denormalized_tools = [_prompt_to_anthropic_tool(tool) for tool in tools.tools]
576
615
  if tools.tool_choice and tools.tool_choice.type != "none":
@@ -614,6 +653,19 @@ def _prompt_to_openai_tool(
614
653
  )
615
654
 
616
655
 
656
def _bedrock_to_prompt_tool(
    tool: BedrockToolDefinition,
) -> PromptToolFunction:
    """Convert an AWS Bedrock tool definition (``toolSpec`` shape) into the
    provider-agnostic Phoenix prompt tool representation."""
    spec = tool.toolSpec
    definition = PromptToolFunctionDefinition(
        name=spec["name"],
        description=spec["description"],
        # Bedrock nests the JSON schema under inputSchema.json.
        parameters=spec["inputSchema"]["json"],
    )
    return PromptToolFunction(type="function", function=definition)
667
+
668
+
617
669
  def _anthropic_to_prompt_tool(
618
670
  tool: AnthropicToolDefinition,
619
671
  ) -> PromptToolFunction:
@@ -636,3 +688,18 @@ def _prompt_to_anthropic_tool(
636
688
  name=function.name,
637
689
  description=function.description,
638
690
  )
691
+
692
+
693
def _prompt_to_bedrock_tool(
    tool: PromptToolFunction,
) -> BedrockToolDefinition:
    """Convert a Phoenix prompt tool back into the AWS Bedrock ``toolSpec``
    shape expected by the Bedrock Converse API."""
    fn = tool.function
    spec = {
        "name": fn.name,
        "description": fn.description,
        # Bedrock expects the JSON schema nested under inputSchema.json.
        "inputSchema": {"json": fn.parameters},
    }
    return BedrockToolDefinition(toolSpec=spec)
@@ -17,3 +17,5 @@ class GenerativeModelInput:
17
17
  """ The endpoint to use for the model. Only required for Azure OpenAI models. """
18
18
  api_version: Optional[str] = UNSET
19
19
  """ The API version to use for the model. """
20
+ region: Optional[str] = UNSET
21
+ """ The region to use for the model. """
@@ -17,6 +17,7 @@ class GenerativeProviderKey(Enum):
17
17
  DEEPSEEK = "DeepSeek"
18
18
  XAI = "xAI"
19
19
  OLLAMA = "Ollama"
20
+ AWS = "AWS Bedrock"
20
21
 
21
22
 
22
23
  @strawberry.type
@@ -38,6 +39,7 @@ class GenerativeProvider:
38
39
  GenerativeProviderKey.DEEPSEEK: ["deepseek"],
39
40
  GenerativeProviderKey.XAI: ["grok"],
40
41
  GenerativeProviderKey.OLLAMA: ["llama", "mistral", "codellama", "phi", "qwen", "gemma"],
42
+ GenerativeProviderKey.AWS: ["nova", "titan"],
41
43
  }
42
44
 
43
45
  attribute_provider_to_generative_provider_map: ClassVar[dict[str, GenerativeProviderKey]] = {
@@ -45,6 +47,7 @@ class GenerativeProvider:
45
47
  OpenInferenceLLMProviderValues.ANTHROPIC.value: GenerativeProviderKey.ANTHROPIC,
46
48
  OpenInferenceLLMProviderValues.AZURE.value: GenerativeProviderKey.AZURE_OPENAI,
47
49
  OpenInferenceLLMProviderValues.GOOGLE.value: GenerativeProviderKey.GOOGLE,
50
+ OpenInferenceLLMProviderValues.AWS.value: GenerativeProviderKey.AWS,
48
51
  # Note: DeepSeek uses OpenAI compatibility but we can't duplicate the key in the dict
49
52
  # The provider will be determined through model name prefix matching instead
50
53
  # Note: xAI uses OpenAI compatibility but we can't duplicate the key in the dict
@@ -58,26 +61,36 @@ class GenerativeProvider:
58
61
  E.x. OpenAI requires a single API key
59
62
  """
60
63
  model_provider_to_credential_requirements_map: ClassVar[
61
- dict[GenerativeProviderKey, GenerativeProviderCredentialConfig]
64
+ dict[GenerativeProviderKey, list[GenerativeProviderCredentialConfig]]
62
65
  ] = {
63
- GenerativeProviderKey.AZURE_OPENAI: GenerativeProviderCredentialConfig(
64
- env_var_name="AZURE_OPENAI_API_KEY", is_required=True
65
- ),
66
- GenerativeProviderKey.ANTHROPIC: GenerativeProviderCredentialConfig(
67
- env_var_name="ANTHROPIC_API_KEY", is_required=True
68
- ),
69
- GenerativeProviderKey.OPENAI: GenerativeProviderCredentialConfig(
70
- env_var_name="OPENAI_API_KEY", is_required=True
71
- ),
72
- GenerativeProviderKey.GOOGLE: GenerativeProviderCredentialConfig(
73
- env_var_name="GEMINI_API_KEY", is_required=True
74
- ),
75
- GenerativeProviderKey.DEEPSEEK: GenerativeProviderCredentialConfig(
76
- env_var_name="DEEPSEEK_API_KEY", is_required=True
77
- ),
78
- GenerativeProviderKey.XAI: GenerativeProviderCredentialConfig(
79
- env_var_name="XAI_API_KEY", is_required=True
80
- ),
66
+ GenerativeProviderKey.AZURE_OPENAI: [
67
+ GenerativeProviderCredentialConfig(
68
+ env_var_name="AZURE_OPENAI_API_KEY", is_required=True
69
+ )
70
+ ],
71
+ GenerativeProviderKey.ANTHROPIC: [
72
+ GenerativeProviderCredentialConfig(env_var_name="ANTHROPIC_API_KEY", is_required=True)
73
+ ],
74
+ GenerativeProviderKey.OPENAI: [
75
+ GenerativeProviderCredentialConfig(env_var_name="OPENAI_API_KEY", is_required=True)
76
+ ],
77
+ GenerativeProviderKey.GOOGLE: [
78
+ GenerativeProviderCredentialConfig(env_var_name="GEMINI_API_KEY", is_required=True)
79
+ ],
80
+ GenerativeProviderKey.DEEPSEEK: [
81
+ GenerativeProviderCredentialConfig(env_var_name="DEEPSEEK_API_KEY", is_required=True)
82
+ ],
83
+ GenerativeProviderKey.XAI: [
84
+ GenerativeProviderCredentialConfig(env_var_name="XAI_API_KEY", is_required=True)
85
+ ],
86
+ GenerativeProviderKey.OLLAMA: [],
87
+ GenerativeProviderKey.AWS: [
88
+ GenerativeProviderCredentialConfig(env_var_name="AWS_ACCESS_KEY_ID", is_required=True),
89
+ GenerativeProviderCredentialConfig(
90
+ env_var_name="AWS_SECRET_ACCESS_KEY", is_required=True
91
+ ),
92
+ GenerativeProviderCredentialConfig(env_var_name="AWS_SESSION_TOKEN", is_required=False),
93
+ ],
81
94
  }
82
95
 
83
96
  @strawberry.field
@@ -110,7 +123,7 @@ class GenerativeProvider:
110
123
  credential_requirements = self.model_provider_to_credential_requirements_map.get(self.key)
111
124
  if credential_requirements is None:
112
125
  return []
113
- return [credential_requirements]
126
+ return self.model_provider_to_credential_requirements_map[self.key]
114
127
 
115
128
  @strawberry.field(description="Whether the credentials are set on the server for the provider") # type: ignore
116
129
  async def credentials_set(self) -> bool:
@@ -3,7 +3,6 @@ import ssl
3
3
  from email.message import EmailMessage
4
4
  from pathlib import Path
5
5
  from typing import Literal
6
- from urllib.parse import urljoin
7
6
 
8
7
  from anyio import to_thread
9
8
  from jinja2 import Environment, FileSystemLoader, select_autoescape
@@ -49,9 +48,10 @@ class SimpleEmailSender:
49
48
  template_name = "welcome.html"
50
49
 
51
50
  template = self.env.get_template(template_name)
51
+
52
52
  html_content = template.render(
53
53
  name=name,
54
- welcome_url=urljoin(str(get_env_root_url()), "forgot-password"),
54
+ welcome_url=str(get_env_root_url()),
55
55
  )
56
56
 
57
57
  msg = EmailMessage()
phoenix/server/main.py CHANGED
@@ -91,13 +91,13 @@ _WELCOME_MESSAGE = Environment(loader=BaseLoader()).from_string("""
91
91
  ██║ ██║ ██║╚██████╔╝███████╗██║ ╚████║██║██╔╝ ██╗
92
92
  ╚═╝ ╚═╝ ╚═╝ ╚═════╝ ╚══════╝╚═╝ ╚═══╝╚═╝╚═╝ ╚═╝ v{{ version }}
93
93
 
94
+ | ⭐️⭐️⭐️ Support Open Source ⭐️⭐️⭐️
95
+ | ⭐️⭐️⭐️ Star on GitHub! ⭐️⭐️⭐️
96
+ | https://github.com/Arize-ai/phoenix
94
97
  |
95
98
  | 🌎 Join our Community 🌎
96
99
  | https://arize-ai.slack.com/join/shared_invite/zt-2w57bhem8-hq24MB6u7yE_ZF_ilOYSBw#/shared-invite/email
97
100
  |
98
- | ⭐️ Leave us a Star ⭐️
99
- | https://github.com/Arize-ai/phoenix
100
- |
101
101
  | 📚 Documentation 📚
102
102
  | https://arize.com/docs/phoenix
103
103
  |
@@ -387,12 +387,15 @@ def main() -> None:
387
387
  # Print information about the server
388
388
  http_scheme = "https" if tls_enabled_for_http else "http"
389
389
  grpc_scheme = "https" if tls_enabled_for_grpc else "http"
390
+ # Use localhost for display when host is the loopback address to make URLs clickable
391
+ display_host = "localhost" if host in ("0.0.0.0", "::") else host
390
392
  root_path = urljoin(f"{http_scheme}://{host}:{port}", host_root_path)
393
+ display_root_path = urljoin(f"{http_scheme}://{display_host}:{port}", host_root_path)
391
394
  msg = _WELCOME_MESSAGE.render(
392
395
  version=phoenix_version,
393
- ui_path=root_path,
394
- grpc_path=f"{grpc_scheme}://{host}:{get_env_grpc_port()}",
395
- http_path=urljoin(root_path, "v1/traces"),
396
+ ui_path=display_root_path,
397
+ grpc_path=f"{grpc_scheme}://{display_host}:{get_env_grpc_port()}",
398
+ http_path=urljoin(display_root_path, "v1/traces"),
396
399
  storage=get_printable_db_url(db_connection_str),
397
400
  schema=get_env_database_schema(),
398
401
  auth_enabled=auth_settings.enable_auth,
@@ -1,28 +1,28 @@
1
1
  {
2
- "_components-D-yJsfVa.js": {
3
- "file": "assets/components-D-yJsfVa.js",
2
+ "_components-SpUMF1qV.js": {
3
+ "file": "assets/components-SpUMF1qV.js",
4
4
  "name": "components",
5
5
  "imports": [
6
- "_vendor-BKYy4SMr.js",
7
- "_pages-Dn4XFHMU.js",
8
- "_vendor-arizeai-CaqmrQdQ.js",
9
- "_vendor-codemirror-BlmFw5CA.js",
6
+ "_vendor-DCZoBorz.js",
7
+ "_pages-YX47cEoQ.js",
8
+ "_vendor-arizeai-Ckci3irT.js",
9
+ "_vendor-codemirror-BODM513D.js",
10
10
  "_vendor-three-C5WAXd5r.js"
11
11
  ]
12
12
  },
13
- "_pages-Dn4XFHMU.js": {
14
- "file": "assets/pages-Dn4XFHMU.js",
13
+ "_pages-YX47cEoQ.js": {
14
+ "file": "assets/pages-YX47cEoQ.js",
15
15
  "name": "pages",
16
16
  "imports": [
17
- "_vendor-BKYy4SMr.js",
18
- "_vendor-arizeai-CaqmrQdQ.js",
19
- "_components-D-yJsfVa.js",
20
- "_vendor-codemirror-BlmFw5CA.js",
21
- "_vendor-recharts-Bz7zqjbW.js"
17
+ "_vendor-DCZoBorz.js",
18
+ "_vendor-arizeai-Ckci3irT.js",
19
+ "_components-SpUMF1qV.js",
20
+ "_vendor-codemirror-BODM513D.js",
21
+ "_vendor-recharts-C9O2a-N3.js"
22
22
  ]
23
23
  },
24
- "_vendor-BKYy4SMr.js": {
25
- "file": "assets/vendor-BKYy4SMr.js",
24
+ "_vendor-DCZoBorz.js": {
25
+ "file": "assets/vendor-DCZoBorz.js",
26
26
  "name": "vendor",
27
27
  "imports": [
28
28
  "_vendor-three-C5WAXd5r.js"
@@ -35,33 +35,33 @@
35
35
  "file": "assets/vendor-WIZid84E.css",
36
36
  "src": "_vendor-WIZid84E.css"
37
37
  },
38
- "_vendor-arizeai-CaqmrQdQ.js": {
39
- "file": "assets/vendor-arizeai-CaqmrQdQ.js",
38
+ "_vendor-arizeai-Ckci3irT.js": {
39
+ "file": "assets/vendor-arizeai-Ckci3irT.js",
40
40
  "name": "vendor-arizeai",
41
41
  "imports": [
42
- "_vendor-BKYy4SMr.js"
42
+ "_vendor-DCZoBorz.js"
43
43
  ]
44
44
  },
45
- "_vendor-codemirror-BlmFw5CA.js": {
46
- "file": "assets/vendor-codemirror-BlmFw5CA.js",
45
+ "_vendor-codemirror-BODM513D.js": {
46
+ "file": "assets/vendor-codemirror-BODM513D.js",
47
47
  "name": "vendor-codemirror",
48
48
  "imports": [
49
- "_vendor-BKYy4SMr.js",
50
- "_vendor-shiki-BitvudxD.js"
49
+ "_vendor-DCZoBorz.js",
50
+ "_vendor-shiki-Dq54rRC7.js"
51
51
  ]
52
52
  },
53
- "_vendor-recharts-Bz7zqjbW.js": {
54
- "file": "assets/vendor-recharts-Bz7zqjbW.js",
53
+ "_vendor-recharts-C9O2a-N3.js": {
54
+ "file": "assets/vendor-recharts-C9O2a-N3.js",
55
55
  "name": "vendor-recharts",
56
56
  "imports": [
57
- "_vendor-BKYy4SMr.js"
57
+ "_vendor-DCZoBorz.js"
58
58
  ]
59
59
  },
60
- "_vendor-shiki-BitvudxD.js": {
61
- "file": "assets/vendor-shiki-BitvudxD.js",
60
+ "_vendor-shiki-Dq54rRC7.js": {
61
+ "file": "assets/vendor-shiki-Dq54rRC7.js",
62
62
  "name": "vendor-shiki",
63
63
  "imports": [
64
- "_vendor-BKYy4SMr.js"
64
+ "_vendor-DCZoBorz.js"
65
65
  ]
66
66
  },
67
67
  "_vendor-three-C5WAXd5r.js": {
@@ -69,19 +69,19 @@
69
69
  "name": "vendor-three"
70
70
  },
71
71
  "index.tsx": {
72
- "file": "assets/index-BXRcSHM6.js",
72
+ "file": "assets/index-DIlhmbjB.js",
73
73
  "name": "index",
74
74
  "src": "index.tsx",
75
75
  "isEntry": true,
76
76
  "imports": [
77
- "_vendor-BKYy4SMr.js",
78
- "_vendor-arizeai-CaqmrQdQ.js",
79
- "_pages-Dn4XFHMU.js",
80
- "_components-D-yJsfVa.js",
77
+ "_vendor-DCZoBorz.js",
78
+ "_vendor-arizeai-Ckci3irT.js",
79
+ "_pages-YX47cEoQ.js",
80
+ "_components-SpUMF1qV.js",
81
81
  "_vendor-three-C5WAXd5r.js",
82
- "_vendor-codemirror-BlmFw5CA.js",
83
- "_vendor-shiki-BitvudxD.js",
84
- "_vendor-recharts-Bz7zqjbW.js"
82
+ "_vendor-codemirror-BODM513D.js",
83
+ "_vendor-shiki-Dq54rRC7.js",
84
+ "_vendor-recharts-C9O2a-N3.js"
85
85
  ]
86
86
  }
87
87
  }