tracia 0.1.0__py3-none-any.whl → 0.1.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
tracia/__init__.py CHANGED
@@ -81,6 +81,7 @@ from ._types import (
     RunResponsesResult,
     RunResult,
     Span,
+    SpanListItem,
     StreamResult,
     TextPart,
     TokenUsage,
@@ -139,6 +140,7 @@ __all__ = [
     "CreateSpanPayload",
     "CreateSpanResult",
     "Span",
+    "SpanListItem",
     "ListSpansOptions",
     "ListSpansResult",
     "EvaluateOptions",
tracia/_constants.py CHANGED
@@ -3,7 +3,7 @@
 from __future__ import annotations
 
 # SDK Version (defined here to avoid circular imports)
-SDK_VERSION = "0.1.0"
+SDK_VERSION = "0.1.1"
 
 # API Configuration
 BASE_URL = "https://app.tracia.io"
tracia/_types.py CHANGED
@@ -347,8 +347,27 @@ class CreateSpanResult(BaseModel):
     trace_id: str = Field(alias="traceId")
 
 
+class SpanListItem(BaseModel):
+    """A span item from the list endpoint (reduced fields)."""
+
+    model_config = ConfigDict(populate_by_name=True)
+
+    id: str
+    span_id: str = Field(alias="spanId")
+    trace_id: str = Field(alias="traceId")
+    prompt_slug: str | None = Field(default=None, alias="promptSlug")
+    model: str
+    status: Literal["SUCCESS", "ERROR"]
+    latency_ms: int = Field(alias="latencyMs")
+    input_tokens: int = Field(alias="inputTokens")
+    output_tokens: int = Field(alias="outputTokens")
+    total_tokens: int = Field(alias="totalTokens")
+    cost: float | None = None
+    created_at: datetime = Field(alias="createdAt")
+
+
 class Span(BaseModel):
-    """A span from the API."""
+    """A span from the API (full detail)."""
 
     model_config = ConfigDict(populate_by_name=True)
 
@@ -397,7 +416,7 @@ class ListSpansResult(BaseModel):
 
     model_config = ConfigDict(populate_by_name=True)
 
-    spans: list[Span]
+    spans: list[SpanListItem]
     cursor: str | None = None
     has_more: bool = Field(default=False, alias="hasMore")
 
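Taken together, the two hunks above mean that span listings now deserialize into the reduced SpanListItem model instead of the full Span. A minimal sketch of validating such a payload with the models from this diff; the package must be installed, and every value in the payload below is invented for illustration:

    from tracia import ListSpansResult

    payload = {
        "spans": [
            {
                # Keys follow the camelCase aliases defined above.
                "id": "spn_123",
                "spanId": "abc123",
                "traceId": "trc_456",
                "promptSlug": "welcome-email",
                "model": "gpt-4o-mini",
                "status": "SUCCESS",
                "latencyMs": 812,
                "inputTokens": 120,
                "outputTokens": 45,
                "totalTokens": 165,
                "cost": 0.00042,
                "createdAt": "2025-01-01T00:00:00Z",
            }
        ],
        "cursor": None,
        "hasMore": False,
    }

    result = ListSpansResult.model_validate(payload)
    item = result.spans[0]  # a SpanListItem in 0.1.1; this was a full Span in 0.1.0
    print(item.prompt_slug, item.total_tokens, item.created_at)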
@@ -433,6 +452,7 @@ class EvaluateResult(BaseModel):
 class PromptMessage(BaseModel):
     """A message in a prompt template."""
 
+    id: str
     role: Literal["system", "developer", "user", "assistant"]
     content: str
 
@@ -487,12 +507,12 @@ class CreatePromptOptions(BaseModel):
 
     model_config = ConfigDict(populate_by_name=True)
 
-    slug: str
     name: str
+    slug: str | None = None
     description: str | None = None
-    messages: list[PromptMessage]
-    model: str
-    provider: LLMProvider
+    content: list[PromptMessage]
+    model: str | None = None
+    provider: LLMProvider | None = None
     temperature: float | None = None
     max_output_tokens: int | None = Field(default=None, alias="maxOutputTokens")
     top_p: float | None = Field(default=None, alias="topP")
@@ -505,8 +525,9 @@ class UpdatePromptOptions(BaseModel):
     model_config = ConfigDict(populate_by_name=True)
 
     name: str | None = None
+    slug: str | None = None
     description: str | None = None
-    messages: list[PromptMessage] | None = None
+    content: list[PromptMessage] | None = None
     model: str | None = None
     provider: LLMProvider | None = None
     temperature: float | None = None
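The three hunks above reshape the prompt-authoring payloads: PromptMessage gains a required id, CreatePromptOptions now takes content instead of messages (with slug, model, and provider becoming optional), and UpdatePromptOptions picks up slug plus the same messages → content rename. A sketch of building a 0.1.1-style create payload; it imports from the private tracia._types module because only part of the public re-export list is visible in this diff, and the literal values are illustrative:

    from tracia._types import CreatePromptOptions, PromptMessage

    options = CreatePromptOptions(
        name="Welcome email",
        # slug, model, and provider may now be omitted; in 0.1.0 they were
        # required, and the field below was called `messages`.
        content=[
            PromptMessage(
                id="msg_1",  # required as of 0.1.1
                role="system",
                content="You are a friendly onboarding assistant.",
            ),
        ],
        temperature=0.2,
    )
    print(options.model_dump(by_alias=True, exclude_none=True))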
@@ -531,10 +552,13 @@ class RunResult(BaseModel):
 
     model_config = ConfigDict(populate_by_name=True)
 
-    text: str
+    text: str | None = None
     span_id: str = Field(alias="spanId")
     trace_id: str = Field(alias="traceId")
     prompt_version: int = Field(alias="promptVersion")
     latency_ms: int = Field(alias="latencyMs")
     usage: TokenUsage
     cost: float
+    finish_reason: FinishReason | None = Field(default=None, alias="finishReason")
+    tool_calls: list[ToolCall] | None = Field(default=None, alias="toolCalls")
+    structured_output: dict[str, Any] | None = Field(default=None, alias="structuredOutput")
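Because text is now optional and finishReason, toolCalls, and structuredOutput have been added, callers can no longer assume every run yields plain text; a tool-call or structured-output run may leave text unset. A defensive-handling sketch, assuming RunResult is re-exported from the package root as the import list in tracia/__init__.py suggests:

    from tracia import RunResult

    def describe_run(result: RunResult) -> str:
        """Return a short human-readable summary, tolerating text=None."""
        if result.text is not None:
            return f"text: {result.text[:80]}"
        if result.tool_calls:
            return f"{len(result.tool_calls)} tool call(s), finish_reason={result.finish_reason}"
        if result.structured_output is not None:
            return f"structured output with keys {sorted(result.structured_output)}"
        return f"no text returned (finish_reason={result.finish_reason})"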
tracia/resources/prompts.py CHANGED
@@ -45,7 +45,7 @@ class Prompts:
             TraciaError: If the request fails.
         """
         data = self._client.get("/api/v1/prompts")
-        return [PromptListItem.model_validate(item) for item in data]
+        return [PromptListItem.model_validate(item) for item in data["prompts"]]
 
     async def alist(self) -> list[PromptListItem]:
         """List all prompts asynchronously.
@@ -60,7 +60,7 @@ class Prompts:
             raise RuntimeError("Async client not initialized")
 
         data = await self._async_client.get("/api/v1/prompts")
-        return [PromptListItem.model_validate(item) for item in data]
+        return [PromptListItem.model_validate(item) for item in data["prompts"]]
 
     def get(self, slug: str) -> Prompt:
         """Get a prompt by slug.
tracia-0.1.0.dist-info/METADATA → tracia-0.1.1.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: tracia
-Version: 0.1.0
+Version: 0.1.1
 Summary: LLM prompt management and tracing SDK
 Project-URL: Homepage, https://tracia.io
 Project-URL: Documentation, https://docs.tracia.io
tracia-0.1.0.dist-info/RECORD → tracia-0.1.1.dist-info/RECORD CHANGED
@@ -1,18 +1,18 @@
-tracia/__init__.py,sha256=i3fAcuJjd3QVuXHRGLVmNsQW4VirnHL_dNUpLWL4xPE,3538
+tracia/__init__.py,sha256=awp-IX5sQaXwfHlXn2l8oNI1SlzVlBwxBI-vKRqXqcg,3576
 tracia/_client.py,sha256=5bTui8EN_vQ9XA9ZxGnkxTWpG2fIseWwdrVGHRmlRbE,40127
-tracia/_constants.py,sha256=vM9X3TLawroFqrFLme_SuYrUgboDu7q2NMyvW_JNQTg,821
+tracia/_constants.py,sha256=mOyOchSxpoOPrbBS1LxPRX1rcwn4-Hx9MFv17fzZtuc,821
 tracia/_errors.py,sha256=NEmqucrx3NqGjYFtelHJw1rs2lvRJky6Xave5bNOswg,2464
 tracia/_http.py,sha256=KE7_vFUpfCHeWewiCqrROw_vCRU9pOpN2i4ieNsOkdI,11237
 tracia/_llm.py,sha256=kv3uU4FcVWhYd-N2m5xsp5rnFY7i-NgbCjv4yqh9RRk,31071
 tracia/_session.py,sha256=6ET-ibGKbU7Sd0o75zPI1O4bRXEqmbD5MMExPO6DO-Y,6903
 tracia/_streaming.py,sha256=qrTTYWY9P39n1a4dh2vKQedgohdMJA7sAHy39YtLGoE,3873
-tracia/_types.py,sha256=XeLNvW32VVv5vVP6BTGC07h9iz21s-KcT2lFxs0yD84,15294
+tracia/_types.py,sha256=B1wI23yZfh7MPUJ2rCVbhEzC0Si21DqHellx8PTZt90,16321
 tracia/_utils.py,sha256=xB28NRFHcduv2k0N3PXIh7sYCwtDU35R1gCiPwdmvvU,3090
 tracia/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 tracia/resources/__init__.py,sha256=llZvQRPlEsHDsOdpWcCLgBSPjrJALFltAFThZii3NaY,127
-tracia/resources/prompts.py,sha256=pVNKBRnGAft7JH3HapXYFZN37eO8_9NG7kpXbau1iVA,7553
+tracia/resources/prompts.py,sha256=ZPorLhbNfaXKc6VENN4V4kuiGJQPtlIDnO-01eEaGcY,7575
 tracia/resources/spans.py,sha256=zpF21NeJw_FR8TPVj3nR7r0-FZIzWUycol0n5XoMuTg,6591
-tracia-0.1.0.dist-info/METADATA,sha256=mBVXXalffBkWQMLcQ8AdKYpnyZmmssqcceBq5YRaauA,7033
-tracia-0.1.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-tracia-0.1.0.dist-info/licenses/LICENSE,sha256=jnoLcro_uevZVCUf3nkyE43c6mG6MGNJ_3_TShNJj5s,1063
-tracia-0.1.0.dist-info/RECORD,,
+tracia-0.1.1.dist-info/METADATA,sha256=MSlRK5YtxQlYipp4BW3EiHlpKO09z-SItawDe7pARLM,7033
+tracia-0.1.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+tracia-0.1.1.dist-info/licenses/LICENSE,sha256=jnoLcro_uevZVCUf3nkyE43c6mG6MGNJ_3_TShNJj5s,1063
+tracia-0.1.1.dist-info/RECORD,,