promptlayer 1.0.70__tar.gz → 1.0.72__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of promptlayer might be problematic. See the registry's advisory page for more details.

Files changed (24)
  1. {promptlayer-1.0.70 → promptlayer-1.0.72}/PKG-INFO +2 -1
  2. {promptlayer-1.0.70 → promptlayer-1.0.72}/promptlayer/__init__.py +1 -1
  3. promptlayer-1.0.72/promptlayer/groups/__init__.py +22 -0
  4. promptlayer-1.0.72/promptlayer/groups/groups.py +9 -0
  5. {promptlayer-1.0.70 → promptlayer-1.0.72}/promptlayer/promptlayer.py +34 -27
  6. {promptlayer-1.0.70 → promptlayer-1.0.72}/promptlayer/promptlayer_base.py +17 -8
  7. {promptlayer-1.0.70 → promptlayer-1.0.72}/promptlayer/promptlayer_mixins.py +3 -3
  8. {promptlayer-1.0.70 → promptlayer-1.0.72}/promptlayer/span_exporter.py +2 -4
  9. {promptlayer-1.0.70 → promptlayer-1.0.72}/promptlayer/templates.py +9 -7
  10. {promptlayer-1.0.70 → promptlayer-1.0.72}/promptlayer/track/__init__.py +12 -22
  11. {promptlayer-1.0.70 → promptlayer-1.0.72}/promptlayer/track/track.py +22 -16
  12. {promptlayer-1.0.70 → promptlayer-1.0.72}/promptlayer/utils.py +182 -79
  13. {promptlayer-1.0.70 → promptlayer-1.0.72}/pyproject.toml +2 -2
  14. promptlayer-1.0.70/promptlayer/groups/__init__.py +0 -20
  15. promptlayer-1.0.70/promptlayer/groups/groups.py +0 -11
  16. {promptlayer-1.0.70 → promptlayer-1.0.72}/LICENSE +0 -0
  17. {promptlayer-1.0.70 → promptlayer-1.0.72}/README.md +0 -0
  18. {promptlayer-1.0.70 → promptlayer-1.0.72}/promptlayer/streaming/__init__.py +0 -0
  19. {promptlayer-1.0.70 → promptlayer-1.0.72}/promptlayer/streaming/blueprint_builder.py +0 -0
  20. {promptlayer-1.0.70 → promptlayer-1.0.72}/promptlayer/streaming/response_handlers.py +0 -0
  21. {promptlayer-1.0.70 → promptlayer-1.0.72}/promptlayer/streaming/stream_processor.py +0 -0
  22. {promptlayer-1.0.70 → promptlayer-1.0.72}/promptlayer/types/__init__.py +0 -0
  23. {promptlayer-1.0.70 → promptlayer-1.0.72}/promptlayer/types/prompt_template.py +0 -0
  24. {promptlayer-1.0.70 → promptlayer-1.0.72}/promptlayer/types/request_log.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: promptlayer
3
- Version: 1.0.70
3
+ Version: 1.0.72
4
4
  Summary: PromptLayer is a platform for prompt engineering and tracks your LLM requests.
5
5
  License: Apache-2.0
6
6
  License-File: LICENSE
@@ -17,6 +17,7 @@ Classifier: Programming Language :: Python :: 3.13
17
17
  Classifier: Programming Language :: Python :: 3.14
18
18
  Requires-Dist: ably (>=2.0.11,<3.0.0)
19
19
  Requires-Dist: aiohttp (>=3.10.10,<4.0.0)
20
+ Requires-Dist: centrifuge-python (>=0.4.1,<0.5.0)
20
21
  Requires-Dist: httpx (>=0.28.1,<0.29.0)
21
22
  Requires-Dist: nest-asyncio (>=1.6.0,<2.0.0)
22
23
  Requires-Dist: opentelemetry-api (>=1.26.0,<2.0.0)
@@ -1,4 +1,4 @@
1
1
  from .promptlayer import AsyncPromptLayer, PromptLayer
2
2
 
3
- __version__ = "1.0.70"
3
+ __version__ = "1.0.72"
4
4
  __all__ = ["PromptLayer", "AsyncPromptLayer", "__version__"]
@@ -0,0 +1,22 @@
1
+ from promptlayer.groups.groups import acreate, create
2
+
3
+
4
+ class GroupManager:
5
+ def __init__(self, api_key: str, base_url: str):
6
+ self.api_key = api_key
7
+ self.base_url = base_url
8
+
9
+ def create(self):
10
+ return create(self.api_key, self.base_url)
11
+
12
+
13
+ class AsyncGroupManager:
14
+ def __init__(self, api_key: str, base_url: str):
15
+ self.api_key = api_key
16
+ self.base_url = base_url
17
+
18
+ async def create(self):
19
+ return await acreate(self.api_key, self.base_url)
20
+
21
+
22
+ __all__ = ["GroupManager", "AsyncGroupManager"]
@@ -0,0 +1,9 @@
1
+ from promptlayer.utils import apromptlayer_create_group, promptlayer_create_group
2
+
3
+
4
+ def create(api_key: str, base_url: str):
5
+ return promptlayer_create_group(api_key, base_url)
6
+
7
+
8
+ async def acreate(api_key: str, base_url: str):
9
+ return await apromptlayer_create_group(api_key, base_url)
@@ -26,6 +26,10 @@ from promptlayer.utils import (
26
26
  logger = logging.getLogger(__name__)
27
27
 
28
28
 
29
+ def get_base_url(base_url: Union[str, None]):
30
+ return base_url or os.environ.get("PROMPTLAYER_BASE_URL", "https://api.promptlayer.com")
31
+
32
+
29
33
  def is_workflow_results_dict(obj: Any) -> bool:
30
34
  if not isinstance(obj, dict):
31
35
  return False
@@ -49,9 +53,7 @@ def is_workflow_results_dict(obj: Any) -> bool:
49
53
 
50
54
  class PromptLayer(PromptLayerMixin):
51
55
  def __init__(
52
- self,
53
- api_key: str = None,
54
- enable_tracing: bool = False,
56
+ self, api_key: Union[str, None] = None, enable_tracing: bool = False, base_url: Union[str, None] = None
55
57
  ):
56
58
  if api_key is None:
57
59
  api_key = os.environ.get("PROMPTLAYER_API_KEY")
@@ -62,11 +64,12 @@ class PromptLayer(PromptLayerMixin):
62
64
  "Please set the PROMPTLAYER_API_KEY environment variable or pass the api_key parameter."
63
65
  )
64
66
 
67
+ self.base_url = get_base_url(base_url)
65
68
  self.api_key = api_key
66
- self.templates = TemplateManager(api_key)
67
- self.group = GroupManager(api_key)
68
- self.tracer_provider, self.tracer = self._initialize_tracer(api_key, enable_tracing)
69
- self.track = TrackManager(api_key)
69
+ self.templates = TemplateManager(api_key, self.base_url)
70
+ self.group = GroupManager(api_key, self.base_url)
71
+ self.tracer_provider, self.tracer = self._initialize_tracer(api_key, self.base_url, enable_tracing)
72
+ self.track = TrackManager(api_key, self.base_url)
70
73
 
71
74
  def __getattr__(
72
75
  self,
@@ -75,15 +78,18 @@ class PromptLayer(PromptLayerMixin):
75
78
  if name == "openai":
76
79
  import openai as openai_module
77
80
 
78
- return PromptLayerBase(openai_module, function_name="openai", api_key=self.api_key, tracer=self.tracer)
81
+ return PromptLayerBase(
82
+ self.api_key, self.base_url, openai_module, function_name="openai", tracer=self.tracer
83
+ )
79
84
  elif name == "anthropic":
80
85
  import anthropic as anthropic_module
81
86
 
82
87
  return PromptLayerBase(
88
+ self.api_key,
89
+ self.base_url,
83
90
  anthropic_module,
84
91
  function_name="anthropic",
85
92
  provider_type="anthropic",
86
- api_key=self.api_key,
87
93
  tracer=self.tracer,
88
94
  )
89
95
  else:
@@ -212,7 +218,7 @@ class PromptLayer(PromptLayerMixin):
212
218
  metadata=metadata,
213
219
  **body,
214
220
  )
215
- return track_request(**track_request_kwargs)
221
+ return track_request(self.base_url, **track_request_kwargs)
216
222
 
217
223
  def run(
218
224
  self,
@@ -277,12 +283,13 @@ class PromptLayer(PromptLayerMixin):
277
283
 
278
284
  results = asyncio.run(
279
285
  arun_workflow_request(
286
+ api_key=self.api_key,
287
+ base_url=self.base_url,
280
288
  workflow_id_or_name=_get_workflow_workflow_id_or_name(workflow_id_or_name, workflow_name),
281
289
  input_variables=input_variables or {},
282
290
  metadata=metadata,
283
291
  workflow_label_name=workflow_label_name,
284
292
  workflow_version_number=workflow_version,
285
- api_key=self.api_key,
286
293
  return_all_outputs=return_all_outputs,
287
294
  )
288
295
  )
@@ -330,6 +337,7 @@ class PromptLayer(PromptLayerMixin):
330
337
  ):
331
338
  return util_log_request(
332
339
  self.api_key,
340
+ self.base_url,
333
341
  provider=provider,
334
342
  model=model,
335
343
  input=input,
@@ -354,9 +362,7 @@ class PromptLayer(PromptLayerMixin):
354
362
 
355
363
  class AsyncPromptLayer(PromptLayerMixin):
356
364
  def __init__(
357
- self,
358
- api_key: str = None,
359
- enable_tracing: bool = False,
365
+ self, api_key: Union[str, None] = None, enable_tracing: bool = False, base_url: Union[str, None] = None
360
366
  ):
361
367
  if api_key is None:
362
368
  api_key = os.environ.get("PROMPTLAYER_API_KEY")
@@ -367,31 +373,30 @@ class AsyncPromptLayer(PromptLayerMixin):
367
373
  "Please set the PROMPTLAYER_API_KEY environment variable or pass the api_key parameter."
368
374
  )
369
375
 
376
+ self.base_url = get_base_url(base_url)
370
377
  self.api_key = api_key
371
- self.templates = AsyncTemplateManager(api_key)
372
- self.group = AsyncGroupManager(api_key)
373
- self.tracer_provider, self.tracer = self._initialize_tracer(api_key, enable_tracing)
374
- self.track = AsyncTrackManager(api_key)
378
+ self.templates = AsyncTemplateManager(api_key, self.base_url)
379
+ self.group = AsyncGroupManager(api_key, self.base_url)
380
+ self.tracer_provider, self.tracer = self._initialize_tracer(api_key, self.base_url, enable_tracing)
381
+ self.track = AsyncTrackManager(api_key, self.base_url)
375
382
 
376
383
  def __getattr__(self, name: Union[Literal["openai"], Literal["anthropic"], Literal["prompts"]]):
377
384
  if name == "openai":
378
385
  import openai as openai_module
379
386
 
380
387
  openai = PromptLayerBase(
381
- openai_module,
382
- function_name="openai",
383
- api_key=self.api_key,
384
- tracer=self.tracer,
388
+ self.api_key, self.base_url, openai_module, function_name="openai", tracer=self.tracer
385
389
  )
386
390
  return openai
387
391
  elif name == "anthropic":
388
392
  import anthropic as anthropic_module
389
393
 
390
394
  anthropic = PromptLayerBase(
395
+ self.api_key,
396
+ self.base_url,
391
397
  anthropic_module,
392
398
  function_name="anthropic",
393
399
  provider_type="anthropic",
394
- api_key=self.api_key,
395
400
  tracer=self.tracer,
396
401
  )
397
402
  return anthropic
@@ -413,12 +418,13 @@ class AsyncPromptLayer(PromptLayerMixin):
413
418
  ) -> Union[Dict[str, Any], Any]:
414
419
  try:
415
420
  return await arun_workflow_request(
421
+ api_key=self.api_key,
422
+ base_url=self.base_url,
416
423
  workflow_id_or_name=_get_workflow_workflow_id_or_name(workflow_id_or_name, workflow_name),
417
424
  input_variables=input_variables or {},
418
425
  metadata=metadata,
419
426
  workflow_label_name=workflow_label_name,
420
427
  workflow_version_number=workflow_version,
421
- api_key=self.api_key,
422
428
  return_all_outputs=return_all_outputs,
423
429
  )
424
430
  except Exception as ex:
@@ -491,6 +497,7 @@ class AsyncPromptLayer(PromptLayerMixin):
491
497
  ):
492
498
  return await autil_log_request(
493
499
  self.api_key,
500
+ self.base_url,
494
501
  provider=provider,
495
502
  model=model,
496
503
  input=input,
@@ -530,7 +537,7 @@ class AsyncPromptLayer(PromptLayerMixin):
530
537
  pl_run_span_id,
531
538
  **body,
532
539
  )
533
- return await atrack_request(**track_request_kwargs)
540
+ return await atrack_request(self.base_url, **track_request_kwargs)
534
541
 
535
542
  return _track_request
536
543
 
@@ -554,7 +561,7 @@ class AsyncPromptLayer(PromptLayerMixin):
554
561
  metadata=metadata,
555
562
  **body,
556
563
  )
557
- return await atrack_request(**track_request_kwargs)
564
+ return await atrack_request(self.base_url, **track_request_kwargs)
558
565
 
559
566
  async def _run_internal(
560
567
  self,
@@ -631,6 +638,6 @@ class AsyncPromptLayer(PromptLayerMixin):
631
638
 
632
639
  return {
633
640
  "request_id": request_log.get("request_id", None),
634
- "raw_response": request_response,
641
+ "raw_response": response,
635
642
  "prompt_blueprint": request_log.get("prompt_blueprint", None),
636
643
  }
@@ -13,14 +13,16 @@ class PromptLayerBase(object):
13
13
  "_provider_type",
14
14
  "_api_key",
15
15
  "_tracer",
16
+ "_base_url",
16
17
  ]
17
18
 
18
- def __init__(self, obj, function_name="", provider_type="openai", api_key=None, tracer=None):
19
+ def __init__(self, api_key: str, base_url: str, obj, function_name="", provider_type="openai", tracer=None):
19
20
  object.__setattr__(self, "_obj", obj)
20
21
  object.__setattr__(self, "_function_name", function_name)
21
22
  object.__setattr__(self, "_provider_type", provider_type)
22
23
  object.__setattr__(self, "_api_key", api_key)
23
24
  object.__setattr__(self, "_tracer", tracer)
25
+ object.__setattr__(self, "_base_url", base_url)
24
26
 
25
27
  def __getattr__(self, name):
26
28
  attr = getattr(object.__getattribute__(self, "_obj"), name)
@@ -41,10 +43,11 @@ class PromptLayerBase(object):
41
43
  )
42
44
  ):
43
45
  return PromptLayerBase(
46
+ object.__getattribute__(self, "_api_key"),
47
+ object.__getattribute__(self, "_base_url"),
44
48
  attr,
45
49
  function_name=f"{object.__getattribute__(self, '_function_name')}.{name}",
46
50
  provider_type=object.__getattribute__(self, "_provider_type"),
47
- api_key=object.__getattribute__(self, "_api_key"),
48
51
  tracer=object.__getattribute__(self, "_tracer"),
49
52
  )
50
53
  return attr
@@ -75,10 +78,11 @@ class PromptLayerBase(object):
75
78
 
76
79
  if inspect.isclass(function_object):
77
80
  result = PromptLayerBase(
81
+ object.__getattribute__(self, "_api_key"),
82
+ object.__getattribute__(self, "_base_url"),
78
83
  function_object(*args, **kwargs),
79
84
  function_name=function_name,
80
85
  provider_type=object.__getattribute__(self, "_provider_type"),
81
- api_key=object.__getattribute__(self, "_api_key"),
82
86
  tracer=tracer,
83
87
  )
84
88
  llm_request_span.set_attribute("function_output", str(result))
@@ -88,13 +92,14 @@ class PromptLayerBase(object):
88
92
 
89
93
  if inspect.iscoroutinefunction(function_object) or inspect.iscoroutine(function_response):
90
94
  return async_wrapper(
95
+ object.__getattribute__(self, "_api_key"),
96
+ object.__getattribute__(self, "_base_url"),
91
97
  function_response,
92
98
  return_pl_id,
93
99
  request_start_time,
94
100
  function_name,
95
101
  object.__getattribute__(self, "_provider_type"),
96
102
  tags,
97
- api_key=object.__getattribute__(self, "_api_key"),
98
103
  llm_request_span_id=llm_request_span_id,
99
104
  tracer=tracer, # Pass the tracer to async_wrapper
100
105
  *args,
@@ -103,6 +108,8 @@ class PromptLayerBase(object):
103
108
 
104
109
  request_end_time = datetime.datetime.now().timestamp()
105
110
  result = promptlayer_api_handler(
111
+ object.__getattribute__(self, "_api_key"),
112
+ object.__getattribute__(self, "_base_url"),
106
113
  function_name,
107
114
  object.__getattribute__(self, "_provider_type"),
108
115
  args,
@@ -111,7 +118,6 @@ class PromptLayerBase(object):
111
118
  function_response,
112
119
  request_start_time,
113
120
  request_end_time,
114
- object.__getattribute__(self, "_api_key"),
115
121
  return_pl_id=return_pl_id,
116
122
  llm_request_span_id=llm_request_span_id,
117
123
  )
@@ -121,29 +127,33 @@ class PromptLayerBase(object):
121
127
  # Without tracing
122
128
  if inspect.isclass(function_object):
123
129
  return PromptLayerBase(
130
+ object.__getattribute__(self, "_api_key"),
131
+ object.__getattribute__(self, "_base_url"),
124
132
  function_object(*args, **kwargs),
125
133
  function_name=function_name,
126
134
  provider_type=object.__getattribute__(self, "_provider_type"),
127
- api_key=object.__getattribute__(self, "_api_key"),
128
135
  )
129
136
 
130
137
  function_response = function_object(*args, **kwargs)
131
138
 
132
139
  if inspect.iscoroutinefunction(function_object) or inspect.iscoroutine(function_response):
133
140
  return async_wrapper(
141
+ object.__getattribute__(self, "_api_key"),
142
+ object.__getattribute__(self, "_base_url"),
134
143
  function_response,
135
144
  return_pl_id,
136
145
  request_start_time,
137
146
  function_name,
138
147
  object.__getattribute__(self, "_provider_type"),
139
148
  tags,
140
- api_key=object.__getattribute__(self, "_api_key"),
141
149
  *args,
142
150
  **kwargs,
143
151
  )
144
152
 
145
153
  request_end_time = datetime.datetime.now().timestamp()
146
154
  return promptlayer_api_handler(
155
+ object.__getattribute__(self, "_api_key"),
156
+ object.__getattribute__(self, "_base_url"),
147
157
  function_name,
148
158
  object.__getattribute__(self, "_provider_type"),
149
159
  args,
@@ -152,6 +162,5 @@ class PromptLayerBase(object):
152
162
  function_response,
153
163
  request_start_time,
154
164
  request_end_time,
155
- object.__getattribute__(self, "_api_key"),
156
165
  return_pl_id=return_pl_id,
157
166
  )
@@ -262,11 +262,11 @@ AMAP_PROVIDER_TO_FUNCTION = {
262
262
 
263
263
  class PromptLayerMixin:
264
264
  @staticmethod
265
- def _initialize_tracer(api_key: str = None, enable_tracing: bool = False):
265
+ def _initialize_tracer(api_key: str, base_url: str, enable_tracing: bool = False):
266
266
  if enable_tracing:
267
267
  resource = Resource(attributes={ResourceAttributes.SERVICE_NAME: "prompt-layer-library"})
268
268
  tracer_provider = TracerProvider(resource=resource)
269
- promptlayer_exporter = PromptLayerSpanExporter(api_key=api_key)
269
+ promptlayer_exporter = PromptLayerSpanExporter(api_key=api_key, base_url=base_url)
270
270
  span_processor = BatchSpanProcessor(promptlayer_exporter)
271
271
  tracer_provider.add_span_processor(span_processor)
272
272
  tracer = tracer_provider.get_tracer(__name__)
@@ -317,7 +317,7 @@ class PromptLayerMixin:
317
317
  function_kwargs = deepcopy(prompt_blueprint["llm_kwargs"])
318
318
  function_kwargs["stream"] = stream
319
319
  provider = prompt_blueprint_model["provider"]
320
- api_type = prompt_blueprint_model["api_type"]
320
+ api_type = prompt_blueprint_model.get("api_type", "chat-completions")
321
321
 
322
322
  if custom_provider := prompt_blueprint.get("custom_provider"):
323
323
  provider = custom_provider["client"]
@@ -4,13 +4,11 @@ import requests
4
4
  from opentelemetry.sdk.trace import ReadableSpan
5
5
  from opentelemetry.sdk.trace.export import SpanExporter, SpanExportResult
6
6
 
7
- from promptlayer.utils import URL_API_PROMPTLAYER
8
-
9
7
 
10
8
  class PromptLayerSpanExporter(SpanExporter):
11
- def __init__(self, api_key: str = None):
9
+ def __init__(self, api_key: str, base_url: str):
12
10
  self.api_key = api_key
13
- self.url = f"{URL_API_PROMPTLAYER}/spans-bulk"
11
+ self.url = f"{base_url}/spans-bulk"
14
12
 
15
13
  def export(self, spans: Sequence[ReadableSpan]) -> SpanExportResult:
16
14
  request_data = []
@@ -11,25 +11,27 @@ from promptlayer.utils import (
11
11
 
12
12
 
13
13
  class TemplateManager:
14
- def __init__(self, api_key: str):
14
+ def __init__(self, api_key: str, base_url: str):
15
15
  self.api_key = api_key
16
+ self.base_url = base_url
16
17
 
17
18
  def get(self, prompt_name: str, params: Union[GetPromptTemplate, None] = None):
18
- return get_prompt_template(prompt_name, params, self.api_key)
19
+ return get_prompt_template(self.api_key, self.base_url, prompt_name, params)
19
20
 
20
21
  def publish(self, body: PublishPromptTemplate):
21
- return publish_prompt_template(body, self.api_key)
22
+ return publish_prompt_template(self.api_key, self.base_url, body)
22
23
 
23
24
  def all(self, page: int = 1, per_page: int = 30, label: str = None):
24
- return get_all_prompt_templates(page, per_page, self.api_key, label)
25
+ return get_all_prompt_templates(self.api_key, self.base_url, page, per_page, label)
25
26
 
26
27
 
27
28
  class AsyncTemplateManager:
28
- def __init__(self, api_key: str):
29
+ def __init__(self, api_key: str, base_url: str):
29
30
  self.api_key = api_key
31
+ self.base_url = base_url
30
32
 
31
33
  async def get(self, prompt_name: str, params: Union[GetPromptTemplate, None] = None):
32
- return await aget_prompt_template(prompt_name, params, self.api_key)
34
+ return await aget_prompt_template(self.api_key, self.base_url, prompt_name, params)
33
35
 
34
36
  async def all(self, page: int = 1, per_page: int = 30, label: str = None):
35
- return await aget_all_prompt_templates(page, per_page, self.api_key, label)
37
+ return await aget_all_prompt_templates(self.api_key, self.base_url, page, per_page, label)
@@ -13,51 +13,41 @@ from promptlayer.track.track import (
13
13
 
14
14
 
15
15
  class TrackManager:
16
- def __init__(self, api_key: str):
16
+ def __init__(self, api_key: str, base_url: str):
17
17
  self.api_key = api_key
18
+ self.base_url = base_url
18
19
 
19
20
  def group(self, request_id, group_id):
20
- return group(request_id, group_id, self.api_key)
21
+ return group(self.api_key, self.base_url, request_id, group_id)
21
22
 
22
23
  def metadata(self, request_id, metadata):
23
- return metadata_(request_id, metadata, self.api_key)
24
+ return metadata_(self.api_key, self.base_url, request_id, metadata)
24
25
 
25
26
  def prompt(self, request_id, prompt_name, prompt_input_variables, version=None, label=None):
26
- return prompt(
27
- request_id,
28
- prompt_name,
29
- prompt_input_variables,
30
- version,
31
- label,
32
- self.api_key,
33
- )
27
+ return prompt(self.api_key, self.base_url, request_id, prompt_name, prompt_input_variables, version, label)
34
28
 
35
29
  def score(self, request_id, score, score_name=None):
36
- return score_(request_id, score, score_name, self.api_key)
30
+ return score_(self.api_key, self.base_url, request_id, score, score_name)
37
31
 
38
32
 
39
33
  class AsyncTrackManager:
40
- def __init__(self, api_key: str):
34
+ def __init__(self, api_key: str, base_url: str):
41
35
  self.api_key = api_key
36
+ self.base_url = base_url
42
37
 
43
38
  async def group(self, request_id, group_id):
44
- return await agroup(request_id, group_id, self.api_key)
39
+ return await agroup(self.api_key, self.base_url, request_id, group_id)
45
40
 
46
41
  async def metadata(self, request_id, metadata):
47
- return await ametadata(request_id, metadata, self.api_key)
42
+ return await ametadata(self.api_key, self.base_url, request_id, metadata, self.api_key)
48
43
 
49
44
  async def prompt(self, request_id, prompt_name, prompt_input_variables, version=None, label=None):
50
45
  return await aprompt(
51
- request_id,
52
- prompt_name,
53
- prompt_input_variables,
54
- version,
55
- label,
56
- self.api_key,
46
+ self.api_key, self.base_url, request_id, prompt_name, prompt_input_variables, version, label
57
47
  )
58
48
 
59
49
  async def score(self, request_id, score, score_name=None):
60
- return await ascore(request_id, score, score_name, self.api_key)
50
+ return await ascore(self.api_key, self.base_url, request_id, score, score_name)
61
51
 
62
52
 
63
53
  __all__ = ["TrackManager"]
@@ -11,72 +11,78 @@ from promptlayer.utils import (
11
11
 
12
12
 
13
13
  def prompt(
14
+ api_key: str,
15
+ base_url: str,
14
16
  request_id,
15
17
  prompt_name,
16
18
  prompt_input_variables,
17
19
  version=None,
18
20
  label=None,
19
- api_key: str = None,
20
21
  ):
21
22
  if not isinstance(prompt_input_variables, dict):
22
23
  raise Exception("Please provide a dictionary of input variables.")
23
- return promptlayer_track_prompt(request_id, prompt_name, prompt_input_variables, api_key, version, label)
24
+ return promptlayer_track_prompt(
25
+ api_key, base_url, request_id, prompt_name, prompt_input_variables, api_key, version, label
26
+ )
24
27
 
25
28
 
26
- def metadata(request_id, metadata, api_key: str = None):
29
+ def metadata(api_key: str, base_url: str, request_id, metadata):
27
30
  if not isinstance(metadata, dict):
28
31
  raise Exception("Please provide a dictionary of metadata.")
29
32
  for key, value in metadata.items():
30
33
  if not isinstance(key, str) or not isinstance(value, str):
31
34
  raise Exception("Please provide a dictionary of metadata with key value pair of strings.")
32
- return promptlayer_track_metadata(request_id, metadata, api_key)
35
+ return promptlayer_track_metadata(api_key, base_url, request_id, metadata)
33
36
 
34
37
 
35
- def score(request_id, score, score_name=None, api_key: str = None):
38
+ def score(api_key: str, base_url: str, request_id, score, score_name=None):
36
39
  if not isinstance(score, int):
37
40
  raise Exception("Please provide a int score.")
38
41
  if not isinstance(score_name, str) and score_name is not None:
39
42
  raise Exception("Please provide a string as score name.")
40
43
  if score < 0 or score > 100:
41
44
  raise Exception("Please provide a score between 0 and 100.")
42
- return promptlayer_track_score(request_id, score, score_name, api_key)
45
+ return promptlayer_track_score(api_key, base_url, request_id, score, score_name)
43
46
 
44
47
 
45
- def group(request_id, group_id, api_key: str = None):
46
- return promptlayer_track_group(request_id, group_id, api_key)
48
+ def group(api_key: str, base_url: str, request_id, group_id):
49
+ return promptlayer_track_group(api_key, base_url, request_id, group_id)
47
50
 
48
51
 
49
52
  async def aprompt(
53
+ api_key: str,
54
+ base_url: str,
50
55
  request_id,
51
56
  prompt_name,
52
57
  prompt_input_variables,
53
58
  version=None,
54
59
  label=None,
55
- api_key: str = None,
56
60
  ):
57
61
  if not isinstance(prompt_input_variables, dict):
58
62
  raise Exception("Please provide a dictionary of input variables.")
59
- return await apromptlayer_track_prompt(request_id, prompt_name, prompt_input_variables, api_key, version, label)
63
+ return await apromptlayer_track_prompt(
64
+ api_key, base_url, request_id, prompt_name, prompt_input_variables, version, label
65
+ )
60
66
 
61
67
 
62
- async def ametadata(request_id, metadata, api_key: str = None):
68
+ async def ametadata(api_key: str, base_url: str, request_id, metadata):
63
69
  if not isinstance(metadata, dict):
64
70
  raise Exception("Please provide a dictionary of metadata.")
65
71
  for key, value in metadata.items():
66
72
  if not isinstance(key, str) or not isinstance(value, str):
67
73
  raise Exception("Please provide a dictionary of metadata with key-value pairs of strings.")
68
- return await apromptlayer_track_metadata(request_id, metadata, api_key)
74
+ return await apromptlayer_track_metadata(api_key, base_url, request_id, metadata)
69
75
 
70
76
 
71
- async def ascore(request_id, score, score_name=None, api_key: str = None):
77
+ async def ascore(api_key: str, base_url: str, request_id, score, score_name=None):
72
78
  if not isinstance(score, int):
73
79
  raise Exception("Please provide an integer score.")
74
80
  if not isinstance(score_name, str) and score_name is not None:
75
81
  raise Exception("Please provide a string as score name.")
76
82
  if score < 0 or score > 100:
77
83
  raise Exception("Please provide a score between 0 and 100.")
78
- return await apromptlayer_track_score(request_id, score, score_name, api_key)
84
+ return await apromptlayer_track_score(api_key, base_url, request_id, score, score_name)
79
85
 
80
86
 
81
- async def agroup(request_id, group_id, api_key: str = None):
82
- return await apromptlayer_track_group(request_id, group_id, api_key)
87
+ async def agroup(api_key: str, base_url: str, request_id, group_id):
88
+ return await apromptlayer_track_group(api_key, base_url, request_id, group_id)